all: merge master (743372f) into gopls-release-branch.0.12

For golang/go#59818

Conflicts:

- gopls/go.sum

Merge List:

+ 2023-05-16 743372f58 gopls/internal/lsp/debug: limit to 100 recent trace entries
+ 2023-05-16 522243a71 go/ssa/interp: avoid hard-coding GOOS and GOARCH
+ 2023-05-16 a0593829f gopls/internal/lsp/cache: fail workspace load on context cancellation
+ 2023-05-16 e5c8d4db7 gopls/internal/lsp/cache: unexport ValidBuildConfiguration
+ 2023-05-16 f4e8a711a go/ssa: use core type for composite literal addresses
+ 2023-05-16 3b25dbddd go/ssa: use core types for array length
+ 2023-05-16 e7048d518 gopls/internal/lsp/filecache: front with a 100MB in-memory LRU cache
+ 2023-05-16 8c0fcd2c6 gopls/internal/lsp/lru: extract LRU logic to a standalone package
+ 2023-05-16 19d700c67 go/ssa/interp: use the actual GOOS and GOARCH to interpret tests
+ 2023-05-16 9dcd3d5dc gopls/internal/lsp/filecache: limit Get parallelism too
+ 2023-05-16 7f203f0c3 go/ssa: consolidate use of underlying pointer
+ 2023-05-15 a13793e31 gopls/internal/lsp: add quick-fixes to manage the go.work file
+ 2023-05-15 12a0517ad internal/gcimporter: improve error handling
+ 2023-05-15 5eb1eb932 gopls/internal/lsp/cache: call filecache.Set asynchronously
+ 2023-05-15 35fe77a6b gopls/internal/lsp/filecache: limit parallelism in Set
+ 2023-05-12 1c9fe3f82 gopls: improve diagnostics for orphaned files
+ 2023-05-12 ad74ff634 go/gcexportdata: drop support for the ancient binary format
+ 2023-05-11 01128f9fb gopls/internal/lsp/source: fix bug renaming field to unexported
+ 2023-05-11 cd39d2be4 internal/lsp/cache: support loading multiple orphaned files
+ 2023-05-11 abeba28e7 gopls/internal/regtest/marker: support proxy files in marker tests
+ 2023-05-11 18186f0c6 go/analysis/passes/slog: simplify function matching
+ 2023-05-11 9aa9d134d cmd/bisect, internal/bisect: use more precise skip suffixes, written in hex
+ 2023-05-11 3034d9c3a gopls/internal/lsp/cmd: append, don't overwrite, in PublishDiagnostics
+ 2023-05-10 4318d630d gopls: change the default value of "symbolScope" to "all"
+ 2023-05-10 f44f50e39 gopls/internal/lsp/source: implementation: report builtin 'error'
+ 2023-05-10 787e7207e gopls/internal/lsp: optimize checks for ignored files
+ 2023-05-10 4ed7de18d gopls: add google-cloud-go as a benchmark repo
+ 2023-05-10 d799eba7d gopls: additional instrumentation during goimports
+ 2023-05-10 a7e7dc44c go/ssa: rename test cases in TestGenericBodies
+ 2023-05-10 d668f586c gopls/internal/regtest/marker: require cgo for issue59944.txt
+ 2023-05-10 005fa64ff go/analysis/passes/slog: add check for slog.Group
+ 2023-05-09 033e628ac go/ssa: more reindentation of TestGenericBodies
+ 2023-05-09 231084894 go/ssa: reindent test cases in TestGenericBodies
+ 2023-05-09 90e9c6599 gopls/internal/lsp/cache: skip type errors after parse errors
+ 2023-05-09 08b24db43 gopls/internal/lsp/regtest: check for "// @marker" rogue space
+ 2023-05-09 b9c20f65e gopls/internal/regtest/marker: migrate @diag to new marker tests
+ 2023-05-09 4609d79b0 cmd/bisect: add -compile and -godebug shorthands
+ 2023-05-09 ddfa2200a internal/fuzzy: improvements to the symbol scoring algorithm
+ 2023-05-09 344924276 go/types/objectpath: don't panic when receiver is missing a method
+ 2023-05-08 0809ec2e4 gopls/internal/lsp/source: document {All,Workspace}Metadata
+ 2023-05-08 8f7fb01dd go/analysis/unitchecker: add test of go vet on std
+ 2023-05-08 23e52a3e1 bisect: diagnose bad targets better
+ 2023-05-08 d5af8894f gopls: set GOWORK=off for loads from debug and safetoken tests
+ 2023-05-08 c93329a94 go/analysis/passes/printf: reshorten diagnostic about %s in Println call
+ 2023-05-08 62197261c go.mod: update golang.org/x dependencies
+ 2023-05-08 f4d143ebc go/ssa: cleanup TestGenericBodies to pickup package name
+ 2023-05-08 3d99ebe9f gopls/diff/unified: remove redundant information
+ 2023-05-05 479f5c690 go/ssa: Origin is only available after building
+ 2023-05-05 6d1dd1267 go/analysis: simplify unusedresult
+ 2023-05-05 4a2dd0d22 go/callgraph/vta: allow pointer to type parameter in sliceArrayElem
+ 2023-05-05 7c6c2b7f7 go/ssa: keep syntax for instantiations
+ 2023-05-05 ce1b96b59 all: fix some comments
+ 2023-05-05 0500fd459 go/ssa: use core type in address
+ 2023-05-05 8e9b1853d gopls/internal/lsp/source: add the "symbolScope" option
+ 2023-05-05 4ac71c005 internal/imports: sort fixes for deterministic results
+ 2023-05-05 573915d85 gopls/internal/lsp/cache: always init the resolver in runProcessEnvFunc
+ 2023-05-05 dd0938175 bisect: move to internal/bisect until API is reviewed
+ 2023-05-05 516063ad6 bisect, cmd/bisect: add new library and tool
+ 2023-05-05 58fedf60c internal/diffp: add patience diff from main repo
+ 2023-05-04 7b684a9a8 gopls: port import tests to the new marker framework
+ 2023-05-04 87974e327 gopls/internal/lsp/filecache: skip TestConcurrency on plan9 builder
+ 2023-05-04 c64bb76ef gopls/internal/lsp/source: make infertypeargs a convenience analyzer
+ 2023-05-04 91b7a8edf go/ssa: use origin method in source lookup
+ 2023-05-03 ba892bba5 go/types/internal/play: show formatted AST
+ 2023-05-03 7df6af86e gopls/internal/lsp/filecache: silently discard CRC errors on macOS
+ 2023-05-02 2563079e1 go/analysis/passes/printf: update directive diagnostic message
+ 2023-05-02 3f6c3b33c gopls: upgrade to latest version of LSP protocol
+ 2023-05-02 216928d1c gopls: port function extraction marker tests to the new framework
+ 2023-05-02 2415ce159 gopls: skip tests that load gopls packages if x/tools replacement is missing
+ 2023-05-02 17e5ef3c9 gopls/internal/lsp/cache: remove cycle check from buildMetadata
+ 2023-05-02 affb5fc1a gopls/internal/lsp/source: fix crash in definitions of builtins
+ 2023-05-02 558d7011f gopls/internal/lsp/source: fix another bug in reference expansion
+ 2023-05-01 51dfad7f4 gopls: port workspace/symbol marker tests to the new framework
+ 2023-05-01 c6b5780e5 gopls: fix build at older Go versions
+ 2023-05-01 7590fe4de gopls/internal/lsp: hover over linkname directives.
+ 2023-05-01 ec61ad324 gopls/internal/lsp/source: add invert-if-condition refactoring
+ 2023-04-28 5283a0178 go/packages: delete TestCgoOption as it doesn't test the cgo option
+ 2023-04-28 e2f3b2509 gopls/internal/bug: record bug reports in file cache
+ 2023-04-28 ab24b7b9e gopls/internal/regtest/marker: add a test case for issue #59554
+ 2023-04-28 8235df3e0 gopls/internal/regtest/marker: port format tests to the new framework
+ 2023-04-28 80dc03199 internal/diff: unified: match diff delete for empty file
+ 2023-04-27 165099bd7 all: remove repeated definite articles
+ 2023-04-27 ac40903ee internal/gocommand: simplify GoVersion invocation
+ 2023-04-27 9e0a7b105 copyright: don't require checkout to be named tools
+ 2023-04-26 94ed378f9 internal/lsp/testdata: update for new diagnostic from go list
+ 2023-04-26 a5c37ba21 gopls/internal/regtest/marker: skip on small machines
+ 2023-04-25 76e1037d7 internal/testenv: assume that mobile platforms are small machines
+ 2023-04-25 17e2d7055 gopls/internal/bug: remove unused Data parameter
+ 2023-04-25 dd89a2eb9 gopls/internal/lsp: remove now redundant debouncing logic
+ 2023-04-25 2a9398d92 go/analysis/passes/slog: do not report multiple incorrect keys
+ 2023-04-25 4baa3dc48 gopls/internal/bug: move bug package into gopls

Change-Id: I4cb08fe593bbc00102dbd266ad4ef1b6a1150e08
diff --git a/cmd/bisect/go119.go b/cmd/bisect/go119.go
new file mode 100644
index 0000000..debe4e0
--- /dev/null
+++ b/cmd/bisect/go119.go
@@ -0,0 +1,13 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.20
+
+package main
+
+import "os/exec"
+
+func cmdInterrupt(cmd *exec.Cmd) {
+	// cmd.Cancel and cmd.WaitDelay not available before Go 1.20.
+}
diff --git a/cmd/bisect/go120.go b/cmd/bisect/go120.go
new file mode 100644
index 0000000..c85edf7
--- /dev/null
+++ b/cmd/bisect/go120.go
@@ -0,0 +1,26 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+
+package main
+
+import (
+	"os"
+	"os/exec"
+	"time"
+)
+
+func cmdInterrupt(cmd *exec.Cmd) {
+	cmd.Cancel = func() error {
+		// On timeout, send interrupt,
+		// in hopes of shutting down process tree.
+		// Ignore errors sending signal; it's all best effort
+		// and not even implemented on Windows.
+		// TODO(rsc): Maybe use a new process group and kill the whole group?
+		cmd.Process.Signal(os.Interrupt)
+		return nil
+	}
+	cmd.WaitDelay = 2 * time.Second
+}
diff --git a/cmd/bisect/main.go b/cmd/bisect/main.go
new file mode 100644
index 0000000..6a3745c
--- /dev/null
+++ b/cmd/bisect/main.go
@@ -0,0 +1,733 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Bisect finds changes responsible for causing a failure.
+// A typical use is to identify the source locations in a program
+// that are miscompiled by a given compiler optimization.
+//
+// Usage:
+//
+//	bisect [flags] [var=value...] command [arguments...]
+//
+// Bisect operates on a target command line – the target – that can be
+// run with various changes individually enabled or disabled. With none
+// of the changes enabled, the target is known to succeed (exit with exit
+// code zero). With all the changes enabled, the target is known to fail
+// (exit any other way). Bisect repeats the target with different sets of
+// changes enabled, using binary search to find (non-overlapping) minimal
+// change sets that provoke the failure.
+//
+// The target must cooperate with bisect by accepting a change pattern
+// and then enabling and reporting the changes that match that pattern.
+// The change pattern is passed to the target by substituting it anywhere
+// the string PATTERN appears in the environment values or the command
+// arguments. For each change that matches the pattern, the target must
+// enable that change and also print one or more “match lines”
+// (to standard output or standard error) describing the change.
+// The [golang.org/x/tools/internal/bisect] package provides functions to help
+// targets implement this protocol. We plan to publish that package
+// in a non-internal location after finalizing its API.
+//
+// Bisect starts by running the target with no changes enabled and then
+// with all changes enabled. It expects the former to succeed and the latter to fail,
+// and then it will search for the minimal set of changes that must be enabled
+// to provoke the failure. If the situation is reversed – the target fails with no
+// changes enabled and succeeds with all changes enabled – then bisect
+// automatically runs in reverse as well, searching for the minimal set of changes
+// that must be disabled to provoke the failure.
+//
+// Bisect prints tracing logs to standard error and the minimal change sets
+// to standard output.
+//
+// # Command Line Flags
+//
+// Bisect supports the following command-line flags:
+//
+//	-max=M
+//
+// Stop after finding M minimal change sets. The default is no maximum, meaning to run until
+// all changes that provoke a failure have been identified.
+//
+//	-maxset=S
+//
+// Disallow change sets larger than S elements. The default is no maximum.
+//
+//	-timeout=D
+//
+// If the target runs for longer than duration D, stop the target and interpret that as a failure.
+// The default is no timeout.
+//
+//	-count=N
+//
+// Run each trial N times (default 2), checking for consistency.
+//
+//	-v
+//
+// Print verbose output, showing each run and its match lines.
+//
+// In addition to these general flags,
+// bisect supports a few “shortcut” flags that make it more convenient
+// to use with specific targets.
+//
+//	-compile=<rewrite>
+//
+// This flag is equivalent to adding an environment variable
+// “GOCOMPILEDEBUG=<rewrite>hash=PATTERN”,
+// which, as discussed in more detail in the example below,
+// allows bisect to identify the specific source locations where the
+// compiler rewrite causes the target to fail.
+//
+//	-godebug=<name>=<value>
+//
+// This flag is equivalent to adding an environment variable
+// “GODEBUG=<name>=<value>#PATTERN”,
+// which allows bisect to identify the specific call stacks where
+// the changed [GODEBUG setting] value causes the target to fail.
+//
+// # Example
+//
+// The Go compiler provides support for enabling or disabling certain rewrites
+// and optimizations to allow bisect to identify specific source locations where
+// the rewrite causes the program to fail. For example, to bisect a failure caused
+// by the new loop variable semantics:
+//
+//	bisect go test -gcflags=all=-d=loopvarhash=PATTERN
+//
+// The -gcflags=all= flag instructs the go command to pass the -d=... flag to the Go compiler
+// when compiling all packages. Bisect varies PATTERN to determine the minimal set of changes
+// needed to reproduce the failure.
+//
+// The go command also checks the GOCOMPILEDEBUG environment variable for flags
+// to pass to the compiler, so the above command is equivalent to:
+//
+//	bisect GOCOMPILEDEBUG=loopvarhash=PATTERN go test
+//
+// Finally, as mentioned earlier, the -compile flag allows shortening this command further:
+//
+//	bisect -compile=loopvar go test
+//
+// # Defeating Build Caches
+//
+// Build systems cache build results, to avoid repeating the same compilations
+// over and over. When using a cached build result, the go command (correctly)
+// reprints the cached standard output and standard error associated with that
+// command invocation. (This makes commands like 'go build -gcflags=-S' for
+// printing an assembly listing work reliably.)
+//
+// Unfortunately, most build systems, including Bazel, are not as careful
+// as the go command about reprinting compiler output. If the compiler is
+// what prints match lines, a build system that suppresses compiler
+// output when using cached compiler results will confuse bisect.
+// To defeat such build caches, bisect replaces the literal text “RANDOM”
+// in environment values and command arguments with a random 64-bit value
+// during each invocation. The Go compiler conveniently accepts a
+// -d=ignore=... debug flag that ignores its argument, so to run the
+// previous example using Bazel, the invocation is:
+//
+//	bazel test --define=gc_goopts=-d=loopvarhash=PATTERN,unused=RANDOM //path/to:test
+//
+// [GODEBUG setting]: https://tip.golang.org/doc/godebug
+package main
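+
+// As a rough sketch (not a prescribed integration), a hypothetical target
+// built on the golang.org/x/tools/internal/bisect package might cooperate
+// with bisect along these lines:
+//
+//	m, err := bisect.New(pattern) // pattern arrives via PATTERN substitution
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	for i, c := range changes {
+//		if m.ShouldEnable(uint64(i)) {
+//			enable(c)
+//		}
+//		if m.ShouldReport(uint64(i)) {
+//			fmt.Println(describe(c), bisect.Marker(uint64(i)))
+//		}
+//	}
+//
+// Here changes, enable, and describe stand in for the target's own data;
+// main_test.go exercises the same Matcher calls.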
+
+import (
+	"context"
+	"flag"
+	"fmt"
+	"io"
+	"log"
+	"math/bits"
+	"math/rand"
+	"os"
+	"os/exec"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"golang.org/x/tools/internal/bisect"
+)
+
+// Preserve import of bisect, to allow [bisect.Match] in the doc comment.
+var _ bisect.Matcher
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "usage: bisect [flags] [var=value...] command [arguments...]\n")
+	flag.PrintDefaults()
+	os.Exit(2)
+}
+
+func main() {
+	log.SetFlags(0)
+	log.SetPrefix("bisect: ")
+
+	var b Bisect
+	b.Stdout = os.Stdout
+	b.Stderr = os.Stderr
+	flag.IntVar(&b.Max, "max", 0, "stop after finding `m` failing change sets")
+	flag.IntVar(&b.MaxSet, "maxset", 0, "do not search for change sets larger than `s` elements")
+	flag.DurationVar(&b.Timeout, "timeout", 0, "stop target and consider failed after duration `d`")
+	flag.IntVar(&b.Count, "count", 2, "run target `n` times for each trial")
+	flag.BoolVar(&b.Verbose, "v", false, "enable verbose output")
+
+	env := ""
+	envFlag := ""
+	flag.Func("compile", "bisect source locations affected by Go compiler `rewrite` (fma, loopvar, ...)", func(value string) error {
+		if envFlag != "" {
+			return fmt.Errorf("cannot use -%s and -compile", envFlag)
+		}
+		envFlag = "compile"
+		env = "GOCOMPILEDEBUG=" + value + "hash=PATTERN"
+		return nil
+	})
+	flag.Func("godebug", "bisect call stacks affected by GODEBUG setting `name=value`", func(value string) error {
+		if envFlag != "" {
+			return fmt.Errorf("cannot use -%s and -godebug", envFlag)
+		}
+		envFlag = "godebug"
+		env = "GODEBUG=" + value + "#PATTERN"
+		return nil
+	})
+
+	flag.Usage = usage
+	flag.Parse()
+	args := flag.Args()
+
+	// Split command line into env settings, command name, args.
+	i := 0
+	for i < len(args) && strings.Contains(args[i], "=") {
+		i++
+	}
+	if i == len(args) {
+		usage()
+	}
+	b.Env, b.Cmd, b.Args = args[:i], args[i], args[i+1:]
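+	// For example, "bisect GOCOMPILEDEBUG=loopvarhash=PATTERN go test" splits
+	// into Env ["GOCOMPILEDEBUG=loopvarhash=PATTERN"], Cmd "go", and Args ["test"].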
+	if env != "" {
+		b.Env = append([]string{env}, b.Env...)
+	}
+
+	// Check that PATTERN is available for us to vary.
+	found := false
+	for _, e := range b.Env {
+		if _, v, _ := strings.Cut(e, "="); strings.Contains(v, "PATTERN") {
+			found = true
+		}
+	}
+	for _, a := range b.Args {
+		if strings.Contains(a, "PATTERN") {
+			found = true
+		}
+	}
+	if !found {
+		log.Fatalf("no PATTERN in target environment or args")
+	}
+
+	if !b.Search() {
+		os.Exit(1)
+	}
+}
+
+// A Bisect holds the state for a bisect invocation.
+type Bisect struct {
+	// Env is the additional environment variables for the command.
+	// PATTERN and RANDOM are substituted in the values, but not the names.
+	Env []string
+
+	// Cmd is the command (program name) to run.
+	// PATTERN and RANDOM are not substituted.
+	Cmd string
+
+	// Args is the command arguments.
+	// PATTERN and RANDOM are substituted anywhere they appear.
+	Args []string
+
+	// Command-line flags controlling bisect behavior.
+	Max     int           // maximum number of sets to report (0 = unlimited)
+	MaxSet  int           // maximum number of elements in a set (0 = unlimited)
+	Timeout time.Duration // kill target and assume failed after this duration (0 = unlimited)
+	Count   int           // run target this many times for each trial and give up if flaky (min 1 assumed; default 2 on command line set in main)
+	Verbose bool          // print long output about each trial (only useful for debugging bisect itself)
+
+	// State for running bisect, replaced during testing.
+	// Failing change sets are printed to Stdout; all other output goes to Stderr.
+	Stdout  io.Writer                                                             // where to write standard output (usually os.Stdout)
+	Stderr  io.Writer                                                             // where to write standard error (usually os.Stderr)
+	TestRun func(env []string, cmd string, args []string) (out []byte, err error) // if non-nil, used instead of exec.Command
+
+	// State maintained by Search.
+
+	// By default, Search looks for a minimal set of changes that cause a failure when enabled.
+	// If Disable is true, the search is inverted and seeks a minimal set of changes that
+	// cause a failure when disabled. In this case, the search proceeds as normal except that
+	// each pattern starts with a !.
+	Disable bool
+
+	// SkipHexDigits is the number of hex digits to use in skip messages.
+	// If the set of available changes is the same in each run, as it should be,
+	// then this doesn't matter: we'll only exclude suffixes that uniquely identify
+	// a given change. But for some programs, especially bisecting runtime
+	// behaviors, sometimes enabling one change unlocks questions about other
+	// changes. Strictly speaking this is a misuse of bisect, but just to make
+	// bisect more robust, we use the y and n runs to create an estimate of the
+	// number of bits needed for a unique suffix, and then we round it up to
+	// a number of hex digits, with one extra digit for good measure, and then
+	// we always use that many hex digits for skips.
+	SkipHexDigits int
+
+	// Add is a list of suffixes to add to every trial, because they
+	// contain changes that are necessary for a group we are assembling.
+	Add []string
+
+	// Skip is a list of suffixes that uniquely identify changes to exclude from every trial,
+	// because they have already been used in failing change sets.
+	// Suffixes later in the list may only be unique after removing
+	// the ones earlier in the list.
+	// Skip applies after Add.
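+	// A skip suffix has the form "x" followed by SkipHexDigits hex digits,
+	// such as "x002", and appears in patterns as "-x002" (see testdata/basic.txt).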
+	Skip []string
+}
+
+// A Result holds the result of a single target trial.
+type Result struct {
+	Success bool   // whether the target succeeded (exited with zero status)
+	Cmd     string // full target command line
+	Out     string // full target output (stdout and stderr combined)
+
+	Suffix    string   // the suffix used for collecting MatchIDs, MatchText, and MatchFull
+	MatchIDs  []uint64 // match IDs enabled during this trial
+	MatchText []string // match reports for the IDs, with match markers removed
+	MatchFull []string // full match lines for the IDs, with match markers kept
+}
+
+// &searchFatal is a special panic value to signal that Search failed.
+// This lets us unwind the search recursion on a fatal error
+// but have Search return normally.
+var searchFatal int
+
+// Search runs a bisect search according to the configuration in b.
+// It reports whether any failing change sets were found.
+func (b *Bisect) Search() bool {
+	defer func() {
+		// Recover from panic(&searchFatal), implicitly returning false from Search.
+		// Re-panic on any other panic.
+		if e := recover(); e != nil && e != &searchFatal {
+			panic(e)
+		}
+	}()
+
+	// Run with no changes and all changes, to figure out which direction we're searching.
+	// The goal is to find the minimal set of changes to toggle
+	// starting with the state where everything works.
+	// If "no changes" succeeds and "all changes" fails,
+	// we're looking for a minimal set of changes to enable to provoke the failure
+	// (broken = runY, b.Disable = false).
+	// If "no changes" fails and "all changes" succeeds,
+	// we're looking for a minimal set of changes to disable to provoke the failure
+	// (broken = runN, b.Disable = true).
+
+	b.Logf("checking target with all changes disabled")
+	runN := b.Run("n")
+
+	b.Logf("checking target with all changes enabled")
+	runY := b.Run("y")
+
+	var broken *Result
+	switch {
+	case runN.Success && !runY.Success:
+		b.Logf("target succeeds with no changes, fails with all changes")
+		b.Logf("searching for minimal set of enabled changes causing failure")
+		broken = runY
+		b.Disable = false
+
+	case !runN.Success && runY.Success:
+		b.Logf("target fails with no changes, succeeds with all changes")
+		b.Logf("searching for minimal set of disabled changes causing failure")
+		broken = runN
+		b.Disable = true
+
+	case runN.Success && runY.Success:
+		b.Fatalf("target succeeds with no changes and all changes")
+
+	case !runN.Success && !runY.Success:
+		b.Fatalf("target fails with no changes and all changes")
+	}
+
+	// Compute minimum number of bits needed to distinguish
+	// all the changes we saw during N and all the changes we saw during Y.
+	b.SkipHexDigits = skipHexDigits(runN.MatchIDs, runY.MatchIDs)
+
+	// Loop finding and printing change sets, until none remain.
+	found := 0
+	for {
+		// Find set.
+		bad := b.search(broken)
+		if bad == nil {
+			if found == 0 {
+				b.Fatalf("cannot find any failing change sets of size ≤ %d", b.MaxSet)
+			}
+			break
+		}
+
+		// Confirm that set really does fail, to avoid false accusations.
+		// Also asking for user-visible output; earlier runs did not.
+		b.Logf("confirming failing change set")
+		b.Add = append(b.Add[:0], bad...)
+		broken = b.Run("v")
+		if broken.Success {
+			b.Logf("confirmation run succeeded unexpectedly")
+		}
+		b.Add = b.Add[:0]
+
+		// Print confirmed change set.
+		found++
+		b.Logf("FOUND failing change set")
+		desc := "(enabling changes causes failure)"
+		if b.Disable {
+			desc = "(disabling changes causes failure)"
+		}
+		fmt.Fprintf(b.Stdout, "--- change set #%d %s\n%s\n---\n", found, desc, strings.Join(broken.MatchText, "\n"))
+
+		// Stop if we've found enough change sets.
+		if b.Max > 0 && found >= b.Max {
+			break
+		}
+
+		// If running 'bisect target | tee bad.txt', the prints to stdout and stderr
+		// both appear on the terminal, but the ones to stdout go through tee
+		// and can take a little bit of extra time. Sleep 1 millisecond to give
+		// tee time to catch up, so that its stdout print does not get interlaced
+		// with the stderr print from the next b.Log message.
+		time.Sleep(1 * time.Millisecond)
+
+		// Disable the now-known-bad changes and see if any failures remain.
+		b.Logf("checking for more failures")
+		b.Skip = append(bad, b.Skip...)
+		broken = b.Run("")
+		if broken.Success {
+			what := "enabled"
+			if b.Disable {
+				what = "disabled"
+			}
+			b.Logf("target succeeds with all remaining changes %s", what)
+			break
+		}
+		b.Logf("target still fails; searching for more bad changes")
+	}
+	return true
+}
+
+// Fatalf prints a message to standard error and then panics,
+// causing Search to return false.
+func (b *Bisect) Fatalf(format string, args ...any) {
+	s := fmt.Sprintf("bisect: fatal error: "+format, args...)
+	if !strings.HasSuffix(s, "\n") {
+		s += "\n"
+	}
+	b.Stderr.Write([]byte(s))
+	panic(&searchFatal)
+}
+
+// Logf prints a message to standard error.
+func (b *Bisect) Logf(format string, args ...any) {
+	s := fmt.Sprintf("bisect: "+format, args...)
+	if !strings.HasSuffix(s, "\n") {
+		s += "\n"
+	}
+	b.Stderr.Write([]byte(s))
+}
+
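+// skipHexDigits returns the number of trailing hex digits needed to give
+// every change ID seen in the y and n runs a unique suffix, plus one extra
+// digit for good measure. For example, for the IDs 0x12 and 0x1a the last
+// hex digit (2 vs a) already distinguishes them, so skipHexDigits returns 2.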
+func skipHexDigits(idY, idN []uint64) int {
+	var all []uint64
+	seen := make(map[uint64]bool)
+	for _, x := range idY {
+		seen[x] = true
+		all = append(all, x)
+	}
+	for _, x := range idN {
+		if !seen[x] {
+			seen[x] = true
+			all = append(all, x)
+		}
+	}
+	sort.Slice(all, func(i, j int) bool { return bits.Reverse64(all[i]) < bits.Reverse64(all[j]) })
+	digits := sort.Search(64/4, func(digits int) bool {
+		mask := uint64(1)<<(4*digits) - 1
+		for i := 0; i+1 < len(all); i++ {
+			if all[i]&mask == all[i+1]&mask {
+				return false
+			}
+		}
+		return true
+	})
+	if digits < 64/4 {
+		digits++
+	}
+	return digits
+}
+
+// search searches for a single locally minimal change set.
+//
+// Invariant: r describes the result of r.Suffix + b.Add, which failed.
+// (There's an implicit -b.Skip everywhere here. b.Skip does not change.)
+// We want to extend r.Suffix to preserve the failure, working toward
+// a suffix that identifies a single change.
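+//
+// For example, in testdata/basic.txt the failing suffix is extended from
+// "0" to "10" to "010" and so on up to "0000010", at which point a single
+// match remains.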
+func (b *Bisect) search(r *Result) []string {
+	// The caller should be passing in a failure result that we diagnose.
+	if r.Success {
+		b.Fatalf("internal error: unexpected success") // mistake by caller
+	}
+
+	// If the failure reported no changes, the target is misbehaving.
+	if len(r.MatchIDs) == 0 {
+		b.Fatalf("failure with no reported changes:\n\n$ %s\n%s\n", r.Cmd, r.Out)
+	}
+
+	// If there's one matching change, that's the one we're looking for.
+	if len(r.MatchIDs) == 1 {
+		return []string{fmt.Sprintf("x%0*x", b.SkipHexDigits, r.MatchIDs[0]&(1<<(4*b.SkipHexDigits)-1))}
+	}
+
+	// If the suffix we were tracking in the trial is already 64 bits,
+	// either the target is bad or bisect itself is buggy.
+	if len(r.Suffix) >= 64 {
+		b.Fatalf("failed to isolate a single change with very long suffix")
+	}
+
+	// We want to split the current matchIDs by left-extending the suffix with 0 and 1.
+	// If all the matches have the same next bit, that won't cause a split, which doesn't
+	// break the algorithm but does waste time. Avoid wasting time by left-extending
+	// the suffix to the longest suffix shared by all the current match IDs
+	// before adding 0 or 1.
+	suffix := commonSuffix(r.MatchIDs)
+	if !strings.HasSuffix(suffix, r.Suffix) {
+		b.Fatalf("internal error: invalid common suffix") // bug in commonSuffix
+	}
+
+	// Run 0suffix and 1suffix. If one fails, chase down the failure in that half.
+	r0 := b.Run("0" + suffix)
+	if !r0.Success {
+		return b.search(r0)
+	}
+	r1 := b.Run("1" + suffix)
+	if !r1.Success {
+		return b.search(r1)
+	}
+
+	// suffix failed, but 0suffix and 1suffix succeeded.
+	// Assuming the target isn't flaky, this means we need
+	// at least one change from 0suffix AND at least one from 1suffix.
+	// We are already tracking N = len(b.Add) other changes and are
+	// allowed to build sets of size at least 1+N (or we shouldn't be here at all).
+	// If we aren't allowed to build sets of size 2+N, give up this branch.
+	if b.MaxSet > 0 && 2+len(b.Add) > b.MaxSet {
+		return nil
+	}
+
+	// Adding all matches for 1suffix, recurse to narrow down 0suffix.
+	old := len(b.Add)
+	b.Add = append(b.Add, "1"+suffix)
+	r0 = b.Run("0" + suffix)
+	if r0.Success {
+		// 0suffix + b.Add + 1suffix = suffix + b.Add is what r describes, and it failed.
+		b.Fatalf("target fails inconsistently")
+	}
+	bad0 := b.search(r0)
+	if bad0 == nil {
+		// Search failed due to MaxSet limit.
+		return nil
+	}
+	b.Add = b.Add[:old]
+
+	// Adding the specific match we found in 0suffix, recurse to narrow down 1suffix.
+	b.Add = append(b.Add[:old], bad0...)
+	r1 = b.Run("1" + suffix)
+	if r1.Success {
+		// 1suffix + b.Add + bad0 = bad0 + b.Add + 1suffix is what b.search(r0) reported as a failure.
+		b.Fatalf("target fails inconsistently")
+	}
+	bad1 := b.search(r1)
+	if bad1 == nil {
+		// Search failed due to MaxSet limit.
+		return nil
+	}
+	b.Add = b.Add[:old]
+
+	// bad0 and bad1 together provoke the failure.
+	return append(bad0, bad1...)
+}
+
+// Run runs a set of trials selecting changes with the given suffix,
+// plus the ones in b.Add and not the ones in b.Skip.
+// The returned result's MatchIDs, MatchText, and MatchFull
+// only list the changes that match suffix.
+// When b.Count > 1, Run runs b.Count trials and requires
+// that they all succeed or they all fail. If not, it calls b.Fatalf.
+func (b *Bisect) Run(suffix string) *Result {
+	out := b.run(suffix)
+	for i := 1; i < b.Count; i++ {
+		r := b.run(suffix)
+		if r.Success != out.Success {
+			b.Fatalf("target fails inconsistently")
+		}
+	}
+	return out
+}
+
+// run runs a single trial for Run.
+func (b *Bisect) run(suffix string) *Result {
+	random := fmt.Sprint(rand.Uint64())
+
+	// Accept suffix == "v" to mean we need user-visible output.
+	visible := ""
+	if suffix == "v" {
+		visible = "v"
+		suffix = ""
+	}
+
+	// Construct change ID pattern.
+	var pattern string
+	if suffix == "y" || suffix == "n" {
+		pattern = suffix
+		suffix = ""
+	} else {
+		var elem []string
+		if suffix != "" {
+			elem = append(elem, "+", suffix)
+		}
+		for _, x := range b.Add {
+			elem = append(elem, "+", x)
+		}
+		for _, x := range b.Skip {
+			elem = append(elem, "-", x)
+		}
+		pattern = strings.Join(elem, "")
+		if pattern == "" {
+			pattern = "y"
+		}
+	}
+	if b.Disable {
+		pattern = "!" + pattern
+	}
+	pattern = visible + pattern
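+	// For example, suffix "010" with Add ["1"] and Skip ["x002"] yields the
+	// pattern "+010+1-x002", as seen in testdata/double.txt.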
+
+	// Construct substituted env and args.
+	env := make([]string, len(b.Env))
+	for i, x := range b.Env {
+		k, v, _ := strings.Cut(x, "=")
+		env[i] = k + "=" + replace(v, pattern, random)
+	}
+	args := make([]string, len(b.Args))
+	for i, x := range b.Args {
+		args[i] = replace(x, pattern, random)
+	}
+
+	// Construct and log command line.
+	// There is no newline in the log print.
+	// The line will be completed when the command finishes.
+	cmdText := strings.Join(append(append(env, b.Cmd), args...), " ")
+	fmt.Fprintf(b.Stderr, "bisect: run: %s...", cmdText)
+
+	// Run command with args and env.
+	var out []byte
+	var err error
+	if b.TestRun != nil {
+		out, err = b.TestRun(env, b.Cmd, args)
+	} else {
+		ctx := context.Background()
+		if b.Timeout != 0 {
+			var cancel context.CancelFunc
+			ctx, cancel = context.WithTimeout(ctx, b.Timeout)
+			defer cancel()
+		}
+		cmd := exec.CommandContext(ctx, b.Cmd, args...)
+		cmd.Env = append(os.Environ(), env...)
+		// Set up cmd.Cancel, cmd.WaitDelay on Go 1.20 and later
+		// TODO(rsc): Inline go120.go's cmdInterrupt once we stop supporting Go 1.19.
+		cmdInterrupt(cmd)
+		out, err = cmd.CombinedOutput()
+	}
+
+	// Parse output to construct result.
+	r := &Result{
+		Suffix:  suffix,
+		Success: err == nil,
+		Cmd:     cmdText,
+		Out:     string(out),
+	}
+
+	// Calculate bits, mask to identify suffix matches.
+	var bits, mask uint64
+	if suffix != "" && suffix != "y" && suffix != "n" && suffix != "v" {
+		var err error
+		bits, err = strconv.ParseUint(suffix, 2, 64)
+		if err != nil {
+			b.Fatalf("internal error: bad suffix")
+		}
+		mask = uint64(1<<len(suffix)) - 1
+	}
+
+	// Process output, collecting match reports for suffix.
+	have := make(map[uint64]bool)
+	all := r.Out
+	for all != "" {
+		var line string
+		line, all, _ = strings.Cut(all, "\n")
+		short, id, ok := bisect.CutMarker(line)
+		if !ok || (id&mask) != bits {
+			continue
+		}
+
+		if !have[id] {
+			have[id] = true
+			r.MatchIDs = append(r.MatchIDs, id)
+		}
+		r.MatchText = append(r.MatchText, short)
+		r.MatchFull = append(r.MatchFull, line)
+	}
+
+	// Finish log print from above, describing the command's completion.
+	if err == nil {
+		fmt.Fprintf(b.Stderr, " ok (%d matches)\n", len(r.MatchIDs))
+	} else {
+		fmt.Fprintf(b.Stderr, " FAIL (%d matches)\n", len(r.MatchIDs))
+	}
+
+	if err != nil && len(r.MatchIDs) == 0 {
+		b.Fatalf("target failed without printing any matches\n%s", r.Out)
+	}
+
+	// In verbose mode, print extra debugging: all the lines with match markers.
+	if b.Verbose {
+		b.Logf("matches:\n%s", strings.Join(r.MatchFull, "\n\t"))
+	}
+
+	return r
+}
+
+// replace returns x with literal text PATTERN and RANDOM replaced by pattern and random.
+func replace(x, pattern, random string) string {
+	x = strings.ReplaceAll(x, "PATTERN", pattern)
+	x = strings.ReplaceAll(x, "RANDOM", random)
+	return x
+}
+
+// commonSuffix returns the longest common binary suffix shared by all uint64s in list.
+// If list is empty, commonSuffix returns an empty string.
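+// For example, commonSuffix([]uint64{0b1011, 0b0011}) returns "011".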
+func commonSuffix(list []uint64) string {
+	if len(list) == 0 {
+		return ""
+	}
+	b := list[0]
+	n := 64
+	for _, x := range list {
+		for x&((1<<n)-1) != b {
+			n--
+			b &= (1 << n) - 1
+		}
+	}
+	s := make([]byte, n)
+	for i := n - 1; i >= 0; i-- {
+		s[i] = '0' + byte(b&1)
+		b >>= 1
+	}
+	return string(s[:])
+}
diff --git a/cmd/bisect/main_test.go b/cmd/bisect/main_test.go
new file mode 100644
index 0000000..bff1bf2
--- /dev/null
+++ b/cmd/bisect/main_test.go
@@ -0,0 +1,233 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"go/build/constraint"
+	"math/rand"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/bisect"
+	"golang.org/x/tools/internal/diffp"
+	"golang.org/x/tools/txtar"
+)
+
+var update = flag.Bool("update", false, "update testdata with new stdout/stderr")
+
+func Test(t *testing.T) {
+	files, err := filepath.Glob("testdata/*.txt")
+	if err != nil {
+		t.Fatal(err)
+	}
+	for _, file := range files {
+		t.Run(strings.TrimSuffix(filepath.Base(file), ".txt"), func(t *testing.T) {
+			data, err := os.ReadFile(file)
+			if err != nil {
+				t.Fatal(err)
+			}
+			a := txtar.Parse(data)
+			var wantStdout, wantStderr []byte
+			files := a.Files
+			if len(files) > 0 && files[0].Name == "stdout" {
+				wantStdout = files[0].Data
+				files = files[1:]
+			}
+			if len(files) > 0 && files[0].Name == "stderr" {
+				wantStderr = files[0].Data
+				files = files[1:]
+			}
+			if len(files) > 0 {
+				t.Fatalf("unexpected txtar entry: %s", files[0].Name)
+			}
+
+			var tt struct {
+				Fail   string
+				Bisect Bisect
+			}
+			if err := json.Unmarshal(a.Comment, &tt); err != nil {
+				t.Fatal(err)
+			}
+
+			expr, err := constraint.Parse("//go:build " + tt.Fail)
+			if err != nil {
+				t.Fatalf("invalid Fail: %v", err)
+			}
+
+			rnd := rand.New(rand.NewSource(1))
+			b := &tt.Bisect
+			b.Cmd = "test"
+			b.Args = []string{"PATTERN"}
+			var stdout, stderr bytes.Buffer
+			b.Stdout = &stdout
+			b.Stderr = &stderr
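+			// Simulate the target: enable the colors selected by the pattern,
+			// print a match line for every reported ID, and fail exactly when
+			// the archive's Fail expression is satisfied by the enabled set.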
+			b.TestRun = func(env []string, cmd string, args []string) (out []byte, err error) {
+				pattern := args[0]
+				m, err := bisect.New(pattern)
+				if err != nil {
+					t.Fatal(err)
+				}
+				have := make(map[string]bool)
+				for i, color := range colors {
+					if m.ShouldEnable(uint64(i)) {
+						have[color] = true
+					}
+					if m.ShouldReport(uint64(i)) {
+						out = fmt.Appendf(out, "%s %s\n", color, bisect.Marker(uint64(i)))
+					}
+				}
+				err = nil
+				if eval(rnd, expr, have) {
+					err = fmt.Errorf("failed")
+				}
+				return out, err
+			}
+
+			if !b.Search() {
+				stderr.WriteString("<bisect failed>\n")
+			}
+			rewrite := false
+			if !bytes.Equal(stdout.Bytes(), wantStdout) {
+				if *update {
+					rewrite = true
+				} else {
+					t.Errorf("incorrect stdout: %s", diffp.Diff("have", stdout.Bytes(), "want", wantStdout))
+				}
+			}
+			if !bytes.Equal(stderr.Bytes(), wantStderr) {
+				if *update {
+					rewrite = true
+				} else {
+					t.Errorf("incorrect stderr: %s", diffp.Diff("have", stderr.Bytes(), "want", wantStderr))
+				}
+			}
+			if rewrite {
+				a.Files = []txtar.File{{Name: "stdout", Data: stdout.Bytes()}, {Name: "stderr", Data: stderr.Bytes()}}
+				err := os.WriteFile(file, txtar.Format(a), 0666)
+				if err != nil {
+					t.Fatal(err)
+				}
+				t.Logf("updated %s", file)
+			}
+		})
+	}
+}
+
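+// eval reports whether the failure expression z is satisfied by the set of
+// enabled colors in have. The tag "random" is decided by a coin flip from
+// rnd, letting tests simulate flaky targets. For example, with only "apricot"
+// and "peach" enabled, "amber || apricot && peach" evaluates to true
+// (&& binds tighter than ||), so the simulated target fails.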
+func eval(rnd *rand.Rand, z constraint.Expr, have map[string]bool) bool {
+	switch z := z.(type) {
+	default:
+		panic(fmt.Sprintf("unexpected type %T", z))
+	case *constraint.NotExpr:
+		return !eval(rnd, z.X, have)
+	case *constraint.AndExpr:
+		return eval(rnd, z.X, have) && eval(rnd, z.Y, have)
+	case *constraint.OrExpr:
+		return eval(rnd, z.X, have) || eval(rnd, z.Y, have)
+	case *constraint.TagExpr:
+		if z.Tag == "random" {
+			return rnd.Intn(2) == 1
+		}
+		return have[z.Tag]
+	}
+}
+
+var colors = strings.Fields(`
+	aliceblue
+	amaranth
+	amber
+	amethyst
+	applegreen
+	applered
+	apricot
+	aquamarine
+	azure
+	babyblue
+	beige
+	brickred
+	black
+	blue
+	bluegreen
+	blueviolet
+	blush
+	bronze
+	brown
+	burgundy
+	byzantium
+	carmine
+	cerise
+	cerulean
+	champagne
+	chartreusegreen
+	chocolate
+	cobaltblue
+	coffee
+	copper
+	coral
+	crimson
+	cyan
+	desertsand
+	electricblue
+	emerald
+	erin
+	gold
+	gray
+	green
+	harlequin
+	indigo
+	ivory
+	jade
+	junglegreen
+	lavender
+	lemon
+	lilac
+	lime
+	magenta
+	magentarose
+	maroon
+	mauve
+	navyblue
+	ochre
+	olive
+	orange
+	orangered
+	orchid
+	peach
+	pear
+	periwinkle
+	persianblue
+	pink
+	plum
+	prussianblue
+	puce
+	purple
+	raspberry
+	red
+	redviolet
+	rose
+	ruby
+	salmon
+	sangria
+	sapphire
+	scarlet
+	silver
+	slategray
+	springbud
+	springgreen
+	tan
+	taupe
+	teal
+	turquoise
+	ultramarine
+	violet
+	viridian
+	white
+	yellow
+`)
diff --git a/cmd/bisect/rand.go b/cmd/bisect/rand.go
new file mode 100644
index 0000000..daa01d3
--- /dev/null
+++ b/cmd/bisect/rand.go
@@ -0,0 +1,20 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Starting in Go 1.20, the global rand is auto-seeded,
+// with a better value than the current Unix nanoseconds.
+// Only seed if we're using older versions of Go.
+
+//go:build !go1.20
+
+package main
+
+import (
+	"math/rand"
+	"time"
+)
+
+func init() {
+	rand.Seed(time.Now().UnixNano())
+}
diff --git a/cmd/bisect/testdata/README.md b/cmd/bisect/testdata/README.md
new file mode 100644
index 0000000..e5978df
--- /dev/null
+++ b/cmd/bisect/testdata/README.md
@@ -0,0 +1,29 @@
+This directory contains test inputs for the bisect command.
+
+Each text file is a txtar archive (see <https://pkg.go.dev/golang.org/x/tools/txtar>
+or `go doc txtar`).
+
+The comment at the top of the archive is a JSON object describing a
+target behavior. Specifically, the Fail key gives a boolean expression
+that should provoke a failure. Bisect's job is to discover this
+condition.
+
+The Bisect key describes settings in the Bisect struct that we want to
+change, to simulate the use of various command-line options.
+
+The txtar archive files should be "stdout" and "stderr", giving the
+expected standard output and standard error. If the bisect command
+should exit with a non-zero status, the stderr in the archive will end
+with the line "<bisect failed>".
+
+Running `go test -update` will rewrite the stdout and stderr files in
+each testdata archive to match the current state of the tool. This is
+a useful command when the logging prints from bisect change or when
+writing a new test.
+
+To use `go test -update` to write a new test:
+
+ - Create a new .txt file with just a JSON object at the top,
+   specifying what you want to test.
+ - Run `go test -update`.
+ - Reload the .txt file and read the stdout and stderr to see if you agree.
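+
+For example (the color names come from the list in main_test.go), a new test
+that should isolate the pair apricot/peach could start out as just the header
+
+	{"Fail": "apricot && peach"}
+
+after which `go test -update` fills in the stdout and stderr sections.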
diff --git a/cmd/bisect/testdata/basic.txt b/cmd/bisect/testdata/basic.txt
new file mode 100644
index 0000000..10c98df
--- /dev/null
+++ b/cmd/bisect/testdata/basic.txt
@@ -0,0 +1,44 @@
+{"Fail": "amber || apricot"}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+apricot
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... FAIL (44 matches)
+bisect: run: test +00-x002... ok (23 matches)
+bisect: run: test +10-x002... FAIL (21 matches)
+bisect: run: test +010-x002... ok (10 matches)
+bisect: run: test +110-x002... FAIL (11 matches)
+bisect: run: test +0110-x002... FAIL (6 matches)
+bisect: run: test +00110-x002... FAIL (3 matches)
+bisect: run: test +000110-x002... FAIL (2 matches)
+bisect: run: test +0000110-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x006-x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x006-x002... ok (88 matches)
+bisect: target succeeds with all remaining changes enabled
diff --git a/cmd/bisect/testdata/count2.txt b/cmd/bisect/testdata/count2.txt
new file mode 100644
index 0000000..9e7e9f4
--- /dev/null
+++ b/cmd/bisect/testdata/count2.txt
@@ -0,0 +1,67 @@
+{"Fail": "amber || apricot", "Bisect": {"Count": 2}}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+apricot
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... FAIL (44 matches)
+bisect: run: test +0-x002... FAIL (44 matches)
+bisect: run: test +00-x002... ok (23 matches)
+bisect: run: test +00-x002... ok (23 matches)
+bisect: run: test +10-x002... FAIL (21 matches)
+bisect: run: test +10-x002... FAIL (21 matches)
+bisect: run: test +010-x002... ok (10 matches)
+bisect: run: test +010-x002... ok (10 matches)
+bisect: run: test +110-x002... FAIL (11 matches)
+bisect: run: test +110-x002... FAIL (11 matches)
+bisect: run: test +0110-x002... FAIL (6 matches)
+bisect: run: test +0110-x002... FAIL (6 matches)
+bisect: run: test +00110-x002... FAIL (3 matches)
+bisect: run: test +00110-x002... FAIL (3 matches)
+bisect: run: test +000110-x002... FAIL (2 matches)
+bisect: run: test +000110-x002... FAIL (2 matches)
+bisect: run: test +0000110-x002... FAIL (1 matches)
+bisect: run: test +0000110-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x006-x002... FAIL (1 matches)
+bisect: run: test v+x006-x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x006-x002... ok (88 matches)
+bisect: run: test -x006-x002... ok (88 matches)
+bisect: target succeeds with all remaining changes enabled
diff --git a/cmd/bisect/testdata/double.txt b/cmd/bisect/testdata/double.txt
new file mode 100644
index 0000000..427ed09
--- /dev/null
+++ b/cmd/bisect/testdata/double.txt
@@ -0,0 +1,57 @@
+{"Fail": "amber || apricot && peach"}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+apricot
+peach
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... ok (44 matches)
+bisect: run: test +1-x002... ok (45 matches)
+bisect: run: test +0+1-x002... FAIL (44 matches)
+bisect: run: test +00+1-x002... ok (23 matches)
+bisect: run: test +10+1-x002... FAIL (21 matches)
+bisect: run: test +010+1-x002... ok (10 matches)
+bisect: run: test +110+1-x002... FAIL (11 matches)
+bisect: run: test +0110+1-x002... FAIL (6 matches)
+bisect: run: test +00110+1-x002... FAIL (3 matches)
+bisect: run: test +000110+1-x002... FAIL (2 matches)
+bisect: run: test +0000110+1-x002... FAIL (1 matches)
+bisect: run: test +1+x006-x002... FAIL (45 matches)
+bisect: run: test +01+x006-x002... ok (23 matches)
+bisect: run: test +11+x006-x002... FAIL (22 matches)
+bisect: run: test +011+x006-x002... FAIL (11 matches)
+bisect: run: test +0011+x006-x002... ok (6 matches)
+bisect: run: test +1011+x006-x002... FAIL (5 matches)
+bisect: run: test +01011+x006-x002... ok (3 matches)
+bisect: run: test +11011+x006-x002... FAIL (2 matches)
+bisect: run: test +011011+x006-x002... ok (1 matches)
+bisect: run: test +111011+x006-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x006+x03b-x002... FAIL (2 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x006-x03b-x002... ok (87 matches)
+bisect: target succeeds with all remaining changes enabled
diff --git a/cmd/bisect/testdata/max1.txt b/cmd/bisect/testdata/max1.txt
new file mode 100644
index 0000000..4014276
--- /dev/null
+++ b/cmd/bisect/testdata/max1.txt
@@ -0,0 +1,23 @@
+{"Fail": "amber || apricot && peach", "Bisect": {"Max": 1}}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
diff --git a/cmd/bisect/testdata/max2.txt b/cmd/bisect/testdata/max2.txt
new file mode 100644
index 0000000..981b902
--- /dev/null
+++ b/cmd/bisect/testdata/max2.txt
@@ -0,0 +1,59 @@
+{"Fail": "amber || apricot && peach || red && green && blue || cyan && magenta && yellow && black", "Bisect": {"Max": 2}}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+blue
+green
+red
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... ok (44 matches)
+bisect: run: test +1-x002... FAIL (45 matches)
+bisect: run: test +01-x002... ok (23 matches)
+bisect: run: test +11-x002... ok (22 matches)
+bisect: run: test +01+11-x002... FAIL (23 matches)
+bisect: run: test +001+11-x002... ok (12 matches)
+bisect: run: test +101+11-x002... FAIL (11 matches)
+bisect: run: test +0101+11-x002... ok (6 matches)
+bisect: run: test +1101+11-x002... ok (5 matches)
+bisect: run: test +0101+11+1101-x002... FAIL (6 matches)
+bisect: run: test +00101+11+1101-x002... FAIL (3 matches)
+bisect: run: test +000101+11+1101-x002... FAIL (2 matches)
+bisect: run: test +0000101+11+1101-x002... ok (1 matches)
+bisect: run: test +1000101+11+1101-x002... FAIL (1 matches)
+bisect: run: test +1101+11+x045-x002... FAIL (5 matches)
+bisect: run: test +01101+11+x045-x002... FAIL (3 matches)
+bisect: run: test +001101+11+x045-x002... FAIL (2 matches)
+bisect: run: test +0001101+11+x045-x002... FAIL (1 matches)
+bisect: run: test +11+x045+x00d-x002... FAIL (22 matches)
+bisect: run: test +011+x045+x00d-x002... ok (11 matches)
+bisect: run: test +111+x045+x00d-x002... FAIL (11 matches)
+bisect: run: test +0111+x045+x00d-x002... FAIL (6 matches)
+bisect: run: test +00111+x045+x00d-x002... FAIL (3 matches)
+bisect: run: test +000111+x045+x00d-x002... ok (2 matches)
+bisect: run: test +100111+x045+x00d-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x045+x00d+x027-x002... FAIL (3 matches)
+bisect: FOUND failing change set
diff --git a/cmd/bisect/testdata/maxset.txt b/cmd/bisect/testdata/maxset.txt
new file mode 100644
index 0000000..cf8af34
--- /dev/null
+++ b/cmd/bisect/testdata/maxset.txt
@@ -0,0 +1,84 @@
+{"Fail": "amber || apricot && peach || red && green && blue || cyan && magenta && yellow && black", "Bisect": {"MaxSet": 3}}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+blue
+green
+red
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... ok (44 matches)
+bisect: run: test +1-x002... FAIL (45 matches)
+bisect: run: test +01-x002... ok (23 matches)
+bisect: run: test +11-x002... ok (22 matches)
+bisect: run: test +01+11-x002... FAIL (23 matches)
+bisect: run: test +001+11-x002... ok (12 matches)
+bisect: run: test +101+11-x002... FAIL (11 matches)
+bisect: run: test +0101+11-x002... ok (6 matches)
+bisect: run: test +1101+11-x002... ok (5 matches)
+bisect: run: test +0101+11+1101-x002... FAIL (6 matches)
+bisect: run: test +00101+11+1101-x002... FAIL (3 matches)
+bisect: run: test +000101+11+1101-x002... FAIL (2 matches)
+bisect: run: test +0000101+11+1101-x002... ok (1 matches)
+bisect: run: test +1000101+11+1101-x002... FAIL (1 matches)
+bisect: run: test +1101+11+x045-x002... FAIL (5 matches)
+bisect: run: test +01101+11+x045-x002... FAIL (3 matches)
+bisect: run: test +001101+11+x045-x002... FAIL (2 matches)
+bisect: run: test +0001101+11+x045-x002... FAIL (1 matches)
+bisect: run: test +11+x045+x00d-x002... FAIL (22 matches)
+bisect: run: test +011+x045+x00d-x002... ok (11 matches)
+bisect: run: test +111+x045+x00d-x002... FAIL (11 matches)
+bisect: run: test +0111+x045+x00d-x002... FAIL (6 matches)
+bisect: run: test +00111+x045+x00d-x002... FAIL (3 matches)
+bisect: run: test +000111+x045+x00d-x002... ok (2 matches)
+bisect: run: test +100111+x045+x00d-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x045+x00d+x027-x002... FAIL (3 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x045-x00d-x027-x002... FAIL (86 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x045-x00d-x027-x002... ok (44 matches)
+bisect: run: test +1-x045-x00d-x027-x002... ok (42 matches)
+bisect: run: test +0+1-x045-x00d-x027-x002... FAIL (44 matches)
+bisect: run: test +00+1-x045-x00d-x027-x002... FAIL (23 matches)
+bisect: run: test +000+1-x045-x00d-x027-x002... ok (12 matches)
+bisect: run: test +100+1-x045-x00d-x027-x002... ok (11 matches)
+bisect: run: test +000+1+100-x045-x00d-x027-x002... FAIL (12 matches)
+bisect: run: test +0000+1+100-x045-x00d-x027-x002... FAIL (6 matches)
+bisect: run: test +00000+1+100-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +000000+1+100-x045-x00d-x027-x002... ok (2 matches)
+bisect: run: test +100000+1+100-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +100+1+x020-x045-x00d-x027-x002... FAIL (11 matches)
+bisect: run: test +0100+1+x020-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +1100+1+x020-x045-x00d-x027-x002... FAIL (5 matches)
+bisect: run: test +01100+1+x020-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +001100+1+x020-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: run: test +0001100+1+x020-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +1+x020+x00c-x045-x00d-x027-x002... FAIL (42 matches)
+bisect: run: test +01+x020+x00c-x045-x00d-x027-x002... FAIL (21 matches)
+bisect: run: test +001+x020+x00c-x045-x00d-x027-x002... FAIL (12 matches)
+bisect: run: test +0001+x020+x00c-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +1001+x020+x00c-x045-x00d-x027-x002... ok (6 matches)
diff --git a/cmd/bisect/testdata/maxset1.txt b/cmd/bisect/testdata/maxset1.txt
new file mode 100644
index 0000000..250d4a6
--- /dev/null
+++ b/cmd/bisect/testdata/maxset1.txt
@@ -0,0 +1,13 @@
+{"Fail": "apricot && peach", "Bisect": {"MaxSet": 1}}
+-- stdout --
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... ok (45 matches)
+bisect: run: test +1... ok (45 matches)
+bisect: fatal error: cannot find any failing change sets of size ≤ 1
+<bisect failed>
diff --git a/cmd/bisect/testdata/maxset4.txt b/cmd/bisect/testdata/maxset4.txt
new file mode 100644
index 0000000..8211c4c
--- /dev/null
+++ b/cmd/bisect/testdata/maxset4.txt
@@ -0,0 +1,138 @@
+{"Fail": "amber || apricot && peach || red && green && blue || cyan && magenta && yellow && black", "Bisect": {"MaxSet": 4}}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+blue
+green
+red
+---
+--- change set #3 (enabling changes causes failure)
+black
+cyan
+magenta
+yellow
+---
+--- change set #4 (enabling changes causes failure)
+apricot
+peach
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... ok (44 matches)
+bisect: run: test +1-x002... FAIL (45 matches)
+bisect: run: test +01-x002... ok (23 matches)
+bisect: run: test +11-x002... ok (22 matches)
+bisect: run: test +01+11-x002... FAIL (23 matches)
+bisect: run: test +001+11-x002... ok (12 matches)
+bisect: run: test +101+11-x002... FAIL (11 matches)
+bisect: run: test +0101+11-x002... ok (6 matches)
+bisect: run: test +1101+11-x002... ok (5 matches)
+bisect: run: test +0101+11+1101-x002... FAIL (6 matches)
+bisect: run: test +00101+11+1101-x002... FAIL (3 matches)
+bisect: run: test +000101+11+1101-x002... FAIL (2 matches)
+bisect: run: test +0000101+11+1101-x002... ok (1 matches)
+bisect: run: test +1000101+11+1101-x002... FAIL (1 matches)
+bisect: run: test +1101+11+x045-x002... FAIL (5 matches)
+bisect: run: test +01101+11+x045-x002... FAIL (3 matches)
+bisect: run: test +001101+11+x045-x002... FAIL (2 matches)
+bisect: run: test +0001101+11+x045-x002... FAIL (1 matches)
+bisect: run: test +11+x045+x00d-x002... FAIL (22 matches)
+bisect: run: test +011+x045+x00d-x002... ok (11 matches)
+bisect: run: test +111+x045+x00d-x002... FAIL (11 matches)
+bisect: run: test +0111+x045+x00d-x002... FAIL (6 matches)
+bisect: run: test +00111+x045+x00d-x002... FAIL (3 matches)
+bisect: run: test +000111+x045+x00d-x002... ok (2 matches)
+bisect: run: test +100111+x045+x00d-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x045+x00d+x027-x002... FAIL (3 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x045-x00d-x027-x002... FAIL (86 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x045-x00d-x027-x002... ok (44 matches)
+bisect: run: test +1-x045-x00d-x027-x002... ok (42 matches)
+bisect: run: test +0+1-x045-x00d-x027-x002... FAIL (44 matches)
+bisect: run: test +00+1-x045-x00d-x027-x002... FAIL (23 matches)
+bisect: run: test +000+1-x045-x00d-x027-x002... ok (12 matches)
+bisect: run: test +100+1-x045-x00d-x027-x002... ok (11 matches)
+bisect: run: test +000+1+100-x045-x00d-x027-x002... FAIL (12 matches)
+bisect: run: test +0000+1+100-x045-x00d-x027-x002... FAIL (6 matches)
+bisect: run: test +00000+1+100-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +000000+1+100-x045-x00d-x027-x002... ok (2 matches)
+bisect: run: test +100000+1+100-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +100+1+x020-x045-x00d-x027-x002... FAIL (11 matches)
+bisect: run: test +0100+1+x020-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +1100+1+x020-x045-x00d-x027-x002... FAIL (5 matches)
+bisect: run: test +01100+1+x020-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +001100+1+x020-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: run: test +0001100+1+x020-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +1+x020+x00c-x045-x00d-x027-x002... FAIL (42 matches)
+bisect: run: test +01+x020+x00c-x045-x00d-x027-x002... FAIL (21 matches)
+bisect: run: test +001+x020+x00c-x045-x00d-x027-x002... FAIL (12 matches)
+bisect: run: test +0001+x020+x00c-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +1001+x020+x00c-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +0001+x020+x00c+1001-x045-x00d-x027-x002... FAIL (6 matches)
+bisect: run: test +00001+x020+x00c+1001-x045-x00d-x027-x002... ok (3 matches)
+bisect: run: test +10001+x020+x00c+1001-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +010001+x020+x00c+1001-x045-x00d-x027-x002... ok (2 matches)
+bisect: run: test +110001+x020+x00c+1001-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +1001+x020+x00c+x031-x045-x00d-x027-x002... FAIL (6 matches)
+bisect: run: test +01001+x020+x00c+x031-x045-x00d-x027-x002... ok (3 matches)
+bisect: run: test +11001+x020+x00c+x031-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +011001+x020+x00c+x031-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: run: test +0011001+x020+x00c+x031-x045-x00d-x027-x002... ok (1 matches)
+bisect: run: test +1011001+x020+x00c+x031-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x020+x00c+x031+x059-x045-x00d-x027-x002... FAIL (4 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (82 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (42 matches)
+bisect: run: test +1-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (40 matches)
+bisect: run: test +0+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (42 matches)
+bisect: run: test +00+1-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (21 matches)
+bisect: run: test +10+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (21 matches)
+bisect: run: test +010+1-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (10 matches)
+bisect: run: test +110+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (11 matches)
+bisect: run: test +0110+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (6 matches)
+bisect: run: test +00110+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (3 matches)
+bisect: run: test +000110+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: run: test +0000110+1-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: run: test +1+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (40 matches)
+bisect: run: test +01+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (19 matches)
+bisect: run: test +11+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (21 matches)
+bisect: run: test +011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (11 matches)
+bisect: run: test +0011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (6 matches)
+bisect: run: test +1011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (5 matches)
+bisect: run: test +01011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (3 matches)
+bisect: run: test +11011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: run: test +011011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (1 matches)
+bisect: run: test +111011+x006-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x006+x03b-x020-x00c-x031-x059-x045-x00d-x027-x002... FAIL (2 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x006-x03b-x020-x00c-x031-x059-x045-x00d-x027-x002... ok (80 matches)
+bisect: target succeeds with all remaining changes enabled
diff --git a/cmd/bisect/testdata/negate.txt b/cmd/bisect/testdata/negate.txt
new file mode 100644
index 0000000..92ace59
--- /dev/null
+++ b/cmd/bisect/testdata/negate.txt
@@ -0,0 +1,57 @@
+{"Fail": "!amber || !apricot && !peach"}
+-- stdout --
+--- change set #1 (disabling changes causes failure)
+amber
+---
+--- change set #2 (disabling changes causes failure)
+apricot
+peach
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... FAIL (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... ok (90 matches)
+bisect: target fails with no changes, succeeds with all changes
+bisect: searching for minimal set of disabled changes causing failure
+bisect: run: test !+0... FAIL (45 matches)
+bisect: run: test !+00... ok (23 matches)
+bisect: run: test !+10... FAIL (22 matches)
+bisect: run: test !+010... FAIL (11 matches)
+bisect: run: test !+0010... FAIL (6 matches)
+bisect: run: test !+00010... FAIL (3 matches)
+bisect: run: test !+000010... FAIL (2 matches)
+bisect: run: test !+0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v!+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test !-x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test !+0-x002... ok (44 matches)
+bisect: run: test !+1-x002... ok (45 matches)
+bisect: run: test !+0+1-x002... FAIL (44 matches)
+bisect: run: test !+00+1-x002... ok (23 matches)
+bisect: run: test !+10+1-x002... FAIL (21 matches)
+bisect: run: test !+010+1-x002... ok (10 matches)
+bisect: run: test !+110+1-x002... FAIL (11 matches)
+bisect: run: test !+0110+1-x002... FAIL (6 matches)
+bisect: run: test !+00110+1-x002... FAIL (3 matches)
+bisect: run: test !+000110+1-x002... FAIL (2 matches)
+bisect: run: test !+0000110+1-x002... FAIL (1 matches)
+bisect: run: test !+1+x006-x002... FAIL (45 matches)
+bisect: run: test !+01+x006-x002... ok (23 matches)
+bisect: run: test !+11+x006-x002... FAIL (22 matches)
+bisect: run: test !+011+x006-x002... FAIL (11 matches)
+bisect: run: test !+0011+x006-x002... ok (6 matches)
+bisect: run: test !+1011+x006-x002... FAIL (5 matches)
+bisect: run: test !+01011+x006-x002... ok (3 matches)
+bisect: run: test !+11011+x006-x002... FAIL (2 matches)
+bisect: run: test !+011011+x006-x002... ok (1 matches)
+bisect: run: test !+111011+x006-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v!+x006+x03b-x002... FAIL (2 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test !-x006-x03b-x002... ok (87 matches)
+bisect: target succeeds with all remaining changes disabled
diff --git a/cmd/bisect/testdata/rand.txt b/cmd/bisect/testdata/rand.txt
new file mode 100644
index 0000000..74c2659
--- /dev/null
+++ b/cmd/bisect/testdata/rand.txt
@@ -0,0 +1,59 @@
+{"Fail": "amber || apricot || blue && random"}
+-- stdout --
+--- change set #1 (enabling changes causes failure)
+amber
+---
+--- change set #2 (enabling changes causes failure)
+apricot
+---
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... FAIL (45 matches)
+bisect: run: test +00... ok (23 matches)
+bisect: run: test +10... FAIL (22 matches)
+bisect: run: test +010... FAIL (11 matches)
+bisect: run: test +0010... FAIL (6 matches)
+bisect: run: test +00010... FAIL (3 matches)
+bisect: run: test +000010... FAIL (2 matches)
+bisect: run: test +0000010... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x002... FAIL (89 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x002... FAIL (44 matches)
+bisect: run: test +00-x002... ok (23 matches)
+bisect: run: test +10-x002... FAIL (21 matches)
+bisect: run: test +010-x002... ok (10 matches)
+bisect: run: test +110-x002... FAIL (11 matches)
+bisect: run: test +0110-x002... FAIL (6 matches)
+bisect: run: test +00110-x002... FAIL (3 matches)
+bisect: run: test +000110-x002... FAIL (2 matches)
+bisect: run: test +0000110-x002... FAIL (1 matches)
+bisect: confirming failing change set
+bisect: run: test v+x006-x002... FAIL (1 matches)
+bisect: FOUND failing change set
+bisect: checking for more failures
+bisect: run: test -x006-x002... FAIL (88 matches)
+bisect: target still fails; searching for more bad changes
+bisect: run: test +0-x006-x002... ok (43 matches)
+bisect: run: test +1-x006-x002... FAIL (45 matches)
+bisect: run: test +01-x006-x002... FAIL (23 matches)
+bisect: run: test +001-x006-x002... ok (12 matches)
+bisect: run: test +101-x006-x002... FAIL (11 matches)
+bisect: run: test +0101-x006-x002... ok (6 matches)
+bisect: run: test +1101-x006-x002... FAIL (5 matches)
+bisect: run: test +01101-x006-x002... ok (3 matches)
+bisect: run: test +11101-x006-x002... ok (2 matches)
+bisect: run: test +01101+11101-x006-x002... FAIL (3 matches)
+bisect: run: test +001101+11101-x006-x002... ok (2 matches)
+bisect: run: test +101101+11101-x006-x002... ok (1 matches)
+bisect: run: test +001101+11101+101101-x006-x002... ok (2 matches)
+bisect: fatal error: target fails inconsistently
+<bisect failed>
diff --git a/cmd/bisect/testdata/rand1.txt b/cmd/bisect/testdata/rand1.txt
new file mode 100644
index 0000000..2196293
--- /dev/null
+++ b/cmd/bisect/testdata/rand1.txt
@@ -0,0 +1,24 @@
+{"Fail": "blue && random"}
+-- stdout --
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... ok (45 matches)
+bisect: run: test +1... FAIL (45 matches)
+bisect: run: test +01... FAIL (23 matches)
+bisect: run: test +001... ok (12 matches)
+bisect: run: test +101... FAIL (11 matches)
+bisect: run: test +0101... ok (6 matches)
+bisect: run: test +1101... FAIL (5 matches)
+bisect: run: test +01101... ok (3 matches)
+bisect: run: test +11101... ok (2 matches)
+bisect: run: test +01101+11101... FAIL (3 matches)
+bisect: run: test +001101+11101... ok (2 matches)
+bisect: run: test +101101+11101... ok (1 matches)
+bisect: run: test +001101+11101+101101... ok (2 matches)
+bisect: fatal error: target fails inconsistently
+<bisect failed>
diff --git a/cmd/bisect/testdata/rand2.txt b/cmd/bisect/testdata/rand2.txt
new file mode 100644
index 0000000..c952226
--- /dev/null
+++ b/cmd/bisect/testdata/rand2.txt
@@ -0,0 +1,19 @@
+{"Fail": "blue && random", "Bisect": {"Count": 2}}
+-- stdout --
+-- stderr --
+bisect: checking target with all changes disabled
+bisect: run: test n... ok (90 matches)
+bisect: run: test n... ok (90 matches)
+bisect: checking target with all changes enabled
+bisect: run: test y... FAIL (90 matches)
+bisect: run: test y... FAIL (90 matches)
+bisect: target succeeds with no changes, fails with all changes
+bisect: searching for minimal set of enabled changes causing failure
+bisect: run: test +0... ok (45 matches)
+bisect: run: test +0... ok (45 matches)
+bisect: run: test +1... FAIL (45 matches)
+bisect: run: test +1... FAIL (45 matches)
+bisect: run: test +01... FAIL (23 matches)
+bisect: run: test +01... ok (23 matches)
+bisect: fatal error: target fails inconsistently
+<bisect failed>
diff --git a/cmd/guru/guru.go b/cmd/guru/guru.go
index 7a42aaa..f8e6cfa 100644
--- a/cmd/guru/guru.go
+++ b/cmd/guru/guru.go
@@ -337,7 +337,7 @@
 //   - a QueryPos, denoting the extent of the user's query.
 //   - nil, meaning no position at all.
 //
-// The output format is is compatible with the 'gnu'
+// The output format is compatible with the 'gnu'
 // compilation-error-regexp in Emacs' compilation mode.
 func fprintf(w io.Writer, fset *token.FileSet, pos interface{}, format string, args ...interface{}) {
 	var start, end token.Pos
diff --git a/cmd/present/static/notes.js b/cmd/present/static/notes.js
index a6d327f..ea4911e 100644
--- a/cmd/present/static/notes.js
+++ b/cmd/present/static/notes.js
@@ -26,7 +26,7 @@
   initNotes();
 }
 
-// Create an unique key for the local storage so we don't mix the
+// Create a unique key for the local storage so we don't mix the
 // destSlide of different presentations. For golang.org/issue/24688.
 function destSlideKey() {
   var key = '';
diff --git a/copyright/copyright_test.go b/copyright/copyright_test.go
index 1d63147..7f78925 100644
--- a/copyright/copyright_test.go
+++ b/copyright/copyright_test.go
@@ -8,22 +8,12 @@
 package copyright
 
 import (
-	"os"
-	"path/filepath"
 	"strings"
 	"testing"
 )
 
 func TestToolsCopyright(t *testing.T) {
-	cwd, err := os.Getwd()
-	if err != nil {
-		t.Fatal(err)
-	}
-	tools := filepath.Dir(cwd)
-	if !strings.HasSuffix(filepath.Base(tools), "tools") {
-		t.Fatalf("current working directory is %s, expected tools", tools)
-	}
-	files, err := checkCopyright(tools)
+	files, err := checkCopyright("..")
 	if err != nil {
 		t.Fatal(err)
 	}
diff --git a/go.mod b/go.mod
index 1d3c36f..385b73a 100644
--- a/go.mod
+++ b/go.mod
@@ -5,8 +5,8 @@
 require (
 	github.com/yuin/goldmark v1.4.13
 	golang.org/x/mod v0.10.0
-	golang.org/x/net v0.9.0
-	golang.org/x/sys v0.7.0
+	golang.org/x/net v0.10.0
+	golang.org/x/sys v0.8.0
 )
 
-require golang.org/x/sync v0.1.0
+require golang.org/x/sync v0.2.0
diff --git a/go.sum b/go.sum
index 13ac18d..ab743b3 100644
--- a/go.sum
+++ b/go.sum
@@ -10,24 +10,25 @@
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
-golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
-golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
diff --git a/go/analysis/passes/ifaceassert/parameterized.go b/go/analysis/passes/ifaceassert/parameterized.go
index b35f62d..b84577f 100644
--- a/go/analysis/passes/ifaceassert/parameterized.go
+++ b/go/analysis/passes/ifaceassert/parameterized.go
@@ -67,7 +67,7 @@
 		// of a generic function type (or an interface method) that is
 		// part of the type we're testing. We don't care about these type
 		// parameters.
-		// Similarly, the receiver of a method may declare (rather then
+		// Similarly, the receiver of a method may declare (rather than
 		// use) type parameters, we don't care about those either.
 		// Thus, we only need to look at the input and result parameters.
 		return w.isParameterized(t.Params()) || w.isParameterized(t.Results())
diff --git a/go/analysis/passes/internal/analysisutil/util.go b/go/analysis/passes/internal/analysisutil/util.go
index ac37e47..6d8039f 100644
--- a/go/analysis/passes/internal/analysisutil/util.go
+++ b/go/analysis/passes/internal/analysisutil/util.go
@@ -118,3 +118,12 @@
 	}
 	return false
 }
+
+// IsNamed reports whether t is exactly a named type in a package with a given path.
+func IsNamed(t types.Type, path, name string) bool {
+	if n, ok := t.(*types.Named); ok {
+		obj := n.Obj()
+		return obj.Pkg().Path() == path && obj.Name() == name
+	}
+	return false
+}
diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go
index 5118850..6df1343 100644
--- a/go/analysis/passes/nilfunc/nilfunc.go
+++ b/go/analysis/passes/nilfunc/nilfunc.go
@@ -19,10 +19,6 @@
 	"golang.org/x/tools/internal/typeparams"
 )
 
-const Doc = `check for useless comparisons between functions and nil
-
-A useless comparison is one like f == nil as opposed to f() == nil.`
-
 //go:embed doc.go
 var doc string
 
diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go
index 919edac..b2b8c67 100644
--- a/go/analysis/passes/printf/printf.go
+++ b/go/analysis/passes/printf/printf.go
@@ -278,7 +278,7 @@
 			// print/printf function can take, adding an ellipsis
 			// would break the program. For example:
 			//
-			//   func foo(arg1 string, arg2 ...interface{} {
+			//   func foo(arg1 string, arg2 ...interface{}) {
 			//       fmt.Printf("%s %v", arg1, arg2)
 			//   }
 			return
@@ -1056,7 +1056,7 @@
 		if strings.Contains(s, "%") {
 			m := printFormatRE.FindStringSubmatch(s)
 			if m != nil {
-				pass.ReportRangef(call, "%s call has possible formatting directive %s", fn.FullName(), m[0])
+				pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", fn.FullName(), m[0])
 			}
 		}
 	}
diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go
index 8aa4af9..18b9e3b 100644
--- a/go/analysis/passes/printf/testdata/src/a/a.go
+++ b/go/analysis/passes/printf/testdata/src/a/a.go
@@ -150,10 +150,10 @@
 	fmt.Printf("%s", nonemptyinterface)         // correct (the type is responsible for formatting)
 	fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // want "fmt.Printf format %6g has arg 'x' of wrong type rune"
 	fmt.Println()                               // not an error
-	fmt.Println("%s", "hi")                     // want "fmt.Println call has possible formatting directive %s"
-	fmt.Println("%v", "hi")                     // want "fmt.Println call has possible formatting directive %v"
-	fmt.Println("%T", "hi")                     // want "fmt.Println call has possible formatting directive %T"
-	fmt.Println("%s"+" there", "hi")            // want "fmt.Println call has possible formatting directive %s"
+	fmt.Println("%s", "hi")                     // want "fmt.Println call has possible Printf formatting directive %s"
+	fmt.Println("%v", "hi")                     // want "fmt.Println call has possible Printf formatting directive %v"
+	fmt.Println("%T", "hi")                     // want "fmt.Println call has possible Printf formatting directive %T"
+	fmt.Println("%s"+" there", "hi")            // want "fmt.Println call has possible Printf formatting directive %s"
 	fmt.Println("0.0%")                         // correct (trailing % couldn't be a formatting directive)
 	fmt.Printf("%s", "hi", 3)                   // want "fmt.Printf call needs 1 arg but has 2 args"
 	_ = fmt.Sprintf("%"+("s"), "hi", 3)         // want "fmt.Sprintf call needs 1 arg but has 2 args"
@@ -177,19 +177,19 @@
 	Printf(format, "hi")              // want "a.Printf format %s reads arg #2, but call has 1 arg$"
 	Printf("%s %d %.3v %q", "str", 4) // want "a.Printf format %.3v reads arg #3, but call has 2 args"
 	f := new(ptrStringer)
-	f.Warn(0, "%s", "hello", 3)           // want `\(\*a.ptrStringer\).Warn call has possible formatting directive %s`
+	f.Warn(0, "%s", "hello", 3)           // want `\(\*a.ptrStringer\).Warn call has possible Printf formatting directive %s`
 	f.Warnf(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Warnf call needs 1 arg but has 2 args`
 	f.Warnf(0, "%r", "hello")             // want `\(\*a.ptrStringer\).Warnf format %r has unknown verb r`
 	f.Warnf(0, "%#s", "hello")            // want `\(\*a.ptrStringer\).Warnf format %#s has unrecognized flag #`
-	f.Warn2(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Warn2 call has possible formatting directive %s`
+	f.Warn2(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Warn2 call has possible Printf formatting directive %s`
 	f.Warnf2(0, "%s", "hello", 3)         // want `\(\*a.ptrStringer\).Warnf2 call needs 1 arg but has 2 args`
 	f.Warnf2(0, "%r", "hello")            // want `\(\*a.ptrStringer\).Warnf2 format %r has unknown verb r`
 	f.Warnf2(0, "%#s", "hello")           // want `\(\*a.ptrStringer\).Warnf2 format %#s has unrecognized flag #`
-	f.Wrap(0, "%s", "hello", 3)           // want `\(\*a.ptrStringer\).Wrap call has possible formatting directive %s`
+	f.Wrap(0, "%s", "hello", 3)           // want `\(\*a.ptrStringer\).Wrap call has possible Printf formatting directive %s`
 	f.Wrapf(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Wrapf call needs 1 arg but has 2 args`
 	f.Wrapf(0, "%r", "hello")             // want `\(\*a.ptrStringer\).Wrapf format %r has unknown verb r`
 	f.Wrapf(0, "%#s", "hello")            // want `\(\*a.ptrStringer\).Wrapf format %#s has unrecognized flag #`
-	f.Wrap2(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Wrap2 call has possible formatting directive %s`
+	f.Wrap2(0, "%s", "hello", 3)          // want `\(\*a.ptrStringer\).Wrap2 call has possible Printf formatting directive %s`
 	f.Wrapf2(0, "%s", "hello", 3)         // want `\(\*a.ptrStringer\).Wrapf2 call needs 1 arg but has 2 args`
 	f.Wrapf2(0, "%r", "hello")            // want `\(\*a.ptrStringer\).Wrapf2 format %r has unknown verb r`
 	f.Wrapf2(0, "%#s", "hello")           // want `\(\*a.ptrStringer\).Wrapf2 format %#s has unrecognized flag #`
@@ -226,7 +226,7 @@
 	var et1 *testing.T
 	et1.Error()         // ok
 	et1.Error("hi")     // ok
-	et1.Error("%d", 3)  // want `\(\*testing.common\).Error call has possible formatting directive %d`
+	et1.Error("%d", 3)  // want `\(\*testing.common\).Error call has possible Printf formatting directive %d`
 	et1.Errorf("%s", 1) // want `\(\*testing.common\).Errorf format %s has arg 1 of wrong type int`
 	var et3 errorTest3
 	et3.Error() // ok, not an error method.
@@ -253,7 +253,7 @@
 	// Special handling for Log.
 	math.Log(3) // OK
 	var t *testing.T
-	t.Log("%d", 3) // want `\(\*testing.common\).Log call has possible formatting directive %d`
+	t.Log("%d", 3) // want `\(\*testing.common\).Log call has possible Printf formatting directive %d`
 	t.Logf("%d", 3)
 	t.Logf("%d", "hi") // want `\(\*testing.common\).Logf format %d has arg "hi" of wrong type string`
 
@@ -307,27 +307,27 @@
 	Printf(someString(), "hello") // OK
 
 	// Printf wrappers in package log should be detected automatically
-	logpkg.Fatal("%d", 1)    // want "log.Fatal call has possible formatting directive %d"
+	logpkg.Fatal("%d", 1)    // want "log.Fatal call has possible Printf formatting directive %d"
 	logpkg.Fatalf("%d", "x") // want `log.Fatalf format %d has arg "x" of wrong type string`
-	logpkg.Fatalln("%d", 1)  // want "log.Fatalln call has possible formatting directive %d"
-	logpkg.Panic("%d", 1)    // want "log.Panic call has possible formatting directive %d"
+	logpkg.Fatalln("%d", 1)  // want "log.Fatalln call has possible Printf formatting directive %d"
+	logpkg.Panic("%d", 1)    // want "log.Panic call has possible Printf formatting directive %d"
 	logpkg.Panicf("%d", "x") // want `log.Panicf format %d has arg "x" of wrong type string`
-	logpkg.Panicln("%d", 1)  // want "log.Panicln call has possible formatting directive %d"
-	logpkg.Print("%d", 1)    // want "log.Print call has possible formatting directive %d"
+	logpkg.Panicln("%d", 1)  // want "log.Panicln call has possible Printf formatting directive %d"
+	logpkg.Print("%d", 1)    // want "log.Print call has possible Printf formatting directive %d"
 	logpkg.Printf("%d", "x") // want `log.Printf format %d has arg "x" of wrong type string`
-	logpkg.Println("%d", 1)  // want "log.Println call has possible formatting directive %d"
+	logpkg.Println("%d", 1)  // want "log.Println call has possible Printf formatting directive %d"
 
 	// Methods too.
 	var l *logpkg.Logger
-	l.Fatal("%d", 1)    // want `\(\*log.Logger\).Fatal call has possible formatting directive %d`
+	l.Fatal("%d", 1)    // want `\(\*log.Logger\).Fatal call has possible Printf formatting directive %d`
 	l.Fatalf("%d", "x") // want `\(\*log.Logger\).Fatalf format %d has arg "x" of wrong type string`
-	l.Fatalln("%d", 1)  // want `\(\*log.Logger\).Fatalln call has possible formatting directive %d`
-	l.Panic("%d", 1)    // want `\(\*log.Logger\).Panic call has possible formatting directive %d`
+	l.Fatalln("%d", 1)  // want `\(\*log.Logger\).Fatalln call has possible Printf formatting directive %d`
+	l.Panic("%d", 1)    // want `\(\*log.Logger\).Panic call has possible Printf formatting directive %d`
 	l.Panicf("%d", "x") // want `\(\*log.Logger\).Panicf format %d has arg "x" of wrong type string`
-	l.Panicln("%d", 1)  // want `\(\*log.Logger\).Panicln call has possible formatting directive %d`
-	l.Print("%d", 1)    // want `\(\*log.Logger\).Print call has possible formatting directive %d`
+	l.Panicln("%d", 1)  // want `\(\*log.Logger\).Panicln call has possible Printf formatting directive %d`
+	l.Print("%d", 1)    // want `\(\*log.Logger\).Print call has possible Printf formatting directive %d`
 	l.Printf("%d", "x") // want `\(\*log.Logger\).Printf format %d has arg "x" of wrong type string`
-	l.Println("%d", 1)  // want `\(\*log.Logger\).Println call has possible formatting directive %d`
+	l.Println("%d", 1)  // want `\(\*log.Logger\).Println call has possible Printf formatting directive %d`
 
 	// Issue 26486
 	dbg("", 1) // no error "call has arguments but no formatting directive"
@@ -361,7 +361,7 @@
 	eis.Errorf(0, "%w", err)       // OK
 	ess.Errorf("ERROR", "%w", err) // OK
 	fmt.Appendf(nil, "%d", "123")  // want `wrong type`
-	fmt.Append(nil, "%d", 123)     // want `possible formatting directive`
+	fmt.Append(nil, "%d", 123)     // want `fmt.Append call has possible Printf formatting directive %d`
 
 }
 
@@ -839,7 +839,7 @@
 // Printf wrappers from external package
 func externalPackage() {
 	b.Wrapf("%s", 1) // want "Wrapf format %s has arg 1 of wrong type int"
-	b.Wrap("%s", 1)  // want "Wrap call has possible formatting directive %s"
+	b.Wrap("%s", 1)  // want "Wrap call has possible Printf formatting directive %s"
 	b.NoWrap("%s", 1)
 	b.Wrapf2("%s", 1) // want "Wrapf2 format %s has arg 1 of wrong type int"
 }
diff --git a/go/analysis/passes/slog/slog.go b/go/analysis/passes/slog/slog.go
index c5fcfec..874ebec 100644
--- a/go/analysis/passes/slog/slog.go
+++ b/go/analysis/passes/slog/slog.go
@@ -66,6 +66,10 @@
 			// Not a slog function that takes key-value pairs.
 			return
 		}
+		if isMethodExpr(pass.TypesInfo, call) {
+			// Call is to a method value. Skip the first argument.
+			skipArgs++
+		}
 		if len(call.Args) <= skipArgs {
 			// Too few args; perhaps there are no k-v pairs.
 			return
@@ -74,7 +78,7 @@
 		// Check this call.
 		// The first position should hold a key or Attr.
 		pos := key
-		sawUnknown := false
+		var unknownArg ast.Expr // nil or the last unknown argument
 		for _, arg := range call.Args[skipArgs:] {
 			t := pass.TypesInfo.Types[arg].Type
 			switch pos {
@@ -86,17 +90,19 @@
 				case isAttr(t):
 					pos = key
 				case types.IsInterface(t):
-					// We don't know if this arg is a string or an Attr, so we don't know what to expect next.
-					// (We could see if one of interface's methods isn't a method of Attr, and thus know
-					// for sure that this type is definitely not a string or Attr, but it doesn't seem
-					// worth the effort for such an unlikely case.)
+					// As we do not do dataflow, we do not know what the dynamic type is.
+					// It could be a string or an Attr so we don't know what to expect next.
 					pos = unknown
 				default:
-					// Definitely not a key.
-					pass.ReportRangef(call, "%s arg %q should be a string or a slog.Attr (possible missing key or value)",
-						shortName(fn), analysisutil.Format(pass.Fset, arg))
-					// Assume this was supposed to be a value, and expect a key next.
-					pos = key
+					if unknownArg == nil {
+						pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)",
+							shortName(fn), analysisutil.Format(pass.Fset, arg))
+					} else {
+						pass.ReportRangef(arg, "%s arg %q should probably be a string or a slog.Attr (previous arg %q cannot be a key)",
+							shortName(fn), analysisutil.Format(pass.Fset, arg), analysisutil.Format(pass.Fset, unknownArg))
+					}
+					// Stop here so we report at most one missing key per call.
+					return
 				}
 
 			case value:
@@ -105,31 +111,27 @@
 				pos = key
 
 			case unknown:
-				// We don't know anything about this position, but all hope is not lost.
+				// Once we encounter an unknown position, we can never be
+				// sure if a problem later or at the end of the call is due to a
+				// missing final value, or a non-key in key position.
+				// In both cases, unknownArg != nil.
+				unknownArg = arg
+
+				// We don't know what is expected about this position, but all hope is not lost.
 				if t != stringType && !isAttr(t) && !types.IsInterface(t) {
 					// This argument is definitely not a key.
 					//
-					// The previous argument could have been a key, in which case this is the
+					// unknownArg cannot have been a key, in which case this is the
 					// corresponding value, and the next position should hold another key.
-					// We will assume that.
 					pos = key
-					// Another possibility: the previous argument was an Attr, and this is
-					// a value incorrectly placed in a key position.
-					// If we assumed this case instead, we might produce a false positive
-					// (since the first case might actually hold).
-
-					// Once we encounter an unknown position, we can never be
-					// sure if a problem at the end of the call is due to a
-					// missing final value, or a non-key in key position.
-					sawUnknown = true
 				}
 			}
 		}
 		if pos == value {
-			if sawUnknown {
-				pass.ReportRangef(call, "call to %s has a missing or misplaced value", shortName(fn))
-			} else {
+			if unknownArg == nil {
 				pass.ReportRangef(call, "call to %s missing a final value", shortName(fn))
+			} else {
+				pass.ReportRangef(call, "call to %s has a missing or misplaced value", shortName(fn))
 			}
 		}
 	})
@@ -137,7 +139,7 @@
 }
 
 func isAttr(t types.Type) bool {
-	return t.String() == "log/slog.Attr"
+	return analysisutil.IsNamed(t, "log/slog", "Attr")
 }
 
 // shortName returns a name for the function that is shorter than FullName.
@@ -170,49 +172,63 @@
 	if pkg := fn.Pkg(); pkg == nil || pkg.Path() != "log/slog" {
 		return 0, false
 	}
+	var recvName string // by default a slog package function
 	recv := fn.Type().(*types.Signature).Recv()
-	if recv == nil {
-		// TODO: If #59204 is accepted, uncomment the lines below.
-		// if fn.Name() == "Group" {
-		// 	return 0, true
-		// }
-		skip, ok := slogOutputFuncs[fn.Name()]
-		return skip, ok
-	}
-	var recvName string
-	if pt, ok := recv.Type().(*types.Pointer); ok {
-		if nt, ok := pt.Elem().(*types.Named); ok {
+	if recv != nil {
+		t := recv.Type()
+		if pt, ok := t.(*types.Pointer); ok {
+			t = pt.Elem()
+		}
+		if nt, ok := t.(*types.Named); !ok {
+			return 0, false
+		} else {
 			recvName = nt.Obj().Name()
 		}
 	}
-	if recvName == "" {
-		return 0, false
-	}
-	// The methods on *Logger include all the top-level output methods, as well as "With".
-	if recvName == "Logger" {
-		if fn.Name() == "With" {
-			return 0, true
-		}
-		skip, ok := slogOutputFuncs[fn.Name()]
-		return skip, ok
-	}
-	if recvName == "Record" && fn.Name() == "Add" {
-		return 0, true
-	}
-	return 0, false
+	skip, ok := kvFuncs[recvName][fn.Name()]
+	return skip, ok
 }
 
-// The names of top-level functions and *Logger methods in log/slog that take
+// The names of functions and methods in log/slog that take
 // ...any for key-value pairs, mapped to the number of initial args to skip in
 // order to get to the ones that match the ...any parameter.
-var slogOutputFuncs = map[string]int{
-	"Debug":    1,
-	"Info":     1,
-	"Warn":     1,
-	"Error":    1,
-	"DebugCtx": 2,
-	"InfoCtx":  2,
-	"WarnCtx":  2,
-	"ErrorCtx": 2,
-	"Log":      3,
+// The first key is the dereferenced receiver type name, or "" for a function.
+var kvFuncs = map[string]map[string]int{
+	"": map[string]int{
+		"Debug":    1,
+		"Info":     1,
+		"Warn":     1,
+		"Error":    1,
+		"DebugCtx": 2,
+		"InfoCtx":  2,
+		"WarnCtx":  2,
+		"ErrorCtx": 2,
+		"Log":      3,
+		"Group":    0,
+	},
+	"Logger": map[string]int{
+		"Debug":    1,
+		"Info":     1,
+		"Warn":     1,
+		"Error":    1,
+		"DebugCtx": 2,
+		"InfoCtx":  2,
+		"WarnCtx":  2,
+		"ErrorCtx": 2,
+		"Log":      3,
+		"With":     0,
+	},
+	"Record": map[string]int{
+		"Add": 0,
+	},
+}
+
+// isMethodExpr reports whether a call is to a MethodExpr.
+func isMethodExpr(info *types.Info, c *ast.CallExpr) bool {
+	s, ok := c.Fun.(*ast.SelectorExpr)
+	if !ok {
+		return false
+	}
+	sel := info.Selections[s]
+	return sel != nil && sel.Kind() == types.MethodExpr
 }
diff --git a/go/analysis/passes/slog/slog_test.go b/go/analysis/passes/slog/slog_test.go
index 7184215..b64b256 100644
--- a/go/analysis/passes/slog/slog_test.go
+++ b/go/analysis/passes/slog/slog_test.go
@@ -15,5 +15,5 @@
 func Test(t *testing.T) {
 	testenv.NeedsGo1Point(t, 21)
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, slog.Analyzer, "a")
+	analysistest.Run(t, testdata, slog.Analyzer, "a", "b")
 }
diff --git a/go/analysis/passes/slog/testdata/src/a/a.go b/go/analysis/passes/slog/testdata/src/a/a.go
index bed7f70..a13aac7 100644
--- a/go/analysis/passes/slog/testdata/src/a/a.go
+++ b/go/analysis/passes/slog/testdata/src/a/a.go
@@ -9,6 +9,7 @@
 package a
 
 import (
+	"context"
 	"fmt"
 	"log/slog"
 )
@@ -25,11 +26,18 @@
 	// Valid calls.
 	slog.Info("msg")
 	slog.Info("msg", "a", 1)
+	slog.Info("", "a", 1, "b", "two")
 	l.Debug("msg", "a", 1)
 	l.With("a", 1)
+	slog.Warn("msg", slog.Int("a", 1))
 	slog.Warn("msg", slog.Int("a", 1), "k", 2)
 	l.WarnCtx(nil, "msg", "a", 1, slog.Int("b", 2), slog.Int("c", 3), "d", 4)
+	l.DebugCtx(nil, "msg", "a", 1, slog.Int("b", 2), slog.Int("c", 3), "d", 4, slog.Int("e", 5))
 	r.Add("a", 1, "b", 2)
+	(*slog.Logger).Debug(l, "msg", "a", 1, "b", 2)
+
+	var key string
+	r.Add(key, 1)
 
 	// bad
 	slog.Info("msg", 1)                     // want `slog.Info arg "1" should be a string or a slog.Attr`
@@ -40,12 +48,24 @@
 	r.Add("K", "v", "k")                    // want `call to slog.Record.Add missing a final value`
 	l.With("a", "b", 2)                     // want `slog.Logger.With arg "2" should be a string or a slog.Attr`
 
+	// Report the first problem if there are multiple bad keys.
+	slog.Debug("msg", "a", 1, 2, 3, 4) // want `slog.Debug arg "2" should be a string or a slog.Attr`
+	slog.Debug("msg", "a", 1, 2, 3, 4) // want `slog.Debug arg "2" should be a string or a slog.Attr`
+
 	slog.Log(nil, slog.LevelWarn, "msg", "a", "b", 2) // want `slog.Log arg "2" should be a string or a slog.Attr`
 
+	// Test method expression call.
+	(*slog.Logger).Debug(l, "msg", "a", 1, 2, 3) // want `slog.Logger.Debug arg "2" should be a string or a slog.Attr`
+
 	// Skip calls with spread args.
 	var args []any
 	slog.Info("msg", args...)
 
+	// Report keys that are statically not exactly "string".
+	type MyString string
+	myKey := MyString("a")  // any(x) looks like <MyString, "a">.
+	slog.Info("", myKey, 1) // want `slog.Info arg "myKey" should be a string or a slog.Attr`
+
 	// The variadic part of all the calls below begins with an argument of
 	// static type any, followed by an integer.
 	// Even though the we don't know the dynamic type of the first arg, and thus
@@ -74,4 +94,58 @@
 	// Another invalid call we can't detect. Here the first argument is wrong.
 	a = 1
 	slog.Info("msg", a, 7, "b", 5)
+
+	// We can detect the first case as the type of key is UntypedNil,
+	// e.g. not yet assigned to any and not yet an interface.
+	// We cannot detect the second.
+	slog.Debug("msg", nil, 2) // want `slog.Debug arg "nil" should be a string or a slog.Attr`
+	slog.Debug("msg", any(nil), 2)
+
+	// Recovery from unknown value.
+	slog.Debug("msg", any(nil), "a")
+	slog.Debug("msg", any(nil), "a", 2)
+	slog.Debug("msg", any(nil), "a", 2, "b") // want `call to slog.Debug has a missing or misplaced value`
+	slog.Debug("msg", any(nil), 2, 3, 4)     // want "slog.Debug arg \\\"3\\\" should probably be a string or a slog.Attr \\(previous arg \\\"2\\\" cannot be a key\\)"
 }
+
+func All() {
+	// Test all functions and methods at least once.
+	var (
+		l   *slog.Logger
+		r   slog.Record
+		ctx context.Context
+	)
+	slog.Debug("msg", 1, 2) // want `slog.Debug arg "1" should be a string or a slog.Attr`
+	slog.Error("msg", 1, 2) // want `slog.Error arg "1" should be a string or a slog.Attr`
+	slog.Info("msg", 1, 2)  // want `slog.Info arg "1" should be a string or a slog.Attr`
+	slog.Warn("msg", 1, 2)  // want `slog.Warn arg "1" should be a string or a slog.Attr`
+
+	slog.DebugCtx(ctx, "msg", 1, 2) // want `slog.DebugCtx arg "1" should be a string or a slog.Attr`
+	slog.ErrorCtx(ctx, "msg", 1, 2) // want `slog.ErrorCtx arg "1" should be a string or a slog.Attr`
+	slog.InfoCtx(ctx, "msg", 1, 2)  // want `slog.InfoCtx arg "1" should be a string or a slog.Attr`
+	slog.WarnCtx(ctx, "msg", 1, 2)  // want `slog.WarnCtx arg "1" should be a string or a slog.Attr`
+
+	slog.Log(ctx, slog.LevelDebug, "msg", 1, 2) // want `slog.Log arg "1" should be a string or a slog.Attr`
+
+	l.Debug("msg", 1, 2) // want `slog.Logger.Debug arg "1" should be a string or a slog.Attr`
+	l.Error("msg", 1, 2) // want `slog.Logger.Error arg "1" should be a string or a slog.Attr`
+	l.Info("msg", 1, 2)  // want `slog.Logger.Info arg "1" should be a string or a slog.Attr`
+	l.Warn("msg", 1, 2)  // want `slog.Logger.Warn arg "1" should be a string or a slog.Attr`
+
+	l.DebugCtx(ctx, "msg", 1, 2) // want `slog.Logger.DebugCtx arg "1" should be a string or a slog.Attr`
+	l.ErrorCtx(ctx, "msg", 1, 2) // want `slog.Logger.ErrorCtx arg "1" should be a string or a slog.Attr`
+	l.InfoCtx(ctx, "msg", 1, 2)  // want `slog.Logger.InfoCtx arg "1" should be a string or a slog.Attr`
+	l.WarnCtx(ctx, "msg", 1, 2)  // want `slog.Logger.WarnCtx arg "1" should be a string or a slog.Attr`
+
+	l.Log(ctx, slog.LevelDebug, "msg", 1, 2) // want `slog.Logger.Log arg "1" should be a string or a slog.Attr`
+
+	_ = l.With(1, 2) // want `slog.Logger.With arg "1" should be a string or a slog.Attr`
+
+	r.Add(1, 2) // want `slog.Record.Add arg "1" should be a string or a slog.Attr`
+
+	_ = slog.Group("a", 1, 2, 3) // want `slog.Group arg "2" should be a string or a slog.Attr`
+
+}
+
+// Used in tests by package b.
+var MyLogger = slog.Default()
diff --git a/go/analysis/passes/slog/testdata/src/b/b.go b/go/analysis/passes/slog/testdata/src/b/b.go
new file mode 100644
index 0000000..ebf24a9
--- /dev/null
+++ b/go/analysis/passes/slog/testdata/src/b/b.go
@@ -0,0 +1,15 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the slog checker.
+
+//go:build go1.21
+
+package b
+
+import "a"
+
+func Imported() {
+	_ = a.MyLogger.With("a", 1, 2, 3) // want `slog.Logger.With arg "2" should be a string or a slog.Attr`
+}
diff --git a/go/analysis/passes/timeformat/timeformat.go b/go/analysis/passes/timeformat/timeformat.go
index a1b5d2f..c45b9fa 100644
--- a/go/analysis/passes/timeformat/timeformat.go
+++ b/go/analysis/passes/timeformat/timeformat.go
@@ -24,13 +24,6 @@
 const badFormat = "2006-02-01"
 const goodFormat = "2006-01-02"
 
-const Doc = `check for calls of (time.Time).Format or time.Parse with 2006-02-01
-
-The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)
-format. Internationally, "yyyy-dd-mm" does not occur in common calendar date
-standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.
-`
-
 //go:embed doc.go
 var doc string
 
diff --git a/go/analysis/passes/unsafeptr/doc.go b/go/analysis/passes/unsafeptr/doc.go
index 524fd04..de10804 100644
--- a/go/analysis/passes/unsafeptr/doc.go
+++ b/go/analysis/passes/unsafeptr/doc.go
@@ -13,5 +13,5 @@
 // to convert integers to pointers. A conversion from uintptr to
 // unsafe.Pointer is invalid if it implies that there is a uintptr-typed
 // word in memory that holds a pointer value, because that word will be
-// invisible to stack copying and to the garbage collector.`
+// invisible to stack copying and to the garbage collector.
 package unsafeptr
diff --git a/go/analysis/passes/unsafeptr/unsafeptr.go b/go/analysis/passes/unsafeptr/unsafeptr.go
index b1a32f2..e43ac20 100644
--- a/go/analysis/passes/unsafeptr/unsafeptr.go
+++ b/go/analysis/passes/unsafeptr/unsafeptr.go
@@ -18,14 +18,6 @@
 	"golang.org/x/tools/go/ast/inspector"
 )
 
-const Doc = `check for invalid conversions of uintptr to unsafe.Pointer
-
-The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer
-to convert integers to pointers. A conversion from uintptr to
-unsafe.Pointer is invalid if it implies that there is a uintptr-typed
-word in memory that holds a pointer value, because that word will be
-invisible to stack copying and to the garbage collector.`
-
 //go:embed doc.go
 var doc string
 
diff --git a/go/analysis/passes/unusedresult/cmd/unusedresult/main.go b/go/analysis/passes/unusedresult/cmd/unusedresult/main.go
new file mode 100644
index 0000000..8116c6e
--- /dev/null
+++ b/go/analysis/passes/unusedresult/cmd/unusedresult/main.go
@@ -0,0 +1,14 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The unusedresult command applies the golang.org/x/tools/go/analysis/passes/unusedresult
+// analysis to the specified packages of Go source code.
+package main
+
+import (
+	"golang.org/x/tools/go/analysis/passes/unusedresult"
+	"golang.org/x/tools/go/analysis/singlechecker"
+)
+
+func main() { singlechecker.Main(unusedresult.Analyzer) }
diff --git a/go/analysis/passes/unusedresult/doc.go b/go/analysis/passes/unusedresult/doc.go
index 0a713ca..a1bf4cf 100644
--- a/go/analysis/passes/unusedresult/doc.go
+++ b/go/analysis/passes/unusedresult/doc.go
@@ -9,9 +9,11 @@
 //
 // unusedresult: check for unused results of calls to some functions
 //
-// Some functions like fmt.Errorf return a result and have no side effects,
-// so it is always a mistake to discard the result. This analyzer reports
-// calls to certain functions in which the result of the call is ignored.
+// Some functions like fmt.Errorf return a result and have no side
+// effects, so it is always a mistake to discard the result. Other
+// functions may return an error that must not be ignored, or a cleanup
+// operation that must be called. This analyzer reports calls to
+// functions like these when the result of the call is ignored.
 //
 // The set of functions may be controlled using flags.
 package unusedresult
diff --git a/go/analysis/passes/unusedresult/testdata/src/a/a.go b/go/analysis/passes/unusedresult/testdata/src/a/a.go
index 50b2f56..7a41f4a 100644
--- a/go/analysis/passes/unusedresult/testdata/src/a/a.go
+++ b/go/analysis/passes/unusedresult/testdata/src/a/a.go
@@ -8,6 +8,7 @@
 	"bytes"
 	"errors"
 	"fmt"
+	. "fmt"
 )
 
 func _() {
@@ -20,8 +21,11 @@
 	err.Error() // want `result of \(error\).Error call not used`
 
 	var buf bytes.Buffer
-	buf.String() // want `result of \(bytes.Buffer\).String call not used`
+	buf.String() // want `result of \(\*bytes.Buffer\).String call not used`
 
 	fmt.Sprint("")  // want "result of fmt.Sprint call not used"
 	fmt.Sprintf("") // want "result of fmt.Sprintf call not used"
+
+	Sprint("")  // want "result of fmt.Sprint call not used"
+	Sprintf("") // want "result of fmt.Sprintf call not used"
 }
diff --git a/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go
index c770ccd..04d0e30 100644
--- a/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go
+++ b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go
@@ -23,7 +23,7 @@
 	err.Error() // want `result of \(error\).Error call not used`
 
 	var buf bytes.Buffer
-	buf.String() // want `result of \(bytes.Buffer\).String call not used`
+	buf.String() // want `result of \(\*bytes.Buffer\).String call not used`
 
 	fmt.Sprint("")  // want "result of fmt.Sprint call not used"
 	fmt.Sprintf("") // want "result of fmt.Sprintf call not used"
@@ -32,10 +32,10 @@
 	_ = userdefs.MustUse[int](2)
 
 	s := userdefs.SingleTypeParam[int]{X: 1}
-	s.String() // want `result of \(typeparams/userdefs.SingleTypeParam\[int\]\).String call not used`
+	s.String() // want `result of \(\*typeparams/userdefs.SingleTypeParam\[int\]\).String call not used`
 	_ = s.String()
 
 	m := userdefs.MultiTypeParam[int, string]{X: 1, Y: "one"}
-	m.String() // want `result of \(typeparams/userdefs.MultiTypeParam\[int, string\]\).String call not used`
+	m.String() // want `result of \(\*typeparams/userdefs.MultiTypeParam\[int, string\]\).String call not used`
 	_ = m.String()
-}
\ No newline at end of file
+}
diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go
index 051bc7c..cb487a2 100644
--- a/go/analysis/passes/unusedresult/unusedresult.go
+++ b/go/analysis/passes/unusedresult/unusedresult.go
@@ -3,9 +3,16 @@
 // license that can be found in the LICENSE file.
 
 // Package unusedresult defines an analyzer that checks for unused
-// results of calls to certain pure functions.
+// results of calls to certain functions.
 package unusedresult
 
+// It is tempting to make this analysis inductive: for each function
+// that tail-calls one of the functions that we check, check those
+// functions too. However, just because you must use the result of
+// fmt.Sprintf doesn't mean you need to use the result of every
+// function that returns a formatted string: it may have other results
+// and effects.
+
 import (
 	_ "embed"
 	"go/ast"
@@ -18,17 +25,9 @@
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
-	"golang.org/x/tools/internal/typeparams"
+	"golang.org/x/tools/go/types/typeutil"
 )
 
-const Doc = `check for unused results of calls to some functions
-
-Some functions like fmt.Errorf return a result and have no side effects,
-so it is always a mistake to discard the result. This analyzer reports
-calls to certain functions in which the result of the call is ignored.
-
-The set of functions may be controlled using flags.`
-
 //go:embed doc.go
 var doc string
 
@@ -56,15 +55,9 @@
 	//    ignoringTheErrorWouldBeVeryBad() // oops
 	//
 
-	// Also, it is tempting to make this analysis modular: one
-	// could export a "mustUseResult" fact for each function that
-	// tail-calls one of the functions that we check, and check
-	// those functions too.
-	//
-	// However, just because you must use the result of
-	// fmt.Sprintf doesn't mean you need to use the result of
-	// every function that returns a formatted string:
-	// it may have other results and effects.
+	// List standard library functions here.
+	// The context.With{Cancel,Deadline,Timeout} entries are
+	// effectively redundant wrt the lostcancel analyzer.
 	funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse,context.WithValue,context.WithCancel,context.WithDeadline,context.WithTimeout")
 	Analyzer.Flags.Var(&funcs, "funcs",
 		"comma-separated list of functions whose results must be used")
@@ -77,6 +70,14 @@
 func run(pass *analysis.Pass) (interface{}, error) {
 	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
 
+	// Split functions into (pkg, name) pairs to save allocation later.
+	pkgFuncs := make(map[[2]string]bool, len(funcs))
+	for s := range funcs {
+		if i := strings.LastIndexByte(s, '.'); i > 0 {
+			pkgFuncs[[2]string{s[:i], s[i+1:]}] = true
+		}
+	}
+
 	nodeFilter := []ast.Node{
 		(*ast.ExprStmt)(nil),
 	}
@@ -85,41 +86,26 @@
 		if !ok {
 			return // not a call statement
 		}
-		fun := analysisutil.Unparen(call.Fun)
 
-		if pass.TypesInfo.Types[fun].IsType() {
-			return // a conversion, not a call
-		}
-
-		x, _, _, _ := typeparams.UnpackIndexExpr(fun)
-		if x != nil {
-			fun = x // If this is generic function or method call, skip the instantiation arguments
-		}
-
-		selector, ok := fun.(*ast.SelectorExpr)
+		// Call to function or method?
+		fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func)
 		if !ok {
-			return // neither a method call nor a qualified ident
+			return // e.g. var or builtin
 		}
 
-		sel, ok := pass.TypesInfo.Selections[selector]
-		if ok && sel.Kind() == types.MethodVal {
+		if sig := fn.Type().(*types.Signature); sig.Recv() != nil {
 			// method (e.g. foo.String())
-			obj := sel.Obj().(*types.Func)
-			sig := sel.Type().(*types.Signature)
 			if types.Identical(sig, sigNoArgsStringResult) {
-				if stringMethods[obj.Name()] {
+				if stringMethods[fn.Name()] {
 					pass.Reportf(call.Lparen, "result of (%s).%s call not used",
-						sig.Recv().Type(), obj.Name())
+						sig.Recv().Type(), fn.Name())
 				}
 			}
-		} else if !ok {
-			// package-qualified function (e.g. fmt.Errorf)
-			obj := pass.TypesInfo.Uses[selector.Sel]
-			if obj, ok := obj.(*types.Func); ok {
-				qname := obj.Pkg().Path() + "." + obj.Name()
-				if funcs[qname] {
-					pass.Reportf(call.Lparen, "result of %v call not used", qname)
-				}
+		} else {
+			// package-level function (e.g. fmt.Errorf)
+			if pkgFuncs[[2]string{fn.Pkg().Path(), fn.Name()}] {
+				pass.Reportf(call.Lparen, "result of %s.%s call not used",
+					fn.Pkg().Path(), fn.Name())
 			}
 		}
 	})
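The rewritten check above resolves the callee once with typeutil.Callee and then branches on whether the resulting *types.Func has a receiver, instead of pattern-matching selector ASTs and generic instantiation syntax by hand; the package-qualified names from the -funcs flag are pre-split into (pkg, name) pairs so no string is built per call site. Below is a small, self-contained sketch of that lookup pattern, not the analyzer itself; the checked source, the sprintf helper, and the printed messages are illustrative only.

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"

		"golang.org/x/tools/go/types/typeutil"
	)

	// src is a tiny program to classify; its names are made up for the example.
	const src = `package p

	type T int

	func (T) String() string { return "t" }

	func sprintf(format string, args ...any) string { return format }

	func f() {
		sprintf("%d", 1)   // package-level function
		T(0).String()      // method
		println("builtin") // builtin: Callee does not yield a *types.Func
	}
	`

	func main() {
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		info := &types.Info{
			Types:      map[ast.Expr]types.TypeAndValue{},
			Uses:       map[*ast.Ident]types.Object{},
			Selections: map[*ast.SelectorExpr]*types.Selection{},
		}
		if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
			panic(err)
		}
		ast.Inspect(file, func(n ast.Node) bool {
			call, ok := n.(*ast.CallExpr)
			if !ok {
				return true
			}
			// Callee resolves qualified idents, methods, and instantiated
			// generics; builtins and conversions fail the *types.Func assertion.
			fn, ok := typeutil.Callee(info, call).(*types.Func)
			if !ok {
				return true
			}
			if sig := fn.Type().(*types.Signature); sig.Recv() != nil {
				fmt.Printf("method call: (%s).%s\n", sig.Recv().Type(), fn.Name())
			} else {
				fmt.Printf("function call: %s.%s\n", fn.Pkg().Path(), fn.Name())
			}
			return true
		})
	}

The *types.Func assertion is what filters out builtins, conversions, and calls through function-typed variables, which is the case the "e.g. var or builtin" comment above refers to.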
diff --git a/go/analysis/unitchecker/main.go b/go/analysis/unitchecker/main.go
index a054a2d..6e08ce9 100644
--- a/go/analysis/unitchecker/main.go
+++ b/go/analysis/unitchecker/main.go
@@ -27,16 +27,23 @@
 	"golang.org/x/tools/go/analysis/passes/cgocall"
 	"golang.org/x/tools/go/analysis/passes/composite"
 	"golang.org/x/tools/go/analysis/passes/copylock"
+	"golang.org/x/tools/go/analysis/passes/directive"
 	"golang.org/x/tools/go/analysis/passes/errorsas"
+	"golang.org/x/tools/go/analysis/passes/framepointer"
 	"golang.org/x/tools/go/analysis/passes/httpresponse"
+	"golang.org/x/tools/go/analysis/passes/ifaceassert"
 	"golang.org/x/tools/go/analysis/passes/loopclosure"
 	"golang.org/x/tools/go/analysis/passes/lostcancel"
 	"golang.org/x/tools/go/analysis/passes/nilfunc"
 	"golang.org/x/tools/go/analysis/passes/printf"
 	"golang.org/x/tools/go/analysis/passes/shift"
+	"golang.org/x/tools/go/analysis/passes/sigchanyzer"
 	"golang.org/x/tools/go/analysis/passes/stdmethods"
+	"golang.org/x/tools/go/analysis/passes/stringintconv"
 	"golang.org/x/tools/go/analysis/passes/structtag"
+	"golang.org/x/tools/go/analysis/passes/testinggoroutine"
 	"golang.org/x/tools/go/analysis/passes/tests"
+	"golang.org/x/tools/go/analysis/passes/timeformat"
 	"golang.org/x/tools/go/analysis/passes/unmarshal"
 	"golang.org/x/tools/go/analysis/passes/unreachable"
 	"golang.org/x/tools/go/analysis/passes/unsafeptr"
@@ -53,16 +60,23 @@
 		cgocall.Analyzer,
 		composite.Analyzer,
 		copylock.Analyzer,
+		directive.Analyzer,
 		errorsas.Analyzer,
+		framepointer.Analyzer,
 		httpresponse.Analyzer,
+		ifaceassert.Analyzer,
 		loopclosure.Analyzer,
 		lostcancel.Analyzer,
 		nilfunc.Analyzer,
 		printf.Analyzer,
 		shift.Analyzer,
+		sigchanyzer.Analyzer,
 		stdmethods.Analyzer,
+		stringintconv.Analyzer,
 		structtag.Analyzer,
 		tests.Analyzer,
+		testinggoroutine.Analyzer,
+		timeformat.Analyzer,
 		unmarshal.Analyzer,
 		unreachable.Analyzer,
 		unsafeptr.Analyzer,
diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go
index 3c6fbe4..ff22d23 100644
--- a/go/analysis/unitchecker/unitchecker.go
+++ b/go/analysis/unitchecker/unitchecker.go
@@ -183,11 +183,6 @@
 	return cfg, nil
 }
 
-var importerForCompiler = func(_ *token.FileSet, compiler string, lookup importer.Lookup) types.Importer {
-	// broken legacy implementation (https://golang.org/issue/28995)
-	return importer.For(compiler, lookup)
-}
-
 func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]result, error) {
 	// Load, parse, typecheck.
 	var files []*ast.File
@@ -203,7 +198,7 @@
 		}
 		files = append(files, f)
 	}
-	compilerImporter := importerForCompiler(fset, cfg.Compiler, func(path string) (io.ReadCloser, error) {
+	compilerImporter := importer.ForCompiler(fset, cfg.Compiler, func(path string) (io.ReadCloser, error) {
 		// path is a resolved package path, not an import path.
 		file, ok := cfg.PackageFile[path]
 		if !ok {
diff --git a/go/analysis/unitchecker/unitchecker112.go b/go/analysis/unitchecker/unitchecker112.go
deleted file mode 100644
index 3180f4a..0000000
--- a/go/analysis/unitchecker/unitchecker112.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build go1.12
-// +build go1.12
-
-package unitchecker
-
-import "go/importer"
-
-func init() {
-	importerForCompiler = importer.ForCompiler
-}
diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go
index 197abd9..1ed0012 100644
--- a/go/analysis/unitchecker/unitchecker_test.go
+++ b/go/analysis/unitchecker/unitchecker_test.go
@@ -2,15 +2,8 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-//go:build go1.12
-// +build go1.12
-
 package unitchecker_test
 
-// This test depends on features such as
-// go vet's support for vetx files (1.11) and
-// the (*os.ProcessState).ExitCode method (1.12).
-
 import (
 	"flag"
 	"os"
@@ -28,17 +21,23 @@
 )
 
 func TestMain(m *testing.M) {
-	if os.Getenv("UNITCHECKER_CHILD") == "1" {
-		// child process
-		main()
+	// child process?
+	switch os.Getenv("ENTRYPOINT") {
+	case "vet":
+		vet()
+		panic("unreachable")
+	case "minivet":
+		minivet()
 		panic("unreachable")
 	}
 
+	// test process
 	flag.Parse()
 	os.Exit(m.Run())
 }
 
-func main() {
+// minivet is a vet-like tool with a few analyzers, for testing.
+func minivet() {
 	unitchecker.Main(
 		findcall.Analyzer,
 		printf.Analyzer,
@@ -162,7 +161,7 @@
 	} {
 		cmd := exec.Command("go", "vet", "-vettool="+os.Args[0], "-findcall.name=MyFunc123")
 		cmd.Args = append(cmd.Args, strings.Fields(test.args)...)
-		cmd.Env = append(exported.Config.Env, "UNITCHECKER_CHILD=1")
+		cmd.Env = append(exported.Config.Env, "ENTRYPOINT=minivet")
 		cmd.Dir = exported.Config.Dir
 
 		out, err := cmd.CombinedOutput()
diff --git a/go/analysis/unitchecker/vet_std_test.go b/go/analysis/unitchecker/vet_std_test.go
new file mode 100644
index 0000000..feea1a2
--- /dev/null
+++ b/go/analysis/unitchecker/vet_std_test.go
@@ -0,0 +1,97 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package unitchecker_test
+
+import (
+	"os"
+	"os/exec"
+	"runtime"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/go/analysis/passes/asmdecl"
+	"golang.org/x/tools/go/analysis/passes/assign"
+	"golang.org/x/tools/go/analysis/passes/atomic"
+	"golang.org/x/tools/go/analysis/passes/bools"
+	"golang.org/x/tools/go/analysis/passes/buildtag"
+	"golang.org/x/tools/go/analysis/passes/cgocall"
+	"golang.org/x/tools/go/analysis/passes/composite"
+	"golang.org/x/tools/go/analysis/passes/copylock"
+	"golang.org/x/tools/go/analysis/passes/directive"
+	"golang.org/x/tools/go/analysis/passes/errorsas"
+	"golang.org/x/tools/go/analysis/passes/framepointer"
+	"golang.org/x/tools/go/analysis/passes/httpresponse"
+	"golang.org/x/tools/go/analysis/passes/ifaceassert"
+	"golang.org/x/tools/go/analysis/passes/loopclosure"
+	"golang.org/x/tools/go/analysis/passes/lostcancel"
+	"golang.org/x/tools/go/analysis/passes/nilfunc"
+	"golang.org/x/tools/go/analysis/passes/printf"
+	"golang.org/x/tools/go/analysis/passes/shift"
+	"golang.org/x/tools/go/analysis/passes/sigchanyzer"
+	"golang.org/x/tools/go/analysis/passes/stdmethods"
+	"golang.org/x/tools/go/analysis/passes/stringintconv"
+	"golang.org/x/tools/go/analysis/passes/structtag"
+	"golang.org/x/tools/go/analysis/passes/testinggoroutine"
+	"golang.org/x/tools/go/analysis/passes/tests"
+	"golang.org/x/tools/go/analysis/passes/timeformat"
+	"golang.org/x/tools/go/analysis/passes/unmarshal"
+	"golang.org/x/tools/go/analysis/passes/unreachable"
+	"golang.org/x/tools/go/analysis/passes/unusedresult"
+	"golang.org/x/tools/go/analysis/unitchecker"
+)
+
+// vet is the entrypoint of this executable when ENTRYPOINT=vet.
+// Keep consistent with the actual vet in GOROOT/src/cmd/vet/main.go.
+func vet() {
+	unitchecker.Main(
+		asmdecl.Analyzer,
+		assign.Analyzer,
+		atomic.Analyzer,
+		bools.Analyzer,
+		buildtag.Analyzer,
+		cgocall.Analyzer,
+		composite.Analyzer,
+		copylock.Analyzer,
+		directive.Analyzer,
+		errorsas.Analyzer,
+		framepointer.Analyzer,
+		httpresponse.Analyzer,
+		ifaceassert.Analyzer,
+		loopclosure.Analyzer,
+		lostcancel.Analyzer,
+		nilfunc.Analyzer,
+		printf.Analyzer,
+		shift.Analyzer,
+		sigchanyzer.Analyzer,
+		stdmethods.Analyzer,
+		stringintconv.Analyzer,
+		structtag.Analyzer,
+		tests.Analyzer,
+		testinggoroutine.Analyzer,
+		timeformat.Analyzer,
+		unmarshal.Analyzer,
+		unreachable.Analyzer,
+		// unsafeptr.Analyzer, // currently reports findings in runtime
+		unusedresult.Analyzer,
+	)
+}
+
+// TestVetStdlib runs the same analyzers as the actual vet over the
+// standard library, using go vet and unitchecker, to ensure that
+// there are no findings.
+func TestVetStdlib(t *testing.T) {
+	if testing.Short() {
+		t.Skip("skipping in -short mode")
+	}
+	if version := runtime.Version(); !strings.HasPrefix(version, "devel") {
+		t.Skipf("This test is only wanted on development branches where code can be easily fixed. Skipping because runtime.Version=%q.", version)
+	}
+
+	cmd := exec.Command("go", "vet", "-vettool="+os.Args[0], "std")
+	cmd.Env = append(os.Environ(), "ENTRYPOINT=vet")
+	if out, err := cmd.CombinedOutput(); err != nil {
+		t.Errorf("go vet std failed (%v):\n%s", err, out)
+	}
+}
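vet_std_test.go and the updated unitchecker_test.go share one re-exec trick: TestMain inspects the ENTRYPOINT environment variable and, when it names a tool, runs that tool's unitchecker.Main entrypoint instead of the tests, so "go vet -vettool=os.Args[0]" can reuse the already-built test binary as the vet tool. A stripped-down sketch of the pattern follows; the package name, tool name, and test are hypothetical, and the code would live in a *_test.go file.

	package p

	import (
		"flag"
		"os"
		"os/exec"
		"testing"
	)

	// TestMain doubles as a dispatcher: when ENTRYPOINT names a tool, the
	// binary behaves as that tool and never runs the tests.
	func TestMain(m *testing.M) {
		if os.Getenv("ENTRYPOINT") == "mytool" {
			mytool()
			panic("unreachable")
		}
		flag.Parse()
		os.Exit(m.Run())
	}

	// mytool stands in for a real entrypoint such as unitchecker.Main(...).
	func mytool() {
		os.Exit(0)
	}

	// TestReExec re-invokes this very test binary as the tool.
	func TestReExec(t *testing.T) {
		cmd := exec.Command(os.Args[0])
		cmd.Env = append(os.Environ(), "ENTRYPOINT=mytool")
		if out, err := cmd.CombinedOutput(); err != nil {
			t.Fatalf("child failed: %v\n%s", err, out)
		}
	}

In the real tests the child entrypoints are vet() and minivet(), and the parent drives them through "go vet -vettool=" rather than invoking the binary directly.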
diff --git a/go/callgraph/vta/helpers_test.go b/go/callgraph/vta/helpers_test.go
index 768365f..facf6af 100644
--- a/go/callgraph/vta/helpers_test.go
+++ b/go/callgraph/vta/helpers_test.go
@@ -5,12 +5,14 @@
 package vta
 
 import (
+	"bytes"
 	"fmt"
 	"go/ast"
 	"go/parser"
 	"io/ioutil"
 	"sort"
 	"strings"
+	"testing"
 
 	"golang.org/x/tools/go/callgraph"
 	"golang.org/x/tools/go/ssa/ssautil"
@@ -114,3 +116,12 @@
 	}
 	return gs
 }
+
+// logFns logs the functions of prog to t.
+func logFns(t testing.TB, prog *ssa.Program) {
+	for fn := range ssautil.AllFunctions(prog) {
+		var buf bytes.Buffer
+		fn.WriteTo(&buf)
+		t.Log(buf.String())
+	}
+}
diff --git a/go/callgraph/vta/testdata/src/arrays_generics.go b/go/callgraph/vta/testdata/src/arrays_generics.go
new file mode 100644
index 0000000..7712d4c
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/arrays_generics.go
@@ -0,0 +1,28 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+type F func()
+
+func set[T [1]F | [2]F](arr *T, i int) {
+	// Indexes into a pointer to an indexable type T, where T does not have a core type.
+	// SSA instruction:	t0 = &arr[i]
+	(*arr)[i] = bar
+}
+
+func bar() {
+	print("here")
+}
+
+func Foo() {
+	var arr [1]F
+	set(&arr, 0)
+	arr[0]()
+}
+
+// WANT:
+// Foo: set[[1]testdata.F](t0, 0:int) -> set[[1]testdata.F]; t3() -> bar
diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go
index d183198..3471aae 100644
--- a/go/callgraph/vta/utils.go
+++ b/go/callgraph/vta/utils.go
@@ -123,7 +123,14 @@
 func sliceArrayElem(t types.Type) types.Type {
 	switch u := t.Underlying().(type) {
 	case *types.Pointer:
-		return u.Elem().Underlying().(*types.Array).Elem()
+		switch e := u.Elem().Underlying().(type) {
+		case *types.Array:
+			return e.Elem()
+		case *types.Interface:
+			return sliceArrayElem(e) // e is a type param with matching element types.
+		default:
+			panic(t)
+		}
 	case *types.Array:
 		return u.Elem()
 	case *types.Slice:
diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go
index 75a8ceb..47962e3 100644
--- a/go/callgraph/vta/vta_test.go
+++ b/go/callgraph/vta/vta_test.go
@@ -124,18 +124,26 @@
 	}
 
 	// TODO(zpavlinovic): add more tests
-	file := "testdata/src/callgraph_generics.go"
-	prog, want, err := testProg(file, ssa.InstantiateGenerics)
-	if err != nil {
-		t.Fatalf("couldn't load test file '%s': %s", file, err)
+	files := []string{
+		"testdata/src/arrays_generics.go",
+		"testdata/src/callgraph_generics.go",
 	}
-	if len(want) == 0 {
-		t.Fatalf("couldn't find want in `%s`", file)
-	}
+	for _, file := range files {
+		t.Run(file, func(t *testing.T) {
+			prog, want, err := testProg(file, ssa.InstantiateGenerics)
+			if err != nil {
+				t.Fatalf("couldn't load test file '%s': %s", file, err)
+			}
+			if len(want) == 0 {
+				t.Fatalf("couldn't find want in `%s`", file)
+			}
 
-	g := CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
-	got := callGraphStr(g)
-	if diff := setdiff(want, got); len(diff) != 0 {
-		t.Errorf("computed callgraph %v should contain %v (diff: %v)", got, want, diff)
+			g := CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+			got := callGraphStr(g)
+			if diff := setdiff(want, got); len(diff) != 0 {
+				t.Errorf("computed callgraph %v should contain %v (diff: %v)", got, want, diff)
+				logFns(t, prog)
+			}
+		})
 	}
 }
diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go
index 165ede0..03543bd 100644
--- a/go/gcexportdata/gcexportdata.go
+++ b/go/gcexportdata/gcexportdata.go
@@ -128,15 +128,14 @@
 	// (from "version"). Select appropriate importer.
 	if len(data) > 0 {
 		switch data[0] {
-		case 'i':
+		case 'v', 'c', 'd': // binary, till go1.10
+			return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
+
+		case 'i': // indexed, till go1.19
 			_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
 			return pkg, err
 
-		case 'v', 'c', 'd':
-			_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
-			return pkg, err
-
-		case 'u':
+		case 'u': // unified, from go1.20
 			_, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
 			return pkg, err
 
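For callers of go/gcexportdata the change is behavioral: export data written in the pre-Go 1.11 binary format ('v', 'c', 'd') is now rejected with an error instead of being decoded, while the indexed ('i') and unified ('u') formats continue to work. A hedged usage sketch; the input file and the package path argument are placeholders.

	package main

	import (
		"fmt"
		"go/token"
		"go/types"
		"log"
		"os"

		"golang.org/x/tools/go/gcexportdata"
	)

	func main() {
		// os.Args[1] is a placeholder: a compiler archive/object file that
		// carries export data (for example, the output of "go build -o p.a").
		f, err := os.Open(os.Args[1])
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		r, err := gcexportdata.NewReader(f) // locate the export data section
		if err != nil {
			log.Fatal(err)
		}

		fset := token.NewFileSet()
		imports := make(map[string]*types.Package)
		pkg, err := gcexportdata.Read(r, fset, imports, "example.com/some/pkg")
		if err != nil {
			// Old binary export data now ends up here, e.g.
			// "binary (c) import format is no longer supported".
			log.Fatal(err)
		}
		fmt.Println("loaded:", pkg.Path())
	}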
diff --git a/go/packages/stdlib_test.go b/go/packages/stdlib_test.go
index f8b93df..aac1ea5 100644
--- a/go/packages/stdlib_test.go
+++ b/go/packages/stdlib_test.go
@@ -5,11 +5,7 @@
 package packages_test
 
 import (
-	"bytes"
-	"io/ioutil"
-	"path/filepath"
 	"runtime"
-	"strings"
 	"testing"
 	"time"
 
@@ -54,81 +50,3 @@
 	t.Log("Metadata:   ", t1.Sub(t0))                          // ~800ms on 12 threads
 	t.Log("#MB:        ", int64(memstats.Alloc-alloc)/1000000) // ~1MB
 }
-
-func TestCgoOption(t *testing.T) {
-	skipIfShort(t, "uses tons of memory (https://golang.org/issue/14113)")
-
-	testenv.NeedsGoPackages(t)
-
-	// TODO(adonovan): see if we can get away without these old
-	// go/loader hacks now that we use the go list command.
-	//
-	// switch runtime.GOOS {
-	// // On these systems, the net and os/user packages don't use cgo
-	// // or the std library is incomplete (Android).
-	// case "android", "plan9", "solaris", "windows":
-	// 	t.Skipf("no cgo or incomplete std lib on %s", runtime.GOOS)
-	// }
-	// // In nocgo builds (e.g. linux-amd64-nocgo),
-	// // there is no "runtime/cgo" package,
-	// // so cgo-generated Go files will have a failing import.
-	// if !build.Default.CgoEnabled {
-	// 	return
-	// }
-
-	// Test that we can load cgo-using packages with
-	// DisableCgo=true/false, which, among other things, causes go
-	// list to select pure Go/native implementations, respectively,
-	// based on build tags.
-	//
-	// Each entry specifies a package-level object and the generic
-	// file expected to define it when cgo is disabled.
-	// When cgo is enabled, the exact file is not specified (since
-	// it varies by platform), but must differ from the generic one.
-	//
-	// The test also loads the actual file to verify that the
-	// object is indeed defined at that location.
-	for _, test := range []struct {
-		pkg, declKeyword, name, genericFile string
-	}{
-		{"net", "type", "addrinfoErrno", "cgo_stub.go"},
-		{"os/user", "func", "current", "lookup_stubs.go"},
-	} {
-		cfg := &packages.Config{Mode: packages.LoadSyntax}
-		pkgs, err := packages.Load(cfg, test.pkg)
-		if err != nil {
-			t.Errorf("Load failed: %v", err)
-			continue
-		}
-		if packages.PrintErrors(pkgs) > 0 {
-			t.Error("there were errors loading standard library")
-			continue
-		}
-		pkg := pkgs[0]
-		obj := pkg.Types.Scope().Lookup(test.name)
-		if obj == nil {
-			t.Errorf("no object %s.%s", test.pkg, test.name)
-			continue
-		}
-		posn := pkg.Fset.Position(obj.Pos())
-		gotFile := filepath.Base(posn.Filename)
-		filesMatch := gotFile == test.genericFile
-
-		if filesMatch {
-			t.Errorf("!DisableCgo: %s found in %s, want native file",
-				obj, gotFile)
-		}
-
-		// Load the file and check the object is declared at the right place.
-		b, err := ioutil.ReadFile(posn.Filename)
-		if err != nil {
-			t.Errorf("can't read %s: %s", posn.Filename, err)
-			continue
-		}
-		line := string(bytes.Split(b, []byte("\n"))[posn.Line-1])
-		// Don't assume posn.Column is accurate.
-		if !strings.Contains(line, test.declKeyword+" "+test.name) {
-			t.Errorf("%s: %s not declared here (looking at %q)", posn, obj, line)
-		}
-	}
-}
diff --git a/go/ssa/builder.go b/go/ssa/builder.go
index ffa6667..8931fb4 100644
--- a/go/ssa/builder.go
+++ b/go/ssa/builder.go
@@ -363,7 +363,7 @@
 		}
 
 	case "new":
-		alloc := emitNew(fn, deref(typ), pos)
+		alloc := emitNew(fn, mustDeref(typ), pos)
 		alloc.Comment = "new"
 		return alloc
 
@@ -373,10 +373,8 @@
 		// We must still evaluate the value, though.  (If it
 		// was side-effect free, the whole call would have
 		// been constant-folded.)
-		//
-		// Type parameters are always non-constant so use Underlying.
-		t := deref(fn.typeOf(args[0])).Underlying()
-		if at, ok := t.(*types.Array); ok {
+		t, _ := deref(fn.typeOf(args[0]))
+		if at, ok := typeparams.CoreType(t).(*types.Array); ok {
 			b.expr(fn, args[0]) // for effects only
 			return intConst(at.Len())
 		}
@@ -431,12 +429,12 @@
 		return &address{addr: v, pos: e.Pos(), expr: e}
 
 	case *ast.CompositeLit:
-		t := deref(fn.typeOf(e))
+		typ, _ := deref(fn.typeOf(e))
 		var v *Alloc
 		if escaping {
-			v = emitNew(fn, t, e.Lbrace)
+			v = emitNew(fn, typ, e.Lbrace)
 		} else {
-			v = fn.addLocal(t, e.Lbrace)
+			v = fn.addLocal(typ, e.Lbrace)
 		}
 		v.Comment = "complit"
 		var sb storebuf
@@ -459,7 +457,7 @@
 		wantAddr := true
 		v := b.receiver(fn, e.X, wantAddr, escaping, sel)
 		index := sel.index[len(sel.index)-1]
-		fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
+		fld := fieldOf(mustDeref(v.Type()), index) // v is an addr.
 
 		// Due to the two phases of resolving AssignStmt, a panic from x.f = p()
 		// when x is nil is required to come after the side-effects of
@@ -508,7 +506,7 @@
 			v.setType(et)
 			return fn.emit(v)
 		}
-		return &lazyAddress{addr: emit, t: deref(et), pos: e.Lbrack, expr: e}
+		return &lazyAddress{addr: emit, t: mustDeref(et), pos: e.Lbrack, expr: e}
 
 	case *ast.StarExpr:
 		return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
@@ -554,7 +552,7 @@
 		// so if the type of the location is a pointer,
 		// an &-operation is implied.
 		if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
-			if isPointer(loc.typ()) {
+			if _, ok := deref(loc.typ()); ok {
 				ptr := b.addr(fn, e, true).address(fn)
 				// copy address
 				if sb != nil {
@@ -584,7 +582,7 @@
 
 				// Subtle: emit debug ref for aggregate types only;
 				// slice and map are handled by store ops in compLit.
-				switch loc.typ().Underlying().(type) {
+				switch typeparams.CoreType(loc.typ()).(type) {
 				case *types.Struct, *types.Array:
 					emitDebugRef(fn, e, addr, true)
 				}
@@ -831,7 +829,7 @@
 			// The result is a "bound".
 			obj := sel.obj.(*types.Func)
 			rt := fn.typ(recvType(obj))
-			wantAddr := isPointer(rt)
+			_, wantAddr := deptr(rt)
 			escaping := true
 			v := b.receiver(fn, e.X, wantAddr, escaping, sel)
 
@@ -958,8 +956,9 @@
 //
 // escaping is defined as per builder.addr().
 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value {
+
 	var v Value
-	if wantAddr && !sel.indirect && !isPointer(fn.typeOf(e)) {
+	if _, eptr := deptr(fn.typeOf(e)); wantAddr && !sel.indirect && !eptr {
 		v = b.addr(fn, e, escaping).address(fn)
 	} else {
 		v = b.expr(fn, e)
@@ -968,7 +967,7 @@
 	last := len(sel.index) - 1
 	// The position of implicit selection is the position of the inducing receiver expression.
 	v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos())
-	if !wantAddr && isPointer(v.Type()) {
+	if _, vptr := deptr(v.Type()); !wantAddr && vptr {
 		v = emitLoad(fn, v)
 	}
 	return v
@@ -987,7 +986,7 @@
 			obj := sel.obj.(*types.Func)
 			recv := recvType(obj)
 
-			wantAddr := isPointer(recv)
+			_, wantAddr := deptr(recv)
 			escaping := true
 			v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
 			if types.IsInterface(recv) {
@@ -1253,36 +1252,13 @@
 // literal has type *T behaves like &T{}.
 // In that case, addr must hold a T, not a *T.
 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
-	typ := deref(fn.typeOf(e))                        // type with name [may be type param]
-	t := deref(typeparams.CoreType(typ)).Underlying() // core type for comp lit case
-	// Computing typ and t is subtle as these handle pointer types.
-	// For example, &T{...} is valid even for maps and slices.
-	// Also typ should refer to T (not *T) while t should be the core type of T.
-	//
-	// To show the ordering to take into account, consider the composite literal
-	// expressions `&T{f: 1}` and `{f: 1}` within the expression `[]S{{f: 1}}` here:
-	//   type N struct{f int}
-	//   func _[T N, S *N]() {
-	//     _ = &T{f: 1}
-	//     _ = []S{{f: 1}}
-	//   }
-	// For `&T{f: 1}`, we compute `typ` and `t` as:
-	//     typeOf(&T{f: 1}) == *T
-	//     deref(*T)        == T (typ)
-	//     CoreType(T)      == N
-	//     deref(N)         == N
-	//     N.Underlying()   == struct{f int} (t)
-	// For `{f: 1}` in `[]S{{f: 1}}`,  we compute `typ` and `t` as:
-	//     typeOf({f: 1})   == S
-	//     deref(S)         == S (typ)
-	//     CoreType(S)      == *N
-	//     deref(*N)        == N
-	//     N.Underlying()   == struct{f int} (t)
-	switch t := t.(type) {
+	typ, _ := deref(fn.typeOf(e)) // type with name [may be type param]
+	switch t := typeparams.CoreType(typ).(type) {
 	case *types.Struct:
 		if !isZero && len(e.Elts) != t.NumFields() {
 			// memclear
-			sb.store(&address{addr, e.Lbrace, nil}, zeroConst(deref(addr.Type())))
+			zt, _ := deref(addr.Type())
+			sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt))
 			isZero = true
 		}
 		for i, e := range e.Elts {
@@ -1326,7 +1302,8 @@
 
 			if !isZero && int64(len(e.Elts)) != at.Len() {
 				// memclear
-				sb.store(&address{array, e.Lbrace, nil}, zeroConst(deref(array.Type())))
+				zt, _ := deref(array.Type())
+				sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt))
 			}
 		}
 
@@ -1379,8 +1356,13 @@
 			//	map[*struct{}]bool{{}: true}
 			// An &-operation may be implied:
 			//	map[*struct{}]bool{&struct{}{}: true}
+			wantAddr := false
+			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok {
+				_, wantAddr = deref(t.Key())
+			}
+
 			var key Value
-			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
+			if wantAddr {
 				// A CompositeLit never evaluates to a pointer,
 				// so if the type of the location is a pointer,
 				// an &-operation is implied.
@@ -1407,7 +1389,7 @@
 		sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
 
 	default:
-		panic("unexpected CompositeLit type: " + t.String())
+		panic("unexpected CompositeLit type: " + typ.String())
 	}
 }
 
@@ -1873,15 +1855,14 @@
 
 	// Determine number of iterations.
 	var length Value
-	if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
+	dt, _ := deref(x.Type())
+	if arr, ok := typeparams.CoreType(dt).(*types.Array); ok {
 		// For array or *array, the number of iterations is
 		// known statically thanks to the type.  We avoid a
 		// data dependence upon x, permitting later dead-code
 		// elimination if x is pure, static unrolling, etc.
 		// Ranging over a nil *array may have >0 iterations.
 		// We still generate code for x, in case it has effects.
-		//
-		// TypeParams do not have constant length. Use underlying instead of core type.
 		length = intConst(arr.Len())
 	} else {
 		// length = len(x).
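Throughout builder.go the old deref/isPointer pair is replaced by helpers that also report whether a type is a pointer: deref returns the pointee and true when the type is a pointer and (t, false) otherwise, mustDeref panics when its argument is not a pointer, and deptr/fieldOf serve the receiver and struct-field paths. Those helpers are defined elsewhere in go/ssa and are not part of this hunk, and the exact split of responsibilities between deref and deptr is not visible here; the following is only a sketch of their assumed shape, written against go/types directly.

	package main

	import (
		"fmt"
		"go/types"
	)

	// coreType stands in for typeparams.CoreType; for ordinary (non-generic)
	// types it is simply the underlying type.
	func coreType(t types.Type) types.Type { return t.Underlying() }

	// deref returns the pointee and true if t's core type is a pointer,
	// and (t, false) otherwise.
	func deref(t types.Type) (types.Type, bool) {
		if ptr, ok := coreType(t).(*types.Pointer); ok {
			return ptr.Elem(), true
		}
		return t, false
	}

	// mustDeref is for call sites that know t must be a pointer.
	func mustDeref(t types.Type) types.Type {
		p, ok := deref(t)
		if !ok {
			panic(fmt.Sprintf("%v is not a pointer", t))
		}
		return p
	}

	func main() {
		intT := types.Typ[types.Int]
		ptrT := types.NewPointer(intT)
		fmt.Println(deref(ptrT))     // int true
		fmt.Println(deref(intT))     // int false
		fmt.Println(mustDeref(ptrT)) // int
	}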
diff --git a/go/ssa/builder_generic_test.go b/go/ssa/builder_generic_test.go
index 2588f74..77de326 100644
--- a/go/ssa/builder_generic_test.go
+++ b/go/ssa/builder_generic_test.go
@@ -5,6 +5,7 @@
 package ssa_test
 
 import (
+	"bytes"
 	"fmt"
 	"go/parser"
 	"go/token"
@@ -34,441 +35,469 @@
 	if !typeparams.Enabled {
 		t.Skip("TestGenericBodies requires type parameters")
 	}
-	for _, test := range []struct {
-		pkg      string // name of the package.
-		contents string // contents of the Go package.
-	}{
-		{
-			pkg: "p",
-			contents: `
-			package p
+	for _, contents := range []string{
+		`
+		package p00
 
-			func f(x int) {
-				var i interface{}
-				print(i, 0) //@ types("interface{}", int)
-				print()     //@ types()
-				print(x)    //@ types(int)
+		func f(x int) {
+			var i interface{}
+			print(i, 0) //@ types("interface{}", int)
+			print()     //@ types()
+			print(x)    //@ types(int)
+		}
+		`,
+		`
+		package p01
+
+		func f[T any](x T) {
+			print(x) //@ types(T)
+		}
+		`,
+		`
+		package p02
+
+		func f[T ~int]() {
+			var x T
+			print(x) //@ types(T)
+		}
+		`,
+		`
+		package p03
+
+		func a[T ~[4]byte](x T) {
+			for k, v := range x {
+				print(x, k, v) //@ types(T, int, byte)
 			}
-			`,
-		},
-		{
-			pkg: "q",
-			contents: `
-			package q
-
-			func f[T any](x T) {
-				print(x) //@ types(T)
+		}
+		func b[T ~*[4]byte](x T) {
+			for k, v := range x {
+				print(x, k, v) //@ types(T, int, byte)
 			}
-			`,
-		},
-		{
-			pkg: "r",
-			contents: `
-			package r
-
-			func f[T ~int]() {
-				var x T
-				print(x) //@ types(T)
+		}
+		func c[T ~[]byte](x T) {
+			for k, v := range x {
+				print(x, k, v) //@ types(T, int, byte)
 			}
-			`,
-		},
-		{
-			pkg: "s",
-			contents: `
-			package s
+		}
+		func d[T ~string](x T) {
+			for k, v := range x {
+				print(x, k, v) //@ types(T, int, rune)
+			}
+		}
+		func e[T ~map[int]string](x T) {
+			for k, v := range x {
+				print(x, k, v) //@ types(T, int, string)
+			}
+		}
+		func f[T ~chan string](x T) {
+			for v := range x {
+				print(x, v) //@ types(T, string)
+			}
+		}
 
-			func a[T ~[4]byte](x T) {
-				for k, v := range x {
-					print(x, k, v) //@ types(T, int, byte)
+		func From() {
+			type A [4]byte
+			print(a[A]) //@ types("func(x p03.A)")
+
+			type B *[4]byte
+			print(b[B]) //@ types("func(x p03.B)")
+
+			type C []byte
+			print(c[C]) //@ types("func(x p03.C)")
+
+			type D string
+			print(d[D]) //@ types("func(x p03.D)")
+
+			type E map[int]string
+			print(e[E]) //@ types("func(x p03.E)")
+
+			type F chan string
+			print(f[F]) //@ types("func(x p03.F)")
+		}
+		`,
+		`
+		package p05
+
+		func f[S any, T ~chan S](x T) {
+			for v := range x {
+				print(x, v) //@ types(T, S)
+			}
+		}
+
+		func From() {
+			type F chan string
+			print(f[string, F]) //@ types("func(x p05.F)")
+		}
+		`,
+		`
+		package p06
+
+		func fibonacci[T ~chan int](c, quit T) {
+			x, y := 0, 1
+			for {
+				select {
+				case c <- x:
+					x, y = y, x+y
+				case <-quit:
+					print(c, quit, x, y) //@ types(T, T, int, int)
+					return
 				}
 			}
-			func b[T ~*[4]byte](x T) {
-				for k, v := range x {
-					print(x, k, v) //@ types(T, int, byte)
+		}
+		func start[T ~chan int](c, quit T) {
+			go func() {
+				for i := 0; i < 10; i++ {
+					print(<-c) //@ types(int)
+				}
+				quit <- 0
+			}()
+		}
+		func From() {
+			type F chan int
+			c := make(F)
+			quit := make(F)
+			print(start[F], c, quit)     //@ types("func(c p06.F, quit p06.F)", "p06.F", "p06.F")
+			print(fibonacci[F], c, quit) //@ types("func(c p06.F, quit p06.F)", "p06.F", "p06.F")
+		}
+		`,
+		`
+		package p07
+
+		func f[T ~struct{ x int; y string }](i int) T {
+			u := []T{ T{0, "lorem"},  T{1, "ipsum"}}
+			return u[i]
+		}
+		func From() {
+			type S struct{ x int; y string }
+			print(f[S])     //@ types("func(i int) p07.S")
+		}
+		`,
+		`
+		package p08
+
+		func f[T ~[4]int8](x T, l, h int) []int8 {
+			return x[l:h]
+		}
+		func g[T ~*[4]int16](x T, l, h int) []int16 {
+			return x[l:h]
+		}
+		func h[T ~[]int32](x T, l, h int) T {
+			return x[l:h]
+		}
+		func From() {
+			type F [4]int8
+			type G *[4]int16
+			type H []int32
+			print(f[F](F{}, 0, 0))  //@ types("[]int8")
+			print(g[G](nil, 0, 0)) //@ types("[]int16")
+			print(h[H](nil, 0, 0)) //@ types("p08.H")
+		}
+		`,
+		`
+		package p09
+
+		func h[E any, T ~[]E](x T, l, h int) []E {
+			s := x[l:h]
+			print(s) //@ types("T")
+			return s
+		}
+		func From() {
+			type H []int32
+			print(h[int32, H](nil, 0, 0)) //@ types("[]int32")
+		}
+		`,
+		`
+		package p10
+
+		// Test "make" builtin with different forms on core types and
+		// when capacities are constants or variable.
+		func h[E any, T ~[]E](m, n int) {
+			print(make(T, 3))    //@ types(T)
+			print(make(T, 3, 5)) //@ types(T)
+			print(make(T, m))    //@ types(T)
+			print(make(T, m, n)) //@ types(T)
+		}
+		func i[K comparable, E any, T ~map[K]E](m int) {
+			print(make(T))    //@ types(T)
+			print(make(T, 5)) //@ types(T)
+			print(make(T, m)) //@ types(T)
+		}
+		func j[E any, T ~chan E](m int) {
+			print(make(T))    //@ types(T)
+			print(make(T, 6)) //@ types(T)
+			print(make(T, m)) //@ types(T)
+		}
+		func From() {
+			type H []int32
+			h[int32, H](3, 4)
+			type I map[int8]H
+			i[int8, H, I](5)
+			type J chan I
+			j[I, J](6)
+		}
+		`,
+		`
+		package p11
+
+		func h[T ~[4]int](x T) {
+			print(len(x), cap(x)) //@ types(int, int)
+		}
+		func i[T ~[4]byte | []int | ~chan uint8](x T) {
+			print(len(x), cap(x)) //@ types(int, int)
+		}
+		func j[T ~[4]int | any | map[string]int]() {
+			print(new(T)) //@ types("*T")
+		}
+		func k[T ~[4]int | any | map[string]int](x T) {
+			print(x) //@ types(T)
+			panic(x)
+		}
+		`,
+		`
+		package p12
+
+		func f[E any, F ~func() E](x F) {
+			print(x, x()) //@ types(F, E)
+		}
+		func From() {
+			type T func() int
+			f[int, T](func() int { return 0 })
+			f[int, func() int](func() int { return 1 })
+		}
+		`,
+		`
+		package p13
+
+		func f[E any, M ~map[string]E](m M) {
+			y, ok := m["lorem"]
+			print(m, y, ok) //@ types(M, E, bool)
+		}
+		func From() {
+			type O map[string][]int
+			f(O{"lorem": []int{0, 1, 2, 3}})
+		}
+		`,
+		`
+		package p14
+
+		func a[T interface{ []int64 | [5]int64 }](x T) int64 {
+			print(x, x[2], x[3]) //@ types(T, int64, int64)
+			x[2] = 5
+			return x[3]
+		}
+		func b[T interface{ []byte | string }](x T) byte {
+			print(x, x[3]) //@ types(T, byte)
+			return x[3]
+		}
+		func c[T interface{ []byte }](x T) byte {
+			print(x, x[2], x[3]) //@ types(T, byte, byte)
+			x[2] = 'b'
+			return x[3]
+		}
+		func d[T interface{ map[int]int64 }](x T) int64 {
+			print(x, x[2], x[3]) //@ types(T, int64, int64)
+			x[2] = 43
+			return x[3]
+		}
+		func e[T ~string](t T) {
+			print(t, t[0]) //@ types(T, uint8)
+		}
+		func f[T ~string|[]byte](t T) {
+			print(t, t[0]) //@ types(T, uint8)
+		}
+		func g[T []byte](t T) {
+			print(t, t[0]) //@ types(T, byte)
+		}
+		func h[T ~[4]int|[]int](t T) {
+			print(t, t[0]) //@ types(T, int)
+		}
+		func i[T ~[4]int|*[4]int|[]int](t T) {
+			print(t, t[0]) //@ types(T, int)
+		}
+		func j[T ~[4]int|*[4]int|[]int](t T) {
+			print(t, &t[0]) //@ types(T, "*int")
+		}
+		`,
+		`
+		package p15
+
+		type MyInt int
+		type Other int
+		type MyInterface interface{ foo() }
+
+		// ChangeType tests
+		func ct0(x int) { v := MyInt(x);  print(x, v) /*@ types(int, "p15.MyInt")*/ }
+		func ct1[T MyInt | Other, S int ](x S) { v := T(x);  print(x, v) /*@ types(S, T)*/ }
+		func ct2[T int, S MyInt | int ](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
+		func ct3[T MyInt | Other, S MyInt | int ](x S) { v := T(x) ; print(x, v) /*@ types(S, T)*/ }
+
+		// Convert tests
+		func co0[T int | int8](x MyInt) { v := T(x); print(x, v) /*@ types("p15.MyInt", T)*/}
+		func co1[T int | int8](x T) { v := MyInt(x); print(x, v) /*@ types(T, "p15.MyInt")*/ }
+		func co2[S, T int | int8](x T) { v := S(x); print(x, v) /*@ types(T, S)*/ }
+
+		// MakeInterface tests
+		func mi0[T MyInterface](x T) { v := MyInterface(x); print(x, v) /*@ types(T, "p15.MyInterface")*/ }
+
+		// NewConst tests
+		func nc0[T any]() { v := (*T)(nil); print(v) /*@ types("*T")*/}
+
+		// SliceToArrayPointer
+		func sl0[T *[4]int | *[2]int](x []int) { v := T(x); print(x, v) /*@ types("[]int", T)*/ }
+		func sl1[T *[4]int | *[2]int, S []int](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
+		`,
+		`
+		package p16
+
+		func c[T interface{ foo() string }](x T) {
+			print(x, x.foo, x.foo())  /*@ types(T, "func() string", string)*/
+		}
+		`,
+		`
+		package p17
+
+		func eq[T comparable](t T, i interface{}) bool {
+			return t == i
+		}
+		`,
+		// TODO(59983): investigate why writing g.c panics in (*FieldAddr).String.
+		`
+		package p18
+
+		type S struct{ f int }
+		func c[P *S]() []P { return []P{{f: 1}} }
+		`,
+		`
+		package p19
+
+		func sign[bytes []byte | string](s bytes) (bool, bool) {
+			neg := false
+			if len(s) > 0 && (s[0] == '-' || s[0] == '+') {
+				neg = s[0] == '-'
+				s = s[1:]
+			}
+			return !neg, len(s) > 0
+		}
+		`,
+		`package p20
+
+		func digits[bytes []byte | string](s bytes) bool {
+			for _, c := range []byte(s) {
+				if c < '0' || '9' < c {
+					return false
 				}
 			}
-			func c[T ~[]byte](x T) {
-				for k, v := range x {
-					print(x, k, v) //@ types(T, int, byte)
-				}
-			}
-			func d[T ~string](x T) {
-				for k, v := range x {
-					print(x, k, v) //@ types(T, int, rune)
-				}
-			}
-			func e[T ~map[int]string](x T) {
-				for k, v := range x {
-					print(x, k, v) //@ types(T, int, string)
-				}
-			}
-			func f[T ~chan string](x T) {
-				for v := range x {
-					print(x, v) //@ types(T, string)
-				}
-			}
+			return true
+		}
+		`,
+		`
+		package p21
 
-			func From() {
-				type A [4]byte
-				print(a[A]) //@ types("func(x s.A)")
+		type E interface{}
 
-				type B *[4]byte
-				print(b[B]) //@ types("func(x s.B)")
+		func Foo[T E, PT interface{ *T }]() T {
+			pt := PT(new(T))
+			x := *pt
+			print(x)  /*@ types(T)*/
+			return x
+		}
+		`,
+		`
+		package p22
 
-				type C []byte
-				print(c[C]) //@ types("func(x s.C)")
+		func f[M any, PM *M](p PM) {
+			var m M
+			*p = m
+			print(m)  /*@ types(M)*/
+			print(p)  /*@ types(PM)*/
+		}
+		`,
+		`
+		package p23
 
-				type D string
-				print(d[D]) //@ types("func(x s.D)")
+		type A struct{int}
+		func (*A) Marker() {}
 
-				type E map[int]string
-				print(e[E]) //@ types("func(x s.E)")
+		type B struct{string}
+		func (*B) Marker() {}
 
-				type F chan string
-				print(f[F]) //@ types("func(x s.F)")
-			}
-			`,
-		},
-		{
-			pkg: "t",
-			contents: `
-			package t
+		type C struct{float32}
+		func (*C) Marker() {}
 
-			func f[S any, T ~chan S](x T) {
-				for v := range x {
-					print(x, v) //@ types(T, S)
-				}
-			}
+		func process[T interface {
+			*A
+			*B
+			*C
+			Marker()
+		}](v T) {
+			v.Marker()
+			a := *(any(v).(*A)); print(a)  /*@ types("p23.A")*/
+			b := *(any(v).(*B)); print(b)  /*@ types("p23.B")*/
+			c := *(any(v).(*C)); print(c)  /*@ types("p23.C")*/
+		}
+		`,
+		`
+		package p24
 
-			func From() {
-				type F chan string
-				print(f[string, F]) //@ types("func(x t.F)")
-			}
-			`,
-		},
-		{
-			pkg: "u",
-			contents: `
-			package u
+		func a[T any](f func() [4]T) {
+			x := len(f())
+			print(x) /*@ types("int")*/
+		}
 
-			func fibonacci[T ~chan int](c, quit T) {
-				x, y := 0, 1
-				for {
-					select {
-					case c <- x:
-						x, y = y, x+y
-					case <-quit:
-						print(c, quit, x, y) //@ types(T, T, int, int)
-						return
-					}
-				}
-			}
-			func start[T ~chan int](c, quit T) {
-				go func() {
-					for i := 0; i < 10; i++ {
-						print(<-c) //@ types(int)
-					}
-					quit <- 0
-				}()
-			}
-			func From() {
-				type F chan int
-				c := make(F)
-				quit := make(F)
-				print(start[F], c, quit)     //@ types("func(c u.F, quit u.F)", "u.F", "u.F")
-				print(fibonacci[F], c, quit) //@ types("func(c u.F, quit u.F)", "u.F", "u.F")
-			}
-			`,
-		},
-		{
-			pkg: "v",
-			contents: `
-			package v
+		func b[T [4]any](f func() T) {
+			x := len(f())
+			print(x) /*@ types("int")*/
+		}
 
-			func f[T ~struct{ x int; y string }](i int) T {
-				u := []T{ T{0, "lorem"},  T{1, "ipsum"}}
-				return u[i]
-			}
-			func From() {
-				type S struct{ x int; y string }
-				print(f[S])     //@ types("func(i int) v.S")
-			}
-			`,
-		},
-		{
-			pkg: "w",
-			contents: `
-			package w
+		func c[T any](f func() *[4]T) {
+			x := len(f())
+			print(x) /*@ types("int")*/
+		}
 
-			func f[T ~[4]int8](x T, l, h int) []int8 {
-				return x[l:h]
-			}
-			func g[T ~*[4]int16](x T, l, h int) []int16 {
-				return x[l:h]
-			}
-			func h[T ~[]int32](x T, l, h int) T {
-				return x[l:h]
-			}
-			func From() {
-				type F [4]int8
-				type G *[4]int16
-				type H []int32
-				print(f[F](F{}, 0, 0))  //@ types("[]int8")
-				print(g[G](nil, 0, 0)) //@ types("[]int16")
-				print(h[H](nil, 0, 0)) //@ types("w.H")
-			}
-			`,
-		},
-		{
-			pkg: "x",
-			contents: `
-			package x
+		func d[T *[4]any](f func() T) {
+			x := len(f())
+			print(x) /*@ types("int")*/
+		}
+		`,
+		`
+		package p25
 
-			func h[E any, T ~[]E](x T, l, h int) []E {
-				s := x[l:h]
-				print(s) //@ types("T")
-				return s
+		func a[T any]() {
+			var f func() [4]T
+			for i, v := range f() {
+				print(i, v) /*@ types("int", "T")*/
 			}
-			func From() {
-				type H []int32
-				print(h[int32, H](nil, 0, 0)) //@ types("[]int32")
-			}
-			`,
-		},
-		{
-			pkg: "y",
-			contents: `
-			package y
+		}
 
-			// Test "make" builtin with different forms on core types and
-			// when capacities are constants or variable.
-			func h[E any, T ~[]E](m, n int) {
-				print(make(T, 3))    //@ types(T)
-				print(make(T, 3, 5)) //@ types(T)
-				print(make(T, m))    //@ types(T)
-				print(make(T, m, n)) //@ types(T)
+		func b[T [4]any](f func() T) {
+			for i, v := range f() {
+				print(i, v) /*@ types("int", "any")*/
 			}
-			func i[K comparable, E any, T ~map[K]E](m int) {
-				print(make(T))    //@ types(T)
-				print(make(T, 5)) //@ types(T)
-				print(make(T, m)) //@ types(T)
-			}
-			func j[E any, T ~chan E](m int) {
-				print(make(T))    //@ types(T)
-				print(make(T, 6)) //@ types(T)
-				print(make(T, m)) //@ types(T)
-			}
-			func From() {
-				type H []int32
-				h[int32, H](3, 4)
-				type I map[int8]H
-				i[int8, H, I](5)
-				type J chan I
-				j[I, J](6)
-			}
-			`,
-		},
-		{
-			pkg: "z",
-			contents: `
-			package z
+		}
 
-			func h[T ~[4]int](x T) {
-				print(len(x), cap(x)) //@ types(int, int)
+		func c[T any](f func() *[4]T) {
+			for i, v := range f() {
+				print(i, v) /*@ types("int", "T")*/
 			}
-			func i[T ~[4]byte | []int | ~chan uint8](x T) {
-				print(len(x), cap(x)) //@ types(int, int)
-			}
-			func j[T ~[4]int | any | map[string]int]() {
-				print(new(T)) //@ types("*T")
-			}
-			func k[T ~[4]int | any | map[string]int](x T) {
-				print(x) //@ types(T)
-				panic(x)
-			}
-			`,
-		},
-		{
-			pkg: "a",
-			contents: `
-			package a
+		}
 
-			func f[E any, F ~func() E](x F) {
-				print(x, x()) //@ types(F, E)
+		func d[T *[4]any](f func() T) {
+			for i, v := range f() {
+				print(i, v) /*@ types("int", "any")*/
 			}
-			func From() {
-				type T func() int
-				f[int, T](func() int { return 0 })
-				f[int, func() int](func() int { return 1 })
-			}
-			`,
-		},
-		{
-			pkg: "b",
-			contents: `
-			package b
-
-			func f[E any, M ~map[string]E](m M) {
-				y, ok := m["lorem"]
-				print(m, y, ok) //@ types(M, E, bool)
-			}
-			func From() {
-				type O map[string][]int
-				f(O{"lorem": []int{0, 1, 2, 3}})
-			}
-			`,
-		},
-		{
-			pkg: "c",
-			contents: `
-			package c
-
-			func a[T interface{ []int64 | [5]int64 }](x T) int64 {
-				print(x, x[2], x[3]) //@ types(T, int64, int64)
-				x[2] = 5
-				return x[3]
-			}
-			func b[T interface{ []byte | string }](x T) byte {
-				print(x, x[3]) //@ types(T, byte)
-		        return x[3]
-			}
-			func c[T interface{ []byte }](x T) byte {
-				print(x, x[2], x[3]) //@ types(T, byte, byte)
-				x[2] = 'b'
-				return x[3]
-			}
-			func d[T interface{ map[int]int64 }](x T) int64 {
-				print(x, x[2], x[3]) //@ types(T, int64, int64)
-				x[2] = 43
-        		return x[3]
-			}
-			func e[T ~string](t T) {
-				print(t, t[0]) //@ types(T, uint8)
-			}
-			func f[T ~string|[]byte](t T) {
-				print(t, t[0]) //@ types(T, uint8)
-			}
-			func g[T []byte](t T) {
-				print(t, t[0]) //@ types(T, byte)
-			}
-			func h[T ~[4]int|[]int](t T) {
-				print(t, t[0]) //@ types(T, int)
-			}
-			func i[T ~[4]int|*[4]int|[]int](t T) {
-				print(t, t[0]) //@ types(T, int)
-			}
-			func j[T ~[4]int|*[4]int|[]int](t T) {
-				print(t, &t[0]) //@ types(T, "*int")
-			}
-			`,
-		},
-		{
-			pkg: "d",
-			contents: `
-			package d
-
-			type MyInt int
-			type Other int
-			type MyInterface interface{ foo() }
-
-			// ChangeType tests
-			func ct0(x int) { v := MyInt(x);  print(x, v) /*@ types(int, "d.MyInt")*/ }
-			func ct1[T MyInt | Other, S int ](x S) { v := T(x);  print(x, v) /*@ types(S, T)*/ }
-			func ct2[T int, S MyInt | int ](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
-			func ct3[T MyInt | Other, S MyInt | int ](x S) { v := T(x) ; print(x, v) /*@ types(S, T)*/ }
-
-			// Convert tests
-			func co0[T int | int8](x MyInt) { v := T(x); print(x, v) /*@ types("d.MyInt", T)*/}
-			func co1[T int | int8](x T) { v := MyInt(x); print(x, v) /*@ types(T, "d.MyInt")*/ }
-			func co2[S, T int | int8](x T) { v := S(x); print(x, v) /*@ types(T, S)*/ }
-
-			// MakeInterface tests
-			func mi0[T MyInterface](x T) { v := MyInterface(x); print(x, v) /*@ types(T, "d.MyInterface")*/ }
-
-			// NewConst tests
-			func nc0[T any]() { v := (*T)(nil); print(v) /*@ types("*T")*/}
-
-			// SliceToArrayPointer
-			func sl0[T *[4]int | *[2]int](x []int) { v := T(x); print(x, v) /*@ types("[]int", T)*/ }
-			func sl1[T *[4]int | *[2]int, S []int](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
-			`,
-		},
-		{
-			pkg: "e",
-			contents: `
-			package e
-
-			func c[T interface{ foo() string }](x T) {
-				print(x, x.foo, x.foo())  /*@ types(T, "func() string", string)*/
-			}
-			`,
-		},
-		{
-			pkg: "f",
-			contents: `package f
-
-			func eq[T comparable](t T, i interface{}) bool {
-				return t == i
-			}
-			`,
-		},
-		{
-			pkg: "g",
-			contents: `package g
-			type S struct{ f int }
-			func c[P *S]() []P { return []P{{f: 1}} }
-			`,
-		},
-		{
-			pkg: "h",
-			contents: `package h
-			func sign[bytes []byte | string](s bytes) (bool, bool) {
-				neg := false
-				if len(s) > 0 && (s[0] == '-' || s[0] == '+') {
-					neg = s[0] == '-'
-					s = s[1:]
-				}
-				return !neg, len(s) > 0
-			}`,
-		},
-		{
-			pkg: "i",
-			contents: `package i
-			func digits[bytes []byte | string](s bytes) bool {
-				for _, c := range []byte(s) {
-					if c < '0' || '9' < c {
-						return false
-					}
-				}
-				return true
-			}`,
-		},
-		{
-			pkg: "j",
-			contents: `
-			package j
-
-			type E interface{}
-
-			func Foo[T E, PT interface{ *T }]() T {
-				pt := PT(new(T))
-				x := *pt
-				print(x)  /*@ types(T)*/
-				return x
-			}
-			`,
-		},
+		}
+		`,
 	} {
-		test := test
-		t.Run(test.pkg, func(t *testing.T) {
+		contents := contents
+		pkgname := packageName(t, contents)
+		t.Run(pkgname, func(t *testing.T) {
 			// Parse
 			conf := loader.Config{ParserMode: parser.ParseComments}
-			fname := test.pkg + ".go"
-			f, err := conf.ParseFile(fname, test.contents)
+			f, err := conf.ParseFile("file.go", contents)
 			if err != nil {
 				t.Fatalf("parse: %v", err)
 			}
-			conf.CreateFromFiles(test.pkg, f)
+			conf.CreateFromFiles(pkgname, f)
 
 			// Load
 			lprog, err := conf.Load()
@@ -483,11 +512,11 @@
 					prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
 				}
 			}
-			p := prog.Package(lprog.Package(test.pkg).Pkg)
+			p := prog.Package(lprog.Package(pkgname).Pkg)
 			p.Build()
 
 			// Collect calls to the builtin print function.
-			probes := make(map[*ssa.CallCommon]bool)
+			probes := make(map[*ssa.CallCommon]*ssa.Function)
 			for _, mem := range p.Members {
 				if fn, ok := mem.(*ssa.Function); ok {
 					for _, bb := range fn.Blocks {
@@ -495,7 +524,7 @@
 							if i, ok := i.(ssa.CallInstruction); ok {
 								call := i.Common()
 								if b, ok := call.Value.(*ssa.Builtin); ok && b.Name() == "print" {
-									probes[i.Common()] = true
+									probes[i.Common()] = fn
 								}
 							}
 						}
@@ -539,6 +568,7 @@
 				}
 				if got, want := fmt.Sprint(args), fmt.Sprint(note.Args); got != want {
 					t.Errorf("Arguments to print() were expected to be %q. got %q", want, got)
+					logFunction(t, probes[call])
 				}
 			}
 		})
@@ -563,33 +593,103 @@
 	const contents = `
 	package p
 
-	//@ instrs("f", "*ssa.TypeAssert")
-	//@ instrs("f", "*ssa.Call", "print(nil:interface{}, 0:int)")
-	func f(x int) { // non-generic smoke test.
+	//@ instrs("f0", "*ssa.TypeAssert")
+	//@ instrs("f0", "*ssa.Call", "print(nil:interface{}, 0:int)")
+	func f0(x int) { // non-generic smoke test.
 		var i interface{}
 		print(i, 0)
 	}
 
-	//@ instrs("h", "*ssa.Alloc", "local T (u)")
-	//@ instrs("h", "*ssa.FieldAddr", "&t0.x [#0]")
-	func h[T ~struct{ x string }]() T {
+	//@ instrs("f1", "*ssa.Alloc", "local T (u)")
+	//@ instrs("f1", "*ssa.FieldAddr", "&t0.x [#0]")
+	func f1[T ~struct{ x string }]() T {
 		u := T{"lorem"}
 		return u
 	}
 
-	//@ instrs("c", "*ssa.TypeAssert", "typeassert t0.(interface{})")
-	//@ instrs("c", "*ssa.Call", "invoke x.foo()")
-	func c[T interface{ foo() string }](x T) {
+	//@ instrs("f1b", "*ssa.Alloc", "new T (complit)")
+	//@ instrs("f1b", "*ssa.FieldAddr", "&t0.x [#0]")
+	func f1b[T ~struct{ x string }]() *T {
+		u := &T{"lorem"}
+		return u
+	}
+
+	//@ instrs("f2", "*ssa.TypeAssert", "typeassert t0.(interface{})")
+	//@ instrs("f2", "*ssa.Call", "invoke x.foo()")
+	func f2[T interface{ foo() string }](x T) {
 		_ = x.foo
 		_ = x.foo()
 	}
 
-	//@ instrs("d", "*ssa.TypeAssert", "typeassert t0.(interface{})")
-	//@ instrs("d", "*ssa.Call", "invoke x.foo()")
-	func d[T interface{ foo() string; comparable }](x T) {
+	//@ instrs("f3", "*ssa.TypeAssert", "typeassert t0.(interface{})")
+	//@ instrs("f3", "*ssa.Call", "invoke x.foo()")
+	func f3[T interface{ foo() string; comparable }](x T) {
 		_ = x.foo
 		_ = x.foo()
 	}
+
+	//@ instrs("f4", "*ssa.BinOp", "t1 + 1:int", "t2 < 4:int")
+	//@ instrs("f4", "*ssa.Call", "f()", "print(t2, t4)")
+	func f4[T [4]string](f func() T) {
+		for i, v := range f() {
+			print(i, v)
+		}
+	}
+
+	//@ instrs("f5", "*ssa.Call", "nil:func()()")
+	func f5() {
+		var f func()
+		f()
+	}
+
+	type S struct{ f int }
+
+	//@ instrs("f6", "*ssa.Alloc", "new [1]P (slicelit)", "new S (complit)")
+	//@ instrs("f6", "*ssa.IndexAddr", "&t0[0:int]")
+	//@ instrs("f6", "*ssa.FieldAddr", "&t2.f [#0]")
+	func f6[P *S]() []P { return []P{{f: 1}} }
+
+	//@ instrs("f7", "*ssa.Alloc", "local S (complit)")
+	//@ instrs("f7", "*ssa.FieldAddr", "&t0.f [#0]")
+	func f7[T any, S struct{f T}](x T) S { return S{f: x} }
+
+	//@ instrs("f8", "*ssa.Alloc", "new [1]P (slicelit)", "new struct{f T} (complit)")
+	//@ instrs("f8", "*ssa.IndexAddr", "&t0[0:int]")
+	//@ instrs("f8", "*ssa.FieldAddr", "&t2.f [#0]")
+	func f8[T any, P *struct{f T}](x T) []P { return []P{{f: x}} }
+
+	//@ instrs("f9", "*ssa.Alloc", "new [1]PS (slicelit)", "new S (complit)")
+	//@ instrs("f9", "*ssa.IndexAddr", "&t0[0:int]")
+	//@ instrs("f9", "*ssa.FieldAddr", "&t2.f [#0]")
+	func f9[T any, S struct{f T}, PS *S](x T) {
+		_ = []PS{{f: x}}
+	}
+
+	//@ instrs("f10", "*ssa.FieldAddr", "&t0.x [#0]")
+	//@ instrs("f10", "*ssa.Store", "*t0 = *new(T):T", "*t1 = 4:int")
+	func f10[T ~struct{ x, y int }]() T {
+		var u T
+		u = T{x: 4}
+		return u
+	}
+
+	//@ instrs("f11", "*ssa.FieldAddr", "&t1.y [#1]")
+	//@ instrs("f11", "*ssa.Store", "*t1 = *new(T):T", "*t2 = 5:int")
+	func f11[T ~struct{ x, y int }, PT *T]() PT {
+		var u PT = new(T)
+		*u = T{y: 5}
+		return u
+	}
+
+	//@ instrs("f12", "*ssa.Alloc", "new struct{f T} (complit)")
+	//@ instrs("f12", "*ssa.MakeMap", "make map[P]bool 1:int")
+	func f12[T any, P *struct{f T}](x T) map[P]bool { return map[P]bool{{}: true} }
+
+	//@ instrs("f13", "&v[0:int]")
+	//@ instrs("f13", "*ssa.Store", "*t0 = 7:int", "*v = *new(A):A")
+	func f13[A [3]int, PA *A](v PA) {
+		*v = A{7}
+	}
 	`
 
 	// Parse
@@ -666,7 +766,8 @@
 
 	// Check each expectation.
 	for key, value := range expectations {
-		if _, ok := p.Members[key.function]; !ok {
+		fn, ok := p.Members[key.function].(*ssa.Function)
+		if !ok {
 			t.Errorf("Expectation on %s does not match a member in %s", key.function, p.Pkg.Name())
 		}
 		got, want := value.matches, value.wants
@@ -674,6 +775,24 @@
 		sort.Strings(want)
 		if !reflect.DeepEqual(want, got) {
 			t.Errorf("Within %s wanted instructions of kind %s: %q. got %q", key.function, key.kind, want, got)
+			logFunction(t, fn)
 		}
 	}
 }
+
+// packageName is a test helper to extract the package name from a string
+// containing the content of a go file.
+func packageName(t testing.TB, content string) string {
+	f, err := parser.ParseFile(token.NewFileSet(), "", content, parser.PackageClauseOnly)
+	if err != nil {
+		t.Fatalf("parsing the file %q failed with error: %s", content, err)
+	}
+	return f.Name.Name
+}
+
+func logFunction(t testing.TB, fn *ssa.Function) {
+	// TODO: Consider adding an ssa.Function.GoString() so this can be logged to t via '%#v'.
+	var buf bytes.Buffer
+	ssa.WriteFunction(&buf, fn)
+	t.Log(buf.String())
+}
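packageName above relies on parser.PackageClauseOnly, which stops parsing immediately after the package clause, so pulling the subtest name out of each source string stays cheap even for the larger test bodies. A standalone example of that parser mode:

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
	)

	func main() {
		const src = "package p42\n\nfunc f() {}\n"
		// PackageClauseOnly stops after "package p42"; the body is never parsed.
		f, err := parser.ParseFile(token.NewFileSet(), "", src, parser.PackageClauseOnly)
		if err != nil {
			panic(err)
		}
		fmt.Println(f.Name.Name) // p42
	}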
diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go
index b3bb09c..06a8ee6 100644
--- a/go/ssa/builder_test.go
+++ b/go/ssa/builder_test.go
@@ -1005,3 +1005,77 @@
 		t.Error("Failed to find any Named to struct types")
 	}
 }
+
+// TestSyntax ensures that a function's Syntax is available when
+// debug info is enabled.
+func TestSyntax(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("TestSyntax uses type parameters.")
+	}
+
+	const input = `package p
+
+	type P int
+	func (x *P) g() *P { return x }
+
+	func F[T ~int]() *T {
+		type S1 *T
+		type S2 *T
+		type S3 *T
+		f1 := func() S1 {
+			f2 := func() S2 {
+				return S2(nil)
+			}
+			return S1(f2())
+		}
+		f3 := func() S3 {
+			return S3(f1())
+		}
+		return (*T)(f3())
+	}
+	var _ = F[int]
+	`
+
+	// Parse
+	var conf loader.Config
+	f, err := conf.ParseFile("<input>", input)
+	if err != nil {
+		t.Fatalf("parse: %v", err)
+	}
+	conf.CreateFromFiles("p", f)
+
+	// Load
+	lprog, err := conf.Load()
+	if err != nil {
+		t.Fatalf("Load: %v", err)
+	}
+
+	// Create and build SSA
+	prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug|ssa.InstantiateGenerics)
+	prog.Build()
+
+	// Collect syntax information for all of the functions.
+	got := make(map[string]string)
+	for fn := range ssautil.AllFunctions(prog) {
+		if fn.Name() == "init" {
+			continue
+		}
+		syntax := fn.Syntax()
+		got[fn.Name()] = fmt.Sprintf("%T : %s @ %d", syntax, fn.Signature, prog.Fset.Position(syntax.Pos()).Line)
+	}
+
+	want := map[string]string{
+		"g":          "*ast.FuncDecl : func() *p.P @ 4",
+		"F":          "*ast.FuncDecl : func[T ~int]() *T @ 6",
+		"F$1":        "*ast.FuncLit : func() p.S1 @ 10",
+		"F$1$1":      "*ast.FuncLit : func() p.S2 @ 11",
+		"F$2":        "*ast.FuncLit : func() p.S3 @ 16",
+		"F[int]":     "*ast.FuncDecl : func() *int @ 6",
+		"F[int]$1":   "*ast.FuncLit : func() p.S1 @ 10",
+		"F[int]$1$1": "*ast.FuncLit : func() p.S2 @ 11",
+		"F[int]$2":   "*ast.FuncLit : func() p.S3 @ 16",
+	}
+	if !reflect.DeepEqual(got, want) {
+		t.Errorf("Expected the functions with signature to be:\n\t%#v.\n Got:\n\t%#v", want, got)
+	}
+}
diff --git a/go/ssa/emit.go b/go/ssa/emit.go
index 4ba049d..80e30b6 100644
--- a/go/ssa/emit.go
+++ b/go/ssa/emit.go
@@ -11,8 +11,6 @@
 	"go/ast"
 	"go/token"
 	"go/types"
-
-	"golang.org/x/tools/internal/typeparams"
 )
 
 // emitNew emits to f a new (heap Alloc) instruction allocating an
@@ -29,7 +27,7 @@
 // new temporary, and returns the value so defined.
 func emitLoad(f *Function, addr Value) *UnOp {
 	v := &UnOp{Op: token.MUL, X: addr}
-	v.setType(deref(typeparams.CoreType(addr.Type())))
+	v.setType(mustDeref(addr.Type()))
 	f.emit(v)
 	return v
 }
@@ -372,9 +370,10 @@
 // emitStore emits to f an instruction to store value val at location
 // addr, applying implicit conversions as required by assignability rules.
 func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
+	typ := mustDeref(addr.Type())
 	s := &Store{
 		Addr: addr,
-		Val:  emitConv(f, val, deref(addr.Type())),
+		Val:  emitConv(f, val, typ),
 		pos:  pos,
 	}
 	f.emit(s)
@@ -477,9 +476,8 @@
 // value of a field.
 func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value {
 	for _, index := range indices {
-		fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
-
-		if isPointer(v.Type()) {
+		if st, vptr := deptr(v.Type()); vptr {
+			fld := fieldOf(st, index)
 			instr := &FieldAddr{
 				X:     v,
 				Field: index,
@@ -488,10 +486,11 @@
 			instr.setType(types.NewPointer(fld.Type()))
 			v = f.emit(instr)
 			// Load the field's value iff indirectly embedded.
-			if isPointer(fld.Type()) {
+			if _, fldptr := deptr(fld.Type()); fldptr {
 				v = emitLoad(f, v)
 			}
 		} else {
+			fld := fieldOf(v.Type(), index)
 			instr := &Field{
 				X:     v,
 				Field: index,
@@ -511,8 +510,8 @@
 // field's value.
 // Ident id is used for position and debug info.
 func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
-	fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
-	if isPointer(v.Type()) {
+	if st, vptr := deptr(v.Type()); vptr {
+		fld := fieldOf(st, index)
 		instr := &FieldAddr{
 			X:     v,
 			Field: index,
@@ -525,6 +524,7 @@
 			v = emitLoad(f, v)
 		}
 	} else {
+		fld := fieldOf(v.Type(), index)
 		instr := &Field{
 			X:     v,
 			Field: index,
diff --git a/go/ssa/func.go b/go/ssa/func.go
index 57f5f71..60cf53f 100644
--- a/go/ssa/func.go
+++ b/go/ssa/func.go
@@ -382,7 +382,9 @@
 
 // debugInfo reports whether debug info is wanted for this function.
 func (f *Function) debugInfo() bool {
-	return f.Pkg != nil && f.Pkg.debug
+	// debug info for instantiations follows the debug info of their origin.
+	p := f.declaredPackage()
+	return p != nil && p.debug
 }
 
 // addNamedLocal creates a local variable, adds it to function f and
@@ -594,7 +596,7 @@
 	if len(f.Locals) > 0 {
 		buf.WriteString("# Locals:\n")
 		for i, l := range f.Locals {
-			fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from))
+			fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(mustDeref(l.Type()), from))
 		}
 	}
 	writeSignature(buf, from, f.Name(), f.Signature, f.Params)
diff --git a/go/ssa/interp/external.go b/go/ssa/interp/external.go
index 51b3be0..7a79924 100644
--- a/go/ssa/interp/external.go
+++ b/go/ssa/interp/external.go
@@ -312,10 +312,6 @@
 	switch name {
 	case "GOSSAINTERP":
 		return "1"
-	case "GOARCH":
-		return "amd64"
-	case "GOOS":
-		return "linux"
 	}
 	return os.Getenv(name)
 }
diff --git a/go/ssa/interp/interp.go b/go/ssa/interp/interp.go
index 58cac46..79363f5 100644
--- a/go/ssa/interp/interp.go
+++ b/go/ssa/interp/interp.go
@@ -635,15 +635,6 @@
 	return iface{}
 }
 
-// setGlobal sets the value of a system-initialized global variable.
-func setGlobal(i *interpreter, pkg *ssa.Package, name string, v value) {
-	if g, ok := i.globals[pkg.Var(name)]; ok {
-		*g = v
-		return
-	}
-	panic("no global variable: " + pkg.Pkg.Path() + "." + name)
-}
-
 // Interpret interprets the Go program whose main package is mainpkg.
 // mode specifies various interpreter options.  filename and args are
 // the initial values of os.Args for the target program.  sizes is the
diff --git a/go/ssa/interp/interp_test.go b/go/ssa/interp/interp_test.go
index 70ddcee..64ede78 100644
--- a/go/ssa/interp/interp_test.go
+++ b/go/ssa/interp/interp_test.go
@@ -23,9 +23,11 @@
 	"log"
 	"os"
 	"path/filepath"
+	"runtime"
 	"strings"
 	"testing"
 	"time"
+	"unsafe"
 
 	"golang.org/x/tools/go/loader"
 	"golang.org/x/tools/go/ssa"
@@ -136,15 +138,14 @@
 		testdataTests = append(testdataTests, "typeassert.go")
 		testdataTests = append(testdataTests, "zeros.go")
 	}
+
+	// GOROOT/test used to assume that GOOS and GOARCH were explicitly set in the
+	// environment, so do that here for TestGorootTest.
+	os.Setenv("GOOS", runtime.GOOS)
+	os.Setenv("GOARCH", runtime.GOARCH)
 }
 
-// Specific GOARCH to use for a test case in go.tools/go/ssa/interp/testdata/.
-// Defaults to amd64 otherwise.
-var testdataArchs = map[string]string{
-	"width32.go": "386",
-}
-
-func run(t *testing.T, input string) bool {
+func run(t *testing.T, input string, goroot string) {
 	// The recover2 test case is broken on Go 1.14+. See golang/go#34089.
 	// TODO(matloob): Fix this.
 	if filepath.Base(input) == "recover2.go" {
@@ -155,18 +156,17 @@
 
 	start := time.Now()
 
-	ctx := build.Default    // copy
-	ctx.GOROOT = "testdata" // fake goroot
-	ctx.GOOS = "linux"
-	ctx.GOARCH = "amd64"
-	if arch, ok := testdataArchs[filepath.Base(input)]; ok {
-		ctx.GOARCH = arch
+	ctx := build.Default // copy
+	ctx.GOROOT = goroot
+	ctx.GOOS = runtime.GOOS
+	ctx.GOARCH = runtime.GOARCH
+	if filepath.Base(input) == "width32.go" && unsafe.Sizeof(int(0)) > 4 {
+		t.Skipf("skipping: width32.go checks behavior for a 32-bit int")
 	}
 
 	conf := loader.Config{Build: &ctx}
 	if _, err := conf.FromArgs([]string{input}, true); err != nil {
-		t.Errorf("FromArgs(%s) failed: %s", input, err)
-		return false
+		t.Fatalf("FromArgs(%s) failed: %s", input, err)
 	}
 
 	conf.Import("runtime")
@@ -188,8 +188,7 @@
 
 	iprog, err := conf.Load()
 	if err != nil {
-		t.Errorf("conf.Load(%s) failed: %s", input, err)
-		return false
+		t.Fatalf("conf.Load(%s) failed: %s", input, err)
 	}
 
 	bmode := ssa.InstantiateGenerics | ssa.SanityCheckFunctions
@@ -205,6 +204,9 @@
 	interp.CapturedOutput = new(bytes.Buffer)
 
 	sizes := types.SizesFor("gc", ctx.GOARCH)
+	if sizes.Sizeof(types.Typ[types.Int]) < 4 {
+		panic("bogus SizesFor")
+	}
 	hint = fmt.Sprintf("To trace execution, run:\n%% go build golang.org/x/tools/cmd/ssadump && ./ssadump -build=C -test -run --interp=T %s\n", input)
 	var imode interp.Mode // default mode
 	// imode |= interp.DisableRecover // enable for debugging
@@ -223,44 +225,76 @@
 	if false {
 		t.Log(input, time.Since(start)) // test profiling
 	}
-
-	return true
 }
 
-func printFailures(failures []string) {
-	if failures != nil {
-		fmt.Println("The following tests failed:")
-		for _, f := range failures {
-			fmt.Printf("\t%s\n", f)
+// makeGoroot copies testdata/src into the "src" directory of a temporary
+// location to mimic GOROOT/src, and adds a file "runtime/consts.go" containing
+// declarations for GOOS and GOARCH that match the GOOS and GOARCH of this test.
+//
+// It returns the directory that should be used for GOROOT.
+func makeGoroot(t *testing.T) string {
+	goroot := t.TempDir()
+	src := filepath.Join(goroot, "src")
+
+	err := filepath.Walk("testdata/src", func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
 		}
+
+		rel, err := filepath.Rel("testdata/src", path)
+		if err != nil {
+			return err
+		}
+		targ := filepath.Join(src, rel)
+
+		if info.IsDir() {
+			return os.Mkdir(targ, info.Mode().Perm()|0700)
+		}
+
+		b, err := os.ReadFile(path)
+		if err != nil {
+			return err
+		}
+		return os.WriteFile(targ, b, info.Mode().Perm())
+	})
+	if err != nil {
+		t.Fatal(err)
 	}
+
+	constsGo := fmt.Sprintf(`package runtime
+const GOOS = %q
+const GOARCH = %q
+`, runtime.GOOS, runtime.GOARCH)
+	err = os.WriteFile(filepath.Join(src, "runtime/consts.go"), []byte(constsGo), 0644)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	return goroot
 }
 
 // TestTestdataFiles runs the interpreter on testdata/*.go.
 func TestTestdataFiles(t *testing.T) {
+	goroot := makeGoroot(t)
 	cwd, err := os.Getwd()
 	if err != nil {
 		log.Fatal(err)
 	}
-	var failures []string
 	for _, input := range testdataTests {
-		if !run(t, filepath.Join(cwd, "testdata", input)) {
-			failures = append(failures, input)
-		}
+		t.Run(input, func(t *testing.T) {
+			run(t, filepath.Join(cwd, "testdata", input), goroot)
+		})
 	}
-	printFailures(failures)
 }
 
 // TestGorootTest runs the interpreter on $GOROOT/test/*.go.
 func TestGorootTest(t *testing.T) {
-	var failures []string
-
+	goroot := makeGoroot(t)
 	for _, input := range gorootTestTests {
-		if !run(t, filepath.Join(build.Default.GOROOT, "test", input)) {
-			failures = append(failures, input)
-		}
+		t.Run(input, func(t *testing.T) {
+			run(t, filepath.Join(build.Default.GOROOT, "test", input), goroot)
+		})
 	}
-	printFailures(failures)
 }
 
 // TestTypeparamTest runs the interpreter on runnable examples
@@ -270,23 +304,23 @@
 	if !typeparams.Enabled {
 		return
 	}
+	goroot := makeGoroot(t)
 
 	// Skip known failures for the given reason.
 	// TODO(taking): Address these.
 	skip := map[string]string{
-		"chans.go":       "interp tests do not support runtime.SetFinalizer",
-		"issue23536.go":  "unknown reason",
-		"issue376214.go": "unknown issue with variadic cast on bytes",
-		"issue48042.go":  "interp tests do not handle reflect.Value.SetInt",
-		"issue47716.go":  "interp tests do not handle unsafe.Sizeof",
-		"issue50419.go":  "interp tests do not handle dispatch to String() correctly",
-		"issue51733.go":  "interp does not handle unsafe casts",
-		"ordered.go":     "math.NaN() comparisons not being handled correctly",
-		"orderedmap.go":  "interp tests do not support runtime.SetFinalizer",
-		"stringer.go":    "unknown reason",
-		"issue48317.go":  "interp tests do not support encoding/json",
-		"issue48318.go":  "interp tests do not support encoding/json",
-		"issue58513.go":  "interp tests do not support runtime.Caller",
+		"chans.go":      "interp tests do not support runtime.SetFinalizer",
+		"issue23536.go": "unknown reason",
+		"issue48042.go": "interp tests do not handle reflect.Value.SetInt",
+		"issue47716.go": "interp tests do not handle unsafe.Sizeof",
+		"issue50419.go": "interp tests do not handle dispatch to String() correctly",
+		"issue51733.go": "interp does not handle unsafe casts",
+		"ordered.go":    "math.NaN() comparisons not being handled correctly",
+		"orderedmap.go": "interp tests do not support runtime.SetFinalizer",
+		"stringer.go":   "unknown reason",
+		"issue48317.go": "interp tests do not support encoding/json",
+		"issue48318.go": "interp tests do not support encoding/json",
+		"issue58513.go": "interp tests do not support runtime.Caller",
 	}
 	// Collect all of the .go files in dir that are runnable.
 	dir := filepath.Join(build.Default.GOROOT, "test", "typeparam")
@@ -294,34 +328,28 @@
 	if err != nil {
 		t.Fatal(err)
 	}
-	var inputs []string
 	for _, entry := range list {
 		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
 			continue // Consider standalone go files.
 		}
-		if reason := skip[entry.Name()]; reason != "" {
-			t.Logf("skipping %q due to %s.", entry.Name(), reason)
-			continue
-		}
-		input := filepath.Join(dir, entry.Name())
-		src, err := os.ReadFile(input)
-		if err != nil {
-			t.Fatal(err)
-		}
-		// Only build test files that can be compiled, or compiled and run.
-		if bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// rundir")) {
-			inputs = append(inputs, input)
-		} else {
-			t.Logf("Not a `// run` file: %s", entry.Name())
-		}
-	}
+		t.Run(entry.Name(), func(t *testing.T) {
+			input := filepath.Join(dir, entry.Name())
+			src, err := os.ReadFile(input)
+			if err != nil {
+				t.Fatal(err)
+			}
 
-	var failures []string
-	for _, input := range inputs {
-		t.Log("running", input)
-		if !run(t, input) {
-			failures = append(failures, input)
-		}
+			// Only build test files that can be compiled, or compiled and run.
+			if !bytes.HasPrefix(src, []byte("// run")) || bytes.HasPrefix(src, []byte("// rundir")) {
+				t.Logf("Not a `// run` file: %s", entry.Name())
+				return
+			}
+
+			if reason := skip[entry.Name()]; reason != "" {
+				t.Skipf("skipping: %s", reason)
+			}
+
+			run(t, input, goroot)
+		})
 	}
-	printFailures(failures)
 }
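
The rewrite above replaces manual failure bookkeeping with per-input subtests. A standalone sketch of that pattern (runExample and the input list are placeholders, not part of the patch):

package example

import "testing"

// runExample stands in for the real run helper: load, build, and
// interpret one input file, failing the subtest on error.
func runExample(t *testing.T, input string) {
	t.Helper()
	if input == "" {
		t.Fatal("empty input")
	}
}

func TestInputs(t *testing.T) {
	inputs := []string{"a.go", "b.go"}
	for _, input := range inputs {
		t.Run(input, func(t *testing.T) {
			// Each input reports its own pass/fail/skip status, so no
			// separate failure list is needed.
			runExample(t, input)
		})
	}
}
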
diff --git a/go/ssa/interp/ops.go b/go/ssa/interp/ops.go
index 39830bc..a42d89b 100644
--- a/go/ssa/interp/ops.go
+++ b/go/ssa/interp/ops.go
@@ -92,8 +92,8 @@
 func fitsInt(x int64, sizes types.Sizes) bool {
 	intSize := sizes.Sizeof(types.Typ[types.Int])
 	if intSize < sizes.Sizeof(types.Typ[types.Int64]) {
-		maxInt := int64(1)<<(intSize-1) - 1
-		minInt := -int64(1) << (intSize - 1)
+		maxInt := int64(1)<<((intSize*8)-1) - 1
+		minInt := -int64(1) << ((intSize * 8) - 1)
 		return minInt <= x && x <= maxInt
 	}
 	return true
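
The fitsInt fix accounts for Sizes.Sizeof reporting a size in bytes, not bits. A quick standalone check of the corrected bounds for a 4-byte int:

package main

import "fmt"

func main() {
	const intSize = 4 // bytes, as types.Sizes.Sizeof would report for a 32-bit int
	maxInt := int64(1)<<((intSize*8)-1) - 1    // 2147483647
	minInt := -int64(1) << ((intSize * 8) - 1) // -2147483648
	fmt.Println(minInt, maxInt)
}
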
diff --git a/go/ssa/interp/testdata/src/runtime/runtime.go b/go/ssa/interp/testdata/src/runtime/runtime.go
index c60c7fc..f94684b 100644
--- a/go/ssa/interp/testdata/src/runtime/runtime.go
+++ b/go/ssa/interp/testdata/src/runtime/runtime.go
@@ -16,7 +16,4 @@
 	RuntimeError()
 }
 
-const GOOS = "linux"
-const GOARCH = "amd64"
-
 func GC()
diff --git a/go/ssa/lift.go b/go/ssa/lift.go
index b9cf7bc..dbd8790 100644
--- a/go/ssa/lift.go
+++ b/go/ssa/lift.go
@@ -460,7 +460,7 @@
 				*fresh++
 
 				phi.pos = alloc.Pos()
-				phi.setType(deref(alloc.Type()))
+				phi.setType(mustDeref(alloc.Type()))
 				phi.block = v
 				if debugLifting {
 					fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
@@ -505,7 +505,7 @@
 func renamed(renaming []Value, alloc *Alloc) Value {
 	v := renaming[alloc.index]
 	if v == nil {
-		v = zeroConst(deref(alloc.Type()))
+		v = zeroConst(mustDeref(alloc.Type()))
 		renaming[alloc.index] = v
 	}
 	return v
diff --git a/go/ssa/lvalue.go b/go/ssa/lvalue.go
index 51122b8..186cfca 100644
--- a/go/ssa/lvalue.go
+++ b/go/ssa/lvalue.go
@@ -25,7 +25,7 @@
 
 // An address is an lvalue represented by a true pointer.
 type address struct {
-	addr Value
+	addr Value     // must have a pointer core type.
 	pos  token.Pos // source position
 	expr ast.Expr  // source syntax of the value (not address) [debug mode]
 }
@@ -52,7 +52,7 @@
 }
 
 func (a *address) typ() types.Type {
-	return deref(a.addr.Type())
+	return mustDeref(a.addr.Type())
 }
 
 // An element is an lvalue represented by m[k], the location of an
diff --git a/go/ssa/methods.go b/go/ssa/methods.go
index 4185618..2944983 100644
--- a/go/ssa/methods.go
+++ b/go/ssa/methods.go
@@ -101,8 +101,11 @@
 		sel := toSelection(sel)
 		obj := sel.obj.(*types.Func)
 
+		_, ptrObj := deptr(recvType(obj))
+		_, ptrRecv := deptr(sel.recv)
+
 		needsPromotion := len(sel.index) > 1
-		needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.recv)
+		needsIndirection := !ptrObj && ptrRecv
 		if needsPromotion || needsIndirection {
 			fn = makeWrapper(prog, sel, cr)
 		} else {
diff --git a/go/ssa/parameterized.go b/go/ssa/parameterized.go
index 3fc4348..b90ee0e 100644
--- a/go/ssa/parameterized.go
+++ b/go/ssa/parameterized.go
@@ -63,7 +63,7 @@
 		// of a generic function type (or an interface method) that is
 		// part of the type we're testing. We don't care about these type
 		// parameters.
-		// Similarly, the receiver of a method may declare (rather then
+		// Similarly, the receiver of a method may declare (rather than
 		// use) type parameters, we don't care about those either.
 		// Thus, we only need to look at the input and result parameters.
 		return w.isParameterized(t.Params()) || w.isParameterized(t.Results())
diff --git a/go/ssa/print.go b/go/ssa/print.go
index 8b78319..7f34a7b 100644
--- a/go/ssa/print.go
+++ b/go/ssa/print.go
@@ -95,7 +95,7 @@
 		op = "new"
 	}
 	from := v.Parent().relPkg()
-	return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
+	return fmt.Sprintf("%s %s (%s)", op, relType(mustDeref(v.Type()), from), v.Comment)
 }
 
 func (v *Phi) String() string {
@@ -259,21 +259,19 @@
 }
 
 func (v *FieldAddr) String() string {
-	st := typeparams.CoreType(deref(v.X.Type())).(*types.Struct)
 	// Be robust against a bad index.
 	name := "?"
-	if 0 <= v.Field && v.Field < st.NumFields() {
-		name = st.Field(v.Field).Name()
+	if fld := fieldOf(mustDeref(v.X.Type()), v.Field); fld != nil {
+		name = fld.Name()
 	}
 	return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
 }
 
 func (v *Field) String() string {
-	st := typeparams.CoreType(v.X.Type()).(*types.Struct)
 	// Be robust against a bad index.
 	name := "?"
-	if 0 <= v.Field && v.Field < st.NumFields() {
-		name = st.Field(v.Field).Name()
+	if fld := fieldOf(v.X.Type(), v.Field); fld != nil {
+		name = fld.Name()
 	}
 	return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
 }
@@ -452,7 +450,7 @@
 
 		case *Global:
 			fmt.Fprintf(buf, "  var   %-*s %s\n",
-				maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from))
+				maxname, name, relType(mustDeref(mem.Type()), from))
 		}
 	}
 
diff --git a/go/ssa/source.go b/go/ssa/source.go
index b9a0836..9c900e3 100644
--- a/go/ssa/source.go
+++ b/go/ssa/source.go
@@ -121,7 +121,9 @@
 				// Don't call Program.Method: avoid creating wrappers.
 				obj := mset.At(i).Obj().(*types.Func)
 				if obj.Pos() == pos {
-					return pkg.objects[obj].(*Function)
+					// obj from MethodSet may not be the origin type.
+					m := typeparams.OriginMethod(obj)
+					return pkg.objects[m].(*Function)
 				}
 			}
 		}
diff --git a/go/ssa/source_test.go b/go/ssa/source_test.go
index eb266ed..4fba8a5 100644
--- a/go/ssa/source_test.go
+++ b/go/ssa/source_test.go
@@ -24,6 +24,7 @@
 	"golang.org/x/tools/go/loader"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/go/ssa/ssautil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func TestObjValueLookup(t *testing.T) {
@@ -384,6 +385,19 @@
 		  func init() { println(func(){print(900)}) }`,
 			"900", "main.init#1$1"},
 	}
+	if typeparams.Enabled {
+		tests = append(tests, struct {
+			input  string
+			substr string
+			fn     string
+		}{
+			`package main
+			type S[T any] struct{}
+			func (*S[T]) Foo() { println(1000) }
+			type P[T any] struct{ *S[T] }`,
+			"1000", "(*main.S[T]).Foo",
+		})
+	}
 	for _, test := range tests {
 		conf := loader.Config{Fset: token.NewFileSet()}
 		f, start, end := findInterval(t, conf.Fset, test.input, test.substr)
diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go
index c3471c1..eeb9681 100644
--- a/go/ssa/ssa.go
+++ b/go/ssa/ssa.go
@@ -1535,12 +1535,25 @@
 // from fn.Origin().
 func (fn *Function) TypeArgs() []types.Type { return fn.typeargs }
 
-// Origin is the function fn is an instantiation of. Returns nil if fn is not
-// an instantiation.
+// Origin returns the generic function from which fn was instantiated,
+// or nil if fn is not an instantiation.
 func (fn *Function) Origin() *Function {
 	if fn.parent != nil && len(fn.typeargs) > 0 {
-		// Nested functions are BUILT at a different time than there instances.
-		return fn.parent.Origin().AnonFuncs[fn.anonIdx]
+		// Nested functions are BUILT at a different time than their instances.
+		// Build declared package if not yet BUILT. This is not an expected use
+		// case, but is simple and robust.
+		fn.declaredPackage().Build()
+	}
+	return origin(fn)
+}
+
+// origin is the function that fn is an instantiation of. Returns nil if fn is
+// not an instantiation.
+//
+// Precondition: fn and the origin function are done building.
+func origin(fn *Function) *Function {
+	if fn.parent != nil && len(fn.typeargs) > 0 {
+		return origin(fn.parent).AnonFuncs[fn.anonIdx]
 	}
 	return fn.topLevelOrigin
 }
diff --git a/go/ssa/subst.go b/go/ssa/subst.go
index 7efab35..89c41a8 100644
--- a/go/ssa/subst.go
+++ b/go/ssa/subst.go
@@ -249,7 +249,7 @@
 	}
 
 	// methods for the interface. Initially nil if there is no known change needed.
-	// Signatures for the method where recv is nil. NewInterfaceType fills in the recievers.
+	// Signatures for the method where recv is nil. NewInterfaceType fills in the receivers.
 	var methods []*types.Func
 	initMethods := func(n int) { // copy first n explicit methods
 		methods = make([]*types.Func, iface.NumExplicitMethods())
@@ -262,7 +262,7 @@
 	for i := 0; i < iface.NumExplicitMethods(); i++ {
 		f := iface.ExplicitMethod(i)
 		// On interfaces, we need to cycle break on anonymous interface types
-		// being in a cycle with their signatures being in cycles with their recievers
+		// being in a cycle with their signatures being in cycles with their receivers
 		// that do not go through a Named.
 		norecv := changeRecv(f.Type().(*types.Signature), nil)
 		sig := subst.typ(norecv)
diff --git a/go/ssa/util.go b/go/ssa/util.go
index db53aeb..7735dd8 100644
--- a/go/ssa/util.go
+++ b/go/ssa/util.go
@@ -43,12 +43,6 @@
 
 //// Type utilities.  Some of these belong in go/types.
 
-// isPointer returns true for types whose underlying type is a pointer.
-func isPointer(typ types.Type) bool {
-	_, ok := typ.Underlying().(*types.Pointer)
-	return ok
-}
-
 // isNonTypeParamInterface reports whether t is an interface type but not a type parameter.
 func isNonTypeParamInterface(t types.Type) bool {
 	return !typeparams.IsTypeParam(t) && types.IsInterface(t)
@@ -100,12 +94,33 @@
 	return all && basics >= 1 && tset.Len()-basics <= 1
 }
 
-// deref returns a pointer's element type; otherwise it returns typ.
-func deref(typ types.Type) types.Type {
+// deptr returns a pointer's element type and true; otherwise it returns (typ, false).
+// This function is oblivious to core types and is not suitable for generics.
+//
+// TODO: Deprecate this function once all usages have been audited.
+func deptr(typ types.Type) (types.Type, bool) {
 	if p, ok := typ.Underlying().(*types.Pointer); ok {
-		return p.Elem()
+		return p.Elem(), true
 	}
-	return typ
+	return typ, false
+}
+
+// deref returns the element type of a type with a pointer core type and true;
+// otherwise it returns (typ, false).
+func deref(typ types.Type) (types.Type, bool) {
+	if p, ok := typeparams.CoreType(typ).(*types.Pointer); ok {
+		return p.Elem(), true
+	}
+	return typ, false
+}
+
+// mustDeref returns the element type of a type with a pointer core type.
+// Panics on failure.
+func mustDeref(typ types.Type) types.Type {
+	if et, ok := deref(typ); ok {
+		return et
+	}
+	panic("cannot dereference type " + typ.String())
 }
 
 // recvType returns the receiver type of method obj.
@@ -113,6 +128,17 @@
 	return obj.Type().(*types.Signature).Recv().Type()
 }
 
+// fieldOf returns the index'th field of the (core type of) a struct type;
+// otherwise returns nil.
+func fieldOf(typ types.Type, index int) *types.Var {
+	if st, ok := typeparams.CoreType(typ).(*types.Struct); ok {
+		if 0 <= index && index < st.NumFields() {
+			return st.Field(index)
+		}
+	}
+	return nil
+}
+
 // isUntyped returns true for types that are untyped.
 func isUntyped(typ types.Type) bool {
 	b, ok := typ.(*types.Basic)
@@ -172,16 +198,14 @@
 	return filtered
 }
 
-// receiverTypeArgs returns the type arguments to a function's reciever.
-// Returns an empty list if obj does not have a reciever or its reciever does not have type arguments.
+// receiverTypeArgs returns the type arguments to a function's receiver.
+// Returns an empty list if obj does not have a receiver or its receiver does not have type arguments.
 func receiverTypeArgs(obj *types.Func) []types.Type {
 	rtype := recvType(obj)
 	if rtype == nil {
 		return nil
 	}
-	if isPointer(rtype) {
-		rtype = rtype.(*types.Pointer).Elem()
-	}
+	rtype, _ = deptr(rtype)
 	named, ok := rtype.(*types.Named)
 	if !ok {
 		return nil
diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go
index 228daf6..123ea68 100644
--- a/go/ssa/wrappers.go
+++ b/go/ssa/wrappers.go
@@ -82,12 +82,14 @@
 	indices := sel.index
 
 	var v Value = fn.Locals[0] // spilled receiver
-	if isPointer(sel.recv) {
+	srdt, ptrRecv := deptr(sel.recv)
+	if ptrRecv {
 		v = emitLoad(fn, v)
 
 		// For simple indirection wrappers, perform an informative nil-check:
 		// "value method (T).f called using nil *T pointer"
-		if len(indices) == 1 && !isPointer(recvType(obj)) {
+		_, ptrObj := deptr(recvType(obj))
+		if len(indices) == 1 && !ptrObj {
 			var c Call
 			c.Call.Value = &Builtin{
 				name: "ssa:wrapnilchk",
@@ -97,7 +99,7 @@
 			}
 			c.Call.Args = []Value{
 				v,
-				stringConst(deref(sel.recv).String()),
+				stringConst(srdt.String()),
 				stringConst(sel.obj.Name()),
 			}
 			c.setType(v.Type())
@@ -121,7 +123,7 @@
 
 	var c Call
 	if r := recvType(obj); !types.IsInterface(r) { // concrete method
-		if !isPointer(r) {
+		if _, ptrObj := deptr(r); !ptrObj {
 			v = emitLoad(fn, v)
 		}
 		callee := prog.originFunc(obj)
diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go
index e53d988..099e28a 100644
--- a/go/types/internal/play/play.go
+++ b/go/types/internal/play/play.go
@@ -15,6 +15,7 @@
 	"encoding/json"
 	"fmt"
 	"go/ast"
+	"go/format"
 	"go/token"
 	"go/types"
 	"io"
@@ -167,6 +168,10 @@
 
 	// Syntax debug output.
 	ast.Fprint(out, fset, path[0], nil) // ignore errors
+	fmt.Fprintf(out, "\n")
+
+	// Pretty-print of selected syntax.
+	format.Node(out, fset, path[0])
 
 	// Clean up the messy temp file name.
 	outStr := strings.ReplaceAll(out.String(), f.Name(), "play.go")
@@ -263,4 +268,5 @@
 const mainCSS = `
 textarea { width: 6in; }
 body { color: gray; }
+div#out { font-family: monospace; font-size: 80%; }
 `
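
The playground change adds a gofmt-style rendering of the selected syntax via go/format. A minimal usage sketch:

package main

import (
	"go/format"
	"go/parser"
	"go/token"
	"log"
	"os"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "play.go", "package p\nfunc   f( ) {}\n", 0)
	if err != nil {
		log.Fatal(err)
	}
	// format.Node pretty-prints any AST node; here, the whole file.
	if err := format.Node(os.Stdout, fset, f); err != nil {
		log.Fatal(err)
	}
}
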
diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go
index e064a1a..aa7dfac 100644
--- a/go/types/objectpath/objectpath.go
+++ b/go/types/objectpath/objectpath.go
@@ -418,7 +418,13 @@
 		}
 	}
 
-	panic(fmt.Sprintf("couldn't find method %s on type %s", meth, named))
+	// Due to golang/go#59944, go/types fails to associate the receiver with
+	// certain methods on cgo types.
+	//
+	// TODO(rfindley): replace this panic once golang/go#59944 is fixed in all Go
+	// versions gopls supports.
+	return "", false
+	// panic(fmt.Sprintf("couldn't find method %s on type %s; methods: %#v", meth, named, enc.namedMethods(named)))
 }
 
 // find finds obj within type T, returning the path to it, or nil if not found.
diff --git a/godoc/dirtrees.go b/godoc/dirtrees.go
index f6a5ba0..51aa1f3 100644
--- a/godoc/dirtrees.go
+++ b/godoc/dirtrees.go
@@ -220,7 +220,7 @@
 	// The root could be a symbolic link so use Stat not Lstat.
 	d, err := c.fs.Stat(root)
 	// If we fail here, report detailed error messages; otherwise
-	// is is hard to see why a directory tree was not built.
+	// it is hard to see why a directory tree was not built.
 	switch {
 	case err != nil:
 		log.Printf("newDirectory(%s): %s", root, err)
diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md
index 2717108..15eb2d9 100644
--- a/gopls/doc/analyzers.md
+++ b/gopls/doc/analyzers.md
@@ -217,22 +217,6 @@
 
 **Enabled by default.**
 
-## **infertypeargs**
-
-check for unnecessary type arguments in call expressions
-
-Explicit type arguments may be omitted from call expressions if they can be
-inferred from function arguments, or from other type arguments:
-
-	func f[T any](T) {}
-	
-	func _() {
-		f[string]("foo") // string could be inferred
-	}
-
-
-**Enabled by default.**
-
 ## **loopclosure**
 
 check references to loop variables from within nested functions
@@ -600,7 +584,7 @@
 to convert integers to pointers. A conversion from uintptr to
 unsafe.Pointer is invalid if it implies that there is a uintptr-typed
 word in memory that holds a pointer value, because that word will be
-invisible to stack copying and to the garbage collector.`
+invisible to stack copying and to the garbage collector.
 
 **Enabled by default.**
 
@@ -623,9 +607,11 @@
 
 check for unused results of calls to some functions
 
-Some functions like fmt.Errorf return a result and have no side effects,
-so it is always a mistake to discard the result. This analyzer reports
-calls to certain functions in which the result of the call is ignored.
+Some functions like fmt.Errorf return a result and have no side
+effects, so it is always a mistake to discard the result. Other
+functions may return an error that must not be ignored, or a cleanup
+operation that must be called. This analyzer reports calls to
+functions like these when the result of the call is ignored.
 
 The set of functions may be controlled using flags.
 
@@ -755,6 +741,22 @@
 
 **Enabled by default.**
 
+## **infertypeargs**
+
+check for unnecessary type arguments in call expressions
+
+Explicit type arguments may be omitted from call expressions if they can be
+inferred from function arguments, or from other type arguments:
+
+	func f[T any](T) {}
+	
+	func _() {
+		f[string]("foo") // string could be inferred
+	}
+
+
+**Enabled by default.**
+
 ## **stubmethods**
 
 stub methods analyzer
diff --git a/gopls/doc/commands.md b/gopls/doc/commands.md
index be031e9..8fe677b 100644
--- a/gopls/doc/commands.md
+++ b/gopls/doc/commands.md
@@ -289,6 +289,21 @@
 }
 ```
 
+### **run `go work [args...]`, and apply the resulting go.work**
+Identifier: `gopls.run_go_work_command`
+
+edits to the current go.work file.
+
+Args:
+
+```
+{
+	"ViewID": string,
+	"InitFirst": bool,
+	"Args": []string,
+}
+```
+
 ### **Run govulncheck.**
 Identifier: `gopls.run_govulncheck`
 
diff --git a/gopls/doc/generate_test.go b/gopls/doc/generate_test.go
index 5dc97d2..99f366c 100644
--- a/gopls/doc/generate_test.go
+++ b/gopls/doc/generate_test.go
@@ -11,10 +11,12 @@
 )
 
 func TestGenerated(t *testing.T) {
+	testenv.NeedsGoPackages(t)
 	// This test fails on 1.18 Kokoro for unknown reasons; in any case, it
 	// suffices to run this test on any builder.
 	testenv.NeedsGo1Point(t, 19)
-	testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code.
+
+	testenv.NeedsLocalXTools(t)
 
 	ok, err := doMain(false)
 	if err != nil {
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index f15af0b..d3ffcc0 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -456,6 +456,22 @@
 
 Default: `"Dynamic"`.
 
+##### **symbolScope** *enum*
+
+symbolScope controls which packages are searched for workspace/symbol
+requests. When the scope is "workspace", gopls searches only workspace
+packages. The legacy behavior, "all", causes all loaded packages to be
+searched, including dependencies; this is more expensive and may return
+unwanted results.
+
+Must be one of:
+
+* `"all"` matches symbols in any loaded package, including
+dependencies.
+* `"workspace"` matches symbols in workspace packages only.
+
+Default: `"all"`.
+
 #### **verboseOutput** *bool*
 
 **This setting is for debugging purposes only.**
diff --git a/gopls/go.mod b/gopls/go.mod
index b344223..23b1b72 100644
--- a/gopls/go.mod
+++ b/gopls/go.mod
@@ -8,8 +8,8 @@
 	github.com/jba/templatecheck v0.6.0
 	github.com/sergi/go-diff v1.1.0
 	golang.org/x/mod v0.10.0
-	golang.org/x/sync v0.1.0
-	golang.org/x/sys v0.7.0
+	golang.org/x/sync v0.2.0
+	golang.org/x/sys v0.8.0
 	golang.org/x/text v0.9.0
 	golang.org/x/tools v0.8.1-0.20230424211344-8f555829531a
 	golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815
@@ -25,3 +25,5 @@
 	golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e // indirect
 	golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 // indirect
 )
+
+replace golang.org/x/tools => ../
diff --git a/gopls/go.sum b/gopls/go.sum
index 09bdc58..f6308a7 100644
--- a/gopls/go.sum
+++ b/gopls/go.sum
@@ -42,85 +42,42 @@
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA=
 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA=
 golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
 golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 h1:2O2DON6y3XMJiQRAS1UWU+54aec2uopH3x7MAiqGW6Y=
 golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
 golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk=
 golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
-golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20211213223007-03aa0b5f6827/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
-golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
-golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
-golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
-golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
-golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
-golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.4.1-0.20221208213631-3f74d914ae6d/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
-golang.org/x/tools v0.4.1-0.20221217013628-b4dfc36097e2/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
-golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.8.1-0.20230424211344-8f555829531a h1:KHEqDLzlCb81TJ1YepkoRJGkUSw1QTB1o6aVuWZV/UY=
-golang.org/x/tools v0.8.1-0.20230424211344-8f555829531a/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4=
 golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815 h1:A9kONVi4+AnuOr1dopsibH6hLi1Huy54cbeJxnq4vmU=
 golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815/go.mod h1:XJiVExZgoZfrrxoTeVsFYrSSk1snhfpOEC95JL+A4T0=
-golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/internal/bug/bug.go b/gopls/internal/bug/bug.go
similarity index 76%
rename from internal/bug/bug.go
rename to gopls/internal/bug/bug.go
index c18d35a..1bf7d30 100644
--- a/internal/bug/bug.go
+++ b/gopls/internal/bug/bug.go
@@ -27,7 +27,7 @@
 var (
 	mu        sync.Mutex
 	exemplars map[string]Bug
-	waiters   []chan<- Bug
+	handlers  []func(Bug)
 )
 
 // A Bug represents an unexpected event or broken invariant. They are used for
@@ -36,34 +36,30 @@
 	File        string // file containing the call to bug.Report
 	Line        int    // line containing the call to bug.Report
 	Description string // description of the bug
-	Data        Data   // additional metadata
 	Key         string // key identifying the bug (file:line if available)
 	Stack       string // call stack
 }
 
-// Data is additional metadata to record for a bug.
-type Data map[string]interface{}
-
 // Reportf reports a formatted bug message.
 func Reportf(format string, args ...interface{}) {
-	report(fmt.Sprintf(format, args...), nil)
+	report(fmt.Sprintf(format, args...))
 }
 
 // Errorf calls fmt.Errorf for the given arguments, and reports the resulting
 // error message as a bug.
 func Errorf(format string, args ...interface{}) error {
 	err := fmt.Errorf(format, args...)
-	report(err.Error(), nil)
+	report(err.Error())
 	return err
 }
 
 // Report records a new bug encountered on the server.
 // It uses reflection to report the position of the immediate caller.
-func Report(description string, data Data) {
-	report(description, data)
+func Report(description string) {
+	report(description)
 }
 
-func report(description string, data Data) {
+func report(description string) {
 	_, file, line, ok := runtime.Caller(2) // all exported reporting functions call report directly
 
 	key := "<missing callsite>"
@@ -79,37 +75,36 @@
 		File:        file,
 		Line:        line,
 		Description: description,
-		Data:        data,
 		Key:         key,
 		Stack:       string(debug.Stack()),
 	}
 
 	mu.Lock()
-	defer mu.Unlock()
-
-	if exemplars == nil {
-		exemplars = make(map[string]Bug)
-	}
-
 	if _, ok := exemplars[key]; !ok {
+		if exemplars == nil {
+			exemplars = make(map[string]Bug)
+		}
 		exemplars[key] = bug // capture one exemplar per key
 	}
+	hh := handlers
+	handlers = nil
+	mu.Unlock()
 
-	for _, waiter := range waiters {
-		waiter <- bug
+	// Call the handlers outside the critical section since a
+	// handler may itself fail and call bug.Report. Since handlers
+	// are one-shot, the inner call should be trivial.
+	for _, handle := range hh {
+		handle(bug)
 	}
-	waiters = nil
 }
 
-// Notify returns a channel that will be sent the next bug to occur on the
-// server. This channel only ever receives one bug.
-func Notify() <-chan Bug {
+// Handle adds a handler function that will be called with the next
+// bug to occur on the server. The handler only ever receives one bug.
+// It is called synchronously, and should return in a timely manner.
+func Handle(h func(Bug)) {
 	mu.Lock()
 	defer mu.Unlock()
-
-	ch := make(chan Bug, 1) // 1-buffered so that bug reporting is non-blocking
-	waiters = append(waiters, ch)
-	return ch
+	handlers = append(handlers, h)
 }
 
 // List returns a slice of bug exemplars -- the first bugs to occur at each
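
Typical use of the new one-shot handler API, sketched below; the import path assumes code compiled within the gopls module, since the package is internal:

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/bug"
)

func main() {
	// Handlers are one-shot: each registered function sees only the
	// next reported bug and is then discarded.
	bug.Handle(func(b bug.Bug) { fmt.Println("bug:", b.Description) })

	bug.Report("something unexpected") // handler runs synchronously here
	bug.Report("again")                // no handler remains; only the exemplar is recorded
}
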
diff --git a/internal/bug/bug_test.go b/gopls/internal/bug/bug_test.go
similarity index 60%
rename from internal/bug/bug_test.go
rename to gopls/internal/bug/bug_test.go
index edfc103..2e36221 100644
--- a/internal/bug/bug_test.go
+++ b/gopls/internal/bug/bug_test.go
@@ -11,18 +11,18 @@
 
 func resetForTesting() {
 	exemplars = nil
-	waiters = nil
+	handlers = nil
 }
 
 func TestListBugs(t *testing.T) {
 	defer resetForTesting()
 
-	Report("bad", nil)
+	Report("bad")
 
 	wantBugs(t, "bad")
 
 	for i := 0; i < 3; i++ {
-		Report(fmt.Sprintf("index:%d", i), nil)
+		Report(fmt.Sprintf("index:%d", i))
 	}
 
 	wantBugs(t, "bad", "index:0")
@@ -44,22 +44,21 @@
 	}
 }
 
-func TestBugNotification(t *testing.T) {
+func TestBugHandler(t *testing.T) {
 	defer resetForTesting()
 
-	Report("unseen", nil)
+	Report("unseen")
 
-	notify1 := Notify()
-	notify2 := Notify()
+	// Both handlers are called, in order of registration, only once.
+	var got string
+	Handle(func(b Bug) { got += "1:" + b.Description })
+	Handle(func(b Bug) { got += "2:" + b.Description })
 
-	Report("seen", Data{"answer": 42})
+	Report("seen")
 
-	for _, got := range []Bug{<-notify1, <-notify2} {
-		if got, want := got.Description, "seen"; got != want {
-			t.Errorf("Saw bug %q, want %q", got, want)
-		}
-		if got, want := got.Data["answer"], 42; got != want {
-			t.Errorf(`bug.Data["answer"] = %v, want %v`, got, want)
-		}
+	Report("again")
+
+	if want := "1:seen2:seen"; got != want {
+		t.Errorf("got %q, want %q", got, want)
 	}
 }
diff --git a/gopls/internal/hooks/diff.go b/gopls/internal/hooks/diff.go
index f7fec5a..a6ad65f 100644
--- a/gopls/internal/hooks/diff.go
+++ b/gopls/internal/hooks/diff.go
@@ -16,7 +16,7 @@
 	"time"
 
 	"github.com/sergi/go-diff/diffmatchpatch"
-	"golang.org/x/tools/internal/bug"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/internal/diff"
 )
 
diff --git a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go b/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
index af29a36..a26faf2 100644
--- a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
+++ b/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
@@ -168,7 +168,7 @@
 		return nil, fmt.Errorf("%s is not a (pointer to) struct type",
 			types.TypeString(typ, types.RelativeTo(pkg)))
 	}
-	// Inv: typ is the the possibly-named struct type.
+	// Inv: typ is the possibly-named struct type.
 
 	fieldCount := tStruct.NumFields()
 
diff --git a/gopls/internal/lsp/cache/analysis.go b/gopls/internal/lsp/cache/analysis.go
index c236f05..4679041 100644
--- a/gopls/internal/lsp/cache/analysis.go
+++ b/gopls/internal/lsp/cache/analysis.go
@@ -27,10 +27,11 @@
 
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/filecache"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
-	"golang.org/x/tools/internal/bug"
+	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/facts"
 	"golang.org/x/tools/internal/gcimporter"
 	"golang.org/x/tools/internal/memoize"
@@ -454,13 +455,15 @@
 		if err != nil {
 			return nil, err
 		}
-		data := mustEncode(summary)
-		if false {
-			log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), id)
-		}
-		if err := filecache.Set(cacheKind, key, data); err != nil {
-			return nil, fmt.Errorf("internal error updating shared cache: %v", err)
-		}
+		go func() {
+			data := mustEncode(summary)
+			if false {
+				log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), id)
+			}
+			if err := filecache.Set(cacheKind, key, data); err != nil {
+				event.Error(ctx, "internal error updating analysis shared cache", err)
+			}
+		}()
 	}
 
 	// Hit or miss, we need to merge the export data from
@@ -605,7 +608,6 @@
 
 	// TODO(adonovan): port the old logic to:
 	// - gather go/packages diagnostics from m.Errors? (port goPackagesErrorDiagnostics)
-	// - record unparseable file URIs so we can suppress type errors for these files.
 	// - gather diagnostics from expandErrors + typeErrorDiagnostics + depsErrors.
 
 	// -- analysis --
@@ -762,7 +764,16 @@
 		Sizes: m.TypesSizes,
 		Error: func(e error) {
 			pkg.compiles = false // type error
-			pkg.typeErrors = append(pkg.typeErrors, e.(types.Error))
+
+			// Suppress type errors in files with parse errors
+			// as parser recovery can be quite lossy (#59888).
+			typeError := e.(types.Error)
+			for _, p := range parsed {
+				if p.ParseErr != nil && source.NodeContains(p.File, typeError.Pos) {
+					return
+				}
+			}
+			pkg.typeErrors = append(pkg.typeErrors, typeError)
 		},
 		Importer: importerFunc(func(importPath string) (*types.Package, error) {
 			if importPath == "unsafe" {
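
The type-error suppression above hinges on a position-containment test. A rough standalone equivalent (the exact end-inclusiveness of source.NodeContains is an assumption here):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// nodeContains approximates gopls's source.NodeContains: it reports
// whether pos lies within the node's source range.
func nodeContains(n ast.Node, pos token.Pos) bool {
	return n.Pos() <= pos && pos <= n.End()
}

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", "package p\nvar x = 1\n", 0)
	if err != nil {
		panic(err)
	}
	fmt.Println(nodeContains(f, f.Name.Pos())) // true: the package name lies inside the file
}
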
diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go
index a603047..83ea177 100644
--- a/gopls/internal/lsp/cache/check.go
+++ b/gopls/internal/lsp/cache/check.go
@@ -22,6 +22,7 @@
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/tools/go/ast/astutil"
 	goplsastutil "golang.org/x/tools/gopls/internal/astutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/filecache"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
@@ -29,7 +30,6 @@
 	"golang.org/x/tools/gopls/internal/lsp/source/typerefs"
 	"golang.org/x/tools/gopls/internal/lsp/source/xrefs"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 	"golang.org/x/tools/internal/gcimporter"
@@ -322,7 +322,7 @@
 // forEachPackage does a pre- and post- order traversal of the packages
 // specified by ids using the provided pre and post functions.
 //
-// The pre func is is optional. If set, pre is evaluated after the package
+// The pre func is optional. If set, pre is evaluated after the package
 // handle has been constructed, but before type-checking. If pre returns false,
 // type-checking is skipped for this package handle.
 //
@@ -1440,7 +1440,7 @@
 			depPH := b.handles[id]
 			if depPH == nil {
 				// e.g. missing metadata for dependencies in buildPackageHandle
-				return nil, missingPkgError(path, inputs.moduleMode)
+				return nil, missingPkgError(inputs.id, path, inputs.moduleMode)
 			}
 			if !source.IsValidImport(inputs.pkgPath, depPH.m.PkgPath) {
 				return nil, fmt.Errorf("invalid use of internal package %q", path)
@@ -1601,13 +1601,17 @@
 
 // missingPkgError returns an error message for a missing package that varies
 // based on the user's workspace mode.
-func missingPkgError(pkgPath string, moduleMode bool) error {
+func missingPkgError(from PackageID, pkgPath string, moduleMode bool) error {
 	// TODO(rfindley): improve this error. Previous versions of this error had
 	// access to the full snapshot, and could provide more information (such as
 	// the initialization error).
 	if moduleMode {
-		// Previously, we would present the initialization error here.
-		return fmt.Errorf("no required module provides package %q", pkgPath)
+		if source.IsCommandLineArguments(from) {
+			return fmt.Errorf("current file is not included in a workspace module")
+		} else {
+			// Previously, we would present the initialization error here.
+			return fmt.Errorf("no required module provides package %q", pkgPath)
+		}
 	} else {
 		// Previously, we would list the directories in GOROOT and GOPATH here.
 		return fmt.Errorf("cannot find package %q in GOROOT or GOPATH", pkgPath)
diff --git a/gopls/internal/lsp/cache/standalone_go116.go b/gopls/internal/lsp/cache/constraints.go
similarity index 63%
rename from gopls/internal/lsp/cache/standalone_go116.go
rename to gopls/internal/lsp/cache/constraints.go
index 2f72d5f..9503abc 100644
--- a/gopls/internal/lsp/cache/standalone_go116.go
+++ b/gopls/internal/lsp/cache/constraints.go
@@ -2,12 +2,10 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-//go:build go1.16
-// +build go1.16
-
 package cache
 
 import (
+	"go/ast"
 	"go/build/constraint"
 	"go/parser"
 	"go/token"
@@ -26,25 +24,38 @@
 		return false
 	}
 
-	for _, cg := range f.Comments {
+	found := false
+	walkConstraints(f, func(c constraint.Expr) bool {
+		if tag, ok := c.(*constraint.TagExpr); ok {
+			for _, t := range standaloneTags {
+				if t == tag.Tag {
+					found = true
+					return false
+				}
+			}
+		}
+		return true
+	})
+
+	return found
+}
+
+// walkConstraints calls f for each constraint expression in the file, until
+// all constraints are exhausted or f returns false.
+func walkConstraints(file *ast.File, f func(constraint.Expr) bool) {
+	for _, cg := range file.Comments {
 		// Even with PackageClauseOnly the parser consumes the semicolon following
 		// the package clause, so we must guard against comments that come after
 		// the package name.
-		if cg.Pos() > f.Name.Pos() {
+		if cg.Pos() > file.Name.Pos() {
 			continue
 		}
 		for _, comment := range cg.List {
 			if c, err := constraint.Parse(comment.Text); err == nil {
-				if tag, ok := c.(*constraint.TagExpr); ok {
-					for _, t := range standaloneTags {
-						if t == tag.Tag {
-							return true
-						}
-					}
+				if !f(c) {
+					return
 				}
 			}
 		}
 	}
-
-	return false
 }
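
The refactor builds on go/build/constraint. A small sketch of the tag check that isStandaloneFile now performs through walkConstraints ("ignore" stands in for a configured standalone tag):

package main

import (
	"fmt"
	"go/build/constraint"
)

func main() {
	expr, err := constraint.Parse("//go:build ignore")
	if err != nil {
		panic(err)
	}
	// A single tag parses to a *constraint.TagExpr, the case the cache
	// code matches against its configured standalone tags.
	if tag, ok := expr.(*constraint.TagExpr); ok && tag.Tag == "ignore" {
		fmt.Println("standalone build tag found:", tag.Tag)
	}
}
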
diff --git a/gopls/internal/lsp/cache/standalone_go116_test.go b/gopls/internal/lsp/cache/constraints_test.go
similarity index 100%
rename from gopls/internal/lsp/cache/standalone_go116_test.go
rename to gopls/internal/lsp/cache/constraints_test.go
diff --git a/gopls/internal/lsp/cache/errors.go b/gopls/internal/lsp/cache/errors.go
index fcb5c5c..c9379bf 100644
--- a/gopls/internal/lsp/cache/errors.go
+++ b/gopls/internal/lsp/cache/errors.go
@@ -21,13 +21,13 @@
 	"strings"
 
 	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/span"
 	"golang.org/x/tools/internal/analysisinternal"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/typesinternal"
 )
 
diff --git a/gopls/internal/lsp/cache/graph.go b/gopls/internal/lsp/cache/graph.go
index e812b04..684bdab 100644
--- a/gopls/internal/lsp/cache/graph.go
+++ b/gopls/internal/lsp/cache/graph.go
@@ -8,9 +8,9 @@
 	"sort"
 
 	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 )
 
 // A metadataGraph is an immutable and transitively closed import
@@ -24,6 +24,8 @@
 
 	// ids maps file URIs to package IDs, sorted by (!valid, cli, packageID).
 	// A single file may belong to multiple packages due to tests packages.
+	//
+	// Invariant: all IDs present in the ids map exist in the metadata map.
 	ids map[span.URI][]PackageID
 }
 
diff --git a/gopls/internal/lsp/cache/imports.go b/gopls/internal/lsp/cache/imports.go
index 9337176..55085a2 100644
--- a/gopls/internal/lsp/cache/imports.go
+++ b/gopls/internal/lsp/cache/imports.go
@@ -29,16 +29,12 @@
 	cachedModFileHash      source.Hash
 	cachedBuildFlags       []string
 	cachedDirectoryFilters []string
-
-	// runOnce records whether runProcessEnvFunc has been called at least once.
-	// This is necessary to avoid resetting state before the process env is
-	// populated.
-	//
-	// TODO(rfindley): this shouldn't be necessary.
-	runOnce bool
 }
 
-func (s *importsState) runProcessEnvFunc(ctx context.Context, snapshot *snapshot, fn func(*imports.Options) error) error {
+func (s *importsState) runProcessEnvFunc(ctx context.Context, snapshot *snapshot, fn func(context.Context, *imports.Options) error) error {
+	ctx, done := event.Start(ctx, "cache.importsState.runProcessEnvFunc")
+	defer done()
+
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -72,24 +68,19 @@
 	// update the processEnv. Clearing caches blocks on any background
 	// scans.
 	if changed {
-		// As a special case, skip cleanup the first time -- we haven't fully
-		// initialized the environment yet and calling GetResolver will do
-		// unnecessary work and potentially mess up the go.mod file.
-		if s.runOnce {
-			if resolver, err := s.processEnv.GetResolver(); err == nil {
-				if modResolver, ok := resolver.(*imports.ModuleResolver); ok {
-					modResolver.ClearForNewMod()
-				}
+		if err := populateProcessEnvFromSnapshot(ctx, s.processEnv, snapshot); err != nil {
+			return err
+		}
+
+		if resolver, err := s.processEnv.GetResolver(); err == nil {
+			if modResolver, ok := resolver.(*imports.ModuleResolver); ok {
+				modResolver.ClearForNewMod()
 			}
 		}
 
 		s.cachedModFileHash = modFileHash
 		s.cachedBuildFlags = currentBuildFlags
 		s.cachedDirectoryFilters = currentDirectoryFilters
-		if err := s.populateProcessEnv(ctx, snapshot); err != nil {
-			return err
-		}
-		s.runOnce = true
 	}
 
 	// Run the user function.
@@ -105,7 +96,7 @@
 		LocalPrefix: localPrefix,
 	}
 
-	if err := fn(opts); err != nil {
+	if err := fn(ctx, opts); err != nil {
 		return err
 	}
 
@@ -122,10 +113,12 @@
 	return nil
 }
 
-// populateProcessEnv sets the dynamically configurable fields for the view's
-// process environment. Assumes that the caller is holding the s.view.importsMu.
-func (s *importsState) populateProcessEnv(ctx context.Context, snapshot *snapshot) error {
-	pe := s.processEnv
+// populateProcessEnvFromSnapshot sets the dynamically configurable fields for
+// the view's process environment. Assumes that the caller is holding the
+// importsState mutex.
+func populateProcessEnvFromSnapshot(ctx context.Context, pe *imports.ProcessEnv, snapshot *snapshot) error {
+	ctx, done := event.Start(ctx, "cache.populateProcessEnvFromSnapshot")
+	defer done()
 
 	if snapshot.view.Options().VerboseOutput {
 		pe.Logf = func(format string, args ...interface{}) {
@@ -166,6 +159,9 @@
 }
 
 func (s *importsState) refreshProcessEnv() {
+	ctx, done := event.Start(s.ctx, "cache.importsState.refreshProcessEnv")
+	defer done()
+
 	start := time.Now()
 
 	s.mu.Lock()
@@ -177,9 +173,9 @@
 
 	event.Log(s.ctx, "background imports cache refresh starting")
 	if err := imports.PrimeCache(context.Background(), env); err == nil {
-		event.Log(s.ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)))
+		event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)))
 	} else {
-		event.Log(s.ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)), keys.Err.Of(err))
+		event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)), keys.Err.Of(err))
 	}
 	s.mu.Lock()
 	s.cacheRefreshDuration = time.Since(start)
diff --git a/gopls/internal/lsp/cache/load.go b/gopls/internal/lsp/cache/load.go
index 5f796f0..111b074 100644
--- a/gopls/internal/lsp/cache/load.go
+++ b/gopls/internal/lsp/cache/load.go
@@ -16,10 +16,10 @@
 	"time"
 
 	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 	"golang.org/x/tools/internal/gocommand"
@@ -36,12 +36,15 @@
 //
 // The resulting error may wrap the moduleErrorMap error type, representing
 // errors associated with specific modules.
+//
+// If scopes contains a file scope there must be exactly one scope.
 func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadScope) (err error) {
 	id := atomic.AddUint64(&loadID, 1)
 	eventName := fmt.Sprintf("go/packages.Load #%d", id) // unique name for logging
 
 	var query []string
 	var containsDir bool // for logging
+	var standalone bool  // whether this is a load of a standalone file
 
 	// Keep track of module query -> module path so that we can later correlate query
 	// errors with errors.
@@ -55,7 +58,14 @@
 			query = append(query, string(scope))
 
 		case fileLoadScope:
+			// Given multiple scopes, the resulting load might contain inaccurate
+			// information. For example go/packages returns at most one command-line
+			// arguments package, and does not handle a combination of standalone
+			// files and packages.
 			uri := span.URI(scope)
+			if len(scopes) > 1 {
+				panic(fmt.Sprintf("internal error: load called with multiple scopes when a file scope is present (file: %s)", uri))
+			}
 			fh := s.FindFile(uri)
 			if fh == nil || s.View().FileKind(fh) != source.Go {
 				// Don't try to load a file that doesn't exist, or isn't a go file.
@@ -66,6 +76,7 @@
 				continue
 			}
 			if isStandaloneFile(contents, s.view.Options().StandaloneTags) {
+				standalone = true
 				query = append(query, uri.Filename())
 			} else {
 				query = append(query, fmt.Sprintf("file=%s", uri.Filename()))
@@ -79,7 +90,7 @@
 		case viewLoadScope:
 			// If we are outside of GOPATH, a module, or some other known
 			// build system, don't load subdirectories.
-			if !s.ValidBuildConfiguration() {
+			if !s.validBuildConfiguration() {
 				query = append(query, "./")
 			} else {
 				query = append(query, "./...")
@@ -144,6 +155,36 @@
 		return fmt.Errorf("packages.Load error: %w", err)
 	}
 
+	if standalone && len(pkgs) > 1 {
+		return bug.Errorf("internal error: go/packages returned multiple packages for standalone file")
+	}
+
+	// Workaround for a bug (?) that has been in go/packages since
+	// the outset: Package("unsafe").GoFiles=[], whereas it should
+	// include unsafe/unsafe.go. Derive it from builtins.go.
+	//
+	// This workaround relies on the fact that we always add both
+	// builtins and unsafe to the set of scopes in the workspace load.
+	//
+	// TODO(adonovan): fix upstream in go/packages.
+	// (Does this need a proposal? Arguably not.)
+	{
+		var builtin, unsafe *packages.Package
+		for _, pkg := range pkgs {
+			switch pkg.ID {
+			case "unsafe":
+				unsafe = pkg
+			case "builtin":
+				builtin = pkg
+			}
+		}
+		if builtin != nil && unsafe != nil && len(builtin.GoFiles) == 1 {
+			unsafe.GoFiles = []string{
+				filepath.Join(filepath.Dir(builtin.GoFiles[0]), "../unsafe/unsafe.go"),
+			}
+		}
+	}
+
 	moduleErrs := make(map[string][]packages.Error) // module path -> errors
 	filterFunc := s.view.filterFunc()
 	newMetadata := make(map[PackageID]*source.Metadata)
@@ -196,9 +237,7 @@
 		if allFilesExcluded(pkg.GoFiles, filterFunc) {
 			continue
 		}
-		if err := buildMetadata(ctx, pkg, cfg, query, newMetadata, nil); err != nil {
-			return err
-		}
+		buildMetadata(newMetadata, pkg, cfg.Dir, standalone)
 	}
 
 	s.mu.Lock()
@@ -315,12 +354,12 @@
 
 	// Apply diagnostics about the workspace configuration to relevant open
 	// files.
-	openFiles := s.openFiles()
+	openFiles := s.overlays()
 
 	// If the snapshot does not have a valid build configuration, it may be
 	// that the user has opened a directory that contains multiple modules.
 	// Check for that and warn about it.
-	if !s.ValidBuildConfiguration() {
+	if !s.validBuildConfiguration() {
 		var msg string
 		if s.view.goversion >= 18 {
 			msg = `gopls was not able to find modules in your workspace.
@@ -349,7 +388,7 @@
 			rootMod = uri.Filename()
 		}
 		rootDir := filepath.Dir(rootMod)
-		nestedModules := make(map[string][]source.FileHandle)
+		nestedModules := make(map[string][]*Overlay)
 		for _, fh := range openFiles {
 			mod, err := findRootPattern(ctx, filepath.Dir(fh.URI().Filename()), "go.mod", s)
 			if err != nil {
@@ -378,9 +417,9 @@
 		// "orphaned". Don't show a general diagnostic in the progress bar,
 		// because the user may still want to edit a file in a nested module.
 		var srcDiags []*source.Diagnostic
-		for modDir, uris := range nestedModules {
+		for modDir, files := range nestedModules {
 			msg := fmt.Sprintf("This file is in %s, which is a nested module in the %s module.\n%s", modDir, rootMod, multiModuleMsg)
-			srcDiags = append(srcDiags, s.applyCriticalErrorToFiles(ctx, msg, uris)...)
+			srcDiags = append(srcDiags, s.applyCriticalErrorToFiles(ctx, msg, files)...)
 		}
 		if len(srcDiags) != 0 {
 			return fmt.Errorf("You have opened a nested module.\n%s", multiModuleMsg), srcDiags
@@ -389,7 +428,7 @@
 	return nil, nil
 }
 
-func (s *snapshot) applyCriticalErrorToFiles(ctx context.Context, msg string, files []source.FileHandle) []*source.Diagnostic {
+func (s *snapshot) applyCriticalErrorToFiles(ctx context.Context, msg string, files []*Overlay) []*source.Diagnostic {
 	var srcDiags []*source.Diagnostic
 	for _, fh := range files {
 		// Place the diagnostics on the package or module declarations.
@@ -423,42 +462,29 @@
 // buildMetadata populates the updates map with metadata updates to
 // apply, based on the given pkg. It recurs through pkg.Imports to ensure that
 // metadata exists for all dependencies.
-func buildMetadata(ctx context.Context, pkg *packages.Package, cfg *packages.Config, query []string, updates map[PackageID]*source.Metadata, path []PackageID) error {
+func buildMetadata(updates map[PackageID]*source.Metadata, pkg *packages.Package, loadDir string, standalone bool) {
 	// Allow for multiple ad-hoc packages in the workspace (see #47584).
 	pkgPath := PackagePath(pkg.PkgPath)
 	id := PackageID(pkg.ID)
 
-	// TODO(rfindley): this creates at most one command-line-arguments package
-	// per load, but if we pass multiple file= queries to go/packages, there may
-	// be multiple command-line-arguments packages.
-	//
-	// As reported in golang/go#59318, this can result in accidentally quadratic
-	// loading behavior.
 	if source.IsCommandLineArguments(id) {
-		suffix := ":" + strings.Join(query, ",")
+		if len(pkg.CompiledGoFiles) != 1 {
+			bug.Reportf("unexpected files in command-line-arguments package: %v", pkg.CompiledGoFiles)
+			return
+		}
+		suffix := pkg.CompiledGoFiles[0]
 		id = PackageID(pkg.ID + suffix)
 		pkgPath = PackagePath(pkg.PkgPath + suffix)
 	}
 
+	// Duplicate?
 	if _, ok := updates[id]; ok {
-		// If we've already seen this dependency, there may be an import cycle, or
-		// we may have reached the same package transitively via distinct paths.
-		// Check the path to confirm.
-
-		// TODO(rfindley): this doesn't look sufficient. Any single piece of new
-		// metadata could theoretically introduce import cycles in the metadata
-		// graph. What's the point of this limited check here (and is it even
-		// possible to get an import cycle in data from go/packages)? Consider
-		// simply returning, so that this function need not return an error.
-		//
-		// We should consider doing a more complete guard against import cycles
-		// elsewhere.
-		for _, prev := range path {
-			if prev == id {
-				return fmt.Errorf("import cycle detected: %q", id)
-			}
-		}
-		return nil
+		// A package was encountered twice due to shared
+		// subgraphs (common) or cycles (rare). Although "go
+		// list" usually breaks cycles, we don't rely on it.
+		// breakImportCycles in metadataGraph.Clone takes care
+		// of it later.
+		return
 	}
 
 	// Recreate the metadata rather than reusing it to avoid locking.
@@ -468,10 +494,11 @@
 		Name:       PackageName(pkg.Name),
 		ForTest:    PackagePath(packagesinternal.GetForTest(pkg)),
 		TypesSizes: pkg.TypesSizes,
-		LoadDir:    cfg.Dir,
+		LoadDir:    loadDir,
 		Module:     pkg.Module,
 		Errors:     pkg.Errors,
 		DepsErrors: packagesinternal.GetDepsErrors(pkg),
+		Standalone: standalone,
 	}
 
 	updates[id] = m
@@ -484,6 +511,10 @@
 		uri := span.URIFromPath(filename)
 		m.GoFiles = append(m.GoFiles, uri)
 	}
+	for _, filename := range pkg.IgnoredFiles {
+		uri := span.URIFromPath(filename)
+		m.IgnoredFiles = append(m.IgnoredFiles, uri)
+	}
 
 	depsByImpPath := make(map[ImportPath]PackageID)
 	depsByPkgPath := make(map[PackagePath]PackageID)
@@ -572,17 +603,13 @@
 
 		depsByImpPath[importPath] = PackageID(imported.ID)
 		depsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID)
-		if err := buildMetadata(ctx, imported, cfg, query, updates, append(path, id)); err != nil {
-			event.Error(ctx, "error in dependency", err)
-		}
+		buildMetadata(updates, imported, loadDir, false) // only top level packages can be standalone
 	}
 	m.DepsByImpPath = depsByImpPath
 	m.DepsByPkgPath = depsByPkgPath
 
 	// m.Diagnostics is set later in the loading pass, using
 	// computeLoadDiagnostics.
-
-	return nil
 }
 
 // computeLoadDiagnostics computes and sets m.Diagnostics for the given metadata m.
@@ -679,7 +706,8 @@
 	}
 
 	for uri := range uris {
-		if s.isOpenLocked(uri) {
+		fh, _ := s.files.Get(uri)
+		if _, open := fh.(*Overlay); open {
 			return true
 		}
 	}
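
For context on the standalone handling added above: such a file is queried by its bare filename rather than a file= pattern, and go/packages reports it as its own command-line-arguments package. A minimal sketch of a file that would take this path, assuming the default "standaloneTags" setting of ["ignore"] (file name and contents illustrative):

	//go:build ignore

	package main

	// A typical standalone helper, run explicitly via "go run gen.go";
	// build constraints are ignored for files named on the command line.
	func main() {
		println("generating...")
	}
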
diff --git a/gopls/internal/lsp/cache/session.go b/gopls/internal/lsp/cache/session.go
index 704412f..eaad67c 100644
--- a/gopls/internal/lsp/cache/session.go
+++ b/gopls/internal/lsp/cache/session.go
@@ -12,11 +12,11 @@
 	"sync"
 	"sync/atomic"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/govulncheck"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/lsp/source/typerefs"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/imports"
@@ -46,6 +46,11 @@
 func (s *Session) ID() string     { return s.id }
 func (s *Session) String() string { return s.id }
 
+// GoCommandRunner returns the gocommand Runner for this session.
+func (s *Session) GoCommandRunner() *gocommand.Runner {
+	return s.gocmdRunner
+}
+
 // Options returns a copy of the SessionOptions for this session.
 func (s *Session) Options() *source.Options {
 	s.optionsMu.Lock()
@@ -113,7 +118,8 @@
 		return nil, nil, func() {}, err
 	}
 
-	wsModFiles, wsModFilesErr := computeWorkspaceModFiles(ctx, info.gomod, info.effectiveGOWORK(), info.effectiveGO111MODULE(), s)
+	gowork, _ := info.GOWORK()
+	wsModFiles, wsModFilesErr := computeWorkspaceModFiles(ctx, info.gomod, gowork, info.effectiveGO111MODULE(), s)
 
 	// We want a true background context and not a detached context here
 	// the spans need to be unrelated and no tag values should pollute it.
@@ -199,8 +205,8 @@
 	return v, snapshot, snapshot.Acquire(), nil
 }
 
-// View returns a view with a matching name, if the session has one.
-func (s *Session) View(name string) *View {
+// ViewByName returns a view with a matching name, if the session has one.
+func (s *Session) ViewByName(name string) *View {
 	s.viewMu.Lock()
 	defer s.viewMu.Unlock()
 	for _, view := range s.views {
@@ -211,6 +217,18 @@
 	return nil
 }
 
+// View returns the view with a matching id, if present.
+func (s *Session) View(id string) (*View, error) {
+	s.viewMu.Lock()
+	defer s.viewMu.Unlock()
+	for _, view := range s.views {
+		if view.ID() == id {
+			return view, nil
+		}
+	}
+	return nil, fmt.Errorf("no view with ID %q", id)
+}
+
 // ViewOf returns a view corresponding to the given URI.
 // If the file is not already associated with a view, pick one using some heuristics.
 func (s *Session) ViewOf(uri span.URI) (*View, error) {
@@ -308,7 +326,16 @@
 		return nil, fmt.Errorf("view %q not found", view.id)
 	}
 
-	v, _, release, err := s.createView(ctx, view.name, view.folder, options, seqID)
+	v, snapshot, release, err := s.createView(ctx, view.name, view.folder, options, seqID)
+	// The new snapshot has lost the history of the previous view. As a result,
+	// it may not see open files that aren't in its build configuration (as it
+	// would have done via didOpen notifications). This can lead to inconsistent
+	// behavior when configuration is changed mid-session.
+	//
+	// Ensure the new snapshot observes all open files.
+	for _, o := range v.fs.Overlays() {
+		_, _ = snapshot.ReadFile(ctx, o.URI())
+	}
 	release()
 
 	if err != nil {
diff --git a/gopls/internal/lsp/cache/snapshot.go b/gopls/internal/lsp/cache/snapshot.go
index cf2b930..7988a72 100644
--- a/gopls/internal/lsp/cache/snapshot.go
+++ b/gopls/internal/lsp/cache/snapshot.go
@@ -10,6 +10,7 @@
 	"errors"
 	"fmt"
 	"go/ast"
+	"go/build/constraint"
 	"go/token"
 	"go/types"
 	"io"
@@ -29,6 +30,8 @@
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/types/objectpath"
+	"golang.org/x/tools/gopls/internal/bug"
+	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/filecache"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
@@ -36,7 +39,6 @@
 	"golang.org/x/tools/gopls/internal/lsp/source/typerefs"
 	"golang.org/x/tools/gopls/internal/lsp/source/xrefs"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 	"golang.org/x/tools/internal/gocommand"
@@ -74,7 +76,7 @@
 	// view.initializationSema.
 	initialized bool
 	// initializedErr holds the last error resulting from initialization. If
-	// initialization fails, we only retry when the the workspace modules change,
+	// initialization fails, we only retry when the workspace modules change,
 	// to avoid too many go/packages calls.
 	initializedErr *source.CriticalError
 
@@ -185,6 +187,21 @@
 
 	// pkgIndex is an index of package IDs, for efficient storage of typerefs.
 	pkgIndex *typerefs.PackageIndex
+
+	// Only compute module prefixes once, as they are used with high frequency to
+	// detect ignored files.
+	ignoreFilterOnce sync.Once
+	ignoreFilter     *ignoreFilter
+
+	// If non-nil, the result of computing orphaned file diagnostics.
+	//
+	// Only the field, not the map itself, is guarded by the mutex. The map must
+	// not be mutated.
+	//
+	// Used to save work across diagnostics+code action passes.
+	// TODO(rfindley): refactor all of this so there's no need to re-evaluate
+	// diagnostics during code-action.
+	orphanedFileDiagnostics map[span.URI]*source.Diagnostic
 }
 
 var globalSnapshotID uint64
@@ -229,7 +246,7 @@
 // The destroyedBy argument is used for debugging.
 //
 // v.snapshotMu must be held while calling this function, in order to preserve
-// the invariants described by the the docstring for v.snapshot.
+// the invariants described by the docstring for v.snapshot.
 func (v *View) destroy(s *snapshot, destroyedBy string) {
 	v.snapshotWG.Add(1)
 	go func() {
@@ -287,7 +304,8 @@
 }
 
 func (s *snapshot) WorkFile() span.URI {
-	return s.view.effectiveGOWORK()
+	gowork, _ := s.view.GOWORK()
+	return gowork
 }
 
 func (s *snapshot) Templates() map[span.URI]source.FileHandle {
@@ -303,7 +321,7 @@
 	return tmpls
 }
 
-func (s *snapshot) ValidBuildConfiguration() bool {
+func (s *snapshot) validBuildConfiguration() bool {
 	// Since we only really understand the `go` command, if the user has a
 	// different GOPACKAGESDRIVER, assume that their configuration is valid.
 	if s.view.hasGopackagesDriver {
@@ -361,7 +379,7 @@
 
 	// If the view has an invalid configuration, don't build the workspace
 	// module.
-	validBuildConfiguration := s.ValidBuildConfiguration()
+	validBuildConfiguration := s.validBuildConfiguration()
 	if !validBuildConfiguration {
 		return mode
 	}
@@ -538,7 +556,7 @@
 	// the main (workspace) module. Otherwise, we should use the module for
 	// the passed-in working dir.
 	if mode == source.LoadWorkspace {
-		if s.view.effectiveGOWORK() == "" && s.view.gomod != "" {
+		if gowork, _ := s.view.GOWORK(); gowork == "" && s.view.gomod != "" {
 			modURI = s.view.gomod
 		}
 	} else {
@@ -896,15 +914,9 @@
 	defer func() {
 		s.activePackages.Set(id, active, nil) // store the result either way: remember that pkg is not open
 	}()
-	for _, cgf := range pkg.Metadata().GoFiles {
-		if s.isOpenLocked(cgf) {
-			return pkg
-		}
-	}
-	for _, cgf := range pkg.Metadata().CompiledGoFiles {
-		if s.isOpenLocked(cgf) {
-			return pkg
-		}
+
+	if containsOpenFileLocked(s, pkg.Metadata()) {
+		return pkg
 	}
 	return nil
 }
@@ -929,7 +941,7 @@
 	}
 
 	// If GOWORK is outside the folder, ensure we are watching it.
-	gowork := s.view.effectiveGOWORK()
+	gowork, _ := s.view.GOWORK()
 	if gowork != "" && !source.InDir(s.view.folder.Filename(), gowork.Filename()) {
 		patterns[gowork.Filename()] = struct{}{}
 	}
@@ -1113,18 +1125,26 @@
 // a loaded package. It awaits snapshot loading.
 //
 // TODO(rfindley): move this to the top of cache/symbols.go
-func (s *snapshot) Symbols(ctx context.Context) (map[span.URI][]source.Symbol, error) {
+func (s *snapshot) Symbols(ctx context.Context, workspaceOnly bool) (map[span.URI][]source.Symbol, error) {
 	if err := s.awaitLoaded(ctx); err != nil {
 		return nil, err
 	}
 
-	// Build symbols for all loaded Go files.
-	s.mu.Lock()
-	meta := s.meta
-	s.mu.Unlock()
+	var (
+		meta []*source.Metadata
+		err  error
+	)
+	if workspaceOnly {
+		meta, err = s.WorkspaceMetadata(ctx)
+	} else {
+		meta, err = s.AllMetadata(ctx)
+	}
+	if err != nil {
+		return nil, fmt.Errorf("loading metadata: %v", err)
+	}
 
 	goFiles := make(map[span.URI]struct{})
-	for _, m := range meta.metadata {
+	for _, m := range meta {
 		for _, uri := range m.GoFiles {
 			goFiles[uri] = struct{}{}
 		}
@@ -1187,7 +1207,7 @@
 func moduleForURI(modFiles map[span.URI]struct{}, uri span.URI) span.URI {
 	var match span.URI
 	for modURI := range modFiles {
-		if !source.InDir(span.Dir(modURI).Filename(), uri.Filename()) {
+		if !source.InDir(filepath.Dir(modURI.Filename()), uri.Filename()) {
 			continue
 		}
 		if len(modURI) > len(match) {
@@ -1202,7 +1222,6 @@
 //
 // The given uri must be a file, not a directory.
 func nearestModFile(ctx context.Context, uri span.URI, fs source.FileSource) (span.URI, error) {
-	// TODO(rfindley)
 	dir := filepath.Dir(uri.Filename())
 	mod, err := findRootPattern(ctx, dir, "go.mod", fs)
 	if err != nil {
@@ -1248,11 +1267,11 @@
 	}
 }
 
-// noValidMetadataForURILocked reports whether there is any valid metadata for
-// the given URI.
-func (s *snapshot) noValidMetadataForURILocked(uri span.URI) bool {
+// noRealPackagesForURILocked reports whether there are any
+// non-command-line-arguments packages containing the given URI.
+func (s *snapshot) noRealPackagesForURILocked(uri span.URI) bool {
 	for _, id := range s.meta.ids[uri] {
-		if _, ok := s.meta.metadata[id]; ok {
+		if !source.IsCommandLineArguments(id) || s.meta.metadata[id].Standalone {
 			return false
 		}
 	}
@@ -1338,26 +1357,10 @@
 func (s *snapshot) IsOpen(uri span.URI) bool {
 	s.mu.Lock()
 	defer s.mu.Unlock()
-	return s.isOpenLocked(uri)
 
-}
-
-func (s *snapshot) openFiles() []source.FileHandle {
-	s.mu.Lock()
-	defer s.mu.Unlock()
-
-	var open []source.FileHandle
-	s.files.Range(func(uri span.URI, fh source.FileHandle) {
-		if isFileOpen(fh) {
-			open = append(open, fh)
-		}
-	})
-	return open
-}
-
-func (s *snapshot) isOpenLocked(uri span.URI) bool {
 	fh, _ := s.files.Get(uri)
-	return isFileOpen(fh)
+	_, open := fh.(*Overlay)
+	return open
 }
 
 func isFileOpen(fh source.FileHandle) bool {
@@ -1445,8 +1448,8 @@
 If you are using modules, please open your editor to a directory in your module.
 If you believe this warning is incorrect, please file an issue: https://github.com/golang/go/issues/new.`
 
-func shouldShowAdHocPackagesWarning(snapshot source.Snapshot, active []*source.Metadata) string {
-	if !snapshot.ValidBuildConfiguration() {
+func shouldShowAdHocPackagesWarning(snapshot *snapshot, active []*source.Metadata) string {
+	if !snapshot.validBuildConfiguration() {
 		for _, m := range active {
 			// A blank entry in DepsByImpPath
 			// indicates a missing dependency.
@@ -1556,7 +1559,7 @@
 
 	// If the view's build configuration is invalid, we cannot reload by
 	// package path. Just reload the directory instead.
-	if !s.ValidBuildConfiguration() {
+	if !s.validBuildConfiguration() {
 		scopes = []loadScope{viewLoadScope("LOAD_INVALID_VIEW")}
 	}
 
@@ -1577,45 +1580,81 @@
 //
 // An error is returned if the load is canceled.
 func (s *snapshot) reloadOrphanedOpenFiles(ctx context.Context) error {
+	s.mu.Lock()
+	meta := s.meta
+	s.mu.Unlock()
 	// When we load ./... or a package path directly, we may not get packages
 	// that exist only in overlays. As a workaround, we search all of the files
 	// available in the snapshot and reload their metadata individually using a
 	// file= query if the metadata is unavailable.
-	files := s.orphanedOpenFiles()
-
-	// Files without a valid package declaration can't be loaded. Don't try.
-	var scopes []loadScope
-	for _, file := range files {
-		pgf, err := s.ParseGo(ctx, file, source.ParseHeader)
-		if err != nil {
+	open := s.overlays()
+	var files []*Overlay
+	for _, o := range open {
+		uri := o.URI()
+		if s.IsBuiltin(uri) || s.view.FileKind(o) != source.Go {
 			continue
 		}
-		if !pgf.File.Package.IsValid() {
-			continue
+		if len(meta.ids[uri]) == 0 {
+			files = append(files, o)
 		}
-
-		scopes = append(scopes, fileLoadScope(file.URI()))
 	}
-
-	if len(scopes) == 0 {
+	if len(files) == 0 {
 		return nil
 	}
 
-	// The regtests match this exact log message, keep them in sync.
-	event.Log(ctx, "reloadOrphanedFiles reloading", tag.Query.Of(scopes))
-	err := s.load(ctx, false, scopes...)
+	// Filter to files that are not known to be unloadable.
+	s.mu.Lock()
+	loadable := files[:0]
+	for _, file := range files {
+		if _, unloadable := s.unloadableFiles[file.URI()]; !unloadable {
+			loadable = append(loadable, file)
+		}
+	}
+	files = loadable
+	s.mu.Unlock()
+
+	var uris []span.URI
+	for _, file := range files {
+		uris = append(uris, file.URI())
+	}
+
+	event.Log(ctx, "reloadOrphanedFiles reloading", tag.Files.Of(uris))
+
+	var g errgroup.Group
+
+	cpulimit := runtime.GOMAXPROCS(0)
+	g.SetLimit(cpulimit)
+
+	// Load files one-at-a-time. go/packages can return at most one
+	// command-line-arguments package per query.
+	for _, file := range files {
+		file := file
+		g.Go(func() error {
+			pgf, err := s.ParseGo(ctx, file, source.ParseHeader)
+			if err != nil || !pgf.File.Package.IsValid() {
+				return nil // need a valid header
+			}
+			return s.load(ctx, false, fileLoadScope(file.URI()))
+		})
+	}
 
 	// If we failed to load some files, i.e. they have no metadata,
 	// mark the failures so we don't bother retrying until the file's
 	// content changes.
 	//
-	// TODO(rfindley): is it possible that the the load stopped early for an
+	// TODO(rfindley): is it possible that the load stopped early for an
 	// unrelated error? If so, add a fallback?
-	//
-	// Check for context cancellation so that we don't incorrectly mark files
-	// as unloadable, but don't return before setting all workspace packages.
-	if ctx.Err() != nil {
-		return ctx.Err()
+
+	if err := g.Wait(); err != nil {
+		// Check for context cancellation so that we don't incorrectly mark files
+		// as unloadable, but don't return before setting all workspace packages.
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
+
+		if !errors.Is(err, errNoPackages) {
+			event.Error(ctx, "reloadOrphanedFiles: failed to load", err, tag.Files.Of(uris))
+		}
 	}
 
 	// If the context was not canceled, we assume that the result of loading
@@ -1624,51 +1663,239 @@
 	// prevents us from falling into recursive reloading where we only make a bit
 	// of progress each time.
 	s.mu.Lock()
-	for _, scope := range scopes {
+	defer s.mu.Unlock()
+	for _, file := range files {
 		// TODO(rfindley): instead of locking here, we should have load return the
 		// metadata graph that resulted from loading.
-		uri := span.URI(scope.(fileLoadScope))
-		if s.noValidMetadataForURILocked(uri) {
+		uri := file.URI()
+		if len(s.meta.ids[uri]) == 0 {
 			s.unloadableFiles[uri] = struct{}{}
 		}
 	}
-	s.mu.Unlock()
-
-	if err != nil && !errors.Is(err, errNoPackages) {
-		event.Error(ctx, "reloadOrphanedFiles: failed to load", err, tag.Query.Of(scopes))
-	}
 
 	return nil
 }
 
-func (s *snapshot) orphanedOpenFiles() []source.FileHandle {
+// OrphanedFileDiagnostics reports diagnostics describing why open files have
+// no packages or have only command-line-arguments packages.
+//
+// If the resulting diagnostic is nil, the file is either not orphaned or we
+// can't produce a good diagnostic.
+//
+// TODO(rfindley): reconcile the definition of "orphaned" here with
+// reloadOrphanedFiles. The latter does not include files with
+// command-line-arguments packages.
+func (s *snapshot) OrphanedFileDiagnostics(ctx context.Context) (map[span.URI]*source.Diagnostic, error) {
+	// Orphaned file diagnostics are queried from code actions to produce
+	// quick-fixes (and may be queried many times, once for each file).
+	//
+	// Because they are non-trivial to compute, record them optimistically to
+	// avoid most redundant work.
+	//
+	// This is a hacky workaround: in the future we should avoid recomputing
+	// anything when codeActions provide a diagnostic: simply read the published
+	// diagnostic, if it exists.
+	s.mu.Lock()
+	existing := s.orphanedFileDiagnostics
+	s.mu.Unlock()
+	if existing != nil {
+		return existing, nil
+	}
+
+	if err := s.awaitLoaded(ctx); err != nil {
+		return nil, err
+	}
+
+	var files []*Overlay
+
+searchOverlays:
+	for _, o := range s.overlays() {
+		uri := o.URI()
+		if s.IsBuiltin(uri) || s.view.FileKind(o) != source.Go {
+			continue
+		}
+		md, err := s.MetadataForFile(ctx, uri)
+		if err != nil {
+			return nil, err
+		}
+		for _, m := range md {
+			if !source.IsCommandLineArguments(m.ID) || m.Standalone {
+				continue searchOverlays
+			}
+		}
+		files = append(files, o)
+	}
+	if len(files) == 0 {
+		return nil, nil
+	}
+
+	loadedModFiles := make(map[span.URI]struct{}) // all mod files, including dependencies
+	ignoredFiles := make(map[span.URI]bool)       // files reported in packages.Package.IgnoredFiles
+
+	meta, err := s.AllMetadata(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, meta := range meta {
+		if meta.Module != nil && meta.Module.GoMod != "" {
+			gomod := span.URIFromPath(meta.Module.GoMod)
+			loadedModFiles[gomod] = struct{}{}
+		}
+		for _, ignored := range meta.IgnoredFiles {
+			ignoredFiles[ignored] = true
+		}
+	}
+
+	diagnostics := make(map[span.URI]*source.Diagnostic)
+	for _, fh := range files {
+		// Only warn about orphaned files if the file is well-formed enough to
+		// actually be part of a package.
+		//
+		// Use ParseGo, as for open files this is likely to be a cache hit (we'll have parsed it already).
+		pgf, err := s.ParseGo(ctx, fh, source.ParseHeader)
+		if err != nil {
+			continue
+		}
+		if !pgf.File.Name.Pos().IsValid() {
+			continue
+		}
+		rng, err := pgf.PosRange(pgf.File.Name.Pos(), pgf.File.Name.End())
+		if err != nil {
+			continue
+		}
+
+		var (
+			msg            string                // if non-empty, report a diagnostic with this message
+			suggestedFixes []source.SuggestedFix // associated fixes, if any
+		)
+
+		// If we have a relevant go.mod file, check whether the file is orphaned
+		// due to its go.mod file being inactive. We could also offer a
+		// prescriptive diagnostic in the case that there is no go.mod file, but it
+		// is harder to be precise in that case, and less important.
+		if goMod, err := nearestModFile(ctx, fh.URI(), s); err == nil && goMod != "" {
+			if _, ok := loadedModFiles[goMod]; !ok {
+				modDir := filepath.Dir(goMod.Filename())
+				viewDir := s.view.folder.Filename()
+
+				// When the module is underneath the view dir, we offer
+				// "use all modules" quick-fixes.
+				inDir := source.InDir(viewDir, modDir)
+
+				if rel, err := filepath.Rel(viewDir, modDir); err == nil {
+					modDir = rel
+				}
+
+				var fix string
+				if s.view.goversion >= 18 {
+					if s.view.gowork != "" {
+						fix = fmt.Sprintf("To fix this problem, you can add this module to your go.work file (%s)", s.view.gowork)
+						if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use`", command.RunGoWorkArgs{
+							ViewID: s.view.ID(),
+							Args:   []string{"use", modDir},
+						}); err == nil {
+							suggestedFixes = append(suggestedFixes, source.SuggestedFix{
+								Title:      "Use this module in your go.work file",
+								Command:    &cmd,
+								ActionKind: protocol.QuickFix,
+							})
+						}
+
+						if inDir {
+							if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use -r`", command.RunGoWorkArgs{
+								ViewID: s.view.ID(),
+								Args:   []string{"use", "-r", "."},
+							}); err == nil {
+								suggestedFixes = append(suggestedFixes, source.SuggestedFix{
+									Title:      "Use all modules in your workspace",
+									Command:    &cmd,
+									ActionKind: protocol.QuickFix,
+								})
+							}
+						}
+					} else {
+						fix = "To fix this problem, you can add a go.work file that uses this directory."
+
+						if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use`", command.RunGoWorkArgs{
+							ViewID:    s.view.ID(),
+							InitFirst: true,
+							Args:      []string{"use", modDir},
+						}); err == nil {
+							suggestedFixes = []source.SuggestedFix{
+								{
+									Title:      "Add a go.work file using this module",
+									Command:    &cmd,
+									ActionKind: protocol.QuickFix,
+								},
+							}
+						}
+
+						if inDir {
+							if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use -r`", command.RunGoWorkArgs{
+								ViewID:    s.view.ID(),
+								InitFirst: true,
+								Args:      []string{"use", "-r", "."},
+							}); err == nil {
+								suggestedFixes = append(suggestedFixes, source.SuggestedFix{
+									Title:      "Add a go.work file using all modules in your workspace",
+									Command:    &cmd,
+									ActionKind: protocol.QuickFix,
+								})
+							}
+						}
+					}
+				} else {
+					fix = `To work with multiple modules simultaneously, please upgrade to Go 1.18 or
+later, reinstall gopls, and use a go.work file.`
+				}
+				msg = fmt.Sprintf(`This file is in directory %q, which is not included in your workspace.
+%s
+See the documentation for more information on setting up your workspace:
+https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.`, modDir, fix)
+			}
+		}
+
+		if msg == "" && ignoredFiles[fh.URI()] {
+			// TODO(rfindley): use the constraint package to check if the file
+			// _actually_ satisfies the current build context.
+			hasConstraint := false
+			walkConstraints(pgf.File, func(constraint.Expr) bool {
+				hasConstraint = true
+				return false
+			})
+			var fix string
+			if hasConstraint {
+				fix = `This file may be excluded due to its build tags; try adding "-tags=<build tag>" to your gopls "buildFlags" configuration.
+See the documentation for more information on working with build tags:
+https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-string.`
+			} else if strings.Contains(filepath.Base(fh.URI().Filename()), "_") {
+				fix = `This file may be excluded due to its GOOS/GOARCH, or other build constraints.`
+			} else {
+				fix = `This file is ignored by your gopls build.` // we don't know why
+			}
+			msg = fmt.Sprintf("No packages found for open file %s.\n%s", fh.URI().Filename(), fix)
+		}
+
+		if msg != "" {
+			// Only report diagnostics if we detect an actual exclusion.
+			diagnostics[fh.URI()] = &source.Diagnostic{
+				URI:            fh.URI(),
+				Range:          rng,
+				Severity:       protocol.SeverityWarning,
+				Source:         source.ListError,
+				Message:        msg,
+				SuggestedFixes: suggestedFixes,
+			}
+		}
+	}
+
 	s.mu.Lock()
 	defer s.mu.Unlock()
-
-	var files []source.FileHandle
-	s.files.Range(func(uri span.URI, fh source.FileHandle) {
-		// Only consider open files, which will be represented as overlays.
-		if _, isOverlay := fh.(*Overlay); !isOverlay {
-			return
-		}
-		// Don't try to reload metadata for go.mod files.
-		if s.view.FileKind(fh) != source.Go {
-			return
-		}
-		// If the URI doesn't belong to this view, then it's not in a workspace
-		// package and should not be reloaded directly.
-		if !source.InDir(s.view.folder.Filename(), uri.Filename()) {
-			return
-		}
-		// Don't reload metadata for files we've already deemed unloadable.
-		if _, ok := s.unloadableFiles[uri]; ok {
-			return
-		}
-		if s.noValidMetadataForURILocked(uri) {
-			files = append(files, fh)
-		}
-	})
-	return files
+	if s.orphanedFileDiagnostics == nil { // another thread may have won the race
+		s.orphanedFileDiagnostics = diagnostics
+	}
+	return s.orphanedFileDiagnostics, nil
 }
 
 // TODO(golang/go#53756): this function needs to consider more than just the
@@ -1713,7 +1940,7 @@
 	reinit := false
 	wsModFiles, wsModFilesErr := s.workspaceModFiles, s.workspaceModFilesErr
 
-	if workURI := s.view.effectiveGOWORK(); workURI != "" {
+	if workURI, _ := s.view.GOWORK(); workURI != "" {
 		if change, ok := changes[workURI]; ok {
 			wsModFiles, wsModFilesErr = computeWorkspaceModFiles(ctx, s.view.gomod, workURI, s.view.effectiveGO111MODULE(), &unappliedChanges{
 				originalSnapshot: s,
@@ -2326,7 +2553,7 @@
 	return pgfs[0], nil
 }
 
-func (s *snapshot) IsBuiltin(ctx context.Context, uri span.URI) bool {
+func (s *snapshot) IsBuiltin(uri span.URI) bool {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 	// We should always get the builtin URI in a canonical form, so use simple
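
The rewritten reloadOrphanedOpenFiles above issues one go/packages query per orphaned file, bounded by GOMAXPROCS via an errgroup. A self-contained sketch of that pattern (loadAll and loadOne are illustrative names, not gopls APIs):

	package main

	import (
		"fmt"
		"runtime"

		"golang.org/x/sync/errgroup"
	)

	// loadAll runs loadOne for each file with bounded parallelism.
	func loadAll(files []string, loadOne func(string) error) error {
		var g errgroup.Group
		g.SetLimit(runtime.GOMAXPROCS(0)) // at most GOMAXPROCS loads in flight
		for _, f := range files {
			f := f // capture the loop variable (pre-Go 1.22 semantics)
			g.Go(func() error { return loadOne(f) })
		}
		return g.Wait() // first non-nil error, after all goroutines finish
	}

	func main() {
		err := loadAll([]string{"a.go", "b.go"}, func(f string) error {
			fmt.Println("loading", f)
			return nil
		})
		fmt.Println("err:", err)
	}
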
diff --git a/gopls/internal/lsp/cache/standalone_go115.go b/gopls/internal/lsp/cache/standalone_go115.go
deleted file mode 100644
index 79569ae..0000000
--- a/gopls/internal/lsp/cache/standalone_go115.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build !go1.16
-// +build !go1.16
-
-package cache
-
-// isStandaloneFile returns false, as the 'standaloneTags' setting is
-// unsupported on Go 1.15 and earlier.
-func isStandaloneFile(src []byte, standaloneTags []string) bool {
-	return false
-}
diff --git a/gopls/internal/lsp/cache/view.go b/gopls/internal/lsp/cache/view.go
index b6317e0..d999170 100644
--- a/gopls/internal/lsp/cache/view.go
+++ b/gopls/internal/lsp/cache/view.go
@@ -68,7 +68,7 @@
 	vulns   map[span.URI]*govulncheck.Result
 
 	// fs is the file source used to populate this view.
-	fs source.FileSource
+	fs *overlayFS
 
 	// seenFiles tracks files that the view has accessed.
 	// TODO(golang/go#57558): this notion is fundamentally problematic, and
@@ -145,13 +145,16 @@
 	}
 }
 
-// effectiveGOWORK returns the effective GOWORK value for this workspace, if
+// GOWORK returns the effective GOWORK value for this workspace, if
 // any, in URI form.
-func (w workspaceInformation) effectiveGOWORK() span.URI {
+//
+// The second result reports whether the effective GOWORK value is "" because
+// GOWORK=off.
+func (w workspaceInformation) GOWORK() (span.URI, bool) {
 	if w.gowork == "off" || w.gowork == "" {
-		return ""
+		return "", w.gowork == "off"
 	}
-	return span.URIFromPath(w.gowork)
+	return span.URIFromPath(w.gowork), false
 }
 
 // GO111MODULE returns the value of GO111MODULE to use for running the go
@@ -421,7 +424,7 @@
 		v.folder.Filename(),
 		v.workingDir().Filename(),
 		strings.TrimRight(v.workspaceInformation.goversionOutput, "\n"),
-		v.snapshot.ValidBuildConfiguration(),
+		v.snapshot.validBuildConfiguration(),
 		buildFlags,
 		v.goEnv,
 	)
@@ -436,7 +439,7 @@
 	return buf.String()
 }
 
-func (s *snapshot) RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error) error {
+func (s *snapshot) RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error {
 	return s.view.importsState.runProcessEnvFunc(ctx, s, fn)
 }
 
@@ -540,7 +543,7 @@
 	//
 	// TODO(rfindley): Make sure the go.work files are always known
 	// to the view.
-	if c.URI == v.effectiveGOWORK() {
+	if gowork, _ := v.GOWORK(); gowork == c.URI {
 		return true
 	}
 
@@ -588,22 +591,57 @@
 	v.snapshotWG.Wait()
 }
 
+// While go list ./... skips directories that begin with '.' or '_', or are
+// named 'testdata', gopls may still load them via file queries; filter them out.
 func (s *snapshot) IgnoredFile(uri span.URI) bool {
-	filename := uri.Filename()
-	var prefixes []string
-	if len(s.workspaceModFiles) == 0 {
-		for _, entry := range filepath.SplitList(s.view.gopath) {
-			prefixes = append(prefixes, filepath.Join(entry, "src"))
-		}
-	} else {
-		prefixes = append(prefixes, s.view.gomodcache)
-		for m := range s.workspaceModFiles {
-			prefixes = append(prefixes, span.Dir(m).Filename())
+	// Fast path: if uri doesn't contain '.', '_', or 'testdata', it is not
+	// possible that it is ignored.
+	{
+		uriStr := string(uri)
+		if !strings.Contains(uriStr, ".") && !strings.Contains(uriStr, "_") && !strings.Contains(uriStr, "testdata") {
+			return false
 		}
 	}
-	for _, prefix := range prefixes {
-		if strings.HasPrefix(filename, prefix) {
-			return checkIgnored(filename[len(prefix):])
+
+	s.ignoreFilterOnce.Do(func() {
+		var dirs []string
+		if len(s.workspaceModFiles) == 0 {
+			for _, entry := range filepath.SplitList(s.view.gopath) {
+				dirs = append(dirs, filepath.Join(entry, "src"))
+			}
+		} else {
+			dirs = append(dirs, s.view.gomodcache)
+			for m := range s.workspaceModFiles {
+				dirs = append(dirs, filepath.Dir(m.Filename()))
+			}
+		}
+		s.ignoreFilter = newIgnoreFilter(dirs)
+	})
+
+	return s.ignoreFilter.ignored(uri.Filename())
+}
+
+// An ignoreFilter implements go list's exclusion rules via its 'ignored' method.
+type ignoreFilter struct {
+	prefixes []string // root dirs, ending in filepath.Separator
+}
+
+// newIgnoreFilter returns a new ignoreFilter implementing exclusion rules
+// relative to the provided directories.
+func newIgnoreFilter(dirs []string) *ignoreFilter {
+	f := new(ignoreFilter)
+	for _, d := range dirs {
+		f.prefixes = append(f.prefixes, filepath.Clean(d)+string(filepath.Separator))
+	}
+	return f
+}
+
+func (f *ignoreFilter) ignored(filename string) bool {
+	for _, prefix := range f.prefixes {
+		if suffix := strings.TrimPrefix(filename, prefix); suffix != filename {
+			if checkIgnored(suffix) {
+				return true
+			}
 		}
 	}
 	return false
@@ -615,6 +653,8 @@
 //	Directory and file names that begin with "." or "_" are ignored
 //	by the go tool, as are directories named "testdata".
 func checkIgnored(suffix string) bool {
+	// Note: this could be further optimized by writing a HasSegment helper, a
+	// segment-boundary respecting variant of strings.Contains.
 	for _, component := range strings.Split(suffix, string(filepath.Separator)) {
 		if len(component) == 0 {
 			continue
@@ -712,16 +752,18 @@
 			// errors.
 			fh, err := s.ReadFile(ctx, modURI)
 			if err != nil {
-				if ctx.Err() == nil {
-					addError(modURI, err)
+				if ctx.Err() != nil {
+					return ctx.Err()
 				}
+				addError(modURI, err)
 				continue
 			}
 			parsed, err := s.ParseMod(ctx, fh)
 			if err != nil {
-				if ctx.Err() == nil {
-					addError(modURI, err)
+				if ctx.Err() != nil {
+					return ctx.Err()
 				}
+				addError(modURI, err)
 				continue
 			}
 			if parsed.File == nil || parsed.File.Module == nil {
@@ -741,8 +783,10 @@
 	// If we're loading anything, ensure we also load builtin,
 	// since it provides fake definitions (and documentation)
 	// for types like int that are used everywhere.
+	// ("unsafe" is also needed since its sole GoFiles is
+	// derived from that of "builtin" via a workaround in load.)
 	if len(scopes) > 0 {
-		scopes = append(scopes, packageLoadScope("builtin"))
+		scopes = append(scopes, packageLoadScope("builtin"), packageLoadScope("unsafe"))
 	}
 	loadErr = s.load(ctx, true, scopes...)
 
@@ -909,7 +953,7 @@
 	// TODO(golang/go#57514): eliminate the expandWorkspaceToModule setting
 	// entirely.
 	if v.Options().ExpandWorkspaceToModule && v.gomod != "" {
-		return span.Dir(v.gomod)
+		return span.URIFromPath(filepath.Dir(v.gomod.Filename()))
 	}
 	return v.folder
 }
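
The new GOWORK accessor above distinguishes "no go.work in effect" from an explicit GOWORK=off. A self-contained sketch mirroring that logic on the raw value of "go env GOWORK" (function and variable names illustrative):

	package main

	import "fmt"

	// effectiveGOWORK returns the effective go.work path, and whether an
	// empty result is due to an explicit GOWORK=off.
	func effectiveGOWORK(raw string) (path string, off bool) {
		if raw == "off" || raw == "" {
			return "", raw == "off"
		}
		return raw, false
	}

	func main() {
		for _, raw := range []string{"", "off", "/home/user/proj/go.work"} {
			path, off := effectiveGOWORK(raw)
			fmt.Printf("GOWORK=%q -> path=%q off=%v\n", raw, path, off)
		}
	}
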
diff --git a/gopls/internal/lsp/cache/view_test.go b/gopls/internal/lsp/cache/view_test.go
index 9e6d23b..90471ed 100644
--- a/gopls/internal/lsp/cache/view_test.go
+++ b/gopls/internal/lsp/cache/view_test.go
@@ -276,3 +276,32 @@
 	b, _ := json.MarshalIndent(x, "", " ")
 	return string(b)
 }
+
+func TestIgnoreFilter(t *testing.T) {
+	tests := []struct {
+		dirs []string
+		path string
+		want bool
+	}{
+		{[]string{"a"}, "a/testdata/foo", true},
+		{[]string{"a"}, "a/_ignore/foo", true},
+		{[]string{"a"}, "a/.ignore/foo", true},
+		{[]string{"a"}, "b/testdata/foo", false},
+		{[]string{"a"}, "testdata/foo", false},
+		{[]string{"a", "b"}, "b/testdata/foo", true},
+		{[]string{"a"}, "atestdata/foo", false},
+	}
+
+	for _, test := range tests {
+		// convert to filepaths, for convenience
+		for i, dir := range test.dirs {
+			test.dirs[i] = filepath.FromSlash(dir)
+		}
+		test.path = filepath.FromSlash(test.path)
+
+		f := newIgnoreFilter(test.dirs)
+		if got := f.ignored(test.path); got != test.want {
+			t.Errorf("newIgnoreFilter(%q).ignored(%q) = %t, want %t", test.dirs, test.path, got, test.want)
+		}
+	}
+}
diff --git a/gopls/internal/lsp/cmd/cmd.go b/gopls/internal/lsp/cmd/cmd.go
index 0cbbd60..02e135a 100644
--- a/gopls/internal/lsp/cmd/cmd.go
+++ b/gopls/internal/lsp/cmd/cmd.go
@@ -406,7 +406,7 @@
 	diagnosticsMu   sync.Mutex
 	diagnosticsDone chan struct{}
 
-	filesMu sync.Mutex
+	filesMu sync.Mutex // guards files map and each cmdFile.diagnostics
 	files   map[span.URI]*cmdFile
 }
 
@@ -518,6 +518,11 @@
 }
 
 func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error {
+	var debug = os.Getenv(DebugSuggestedFixEnvVar) == "true"
+	if debug {
+		log.Printf("PublishDiagnostics URI=%v Diagnostics=%v", p.URI, p.Diagnostics)
+	}
+
 	if p.URI == "gopls://diagnostics-done" {
 		close(c.diagnosticsDone)
 	}
@@ -530,7 +535,24 @@
 	defer c.filesMu.Unlock()
 
 	file := c.getFile(ctx, fileURI(p.URI))
-	file.diagnostics = p.Diagnostics
+	file.diagnostics = append(file.diagnostics, p.Diagnostics...)
+
+	// Perform a crude in-place deduplication.
+	// TODO(golang/go#60122): replace the ad-hoc gopls/diagnoseFiles
+	// non-standard request with support for textDocument/diagnostic,
+	// so that we don't need to do this de-duplication.
+	type key [5]interface{}
+	seen := make(map[key]bool)
+	out := file.diagnostics[:0]
+	for _, d := range file.diagnostics {
+		k := key{d.Range, d.Severity, d.Code, d.Source, d.Message}
+		if !seen[k] {
+			seen[k] = true
+			out = append(out, d)
+		}
+	}
+	file.diagnostics = out
+
 	return nil
 }
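
The deduplication above depends on Go map-key comparability: an array of interface values is a valid key as long as each element's dynamic type is comparable, which holds for the diagnostic fields used here. A stripped-down, self-contained sketch of the same pattern (the diag type is illustrative, not the LSP type):

	package main

	import "fmt"

	type diag struct {
		Source  string
		Message string
	}

	func dedup(in []diag) []diag {
		type key [2]interface{}
		seen := make(map[key]bool)
		out := in[:0] // reuse the backing array, as in PublishDiagnostics above
		for _, d := range in {
			k := key{d.Source, d.Message}
			if !seen[k] {
				seen[k] = true
				out = append(out, d)
			}
		}
		return out
	}

	func main() {
		ds := []diag{{"vet", "x"}, {"vet", "x"}, {"lint", "y"}}
		fmt.Println(dedup(ds)) // [{vet x} {lint y}]
	}
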
 
diff --git a/gopls/internal/lsp/cmd/stats.go b/gopls/internal/lsp/cmd/stats.go
index c5f0467..1b9df2f 100644
--- a/gopls/internal/lsp/cmd/stats.go
+++ b/gopls/internal/lsp/cmd/stats.go
@@ -17,12 +17,13 @@
 	"sync"
 	"time"
 
+	goplsbug "golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp"
 	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/debug"
+	"golang.org/x/tools/gopls/internal/lsp/filecache"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
-	goplsbug "golang.org/x/tools/internal/bug"
 )
 
 type stats struct {
@@ -47,6 +48,15 @@
 }
 
 func (s *stats) Run(ctx context.Context, args ...string) error {
+
+	// This undocumented environment variable allows
+	// the cmd integration test to trigger a call to bug.Report.
+	if msg := os.Getenv("TEST_GOPLS_BUG"); msg != "" {
+		filecache.Start() // effect: register bug handler
+		goplsbug.Report(msg)
+		return nil
+	}
+
 	if s.app.Remote != "" {
 		// stats does not work with -remote.
 		// Other sessions on the daemon may interfere with results.
@@ -126,12 +136,15 @@
 	}
 	defer conn.terminate(ctx)
 
-	// bug.List only reports bugs that have been encountered in the current
-	// process, so only list bugs after the initial workspace load has completed.
-	//
-	// TODO(rfindley): persist bugs to the gopls cache, so that they can be
-	// interrogated.
-	stats.Bugs = goplsbug.List()
+	// Gather bug reports produced by any process using
+	// this executable and persisted in the cache.
+	stats.BugReports = []string{} // non-nil for JSON
+	do("Gathering bug reports", func() error {
+		for _, report := range filecache.BugReports() {
+			stats.BugReports = append(stats.BugReports, string(report))
+		}
+		return nil
+	})
 
 	if _, err := do("Querying memstats", func() error {
 		memStats, err := conn.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{
@@ -184,7 +197,7 @@
 	GoVersion                    string
 	GoplsVersion                 string
 	InitialWorkspaceLoadDuration string // in time.Duration string form
-	Bugs                         []goplsbug.Bug
+	BugReports                   []string
 	MemStats                     command.MemStatsResult
 	WorkspaceStats               command.WorkspaceStatsResult
 	DirStats                     dirStats
diff --git a/gopls/internal/lsp/cmd/suggested_fix.go b/gopls/internal/lsp/cmd/suggested_fix.go
index d7d0b09..1128688 100644
--- a/gopls/internal/lsp/cmd/suggested_fix.go
+++ b/gopls/internal/lsp/cmd/suggested_fix.go
@@ -67,13 +67,24 @@
 	if err != nil {
 		return err
 	}
+	rng, err := file.mapper.SpanRange(from)
+	if err != nil {
+		return err
+	}
 
+	// Get diagnostics.
 	if err := conn.diagnoseFiles(ctx, []span.URI{uri}); err != nil {
 		return err
 	}
+	diagnostics := []protocol.Diagnostic{} // LSP wants non-nil slice
 	conn.Client.filesMu.Lock()
-	defer conn.Client.filesMu.Unlock()
+	diagnostics = append(diagnostics, file.diagnostics...)
+	conn.Client.filesMu.Unlock()
+	if debug {
+		log.Printf("file diagnostics: %#v", diagnostics)
+	}
 
+	// Request code actions
 	codeActionKinds := []protocol.CodeActionKind{protocol.QuickFix}
 	if len(args) > 1 {
 		codeActionKinds = []protocol.CodeActionKind{}
@@ -81,25 +92,13 @@
 			codeActionKinds = append(codeActionKinds, protocol.CodeActionKind(k))
 		}
 	}
-
-	rng, err := file.mapper.SpanRange(from)
-	if err != nil {
-		return err
-	}
-	if file.diagnostics == nil {
-		// LSP requires a slice, not a nil.
-		file.diagnostics = []protocol.Diagnostic{}
-	}
-	if debug {
-		log.Printf("file diagnostics: %#v", file.diagnostics)
-	}
 	p := protocol.CodeActionParams{
 		TextDocument: protocol.TextDocumentIdentifier{
 			URI: protocol.URIFromSpanURI(uri),
 		},
 		Context: protocol.CodeActionContext{
 			Only:        codeActionKinds,
-			Diagnostics: file.diagnostics,
+			Diagnostics: diagnostics,
 		},
 		Range: rng,
 	}
@@ -111,6 +110,7 @@
 		log.Printf("code actions: %#v", actions)
 	}
 
+	// Gather edits from matching code actions.
 	var edits []protocol.TextEdit
 	for _, a := range actions {
 		if a.Command != nil {
diff --git a/gopls/internal/lsp/cmd/test/cmdtest.go b/gopls/internal/lsp/cmd/test/cmdtest.go
deleted file mode 100644
index 7f8a13b..0000000
--- a/gopls/internal/lsp/cmd/test/cmdtest.go
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package cmdtest contains the test suite for the command line behavior of gopls.
-package cmdtest
diff --git a/gopls/internal/lsp/cmd/test/integration_test.go b/gopls/internal/lsp/cmd/test/integration_test.go
index f3130eb..c95790c 100644
--- a/gopls/internal/lsp/cmd/test/integration_test.go
+++ b/gopls/internal/lsp/cmd/test/integration_test.go
@@ -1,6 +1,8 @@
 // Copyright 2023 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
+
+// Package cmdtest contains the test suite for the command line behavior of gopls.
 package cmdtest
 
 // This file defines integration tests of each gopls subcommand that
@@ -28,6 +30,7 @@
 	"context"
 	"encoding/json"
 	"fmt"
+	"math/rand"
 	"os"
 	"path/filepath"
 	"regexp"
@@ -35,11 +38,11 @@
 	"testing"
 
 	exec "golang.org/x/sys/execabs"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp/cmd"
 	"golang.org/x/tools/gopls/internal/lsp/debug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/testenv"
 	"golang.org/x/tools/internal/tool"
 	"golang.org/x/tools/txtar"
@@ -694,6 +697,15 @@
 package foo
 `)
 
+	// Trigger a bug report with a distinctive string
+	// and check that it was durably recorded.
+	oops := fmt.Sprintf("oops-%d", rand.Int())
+	{
+		env := []string{"TEST_GOPLS_BUG=" + oops}
+		res := goplsWithEnv(t, tree, env, "stats")
+		res.checkExit(true)
+	}
+
 	res := gopls(t, tree, "stats")
 	res.checkExit(true)
 
@@ -728,6 +740,22 @@
 			t.Errorf("stats.%s = %d, want %d", check.field, check.got, check.want)
 		}
 	}
+
+	// Check that we got a BugReport with the expected message.
+	{
+		got := fmt.Sprint(stats.BugReports)
+		wants := []string{
+			"cmd/stats.go", // File containing call to bug.Report
+			oops,           // Description
+		}
+		for _, want := range wants {
+			if !strings.Contains(got, want) {
+				t.Errorf("BugReports does not contain %q. Got:<<%s>>", want, got)
+				break
+			}
+		}
+	}
+
 }
 
 // TestFix tests the 'fix' subcommand (../suggested_fix.go).
@@ -814,7 +842,12 @@
 
 // This function is a stand-in for gopls.main in ../../../../main.go.
 func goplsMain() {
-	bug.PanicOnBugs = true // (not in the production command)
+	// Panic on bugs (unlike the production gopls command),
+	// except in tests that inject calls to bug.Report.
+	if os.Getenv("TEST_GOPLS_BUG") == "" {
+		bug.PanicOnBugs = true
+	}
+
 	tool.Main(context.Background(), cmd.New("gopls", "", nil, hooks.Options), os.Args[1:])
 }
 
@@ -844,6 +877,10 @@
 
 // gopls executes gopls in a child process.
 func gopls(t *testing.T, dir string, args ...string) *result {
+	return goplsWithEnv(t, dir, nil, args...)
+}
+
+func goplsWithEnv(t *testing.T, dir string, env []string, args ...string) *result {
 	testenv.NeedsTool(t, "go")
 
 	// Catch inadvertent use of dir=".", which would make
@@ -857,6 +894,7 @@
 		"ENTRYPOINT=goplsMain",
 		fmt.Sprintf("%s=true", cmd.DebugSuggestedFixEnvVar),
 	)
+	goplsCmd.Env = append(goplsCmd.Env, env...)
 	goplsCmd.Dir = dir
 	goplsCmd.Stdout = new(bytes.Buffer)
 	goplsCmd.Stderr = new(bytes.Buffer)
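
goplsWithEnv above layers extra variables on top of the inherited environment of the child gopls process. A stripped-down, self-contained sketch of that mechanism (binary name and variable value illustrative):

	package main

	import (
		"os"
		"os/exec"
	)

	func main() {
		cmd := exec.Command("gopls", "stats")
		// Inherit the parent environment, then append the per-test variable,
		// mirroring goplsCmd.Env = append(goplsCmd.Env, env...) above.
		cmd.Env = append(os.Environ(), "TEST_GOPLS_BUG=oops-123")
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		_ = cmd.Run()
	}
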
diff --git a/gopls/internal/lsp/cmd/usage/workspace_symbol.hlp b/gopls/internal/lsp/cmd/usage/workspace_symbol.hlp
index a61b47b..ed22e98 100644
--- a/gopls/internal/lsp/cmd/usage/workspace_symbol.hlp
+++ b/gopls/internal/lsp/cmd/usage/workspace_symbol.hlp
@@ -9,5 +9,5 @@
 
 workspace_symbol-flags:
   -matcher=string
-    	specifies the type of matcher: fuzzy, caseSensitive, or caseInsensitive.
-    	The default is caseInsensitive.
+    	specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive.
+    	The default is caseinsensitive.
diff --git a/gopls/internal/lsp/cmd/workspace_symbol.go b/gopls/internal/lsp/cmd/workspace_symbol.go
index ed28df0..520d6bc 100644
--- a/gopls/internal/lsp/cmd/workspace_symbol.go
+++ b/gopls/internal/lsp/cmd/workspace_symbol.go
@@ -8,6 +8,7 @@
 	"context"
 	"flag"
 	"fmt"
+	"strings"
 
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
@@ -16,7 +17,7 @@
 
 // workspaceSymbol implements the workspace_symbol verb for gopls.
 type workspaceSymbol struct {
-	Matcher string `flag:"matcher" help:"specifies the type of matcher: fuzzy, caseSensitive, or caseInsensitive.\nThe default is caseInsensitive."`
+	Matcher string `flag:"matcher" help:"specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive.\nThe default is caseinsensitive."`
 
 	app *Application
 }
@@ -46,10 +47,10 @@
 		if opts != nil {
 			opts(o)
 		}
-		switch r.Matcher {
+		switch strings.ToLower(r.Matcher) {
 		case "fuzzy":
 			o.SymbolMatcher = source.SymbolFuzzy
-		case "caseSensitive":
+		case "casesensitive":
 			o.SymbolMatcher = source.SymbolCaseSensitive
 		case "fastfuzzy":
 			o.SymbolMatcher = source.SymbolFastFuzzy
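
Because the matcher value is now lower-cased before the switch, spellings that differ only in case select the same matcher. For example (symbol query illustrative), these invocations behave identically:

	gopls workspace_symbol -matcher=caseSensitive Server
	gopls workspace_symbol -matcher=casesensitive Server
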
diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go
index 5891cd2..8658ba55 100644
--- a/gopls/internal/lsp/code_action.go
+++ b/gopls/internal/lsp/code_action.go
@@ -176,6 +176,18 @@
 					},
 				})
 			}
+
+			diags, err := snapshot.OrphanedFileDiagnostics(ctx)
+			if err != nil {
+				return nil, err
+			}
+			if d, ok := diags[fh.URI()]; ok {
+				quickFixes, err := codeActionsMatchingDiagnostics(ctx, snapshot, diagnostics, []*source.Diagnostic{d})
+				if err != nil {
+					return nil, err
+				}
+				codeActions = append(codeActions, quickFixes...)
+			}
 		}
 		if ctx.Err() != nil {
 			return nil, ctx.Err()
@@ -255,6 +267,14 @@
 			codeActions = append(codeActions, fixes...)
 		}
 
+		if wanted[protocol.RefactorRewrite] {
+			fixes, err := refactoringFixes(ctx, snapshot, uri, params.Range)
+			if err != nil {
+				return nil, err
+			}
+			codeActions = append(codeActions, fixes...)
+		}
+
 	default:
 		// Unsupported file kind for a code action.
 		return nil, nil
@@ -395,6 +415,46 @@
 	return actions, nil
 }
 
+func refactoringFixes(ctx context.Context, snapshot source.Snapshot, uri span.URI, rng protocol.Range) ([]protocol.CodeAction, error) {
+	fh, err := snapshot.ReadFile(ctx, uri)
+	if err != nil {
+		return nil, err
+	}
+
+	pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull)
+	if err != nil {
+		return nil, err
+	}
+
+	start, end, err := pgf.RangePos(rng)
+	if err != nil {
+		return nil, err
+	}
+
+	var commands []protocol.Command
+	if _, ok, _ := source.CanInvertIfCondition(pgf.File, start, end); ok {
+		cmd, err := command.NewApplyFixCommand("Invert if condition", command.ApplyFixArgs{
+			URI:   protocol.URIFromSpanURI(uri),
+			Fix:   source.InvertIfCondition,
+			Range: rng,
+		})
+		if err != nil {
+			return nil, err
+		}
+		commands = append(commands, cmd)
+	}
+
+	var actions []protocol.CodeAction
+	for i := range commands {
+		actions = append(actions, protocol.CodeAction{
+			Title:   commands[i].Title,
+			Kind:    protocol.RefactorRewrite,
+			Command: &commands[i],
+		})
+	}
+	return actions, nil
+}
+
 func documentChanges(fh source.FileHandle, edits []protocol.TextEdit) []protocol.DocumentChanges {
 	return []protocol.DocumentChanges{
 		{
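
The refactor.rewrite action wired up above offers "Invert if condition" when the selection covers an if/else. Roughly, the fix negates the condition and swaps the branches while preserving behavior; an illustrative before/after (function names are made up for the example):

	package main

	import "fmt"

	// Before the refactoring:
	func classifyBefore(n int) string {
		if n%2 == 0 {
			return "even"
		} else {
			return "odd"
		}
	}

	// After "Invert if condition": negated condition, swapped branches.
	func classifyAfter(n int) string {
		if n%2 != 0 {
			return "odd"
		} else {
			return "even"
		}
	}

	func main() { fmt.Println(classifyBefore(4), classifyAfter(4)) }
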
diff --git a/gopls/internal/lsp/command.go b/gopls/internal/lsp/command.go
index 6fa8312..7236087 100644
--- a/gopls/internal/lsp/command.go
+++ b/gopls/internal/lsp/command.go
@@ -22,6 +22,7 @@
 
 	"golang.org/x/mod/modfile"
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/govulncheck"
 	"golang.org/x/tools/gopls/internal/lsp/cache"
 	"golang.org/x/tools/gopls/internal/lsp/command"
@@ -69,6 +70,7 @@
 	async       bool                 // whether to run the command asynchronously. Async commands can only return errors.
 	requireSave bool                 // whether all files must be saved for the command to work
 	progress    string               // title to use for progress reporting. If empty, no progress will be reported.
+	forView     string               // view to resolve to a snapshot; incompatible with forURI
 	forURI      protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil.
 }
 
@@ -103,6 +105,9 @@
 		}
 	}
 	var deps commandDeps
+	if cfg.forURI != "" && cfg.forView != "" {
+		return bug.Errorf("internal error: forURI=%q, forView=%q", cfg.forURI, cfg.forView)
+	}
 	if cfg.forURI != "" {
 		var ok bool
 		var release func()
@@ -114,6 +119,17 @@
 			}
 			return fmt.Errorf("invalid file URL: %v", cfg.forURI)
 		}
+	} else if cfg.forView != "" {
+		view, err := c.s.session.View(cfg.forView)
+		if err != nil {
+			return err
+		}
+		var release func()
+		deps.snapshot, release, err = view.Snapshot()
+		if err != nil {
+			return err
+		}
+		defer release()
 	}
 	ctx, cancel := context.WithCancel(xcontext.Detach(ctx))
 	if cfg.progress != "" {
@@ -576,40 +592,26 @@
 	}
 	modURI := snapshot.GoModForFile(uri)
 	sumURI := span.URIFromPath(strings.TrimSuffix(modURI.Filename(), ".mod") + ".sum")
-	modEdits, err := applyFileEdits(ctx, snapshot, modURI, newModBytes)
+	modEdits, err := collectFileEdits(ctx, snapshot, modURI, newModBytes)
 	if err != nil {
 		return err
 	}
-	sumEdits, err := applyFileEdits(ctx, snapshot, sumURI, newSumBytes)
+	sumEdits, err := collectFileEdits(ctx, snapshot, sumURI, newSumBytes)
 	if err != nil {
 		return err
 	}
-	changes := append(sumEdits, modEdits...)
-	if len(changes) == 0 {
-		return nil
-	}
-	documentChanges := []protocol.DocumentChanges{} // must be a slice
-	for _, change := range changes {
-		change := change
-		documentChanges = append(documentChanges, protocol.DocumentChanges{
-			TextDocumentEdit: &change,
-		})
-	}
-	response, err := s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{
-		Edit: protocol.WorkspaceEdit{
-			DocumentChanges: documentChanges,
-		},
-	})
-	if err != nil {
-		return err
-	}
-	if !response.Applied {
-		return fmt.Errorf("edits not applied because of %s", response.FailureReason)
-	}
-	return nil
+	return applyFileEdits(ctx, s.client, append(sumEdits, modEdits...))
 }
 
-func applyFileEdits(ctx context.Context, snapshot source.Snapshot, uri span.URI, newContent []byte) ([]protocol.TextDocumentEdit, error) {
+// collectFileEdits collects any file edits required to transform the snapshot
+// file specified by uri to the provided new content.
+//
+// If the file is not open, collectFileEdits simply writes the new content to
+// disk.
+//
+// TODO(rfindley): fix this API asymmetry. It should be up to the caller to
+// write the file or apply the edits.
+func collectFileEdits(ctx context.Context, snapshot source.Snapshot, uri span.URI, newContent []byte) ([]protocol.TextDocumentEdit, error) {
 	fh, err := snapshot.ReadFile(ctx, uri)
 	if err != nil {
 		return nil, err
@@ -618,6 +620,7 @@
 	if err != nil && !os.IsNotExist(err) {
 		return nil, err
 	}
+
 	if bytes.Equal(oldContent, newContent) {
 		return nil, nil
 	}
@@ -647,6 +650,31 @@
 	}}, nil
 }
 
+func applyFileEdits(ctx context.Context, cli protocol.Client, edits []protocol.TextDocumentEdit) error {
+	if len(edits) == 0 {
+		return nil
+	}
+	documentChanges := []protocol.DocumentChanges{} // must be a slice
+	for _, change := range edits {
+		change := change
+		documentChanges = append(documentChanges, protocol.DocumentChanges{
+			TextDocumentEdit: &change,
+		})
+	}
+	response, err := cli.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{
+		Edit: protocol.WorkspaceEdit{
+			DocumentChanges: documentChanges,
+		},
+	})
+	if err != nil {
+		return err
+	}
+	if !response.Applied {
+		return fmt.Errorf("edits not applied because of %s", response.FailureReason)
+	}
+	return nil
+}
+
 func runGoGetModule(invoke func(...string) (*bytes.Buffer, error), addRequire bool, args []string) error {
 	if addRequire {
 		if err := addModuleRequire(invoke, args); err != nil {
@@ -1038,3 +1066,82 @@
 
 	return stats
 }
+
+// RunGoWorkCommand invokes `go work <args>` with the provided arguments.
+//
+// args.InitFirst controls whether to first run `go work init`. This allows a
+// single command to both create and recursively populate a go.work file -- as
+// of writing there is no `go work init -r`.
+//
+// Some thought went into implementing this command. Unlike the go.mod commands
+// above, this command simply invokes the go command and relies on the client
+// to notify gopls of file changes via didChangeWatchedFile notifications.
+// We could instead run these commands with GOWORK set to a temp file, but that
+// poses the following problems:
+//   - directory locations in the resulting temp go.work file will be computed
+//     relative to the directory containing that go.work. If the go.work is in a
+//     tempdir, the directories will need to be translated to/from that dir.
+//   - it would be simpler to use a temp go.work file in the workspace
+//     directory, or whichever directory contains the real go.work file, but
+//     that sets a bad precedent of writing to a user-owned directory. We
+//     shouldn't start doing that.
+//   - Sending workspace edits to create a go.work file would require using
+//     the CreateFile resource operation, which would need to be tested in every
+//     client as we haven't used it before. We don't have time for that right
+//     now.
+//
+// Therefore, we simply require that the current go.work file is saved (if it
+// exists), and delegate to the go command.
+func (c *commandHandler) RunGoWorkCommand(ctx context.Context, args command.RunGoWorkArgs) error {
+	return c.run(ctx, commandConfig{
+		progress: "Running go work command",
+		forView:  args.ViewID,
+	}, func(ctx context.Context, deps commandDeps) (runErr error) {
+		snapshot := deps.snapshot
+		view := snapshot.View().(*cache.View)
+		viewDir := view.Folder().Filename()
+
+		// If the user has explicitly set GOWORK=off, we should warn them
+		// directly and avoid potentially misleading errors below.
+		goworkURI, off := view.GOWORK()
+		if off {
+			return fmt.Errorf("cannot modify go.work files when GOWORK=off")
+		}
+		gowork := goworkURI.Filename()
+
+		if goworkURI != "" {
+			fh, err := snapshot.ReadFile(ctx, goworkURI)
+			if err != nil {
+				return fmt.Errorf("reading current go.work file: %v", err)
+			}
+			if !fh.Saved() {
+				return fmt.Errorf("must save workspace file %s before running go work commands", goworkURI)
+			}
+		} else {
+			if !args.InitFirst {
+				// If go.work does not exist, we should have detected that and asked
+				// for InitFirst.
+				return bug.Errorf("internal error: cannot run go work command: required go.work file not found")
+			}
+			gowork = filepath.Join(viewDir, "go.work")
+			if err := c.invokeGoWork(ctx, viewDir, gowork, []string{"init"}); err != nil {
+				return fmt.Errorf("running `go work init`: %v", err)
+			}
+		}
+
+		return c.invokeGoWork(ctx, viewDir, gowork, args.Args)
+	})
+}
+
+func (c *commandHandler) invokeGoWork(ctx context.Context, viewDir, gowork string, args []string) error {
+	inv := gocommand.Invocation{
+		Verb:       "work",
+		Args:       args,
+		WorkingDir: viewDir,
+		Env:        append(os.Environ(), fmt.Sprintf("GOWORK=%s", gowork)),
+	}
+	if _, err := c.s.session.GoCommandRunner().Run(ctx, inv); err != nil {
+		return fmt.Errorf("running go work command: %v", err)
+	}
+	return nil
+}
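
A minimal, hypothetical sketch of constructing the new command payload via the generated constructor added in command_gen.go below (the view ID and title here are placeholders, not values from this change):

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/gopls/internal/lsp/command"
)

func main() {
	// Ask gopls to create a go.work file first (`go work init`) and then
	// run `go work use -r .` in the named view.
	cmd, err := command.NewRunGoWorkCommandCommand("Add modules to go.work", command.RunGoWorkArgs{
		ViewID:    "view-1", // placeholder for a real view ID reported by the server
		InitFirst: true,
		Args:      []string{"use", "-r", "."},
	})
	if err != nil {
		log.Fatal(err)
	}
	// The resulting protocol.Command is what a client would send via
	// workspace/executeCommand.
	fmt.Println(cmd.Command) // gopls.run_go_work_command
}
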
diff --git a/gopls/internal/lsp/command/command_gen.go b/gopls/internal/lsp/command/command_gen.go
index a6f9940..8003b17 100644
--- a/gopls/internal/lsp/command/command_gen.go
+++ b/gopls/internal/lsp/command/command_gen.go
@@ -34,6 +34,7 @@
 	RegenerateCgo         Command = "regenerate_cgo"
 	RemoveDependency      Command = "remove_dependency"
 	ResetGoModDiagnostics Command = "reset_go_mod_diagnostics"
+	RunGoWorkCommand      Command = "run_go_work_command"
 	RunGovulncheck        Command = "run_govulncheck"
 	RunTests              Command = "run_tests"
 	StartDebugging        Command = "start_debugging"
@@ -62,6 +63,7 @@
 	RegenerateCgo,
 	RemoveDependency,
 	ResetGoModDiagnostics,
+	RunGoWorkCommand,
 	RunGovulncheck,
 	RunTests,
 	StartDebugging,
@@ -162,6 +164,12 @@
 			return nil, err
 		}
 		return nil, s.ResetGoModDiagnostics(ctx, a0)
+	case "gopls.run_go_work_command":
+		var a0 RunGoWorkArgs
+		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+			return nil, err
+		}
+		return nil, s.RunGoWorkCommand(ctx, a0)
 	case "gopls.run_govulncheck":
 		var a0 VulncheckArgs
 		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -404,6 +412,18 @@
 	}, nil
 }
 
+func NewRunGoWorkCommandCommand(title string, a0 RunGoWorkArgs) (protocol.Command, error) {
+	args, err := MarshalArgs(a0)
+	if err != nil {
+		return protocol.Command{}, err
+	}
+	return protocol.Command{
+		Title:     title,
+		Command:   "gopls.run_go_work_command",
+		Arguments: args,
+	}, nil
+}
+
 func NewRunGovulncheckCommand(title string, a0 VulncheckArgs) (protocol.Command, error) {
 	args, err := MarshalArgs(a0)
 	if err != nil {
diff --git a/gopls/internal/lsp/command/interface.go b/gopls/internal/lsp/command/interface.go
index 969ed8a..1342e84 100644
--- a/gopls/internal/lsp/command/interface.go
+++ b/gopls/internal/lsp/command/interface.go
@@ -170,6 +170,10 @@
 	// This command is intended for internal use only, by the gopls stats
 	// command.
 	WorkspaceStats(context.Context) (WorkspaceStatsResult, error)
+
+	// RunGoWorkCommand: run `go work [args...]`, and apply the resulting go.work
+	// edits to the current go.work file.
+	RunGoWorkCommand(context.Context, RunGoWorkArgs) error
 }
 
 type RunTestsArgs struct {
@@ -447,3 +451,9 @@
 	CompiledGoFiles int // total number of compiled Go files across all packages
 	Modules         int // total number of unique modules
 }
+
+type RunGoWorkArgs struct {
+	ViewID    string   // ID of the view to run the command from
+	InitFirst bool     // Whether to run `go work init` first
+	Args      []string // Args to pass to `go work`
+}
diff --git a/gopls/internal/lsp/command/interface_test.go b/gopls/internal/lsp/command/interface_test.go
index e602293..2eb6f9a 100644
--- a/gopls/internal/lsp/command/interface_test.go
+++ b/gopls/internal/lsp/command/interface_test.go
@@ -14,7 +14,8 @@
 )
 
 func TestGenerated(t *testing.T) {
-	testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code.
+	testenv.NeedsGoPackages(t)
+	testenv.NeedsLocalXTools(t)
 
 	onDisk, err := ioutil.ReadFile("command_gen.go")
 	if err != nil {
diff --git a/gopls/internal/lsp/debounce.go b/gopls/internal/lsp/debounce.go
deleted file mode 100644
index bd59cf2..0000000
--- a/gopls/internal/lsp/debounce.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package lsp
-
-import (
-	"context"
-	"sync"
-	"time"
-)
-
-type debounceEvent struct {
-	order uint64
-	done  chan struct{}
-}
-
-type debouncer struct {
-	mu     sync.Mutex
-	events map[string]*debounceEvent
-}
-
-func newDebouncer() *debouncer {
-	return &debouncer{
-		events: make(map[string]*debounceEvent),
-	}
-}
-
-// debounce returns a channel that receives a boolean reporting whether,
-// by the time the delay channel receives a value, this call is (or will be)
-// the most recent call with the highest order number for its key.
-//
-// If ctx is done before the delay channel receives a value, the channel
-// reports false.
-func (d *debouncer) debounce(ctx context.Context, key string, order uint64, delay <-chan time.Time) <-chan bool {
-	okc := make(chan bool, 1)
-
-	d.mu.Lock()
-	if prev, ok := d.events[key]; ok {
-		if prev.order > order {
-			// If we have a logical ordering of events (as is the case for snapshots),
-			// don't overwrite a later event with an earlier event.
-			d.mu.Unlock()
-			okc <- false
-			return okc
-		}
-		close(prev.done)
-	}
-	done := make(chan struct{})
-	next := &debounceEvent{
-		order: order,
-		done:  done,
-	}
-	d.events[key] = next
-	d.mu.Unlock()
-
-	go func() {
-		ok := false
-		select {
-		case <-delay:
-			d.mu.Lock()
-			if d.events[key] == next {
-				ok = true
-				delete(d.events, key)
-			} else {
-				// The event was superseded before we acquired d.mu.
-			}
-			d.mu.Unlock()
-		case <-done:
-		case <-ctx.Done():
-		}
-		okc <- ok
-	}()
-
-	return okc
-}
diff --git a/gopls/internal/lsp/debounce_test.go b/gopls/internal/lsp/debounce_test.go
deleted file mode 100644
index aac7e6c..0000000
--- a/gopls/internal/lsp/debounce_test.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package lsp
-
-import (
-	"context"
-	"testing"
-	"time"
-)
-
-func TestDebouncer(t *testing.T) {
-	t.Parallel()
-
-	type event struct {
-		key       string
-		order     uint64
-		wantFired bool
-	}
-	tests := []struct {
-		label  string
-		events []*event
-	}{
-		{
-			label: "overridden",
-			events: []*event{
-				{key: "a", order: 1, wantFired: false},
-				{key: "a", order: 2, wantFired: true},
-			},
-		},
-		{
-			label: "distinct labels",
-			events: []*event{
-				{key: "a", order: 1, wantFired: true},
-				{key: "b", order: 2, wantFired: true},
-			},
-		},
-		{
-			label: "reverse order",
-			events: []*event{
-				{key: "a", order: 2, wantFired: true},
-				{key: "a", order: 1, wantFired: false},
-			},
-		},
-		{
-			label: "multiple overrides",
-			events: []*event{
-				{key: "a", order: 1, wantFired: false},
-				{key: "a", order: 2, wantFired: false},
-				{key: "a", order: 3, wantFired: false},
-				{key: "a", order: 4, wantFired: false},
-				{key: "a", order: 5, wantFired: true},
-			},
-		},
-	}
-	for _, test := range tests {
-		test := test
-		t.Run(test.label, func(t *testing.T) {
-			ctx := context.Background()
-			d := newDebouncer()
-
-			delays := make([]chan time.Time, len(test.events))
-			okcs := make([]<-chan bool, len(test.events))
-
-			// Register the events in deterministic order, synchronously.
-			for i, e := range test.events {
-				delays[i] = make(chan time.Time, 1)
-				okcs[i] = d.debounce(ctx, e.key, e.order, delays[i])
-			}
-
-			// Now see which event fired.
-			for i, okc := range okcs {
-				event := test.events[i]
-				delays[i] <- time.Now()
-				fired := <-okc
-				if fired != event.wantFired {
-					t.Errorf("[key: %q, order: %d]: fired = %t, want %t", event.key, event.order, fired, event.wantFired)
-				}
-			}
-		})
-	}
-}
-
-func TestDebouncer_ContextCancellation(t *testing.T) {
-	t.Parallel()
-
-	ctx, cancel := context.WithCancel(context.Background())
-	d := newDebouncer()
-	c := make(chan time.Time, 1)
-
-	okc := d.debounce(ctx, "", 0, c)
-	cancel()
-	if ok := <-okc; ok {
-		t.Error("<-debounce(ctx, ...) returned true after cancellation")
-	}
-}
diff --git a/gopls/internal/lsp/debug/serve.go b/gopls/internal/lsp/debug/serve.go
index 9882b46..3c17dad 100644
--- a/gopls/internal/lsp/debug/serve.go
+++ b/gopls/internal/lsp/debug/serve.go
@@ -26,10 +26,10 @@
 	"sync"
 	"time"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/cache"
 	"golang.org/x/tools/gopls/internal/lsp/debug/log"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/core"
 	"golang.org/x/tools/internal/event/export"
@@ -469,7 +469,7 @@
 			http.Error(w, "OK", 200)
 		})
 		mux.HandleFunc("/_makeabug", func(w http.ResponseWriter, r *http.Request) {
-			bug.Report("bug here", nil)
+			bug.Report("bug here")
 			http.Error(w, "made a bug", http.StatusOK)
 		})
 
diff --git a/gopls/internal/lsp/debug/trace.go b/gopls/internal/lsp/debug/trace.go
index 80cb3dc..31c5a53 100644
--- a/gopls/internal/lsp/debug/trace.go
+++ b/gopls/internal/lsp/debug/trace.go
@@ -35,7 +35,7 @@
 
         <H2>Recent spans (oldest first)</H2>
         <p>
-	A finite number of recent span start/end times are shown below.
+        A finite number of recent span start/end times are shown below.
         The nesting represents the children of a parent span (and the log events within a span).
         A span may appear twice: chronologically at toplevel, and nested within its parent.
         </p>
@@ -259,13 +259,10 @@
 // addRecentLocked appends a start or end event to the "recent" log,
 // evicting an old entry if necessary.
 func (t *traces) addRecentLocked(span *traceSpan, start bool) {
-	const (
-		maxRecent = 100 // number of log entries before age-based eviction
-		maxAge    = 1 * time.Minute
-	)
 	t.recent = append(t.recent, spanStartEnd{Start: start, Span: span})
 
-	for len(t.recent) > maxRecent && t.recent[0].Time().Before(time.Now().Add(-maxAge)) {
+	const maxRecent = 100 // number of log entries before eviction
+	for len(t.recent) > maxRecent {
 		t.recent[0] = spanStartEnd{} // aid GC
 		t.recent = t.recent[1:]
 		t.recentEvictions++
diff --git a/gopls/internal/lsp/diagnostics.go b/gopls/internal/lsp/diagnostics.go
index 0ba8399..90c2232 100644
--- a/gopls/internal/lsp/diagnostics.go
+++ b/gopls/internal/lsp/diagnostics.go
@@ -15,13 +15,13 @@
 	"sync"
 	"time"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/mod"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/lsp/template"
 	"golang.org/x/tools/gopls/internal/lsp/work"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 )
@@ -181,24 +181,17 @@
 		// does not analyze) packages directly affected by
 		// file modifications.
 		//
-		// The second phase runs analysis on the entire snapshot,
-		// and is debounced by the configured delay.
+		// The second phase runs after the delay, and does everything.
 		s.diagnoseChangedFiles(ctx, snapshot, changedURIs, onDisk)
 		s.publishDiagnostics(ctx, false, snapshot)
 
-		// We debounce diagnostics separately for each view, using the snapshot
-		// local ID as logical ordering.
-		//
-		// TODO(rfindley): it would be cleaner to simply put the diagnostic
-		// debouncer on the view, and remove the "key" argument to debouncing.
-		if ok := <-s.diagDebouncer.debounce(ctx, snapshot.View().ID(), uint64(snapshot.GlobalID()), time.After(delay)); ok {
-			s.diagnose(ctx, snapshot, analyzeOpenPackages)
-			s.publishDiagnostics(ctx, true, snapshot)
+		select {
+		case <-time.After(delay):
+		case <-ctx.Done():
+			return
 		}
-		return
 	}
 
-	// Ignore possible workspace configuration warnings in the normal flow.
 	s.diagnose(ctx, snapshot, analyzeOpenPackages)
 	s.publishDiagnostics(ctx, true, snapshot)
 }
@@ -221,7 +214,7 @@
 		}
 
 		// Don't request type-checking for builtin.go: it's not a real package.
-		if snapshot.IsBuiltin(ctx, uri) {
+		if snapshot.IsBuiltin(uri) {
 			continue
 		}
 
@@ -372,10 +365,10 @@
 		var hasNonIgnored, hasOpenFile bool
 		for _, uri := range m.CompiledGoFiles {
 			seen[uri] = struct{}{}
-			if !snapshot.IgnoredFile(uri) {
+			if !hasNonIgnored && !snapshot.IgnoredFile(uri) {
 				hasNonIgnored = true
 			}
-			if snapshot.IsOpen(uri) {
+			if !hasOpenFile && snapshot.IsOpen(uri) {
 				hasOpenFile = true
 			}
 		}
@@ -398,15 +391,14 @@
 	// Orphaned files.
 	// Confirm that every opened file belongs to a package (if any exist in
 	// the workspace). Otherwise, add a diagnostic to the file.
-	for _, o := range s.session.Overlays() {
-		if _, ok := seen[o.URI()]; ok {
-			continue
+	if diags, err := snapshot.OrphanedFileDiagnostics(ctx); err == nil {
+		for uri, diag := range diags {
+			s.storeDiagnostics(snapshot, uri, orphanedSource, []*source.Diagnostic{diag}, true)
 		}
-		diagnostic := s.checkForOrphanedFile(ctx, snapshot, o)
-		if diagnostic == nil {
-			continue
+	} else {
+		if ctx.Err() == nil {
+			event.Error(ctx, "computing orphaned file diagnostics", err, source.SnapshotLabels(snapshot)...)
 		}
-		s.storeDiagnostics(snapshot, o.URI(), orphanedSource, []*source.Diagnostic{diagnostic}, true)
 	}
 }
 
@@ -482,7 +474,7 @@
 	// Merge analysis diagnostics with package diagnostics, and store the
 	// resulting analysis diagnostics.
 	for uri, adiags := range analysisDiags {
-		if snapshot.IsBuiltin(ctx, uri) {
+		if snapshot.IsBuiltin(uri) {
 			bug.Reportf("go/analysis reported diagnostics for the builtin file: %v", adiags)
 			continue
 		}
@@ -513,7 +505,7 @@
 		}
 		// builtin.go exists only for documentation purposes, and is not valid Go code.
 		// Don't report distracting errors
-		if snapshot.IsBuiltin(ctx, uri) {
+		if snapshot.IsBuiltin(uri) {
 			bug.Reportf("type checking reported diagnostics for the builtin file: %v", diags)
 			continue
 		}
@@ -675,66 +667,6 @@
 	}
 }
 
-// checkForOrphanedFile checks that the given URIs can be mapped to packages.
-// If they cannot and the workspace is not otherwise unloaded, it also surfaces
-// a warning, suggesting that the user check the file for build tags.
-func (s *Server) checkForOrphanedFile(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) *source.Diagnostic {
-	// TODO(rfindley): this function may fail to produce a diagnostic for a
-	// variety of reasons, some of which should probably not be ignored. For
-	// example, should this function be tolerant of the case where fh does not
-	// exist, or does not have a package name?
-	//
-	// It would be better to panic or report a bug in several of the cases below,
-	// so that we can move toward guaranteeing we show the user a meaningful
-	// error whenever it makes sense.
-	if snapshot.View().FileKind(fh) != source.Go {
-		return nil
-	}
-	// builtin files won't have a package, but they are never orphaned.
-	if snapshot.IsBuiltin(ctx, fh.URI()) {
-		return nil
-	}
-
-	// This call has the effect of inserting fh into snapshot.files,
-	// where for better or worse (actually: just worse) it influences
-	// the sets of open, known, and orphaned files.
-	snapshot.ReadFile(ctx, fh.URI())
-
-	metas, _ := snapshot.MetadataForFile(ctx, fh.URI())
-	if len(metas) > 0 || ctx.Err() != nil {
-		return nil // file has a package (or cancelled)
-	}
-	// Inv: file does not belong to a package we know about.
-	pgf, err := snapshot.ParseGo(ctx, fh, source.ParseHeader)
-	if err != nil {
-		return nil
-	}
-	if !pgf.File.Name.Pos().IsValid() {
-		return nil
-	}
-	rng, err := pgf.NodeRange(pgf.File.Name)
-	if err != nil {
-		return nil
-	}
-	// If the file no longer has a name ending in .go, this diagnostic is wrong
-	if filepath.Ext(fh.URI().Filename()) != ".go" {
-		return nil
-	}
-	// TODO(rstambler): We should be able to parse the build tags in the
-	// file and show a more specific error message. For now, put the diagnostic
-	// on the package declaration.
-	return &source.Diagnostic{
-		URI:      fh.URI(),
-		Range:    rng,
-		Severity: protocol.SeverityWarning,
-		Source:   source.ListError,
-		Message: fmt.Sprintf(`No packages found for open file %s: %v.
-If this file contains build tags, try adding "-tags=<build tag>" to your gopls "buildFlags" configuration (see (https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-string).
-Otherwise, see the troubleshooting guidelines for help investigating (https://github.com/golang/tools/blob/master/gopls/doc/troubleshooting.md).
-`, fh.URI().Filename(), err),
-	}
-}
-
 // publishDiagnostics collects and publishes any unpublished diagnostic reports.
 func (s *Server) publishDiagnostics(ctx context.Context, final bool, snapshot source.Snapshot) {
 	ctx, done := event.Start(ctx, "Server.publishDiagnostics", source.SnapshotLabels(snapshot)...)
diff --git a/gopls/internal/lsp/fake/client.go b/gopls/internal/lsp/fake/client.go
index bb9bda0..b619ef5 100644
--- a/gopls/internal/lsp/fake/client.go
+++ b/gopls/internal/lsp/fake/client.go
@@ -14,7 +14,7 @@
 )
 
 // ClientHooks are a set of optional hooks called during handling of
-// the corresponding client method (see protocol.Client for the the
+// the corresponding client method (see protocol.Client for the
 // LSP server-to-client RPCs) in order to make test expectations
 // awaitable.
 type ClientHooks struct {
diff --git a/gopls/internal/lsp/fake/sandbox.go b/gopls/internal/lsp/fake/sandbox.go
index a155756..7afdb99 100644
--- a/gopls/internal/lsp/fake/sandbox.go
+++ b/gopls/internal/lsp/fake/sandbox.go
@@ -254,10 +254,11 @@
 // RunGoCommand executes a go command in the sandbox. If checkForFileChanges is
 // true, the sandbox scans the working directory and emits file change events
 // for any file changes it finds.
-func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args []string, checkForFileChanges bool) error {
+func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env []string, checkForFileChanges bool) error {
 	inv := sb.goCommandInvocation()
 	inv.Verb = verb
 	inv.Args = args
+	inv.Env = append(inv.Env, env...)
 	if dir != "" {
 		inv.WorkingDir = sb.Workdir.AbsPath(dir)
 	}
@@ -289,7 +290,7 @@
 func (sb *Sandbox) Close() error {
 	var goCleanErr error
 	if sb.gopath != "" {
-		goCleanErr = sb.RunGoCommand(context.Background(), "", "clean", []string{"-modcache"}, false)
+		goCleanErr = sb.RunGoCommand(context.Background(), "", "clean", []string{"-modcache"}, nil, false)
 	}
 	err := robustio.RemoveAll(sb.rootdir)
 	if err != nil || goCleanErr != nil {
diff --git a/gopls/internal/lsp/filecache/filecache.go b/gopls/internal/lsp/filecache/filecache.go
index b241add..c4e2ce4 100644
--- a/gopls/internal/lsp/filecache/filecache.go
+++ b/gopls/internal/lsp/filecache/filecache.go
@@ -24,19 +24,23 @@
 	"bytes"
 	"crypto/sha256"
 	"encoding/binary"
+	"encoding/hex"
 	"errors"
 	"fmt"
 	"hash/crc32"
 	"io"
+	"io/fs"
 	"log"
 	"os"
 	"path/filepath"
+	"runtime"
 	"sort"
 	"sync"
 	"sync/atomic"
 	"time"
 
-	"golang.org/x/tools/internal/bug"
+	"golang.org/x/tools/gopls/internal/bug"
+	"golang.org/x/tools/gopls/internal/lsp/lru"
 	"golang.org/x/tools/internal/lockedfile"
 )
 
@@ -48,11 +52,31 @@
 	go getCacheDir()
 }
 
+// As an optimization, use a 100MB in-memory LRU cache in front of filecache
+// operations. This reduces I/O for operations such as diagnostics or
+// implementations that repeatedly access the same cache entries.
+var memCache = lru.New(100 * 1e6)
+
+type memKey struct {
+	kind string
+	key  [32]byte
+}
+
 // Get retrieves from the cache and returns a newly allocated
 // copy of the value most recently supplied to Set(kind, key),
 // possibly by another process.
 // Get returns ErrNotFound if the value was not found.
 func Get(kind string, key [32]byte) ([]byte, error) {
+	// First consult the read-through memory cache.
+	// Note that memory cache hits do not update the times
+	// used for LRU eviction of the file-based cache.
+	if value := memCache.Get(memKey{kind, key}); value != nil {
+		return value.([]byte), nil
+	}
+
+	iolimit <- struct{}{}        // acquire a token
+	defer func() { <-iolimit }() // release a token
+
 	name, err := filename(kind, key)
 	if err != nil {
 		return nil, err
@@ -79,7 +103,16 @@
 	// issue #59289. TODO(adonovan): stop printing the entire file
 	// once we've seen enough reports to understand the pattern.
 	if binary.LittleEndian.Uint32(checksum) != crc32.ChecksumIEEE(value) {
-		return nil, bug.Errorf("internal error in filecache.Get(%q, %x): invalid checksum at end of %d-byte file %s:\n%q",
+		// Darwin has repeatedly displayed a problem (#59895)
+		// whereby the checksum portion (and only it) is zero,
+		// which suggests a bug in its file system. Don't
+		// panic, but keep an eye on other failures for now.
+		errorf := bug.Errorf
+		if binary.LittleEndian.Uint32(checksum) == 0 && runtime.GOOS == "darwin" {
+			errorf = fmt.Errorf
+		}
+
+		return nil, errorf("internal error in filecache.Get(%q, %x): invalid checksum at end of %d-byte file %s:\n%q",
 			kind, key, len(data), name, data)
 	}
 
@@ -97,6 +130,7 @@
 		return nil, fmt.Errorf("failed to update access time: %w", err)
 	}
 
+	memCache.Set(memKey{kind, key}, value, len(value))
 	return value, nil
 }
 
@@ -106,6 +140,11 @@
 
 // Set updates the value in the cache.
 func Set(kind string, key [32]byte, value []byte) error {
+	memCache.Set(memKey{kind, key}, value, len(value))
+
+	iolimit <- struct{}{}        // acquire a token
+	defer func() { <-iolimit }() // release a token
+
 	name, err := filename(kind, key)
 	if err != nil {
 		return err
@@ -145,6 +184,8 @@
 		0600)
 }
 
+var iolimit = make(chan struct{}, 128) // counting semaphore to limit I/O concurrency in Get and Set.
+
 var budget int64 = 1e9 // 1GB
 
 // SetBudget sets a soft limit on disk usage of the cache (in bytes)
@@ -420,3 +461,48 @@
 		}
 	}
 }
+
+const bugKind = "bug" // reserved kind for gopls bug reports
+
+func init() {
+	// Register a handler to durably record this process's first
+	// assertion failure in the cache so that we can ask users to
+	// share this information via the stats command.
+	bug.Handle(func(bug bug.Bug) {
+		// Wait for cache init (bugs in tests happen early).
+		_, _ = getCacheDir()
+
+		value := []byte(fmt.Sprintf("%s: %+v", time.Now().Format(time.RFC3339), bug))
+		key := sha256.Sum256(value)
+		_ = Set(bugKind, key, value)
+	})
+}
+
+// BugReports returns a new unordered array of the contents
+// of all cached bug reports produced by this executable.
+func BugReports() [][]byte {
+	dir, err := getCacheDir()
+	if err != nil {
+		return nil // ignore initialization errors
+	}
+	var result [][]byte
+	_ = filepath.Walk(filepath.Join(dir, bugKind),
+		func(path string, info fs.FileInfo, err error) error {
+			if err != nil {
+				return nil // ignore readdir/stat errors
+			}
+			if !info.IsDir() {
+				var key [32]byte
+				n, err := hex.Decode(key[:], []byte(filepath.Base(path)))
+				if err != nil || n != len(key) {
+					return nil // ignore malformed file names
+				}
+				content, err := Get(bugKind, key)
+				if err == nil { // ignore read errors
+					result = append(result, content)
+				}
+			}
+			return nil
+		})
+	return result
+}
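
The iolimit channel above is a counting semaphore bounding concurrent file I/O in Get and Set. A standalone sketch of the same pattern, with os.ReadFile standing in for the cache read purely for illustration:

package main

import (
	"fmt"
	"os"
	"sync"
)

// iolimit is a counting semaphore: at most 128 goroutines may hold a token at once.
var iolimit = make(chan struct{}, 128)

// readLimited reads a file while holding a token, so that no more than
// cap(iolimit) reads are in flight at any moment.
func readLimited(name string) ([]byte, error) {
	iolimit <- struct{}{}        // acquire a token
	defer func() { <-iolimit }() // release a token
	return os.ReadFile(name)
}

func main() {
	var wg sync.WaitGroup
	for _, name := range os.Args[1:] {
		name := name
		wg.Add(1)
		go func() {
			defer wg.Done()
			if data, err := readLimited(name); err == nil {
				fmt.Printf("%s: %d bytes\n", name, len(data))
			}
		}()
	}
	wg.Wait()
}
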
diff --git a/gopls/internal/lsp/filecache/filecache_test.go b/gopls/internal/lsp/filecache/filecache_test.go
index b027f6d..a078fd5 100644
--- a/gopls/internal/lsp/filecache/filecache_test.go
+++ b/gopls/internal/lsp/filecache/filecache_test.go
@@ -60,6 +60,9 @@
 
 // TestConcurrency exercises concurrent access to the same entry.
 func TestConcurrency(t *testing.T) {
+	if os.Getenv("GO_BUILDER_NAME") == "plan9-arm" {
+		t.Skip(`skipping on plan9-arm builder due to golang/go#58748: failing with 'mount rpc error'`)
+	}
 	const kind = "TestConcurrency"
 	key := uniqueKey()
 	const N = 100 // concurrency level
@@ -215,6 +218,7 @@
 		b.Fatal(err)
 	}
 	b.ResetTimer()
+	b.SetBytes(int64(len(value)))
 
 	var group errgroup.Group
 	group.SetLimit(50)
@@ -228,3 +232,34 @@
 		b.Fatal(err)
 	}
 }
+
+// These two benchmarks are asymmetric: the one for Get imposes a
+// modest bound on concurrency (50) whereas the one for Set imposes a
+// much higher concurrency (1000) to test the implementation's
+// self-imposed bound.
+
+func BenchmarkUncontendedSet(b *testing.B) {
+	const kind = "BenchmarkUncontendedSet"
+	key := uniqueKey()
+	var value [8192]byte
+
+	const P = 1000 // parallelism
+	b.SetBytes(P * int64(len(value)))
+
+	for i := 0; i < b.N; i++ {
+		// Perform P concurrent calls to Set. All must succeed.
+		var group errgroup.Group
+		for range [P]bool{} {
+			group.Go(func() error {
+				return filecache.Set(kind, key, value[:])
+			})
+		}
+		if err := group.Wait(); err != nil {
+			if strings.Contains(err.Error(), "operation not supported") ||
+				strings.Contains(err.Error(), "not implemented") {
+				b.Skipf("skipping: %v", err)
+			}
+			b.Fatal(err)
+		}
+	}
+}
diff --git a/gopls/internal/lsp/general.go b/gopls/internal/lsp/general.go
index ea0cd86..9d12f97 100644
--- a/gopls/internal/lsp/general.go
+++ b/gopls/internal/lsp/general.go
@@ -16,11 +16,11 @@
 	"strings"
 	"sync"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/debug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/jsonrpc2"
 )
@@ -63,17 +63,17 @@
 
 	if options.ShowBugReports {
 		// Report the next bug that occurs on the server.
-		bugCh := bug.Notify()
-		go func() {
-			b := <-bugCh
+		bug.Handle(func(b bug.Bug) {
 			msg := &protocol.ShowMessageParams{
 				Type:    protocol.Error,
 				Message: fmt.Sprintf("A bug occurred on the server: %s\nLocation:%s", b.Description, b.Key),
 			}
-			if err := s.eventuallyShowMessage(context.Background(), msg); err != nil {
-				log.Printf("error showing bug: %v", err)
-			}
-		}()
+			go func() {
+				if err := s.eventuallyShowMessage(context.Background(), msg); err != nil {
+					log.Printf("error showing bug: %v", err)
+				}
+			}()
+		})
 	}
 
 	folders := params.WorkspaceFolders
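
A minimal sketch of the callback-based bug API as used in this change, replacing the old bug.Notify channel (assuming the registered handler fires when bug.Report is called):

package main

import (
	"log"

	"golang.org/x/tools/gopls/internal/bug"
)

func main() {
	// Register a handler that runs when an assertion failure is reported.
	bug.Handle(func(b bug.Bug) {
		log.Printf("bug at %s: %s", b.Key, b.Description)
	})

	// Reporting a bug invokes the handler registered above.
	bug.Report("example: reached an unexpected state")
}
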
diff --git a/gopls/internal/lsp/lru/lru.go b/gopls/internal/lsp/lru/lru.go
new file mode 100644
index 0000000..5750f41
--- /dev/null
+++ b/gopls/internal/lsp/lru/lru.go
@@ -0,0 +1,153 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The lru package implements a fixed-size in-memory LRU cache.
+package lru
+
+import (
+	"container/heap"
+	"fmt"
+	"sync"
+)
+
+type any = interface{} // TODO: remove once gopls only builds at go1.18+
+
+// A Cache is a fixed-size in-memory LRU cache.
+type Cache struct {
+	capacity int
+
+	mu    sync.Mutex
+	used  int            // used capacity, in user-specified units
+	m     map[any]*entry // k/v lookup
+	lru   queue          // min-atime priority queue of *entry
+	clock int64          // clock time, incremented whenever the cache is updated
+}
+
+type entry struct {
+	key   any
+	value any
+	size  int   // caller-specified size
+	atime int64 // last access / set time
+	index int   // index of entry in the heap slice
+}
+
+// New creates a new Cache with the given capacity, which must be positive.
+//
+// The cache capacity uses arbitrary units, which are specified during the Set
+// operation.
+func New(capacity int) *Cache {
+	if capacity == 0 {
+		panic("zero capacity")
+	}
+
+	return &Cache{
+		capacity: capacity,
+		m:        make(map[any]*entry),
+	}
+}
+
+// Get retrieves the value for the specified key, or nil if the key is not
+// found.
+//
+// If the key is found, its access time is updated.
+func (c *Cache) Get(key any) any {
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	c.clock++ // every access updates the clock
+
+	if e, ok := c.m[key]; ok { // cache hit
+		e.atime = c.clock
+		heap.Fix(&c.lru, e.index)
+		return e.value
+	}
+
+	return nil
+}
+
+// Set stores a value for the specified key, using its given size to update the
+// current cache size, evicting old entries as necessary to fit in the cache
+// capacity.
+//
+// Size must be a non-negative value. If size is larger than the cache
+// capacity, the value is not stored and the cache is not modified.
+func (c *Cache) Set(key, value any, size int) {
+	if size < 0 {
+		panic(fmt.Sprintf("size must be non-negative, got %d", size))
+	}
+	if size > c.capacity {
+		return // uncacheable
+	}
+
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	c.clock++
+
+	// Remove the existing cache entry for key, if it exists.
+	e, ok := c.m[key]
+	if ok {
+		c.used -= e.size
+		heap.Remove(&c.lru, e.index)
+		delete(c.m, key)
+	}
+
+	// Evict entries until the new value will fit.
+	newUsed := c.used + size
+	if newUsed < 0 {
+		return // integer overflow; return silently
+	}
+	c.used = newUsed
+	for c.used > c.capacity {
+		// evict oldest entry
+		e = heap.Pop(&c.lru).(*entry)
+		c.used -= e.size
+		delete(c.m, e.key)
+	}
+
+	// Store the new value.
+	// Opt: e is evicted, so it can be reused to reduce allocation.
+	if e == nil {
+		e = new(entry)
+	}
+	e.key = key
+	e.value = value
+	e.size = size
+	e.atime = c.clock
+	c.m[e.key] = e
+	heap.Push(&c.lru, e)
+
+	if len(c.m) != len(c.lru) {
+		panic("map and LRU are inconsistent")
+	}
+}
+
+// -- priority queue boilerplate --
+
+// queue is a min-atime priority queue of cache entries.
+type queue []*entry
+
+func (q queue) Len() int { return len(q) }
+
+func (q queue) Less(i, j int) bool { return q[i].atime < q[j].atime }
+
+func (q queue) Swap(i, j int) {
+	q[i], q[j] = q[j], q[i]
+	q[i].index = i
+	q[j].index = j
+}
+
+func (q *queue) Push(x any) {
+	e := x.(*entry)
+	e.index = len(*q)
+	*q = append(*q, e)
+}
+
+func (q *queue) Pop() any {
+	last := len(*q) - 1
+	e := (*q)[last]
+	(*q)[last] = nil // aid GC
+	*q = (*q)[:last]
+	return e
+}
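
A brief usage sketch of the new lru package API defined above (capacity units are caller-defined; here they are string lengths):

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/lsp/lru"
)

func main() {
	c := lru.New(10) // capacity of 10 units; units are whatever sizes callers pass to Set

	c.Set("a", "123", 3)
	c.Set("b", "456", 3)
	fmt.Println(c.Get("a")) // 123 (and refreshes a's access time)

	// This entry needs 7 units, so the least recently used entry ("b") is evicted.
	c.Set("c", "7890123", 7)

	fmt.Println(c.Get("b")) // <nil>: evicted
	fmt.Println(c.Get("c")) // 7890123
}
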
diff --git a/gopls/internal/lsp/lru/lru_fuzz_test.go b/gopls/internal/lsp/lru/lru_fuzz_test.go
new file mode 100644
index 0000000..c5afcee
--- /dev/null
+++ b/gopls/internal/lsp/lru/lru_fuzz_test.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package lru_test
+
+import (
+	"testing"
+
+	"golang.org/x/tools/gopls/internal/lsp/lru"
+)
+
+// Simple fuzzing test for consistency.
+func FuzzCache(f *testing.F) {
+	type op struct {
+		set        bool
+		key, value byte
+	}
+	f.Fuzz(func(t *testing.T, data []byte) {
+		var ops []op
+		for len(data) >= 3 {
+			ops = append(ops, op{data[0]%2 == 0, data[1], data[2]})
+			data = data[3:]
+		}
+		cache := lru.New(100)
+		var reference [256]byte
+		for _, op := range ops {
+			if op.set {
+				reference[op.key] = op.value
+				cache.Set(op.key, op.value, 1)
+			} else {
+				if v := cache.Get(op.key); v != nil && v != reference[op.key] {
+					t.Fatalf("cache.Get(%d) = %d, want %d", op.key, v, reference[op.key])
+				}
+			}
+		}
+	})
+}
diff --git a/gopls/internal/lsp/lru/lru_test.go b/gopls/internal/lsp/lru/lru_test.go
new file mode 100644
index 0000000..165a647
--- /dev/null
+++ b/gopls/internal/lsp/lru/lru_test.go
@@ -0,0 +1,156 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lru_test
+
+import (
+	"bytes"
+	cryptorand "crypto/rand"
+	"fmt"
+	"log"
+	mathrand "math/rand"
+	"strings"
+	"testing"
+
+	"golang.org/x/sync/errgroup"
+	"golang.org/x/tools/gopls/internal/lsp/lru"
+)
+
+type any = interface{} // TODO: remove once gopls only builds at go1.18+
+
+func TestCache(t *testing.T) {
+	type get struct {
+		key  string
+		want any
+	}
+	type set struct {
+		key, value string
+	}
+
+	tests := []struct {
+		label string
+		steps []any
+	}{
+		{"empty cache", []any{
+			get{"a", nil},
+			get{"b", nil},
+		}},
+		{"zero-length string", []any{
+			set{"a", ""},
+			get{"a", ""},
+		}},
+		{"under capacity", []any{
+			set{"a", "123"},
+			set{"b", "456"},
+			get{"a", "123"},
+			get{"b", "456"},
+		}},
+		{"over capacity", []any{
+			set{"a", "123"},
+			set{"b", "456"},
+			set{"c", "78901"},
+			get{"a", nil},
+			get{"b", "456"},
+			get{"c", "78901"},
+		}},
+		{"access ordering", []any{
+			set{"a", "123"},
+			set{"b", "456"},
+			get{"a", "123"},
+			set{"c", "78901"},
+			get{"a", "123"},
+			get{"b", nil},
+			get{"c", "78901"},
+		}},
+	}
+
+	for _, test := range tests {
+		t.Run(test.label, func(t *testing.T) {
+			c := lru.New(10)
+			for i, step := range test.steps {
+				switch step := step.(type) {
+				case get:
+					if got := c.Get(step.key); got != step.want {
+						t.Errorf("#%d: c.Get(%q) = %q, want %q", i, step.key, got, step.want)
+					}
+				case set:
+					c.Set(step.key, step.value, len(step.value))
+				}
+			}
+		})
+	}
+}
+
+// TestConcurrency exercises concurrent access to the same entry.
+//
+// It is a copy of TestConcurrency from the filecache package.
+func TestConcurrency(t *testing.T) {
+	key := uniqueKey()
+	const N = 100 // concurrency level
+
+	// Construct N distinct values, each larger
+	// than a typical 4KB OS file buffer page.
+	var values [N][8192]byte
+	for i := range values {
+		if _, err := mathrand.Read(values[i][:]); err != nil {
+			t.Fatalf("rand: %v", err)
+		}
+	}
+
+	cache := lru.New(100 * 1e6) // 100MB cache
+
+	// get calls Get and verifies that the cache entry
+	// matches one of the values passed to Set.
+	get := func(mustBeFound bool) error {
+		got := cache.Get(key)
+		if got == nil {
+			if !mustBeFound {
+				return nil
+			}
+			return fmt.Errorf("Get did not return a value")
+		}
+		gotBytes := got.([]byte)
+		for _, want := range values {
+			if bytes.Equal(want[:], gotBytes) {
+				return nil // a match
+			}
+		}
+		return fmt.Errorf("Get returned a value that was never Set")
+	}
+
+	// Perform N concurrent calls to Set and Get.
+	// All sets must succeed.
+	// All gets must return nothing, or one of the Set values;
+	// there is no third possibility.
+	var group errgroup.Group
+	for i := range values {
+		i := i
+		v := values[i][:]
+		group.Go(func() error {
+			cache.Set(key, v, len(v))
+			return nil
+		})
+		group.Go(func() error { return get(false) })
+	}
+	if err := group.Wait(); err != nil {
+		if strings.Contains(err.Error(), "operation not supported") ||
+			strings.Contains(err.Error(), "not implemented") {
+			t.Skipf("skipping: %v", err)
+		}
+		t.Fatal(err)
+	}
+
+	// A final Get must report one of the values that was Set.
+	if err := get(true); err != nil {
+		t.Fatalf("final Get failed: %v", err)
+	}
+}
+
+// uniqueKey returns a key that has never been used before.
+func uniqueKey() (key [32]byte) {
+	if _, err := cryptorand.Read(key[:]); err != nil {
+		log.Fatalf("rand: %v", err)
+	}
+	return
+}
diff --git a/gopls/internal/lsp/lsp_test.go b/gopls/internal/lsp/lsp_test.go
index 9dec7d3..ed3baa2 100644
--- a/gopls/internal/lsp/lsp_test.go
+++ b/gopls/internal/lsp/lsp_test.go
@@ -9,13 +9,13 @@
 	"context"
 	"fmt"
 	"os"
-	"os/exec"
 	"path/filepath"
 	"sort"
 	"strings"
 	"testing"
 
 	"github.com/google/go-cmp/cmp"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/cache"
 	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/debug"
@@ -24,7 +24,6 @@
 	"golang.org/x/tools/gopls/internal/lsp/tests"
 	"golang.org/x/tools/gopls/internal/lsp/tests/compare"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/testenv"
 )
 
@@ -242,7 +241,7 @@
 
 func (r *runner) Diagnostics(t *testing.T, uri span.URI, want []*source.Diagnostic) {
 	// Get the diagnostics for this view if we have not done it before.
-	v := r.server.session.View(r.data.Config.Dir)
+	v := r.server.session.ViewByName(r.data.Config.Dir)
 	r.collectDiagnostics(v)
 	tests.CompareDiagnostics(t, uri, want, r.diagnostics[uri])
 }
@@ -386,39 +385,6 @@
 	return res, nil
 }
 
-func (r *runner) Format(t *testing.T, spn span.Span) {
-	uri := spn.URI()
-	filename := uri.Filename()
-	gofmted := r.data.Golden(t, "gofmt", filename, func() ([]byte, error) {
-		cmd := exec.Command("gofmt", filename)
-		out, _ := cmd.Output() // ignore error, sometimes we have intentionally ungofmt-able files
-		return out, nil
-	})
-
-	edits, err := r.server.Formatting(r.ctx, &protocol.DocumentFormattingParams{
-		TextDocument: protocol.TextDocumentIdentifier{
-			URI: protocol.URIFromSpanURI(uri),
-		},
-	})
-	if err != nil {
-		if len(gofmted) > 0 {
-			t.Error(err)
-		}
-		return
-	}
-	m, err := r.data.Mapper(uri)
-	if err != nil {
-		t.Fatal(err)
-	}
-	got, _, err := source.ApplyProtocolEdits(m, edits)
-	if err != nil {
-		t.Error(err)
-	}
-	if diff := compare.Bytes(gofmted, got); diff != "" {
-		t.Errorf("format failed for %s (-want +got):\n%s", filename, diff)
-	}
-}
-
 func (r *runner) SemanticTokens(t *testing.T, spn span.Span) {
 	uri := spn.URI()
 	filename := uri.Filename()
@@ -452,39 +418,6 @@
 	}
 }
 
-func (r *runner) Import(t *testing.T, spn span.Span) {
-	// Invokes textDocument/codeAction and applies all the "goimports" edits.
-
-	uri := spn.URI()
-	filename := uri.Filename()
-	actions, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{
-		TextDocument: protocol.TextDocumentIdentifier{
-			URI: protocol.URIFromSpanURI(uri),
-		},
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-	m, err := r.data.Mapper(uri)
-	if err != nil {
-		t.Fatal(err)
-	}
-	got := m.Content
-	if len(actions) > 0 {
-		res, err := applyTextDocumentEdits(r, actions[0].Edit.DocumentChanges)
-		if err != nil {
-			t.Fatal(err)
-		}
-		got = res[uri]
-	}
-	want := r.data.Golden(t, "goimports", filename, func() ([]byte, error) {
-		return got, nil
-	})
-	if diff := compare.Bytes(want, got); diff != "" {
-		t.Errorf("import failed for %s:\n%s", filename, diff)
-	}
-}
-
 func (r *runner) SuggestedFix(t *testing.T, spn span.Span, actionKinds []tests.SuggestedFix, expectedActions int) {
 	uri := spn.URI()
 	view, err := r.server.session.ViewOf(uri)
@@ -583,58 +516,6 @@
 	}
 }
 
-func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span) {
-	uri := start.URI()
-	m, err := r.data.Mapper(uri)
-	if err != nil {
-		t.Fatal(err)
-	}
-	spn := span.New(start.URI(), start.Start(), end.End())
-	rng, err := m.SpanRange(spn)
-	if err != nil {
-		t.Fatal(err)
-	}
-	actionsRaw, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{
-		TextDocument: protocol.TextDocumentIdentifier{
-			URI: protocol.URIFromSpanURI(uri),
-		},
-		Range: rng,
-		Context: protocol.CodeActionContext{
-			Only: []protocol.CodeActionKind{"refactor.extract"},
-		},
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-	var actions []protocol.CodeAction
-	for _, action := range actionsRaw {
-		if action.Command.Title == "Extract function" {
-			actions = append(actions, action)
-		}
-	}
-	// Hack: We assume that we only get one code action per range.
-	// TODO(rstambler): Support multiple code actions per test.
-	if len(actions) == 0 || len(actions) > 1 {
-		t.Fatalf("unexpected number of code actions, want 1, got %v", len(actions))
-	}
-	_, err = r.server.ExecuteCommand(r.ctx, &protocol.ExecuteCommandParams{
-		Command:   actions[0].Command.Command,
-		Arguments: actions[0].Command.Arguments,
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-	res := <-r.editRecv
-	for u, got := range res {
-		want := r.data.Golden(t, "functionextraction_"+tests.SpanName(spn), u.Filename(), func() ([]byte, error) {
-			return got, nil
-		})
-		if diff := compare.Bytes(want, got); diff != "" {
-			t.Errorf("function extraction failed for %s:\n%s", u.Filename(), diff)
-		}
-	}
-}
-
 func (r *runner) MethodExtraction(t *testing.T, start span.Span, end span.Span) {
 	uri := start.URI()
 	m, err := r.data.Mapper(uri)
@@ -990,35 +871,6 @@
 	return res, nil
 }
 
-func (r *runner) WorkspaceSymbols(t *testing.T, uri span.URI, query string, typ tests.WorkspaceSymbolsTestType) {
-	matcher := tests.WorkspaceSymbolsTestTypeToMatcher(typ)
-
-	original := r.server.session.Options()
-	modified := original
-	modified.SymbolMatcher = matcher
-	r.server.session.SetOptions(modified)
-	defer r.server.session.SetOptions(original)
-
-	params := &protocol.WorkspaceSymbolParams{
-		Query: query,
-	}
-	gotSymbols, err := r.server.Symbol(r.ctx, params)
-	if err != nil {
-		t.Fatal(err)
-	}
-	got, err := tests.WorkspaceSymbolsString(r.ctx, r.data, uri, gotSymbols)
-	if err != nil {
-		t.Fatal(err)
-	}
-	got = filepath.ToSlash(tests.Normalize(got, r.normalizers))
-	want := string(r.data.Golden(t, fmt.Sprintf("workspace_symbol-%s-%s", strings.ToLower(string(matcher)), query), uri.Filename(), func() ([]byte, error) {
-		return []byte(got), nil
-	}))
-	if diff := compare.Text(want, got); diff != "" {
-		t.Error(diff)
-	}
-}
-
 func (r *runner) SignatureHelp(t *testing.T, spn span.Span, want *protocol.SignatureHelp) {
 	m, err := r.data.Mapper(spn.URI())
 	if err != nil {
diff --git a/gopls/internal/lsp/mod/inlayhint.go b/gopls/internal/lsp/mod/inlayhint.go
index e494cc7..4f08dd2 100644
--- a/gopls/internal/lsp/mod/inlayhint.go
+++ b/gopls/internal/lsp/mod/inlayhint.go
@@ -18,46 +18,46 @@
 	if err != nil {
 		return nil, err
 	}
-	return unexpectedVersion(ctx, snapshot, pm), nil
-}
 
-// Compare the version of the module used in the snapshot's metadata with the
-// version requested by the module, in both cases, taking replaces into account.
-// Produce an InlayHint when the version is the module is not the one usedd.
-func unexpectedVersion(ctx context.Context, snapshot source.Snapshot, pm *source.ParsedModule) []protocol.InlayHint {
-	var ans []protocol.InlayHint
-	if pm.File == nil {
-		return nil
-	}
+	// Compare the version of the module used in the snapshot's metadata with the
+	// version requested by the module, in both cases, taking replaces into account.
+	// Produce an InlayHint when the version in the module is not the one used.
+
 	replaces := make(map[string]*modfile.Replace)
-	requires := make(map[string]*modfile.Require)
 	for _, x := range pm.File.Replace {
 		replaces[x.Old.Path] = x
 	}
+
+	requires := make(map[string]*modfile.Require)
 	for _, x := range pm.File.Require {
 		requires[x.Mod.Path] = x
 	}
-	am, _ := snapshot.AllMetadata(ctx)
+
+	am, err := snapshot.AllMetadata(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	var ans []protocol.InlayHint
 	seen := make(map[string]bool)
 	for _, meta := range am {
-		if meta == nil || meta.Module == nil || seen[meta.Module.Path] {
+		if meta.Module == nil || seen[meta.Module.Path] {
 			continue
 		}
 		seen[meta.Module.Path] = true
-		metaMod := meta.Module
-		metaVersion := metaMod.Version
-		if metaMod.Replace != nil {
-			metaVersion = metaMod.Replace.Version
+		metaVersion := meta.Module.Version
+		if meta.Module.Replace != nil {
+			metaVersion = meta.Module.Replace.Version
 		}
 		// These versions can be blank, as in gopls/go.mod's local replace
-		if oldrepl, ok := replaces[metaMod.Path]; ok && oldrepl.New.Version != metaVersion {
+		if oldrepl, ok := replaces[meta.Module.Path]; ok && oldrepl.New.Version != metaVersion {
 			ih := genHint(oldrepl.Syntax, oldrepl.New.Version, metaVersion, pm.Mapper)
 			if ih != nil {
 				ans = append(ans, *ih)
 			}
-		} else if oldreq, ok := requires[metaMod.Path]; ok && oldreq.Mod.Version != metaVersion {
+		} else if oldreq, ok := requires[meta.Module.Path]; ok && oldreq.Mod.Version != metaVersion {
 			// maybe it was replaced:
-			if _, ok := replaces[metaMod.Path]; ok {
+			if _, ok := replaces[meta.Module.Path]; ok {
 				continue
 			}
 			ih := genHint(oldreq.Syntax, oldreq.Mod.Version, metaVersion, pm.Mapper)
@@ -66,7 +66,7 @@
 			}
 		}
 	}
-	return ans
+	return ans, nil
 }
 
 func genHint(mline *modfile.Line, oldVersion, newVersion string, m *protocol.Mapper) *protocol.InlayHint {
diff --git a/gopls/internal/lsp/protocol/generate/README.md b/gopls/internal/lsp/protocol/generate/README.md
index c8047f3..af5f101 100644
--- a/gopls/internal/lsp/protocol/generate/README.md
+++ b/gopls/internal/lsp/protocol/generate/README.md
@@ -13,6 +13,7 @@
 exact version can be tied to a githash. By default, the command will download the `github.com/microsoft/vscode-languageserver-node` repository to a temporary directory.
 
 The specification has five sections
+
 1. Requests, which describe the Request and Response types for request methods (e.g., *textDocument/didChange*),
 2. Notifications, which describe the Request types for notification methods,
 3. Structures, which describe named struct-like types,
@@ -28,6 +29,7 @@
 Finally, the specified types are Typescript types, which are quite different from Go types.
 
 ### Optionality
+
 The specification can mark fields in structs as Optional. The client distinguishes between missing
 fields and `null` fields in some cases. The Go translation for an optional type
 should be making sure the field's value
@@ -35,6 +37,7 @@
 adding `*` to the field's type if the type is not a reference type.
 
 ### Types
+
 The specification uses a number of different types, only a few of which correspond directly to Go types.
 The specification's types are "base", "reference", "map", "literal", "stringLiteral", "tuple", "and", "or".
 The "base" types correspond directly to Go types, although some Go types needs to be chosen for `URI` and `DocumentUri`. (The "base" types`RegExp`, `BooleanLiteral`, `NumericLiteral` never occur.)
@@ -70,6 +73,7 @@
 have a single non-null component, and these are converted to the component type.
 
 ## Processing
+
 The code parses the json specification file, and scans all the types. It assigns names, as described
 above, to the types that are unnamed in the specification, and constructs Go equivalents as required.
 (Most of this code is in typenames.go.)
@@ -80,6 +84,7 @@
 And tsprotocol.go contains the type and const definitions.
 
 ### Accommodating gopls
+
 As the code generates output, mostly in generateoutput.go and main.go,
 it makes adjustments so that no changes are required to the existing Go code.
 (Organizing the computation this way makes the code's structure simpler, but results in
@@ -107,6 +112,7 @@
 whose type is an "or" of 3 stringLiterals, which just becomes a `string`.
 
 ### Checking
+
 `TestAll(t *testing.T)` checks that there are no unexpected fields in the json specification.
 
 While the code is executing, it checks that all the entries in the maps in tables.go are used.
@@ -119,6 +125,7 @@
 slightly between the new and the old, and is not worth fixing.)
 
 ### Some history
+
 The original stub code was written by hand, but with the protocol under active development, that
 couldn't last. The web page existed before the json specification, but it lagged the implementation
 and was hard to process by machine. So the earlier version of the generating code was written in Typescript, and
@@ -128,9 +135,10 @@
 The output was functional, but idiosyncratic, and the code was fragile and barely maintainable.
 
 ### The future
+
 Most of the adjustments using the maps in tables.go could be removed by making changes, mostly to names,
 in the gopls code. Using more "or" types in gopls requires more elaborate, but stereotyped, changes.
 But even without all the adjustments, making this its own module would face problems; a number of
 dependencies would have to be factored out. And, it is fragile. The custom unmarshaling code knows what
 types it expects. A design that return an 'any' on unexpected types would match the json
-'ignore unexpected values' philosophy better, but the the Go code would need extra checking.
+'ignore unexpected values' philosophy better, but the Go code would need extra checking.
diff --git a/gopls/internal/lsp/protocol/generate/main.go b/gopls/internal/lsp/protocol/generate/main.go
index d111491..bdf473d 100644
--- a/gopls/internal/lsp/protocol/generate/main.go
+++ b/gopls/internal/lsp/protocol/generate/main.go
@@ -31,7 +31,7 @@
 // For example, tag release/protocol/3.17.3 of the repo defines protocol version 3.17.0.
 // (Point releases are reflected in the git tag version even when they are cosmetic
 // and don't change the protocol.)
-var lspGitRef = "release/protocol/3.17.3-next.6"
+var lspGitRef = "release/protocol/3.17.4-next.0"
 
 var (
 	repodir   = flag.String("d", "", "directory containing clone of "+vscodeRepo)
diff --git a/gopls/internal/lsp/protocol/mapper.go b/gopls/internal/lsp/protocol/mapper.go
index d61524d..9e683d9 100644
--- a/gopls/internal/lsp/protocol/mapper.go
+++ b/gopls/internal/lsp/protocol/mapper.go
@@ -72,9 +72,9 @@
 	"sync"
 	"unicode/utf8"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 )
 
 // A Mapper wraps the content of a file and provides mapping
diff --git a/gopls/internal/lsp/protocol/tsclient.go b/gopls/internal/lsp/protocol/tsclient.go
index cfafecf..8fb3c2d 100644
--- a/gopls/internal/lsp/protocol/tsclient.go
+++ b/gopls/internal/lsp/protocol/tsclient.go
@@ -6,8 +6,8 @@
 
 package protocol
 
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.3-next.6 (hash 56c23c557e3568a9f56f42435fd5a80f9458957f).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.3-next.6/protocol/metaModel.json
+// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.0 (hash 5c6ec4f537f304aa1ad645b5fd2bbb757fc40ed1).
+// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.0/protocol/metaModel.json
 // LSP metaData.version = 3.17.0.
 
 import (
diff --git a/gopls/internal/lsp/protocol/tsjson.go b/gopls/internal/lsp/protocol/tsjson.go
index 320fa08..e5443cf 100644
--- a/gopls/internal/lsp/protocol/tsjson.go
+++ b/gopls/internal/lsp/protocol/tsjson.go
@@ -6,8 +6,8 @@
 
 package protocol
 
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.3-next.6 (hash 56c23c557e3568a9f56f42435fd5a80f9458957f).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.3-next.6/protocol/metaModel.json
+// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.0 (hash 5c6ec4f537f304aa1ad645b5fd2bbb757fc40ed1).
+// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.0/protocol/metaModel.json
 // LSP metaData.version = 3.17.0.
 
 import "encoding/json"
diff --git a/gopls/internal/lsp/protocol/tsprotocol.go b/gopls/internal/lsp/protocol/tsprotocol.go
index e639a57..8469aeb 100644
--- a/gopls/internal/lsp/protocol/tsprotocol.go
+++ b/gopls/internal/lsp/protocol/tsprotocol.go
@@ -6,8 +6,8 @@
 
 package protocol
 
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.3-next.6 (hash 56c23c557e3568a9f56f42435fd5a80f9458957f).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.3-next.6/protocol/metaModel.json
+// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.0 (hash 5c6ec4f537f304aa1ad645b5fd2bbb757fc40ed1).
+// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.0/protocol/metaModel.json
 // LSP metaData.version = 3.17.0.
 
 import "encoding/json"
diff --git a/gopls/internal/lsp/protocol/tsserver.go b/gopls/internal/lsp/protocol/tsserver.go
index e02e1fd..004a2e6 100644
--- a/gopls/internal/lsp/protocol/tsserver.go
+++ b/gopls/internal/lsp/protocol/tsserver.go
@@ -6,8 +6,8 @@
 
 package protocol
 
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.3-next.6 (hash 56c23c557e3568a9f56f42435fd5a80f9458957f).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.3-next.6/protocol/metaModel.json
+// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.0 (hash 5c6ec4f537f304aa1ad645b5fd2bbb757fc40ed1).
+// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.0/protocol/metaModel.json
 // LSP metaData.version = 3.17.0.
 
 import (
diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go
index 2d18496..29722c9 100644
--- a/gopls/internal/lsp/regtest/marker.go
+++ b/gopls/internal/lsp/regtest/marker.go
@@ -8,7 +8,6 @@
 	"bytes"
 	"context"
 	"encoding/json"
-	"errors"
 	"flag"
 	"fmt"
 	"go/token"
@@ -19,6 +18,7 @@
 	"path/filepath"
 	"reflect"
 	"regexp"
+	"runtime"
 	"sort"
 	"strings"
 	"testing"
@@ -98,11 +98,18 @@
 //
 // # Special files
 //
-// There are three types of file within the test archive that are given special
+// There are several types of file within the test archive that are given special
 // treatment by the test runner:
 //   - "flags": this file is treated as a whitespace-separated list of flags
-//     that configure the MarkerTest instance. For example, -min_go=go1.18 sets
-//     the minimum required Go version for the test.
+//     that configure the MarkerTest instance. Supported flags:
+//     -min_go=go1.18 sets the minimum Go version for the test;
+//     -cgo requires that CGO_ENABLED is set and the cgo tool is available;
+//     -write_sumfile=a,b,c instructs the test runner to generate go.sum files
+//     in these directories before running the test.
+//     -skip_goos=a,b,c instructs the test runner to skip the test for the
+//     listed GOOS values.
+//     TODO(rfindley): using build constraint expressions for -skip_goos would
+//     be clearer.
 //     TODO(rfindley): support flag values containing whitespace.
 //   - "settings.json": this file is parsed as JSON, and used as the
 //     session configuration (see gopls/doc/settings.md)
@@ -115,20 +122,44 @@
 //     Foo were of type *Golden, the test runner would convert the identifier a
 //     in the call @foo(a, "b", 3) into a *Golden by collecting golden file
 //     data starting with "@a/".
+//   - proxy files: any file starting with proxy/ is treated as a Go proxy
+//     file. If present, these files are written to a separate temporary
+//     directory and GOPROXY is set to file://<proxy directory>.
 //
 // # Marker types
 //
 // The following markers are supported within marker tests:
 //
+//   - codeaction(kind, start, end, golden): specifies a codeaction to request
+//     for the given range. To support multi-line ranges, the range is defined
+//     to be between start.Start and end.End. The golden directory contains
+//     changed file content after the code action is applied.
+//
+//   - codeactionerr(kind, start, end, wantError): specifies a codeaction that
+//     fails with an error that matches the expectation.
+//
 //   - complete(location, ...labels): specifies expected completion results at
 //     the given location.
 //
 //   - diag(location, regexp): specifies an expected diagnostic matching the
 //     given regexp at the given location. The test runner requires
-//     a 1:1 correspondence between observed diagnostics and diag annotations
+//     a 1:1 correspondence between observed diagnostics and diag annotations.
+//     The diagnostic's source and kind fields are ignored, to reduce fuss.
+//
+//     The specified location must match the start position of the diagnostic,
+//     but end positions are ignored.
+//
+//     TODO(adonovan): in the older marker framework, the annotation asserted
+//     two additional fields (source="compiler", kind="error"). Restore them?
 //
 //   - def(src, dst location): perform a textDocument/definition request at
-//     the src location, and check the the result points to the dst location.
+//     the src location, and check that the result points to the dst location.
+//
+//   - format(golden): perform a textDocument/format request for the enclosing
+//     file, and compare against the named golden file. If the formatting
+//     request succeeds, the golden file must contain the resulting formatted
+//     source. If the formatting request fails, the golden file must contain
+//     the error message.
 //
 //   - hover(src, dst location, g Golden): perform a textDocument/hover at the
 //     src location, and checks that the result is the dst location, with hover
@@ -170,6 +201,14 @@
 //     location information. There is no point to using more than one
 //     @symbol marker in a given file.
 //
+//   - workspacesymbol(query, golden): makes a workspace/symbol request for the
+//     given query, formats the response with one symbol per line, and compares
+//     against the named golden file. As the workspace/symbol request is by
+//     definition workspace-wide, the location of the workspacesymbol marker
+//     does not matter. Each line is of the form:
+//
+//     location name kind
+//
 // # Argument conversion
 //
 // Marker arguments are first parsed by the go/expect package, which accepts
@@ -258,6 +297,9 @@
 //   - parallelize/optimize test execution
 //   - reorganize regtest packages (and rename to just 'test'?)
 //   - Rename the files .txtar.
+//   - Provide some means by which locations in the standard library
+//     (or builtin.go) can be named, so that, for example, we can
+//     assert that MyError implements the built-in error type.
 //
 // Existing marker tests (in ../testdata) to port:
 //   - CallHierarchy
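
To make the archive layout described above concrete, here is a hypothetical minimal marker-test archive; it is a sketch only. The module path, flag value, settings key, and marker arguments are illustrative assumptions based on the documented rules, not content taken from the testdata directory; proxy files (under proxy/) and golden sections (files named "@identifier/...") would appear as additional txtar files in the same way.

	-- flags --
	-min_go=go1.18

	-- settings.json --
	{"symbolScope": "workspace"}

	-- go.mod --
	module fake.test/mod

	go 1.18

	-- a.go --
	package a

	var _ = undefinedName //@diag("undefinedName", re"undefined|undeclared")
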
@@ -301,6 +343,12 @@
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
+			for _, goos := range test.skipGOOS {
+				if runtime.GOOS == goos {
+					t.Skipf("skipping on %s due to -skip_goos", runtime.GOOS)
+				}
+			}
+
 			// TODO(rfindley): it may be more useful to have full support for build
 			// constraints.
 			if test.minGoVersion != "" {
@@ -310,6 +358,9 @@
 				}
 				testenv.NeedsGo1Point(t, go1point)
 			}
+			if test.cgo {
+				testenv.NeedsTool(t, "cgo")
+			}
 			config := fake.EditorConfig{
 				Settings: test.settings,
 				Env:      test.env,
@@ -320,10 +371,10 @@
 				}
 				config.Settings["diagnosticsDelay"] = "10ms"
 			}
-			run := &markerTestRun{
-				test: test,
-				env:  newEnv(t, cache, test.files, config),
 
+			run := &markerTestRun{
+				test:      test,
+				env:       newEnv(t, cache, test.files, test.proxyFiles, test.writeGoSum, config),
 				locations: make(map[expect.Identifier]protocol.Location),
 				diags:     make(map[protocol.Location][]protocol.Diagnostic),
 			}
@@ -360,8 +411,11 @@
 				uri := run.env.Sandbox.Workdir.URI(path)
 				for _, diag := range params.Diagnostics {
 					loc := protocol.Location{
-						URI:   uri,
-						Range: diag.Range,
+						URI: uri,
+						Range: protocol.Range{
+							Start: diag.Range.Start,
+							End:   diag.Range.Start, // ignore end positions
+						},
 					}
 					run.diags[loc] = append(run.diags[loc], diag)
 				}
@@ -417,6 +471,11 @@
 	note *expect.Note
 }
 
+// server returns the LSP server for the marker test run.
+func (m marker) server() protocol.Server {
+	return m.run.env.Editor.Server
+}
+
 // errorf reports an error with a prefix indicating the position of the marker note.
 //
 // It formats the error message using mark.sprintf.
@@ -482,17 +541,21 @@
 // Marker funcs should not mutate the test environment (e.g. via opening files
 // or applying edits in the editor).
 var markerFuncs = map[string]markerFunc{
-	"complete":       makeMarkerFunc(completeMarker),
-	"def":            makeMarkerFunc(defMarker),
-	"diag":           makeMarkerFunc(diagMarker),
-	"hover":          makeMarkerFunc(hoverMarker),
-	"implementation": makeMarkerFunc(implementationMarker),
-	"loc":            makeMarkerFunc(locMarker),
-	"rename":         makeMarkerFunc(renameMarker),
-	"renameerr":      makeMarkerFunc(renameErrMarker),
-	"suggestedfix":   makeMarkerFunc(suggestedfixMarker),
-	"symbol":         makeMarkerFunc(symbolMarker),
-	"refs":           makeMarkerFunc(refsMarker),
+	"codeaction":      makeMarkerFunc(codeActionMarker),
+	"codeactionerr":   makeMarkerFunc(codeActionErrMarker),
+	"complete":        makeMarkerFunc(completeMarker),
+	"def":             makeMarkerFunc(defMarker),
+	"diag":            makeMarkerFunc(diagMarker),
+	"hover":           makeMarkerFunc(hoverMarker),
+	"format":          makeMarkerFunc(formatMarker),
+	"implementation":  makeMarkerFunc(implementationMarker),
+	"loc":             makeMarkerFunc(locMarker),
+	"rename":          makeMarkerFunc(renameMarker),
+	"renameerr":       makeMarkerFunc(renameErrMarker),
+	"suggestedfix":    makeMarkerFunc(suggestedfixMarker),
+	"symbol":          makeMarkerFunc(symbolMarker),
+	"refs":            makeMarkerFunc(refsMarker),
+	"workspacesymbol": makeMarkerFunc(workspaceSymbolMarker),
 }
 
 // markerTest holds all the test data extracted from a test txtar archive.
@@ -500,20 +563,24 @@
 // See the documentation for RunMarkerTests for more information on the archive
 // format.
 type markerTest struct {
-	name     string                 // relative path to the txtar file in the testdata dir
-	fset     *token.FileSet         // fileset used for parsing notes
-	content  []byte                 // raw test content
-	archive  *txtar.Archive         // original test archive
-	settings map[string]interface{} // gopls settings
-	env      map[string]string      // editor environment
-	files    map[string][]byte      // data files from the archive (excluding special files)
-	notes    []*expect.Note         // extracted notes from data files
-	golden   map[string]*Golden     // extracted golden content, by identifier name
+	name       string                 // relative path to the txtar file in the testdata dir
+	fset       *token.FileSet         // fileset used for parsing notes
+	content    []byte                 // raw test content
+	archive    *txtar.Archive         // original test archive
+	settings   map[string]interface{} // gopls settings
+	env        map[string]string      // editor environment
+	proxyFiles map[string][]byte      // proxy content
+	files      map[string][]byte      // data files from the archive (excluding special files)
+	notes      []*expect.Note         // extracted notes from data files
+	golden     map[string]*Golden     // extracted golden content, by identifier name
 
 	// flags holds flags extracted from the special "flags" archive file.
 	flags []string
 	// Parsed flags values.
 	minGoVersion string
+	cgo          bool
+	writeGoSum   []string // comma-separated dirs to write go.sum files for
+	skipGOOS     []string // comma-separated GOOS values to skip the test on
 }
 
 // flagSet returns the flagset used for parsing the special "flags" file in the
@@ -521,9 +588,28 @@
 func (t *markerTest) flagSet() *flag.FlagSet {
 	flags := flag.NewFlagSet(t.name, flag.ContinueOnError)
 	flags.StringVar(&t.minGoVersion, "min_go", "", "if set, the minimum go1.X version required for this test")
+	flags.BoolVar(&t.cgo, "cgo", false, "if set, requires cgo (both the cgo tool and CGO_ENABLED=1)")
+	flags.Var((*stringListValue)(&t.writeGoSum), "write_sumfile", "if set, write the sumfile for these directories")
+	flags.Var((*stringListValue)(&t.skipGOOS), "skip_goos", "if set, skip this test on these GOOS values")
 	return flags
 }
 
+// stringListValue implements flag.Value.
+type stringListValue []string
+
+func (l *stringListValue) Set(s string) error {
+	if s != "" {
+		for _, d := range strings.Split(s, ",") {
+			*l = append(*l, strings.TrimSpace(d))
+		}
+	}
+	return nil
+}
+
+func (l stringListValue) String() string {
+	return strings.Join([]string(l), ",")
+}
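
As a quick illustration of how the comma-separated flag values above are parsed, here is a small self-contained Go sketch using the same flag.Value pattern; the flag set name, flag registration, and input are hypothetical and independent of the marker-test harness.

	package main

	import (
		"flag"
		"fmt"
		"strings"
	)

	// listValue mirrors stringListValue above: each Set call splits its
	// argument on commas and appends the trimmed parts.
	type listValue []string

	func (l *listValue) Set(s string) error {
		if s != "" {
			for _, d := range strings.Split(s, ",") {
				*l = append(*l, strings.TrimSpace(d))
			}
		}
		return nil
	}

	func (l listValue) String() string { return strings.Join([]string(l), ",") }

	func main() {
		var dirs listValue
		fs := flag.NewFlagSet("demo", flag.ContinueOnError)
		fs.Var(&dirs, "write_sumfile", "directories to write go.sum files for")
		_ = fs.Parse([]string{"-write_sumfile=a, b,c"})
		fmt.Printf("%q\n", dirs) // ["a" "b" "c"]
	}
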
+
 func (t *markerTest) getGolden(id string) *Golden {
 	golden, ok := t.golden[id]
 	// If there was no golden content for this identifier, we must create one
@@ -589,9 +675,6 @@
 //
 // See the documentation for RunMarkerTests for more details on the test data
 // archive.
-//
-// TODO(rfindley): this test could sanity check the results. For example, it is
-// too easy to write "// @" instead of "//@", which we will happy skip silently.
 func loadMarkerTests(dir string) ([]*markerTest, error) {
 	var tests []*markerTest
 	err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
@@ -600,6 +683,7 @@
 			if err != nil {
 				return err
 			}
+
 			name := strings.TrimPrefix(path, dir+string(filepath.Separator))
 			test, err := loadMarkerTest(name, content)
 			if err != nil {
@@ -665,11 +749,26 @@
 			}
 			test.golden[id].data[name] = file.Data
 
+		case strings.HasPrefix(file.Name, "proxy/"):
+			name := file.Name[len("proxy/"):]
+			if test.proxyFiles == nil {
+				test.proxyFiles = make(map[string][]byte)
+			}
+			test.proxyFiles[name] = file.Data
+
 		default: // ordinary file content
 			notes, err := expect.Parse(test.fset, file.Name, file.Data)
 			if err != nil {
 				return nil, fmt.Errorf("parsing notes in %q: %v", file.Name, err)
 			}
+
+			// Reject common misspelling: "// @mark".
+			// TODO(adonovan): permit "// @" within a string. Detect multiple spaces.
+			if i := bytes.Index(file.Data, []byte("// @")); i >= 0 {
+				line := 1 + bytes.Count(file.Data[:i], []byte("\n"))
+				return nil, fmt.Errorf("%s:%d: unwanted space before marker (// @)", file.Name, line)
+			}
+
 			test.notes = append(test.notes, notes...)
 			test.files[file.Name] = file.Data
 		}
@@ -719,6 +818,8 @@
 		default:
 			if _, ok := test.files[file.Name]; ok { // ordinary file
 				arch.Files = append(arch.Files, file)
+			} else if strings.HasPrefix(file.Name, "proxy/") { // proxy file
+				arch.Files = append(arch.Files, file)
 			} else if data, ok := updatedGolden[file.Name]; ok { // golden file
 				arch.Files = append(arch.Files, txtar.File{Name: file.Name, Data: data})
 				delete(updatedGolden, file.Name)
@@ -744,16 +845,22 @@
 //
 // TODO(rfindley): simplify and refactor the construction of testing
 // environments across regtests, marker tests, and benchmarks.
-func newEnv(t *testing.T, cache *cache.Cache, files map[string][]byte, config fake.EditorConfig) *Env {
+func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byte, writeGoSum []string, config fake.EditorConfig) *Env {
 	sandbox, err := fake.NewSandbox(&fake.SandboxConfig{
-		RootDir: t.TempDir(),
-		GOPROXY: "https://proxy.golang.org",
-		Files:   files,
+		RootDir:    t.TempDir(),
+		Files:      files,
+		ProxyFiles: proxyFiles,
 	})
 	if err != nil {
 		t.Fatal(err)
 	}
 
+	for _, dir := range writeGoSum {
+		if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil {
+			t.Fatal(err)
+		}
+	}
+
 	// Put a debug instance in the context to prevent logging to stderr.
 	// See associated TODO in runner.go: we should revisit this pattern.
 	ctx := context.Background()
@@ -797,7 +904,7 @@
 	// Collected information.
 	// Each @diag/@suggestedfix marker eliminates an entry from diags.
 	locations map[expect.Identifier]protocol.Location
-	diags     map[protocol.Location][]protocol.Diagnostic
+	diags     map[protocol.Location][]protocol.Diagnostic // diagnostics by position; location end == start
 }
 
 // sprintf returns a formatted string after applying pre-processing to
@@ -852,8 +959,21 @@
 // archive-relative paths for files and including the line number in the full
 // archive file.
 func (run *markerTestRun) fmtLoc(loc protocol.Location) string {
+	formatted := run.fmtLocDetails(loc, true)
+	if formatted == "" {
+		run.env.T.Errorf("unable to find %s in test archive", loc)
+		return "<invalid location>"
+	}
+	return formatted
+}
+
+// See fmtLoc. If includeTxtPos is not set, the position in the full archive
+// file is omitted.
+//
+// If the location cannot be found within the archive, fmtLocDetails returns "".
+func (run *markerTestRun) fmtLocDetails(loc protocol.Location, includeTxtPos bool) string {
 	if loc == (protocol.Location{}) {
-		return "<missing location>"
+		return ""
 	}
 	lines := bytes.Count(run.test.archive.Comment, []byte("\n"))
 	var name string
@@ -867,8 +987,7 @@
 		lines += bytes.Count(f.Data, []byte("\n"))
 	}
 	if name == "" {
-		run.env.T.Errorf("unable to find %s in test archive", loc)
-		return "<invalid location>"
+		return ""
 	}
 	m, err := run.env.Editor.Mapper(name)
 	if err != nil {
@@ -893,7 +1012,11 @@
 		}
 	}
 
-	return fmt.Sprintf("%s:%s (%s:%s)", name, innerSpan, run.test.name, outerSpan)
+	if includeTxtPos {
+		return fmt.Sprintf("%s:%s (%s:%s)", name, innerSpan, run.test.name, outerSpan)
+	} else {
+		return fmt.Sprintf("%s:%s", name, innerSpan)
+	}
 }
 
 // makeMarkerFunc uses reflection to create a markerFunc for the given func value.
@@ -1160,7 +1283,7 @@
 	}
 }
 
-// defMarker implements the @godef marker, running textDocument/definition at
+// defMarker implements the @def marker, running textDocument/definition at
 // the given src location and asserting that there is exactly one resulting
 // location, matching dst.
 //
@@ -1173,6 +1296,40 @@
 	}
 }
 
+// formatMarker implements the @format marker.
+func formatMarker(mark marker, golden *Golden) {
+	edits, err := mark.server().Formatting(mark.run.env.Ctx, &protocol.DocumentFormattingParams{
+		TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()},
+	})
+	var got []byte
+	if err != nil {
+		got = []byte(err.Error() + "\n") // all golden content is newline terminated
+	} else {
+		env := mark.run.env
+		filename := env.Sandbox.Workdir.URIToPath(mark.uri())
+		mapper, err := env.Editor.Mapper(filename)
+		if err != nil {
+			mark.errorf("Editor.Mapper(%s) failed: %v", filename, err)
+			return
+		}
+
+		got, _, err = source.ApplyProtocolEdits(mapper, edits)
+		if err != nil {
+			mark.errorf("ApplyProtocolEdits failed: %v", err)
+			return
+		}
+	}
+
+	want, ok := golden.Get(mark.run.env.T, "", got)
+	if !ok {
+		mark.errorf("missing golden file @%s", golden.id)
+		return
+	}
+
+	if diff := compare.Bytes(want, got); diff != "" {
+		mark.errorf("golden file @%s does not match format results:\n%s", golden.id, diff)
+	}
+}
+
 // hoverMarker implements the @hover marker, running textDocument/hover at the
 // given src location and asserting that the resulting hover is over the dst
 // location (typically a span surrounding src), and that the markdown content
@@ -1215,20 +1372,27 @@
 // diagMarker implements the @diag marker. It eliminates diagnostics from
 // the observed set in mark.test.
 func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) {
-	if _, err := removeDiagnostic(mark, loc, re); err != nil {
-		mark.errorf("%v", err)
+	if _, ok := removeDiagnostic(mark, loc, re); !ok {
+		mark.errorf("no diagnostic at %v matches %q", loc, re)
 	}
 }
 
-func removeDiagnostic(mark marker, loc protocol.Location, re *regexp.Regexp) (protocol.Diagnostic, error) {
+// removeDiagnostic looks for a diagnostic matching loc at the given position.
+//
+// If found, it returns (diag, true), and eliminates the matched diagnostic
+// from the unmatched set.
+//
+// If not found, it returns (protocol.Diagnostic{}, false).
+func removeDiagnostic(mark marker, loc protocol.Location, re *regexp.Regexp) (protocol.Diagnostic, bool) {
+	loc.Range.End = loc.Range.Start // diagnostics ignore end position.
 	diags := mark.run.diags[loc]
 	for i, diag := range diags {
 		if re.MatchString(diag.Message) {
 			mark.run.diags[loc] = append(diags[:i], diags[i+1:]...)
-			return diag, nil
+			return diag, true
 		}
 	}
-	return protocol.Diagnostic{}, errors.New(mark.sprintf("no diagnostic at %v matches %q", loc, re))
+	return protocol.Diagnostic{}, false
 }
 
 // renameMarker implements the @rename(location, new, golden) marker.
@@ -1265,41 +1429,74 @@
 		return nil, err
 	}
 
-	return applyDocumentChanges(env, editMap.DocumentChanges)
+	fileChanges := make(map[string][]byte)
+	if err := applyDocumentChanges(env, editMap.DocumentChanges, fileChanges); err != nil {
+		return nil, fmt.Errorf("applying document changes: %v", err)
+	}
+	return fileChanges, nil
 }
 
-// applyDocumentChanges returns the effect of applying the document
-// changes to the contents of the Editor buffers. The actual editor
-// buffers are unchanged.
-func applyDocumentChanges(env *Env, changes []protocol.DocumentChanges) (map[string][]byte, error) {
-	result := make(map[string][]byte)
+// applyDocumentChanges applies the given document changes to the editor buffer
+// content, recording the resulting contents in the fileChanges map. It is an
+// error for a change to edit a file that is already present in the
+// fileChanges map.
+func applyDocumentChanges(env *Env, changes []protocol.DocumentChanges, fileChanges map[string][]byte) error {
+	getMapper := func(path string) (*protocol.Mapper, error) {
+		if _, ok := fileChanges[path]; ok {
+			return nil, fmt.Errorf("internal error: %s is already edited", path)
+		}
+		return env.Editor.Mapper(path)
+	}
+
 	for _, change := range changes {
 		if change.RenameFile != nil {
 			// rename
 			oldFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.OldURI)
-			newFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.NewURI)
-			mapper, err := env.Editor.Mapper(oldFile)
+			mapper, err := getMapper(oldFile)
 			if err != nil {
-				return nil, err
+				return err
 			}
-			result[newFile] = mapper.Content
-
+			newFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.NewURI)
+			fileChanges[newFile] = mapper.Content
 		} else {
 			// edit
 			filename := env.Sandbox.Workdir.URIToPath(change.TextDocumentEdit.TextDocument.URI)
-			mapper, err := env.Editor.Mapper(filename)
+			mapper, err := getMapper(filename)
 			if err != nil {
-				return nil, err
+				return err
 			}
 			patched, _, err := source.ApplyProtocolEdits(mapper, change.TextDocumentEdit.Edits)
 			if err != nil {
-				return nil, err
+				return err
 			}
-			result[filename] = patched
+			fileChanges[filename] = patched
 		}
 	}
 
-	return result, nil
+	return nil
+}
+
+func codeActionMarker(mark marker, actionKind string, start, end protocol.Location, golden *Golden) {
+	// Request the range from start.Start to end.End.
+	loc := start
+	loc.Range.End = end.Range.End
+
+	// Apply the fix it suggests.
+	changed, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil)
+	if err != nil {
+		mark.errorf("codeAction failed: %v", err)
+		return
+	}
+
+	// Check the file state.
+	checkChangedFiles(mark, changed, golden)
+}
+
+func codeActionErrMarker(mark marker, actionKind string, start, end protocol.Location, wantErr wantError) {
+	loc := start
+	loc.Range.End = end.Range.End
+	_, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil)
+	wantErr.check(mark, err)
 }
 
 // suggestedfixMarker implements the @suggestedfix(location, regexp,
@@ -1307,15 +1504,16 @@
 // the expectation of a diagnostic, but then it applies the first code
 // action of the specified kind suggested by the matched diagnostic.
 func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, actionKind string, golden *Golden) {
+	loc.Range.End = loc.Range.Start // diagnostics ignore end position.
 	// Find and remove the matching diagnostic.
-	diag, err := removeDiagnostic(mark, loc, re)
-	if err != nil {
-		mark.errorf("%v", err)
+	diag, ok := removeDiagnostic(mark, loc, re)
+	if !ok {
+		mark.errorf("no diagnostic at %v matches %q", loc, re)
 		return
 	}
 
 	// Apply the fix it suggests.
-	changed, err := suggestedfix(mark.run.env, loc, diag, actionKind)
+	changed, err := codeAction(mark.run.env, loc.URI, diag.Range, actionKind, &diag)
 	if err != nil {
 		mark.errorf("suggestedfix failed: %v. (Use @suggestedfixerr for expected errors.)", err)
 		return
@@ -1325,19 +1523,29 @@
 	checkChangedFiles(mark, changed, golden)
 }
 
-func suggestedfix(env *Env, loc protocol.Location, diag protocol.Diagnostic, actionKind string) (map[string][]byte, error) {
-
+// codeAction executes a textDocument/codeAction request for the specified
+// location and kind. If diag is non-nil, it is used as the code action
+// context.
+//
+// The resulting map contains the file contents after the code action is
+// applied: the action's edit (if any) is applied first, followed by any
+// changes produced by executing the action's command.
+func codeAction(env *Env, uri protocol.DocumentURI, rng protocol.Range, actionKind string, diag *protocol.Diagnostic) (map[string][]byte, error) {
 	// Request all code actions that apply to the diagnostic.
 	// (The protocol supports filtering using Context.Only={actionKind}
 	// but we can give a better error if we don't filter.)
-	actions, err := env.Editor.Server.CodeAction(env.Ctx, &protocol.CodeActionParams{
-		TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI},
-		Range:        diag.Range,
+	params := &protocol.CodeActionParams{
+		TextDocument: protocol.TextDocumentIdentifier{URI: uri},
+		Range:        rng,
 		Context: protocol.CodeActionContext{
-			Only:        nil, // => all kinds
-			Diagnostics: []protocol.Diagnostic{diag},
+			Only: nil, // => all kinds
 		},
-	})
+	}
+	if diag != nil {
+		params.Context.Diagnostics = []protocol.Diagnostic{*diag}
+	}
+
+	actions, err := env.Editor.Server.CodeAction(env.Ctx, params)
 	if err != nil {
 		return nil, err
 	}
@@ -1357,40 +1565,56 @@
 	}
 	action := candidates[0]
 
+	// Apply the codeAction.
+	//
+	// Spec:
+	//  "If a code action provides an edit and a command, first the edit is
+	//  executed and then the command."
+	fileChanges := make(map[string][]byte)
 	// An action may specify an edit and/or a command, to be
 	// applied in that order. But since applyDocumentChanges(env,
 	// action.Edit.DocumentChanges) doesn't compose, for now we
 	// assert that all commands used in the @suggestedfix tests
 	// return only a command.
-	if action.Edit.DocumentChanges != nil {
-		env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.DocumentChanges", action.Kind, action.Title)
-	}
-	if action.Command == nil {
-		return nil, fmt.Errorf("missing CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title)
+	if action.Edit != nil {
+		if action.Edit.Changes != nil {
+			env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title)
+		}
+		if action.Edit.DocumentChanges != nil {
+			if err := applyDocumentChanges(env, action.Edit.DocumentChanges, fileChanges); err != nil {
+				return nil, fmt.Errorf("applying document changes: %v", err)
+			}
+		}
 	}
 
-	// This is a typical CodeAction command:
-	//
-	//   Title:     "Implement error"
-	//   Command:   gopls.apply_fix
-	//   Arguments: [{"Fix":"stub_methods","URI":".../a.go","Range":...}}]
-	//
-	// The client makes an ExecuteCommand RPC to the server,
-	// which dispatches it to the ApplyFix handler.
-	// ApplyFix dispatches to the "stub_methods" suggestedfix hook (the meat).
-	// The server then makes an ApplyEdit RPC to the client,
-	// whose Awaiter hook gathers the edits instead of applying them.
+	if action.Command != nil {
+		// This is a typical CodeAction command:
+		//
+		//   Title:     "Implement error"
+		//   Command:   gopls.apply_fix
+		//   Arguments: [{"Fix":"stub_methods","URI":".../a.go","Range":...}}]
+		//
+		// The client makes an ExecuteCommand RPC to the server,
+		// which dispatches it to the ApplyFix handler.
+		// ApplyFix dispatches to the "stub_methods" suggestedfix hook (the meat).
+		// The server then makes an ApplyEdit RPC to the client,
+		// whose Awaiter hook gathers the edits instead of applying them.
 
-	_ = env.Awaiter.takeDocumentChanges() // reset (assuming Env is confined to this thread)
+		_ = env.Awaiter.takeDocumentChanges() // reset (assuming Env is confined to this thread)
 
-	if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{
-		Command:   action.Command.Command,
-		Arguments: action.Command.Arguments,
-	}); err != nil {
-		env.T.Fatalf("error converting command %q to edits: %v", action.Command.Command, err)
+		if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{
+			Command:   action.Command.Command,
+			Arguments: action.Command.Arguments,
+		}); err != nil {
+			env.T.Fatalf("error converting command %q to edits: %v", action.Command.Command, err)
+		}
+
+		if err := applyDocumentChanges(env, env.Awaiter.takeDocumentChanges(), fileChanges); err != nil {
+			return nil, fmt.Errorf("applying document changes from command: %v", err)
+		}
 	}
 
-	return applyDocumentChanges(env, env.Awaiter.takeDocumentChanges())
+	return fileChanges, nil
 }
 
 // TODO(adonovan): suggestedfixerr
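
For reference, a condensed sketch of the edit-then-command order that codeAction follows (per the spec language quoted in the function body); the helper name, package name, and the apply callback are hypothetical, while the protocol types and the ExecuteCommand call mirror the ones used in this file.

	package sketch

	import (
		"context"

		"golang.org/x/tools/gopls/internal/lsp/protocol"
	)

	// applyCodeAction illustrates the order mandated by the LSP spec: apply the
	// action's workspace edit first, then run its command; any further edits
	// arrive asynchronously as workspace/applyEdit requests (gathered by the
	// test client's Awaiter in the real code) and are applied last.
	func applyCodeAction(ctx context.Context, srv protocol.Server, action protocol.CodeAction, apply func([]protocol.DocumentChanges) error) error {
		if action.Edit != nil && action.Edit.DocumentChanges != nil {
			if err := apply(action.Edit.DocumentChanges); err != nil {
				return err
			}
		}
		if action.Command != nil {
			if _, err := srv.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{
				Command:   action.Command.Command,
				Arguments: action.Command.Arguments,
			}); err != nil {
				return err
			}
		}
		return nil
	}
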
@@ -1398,7 +1622,7 @@
 // refsMarker implements the @refs marker.
 func refsMarker(mark marker, src protocol.Location, want ...protocol.Location) {
 	refs := func(includeDeclaration bool, want []protocol.Location) error {
-		got, err := mark.run.env.Editor.Server.References(mark.run.env.Ctx, &protocol.ReferenceParams{
+		got, err := mark.server().References(mark.run.env.Ctx, &protocol.ReferenceParams{
 			TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src),
 			Context: protocol.ReferenceContext{
 				IncludeDeclaration: includeDeclaration,
@@ -1428,7 +1652,7 @@
 
 // implementationMarker implements the @implementation marker.
 func implementationMarker(mark marker, src protocol.Location, want ...protocol.Location) {
-	got, err := mark.run.env.Editor.Server.Implementation(mark.run.env.Ctx, &protocol.ImplementationParams{
+	got, err := mark.server().Implementation(mark.run.env.Ctx, &protocol.ImplementationParams{
 		TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src),
 	})
 	if err != nil {
@@ -1443,7 +1667,7 @@
 // symbolMarker implements the @symbol marker.
 func symbolMarker(mark marker, golden *Golden) {
 	// Retrieve information about all symbols in this file.
-	symbols, err := mark.run.env.Editor.Server.DocumentSymbol(mark.run.env.Ctx, &protocol.DocumentSymbolParams{
+	symbols, err := mark.server().DocumentSymbol(mark.run.env.Ctx, &protocol.DocumentSymbolParams{
 		TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()},
 	})
 	if err != nil {
@@ -1526,3 +1750,37 @@
 	}
 	return nil
 }
+
+func workspaceSymbolMarker(mark marker, query string, golden *Golden) {
+	params := &protocol.WorkspaceSymbolParams{
+		Query: query,
+	}
+
+	gotSymbols, err := mark.server().Symbol(mark.run.env.Ctx, params)
+	if err != nil {
+		mark.errorf("Symbol(%q) failed: %v", query, err)
+		return
+	}
+	var got bytes.Buffer
+	for _, s := range gotSymbols {
+		// Omit the txtar position of the symbol location; otherwise edits to the
+		// txtar archive lead to unexpected failures.
+		loc := mark.run.fmtLocDetails(s.Location, false)
+		// TODO(rfindley): can we do better here, by detecting if the location is
+		// relative to GOROOT?
+		if loc == "" {
+			loc = "<unknown>"
+		}
+		fmt.Fprintf(&got, "%s %s %s\n", loc, s.Name, s.Kind)
+	}
+
+	want, ok := golden.Get(mark.run.env.T, "", got.Bytes())
+	if !ok {
+		mark.errorf("missing golden file @%s", golden.id)
+		return
+	}
+
+	if diff := compare.Bytes(want, got.Bytes()); diff != "" {
+		mark.errorf("Symbol(%q) mismatch:\n%s", query, diff)
+	}
+}
diff --git a/gopls/internal/lsp/regtest/wrappers.go b/gopls/internal/lsp/regtest/wrappers.go
index 0315c6d..163960f 100644
--- a/gopls/internal/lsp/regtest/wrappers.go
+++ b/gopls/internal/lsp/regtest/wrappers.go
@@ -256,7 +256,7 @@
 // directory.
 func (e *Env) RunGoCommand(verb string, args ...string) {
 	e.T.Helper()
-	if err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, true); err != nil {
+	if err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true); err != nil {
 		e.T.Fatal(err)
 	}
 }
@@ -265,7 +265,7 @@
 // relative directory of the sandbox.
 func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) {
 	e.T.Helper()
-	if err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, true); err != nil {
+	if err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil {
 		e.T.Fatal(err)
 	}
 }
@@ -286,7 +286,7 @@
 func (e *Env) DumpGoSum(dir string) {
 	e.T.Helper()
 
-	if err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "..."}, true); err != nil {
+	if err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "..."}, nil, true); err != nil {
 		e.T.Fatal(err)
 	}
 	sumFile := path.Join(dir, "/go.sum")
diff --git a/gopls/internal/lsp/safetoken/safetoken_test.go b/gopls/internal/lsp/safetoken/safetoken_test.go
index 7f796d8..83a50fb 100644
--- a/gopls/internal/lsp/safetoken/safetoken_test.go
+++ b/gopls/internal/lsp/safetoken/safetoken_test.go
@@ -72,10 +72,20 @@
 // suggests alternatives.
 func TestGoplsSourceDoesNotCallTokenFileMethods(t *testing.T) {
 	testenv.NeedsGoPackages(t)
+	testenv.NeedsGo1Point(t, 18)
+	testenv.NeedsLocalXTools(t)
 
-	pkgs, err := packages.Load(&packages.Config{
+	cfg := &packages.Config{
 		Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps,
-	}, "go/token", "golang.org/x/tools/gopls/...")
+	}
+	cfg.Env = os.Environ()
+	cfg.Env = append(cfg.Env,
+		"GOPACKAGESDRIVER=off",
+		"GOWORK=off", // necessary for -mod=mod below
+		"GOFLAGS=-mod=mod",
+	)
+
+	pkgs, err := packages.Load(cfg, "go/token", "golang.org/x/tools/gopls/...")
 	if err != nil {
 		t.Fatal(err)
 	}
diff --git a/gopls/internal/lsp/server.go b/gopls/internal/lsp/server.go
index 33f251b..9f82e90 100644
--- a/gopls/internal/lsp/server.go
+++ b/gopls/internal/lsp/server.go
@@ -35,7 +35,6 @@
 		client:                client,
 		diagnosticsSema:       make(chan struct{}, concurrentAnalyses),
 		progress:              progress.NewTracker(client),
-		diagDebouncer:         newDebouncer(),
 	}
 }
 
@@ -105,9 +104,6 @@
 
 	progress *progress.Tracker
 
-	// diagDebouncer is used for debouncing diagnostics.
-	diagDebouncer *debouncer
-
 	// When the workspace fails to load, we show its status through a progress
 	// report with an error message.
 	criticalErrorStatusMu sync.Mutex
diff --git a/gopls/internal/lsp/server_gen.go b/gopls/internal/lsp/server_gen.go
index 3d736c6..33c70e2 100644
--- a/gopls/internal/lsp/server_gen.go
+++ b/gopls/internal/lsp/server_gen.go
@@ -236,16 +236,16 @@
 	return s.selectionRange(ctx, params)
 }
 
-func (s *Server) SemanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
-	return s.semanticTokensFull(ctx, p)
+func (s *Server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
+	return s.semanticTokensFull(ctx, params)
 }
 
-func (s *Server) SemanticTokensFullDelta(ctx context.Context, p *protocol.SemanticTokensDeltaParams) (interface{}, error) {
+func (s *Server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (interface{}, error) {
 	return nil, notImplemented("SemanticTokensFullDelta")
 }
 
-func (s *Server) SemanticTokensRange(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
-	return s.semanticTokensRange(ctx, p)
+func (s *Server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+	return s.semanticTokensRange(ctx, params)
 }
 
 func (s *Server) SetTrace(context.Context, *protocol.SetTraceParams) error {
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
index 88ff209..281772b 100644
--- a/gopls/internal/lsp/source/api_json.go
+++ b/gopls/internal/lsp/source/api_json.go
@@ -195,6 +195,23 @@
 				Hierarchy: "ui.navigation",
 			},
 			{
+				Name: "symbolScope",
+				Type: "enum",
+				Doc:  "symbolScope controls which packages are searched for workspace/symbol\nrequests. The default value, \"workspace\", searches only workspace\npackages. The legacy behavior, \"all\", causes all loaded packages to be\nsearched, including dependencies; this is more expensive and may return\nunwanted results.\n",
+				EnumValues: []EnumValue{
+					{
+						Value: "\"all\"",
+						Doc:   "`\"all\"` matches symbols in any loaded package, including\ndependencies.\n",
+					},
+					{
+						Value: "\"workspace\"",
+						Doc:   "`\"workspace\"` matches symbols in workspace packages only.\n",
+					},
+				},
+				Default:   "\"all\"",
+				Hierarchy: "ui.navigation",
+			},
+			{
 				Name: "analyses",
 				Type: "map[string]bool",
 				Doc:  "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found in\n[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n  \"unreachable\": false, // Disable the unreachable analyzer.\n  \"unusedparams\": true  // Enable the unusedparams analyzer.\n}\n...\n```\n",
@@ -282,11 +299,6 @@
 							Default: "true",
 						},
 						{
-							Name:    "\"infertypeargs\"",
-							Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
-							Default: "true",
-						},
-						{
 							Name:    "\"loopclosure\"",
 							Doc:     "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n\tfor _, v := range list {\n\t    defer func() {\n\t        use(v) // incorrect\n\t    }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t    v := v // new var per iteration\n\t    defer func() {\n\t        use(v) // ok\n\t    }()\n\t}\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t    go func() {\n\t        use(v)  // incorrect, and a data race\n\t    }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t    for _, test := range tests {\n\t        t.Run(test.name, func(t *testing.T) {\n\t            t.Parallel()\n\t            use(test) // incorrect, and a data race\n\t        })\n\t    }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
 							Default: "true",
@@ -383,7 +395,7 @@
 						},
 						{
 							Name:    "\"unsafeptr\"",
-							Doc:     "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.`",
+							Doc:     "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
 							Default: "true",
 						},
 						{
@@ -393,7 +405,7 @@
 						},
 						{
 							Name:    "\"unusedresult\"",
-							Doc:     "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
+							Doc:     "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
 							Default: "true",
 						},
 						{
@@ -437,6 +449,11 @@
 							Default: "true",
 						},
 						{
+							Name:    "\"infertypeargs\"",
+							Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+							Default: "true",
+						},
+						{
 							Name:    "\"stubmethods\"",
 							Doc:     "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
 							Default: "true",
@@ -752,6 +769,12 @@
 			ArgDoc:  "{\n\t\"URIArg\": {\n\t\t\"URI\": string,\n\t},\n\t// Optional: source of the diagnostics to reset.\n\t// If not set, all resettable go.mod diagnostics will be cleared.\n\t\"DiagnosticSource\": string,\n}",
 		},
 		{
+			Command: "gopls.run_go_work_command",
+			Title:   "run `go work [args...]`, and apply the resulting go.work",
+			Doc:     "edits to the current go.work file.",
+			ArgDoc:  "{\n\t\"ViewID\": string,\n\t\"InitFirst\": bool,\n\t\"Args\": []string,\n}",
+		},
+		{
 			Command:   "gopls.run_govulncheck",
 			Title:     "Run govulncheck.",
 			Doc:       "Run vulnerability check (`govulncheck`).",
@@ -952,11 +975,6 @@
 			Default: true,
 		},
 		{
-			Name:    "infertypeargs",
-			Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
-			Default: true,
-		},
-		{
 			Name:    "loopclosure",
 			Doc:     "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n\tfor _, v := range list {\n\t    defer func() {\n\t        use(v) // incorrect\n\t    }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t    v := v // new var per iteration\n\t    defer func() {\n\t        use(v) // ok\n\t    }()\n\t}\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t    go func() {\n\t        use(v)  // incorrect, and a data race\n\t    }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t    for _, test := range tests {\n\t        t.Run(test.name, func(t *testing.T) {\n\t            t.Parallel()\n\t            use(test) // incorrect, and a data race\n\t        })\n\t    }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
 			URL:     "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/loopclosure",
@@ -1067,7 +1085,7 @@
 		},
 		{
 			Name:    "unsafeptr",
-			Doc:     "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.`",
+			Doc:     "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
 			URL:     "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr",
 			Default: true,
 		},
@@ -1077,7 +1095,7 @@
 		},
 		{
 			Name:    "unusedresult",
-			Doc:     "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
+			Doc:     "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
 			URL:     "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult",
 			Default: true,
 		},
@@ -1120,6 +1138,11 @@
 			Default: true,
 		},
 		{
+			Name:    "infertypeargs",
+			Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+			Default: true,
+		},
+		{
 			Name:    "stubmethods",
 			Doc:     "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
 			Default: true,
diff --git a/gopls/internal/lsp/source/call_hierarchy.go b/gopls/internal/lsp/source/call_hierarchy.go
index f66d936..8faf425 100644
--- a/gopls/internal/lsp/source/call_hierarchy.go
+++ b/gopls/internal/lsp/source/call_hierarchy.go
@@ -14,10 +14,10 @@
 	"path/filepath"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 )
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
index e6e53e0..ad5ce16 100644
--- a/gopls/internal/lsp/source/completion/completion.go
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -200,7 +200,7 @@
 	// completionCallbacks is a list of callbacks to collect completions that
 	// require expensive operations. This includes operations where we search
 	// through the entire module cache.
-	completionCallbacks []func(opts *imports.Options) error
+	completionCallbacks []func(context.Context, *imports.Options) error
 
 	// surrounding describes the identifier surrounding the position.
 	surrounding *Selection
@@ -887,7 +887,7 @@
 		})
 	}
 
-	c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+	c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error {
 		return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
 	})
 	return nil
@@ -1174,7 +1174,8 @@
 	// not assume global Pos/Object realms and then use export
 	// data instead of the quick parse approach taken here.
 
-	// First, we search among packages in the workspace.
+	// First, we search among packages in the forward transitive
+	// closure of the workspace.
 	// We'll use a fast parse to extract package members
 	// from those that match the name/path criterion.
 	all, err := c.snapshot.AllMetadata(ctx)
@@ -1194,7 +1195,7 @@
 	// Rank import paths as goimports would.
 	var relevances map[string]float64
 	if len(paths) > 0 {
-		if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
+		if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error {
 			var err error
 			relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
 			return err
@@ -1341,7 +1342,7 @@
 		}
 	}
 
-	c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+	c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error {
 		defer cancel()
 		return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.GetTypes().Name(), opts.Env)
 	})
@@ -1610,7 +1611,7 @@
 
 	count := 0
 
-	// Search packages across the entire workspace.
+	// Search the forward transitive closure of the workspace.
 	all, err := c.snapshot.AllMetadata(ctx)
 	if err != nil {
 		return err
@@ -1634,7 +1635,7 @@
 	// Rank candidates using goimports' algorithm.
 	var relevances map[string]float64
 	if len(paths) != 0 {
-		if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
+		if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error {
 			var err error
 			relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
 			return err
@@ -1707,7 +1708,7 @@
 		})
 		count++
 	}
-	c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+	c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error {
 		defer cancel()
 		return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
 	})
diff --git a/gopls/internal/lsp/source/definition.go b/gopls/internal/lsp/source/definition.go
index eb9118c..90a4329 100644
--- a/gopls/internal/lsp/source/definition.go
+++ b/gopls/internal/lsp/source/definition.go
@@ -11,9 +11,9 @@
 	"go/token"
 	"go/types"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 )
 
@@ -64,25 +64,49 @@
 		return nil, nil
 	}
 
-	// Handle built-in identifiers.
-	if obj.Parent() == types.Universe {
-		builtin, err := snapshot.BuiltinFile(ctx)
-		if err != nil {
-			return nil, err
+	// Handle objects with no position: builtin, unsafe.
+	if !obj.Pos().IsValid() {
+		var pgf *ParsedGoFile
+		if obj.Parent() == types.Universe {
+			// pseudo-package "builtin"
+			builtinPGF, err := snapshot.BuiltinFile(ctx)
+			if err != nil {
+				return nil, err
+			}
+			pgf = builtinPGF
+
+		} else if obj.Pkg() == types.Unsafe {
+			// package "unsafe"
+			unsafe := snapshot.Metadata("unsafe")
+			if unsafe == nil {
+				return nil, fmt.Errorf("no metadata for package 'unsafe'")
+			}
+			uri := unsafe.GoFiles[0]
+			fh, err := snapshot.ReadFile(ctx, uri)
+			if err != nil {
+				return nil, err
+			}
+			pgf, err = snapshot.ParseGo(ctx, fh, ParseFull&^SkipObjectResolution)
+			if err != nil {
+				return nil, err
+			}
+
+		} else {
+			return nil, bug.Errorf("internal error: no position for %v", obj.Name())
 		}
-		// Note that builtinObj is an ast.Object, not types.Object :)
-		builtinObj := builtin.File.Scope.Lookup(obj.Name())
-		if builtinObj == nil {
-			// Every builtin should have documentation.
-			return nil, bug.Errorf("internal error: no builtin object for %s", obj.Name())
+		// Inv: pgf ∈ {builtin,unsafe}.go
+
+		// Use legacy (go/ast) object resolution.
+		astObj := pgf.File.Scope.Lookup(obj.Name())
+		if astObj == nil {
+			// Every built-in should have documentation syntax.
+			return nil, bug.Errorf("internal error: no object for %s", obj.Name())
 		}
-		decl, ok := builtinObj.Decl.(ast.Node)
+		decl, ok := astObj.Decl.(ast.Node)
 		if !ok {
 			return nil, bug.Errorf("internal error: no declaration for %s", obj.Name())
 		}
-		// The builtin package isn't in the dependency graph, so the usual
-		// utilities won't work here.
-		loc, err := builtin.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name())))
+		loc, err := pgf.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name())))
 		if err != nil {
 			return nil, err
 		}
@@ -90,16 +114,11 @@
 	}
 
 	// Finally, map the object position.
-	var locs []protocol.Location
-	if !obj.Pos().IsValid() {
-		return nil, bug.Errorf("internal error: no position for %v", obj.Name())
-	}
 	loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj))
 	if err != nil {
 		return nil, err
 	}
-	locs = append(locs, loc)
-	return locs, nil
+	return []protocol.Location{loc}, nil
 }
 
 // referencedObject returns the identifier and object referenced at the
diff --git a/gopls/internal/lsp/source/extract.go b/gopls/internal/lsp/source/extract.go
index 5a94bbf..d785107 100644
--- a/gopls/internal/lsp/source/extract.go
+++ b/gopls/internal/lsp/source/extract.go
@@ -18,9 +18,9 @@
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/internal/analysisinternal"
-	"golang.org/x/tools/internal/bug"
 )
 
 func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
diff --git a/gopls/internal/lsp/source/fix.go b/gopls/internal/lsp/source/fix.go
index 08abdd0..cb8e5a3 100644
--- a/gopls/internal/lsp/source/fix.go
+++ b/gopls/internal/lsp/source/fix.go
@@ -12,11 +12,11 @@
 	"go/types"
 
 	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct"
 	"golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 )
 
 type (
@@ -34,22 +34,24 @@
 )
 
 const (
-	FillStruct      = "fill_struct"
-	StubMethods     = "stub_methods"
-	UndeclaredName  = "undeclared_name"
-	ExtractVariable = "extract_variable"
-	ExtractFunction = "extract_function"
-	ExtractMethod   = "extract_method"
+	FillStruct        = "fill_struct"
+	StubMethods       = "stub_methods"
+	UndeclaredName    = "undeclared_name"
+	ExtractVariable   = "extract_variable"
+	ExtractFunction   = "extract_function"
+	ExtractMethod     = "extract_method"
+	InvertIfCondition = "invert_if_condition"
 )
 
 // suggestedFixes maps a suggested fix command id to its handler.
 var suggestedFixes = map[string]SuggestedFixFunc{
-	FillStruct:      singleFile(fillstruct.SuggestedFix),
-	UndeclaredName:  singleFile(undeclaredname.SuggestedFix),
-	ExtractVariable: singleFile(extractVariable),
-	ExtractFunction: singleFile(extractFunction),
-	ExtractMethod:   singleFile(extractMethod),
-	StubMethods:     stubSuggestedFixFunc,
+	FillStruct:        singleFile(fillstruct.SuggestedFix),
+	UndeclaredName:    singleFile(undeclaredname.SuggestedFix),
+	ExtractVariable:   singleFile(extractVariable),
+	ExtractFunction:   singleFile(extractFunction),
+	ExtractMethod:     singleFile(extractMethod),
+	InvertIfCondition: singleFile(invertIfCondition),
+	StubMethods:       stubSuggestedFixFunc,
 }
 
 // singleFile calls analyzers that expect inputs for a single file
diff --git a/gopls/internal/lsp/source/folding_range.go b/gopls/internal/lsp/source/folding_range.go
index 56bcc87..9f63c77 100644
--- a/gopls/internal/lsp/source/folding_range.go
+++ b/gopls/internal/lsp/source/folding_range.go
@@ -11,9 +11,9 @@
 	"sort"
 	"strings"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
-	"golang.org/x/tools/internal/bug"
 )
 
 // FoldingRangeInfo holds range and kind info of folding for an ast.Node
diff --git a/gopls/internal/lsp/source/format.go b/gopls/internal/lsp/source/format.go
index ac73c76..dfc4f76 100644
--- a/gopls/internal/lsp/source/format.go
+++ b/gopls/internal/lsp/source/format.go
@@ -116,8 +116,8 @@
 	if err != nil {
 		return nil, nil, err
 	}
-	if err := snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
-		allFixEdits, editsPerFix, err = computeImportEdits(snapshot, pgf, opts)
+	if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error {
+		allFixEdits, editsPerFix, err = computeImportEdits(ctx, snapshot, pgf, opts)
 		return err
 	}); err != nil {
 		return nil, nil, fmt.Errorf("AllImportsFixes: %v", err)
@@ -127,11 +127,11 @@
 
 // computeImportEdits computes a set of edits that perform one or all of the
 // necessary import fixes.
-func computeImportEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
+func computeImportEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
 	filename := pgf.URI.Filename()
 
 	// Build up basic information about the original file.
-	allFixes, err := imports.FixImports(filename, pgf.Src, options)
+	allFixes, err := imports.FixImports(ctx, filename, pgf.Src, options)
 	if err != nil {
 		return nil, nil, err
 	}
diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go
index fe0bc9a..6fc4d79 100644
--- a/gopls/internal/lsp/source/hover.go
+++ b/gopls/internal/lsp/source/hover.go
@@ -22,10 +22,10 @@
 	"golang.org/x/text/unicode/runenames"
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
@@ -121,6 +121,22 @@
 		}
 	}
 
+	// If the cursor is over a go:linkname directive, hover over its target instead.
+	var linkedRange *protocol.Range // range of the directive's pkgPath.name argument, or nil
+	if pkgPath, name, offset := parseLinkname(ctx, snapshot, fh, pp); pkgPath != "" && name != "" {
+		// rng covering 2nd linkname argument: pkgPath.name.
+		rng, err := pgf.PosRange(pgf.Tok.Pos(offset), pgf.Tok.Pos(offset+len(pkgPath)+len(".")+len(name)))
+		if err != nil {
+			return protocol.Range{}, nil, fmt.Errorf("range over linkname arg: %w", err)
+		}
+		linkedRange = &rng
+
+		pkg, pgf, pos, err = findLinkname(ctx, snapshot, PackagePath(pkgPath), name)
+		if err != nil {
+			return protocol.Range{}, nil, fmt.Errorf("find linkname: %w", err)
+		}
+	}
+
 	// The general case: compute hover information for the object referenced by
 	// the identifier at pos.
 	ident, obj, selectedType := referencedObject(pkg, pgf, pos)
@@ -128,9 +144,15 @@
 		return protocol.Range{}, nil, nil // no object to hover
 	}
 
-	rng, err := pgf.NodeRange(ident)
-	if err != nil {
-		return protocol.Range{}, nil, err
+	// Unless otherwise specified, rng covers the ident being hovered.
+	var rng protocol.Range
+	if linkedRange != nil {
+		rng = *linkedRange
+	} else {
+		rng, err = pgf.NodeRange(ident)
+		if err != nil {
+			return protocol.Range{}, nil, err
+		}
 	}
 
 	// By convention, we qualify hover information relative to the package
@@ -342,8 +364,6 @@
 		return nil, err
 	}
 
-	// TODO(rfindley): add a test for jump to definition of error.Error (which is
-	// probably failing, considering it lacks special handling).
 	if obj.Name() == "Error" {
 		signature := obj.String()
 		return &HoverJSON{
diff --git a/gopls/internal/lsp/source/implementation.go b/gopls/internal/lsp/source/implementation.go
index bcb8b94..25beccf 100644
--- a/gopls/internal/lsp/source/implementation.go
+++ b/gopls/internal/lsp/source/implementation.go
@@ -30,7 +30,6 @@
 //
 // TODO(adonovan):
 // - Audit to ensure robustness in face of type errors.
-// - Support 'error' and 'error.Error', which were also lacking from the old implementation.
 // - Eliminate false positives due to 'tricky' cases of the global algorithm.
 // - Ensure we have test coverage of:
 //      type aliases
@@ -152,8 +151,9 @@
 		return nil, nil
 	}
 
-	// The global search needs to look at every package in the workspace;
-	// see package ./methodsets.
+	// The global search needs to look at every package in the
+	// forward transitive closure of the workspace; see package
+	// ./methodsets.
 	//
 	// For now we do all the type checking before beginning the search.
 	// TODO(adonovan): opt: search in parallel topological order
@@ -387,9 +387,38 @@
 		locs = append(locs, loc)
 	}
 
+	// Special case: for types that satisfy error, report builtin.go (see #59527).
+	if types.Implements(queryType, errorInterfaceType) {
+		loc, err := errorLocation(ctx, snapshot)
+		if err != nil {
+			return nil, err
+		}
+		locs = append(locs, loc)
+	}
+
 	return locs, nil
 }
 
+var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+// errorLocation returns the location of the 'error' type in builtin.go.
+func errorLocation(ctx context.Context, snapshot Snapshot) (protocol.Location, error) {
+	pgf, err := snapshot.BuiltinFile(ctx)
+	if err != nil {
+		return protocol.Location{}, err
+	}
+	for _, decl := range pgf.File.Decls {
+		if decl, ok := decl.(*ast.GenDecl); ok {
+			for _, spec := range decl.Specs {
+				if spec, ok := spec.(*ast.TypeSpec); ok && spec.Name.Name == "error" {
+					return pgf.NodeLocation(spec.Name)
+				}
+			}
+		}
+	}
+	return protocol.Location{}, fmt.Errorf("built-in error type not found")
+}
+
 // concreteImplementsIntf returns true if a is an interface type implemented by
 // concrete type b, or vice versa.
 func concreteImplementsIntf(a, b types.Type) bool {
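
The error special case above relies on two go/types calls: looking up the universe "error" interface and testing satisfaction with types.Implements. A minimal standalone sketch of that check (illustrative only, not part of the patch):

	package main

	import (
		"fmt"
		"go/ast"
		"go/importer"
		"go/parser"
		"go/token"
		"go/types"
	)

	func main() {
		// Type-check a tiny package whose myErr type has an Error() string method.
		const src = "package p\n\ntype myErr struct{}\n\nfunc (myErr) Error() string { return \"boom\" }\n"
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		conf := types.Config{Importer: importer.Default()}
		pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
		if err != nil {
			panic(err)
		}

		// Same lookup as errorInterfaceType above: the universe "error" interface.
		errorIface := types.Universe.Lookup("error").Type().Underlying().(*types.Interface)

		// myErr has an Error() string method, so it satisfies error.
		fmt.Println(types.Implements(pkg.Scope().Lookup("myErr").Type(), errorIface)) // true
	}
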
diff --git a/gopls/internal/lsp/source/invertifcondition.go b/gopls/internal/lsp/source/invertifcondition.go
new file mode 100644
index 0000000..2b11485
--- /dev/null
+++ b/gopls/internal/lsp/source/invertifcondition.go
@@ -0,0 +1,268 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/lsp/safetoken"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// invertIfCondition is a singleFileFixFunc that inverts an if/else statement.
+func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*analysis.SuggestedFix, error) {
+	ifStatement, _, err := CanInvertIfCondition(file, start, end)
+	if err != nil {
+		return nil, err
+	}
+
+	var replaceElse analysis.TextEdit
+
+	endsWithReturn, err := endsWithReturn(ifStatement.Else)
+	if err != nil {
+		return nil, err
+	}
+
+	if endsWithReturn {
+		// Replace the whole else part with an empty line and an unindented
+		// version of the original if body
+		sourcePos := safetoken.StartPosition(fset, ifStatement.Pos())
+
+		indent := sourcePos.Column - 1
+		if indent < 0 {
+			indent = 0
+		}
+
+		standaloneBodyText := ifBodyToStandaloneCode(fset, ifStatement.Body, src)
+		replaceElse = analysis.TextEdit{
+			Pos:     ifStatement.Body.Rbrace + 1, // 1 == len("}")
+			End:     ifStatement.End(),
+			NewText: []byte("\n\n" + strings.Repeat("\t", indent) + standaloneBodyText),
+		}
+	} else {
+		// Replace the else body text with the if body text
+		bodyStart := safetoken.StartPosition(fset, ifStatement.Body.Lbrace)
+		bodyEnd := safetoken.EndPosition(fset, ifStatement.Body.Rbrace+1) // 1 == len("}")
+		bodyText := src[bodyStart.Offset:bodyEnd.Offset]
+		replaceElse = analysis.TextEdit{
+			Pos:     ifStatement.Else.Pos(),
+			End:     ifStatement.Else.End(),
+			NewText: bodyText,
+		}
+	}
+
+	// Replace the if text with the else text
+	elsePosInSource := safetoken.StartPosition(fset, ifStatement.Else.Pos())
+	elseEndInSource := safetoken.EndPosition(fset, ifStatement.Else.End())
+	elseText := src[elsePosInSource.Offset:elseEndInSource.Offset]
+	replaceBodyWithElse := analysis.TextEdit{
+		Pos:     ifStatement.Body.Pos(),
+		End:     ifStatement.Body.End(),
+		NewText: elseText,
+	}
+
+	// Replace the if condition with its inverse
+	inverseCondition, err := invertCondition(fset, ifStatement.Cond, src)
+	if err != nil {
+		return nil, err
+	}
+	replaceConditionWithInverse := analysis.TextEdit{
+		Pos:     ifStatement.Cond.Pos(),
+		End:     ifStatement.Cond.End(),
+		NewText: inverseCondition,
+	}
+
+	// Return a SuggestedFix containing the three text edits computed above.
+	return &analysis.SuggestedFix{
+		TextEdits: []analysis.TextEdit{
+			replaceConditionWithInverse,
+			replaceBodyWithElse,
+			replaceElse,
+		},
+	}, nil
+}
+
+func endsWithReturn(elseBranch ast.Stmt) (bool, error) {
+	elseBlock, isBlockStatement := elseBranch.(*ast.BlockStmt)
+	if !isBlockStatement {
+		return false, fmt.Errorf("Unable to figure out whether this ends with return: %T", elseBranch)
+	}
+
+	if len(elseBlock.List) == 0 {
+		// Empty blocks don't end in returns
+		return false, nil
+	}
+
+	lastStatement := elseBlock.List[len(elseBlock.List)-1]
+
+	_, lastStatementIsReturn := lastStatement.(*ast.ReturnStmt)
+	return lastStatementIsReturn, nil
+}
+
+// ifBodyToStandaloneCode turns a block such as { fmt.Println("Hello") } into
+// just fmt.Println("Hello"), with one less level of indentation.
+//
+// The first line of the result will not be indented, but all of the following
+// lines will.
+func ifBodyToStandaloneCode(fset *token.FileSet, ifBody *ast.BlockStmt, src []byte) string {
+	// Get the whole body (without the surrounding braces) as a string
+	bodyStart := safetoken.StartPosition(fset, ifBody.Lbrace+1) // 1 == len("{")
+	bodyEnd := safetoken.EndPosition(fset, ifBody.Rbrace)
+	bodyWithoutBraces := string(src[bodyStart.Offset:bodyEnd.Offset])
+	bodyWithoutBraces = strings.TrimSpace(bodyWithoutBraces)
+
+	// Unindent
+	bodyWithoutBraces = strings.ReplaceAll(bodyWithoutBraces, "\n\t", "\n")
+
+	return bodyWithoutBraces
+}
+
+func invertCondition(fset *token.FileSet, cond ast.Expr, src []byte) ([]byte, error) {
+	condStart := safetoken.StartPosition(fset, cond.Pos())
+	condEnd := safetoken.EndPosition(fset, cond.End())
+	oldText := string(src[condStart.Offset:condEnd.Offset])
+
+	switch expr := cond.(type) {
+	case *ast.Ident, *ast.ParenExpr, *ast.CallExpr, *ast.StarExpr, *ast.IndexExpr, *typeparams.IndexListExpr, *ast.SelectorExpr:
+		newText := "!" + oldText
+		if oldText == "true" {
+			newText = "false"
+		} else if oldText == "false" {
+			newText = "true"
+		}
+
+		return []byte(newText), nil
+
+	case *ast.UnaryExpr:
+		if expr.Op != token.NOT {
+			// This should never happen
+			return dumbInvert(fset, cond, src), nil
+		}
+
+		inverse := expr.X
+		if p, isParen := inverse.(*ast.ParenExpr); isParen {
+			// We got !(x); remove the parentheses along with the ! so we are left with just "x".
+			inverse = p.X
+
+			start := safetoken.StartPosition(fset, inverse.Pos())
+			end := safetoken.EndPosition(fset, inverse.End())
+			if start.Line != end.Line {
+				// The expression is multi-line, so we can't remove the parentheses
+				inverse = expr.X
+			}
+		}
+
+		start := safetoken.StartPosition(fset, inverse.Pos())
+		end := safetoken.EndPosition(fset, inverse.End())
+		textWithoutNot := src[start.Offset:end.Offset]
+
+		return textWithoutNot, nil
+
+	case *ast.BinaryExpr:
+		// These inversions are unsound for floating point NaN, but that's ok.
+		negations := map[token.Token]string{
+			token.EQL: "!=",
+			token.LSS: ">=",
+			token.GTR: "<=",
+			token.NEQ: "==",
+			token.LEQ: ">",
+			token.GEQ: "<",
+		}
+
+		negation, negationFound := negations[expr.Op]
+		if !negationFound {
+			return invertAndOr(fset, expr, src)
+		}
+
+		xPosInSource := safetoken.StartPosition(fset, expr.X.Pos())
+		opPosInSource := safetoken.StartPosition(fset, expr.OpPos)
+		yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos())
+
+		textBeforeOp := string(src[xPosInSource.Offset:opPosInSource.Offset])
+
+		oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset])
+		newOpWithTrailingWhitespace := negation + oldOpWithTrailingWhitespace[len(expr.Op.String()):]
+
+		textAfterOp := string(src[yPosInSource.Offset:condEnd.Offset])
+
+		return []byte(textBeforeOp + newOpWithTrailingWhitespace + textAfterOp), nil
+	}
+
+	return dumbInvert(fset, cond, src), nil
+}
+
+// dumbInvert is a fallback, inverting cond into !(cond).
+func dumbInvert(fset *token.FileSet, expr ast.Expr, src []byte) []byte {
+	start := safetoken.StartPosition(fset, expr.Pos())
+	end := safetoken.EndPosition(fset, expr.End())
+	text := string(src[start.Offset:end.Offset])
+	return []byte("!(" + text + ")")
+}
+
+func invertAndOr(fset *token.FileSet, expr *ast.BinaryExpr, src []byte) ([]byte, error) {
+	if expr.Op != token.LAND && expr.Op != token.LOR {
+		// Neither && nor ||; we don't know how to invert this, so fall back to !(...).
+		return dumbInvert(fset, expr, src), nil
+	}
+
+	oppositeOp := "&&"
+	if expr.Op == token.LAND {
+		oppositeOp = "||"
+	}
+
+	xEndInSource := safetoken.EndPosition(fset, expr.X.End())
+	opPosInSource := safetoken.StartPosition(fset, expr.OpPos)
+	whitespaceAfterBefore := src[xEndInSource.Offset:opPosInSource.Offset]
+
+	invertedBefore, err := invertCondition(fset, expr.X, src)
+	if err != nil {
+		return nil, err
+	}
+
+	invertedAfter, err := invertCondition(fset, expr.Y, src)
+	if err != nil {
+		return nil, err
+	}
+
+	yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos())
+
+	oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset])
+	newOpWithTrailingWhitespace := oppositeOp + oldOpWithTrailingWhitespace[len(expr.Op.String()):]
+
+	return []byte(string(invertedBefore) + string(whitespaceAfterBefore) + newOpWithTrailingWhitespace + string(invertedAfter)), nil
+}
+
+// CanInvertIfCondition reports whether the invert-if-condition code action
+// can be applied to the code in the given range.
+func CanInvertIfCondition(file *ast.File, start, end token.Pos) (*ast.IfStmt, bool, error) {
+	path, _ := astutil.PathEnclosingInterval(file, start, end)
+	for _, node := range path {
+		stmt, isIfStatement := node.(*ast.IfStmt)
+		if !isIfStatement {
+			continue
+		}
+
+		if stmt.Else == nil {
+			// Can't invert conditions without else clauses
+			return nil, false, fmt.Errorf("else clause required")
+		}
+
+		if _, hasElseIf := stmt.Else.(*ast.IfStmt); hasElseIf {
+			// Can't invert conditions with else-if clauses, unclear what that
+			// would look like
+			return nil, false, fmt.Errorf("else-if not supported")
+		}
+
+		return stmt, true, nil
+	}
+
+	return nil, false, fmt.Errorf("not an if statement")
+}
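
For readers skimming the new file: in the simplest case, where neither branch ends in a return, the three edits above amount to negating the condition and swapping the branches. A hand-written illustration of the intended transformation (not tool output):

	package example

	import "fmt"

	// Before applying "invert if condition":
	func before(x int) {
		if x > 0 {
			fmt.Println("positive")
		} else {
			fmt.Println("not positive")
		}
	}

	// After: ">" maps to "<=" in the negations table, and the branches are swapped.
	func after(x int) {
		if x <= 0 {
			fmt.Println("not positive")
		} else {
			fmt.Println("positive")
		}
	}
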
diff --git a/gopls/internal/lsp/source/known_packages.go b/gopls/internal/lsp/source/known_packages.go
index 4414852..1113403 100644
--- a/gopls/internal/lsp/source/known_packages.go
+++ b/gopls/internal/lsp/source/known_packages.go
@@ -51,7 +51,7 @@
 		}
 	}
 
-	// Now find candidates among known packages.
+	// Now find candidates among all known packages.
 	knownPkgs, err := snapshot.AllMetadata(ctx)
 	if err != nil {
 		return nil, err
@@ -85,7 +85,7 @@
 	}
 
 	// Augment the set by invoking the goimports algorithm.
-	if err := snapshot.RunProcessEnvFunc(ctx, func(o *imports.Options) error {
+	if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, o *imports.Options) error {
 		ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80)
 		defer cancel()
 		var seenMu sync.Mutex
diff --git a/gopls/internal/lsp/source/linkname.go b/gopls/internal/lsp/source/linkname.go
index 04efa61..84890a6 100644
--- a/gopls/internal/lsp/source/linkname.go
+++ b/gopls/internal/lsp/source/linkname.go
@@ -23,30 +23,43 @@
 
 // LinknameDefinition finds the definition of the linkname directive in fh at pos.
 // If there is no linkname directive at pos, returns ErrNoLinkname.
-func LinknameDefinition(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.Location, error) {
-	pkgPath, name := parseLinkname(ctx, snapshot, fh, pos)
+func LinknameDefinition(ctx context.Context, snapshot Snapshot, fh FileHandle, from protocol.Position) ([]protocol.Location, error) {
+	pkgPath, name, _ := parseLinkname(ctx, snapshot, fh, from)
 	if pkgPath == "" {
 		return nil, ErrNoLinkname
 	}
-	return findLinkname(ctx, snapshot, fh, pos, PackagePath(pkgPath), name)
+
+	_, pgf, pos, err := findLinkname(ctx, snapshot, PackagePath(pkgPath), name)
+	if err != nil {
+		return nil, fmt.Errorf("find linkname: %w", err)
+	}
+	loc, err := pgf.PosLocation(pos, pos+token.Pos(len(name)))
+	if err != nil {
+		return nil, fmt.Errorf("location of linkname: %w", err)
+	}
+	return []protocol.Location{loc}, nil
 }
 
 // parseLinkname attempts to parse a go:linkname declaration at the given pos.
-// If successful, it returns the package path and object name referenced by the second
-// argument of the linkname directive.
+// If successful, it returns, for the second argument of the linkname
+// directive (the link target pkgPath.name):
+//   - the package path referenced
+//   - the object name referenced
+//   - the byte offset in fh at which the target begins
 //
-// If the position is not in the second argument of a go:linkname directive, or parsing fails, it returns "", "".
-func parseLinkname(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (pkgPath, name string) {
+// If the position is not in the second argument of a go:linkname directive,
+// or parsing fails, it returns "", "", 0.
+func parseLinkname(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (pkgPath, name string, targetOffset int) {
 	// TODO(adonovan): opt: parsing isn't necessary here.
 	// We're only looking for a line comment.
 	pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
 	if err != nil {
-		return "", ""
+		return "", "", 0
 	}
 
 	offset, err := pgf.Mapper.PositionOffset(pos)
 	if err != nil {
-		return "", ""
+		return "", "", 0
 	}
 
 	// Looking for pkgpath in '//go:linkname f pkgpath.g'.
@@ -54,23 +67,24 @@
 	directive, end := findLinknameAtOffset(pgf, offset)
 	parts := strings.Fields(directive)
 	if len(parts) != 3 {
-		return "", ""
+		return "", "", 0
 	}
 
 	// Inside 2nd arg [start, end]?
 	// (Assumes no trailing spaces.)
 	start := end - len(parts[2])
 	if !(start <= offset && offset <= end) {
-		return "", ""
+		return "", "", 0
 	}
 	linkname := parts[2]
 
 	// Split the pkg path from the name.
 	dot := strings.LastIndexByte(linkname, '.')
 	if dot < 0 {
-		return "", ""
+		return "", "", 0
 	}
-	return linkname[:dot], linkname[dot+1:]
+
+	return linkname[:dot], linkname[dot+1:], start
 }
 
 // findLinknameAtOffset returns the first linkname directive on line and its end offset.
@@ -80,9 +94,17 @@
 		for _, com := range grp.List {
 			if strings.HasPrefix(com.Text, "//go:linkname") {
 				p := safetoken.Position(pgf.Tok, com.Pos())
-				end := p.Offset + len(com.Text)
+
+				// Sometimes source code (typically tests) has another
+				// comment after the directive; trim that away.
+				text := com.Text
+				if i := strings.LastIndex(text, "//"); i != 0 {
+					text = strings.TrimSpace(text[:i])
+				}
+
+				end := p.Offset + len(text)
 				if p.Offset <= offset && offset < end {
-					return com.Text, end
+					return text, end
 				}
 			}
 		}
@@ -92,14 +114,14 @@
 
 // findLinkname searches dependencies of packages containing fh for an object
 // with linker name matching the given package path and name.
-func findLinkname(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position, pkgPath PackagePath, name string) ([]protocol.Location, error) {
+func findLinkname(ctx context.Context, snapshot Snapshot, pkgPath PackagePath, name string) (Package, *ParsedGoFile, token.Pos, error) {
 	// Typically the linkname refers to a forward dependency
 	// or a reverse dependency, but in general it may refer
-	// to any package in the workspace.
+	// to any package that is linked with this one.
 	var pkgMeta *Metadata
 	metas, err := snapshot.AllMetadata(ctx)
 	if err != nil {
-		return nil, err
+		return nil, nil, token.NoPos, err
 	}
 	RemoveIntermediateTestVariants(&metas)
 	for _, meta := range metas {
@@ -109,29 +131,26 @@
 		}
 	}
 	if pkgMeta == nil {
-		return nil, fmt.Errorf("cannot find package %q", pkgPath)
+		return nil, nil, token.NoPos, fmt.Errorf("cannot find package %q", pkgPath)
 	}
 
 	// When found, type check the desired package (snapshot.TypeCheck in TypecheckFull mode),
 	pkgs, err := snapshot.TypeCheck(ctx, pkgMeta.ID)
 	if err != nil {
-		return nil, err
+		return nil, nil, token.NoPos, err
 	}
 	pkg := pkgs[0]
 
 	obj := pkg.GetTypes().Scope().Lookup(name)
 	if obj == nil {
-		return nil, fmt.Errorf("package %q does not define %s", pkgPath, name)
+		return nil, nil, token.NoPos, fmt.Errorf("package %q does not define %s", pkgPath, name)
 	}
 
 	objURI := safetoken.StartPosition(pkg.FileSet(), obj.Pos())
 	pgf, err := pkg.File(span.URIFromPath(objURI.Filename))
 	if err != nil {
-		return nil, err
+		return nil, nil, token.NoPos, err
 	}
-	loc, err := pgf.PosLocation(obj.Pos(), obj.Pos()+token.Pos(len(name)))
-	if err != nil {
-		return nil, err
-	}
-	return []protocol.Location{loc}, nil
+
+	return pkg, pgf, obj.Pos(), nil
 }
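
As an illustrative example of the data the reworked functions now pass around (the directive and names below are only for illustration), consider a file such as:

	package fastclock

	import _ "unsafe" // required for go:linkname

	//go:linkname nanotime runtime.nanotime
	func nanotime() int64

With the cursor anywhere inside "runtime.nanotime", parseLinkname reports pkgPath "runtime", name "nanotime", and the byte offset at which "runtime.nanotime" begins in the file. findLinkname then type-checks the runtime package and returns its Package, the ParsedGoFile declaring nanotime, and that object's token.Pos; LinknameDefinition turns this into a single Location, and the new hover path uses the same data to hover over the target while highlighting the directive's argument.
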
diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go
index 3357495..2ca8895 100644
--- a/gopls/internal/lsp/source/options.go
+++ b/gopls/internal/lsp/source/options.go
@@ -143,6 +143,7 @@
 						ImportShortcut: BothShortcuts,
 						SymbolMatcher:  SymbolFastFuzzy,
 						SymbolStyle:    DynamicSymbols,
+						SymbolScope:    AllSymbolScope,
 					},
 					CompletionOptions: CompletionOptions{
 						Matcher:                        Fuzzy,
@@ -454,6 +455,13 @@
 	// }
 	// ```
 	SymbolStyle SymbolStyle `status:"advanced"`
+
+	// SymbolScope controls which packages are searched for workspace/symbol
+	// requests. The default value, "all", searches all loaded packages,
+	// including dependencies; this is more expensive and may return
+	// unwanted results. The "workspace" value restricts the search to
+	// packages in the workspace.
+	SymbolScope SymbolScope
 }
 
 // UserOptions holds custom Gopls configuration (not part of the LSP) that is
@@ -617,6 +625,8 @@
 	CaseSensitive   Matcher = "CaseSensitive"
 )
 
+// A SymbolMatcher controls the matching of symbols for workspace/symbol
+// requests.
 type SymbolMatcher string
 
 const (
@@ -626,6 +636,7 @@
 	SymbolCaseSensitive   SymbolMatcher = "CaseSensitive"
 )
 
+// A SymbolStyle controls the formatting of symbols in workspace/symbol results.
 type SymbolStyle string
 
 const (
@@ -642,6 +653,17 @@
 	DynamicSymbols SymbolStyle = "Dynamic"
 )
 
+// A SymbolScope controls the search scope for workspace/symbol requests.
+type SymbolScope string
+
+const (
+	// WorkspaceSymbolScope matches symbols in workspace packages only.
+	WorkspaceSymbolScope SymbolScope = "workspace"
+	// AllSymbolScope matches symbols in any loaded package, including
+	// dependencies.
+	AllSymbolScope SymbolScope = "all"
+)
+
 type HoverKind string
 
 const (
@@ -969,6 +991,14 @@
 			o.SymbolStyle = SymbolStyle(s)
 		}
 
+	case "symbolScope":
+		if s, ok := result.asOneOf(
+			string(WorkspaceSymbolScope),
+			string(AllSymbolScope),
+		); ok {
+			o.SymbolScope = SymbolScope(s)
+		}
+
 	case "hoverKind":
 		if s, ok := result.asOneOf(
 			string(NoDocumentation),
@@ -1402,6 +1432,11 @@
 			Fix:        StubMethods,
 			Enabled:    true,
 		},
+		infertypeargs.Analyzer.Name: {
+			Analyzer:   infertypeargs.Analyzer,
+			Enabled:    true,
+			ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
+		},
 	}
 }
 
@@ -1445,7 +1480,6 @@
 		unusedparams.Analyzer.Name:     {Analyzer: unusedparams.Analyzer, Enabled: false},
 		unusedwrite.Analyzer.Name:      {Analyzer: unusedwrite.Analyzer, Enabled: false},
 		useany.Analyzer.Name:           {Analyzer: useany.Analyzer, Enabled: false},
-		infertypeargs.Analyzer.Name:    {Analyzer: infertypeargs.Analyzer, Enabled: true},
 		embeddirective.Analyzer.Name:   {Analyzer: embeddirective.Analyzer, Enabled: true},
 		timeformat.Analyzer.Name:       {Analyzer: timeformat.Analyzer, Enabled: true},
 
diff --git a/gopls/internal/lsp/source/references.go b/gopls/internal/lsp/source/references.go
index 75e3a61..939f01a 100644
--- a/gopls/internal/lsp/source/references.go
+++ b/gopls/internal/lsp/source/references.go
@@ -25,11 +25,11 @@
 
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/tools/go/types/objectpath"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/lsp/source/methodsets"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 )
 
@@ -246,14 +246,14 @@
 
 	// nil, error, error.Error, iota, or other built-in?
 	if obj.Pkg() == nil {
-		// For some reason, existing tests require that iota has no references,
-		// nor an error. TODO(adonovan): do something more principled.
-		if obj.Name() == "iota" {
-			return nil, nil
-		}
-
 		return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name())
 	}
+	if !obj.Pos().IsValid() {
+		if obj.Pkg().Path() != "unsafe" {
+			bug.Reportf("references: object %v has no position", obj)
+		}
+		return nil, fmt.Errorf("references to unsafe.%s are not supported", obj.Name())
+	}
 
 	// Find metadata of all packages containing the object's defining file.
 	// This may include the query pkg, and possibly other variants.
@@ -432,7 +432,7 @@
 				targets[obj] = true
 			}
 
-			return localReferences(pkg, targets, report)
+			return localReferences(pkg, targets, true, report)
 		})
 	}
 
@@ -453,11 +453,18 @@
 			for objpath := range globalTargets[pkg.Metadata().PkgPath] {
 				obj, err := objectpath.Object(pkg.GetTypes(), objpath)
 				if err != nil {
-					return err // can't happen?
+					// No such object, because it was
+					// declared only in the test variant.
+					continue
 				}
 				targets[obj] = true
 			}
-			return localReferences(pkg, targets, report)
+
+			// Don't include corresponding types or methods
+			// since expansions did that already, and we don't
+			// want (e.g.) concrete -> interface -> concrete.
+			const correspond = false
+			return localReferences(pkg, targets, correspond, report)
 		})
 	}
 
@@ -550,18 +557,22 @@
 	return group.Wait()
 }
 
-// localReferences traverses syntax and reports each reference to one of the target objects.
-func localReferences(pkg Package, targets map[types.Object]bool, report func(loc protocol.Location, isDecl bool)) error {
-	// If we're searching for references to a method, broaden the
-	// search to include references to corresponding methods of
-	// mutually assignable receiver types.
+// localReferences traverses syntax and reports each reference to one
+// of the target objects, or (if correspond is set) an object that
+// corresponds to one of them via interface satisfaction.
+func localReferences(pkg Package, targets map[types.Object]bool, correspond bool, report func(loc protocol.Location, isDecl bool)) error {
+// If we're searching for references to a method, optionally
+	// broaden the search to include references to corresponding
+	// methods of mutually assignable receiver types.
 	// (We use a slice, but objectsAt never returns >1 methods.)
 	var methodRecvs []types.Type
 	var methodName string // name of an arbitrary target, iff a method
-	for obj := range targets {
-		if t := effectiveReceiver(obj); t != nil {
-			methodRecvs = append(methodRecvs, t)
-			methodName = obj.Name()
+	if correspond {
+		for obj := range targets {
+			if t := effectiveReceiver(obj); t != nil {
+				methodRecvs = append(methodRecvs, t)
+				methodName = obj.Name()
+			}
 		}
 	}
 
diff --git a/gopls/internal/lsp/source/rename.go b/gopls/internal/lsp/source/rename.go
index 34a8278..60fb48d 100644
--- a/gopls/internal/lsp/source/rename.go
+++ b/gopls/internal/lsp/source/rename.go
@@ -59,10 +59,10 @@
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/go/types/objectpath"
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/diff"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/typeparams"
@@ -718,8 +718,8 @@
 // directory.
 //
 // It updates package clauses and import paths for the renamed package as well
-// as any other packages affected by the directory renaming among packages
-// described by allMetadata.
+// as any other packages affected by the directory renaming among all packages
+// known to the snapshot.
 func renamePackage(ctx context.Context, s Snapshot, f FileHandle, newName PackageName) (map[span.URI][]diff.Edit, error) {
 	if strings.HasSuffix(string(newName), "_test") {
 		return nil, fmt.Errorf("cannot rename to _test package")
diff --git a/gopls/internal/lsp/source/rename_check.go b/gopls/internal/lsp/source/rename_check.go
index a858bb7..7affb76 100644
--- a/gopls/internal/lsp/source/rename_check.go
+++ b/gopls/internal/lsp/source/rename_check.go
@@ -398,63 +398,63 @@
 // checkStructField checks that the field renaming will not cause
 // conflicts at its declaration, or ambiguity or changes to any selection.
 func (r *renamer) checkStructField(from *types.Var) {
-	// Check that the struct declaration is free of field conflicts,
-	// and field/method conflicts.
 
+	// If this is the declaring package, check that the struct
+	// declaration is free of field/field and field/method
+	// conflicts.
+	//
 	// go/types offers no easy way to get from a field (or interface
 	// method) to its declaring struct (or interface), so we must
 	// ascend the AST.
-	pgf, ok := enclosingFile(r.pkg, from.Pos())
-	if !ok {
-		return // not declared by syntax of this package
-	}
-	path, _ := astutil.PathEnclosingInterval(pgf.File, from.Pos(), from.Pos())
-	// path matches this pattern:
-	// [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]
+	if pgf, ok := enclosingFile(r.pkg, from.Pos()); ok {
+		path, _ := astutil.PathEnclosingInterval(pgf.File, from.Pos(), from.Pos())
+		// path matches this pattern:
+		// [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]
 
-	// Ascend to FieldList.
-	var i int
-	for {
-		if _, ok := path[i].(*ast.FieldList); ok {
-			break
+		// Ascend to FieldList.
+		var i int
+		for {
+			if _, ok := path[i].(*ast.FieldList); ok {
+				break
+			}
+			i++
 		}
 		i++
-	}
-	i++
-	tStruct := path[i].(*ast.StructType)
-	i++
-	// Ascend past parens (unlikely).
-	for {
-		_, ok := path[i].(*ast.ParenExpr)
-		if !ok {
-			break
-		}
+		tStruct := path[i].(*ast.StructType)
 		i++
-	}
-	if spec, ok := path[i].(*ast.TypeSpec); ok {
-		// This struct is also a named type.
-		// We must check for direct (non-promoted) field/field
-		// and method/field conflicts.
-		named := r.pkg.GetTypesInfo().Defs[spec.Name].Type()
-		prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.GetTypes(), r.to)
-		if len(indices) == 1 {
-			r.errorf(from.Pos(), "renaming this field %q to %q",
-				from.Name(), r.to)
-			r.errorf(prev.Pos(), "\twould conflict with this %s",
-				objectKind(prev))
-			return // skip checkSelections to avoid redundant errors
+		// Ascend past parens (unlikely).
+		for {
+			_, ok := path[i].(*ast.ParenExpr)
+			if !ok {
+				break
+			}
+			i++
 		}
-	} else {
-		// This struct is not a named type.
-		// We need only check for direct (non-promoted) field/field conflicts.
-		T := r.pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct)
-		for i := 0; i < T.NumFields(); i++ {
-			if prev := T.Field(i); prev.Name() == r.to {
+		if spec, ok := path[i].(*ast.TypeSpec); ok {
+			// This struct is also a named type.
+			// We must check for direct (non-promoted) field/field
+			// and method/field conflicts.
+			named := r.pkg.GetTypesInfo().Defs[spec.Name].Type()
+			prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.GetTypes(), r.to)
+			if len(indices) == 1 {
 				r.errorf(from.Pos(), "renaming this field %q to %q",
 					from.Name(), r.to)
-				r.errorf(prev.Pos(), "\twould conflict with this field")
+				r.errorf(prev.Pos(), "\twould conflict with this %s",
+					objectKind(prev))
 				return // skip checkSelections to avoid redundant errors
 			}
+		} else {
+			// This struct is not a named type.
+			// We need only check for direct (non-promoted) field/field conflicts.
+			T := r.pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct)
+			for i := 0; i < T.NumFields(); i++ {
+				if prev := T.Field(i); prev.Name() == r.to {
+					r.errorf(from.Pos(), "renaming this field %q to %q",
+						from.Name(), r.to)
+					r.errorf(prev.Pos(), "\twould conflict with this field")
+					return // skip checkSelections to avoid redundant errors
+				}
+			}
 		}
 	}
 
diff --git a/gopls/internal/lsp/source/stub.go b/gopls/internal/lsp/source/stub.go
index 3b9c063..b7b2292 100644
--- a/gopls/internal/lsp/source/stub.go
+++ b/gopls/internal/lsp/source/stub.go
@@ -18,10 +18,10 @@
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
 )
diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/lsp/source/types_format.go
index 1c9f361..d6fdfe2 100644
--- a/gopls/internal/lsp/source/types_format.go
+++ b/gopls/internal/lsp/source/types_format.go
@@ -15,8 +15,8 @@
 	"go/types"
 	"strings"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 	"golang.org/x/tools/internal/tokeninternal"
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
index ec5940f..cbb1780 100644
--- a/gopls/internal/lsp/source/util.go
+++ b/gopls/internal/lsp/source/util.go
@@ -16,10 +16,10 @@
 	"strconv"
 	"strings"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
 )
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index de69852..6dd3811 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -63,11 +63,6 @@
 	// on behalf of this snapshot.
 	BackgroundContext() context.Context
 
-	// ValidBuildConfiguration returns true if there is some error in the
-	// user's workspace. In particular, if they are both outside of a module
-	// and their GOPATH.
-	ValidBuildConfiguration() bool
-
 	// A Snapshot is a caching implementation of FileSource whose
 	// ReadFile method returns consistent information about the existence
 	// and content of each file throughout its lifetime.
@@ -117,7 +112,7 @@
 
 	// RunProcessEnvFunc runs fn with the process env for this snapshot's view.
 	// Note: the process env contains cached module and filesystem state.
-	RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error) error
+	RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error
 
 	// ModFiles are the go.mod files enclosed in the snapshot's view and known
 	// to the snapshot.
@@ -151,7 +146,7 @@
 	BuiltinFile(ctx context.Context) (*ParsedGoFile, error)
 
 	// IsBuiltin reports whether uri is part of the builtin package.
-	IsBuiltin(ctx context.Context, uri span.URI) bool
+	IsBuiltin(uri span.URI) bool
 
 	// CriticalError returns any critical errors in the workspace.
 	//
@@ -159,7 +154,10 @@
 	CriticalError(ctx context.Context) *CriticalError
 
 	// Symbols returns all symbols in the snapshot.
-	Symbols(ctx context.Context) (map[span.URI][]Symbol, error)
+	//
+	// If workspaceOnly is set, this only includes symbols from files in a
+	// workspace package. Otherwise, it returns symbols from all loaded packages.
+	Symbols(ctx context.Context, workspaceOnly bool) (map[span.URI][]Symbol, error)
 
 	// -- package metadata --
 
@@ -172,9 +170,23 @@
 	// WorkspaceMetadata returns a new, unordered slice containing
 	// metadata for all ordinary and test packages (but not
 	// intermediate test variants) in the workspace.
+	//
+	// The workspace is the set of modules typically defined by a
+	// go.work file. It is not transitively closed: for example,
+	// the standard library is not usually part of the workspace
+	// even though every module in the workspace depends on it.
+	//
+	// Operations that must inspect all the dependencies of the
+	// workspace packages should instead use AllMetadata.
 	WorkspaceMetadata(ctx context.Context) ([]*Metadata, error)
 
-	// AllMetadata returns a new unordered array of metadata for all packages in the workspace.
+	// AllMetadata returns a new unordered array of metadata for
+	// all packages known to this snapshot, which includes the
+	// packages of all workspace modules plus their transitive
+	// import dependencies.
+	//
+	// It may also contain ad-hoc packages for standalone files.
+	// It includes all test variants.
 	AllMetadata(ctx context.Context) ([]*Metadata, error)
 
 	// Metadata returns the metadata for the specified package,
@@ -190,6 +202,12 @@
 	// It returns an error if the context was cancelled.
 	MetadataForFile(ctx context.Context, uri span.URI) ([]*Metadata, error)
 
+	// OrphanedFileDiagnostics reports diagnostics for files that have no package
+	// association, or whose only associations are command-line-arguments packages.
+	//
+	// The caller must not mutate the result.
+	OrphanedFileDiagnostics(ctx context.Context) (map[span.URI]*Diagnostic, error)
+
 	// -- package type-checking --
 
 	// TypeCheck parses and type-checks the specified packages,
@@ -513,21 +531,29 @@
 
 // Metadata represents package metadata retrieved from go/packages.
 // The Deps* maps do not contain self-import edges.
+//
+// An ad-hoc package (without go.mod or GOPATH) has its ID, PkgPath,
+// and LoadDir equal to the absolute path of its directory.
 type Metadata struct {
-	ID              PackageID
-	PkgPath         PackagePath
-	Name            PackageName
+	ID      PackageID
+	PkgPath PackagePath
+	Name    PackageName
+
+	// These three fields are as defined by go/packages.Package.
 	GoFiles         []span.URI
 	CompiledGoFiles []span.URI
-	ForTest         PackagePath // package path under test, or ""
-	TypesSizes      types.Sizes
-	Errors          []packages.Error          // must be set for packages in import cycles
-	DepsByImpPath   map[ImportPath]PackageID  // may contain dups; empty ID => missing
-	DepsByPkgPath   map[PackagePath]PackageID // values are unique and non-empty
-	Module          *packages.Module
-	DepsErrors      []*packagesinternal.PackageError
-	Diagnostics     []*Diagnostic // processed diagnostics from 'go list'
-	LoadDir         string        // directory from which go/packages was run
+	IgnoredFiles    []span.URI
+
+	ForTest       PackagePath // package path under test, or ""
+	TypesSizes    types.Sizes
+	Errors        []packages.Error          // must be set for packages in import cycles
+	DepsByImpPath map[ImportPath]PackageID  // may contain dups; empty ID => missing
+	DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty
+	Module        *packages.Module
+	DepsErrors    []*packagesinternal.PackageError
+	Diagnostics   []*Diagnostic // processed diagnostics from 'go list'
+	LoadDir       string        // directory from which go/packages was run
+	Standalone    bool          // package synthesized for a standalone file (e.g. ignore-tagged)
 }
 
 func (m *Metadata) String() string { return string(m.ID) }
@@ -539,7 +565,7 @@
 // import metadata (DepsBy{Imp,Pkg}Path).
 //
 // Such test variants arise when an x_test package (in this case net/url_test)
-// imports a package (in this case net/http) that itself imports the the
+// imports a package (in this case net/http) that itself imports the
 // non-x_test package (in this case net/url).
 //
 // This is done so that the forward transitive closure of net/url_test has
diff --git a/gopls/internal/lsp/source/workspace_symbol.go b/gopls/internal/lsp/source/workspace_symbol.go
index a0ffe3f..bf92c77 100644
--- a/gopls/internal/lsp/source/workspace_symbol.go
+++ b/gopls/internal/lsp/source/workspace_symbol.go
@@ -317,10 +317,16 @@
 		filters := v.Options().DirectoryFilters
 		filterer := NewFilterer(filters)
 		folder := filepath.ToSlash(v.Folder().Filename())
-		symbols, err := snapshot.Symbols(ctx)
+
+		workspaceOnly := true
+		if v.Options().SymbolScope == AllSymbolScope {
+			workspaceOnly = false
+		}
+		symbols, err := snapshot.Symbols(ctx, workspaceOnly)
 		if err != nil {
 			return nil, err
 		}
+
 		for uri, syms := range symbols {
 			norm := filepath.ToSlash(uri.Filename())
 			nm := strings.TrimPrefix(norm, folder)
@@ -478,7 +484,10 @@
 			// every field or method nesting level to access the field decreases
 			// the score by a factor of 1.0 - depth*depthFactor, up to a depth of
 			// 3.
-			depthFactor = 0.2
+			//
+			// Use a small constant here, as this exists mostly to break ties
+			// (e.g. given a type Foo and a field x.Foo, prefer Foo).
+			depthFactor = 0.01
 		)
 
 		startWord := true
@@ -496,6 +505,8 @@
 			}
 		}
 
+		// TODO(rfindley): use metadata to determine if the file is in a workspace
+		// package, rather than this heuristic.
 		inWorkspace := false
 		for _, root := range roots {
 			if strings.HasPrefix(string(i.uri), root) {
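
A small, hypothetical helper (gopls applies the factor inline rather than via such a function) makes the effect of the new constant concrete:

	package example

	// depthPenalty mirrors the scoring comment above: each field/method nesting
	// level needed to reach a symbol scales its score by (1 - depth*depthFactor),
	// with depth capped at 3.
	func depthPenalty(depth int, depthFactor float64) float64 {
		if depth > 3 {
			depth = 3
		}
		return 1 - float64(depth)*depthFactor
	}

With the old factor of 0.2, a depth-1 symbol such as the field x.Foo kept only 80% of its score and a depth-3 symbol only 40%; with 0.01 they keep 99% and 97%, so nesting now merely breaks ties between otherwise equal matches.
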
diff --git a/gopls/internal/lsp/testdata/analyzer/bad_test.go b/gopls/internal/lsp/testdata/analyzer/bad_test.go
deleted file mode 100644
index b1724c6..0000000
--- a/gopls/internal/lsp/testdata/analyzer/bad_test.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package analyzer
-
-import (
-	"fmt"
-	"sync"
-	"testing"
-	"time"
-)
-
-func Testbad(t *testing.T) { //@diag("", "tests", "Testbad has malformed name: first letter after 'Test' must not be lowercase", "warning")
-	var x sync.Mutex
-	_ = x //@diag("x", "copylocks", "assignment copies lock value to _: sync.Mutex", "warning")
-
-	printfWrapper("%s") //@diag(re`printfWrapper\(.*\)`, "printf", "golang.org/lsptests/analyzer.printfWrapper format %s reads arg #1, but call has 0 args", "warning")
-}
-
-func printfWrapper(format string, args ...interface{}) {
-	fmt.Printf(format, args...)
-}
-
-func _() {
-	now := time.Now()
-	fmt.Println(now.Format("2006-02-01")) //@diag("2006-02-01", "timeformat", "2006-02-01 should be 2006-01-02", "warning")
-}
diff --git a/gopls/internal/lsp/testdata/bad/bad0_go121.go b/gopls/internal/lsp/testdata/bad/bad0_go121.go
index 01bea17..c4f4ecc 100644
--- a/gopls/internal/lsp/testdata/bad/bad0_go121.go
+++ b/gopls/internal/lsp/testdata/bad/bad0_go121.go
@@ -5,7 +5,7 @@
 
 // TODO(matloob): uncomment this and remove the space between the // and the @diag
 // once the changes that produce the new go list error are submitted.
-// import _ "golang.org/lsptests/assign/internal/secret" // @diag("\"golang.org/lsptests/assign/internal/secret\"", "compiler", "could not import golang.org/lsptests/assign/internal/secret \\(invalid use of internal package \"golang.org/lsptests/assign/internal/secret\"\\)", "error"),diag("_", "go list", "use of internal package golang.org/lsptests/assign/internal/secret not allowed", "error")
+import _ "golang.org/lsptests/assign/internal/secret" //@diag("\"golang.org/lsptests/assign/internal/secret\"", "compiler", "could not import golang.org/lsptests/assign/internal/secret \\(invalid use of internal package \"golang.org/lsptests/assign/internal/secret\"\\)", "error"),diag("_", "go list", "use of internal package golang.org/lsptests/assign/internal/secret not allowed", "error")
 
 func stuff() { //@item(stuff, "stuff", "func()", "func")
 	x := "heeeeyyyy"
diff --git a/gopls/internal/lsp/testdata/badstmt/badstmt.go.in b/gopls/internal/lsp/testdata/badstmt/badstmt.go.in
index 81aee20..3b8f9e0 100644
--- a/gopls/internal/lsp/testdata/badstmt/badstmt.go.in
+++ b/gopls/internal/lsp/testdata/badstmt/badstmt.go.in
@@ -4,13 +4,12 @@
 	"golang.org/lsptests/foo"
 )
 
-// The nonewvars expectation asserts that the go/analysis framework ran.
-// See comments in noparse.
+// (The syntax error causes suppression of diagnostics for type errors.
+// See issue #59888.)
 
 func _(x int) {
 	defer foo.F //@complete(" //", Foo),diag(" //", "syntax", "function must be invoked in defer statement|expression in defer must be function call", "error")
 	defer foo.F //@complete(" //", Foo)
-	x := 123 //@diag(":=", "nonewvars", "no new variables", "warning")
 }
 
 func _() {
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go
deleted file mode 100644
index 63d24df..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package extract
-
-func _() {
-	a := 1
-	a = 5     //@mark(exSt0, "a")
-	a = a + 2 //@mark(exEn0, "2")
-	//@extractfunc(exSt0, exEn0)
-	b := a * 2 //@mark(exB, "	b")
-	_ = 3 + 4  //@mark(exEnd, "4")
-	//@extractfunc(exB, exEnd)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go.golden
deleted file mode 100644
index b15345e..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_args_returns.go.golden
+++ /dev/null
@@ -1,37 +0,0 @@
--- functionextraction_extract_args_returns_5_2 --
-package extract
-
-func _() {
-	a := 1
-	//@mark(exSt0, "a")
-	a = newFunction(a) //@mark(exEn0, "2")
-	//@extractfunc(exSt0, exEn0)
-	b := a * 2 //@mark(exB, "	b")
-	_ = 3 + 4  //@mark(exEnd, "4")
-	//@extractfunc(exB, exEnd)
-}
-
-func newFunction(a int) int {
-	a = 5
-	a = a + 2
-	return a
-}
-
--- functionextraction_extract_args_returns_8_1 --
-package extract
-
-func _() {
-	a := 1
-	a = 5     //@mark(exSt0, "a")
-	a = a + 2 //@mark(exEn0, "2")
-	//@extractfunc(exSt0, exEn0)
-	//@mark(exB, "	b")
-	newFunction(a)  //@mark(exEnd, "4")
-	//@extractfunc(exB, exEnd)
-}
-
-func newFunction(a int) {
-	b := a * 2
-	_ = 3 + 4
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go
deleted file mode 100644
index 5e44de2..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package extract
-
-func _() { //@mark(exSt25, "{")
-	a := 1    //@mark(exSt1, "a")
-	_ = 3 + 4 //@mark(exEn1, "4")
-	//@extractfunc(exSt1, exEn1)
-	//@extractfunc(exSt25, exEn25)
-} //@mark(exEn25, "}")
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden
deleted file mode 100644
index 18adc4d..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden
+++ /dev/null
@@ -1,30 +0,0 @@
--- functionextraction_extract_basic_3_10 --
-package extract
-
-func _() { //@mark(exSt25, "{")
-	//@mark(exSt1, "a")
-	newFunction() //@mark(exEn1, "4")
-	//@extractfunc(exSt1, exEn1)
-	//@extractfunc(exSt25, exEn25)
-}
-
-func newFunction() {
-	a := 1
-	_ = 3 + 4
-} //@mark(exEn25, "}")
-
--- functionextraction_extract_basic_4_2 --
-package extract
-
-func _() { //@mark(exSt25, "{")
-	//@mark(exSt1, "a")
-	newFunction() //@mark(exEn1, "4")
-	//@extractfunc(exSt1, exEn1)
-	//@extractfunc(exSt25, exEn25)
-}
-
-func newFunction() {
-	a := 1
-	_ = 3 + 4
-} //@mark(exEn25, "}")
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go
deleted file mode 100644
index 71f969e..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package extract
-
-func _() {
-	a := /* comment in the middle of a line */ 1 //@mark(exSt18, "a")
-	// Comment on its own line  //@mark(exSt19, "Comment")
-	_ = 3 + 4 //@mark(exEn18, "4"),mark(exEn19, "4"),mark(exSt20, "_")
-	// Comment right after 3 + 4
-
-	// Comment after with space //@mark(exEn20, "Comment")
-
-	//@extractfunc(exSt18, exEn18),extractfunc(exSt19, exEn19),extractfunc(exSt20, exEn20)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go.golden
deleted file mode 100644
index 1b2869e..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_basic_comment.go.golden
+++ /dev/null
@@ -1,57 +0,0 @@
--- functionextraction_extract_basic_comment_4_2 --
-package extract
-
-func _() {
-	/* comment in the middle of a line */
-	//@mark(exSt18, "a")
-	// Comment on its own line  //@mark(exSt19, "Comment")
-	newFunction() //@mark(exEn18, "4"),mark(exEn19, "4"),mark(exSt20, "_")
-	// Comment right after 3 + 4
-
-	// Comment after with space //@mark(exEn20, "Comment")
-
-	//@extractfunc(exSt18, exEn18),extractfunc(exSt19, exEn19),extractfunc(exSt20, exEn20)
-}
-
-func newFunction() {
-	a := 1
-
-	_ = 3 + 4
-}
-
--- functionextraction_extract_basic_comment_5_5 --
-package extract
-
-func _() {
-	a := /* comment in the middle of a line */ 1 //@mark(exSt18, "a")
-	// Comment on its own line  //@mark(exSt19, "Comment")
-	newFunction() //@mark(exEn18, "4"),mark(exEn19, "4"),mark(exSt20, "_")
-	// Comment right after 3 + 4
-
-	// Comment after with space //@mark(exEn20, "Comment")
-
-	//@extractfunc(exSt18, exEn18),extractfunc(exSt19, exEn19),extractfunc(exSt20, exEn20)
-}
-
-func newFunction() {
-	_ = 3 + 4
-}
-
--- functionextraction_extract_basic_comment_6_2 --
-package extract
-
-func _() {
-	a := /* comment in the middle of a line */ 1 //@mark(exSt18, "a")
-	// Comment on its own line  //@mark(exSt19, "Comment")
-	newFunction() //@mark(exEn18, "4"),mark(exEn19, "4"),mark(exSt20, "_")
-	// Comment right after 3 + 4
-
-	// Comment after with space //@mark(exEn20, "Comment")
-
-	//@extractfunc(exSt18, exEn18),extractfunc(exSt19, exEn19),extractfunc(exSt20, exEn20)
-}
-
-func newFunction() {
-	_ = 3 + 4
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go
deleted file mode 100644
index 9713b91..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package extract
-
-import "fmt"
-
-func main() {
-	x := []rune{} //@mark(exSt9, "x")
-	s := "HELLO"
-	for _, c := range s {
-		x = append(x, c)
-	} //@mark(exEn9, "}")
-	//@extractfunc(exSt9, exEn9)
-	fmt.Printf("%x\n", x)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go.golden
deleted file mode 100644
index 3198c9f..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_issue_44813.go.golden
+++ /dev/null
@@ -1,21 +0,0 @@
--- functionextraction_extract_issue_44813_6_2 --
-package extract
-
-import "fmt"
-
-func main() {
-	//@mark(exSt9, "x")
-	x := newFunction() //@mark(exEn9, "}")
-	//@extractfunc(exSt9, exEn9)
-	fmt.Printf("%x\n", x)
-}
-
-func newFunction() []rune {
-	x := []rune{}
-	s := "HELLO"
-	for _, c := range s {
-		x = append(x, c)
-	}
-	return x
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go
deleted file mode 100644
index 604f475..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package extract
-
-import "strconv"
-
-func _() {
-	i, err := strconv.Atoi("1")
-	u, err := strconv.Atoi("2") //@extractfunc("u", ")")
-	if i == u || err == nil {
-		return
-	}
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go.golden
deleted file mode 100644
index e2ee217..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_redefine.go.golden
+++ /dev/null
@@ -1,18 +0,0 @@
--- functionextraction_extract_redefine_7_2 --
-package extract
-
-import "strconv"
-
-func _() {
-	i, err := strconv.Atoi("1")
-	u, err := newFunction() //@extractfunc("u", ")")
-	if i == u || err == nil {
-		return
-	}
-}
-
-func newFunction() (int, error) {
-	u, err := strconv.Atoi("2")
-	return u, err
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go
deleted file mode 100644
index 1ff24da..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package extract
-
-func _() bool {
-	x := 1
-	if x == 0 { //@mark(exSt2, "if")
-		return true
-	} //@mark(exEn2, "}")
-	return false
-	//@extractfunc(exSt2, exEn2)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go.golden
deleted file mode 100644
index 6103d1e..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic.go.golden
+++ /dev/null
@@ -1,21 +0,0 @@
--- functionextraction_extract_return_basic_5_2 --
-package extract
-
-func _() bool {
-	x := 1
-	//@mark(exSt2, "if")
-	shouldReturn, returnValue := newFunction(x)
-	if shouldReturn {
-		return returnValue
-	} //@mark(exEn2, "}")
-	return false
-	//@extractfunc(exSt2, exEn2)
-}
-
-func newFunction(x int) (bool, bool) {
-	if x == 0 {
-		return true, true
-	}
-	return false, false
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go
deleted file mode 100644
index 08573ac..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package extract
-
-func _() bool {
-	x := 1 //@mark(exSt13, "x")
-	if x == 0 {
-		return true
-	}
-	return false //@mark(exEn13, "false")
-	//@extractfunc(exSt13, exEn13)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go.golden
deleted file mode 100644
index 19e48da..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_basic_nonnested.go.golden
+++ /dev/null
@@ -1,17 +0,0 @@
--- functionextraction_extract_return_basic_nonnested_4_2 --
-package extract
-
-func _() bool {
-	//@mark(exSt13, "x")
-	return newFunction() //@mark(exEn13, "false")
-	//@extractfunc(exSt13, exEn13)
-}
-
-func newFunction() bool {
-	x := 1
-	if x == 0 {
-		return true
-	}
-	return false
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go
deleted file mode 100644
index 605c5ec..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package extract
-
-import "fmt"
-
-func _() (int, string, error) {
-	x := 1
-	y := "hello"
-	z := "bye" //@mark(exSt3, "z")
-	if y == z {
-		return x, y, fmt.Errorf("same")
-	} else {
-		z = "hi"
-		return x, z, nil
-	} //@mark(exEn3, "}")
-	return x, z, nil
-	//@extractfunc(exSt3, exEn3)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go.golden
deleted file mode 100644
index 4d20122..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex.go.golden
+++ /dev/null
@@ -1,28 +0,0 @@
--- functionextraction_extract_return_complex_8_2 --
-package extract
-
-import "fmt"
-
-func _() (int, string, error) {
-	x := 1
-	y := "hello"
-	//@mark(exSt3, "z")
-	z, shouldReturn, returnValue, returnValue1, returnValue2 := newFunction(y, x)
-	if shouldReturn {
-		return returnValue, returnValue1, returnValue2
-	} //@mark(exEn3, "}")
-	return x, z, nil
-	//@extractfunc(exSt3, exEn3)
-}
-
-func newFunction(y string, x int) (string, bool, int, string, error) {
-	z := "bye"
-	if y == z {
-		return "", true, x, y, fmt.Errorf("same")
-	} else {
-		z = "hi"
-		return "", true, x, z, nil
-	}
-	return z, false, 0, "", nil
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go
deleted file mode 100644
index 6b2a4d8..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package extract
-
-import "fmt"
-
-func _() (int, string, error) {
-	x := 1
-	y := "hello"
-	z := "bye" //@mark(exSt10, "z")
-	if y == z {
-		return x, y, fmt.Errorf("same")
-	} else {
-		z = "hi"
-		return x, z, nil
-	}
-	return x, z, nil //@mark(exEn10, "nil")
-	//@extractfunc(exSt10, exEn10)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go.golden
deleted file mode 100644
index de54b15..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_complex_nonnested.go.golden
+++ /dev/null
@@ -1,24 +0,0 @@
--- functionextraction_extract_return_complex_nonnested_8_2 --
-package extract
-
-import "fmt"
-
-func _() (int, string, error) {
-	x := 1
-	y := "hello"
-	//@mark(exSt10, "z")
-	return newFunction(y, x) //@mark(exEn10, "nil")
-	//@extractfunc(exSt10, exEn10)
-}
-
-func newFunction(y string, x int) (int, string, error) {
-	z := "bye"
-	if y == z {
-		return x, y, fmt.Errorf("same")
-	} else {
-		z = "hi"
-		return x, z, nil
-	}
-	return x, z, nil
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go
deleted file mode 100644
index b3fb4fd..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package extract
-
-import "go/ast"
-
-func _() {
-	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
-		if n == nil { //@mark(exSt4, "if")
-			return true
-		} //@mark(exEn4, "}")
-		return false
-	})
-	//@extractfunc(exSt4, exEn4)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go.golden
deleted file mode 100644
index 3af747c..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit.go.golden
+++ /dev/null
@@ -1,24 +0,0 @@
--- functionextraction_extract_return_func_lit_7_3 --
-package extract
-
-import "go/ast"
-
-func _() {
-	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
-		//@mark(exSt4, "if")
-		shouldReturn, returnValue := newFunction(n)
-		if shouldReturn {
-			return returnValue
-		} //@mark(exEn4, "}")
-		return false
-	})
-	//@extractfunc(exSt4, exEn4)
-}
-
-func newFunction(n ast.Node) (bool, bool) {
-	if n == nil {
-		return true, true
-	}
-	return false, false
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go
deleted file mode 100644
index c22db2a..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package extract
-
-import "go/ast"
-
-func _() {
-	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
-		if n == nil { //@mark(exSt11, "if")
-			return true
-		}
-		return false //@mark(exEn11, "false")
-	})
-	//@extractfunc(exSt11, exEn11)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go.golden
deleted file mode 100644
index efa22ba..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_func_lit_nonnested.go.golden
+++ /dev/null
@@ -1,20 +0,0 @@
--- functionextraction_extract_return_func_lit_nonnested_7_3 --
-package extract
-
-import "go/ast"
-
-func _() {
-	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
-		//@mark(exSt11, "if")
-		return newFunction(n) //@mark(exEn11, "false")
-	})
-	//@extractfunc(exSt11, exEn11)
-}
-
-func newFunction(n ast.Node) bool {
-	if n == nil {
-		return true
-	}
-	return false
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go
deleted file mode 100644
index c1994c1..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package extract
-
-func _() string {
-	x := 1
-	if x == 0 { //@mark(exSt5, "if")
-		x = 3
-		return "a"
-	} //@mark(exEn5, "}")
-	x = 2
-	return "b"
-	//@extractfunc(exSt5, exEn5)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go.golden
deleted file mode 100644
index 31d1b2d..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init.go.golden
+++ /dev/null
@@ -1,23 +0,0 @@
--- functionextraction_extract_return_init_5_2 --
-package extract
-
-func _() string {
-	x := 1
-	//@mark(exSt5, "if")
-	shouldReturn, returnValue := newFunction(x)
-	if shouldReturn {
-		return returnValue
-	} //@mark(exEn5, "}")
-	x = 2
-	return "b"
-	//@extractfunc(exSt5, exEn5)
-}
-
-func newFunction(x int) (bool, string) {
-	if x == 0 {
-		x = 3
-		return true, "a"
-	}
-	return false, ""
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go
deleted file mode 100644
index bb5ed08..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package extract
-
-func _() string {
-	x := 1
-	if x == 0 { //@mark(exSt12, "if")
-		x = 3
-		return "a"
-	}
-	x = 2
-	return "b" //@mark(exEn12, "\"b\"")
-	//@extractfunc(exSt12, exEn12)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go.golden
deleted file mode 100644
index 58bb573..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_return_init_nonnested.go.golden
+++ /dev/null
@@ -1,19 +0,0 @@
--- functionextraction_extract_return_init_nonnested_5_2 --
-package extract
-
-func _() string {
-	x := 1
-	//@mark(exSt12, "if")
-	return newFunction(x) //@mark(exEn12, "\"b\"")
-	//@extractfunc(exSt12, exEn12)
-}
-
-func newFunction(x int) string {
-	if x == 0 {
-		x = 3
-		return "a"
-	}
-	x = 2
-	return "b"
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go
deleted file mode 100644
index 6cc141f..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package extract
-
-func _() {
-	newFunction := 1
-	a := newFunction //@extractfunc("a", "newFunction")
-}
-
-func newFunction1() int {
-	return 1
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden
deleted file mode 100644
index a4803b4..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden
+++ /dev/null
@@ -1,16 +0,0 @@
--- functionextraction_extract_scope_5_2 --
-package extract
-
-func _() {
-	newFunction := 1
-	newFunction2(newFunction) //@extractfunc("a", "newFunction")
-}
-
-func newFunction2(newFunction int) {
-	a := newFunction
-}
-
-func newFunction1() int {
-	return 1
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go
deleted file mode 100644
index da2c669..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package extract
-
-func _() {
-	var a []int
-	a = append(a, 2) //@mark(exSt6, "a")
-	b := 4           //@mark(exEn6, "4")
-	//@extractfunc(exSt6, exEn6)
-	a = append(a, b)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go.golden
deleted file mode 100644
index 8be5040..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_initialization.go.golden
+++ /dev/null
@@ -1,17 +0,0 @@
--- functionextraction_extract_smart_initialization_5_2 --
-package extract
-
-func _() {
-	var a []int
-	//@mark(exSt6, "a")
-	a, b := newFunction(a)           //@mark(exEn6, "4")
-	//@extractfunc(exSt6, exEn6)
-	a = append(a, b)
-}
-
-func newFunction(a []int) ([]int, int) {
-	a = append(a, 2)
-	b := 4
-	return a, b
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go
deleted file mode 100644
index 264d680..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package extract
-
-func _() {
-	var b []int
-	var a int
-	a = 2 //@mark(exSt7, "a")
-	b = []int{}
-	b = append(b, a) //@mark(exEn7, ")")
-	b[0] = 1
-	//@extractfunc(exSt7, exEn7)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go.golden
deleted file mode 100644
index fdf55ae..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_smart_return.go.golden
+++ /dev/null
@@ -1,19 +0,0 @@
--- functionextraction_extract_smart_return_6_2 --
-package extract
-
-func _() {
-	var b []int
-	var a int
-	//@mark(exSt7, "a")
-	b = newFunction(a, b) //@mark(exEn7, ")")
-	b[0] = 1
-	//@extractfunc(exSt7, exEn7)
-}
-
-func newFunction(a int, b []int) []int {
-	a = 2
-	b = []int{}
-	b = append(b, a)
-	return b
-}
-
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go b/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go
deleted file mode 100644
index a6eb1f8..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package extract
-
-func _() {
-	var b []int
-	var a int
-	a := 2 //@mark(exSt8, "a")
-	b = []int{}
-	b = append(b, a) //@mark(exEn8, ")")
-	b[0] = 1
-	if a == 2 {
-		return
-	}
-	//@extractfunc(exSt8, exEn8)
-}
diff --git a/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go.golden b/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go.golden
deleted file mode 100644
index 4374f37..0000000
--- a/gopls/internal/lsp/testdata/extract/extract_function/extract_unnecessary_param.go.golden
+++ /dev/null
@@ -1,22 +0,0 @@
--- functionextraction_extract_unnecessary_param_6_2 --
-package extract
-
-func _() {
-	var b []int
-	var a int
-	//@mark(exSt8, "a")
-	a, b = newFunction(b) //@mark(exEn8, ")")
-	b[0] = 1
-	if a == 2 {
-		return
-	}
-	//@extractfunc(exSt8, exEn8)
-}
-
-func newFunction(b []int) (int, []int) {
-	a := 2
-	b = []int{}
-	b = append(b, a)
-	return a, b
-}
-
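Note: the extract-function testdata removed above uses the legacy marker convention: a pair of //@mark annotations bounds the selection, //@extractfunc(start, end) requests the code action, and the expected rewrite is stored in the matching .go.golden file under a functionextraction_<name>_<line>_<column> section. A minimal sketch of an input file in that style, with hypothetical marker names, would look like:

	package extract

	func _() bool {
		x := 1
		if x == 0 { //@mark(exStart, "if")
			return true
		} //@mark(exEnd, "}")
		return false
		//@extractfunc(exStart, exEnd)
	}

The corresponding golden would then replace the marked range with a call to a generated newFunction helper, as in the goldens above.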
diff --git a/gopls/internal/lsp/testdata/format/bad_format.go.golden b/gopls/internal/lsp/testdata/format/bad_format.go.golden
deleted file mode 100644
index f0c24d6..0000000
--- a/gopls/internal/lsp/testdata/format/bad_format.go.golden
+++ /dev/null
@@ -1,21 +0,0 @@
--- gofmt --
-package format //@format("package")
-
-import (
-	"fmt"
-	"log"
-	"runtime"
-)
-
-func hello() {
-
-	var x int //@diag("x", "compiler", "x declared (and|but) not used", "error")
-}
-
-func hi() {
-	runtime.GOROOT()
-	fmt.Printf("")
-
-	log.Printf("")
-}
-
diff --git a/gopls/internal/lsp/testdata/format/bad_format.go.in b/gopls/internal/lsp/testdata/format/bad_format.go.in
deleted file mode 100644
index 995ec39..0000000
--- a/gopls/internal/lsp/testdata/format/bad_format.go.in
+++ /dev/null
@@ -1,22 +0,0 @@
-package format //@format("package")
-
-import (
-	"runtime"
-	"fmt"
-	"log"
-)
-
-func hello() {
-
-
-
-
-	var x int //@diag("x", "compiler", "x declared (and|but) not used", "error")
-}
-
-func hi() {
-	runtime.GOROOT()
-	fmt.Printf("")
-
-	log.Printf("")
-}
diff --git a/gopls/internal/lsp/testdata/format/good_format.go b/gopls/internal/lsp/testdata/format/good_format.go
deleted file mode 100644
index 01cb161..0000000
--- a/gopls/internal/lsp/testdata/format/good_format.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package format //@format("package")
-
-import (
-	"log"
-)
-
-func goodbye() {
-	log.Printf("byeeeee")
-}
diff --git a/gopls/internal/lsp/testdata/format/good_format.go.golden b/gopls/internal/lsp/testdata/format/good_format.go.golden
deleted file mode 100644
index 99f47e2..0000000
--- a/gopls/internal/lsp/testdata/format/good_format.go.golden
+++ /dev/null
@@ -1,11 +0,0 @@
--- gofmt --
-package format //@format("package")
-
-import (
-	"log"
-)
-
-func goodbye() {
-	log.Printf("byeeeee")
-}
-
diff --git a/gopls/internal/lsp/testdata/format/newline_format.go.golden b/gopls/internal/lsp/testdata/format/newline_format.go.golden
deleted file mode 100644
index 7c76afd..0000000
--- a/gopls/internal/lsp/testdata/format/newline_format.go.golden
+++ /dev/null
@@ -1,4 +0,0 @@
--- gofmt --
-package format //@format("package")
-func _()       {}
-
diff --git a/gopls/internal/lsp/testdata/format/newline_format.go.in b/gopls/internal/lsp/testdata/format/newline_format.go.in
deleted file mode 100644
index fe597b9..0000000
--- a/gopls/internal/lsp/testdata/format/newline_format.go.in
+++ /dev/null
@@ -1,2 +0,0 @@
-package format //@format("package")
-func _() {}
\ No newline at end of file
diff --git a/gopls/internal/lsp/testdata/format/one_line.go.golden b/gopls/internal/lsp/testdata/format/one_line.go.golden
deleted file mode 100644
index 4d11f84..0000000
--- a/gopls/internal/lsp/testdata/format/one_line.go.golden
+++ /dev/null
@@ -1,3 +0,0 @@
--- gofmt --
-package format //@format("package")
-
diff --git a/gopls/internal/lsp/testdata/format/one_line.go.in b/gopls/internal/lsp/testdata/format/one_line.go.in
deleted file mode 100644
index 30f4137..0000000
--- a/gopls/internal/lsp/testdata/format/one_line.go.in
+++ /dev/null
@@ -1 +0,0 @@
-package format //@format("package")
\ No newline at end of file
diff --git a/gopls/internal/lsp/testdata/generated/generated.go b/gopls/internal/lsp/testdata/generated/generated.go
deleted file mode 100644
index c7adc18..0000000
--- a/gopls/internal/lsp/testdata/generated/generated.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package generated
-
-// Code generated by generator.go. DO NOT EDIT.
-
-func _() {
-	var y int //@diag("y", "compiler", "y declared (and|but) not used", "error")
-}
diff --git a/gopls/internal/lsp/testdata/generated/generator.go b/gopls/internal/lsp/testdata/generated/generator.go
deleted file mode 100644
index 8e2a4fa..0000000
--- a/gopls/internal/lsp/testdata/generated/generator.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package generated
-
-func _() {
-	var x int //@diag("x", "compiler", "x declared (and|but) not used", "error")
-}
diff --git a/gopls/internal/lsp/testdata/good/good0.go b/gopls/internal/lsp/testdata/good/good0.go
index 89450a8..666171b 100644
--- a/gopls/internal/lsp/testdata/good/good0.go
+++ b/gopls/internal/lsp/testdata/good/good0.go
@@ -1,4 +1,4 @@
-package good //@diag("package", "no_diagnostics", "", "error")
+package good
 
 func stuff() { //@item(good_stuff, "stuff", "func()", "func"),prepare("stu", "stuff", "stuff")
 	x := 5
diff --git a/gopls/internal/lsp/testdata/good/good1.go b/gopls/internal/lsp/testdata/good/good1.go
index 624d814..7d39629 100644
--- a/gopls/internal/lsp/testdata/good/good1.go
+++ b/gopls/internal/lsp/testdata/good/good1.go
@@ -1,4 +1,4 @@
-package good //@diag("package", "no_diagnostics", "", "error")
+package good
 
 import (
 	"golang.org/lsptests/types" //@item(types_import, "types", "\"golang.org/lsptests/types\"", "package")
diff --git a/gopls/internal/lsp/testdata/imports/add_import.go.golden b/gopls/internal/lsp/testdata/imports/add_import.go.golden
deleted file mode 100644
index 16af110..0000000
--- a/gopls/internal/lsp/testdata/imports/add_import.go.golden
+++ /dev/null
@@ -1,13 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-import (
-	"bytes"
-	"fmt"
-)
-
-func _() {
-	fmt.Println("")
-	bytes.NewBuffer(nil)
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/add_import.go.in b/gopls/internal/lsp/testdata/imports/add_import.go.in
deleted file mode 100644
index 7928e6f..0000000
--- a/gopls/internal/lsp/testdata/imports/add_import.go.in
+++ /dev/null
@@ -1,10 +0,0 @@
-package imports //@import("package")
-
-import (
-	"fmt"
-)
-
-func _() {
-	fmt.Println("")
-	bytes.NewBuffer(nil)
-}
diff --git a/gopls/internal/lsp/testdata/imports/good_imports.go.golden b/gopls/internal/lsp/testdata/imports/good_imports.go.golden
deleted file mode 100644
index 2abdae4..0000000
--- a/gopls/internal/lsp/testdata/imports/good_imports.go.golden
+++ /dev/null
@@ -1,9 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-import "fmt"
-
-func _() {
-fmt.Println("")
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/good_imports.go.in b/gopls/internal/lsp/testdata/imports/good_imports.go.in
deleted file mode 100644
index a03c06c..0000000
--- a/gopls/internal/lsp/testdata/imports/good_imports.go.in
+++ /dev/null
@@ -1,7 +0,0 @@
-package imports //@import("package")
-
-import "fmt"
-
-func _() {
-fmt.Println("")
-}
diff --git a/gopls/internal/lsp/testdata/imports/issue35458.go.golden b/gopls/internal/lsp/testdata/imports/issue35458.go.golden
deleted file mode 100644
index f077260..0000000
--- a/gopls/internal/lsp/testdata/imports/issue35458.go.golden
+++ /dev/null
@@ -1,20 +0,0 @@
--- goimports --
-// package doc
-package imports //@import("package")
-
-
-
-
-
-
-func _() {
-	println("Hello, world!")
-}
-
-
-
-
-
-
-
-
diff --git a/gopls/internal/lsp/testdata/imports/issue35458.go.in b/gopls/internal/lsp/testdata/imports/issue35458.go.in
deleted file mode 100644
index 7420c21..0000000
--- a/gopls/internal/lsp/testdata/imports/issue35458.go.in
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
-
-
-// package doc
-package imports //@import("package")
-
-
-
-
-
-
-func _() {
-	println("Hello, world!")
-}
-
-
-
-
-
-
-
diff --git a/gopls/internal/lsp/testdata/imports/multiple_blocks.go.golden b/gopls/internal/lsp/testdata/imports/multiple_blocks.go.golden
deleted file mode 100644
index d37a6c7..0000000
--- a/gopls/internal/lsp/testdata/imports/multiple_blocks.go.golden
+++ /dev/null
@@ -1,9 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-import "fmt"
-
-func _() {
-	fmt.Println("")
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/multiple_blocks.go.in b/gopls/internal/lsp/testdata/imports/multiple_blocks.go.in
deleted file mode 100644
index 3f2fb99..0000000
--- a/gopls/internal/lsp/testdata/imports/multiple_blocks.go.in
+++ /dev/null
@@ -1,9 +0,0 @@
-package imports //@import("package")
-
-import "fmt"
-
-import "bytes"
-
-func _() {
-	fmt.Println("")
-}
diff --git a/gopls/internal/lsp/testdata/imports/needs_imports.go.golden b/gopls/internal/lsp/testdata/imports/needs_imports.go.golden
deleted file mode 100644
index fd60328..0000000
--- a/gopls/internal/lsp/testdata/imports/needs_imports.go.golden
+++ /dev/null
@@ -1,13 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-import (
-	"fmt"
-	"log"
-)
-
-func goodbye() {
-	fmt.Printf("HI")
-	log.Printf("byeeeee")
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/needs_imports.go.in b/gopls/internal/lsp/testdata/imports/needs_imports.go.in
deleted file mode 100644
index 949d56a..0000000
--- a/gopls/internal/lsp/testdata/imports/needs_imports.go.in
+++ /dev/null
@@ -1,6 +0,0 @@
-package imports //@import("package")
-
-func goodbye() {
-	fmt.Printf("HI")
-	log.Printf("byeeeee")
-}
diff --git a/gopls/internal/lsp/testdata/imports/remove_import.go.golden b/gopls/internal/lsp/testdata/imports/remove_import.go.golden
deleted file mode 100644
index 3df8088..0000000
--- a/gopls/internal/lsp/testdata/imports/remove_import.go.golden
+++ /dev/null
@@ -1,11 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-import (
-	"fmt"
-)
-
-func _() {
-	fmt.Println("")
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/remove_import.go.in b/gopls/internal/lsp/testdata/imports/remove_import.go.in
deleted file mode 100644
index 09060ba..0000000
--- a/gopls/internal/lsp/testdata/imports/remove_import.go.in
+++ /dev/null
@@ -1,10 +0,0 @@
-package imports //@import("package")
-
-import (
-	"bytes"
-	"fmt"
-)
-
-func _() {
-	fmt.Println("")
-}
diff --git a/gopls/internal/lsp/testdata/imports/remove_imports.go.golden b/gopls/internal/lsp/testdata/imports/remove_imports.go.golden
deleted file mode 100644
index 530c8c0..0000000
--- a/gopls/internal/lsp/testdata/imports/remove_imports.go.golden
+++ /dev/null
@@ -1,6 +0,0 @@
--- goimports --
-package imports //@import("package")
-
-func _() {
-}
-
diff --git a/gopls/internal/lsp/testdata/imports/remove_imports.go.in b/gopls/internal/lsp/testdata/imports/remove_imports.go.in
deleted file mode 100644
index 44d065f..0000000
--- a/gopls/internal/lsp/testdata/imports/remove_imports.go.in
+++ /dev/null
@@ -1,9 +0,0 @@
-package imports //@import("package")
-
-import (
-	"bytes"
-	"fmt"
-)
-
-func _() {
-}
diff --git a/gopls/internal/lsp/testdata/imports/two_lines.go.golden b/gopls/internal/lsp/testdata/imports/two_lines.go.golden
deleted file mode 100644
index ec118a4..0000000
--- a/gopls/internal/lsp/testdata/imports/two_lines.go.golden
+++ /dev/null
@@ -1,4 +0,0 @@
--- goimports --
-package main
-func main()  {} //@import("main")
-
diff --git a/gopls/internal/lsp/testdata/imports/two_lines.go.in b/gopls/internal/lsp/testdata/imports/two_lines.go.in
deleted file mode 100644
index eee5345..0000000
--- a/gopls/internal/lsp/testdata/imports/two_lines.go.in
+++ /dev/null
@@ -1,2 +0,0 @@
-package main
-func main()  {} //@import("main")
diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean.go b/gopls/internal/lsp/testdata/invertifcondition/boolean.go
new file mode 100644
index 0000000..9a01d98
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/boolean.go
@@ -0,0 +1,14 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func Boolean() {
+	b := true
+	if b { //@suggestedfix("if b", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden b/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden
new file mode 100644
index 0000000..9add599
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_boolean_9_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func Boolean() {
+	b := true
+	if !b {
+		fmt.Println("B")
+	} else { //@suggestedfix("if b", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go
new file mode 100644
index 0000000..3fadab7
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go
@@ -0,0 +1,14 @@
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func BooleanFn() {
+	if os.IsPathSeparator('X') { //@suggestedfix("if os.IsPathSeparator('X')", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden
new file mode 100644
index 0000000..26e8193
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_boolean_fn_9_2 --
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func BooleanFn() {
+	if !os.IsPathSeparator('X') {
+		fmt.Println("B")
+	} else { //@suggestedfix("if os.IsPathSeparator('X')", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go
new file mode 100644
index 0000000..3793cdd
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go
@@ -0,0 +1,16 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func DontRemoveParens() {
+	a := false
+	b := true
+	if !(a ||
+		b) { //@suggestedfix("b", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden
new file mode 100644
index 0000000..a47f1ca
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden
@@ -0,0 +1,18 @@
+-- suggestedfix_dont_remove_parens_11_3 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func DontRemoveParens() {
+	a := false
+	b := true
+	if (a ||
+		b) {
+		fmt.Println("B")
+	} else { //@suggestedfix("b", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/else_if.go b/gopls/internal/lsp/testdata/invertifcondition/else_if.go
new file mode 100644
index 0000000..847225f
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/else_if.go
@@ -0,0 +1,22 @@
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func ElseIf() {
+	// No inversion expected when there's no else clause
+	if len(os.Args) > 2 {
+		fmt.Println("A")
+	}
+
+	// No inversion expected for else-if, as that would become unreadable
+	if len(os.Args) > 2 {
+		fmt.Println("A")
+	} else if os.Args[0] == "X" { //@suggestedfix(re"if os.Args.0. == .X.", "refactor.rewrite", "")
+		fmt.Println("B")
+	} else {
+		fmt.Println("C")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden b/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden
new file mode 100644
index 0000000..bbfb13d
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden
@@ -0,0 +1,24 @@
+-- suggestedfix_else_if_17_9 --
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func ElseIf() {
+	// No inversion expected when there's no else clause
+	if len(os.Args) > 2 {
+		fmt.Println("A")
+	}
+
+	// No inversion expected for else-if, as that would become unreadable
+	if len(os.Args) > 2 {
+		fmt.Println("A")
+	} else if os.Args[0] != "X" {
+		fmt.Println("C")
+	} else { //@suggestedfix(re"if os.Args.0. == .X.", "refactor.rewrite", "")
+		fmt.Println("B")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go
new file mode 100644
index 0000000..41b7e35
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go
@@ -0,0 +1,14 @@
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func GreaterThan() {
+	if len(os.Args) > 2 { //@suggestedfix("i", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden
new file mode 100644
index 0000000..ee87d61
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_greater_than_9_2 --
+package invertifcondition
+
+import (
+	"fmt"
+	"os"
+)
+
+func GreaterThan() {
+	if len(os.Args) <= 2 {
+		fmt.Println("B")
+	} else { //@suggestedfix("i", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go
new file mode 100644
index 0000000..def9716
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go
@@ -0,0 +1,14 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func NotBoolean() {
+	b := true
+	if !b { //@suggestedfix("if !b", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden
new file mode 100644
index 0000000..3dfbf0f
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_not_boolean_9_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func NotBoolean() {
+	b := true
+	if b {
+		fmt.Println("B")
+	} else { //@suggestedfix("if !b", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go
new file mode 100644
index 0000000..a8e39e7
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go
@@ -0,0 +1,16 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func RemoveElse() {
+	if true { //@suggestedfix("if true", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+		return
+	}
+
+	fmt.Println("C")
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden
new file mode 100644
index 0000000..7362d24
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden
@@ -0,0 +1,19 @@
+-- suggestedfix_remove_else_8_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func RemoveElse() {
+	if false {
+		fmt.Println("B")
+		return
+	}
+
+	//@suggestedfix("if true", "refactor.rewrite", "")
+	fmt.Println("A")
+
+	fmt.Println("C")
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go
new file mode 100644
index 0000000..6d3b6ab
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go
@@ -0,0 +1,14 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func RemoveParens() {
+	b := true
+	if !(b) { //@suggestedfix("if", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden
new file mode 100644
index 0000000..62e6ae0
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_remove_parens_9_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func RemoveParens() {
+	b := true
+	if b {
+		fmt.Println("B")
+	} else { //@suggestedfix("if", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go
new file mode 100644
index 0000000..a235893
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go
@@ -0,0 +1,13 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func Semicolon() {
+	if _, err := fmt.Println("x"); err != nil { //@suggestedfix("if", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden
new file mode 100644
index 0000000..25c8930
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden
@@ -0,0 +1,15 @@
+-- suggestedfix_semicolon_8_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func Semicolon() {
+	if _, err := fmt.Println("x"); err == nil {
+		fmt.Println("B")
+	} else { //@suggestedfix("if", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go
new file mode 100644
index 0000000..21fc4f5
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go
@@ -0,0 +1,13 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func SemicolonAnd() {
+	if n, err := fmt.Println("x"); err != nil && n > 0 { //@suggestedfix("f", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden
new file mode 100644
index 0000000..27e6b94
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden
@@ -0,0 +1,15 @@
+-- suggestedfix_semicolon_and_8_3 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func SemicolonAnd() {
+	if n, err := fmt.Println("x"); err == nil || n <= 0 {
+		fmt.Println("B")
+	} else { //@suggestedfix("f", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go
new file mode 100644
index 0000000..53ec3b3
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go
@@ -0,0 +1,13 @@
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func SemicolonOr() {
+	if n, err := fmt.Println("x"); err != nil || n < 5 { //@suggestedfix(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite", "")
+		fmt.Println("A")
+	} else {
+		fmt.Println("B")
+	}
+}
diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden
new file mode 100644
index 0000000..9613526
--- /dev/null
+++ b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden
@@ -0,0 +1,15 @@
+-- suggestedfix_semicolon_or_8_2 --
+package invertifcondition
+
+import (
+	"fmt"
+)
+
+func SemicolonOr() {
+	if n, err := fmt.Println("x"); err == nil && n >= 5 {
+		fmt.Println("B")
+	} else { //@suggestedfix(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite", "")
+		fmt.Println("A")
+	}
+}
+
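Note: the invert-if-condition code action exercised by the new testdata above negates the if condition and swaps the two branches (for example, len(os.Args) > 2 becomes len(os.Args) <= 2, and err != nil || n < 5 becomes err == nil && n >= 5 by De Morgan's laws); when the inverted if body ends in a return, the remaining else block is flattened into the enclosing scope, as remove_else.go.golden shows. A minimal before/after sketch, using hypothetical code rather than the testdata itself:

	package invertexample

	import "fmt"

	// before is a candidate for the refactor.rewrite "invert if condition" fix.
	func before(args []string) {
		if len(args) > 2 {
			fmt.Println("A")
		} else {
			fmt.Println("B")
		}
	}

	// after shows the expected result: condition negated, branches swapped.
	func after(args []string) {
		if len(args) <= 2 {
			fmt.Println("B")
		} else {
			fmt.Println("A")
		}
	}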
diff --git a/gopls/internal/lsp/testdata/noparse/noparse.go.in b/gopls/internal/lsp/testdata/noparse/noparse.go.in
index 8b0bfaa..e69de29 100644
--- a/gopls/internal/lsp/testdata/noparse/noparse.go.in
+++ b/gopls/internal/lsp/testdata/noparse/noparse.go.in
@@ -1,24 +0,0 @@
-package noparse
-
-// The type error was chosen carefully to exercise a type-error analyzer.
-// We use the 'nonewvars' analyzer because the other candidates are tricky:
-//
-// - The 'unusedvariable' analyzer is disabled by default, so it is not
-//   consistently enabled across Test{LSP,CommandLine} tests, which
-//   both process this file.
-// - The 'undeclaredname' analyzer depends on the text of the go/types
-//   "undeclared name" error, which changed in go1.20.
-// - The 'noresultvalues' analyzer produces a diagnostic containing newlines,
-//   which breaks the parser used by TestCommandLine.
-//
-// This comment is all that remains of my afternoon.
-
-func bye(x int) {
-	x := 123 //@diag(":=", "nonewvars", "no new variables", "warning")
-}
-
-func stuff() {
-	
-}
-
-func .() {} //@diag(".", "syntax", "expected 'IDENT', found '.'", "error")
diff --git a/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.golden b/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.golden
deleted file mode 100644
index 0060c5c..0000000
--- a/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.golden
+++ /dev/null
@@ -1,2 +0,0 @@
--- gofmt --
-
diff --git a/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.in b/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.in
deleted file mode 100644
index 311a99a..0000000
--- a/gopls/internal/lsp/testdata/noparse_format/noparse_format.go.in
+++ /dev/null
@@ -1,14 +0,0 @@
-// +build go1.11
-
-package noparse_format //@format("package")
-
-// The nonewvars expectation asserts that the go/analysis framework ran.
-// See comments in badstmt.
-
-func what() {
-	var hi func()
-	if {		hi() //@diag("{", "syntax", "missing condition in if statement", "error")
-	}
-	hi := nil //@diag(":=", "nonewvars", "no new variables", "warning")
-}
-
diff --git a/gopls/internal/lsp/testdata/noparse_format/parse_format.go.golden b/gopls/internal/lsp/testdata/noparse_format/parse_format.go.golden
deleted file mode 100644
index 667c90b..0000000
--- a/gopls/internal/lsp/testdata/noparse_format/parse_format.go.golden
+++ /dev/null
@@ -1,7 +0,0 @@
--- gofmt --
-package noparse_format //@format("package")
-
-func _() {
-	f()
-}
-
diff --git a/gopls/internal/lsp/testdata/noparse_format/parse_format.go.in b/gopls/internal/lsp/testdata/noparse_format/parse_format.go.in
deleted file mode 100644
index 4b98cf8..0000000
--- a/gopls/internal/lsp/testdata/noparse_format/parse_format.go.in
+++ /dev/null
@@ -1,5 +0,0 @@
-package noparse_format //@format("package")
-
-func _() {
-f()
-}
\ No newline at end of file
diff --git a/gopls/internal/lsp/testdata/rundespiteerrors/rundespiteerrors.go b/gopls/internal/lsp/testdata/rundespiteerrors/rundespiteerrors.go
deleted file mode 100644
index 783e9a5..0000000
--- a/gopls/internal/lsp/testdata/rundespiteerrors/rundespiteerrors.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package rundespiteerrors
-
-// This test verifies that analyzers without RunDespiteErrors are not
-// executed on a package containing type errors (see issue #54762).
-func _() {
-	// A type error.
-	_ = 1 + "" //@diag("1", "compiler", "mismatched types|cannot convert", "error")
-
-	// A violation of an analyzer for which RunDespiteErrors=false:
-	// no diagnostic is produced; the diag comment is merely illustrative.
-	for _ = range "" { //diag("for _", "simplifyrange", "simplify range expression", "warning")
-
-	}
-}
diff --git a/gopls/internal/lsp/testdata/summary.txt.golden b/gopls/internal/lsp/testdata/summary.txt.golden
index 9e13d42..c572e26 100644
--- a/gopls/internal/lsp/testdata/summary.txt.golden
+++ b/gopls/internal/lsp/testdata/summary.txt.golden
@@ -8,13 +8,10 @@
 FuzzyCompletionsCount = 8
 RankedCompletionsCount = 164
 CaseSensitiveCompletionsCount = 4
-DiagnosticsCount = 42
+DiagnosticsCount = 23
 FoldingRangesCount = 2
-FormatCount = 6
-ImportCount = 8
 SemanticTokenCount = 3
-SuggestedFixCount = 65
-FunctionExtractionCount = 27
+SuggestedFixCount = 73
 MethodExtractionCount = 6
 DefinitionsCount = 46
 TypeDefinitionsCount = 18
@@ -22,7 +19,6 @@
 InlayHintsCount = 4
 RenamesCount = 41
 PrepareRenamesCount = 7
-WorkspaceSymbolsCount = 20
 SignaturesCount = 33
 LinksCount = 7
 SelectionRangesCount = 3
diff --git a/gopls/internal/lsp/testdata/summary_go1.18.txt.golden b/gopls/internal/lsp/testdata/summary_go1.18.txt.golden
index 184695e..da3b553 100644
--- a/gopls/internal/lsp/testdata/summary_go1.18.txt.golden
+++ b/gopls/internal/lsp/testdata/summary_go1.18.txt.golden
@@ -8,13 +8,10 @@
 FuzzyCompletionsCount = 8
 RankedCompletionsCount = 174
 CaseSensitiveCompletionsCount = 4
-DiagnosticsCount = 42
+DiagnosticsCount = 23
 FoldingRangesCount = 2
-FormatCount = 6
-ImportCount = 8
 SemanticTokenCount = 3
-SuggestedFixCount = 71
-FunctionExtractionCount = 27
+SuggestedFixCount = 79
 MethodExtractionCount = 6
 DefinitionsCount = 46
 TypeDefinitionsCount = 18
@@ -22,7 +19,6 @@
 InlayHintsCount = 5
 RenamesCount = 48
 PrepareRenamesCount = 7
-WorkspaceSymbolsCount = 20
 SignaturesCount = 33
 LinksCount = 7
 SelectionRangesCount = 3
diff --git a/gopls/internal/lsp/testdata/summary_go1.21.txt.golden b/gopls/internal/lsp/testdata/summary_go1.21.txt.golden
index 9e5db38..52fba36 100644
--- a/gopls/internal/lsp/testdata/summary_go1.21.txt.golden
+++ b/gopls/internal/lsp/testdata/summary_go1.21.txt.golden
@@ -8,13 +8,10 @@
 FuzzyCompletionsCount = 8
 RankedCompletionsCount = 174
 CaseSensitiveCompletionsCount = 4
-DiagnosticsCount = 41
+DiagnosticsCount = 24
 FoldingRangesCount = 2
-FormatCount = 6
-ImportCount = 8
 SemanticTokenCount = 3
-SuggestedFixCount = 71
-FunctionExtractionCount = 27
+SuggestedFixCount = 79
 MethodExtractionCount = 6
 DefinitionsCount = 46
 TypeDefinitionsCount = 18
@@ -22,7 +19,6 @@
 InlayHintsCount = 5
 RenamesCount = 48
 PrepareRenamesCount = 7
-WorkspaceSymbolsCount = 20
 SignaturesCount = 33
 LinksCount = 7
 SelectionRangesCount = 3
diff --git a/gopls/internal/lsp/testdata/undeclared/var.go b/gopls/internal/lsp/testdata/undeclared/var.go
deleted file mode 100644
index 3fda582..0000000
--- a/gopls/internal/lsp/testdata/undeclared/var.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package undeclared
-
-func m() int {
-	z, _ := 1+y, 11 //@diag("y", "compiler", "(undeclared name|undefined): y", "error"),suggestedfix("y", "quickfix", "")
-	if 100 < 90 {
-		z = 1
-	} else if 100 > n+2 { //@diag("n", "compiler", "(undeclared name|undefined): n", "error"),suggestedfix("n", "quickfix", "")
-		z = 4
-	}
-	for i < 200 { //@diag("i", "compiler", "(undeclared name|undefined): i", "error"),suggestedfix("i", "quickfix", "")
-	}
-	r() //@diag("r", "compiler", "(undeclared name|undefined): r", "error")
-	return z
-}
diff --git a/gopls/internal/lsp/testdata/undeclared/var.go.golden b/gopls/internal/lsp/testdata/undeclared/var.go.golden
deleted file mode 100644
index de5cbb4..0000000
--- a/gopls/internal/lsp/testdata/undeclared/var.go.golden
+++ /dev/null
@@ -1,51 +0,0 @@
--- suggestedfix_var_10_6 --
-package undeclared
-
-func m() int {
-	z, _ := 1+y, 11 //@diag("y", "compiler", "(undeclared name|undefined): y", "error"),suggestedfix("y", "quickfix", "")
-	if 100 < 90 {
-		z = 1
-	} else if 100 > n+2 { //@diag("n", "compiler", "(undeclared name|undefined): n", "error"),suggestedfix("n", "quickfix", "")
-		z = 4
-	}
-	i := 
-	for i < 200 { //@diag("i", "compiler", "(undeclared name|undefined): i", "error"),suggestedfix("i", "quickfix", "")
-	}
-	r() //@diag("r", "compiler", "(undeclared name|undefined): r", "error")
-	return z
-}
-
--- suggestedfix_var_4_12 --
-package undeclared
-
-func m() int {
-	y := 
-	z, _ := 1+y, 11 //@diag("y", "compiler", "(undeclared name|undefined): y", "error"),suggestedfix("y", "quickfix", "")
-	if 100 < 90 {
-		z = 1
-	} else if 100 > n+2 { //@diag("n", "compiler", "(undeclared name|undefined): n", "error"),suggestedfix("n", "quickfix", "")
-		z = 4
-	}
-	for i < 200 { //@diag("i", "compiler", "(undeclared name|undefined): i", "error"),suggestedfix("i", "quickfix", "")
-	}
-	r() //@diag("r", "compiler", "(undeclared name|undefined): r", "error")
-	return z
-}
-
--- suggestedfix_var_7_18 --
-package undeclared
-
-func m() int {
-	z, _ := 1+y, 11 //@diag("y", "compiler", "(undeclared name|undefined): y", "error"),suggestedfix("y", "quickfix", "")
-	n := 
-	if 100 < 90 {
-		z = 1
-	} else if 100 > n+2 { //@diag("n", "compiler", "(undeclared name|undefined): n", "error"),suggestedfix("n", "quickfix", "")
-		z = 4
-	}
-	for i < 200 { //@diag("i", "compiler", "(undeclared name|undefined): i", "error"),suggestedfix("i", "quickfix", "")
-	}
-	r() //@diag("r", "compiler", "(undeclared name|undefined): r", "error")
-	return z
-}
-
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/a/a.go b/gopls/internal/lsp/testdata/workspacesymbol/a/a.go
deleted file mode 100644
index 4ae9997..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/a/a.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package a
-
-var RandomGopherVariableA = "a"
-
-const RandomGopherConstantA = "a"
-
-const (
-	randomgopherinvariable = iota
-)
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/a/a_test.go b/gopls/internal/lsp/testdata/workspacesymbol/a/a_test.go
deleted file mode 100644
index 0d97c50..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/a/a_test.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package a
-
-var RandomGopherTestVariableA = "a"
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/a/a_x_test.go b/gopls/internal/lsp/testdata/workspacesymbol/a/a_x_test.go
deleted file mode 100644
index 747cd17..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/a/a_x_test.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package a_test
-
-var RandomGopherXTestVariableA = "a"
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/b/b.go b/gopls/internal/lsp/testdata/workspacesymbol/b/b.go
deleted file mode 100644
index b2e2092..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/b/b.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package b
-
-var RandomGopherVariableB = "b"
-
-type RandomGopherStructB struct {
-	Bar int
-}
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/issue44806.go b/gopls/internal/lsp/testdata/workspacesymbol/issue44806.go
deleted file mode 100644
index 6a6e03a..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/issue44806.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package main
-
-type T struct{}
-
-// We should accept all valid receiver syntax when scanning symbols.
-func (*(T)) m1() {}
-func (*T) m2()   {}
-func (T) m3()    {}
-func ((T)) m4()    {}
-func ((*T)) m5()   {}
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/main.go b/gopls/internal/lsp/testdata/workspacesymbol/main.go
deleted file mode 100644
index 36ec8f1..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/main.go
+++ /dev/null
@@ -1,47 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-)
-
-func main() { // function
-	fmt.Println("Hello")
-}
-
-var myvar int // variable
-
-type myType string // basic type
-
-type myDecoder json.Decoder // to use the encoding/json import
-
-func (m *myType) Blahblah() {} // method
-
-type myStruct struct { // struct type
-	myStructField int // struct field
-}
-
-type myInterface interface { // interface
-	DoSomeCoolStuff() string // interface method
-}
-
-type embed struct {
-	myStruct
-
-	nestedStruct struct {
-		nestedField int
-
-		nestedStruct2 struct {
-			int
-		}
-	}
-
-	nestedInterface interface {
-		myInterface
-		nestedMethod()
-	}
-}
-
-func Dunk() int { return 0 }
-
-func dunk() {}
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/p/p.go b/gopls/internal/lsp/testdata/workspacesymbol/p/p.go
deleted file mode 100644
index 409cc35..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/p/p.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package p
-
-const Message = "Hello World." // constant
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/query.go b/gopls/internal/lsp/testdata/workspacesymbol/query.go
deleted file mode 100644
index 883aae2..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/query.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package main
-
-// Contains all of the workspace symbol queries.
-
-// -- Fuzzy matching --
-//@workspacesymbolfuzzy("rgop")
-//@workspacesymbolfuzzy("randoma")
-//@workspacesymbolfuzzy("randomb")
-
-// -- Case sensitive --
-//@workspacesymbolcasesensitive("main.main")
-//@workspacesymbolcasesensitive("p.Message")
-//@workspacesymbolcasesensitive("main.myvar")
-//@workspacesymbolcasesensitive("main.myType")
-//@workspacesymbolcasesensitive("main.myType.Blahblah")
-//@workspacesymbolcasesensitive("main.myStruct")
-//@workspacesymbolcasesensitive("main.myStruct.myStructField")
-//@workspacesymbolcasesensitive("main.myInterface")
-//@workspacesymbolcasesensitive("main.myInterface.DoSomeCoolStuff")
-//@workspacesymbolcasesensitive("main.embed.myStruct")
-//@workspacesymbolcasesensitive("main.embed.nestedStruct.nestedStruct2.int")
-//@workspacesymbolcasesensitive("main.embed.nestedInterface.myInterface")
-//@workspacesymbolcasesensitive("main.embed.nestedInterface.nestedMethod")
-//@workspacesymbolcasesensitive("dunk")
-//@workspacesymbolcasesensitive("Dunk")
-
-// -- Standard --
-//@workspacesymbol("")
-//@workspacesymbol("randomgophervar")
diff --git a/gopls/internal/lsp/testdata/workspacesymbol/query.go.golden b/gopls/internal/lsp/testdata/workspacesymbol/query.go.golden
deleted file mode 100644
index 4c6d470..0000000
--- a/gopls/internal/lsp/testdata/workspacesymbol/query.go.golden
+++ /dev/null
@@ -1,83 +0,0 @@
--- workspace_symbol-caseinsensitive- --
-
-
--- workspace_symbol-caseinsensitive-randomgophervar --
-workspacesymbol/a/a.go:3:5-26 RandomGopherVariableA Variable
-workspacesymbol/b/b.go:3:5-26 RandomGopherVariableB Variable
-
--- workspace_symbol-casesensitive-Dunk --
-workspacesymbol/main.go:45:6-10 Dunk Function
-
--- workspace_symbol-casesensitive-dunk --
-workspacesymbol/main.go:47:6-10 dunk Function
-
--- workspace_symbol-casesensitive-main.embed.myStruct --
-workspacesymbol/main.go:29:2-10 main.embed.myStruct Field
-
--- workspace_symbol-casesensitive-main.embed.nestedInterface.myInterface --
-workspacesymbol/main.go:40:3-14 main.embed.nestedInterface.myInterface Interface
-
--- workspace_symbol-casesensitive-main.embed.nestedInterface.nestedMethod --
-workspacesymbol/main.go:41:3-15 main.embed.nestedInterface.nestedMethod Method
-
--- workspace_symbol-casesensitive-main.embed.nestedStruct.nestedStruct2.int --
-workspacesymbol/main.go:35:4-7 main.embed.nestedStruct.nestedStruct2.int Field
-
--- workspace_symbol-casesensitive-main.main --
-workspacesymbol/main.go:8:6-10 main.main Function
-
--- workspace_symbol-casesensitive-main.myInterface --
-workspacesymbol/main.go:24:6-17 main.myInterface Interface
-workspacesymbol/main.go:25:2-17 main.myInterface.DoSomeCoolStuff Method
-
--- workspace_symbol-casesensitive-main.myInterface.DoSomeCoolStuff --
-workspacesymbol/main.go:25:2-17 main.myInterface.DoSomeCoolStuff Method
-
--- workspace_symbol-casesensitive-main.myStruct --
-workspacesymbol/main.go:20:6-14 main.myStruct Struct
-workspacesymbol/main.go:21:2-15 main.myStruct.myStructField Field
-
--- workspace_symbol-casesensitive-main.myStruct.myStructField --
-workspacesymbol/main.go:21:2-15 main.myStruct.myStructField Field
-
--- workspace_symbol-casesensitive-main.myType --
-workspacesymbol/main.go:14:6-12 main.myType Class
-workspacesymbol/main.go:18:18-26 main.myType.Blahblah Method
-
--- workspace_symbol-casesensitive-main.myType.Blahblah --
-workspacesymbol/main.go:18:18-26 main.myType.Blahblah Method
-
--- workspace_symbol-casesensitive-main.myvar --
-workspacesymbol/main.go:12:5-10 main.myvar Variable
-
--- workspace_symbol-casesensitive-p.Message --
-workspacesymbol/p/p.go:3:7-14 p.Message Constant
-
--- workspace_symbol-fuzzy-randoma --
-workspacesymbol/a/a.go:3:5-26 RandomGopherVariableA Variable
-workspacesymbol/a/a.go:5:7-28 RandomGopherConstantA Constant
-workspacesymbol/a/a.go:8:2-24 randomgopherinvariable Constant
-workspacesymbol/a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
-workspacesymbol/a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
-workspacesymbol/b/b.go:3:5-26 RandomGopherVariableB Variable
-workspacesymbol/b/b.go:6:2-5 RandomGopherStructB.Bar Field
-
--- workspace_symbol-fuzzy-randomb --
-workspacesymbol/a/a.go:3:5-26 RandomGopherVariableA Variable
-workspacesymbol/a/a.go:8:2-24 randomgopherinvariable Constant
-workspacesymbol/a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
-workspacesymbol/a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
-workspacesymbol/b/b.go:3:5-26 RandomGopherVariableB Variable
-workspacesymbol/b/b.go:5:6-25 RandomGopherStructB Struct
-workspacesymbol/b/b.go:6:2-5 RandomGopherStructB.Bar Field
-
--- workspace_symbol-fuzzy-rgop --
-workspacesymbol/a/a.go:3:5-26 RandomGopherVariableA Variable
-workspacesymbol/a/a.go:5:7-28 RandomGopherConstantA Constant
-workspacesymbol/a/a.go:8:2-24 randomgopherinvariable Constant
-workspacesymbol/a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
-workspacesymbol/a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
-workspacesymbol/b/b.go:3:5-26 RandomGopherVariableB Variable
-workspacesymbol/b/b.go:5:6-25 RandomGopherStructB Struct
-workspacesymbol/b/b.go:6:2-5 RandomGopherStructB.Bar Field
-
diff --git a/gopls/internal/lsp/tests/tests.go b/gopls/internal/lsp/tests/tests.go
index 697839c..9ab114e 100644
--- a/gopls/internal/lsp/tests/tests.go
+++ b/gopls/internal/lsp/tests/tests.go
@@ -77,18 +77,14 @@
 type CaseSensitiveCompletions = map[span.Span][]Completion
 type RankCompletions = map[span.Span][]Completion
 type FoldingRanges = []span.Span
-type Formats = []span.Span
-type Imports = []span.Span
 type SemanticTokens = []span.Span
 type SuggestedFixes = map[span.Span][]SuggestedFix
-type FunctionExtractions = map[span.Span]span.Span
 type MethodExtractions = map[span.Span]span.Span
 type Definitions = map[span.Span]Definition
 type Highlights = map[span.Span][]span.Span
 type Renames = map[span.Span]string
 type PrepareRenames = map[span.Span]*source.PrepareItem
 type InlayHints = []span.Span
-type WorkspaceSymbols = map[WorkspaceSymbolsTestType]map[span.URI][]string
 type Signatures = map[span.Span]*protocol.SignatureHelp
 type Links = map[span.URI][]Link
 type AddImport = map[span.URI]string
@@ -109,18 +105,14 @@
 	CaseSensitiveCompletions CaseSensitiveCompletions
 	RankCompletions          RankCompletions
 	FoldingRanges            FoldingRanges
-	Formats                  Formats
-	Imports                  Imports
 	SemanticTokens           SemanticTokens
 	SuggestedFixes           SuggestedFixes
-	FunctionExtractions      FunctionExtractions
 	MethodExtractions        MethodExtractions
 	Definitions              Definitions
 	Highlights               Highlights
 	Renames                  Renames
 	InlayHints               InlayHints
 	PrepareRenames           PrepareRenames
-	WorkspaceSymbols         WorkspaceSymbols
 	Signatures               Signatures
 	Links                    Links
 	AddImport                AddImport
@@ -155,18 +147,14 @@
 	CaseSensitiveCompletion(*testing.T, span.Span, Completion, CompletionItems)
 	RankCompletion(*testing.T, span.Span, Completion, CompletionItems)
 	FoldingRanges(*testing.T, span.Span)
-	Format(*testing.T, span.Span)
-	Import(*testing.T, span.Span)
 	SemanticTokens(*testing.T, span.Span)
 	SuggestedFix(*testing.T, span.Span, []SuggestedFix, int)
-	FunctionExtraction(*testing.T, span.Span, span.Span)
 	MethodExtraction(*testing.T, span.Span, span.Span)
 	Definition(*testing.T, span.Span, Definition)
 	Highlight(*testing.T, span.Span, []span.Span)
 	InlayHints(*testing.T, span.Span)
 	Rename(*testing.T, span.Span, string)
 	PrepareRename(*testing.T, span.Span, *source.PrepareItem)
-	WorkspaceSymbols(*testing.T, span.URI, string, WorkspaceSymbolsTestType)
 	SignatureHelp(*testing.T, span.Span, *protocol.SignatureHelp)
 	Link(*testing.T, span.URI, []Link)
 	AddImport(*testing.T, span.URI, string)
@@ -202,19 +190,6 @@
 	CompletionRank
 )
 
-type WorkspaceSymbolsTestType int
-
-const (
-	// Default runs the standard workspace symbols tests.
-	WorkspaceSymbolsDefault = WorkspaceSymbolsTestType(iota)
-
-	// Fuzzy tests workspace symbols with fuzzy matching.
-	WorkspaceSymbolsFuzzy
-
-	// CaseSensitive tests workspace symbols with case sensitive.
-	WorkspaceSymbolsCaseSensitive
-)
-
 type Completion struct {
 	CompletionItems []token.Pos
 }
@@ -307,9 +282,7 @@
 		Renames:                  make(Renames),
 		PrepareRenames:           make(PrepareRenames),
 		SuggestedFixes:           make(SuggestedFixes),
-		FunctionExtractions:      make(FunctionExtractions),
 		MethodExtractions:        make(MethodExtractions),
-		WorkspaceSymbols:         make(WorkspaceSymbols),
 		Signatures:               make(Signatures),
 		Links:                    make(Links),
 		AddImport:                make(AddImport),
@@ -456,8 +429,6 @@
 		"rank":           datum.collectCompletions(CompletionRank),
 		"snippet":        datum.collectCompletionSnippets,
 		"fold":           datum.collectFoldingRanges,
-		"format":         datum.collectFormats,
-		"import":         datum.collectImports,
 		"semantic":       datum.collectSemanticTokens,
 		"godef":          datum.collectDefinitions,
 		"typdef":         datum.collectTypeDefinitions,
@@ -469,7 +440,6 @@
 		"signature":      datum.collectSignatures,
 		"link":           datum.collectLinks,
 		"suggestedfix":   datum.collectSuggestedFixes,
-		"extractfunc":    datum.collectFunctionExtractions,
 		"extractmethod":  datum.collectMethodExtractions,
 		"incomingcalls":  datum.collectIncomingCalls,
 		"outgoingcalls":  datum.collectOutgoingCalls,
@@ -481,11 +451,8 @@
 
 	// Collect names for the entries that require golden files.
 	if err := datum.Exported.Expect(map[string]interface{}{
-		"godef":                        datum.collectDefinitionNames,
-		"hoverdef":                     datum.collectDefinitionNames,
-		"workspacesymbol":              datum.collectWorkspaceSymbols(WorkspaceSymbolsDefault),
-		"workspacesymbolfuzzy":         datum.collectWorkspaceSymbols(WorkspaceSymbolsFuzzy),
-		"workspacesymbolcasesensitive": datum.collectWorkspaceSymbols(WorkspaceSymbolsCaseSensitive),
+		"godef":    datum.collectDefinitionNames,
+		"hoverdef": datum.collectDefinitionNames,
 	}); err != nil {
 		t.Fatal(err)
 	}
@@ -653,26 +620,6 @@
 		}
 	})
 
-	t.Run("Format", func(t *testing.T) {
-		t.Helper()
-		for _, spn := range data.Formats {
-			t.Run(uriName(spn.URI()), func(t *testing.T) {
-				t.Helper()
-				tests.Format(t, spn)
-			})
-		}
-	})
-
-	t.Run("Import", func(t *testing.T) {
-		t.Helper()
-		for _, spn := range data.Imports {
-			t.Run(uriName(spn.URI()), func(t *testing.T) {
-				t.Helper()
-				tests.Import(t, spn)
-			})
-		}
-	})
-
 	t.Run("SemanticTokens", func(t *testing.T) {
 		t.Helper()
 		for _, spn := range data.SemanticTokens {
@@ -697,20 +644,6 @@
 		}
 	})
 
-	t.Run("FunctionExtraction", func(t *testing.T) {
-		t.Helper()
-		for start, end := range data.FunctionExtractions {
-			// Check if we should skip this spn if the -modfile flag is not available.
-			if shouldSkip(data, start.URI()) {
-				continue
-			}
-			t.Run(SpanName(start), func(t *testing.T) {
-				t.Helper()
-				tests.FunctionExtraction(t, start, end)
-			})
-		}
-	})
-
 	t.Run("MethodExtraction", func(t *testing.T) {
 		t.Helper()
 		for start, end := range data.MethodExtractions {
@@ -778,30 +711,6 @@
 		}
 	})
 
-	t.Run("WorkspaceSymbols", func(t *testing.T) {
-		t.Helper()
-
-		for _, typ := range []WorkspaceSymbolsTestType{
-			WorkspaceSymbolsDefault,
-			WorkspaceSymbolsCaseSensitive,
-			WorkspaceSymbolsFuzzy,
-		} {
-			for uri, cases := range data.WorkspaceSymbols[typ] {
-				for _, query := range cases {
-					name := query
-					if name == "" {
-						name = "EmptyQuery"
-					}
-					t.Run(name, func(t *testing.T) {
-						t.Helper()
-						tests.WorkspaceSymbols(t, uri, query, typ)
-					})
-				}
-			}
-		}
-
-	})
-
 	t.Run("SignatureHelp", func(t *testing.T) {
 		t.Helper()
 		for spn, expectedSignature := range data.Signatures {
@@ -906,15 +815,6 @@
 		return count
 	}
 
-	countWorkspaceSymbols := func(c map[WorkspaceSymbolsTestType]map[span.URI][]string) (count int) {
-		for _, typs := range c {
-			for _, queries := range typs {
-				count += len(queries)
-			}
-		}
-		return count
-	}
-
 	fmt.Fprintf(buf, "CallHierarchyCount = %v\n", len(data.CallHierarchy))
 	fmt.Fprintf(buf, "CodeLensCount = %v\n", countCodeLens(data.CodeLens))
 	fmt.Fprintf(buf, "CompletionsCount = %v\n", countCompletions(data.Completions))
@@ -926,11 +826,8 @@
 	fmt.Fprintf(buf, "CaseSensitiveCompletionsCount = %v\n", countCompletions(data.CaseSensitiveCompletions))
 	fmt.Fprintf(buf, "DiagnosticsCount = %v\n", diagnosticsCount)
 	fmt.Fprintf(buf, "FoldingRangesCount = %v\n", len(data.FoldingRanges))
-	fmt.Fprintf(buf, "FormatCount = %v\n", len(data.Formats))
-	fmt.Fprintf(buf, "ImportCount = %v\n", len(data.Imports))
 	fmt.Fprintf(buf, "SemanticTokenCount = %v\n", len(data.SemanticTokens))
 	fmt.Fprintf(buf, "SuggestedFixCount = %v\n", len(data.SuggestedFixes))
-	fmt.Fprintf(buf, "FunctionExtractionCount = %v\n", len(data.FunctionExtractions))
 	fmt.Fprintf(buf, "MethodExtractionCount = %v\n", len(data.MethodExtractions))
 	fmt.Fprintf(buf, "DefinitionsCount = %v\n", definitionCount)
 	fmt.Fprintf(buf, "TypeDefinitionsCount = %v\n", typeDefinitionCount)
@@ -938,7 +835,6 @@
 	fmt.Fprintf(buf, "InlayHintsCount = %v\n", len(data.InlayHints))
 	fmt.Fprintf(buf, "RenamesCount = %v\n", len(data.Renames))
 	fmt.Fprintf(buf, "PrepareRenamesCount = %v\n", len(data.PrepareRenames))
-	fmt.Fprintf(buf, "WorkspaceSymbolsCount = %v\n", countWorkspaceSymbols(data.WorkspaceSymbols))
 	fmt.Fprintf(buf, "SignaturesCount = %v\n", len(data.Signatures))
 	fmt.Fprintf(buf, "LinksCount = %v\n", linksCount)
 	fmt.Fprintf(buf, "SelectionRangesCount = %v\n", len(data.SelectionRanges))
@@ -1104,14 +1000,6 @@
 	data.FoldingRanges = append(data.FoldingRanges, spn)
 }
 
-func (data *Data) collectFormats(spn span.Span) {
-	data.Formats = append(data.Formats, spn)
-}
-
-func (data *Data) collectImports(spn span.Span) {
-	data.Imports = append(data.Imports, spn)
-}
-
 func (data *Data) collectAddImports(spn span.Span, imp string) {
 	data.AddImport[spn.URI()] = imp
 }
@@ -1124,12 +1012,6 @@
 	data.SuggestedFixes[spn] = append(data.SuggestedFixes[spn], SuggestedFix{actionKind, fix})
 }
 
-func (data *Data) collectFunctionExtractions(start span.Span, end span.Span) {
-	if _, ok := data.FunctionExtractions[start]; !ok {
-		data.FunctionExtractions[start] = end
-	}
-}
-
 func (data *Data) collectMethodExtractions(start span.Span, end span.Span) {
 	if _, ok := data.MethodExtractions[start]; !ok {
 		data.MethodExtractions[start] = end
@@ -1233,17 +1115,6 @@
 	return rng
 }
 
-func (data *Data) collectWorkspaceSymbols(typ WorkspaceSymbolsTestType) func(*expect.Note, string) {
-	return func(note *expect.Note, query string) {
-		if data.WorkspaceSymbols[typ] == nil {
-			data.WorkspaceSymbols[typ] = make(map[span.URI][]string)
-		}
-		pos := safetoken.StartPosition(data.Exported.ExpectFileSet, note.Pos)
-		uri := span.URIFromPath(pos.Filename)
-		data.WorkspaceSymbols[typ][uri] = append(data.WorkspaceSymbols[typ][uri], query)
-	}
-}
-
 func (data *Data) collectSignatures(spn span.Span, signature string, activeParam int64) {
 	data.Signatures[spn] = &protocol.SignatureHelp{
 		Signatures: []protocol.SignatureInformation{
diff --git a/gopls/internal/lsp/tests/util.go b/gopls/internal/lsp/tests/util.go
index deadfa8..b8da2c1 100644
--- a/gopls/internal/lsp/tests/util.go
+++ b/gopls/internal/lsp/tests/util.go
@@ -6,11 +6,9 @@
 
 import (
 	"bytes"
-	"context"
 	"fmt"
 	"go/token"
 	"path"
-	"path/filepath"
 	"regexp"
 	"sort"
 	"strconv"
@@ -95,17 +93,11 @@
 }
 
 // CompareDiagnostics reports testing errors to t when the diagnostic set got
-// does not match want. If the sole expectation has source "no_diagnostics",
-// the test expects that no diagnostics were received for the given document.
+// does not match want.
 func CompareDiagnostics(t *testing.T, uri span.URI, want, got []*source.Diagnostic) {
 	t.Helper()
 	fileName := path.Base(string(uri))
 
-	// A special case to test that there are no diagnostics for a file.
-	if len(want) == 1 && want[0].Source == "no_diagnostics" {
-		want = nil
-	}
-
 	// Build a helper function to match an actual diagnostic to an overlapping
 	// expected diagnostic (if any).
 	unmatched := make([]*source.Diagnostic, len(want))
@@ -484,37 +476,3 @@
 		opts.Hints[name] = true
 	}
 }
-
-func WorkspaceSymbolsString(ctx context.Context, data *Data, queryURI span.URI, symbols []protocol.SymbolInformation) (string, error) {
-	queryDir := filepath.Dir(queryURI.Filename())
-	var filtered []string
-	for _, s := range symbols {
-		uri := s.Location.URI.SpanURI()
-		dir := filepath.Dir(uri.Filename())
-		if !source.InDir(queryDir, dir) { // assume queries always issue from higher directories
-			continue
-		}
-		m, err := data.Mapper(uri)
-		if err != nil {
-			return "", err
-		}
-		spn, err := m.LocationSpan(s.Location)
-		if err != nil {
-			return "", err
-		}
-		filtered = append(filtered, fmt.Sprintf("%s %s %s", spn, s.Name, s.Kind))
-	}
-	sort.Strings(filtered)
-	return strings.Join(filtered, "\n") + "\n", nil
-}
-
-func WorkspaceSymbolsTestTypeToMatcher(typ WorkspaceSymbolsTestType) source.SymbolMatcher {
-	switch typ {
-	case WorkspaceSymbolsFuzzy:
-		return source.SymbolFuzzy
-	case WorkspaceSymbolsCaseSensitive:
-		return source.SymbolCaseSensitive
-	default:
-		return source.SymbolCaseInsensitive
-	}
-}
diff --git a/gopls/internal/lsp/workspace.go b/gopls/internal/lsp/workspace.go
index 53cdcac..818135e 100644
--- a/gopls/internal/lsp/workspace.go
+++ b/gopls/internal/lsp/workspace.go
@@ -17,7 +17,7 @@
 func (s *Server) didChangeWorkspaceFolders(ctx context.Context, params *protocol.DidChangeWorkspaceFoldersParams) error {
 	event := params.Event
 	for _, folder := range event.Removed {
-		view := s.session.View(folder.Name)
+		view := s.session.ViewByName(folder.Name)
 		if view != nil {
 			s.session.RemoveView(view)
 		} else {
diff --git a/gopls/internal/regtest/bench/bench_test.go b/gopls/internal/regtest/bench/bench_test.go
index 9b50905..28eec27 100644
--- a/gopls/internal/regtest/bench/bench_test.go
+++ b/gopls/internal/regtest/bench/bench_test.go
@@ -17,10 +17,10 @@
 	"testing"
 	"time"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp/cmd"
 	"golang.org/x/tools/gopls/internal/lsp/fake"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/fakenet"
 	"golang.org/x/tools/internal/jsonrpc2"
diff --git a/gopls/internal/regtest/bench/definition_test.go b/gopls/internal/regtest/bench/definition_test.go
index a3e68f5..f73bcb0 100644
--- a/gopls/internal/regtest/bench/definition_test.go
+++ b/gopls/internal/regtest/bench/definition_test.go
@@ -15,6 +15,7 @@
 		regexp string
 	}{
 		{"istio", "pkg/config/model.go", `gogotypes\.(MarshalAny)`},
+		{"google-cloud-go", "httpreplay/httpreplay.go", `proxy\.(ForRecording)`},
 		{"kubernetes", "pkg/controller/lookup_cache.go", `hashutil\.(DeepHashObject)`},
 		{"kuma", "api/generic/insights.go", `proto\.(Message)`},
 		{"pkgsite", "internal/log/log.go", `derrors\.(Wrap)`},
diff --git a/gopls/internal/regtest/bench/didchange_test.go b/gopls/internal/regtest/bench/didchange_test.go
index da51c08..6bde10e 100644
--- a/gopls/internal/regtest/bench/didchange_test.go
+++ b/gopls/internal/regtest/bench/didchange_test.go
@@ -23,6 +23,7 @@
 	repo string
 	file string
 }{
+	{"google-cloud-go", "httpreplay/httpreplay.go"},
 	{"istio", "pkg/fuzz/util.go"},
 	{"kubernetes", "pkg/controller/lookup_cache.go"},
 	{"kuma", "api/generic/insights.go"},
diff --git a/gopls/internal/regtest/bench/hover_test.go b/gopls/internal/regtest/bench/hover_test.go
index e89e03b..afc1b3c 100644
--- a/gopls/internal/regtest/bench/hover_test.go
+++ b/gopls/internal/regtest/bench/hover_test.go
@@ -14,6 +14,7 @@
 		file   string
 		regexp string
 	}{
+		{"google-cloud-go", "httpreplay/httpreplay.go", `proxy\.(ForRecording)`},
 		{"istio", "pkg/config/model.go", `gogotypes\.(MarshalAny)`},
 		{"kubernetes", "pkg/apis/core/types.go", "type (Pod)"},
 		{"kuma", "api/generic/insights.go", `proto\.(Message)`},
diff --git a/gopls/internal/regtest/bench/implementations_test.go b/gopls/internal/regtest/bench/implementations_test.go
index 219f42a..ff64e8b 100644
--- a/gopls/internal/regtest/bench/implementations_test.go
+++ b/gopls/internal/regtest/bench/implementations_test.go
@@ -12,6 +12,7 @@
 		file   string
 		regexp string
 	}{
+		{"google-cloud-go", "httpreplay/httpreplay.go", `type (Recorder)`},
 		{"istio", "pkg/config/mesh/watcher.go", `type (Watcher)`},
 		{"kubernetes", "pkg/controller/lookup_cache.go", `objectWithMeta`},
 		{"kuma", "api/generic/insights.go", `type (Insight)`},
diff --git a/gopls/internal/regtest/bench/iwl_test.go b/gopls/internal/regtest/bench/iwl_test.go
index 32bfa9b..c4a2d0f 100644
--- a/gopls/internal/regtest/bench/iwl_test.go
+++ b/gopls/internal/regtest/bench/iwl_test.go
@@ -20,12 +20,13 @@
 		repo string
 		file string
 	}{
-		{"tools", "internal/lsp/cache/snapshot.go"},
+		{"google-cloud-go", "httpreplay/httpreplay.go"},
+		{"istio", "pkg/fuzz/util.go"},
 		{"kubernetes", "pkg/controller/lookup_cache.go"},
+		{"kuma", "api/generic/insights.go"},
 		{"pkgsite", "internal/frontend/server.go"},
 		{"starlark", "starlark/eval.go"},
-		{"istio", "pkg/fuzz/util.go"},
-		{"kuma", "api/generic/insights.go"},
+		{"tools", "internal/lsp/cache/snapshot.go"},
 	}
 
 	for _, test := range tests {
diff --git a/gopls/internal/regtest/bench/references_test.go b/gopls/internal/regtest/bench/references_test.go
index d47ea56..099d9bd 100644
--- a/gopls/internal/regtest/bench/references_test.go
+++ b/gopls/internal/regtest/bench/references_test.go
@@ -12,6 +12,7 @@
 		file   string
 		regexp string
 	}{
+		{"google-cloud-go", "httpreplay/httpreplay.go", `func (NewRecorder)`},
 		{"istio", "pkg/config/model.go", "type (Meta)"},
 		{"kubernetes", "pkg/controller/lookup_cache.go", "type (objectWithMeta)"},
 		{"kuma", "pkg/events/interfaces.go", "type (Event)"},
diff --git a/gopls/internal/regtest/bench/rename_test.go b/gopls/internal/regtest/bench/rename_test.go
index bd1ce94..ebb3482 100644
--- a/gopls/internal/regtest/bench/rename_test.go
+++ b/gopls/internal/regtest/bench/rename_test.go
@@ -16,9 +16,10 @@
 		regexp   string
 		baseName string
 	}{
+		{"google-cloud-go", "httpreplay/httpreplay.go", `func (NewRecorder)`, "NewRecorder"},
+		{"istio", "pkg/config/model.go", `(Namespace) string`, "Namespace"},
 		{"kubernetes", "pkg/controller/lookup_cache.go", `hashutil\.(DeepHashObject)`, "DeepHashObject"},
 		{"kuma", "pkg/events/interfaces.go", `Delete`, "Delete"},
-		{"istio", "pkg/config/model.go", `(Namespace) string`, "Namespace"},
 		{"pkgsite", "internal/log/log.go", `func (Infof)`, "Infof"},
 		{"starlark", "starlark/eval.go", `Program\) (Filename)`, "Filename"},
 		{"tools", "internal/lsp/cache/snapshot.go", `meta \*(metadataGraph)`, "metadataGraph"},
diff --git a/gopls/internal/regtest/bench/repo_test.go b/gopls/internal/regtest/bench/repo_test.go
index 7b238f5..0b92b12 100644
--- a/gopls/internal/regtest/bench/repo_test.go
+++ b/gopls/internal/regtest/bench/repo_test.go
@@ -26,6 +26,14 @@
 // These repos were selected to represent a variety of different types of
 // codebases.
 var repos = map[string]*repo{
+	// google-cloud-go has 145 workspace modules (!), and is quite large.
+	"google-cloud-go": {
+		name:   "google-cloud-go",
+		url:    "https://github.com/googleapis/google-cloud-go.git",
+		commit: "07da765765218debf83148cc7ed8a36d6e8921d5",
+		inDir:  flag.String("cloud_go_dir", "", "if set, reuse this directory as google-cloud-go@07da7657"),
+	},
+
 	// Used by x/benchmarks; large.
 	"istio": {
 		name:   "istio",
diff --git a/gopls/internal/regtest/codelens/codelens_test.go b/gopls/internal/regtest/codelens/codelens_test.go
index 79b5df0..68f2982 100644
--- a/gopls/internal/regtest/codelens/codelens_test.go
+++ b/gopls/internal/regtest/codelens/codelens_test.go
@@ -8,10 +8,10 @@
 	"fmt"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
 	"golang.org/x/tools/gopls/internal/lsp/tests/compare"
-	"golang.org/x/tools/internal/bug"
 
 	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
diff --git a/gopls/internal/regtest/codelens/gcdetails_test.go b/gopls/internal/regtest/codelens/gcdetails_test.go
index e0642d6..ebb0249 100644
--- a/gopls/internal/regtest/codelens/gcdetails_test.go
+++ b/gopls/internal/regtest/codelens/gcdetails_test.go
@@ -9,11 +9,11 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/lsp/command"
 	"golang.org/x/tools/gopls/internal/lsp/fake"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 )
 
 func TestGCDetails_Toggle(t *testing.T) {
diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go
index 81addba..872bdc2 100644
--- a/gopls/internal/regtest/completion/completion_test.go
+++ b/gopls/internal/regtest/completion/completion_test.go
@@ -10,9 +10,9 @@
 	"testing"
 
 	"github.com/google/go-cmp/cmp"
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/testenv"
 
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
diff --git a/gopls/internal/regtest/debug/debug_test.go b/gopls/internal/regtest/debug/debug_test.go
index f8efb8f..dc39f81 100644
--- a/gopls/internal/regtest/debug/debug_test.go
+++ b/gopls/internal/regtest/debug/debug_test.go
@@ -7,9 +7,9 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 )
 
 func TestMain(m *testing.M) {
@@ -24,7 +24,7 @@
 		Settings{"showBugReports": true},
 	).Run(t, "", func(t *testing.T, env *Env) {
 		const desc = "got a bug"
-		bug.Report(desc, nil)
+		bug.Report(desc)
 		env.Await(ShownMessage(desc))
 	})
 }
diff --git a/gopls/internal/regtest/diagnostics/diagnostics_test.go b/gopls/internal/regtest/diagnostics/diagnostics_test.go
index 38d6f4a..f8e59a0 100644
--- a/gopls/internal/regtest/diagnostics/diagnostics_test.go
+++ b/gopls/internal/regtest/diagnostics/diagnostics_test.go
@@ -10,12 +10,12 @@
 	"os/exec"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp"
 	"golang.org/x/tools/gopls/internal/lsp/fake"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/testenv"
 )
 
@@ -299,7 +299,7 @@
 				InitialWorkspaceLoad,
 				Diagnostics(env.AtRegexp("main.go", `"mod.com/bob"`)),
 			)
-			if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, true); err != nil {
+			if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil {
 				t.Fatal(err)
 			}
 			env.AfterChange(
diff --git a/gopls/internal/regtest/inlayhints/inlayhints_test.go b/gopls/internal/regtest/inlayhints/inlayhints_test.go
index d4caabe..a4b3764 100644
--- a/gopls/internal/regtest/inlayhints/inlayhints_test.go
+++ b/gopls/internal/regtest/inlayhints/inlayhints_test.go
@@ -6,10 +6,10 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
 	"golang.org/x/tools/gopls/internal/lsp/source"
-	"golang.org/x/tools/internal/bug"
 )
 
 func TestMain(m *testing.M) {
diff --git a/gopls/internal/regtest/marker/marker_test.go b/gopls/internal/regtest/marker/marker_test.go
index ac051a5..41c8e46 100644
--- a/gopls/internal/regtest/marker/marker_test.go
+++ b/gopls/internal/regtest/marker/marker_test.go
@@ -5,11 +5,18 @@
 package marker
 
 import (
+	"os"
 	"testing"
 
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
+	"golang.org/x/tools/internal/testenv"
 )
 
+func TestMain(m *testing.M) {
+	testenv.ExitIfSmallMachine()
+	os.Exit(m.Run())
+}
+
 // Note: we use a separate package for the marker tests so that we can easily
 // compare their performance to the existing marker tests in ./internal/lsp.
 
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/functionextraction.txt b/gopls/internal/regtest/marker/testdata/codeaction/functionextraction.txt
new file mode 100644
index 0000000..d5bd986
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/functionextraction.txt
@@ -0,0 +1,583 @@
+This test verifies various behaviors of function extraction.
+
+-- go.mod --
+module mod.test/extract
+
+go 1.18
+
+-- basic.go --
+package extract
+
+func _() { //@codeaction("refactor.extract", "{", closeBracket, outer)
+	a := 1    //@codeaction("refactor.extract", "a", end, inner)
+	_ = a + 4 //@loc(end, "4")
+} //@loc(closeBracket, "}")
+
+-- @inner/basic.go --
+package extract
+
+func _() { //@codeaction("refactor.extract", "{", closeBracket, outer)
+	//@codeaction("refactor.extract", "a", end, inner)
+	newFunction() //@loc(end, "4")
+}
+
+func newFunction() {
+	a := 1
+	_ = a + 4
+} //@loc(closeBracket, "}")
+
+-- @outer/basic.go --
+package extract
+
+func _() { //@codeaction("refactor.extract", "{", closeBracket, outer)
+	//@codeaction("refactor.extract", "a", end, inner)
+	newFunction() //@loc(end, "4")
+}
+
+func newFunction() {
+	a := 1
+	_ = a + 4
+} //@loc(closeBracket, "}")
+
+-- return.go --
+package extract
+
+func _() bool {
+	x := 1
+	if x == 0 { //@codeaction("refactor.extract", "if", ifend, return)
+		return true
+	} //@loc(ifend, "}")
+	return false
+}
+
+-- @return/return.go --
+package extract
+
+func _() bool {
+	x := 1
+	//@codeaction("refactor.extract", "if", ifend, return)
+	shouldReturn, returnValue := newFunction(x)
+	if shouldReturn {
+		return returnValue
+	} //@loc(ifend, "}")
+	return false
+}
+
+func newFunction(x int) (bool, bool) {
+	if x == 0 {
+		return true, true
+	}
+	return false, false
+}
+
+-- return_nonnested.go --
+package extract
+
+func _() bool {
+	x := 1 //@codeaction("refactor.extract", "x", rnnEnd, rnn)
+	if x == 0 {
+		return true
+	}
+	return false //@loc(rnnEnd, "false")
+}
+
+-- @rnn/return_nonnested.go --
+package extract
+
+func _() bool {
+	//@codeaction("refactor.extract", "x", rnnEnd, rnn)
+	return newFunction() //@loc(rnnEnd, "false")
+}
+
+func newFunction() bool {
+	x := 1
+	if x == 0 {
+		return true
+	}
+	return false
+}
+
+-- return_complex.go --
+package extract
+
+import "fmt"
+
+func _() (int, string, error) {
+	x := 1
+	y := "hello"
+	z := "bye" //@codeaction("refactor.extract", "z", rcEnd, rc)
+	if y == z {
+		return x, y, fmt.Errorf("same")
+	} else if false {
+		z = "hi"
+		return x, z, nil
+	} //@loc(rcEnd, "}")
+	return x, z, nil
+}
+
+-- @rc/return_complex.go --
+package extract
+
+import "fmt"
+
+func _() (int, string, error) {
+	x := 1
+	y := "hello"
+	//@codeaction("refactor.extract", "z", rcEnd, rc)
+	z, shouldReturn, returnValue, returnValue1, returnValue2 := newFunction(y, x)
+	if shouldReturn {
+		return returnValue, returnValue1, returnValue2
+	} //@loc(rcEnd, "}")
+	return x, z, nil
+}
+
+func newFunction(y string, x int) (string, bool, int, string, error) {
+	z := "bye"
+	if y == z {
+		return "", true, x, y, fmt.Errorf("same")
+	} else if false {
+		z = "hi"
+		return "", true, x, z, nil
+	}
+	return z, false, 0, "", nil
+}
+
+-- return_complex_nonnested.go --
+package extract
+
+import "fmt"
+
+func _() (int, string, error) {
+	x := 1
+	y := "hello"
+	z := "bye" //@codeaction("refactor.extract", "z", rcnnEnd, rcnn)
+	if y == z {
+		return x, y, fmt.Errorf("same")
+	} else if false {
+		z = "hi"
+		return x, z, nil
+	}
+	return x, z, nil //@loc(rcnnEnd, "nil")
+}
+
+-- @rcnn/return_complex_nonnested.go --
+package extract
+
+import "fmt"
+
+func _() (int, string, error) {
+	x := 1
+	y := "hello"
+	//@codeaction("refactor.extract", "z", rcnnEnd, rcnn)
+	return newFunction(y, x) //@loc(rcnnEnd, "nil")
+}
+
+func newFunction(y string, x int) (int, string, error) {
+	z := "bye"
+	if y == z {
+		return x, y, fmt.Errorf("same")
+	} else if false {
+		z = "hi"
+		return x, z, nil
+	}
+	return x, z, nil
+}
+
+-- return_func_lit.go --
+package extract
+
+import "go/ast"
+
+func _() {
+	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
+		if n == nil { //@codeaction("refactor.extract", "if", rflEnd, rfl)
+			return true
+		} //@loc(rflEnd, "}")
+		return false
+	})
+}
+
+-- @rfl/return_func_lit.go --
+package extract
+
+import "go/ast"
+
+func _() {
+	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
+		//@codeaction("refactor.extract", "if", rflEnd, rfl)
+		shouldReturn, returnValue := newFunction(n)
+		if shouldReturn {
+			return returnValue
+		} //@loc(rflEnd, "}")
+		return false
+	})
+}
+
+func newFunction(n ast.Node) (bool, bool) {
+	if n == nil {
+		return true, true
+	}
+	return false, false
+}
+
+-- return_func_lit_nonnested.go --
+package extract
+
+import "go/ast"
+
+func _() {
+	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
+		if n == nil { //@codeaction("refactor.extract", "if", rflnnEnd, rflnn)
+			return true
+		}
+		return false //@loc(rflnnEnd, "false")
+	})
+}
+
+-- @rflnn/return_func_lit_nonnested.go --
+package extract
+
+import "go/ast"
+
+func _() {
+	ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool {
+		//@codeaction("refactor.extract", "if", rflnnEnd, rflnn)
+		return newFunction(n) //@loc(rflnnEnd, "false")
+	})
+}
+
+func newFunction(n ast.Node) bool {
+	if n == nil {
+		return true
+	}
+	return false
+}
+
+-- return_init.go --
+package extract
+
+func _() string {
+	x := 1
+	if x == 0 { //@codeaction("refactor.extract", "if", riEnd, ri)
+		x = 3
+		return "a"
+	} //@loc(riEnd, "}")
+	x = 2
+	return "b"
+}
+
+-- @ri/return_init.go --
+package extract
+
+func _() string {
+	x := 1
+	//@codeaction("refactor.extract", "if", riEnd, ri)
+	shouldReturn, returnValue := newFunction(x)
+	if shouldReturn {
+		return returnValue
+	} //@loc(riEnd, "}")
+	x = 2
+	return "b"
+}
+
+func newFunction(x int) (bool, string) {
+	if x == 0 {
+		x = 3
+		return true, "a"
+	}
+	return false, ""
+}
+
+-- return_init_nonnested.go --
+package extract
+
+func _() string {
+	x := 1
+	if x == 0 { //@codeaction("refactor.extract", "if", rinnEnd, rinn)
+		x = 3
+		return "a"
+	}
+	x = 2
+	return "b" //@loc(rinnEnd, "\"b\"")
+}
+
+-- @rinn/return_init_nonnested.go --
+package extract
+
+func _() string {
+	x := 1
+	//@codeaction("refactor.extract", "if", rinnEnd, rinn)
+	return newFunction(x) //@loc(rinnEnd, "\"b\"")
+}
+
+func newFunction(x int) string {
+	if x == 0 {
+		x = 3
+		return "a"
+	}
+	x = 2
+	return "b"
+}
+
+-- args_returns.go --
+package extract
+
+func _() {
+	a := 1
+	a = 5     //@codeaction("refactor.extract", "a", araend, ara)
+	a = a + 2 //@loc(araend, "2")
+
+	b := a * 2 //@codeaction("refactor.extract", "b", arbend, arb)
+	_ = b + 4  //@loc(arbend, "4")
+}
+
+-- @ara/args_returns.go --
+package extract
+
+func _() {
+	a := 1
+	//@codeaction("refactor.extract", "a", araend, ara)
+	a = newFunction(a) //@loc(araend, "2")
+
+	b := a * 2 //@codeaction("refactor.extract", "b", arbend, arb)
+	_ = b + 4  //@loc(arbend, "4")
+}
+
+func newFunction(a int) int {
+	a = 5
+	a = a + 2
+	return a
+}
+
+-- @arb/args_returns.go --
+package extract
+
+func _() {
+	a := 1
+	a = 5     //@codeaction("refactor.extract", "a", araend, ara)
+	a = a + 2 //@loc(araend, "2")
+
+	//@codeaction("refactor.extract", "b", arbend, arb)
+	newFunction(a)  //@loc(arbend, "4")
+}
+
+func newFunction(a int) {
+	b := a * 2
+	_ = b + 4
+}
+
+-- scope.go --
+package extract
+
+func _() {
+	newFunction := 1
+	a := newFunction //@codeaction("refactor.extract", "a", "newFunction", scope)
+	_ = a // avoid diagnostic
+}
+
+func newFunction1() int {
+	return 1
+}
+
+-- @scope/scope.go --
+package extract
+
+func _() {
+	newFunction := 1
+	a := newFunction2(newFunction) //@codeaction("refactor.extract", "a", "newFunction", scope)
+	_ = a // avoid diagnostic
+}
+
+func newFunction2(newFunction int) int {
+	a := newFunction
+	return a
+}
+
+func newFunction1() int {
+	return 1
+}
+
+-- smart_initialization.go --
+package extract
+
+func _() {
+	var a []int
+	a = append(a, 2) //@codeaction("refactor.extract", "a", siEnd, si)
+	b := 4           //@loc(siEnd, "4")
+	a = append(a, b)
+}
+
+-- @si/smart_initialization.go --
+package extract
+
+func _() {
+	var a []int
+	//@codeaction("refactor.extract", "a", siEnd, si)
+	a, b := newFunction(a)           //@loc(siEnd, "4")
+	a = append(a, b)
+}
+
+func newFunction(a []int) ([]int, int) {
+	a = append(a, 2)
+	b := 4
+	return a, b
+}
+
+-- smart_return.go --
+package extract
+
+func _() {
+	var b []int
+	var a int
+	a = 2 //@codeaction("refactor.extract", "a", srEnd, sr)
+	b = []int{}
+	b = append(b, a) //@loc(srEnd, ")")
+	b[0] = 1
+}
+
+-- @sr/smart_return.go --
+package extract
+
+func _() {
+	var b []int
+	var a int
+	//@codeaction("refactor.extract", "a", srEnd, sr)
+	b = newFunction(a, b) //@loc(srEnd, ")")
+	b[0] = 1
+}
+
+func newFunction(a int, b []int) []int {
+	a = 2
+	b = []int{}
+	b = append(b, a)
+	return b
+}
+
+-- unnecessary_param.go --
+package extract
+
+func _() {
+	var b []int
+	a := 2 //@codeaction("refactor.extract", "a", upEnd, up)
+	b = []int{}
+	b = append(b, a) //@loc(upEnd, ")")
+	b[0] = 1
+	if a == 2 {
+		return
+	}
+}
+
+-- @up/unnecessary_param.go --
+package extract
+
+func _() {
+	var b []int
+	//@codeaction("refactor.extract", "a", upEnd, up)
+	a, b := newFunction(b) //@loc(upEnd, ")")
+	b[0] = 1
+	if a == 2 {
+		return
+	}
+}
+
+func newFunction(b []int) (int, []int) {
+	a := 2
+	b = []int{}
+	b = append(b, a)
+	return a, b
+}
+
+-- comment.go --
+package extract
+
+func _() {
+	a := /* comment in the middle of a line */ 1 //@codeaction("refactor.extract", "a", commentEnd, comment1)
+	// Comment on its own line  //@codeaction("refactor.extract", "Comment", commentEnd, comment2)
+	_ = a + 4 //@loc(commentEnd, "4"),codeaction("refactor.extract", "_", lastComment, comment3)
+	// Comment right after 3 + 4
+
+	// Comment after with space //@loc(lastComment, "Comment")
+}
+
+-- @comment1/comment.go --
+package extract
+
+func _() {
+	/* comment in the middle of a line */
+	//@codeaction("refactor.extract", "a", commentEnd, comment1)
+	// Comment on its own line  //@codeaction("refactor.extract", "Comment", commentEnd, comment2)
+	newFunction() //@loc(commentEnd, "4"),codeaction("refactor.extract", "_", lastComment, comment3)
+	// Comment right after 3 + 4
+
+	// Comment after with space //@loc(lastComment, "Comment")
+}
+
+func newFunction() {
+	a := 1
+
+	_ = a + 4
+}
+
+-- @comment2/comment.go --
+package extract
+
+func _() {
+	a := /* comment in the middle of a line */ 1 //@codeaction("refactor.extract", "a", commentEnd, comment1)
+	// Comment on its own line  //@codeaction("refactor.extract", "Comment", commentEnd, comment2)
+	newFunction(a) //@loc(commentEnd, "4"),codeaction("refactor.extract", "_", lastComment, comment3)
+	// Comment right after 3 + 4
+
+	// Comment after with space //@loc(lastComment, "Comment")
+}
+
+func newFunction(a int) {
+	_ = a + 4
+}
+
+-- @comment3/comment.go --
+package extract
+
+func _() {
+	a := /* comment in the middle of a line */ 1 //@codeaction("refactor.extract", "a", commentEnd, comment1)
+	// Comment on its own line  //@codeaction("refactor.extract", "Comment", commentEnd, comment2)
+	newFunction(a) //@loc(commentEnd, "4"),codeaction("refactor.extract", "_", lastComment, comment3)
+	// Comment right after 3 + 4
+
+	// Comment after with space //@loc(lastComment, "Comment")
+}
+
+func newFunction(a int) {
+	_ = a + 4
+}
+
+-- redefine.go --
+package extract
+
+import "strconv"
+
+func _() {
+	i, err := strconv.Atoi("1")
+	u, err := strconv.Atoi("2") //@codeaction("refactor.extract", "u", ")", redefine)
+	if i == u || err == nil {
+		return
+	}
+}
+
+-- @redefine/redefine.go --
+package extract
+
+import "strconv"
+
+func _() {
+	i, err := strconv.Atoi("1")
+	u, err := newFunction() //@codeaction("refactor.extract", "u", ")", redefine)
+	if i == u || err == nil {
+		return
+	}
+}
+
+func newFunction() (int, error) {
+	u, err := strconv.Atoi("2")
+	return u, err
+}
+
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/functionextraction_issue44813.txt b/gopls/internal/regtest/marker/testdata/codeaction/functionextraction_issue44813.txt
new file mode 100644
index 0000000..46369d0
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/functionextraction_issue44813.txt
@@ -0,0 +1,42 @@
+This test verifies the fix for golang/go#44813: extraction failure when there
+are blank identifiers.
+
+-- go.mod --
+module mod.test/extract
+
+go 1.18
+
+-- p.go --
+package extract
+
+import "fmt"
+
+func main() {
+	x := []rune{} //@codeaction("refactor.extract", "x", end, ext)
+	s := "HELLO"
+	for _, c := range s {
+		x = append(x, c)
+	} //@loc(end, "}")
+	fmt.Printf("%x\n", x)
+}
+
+-- @ext/p.go --
+package extract
+
+import "fmt"
+
+func main() {
+	//@codeaction("refactor.extract", "x", end, ext)
+	x := newFunction() //@loc(end, "}")
+	fmt.Printf("%x\n", x)
+}
+
+func newFunction() []rune {
+	x := []rune{}
+	s := "HELLO"
+	for _, c := range s {
+		x = append(x, c)
+	}
+	return x
+}
+
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/imports.txt b/gopls/internal/regtest/marker/testdata/codeaction/imports.txt
new file mode 100644
index 0000000..325733e
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/imports.txt
@@ -0,0 +1,175 @@
+This test verifies the behavior of the 'source.organizeImports' code action.
+
+-- go.mod --
+module mod.test/imports
+
+go 1.18
+
+-- add.go --
+package imports //@codeaction("source.organizeImports", "imports", "", add)
+
+import (
+	"fmt"
+)
+
+func _() {
+	fmt.Println("")
+	bytes.NewBuffer(nil) //@diag("bytes", re"(undeclared|undefined)")
+}
+
+-- @add/add.go --
+package imports //@codeaction("source.organizeImports", "imports", "", add)
+
+import (
+	"bytes"
+	"fmt"
+)
+
+func _() {
+	fmt.Println("")
+	bytes.NewBuffer(nil) //@diag("bytes", re"(undeclared|undefined)")
+}
+
+-- good.go --
+package imports //@codeactionerr("source.organizeImports", "imports", "", re"found 0 CodeActions")
+
+import "fmt"
+
+func _() {
+fmt.Println("")
+}
+
+-- issue35458.go --
+
+
+
+
+
+// package doc
+package imports //@codeaction("source.organizeImports", "imports", "", issue35458)
+
+
+
+
+
+
+func _() {
+	println("Hello, world!")
+}
+
+
+
+
+
+
+
+
+-- @issue35458/issue35458.go --
+// package doc
+package imports //@codeaction("source.organizeImports", "imports", "", issue35458)
+
+
+
+
+
+
+func _() {
+	println("Hello, world!")
+}
+
+
+
+
+
+
+
+
+-- multi.go --
+package imports //@codeaction("source.organizeImports", "imports", "", multi)
+
+import "fmt"
+
+import "bytes" //@diag("\"bytes\"", re"not used")
+
+func _() {
+	fmt.Println("")
+}
+
+-- @multi/multi.go --
+package imports //@codeaction("source.organizeImports", "imports", "", multi)
+
+import "fmt"
+
+//@diag("\"bytes\"", re"not used")
+
+func _() {
+	fmt.Println("")
+}
+
+-- needs.go --
+package imports //@codeaction("source.organizeImports", "package", "", needs)
+
+func goodbye() {
+	fmt.Printf("HI") //@diag("fmt", re"(undeclared|undefined)")
+	log.Printf("byeeeee") //@diag("log", re"(undeclared|undefined)")
+}
+
+-- @needs/needs.go --
+package imports //@codeaction("source.organizeImports", "package", "", needs)
+
+import (
+	"fmt"
+	"log"
+)
+
+func goodbye() {
+	fmt.Printf("HI") //@diag("fmt", re"(undeclared|undefined)")
+	log.Printf("byeeeee") //@diag("log", re"(undeclared|undefined)")
+}
+
+-- remove.go --
+package imports //@codeaction("source.organizeImports", "package", "", remove)
+
+import (
+	"bytes" //@diag("\"bytes\"", re"not used")
+	"fmt"
+)
+
+func _() {
+	fmt.Println("")
+}
+
+-- @remove/remove.go --
+package imports //@codeaction("source.organizeImports", "package", "", remove)
+
+import (
+	"fmt"
+)
+
+func _() {
+	fmt.Println("")
+}
+
+-- removeall.go --
+package imports //@codeaction("source.organizeImports", "package", "", removeall)
+
+import (
+	"bytes" //@diag("\"bytes\"", re"not used")
+	"fmt" //@diag("\"fmt\"", re"not used")
+
+)
+
+func _() {
+}
+
+-- @removeall/removeall.go --
+package imports //@codeaction("source.organizeImports", "package", "", removeall)
+
+//@diag("\"fmt\"", re"not used")
+
+func _() {
+}
+
+-- twolines.go --
+package imports
+func main()  {} //@codeactionerr("source.organizeImports", "main", "", re"found 0")
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt b/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt
new file mode 100644
index 0000000..8ee1b67
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt
@@ -0,0 +1,38 @@
+This test verifies the infertypeargs refactoring.
+
+-- flags --
+-min_go=go1.18
+
+-- go.mod --
+module mod.test/infertypeargs
+
+go 1.18
+
+-- p.go --
+package infertypeargs
+
+func app[S interface{ ~[]E }, E interface{}](s S, e E) S {
+	return append(s, e)
+}
+
+func _() {
+	_ = app[[]int]
+	_ = app[[]int, int]
+	_ = app[[]int]([]int{}, 0) //@codeaction("refactor.rewrite", "app", ")", infer)
+	_ = app([]int{}, 0)
+}
+
+-- @infer/p.go --
+package infertypeargs
+
+func app[S interface{ ~[]E }, E interface{}](s S, e E) S {
+	return append(s, e)
+}
+
+func _() {
+	_ = app[[]int]
+	_ = app[[]int, int]
+	_ = app([]int{}, 0) //@codeaction("refactor.rewrite", "app", ")", infer)
+	_ = app([]int{}, 0)
+}
+
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt b/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt
new file mode 100644
index 0000000..2cb7d2b
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt
@@ -0,0 +1,45 @@
+This test demonstrates diagnostics for adding a go.work file.
+
+Quick-fixes change files on disk, so are tested by regtests.
+
+TODO(rfindley): improve the "cannot find package" import errors.
+
+-- flags --
+-min_go=go1.18
+
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main //@diag("main", re"add a go.work file")
+
+import "mod.com/a/lib" //@diag("\"mod.com", re"cannot find package")
+
+func main() {
+	_ = lib.C
+}
+
+-- a/lib/lib.go --
+package lib //@diag("lib", re"add a go.work file")
+
+const C = "b"
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main //@diag("main", re"add a go.work file")
+
+import "mod.com/b/lib" //@diag("\"mod.com", re"cannot find package")
+
+func main() {
+	_ = lib.C
+}
+
+-- b/lib/lib.go --
+package lib //@diag("lib", re"add a go.work file")
+
+const C = "b"
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt
new file mode 100644
index 0000000..6e7e465
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt
@@ -0,0 +1,32 @@
+Test of warning diagnostics from various analyzers:
+tests, copylocks, printf, and timeformat.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- bad_test.go --
+package analyzer
+
+import (
+	"fmt"
+	"sync"
+	"testing"
+	"time"
+)
+
+func Testbad(t *testing.T) { //@diag("", re"Testbad has malformed name: first letter after 'Test' must not be lowercase")
+	var x sync.Mutex
+	_ = x //@diag("x", re"assignment copies lock value to _: sync.Mutex")
+
+	printfWrapper("%s") //@diag(re`printfWrapper\(.*\)`, re"example.com.printfWrapper format %s reads arg #1, but call has 0 args")
+}
+
+func printfWrapper(format string, args ...interface{}) {
+	fmt.Printf(format, args...)
+}
+
+func _() {
+	now := time.Now()
+	fmt.Println(now.Format("2006-02-01")) //@diag("2006-02-01", re"2006-02-01 should be 2006-01-02")
+}
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt b/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt
new file mode 100644
index 0000000..5944cbe
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt
@@ -0,0 +1,38 @@
+This test demonstrates diagnostics for various forms of file exclusion.
+
+Skip on plan9, an arbitrary GOOS, so that we can exercise GOOS exclusions
+resulting from file suffixes.
+
+-- flags --
+-min_go=go1.18
+-skip_goos=plan9
+
+-- go.work --
+go 1.21
+
+use (
+	./a
+)
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/a.go --
+package a
+
+-- a/a_plan9.go --
+package a //@diag(re"package (a)", re"excluded due to its GOOS/GOARCH")
+
+-- a/a_ignored.go --
+//go:build skip
+package a //@diag(re"package (a)", re"excluded due to its build tags")
+
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/b.go --
+package b //@diag(re"package (b)", re"add this module to your go.work")
+
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt b/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt
new file mode 100644
index 0000000..bae69b1
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt
@@ -0,0 +1,21 @@
+Test of "undeclared" diagnostic in generated code.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- generated.go --
+package generated
+
+// Code generated by generator.go. DO NOT EDIT.
+
+func _() {
+	var y int //@diag("y", re"y declared (and|but) not used")
+}
+
+-- generator.go --
+package generated
+
+func _() {
+	var x int //@diag("x", re"x declared (and|but) not used")
+}
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/parseerr.txt b/gopls/internal/regtest/marker/testdata/diagnostics/parseerr.txt
new file mode 100644
index 0000000..d0df08d
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/parseerr.txt
@@ -0,0 +1,27 @@
+
+This test exercises diagnostics produced for syntax errors.
+
+Because parser error recovery can be quite lossy, diagnostics
+for type errors are suppressed in files with syntax errors;
+see issue #59888. But diagnostics are reported for type errors
+in well-formed files of the same package.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- bad.go --
+package p
+
+func f() {
+	append("") // no diagnostic for type error in file containing syntax error
+}
+
+func .() {} //@diag(re"func ().", re"expected 'IDENT', found '.'")
+
+-- good.go --
+package p
+
+func g() {
+	append("") //@diag(re`""`, re"a slice")
+}
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt b/gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt
new file mode 100644
index 0000000..70e4ebb
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt
@@ -0,0 +1,27 @@
+This test verifies that analyzers without RunDespiteErrors are not
+executed on a package containing type errors (see issue #54762).
+
+We require go1.18 because the range of the `1 + ""` go/types error
+changed then, and the new @diag marker is quite particular.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- flags --
+-min_go=go1.18
+
+-- a.go --
+package a
+
+func _() {
+	// A type error.
+	_ = 1 + "" //@diag(`1 + ""`, re"mismatched types|cannot convert")
+
+	// A violation of an analyzer for which RunDespiteErrors=false:
+	// no (simplifyrange, warning) diagnostic is produced; the diag
+	// comment is merely illustrative.
+	for _ = range "" { //diag("for _", "simplify range expression", )
+
+	}
+}
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt b/gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt
new file mode 100644
index 0000000..345c48e
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt
@@ -0,0 +1,33 @@
+
+This test exercises diagnostics produced for type errors
+in the absence of syntax errors.
+
+The type error was chosen to exercise the 'nonewvars' type-error analyzer.
+(The 'undeclaredname' analyzer depends on the text of the go/types
+"undeclared name" error, which changed in go1.20.)
+
+The append() type error was also carefully chosen to have text and
+position that are invariant across all versions of Go run by the builders.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- typeerr.go --
+package a
+
+func f(x int) {
+	append("") //@diag(re`""`, re"a slice")
+
+	x := 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", "quickfix", fix)
+}
+
+-- @fix/typeerr.go --
+package a
+
+func f(x int) {
+	append("") //@diag(re`""`, re"a slice")
+
+	x = 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", "quickfix", fix)
+}
+
diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt b/gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt
new file mode 100644
index 0000000..35d2e43
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt
@@ -0,0 +1,51 @@
+This test demonstrates diagnostics for a module that is missing from the
+go.work file.
+
+Quick-fixes change files on disk, so are tested by regtests.
+
+-- flags --
+-min_go=go1.18
+
+-- go.work --
+go 1.21
+
+use (
+	./a
+)
+
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main
+
+import "mod.com/a/lib"
+
+func main() {
+	_ = lib.C
+}
+
+-- a/lib/lib.go --
+package lib
+
+const C = "b"
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main //@diag("main", re"add this module to your go.work")
+
+import "mod.com/b/lib" //@diag("\"mod.com", re"not included in a workspace module")
+
+func main() {
+	_ = lib.C
+}
+
+-- b/lib/lib.go --
+package lib //@diag("lib", re"add this module to your go.work")
+
+const C = "b"
diff --git a/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt b/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt
new file mode 100644
index 0000000..65385f7
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt
@@ -0,0 +1,22 @@
+This test verifies that we can load multiple orphaned files as
+command-line-arguments packages.
+
+Previously, we would load only one because go/packages returns at most one
+command-line-arguments package per query.
+
+-- a/main.go --
+package main
+
+func main() {
+	var a int //@diag(re"var (a)", re"not used")
+}
+-- b/main.go --
+package main
+
+func main() {
+	var b int //@diag(re"var (b)", re"not used")
+}
+-- c/go.mod --
+module c.com // The existence of this module avoids a workspace error.
+
+go 1.18
diff --git a/gopls/internal/regtest/marker/testdata/fixedbugs/issue59944.txt b/gopls/internal/regtest/marker/testdata/fixedbugs/issue59944.txt
new file mode 100644
index 0000000..9e39d8f
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/fixedbugs/issue59944.txt
@@ -0,0 +1,33 @@
+This test verifies that gopls does not panic when encountering the go/types
+bug described in golang/go#59944: the Bindingf function is not included in
+the methodset of its receiver type.
+
+Adapted from the code in question from the issue.
+
+-- flags --
+-cgo
+
+-- go.mod --
+module example.com
+
+go 1.12
+
+-- cgo.go --
+package x
+
+import "fmt"
+
+/*
+struct layout {
+	int field;
+};
+*/
+import "C"
+
+type Layout = C.struct_layout
+
+// Bindingf is a printf wrapper. This was necessary to trigger the panic in
+// objectpath while encoding facts.
+func (l *Layout) Bindingf(format string, args ...interface{}) {
+	fmt.Printf(format, args...)
+}
diff --git a/gopls/internal/regtest/marker/testdata/format/format.txt b/gopls/internal/regtest/marker/testdata/format/format.txt
new file mode 100644
index 0000000..b143738
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/format/format.txt
@@ -0,0 +1,80 @@
+This test checks basic behavior of textDocument/formatting requests.
+
+-- go.mod --
+module mod.com
+
+go 1.18
+-- good.go --
+package format //@format(good)
+
+import (
+	"log"
+)
+
+func goodbye() {
+	log.Printf("byeeeee")
+}
+
+-- @good --
+package format //@format(good)
+
+import (
+	"log"
+)
+
+func goodbye() {
+	log.Printf("byeeeee")
+}
+-- bad.go --
+package format //@format(bad)
+
+import (
+	"runtime"
+	"fmt"
+	"log"
+)
+
+func hello() {
+
+
+
+
+	var x int //@diag("x", re"x declared (and|but) not used")
+}
+
+func hi() {
+	runtime.GOROOT()
+	fmt.Printf("")
+
+	log.Printf("")
+}
+-- @bad --
+package format //@format(bad)
+
+import (
+	"fmt"
+	"log"
+	"runtime"
+)
+
+func hello() {
+
+	var x int //@diag("x", re"x declared (and|but) not used")
+}
+
+func hi() {
+	runtime.GOROOT()
+	fmt.Printf("")
+
+	log.Printf("")
+}
+-- newline.go --
+package format //@format(newline)
+func _() {}
+-- @newline --
+package format //@format(newline)
+func _()       {}
+-- oneline.go --
+package format //@format(oneline)
+-- @oneline --
+package format //@format(oneline)
diff --git a/gopls/internal/regtest/marker/testdata/format/issue59554.txt b/gopls/internal/regtest/marker/testdata/format/issue59554.txt
new file mode 100644
index 0000000..1e49e38
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/format/issue59554.txt
@@ -0,0 +1,33 @@
+Test case for golang/go#59554: data corruption on formatting due to line
+directives.
+
+Note that gofumpt is needed for this test case, as it reformats var decls into
+short var decls.
+
+Note that gofumpt requires Go 1.18.
+
+-- flags --
+-min_go=go1.18
+
+-- settings.json --
+{
+	"formatting.gofumpt": true
+}
+-- main.go --
+package main //@format(main)
+
+func Match(data []byte) int {
+//line :1
+	var idx = ^uint(0)
+	_ = idx
+	return -1
+}
+-- @main --
+package main //@format(main)
+
+func Match(data []byte) int {
+//line :1
+	idx := ^uint(0)
+	_ = idx
+	return -1
+}
diff --git a/gopls/internal/regtest/marker/testdata/format/noparse.txt b/gopls/internal/regtest/marker/testdata/format/noparse.txt
new file mode 100644
index 0000000..afc96cc
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/format/noparse.txt
@@ -0,0 +1,27 @@
+This test checks that formatting does not run on code that has parse errors.
+
+-- parse.go --
+package noparse_format //@format(parse)
+
+func _() {
+f() //@diag("f", re"(undefined|undeclared name): f")
+}
+-- @parse --
+package noparse_format //@format(parse)
+
+func _() {
+	f() //@diag("f", re"(undefined|undeclared name): f")
+}
+-- noparse.go --
+package noparse_format //@format(noparse)
+
+// The nonewvars expectation asserts that the go/analysis framework ran.
+
+func what() {
+	var hi func()
+	if {		hi() //@diag(re"(){", re".*missing.*")
+	}
+	hi := nil
+}
+-- @noparse --
+7:5: missing condition in if statement
diff --git a/gopls/internal/regtest/marker/testdata/hover/generics.txt b/gopls/internal/regtest/marker/testdata/hover/generics.txt
index 673e860..d512f7f 100644
--- a/gopls/internal/regtest/marker/testdata/hover/generics.txt
+++ b/gopls/internal/regtest/marker/testdata/hover/generics.txt
@@ -39,8 +39,7 @@
 func _() {
 	_ = app[[]int]             //@hover("app", "app", appint)
 	_ = app[[]int, int]        //@hover("app", "app", appint)
-	// TODO(rfindley): eliminate this diagnostic.
-	_ = app[[]int]([]int{}, 0) //@hover("app", "app", appint),diag("[[]int]", re"unnecessary type arguments")
+	_ = app[[]int]([]int{}, 0) //@hover("app", "app", appint)
 	_ = app([]int{}, 0)        //@hover("app", "app", appint)
 }
 
diff --git a/gopls/internal/regtest/marker/testdata/hover/linkname.txt b/gopls/internal/regtest/marker/testdata/hover/linkname.txt
new file mode 100644
index 0000000..0d244c4
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/hover/linkname.txt
@@ -0,0 +1,29 @@
+This test checks hover on the 2nd argument in go:linkname directives.
+-- go.mod --
+module mod.com
+
+-- upper/upper.go --
+package upper
+
+import (
+	_ "unsafe"
+	_ "mod.com/lower"
+)
+
+//go:linkname foo mod.com/lower.bar //@hover("mod.com/lower.bar", "mod.com/lower.bar", bar)
+func foo() string
+
+-- lower/lower.go --
+package lower
+
+// bar does foo.
+func bar() string {
+	return "foo by bar"
+}
+
+-- @bar/hover.md --
+```go
+func bar() string
+```
+
+bar does foo.
diff --git a/gopls/internal/regtest/marker/testdata/quickfix/undeclared.txt b/gopls/internal/regtest/marker/testdata/quickfix/undeclared.txt
new file mode 100644
index 0000000..6dc27ee
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/quickfix/undeclared.txt
@@ -0,0 +1,62 @@
+Tests of suggested fixes for "undeclared name" diagnostics,
+which are of ("compiler", "error") type.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- a.go --
+package p
+
+func a() {
+	z, _ := 1+y, 11 //@suggestedfix("y", re"(undeclared name|undefined): y", "quickfix", a)
+	_ = z
+}
+
+-- @a/a.go --
+package p
+
+func a() {
+	y := 
+	z, _ := 1+y, 11 //@suggestedfix("y", re"(undeclared name|undefined): y", "quickfix", a)
+	_ = z
+}
+
+-- b.go --
+package p
+
+func b() {
+	if 100 < 90 {
+	} else if 100 > n+2 { //@suggestedfix("n", re"(undeclared name|undefined): n", "quickfix", b)
+	}
+}
+
+-- @b/b.go --
+package p
+
+func b() {
+	n := 
+	if 100 < 90 {
+	} else if 100 > n+2 { //@suggestedfix("n", re"(undeclared name|undefined): n", "quickfix", b)
+	}
+}
+
+-- c.go --
+package p
+
+func c() {
+	for i < 200 { //@suggestedfix("i", re"(undeclared name|undefined): i", "quickfix", c)
+	}
+	r() //@diag("r", re"(undeclared name|undefined): r")
+}
+
+-- @c/c.go --
+package p
+
+func c() {
+	i := 
+	for i < 200 { //@suggestedfix("i", re"(undeclared name|undefined): i", "quickfix", c)
+	}
+	r() //@diag("r", re"(undeclared name|undefined): r")
+}
+
diff --git a/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire.txt b/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire.txt
new file mode 100644
index 0000000..6317b73
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire.txt
@@ -0,0 +1,24 @@
+This test checks the suggested fix to remove unused require statements from
+go.mod files.
+
+-- flags --
+-write_sumfile=a
+
+-- proxy/example.com@v1.0.0/x.go --
+package pkg
+const X = 1
+
+-- a/go.mod --
+module mod.com
+
+go 1.14
+
+require example.com v1.0.0 //@suggestedfix("require", re"not used", "quickfix", a)
+
+-- @a/a/go.mod --
+module mod.com
+
+go 1.14
+-- a/main.go --
+package main
+func main() {}
diff --git a/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire_gowork.txt b/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire_gowork.txt
new file mode 100644
index 0000000..8a090d7
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/quickfix/unusedrequire_gowork.txt
@@ -0,0 +1,49 @@
+This test checks the suggested fix to remove unused require statements from
+go.mod files, when a go.work file is used.
+
+Note that unlike unusedrequire.txt, we need not write go.sum files when
+a go.work file is used.
+
+-- flags --
+-min_go=go1.18
+
+-- proxy/example.com@v1.0.0/x.go --
+package pkg
+const X = 1
+
+-- go.work --
+go 1.21
+
+use (
+	./a
+	./b
+)
+-- a/go.mod --
+module mod.com/a
+
+go 1.14
+
+require example.com v1.0.0 //@suggestedfix("require", re"not used", "quickfix", a)
+
+-- @a/a/go.mod --
+module mod.com/a
+
+go 1.14
+-- a/main.go --
+package main
+func main() {}
+
+-- b/go.mod --
+module mod.com/b
+
+go 1.14
+
+require example.com v1.0.0 //@suggestedfix("require", re"not used", "quickfix", b)
+
+-- @b/b/go.mod --
+module mod.com/b
+
+go 1.14
+-- b/main.go --
+package main
+func main() {}
diff --git a/gopls/internal/regtest/marker/testdata/references/iota.txt b/gopls/internal/regtest/marker/testdata/references/iota.txt
deleted file mode 100644
index ec1150b..0000000
--- a/gopls/internal/regtest/marker/testdata/references/iota.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Test of references to iota. The result is the empty set.
-
-TODO(adonovan): treat iota like other built-ins and reject it.
-
--- go.mod --
-module example.com
-go 1.12
-
--- a/a.go --
-package a
-
-const (
-	zero = iota //@loc(iota, "iota"), refs("iota")
-	one
-	two = iota
-)
diff --git a/gopls/internal/regtest/marker/testdata/references/issue59851.txt b/gopls/internal/regtest/marker/testdata/references/issue59851.txt
new file mode 100644
index 0000000..86a6359
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/references/issue59851.txt
@@ -0,0 +1,29 @@
+Regression test for 'references' bug golang/go#59851.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- a/a.go --
+package a
+
+type Iface interface {
+     Method()
+}
+
+type implOne struct{}
+
+func (implOne) Method() {} //@loc(def1, "Method"), refs(def1, def1, ref1, iref, ireftest)
+
+var _ = implOne.Method //@loc(ref1, "Method")
+var _ = Iface(nil).Method //@loc(iref, "Method")
+
+-- a/a_test.go --
+package a
+
+type implTwo struct{}
+
+func (implTwo) Method() {} //@loc(def2, "Method"), refs(def2, def2, iref, ref2, ireftest)
+
+var _ = implTwo.Method //@loc(ref2, "Method")
+var _ = Iface(nil).Method //@loc(ireftest, "Method")
diff --git a/gopls/internal/regtest/marker/testdata/rename/unexported.txt b/gopls/internal/regtest/marker/testdata/rename/unexported.txt
new file mode 100644
index 0000000..e5631fa
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/rename/unexported.txt
@@ -0,0 +1,25 @@
+
+This test attempts to rename a.S.X to x, which would make it
+inaccessible from its external test package. The rename tool
+should report an error rather than wrecking the program.
+See issue #59403.
+
+-- go.mod --
+module example.com
+go 1.12
+
+-- a/a.go --
+package a
+
+var S struct{ X int } //@renameerr("X", x, oops)
+
+-- a/a_test.go --
+package a_test
+
+import "example.com/a"
+
+var Y = a.S.X
+
+-- @oops --
+a/a.go:3:15: renaming "X" to "x" would make it unexported
+a/a_test.go:5:13:	breaking references from packages such as "example.com/a_test"
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/allscope.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/allscope.txt
new file mode 100644
index 0000000..18fe4e5
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/allscope.txt
@@ -0,0 +1,30 @@
+This test verifies behavior when "symbolScope" is set to "all".
+
+-- settings.json --
+{
+	"symbolStyle": "full",
+	"symbolMatcher": "casesensitive",
+	"symbolScope": "all"
+}
+
+-- go.mod --
+module mod.test/symbols
+
+go 1.18
+
+-- query.go --
+package symbols
+
+//@workspacesymbol("fmt.Println", println)
+
+-- fmt/fmt.go --
+package fmt
+
+import "fmt"
+
+func Println(s string) {
+	fmt.Println(s)
+}
+-- @println --
+fmt/fmt.go:5:6-13 mod.test/symbols/fmt.Println Function
+<unknown> fmt.Println Function
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/caseinsensitive.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/caseinsensitive.txt
new file mode 100644
index 0000000..f853e8d
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/caseinsensitive.txt
@@ -0,0 +1,26 @@
+This file contains tests for symbol matches using the caseinsensitive matcher.
+
+-- settings.json --
+{
+	"symbolMatcher": "caseinsensitive"
+}
+
+-- go.mod --
+module mod.test/caseinsensitive
+
+go 1.18
+
+-- p.go --
+package caseinsensitive
+
+//@workspacesymbol("", blank)
+//@workspacesymbol("randomgophervar", randomgophervar)
+
+var RandomGopherVariableA int
+var randomgopherVariableB int
+var RandomGopherOtherVariable int
+
+-- @blank --
+-- @randomgophervar --
+p.go:6:5-26 RandomGopherVariableA Variable
+p.go:7:5-26 randomgopherVariableB Variable
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/casesensitive.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/casesensitive.txt
new file mode 100644
index 0000000..725e9db
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/casesensitive.txt
@@ -0,0 +1,116 @@
+This file contains tests for symbol matches using the casesensitive matcher.
+
+For historical reasons, it also verifies general behavior of the symbol search.
+
+-- settings.json --
+{
+	"symbolMatcher": "casesensitive"
+}
+
+-- go.mod --
+module mod.test/casesensitive
+
+go 1.18
+
+-- main.go --
+package main
+
+//@workspacesymbol("main.main", main)
+//@workspacesymbol("p.Message", Message)
+//@workspacesymbol("main.myvar", myvar)
+//@workspacesymbol("main.myType", myType)
+//@workspacesymbol("main.myType.Blahblah", blahblah)
+//@workspacesymbol("main.myStruct", myStruct)
+//@workspacesymbol("main.myStruct.myStructField", myStructField)
+//@workspacesymbol("main.myInterface", myInterface)
+//@workspacesymbol("main.myInterface.DoSomeCoolStuff", DoSomeCoolStuff)
+//@workspacesymbol("main.embed.myStruct", embeddedStruct)
+//@workspacesymbol("main.embed.nestedStruct.nestedStruct2.int", int)
+//@workspacesymbol("main.embed.nestedInterface.myInterface", nestedInterface)
+//@workspacesymbol("main.embed.nestedInterface.nestedMethod", nestedMethod)
+//@workspacesymbol("dunk", dunk)
+//@workspacesymbol("Dunk", Dunk)
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+func main() { // function
+	fmt.Println("Hello")
+}
+
+var myvar int // variable
+
+type myType string // basic type
+
+type myDecoder json.Decoder // to use the encoding/json import
+
+func (m *myType) Blahblah() {} // method
+
+type myStruct struct { // struct type
+	myStructField int // struct field
+}
+
+type myInterface interface { // interface
+	DoSomeCoolStuff() string // interface method
+}
+
+type embed struct {
+	myStruct
+
+	nestedStruct struct {
+		nestedField int
+
+		nestedStruct2 struct {
+			int
+		}
+	}
+
+	nestedInterface interface {
+		myInterface
+		nestedMethod()
+	}
+}
+
+func Dunk() int { return 0 }
+
+func dunk() {}
+
+-- p/p.go --
+package p
+
+const Message = "Hello World." // constant
+-- @DoSomeCoolStuff --
+main.go:41:2-17 main.myInterface.DoSomeCoolStuff Method
+-- @Dunk --
+main.go:61:6-10 Dunk Function
+-- @Message --
+p/p.go:3:7-14 p.Message Constant
+-- @blahblah --
+main.go:34:18-26 main.myType.Blahblah Method
+-- @dunk --
+main.go:63:6-10 dunk Function
+-- @int --
+main.go:51:4-7 main.embed.nestedStruct.nestedStruct2.int Field
+-- @main --
+main.go:24:6-10 main.main Function
+-- @myInterface --
+main.go:40:6-17 main.myInterface Interface
+main.go:41:2-17 main.myInterface.DoSomeCoolStuff Method
+-- @myStruct --
+main.go:36:6-14 main.myStruct Struct
+main.go:37:2-15 main.myStruct.myStructField Field
+-- @myStructField --
+main.go:37:2-15 main.myStruct.myStructField Field
+-- @myType --
+main.go:30:6-12 main.myType Class
+main.go:34:18-26 main.myType.Blahblah Method
+-- @myvar --
+main.go:28:5-10 main.myvar Variable
+-- @nestedInterface --
+main.go:56:3-14 main.embed.nestedInterface.myInterface Interface
+-- @nestedMethod --
+main.go:57:3-15 main.embed.nestedInterface.nestedMethod Method
+-- @embeddedStruct --
+main.go:45:2-10 main.embed.myStruct Field
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/issue44806.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/issue44806.txt
new file mode 100644
index 0000000..b2cd0b5
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/issue44806.txt
@@ -0,0 +1,27 @@
+This test verifies the fix for the crash encountered in golang/go#44806.
+
+-- go.mod --
+module mod.test/symbol
+
+go 1.18
+-- symbol.go --
+package symbol
+
+//@workspacesymbol("m", m)
+
+type T struct{}
+
+// We should accept all valid receiver syntax when scanning symbols.
+func (*(T)) m1() {}
+func (*T) m2()   {}
+func (T) m3()    {}
+func ((T)) m4()    {}
+func ((*T)) m5()   {}
+
+-- @m --
+symbol.go:8:13-15 T.m1 Method
+symbol.go:9:11-13 T.m2 Method
+symbol.go:10:10-12 T.m3 Method
+symbol.go:11:12-14 T.m4 Method
+symbol.go:12:13-15 T.m5 Method
+symbol.go:5:6-7 symbol.T Struct
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/workspacesymbol.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/workspacesymbol.txt
new file mode 100644
index 0000000..cdf9e26
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/workspacesymbol.txt
@@ -0,0 +1,72 @@
+This file contains tests for basic functionality of the workspace/symbol
+request.
+
+TODO(rfindley): add a test for the legacy 'fuzzy' symbol matcher, enabled via the setting "symbolMatcher": "fuzzy". This test uses the default matcher ("fastFuzzy").
+
+-- go.mod --
+module mod.test/symbols
+
+go 1.18
+
+-- query.go --
+package symbols
+
+//@workspacesymbol("rgop", rgop)
+//@workspacesymbol("randoma", randoma)
+//@workspacesymbol("randomb", randomb)
+
+-- a/a.go --
+package a
+
+var RandomGopherVariableA = "a"
+
+const RandomGopherConstantA = "a"
+
+const (
+	randomgopherinvariable = iota
+)
+
+-- a/a_test.go --
+package a
+
+var RandomGopherTestVariableA = "a"
+
+-- a/a_x_test.go --
+package a_test
+
+var RandomGopherXTestVariableA = "a"
+
+-- b/b.go --
+package b
+
+var RandomGopherVariableB = "b"
+
+type RandomGopherStructB struct {
+	Bar int
+}
+
+-- @rgop --
+b/b.go:5:6-25 RandomGopherStructB Struct
+a/a.go:5:7-28 RandomGopherConstantA Constant
+a/a.go:3:5-26 RandomGopherVariableA Variable
+b/b.go:3:5-26 RandomGopherVariableB Variable
+a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
+a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
+a/a.go:8:2-24 randomgopherinvariable Constant
+b/b.go:6:2-5 RandomGopherStructB.Bar Field
+-- @randoma --
+a/a.go:5:7-28 RandomGopherConstantA Constant
+a/a.go:3:5-26 RandomGopherVariableA Variable
+b/b.go:3:5-26 RandomGopherVariableB Variable
+a/a.go:8:2-24 randomgopherinvariable Constant
+a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
+a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
+b/b.go:6:2-5 RandomGopherStructB.Bar Field
+-- @randomb --
+b/b.go:5:6-25 RandomGopherStructB Struct
+a/a.go:3:5-26 RandomGopherVariableA Variable
+b/b.go:3:5-26 RandomGopherVariableB Variable
+a/a.go:8:2-24 randomgopherinvariable Constant
+a/a_test.go:3:5-30 RandomGopherTestVariableA Variable
+a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable
+b/b.go:6:2-5 RandomGopherStructB.Bar Field
diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/wsscope.txt b/gopls/internal/regtest/marker/testdata/workspacesymbol/wsscope.txt
new file mode 100644
index 0000000..e49483a
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/workspacesymbol/wsscope.txt
@@ -0,0 +1,29 @@
+This test verifies behavior when "symbolScope" is set to "workspace".
+
+-- settings.json --
+{
+	"symbolStyle": "full",
+	"symbolMatcher": "casesensitive",
+	"symbolScope": "workspace"
+}
+
+-- go.mod --
+module mod.test/symbols
+
+go 1.18
+
+-- query.go --
+package symbols
+
+//@workspacesymbol("fmt.Println", println)
+
+-- fmt/fmt.go --
+package fmt
+
+import "fmt"
+
+func Println(s string) {
+	fmt.Println(s)
+}
+-- @println --
+fmt/fmt.go:5:6-13 mod.test/symbols/fmt.Println Function
diff --git a/gopls/internal/regtest/misc/definition_test.go b/gopls/internal/regtest/misc/definition_test.go
index c2dd67f..9f24ef6 100644
--- a/gopls/internal/regtest/misc/definition_test.go
+++ b/gopls/internal/regtest/misc/definition_test.go
@@ -476,7 +476,7 @@
 		}
 
 		// Run 'go mod vendor' outside the editor.
-		if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, true); err != nil {
+		if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, nil, true); err != nil {
 			t.Fatalf("go mod vendor: %v", err)
 		}
 
diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/regtest/misc/hover_test.go
index 72a6e23..24ee6d8 100644
--- a/gopls/internal/regtest/misc/hover_test.go
+++ b/gopls/internal/regtest/misc/hover_test.go
@@ -382,3 +382,42 @@
 		})
 	}
 }
+
+const linknameHover = `
+-- go.mod --
+module mod.com
+
+-- upper/upper.go --
+package upper
+
+import (
+	_ "unsafe"
+	_ "mod.com/lower"
+)
+
+//go:linkname foo mod.com/lower.bar
+func foo() string
+
+-- lower/lower.go --
+package lower
+
+// bar does foo.
+func bar() string {
+	return "foo by bar"
+}`
+
+func TestHoverLinknameDirective(t *testing.T) {
+	Run(t, linknameHover, func(t *testing.T, env *Env) {
+		// Jump from the directive's 2nd arg.
+		env.OpenFile("upper/upper.go")
+		from := env.RegexpSearch("upper/upper.go", `lower.bar`)
+
+		hover, _ := env.Hover(from)
+		content := hover.Value
+
+		expect := "bar does foo"
+		if !strings.Contains(content, expect) {
+			t.Errorf("hover: %q does not contain: %q", content, expect)
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/misc_test.go b/gopls/internal/regtest/misc/misc_test.go
index 12aea69..5138b76 100644
--- a/gopls/internal/regtest/misc/misc_test.go
+++ b/gopls/internal/regtest/misc/misc_test.go
@@ -7,9 +7,9 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 )
 
 func TestMain(m *testing.M) {
diff --git a/gopls/internal/regtest/misc/references_test.go b/gopls/internal/regtest/misc/references_test.go
index a207333..1e14f1b 100644
--- a/gopls/internal/regtest/misc/references_test.go
+++ b/gopls/internal/regtest/misc/references_test.go
@@ -7,6 +7,8 @@
 import (
 	"fmt"
 	"os"
+	"path/filepath"
+	"reflect"
 	"sort"
 	"strings"
 	"testing"
@@ -100,6 +102,60 @@
 	})
 }
 
+func TestDefsRefsBuiltins(t *testing.T) {
+	testenv.NeedsGo1Point(t, 17) // for unsafe.{Add,Slice}
+	// TODO(adonovan): add unsafe.{SliceData,String,StringData} in later go versions.
+	const files = `
+-- go.mod --
+module example.com
+go 1.16
+
+-- a.go --
+package a
+
+import "unsafe"
+
+const _ = iota
+var _ error
+var _ int
+var _ = append()
+var _ = unsafe.Pointer(nil)
+var _ = unsafe.Add(nil, nil)
+var _ = unsafe.Sizeof(0)
+var _ = unsafe.Alignof(0)
+var _ = unsafe.Slice(nil, 0)
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("a.go")
+		for _, name := range strings.Fields(
+			"iota error int nil append iota Pointer Sizeof Alignof Add Slice") {
+			loc := env.RegexpSearch("a.go", `\b`+name+`\b`)
+
+			// definition -> {builtin,unsafe}.go
+			def, err := env.Editor.GoToDefinition(env.Ctx, loc)
+			if err != nil {
+				t.Errorf("definition(%q) failed: %v", name, err)
+			} else if (!strings.HasSuffix(string(def.URI), "builtin.go") &&
+				!strings.HasSuffix(string(def.URI), "unsafe.go")) ||
+				def.Range.Start.Line == 0 {
+				t.Errorf("definition(%q) = %v, want {builtin,unsafe}.go",
+					name, def)
+			}
+
+			// "references to (builtin "Foo"|unsafe.Foo) are not supported"
+			_, err = env.Editor.References(env.Ctx, loc)
+			gotErr := fmt.Sprint(err)
+			if !strings.Contains(gotErr, "references to") ||
+				!strings.Contains(gotErr, "not supported") ||
+				!strings.Contains(gotErr, name) {
+				t.Errorf("references(%q) error: got %q, want %q",
+					name, gotErr, "references to ... are not supported")
+			}
+		}
+	})
+}
+
 func TestPackageReferences(t *testing.T) {
 	tests := []struct {
 		packageName  string
@@ -261,11 +317,11 @@
 			// - inside the foo.mod/bar [foo.mod/bar.test] test variant package
 			// - from the foo.mod/bar_test [foo.mod/bar.test] x_test package
 			// - from the foo.mod/foo package
-			{"Blah", []string{"bar/bar.go", "bar/bar_test.go", "bar/bar_x_test.go", "foo/foo.go"}},
+			{"Blah", []string{"bar/bar.go:3", "bar/bar_test.go:7", "bar/bar_x_test.go:12", "foo/foo.go:12"}},
 
 			// Foo is referenced in bar_x_test.go via the intermediate test variant
 			// foo.mod/foo [foo.mod/bar.test].
-			{"Foo", []string{"bar/bar_x_test.go", "foo/foo.go"}},
+			{"Foo", []string{"bar/bar_x_test.go:13", "foo/foo.go:5"}},
 		}
 
 		for _, test := range refTests {
@@ -285,11 +341,11 @@
 			// InterfaceM is implemented both in foo.mod/bar [foo.mod/bar.test] (which
 			// doesn't import foo), and in foo.mod/bar_test [foo.mod/bar.test], which
 			// imports the test variant of foo.
-			{"InterfaceM", []string{"bar/bar_test.go", "bar/bar_x_test.go"}},
+			{"InterfaceM", []string{"bar/bar_test.go:3", "bar/bar_x_test.go:8"}},
 
 			// A search within the ordinary package should find implementations
 			// (Fer) within the augmented test package.
-			{"InterfaceF", []string{"foo/foo_test.go"}},
+			{"InterfaceF", []string{"foo/foo_test.go:3"}},
 		}
 
 		for _, test := range implTests {
@@ -363,7 +419,7 @@
 		checkVendor(env.Implementations(refLoc), false)
 
 		// Run 'go mod vendor' outside the editor.
-		if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, true); err != nil {
+		if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, nil, true); err != nil {
 			t.Fatalf("go mod vendor: %v", err)
 		}
 
@@ -449,19 +505,78 @@
 		env.OpenFile("a/a.go")
 		refLoc := env.RegexpSearch("a/a.go", "F")
 		got := fileLocations(env, env.References(refLoc))
-		want := []string{"a/a.go", "b/b.go", "lib/lib.go"}
+		want := []string{"a/a.go:5", "b/b.go:5", "lib/lib.go:3"}
 		if diff := cmp.Diff(want, got); diff != "" {
 			t.Errorf("incorrect References (-want +got):\n%s", diff)
 		}
 	})
 }
 
-// fileLocations returns a new sorted array of the relative
-// file name of each location. Duplicates are not removed.
+// Test an 'implementation' query on a type that implements 'error'.
+// (Unfortunately builtin locations cannot be expressed using @loc
+// in the marker test framework.)
+func TestImplementationsOfError(t *testing.T) {
+	const src = `
+-- go.mod --
+module example.com
+go 1.12
+
+-- a.go --
+package a
+
+type Error2 interface {
+	Error() string
+}
+
+type MyError int
+func (MyError) Error() string { return "" }
+
+type MyErrorPtr int
+func (*MyErrorPtr) Error() string { return "" }
+`
+	Run(t, src, func(t *testing.T, env *Env) {
+		env.OpenFile("a.go")
+
+		for _, test := range []struct {
+			re   string
+			want []string
+		}{
+			// error type
+			{"Error2", []string{"a.go:10", "a.go:7", "std:builtin/builtin.go"}},
+			{"MyError", []string{"a.go:3", "std:builtin/builtin.go"}},
+			{"MyErrorPtr", []string{"a.go:3", "std:builtin/builtin.go"}},
+			// error.Error method
+			{"(Error).. string", []string{"a.go:11", "a.go:8", "std:builtin/builtin.go"}},
+			{"MyError. (Error)", []string{"a.go:4", "std:builtin/builtin.go"}},
+			{"MyErrorPtr. (Error)", []string{"a.go:4", "std:builtin/builtin.go"}},
+		} {
+			matchLoc := env.RegexpSearch("a.go", test.re)
+			impls := env.Implementations(matchLoc)
+			got := fileLocations(env, impls)
+			if !reflect.DeepEqual(got, test.want) {
+				t.Errorf("Implementations(%q) = %q, want %q",
+					test.re, got, test.want)
+			}
+		}
+	})
+}
+
+// fileLocations returns a new sorted array of the
+// relative file name and line number of each location.
+// Duplicates are not removed.
+// Standard library filenames are abstracted for robustness.
 func fileLocations(env *regtest.Env, locs []protocol.Location) []string {
 	got := make([]string, 0, len(locs))
 	for _, loc := range locs {
-		got = append(got, env.Sandbox.Workdir.URIToPath(loc.URI))
+		path := env.Sandbox.Workdir.URIToPath(loc.URI) // (slashified)
+		if i := strings.LastIndex(path, "/src/"); i >= 0 && filepath.IsAbs(path) {
+			// Absolute path with "src" segment: assume it's in GOROOT.
+			// Strip directory and don't add line/column since they are fragile.
+			path = "std:" + path[i+len("/src/"):]
+		} else {
+			path = fmt.Sprintf("%s:%d", path, loc.Range.Start.Line+1)
+		}
+		got = append(got, path)
 	}
 	sort.Strings(got)
 	return got
diff --git a/gopls/internal/regtest/misc/workspace_symbol_test.go b/gopls/internal/regtest/misc/workspace_symbol_test.go
index a492e1d..849743b 100644
--- a/gopls/internal/regtest/misc/workspace_symbol_test.go
+++ b/gopls/internal/regtest/misc/workspace_symbol_test.go
@@ -7,7 +7,7 @@
 import (
 	"testing"
 
-	"golang.org/x/tools/gopls/internal/lsp/protocol"
+	"github.com/google/go-cmp/cmp"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
 	"golang.org/x/tools/gopls/internal/lsp/source"
 )
@@ -21,7 +21,7 @@
 -- a.go --
 package p
 
-const C1 = "a.go"
+const K1 = "a.go"
 -- exclude.go --
 
 //go:build exclude
@@ -29,23 +29,19 @@
 
 package exclude
 
-const C2 = "exclude.go"
+const K2 = "exclude.go"
 `
 
+	// NB: the name K was chosen to avoid spurious
+	// matches in the always-present "unsafe" package.
 	Run(t, files, func(t *testing.T, env *Env) {
 		env.OpenFile("a.go")
-		syms := env.Symbol("C")
-		if got, want := len(syms), 1; got != want {
-			t.Errorf("got %d symbols, want %d", got, want)
-		}
+		checkSymbols(env, "K", "K1")
 
 		// Opening up an ignored file will result in an overlay with missing
 		// metadata, but this shouldn't break workspace symbols requests.
 		env.OpenFile("exclude.go")
-		syms = env.Symbol("C")
-		if got, want := len(syms), 1; got != want {
-			t.Errorf("got %d symbols, want %d", got, want)
-		}
+		checkSymbols(env, "K", "K1")
 	})
 }
 
@@ -71,15 +67,14 @@
 	WithOptions(
 		Settings{"symbolMatcher": symbolMatcher},
 	).Run(t, files, func(t *testing.T, env *Env) {
-		want := []string{
+		checkSymbols(env, "Foo",
 			"Foo",    // prefer exact segment matches first
 			"FooBar", // ...followed by exact word matches
 			"Fooex",  // shorter than Fooest, FooBar, lexically before Fooey
 			"Fooey",  // shorter than Fooest, Foobar
 			"Fooest",
-		}
-		got := env.Symbol("Foo")
-		compareSymbols(t, got, want...)
+			"unsafe.Offsetof", // a very fuzzy match
+		)
 	})
 }
 
@@ -102,23 +97,21 @@
 	WithOptions(
 		Settings{"symbolMatcher": symbolMatcher},
 	).Run(t, files, func(t *testing.T, env *Env) {
-		compareSymbols(t, env.Symbol("ABC"), "ABC", "AxxBxxCxx")
-		compareSymbols(t, env.Symbol("'ABC"), "ABC")
-		compareSymbols(t, env.Symbol("^mod.com"), "mod.com/a.ABC", "mod.com/a.AxxBxxCxx")
-		compareSymbols(t, env.Symbol("^mod.com Axx"), "mod.com/a.AxxBxxCxx")
-		compareSymbols(t, env.Symbol("C$"), "ABC")
+		checkSymbols(env, "ABC", "ABC", "AxxBxxCxx")
+		checkSymbols(env, "'ABC", "ABC")
+		checkSymbols(env, "^mod.com", "mod.com/a.ABC", "mod.com/a.AxxBxxCxx")
+		checkSymbols(env, "^mod.com Axx", "mod.com/a.AxxBxxCxx")
+		checkSymbols(env, "C$", "ABC")
 	})
 }
 
-func compareSymbols(t *testing.T, got []protocol.SymbolInformation, want ...string) {
-	t.Helper()
-	if len(got) != len(want) {
-		t.Errorf("got %d symbols, want %d", len(got), len(want))
+func checkSymbols(env *Env, query string, want ...string) {
+	env.T.Helper()
+	var got []string
+	for _, info := range env.Symbol(query) {
+		got = append(got, info.Name)
 	}
-
-	for i := range got {
-		if got[i].Name != want[i] {
-			t.Errorf("got[%d] = %q, want %q", i, got[i].Name, want[i])
-		}
+	if diff := cmp.Diff(got, want); diff != "" {
+		env.T.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff)
 	}
 }
diff --git a/gopls/internal/regtest/modfile/modfile_test.go b/gopls/internal/regtest/modfile/modfile_test.go
index ab3a6ac..03e60ac 100644
--- a/gopls/internal/regtest/modfile/modfile_test.go
+++ b/gopls/internal/regtest/modfile/modfile_test.go
@@ -10,10 +10,10 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
 	"golang.org/x/tools/gopls/internal/lsp/tests/compare"
-	"golang.org/x/tools/internal/bug"
 
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/internal/testenv"
@@ -336,48 +336,6 @@
 	})
 }
 
-func TestUnusedDiag(t *testing.T) {
-
-	const proxy = `
--- example.com@v1.0.0/x.go --
-package pkg
-const X = 1
-`
-	const files = `
--- a/go.mod --
-module mod.com
-go 1.14
-require example.com v1.0.0
--- a/go.sum --
-example.com v1.0.0 h1:38O7j5rEBajXk+Q5wzLbRN7KqMkSgEiN9NqcM1O2bBM=
-example.com v1.0.0/go.mod h1:vUsPMGpx9ZXXzECCOsOmYCW7npJTwuA16yl89n3Mgls=
--- a/main.go --
-package main
-func main() {}
-`
-
-	const want = `module mod.com
-
-go 1.14
-`
-
-	RunMultiple{
-		{"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))},
-		{"nested", WithOptions(ProxyFiles(proxy))},
-	}.Run(t, files, func(t *testing.T, env *Env) {
-		env.OpenFile("a/go.mod")
-		var d protocol.PublishDiagnosticsParams
-		env.AfterChange(
-			Diagnostics(env.AtRegexp("a/go.mod", `require example.com`)),
-			ReadDiagnostics("a/go.mod", &d),
-		)
-		env.ApplyQuickFixes("a/go.mod", d.Diagnostics)
-		if got := env.BufferText("a/go.mod"); got != want {
-			t.Fatalf("unexpected go.mod content:\n%s", compare.Text(want, got))
-		}
-	})
-}
-
 // Test to reproduce golang/go#39041. It adds a new require to a go.mod file
 // that already has an unused require.
 func TestNewDepWithUnusedDep(t *testing.T) {
diff --git a/gopls/internal/regtest/template/template_test.go b/gopls/internal/regtest/template/template_test.go
index 4863564..cd190cd 100644
--- a/gopls/internal/regtest/template/template_test.go
+++ b/gopls/internal/regtest/template/template_test.go
@@ -8,10 +8,10 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 )
 
 func TestMain(m *testing.M) {
diff --git a/gopls/internal/regtest/watch/watch_test.go b/gopls/internal/regtest/watch/watch_test.go
index edb479a..f485b74 100644
--- a/gopls/internal/regtest/watch/watch_test.go
+++ b/gopls/internal/regtest/watch/watch_test.go
@@ -7,9 +7,9 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/gopls/internal/lsp/regtest"
-	"golang.org/x/tools/internal/bug"
 
 	"golang.org/x/tools/gopls/internal/lsp/fake"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
@@ -577,7 +577,7 @@
 		env.AfterChange(
 			NoDiagnostics(ForFile("main.go")),
 		)
-		if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, true); err != nil {
+		if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil {
 			t.Fatal(err)
 		}
 
diff --git a/gopls/internal/regtest/workspace/metadata_test.go b/gopls/internal/regtest/workspace/metadata_test.go
index ff72beb..cd91da8 100644
--- a/gopls/internal/regtest/workspace/metadata_test.go
+++ b/gopls/internal/regtest/workspace/metadata_test.go
@@ -97,7 +97,7 @@
 		// packages for bar.go
 		env.RegexpReplace("bar.go", "ignore", "excluded")
 		env.AfterChange(
-			Diagnostics(env.AtRegexp("bar.go", "package (main)"), WithMessage("No packages")),
+			Diagnostics(env.AtRegexp("bar.go", "package (main)"), WithMessage("not included in your workspace")),
 		)
 	})
 }
diff --git a/gopls/internal/regtest/workspace/quickfix_test.go b/gopls/internal/regtest/workspace/quickfix_test.go
new file mode 100644
index 0000000..5cb08f0
--- /dev/null
+++ b/gopls/internal/regtest/workspace/quickfix_test.go
@@ -0,0 +1,344 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package workspace
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/gopls/internal/lsp/protocol"
+	"golang.org/x/tools/gopls/internal/lsp/tests/compare"
+	"golang.org/x/tools/internal/testenv"
+
+	. "golang.org/x/tools/gopls/internal/lsp/regtest"
+)
+
+func TestQuickFix_UseModule(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18) // needs go.work
+
+	const files = `
+-- go.work --
+go 1.20
+
+use (
+	./a
+)
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main
+
+import "mod.com/a/lib"
+
+func main() {
+	_ = lib.C
+}
+
+-- a/lib/lib.go --
+package lib
+
+const C = "b"
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main
+
+import "mod.com/b/lib"
+
+func main() {
+	_ = lib.C
+}
+
+-- b/lib/lib.go --
+package lib
+
+const C = "b"
+`
+
+	for _, title := range []string{
+		"Use this module",
+		"Use all modules",
+	} {
+		t.Run(title, func(t *testing.T) {
+			Run(t, files, func(t *testing.T, env *Env) {
+				env.OpenFile("b/main.go")
+				var d protocol.PublishDiagnosticsParams
+				env.AfterChange(ReadDiagnostics("b/main.go", &d))
+				fixes := env.GetQuickFixes("b/main.go", d.Diagnostics)
+				var toApply []protocol.CodeAction
+				for _, fix := range fixes {
+					if strings.Contains(fix.Title, title) {
+						toApply = append(toApply, fix)
+					}
+				}
+				if len(toApply) != 1 {
+					t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply)
+				}
+				env.ApplyCodeAction(toApply[0])
+				env.AfterChange(NoDiagnostics())
+				want := `go 1.20
+
+use (
+	./a
+	./b
+)
+`
+				got := env.ReadWorkspaceFile("go.work")
+				if diff := compare.Text(want, got); diff != "" {
+					t.Errorf("unexpected go.work content:\n%s", diff)
+				}
+			})
+		})
+	}
+}
+
+func TestQuickFix_AddGoWork(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18) // needs go.work
+
+	v := goVersion(t)
+	const files = `
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main
+
+import "mod.com/a/lib"
+
+func main() {
+	_ = lib.C
+}
+
+-- a/lib/lib.go --
+package lib
+
+const C = "b"
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main
+
+import "mod.com/b/lib"
+
+func main() {
+	_ = lib.C
+}
+
+-- b/lib/lib.go --
+package lib
+
+const C = "b"
+`
+
+	tests := []struct {
+		name  string
+		file  string
+		title string
+		want  string
+	}{
+		{
+			"use b",
+			"b/main.go",
+			"Add a go.work file using this module",
+			fmt.Sprintf(`go 1.%d
+
+use ./b
+`, v),
+		},
+		{
+			"use a",
+			"a/main.go",
+			"Add a go.work file using this module",
+			fmt.Sprintf(`go 1.%d
+
+use ./a
+`, v),
+		},
+		{
+			"use all",
+			"a/main.go",
+			"Add a go.work file using all modules",
+			fmt.Sprintf(`go 1.%d
+
+use (
+	./a
+	./b
+)
+`, v),
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			Run(t, files, func(t *testing.T, env *Env) {
+				env.OpenFile(test.file)
+				var d protocol.PublishDiagnosticsParams
+				env.AfterChange(ReadDiagnostics(test.file, &d))
+				fixes := env.GetQuickFixes(test.file, d.Diagnostics)
+				var toApply []protocol.CodeAction
+				for _, fix := range fixes {
+					if strings.Contains(fix.Title, test.title) {
+						toApply = append(toApply, fix)
+					}
+				}
+				if len(toApply) != 1 {
+					t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), test.title, toApply)
+				}
+				env.ApplyCodeAction(toApply[0])
+				env.AfterChange(
+					NoDiagnostics(ForFile(test.file)),
+				)
+
+				got := env.ReadWorkspaceFile("go.work")
+				if diff := compare.Text(test.want, got); diff != "" {
+					t.Errorf("unexpected go.work content:\n%s", diff)
+				}
+			})
+		})
+	}
+}
+
+func TestQuickFix_UnsavedGoWork(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18) // needs go.work
+
+	const files = `
+-- go.work --
+go 1.21
+
+use (
+	./a
+)
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main
+
+func main() {}
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main
+
+func main() {}
+`
+
+	for _, title := range []string{
+		"Use this module",
+		"Use all modules",
+	} {
+		t.Run(title, func(t *testing.T) {
+			Run(t, files, func(t *testing.T, env *Env) {
+				env.OpenFile("go.work")
+				env.OpenFile("b/main.go")
+				env.RegexpReplace("go.work", "go 1.21", "go 1.21 // arbitrary comment")
+				var d protocol.PublishDiagnosticsParams
+				env.AfterChange(ReadDiagnostics("b/main.go", &d))
+				fixes := env.GetQuickFixes("b/main.go", d.Diagnostics)
+				var toApply []protocol.CodeAction
+				for _, fix := range fixes {
+					if strings.Contains(fix.Title, title) {
+						toApply = append(toApply, fix)
+					}
+				}
+				if len(toApply) != 1 {
+					t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply)
+				}
+				fix := toApply[0]
+				err := env.Editor.ApplyCodeAction(env.Ctx, fix)
+				if err == nil {
+					t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title)
+				}
+
+				if got := err.Error(); !strings.Contains(got, "must save") {
+					t.Errorf("codeAction(%q) returned error %q, want containing \"must save\"", fix.Title, err)
+				}
+			})
+		})
+	}
+}
+
+func TestQuickFix_GOWORKOff(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18) // needs go.work
+
+	const files = `
+-- go.work --
+go 1.21
+
+use (
+	./a
+)
+-- a/go.mod --
+module mod.com/a
+
+go 1.18
+
+-- a/main.go --
+package main
+
+func main() {}
+-- b/go.mod --
+module mod.com/b
+
+go 1.18
+
+-- b/main.go --
+package main
+
+func main() {}
+`
+
+	for _, title := range []string{
+		"Use this module",
+		"Use all modules",
+	} {
+		t.Run(title, func(t *testing.T) {
+			WithOptions(
+				EnvVars{"GOWORK": "off"},
+			).Run(t, files, func(t *testing.T, env *Env) {
+				env.OpenFile("go.work")
+				env.OpenFile("b/main.go")
+				var d protocol.PublishDiagnosticsParams
+				env.AfterChange(ReadDiagnostics("b/main.go", &d))
+				fixes := env.GetQuickFixes("b/main.go", d.Diagnostics)
+				var toApply []protocol.CodeAction
+				for _, fix := range fixes {
+					if strings.Contains(fix.Title, title) {
+						toApply = append(toApply, fix)
+					}
+				}
+				if len(toApply) != 1 {
+					t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply)
+				}
+				fix := toApply[0]
+				err := env.Editor.ApplyCodeAction(env.Ctx, fix)
+				if err == nil {
+					t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title)
+				}
+
+				if got := err.Error(); !strings.Contains(got, "GOWORK=off") {
+					t.Errorf("codeAction(%q) returned error %q, want containing \"GOWORK=off\"", fix.Title, err)
+				}
+			})
+		})
+	}
+}
diff --git a/gopls/internal/regtest/workspace/standalone_test.go b/gopls/internal/regtest/workspace/standalone_test.go
index e1021df..c9ce2f0 100644
--- a/gopls/internal/regtest/workspace/standalone_test.go
+++ b/gopls/internal/regtest/workspace/standalone_test.go
@@ -22,7 +22,7 @@
 -- lib/lib.go --
 package lib
 
-const C = 0
+const K = 0
 
 type I interface {
 	M()
@@ -37,13 +37,13 @@
 	"mod.test/lib"
 )
 
-const C = 1
+const K = 1
 
 type Mer struct{}
 func (Mer) M()
 
 func main() {
-	println(lib.C + C)
+	println(lib.K + K)
 }
 `
 	WithOptions(
@@ -53,13 +53,18 @@
 		Modes(Default),
 	).Run(t, files, func(t *testing.T, env *Env) {
 		// Initially, gopls should not know about the standalone file as it hasn't
-		// been opened. Therefore, we should only find one symbol 'C'.
-		syms := env.Symbol("C")
+		// been opened. Therefore, we should only find one symbol 'K'.
+		//
+		// (The choice of "K" is a little sleazy: it was originally "C" until
+		// we started adding "unsafe" to the workspace unconditionally, which
+		// caused a spurious match of "unsafe.Slice". But in practice every
+		// workspace depends on unsafe.)
+		syms := env.Symbol("K")
 		if got, want := len(syms), 1; got != want {
-			t.Errorf("got %d symbols, want %d", got, want)
+			t.Errorf("got %d symbols, want %d (%+v)", got, want, syms)
 		}
 
-		// Similarly, we should only find one reference to "C", and no
+		// Similarly, we should only find one reference to "K", and no
 		// implementations of I.
 		checkLocations := func(method string, gotLocations []protocol.Location, wantFiles ...string) {
 			var gotFiles []string
@@ -76,14 +81,14 @@
 		env.OpenFile("lib/lib.go")
 		env.AfterChange(NoDiagnostics())
 
-		// Replacing C with D should not cause any workspace diagnostics, since we
+		// Replacing K with D should not cause any workspace diagnostics, since we
 		// haven't yet opened the standalone file.
-		env.RegexpReplace("lib/lib.go", "C", "D")
+		env.RegexpReplace("lib/lib.go", "K", "D")
 		env.AfterChange(NoDiagnostics())
-		env.RegexpReplace("lib/lib.go", "D", "C")
+		env.RegexpReplace("lib/lib.go", "D", "K")
 		env.AfterChange(NoDiagnostics())
 
-		refs := env.References(env.RegexpSearch("lib/lib.go", "C"))
+		refs := env.References(env.RegexpSearch("lib/lib.go", "K"))
 		checkLocations("References", refs, "lib/lib.go")
 
 		impls := env.Implementations(env.RegexpSearch("lib/lib.go", "I"))
@@ -95,56 +100,56 @@
 
 		// Having opened the standalone file, we should find its symbols in the
 		// workspace.
-		syms = env.Symbol("C")
+		syms = env.Symbol("K")
 		if got, want := len(syms), 2; got != want {
 			t.Fatalf("got %d symbols, want %d", got, want)
 		}
 
-		foundMainC := false
+		foundMainK := false
 		var symNames []string
 		for _, sym := range syms {
 			symNames = append(symNames, sym.Name)
-			if sym.Name == "main.C" {
-				foundMainC = true
+			if sym.Name == "main.K" {
+				foundMainK = true
 			}
 		}
-		if !foundMainC {
-			t.Errorf("WorkspaceSymbol(\"C\") = %v, want containing main.C", symNames)
+		if !foundMainK {
+			t.Errorf("WorkspaceSymbol(\"K\") = %v, want containing main.K", symNames)
 		}
 
 		// We should resolve workspace definitions in the standalone file.
-		fileLoc := env.GoToDefinition(env.RegexpSearch("lib/ignore.go", "lib.(C)"))
+		fileLoc := env.GoToDefinition(env.RegexpSearch("lib/ignore.go", "lib.(K)"))
 		file := env.Sandbox.Workdir.URIToPath(fileLoc.URI)
 		if got, want := file, "lib/lib.go"; got != want {
-			t.Errorf("GoToDefinition(lib.C) = %v, want %v", got, want)
+			t.Errorf("GoToDefinition(lib.K) = %v, want %v", got, want)
 		}
 
 		// ...as well as intra-file definitions
-		loc := env.GoToDefinition(env.RegexpSearch("lib/ignore.go", "\\+ (C)"))
-		wantLoc := env.RegexpSearch("lib/ignore.go", "const (C)")
+		loc := env.GoToDefinition(env.RegexpSearch("lib/ignore.go", "\\+ (K)"))
+		wantLoc := env.RegexpSearch("lib/ignore.go", "const (K)")
 		if loc != wantLoc {
-			t.Errorf("GoToDefinition(C) = %v, want %v", loc, wantLoc)
+			t.Errorf("GoToDefinition(K) = %v, want %v", loc, wantLoc)
 		}
 
-		// Renaming "lib.C" to "lib.D" should cause a diagnostic in the standalone
+		// Renaming "lib.K" to "lib.D" should cause a diagnostic in the standalone
 		// file.
-		env.RegexpReplace("lib/lib.go", "C", "D")
-		env.AfterChange(Diagnostics(env.AtRegexp("lib/ignore.go", "lib.(C)")))
+		env.RegexpReplace("lib/lib.go", "K", "D")
+		env.AfterChange(Diagnostics(env.AtRegexp("lib/ignore.go", "lib.(K)")))
 
 		// Undoing the replacement should fix diagnostics
-		env.RegexpReplace("lib/lib.go", "D", "C")
+		env.RegexpReplace("lib/lib.go", "D", "K")
 		env.AfterChange(NoDiagnostics())
 
 		// Now that our workspace has no errors, we should be able to find
 		// references and rename.
-		refs = env.References(env.RegexpSearch("lib/lib.go", "C"))
+		refs = env.References(env.RegexpSearch("lib/lib.go", "K"))
 		checkLocations("References", refs, "lib/lib.go", "lib/ignore.go")
 
 		impls = env.Implementations(env.RegexpSearch("lib/lib.go", "I"))
 		checkLocations("Implementations", impls, "lib/ignore.go")
 
 		// Renaming should rename in the standalone package.
-		env.Rename(env.RegexpSearch("lib/lib.go", "C"), "D")
+		env.Rename(env.RegexpSearch("lib/lib.go", "K"), "D")
 		env.RegexpSearch("lib/ignore.go", "lib.D")
 	})
 }
diff --git a/gopls/internal/regtest/workspace/workspace_test.go b/gopls/internal/regtest/workspace/workspace_test.go
index 0aff471..5a94e42 100644
--- a/gopls/internal/regtest/workspace/workspace_test.go
+++ b/gopls/internal/regtest/workspace/workspace_test.go
@@ -11,11 +11,11 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/gopls/internal/bug"
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/gopls/internal/lsp"
 	"golang.org/x/tools/gopls/internal/lsp/fake"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
-	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/testenv"
 
@@ -647,7 +647,7 @@
 		}
 
 		// This fails if guarded with a OnceMet(DoneWithSave(), ...), because it is
-		// debounced (and therefore not synchronous with the change).
+		// delayed (and therefore not synchronous with the change).
 		env.Await(NoDiagnostics(ForFile("modb/go.mod")))
 
 		// Test Formatting.
diff --git a/gopls/internal/span/uri.go b/gopls/internal/span/uri.go
index e6191f7..cf2d66d 100644
--- a/gopls/internal/span/uri.go
+++ b/gopls/internal/span/uri.go
@@ -175,11 +175,3 @@
 	}
 	return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
 }
-
-// Dir returns the URI for the directory containing uri. Dir panics if uri is
-// not a file uri.
-//
-// TODO(rfindley): add a unit test for various edge cases.
-func Dir(uri URI) URI {
-	return URIFromPath(filepath.Dir(uri.Filename()))
-}
diff --git a/gopls/test/debug/debug_test.go b/gopls/test/debug/debug_test.go
index be1f509..757dd2f 100644
--- a/gopls/test/debug/debug_test.go
+++ b/gopls/test/debug/debug_test.go
@@ -13,6 +13,7 @@
 import (
 	"go/ast"
 	"html/template"
+	"os"
 	"runtime"
 	"sort"
 	"strings"
@@ -44,11 +45,19 @@
 }
 
 func TestTemplates(t *testing.T) {
-	testenv.NeedsGoBuild(t)
+	testenv.NeedsGoPackages(t)
+	testenv.NeedsLocalXTools(t)
 
 	cfg := &packages.Config{
-		Mode: packages.NeedTypesInfo | packages.LoadAllSyntax, // figure out what's necessary PJW
+		Mode: packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo,
 	}
+	cfg.Env = os.Environ()
+	cfg.Env = append(cfg.Env,
+		"GOPACKAGESDRIVER=off",
+		"GOWORK=off", // necessary for -mod=mod below
+		"GOFLAGS=-mod=mod",
+	)
+
 	pkgs, err := packages.Load(cfg, "golang.org/x/tools/gopls/internal/lsp/debug")
 	if err != nil {
 		t.Fatal(err)
@@ -107,7 +116,9 @@
 		// the FuncMap is an annoyance; should not be necessary
 		if err := templatecheck.CheckHTML(v.tmpl, v.data); err != nil {
 			t.Errorf("%s: %v", k, err)
+			continue
 		}
+		t.Logf("%s ok", k)
 	}
 }
 
diff --git a/internal/bisect/bisect.go b/internal/bisect/bisect.go
new file mode 100644
index 0000000..50cf53b
--- /dev/null
+++ b/internal/bisect/bisect.go
@@ -0,0 +1,522 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package bisect can be used by compilers and other programs
+// to serve as a target for the bisect debugging tool.
+// See [golang.org/x/tools/cmd/bisect] for details about using the tool.
+//
+// To be a bisect target, allowing bisect to help determine which of a set of independent
+// changes provokes a failure, a program needs to:
+//
+//  1. Define a way to accept a change pattern on its command line or in its environment.
+//     The most common mechanism is a command-line flag.
+//     The pattern can be passed to [New] to create a [Matcher], the compiled form of a pattern.
+//
+//  2. Assign each change a unique ID. One possibility is to use a sequence number,
+//     but the most common mechanism is to hash some kind of identifying information
+//     like the file and line number where the change might be applied.
+//     [Hash] hashes its arguments to compute an ID.
+//
+//  3. Enable each change that the pattern says should be enabled.
+//     The [Matcher.Enable] method answers this question for a given change ID.
+//
+//  4. Report each change that the pattern says should be reported.
+//     The [Matcher.Report] method answers this question for a given change ID.
+//     The report consists of one or more lines on standard error or standard output
+//     that contain a “match marker”. [Marker] returns the match marker for a given ID.
+//     When bisect reports a change as causing the failure, it identifies the change
+//     by printing those report lines, with the match marker removed.
+//
+// # Example Usage
+//
+// A program starts by defining how it receives the pattern. In this example, we will assume a flag.
+// The next step is to compile the pattern:
+//
+//	m, err := bisect.New(patternFlag)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//
+// Then, each time a potential change is considered, the program computes
+// a change ID by hashing identifying information (source file and line, in this case)
+// and then calls m.ShouldEnable and m.ShouldReport to decide whether to
+// enable and report the change, respectively:
+//
+//	for each change {
+//		h := bisect.Hash(file, line)
+//		if m.ShouldEnable(h) {
+//			enableChange()
+//		}
+//		if m.ShouldReport(h) {
+//			log.Printf("%v %s:%d", bisect.Marker(h), file, line)
+//		}
+//	}
+//
+// Note that the two return different values when bisect is searching for a
+// minimal set of changes to disable to provoke a failure.
+//
+// Finally, note that New returns a nil Matcher when there is no pattern,
+// meaning that the target is not running under bisect at all.
+// In that common case, the computation of the hash can be avoided entirely
+// by checking for m == nil first:
+//
+//	for each change {
+//		if m == nil {
+//			enableChange()
+//		} else {
+//			h := bisect.Hash(file, line)
+//			if m.ShouldEnable(h) {
+//				enableChange()
+//			}
+//			if m.ShouldReport(h) {
+//				log.Printf("%v %s:%d", bisect.Marker(h), file, line)
+//			}
+//		}
+//	}
+//
+// # Pattern Syntax
+//
+// Patterns are generated by the bisect tool and interpreted by [New].
+// Users should not have to understand the patterns except when
+// debugging a target's bisect support or debugging the bisect tool itself.
+//
+// The pattern syntax selecting a change is a sequence of bit strings
+// separated by + and - operators. Each bit string denotes the set of
+// changes with IDs ending in those bits, + is set addition, - is set subtraction,
+// and the expression is evaluated in the usual left-to-right order.
+// The special binary number “y” denotes the set of all changes,
+// standing in for the empty bit string.
+// In the expression, all the + operators must appear before all the - operators.
+// A leading + adds to an empty set. A leading - subtracts from the set of all
+// possible suffixes.
+//
+// For example:
+//
+//   - “01+10” and “+01+10” both denote the set of changes
+//     with IDs ending with the bits 01 or 10.
+//
+//   - “01+10-1001” denotes the set of changes with IDs
+//     ending with the bits 01 or 10, but excluding those ending in 1001.
+//
+//   - “-01-1000” and “y-01-1000” both denote the set of all changes
+//     with IDs not ending in 01 or 1000.
+//
+//   - “0+1-01+001” is not a valid pattern, because all the + operators do not
+//     appear before all the - operators.
+//
+// In the syntaxes described so far, the pattern specifies the changes to
+// enable and report. If a pattern is prefixed by a “!”, the meaning
+// changes: the pattern specifies the changes to DISABLE and report. This
+// mode of operation is needed when a program passes with all changes
+// enabled but fails with no changes enabled. In this case, bisect
+// searches for minimal sets of changes to disable.
+// Put another way, the leading “!” inverts the result from [Matcher.ShouldEnable]
+// but does not invert the result from [Matcher.ShouldReport].
+//
+// As a convenience for manual debugging, “n” is an alias for “!y”,
+// meaning to disable and report all changes.
+//
+// Finally, a leading “v” in the pattern indicates that the reports will be shown
+// to the user of bisect to describe the changes involved in a failure.
+// At the API level, the leading “v” causes [Matcher.Verbose] to return true.
+// See the next section for details.
+//
+// # Match Reports
+//
+// The target program must enable only those changes matched
+// by the pattern, and it must print a match report for each such change.
+// A match report consists of one or more lines of text that will be
+// printed by the bisect tool to describe a change implicated in causing
+// a failure. Each line in the report for a given change must contain a
+// match marker with that change ID, as returned by [Marker].
+// The markers are elided when displaying the lines to the user.
+//
+// A match marker has the form “[bisect-match 0x1234]” where
+// 0x1234 is the change ID in hexadecimal.
+// An alternate form is “[bisect-match 010101]”, giving the change ID in binary.
+//
+// When [Matcher.Verbose] returns false, the match reports are only
+// being processed by bisect to learn the set of enabled changes,
+// not shown to the user, meaning that each report can be a match
+// marker on a line by itself, eliding the usual textual description.
+// When the textual description is expensive to compute,
+// checking [Matcher.Verbose] can help avoid that expense
+// in most runs.
+package bisect
+
+// New creates and returns a new Matcher implementing the given pattern.
+// The pattern syntax is defined in the package doc comment.
+//
+// In addition to the pattern syntax, New("") returns nil, nil.
+// The nil *Matcher is valid for use: it returns true from ShouldEnable
+// and false from ShouldReport for all changes. Callers can avoid calling
+// [Hash], [Matcher.ShouldEnable], and [Matcher.ShouldReport] entirely
+// when they recognize the nil Matcher.
+func New(pattern string) (*Matcher, error) {
+	if pattern == "" {
+		return nil, nil
+	}
+
+	m := new(Matcher)
+
+	// Allow multiple v, so that “bisect cmd vPATTERN” can force verbose all the time.
+	p := pattern
+	for len(p) > 0 && p[0] == 'v' {
+		m.verbose = true
+		p = p[1:]
+		if p == "" {
+			return nil, &parseError{"invalid pattern syntax: " + pattern}
+		}
+	}
+
+	// Allow multiple !, each negating the last, so that “bisect cmd !PATTERN” works
+	// even when bisect chooses to add its own !.
+	m.enable = true
+	for len(p) > 0 && p[0] == '!' {
+		m.enable = !m.enable
+		p = p[1:]
+		if p == "" {
+			return nil, &parseError{"invalid pattern syntax: " + pattern}
+		}
+	}
+
+	if p == "n" {
+		// n is an alias for !y.
+		m.enable = !m.enable
+		p = "y"
+	}
+
+	// Parse actual pattern syntax.
+	result := true
+	bits := uint64(0)
+	start := 0
+	wid := 1 // 1-bit (binary); sometimes 4-bit (hex)
+	for i := 0; i <= len(p); i++ {
+		// Imagine a trailing - at the end of the pattern to flush final suffix
+		c := byte('-')
+		if i < len(p) {
+			c = p[i]
+		}
+		if i == start && wid == 1 && c == 'x' { // leading x for hex
+			start = i + 1
+			wid = 4
+			continue
+		}
+		switch c {
+		default:
+			return nil, &parseError{"invalid pattern syntax: " + pattern}
+		case '2', '3', '4', '5', '6', '7', '8', '9':
+			if wid != 4 {
+				return nil, &parseError{"invalid pattern syntax: " + pattern}
+			}
+			fallthrough
+		case '0', '1':
+			bits <<= wid
+			bits |= uint64(c - '0')
+		case 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F':
+			if wid != 4 {
+				return nil, &parseError{"invalid pattern syntax: " + pattern}
+			}
+			bits <<= 4
+			bits |= uint64(c&^0x20 - 'A' + 10)
+		case 'y':
+			if i+1 < len(p) && (p[i+1] == '0' || p[i+1] == '1') {
+				return nil, &parseError{"invalid pattern syntax: " + pattern}
+			}
+			bits = 0
+		case '+', '-':
+			if c == '+' && result == false {
+				// Have already seen a -. Should be - from here on.
+				return nil, &parseError{"invalid pattern syntax (+ after -): " + pattern}
+			}
+			if i > 0 {
+				n := (i - start) * wid
+				if n > 64 {
+					return nil, &parseError{"pattern bits too long: " + pattern}
+				}
+				if n <= 0 {
+					return nil, &parseError{"invalid pattern syntax: " + pattern}
+				}
+				if p[start] == 'y' {
+					n = 0
+				}
+				mask := uint64(1)<<n - 1
+				m.list = append(m.list, cond{mask, bits, result})
+			} else if c == '-' {
+				// leading - subtracts from complete set
+				m.list = append(m.list, cond{0, 0, true})
+			}
+			bits = 0
+			result = c == '+'
+			start = i + 1
+			wid = 1
+		}
+	}
+	return m, nil
+}
+
+// A Matcher is the parsed, compiled form of a PATTERN string.
+// The nil *Matcher is valid: it has all changes enabled but none reported.
+type Matcher struct {
+	verbose bool
+	enable  bool   // when true, list is for “enable and report” (when false, “disable and report”)
+	list    []cond // conditions; later ones win over earlier ones
+}
+
+// A cond is a single condition in the matcher.
+// Given an input id, if id&mask == bits, return the result.
+type cond struct {
+	mask   uint64
+	bits   uint64
+	result bool
+}
+
+// Verbose reports whether the reports will be shown to users
+// and need to include a human-readable change description.
+// If not, the target can print just the Marker on a line by itself
+// and perhaps save some computation.
+func (m *Matcher) Verbose() bool {
+	return m.verbose
+}
+
+// ShouldEnable reports whether the change with the given id should be enabled.
+func (m *Matcher) ShouldEnable(id uint64) bool {
+	if m == nil {
+		return true
+	}
+	for i := len(m.list) - 1; i >= 0; i-- {
+		c := &m.list[i]
+		if id&c.mask == c.bits {
+			return c.result == m.enable
+		}
+	}
+	return false == m.enable
+}
+
+// ShouldReport reports whether the change with the given id should be reported.
+func (m *Matcher) ShouldReport(id uint64) bool {
+	if m == nil {
+		return false
+	}
+	for i := len(m.list) - 1; i >= 0; i-- {
+		c := &m.list[i]
+		if id&c.mask == c.bits {
+			return c.result
+		}
+	}
+	return false
+}
+
+// Marker returns the match marker text to use on any line reporting details
+// about a match of the given ID.
+// It always returns the hexadecimal format.
+func Marker(id uint64) string {
+	return string(AppendMarker(nil, id))
+}
+
+// AppendMarker is like [Marker] but appends the marker to dst.
+func AppendMarker(dst []byte, id uint64) []byte {
+	const prefix = "[bisect-match 0x"
+	var buf [len(prefix) + 16 + 1]byte
+	copy(buf[:], prefix)
+	for i := 0; i < 16; i++ {
+		buf[len(prefix)+i] = "0123456789abcdef"[id>>60]
+		id <<= 4
+	}
+	buf[len(prefix)+16] = ']'
+	return append(dst, buf[:]...)
+}
+
+// CutMarker finds the first match marker in line and removes it,
+// returning the shortened line (with the marker removed),
+// the ID from the match marker,
+// and whether a marker was found at all.
+// If there is no marker, CutMarker returns line, 0, false.
+func CutMarker(line string) (short string, id uint64, ok bool) {
+	// Find first instance of prefix.
+	prefix := "[bisect-match "
+	i := 0
+	for ; ; i++ {
+		if i >= len(line)-len(prefix) {
+			return line, 0, false
+		}
+		if line[i] == '[' && line[i:i+len(prefix)] == prefix {
+			break
+		}
+	}
+
+	// Scan to ].
+	j := i + len(prefix)
+	for j < len(line) && line[j] != ']' {
+		j++
+	}
+	if j >= len(line) {
+		return line, 0, false
+	}
+
+	// Parse id.
+	idstr := line[i+len(prefix) : j]
+	if len(idstr) >= 3 && idstr[:2] == "0x" {
+		// parse hex
+		if len(idstr) > 2+16 { // max 0x + 16 digits
+			return line, 0, false
+		}
+		for i := 2; i < len(idstr); i++ {
+			id <<= 4
+			switch c := idstr[i]; {
+			case '0' <= c && c <= '9':
+				id |= uint64(c - '0')
+			case 'a' <= c && c <= 'f':
+				id |= uint64(c - 'a' + 10)
+			case 'A' <= c && c <= 'F':
+				id |= uint64(c - 'A' + 10)
+			}
+		}
+	} else {
+		if idstr == "" || len(idstr) > 64 { // min 1 digit, max 64 digits
+			return line, 0, false
+		}
+		// parse binary
+		for i := 0; i < len(idstr); i++ {
+			id <<= 1
+			switch c := idstr[i]; c {
+			default:
+				return line, 0, false
+			case '0', '1':
+				id |= uint64(c - '0')
+			}
+		}
+	}
+
+	// Construct shortened line.
+	// Remove at most one space from around the marker,
+	// so that "foo [marker] bar" shortens to "foo bar".
+	j++ // skip ]
+	if i > 0 && line[i-1] == ' ' {
+		i--
+	} else if j < len(line) && line[j] == ' ' {
+		j++
+	}
+	short = line[:i] + line[j:]
+	return short, id, true
+}
+
+// Hash computes a hash of the data arguments,
+// each of which must be of type string, byte, int, uint, int32, uint32, int64, uint64, uintptr, or a slice of one of those types.
+func Hash(data ...any) uint64 {
+	h := offset64
+	for _, v := range data {
+		switch v := v.(type) {
+		default:
+			// Note: Not printing the type, because reflect.ValueOf(v)
+			// would make the interfaces prepared by the caller escape
+			// and therefore allocate. This way, Hash(file, line) runs
+			// without any allocation. It should be clear from the
+			// source code calling Hash what the bad argument was.
+			panic("bisect.Hash: unexpected argument type")
+		case string:
+			h = fnvString(h, v)
+		case byte:
+			h = fnv(h, v)
+		case int:
+			h = fnvUint64(h, uint64(v))
+		case uint:
+			h = fnvUint64(h, uint64(v))
+		case int32:
+			h = fnvUint32(h, uint32(v))
+		case uint32:
+			h = fnvUint32(h, v)
+		case int64:
+			h = fnvUint64(h, uint64(v))
+		case uint64:
+			h = fnvUint64(h, v)
+		case uintptr:
+			h = fnvUint64(h, uint64(v))
+		case []string:
+			for _, x := range v {
+				h = fnvString(h, x)
+			}
+		case []byte:
+			for _, x := range v {
+				h = fnv(h, x)
+			}
+		case []int:
+			for _, x := range v {
+				h = fnvUint64(h, uint64(x))
+			}
+		case []uint:
+			for _, x := range v {
+				h = fnvUint64(h, uint64(x))
+			}
+		case []int32:
+			for _, x := range v {
+				h = fnvUint32(h, uint32(x))
+			}
+		case []uint32:
+			for _, x := range v {
+				h = fnvUint32(h, x)
+			}
+		case []int64:
+			for _, x := range v {
+				h = fnvUint64(h, uint64(x))
+			}
+		case []uint64:
+			for _, x := range v {
+				h = fnvUint64(h, x)
+			}
+		case []uintptr:
+			for _, x := range v {
+				h = fnvUint64(h, uint64(x))
+			}
+		}
+	}
+	return h
+}
+
+// Trivial error implementation, here to avoid importing errors.
+
+type parseError struct{ text string }
+
+func (e *parseError) Error() string { return e.text }
+
+// FNV-1a implementation. See Go's hash/fnv/fnv.go.
+// Copied here for simplicity (can handle uints directly)
+// and to avoid the dependency.
+
+const (
+	offset64 uint64 = 14695981039346656037
+	prime64  uint64 = 1099511628211
+)
+
+func fnv(h uint64, x byte) uint64 {
+	h ^= uint64(x)
+	h *= prime64
+	return h
+}
+
+func fnvString(h uint64, x string) uint64 {
+	for i := 0; i < len(x); i++ {
+		h ^= uint64(x[i])
+		h *= prime64
+	}
+	return h
+}
+
+func fnvUint64(h uint64, x uint64) uint64 {
+	for i := 0; i < 8; i++ {
+		h ^= uint64(x & 0xFF)
+		x >>= 8
+		h *= prime64
+	}
+	return h
+}
+
+func fnvUint32(h uint64, x uint32) uint64 {
+	for i := 0; i < 4; i++ {
+		h ^= uint64(x & 0xFF)
+		x >>= 8
+		h *= prime64
+	}
+	return h
+}
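
The package documentation above walks through the full protocol for a bisect target: accept a pattern (typically via a flag), compile it with New, hash a stable identifier for each change, then consult ShouldEnable and ShouldReport. A minimal sketch of such a target, using only the API introduced by this patch; the flag name, the change list, and the enableChange helper are illustrative assumptions, and the internal import path is shown only for context (internal packages are not importable from outside x/tools):

	package main

	import (
		"flag"
		"log"

		"golang.org/x/tools/internal/bisect" // path added by this patch; internal, shown for illustration only
	)

	var bisectPattern = flag.String("bisect", "", "bisect pattern (see golang.org/x/tools/cmd/bisect)")

	func main() {
		flag.Parse()

		// New returns a nil *Matcher (and nil error) for the empty pattern,
		// i.e. when the program is not running under bisect at all.
		m, err := bisect.New(*bisectPattern)
		if err != nil {
			log.Fatal(err)
		}

		// Hypothetical set of independently toggleable changes.
		changes := []struct {
			file string
			line int
		}{
			{"a.go", 10},
			{"b.go", 20},
		}

		for _, c := range changes {
			if m == nil {
				enableChange(c.file, c.line) // no pattern: enable everything, skip hashing
				continue
			}
			h := bisect.Hash(c.file, c.line)
			if m.ShouldEnable(h) {
				enableChange(c.file, c.line)
			}
			if m.ShouldReport(h) {
				// The marker lets cmd/bisect attribute this report line to the change.
				log.Printf("%v %s:%d", bisect.Marker(h), c.file, c.line)
			}
		}
	}

	// enableChange stands in for whatever applying the change means in a real target.
	func enableChange(file string, line int) {}
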
diff --git a/internal/bisect/bisect_test.go b/internal/bisect/bisect_test.go
new file mode 100644
index 0000000..1688f47
--- /dev/null
+++ b/internal/bisect/bisect_test.go
@@ -0,0 +1,35 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bisect
+
+import (
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+)
+
+// In order for package bisect to be copied into the standard library
+// and used by very low-level packages such as internal/godebug,
+// it needs to have no imports at all.
+func TestNoImports(t *testing.T) {
+	files, err := filepath.Glob("*.go")
+	if err != nil {
+		t.Fatal(err)
+	}
+	for _, file := range files {
+		if strings.HasSuffix(file, "_test.go") {
+			continue
+		}
+		data, err := os.ReadFile(file)
+		if err != nil {
+			t.Error(err)
+			continue
+		}
+		if strings.Contains(string(data), "\nimport") {
+			t.Errorf("%s contains imports; package bisect must not import other packages", file)
+		}
+	}
+}
diff --git a/internal/diff/diff.go b/internal/diff/diff.go
index 2bc63c2..602f1e7 100644
--- a/internal/diff/diff.go
+++ b/internal/diff/diff.go
@@ -115,8 +115,8 @@
 	}
 
 	// Do all edits begin and end at the start of a line?
-	// TODO(adonovan): opt: is this fast path necessary?
-	// (Also, it complicates the result ownership.)
+	// TODO(adonovan, pjw): why does omitting this 'optimization'
+	// cause tests to fail? (TestDiff/insert-line,extra_newline)
 	for _, edit := range edits {
 		if edit.Start >= len(src) || // insertion at EOF
 			edit.Start > 0 && src[edit.Start-1] != '\n' || // not at line start
diff --git a/internal/diff/difftest/difftest.go b/internal/diff/difftest/difftest.go
index 4a25111..9b00590 100644
--- a/internal/diff/difftest/difftest.go
+++ b/internal/diff/difftest/difftest.go
@@ -120,6 +120,17 @@
 `[1:],
 	Edits: []diff.Edit{{Start: 0, End: 1, New: "B"}},
 }, {
+	Name: "delete_empty",
+	In:   "meow",
+	Out:  "", // GNU diff -u special case: +0,0
+	Unified: UnifiedPrefix + `
+@@ -1 +0,0 @@
+-meow
+\ No newline at end of file
+`[1:],
+	Edits:     []diff.Edit{{Start: 0, End: 4, New: ""}},
+	LineEdits: []diff.Edit{{Start: 0, End: 4, New: ""}},
+}, {
 	Name: "append_empty",
 	In:   "", // GNU diff -u special case: -0,0
 	Out:  "AB\nC",
@@ -261,6 +272,13 @@
 -
  A
 `,
+	}, {
+		Name:      "unified_lines",
+		In:        "aaa\nccc\n",
+		Out:       "aaa\nbbb\nccc\n",
+		Edits:     []diff.Edit{{Start: 3, End: 3, New: "\nbbb"}},
+		LineEdits: []diff.Edit{{Start: 0, End: 4, New: "aaa\nbbb\n"}},
+		Unified:   UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n",
 	},
 }
 
diff --git a/internal/diff/lcs/doc.go b/internal/diff/lcs/doc.go
index dc779f3..9029dd2 100644
--- a/internal/diff/lcs/doc.go
+++ b/internal/diff/lcs/doc.go
@@ -151,6 +151,6 @@
 and can be found at
 http://www.xmailserver.org/diff2.pdf
 
-(There is a generic implementation of the algorithm the the repository with git hash
+(There is a generic implementation of the algorithm in the repository with git hash
 b9ad7e4ade3a686d608e44475390ad428e60e7fc)
 */
diff --git a/internal/diff/unified.go b/internal/diff/unified.go
index fa376f1..ed2c22e 100644
--- a/internal/diff/unified.go
+++ b/internal/diff/unified.go
@@ -156,9 +156,18 @@
 			last++
 		}
 		if edit.New != "" {
-			for _, content := range splitLines(edit.New) {
-				h.Lines = append(h.Lines, line{Kind: Insert, Content: content})
+			for i, content := range splitLines(edit.New) {
 				toLine++
+				// Merge identical Delete+Insert.
+				// This is an unwanted output of converting diffs to line diffs
+				// that is easiest to fix by postprocessing.
+				// e.g.  issue #59232: ("aaa\nccc\n", "aaa\nbbb\nccc")
+				// -> [Delete "aaa\n", Insert "aaa\n", Insert "bbb\n", ...].
+				if i == 0 && last > start && h.Lines[len(h.Lines)-1].Content == content {
+					h.Lines[len(h.Lines)-1].Kind = Equal
+					continue
+				}
+				h.Lines = append(h.Lines, line{Kind: Insert, Content: content})
 			}
 		}
 	}
@@ -226,6 +235,9 @@
 		}
 		if toCount > 1 {
 			fmt.Fprintf(b, " +%d,%d", hunk.ToLine, toCount)
+		} else if hunk.ToLine == 1 && toCount == 0 {
+			// Match odd GNU diff -u behavior adding to empty file.
+			fmt.Fprintf(b, " +0,0")
 		} else {
 			fmt.Fprintf(b, " +%d", hunk.ToLine)
 		}
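
The two hunks above fix separate presentation issues: the first collapses a Delete immediately followed by an Insert of identical content into an Equal line (issue #59232), and the second emits GNU diff's odd "+0,0" header when the new file is empty. A simplified, self-contained sketch of the first idea, using hypothetical local types rather than this package's Hunk/line machinery:

	package main

	import "fmt"

	type opKind int

	const (
		del opKind = iota
		ins
		eq
	)

	type line struct {
		kind    opKind
		content string
	}

	// mergeDeleteInsert collapses a deletion that is immediately followed by
	// an insertion of the same content into a single Equal line.
	func mergeDeleteInsert(lines []line) []line {
		var out []line
		for _, l := range lines {
			n := len(out)
			if l.kind == ins && n > 0 && out[n-1].kind == del && out[n-1].content == l.content {
				out[n-1].kind = eq
				continue
			}
			out = append(out, l)
		}
		return out
	}

	func main() {
		in := []line{{del, "aaa"}, {ins, "aaa"}, {ins, "bbb"}, {eq, "ccc"}}
		fmt.Println(mergeDeleteInsert(in)) // [{2 aaa} {1 bbb} {2 ccc}]
	}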
diff --git a/internal/diffp/diff.go b/internal/diffp/diff.go
new file mode 100644
index 0000000..aa5ef81
--- /dev/null
+++ b/internal/diffp/diff.go
@@ -0,0 +1,264 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package diffp implements a basic diff algorithm equivalent to patience diff.
+// It is a copy of internal/diff from the main Go repo, renamed to diffp to avoid
+// conflict with the existing golang.org/x/tools/internal/diff.
+package diffp
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+	"strings"
+)
+
+// A pair is a pair of values tracked for both the x and y side of a diff.
+// It is typically a pair of line indexes.
+type pair struct{ x, y int }
+
+// Diff returns an anchored diff of the two texts old and new
+// in the “unified diff” format. If old and new are identical,
+// Diff returns a nil slice (no output).
+//
+// Unix diff implementations typically look for a diff with
+// the smallest number of lines inserted and removed,
+// which can in the worst case take time quadratic in the
+// number of lines in the texts. As a result, many implementations
+// either can be made to run for a long time or cut off the search
+// after a predetermined amount of work.
+//
+// In contrast, this implementation looks for a diff with the
+// smallest number of “unique” lines inserted and removed,
+// where unique means a line that appears just once in both old and new.
+// We call this an “anchored diff” because the unique lines anchor
+// the chosen matching regions. An anchored diff is usually clearer
+// than a standard diff, because the algorithm does not try to
+// reuse unrelated blank lines or closing braces.
+// The algorithm also guarantees to run in O(n log n) time
+// instead of the standard O(n²) time.
+//
+// Some systems call this approach a “patience diff,” named for
+// the “patience sorting” algorithm, itself named for a solitaire card game.
+// We avoid that name for two reasons. First, the name has been used
+// for a few different variants of the algorithm, so it is imprecise.
+// Second, the name is frequently interpreted as meaning that you have
+// to wait longer (to be patient) for the diff, meaning that it is a slower algorithm,
+// when in fact the algorithm is faster than the standard one.
+func Diff(oldName string, old []byte, newName string, new []byte) []byte {
+	if bytes.Equal(old, new) {
+		return nil
+	}
+	x := lines(old)
+	y := lines(new)
+
+	// Print diff header.
+	var out bytes.Buffer
+	fmt.Fprintf(&out, "diff %s %s\n", oldName, newName)
+	fmt.Fprintf(&out, "--- %s\n", oldName)
+	fmt.Fprintf(&out, "+++ %s\n", newName)
+
+	// Loop over matches to consider,
+	// expanding each match to include surrounding lines,
+	// and then printing diff chunks.
+	// To avoid setup/teardown cases outside the loop,
+	// tgs returns a leading {0,0} and trailing {len(x), len(y)} pair
+	// in the sequence of matches.
+	var (
+		done  pair     // printed up to x[:done.x] and y[:done.y]
+		chunk pair     // start lines of current chunk
+		count pair     // number of lines from each side in current chunk
+		ctext []string // lines for current chunk
+	)
+	for _, m := range tgs(x, y) {
+		if m.x < done.x {
+			// Already handled scanning forward from earlier match.
+			continue
+		}
+
+		// Expand matching lines as far as possible,
+		// establishing that x[start.x:end.x] == y[start.y:end.y].
+		// Note that on the first (or last) iteration we may (or definitely do)
+		// have an empty match: start.x==end.x and start.y==end.y.
+		start := m
+		for start.x > done.x && start.y > done.y && x[start.x-1] == y[start.y-1] {
+			start.x--
+			start.y--
+		}
+		end := m
+		for end.x < len(x) && end.y < len(y) && x[end.x] == y[end.y] {
+			end.x++
+			end.y++
+		}
+
+		// Emit the mismatched lines before start into this chunk.
+		// (No effect on first sentinel iteration, when start = {0,0}.)
+		for _, s := range x[done.x:start.x] {
+			ctext = append(ctext, "-"+s)
+			count.x++
+		}
+		for _, s := range y[done.y:start.y] {
+			ctext = append(ctext, "+"+s)
+			count.y++
+		}
+
+		// If we're not at EOF and have too few common lines,
+		// the chunk includes all the common lines and continues.
+		const C = 3 // number of context lines
+		if (end.x < len(x) || end.y < len(y)) &&
+			(end.x-start.x < C || (len(ctext) > 0 && end.x-start.x < 2*C)) {
+			for _, s := range x[start.x:end.x] {
+				ctext = append(ctext, " "+s)
+				count.x++
+				count.y++
+			}
+			done = end
+			continue
+		}
+
+		// End chunk with common lines for context.
+		if len(ctext) > 0 {
+			n := end.x - start.x
+			if n > C {
+				n = C
+			}
+			for _, s := range x[start.x : start.x+n] {
+				ctext = append(ctext, " "+s)
+				count.x++
+				count.y++
+			}
+			done = pair{start.x + n, start.y + n}
+
+			// Format and emit chunk.
+			// Convert line numbers to 1-indexed.
+			// Special case: empty file shows up as 0,0 not 1,0.
+			if count.x > 0 {
+				chunk.x++
+			}
+			if count.y > 0 {
+				chunk.y++
+			}
+			fmt.Fprintf(&out, "@@ -%d,%d +%d,%d @@\n", chunk.x, count.x, chunk.y, count.y)
+			for _, s := range ctext {
+				out.WriteString(s)
+			}
+			count.x = 0
+			count.y = 0
+			ctext = ctext[:0]
+		}
+
+		// If we reached EOF, we're done.
+		if end.x >= len(x) && end.y >= len(y) {
+			break
+		}
+
+		// Otherwise start a new chunk.
+		chunk = pair{end.x - C, end.y - C}
+		for _, s := range x[chunk.x:end.x] {
+			ctext = append(ctext, " "+s)
+			count.x++
+			count.y++
+		}
+		done = end
+	}
+
+	return out.Bytes()
+}
+
+// lines returns the lines in the file x, including newlines.
+// If the file does not end in a newline, one is supplied
+// along with a warning about the missing newline.
+func lines(x []byte) []string {
+	l := strings.SplitAfter(string(x), "\n")
+	if l[len(l)-1] == "" {
+		l = l[:len(l)-1]
+	} else {
+		// Treat last line as having a message about the missing newline attached,
+		// using the same text as BSD/GNU diff (including the leading backslash).
+		l[len(l)-1] += "\n\\ No newline at end of file\n"
+	}
+	return l
+}
+
+// tgs returns the pairs of indexes of the longest common subsequence
+// of unique lines in x and y, where a unique line is one that appears
+// once in x and once in y.
+//
+// The longest common subsequence algorithm is as described in
+// Thomas G. Szymanski, “A Special Case of the Maximal Common
+// Subsequence Problem,” Princeton TR #170 (January 1975),
+// available at https://research.swtch.com/tgs170.pdf.
+func tgs(x, y []string) []pair {
+	// Count the number of times each string appears in x and y.
+	// We only care about 0, 1, many, counted as 0, -1, -2
+	// for the x side and 0, -4, -8 for the y side.
+	// Using negative numbers now lets us distinguish positive line numbers later.
+	m := make(map[string]int)
+	for _, s := range x {
+		if c := m[s]; c > -2 {
+			m[s] = c - 1
+		}
+	}
+	for _, s := range y {
+		if c := m[s]; c > -8 {
+			m[s] = c - 4
+		}
+	}
+
+	// Now unique strings can be identified by m[s] = -1+-4.
+	//
+	// Gather the indexes of those strings in x and y, building:
+	//	xi[i] = increasing indexes of unique strings in x.
+	//	yi[i] = increasing indexes of unique strings in y.
+	//	inv[i] = index j such that x[xi[i]] = y[yi[j]].
+	var xi, yi, inv []int
+	for i, s := range y {
+		if m[s] == -1+-4 {
+			m[s] = len(yi)
+			yi = append(yi, i)
+		}
+	}
+	for i, s := range x {
+		if j, ok := m[s]; ok && j >= 0 {
+			xi = append(xi, i)
+			inv = append(inv, j)
+		}
+	}
+
+	// Apply Algorithm A from Szymanski's paper.
+	// In those terms, A = J = inv and B = [0, n).
+	// We add sentinel pairs {0,0}, and {len(x),len(y)}
+	// to the returned sequence, to help the processing loop.
+	J := inv
+	n := len(xi)
+	T := make([]int, n)
+	L := make([]int, n)
+	for i := range T {
+		T[i] = n + 1
+	}
+	for i := 0; i < n; i++ {
+		k := sort.Search(n, func(k int) bool {
+			return T[k] >= J[i]
+		})
+		T[k] = J[i]
+		L[i] = k + 1
+	}
+	k := 0
+	for _, v := range L {
+		if k < v {
+			k = v
+		}
+	}
+	seq := make([]pair, 2+k)
+	seq[1+k] = pair{len(x), len(y)} // sentinel at end
+	lastj := n
+	for i := n - 1; i >= 0; i-- {
+		if L[i] == k && J[i] < lastj {
+			seq[k] = pair{xi[i], yi[J[i]]}
+			k--
+		}
+	}
+	seq[0] = pair{0, 0} // sentinel at start
+	return seq
+}
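
For reference, the only exported symbol of this package is Diff. A short usage sketch (assuming the caller lives inside x/tools, since internal packages cannot be imported from other modules):

	package main

	import (
		"fmt"

		"golang.org/x/tools/internal/diffp"
	)

	func main() {
		old := []byte("a\nb\nc\nd\ne\nf\ng\n")
		new := []byte("w\na\nb\nx\ny\nz\ne\n")
		// Diff returns a unified-style anchored diff, or nil if the inputs are equal.
		fmt.Printf("%s", diffp.Diff("old", old, "new", new))
	}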
diff --git a/internal/diffp/diff_test.go b/internal/diffp/diff_test.go
new file mode 100644
index 0000000..acb95df
--- /dev/null
+++ b/internal/diffp/diff_test.go
@@ -0,0 +1,44 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package diffp
+
+import (
+	"bytes"
+	"path/filepath"
+	"testing"
+
+	"golang.org/x/tools/txtar"
+)
+
+func clean(text []byte) []byte {
+	text = bytes.ReplaceAll(text, []byte("$\n"), []byte("\n"))
+	text = bytes.TrimSuffix(text, []byte("^D\n"))
+	return text
+}
+
+func Test(t *testing.T) {
+	files, _ := filepath.Glob("testdata/*.txt")
+	if len(files) == 0 {
+		t.Fatalf("no testdata")
+	}
+
+	for _, file := range files {
+		t.Run(filepath.Base(file), func(t *testing.T) {
+			a, err := txtar.ParseFile(file)
+			if err != nil {
+				t.Fatal(err)
+			}
+			if len(a.Files) != 3 || a.Files[2].Name != "diff" {
+				t.Fatalf("%s: want three files, third named \"diff\"", file)
+			}
+			diffs := Diff(a.Files[0].Name, clean(a.Files[0].Data), a.Files[1].Name, clean(a.Files[1].Data))
+			want := clean(a.Files[2].Data)
+			if !bytes.Equal(diffs, want) {
+				t.Fatalf("%s: have:\n%s\nwant:\n%s\n%s", file,
+					diffs, want, Diff("have", diffs, "want", want))
+			}
+		})
+	}
+}
diff --git a/internal/diffp/testdata/allnew.txt b/internal/diffp/testdata/allnew.txt
new file mode 100644
index 0000000..8875649
--- /dev/null
+++ b/internal/diffp/testdata/allnew.txt
@@ -0,0 +1,13 @@
+-- old --
+-- new --
+a
+b
+c
+-- diff --
+diff old new
+--- old
++++ new
+@@ -0,0 +1,3 @@
++a
++b
++c
diff --git a/internal/diffp/testdata/allold.txt b/internal/diffp/testdata/allold.txt
new file mode 100644
index 0000000..bcc9ac0
--- /dev/null
+++ b/internal/diffp/testdata/allold.txt
@@ -0,0 +1,13 @@
+-- old --
+a
+b
+c
+-- new --
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,3 +0,0 @@
+-a
+-b
+-c
diff --git a/internal/diffp/testdata/basic.txt b/internal/diffp/testdata/basic.txt
new file mode 100644
index 0000000..d2565b5
--- /dev/null
+++ b/internal/diffp/testdata/basic.txt
@@ -0,0 +1,35 @@
+Example from Hunt and McIlroy, “An Algorithm for Differential File Comparison.”
+https://www.cs.dartmouth.edu/~doug/diff.pdf
+
+-- old --
+a
+b
+c
+d
+e
+f
+g
+-- new --
+w
+a
+b
+x
+y
+z
+e
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,7 +1,7 @@
++w
+ a
+ b
+-c
+-d
++x
++y
++z
+ e
+-f
+-g
diff --git a/internal/diffp/testdata/dups.txt b/internal/diffp/testdata/dups.txt
new file mode 100644
index 0000000..d10524d
--- /dev/null
+++ b/internal/diffp/testdata/dups.txt
@@ -0,0 +1,40 @@
+-- old --
+a
+
+b
+
+c
+
+d
+
+e
+
+f
+-- new --
+a
+
+B
+
+C
+
+d
+
+e
+
+f
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,8 +1,8 @@
+ a
+ $
+-b
+-
+-c
++B
++
++C
+ $
+ d
+ $
diff --git a/internal/diffp/testdata/end.txt b/internal/diffp/testdata/end.txt
new file mode 100644
index 0000000..158637c
--- /dev/null
+++ b/internal/diffp/testdata/end.txt
@@ -0,0 +1,38 @@
+-- old --
+1
+2
+3
+4
+5
+6
+7
+eight
+nine
+ten
+eleven
+-- new --
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+-- diff --
+diff old new
+--- old
++++ new
+@@ -5,7 +5,6 @@
+ 5
+ 6
+ 7
+-eight
+-nine
+-ten
+-eleven
++8
++9
++10
diff --git a/internal/diffp/testdata/eof.txt b/internal/diffp/testdata/eof.txt
new file mode 100644
index 0000000..5dc145c
--- /dev/null
+++ b/internal/diffp/testdata/eof.txt
@@ -0,0 +1,9 @@
+-- old --
+a
+b
+c^D
+-- new --
+a
+b
+c^D
+-- diff --
diff --git a/internal/diffp/testdata/eof1.txt b/internal/diffp/testdata/eof1.txt
new file mode 100644
index 0000000..1ebf621
--- /dev/null
+++ b/internal/diffp/testdata/eof1.txt
@@ -0,0 +1,18 @@
+-- old --
+a
+b
+c
+-- new --
+a
+b
+c^D
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,3 +1,3 @@
+ a
+ b
+-c
++c
+\ No newline at end of file
diff --git a/internal/diffp/testdata/eof2.txt b/internal/diffp/testdata/eof2.txt
new file mode 100644
index 0000000..047705e
--- /dev/null
+++ b/internal/diffp/testdata/eof2.txt
@@ -0,0 +1,18 @@
+-- old --
+a
+b
+c^D
+-- new --
+a
+b
+c
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,3 +1,3 @@
+ a
+ b
+-c
+\ No newline at end of file
++c
diff --git a/internal/diffp/testdata/long.txt b/internal/diffp/testdata/long.txt
new file mode 100644
index 0000000..3fc99f7
--- /dev/null
+++ b/internal/diffp/testdata/long.txt
@@ -0,0 +1,62 @@
+-- old --
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+11
+12
+13
+14
+14½
+15
+16
+17
+18
+19
+20
+-- new --
+1
+2
+3
+4
+5
+6
+8
+9
+10
+11
+12
+13
+14
+17
+18
+19
+20
+-- diff --
+diff old new
+--- old
++++ new
+@@ -4,7 +4,6 @@
+ 4
+ 5
+ 6
+-7
+ 8
+ 9
+ 10
+@@ -12,9 +11,6 @@
+ 12
+ 13
+ 14
+-14½
+-15
+-16
+ 17
+ 18
+ 19
diff --git a/internal/diffp/testdata/same.txt b/internal/diffp/testdata/same.txt
new file mode 100644
index 0000000..86b1100
--- /dev/null
+++ b/internal/diffp/testdata/same.txt
@@ -0,0 +1,5 @@
+-- old --
+hello world
+-- new --
+hello world
+-- diff --
diff --git a/internal/diffp/testdata/start.txt b/internal/diffp/testdata/start.txt
new file mode 100644
index 0000000..217b2fd
--- /dev/null
+++ b/internal/diffp/testdata/start.txt
@@ -0,0 +1,34 @@
+-- old --
+e
+pi
+4
+5
+6
+7
+8
+9
+10
+-- new --
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,5 +1,6 @@
+-e
+-pi
++1
++2
++3
+ 4
+ 5
+ 6
diff --git a/internal/diffp/testdata/triv.txt b/internal/diffp/testdata/triv.txt
new file mode 100644
index 0000000..ab5759f
--- /dev/null
+++ b/internal/diffp/testdata/triv.txt
@@ -0,0 +1,40 @@
+Another example from Hunt and McIlroy,
+“An Algorithm for Differential File Comparison.”
+https://www.cs.dartmouth.edu/~doug/diff.pdf
+
+Anchored diff gives up on finding anything,
+since there are no unique lines.
+
+-- old --
+a
+b
+c
+a
+b
+b
+a
+-- new --
+c
+a
+b
+a
+b
+c
+-- diff --
+diff old new
+--- old
++++ new
+@@ -1,7 +1,6 @@
+-a
+-b
+-c
+-a
+-b
+-b
+-a
++c
++a
++b
++a
++b
++c
diff --git a/internal/fuzzy/symbol.go b/internal/fuzzy/symbol.go
index 073a4cd..bf93041 100644
--- a/internal/fuzzy/symbol.go
+++ b/internal/fuzzy/symbol.go
@@ -26,9 +26,6 @@
 //     symbol or identifiers, so doing this avoids allocating strings.
 //   - We can return the index of the right-most match, allowing us to trim
 //     irrelevant qualification.
-//
-// This implementation is experimental, serving as a reference fast algorithm
-// to compare to the fuzzy algorithm implemented by Matcher.
 type SymbolMatcher struct {
 	// Using buffers of length 256 is both a reasonable size for most qualified
 	// symbols, and makes it easy to avoid bounds checks by using uint8 indexes.
@@ -169,19 +166,29 @@
 	// Score is the average score for each character.
 	//
 	// A character score is the multiple of:
-	//   1. 1.0 if the character starts a segment, .8 if the character start a
-	//      mid-segment word, otherwise 0.6. This carries over to immediately
-	//      following characters.
-	//   2. For the final character match, the multiplier from (1) is reduced to
-	//     .8 if the next character in the input is a mid-segment word, or 0.6 if
-	//      the next character in the input is not a word or segment start. This
-	//      ensures that we favor whole-word or whole-segment matches over prefix
-	//      matches.
-	//   3. 1.0 if the character is part of the last segment, otherwise
-	//      1.0-.2*<segments from the right>, with a max segment count of 3.
+	//   1. 1.0 if the character starts a segment or is preceded by a matching
+	//      character, 0.9 if the character starts a mid-segment word, else 0.6.
 	//
-	// This is a very naive algorithm, but it is fast. There's lots of prior art
-	// here, and we should leverage it. For example, we could explicitly consider
+	//      Note that characters preceded by a matching character get the max
+	//      score of 1.0 so that sequential or exact matches are preferred, even
+	//      if they don't start/end at a segment or word boundary. For example, a
+	//      match for "func" in intfuncs should have a higher score than in
+	//      ifunmatched.
+	//
+	//      For the final character match, the multiplier from (1) is reduced to
+	//      0.9 if the next character in the input is a mid-segment word, or 0.6
+	//      if the next character in the input is not a word or segment start.
+	//      This ensures that we favor whole-word or whole-segment matches over
+	//      prefix matches.
+	//
+	//   2. 1.0 if the character is part of the last segment, otherwise
+	//      1.0-0.1*<segments from the right>, with a max segment count of 3.
+	//      Notably 1.0-0.1*3 = 0.7 > 0.6, so that foo/_/_/_/_ (a match very
+	//      early in a qualified symbol name) still scores higher than _f_o_o_
+	//      (a completely split match).
+	//
+	// This is a naive algorithm, but it is fast. There's lots of prior art here
+	// that could be leveraged. For example, we could explicitly consider
 	// character distance, and exact matches of words or segments.
 	//
 	// Also note that this might not actually find the highest scoring match, as
@@ -192,10 +199,10 @@
 	p = m.pattern[pi]
 
 	const (
-		segStreak  = 1.0
-		wordStreak = 0.8
+		segStreak  = 1.0 // start of segment or sequential match
+		wordStreak = 0.9 // start of word match
 		noStreak   = 0.6
-		perSegment = 0.2 // we count at most 3 segments above
+		perSegment = 0.1 // we count at most 3 segments above
 	)
 
 	streakBonus := noStreak
@@ -228,6 +235,7 @@
 			if finalChar {
 				break
 			}
+			streakBonus = segStreak // see above: sequential characters get the max score
 		} else {
 			streakBonus = noStreak
 		}
diff --git a/internal/fuzzy/symbol_test.go b/internal/fuzzy/symbol_test.go
index df74bbe..2a9d9b6 100644
--- a/internal/fuzzy/symbol_test.go
+++ b/internal/fuzzy/symbol_test.go
@@ -40,12 +40,12 @@
 	symbols := []string{
 		"this.is.better.than.most",
 		"test.foo.bar",
-		"atest",
 		"thebest",
 		"test.foo",
 		"test.foo",
-		"tTest",
+		"atest",
 		"testage",
+		"tTest",
 		"foo.test",
 		"test",
 	}
@@ -60,6 +60,33 @@
 	}
 }
 
+// Test that we strongly prefer exact matches.
+//
+// In golang/go#60027, we preferred "Runner" for the query "rune" over several
+// results containing the word "rune" exactly. Following this observation,
+// scoring was tweaked to more strongly emphasize sequential characters and
+// exact matches.
+func TestSymbolRanking_Issue60027(t *testing.T) {
+	matcher := NewSymbolMatcher("rune")
+
+	// symbols to match, in ascending order of ranking.
+	symbols := []string{
+		"Runner",
+		"singleRuneParam",
+		"Config.ifsRune",
+		"Parser.rune",
+	}
+	prev := 0.0
+	for _, sym := range symbols {
+		_, score := matcher.Match([]string{sym})
+		t.Logf("Match(%q) = %v", sym, score)
+		if score < prev {
+			t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev)
+		}
+		prev = score
+	}
+}
+
 func TestChunkedMatch(t *testing.T) {
 	matcher := NewSymbolMatcher("test")
 
diff --git a/internal/gcimporter/bexport.go b/internal/gcimporter/bexport.go
deleted file mode 100644
index 30582ed..0000000
--- a/internal/gcimporter/bexport.go
+++ /dev/null
@@ -1,852 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Binary package export.
-// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
-// see that file for specification of the format.
-
-package gcimporter
-
-import (
-	"bytes"
-	"encoding/binary"
-	"fmt"
-	"go/constant"
-	"go/token"
-	"go/types"
-	"math"
-	"math/big"
-	"sort"
-	"strings"
-)
-
-// If debugFormat is set, each integer and string value is preceded by a marker
-// and position information in the encoding. This mechanism permits an importer
-// to recognize immediately when it is out of sync. The importer recognizes this
-// mode automatically (i.e., it can import export data produced with debugging
-// support even if debugFormat is not set at the time of import). This mode will
-// lead to massively larger export data (by a factor of 2 to 3) and should only
-// be enabled during development and debugging.
-//
-// NOTE: This flag is the first flag to enable if importing dies because of
-// (suspected) format errors, and whenever a change is made to the format.
-const debugFormat = false // default: false
-
-// Current export format version. Increase with each format change.
-//
-// Note: The latest binary (non-indexed) export format is at version 6.
-// This exporter is still at level 4, but it doesn't matter since
-// the binary importer can handle older versions just fine.
-//
-//	6: package height (CL 105038) -- NOT IMPLEMENTED HERE
-//	5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
-//	4: type name objects support type aliases, uses aliasTag
-//	3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
-//	2: removed unused bool in ODCL export (compiler only)
-//	1: header format change (more regular), export package for _ struct fields
-//	0: Go1.7 encoding
-const exportVersion = 4
-
-// trackAllTypes enables cycle tracking for all types, not just named
-// types. The existing compiler invariants assume that unnamed types
-// that are not completely set up are not used, or else there are spurious
-// errors.
-// If disabled, only named types are tracked, possibly leading to slightly
-// less efficient encoding in rare cases. It also prevents the export of
-// some corner-case type declarations (but those are not handled correctly
-// with with the textual export format either).
-// TODO(gri) enable and remove once issues caused by it are fixed
-const trackAllTypes = false
-
-type exporter struct {
-	fset *token.FileSet
-	out  bytes.Buffer
-
-	// object -> index maps, indexed in order of serialization
-	strIndex map[string]int
-	pkgIndex map[*types.Package]int
-	typIndex map[types.Type]int
-
-	// position encoding
-	posInfoFormat bool
-	prevFile      string
-	prevLine      int
-
-	// debugging support
-	written int // bytes written
-	indent  int // for trace
-}
-
-// internalError represents an error generated inside this package.
-type internalError string
-
-func (e internalError) Error() string { return "gcimporter: " + string(e) }
-
-func internalErrorf(format string, args ...interface{}) error {
-	return internalError(fmt.Sprintf(format, args...))
-}
-
-// BExportData returns binary export data for pkg.
-// If no file set is provided, position info will be missing.
-func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
-	if !debug {
-		defer func() {
-			if e := recover(); e != nil {
-				if ierr, ok := e.(internalError); ok {
-					err = ierr
-					return
-				}
-				// Not an internal error; panic again.
-				panic(e)
-			}
-		}()
-	}
-
-	p := exporter{
-		fset:          fset,
-		strIndex:      map[string]int{"": 0}, // empty string is mapped to 0
-		pkgIndex:      make(map[*types.Package]int),
-		typIndex:      make(map[types.Type]int),
-		posInfoFormat: true, // TODO(gri) might become a flag, eventually
-	}
-
-	// write version info
-	// The version string must start with "version %d" where %d is the version
-	// number. Additional debugging information may follow after a blank; that
-	// text is ignored by the importer.
-	p.rawStringln(fmt.Sprintf("version %d", exportVersion))
-	var debug string
-	if debugFormat {
-		debug = "debug"
-	}
-	p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
-	p.bool(trackAllTypes)
-	p.bool(p.posInfoFormat)
-
-	// --- generic export data ---
-
-	// populate type map with predeclared "known" types
-	for index, typ := range predeclared() {
-		p.typIndex[typ] = index
-	}
-	if len(p.typIndex) != len(predeclared()) {
-		return nil, internalError("duplicate entries in type map?")
-	}
-
-	// write package data
-	p.pkg(pkg, true)
-	if trace {
-		p.tracef("\n")
-	}
-
-	// write objects
-	objcount := 0
-	scope := pkg.Scope()
-	for _, name := range scope.Names() {
-		if !token.IsExported(name) {
-			continue
-		}
-		if trace {
-			p.tracef("\n")
-		}
-		p.obj(scope.Lookup(name))
-		objcount++
-	}
-
-	// indicate end of list
-	if trace {
-		p.tracef("\n")
-	}
-	p.tag(endTag)
-
-	// for self-verification only (redundant)
-	p.int(objcount)
-
-	if trace {
-		p.tracef("\n")
-	}
-
-	// --- end of export data ---
-
-	return p.out.Bytes(), nil
-}
-
-func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
-	if pkg == nil {
-		panic(internalError("unexpected nil pkg"))
-	}
-
-	// if we saw the package before, write its index (>= 0)
-	if i, ok := p.pkgIndex[pkg]; ok {
-		p.index('P', i)
-		return
-	}
-
-	// otherwise, remember the package, write the package tag (< 0) and package data
-	if trace {
-		p.tracef("P%d = { ", len(p.pkgIndex))
-		defer p.tracef("} ")
-	}
-	p.pkgIndex[pkg] = len(p.pkgIndex)
-
-	p.tag(packageTag)
-	p.string(pkg.Name())
-	if emptypath {
-		p.string("")
-	} else {
-		p.string(pkg.Path())
-	}
-}
-
-func (p *exporter) obj(obj types.Object) {
-	switch obj := obj.(type) {
-	case *types.Const:
-		p.tag(constTag)
-		p.pos(obj)
-		p.qualifiedName(obj)
-		p.typ(obj.Type())
-		p.value(obj.Val())
-
-	case *types.TypeName:
-		if obj.IsAlias() {
-			p.tag(aliasTag)
-			p.pos(obj)
-			p.qualifiedName(obj)
-		} else {
-			p.tag(typeTag)
-		}
-		p.typ(obj.Type())
-
-	case *types.Var:
-		p.tag(varTag)
-		p.pos(obj)
-		p.qualifiedName(obj)
-		p.typ(obj.Type())
-
-	case *types.Func:
-		p.tag(funcTag)
-		p.pos(obj)
-		p.qualifiedName(obj)
-		sig := obj.Type().(*types.Signature)
-		p.paramList(sig.Params(), sig.Variadic())
-		p.paramList(sig.Results(), false)
-
-	default:
-		panic(internalErrorf("unexpected object %v (%T)", obj, obj))
-	}
-}
-
-func (p *exporter) pos(obj types.Object) {
-	if !p.posInfoFormat {
-		return
-	}
-
-	file, line := p.fileLine(obj)
-	if file == p.prevFile {
-		// common case: write line delta
-		// delta == 0 means different file or no line change
-		delta := line - p.prevLine
-		p.int(delta)
-		if delta == 0 {
-			p.int(-1) // -1 means no file change
-		}
-	} else {
-		// different file
-		p.int(0)
-		// Encode filename as length of common prefix with previous
-		// filename, followed by (possibly empty) suffix. Filenames
-		// frequently share path prefixes, so this can save a lot
-		// of space and make export data size less dependent on file
-		// path length. The suffix is unlikely to be empty because
-		// file names tend to end in ".go".
-		n := commonPrefixLen(p.prevFile, file)
-		p.int(n)           // n >= 0
-		p.string(file[n:]) // write suffix only
-		p.prevFile = file
-		p.int(line)
-	}
-	p.prevLine = line
-}
-
-func (p *exporter) fileLine(obj types.Object) (file string, line int) {
-	if p.fset != nil {
-		pos := p.fset.Position(obj.Pos())
-		file = pos.Filename
-		line = pos.Line
-	}
-	return
-}
-
-func commonPrefixLen(a, b string) int {
-	if len(a) > len(b) {
-		a, b = b, a
-	}
-	// len(a) <= len(b)
-	i := 0
-	for i < len(a) && a[i] == b[i] {
-		i++
-	}
-	return i
-}
-
-func (p *exporter) qualifiedName(obj types.Object) {
-	p.string(obj.Name())
-	p.pkg(obj.Pkg(), false)
-}
-
-func (p *exporter) typ(t types.Type) {
-	if t == nil {
-		panic(internalError("nil type"))
-	}
-
-	// Possible optimization: Anonymous pointer types *T where
-	// T is a named type are common. We could canonicalize all
-	// such types *T to a single type PT = *T. This would lead
-	// to at most one *T entry in typIndex, and all future *T's
-	// would be encoded as the respective index directly. Would
-	// save 1 byte (pointerTag) per *T and reduce the typIndex
-	// size (at the cost of a canonicalization map). We can do
-	// this later, without encoding format change.
-
-	// if we saw the type before, write its index (>= 0)
-	if i, ok := p.typIndex[t]; ok {
-		p.index('T', i)
-		return
-	}
-
-	// otherwise, remember the type, write the type tag (< 0) and type data
-	if trackAllTypes {
-		if trace {
-			p.tracef("T%d = {>\n", len(p.typIndex))
-			defer p.tracef("<\n} ")
-		}
-		p.typIndex[t] = len(p.typIndex)
-	}
-
-	switch t := t.(type) {
-	case *types.Named:
-		if !trackAllTypes {
-			// if we don't track all types, track named types now
-			p.typIndex[t] = len(p.typIndex)
-		}
-
-		p.tag(namedTag)
-		p.pos(t.Obj())
-		p.qualifiedName(t.Obj())
-		p.typ(t.Underlying())
-		if !types.IsInterface(t) {
-			p.assocMethods(t)
-		}
-
-	case *types.Array:
-		p.tag(arrayTag)
-		p.int64(t.Len())
-		p.typ(t.Elem())
-
-	case *types.Slice:
-		p.tag(sliceTag)
-		p.typ(t.Elem())
-
-	case *dddSlice:
-		p.tag(dddTag)
-		p.typ(t.elem)
-
-	case *types.Struct:
-		p.tag(structTag)
-		p.fieldList(t)
-
-	case *types.Pointer:
-		p.tag(pointerTag)
-		p.typ(t.Elem())
-
-	case *types.Signature:
-		p.tag(signatureTag)
-		p.paramList(t.Params(), t.Variadic())
-		p.paramList(t.Results(), false)
-
-	case *types.Interface:
-		p.tag(interfaceTag)
-		p.iface(t)
-
-	case *types.Map:
-		p.tag(mapTag)
-		p.typ(t.Key())
-		p.typ(t.Elem())
-
-	case *types.Chan:
-		p.tag(chanTag)
-		p.int(int(3 - t.Dir())) // hack
-		p.typ(t.Elem())
-
-	default:
-		panic(internalErrorf("unexpected type %T: %s", t, t))
-	}
-}
-
-func (p *exporter) assocMethods(named *types.Named) {
-	// Sort methods (for determinism).
-	var methods []*types.Func
-	for i := 0; i < named.NumMethods(); i++ {
-		methods = append(methods, named.Method(i))
-	}
-	sort.Sort(methodsByName(methods))
-
-	p.int(len(methods))
-
-	if trace && methods != nil {
-		p.tracef("associated methods {>\n")
-	}
-
-	for i, m := range methods {
-		if trace && i > 0 {
-			p.tracef("\n")
-		}
-
-		p.pos(m)
-		name := m.Name()
-		p.string(name)
-		if !exported(name) {
-			p.pkg(m.Pkg(), false)
-		}
-
-		sig := m.Type().(*types.Signature)
-		p.paramList(types.NewTuple(sig.Recv()), false)
-		p.paramList(sig.Params(), sig.Variadic())
-		p.paramList(sig.Results(), false)
-		p.int(0) // dummy value for go:nointerface pragma - ignored by importer
-	}
-
-	if trace && methods != nil {
-		p.tracef("<\n} ")
-	}
-}
-
-type methodsByName []*types.Func
-
-func (x methodsByName) Len() int           { return len(x) }
-func (x methodsByName) Swap(i, j int)      { x[i], x[j] = x[j], x[i] }
-func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
-
-func (p *exporter) fieldList(t *types.Struct) {
-	if trace && t.NumFields() > 0 {
-		p.tracef("fields {>\n")
-		defer p.tracef("<\n} ")
-	}
-
-	p.int(t.NumFields())
-	for i := 0; i < t.NumFields(); i++ {
-		if trace && i > 0 {
-			p.tracef("\n")
-		}
-		p.field(t.Field(i))
-		p.string(t.Tag(i))
-	}
-}
-
-func (p *exporter) field(f *types.Var) {
-	if !f.IsField() {
-		panic(internalError("field expected"))
-	}
-
-	p.pos(f)
-	p.fieldName(f)
-	p.typ(f.Type())
-}
-
-func (p *exporter) iface(t *types.Interface) {
-	// TODO(gri): enable importer to load embedded interfaces,
-	// then emit Embeddeds and ExplicitMethods separately here.
-	p.int(0)
-
-	n := t.NumMethods()
-	if trace && n > 0 {
-		p.tracef("methods {>\n")
-		defer p.tracef("<\n} ")
-	}
-	p.int(n)
-	for i := 0; i < n; i++ {
-		if trace && i > 0 {
-			p.tracef("\n")
-		}
-		p.method(t.Method(i))
-	}
-}
-
-func (p *exporter) method(m *types.Func) {
-	sig := m.Type().(*types.Signature)
-	if sig.Recv() == nil {
-		panic(internalError("method expected"))
-	}
-
-	p.pos(m)
-	p.string(m.Name())
-	if m.Name() != "_" && !token.IsExported(m.Name()) {
-		p.pkg(m.Pkg(), false)
-	}
-
-	// interface method; no need to encode receiver.
-	p.paramList(sig.Params(), sig.Variadic())
-	p.paramList(sig.Results(), false)
-}
-
-func (p *exporter) fieldName(f *types.Var) {
-	name := f.Name()
-
-	if f.Anonymous() {
-		// anonymous field - we distinguish between 3 cases:
-		// 1) field name matches base type name and is exported
-		// 2) field name matches base type name and is not exported
-		// 3) field name doesn't match base type name (alias name)
-		bname := basetypeName(f.Type())
-		if name == bname {
-			if token.IsExported(name) {
-				name = "" // 1) we don't need to know the field name or package
-			} else {
-				name = "?" // 2) use unexported name "?" to force package export
-			}
-		} else {
-			// 3) indicate alias and export name as is
-			// (this requires an extra "@" but this is a rare case)
-			p.string("@")
-		}
-	}
-
-	p.string(name)
-	if name != "" && !token.IsExported(name) {
-		p.pkg(f.Pkg(), false)
-	}
-}
-
-func basetypeName(typ types.Type) string {
-	switch typ := deref(typ).(type) {
-	case *types.Basic:
-		return typ.Name()
-	case *types.Named:
-		return typ.Obj().Name()
-	default:
-		return "" // unnamed type
-	}
-}
-
-func (p *exporter) paramList(params *types.Tuple, variadic bool) {
-	// use negative length to indicate unnamed parameters
-	// (look at the first parameter only since either all
-	// names are present or all are absent)
-	n := params.Len()
-	if n > 0 && params.At(0).Name() == "" {
-		n = -n
-	}
-	p.int(n)
-	for i := 0; i < params.Len(); i++ {
-		q := params.At(i)
-		t := q.Type()
-		if variadic && i == params.Len()-1 {
-			t = &dddSlice{t.(*types.Slice).Elem()}
-		}
-		p.typ(t)
-		if n > 0 {
-			name := q.Name()
-			p.string(name)
-			if name != "_" {
-				p.pkg(q.Pkg(), false)
-			}
-		}
-		p.string("") // no compiler-specific info
-	}
-}
-
-func (p *exporter) value(x constant.Value) {
-	if trace {
-		p.tracef("= ")
-	}
-
-	switch x.Kind() {
-	case constant.Bool:
-		tag := falseTag
-		if constant.BoolVal(x) {
-			tag = trueTag
-		}
-		p.tag(tag)
-
-	case constant.Int:
-		if v, exact := constant.Int64Val(x); exact {
-			// common case: x fits into an int64 - use compact encoding
-			p.tag(int64Tag)
-			p.int64(v)
-			return
-		}
-		// uncommon case: large x - use float encoding
-		// (powers of 2 will be encoded efficiently with exponent)
-		p.tag(floatTag)
-		p.float(constant.ToFloat(x))
-
-	case constant.Float:
-		p.tag(floatTag)
-		p.float(x)
-
-	case constant.Complex:
-		p.tag(complexTag)
-		p.float(constant.Real(x))
-		p.float(constant.Imag(x))
-
-	case constant.String:
-		p.tag(stringTag)
-		p.string(constant.StringVal(x))
-
-	case constant.Unknown:
-		// package contains type errors
-		p.tag(unknownTag)
-
-	default:
-		panic(internalErrorf("unexpected value %v (%T)", x, x))
-	}
-}
-
-func (p *exporter) float(x constant.Value) {
-	if x.Kind() != constant.Float {
-		panic(internalErrorf("unexpected constant %v, want float", x))
-	}
-	// extract sign (there is no -0)
-	sign := constant.Sign(x)
-	if sign == 0 {
-		// x == 0
-		p.int(0)
-		return
-	}
-	// x != 0
-
-	var f big.Float
-	if v, exact := constant.Float64Val(x); exact {
-		// float64
-		f.SetFloat64(v)
-	} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
-		// TODO(gri): add big.Rat accessor to constant.Value.
-		r := valueToRat(num)
-		f.SetRat(r.Quo(r, valueToRat(denom)))
-	} else {
-		// Value too large to represent as a fraction => inaccessible.
-		// TODO(gri): add big.Float accessor to constant.Value.
-		f.SetFloat64(math.MaxFloat64) // FIXME
-	}
-
-	// extract exponent such that 0.5 <= m < 1.0
-	var m big.Float
-	exp := f.MantExp(&m)
-
-	// extract mantissa as *big.Int
-	// - set exponent large enough so mant satisfies mant.IsInt()
-	// - get *big.Int from mant
-	m.SetMantExp(&m, int(m.MinPrec()))
-	mant, acc := m.Int(nil)
-	if acc != big.Exact {
-		panic(internalError("internal error"))
-	}
-
-	p.int(sign)
-	p.int(exp)
-	p.string(string(mant.Bytes()))
-}
-
-func valueToRat(x constant.Value) *big.Rat {
-	// Convert little-endian to big-endian.
-	// I can't believe this is necessary.
-	bytes := constant.Bytes(x)
-	for i := 0; i < len(bytes)/2; i++ {
-		bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
-	}
-	return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
-}
-
-func (p *exporter) bool(b bool) bool {
-	if trace {
-		p.tracef("[")
-		defer p.tracef("= %v] ", b)
-	}
-
-	x := 0
-	if b {
-		x = 1
-	}
-	p.int(x)
-	return b
-}
-
-// ----------------------------------------------------------------------------
-// Low-level encoders
-
-func (p *exporter) index(marker byte, index int) {
-	if index < 0 {
-		panic(internalError("invalid index < 0"))
-	}
-	if debugFormat {
-		p.marker('t')
-	}
-	if trace {
-		p.tracef("%c%d ", marker, index)
-	}
-	p.rawInt64(int64(index))
-}
-
-func (p *exporter) tag(tag int) {
-	if tag >= 0 {
-		panic(internalError("invalid tag >= 0"))
-	}
-	if debugFormat {
-		p.marker('t')
-	}
-	if trace {
-		p.tracef("%s ", tagString[-tag])
-	}
-	p.rawInt64(int64(tag))
-}
-
-func (p *exporter) int(x int) {
-	p.int64(int64(x))
-}
-
-func (p *exporter) int64(x int64) {
-	if debugFormat {
-		p.marker('i')
-	}
-	if trace {
-		p.tracef("%d ", x)
-	}
-	p.rawInt64(x)
-}
-
-func (p *exporter) string(s string) {
-	if debugFormat {
-		p.marker('s')
-	}
-	if trace {
-		p.tracef("%q ", s)
-	}
-	// if we saw the string before, write its index (>= 0)
-	// (the empty string is mapped to 0)
-	if i, ok := p.strIndex[s]; ok {
-		p.rawInt64(int64(i))
-		return
-	}
-	// otherwise, remember string and write its negative length and bytes
-	p.strIndex[s] = len(p.strIndex)
-	p.rawInt64(-int64(len(s)))
-	for i := 0; i < len(s); i++ {
-		p.rawByte(s[i])
-	}
-}
-
-// marker emits a marker byte and position information which makes
-// it easy for a reader to detect if it is "out of sync". Used for
-// debugFormat format only.
-func (p *exporter) marker(m byte) {
-	p.rawByte(m)
-	// Enable this for help tracking down the location
-	// of an incorrect marker when running in debugFormat.
-	if false && trace {
-		p.tracef("#%d ", p.written)
-	}
-	p.rawInt64(int64(p.written))
-}
-
-// rawInt64 should only be used by low-level encoders.
-func (p *exporter) rawInt64(x int64) {
-	var tmp [binary.MaxVarintLen64]byte
-	n := binary.PutVarint(tmp[:], x)
-	for i := 0; i < n; i++ {
-		p.rawByte(tmp[i])
-	}
-}
-
-// rawStringln should only be used to emit the initial version string.
-func (p *exporter) rawStringln(s string) {
-	for i := 0; i < len(s); i++ {
-		p.rawByte(s[i])
-	}
-	p.rawByte('\n')
-}
-
-// rawByte is the bottleneck interface to write to p.out.
-// rawByte escapes b as follows (any encoding does that
-// hides '$'):
-//
-//	'$'  => '|' 'S'
-//	'|'  => '|' '|'
-//
-// Necessary so other tools can find the end of the
-// export data by searching for "$$".
-// rawByte should only be used by low-level encoders.
-func (p *exporter) rawByte(b byte) {
-	switch b {
-	case '$':
-		// write '$' as '|' 'S'
-		b = 'S'
-		fallthrough
-	case '|':
-		// write '|' as '|' '|'
-		p.out.WriteByte('|')
-		p.written++
-	}
-	p.out.WriteByte(b)
-	p.written++
-}
-
-// tracef is like fmt.Printf but it rewrites the format string
-// to take care of indentation.
-func (p *exporter) tracef(format string, args ...interface{}) {
-	if strings.ContainsAny(format, "<>\n") {
-		var buf bytes.Buffer
-		for i := 0; i < len(format); i++ {
-			// no need to deal with runes
-			ch := format[i]
-			switch ch {
-			case '>':
-				p.indent++
-				continue
-			case '<':
-				p.indent--
-				continue
-			}
-			buf.WriteByte(ch)
-			if ch == '\n' {
-				for j := p.indent; j > 0; j-- {
-					buf.WriteString(".  ")
-				}
-			}
-		}
-		format = buf.String()
-	}
-	fmt.Printf(format, args...)
-}
-
-// Debugging support.
-// (tagString is only used when tracing is enabled)
-var tagString = [...]string{
-	// Packages
-	-packageTag: "package",
-
-	// Types
-	-namedTag:     "named type",
-	-arrayTag:     "array",
-	-sliceTag:     "slice",
-	-dddTag:       "ddd",
-	-structTag:    "struct",
-	-pointerTag:   "pointer",
-	-signatureTag: "signature",
-	-interfaceTag: "interface",
-	-mapTag:       "map",
-	-chanTag:      "chan",
-
-	// Values
-	-falseTag:    "false",
-	-trueTag:     "true",
-	-int64Tag:    "int64",
-	-floatTag:    "float",
-	-fractionTag: "fraction",
-	-complexTag:  "complex",
-	-stringTag:   "string",
-	-unknownTag:  "unknown",
-
-	// Type aliases
-	-aliasTag: "alias",
-}
diff --git a/internal/gcimporter/bexport_test.go b/internal/gcimporter/bexport_test.go
index bc2390c..978c46e 100644
--- a/internal/gcimporter/bexport_test.go
+++ b/internal/gcimporter/bexport_test.go
@@ -5,10 +5,9 @@
 package gcimporter_test
 
 import (
+	"bytes"
 	"fmt"
 	"go/ast"
-	"go/build"
-	"go/constant"
 	"go/parser"
 	"go/token"
 	"go/types"
@@ -19,157 +18,18 @@
 	"strings"
 	"testing"
 
-	"golang.org/x/tools/go/ast/inspector"
-	"golang.org/x/tools/go/buildutil"
-	"golang.org/x/tools/go/loader"
 	"golang.org/x/tools/internal/gcimporter"
-	"golang.org/x/tools/internal/testenv"
 	"golang.org/x/tools/internal/typeparams"
-	"golang.org/x/tools/internal/typeparams/genericfeatures"
 )
 
 var isRace = false
 
-func TestBExportData_stdlib(t *testing.T) {
-	if runtime.Compiler == "gccgo" {
-		t.Skip("gccgo standard library is inaccessible")
-	}
-	testenv.NeedsGoBuild(t)
-	if isRace {
-		t.Skipf("stdlib tests take too long in race mode and flake on builders")
-	}
-	if testing.Short() {
-		t.Skip("skipping RAM hungry test in -short mode")
-	}
-
-	// Load, parse and type-check the program.
-	ctxt := build.Default // copy
-	ctxt.GOPATH = ""      // disable GOPATH
-	conf := loader.Config{
-		Build:       &ctxt,
-		AllowErrors: true,
-		TypeChecker: types.Config{
-			Error: func(err error) { t.Log(err) },
-		},
-	}
-	for _, path := range buildutil.AllPackages(conf.Build) {
-		conf.Import(path)
-	}
-
-	// Create a package containing type and value errors to ensure
-	// they are properly encoded/decoded.
-	f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors
-const UnknownValue = "" + 0
-type UnknownType undefined
-`)
-	if err != nil {
-		t.Fatal(err)
-	}
-	conf.CreateFromFiles("haserrors", f)
-
-	prog, err := conf.Load()
-	if err != nil {
-		t.Fatalf("Load failed: %v", err)
-	}
-
-	numPkgs := len(prog.AllPackages)
-	if want := minStdlibPackages; numPkgs < want {
-		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
-	}
-
-	checked := 0
-	for pkg, info := range prog.AllPackages {
-		if info.Files == nil {
-			continue // empty directory
-		}
-		// Binary export does not support generic code.
-		inspect := inspector.New(info.Files)
-		if genericfeatures.ForPackage(inspect, &info.Info) != 0 {
-			t.Logf("skipping package %q which uses generics", pkg.Path())
-			continue
-		}
-		checked++
-		exportdata, err := gcimporter.BExportData(conf.Fset, pkg)
-		if err != nil {
-			t.Fatal(err)
-		}
-
-		imports := make(map[string]*types.Package)
-		fset2 := token.NewFileSet()
-		n, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path())
-		if err != nil {
-			t.Errorf("BImportData(%s): %v", pkg.Path(), err)
-			continue
-		}
-		if n != len(exportdata) {
-			t.Errorf("BImportData(%s) decoded %d bytes, want %d",
-				pkg.Path(), n, len(exportdata))
-		}
-
-		// Compare the packages' corresponding members.
-		for _, name := range pkg.Scope().Names() {
-			if !token.IsExported(name) {
-				continue
-			}
-			obj1 := pkg.Scope().Lookup(name)
-			obj2 := pkg2.Scope().Lookup(name)
-			if obj2 == nil {
-				t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1)
-				continue
-			}
-
-			fl1 := fileLine(conf.Fset, obj1)
-			fl2 := fileLine(fset2, obj2)
-			if fl1 != fl2 {
-				t.Errorf("%s.%s: got posn %s, want %s",
-					pkg.Path(), name, fl2, fl1)
-			}
-
-			if err := equalObj(obj1, obj2); err != nil {
-				t.Errorf("%s.%s: %s\ngot:  %s\nwant: %s",
-					pkg.Path(), name, err, obj2, obj1)
-			}
-		}
-	}
-	if want := minStdlibPackages; checked < want {
-		t.Errorf("Checked only %d packages, want at least %d", checked, want)
-	}
-}
-
 func fileLine(fset *token.FileSet, obj types.Object) string {
 	posn := fset.Position(obj.Pos())
 	filename := filepath.Clean(strings.ReplaceAll(posn.Filename, "$GOROOT", runtime.GOROOT()))
 	return fmt.Sprintf("%s:%d", filename, posn.Line)
 }
 
-// equalObj reports how x and y differ.  They are assumed to belong to
-// different universes so cannot be compared directly.
-func equalObj(x, y types.Object) error {
-	if reflect.TypeOf(x) != reflect.TypeOf(y) {
-		return fmt.Errorf("%T vs %T", x, y)
-	}
-	xt := x.Type()
-	yt := y.Type()
-	switch x.(type) {
-	case *types.Var, *types.Func:
-		// ok
-	case *types.Const:
-		xval := x.(*types.Const).Val()
-		yval := y.(*types.Const).Val()
-		// Use string comparison for floating-point values since rounding is permitted.
-		if constant.Compare(xval, token.NEQ, yval) &&
-			!(xval.Kind() == constant.Float && xval.String() == yval.String()) {
-			return fmt.Errorf("unequal constants %s vs %s", xval, yval)
-		}
-	case *types.TypeName:
-		xt = xt.Underlying()
-		yt = yt.Underlying()
-	default:
-		return fmt.Errorf("unexpected %T", x)
-	}
-	return equalType(xt, yt)
-}
-
 func equalType(x, y types.Type) error {
 	if reflect.TypeOf(x) != reflect.TypeOf(y) {
 		return fmt.Errorf("unequal kinds: %T vs %T", x, y)
@@ -448,15 +308,16 @@
 	}
 
 	// export
-	exportdata, err := gcimporter.BExportData(fset1, pkg)
-	if err != nil {
+	var out bytes.Buffer
+	if err := gcimporter.IExportData(&out, fset1, pkg); err != nil {
 		t.Fatal(err)
 	}
+	exportdata := out.Bytes()
 
 	// import
 	imports := make(map[string]*types.Package)
 	fset2 := token.NewFileSet()
-	_, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path())
+	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
 	if err != nil {
 		t.Fatalf("BImportData(%s): %v", pkg.Path(), err)
 	}
@@ -513,38 +374,3 @@
 		}
 	}
 }
-
-func TestTypeAliases(t *testing.T) {
-	// parse and typecheck
-	fset1 := token.NewFileSet()
-	f, err := parser.ParseFile(fset1, "p.go", src, 0)
-	if err != nil {
-		t.Fatal(err)
-	}
-	var conf types.Config
-	pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
-	if err == nil {
-		// foo in undeclared in src; we should see an error
-		t.Fatal("invalid source type-checked without error")
-	}
-	if pkg1 == nil {
-		// despite incorrect src we should see a (partially) type-checked package
-		t.Fatal("nil package returned")
-	}
-	checkPkg(t, pkg1, "export")
-
-	// export
-	exportdata, err := gcimporter.BExportData(fset1, pkg1)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	// import
-	imports := make(map[string]*types.Package)
-	fset2 := token.NewFileSet()
-	_, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg1.Path())
-	if err != nil {
-		t.Fatalf("BImportData(%s): %v", pkg1.Path(), err)
-	}
-	checkPkg(t, pkg2, "import")
-}
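
The deleted tests exercised the old binary (B) export path; the surviving round-trip test now goes through the indexed format instead. A condensed sketch of that round trip, using the same IExportData/IImportData calls as the updated test above:

	package gcimporter_test

	import (
		"bytes"
		"go/token"
		"go/types"
		"testing"

		"golang.org/x/tools/internal/gcimporter"
	)

	// roundTrip exports pkg in the indexed format and imports it back,
	// mirroring what the updated test above does.
	func roundTrip(t *testing.T, fset *token.FileSet, pkg *types.Package) *types.Package {
		var out bytes.Buffer
		if err := gcimporter.IExportData(&out, fset, pkg); err != nil {
			t.Fatal(err)
		}
		imports := make(map[string]*types.Package)
		fset2 := token.NewFileSet()
		_, pkg2, err := gcimporter.IImportData(fset2, imports, out.Bytes(), pkg.Path())
		if err != nil {
			t.Fatalf("IImportData(%s): %v", pkg.Path(), err)
		}
		return pkg2
	}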
diff --git a/internal/gcimporter/bimport.go b/internal/gcimporter/bimport.go
index b85de01..d98b0db 100644
--- a/internal/gcimporter/bimport.go
+++ b/internal/gcimporter/bimport.go
@@ -2,340 +2,24 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+// This file contains the remaining vestiges of
+// $GOROOT/src/go/internal/gcimporter/bimport.go.
 
 package gcimporter
 
 import (
-	"encoding/binary"
 	"fmt"
-	"go/constant"
 	"go/token"
 	"go/types"
-	"sort"
-	"strconv"
-	"strings"
 	"sync"
-	"unicode"
-	"unicode/utf8"
 )
 
-type importer struct {
-	imports    map[string]*types.Package
-	data       []byte
-	importpath string
-	buf        []byte // for reading strings
-	version    int    // export format version
-
-	// object lists
-	strList       []string           // in order of appearance
-	pathList      []string           // in order of appearance
-	pkgList       []*types.Package   // in order of appearance
-	typList       []types.Type       // in order of appearance
-	interfaceList []*types.Interface // for delayed completion only
-	trackAllTypes bool
-
-	// position encoding
-	posInfoFormat bool
-	prevFile      string
-	prevLine      int
-	fake          fakeFileSet
-
-	// debugging support
-	debugFormat bool
-	read        int // bytes read
-}
-
-// BImportData imports a package from the serialized package data
-// and returns the number of bytes consumed and a reference to the package.
-// If the export data version is not recognized or the format is otherwise
-// compromised, an error is returned.
-func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
-	// catch panics and return them as errors
-	const currentVersion = 6
-	version := -1 // unknown version
-	defer func() {
-		if e := recover(); e != nil {
-			// Return a (possibly nil or incomplete) package unchanged (see #16088).
-			if version > currentVersion {
-				err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
-			} else {
-				err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
-			}
-		}
-	}()
-
-	p := importer{
-		imports:    imports,
-		data:       data,
-		importpath: path,
-		version:    version,
-		strList:    []string{""}, // empty string is mapped to 0
-		pathList:   []string{""}, // empty string is mapped to 0
-		fake: fakeFileSet{
-			fset:  fset,
-			files: make(map[string]*fileInfo),
-		},
-	}
-	defer p.fake.setLines() // set lines for files in fset
-
-	// read version info
-	var versionstr string
-	if b := p.rawByte(); b == 'c' || b == 'd' {
-		// Go1.7 encoding; first byte encodes low-level
-		// encoding format (compact vs debug).
-		// For backward-compatibility only (avoid problems with
-		// old installed packages). Newly compiled packages use
-		// the extensible format string.
-		// TODO(gri) Remove this support eventually; after Go1.8.
-		if b == 'd' {
-			p.debugFormat = true
-		}
-		p.trackAllTypes = p.rawByte() == 'a'
-		p.posInfoFormat = p.int() != 0
-		versionstr = p.string()
-		if versionstr == "v1" {
-			version = 0
-		}
-	} else {
-		// Go1.8 extensible encoding
-		// read version string and extract version number (ignore anything after the version number)
-		versionstr = p.rawStringln(b)
-		if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
-			if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
-				version = v
-			}
-		}
-	}
-	p.version = version
-
-	// read version specific flags - extend as necessary
-	switch p.version {
-	// case currentVersion:
-	// 	...
-	//	fallthrough
-	case currentVersion, 5, 4, 3, 2, 1:
-		p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
-		p.trackAllTypes = p.int() != 0
-		p.posInfoFormat = p.int() != 0
-	case 0:
-		// Go1.7 encoding format - nothing to do here
-	default:
-		errorf("unknown bexport format version %d (%q)", p.version, versionstr)
-	}
-
-	// --- generic export data ---
-
-	// populate typList with predeclared "known" types
-	p.typList = append(p.typList, predeclared()...)
-
-	// read package data
-	pkg = p.pkg()
-
-	// read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
-	objcount := 0
-	for {
-		tag := p.tagOrIndex()
-		if tag == endTag {
-			break
-		}
-		p.obj(tag)
-		objcount++
-	}
-
-	// self-verification
-	if count := p.int(); count != objcount {
-		errorf("got %d objects; want %d", objcount, count)
-	}
-
-	// ignore compiler-specific import data
-
-	// complete interfaces
-	// TODO(gri) re-investigate if we still need to do this in a delayed fashion
-	for _, typ := range p.interfaceList {
-		typ.Complete()
-	}
-
-	// record all referenced packages as imports
-	list := append(([]*types.Package)(nil), p.pkgList[1:]...)
-	sort.Sort(byPath(list))
-	pkg.SetImports(list)
-
-	// package was imported completely and without errors
-	pkg.MarkComplete()
-
-	return p.read, pkg, nil
-}
-
 func errorf(format string, args ...interface{}) {
 	panic(fmt.Sprintf(format, args...))
 }
 
-func (p *importer) pkg() *types.Package {
-	// if the package was seen before, i is its index (>= 0)
-	i := p.tagOrIndex()
-	if i >= 0 {
-		return p.pkgList[i]
-	}
-
-	// otherwise, i is the package tag (< 0)
-	if i != packageTag {
-		errorf("unexpected package tag %d version %d", i, p.version)
-	}
-
-	// read package data
-	name := p.string()
-	var path string
-	if p.version >= 5 {
-		path = p.path()
-	} else {
-		path = p.string()
-	}
-	if p.version >= 6 {
-		p.int() // package height; unused by go/types
-	}
-
-	// we should never see an empty package name
-	if name == "" {
-		errorf("empty package name in import")
-	}
-
-	// an empty path denotes the package we are currently importing;
-	// it must be the first package we see
-	if (path == "") != (len(p.pkgList) == 0) {
-		errorf("package path %q for pkg index %d", path, len(p.pkgList))
-	}
-
-	// if the package was imported before, use that one; otherwise create a new one
-	if path == "" {
-		path = p.importpath
-	}
-	pkg := p.imports[path]
-	if pkg == nil {
-		pkg = types.NewPackage(path, name)
-		p.imports[path] = pkg
-	} else if pkg.Name() != name {
-		errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
-	}
-	p.pkgList = append(p.pkgList, pkg)
-
-	return pkg
-}
-
-// objTag returns the tag value for each object kind.
-func objTag(obj types.Object) int {
-	switch obj.(type) {
-	case *types.Const:
-		return constTag
-	case *types.TypeName:
-		return typeTag
-	case *types.Var:
-		return varTag
-	case *types.Func:
-		return funcTag
-	default:
-		errorf("unexpected object: %v (%T)", obj, obj) // panics
-		panic("unreachable")
-	}
-}
-
-func sameObj(a, b types.Object) bool {
-	// Because unnamed types are not canonicalized, we cannot simply compare types for
-	// (pointer) identity.
-	// Ideally we'd check equality of constant values as well, but this is good enough.
-	return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
-}
-
-func (p *importer) declare(obj types.Object) {
-	pkg := obj.Pkg()
-	if alt := pkg.Scope().Insert(obj); alt != nil {
-		// This can only trigger if we import a (non-type) object a second time.
-		// Excluding type aliases, this cannot happen because 1) we only import a package
-		// once; and b) we ignore compiler-specific export data which may contain
-		// functions whose inlined function bodies refer to other functions that
-		// were already imported.
-		// However, type aliases require reexporting the original type, so we need
-		// to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
-		// method importer.obj, switch case importing functions).
-		// TODO(gri) review/update this comment once the gc compiler handles type aliases.
-		if !sameObj(obj, alt) {
-			errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
-		}
-	}
-}
-
-func (p *importer) obj(tag int) {
-	switch tag {
-	case constTag:
-		pos := p.pos()
-		pkg, name := p.qualifiedName()
-		typ := p.typ(nil, nil)
-		val := p.value()
-		p.declare(types.NewConst(pos, pkg, name, typ, val))
-
-	case aliasTag:
-		// TODO(gri) verify type alias hookup is correct
-		pos := p.pos()
-		pkg, name := p.qualifiedName()
-		typ := p.typ(nil, nil)
-		p.declare(types.NewTypeName(pos, pkg, name, typ))
-
-	case typeTag:
-		p.typ(nil, nil)
-
-	case varTag:
-		pos := p.pos()
-		pkg, name := p.qualifiedName()
-		typ := p.typ(nil, nil)
-		p.declare(types.NewVar(pos, pkg, name, typ))
-
-	case funcTag:
-		pos := p.pos()
-		pkg, name := p.qualifiedName()
-		params, isddd := p.paramList()
-		result, _ := p.paramList()
-		sig := types.NewSignature(nil, params, result, isddd)
-		p.declare(types.NewFunc(pos, pkg, name, sig))
-
-	default:
-		errorf("unexpected object tag %d", tag)
-	}
-}
-
 const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
 
-func (p *importer) pos() token.Pos {
-	if !p.posInfoFormat {
-		return token.NoPos
-	}
-
-	file := p.prevFile
-	line := p.prevLine
-	delta := p.int()
-	line += delta
-	if p.version >= 5 {
-		if delta == deltaNewFile {
-			if n := p.int(); n >= 0 {
-				// file changed
-				file = p.path()
-				line = n
-			}
-		}
-	} else {
-		if delta == 0 {
-			if n := p.int(); n >= 0 {
-				// file changed
-				file = p.prevFile[:n] + p.string()
-				line = p.int()
-			}
-		}
-	}
-	p.prevFile = file
-	p.prevLine = line
-
-	return p.fake.pos(file, line, 0)
-}
-
 // Synthesize a token.Pos
 type fakeFileSet struct {
 	fset  *token.FileSet
@@ -389,205 +73,6 @@
 	fakeLinesOnce sync.Once
 )
 
-func (p *importer) qualifiedName() (pkg *types.Package, name string) {
-	name = p.string()
-	pkg = p.pkg()
-	return
-}
-
-func (p *importer) record(t types.Type) {
-	p.typList = append(p.typList, t)
-}
-
-// A dddSlice is a types.Type representing ...T parameters.
-// It only appears for parameter types and does not escape
-// the importer.
-type dddSlice struct {
-	elem types.Type
-}
-
-func (t *dddSlice) Underlying() types.Type { return t }
-func (t *dddSlice) String() string         { return "..." + t.elem.String() }
-
-// parent is the package which declared the type; parent == nil means
-// the package currently imported. The parent package is needed for
-// exported struct fields and interface methods which don't contain
-// explicit package information in the export data.
-//
-// A non-nil tname is used as the "owner" of the result type; i.e.,
-// the result type is the underlying type of tname. tname is used
-// to give interface methods a named receiver type where possible.
-func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type {
-	// if the type was seen before, i is its index (>= 0)
-	i := p.tagOrIndex()
-	if i >= 0 {
-		return p.typList[i]
-	}
-
-	// otherwise, i is the type tag (< 0)
-	switch i {
-	case namedTag:
-		// read type object
-		pos := p.pos()
-		parent, name := p.qualifiedName()
-		scope := parent.Scope()
-		obj := scope.Lookup(name)
-
-		// if the object doesn't exist yet, create and insert it
-		if obj == nil {
-			obj = types.NewTypeName(pos, parent, name, nil)
-			scope.Insert(obj)
-		}
-
-		if _, ok := obj.(*types.TypeName); !ok {
-			errorf("pkg = %s, name = %s => %s", parent, name, obj)
-		}
-
-		// associate new named type with obj if it doesn't exist yet
-		t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
-
-		// but record the existing type, if any
-		tname := obj.Type().(*types.Named) // tname is either t0 or the existing type
-		p.record(tname)
-
-		// read underlying type
-		t0.SetUnderlying(p.typ(parent, t0))
-
-		// interfaces don't have associated methods
-		if types.IsInterface(t0) {
-			return tname
-		}
-
-		// read associated methods
-		for i := p.int(); i > 0; i-- {
-			// TODO(gri) replace this with something closer to fieldName
-			pos := p.pos()
-			name := p.string()
-			if !exported(name) {
-				p.pkg()
-			}
-
-			recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
-			params, isddd := p.paramList()
-			result, _ := p.paramList()
-			p.int() // go:nointerface pragma - discarded
-
-			sig := types.NewSignature(recv.At(0), params, result, isddd)
-			t0.AddMethod(types.NewFunc(pos, parent, name, sig))
-		}
-
-		return tname
-
-	case arrayTag:
-		t := new(types.Array)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		n := p.int64()
-		*t = *types.NewArray(p.typ(parent, nil), n)
-		return t
-
-	case sliceTag:
-		t := new(types.Slice)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		*t = *types.NewSlice(p.typ(parent, nil))
-		return t
-
-	case dddTag:
-		t := new(dddSlice)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		t.elem = p.typ(parent, nil)
-		return t
-
-	case structTag:
-		t := new(types.Struct)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		*t = *types.NewStruct(p.fieldList(parent))
-		return t
-
-	case pointerTag:
-		t := new(types.Pointer)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		*t = *types.NewPointer(p.typ(parent, nil))
-		return t
-
-	case signatureTag:
-		t := new(types.Signature)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		params, isddd := p.paramList()
-		result, _ := p.paramList()
-		*t = *types.NewSignature(nil, params, result, isddd)
-		return t
-
-	case interfaceTag:
-		// Create a dummy entry in the type list. This is safe because we
-		// cannot expect the interface type to appear in a cycle, as any
-		// such cycle must contain a named type which would have been
-		// first defined earlier.
-		// TODO(gri) Is this still true now that we have type aliases?
-		// See issue #23225.
-		n := len(p.typList)
-		if p.trackAllTypes {
-			p.record(nil)
-		}
-
-		var embeddeds []types.Type
-		for n := p.int(); n > 0; n-- {
-			p.pos()
-			embeddeds = append(embeddeds, p.typ(parent, nil))
-		}
-
-		t := newInterface(p.methodList(parent, tname), embeddeds)
-		p.interfaceList = append(p.interfaceList, t)
-		if p.trackAllTypes {
-			p.typList[n] = t
-		}
-		return t
-
-	case mapTag:
-		t := new(types.Map)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		key := p.typ(parent, nil)
-		val := p.typ(parent, nil)
-		*t = *types.NewMap(key, val)
-		return t
-
-	case chanTag:
-		t := new(types.Chan)
-		if p.trackAllTypes {
-			p.record(t)
-		}
-
-		dir := chanDir(p.int())
-		val := p.typ(parent, nil)
-		*t = *types.NewChan(dir, val)
-		return t
-
-	default:
-		errorf("unexpected type tag %d", i) // panics
-		panic("unreachable")
-	}
-}
-
 func chanDir(d int) types.ChanDir {
 	// tag values must match the constants in cmd/compile/internal/gc/go.go
 	switch d {
@@ -603,394 +88,6 @@
 	}
 }
 
-func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
-	if n := p.int(); n > 0 {
-		fields = make([]*types.Var, n)
-		tags = make([]string, n)
-		for i := range fields {
-			fields[i], tags[i] = p.field(parent)
-		}
-	}
-	return
-}
-
-func (p *importer) field(parent *types.Package) (*types.Var, string) {
-	pos := p.pos()
-	pkg, name, alias := p.fieldName(parent)
-	typ := p.typ(parent, nil)
-	tag := p.string()
-
-	anonymous := false
-	if name == "" {
-		// anonymous field - typ must be T or *T and T must be a type name
-		switch typ := deref(typ).(type) {
-		case *types.Basic: // basic types are named types
-			pkg = nil // objects defined in Universe scope have no package
-			name = typ.Name()
-		case *types.Named:
-			name = typ.Obj().Name()
-		default:
-			errorf("named base type expected")
-		}
-		anonymous = true
-	} else if alias {
-		// anonymous field: we have an explicit name because it's an alias
-		anonymous = true
-	}
-
-	return types.NewField(pos, pkg, name, typ, anonymous), tag
-}
-
-func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) {
-	if n := p.int(); n > 0 {
-		methods = make([]*types.Func, n)
-		for i := range methods {
-			methods[i] = p.method(parent, baseType)
-		}
-	}
-	return
-}
-
-func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func {
-	pos := p.pos()
-	pkg, name, _ := p.fieldName(parent)
-	// If we don't have a baseType, use a nil receiver.
-	// A receiver using the actual interface type (which
-	// we don't know yet) will be filled in when we call
-	// types.Interface.Complete.
-	var recv *types.Var
-	if baseType != nil {
-		recv = types.NewVar(token.NoPos, parent, "", baseType)
-	}
-	params, isddd := p.paramList()
-	result, _ := p.paramList()
-	sig := types.NewSignature(recv, params, result, isddd)
-	return types.NewFunc(pos, pkg, name, sig)
-}
-
-func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
-	name = p.string()
-	pkg = parent
-	if pkg == nil {
-		// use the imported package instead
-		pkg = p.pkgList[0]
-	}
-	if p.version == 0 && name == "_" {
-		// version 0 didn't export a package for _ fields
-		return
-	}
-	switch name {
-	case "":
-		// 1) field name matches base type name and is exported: nothing to do
-	case "?":
-		// 2) field name matches base type name and is not exported: need package
-		name = ""
-		pkg = p.pkg()
-	case "@":
-		// 3) field name doesn't match type name (alias)
-		name = p.string()
-		alias = true
-		fallthrough
-	default:
-		if !exported(name) {
-			pkg = p.pkg()
-		}
-	}
-	return
-}
-
-func (p *importer) paramList() (*types.Tuple, bool) {
-	n := p.int()
-	if n == 0 {
-		return nil, false
-	}
-	// negative length indicates unnamed parameters
-	named := true
-	if n < 0 {
-		n = -n
-		named = false
-	}
-	// n > 0
-	params := make([]*types.Var, n)
-	isddd := false
-	for i := range params {
-		params[i], isddd = p.param(named)
-	}
-	return types.NewTuple(params...), isddd
-}
-
-func (p *importer) param(named bool) (*types.Var, bool) {
-	t := p.typ(nil, nil)
-	td, isddd := t.(*dddSlice)
-	if isddd {
-		t = types.NewSlice(td.elem)
-	}
-
-	var pkg *types.Package
-	var name string
-	if named {
-		name = p.string()
-		if name == "" {
-			errorf("expected named parameter")
-		}
-		if name != "_" {
-			pkg = p.pkg()
-		}
-		if i := strings.Index(name, "·"); i > 0 {
-			name = name[:i] // cut off gc-specific parameter numbering
-		}
-	}
-
-	// read and discard compiler-specific info
-	p.string()
-
-	return types.NewVar(token.NoPos, pkg, name, t), isddd
-}
-
-func exported(name string) bool {
-	ch, _ := utf8.DecodeRuneInString(name)
-	return unicode.IsUpper(ch)
-}
-
-func (p *importer) value() constant.Value {
-	switch tag := p.tagOrIndex(); tag {
-	case falseTag:
-		return constant.MakeBool(false)
-	case trueTag:
-		return constant.MakeBool(true)
-	case int64Tag:
-		return constant.MakeInt64(p.int64())
-	case floatTag:
-		return p.float()
-	case complexTag:
-		re := p.float()
-		im := p.float()
-		return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
-	case stringTag:
-		return constant.MakeString(p.string())
-	case unknownTag:
-		return constant.MakeUnknown()
-	default:
-		errorf("unexpected value tag %d", tag) // panics
-		panic("unreachable")
-	}
-}
-
-func (p *importer) float() constant.Value {
-	sign := p.int()
-	if sign == 0 {
-		return constant.MakeInt64(0)
-	}
-
-	exp := p.int()
-	mant := []byte(p.string()) // big endian
-
-	// remove leading 0's if any
-	for len(mant) > 0 && mant[0] == 0 {
-		mant = mant[1:]
-	}
-
-	// convert to little endian
-	// TODO(gri) go/constant should have a more direct conversion function
-	//           (e.g., once it supports a big.Float based implementation)
-	for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
-		mant[i], mant[j] = mant[j], mant[i]
-	}
-
-	// adjust exponent (constant.MakeFromBytes creates an integer value,
-	// but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
-	exp -= len(mant) << 3
-	if len(mant) > 0 {
-		for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
-			exp++
-		}
-	}
-
-	x := constant.MakeFromBytes(mant)
-	switch {
-	case exp < 0:
-		d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
-		x = constant.BinaryOp(x, token.QUO, d)
-	case exp > 0:
-		x = constant.Shift(x, token.SHL, uint(exp))
-	}
-
-	if sign < 0 {
-		x = constant.UnaryOp(token.SUB, x, 0)
-	}
-	return x
-}
-
-// ----------------------------------------------------------------------------
-// Low-level decoders
-
-func (p *importer) tagOrIndex() int {
-	if p.debugFormat {
-		p.marker('t')
-	}
-
-	return int(p.rawInt64())
-}
-
-func (p *importer) int() int {
-	x := p.int64()
-	if int64(int(x)) != x {
-		errorf("exported integer too large")
-	}
-	return int(x)
-}
-
-func (p *importer) int64() int64 {
-	if p.debugFormat {
-		p.marker('i')
-	}
-
-	return p.rawInt64()
-}
-
-func (p *importer) path() string {
-	if p.debugFormat {
-		p.marker('p')
-	}
-	// if the path was seen before, i is its index (>= 0)
-	// (the empty string is at index 0)
-	i := p.rawInt64()
-	if i >= 0 {
-		return p.pathList[i]
-	}
-	// otherwise, i is the negative path length (< 0)
-	a := make([]string, -i)
-	for n := range a {
-		a[n] = p.string()
-	}
-	s := strings.Join(a, "/")
-	p.pathList = append(p.pathList, s)
-	return s
-}
-
-func (p *importer) string() string {
-	if p.debugFormat {
-		p.marker('s')
-	}
-	// if the string was seen before, i is its index (>= 0)
-	// (the empty string is at index 0)
-	i := p.rawInt64()
-	if i >= 0 {
-		return p.strList[i]
-	}
-	// otherwise, i is the negative string length (< 0)
-	if n := int(-i); n <= cap(p.buf) {
-		p.buf = p.buf[:n]
-	} else {
-		p.buf = make([]byte, n)
-	}
-	for i := range p.buf {
-		p.buf[i] = p.rawByte()
-	}
-	s := string(p.buf)
-	p.strList = append(p.strList, s)
-	return s
-}
-
-func (p *importer) marker(want byte) {
-	if got := p.rawByte(); got != want {
-		errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
-	}
-
-	pos := p.read
-	if n := int(p.rawInt64()); n != pos {
-		errorf("incorrect position: got %d; want %d", n, pos)
-	}
-}
-
-// rawInt64 should only be used by low-level decoders.
-func (p *importer) rawInt64() int64 {
-	i, err := binary.ReadVarint(p)
-	if err != nil {
-		errorf("read error: %v", err)
-	}
-	return i
-}
-
-// rawStringln should only be used to read the initial version string.
-func (p *importer) rawStringln(b byte) string {
-	p.buf = p.buf[:0]
-	for b != '\n' {
-		p.buf = append(p.buf, b)
-		b = p.rawByte()
-	}
-	return string(p.buf)
-}
-
-// needed for binary.ReadVarint in rawInt64
-func (p *importer) ReadByte() (byte, error) {
-	return p.rawByte(), nil
-}
-
-// rawByte is the bottleneck interface for reading p.data.
-// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
-// rawByte should only be used by low-level decoders.
-func (p *importer) rawByte() byte {
-	b := p.data[0]
-	r := 1
-	if b == '|' {
-		b = p.data[1]
-		r = 2
-		switch b {
-		case 'S':
-			b = '$'
-		case '|':
-			// nothing to do
-		default:
-			errorf("unexpected escape sequence in export data")
-		}
-	}
-	p.data = p.data[r:]
-	p.read += r
-	return b
-}
-
-// ----------------------------------------------------------------------------
-// Export format
-
-// Tags. Must be < 0.
-const (
-	// Objects
-	packageTag = -(iota + 1)
-	constTag
-	typeTag
-	varTag
-	funcTag
-	endTag
-
-	// Types
-	namedTag
-	arrayTag
-	sliceTag
-	dddTag
-	structTag
-	pointerTag
-	signatureTag
-	interfaceTag
-	mapTag
-	chanTag
-
-	// Values
-	falseTag
-	trueTag
-	int64Tag
-	floatTag
-	fractionTag // not used by gc
-	complexTag
-	stringTag
-	nilTag     // only used by gc (appears in exported inlined function bodies)
-	unknownTag // not used by gc (only appears in packages with errors)
-
-	// Type aliases
-	aliasTag
-)
-
 var predeclOnce sync.Once
 var predecl []types.Type // initialized lazily
 
diff --git a/internal/gcimporter/gcimporter.go b/internal/gcimporter/gcimporter.go
index a973dec..b122371 100644
--- a/internal/gcimporter/gcimporter.go
+++ b/internal/gcimporter/gcimporter.go
@@ -230,20 +230,17 @@
 		// Or, define a new standard go/types/gcexportdata package.
 		fset := token.NewFileSet()
 
-		// The indexed export format starts with an 'i'; the older
-		// binary export format starts with a 'c', 'd', or 'v'
-		// (from "version"). Select appropriate importer.
+		// Select appropriate importer.
 		if len(data) > 0 {
 			switch data[0] {
-			case 'i':
+			case 'v', 'c', 'd': // binary, till go1.10
+				return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
+
+			case 'i': // indexed, till go1.19
 				_, pkg, err := IImportData(fset, packages, data[1:], id)
 				return pkg, err
 
-			case 'v', 'c', 'd':
-				_, pkg, err := BImportData(fset, packages, data, id)
-				return pkg, err
-
-			case 'u':
+			case 'u': // unified, from go1.20
 				_, pkg, err := UImportData(fset, packages, data[1:size], id)
 				return pkg, err
 
diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go
index 33c1c1a..3d17e11 100644
--- a/internal/gcimporter/gcimporter_test.go
+++ b/internal/gcimporter/gcimporter_test.go
@@ -314,14 +314,6 @@
 		// test that export data can be imported
 		_, err := Import(make(map[string]*types.Package), pkgpath, dir, nil)
 		if err != nil {
-			// ok to fail if it fails with a newer version error for select files
-			if strings.Contains(err.Error(), "newer version") {
-				switch name {
-				case "test_go1.11_999b.a", "test_go1.11_999i.a":
-					continue
-				}
-				// fall through
-			}
 			t.Errorf("import %q failed: %v", pkgpath, err)
 			continue
 		}
@@ -351,7 +343,7 @@
 		_, err = Import(make(map[string]*types.Package), pkgpath, corruptdir, nil)
 		if err == nil {
 			t.Errorf("import corrupted %q succeeded", pkgpath)
-		} else if msg := err.Error(); !strings.Contains(msg, "version skew") {
+		} else if msg := err.Error(); !strings.Contains(msg, "internal error") {
 			t.Errorf("import %q error incorrect (%s)", pkgpath, msg)
 		}
 	}
diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go
index a0dc0b5..9930d8c 100644
--- a/internal/gcimporter/iexport.go
+++ b/internal/gcimporter/iexport.go
@@ -969,6 +969,16 @@
 	return &f
 }
 
+func valueToRat(x constant.Value) *big.Rat {
+	// Convert little-endian to big-endian.
+	// I can't believe this is necessary.
+	bytes := constant.Bytes(x)
+	for i := 0; i < len(bytes)/2; i++ {
+		bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+	}
+	return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
 // mpint exports a multi-precision integer.
 //
 // For unsigned types, small values are written out as a single
@@ -1178,3 +1188,12 @@
 	q.head++
 	return obj
 }
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+func internalErrorf(format string, args ...interface{}) error {
+	return internalError(fmt.Sprintf(format, args...))
+}
diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go
index be6dace..94a5eba 100644
--- a/internal/gcimporter/iimport.go
+++ b/internal/gcimporter/iimport.go
@@ -131,7 +131,7 @@
 				} else if version > currentVersion {
 					err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
 				} else {
-					err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+					err = fmt.Errorf("internal error while importing %q (%v); please report an issue", path, e)
 				}
 			}
 		}()
@@ -140,11 +140,8 @@
 	r := &intReader{bytes.NewReader(data), path}
 
 	if bundle {
-		bundleVersion := r.uint64()
-		switch bundleVersion {
-		case bundleVersion:
-		default:
-			errorf("unknown bundle format version %d", bundleVersion)
+		if v := r.uint64(); v != bundleVersion {
+			errorf("unknown bundle format version %d", v)
 		}
 	}
 
diff --git a/internal/gcimporter/shallow_test.go b/internal/gcimporter/shallow_test.go
index 848bd91..f73d1b3 100644
--- a/internal/gcimporter/shallow_test.go
+++ b/internal/gcimporter/shallow_test.go
@@ -66,7 +66,7 @@
 }
 
 // typecheck reads, parses, and type-checks a package.
-// It squirrels the export data in the the ppkg.ExportFile field.
+// It squirrels the export data in the ppkg.ExportFile field.
 func typecheck(t *testing.T, ppkg *packages.Package) {
 	if ppkg.PkgPath == "unsafe" {
 		return // unsafe is special
diff --git a/internal/gcimporter/testdata/versions/test.go b/internal/gcimporter/testdata/versions/test.go
index 6362adc..924f444 100644
--- a/internal/gcimporter/testdata/versions/test.go
+++ b/internal/gcimporter/testdata/versions/test.go
@@ -13,10 +13,7 @@
 //
 // go build -o test_go1.$X_$Y.a test.go
 //
-// with $X = Go version and $Y = export format version
-// (add 'b' or 'i' to distinguish between binary and
-// indexed format starting with 1.11 as long as both
-// formats are supported).
+// with $X = Go version and $Y = export format version (e.g. 'i', 'u').
 //
 // Make sure this source is extended such that it exercises
 // whatever export format change has taken place.
diff --git a/internal/gcimporter/testdata/versions/test_go1.11_0i.a b/internal/gcimporter/testdata/versions/test_go1.11_0i.a
deleted file mode 100644
index b00fefe..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.11_0i.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.11_6b.a b/internal/gcimporter/testdata/versions/test_go1.11_6b.a
deleted file mode 100644
index c0a211e..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.11_6b.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.11_999b.a b/internal/gcimporter/testdata/versions/test_go1.11_999b.a
deleted file mode 100644
index c35d22d..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.11_999b.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.11_999i.a b/internal/gcimporter/testdata/versions/test_go1.11_999i.a
deleted file mode 100644
index 99401d7..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.11_999i.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.16_i.a b/internal/gcimporter/testdata/versions/test_go1.16_i.a
new file mode 100644
index 0000000..35dc863
--- /dev/null
+++ b/internal/gcimporter/testdata/versions/test_go1.16_i.a
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.17_i.a b/internal/gcimporter/testdata/versions/test_go1.17_i.a
new file mode 100644
index 0000000..7a8ecb7
--- /dev/null
+++ b/internal/gcimporter/testdata/versions/test_go1.17_i.a
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.18.5_i.a b/internal/gcimporter/testdata/versions/test_go1.18.5_i.a
new file mode 100644
index 0000000..6ed126f
--- /dev/null
+++ b/internal/gcimporter/testdata/versions/test_go1.18.5_i.a
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.19_i.a b/internal/gcimporter/testdata/versions/test_go1.19_i.a
new file mode 100644
index 0000000..ff8f599
--- /dev/null
+++ b/internal/gcimporter/testdata/versions/test_go1.19_i.a
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.20_u.a b/internal/gcimporter/testdata/versions/test_go1.20_u.a
new file mode 100644
index 0000000..608dba8
--- /dev/null
+++ b/internal/gcimporter/testdata/versions/test_go1.20_u.a
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.7_0.a b/internal/gcimporter/testdata/versions/test_go1.7_0.a
deleted file mode 100644
index edb6c3f..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.7_0.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.7_1.a b/internal/gcimporter/testdata/versions/test_go1.7_1.a
deleted file mode 100644
index 554d04a..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.7_1.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.8_4.a b/internal/gcimporter/testdata/versions/test_go1.8_4.a
deleted file mode 100644
index 26b8531..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.8_4.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/testdata/versions/test_go1.8_5.a b/internal/gcimporter/testdata/versions/test_go1.8_5.a
deleted file mode 100644
index 60e52ef..0000000
--- a/internal/gcimporter/testdata/versions/test_go1.8_5.a
+++ /dev/null
Binary files differ
diff --git a/internal/gcimporter/ureader_yes.go b/internal/gcimporter/ureader_yes.go
index 34fc783..b977435 100644
--- a/internal/gcimporter/ureader_yes.go
+++ b/internal/gcimporter/ureader_yes.go
@@ -10,6 +10,7 @@
 package gcimporter
 
 import (
+	"fmt"
 	"go/token"
 	"go/types"
 	"sort"
@@ -63,6 +64,14 @@
 }
 
 func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+	if !debug {
+		defer func() {
+			if x := recover(); x != nil {
+				err = fmt.Errorf("internal error in importing %q (%v); please report an issue", path, x)
+			}
+		}()
+	}
+
 	s := string(data)
 	s = s[:strings.LastIndex(s, "\n$$\n")]
 	input := pkgbits.NewPkgDecoder(path, s)
diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go
index 3c0afe7..8d9fc98 100644
--- a/internal/gocommand/invoke.go
+++ b/internal/gocommand/invoke.go
@@ -24,6 +24,9 @@
 	exec "golang.org/x/sys/execabs"
 
 	"golang.org/x/tools/internal/event"
+	"golang.org/x/tools/internal/event/keys"
+	"golang.org/x/tools/internal/event/label"
+	"golang.org/x/tools/internal/event/tag"
 )
 
 // A Runner will run go command invocations and serialize
@@ -53,9 +56,19 @@
 // 1.14: go: updating go.mod: existing contents have changed since last read
 var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`)
 
+// verb is an event label for the go command verb.
+var verb = keys.NewString("verb", "go command verb")
+
+func invLabels(inv Invocation) []label.Label {
+	return []label.Label{verb.Of(inv.Verb), tag.Directory.Of(inv.WorkingDir)}
+}
+
 // Run is a convenience wrapper around RunRaw.
 // It returns only stdout and a "friendly" error.
 func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) {
+	ctx, done := event.Start(ctx, "gocommand.Runner.Run", invLabels(inv)...)
+	defer done()
+
 	stdout, _, friendly, _ := runner.RunRaw(ctx, inv)
 	return stdout, friendly
 }
@@ -63,6 +76,9 @@
 // RunPiped runs the invocation serially, always waiting for any concurrent
 // invocations to complete first.
 func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error {
+	ctx, done := event.Start(ctx, "gocommand.Runner.RunPiped", invLabels(inv)...)
+	defer done()
+
 	_, err := runner.runPiped(ctx, inv, stdout, stderr)
 	return err
 }
@@ -70,6 +86,8 @@
 // RunRaw runs the invocation, serializing requests only if they fight over
 // go.mod changes.
 func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
+	ctx, done := event.Start(ctx, "gocommand.Runner.RunRaw", invLabels(inv)...)
+	defer done()
 	// Make sure the runner is always initialized.
 	runner.initialize()
 
diff --git a/internal/gocommand/version.go b/internal/gocommand/version.go
index 307a76d..446c584 100644
--- a/internal/gocommand/version.go
+++ b/internal/gocommand/version.go
@@ -23,21 +23,11 @@
 func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
 	inv.Verb = "list"
 	inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`}
-	inv.Env = append(append([]string{}, inv.Env...), "GO111MODULE=off")
-	// Unset any unneeded flags, and remove them from BuildFlags, if they're
-	// present.
-	inv.ModFile = ""
+	inv.BuildFlags = nil // This is not a build command.
 	inv.ModFlag = ""
-	var buildFlags []string
-	for _, flag := range inv.BuildFlags {
-		// Flags can be prefixed by one or two dashes.
-		f := strings.TrimPrefix(strings.TrimPrefix(flag, "-"), "-")
-		if strings.HasPrefix(f, "mod=") || strings.HasPrefix(f, "modfile=") {
-			continue
-		}
-		buildFlags = append(buildFlags, flag)
-	}
-	inv.BuildFlags = buildFlags
+	inv.ModFile = ""
+	inv.Env = append(inv.Env[:len(inv.Env):len(inv.Env)], "GO111MODULE=off")
+
 	stdoutBytes, err := r.Run(ctx, inv)
 	if err != nil {
 		return 0, err
diff --git a/internal/imports/fix.go b/internal/imports/fix.go
index 642a5ac..d4f1b4e 100644
--- a/internal/imports/fix.go
+++ b/internal/imports/fix.go
@@ -26,6 +26,7 @@
 	"unicode/utf8"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/gopathwalk"
 )
@@ -414,9 +415,16 @@
 			})
 		}
 	}
+	// Collecting fixes involved map iteration, so sort for stability. See
+	// golang/go#59976.
+	sortFixes(fixes)
 
+	// collect selected fixes in a separate slice, so that it can be sorted
+	// separately. Note that these fixes must occur after fixes to existing
+	// imports. TODO(rfindley): figure out why.
+	var selectedFixes []*ImportFix
 	for _, imp := range selected {
-		fixes = append(fixes, &ImportFix{
+		selectedFixes = append(selectedFixes, &ImportFix{
 			StmtInfo: ImportInfo{
 				Name:       p.importSpecName(imp),
 				ImportPath: imp.ImportPath,
@@ -425,8 +433,25 @@
 			FixType:   AddImport,
 		})
 	}
+	sortFixes(selectedFixes)
 
-	return fixes, true
+	return append(fixes, selectedFixes...), true
+}
+
+func sortFixes(fixes []*ImportFix) {
+	sort.Slice(fixes, func(i, j int) bool {
+		fi, fj := fixes[i], fixes[j]
+		if fi.StmtInfo.ImportPath != fj.StmtInfo.ImportPath {
+			return fi.StmtInfo.ImportPath < fj.StmtInfo.ImportPath
+		}
+		if fi.StmtInfo.Name != fj.StmtInfo.Name {
+			return fi.StmtInfo.Name < fj.StmtInfo.Name
+		}
+		if fi.IdentName != fj.IdentName {
+			return fi.IdentName < fj.IdentName
+		}
+		return fi.FixType < fj.FixType
+	})
 }
 
 // importSpecName gets the import name of imp in the import spec.
@@ -519,7 +544,7 @@
 var fixImports = fixImportsDefault
 
 func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
-	fixes, err := getFixes(fset, f, filename, env)
+	fixes, err := getFixes(context.Background(), fset, f, filename, env)
 	if err != nil {
 		return err
 	}
@@ -529,7 +554,7 @@
 
 // getFixes gets the import fixes that need to be made to f in order to fix the imports.
 // It does not modify the ast.
-func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
+func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
 	abs, err := filepath.Abs(filename)
 	if err != nil {
 		return nil, err
@@ -583,7 +608,7 @@
 
 	// Go look for candidates in $GOPATH, etc. We don't necessarily load
 	// the real exports of sibling imports, so keep assuming their contents.
-	if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
+	if err := addExternalCandidates(ctx, p, p.missingRefs, filename); err != nil {
 		return nil, err
 	}
 
@@ -1031,7 +1056,10 @@
 	exportsLoaded func(pkg *pkg, exports []string)
 }
 
-func addExternalCandidates(pass *pass, refs references, filename string) error {
+func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error {
+	ctx, done := event.Start(ctx, "imports.addExternalCandidates")
+	defer done()
+
 	var mu sync.Mutex
 	found := make(map[string][]pkgDistance)
 	callback := &scanCallback{
diff --git a/internal/imports/imports.go b/internal/imports/imports.go
index 95a8838..58e637b 100644
--- a/internal/imports/imports.go
+++ b/internal/imports/imports.go
@@ -11,6 +11,7 @@
 import (
 	"bufio"
 	"bytes"
+	"context"
 	"fmt"
 	"go/ast"
 	"go/format"
@@ -23,6 +24,7 @@
 	"strings"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/event"
 )
 
 // Options is golang.org/x/tools/imports.Options with extra internal-only options.
@@ -66,14 +68,17 @@
 //
 // Note that filename's directory influences which imports can be chosen,
 // so it is important that filename be accurate.
-func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
+func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
+	ctx, done := event.Start(ctx, "imports.FixImports")
+	defer done()
+
 	fileSet := token.NewFileSet()
 	file, _, err := parse(fileSet, filename, src, opt)
 	if err != nil {
 		return nil, err
 	}
 
-	return getFixes(fileSet, file, filename, opt.Env)
+	return getFixes(ctx, fileSet, file, filename, opt.Env)
 }
 
 // ApplyFixes applies all of the fixes to the file and formats it. extraMode
diff --git a/internal/imports/mod.go b/internal/imports/mod.go
index 7d99d04..1389d38 100644
--- a/internal/imports/mod.go
+++ b/internal/imports/mod.go
@@ -19,6 +19,7 @@
 	"strings"
 
 	"golang.org/x/mod/module"
+	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/gopathwalk"
 )
@@ -424,6 +425,9 @@
 }
 
 func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
+	ctx, done := event.Start(ctx, "imports.ModuleResolver.scan")
+	defer done()
+
 	if err := r.init(); err != nil {
 		return err
 	}
diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go
index f633522..9b01888 100644
--- a/internal/testenv/testenv.go
+++ b/internal/testenv/testenv.go
@@ -12,6 +12,7 @@
 	"go/build"
 	"io/ioutil"
 	"os"
+	"path/filepath"
 	"runtime"
 	"runtime/debug"
 	"strings"
@@ -19,6 +20,7 @@
 	"testing"
 	"time"
 
+	"golang.org/x/mod/modfile"
 	"golang.org/x/tools/internal/goroot"
 
 	exec "golang.org/x/sys/execabs"
@@ -278,7 +280,12 @@
 		// For now, we'll skip them instead.
 		fmt.Fprintf(os.Stderr, "skipping test: %s builder is too slow (https://golang.org/issue/49321)\n", b)
 	default:
-		return
+		switch runtime.GOOS {
+		case "android", "ios":
+			fmt.Fprintf(os.Stderr, "skipping test: assuming that %s is resource-constrained\n", runtime.GOOS)
+		default:
+			return
+		}
 	}
 	os.Exit(0)
 }
@@ -395,3 +402,41 @@
 	}
 	return path
 }
+
+// NeedsLocalXTools skips t if the golang.org/x/tools module is replaced and
+// its replacement directory does not exist (or does not contain the module).
+func NeedsLocalXTools(t testing.TB) {
+	t.Helper()
+
+	NeedsTool(t, "go")
+
+	cmd := Command(t, "go", "list", "-f", "{{with .Replace}}{{.Dir}}{{end}}", "-m", "golang.org/x/tools")
+	out, err := cmd.Output()
+	if err != nil {
+		if ee, ok := err.(*exec.ExitError); ok && len(ee.Stderr) > 0 {
+			t.Skipf("skipping test: %v: %v\n%s", cmd, err, ee.Stderr)
+		}
+		t.Skipf("skipping test: %v: %v", cmd, err)
+	}
+
+	dir := string(bytes.TrimSpace(out))
+	if dir == "" {
+		// No replacement directory, and (since we didn't set -e) no error either.
+		// Maybe x/tools isn't replaced at all (as in a gopls release, or when
+		// using a go.work file that includes the x/tools module).
+		return
+	}
+
+	// We found the directory where x/tools would exist if we're in a clone of the
+	// repo. Is it there? (If not, we're probably in the module cache instead.)
+	modFilePath := filepath.Join(dir, "go.mod")
+	b, err := os.ReadFile(modFilePath)
+	if err != nil {
+		t.Skipf("skipping test: x/tools replacement not found: %v", err)
+	}
+	modulePath := modfile.ModulePath(b)
+
+	if want := "golang.org/x/tools"; modulePath != want {
+		t.Skipf("skipping test: %s module path is %q, not %q", modFilePath, modulePath, want)
+	}
+}