all: merge master (53e637b) into gopls-release-branch.0.14
Also add back the replace directive.
For golang/go#63220
Merge List:
+ 2023-10-16 53e637bd2 internal/refactor/inline: improve check for last uses of free vars
+ 2023-10-16 61bb3e97f internal/refactor/inline: less hacky solution for eliding braces
+ 2023-10-16 8a71c399c gopls/internal/lsp/source: abort change signature on overlapping calls
+ 2023-10-16 f744e4be4 go/ssa: propagate goversions in ssa
+ 2023-10-16 6fcd7783a gopls/internal/lsp: add code actions to remove unused parameters
+ 2023-10-16 918e96ac7 internal/refactor/inline: use binding decl with literalization
+ 2023-10-13 12b5dadef gopls: deprecate "tempModfile" and "expandWorkspaceToModule" settings
+ 2023-10-12 f85b3f7bc internal/refactor/inline: don't treat blanks as decls in declares()
+ 2023-10-12 348453412 internal/refactor/inline: docs for 0.14 release notes
+ 2023-10-11 b9b97d982 go/types/objectpath: remove method sorting
Change-Id: I371204ec58877fcfaa03f7ba663a6b5718b8ecf0
diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go
index 53c3f4a..0a40652 100644
--- a/go/analysis/unitchecker/unitchecker.go
+++ b/go/analysis/unitchecker/unitchecker.go
@@ -319,7 +319,7 @@
analyzers = filtered
// Read facts from imported packages.
- facts, err := facts.NewDecoder(pkg).Decode(false, makeFactImporter(cfg))
+ facts, err := facts.NewDecoder(pkg).Decode(makeFactImporter(cfg))
if err != nil {
return nil, err
}
@@ -418,7 +418,7 @@
results[i].diagnostics = act.diagnostics
}
- data := facts.Encode(false)
+ data := facts.Encode()
if err := exportFacts(cfg, data); err != nil {
return nil, fmt.Errorf("failed to export analysis facts: %v", err)
}
diff --git a/go/ssa/builder.go b/go/ssa/builder.go
index 0e49537..9e8d12e 100644
--- a/go/ssa/builder.go
+++ b/go/ssa/builder.go
@@ -647,7 +647,8 @@
typeparams: fn.typeparams, // share the parent's type parameters.
typeargs: fn.typeargs, // share the parent's type arguments.
info: fn.info,
- subst: fn.subst, // share the parent's type substitutions.
+ subst: fn.subst, // share the parent's type substitutions.
+ goversion: fn.goversion, // share the parent's goversion
}
fn.AnonFuncs = append(fn.AnonFuncs, fn2)
b.created.Add(fn2)
@@ -2516,11 +2517,18 @@
if len(p.info.InitOrder) > 0 && len(p.files) == 0 {
panic("no source files provided for package. cannot initialize globals")
}
+
for _, varinit := range p.info.InitOrder {
if init.Prog.mode&LogSource != 0 {
fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
}
+ // Initializers for global vars are evaluated in dependency
+ // order, but may come from arbitrary files of the package
+ // with different versions, so we transiently update
+ // init.goversion for each one. (Since init is a synthetic
+ // function it has no syntax of its own that needs a version.)
+ init.goversion = p.initVersion[varinit.Rhs]
if len(varinit.Lhs) == 1 {
// 1:1 initialization: var x, y = a(), b()
var lval lvalue
@@ -2541,6 +2549,7 @@
}
}
}
+ init.goversion = "" // The rest of the init function is synthetic. No syntax => no goversion.
// Call all of the declared init() functions in source order.
for _, file := range p.files {
@@ -2585,8 +2594,11 @@
b.needsRuntimeTypes() // Add all of the runtime type information. May CREATE Functions.
}
- p.info = nil // We no longer need ASTs or go/types deductions.
- p.created = nil // We no longer need created functions.
+ // We no longer need transient information: ASTs or go/types deductions.
+ p.info = nil
+ p.created = nil
+ p.files = nil
+ p.initVersion = nil
if p.Prog.mode&SanityCheckFunctions != 0 {
sanityCheckPackage(p)
diff --git a/go/ssa/builder_go122_test.go b/go/ssa/builder_go122_test.go
new file mode 100644
index 0000000..21930d6
--- /dev/null
+++ b/go/ssa/builder_go122_test.go
@@ -0,0 +1,88 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22
+// +build go1.22
+
+package ssa_test
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func TestMultipleGoversions(t *testing.T) {
+ var contents = map[string]string{
+ "post.go": `
+ //go:build go1.22
+ package p
+
+ var distinct = func(l []int) []*int {
+ var r []*int
+ for i := range l {
+ r = append(r, &i)
+ }
+ return r
+ }(l)
+ `,
+ "pre.go": `
+ package p
+
+ var l = []int{0, 0, 0}
+
+ var same = func(l []int) []*int {
+ var r []*int
+ for i := range l {
+ r = append(r, &i)
+ }
+ return r
+ }(l)
+ `,
+ }
+
+ fset := token.NewFileSet()
+ var files []*ast.File
+ for _, fname := range []string{"post.go", "pre.go"} {
+ file, err := parser.ParseFile(fset, fname, contents[fname], 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ files = append(files, file)
+ }
+
+ pkg := types.NewPackage("p", "")
+ conf := &types.Config{Importer: nil, GoVersion: "go1.21"}
+ p, _, err := ssautil.BuildPackage(conf, fset, pkg, files, ssa.SanityCheckFunctions)
+ if err != nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+
+ fns := ssautil.AllFunctions(p.Prog)
+ names := make(map[string]*ssa.Function)
+ for fn := range fns {
+ names[fn.String()] = fn
+ }
+ for _, item := range []struct{ name, wantSyn, wantPos string }{
+ {"p.init", "package initializer", "-"},
+ {"p.init$1", "", "post.go:5:17"},
+ {"p.init$2", "", "pre.go:6:13"},
+ } {
+ fn := names[item.name]
+ if fn == nil {
+ t.Fatalf("Could not find function named %q in package %s", item.name, p)
+ }
+ if fn.Synthetic != item.wantSyn {
+ t.Errorf("Function %q.Synthetic=%q. expected %q", fn, fn.Synthetic, item.wantSyn)
+ }
+ if got := fset.Position(fn.Pos()).String(); got != item.wantPos {
+ t.Errorf("Function %q.Pos()=%q. expected %q", fn, got, item.wantPos)
+ }
+ }
+}
diff --git a/go/ssa/create.go b/go/ssa/create.go
index 1bf88c8..90cb9bb 100644
--- a/go/ssa/create.go
+++ b/go/ssa/create.go
@@ -47,9 +47,10 @@
// typechecker object obj.
//
// For objects from Go source code, syntax is the associated syntax
-// tree (for funcs and vars only); it will be used during the build
+// tree (for funcs and vars only) and goversion defines the
+// appropriate interpretation; they will be used during the build
// phase.
-func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
+func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node, goversion string) {
name := obj.Name()
switch obj := obj.(type) {
case *types.Builtin:
@@ -108,6 +109,7 @@
Prog: pkg.Prog,
typeparams: tparams,
info: pkg.info,
+ goversion: goversion,
}
pkg.created.Add(fn)
if syntax == nil {
@@ -130,7 +132,7 @@
// membersFromDecl populates package pkg with members for each
// typechecker object (var, func, const or type) associated with the
// specified decl.
-func membersFromDecl(pkg *Package, decl ast.Decl) {
+func membersFromDecl(pkg *Package, decl ast.Decl, goversion string) {
switch decl := decl.(type) {
case *ast.GenDecl: // import, const, type or var
switch decl.Tok {
@@ -138,16 +140,19 @@
for _, spec := range decl.Specs {
for _, id := range spec.(*ast.ValueSpec).Names {
if !isBlankIdent(id) {
- memberFromObject(pkg, pkg.info.Defs[id], nil)
+ memberFromObject(pkg, pkg.info.Defs[id], nil, "")
}
}
}
case token.VAR:
for _, spec := range decl.Specs {
+ for _, rhs := range spec.(*ast.ValueSpec).Values {
+ pkg.initVersion[rhs] = goversion
+ }
for _, id := range spec.(*ast.ValueSpec).Names {
if !isBlankIdent(id) {
- memberFromObject(pkg, pkg.info.Defs[id], spec)
+ memberFromObject(pkg, pkg.info.Defs[id], spec, goversion)
}
}
}
@@ -156,7 +161,7 @@
for _, spec := range decl.Specs {
id := spec.(*ast.TypeSpec).Name
if !isBlankIdent(id) {
- memberFromObject(pkg, pkg.info.Defs[id], nil)
+ memberFromObject(pkg, pkg.info.Defs[id], nil, "")
}
}
}
@@ -164,7 +169,7 @@
case *ast.FuncDecl:
id := decl.Name
if !isBlankIdent(id) {
- memberFromObject(pkg, pkg.info.Defs[id], decl)
+ memberFromObject(pkg, pkg.info.Defs[id], decl, goversion)
}
}
}
@@ -197,8 +202,10 @@
Members: make(map[string]Member),
objects: make(map[types.Object]Member),
Pkg: pkg,
- info: info, // transient (CREATE and BUILD phases)
- files: files, // transient (CREATE and BUILD phases)
+ // transient values (CREATE and BUILD phases)
+ info: info,
+ files: files,
+ initVersion: make(map[ast.Expr]string),
}
// Add init() function.
@@ -209,6 +216,7 @@
Pkg: p,
Prog: prog,
info: p.info,
+ goversion: "", // See Package.build() for details.
}
p.Members[p.init.name] = p.init
p.created.Add(p.init)
@@ -218,8 +226,9 @@
if len(files) > 0 {
// Go source package.
for _, file := range files {
+ goversion := goversionOf(p, file)
for _, decl := range file.Decls {
- membersFromDecl(p, decl)
+ membersFromDecl(p, decl, goversion)
}
}
} else {
@@ -229,11 +238,11 @@
scope := p.Pkg.Scope()
for _, name := range scope.Names() {
obj := scope.Lookup(name)
- memberFromObject(p, obj, nil)
+ memberFromObject(p, obj, nil, "")
if obj, ok := obj.(*types.TypeName); ok {
if named, ok := obj.Type().(*types.Named); ok {
for i, n := 0, named.NumMethods(); i < n; i++ {
- memberFromObject(p, named.Method(i), nil)
+ memberFromObject(p, named.Method(i), nil, "")
}
}
}
diff --git a/go/ssa/func.go b/go/ssa/func.go
index 38c3e31..7b37444 100644
--- a/go/ssa/func.go
+++ b/go/ssa/func.go
@@ -324,6 +324,7 @@
f.namedResults = nil // (used by lifting)
f.info = nil
f.subst = nil
+ f.goversion = ""
numberRegisters(f) // uses f.namedRegisters
}
diff --git a/go/ssa/instantiate.go b/go/ssa/instantiate.go
index 38249de..94c7b64 100644
--- a/go/ssa/instantiate.go
+++ b/go/ssa/instantiate.go
@@ -32,8 +32,8 @@
fn *Function // fn.typeparams.Len() > 0 and len(fn.typeargs) == 0.
instances map[*typeList]*Function // canonical type arguments to an instance.
syntax *ast.FuncDecl // fn.syntax copy for instantiating after fn is done. nil on synthetic packages.
- info *types.Info // fn.pkg.info copy for building after fn is done.. nil on synthetic packages.
-
+ info *types.Info // fn.pkg.info copy for building after fn is done. nil on synthetic packages.
+ goversion string // goversion to build syntax with.
// TODO(taking): Consider ways to allow for clearing syntax and info when done building.
// May require a public API change as MethodValue can request these be built after prog.Build() is done.
}
@@ -66,9 +66,10 @@
if _, ok := prog.instances[fn]; !ok {
prog.instances[fn] = &instanceSet{
- fn: fn,
- syntax: syntax,
- info: fn.info,
+ fn: fn,
+ syntax: syntax,
+ info: fn.info,
+ goversion: fn.goversion,
}
}
}
@@ -169,8 +170,8 @@
typeargs: targs,
info: insts.info, // on synthetic packages info is nil.
subst: subst,
+ goversion: insts.goversion,
}
-
cr.Add(instance)
insts.instances[key] = instance
return instance
diff --git a/go/ssa/source.go b/go/ssa/source.go
index 9c900e3..487abcf 100644
--- a/go/ssa/source.go
+++ b/go/ssa/source.go
@@ -11,6 +11,7 @@
// the originating syntax, as specified.
import (
+ "fmt"
"go/ast"
"go/token"
"go/types"
@@ -131,6 +132,31 @@
return nil
}
+// goversionOf returns the goversion to use when building the syntax
+// in file, a source file of package p (that is, its function
+// declarations and the initial values of its package-level variables).
+func goversionOf(p *Package, file *ast.File) string {
+ if p.info == nil {
+ return ""
+ }
+
+ // TODO(taking): Update to the following when internal/versions available:
+ // return versions.Lang(versions.FileVersions(p.info, file))
+ return fileVersions(file)
+}
+
+// TODO(taking): Remove when internal/versions is available.
+var fileVersions = func(file *ast.File) string { return "" }
+
+// parseGoVersion parses a goXX.YY version string, or returns a negative version on error.
+// TODO(taking): Switch to a permanent solution when internal/versions is submitted.
+func parseGoVersion(x string) (major, minor int) {
+ if _, err := fmt.Sscanf(x, "go%d.%d", &major, &minor); err != nil || major < 0 || minor < 0 {
+ return -1, -1
+ }
+ return
+}
+
// ValueForExpr returns the SSA Value that corresponds to non-constant
// expression e.
//
diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go
index bd42f2e..04b7157 100644
--- a/go/ssa/ssa.go
+++ b/go/ssa/ssa.go
@@ -57,11 +57,12 @@
// The following fields are set transiently, then cleared
// after building.
- buildOnce sync.Once // ensures package building occurs once
- ninit int32 // number of init functions
- info *types.Info // package type information
- files []*ast.File // package ASTs
- created creator // members created as a result of building this package (includes declared functions, wrappers)
+ buildOnce sync.Once // ensures package building occurs once
+ ninit int32 // number of init functions
+ info *types.Info // package type information
+ files []*ast.File // package ASTs
+ created creator // members created as a result of building this package (includes declared functions, wrappers)
+ initVersion map[ast.Expr]string // goversion to use for each global var init expr
}
// A Member is a member of a Go package, implemented by *NamedConst,
@@ -338,6 +339,7 @@
lblocks map[types.Object]*lblock // labelled blocks
info *types.Info // *types.Info to build from. nil for wrappers.
subst *subster // non-nil => expand generic body using this type substitution of ground types
+ goversion string // Go version of syntax (NB: init is special)
}
// BasicBlock represents an SSA basic block.
diff --git a/go/ssa/ssautil/load.go b/go/ssa/ssautil/load.go
index 96d69a2..281ad10 100644
--- a/go/ssa/ssautil/load.go
+++ b/go/ssa/ssautil/load.go
@@ -147,6 +147,7 @@
Selections: make(map[*ast.SelectorExpr]*types.Selection),
}
typeparams.InitInstanceInfo(info)
+ // versions.InitFileVersions(info) // TODO(taking): Enable when internal/versions is available.
if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
return nil, nil, err
}
diff --git a/go/ssa/versions_go122.go b/go/ssa/versions_go122.go
new file mode 100644
index 0000000..b74165a
--- /dev/null
+++ b/go/ssa/versions_go122.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22
+// +build go1.22
+
+package ssa
+
+import (
+ "go/ast"
+)
+
+func init() {
+ fileVersions = func(file *ast.File) string {
+ if maj, min := parseGoVersion(file.GoVersion); maj >= 0 && min >= 0 {
+ return file.GoVersion
+ }
+ return ""
+ }
+}
diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go
index 123ea68..f71c27d 100644
--- a/go/ssa/wrappers.go
+++ b/go/ssa/wrappers.go
@@ -72,7 +72,9 @@
Synthetic: description,
Prog: prog,
pos: obj.Pos(),
- info: nil, // info is not set on wrappers.
+ // wrappers have no syntax
+ info: nil,
+ goversion: "",
}
cr.Add(fn)
fn.startBody()
@@ -200,7 +202,9 @@
Synthetic: description,
Prog: prog,
pos: obj.Pos(),
- info: nil, // info is not set on wrappers.
+ // wrappers have no syntax
+ info: nil,
+ goversion: "",
}
cr.Add(fn)
diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go
index fa5834b..e742ecc 100644
--- a/go/types/objectpath/objectpath.go
+++ b/go/types/objectpath/objectpath.go
@@ -26,13 +26,10 @@
import (
"fmt"
"go/types"
- "sort"
"strconv"
"strings"
- _ "unsafe"
"golang.org/x/tools/internal/typeparams"
- "golang.org/x/tools/internal/typesinternal"
)
// A Path is an opaque name that identifies a types.Object
@@ -123,20 +120,7 @@
// An Encoder amortizes the cost of encoding the paths of multiple objects.
// The zero value of an Encoder is ready to use.
type Encoder struct {
- scopeMemo map[*types.Scope][]types.Object // memoization of scopeObjects
- namedMethodsMemo map[*types.Named][]*types.Func // memoization of namedMethods()
- skipMethodSorting bool
-}
-
-// Expose back doors so that gopls can avoid method sorting, which can dominate
-// analysis on certain repositories.
-//
-// TODO(golang/go#61443): remove this.
-func init() {
- typesinternal.SkipEncoderMethodSorting = func(enc interface{}) {
- enc.(*Encoder).skipMethodSorting = true
- }
- typesinternal.ObjectpathObject = object
+ scopeMemo map[*types.Scope][]types.Object // memoization of scopeObjects
}
// For returns the path to an object relative to its package,
@@ -328,31 +312,18 @@
// Inspect declared methods of defined types.
if T, ok := o.Type().(*types.Named); ok {
path = append(path, opType)
- if !enc.skipMethodSorting {
- // Note that method index here is always with respect
- // to canonical ordering of methods, regardless of how
- // they appear in the underlying type.
- for i, m := range enc.namedMethods(T) {
- path2 := appendOpArg(path, opMethod, i)
- if m == obj {
- return Path(path2), nil // found declared method
- }
- if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
- return Path(r), nil
- }
+ // The method index here is always with respect
+ // to the underlying go/types data structures,
+ // which ultimately derives from source order
+ // and must be preserved by export data.
+ for i := 0; i < T.NumMethods(); i++ {
+ m := T.Method(i)
+ path2 := appendOpArg(path, opMethod, i)
+ if m == obj {
+ return Path(path2), nil // found declared method
}
- } else {
- // This branch must match the logic in the branch above, using go/types
- // APIs without sorting.
- for i := 0; i < T.NumMethods(); i++ {
- m := T.Method(i)
- path2 := appendOpArg(path, opMethod, i)
- if m == obj {
- return Path(path2), nil // found declared method
- }
- if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
- return Path(r), nil
- }
+ if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
+ return Path(r), nil
}
}
}
@@ -448,22 +419,13 @@
path = append(path, name...)
path = append(path, opType)
- if !enc.skipMethodSorting {
- for i, m := range enc.namedMethods(named) {
- if m == meth {
- path = appendOpArg(path, opMethod, i)
- return Path(path), true
- }
- }
- } else {
- // This branch must match the logic of the branch above, using go/types
- // APIs without sorting.
- for i := 0; i < named.NumMethods(); i++ {
- m := named.Method(i)
- if m == meth {
- path = appendOpArg(path, opMethod, i)
- return Path(path), true
- }
+ // Method indices are w.r.t. the go/types data structures,
+ // ultimately deriving from source order,
+ // which is preserved by export data.
+ for i := 0; i < named.NumMethods(); i++ {
+ if named.Method(i) == meth {
+ path = appendOpArg(path, opMethod, i)
+ return Path(path), true
}
}
@@ -576,12 +538,7 @@
// Object returns the object denoted by path p within the package pkg.
func Object(pkg *types.Package, p Path) (types.Object, error) {
- return object(pkg, string(p), false)
-}
-
-// Note: the skipMethodSorting parameter must match the value of
-// Encoder.skipMethodSorting used during encoding.
-func object(pkg *types.Package, pathstr string, skipMethodSorting bool) (types.Object, error) {
+ pathstr := string(p)
if pathstr == "" {
return nil, fmt.Errorf("empty path")
}
@@ -747,12 +704,7 @@
if index >= t.NumMethods() {
return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods())
}
- if skipMethodSorting {
- obj = t.Method(index)
- } else {
- methods := namedMethods(t) // (unmemoized)
- obj = methods[index] // Id-ordered
- }
+ obj = t.Method(index)
default:
return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t)
@@ -779,33 +731,6 @@
return obj, nil // success
}
-// namedMethods returns the methods of a Named type in ascending Id order.
-func namedMethods(named *types.Named) []*types.Func {
- methods := make([]*types.Func, named.NumMethods())
- for i := range methods {
- methods[i] = named.Method(i)
- }
- sort.Slice(methods, func(i, j int) bool {
- return methods[i].Id() < methods[j].Id()
- })
- return methods
-}
-
-// namedMethods is a memoization of the namedMethods function. Callers must not modify the result.
-func (enc *Encoder) namedMethods(named *types.Named) []*types.Func {
- m := enc.namedMethodsMemo
- if m == nil {
- m = make(map[*types.Named][]*types.Func)
- enc.namedMethodsMemo = m
- }
- methods, ok := m[named]
- if !ok {
- methods = namedMethods(named) // allocates and sorts
- m[named] = methods
- }
- return methods
-}
-
// scopeObjects is a memoization of scope objects.
// Callers must not modify the result.
func (enc *Encoder) scopeObjects(scope *types.Scope) []types.Object {
diff --git a/gopls/doc/commands.md b/gopls/doc/commands.md
index 833dad9..3404c91 100644
--- a/gopls/doc/commands.md
+++ b/gopls/doc/commands.md
@@ -84,6 +84,26 @@
}
```
+### **performs a "change signature" refactoring.**
+Identifier: `gopls.change_signature`
+
+This command is experimental, currently only supporting parameter removal.
+Its signature will certainly change in the future (pun intended).
+
+Args:
+
+```
+{
+ "RemoveParameter": {
+ "uri": string,
+ "range": {
+ "start": { ... },
+ "end": { ... },
+ },
+ },
+}
+```
+
### **Check for upgrades**
Identifier: `gopls.check_upgrades`
diff --git a/gopls/doc/inline-after.png b/gopls/doc/inline-after.png
new file mode 100644
index 0000000..843a845
--- /dev/null
+++ b/gopls/doc/inline-after.png
Binary files differ
diff --git a/gopls/doc/inline-before.png b/gopls/doc/inline-before.png
new file mode 100644
index 0000000..e3adbd4
--- /dev/null
+++ b/gopls/doc/inline-before.png
Binary files differ
diff --git a/gopls/doc/refactor-inline.md b/gopls/doc/refactor-inline.md
new file mode 100644
index 0000000..dd857f8
--- /dev/null
+++ b/gopls/doc/refactor-inline.md
@@ -0,0 +1,161 @@
+
+Gopls v0.14 supports a new refactoring operation:
+inlining of function calls.
+
+You can find it in VS Code by selecting a static call to a function or
+method f and choosing the `Refactor...` command followed by `Inline
+call to f`.
+Other editors and LSP clients have their own idiomatic command for it;
+for example, in Emacs with Eglot it is
+[`M-x eglot-code-action-inline`](https://joaotavora.github.io/eglot/#index-M_002dx-eglot_002dcode_002daction_002dinline)
+and in Vim with coc.nvim it is `coc-rename`.
+
+<!-- source code used for images:
+
+func six() int {
+ return sum(1, 2, 3)
+}
+
+func sum(values ...int) int {
+ total := 0
+ for _, v := range values {
+ total += v
+ }
+ return total
+}
+-->
+
+
+
+Inlining replaces the call expression by a copy of the function body,
+with parameters replaced by arguments.
+Inlining is useful for a number of reasons.
+Perhaps you want to eliminate a call to a deprecated
+function such as `ioutil.ReadFile` by replacing it with a call to the
+newer `os.ReadFile`; inlining will do that for you.
+Or perhaps you want to copy and modify an existing function in some
+way; inlining can provide a starting point.
+The inlining logic also provides a building block for
+other refactorings to come, such as "change signature".
+
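+For example, the `ioutil.ReadFile` case mentioned above: since Go 1.16
+the deprecated `ioutil.ReadFile` is a thin wrapper around
+`os.ReadFile`, so inlining a call to it yields the newer call directly.
+The sketch below is illustrative (the helper names and file name are
+made up), not actual tool output:
+
+```go
+package example
+
+import (
+	"io/ioutil"
+	"os"
+)
+
+// before contains a call to the deprecated wrapper, whose body is
+// simply `return os.ReadFile(filename)`.
+func before() ([]byte, error) {
+	return ioutil.ReadFile("config.json")
+}
+
+// after shows the result of inlining that call.
+func after() ([]byte, error) {
+	return os.ReadFile("config.json")
+}
+```
+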
+Not every call can be inlined.
+Of course, the tool needs to know which function is being called, so
+you can't inline a dynamic call through a function value or interface
+method; but static calls to methods are fine.
+Nor can you inline a call if the callee is declared in another package
+and refers to non-exported parts of that package, or to [internal
+packages](https://go.dev/doc/go1.4#internalpackages) that are
+inaccessible to the caller.
+
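+For example (a hypothetical package; all names are illustrative):
+
+```go
+package example
+
+type Greeter interface{ Greet() string }
+
+func hello() string { return "hello" }
+
+type T struct{}
+
+func (T) Greet() string { return hello() }
+
+func demo(g Greeter, f func() string, t T) {
+	_ = g.Greet() // dynamic call through an interface: cannot be inlined
+	_ = f()       // dynamic call through a function value: cannot be inlined
+	_ = t.Greet() // static method call: can be inlined
+	_ = hello()   // static function call: can be inlined
+}
+```
+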
+When inlining is possible, it's critical that the tool preserve
+the original behavior of the program.
+We don't want refactoring to break the build, or, worse, to introduce
+subtle latent bugs.
+This is especially important when inlining tools are used to perform
+automated clean-ups in large code bases.
+We must be able to trust the tool.
+Our inliner is very careful not to make guesses or unsound
+assumptions about the behavior of the code.
+However, that does mean it sometimes produces a change that differs
+from what someone with expert knowledge of the same code might have
+written by hand.
+
+In the most difficult cases, especially with complex control flow, it
+may not be safe to eliminate the function call at all.
+For example, the behavior of a `defer` statement is intimately tied to
+its enclosing function call, and `defer` is the only control
+construct that can be used to handle panics, so it cannot be reduced
+into simpler constructs.
+So, for example, given a function f defined as:
+
+```go
+func f(s string) {
+ defer fmt.Println("goodbye")
+ fmt.Println(s)
+}
+```
+a call `f("hello")` will be inlined to:
+```go
+ func() {
+ defer fmt.Println("goodbye")
+ fmt.Println("hello")
+ }()
+```
+Although the parameter was eliminated, the function call remains.
+
+An inliner is a bit like an optimizing compiler.
+A compiler is considered "correct" if it doesn't change the meaning of
+the program in translation from source language to target language.
+An _optimizing_ compiler exploits the particulars of the input to
+generate better code, where "better" usually means more efficient.
+As users report inputs that cause the compiler to emit suboptimal
+code, the compiler is improved to recognize more cases, or more rules,
+and more exceptions to rules---but this process has no end.
+Inlining is similar, except that "better" code means tidier code.
+The most conservative translation provides a simple but (hopefully!)
+correct foundation, on top of which endless rules, and exceptions to
+rules, can embellish and improve the quality of the output.
+
+The following section lists some of the technical
+challenges involved in sound inlining:
+
+- **Effects:** When replacing a parameter by its argument expression,
+ we must be careful not to change the effects of the call. For
+ example, if we call a function `func twice(x int) int { return x + x }`
+ with `twice(g())`, we do not want to see `g() + g()`, which would
+ cause g's effects to occur twice, and potentially each call might
+ return a different value. All effects must occur the same number of
+ times, and in the same order. This requires analyzing both the
+ arguments and the callee function to determine whether they are
+ "pure", whether they read variables, or whether (and when) they
+ update them too. The inliner will introduce a declaration such as
+ `var x int = g()` when it cannot prove that it is safe to substitute
+ the argument throughout.
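+ A concrete sketch of this rule appears just after this list.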
+
+- **Constants:** If inlining always replaced a parameter by its argument
+ when the value is constant, some programs would no longer build
+ because checks previously done at run time would happen at compile time.
+ For example `func index(s string, i int) byte { return s[i] }`
+ is a valid function, but if inlining were to replace the call `index("abc", 3)`
+ by the expression `"abc"[3]`, the compiler will report that the
+ index `3` is out of bounds for the string `"abc"`.
+ The inliner will prevent substitution of parameters by problematic
+ constant arguments, again introducing a `var` declaration instead.
+
+- **Referential integrity:** When a parameter variable is replaced by
+ its argument expression, we must ensure that any names in the
+ argument expression continue to refer to the same thing---not to a
+ different declaration in the callee function body that happens to
+ use the same name! The inliner must replace local references such as
+ `Printf` by qualified references such as `fmt.Printf`, and add an
+ import of package `fmt` as needed.
+
+- **Implicit conversions:** When passing an argument to a function, it
+ is implicitly converted to the parameter type.
+ If we eliminate the parameter variable, we don't want to
+ lose the conversion as it may be important.
+ For example, in `func f(x any) { y := x; fmt.Printf("%T", &y) }` the
+ type of variable y is `any`, so the program prints `"*interface{}"`.
+ But if inlining the call `f(1)` were to produce the statement `y :=
+ 1`, then the type of y would have changed to `int`, which could
+ cause a compile error or, as in this case, a bug, as the program
+ now prints `"*int"`. When the inliner substitutes a parameter variable
+ by its argument value, it may need to introduce explicit conversions
+ of each value to the original parameter type, such as `y := any(1)`.
+
+- **Last reference:** When an argument expression has no effects
+ and its corresponding parameter is never used, the expression
+ may be eliminated. However, if the expression contains the last
+ reference to a local variable at the caller, this may cause a compile
+ error because the variable is now unused! So the inliner must be
+ cautious about eliminating references to local variables.
+
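+As a concrete illustration of the "Effects" rule above, here is a
+minimal, hypothetical sketch (`twice`, `g`, and the exact shape of the
+output are made up for illustration; the code gopls actually emits may
+differ):
+
+```go
+package example
+
+// twice is the callee from the "Effects" rule above.
+func twice(x int) int { return x + x }
+
+// g has an observable side effect, so it must run exactly once.
+var calls int
+
+func g() int { calls++; return calls }
+
+// inlined shows the idea: the call twice(g()) is not rewritten to
+// g() + g(), which would run g twice. Instead the argument is bound
+// once, roughly like this:
+func inlined() int {
+	var x int = g()
+	return x + x
+}
+```
+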
+This is just a taste of the problem domain. If you're curious, the
+documentation for [golang.org/x/tools/internal/refactor/inline](https://pkg.go.dev/golang.org/x/tools/internal/refactor/inline) has
+more detail. All of this is to say, it's a complex problem, and we aim
+for correctness first of all. We've already implemented a number of
+important "tidiness optimizations" and we expect more to follow.
+
+Please give the inliner a try, and if you find any bugs (where the
+transformation is incorrect), please do report them. We'd also like to
+hear what "optimizations" you'd like to see next.
diff --git a/gopls/go.mod b/gopls/go.mod
index ee778bd..7312a0b 100644
--- a/gopls/go.mod
+++ b/gopls/go.mod
@@ -26,3 +26,5 @@
golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 // indirect
)
+
+replace golang.org/x/tools => ../
diff --git a/gopls/go.sum b/gopls/go.sum
index b3a077b..0b35591 100644
--- a/gopls/go.sum
+++ b/gopls/go.sum
@@ -28,23 +28,30 @@
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 h1:2O2DON6y3XMJiQRAS1UWU+54aec2uopH3x7MAiqGW6Y=
golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY=
golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/telemetry v0.0.0-20231011160506-788d5629a052 h1:1baVNneD/IRxmu8JQdBuki78zUqBtZxq8smZXQj0X2Y=
golang.org/x/telemetry v0.0.0-20231011160506-788d5629a052/go.mod h1:6p4ScoNeC2dhpQ1nSSMmkZ7mEj5JQUSCyc0uExBp5T4=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.14.1-0.20231011191651-895b4eda3179 h1:kBkfilXKDhpSZzc4N6Q8OCtlPrzZbu1b1aczWgZNp0w=
-golang.org/x/tools v0.14.1-0.20231011191651-895b4eda3179/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
golang.org/x/vuln v1.0.1 h1:KUas02EjQK5LTuIx1OylBQdKKZ9jeugs+HiqO5HormU=
golang.org/x/vuln v1.0.1/go.mod h1:bb2hMwln/tqxg32BNY4CcxHWtHXuYa3SbIBmtsyjxtM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/gopls/internal/lsp/analysis/unusedparams/cmd/main.go b/gopls/internal/lsp/analysis/unusedparams/cmd/main.go
new file mode 100644
index 0000000..fafb126
--- /dev/null
+++ b/gopls/internal/lsp/analysis/unusedparams/cmd/main.go
@@ -0,0 +1,13 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The unusedparams command runs the unusedparams analyzer.
+package main
+
+import (
+ "golang.org/x/tools/go/analysis/singlechecker"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams"
+)
+
+func main() { singlechecker.Main(unusedparams.Analyzer) }
diff --git a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
index e0ef5ef..64702b2 100644
--- a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
+++ b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
@@ -28,11 +28,20 @@
- functions in test files
- functions with empty bodies or those with just a return stmt`
-var Analyzer = &analysis.Analyzer{
- Name: "unusedparams",
- Doc: Doc,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
- Run: run,
+var (
+ Analyzer = &analysis.Analyzer{
+ Name: "unusedparams",
+ Doc: Doc,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+ }
+ inspectLits bool
+ inspectWrappers bool
+)
+
+func init() {
+ Analyzer.Flags.BoolVar(&inspectLits, "lits", true, "inspect function literals")
+ Analyzer.Flags.BoolVar(&inspectWrappers, "wrappers", false, "inspect functions whose body consists of a single return statement")
}
type paramData struct {
@@ -45,7 +54,9 @@
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{
(*ast.FuncDecl)(nil),
- (*ast.FuncLit)(nil),
+ }
+ if inspectLits {
+ nodeFilter = append(nodeFilter, (*ast.FuncLit)(nil))
}
inspect.Preorder(nodeFilter, func(n ast.Node) {
@@ -62,6 +73,7 @@
if f.Recv != nil {
return
}
+
// Ignore functions in _test.go files to reduce false positives.
if file := pass.Fset.File(n.Pos()); file != nil && strings.HasSuffix(file.Name(), "_test.go") {
return
@@ -76,8 +88,10 @@
switch expr := body.List[0].(type) {
case *ast.ReturnStmt:
- // Ignore functions that only contain a return statement to reduce false positives.
- return
+ if !inspectWrappers {
+ // Ignore functions that only contain a return statement to reduce false positives.
+ return
+ }
case *ast.ExprStmt:
callExpr, ok := expr.X.(*ast.CallExpr)
if !ok || len(body.List) > 1 {
diff --git a/gopls/internal/lsp/cache/analysis.go b/gopls/internal/lsp/cache/analysis.go
index dfb0995..0466af3 100644
--- a/gopls/internal/lsp/cache/analysis.go
+++ b/gopls/internal/lsp/cache/analysis.go
@@ -1201,7 +1201,7 @@
// by "deep" export data. Better still, use a "shallow" approach.
// Read and decode analysis facts for each direct import.
- factset, err := pkg.factsDecoder.Decode(true, func(pkgPath string) ([]byte, error) {
+ factset, err := pkg.factsDecoder.Decode(func(pkgPath string) ([]byte, error) {
if !hasFacts {
return nil, nil // analyzer doesn't use facts, so no vdeps
}
@@ -1343,7 +1343,7 @@
panic(fmt.Sprintf("%v: Pass.ExportPackageFact(%T) called after Run", act, fact))
}
- factsdata := factset.Encode(true)
+ factsdata := factset.Encode()
return result, &actionSummary{
Diagnostics: diagnostics,
Facts: factsdata,
diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go
index e0be99b..a95c439 100644
--- a/gopls/internal/lsp/cache/check.go
+++ b/gopls/internal/lsp/cache/check.go
@@ -1487,6 +1487,7 @@
return pkg, nil
}
+// TODO(golang/go#63472): this looks wrong with the new Go version syntax.
var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
func doTypeCheck(ctx context.Context, b *typeCheckBatch, inputs typeCheckInputs) (*syntaxPackage, error) {
diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go
index 555131e..d756748 100644
--- a/gopls/internal/lsp/code_action.go
+++ b/gopls/internal/lsp/code_action.go
@@ -430,6 +430,26 @@
rerr = bug.Errorf("refactor.rewrite code actions panicked: %v", r)
}
}()
+
+ var actions []protocol.CodeAction
+
+ if canRemoveParameter(pkg, pgf, rng) {
+ cmd, err := command.NewChangeSignatureCommand("remove unused parameter", command.ChangeSignatureArgs{
+ RemoveParameter: protocol.Location{
+ URI: protocol.URIFromSpanURI(pgf.URI),
+ Range: rng,
+ },
+ })
+ if err != nil {
+ return nil, err
+ }
+ actions = append(actions, protocol.CodeAction{
+ Title: "Refactor: remove unused parameter",
+ Kind: protocol.RefactorRewrite,
+ Command: &cmd,
+ })
+ }
+
start, end, err := pgf.RangePos(rng)
if err != nil {
return nil, err
@@ -471,7 +491,6 @@
}
}
- var actions []protocol.CodeAction
for i := range commands {
actions = append(actions, protocol.CodeAction{
Title: commands[i].Title,
@@ -510,6 +529,43 @@
return actions, nil
}
+// canRemoveParameter reports whether we can remove the function parameter
+// indicated by the given [start, end) range.
+//
+// This is true if:
+// - [start, end) is contained within an unused field or parameter name
+// - ... of a non-method function declaration.
+func canRemoveParameter(pkg source.Package, pgf *source.ParsedGoFile, rng protocol.Range) bool {
+ info := source.FindParam(pgf, rng)
+ if info.Decl == nil || info.Field == nil {
+ return false
+ }
+
+ if len(info.Field.Names) == 0 {
+ return true // no names => field is unused
+ }
+ if info.Name == nil {
+ return false // no name is indicated
+ }
+ if info.Name.Name == "_" {
+ return true // trivially unused
+ }
+
+ obj := pkg.GetTypesInfo().Defs[info.Name]
+ if obj == nil {
+ return false // something went wrong
+ }
+
+ used := false
+ ast.Inspect(info.Decl.Body, func(node ast.Node) bool {
+ if n, ok := node.(*ast.Ident); ok && pkg.GetTypesInfo().Uses[n] == obj {
+ used = true
+ }
+ return !used // keep going until we find a use
+ })
+ return !used
+}
+
// refactorInline returns inline actions available at the specified range.
func refactorInline(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, fh source.FileHandle, rng protocol.Range) ([]protocol.CodeAction, error) {
var commands []protocol.Command
diff --git a/gopls/internal/lsp/command.go b/gopls/internal/lsp/command.go
index f4d4a9e..8fe1ec8 100644
--- a/gopls/internal/lsp/command.go
+++ b/gopls/internal/lsp/command.go
@@ -1210,3 +1210,25 @@
event.Log(ctx, fmt.Sprintf("client declined to open document %v", url))
}
}
+
+func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) error {
+ return c.run(ctx, commandConfig{
+ forURI: args.RemoveParameter.URI,
+ }, func(ctx context.Context, deps commandDeps) error {
+ // For now, gopls only supports removing unused parameters.
+ changes, err := source.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot)
+ if err != nil {
+ return err
+ }
+ r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{
+ Edit: protocol.WorkspaceEdit{
+ DocumentChanges: changes,
+ },
+ })
+ if err != nil {
+ return err
+ }
+ if !r.Applied {
+ return fmt.Errorf("failed to apply edits: %v", r.FailureReason)
+ }
+
+ return nil
+ })
+}
diff --git a/gopls/internal/lsp/command/command_gen.go b/gopls/internal/lsp/command/command_gen.go
index 5dd2a9d..e54030c 100644
--- a/gopls/internal/lsp/command/command_gen.go
+++ b/gopls/internal/lsp/command/command_gen.go
@@ -23,6 +23,7 @@
AddImport Command = "add_import"
AddTelemetryCounters Command = "add_telemetry_counters"
ApplyFix Command = "apply_fix"
+ ChangeSignature Command = "change_signature"
CheckUpgrades Command = "check_upgrades"
EditGoDirective Command = "edit_go_directive"
FetchVulncheckResult Command = "fetch_vulncheck_result"
@@ -56,6 +57,7 @@
AddImport,
AddTelemetryCounters,
ApplyFix,
+ ChangeSignature,
CheckUpgrades,
EditGoDirective,
FetchVulncheckResult,
@@ -110,6 +112,12 @@
return nil, err
}
return nil, s.ApplyFix(ctx, a0)
+ case "gopls.change_signature":
+ var a0 ChangeSignatureArgs
+ if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+ return nil, err
+ }
+ return nil, s.ChangeSignature(ctx, a0)
case "gopls.check_upgrades":
var a0 CheckUpgradesArgs
if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -308,6 +316,18 @@
}, nil
}
+func NewChangeSignatureCommand(title string, a0 ChangeSignatureArgs) (protocol.Command, error) {
+ args, err := MarshalArgs(a0)
+ if err != nil {
+ return protocol.Command{}, err
+ }
+ return protocol.Command{
+ Title: title,
+ Command: "gopls.change_signature",
+ Arguments: args,
+ }, nil
+}
+
func NewCheckUpgradesCommand(title string, a0 CheckUpgradesArgs) (protocol.Command, error) {
args, err := MarshalArgs(a0)
if err != nil {
diff --git a/gopls/internal/lsp/command/interface.go b/gopls/internal/lsp/command/interface.go
index c3bd921..066f16f 100644
--- a/gopls/internal/lsp/command/interface.go
+++ b/gopls/internal/lsp/command/interface.go
@@ -201,6 +201,12 @@
// the user to ask if they want to enable Go telemetry uploading. If the user
// responds 'Yes', the telemetry mode is set to "on".
MaybePromptForTelemetry(context.Context) error
+
+ // ChangeSignature: performs a "change signature" refactoring.
+ //
+ // This command is experimental, currently only supporting parameter removal.
+ // Its signature will certainly change in the future (pun intended).
+ ChangeSignature(context.Context, ChangeSignatureArgs) error
}
type RunTestsArgs struct {
@@ -519,3 +525,8 @@
Names []string // Name of counters.
Values []int64 // Values added to the corresponding counters. Must be non-negative.
}
+
+// ChangeSignatureArgs specifies a "change signature" refactoring to perform.
+type ChangeSignatureArgs struct {
+ RemoveParameter protocol.Location
+}
diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go
index f6c1d4c..928f77d 100644
--- a/gopls/internal/lsp/regtest/marker.go
+++ b/gopls/internal/lsp/regtest/marker.go
@@ -1001,6 +1001,9 @@
// ...followed by any new golden files.
var newGoldenFiles []txtar.File
for filename, data := range updatedGolden {
+ // TODO(rfindley): it looks like this implicitly removes trailing newlines
+ // from golden content. Is there any way to fix that? Perhaps we should
+ // just make the diff tolerant of missing newlines?
newGoldenFiles = append(newGoldenFiles, txtar.File{Name: filename, Data: data})
}
// Sort new golden files lexically.
@@ -2047,7 +2050,7 @@
Command: action.Command.Command,
Arguments: action.Command.Arguments,
}); err != nil {
- env.T.Fatalf("error converting command %q to edits: %v", action.Command.Command, err)
+ return nil, err
}
if err := applyDocumentChanges(env, env.Awaiter.takeDocumentChanges(), fileChanges); err != nil {
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
index 0fd6b07..0ad9f1d 100644
--- a/gopls/internal/lsp/source/api_json.go
+++ b/gopls/internal/lsp/source/api_json.go
@@ -734,6 +734,12 @@
ArgDoc: "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
},
{
+ Command: "gopls.change_signature",
+ Title: "performs a \"change signature\" refactoring.",
+ Doc: "This command is experimental, currently only supporting parameter removal.\nIts signature will certainly change in the future (pun intended).",
+ ArgDoc: "{\n\t\"RemoveParameter\": {\n\t\t\"uri\": string,\n\t\t\"range\": {\n\t\t\t\"start\": { ... },\n\t\t\t\"end\": { ... },\n\t\t},\n\t},\n}",
+ },
+ {
Command: "gopls.check_upgrades",
Title: "Check for upgrades",
Doc: "Checks for module upgrades.",
diff --git a/gopls/internal/lsp/source/change_signature.go b/gopls/internal/lsp/source/change_signature.go
new file mode 100644
index 0000000..8dfd013
--- /dev/null
+++ b/gopls/internal/lsp/source/change_signature.go
@@ -0,0 +1,572 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "regexp"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/bug"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/imports"
+ internalastutil "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/refactor/inline"
+ "golang.org/x/tools/internal/tokeninternal"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// RemoveUnusedParameter computes a refactoring to remove the parameter
+// indicated by the given range, which must be contained within an unused
+// parameter name or field.
+//
+// This operation is a work in progress. Remaining TODO:
+// - Handle function assignment correctly.
+// - Improve the extra newlines in output.
+// - Stream type checking via ForEachPackage.
+// - Avoid unnecessary additional type checking.
+func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Range, snapshot Snapshot) ([]protocol.DocumentChanges, error) {
+ pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI())
+ if err != nil {
+ return nil, err
+ }
+ if perrors, terrors := pkg.GetParseErrors(), pkg.GetTypeErrors(); len(perrors) > 0 || len(terrors) > 0 {
+ var sample string
+ if len(perrors) > 0 {
+ sample = perrors[0].Error()
+ } else {
+ sample = terrors[0].Error()
+ }
+ return nil, fmt.Errorf("can't change signatures for packages with parse or type errors: (e.g. %s)", sample)
+ }
+
+ info := FindParam(pgf, rng)
+ if info.Decl == nil {
+ return nil, fmt.Errorf("failed to find declaration")
+ }
+ if info.Decl.Recv != nil {
+ return nil, fmt.Errorf("can't change signature of methods (yet)")
+ }
+ if info.Field == nil {
+ return nil, fmt.Errorf("failed to find field")
+ }
+
+ // Create the new declaration, which is a copy of the original decl with the
+ // unnecessary parameter removed.
+ newDecl := internalastutil.CloneNode(info.Decl)
+ if info.Name != nil {
+ names := remove(newDecl.Type.Params.List[info.FieldIndex].Names, info.NameIndex)
+ newDecl.Type.Params.List[info.FieldIndex].Names = names
+ }
+ if len(newDecl.Type.Params.List[info.FieldIndex].Names) == 0 {
+ // Unnamed, or final name was removed: in either case, remove the field.
+ newDecl.Type.Params.List = remove(newDecl.Type.Params.List, info.FieldIndex)
+ }
+
+ // Compute inputs into building a wrapper function around the modified
+ // signature.
+ var (
+ params = internalastutil.CloneNode(info.Decl.Type.Params) // "_" names will be modified
+ args []ast.Expr // arguments to delegate
+ variadic = false // whether the signature is variadic
+ )
+ {
+ allNames := make(map[string]bool) // for renaming blanks
+ for _, fld := range params.List {
+ for _, n := range fld.Names {
+ if n.Name != "_" {
+ allNames[n.Name] = true
+ }
+ }
+ }
+ blanks := 0
+ for i, fld := range params.List {
+ for j, n := range fld.Names {
+ if i == info.FieldIndex && j == info.NameIndex {
+ continue
+ }
+ if n.Name == "_" {
+ // Create names for blank (_) parameters so the delegating wrapper
+ // can refer to them.
+ for {
+ newName := fmt.Sprintf("blank%d", blanks)
+ blanks++
+ if !allNames[newName] {
+ n.Name = newName
+ break
+ }
+ }
+ }
+ args = append(args, &ast.Ident{Name: n.Name})
+ if i == len(params.List)-1 {
+ _, variadic = fld.Type.(*ast.Ellipsis)
+ }
+ }
+ }
+ }
+
+ // Rewrite all referring calls.
+ newContent, err := rewriteCalls(ctx, signatureRewrite{
+ snapshot: snapshot,
+ pkg: pkg,
+ pgf: pgf,
+ origDecl: info.Decl,
+ newDecl: newDecl,
+ params: params,
+ callArgs: args,
+ variadic: variadic,
+ })
+ if err != nil {
+ return nil, err
+ }
+ // Finally, rewrite the original declaration. We do this after inlining all
+ // calls, as there may be calls in the same file as the declaration. But none
+ // of the inlining should have changed the location of the original
+ // declaration.
+ {
+ idx := findDecl(pgf.File, info.Decl)
+ if idx < 0 {
+ return nil, bug.Errorf("didn't find original decl")
+ }
+
+ src, ok := newContent[pgf.URI]
+ if !ok {
+ src = pgf.Src
+ }
+ fset := tokeninternal.FileSetFor(pgf.Tok)
+ src, err = rewriteSignature(fset, idx, src, newDecl)
+ if err != nil {
+ return nil, err
+ }
+ newContent[pgf.URI] = src
+ }
+
+ // Translate the resulting state into document changes.
+ var changes []protocol.DocumentChanges
+ for uri, after := range newContent {
+ fh, err := snapshot.ReadFile(ctx, uri)
+ if err != nil {
+ return nil, err
+ }
+ before, err := fh.Content()
+ if err != nil {
+ return nil, err
+ }
+ edits := diff.Bytes(before, after)
+ mapper := protocol.NewMapper(uri, before)
+ pedits, err := ToProtocolEdits(mapper, edits)
+ if err != nil {
+ return nil, fmt.Errorf("computing edits for %s: %v", uri, err)
+ }
+ changes = append(changes, protocol.DocumentChanges{
+ TextDocumentEdit: &protocol.TextDocumentEdit{
+ TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
+ Version: fh.Version(),
+ TextDocumentIdentifier: protocol.TextDocumentIdentifier{URI: protocol.URIFromSpanURI(uri)},
+ },
+ Edits: pedits,
+ },
+ })
+ }
+ return changes, nil
+}
+
+// rewriteSignature rewrites the signature of the declIdx'th declaration in src
+// to use the signature of newDecl (described by fset).
+//
+// TODO(rfindley): I think this operation could be generalized, for example by
+// using a concept of a 'nodepath' to correlate nodes between two related
+// files.
+//
+// Note that with its current application, rewriteSignature is expected to
+// succeed. Separate bug.Errorf calls are used below (rather than one call at
+// the callsite) in order to have greater precision.
+func rewriteSignature(fset *token.FileSet, declIdx int, src0 []byte, newDecl *ast.FuncDecl) ([]byte, error) {
+ // Parse the new file0 content, to locate the original params.
+ file0, err := parser.ParseFile(fset, "", src0, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, bug.Errorf("re-parsing declaring file failed: %v", err)
+ }
+ decl0, _ := file0.Decls[declIdx].(*ast.FuncDecl)
+ // Inlining shouldn't have changed the location of any declarations, but do
+ // a sanity check.
+ if decl0 == nil || decl0.Name.Name != newDecl.Name.Name {
+ return nil, bug.Errorf("inlining affected declaration order: found %v, not func %s", decl0, newDecl.Name.Name)
+ }
+ opening0, closing0, err := safetoken.Offsets(fset.File(decl0.Pos()), decl0.Type.Params.Opening, decl0.Type.Params.Closing)
+ if err != nil {
+ return nil, bug.Errorf("can't find params: %v", err)
+ }
+
+ // Format the modified signature and apply a textual replacement. This
+ // minimizes comment disruption.
+ formattedType := FormatNode(fset, newDecl.Type)
+ expr, err := parser.ParseExprFrom(fset, "", []byte(formattedType), 0)
+ if err != nil {
+ return nil, bug.Errorf("parsing modified signature: %v", err)
+ }
+ newType := expr.(*ast.FuncType)
+ opening1, closing1, err := safetoken.Offsets(fset.File(newType.Pos()), newType.Params.Opening, newType.Params.Closing)
+ if err != nil {
+ return nil, bug.Errorf("param offsets: %v", err)
+ }
+ newParams := formattedType[opening1 : closing1+1]
+
+ // Splice.
+ var buf bytes.Buffer
+ buf.Write(src0[:opening0])
+ buf.WriteString(newParams)
+ buf.Write(src0[closing0+1:])
+ newSrc := buf.Bytes()
+ if len(file0.Imports) > 0 {
+ formatted, err := imports.Process("output", newSrc, nil)
+ if err != nil {
+ return nil, bug.Errorf("imports.Process failed: %v", err)
+ }
+ newSrc = formatted
+ }
+ return newSrc, nil
+}
+
+// ParamInfo records information about a param identified by a position.
+type ParamInfo struct {
+ Decl *ast.FuncDecl // enclosing func decl, or nil
+ FieldIndex int // index of Field in Decl.Type.Params, or -1
+ Field *ast.Field // enclosing field of Decl, or nil
+ NameIndex int // index of Name in Field.Names, or -1
+ Name *ast.Ident // indicated name (either enclosing, or Field.Names[0] if len(Field.Names) == 1)
+}
+
+// FindParam finds the parameter information spanned by the given range.
+func FindParam(pgf *ParsedGoFile, rng protocol.Range) ParamInfo {
+ info := ParamInfo{FieldIndex: -1, NameIndex: -1}
+ start, end, err := pgf.RangePos(rng)
+ if err != nil {
+ bug.Reportf("(file=%v).RangePos(%v) failed: %v", pgf.URI, rng, err)
+ return info
+ }
+
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ var (
+ id *ast.Ident
+ field *ast.Field
+ decl *ast.FuncDecl
+ )
+ // Find the outermost enclosing node of each kind, whether or not they match
+ // the semantics described in the docstring.
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.Ident:
+ id = n
+ case *ast.Field:
+ field = n
+ case *ast.FuncDecl:
+ decl = n
+ }
+ }
+ // Check the conditions described in the docstring.
+ if decl == nil {
+ return info
+ }
+ info.Decl = decl
+ for fi, f := range decl.Type.Params.List {
+ if f == field {
+ info.FieldIndex = fi
+ info.Field = f
+ for ni, n := range f.Names {
+ if n == id {
+ info.NameIndex = ni
+ info.Name = n
+ break
+ }
+ }
+ if info.Name == nil && len(info.Field.Names) == 1 {
+ info.NameIndex = 0
+ info.Name = info.Field.Names[0]
+ }
+ break
+ }
+ }
+ return info
+}
+
+// signatureRewrite defines a rewritten function signature.
+//
+// See rewriteCalls for more details.
+type signatureRewrite struct {
+ snapshot Snapshot
+ pkg Package
+ pgf *ParsedGoFile
+ origDecl, newDecl *ast.FuncDecl
+ params *ast.FieldList
+ callArgs []ast.Expr
+ variadic bool
+}
+
+// rewriteCalls returns the document changes required to rewrite the
+// signature of origDecl to that of newDecl.
+//
+// This is a rather complicated factoring of the rewrite operation, but is able
+// to describe arbitrary rewrites. Specifically, rewriteCalls creates a
+// synthetic copy of pkg, where the original function declaration is changed to
+// be a trivial wrapper around the new declaration. params and callArgs are
+// used to perform this delegation: params must have the same type as origDecl,
+// but may have renamed parameters (such as is required for delegating blank
+// parameters). callArgs are the arguments of the delegated call (i.e. using
+// params).
+//
+// For example, consider removing the unused 'b' parameter below, rewriting
+//
+// func Foo(a, b, c, _ int) int {
+// return a+c
+// }
+//
+// To
+//
+// func Foo(a, c, _ int) int {
+// return a+c
+// }
+//
+// In this case, rewriteCalls is parameterized as follows:
+// - origDecl is the original declaration
+// - newDecl is the new declaration, which is a copy of origDecl less the 'b'
+// parameter.
+// - params is a new parameter list (a, b, c, blank0 int) to be used for the
+// new wrapper.
+// - callArgs is the argument list (a, c, blank0), to be used to call the new
+// delegate.
+//
+// rewriting is expressed this way so that rewriteCalls can own the details
+// of *how* this rewriting is performed. For example, as of writing it names
+// the synthetic delegate G_o_p_l_s_foo, but the caller need not know this.
+//
+// By passing an entirely new declaration, rewriteCalls may be used for
+// signature refactorings that may affect the function body, such as removing
+// or adding return values.
+func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte, error) {
+ // tag is a unique prefix that is added to the delegated declaration.
+ //
+ // It must have a ~0% probability of causing collisions with existing names.
+ const tag = "G_o_p_l_s_"
+
+ var (
+ modifiedSrc []byte
+ modifiedFile *ast.File
+ modifiedDecl *ast.FuncDecl
+ )
+ {
+ delegate := internalastutil.CloneNode(rw.newDecl) // clone before modifying
+ delegate.Name.Name = tag + delegate.Name.Name
+ if obj := rw.pkg.GetTypes().Scope().Lookup(delegate.Name.Name); obj != nil {
+ return nil, fmt.Errorf("synthetic name %q conflicts with an existing declaration", delegate.Name.Name)
+ }
+
+ wrapper := internalastutil.CloneNode(rw.origDecl)
+ wrapper.Type.Params = rw.params
+ call := &ast.CallExpr{
+ Fun: &ast.Ident{Name: delegate.Name.Name},
+ Args: rw.callArgs,
+ }
+ if rw.variadic {
+ call.Ellipsis = 1 // must not be token.NoPos
+ }
+
+ var stmt ast.Stmt
+ if delegate.Type.Results.NumFields() > 0 {
+ stmt = &ast.ReturnStmt{
+ Results: []ast.Expr{call},
+ }
+ } else {
+ stmt = &ast.ExprStmt{
+ X: call,
+ }
+ }
+ wrapper.Body = &ast.BlockStmt{
+ List: []ast.Stmt{stmt},
+ }
+
+ fset := tokeninternal.FileSetFor(rw.pgf.Tok)
+ var err error
+ modifiedSrc, err = replaceFileDecl(rw.pgf, rw.origDecl, delegate)
+ if err != nil {
+ return nil, err
+ }
+		// TODO(rfindley): we can probably get away with one fewer parse operation
+		// by returning the modified AST from replaceFileDecl. Investigate whether
+		// that is accurate.
+ modifiedSrc = append(modifiedSrc, []byte("\n\n"+FormatNode(fset, wrapper))...)
+ modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Filename(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, err
+ }
+ modifiedDecl = modifiedFile.Decls[len(modifiedFile.Decls)-1].(*ast.FuncDecl)
+ }
+
+ // Type check pkg again with the modified file, to compute the synthetic
+ // callee.
+ logf := logger(ctx, "change signature", rw.snapshot.Options().VerboseOutput)
+ pkg2, info, err := reTypeCheck(logf, rw.pkg, map[span.URI]*ast.File{rw.pgf.URI: modifiedFile}, false)
+ if err != nil {
+ return nil, err
+ }
+ calleeInfo, err := inline.AnalyzeCallee(logf, rw.pkg.FileSet(), pkg2, info, modifiedDecl, modifiedSrc)
+ if err != nil {
+ return nil, fmt.Errorf("analyzing callee: %v", err)
+ }
+
+ post := func(got []byte) []byte { return bytes.ReplaceAll(got, []byte(tag), nil) }
+ return inlineAllCalls(ctx, logf, rw.snapshot, rw.pkg, rw.pgf, rw.origDecl, calleeInfo, post)
+}
+
+// reTypeCheck re-type checks orig with new file contents defined by fileMask.
+//
+// It expects that any newly added imports are already present in the
+// transitive imports of orig.
+//
+// If expectErrors is true, reTypeCheck allows errors in the new package.
+// TODO(rfindley): perhaps this should be a filter to specify which errors are
+// acceptable.
+func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) {
+ pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name))
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Instances: make(map[*ast.Ident]types.Instance),
+ }
+ {
+ var files []*ast.File
+ for _, pgf := range orig.CompiledGoFiles() {
+ if mask, ok := fileMask[pgf.URI]; ok {
+ files = append(files, mask)
+ } else {
+ files = append(files, pgf.File)
+ }
+ }
+
+ // Implement a BFS for imports in the transitive package graph.
+ //
+ // Note that this only works if any newly added imports are expected to be
+ // present among transitive imports. In general we cannot assume this to
+ // be the case, but in the special case of removing a parameter it works
+ // because any parameter types must be present in export data.
+ var importer func(importPath string) (*types.Package, error)
+ {
+ var (
+ importsByPath = make(map[string]*types.Package) // cached imports
+ toSearch = []*types.Package{orig.GetTypes()} // packages to search
+ searched = make(map[string]bool) // path -> (false, if present in toSearch; true, if already searched)
+ )
+ importer = func(path string) (*types.Package, error) {
+ if p, ok := importsByPath[path]; ok {
+ return p, nil
+ }
+ for len(toSearch) > 0 {
+ pkg := toSearch[0]
+ toSearch = toSearch[1:]
+ searched[pkg.Path()] = true
+ for _, p := range pkg.Imports() {
+ // TODO(rfindley): this is incorrect: p.Path() is a package path,
+ // whereas path is an import path. We can fix this by reporting any
+ // newly added imports from inlining, or by using the ImporterFrom
+ // interface and package metadata.
+ //
+ // TODO(rfindley): can't the inliner also be wrong here? It's
+ // possible that an import path means different things depending on
+ // the location.
+ importsByPath[p.Path()] = p
+ if _, ok := searched[p.Path()]; !ok {
+ searched[p.Path()] = false
+ toSearch = append(toSearch, p)
+ }
+ }
+ if p, ok := importsByPath[path]; ok {
+ return p, nil
+ }
+ }
+ return nil, fmt.Errorf("missing import")
+ }
+ }
+ cfg := &types.Config{
+ Sizes: orig.Metadata().TypesSizes,
+ Importer: ImporterFunc(importer),
+ }
+
+ // Copied from cache/check.go.
+ // TODO(rfindley): factor this out and fix goVersionRx.
+ // Set Go dialect.
+ if module := orig.Metadata().Module; module != nil && module.GoVersion != "" {
+ goVersion := "go" + module.GoVersion
+ // types.NewChecker panics if GoVersion is invalid.
+ // An unparsable mod file should probably stop us
+ // before we get here, but double check just in case.
+ if goVersionRx.MatchString(goVersion) {
+ typesinternal.SetGoVersion(cfg, goVersion)
+ }
+ }
+ if expectErrors {
+ cfg.Error = func(err error) {
+ logf("re-type checking: expected error: %v", err)
+ }
+ }
+ typesinternal.SetUsesCgo(cfg)
+ checker := types.NewChecker(cfg, orig.FileSet(), pkg, info)
+ if err := checker.Files(files); err != nil && !expectErrors {
+ return nil, nil, fmt.Errorf("type checking rewritten package: %v", err)
+ }
+ }
+ return pkg, info, nil
+}
+
+// TODO(golang/go#63472): this looks wrong with the new Go version syntax.
+var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
+
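+// remove returns s with the element at index i removed, preserving order.
+// Like any append-based deletion, it may overwrite elements of the backing
+// array of s.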
+func remove[T any](s []T, i int) []T {
+ return append(s[:i], s[i+1:]...)
+}
+
+// replaceFileDecl replaces old with new in the file described by pgf.
+//
+// TODO(rfindley): generalize, and combine with rewriteSignature.
+func replaceFileDecl(pgf *ParsedGoFile, old, new ast.Decl) ([]byte, error) {
+ i := findDecl(pgf.File, old)
+ if i == -1 {
+ return nil, bug.Errorf("didn't find old declaration")
+ }
+ start, end, err := safetoken.Offsets(pgf.Tok, old.Pos(), old.End())
+ if err != nil {
+ return nil, err
+ }
+ var out bytes.Buffer
+ out.Write(pgf.Src[:start])
+ fset := tokeninternal.FileSetFor(pgf.Tok)
+ if err := format.Node(&out, fset, new); err != nil {
+ return nil, bug.Errorf("formatting new node: %v", err)
+ }
+ out.Write(pgf.Src[end:])
+ return out.Bytes(), nil
+}
+
+// findDecl finds the index of decl in file.Decls.
+//
+// TODO: use slices.Index when it is available.
+func findDecl(file *ast.File, decl ast.Decl) int {
+ for i, d := range file.Decls {
+ if d == decl {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/gopls/internal/lsp/source/inline.go b/gopls/internal/lsp/source/inline.go
index 4e6e16f..da3e8e5 100644
--- a/gopls/internal/lsp/source/inline.go
+++ b/gopls/internal/lsp/source/inline.go
@@ -106,9 +106,7 @@
// Users can consult the gopls event log to see
// why a particular inlining strategy was chosen.
- logf := func(format string, args ...any) {
- event.Log(ctx, "inliner: "+fmt.Sprintf(format, args...))
- }
+ logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput)
callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.GetTypes(), calleePkg.GetTypesInfo(), calleeDecl, calleePGF.Src)
if err != nil {
@@ -136,3 +134,14 @@
TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)),
}, nil
}
+
+// TODO(adonovan): change the inliner to instead accept an io.Writer.
+func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) {
+ if verbose {
+ return func(format string, args ...any) {
+ event.Log(ctx, name+": "+fmt.Sprintf(format, args...))
+ }
+ } else {
+ return func(string, ...any) {}
+ }
+}
diff --git a/gopls/internal/lsp/source/inline_all.go b/gopls/internal/lsp/source/inline_all.go
new file mode 100644
index 0000000..848b5f7
--- /dev/null
+++ b/gopls/internal/lsp/source/inline_all.go
@@ -0,0 +1,275 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/types"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/gopls/internal/bug"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/refactor/inline"
+)
+
+// inlineAllCalls inlines all calls to the original function declaration
+// described by callee, returning the resulting modified file content.
+//
+// Inlining everything is currently an expensive operation: it involves re-type
+// checking every package that contains a potential call, as reported by
+// References. In cases where there are multiple calls per file, inlineAllCalls
+// must type check repeatedly for each additional call.
+//
+// The provided post processing function is applied to the resulting source
+// after each transformation. This is necessary because we are using this
+// function to inline synthetic wrappers for the purpose of signature
+// rewriting. The delegated function has a fake name that doesn't exist in the
+// snapshot, and so we can't re-type check until we replace this fake name.
+//
+// TODO(rfindley): this only works because removing a parameter is a very
+// narrow operation. A better solution would be to allow for ad-hoc snapshots
+// that expose the full machinery of real snapshots: minimal invalidation,
+// batched type checking, etc. Then we could actually rewrite the declaring
+// package in this snapshot (and so 'post' would not be necessary), and could
+// robustly re-type check for the purpose of iterative inlining, even if the
+// inlined code pulls in new imports that weren't present in export data.
+//
+// The code below notes where assumptions are made that only hold true in
+// the case of parameter removal (annotated with 'Assumption:').
+func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Snapshot, pkg Package, pgf *ParsedGoFile, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte) (map[span.URI][]byte, error) {
+ // Collect references.
+ var refs []protocol.Location
+ {
+ funcPos, err := pgf.Mapper.PosPosition(pgf.Tok, origDecl.Name.NamePos)
+ if err != nil {
+ return nil, err
+ }
+ fh, err := snapshot.ReadFile(ctx, pgf.URI)
+ if err != nil {
+ return nil, err
+ }
+ refs, err = References(ctx, snapshot, fh, funcPos, false)
+ if err != nil {
+ return nil, fmt.Errorf("finding references to rewrite: %v", err)
+ }
+ }
+
+ // Type-check the narrowest package containing each reference.
+ // TODO(rfindley): we should expose forEachPackage in order to operate in
+ // parallel and to reduce peak memory for this operation.
+ var (
+ pkgForRef = make(map[protocol.Location]PackageID)
+ pkgs = make(map[PackageID]Package)
+ )
+ {
+ needPkgs := make(map[PackageID]struct{})
+ for _, ref := range refs {
+ md, err := NarrowestMetadataForFile(ctx, snapshot, ref.URI.SpanURI())
+ if err != nil {
+ return nil, fmt.Errorf("finding ref metadata: %v", err)
+ }
+ pkgForRef[ref] = md.ID
+ needPkgs[md.ID] = struct{}{}
+ }
+ var pkgIDs []PackageID
+ for id := range needPkgs { // TODO: use maps.Keys once it is available to us
+ pkgIDs = append(pkgIDs, id)
+ }
+
+ refPkgs, err := snapshot.TypeCheck(ctx, pkgIDs...)
+ if err != nil {
+ return nil, fmt.Errorf("type checking reference packages: %v", err)
+ }
+
+ for _, p := range refPkgs {
+ pkgs[p.Metadata().ID] = p
+ }
+ }
+
+ // Organize calls by top file declaration. Calls within a single file may
+ // affect each other, as the inlining edit may affect the surrounding scope
+	// or imports. Therefore, when inlining subsequent calls in the same
+ // declaration, we must re-type check.
+
+ type fileCalls struct {
+ pkg Package
+ pgf *ParsedGoFile
+ calls []*ast.CallExpr
+ }
+
+ refsByFile := make(map[span.URI]*fileCalls)
+ for _, ref := range refs {
+ refpkg := pkgs[pkgForRef[ref]]
+ pgf, err := refpkg.File(ref.URI.SpanURI())
+ if err != nil {
+ return nil, bug.Errorf("finding %s in %s: %v", ref.URI, refpkg.Metadata().ID, err)
+ }
+ start, end, err := pgf.RangePos(ref.Range)
+ if err != nil {
+ return nil, bug.Errorf("RangePos(ref): %v", err)
+ }
+
+ // Look for the surrounding call expression.
+ var (
+ name *ast.Ident
+ call *ast.CallExpr
+ )
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ name, _ = path[0].(*ast.Ident)
+ if _, ok := path[1].(*ast.SelectorExpr); ok {
+ call, _ = path[2].(*ast.CallExpr)
+ } else {
+ call, _ = path[1].(*ast.CallExpr)
+ }
+ if name == nil || call == nil {
+ // TODO(rfindley): handle this case with eta-abstraction:
+ // a reference to the target function f in a non-call position
+ // use(f)
+ // is replaced by
+ // use(func(...) { f(...) })
+ return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref)
+ }
+ // Sanity check.
+ if obj := refpkg.GetTypesInfo().ObjectOf(name); obj == nil ||
+ obj.Name() != origDecl.Name.Name ||
+ obj.Pkg() == nil ||
+ obj.Pkg().Path() != string(pkg.Metadata().PkgPath) {
+ return nil, bug.Errorf("cannot inline: corrupted reference %v", ref)
+ }
+
+ callInfo, ok := refsByFile[ref.URI.SpanURI()]
+ if !ok {
+ callInfo = &fileCalls{
+ pkg: refpkg,
+ pgf: pgf,
+ }
+ refsByFile[ref.URI.SpanURI()] = callInfo
+ }
+ callInfo.calls = append(callInfo.calls, call)
+ }
+
+ // Inline each call within the same decl in sequence, re-typechecking after
+ // each one. If there is only a single call within the decl, we can avoid
+ // additional type checking.
+ //
+ // Assumption: inlining does not affect the package scope, so we can operate
+ // on separate files independently.
+ result := make(map[span.URI][]byte)
+ for uri, callInfo := range refsByFile {
+ var (
+ calls = callInfo.calls
+ fset = callInfo.pkg.FileSet()
+ tpkg = callInfo.pkg.GetTypes()
+ tinfo = callInfo.pkg.GetTypesInfo()
+ file = callInfo.pgf.File
+ content = callInfo.pgf.Src
+ )
+
+ // Check for overlapping calls (such as Foo(Foo())). We can't handle these
+ // because inlining may change the source order of the inner call with
+ // respect to the inlined outer call, and so the heuristic we use to find
+ // the next call (counting from top-to-bottom) does not work.
+ for i := range calls {
+ if i > 0 && calls[i-1].End() > calls[i].Pos() {
+ return nil, fmt.Errorf("%s: can't inline overlapping call %s", uri, types.ExprString(calls[i-1]))
+ }
+ }
+
+ currentCall := 0
+ for currentCall < len(calls) {
+ caller := &inline.Caller{
+ Fset: fset,
+ Types: tpkg,
+ Info: tinfo,
+ File: file,
+ Call: calls[currentCall],
+ Content: content,
+ }
+ var err error
+ content, err = inline.Inline(logf, caller, callee)
+ if err != nil {
+ return nil, fmt.Errorf("inlining failed: %v", err)
+ }
+ if post != nil {
+ content = post(content)
+ }
+ if len(calls) <= 1 {
+ // No need to re-type check, as we've inlined all calls.
+ break
+ }
+
+ // TODO(rfindley): develop a theory of "trivial" inlining, which are
+ // inlinings that don't require re-type checking.
+ //
+ // In principle, if the inlining only involves replacing one call with
+ // another, the scope of the caller is unchanged and there is no need to
+ // type check again before inlining subsequent calls (edits should not
+ // overlap, and should not affect each other semantically). However, it
+ // feels sufficiently complicated that, to be safe, this optimization is
+ // deferred until later.
+
+ file, err = parser.ParseFile(fset, uri.Filename(), content, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, bug.Errorf("inlined file failed to parse: %v", err)
+ }
+
+ // After inlining one call with a removed parameter, the package will
+ // fail to type check due to "not enough arguments". Therefore, we must
+ // allow type errors here.
+ //
+ // Assumption: the resulting type errors do not affect the correctness of
+ // subsequent inlining, because invalid arguments to a call do not affect
+ // anything in the surrounding scope.
+ //
+ // TODO(rfindley): improve this.
+ tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[span.URI]*ast.File{uri: file}, true)
+ if err != nil {
+ return nil, bug.Errorf("type checking after inlining failed: %v", err)
+ }
+
+ // Collect calls to the target function in the modified declaration.
+ var calls2 []*ast.CallExpr
+ ast.Inspect(file, func(n ast.Node) bool {
+ if call, ok := n.(*ast.CallExpr); ok {
+ fn := typeutil.StaticCallee(tinfo, call)
+ if fn != nil && fn.Pkg().Path() == string(pkg.Metadata().PkgPath) && fn.Name() == origDecl.Name.Name {
+ calls2 = append(calls2, call)
+ }
+ }
+ return true
+ })
+
+ // If the number of calls has increased, this process will never cease.
+ // If the number of calls has decreased, assume that inlining removed a
+ // call.
+ // If the number of calls didn't change, assume that inlining replaced
+ // a call, and move on to the next.
+ //
+ // Assumption: we're inlining a call that has at most one recursive
+ // reference (which holds for signature rewrites).
+ //
+ // TODO(rfindley): this isn't good enough. We should be able to support
+ // inlining all existing calls even if they increase calls. How do we
+ // correlate the before and after syntax?
+ switch {
+ case len(calls2) > len(calls):
+ return nil, fmt.Errorf("inlining increased calls %d->%d, possible recursive call? content:\n%s", len(calls), len(calls2), content)
+ case len(calls2) < len(calls):
+ calls = calls2
+ case len(calls2) == len(calls):
+ calls = calls2
+ currentCall++
+ }
+ }
+
+ result[callInfo.pgf.URI] = content
+ }
+ return result, nil
+}
diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go
index ec544fc..e57a3fd 100644
--- a/gopls/internal/lsp/source/options.go
+++ b/gopls/internal/lsp/source/options.go
@@ -1181,6 +1181,7 @@
result.setBool(&o.VerboseWorkDoneProgress)
case "tempModfile":
+ result.softErrorf("gopls setting \"tempModfile\" is deprecated.\nPlease comment on https://go.dev/issue/63537 if this impacts your workflow.")
result.setBool(&o.TempModfile)
case "showBugReports":
@@ -1207,6 +1208,7 @@
result.setBool(&o.NoSemanticNumber)
case "expandWorkspaceToModule":
+ result.softErrorf("gopls setting \"expandWorkspaceToModule\" is deprecated.\nPlease comment on https://go.dev/issue/63536 if this impacts your workflow.")
result.setBool(&o.ExpandWorkspaceToModule)
case "experimentalPostfixCompletions":
@@ -1356,6 +1358,11 @@
r.Error = &SoftError{msg}
}
+// softErrorf reports a soft error related to the current option.
+func (r *OptionResult) softErrorf(format string, args ...any) {
+ r.Error = &SoftError{fmt.Sprintf(format, args...)}
+}
+
// unexpected reports that the current setting is not known to gopls.
func (r *OptionResult) unexpected() {
r.Error = fmt.Errorf("unexpected gopls setting %q", r.Name)
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
index d0ecd50..2cbce61 100644
--- a/gopls/internal/lsp/source/util.go
+++ b/gopls/internal/lsp/source/util.go
@@ -119,6 +119,8 @@
func FormatNode(fset *token.FileSet, n ast.Node) string {
var buf strings.Builder
if err := printer.Fprint(&buf, fset, n); err != nil {
+ // TODO(rfindley): we should use bug.Reportf here.
+ // We encounter this during completion.resolveInvalid.
return ""
}
return buf.String()
@@ -531,3 +533,9 @@
}
return nil
}
+
+// An ImporterFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
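+//
+// For example (illustrative; load is a hypothetical read function with the
+// same signature):
+//
+//	cfg := &types.Config{Importer: ImporterFunc(load)}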
+type ImporterFunc func(path string) (*types.Package, error)
+
+func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) }
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index f4a8192..d501e04 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -284,24 +284,44 @@
return []label.Label{tag.Snapshot.Of(snapshot.SequenceID()), tag.Directory.Of(snapshot.View().Folder())}
}
-// NarrowestPackageForFile is a convenience function that selects the
-// narrowest non-ITV package to which this file belongs, type-checks
-// it in the requested mode (full or workspace), and returns it, along
-// with the parse tree of that file.
+// NarrowestPackageForFile is a convenience function that selects the narrowest
+// non-ITV package to which this file belongs, type-checks it in the requested
+// mode (full or workspace), and returns it, along with the parse tree of that
+// file.
//
-// The "narrowest" package is the one with the fewest number of files
-// that includes the given file. This solves the problem of test
-// variants, as the test will have more files than the non-test package.
-// (Historically the preference was a parameter but widest was almost
-// never needed.)
+// The "narrowest" package is the one with the fewest number of files that
+// includes the given file. This solves the problem of test variants, as the
+// test will have more files than the non-test package.
//
-// An intermediate test variant (ITV) package has identical source
-// to a regular package but resolves imports differently.
-// gopls should never need to type-check them.
+// An intermediate test variant (ITV) package has identical source to a regular
+// package but resolves imports differently. gopls should never need to
+// type-check them.
//
-// Type-checking is expensive. Call snapshot.ParseGo if all you need
-// is a parse tree, or snapshot.MetadataForFile if you only need metadata.
+// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse
+// tree, or snapshot.MetadataForFile if you only need metadata.
func NarrowestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) {
+ return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[0] })
+}
+
+// WidestPackageForFile is a convenience function that selects the widest
+// non-ITV package to which this file belongs, type-checks it in the requested
+// mode (full or workspace), and returns it, along with the parse tree of that
+// file.
+//
+// The "widest" package is the one with the most number of files that includes
+// the given file. Which is the test variant if one exists.
+//
+// An intermediate test variant (ITV) package has identical source to a regular
+// package but resolves imports differently. gopls should never need to
+// type-check them.
+//
+// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse
+// tree, or snapshot.MetadataForFile if you only need metadata.
+func WidestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) {
+ return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[len(metas)-1] })
+}
+
+func selectPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI, selector func([]*Metadata) *Metadata) (Package, *ParsedGoFile, error) {
metas, err := snapshot.MetadataForFile(ctx, uri)
if err != nil {
return nil, nil, err
@@ -310,8 +330,8 @@
if len(metas) == 0 {
return nil, nil, fmt.Errorf("no package metadata for file %s", uri)
}
- narrowest := metas[0]
- pkgs, err := snapshot.TypeCheck(ctx, narrowest.ID)
+ md := selector(metas)
+ pkgs, err := snapshot.TypeCheck(ctx, md.ID)
if err != nil {
return nil, nil, err
}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt
new file mode 100644
index 0000000..7caa660
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt
@@ -0,0 +1,246 @@
+This test exercises the refactoring to remove unused parameters.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+func A(x, unused int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ return x
+}
+
+-- @a/a/a.go --
+package a
+
+func A(x int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ return x
+}
+
+-- a/a2.go --
+package a
+
+func _() {
+ A(1, 2)
+}
+
+-- a/a_test.go --
+package a
+
+func _() {
+ A(1, 2)
+}
+
+-- a/a_x_test.go --
+package a_test
+
+import "unused.mod/a"
+
+func _() {
+ a.A(1, 2)
+}
+
+-- b/b.go --
+package b
+
+import "unused.mod/a"
+
+func f() int {
+ return 1
+}
+
+func g() int {
+ return 2
+}
+
+func _() {
+ a.A(f(), 1)
+}
+
+-- @a/a/a2.go --
+package a
+
+func _() {
+ A(1)
+}
+-- @a/a/a_test.go --
+package a
+
+func _() {
+ A(1)
+}
+-- @a/a/a_x_test.go --
+package a_test
+
+import "unused.mod/a"
+
+func _() {
+ a.A(1)
+}
+-- @a/b/b.go --
+package b
+
+import "unused.mod/a"
+
+func f() int {
+ return 1
+}
+
+func g() int {
+ return 2
+}
+
+func _() {
+ a.A(f())
+}
+-- field/field.go --
+package field
+
+func Field(x int, field int) { //@codeaction("refactor.rewrite", "int", "int", field)
+}
+
+func _() {
+ Field(1, 2)
+}
+-- @field/field/field.go --
+package field
+
+func Field(field int) { //@codeaction("refactor.rewrite", "int", "int", field)
+}
+
+func _() {
+ Field(2)
+}
+-- ellipsis/ellipsis.go --
+package ellipsis
+
+func Ellipsis(...any) { //@codeaction("refactor.rewrite", "any", "any", ellipsis)
+}
+
+func _() {
+ // TODO(rfindley): investigate the broken formatting resulting from these inlinings.
+ Ellipsis()
+ Ellipsis(1)
+ Ellipsis(1, 2)
+ Ellipsis(1, f(), g())
+ Ellipsis(h())
+ Ellipsis(i()...)
+}
+
+func f() int
+func g() int
+func h() (int, int)
+func i() []any
+
+-- @ellipsis/ellipsis/ellipsis.go --
+package ellipsis
+
+func Ellipsis() { //@codeaction("refactor.rewrite", "any", "any", ellipsis)
+}
+
+func _() {
+ // TODO(rfindley): investigate the broken formatting resulting from these inlinings.
+ Ellipsis()
+ Ellipsis()
+ Ellipsis()
+ var _ []any = []any{1, f(), g()}
+ Ellipsis()
+ func(_ ...any) {
+ Ellipsis()
+ }(h())
+ var _ []any = i()
+ Ellipsis()
+}
+
+func f() int
+func g() int
+func h() (int, int)
+func i() []any
+-- ellipsis2/ellipsis2.go --
+package ellipsis2
+
+func Ellipsis2(_, _ int, rest ...int) { //@codeaction("refactor.rewrite", "_", "_", ellipsis2)
+}
+
+func _() {
+ Ellipsis2(1,2,3)
+ Ellipsis2(h())
+ Ellipsis2(1,2, []int{3, 4}...)
+}
+
+func h() (int, int)
+
+-- @ellipsis2/ellipsis2/ellipsis2.go --
+package ellipsis2
+
+func Ellipsis2(_ int, rest ...int) { //@codeaction("refactor.rewrite", "_", "_", ellipsis2)
+}
+
+func _() {
+ Ellipsis2(2, []int{3}...)
+ func(_, blank0 int, rest ...int) {
+ Ellipsis2(blank0, rest...)
+ }(h())
+ Ellipsis2(2, []int{3, 4}...)
+}
+
+func h() (int, int)
+-- overlapping/overlapping.go --
+package overlapping
+
+func Overlapping(i int) int { //@codeactionerr("refactor.rewrite", re"(i) int", re"(i) int", re"overlapping")
+ return 0
+}
+
+func _() {
+ x := Overlapping(Overlapping(0))
+ _ = x
+}
+
+-- effects/effects.go --
+package effects
+
+func effects(x, y int) int { //@codeaction("refactor.rewrite", "y", "y", effects)
+ return x
+}
+
+func f() int
+func g() int
+
+func _() {
+ effects(f(), g())
+ effects(f(), g())
+}
+-- @effects/effects/effects.go --
+package effects
+
+func effects(x int) int { //@codeaction("refactor.rewrite", "y", "y", effects)
+ return x
+}
+
+func f() int
+func g() int
+
+func _() {
+ var x, _ int = f(), g()
+ effects(x)
+ {
+ var x, _ int = f(), g()
+ effects(x)
+ }
+}
+-- recursive/recursive.go --
+package recursive
+
+func Recursive(x int) int { //@codeaction("refactor.rewrite", "x", "x", recursive)
+ return Recursive(1)
+}
+
+-- @recursive/recursive/recursive.go --
+package recursive
+
+func Recursive() int { //@codeaction("refactor.rewrite", "x", "x", recursive)
+ return Recursive()
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt
new file mode 100644
index 0000000..39f3ddb
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt
@@ -0,0 +1,55 @@
+This test exercises behavior of change signature refactoring with respect to
+comments.
+
+Currently, inline comments around arguments or parameters are dropped, which is
+probably acceptable. Fixing this is likely intractable without fixing comment
+representation in the AST.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+// A doc comment.
+func A(x /* used parameter */, unused int /* unused parameter */ ) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ // about to return
+ return x // returning
+ // just returned
+}
+
+// This function makes calls.
+func _() {
+ // about to call
+ A(one() /* used arg */, 2 /* unused arg */) // calling
+ // just called
+}
+
+func one() int {
+ // I should be unaffected!
+ return 1
+}
+
+-- @a/a/a.go --
+package a
+
+// A doc comment.
+func A(x int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ // about to return
+ return x // returning
+ // just returned
+}
+
+// This function makes calls.
+func _() {
+ // about to call
+ A(one()) // calling
+ // just called
+}
+
+func one() int {
+ // I should be unaffected!
+ return 1
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt
new file mode 100644
index 0000000..4173184
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt
@@ -0,0 +1,19 @@
+This test exercises change signature refactoring handling of function values.
+
+TODO(rfindley): use a literalization strategy to allow these references.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+func A(x, unused int) int { //@codeactionerr("refactor.rewrite", "unused", "unused", re"non-call function reference")
+ return x
+}
+
+func _() {
+ _ = A
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt
new file mode 100644
index 0000000..d616fe2
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt
@@ -0,0 +1,160 @@
+This test checks the behavior of removing a parameter with respect to various
+import scenarios.
+
+-- go.mod --
+module mod.test
+
+go 1.21
+
+
+-- a/a1.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a2.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a3.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a4.go --
+package a
+
+// TODO(rfindley/adonovan): inlining here adds an additional import of
+// mod.test/b. Can we do better?
+import (
+ . "mod.test/b"
+)
+
+func _() {
+ B(<-Chan, <-Chan)
+}
+
+-- b/b.go --
+package b
+
+import "mod.test/c"
+
+var Chan chan c.C
+
+func B(x, y c.C) { //@codeaction("refactor.rewrite", "x", "x", b)
+}
+
+-- c/c.go --
+package c
+
+type C int
+
+-- d/d.go --
+package d
+
+// Removing the parameter should remove this import.
+import "mod.test/c"
+
+func D(x c.C) { //@codeaction("refactor.rewrite", "x", "x", d)
+}
+
+func _() {
+ D(1)
+}
+
+-- @b/a/a1.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+}
+-- @b/a/a2.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+}
+-- @b/a/a3.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+}
+
+func _() {
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+}
+-- @b/a/a4.go --
+package a
+
+// TODO(rfindley/adonovan): inlining here adds an additional import of
+// mod.test/b. Can we do better?
+import (
+ . "mod.test/b"
+ b "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+ var _ c.C = <-Chan
+ b.B(<-Chan)
+}
+-- @b/b/b.go --
+package b
+
+import "mod.test/c"
+
+var Chan chan c.C
+
+func B(y c.C) { //@codeaction("refactor.rewrite", "x", "x", b)
+}
+-- @d/d/d.go --
+package d
+
+// Removing the parameter should remove this import.
+
+func D() { //@codeaction("refactor.rewrite", "x", "x", d)
+}
+
+func _() {
+ D()
+}
diff --git a/gopls/internal/regtest/misc/configuration_test.go b/gopls/internal/regtest/misc/configuration_test.go
index 3f9c5b9..d74162e 100644
--- a/gopls/internal/regtest/misc/configuration_test.go
+++ b/gopls/internal/regtest/misc/configuration_test.go
@@ -141,6 +141,8 @@
"experimentalUseInvalidMetadata": true,
"experimentalWatchedFileDelay": "1s",
"experimentalWorkspaceModule": true,
+ "tempModfile": true,
+ "expandWorkspaceToModule": false,
},
).Run(t, "", func(t *testing.T, env *Env) {
env.OnceMet(
@@ -148,6 +150,8 @@
ShownMessage("experimentalWorkspaceModule"),
ShownMessage("experimentalUseInvalidMetadata"),
ShownMessage("experimentalWatchedFileDelay"),
+ ShownMessage("https://go.dev/issue/63537"), // issue to remove tempModfile
+ ShownMessage("https://go.dev/issue/63536"), // issue to remove expandWorkspaceToModule
)
})
}
diff --git a/internal/astutil/clone.go b/internal/astutil/clone.go
new file mode 100644
index 0000000..d5ee82c
--- /dev/null
+++ b/internal/astutil/clone.go
@@ -0,0 +1,71 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "go/ast"
+ "reflect"
+)
+
+// CloneNode returns a deep copy of a Node.
+// It omits pointers to ast.{Scope,Object} variables.
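+//
+// A minimal illustrative use (decl is a hypothetical *ast.FuncDecl):
+//
+//	decl2 := CloneNode(decl)
+//	decl2.Name.Name = "copy" // mutating the clone does not affect decl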
+func CloneNode[T ast.Node](n T) T {
+ return cloneNode(n).(T)
+}
+
+func cloneNode(n ast.Node) ast.Node {
+ var clone func(x reflect.Value) reflect.Value
+ set := func(dst, src reflect.Value) {
+ src = clone(src)
+ if src.IsValid() {
+ dst.Set(src)
+ }
+ }
+ clone = func(x reflect.Value) reflect.Value {
+ switch x.Kind() {
+ case reflect.Ptr:
+ if x.IsNil() {
+ return x
+ }
+ // Skip fields of types potentially involved in cycles.
+ switch x.Interface().(type) {
+ case *ast.Object, *ast.Scope:
+ return reflect.Zero(x.Type())
+ }
+ y := reflect.New(x.Type().Elem())
+ set(y.Elem(), x.Elem())
+ return y
+
+ case reflect.Struct:
+ y := reflect.New(x.Type()).Elem()
+ for i := 0; i < x.Type().NumField(); i++ {
+ set(y.Field(i), x.Field(i))
+ }
+ return y
+
+ case reflect.Slice:
+ if x.IsNil() {
+ return x
+ }
+ y := reflect.MakeSlice(x.Type(), x.Len(), x.Cap())
+ for i := 0; i < x.Len(); i++ {
+ set(y.Index(i), x.Index(i))
+ }
+ return y
+
+ case reflect.Interface:
+ y := reflect.New(x.Type()).Elem()
+ set(y, x.Elem())
+ return y
+
+ case reflect.Array, reflect.Chan, reflect.Func, reflect.Map, reflect.UnsafePointer:
+ panic(x) // unreachable in AST
+
+ default:
+ return x // bool, string, number
+ }
+ }
+ return clone(reflect.ValueOf(n)).Interface().(ast.Node)
+}
diff --git a/internal/facts/facts.go b/internal/facts/facts.go
index 8480ea0..f0aa97e 100644
--- a/internal/facts/facts.go
+++ b/internal/facts/facts.go
@@ -48,7 +48,6 @@
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/types/objectpath"
- "golang.org/x/tools/internal/typesinternal"
)
const debug = false
@@ -205,9 +204,7 @@
//
// Concurrent calls to Decode are safe, so long as the
// [GetPackageFunc] (if any) is also concurrency-safe.
-//
-// TODO(golang/go#61443): eliminate skipMethodSorting one way or the other.
-func (d *Decoder) Decode(skipMethodSorting bool, read func(pkgPath string) ([]byte, error)) (*Set, error) {
+func (d *Decoder) Decode(read func(pkgPath string) ([]byte, error)) (*Set, error) {
// Read facts from imported packages.
// Facts may describe indirectly imported packages, or their objects.
m := make(map[key]analysis.Fact) // one big bucket
@@ -247,7 +244,7 @@
key := key{pkg: factPkg, t: reflect.TypeOf(f.Fact)}
if f.Object != "" {
// object fact
- obj, err := typesinternal.ObjectpathObject(factPkg, string(f.Object), skipMethodSorting)
+ obj, err := objectpath.Object(factPkg, f.Object)
if err != nil {
// (most likely due to unexported object)
// TODO(adonovan): audit for other possibilities.
@@ -271,11 +268,8 @@
//
// It may fail if one of the Facts could not be gob-encoded, but this is
// a sign of a bug in an Analyzer.
-func (s *Set) Encode(skipMethodSorting bool) []byte {
+func (s *Set) Encode() []byte {
encoder := new(objectpath.Encoder)
- if skipMethodSorting {
- typesinternal.SkipEncoderMethodSorting(encoder)
- }
// TODO(adonovan): opt: use a more efficient encoding
// that avoids repeating PkgPath for each fact.
diff --git a/internal/facts/facts_test.go b/internal/facts/facts_test.go
index 7eb766e..4f1e8d6 100644
--- a/internal/facts/facts_test.go
+++ b/internal/facts/facts_test.go
@@ -311,7 +311,7 @@
}
// decode
- facts, err := facts.NewDecoder(pkg).Decode(false, read)
+ facts, err := facts.NewDecoder(pkg).Decode(read)
if err != nil {
t.Fatalf("Decode failed: %v", err)
}
@@ -345,7 +345,7 @@
}
// encode
- factmap[pkg.Path()] = facts.Encode(false)
+ factmap[pkg.Path()] = facts.Encode()
}
}
@@ -413,7 +413,7 @@
}
obj := pkg.Scope().Lookup("A")
- s, err := facts.NewDecoder(pkg).Decode(false, func(pkgPath string) ([]byte, error) { return nil, nil })
+ s, err := facts.NewDecoder(pkg).Decode(func(pkgPath string) ([]byte, error) { return nil, nil })
if err != nil {
t.Fatal(err)
}
@@ -528,7 +528,7 @@
packages[pkg.Path()] = pkg
// decode facts
- facts, err := facts.NewDecoder(pkg).Decode(false, read)
+ facts, err := facts.NewDecoder(pkg).Decode(read)
if err != nil {
t.Fatalf("Decode failed: %v", err)
}
@@ -555,7 +555,7 @@
}
// encode facts
- factmap[pkg.Path()] = facts.Encode(false)
+ factmap[pkg.Path()] = facts.Encode()
}
})
}
diff --git a/internal/refactor/inline/doc.go b/internal/refactor/inline/doc.go
index b13241f..6bb4cef 100644
--- a/internal/refactor/inline/doc.go
+++ b/internal/refactor/inline/doc.go
@@ -251,10 +251,6 @@
could be achieved by returning metadata alongside the result
and having the client conditionally discard the change.
- - Is it acceptable to skip effects that are limited to runtime
- panics? Can we avoid evaluating an argument x.f
- or a[i] when the corresponding parameter is unused?
-
- Support inlining of generic functions, replacing type parameters
by their instantiations.
@@ -262,9 +258,6 @@
But note that the existing algorithm makes widespread assumptions
that the callee is a package-level function or method.
- - Eliminate parens and braces inserted conservatively when they
- are redundant.
-
- Eliminate explicit conversions of "untyped" literals inserted
conservatively when they are redundant. For example, the
conversion int32(1) is redundant when this value is used only as a
diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go
index 618b179..8a6d777 100644
--- a/internal/refactor/inline/inline.go
+++ b/internal/refactor/inline/inline.go
@@ -21,6 +21,7 @@
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/imports"
+ internalastutil "golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typeparams"
)
@@ -182,15 +183,19 @@
// Precise comment handling would make this a
// non-issue. Formatting wouldn't really need a
// FileSet at all.
- mark := out.Len()
- if err := format.Node(&out, caller.Fset, res.new); err != nil {
- return nil, err
- }
if elideBraces {
- // Overwrite unnecessary {...} braces with spaces.
- // TODO(adonovan): less hacky solution.
- out.Bytes()[mark] = ' '
- out.Bytes()[out.Len()-1] = ' '
+ for i, stmt := range res.new.(*ast.BlockStmt).List {
+ if i > 0 {
+ out.WriteByte('\n')
+ }
+ if err := format.Node(&out, caller.Fset, stmt); err != nil {
+ return nil, err
+ }
+ }
+ } else {
+ if err := format.Node(&out, caller.Fset, res.new); err != nil {
+ return nil, err
+ }
}
out.Write(caller.Content[end:])
const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
@@ -487,8 +492,12 @@
if samePkg {
// Caller and callee are in same package.
// Check caller has not shadowed the decl.
- found := caller.lookup(obj.Name) // can't fail
- if !isPkgLevel(found) {
+ //
+ // This may fail if the callee is "fake", such as for signature
+ // refactoring where the callee is modified to be a trivial wrapper
+ // around the refactored signature.
+ found := caller.lookup(obj.Name)
+ if found != nil && !isPkgLevel(found) {
return nil, fmt.Errorf("cannot inline because %q is shadowed in caller by a %s (line %d)",
obj.Name, objectKind(found),
caller.Fset.PositionFor(found.Pos(), false).Line)
@@ -897,9 +906,6 @@
// The body may use defer, arbitrary control flow, and
// multiple returns.
//
- // TODO(adonovan): omit the braces if the sets of
- // names in the two blocks are disjoint.
- //
// TODO(adonovan): add a strategy for a 'void tail
// call', i.e. a call statement prior to an (explicit
// or implicit) return.
@@ -937,8 +943,6 @@
// - all parameters and result vars can be eliminated
// or replaced by a binding decl,
// - caller ExprStmt is in unrestricted statement context.
- //
- // If there is only a single statement, the braces are omitted.
if stmt := callStmt(caller.path, true); stmt != nil &&
(!needBindingDecl || bindingDeclStmt != nil) &&
!callee.HasDefer &&
@@ -951,9 +955,6 @@
if needBindingDecl {
body.List = prepend(bindingDeclStmt, body.List...)
}
- if len(body.List) == 1 { // FIXME do this opt later
- repl = body.List[0] // singleton: omit braces
- }
res.old = stmt
res.new = repl
return res, nil
@@ -989,15 +990,32 @@
}
// Infallible general case: literalization.
+ //
+ // func(params) { body }(args)
+ //
logf("strategy: literalization")
+ funcLit := &ast.FuncLit{
+ Type: calleeDecl.Type,
+ Body: calleeDecl.Body,
+ }
+
+ // Literalization can still make use of a binding
+ // decl as it gives a more natural reading order:
+ //
+ // func() { var params = args; body }()
+ //
+ // TODO(adonovan): relax the allResultsUnreferenced requirement
+ // by adding a parameter-only (no named results) binding decl.
+ if bindingDeclStmt != nil && allResultsUnreferenced {
+ funcLit.Type.Params.List = nil
+ remainingArgs = nil
+ funcLit.Body.List = prepend(bindingDeclStmt, funcLit.Body.List...)
+ }
// Emit a new call to a function literal in place of
// the callee name, with appropriate replacements.
newCall := &ast.CallExpr{
- Fun: &ast.FuncLit{
- Type: calleeDecl.Type,
- Body: calleeDecl.Body,
- },
+ Fun: funcLit,
Ellipsis: token.NoPos, // f(slice...) is always simplified
Args: remainingArgs,
}
@@ -1240,11 +1258,11 @@
// remove the last reference to a caller local var.
if caller.enclosingFunc != nil {
for free := range arg.freevars {
- if v, ok := caller.lookup(free).(*types.Var); ok && within(v.Pos(), caller.enclosingFunc.Body) {
- // TODO(adonovan): be more precise and check that v
- // is indeed referenced only by call arguments.
- // Better: proceed, but blank out its declaration as needed.
- logf("keeping param %q: arg contains perhaps the last reference to possible caller local %v @ %v",
+ // TODO(rfindley): we can get this 100% right by looking for
+ // references among other arguments which have non-zero references
+ // within the callee.
+ if v, ok := caller.lookup(free).(*types.Var); ok && within(v.Pos(), caller.enclosingFunc.Body) && !isUsedOutsideCall(caller, v) {
+ logf("keeping param %q: arg contains perhaps the last reference to caller local %v @ %v",
param.info.Name, v, caller.Fset.PositionFor(v.Pos(), false))
continue next
}
@@ -1306,7 +1324,7 @@
logf("replacing parameter %q by argument %q",
param.info.Name, debugFormatNode(caller.Fset, arg.expr))
for _, ref := range param.info.Refs {
- replaceCalleeID(ref, cloneNode(arg.expr).(ast.Expr))
+ replaceCalleeID(ref, internalastutil.CloneNode(arg.expr).(ast.Expr))
}
params[i] = nil // substituted
args[i] = nil // substituted
@@ -1314,6 +1332,34 @@
}
}
+// isUsedOutsideCall reports whether v is used outside of caller.Call, within
+// the body of caller.enclosingFunc.
+func isUsedOutsideCall(caller *Caller, v *types.Var) bool {
+ used := false
+ ast.Inspect(caller.enclosingFunc.Body, func(n ast.Node) bool {
+ if n == caller.Call {
+ return false
+ }
+ switch n := n.(type) {
+ case *ast.Ident:
+ if use := caller.Info.Uses[n]; use == v {
+ used = true
+ }
+ case *ast.FuncType:
+ // All params are used.
+ for _, fld := range n.Params.List {
+ for _, n := range fld.Names {
+ if def := caller.Info.Defs[n]; def == v {
+ used = true
+ }
+ }
+ }
+ }
+ return !used // keep going until we find a use
+ })
+ return used
+}
+
// checkFalconConstraints checks whether constant arguments
// are safe to substitute (e.g. s[i] -> ""[0] is not safe.)
//
@@ -1536,11 +1582,12 @@
// createBindingDecl constructs a "binding decl" that implements
// parameter assignment and declares any named result variables
-// referenced by the callee.
+// referenced by the callee. It returns nil if there were no
+// unsubstituted parameters.
//
// It may not always be possible to create the decl (e.g. due to
-// shadowing), in which case it returns nil; but if it succeeds, the
-// declaration may be used by reduction strategies to relax the
+// shadowing), in which case it also returns nil; but if it succeeds,
+// the declaration may be used by reduction strategies to relax the
// requirement that all parameters have been substituted.
//
// For example, a call:
@@ -1682,14 +1729,19 @@
}
}
- decl := &ast.DeclStmt{
+ if len(specs) == 0 {
+ logf("binding decl not needed: all parameters substituted")
+ return nil
+ }
+
+ stmt := &ast.DeclStmt{
Decl: &ast.GenDecl{
Tok: token.VAR,
Specs: specs,
},
}
- logf("binding decl: %s", debugFormatNode(caller.Fset, decl))
- return decl
+ logf("binding decl: %s", debugFormatNode(caller.Fset, stmt))
+ return stmt
}
// lookup does a symbol lookup in the lexical environment of the caller.
@@ -2317,60 +2369,6 @@
}
}
-// cloneNode returns a deep copy of a Node.
-// It omits pointers to ast.{Scope,Object} variables.
-func cloneNode(n ast.Node) ast.Node {
- var clone func(x reflect.Value) reflect.Value
- set := func(dst, src reflect.Value) {
- src = clone(src)
- if src.IsValid() {
- dst.Set(src)
- }
- }
- clone = func(x reflect.Value) reflect.Value {
- switch x.Kind() {
- case reflect.Ptr:
- if x.IsNil() {
- return x
- }
- // Skip fields of types potentially involved in cycles.
- switch x.Interface().(type) {
- case *ast.Object, *ast.Scope:
- return reflect.Zero(x.Type())
- }
- y := reflect.New(x.Type().Elem())
- set(y.Elem(), x.Elem())
- return y
-
- case reflect.Struct:
- y := reflect.New(x.Type()).Elem()
- for i := 0; i < x.Type().NumField(); i++ {
- set(y.Field(i), x.Field(i))
- }
- return y
-
- case reflect.Slice:
- y := reflect.MakeSlice(x.Type(), x.Len(), x.Cap())
- for i := 0; i < x.Len(); i++ {
- set(y.Index(i), x.Index(i))
- }
- return y
-
- case reflect.Interface:
- y := reflect.New(x.Type()).Elem()
- set(y, x.Elem())
- return y
-
- case reflect.Array, reflect.Chan, reflect.Func, reflect.Map, reflect.UnsafePointer:
- panic(x) // unreachable in AST
-
- default:
- return x // bool, string, number
- }
- }
- return clone(reflect.ValueOf(n)).Interface().(ast.Node)
-}
-
// clearPositions destroys token.Pos information within the tree rooted at root,
// as positions in callee trees may cause caller comments to be emitted prematurely.
//
@@ -2622,6 +2620,7 @@
}
}
}
+ delete(names, "_")
return names
}
diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go
index 189ac3f..99cc0de 100644
--- a/internal/refactor/inline/inline_test.go
+++ b/internal/refactor/inline/inline_test.go
@@ -388,6 +388,17 @@
`func _(ch chan int) { f(ch) }`,
`func _(ch chan int) { <-(<-chan int)(ch) }`,
},
+ {
+ // (a regression test for unnecessary braces)
+ "In block elision, blank decls don't count when computing name conflicts.",
+ `func f(x int) { var _ = x; var _ = 3 }`,
+ `func _() { var _ = 1; f(2) }`,
+ `func _() {
+ var _ = 1
+ var _ = 2
+ var _ = 3
+}`,
+ },
})
}
@@ -584,6 +595,36 @@
`func _() { var local int; _ = local }`,
},
{
+ "Arguments that are used are detected",
+ `func f(int) {}`,
+ `func _() { var local int; _ = local; f(local) }`,
+ `func _() { var local int; _ = local }`,
+ },
+ {
+ "Arguments that are used are detected",
+ `func f(x, y int) { print(x) }`,
+ `func _() { var z int; f(z, z) }`,
+ `func _() {
+ var z int
+ var _ int = z
+ print(z)
+}`,
+ },
+ {
+ "Function parameters are always used",
+ `func f(int) {}`,
+ `func _() {
+ func(local int) {
+ f(local)
+ }(1)
+}`,
+ `func _() {
+ func(local int) {
+
+ }(1)
+}`,
+ },
+ {
"Regression test for detection of shadowing in nested functions.",
`func f(x int) { _ = func() { y := 1; print(y); print(x) } }`,
`func _(y int) { f(y) } `,
@@ -682,7 +723,7 @@
"Variadic cancellation (basic).",
`func f(args ...any) { defer f(&args); println(args) }`,
`func _(slice []any) { f(slice...) }`,
- `func _(slice []any) { func(args []any) { defer f(&args); println(args) }(slice) }`,
+ `func _(slice []any) { func() { var args []any = slice; defer f(&args); println(args) }() }`,
},
{
"Variadic cancellation (literalization with parameter elimination).",
@@ -786,6 +827,24 @@
`func _(x *T) { f(x, recover()) }`,
`func _(x *T) { x.g(recover()) }`,
},
+ {
+ "Literalization can make use of a binding decl (all params).",
+ `func f(x, y int) int { defer println(); return y + x }; func g(int) int`,
+ `func _() { println(f(g(1), g(2))) }`,
+ `func _() { println(func() int { var x, y int = g(1), g(2); defer println(); return y + x }()) }`,
+ },
+ {
+ "Literalization can make use of a binding decl (some params).",
+ `func f(x, y int) int { z := y + x; defer println(); return z }; func g(int) int`,
+ `func _() { println(f(g(1), g(2))) }`,
+ `func _() { println(func() int { var x int = g(1); z := g(2) + x; defer println(); return z }()) }`,
+ },
+ {
+			"Literalization can't yet make use of a binding decl (named results).",
+ `func f(x, y int) (z int) { z = y + x; defer println(); return }; func g(int) int`,
+ `func _() { println(f(g(1), g(2))) }`,
+ `func _() { println(func(x int) (z int) { z = g(2) + x; defer println(); return }(g(1))) }`,
+ },
})
}
diff --git a/internal/refactor/inline/testdata/basic-literal.txtar b/internal/refactor/inline/testdata/basic-literal.txtar
index a74fbda..7ae640a 100644
--- a/internal/refactor/inline/testdata/basic-literal.txtar
+++ b/internal/refactor/inline/testdata/basic-literal.txtar
@@ -23,7 +23,7 @@
package a
func _() {
- func(x int) int { defer print(); return x + 2 }(recover().(int)) //@ inline(re"add", add1)
+ func() int { var x int = recover().(int); defer print(); return x + 2 }() //@ inline(re"add", add1)
}
func add(x, y int) int { defer print(); return x + y }
diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar
index 4188a52..9d1abdb 100644
--- a/internal/refactor/inline/testdata/import-shadow.txtar
+++ b/internal/refactor/inline/testdata/import-shadow.txtar
@@ -87,10 +87,8 @@
var x b.T
func A(b int) {
-
b0.One()
- b0.Two()
- //@ inline(re"F", fresult)
+ b0.Two() //@ inline(re"F", fresult)
}
-- d/d.go --
diff --git a/internal/refactor/inline/testdata/issue62667.txtar b/internal/refactor/inline/testdata/issue62667.txtar
index 21420e2..b6ff83b 100644
--- a/internal/refactor/inline/testdata/issue62667.txtar
+++ b/internal/refactor/inline/testdata/issue62667.txtar
@@ -38,7 +38,7 @@
)
func A() {
- func(path string) { defer func() {}(); path0.Split(path) }(g()) //@ inline(re"Dir", result)
+ func() { var path string = g(); defer func() {}(); path0.Split(path) }() //@ inline(re"Dir", result)
}
func g() string
\ No newline at end of file
diff --git a/internal/refactor/inline/testdata/issue63298.txtar b/internal/refactor/inline/testdata/issue63298.txtar
index e355e8e..990ebcd 100644
--- a/internal/refactor/inline/testdata/issue63298.txtar
+++ b/internal/refactor/inline/testdata/issue63298.txtar
@@ -45,8 +45,6 @@
)
func _() {
-
b.B()
b0.B()
-
-}
\ No newline at end of file
+}
diff --git a/internal/refactor/inline/testdata/method.txtar b/internal/refactor/inline/testdata/method.txtar
index b141b09..92343ed 100644
--- a/internal/refactor/inline/testdata/method.txtar
+++ b/internal/refactor/inline/testdata/method.txtar
@@ -104,10 +104,8 @@
func _() {
var ptr *T
-
var _ T = *ptr
- _ = 1
- //@ inline(re"h", h)
+ _ = 1 //@ inline(re"h", h)
}
-- a/i.go --
diff --git a/internal/refactor/inline/testdata/multistmt-body.txtar b/internal/refactor/inline/testdata/multistmt-body.txtar
index 6bd0108..7702719 100644
--- a/internal/refactor/inline/testdata/multistmt-body.txtar
+++ b/internal/refactor/inline/testdata/multistmt-body.txtar
@@ -54,10 +54,8 @@
func _() {
a := 1
-
z := 1
- print(a + 2 + z)
- //@ inline(re"f", out2)
+ print(a + 2 + z) //@ inline(re"f", out2)
}
-- a/a3.go --
diff --git a/internal/refactor/inline/testdata/tailcall.txtar b/internal/refactor/inline/testdata/tailcall.txtar
index 53b6de3..ccfe9f4 100644
--- a/internal/refactor/inline/testdata/tailcall.txtar
+++ b/internal/refactor/inline/testdata/tailcall.txtar
@@ -36,7 +36,6 @@
package a
func _() int {
-
total := 0
start:
for i := 1; i <= 2; i++ {
@@ -47,8 +46,7 @@
return -1
}
}
- return total
- //@ inline(re"sum", sum)
+ return total //@ inline(re"sum", sum)
}
func sum(lo, hi int) int {
diff --git a/internal/typesinternal/objectpath.go b/internal/typesinternal/objectpath.go
deleted file mode 100644
index 5e96e89..0000000
--- a/internal/typesinternal/objectpath.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package typesinternal
-
-import "go/types"
-
-// This file contains back doors that allow gopls to avoid method sorting when
-// using the objectpath package.
-//
-// This is performance-critical in certain repositories, but changing the
-// behavior of the objectpath package is still being discussed in
-// golang/go#61443. If we decide to remove the sorting in objectpath we can
-// simply delete these back doors. Otherwise, we should add a new API to
-// objectpath that allows controlling the sorting.
-
-// SkipEncoderMethodSorting marks enc (which must be an *objectpath.Encoder) as
-// not requiring sorted methods.
-var SkipEncoderMethodSorting func(enc interface{})
-
-// ObjectpathObject is like objectpath.Object, but allows suppressing method
-// sorting.
-var ObjectpathObject func(pkg *types.Package, p string, skipMethodSorting bool) (types.Object, error)