all: merge master (ee20ddf) into gopls-release-branch.0.14

Also add back the replace directive, and tidy.

For golang/go#63220

Conflicts:

- gopls/go.sum

Merge List:

+ 2023-10-05 ee20ddf1f internal/refactor/inline: permit return conversions in tailcall
+ 2023-10-05 db1d1e0d3 gopls/internal/lsp: go to definition from embed directive
+ 2023-10-05 2be977ecc internal/refactor/inline: work around channel type misformatting
+ 2023-10-04 0ba9c8439 internal/fuzzy: several improvements for symbol matching
+ 2023-10-04 c2725ad84 gopls: update x/telemetry dependency
+ 2023-10-03 e8722c010 go/types/internal/play: show types.Selection information
+ 2023-10-03 a819c616c internal/refactor/inline: eliminate unnecessary binding decl
+ 2023-10-03 102b64b54 internal/refactor/inline: tweak everything-test docs again
+ 2023-10-03 197e2c432 internal/refactor/inline: fix broken tests
+ 2023-10-03 586b21ae0 internal/refactor/inline: elide redundant braces
+ 2023-10-02 ca34416d4 internal/refactor/inline: fallible constant analysis
+ 2023-10-02 6a38a5f6f internal/refactor/inline: use default working directory
+ 2023-10-02 c6d331deb internal/refactor/inline: don't add same import PkgName twice
+ 2023-10-02 1058109b6 internal/refactor/inline: don't insert unnecessary parens
+ 2023-10-02 d8e94f203 internal/refactor/inline: fix bug in shadow detection
+ 2023-10-02 0adbf9c67 gopls/internal/lsp: simplify the telemetry prompt
+ 2023-10-02 2ed429843 go/analysis/analysistest: format golden files before comparing
+ 2023-09-28 451716b51 internal/refactor/inline: consider "", 0.0, 1.0 duplicable
+ 2023-09-28 792f91f70 internal/refactor/inline: tweak everything test for cgo
+ 2023-09-28 941629930 internal/refactor/inline: fix pkgname shadowing bug
+ 2023-09-28 4cd12d6da gopls/internal/lsp/fake: don't set a completion budget for tests
+ 2023-09-28 57ecf488e gopls/internal/lsp: hover over embed directives
+ 2023-09-28 6de34480a gopls/internal/lsp/cache: remove snapshot.typeCheckMu
+ 2023-09-27 7f23bc81d gopls/internal/regtest/source/completion: reuse functionCallSnippet in unimported completion
+ 2023-09-27 3d03fbd05 gopls/internal/lsp: use matcher score in ranking unimported candidates
+ 2023-09-27 7577387a8 gopls/internal/lsp/source: don't complete to golang.org/toolchain
+ 2023-09-27 4b34fbf5f internal/refactor/inline: fix bug discard receiver and spread
+ 2023-09-27 6ec9b0f07 internal/refactor/inline: refine "last ref to caller local"
+ 2023-09-27 08bdfeca0 internal/refactor/inline: split up the big table
+ 2023-09-27 486787ef4 gopls/internal/lsp/source: Add ui.complete.completeFunctionCalls toggle
+ 2023-09-27 9d2d0e853 gopls: set a context deadline after minimal completion results
+ 2023-09-26 169105a90 internal/refactor/inline: insert conversions during substitution
+ 2023-09-26 b3ada30db internal/refactor/inline: analyze callee effects
+ 2023-09-26 160210312 internal/refactor/inline: skip cgo tests on non-cgo builders
+ 2023-09-26 1c8e684dd internal/refactor/inline: sound treatment of named results
+ 2023-09-26 d32f97a6d internal/refactor/inline: eliminate Callee.BodyIsReturnExpr
+ 2023-09-26 f4abeaefa go/analysis/passes/directive: use strings.Cut
+ 2023-09-26 c42ed47c5 internal/refactor/inline: reject attempts to inline in cgo code
+ 2023-09-26 313150aa0 internal/refactor/inline: x++ counts as assignment in escape
+ 2023-09-26 d6f1bb710 internal/refactor/inline: ignore line directives in testing

Change-Id: I97157c76bd781c03aef407b86ac1da209846f99a
diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go
index 139c758..0c066ba 100644
--- a/go/analysis/analysistest/analysistest.go
+++ b/go/analysis/analysistest/analysistest.go
@@ -211,24 +211,14 @@
 					for _, vf := range ar.Files {
 						if vf.Name == sf {
 							found = true
-							out, err := diff.ApplyBytes(orig, edits)
-							if err != nil {
-								t.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", file.Name(), err)
-								continue
-							}
 							// the file may contain multiple trailing
 							// newlines if the user places empty lines
 							// between files in the archive. normalize
 							// this to a single newline.
-							want := string(bytes.TrimRight(vf.Data, "\n")) + "\n"
-							formatted, err := format.Source(out)
-							if err != nil {
-								t.Errorf("%s: error formatting edited source: %v\n%s", file.Name(), err, out)
-								continue
-							}
-							if got := string(formatted); got != want {
-								unified := diff.Unified(fmt.Sprintf("%s.golden [%s]", file.Name(), sf), "actual", want, got)
-								t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), unified)
+							golden := append(bytes.TrimRight(vf.Data, "\n"), '\n')
+
+							if err := applyDiffsAndCompare(orig, golden, edits, file.Name()); err != nil {
+								t.Errorf("%s", err)
 							}
 							break
 						}
@@ -245,21 +235,8 @@
 					catchallEdits = append(catchallEdits, edits...)
 				}
 
-				out, err := diff.ApplyBytes(orig, catchallEdits)
-				if err != nil {
-					t.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", file.Name(), err)
-					continue
-				}
-				want := string(ar.Comment)
-
-				formatted, err := format.Source(out)
-				if err != nil {
-					t.Errorf("%s: error formatting resulting source: %v\n%s", file.Name(), err, out)
-					continue
-				}
-				if got := string(formatted); got != want {
-					unified := diff.Unified(file.Name()+".golden", "actual", want, got)
-					t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), unified)
+				if err := applyDiffsAndCompare(orig, ar.Comment, catchallEdits, file.Name()); err != nil {
+					t.Errorf("%s", err)
 				}
 			}
 		}
@@ -267,6 +244,30 @@
 	return r
 }
 
+// applyDiffsAndCompare applies edits to src and compares the results against
+// golden after formatting both. fileName is used solely for error reporting.
+func applyDiffsAndCompare(src, golden []byte, edits []diff.Edit, fileName string) error {
+	out, err := diff.ApplyBytes(src, edits)
+	if err != nil {
+		return fmt.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", fileName, err)
+	}
+	wantRaw, err := format.Source(golden)
+	if err != nil {
+		return fmt.Errorf("%s.golden: error formatting golden file: %v\n%s", fileName, err, out)
+	}
+	want := string(wantRaw)
+
+	formatted, err := format.Source(out)
+	if err != nil {
+		return fmt.Errorf("%s: error formatting resulting source: %v\n%s", fileName, err, out)
+	}
+	if got := string(formatted); got != want {
+		unified := diff.Unified(fileName+".golden", "actual", want, got)
+		return fmt.Errorf("suggested fixes failed for %s:\n%s", fileName, unified)
+	}
+	return nil
+}
+
 // Run applies an analysis to the packages denoted by the "go list" patterns.
 //
 // It loads the packages from the specified
diff --git a/go/analysis/analysistest/analysistest_test.go b/go/analysis/analysistest/analysistest_test.go
index 8c7ff73..0b5f5ed 100644
--- a/go/analysis/analysistest/analysistest_test.go
+++ b/go/analysis/analysistest/analysistest_test.go
@@ -70,6 +70,8 @@
 
 	// OK (multiple expectations on same line)
 	println(); println() // want "call of println(...)" "call of println(...)"
+
+	// A Line that is not formatted correctly in the golden file.
 }
 
 // OK (facts and diagnostics on same line)
@@ -109,6 +111,8 @@
 	// OK (multiple expectations on same line)
 	println_TEST_()
 	println_TEST_() // want "call of println(...)" "call of println(...)"
+	
+			// A Line that is not formatted correctly in the golden file.
 }
 
 // OK (facts and diagnostics on same line)
diff --git a/go/analysis/passes/directive/directive.go b/go/analysis/passes/directive/directive.go
index 1146d7b..2691f18 100644
--- a/go/analysis/passes/directive/directive.go
+++ b/go/analysis/passes/directive/directive.go
@@ -124,7 +124,7 @@
 	for text != "" {
 		offset := len(fullText) - len(text)
 		var line string
-		line, text, _ = stringsCut(text, "\n")
+		line, text, _ = strings.Cut(text, "\n")
 
 		if !inStar && strings.HasPrefix(line, "//") {
 			check.comment(pos+token.Pos(offset), line)
@@ -137,7 +137,7 @@
 			line = strings.TrimSpace(line)
 			if inStar {
 				var ok bool
-				_, line, ok = stringsCut(line, "*/")
+				_, line, ok = strings.Cut(line, "*/")
 				if !ok {
 					break
 				}
@@ -200,14 +200,6 @@
 	}
 }
 
-// Go 1.18 strings.Cut.
-func stringsCut(s, sep string) (before, after string, found bool) {
-	if i := strings.Index(s, sep); i >= 0 {
-		return s[:i], s[i+len(sep):], true
-	}
-	return s, "", false
-}
-
 // Go 1.20 strings.CutPrefix.
 func stringsCutPrefix(s, prefix string) (after string, found bool) {
 	if !strings.HasPrefix(s, prefix) {
diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go
index 80deff5..79cdc5a 100644
--- a/go/types/internal/play/play.go
+++ b/go/types/internal/play/play.go
@@ -173,6 +173,26 @@
 		}
 	}
 
+	// selection x.f information (if cursor is over .f)
+	for _, n := range path[:2] {
+		if sel, ok := n.(*ast.SelectorExpr); ok {
+			seln, ok := pkg.TypesInfo.Selections[sel]
+			if ok {
+				fmt.Fprintf(out, "Selection: %s recv=%v obj=%v type=%v indirect=%t index=%d\n\n",
+					strings.Fields("FieldVal MethodVal MethodExpr")[seln.Kind()],
+					seln.Recv(),
+					seln.Obj(),
+					seln.Type(),
+					seln.Indirect(),
+					seln.Index())
+
+			} else {
+				fmt.Fprintf(out, "Selector is qualified identifier.\n\n")
+			}
+			break
+		}
+	}
+
 	// Object type information.
 	switch n := path[0].(type) {
 	case *ast.Ident:
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index 5bc692e..eca3ee8 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -269,6 +269,16 @@
 
 Default: `true`.
 
+##### **completeFunctionCalls** *bool*
+
+completeFunctionCalls enables function call completion.
+
+When completing a statement, or when a function return type matches the
+expected type of the expression being completed, completion may suggest call
+expressions (i.e. may include parentheses).
+
+Default: `true`.
+
 #### Diagnostic
 
 ##### **analyses** *map[string]bool*
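As a rough sketch of how the new setting is consumed (the exact key layout depends on the LSP client): gopls accepts the flat option name, the same shape used by the marker tests' settings.json files later in this CL, so a client could disable the toggle with:

	{
		"completeFunctionCalls": false
	}

When the option is false, functionCallSnippet emits only the function name, without parentheses or parameter placeholders.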
diff --git a/gopls/go.mod b/gopls/go.mod
index 6b95612..abb49c9 100644
--- a/gopls/go.mod
+++ b/gopls/go.mod
@@ -10,7 +10,7 @@
 	golang.org/x/mod v0.12.0
 	golang.org/x/sync v0.3.0
 	golang.org/x/sys v0.12.0
-	golang.org/x/telemetry v0.0.0-20230923135512-f45a5404d02c
+	golang.org/x/telemetry v0.0.0-20231003223302-0168ef4ebbd3
 	golang.org/x/text v0.13.0
 	golang.org/x/tools v0.13.1-0.20230925193239-ccd6b0e95a6a
 	golang.org/x/vuln v1.0.1
@@ -26,3 +26,5 @@
 	golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 // indirect
 
 )
+
+replace golang.org/x/tools => ../
diff --git a/gopls/go.sum b/gopls/go.sum
index 54f8d86..c4dadd2 100644
--- a/gopls/go.sum
+++ b/gopls/go.sum
@@ -1,11 +1,18 @@
 github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
 github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
+github.com/google/go-cmdtest v0.4.1-0.20220921163831-55ab3332a786/go.mod h1:apVn/GCasLZUVpAJ6oWAuyP7Ne7CEsQbTnc0plM3m+o=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
+github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
 github.com/google/safehtml v0.0.2/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU=
 github.com/google/safehtml v0.1.0 h1:EwLKo8qawTKfsi0orxcQAZzu07cICaBeFMegAU9eaT8=
 github.com/google/safehtml v0.1.0/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU=
@@ -15,38 +22,63 @@
 github.com/jba/templatecheck v0.6.0/go.mod h1:/1k7EajoSErFI9GLHAsiIJEaNLt3ALKNw2TV7z2SYv4=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
 github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
 github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
 golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 h1:2O2DON6y3XMJiQRAS1UWU+54aec2uopH3x7MAiqGW6Y=
 golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
 golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
 golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/telemetry v0.0.0-20230923135512-f45a5404d02c h1:az7Rs3XV7P68bKMPT50p2X4su02nhHqtDOi9T8f3IEw=
-golang.org/x/telemetry v0.0.0-20230923135512-f45a5404d02c/go.mod h1:ppZ76JTkRgJC2GQEgtVY3fiuJR+N8FU2MAlp+gfN1E4=
+golang.org/x/telemetry v0.0.0-20231003223302-0168ef4ebbd3 h1:vxxQvncMbcRAtqHV5HsHGJkbya+BIOYIY+y6cdPZhzk=
+golang.org/x/telemetry v0.0.0-20231003223302-0168ef4ebbd3/go.mod h1:ppZ76JTkRgJC2GQEgtVY3fiuJR+N8FU2MAlp+gfN1E4=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
 golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.13.1-0.20230925193239-ccd6b0e95a6a h1:6OVH6eKlHOm8nM+hiSjdlbkNKMlfeZc+HWLYfG8QWns=
-golang.org/x/tools v0.13.1-0.20230925193239-ccd6b0e95a6a/go.mod h1:UT0HyK+PbVxjduiWXEYi1mODDynTaoTRHJox7q8FIKk=
 golang.org/x/vuln v1.0.1 h1:KUas02EjQK5LTuIx1OylBQdKKZ9jeugs+HiqO5HormU=
 golang.org/x/vuln v1.0.1/go.mod h1:bb2hMwln/tqxg32BNY4CcxHWtHXuYa3SbIBmtsyjxtM=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
@@ -61,5 +93,6 @@
 honnef.co/go/tools v0.4.5/go.mod h1:GUV+uIBCLpdf0/v6UhHHG/yzI/z6qPskBeQCjcNB96k=
 mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM=
 mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ=
+mvdan.cc/unparam v0.0.0-20230312165513-e84e2d14e3b8/go.mod h1:Oh/d7dEtzsNHGOq1Cdv8aMm3KdKhVvPbRQcM8WFpBR8=
 mvdan.cc/xurls/v2 v2.4.0 h1:tzxjVAj+wSBmDcF6zBB7/myTy3gX9xvi8Tyr28AuQgc=
 mvdan.cc/xurls/v2 v2.4.0/go.mod h1:+GEjq9uNjqs8LQfM9nVnM8rff0OQ5Iash5rzX+N1CSg=
diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go
index 438da16..e0be99b 100644
--- a/gopls/internal/lsp/cache/check.go
+++ b/gopls/internal/lsp/cache/check.go
@@ -323,9 +323,6 @@
 //
 // Both pre and post may be called concurrently.
 func (s *snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preTypeCheck, post postTypeCheck) error {
-	s.typeCheckMu.Lock()
-	defer s.typeCheckMu.Unlock()
-
 	ctx, done := event.Start(ctx, "cache.forEachPackage", tag.PackageCount.Of(len(ids)))
 	defer done()
 
diff --git a/gopls/internal/lsp/cache/snapshot.go b/gopls/internal/lsp/cache/snapshot.go
index 9d659f0..6fc69bd 100644
--- a/gopls/internal/lsp/cache/snapshot.go
+++ b/gopls/internal/lsp/cache/snapshot.go
@@ -176,18 +176,6 @@
 	ignoreFilterOnce sync.Once
 	ignoreFilter     *ignoreFilter
 
-	// typeCheckMu guards type checking.
-	//
-	// Only one type checking pass should be running at a given time, for two reasons:
-	//  1. type checking batches are optimized to use all available processors.
-	//     Generally speaking, running two type checking batches serially is about
-	//     as fast as running them in parallel.
-	//  2. type checking produces cached artifacts that may be re-used by the
-	//     next type-checking batch: the shared import graph and the set of
-	//     active packages. Running type checking batches in parallel after an
-	//     invalidation can cause redundant calculation of this shared state.
-	typeCheckMu sync.Mutex
-
 	// options holds the user configuration at the time this snapshot was
 	// created.
 	options *source.Options
diff --git a/gopls/internal/lsp/command/interface.go b/gopls/internal/lsp/command/interface.go
index 603e612..c3bd921 100644
--- a/gopls/internal/lsp/command/interface.go
+++ b/gopls/internal/lsp/command/interface.go
@@ -37,6 +37,7 @@
 	//
 	// Applies a fix to a region of source code.
 	ApplyFix(context.Context, ApplyFixArgs) error
+
 	// Test: Run test(s) (legacy)
 	//
 	// Runs `go test` for a specific set of test or benchmark functions.
diff --git a/gopls/internal/lsp/definition.go b/gopls/internal/lsp/definition.go
index a438beb..892e48d 100644
--- a/gopls/internal/lsp/definition.go
+++ b/gopls/internal/lsp/definition.go
@@ -6,7 +6,6 @@
 
 import (
 	"context"
-	"errors"
 	"fmt"
 
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
@@ -36,11 +35,6 @@
 	case source.Tmpl:
 		return template.Definition(snapshot, fh, params.Position)
 	case source.Go:
-		// Partial support for jumping from linkname directive (position at 2nd argument).
-		locations, err := source.LinknameDefinition(ctx, snapshot, fh, params.Position)
-		if !errors.Is(err, source.ErrNoLinkname) {
-			return locations, err
-		}
 		return source.Definition(ctx, snapshot, fh, params.Position)
 	default:
 		return nil, fmt.Errorf("can't find definitions for file type %s", kind)
diff --git a/gopls/internal/lsp/fake/editor.go b/gopls/internal/lsp/fake/editor.go
index 12d07a7..ccee51e 100644
--- a/gopls/internal/lsp/fake/editor.go
+++ b/gopls/internal/lsp/fake/editor.go
@@ -235,9 +235,9 @@
 		// asynchronous operations being completed (such as diagnosing a snapshot).
 		"verboseWorkDoneProgress": true,
 
-		// Set a generous completion budget, so that tests don't flake because
+		// Set an unlimited completion budget, so that tests don't flake because
 		// completions are too slow.
-		"completionBudget": "10s",
+		"completionBudget": "0s",
 	}
 
 	for k, v := range config.Settings {
diff --git a/gopls/internal/lsp/prompt.go b/gopls/internal/lsp/prompt.go
index 1e63337..976f7c6 100644
--- a/gopls/internal/lsp/prompt.go
+++ b/gopls/internal/lsp/prompt.go
@@ -198,8 +198,8 @@
 	if s.Options().LinkifyShowMessage {
 		prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at [telemetry.go.dev/privacy](https://telemetry.go.dev/privacy).
 
-	Would you like to enable Go telemetry?
-	`
+Would you like to enable Go telemetry?
+`
 	}
 	// TODO(rfindley): investigate a "tell me more" action in combination with ShowDocument.
 	params := &protocol.ShowMessageRequestParams{
@@ -258,24 +258,15 @@
 }
 
 func telemetryOnMessage(linkify bool) string {
-	reportDate := time.Now().AddDate(0, 0, 7).Format("2006-01-02")
-	format := `Telemetry uploading is now enabled and may be sent to https://telemetry.go.dev starting %s. Uploaded data is used to help improve the Go toolchain and related tools, and it will be published as part of a public dataset.
-
-For more details, see https://telemetry.go.dev/privacy.
-This data is collected in accordance with the Google Privacy Policy (https://policies.google.com/privacy).
+	format := `Thank you. Telemetry uploading is now enabled.
 
 To disable telemetry uploading, run %s.
 `
+	var runCmd = "`go run golang.org/x/telemetry/cmd/gotelemetry@latest off`"
 	if linkify {
-		format = `Telemetry uploading is now enabled and may be sent to [telemetry.go.dev](https://telemetry.go.dev) starting %s. Uploaded data is used to help improve the Go toolchain and related tools, and it will be published as part of a public dataset.
-
-For more details, see [telemetry.go.dev/privacy](https://telemetry.go.dev/privacy).
-This data is collected in accordance with the [Google Privacy Policy](https://policies.google.com/privacy).
-
-To disable telemetry uploading, run [%s](https://golang.org/x/telemetry/cmd/gotelemetry).
-`
+		runCmd = "[gotelemetry off](https://golang.org/x/telemetry/cmd/gotelemetry)"
 	}
-	return fmt.Sprintf(format, reportDate, "`go run golang.org/x/telemetry/cmd/gotelemetry@latest off`")
+	return fmt.Sprintf(format, runCmd)
 }
 
 // acquireLockFile attempts to "acquire a lock" for writing to path.
diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go
index f6d462a..f6c1d4c 100644
--- a/gopls/internal/lsp/regtest/marker.go
+++ b/gopls/internal/lsp/regtest/marker.go
@@ -232,8 +232,11 @@
 //     request at the given location, and verifies that each expected
 //     completion item occurs in the results, in the expected order. Other
 //     unexpected completion items may occur in the results.
-//     TODO(rfindley): this should accept a slice of labels, rather than
-//     completion items.
+//     TODO(rfindley): this exists for compatibility with the old marker tests.
+//     Replace this with rankl, and rename.
+//
+//   - rankl(location, ...label): like rank, but only cares about completion
+//     item labels.
 //
 //   - refs(location, want ...location): executes a textDocument/references
 //     request at the first location and asserts that the result is the set of
@@ -712,6 +715,7 @@
 	"hover":            actionMarkerFunc(hoverMarker),
 	"implementation":   actionMarkerFunc(implementationMarker),
 	"rank":             actionMarkerFunc(rankMarker),
+	"rankl":            actionMarkerFunc(ranklMarker),
 	"refs":             actionMarkerFunc(refsMarker),
 	"rename":           actionMarkerFunc(renameMarker),
 	"renameerr":        actionMarkerFunc(renameErrMarker),
@@ -1450,6 +1454,23 @@
 	}
 }
 
+func ranklMarker(mark marker, src protocol.Location, labels ...string) {
+	list := mark.run.env.Completion(src)
+	var got []string
+	// Collect results that are present in items, preserving their order.
+	for _, g := range list.Items {
+		for _, label := range labels {
+			if g.Label == label {
+				got = append(got, g.Label)
+				break
+			}
+		}
+	}
+	if diff := cmp.Diff(labels, got); diff != "" {
+		mark.errorf("completion rankings do not match (-want +got):\n%s", diff)
+	}
+}
+
 func snippetMarker(mark marker, src protocol.Location, item completionItem, want string) {
 	list := mark.run.env.Completion(src)
 	var (
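For orientation, a minimal use of the new rankl marker (taken from the issue62560 test added later in this CL) looks like:

	json.U //@rankl(re"U()", "Unmarshal", "InvalidUTF8Error")

which asserts that both labels occur among the completion items at json.U and that "Unmarshal" is ranked ahead of "InvalidUTF8Error".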
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
index 45ebabb..0fd6b07 100644
--- a/gopls/internal/lsp/source/api_json.go
+++ b/gopls/internal/lsp/source/api_json.go
@@ -147,6 +147,13 @@
 				Hierarchy: "ui.completion",
 			},
 			{
+				Name:      "completeFunctionCalls",
+				Type:      "bool",
+				Doc:       "completeFunctionCalls enables function call completion.\n\nWhen completing a statement, or when a function return type matches the\nexpected type of the expression being completed, completion may suggest call\nexpressions (i.e. may include parentheses).\n",
+				Default:   "true",
+				Hierarchy: "ui.completion",
+			},
+			{
 				Name: "importShortcut",
 				Type: "enum",
 				Doc:  "importShortcut specifies whether import statements should link to\ndocumentation or go to definitions.\n",
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
index 594f234..4044d84 100644
--- a/gopls/internal/lsp/source/completion/completion.go
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -104,15 +104,16 @@
 
 // completionOptions holds completion specific configuration.
 type completionOptions struct {
-	unimported        bool
-	documentation     bool
-	fullDocumentation bool
-	placeholders      bool
-	literal           bool
-	snippets          bool
-	postfix           bool
-	matcher           source.Matcher
-	budget            time.Duration
+	unimported            bool
+	documentation         bool
+	fullDocumentation     bool
+	placeholders          bool
+	literal               bool
+	snippets              bool
+	postfix               bool
+	matcher               source.Matcher
+	budget                time.Duration
+	completeFunctionCalls bool
 }
 
 // Snippet is a convenience returns the snippet if available, otherwise
@@ -543,15 +544,16 @@
 			enabled: opts.DeepCompletion,
 		},
 		opts: &completionOptions{
-			matcher:           opts.Matcher,
-			unimported:        opts.CompleteUnimported,
-			documentation:     opts.CompletionDocumentation && opts.HoverKind != source.NoDocumentation,
-			fullDocumentation: opts.HoverKind == source.FullDocumentation,
-			placeholders:      opts.UsePlaceholders,
-			literal:           opts.LiteralCompletions && opts.InsertTextFormat == protocol.SnippetTextFormat,
-			budget:            opts.CompletionBudget,
-			snippets:          opts.InsertTextFormat == protocol.SnippetTextFormat,
-			postfix:           opts.ExperimentalPostfixCompletions,
+			matcher:               opts.Matcher,
+			unimported:            opts.CompleteUnimported,
+			documentation:         opts.CompletionDocumentation && opts.HoverKind != source.NoDocumentation,
+			fullDocumentation:     opts.HoverKind == source.FullDocumentation,
+			placeholders:          opts.UsePlaceholders,
+			literal:               opts.LiteralCompletions && opts.InsertTextFormat == protocol.SnippetTextFormat,
+			budget:                opts.CompletionBudget,
+			snippets:              opts.InsertTextFormat == protocol.SnippetTextFormat,
+			postfix:               opts.ExperimentalPostfixCompletions,
+			completeFunctionCalls: opts.CompleteFunctionCalls,
 		},
 		// default to a matcher that always matches
 		matcher:        prefixMatcher(""),
@@ -596,6 +598,17 @@
 	// Deep search collected candidates and their members for more candidates.
 	c.deepSearch(ctx, 1, deadline)
 
+	// At this point we have a sufficiently complete set of results, and want to
+	// return as close to the completion budget as possible. Previously, we
+	// avoided cancelling the context because it could result in partial results
+	// for e.g. struct fields. At this point, we have a minimal valid set of
+	// candidates, and so truncating due to context cancellation is acceptable.
+	if c.opts.budget > 0 {
+		timeoutDuration := time.Until(c.startTime.Add(c.opts.budget))
+		ctx, cancel = context.WithTimeout(ctx, timeoutDuration)
+		defer cancel()
+	}
+
 	for _, callback := range c.completionCallbacks {
 		if deadline == nil || time.Now().Before(*deadline) {
 			if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil {
@@ -1283,7 +1296,7 @@
 				Label:      id.Name,
 				Detail:     fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), m.PkgPath),
 				InsertText: id.Name,
-				Score:      unimportedScore(relevances[path]),
+				Score:      float64(score) * unimportedScore(relevances[path]),
 			}
 			switch tok {
 			case token.FUNC:
@@ -1307,32 +1320,18 @@
 
 			// For functions, add a parameter snippet.
 			if fn != nil {
-				var sn snippet.Builder
-				sn.WriteText(id.Name)
-
-				paramList := func(open, close string, list *ast.FieldList) {
+				paramList := func(list *ast.FieldList) []string {
+					var params []string
 					if list != nil {
 						var cfg printer.Config // slight overkill
-						var nparams int
 						param := func(name string, typ ast.Expr) {
-							if nparams > 0 {
-								sn.WriteText(", ")
-							}
-							nparams++
-							if c.opts.placeholders {
-								sn.WritePlaceholder(func(b *snippet.Builder) {
-									var buf strings.Builder
-									buf.WriteString(name)
-									buf.WriteByte(' ')
-									cfg.Fprint(&buf, token.NewFileSet(), typ)
-									b.WriteText(buf.String())
-								})
-							} else {
-								sn.WriteText(name)
-							}
+							var buf strings.Builder
+							buf.WriteString(name)
+							buf.WriteByte(' ')
+							cfg.Fprint(&buf, token.NewFileSet(), typ)
+							params = append(params, buf.String())
 						}
 
-						sn.WriteText(open)
 						for _, field := range list.List {
 							if field.Names != nil {
 								for _, name := range field.Names {
@@ -1342,13 +1341,14 @@
 								param("_", field.Type)
 							}
 						}
-						sn.WriteText(close)
 					}
+					return params
 				}
 
-				paramList("[", "]", typeparams.ForFuncType(fn.Type))
-				paramList("(", ")", fn.Type.Params)
-
+				tparams := paramList(fn.Type.TypeParams)
+				params := paramList(fn.Type.Params)
+				var sn snippet.Builder
+				c.functionCallSnippet(id.Name, tparams, params, &sn)
 				item.snippet = &sn
 			}
 
@@ -1381,6 +1381,10 @@
 	ctx, cancel := context.WithCancel(ctx)
 	var mu sync.Mutex
 	add := func(pkgExport imports.PackageExport) {
+		if ignoreUnimportedCompletion(pkgExport.Fix) {
+			return
+		}
+
 		mu.Lock()
 		defer mu.Unlock()
 		// TODO(adonovan): what if the actual package has a vendor/ prefix?
@@ -1432,6 +1436,13 @@
 	}
 }
 
+// ignoreUnimportedCompletion reports whether an unimported completion
+// resulting in the given import should be ignored.
+func ignoreUnimportedCompletion(fix *imports.ImportFix) bool {
+	// golang/go#60062: don't add unimported completion to golang.org/toolchain.
+	return fix != nil && strings.HasPrefix(fix.StmtInfo.ImportPath, "golang.org/toolchain")
+}
+
 func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) {
 	mset := c.methodSetCache[methodSetKey{typ, addressable}]
 	if mset == nil {
@@ -1744,6 +1755,9 @@
 
 	var mu sync.Mutex
 	add := func(pkg imports.ImportFix) {
+		if ignoreUnimportedCompletion(&pkg) {
+			return
+		}
 		mu.Lock()
 		defer mu.Unlock()
 		if _, ok := seen[pkg.IdentName]; ok {
diff --git a/gopls/internal/lsp/source/completion/snippet.go b/gopls/internal/lsp/source/completion/snippet.go
index f4ea767..2be485f 100644
--- a/gopls/internal/lsp/source/completion/snippet.go
+++ b/gopls/internal/lsp/source/completion/snippet.go
@@ -51,6 +51,11 @@
 
 // functionCallSnippet calculates the snippet for function calls.
 func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) {
+	if !c.opts.completeFunctionCalls {
+		snip.WriteText(name)
+		return
+	}
+
 	// If there is no suffix then we need to reuse existing call parens
 	// "()" if present. If there is an identifier suffix then we always
 	// need to include "()" since we don't overwrite the suffix.
diff --git a/gopls/internal/lsp/source/definition.go b/gopls/internal/lsp/source/definition.go
index 90a4329..dd3feda 100644
--- a/gopls/internal/lsp/source/definition.go
+++ b/gopls/internal/lsp/source/definition.go
@@ -6,6 +6,7 @@
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"go/ast"
 	"go/token"
@@ -58,6 +59,18 @@
 		return []protocol.Location{loc}, nil
 	}
 
+	// Handle the case where the cursor is in a linkname directive.
+	locations, err := LinknameDefinition(ctx, snapshot, fh, position)
+	if !errors.Is(err, ErrNoLinkname) {
+		return locations, err
+	}
+
+	// Handle the case where the cursor is in an embed directive.
+	locations, err = EmbedDefinition(pgf.Mapper, position)
+	if !errors.Is(err, ErrNoEmbed) {
+		return locations, err
+	}
+
 	// The general case: the cursor is on an identifier.
 	_, obj, _ := referencedObject(pkg, pgf, pos)
 	if obj == nil {
diff --git a/gopls/internal/lsp/source/embeddirective.go b/gopls/internal/lsp/source/embeddirective.go
new file mode 100644
index 0000000..d4e85d7
--- /dev/null
+++ b/gopls/internal/lsp/source/embeddirective.go
@@ -0,0 +1,195 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+	"errors"
+	"fmt"
+	"io/fs"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"unicode"
+	"unicode/utf8"
+
+	"golang.org/x/tools/gopls/internal/lsp/protocol"
+)
+
+// ErrNoEmbed is returned by EmbedDefinition when no embed
+// directive is found at a particular position.
+// As such it indicates that other definitions could be worth checking.
+var ErrNoEmbed = errors.New("no embed directive found")
+
+var errStopWalk = errors.New("stop walk")
+
+// EmbedDefinition finds a file matching the embed directive at pos in the mapped file.
+// If there is no embed directive at pos, returns ErrNoEmbed.
+// If multiple files match the embed pattern, the first match found is used.
+func EmbedDefinition(m *protocol.Mapper, pos protocol.Position) ([]protocol.Location, error) {
+	pattern, _ := parseEmbedDirective(m, pos)
+	if pattern == "" {
+		return nil, ErrNoEmbed
+	}
+
+	// Find the first matching file.
+	var match string
+	dir := filepath.Dir(m.URI.Filename())
+	err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error {
+		if e != nil {
+			return e
+		}
+		rel, err := filepath.Rel(dir, abs)
+		if err != nil {
+			return err
+		}
+		ok, err := filepath.Match(pattern, rel)
+		if err != nil {
+			return err
+		}
+		if ok && !d.IsDir() {
+			match = abs
+			return errStopWalk
+		}
+		return nil
+	})
+	if err != nil && !errors.Is(err, errStopWalk) {
+		return nil, err
+	}
+	if match == "" {
+		return nil, fmt.Errorf("%q does not match any files in %q", pattern, dir)
+	}
+
+	loc := protocol.Location{
+		URI: protocol.URIFromPath(match),
+		Range: protocol.Range{
+			Start: protocol.Position{Line: 0, Character: 0},
+		},
+	}
+	return []protocol.Location{loc}, nil
+}
+
+// parseEmbedDirective attempts to parse a go:embed directive argument at pos.
+// If successful it return the directive argument and its range, else zero values are returned.
+func parseEmbedDirective(m *protocol.Mapper, pos protocol.Position) (string, protocol.Range) {
+	lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0})
+	if err != nil {
+		return "", protocol.Range{}
+	}
+	lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0})
+	if err != nil {
+		return "", protocol.Range{}
+	}
+
+	text := string(m.Content[lineStart:lineEnd])
+	if !strings.HasPrefix(text, "//go:embed") {
+		return "", protocol.Range{}
+	}
+	text = text[len("//go:embed"):]
+	offset := lineStart + len("//go:embed")
+
+	// Find the first pattern in text that covers the offset of the pos we are looking for.
+	findOffset, err := m.PositionOffset(pos)
+	if err != nil {
+		return "", protocol.Range{}
+	}
+	patterns, err := parseGoEmbed(text, offset)
+	if err != nil {
+		return "", protocol.Range{}
+	}
+	for _, p := range patterns {
+		if p.startOffset <= findOffset && findOffset <= p.endOffset {
+			// Found our match.
+			rng, err := m.OffsetRange(p.startOffset, p.endOffset)
+			if err != nil {
+				return "", protocol.Range{}
+			}
+			return p.pattern, rng
+		}
+	}
+
+	return "", protocol.Range{}
+}
+
+type fileEmbed struct {
+	pattern     string
+	startOffset int
+	endOffset   int
+}
+
+// parseGoEmbed parses the patterns that come after the directive.
+//
+// Copied and adapted from go/build/read.go.
+// Replaced token.Position with start/end offset (including quotes if present).
+func parseGoEmbed(args string, offset int) ([]fileEmbed, error) {
+	trimBytes := func(n int) {
+		offset += n
+		args = args[n:]
+	}
+	trimSpace := func() {
+		trim := strings.TrimLeftFunc(args, unicode.IsSpace)
+		trimBytes(len(args) - len(trim))
+	}
+
+	var list []fileEmbed
+	for trimSpace(); args != ""; trimSpace() {
+		var path string
+		pathOffset := offset
+	Switch:
+		switch args[0] {
+		default:
+			i := len(args)
+			for j, c := range args {
+				if unicode.IsSpace(c) {
+					i = j
+					break
+				}
+			}
+			path = args[:i]
+			trimBytes(i)
+
+		case '`':
+			var ok bool
+			path, _, ok = strings.Cut(args[1:], "`")
+			if !ok {
+				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+			}
+			trimBytes(1 + len(path) + 1)
+
+		case '"':
+			i := 1
+			for ; i < len(args); i++ {
+				if args[i] == '\\' {
+					i++
+					continue
+				}
+				if args[i] == '"' {
+					q, err := strconv.Unquote(args[:i+1])
+					if err != nil {
+						return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
+					}
+					path = q
+					trimBytes(i + 1)
+					break Switch
+				}
+			}
+			if i >= len(args) {
+				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+			}
+		}
+
+		if args != "" {
+			r, _ := utf8.DecodeRuneInString(args)
+			if !unicode.IsSpace(r) {
+				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+			}
+		}
+		list = append(list, fileEmbed{
+			pattern:     path,
+			startOffset: pathOffset,
+			endOffset:   offset,
+		})
+	}
+	return list, nil
+}
diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go
index a683075..9531783 100644
--- a/gopls/internal/lsp/source/hover.go
+++ b/gopls/internal/lsp/source/hover.go
@@ -14,6 +14,8 @@
 	"go/format"
 	"go/token"
 	"go/types"
+	"io/fs"
+	"path/filepath"
 	"strconv"
 	"strings"
 	"time"
@@ -121,6 +123,12 @@
 		}
 	}
 
+	// Handle hovering over embed directive argument.
+	pattern, embedRng := parseEmbedDirective(pgf.Mapper, pp)
+	if pattern != "" {
+		return hoverEmbed(fh, embedRng, pattern)
+	}
+
 	// Handle linkname directive by overriding what to look for.
 	var linkedRange *protocol.Range // range referenced by linkname directive, or nil
 	if pkgPath, name, offset := parseLinkname(ctx, snapshot, fh, pp); pkgPath != "" && name != "" {
@@ -625,6 +633,48 @@
 	}, nil
 }
 
+// hoverEmbed computes hover information for a filepath.Match pattern.
+// Assumes that the pattern is relative to the location of fh.
+func hoverEmbed(fh FileHandle, rng protocol.Range, pattern string) (protocol.Range, *HoverJSON, error) {
+	s := &strings.Builder{}
+
+	dir := filepath.Dir(fh.URI().Filename())
+	var matches []string
+	err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error {
+		if e != nil {
+			return e
+		}
+		rel, err := filepath.Rel(dir, abs)
+		if err != nil {
+			return err
+		}
+		ok, err := filepath.Match(pattern, rel)
+		if err != nil {
+			return err
+		}
+		if ok && !d.IsDir() {
+			matches = append(matches, rel)
+		}
+		return nil
+	})
+	if err != nil {
+		return protocol.Range{}, nil, err
+	}
+
+	for _, m := range matches {
+		// TODO: this renders each file as a separate markdown paragraph.
+		// If forcing a single newline is possible, it might be clearer.
+		fmt.Fprintf(s, "%s\n\n", m)
+	}
+
+	json := &HoverJSON{
+		Signature:         fmt.Sprintf("Embedding %q", pattern),
+		Synopsis:          s.String(),
+		FullDocumentation: s.String(),
+	}
+	return rng, json, nil
+}
+
 // inferredSignatureString is a wrapper around the types.ObjectString function
 // that adds more information to inferred signatures. It will return an empty string
 // if the passed types.Object is not a signature.
diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go
index 0d610a4..ec544fc 100644
--- a/gopls/internal/lsp/source/options.go
+++ b/gopls/internal/lsp/source/options.go
@@ -155,6 +155,7 @@
 						Matcher:                        Fuzzy,
 						CompletionBudget:               100 * time.Millisecond,
 						ExperimentalPostfixCompletions: true,
+						CompleteFunctionCalls:          true,
 					},
 					Codelenses: map[string]bool{
 						string(command.Generate):          true,
@@ -388,6 +389,13 @@
 	// ExperimentalPostfixCompletions enables artificial method snippets
 	// such as "someSlice.sort!".
 	ExperimentalPostfixCompletions bool `status:"experimental"`
+
+	// CompleteFunctionCalls enables function call completion.
+	//
+	// When completing a statement, or when a function return type matches the
+	// expected type of the expression being completed, completion may suggest call
+	// expressions (i.e. may include parentheses).
+	CompleteFunctionCalls bool
 }
 
 type DocumentationOptions struct {
@@ -1186,6 +1194,8 @@
 					" rebuild gopls with a more recent version of Go", result.Name, runtime.Version())
 			}
 		}
+	case "completeFunctionCalls":
+		result.setBool(&o.CompleteFunctionCalls)
 
 	case "semanticTokens":
 		result.setBool(&o.SemanticTokens)
diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go
index 3949578..81300eb 100644
--- a/gopls/internal/regtest/completion/completion_test.go
+++ b/gopls/internal/regtest/completion/completion_test.go
@@ -952,3 +952,54 @@
 		}
 	})
 }
+
+// Fix for golang/go#60062: unimported completion included "golang.org/toolchain" results.
+func TestToolchainCompletions(t *testing.T) {
+	const files = `
+-- go.mod --
+module foo.test/foo
+
+go 1.21
+
+-- foo.go --
+package foo
+
+func _() {
+	os.Open
+}
+
+func _() {
+	strings
+}
+`
+
+	const proxy = `
+-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/go.mod --
+module golang.org/toolchain
+-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/os/os.go --
+package os
+
+func Open() {}
+-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/strings/strings.go --
+package strings
+
+func Join() {}
+`
+
+	WithOptions(
+		ProxyFiles(proxy),
+	).Run(t, files, func(t *testing.T, env *Env) {
+		env.RunGoCommand("mod", "download", "golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64")
+		env.OpenFile("foo.go")
+
+		for _, pattern := range []string{"os.Open()", "string()"} {
+			loc := env.RegexpSearch("foo.go", pattern)
+			res := env.Completion(loc)
+			for _, item := range res.Items {
+				if strings.Contains(item.Detail, "golang.org/toolchain") {
+					t.Errorf("Completion(...) returned toolchain item %#v", item)
+				}
+			}
+		}
+	})
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/inline.txt b/gopls/internal/regtest/marker/testdata/codeaction/inline.txt
index 134065f..15d3cab 100644
--- a/gopls/internal/regtest/marker/testdata/codeaction/inline.txt
+++ b/gopls/internal/regtest/marker/testdata/codeaction/inline.txt
@@ -17,7 +17,7 @@
 package a
 
 func _() {
-	println((1 + 2)) //@codeaction("refactor.inline", "add", ")", inline)
+	println(1 + 2) //@codeaction("refactor.inline", "add", ")", inline)
 }
 
 func add(x, y int) int { return x + y }
diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62560.txt b/gopls/internal/regtest/marker/testdata/completion/issue62560.txt
new file mode 100644
index 0000000..89763fe
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/completion/issue62560.txt
@@ -0,0 +1,19 @@
+This test verifies that completion of package members in unimported packages
+reflects their fuzzy score, even when those members are present in the
+transitive import graph of the main module. (For technical reasons, this was
+the nature of the regression in golang/go#62560.)
+
+-- go.mod --
+module mod.test
+
+-- foo/foo.go --
+package foo
+
+func _() {
+	json.U //@rankl(re"U()", "Unmarshal", "InvalidUTF8Error"), diag("json", re"(undefined|undeclared)")
+}
+
+-- bar/bar.go --
+package bar
+
+import _ "encoding/json"
diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62676.txt b/gopls/internal/regtest/marker/testdata/completion/issue62676.txt
new file mode 100644
index 0000000..af4c3b6
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/completion/issue62676.txt
@@ -0,0 +1,63 @@
+This test verifies that unimported completion respects the usePlaceholders setting.
+
+-- flags --
+-ignore_extra_diags
+
+-- settings.json --
+{
+	"usePlaceholders": false
+}
+
+-- go.mod --
+module mod.test
+
+go 1.21
+
+-- foo/foo.go --
+package foo
+
+func _() {
+	// This uses goimports-based completion; TODO: this should insert snippets.
+	os.Open //@acceptcompletion(re"Open()", "Open", open)
+}
+
+func _() {
+	// This uses metadata-based completion.
+	errors.New //@acceptcompletion(re"New()", "New", new)
+}
+
+-- bar/bar.go --
+package bar
+
+import _ "errors" // important: doesn't transitively import os.
+
+-- @new/foo/foo.go --
+package foo
+
+import "errors"
+
+func _() {
+	// This uses goimports-based completion; TODO: this should insert snippets.
+	os.Open //@acceptcompletion(re"Open()", "Open", open)
+}
+
+func _() {
+	// This uses metadata-based completion.
+	errors.New(${1:}) //@acceptcompletion(re"New()", "New", new)
+}
+
+-- @open/foo/foo.go --
+package foo
+
+import "os"
+
+func _() {
+	// This uses goimports-based completion; TODO: this should insert snippets.
+	os.Open //@acceptcompletion(re"Open()", "Open", open)
+}
+
+func _() {
+	// This uses metadata-based completion.
+	errors.New //@acceptcompletion(re"New()", "New", new)
+}
+
diff --git a/gopls/internal/regtest/misc/definition_test.go b/gopls/internal/regtest/misc/definition_test.go
index f11b207..d16539f 100644
--- a/gopls/internal/regtest/misc/definition_test.go
+++ b/gopls/internal/regtest/misc/definition_test.go
@@ -529,3 +529,43 @@
 		}
 	})
 }
+
+const embedDefinition = `
+-- go.mod --
+module mod.com
+
+-- main.go --
+package main
+
+import (
+	"embed"
+)
+
+//go:embed *.txt
+var foo embed.FS
+
+func main() {}
+
+-- skip.sql --
+SKIP
+
+-- foo.txt --
+FOO
+
+-- skip.bat --
+SKIP
+`
+
+func TestGoToEmbedDefinition(t *testing.T) {
+	Run(t, embedDefinition, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+
+		start := env.RegexpSearch("main.go", `\*.txt`)
+		loc := env.GoToDefinition(start)
+
+		name := env.Sandbox.Workdir.URIToPath(loc.URI)
+		if want := "foo.txt"; name != want {
+			t.Errorf("GoToDefinition: got file %q, want %q", name, want)
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/regtest/misc/hover_test.go
index 9b2d86e..7b84f8a 100644
--- a/gopls/internal/regtest/misc/hover_test.go
+++ b/gopls/internal/regtest/misc/hover_test.go
@@ -435,3 +435,59 @@
 		_, _, _ = env.Editor.Hover(env.Ctx, env.RegexpSearch("go.work", "modb"))
 	})
 }
+
+const embedHover = `
+-- go.mod --
+module mod.com
+go 1.19
+-- main.go --
+package main
+
+import "embed"
+
+//go:embed *.txt
+var foo embed.FS
+
+func main() {
+}
+-- foo.txt --
+FOO
+-- bar.txt --
+BAR
+-- baz.txt --
+BAZ
+-- other.sql --
+SKIPPED
+-- dir.txt/skip.txt --
+SKIPPED
+`
+
+func TestHoverEmbedDirective(t *testing.T) {
+	testenv.NeedsGo1Point(t, 19)
+	Run(t, embedHover, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		from := env.RegexpSearch("main.go", `\*.txt`)
+
+		got, _ := env.Hover(from)
+		if got == nil {
+			t.Fatalf("hover over //go:embed arg not found")
+		}
+		content := got.Value
+
+		wants := []string{"foo.txt", "bar.txt", "baz.txt"}
+		for _, want := range wants {
+			if !strings.Contains(content, want) {
+				t.Errorf("hover: %q does not contain: %q", content, want)
+			}
+		}
+
+		// A directory should never be matched, even if it happens to have a matching name.
+		// Content in subdirectories should not match on only one asterisk.
+		skips := []string{"other.sql", "dir.txt", "skip.txt"}
+		for _, skip := range skips {
+			if strings.Contains(content, skip) {
+				t.Errorf("hover: %q should not contain: %q", content, skip)
+			}
+		}
+	})
+}
diff --git a/internal/fuzzy/self_test.go b/internal/fuzzy/self_test.go
new file mode 100644
index 0000000..fae0aea
--- /dev/null
+++ b/internal/fuzzy/self_test.go
@@ -0,0 +1,39 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fuzzy_test
+
+import (
+	"testing"
+
+	. "golang.org/x/tools/internal/fuzzy"
+)
+
+func BenchmarkSelf_Matcher(b *testing.B) {
+	idents := collectIdentifiers(b)
+	patterns := generatePatterns()
+
+	for i := 0; i < b.N; i++ {
+		for _, pattern := range patterns {
+			sm := NewMatcher(pattern)
+			for _, ident := range idents {
+				_ = sm.Score(ident)
+			}
+		}
+	}
+}
+
+func BenchmarkSelf_SymbolMatcher(b *testing.B) {
+	idents := collectIdentifiers(b)
+	patterns := generatePatterns()
+
+	for i := 0; i < b.N; i++ {
+		for _, pattern := range patterns {
+			sm := NewSymbolMatcher(pattern)
+			for _, ident := range idents {
+				_, _ = sm.Match([]string{ident})
+			}
+		}
+	}
+}
diff --git a/internal/fuzzy/symbol.go b/internal/fuzzy/symbol.go
index bf93041..5fe2ce3 100644
--- a/internal/fuzzy/symbol.go
+++ b/internal/fuzzy/symbol.go
@@ -5,6 +5,9 @@
 package fuzzy
 
 import (
+	"bytes"
+	"fmt"
+	"log"
 	"unicode"
 )
 
@@ -36,10 +39,12 @@
 	segments    [256]uint8  // how many segments from the right is each rune
 }
 
+// Rune roles.
 const (
-	segmentStart uint32 = 1 << iota
-	wordStart
-	separator
+	segmentStart uint32 = 1 << iota // input rune starts a segment (i.e. follows '/' or '.')
+	wordStart                       // input rune starts a word, per camel-case naming rules
+	separator                       // input rune is a separator ('/' or '.')
+	upper                           // input rune is an upper case letter
 )
 
 // NewSymbolMatcher creates a SymbolMatcher that may be used to match the given
@@ -61,17 +66,17 @@
 	return m
 }
 
-// Match looks for the right-most match of the search pattern within the symbol
-// represented by concatenating the given chunks, returning its offset and
-// score.
+// Match searches for the right-most match of the search pattern within the
+// symbol represented by concatenating the given chunks.
 //
-// If a match is found, the first return value will hold the absolute byte
-// offset within all chunks for the start of the symbol. In other words, the
-// index of the match within strings.Join(chunks, ""). If no match is found,
-// the first return value will be -1.
+// If a match is found, the first result holds the absolute byte offset within
+// all chunks for the start of the symbol. In other words, the index of the
+// match within strings.Join(chunks, "").
 //
 // The second return value will be the score of the match, which is always
 // between 0 and 1, inclusive. A score of 0 indicates no match.
+//
+// If no match is found, Match returns (-1, 0).
 func (m *SymbolMatcher) Match(chunks []string) (int, float64) {
 	// Explicit behavior for an empty pattern.
 	//
@@ -81,11 +86,25 @@
 		return -1, 0
 	}
 
-	// First phase: populate the input buffer with lower-cased runes.
+	// Matching implements a heavily optimized linear scoring algorithm on the
+	// input. This is not guaranteed to produce the highest score, but works well
+	// enough, particularly due to the right-to-left significance of qualified
+	// symbols.
+	//
+	// Matching proceeds in three passes through the input:
+	//  - The first pass populates the input buffer and collects rune roles.
+	//  - The second pass proceeds right-to-left to find the right-most match.
+	//  - The third pass proceeds left-to-right from the start of the right-most
+	//    match, to find the most *compact* match, and computes the score of this
+	//    match.
+	//
+	// See below for more details of each pass, as well as the scoring algorithm.
+
+	// First pass: populate the input buffer out of the provided chunks
+	// (lower-casing in the process), and collect rune roles.
 	//
 	// We could also check for a forward match here, but since we'd have to write
 	// the entire input anyway this has negligible impact on performance.
-
 	var (
 		inputLen  = uint8(0)
 		modifiers = wordStart | segmentStart
@@ -107,7 +126,16 @@
 				l = unicode.ToLower(r)
 			}
 			if l != r {
-				modifiers |= wordStart
+				modifiers |= upper
+
+				// If the current rune is capitalized *and the preceding rune was not*,
+				// mark this as a word start. This avoids spuriously high ranking of
+				// non-camelcase naming schemas, such as the
+				// yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE example of
+				// golang/go#60201.
+				if inputLen == 0 || m.roles[inputLen-1]&upper == 0 {
+					modifiers |= wordStart
+				}
 			}
 			m.inputBuffer[inputLen] = l
 			m.roles[inputLen] = modifiers
@@ -125,14 +153,13 @@
 		}
 	}
 
-	// Second phase: find the right-most match, and count segments from the
+	// Second pass: find the right-most match, and count segments from the
 	// right.
-
 	var (
 		pi    = uint8(m.patternLen - 1) // pattern index
 		p     = m.pattern[pi]           // pattern rune
 		start = -1                      // start offset of match
-		rseg  = uint8(0)
+		rseg  = uint8(0)                // effective "depth" from the right of the current rune in consideration
 	)
 	const maxSeg = 3 // maximum number of segments from the right to count, for scoring purposes.
 
@@ -144,6 +171,8 @@
 		m.segments[ii] = rseg
 		if p == r {
 			if pi == 0 {
+				// TODO(rfindley): BUG: the docstring for Match says that it returns an
+				// absolute byte offset, but clearly it is returning a rune offset here.
 				start = int(ii)
 				break
 			}
@@ -161,85 +190,120 @@
 		return -1, 0
 	}
 
-	// Third phase: find the shortest match, and compute the score.
+	// Third pass: find the shortest match and compute the score.
 
-	// Score is the average score for each character.
+	// Score is the average score for each rune.
 	//
-	// A character score is the multiple of:
-	//   1. 1.0 if the character starts a segment or is preceded by a matching
-	//      character, 0.9 if the character starts a mid-segment word, else 0.6.
+	// A rune score is the multiple of:
+	//   1. The base score, which is 1.0 if the rune starts a segment, 0.9 if the
+	//      rune starts a mid-segment word, else 0.6.
 	//
-	//      Note that characters preceded by a matching character get the max
-	//      score of 1.0 so that sequential or exact matches are preferred, even
-	//      if they don't start/end at a segment or word boundary. For example, a
-	//      match for "func" in intfuncs should have a higher score than in
-	//      ifunmatched.
+	//      Runes preceded by a matching rune are treated the same as the start
+	//      of a mid-segment word (with a 0.9 score), so that sequential or exact
+	//      matches are preferred. We call this a sequential bonus.
 	//
-	//      For the final character match, the multiplier from (1) is reduced to
-	//      0.9 if the next character in the input is a mid-segment word, or 0.6
-	//      if the next character in the input is not a word or segment start.
-	//      This ensures that we favor whole-word or whole-segment matches over
-	//      prefix matches.
+	//      For the final rune match, this sequential bonus is reduced to 0.8 if
+	//      the next rune in the input is a mid-segment word, or 0.7 if the next
+	//      rune in the input is not a word or segment start. This ensures that
+	//      we favor whole-word or whole-segment matches over prefix matches.
 	//
-	//   2. 1.0 if the character is part of the last segment, otherwise
+	//   2. 1.0 if the rune is part of the last segment, otherwise
 	//      1.0-0.1*<segments from the right>, with a max segment count of 3.
 	//      Notably 1.0-0.1*3 = 0.7 > 0.6, so that foo/_/_/_/_ (a match very
-	//      early in a qualified symbol name) still scores higher than _f_o_o_
-	//      (a completely split match).
+	//      early in a qualified symbol name) still scores higher than _f_o_o_ (a
+	//      completely split match).
 	//
 	// This is a naive algorithm, but it is fast. There's lots of prior art here
 	// that could be leveraged. For example, we could explicitly consider
-	// character distance, and exact matches of words or segments.
+	// rune distance, and exact matches of words or segments.
 	//
 	// Also note that this might not actually find the highest scoring match, as
 	// doing so could require a non-linear algorithm, depending on how the score
 	// is calculated.
 
+	// debugging support
+	const debug = false // enable to log debugging information
+	var (
+		runeScores []float64
+		runeIdxs   []int
+	)
+
 	pi = 0
 	p = m.pattern[pi]
 
 	const (
-		segStreak  = 1.0 // start of segment or sequential match
-		wordStreak = 0.9 // start of word match
-		noStreak   = 0.6
-		perSegment = 0.1 // we count at most 3 segments above
+		segStartScore = 1.0 // base score of runes starting a segment
+		wordScore     = 0.9 // base score of runes starting or continuing a word
+		noStreak      = 0.6
+		perSegment    = 0.1 // we count at most 3 segments above
 	)
 
-	streakBonus := noStreak
 	totScore := 0.0
+	lastMatch := uint8(255)
 	for ii := uint8(start); ii < inputLen; ii++ {
 		r := m.inputBuffer[ii]
 		if r == p {
 			pi++
+			finalRune := pi >= m.patternLen
 			p = m.pattern[pi]
-			// Note: this could be optimized with some bit operations.
-			switch {
-			case m.roles[ii]&segmentStart != 0 && segStreak > streakBonus:
-				streakBonus = segStreak
-			case m.roles[ii]&wordStart != 0 && wordStreak > streakBonus:
-				streakBonus = wordStreak
-			}
-			finalChar := pi >= m.patternLen
-			// finalCost := 1.0
-			if finalChar && streakBonus > noStreak {
-				switch {
-				case ii == inputLen-1 || m.roles[ii+1]&segmentStart != 0:
-					// Full segment: no reduction
-				case m.roles[ii+1]&wordStart != 0:
-					streakBonus = wordStreak
-				default:
-					streakBonus = noStreak
+
+			baseScore := noStreak
+
+			// Calculate the sequence bonus based on preceding matches.
+			//
+			// We do this first as it is overridden by role scoring below.
+			if lastMatch == ii-1 {
+				baseScore = wordScore
+				// Reduce the sequence bonus for the final rune of the pattern based on
+				// whether it borders a new segment or word.
+				if finalRune {
+					switch {
+					case ii == inputLen-1 || m.roles[ii+1]&separator != 0:
+						// Full segment: no reduction
+					case m.roles[ii+1]&wordStart != 0:
+						baseScore = wordScore - 0.1
+					default:
+						baseScore = wordScore - 0.2
+					}
 				}
 			}
-			totScore += streakBonus * (1.0 - float64(m.segments[ii])*perSegment)
-			if finalChar {
+			lastMatch = ii
+
+			// Calculate the rune's role score. If the rune starts a segment or word,
+			// this overrides the sequence score, as the rune starts a new sequence.
+			switch {
+			case m.roles[ii]&segmentStart != 0:
+				baseScore = segStartScore
+			case m.roles[ii]&wordStart != 0:
+				baseScore = wordScore
+			}
+
+			// Apply the segment-depth penalty (segments from the right).
+			runeScore := baseScore * (1.0 - float64(m.segments[ii])*perSegment)
+			if debug {
+				runeScores = append(runeScores, runeScore)
+				runeIdxs = append(runeIdxs, int(ii))
+			}
+			totScore += runeScore
+			if finalRune {
 				break
 			}
-			streakBonus = segStreak // see above: sequential characters get the max score
-		} else {
-			streakBonus = noStreak
 		}
 	}
 
+	if debug {
+		// Format rune roles and scores in line:
+		// fo[o:.52].[b:1]a[r:.6]
+		var summary bytes.Buffer
+		last := 0
+		for i, idx := range runeIdxs {
+			summary.WriteString(string(m.inputBuffer[last:idx])) // encode runes
+			fmt.Fprintf(&summary, "[%s:%.2g]", string(m.inputBuffer[idx]), runeScores[i])
+			last = idx + 1
+		}
+		summary.WriteString(string(m.inputBuffer[last:inputLen])) // encode runes
+		log.Println(summary.String())
+	}
+
 	return start, totScore / float64(m.patternLen)
 }
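For orientation, the exported surface exercised by the tests below is just NewSymbolMatcher and Match. By the scoring rules described above, matching "test" against "foo.test" should score (1.0 + 0.9 + 0.9 + 0.9)/4 = 0.925: the leading 't' starts a segment and the remaining runes earn the sequential bonus, with no final-rune reduction because the match ends the symbol. A minimal test-style sketch (internal/fuzzy is an internal package, so this compiles only within x/tools; the assertion is deliberately loose rather than pinning an exact floating-point score):

	package fuzzy_test

	import (
		"testing"

		"golang.org/x/tools/internal/fuzzy"
	)

	func TestSymbolMatchSketch(t *testing.T) {
		m := fuzzy.NewSymbolMatcher("test")
		// Chunks are matched as if concatenated, so {"foo.", "test"}
		// behaves like {"foo.test"}.
		offset, score := m.Match([]string{"foo.", "test"})
		if offset != 4 || score <= 0 || score > 1 {
			t.Errorf("Match = (%d, %v), want a match at offset 4 with a score in (0, 1]", offset, score)
		}
	}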
diff --git a/internal/fuzzy/symbol_test.go b/internal/fuzzy/symbol_test.go
index 2a9d9b6..43e629d 100644
--- a/internal/fuzzy/symbol_test.go
+++ b/internal/fuzzy/symbol_test.go
@@ -5,8 +5,12 @@
 package fuzzy_test
 
 import (
+	"go/ast"
+	"go/token"
+	"sort"
 	"testing"
 
+	"golang.org/x/tools/go/packages"
 	. "golang.org/x/tools/internal/fuzzy"
 )
 
@@ -34,30 +38,173 @@
 }
 
 func TestSymbolRanking(t *testing.T) {
-	matcher := NewSymbolMatcher("test")
 
-	// symbols to match, in ascending order of ranking.
-	symbols := []string{
-		"this.is.better.than.most",
-		"test.foo.bar",
-		"thebest",
-		"test.foo",
-		"test.foo",
-		"atest",
-		"testage",
-		"tTest",
-		"foo.test",
-		"test",
+	// query -> symbols to match, in ascending order of score
+	queryRanks := map[string][]string{
+		"test": {
+			"this.is.better.than.most",
+			"test.foo.bar",
+			"thebest",
+			"atest",
+			"test.foo",
+			"testage",
+			"tTest",
+			"foo.test",
+		},
+		"parseside": { // golang/go#60201
+			"yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE",
+			"parseContext.parse_sidebyside",
+		},
+		"cvb": {
+			"filecache_test.testIPCValueB",
+			"cover.Boundary",
+		},
+		"dho": {
+			"gocommand.DebugHangingGoCommands",
+			"protocol.DocumentHighlightOptions",
+		},
+		"flg": {
+			"completion.FALLTHROUGH",
+			"main.flagGoCmd",
+		},
+		"fvi": {
+			"godoc.fileIndexVersion",
+			"macho.FlagSubsectionsViaSymbols",
+		},
 	}
-	prev := 0.0
-	for _, sym := range symbols {
-		_, score := matcher.Match([]string{sym})
-		t.Logf("Match(%q) = %v", sym, score)
-		if score < prev {
-			t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev)
+
+	for query, symbols := range queryRanks {
+		t.Run(query, func(t *testing.T) {
+			matcher := NewSymbolMatcher(query)
+			prev := 0.0
+			for _, sym := range symbols {
+				_, score := matcher.Match([]string{sym})
+				t.Logf("Match(%q) = %v", sym, score)
+				if score <= prev {
+					t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev)
+				}
+				prev = score
+			}
+		})
+	}
+}
+
+func TestMatcherSimilarities(t *testing.T) {
+	// This test compares the fuzzy matcher with the symbol matcher on a corpus
+	// of qualified identifiers extracted from x/tools.
+	//
+	// These two matchers are not expected to agree, but inspecting differences
+	// can be useful for finding interesting ranking edge cases.
+	t.Skip("unskip this test to compare matchers")
+
+	idents := collectIdentifiers(t)
+	t.Logf("collected %d unique identifiers", len(idents))
+
+	// TODO: use go1.21 slices.MaxFunc.
+	topMatch := func(score func(string) float64) string {
+		top := ""
+		topScore := 0.0
+		for _, cand := range idents {
+			if s := score(cand); s > topScore {
+				top = cand
+				topScore = s
+			}
 		}
-		prev = score
+		return top
 	}
+
+	agreed := 0
+	total := 0
+	bad := 0
+	patterns := generatePatterns()
+	for _, pattern := range patterns {
+		total++
+
+		fm := NewMatcher(pattern)
+		topFuzzy := topMatch(func(input string) float64 {
+			return float64(fm.Score(input))
+		})
+		sm := NewSymbolMatcher(pattern)
+		topSymbol := topMatch(func(input string) float64 {
+			_, score := sm.Match([]string{input})
+			return score
+		})
+		switch {
+		case topFuzzy == "" && topSymbol != "":
+			if false {
+				// The fuzzy matcher has a bug where it misses some matches; for this
+				// test we only care about the symbol matcher.
+				t.Logf("%q matched %q but no fuzzy match", pattern, topSymbol)
+			}
+			total--
+			bad++
+		case topFuzzy != "" && topSymbol == "":
+			t.Fatalf("%q matched %q but no symbol match", pattern, topFuzzy)
+		case topFuzzy == topSymbol:
+			agreed++
+		default:
+			// Enable this log to see mismatches.
+			if false {
+				t.Logf("mismatch for %q: fuzzy: %q, symbol: %q", pattern, topFuzzy, topSymbol)
+			}
+		}
+	}
+	t.Logf("fuzzy matchers agreed on %d out of %d queries (%d bad)", agreed, total, bad)
+}
+
+func collectIdentifiers(tb testing.TB) []string {
+	cfg := &packages.Config{
+		Mode:  packages.NeedName | packages.NeedSyntax | packages.NeedFiles,
+		Tests: true,
+	}
+	pkgs, err := packages.Load(cfg, "golang.org/x/tools/...")
+	if err != nil {
+		tb.Fatal(err)
+	}
+	uniqueIdents := make(map[string]bool)
+	decls := 0
+	for _, pkg := range pkgs {
+		for _, f := range pkg.Syntax {
+			for _, decl := range f.Decls {
+				decls++
+				switch decl := decl.(type) {
+				case *ast.GenDecl:
+					for _, spec := range decl.Specs {
+						switch decl.Tok {
+						case token.IMPORT:
+						case token.TYPE:
+							name := spec.(*ast.TypeSpec).Name.Name
+							qualified := pkg.Name + "." + name
+							uniqueIdents[qualified] = true
+						case token.CONST, token.VAR:
+							for _, n := range spec.(*ast.ValueSpec).Names {
+								qualified := pkg.Name + "." + n.Name
+								uniqueIdents[qualified] = true
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+	var idents []string
+	for k := range uniqueIdents {
+		idents = append(idents, k)
+	}
+	sort.Strings(idents)
+	return idents
+}
+
+func generatePatterns() []string {
+	var patterns []string
+	for x := 'a'; x <= 'z'; x++ {
+		for y := 'a'; y <= 'z'; y++ {
+			for z := 'a'; z <= 'z'; z++ {
+				patterns = append(patterns, string(x)+string(y)+string(z))
+			}
+		}
+	}
+	return patterns
 }
 
 // Test that we strongly prefer exact matches.
@@ -89,9 +236,8 @@
 
 func TestChunkedMatch(t *testing.T) {
 	matcher := NewSymbolMatcher("test")
-
+	_, want := matcher.Match([]string{"test"})
 	chunked := [][]string{
-		{"test"},
 		{"", "test"},
 		{"test", ""},
 		{"te", "st"},
@@ -99,7 +245,7 @@
 
 	for _, chunks := range chunked {
 		offset, score := matcher.Match(chunks)
-		if offset != 0 || score != 1.0 {
+		if offset != 0 || score != want {
 			t.Errorf("Match(%v) = %v, %v, want 0, 1.0", chunks, offset, score)
 		}
 	}
diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go
index 64a6903..dc74eab 100644
--- a/internal/refactor/inline/callee.go
+++ b/internal/refactor/inline/callee.go
@@ -14,6 +14,7 @@
 	"go/parser"
 	"go/token"
 	"go/types"
+	"strings"
 
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/go/types/typeutil"
@@ -36,15 +37,17 @@
 	Unexported       []string     // names of free objects that are unexported
 	FreeRefs         []freeRef    // locations of references to free objects
 	FreeObjs         []object     // descriptions of free objects
-	BodyIsReturnExpr bool         // function body is "return expr(s)" with trivial conversion
-	ValidForCallStmt bool         // => bodyIsReturnExpr and sole expr is f() or <-ch
+	ValidForCallStmt bool         // function body is "return expr" where expr is f() or <-ch
 	NumResults       int          // number of results (according to type, not ast.FieldList)
 	Params           []*paramInfo // information about parameters (incl. receiver)
 	Results          []*paramInfo // information about result variables
+	Effects          []int        // order in which parameters are evaluated (see calleefx)
 	HasDefer         bool         // uses defer
+	HasBareReturn    bool         // uses bare return in non-void function
 	TotalReturns     int          // number of return statements
 	TrivialReturns   int          // number of return statements with trivial result conversions
 	Labels           []string     // names of all control labels
+	Falcon           falconResult // falcon constraint system
 }
 
 // A freeRef records a reference to a free object.  Gob-serializable.
@@ -56,10 +59,11 @@
 
 // An object abstracts a free types.Object referenced by the callee. Gob-serializable.
 type object struct {
-	Name     string // Object.Name()
-	Kind     string // one of {var,func,const,type,pkgname,nil,builtin}
-	PkgPath  string // pkgpath of object (or of imported package if kind="pkgname")
-	ValidPos bool   // Object.Pos().IsValid()
+	Name     string          // Object.Name()
+	Kind     string          // one of {var,func,const,type,pkgname,nil,builtin}
+	PkgPath  string          // pkgpath of object (or of imported package if kind="pkgname")
+	ValidPos bool            // Object.Pos().IsValid()
+	Shadow   map[string]bool // names shadowed at one of the object's refs
 }
 
 // AnalyzeCallee analyzes a function that is a candidate for inlining
@@ -117,7 +121,14 @@
 	)
 	var f func(n ast.Node) bool
 	visit := func(n ast.Node) { ast.Inspect(n, f) }
+	var stack []ast.Node
+	stack = append(stack, decl.Type) // for scope of function itself
 	f = func(n ast.Node) bool {
+		if n != nil {
+			stack = append(stack, n) // push
+		} else {
+			stack = stack[:len(stack)-1] // pop
+		}
 		switch n := n.(type) {
 		case *ast.SelectorExpr:
 			// Check selections of free fields/methods.
@@ -196,6 +207,8 @@
 						freeObjIndex[obj] = objidx
 					}
 
+					freeObjs[objidx].Shadow = addShadows(freeObjs[objidx].Shadow, info, obj.Name(), stack)
+
 					freeRefs = append(freeRefs, freeRef{
 						Offset: int(n.Pos() - decl.Pos()),
 						Object: objidx,
@@ -207,67 +220,34 @@
 	}
 	visit(decl)
 
-	// Analyze callee body for "return results" form, where
-	// results is one or more expressions or an n-ary call,
-	// and the implied conversions are trivial.
+	// Analyze callee body for "return expr" form,
+	// where expr is f() or <-ch. These forms are
+	// safe to inline as a standalone statement.
 	validForCallStmt := false
-	bodyIsReturnExpr := func() bool {
-		if decl.Type.Results != nil &&
-			len(decl.Type.Results.List) > 0 &&
-			len(decl.Body.List) == 1 {
-			if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) > 0 {
-				// Don't reduce calls to functions whose
-				// return statement has non trivial conversions.
-				argType := func(i int) types.Type {
-					return info.TypeOf(ret.Results[i])
-				}
-				if len(ret.Results) == 1 && sig.Results().Len() > 1 {
-					// Spread return: return f() where f.Results > 1.
-					tuple := info.TypeOf(ret.Results[0]).(*types.Tuple)
-					argType = func(i int) types.Type {
-						return tuple.At(i).Type()
-					}
-				}
-				for i := 0; i < sig.Results().Len(); i++ {
-					if !trivialConversion(argType(i), sig.Results().At(i)) {
-						return false
-					}
-				}
-
-				return true
-			}
-		}
-		return false
-	}()
-	if bodyIsReturnExpr {
-		ret := decl.Body.List[0].(*ast.ReturnStmt)
-
-		// Ascertain whether the results expression(s)
-		// would be safe to inline as a standalone statement.
-		// (This is true only for a single call or receive expression.)
+	if len(decl.Body.List) != 1 {
+		// not just a return statement
+	} else if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
 		validForCallStmt = func() bool {
-			if len(ret.Results) == 1 {
-				switch expr := astutil.Unparen(ret.Results[0]).(type) {
-				case *ast.CallExpr: // f(x)
-					callee := typeutil.Callee(info, expr)
-					if callee == nil {
-						return false // conversion T(x)
-					}
-
-					// The only non-void built-in functions that may be
-					// called as a statement are copy and recover
-					// (though arguably a call to recover should never
-					// be inlined as that changes its behavior).
-					if builtin, ok := callee.(*types.Builtin); ok {
-						return builtin.Name() == "copy" ||
-							builtin.Name() == "recover"
-					}
-
-					return true // ordinary call f()
-
-				case *ast.UnaryExpr: // <-x
-					return expr.Op == token.ARROW // channel receive <-ch
+			switch expr := astutil.Unparen(ret.Results[0]).(type) {
+			case *ast.CallExpr: // f(x)
+				callee := typeutil.Callee(info, expr)
+				if callee == nil {
+					return false // conversion T(x)
 				}
+
+				// The only non-void built-in functions that may be
+				// called as a statement are copy and recover
+				// (though arguably a call to recover should never
+				// be inlined as that changes its behavior).
+				if builtin, ok := callee.(*types.Builtin); ok {
+					return builtin.Name() == "copy" ||
+						builtin.Name() == "recover"
+				}
+
+				return true // ordinary call f()
+
+			case *ast.UnaryExpr: // <-x
+				return expr.Op == token.ARROW // channel receive <-ch
 			}
 
 			// No other expressions are valid statements.
@@ -279,6 +259,7 @@
 	// (but not any nested functions).
 	var (
 		hasDefer       = false
+		hasBareReturn  = false
 		totalReturns   = 0
 		trivialReturns = 0
 		labels         []string
@@ -314,6 +295,8 @@
 						break
 					}
 				}
+			} else if sig.Results().Len() > 0 {
+				hasBareReturn = true
 			}
 			if trivial {
 				trivialReturns++
@@ -322,6 +305,17 @@
 		return true
 	})
 
+	// Reject attempts to inline cgo-generated functions.
+	for _, obj := range freeObjs {
+		// There are others (iconst fconst sconst fpvar macro)
+		// but this is probably sufficient.
+		if strings.HasPrefix(obj.Name, "_Cfunc_") ||
+			strings.HasPrefix(obj.Name, "_Ctype_") ||
+			strings.HasPrefix(obj.Name, "_Cvar_") {
+			return nil, fmt.Errorf("cannot inline cgo-generated functions")
+		}
+	}
+
 	// Compact content to just the FuncDecl.
 	//
 	// As a space optimization, we don't retain the complete
@@ -338,7 +332,7 @@
 		return nil, err
 	}
 
-	params, results := analyzeParams(logf, fset, info, decl)
+	params, results, effects, falcon := analyzeParams(logf, fset, info, decl)
 	return &Callee{gobCallee{
 		Content:          content,
 		PkgPath:          pkg.Path(),
@@ -346,15 +340,17 @@
 		Unexported:       unexported,
 		FreeObjs:         freeObjs,
 		FreeRefs:         freeRefs,
-		BodyIsReturnExpr: bodyIsReturnExpr,
 		ValidForCallStmt: validForCallStmt,
 		NumResults:       sig.Results().Len(),
 		Params:           params,
 		Results:          results,
+		Effects:          effects,
 		HasDefer:         hasDefer,
+		HasBareReturn:    hasBareReturn,
 		TotalReturns:     totalReturns,
 		TrivialReturns:   trivialReturns,
 		Labels:           labels,
+		Falcon:           falcon,
 	}}, nil
 }
 
@@ -372,11 +368,14 @@
 
 // A paramInfo records information about a callee receiver, parameter, or result variable.
 type paramInfo struct {
-	Name     string          // parameter name (may be blank, or even "")
-	Assigned bool            // parameter appears on left side of an assignment statement
-	Escapes  bool            // parameter has its address taken
-	Refs     []int           // FuncDecl-relative byte offset of parameter ref within body
-	Shadow   map[string]bool // names shadowed at one of the above refs
+	Name       string          // parameter name (may be blank, or even "")
+	Index      int             // index within signature
+	IsResult   bool            // false for receiver or parameter, true for result variable
+	Assigned   bool            // parameter appears on left side of an assignment statement
+	Escapes    bool            // parameter has its address taken
+	Refs       []int           // FuncDecl-relative byte offset of parameter ref within body
+	Shadow     map[string]bool // names shadowed at one of the above refs
+	FalconType string          // name of this parameter's type (if basic) in the falcon system
 }
 
 // analyzeParams computes information about parameters of function fn,
@@ -386,7 +385,7 @@
 // the other of the result variables of function fn.
 //
 // The input must be well-typed.
-func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) (params, results []*paramInfo) {
+func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) (params, results []*paramInfo, effects []int, _ falconResult) {
 	fnobj, ok := info.Defs[decl.Name]
 	if !ok {
 		panic(fmt.Sprintf("%s: no func object for %q",
@@ -396,19 +395,23 @@
 	paramInfos := make(map[*types.Var]*paramInfo)
 	{
 		sig := fnobj.Type().(*types.Signature)
-		newParamInfo := func(param *types.Var) *paramInfo {
-			info := &paramInfo{Name: param.Name()}
+		newParamInfo := func(param *types.Var, isResult bool) *paramInfo {
+			info := &paramInfo{
+				Name:     param.Name(),
+				IsResult: isResult,
+				Index:    len(paramInfos),
+			}
 			paramInfos[param] = info
 			return info
 		}
 		if sig.Recv() != nil {
-			params = append(params, newParamInfo(sig.Recv()))
+			params = append(params, newParamInfo(sig.Recv(), false))
 		}
 		for i := 0; i < sig.Params().Len(); i++ {
-			params = append(params, newParamInfo(sig.Params().At(i)))
+			params = append(params, newParamInfo(sig.Params().At(i), false))
 		}
 		for i := 0; i < sig.Results().Len(); i++ {
-			results = append(results, newParamInfo(sig.Results().At(i)))
+			results = append(results, newParamInfo(sig.Results().At(i), true))
 		}
 	}
 
@@ -426,6 +429,9 @@
 
 	// Record locations of all references to parameters.
 	// And record the set of intervening definitions for each parameter.
+	//
+	// TODO(adonovan): combine this traversal with the one that computes
+	// FreeRefs. The tricky part is that calleefx needs this one first.
 	var stack []ast.Node
 	stack = append(stack, decl.Type) // for scope of function itself
 	ast.Inspect(decl.Body, func(n ast.Node) bool {
@@ -438,37 +444,52 @@
 		if id, ok := n.(*ast.Ident); ok {
 			if v, ok := info.Uses[id].(*types.Var); ok {
 				if pinfo, ok := paramInfos[v]; ok {
-					// Record location of ref to parameter.
+					// Record location of ref to parameter/result
+					// and any intervening (shadowing) names.
 					offset := int(n.Pos() - decl.Pos())
 					pinfo.Refs = append(pinfo.Refs, offset)
-
-					// Find set of names shadowed within body
-					// (excluding the parameter itself).
-					// If these names are free in the arg expression,
-					// we can't substitute the parameter.
-					for _, n := range stack {
-						if scope, ok := info.Scopes[n]; ok {
-							for _, name := range scope.Names() {
-								if name != pinfo.Name {
-									if pinfo.Shadow == nil {
-										pinfo.Shadow = make(map[string]bool)
-									}
-									pinfo.Shadow[name] = true
-								}
-							}
-						}
-					}
+					pinfo.Shadow = addShadows(pinfo.Shadow, info, pinfo.Name, stack)
 				}
 			}
 		}
 		return true
 	})
 
-	return params, results
+	// Compute subset and order of parameters that are strictly evaluated.
+	// (Depends on Refs computed above.)
+	effects = calleefx(info, decl.Body, paramInfos)
+	logf("effects list = %v", effects)
+
+	falcon := falcon(logf, fset, paramInfos, info, decl)
+
+	return params, results, effects, falcon
 }
 
 // -- callee helpers --
 
+// addShadows returns the shadows set augmented by the set of names
+// locally shadowed at the location of the reference in the callee
+// (identified by the stack). The name of the reference itself is
+// excluded.
+//
+// These shadowed names may not be used in a replacement expression
+// for the reference.
+func addShadows(shadows map[string]bool, info *types.Info, exclude string, stack []ast.Node) map[string]bool {
+	for _, n := range stack {
+		if scope := scopeFor(info, n); scope != nil {
+			for _, name := range scope.Names() {
+				if name != exclude {
+					if shadows == nil {
+						shadows = make(map[string]bool)
+					}
+					shadows[name] = true
+				}
+			}
+		}
+	}
+	return shadows
+}
+
 // deref removes a pointer type constructor from the core type of t.
 func deref(t types.Type) types.Type {
 	if ptr, ok := typeparams.CoreType(t).(*types.Pointer); ok {
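The Shadow sets recorded above (for free objects here, and for parameters in analyzeParams) guard against name capture during substitution: a name shadowed at a reference may not appear in the expression substituted for that reference. An ordinary-Go illustration of the hazard, unrelated to the inliner's own code:

	package main

	import "fmt"

	var v = 1

	// In f, the reference to the parameter x occurs where a local v is
	// in scope, so "v" is in x's shadow set.
	func f(x int) int {
		v := 100 // shadows the package-level v
		return v + x
	}

	func main() {
		fmt.Println(f(v)) // 101
		// Textually substituting the argument "v" for x would yield
		// "v + v" inside f's body, where the local v captures the name:
		// the result would be 200, not 101. The inliner must therefore
		// keep x bound (e.g. "var x int = v") rather than substitute.
	}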
diff --git a/internal/refactor/inline/calleefx.go b/internal/refactor/inline/calleefx.go
new file mode 100644
index 0000000..6e3dc79
--- /dev/null
+++ b/internal/refactor/inline/calleefx.go
@@ -0,0 +1,334 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines the analysis of callee effects.
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+)
+
+const (
+	rinf = -1 //  R∞: arbitrary read from memory
+	winf = -2 //  W∞: arbitrary write to memory (or unknown control)
+)
+
+// calleefx returns a list of parameter indices indicating the order
+// in which parameters are first referenced during evaluation of the
+// callee, relative both to each other and to other effects of the
+// callee (if any), such as arbitrary reads (rinf) and arbitrary
+// effects (winf), including unknown control flow. Each parameter
+// that is referenced appears once in the list.
+//
+// For example, the effects list of this function:
+//
+//	func f(x, y, z int) int {
+//	    return y + x + g() + z
+//	}
+//
+// is [1 0 -2 2], indicating reads of y and x, followed by the unknown
+// effects of the g() call, and finally the read of parameter z. This

+// information is used during inlining to ascertain when it is safe
+// for parameter references to be replaced by their corresponding
+// argument expressions. Such substitutions are permitted only when
+// they do not cause "write" operations (those with effects) to
+// commute with "read" operations (those that have no effect but are
+// not pure). Impure operations may be reordered with other impure
+// operations, and pure operations may be reordered arbitrarily.
+//
+// The analysis ignores the effects of runtime panics, on the
+// assumption that well-behaved programs shouldn't encounter them.
+func calleefx(info *types.Info, body *ast.BlockStmt, paramInfos map[*types.Var]*paramInfo) []int {
+	// This traversal analyzes the callee's statements (in syntax
+	// form, though one could do better with SSA) to compute the
+	// sequence of events of the following kinds:
+	//
+	// 1. reads of a parameter variable.
+	// 2. reads from other memory.
+	// 3. writes to memory.
+
+	var effects []int // indices of parameters, or rinf/winf (-ve)
+	seen := make(map[int]bool)
+	effect := func(i int) {
+		if !seen[i] {
+			seen[i] = true
+			effects = append(effects, i)
+		}
+	}
+
+	// unknown is called for statements of unknown effects (or control).
+	unknown := func() {
+		effect(winf)
+
+		// Ensure that all remaining parameters are "seen"
+		// after we go into the unknown (unless they are
+		// unreferenced by the function body). This lets us
+		// not bother implementing the complete traversal into
+		// control structures.
+		//
+		// TODO(adonovan): add them in a deterministic order.
+		// (This is not a bug but determinism is good.)
+		for _, pinfo := range paramInfos {
+			if !pinfo.IsResult && len(pinfo.Refs) > 0 {
+				effect(pinfo.Index)
+			}
+		}
+	}
+
+	var visitExpr func(n ast.Expr)
+	var visitStmt func(n ast.Stmt) bool
+	visitExpr = func(n ast.Expr) {
+		switch n := n.(type) {
+		case *ast.Ident:
+			if v, ok := info.Uses[n].(*types.Var); ok && !v.IsField() {
+				// Use of global?
+				if v.Parent() == v.Pkg().Scope() {
+					effect(rinf) // read global var
+				}
+
+				// Use of parameter?
+				if pinfo, ok := paramInfos[v]; ok && !pinfo.IsResult {
+					effect(pinfo.Index) // read parameter var
+				}
+
+				// Use of local variables is ok.
+			}
+
+		case *ast.BasicLit:
+			// no effect
+
+		case *ast.FuncLit:
+			// A func literal has no read or write effect
+			// until called, and (most) function calls are
+			// considered to have arbitrary effects.
+			// So, no effect.
+
+		case *ast.CompositeLit:
+			for _, elt := range n.Elts {
+				visitExpr(elt) // note: visits KeyValueExpr
+			}
+
+		case *ast.ParenExpr:
+			visitExpr(n.X)
+
+		case *ast.SelectorExpr:
+			if sel, ok := info.Selections[n]; ok {
+				visitExpr(n.X)
+				if sel.Indirect() {
+					effect(rinf) // indirect read x.f of heap variable
+				}
+			} else {
+				// qualified identifier: treat like unqualified
+				visitExpr(n.Sel)
+			}
+
+		case *ast.IndexExpr:
+			if tv := info.Types[n.Index]; tv.IsType() {
+				// no effect (G[T] instantiation)
+			} else {
+				visitExpr(n.X)
+				visitExpr(n.Index)
+				switch tv.Type.Underlying().(type) {
+				case *types.Slice, *types.Pointer: // []T, *[n]T (not string, [n]T)
+					effect(rinf) // indirect read of slice/array element
+				}
+			}
+
+		case *ast.IndexListExpr:
+			// no effect (M[K,V] instantiation)
+
+		case *ast.SliceExpr:
+			visitExpr(n.X)
+			visitExpr(n.Low)
+			visitExpr(n.High)
+			visitExpr(n.Max)
+
+		case *ast.TypeAssertExpr:
+			visitExpr(n.X)
+
+		case *ast.CallExpr:
+			if info.Types[n.Fun].IsType() {
+				// conversion T(x)
+				visitExpr(n.Args[0])
+			} else {
+				// call f(args)
+				visitExpr(n.Fun)
+				for i, arg := range n.Args {
+					if i == 0 && info.Types[arg].IsType() {
+						continue // new(T), make(T, n)
+					}
+					visitExpr(arg)
+				}
+
+				// The pure built-ins have no effects beyond
+				// those of their operands (not even memory reads).
+				// All other calls have unknown effects.
+				if !callsPureBuiltin(info, n) {
+					unknown() // arbitrary effects
+				}
+			}
+
+		case *ast.StarExpr:
+			visitExpr(n.X)
+			effect(rinf) // *ptr load or store depends on state of heap
+
+		case *ast.UnaryExpr: // + - ! ^ & ~ <-
+			visitExpr(n.X)
+			if n.Op == token.ARROW {
+				unknown() // effect: channel receive
+			}
+
+		case *ast.BinaryExpr:
+			visitExpr(n.X)
+			visitExpr(n.Y)
+
+		case *ast.KeyValueExpr:
+			visitExpr(n.Key) // may be a struct field
+			visitExpr(n.Value)
+
+		case *ast.BadExpr:
+			// no effect
+
+		case nil:
+			// optional subtree
+
+		default:
+			// type syntax: unreachable given traversal
+			panic(n)
+		}
+	}
+
+	// visitStmt's result indicates the continuation:
+	// false for return, true for the next statement.
+	//
+	// We could treat return as an unknown, but this way
+	// yields definite effects for simple sequences like
+	// {S1; S2; return}, so unreferenced parameters are
+	// not spuriously added to the effects list, and thus
+	// not spuriously disqualified from elimination.
+	visitStmt = func(n ast.Stmt) bool {
+		switch n := n.(type) {
+		case *ast.DeclStmt:
+			decl := n.Decl.(*ast.GenDecl)
+			for _, spec := range decl.Specs {
+				switch spec := spec.(type) {
+				case *ast.ValueSpec:
+					for _, v := range spec.Values {
+						visitExpr(v)
+					}
+
+				case *ast.TypeSpec:
+					// no effect
+				}
+			}
+
+		case *ast.LabeledStmt:
+			return visitStmt(n.Stmt)
+
+		case *ast.ExprStmt:
+			visitExpr(n.X)
+
+		case *ast.SendStmt:
+			visitExpr(n.Chan)
+			visitExpr(n.Value)
+			unknown() // effect: channel send
+
+		case *ast.IncDecStmt:
+			visitExpr(n.X)
+			unknown() // effect: variable increment
+
+		case *ast.AssignStmt:
+			for _, lhs := range n.Lhs {
+				visitExpr(lhs)
+			}
+			for _, rhs := range n.Rhs {
+				visitExpr(rhs)
+			}
+			for _, lhs := range n.Lhs {
+				id, _ := lhs.(*ast.Ident)
+				if id != nil && id.Name == "_" {
+					continue // blank assign has no effect
+				}
+				if n.Tok == token.DEFINE && id != nil && info.Defs[id] != nil {
+					continue // new var declared by := has no effect
+				}
+				unknown() // assignment to existing var
+				break
+			}
+
+		case *ast.GoStmt:
+			visitExpr(n.Call.Fun)
+			for _, arg := range n.Call.Args {
+				visitExpr(arg)
+			}
+			unknown() // effect: create goroutine
+
+		case *ast.DeferStmt:
+			visitExpr(n.Call.Fun)
+			for _, arg := range n.Call.Args {
+				visitExpr(arg)
+			}
+			unknown() // effect: push defer
+
+		case *ast.ReturnStmt:
+			for _, res := range n.Results {
+				visitExpr(res)
+			}
+			return false
+
+		case *ast.BlockStmt:
+			for _, stmt := range n.List {
+				if !visitStmt(stmt) {
+					return false
+				}
+			}
+
+		case *ast.BranchStmt:
+			unknown() // control flow
+
+		case *ast.IfStmt:
+			visitStmt(n.Init)
+			visitExpr(n.Cond)
+			unknown() // control flow
+
+		case *ast.SwitchStmt:
+			visitStmt(n.Init)
+			visitExpr(n.Tag)
+			unknown() // control flow
+
+		case *ast.TypeSwitchStmt:
+			visitStmt(n.Init)
+			visitStmt(n.Assign)
+			unknown() // control flow
+
+		case *ast.SelectStmt:
+			unknown() // control flow
+
+		case *ast.ForStmt:
+			visitStmt(n.Init)
+			visitExpr(n.Cond)
+			unknown() // control flow
+
+		case *ast.RangeStmt:
+			visitExpr(n.X)
+			unknown() // control flow
+
+		case *ast.EmptyStmt, *ast.BadStmt:
+			// no effect
+
+		case nil:
+			// optional subtree
+
+		default:
+			panic(n)
+		}
+		return true
+	}
+	visitStmt(body)
+
+	return effects
+}
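A worked example of the rules above, hand-derived from the description rather than captured from the tool (the notation matches the doc comment and the tests in calleefx_test.go below):

	package p

	func g() int

	// reads a (0) and b (1) via the pure built-in len (which has no
	// effect of its own), then the call g() contributes unknown effects
	// (-2), which also pulls the only remaining referenced parameter
	// c (2) into the list; the final read of c adds nothing new.
	func f(a, b, c string) int {
		n := len(a) + len(b)
		n += g()
		return n + len(c)
	}

	// expected effects: [0 1 -2 2]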
diff --git a/internal/refactor/inline/calleefx_test.go b/internal/refactor/inline/calleefx_test.go
new file mode 100644
index 0000000..1fc16ae
--- /dev/null
+++ b/internal/refactor/inline/calleefx_test.go
@@ -0,0 +1,159 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline_test
+
+import (
+	"fmt"
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/internal/refactor/inline"
+)
+
+// TestCalleeEffects is a unit test of the calleefx analysis.
+func TestCalleeEffects(t *testing.T) {
+	// Each callee must declare a function or method named f.
+	const funcName = "f"
+
+	var tests = []struct {
+		descr  string
+		callee string // Go source file (sans package decl) containing callee decl
+		want   string // expected effects string (-1=R∞ -2=W∞)
+	}{
+		{
+			"Assignments have unknown effects.",
+			`func f(x, y int) { x = y }`,
+			`[0 1 -2]`,
+		},
+		{
+			"Reads from globals are impure.",
+			`func f() { _ = g }; var g int`,
+			`[-1]`,
+		},
+		{
+			"Writes to globals have effects.",
+			`func f() { g = 0 }; var g int`,
+			`[-1 -2]`, // the -1 is spurious but benign
+		},
+		{
+			"Blank assign has no effect.",
+			`func f(x int) { _ = x }`,
+			`[0]`,
+		},
+		{
+			"Short decl of new var has has no effect.",
+			`func f(x int) { y := x; _ = y }`,
+			`[0]`,
+		},
+		{
+			"Short decl of existing var (y) is an assignment.",
+			`func f(x int) { y := x; y, z := 1, 2; _, _ = y, z }`,
+			`[0 -2]`,
+		},
+		{
+			"Unreferenced parameters are excluded.",
+			`func f(x, y, z int) { _ = z + x }`,
+			`[2 0]`,
+		},
+		{
+			"Built-in len has no effect.",
+			`func f(x, y string) { _ = len(y) + len(x) }`,
+			`[1 0]`,
+		},
+		{
+			"Built-in println has effects.",
+			`func f(x, y int) { println(y, x) }`,
+			`[1 0 -2]`,
+		},
+		{
+			"Return has no effect, and no control successor.",
+			`func f(x, y int) int { return x + y; panic(1) }`,
+			`[0 1]`,
+		},
+		{
+			"Loops (etc) have unknown effects.",
+			`func f(x, y bool) { for x { _ = y } }`,
+			`[0 -2 1]`,
+		},
+		{
+			"Calls have unknown effects.",
+			`func f(x, y int) { _, _, _ = x, g(), y }; func g() int`,
+			`[0 -2 1]`,
+		},
+		{
+			"Calls to some built-ins are pure.",
+			`func f(x, y int) { _, _, _ = x, len("hi"), y }`,
+			`[0 1]`,
+		},
+		{
+			"Calls to some built-ins are pure (variant).",
+			`func f(x, y int) { s := "hi"; _, _, _ = x, len(s), y; s = "bye" }`,
+			`[0 1 -2]`,
+		},
+		{
+			"Calls to some built-ins are pure (another variants).",
+			`func f(x, y int) { s := "hi"; _, _, _ = x, len(s), y }`,
+			`[0 1]`,
+		},
+		{
+			"Reading a local var is impure but does not have effects.",
+			`func f(x, y bool) { for x { _ = y } }`,
+			`[0 -2 1]`,
+		},
+	}
+	for _, test := range tests {
+		test := test
+		t.Run(test.descr, func(t *testing.T) {
+			fset := token.NewFileSet()
+			mustParse := func(filename string, content any) *ast.File {
+				f, err := parser.ParseFile(fset, filename, content, parser.ParseComments|parser.SkipObjectResolution)
+				if err != nil {
+					t.Fatalf("ParseFile: %v", err)
+				}
+				return f
+			}
+
+			// Parse callee file and find first func decl named f.
+			calleeContent := "package p\n" + test.callee
+			calleeFile := mustParse("callee.go", calleeContent)
+			var decl *ast.FuncDecl
+			for _, d := range calleeFile.Decls {
+				if d, ok := d.(*ast.FuncDecl); ok && d.Name.Name == funcName {
+					decl = d
+					break
+				}
+			}
+			if decl == nil {
+				t.Fatalf("declaration of func %s not found: %s", funcName, test.callee)
+			}
+
+			info := &types.Info{
+				Defs:       make(map[*ast.Ident]types.Object),
+				Uses:       make(map[*ast.Ident]types.Object),
+				Types:      make(map[ast.Expr]types.TypeAndValue),
+				Implicits:  make(map[ast.Node]types.Object),
+				Selections: make(map[*ast.SelectorExpr]*types.Selection),
+				Scopes:     make(map[ast.Node]*types.Scope),
+			}
+			conf := &types.Config{Error: func(err error) { t.Error(err) }}
+			pkg, err := conf.Check("p", fset, []*ast.File{calleeFile}, info)
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			callee, err := inline.AnalyzeCallee(t.Logf, fset, pkg, info, decl, []byte(calleeContent))
+			if err != nil {
+				t.Fatal(err)
+			}
+			if got := fmt.Sprint(callee.Effects()); got != test.want {
+				t.Errorf("for effects of %s, got %s want %s",
+					test.callee, got, test.want)
+			}
+		})
+	}
+}
diff --git a/internal/refactor/inline/doc.go b/internal/refactor/inline/doc.go
index f83759a..b13241f 100644
--- a/internal/refactor/inline/doc.go
+++ b/internal/refactor/inline/doc.go
@@ -219,6 +219,18 @@
     be prepared to eliminate the declaration too---this is where an
     iterative framework for simplification would really help).
 
+  - An expression such as s[i] may be valid if s and i are
+    variables but invalid if either or both of them are constants.
+    For example, a negative constant index s[-1] is always out of
+    bounds, and even a non-negative constant index may be out of
+    bounds depending on the particular string constant (e.g.
+    "abc"[4]).
+
+    So, if a parameter participates in any expression that is
+    subject to additional compile-time checks when its operands are
+    constant, it may be unsafe to substitute that parameter by a
+    constant argument value (#62664).
+
 More complex callee functions are inlinable with more elaborate and
 invasive changes to the statements surrounding the call expression.
 
@@ -253,6 +265,13 @@
   - Eliminate parens and braces inserted conservatively when they
     are redundant.
 
+  - Eliminate explicit conversions of "untyped" literals inserted
+    conservatively when they are redundant. For example, the
+    conversion int32(1) is redundant when this value is used only as a
+    slice index; but it may be crucial if it is used in x := int32(1)
+    as it changes the type of x, which may have further implications.
+    The conversions may also be important to the falcon analysis.
+
   - Allow non-'go' build systems such as Bazel/Blaze a chance to
     decide whether an import is accessible using logic other than
     "/internal/" path segments. This could be achieved by returning
diff --git a/internal/refactor/inline/escape.go b/internal/refactor/inline/escape.go
index f88be27..d05d2b9 100644
--- a/internal/refactor/inline/escape.go
+++ b/internal/refactor/inline/escape.go
@@ -57,7 +57,7 @@
 		}
 	}
 
-	// Search function body for operations &x, x.f(), and x = y
+	// Search function body for operations &x, x.f(), x++, and x = y
 	// where x is a parameter. Each of these treats x as an address.
 	ast.Inspect(root, func(n ast.Node) bool {
 		switch n := n.(type) {
@@ -88,6 +88,9 @@
 					lvalue(lhs, false)
 				}
 			}
+
+		case *ast.IncDecStmt:
+			lvalue(n.X, false)
 		}
 		return true
 	})
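The new x++ case matters because an incremented parameter cannot simply be replaced by its argument expression. A standalone illustration (not inliner code):

	package main

	import "fmt"

	func twice(x int) int {
		x++ // the parameter is written, not merely read
		return x * 2
	}

	func main() {
		n := 3
		fmt.Println(twice(n+1), n) // 10 3: the caller's n is unchanged
		// Substituting the argument for x would produce "(n+1)++", which
		// is not valid Go, and substituting a plain variable would wrongly
		// increment the caller's variable, so the inliner must introduce a
		// binding such as "x := n + 1" instead of eliminating the parameter.
	}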
diff --git a/internal/refactor/inline/everything_test.go b/internal/refactor/inline/everything_test.go
index 7166c1b..a1c5448 100644
--- a/internal/refactor/inline/everything_test.go
+++ b/internal/refactor/inline/everything_test.go
@@ -22,21 +22,29 @@
 	"golang.org/x/tools/internal/testenv"
 )
 
-// Run with this command:
-//
-// $ go test ./internal/refactor/inline/ -run=Everything -v -packages=./internal/...
-
-// TODO(adonovan):
-// - report counters (number of attempts, failed AnalyzeCallee, failed
-//   Inline, etc.)
-// - Make a pretty log of the entire output so that we can peruse it
-//   for opportunities for systematic improvement.
-
 var packagesFlag = flag.String("packages", "", "set of packages for TestEverything")
 
 // TestEverything invokes the inliner on every single call site in a
 // given package, and checks that it produces either a reasonable
 // error, or output that parses and type-checks.
+//
+// It does nothing during ordinary testing, but may be used to find
+// inlining bugs in large corpora.
+//
+// Use this command to inline everything in golang.org/x/tools:
+//
+// $ go test ./internal/refactor/inline/ -run=Everything -packages=../../../
+//
+// And these commands to inline everything in the kubernetes repository:
+//
+// $ go test -c -o /tmp/everything ./internal/refactor/inline/
+// $ (cd kubernetes && /tmp/everything -test.run=Everything -packages=./...)
+//
+// TODO(adonovan):
+//   - report counters (number of attempts, failed AnalyzeCallee, failed
+//     Inline, etc.)
+//   - Make a pretty log of the entire output so that we can peruse it
+//     for opportunities for systematic improvement.
 func TestEverything(t *testing.T) {
 	testenv.NeedsGoPackages(t)
 	if testing.Short() {
@@ -48,7 +56,6 @@
 
 	// Load this package plus dependencies from typed syntax.
 	cfg := &packages.Config{
-		Dir:  "../../..", // root of x/tools
 		Mode: packages.LoadAllSyntax,
 		Env: append(os.Environ(),
 			"GO111MODULES=on",
@@ -100,7 +107,7 @@
 				}
 
 				// Prepare caller info.
-				callPosn := callerPkg.Fset.Position(call.Lparen)
+				callPosn := callerPkg.Fset.PositionFor(call.Lparen, false)
 				callerContent, err := readFile(callPosn.Filename)
 				if err != nil {
 					t.Fatal(err)
@@ -119,7 +126,7 @@
 				if !ok {
 					t.Fatalf("missing package for callee %v", fn)
 				}
-				calleePosn := callerPkg.Fset.Position(fn.Pos())
+				calleePosn := callerPkg.Fset.PositionFor(fn.Pos(), false)
 				calleeDecl, err := findFuncByPosition(calleePkg, calleePosn)
 				if err != nil {
 					t.Fatal(err)
@@ -153,6 +160,7 @@
 							"has no body",
 							"type parameters are not yet",
 							"line directives",
+							"cgo-generated",
 						} {
 							if strings.Contains(err.Error(), ignore) {
 								return
diff --git a/internal/refactor/inline/export_test.go b/internal/refactor/inline/export_test.go
new file mode 100644
index 0000000..7b2cec7
--- /dev/null
+++ b/internal/refactor/inline/export_test.go
@@ -0,0 +1,9 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file opens back doors for testing.
+
+func (callee *Callee) Effects() []int { return callee.impl.Effects }
diff --git a/internal/refactor/inline/falcon.go b/internal/refactor/inline/falcon.go
new file mode 100644
index 0000000..9863e8d
--- /dev/null
+++ b/internal/refactor/inline/falcon.go
@@ -0,0 +1,892 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines the callee side of the "fallible constant" analysis.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/constant"
+	"go/format"
+	"go/token"
+	"go/types"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// falconResult is the result of the analysis of the callee.
+type falconResult struct {
+	Types       []falconType // types for falcon constraint environment
+	Constraints []string     // constraints (Go expressions) on values of fallible constants
+}
+
+// A falconType specifies the name and underlying type of a synthetic
+// defined type for use in falcon constraints.
+//
+// Unique types from callee code are bijectively mapped onto falcon
+// types so that constraints are independent of callee type
+// information but preserve type equivalence classes.
+//
+// Fresh names are deliberately obscure to avoid shadowing even if a
+// callee parameter has a name like "int" or "any".
+type falconType struct {
+	Name string
+	Kind types.BasicKind // string/number/bool
+}
+
+// falcon identifies "fallible constant" expressions, which are
+// expressions that may fail to compile if one or more of their
+// operands is changed from non-constant to constant.
+//
+// Consider:
+//
+//	func sub(s string, i, j int) string { return s[i:j] }
+//
+// If parameters are replaced by constants, the compiler is
+// required to perform these additional checks:
+//
+//   - if i is constant, 0 <= i.
+//   - if s and i are constant, i <= len(s).
+//   - ditto for j.
+//   - if i and j are constant, i <= j.
+//
+// s[i:j] is thus a "fallible constant" expression dependent on {s, i,
+// j}. Each falcon creates a set of conditional constraints across one
+// or more parameter variables.
+//
+//   - When inlining a call such as sub("abc", -1, 2), the parameter i
+//     cannot be eliminated by substitution as its argument value is
+//     negative.
+//
+//   - When inlining sub("", 2, 1), all three parameters cannot be be
+//     simultaneously eliminated by substitution without violating i
+//     <= len(s) and j <= len(s), but the parameters i and j could be
+//     safely eliminated without s.
+//
+// Parameters that cannot be eliminated must remain non-constant,
+// either in the form of a binding declaration:
+//
+//	{ var i int = -1; return "abc"[i:2] }
+//
+// or a parameter of a literalization:
+//
+//	func (i int) string { return "abc"[i:2] }(-1)
+//
+// These example expressions are obviously doomed to fail at run
+// time, but in realistic cases such expressions are dominated by
+// appropriate conditions that make them reachable only when safe:
+//
+//	if 0 <= i && i <= j && j <= len(s) { _ = s[i:j] }
+//
+// (In principle a more sophisticated inliner could entirely eliminate
+// such unreachable blocks based on the condition being always-false
+// for the given parameter substitution, but this is tricky to do safely
+// because the type-checker considers only a single configuration.
+// Consider: if runtime.GOOS == "linux" { ... }.)
+//
+// We believe this is an exhaustive list of "fallible constant" operations:
+//
+//   - switch z { case x: case y } 	// duplicate case values
+//   - s[i], s[i:j], s[i:j:k]		// index out of bounds (0 <= i <= j <= k <= len(s))
+//   - T{x: 0}				// index out of bounds, duplicate index
+//   - x/y, x%y, x/=y, x%=y		// integer division by zero; minint/-1 overflow
+//   - x+y, x-y, x*y			// arithmetic overflow
+//   - x<<y				// shift out of range
+//   - -x				// negation of minint
+//   - T(x)				// value out of range
+//
+// The fundamental reason for this elaborate algorithm is that the
+// "separate analysis" of callee and caller, as required when running
+// in an environment such as unitchecker, means that there is no way
+// for us to simply invoke the type checker on the combination of
+// caller and callee code, as by the time we analyze the caller, we no
+// longer have access to type information for the callee (and, in
+// particular, any of its direct dependencies that are not direct
+// dependencies of the caller). So, in effect, we are forced to map
+// the problem into a neutral (callee-type-independent) constraint
+// system that can be verified later.
+func falcon(logf func(string, ...any), fset *token.FileSet, params map[*types.Var]*paramInfo, info *types.Info, decl *ast.FuncDecl) falconResult {
+
+	st := &falconState{
+		logf:   logf,
+		fset:   fset,
+		params: params,
+		info:   info,
+		decl:   decl,
+	}
+
+	// type mapping
+	st.int = st.typename(types.Typ[types.Int])
+	st.any = "interface{}" // don't use "any" as it may be shadowed
+	for obj, info := range st.params {
+		if isBasic(obj.Type(), types.IsConstType) {
+			info.FalconType = st.typename(obj.Type())
+		}
+	}
+
+	st.stmt(st.decl.Body)
+
+	return st.result
+}
+
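The constraints built by this file are ordinary Go expressions whose only job is to fail type-checking when unsuitable constant values are substituted; Inline later verifies them with types.CheckExpr, as noted below. A self-contained sketch of that verification idea, using types.Eval for brevity (an illustration of the mechanism, not the inliner's code):

	package main

	import (
		"fmt"
		"go/token"
		"go/types"
	)

	// checks reports whether the expression type-checks in isolation.
	func checks(expr string) bool {
		_, err := types.Eval(token.NewFileSet(), nil, token.NoPos, expr)
		return err == nil
	}

	func main() {
		// Index constraint of the emitNonNegative shape, []int{}[index]:
		fmt.Println(checks("[]int{}[1]"))  // true: a non-negative constant index is legal
		fmt.Println(checks("[]int{}[-1]")) // false: a negative constant index is rejected
		// Uniqueness constraint of the emitUnique shape, T{elem: 0, ...}:
		fmt.Println(checks("map[int]int{1: 0, 2: 0}")) // true
		fmt.Println(checks("map[int]int{1: 0, 1: 0}")) // false: duplicate constant key
	}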
+type falconState struct {
+	// inputs
+	logf   func(string, ...any)
+	fset   *token.FileSet
+	params map[*types.Var]*paramInfo
+	info   *types.Info
+	decl   *ast.FuncDecl
+
+	// working state
+	int       string
+	any       string
+	typenames typeutil.Map
+
+	result falconResult
+}
+
+// typename returns the name in the falcon constraint system
+// of a given string/number/bool type t. Falcon types are
+// specified directly in go/types data structures rather than
+// by name, avoiding potential shadowing conflicts with
+// confusing parameter names such as "int".
+//
+// Also, each distinct type (as determined by types.Identical)
+// is mapped to a fresh type in the falcon system so that we
+// can map the types in the callee code into a neutral form
+// that does not depend on imports, allowing us to detect
+// potential conflicts such as
+//
+//	map[any]{T1(1): 0, T2(1): 0}
+//
+// where T1=T2.
+func (st *falconState) typename(t types.Type) string {
+	name, ok := st.typenames.At(t).(string)
+	if !ok {
+		basic := t.Underlying().(*types.Basic)
+
+	// That dot ۰ is an Arabic zero numeral U+06F0.
+		// It is very unlikely to appear in a real program.
+		// TODO(adonovan): use a non-heuristic solution.
+		name = fmt.Sprintf("%s۰%d", basic, st.typenames.Len())
+		st.typenames.Set(t, name)
+		st.logf("falcon: emit type %s %s // %q", name, basic, t)
+		st.result.Types = append(st.result.Types, falconType{
+			Name: name,
+			Kind: basic.Kind(),
+		})
+	}
+	return name
+}
+
+// -- constraint emission --
+
+// emit emits a Go expression that must have a legal type.
+// In effect, we let the go/types constant folding algorithm
+// do most of the heavy lifting (though it may be hard to
+// believe from the complexity of this algorithm!).
+func (st *falconState) emit(constraint ast.Expr) {
+	var out strings.Builder
+	if err := format.Node(&out, st.fset, constraint); err != nil {
+		panic(err) // can't happen
+	}
+	syntax := out.String()
+	st.logf("falcon: emit constraint %s", syntax)
+	st.result.Constraints = append(st.result.Constraints, syntax)
+}
+
+// emitNonNegative emits an []T{}[index] constraint,
+// which ensures index is non-negative if constant.
+func (st *falconState) emitNonNegative(index ast.Expr) {
+	st.emit(&ast.IndexExpr{
+		X: &ast.CompositeLit{
+			Type: &ast.ArrayType{
+				Elt: makeIdent(st.int),
+			},
+		},
+		Index: index,
+	})
+}
+
+// emitMonotonic emits an []T{}[i:j] constraint,
+// which ensures i <= j if both are constant.
+func (st *falconState) emitMonotonic(i, j ast.Expr) {
+	st.emit(&ast.SliceExpr{
+		X: &ast.CompositeLit{
+			Type: &ast.ArrayType{
+				Elt: makeIdent(st.int),
+			},
+		},
+		Low:  i,
+		High: j,
+	})
+}
+
+// emitUnique emits a T{elem1: 0, ... elemN: 0} constraint,
+// which ensures that all constant elems are unique.
+// T may be a map, slice, or array depending
+// on the desired check semantics.
+func (st *falconState) emitUnique(typ ast.Expr, elems []ast.Expr) {
+	if len(elems) > 1 {
+		var elts []ast.Expr
+		for _, elem := range elems {
+			elts = append(elts, &ast.KeyValueExpr{
+				Key:   elem,
+				Value: makeIntLit(0),
+			})
+		}
+		st.emit(&ast.CompositeLit{
+			Type: typ,
+			Elts: elts,
+		})
+	}
+}
+
+// -- traversal --
+
+// The traversal functions scan the callee body for expressions that
+// are not constant but would become constant if the parameter vars
+// were redeclared as constants, and emits for each one a constraint
+// (a Go expression) with the property that it will not type-check
+// (using types.CheckExpr) if the particular argument values are
+// unsuitable.
+//
+// These constraints are checked by Inline with the actual
+// constant argument values. Violations cause it to reject
+// parameters as candidates for substitution.
+
+func (st *falconState) stmt(s ast.Stmt) {
+	ast.Inspect(s, func(n ast.Node) bool {
+		switch n := n.(type) {
+		case ast.Expr:
+			_ = st.expr(n)
+			return false // skip usual traversal
+
+		case *ast.AssignStmt:
+			switch n.Tok {
+			case token.QUO_ASSIGN, token.REM_ASSIGN:
+				// x /= y
+				// Possible "integer division by zero"
+				// Emit constraint: 1/y.
+				_ = st.expr(n.Lhs[0])
+				kY := st.expr(n.Rhs[0])
+				if kY, ok := kY.(ast.Expr); ok {
+					op := token.QUO
+					if n.Tok == token.REM_ASSIGN {
+						op = token.REM
+					}
+					st.emit(&ast.BinaryExpr{
+						Op: op,
+						X:  makeIntLit(1),
+						Y:  kY,
+					})
+				}
+				return false // skip usual traversal
+			}
+
+		case *ast.SwitchStmt:
+			if n.Init != nil {
+				st.stmt(n.Init)
+			}
+			tBool := types.Type(types.Typ[types.Bool])
+			tagType := tBool // default: true
+			if n.Tag != nil {
+				st.expr(n.Tag)
+				tagType = st.info.TypeOf(n.Tag)
+			}
+
+			// Possible "duplicate case value".
+			// Emit constraint map[T]int{v1: 0, ..., vN:0}
+			// to ensure all maybe-constant case values are unique
+			// (unless switch tag is boolean, which is relaxed).
+			var unique []ast.Expr
+			for _, clause := range n.Body.List {
+				clause := clause.(*ast.CaseClause)
+				for _, caseval := range clause.List {
+					if k := st.expr(caseval); k != nil {
+						unique = append(unique, st.toExpr(k))
+					}
+				}
+				for _, stmt := range clause.Body {
+					st.stmt(stmt)
+				}
+			}
+			if unique != nil && !types.Identical(tagType.Underlying(), tBool) {
+				tname := st.any
+				if !types.IsInterface(tagType) {
+					tname = st.typename(tagType)
+				}
+				t := &ast.MapType{
+					Key:   makeIdent(tname),
+					Value: makeIdent(st.int),
+				}
+				st.emitUnique(t, unique)
+			}
+		}
+		return true
+	})
+}
+
+// fieldTypes visits the .Type of each field in the list.
+func (st *falconState) fieldTypes(fields *ast.FieldList) {
+	if fields != nil {
+		for _, field := range fields.List {
+			_ = st.expr(field.Type)
+		}
+	}
+}
+
+// expr visits the expression (or type) and returns a
+// non-nil result if the expression is constant or would
+// become constant if all suitable function parameters were
+// redeclared as constants.
+//
+// If the expression is constant, st.expr returns its type
+// and value (types.TypeAndValue). If the expression would
+// become constant, st.expr returns an ast.Expr tree whose
+// leaves are literals and parameter references, and whose
+// interior nodes are operations that may become constant,
+// such as -x, x+y, f(x), and T(x). We call these would-be
+// constant expressions "fallible constants", since they may
+// fail to type-check for some values of x, i, and j. (We
+// refer to the non-nil cases collectively as "maybe
+// constant", and the nil case as "definitely non-constant".)
+//
+// As a side effect, st.expr emits constraints for each
+// fallible constant expression; this is its main purpose.
+//
+// Consequently, st.expr must visit the entire subtree so
+// that all necessary constraints are emitted. It may not
+// short-circuit the traversal when it encounters a constant
+// subexpression as constants may contain arbitrary other
+// syntax that may impose constraints. Consider (as always)
+// this contrived but legal example of a type parameter (!)
+// that contains statement syntax:
+//
+//	func f[T [unsafe.Sizeof(func() { stmts })]int]()
+//
+// There is no need to emit constraints for (e.g.) s[i] when s
+// and i are already constants, because we know the expression
+// is sound, but it is sometimes easier to emit these
+// redundant constraints than to avoid them.
+func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | ast.Expr
+	tv := st.info.Types[e]
+	if tv.Value != nil {
+		// A constant value overrides any other result.
+		defer func() { res = tv }()
+	}
+
+	switch e := e.(type) {
+	case *ast.Ident:
+		if v, ok := st.info.Uses[e].(*types.Var); ok {
+			if _, ok := st.params[v]; ok && isBasic(v.Type(), types.IsConstType) {
+				return e // reference to constable parameter
+			}
+		}
+		// (References to *types.Const are handled by the defer.)
+
+	case *ast.BasicLit:
+		// constant
+
+	case *ast.ParenExpr:
+		return st.expr(e.X)
+
+	case *ast.FuncLit:
+		_ = st.expr(e.Type)
+		st.stmt(e.Body)
+		// definitely non-constant
+
+	case *ast.CompositeLit:
+		// T{k: v, ...}, where T ∈ {array,*array,slice,map},
+		// imposes a constraint that all constant k are
+		// distinct and, for arrays [n]T, within range 0-n.
+		//
+		// Types matter, not just values: an interface-keyed
+		// map may contain keys that are numerically equal so
+		// long as they are of distinct types. For example:
+		//
+		//   type myint int
+		//   map[any]bool{1: true, 1:        true} // error: duplicate key
+		//   map[any]bool{1: true, int16(1): true} // ok
+		//   map[any]bool{1: true, myint(1): true} // ok
+		//
+		// This can be asserted by emitting a
+		// constraint of the form T{k1: 0, ..., kN: 0}.
+		if e.Type != nil {
+			_ = st.expr(e.Type)
+		}
+		t := deref(typeparams.CoreType(deref(tv.Type)))
+		var uniques []ast.Expr
+		for _, elt := range e.Elts {
+			if kv, ok := elt.(*ast.KeyValueExpr); ok {
+				if !is[*types.Struct](t) {
+					if k := st.expr(kv.Key); k != nil {
+						uniques = append(uniques, st.toExpr(k))
+					}
+				}
+				_ = st.expr(kv.Value)
+			} else {
+				_ = st.expr(elt)
+			}
+		}
+		if uniques != nil {
+			// Inv: not a struct.
+
+			// The type T in constraint T{...} depends on the CompLit:
+			// - for a basic-keyed map, use map[K]int;
+			// - for an interface-keyed map, use map[any]int;
+			// - for a slice, use []int;
+			// - for an array or *array, use [n]int.
+			// The last two entail progressively stronger index checks.
+			var ct ast.Expr // type syntax for constraint
+			switch t := t.(type) {
+			case *types.Map:
+				if types.IsInterface(t.Key()) {
+					ct = &ast.MapType{
+						Key:   makeIdent(st.any),
+						Value: makeIdent(st.int),
+					}
+				} else {
+					ct = &ast.MapType{
+						Key:   makeIdent(st.typename(t.Key())),
+						Value: makeIdent(st.int),
+					}
+				}
+			case *types.Array: // or *array
+				ct = &ast.ArrayType{
+					Len: makeIntLit(t.Len()),
+					Elt: makeIdent(st.int),
+				}
+			default:
+				panic(t)
+			}
+			st.emitUnique(ct, uniques)
+		}
+		// definitely non-constant
+
+	case *ast.SelectorExpr:
+		_ = st.expr(e.X)
+		_ = st.expr(e.Sel)
+		// The defer is sufficient to handle
+		// qualified identifiers (pkg.Const).
+		// All other cases are definitely non-constant.
+
+	case *ast.IndexExpr:
+		if tv.IsType() {
+			// type C[T]
+			_ = st.expr(e.X)
+			_ = st.expr(e.Index)
+		} else {
+			// term x[i]
+			//
+			// Constraints (if x is slice/string/array/*array, not map):
+			// - i >= 0
+			//     if i is a fallible constant
+			// - i < len(x)
+			//     if x is array/*array and
+			//     i is a fallible constant;
+			//  or if x is a string and both i
+			//     and x are maybe-constant,
+			//     but not both are constants.
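+			//
+			// For example (cf. TestFalconMisc in falcon_test.go),
+			// for [1]int{}[x] with x = 1 the emitted constraint is
+			// roughly [1]int{...}[1], whose constant index is out
+			// of range, so it fails to type-check and x is kept.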
+			kX := st.expr(e.X)
+			kI := st.expr(e.Index)
+			if kI != nil && !is[*types.Map](st.info.TypeOf(e.X).Underlying()) {
+				if kI, ok := kI.(ast.Expr); ok {
+					st.emitNonNegative(kI)
+				}
+				// Emit constraint to check indices against known length.
+				// TODO(adonovan): factor with SliceExpr logic.
+				var x ast.Expr
+				if kX != nil {
+					// string
+					x = st.toExpr(kX)
+				} else if arr, ok := deref(st.info.TypeOf(e.X).Underlying()).(*types.Array); ok {
+					// array, *array
+					x = &ast.CompositeLit{
+						Type: &ast.ArrayType{
+							Len: makeIntLit(arr.Len()),
+							Elt: makeIdent(st.int),
+						},
+					}
+				}
+				if x != nil {
+					st.emit(&ast.IndexExpr{
+						X:     x,
+						Index: st.toExpr(kI),
+					})
+				}
+			}
+		}
+		// definitely non-constant
+
+	case *ast.SliceExpr:
+		// x[low:high:max]
+		//
+		// Emit non-negative constraints for each index,
+		// plus low <= high <= max <= len(x)
+		// for each pair that are maybe-constant
+		// but not definitely constant.
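+		//
+		// For example (cf. TestFalconSliceIndices), at a call
+		// f(1, 0) to func(i, j int) []int { return s[i:j] },
+		// the low <= high requirement is violated (1 > 0),
+		// so i and j are kept as variables.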
+
+		kX := st.expr(e.X)
+		var kLow, kHigh, kMax any
+		if e.Low != nil {
+			kLow = st.expr(e.Low)
+			if kLow != nil {
+				if kLow, ok := kLow.(ast.Expr); ok {
+					st.emitNonNegative(kLow)
+				}
+			}
+		}
+		if e.High != nil {
+			kHigh = st.expr(e.High)
+			if kHigh != nil {
+				if kHigh, ok := kHigh.(ast.Expr); ok {
+					st.emitNonNegative(kHigh)
+				}
+				if kLow != nil {
+					st.emitMonotonic(st.toExpr(kLow), st.toExpr(kHigh))
+				}
+			}
+		}
+		if e.Max != nil {
+			kMax = st.expr(e.Max)
+			if kMax != nil {
+				if kMax, ok := kMax.(ast.Expr); ok {
+					st.emitNonNegative(kMax)
+				}
+				if kHigh != nil {
+					st.emitMonotonic(st.toExpr(kHigh), st.toExpr(kMax))
+				}
+			}
+		}
+
+		// Emit constraint to check indices against known length.
+		var x ast.Expr
+		if kX != nil {
+			// string
+			x = st.toExpr(kX)
+		} else if arr, ok := deref(st.info.TypeOf(e.X).Underlying()).(*types.Array); ok {
+			// array, *array
+			x = &ast.CompositeLit{
+				Type: &ast.ArrayType{
+					Len: makeIntLit(arr.Len()),
+					Elt: makeIdent(st.int),
+				},
+			}
+		}
+		if x != nil {
+			// Avoid slice[::max] if kHigh is nonconstant (nil).
+			high, max := st.toExpr(kHigh), st.toExpr(kMax)
+			if high == nil {
+				high = max // => slice[:max:max]
+			}
+			st.emit(&ast.SliceExpr{
+				X:    x,
+				Low:  st.toExpr(kLow),
+				High: high,
+				Max:  max,
+			})
+		}
+		// definitely non-constant
+
+	case *ast.TypeAssertExpr:
+		_ = st.expr(e.X)
+		if e.Type != nil {
+			_ = st.expr(e.Type)
+		}
+
+	case *ast.CallExpr:
+		_ = st.expr(e.Fun)
+		if tv, ok := st.info.Types[e.Fun]; ok && tv.IsType() {
+			// conversion T(x)
+			//
+			// Possible "value out of range".
+			kX := st.expr(e.Args[0])
+			if kX != nil && isBasic(tv.Type, types.IsConstType) {
+				conv := convert(makeIdent(st.typename(tv.Type)), st.toExpr(kX))
+				if is[ast.Expr](kX) {
+					st.emit(conv)
+				}
+				return conv
+			}
+			return nil // definitely non-constant
+		}
+
+		// call f(x)
+
+		all := true // all args are possibly-constant
+		kArgs := make([]ast.Expr, len(e.Args))
+		for i, arg := range e.Args {
+			if kArg := st.expr(arg); kArg != nil {
+				kArgs[i] = st.toExpr(kArg)
+			} else {
+				all = false
+			}
+		}
+
+		// Calls to built-ins with fallibly constant arguments
+		// may become constant. All other calls are either
+		// constant or non-constant.
+		if id, ok := e.Fun.(*ast.Ident); ok && all && tv.Value == nil {
+			if builtin, ok := st.info.Uses[id].(*types.Builtin); ok {
+				switch builtin.Name() {
+				case "len", "imag", "real", "complex", "min", "max":
+					return &ast.CallExpr{
+						Fun:      id,
+						Args:     kArgs,
+						Ellipsis: e.Ellipsis,
+					}
+				}
+			}
+		}
+
+	case *ast.StarExpr: // *T, *ptr
+		_ = st.expr(e.X)
+
+	case *ast.UnaryExpr:
+		// + - ! ^ & <- ~
+		//
+		// Possible "negation of minint".
+		// Emit constraint: -x
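+		//
+		// For example (cf. TestFalconMinusMinInt), with
+		// x = int32(-0x80000000) the constraint -x overflows
+		// int32 and fails to type-check, so x is kept.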
+		kX := st.expr(e.X)
+		if kX != nil && !is[types.TypeAndValue](kX) {
+			if e.Op == token.SUB {
+				st.emit(&ast.UnaryExpr{
+					Op: e.Op,
+					X:  st.toExpr(kX),
+				})
+			}
+
+			return &ast.UnaryExpr{
+				Op: e.Op,
+				X:  st.toExpr(kX),
+			}
+		}
+
+	case *ast.BinaryExpr:
+		kX := st.expr(e.X)
+		kY := st.expr(e.Y)
+		switch e.Op {
+		case token.QUO, token.REM:
+			// x/y, x%y
+			//
+			// Possible "integer division by zero" or
+			// "minint / -1" overflow.
+			// Emit constraint: x/y or 1/y
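+			//
+			// For example (cf. TestFalconDivision), with
+			// x, y = 1, 0 the constraint 1/0 fails to type-check,
+			// and with int32 operands -0x80000000 and -1 the
+			// constant quotient overflows int32; in both cases
+			// the arguments are kept as variables.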
+			if kY != nil {
+				if kX == nil {
+					kX = makeIntLit(1)
+				}
+				st.emit(&ast.BinaryExpr{
+					Op: e.Op,
+					X:  st.toExpr(kX),
+					Y:  st.toExpr(kY),
+				})
+			}
+
+		case token.ADD, token.SUB, token.MUL:
+			// x+y, x-y, x*y
+			//
+			// Possible "arithmetic overflow".
+			// Emit constraint: x+y
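+			//
+			// For example (cf. TestFalconArithmeticOverflow),
+			// int32(1<<30) + int32(1<<30) overflows int32 and
+			// fails to type-check, so both arguments are kept.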
+			if kX != nil && kY != nil {
+				st.emit(&ast.BinaryExpr{
+					Op: e.Op,
+					X:  st.toExpr(kX),
+					Y:  st.toExpr(kY),
+				})
+			}
+
+		case token.SHL, token.SHR:
+			// x << y, x >> y
+			//
+			// Possible "constant shift too large".
+			// Either operand may be too large individually,
+			// and they may be too large together.
+			// Emit constraint:
+			//    x << y (if both maybe-constant)
+			//    x << 0 (if y is non-constant)
+			//    1 << y (if x is non-constant)
+			if kX != nil || kY != nil {
+				x := st.toExpr(kX)
+				if x == nil {
+					x = makeIntLit(1)
+				}
+				y := st.toExpr(kY)
+				if y == nil {
+					y = makeIntLit(0)
+				}
+				st.emit(&ast.BinaryExpr{
+					Op: e.Op,
+					X:  x,
+					Y:  y,
+				})
+			}
+
+		case token.LSS, token.GTR, token.EQL, token.NEQ, token.LEQ, token.GEQ:
+			// < > == != <= >=
+			//
+			// A "x cmp y" expression with constant operands x, y is
+			// itself constant, but I can't see how a constant bool
+			// could be fallible: the compiler doesn't reject duplicate
+			// boolean cases in a switch, presumably because boolean
+			// switches are less like n-way branches and more like
+			// sequential if-else chains with possibly overlapping
+			// conditions; and there is (sadly) no way to convert a
+			// boolean constant to an int constant.
+		}
+		if kX != nil && kY != nil {
+			return &ast.BinaryExpr{
+				Op: e.Op,
+				X:  st.toExpr(kX),
+				Y:  st.toExpr(kY),
+			}
+		}
+
+	// types
+	//
+	// We need to visit types (and even type parameters)
+	// in order to reach all the places where things could go wrong:
+	//
+	// 	const (
+	// 		s = ""
+	// 		i = 0
+	// 	)
+	// 	type C[T [unsafe.Sizeof(func() { _ = s[i] })]int] bool
+
+	case *ast.IndexListExpr:
+		_ = st.expr(e.X)
+		for _, expr := range e.Indices {
+			_ = st.expr(expr)
+		}
+
+	case *ast.Ellipsis:
+		if e.Elt != nil {
+			_ = st.expr(e.Elt)
+		}
+
+	case *ast.ArrayType:
+		if e.Len != nil {
+			_ = st.expr(e.Len)
+		}
+		_ = st.expr(e.Elt)
+
+	case *ast.StructType:
+		st.fieldTypes(e.Fields)
+
+	case *ast.FuncType:
+		st.fieldTypes(e.TypeParams)
+		st.fieldTypes(e.Params)
+		st.fieldTypes(e.Results)
+
+	case *ast.InterfaceType:
+		st.fieldTypes(e.Methods)
+
+	case *ast.MapType:
+		_ = st.expr(e.Key)
+		_ = st.expr(e.Value)
+
+	case *ast.ChanType:
+		_ = st.expr(e.Value)
+	}
+	return
+}
+
+// toExpr converts the result of expr to a falcon expression.
+// (We don't do this in expr itself as we first need to discriminate
+// constants from maybe-constants.)
+func (st *falconState) toExpr(x any) ast.Expr {
+	switch x := x.(type) {
+	case nil:
+		return nil
+
+	case types.TypeAndValue:
+		lit := makeLiteral(x.Value)
+		if !isBasic(x.Type, types.IsUntyped) {
+			// convert to "typed" type
+			lit = &ast.CallExpr{
+				Fun:  makeIdent(st.typename(x.Type)),
+				Args: []ast.Expr{lit},
+			}
+		}
+		return lit
+
+	case ast.Expr:
+		return x
+
+	default:
+		panic(x)
+	}
+}
+
+func makeLiteral(v constant.Value) ast.Expr {
+	switch v.Kind() {
+	case constant.Bool:
+		// Rather than refer to the true or false built-ins,
+		// which could be shadowed by poorly chosen parameter
+		// names, we use 0 == 0 for true and 0 != 0 for false.
+		op := token.EQL
+		if !constant.BoolVal(v) {
+			op = token.NEQ
+		}
+		return &ast.BinaryExpr{
+			Op: op,
+			X:  makeIntLit(0),
+			Y:  makeIntLit(0),
+		}
+
+	case constant.String:
+		return &ast.BasicLit{
+			Kind:  token.STRING,
+			Value: v.ExactString(),
+		}
+
+	case constant.Int:
+		return &ast.BasicLit{
+			Kind:  token.INT,
+			Value: v.ExactString(),
+		}
+
+	case constant.Float:
+		return &ast.BasicLit{
+			Kind:  token.FLOAT,
+			Value: v.ExactString(),
+		}
+
+	case constant.Complex:
+		// The components could be float or int.
+		y := makeLiteral(constant.Imag(v))
+		y.(*ast.BasicLit).Value += "i" // ugh
+		if re := constant.Real(v); !consteq(re, kZeroInt) {
+			// complex: x + yi
+			y = &ast.BinaryExpr{
+				Op: token.ADD,
+				X:  makeLiteral(re),
+				Y:  y,
+			}
+		}
+		return y
+
+	default:
+		panic(v.Kind())
+	}
+}
+
+func makeIntLit(x int64) *ast.BasicLit {
+	return &ast.BasicLit{
+		Kind:  token.INT,
+		Value: strconv.FormatInt(x, 10),
+	}
+}
+
+func isBasic(t types.Type, info types.BasicInfo) bool {
+	basic, ok := t.Underlying().(*types.Basic)
+	return ok && basic.Info()&info != 0
+}
diff --git a/internal/refactor/inline/falcon_test.go b/internal/refactor/inline/falcon_test.go
new file mode 100644
index 0000000..64a98f5
--- /dev/null
+++ b/internal/refactor/inline/falcon_test.go
@@ -0,0 +1,381 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline_test
+
+import "testing"
+
+// Testcases mostly come in pairs, of a success and a failure
+// to substitute based on specific constant argument values.
+
+func TestFalconStringIndex(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Non-negative string index.",
+			`func f(i int) byte { return s[i] }; var s string`,
+			`func _() { f(0) }`,
+			`func _() { _ = s[0] }`,
+		},
+		{
+			"Negative string index.",
+			`func f(i int) byte { return s[i] }; var s string`,
+			`func _() { f(-1) }`,
+			`func _() {
+	var i int = -1
+	_ = s[i]
+}`,
+		},
+		{
+			"String index in range.",
+			`func f(s string, i int) byte { return s[i] }`,
+			`func _() { f("-", 0) }`,
+			`func _() { _ = "-"[0] }`,
+		},
+		{
+			"String index out of range.",
+			`func f(s string, i int) byte { return s[i] }`,
+			`func _() { f("-", 1) }`,
+			`func _() {
+	var (
+		s string = "-"
+		i int    = 1
+	)
+	_ = s[i]
+}`,
+		},
+		{
+			"Remove known prefix (OK)",
+			`func f(s, prefix string) string { return s[:len(prefix)] }`,
+			`func _() { f("", "") }`,
+			`func _() { _ = ""[:len("")] }`,
+		},
+		{
+			"Remove not-a-prefix (out of range)",
+			`func f(s, prefix string) string { return s[:len(prefix)] }`,
+			`func _() { f("", "pre") }`,
+			`func _() {
+	var s, prefix string = "", "pre"
+	_ = s[:len(prefix)]
+}`,
+		},
+	})
+}
+
+func TestFalconSliceIndices(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Monotonic (0<=i<=j) slice indices (len unknown).",
+			`func f(i, j int) []int { return s[i:j] }; var s []int`,
+			`func _() { f(0, 1) }`,
+			`func _() { _ = s[0:1] }`,
+		},
+		{
+			"Non-monotonic slice indices (len unknown).",
+			`func f(i, j int) []int { return s[i:j] }; var s []int`,
+			`func _() { f(1, 0) }`,
+			`func _() {
+	var i, j int = 1, 0
+	_ = s[i:j]
+}`,
+		},
+		{
+			"Negative slice index.",
+			`func f(i, j int) []int { return s[i:j] }; var s []int`,
+			`func _() { f(-1, 1) }`,
+			`func _() {
+	var i, j int = -1, 1
+	_ = s[i:j]
+}`,
+		},
+	})
+}
+
+func TestFalconMapKeys(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Unique map keys (int)",
+			`func f(x int) { _ = map[int]bool{1: true, x: true} }`,
+			`func _() { f(2) }`,
+			`func _() { _ = map[int]bool{1: true, 2: true} }`,
+		},
+		{
+			"Duplicate map keys (int)",
+			`func f(x int) { _ = map[int]bool{1: true, x: true} }`,
+			`func _() { f(1) }`,
+			`func _() {
+	var x int = 1
+	_ = map[int]bool{1: true, x: true}
+}`,
+		},
+		{
+			"Unique map keys (varied built-in types)",
+			`func f(x int16) { _ = map[any]bool{1: true, x: true} }`,
+			`func _() { f(2) }`,
+			`func _() { _ = map[any]bool{1: true, int16(2): true} }`,
+		},
+		{
+			"Duplicate map keys (varied built-in types)",
+			`func f(x int16) { _ = map[any]bool{1: true, x: true} }`,
+			`func _() { f(1) }`,
+			`func _() { _ = map[any]bool{1: true, int16(1): true} }`,
+		},
+		{
+			"Unique map keys (varied user-defined types)",
+			`func f(x myint) { _ = map[any]bool{1: true, x: true} }; type myint int`,
+			`func _() { f(2) }`,
+			`func _() { _ = map[any]bool{1: true, myint(2): true} }`,
+		},
+		{
+			"Duplicate map keys (varied user-defined types)",
+			`func f(x myint, y myint2) { _ = map[any]bool{x: true, y: true} }; type (myint int; myint2 int)`,
+			`func _() { f(1, 1) }`,
+			`func _() {
+	var (
+		x myint  = 1
+		y myint2 = 1
+	)
+	_ = map[any]bool{x: true, y: true}
+}`,
+		},
+		{
+			"Duplicate map keys (user-defined alias to built-in)",
+			`func f(x myint, y int) { _ = map[any]bool{x: true, y: true} }; type myint = int`,
+			`func _() { f(1, 1) }`,
+			`func _() {
+	var (
+		x myint = 1
+		y int   = 1
+	)
+	_ = map[any]bool{x: true, y: true}
+}`,
+		},
+	})
+}
+
+func TestFalconSwitchCases(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Unique switch cases (int).",
+			`func f(x int) { switch 0 { case x: case 1: } }`,
+			`func _() { f(2) }`,
+			`func _() {
+	switch 0 {
+	case 2:
+	case 1:
+	}
+}`,
+		},
+		{
+			"Duplicate switch cases (int).",
+			`func f(x int) { switch 0 { case x: case 1: } }`,
+			`func _() { f(1) }`,
+			`func _() {
+	var x int = 1
+	switch 0 {
+	case x:
+	case 1:
+	}
+}`,
+		},
+		{
+			"Unique switch cases (varied built-in types).",
+			`func f(x int) { switch any(nil) { case x: case int16(1): } }`,
+			`func _() { f(2) }`,
+			`func _() {
+	switch any(nil) {
+	case 2:
+	case int16(1):
+	}
+}`,
+		},
+		{
+			"Duplicate switch cases (varied built-in types).",
+			`func f(x int) { switch any(nil) { case x: case int16(1): } }`,
+			`func _() { f(1) }`,
+			`func _() {
+	switch any(nil) {
+	case 1:
+	case int16(1):
+	}
+}`,
+		},
+	})
+}
+
+func TestFalconDivision(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Division by two.",
+			`func f(x, y int) int { return x / y }`,
+			`func _() { f(1, 2) }`,
+			`func _() { _ = 1 / 2 }`,
+		},
+		{
+			"Division by zero.",
+			`func f(x, y int) int { return x / y }`,
+			`func _() { f(1, 0) }`,
+			`func _() {
+	var x, y int = 1, 0
+	_ = x / y
+}`,
+		},
+		{
+			"Division by two (statement).",
+			`func f(x, y int) { x /= y }`,
+			`func _() { f(1, 2) }`,
+			`func _() {
+	var x int = 1
+	x /= 2
+}`,
+		},
+		{
+			"Division by zero (statement).",
+			`func f(x, y int) { x /= y }`,
+			`func _() { f(1, 0) }`,
+			`func _() {
+	var x, y int = 1, 0
+	x /= y
+}`,
+		},
+		{
+			"Division of minint by two (ok).",
+			`func f(x, y int32) { _ = x / y }`,
+			`func _() { f(-0x80000000, 2) }`,
+			`func _() { _ = int32(-0x80000000) / int32(2) }`,
+		},
+		{
+			"Division of minint by -1 (overflow).",
+			`func f(x, y int32) { _ = x / y }`,
+			`func _() { f(-0x80000000, -1) }`,
+			`func _() {
+	var x, y int32 = -0x80000000, -1
+	_ = x / y
+}`,
+		},
+	})
+}
+
+func TestFalconMinusMinInt(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Negation of maxint.",
+			`func f(x int32) int32 { return -x }`,
+			`func _() { f(0x7fffffff) }`,
+			`func _() { _ = -int32(0x7fffffff) }`,
+		},
+		{
+			"Negation of minint.",
+			`func f(x int32) int32 { return -x }`,
+			`func _() { f(-0x80000000) }`,
+			`func _() {
+	var x int32 = -0x80000000
+	_ = -x
+}`,
+		},
+	})
+}
+
+func TestFalconArithmeticOverflow(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Addition without overflow.",
+			`func f(x, y int32) int32 { return x + y }`,
+			`func _() { f(100, 200) }`,
+			`func _() { _ = int32(100) + int32(200) }`,
+		},
+		{
+			"Addition with overflow.",
+			`func f(x, y int32) int32 { return x + y }`,
+			`func _() { f(1<<30, 1<<30) }`,
+			`func _() {
+	var x, y int32 = 1 << 30, 1 << 30
+	_ = x + y
+}`,
+		},
+		{
+			"Conversion in range.",
+			`func f(x int) int8 { return int8(x) }`,
+			`func _() { f(123) }`,
+			`func _() { _ = int8(123) }`,
+		},
+		{
+			"Conversion out of range.",
+			`func f(x int) int8 { return int8(x) }`,
+			`func _() { f(456) }`,
+			`func _() {
+	var x int = 456
+	_ = int8(x)
+}`,
+		},
+	})
+}
+
+func TestFalconComplex(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Complex arithmetic (good).",
+			`func f(re, im float64, z complex128) byte { return "x"[int(real(complex(re, im)*complex(re, -im)-z))] }`,
+			`func _() { f(1, 2, 5+0i) }`,
+			`func _() { _ = "x"[int(real(complex(float64(1), float64(2))*complex(float64(1), -float64(2))-(5+0i)))] }`,
+		},
+		{
+			"Complex arithmetic (bad).",
+			`func f(re, im float64, z complex128) byte { return "x"[int(real(complex(re, im)*complex(re, -im)-z))] }`,
+			`func _() { f(1, 3, 5+0i) }`,
+			`func _() {
+	var (
+		re, im float64    = 1, 3
+		z      complex128 = 5 + 0i
+	)
+	_ = "x"[int(real(complex(re, im)*complex(re, -im)-z))]
+}`,
+		},
+	})
+}
+func TestFalconMisc(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Compound constant expression (good).",
+			`func f(x, y string, i, j int) byte { return x[i*len(y)+j] }`,
+			`func _() { f("abc", "xy", 2, -3) }`,
+			`func _() { _ = "abc"[2*len("xy")+-3] }`,
+		},
+		{
+			"Compound constant expression (index out of range).",
+			`func f(x, y string, i, j int) byte { return x[i*len(y)+j] }`,
+			`func _() { f("abc", "xy", 4, -3) }`,
+			`func _() {
+	var (
+		x, y string = "abc", "xy"
+		i, j int    = 4, -3
+	)
+	_ = x[i*len(y)+j]
+}`,
+		},
+		{
+			"Constraints within nested functions (good).",
+			`func f(x int) { _ = func() { _ = [1]int{}[x] } }`,
+			`func _() { f(0) }`,
+			`func _() { _ = func() { _ = [1]int{}[0] } }`,
+		},
+		{
+			"Constraints within nested functions (bad).",
+			`func f(x int) { _ = func() { _ = [1]int{}[x] } }`,
+			`func _() { f(1) }`,
+			`func _() {
+	var x int = 1
+	_ = func() { _ = [1]int{}[x] }
+}`,
+		},
+		{
+			"Falcon violation rejects only the constant arguments (x, z).",
+			`func f(x, y, z string) string { return x[:2] + y + z[:2] }; var b string`,
+			`func _() { f("a", b, "c") }`,
+			`func _() {
+	var x, z string = "a", "c"
+	_ = x[:2] + b + z[:2]
+}`,
+		},
+	})
+}
diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go
index cb98d58..9615ab4 100644
--- a/internal/refactor/inline/inline.go
+++ b/internal/refactor/inline/inline.go
@@ -8,6 +8,7 @@
 	"bytes"
 	"fmt"
 	"go/ast"
+	"go/constant"
 	"go/format"
 	"go/parser"
 	"go/token"
@@ -34,7 +35,8 @@
 	Call    *ast.CallExpr
 	Content []byte // source of file containing
 
-	path []ast.Node
+	path          []ast.Node    // path from call to root of file syntax tree
+	enclosingFunc *ast.FuncDecl // top-level function/method enclosing the call, if any
 }
 
 // Inline inlines the called function (callee) into the function call (caller)
@@ -57,6 +59,12 @@
 		return nil, fmt.Errorf("internal error: caller syntax positions are inconsistent with file content (did you forget to use FileSet.PositionFor when computing the file name?)")
 	}
 
+	// TODO(adonovan): use go1.21's ast.IsGenerated.
+	// Break the string literal so we can use inlining in this file. :)
+	if bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) {
+		return nil, fmt.Errorf("cannot inline calls from files that import \"C\"")
+	}
+
 	res, err := inline(logf, caller, &callee.impl)
 	if err != nil {
 		return nil, err
@@ -66,6 +74,89 @@
 	assert(res.old != nil, "old is nil")
 	assert(res.new != nil, "new is nil")
 
+	// A single return operand inlined to a unary
+	// expression context may need parens. Otherwise:
+	//    func two() int { return 1+1 }
+	//    print(-two())  =>  print(-1+1) // oops!
+	//
+	// Usually it is not necessary to insert ParenExprs
+	// as the formatter is smart enough to insert them as
+	// needed by the context. But the res.{old,new}
+	// substitution is done by formatting res.new in isolation
+	// and then splicing its text over res.old, so the
+	// formatter doesn't see the parent node and cannot do
+	// the right thing. (One solution would be to always
+	// format the enclosing node of old, but that requires
+	// non-lossy comment handling, #20744.)
+	//
+	// So, we must analyze the call's context
+	// to see whether ambiguity is possible.
+	// For example, if the context is x[y:z], then
+	// the x subtree is subject to precedence ambiguity
+	// (replacing x by p+q would give p+q[y:z] which is wrong)
+	// but the y and z subtrees are safe.
+	if needsParens(caller.path, res.old, res.new) {
+		res.new = &ast.ParenExpr{X: res.new.(ast.Expr)}
+	}
+
+	// Some reduction strategies return a new block holding the
+	// callee's statements. The block's braces may be elided when
+	// there is no conflict between names declared in the block
+	// and those declared by the parent block, and no risk of
+	// a caller's goto jumping forward across a declaration.
+	//
+	// This elision is only safe when the ExprStmt is beneath a
+	// BlockStmt, CaseClause.Body, or CommClause.Body
+	// (see "statement theory").
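+	//
+	// For example (a sketch): a call reduced to the block
+	// { x := 1; print(x) } may lose its braces only if the
+	// enclosing body does not already declare x; otherwise the
+	// braces are kept to preserve the scope of the inner x.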
+	elideBraces := false
+	if newBlock, ok := res.new.(*ast.BlockStmt); ok {
+		i := nodeIndex(caller.path, res.old)
+		parent := caller.path[i+1]
+		var body []ast.Stmt
+		switch parent := parent.(type) {
+		case *ast.BlockStmt:
+			body = parent.List
+		case *ast.CommClause:
+			body = parent.Body
+		case *ast.CaseClause:
+			body = parent.Body
+		}
+		if body != nil {
+			callerNames := declares(body)
+
+			// If BlockStmt is a function body,
+			// include its receiver, params, and results.
+			addFieldNames := func(fields *ast.FieldList) {
+				if fields != nil {
+					for _, field := range fields.List {
+						for _, id := range field.Names {
+							callerNames[id.Name] = true
+						}
+					}
+				}
+			}
+			switch f := caller.path[i+2].(type) {
+			case *ast.FuncDecl:
+				addFieldNames(f.Recv)
+				addFieldNames(f.Type.Params)
+				addFieldNames(f.Type.Results)
+			case *ast.FuncLit:
+				addFieldNames(f.Type.Params)
+				addFieldNames(f.Type.Results)
+			}
+
+			if len(callerLabels(caller.path)) > 0 {
+				// TODO(adonovan): be more precise and reject
+				// only forward gotos across the inlined block.
+				logf("keeping block braces: caller uses control labels")
+			} else if intersects(declares(newBlock.List), callerNames) {
+				logf("keeping block braces: avoids name conflict")
+			} else {
+				elideBraces = true
+			}
+		}
+	}
+
 	// Don't call replaceNode(caller.File, res.old, res.new)
 	// as it mutates the caller's syntax tree.
 	// Instead, splice the file, replacing the extent of the "old"
@@ -78,10 +169,29 @@
 		var out bytes.Buffer
 		out.Write(caller.Content[:start])
 		// TODO(adonovan): might it make more sense to use
-		// callee.Fset when formatting res.new??
+		// callee.Fset when formatting res.new?
+		// The new tree is a mix of (cloned) caller nodes for
+		// the argument expressions and callee nodes for the
+		// function body. In essence the question is: which
+		// is more likely to have comments?
+		// Usually the callee body will be larger and more
+		// statement-heavy than the arguments, but a
+		// strategy may widen the scope of the replacement
+		// (res.old) from CallExpr to, say, its enclosing
+		// block, so the caller nodes dominate.
+		// Precise comment handling would make this a
+		// non-issue. Formatting wouldn't really need a
+		// FileSet at all.
+		mark := out.Len()
 		if err := format.Node(&out, caller.Fset, res.new); err != nil {
 			return nil, err
 		}
+		if elideBraces {
+			// Overwrite unnecessary {...} braces with spaces.
+			// TODO(adonovan): less hacky solution.
+			out.Bytes()[mark] = ' '
+			out.Bytes()[out.Len()-1] = ' '
+		}
 		out.Write(caller.Content[end:])
 		const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
 		f, err = parser.ParseFile(caller.Fset, "callee.go", &out, mode)
@@ -230,24 +340,28 @@
 
 	// -- analyze callee's free references in caller context --
 
-	// syntax path enclosing Call, innermost first (Path[0]=Call)
+	// Compute syntax path enclosing Call, innermost first (Path[0]=Call),
+	// and outermost enclosing function, if any.
 	caller.path, _ = astutil.PathEnclosingInterval(caller.File, caller.Call.Pos(), caller.Call.End())
+	for _, n := range caller.path {
+		if decl, ok := n.(*ast.FuncDecl); ok {
+			caller.enclosingFunc = decl
+			break
+		}
+	}
 
-	// Find the outermost function enclosing the call site (if any).
-	// Analyze all its local vars for the "single assignment" property
+	// If the call is within a function, analyze all its
+	// local vars for the "single assignment" property.
 	// (Taking the address &v counts as a potential assignment.)
 	var assign1 func(v *types.Var) bool // reports whether v a single-assignment local var
 	{
 		updatedLocals := make(map[*types.Var]bool)
-		for _, n := range caller.path {
-			if decl, ok := n.(*ast.FuncDecl); ok {
-				escape(caller.Info, decl.Body, func(v *types.Var, _ bool) {
-					updatedLocals[v] = true
-				})
-				break
-			}
+		if caller.enclosingFunc != nil {
+			escape(caller.Info, caller.enclosingFunc, func(v *types.Var, _ bool) {
+				updatedLocals[v] = true
+			})
+			logf("multiple-assignment vars: %v", updatedLocals)
 		}
-		logf("multiple-assignment vars: %v", updatedLocals)
 		assign1 = func(v *types.Var) bool { return !updatedLocals[v] }
 	}
 
@@ -269,29 +383,43 @@
 
 	// localImportName returns the local name for a given imported package path.
 	var newImports []*ast.ImportSpec
-	localImportName := func(path string) string {
+	localImportName := func(path string, shadows map[string]bool) string {
 		// Does an import exist?
 		for _, name := range importMap[path] {
 			// Check that either the import preexisted,
-			// or that it was newly added (no PkgName) but is not shadowed.
-			found := caller.lookup(name)
-			if is[*types.PkgName](found) || found == nil {
-				return name
+			// or that it was newly added (no PkgName) but is not shadowed,
+			// either in the callee (shadows) or caller (caller.lookup).
+			if !shadows[name] {
+				found := caller.lookup(name)
+				if is[*types.PkgName](found) || found == nil {
+					return name
+				}
 			}
 		}
 
+		newlyAdded := func(name string) bool {
+			for _, new := range newImports {
+				if new.Name.Name == name {
+					return true
+				}
+			}
+			return false
+		}
+
 		// import added by callee
 		//
 		// Choose local PkgName based on last segment of
 		// package path plus, if needed, a numeric suffix to
 		// ensure uniqueness.
 		//
+		// "init" is not a legal PkgName.
+		//
 		// TODO(adonovan): preserve the PkgName used
 		// in the original source, or, for a dot import,
 		// use the package's declared name.
 		base := pathpkg.Base(path)
 		name := base
-		for n := 0; caller.lookup(name) != nil; n++ {
+		for n := 0; shadows[name] || caller.lookup(name) != nil || newlyAdded(name) || name == "init"; n++ {
 			name = fmt.Sprintf("%s%d", base, n)
 		}
 
@@ -322,7 +450,7 @@
 		//   => check not shadowed in caller.
 		// - package-level var/func/const/types
 		//   => same package: check not shadowed in caller.
-		//   => otherwise: import other package form a qualified identifier.
+		//   => otherwise: import other package, form a qualified identifier.
 		//      (Unexported cross-package references were rejected already.)
 		// - type parameter
 		//   => not yet supported
@@ -331,10 +459,14 @@
 		//
 		// There can be no free references to labels, fields, or methods.
 
+		// Note that we must consider potential shadowing both
+		// at the caller side (caller.lookup) and, when
+		// choosing new PkgNames, within the callee (obj.shadow).
+
 		var newName ast.Expr
 		if obj.Kind == "pkgname" {
 			// Use locally appropriate import, creating as needed.
-			newName = makeIdent(localImportName(obj.PkgPath)) // imported package
+			newName = makeIdent(localImportName(obj.PkgPath, obj.Shadow)) // imported package
 
 		} else if !obj.ValidPos {
 			// Built-in function, type, or value (e.g. nil, zero):
@@ -374,7 +506,7 @@
 
 			// Form a qualified identifier, pkg.Name.
 			if qualify {
-				pkgName := localImportName(obj.PkgPath)
+				pkgName := localImportName(obj.PkgPath, obj.Shadow)
 				newName = &ast.SelectorExpr{
 					X:   makeIdent(pkgName),
 					Sel: makeIdent(obj.Name),
@@ -424,14 +556,29 @@
 		sig := calleeSymbol.Type().(*types.Signature)
 		if sig.Recv() != nil {
 			params = append(params, &parameter{
-				obj:  sig.Recv(),
-				info: callee.Params[0],
+				obj:       sig.Recv(),
+				fieldType: calleeDecl.Recv.List[0].Type,
+				info:      callee.Params[0],
 			})
 		}
+
+		// Flatten the list of syntactic types.
+		var types []ast.Expr
+		for _, field := range calleeDecl.Type.Params.List {
+			if field.Names == nil {
+				types = append(types, field.Type)
+			} else {
+				for range field.Names {
+					types = append(types, field.Type)
+				}
+			}
+		}
+
 		for i := 0; i < sig.Params().Len(); i++ {
 			params = append(params, &parameter{
-				obj:  sig.Params().At(i),
-				info: callee.Params[len(params)],
+				obj:       sig.Params().At(i),
+				fieldType: types[i],
+				info:      callee.Params[len(params)],
 			})
 		}
 
@@ -482,6 +629,7 @@
 							Elts: elts,
 						},
 						typ:        lastParam.obj.Type(),
+						constant:   nil,
 						pure:       pure,
 						effects:    effects,
 						duplicable: false,
@@ -492,18 +640,25 @@
 		}
 	}
 
+	// Log effective arguments.
+	for i, arg := range args {
+		logf("arg #%d: %s pure=%t effects=%t duplicable=%t free=%v type=%v",
+			i, debugFormatNode(caller.Fset, arg.expr),
+			arg.pure, arg.effects, arg.duplicable, arg.freevars, arg.typ)
+	}
+
 	// Note: computation below should be expressed in terms of
 	// the args and params slices, not the raw material.
 
 	// Perform parameter substitution.
 	// May eliminate some elements of params/args.
-	substitute(logf, caller, params, args, replaceCalleeID)
+	substitute(logf, caller, params, args, callee.Effects, callee.Falcon, replaceCalleeID)
 
 	// Update the callee's signature syntax.
 	updateCalleeParams(calleeDecl, params)
 
 	// Create a var (param = arg; ...) decl for use by some strategies.
-	bindingDeclStmt := createBindingDecl(logf, caller, args, calleeDecl)
+	bindingDeclStmt := createBindingDecl(logf, caller, args, calleeDecl, callee.Results)
 
 	var remainingArgs []ast.Expr
 	for _, arg := range args {
@@ -540,31 +695,48 @@
 		logf("strategy: reduce call to empty body")
 
 		// Evaluate the arguments for effects and delete the call entirely.
-		stmt := callStmt(caller.path) // cannot fail
+		stmt := callStmt(caller.path, false) // cannot fail
 		res.old = stmt
 		if nargs := len(remainingArgs); nargs > 0 {
 			// Emit "_, _ = args" to discard results.
-			// Make correction for spread calls
-			// f(g()) or x.f(g()) where g() is a tuple.
-			//
-			// TODO(adonovan): fix: it's not valid for a
-			// single AssignStmt to discard a receiver and
-			// a spread argument; use a var decl with two specs.
-			//
+
 			// TODO(adonovan): if args is the []T{a1, ..., an}
 			// literal synthesized during variadic simplification,
 			// consider unwrapping it to its (pure) elements.
 			// Perhaps there's no harm doing this for any slice literal.
-			if last := last(args); last != nil {
-				if tuple, ok := last.typ.(*types.Tuple); ok {
-					nargs += tuple.Len() - 1
+
+			// Make correction for spread calls
+			// f(g()) or recv.f(g()) where g() is a tuple.
+			if last := last(args); last != nil && last.spread {
+				nspread := last.typ.(*types.Tuple).Len()
+				if len(args) > 1 { // [recv, g()]
+					// A single AssignStmt cannot discard both, so use a 2-spec var decl.
+					res.new = &ast.GenDecl{
+						Tok: token.VAR,
+						Specs: []ast.Spec{
+							&ast.ValueSpec{
+								Names:  []*ast.Ident{makeIdent("_")},
+								Values: []ast.Expr{args[0].expr},
+							},
+							&ast.ValueSpec{
+								Names:  blanks[*ast.Ident](nspread),
+								Values: []ast.Expr{args[1].expr},
+							},
+						},
+					}
+					return res, nil
 				}
+
+				// Sole argument is spread call.
+				nargs = nspread
 			}
+
 			res.new = &ast.AssignStmt{
-				Lhs: blanks(nargs),
+				Lhs: blanks[ast.Expr](nargs),
 				Tok: token.ASSIGN,
 				Rhs: remainingArgs,
 			}
+
 		} else {
 			// No remaining arguments: delete call statement entirely
 			res.new = &ast.EmptyStmt{}
@@ -572,14 +744,12 @@
 		return res, nil
 	}
 
-	// Attempt to reduce parameterless calls
-	// whose result variables do not escape.
-	allParamsSubstituted := forall(params, func(i int, p *parameter) bool {
-		return p == nil
-	})
-	noResultEscapes := !exists(callee.Results, func(i int, r *paramInfo) bool {
-		return r.Escapes
-	})
+	// If all parameters have been substituted and no result
+	// variable is referenced, we don't need a binding decl.
+	// This may enable better reduction strategies.
+	allResultsUnreferenced := forall(callee.Results, func(i int, r *paramInfo) bool { return len(r.Refs) == 0 })
+	needBindingDecl := !allResultsUnreferenced ||
+		exists(params, func(i int, p *parameter) bool { return p != nil })
 
 	// Special case: call to { return exprs }.
 	//
@@ -590,26 +760,29 @@
 	//
 	// If:
 	// - the body is just "return expr" with trivial implicit conversions,
-	// - all parameters can be eliminated
-	//   (by substitution, or a binding decl),
-	// - no result var escapes,
+	//   or the caller's return type matches the callee's,
+	// - all parameters and result vars can be eliminated
+	//   or replaced by a binding decl,
 	// then the call expression can be replaced by the
 	// callee's body expression, suitably substituted.
-	if callee.BodyIsReturnExpr {
+	if len(calleeDecl.Body.List) == 1 &&
+		is[*ast.ReturnStmt](calleeDecl.Body.List[0]) &&
+		len(calleeDecl.Body.List[0].(*ast.ReturnStmt).Results) > 0 && // not a bare return
+		safeReturn(caller, calleeSymbol, callee) {
 		results := calleeDecl.Body.List[0].(*ast.ReturnStmt).Results
 
 		context := callContext(caller.path)
 
 		// statement context
 		if stmt, ok := context.(*ast.ExprStmt); ok &&
-			(allParamsSubstituted && noResultEscapes || bindingDeclStmt != nil) {
+			(!needBindingDecl || bindingDeclStmt != nil) {
 			logf("strategy: reduce stmt-context call to { return exprs }")
 			clearPositions(calleeDecl.Body)
 
 			if callee.ValidForCallStmt {
 				logf("callee body is valid as statement")
 				// Inv: len(results) == 1
-				if allParamsSubstituted && noResultEscapes {
+				if !needBindingDecl {
 					// Reduces to: expr
 					res.old = caller.Call
 					res.new = results[0]
@@ -630,12 +803,12 @@
 				// (f() or <-ch), explicitly discard the results:
 				// Reduces to: _, _ = exprs
 				discard := &ast.AssignStmt{
-					Lhs: blanks(callee.NumResults),
+					Lhs: blanks[ast.Expr](callee.NumResults),
 					Tok: token.ASSIGN,
 					Rhs: results,
 				}
 				res.old = stmt
-				if allParamsSubstituted && noResultEscapes {
+				if !needBindingDecl {
 					// Reduces to: _, _ = exprs
 					res.new = discard
 				} else {
@@ -652,25 +825,14 @@
 		}
 
 		// expression context
-		if allParamsSubstituted && noResultEscapes {
+		if !needBindingDecl {
 			clearPositions(calleeDecl.Body)
 
 			if callee.NumResults == 1 {
 				logf("strategy: reduce expr-context call to { return expr }")
 
-				// A single return operand inlined to a unary
-				// expression context may need parens. Otherwise:
-				//    func two() int { return 1+1 }
-				//    print(-two())  =>  print(-1+1) // oops!
-				//
-				// TODO(adonovan): do better by analyzing 'context'
-				// to see whether ambiguity is possible.
-				// For example, if the context is x[y:z], then
-				// the x subtree is subject to precedence ambiguity
-				// (replacing x by p+q would give p+q[y:z] which is wrong)
-				// but the y and z subtrees are safe.
 				res.old = caller.Call
-				res.new = &ast.ParenExpr{X: results[0]}
+				res.new = results[0]
 			} else {
 				logf("strategy: reduce spread-context call to { return expr }")
 
@@ -717,12 +879,13 @@
 	//         { var (bindings); body }
 	//         { body }
 	// so long as:
-	// - all parameters can be eliminated
-	//   (by substitution, or a binding decl),
+	// - all parameters can be eliminated or replaced by a binding decl,
 	// - call is a tail-call;
-	// - all returns in body have trivial result conversions;
+	// - all returns in body have trivial result conversions,
+	//   or the caller's return type matches the callee's,
 	// - there is no label conflict;
-	// - no result variable is referenced by name.
+	// - no result variable is referenced by name,
+	//   or implicitly by a bare return.
 	//
 	// The body may use defer, arbitrary control flow, and
 	// multiple returns.
@@ -735,17 +898,15 @@
 	// or implicit) return.
 	if ret, ok := callContext(caller.path).(*ast.ReturnStmt); ok &&
 		len(ret.Results) == 1 &&
-		callee.TrivialReturns == callee.TotalReturns &&
-		(allParamsSubstituted && noResultEscapes || bindingDeclStmt != nil) &&
+		safeReturn(caller, calleeSymbol, callee) &&
+		!callee.HasBareReturn &&
+		(!needBindingDecl || bindingDeclStmt != nil) &&
 		!hasLabelConflict(caller.path, callee.Labels) &&
-		forall(callee.Results, func(i int, p *paramInfo) bool {
-			// all result vars are unreferenced
-			return len(p.Refs) == 0
-		}) {
+		allResultsUnreferenced {
 		logf("strategy: reduce tail-call")
 		body := calleeDecl.Body
 		clearPositions(body)
-		if !(allParamsSubstituted && noResultEscapes) {
+		if needBindingDecl {
 			body.List = prepend(bindingDeclStmt, body.List...)
 		}
 		res.old = ret
@@ -766,12 +927,13 @@
 	// - callee is a void function (no returns)
 	// - callee does not use defer
 	// - there is no label conflict between caller and callee
-	// - all parameters can be eliminated
-	//   (by substitution, or a binding decl),
+	// - all parameters and result vars can be eliminated
+	//   or replaced by a binding decl,
+	// - caller ExprStmt is in unrestricted statement context.
 	//
 	// If there is only a single statement, the braces are omitted.
-	if stmt := callStmt(caller.path); stmt != nil &&
-		(allParamsSubstituted && noResultEscapes || bindingDeclStmt != nil) &&
+	if stmt := callStmt(caller.path, true); stmt != nil &&
+		(!needBindingDecl || bindingDeclStmt != nil) &&
 		!callee.HasDefer &&
 		!hasLabelConflict(caller.path, callee.Labels) &&
 		callee.TotalReturns == 0 {
@@ -779,10 +941,10 @@
 		body := calleeDecl.Body
 		var repl ast.Stmt = body
 		clearPositions(repl)
-		if !(allParamsSubstituted && noResultEscapes) {
+		if needBindingDecl {
 			body.List = prepend(bindingDeclStmt, body.List...)
 		}
-		if len(body.List) == 1 {
+		if len(body.List) == 1 { // FIXME do this opt later
 			repl = body.List[0] // singleton: omit braces
 		}
 		res.old = stmt
@@ -790,7 +952,7 @@
 		return res, nil
 	}
 
-	// TODO(adonovan): parameterless call to { stmt; return expr }
+	// TODO(adonovan): parameterless call to { stmts; return expr }
 	// from one of these contexts:
 	//    x, y     = f()
 	//    x, y    := f()
@@ -839,13 +1001,15 @@
 }
 
 type argument struct {
-	expr       ast.Expr
-	typ        types.Type      // may be tuple for sole non-receiver arg in spread call
-	spread     bool            // final arg is call() assigned to multiple params
-	pure       bool            // expr is pure (doesn't read variables)
-	effects    bool            // expr has effects (updates variables)
-	duplicable bool            // expr may be duplicated
-	freevars   map[string]bool // free names of expr
+	expr          ast.Expr
+	typ           types.Type      // may be tuple for sole non-receiver arg in spread call
+	constant      constant.Value  // value of argument if constant
+	spread        bool            // final arg is call() assigned to multiple params
+	pure          bool            // expr is pure (doesn't read variables)
+	effects       bool            // expr has effects (updates variables)
+	duplicable    bool            // expr may be duplicated
+	freevars      map[string]bool // free names of expr
+	substitutable bool            // is candidate for substitution
 }
 
 // arguments returns the effective arguments of the call.
@@ -900,6 +1064,7 @@
 			arg := &argument{
 				expr:       recvArg,
 				typ:        caller.Info.TypeOf(recvArg),
+				constant:   caller.Info.Types[recvArg].Value,
 				pure:       pure(caller.Info, assign1, recvArg),
 				effects:    effects(caller.Info, recvArg),
 				duplicable: duplicable(caller.Info, recvArg),
@@ -921,11 +1086,15 @@
 						debugFormatNode(caller.Fset, caller.Call.Fun),
 						fld.Name())
 				}
+				if is[*types.Pointer](arg.typ.Underlying()) {
+					arg.pure = false // implicit *ptr operation => impure
+				}
 				arg.expr = &ast.SelectorExpr{
 					X:   arg.expr,
 					Sel: makeIdent(fld.Name()),
 				}
 				arg.typ = fld.Type()
+				arg.duplicable = false
 			}
 
 			// Make * or & explicit.
@@ -939,45 +1108,62 @@
 				// *recv
 				arg.expr = &ast.StarExpr{X: arg.expr}
 				arg.typ = deref(arg.typ)
-
-				// Technically *recv is non-pure and
-				// non-duplicable, as side effects
-				// could change the pointer between
-				// multiple reads. But unfortunately
-				// this really degrades many of our tests.
-				// (The indices loop above should similarly
-				// update these flags when traversing pointers.)
-				//
-				// TODO(adonovan): improve the precision of
-				// purity and duplicability.
-				// For example, *new(T) is actually pure.
-				// And *ptr, where ptr doesn't escape and
-				// has no assignments other than its decl,
-				// is also pure; this is very common.
-				//
-				// arg.pure = false
+				arg.duplicable = false
+				arg.pure = false
 			}
 		}
 	}
 	for _, expr := range callArgs {
-		typ := caller.Info.TypeOf(expr)
+		tv := caller.Info.Types[expr]
 		args = append(args, &argument{
 			expr:       expr,
-			typ:        typ,
-			spread:     is[*types.Tuple](typ), // => last
+			typ:        tv.Type,
+			constant:   tv.Value,
+			spread:     is[*types.Tuple](tv.Type), // => last
 			pure:       pure(caller.Info, assign1, expr),
 			effects:    effects(caller.Info, expr),
 			duplicable: duplicable(caller.Info, expr),
 			freevars:   freeVars(caller.Info, expr),
 		})
 	}
+
+	// Re-typecheck each constant argument expression in a neutral context.
+	//
+	// In a call such as func(int16){}(1), the type checker infers
+	// the type "int16", not "untyped int", for the argument 1,
+	// because it has incorporated information from the left-hand
+	// side of the assignment implicit in parameter passing, but
+	// of course in a different context, the expression 1 may have
+	// a different type.
+	//
+	// So, we must use CheckExpr to recompute the type of the
+	// argument in a neutral context to find its inherent type.
+	// (This is arguably a bug in go/types, but I'm pretty certain
+	// I requested it be this way long ago... -adonovan)
+	//
+	// This is only needed for constants. Other implicit
+	// assignment conversions, such as unnamed-to-named struct or
+	// chan to <-chan, do not result in the type-checker imposing
+	// the LHS type on the RHS value.
+	for _, arg := range args {
+		if arg.constant == nil {
+			continue
+		}
+		info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
+		if err := types.CheckExpr(caller.Fset, caller.Types, caller.Call.Pos(), arg.expr, info); err != nil {
+			return nil, err
+		}
+		arg.typ = info.TypeOf(arg.expr)
+	}
+
 	return args, nil
 }
 
 type parameter struct {
-	obj      *types.Var // parameter var from caller's signature
-	info     *paramInfo // information from AnalyzeCallee
-	variadic bool       // (final) parameter is unsimplified ...T
+	obj       *types.Var // parameter var from caller's signature
+	fieldType ast.Expr   // syntax of type, from calleeDecl.Type.{Recv,Params}
+	info      *paramInfo // information from AnalyzeCallee
+	variadic  bool       // (final) parameter is unsimplified ...T
 }
 
 // substitute implements parameter elimination by substitution.
@@ -999,7 +1185,7 @@
 // parameter, and is provided with its relative offset and replacement
 // expression (argument), and the corresponding elements of params and
 // args are replaced by nil.
-func substitute(logf func(string, ...any), caller *Caller, params []*parameter, args []*argument, replaceCalleeID func(offset int, repl ast.Expr)) {
+func substitute(logf func(string, ...any), caller *Caller, params []*parameter, args []*argument, effects []int, falcon falconResult, replaceCalleeID func(offset int, repl ast.Expr)) {
 	// Inv:
 	//  in        calls to     variadic, len(args) >= len(params)-1
 	//  in spread calls to non-variadic, len(args) <  len(params)
@@ -1018,9 +1204,10 @@
 		// do it earlier (see pure/duplicable/freevars).
 
 		if arg.spread {
+			// spread => last argument, but not always last parameter
 			logf("keeping param %q and following ones: argument %s is spread",
 				param.info.Name, debugFormatNode(caller.Fset, arg.expr))
-			break // spread => last argument, but not always last parameter
+			return // give up
 		}
 		assert(!param.variadic, "unsimplified variadic parameter")
 		if param.info.Escapes {
@@ -1031,48 +1218,33 @@
 			logf("keeping param %q: assigned by callee", param.info.Name)
 			continue // callee needs the parameter variable
 		}
-		if !arg.pure || arg.effects {
-			// TODO(adonovan): conduct a deeper analysis of callee effects
-			// to relax this constraint.
-			logf("keeping param %q: argument %s is impure",
-				param.info.Name, debugFormatNode(caller.Fset, arg.expr))
-			continue // unsafe to change order or cardinality of effects
-		}
 		if len(param.info.Refs) > 1 && !arg.duplicable {
 			logf("keeping param %q: argument is not duplicable", param.info.Name)
 			continue // incorrect or poor style to duplicate an expression
 		}
 		if len(param.info.Refs) == 0 {
-			// Eliminating an unreferenced parameter might
+			if arg.effects {
+				logf("keeping param %q: though unreferenced, it has effects", param.info.Name)
+				continue
+			}
+
+			// If the caller is within a function body,
+			// eliminating an unreferenced parameter might
 			// remove the last reference to a caller local var.
-			for free := range arg.freevars {
-				if v, ok := caller.lookup(free).(*types.Var); ok {
-					// TODO(adonovan): be more precise and check
-					// that v is defined within the body of the caller
-					// function (if any) and is indeed referenced
-					// only by the call. (See assign1 for analysis
-					// of enclosing func.)
-					logf("keeping param %q: arg contains perhaps the last reference to possible caller local %v @ %v",
-						param.info.Name, v, caller.Fset.PositionFor(v.Pos(), false))
-					continue next
+			if caller.enclosingFunc != nil {
+				for free := range arg.freevars {
+					if v, ok := caller.lookup(free).(*types.Var); ok && within(v.Pos(), caller.enclosingFunc.Body) {
+						// TODO(adonovan): be more precise and check that v
+						// is indeed referenced only by call arguments.
+						// Better: proceed, but blank out its declaration as needed.
+						logf("keeping param %q: arg contains perhaps the last reference to possible caller local %v @ %v",
+							param.info.Name, v, caller.Fset.PositionFor(v.Pos(), false))
+						continue next
+					}
 				}
 			}
 		}
 
-		// Check that eliminating the parameter wouldn't materially
-		// change the type.
-		//
-		// (We don't simply wrap the argument in an explicit conversion
-		// to the parameter type because that could increase allocation
-		// in the number of (e.g.) string -> any conversions.
-		// Even when Uses = 1, the sole ref might be in a loop or lambda that
-		// is multiply executed.)
-		if len(param.info.Refs) > 0 && !trivialConversion(args[i].typ, params[i].obj) {
-			logf("keeping param %q: argument passing converts %s to type %s",
-				param.info.Name, args[i].typ, params[i].obj.Type())
-			continue // implicit conversion is significant
-		}
-
 		// Check for shadowing.
 		//
 		// Consider inlining a call f(z, 1) to
@@ -1088,19 +1260,208 @@
 			}
 		}
 
-		// It is safe to eliminate param and replace it with arg.
-		// No additional parens are required around arg for
-		// the supported "pure" expressions.
-		//
-		// Because arg.expr belongs to the caller,
-		// we clone it before splicing it into the callee tree.
-		logf("replacing parameter %q by argument %q",
-			param.info.Name, debugFormatNode(caller.Fset, arg.expr))
-		for _, ref := range param.info.Refs {
-			replaceCalleeID(ref, cloneNode(arg.expr).(ast.Expr))
+		arg.substitutable = true // may be substituted, if effects permit
+	}
+
+	// Reject constant arguments as substitution candidates
+	// if they cause violation of falcon constraints.
+	checkFalconConstraints(logf, params, args, falcon)
+
+	// As a final step, introduce bindings to resolve any
+	// evaluation order hazards. This must be done last, as
+	// additional subsequent bindings could introduce new hazards.
+	resolveEffects(logf, args, effects)
+
+	// The remaining candidates are safe to substitute.
+	for i, param := range params {
+		if arg := args[i]; arg.substitutable {
+
+			// Wrap the argument in an explicit conversion if
+			// substitution might materially change its type.
+			// (We already did the necessary shadowing check
+			// on the parameter type syntax.)
+			//
+			// This is only needed for substituted arguments. All
+			// other arguments are given explicit types in either
+			// a binding decl or when using the literalization
+			// strategy.
+			if len(param.info.Refs) > 0 && !trivialConversion(args[i].typ, params[i].obj) {
+				arg.expr = convert(params[i].fieldType, arg.expr)
+				logf("param %q: adding explicit %s -> %s conversion around argument",
+					param.info.Name, args[i].typ, params[i].obj.Type())
+			}
+
+			// It is safe to substitute param and replace it with arg.
+			// The formatter introduces parens as needed for precedence.
+			//
+			// Because arg.expr belongs to the caller,
+			// we clone it before splicing it into the callee tree.
+			logf("replacing parameter %q by argument %q",
+				param.info.Name, debugFormatNode(caller.Fset, arg.expr))
+			for _, ref := range param.info.Refs {
+				replaceCalleeID(ref, cloneNode(arg.expr).(ast.Expr))
+			}
+			params[i] = nil // substituted
+			args[i] = nil   // substituted
 		}
-		params[i] = nil // substituted
-		args[i] = nil   // substituted
+	}
+}
+
+// checkFalconConstraints checks whether constant arguments
+// are safe to substitute (e.g. s[i] -> ""[0] is not safe.)
+//
+// Any failed constraint causes us to reject all constant arguments as
+// substitution candidates (by clearing args[i].substitutable).
+//
+// TODO(adonovan): we could obtain a finer result rejecting only the
+// freevars of each failed constraint, and processing constraints in
+// order of increasing arity, but failures are quite rare.
+func checkFalconConstraints(logf func(string, ...any), params []*parameter, args []*argument, falcon falconResult) {
+	// Create a dummy package, as this is the only
+	// way to create an environment for CheckExpr.
+	pkg := types.NewPackage("falcon", "falcon")
+
+	// Declare types used by constraints.
+	for _, typ := range falcon.Types {
+		logf("falcon env: type %s %s", typ.Name, types.Typ[typ.Kind])
+		pkg.Scope().Insert(types.NewTypeName(token.NoPos, pkg, typ.Name, types.Typ[typ.Kind]))
+	}
+
+	// Declare constants and variables for the parameters.
+	nconst := 0
+	for i, param := range params {
+		name := param.info.Name
+		if name == "" {
+			continue // unreferenced
+		}
+		arg := args[i]
+		if arg.constant != nil && arg.substitutable && param.info.FalconType != "" {
+			t := pkg.Scope().Lookup(param.info.FalconType).Type()
+			pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, t, arg.constant))
+			logf("falcon env: const %s %s = %v", name, param.info.FalconType, arg.constant)
+			nconst++
+		} else {
+			pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, arg.typ))
+			logf("falcon env: var %s %s", name, arg.typ)
+		}
+	}
+	if nconst == 0 {
+		return // nothing to do
+	}
+
+	// Parse and evaluate the constraints in the environment.
+	fset := token.NewFileSet()
+	for _, falcon := range falcon.Constraints {
+		expr, err := parser.ParseExprFrom(fset, "falcon", falcon, 0)
+		if err != nil {
+			panic(fmt.Sprintf("failed to parse falcon constraint %s: %v", falcon, err))
+		}
+		if err := types.CheckExpr(fset, pkg, token.NoPos, expr, nil); err != nil {
+			logf("falcon: constraint %s violated: %v", falcon, err)
+			for j, arg := range args {
+				if arg.constant != nil && arg.substitutable {
+					logf("keeping param %q due to falcon violation", params[j].info.Name)
+					arg.substitutable = false
+				}
+			}
+			break
+		}
+		logf("falcon: constraint %s satisfied", falcon)
+	}
+}
+
+// resolveEffects marks arguments as non-substitutable to resolve
+// hazards resulting from the callee evaluation order described by the
+// effects list.
+//
+// To do this, each argument is categorized as a read (R), write (W),
+// or pure. A hazard occurs when the order of evaluation of a W
+// changes with respect to any R or W. Pure arguments can be
+// effectively ignored, as they can be safely evaluated in any order.
+//
+// The callee effects list contains the index of each parameter in the
+// order it is first evaluated during execution of the callee. In
+// addition, the two special values R∞ and W∞ indicate the relative
+// position of the callee's first non-parameter read and its first
+// effect (or other unknown behavior).
+// For example, the list [0 2 1 R∞ 3 W∞] for func(a, b, c, d)
+// indicates that the callee referenced parameters a, c, and b,
+// followed by an arbitrary read, then parameter d, and finally
+// unknown behavior.
+//
+// When an argument is marked as not substitutable, we say that it is
+// 'bound', in the sense that its evaluation occurs in a binding decl
+// or literalized call. Such bindings always occur in the original
+// callee parameter order.
+//
+// In this context, "resolving hazards" means binding arguments so
+// that they are evaluated in a valid, hazard-free order. A trivial
+// solution to this problem would be to bind all arguments, but of
+// course that's not useful. The goal is to bind as few arguments as
+// possible.
+//
+// The algorithm proceeds by inspecting arguments in reverse parameter
+// order (right to left), preserving the invariant that every
+// higher-ordered argument is either already substituted or does not
+// need to be substituted. At each iteration, if there is an
+// evaluation hazard in the callee effects relative to the current
+// argument, the argument must be bound. Subsequently, if the argument
+// is bound for any reason, each lower-ordered argument must also be
+// bound if either the argument or the lower-ordered argument is a
+// W; otherwise the binding itself would introduce a hazard.
+//
+// Thus, after each iteration, there are no hazards relative to the
+// current argument. Subsequent iterations cannot introduce hazards
+// with that argument because they can result only in additional
+// binding of lower-ordered arguments.
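+//
+// For example (mirroring a case in the tests), given
+//
+//	func f(a, b, c int) { print(a, c, b) }
+//
+// and the call f(g(1), g(2), g(3)), substituting all three arguments
+// would evaluate g(3) before g(2), so a and b are bound in a decl
+// (var a, b int = g(1), g(2)) and only c is substituted.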
+func resolveEffects(logf func(string, ...any), args []*argument, effects []int) {
+	effectStr := func(effects bool, idx int) string {
+		i := fmt.Sprint(idx)
+		if idx == len(args) {
+			i = "∞"
+		}
+		return string("RW"[btoi(effects)]) + i
+	}
+	for i := len(args) - 1; i >= 0; i-- {
+		argi := args[i]
+		if argi.substitutable && !argi.pure {
+			// i is not bound: check whether it must be bound due to hazards.
+			idx := index(effects, i)
+			if idx >= 0 {
+				for _, j := range effects[:idx] {
+					var (
+						ji int  // effective param index
+						jw bool // j is a write
+					)
+					if j == winf || j == rinf {
+						jw = j == winf
+						ji = len(args)
+					} else {
+						jw = args[j].effects
+						ji = j
+					}
+					if ji > i && (jw || argi.effects) { // out of order evaluation
+						logf("binding argument %s: preceded by %s",
+							effectStr(argi.effects, i), effectStr(jw, ji))
+						argi.substitutable = false
+						break
+					}
+				}
+			}
+		}
+		if !argi.substitutable {
+			for j := 0; j < i; j++ {
+				argj := args[j]
+				if argj.pure {
+					continue
+				}
+				if (argi.effects || argj.effects) && argj.substitutable {
+					logf("binding argument %s: %s is bound",
+						effectStr(argj.effects, j), effectStr(argi.effects, i))
+					argj.substitutable = false
+				}
+			}
+		}
 	}
 }
 
@@ -1167,11 +1528,13 @@
 }
 
 // createBindingDecl constructs a "binding decl" that implements
-// parameter assignment.
+// parameter assignment and declares any named result variables
+// referenced by the callee.
 //
-// If we succeed, the declaration may be used by reduction
-// strategies to relax the requirement that all parameters
-// have been substituted.
+// It may not always be possible to create the decl (e.g. due to
+// shadowing), in which case it returns nil; but if it succeeds, the
+// declaration may be used by reduction strategies to relax the
+// requirement that all parameters have been substituted.
 //
 // For example, a call:
 //
@@ -1202,7 +1565,7 @@
 //
 // Strategies may impose additional checks on return
 // conversions, labels, defer, etc.
-func createBindingDecl(logf func(string, ...any), caller *Caller, args []*argument, calleeDecl *ast.FuncDecl) ast.Stmt {
+func createBindingDecl(logf func(string, ...any), caller *Caller, args []*argument, calleeDecl *ast.FuncDecl, results []*paramInfo) ast.Stmt {
 	// Spread calls are tricky as they may not align with the
 	// parameters' field groupings nor types.
 	// For example, given
@@ -1220,27 +1583,15 @@
 		return nil
 	}
 
-	// Compute remaining argument expressions.
-	var values []ast.Expr
-	for _, arg := range args {
-		if arg != nil {
-			values = append(values, arg.expr)
-		}
-	}
-
 	var (
 		specs    []ast.Spec
 		shadowed = make(map[string]bool) // names defined by previous specs
 	)
-	for _, field := range calleeDecl.Type.Params.List {
-		// Each field (param group) becomes a ValueSpec.
-		spec := &ast.ValueSpec{
-			Names:  field.Names,
-			Type:   field.Type,
-			Values: values[:len(field.Names)],
-		}
-		values = values[len(field.Names):]
-
+	// shadow reports whether any name referenced by spec is
+	// shadowed by a name declared by a previous spec (since,
+	// unlike parameters, each spec of a var decl is within the
+	// scope of the previous specs).
+	shadow := func(spec *ast.ValueSpec) bool {
 		// Compute union of free names of type and values
 		// and detect shadowing. Values is the arguments
 		// (caller syntax), so we can use type info.
@@ -1252,11 +1603,11 @@
 				free[name] = true
 			}
 		}
-		freeishNames(free, field.Type)
+		freeishNames(free, spec.Type)
 		for name := range free {
 			if shadowed[name] {
 				logf("binding decl would shadow free name %q", name)
-				return nil
+				return true
 			}
 		}
 		for _, id := range spec.Names {
@@ -1264,10 +1615,66 @@
 				shadowed[id.Name] = true
 			}
 		}
+		return false
+	}
 
+	// parameters
+	//
+	// Bind parameters that were not eliminated through
+	// substitution. (Non-nil arguments correspond to the
+	// remaining parameters in calleeDecl.)
+	var values []ast.Expr
+	for _, arg := range args {
+		if arg != nil {
+			values = append(values, arg.expr)
+		}
+	}
+	for _, field := range calleeDecl.Type.Params.List {
+		// Each field (param group) becomes a ValueSpec.
+		spec := &ast.ValueSpec{
+			Names:  field.Names,
+			Type:   field.Type,
+			Values: values[:len(field.Names)],
+		}
+		values = values[len(field.Names):]
+		if shadow(spec) {
+			return nil
+		}
 		specs = append(specs, spec)
 	}
 	assert(len(values) == 0, "args/params mismatch")
+
+	// results
+	//
+	// Add specs to declare any named result
+	// variables that are referenced by the body.
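+	//
+	// For example (mirroring a case in the tests), reducing a call to
+	//
+	//	func f() (x int) { return x + x }
+	//
+	// in statement context yields "var x int; _ = x + x"; the
+	// "var x int" spec is added here.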
+	if calleeDecl.Type.Results != nil {
+		resultIdx := 0
+		for _, field := range calleeDecl.Type.Results.List {
+			if field.Names == nil {
+				resultIdx++
+				continue // unnamed field
+			}
+			var names []*ast.Ident
+			for _, id := range field.Names {
+				if len(results[resultIdx].Refs) > 0 {
+					names = append(names, id)
+				}
+				resultIdx++
+			}
+			if len(names) > 0 {
+				spec := &ast.ValueSpec{
+					Names: names,
+					Type:  field.Type,
+				}
+				if shadow(spec) {
+					return nil
+				}
+				specs = append(specs, spec)
+			}
+		}
+	}
+
 	decl := &ast.DeclStmt{
 		Decl: &ast.GenDecl{
 			Tok:   token.VAR,
@@ -1282,12 +1689,7 @@
 func (caller *Caller) lookup(name string) types.Object {
 	pos := caller.Call.Pos()
 	for _, n := range caller.path {
-		// The function body scope (containing not just params)
-		// is associated with FuncDecl.Type, not FuncDecl.Body.
-		if decl, ok := n.(*ast.FuncDecl); ok {
-			n = decl.Type
-		}
-		if scope := caller.Info.Scopes[n]; scope != nil {
+		if scope := scopeFor(caller.Info, n); scope != nil {
 			if _, obj := scope.LookupParent(name, pos); obj != nil {
 				return obj
 			}
@@ -1296,6 +1698,18 @@
 	return nil
 }
 
+func scopeFor(info *types.Info, n ast.Node) *types.Scope {
+	// The function body scope (containing not just params)
+	// is associated with the function's type, not body.
+	switch fn := n.(type) {
+	case *ast.FuncDecl:
+		n = fn.Type
+	case *ast.FuncLit:
+		n = fn.Type
+	}
+	return info.Scopes[n]
+}
+
 // -- predicates over expressions --
 
 // freeVars returns the names of all free identifiers of e:
@@ -1356,13 +1770,16 @@
 			return false // prune descent
 
 		case *ast.CallExpr:
-			if !info.Types[n.Fun].IsType() {
+			if info.Types[n.Fun].IsType() {
 				// A conversion T(x) has only the effect of its operand.
 			} else if !callsPureBuiltin(info, n) {
 				// A handful of built-ins have no effect
 				// beyond those of their arguments.
 				// All other calls (including append, copy, recover)
 				// have unknown effects.
+				//
+				// As with 'pure', there is room for
+				// improvement by inspecting the callee.
 				effects = true
 			}
 
@@ -1524,6 +1941,11 @@
 	return pure(e)
 }
 
+// callsPureBuiltin reports whether call is a call of a built-in
+// function that is a pure computation over its operands (analogous to
+// a + operator). Because it does not depend on program state, it may
+// be evaluated at any point--though not necessarily at multiple
+// points (consider new, make).
 func callsPureBuiltin(info *types.Info, call *ast.CallExpr) bool {
 	if id, ok := astutil.Unparen(call.Fun).(*ast.Ident); ok {
 		if b, ok := info.ObjectOf(id).(*types.Builtin); ok {
@@ -1555,17 +1977,30 @@
 	switch e := e.(type) {
 	case *ast.ParenExpr:
 		return duplicable(info, e.X)
+
 	case *ast.Ident:
 		return true
+
 	case *ast.BasicLit:
-		return e.Kind == token.INT
+		v := info.Types[e].Value
+		switch e.Kind {
+		case token.INT:
+			return true // any int
+		case token.STRING:
+			return consteq(v, kZeroString) // only ""
+		case token.FLOAT:
+			return consteq(v, kZeroFloat) || consteq(v, kOneFloat) // only 0.0 or 1.0
+		}
+
 	case *ast.UnaryExpr: // e.g. +1, -1
 		return (e.Op == token.ADD || e.Op == token.SUB) && duplicable(info, e.X)
+
 	case *ast.CallExpr:
 		// Don't treat a conversion T(x) as duplicable even
 		// if x is duplicable because it could duplicate
 		// allocations. There may be cases to tease apart here.
 		return false
+
 	case *ast.SelectorExpr:
 		if sel, ok := info.Selections[e]; ok {
 			// A field or method selection x.f is referentially
@@ -1574,11 +2009,21 @@
 		}
 		// A qualified identifier pkg.Name is referentially transparent.
 		return true
-	default:
-		return false
 	}
+	return false
 }
 
+func consteq(x, y constant.Value) bool {
+	return constant.Compare(x, token.EQL, y)
+}
+
+var (
+	kZeroInt    = constant.MakeInt64(0)
+	kZeroString = constant.MakeString("")
+	kZeroFloat  = constant.MakeFloat64(0.0)
+	kOneFloat   = constant.MakeFloat64(1.0)
+)
+
 // -- inline helpers --
 
 func assert(cond bool, msg string) {
@@ -1588,13 +2033,13 @@
 }
 
 // blanks returns a slice of n > 0 blank identifiers.
-func blanks(n int) []ast.Expr {
+func blanks[E ast.Expr](n int) []E {
 	if n == 0 {
 		panic("blanks(0)")
 	}
-	res := make([]ast.Expr, n)
+	res := make([]E, n)
 	for i := range res {
-		res[i] = makeIdent("_")
+		res[i] = ast.Expr(makeIdent("_")).(E) // ugh
 	}
 	return res
 }
@@ -1617,7 +2062,9 @@
 }
 
 func isPkgLevel(obj types.Object) bool {
-	// TODO(adonovan): why not simply: obj.Parent() == obj.Pkg().Scope()?
+	// TODO(adonovan): consider using the simpler obj.Parent() ==
+	// obj.Pkg().Scope() instead. But be sure to test carefully
+	// with instantiations of generics.
 	return obj.Pkg().Scope().Lookup(obj.Name()) == obj
 }
 
@@ -1637,6 +2084,18 @@
 // enclosing the call (specified as a PathEnclosingInterval)
 // intersects with the set of callee labels.
 func hasLabelConflict(callPath []ast.Node, calleeLabels []string) bool {
+	labels := callerLabels(callPath)
+	for _, label := range calleeLabels {
+		if labels[label] {
+			return true // conflict
+		}
+	}
+	return false
+}
+
+// callerLabels returns the set of control labels in the function (if
+// any) enclosing the call (specified as a PathEnclosingInterval).
+func callerLabels(callPath []ast.Node) map[string]bool {
 	var callerBody *ast.BlockStmt
 	switch f := callerFunc(callPath).(type) {
 	case *ast.FuncDecl:
@@ -1644,23 +2103,22 @@
 	case *ast.FuncLit:
 		callerBody = f.Body
 	}
-	conflict := false
+	var labels map[string]bool
 	if callerBody != nil {
 		ast.Inspect(callerBody, func(n ast.Node) bool {
 			switch n := n.(type) {
 			case *ast.FuncLit:
 				return false // prune traversal
 			case *ast.LabeledStmt:
-				for _, label := range calleeLabels {
-					if label == n.Label.Name {
-						conflict = true
-					}
+				if labels == nil {
+					labels = make(map[string]bool)
 				}
+				labels[n.Label.Name] = true
 			}
 			return true
 		})
 	}
-	return conflict
+	return labels
 }
 
 // callerFunc returns the innermost Func{Decl,Lit} node enclosing the
@@ -1678,11 +2136,64 @@
 // callStmt reports whether the function call (specified
 // as a PathEnclosingInterval) appears within an ExprStmt,
 // and returns it if so.
-func callStmt(callPath []ast.Node) *ast.ExprStmt {
-	stmt, _ := callContext(callPath).(*ast.ExprStmt)
+//
+// If unrestricted, callStmt returns nil if the ExprStmt f() appears
+// in a restricted context (such as "if f(); cond {") where it cannot
+// be replaced by an arbitrary statement. (See "statement theory".)
+func callStmt(callPath []ast.Node, unrestricted bool) *ast.ExprStmt {
+	stmt, ok := callContext(callPath).(*ast.ExprStmt)
+	if ok && unrestricted {
+		switch callPath[nodeIndex(callPath, stmt)+1].(type) {
+		case *ast.LabeledStmt,
+			*ast.BlockStmt,
+			*ast.CaseClause,
+			*ast.CommClause:
+			// unrestricted
+		default:
+			// TODO(adonovan): handle restricted
+			// XYZStmt.Init contexts (but not ForStmt.Post)
+			// by creating a block around the if/for/switch:
+			// "if f(); cond {"  ->  "{ stmts; if cond {"
+
+			return nil // restricted
+		}
+	}
 	return stmt
 }
 
+// Statement theory
+//
+// These are all the places a statement may appear in the AST:
+//
+// LabeledStmt.Stmt       Stmt      -- any
+// BlockStmt.List       []Stmt      -- any (but see switch/select)
+// IfStmt.Init            Stmt?     -- simple
+// IfStmt.Body            BlockStmt
+// IfStmt.Else            Stmt?     -- IfStmt or BlockStmt
+// CaseClause.Body      []Stmt      -- any
+// SwitchStmt.Init        Stmt?     -- simple
+// SwitchStmt.Body        BlockStmt -- CaseClauses only
+// TypeSwitchStmt.Init    Stmt?     -- simple
+// TypeSwitchStmt.Assign  Stmt      -- AssignStmt(TypeAssertExpr) or ExprStmt(TypeAssertExpr)
+// TypeSwitchStmt.Body    BlockStmt -- CaseClauses only
+// CommClause.Comm        Stmt?     -- SendStmt or ExprStmt(UnaryExpr) or AssignStmt(UnaryExpr)
+// CommClause.Body      []Stmt      -- any
+// SelectStmt.Body        BlockStmt -- CommClauses only
+// ForStmt.Init           Stmt?     -- simple
+// ForStmt.Post           Stmt?     -- simple
+// ForStmt.Body           BlockStmt
+// RangeStmt.Body         BlockStmt
+//
+// simple = AssignStmt | SendStmt | IncDecStmt | ExprStmt.
+//
+// A BlockStmt cannot replace an ExprStmt in
+// {If,Switch,TypeSwitch}Stmt.Init or ForStmt.Post.
+// That is allowed only within:
+//   LabeledStmt.Stmt       Stmt
+//   BlockStmt.List       []Stmt
+//   CaseClause.Body      []Stmt
+//   CommClause.Body      []Stmt
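+//
+// For example (mirroring a case in the tests), the f() in
+// "if f(); cond {" is a restricted ExprStmt: it cannot be replaced by
+// the callee's statements, so the inliner currently falls back to
+// literalization, producing "if func() { ... }(); cond {".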
+
 // replaceNode performs a destructive update of the tree rooted at
 // root, replacing each occurrence of "from" with "to". If to is nil and
 // the element is within a slice, the slice element is removed.
@@ -1985,3 +2496,141 @@
 	}
 	return is[*ast.CallExpr](expr)
 }
+
+// needsParens reports whether parens are required to avoid ambiguity
+// around the new node replacing the specified old node (which is some
+// ancestor of the CallExpr identified by its PathEnclosingInterval).
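+//
+// For example (mirroring cases in the tests), replacing the call in
+// "_ = -f(1, 2)" by the callee body "1 + 2" requires parens, giving
+// "_ = -(1 + 2)", whereas replacing the call in "_ = 1 + f(2, 3)" by
+// "2*3" does not.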
+func needsParens(callPath []ast.Node, old, new ast.Node) bool {
+	// Find enclosing old node and its parent.
+	i := nodeIndex(callPath, old)
+	if i == -1 {
+		panic("not found")
+	}
+
+	// There is no precedence ambiguity when replacing
+	// (e.g.) a statement enclosing the call.
+	if !is[ast.Expr](old) {
+		return false
+	}
+
+	// An expression beneath a non-expression
+	// has no precedence ambiguity.
+	parent, ok := callPath[i+1].(ast.Expr)
+	if !ok {
+		return false
+	}
+
+	precedence := func(n ast.Node) int {
+		switch n := n.(type) {
+		case *ast.UnaryExpr, *ast.StarExpr:
+			return token.UnaryPrec
+		case *ast.BinaryExpr:
+			return n.Op.Precedence()
+		}
+		return -1
+	}
+
+	// Parens are not required if the new node
+	// is not unary or binary.
+	newprec := precedence(new)
+	if newprec < 0 {
+		return false
+	}
+
+	// Parens are required if parent and child are both
+	// unary or binary and the parent has higher precedence.
+	if precedence(parent) > newprec {
+		return true
+	}
+
+	// Was the old node the operand of a postfix operator?
+	//  f().sel
+	//  f()[i:j]
+	//  f()[i]
+	//  f().(T)
+	//  f()(x)
+	switch parent := parent.(type) {
+	case *ast.SelectorExpr:
+		return parent.X == old
+	case *ast.IndexExpr:
+		return parent.X == old
+	case *ast.SliceExpr:
+		return parent.X == old
+	case *ast.TypeAssertExpr:
+		return parent.X == old
+	case *ast.CallExpr:
+		return parent.Fun == old
+	}
+	return false
+}
+
+func nodeIndex(nodes []ast.Node, n ast.Node) int {
+	// TODO(adonovan): Use index[ast.Node]() in go1.20.
+	for i, node := range nodes {
+		if node == n {
+			return i
+		}
+	}
+	return -1
+}
+
+// declares returns the set of lexical names declared by a
+// sequence of statements from the same block, excluding sub-blocks.
+// (Lexical names do not include control labels.)
+func declares(stmts []ast.Stmt) map[string]bool {
+	names := make(map[string]bool)
+	for _, stmt := range stmts {
+		switch stmt := stmt.(type) {
+		case *ast.DeclStmt:
+			for _, spec := range stmt.Decl.(*ast.GenDecl).Specs {
+				switch spec := spec.(type) {
+				case *ast.ValueSpec:
+					for _, id := range spec.Names {
+						names[id.Name] = true
+					}
+				case *ast.TypeSpec:
+					names[spec.Name.Name] = true
+				}
+			}
+
+		case *ast.AssignStmt:
+			if stmt.Tok == token.DEFINE {
+				for _, lhs := range stmt.Lhs {
+					names[lhs.(*ast.Ident).Name] = true
+				}
+			}
+		}
+	}
+	return names
+}
+
+// safeReturn reports whether the callee's return statements may be safely
+// used to return from the function enclosing the caller (which must exist).
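+//
+// For example (mirroring the issue #63336 cases in the tests), a
+// callee "func f() error" whose returns require a non-trivial
+// conversion may still be reduced within "func _() error { return f() }",
+// since the caller's result tuple is identical, but not within
+// "func _() any { return f() }", which is literalized instead.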
+func safeReturn(caller *Caller, calleeSymbol *types.Func, callee *gobCallee) bool {
+	// It is safe if all callee returns involve only trivial conversions.
+	if callee.TrivialReturns == callee.TotalReturns {
+		return true
+	}
+
+	var callerType types.Type
+	// Find type of innermost function enclosing call.
+	// (Beware: Caller.enclosingFunc is the outermost.)
+loop:
+	for _, n := range caller.path {
+		switch f := n.(type) {
+		case *ast.FuncDecl:
+			callerType = caller.Info.ObjectOf(f.Name).Type()
+			break loop
+		case *ast.FuncLit:
+			callerType = caller.Info.TypeOf(f)
+			break loop
+		}
+	}
+
+	// Non-trivial return conversions in the callee are permitted
+	// if the same non-trivial conversion would occur after inlining,
+	// i.e. if the caller and callee results tuples are identical.
+	callerResults := callerType.(*types.Signature).Results()
+	calleeResults := calleeSymbol.Type().(*types.Signature).Results()
+	return types.Identical(callerResults, calleeResults)
+}
diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go
index 3cad649..8362e44 100644
--- a/internal/refactor/inline/inline_test.go
+++ b/internal/refactor/inline/inline_test.go
@@ -45,6 +45,12 @@
 		t.Run(filepath.Base(file), func(t *testing.T) {
 			t.Parallel()
 
+			// The few tests that use cgo should be in
+			// files whose name includes "cgo".
+			if strings.Contains(t.Name(), "cgo") {
+				testenv.NeedsTool(t, "cgo")
+			}
+
 			// Extract archive to temporary tree.
 			ar, err := txtar.ParseFile(file)
 			if err != nil {
@@ -231,7 +237,7 @@
 		}
 	}
 
-	calleeDecl, err := findFuncByPosition(calleePkg, caller.Fset.Position(fn.Pos()))
+	calleeDecl, err := findFuncByPosition(calleePkg, caller.Fset.PositionFor(fn.Pos(), false))
 	if err != nil {
 		return err
 	}
@@ -297,7 +303,7 @@
 		// them, so how are we supposed to find the source? Yuck!
 		// Ugh. need to samefile? Nope $GOROOT just won't work
 		// This is highly client specific anyway.
-		posn2 := pkg.Fset.Position(decl.Name.Pos())
+		posn2 := pkg.Fset.PositionFor(decl.Name.Pos(), false)
 		return posn.Filename == posn2.Filename &&
 			posn.Line == posn2.Line
 	}
@@ -311,18 +317,27 @@
 	return nil, fmt.Errorf("can't find FuncDecl at %v in package %q", posn, pkg.PkgPath)
 }
 
-// TestTable is a table driven test, enabling more compact expression
-// of single-package test cases than is possible with the txtar notation.
-func TestTable(t *testing.T) {
-	// Each callee must declare a function or method named f,
-	// and each caller must call it.
-	const funcName = "f"
+// Each callee must declare a function or method named f,
+// and each caller must call it.
+const funcName = "f"
 
-	var tests = []struct {
-		descr          string
-		callee, caller string // Go source files (sans package decl) of caller, callee
-		want           string // expected new portion of caller file, or "error: regexp"
-	}{
+// A testcase is an item in a table-driven test.
+//
+// The table-driven tests are less flexible, but enable more compact
+// expression of single-package test cases than is possible with the
+// txtar notation.
+//
+// TODO(adonovan): improve coverage of the cross product of each
+// strategy with the checklist of concerns enumerated in the package
+// doc comment.
+type testcase struct {
+	descr          string
+	callee, caller string // Go source files (sans package decl) of caller, callee
+	want           string // expected new portion of caller file, or "error: regexp"
+}
+
+func TestErrors(t *testing.T) {
+	runTests(t, []testcase{
 		{
 			"Generic functions are not yet supported.",
 			`func f[T any](x T) T { return x }`,
@@ -335,11 +350,16 @@
 			`var _ = G[int]{}.f(0)`,
 			`error: type parameters are not yet supported`,
 		},
+	})
+}
+
+func TestBasics(t *testing.T) {
+	runTests(t, []testcase{
 		{
 			"Basic",
 			`func f(x int) int { return x }`,
 			`var _ = f(0)`,
-			`var _ = (0)`,
+			`var _ = 0`,
 		},
 		{
 			"Empty body, no arg effects.",
@@ -354,10 +374,184 @@
 			`func _() { _ = recover().(int) }`,
 		},
 		{
+			"Non-duplicable arguments are not substituted even if pure.",
+			`func f(s string, i int) { print(s, s, i, i) }`,
+			`func _() { f("hi", 0)  }`,
+			`func _() {
+	var s string = "hi"
+	print(s, s, 0, 0)
+}`,
+		},
+		{
+			"Workaround for T(x) misformatting (#63362).",
+			`func f(ch <-chan int) { <-ch }`,
+			`func _(ch chan int) { f(ch) }`,
+			`func _(ch chan int) { <-(<-chan int)(ch) }`,
+		},
+	})
+}
+
+func TestExprStmtReduction(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"A call in an unrestricted ExprStmt may be replaced by the body stmts.",
+			`func f() { var _ = len("") }`,
+			`func _() { f() }`,
+			`func _() { var _ = len("") }`,
+		},
+		{
+			"ExprStmts in the body of a switch case are unrestricted.",
+			`func f() { x := 1; print(x) }`,
+			`func _() { switch { case true: f() } }`,
+			`func _() {
+	switch {
+	case true:
+		x := 1
+		print(x)
+	}
+}`,
+		},
+		{
+			"ExprStmts in the body of a select case are unrestricted.",
+			`func f() { x := 1; print(x) }`,
+			`func _() { select { default: f() } }`,
+			`func _() {
+	select {
+	default:
+		x := 1
+		print(x)
+	}
+}`,
+		},
+		{
+			"Some ExprStmt contexts are restricted to simple statements.",
+			`func f() { var _ = len("") }`,
+			`func _(cond bool) { if f(); cond {} }`,
+			`func _(cond bool) {
+	if func() { var _ = len("") }(); cond {
+	}
+}`,
+		},
+		{
+			"Braces must be preserved to avoid a name conflict (decl before).",
+			`func f() { x := 1; print(x) }`,
+			`func _() { x := 2; print(x); f() }`,
+			`func _() {
+	x := 2
+	print(x)
+	{
+		x := 1
+		print(x)
+	}
+}`,
+		},
+		{
+			"Braces must be preserved to avoid a name conflict (decl after).",
+			`func f() { x := 1; print(x) }`,
+			`func _() { f(); x := 2; print(x) }`,
+			`func _() {
+	{
+		x := 1
+		print(x)
+	}
+	x := 2
+	print(x)
+}`,
+		},
+		{
+			"Braces must be preserved to avoid a forward jump across a decl.",
+			`func f() { x := 1; print(x) }`,
+			`func _() { goto label; f(); label: }`,
+			`func _() {
+	goto label
+	{
+		x := 1
+		print(x)
+	}
+label:
+}`,
+		},
+	})
+}
+
+func TestPrecedenceParens(t *testing.T) {
+	// Ensure that parens are inserted when (and only when) necessary
+	// around the replacement for the call expression. (This is a special
+	// case in the way the inliner uses a combination of AST formatting
+	// for the call and text splicing for the rest of the file.)
+	runTests(t, []testcase{
+		{
+			"Multiplication in addition context (no parens).",
+			`func f(x, y int) int { return x * y }`,
+			`func _() { _ = 1 + f(2, 3) }`,
+			`func _() { _ = 1 + 2*3 }`,
+		},
+		{
+			"Addition in multiplication context (parens).",
+			`func f(x, y int) int { return x + y }`,
+			`func _() { _ = 1 * f(2, 3) }`,
+			`func _() { _ = 1 * (2 + 3) }`,
+		},
+		{
+			"Addition in negation context (parens).",
+			`func f(x, y int) int { return x + y }`,
+			`func _() { _ = -f(1, 2) }`,
+			`func _() { _ = -(1 + 2) }`,
+		},
+		{
+			"Addition in call context (no parens).",
+			`func f(x, y int) int { return x + y }`,
+			`func _() { println(f(1, 2)) }`,
+			`func _() { println(1 + 2) }`,
+		},
+		{
+			"Addition in slice operand context (parens).",
+			`func f(x, y string) string { return x + y }`,
+			`func _() { _ = f("x",  "y")[1:2] }`,
+			`func _() { _ = ("x" + "y")[1:2] }`,
+		},
+		{
+			"String literal in slice operand context (no parens).",
+			`func f(x string) string { return x }`,
+			`func _() { _ = f("xy")[1:2] }`,
+			`func _() { _ = "xy"[1:2] }`,
+		},
+	})
+}
+
+func TestSubstitution(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Arg to unref'd param can be eliminated if has no effects.",
+			`func f(x, y int) {}; var global int`,
+			`func _() { f(0, global) }`,
+			`func _() {}`,
+		},
+		{
+			"But not if it may contain last reference to a caller local var.",
+			`func f(int) {}`,
+			`func _() { var local int; f(local) }`,
+			`func _() { var local int; _ = local }`,
+		},
+		{
+			"Regression test for detection of shadowing in nested functions.",
+			`func f(x int) { _ = func() { y := 1; print(y); print(x) } }`,
+			`func _(y int) { f(y) } `,
+			`func _(y int) {
+	var x int = y
+	_ = func() { y := 1; print(y); print(x) }
+}`,
+		},
+	})
+}
+
+func TestTailCallStrategy(t *testing.T) {
+	runTests(t, []testcase{
+		{
 			"Tail call.",
 			`func f() int { return 1 }`,
 			`func _() int { return f() }`,
-			`func _() int { return (1) }`,
+			`func _() int { return 1 }`,
 		},
 		{
 			"Void tail call.",
@@ -371,6 +565,30 @@
 			`func _() { f() }`,
 			`func _() { func() { defer f(); println() }() }`,
 		},
+		// Tests for issue #63336:
+		{
+			"Tail call with non-trivial return conversion (caller.sig = callee.sig).",
+			`func f() error { if true { return nil } else { return e } }; var e struct{error}`,
+			`func _() error { return f() }`,
+			`func _() error {
+	if true {
+		return nil
+	} else {
+		return e
+	}
+}`,
+		},
+		{
+			"Tail call with non-trivial return conversion (caller.sig != callee.sig).",
+			`func f() error { return E{} }; type E struct{error}`,
+			`func _() any { return f() }`,
+			`func _() any { return func() error { return E{} }() }`,
+		},
+	})
+}
+
+func TestSpreadCalls(t *testing.T) {
+	runTests(t, []testcase{
 		{
 			"Edge case: cannot literalize spread method call.",
 			`type I int
@@ -383,6 +601,28 @@
 			`error: can't yet inline spread call to method`,
 		},
 		{
+			"Spread argument evaluated for effect.",
+			`func f(int, int) {}; func g() (int, int)`,
+			`func _() { f(g())  }`,
+			`func _() { _, _ = g() }`,
+		},
+		{
+			"Edge case: receiver and spread argument, both evaluated for effect.",
+			`type T int; func (T) f(int, int) {}; func g() (int, int)`,
+			`func _() { T(0).f(g())  }`,
+			`func _() {
+	var (
+		_    = T(0)
+		_, _ = g()
+	)
+}`,
+		},
+	})
+}
+
+func TestVariadic(t *testing.T) {
+	runTests(t, []testcase{
+		{
 			"Variadic cancellation (basic).",
 			`func f(args ...any) { defer f(&args); println(args) }`,
 			`func _(slice []any) { f(slice...) }`,
@@ -404,7 +644,7 @@
 			"Variadic elimination (literalization).",
 			`func f(x any, rest ...any) { defer println(x, rest) }`, // defer => literalization
 			`func _() { f(1, 2, 3) }`,
-			`func _() { func(x any) { defer println(x, []any{2, 3}) }(1) }`,
+			`func _() { func() { defer println(any(1), []any{2, 3}) }() }`,
 		},
 		{
 			"Variadic elimination (reduction).",
@@ -430,39 +670,56 @@
 			`func _() { f(g()) }`,
 			`func _() { func(x, y int, rest ...int) { println(x, y, rest) }(g()) }`,
 		},
+	})
+}
+
+func TestParameterBindingDecl(t *testing.T) {
+	runTests(t, []testcase{
 		{
-			"Binding declaration (x eliminated).",
-			`func f(w, x, y any, z int) { println(w, y, z) }; func g(int) int`,
-			`func _() { f(g(0), g(1), g(2), g(3)) }`,
+			"IncDec counts as assignment.",
+			`func f(x int) { x++ }`,
+			`func _() { f(1) }`,
 			`func _() {
-	{
-		var (
-			w, _, y any = g(0), g(1), g(2)
-			z       int = g(3)
-		)
-		println(w, y, z)
-	}
+	var x int = 1
+	x++
 }`,
 		},
 		{
-			"Binding decl in reduction of stmt-context call to { return exprs }",
+			"Binding declaration (x, y, z eliminated).",
+			`func f(w, x, y any, z int) { println(w, y, z) }; func g(int) int`,
+			`func _() { f(g(0), g(1), g(2), g(3)) }`,
+			`func _() {
+	var w, _ any = g(0), g(1)
+	println(w, any(g(2)), g(3))
+}`,
+		},
+		{
+			"Reduction of stmt-context call to { return exprs }, with substitution",
 			`func f(ch chan int) int { return <-ch }; func g() chan int`,
 			`func _() { f(g()) }`,
+			`func _() { <-g() }`,
+		},
+		{
+			// Same again, with callee effects:
+			"Binding decl in reduction of stmt-context call to { return exprs }",
+			`func f(x int) int { return <-h(g(2), x) }; func g(int) int; func h(int, int) chan int`,
+			`func _() { f(g(1)) }`,
 			`func _() {
-	{
-		var ch chan int = g()
-		<-ch
-	}
+	var x int = g(1)
+	<-h(g(2), x)
 }`,
 		},
 		{
 			"No binding decl due to shadowing of int",
-			`func f(int, y any, z int) { defer println(int, y, z) }; func g() int`,
-			`func _() { f(g(), g(), g()) }`,
-			`func _() { func(int, y any, z int) { defer println(int, y, z) }(g(), g(), g()) }
-`,
+			`func f(int, y any, z int) { defer g(0); println(int, y, z) }; func g(int) int`,
+			`func _() { f(g(1), g(2), g(3)) }`,
+			`func _() { func(int, y any, z int) { defer g(0); println(int, y, z) }(g(1), g(2), g(3)) }`,
 		},
-		// Embedded fields:
+	})
+}
+
+func TestEmbeddedFields(t *testing.T) {
+	runTests(t, []testcase{
 		{
 			"Embedded fields in x.f method selection (direct).",
 			`type T int; func (t T) f() { print(t) }; type U struct{ T }`,
@@ -475,20 +732,11 @@
 			`func _(v V) { v.f() }`,
 			`func _(v V) { print(*v.U.T) }`,
 		},
-		// TODO(adonovan): due to former unsoundness in pure(),
-		// the previous outputs of two tests below used to be neater.
-		// A followup analysis (strict effects) will restore tidiness.
 		{
 			"Embedded fields in x.f method selection (implicit &).",
 			`type ( T int; U struct{T}; V struct {U} ); func (t *T) f() { print(t) }`,
 			`func _(v V) { v.f() }`,
-			// was `func _(v V) { print(&v.U.T) }`,
-			`func _(v V) {
-	{
-		var t *T = &v.U.T
-		print(t)
-	}
-}`,
+			`func _(v V) { print(&v.U.T) }`,
 		},
 		// Now the same tests again with T.f(recv).
 		{
@@ -507,18 +755,299 @@
 			"Embedded fields in (*T).f method selection.",
 			`type ( T int; U struct{T}; V struct {U} ); func (t *T) f() { print(t) }`,
 			`func _(v V) { (*V).f(&v) }`,
-			// was `func _(v V) { print(&(&v).U.T) }`,
-			`func _(v V) {
+			`func _(v V) { print(&(&v).U.T) }`,
+		},
+		{
+			// x is a single-assign var, and x.f does not load through a pointer
+			// (despite types.Selection.Indirect=true), so x is pure.
+			"No binding decl is required for recv in method-to-method calls.",
+			`type T struct{}; func (x *T) f() { g(); print(*x) }; func g()`,
+			`func (x *T) _() { x.f() }`,
+			`func (x *T) _() {
+	g()
+	print(*x)
+}`,
+		},
+		{
+			"Same, with implicit &recv.",
+			`type T struct{}; func (x *T) f() { g(); print(*x) }; func g()`,
+			`func (x T) _() { x.f() }`,
+			`func (x T) _() {
 	{
-		var t *T = &(&v).U.T
-		print(t)
+		var x *T = &x
+		g()
+		print(*x)
 	}
 }`,
 		},
-		// TODO(adonovan): improve coverage of the cross
-		// product of each strategy with the checklist of
-		// concerns enumerated in the package doc comment.
+	})
+}
+
+func TestSubstitutionPreservesArgumentEffectOrder(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Arguments have effects, but parameters are evaluated in order.",
+			`func f(a, b, c int) { print(a, b, c) }; func g(int) int`,
+			`func _() { f(g(1), g(2), g(3)) }`,
+			`func _() { print(g(1), g(2), g(3)) }`,
+		},
+		{
+			"Arguments have effects, and parameters are evaluated out of order.",
+			`func f(a, b, c int) { print(a, c, b) }; func g(int) int`,
+			`func _() { f(g(1), g(2), g(3)) }`,
+			`func _() {
+	var a, b int = g(1), g(2)
+	print(a, g(3), b)
+}`,
+		},
+		{
+			"Pure arguments may commute with argument that have effects.",
+			`func f(a, b, c int) { print(a, c, b) }; func g(int) int`,
+			`func _() { f(g(1), 2, g(3)) }`,
+			`func _() { print(g(1), g(3), 2) }`,
+		},
+		{
+			"Impure arguments may commute with each other.",
+			`func f(a, b, c, d int) { print(a, c, b, d) }; func g(int) int; var x, y int`,
+			`func _() { f(g(1), x, y, g(2)) }`,
+			`func _() { print(g(1), y, x, g(2)) }`,
+		},
+		{
+			"Impure arguments do not commute with arguments that have effects (1)",
+			`func f(a, b, c, d int) { print(a, c, b, d) }; func g(int) int; var x, y int`,
+			`func _() { f(g(1), g(2), y, g(3)) }`,
+			`func _() {
+	var a, b int = g(1), g(2)
+	print(a, y, b, g(3))
+}`,
+		},
+		{
+			"Impure arguments do not commute with those that have effects (2).",
+			`func f(a, b, c, d int) { print(a, c, b, d) }; func g(int) int; var x, y int`,
+			`func _() { f(g(1), y, g(2), g(3)) }`,
+			`func _() {
+	var a, b int = g(1), y
+	print(a, g(2), b, g(3))
+}`,
+		},
+		{
+			"Callee effects commute with pure arguments.",
+			`func f(a, b, c int) { print(a, c, recover().(int), b) }; func g(int) int`,
+			`func _() { f(g(1), 2, g(3)) }`,
+			`func _() { print(g(1), g(3), recover().(int), 2) }`,
+		},
+		{
+			"Callee reads may commute with impure arguments.",
+			`func f(a, b int) { print(a, x, b) }; func g(int) int; var x, y int`,
+			`func _() { f(g(1), y) }`,
+			`func _() { print(g(1), x, y) }`,
+		},
+		{
+			"All impure parameters preceding a read hazard must be kept.",
+			`func f(a, b, c int) { print(a, b, recover().(int), c) }; var x, y, z int`,
+			`func _() { f(x, y, z) }`,
+			`func _() {
+	var c int = z
+	print(x, y, recover().(int), c)
+}`,
+		},
+		{
+			"All parameters preceding a write hazard must be kept.",
+			`func f(a, b, c int) { print(a, b, recover().(int), c) }; func g(int) int; var x, y, z int`,
+			`func _() { f(x, y, g(0))  }`,
+			`func _() {
+	var a, b, c int = x, y, g(0)
+	print(a, b, recover().(int), c)
+}`,
+		},
+		{
+			"[W1 R0 W2 W4 R3] -- test case for second iteration of effect loop",
+			`func f(a, b, c, d, e int) { print(b, a, c, e, d) }; func g(int) int; var x, y int`,
+			`func _() { f(x, g(1), g(2), y, g(3))  }`,
+			`func _() {
+	var a, b, c, d int = x, g(1), g(2), y
+	print(b, a, c, g(3), d)
+}`,
+		},
+		{
+			// In this example, the set() call is rejected as a substitution
+			// candidate due to a shadowing conflict (x). This must entail that the
+			// selection x.y (R) is also rejected, because it is lower numbered.
+			//
+			// Incidentally this program (which panics when executed) illustrates
+			// that although effects occur left-to-right, read operations such
+			// as x.y are not ordered wrt writes, depending on the compiler.
+			// Changing x.y to identity(x).y forces the ordering and avoids the panic.
+			"Hazards with args already rejected (e.g. due to shadowing) are detected too.",
+			`func f(x, y int) int { return x + y }; func set[T any](ptr *T, old, new T) int { println(old); *ptr = new; return 0; }`,
+			`func _() { x := new(struct{ y int }); f(x.y, set(&x, x, nil)) }`,
+			`func _() {
+	x := new(struct{ y int })
+	{
+		var x, y int = x.y, set(&x, x, nil)
+		_ = x + y
 	}
+}`,
+		},
+		{
+			// Rejection of a later parameter for reasons other than callee
+			// effects (e.g. escape) may create hazards with lower-numbered
+			// parameters that require them to be rejected too.
+			"Hazards with already eliminated parameters (variant)",
+			`func f(x, y int) { _ = &y }; func g(int) int`,
+			`func _() { f(g(1), g(2)) }`,
+			`func _() {
+	var _, y int = g(1), g(2)
+	_ = &y
+}`,
+		},
+		{
+			// In this case g(2) is rejected for substitution because it is
+			// unreferenced but has effects, so parameter x must also be rejected
+			// so that its argument v can be evaluated earlier in the binding decl.
+			"Hazards with already eliminated parameters (unreferenced fx variant)",
+			`func f(x, y int) { _ = x }; func g(int) int; var v int`,
+			`func _() { f(v, g(2)) }`,
+			`func _() {
+	var x, _ int = v, g(2)
+	_ = x
+}`,
+		},
+		{
+			"Defer f() evaluates f() before unknown effects",
+			`func f(int, y any, z int) { defer println(int, y, z) }; func g(int) int`,
+			`func _() { f(g(1), g(2), g(3)) }`,
+			`func _() { func() { defer println(any(g(1)), any(g(2)), g(3)) }() }`,
+		},
+	})
+}
+
+func TestNamedResultVars(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Stmt-context call to {return g()} that mentions named result.",
+			`func f() (x int) { return g(x) }; func g(int) int`,
+			`func _() { f() }`,
+			`func _() {
+	var x int
+	g(x)
+}`,
+		},
+		{
+			"Ditto, with binding decl again.",
+			`func f(y string) (x int) { return x+x+len(y+y) }`,
+			`func _() { f(".") }`,
+			`func _() {
+	var (
+		y string = "."
+		x int
+	)
+	_ = x + x + len(y+y)
+}`,
+		},
+
+		{
+			"Ditto, with binding decl (due to repeated y refs).",
+			`func f(y string) (x string) { return x+y+y }`,
+			`func _() { f(".") }`,
+			`func _() {
+	var (
+		y string = "."
+		x string
+	)
+	_ = x + y + y
+}`,
+		},
+		{
+			"Stmt-context call to {return binary} that mentions named result.",
+			`func f() (x int) { return x+x }`,
+			`func _() { f() }`,
+			`func _() {
+	var x int
+	_ = x + x
+}`,
+		},
+		{
+			"Tail call to {return expr} that mentions named result.",
+			`func f() (x int) { return x }`,
+			`func _() int { return f() }`,
+			`func _() int { return func() (x int) { return x }() }`,
+		},
+		{
+			"Tail call to {return} that implicitly reads named result.",
+			`func f() (x int) { return }`,
+			`func _() int { return f() }`,
+			`func _() int { return func() (x int) { return }() }`,
+		},
+		{
+			"Spread-context call to {return expr} that mentions named result.",
+			`func f() (x, y int) { return x, y }`,
+			`func _() { var _, _ = f() }`,
+			`func _() { var _, _ = func() (x, y int) { return x, y }() }`,
+		},
+		{
+			"Shadowing in binding decl for named results => literalization.",
+			`func f(y string) (x y) { return x+x+len(y+y) }; type y = int`,
+			`func _() { f(".") }`,
+			`func _() { func(y string) (x y) { return x + x + len(y+y) }(".") }`,
+		},
+	})
+}
+
+func TestSubstitutionPreservesParameterType(t *testing.T) {
+	runTests(t, []testcase{
+		{
+			"Substitution preserves argument type (#63193).",
+			`func f(x int16) { y := x; _ = (*int16)(&y) }`,
+			`func _() { f(1) }`,
+			`func _() {
+	y := int16(1)
+	_ = (*int16)(&y)
+}`,
+		},
+		{
+			"Same, with non-constant (unnamed to named struct) conversion.",
+			`func f(x T) { y := x; _ = (*T)(&y) }; type T struct{}`,
+			`func _() { f(struct{}{}) }`,
+			`func _() {
+	y := T(struct{}{})
+	_ = (*T)(&y)
+}`,
+		},
+		{
+			"Same, with non-constant (chan to <-chan) conversion.",
+			`func f(x T) { y := x; _ = (*T)(&y) }; type T = <-chan int; var ch chan int`,
+			`func _() { f(ch) }`,
+			`func _() {
+	y := T(ch)
+	_ = (*T)(&y)
+}`,
+		},
+		{
+			"Same, with untyped nil to typed nil conversion.",
+			`func f(x *int) { y := x; _ = (**int)(&y) }`,
+			`func _() { f(nil) }`,
+			`func _() {
+	y := (*int)(nil)
+	_ = (**int)(&y)
+}`,
+		},
+		{
+			"Conversion of untyped int to named type is made explicit.",
+			`type T int; func (x T) f() { x.g() }; func (T) g() {}`,
+			`func _() { T.f(1) }`,
+			`func _() { T(1).g() }`,
+		},
+		{
+			"Check for shadowing error on type used in the conversion.",
+			`func f(x T) { _ = &x == (*T)(nil) }; type T int16`,
+			`func _() { type T bool; f(1) }`,
+			`error: T.*shadowed.*by.*type`,
+		},
+	})
+}
+
+func runTests(t *testing.T, tests []testcase) {
 	for _, test := range tests {
 		test := test
 		t.Run(test.descr, func(t *testing.T) {
@@ -580,7 +1109,7 @@
 			conf := &types.Config{Error: func(err error) { t.Error(err) }}
 			pkg, err := conf.Check("p", fset, []*ast.File{callerFile, calleeFile}, info)
 			if err != nil {
-				t.Fatal(err)
+				t.Fatal("transformation introduced type errors")
 			}
 
 			// Analyze callee and inline call.
diff --git a/internal/refactor/inline/testdata/basic-err.txtar b/internal/refactor/inline/testdata/basic-err.txtar
index c811fe4..4868b2c 100644
--- a/internal/refactor/inline/testdata/basic-err.txtar
+++ b/internal/refactor/inline/testdata/basic-err.txtar
@@ -1,12 +1,6 @@
 Test of inlining a function that references err.Error,
 which is often a special case because it has no position.
 
-Previously the output was
-  var _ = (io.EOF.Error())
-but this relied on a bug in pure().
-A follow-up analysis of callee effect ordering
-will re-enable this "style optimization".
-
 -- go.mod --
 module testdata
 go 1.12
@@ -25,6 +19,6 @@
 
 import "io"
 
-var _ = func(err error) string { return err.Error() }(io.EOF) //@ inline(re"getError", getError)
+var _ = io.EOF.Error() //@ inline(re"getError", getError)
 
 func getError(err error) string { return err.Error() }
diff --git a/internal/refactor/inline/testdata/basic-reduce.txtar b/internal/refactor/inline/testdata/basic-reduce.txtar
index 46e44b7..10aca52 100644
--- a/internal/refactor/inline/testdata/basic-reduce.txtar
+++ b/internal/refactor/inline/testdata/basic-reduce.txtar
@@ -14,7 +14,7 @@
 -- zero --
 package a
 
-var _ = (0) //@ inline(re"zero", zero)
+var _ = 0 //@ inline(re"zero", zero)
 
 func zero() int { return 0 }
 
@@ -46,5 +46,5 @@
 -- add2 --
 package a
 
-var _ = (len("") + 2) //@ inline(re"add", add2)
+var _ = len("") + 2 //@ inline(re"add", add2)
 
diff --git a/internal/refactor/inline/testdata/cgo.txtar b/internal/refactor/inline/testdata/cgo.txtar
new file mode 100644
index 0000000..41567ed
--- /dev/null
+++ b/internal/refactor/inline/testdata/cgo.txtar
@@ -0,0 +1,45 @@
+Test that attempts to inline when the caller or callee is in a
+cgo-generated file are rejected.
+
+-- go.mod --
+module testdata
+go 1.12
+
+-- a/a.go --
+package a
+
+/*
+static void f() {}
+*/
+import "C"
+
+func a() {
+	C.f() //@ inline(re"f", re"cannot inline cgo-generated functions")
+	g()   //@ inline(re"g", re`cannot inline calls from files that import "C"`)
+}
+
+func g() {
+	println()
+}
+
+-- a/a2.go --
+package a
+
+func b() {
+	a() //@ inline(re"a", re"cannot inline cgo-generated functions")
+}
+
+func c() {
+	b() //@ inline(re"b", result)
+}
+
+-- result --
+package a
+
+func b() {
+	a() //@ inline(re"a", re"cannot inline cgo-generated functions")
+}
+
+func c() {
+	a() //@ inline(re"b", result)
+}
diff --git a/internal/refactor/inline/testdata/comments.txtar b/internal/refactor/inline/testdata/comments.txtar
index 189e9a2..76f6492 100644
--- a/internal/refactor/inline/testdata/comments.txtar
+++ b/internal/refactor/inline/testdata/comments.txtar
@@ -51,7 +51,7 @@
 package a
 
 func _() {
-	println((1 + 1)) //@ inline(re"g", g)
+	println(1 + 1) //@ inline(re"g", g)
 }
 
 func g() int { return 1 /*hello*/ + /*there*/ 1 }
diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar
index c311ee6..4188a52 100644
--- a/internal/refactor/inline/testdata/import-shadow.txtar
+++ b/internal/refactor/inline/testdata/import-shadow.txtar
@@ -2,13 +2,10 @@
 and the name log is in scope, doesn't mean the name in scope
 refers to the package: it could be locally shadowed.
 
-Two scenarios below:
-
-1. a second (renaming) import is added because the first import is
-   locally shadowed.
-
-2. a new import is added with a fresh name because the default
-   name is locally shadowed.
+In all three scenarios below, a renaming import with a fresh name is
+added because the usual name is locally shadowed: in cases 1 and 2 an
+existing import is shadowed by (respectively) a local constant and a
+parameter; in case 3 there is no existing import.
 
 -- go.mod --
 module testdata
@@ -90,9 +87,39 @@
 var x b.T
 
 func A(b int) {
-	{
-		var _ b0.T = x
-		b0.One()
-		b0.Two()
-	} //@ inline(re"F", fresult)
+
+	b0.One()
+	b0.Two()
+	//@ inline(re"F", fresult)
+}
+
+-- d/d.go --
+package d
+
+import "testdata/e"
+
+func D() {
+	const log = "shadow"
+	e.E() //@ inline(re"E", eresult)
+}
+
+-- e/e.go --
+package e
+
+import "log"
+
+func E() {
+	log.Printf("")
+}
+
+-- eresult --
+package d
+
+import (
+	log0 "log"
+)
+
+func D() {
+	const log = "shadow"
+	log0.Printf("") //@ inline(re"E", eresult)
 }
diff --git a/internal/refactor/inline/testdata/issue62667.txtar b/internal/refactor/inline/testdata/issue62667.txtar
new file mode 100644
index 0000000..21420e2
--- /dev/null
+++ b/internal/refactor/inline/testdata/issue62667.txtar
@@ -0,0 +1,44 @@
+Regression test for #62667: the callee's reference to Split
+was blindly qualified to path.Split even though the imported
+PkgName path is shadowed by the parameter of the same name.
+
+The defer is to defeat reduction of the call and
+substitution of the path parameter by g().
+
+-- go.mod --
+module testdata
+go 1.12
+
+-- a/a.go --
+package a
+
+import "testdata/path"
+
+func A() {
+	path.Dir(g()) //@ inline(re"Dir", result)
+}
+
+func g() string
+
+-- path/path.go --
+package path
+
+func Dir(path string) {
+	defer func(){}()
+	Split(path)
+}
+
+func Split(string) {}
+
+-- result --
+package a
+
+import (
+	path0 "testdata/path"
+)
+
+func A() {
+	func(path string) { defer func() {}(); path0.Split(path) }(g()) //@ inline(re"Dir", result)
+}
+
+func g() string
\ No newline at end of file
diff --git a/internal/refactor/inline/testdata/issue63298.txtar b/internal/refactor/inline/testdata/issue63298.txtar
new file mode 100644
index 0000000..e355e8e
--- /dev/null
+++ b/internal/refactor/inline/testdata/issue63298.txtar
@@ -0,0 +1,52 @@
+Regression test for #63298: inlining a function that
+depends on two packages with the same name leads
+to duplicate PkgNames.
+
+-- go.mod --
+module testdata
+go 1.12
+
+-- a/a.go --
+package a
+
+func _() {
+	a2() //@ inline(re"a2", result)
+}
+
+-- a/a2.go --
+package a
+
+import "testdata/b"
+import anotherb "testdata/another/b"
+
+func a2() {
+	b.B()
+	anotherb.B()
+}
+
+-- b/b.go --
+package b
+
+func B() {}
+
+-- b/another/b.go --
+package b
+
+func B() {}
+
+-- result --
+package a
+
+import (
+	b "testdata/b"
+	b0 "testdata/another/b"
+
+	//@ inline(re"a2", result)
+)
+
+func _() {
+
+	b.B()
+	b0.B()
+
+}
\ No newline at end of file
diff --git a/internal/refactor/inline/testdata/method.txtar b/internal/refactor/inline/testdata/method.txtar
index 61c082e..b141b09 100644
--- a/internal/refactor/inline/testdata/method.txtar
+++ b/internal/refactor/inline/testdata/method.txtar
@@ -15,7 +15,8 @@
 package a
 
 type T int
-func (T) f0() { println() }
+
+func (recv T) f0() { println(recv) }
 
 func _(x T) {
 	x.f0() //@ inline(re"f0", f0)
@@ -26,19 +27,16 @@
 
 type T int
 
-func (T) f0() { println() }
+func (recv T) f0() { println(recv) }
 
 func _(x T) {
-	{
-		var _ T = x
-		println()
-	} //@ inline(re"f0", f0)
+	println(x) //@ inline(re"f0", f0)
 }
 
 -- a/g0.go --
 package a
 
-func (recv *T) g0() { println() }
+func (recv *T) g0() { println(recv) }
 
 func _(x T) {
 	x.g0() //@ inline(re"g0", g0)
@@ -47,19 +45,16 @@
 -- g0 --
 package a
 
-func (recv *T) g0() { println() }
+func (recv *T) g0() { println(recv) }
 
 func _(x T) {
-	{
-		var _ *T = &x
-		println()
-	} //@ inline(re"g0", g0)
+	println(&x) //@ inline(re"g0", g0)
 }
 
 -- a/f1.go --
 package a
 
-func (T) f1(int, int) { println() }
+func (recv T) f1(int, int) { println(recv) }
 
 func _(x T) {
 	x.f1(1, 2) //@ inline(re"f1", f1)
@@ -68,19 +63,16 @@
 -- f1 --
 package a
 
-func (T) f1(int, int) { println() }
+func (recv T) f1(int, int) { println(recv) }
 
 func _(x T) {
-	{
-		var _ T = x
-		println()
-	} //@ inline(re"f1", f1)
+	println(x) //@ inline(re"f1", f1)
 }
 
 -- a/g1.go --
 package a
 
-func (recv *T) g1(int, int) { println() }
+func (recv *T) g1(int, int) { println(recv) }
 
 func _(x T) {
 	x.g1(1, 2) //@ inline(re"g1", g1)
@@ -89,13 +81,10 @@
 -- g1 --
 package a
 
-func (recv *T) g1(int, int) { println() }
+func (recv *T) g1(int, int) { println(recv) }
 
 func _(x T) {
-	{
-		var _ *T = &x
-		println()
-	} //@ inline(re"g1", g1)
+	println(&x) //@ inline(re"g1", g1)
 }
 
 -- a/h.go --
@@ -115,10 +104,10 @@
 
 func _() {
 	var ptr *T
-	{
-		var _ T = *ptr
-		_ = 1
-	} //@ inline(re"h", h)
+
+	var _ T = *ptr
+	_ = 1
+	//@ inline(re"h", h)
 }
 
 -- a/i.go --
diff --git a/internal/refactor/inline/testdata/multistmt-body.txtar b/internal/refactor/inline/testdata/multistmt-body.txtar
index 1dc39c5..6bd0108 100644
--- a/internal/refactor/inline/testdata/multistmt-body.txtar
+++ b/internal/refactor/inline/testdata/multistmt-body.txtar
@@ -54,10 +54,10 @@
 
 func _() {
 	a := 1
-	{
-		z := 1
-		print(a + 2 + z)
-	} //@ inline(re"f", out2)
+
+	z := 1
+	print(a + 2 + z)
+	//@ inline(re"f", out2)
 }
 
 -- a/a3.go --
diff --git a/internal/refactor/inline/testdata/param-subst.txtar b/internal/refactor/inline/testdata/param-subst.txtar
index 135313b..b6e462d 100644
--- a/internal/refactor/inline/testdata/param-subst.txtar
+++ b/internal/refactor/inline/testdata/param-subst.txtar
@@ -14,6 +14,6 @@
 -- add --
 package a
 
-var _ = (2 + 2*(1+1)) //@ inline(re"add", add)
+var _ = 2 + 2*(1+1) //@ inline(re"add", add)
 
 func add(x, y int) int { return x + 2*y }
\ No newline at end of file
diff --git a/internal/refactor/inline/testdata/tailcall.txtar b/internal/refactor/inline/testdata/tailcall.txtar
index 64f5f97..53b6de3 100644
--- a/internal/refactor/inline/testdata/tailcall.txtar
+++ b/internal/refactor/inline/testdata/tailcall.txtar
@@ -36,19 +36,19 @@
 package a
 
 func _() int {
-	{
-		total := 0
-	start:
-		for i := 1; i <= 2; i++ {
-			total += i
-			if i == 6 {
-				goto start
-			} else if i == 7 {
-				return -1
-			}
+
+	total := 0
+start:
+	for i := 1; i <= 2; i++ {
+		total += i
+		if i == 6 {
+			goto start
+		} else if i == 7 {
+			return -1
 		}
-		return total
-	} //@ inline(re"sum", sum)
+	}
+	return total
+	//@ inline(re"sum", sum)
 }
 
 func sum(lo, hi int) int {
diff --git a/internal/refactor/inline/util.go b/internal/refactor/inline/util.go
index 7ca5b08..98d654e 100644
--- a/internal/refactor/inline/util.go
+++ b/internal/refactor/inline/util.go
@@ -19,6 +19,27 @@
 	return ok
 }
 
+// TODO(adonovan): use go1.21's slices.Clone.
+func clone[T any](slice []T) []T { return append([]T{}, slice...) }
+
+// TODO(adonovan): use go1.21's slices.Index.
+func index[T comparable](slice []T, x T) int {
+	for i, elem := range slice {
+		if elem == x {
+			return i
+		}
+	}
+	return -1
+}
+
+func btoi(b bool) int {
+	if b {
+		return 1
+	} else {
+		return 0
+	}
+}
+
 func offsetOf(fset *token.FileSet, pos token.Pos) int {
 	return fset.PositionFor(pos, false).Offset
 }
@@ -69,3 +90,30 @@
 	}
 	return false
 }
+
+// intersects reports whether the maps' key sets intersect.
+func intersects[K comparable, T1, T2 any](x map[K]T1, y map[K]T2) bool {
+	if len(x) > len(y) {
+		return intersects(y, x)
+	}
+	for k := range x {
+		if _, ok := y[k]; ok {
+			return true
+		}
+	}
+	return false
+}
+
+// convert returns syntax for the conversion T(x).
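+//
+// For example (mirroring the #63362 test case), converting a channel
+// ch to a receive-only type yields the syntax (<-chan int)(ch), with
+// the parens added here rather than left to the formatter.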
+func convert(T, x ast.Expr) *ast.CallExpr {
+	// The formatter generally adds parens as needed,
+	// but before go1.22 it had a bug (#63362) for
+	// channel types that requires this workaround.
+	if ch, ok := T.(*ast.ChanType); ok && ch.Dir == ast.RECV {
+		T = &ast.ParenExpr{X: T}
+	}
+	return &ast.CallExpr{
+		Fun:  T,
+		Args: []ast.Expr{x},
+	}
+}