gopls/internal/span: simplify Span
This change moves span.Range to safetoken.Range, since it belongs
to the token domain. The span package no longer depends on the
token domain (except for one testing-related hook).
A Span is now just an optional offset, line, and column, any
subset of which may be populated. All conversions between these
forms, or to other forms such as protocol or token, are done by
ColumnMapper. The With{Position,Offset,All} methods and their
redundant implementations of the conversion logic (and the
necessary workarounds) are gone.
Also:
- rename pgf.RangeToSpanRange to RangeToTokenRange.
- add the ColumnMapper.PosLocation method (see the sketch below).
- typeErrorDiagnostics now uses it, avoiding the last call to
  span.FileSpan.
- delete span.FileSpan.
- delete the Span.Range method.
- update TestFormat now that there is no "filling in" of
  Spans based on a token.File.
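For callers, the new conversion path looks roughly like this (an
illustrative sketch, not code from this CL; the helper names and
the example package are invented):
    // Hypothetical helpers (not part of this CL) showing the new
    // conversion path: token.Pos pair -> byte offsets -> UTF-16 Location.
    package example
    import (
        "go/token"
        "golang.org/x/tools/gopls/internal/lsp/protocol"
        "golang.org/x/tools/gopls/internal/lsp/safetoken"
        "golang.org/x/tools/gopls/internal/lsp/source"
    )
    func tokenRangeToLocation(pgf *source.ParsedGoFile, start, end token.Pos) (protocol.Location, error) {
        // The file's ColumnMapper owns all conversions to protocol form.
        return pgf.Mapper.PosLocation(pgf.Tok, start, end)
    }
    // Completion code still records token ranges, now as safetoken.Range,
    // which carries its token.File and so remains self-contained.
    func tokenRangeOf(pgf *source.ParsedGoFile, start, end token.Pos) safetoken.Range {
        return safetoken.NewRange(pgf.Tok, start, end)
    }
Passing the token.File explicitly, rather than looking it up in a
FileSet, is what keeps these conversions self-contained and brings
us a step closer to eliminating the global FileSet.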
Change-Id: I04000a1666d9e5333070dbbfac397f88bba23e11
Reviewed-on: https://go-review.googlesource.com/c/tools/+/460936
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Alan Donovan <adonovan@google.com>
Reviewed-by: Robert Findley <rfindley@google.com>
Auto-Submit: Alan Donovan <adonovan@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
diff --git a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go b/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
index 00857a0..1b33157 100644
--- a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
+++ b/gopls/internal/lsp/analysis/fillstruct/fillstruct.go
@@ -27,7 +27,6 @@
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/gopls/internal/lsp/safetoken"
- "golang.org/x/tools/gopls/internal/span"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/fuzzy"
"golang.org/x/tools/internal/typeparams"
@@ -136,7 +135,7 @@
// SuggestedFix computes the suggested fix for the kinds of
// diagnostics produced by the Analyzer above.
-func SuggestedFix(fset *token.FileSet, rng span.Range, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+func SuggestedFix(fset *token.FileSet, rng safetoken.Range, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
if info == nil {
return nil, fmt.Errorf("nil types.Info")
}
diff --git a/gopls/internal/lsp/analysis/undeclaredname/undeclared.go b/gopls/internal/lsp/analysis/undeclaredname/undeclared.go
index 3e42f08..996bf2d 100644
--- a/gopls/internal/lsp/analysis/undeclaredname/undeclared.go
+++ b/gopls/internal/lsp/analysis/undeclaredname/undeclared.go
@@ -19,7 +19,6 @@
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/gopls/internal/lsp/safetoken"
- "golang.org/x/tools/gopls/internal/span"
"golang.org/x/tools/internal/analysisinternal"
)
@@ -122,7 +121,7 @@
})
}
-func SuggestedFix(fset *token.FileSet, rng span.Range, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+func SuggestedFix(fset *token.FileSet, rng safetoken.Range, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
pos := rng.Start // don't use the end
path, _ := astutil.PathEnclosingInterval(file, pos, pos)
if len(path) < 2 {
diff --git a/gopls/internal/lsp/cache/errors.go b/gopls/internal/lsp/cache/errors.go
index 6777193..16c0e78 100644
--- a/gopls/internal/lsp/cache/errors.go
+++ b/gopls/internal/lsp/cache/errors.go
@@ -103,17 +103,13 @@
var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`)
func typeErrorDiagnostics(snapshot *snapshot, pkg *pkg, e extendedError) ([]*source.Diagnostic, error) {
- code, spn, err := typeErrorData(pkg, e.primary)
- if err != nil {
- return nil, err
- }
- rng, err := spanToRange(pkg, spn)
+ code, loc, err := typeErrorData(pkg, e.primary)
if err != nil {
return nil, err
}
diag := &source.Diagnostic{
- URI: spn.URI(),
- Range: rng,
+ URI: loc.URI.SpanURI(),
+ Range: loc.Range,
Severity: protocol.SeverityError,
Source: source.TypeError,
Message: e.primary.Msg,
@@ -128,29 +124,25 @@
}
for _, secondary := range e.secondaries {
- _, secondarySpan, err := typeErrorData(pkg, secondary)
- if err != nil {
- return nil, err
- }
- rng, err := spanToRange(pkg, secondarySpan)
+ _, secondaryLoc, err := typeErrorData(pkg, secondary)
if err != nil {
return nil, err
}
diag.Related = append(diag.Related, source.RelatedInformation{
- URI: secondarySpan.URI(),
- Range: rng,
+ URI: secondaryLoc.URI.SpanURI(),
+ Range: secondaryLoc.Range,
Message: secondary.Msg,
})
}
if match := importErrorRe.FindStringSubmatch(e.primary.Msg); match != nil {
- diag.SuggestedFixes, err = goGetQuickFixes(snapshot, spn.URI(), match[1])
+ diag.SuggestedFixes, err = goGetQuickFixes(snapshot, loc.URI.SpanURI(), match[1])
if err != nil {
return nil, err
}
}
if match := unsupportedFeatureRe.FindStringSubmatch(e.primary.Msg); match != nil {
- diag.SuggestedFixes, err = editGoDirectiveQuickFix(snapshot, spn.URI(), match[1])
+ diag.SuggestedFixes, err = editGoDirectiveQuickFix(snapshot, loc.URI.SpanURI(), match[1])
if err != nil {
return nil, err
}
@@ -294,7 +286,7 @@
return out
}
-func typeErrorData(pkg *pkg, terr types.Error) (typesinternal.ErrorCode, span.Span, error) {
+func typeErrorData(pkg *pkg, terr types.Error) (typesinternal.ErrorCode, protocol.Location, error) {
ecode, start, end, ok := typesinternal.ReadGo116ErrorData(terr)
if !ok {
start, end = terr.Pos, terr.Pos
@@ -303,30 +295,27 @@
// go/types may return invalid positions in some cases, such as
// in errors on tokens missing from the syntax tree.
if !start.IsValid() {
- return 0, span.Span{}, fmt.Errorf("type error (%q, code %d, go116=%t) without position", terr.Msg, ecode, ok)
+ return 0, protocol.Location{}, fmt.Errorf("type error (%q, code %d, go116=%t) without position", terr.Msg, ecode, ok)
}
// go/types errors retain their FileSet.
// Sanity-check that we're using the right one.
fset := pkg.FileSet()
if fset != terr.Fset {
- return 0, span.Span{}, bug.Errorf("wrong FileSet for type error")
+ return 0, protocol.Location{}, bug.Errorf("wrong FileSet for type error")
}
posn := safetoken.StartPosition(fset, start)
if !posn.IsValid() {
- return 0, span.Span{}, fmt.Errorf("position %d of type error %q (code %q) not found in FileSet", start, start, terr)
+ return 0, protocol.Location{}, fmt.Errorf("position %d of type error %q (code %q) not found in FileSet", start, start, terr)
}
pgf, err := pkg.File(span.URIFromPath(posn.Filename))
if err != nil {
- return 0, span.Span{}, err
+ return 0, protocol.Location{}, err
}
if !end.IsValid() || end == start {
end = analysisinternal.TypeErrorEndPos(fset, pgf.Src, start)
}
- spn, err := span.FileSpan(pgf.Tok, start, end)
- if err != nil {
- return 0, span.Span{}, err
- }
- return ecode, spn, nil
+ loc, err := pgf.Mapper.PosLocation(pgf.Tok, start, end)
+ return ecode, loc, err
}
// spanToRange converts a span.Span to a protocol.Range,
diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go
index 5e0a778..8f39ea9 100644
--- a/gopls/internal/lsp/code_action.go
+++ b/gopls/internal/lsp/code_action.go
@@ -316,7 +316,7 @@
if err != nil {
return nil, fmt.Errorf("getting file for Identifier: %w", err)
}
- srng, err := pgf.RangeToSpanRange(rng)
+ srng, err := pgf.RangeToTokenRange(rng)
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/protocol/span.go b/gopls/internal/lsp/protocol/span.go
index d07614c..14dbdea 100644
--- a/gopls/internal/lsp/protocol/span.go
+++ b/gopls/internal/lsp/protocol/span.go
@@ -20,9 +20,9 @@
// span: for optional fields; useful for CLIs and tests without access to file contents.
// span.Point = (line?, col?, offset?) 1-based UTF-8
// span.Span = (uri URI, start, end span.Point)
-// span.Range = (file token.File, start, end token.Pos)
//
// token: for interaction with the go/* syntax packages:
+// safetoken.Range = (file token.File, start, end token.Pos)
// token.Pos
// token.FileSet
// token.File
@@ -30,15 +30,14 @@
// (see also safetoken)
//
// TODO(adonovan): simplify this picture:
-// - Move span.Range to package safetoken. Can we eliminate most uses in gopls?
-// Without a ColumnMapper it's not really self-contained.
+// - Eliminate most/all uses of safetoken.Range in gopls, as
+// without a ColumnMapper it's not really self-contained.
// It is mostly used by completion. Given access to complete.mapper,
// it could use a pair of byte offsets instead.
// - ColumnMapper.OffsetPoint and .PointPosition aren't used outside this package.
// OffsetSpan is barely used, and its user would better off with a MappedRange
// or protocol.Range. The span package data types are mostly used in tests
// and in argument parsing (without access to file content).
-// - share core conversion primitives with span package.
// - rename ColumnMapper to just Mapper, since it also maps lines
// <=> offsets, and move it to file mapper.go.
//
@@ -206,6 +205,7 @@
}
// OffsetSpan converts a byte-offset interval to a (UTF-8) span.
+// The resulting span contains line, column, and offset information.
func (m *ColumnMapper) OffsetSpan(start, end int) (span.Span, error) {
if start > end {
return span.Span{}, fmt.Errorf("start offset (%d) > end (%d)", start, end)
@@ -281,6 +281,7 @@
}
// OffsetPoint converts a byte offset to a span (UTF-8) point.
+// The resulting point contains line, column, and offset information.
func (m *ColumnMapper) OffsetPoint(offset int) (span.Point, error) {
if !(0 <= offset && offset <= len(m.Content)) {
return span.Point{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content))
@@ -389,6 +390,19 @@
return m.OffsetPosition(offset)
}
+// PosLocation converts a token range to a protocol (UTF-16) location.
+func (m *ColumnMapper) PosLocation(tf *token.File, start, end token.Pos) (Location, error) {
+ startOffset, endOffset, err := safetoken.Offsets(tf, start, end)
+ if err != nil {
+ return Location{}, err
+ }
+ rng, err := m.OffsetRange(startOffset, endOffset)
+ if err != nil {
+ return Location{}, err
+ }
+ return Location{URI: URIFromSpanURI(m.URI), Range: rng}, nil
+}
+
// PosPosition converts a token range to a protocol (UTF-16) range.
func (m *ColumnMapper) PosRange(tf *token.File, start, end token.Pos) (Range, error) {
startOffset, endOffset, err := safetoken.Offsets(tf, start, end)
diff --git a/gopls/internal/lsp/safetoken/range.go b/gopls/internal/lsp/safetoken/range.go
new file mode 100644
index 0000000..827a257
--- /dev/null
+++ b/gopls/internal/lsp/safetoken/range.go
@@ -0,0 +1,57 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package safetoken
+
+import "go/token"
+
+// Range represents a source code range in token.Pos form.
+//
+// It also carries the token.File that produced the positions,
+// so that it is capable of returning (file, line, col8) information.
+// However, it cannot be converted to protocol (UTF-16) form
+// without access to file content; to do that, use a protocol.ColumnMapper.
+type Range struct {
+ TokFile *token.File // non-nil
+ Start, End token.Pos // both IsValid()
+}
+
+// NewRange creates a new Range from a token.File and two positions within it.
+// The given start position must be valid; if end is invalid, start is used as
+// the end position.
+//
+// (If you only have a token.FileSet, use file = fset.File(start). But
+// most callers know exactly which token.File they're dealing with and
+// should pass it explicitly. Not only does this save a lookup, but it
+// brings us a step closer to eliminating the global FileSet.)
+func NewRange(file *token.File, start, end token.Pos) Range {
+ if file == nil {
+ panic("nil *token.File")
+ }
+ if !start.IsValid() {
+ panic("invalid start token.Pos")
+ }
+ if !end.IsValid() {
+ end = start
+ }
+
+ // TODO(adonovan): ideally we would make this stronger assertion:
+ //
+ // // Assert that file is non-nil and contains start and end.
+ // _ = file.Offset(start)
+ // _ = file.Offset(end)
+ //
+ // but some callers (e.g. packageCompletionSurrounding) don't ensure this precondition.
+
+ return Range{
+ TokFile: file,
+ Start: start,
+ End: end,
+ }
+}
+
+// IsPoint returns true if the range represents a single point.
+func (r Range) IsPoint() bool {
+ return r.Start == r.End
+}
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
index 12bad1c..845b959 100644
--- a/gopls/internal/lsp/source/completion/completion.go
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -24,9 +24,9 @@
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
"golang.org/x/tools/gopls/internal/lsp/snippet"
"golang.org/x/tools/gopls/internal/lsp/source"
- "golang.org/x/tools/gopls/internal/span"
"golang.org/x/tools/internal/event"
"golang.org/x/tools/internal/fuzzy"
"golang.org/x/tools/internal/imports"
@@ -290,7 +290,7 @@
type Selection struct {
content string
cursor token.Pos // relative to rng.TokFile
- rng span.Range
+ rng safetoken.Range
mapper *protocol.ColumnMapper
}
@@ -322,7 +322,7 @@
content: ident.Name,
cursor: c.pos,
// Overwrite the prefix only.
- rng: span.NewRange(c.tokFile, ident.Pos(), ident.End()),
+ rng: safetoken.NewRange(c.tokFile, ident.Pos(), ident.End()),
mapper: c.mapper,
}
@@ -345,7 +345,7 @@
c.surrounding = &Selection{
content: "",
cursor: c.pos,
- rng: span.NewRange(c.tokFile, c.pos, c.pos),
+ rng: safetoken.NewRange(c.tokFile, c.pos, c.pos),
mapper: c.mapper,
}
}
@@ -798,7 +798,7 @@
c.surrounding = &Selection{
content: content,
cursor: c.pos,
- rng: span.NewRange(c.tokFile, start, end),
+ rng: safetoken.NewRange(c.tokFile, start, end),
mapper: c.mapper,
}
@@ -1019,7 +1019,7 @@
c.surrounding = &Selection{
content: cursorComment.Text[start:end],
cursor: c.pos,
- rng: span.NewRange(c.tokFile, token.Pos(int(cursorComment.Slash)+start), token.Pos(int(cursorComment.Slash)+end)),
+ rng: safetoken.NewRange(c.tokFile, token.Pos(int(cursorComment.Slash)+start), token.Pos(int(cursorComment.Slash)+end)),
mapper: c.mapper,
}
c.setMatcherFromPrefix(c.surrounding.Prefix())
diff --git a/gopls/internal/lsp/source/completion/definition.go b/gopls/internal/lsp/source/completion/definition.go
index 2270dd7..564d82c 100644
--- a/gopls/internal/lsp/source/completion/definition.go
+++ b/gopls/internal/lsp/source/completion/definition.go
@@ -12,9 +12,9 @@
"unicode/utf8"
"golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
"golang.org/x/tools/gopls/internal/lsp/snippet"
"golang.org/x/tools/gopls/internal/lsp/source"
- "golang.org/x/tools/gopls/internal/span"
)
// some function definitions in test files can be completed
@@ -40,7 +40,7 @@
sel := &Selection{
content: "",
cursor: start,
- rng: span.NewRange(pgf.Tok, start, end),
+ rng: safetoken.NewRange(pgf.Tok, start, end),
mapper: pgf.Mapper,
}
var ans []CompletionItem
diff --git a/gopls/internal/lsp/source/completion/package.go b/gopls/internal/lsp/source/completion/package.go
index b8270e5..f6b5148 100644
--- a/gopls/internal/lsp/source/completion/package.go
+++ b/gopls/internal/lsp/source/completion/package.go
@@ -90,7 +90,7 @@
return &Selection{
content: name.Name,
cursor: cursor,
- rng: span.NewRange(tok, name.Pos(), name.End()),
+ rng: safetoken.NewRange(tok, name.Pos(), name.End()),
mapper: m,
}, nil
}
@@ -128,7 +128,7 @@
return &Selection{
content: content,
cursor: cursor,
- rng: span.NewRange(tok, start, end),
+ rng: safetoken.NewRange(tok, start, end),
mapper: m,
}, nil
}
@@ -150,7 +150,7 @@
return &Selection{
content: "",
cursor: cursor,
- rng: span.NewRange(tok, cursor, cursor),
+ rng: safetoken.NewRange(tok, cursor, cursor),
mapper: m,
}, nil
}
diff --git a/gopls/internal/lsp/source/extract.go b/gopls/internal/lsp/source/extract.go
index 31a8598..6f3c780 100644
--- a/gopls/internal/lsp/source/extract.go
+++ b/gopls/internal/lsp/source/extract.go
@@ -19,12 +19,11 @@
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/gopls/internal/lsp/safetoken"
- "golang.org/x/tools/gopls/internal/span"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/bug"
)
-func extractVariable(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+func extractVariable(fset *token.FileSet, rng safetoken.Range, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
tokFile := fset.File(file.Pos())
expr, path, ok, err := CanExtractVariable(rng, file)
if !ok {
@@ -99,7 +98,7 @@
// CanExtractVariable reports whether the code in the given range can be
// extracted to a variable.
-func CanExtractVariable(rng span.Range, file *ast.File) (ast.Expr, []ast.Node, bool, error) {
+func CanExtractVariable(rng safetoken.Range, file *ast.File) (ast.Expr, []ast.Node, bool, error) {
if rng.Start == rng.End {
return nil, nil, false, fmt.Errorf("start and end are equal")
}
@@ -190,12 +189,12 @@
}
// extractMethod refactors the selected block of code into a new method.
-func extractMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+func extractMethod(fset *token.FileSet, rng safetoken.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
return extractFunctionMethod(fset, rng, src, file, pkg, info, true)
}
// extractFunction refactors the selected block of code into a new function.
-func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+func extractFunction(fset *token.FileSet, rng safetoken.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
return extractFunctionMethod(fset, rng, src, file, pkg, info, false)
}
@@ -207,7 +206,7 @@
// and return values of the extracted function/method. Lastly, we construct the call
// of the function/method and insert this call as well as the extracted function/method into
// their proper locations.
-func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) {
+func extractFunctionMethod(fset *token.FileSet, rng safetoken.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) {
errorPrefix := "extractFunction"
if isMethod {
errorPrefix = "extractMethod"
@@ -344,7 +343,7 @@
if v.obj.Parent() == nil {
return nil, fmt.Errorf("parent nil")
}
- isUsed, firstUseAfter := objUsed(info, span.NewRange(tok, rng.End, v.obj.Parent().End()), v.obj)
+ isUsed, firstUseAfter := objUsed(info, safetoken.NewRange(tok, rng.End, v.obj.Parent().End()), v.obj)
if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) {
returnTypes = append(returnTypes, &ast.Field{Type: typ})
returns = append(returns, identifier)
@@ -653,7 +652,7 @@
// their cursors for whitespace. To support this use case, we must manually adjust the
// ranges to match the correct AST node. In this particular example, we would adjust
// rng.Start forward to the start of 'if' and rng.End backward to after '}'.
-func adjustRangeForCommentsAndWhiteSpace(rng span.Range, tok *token.File, content []byte, file *ast.File) (span.Range, error) {
+func adjustRangeForCommentsAndWhiteSpace(rng safetoken.Range, tok *token.File, content []byte, file *ast.File) (safetoken.Range, error) {
// Adjust the end of the range to after leading whitespace and comments.
prevStart, start := token.NoPos, rng.Start
startComment := sort.Search(len(file.Comments), func(i int) bool {
@@ -671,7 +670,7 @@
// Move forwards to find a non-whitespace character.
offset, err := safetoken.Offset(tok, start)
if err != nil {
- return span.Range{}, err
+ return safetoken.Range{}, err
}
for offset < len(content) && isGoWhiteSpace(content[offset]) {
offset++
@@ -701,7 +700,7 @@
// Move backwards to find a non-whitespace character.
offset, err := safetoken.Offset(tok, end)
if err != nil {
- return span.Range{}, err
+ return safetoken.Range{}, err
}
for offset > 0 && isGoWhiteSpace(content[offset-1]) {
offset--
@@ -709,7 +708,7 @@
end = tok.Pos(offset)
}
- return span.NewRange(tok, start, end), nil
+ return safetoken.NewRange(tok, start, end), nil
}
// isGoWhiteSpace returns true if b is a considered white space in
@@ -753,7 +752,7 @@
// variables will be used as arguments in the extracted function. It also returns a
// list of identifiers that may need to be returned by the extracted function.
// Some of the code in this function has been adapted from tools/cmd/guru/freevars.go.
-func collectFreeVars(info *types.Info, file *ast.File, fileScope, pkgScope *types.Scope, rng span.Range, node ast.Node) ([]*variable, error) {
+func collectFreeVars(info *types.Info, file *ast.File, fileScope, pkgScope *types.Scope, rng safetoken.Range, node ast.Node) ([]*variable, error) {
// id returns non-nil if n denotes an object that is referenced by the span
// and defined either within the span or in the lexical environment. The bool
// return value acts as an indicator for where it was defined.
@@ -962,14 +961,14 @@
type fnExtractParams struct {
tok *token.File
path []ast.Node
- rng span.Range
+ rng safetoken.Range
outer *ast.FuncDecl
start ast.Node
}
// CanExtractFunction reports whether the code in the given range can be
// extracted to a function.
-func CanExtractFunction(tok *token.File, rng span.Range, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) {
+func CanExtractFunction(tok *token.File, rng safetoken.Range, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) {
if rng.Start == rng.End {
return nil, false, false, fmt.Errorf("start and end are equal")
}
@@ -1041,7 +1040,7 @@
// objUsed checks if the object is used within the range. It returns the first
// occurrence of the object in the range, if it exists.
-func objUsed(info *types.Info, rng span.Range, obj types.Object) (bool, *ast.Ident) {
+func objUsed(info *types.Info, rng safetoken.Range, obj types.Object) (bool, *ast.Ident) {
var firstUse *ast.Ident
for id, objUse := range info.Uses {
if obj != objUse {
diff --git a/gopls/internal/lsp/source/fix.go b/gopls/internal/lsp/source/fix.go
index 7b39c44..9b6322c 100644
--- a/gopls/internal/lsp/source/fix.go
+++ b/gopls/internal/lsp/source/fix.go
@@ -15,6 +15,7 @@
"golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct"
"golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname"
"golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
"golang.org/x/tools/gopls/internal/span"
"golang.org/x/tools/internal/bug"
)
@@ -27,7 +28,7 @@
// separately. Such analyzers should provide a function with a signature of
// SuggestedFixFunc.
SuggestedFixFunc func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error)
- singleFileFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+ singleFileFixFunc func(fset *token.FileSet, rng safetoken.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
)
const (
@@ -133,14 +134,14 @@
// getAllSuggestedFixInputs is a helper function to collect all possible needed
// inputs for an AppliesFunc or SuggestedFixFunc.
-func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *types.Package, *types.Info, error) {
+func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, safetoken.Range, []byte, *ast.File, *types.Package, *types.Info, error) {
pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), TypecheckWorkspace, NarrowestPackage)
if err != nil {
- return nil, span.Range{}, nil, nil, nil, nil, fmt.Errorf("getting file for Identifier: %w", err)
+ return nil, safetoken.Range{}, nil, nil, nil, nil, fmt.Errorf("getting file for Identifier: %w", err)
}
- rng, err := pgf.RangeToSpanRange(pRng)
+ rng, err := pgf.RangeToTokenRange(pRng)
if err != nil {
- return nil, span.Range{}, nil, nil, nil, nil, err
+ return nil, safetoken.Range{}, nil, nil, nil, nil, err
}
return pkg.FileSet(), rng, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo(), nil
}
diff --git a/gopls/internal/lsp/source/inlay_hint.go b/gopls/internal/lsp/source/inlay_hint.go
index 5179c1e..3db3248 100644
--- a/gopls/internal/lsp/source/inlay_hint.go
+++ b/gopls/internal/lsp/source/inlay_hint.go
@@ -109,7 +109,7 @@
start, end := pgf.File.Pos(), pgf.File.End()
if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character {
// Adjust start and end for the specified range.
- rng, err := pgf.RangeToSpanRange(pRng)
+ rng, err := pgf.RangeToTokenRange(pRng)
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/source/stub.go b/gopls/internal/lsp/source/stub.go
index dd41fbe..c2edf08 100644
--- a/gopls/internal/lsp/source/stub.go
+++ b/gopls/internal/lsp/source/stub.go
@@ -218,7 +218,7 @@
}
func getStubNodes(pgf *ParsedGoFile, pRng protocol.Range) ([]ast.Node, token.Pos, error) {
- rng, err := pgf.RangeToSpanRange(pRng)
+ rng, err := pgf.RangeToTokenRange(pRng)
if err != nil {
return nil, 0, err
}
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index 9421b50..61a4a95 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -415,12 +415,12 @@
}
-// RangeToSpanRange parses a protocol Range back into the go/token domain.
+// RangeToTokenRange parses a protocol Range back into the go/token domain.
-func (pgf *ParsedGoFile) RangeToSpanRange(r protocol.Range) (span.Range, error) {
- spn, err := pgf.Mapper.RangeSpan(r)
+func (pgf *ParsedGoFile) RangeToTokenRange(r protocol.Range) (safetoken.Range, error) {
+ start, end, err := pgf.Mapper.RangeOffsets(r)
if err != nil {
- return span.Range{}, err
+ return safetoken.Range{}, err
}
- return spn.Range(pgf.Tok)
+ return safetoken.NewRange(pgf.Tok, pgf.Tok.Pos(start), pgf.Tok.Pos(end)), nil
}
// A ParsedModule contains the results of parsing a go.mod file.
diff --git a/gopls/internal/span/span.go b/gopls/internal/span/span.go
index 9687bbb..65c6e20 100644
--- a/gopls/internal/span/span.go
+++ b/gopls/internal/span/span.go
@@ -17,7 +17,17 @@
"golang.org/x/tools/gopls/internal/lsp/safetoken"
)
-// Span represents a source code range in standardized form.
+// A Span represents a range of text within a source file. The start
+// and end points of a valid span may each hold its byte offset, its
+// (line, column) pair, or both. Columns are measured in bytes.
+//
+// Spans are appropriate in user interfaces (e.g. command-line tools)
+// and tests where a position is notated without access to the content
+// of the file.
+//
+// Use protocol.ColumnMapper to convert between Span and other
+// representations, such as go/token (also UTF-8) or the LSP protocol
+// (UTF-16). The latter requires access to file contents.
type Span struct {
v span
}
@@ -29,6 +39,12 @@
v point
}
+// The private span/point types have public fields to support JSON
+// encoding, but the public Span/Point types hide these fields by
+// defining methods that shadow them. (This is used by a few of the
+// command-line tool subcommands, which emit spans and have a -json
+// flag.)
+
type span struct {
URI URI `json:"uri"`
Start point `json:"start"`
@@ -224,75 +240,6 @@
}
}
-// (Currently unused, but we gain little yet by deleting it.)
-func (s Span) withPosition(tf *token.File) (Span, error) {
- if err := s.update(tf, true, false); err != nil {
- return Span{}, err
- }
- return s, nil
-}
-
-func (s Span) withOffset(tf *token.File) (Span, error) {
- if err := s.update(tf, false, true); err != nil {
- return Span{}, err
- }
- return s, nil
-}
-
-// (Currently unused except by test.)
-func (s Span) WithAll(tf *token.File) (Span, error) {
- if err := s.update(tf, true, true); err != nil {
- return Span{}, err
- }
- return s, nil
-}
-
-func (s *Span) update(tf *token.File, withPos, withOffset bool) error {
- if !s.IsValid() {
- return fmt.Errorf("cannot add information to an invalid span")
- }
- if withPos && !s.HasPosition() {
- if err := s.v.Start.updatePosition(tf); err != nil {
- return err
- }
- if s.v.End.Offset == s.v.Start.Offset {
- s.v.End = s.v.Start
- } else if err := s.v.End.updatePosition(tf); err != nil {
- return err
- }
- }
- if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) {
- if err := s.v.Start.updateOffset(tf); err != nil {
- return err
- }
- if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column {
- s.v.End.Offset = s.v.Start.Offset
- } else if err := s.v.End.updateOffset(tf); err != nil {
- return err
- }
- }
- return nil
-}
-
-func (p *point) updatePosition(tf *token.File) error {
- line, col8, err := offsetToLineCol8(tf, p.Offset)
- if err != nil {
- return err
- }
- p.Line = line
- p.Column = col8
- return nil
-}
-
-func (p *point) updateOffset(tf *token.File) error {
- offset, err := toOffset(tf, p.Line, p.Column)
- if err != nil {
- return err
- }
- p.Offset = offset
- return nil
-}
-
// SetRange implements packagestest.rangeSetter, allowing
// gopls' test suites to use Spans instead of Range in parameters.
func (span *Span) SetRange(file *token.File, start, end token.Pos) {
diff --git a/gopls/internal/span/span_test.go b/gopls/internal/span/span_test.go
index 63c0752..d2aaff1 100644
--- a/gopls/internal/span/span_test.go
+++ b/gopls/internal/span/span_test.go
@@ -6,7 +6,6 @@
import (
"fmt"
- "go/token"
"path/filepath"
"strings"
"testing"
@@ -14,40 +13,37 @@
"golang.org/x/tools/gopls/internal/span"
)
-var (
- tests = [][]string{
- {"C:/file_a", "C:/file_a", "file:///C:/file_a:1:1#0"},
- {"C:/file_b:1:2", "C:/file_b:#1", "file:///C:/file_b:1:2#1"},
- {"C:/file_c:1000", "C:/file_c:#9990", "file:///C:/file_c:1000:1#9990"},
- {"C:/file_d:14:9", "C:/file_d:#138", "file:///C:/file_d:14:9#138"},
- {"C:/file_e:1:2-7", "C:/file_e:#1-#6", "file:///C:/file_e:1:2#1-1:7#6"},
- {"C:/file_f:500-502", "C:/file_f:#4990-#5010", "file:///C:/file_f:500:1#4990-502:1#5010"},
- {"C:/file_g:3:7-8", "C:/file_g:#26-#27", "file:///C:/file_g:3:7#26-3:8#27"},
- {"C:/file_h:3:7-4:8", "C:/file_h:#26-#37", "file:///C:/file_h:3:7#26-4:8#37"},
- }
-)
-
func TestFormat(t *testing.T) {
- converter := lines(10)
- for _, test := range tests {
- for ti, text := range test[:2] {
- spn := span.Parse(text)
- if ti <= 1 {
- // we can check %v produces the same as the input
- expect := toPath(test[ti])
- if got := fmt.Sprintf("%v", spn); got != expect {
- t.Errorf("printing %q got %q expected %q", text, got, expect)
- }
- }
- complete, err := spn.WithAll(converter)
- if err != nil {
- t.Error(err)
- }
- for fi, format := range []string{"%v", "%#v", "%+v"} {
- expect := toPath(test[fi])
- if got := fmt.Sprintf(format, complete); got != expect {
- t.Errorf("printing completed %q as %q got %q expected %q [%+v]", text, format, got, expect, spn)
- }
+ formats := []string{"%v", "%#v", "%+v"}
+
+ // Element 0 is the input, and elements 0-2 are the expected
+ // outputs in [%v %#v %+v] formats. Thus the first must be in
+ // canonical form (invariant under span.Parse + fmt.Sprint).
+ // The '#' form displays offsets; the '+' form outputs a URI.
+ // If len=4, element 0 is a noncanonical input and 1-3 are expected outputs.
+ for _, test := range [][]string{
+ {"C:/file_a", "C:/file_a", "file:///C:/file_a:#0"},
+ {"C:/file_b:1:2", "C:/file_b:1:2", "file:///C:/file_b:1:2"},
+ {"C:/file_c:1000", "C:/file_c:1000", "file:///C:/file_c:1000:1"},
+ {"C:/file_d:14:9", "C:/file_d:14:9", "file:///C:/file_d:14:9"},
+ {"C:/file_e:1:2-7", "C:/file_e:1:2-7", "file:///C:/file_e:1:2-1:7"},
+ {"C:/file_f:500-502", "C:/file_f:500-502", "file:///C:/file_f:500:1-502:1"},
+ {"C:/file_g:3:7-8", "C:/file_g:3:7-8", "file:///C:/file_g:3:7-3:8"},
+ {"C:/file_h:3:7-4:8", "C:/file_h:3:7-4:8", "file:///C:/file_h:3:7-4:8"},
+ {"C:/file_i:#100", "C:/file_i:#100", "file:///C:/file_i:#100"},
+ {"C:/file_j:#26-#28", "C:/file_j:#26-#28", "file:///C:/file_j:#26-0#28"}, // 0#28?
+ {"C:/file_h:3:7#26-4:8#37", // not canonical
+ "C:/file_h:3:7-4:8", "C:/file_h:#26-#37", "file:///C:/file_h:3:7#26-4:8#37"}} {
+ input := test[0]
+ spn := span.Parse(input)
+ wants := test[0:3]
+ if len(test) == 4 {
+ wants = test[1:4]
+ }
+ for i, format := range formats {
+ want := toPath(wants[i])
+ if got := fmt.Sprintf(format, spn); got != want {
+ t.Errorf("Sprintf(%q, %q) = %q, want %q", format, input, got, want)
}
}
}
@@ -59,16 +55,3 @@
}
return filepath.FromSlash(value)
}
-
-// lines creates a new tokenConverter for a file with 1000 lines, each width
-// bytes wide.
-func lines(width int) *token.File {
- fset := token.NewFileSet()
- f := fset.AddFile("", -1, 1000*width)
- var lines []int
- for i := 0; i < 1000; i++ {
- lines = append(lines, i*width)
- }
- f.SetLines(lines)
- return f
-}
diff --git a/gopls/internal/span/token.go b/gopls/internal/span/token.go
deleted file mode 100644
index 1b8a6495..0000000
--- a/gopls/internal/span/token.go
+++ /dev/null
@@ -1,195 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package span
-
-import (
- "fmt"
- "go/token"
-
- "golang.org/x/tools/gopls/internal/lsp/safetoken"
- "golang.org/x/tools/internal/bug"
-)
-
-// Range represents a source code range in token.Pos form.
-// It also carries the token.File that produced the positions, so that it is
-// self contained.
-//
-// TODO(adonovan): move to safetoken.Range (but the Range.Span function must stay behind).
-type Range struct {
- TokFile *token.File // non-nil
- Start, End token.Pos // both IsValid()
-}
-
-// NewRange creates a new Range from a token.File and two positions within it.
-// The given start position must be valid; if end is invalid, start is used as
-// the end position.
-//
-// (If you only have a token.FileSet, use file = fset.File(start). But
-// most callers know exactly which token.File they're dealing with and
-// should pass it explicitly. Not only does this save a lookup, but it
-// brings us a step closer to eliminating the global FileSet.)
-func NewRange(file *token.File, start, end token.Pos) Range {
- if file == nil {
- panic("nil *token.File")
- }
- if !start.IsValid() {
- panic("invalid start token.Pos")
- }
- if !end.IsValid() {
- end = start
- }
-
- // TODO(adonovan): ideally we would make this stronger assertion:
- //
- // // Assert that file is non-nil and contains start and end.
- // _ = file.Offset(start)
- // _ = file.Offset(end)
- //
- // but some callers (e.g. packageCompletionSurrounding,
- // posToMappedRange) don't ensure this precondition.
-
- return Range{
- TokFile: file,
- Start: start,
- End: end,
- }
-}
-
-// IsPoint returns true if the range represents a single point.
-func (r Range) IsPoint() bool {
- return r.Start == r.End
-}
-
-// Span converts a Range to a Span that represents the Range.
-// It will fill in all the members of the Span, calculating the line and column
-// information.
-func (r Range) Span() (Span, error) {
- return FileSpan(r.TokFile, r.Start, r.End)
-}
-
-// FileSpan returns a span within the file referenced by start and
-// end, using a token.File to translate between offsets and positions.
-func FileSpan(file *token.File, start, end token.Pos) (Span, error) {
- if !start.IsValid() {
- return Span{}, fmt.Errorf("start pos is not valid")
- }
- var s Span
- var err error
- var startFilename string
- startFilename, s.v.Start.Line, s.v.Start.Column, err = position(file, start)
- if err != nil {
- return Span{}, err
- }
- s.v.URI = URIFromPath(startFilename)
- if end.IsValid() {
- var endFilename string
- endFilename, s.v.End.Line, s.v.End.Column, err = position(file, end)
- if err != nil {
- return Span{}, err
- }
- if endFilename != startFilename {
- return Span{}, fmt.Errorf("span begins in file %q but ends in %q", startFilename, endFilename)
- }
- }
- s.v.Start.clean()
- s.v.End.clean()
- s.v.clean()
- return s.withOffset(file)
-}
-
-func position(tf *token.File, pos token.Pos) (string, int, int, error) {
- off, err := offset(tf, pos)
- if err != nil {
- return "", 0, 0, err
- }
- return positionFromOffset(tf, off)
-}
-
-func positionFromOffset(tf *token.File, offset int) (string, int, int, error) {
- if offset > tf.Size() {
- return "", 0, 0, fmt.Errorf("offset %d is beyond EOF (%d) in file %s", offset, tf.Size(), tf.Name())
- }
- pos := tf.Pos(offset)
- p := safetoken.Position(tf, pos)
- // TODO(golang/go#41029): Consider returning line, column instead of line+1, 1 if
- // the file's last character is not a newline.
- if offset == tf.Size() {
- return p.Filename, p.Line + 1, 1, nil
- }
- return p.Filename, p.Line, p.Column, nil
-}
-
-// offset is a copy of the Offset function in go/token, but with the adjustment
-// that it does not panic on invalid positions.
-func offset(tf *token.File, pos token.Pos) (int, error) {
- if int(pos) < tf.Base() || int(pos) > tf.Base()+tf.Size() {
- return 0, fmt.Errorf("invalid pos: %d not in [%d, %d]", pos, tf.Base(), tf.Base()+tf.Size())
- }
- return int(pos) - tf.Base(), nil
-}
-
-// Range converts a Span to a Range that represents the Span for the supplied
-// File.
-func (s Span) Range(tf *token.File) (Range, error) {
- s, err := s.withOffset(tf)
- if err != nil {
- return Range{}, err
- }
- // go/token will panic if the offset is larger than the file's size,
- // so check here to avoid panicking.
- if s.Start().Offset() > tf.Size() {
- return Range{}, bug.Errorf("start offset %v is past the end of the file %v", s.Start(), tf.Size())
- }
- if s.End().Offset() > tf.Size() {
- return Range{}, bug.Errorf("end offset %v is past the end of the file %v", s.End(), tf.Size())
- }
- return Range{
- Start: tf.Pos(s.Start().Offset()),
- End: tf.Pos(s.End().Offset()),
- TokFile: tf,
- }, nil
-}
-
-// offsetToLineCol8 converts a byte offset in the file corresponding to tf into
-// 1-based line and utf-8 column indexes.
-func offsetToLineCol8(tf *token.File, offset int) (int, int, error) {
- _, line, col8, err := positionFromOffset(tf, offset)
- return line, col8, err
-}
-
-// ToOffset converts a 1-based line and utf-8 column index into a byte offset
-// in the file corresponding to tf.
-func toOffset(tf *token.File, line, col int) (int, error) {
- if line < 1 { // token.File.LineStart panics if line < 1
- return -1, fmt.Errorf("invalid line: %d", line)
- }
-
- lineMax := tf.LineCount() + 1
- if line > lineMax {
- return -1, fmt.Errorf("line %d is beyond end of file %v", line, lineMax)
- } else if line == lineMax {
- if col > 1 {
- return -1, fmt.Errorf("column is beyond end of file")
- }
- // at the end of the file, allowing for a trailing eol
- return tf.Size(), nil
- }
- pos := tf.LineStart(line)
- if !pos.IsValid() {
- // bug.Errorf here because LineStart panics on out-of-bound input, and so
- // should never return invalid positions.
- return -1, bug.Errorf("line is not in file")
- }
- // we assume that column is in bytes here, and that the first byte of a
- // line is at column 1
- pos += token.Pos(col - 1)
-
- // Debugging support for https://github.com/golang/go/issues/54655.
- if pos > token.Pos(tf.Base()+tf.Size()) {
- return 0, fmt.Errorf("ToOffset: column %d is beyond end of file", col)
- }
-
- return offset(tf, pos)
-}
diff --git a/gopls/internal/span/token_test.go b/gopls/internal/span/token_test.go
deleted file mode 100644
index 997c8fb..0000000
--- a/gopls/internal/span/token_test.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package span_test
-
-import (
- "fmt"
- "go/token"
- "path"
- "testing"
-
- "golang.org/x/tools/gopls/internal/span"
-)
-
-var testdata = []struct {
- uri string
- content []byte
-}{
- {"/a.go", []byte(`
-// file a.go
-package test
-`)},
- {"/b.go", []byte(`
-//
-//
-// file b.go
-package test`)},
- {"/c.go", []byte(`
-// file c.go
-package test`)},
-}
-
-var tokenTests = []span.Span{
- span.New(span.URIFromPath("/a.go"), span.NewPoint(1, 1, 0), span.Point{}),
- span.New(span.URIFromPath("/a.go"), span.NewPoint(3, 7, 20), span.NewPoint(3, 7, 20)),
- span.New(span.URIFromPath("/b.go"), span.NewPoint(4, 9, 15), span.NewPoint(4, 13, 19)),
- span.New(span.URIFromPath("/c.go"), span.NewPoint(4, 1, 26), span.Point{}),
-}
-
-func TestToken(t *testing.T) {
- fset := token.NewFileSet()
- files := map[span.URI]*token.File{}
- for _, f := range testdata {
- file := fset.AddFile(f.uri, -1, len(f.content))
- file.SetLinesForContent(f.content)
- files[span.URIFromPath(f.uri)] = file
- }
- for _, test := range tokenTests {
- f := files[test.URI()]
- t.Run(path.Base(f.Name()), func(t *testing.T) {
- checkToken(t, f, span.New(
- test.URI(),
- span.NewPoint(test.Start().Line(), test.Start().Column(), 0),
- span.NewPoint(test.End().Line(), test.End().Column(), 0),
- ), test)
- checkToken(t, f, span.New(
- test.URI(),
- span.NewPoint(0, 0, test.Start().Offset()),
- span.NewPoint(0, 0, test.End().Offset()),
- ), test)
- })
- }
-}
-
-func checkToken(t *testing.T, f *token.File, in, expect span.Span) {
- rng, err := in.Range(f)
- if err != nil {
- t.Error(err)
- }
- gotLoc, err := rng.Span()
- if err != nil {
- t.Error(err)
- }
- expected := fmt.Sprintf("%+v", expect)
- got := fmt.Sprintf("%+v", gotLoc)
- if expected != got {
- t.Errorf("For %v expected %q got %q", in, expected, got)
- }
-}