internal/span: eliminate TokenConverter
The TokenConverter has been trimmed down to a thin wrapper around
token.File, and can now be removed.
Change-Id: I9985492274c88e6a13e6d62dadab5595c75c7840
Reviewed-on: https://go-review.googlesource.com/c/tools/+/406134
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Robert Findley <rfindley@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
Reviewed-by: Alan Donovan <adonovan@google.com>
diff --git a/internal/lsp/cache/errors.go b/internal/lsp/cache/errors.go
index 08db103..342f2be 100644
--- a/internal/lsp/cache/errors.go
+++ b/internal/lsp/cache/errors.go
@@ -349,7 +349,7 @@
}
func parsedGoSpan(pgf *source.ParsedGoFile, start, end token.Pos) (span.Span, error) {
- return span.FileSpan(pgf.Tok, pgf.Mapper.Converter, start, end)
+ return span.FileSpan(pgf.Mapper.TokFile, pgf.Mapper.TokFile, start, end)
}
// spanToRange converts a span.Span to a protocol.Range,
diff --git a/internal/lsp/cache/mod.go b/internal/lsp/cache/mod.go
index 1695884..37fc6e6 100644
--- a/internal/lsp/cache/mod.go
+++ b/internal/lsp/cache/mod.go
@@ -56,9 +56,9 @@
return &parseModData{err: err}
}
m := &protocol.ColumnMapper{
- URI: modFH.URI(),
- Converter: span.NewContentConverter(modFH.URI().Filename(), contents),
- Content: contents,
+ URI: modFH.URI(),
+ TokFile: span.NewTokenFile(modFH.URI().Filename(), contents),
+ Content: contents,
}
file, parseErr := modfile.Parse(modFH.URI().Filename(), contents, nil)
// Attempt to convert the error to a standardized parse error.
@@ -134,9 +134,9 @@
return &parseWorkData{err: err}
}
m := &protocol.ColumnMapper{
- URI: modFH.URI(),
- Converter: span.NewContentConverter(modFH.URI().Filename(), contents),
- Content: contents,
+ URI: modFH.URI(),
+ TokFile: span.NewTokenFile(modFH.URI().Filename(), contents),
+ Content: contents,
}
file, parseErr := modfile.ParseWork(modFH.URI().Filename(), contents, nil)
// Attempt to convert the error to a standardized parse error.
diff --git a/internal/lsp/cache/mod_tidy.go b/internal/lsp/cache/mod_tidy.go
index e85f651..aa525e7 100644
--- a/internal/lsp/cache/mod_tidy.go
+++ b/internal/lsp/cache/mod_tidy.go
@@ -482,7 +482,7 @@
func spanFromPositions(m *protocol.ColumnMapper, s, e modfile.Position) (span.Span, error) {
toPoint := func(offset int) (span.Point, error) {
- l, c, err := m.Converter.ToPosition(offset)
+ l, c, err := span.ToPosition(m.TokFile, offset)
if err != nil {
return span.Point{}, err
}
diff --git a/internal/lsp/cache/parse.go b/internal/lsp/cache/parse.go
index 1ab1fa9..668c437 100644
--- a/internal/lsp/cache/parse.go
+++ b/internal/lsp/cache/parse.go
@@ -331,9 +331,9 @@
File: file,
Tok: tok,
Mapper: &protocol.ColumnMapper{
- URI: fh.URI(),
- Converter: span.NewTokenConverter(tok),
- Content: src,
+ URI: fh.URI(),
+ TokFile: tok,
+ Content: src,
},
ParseErr: parseErr,
},
@@ -764,7 +764,7 @@
// fixSrc attempts to modify the file's source code to fix certain
// syntax errors that leave the rest of the file unparsed.
-func fixSrc(f *ast.File, tok *token.File, src []byte) (newSrc []byte) {
+func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte) {
walkASTWithParent(f, func(n, parent ast.Node) bool {
if newSrc != nil {
return false
@@ -772,9 +772,9 @@
switch n := n.(type) {
case *ast.BlockStmt:
- newSrc = fixMissingCurlies(f, n, parent, tok, src)
+ newSrc = fixMissingCurlies(f, n, parent, tf, src)
case *ast.SelectorExpr:
- newSrc = fixDanglingSelector(n, tok, src)
+ newSrc = fixDanglingSelector(n, tf, src)
}
return newSrc == nil
@@ -937,8 +937,8 @@
// To fix completion at "<>", we insert a real "_" after the "." so the
// following declaration of "x" can be parsed and type checked
// normally.
-func fixDanglingSelector(s *ast.SelectorExpr, tok *token.File, src []byte) []byte {
- if !isPhantomUnderscore(s.Sel, tok, src) {
+func fixDanglingSelector(s *ast.SelectorExpr, tf *token.File, src []byte) []byte {
+ if !isPhantomUnderscore(s.Sel, tf, src) {
return nil
}
@@ -946,7 +946,7 @@
return nil
}
- insertOffset, err := safetoken.Offset(tok, s.X.End())
+ insertOffset, err := safetoken.Offset(tf, s.X.End())
if err != nil {
return nil
}
@@ -973,8 +973,8 @@
// yields a "_" selector instead of "var" since "var" is a keyword.
//
// TODO(rfindley): should this constitute an ast 'fix'?
-func fixPhantomSelector(sel *ast.SelectorExpr, tok *token.File, src []byte) {
- if !isPhantomUnderscore(sel.Sel, tok, src) {
+func fixPhantomSelector(sel *ast.SelectorExpr, tf *token.File, src []byte) {
+ if !isPhantomUnderscore(sel.Sel, tf, src) {
return
}
@@ -988,7 +988,7 @@
return
}
- maybeKeyword := readKeyword(sel.Sel.Pos(), tok, src)
+ maybeKeyword := readKeyword(sel.Sel.Pos(), tf, src)
if maybeKeyword == "" {
return
}
diff --git a/internal/lsp/cmd/cmd.go b/internal/lsp/cmd/cmd.go
index 64b0703..a81eb83 100644
--- a/internal/lsp/cmd/cmd.go
+++ b/internal/lsp/cmd/cmd.go
@@ -545,11 +545,10 @@
}
f := c.fset.AddFile(fname, -1, len(content))
f.SetLinesForContent(content)
- converter := span.NewContentConverter(fname, content)
file.mapper = &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: content,
+ URI: uri,
+ TokFile: f,
+ Content: content,
}
}
return file
diff --git a/internal/lsp/cmd/semantictokens.go b/internal/lsp/cmd/semantictokens.go
index 120f91d..e4b1306 100644
--- a/internal/lsp/cmd/semantictokens.go
+++ b/internal/lsp/cmd/semantictokens.go
@@ -117,11 +117,11 @@
// can't happen; just parsed this file
return fmt.Errorf("can't find %s in fset", args[0])
}
- tc := span.NewContentConverter(args[0], buf)
+ tf := span.NewTokenFile(args[0], buf)
colmap = &protocol.ColumnMapper{
- URI: span.URI(args[0]),
- Content: buf,
- Converter: tc,
+ URI: span.URI(args[0]),
+ Content: buf,
+ TokFile: tf,
}
err = decorate(file.uri.Filename(), resp.Data)
if err != nil {
diff --git a/internal/lsp/cmd/test/check.go b/internal/lsp/cmd/test/check.go
index f0e6d8f..6a53925 100644
--- a/internal/lsp/cmd/test/check.go
+++ b/internal/lsp/cmd/test/check.go
@@ -35,7 +35,7 @@
if err != nil {
t.Fatal(err)
}
- converter := span.NewContentConverter(fname, data)
+ converter := span.NewTokenFile(fname, data)
s, err := spn.WithPosition(converter)
if err != nil {
t.Fatal(err)
diff --git a/internal/lsp/command.go b/internal/lsp/command.go
index 5636f87..6e21c1b 100644
--- a/internal/lsp/command.go
+++ b/internal/lsp/command.go
@@ -580,9 +580,9 @@
}
m := &protocol.ColumnMapper{
- URI: fh.URI(),
- Converter: span.NewContentConverter(fh.URI().Filename(), oldContent),
- Content: oldContent,
+ URI: fh.URI(),
+ TokFile: span.NewTokenFile(fh.URI().Filename(), oldContent),
+ Content: oldContent,
}
diff, err := snapshot.View().Options().ComputeEdits(uri, string(oldContent), string(newContent))
if err != nil {
diff --git a/internal/lsp/diff/diff.go b/internal/lsp/diff/diff.go
index 06e05a6..8fd6824 100644
--- a/internal/lsp/diff/diff.go
+++ b/internal/lsp/diff/diff.go
@@ -78,10 +78,10 @@
// prepareEdits returns a sorted copy of the edits
func prepareEdits(before string, edits []TextEdit) ([]TextEdit, bool) {
partial := false
- c := span.NewContentConverter("", []byte(before))
+ tf := span.NewTokenFile("", []byte(before))
copied := make([]TextEdit, len(edits))
for i, edit := range edits {
- edit.Span, _ = edit.Span.WithAll(c)
+ edit.Span, _ = edit.Span.WithAll(tf)
copied[i] = edit
partial = partial ||
edit.Span.Start().Offset() >= len(before) ||
diff --git a/internal/lsp/diff/difftest/difftest.go b/internal/lsp/diff/difftest/difftest.go
index 0e014bc..a78e267 100644
--- a/internal/lsp/diff/difftest/difftest.go
+++ b/internal/lsp/diff/difftest/difftest.go
@@ -207,12 +207,12 @@
// expand all the spans to full versions
// we need them all to have their line number and column
for _, tc := range TestCases {
- c := span.NewContentConverter("", []byte(tc.In))
+ tf := span.NewTokenFile("", []byte(tc.In))
for i := range tc.Edits {
- tc.Edits[i].Span, _ = tc.Edits[i].Span.WithAll(c)
+ tc.Edits[i].Span, _ = tc.Edits[i].Span.WithAll(tf)
}
for i := range tc.LineEdits {
- tc.LineEdits[i].Span, _ = tc.LineEdits[i].Span.WithAll(c)
+ tc.LineEdits[i].Span, _ = tc.LineEdits[i].Span.WithAll(tf)
}
}
}
diff --git a/internal/lsp/link.go b/internal/lsp/link.go
index dcb9217..7bb09b4 100644
--- a/internal/lsp/link.go
+++ b/internal/lsp/link.go
@@ -269,12 +269,12 @@
}
case source.Mod:
s, e := int(start), int(end)
- line, col, err := m.Converter.ToPosition(s)
+ line, col, err := span.ToPosition(m.TokFile, s)
if err != nil {
return protocol.DocumentLink{}, err
}
start := span.NewPoint(line, col, s)
- line, col, err = m.Converter.ToPosition(e)
+ line, col, err = span.ToPosition(m.TokFile, e)
if err != nil {
return protocol.DocumentLink{}, err
}
diff --git a/internal/lsp/lsp_test.go b/internal/lsp/lsp_test.go
index 636019f..c7cd530 100644
--- a/internal/lsp/lsp_test.go
+++ b/internal/lsp/lsp_test.go
@@ -1046,7 +1046,7 @@
if content, ok := res[uri]; ok {
m = &protocol.ColumnMapper{
URI: uri,
- Converter: span.NewContentConverter(
+ TokFile: span.NewTokenFile(
uri.Filename(), []byte(content)),
Content: []byte(content),
}
@@ -1284,11 +1284,11 @@
f := fset.AddFile(fname, -1, len(test.text))
f.SetLinesForContent([]byte(test.text))
uri := span.URIFromPath(fname)
- converter := span.NewContentConverter(fname, []byte(test.text))
+ tf := span.NewTokenFile(fname, []byte(test.text))
mapper := &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: []byte(test.text),
+ URI: uri,
+ TokFile: tf,
+ Content: []byte(test.text),
}
got, err := mapper.Point(test.pos)
if err != nil && test.want != -1 {
diff --git a/internal/lsp/mod/hover.go b/internal/lsp/mod/hover.go
index 6084ef9..96bc0c0 100644
--- a/internal/lsp/mod/hover.go
+++ b/internal/lsp/mod/hover.go
@@ -43,7 +43,7 @@
if err != nil {
return nil, fmt.Errorf("computing cursor position: %w", err)
}
- hoverRng, err := spn.Range(pm.Mapper.Converter)
+ hoverRng, err := spn.Range(pm.Mapper.TokFile)
if err != nil {
return nil, fmt.Errorf("computing hover range: %w", err)
}
diff --git a/internal/lsp/protocol/span.go b/internal/lsp/protocol/span.go
index 39e0373..4a364d7 100644
--- a/internal/lsp/protocol/span.go
+++ b/internal/lsp/protocol/span.go
@@ -8,14 +8,15 @@
import (
"fmt"
+ "go/token"
"golang.org/x/tools/internal/span"
)
type ColumnMapper struct {
- URI span.URI
- Converter *span.TokenConverter
- Content []byte
+ URI span.URI
+ TokFile *token.File
+ Content []byte
}
func URIFromSpanURI(uri span.URI) DocumentURI {
@@ -42,7 +43,7 @@
if span.CompareURI(m.URI, s.URI()) != 0 {
return Range{}, fmt.Errorf("column mapper is for file %q instead of %q", m.URI, s.URI())
}
- s, err := s.WithAll(m.Converter)
+ s, err := s.WithAll(m.TokFile)
if err != nil {
return Range{}, err
}
@@ -81,7 +82,7 @@
if err != nil {
return span.Span{}, err
}
- return span.New(m.URI, start, end).WithAll(m.Converter)
+ return span.New(m.URI, start, end).WithAll(m.TokFile)
}
func (m *ColumnMapper) RangeToSpanRange(r Range) (span.Range, error) {
@@ -89,7 +90,7 @@
if err != nil {
return span.Range{}, err
}
- return spn.Range(m.Converter)
+ return spn.Range(m.TokFile)
}
func (m *ColumnMapper) PointSpan(p Position) (span.Span, error) {
@@ -97,12 +98,12 @@
if err != nil {
return span.Span{}, err
}
- return span.New(m.URI, start, start).WithAll(m.Converter)
+ return span.New(m.URI, start, start).WithAll(m.TokFile)
}
func (m *ColumnMapper) Point(p Position) (span.Point, error) {
line := int(p.Line) + 1
- offset, err := m.Converter.ToOffset(line, 1)
+ offset, err := span.ToOffset(m.TokFile, line, 1)
if err != nil {
return span.Point{}, err
}
diff --git a/internal/lsp/safetoken/safetoken.go b/internal/lsp/safetoken/safetoken.go
index 94153e9..6898df0 100644
--- a/internal/lsp/safetoken/safetoken.go
+++ b/internal/lsp/safetoken/safetoken.go
@@ -13,24 +13,24 @@
// Offset returns tok.Offset(pos), but first checks that the pos is in range
// for the given file.
-func Offset(tok *token.File, pos token.Pos) (int, error) {
- if !InRange(tok, pos) {
- return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
+func Offset(tf *token.File, pos token.Pos) (int, error) {
+ if !InRange(tf, pos) {
+ return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tf.Base(), tf.Base()+tf.Size())
}
- return tok.Offset(pos), nil
+ return tf.Offset(pos), nil
}
// Pos returns tok.Pos(offset), but first checks that the offset is valid for
// the given file.
-func Pos(tok *token.File, offset int) (token.Pos, error) {
- if offset < 0 || offset > tok.Size() {
- return token.NoPos, fmt.Errorf("offset %v is not in range for file of size %v", offset, tok.Size())
+func Pos(tf *token.File, offset int) (token.Pos, error) {
+ if offset < 0 || offset > tf.Size() {
+ return token.NoPos, fmt.Errorf("offset %v is not in range for file of size %v", offset, tf.Size())
}
- return tok.Pos(offset), nil
+ return tf.Pos(offset), nil
}
// InRange reports whether the given position is in the given token.File.
-func InRange(tok *token.File, pos token.Pos) bool {
- size := tok.Pos(tok.Size())
- return int(pos) >= tok.Base() && pos <= size
+func InRange(tf *token.File, pos token.Pos) bool {
+ size := tf.Pos(tf.Size())
+ return int(pos) >= tf.Base() && pos <= size
}
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index d1c2a24..7ff8b6c 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -260,9 +260,12 @@
return strings.Join(msg, " ")
}
-// avoid panic in token.PostionFor() when typing at the end of the file
-func locInRange(f *token.File, loc token.Pos) bool {
- return f.Base() <= int(loc) && int(loc) < f.Base()+f.Size()
+// locInRange reports whether loc is within tf; used to avoid a panic in token.PositionFor() when typing at the end of the file
+//
+// TODO: this looks wrong: the second check should be int(loc) <= tf.Base()+tf.Size()?
+// Can we just use safetoken.InRange?
+func locInRange(tf *token.File, loc token.Pos) bool {
+ return tf.Base() <= int(loc) && int(loc) < tf.Base()+tf.Size()
}
// find the line in the source
diff --git a/internal/lsp/source/completion/completion.go b/internal/lsp/source/completion/completion.go
index 4db3e30..11b9497 100644
--- a/internal/lsp/source/completion/completion.go
+++ b/internal/lsp/source/completion/completion.go
@@ -449,7 +449,7 @@
if err != nil {
return nil, nil, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, nil, err
}
diff --git a/internal/lsp/source/completion/package.go b/internal/lsp/source/completion/package.go
index 3752384..ed9f8fd 100644
--- a/internal/lsp/source/completion/package.go
+++ b/internal/lsp/source/completion/package.go
@@ -40,7 +40,7 @@
if err != nil {
return nil, nil, err
}
- rng, err := cursorSpan.Range(pgf.Mapper.Converter)
+ rng, err := cursorSpan.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, nil, err
}
diff --git a/internal/lsp/source/format.go b/internal/lsp/source/format.go
index d8e66ab..1dd914e 100644
--- a/internal/lsp/source/format.go
+++ b/internal/lsp/source/format.go
@@ -22,7 +22,6 @@
"golang.org/x/tools/internal/lsp/lsppos"
"golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/safetoken"
- "golang.org/x/tools/internal/span"
)
// Format formats a file with a given range.
@@ -204,7 +203,7 @@
if err != nil {
return nil, err
}
- return ProtocolEditsFromSource([]byte(left), edits, pgf.Mapper.Converter)
+ return ProtocolEditsFromSource([]byte(left), edits, pgf.Mapper.TokFile)
}
// importPrefix returns the prefix of the given file content through the final
@@ -322,11 +321,11 @@
// ProtocolEditsFromSource converts text edits to LSP edits using the original
// source.
-func ProtocolEditsFromSource(src []byte, edits []diff.TextEdit, converter *span.TokenConverter) ([]protocol.TextEdit, error) {
+func ProtocolEditsFromSource(src []byte, edits []diff.TextEdit, tf *token.File) ([]protocol.TextEdit, error) {
m := lsppos.NewMapper(src)
var result []protocol.TextEdit
for _, edit := range edits {
- spn, err := edit.Span.WithOffset(converter)
+ spn, err := edit.Span.WithOffset(tf)
if err != nil {
return nil, fmt.Errorf("computing offsets: %v", err)
}
diff --git a/internal/lsp/source/highlight.go b/internal/lsp/source/highlight.go
index ea54b7c..a3887af 100644
--- a/internal/lsp/source/highlight.go
+++ b/internal/lsp/source/highlight.go
@@ -37,7 +37,7 @@
if err != nil {
return nil, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, err
}
diff --git a/internal/lsp/source/hover.go b/internal/lsp/source/hover.go
index dff9060..d764cdd 100644
--- a/internal/lsp/source/hover.go
+++ b/internal/lsp/source/hover.go
@@ -146,7 +146,7 @@
if err != nil {
return 0, MappedRange{}, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return 0, MappedRange{}, err
}
diff --git a/internal/lsp/source/identifier.go b/internal/lsp/source/identifier.go
index d5aa0a5..f2938f2 100644
--- a/internal/lsp/source/identifier.go
+++ b/internal/lsp/source/identifier.go
@@ -108,7 +108,7 @@
if err != nil {
return nil, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, err
}
diff --git a/internal/lsp/source/identifier_test.go b/internal/lsp/source/identifier_test.go
index 9bbdf58..fbb52fb 100644
--- a/internal/lsp/source/identifier_test.go
+++ b/internal/lsp/source/identifier_test.go
@@ -101,9 +101,9 @@
// coordinates in the file fname of fset.
func posAt(line, column int, fset *token.FileSet, fname string) token.Pos {
var tok *token.File
- fset.Iterate(func(f *token.File) bool {
- if f.Name() == fname {
- tok = f
+ fset.Iterate(func(tf *token.File) bool {
+ if tf.Name() == fname {
+ tok = tf
return false
}
return true
diff --git a/internal/lsp/source/implementation.go b/internal/lsp/source/implementation.go
index 3df1699..b122a5d 100644
--- a/internal/lsp/source/implementation.go
+++ b/internal/lsp/source/implementation.go
@@ -231,7 +231,7 @@
if err != nil {
return nil, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, err
}
diff --git a/internal/lsp/source/rename.go b/internal/lsp/source/rename.go
index 8e40694..1cfba80 100644
--- a/internal/lsp/source/rename.go
+++ b/internal/lsp/source/rename.go
@@ -171,11 +171,11 @@
if err != nil {
return nil, err
}
- converter := span.NewContentConverter(uri.Filename(), data)
+ tf := span.NewTokenFile(uri.Filename(), data)
m := &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: data,
+ URI: uri,
+ TokFile: tf,
+ Content: data,
}
// Sort the edits first.
diff.SortTextEdits(edits)
diff --git a/internal/lsp/source/rename_check.go b/internal/lsp/source/rename_check.go
index 499e587..b17f9b8 100644
--- a/internal/lsp/source/rename_check.go
+++ b/internal/lsp/source/rename_check.go
@@ -865,10 +865,10 @@
}
// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
+func tokenFileContainsPos(tf *token.File, pos token.Pos) bool {
p := int(pos)
- base := f.Base()
- return base <= p && p < base+f.Size()
+ base := tf.Base()
+ return base <= p && p < base+tf.Size()
}
func objectKind(obj types.Object) string {
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
index d5def0b..1d35931 100644
--- a/internal/lsp/source/signature_help.go
+++ b/internal/lsp/source/signature_help.go
@@ -28,7 +28,7 @@
if err != nil {
return nil, 0, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, 0, err
}
diff --git a/internal/lsp/source/stub.go b/internal/lsp/source/stub.go
index 148c2ca..f09bbab 100644
--- a/internal/lsp/source/stub.go
+++ b/internal/lsp/source/stub.go
@@ -85,7 +85,7 @@
}
var edits []analysis.TextEdit
for _, edit := range diffEdits {
- rng, err := edit.Span.Range(parsedConcreteFile.Mapper.Converter)
+ rng, err := edit.Span.Range(parsedConcreteFile.Mapper.TokFile)
if err != nil {
return nil, err
}
@@ -218,7 +218,7 @@
if err != nil {
return nil, 0, err
}
- rng, err := spn.Range(pgf.Mapper.Converter)
+ rng, err := spn.Range(pgf.Mapper.TokFile)
if err != nil {
return nil, 0, err
}
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
index e391044..06720d7 100644
--- a/internal/lsp/source/util.go
+++ b/internal/lsp/source/util.go
@@ -42,7 +42,7 @@
if tf := fset.File(start); tf == nil {
bug.Report("nil file", nil)
} else {
- mapped := m.Converter.TokFile.Name()
+ mapped := m.TokFile.Name()
adjusted := tf.PositionFor(start, true) // adjusted position
if adjusted.Filename != mapped {
bug.Reportf("mapped file %q does not match start position file %q", mapped, adjusted.Filename)
@@ -81,7 +81,7 @@
if s.m == nil {
return span.Span{}, bug.Errorf("invalid range")
}
- return span.FileSpan(s.spanRange.TokFile, s.m.Converter, s.spanRange.Start, s.spanRange.End)
+ return span.FileSpan(s.spanRange.TokFile, s.m.TokFile, s.spanRange.Start, s.spanRange.End)
}
// URI returns the URI of the edited file.
@@ -570,12 +570,12 @@
// ByteOffsetsToRange creates a range spanning start and end.
func ByteOffsetsToRange(m *protocol.ColumnMapper, uri span.URI, start, end int) (protocol.Range, error) {
- line, col, err := m.Converter.ToPosition(start)
+ line, col, err := span.ToPosition(m.TokFile, start)
if err != nil {
return protocol.Range{}, err
}
s := span.NewPoint(line, col, start)
- line, col, err = m.Converter.ToPosition(end)
+ line, col, err = span.ToPosition(m.TokFile, end)
if err != nil {
return protocol.Range{}, err
}
diff --git a/internal/lsp/source/util_test.go b/internal/lsp/source/util_test.go
index 2920774..5d4e98f 100644
--- a/internal/lsp/source/util_test.go
+++ b/internal/lsp/source/util_test.go
@@ -34,9 +34,9 @@
eURI := span.URIFromPath(ef.Name())
mapper := &protocol.ColumnMapper{
- URI: eURI,
- Converter: span.NewTokenConverter(ef),
- Content: edited,
+ URI: eURI,
+ TokFile: ef,
+ Content: edited,
}
start := cf.Pos(bytes.Index(compiled, []byte("a𐐀b")))
diff --git a/internal/lsp/tests/tests.go b/internal/lsp/tests/tests.go
index 6a77fc7..9a8b76d 100644
--- a/internal/lsp/tests/tests.go
+++ b/internal/lsp/tests/tests.go
@@ -997,11 +997,11 @@
if err != nil {
return nil, err
}
- converter := span.NewContentConverter(uri.Filename(), content)
+ tf := span.NewTokenFile(uri.Filename(), content)
data.mappers[uri] = &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: content,
+ URI: uri,
+ TokFile: tf,
+ Content: content,
}
}
return data.mappers[uri], nil
diff --git a/internal/lsp/text_synchronization.go b/internal/lsp/text_synchronization.go
index 59fc29c..157ae23 100644
--- a/internal/lsp/text_synchronization.go
+++ b/internal/lsp/text_synchronization.go
@@ -340,11 +340,11 @@
}
for _, change := range changes {
// Make sure to update column mapper along with the content.
- converter := span.NewContentConverter(uri.Filename(), content)
+ tf := span.NewTokenFile(uri.Filename(), content)
m := &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: content,
+ URI: uri,
+ TokFile: tf,
+ Content: content,
}
if change.Range == nil {
return nil, fmt.Errorf("%w: unexpected nil range for change", jsonrpc2.ErrInternal)
diff --git a/internal/lsp/work/completion.go b/internal/lsp/work/completion.go
index 93b6e78..300d13c 100644
--- a/internal/lsp/work/completion.go
+++ b/internal/lsp/work/completion.go
@@ -32,7 +32,7 @@
if err != nil {
return nil, fmt.Errorf("computing cursor position: %w", err)
}
- rng, err := spn.Range(pw.Mapper.Converter)
+ rng, err := spn.Range(pw.Mapper.TokFile)
if err != nil {
return nil, fmt.Errorf("computing range: %w", err)
}
diff --git a/internal/lsp/work/hover.go b/internal/lsp/work/hover.go
index abb7055..5fa6828 100644
--- a/internal/lsp/work/hover.go
+++ b/internal/lsp/work/hover.go
@@ -34,7 +34,7 @@
if err != nil {
return nil, fmt.Errorf("computing cursor position: %w", err)
}
- hoverRng, err := spn.Range(pw.Mapper.Converter)
+ hoverRng, err := spn.Range(pw.Mapper.TokFile)
if err != nil {
return nil, fmt.Errorf("computing hover range: %w", err)
}
diff --git a/internal/span/span.go b/internal/span/span.go
index fdf0644..502145b 100644
--- a/internal/span/span.go
+++ b/internal/span/span.go
@@ -9,6 +9,7 @@
import (
"encoding/json"
"fmt"
+ "go/token"
"path"
)
@@ -208,56 +209,56 @@
}
}
-func (s Span) WithPosition(c *TokenConverter) (Span, error) {
- if err := s.update(c, true, false); err != nil {
+func (s Span) WithPosition(tf *token.File) (Span, error) {
+ if err := s.update(tf, true, false); err != nil {
return Span{}, err
}
return s, nil
}
-func (s Span) WithOffset(c *TokenConverter) (Span, error) {
- if err := s.update(c, false, true); err != nil {
+func (s Span) WithOffset(tf *token.File) (Span, error) {
+ if err := s.update(tf, false, true); err != nil {
return Span{}, err
}
return s, nil
}
-func (s Span) WithAll(c *TokenConverter) (Span, error) {
- if err := s.update(c, true, true); err != nil {
+func (s Span) WithAll(tf *token.File) (Span, error) {
+ if err := s.update(tf, true, true); err != nil {
return Span{}, err
}
return s, nil
}
-func (s *Span) update(c *TokenConverter, withPos, withOffset bool) error {
+func (s *Span) update(tf *token.File, withPos, withOffset bool) error {
if !s.IsValid() {
return fmt.Errorf("cannot add information to an invalid span")
}
if withPos && !s.HasPosition() {
- if err := s.v.Start.updatePosition(c); err != nil {
+ if err := s.v.Start.updatePosition(tf); err != nil {
return err
}
if s.v.End.Offset == s.v.Start.Offset {
s.v.End = s.v.Start
- } else if err := s.v.End.updatePosition(c); err != nil {
+ } else if err := s.v.End.updatePosition(tf); err != nil {
return err
}
}
if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) {
- if err := s.v.Start.updateOffset(c); err != nil {
+ if err := s.v.Start.updateOffset(tf); err != nil {
return err
}
if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column {
s.v.End.Offset = s.v.Start.Offset
- } else if err := s.v.End.updateOffset(c); err != nil {
+ } else if err := s.v.End.updateOffset(tf); err != nil {
return err
}
}
return nil
}
-func (p *point) updatePosition(c *TokenConverter) error {
- line, col, err := c.ToPosition(p.Offset)
+func (p *point) updatePosition(tf *token.File) error {
+ line, col, err := ToPosition(tf, p.Offset)
if err != nil {
return err
}
@@ -266,8 +267,8 @@
return nil
}
-func (p *point) updateOffset(c *TokenConverter) error {
- offset, err := c.ToOffset(p.Line, p.Column)
+func (p *point) updateOffset(tf *token.File) error {
+ offset, err := ToOffset(tf, p.Line, p.Column)
if err != nil {
return err
}
diff --git a/internal/span/span_test.go b/internal/span/span_test.go
index 3956565..cff59c3 100644
--- a/internal/span/span_test.go
+++ b/internal/span/span_test.go
@@ -62,7 +62,7 @@
// lines creates a new tokenConverter for a file with 1000 lines, each width
// bytes wide.
-func lines(width int) *span.TokenConverter {
+func lines(width int) *token.File {
fset := token.NewFileSet()
f := fset.AddFile("", -1, 1000*width)
var lines []int
@@ -70,5 +70,5 @@
lines = append(lines, i*width)
}
f.SetLines(lines)
- return span.NewTokenConverter(f)
+ return f
}
diff --git a/internal/span/token.go b/internal/span/token.go
index 6e62776..af01d7b 100644
--- a/internal/span/token.go
+++ b/internal/span/token.go
@@ -23,46 +23,26 @@
TokFile *token.File
}
-// TokenConverter converts between offsets and (col, row) using a token.File.
-//
-// TODO(rfindley): eliminate TokenConverter in favor of just operating on
-// token.File.
-type TokenConverter struct {
- // TokFile is exported for invariant checking; it may be nil in the case of
- // an invalid converter.
- TokFile *token.File
-}
-
// NewRange creates a new Range from a FileSet and two positions.
// To represent a point pass a 0 as the end pos.
func NewRange(fset *token.FileSet, start, end token.Pos) Range {
- file := fset.File(start)
- if file == nil {
+ tf := fset.File(start)
+ if tf == nil {
bug.Reportf("nil file")
}
return Range{
Start: start,
End: end,
- TokFile: file,
+ TokFile: tf,
}
}
-// NewTokenConverter returns an implementation of Converter backed by a
-// token.File.
-func NewTokenConverter(f *token.File) *TokenConverter {
- if f == nil {
- bug.Reportf("nil file")
- }
- return &TokenConverter{TokFile: f}
-}
-
-// NewContentConverter returns an implementation of Converter for the
-// given file content.
-func NewContentConverter(filename string, content []byte) *TokenConverter {
+// NewTokenFile returns a token.File for the given file content.
+func NewTokenFile(filename string, content []byte) *token.File {
fset := token.NewFileSet()
f := fset.AddFile(filename, -1, len(content))
f.SetLinesForContent(content)
- return NewTokenConverter(f)
+ return f
}
// IsPoint returns true if the range represents a single point.
@@ -74,33 +54,34 @@
// It will fill in all the members of the Span, calculating the line and column
// information.
func (r Range) Span() (Span, error) {
- return FileSpan(r.TokFile, NewTokenConverter(r.TokFile), r.Start, r.End)
+ return FileSpan(r.TokFile, r.TokFile, r.Start, r.End)
}
-// FileSpan returns a span within tok, using converter to translate between
-// offsets and positions.
+// FileSpan returns a span within the file referenced by start and end, using a
+// token.File to translate between offsets and positions.
//
-// If non-nil, the converter must be a converter for the source file pointed to
-// by start, after accounting for //line directives, as it will be used to
-// compute offsets for the resulting Span.
-func FileSpan(tok *token.File, converter *TokenConverter, start, end token.Pos) (Span, error) {
+// The start and end positions must be contained within posFile, though due to
+// line directives they may reference positions in another file. If srcFile is
+// provided, it is used to map the line:column positions referenced by start
+// and end to offsets in the corresponding file.
+func FileSpan(posFile, srcFile *token.File, start, end token.Pos) (Span, error) {
if !start.IsValid() {
return Span{}, fmt.Errorf("start pos is not valid")
}
- if tok == nil {
+ if posFile == nil {
return Span{}, bug.Errorf("missing file association") // should never get here with a nil file
}
var s Span
var err error
var startFilename string
- startFilename, s.v.Start.Line, s.v.Start.Column, err = position(tok, start)
+ startFilename, s.v.Start.Line, s.v.Start.Column, err = position(posFile, start)
if err != nil {
return Span{}, err
}
s.v.URI = URIFromPath(startFilename)
if end.IsValid() {
var endFilename string
- endFilename, s.v.End.Line, s.v.End.Column, err = position(tok, end)
+ endFilename, s.v.End.Line, s.v.End.Column, err = position(posFile, end)
if err != nil {
return Span{}, err
}
@@ -113,32 +94,33 @@
s.v.Start.clean()
s.v.End.clean()
s.v.clean()
- if converter == nil {
- converter = &TokenConverter{tok}
+ tf := posFile
+ if srcFile != nil {
+ tf = srcFile
}
- if startFilename != converter.TokFile.Name() {
- return Span{}, bug.Errorf("must supply Converter for file %q containing lines from %q", tok.Name(), startFilename)
+ if startFilename != tf.Name() {
+ return Span{}, bug.Errorf("must supply Converter for file %q", startFilename)
}
- return s.WithOffset(converter)
+ return s.WithOffset(tf)
}
-func position(f *token.File, pos token.Pos) (string, int, int, error) {
- off, err := offset(f, pos)
+func position(tf *token.File, pos token.Pos) (string, int, int, error) {
+ off, err := offset(tf, pos)
if err != nil {
return "", 0, 0, err
}
- return positionFromOffset(f, off)
+ return positionFromOffset(tf, off)
}
-func positionFromOffset(f *token.File, offset int) (string, int, int, error) {
- if offset > f.Size() {
- return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, f.Size())
+func positionFromOffset(tf *token.File, offset int) (string, int, int, error) {
+ if offset > tf.Size() {
+ return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, tf.Size())
}
- pos := f.Pos(offset)
- p := f.Position(pos)
+ pos := tf.Pos(offset)
+ p := tf.Position(pos)
// TODO(golang/go#41029): Consider returning line, column instead of line+1, 1 if
// the file's last character is not a newline.
- if offset == f.Size() {
+ if offset == tf.Size() {
return p.Filename, p.Line + 1, 1, nil
}
return p.Filename, p.Line, p.Column, nil
@@ -146,46 +128,49 @@
// offset is a copy of the Offset function in go/token, but with the adjustment
// that it does not panic on invalid positions.
-func offset(f *token.File, pos token.Pos) (int, error) {
- if int(pos) < f.Base() || int(pos) > f.Base()+f.Size() {
- return 0, fmt.Errorf("invalid pos: %d not in [%d, %d]", pos, f.Base(), f.Base()+f.Size())
+func offset(tf *token.File, pos token.Pos) (int, error) {
+ if int(pos) < tf.Base() || int(pos) > tf.Base()+tf.Size() {
+ return 0, fmt.Errorf("invalid pos: %d not in [%d, %d]", pos, tf.Base(), tf.Base()+tf.Size())
}
- return int(pos) - f.Base(), nil
+ return int(pos) - tf.Base(), nil
}
// Range converts a Span to a Range that represents the Span for the supplied
// File.
-func (s Span) Range(converter *TokenConverter) (Range, error) {
- s, err := s.WithOffset(converter)
+func (s Span) Range(tf *token.File) (Range, error) {
+ s, err := s.WithOffset(tf)
if err != nil {
return Range{}, err
}
- file := converter.TokFile
// go/token will panic if the offset is larger than the file's size,
// so check here to avoid panicking.
- if s.Start().Offset() > file.Size() {
- return Range{}, bug.Errorf("start offset %v is past the end of the file %v", s.Start(), file.Size())
+ if s.Start().Offset() > tf.Size() {
+ return Range{}, bug.Errorf("start offset %v is past the end of the file %v", s.Start(), tf.Size())
}
- if s.End().Offset() > file.Size() {
- return Range{}, bug.Errorf("end offset %v is past the end of the file %v", s.End(), file.Size())
+ if s.End().Offset() > tf.Size() {
+ return Range{}, bug.Errorf("end offset %v is past the end of the file %v", s.End(), tf.Size())
}
return Range{
- Start: file.Pos(s.Start().Offset()),
- End: file.Pos(s.End().Offset()),
- TokFile: file,
+ Start: tf.Pos(s.Start().Offset()),
+ End: tf.Pos(s.End().Offset()),
+ TokFile: tf,
}, nil
}
-func (l *TokenConverter) ToPosition(offset int) (int, int, error) {
- _, line, col, err := positionFromOffset(l.TokFile, offset)
+// ToPosition converts a byte offset in the file corresponding to tf into
+// 1-based line and UTF-8 column indexes.
+func ToPosition(tf *token.File, offset int) (int, int, error) {
+ _, line, col, err := positionFromOffset(tf, offset)
return line, col, err
}
-func (l *TokenConverter) ToOffset(line, col int) (int, error) {
+// ToOffset converts a 1-based line and UTF-8 column index into a byte offset in
+// the file corresponding to tf.
+func ToOffset(tf *token.File, line, col int) (int, error) {
if line < 0 {
return -1, fmt.Errorf("line is not valid")
}
- lineMax := l.TokFile.LineCount() + 1
+ lineMax := tf.LineCount() + 1
if line > lineMax {
return -1, fmt.Errorf("line is beyond end of file %v", lineMax)
} else if line == lineMax {
@@ -193,14 +178,14 @@
return -1, fmt.Errorf("column is beyond end of file")
}
// at the end of the file, allowing for a trailing eol
- return l.TokFile.Size(), nil
+ return tf.Size(), nil
}
- pos := l.TokFile.LineStart(line)
+ pos := tf.LineStart(line)
if !pos.IsValid() {
return -1, fmt.Errorf("line is not in file")
}
// we assume that column is in bytes here, and that the first byte of a
// line is at column 1
pos += token.Pos(col - 1)
- return offset(l.TokFile, pos)
+ return offset(tf, pos)
}
diff --git a/internal/span/token_test.go b/internal/span/token_test.go
index 5f48d68..1e0b53e 100644
--- a/internal/span/token_test.go
+++ b/internal/span/token_test.go
@@ -48,14 +48,13 @@
}
for _, test := range tokenTests {
f := files[test.URI()]
- c := span.NewTokenConverter(f)
t.Run(path.Base(f.Name()), func(t *testing.T) {
- checkToken(t, c, span.New(
+ checkToken(t, f, span.New(
test.URI(),
span.NewPoint(test.Start().Line(), test.Start().Column(), 0),
span.NewPoint(test.End().Line(), test.End().Column(), 0),
), test)
- checkToken(t, c, span.New(
+ checkToken(t, f, span.New(
test.URI(),
span.NewPoint(0, 0, test.Start().Offset()),
span.NewPoint(0, 0, test.End().Offset()),
@@ -64,8 +63,8 @@
}
}
-func checkToken(t *testing.T, c *span.TokenConverter, in, expect span.Span) {
- rng, err := in.Range(c)
+func checkToken(t *testing.T, f *token.File, in, expect span.Span) {
+ rng, err := in.Range(f)
if err != nil {
t.Error(err)
}