internal/lsp/cache: refactor Go file parsing

Hopefully this cleans up some of the details around Go file parsing
that have always confused me.

- parser.ParseFile never returns an error other than a
scanner.ErrorList when it is given a []byte source. Encode that in
ParsedGoFile (see the sketch below).
- parser.ParseFile never returns a nil file. Eliminate the code path
that handled that case.
- Explain why we might fail to find a token.File for a parsed file.
- Attempting to fix parse errors is quite expensive even when there
are none to fix. Skip the fix-up passes when parsing succeeded.
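
To make the new contract concrete, here is a minimal, self-contained
sketch of the same flow using only the standard go/parser, go/scanner,
and go/token APIs. The parseWithFixups helper and the broken.go input
are made up for illustration; this is not the actual gopls
implementation in internal/lsp/cache/parse.go:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/scanner"
        "go/token"
    )

    // parseWithFixups is a hypothetical helper mirroring the flow
    // described above: parse a []byte, type-assert any error to
    // scanner.ErrorList, recover a token.File when one cannot be found
    // via file.Pos, and only attempt (expensive) fix-ups when there
    // were parse errors.
    func parseWithFixups(filename string, src []byte) (*ast.File, *token.File, scanner.ErrorList) {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, filename, src, parser.AllErrors|parser.ParseComments)

        var parseErr scanner.ErrorList
        if err != nil {
            // src is a []byte, so the only error ParseFile can return
            // is a scanner.ErrorList, and file is never nil.
            parseErr = err.(scanner.ErrorList)
        }

        tok := fset.File(file.Pos())
        if tok == nil {
            // file.Pos is the position of the package declaration;
            // without one we may not find the token.File that
            // ParseFile created, so recreate it with the correct line
            // information.
            tok = fset.AddFile(filename, -1, len(src))
            tok.SetLinesForContent(src)
        }

        if parseErr != nil {
            // This is where the (expensive) AST/source fix-up passes
            // would run; here we just report the syntax errors.
            for _, e := range parseErr {
                fmt.Println(e)
            }
        }
        return file, tok, parseErr
    }

    func main() {
        // A file with a valid package clause but a dangling brace.
        file, tok, errs := parseWithFixups("broken.go", []byte("package p\nfunc f() {"))
        fmt.Println(file.Name.Name, tok.LineCount(), len(errs))
    }

Typing ParseErr as scanner.ErrorList also lets consumers range over
the individual syntax errors directly, rather than type-asserting an
opaque error value.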

Change-Id: I87e082ed52c98a438bc7fd3b29e1a486f32fb347
Reviewed-on: https://go-review.googlesource.com/c/tools/+/297069
Trust: Heschi Kreinick <heschi@google.com>
Run-TryBot: Heschi Kreinick <heschi@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Rebecca Stambler <rstambler@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>

diff --git a/internal/lsp/cache/check.go b/internal/lsp/cache/check.go
index e0c7cee..e64e7c3 100644
--- a/internal/lsp/cache/check.go
+++ b/internal/lsp/cache/check.go
@@ -9,6 +9,7 @@
 	"context"
 	"fmt"
 	"go/ast"
+	"go/scanner"
 	"go/types"
 	"path"
 	"path/filepath"
@@ -302,7 +303,7 @@
 	}
 	var (
 		files        = make([]*ast.File, len(m.compiledGoFiles))
-		parseErrors  = make([]error, len(m.compiledGoFiles))
+		parseErrors  = make([]scanner.ErrorList, len(m.compiledGoFiles))
 		actualErrors = make([]error, len(m.compiledGoFiles))
 		wg           sync.WaitGroup
 
diff --git a/internal/lsp/cache/parse.go b/internal/lsp/cache/parse.go
index edbd519..e133235 100644
--- a/internal/lsp/cache/parse.go
+++ b/internal/lsp/cache/parse.go
@@ -246,7 +246,7 @@
 	if fh.Kind() != source.Go {
 		return &parseGoData{err: errors.Errorf("cannot parse non-Go file %s", fh.URI())}
 	}
-	buf, err := fh.Read()
+	src, err := fh.Read()
 	if err != nil {
 		return &parseGoData{err: err}
 	}
@@ -255,37 +255,32 @@
 	if mode == source.ParseHeader {
 		parserMode = parser.ImportsOnly | parser.ParseComments
 	}
-	file, parseError := parser.ParseFile(fset, fh.URI().Filename(), buf, parserMode)
-	var tok *token.File
-	var fixed bool
-	if file != nil {
-		tok = fset.File(file.Pos())
-		if tok == nil {
-			tok = fset.AddFile(fh.URI().Filename(), -1, len(buf))
-			tok.SetLinesForContent(buf)
-			return &parseGoData{
-				parsed: &source.ParsedGoFile{
-					URI:  fh.URI(),
-					Mode: mode,
-					Src:  buf,
-					File: file,
-					Tok:  tok,
-					Mapper: &protocol.ColumnMapper{
-						URI:       fh.URI(),
-						Content:   buf,
-						Converter: span.NewTokenConverter(fset, tok),
-					},
-					ParseErr: parseError,
-				},
-			}
-		}
 
+	file, err := parser.ParseFile(fset, fh.URI().Filename(), src, parserMode)
+	var parseErr scanner.ErrorList
+	if err != nil {
+		// We passed a byte slice, so the only possible error is a parse error.
+		parseErr = err.(scanner.ErrorList)
+	}
+
+	tok := fset.File(file.Pos())
+	if tok == nil {
+		// file.Pos is the location of the package declaration. If there was
+		// none, we can't find the token.File that ParseFile created, and we
+		// have no choice but to recreate it.
+		tok = fset.AddFile(fh.URI().Filename(), -1, len(src))
+		tok.SetLinesForContent(src)
+	}
+
+	fixed := false
+	// If there were parse errors, attempt to fix them up.
+	if parseErr != nil {
 		// Fix any badly parsed parts of the AST.
-		fixed = fixAST(ctx, file, tok, buf)
+		fixed = fixAST(ctx, file, tok, src)
 
 		for i := 0; i < 10; i++ {
 			// Fix certain syntax errors that render the file unparseable.
-			newSrc := fixSrc(file, tok, buf)
+			newSrc := fixSrc(file, tok, src)
 			if newSrc == nil {
 				break
 			}
@@ -294,11 +289,11 @@
 			// it is likely we got stuck in a loop somehow. Log out a diff
 			// of the last changes we made to aid in debugging.
 			if i == 9 {
-				edits, err := myers.ComputeEdits(fh.URI(), string(buf), string(newSrc))
+				edits, err := myers.ComputeEdits(fh.URI(), string(src), string(newSrc))
 				if err != nil {
 					event.Error(ctx, "error generating fixSrc diff", err, tag.File.Of(tok.Name()))
 				} else {
-					unified := diff.ToUnified("before", "after", string(buf), edits)
+					unified := diff.ToUnified("before", "after", string(src), edits)
 					event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), tag.File.Of(tok.Name()))
 				}
 			}
@@ -307,44 +302,33 @@
 			if newFile != nil {
 				// Maintain the original parseError so we don't try formatting the doctored file.
 				file = newFile
-				buf = newSrc
+				src = newSrc
 				tok = fset.File(file.Pos())
 
-				fixed = fixAST(ctx, file, tok, buf)
+				fixed = fixAST(ctx, file, tok, src)
 			}
-
-		}
-
-		if mode == source.ParseExported {
-			trimAST(file)
 		}
 	}
 
-	// A missing file is always an error; use the parse error or make one up if there isn't one.
-	if file == nil {
-		if parseError == nil {
-			parseError = errors.Errorf("parsing %s failed with no parse error reported", fh.URI())
-		}
-		err = parseError
-	}
-	m := &protocol.ColumnMapper{
-		URI:       fh.URI(),
-		Converter: span.NewTokenConverter(fset, tok),
-		Content:   buf,
+	if mode == source.ParseExported {
+		trimAST(file)
 	}
 
 	return &parseGoData{
 		parsed: &source.ParsedGoFile{
-			URI:      fh.URI(),
-			Mode:     mode,
-			Src:      buf,
-			File:     file,
-			Tok:      tok,
-			Mapper:   m,
-			ParseErr: parseError,
+			URI:  fh.URI(),
+			Mode: mode,
+			Src:  src,
+			File: file,
+			Tok:  tok,
+			Mapper: &protocol.ColumnMapper{
+				URI:       fh.URI(),
+				Converter: span.NewTokenConverter(fset, tok),
+				Content:   src,
+			},
+			ParseErr: parseErr,
 		},
 		fixed: fixed,
-		err:   err,
 	}
 }
 
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
index 1d2c01d..2e06670 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/source/view.go
@@ -9,6 +9,7 @@
 	"context"
 	"fmt"
 	"go/ast"
+	"go/scanner"
 	"go/token"
 	"go/types"
 	"io"
@@ -262,7 +263,7 @@
 	// actual content of the file if we have fixed the AST.
 	Src      []byte
 	Mapper   *protocol.ColumnMapper
-	ParseErr error
+	ParseErr scanner.ErrorList
 }
 
 // A ParsedModule contains the results of parsing a go.mod file.