internal/lsp: refactor source package to use an interface

This change separates a cache package out of the
golang.org/x/tools/internal/lsp/source package. The source package now
uses an interface instead of a File struct, which will allow it to be
reused more easily. The cache package now contains the View and File
structs.

Change-Id: Ia2114e9dafc5214c8b21bceba3adae1c36b9799d
Reviewed-on: https://go-review.googlesource.com/c/152798
Reviewed-by: Ian Cottrell <iancottrell@google.com>
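
To illustrate the new shape (a hedged sketch; the diagnoseOne helper below is
hypothetical and not part of this change): exported functions such as
source.Diagnostics now accept the source.File interface, and *cache.File is the
concrete implementation the LSP server supplies, so other caching layers could
be swapped in.

```go
package lsp

import (
	"context"

	"golang.org/x/tools/internal/lsp/cache"
	"golang.org/x/tools/internal/lsp/source"
)

// diagnoseOne is a hypothetical helper showing the new calling convention:
// source.Diagnostics takes the source.File interface, and *cache.File is
// one implementation of it.
func diagnoseOne(ctx context.Context, v *cache.View, uri source.URI) (map[string][]source.Diagnostic, error) {
	f := v.GetFile(uri) // *cache.File implements source.File
	return source.Diagnostics(ctx, f)
}
```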
diff --git a/internal/lsp/cache/file.go b/internal/lsp/cache/file.go
new file mode 100644
index 0000000..b0dea34
--- /dev/null
+++ b/internal/lsp/cache/file.go
@@ -0,0 +1,126 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"io/ioutil"
+
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+// File holds all the information we know about a file.
+type File struct {
+	URI     source.URI
+	view    *View
+	active  bool
+	content []byte
+	ast     *ast.File
+	token   *token.File
+	pkg     *packages.Package
+}
+
+// SetContent sets the overlay contents for a file.
+// Setting it to nil will revert it to the on disk contents, and remove it
+// from the active set.
+func (f *File) SetContent(content []byte) {
+	f.view.mu.Lock()
+	defer f.view.mu.Unlock()
+	f.content = content
+	// the ast and token fields are invalid
+	f.ast = nil
+	f.token = nil
+	f.pkg = nil
+	// and we might need to update the overlay
+	switch {
+	case f.active && content == nil:
+		// we were active, and want to forget the content
+		f.active = false
+		if filename, err := f.URI.Filename(); err == nil {
+			delete(f.view.Config.Overlay, filename)
+		}
+		f.content = nil
+	case content != nil:
+		// an active overlay, update the map
+		f.active = true
+		if filename, err := f.URI.Filename(); err == nil {
+			f.view.Config.Overlay[filename] = f.content
+		}
+	}
+}
+
+// Read returns the contents of the file, reading it from file system if needed.
+func (f *File) Read() ([]byte, error) {
+	f.view.mu.Lock()
+	defer f.view.mu.Unlock()
+	return f.read()
+}
+
+func (f *File) GetFileSet() (*token.FileSet, error) {
+	if f.view.Config == nil {
+		return nil, fmt.Errorf("no config for file view")
+	}
+	if f.view.Config.Fset == nil {
+		return nil, fmt.Errorf("no fileset for file view config")
+	}
+	return f.view.Config.Fset, nil
+}
+
+func (f *File) GetToken() (*token.File, error) {
+	f.view.mu.Lock()
+	defer f.view.mu.Unlock()
+	if f.token == nil {
+		if err := f.view.parse(f.URI); err != nil {
+			return nil, err
+		}
+		if f.token == nil {
+			return nil, fmt.Errorf("failed to find or parse %v", f.URI)
+		}
+	}
+	return f.token, nil
+}
+
+func (f *File) GetAST() (*ast.File, error) {
+	f.view.mu.Lock()
+	defer f.view.mu.Unlock()
+	if f.ast == nil {
+		if err := f.view.parse(f.URI); err != nil {
+			return nil, err
+		}
+	}
+	return f.ast, nil
+}
+
+func (f *File) GetPackage() (*packages.Package, error) {
+	f.view.mu.Lock()
+	defer f.view.mu.Unlock()
+	if f.pkg == nil {
+		if err := f.view.parse(f.URI); err != nil {
+			return nil, err
+		}
+	}
+	return f.pkg, nil
+}
+
+// read is the internal part of Read that presumes the lock is already held
+func (f *File) read() ([]byte, error) {
+	if f.content != nil {
+		return f.content, nil
+	}
+	// we don't know the content yet, so read it
+	filename, err := f.URI.Filename()
+	if err != nil {
+		return nil, err
+	}
+	content, err := ioutil.ReadFile(filename)
+	if err != nil {
+		return nil, err
+	}
+	f.content = content
+	return f.content, nil
+}
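
A hedged usage sketch of the overlay behavior above (the path and the main
wrapper are hypothetical): non-nil content installs an overlay entry in the
view's packages.Config, and nil content removes it so the next Read falls back
to the on-disk file.

```go
package main

import (
	"golang.org/x/tools/internal/lsp/cache"
	"golang.org/x/tools/internal/lsp/source"
)

func main() {
	v := cache.NewView()
	// Hypothetical path, for illustration only.
	f := v.GetFile(source.ToURI("/tmp/example/main.go"))

	f.SetContent([]byte("package main\n")) // adds an entry to v.Config.Overlay
	edited, _ := f.Read()                  // served from the in-memory overlay

	f.SetContent(nil)     // removes the overlay; the file is no longer active
	onDisk, _ := f.Read() // re-read from disk
	_, _ = edited, onDisk
}
```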
diff --git a/internal/lsp/source/view.go b/internal/lsp/cache/view.go
similarity index 82%
rename from internal/lsp/source/view.go
rename to internal/lsp/cache/view.go
index 2433bce..6d109f7 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/cache/view.go
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-package source
+package cache
 
 import (
 	"fmt"
@@ -10,6 +10,7 @@
 	"sync"
 
 	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/source"
 )
 
 type View struct {
@@ -17,7 +18,7 @@
 
 	Config *packages.Config
 
-	files map[URI]*File
+	files map[source.URI]*File
 }
 
 func NewView() *View {
@@ -28,13 +29,13 @@
 			Tests:   true,
 			Overlay: make(map[string][]byte),
 		},
-		files: make(map[URI]*File),
+		files: make(map[source.URI]*File),
 	}
 }
 
 // GetFile returns a File for the given uri.
 // It will always succeed, adding the file to the managed set if needed.
-func (v *View) GetFile(uri URI) *File {
+func (v *View) GetFile(uri source.URI) *File {
 	v.mu.Lock()
 	f := v.getFile(uri)
 	v.mu.Unlock()
@@ -42,7 +43,7 @@
 }
 
 // getFile is the unlocked internal implementation of GetFile.
-func (v *View) getFile(uri URI) *File {
+func (v *View) getFile(uri source.URI) *File {
 	f, found := v.files[uri]
 	if !found {
 		f = &File{
@@ -54,7 +55,7 @@
 	return f
 }
 
-func (v *View) parse(uri URI) error {
+func (v *View) parse(uri source.URI) error {
 	path, err := uri.Filename()
 	if err != nil {
 		return err
@@ -71,7 +72,7 @@
 		for _, fAST := range pkg.Syntax {
 			// if a file was in multiple packages, which token/ast/pkg do we store
 			fToken := v.Config.Fset.File(fAST.Pos())
-			fURI := ToURI(fToken.Name())
+			fURI := source.ToURI(fToken.Name())
 			f := v.getFile(fURI)
 			f.token = fToken
 			f.ast = fAST
diff --git a/internal/lsp/diagnostics.go b/internal/lsp/diagnostics.go
index ef9f2e1..17fa889 100644
--- a/internal/lsp/diagnostics.go
+++ b/internal/lsp/diagnostics.go
@@ -5,40 +5,57 @@
 package lsp
 
 import (
+	"context"
 	"sort"
 
+	"golang.org/x/tools/internal/lsp/cache"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 )
 
-func toProtocolDiagnostics(v *source.View, diagnostics []source.Diagnostic) []protocol.Diagnostic {
+func (s *server) CacheAndDiagnose(ctx context.Context, uri protocol.DocumentURI, text string) {
+	f := s.view.GetFile(source.URI(uri))
+	f.SetContent([]byte(text))
+
+	go func() {
+		reports, err := source.Diagnostics(ctx, f)
+		if err != nil {
+			return // handle error?
+		}
+		for filename, diagnostics := range reports {
+			s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{
+				URI:         protocol.DocumentURI(source.ToURI(filename)),
+				Diagnostics: toProtocolDiagnostics(s.view, diagnostics),
+			})
+		}
+	}()
+}
+
+func toProtocolDiagnostics(v *cache.View, diagnostics []source.Diagnostic) []protocol.Diagnostic {
 	reports := []protocol.Diagnostic{}
 	for _, diag := range diagnostics {
-		tok := v.Config.Fset.File(diag.Range.Start)
+		f := v.GetFile(source.ToURI(diag.Filename))
+		tok, err := f.GetToken()
+		if err != nil {
+			continue // handle error?
+		}
+		pos := fromTokenPosition(tok, diag.Position)
+		if !pos.IsValid() {
+			continue // handle error?
+		}
 		reports = append(reports, protocol.Diagnostic{
-			Message:  diag.Message,
-			Range:    toProtocolRange(tok, diag.Range),
-			Severity: toProtocolSeverity(diag.Severity),
+			Message: diag.Message,
+			Range: toProtocolRange(tok, source.Range{
+				Start: pos,
+				End:   pos,
+			}),
+			Severity: protocol.SeverityError, // all diagnostics have error severity for now
 			Source:   "LSP",
 		})
 	}
 	return reports
 }
 
-func toProtocolSeverity(severity source.DiagnosticSeverity) protocol.DiagnosticSeverity {
-	switch severity {
-	case source.SeverityError:
-		return protocol.SeverityError
-	case source.SeverityWarning:
-		return protocol.SeverityWarning
-	case source.SeverityHint:
-		return protocol.SeverityHint
-	case source.SeverityInformation:
-		return protocol.SeverityInformation
-	}
-	return protocol.SeverityError // default
-}
-
 func sorted(d []protocol.Diagnostic) {
 	sort.Slice(d, func(i int, j int) bool {
 		if d[i].Range.Start.Line == d[j].Range.Start.Line {
diff --git a/internal/lsp/format.go b/internal/lsp/format.go
new file mode 100644
index 0000000..ef7f1d5
--- /dev/null
+++ b/internal/lsp/format.go
@@ -0,0 +1,60 @@
+package lsp
+
+import (
+	"context"
+	"go/token"
+
+	"golang.org/x/tools/internal/lsp/cache"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+// formatRange formats a document with a given range.
+func formatRange(ctx context.Context, v *cache.View, uri protocol.DocumentURI, rng *protocol.Range) ([]protocol.TextEdit, error) {
+	f := v.GetFile(source.URI(uri))
+	tok, err := f.GetToken()
+	if err != nil {
+		return nil, err
+	}
+	var r source.Range
+	if rng == nil {
+		r.Start = tok.Pos(0)
+		r.End = tok.Pos(tok.Size())
+	} else {
+		r = fromProtocolRange(tok, *rng)
+	}
+	content, err := f.Read()
+	if err != nil {
+		return nil, err
+	}
+	edits, err := source.Format(ctx, f, r)
+	if err != nil {
+		return nil, err
+	}
+	return toProtocolEdits(tok, content, edits), nil
+}
+
+func toProtocolEdits(tok *token.File, content []byte, edits []source.TextEdit) []protocol.TextEdit {
+	if edits == nil {
+		return nil
+	}
+	// When a file ends with an empty line, the newline character is counted
+	// as part of the previous line. This causes the formatter to insert
+	// another unnecessary newline on each formatting. We handle this case by
+	// checking if the file already ends with a newline character.
+	hasExtraNewline := content[len(content)-1] == '\n'
+	result := make([]protocol.TextEdit, len(edits))
+	for i, edit := range edits {
+		rng := toProtocolRange(tok, edit.Range)
+		// If the edit ends at the end of the file, add the extra line.
+		if hasExtraNewline && tok.Offset(edit.Range.End) == len(content) {
+			rng.End.Line++
+			rng.End.Character = 0
+		}
+		result[i] = protocol.TextEdit{
+			Range:   rng,
+			NewText: edit.NewText,
+		}
+	}
+	return result
+}
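
As a usage note (the wrapper below is hypothetical; the real entry points are
the Formatting and RangeFormatting handlers in server.go further down): passing
a nil range makes formatRange cover the whole file, since it then builds a
range from token offset 0 through tok.Size().

```go
package lsp

import (
	"context"

	"golang.org/x/tools/internal/lsp/cache"
	"golang.org/x/tools/internal/lsp/protocol"
)

// formatWholeFile is a hypothetical wrapper: a nil range asks formatRange
// to format the entire document.
func formatWholeFile(ctx context.Context, v *cache.View, uri protocol.DocumentURI) ([]protocol.TextEdit, error) {
	return formatRange(ctx, v, uri, nil)
}
```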
diff --git a/internal/lsp/lsp_test.go b/internal/lsp/lsp_test.go
index 666792a..d693870 100644
--- a/internal/lsp/lsp_test.go
+++ b/internal/lsp/lsp_test.go
@@ -17,6 +17,7 @@
 
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/packages/packagestest"
+	"golang.org/x/tools/internal/lsp/cache"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 )
@@ -57,7 +58,7 @@
 	defer exported.Cleanup()
 
 	s := &server{
-		view: source.NewView(),
+		view: cache.NewView(),
 	}
 	// Merge the exported.Config with the view.Config.
 	cfg := *exported.Config
@@ -150,11 +151,11 @@
 type formats map[string]string
 type definitions map[protocol.Location]protocol.Location
 
-func (d diagnostics) test(t *testing.T, exported *packagestest.Exported, v *source.View) int {
+func (d diagnostics) test(t *testing.T, exported *packagestest.Exported, v *cache.View) int {
 	count := 0
 	for filename, want := range d {
 		f := v.GetFile(source.ToURI(filename))
-		sourceDiagnostics, err := source.Diagnostics(context.Background(), v, f)
+		sourceDiagnostics, err := source.Diagnostics(context.Background(), f)
 		if err != nil {
 			t.Fatal(err)
 		}
diff --git a/internal/lsp/position.go b/internal/lsp/position.go
index e4b70fb..4b3fc3f 100644
--- a/internal/lsp/position.go
+++ b/internal/lsp/position.go
@@ -7,6 +7,7 @@
 import (
 	"go/token"
 
+	"golang.org/x/tools/internal/lsp/cache"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 )
@@ -14,7 +15,7 @@
 // fromProtocolLocation converts from a protocol location to a source range.
 // It will return an error if the file of the location was not valid.
 // It uses fromProtocolRange to convert the start and end positions.
-func fromProtocolLocation(v *source.View, loc protocol.Location) (source.Range, error) {
+func fromProtocolLocation(v *cache.View, loc protocol.Location) (source.Range, error) {
 	f := v.GetFile(source.URI(loc.URI))
 	tok, err := f.GetToken()
 	if err != nil {
@@ -83,6 +84,14 @@
 	}
 }
 
+// fromTokenPosition converts a token.Position (1-based line and column
+// number) to a token.Pos (byte offset value).
+// It requires the token file the pos belongs to in order to do this.
+func fromTokenPosition(f *token.File, pos token.Position) token.Pos {
+	line := lineStart(f, pos.Line)
+	return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
+}
+
 // this functionality was borrowed from the analysisutil package
 func lineStart(f *token.File, line int) token.Pos {
 	// Use binary search to find the start offset of this line.
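
A standalone sketch of the conversion fromTokenPosition performs: take the
Pos at the start of the line, then add the column as a byte offset. It uses
(*token.File).LineStart, the go1.12 API that the TODO above mentions as the
eventual replacement for the binary-search lineStart helper; the
bytes-versus-characters caveat still applies.

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	src := "package p\n\nvar x = 1\n"
	fset := token.NewFileSet()
	f, _ := parser.ParseFile(fset, "p.go", src, 0)
	tok := fset.File(f.Pos())

	// A 1-based line/column position, as reported by packages.Error.
	posn := token.Position{Filename: "p.go", Line: 3, Column: 5}

	// Equivalent of fromTokenPosition: start-of-line Pos plus the column
	// treated as a byte offset.
	pos := tok.LineStart(posn.Line) + token.Pos(posn.Column-1)
	fmt.Println(fset.Position(pos)) // p.go:3:5
}
```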
diff --git a/internal/lsp/server.go b/internal/lsp/server.go
index 30c7417..b6259c0 100644
--- a/internal/lsp/server.go
+++ b/internal/lsp/server.go
@@ -6,11 +6,11 @@
 
 import (
 	"context"
-	"go/token"
 	"os"
 	"sync"
 
 	"golang.org/x/tools/internal/jsonrpc2"
+	"golang.org/x/tools/internal/lsp/cache"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 )
@@ -33,7 +33,7 @@
 	signatureHelpEnabled bool
 	snippetsSupported    bool
 
-	view *source.View
+	view *cache.View
 }
 
 func (s *server) Initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) {
@@ -42,7 +42,7 @@
 	if s.initialized {
 		return nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server already initialized")
 	}
-	s.view = source.NewView()
+	s.view = cache.NewView()
 	s.initialized = true // mark server as initialized now
 
 	// Check if the client supports snippets in completion items.
@@ -113,7 +113,7 @@
 }
 
 func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error {
-	s.cacheAndDiagnoseFile(ctx, params.TextDocument.URI, params.TextDocument.Text)
+	s.CacheAndDiagnose(ctx, params.TextDocument.URI, params.TextDocument.Text)
 	return nil
 }
 
@@ -123,29 +123,11 @@
 	}
 	// We expect the full content of file, i.e. a single change with no range.
 	if change := params.ContentChanges[0]; change.RangeLength == 0 {
-		s.cacheAndDiagnoseFile(ctx, params.TextDocument.URI, change.Text)
+		s.CacheAndDiagnose(ctx, params.TextDocument.URI, change.Text)
 	}
 	return nil
 }
 
-func (s *server) cacheAndDiagnoseFile(ctx context.Context, uri protocol.DocumentURI, text string) {
-	f := s.view.GetFile(source.URI(uri))
-	f.SetContent([]byte(text))
-	go func() {
-		f := s.view.GetFile(source.URI(uri))
-		reports, err := source.Diagnostics(ctx, s.view, f)
-		if err != nil {
-			return // handle error?
-		}
-		for filename, diagnostics := range reports {
-			s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{
-				URI:         protocol.DocumentURI(source.ToURI(filename)),
-				Diagnostics: toProtocolDiagnostics(s.view, diagnostics),
-			})
-		}
-	}()
-}
-
 func (s *server) WillSave(context.Context, *protocol.WillSaveTextDocumentParams) error {
 	return notImplemented("WillSave")
 }
@@ -299,56 +281,6 @@
 	return formatRange(ctx, s.view, params.TextDocument.URI, &params.Range)
 }
 
-// formatRange formats a document with a given range.
-func formatRange(ctx context.Context, v *source.View, uri protocol.DocumentURI, rng *protocol.Range) ([]protocol.TextEdit, error) {
-	f := v.GetFile(source.URI(uri))
-	tok, err := f.GetToken()
-	if err != nil {
-		return nil, err
-	}
-	var r source.Range
-	if rng == nil {
-		r.Start = tok.Pos(0)
-		r.End = tok.Pos(tok.Size())
-	} else {
-		r = fromProtocolRange(tok, *rng)
-	}
-	content, err := f.Read()
-	if err != nil {
-		return nil, err
-	}
-	edits, err := source.Format(ctx, f, r)
-	if err != nil {
-		return nil, err
-	}
-	return toProtocolEdits(tok, content, edits), nil
-}
-
-func toProtocolEdits(tok *token.File, content []byte, edits []source.TextEdit) []protocol.TextEdit {
-	if edits == nil {
-		return nil
-	}
-	// When a file ends with an empty line, the newline character is counted
-	// as part of the previous line. This causes the formatter to insert
-	// another unnecessary newline on each formatting. We handle this case by
-	// checking if the file already ends with a newline character.
-	hasExtraNewline := content[len(content)-1] == '\n'
-	result := make([]protocol.TextEdit, len(edits))
-	for i, edit := range edits {
-		rng := toProtocolRange(tok, edit.Range)
-		// If the edit ends at the end of the file, add the extra line.
-		if hasExtraNewline && tok.Offset(edit.Range.End) == len(content) {
-			rng.End.Line++
-			rng.End.Character = 0
-		}
-		result[i] = protocol.TextEdit{
-			Range:   rng,
-			NewText: edit.NewText,
-		}
-	}
-	return result
-}
-
 func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
 	return nil, notImplemented("OnTypeFormatting")
 }
diff --git a/internal/lsp/source/completion.go b/internal/lsp/source/completion.go
index 5524de2..576c501 100644
--- a/internal/lsp/source/completion.go
+++ b/internal/lsp/source/completion.go
@@ -34,7 +34,19 @@
 	PackageCompletionItem
 )
 
-func Completion(ctx context.Context, f *File, pos token.Pos) ([]CompletionItem, string, error) {
+// stdScore is the base score value set for all completion items.
+const stdScore float64 = 1.0
+
+// finder is a function used to record a completion candidate item in a list of
+// completion items.
+type finder func(types.Object, float64, []CompletionItem) []CompletionItem
+
+// Completion returns a list of possible candidates for completion, given a
+// file and a position. The prefix is computed based on the preceding
+// identifier and can be used by the client to score the quality of the
+// completion. For instance, some clients may tolerate imperfect matches as
+// valid completion results, since users may make typos.
+func Completion(ctx context.Context, f File, pos token.Pos) (items []CompletionItem, prefix string, err error) {
 	file, err := f.GetAST()
 	if err != nil {
 		return nil, "", err
@@ -43,19 +55,6 @@
 	if err != nil {
 		return nil, "", err
 	}
-	return completions(file, pos, pkg.Fset, pkg.Types, pkg.TypesInfo)
-}
-
-const stdScore float64 = 1.0
-
-type finder func(types.Object, float64, []CompletionItem) []CompletionItem
-
-// completions returns the map of possible candidates for completion, given a
-// position, a file AST, and type information. The prefix is computed based on
-// the preceding identifier and can be used by the client to score the quality
-// of the completion. For instance, some clients may tolerate imperfect matches
-// as valid completion results, since users may make typos.
-func completions(file *ast.File, pos token.Pos, fset *token.FileSet, pkg *types.Package, info *types.Info) (items []CompletionItem, prefix string, err error) {
 	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
 	if path == nil {
 		return nil, "", fmt.Errorf("cannot find node enclosing position")
@@ -75,16 +74,16 @@
 	// Save certain facts about the query position, including the expected type
 	// of the completion result, the signature of the function enclosing the
 	// position.
-	typ := expectedType(path, pos, info)
-	sig := enclosingFunction(path, pos, info)
-	pkgStringer := qualifier(file, pkg, info)
+	typ := expectedType(path, pos, pkg.TypesInfo)
+	sig := enclosingFunction(path, pos, pkg.TypesInfo)
+	pkgStringer := qualifier(file, pkg.Types, pkg.TypesInfo)
 
 	seen := make(map[types.Object]bool)
 
 	// found adds a candidate completion.
 	// Only the first candidate of a given name is considered.
 	found := func(obj types.Object, weight float64, items []CompletionItem) []CompletionItem {
-		if obj.Pkg() != nil && obj.Pkg() != pkg && !obj.Exported() {
+		if obj.Pkg() != nil && obj.Pkg() != pkg.Types && !obj.Exported() {
 			return items // inaccessible
 		}
 		if !seen[obj] {
@@ -101,7 +100,7 @@
 	}
 
 	// The position is within a composite literal.
-	if items, prefix, ok := complit(path, pos, pkg, info, found); ok {
+	if items, prefix, ok := complit(path, pos, pkg.Types, pkg.TypesInfo, found); ok {
 		return items, prefix, nil
 	}
 	switch n := path[0].(type) {
@@ -111,39 +110,39 @@
 
 		// Is this the Sel part of a selector?
 		if sel, ok := path[1].(*ast.SelectorExpr); ok && sel.Sel == n {
-			items, err = selector(sel, pos, info, found)
+			items, err = selector(sel, pos, pkg.TypesInfo, found)
 			return items, prefix, err
 		}
 		// reject defining identifiers
-		if obj, ok := info.Defs[n]; ok {
+		if obj, ok := pkg.TypesInfo.Defs[n]; ok {
 			if v, ok := obj.(*types.Var); ok && v.IsField() {
 				// An anonymous field is also a reference to a type.
 			} else {
 				of := ""
 				if obj != nil {
-					qual := types.RelativeTo(pkg)
+					qual := types.RelativeTo(pkg.Types)
 					of += ", of " + types.ObjectString(obj, qual)
 				}
 				return nil, "", fmt.Errorf("this is a definition%s", of)
 			}
 		}
 
-		items = append(items, lexical(path, pos, pkg, info, found)...)
+		items = append(items, lexical(path, pos, pkg.Types, pkg.TypesInfo, found)...)
 
 	// The function name hasn't been typed yet, but the parens are there:
 	//   recv.‸(arg)
 	case *ast.TypeAssertExpr:
 		// Create a fake selector expression.
-		items, err = selector(&ast.SelectorExpr{X: n.X}, pos, info, found)
+		items, err = selector(&ast.SelectorExpr{X: n.X}, pos, pkg.TypesInfo, found)
 		return items, prefix, err
 
 	case *ast.SelectorExpr:
-		items, err = selector(n, pos, info, found)
+		items, err = selector(n, pos, pkg.TypesInfo, found)
 		return items, prefix, err
 
 	default:
 		// fallback to lexical completions
-		return lexical(path, pos, pkg, info, found), "", nil
+		return lexical(path, pos, pkg.Types, pkg.TypesInfo, found), "", nil
 	}
 
 	return items, prefix, nil
diff --git a/internal/lsp/source/definition.go b/internal/lsp/source/definition.go
index 7990325..f83a4f3 100644
--- a/internal/lsp/source/definition.go
+++ b/internal/lsp/source/definition.go
@@ -16,7 +16,7 @@
 	"golang.org/x/tools/go/ast/astutil"
 )
 
-func Definition(ctx context.Context, f *File, pos token.Pos) (Range, error) {
+func Definition(ctx context.Context, f File, pos token.Pos) (Range, error) {
 	fAST, err := f.GetAST()
 	if err != nil {
 		return Range{}, err
@@ -45,10 +45,14 @@
 			}
 		}
 	}
-	return objToRange(f.view.Config.Fset, obj), nil
+	fset, err := f.GetFileSet()
+	if err != nil {
+		return Range{}, err
+	}
+	return objToRange(fset, obj), nil
 }
 
-func TypeDefinition(ctx context.Context, f *File, pos token.Pos) (Range, error) {
+func TypeDefinition(ctx context.Context, f File, pos token.Pos) (Range, error) {
 	fAST, err := f.GetAST()
 	if err != nil {
 		return Range{}, err
@@ -72,7 +76,11 @@
 	if obj == nil {
 		return Range{}, fmt.Errorf("no object for type %s", typ.String())
 	}
-	return objToRange(f.view.Config.Fset, obj), nil
+	fset, err := f.GetFileSet()
+	if err != nil {
+		return Range{}, err
+	}
+	return objToRange(fset, obj), nil
 }
 
 func typeToObject(typ types.Type) (obj types.Object) {
@@ -156,3 +164,33 @@
 		End:   p + token.Pos(len([]byte(obj.Name()))), // TODO: use real range of obj
 	}
 }
+
+// this functionality was borrowed from the analysisutil package
+func lineStart(f *token.File, line int) token.Pos {
+	// Use binary search to find the start offset of this line.
+	//
+	// TODO(adonovan): eventually replace this function with the
+	// simpler and more efficient (*go/token.File).LineStart, added
+	// in go1.12.
+
+	min := 0        // inclusive
+	max := f.Size() // exclusive
+	for {
+		offset := (min + max) / 2
+		pos := f.Pos(offset)
+		posn := f.Position(pos)
+		if posn.Line == line {
+			return pos - (token.Pos(posn.Column) - 1)
+		}
+
+		if min+1 >= max {
+			return token.NoPos
+		}
+
+		if posn.Line < line {
+			min = offset
+		} else {
+			max = offset
+		}
+	}
+}
diff --git a/internal/lsp/source/diagnostics.go b/internal/lsp/source/diagnostics.go
index d1ecedb..8f59265 100644
--- a/internal/lsp/source/diagnostics.go
+++ b/internal/lsp/source/diagnostics.go
@@ -14,21 +14,11 @@
 )
 
 type Diagnostic struct {
-	Range    Range
-	Severity DiagnosticSeverity
-	Message  string
+	token.Position
+	Message string
 }
 
-type DiagnosticSeverity int
-
-const (
-	SeverityError DiagnosticSeverity = iota
-	SeverityWarning
-	SeverityHint
-	SeverityInformation
-)
-
-func Diagnostics(ctx context.Context, v *View, f *File) (map[string][]Diagnostic, error) {
+func Diagnostics(ctx context.Context, f File) (map[string][]Diagnostic, error) {
 	pkg, err := f.GetPackage()
 	if err != nil {
 		return nil, err
@@ -56,25 +46,27 @@
 		diags = parseErrors
 	}
 	for _, diag := range diags {
-		filename, start := v.errorPos(diag)
-		// TODO(rstambler): Add support for diagnostic ranges.
-		end := start
+		pos := errorPos(diag)
 		diagnostic := Diagnostic{
-			Range: Range{
-				Start: start,
-				End:   end,
-			},
+			Position: pos,
 			Message:  diag.Msg,
-			Severity: SeverityError,
 		}
-		if _, ok := reports[filename]; ok {
-			reports[filename] = append(reports[filename], diagnostic)
+		if _, ok := reports[pos.Filename]; ok {
+			reports[pos.Filename] = append(reports[pos.Filename], diagnostic)
 		}
 	}
 	return reports, nil
 }
 
-func (v *View) errorPos(pkgErr packages.Error) (string, token.Pos) {
+// FromTokenPosition converts a token.Position (1-based line and column
+// number) to a token.Pos (byte offset value).
+// It requires the token file the pos belongs to in order to do this.
+func FromTokenPosition(f *token.File, pos token.Position) token.Pos {
+	line := lineStart(f, pos.Line)
+	return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
+}
+
+func errorPos(pkgErr packages.Error) token.Position {
 	remainder1, first, hasLine := chop(pkgErr.Pos)
 	remainder2, second, hasColumn := chop(remainder1)
 	var pos token.Position
@@ -86,15 +78,7 @@
 		pos.Filename = remainder1
 		pos.Line = first
 	}
-	f := v.GetFile(ToURI(pos.Filename))
-	if f == nil {
-		return "", token.NoPos
-	}
-	tok, err := f.GetToken()
-	if err != nil {
-		return "", token.NoPos
-	}
-	return pos.Filename, fromTokenPosition(tok, pos)
+	return pos
 }
 
 func chop(text string) (remainder string, value int, ok bool) {
@@ -108,41 +92,3 @@
 	}
 	return text[:i], int(v), true
 }
-
-// fromTokenPosition converts a token.Position (1-based line and column
-// number) to a token.Pos (byte offset value).
-// It requires the token file the pos belongs to in order to do this.
-func fromTokenPosition(f *token.File, pos token.Position) token.Pos {
-	line := lineStart(f, pos.Line)
-	return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
-}
-
-// this functionality was borrowed from the analysisutil package
-func lineStart(f *token.File, line int) token.Pos {
-	// Use binary search to find the start offset of this line.
-	//
-	// TODO(adonovan): eventually replace this function with the
-	// simpler and more efficient (*go/token.File).LineStart, added
-	// in go1.12.
-
-	min := 0        // inclusive
-	max := f.Size() // exclusive
-	for {
-		offset := (min + max) / 2
-		pos := f.Pos(offset)
-		posn := f.Position(pos)
-		if posn.Line == line {
-			return pos - (token.Pos(posn.Column) - 1)
-		}
-
-		if min+1 >= max {
-			return token.NoPos
-		}
-
-		if posn.Line < line {
-			min = offset
-		} else {
-			max = offset
-		}
-	}
-}
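
For reference, a hedged, self-contained sketch of the parsing that errorPos and
chop perform on packages.Error position strings. The names are illustrative and
the chop body here is a reconstruction consistent with the visible return
statement in the hunk above, not the actual source.

```go
package main

import (
	"fmt"
	"go/token"
	"strconv"
	"strings"
)

// parsePos mirrors errorPos: chop numeric suffixes off the right of a
// "file:line:col" (or "file:line") string, so filenames containing ':' work.
func parsePos(s string) token.Position {
	var pos token.Position
	rest1, first, ok1 := chop(s)
	rest2, second, ok2 := chop(rest1)
	switch {
	case ok1 && ok2: // file:line:col
		pos.Filename, pos.Line, pos.Column = rest2, second, first
	case ok1: // file:line
		pos.Filename, pos.Line = rest1, first
	default:
		pos.Filename = s
	}
	return pos
}

// chop splits a trailing ":<number>" off text, if present.
func chop(text string) (remainder string, value int, ok bool) {
	i := strings.LastIndex(text, ":")
	if i < 0 {
		return text, 0, false
	}
	v, err := strconv.ParseUint(text[i+1:], 10, 64)
	if err != nil {
		return text, 0, false
	}
	return text[:i], int(v), true
}

func main() {
	fmt.Println(parsePos("foo.go:3:14")) // foo.go:3:14
}
```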
diff --git a/internal/lsp/source/file.go b/internal/lsp/source/file.go
index dc7bd85..aa7bc8e 100644
--- a/internal/lsp/source/file.go
+++ b/internal/lsp/source/file.go
@@ -5,23 +5,21 @@
 package source
 
 import (
-	"fmt"
 	"go/ast"
 	"go/token"
-	"io/ioutil"
 
 	"golang.org/x/tools/go/packages"
 )
 
-// File holds all the information we know about a file.
-type File struct {
-	URI     URI
-	view    *View
-	active  bool
-	content []byte
-	ast     *ast.File
-	token   *token.File
-	pkg     *packages.Package
+// File represents a Go source file that has been type-checked. It is the input
+// to most of the exported functions in this package, as it wraps up the
+// building blocks for most queries. Users of the source package can abstract
+// the loading of packages into their own caching systems.
+type File interface {
+	GetAST() (*ast.File, error)
+	GetFileSet() (*token.FileSet, error)
+	GetPackage() (*packages.Package, error)
+	GetToken() (*token.File, error)
 }
 
 // Range represents a start and end position.
@@ -39,93 +37,3 @@
 	Range   Range
 	NewText string
 }
-
-// SetContent sets the overlay contents for a file.
-// Setting it to nil will revert it to the on disk contents, and remove it
-// from the active set.
-func (f *File) SetContent(content []byte) {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-	f.content = content
-	// the ast and token fields are invalid
-	f.ast = nil
-	f.token = nil
-	f.pkg = nil
-	// and we might need to update the overlay
-	switch {
-	case f.active && content == nil:
-		// we were active, and want to forget the content
-		f.active = false
-		if filename, err := f.URI.Filename(); err == nil {
-			delete(f.view.Config.Overlay, filename)
-		}
-		f.content = nil
-	case content != nil:
-		// an active overlay, update the map
-		f.active = true
-		if filename, err := f.URI.Filename(); err == nil {
-			f.view.Config.Overlay[filename] = f.content
-		}
-	}
-}
-
-// Read returns the contents of the file, reading it from file system if needed.
-func (f *File) Read() ([]byte, error) {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-	return f.read()
-}
-
-func (f *File) GetToken() (*token.File, error) {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-	if f.token == nil {
-		if err := f.view.parse(f.URI); err != nil {
-			return nil, err
-		}
-		if f.token == nil {
-			return nil, fmt.Errorf("failed to find or parse %v", f.URI)
-		}
-	}
-	return f.token, nil
-}
-
-func (f *File) GetAST() (*ast.File, error) {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-	if f.ast == nil {
-		if err := f.view.parse(f.URI); err != nil {
-			return nil, err
-		}
-	}
-	return f.ast, nil
-}
-
-func (f *File) GetPackage() (*packages.Package, error) {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-	if f.pkg == nil {
-		if err := f.view.parse(f.URI); err != nil {
-			return nil, err
-		}
-	}
-	return f.pkg, nil
-}
-
-// read is the internal part of Read that presumes the lock is already held
-func (f *File) read() ([]byte, error) {
-	if f.content != nil {
-		return f.content, nil
-	}
-	// we don't know the content yet, so read it
-	filename, err := f.URI.Filename()
-	if err != nil {
-		return nil, err
-	}
-	content, err := ioutil.ReadFile(filename)
-	if err != nil {
-		return nil, err
-	}
-	f.content = content
-	return f.content, nil
-}
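
A compile-time assertion like the sketch below (hypothetical, not part of this
change) is the idiomatic way to guarantee that *cache.File keeps satisfying the
new source.File interface defined above as both packages evolve.

```go
package cache

import "golang.org/x/tools/internal/lsp/source"

// Hypothetical compile-time check that *File implements source.File.
var _ source.File = (*File)(nil)
```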
diff --git a/internal/lsp/source/format.go b/internal/lsp/source/format.go
index c7b46d2..2f9938f 100644
--- a/internal/lsp/source/format.go
+++ b/internal/lsp/source/format.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+// Package source provides core features for use by Go editors and tools.
 package source
 
 import (
@@ -15,7 +16,7 @@
 )
 
 // Format formats a document with a given range.
-func Format(ctx context.Context, f *File, rng Range) ([]TextEdit, error) {
+func Format(ctx context.Context, f File, rng Range) ([]TextEdit, error) {
 	fAST, err := f.GetAST()
 	if err != nil {
 		return nil, err
@@ -45,8 +46,12 @@
 	// of Go used to build the LSP server will determine how it formats code.
 	// This should be acceptable for all users, who likely be prompted to rebuild
 	// the LSP server on each Go release.
+	fset, err := f.GetFileSet()
+	if err != nil {
+		return nil, err
+	}
 	buf := &bytes.Buffer{}
-	if err := format.Node(buf, f.view.Config.Fset, node); err != nil {
+	if err := format.Node(buf, fset, node); err != nil {
 		return nil, err
 	}
 	// TODO(rstambler): Compute text edits instead of replacing whole file.
diff --git a/internal/lsp/source/hover.go b/internal/lsp/source/hover.go
index 2c04a88..e449434 100644
--- a/internal/lsp/source/hover.go
+++ b/internal/lsp/source/hover.go
@@ -11,7 +11,7 @@
 	"go/types"
 )
 
-func Hover(ctx context.Context, f *File, pos token.Pos) (string, Range, error) {
+func Hover(ctx context.Context, f File, pos token.Pos) (string, Range, error) {
 	fAST, err := f.GetAST()
 	if err != nil {
 		return "", Range{}, err
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
index ecccdd9..db78b12 100644
--- a/internal/lsp/source/signature_help.go
+++ b/internal/lsp/source/signature_help.go
@@ -24,7 +24,7 @@
 	Label string
 }
 
-func SignatureHelp(ctx context.Context, f *File, pos token.Pos) (*SignatureInformation, error) {
+func SignatureHelp(ctx context.Context, f File, pos token.Pos) (*SignatureInformation, error) {
 	fAST, err := f.GetAST()
 	if err != nil {
 		return nil, err