internal/lsp: copy fact support from go/analysis/internal/checker

This replaces the analysis-running code, which was modeled on the logic
in unitchecker.go, with the approach used in checker.go, so that actions
which fetch facts from their dependencies can run concurrently.

Adds the rest of the traditional vet suite to the LSP.

TODO(matloob): test that facts are actually propagated between packages
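
For context, the facts mechanism this change adopts works roughly as in
the sketch below. The sketch is an illustration only, not part of this
change: the package, analyzer, and fact names are invented, but
analysis.Fact, FactTypes, ExportObjectFact, and ImportObjectFact are the
real golang.org/x/tools/go/analysis APIs.

	// Hypothetical analyzer illustrating the facts API (names invented).
	package wrapfacts

	import (
		"go/types"

		"golang.org/x/tools/go/analysis"
	)

	// isWrapper is a fact attached to exported functions; facts attached
	// to exported objects become visible to analyses of importing packages.
	type isWrapper struct{}

	func (*isWrapper) AFact() {}

	var Analyzer = &analysis.Analyzer{
		Name:      "wrapfacts",
		Doc:       "toy analyzer that records exported functions as facts",
		FactTypes: []analysis.Fact{new(isWrapper)},
		Run:       run,
	}

	func run(pass *analysis.Pass) (interface{}, error) {
		scope := pass.Pkg.Scope()
		for _, name := range scope.Names() {
			if fn, ok := scope.Lookup(name).(*types.Func); ok && fn.Exported() {
				// Record a fact on this function for downstream packages.
				pass.ExportObjectFact(fn, &isWrapper{})
			}
		}
		// A later pass over an importing package can read the fact back:
		//	var fact isWrapper
		//	if pass.ImportObjectFact(obj, &fact) { ... }
		return nil, nil
	}

Because printf and lostcancel declare FactTypes, an action for such an
analyzer on a package depends on actions for the same analyzer on each
of the package's imports; the checker.go-style action graph lets those
per-dependency actions execute in parallel.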

Change-Id: I946082159777943af81bcf10e503fecc99da521e
Reviewed-on: https://go-review.googlesource.com/c/161671
Run-TryBot: Michael Matloob <matloob@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Rebecca Stambler <rstambler@golang.org>
diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go
index 6422e8d..f9a38ba 100644
--- a/go/analysis/internal/checker/checker.go
+++ b/go/analysis/internal/checker/checker.go
@@ -1,3 +1,7 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 // Package checker defines the implementation of the checker commands.
 // The same code drives the multi-analysis driver, the single-analysis
 // driver that is conventionally provided for convenience along with
diff --git a/internal/lsp/cache/view.go b/internal/lsp/cache/view.go
index 80560f1..aadd88a 100644
--- a/internal/lsp/cache/view.go
+++ b/internal/lsp/cache/view.go
@@ -25,6 +25,8 @@
 	Config packages.Config
 
 	files map[source.URI]*File
+
+	analysisCache *source.AnalysisCache
 }
 
 // NewView creates a new View, given a root path and go/packages configuration.
@@ -116,6 +118,10 @@
 			v:               v,
 			topLevelPkgPath: pkg.PkgPath,
 		}
+
+		// TODO(rstambler): Get real TypeSizes from go/packages.
+		pkg.TypesSizes = &types.StdSizes{}
+
 		if err := imp.addImports(pkg); err != nil {
 			return err
 		}
@@ -265,3 +271,10 @@
 	}
 	return pkg.Types, nil
 }
+
+func (v *View) GetAnalysisCache() *source.AnalysisCache {
+	if v.analysisCache == nil {
+		v.analysisCache = source.NewAnalysisCache()
+	}
+	return v.analysisCache
+}
diff --git a/internal/lsp/lsp_test.go b/internal/lsp/lsp_test.go
index 3425e38..bdebfb4 100644
--- a/internal/lsp/lsp_test.go
+++ b/internal/lsp/lsp_test.go
@@ -35,7 +35,7 @@
 	// We hardcode the expected number of test cases to ensure that all tests
 	// are being executed. If a test is added, this number must be changed.
 	const expectedCompletionsCount = 63
-	const expectedDiagnosticsCount = 16
+	const expectedDiagnosticsCount = 17
 	const expectedFormatCount = 3
 	const expectedDefinitionsCount = 16
 	const expectedTypeDefinitionsCount = 2
diff --git a/internal/lsp/source/analysis.go b/internal/lsp/source/analysis.go
new file mode 100644
index 0000000..d01369c
--- /dev/null
+++ b/internal/lsp/source/analysis.go
@@ -0,0 +1,325 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is largely based on go/analysis/internal/checker/checker.go.
+
+package source
+
+import (
+	"fmt"
+	"go/types"
+	"log"
+	"reflect"
+	"sort"
+	"strings"
+	"sync"
+	"time"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/packages"
+)
+
+// AnalysisCache holds analysis information for all the packages in a view.
+type AnalysisCache struct {
+	m map[analysisKey]*action
+}
+
+func NewAnalysisCache() *AnalysisCache {
+	return &AnalysisCache{make(map[analysisKey]*action)}
+}
+
+// Each graph node (action) is one unit of analysis.
+// Edges express package-to-package (vertical) dependencies,
+// and analysis-to-analysis (horizontal) dependencies.
+type analysisKey struct {
+	*analysis.Analyzer
+	*packages.Package
+}
+
+func (c *AnalysisCache) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action {
+	// TODO(matloob): Every time but the first, this needs to re-construct
+	// the invalidated parts of the action graph, probably under a lock?
+	// We'll take care of that later. For now, clear the entire cache!
+	for k := range c.m {
+		delete(c.m, k)
+	}
+
+	// Construct the action graph.
+	var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
+	mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
+		k := analysisKey{a, pkg}
+		act, ok := c.m[k]
+		if !ok {
+			act = &action{a: a, pkg: pkg}
+
+			// Add a dependency on each required analyzer.
+			for _, req := range a.Requires {
+				act.deps = append(act.deps, mkAction(req, pkg))
+			}
+
+			// An analysis that consumes/produces facts
+			// must run on the package's dependencies too.
+			if len(a.FactTypes) > 0 {
+				paths := make([]string, 0, len(pkg.Imports))
+				for path := range pkg.Imports {
+					paths = append(paths, path)
+				}
+				sort.Strings(paths) // for determinism
+				for _, path := range paths {
+					dep := mkAction(a, pkg.Imports[path])
+					act.deps = append(act.deps, dep)
+				}
+			}
+
+			c.m[k] = act
+		}
+		return act
+	}
+
+	// Build nodes for initial packages.
+	var roots []*action
+	for _, a := range analyzers {
+		for _, pkg := range pkgs {
+			root := mkAction(a, pkg)
+			root.isroot = true
+			roots = append(roots, root)
+		}
+	}
+
+	// Execute the graph in parallel.
+	execAll(roots)
+
+	return roots
+}
+
+// An action represents one unit of analysis work: the application of
+// one analysis to one package. Actions form a DAG, both within a
+// package (as different analyzers are applied, either in sequence or
+// parallel), and across packages (as dependencies are analyzed).
+type action struct {
+	once         sync.Once
+	a            *analysis.Analyzer
+	pkg          *packages.Package
+	pass         *analysis.Pass
+	isroot       bool
+	deps         []*action
+	objectFacts  map[objectFactKey]analysis.Fact
+	packageFacts map[packageFactKey]analysis.Fact
+	inputs       map[*analysis.Analyzer]interface{}
+	result       interface{}
+	diagnostics  []analysis.Diagnostic
+	err          error
+	duration     time.Duration
+}
+
+type objectFactKey struct {
+	obj types.Object
+	typ reflect.Type
+}
+
+type packageFactKey struct {
+	pkg *types.Package
+	typ reflect.Type
+}
+
+func (act *action) String() string {
+	return fmt.Sprintf("%s@%s", act.a, act.pkg)
+}
+
+func execAll(actions []*action) {
+	var wg sync.WaitGroup
+	for _, act := range actions {
+		wg.Add(1)
+		work := func(act *action) {
+			act.exec()
+			wg.Done()
+		}
+		go work(act)
+	}
+	wg.Wait()
+}
+
+func (act *action) exec() { act.once.Do(act.execOnce) }
+
+func (act *action) execOnce() {
+	// Analyze dependencies.
+	execAll(act.deps)
+
+	// Report an error if any dependency failed.
+	var failed []string
+	for _, dep := range act.deps {
+		if dep.err != nil {
+			failed = append(failed, dep.String())
+		}
+	}
+	if failed != nil {
+		sort.Strings(failed)
+		act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
+		return
+	}
+
+	// Plumb the output values of the dependencies
+	// into the inputs of this action.  Also facts.
+	inputs := make(map[*analysis.Analyzer]interface{})
+	act.objectFacts = make(map[objectFactKey]analysis.Fact)
+	act.packageFacts = make(map[packageFactKey]analysis.Fact)
+	for _, dep := range act.deps {
+		if dep.pkg == act.pkg {
+			// Same package, different analysis (horizontal edge):
+			// in-memory outputs of prerequisite analyzers
+			// become inputs to this analysis pass.
+			inputs[dep.a] = dep.result
+
+		} else if dep.a == act.a { // (always true)
+			// Same analysis, different package (vertical edge):
+			// serialized facts produced by prerequisite analysis
+			// become available to this analysis pass.
+			inheritFacts(act, dep)
+		}
+	}
+
+	// Run the analysis.
+	pass := &analysis.Pass{
+		Analyzer:          act.a,
+		Fset:              act.pkg.Fset,
+		Files:             act.pkg.Syntax,
+		OtherFiles:        act.pkg.OtherFiles,
+		Pkg:               act.pkg.Types,
+		TypesInfo:         act.pkg.TypesInfo,
+		TypesSizes:        act.pkg.TypesSizes,
+		ResultOf:          inputs,
+		Report:            func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
+		ImportObjectFact:  act.importObjectFact,
+		ExportObjectFact:  act.exportObjectFact,
+		ImportPackageFact: act.importPackageFact,
+		ExportPackageFact: act.exportPackageFact,
+	}
+	act.pass = pass
+
+	var err error
+	if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
+		err = fmt.Errorf("analysis skipped due to errors in package")
+	} else {
+		act.result, err = pass.Analyzer.Run(pass)
+		if err == nil {
+			if got, want := reflect.TypeOf(act.result), pass.Analyzer.ResultType; got != want {
+				err = fmt.Errorf(
+					"internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
+					pass.Pkg.Path(), pass.Analyzer, got, want)
+			}
+		}
+	}
+	act.err = err
+
+	// disallow calls after Run
+	pass.ExportObjectFact = nil
+	pass.ExportPackageFact = nil
+}
+
+// inheritFacts populates act's fact maps with
+// the facts it obtains from its dependency, dep.
+func inheritFacts(act, dep *action) {
+	for key, fact := range dep.objectFacts {
+		// Filter out facts related to objects
+		// that are irrelevant downstream
+		// (equivalently: not in the compiler export data).
+		if !exportedFrom(key.obj, dep.pkg.Types) {
+			continue
+		}
+		act.objectFacts[key] = fact
+	}
+
+	for key, fact := range dep.packageFacts {
+		// TODO: filter out facts that belong to
+		// packages not mentioned in the export data
+		// to prevent side channels.
+
+		act.packageFacts[key] = fact
+	}
+}
+
+// exportedFrom reports whether obj may be visible to a package that imports pkg.
+// This includes not just the exported members of pkg, but also unexported
+// constants, types, fields, and methods, perhaps belonging to other packages,
+// that find their way into the API.
+// This is an overapproximation of the more accurate approach used by
+// gc export data, which walks the type graph, but it's much simpler.
+//
+// TODO(adonovan): do more accurate filtering by walking the type graph.
+func exportedFrom(obj types.Object, pkg *types.Package) bool {
+	switch obj := obj.(type) {
+	case *types.Func:
+		return obj.Exported() && obj.Pkg() == pkg ||
+			obj.Type().(*types.Signature).Recv() != nil
+	case *types.Var:
+		return obj.Exported() && obj.Pkg() == pkg ||
+			obj.IsField()
+	case *types.TypeName, *types.Const:
+		return true
+	}
+	return false // Nil, Builtin, Label, or PkgName
+}
+
+// importObjectFact implements Pass.ImportObjectFact.
+// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
+// importObjectFact copies the fact value to *ptr.
+func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
+	if obj == nil {
+		panic("nil object")
+	}
+	key := objectFactKey{obj, factType(ptr)}
+	if v, ok := act.objectFacts[key]; ok {
+		reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+		return true
+	}
+	return false
+}
+
+// exportObjectFact implements Pass.ExportObjectFact.
+func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
+	if act.pass.ExportObjectFact == nil {
+		log.Panicf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)
+	}
+
+	if obj.Pkg() != act.pkg.Types {
+		log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging to another package",
+			act.a, act.pkg, obj, fact)
+	}
+
+	key := objectFactKey{obj, factType(fact)}
+	act.objectFacts[key] = fact // clobber any existing entry
+}
+
+// importPackageFact implements Pass.ImportPackageFact.
+// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
+// importPackageFact copies the fact value to *ptr.
+func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+	if pkg == nil {
+		panic("nil package")
+	}
+	key := packageFactKey{pkg, factType(ptr)}
+	if v, ok := act.packageFacts[key]; ok {
+		reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+		return true
+	}
+	return false
+}
+
+// exportPackageFact implements Pass.ExportPackageFact.
+func (act *action) exportPackageFact(fact analysis.Fact) {
+	if act.pass.ExportPackageFact == nil {
+		log.Panicf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)
+	}
+
+	key := packageFactKey{act.pass.Pkg, factType(fact)}
+	act.packageFacts[key] = fact // clobber any existing entry
+}
+
+func factType(fact analysis.Fact) reflect.Type {
+	t := reflect.TypeOf(fact)
+	if t.Kind() != reflect.Ptr {
+		log.Fatalf("invalid Fact type: got %T, want pointer", t)
+	}
+	return t
+}
diff --git a/internal/lsp/source/diagnostics.go b/internal/lsp/source/diagnostics.go
index 4a7fb78..27f5bad 100644
--- a/internal/lsp/source/diagnostics.go
+++ b/internal/lsp/source/diagnostics.go
@@ -9,11 +9,10 @@
 	"context"
 	"fmt"
 	"go/token"
-	"sort"
 	"strconv"
 	"strings"
-	"sync"
 
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/asmdecl"
 	"golang.org/x/tools/go/analysis/passes/assign"
 	"golang.org/x/tools/go/analysis/passes/atomic"
@@ -25,18 +24,18 @@
 	"golang.org/x/tools/go/analysis/passes/copylock"
 	"golang.org/x/tools/go/analysis/passes/httpresponse"
 	"golang.org/x/tools/go/analysis/passes/loopclosure"
+	"golang.org/x/tools/go/analysis/passes/lostcancel"
 	"golang.org/x/tools/go/analysis/passes/nilfunc"
+	"golang.org/x/tools/go/analysis/passes/printf"
 	"golang.org/x/tools/go/analysis/passes/shift"
 	"golang.org/x/tools/go/analysis/passes/stdmethods"
 	"golang.org/x/tools/go/analysis/passes/structtag"
+	"golang.org/x/tools/go/analysis/passes/tests"
 	"golang.org/x/tools/go/analysis/passes/unmarshal"
 	"golang.org/x/tools/go/analysis/passes/unreachable"
 	"golang.org/x/tools/go/analysis/passes/unsafeptr"
 	"golang.org/x/tools/go/analysis/passes/unusedresult"
 
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/tests"
-
 	"golang.org/x/tools/go/packages"
 )
 
@@ -119,7 +118,7 @@
 		return reports, nil
 	}
 	// Type checking and parsing succeeded. Run analyses.
-	runAnalyses(pkg, func(a *analysis.Analyzer, diag analysis.Diagnostic) {
+	runAnalyses(v.GetAnalysisCache(), pkg, func(a *analysis.Analyzer, diag analysis.Diagnostic) {
 		pos := pkg.Fset.Position(diag.Pos)
 		category := a.Name
 		if diag.Category != "" {
@@ -187,10 +186,8 @@
 	return bytes.IndexAny(line[c-1:], " \n,():;[]"), nil
 }
 
-func runAnalyses(pkg *packages.Package, report func(a *analysis.Analyzer, diag analysis.Diagnostic)) error {
-	// These are the analyses in the vetsuite, except for lostcancel and printf.
-	// Those rely on facts from other packages.
-	// TODO(matloob): Add fact support.
+func runAnalyses(c *AnalysisCache, pkg *packages.Package, report func(a *analysis.Analyzer, diag analysis.Diagnostic)) error {
+	// the traditional vet suite:
 	analyzers := []*analysis.Analyzer{
 		asmdecl.Analyzer,
 		assign.Analyzer,
@@ -203,7 +200,9 @@
 		copylock.Analyzer,
 		httpresponse.Analyzer,
 		loopclosure.Analyzer,
+		lostcancel.Analyzer,
 		nilfunc.Analyzer,
+		printf.Analyzer,
 		shift.Analyzer,
 		stdmethods.Analyzer,
 		structtag.Analyzer,
@@ -214,102 +213,17 @@
 		unusedresult.Analyzer,
 	}
 
-	// Execute actions in parallel. Based on unitchecker's analysis execution logic.
-	type action struct {
-		once        sync.Once
-		result      interface{}
-		err         error
-		diagnostics []analysis.Diagnostic
-	}
-	actions := make(map[*analysis.Analyzer]*action)
-
-	var registerActions func(a *analysis.Analyzer)
-	registerActions = func(a *analysis.Analyzer) {
-		act, ok := actions[a]
-		if !ok {
-			act = new(action)
-			for _, req := range a.Requires {
-				registerActions(req)
-			}
-			actions[a] = act
-		}
-	}
-	for _, a := range analyzers {
-		registerActions(a)
-	}
-
-	// In parallel, execute the DAG of analyzers.
-	var exec func(a *analysis.Analyzer) *action
-	var execAll func(analyzers []*analysis.Analyzer)
-	exec = func(a *analysis.Analyzer) *action {
-		act := actions[a]
-		act.once.Do(func() {
-			if len(a.FactTypes) > 0 {
-				panic("for analyzer " + a.Name + " modular analyses needing facts are not yet supported")
-			}
-
-			execAll(a.Requires) // prefetch dependencies in parallel
-
-			// The inputs to this analysis are the
-			// results of its prerequisites.
-			inputs := make(map[*analysis.Analyzer]interface{})
-			var failed []string
-			for _, req := range a.Requires {
-				reqact := exec(req)
-				if reqact.err != nil {
-					failed = append(failed, req.String())
-					continue
-				}
-				inputs[req] = reqact.result
-			}
-
-			// Report an error if any dependency failed.
-			if failed != nil {
-				sort.Strings(failed)
-				act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
-				return
-			}
-
-			pass := &analysis.Pass{
-				Analyzer:   a,
-				Fset:       pkg.Fset,
-				Files:      pkg.Syntax,
-				OtherFiles: pkg.OtherFiles,
-				Pkg:        pkg.Types,
-				TypesInfo:  pkg.TypesInfo,
-				TypesSizes: pkg.TypesSizes,
-				ResultOf:   inputs,
-				Report:     func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
-			}
-
-			act.result, act.err = a.Run(pass)
-		})
-		return act
-	}
-	execAll = func(analyzers []*analysis.Analyzer) {
-		var wg sync.WaitGroup
-		for _, a := range analyzers {
-			wg.Add(1)
-			go func(a *analysis.Analyzer) {
-				_ = exec(a)
-				wg.Done()
-			}(a)
-		}
-		wg.Wait()
-	}
-
-	execAll(analyzers)
+	roots := c.analyze([]*packages.Package{pkg}, analyzers)
 
 	// Report diagnostics and errors from root analyzers.
-	for _, a := range analyzers {
-		act := actions[a]
-		if act.err != nil {
-			// TODO(matloob): This isn't quite right: we might return a failed prerequisites error,
-			// which isn't super useful...
-			return act.err
-		}
-		for _, diag := range act.diagnostics {
-			report(a, diag)
+	for _, r := range roots {
+		for _, diag := range r.diagnostics {
+			if r.err != nil {
+				// TODO(matloob): This isn't quite right: we might return a failed prerequisites error,
+				// which isn't super useful...
+				return r.err
+			}
+			report(r.a, diag)
 		}
 	}
 
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
index 395c06f..4953827 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/source/view.go
@@ -18,6 +18,7 @@
 type View interface {
 	GetFile(ctx context.Context, uri URI) (File, error)
 	SetContent(ctx context.Context, uri URI, content []byte) (View, error)
+	GetAnalysisCache() *AnalysisCache
 	FileSet() *token.FileSet
 }
 
diff --git a/internal/lsp/testdata/analyzer/bad_test.go b/internal/lsp/testdata/analyzer/bad_test.go
index 7e3e864..d10ff81 100644
--- a/internal/lsp/testdata/analyzer/bad_test.go
+++ b/internal/lsp/testdata/analyzer/bad_test.go
@@ -1,6 +1,7 @@
 package analyzer
 
 import (
+	"fmt"
 	"sync"
 	"testing"
 )
@@ -8,4 +9,10 @@
 func Testbad(t *testing.T) { //@diag("", "tests", "Testbad has malformed name: first letter after 'Test' must not be lowercase")
 	var x sync.Mutex
 	_ = x //@diag("x", "copylocks", "assignment copies lock value to _: sync.Mutex")
+
+	printfWrapper("%s") //@diag("printfWrapper", "printf", "printfWrapper format %s reads arg #1, but call has 0 args")
+}
+
+func printfWrapper(format string, args ...interface{}) {
+	fmt.Printf(format, args...)
 }