godoc: revert support for Go 1.8 aliases

Change-Id: Ibb3afede1121bd53567f3ff70b886b02dd81399f
Reviewed-on: https://go-review.googlesource.com/32832
Reviewed-by: Robert Griesemer <gri@golang.org>
diff --git a/godoc/index.go b/godoc/index.go
index fbf80d6..725121a 100644
--- a/godoc/index.go
+++ b/godoc/index.go
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// +build !go1.8
-
 // This file contains the infrastructure to create an
 // identifier and full-text index for a set of Go files.
 //
diff --git a/godoc/index18.go b/godoc/index18.go
deleted file mode 100644
index de48cd8..0000000
--- a/godoc/index18.go
+++ /dev/null
@@ -1,1596 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.8
-
-// This file contains the infrastructure to create an
-// identifier and full-text index for a set of Go files.
-//
-// Algorithm for identifier index:
-// - traverse all .go files of the file tree specified by root
-// - for each identifier (word) encountered, collect all occurrences (spots)
-//   into a list; this produces a list of spots for each word
-// - reduce the lists: from a list of spots to a list of FileRuns,
-//   and from a list of FileRuns into a list of PakRuns
-// - make a HitList from the PakRuns
-//
-// Details:
-// - keep two lists per word: one containing package-level declarations
-//   that have snippets, and one containing all other spots
-// - keep the snippets in a separate table indexed by snippet index
-//   and store the snippet index in place of the line number in a SpotInfo
-//   (the line number for spots with snippets is stored in the snippet)
-// - at the end, create lists of alternative spellings for a given
-//   word
-//
-// Algorithm for full text index:
-// - concatenate all source code in a byte buffer (in memory)
-// - add the files to a file set in lockstep as they are added to the byte
-//   buffer such that a byte buffer offset corresponds to the Pos value for
-//   that file location
-// - create a suffix array from the concatenated sources
-//
-// String lookup in full text index:
-// - use the suffix array to lookup a string's offsets - the offsets
-//   correspond to the Pos values relative to the file set
-// - translate the Pos values back into file and line information and
-//   sort the result
-
-package godoc
-
-import (
-	"bufio"
-	"bytes"
-	"encoding/gob"
-	"errors"
-	"fmt"
-	"go/ast"
-	"go/doc"
-	"go/parser"
-	"go/token"
-	"index/suffixarray"
-	"io"
-	"log"
-	"os"
-	pathpkg "path"
-	"path/filepath"
-	"regexp"
-	"runtime"
-	"sort"
-	"strconv"
-	"strings"
-	"sync"
-	"time"
-	"unicode"
-
-	"golang.org/x/tools/godoc/util"
-	"golang.org/x/tools/godoc/vfs"
-)
-
-// ----------------------------------------------------------------------------
-// InterfaceSlice is a helper type for sorting interface
-// slices according to some slice-specific sort criteria.
-
-type comparer func(x, y interface{}) bool
-
-type interfaceSlice struct {
-	slice []interface{}
-	less  comparer
-}
-
-// ----------------------------------------------------------------------------
-// RunList
-
-// A RunList is a list of entries that can be sorted according to some
-// criteria. A RunList may be compressed by grouping "runs" of entries
-// which are equal (according to the sort criteria) into a new RunList of
-// runs. For instance, a RunList containing pairs (x, y) may be compressed
-// into a RunList containing pair runs (x, {y}) where each run consists of
-// a list of y's with the same x.
-type RunList []interface{}
-
-func (h RunList) sort(less comparer) {
-	sort.Sort(&interfaceSlice{h, less})
-}
-
-func (p *interfaceSlice) Len() int           { return len(p.slice) }
-func (p *interfaceSlice) Less(i, j int) bool { return p.less(p.slice[i], p.slice[j]) }
-func (p *interfaceSlice) Swap(i, j int)      { p.slice[i], p.slice[j] = p.slice[j], p.slice[i] }
-
-// Compress entries which are the same according to a sort criterion
-// (specified by less) into "runs".
-func (h RunList) reduce(less comparer, newRun func(h RunList) interface{}) RunList {
-	if len(h) == 0 {
-		return nil
-	}
-	// len(h) > 0
-
-	// create runs of entries with equal values
-	h.sort(less)
-
-	// for each run, make a new run object and collect them in a new RunList
-	var hh RunList
-	i, x := 0, h[0]
-	for j, y := range h {
-		if less(x, y) {
-			hh = append(hh, newRun(h[i:j]))
-			i, x = j, h[j] // start a new run
-		}
-	}
-	// add final run, if any
-	if i < len(h) {
-		hh = append(hh, newRun(h[i:]))
-	}
-
-	return hh
-}
-
-// ----------------------------------------------------------------------------
-// KindRun
-
-// Debugging support. Disable to see multiple entries per line.
-const removeDuplicates = true
-
-// A KindRun is a run of SpotInfos of the same kind in a given file.
-// The kind (3 bits) is stored in each SpotInfo element; to find the
-// kind of a KindRun, look at any of its elements.
-type KindRun []SpotInfo
-
-// KindRuns are sorted by line number or index. Since the isIndex bit
-// is always the same for all infos in one list we can compare lori's.
-func (k KindRun) Len() int           { return len(k) }
-func (k KindRun) Less(i, j int) bool { return k[i].Lori() < k[j].Lori() }
-func (k KindRun) Swap(i, j int)      { k[i], k[j] = k[j], k[i] }
-
-// FileRun contents are sorted by Kind for the reduction into KindRuns.
-func lessKind(x, y interface{}) bool { return x.(SpotInfo).Kind() < y.(SpotInfo).Kind() }
-
-// newKindRun allocates a new KindRun from the SpotInfo run h.
-func newKindRun(h RunList) interface{} {
-	run := make(KindRun, len(h))
-	for i, x := range h {
-		run[i] = x.(SpotInfo)
-	}
-
-	// Spots were sorted by file and kind to create this run.
-	// Within this run, sort them by line number or index.
-	sort.Sort(run)
-
-	if removeDuplicates {
-		// Since both the lori and kind fields must be
-		// the same for duplicates, and since the isIndex
-		// bit is always the same for all infos in one
-		// list we can simply compare the entire info.
-		k := 0
-		prev := SpotInfo(1<<32 - 1) // an unlikely value
-		for _, x := range run {
-			if x != prev {
-				run[k] = x
-				k++
-				prev = x
-			}
-		}
-		run = run[0:k]
-	}
-
-	return run
-}
-
-// ----------------------------------------------------------------------------
-// FileRun
-
-// A Pak describes a Go package.
-type Pak struct {
-	Path string // path of directory containing the package
-	Name string // package name as declared by package clause
-}
-
-// Paks are sorted by name (primary key) and by import path (secondary key).
-func (p *Pak) less(q *Pak) bool {
-	return p.Name < q.Name || p.Name == q.Name && p.Path < q.Path
-}
-
-// A File describes a Go file.
-type File struct {
-	Name string // directory-local file name
-	Pak  *Pak   // the package to which the file belongs
-}
-
-// Path returns the file path of f.
-func (f *File) Path() string {
-	return pathpkg.Join(f.Pak.Path, f.Name)
-}
-
-// A Spot describes a single occurrence of a word.
-type Spot struct {
-	File *File
-	Info SpotInfo
-}
-
-// A FileRun is a list of KindRuns belonging to the same file.
-type FileRun struct {
-	File   *File
-	Groups []KindRun
-}
-
-// Spots are sorted by file path for the reduction into FileRuns.
-func lessSpot(x, y interface{}) bool {
-	fx := x.(Spot).File
-	fy := y.(Spot).File
-	// same as "return fx.Path() < fy.Path()" but w/o computing the file path first
-	px := fx.Pak.Path
-	py := fy.Pak.Path
-	return px < py || px == py && fx.Name < fy.Name
-}
-
-// newFileRun allocates a new FileRun from the Spot run h.
-func newFileRun(h RunList) interface{} {
-	file := h[0].(Spot).File
-
-	// reduce the list of Spots into a list of KindRuns
-	h1 := make(RunList, len(h))
-	for i, x := range h {
-		h1[i] = x.(Spot).Info
-	}
-	h2 := h1.reduce(lessKind, newKindRun)
-
-	// create the FileRun
-	groups := make([]KindRun, len(h2))
-	for i, x := range h2 {
-		groups[i] = x.(KindRun)
-	}
-	return &FileRun{file, groups}
-}
-
-// ----------------------------------------------------------------------------
-// PakRun
-
-// A PakRun describes a run of *FileRuns of a package.
-type PakRun struct {
-	Pak   *Pak
-	Files []*FileRun
-}
-
-// Sorting support for files within a PakRun.
-func (p *PakRun) Len() int           { return len(p.Files) }
-func (p *PakRun) Less(i, j int) bool { return p.Files[i].File.Name < p.Files[j].File.Name }
-func (p *PakRun) Swap(i, j int)      { p.Files[i], p.Files[j] = p.Files[j], p.Files[i] }
-
-// FileRuns are sorted by package for the reduction into PakRuns.
-func lessFileRun(x, y interface{}) bool {
-	return x.(*FileRun).File.Pak.less(y.(*FileRun).File.Pak)
-}
-
-// newPakRun allocates a new PakRun from the *FileRun run h.
-func newPakRun(h RunList) interface{} {
-	pak := h[0].(*FileRun).File.Pak
-	files := make([]*FileRun, len(h))
-	for i, x := range h {
-		files[i] = x.(*FileRun)
-	}
-	run := &PakRun{pak, files}
-	sort.Sort(run) // files were sorted by package; sort them by file now
-	return run
-}
-
-// ----------------------------------------------------------------------------
-// HitList
-
-// A HitList describes a list of PakRuns.
-type HitList []*PakRun
-
-// PakRuns are sorted by package.
-func lessPakRun(x, y interface{}) bool { return x.(*PakRun).Pak.less(y.(*PakRun).Pak) }
-
-func reduce(h0 RunList) HitList {
-	// reduce a list of Spots into a list of FileRuns
-	h1 := h0.reduce(lessSpot, newFileRun)
-	// reduce a list of FileRuns into a list of PakRuns
-	h2 := h1.reduce(lessFileRun, newPakRun)
-	// sort the list of PakRuns by package
-	h2.sort(lessPakRun)
-	// create a HitList
-	h := make(HitList, len(h2))
-	for i, p := range h2 {
-		h[i] = p.(*PakRun)
-	}
-	return h
-}
-
-// filter returns a new HitList created by filtering
-// all PakRuns from h that have a matching pakname.
-func (h HitList) filter(pakname string) HitList {
-	var hh HitList
-	for _, p := range h {
-		if p.Pak.Name == pakname {
-			hh = append(hh, p)
-		}
-	}
-	return hh
-}
-
-// ----------------------------------------------------------------------------
-// AltWords
-
-type wordPair struct {
-	canon string // canonical word spelling (all lowercase)
-	alt   string // alternative spelling
-}
-
-// An AltWords describes a list of alternative spellings for a
-// canonical (all lowercase) spelling of a word.
-type AltWords struct {
-	Canon string   // canonical word spelling (all lowercase)
-	Alts  []string // alternative spelling for the same word
-}
-
-// wordPairs are sorted by their canonical spelling.
-func lessWordPair(x, y interface{}) bool { return x.(*wordPair).canon < y.(*wordPair).canon }
-
-// newAltWords allocates a new AltWords from the *wordPair run h.
-func newAltWords(h RunList) interface{} {
-	canon := h[0].(*wordPair).canon
-	alts := make([]string, len(h))
-	for i, x := range h {
-		alts[i] = x.(*wordPair).alt
-	}
-	return &AltWords{canon, alts}
-}
-
-func (a *AltWords) filter(s string) *AltWords {
-	var alts []string
-	for _, w := range a.Alts {
-		if w != s {
-			alts = append(alts, w)
-		}
-	}
-	if len(alts) > 0 {
-		return &AltWords{a.Canon, alts}
-	}
-	return nil
-}
-
-// Ident stores information about external identifiers in order to create
-// links to package documentation.
-type Ident struct {
-	Path    string // e.g. "net/http"
-	Package string // e.g. "http"
-	Name    string // e.g. "NewRequest"
-	Doc     string // e.g. "NewRequest returns a new Request..."
-}
-
-// byImportCount sorts the given slice of Idents by the import
-// counts of the packages to which they belong.
-type byImportCount struct {
-	Idents      []Ident
-	ImportCount map[string]int
-}
-
-func (ic byImportCount) Len() int {
-	return len(ic.Idents)
-}
-
-func (ic byImportCount) Less(i, j int) bool {
-	ri := ic.ImportCount[ic.Idents[i].Path]
-	rj := ic.ImportCount[ic.Idents[j].Path]
-	if ri == rj {
-		return ic.Idents[i].Path < ic.Idents[j].Path
-	}
-	return ri > rj
-}
-
-func (ic byImportCount) Swap(i, j int) {
-	ic.Idents[i], ic.Idents[j] = ic.Idents[j], ic.Idents[i]
-}
-
-func (ic byImportCount) String() string {
-	buf := bytes.NewBuffer([]byte("["))
-	for _, v := range ic.Idents {
-		buf.WriteString(fmt.Sprintf("\n\t%s, %s (%d)", v.Path, v.Name, ic.ImportCount[v.Path]))
-	}
-	buf.WriteString("\n]")
-	return buf.String()
-}
-
-// filter creates a new Ident list where the results match the given
-// package name.
-func (ic byImportCount) filter(pakname string) []Ident {
-	if ic.Idents == nil {
-		return nil
-	}
-	var res []Ident
-	for _, i := range ic.Idents {
-		if i.Package == pakname {
-			res = append(res, i)
-		}
-	}
-	return res
-}
-
-// top returns the top n identifiers.
-func (ic byImportCount) top(n int) []Ident {
-	if len(ic.Idents) > n {
-		return ic.Idents[:n]
-	}
-	return ic.Idents
-}
-
-// ----------------------------------------------------------------------------
-// Indexer
-
-type IndexResult struct {
-	Decls  RunList // package-level declarations (with snippets)
-	Others RunList // all other occurrences
-}
-
-// Statistics provides statistics information for an index.
-type Statistics struct {
-	Bytes int // total size of indexed source files
-	Files int // number of indexed source files
-	Lines int // number of lines (all files)
-	Words int // number of different identifiers
-	Spots int // number of identifier occurrences
-}
-
-// An Indexer maintains the data structures and provides the machinery
-// for indexing .go files under a file tree. It implements the path.Visitor
-// interface for walking file trees, and the ast.Visitor interface for
-// walking Go ASTs.
-type Indexer struct {
-	c          *Corpus
-	fset       *token.FileSet // file set for all indexed files
-	fsOpenGate chan bool      // send pre fs.Open; receive on close
-
-	mu            sync.Mutex              // guards all the following
-	sources       bytes.Buffer            // concatenated sources
-	strings       map[string]string       // interned string
-	packages      map[Pak]*Pak            // interned *Paks
-	words         map[string]*IndexResult // RunLists of Spots
-	snippets      []*Snippet              // indices are stored in SpotInfos
-	current       *token.File             // last file added to file set
-	file          *File                   // AST for current file
-	decl          ast.Decl                // AST for current decl
-	stats         Statistics
-	throttle      *util.Throttle
-	importCount   map[string]int                 // package path ("net/http") => count
-	packagePath   map[string]map[string]bool     // "template" => "text/template" => true
-	exports       map[string]map[string]SpotKind // "net/http" => "ListenAndServe" => FuncDecl
-	curPkgExports map[string]SpotKind
-	idents        map[SpotKind]map[string][]Ident // kind => name => list of Idents
-}
-
-func (x *Indexer) intern(s string) string {
-	if s, ok := x.strings[s]; ok {
-		return s
-	}
-	x.strings[s] = s
-	return s
-}
-
-func (x *Indexer) lookupPackage(path, name string) *Pak {
-	// In the source directory tree, more than one package may
-	// live in the same directory. For the packages map, construct
-	// a key that includes both the directory path and the package
-	// name.
-	key := Pak{Path: x.intern(path), Name: x.intern(name)}
-	pak := x.packages[key]
-	if pak == nil {
-		pak = &key
-		x.packages[key] = pak
-	}
-	return pak
-}
-
-func (x *Indexer) addSnippet(s *Snippet) int {
-	index := len(x.snippets)
-	x.snippets = append(x.snippets, s)
-	return index
-}
-
-func (x *Indexer) visitIdent(kind SpotKind, id *ast.Ident) {
-	if id == nil {
-		return
-	}
-	name := x.intern(id.Name)
-
-	switch kind {
-	case TypeDecl, FuncDecl, ConstDecl, VarDecl:
-		x.curPkgExports[name] = kind
-	}
-
-	lists, found := x.words[name]
-	if !found {
-		lists = new(IndexResult)
-		x.words[name] = lists
-	}
-
-	if kind == Use || x.decl == nil {
-		if x.c.IndexGoCode {
-			// not a declaration or no snippet required
-			info := makeSpotInfo(kind, x.current.Line(id.Pos()), false)
-			lists.Others = append(lists.Others, Spot{x.file, info})
-		}
-	} else {
-		// a declaration with snippet
-		index := x.addSnippet(NewSnippet(x.fset, x.decl, id))
-		info := makeSpotInfo(kind, index, true)
-		lists.Decls = append(lists.Decls, Spot{x.file, info})
-	}
-
-	x.stats.Spots++
-}
-
-func (x *Indexer) visitFieldList(kind SpotKind, flist *ast.FieldList) {
-	for _, f := range flist.List {
-		x.decl = nil // no snippets for fields
-		for _, name := range f.Names {
-			x.visitIdent(kind, name)
-		}
-		ast.Walk(x, f.Type)
-		// ignore tag - not indexed at the moment
-	}
-}
-
-func (x *Indexer) visitSpec(kind SpotKind, spec ast.Spec) {
-	switch n := spec.(type) {
-	case *ast.ImportSpec:
-		x.visitIdent(ImportDecl, n.Name)
-		if n.Path != nil {
-			if imp, err := strconv.Unquote(n.Path.Value); err == nil {
-				x.importCount[x.intern(imp)]++
-			}
-		}
-
-	case *ast.AliasSpec:
-		x.visitIdent(kind, n.Name)
-		ast.Walk(x, n.Orig)
-
-	case *ast.ValueSpec:
-		for _, n := range n.Names {
-			x.visitIdent(kind, n)
-		}
-		ast.Walk(x, n.Type)
-		for _, v := range n.Values {
-			ast.Walk(x, v)
-		}
-
-	case *ast.TypeSpec:
-		x.visitIdent(TypeDecl, n.Name)
-		ast.Walk(x, n.Type)
-	}
-}
-
-func (x *Indexer) visitGenDecl(decl *ast.GenDecl) {
-	kind := VarDecl
-	if decl.Tok == token.CONST {
-		kind = ConstDecl
-	}
-	x.decl = decl
-	for _, s := range decl.Specs {
-		x.visitSpec(kind, s)
-	}
-}
-
-func (x *Indexer) Visit(node ast.Node) ast.Visitor {
-	switch n := node.(type) {
-	case nil:
-		// nothing to do
-
-	case *ast.Ident:
-		x.visitIdent(Use, n)
-
-	case *ast.FieldList:
-		x.visitFieldList(VarDecl, n)
-
-	case *ast.InterfaceType:
-		x.visitFieldList(MethodDecl, n.Methods)
-
-	case *ast.DeclStmt:
-		// local declarations should only be *ast.GenDecls;
-		// ignore incorrect ASTs
-		if decl, ok := n.Decl.(*ast.GenDecl); ok {
-			x.decl = nil // no snippets for local declarations
-			x.visitGenDecl(decl)
-		}
-
-	case *ast.GenDecl:
-		x.decl = n
-		x.visitGenDecl(n)
-
-	case *ast.FuncDecl:
-		kind := FuncDecl
-		if n.Recv != nil {
-			kind = MethodDecl
-			ast.Walk(x, n.Recv)
-		}
-		x.decl = n
-		x.visitIdent(kind, n.Name)
-		ast.Walk(x, n.Type)
-		if n.Body != nil {
-			ast.Walk(x, n.Body)
-		}
-
-	case *ast.File:
-		x.decl = nil
-		x.visitIdent(PackageClause, n.Name)
-		for _, d := range n.Decls {
-			ast.Walk(x, d)
-		}
-
-	default:
-		return x
-	}
-
-	return nil
-}
-
-// addFile adds a file to the index if possible and returns the file set file
-// and the file's AST if it was successfully parsed as a Go file. If addFile
-// failed (that is, if the file was not added), it returns file == nil.
-func (x *Indexer) addFile(f vfs.ReadSeekCloser, filename string, goFile bool) (file *token.File, ast *ast.File) {
-	defer f.Close()
-
-	// The file set's base offset and x.sources size must be in lock-step;
-	// this permits the direct mapping of suffix array lookup results
-	// to corresponding Pos values.
-	//
-	// When a file is added to the file set, its offset base increases by
-	// the size of the file + 1; and the initial base offset is 1. Add an
-	// extra byte to the sources here.
-	x.sources.WriteByte(0)
-
-	// If the sources length doesn't match the file set base at this point
-	// the file set implementation changed or we have another error.
-	base := x.fset.Base()
-	if x.sources.Len() != base {
-		panic("internal error: file base incorrect")
-	}
-
-	// append file contents (src) to x.sources
-	if _, err := x.sources.ReadFrom(f); err == nil {
-		src := x.sources.Bytes()[base:]
-
-		if goFile {
-			// parse the file and in the process add it to the file set
-			if ast, err = parser.ParseFile(x.fset, filename, src, parser.ParseComments); err == nil {
-				file = x.fset.File(ast.Pos()) // ast.Pos() is inside the file
-				return
-			}
-			// file has parse errors, and the AST may be incorrect -
-			// set lines information explicitly and index as ordinary
-			// text file (cannot fall through to the text case below
-			// because the file has already been added to the file set
-			// by the parser)
-			file = x.fset.File(token.Pos(base)) // token.Pos(base) is inside the file
-			file.SetLinesForContent(src)
-			ast = nil
-			return
-		}
-
-		if util.IsText(src) {
-			// only add the file to the file set (for the full text index)
-			file = x.fset.AddFile(filename, x.fset.Base(), len(src))
-			file.SetLinesForContent(src)
-			return
-		}
-	}
-
-	// discard possibly added data
-	x.sources.Truncate(base - 1) // -1 to remove added byte 0 since no file was added
-	return
-}
-
-// Design note: Using an explicit white list of permitted files for indexing
-// makes sure that the important files are included and massively reduces the
-// number of files to index. The advantage over a blacklist is that unexpected
-// (non-blacklisted) files won't suddenly explode the index.
-
-// Files are whitelisted if they have a file name or extension
-// present as key in whitelisted.
-var whitelisted = map[string]bool{
-	".bash":        true,
-	".c":           true,
-	".cc":          true,
-	".cpp":         true,
-	".cxx":         true,
-	".css":         true,
-	".go":          true,
-	".goc":         true,
-	".h":           true,
-	".hh":          true,
-	".hpp":         true,
-	".hxx":         true,
-	".html":        true,
-	".js":          true,
-	".out":         true,
-	".py":          true,
-	".s":           true,
-	".sh":          true,
-	".txt":         true,
-	".xml":         true,
-	"AUTHORS":      true,
-	"CONTRIBUTORS": true,
-	"LICENSE":      true,
-	"Makefile":     true,
-	"PATENTS":      true,
-	"README":       true,
-}
-
-// isWhitelisted returns true if a file is on the list
-// of "permitted" files for indexing. The filename must
-// be the directory-local name of the file.
-func isWhitelisted(filename string) bool {
-	key := pathpkg.Ext(filename)
-	if key == "" {
-		// file has no extension - use entire filename
-		key = filename
-	}
-	return whitelisted[key]
-}
-
-func (x *Indexer) indexDocs(dirname string, filename string, astFile *ast.File) {
-	pkgName := x.intern(astFile.Name.Name)
-	if pkgName == "main" {
-		return
-	}
-	pkgPath := x.intern(strings.TrimPrefix(strings.TrimPrefix(dirname, "/src/"), "pkg/"))
-	astPkg := ast.Package{
-		Name: pkgName,
-		Files: map[string]*ast.File{
-			filename: astFile,
-		},
-	}
-	var m doc.Mode
-	docPkg := doc.New(&astPkg, dirname, m)
-	addIdent := func(sk SpotKind, name string, docstr string) {
-		if x.idents[sk] == nil {
-			x.idents[sk] = make(map[string][]Ident)
-		}
-		name = x.intern(name)
-		x.idents[sk][name] = append(x.idents[sk][name], Ident{
-			Path:    pkgPath,
-			Package: pkgName,
-			Name:    name,
-			Doc:     doc.Synopsis(docstr),
-		})
-	}
-
-	if x.idents[PackageClause] == nil {
-		x.idents[PackageClause] = make(map[string][]Ident)
-	}
-	// List of words under which the package identifier will be stored.
-	// This includes the package name and the components of the directory
-	// in which it resides.
-	words := strings.Split(pathpkg.Dir(pkgPath), "/")
-	if words[0] == "." {
-		words = []string{}
-	}
-	name := x.intern(docPkg.Name)
-	synopsis := doc.Synopsis(docPkg.Doc)
-	words = append(words, name)
-	pkgIdent := Ident{
-		Path:    pkgPath,
-		Package: pkgName,
-		Name:    name,
-		Doc:     synopsis,
-	}
-	for _, word := range words {
-		word = x.intern(word)
-		found := false
-		pkgs := x.idents[PackageClause][word]
-		for i, p := range pkgs {
-			if p.Path == pkgPath {
-				if docPkg.Doc != "" {
-					p.Doc = synopsis
-					pkgs[i] = p
-				}
-				found = true
-				break
-			}
-		}
-		if !found {
-			x.idents[PackageClause][word] = append(x.idents[PackageClause][word], pkgIdent)
-		}
-	}
-
-	for _, c := range docPkg.Consts {
-		for _, name := range c.Names {
-			addIdent(ConstDecl, name, c.Doc)
-		}
-	}
-	for _, t := range docPkg.Types {
-		addIdent(TypeDecl, t.Name, t.Doc)
-		for _, c := range t.Consts {
-			for _, name := range c.Names {
-				addIdent(ConstDecl, name, c.Doc)
-			}
-		}
-		for _, v := range t.Vars {
-			for _, name := range v.Names {
-				addIdent(VarDecl, name, v.Doc)
-			}
-		}
-		for _, f := range t.Funcs {
-			addIdent(FuncDecl, f.Name, f.Doc)
-		}
-		for _, f := range t.Methods {
-			addIdent(MethodDecl, f.Name, f.Doc)
-			// Change the name of methods to be "<typename>.<methodname>".
-			// They will still be indexed as <methodname>.
-			idents := x.idents[MethodDecl][f.Name]
-			idents[len(idents)-1].Name = x.intern(t.Name + "." + f.Name)
-		}
-	}
-	for _, v := range docPkg.Vars {
-		for _, name := range v.Names {
-			addIdent(VarDecl, name, v.Doc)
-		}
-	}
-	for _, f := range docPkg.Funcs {
-		addIdent(FuncDecl, f.Name, f.Doc)
-	}
-}
-
-func (x *Indexer) indexGoFile(dirname string, filename string, file *token.File, astFile *ast.File) {
-	pkgName := astFile.Name.Name
-
-	if x.c.IndexGoCode {
-		x.current = file
-		pak := x.lookupPackage(dirname, pkgName)
-		x.file = &File{filename, pak}
-		ast.Walk(x, astFile)
-	}
-
-	if x.c.IndexDocs {
-		// Test files are already filtered out in visitFile if IndexGoCode and
-		// IndexFullText are false.  Otherwise, check here.
-		isTestFile := (x.c.IndexGoCode || x.c.IndexFullText) &&
-			(strings.HasSuffix(filename, "_test.go") || strings.HasPrefix(dirname, "/test/"))
-		if !isTestFile {
-			x.indexDocs(dirname, filename, astFile)
-		}
-	}
-
-	ppKey := x.intern(pkgName)
-	if _, ok := x.packagePath[ppKey]; !ok {
-		x.packagePath[ppKey] = make(map[string]bool)
-	}
-	pkgPath := x.intern(strings.TrimPrefix(strings.TrimPrefix(dirname, "/src/"), "pkg/"))
-	x.packagePath[ppKey][pkgPath] = true
-
-	// Merge in exported symbols found walking this file into
-	// the map for that package.
-	if len(x.curPkgExports) > 0 {
-		dest, ok := x.exports[pkgPath]
-		if !ok {
-			dest = make(map[string]SpotKind)
-			x.exports[pkgPath] = dest
-		}
-		for k, v := range x.curPkgExports {
-			dest[k] = v
-		}
-	}
-}
-
-func (x *Indexer) visitFile(dirname string, fi os.FileInfo) {
-	if fi.IsDir() || !x.c.IndexEnabled {
-		return
-	}
-
-	filename := pathpkg.Join(dirname, fi.Name())
-	goFile := isGoFile(fi)
-
-	switch {
-	case x.c.IndexFullText:
-		if !isWhitelisted(fi.Name()) {
-			return
-		}
-	case x.c.IndexGoCode:
-		if !goFile {
-			return
-		}
-	case x.c.IndexDocs:
-		if !goFile ||
-			strings.HasSuffix(fi.Name(), "_test.go") ||
-			strings.HasPrefix(dirname, "/test/") {
-			return
-		}
-	default:
-		// No indexing turned on.
-		return
-	}
-
-	x.fsOpenGate <- true
-	defer func() { <-x.fsOpenGate }()
-
-	// open file
-	f, err := x.c.fs.Open(filename)
-	if err != nil {
-		return
-	}
-
-	x.mu.Lock()
-	defer x.mu.Unlock()
-
-	x.throttle.Throttle()
-
-	x.curPkgExports = make(map[string]SpotKind)
-	file, fast := x.addFile(f, filename, goFile)
-	if file == nil {
-		return // addFile failed
-	}
-
-	if fast != nil {
-		x.indexGoFile(dirname, fi.Name(), file, fast)
-	}
-
-	// update statistics
-	x.stats.Bytes += file.Size()
-	x.stats.Files++
-	x.stats.Lines += file.LineCount()
-}
-
-// indexOptions contains information that affects the contents of an index.
-type indexOptions struct {
-	// Docs provides documentation search results.
-	// It is only consulted if IndexEnabled is true.
-	// The default value is true.
-	Docs bool
-
-	// GoCode provides Go source code search results.
-	// It is only consulted if IndexEnabled is true.
-	// The default value is true.
-	GoCode bool
-
-	// FullText provides search results from all files.
-	// It is only consulted if IndexEnabled is true.
-	// The default value is true.
-	FullText bool
-
-	// MaxResults optionally specifies the maximum results for indexing.
-	// The default is 1000.
-	MaxResults int
-}
-
-// ----------------------------------------------------------------------------
-// Index
-
-type LookupResult struct {
-	Decls  HitList // package-level declarations (with snippets)
-	Others HitList // all other occurrences
-}
-
-type Index struct {
-	fset        *token.FileSet           // file set used during indexing; nil if no textindex
-	suffixes    *suffixarray.Index       // suffixes for concatenated sources; nil if no textindex
-	words       map[string]*LookupResult // maps words to hit lists
-	alts        map[string]*AltWords     // maps canonical(words) to lists of alternative spellings
-	snippets    []*Snippet               // all snippets, indexed by snippet index
-	stats       Statistics
-	importCount map[string]int                 // package path ("net/http") => count
-	packagePath map[string]map[string]bool     // "template" => "text/template" => true
-	exports     map[string]map[string]SpotKind // "net/http" => "ListenAndServe" => FuncDecl
-	idents      map[SpotKind]map[string][]Ident
-	opts        indexOptions
-}
-
-func canonical(w string) string { return strings.ToLower(w) }
-
-// Somewhat arbitrary, but I figure low enough to not hurt disk-based filesystems
-// consuming file descriptors, where some systems have low 256 or 512 limits.
-// Go should have a built-in way to cap fd usage under the ulimit.
-const (
-	maxOpenFiles = 200
-	maxOpenDirs  = 50
-)
-
-func (c *Corpus) throttle() float64 {
-	if c.IndexThrottle <= 0 {
-		return 0.9
-	}
-	if c.IndexThrottle > 1.0 {
-		return 1.0
-	}
-	return c.IndexThrottle
-}
-
-// NewIndex creates a new index for the .go files provided by the corpus.
-func (c *Corpus) NewIndex() *Index {
-	// initialize Indexer
-	// (use some reasonably sized maps to start)
-	x := &Indexer{
-		c:           c,
-		fset:        token.NewFileSet(),
-		fsOpenGate:  make(chan bool, maxOpenFiles),
-		strings:     make(map[string]string),
-		packages:    make(map[Pak]*Pak, 256),
-		words:       make(map[string]*IndexResult, 8192),
-		throttle:    util.NewThrottle(c.throttle(), 100*time.Millisecond), // run at least 0.1s at a time
-		importCount: make(map[string]int),
-		packagePath: make(map[string]map[string]bool),
-		exports:     make(map[string]map[string]SpotKind),
-		idents:      make(map[SpotKind]map[string][]Ident, 4),
-	}
-
-	// index all files in the directories given by dirnames
-	var wg sync.WaitGroup // outstanding ReadDir + visitFile
-	dirGate := make(chan bool, maxOpenDirs)
-	for dirname := range c.fsDirnames() {
-		if c.IndexDirectory != nil && !c.IndexDirectory(dirname) {
-			continue
-		}
-		dirGate <- true
-		wg.Add(1)
-		go func(dirname string) {
-			defer func() { <-dirGate }()
-			defer wg.Done()
-
-			list, err := c.fs.ReadDir(dirname)
-			if err != nil {
-				log.Printf("ReadDir(%q): %v; skipping directory", dirname, err)
-				return // ignore this directory
-			}
-			for _, fi := range list {
-				wg.Add(1)
-				go func(fi os.FileInfo) {
-					defer wg.Done()
-					x.visitFile(dirname, fi)
-				}(fi)
-			}
-		}(dirname)
-	}
-	wg.Wait()
-
-	if !c.IndexFullText {
-		// the file set, the current file, and the sources are
-		// not needed after indexing if no text index is built -
-		// help GC and clear them
-		x.fset = nil
-		x.sources.Reset()
-		x.current = nil // contains reference to fset!
-	}
-
-	// for each word, reduce the RunLists into a LookupResult;
-	// also collect the word with its canonical spelling in a
-	// word list for later computation of alternative spellings
-	words := make(map[string]*LookupResult)
-	var wlist RunList
-	for w, h := range x.words {
-		decls := reduce(h.Decls)
-		others := reduce(h.Others)
-		words[w] = &LookupResult{
-			Decls:  decls,
-			Others: others,
-		}
-		wlist = append(wlist, &wordPair{canonical(w), w})
-		x.throttle.Throttle()
-	}
-	x.stats.Words = len(words)
-
-	// reduce the word list {canonical(w), w} into
-	// a list of AltWords runs {canonical(w), {w}}
-	alist := wlist.reduce(lessWordPair, newAltWords)
-
-	// convert alist into a map of alternative spellings
-	alts := make(map[string]*AltWords)
-	for i := 0; i < len(alist); i++ {
-		a := alist[i].(*AltWords)
-		alts[a.Canon] = a
-	}
-
-	// create text index
-	var suffixes *suffixarray.Index
-	if c.IndexFullText {
-		suffixes = suffixarray.New(x.sources.Bytes())
-	}
-
-	// sort idents by the number of imports of their respective packages
-	for _, idMap := range x.idents {
-		for _, ir := range idMap {
-			sort.Sort(byImportCount{ir, x.importCount})
-		}
-	}
-
-	return &Index{
-		fset:        x.fset,
-		suffixes:    suffixes,
-		words:       words,
-		alts:        alts,
-		snippets:    x.snippets,
-		stats:       x.stats,
-		importCount: x.importCount,
-		packagePath: x.packagePath,
-		exports:     x.exports,
-		idents:      x.idents,
-		opts: indexOptions{
-			Docs:       x.c.IndexDocs,
-			GoCode:     x.c.IndexGoCode,
-			FullText:   x.c.IndexFullText,
-			MaxResults: x.c.MaxResults,
-		},
-	}
-}
-
-var ErrFileIndexVersion = errors.New("file index version out of date")
-
-const fileIndexVersion = 3
-
-// fileIndex is the subset of Index that's gob-encoded for use by
-// Index.Write and Index.Read.
-type fileIndex struct {
-	Version     int
-	Words       map[string]*LookupResult
-	Alts        map[string]*AltWords
-	Snippets    []*Snippet
-	Fulltext    bool
-	Stats       Statistics
-	ImportCount map[string]int
-	PackagePath map[string]map[string]bool
-	Exports     map[string]map[string]SpotKind
-	Idents      map[SpotKind]map[string][]Ident
-	Opts        indexOptions
-}
-
-func (x *fileIndex) Write(w io.Writer) error {
-	return gob.NewEncoder(w).Encode(x)
-}
-
-func (x *fileIndex) Read(r io.Reader) error {
-	return gob.NewDecoder(r).Decode(x)
-}
-
-// WriteTo writes the index x to w.
-func (x *Index) WriteTo(w io.Writer) (n int64, err error) {
-	w = countingWriter{&n, w}
-	fulltext := false
-	if x.suffixes != nil {
-		fulltext = true
-	}
-	fx := fileIndex{
-		Version:     fileIndexVersion,
-		Words:       x.words,
-		Alts:        x.alts,
-		Snippets:    x.snippets,
-		Fulltext:    fulltext,
-		Stats:       x.stats,
-		ImportCount: x.importCount,
-		PackagePath: x.packagePath,
-		Exports:     x.exports,
-		Idents:      x.idents,
-		Opts:        x.opts,
-	}
-	if err := fx.Write(w); err != nil {
-		return 0, err
-	}
-	if fulltext {
-		encode := func(x interface{}) error {
-			return gob.NewEncoder(w).Encode(x)
-		}
-		if err := x.fset.Write(encode); err != nil {
-			return 0, err
-		}
-		if err := x.suffixes.Write(w); err != nil {
-			return 0, err
-		}
-	}
-	return n, nil
-}
-
-// ReadFrom reads the index from r into x; x must not be nil.
-// If r does not also implement io.ByteReader, it will be wrapped in a bufio.Reader.
-// If the index is from an old version, the error is ErrFileIndexVersion.
-func (x *Index) ReadFrom(r io.Reader) (n int64, err error) {
-	// We use the ability to read bytes as a plausible surrogate for buffering.
-	if _, ok := r.(io.ByteReader); !ok {
-		r = bufio.NewReader(r)
-	}
-	r = countingReader{&n, r.(byteReader)}
-	var fx fileIndex
-	if err := fx.Read(r); err != nil {
-		return n, err
-	}
-	if fx.Version != fileIndexVersion {
-		return 0, ErrFileIndexVersion
-	}
-	x.words = fx.Words
-	x.alts = fx.Alts
-	x.snippets = fx.Snippets
-	x.stats = fx.Stats
-	x.importCount = fx.ImportCount
-	x.packagePath = fx.PackagePath
-	x.exports = fx.Exports
-	x.idents = fx.Idents
-	x.opts = fx.Opts
-	if fx.Fulltext {
-		x.fset = token.NewFileSet()
-		decode := func(x interface{}) error {
-			return gob.NewDecoder(r).Decode(x)
-		}
-		if err := x.fset.Read(decode); err != nil {
-			return n, err
-		}
-		x.suffixes = new(suffixarray.Index)
-		if err := x.suffixes.Read(r); err != nil {
-			return n, err
-		}
-	}
-	return n, nil
-}
-
-// Stats returns index statistics.
-func (x *Index) Stats() Statistics {
-	return x.stats
-}
-
-// ImportCount returns a map from import paths to how many times they were seen.
-func (x *Index) ImportCount() map[string]int {
-	return x.importCount
-}
-
-// PackagePath returns a map from short package name to a set
-// of full package path names that use that short package name.
-func (x *Index) PackagePath() map[string]map[string]bool {
-	return x.packagePath
-}
-
-// Exports returns a map from full package path to exported
-// symbol name to its type.
-func (x *Index) Exports() map[string]map[string]SpotKind {
-	return x.exports
-}
-
-// Idents returns a map from identifier type to exported
-// symbol name to the list of identifiers matching that name.
-func (x *Index) Idents() map[SpotKind]map[string][]Ident {
-	return x.idents
-}
-
-func (x *Index) lookupWord(w string) (match *LookupResult, alt *AltWords) {
-	match = x.words[w]
-	alt = x.alts[canonical(w)]
-	// remove current spelling from alternatives
-	// (if there is no match, the alternatives do
-	// not contain the current spelling)
-	if match != nil && alt != nil {
-		alt = alt.filter(w)
-	}
-	return
-}
-
-// isIdentifier reports whether s is a Go identifier.
-func isIdentifier(s string) bool {
-	for i, ch := range s {
-		if unicode.IsLetter(ch) || ch == '_' || i > 0 && unicode.IsDigit(ch) {
-			continue
-		}
-		return false
-	}
-	return len(s) > 0
-}
-
-// For a given query, which is either a single identifier or a qualified
-// identifier, Lookup returns a SearchResult containing packages, a LookupResult, a
-// list of alternative spellings, and identifiers, if any. Any and all results
-// may be nil.  If the query syntax is wrong, an error is reported.
-func (x *Index) Lookup(query string) (*SearchResult, error) {
-	ss := strings.Split(query, ".")
-
-	// check query syntax
-	for _, s := range ss {
-		if !isIdentifier(s) {
-			return nil, errors.New("all query parts must be identifiers")
-		}
-	}
-	rslt := &SearchResult{
-		Query:  query,
-		Idents: make(map[SpotKind][]Ident, 5),
-	}
-	// handle simple and qualified identifiers
-	switch len(ss) {
-	case 1:
-		ident := ss[0]
-		rslt.Hit, rslt.Alt = x.lookupWord(ident)
-		if rslt.Hit != nil {
-			// found a match - filter packages with same name
-			// for the list of packages called ident, if any
-			rslt.Pak = rslt.Hit.Others.filter(ident)
-		}
-		for k, v := range x.idents {
-			const rsltLimit = 50
-			ids := byImportCount{v[ident], x.importCount}
-			rslt.Idents[k] = ids.top(rsltLimit)
-		}
-
-	case 2:
-		pakname, ident := ss[0], ss[1]
-		rslt.Hit, rslt.Alt = x.lookupWord(ident)
-		if rslt.Hit != nil {
-			// found a match - filter by package name
-			// (no paks - package names are not qualified)
-			decls := rslt.Hit.Decls.filter(pakname)
-			others := rslt.Hit.Others.filter(pakname)
-			rslt.Hit = &LookupResult{decls, others}
-		}
-		for k, v := range x.idents {
-			ids := byImportCount{v[ident], x.importCount}
-			rslt.Idents[k] = ids.filter(pakname)
-		}
-
-	default:
-		return nil, errors.New("query is not a (qualified) identifier")
-	}
-
-	return rslt, nil
-}
-
-func (x *Index) Snippet(i int) *Snippet {
-	// handle illegal snippet indices gracefully
-	if 0 <= i && i < len(x.snippets) {
-		return x.snippets[i]
-	}
-	return nil
-}
-
-type positionList []struct {
-	filename string
-	line     int
-}
-
-func (list positionList) Len() int           { return len(list) }
-func (list positionList) Less(i, j int) bool { return list[i].filename < list[j].filename }
-func (list positionList) Swap(i, j int)      { list[i], list[j] = list[j], list[i] }
-
-// unique returns the list sorted and with duplicate entries removed
-func unique(list []int) []int {
-	sort.Ints(list)
-	var last int
-	i := 0
-	for _, x := range list {
-		if i == 0 || x != last {
-			last = x
-			list[i] = x
-			i++
-		}
-	}
-	return list[0:i]
-}
-
-// A FileLines value specifies a file and line numbers within that file.
-type FileLines struct {
-	Filename string
-	Lines    []int
-}
-
-// LookupRegexp returns the number of matches and the matches where a regular
-// expression r is found in the full text index. At most n matches are
-// returned (thus found <= n).
-//
-func (x *Index) LookupRegexp(r *regexp.Regexp, n int) (found int, result []FileLines) {
-	if x.suffixes == nil || n <= 0 {
-		return
-	}
-	// n > 0
-
-	var list positionList
-	// FindAllIndex may return matches that span across file boundaries.
-	// Such matches are unlikely, but after eliminating them we may end up
-	// with fewer than n matches. If we don't have enough at the end, redo
-	// the search with an increased value n1, but only if FindAllIndex
-	// returned all the requested matches in the first place (if it
-	// returned fewer than that there cannot be more).
-	for n1 := n; found < n; n1 += n - found {
-		found = 0
-		matches := x.suffixes.FindAllIndex(r, n1)
-		// compute files, exclude matches that span file boundaries,
-		// and map offsets to file-local offsets
-		list = make(positionList, len(matches))
-		for _, m := range matches {
-			// by construction, an offset corresponds to the Pos value
-			// for the file set - use it to get the file and line
-			p := token.Pos(m[0])
-			if file := x.fset.File(p); file != nil {
-				if base := file.Base(); base <= m[1] && m[1] <= base+file.Size() {
-					// match [m[0], m[1]) is within the file boundaries
-					list[found].filename = file.Name()
-					list[found].line = file.Line(p)
-					found++
-				}
-			}
-		}
-		if found == n || len(matches) < n1 {
-			// found all matches or there's no chance to find more
-			break
-		}
-	}
-	list = list[0:found]
-	sort.Sort(list) // sort by filename
-
-	// collect matches belonging to the same file
-	var last string
-	var lines []int
-	addLines := func() {
-		if len(lines) > 0 {
-			// remove duplicate lines
-			result = append(result, FileLines{last, unique(lines)})
-			lines = nil
-		}
-	}
-	for _, m := range list {
-		if m.filename != last {
-			addLines()
-			last = m.filename
-		}
-		lines = append(lines, m.line)
-	}
-	addLines()
-
-	return
-}
-
-// invalidateIndex should be called whenever any of the file systems
-// under godoc's observation change so that the indexer is kicked off.
-func (c *Corpus) invalidateIndex() {
-	c.fsModified.Set(nil)
-	c.refreshMetadata()
-}
-
-// indexUpToDate() returns true if the search index is not older
-// than any of the file systems under godoc's observation.
-//
-func (c *Corpus) indexUpToDate() bool {
-	_, fsTime := c.fsModified.Get()
-	_, siTime := c.searchIndex.Get()
-	return !fsTime.After(siTime)
-}
-
-// feedDirnames feeds the directory names of all directories
-// under the file system given by root to channel ch.
-//
-func (c *Corpus) feedDirnames(ch chan<- string) {
-	if dir, _ := c.fsTree.Get(); dir != nil {
-		for d := range dir.(*Directory).iter(false) {
-			ch <- d.Path
-		}
-	}
-}
-
-// fsDirnames() returns a channel sending all directory names
-// of all the file systems under godoc's observation.
-//
-func (c *Corpus) fsDirnames() <-chan string {
-	ch := make(chan string, 256) // buffered for fewer context switches
-	go func() {
-		c.feedDirnames(ch)
-		close(ch)
-	}()
-	return ch
-}
-
-// CompatibleWith reports whether the Index x is compatible with the corpus
-// indexing options set in c.
-func (x *Index) CompatibleWith(c *Corpus) bool {
-	return x.opts.Docs == c.IndexDocs &&
-		x.opts.GoCode == c.IndexGoCode &&
-		x.opts.FullText == c.IndexFullText &&
-		x.opts.MaxResults == c.MaxResults
-}
-
-func (c *Corpus) readIndex(filenames string) error {
-	matches, err := filepath.Glob(filenames)
-	if err != nil {
-		return err
-	} else if matches == nil {
-		return fmt.Errorf("no index files match %q", filenames)
-	}
-	sort.Strings(matches) // make sure files are in the right order
-	files := make([]io.Reader, 0, len(matches))
-	for _, filename := range matches {
-		f, err := os.Open(filename)
-		if err != nil {
-			return err
-		}
-		defer f.Close()
-		files = append(files, f)
-	}
-	return c.ReadIndexFrom(io.MultiReader(files...))
-}
-
-// ReadIndexFrom sets the current index from the serialized version found in r.
-func (c *Corpus) ReadIndexFrom(r io.Reader) error {
-	x := new(Index)
-	if _, err := x.ReadFrom(r); err != nil {
-		return err
-	}
-	if !x.CompatibleWith(c) {
-		return fmt.Errorf("index file options are incompatible: %v", x.opts)
-	}
-	c.searchIndex.Set(x)
-	return nil
-}
-
-func (c *Corpus) UpdateIndex() {
-	if c.Verbose {
-		log.Printf("updating index...")
-	}
-	start := time.Now()
-	index := c.NewIndex()
-	stop := time.Now()
-	c.searchIndex.Set(index)
-	if c.Verbose {
-		secs := stop.Sub(start).Seconds()
-		stats := index.Stats()
-		log.Printf("index updated (%gs, %d bytes of source, %d files, %d lines, %d unique words, %d spots)",
-			secs, stats.Bytes, stats.Files, stats.Lines, stats.Words, stats.Spots)
-	}
-	memstats := new(runtime.MemStats)
-	runtime.ReadMemStats(memstats)
-	if c.Verbose {
-		log.Printf("before GC: bytes = %d footprint = %d", memstats.HeapAlloc, memstats.Sys)
-	}
-	runtime.GC()
-	runtime.ReadMemStats(memstats)
-	if c.Verbose {
-		log.Printf("after  GC: bytes = %d footprint = %d", memstats.HeapAlloc, memstats.Sys)
-	}
-}
-
-// RunIndexer runs forever, indexing.
-func (c *Corpus) RunIndexer() {
-	// initialize the index from disk if possible
-	if c.IndexFiles != "" {
-		c.initFSTree()
-		if err := c.readIndex(c.IndexFiles); err != nil {
-			log.Printf("error reading index from file %s: %v", c.IndexFiles, err)
-		}
-		return
-	}
-
-	// Repeatedly update the package directory tree and index.
-	// TODO(bgarcia): Use fsnotify to only update when notified of a filesystem change.
-	for {
-		c.initFSTree()
-		c.UpdateIndex()
-		if c.IndexInterval < 0 {
-			return
-		}
-		delay := 5 * time.Minute // by default, reindex every 5 minutes
-		if c.IndexInterval > 0 {
-			delay = c.IndexInterval
-		}
-		time.Sleep(delay)
-	}
-}
-
-type countingWriter struct {
-	n *int64
-	w io.Writer
-}
-
-func (c countingWriter) Write(p []byte) (n int, err error) {
-	n, err = c.w.Write(p)
-	*c.n += int64(n)
-	return
-}
-
-type byteReader interface {
-	io.Reader
-	io.ByteReader
-}
-
-type countingReader struct {
-	n *int64
-	r byteReader
-}
-
-func (c countingReader) Read(p []byte) (n int, err error) {
-	n, err = c.r.Read(p)
-	*c.n += int64(n)
-	return
-}
-
-func (c countingReader) ReadByte() (b byte, err error) {
-	b, err = c.r.ReadByte()
-	*c.n += 1
-	return
-}
diff --git a/godoc/linkify.go b/godoc/linkify.go
index db30b12..0a8fb47 100644
--- a/godoc/linkify.go
+++ b/godoc/linkify.go
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// +build !go1.8
-
 // This file implements LinkifyText which introduces
 // links for identifiers pointing to their declarations.
 // The approach does not cover all cases because godoc
diff --git a/godoc/linkify18.go b/godoc/linkify18.go
deleted file mode 100644
index 5b9a030..0000000
--- a/godoc/linkify18.go
+++ /dev/null
@@ -1,238 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.8
-
-// This file implements LinkifyText which introduces
-// links for identifiers pointing to their declarations.
-// The approach does not cover all cases because godoc
-// doesn't have complete type information, but it's
-// reasonably good for browsing.
-
-package godoc
-
-import (
-	"fmt"
-	"go/ast"
-	"go/token"
-	"io"
-	"strconv"
-)
-
-// LinkifyText HTML-escapes source text and writes it to w.
-// Identifiers that are in a "use" position (i.e., that are
-// not being declared) are wrapped with HTML links pointing
-// to the respective declaration, if possible. Comments are
-// formatted the same way as with FormatText.
-//
-func LinkifyText(w io.Writer, text []byte, n ast.Node) {
-	links := linksFor(n)
-
-	i := 0     // links index
-	prev := "" // prev HTML tag
-	linkWriter := func(w io.Writer, _ int, start bool) {
-		// end tag
-		if !start {
-			if prev != "" {
-				fmt.Fprintf(w, `</%s>`, prev)
-				prev = ""
-			}
-			return
-		}
-
-		// start tag
-		prev = ""
-		if i < len(links) {
-			switch info := links[i]; {
-			case info.path != "" && info.name == "":
-				// package path
-				fmt.Fprintf(w, `<a href="/pkg/%s/">`, info.path)
-				prev = "a"
-			case info.path != "" && info.name != "":
-				// qualified identifier
-				fmt.Fprintf(w, `<a href="/pkg/%s/#%s">`, info.path, info.name)
-				prev = "a"
-			case info.path == "" && info.name != "":
-				// local identifier
-				if info.mode == identVal {
-					fmt.Fprintf(w, `<span id="%s">`, info.name)
-					prev = "span"
-				} else if ast.IsExported(info.name) {
-					fmt.Fprintf(w, `<a href="#%s">`, info.name)
-					prev = "a"
-				}
-			}
-			i++
-		}
-	}
-
-	idents := tokenSelection(text, token.IDENT)
-	comments := tokenSelection(text, token.COMMENT)
-	FormatSelections(w, text, linkWriter, idents, selectionTag, comments)
-}
-
-// A link describes the (HTML) link information for an identifier.
-// The zero value of a link represents "no link".
-//
-type link struct {
-	mode       identMode
-	path, name string // package path, identifier name
-}
-
-// linksFor returns the list of links for the identifiers used
-// by node in the same order as they appear in the source.
-//
-func linksFor(node ast.Node) (list []link) {
-	modes := identModesFor(node)
-
-	// NOTE: We are expecting ast.Inspect to call the
-	//       callback function in source text order.
-	ast.Inspect(node, func(node ast.Node) bool {
-		switch n := node.(type) {
-		case *ast.Ident:
-			m := modes[n]
-			info := link{mode: m}
-			switch m {
-			case identUse:
-				if n.Obj == nil && predeclared[n.Name] {
-					info.path = builtinPkgPath
-				}
-				info.name = n.Name
-			case identDef:
-				// any declaration except const or var - empty link
-			case identVal:
-				// const or var declaration
-				info.name = n.Name
-			}
-			list = append(list, info)
-			return false
-		case *ast.SelectorExpr:
-			// Detect qualified identifiers of the form pkg.ident.
-			// If anything fails we return true and collect individual
-			// identifiers instead.
-			if x, _ := n.X.(*ast.Ident); x != nil {
-				// x must be a package for a qualified identifier
-				if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
-					if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
-						// spec.Path.Value is the import path
-						if path, err := strconv.Unquote(spec.Path.Value); err == nil {
-							// Register two links, one for the package
-							// and one for the qualified identifier.
-							info := link{path: path}
-							list = append(list, info)
-							info.name = n.Sel.Name
-							list = append(list, info)
-							return false
-						}
-					}
-				}
-			}
-		}
-		return true
-	})
-
-	return
-}
-
-// The identMode describes how an identifier is "used" at its source location.
-type identMode int
-
-const (
-	identUse identMode = iota // identifier is used (must be zero value for identMode)
-	identDef                  // identifier is defined
-	identVal                  // identifier is defined in a const or var declaration
-)
-
-// identModesFor returns a map providing the identMode for each identifier used by node.
-func identModesFor(node ast.Node) map[*ast.Ident]identMode {
-	m := make(map[*ast.Ident]identMode)
-
-	ast.Inspect(node, func(node ast.Node) bool {
-		switch n := node.(type) {
-		case *ast.Field:
-			for _, n := range n.Names {
-				m[n] = identDef
-			}
-		case *ast.ImportSpec:
-			if name := n.Name; name != nil {
-				m[name] = identDef
-			}
-		case *ast.AliasSpec:
-			m[n.Name] = identVal
-		case *ast.ValueSpec:
-			for _, n := range n.Names {
-				m[n] = identVal
-			}
-		case *ast.TypeSpec:
-			m[n.Name] = identDef
-		case *ast.FuncDecl:
-			m[n.Name] = identDef
-		case *ast.AssignStmt:
-			// Short variable declarations only show up if we apply
-			// this code to all source code (as opposed to exported
-			// declarations only).
-			if n.Tok == token.DEFINE {
-				// Some of the lhs variables may be re-declared,
-				// so technically they are not defs. We don't
-				// care for now.
-				for _, x := range n.Lhs {
-					// Each lhs expression should be an
-					// ident, but we are conservative and check.
-					if n, _ := x.(*ast.Ident); n != nil {
-						m[n] = identVal
-					}
-				}
-			}
-		}
-		return true
-	})
-
-	return m
-}
-
-// The predeclared map represents the set of all predeclared identifiers.
-// TODO(gri) This information is also encoded in similar maps in go/doc,
-//           but not exported. Consider exporting an accessor and using
-//           it instead.
-var predeclared = map[string]bool{
-	"bool":       true,
-	"byte":       true,
-	"complex64":  true,
-	"complex128": true,
-	"error":      true,
-	"float32":    true,
-	"float64":    true,
-	"int":        true,
-	"int8":       true,
-	"int16":      true,
-	"int32":      true,
-	"int64":      true,
-	"rune":       true,
-	"string":     true,
-	"uint":       true,
-	"uint8":      true,
-	"uint16":     true,
-	"uint32":     true,
-	"uint64":     true,
-	"uintptr":    true,
-	"true":       true,
-	"false":      true,
-	"iota":       true,
-	"nil":        true,
-	"append":     true,
-	"cap":        true,
-	"close":      true,
-	"complex":    true,
-	"copy":       true,
-	"delete":     true,
-	"imag":       true,
-	"len":        true,
-	"make":       true,
-	"new":        true,
-	"panic":      true,
-	"print":      true,
-	"println":    true,
-	"real":       true,
-	"recover":    true,
-}
diff --git a/godoc/server.go b/godoc/server.go
index 30d008d..18f110a 100644
--- a/godoc/server.go
+++ b/godoc/server.go
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// +build !go1.8
-
 package godoc
 
 import (
diff --git a/godoc/server18.go b/godoc/server18.go
deleted file mode 100644
index 05d824f..0000000
--- a/godoc/server18.go
+++ /dev/null
@@ -1,768 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.8
-
-package godoc
-
-import (
-	"bytes"
-	"encoding/json"
-	"fmt"
-	"go/ast"
-	"go/build"
-	"go/doc"
-	"go/token"
-	htmlpkg "html"
-	htmltemplate "html/template"
-	"io"
-	"io/ioutil"
-	"log"
-	"net/http"
-	"os"
-	pathpkg "path"
-	"path/filepath"
-	"sort"
-	"strings"
-	"text/template"
-	"time"
-
-	"golang.org/x/tools/godoc/analysis"
-	"golang.org/x/tools/godoc/util"
-	"golang.org/x/tools/godoc/vfs"
-)
-
-// handlerServer is a migration from an old godoc http Handler type.
-// This should probably merge into something else.
-type handlerServer struct {
-	p           *Presentation
-	c           *Corpus  // copy of p.Corpus
-	pattern     string   // url pattern; e.g. "/pkg/"
-	stripPrefix string   // prefix to strip from import path; e.g. "pkg/"
-	fsRoot      string   // file system root to which the pattern is mapped; e.g. "/src"
-	exclude     []string // file system paths to exclude; e.g. "/src/cmd"
-}
-
-func (s *handlerServer) registerWithMux(mux *http.ServeMux) {
-	mux.Handle(s.pattern, s)
-}
-
-// GetPageInfo returns the PageInfo for a package directory abspath. If the
-// ShowSource mode bit is set, an AST containing only the package exports is
-// computed (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
-// is extracted from the AST. If there is no corresponding package in the
-// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
-// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
-// set to the respective error but the error is not logged.
-//
-func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode, goos, goarch string) *PageInfo {
-	info := &PageInfo{Dirname: abspath}
-
-	// Restrict to the package files that would be used when building
-	// the package on this system.  This makes sure that if there are
-	// separate implementations for, say, Windows vs Unix, we don't
-	// jumble them all together.
-	// Note: If goos/goarch aren't set, the current binary's GOOS/GOARCH
-	// are used.
-	ctxt := build.Default
-	ctxt.IsAbsPath = pathpkg.IsAbs
-	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
-		f, err := h.c.fs.ReadDir(filepath.ToSlash(dir))
-		filtered := make([]os.FileInfo, 0, len(f))
-		for _, i := range f {
-			if mode&NoFiltering != 0 || i.Name() != "internal" {
-				filtered = append(filtered, i)
-			}
-		}
-		return filtered, err
-	}
-	ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
-		data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
-		if err != nil {
-			return nil, err
-		}
-		return ioutil.NopCloser(bytes.NewReader(data)), nil
-	}
-
-	if goos != "" {
-		ctxt.GOOS = goos
-	}
-	if goarch != "" {
-		ctxt.GOARCH = goarch
-	}
-
-	pkginfo, err := ctxt.ImportDir(abspath, 0)
-	// continue if there are no Go source files; we still want the directory info
-	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
-		info.Err = err
-		return info
-	}
-
-	// collect package files
-	pkgname := pkginfo.Name
-	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
-	if len(pkgfiles) == 0 {
-		// Commands written in C have no .go files in the build.
-		// Instead, documentation may be found in an ignored file.
-		// The file may be ignored via an explicit +build ignore
-		// constraint (recommended), or by defining the package
-		// documentation (historic).
-		pkgname = "main" // assume package main since pkginfo.Name == ""
-		pkgfiles = pkginfo.IgnoredGoFiles
-	}
-
-	// get package information, if any
-	if len(pkgfiles) > 0 {
-		// build package AST
-		fset := token.NewFileSet()
-		files, err := h.c.parseFiles(fset, relpath, abspath, pkgfiles)
-		if err != nil {
-			info.Err = err
-			return info
-		}
-
-		// ignore any errors - they are due to unresolved identifiers
-		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)
-
-		// extract package documentation
-		info.FSet = fset
-		if mode&ShowSource == 0 {
-			// show extracted documentation
-			var m doc.Mode
-			if mode&NoFiltering != 0 {
-				m |= doc.AllDecls
-			}
-			if mode&AllMethods != 0 {
-				m |= doc.AllMethods
-			}
-			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
-			if mode&NoTypeAssoc != 0 {
-				for _, t := range info.PDoc.Types {
-					info.PDoc.Consts = append(info.PDoc.Consts, t.Consts...)
-					info.PDoc.Vars = append(info.PDoc.Vars, t.Vars...)
-					info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
-					t.Consts = nil
-					t.Vars = nil
-					t.Funcs = nil
-				}
-				// for now we cannot easily sort consts and vars since
-				// go/doc.Value doesn't export the order information
-				sort.Sort(funcsByName(info.PDoc.Funcs))
-			}
-
-			// collect examples
-			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
-			files, err = h.c.parseFiles(fset, relpath, abspath, testfiles)
-			if err != nil {
-				log.Println("parsing examples:", err)
-			}
-			info.Examples = collectExamples(h.c, pkg, files)
-
-			// collect any notes that we want to show
-			if info.PDoc.Notes != nil {
-				// could regexp.Compile only once per godoc, but probably not worth it
-				if rx := h.p.NotesRx; rx != nil {
-					for m, n := range info.PDoc.Notes {
-						if rx.MatchString(m) {
-							if info.Notes == nil {
-								info.Notes = make(map[string][]*doc.Note)
-							}
-							info.Notes[m] = n
-						}
-					}
-				}
-			}
-
-		} else {
-			// show source code
-			// TODO(gri) Consider eliminating export filtering in this mode,
-			//           or perhaps eliminating the mode altogether.
-			if mode&NoFiltering == 0 {
-				packageExports(fset, pkg)
-			}
-			info.PAst = files
-		}
-		info.IsMain = pkgname == "main"
-	}
-
-	// get directory information, if any
-	var dir *Directory
-	var timestamp time.Time
-	if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
-		// directory tree is present; lookup respective directory
-		// (may still fail if the file system was updated and the
-		// new directory tree has not yet been computed)
-		dir = tree.(*Directory).lookup(abspath)
-		timestamp = ts
-	}
-	if dir == nil {
-		// no directory tree present (too early after startup or
-		// command-line mode); compute one level for this page
-		// note: cannot use path filter here because in general
-		//       it doesn't contain the FSTree path
-		dir = h.c.newDirectory(abspath, 1)
-		timestamp = time.Now()
-	}
-	info.Dirs = dir.listing(true, func(path string) bool { return h.includePath(path, mode) })
-	info.DirTime = timestamp
-	info.DirFlat = mode&FlatDir != 0
-
-	return info
-}
-
-func (h *handlerServer) includePath(path string, mode PageInfoMode) (r bool) {
-	// if the path is under one of the exclusion paths, don't list.
-	for _, e := range h.exclude {
-		if strings.HasPrefix(path, e) {
-			return false
-		}
-	}
-
-	// if the path includes 'internal' or 'vendor', don't list unless we are in NoFiltering mode.
-	if mode&NoFiltering != 0 {
-		return true
-	}
-	if strings.Contains(path, "internal") || strings.Contains(path, "vendor") {
-		for _, c := range strings.Split(filepath.Clean(path), string(os.PathSeparator)) {
-			if c == "internal" || c == "vendor" {
-				return false
-			}
-		}
-	}
-	return true
-}
-
-type funcsByName []*doc.Func
-
-func (s funcsByName) Len() int           { return len(s) }
-func (s funcsByName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
-func (s funcsByName) Less(i, j int) bool { return s[i].Name < s[j].Name }
-
-func (h *handlerServer) ServeHTTP(w http.ResponseWriter, r *http.Request) {
-	if redirect(w, r) {
-		return
-	}
-
-	relpath := pathpkg.Clean(r.URL.Path[len(h.stripPrefix)+1:])
-	abspath := pathpkg.Join(h.fsRoot, relpath)
-	mode := h.p.GetPageInfoMode(r)
-	if relpath == builtinPkgPath {
-		mode = NoFiltering | NoTypeAssoc
-	}
-	info := h.GetPageInfo(abspath, relpath, mode, r.FormValue("GOOS"), r.FormValue("GOARCH"))
-	if info.Err != nil {
-		log.Print(info.Err)
-		h.p.ServeError(w, r, relpath, info.Err)
-		return
-	}
-
-	if mode&NoHTML != 0 {
-		h.p.ServeText(w, applyTemplate(h.p.PackageText, "packageText", info))
-		return
-	}
-
-	var tabtitle, title, subtitle string
-	switch {
-	case info.PAst != nil:
-		for _, ast := range info.PAst {
-			tabtitle = ast.Name.Name
-			break
-		}
-	case info.PDoc != nil:
-		tabtitle = info.PDoc.Name
-	default:
-		tabtitle = info.Dirname
-		title = "Directory "
-		if h.p.ShowTimestamps {
-			subtitle = "Last update: " + info.DirTime.String()
-		}
-	}
-	if title == "" {
-		if info.IsMain {
-			// assume that the directory name is the command name
-			_, tabtitle = pathpkg.Split(relpath)
-			title = "Command "
-		} else {
-			title = "Package "
-		}
-	}
-	title += tabtitle
-
-	// special cases for top-level package/command directories
-	switch tabtitle {
-	case "/src":
-		title = "Packages"
-		tabtitle = "Packages"
-	case "/src/cmd":
-		title = "Commands"
-		tabtitle = "Commands"
-	}
-
-	// Emit JSON array for type information.
-	pi := h.c.Analysis.PackageInfo(relpath)
-	info.CallGraphIndex = pi.CallGraphIndex
-	info.CallGraph = htmltemplate.JS(marshalJSON(pi.CallGraph))
-	info.AnalysisData = htmltemplate.JS(marshalJSON(pi.Types))
-	info.TypeInfoIndex = make(map[string]int)
-	for i, ti := range pi.Types {
-		info.TypeInfoIndex[ti.Name] = i
-	}
-
-	info.Share = allowShare(r)
-	h.p.ServePage(w, Page{
-		Title:    title,
-		Tabtitle: tabtitle,
-		Subtitle: subtitle,
-		Body:     applyTemplate(h.p.PackageHTML, "packageHTML", info),
-		Share:    info.Share,
-	})
-}
-
-type PageInfoMode uint
-
-const (
-	NoFiltering PageInfoMode = 1 << iota // do not filter exports
-	AllMethods                           // show all embedded methods
-	ShowSource                           // show source code, do not extract documentation
-	NoHTML                               // show result in textual form, do not generate HTML
-	FlatDir                              // show directory in a flat (non-indented) manner
-	NoTypeAssoc                          // don't associate consts, vars, and factory functions with types
-)
-
-// modeNames defines names for each PageInfoMode flag.
-var modeNames = map[string]PageInfoMode{
-	"all":     NoFiltering,
-	"methods": AllMethods,
-	"src":     ShowSource,
-	"text":    NoHTML,
-	"flat":    FlatDir,
-}
-
-// GetPageInfoMode computes the PageInfoMode flags by analyzing the request
-// URL form value "m". Its value is a comma-separated list of mode names
-// as defined by modeNames (e.g.: m=src,text).
-func (p *Presentation) GetPageInfoMode(r *http.Request) PageInfoMode {
-	var mode PageInfoMode
-	for _, k := range strings.Split(r.FormValue("m"), ",") {
-		if m, found := modeNames[strings.TrimSpace(k)]; found {
-			mode |= m
-		}
-	}
-	if p.AdjustPageInfoMode != nil {
-		mode = p.AdjustPageInfoMode(r, mode)
-	}
-	return mode
-}
-
-// poorMansImporter returns a (dummy) package object named
-// by the last path component of the provided package path
-// (as is the convention for packages). This is sufficient
-// to resolve package identifiers without doing an actual
-// import. It never returns an error.
-//
-func poorMansImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
-	pkg := imports[path]
-	if pkg == nil {
-		// note that strings.LastIndex returns -1 if there is no "/"
-		pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:])
-		pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
-		imports[path] = pkg
-	}
-	return pkg, nil
-}
-
-// globalNames returns a set of the names declared by all package-level
-// declarations. Method names are returned in the form Receiver_Method.
-func globalNames(pkg *ast.Package) map[string]bool {
-	names := make(map[string]bool)
-	for _, file := range pkg.Files {
-		for _, decl := range file.Decls {
-			addNames(names, decl)
-		}
-	}
-	return names
-}
-
-// collectExamples collects examples for pkg from testfiles.
-func collectExamples(c *Corpus, pkg *ast.Package, testfiles map[string]*ast.File) []*doc.Example {
-	var files []*ast.File
-	for _, f := range testfiles {
-		files = append(files, f)
-	}
-
-	var examples []*doc.Example
-	globals := globalNames(pkg)
-	for _, e := range doc.Examples(files...) {
-		name := stripExampleSuffix(e.Name)
-		if name == "" || globals[name] {
-			examples = append(examples, e)
-		} else if c.Verbose {
-			log.Printf("skipping example 'Example%s' because '%s' is not a known function or type", e.Name, e.Name)
-		}
-	}
-
-	return examples
-}
-
-// addNames adds the names declared by decl to the names set.
-// Method names are added in the form ReceiverTypeName_Method.
-func addNames(names map[string]bool, decl ast.Decl) {
-	switch d := decl.(type) {
-	case *ast.FuncDecl:
-		name := d.Name.Name
-		if d.Recv != nil {
-			var typeName string
-			switch r := d.Recv.List[0].Type.(type) {
-			case *ast.StarExpr:
-				typeName = r.X.(*ast.Ident).Name
-			case *ast.Ident:
-				typeName = r.Name
-			}
-			name = typeName + "_" + name
-		}
-		names[name] = true
-	case *ast.GenDecl:
-		for _, spec := range d.Specs {
-			switch s := spec.(type) {
-			case *ast.TypeSpec:
-				names[s.Name.Name] = true
-			case *ast.AliasSpec:
-				names[s.Name.Name] = true
-			case *ast.ValueSpec:
-				for _, id := range s.Names {
-					names[id.Name] = true
-				}
-			}
-		}
-	}
-}
-
-// packageExports is a local implementation of ast.PackageExports
-// which correctly updates each package file's comment list.
-// (The ast.PackageExports signature is frozen, hence the local
-// implementation).
-//
-func packageExports(fset *token.FileSet, pkg *ast.Package) {
-	for _, src := range pkg.Files {
-		cmap := ast.NewCommentMap(fset, src, src.Comments)
-		ast.FileExports(src)
-		src.Comments = cmap.Filter(src).Comments()
-	}
-}
-
-func applyTemplate(t *template.Template, name string, data interface{}) []byte {
-	var buf bytes.Buffer
-	if err := t.Execute(&buf, data); err != nil {
-		log.Printf("%s.Execute: %s", name, err)
-	}
-	return buf.Bytes()
-}
-
-type writerCapturesErr struct {
-	w   io.Writer
-	err error
-}
-
-func (w *writerCapturesErr) Write(p []byte) (int, error) {
-	n, err := w.w.Write(p)
-	if err != nil {
-		w.err = err
-	}
-	return n, err
-}
-
-// applyTemplateToResponseWriter uses an http.ResponseWriter as the io.Writer
-// for the call to template.Execute.  It uses an io.Writer wrapper to capture
-// errors from the underlying http.ResponseWriter.  Errors are logged only when
-// they come from the template processing and not the Writer; this avoids
-// polluting log files with error messages due to networking issues, such as
-// client disconnects and http HEAD protocol violations.
-func applyTemplateToResponseWriter(rw http.ResponseWriter, t *template.Template, data interface{}) {
-	w := &writerCapturesErr{w: rw}
-	err := t.Execute(w, data)
-	// There are some cases where template.Execute does not return an error when
-	// rw returns an error, and some where it does.  So check w.err first.
-	if w.err == nil && err != nil {
-		// Log template errors.
-		log.Printf("%s.Execute: %s", t.Name(), err)
-	}
-}
-
-func redirect(w http.ResponseWriter, r *http.Request) (redirected bool) {
-	canonical := pathpkg.Clean(r.URL.Path)
-	if !strings.HasSuffix(canonical, "/") {
-		canonical += "/"
-	}
-	if r.URL.Path != canonical {
-		url := *r.URL
-		url.Path = canonical
-		http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
-		redirected = true
-	}
-	return
-}
-
-func redirectFile(w http.ResponseWriter, r *http.Request) (redirected bool) {
-	c := pathpkg.Clean(r.URL.Path)
-	c = strings.TrimRight(c, "/")
-	if r.URL.Path != c {
-		url := *r.URL
-		url.Path = c
-		http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
-		redirected = true
-	}
-	return
-}
-
-func (p *Presentation) serveTextFile(w http.ResponseWriter, r *http.Request, abspath, relpath, title string) {
-	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
-	if err != nil {
-		log.Printf("ReadFile: %s", err)
-		p.ServeError(w, r, relpath, err)
-		return
-	}
-
-	if r.FormValue("m") == "text" {
-		p.ServeText(w, src)
-		return
-	}
-
-	h := r.FormValue("h")
-	s := RangeSelection(r.FormValue("s"))
-
-	var buf bytes.Buffer
-	if pathpkg.Ext(abspath) == ".go" {
-		// Find markup links for this file (e.g. "/src/fmt/print.go").
-		fi := p.Corpus.Analysis.FileInfo(abspath)
-		buf.WriteString("<script type='text/javascript'>document.ANALYSIS_DATA = ")
-		buf.Write(marshalJSON(fi.Data))
-		buf.WriteString(";</script>\n")
-
-		if status := p.Corpus.Analysis.Status(); status != "" {
-			buf.WriteString("<a href='/lib/godoc/analysis/help.html'>Static analysis features</a> ")
-			// TODO(adonovan): show analysis status at per-file granularity.
-			fmt.Fprintf(&buf, "<span style='color: grey'>[%s]</span><br/>", htmlpkg.EscapeString(status))
-		}
-
-		buf.WriteString("<pre>")
-		formatGoSource(&buf, src, fi.Links, h, s)
-		buf.WriteString("</pre>")
-	} else {
-		buf.WriteString("<pre>")
-		FormatText(&buf, src, 1, false, h, s)
-		buf.WriteString("</pre>")
-	}
-	fmt.Fprintf(&buf, `<p><a href="/%s?m=text">View as plain text</a></p>`, htmlpkg.EscapeString(relpath))
-
-	p.ServePage(w, Page{
-		Title:    title + " " + relpath,
-		Tabtitle: relpath,
-		Body:     buf.Bytes(),
-		Share:    allowShare(r),
-	})
-}
-
-// formatGoSource HTML-escapes Go source text and writes it to buf,
-// decorating it with the specified analysis links.
-//
-func formatGoSource(buf *bytes.Buffer, text []byte, links []analysis.Link, pattern string, selection Selection) {
-	// Emit to a temp buffer so that we can add line anchors at the end.
-	saved, buf := buf, new(bytes.Buffer)
-
-	var i int
-	var link analysis.Link // shared state of the two funcs below
-	segmentIter := func() (seg Segment) {
-		if i < len(links) {
-			link = links[i]
-			i++
-			seg = Segment{link.Start(), link.End()}
-		}
-		return
-	}
-	linkWriter := func(w io.Writer, offs int, start bool) {
-		link.Write(w, offs, start)
-	}
-
-	comments := tokenSelection(text, token.COMMENT)
-	var highlights Selection
-	if pattern != "" {
-		highlights = regexpSelection(text, pattern)
-	}
-
-	FormatSelections(buf, text, linkWriter, segmentIter, selectionTag, comments, highlights, selection)
-
-	// Now copy buf to saved, adding line anchors.
-
-	// The lineSelection mechanism can't be composed with our
-	// linkWriter, so we have to add line spans as another pass.
-	n := 1
-	for _, line := range bytes.Split(buf.Bytes(), []byte("\n")) {
-		fmt.Fprintf(saved, "<span id=\"L%d\" class=\"ln\">%6d</span>\t", n, n)
-		n++
-		saved.Write(line)
-		saved.WriteByte('\n')
-	}
-}
-
-func (p *Presentation) serveDirectory(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
-	if redirect(w, r) {
-		return
-	}
-
-	list, err := p.Corpus.fs.ReadDir(abspath)
-	if err != nil {
-		p.ServeError(w, r, relpath, err)
-		return
-	}
-
-	p.ServePage(w, Page{
-		Title:    "Directory " + relpath,
-		Tabtitle: relpath,
-		Body:     applyTemplate(p.DirlistHTML, "dirlistHTML", list),
-		Share:    allowShare(r),
-	})
-}
-
-func (p *Presentation) ServeHTMLDoc(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
-	// get HTML body contents
-	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
-	if err != nil {
-		log.Printf("ReadFile: %s", err)
-		p.ServeError(w, r, relpath, err)
-		return
-	}
-
-	// if it begins with "<!DOCTYPE " assume it is standalone
-	// html that doesn't need the template wrapping.
-	if bytes.HasPrefix(src, doctype) {
-		w.Write(src)
-		return
-	}
-
-	// if it begins with a JSON blob, read in the metadata.
-	meta, src, err := extractMetadata(src)
-	if err != nil {
-		log.Printf("decoding metadata %s: %v", relpath, err)
-	}
-
-	page := Page{
-		Title:    meta.Title,
-		Subtitle: meta.Subtitle,
-		Share:    allowShare(r),
-	}
-
-	// evaluate as template if indicated
-	if meta.Template {
-		tmpl, err := template.New("main").Funcs(p.TemplateFuncs()).Parse(string(src))
-		if err != nil {
-			log.Printf("parsing template %s: %v", relpath, err)
-			p.ServeError(w, r, relpath, err)
-			return
-		}
-		var buf bytes.Buffer
-		if err := tmpl.Execute(&buf, page); err != nil {
-			log.Printf("executing template %s: %v", relpath, err)
-			p.ServeError(w, r, relpath, err)
-			return
-		}
-		src = buf.Bytes()
-	}
-
-	// if it's the language spec, add tags to EBNF productions
-	if strings.HasSuffix(abspath, "go_spec.html") {
-		var buf bytes.Buffer
-		Linkify(&buf, src)
-		src = buf.Bytes()
-	}
-
-	page.Body = src
-	p.ServePage(w, page)
-}
-
-func (p *Presentation) ServeFile(w http.ResponseWriter, r *http.Request) {
-	p.serveFile(w, r)
-}
-
-func (p *Presentation) serveFile(w http.ResponseWriter, r *http.Request) {
-	relpath := r.URL.Path
-
-	// Check to see if we need to redirect or serve another file.
-	if m := p.Corpus.MetadataFor(relpath); m != nil {
-		if m.Path != relpath {
-			// Redirect to canonical path.
-			http.Redirect(w, r, m.Path, http.StatusMovedPermanently)
-			return
-		}
-		// Serve from the actual filesystem path.
-		relpath = m.filePath
-	}
-
-	abspath := relpath
-	relpath = relpath[1:] // strip leading slash
-
-	switch pathpkg.Ext(relpath) {
-	case ".html":
-		if strings.HasSuffix(relpath, "/index.html") {
-			// We'll show index.html for the directory.
-			// Use the dir/ version as canonical instead of dir/index.html.
-			http.Redirect(w, r, r.URL.Path[0:len(r.URL.Path)-len("index.html")], http.StatusMovedPermanently)
-			return
-		}
-		p.ServeHTMLDoc(w, r, abspath, relpath)
-		return
-
-	case ".go":
-		p.serveTextFile(w, r, abspath, relpath, "Source file")
-		return
-	}
-
-	dir, err := p.Corpus.fs.Lstat(abspath)
-	if err != nil {
-		log.Print(err)
-		p.ServeError(w, r, relpath, err)
-		return
-	}
-
-	if dir != nil && dir.IsDir() {
-		if redirect(w, r) {
-			return
-		}
-		if index := pathpkg.Join(abspath, "index.html"); util.IsTextFile(p.Corpus.fs, index) {
-			p.ServeHTMLDoc(w, r, index, index)
-			return
-		}
-		p.serveDirectory(w, r, abspath, relpath)
-		return
-	}
-
-	if util.IsTextFile(p.Corpus.fs, abspath) {
-		if redirectFile(w, r) {
-			return
-		}
-		p.serveTextFile(w, r, abspath, relpath, "Text file")
-		return
-	}
-
-	p.fileServer.ServeHTTP(w, r)
-}
-
-func (p *Presentation) ServeText(w http.ResponseWriter, text []byte) {
-	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
-	w.Write(text)
-}
-
-func marshalJSON(x interface{}) []byte {
-	var data []byte
-	var err error
-	const indentJSON = false // for easier debugging
-	if indentJSON {
-		data, err = json.MarshalIndent(x, "", "    ")
-	} else {
-		data, err = json.Marshal(x)
-	}
-	if err != nil {
-		panic(fmt.Sprintf("json.Marshal failed: %s", err))
-	}
-	return data
-}
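
The deleted GetPageInfoMode above turns the request's "m" form value, a
comma-separated list of mode names, into a PageInfoMode bitmask. The following
is a minimal standalone sketch of that parsing idea; it is not part of the
patch, and the lower-case identifiers are local to the example rather than
godoc API names.

package main

import (
	"fmt"
	"strings"
)

// pageInfoMode mirrors the bit-flag style of the deleted PageInfoMode type.
type pageInfoMode uint

const (
	noFiltering pageInfoMode = 1 << iota // do not filter exports
	allMethods                           // show all embedded methods
	showSource                           // show source code
	noHTML                               // textual output, no HTML
	flatDir                              // flat directory listing
)

// modeNames maps "m" query tokens to flags, like modeNames in the deleted file.
var modeNames = map[string]pageInfoMode{
	"all":     noFiltering,
	"methods": allMethods,
	"src":     showSource,
	"text":    noHTML,
	"flat":    flatDir,
}

// parseMode ORs together the flags named in a comma-separated list,
// silently ignoring unknown tokens, as the deleted GetPageInfoMode did.
func parseMode(m string) pageInfoMode {
	var mode pageInfoMode
	for _, k := range strings.Split(m, ",") {
		if v, ok := modeNames[strings.TrimSpace(k)]; ok {
			mode |= v
		}
	}
	return mode
}

func main() {
	mode := parseMode("src, text")                      // e.g. from ?m=src,text
	fmt.Println(mode&showSource != 0, mode&noHTML != 0) // prints: true true
}
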
diff --git a/godoc/snippet.go b/godoc/snippet.go
index 491534c..dd9c822 100644
--- a/godoc/snippet.go
+++ b/godoc/snippet.go
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// +build !go1.8
-
 // This file contains the infrastructure to create a code
 // snippet for search results.
 //
diff --git a/godoc/snippet18.go b/godoc/snippet18.go
deleted file mode 100644
index 96c92a2..0000000
--- a/godoc/snippet18.go
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.8
-
-// This file contains the infrastructure to create a code
-// snippet for search results.
-//
-// Note: At the moment, this only creates HTML snippets.
-
-package godoc
-
-import (
-	"bytes"
-	"fmt"
-	"go/ast"
-	"go/token"
-)
-
-type Snippet struct {
-	Line int
-	Text string // HTML-escaped
-}
-
-func (p *Presentation) newSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
-	// TODO: instead of pretty-printing the node, use the original source
-	var buf1 bytes.Buffer
-	p.writeNode(&buf1, fset, decl)
-	// wrap text with <pre> tag
-	var buf2 bytes.Buffer
-	buf2.WriteString("<pre>")
-	FormatText(&buf2, buf1.Bytes(), -1, true, id.Name, nil)
-	buf2.WriteString("</pre>")
-	return &Snippet{fset.Position(id.Pos()).Line, buf2.String()}
-}
-
-func findSpec(list []ast.Spec, id *ast.Ident) ast.Spec {
-	for _, spec := range list {
-		switch s := spec.(type) {
-		case *ast.ImportSpec:
-			if s.Name == id {
-				return s
-			}
-		case *ast.AliasSpec:
-			if s.Name == id {
-				return s
-			}
-		case *ast.ValueSpec:
-			for _, n := range s.Names {
-				if n == id {
-					return s
-				}
-			}
-		case *ast.TypeSpec:
-			if s.Name == id {
-				return s
-			}
-		}
-	}
-	return nil
-}
-
-func (p *Presentation) genSnippet(fset *token.FileSet, d *ast.GenDecl, id *ast.Ident) *Snippet {
-	s := findSpec(d.Specs, id)
-	if s == nil {
-		return nil //  declaration doesn't contain id - exit gracefully
-	}
-
-	// only use the spec containing the id for the snippet
-	dd := &ast.GenDecl{
-		Doc:    d.Doc,
-		TokPos: d.Pos(),
-		Tok:    d.Tok,
-		Lparen: d.Lparen,
-		Specs:  []ast.Spec{s},
-		Rparen: d.Rparen,
-	}
-
-	return p.newSnippet(fset, dd, id)
-}
-
-func (p *Presentation) funcSnippet(fset *token.FileSet, d *ast.FuncDecl, id *ast.Ident) *Snippet {
-	if d.Name != id {
-		return nil //  declaration doesn't contain id - exit gracefully
-	}
-
-	// only use the function signature for the snippet
-	dd := &ast.FuncDecl{
-		Doc:  d.Doc,
-		Recv: d.Recv,
-		Name: d.Name,
-		Type: d.Type,
-	}
-
-	return p.newSnippet(fset, dd, id)
-}
-
-// NewSnippet creates a text snippet from a declaration decl containing an
-// identifier id. Parts of the declaration not containing the identifier
-// may be removed for a more compact snippet.
-func NewSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
-	// TODO(bradfitz, adg): remove this function.  But it's used by indexer, which
-	// doesn't have a *Presentation, and NewSnippet needs a TabWidth.
-	var p Presentation
-	p.TabWidth = 4
-	return p.NewSnippet(fset, decl, id)
-}
-
-// NewSnippet creates a text snippet from a declaration decl containing an
-// identifier id. Parts of the declaration not containing the identifier
-// may be removed for a more compact snippet.
-func (p *Presentation) NewSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
-	var s *Snippet
-	switch d := decl.(type) {
-	case *ast.GenDecl:
-		s = p.genSnippet(fset, d, id)
-	case *ast.FuncDecl:
-		s = p.funcSnippet(fset, d, id)
-	}
-
-	// handle failure gracefully
-	if s == nil {
-		var buf bytes.Buffer
-		fmt.Fprintf(&buf, `<span class="alert">could not generate a snippet for <span class="highlight">%s</span></span>`, id.Name)
-		s = &Snippet{fset.Position(id.Pos()).Line, buf.String()}
-	}
-	return s
-}
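
The deleted snippet18.go builds a search-result snippet by keeping only the
spec or function signature that contains the identifier and pretty-printing
it. Below is a standalone sketch of that core step using only the standard
library; it is not part of the patch, and the demo source text and names are
invented for illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"log"
	"strings"
)

// src is a tiny invented source file used only for this example.
const src = `package demo

func Add(a, b int) int { return a + b }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	for _, decl := range file.Decls {
		fd, ok := decl.(*ast.FuncDecl)
		if !ok {
			continue
		}
		// Keep only the signature, as the deleted funcSnippet did,
		// then pretty-print it for display.
		sig := &ast.FuncDecl{Recv: fd.Recv, Name: fd.Name, Type: fd.Type}
		var b strings.Builder
		if err := printer.Fprint(&b, fset, sig); err != nil {
			log.Fatal(err)
		}
		fmt.Println(b.String()) // func Add(a, b int) int
	}
}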