Import talksapp
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..80b1c2f
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,41 @@
+FROM golang:latest
+
+# Install redis, nginx, daemontools, etc.
+RUN echo deb http://http.debian.net/debian wheezy-backports main > /etc/apt/sources.list.d/backports.list && \
+	apt-get update && \
+	apt-get install -y --no-install-recommends -t wheezy-backports redis-server && \
+	apt-get install -y --no-install-recommends graphviz nginx-full daemontools unzip
+
+# Configure redis.
+ADD deploy/redis.conf /etc/redis/redis.conf
+
+# Configure nginx.
+RUN echo "daemon off;" >> /etc/nginx/nginx.conf && \
+	rm /etc/nginx/sites-enabled/default
+ADD deploy/gddo.conf /etc/nginx/sites-enabled/gddo.conf
+
+# Configure daemontools services.
+ADD deploy/services /services
+
+# Manually fetch and install gddo-server dependencies (faster than "go get").
+ADD https://github.com/garyburd/redigo/archive/779af66db5668074a96f522d9025cb0a5ef50d89.zip /x/redigo.zip
+ADD https://snappy-go.googlecode.com/archive/12e4b4183793ac4b061921e7980845e750679fd0.tar.gz /x/snappy-go.tar.gz
+RUN unzip /x/redigo.zip -d /x && tar xzvf /x/snappy-go.tar.gz -C /x && \
+	mkdir -p /go/src/github.com/garyburd && \
+	mkdir -p /go/src/code.google.com/p && \
+	mv /x/redigo-* /go/src/github.com/garyburd/redigo && \
+	mv /x/snappy-go-* /go/src/code.google.com/p/snappy-go && \
+	rm -rf /x
+
+# Build the local gddo files.
+ADD . /go/src/github.com/golang/gddo
+RUN go install github.com/golang/gddo/gddo-server
+
+# Exposed ports and volumes.
+# /ssl should contain SSL certs.
+# /data should contain the Redis database, "dump.rdb".
+EXPOSE 80 443
+VOLUME ["/ssl", "/data"]
+
+# How to start it all.
+CMD svscan /services
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..65d761b
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2013 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.markdown b/README.markdown
new file mode 100644
index 0000000..6f95b01
--- /dev/null
+++ b/README.markdown
@@ -0,0 +1,25 @@
+This project is the source for http://godoc.org/
+
+[![GoDoc](https://godoc.org/github.com/golang/gddo?status.svg)](http://godoc.org/github.com/golang/gddo)
+
+The code in this project is designed to be used by godoc.org. Send mail to
+golang-dev@googlegroups.com if you want to discuss other uses of the code.
+
+Feedback
+--------
+
+Send ideas and questions to golang-dev@googlegroups.com. Request features and report bugs
+using the [GitHub Issue Tracker](https://github.com/golang/gddo/issues/new). 
+
+
+Contributions
+-------------
+Contributions to this project are welcome, though please send mail before
+starting work on anything major. Contributors retain their copyright, so we
+need you to fill out a short form before we can accept your contribution:
+https://developers.google.com/open-source/cla/individual
+
+More Documentation
+------------------
+
+More documentation about this project is available on the [wiki](https://github.com/golang/gddo/wiki).
diff --git a/database/database.go b/database/database.go
new file mode 100644
index 0000000..ceb034c
--- /dev/null
+++ b/database/database.go
@@ -0,0 +1,1083 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Redis keys and types:
+//
+// maxPackageId string: next id to assign
+// ids hash maps import path to package id
+// pkg:<id> hash
+//      terms: space separated search terms
+//      path: import path
+//      synopsis: synopsis
+//      gob: snappy compressed gob encoded doc.Package
+//      score: document search score
+//      etag:
+//      kind: p=package, c=command, d=directory with no go files
+// index:<term> set: package ids for given search term
+// index:import:<path> set: packages with import path
+// index:project:<root> set: packages in project with root
+// block set: packages to block
+// popular zset: package id, score
+// popular:0 string: scaled base time for popular scores
+// nextCrawl zset: package id, Unix time for next crawl
+// newCrawl set: new paths to crawl
+// badCrawl set: paths that returned error when crawling.
+
+// Package database manages storage for GoPkgDoc.
+package database
+
+import (
+	"bytes"
+	"encoding/gob"
+	"errors"
+	"flag"
+	"fmt"
+	"log"
+	"math"
+	"net/url"
+	"os"
+	"path"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/garyburd/redigo/redis"
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+	"github.com/golang/snappy/snappy"
+)
+
+type Database struct {
+	Pool interface {
+		Get() redis.Conn
+	}
+}
+
+type Package struct {
+	Path     string `json:"path"`
+	Synopsis string `json:"synopsis,omitempty"`
+}
+
+type byPath []Package
+
+func (p byPath) Len() int           { return len(p) }
+func (p byPath) Less(i, j int) bool { return p[i].Path < p[j].Path }
+func (p byPath) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
+
+var (
+	redisServer      = flag.String("db-server", "redis://127.0.0.1:6379", "URI of Redis server.")
+	redisIdleTimeout = flag.Duration("db-idle-timeout", 250*time.Second, "Close Redis connections after remaining idle for this duration.")
+	redisLog         = flag.Bool("db-log", false, "Log database commands")
+)
+
+func dialDb() (c redis.Conn, err error) {
+	u, err := url.Parse(*redisServer)
+	if err != nil {
+		return nil, err
+	}
+
+	defer func() {
+		if err != nil && c != nil {
+			c.Close()
+		}
+	}()
+
+	c, err = redis.Dial("tcp", u.Host)
+	if err != nil {
+		return
+	}
+
+	if *redisLog {
+		l := log.New(os.Stderr, "", log.LstdFlags)
+		c = redis.NewLoggingConn(c, l, "")
+	}
+
+	if u.User != nil {
+		if pw, ok := u.User.Password(); ok {
+			if _, err = c.Do("AUTH", pw); err != nil {
+				return
+			}
+		}
+	}
+	return
+}
+
+// New creates a database configured from command line flags.
+func New() (*Database, error) {
+	pool := &redis.Pool{
+		Dial:        dialDb,
+		MaxIdle:     10,
+		IdleTimeout: *redisIdleTimeout,
+	}
+
+	if c := pool.Get(); c.Err() != nil {
+		return nil, c.Err()
+	} else {
+		c.Close()
+	}
+
+	return &Database{Pool: pool}, nil
+}
+
+// Exists returns true if package with import path exists in the database.
+func (db *Database) Exists(path string) (bool, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	return redis.Bool(c.Do("HEXISTS", "ids", path))
+}
+
+var putScript = redis.NewScript(0, `
+    local path = ARGV[1]
+    local synopsis = ARGV[2]
+    local score = ARGV[3]
+    local gob = ARGV[4]
+    local terms = ARGV[5]
+    local etag = ARGV[6]
+    local kind = ARGV[7]
+    local nextCrawl = ARGV[8]
+
+    local id = redis.call('HGET', 'ids', path)
+    if not id then
+        id = redis.call('INCR', 'maxPackageId')
+        redis.call('HSET', 'ids', path, id)
+    end
+
+    if etag ~= '' and etag == redis.call('HGET', 'pkg:' .. id, 'clone') then
+        terms = ''
+        score = 0
+    end
+
+    local update = {}
+    for term in string.gmatch(redis.call('HGET', 'pkg:' .. id, 'terms') or '', '([^ ]+)') do
+        update[term] = 1
+    end
+
+    for term in string.gmatch(terms, '([^ ]+)') do
+        update[term] = (update[term] or 0) + 2
+    end
+
+    for term, x in pairs(update) do
+        if x == 1 then
+            redis.call('SREM', 'index:' .. term, id)
+        elseif x == 2 then 
+            redis.call('SADD', 'index:' .. term, id)
+        end
+    end
+
+    redis.call('SREM', 'badCrawl', path)
+    redis.call('SREM', 'newCrawl', path)
+
+    if nextCrawl ~= '0' then
+        redis.call('ZADD', 'nextCrawl', nextCrawl, id)
+        redis.call('HSET', 'pkg:' .. id, 'crawl', nextCrawl)
+    end
+
+    return redis.call('HMSET', 'pkg:' .. id, 'path', path, 'synopsis', synopsis, 'score', score, 'gob', gob, 'terms', terms, 'etag', etag, 'kind', kind)
+`)
+
+var addCrawlScript = redis.NewScript(0, `
+    for i=1,#ARGV do
+        local pkg = ARGV[i]
+        if redis.call('HEXISTS', 'ids',  pkg) == 0  and redis.call('SISMEMBER', 'badCrawl', pkg) == 0 then
+            redis.call('SADD', 'newCrawl', pkg)
+        end
+    end
+`)
+
+func (db *Database) AddNewCrawl(importPath string) error {
+	if !gosrc.IsValidRemotePath(importPath) {
+		return errors.New("bad path")
+	}
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := addCrawlScript.Do(c, importPath)
+	return err
+}
+
+// Put adds the package documentation to the database.
+func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time, hide bool) error {
+	c := db.Pool.Get()
+	defer c.Close()
+
+	score := 0.0
+	if !hide {
+		score = documentScore(pdoc)
+	}
+	terms := documentTerms(pdoc, score)
+
+	var gobBuf bytes.Buffer
+	if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
+		return err
+	}
+
+	gobBytes, err := snappy.Encode(nil, gobBuf.Bytes())
+	if err != nil {
+		return err
+	}
+
+	// Truncate large documents.
+	if len(gobBytes) > 400000 {
+		pdocNew := *pdoc
+		pdoc = &pdocNew
+		pdoc.Truncated = true
+		pdoc.Vars = nil
+		pdoc.Funcs = nil
+		pdoc.Types = nil
+		pdoc.Consts = nil
+		pdoc.Examples = nil
+		gobBuf.Reset()
+		if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
+			return err
+		}
+		gobBytes, err = snappy.Encode(nil, gobBuf.Bytes())
+		if err != nil {
+			return err
+		}
+	}
+
+	kind := "p"
+	switch {
+	case pdoc.Name == "":
+		kind = "d"
+	case pdoc.IsCmd:
+		kind = "c"
+	}
+
+	t := int64(0)
+	if !nextCrawl.IsZero() {
+		t = nextCrawl.Unix()
+	}
+
+	_, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t)
+	if err != nil {
+		return err
+	}
+
+	if nextCrawl.IsZero() {
+		// Skip crawling related packages if this is not a full save.
+		return nil
+	}
+
+	paths := make(map[string]bool)
+	for _, p := range pdoc.Imports {
+		if gosrc.IsValidRemotePath(p) {
+			paths[p] = true
+		}
+	}
+	for _, p := range pdoc.TestImports {
+		if gosrc.IsValidRemotePath(p) {
+			paths[p] = true
+		}
+	}
+	for _, p := range pdoc.XTestImports {
+		if gosrc.IsValidRemotePath(p) {
+			paths[p] = true
+		}
+	}
+	if pdoc.ImportPath != pdoc.ProjectRoot && pdoc.ProjectRoot != "" {
+		paths[pdoc.ProjectRoot] = true
+	}
+	for _, p := range pdoc.Subdirectories {
+		paths[pdoc.ImportPath+"/"+p] = true
+	}
+
+	args := make([]interface{}, 0, len(paths))
+	for p := range paths {
+		args = append(args, p)
+	}
+	_, err = addCrawlScript.Do(c, args...)
+	return err
+}
+
+var setNextCrawlEtagScript = redis.NewScript(0, `
+    local root = ARGV[1]
+    local etag = ARGV[2]
+    local nextCrawl = ARGV[3]
+
+    local pkgs = redis.call('SORT', 'index:project:' .. root, 'GET', '#',  'GET', 'pkg:*->etag')
+
+    for i=1,#pkgs,2 do
+        if pkgs[i+1] == etag then
+            redis.call('ZADD', 'nextCrawl', nextCrawl, pkgs[i])
+            redis.call('HSET', 'pkg:' .. pkgs[i], 'crawl', nextCrawl)
+        end
+    end
+`)
+
+// SetNextCrawlEtag sets the next crawl time for all packages in the project with the given etag.
+func (db *Database) SetNextCrawlEtag(projectRoot string, etag string, t time.Time) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := setNextCrawlEtagScript.Do(c, normalizeProjectRoot(projectRoot), etag, t.Unix())
+	return err
+}
+
+// bumpCrawlScript sets the crawl time to now. To avoid continuously crawling
+// frequently updated repositories, the crawl is scheduled in the future.
+var bumpCrawlScript = redis.NewScript(0, `
+    local root = ARGV[1]
+    local now = tonumber(ARGV[2])
+    local nextCrawl = now + 7200
+    local pkgs = redis.call('SORT', 'index:project:' .. root, 'GET', '#')
+
+    for i=1,#pkgs do
+        local v = redis.call('HMGET', 'pkg:' .. pkgs[i], 'crawl', 'kind')
+        local t = tonumber(v[1] or 0)
+        if t == 0 or now < t then
+            redis.call('HSET', 'pkg:' .. pkgs[i], 'crawl', now)
+        end
+        local nextCrawl = now + 86400
+        if v[2] == 'p' then
+            nextCrawl = now + 7200
+        end
+        t = tonumber(redis.call('ZSCORE', 'nextCrawl', pkgs[i]) or 0)
+        if t == 0 or nextCrawl < t then
+            redis.call('ZADD', 'nextCrawl', nextCrawl, pkgs[i])
+        end
+    end
+`)
+
+func (db *Database) BumpCrawl(projectRoot string) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := bumpCrawlScript.Do(c, normalizeProjectRoot(projectRoot), time.Now().Unix())
+	return err
+}
+
+// getDocScript gets the package documentation and update time for the
+// specified path. If path is "-", then the oldest document is returned.
+var getDocScript = redis.NewScript(0, `
+    local path = ARGV[1]
+
+    local id
+    if path == '-' then
+        local r = redis.call('ZRANGE', 'nextCrawl', 0, 0)
+        if not r or #r == 0 then
+            return false
+        end
+        id = r[1]
+    else
+        id = redis.call('HGET', 'ids', path)
+        if not id then
+            return false
+        end
+    end
+
+    local gob = redis.call('HGET', 'pkg:' .. id, 'gob')
+    if not gob then
+        return false
+    end
+
+    local nextCrawl = redis.call('HGET', 'pkg:' .. id, 'crawl')
+    if not nextCrawl then 
+        nextCrawl = redis.call('ZSCORE', 'nextCrawl', id)
+        if not nextCrawl then
+            nextCrawl = 0
+        end
+    end
+    
+    return {gob, nextCrawl}
+`)
+
+func (db *Database) getDoc(c redis.Conn, path string) (*doc.Package, time.Time, error) {
+	r, err := redis.Values(getDocScript.Do(c, path))
+	if err == redis.ErrNil {
+		return nil, time.Time{}, nil
+	} else if err != nil {
+		return nil, time.Time{}, err
+	}
+
+	var p []byte
+	var t int64
+
+	if _, err := redis.Scan(r, &p, &t); err != nil {
+		return nil, time.Time{}, err
+	}
+
+	p, err = snappy.Decode(nil, p)
+	if err != nil {
+		return nil, time.Time{}, err
+	}
+
+	var pdoc doc.Package
+	if err := gob.NewDecoder(bytes.NewReader(p)).Decode(&pdoc); err != nil {
+		return nil, time.Time{}, err
+	}
+
+	nextCrawl := pdoc.Updated
+	if t != 0 {
+		nextCrawl = time.Unix(t, 0).UTC()
+	}
+
+	return &pdoc, nextCrawl, err
+}
+
+var getSubdirsScript = redis.NewScript(0, `
+    local reply
+    for i = 1,#ARGV do
+        reply = redis.call('SORT', 'index:project:' .. ARGV[i], 'ALPHA', 'BY', 'pkg:*->path', 'GET', 'pkg:*->path', 'GET', 'pkg:*->synopsis', 'GET', 'pkg:*->kind')
+        if #reply > 0 then
+            break
+        end
+    end
+    return reply
+`)
+
+func (db *Database) getSubdirs(c redis.Conn, path string, pdoc *doc.Package) ([]Package, error) {
+	var reply interface{}
+	var err error
+
+	switch {
+	case isStandardPackage(path):
+		reply, err = getSubdirsScript.Do(c, "go")
+	case pdoc != nil:
+		reply, err = getSubdirsScript.Do(c, pdoc.ProjectRoot)
+	default:
+		var roots []interface{}
+		projectRoot := path
+		for i := 0; i < 5; i++ {
+			roots = append(roots, projectRoot)
+			if j := strings.LastIndex(projectRoot, "/"); j < 0 {
+				break
+			} else {
+				projectRoot = projectRoot[:j]
+			}
+		}
+		reply, err = getSubdirsScript.Do(c, roots...)
+	}
+
+	values, err := redis.Values(reply, err)
+	if err != nil {
+		return nil, err
+	}
+
+	var subdirs []Package
+	prefix := path + "/"
+
+	for len(values) > 0 {
+		var pkg Package
+		var kind string
+		values, err = redis.Scan(values, &pkg.Path, &pkg.Synopsis, &kind)
+		if err != nil {
+			return nil, err
+		}
+		if (kind == "p" || kind == "c") && strings.HasPrefix(pkg.Path, prefix) {
+			subdirs = append(subdirs, pkg)
+		}
+	}
+
+	return subdirs, err
+}
+
+// Get gets the package documentation and sub-directories for the given
+// import path.
+func (db *Database) Get(path string) (*doc.Package, []Package, time.Time, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+
+	pdoc, nextCrawl, err := db.getDoc(c, path)
+	if err != nil {
+		return nil, nil, time.Time{}, err
+	}
+
+	if pdoc != nil {
+		// fixup for special "-" path.
+		path = pdoc.ImportPath
+	}
+
+	subdirs, err := db.getSubdirs(c, path, pdoc)
+	if err != nil {
+		return nil, nil, time.Time{}, err
+	}
+	return pdoc, subdirs, nextCrawl, nil
+}
+
+func (db *Database) GetDoc(path string) (*doc.Package, time.Time, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	return db.getDoc(c, path)
+}
+
+var deleteScript = redis.NewScript(0, `
+    local path = ARGV[1]
+
+    local id = redis.call('HGET', 'ids', path)
+    if not id then
+        return false
+    end
+
+    for term in string.gmatch(redis.call('HGET', 'pkg:' .. id, 'terms') or '', '([^ ]+)') do
+        redis.call('SREM', 'index:' .. term, id)
+    end
+
+    redis.call('ZREM', 'nextCrawl', id)
+    redis.call('SREM', 'newCrawl', path)
+    redis.call('ZREM', 'popular', id)
+    redis.call('DEL', 'pkg:' .. id)
+    return redis.call('HDEL', 'ids', path)
+`)
+
+// Delete deletes the documentation for the given import path.
+func (db *Database) Delete(path string) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := deleteScript.Do(c, path)
+	return err
+}
+
+func packages(reply interface{}, all bool) ([]Package, error) {
+	values, err := redis.Values(reply, nil)
+	if err != nil {
+		return nil, err
+	}
+	result := make([]Package, 0, len(values)/3)
+	for len(values) > 0 {
+		var pkg Package
+		var kind string
+		values, err = redis.Scan(values, &pkg.Path, &pkg.Synopsis, &kind)
+		if err != nil {
+			return nil, err
+		}
+		if !all && kind == "d" {
+			continue
+		}
+		if pkg.Path == "C" {
+			pkg.Synopsis = "Package C is a \"pseudo-package\" used to access the C namespace from a cgo source file."
+		}
+		result = append(result, pkg)
+	}
+	return result, nil
+}
+
+func (db *Database) getPackages(key string, all bool) ([]Package, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	reply, err := c.Do("SORT", key, "ALPHA", "BY", "pkg:*->path", "GET", "pkg:*->path", "GET", "pkg:*->synopsis", "GET", "pkg:*->kind")
+	if err != nil {
+		return nil, err
+	}
+	return packages(reply, all)
+}
+
+func (db *Database) GoIndex() ([]Package, error) {
+	return db.getPackages("index:project:go", false)
+}
+
+func (db *Database) GoSubrepoIndex() ([]Package, error) {
+	return db.getPackages("index:project:subrepo", false)
+}
+
+func (db *Database) Index() ([]Package, error) {
+	return db.getPackages("index:all:", false)
+}
+
+func (db *Database) Project(projectRoot string) ([]Package, error) {
+	return db.getPackages("index:project:"+normalizeProjectRoot(projectRoot), true)
+}
+
+func (db *Database) AllPackages() ([]Package, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	values, err := redis.Values(c.Do("SORT", "nextCrawl", "DESC", "BY", "pkg:*->score", "GET", "pkg:*->path", "GET", "pkg:*->kind"))
+	if err != nil {
+		return nil, err
+	}
+	result := make([]Package, 0, len(values)/2)
+	for len(values) > 0 {
+		var pkg Package
+		var kind string
+		values, err = redis.Scan(values, &pkg.Path, &kind)
+		if err != nil {
+			return nil, err
+		}
+		if kind == "d" {
+			continue
+		}
+		result = append(result, pkg)
+	}
+	return result, nil
+}
+
+var packagesScript = redis.NewScript(0, `
+    local result = {}
+    for i = 1,#ARGV do
+        local path = ARGV[i]
+        local synopsis = ''
+        local kind = 'u'
+        local id = redis.call('HGET', 'ids',  path)
+        if id then
+            synopsis = redis.call('HGET', 'pkg:' .. id, 'synopsis')
+            kind = redis.call('HGET', 'pkg:' .. id, 'kind')
+        end
+        result[#result+1] = path
+        result[#result+1] = synopsis
+        result[#result+1] = kind
+    end
+    return result
+`)
+
+func (db *Database) Packages(paths []string) ([]Package, error) {
+	var args []interface{}
+	for _, p := range paths {
+		args = append(args, p)
+	}
+	c := db.Pool.Get()
+	defer c.Close()
+	reply, err := packagesScript.Do(c, args...)
+	if err != nil {
+		return nil, err
+	}
+	pkgs, err := packages(reply, false)
+	sort.Sort(byPath(pkgs))
+	return pkgs, err
+}
+
+func (db *Database) ImporterCount(path string) (int, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	return redis.Int(c.Do("SCARD", "index:import:"+path))
+}
+
+func (db *Database) Importers(path string) ([]Package, error) {
+	return db.getPackages("index:import:"+path, false)
+}
+
+func (db *Database) Block(root string) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	if _, err := c.Do("SADD", "block", root); err != nil {
+		return err
+	}
+	keys, err := redis.Strings(c.Do("HKEYS", "ids"))
+	if err != nil {
+		return err
+	}
+	for _, key := range keys {
+		if key == root || strings.HasPrefix(key, root) && key[len(root)] == '/' {
+			if _, err := deleteScript.Do(c, key); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+var isBlockedScript = redis.NewScript(0, `
+    local path = ''
+    for s in string.gmatch(ARGV[1], '[^/]+') do
+        path = path .. s
+        if redis.call('SISMEMBER', 'block', path) == 1 then
+            return 1
+        end
+        path = path .. '/'
+    end
+    return  0
+`)
+
+func (db *Database) IsBlocked(path string) (bool, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	return redis.Bool(isBlockedScript.Do(c, path))
+}
+
+type queryResult struct {
+	Path     string
+	Synopsis string
+	Score    float64
+}
+
+type byScore []*queryResult
+
+func (p byScore) Len() int           { return len(p) }
+func (p byScore) Less(i, j int) bool { return p[j].Score < p[i].Score }
+func (p byScore) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
+
+func (db *Database) Query(q string) ([]Package, error) {
+	terms := parseQuery(q)
+	if len(terms) == 0 {
+		return nil, nil
+	}
+	c := db.Pool.Get()
+	defer c.Close()
+	n, err := redis.Int(c.Do("INCR", "maxQueryId"))
+	if err != nil {
+		return nil, err
+	}
+	id := "tmp:query-" + strconv.Itoa(n)
+
+	args := []interface{}{id}
+	for _, term := range terms {
+		args = append(args, "index:"+term)
+	}
+	c.Send("SINTERSTORE", args...)
+	c.Send("SORT", id, "DESC", "BY", "nosort", "GET", "pkg:*->path", "GET", "pkg:*->synopsis", "GET", "pkg:*->score")
+	c.Send("DEL", id)
+	c.Flush()
+	c.Receive()                              // SINTERSTORE
+	values, err := redis.Values(c.Receive()) // SORT
+	if err != nil {
+		return nil, err
+	}
+	c.Receive() // DEL
+
+	var queryResults []*queryResult
+	if err := redis.ScanSlice(values, &queryResults, "Path", "Synopsis", "Score"); err != nil {
+		return nil, err
+	}
+
+	for _, qr := range queryResults {
+		c.Send("SCARD", "index:import:"+qr.Path)
+	}
+	c.Flush()
+
+	for _, qr := range queryResults {
+		importCount, err := redis.Int(c.Receive())
+		if err != nil {
+			return nil, err
+		}
+
+		qr.Score *= math.Log(float64(10 + importCount))
+
+		if isStandardPackage(qr.Path) {
+			if strings.HasSuffix(qr.Path, q) {
+				// Big bump for exact match on standard package name.
+				qr.Score *= 10000
+			} else {
+				qr.Score *= 1.2
+			}
+		}
+
+		if q == path.Base(qr.Path) {
+			qr.Score *= 1.1
+		}
+	}
+
+	sort.Sort(byScore(queryResults))
+
+	pkgs := make([]Package, len(queryResults))
+	for i, qr := range queryResults {
+		pkgs[i].Path = qr.Path
+		pkgs[i].Synopsis = qr.Synopsis
+	}
+
+	return pkgs, nil
+}
+
+type PackageInfo struct {
+	PDoc  *doc.Package
+	Score float64
+	Kind  string
+	Size  int
+}
+
+// Do executes function f for each document in the database.
+func (db *Database) Do(f func(*PackageInfo) error) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	cursor := 0
+	c.Send("SCAN", cursor, "MATCH", "pkg:*")
+	c.Flush()
+	for {
+		// Receive previous SCAN.
+		values, err := redis.Values(c.Receive())
+		if err != nil {
+			return err
+		}
+		var keys [][]byte
+		if _, err := redis.Scan(values, &cursor, &keys); err != nil {
+			return err
+		}
+		if cursor == 0 {
+			break
+		}
+		for _, key := range keys {
+			c.Send("HMGET", key, "gob", "score", "kind", "path", "terms", "synopsis")
+		}
+		c.Send("SCAN", cursor, "MATCH", "pkg:*")
+		c.Flush()
+		for _ = range keys {
+			values, err := redis.Values(c.Receive())
+			if err != nil {
+				return err
+			}
+
+			var (
+				pi       PackageInfo
+				p        []byte
+				path     string
+				terms    string
+				synopsis string
+			)
+
+			if _, err := redis.Scan(values, &p, &pi.Score, &pi.Kind, &path, &terms, &synopsis); err != nil {
+				return err
+			}
+
+			if p == nil {
+				continue
+			}
+
+			pi.Size = len(path) + len(p) + len(terms) + len(synopsis)
+
+			p, err = snappy.Decode(nil, p)
+			if err != nil {
+				return fmt.Errorf("snappy decoding %s: %v", path, err)
+			}
+
+			if err := gob.NewDecoder(bytes.NewReader(p)).Decode(&pi.PDoc); err != nil {
+				return fmt.Errorf("gob decoding %s: %v", path, err)
+			}
+			if err := f(&pi); err != nil {
+				return fmt.Errorf("func %s: %v", path, err)
+			}
+		}
+	}
+	return nil
+}
+
+var importGraphScript = redis.NewScript(0, `
+    local path = ARGV[1]
+
+    local id = redis.call('HGET', 'ids', path)
+    if not id then
+        return false
+    end
+
+    return redis.call('HMGET', 'pkg:' .. id, 'synopsis', 'terms')
+`)
+
+// DepLevel specifies the level of dependencies to show in an import graph.
+type DepLevel int
+
+const (
+	ShowAllDeps      DepLevel = iota // show all dependencies
+	HideStandardDeps                 // don't show dependencies of standard libraries
+	HideStandardAll                  // don't show standard libraries at all
+)
+
+func (db *Database) ImportGraph(pdoc *doc.Package, level DepLevel) ([]Package, [][2]int, error) {
+
+	// This breadth-first traversal of the package's dependencies uses the
+	// Redis pipeline as queue. Links to packages with invalid import paths are
+	// only included for the root package.
+
+	c := db.Pool.Get()
+	defer c.Close()
+	if err := importGraphScript.Load(c); err != nil {
+		return nil, nil, err
+	}
+
+	nodes := []Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
+	edges := [][2]int{}
+	index := map[string]int{pdoc.ImportPath: 0}
+
+	for _, path := range pdoc.Imports {
+		if level >= HideStandardAll && isStandardPackage(path) {
+			continue
+		}
+		j := len(nodes)
+		index[path] = j
+		edges = append(edges, [2]int{0, j})
+		nodes = append(nodes, Package{Path: path})
+		importGraphScript.Send(c, path)
+	}
+
+	for i := 1; i < len(nodes); i++ {
+		c.Flush()
+		r, err := redis.Values(c.Receive())
+		if err == redis.ErrNil {
+			continue
+		} else if err != nil {
+			return nil, nil, err
+		}
+		var synopsis, terms string
+		if _, err := redis.Scan(r, &synopsis, &terms); err != nil {
+			return nil, nil, err
+		}
+		nodes[i].Synopsis = synopsis
+		for _, term := range strings.Fields(terms) {
+			if strings.HasPrefix(term, "import:") {
+				path := term[len("import:"):]
+				if level >= HideStandardDeps && isStandardPackage(path) {
+					continue
+				}
+				j, ok := index[path]
+				if !ok {
+					j = len(nodes)
+					index[path] = j
+					nodes = append(nodes, Package{Path: path})
+					importGraphScript.Send(c, path)
+				}
+				edges = append(edges, [2]int{i, j})
+			}
+		}
+	}
+	return nodes, edges, nil
+}
+
+func (db *Database) PutGob(key string, value interface{}) error {
+	var buf bytes.Buffer
+	if err := gob.NewEncoder(&buf).Encode(value); err != nil {
+		return err
+	}
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := c.Do("SET", "gob:"+key, buf.Bytes())
+	return err
+}
+
+func (db *Database) GetGob(key string, value interface{}) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	p, err := redis.Bytes(c.Do("GET", "gob:"+key))
+	if err == redis.ErrNil {
+		return nil
+	} else if err != nil {
+		return err
+	}
+	return gob.NewDecoder(bytes.NewReader(p)).Decode(value)
+}
+
+var incrementPopularScoreScript = redis.NewScript(0, `
+    local path = ARGV[1]
+    local n = ARGV[2]
+    local t = ARGV[3]
+
+    local id = redis.call('HGET', 'ids', path)
+    if not id then
+        return
+    end
+
+    local t0 = redis.call('GET', 'popular:0') or '0'
+    local f = math.exp(tonumber(t) - tonumber(t0))
+    redis.call('ZINCRBY', 'popular', tonumber(n) * f, id)
+    if f > 10 then
+        redis.call('SET', 'popular:0', t)
+        redis.call('ZUNIONSTORE', 'popular', 1, 'popular', 'WEIGHTS', 1.0 / f)
+        redis.call('ZREMRANGEBYSCORE', 'popular', '-inf', 0.05)
+    end
+`)
+
+const popularHalfLife = time.Hour * 24 * 7
+
+func (db *Database) incrementPopularScoreInternal(path string, delta float64, t time.Time) error {
+	// nt = n0 * math.Exp(-lambda * t)
+	// lambda = math.Ln2 / thalf
+	c := db.Pool.Get()
+	defer c.Close()
+	const lambda = math.Ln2 / float64(popularHalfLife)
+	scaledTime := lambda * float64(t.Sub(time.Unix(1257894000, 0)))
+	_, err := incrementPopularScoreScript.Do(c, path, delta, scaledTime)
+	return err
+}
+
+func (db *Database) IncrementPopularScore(path string) error {
+	return db.incrementPopularScoreInternal(path, 1, time.Now())
+}
+
+var popularScript = redis.NewScript(0, `
+    local stop = ARGV[1]
+    local ids = redis.call('ZREVRANGE', 'popular', '0', stop)
+    local result = {}
+    for i=1,#ids do
+        local values = redis.call('HMGET', 'pkg:' .. ids[i], 'path', 'synopsis', 'kind')
+        result[#result+1] = values[1]
+        result[#result+1] = values[2]
+        result[#result+1] = values[3]
+    end
+    return result
+`)
+
+func (db *Database) Popular(count int) ([]Package, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	reply, err := popularScript.Do(c, count-1)
+	if err != nil {
+		return nil, err
+	}
+	pkgs, err := packages(reply, false)
+	return pkgs, err
+}
+
+var popularWithScoreScript = redis.NewScript(0, `
+    local ids = redis.call('ZREVRANGE', 'popular', '0', -1, 'WITHSCORES')
+    local result = {}
+    for i=1,#ids,2 do
+        result[#result+1] = redis.call('HGET', 'pkg:' .. ids[i], 'path')
+        result[#result+1] = ids[i+1]
+        result[#result+1] = 'p'
+    end
+    return result
+`)
+
+func (db *Database) PopularWithScores() ([]Package, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+	reply, err := popularWithScoreScript.Do(c)
+	if err != nil {
+		return nil, err
+	}
+	pkgs, err := packages(reply, false)
+	return pkgs, err
+}
+
+func (db *Database) PopNewCrawl() (string, bool, error) {
+	c := db.Pool.Get()
+	defer c.Close()
+
+	var subdirs []Package
+
+	path, err := redis.String(c.Do("SPOP", "newCrawl"))
+	switch {
+	case err == redis.ErrNil:
+		err = nil
+		path = ""
+	case err == nil:
+		subdirs, err = db.getSubdirs(c, path, nil)
+	}
+	return path, len(subdirs) > 0, err
+}
+
+func (db *Database) AddBadCrawl(path string) error {
+	c := db.Pool.Get()
+	defer c.Close()
+	_, err := c.Do("SADD", "badCrawl", path)
+	return err
+}
+
+var incrementCounterScript = redis.NewScript(0, `
+    local key = 'counter:' .. ARGV[1]
+    local n = tonumber(ARGV[2])
+    local t = tonumber(ARGV[3])
+    local exp = tonumber(ARGV[4])
+
+    local counter = redis.call('GET', key)
+    if counter then
+        counter = cjson.decode(counter)
+        n = n + counter.n * math.exp(counter.t - t)
+    end
+
+    redis.call('SET', key, cjson.encode({n = n; t = t}))
+    redis.call('EXPIRE', key, exp)
+    return tostring(n)
+`)
+
+const counterHalflife = time.Hour
+
+func (db *Database) incrementCounterInternal(key string, delta float64, t time.Time) (float64, error) {
+	// nt = n0 * math.Exp(-lambda * t)
+	// lambda = math.Ln2 / thalf
+	c := db.Pool.Get()
+	defer c.Close()
+	const lambda = math.Ln2 / float64(counterHalflife)
+	scaledTime := lambda * float64(t.Sub(time.Unix(1257894000, 0)))
+	return redis.Float64(incrementCounterScript.Do(c, key, delta, scaledTime, (4*counterHalflife)/time.Second))
+}
+
+func (db *Database) IncrementCounter(key string, delta float64) (float64, error) {
+	return db.incrementCounterInternal(key, delta, time.Now())
+}
diff --git a/database/database_test.go b/database/database_test.go
new file mode 100644
index 0000000..1ab5c5a
--- /dev/null
+++ b/database/database_test.go
@@ -0,0 +1,259 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package database
+
+import (
+	"math"
+	"reflect"
+	"strconv"
+	"testing"
+	"time"
+
+	"github.com/garyburd/redigo/redis"
+	"github.com/golang/gddo/doc"
+)
+
+func newDB(t *testing.T) *Database {
+	p := redis.NewPool(func() (redis.Conn, error) {
+		c, err := redis.DialTimeout("tcp", ":6379", 0, 1*time.Second, 1*time.Second)
+		if err != nil {
+			return nil, err
+		}
+		_, err = c.Do("SELECT", "9")
+		if err != nil {
+			c.Close()
+			return nil, err
+		}
+		return c, nil
+	}, 1)
+
+	c := p.Get()
+	defer c.Close()
+	n, err := redis.Int(c.Do("DBSIZE"))
+	if n != 0 || err != nil {
+		t.Fatalf("DBSIZE returned %d, %v", n, err)
+	}
+	return &Database{Pool: p}
+}
+
+func closeDB(db *Database) {
+	c := db.Pool.Get()
+	c.Do("FLUSHDB")
+	c.Close()
+}
+
+func TestPutGet(t *testing.T) {
+	var nextCrawl = time.Unix(time.Now().Add(time.Hour).Unix(), 0).UTC()
+
+	db := newDB(t)
+	defer closeDB(db)
+	pdoc := &doc.Package{
+		ImportPath:  "github.com/user/repo/foo/bar",
+		Name:        "bar",
+		Synopsis:    "hello",
+		ProjectRoot: "github.com/user/repo",
+		ProjectName: "foo",
+		Updated:     time.Now().Add(-time.Hour),
+		Imports:     []string{"C", "errors", "github.com/user/repo/foo/bar"}, // self import for testing convenience.
+	}
+	if err := db.Put(pdoc, nextCrawl, false); err != nil {
+		t.Errorf("db.Put() returned error %v", err)
+	}
+	if err := db.Put(pdoc, time.Time{}, false); err != nil {
+		t.Errorf("second db.Put() returned error %v", err)
+	}
+
+	actualPdoc, actualSubdirs, actualCrawl, err := db.Get("github.com/user/repo/foo/bar")
+	if err != nil {
+		t.Fatalf("db.Get(.../foo/bar) returned %v", err)
+	}
+	if len(actualSubdirs) != 0 {
+		t.Errorf("db.Get(.../foo/bar) returned subdirs %v, want none", actualSubdirs)
+	}
+	if !reflect.DeepEqual(actualPdoc, pdoc) {
+		t.Errorf("db.Get(.../foo/bar) returned doc %v, want %v", actualPdoc, pdoc)
+	}
+	if !nextCrawl.Equal(actualCrawl) {
+		t.Errorf("db.Get(.../foo/bar) returned crawl %v, want %v", actualCrawl, nextCrawl)
+	}
+
+	before := time.Now().Unix()
+	if err := db.BumpCrawl(pdoc.ProjectRoot); err != nil {
+		t.Errorf("db.BumpCrawl() returned %v", err)
+	}
+	after := time.Now().Unix()
+
+	_, _, actualCrawl, _ = db.Get("github.com/user/repo/foo/bar")
+	if actualCrawl.Unix() < before || after < actualCrawl.Unix() {
+		t.Errorf("actualCrawl=%v, expect value between %v and %v", actualCrawl.Unix(), before, after)
+	}
+
+	// Popular
+
+	if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
+		t.Errorf("db.IncrementPopularScore() returned %v", err)
+	}
+
+	// Get "-"
+
+	actualPdoc, _, _, err = db.Get("-")
+	if err != nil {
+		t.Fatalf("db.Get(-) returned %v", err)
+	}
+	if !reflect.DeepEqual(actualPdoc, pdoc) {
+		t.Errorf("db.Get(-) returned doc %v, want %v", actualPdoc, pdoc)
+	}
+
+	actualPdoc, actualSubdirs, _, err = db.Get("github.com/user/repo/foo")
+	if err != nil {
+		t.Fatalf("db.Get(.../foo) returned %v", err)
+	}
+	if actualPdoc != nil {
+		t.Errorf("db.Get(.../foo) returned doc %v, want %v", actualPdoc, nil)
+	}
+	expectedSubdirs := []Package{{Path: "github.com/user/repo/foo/bar", Synopsis: "hello"}}
+	if !reflect.DeepEqual(actualSubdirs, expectedSubdirs) {
+		t.Errorf("db.Get(.../foo) returned subdirs %v, want %v", actualSubdirs, expectedSubdirs)
+	}
+	actualImporters, err := db.Importers("github.com/user/repo/foo/bar")
+	if err != nil {
+		t.Fatalf("db.Importers() returned error %v", err)
+	}
+	expectedImporters := []Package{{"github.com/user/repo/foo/bar", "hello"}}
+	if !reflect.DeepEqual(actualImporters, expectedImporters) {
+		t.Errorf("db.Importers() = %v, want %v", actualImporters, expectedImporters)
+	}
+	actualImports, err := db.Packages(pdoc.Imports)
+	if err != nil {
+		t.Fatalf("db.Imports() returned error %v", err)
+	}
+	for i := range actualImports {
+		if actualImports[i].Path == "C" {
+			actualImports[i].Synopsis = ""
+		}
+	}
+	expectedImports := []Package{{"C", ""}, {"errors", ""}, {"github.com/user/repo/foo/bar", "hello"}}
+	if !reflect.DeepEqual(actualImports, expectedImports) {
+		t.Errorf("db.Imports() = %v, want %v", actualImports, expectedImports)
+	}
+	importerCount, _ := db.ImporterCount("github.com/user/repo/foo/bar")
+	if importerCount != 1 {
+		t.Errorf("db.ImporterCount() = %d, want %d", importerCount, 1)
+	}
+	if err := db.Delete("github.com/user/repo/foo/bar"); err != nil {
+		t.Errorf("db.Delete() returned error %v", err)
+	}
+
+	db.Query("bar")
+
+	if err := db.Put(pdoc, time.Time{}, false); err != nil {
+		t.Errorf("db.Put() returned error %v", err)
+	}
+
+	if err := db.Block("github.com/user/repo"); err != nil {
+		t.Errorf("db.Block() returned error %v", err)
+	}
+
+	blocked, err := db.IsBlocked("github.com/user/repo/foo/bar")
+	if !blocked || err != nil {
+		t.Errorf("db.IsBlocked(github.com/user/repo/foo/bar) returned %v, %v, want true, nil", blocked, err)
+	}
+
+	blocked, err = db.IsBlocked("github.com/foo/bar")
+	if blocked || err != nil {
+		t.Errorf("db.IsBlocked(github.com/foo/bar) returned %v, %v, want false, nil", blocked, err)
+	}
+
+	c := db.Pool.Get()
+	defer c.Close()
+	c.Send("DEL", "maxQueryId")
+	c.Send("DEL", "maxPackageId")
+	c.Send("DEL", "block")
+	c.Send("DEL", "popular:0")
+	c.Send("DEL", "newCrawl")
+	keys, err := redis.Values(c.Do("HKEYS", "ids"))
+	for _, key := range keys {
+		t.Errorf("unexpected id %s", key)
+	}
+	keys, err = redis.Values(c.Do("KEYS", "*"))
+	for _, key := range keys {
+		t.Errorf("unexpected key %s", key)
+	}
+}
+
+const epsilon = 0.000001
+
+func TestPopular(t *testing.T) {
+	db := newDB(t)
+	defer closeDB(db)
+	c := db.Pool.Get()
+	defer c.Close()
+
+	// Add scores for packages. On each iteration, add half-life to time and
+	// divide the score by two. All packages should have the same score.
+
+	now := time.Now()
+	score := float64(4048)
+	for id := 12; id >= 0; id-- {
+		path := "github.com/user/repo/p" + strconv.Itoa(id)
+		c.Do("HSET", "ids", path, id)
+		err := db.incrementPopularScoreInternal(path, score, now)
+		if err != nil {
+			t.Fatal(err)
+		}
+		now = now.Add(popularHalfLife)
+		score /= 2
+	}
+
+	values, _ := redis.Values(c.Do("ZRANGE", "popular", "0", "100000", "WITHSCORES"))
+	if len(values) != 26 {
+		t.Fatalf("Expected 26 values, got %d", len(values))
+	}
+
+	// Check for equal scores.
+	score, err := redis.Float64(values[1], nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	for i := 3; i < len(values); i += 2 {
+		s, _ := redis.Float64(values[i], nil)
+		if math.Abs(score-s)/score > epsilon {
+			t.Errorf("Bad score, score[1]=%g, score[%d]=%g", score, i, s)
+		}
+	}
+}
+
+func TestCounter(t *testing.T) {
+	db := newDB(t)
+	defer closeDB(db)
+
+	const key = "127.0.0.1"
+
+	now := time.Now()
+	n, err := db.incrementCounterInternal(key, 1, now)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if math.Abs(n-1.0) > epsilon {
+		t.Errorf("1: got n=%g, want 1", n)
+	}
+	n, err = db.incrementCounterInternal(key, 1, now)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if math.Abs(n-2.0)/2.0 > epsilon {
+		t.Errorf("2: got n=%g, want 2", n)
+	}
+	now = now.Add(counterHalflife)
+	n, err = db.incrementCounterInternal(key, 1, now)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if math.Abs(n-2.0)/2.0 > epsilon {
+		t.Errorf("3: got n=%g, want 2", n)
+	}
+}
diff --git a/database/index.go b/database/index.go
new file mode 100644
index 0000000..3986cdf
--- /dev/null
+++ b/database/index.go
@@ -0,0 +1,195 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package database
+
+import (
+	"path"
+	"regexp"
+	"strings"
+	"unicode"
+
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+)
+
+func isStandardPackage(path string) bool {
+	return strings.Index(path, ".") < 0
+}
+
+func isTermSep(r rune) bool {
+	return unicode.IsSpace(r) || unicode.IsPunct(r) || unicode.IsSymbol(r)
+}
+
+func normalizeProjectRoot(projectRoot string) string {
+	if projectRoot == "" {
+		return "go"
+	}
+	return projectRoot
+}
+
+var synonyms = map[string]string{
+	"redis":    "redisdb", // append db to avoid stemming to 'red'
+	"rand":     "random",
+	"postgres": "postgresql",
+	"mongo":    "mongodb",
+}
+
+func term(s string) string {
+	s = strings.ToLower(s)
+	if x, ok := synonyms[s]; ok {
+		s = x
+	}
+	return stem(s)
+}
+
+var httpPat = regexp.MustCompile(`https?://\S+`)
+
+func documentTerms(pdoc *doc.Package, score float64) []string {
+
+	terms := make(map[string]bool)
+
+	// Project root
+
+	projectRoot := normalizeProjectRoot(pdoc.ProjectRoot)
+	terms["project:"+projectRoot] = true
+
+	if strings.HasPrefix(pdoc.ImportPath, "golang.org/x/") {
+		terms["project:subrepo"] = true
+	}
+
+	// Imports
+
+	for _, path := range pdoc.Imports {
+		if gosrc.IsValidPath(path) {
+			terms["import:"+path] = true
+		}
+	}
+
+	if score > 0 {
+
+		if isStandardPackage(pdoc.ImportPath) {
+			for _, term := range parseQuery(pdoc.ImportPath) {
+				terms[term] = true
+			}
+		} else {
+			terms["all:"] = true
+			for _, term := range parseQuery(pdoc.ProjectName) {
+				terms[term] = true
+			}
+			for _, term := range parseQuery(pdoc.Name) {
+				terms[term] = true
+			}
+		}
+
+		// Synopsis
+
+		synopsis := httpPat.ReplaceAllLiteralString(pdoc.Synopsis, "")
+		for i, s := range strings.FieldsFunc(synopsis, isTermSep) {
+			s = strings.ToLower(s)
+			if !stopWord[s] && (i > 3 || s != "package") {
+				terms[term(s)] = true
+			}
+		}
+	}
+
+	result := make([]string, 0, len(terms))
+	for term := range terms {
+		result = append(result, term)
+	}
+	return result
+}
+
+// vendorPat matches the path of a vendored package.
+var vendorPat = regexp.MustCompile(
+	// match directories used by tools to vendor packages.
+	`/(?:_?third_party|vendors|Godeps/_workspace/src)/` +
+		// match a domain name.
+		`[^./]+\.[^/]+`)
+
+func documentScore(pdoc *doc.Package) float64 {
+	if pdoc.Name == "" ||
+		pdoc.DeadEndFork ||
+		len(pdoc.Errors) > 0 ||
+		strings.HasSuffix(pdoc.ImportPath, ".go") ||
+		strings.HasPrefix(pdoc.ImportPath, "gist.github.com/") ||
+		strings.HasSuffix(pdoc.ImportPath, "/internal") ||
+		strings.Contains(pdoc.ImportPath, "/internal/") ||
+		vendorPat.MatchString(pdoc.ImportPath) {
+		return 0
+	}
+
+	for _, p := range pdoc.Imports {
+		if strings.HasSuffix(p, ".go") {
+			return 0
+		}
+	}
+
+	r := 1.0
+	if pdoc.IsCmd {
+		if pdoc.Doc == "" {
+			// Do not include command in index if it does not have documentation.
+			return 0
+		}
+		if !importsGoPackages(pdoc) {
+			// Penalize commands that don't use the "go/*" packages.
+			r *= 0.9
+		}
+	} else {
+		if !pdoc.Truncated &&
+			len(pdoc.Consts) == 0 &&
+			len(pdoc.Vars) == 0 &&
+			len(pdoc.Funcs) == 0 &&
+			len(pdoc.Types) == 0 &&
+			len(pdoc.Examples) == 0 {
+			// Do not include package in index if it does not have exports.
+			return 0
+		}
+		if pdoc.Doc == "" {
+			// Penalty for no documentation.
+			r *= 0.95
+		}
+		if path.Base(pdoc.ImportPath) != pdoc.Name {
+			// Penalty for last element of path != package name.
+			r *= 0.9
+		}
+		for i := 0; i < strings.Count(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/"); i++ {
+			// Penalty for deeply nested packages.
+			r *= 0.99
+		}
+		if strings.Index(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/src/") > 0 {
+			r *= 0.95
+		}
+		for _, p := range pdoc.Imports {
+			if vendorPat.MatchString(p) {
+				// Penalize packages that import vendored packages.
+				r *= 0.1
+				break
+			}
+		}
+	}
+	return r
+}
+
+func parseQuery(q string) []string {
+	var terms []string
+	q = strings.ToLower(q)
+	for _, s := range strings.FieldsFunc(q, isTermSep) {
+		if !stopWord[s] {
+			terms = append(terms, term(s))
+		}
+	}
+	return terms
+}
+
+func importsGoPackages(pdoc *doc.Package) bool {
+	for _, m := range pdoc.Imports {
+		if strings.HasPrefix(m, "go/") {
+			return true
+		}
+	}
+	return false
+}
diff --git a/database/index_test.go b/database/index_test.go
new file mode 100644
index 0000000..ea40f69
--- /dev/null
+++ b/database/index_test.go
@@ -0,0 +1,129 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package database
+
+import (
+	"reflect"
+	"sort"
+	"testing"
+
+	"github.com/golang/gddo/doc"
+)
+
+var indexTests = []struct {
+	pdoc  *doc.Package
+	terms []string
+}{
+	{&doc.Package{
+		ImportPath:  "strconv",
+		ProjectRoot: "",
+		ProjectName: "Go",
+		Name:        "strconv",
+		Synopsis:    "Package strconv implements conversions to and from string representations of basic data types.",
+		Doc:         "Package strconv implements conversions to and from string representations\nof basic data types.",
+		Imports:     []string{"errors", "math", "unicode/utf8"},
+		Funcs:       []*doc.Func{{}},
+	},
+		[]string{
+			"bas",
+			"convert",
+			"dat",
+			"import:errors",
+			"import:math",
+			"import:unicode/utf8",
+			"project:go",
+			"repres",
+			"strconv",
+			"string",
+			"typ"},
+	},
+	{&doc.Package{
+		ImportPath:  "github.com/user/repo/dir",
+		ProjectRoot: "github.com/user/repo",
+		ProjectName: "go-oauth",
+		ProjectURL:  "https://github.com/user/repo/",
+		Name:        "dir",
+		Synopsis:    "Package dir implements a subset of the OAuth client interface as defined in RFC 5849.",
+		Doc: "Package oauth implements a subset of the OAuth client interface as defined in RFC 5849.\n\n" +
+			"This package assumes that the application writes request URL paths to the\nnetwork using " +
+			"the encoding implemented by the net/url URL RequestURI method.\n" +
+			"The HTTP client in the standard net/http package uses this encoding.",
+		IsCmd: false,
+		Imports: []string{
+			"bytes",
+			"crypto/hmac",
+			"crypto/sha1",
+			"encoding/base64",
+			"encoding/binary",
+			"errors",
+			"fmt",
+			"io",
+			"io/ioutil",
+			"net/http",
+			"net/url",
+			"regexp",
+			"sort",
+			"strconv",
+			"strings",
+			"sync",
+			"time",
+		},
+		TestImports: []string{"bytes", "net/url", "testing"},
+		Funcs:       []*doc.Func{{}},
+	},
+		[]string{
+			"all:",
+			"5849", "cly", "defin", "dir", "go",
+			"import:bytes", "import:crypto/hmac", "import:crypto/sha1",
+			"import:encoding/base64", "import:encoding/binary", "import:errors",
+			"import:fmt", "import:io", "import:io/ioutil", "import:net/http",
+			"import:net/url", "import:regexp", "import:sort", "import:strconv",
+			"import:strings", "import:sync", "import:time", "interfac",
+			"oau", "project:github.com/user/repo", "rfc", "subset",
+		},
+	},
+}
+
+func TestDocTerms(t *testing.T) {
+	for _, tt := range indexTests {
+		score := documentScore(tt.pdoc)
+		terms := documentTerms(tt.pdoc, score)
+		sort.Strings(terms)
+		sort.Strings(tt.terms)
+		if !reflect.DeepEqual(terms, tt.terms) {
+			t.Errorf("documentTerms(%s)=%#v, want %#v", tt.pdoc.ImportPath, terms, tt.terms)
+		}
+	}
+}
+
+var vendorPatTests = []struct {
+	path  string
+	match bool
+}{
+	{"camlistore.org/third_party/github.com/user/repo", true},
+	{"camlistore.org/third_party/dir", false},
+	{"camlistore.org/third_party", false},
+	{"camlistore.org/xthird_party/github.com/user/repo", false},
+	{"camlistore.org/third_partyx/github.com/user/repo", false},
+
+	{"example.org/_third_party/github.com/user/repo/dir", true},
+	{"example.org/_third_party/dir", false},
+
+	{"github.com/user/repo/Godeps/_workspace/src/github.com/user/repo", true},
+	{"github.com/user/repo/Godeps/_workspace/src/dir", false},
+
+	{"github.com/user/repo", false},
+}
+
+func TestVendorPat(t *testing.T) {
+	for _, tt := range vendorPatTests {
+		match := vendorPat.MatchString(tt.path)
+		if match != tt.match {
+			t.Errorf("match(%q) = %v, want %v", tt.path, match, tt.match)
+		}
+	}
+}
diff --git a/database/stem.go b/database/stem.go
new file mode 100644
index 0000000..534ae68
--- /dev/null
+++ b/database/stem.go
@@ -0,0 +1,123 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// This file implements the Paice/Husk stemming algorithm.
+// http://www.comp.lancs.ac.uk/computing/research/stemming/Links/paice.htm
+
+package database
+
+import (
+	"bytes"
+	"regexp"
+	"strconv"
+)
+
+const stemRuleText = `
+ai*2. a*1. 
+bb1. 
+city3s. ci2> cn1t> 
+dd1. dei3y> deec2ss. dee1. de2> dooh4> 
+e1> 
+feil1v. fi2> 
+gni3> gai3y. ga2> gg1. 
+ht*2. hsiug5ct. hsi3> 
+i*1. i1y> 
+ji1d. juf1s. ju1d. jo1d. jeh1r. jrev1t. jsim2t. jn1d. j1s. 
+lbaifi6. lbai4y. lba3> lbi3. lib2l> lc1. lufi4y. luf3> lu2. lai3> lau3> la2> ll1. 
+mui3. mu*2. msi3> mm1. 
+nois4j> noix4ct. noi3> nai3> na2> nee0. ne2> nn1. 
+pihs4> pp1. 
+re2> rae0. ra2. ro2> ru2> rr1. rt1> rei3y> 
+sei3y> sis2. si2> ssen4> ss0. suo3> su*2. s*1> s0. 
+tacilp4y. ta2> tnem4> tne3> tna3> tpir2b. tpro2b. tcud1. tpmus2. tpec2iv. tulo2v. tsis0. tsi3> tt1. 
+uqi3. ugo1. 
+vis3j> vie0. vi2> 
+ylb1> yli3y> ylp0. yl2> ygo1. yhp1. ymo1. ypo1. yti3> yte3> ytl2. yrtsi5. yra3> yro3> yfi3. ycn2t> yca3> 
+zi2> zy1s. 
+`
+
+type stemRule struct {
+	text   string
+	suffix []byte
+	intact bool
+	remove int
+	append []byte
+	more   bool
+}
+
+func parseStemRules() map[byte][]*stemRule {
+
+	rules := make(map[byte][]*stemRule)
+	for _, m := range regexp.MustCompile(`(?m)(?:^| )([a-zA-Z]*)(\*?)([0-9])([a-zA-Z]*)([.>])`).FindAllStringSubmatch(stemRuleText, -1) {
+
+		suffix := []byte(m[1])
+		for i := 0; i < len(suffix)/2; i++ {
+			j := len(suffix) - 1 - i
+			suffix[i], suffix[j] = suffix[j], suffix[i]
+		}
+
+		remove, _ := strconv.Atoi(m[3])
+		r := &stemRule{
+			text:   m[0],
+			suffix: suffix,
+			intact: m[2] == "*",
+			remove: remove,
+			append: []byte(m[4]),
+			more:   m[5] == ">",
+		}
+		c := suffix[len(suffix)-1]
+		rules[c] = append(rules[c], r)
+	}
+	return rules
+}
+
+var stemRules = parseStemRules()
+
+func firstVowel(offset int, p []byte) int {
+	for i, b := range p {
+		switch b {
+		case 'a', 'e', 'i', 'o', 'u':
+			return offset + i
+		case 'y':
+			if offset+i > 0 {
+				return offset + i
+			}
+		}
+	}
+	return -1
+}
+
+func acceptableStem(a, b []byte) bool {
+	i := firstVowel(0, a)
+	if i < 0 {
+		i = firstVowel(len(a), b)
+	}
+	l := len(a) + len(b)
+	if i == 0 {
+		return l > 1
+	}
+	return i >= 0 && l > 2
+}
+
+func stem(s string) string {
+	stem := bytes.ToLower([]byte(s))
+	intact := true
+	run := acceptableStem(stem, []byte{})
+	for run {
+		run = false
+		for _, rule := range stemRules[stem[len(stem)-1]] {
+			if bytes.HasSuffix(stem, rule.suffix) &&
+				(intact || !rule.intact) &&
+				acceptableStem(stem[:len(stem)-rule.remove], rule.append) {
+				stem = append(stem[:len(stem)-rule.remove], rule.append...)
+				intact = false
+				run = rule.more
+				break
+			}
+		}
+	}
+	return string(stem)
+}
diff --git a/database/stem_test.go b/database/stem_test.go
new file mode 100644
index 0000000..c3bc05f
--- /dev/null
+++ b/database/stem_test.go
@@ -0,0 +1,31 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package database
+
+import (
+	"testing"
+)
+
+var stemTests = []struct {
+	s, expected string
+}{
+	{"html", "html"},
+	{"strings", "string"},
+	{"ballroom", "ballroom"},
+	{"mechanicalization", "mech"},
+	{"pragmaticality", "pragm"},
+	{"rationalistically", "rat"},
+}
+
+func TestStem(t *testing.T) {
+	for _, tt := range stemTests {
+		actual := stem(tt.s)
+		if actual != tt.expected {
+			t.Errorf("stem(%q) = %q, want %q", tt.s, actual, tt.expected)
+		}
+	}
+}
diff --git a/database/stop.go b/database/stop.go
new file mode 100644
index 0000000..3b23e34
--- /dev/null
+++ b/database/stop.go
@@ -0,0 +1,143 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package database
+
+import (
+	"strings"
+)
+
+var stopWord = createStopWordMap()
+
+func createStopWordMap() map[string]bool {
+	m := make(map[string]bool)
+	for _, s := range strings.Fields(stopText) {
+		m[s] = true
+	}
+	return m
+}
+
+const stopText = `
+a
+about
+after
+all
+also
+am
+an
+and
+another
+any
+are
+as
+at
+b
+be
+because
+been
+before
+being
+between
+both
+but
+by
+c
+came
+can
+come
+could
+d
+did
+do
+e
+each
+f
+for
+from
+g
+get
+got
+h
+had
+has
+have
+he
+her
+here
+him
+himself
+his
+how
+i
+if
+implement
+implements
+in
+into
+is
+it
+j
+k
+l
+like
+m
+make
+many
+me
+might
+more
+most
+much
+must
+my
+n
+never
+now
+o
+of
+on
+only
+or
+other
+our
+out
+over
+p
+q
+r
+s
+said
+same
+see
+should
+since
+some
+still
+such
+t
+take
+than
+that
+the
+their
+them
+then
+there
+these
+they
+this
+those
+through
+to
+too
+u
+under
+v
+w
+x
+y
+z
+`
diff --git a/deploy/gddo.conf b/deploy/gddo.conf
new file mode 100644
index 0000000..ee4a80f
--- /dev/null
+++ b/deploy/gddo.conf
@@ -0,0 +1,37 @@
+log_format verbose '$remote_addr\t[$time_local]\t$host\t$request\t$status\t$body_bytes_sent\t$http_referer\t$http_user_agent\t$request_time\t$upstream_response_time';
+
+server {
+    server_name go.pkgdoc.org pkgdoc.org www.pkgdoc.org www.godoc.org;
+    rewrite ^ http://godoc.org$request_uri? permanent;
+}
+
+server {
+    server_name talks.godoc.org;
+    rewrite ^ http://go-talks.appspot.com$request_uri? permanent;
+}
+
+server {
+    listen 80 default_server;
+    listen 443 ssl default_server;
+
+    ssl_certificate      /ssl/godoc_org.crt;
+    ssl_certificate_key  /ssl/godoc_org.key;
+    
+    server_name _ godoc.org api.godoc.org;
+    access_log /var/log/nginx/gddo.log verbose;
+
+    gzip on;
+    gzip_proxied any;
+    gzip_types text/css text/plain text/javascript application/javascript;
+
+    if ($http_user_agent ~ (seek\.io|Wotbox|Exabot|HTTrack|TurnitinBot|Ezooms|PaperLiBot|Sogou) ) {
+        return 444;
+    }
+
+    location / {
+        proxy_pass http://127.0.0.1:8080;
+        proxy_set_header    Host $http_host;                                                                                                                     
+        proxy_set_header    X-Scheme $scheme;
+        proxy_set_header    X-Real-IP $remote_addr;
+    }
+}
diff --git a/deploy/redis.conf b/deploy/redis.conf
new file mode 100644
index 0000000..e2b3ee7
--- /dev/null
+++ b/deploy/redis.conf
@@ -0,0 +1,695 @@
+# Redis configuration file example
+
+# Note on units: when memory size is needed, it is possible to specify
+# it in the usual form of 1k 5GB 4M and so forth:
+#
+# 1k => 1000 bytes
+# 1kb => 1024 bytes
+# 1m => 1000000 bytes
+# 1mb => 1024*1024 bytes
+# 1g => 1000000000 bytes
+# 1gb => 1024*1024*1024 bytes
+#
+# units are case insensitive so 1GB 1Gb 1gB are all the same.
+
+# By default Redis does not run as a daemon. Use 'yes' if you need it.
+# Note that Redis will write a pid file in /var/run/redis.pid when daemonized.
+daemonize no
+
+# When running daemonized, Redis writes a pid file in /var/run/redis.pid by
+# default. You can specify a custom pid file location here.
+pidfile /var/run/redis.pid
+
+# Accept connections on the specified port, default is 6379.
+# If port 0 is specified Redis will not listen on a TCP socket.
+port 6379
+
+# By default Redis listens for connections from all the network interfaces
+# available on the server. It is possible to listen to just one or multiple
+# interfaces using the "bind" configuration directive, followed by one or
+# more IP addresses.
+#
+# Examples:
+#
+# bind 192.168.1.100 10.0.0.1
+# bind 127.0.0.1
+
+# Specify the path for the unix socket that will be used to listen for
+# incoming connections. There is no default, so Redis will not listen
+# on a unix socket when not specified.
+#
+# unixsocket /tmp/redis.sock
+# unixsocketperm 755
+
+# Close the connection after a client is idle for N seconds (0 to disable)
+timeout 0
+
+# TCP keepalive.
+#
+# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence
+# of communication. This is useful for two reasons:
+#
+# 1) Detect dead peers.
+# 2) Take the connection alive from the point of view of network
+#    equipment in the middle.
+#
+# On Linux, the specified value (in seconds) is the period used to send ACKs.
+# Note that to close the connection the double of the time is needed.
+# On other kernels the period depends on the kernel configuration.
+#
+# A reasonable value for this option is 60 seconds.
+tcp-keepalive 0
+
+# Specify the server verbosity level.
+# This can be one of:
+# debug (a lot of information, useful for development/testing)
+# verbose (many rarely useful info, but not a mess like the debug level)
+# notice (moderately verbose, what you want in production probably)
+# warning (only very important / critical messages are logged)
+loglevel notice
+
+# Specify the log file name. Also the empty string can be used to force
+# Redis to log on the standard output. Note that if you use standard
+# output for logging but daemonize, logs will be sent to /dev/null
+logfile ""
+
+# To enable logging to the system logger, just set 'syslog-enabled' to yes,
+# and optionally update the other syslog parameters to suit your needs.
+# syslog-enabled no
+
+# Specify the syslog identity.
+# syslog-ident redis
+
+# Specify the syslog facility. Must be USER or between LOCAL0-LOCAL7.
+# syslog-facility local0
+
+# Set the number of databases. The default database is DB 0, you can select
+# a different one on a per-connection basis using SELECT <dbid> where
+# dbid is a number between 0 and 'databases'-1
+databases 16
+
+################################ SNAPSHOTTING  #################################
+#
+# Save the DB on disk:
+#
+#   save <seconds> <changes>
+#
+#   Will save the DB if both the given number of seconds and the given
+#   number of write operations against the DB occurred.
+#
+#   In the example below the behaviour will be to save:
+#   after 900 sec (15 min) if at least 1 key changed
+#   after 300 sec (5 min) if at least 10 keys changed
+#   after 60 sec if at least 10000 keys changed
+#
+#   Note: you can disable saving at all commenting all the "save" lines.
+#
+#   It is also possible to remove all the previously configured save
+#   points by adding a save directive with a single empty string argument
+#   like in the following example:
+#
+#   save ""
+
+save 900 1
+save 300 10
+save 60 10000
+
+# By default Redis will stop accepting writes if RDB snapshots are enabled
+# (at least one save point) and the latest background save failed.
+# This will make the user aware (in an hard way) that data is not persisting
+# on disk properly, otherwise chances are that no one will notice and some
+# disaster will happen.
+#
+# If the background saving process will start working again Redis will
+# automatically allow writes again.
+#
+# However if you have setup your proper monitoring of the Redis server
+# and persistence, you may want to disable this feature so that Redis will
+# continue to work as usually even if there are problems with disk,
+# permissions, and so forth.
+stop-writes-on-bgsave-error yes
+
+# Compress string objects using LZF when dump .rdb databases?
+# For default that's set to 'yes' as it's almost always a win.
+# If you want to save some CPU in the saving child set it to 'no' but
+# the dataset will likely be bigger if you have compressible values or keys.
+rdbcompression yes
+
+# Since version 5 of RDB a CRC64 checksum is placed at the end of the file.
+# This makes the format more resistant to corruption but there is a performance
+# hit to pay (around 10%) when saving and loading RDB files, so you can disable it
+# for maximum performances.
+#
+# RDB files created with checksum disabled have a checksum of zero that will
+# tell the loading code to skip the check.
+rdbchecksum yes
+
+# The filename where to dump the DB
+dbfilename dump.rdb
+
+# The working directory.
+#
+# The DB will be written inside this directory, with the filename specified
+# above using the 'dbfilename' configuration directive.
+# 
+# The Append Only File will also be created inside this directory.
+# 
+# Note that you must specify a directory here, not a file name.
+dir /data
+
+################################# REPLICATION #################################
+
+# Master-Slave replication. Use slaveof to make a Redis instance a copy of
+# another Redis server. Note that the configuration is local to the slave
+# so for example it is possible to configure the slave to save the DB with a
+# different interval, or to listen to another port, and so on.
+#
+# slaveof <masterip> <masterport>
+
+# If the master is password protected (using the "requirepass" configuration
+# directive below) it is possible to tell the slave to authenticate before
+# starting the replication synchronization process, otherwise the master will
+# refuse the slave request.
+#
+# masterauth <master-password>
+
+# When a slave loses its connection with the master, or when the replication
+# is still in progress, the slave can act in two different ways:
+#
+# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will
+#    still reply to client requests, possibly with out of date data, or the
+#    data set may just be empty if this is the first synchronization.
+#
+# 2) if slave-serve-stale-data is set to 'no' the slave will reply with
+#    an error "SYNC with master in progress" to all the kind of commands
+#    but to INFO and SLAVEOF.
+#
+slave-serve-stale-data yes
+
+# You can configure a slave instance to accept writes or not. Writing against
+# a slave instance may be useful to store some ephemeral data (because data
+# written on a slave will be easily deleted after resync with the master) but
+# may also cause problems if clients are writing to it because of a
+# misconfiguration.
+#
+# Since Redis 2.6 by default slaves are read-only.
+#
+# Note: read only slaves are not designed to be exposed to untrusted clients
+# on the internet. It's just a protection layer against misuse of the instance.
+# Still a read only slave exports by default all the administrative commands
+# such as CONFIG, DEBUG, and so forth. To a limited extend you can improve
+# security of read only slaves using 'rename-command' to shadow all the
+# administrative / dangerous commands.
+slave-read-only yes
+
+# Slaves send PINGs to server in a predefined interval. It's possible to change
+# this interval with the repl_ping_slave_period option. The default value is 10
+# seconds.
+#
+# repl-ping-slave-period 10
+
+# The following option sets the replication timeout for:
+#
+# 1) Bulk transfer I/O during SYNC, from the point of view of slave.
+# 2) Master timeout from the point of view of slaves (data, pings).
+# 3) Slave timeout from the point of view of masters (REPLCONF ACK pings).
+#
+# It is important to make sure that this value is greater than the value
+# specified for repl-ping-slave-period otherwise a timeout will be detected
+# every time there is low traffic between the master and the slave.
+#
+# repl-timeout 60
+
+# Disable TCP_NODELAY on the slave socket after SYNC?
+#
+# If you select "yes" Redis will use a smaller number of TCP packets and
+# less bandwidth to send data to slaves. But this can add a delay for
+# the data to appear on the slave side, up to 40 milliseconds with
+# Linux kernels using a default configuration.
+#
+# If you select "no" the delay for data to appear on the slave side will
+# be reduced but more bandwidth will be used for replication.
+#
+# By default we optimize for low latency, but in very high traffic conditions
+# or when the master and slaves are many hops away, turning this to "yes" may
+# be a good idea.
+repl-disable-tcp-nodelay no
+
+# Set the replication backlog size. The backlog is a buffer that accumulates
+# slave data when slaves are disconnected for some time, so that when a slave
+# wants to reconnect again, often a full resync is not needed, but a partial
+# resync is enough, just passing the portion of data the slave missed while
+# disconnected.
+#
+# The bigger the replication backlog, the longer the time the slave can be
+# disconnected and later be able to perform a partial resynchronization.
+#
+# The backlog is only allocated once there is at least a slave connected.
+#
+# repl-backlog-size 1mb
+
+# After a master has no longer connected slaves for some time, the backlog
+# will be freed. The following option configures the amount of seconds that
+# need to elapse, starting from the time the last slave disconnected, for
+# the backlog buffer to be freed.
+#
+# A value of 0 means to never release the backlog.
+#
+# repl-backlog-ttl 3600
+
+# The slave priority is an integer number published by Redis in the INFO output.
+# It is used by Redis Sentinel in order to select a slave to promote into a
+# master if the master is no longer working correctly.
+#
+# A slave with a low priority number is considered better for promotion, so
+# for instance if there are three slaves with priority 10, 100, 25 Sentinel will
+# pick the one with priority 10, that is the lowest.
+#
+# However a special priority of 0 marks the slave as not able to perform the
+# role of master, so a slave with priority of 0 will never be selected by
+# Redis Sentinel for promotion.
+#
+# By default the priority is 100.
+slave-priority 100
+
+# It is possible for a master to stop accepting writes if there are less than
+# N slaves connected, having a lag less or equal than M seconds.
+#
+# The N slaves need to be in "online" state.
+#
+# The lag in seconds, that must be <= the specified value, is calculated from
+# the last ping received from the slave, that is usually sent every second.
+#
+# This option does not GUARANTEE that N replicas will accept the write, but
+# will limit the window of exposure for lost writes in case not enough slaves
+# are available, to the specified number of seconds.
+#
+# For example to require at least 3 slaves with a lag <= 10 seconds use:
+#
+# min-slaves-to-write 3
+# min-slaves-max-lag 10
+#
+# Setting one or the other to 0 disables the feature.
+#
+# By default min-slaves-to-write is set to 0 (feature disabled) and
+# min-slaves-max-lag is set to 10.
+
+################################## SECURITY ###################################
+
+# Require clients to issue AUTH <PASSWORD> before processing any other
+# commands.  This might be useful in environments in which you do not trust
+# others with access to the host running redis-server.
+#
+# This should stay commented out for backward compatibility and because most
+# people do not need auth (e.g. they run their own servers).
+# 
+# Warning: since Redis is pretty fast an outside user can try up to
+# 150k passwords per second against a good box. This means that you should
+# use a very strong password otherwise it will be very easy to break.
+#
+# requirepass foobared
+
+# Command renaming.
+#
+# It is possible to change the name of dangerous commands in a shared
+# environment. For instance the CONFIG command may be renamed into something
+# hard to guess so that it will still be available for internal-use tools
+# but not available for general clients.
+#
+# Example:
+#
+# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52
+#
+# It is also possible to completely kill a command by renaming it into
+# an empty string:
+#
+# rename-command CONFIG ""
+#
+# Please note that changing the name of commands that are logged into the
+# AOF file or transmitted to slaves may cause problems.
+
+################################### LIMITS ####################################
+
+# Set the max number of connected clients at the same time. By default
+# this limit is set to 10000 clients, however if the Redis server is not
+# able to configure the process file limit to allow for the specified limit
+# the max number of allowed clients is set to the current file limit
+# minus 32 (as Redis reserves a few file descriptors for internal uses).
+#
+# Once the limit is reached Redis will close all the new connections sending
+# an error 'max number of clients reached'.
+#
+# maxclients 10000
+
+# Don't use more memory than the specified amount of bytes.
+# When the memory limit is reached Redis will try to remove keys
+# according to the eviction policy selected (see maxmemory-policy).
+#
+# If Redis can't remove keys according to the policy, or if the policy is
+# set to 'noeviction', Redis will start to reply with errors to commands
+# that would use more memory, like SET, LPUSH, and so on, and will continue
+# to reply to read-only commands like GET.
+#
+# This option is usually useful when using Redis as an LRU cache, or to set
+# an hard memory limit for an instance (using the 'noeviction' policy).
+#
+# WARNING: If you have slaves attached to an instance with maxmemory on,
+# the size of the output buffers needed to feed the slaves are subtracted
+# from the used memory count, so that network problems / resyncs will
+# not trigger a loop where keys are evicted, and in turn the output
+# buffer of slaves is full with DELs of keys evicted triggering the deletion
+# of more keys, and so forth until the database is completely emptied.
+#
+# In short... if you have slaves attached it is suggested that you set a lower
+# limit for maxmemory so that there is some free RAM on the system for slave
+# output buffers (but this is not needed if the policy is 'noeviction').
+#
+# maxmemory <bytes>
+
+# MAXMEMORY POLICY: how Redis will select what to remove when maxmemory
+# is reached. You can select among five behaviors:
+# 
+# volatile-lru -> remove the key with an expire set using an LRU algorithm
+# allkeys-lru -> remove any key accordingly to the LRU algorithm
+# volatile-random -> remove a random key with an expire set
+# allkeys-random -> remove a random key, any key
+# volatile-ttl -> remove the key with the nearest expire time (minor TTL)
+# noeviction -> don't expire at all, just return an error on write operations
+# 
+# Note: with any of the above policies, Redis will return an error on write
+#       operations, when there are not suitable keys for eviction.
+#
+#       At the date of writing this commands are: set setnx setex append
+#       incr decr rpush lpush rpushx lpushx linsert lset rpoplpush sadd
+#       sinter sinterstore sunion sunionstore sdiff sdiffstore zadd zincrby
+#       zunionstore zinterstore hset hsetnx hmset hincrby incrby decrby
+#       getset mset msetnx exec sort
+#
+# The default is:
+#
+# maxmemory-policy volatile-lru
+
+# LRU and minimal TTL algorithms are not precise algorithms but approximated
+# algorithms (in order to save memory), so you can select as well the sample
+# size to check. For instance for default Redis will check three keys and
+# pick the one that was used less recently, you can change the sample size
+# using the following configuration directive.
+#
+# maxmemory-samples 3
+
+############################## APPEND ONLY MODE ###############################
+
+# By default Redis asynchronously dumps the dataset on disk. This mode is
+# good enough in many applications, but an issue with the Redis process or
+# a power outage may result into a few minutes of writes lost (depending on
+# the configured save points).
+#
+# The Append Only File is an alternative persistence mode that provides
+# much better durability. For instance using the default data fsync policy
+# (see later in the config file) Redis can lose just one second of writes in a
+# dramatic event like a server power outage, or a single write if something
+# wrong with the Redis process itself happens, but the operating system is
+# still running correctly.
+#
+# AOF and RDB persistence can be enabled at the same time without problems.
+# If the AOF is enabled on startup Redis will load the AOF, that is the file
+# with the better durability guarantees.
+#
+# Please check http://redis.io/topics/persistence for more information.
+
+appendonly no
+
+# The name of the append only file (default: "appendonly.aof")
+# appendfilename appendonly.aof
+
+# The fsync() call tells the Operating System to actually write data on disk
+# instead to wait for more data in the output buffer. Some OS will really flush 
+# data on disk, some other OS will just try to do it ASAP.
+#
+# Redis supports three different modes:
+#
+# no: don't fsync, just let the OS flush the data when it wants. Faster.
+# always: fsync after every write to the append only log. Slow, Safest.
+# everysec: fsync only one time every second. Compromise.
+#
+# The default is "everysec", as that's usually the right compromise between
+# speed and data safety. It's up to you to understand if you can relax this to
+# "no" that will let the operating system flush the output buffer when
+# it wants, for better performances (but if you can live with the idea of
+# some data loss consider the default persistence mode that's snapshotting),
+# or on the contrary, use "always" that's very slow but a bit safer than
+# everysec.
+#
+# More details please check the following article:
+# http://antirez.com/post/redis-persistence-demystified.html
+#
+# If unsure, use "everysec".
+
+# appendfsync always
+appendfsync everysec
+# appendfsync no
+
+# When the AOF fsync policy is set to always or everysec, and a background
+# saving process (a background save or AOF log background rewriting) is
+# performing a lot of I/O against the disk, in some Linux configurations
+# Redis may block too long on the fsync() call. Note that there is no fix for
+# this currently, as even performing fsync in a different thread will block
+# our synchronous write(2) call.
+#
+# In order to mitigate this problem it's possible to use the following option
+# that will prevent fsync() from being called in the main process while a
+# BGSAVE or BGREWRITEAOF is in progress.
+#
+# This means that while another child is saving, the durability of Redis is
+# the same as "appendfsync none". In practical terms, this means that it is
+# possible to lose up to 30 seconds of log in the worst scenario (with the
+# default Linux settings).
+# 
+# If you have latency problems turn this to "yes". Otherwise leave it as
+# "no" that is the safest pick from the point of view of durability.
+no-appendfsync-on-rewrite no
+
+# Automatic rewrite of the append only file.
+# Redis is able to automatically rewrite the log file implicitly calling
+# BGREWRITEAOF when the AOF log size grows by the specified percentage.
+# 
+# This is how it works: Redis remembers the size of the AOF file after the
+# latest rewrite (if no rewrite has happened since the restart, the size of
+# the AOF at startup is used).
+#
+# This base size is compared to the current size. If the current size is
+# bigger than the specified percentage, the rewrite is triggered. Also
+# you need to specify a minimal size for the AOF file to be rewritten, this
+# is useful to avoid rewriting the AOF file even if the percentage increase
+# is reached but it is still pretty small.
+#
+# Specify a percentage of zero in order to disable the automatic AOF
+# rewrite feature.
+
+auto-aof-rewrite-percentage 100
+auto-aof-rewrite-min-size 64mb
+
+################################ LUA SCRIPTING  ###############################
+
+# Max execution time of a Lua script in milliseconds.
+#
+# If the maximum execution time is reached Redis will log that a script is
+# still in execution after the maximum allowed time and will start to
+# reply to queries with an error.
+#
+# When a long running script exceed the maximum execution time only the
+# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be
+# used to stop a script that did not yet called write commands. The second
+# is the only way to shut down the server in the case a write commands was
+# already issue by the script but the user don't want to wait for the natural
+# termination of the script.
+#
+# Set it to 0 or a negative value for unlimited execution without warnings.
+lua-time-limit 5000
+
+################################## SLOW LOG ###################################
+
+# The Redis Slow Log is a system to log queries that exceeded a specified
+# execution time. The execution time does not include the I/O operations
+# like talking with the client, sending the reply and so forth,
+# but just the time needed to actually execute the command (this is the only
+# stage of command execution where the thread is blocked and can not serve
+# other requests in the meantime).
+# 
+# You can configure the slow log with two parameters: one tells Redis
+# what is the execution time, in microseconds, to exceed in order for the
+# command to get logged, and the other parameter is the length of the
+# slow log. When a new command is logged the oldest one is removed from the
+# queue of logged commands.
+
+# The following time is expressed in microseconds, so 1000000 is equivalent
+# to one second. Note that a negative number disables the slow log, while
+# a value of zero forces the logging of every command.
+slowlog-log-slower-than 10000
+
+# There is no limit to this length. Just be aware that it will consume memory.
+# You can reclaim memory used by the slow log with SLOWLOG RESET.
+slowlog-max-len 128
+
+############################# Event notification ##############################
+
+# Redis can notify Pub/Sub clients about events happening in the key space.
+# This feature is documented at http://redis.io/topics/keyspace-events
+# 
+# For instance if keyspace events notification is enabled, and a client
+# performs a DEL operation on key "foo" stored in the Database 0, two
+# messages will be published via Pub/Sub:
+#
+# PUBLISH __keyspace@0__:foo del
+# PUBLISH __keyevent@0__:del foo
+#
+# It is possible to select the events that Redis will notify among a set
+# of classes. Every class is identified by a single character:
+#
+#  K     Keyspace events, published with __keyspace@<db>__ prefix.
+#  E     Keyevent events, published with __keyevent@<db>__ prefix.
+#  g     Generic commands (non-type specific) like DEL, EXPIRE, RENAME, ...
+#  $     String commands
+#  l     List commands
+#  s     Set commands
+#  h     Hash commands
+#  z     Sorted set commands
+#  x     Expired events (events generated every time a key expires)
+#  e     Evicted events (events generated when a key is evicted for maxmemory)
+#  A     Alias for g$lshzxe, so that the "AKE" string means all the events.
+#
+#  The "notify-keyspace-events" takes as argument a string that is composed
+#  by zero or multiple characters. The empty string means that notifications
+#  are disabled at all.
+#
+#  Example: to enable list and generic events, from the point of view of the
+#           event name, use:
+#
+#  notify-keyspace-events Elg
+#
+#  Example 2: to get the stream of the expired keys subscribing to channel
+#             name __keyevent@0__:expired use:
+#
+#  notify-keyspace-events Ex
+#
+#  By default all notifications are disabled because most users don't need
+#  this feature and the feature has some overhead. Note that if you don't
+#  specify at least one of K or E, no events will be delivered.
+notify-keyspace-events ""
+
+############################### ADVANCED CONFIG ###############################
+
+# Hashes are encoded using a memory efficient data structure when they have a
+# small number of entries, and the biggest entry does not exceed a given
+# threshold. These thresholds can be configured using the following directives.
+hash-max-ziplist-entries 512
+hash-max-ziplist-value 64
+
+# Similarly to hashes, small lists are also encoded in a special way in order
+# to save a lot of space. The special representation is only used when
+# you are under the following limits:
+list-max-ziplist-entries 512
+list-max-ziplist-value 64
+
+# Sets have a special encoding in just one case: when a set is composed
+# of just strings that happens to be integers in radix 10 in the range
+# of 64 bit signed integers.
+# The following configuration setting sets the limit in the size of the
+# set in order to use this special memory saving encoding.
+set-max-intset-entries 512
+
+# Similarly to hashes and lists, sorted sets are also specially encoded in
+# order to save a lot of space. This encoding is only used when the length and
+# elements of a sorted set are below the following limits:
+zset-max-ziplist-entries 128
+zset-max-ziplist-value 64
+
+# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in
+# order to help rehashing the main Redis hash table (the one mapping top-level
+# keys to values). The hash table implementation Redis uses (see dict.c)
+# performs a lazy rehashing: the more operation you run into an hash table
+# that is rehashing, the more rehashing "steps" are performed, so if the
+# server is idle the rehashing is never complete and some more memory is used
+# by the hash table.
+# 
+# The default is to use this millisecond 10 times every second in order to
+# active rehashing the main dictionaries, freeing memory when possible.
+#
+# If unsure:
+# use "activerehashing no" if you have hard latency requirements and it is
+# not a good thing in your environment that Redis can reply from time to time
+# to queries with 2 milliseconds delay.
+#
+# use "activerehashing yes" if you don't have such hard requirements but
+# want to free memory asap when possible.
+activerehashing yes
+
+# The client output buffer limits can be used to force disconnection of clients
+# that are not reading data from the server fast enough for some reason (a
+# common reason is that a Pub/Sub client can't consume messages as fast as the
+# publisher can produce them).
+#
+# The limit can be set differently for the three different classes of clients:
+#
+# normal -> normal clients
+# slave  -> slave clients and MONITOR clients
+# pubsub -> clients subscribed to at least one pubsub channel or pattern
+#
+# The syntax of every client-output-buffer-limit directive is the following:
+#
+# client-output-buffer-limit <class> <hard limit> <soft limit> <soft seconds>
+#
+# A client is immediately disconnected once the hard limit is reached, or if
+# the soft limit is reached and remains reached for the specified number of
+# seconds (continuously).
+# So for instance if the hard limit is 32 megabytes and the soft limit is
+# 16 megabytes / 10 seconds, the client will get disconnected immediately
+# if the size of the output buffers reach 32 megabytes, but will also get
+# disconnected if the client reaches 16 megabytes and continuously overcomes
+# the limit for 10 seconds.
+#
+# By default normal clients are not limited because they don't receive data
+# without asking (in a push way), but just after a request, so only
+# asynchronous clients may create a scenario where data is requested faster
+# than it can read.
+#
+# Instead there is a default limit for pubsub and slave clients, since
+# subscribers and slaves receive data in a push fashion.
+#
+# Both the hard or the soft limit can be disabled by setting them to zero.
+client-output-buffer-limit normal 0 0 0
+client-output-buffer-limit slave 256mb 64mb 60
+client-output-buffer-limit pubsub 32mb 8mb 60
+
+# Redis calls an internal function to perform many background tasks, like
+# closing connections of clients in timeout, purging expired keys that are
+# never requested, and so forth.
+#
+# Not all tasks are performed with the same frequency, but Redis checks for
+# tasks to perform accordingly to the specified "hz" value.
+#
+# By default "hz" is set to 10. Raising the value will use more CPU when
+# Redis is idle, but at the same time will make Redis more responsive when
+# there are many keys expiring at the same time, and timeouts may be
+# handled with more precision.
+#
+# The range is between 1 and 500, however a value over 100 is usually not
+# a good idea. Most users should use the default of 10 and raise this up to
+# 100 only in environments where very low latency is required.
+hz 10
+
+# When a child rewrites the AOF file, if the following option is enabled
+# the file will be fsync-ed every 32 MB of data generated. This is useful
+# in order to commit the file to the disk more incrementally and avoid
+# big latency spikes.
+aof-rewrite-incremental-fsync yes
+
+################################## INCLUDES ###################################
+
+# Include one or more other config files here.  This is useful if you
+# have a standard template that goes to all Redis server but also need
+# to customize a few per-server settings.  Include files can include
+# other files, so use this wisely.
+#
+# include /path/to/local.conf
+# include /path/to/other.conf
diff --git a/deploy/services/gddo-server/run b/deploy/services/gddo-server/run
new file mode 100755
index 0000000..efd74b0
--- /dev/null
+++ b/deploy/services/gddo-server/run
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec /go/bin/gddo-server 2>&1
diff --git a/deploy/services/nginx/run b/deploy/services/nginx/run
new file mode 100755
index 0000000..7c6c6af
--- /dev/null
+++ b/deploy/services/nginx/run
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec /usr/sbin/nginx 2>&1
diff --git a/deploy/services/redis/run b/deploy/services/redis/run
new file mode 100755
index 0000000..255fc64
--- /dev/null
+++ b/deploy/services/redis/run
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec /usr/bin/redis-server 2>&1
diff --git a/doc/builder.go b/doc/builder.go
new file mode 100644
index 0000000..d0e658b
--- /dev/null
+++ b/doc/builder.go
@@ -0,0 +1,628 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package doc
+
+import (
+	"bytes"
+	"errors"
+	"go/ast"
+	"go/build"
+	"go/doc"
+	"go/format"
+	"go/parser"
+	"go/token"
+	"regexp"
+	"sort"
+	"strings"
+	"time"
+	"unicode"
+	"unicode/utf8"
+
+	"github.com/golang/gddo/gosrc"
+)
+
+// startsWithUppercase reports whether the first rune of s is upper case.
+func startsWithUppercase(s string) bool {
+	r, _ := utf8.DecodeRuneInString(s)
+	return unicode.IsUpper(r)
+}
+
+// badSynopsisPrefixes lists comment prefixes that mark boilerplate text
+// (license headers, generated-code banners, editor settings, TODOs) rather
+// than a real package synopsis; synopsis returns "" for these.
+var badSynopsisPrefixes = []string{
+	"Autogenerated by Thrift Compiler",
+	"Automatically generated ",
+	"Auto-generated by ",
+	"Copyright ",
+	"COPYRIGHT ",
+	`THE SOFTWARE IS PROVIDED "AS IS"`,
+	"TODO: ",
+	"vim:",
+}
+
+// synopsis extracts the first sentence from s. All runs of whitespace are
+// replaced by a single space. The result is capped at 400 bytes and is ""
+// when the text looks like boilerplate rather than package documentation.
+func synopsis(s string) string {
+
+	// Only consider the first paragraph.
+	parts := strings.SplitN(s, "\n\n", 2)
+	s = parts[0]
+
+	// Scan byte by byte, tracking what the previous byte was so that the
+	// first period-followed-by-whitespace ends the sentence.
+	var buf []byte
+	const (
+		other = iota
+		period
+		space
+	)
+	last := space
+Loop:
+	for i := 0; i < len(s); i++ {
+		b := s[i]
+		switch b {
+		case ' ', '\t', '\r', '\n':
+			switch last {
+			case period:
+				// Whitespace after a period: sentence complete.
+				break Loop
+			case other:
+				// Collapse a run of whitespace to one space.
+				buf = append(buf, ' ')
+				last = space
+			}
+		case '.':
+			last = period
+			buf = append(buf, b)
+		default:
+			last = other
+			buf = append(buf, b)
+		}
+	}
+
+	// Ensure that synopsis fits an App Engine datastore text property.
+	const m = 400
+	if len(buf) > m {
+		buf = buf[:m]
+		// Truncate at a word boundary when possible.
+		if i := bytes.LastIndex(buf, []byte{' '}); i >= 0 {
+			buf = buf[:i]
+		}
+		buf = append(buf, " ..."...)
+	}
+
+	s = string(buf)
+
+	r, n := utf8.DecodeRuneInString(s)
+	if n < 0 || unicode.IsPunct(r) || unicode.IsSymbol(r) {
+		// ignore Markdown headings, editor settings, Go build constraints, and * in poorly formatted block comments.
+		s = ""
+	} else {
+		// Discard synopses that start with known boilerplate.
+		for _, prefix := range badSynopsisPrefixes {
+			if strings.HasPrefix(s, prefix) {
+				s = ""
+				break
+			}
+		}
+	}
+
+	return s
+}
+
+// referencesPats match candidate import paths mentioned in plain-text files
+// (e.g. READMEs): quoted paths, drone.io badge URLs, and paths following
+// "go get"/"goinstall" or known godoc-site URL prefixes. The path is always
+// captured in group 1.
+var referencesPats = []*regexp.Regexp{
+	regexp.MustCompile(`"([-a-zA-Z0-9~+_./]+)"`), // quoted path
+	regexp.MustCompile(`https://drone\.io/([-a-zA-Z0-9~+_./]+)/status\.png`),
+	regexp.MustCompile(`\b(?:` + strings.Join([]string{
+		`go\s+get\s+`,
+		`goinstall\s+`,
+		regexp.QuoteMeta("http://godoc.org/"),
+		regexp.QuoteMeta("http://gopkgdoc.appspot.com/pkg/"),
+		regexp.QuoteMeta("http://go.pkgdoc.org/"),
+		regexp.QuoteMeta("http://gowalker.org/"),
+	}, "|") + `)([-a-zA-Z0-9~+_./]+)`),
+}
+
+// addReferences adds packages referenced in plain text s to the references
+// set. Only paths accepted by gosrc.IsValidRemotePath are recorded.
+func addReferences(references map[string]bool, s []byte) {
+	for _, pat := range referencesPats {
+		for _, m := range pat.FindAllSubmatch(s, -1) {
+			// m[1] is the captured import path candidate.
+			p := string(m[1])
+			if gosrc.IsValidRemotePath(p) {
+				references[p] = true
+			}
+		}
+	}
+}
+
+// byFuncName implements sort.Interface, ordering funcs by name.
+type byFuncName []*doc.Func
+
+func (s byFuncName) Len() int           { return len(s) }
+func (s byFuncName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
+func (s byFuncName) Less(i, j int) bool { return s[i].Name < s[j].Name }
+
+// removeAssociations detaches the functions go/doc grouped under each type
+// and appends them to the package-level function list, sorted by name.
+func removeAssociations(dpkg *doc.Package) {
+	for _, t := range dpkg.Types {
+		dpkg.Funcs = append(dpkg.Funcs, t.Funcs...)
+		t.Funcs = nil
+	}
+	sort.Sort(byFuncName(dpkg.Funcs))
+}
+
+// builder holds the state used when building the documentation.
+type builder struct {
+	srcs     map[string]*source // Go source files, keyed by file name.
+	fset     *token.FileSet // positions for all parsed sources.
+	examples []*doc.Example // collected examples; consumed by getExamples.
+	buf      []byte // scratch space for printNode method.
+}
+
+// Value describes a documented const or var declaration.
+type Value struct {
+	Decl Code // declaration code, as produced by builder.printDecl.
+	Pos  Pos // source position of the declaration.
+	Doc  string // associated documentation comment.
+}
+
+// values converts go/doc const and var documentation to the serializable
+// Value representation.
+func (b *builder) values(vdocs []*doc.Value) []*Value {
+	var result []*Value
+	for _, d := range vdocs {
+		result = append(result, &Value{
+			Decl: b.printDecl(d.Decl),
+			Pos:  b.position(d.Decl),
+			Doc:  d.Doc,
+		})
+	}
+	return result
+}
+
+// Note is a single marker comment (e.g. "BUG(uid): ...") collected by go/doc.
+type Note struct {
+	Pos  Pos // source position of the note.
+	UID  string // the uid inside the marker, e.g. "uid" in BUG(uid).
+	Body string // note text, surrounding whitespace trimmed.
+}
+
+// posNode wraps a bare token.Pos so it satisfies the ast.Node interface
+// (Pos/End both return the wrapped position), allowing position helpers
+// that expect an ast.Node to be used on it (see builder.notes).
+type posNode token.Pos
+
+func (p posNode) Pos() token.Pos { return token.Pos(p) }
+func (p posNode) End() token.Pos { return token.Pos(p) }
+
+// notes converts the go/doc notes map (marker tag -> notes) to the
+// serializable Note representation. It returns nil when there are no notes.
+func (b *builder) notes(gnotes map[string][]*doc.Note) map[string][]*Note {
+	if len(gnotes) == 0 {
+		return nil
+	}
+	notes := make(map[string][]*Note)
+	for tag, gvalues := range gnotes {
+		values := make([]*Note, len(gvalues))
+		for i := range gvalues {
+			values[i] = &Note{
+				Pos:  b.position(posNode(gvalues[i].Pos)),
+				UID:  gvalues[i].UID,
+				Body: strings.TrimSpace(gvalues[i].Body),
+			}
+		}
+		notes[tag] = values
+	}
+	return notes
+}
+
+// Example describes a documented example function.
+type Example struct {
+	Name   string // suffix name; "" for the unadorned example.
+	Doc    string // documentation comment for the example.
+	Code   Code // example body.
+	Play   string // playground source or a format error message; "" if not playable.
+	Output string // expected output, if any.
+}
+
+// exampleOutputRx matches an "// output:" comment marker, case-insensitively.
+var exampleOutputRx = regexp.MustCompile(`(?i)//[[:space:]]*output:`)
+
+// getExamples returns the examples associated with the identifier name
+// ("" selects package-level examples). A lower-case "_suffix" on the
+// example name becomes the example's title-cased display name.
+func (b *builder) getExamples(name string) []*Example {
+	var docs []*Example
+	for _, e := range b.examples {
+		if !strings.HasPrefix(e.Name, name) {
+			continue
+		}
+		n := e.Name[len(name):]
+		if n != "" {
+			// Anything after name must be exactly one "_suffix".
+			if i := strings.LastIndex(n, "_"); i != 0 {
+				continue
+			}
+			n = n[1:]
+			// An upper-case suffix means the example belongs to a
+			// different identifier (e.g. a method), not to name.
+			if startsWithUppercase(n) {
+				continue
+			}
+			n = strings.Title(n)
+		}
+
+		code, output := b.printExample(e)
+
+		// Render the playground form of the example, if present.
+		play := ""
+		if e.Play != nil {
+			b.buf = b.buf[:0]
+			if err := format.Node(sliceWriter{&b.buf}, b.fset, e.Play); err != nil {
+				play = err.Error()
+			} else {
+				play = string(b.buf)
+			}
+		}
+
+		docs = append(docs, &Example{
+			Name:   n,
+			Doc:    e.Doc,
+			Code:   code,
+			Output: output,
+			Play:   play})
+	}
+	return docs
+}
+
+// Func describes a documented function or method.
+type Func struct {
+	Decl     Code // declaration code, as produced by builder.printDecl.
+	Pos      Pos // source position of the declaration.
+	Doc      string // documentation comment.
+	Name     string // function or method name.
+	Recv     string // receiver, e.g. "T" or "*T"; "" for plain functions.
+	Examples []*Example // examples associated with this function.
+}
+
+// funcs converts go/doc function documentation to the serializable Func
+// representation, attaching any matching examples.
+func (b *builder) funcs(fdocs []*doc.Func) []*Func {
+	var result []*Func
+	for _, d := range fdocs {
+		// Derive the example name prefix for this function: "Name" for
+		// plain functions, "Type_Method" for methods (the '*' of a
+		// pointer receiver is stripped).
+		var exampleName string
+		switch {
+		case d.Recv == "":
+			exampleName = d.Name
+		case d.Recv[0] == '*':
+			exampleName = d.Recv[1:] + "_" + d.Name
+		default:
+			exampleName = d.Recv + "_" + d.Name
+		}
+		result = append(result, &Func{
+			Decl:     b.printDecl(d.Decl),
+			Pos:      b.position(d.Decl),
+			Doc:      d.Doc,
+			Name:     d.Name,
+			Recv:     d.Recv,
+			Examples: b.getExamples(exampleName),
+		})
+	}
+	return result
+}
+
+// Type describes a documented type declaration together with the values,
+// functions, methods and examples that go/doc grouped under it.
+type Type struct {
+	Doc      string
+	Name     string
+	Decl     Code
+	Pos      Pos
+	Consts   []*Value
+	Vars     []*Value
+	Funcs    []*Func
+	Methods  []*Func
+	Examples []*Example
+}
+
+// types converts go/doc type documentation, including each type's grouped
+// consts, vars, functions, methods and examples, to the serializable Type
+// representation.
+func (b *builder) types(tdocs []*doc.Type) []*Type {
+	var result []*Type
+	for _, d := range tdocs {
+		result = append(result, &Type{
+			Doc:      d.Doc,
+			Name:     d.Name,
+			Decl:     b.printDecl(d.Decl),
+			Pos:      b.position(d.Decl),
+			Consts:   b.values(d.Consts),
+			Vars:     b.values(d.Vars),
+			Funcs:    b.funcs(d.Funcs),
+			Methods:  b.funcs(d.Methods),
+			Examples: b.getExamples(d.Name),
+		})
+	}
+	return result
+}
+
+// packageNamePats are tried in order to guess a package's name from its
+// import path without importing it (see simpleImporter). The name is
+// captured in group 1; the final catch-all pattern matches the last path
+// element.
+var packageNamePats = []*regexp.Regexp{
+	// Last element with .suffix removed.
+	regexp.MustCompile(`/([^-./]+)[-.](?:git|svn|hg|bzr|v\d+)$`),
+
+	// Last element with "go" prefix or suffix removed.
+	regexp.MustCompile(`/([^-./]+)[-.]go$`),
+	regexp.MustCompile(`/go[-.]([^-./]+)$`),
+
+	// Special cases for popular repos.
+	regexp.MustCompile(`^code\.google\.com/p/google-api-go-client/([^/]+)/v[^/]+$`),
+	regexp.MustCompile(`^code\.google\.com/p/biogo\.([^/]+)$`),
+
+	// It's also common for the last element of the path to contain an
+	// extra "go" prefix, but not always. TODO: examine unresolved ids to
+	// detect when trimming the "go" prefix is appropriate.
+
+	// Last component of path.
+	regexp.MustCompile(`([^/]+)$`),
+}
+
+// simpleImporter resolves an import path to an *ast.Object without
+// compiling or loading the package: it reuses a previously created object
+// from the imports cache, or guesses the package name from the path via
+// packageNamePats, caching the result. It returns an error only when the
+// name cannot be guessed.
+func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
+	pkg := imports[path]
+	if pkg != nil {
+		return pkg, nil
+	}
+
+	// Guess the package name without importing it.
+	for _, pat := range packageNamePats {
+		m := pat.FindStringSubmatch(path)
+		if m != nil {
+			pkg = ast.NewObj(ast.Pkg, m[1])
+			pkg.Data = ast.NewScope(nil)
+			imports[path] = pkg
+			return pkg, nil
+		}
+	}
+
+	return nil, errors.New("package not found")
+}
+
+// File identifies a source file belonging to the package.
+type File struct {
+	Name string
+	URL  string
+}
+
+// Pos is a compact source position: a line number plus an index into the
+// package's file list.
+type Pos struct {
+	Line int32  // 0 if not valid.
+	N    uint16 // number of lines - 1
+	File int16  // index in Package.Files
+}
+
+// source holds one fetched Go source file while documentation is built.
+type source struct {
+	name      string
+	browseURL string
+	data      []byte
+	index     int
+}
+
+// PackageVersion is modified when previously stored packages are invalid.
+const PackageVersion = "6"
+
+// Package is the structured documentation and project metadata built for a
+// single import path from a gosrc.Directory (see newPackage).
+type Package struct {
+	// The import path for this package.
+	ImportPath string
+
+	// Import path prefix for all packages in the project.
+	ProjectRoot string
+
+	// Name of the project.
+	ProjectName string
+
+	// Project home page.
+	ProjectURL string
+
+	// Errors found when fetching or parsing this package.
+	Errors []string
+
+	// Packages referenced in README files.
+	References []string
+
+	// Version control system: git, hg, bzr, ...
+	VCS string
+
+	// Version control: belongs to a dead end fork
+	DeadEndFork bool
+
+	// The time this object was created.
+	Updated time.Time
+
+	// Cache validation tag. This tag is not necessarily an HTTP entity tag.
+	// The tag is "" if there is no meaningful cache validation for the VCS.
+	Etag string
+
+	// Subdirectories, possibly containing Go code.
+	Subdirectories []string
+
+	// Package name or "" if no package for this import path. The preceding
+	// fields are set even if a package is not found for the import path.
+	Name string
+
+	// Synopsis and full documentation for the package.
+	Synopsis string
+	Doc      string
+
+	// Format this package as a command.
+	IsCmd bool
+
+	// True if package documentation is incomplete.
+	Truncated bool
+
+	// Environment
+	GOOS, GOARCH string
+
+	// Top-level declarations.
+	Consts []*Value
+	Funcs  []*Func
+	Types  []*Type
+	Vars   []*Value
+
+	// Package examples
+	Examples []*Example
+
+	// Notes keyed by marker tag, e.g. "BUG", "TODO".
+	Notes map[string][]*Note
+
+	// Source.
+	LineFmt   string
+	BrowseURL string
+	Files     []*File
+	TestFiles []*File
+
+	// Source size in bytes.
+	SourceSize     int
+	TestSourceSize int
+
+	// Imports
+	Imports      []string
+	TestImports  []string
+	XTestImports []string
+}
+
+var goEnvs = []struct{ GOOS, GOARCH string }{ // build environments tried in order; index 0 is the default
+	{"linux", "amd64"},
+	{"darwin", "amd64"},
+	{"windows", "amd64"},
+}
+
+// SetDefaultGOOS sets given GOOS value as default one to use when building
+// package documents.
+func SetDefaultGOOS(goos string) {
+	if goos == "" {
+		return
+	}
+	var i int
+	for ; i < len(goEnvs); i++ { // locate goos among the known build environments
+		if goEnvs[i].GOOS == goos {
+			break
+		}
+	}
+	switch i {
+	case 0:
+		return // already the default environment
+	case len(goEnvs):
+		env := goEnvs[0] // unknown GOOS: append a copy of the default env with GOOS replaced
+		env.GOOS = goos
+		goEnvs = append(goEnvs, env)
+	}
+	goEnvs[0], goEnvs[i] = goEnvs[i], goEnvs[0] // promote the chosen environment to the default slot
+}
+
+func newPackage(dir *gosrc.Directory) (*Package, error) { // builds a Package document from a fetched source directory
+
+	pkg := &Package{
+		Updated:        time.Now().UTC(),
+		LineFmt:        dir.LineFmt,
+		ImportPath:     dir.ImportPath,
+		ProjectRoot:    dir.ProjectRoot,
+		ProjectName:    dir.ProjectName,
+		ProjectURL:     dir.ProjectURL,
+		BrowseURL:      dir.BrowseURL,
+		Etag:           PackageVersion + "-" + dir.Etag, // version prefix invalidates cached entries when PackageVersion changes
+		VCS:            dir.VCS,
+		DeadEndFork:    dir.DeadEndFork,
+		Subdirectories: dir.Subdirectories,
+	}
+
+	var b builder
+	b.srcs = make(map[string]*source)
+	references := make(map[string]bool)
+	for _, file := range dir.Files {
+		if strings.HasSuffix(file.Name, ".go") {
+			gosrc.OverwriteLineComments(file.Data) // NOTE(review): presumably neutralizes //line comments before parsing — confirm in gosrc
+			b.srcs[file.Name] = &source{name: file.Name, browseURL: file.BrowseURL, data: file.Data}
+		} else {
+			addReferences(references, file.Data) // non-Go files (e.g. README) are scanned for package references
+		}
+	}
+
+	for r := range references {
+		pkg.References = append(pkg.References, r)
+	}
+
+	if len(b.srcs) == 0 { // no Go files: return the metadata-only package
+		return pkg, nil
+	}
+
+	b.fset = token.NewFileSet()
+
+	// Find the package and associated files.
+
+	ctxt := build.Context{
+		GOOS:        "linux",
+		GOARCH:      "amd64",
+		CgoEnabled:  true,
+		ReleaseTags: build.Default.ReleaseTags,
+		BuildTags:   build.Default.BuildTags,
+		Compiler:    "gc",
+	}
+
+	var err error
+	var bpkg *build.Package
+
+	for _, env := range goEnvs { // try each environment in order; goEnvs[0] is the default
+		ctxt.GOOS = env.GOOS
+		ctxt.GOARCH = env.GOARCH
+		bpkg, err = dir.Import(&ctxt, build.ImportComment)
+		if _, ok := err.(*build.NoGoError); !ok { // keep trying other environments only on NoGoError
+			break
+		}
+	}
+	if err != nil {
+		if _, ok := err.(*build.NoGoError); !ok { // NoGoError itself is not recorded as a package error
+			pkg.Errors = append(pkg.Errors, err.Error())
+		}
+		return pkg, nil
+	}
+
+	if bpkg.ImportComment != "" && bpkg.ImportComment != dir.ImportPath { // enforce the canonical path declared by the import comment
+		return nil, gosrc.NotFoundError{
+			Message:  "not at canonical import path",
+			Redirect: bpkg.ImportComment,
+		}
+	}
+
+	// Parse the Go files
+
+	files := make(map[string]*ast.File)
+	names := append(bpkg.GoFiles, bpkg.CgoFiles...)
+	sort.Strings(names)
+	pkg.Files = make([]*File, len(names))
+	for i, name := range names {
+		file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
+		if err != nil {
+			pkg.Errors = append(pkg.Errors, err.Error()) // parse failures are reported but do not abort the build
+		} else {
+			files[name] = file
+		}
+		src := b.srcs[name]
+		src.index = i // File/Pos records refer back to sources by this index
+		pkg.Files[i] = &File{Name: name, URL: src.browseURL}
+		pkg.SourceSize += len(src.data)
+	}
+
+	apkg, _ := ast.NewPackage(b.fset, files, simpleImporter, nil) // resolution errors ignored; simpleImporter stubs imports
+
+	// Find examples in the test files.
+
+	names = append(bpkg.TestGoFiles, bpkg.XTestGoFiles...)
+	sort.Strings(names)
+	pkg.TestFiles = make([]*File, len(names))
+	for i, name := range names {
+		file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
+		if err != nil {
+			pkg.Errors = append(pkg.Errors, err.Error())
+		} else {
+			b.examples = append(b.examples, doc.Examples(file)...)
+		}
+		pkg.TestFiles[i] = &File{Name: name, URL: b.srcs[name].browseURL}
+		pkg.TestSourceSize += len(b.srcs[name].data)
+	}
+
+	b.vetPackage(pkg, apkg)
+
+	mode := doc.Mode(0)
+	if pkg.ImportPath == "builtin" { // builtin is documented with unexported declarations included
+		mode |= doc.AllDecls
+	}
+
+	dpkg := doc.New(apkg, pkg.ImportPath, mode)
+
+	if pkg.ImportPath == "builtin" {
+		removeAssociations(dpkg)
+	}
+
+	pkg.Name = dpkg.Name
+	pkg.Doc = strings.TrimRight(dpkg.Doc, " \t\n\r")
+	pkg.Synopsis = synopsis(pkg.Doc)
+
+	pkg.Examples = b.getExamples("") // examples not attached to a specific declaration
+	pkg.IsCmd = bpkg.IsCommand()
+	pkg.GOOS = ctxt.GOOS
+	pkg.GOARCH = ctxt.GOARCH
+
+	pkg.Consts = b.values(dpkg.Consts)
+	pkg.Funcs = b.funcs(dpkg.Funcs)
+	pkg.Types = b.types(dpkg.Types)
+	pkg.Vars = b.values(dpkg.Vars)
+	pkg.Notes = b.notes(dpkg.Notes)
+
+	pkg.Imports = bpkg.Imports
+	pkg.TestImports = bpkg.TestImports
+	pkg.XTestImports = bpkg.XTestImports
+
+	return pkg, nil
+}
diff --git a/doc/builder_test.go b/doc/builder_test.go
new file mode 100644
index 0000000..7160cda
--- /dev/null
+++ b/doc/builder_test.go
@@ -0,0 +1,114 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package doc
+
+import (
+	"go/ast"
+	"testing"
+)
+
+var badSynopsis = []string{
+	"+build !release",
+	"COPYRIGHT Jimmy Bob",
+	"### Markdown heading",
+	"-*- indent-tabs-mode: nil -*-",
+	"vim:set ts=2 sw=2 et ai ft=go:",
+}
+
+func TestBadSynopsis(t *testing.T) {
+	for _, s := range badSynopsis {
+		if synopsis(s) != "" {
+			t.Errorf(`synopsis(%q) did not return ""`, s)
+		}
+	}
+}
+
+const readme = `
+    $ go get github.com/user/repo/pkg1
+    [foo](http://gopkgdoc.appspot.com/pkg/github.com/user/repo/pkg2)
+    [foo](http://go.pkgdoc.org/github.com/user/repo/pkg3)
+    [foo](http://godoc.org/github.com/user/repo/pkg4)
+    <http://go.pkgdoc.org/github.com/user/repo/pkg5>
+    [foo](http://godoc.org/github.com/user/repo/pkg6#Export)
+    http://gowalker.org/github.com/user/repo/pkg7
+    Build Status: [![Build Status](https://drone.io/github.com/user/repo1/status.png)](https://drone.io/github.com/user/repo1/latest)
+    'go get example.org/package1' will install package1.
+    (http://go.pkgdoc.org/example.org/package2 "Package2's documentation on GoPkgDoc").
+    import "example.org/package3"
+`
+
+var expectedReferences = []string{
+	"github.com/user/repo/pkg1",
+	"github.com/user/repo/pkg2",
+	"github.com/user/repo/pkg3",
+	"github.com/user/repo/pkg4",
+	"github.com/user/repo/pkg5",
+	"github.com/user/repo/pkg6",
+	"github.com/user/repo/pkg7",
+	"github.com/user/repo1",
+	"example.org/package1",
+	"example.org/package2",
+	"example.org/package3",
+}
+
+func TestReferences(t *testing.T) {
+	references := make(map[string]bool)
+	addReferences(references, []byte(readme))
+	for _, r := range expectedReferences {
+		if !references[r] {
+			t.Errorf("missing %s", r)
+		}
+		delete(references, r)
+	}
+	for r := range references {
+		t.Errorf("extra %s", r)
+	}
+}
+
+var simpleImporterTests = []struct {
+	path string
+	name string
+}{
+	// Last element with .suffix removed.
+	{"example.com/user/name.git", "name"},
+	{"example.com/user/name.svn", "name"},
+	{"example.com/user/name.hg", "name"},
+	{"example.com/user/name.bzr", "name"},
+	{"example.com/name.v0", "name"},
+	{"example.com/user/repo/name.v11", "name"},
+
+	// Last element with "go" prefix or suffix removed.
+	{"github.com/user/go-name", "name"},
+	{"github.com/user/go.name", "name"},
+	{"github.com/user/name.go", "name"},
+	{"github.com/user/name-go", "name"},
+
+	// Special cases for popular repos.
+	{"code.google.com/p/biogo.name", "name"},
+	{"code.google.com/p/google-api-go-client/name/v3", "name"},
+
+	// Use last element of path.
+	{"example.com/user/name.other", "name.other"},
+	{"example.com/.v0", ".v0"},
+	{"example.com/user/repo.v2/name", "name"},
+	{"github.com/user/namev0", "namev0"},
+	{"github.com/user/goname", "goname"},
+	{"github.com/user/namego", "namego"},
+	{"github.com/user/name", "name"},
+	{"name", "name"},
+	{"user/name", "name"},
+}
+
+func TestSimpleImporter(t *testing.T) {
+	for _, tt := range simpleImporterTests {
+		m := make(map[string]*ast.Object)
+		obj, _ := simpleImporter(m, tt.path)
+		if obj.Name != tt.name {
+			t.Errorf("simpleImporter(%q) = %q, want %q", tt.path, obj.Name, tt.name)
+		}
+	}
+}
diff --git a/doc/code.go b/doc/code.go
new file mode 100644
index 0000000..af2b537
--- /dev/null
+++ b/doc/code.go
@@ -0,0 +1,359 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package doc
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"go/doc"
+	"go/printer"
+	"go/scanner"
+	"go/token"
+	"math"
+	"strconv"
+)
+
+const (
+	notPredeclared = iota
+	predeclaredType
+	predeclaredConstant
+	predeclaredFunction
+)
+
+// predeclared represents the set of all predeclared identifiers.
+var predeclared = map[string]int{
+	"bool":       predeclaredType,
+	"byte":       predeclaredType,
+	"complex128": predeclaredType,
+	"complex64":  predeclaredType,
+	"error":      predeclaredType,
+	"float32":    predeclaredType,
+	"float64":    predeclaredType,
+	"int16":      predeclaredType,
+	"int32":      predeclaredType,
+	"int64":      predeclaredType,
+	"int8":       predeclaredType,
+	"int":        predeclaredType,
+	"rune":       predeclaredType,
+	"string":     predeclaredType,
+	"uint16":     predeclaredType,
+	"uint32":     predeclaredType,
+	"uint64":     predeclaredType,
+	"uint8":      predeclaredType,
+	"uint":       predeclaredType,
+	"uintptr":    predeclaredType,
+
+	"true":  predeclaredConstant,
+	"false": predeclaredConstant,
+	"iota":  predeclaredConstant,
+	"nil":   predeclaredConstant,
+
+	"append":  predeclaredFunction,
+	"cap":     predeclaredFunction,
+	"close":   predeclaredFunction,
+	"complex": predeclaredFunction,
+	"copy":    predeclaredFunction,
+	"delete":  predeclaredFunction,
+	"imag":    predeclaredFunction,
+	"len":     predeclaredFunction,
+	"make":    predeclaredFunction,
+	"new":     predeclaredFunction,
+	"panic":   predeclaredFunction,
+	"print":   predeclaredFunction,
+	"println": predeclaredFunction,
+	"real":    predeclaredFunction,
+	"recover": predeclaredFunction,
+}
+
+type AnnotationKind int16
+
+const (
+	// Link to export in package specifed by Paths[PathIndex] with fragment
+	// Text[strings.LastIndex(Text[Pos:End], ".")+1:End].
+	LinkAnnotation AnnotationKind = iota
+
+	// Anchor with name specified by Text[Pos:End] or typeName + "." +
+	// Text[Pos:End] for type declarations.
+	AnchorAnnotation
+
+	// Comment.
+	CommentAnnotation
+
+	// Link to package specified by Paths[PathIndex].
+	PackageLinkAnnotation
+
+	// Link to builtin entity with name Text[Pos:End].
+	BuiltinAnnotation
+)
+
+type Annotation struct {
+	Pos, End  int32
+	Kind      AnnotationKind
+	PathIndex int16
+}
+
+type Code struct {
+	Text        string
+	Annotations []Annotation
+	Paths       []string
+}
+
+// declVisitor modifies a declaration AST for printing and collects annotations.
+type declVisitor struct {
+	annotations []Annotation
+	paths       []string
+	pathIndex   map[string]int
+	comments    []*ast.CommentGroup
+}
+
+func (v *declVisitor) add(kind AnnotationKind, importPath string) {
+	pathIndex := -1 // -1 means the annotation has no associated import path
+	if importPath != "" {
+		var ok bool
+		pathIndex, ok = v.pathIndex[importPath]
+		if !ok {
+			pathIndex = len(v.paths) // intern the path; annotations refer to it by index
+			v.paths = append(v.paths, importPath)
+			v.pathIndex[importPath] = pathIndex
+		}
+	}
+	v.annotations = append(v.annotations, Annotation{Kind: kind, PathIndex: int16(pathIndex)})
+}
+
+func (v *declVisitor) ignoreName() { // records a placeholder for an identifier that gets no markup
+	v.add(-1, "") // kind -1 is skipped when annotations are matched to scanned tokens
+}
+
+func (v *declVisitor) Visit(n ast.Node) ast.Visitor {
+	switch n := n.(type) {
+	case *ast.TypeSpec:
+		v.ignoreName()
+		switch n := n.Type.(type) {
+		case *ast.InterfaceType:
+			for _, f := range n.Methods.List {
+				for _ = range f.Names {
+					v.add(AnchorAnnotation, "")
+				}
+				ast.Walk(v, f.Type)
+			}
+		case *ast.StructType:
+			for _, f := range n.Fields.List {
+				for _ = range f.Names {
+					v.add(AnchorAnnotation, "")
+				}
+				ast.Walk(v, f.Type)
+			}
+		default:
+			ast.Walk(v, n)
+		}
+	case *ast.FuncDecl:
+		if n.Recv != nil {
+			ast.Walk(v, n.Recv)
+		}
+		v.ignoreName()
+		ast.Walk(v, n.Type)
+	case *ast.Field:
+		for _ = range n.Names {
+			v.ignoreName()
+		}
+		ast.Walk(v, n.Type)
+	case *ast.ValueSpec:
+		for _ = range n.Names {
+			v.add(AnchorAnnotation, "")
+		}
+		if n.Type != nil {
+			ast.Walk(v, n.Type)
+		}
+		for _, x := range n.Values {
+			ast.Walk(v, x)
+		}
+	case *ast.Ident:
+		switch {
+		case n.Obj == nil && predeclared[n.Name] != notPredeclared:
+			v.add(BuiltinAnnotation, "")
+		case n.Obj != nil && ast.IsExported(n.Name):
+			v.add(LinkAnnotation, "")
+		default:
+			v.ignoreName()
+		}
+	case *ast.SelectorExpr:
+		if x, _ := n.X.(*ast.Ident); x != nil {
+			if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
+				if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
+					if path, err := strconv.Unquote(spec.Path.Value); err == nil {
+						v.add(PackageLinkAnnotation, path)
+						if path == "C" {
+							v.ignoreName()
+						} else {
+							v.add(LinkAnnotation, path)
+						}
+						return nil
+					}
+				}
+			}
+		}
+		ast.Walk(v, n.X)
+		v.ignoreName()
+	case *ast.BasicLit:
+		if n.Kind == token.STRING && len(n.Value) > 128 {
+			v.comments = append(v.comments,
+				&ast.CommentGroup{List: []*ast.Comment{{
+					Slash: n.Pos(),
+					Text:  fmt.Sprintf("/* %d byte string literal not displayed */", len(n.Value)),
+				}}})
+			n.Value = `""`
+		} else {
+			return v
+		}
+	case *ast.CompositeLit:
+		if len(n.Elts) > 100 {
+			if n.Type != nil {
+				ast.Walk(v, n.Type)
+			}
+			v.comments = append(v.comments,
+				&ast.CommentGroup{List: []*ast.Comment{{
+					Slash: n.Lbrace,
+					Text:  fmt.Sprintf("/* %d elements not displayed */", len(n.Elts)),
+				}}})
+			n.Elts = n.Elts[:0]
+		} else {
+			return v
+		}
+	default:
+		return v
+	}
+	return nil
+}
+
+func (b *builder) printDecl(decl ast.Decl) (d Code) {
+	v := &declVisitor{pathIndex: make(map[string]int)}
+	ast.Walk(v, decl) // first pass: collect annotations and elide oversized literals
+	b.buf = b.buf[:0] // reuse the builder's scratch buffer
+	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(
+		sliceWriter{&b.buf},
+		b.fset,
+		&printer.CommentedNode{Node: decl, Comments: v.comments})
+	if err != nil {
+		return Code{Text: err.Error()}
+	}
+
+	var annotations []Annotation
+	var s scanner.Scanner
+	fset := token.NewFileSet()
+	file := fset.AddFile("", fset.Base(), len(b.buf))
+	s.Init(file, b.buf, nil, scanner.ScanComments)
+	prevTok := token.ILLEGAL
+loop:
+	for {
+		pos, tok, lit := s.Scan()
+		switch tok {
+		case token.EOF:
+			break loop
+		case token.COMMENT:
+			p := file.Offset(pos)
+			e := p + len(lit)
+			if prevTok == token.COMMENT { // merge adjacent comment tokens into one annotation
+				annotations[len(annotations)-1].End = int32(e)
+			} else {
+				annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
+			}
+		case token.IDENT:
+			if len(v.annotations) == 0 {
+				// Scanned identifiers outnumber collected annotations; stop annotating.
+				break loop
+			}
+			annotation := v.annotations[0]
+			v.annotations = v.annotations[1:]
+			if annotation.Kind == -1 { // placeholder from ignoreName: this identifier gets no markup
+				continue
+			}
+			p := file.Offset(pos)
+			e := p + len(lit)
+			annotation.Pos = int32(p)
+			annotation.End = int32(e)
+			annotations = append(annotations, annotation)
+		}
+		prevTok = tok
+	}
+	return Code{Text: string(b.buf), Annotations: annotations, Paths: v.paths}
+}
+
+func (b *builder) position(n ast.Node) Pos {
+	var position Pos // zero value (Line == 0) means "no valid position"
+	pos := b.fset.Position(n.Pos())
+	src := b.srcs[pos.Filename]
+	if src != nil {
+		position.File = int16(src.index)
+		position.Line = int32(pos.Line)
+		end := b.fset.Position(n.End())
+		if src == b.srcs[end.Filename] { // only compute a span when start and end are in the same file
+			n := end.Line - pos.Line
+			if n >= 0 && n <= math.MaxUint16 { // Pos.N is uint16: number of lines - 1
+				position.N = uint16(n)
+			}
+		}
+	}
+	return position
+}
+
+func (b *builder) printExample(e *doc.Example) (code Code, output string) {
+	output = e.Output
+
+	b.buf = b.buf[:0]
+	var n interface{}
+	if _, ok := e.Code.(*ast.File); ok {
+		n = e.Play
+	} else {
+		n = &printer.CommentedNode{Node: e.Code, Comments: e.Comments}
+	}
+	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&b.buf}, b.fset, n)
+	if err != nil {
+		return Code{Text: err.Error()}, output
+	}
+
+	// additional formatting if this is a function body
+	if i := len(b.buf); i >= 2 && b.buf[0] == '{' && b.buf[i-1] == '}' {
+		// remove surrounding braces
+		b.buf = b.buf[1 : i-1]
+		// unindent
+		b.buf = bytes.Replace(b.buf, []byte("\n    "), []byte("\n"), -1)
+		// remove output comment
+		if j := exampleOutputRx.FindIndex(b.buf); j != nil {
+			b.buf = bytes.TrimSpace(b.buf[:j[0]])
+		}
+	} else {
+		// drop output, as the output comment will appear in the code
+		output = ""
+	}
+
+	var annotations []Annotation
+	var s scanner.Scanner
+	fset := token.NewFileSet()
+	file := fset.AddFile("", fset.Base(), len(b.buf))
+	s.Init(file, b.buf, nil, scanner.ScanComments)
+	prevTok := token.ILLEGAL
+scanLoop:
+	for {
+		pos, tok, lit := s.Scan()
+		switch tok {
+		case token.EOF:
+			break scanLoop
+		case token.COMMENT:
+			p := file.Offset(pos)
+			e := p + len(lit)
+			if prevTok == token.COMMENT {
+				annotations[len(annotations)-1].End = int32(e)
+			} else {
+				annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
+			}
+		}
+		prevTok = tok
+	}
+
+	return Code{Text: string(b.buf), Annotations: annotations}, output
+}
diff --git a/doc/get.go b/doc/get.go
new file mode 100644
index 0000000..611bfd0
--- /dev/null
+++ b/doc/get.go
@@ -0,0 +1,55 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package doc fetches Go package documentation from version control services.
+package doc
+
+import (
+	"github.com/golang/gddo/gosrc"
+	"go/doc"
+	"net/http"
+	"strings"
+)
+
+func Get(client *http.Client, importPath string, etag string) (*Package, error) {
+
+	const versionPrefix = PackageVersion + "-"
+
+	if strings.HasPrefix(etag, versionPrefix) { // strip our version prefix so gosrc sees its own etag
+		etag = etag[len(versionPrefix):]
+	} else {
+		etag = "" // version mismatch (or no etag): force a full refetch
+	}
+
+	dir, err := gosrc.Get(client, importPath, etag)
+	if err != nil {
+		return nil, err
+	}
+
+	pdoc, err := newPackage(dir)
+	if err != nil {
+		return pdoc, err
+	}
+
+	if pdoc.Synopsis == "" && // project root with no package docs: fall back to the project description
+		pdoc.Doc == "" &&
+		!pdoc.IsCmd &&
+		pdoc.Name != "" &&
+		dir.ImportPath == dir.ProjectRoot &&
+		len(pdoc.Errors) == 0 {
+		project, err := gosrc.GetProject(client, dir.ResolvedPath)
+		switch {
+		case err == nil:
+			pdoc.Synopsis = doc.Synopsis(project.Description)
+		case gosrc.IsNotFound(err):
+			// ok: no project metadata available; leave the synopsis empty
+		default:
+			return nil, err
+		}
+	}
+
+	return pdoc, nil
+}
diff --git a/doc/goprint.go b/doc/goprint.go
new file mode 100644
index 0000000..192ee70
--- /dev/null
+++ b/doc/goprint.go
@@ -0,0 +1,69 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// +build ignore
+
+// Command astprint prints the AST for a file.
+//
+// Usage: go run asprint.go fname
+package main
+
+import (
+	"flag"
+	"go/ast"
+	"go/build"
+	"go/doc"
+	"go/parser"
+	"go/token"
+	"io/ioutil"
+	"log"
+	"path/filepath"
+	"strings"
+
+	"github.com/davecgh/go-spew/spew"
+)
+
+func importer(imports map[string]*ast.Object, path string) (*ast.Object, error) {
+	pkg := imports[path]
+	if pkg == nil {
+		name := path[strings.LastIndex(path, "/")+1:]
+		pkg = ast.NewObj(ast.Pkg, name)
+		pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
+		imports[path] = pkg
+	}
+	return pkg, nil
+}
+
+func main() {
+	flag.Parse()
+	if len(flag.Args()) != 1 {
+		log.Fatal("Usage: go run goprint.go path")
+	}
+	bpkg, err := build.Default.Import(flag.Args()[0], ".", 0)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fset := token.NewFileSet()
+	files := make(map[string]*ast.File)
+	for _, fname := range bpkg.GoFiles {
+		p, err := ioutil.ReadFile(filepath.Join(bpkg.SrcRoot, bpkg.ImportPath, fname))
+		if err != nil {
+			log.Fatal(err)
+		}
+		file, err := parser.ParseFile(fset, fname, p, parser.ParseComments)
+		if err != nil {
+			log.Fatal(err)
+		}
+		files[fname] = file
+	}
+	c := spew.NewDefaultConfig()
+	c.DisableMethods = true
+	apkg, _ := ast.NewPackage(fset, files, importer, nil)
+	c.Dump(apkg)
+	ast.Print(fset, apkg)
+	dpkg := doc.New(apkg, bpkg.ImportPath, 0)
+	c.Dump(dpkg)
+}
diff --git a/doc/print.go b/doc/print.go
new file mode 100644
index 0000000..b199bbb
--- /dev/null
+++ b/doc/print.go
@@ -0,0 +1,50 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// +build ignore
+
+// Command print fetches and prints package documentation.
+//
+// Usage: go run print.go importPath
+package main
+
+import (
+	"flag"
+	"log"
+	"net/http"
+	"os"
+
+	"github.com/davecgh/go-spew/spew"
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+)
+
+var (
+	etag  = flag.String("etag", "", "Etag")
+	local = flag.Bool("local", false, "Get package from local directory.")
+)
+
+func main() {
+	flag.Parse()
+	if len(flag.Args()) != 1 {
+		log.Fatal("Usage: go run print.go importPath")
+	}
+	path := flag.Args()[0]
+
+	var (
+		pdoc *doc.Package
+		err  error
+	)
+	if *local {
+		gosrc.SetLocalDevMode(os.Getenv("GOPATH")) // NOTE(review): presumably resolves packages from the local GOPATH — confirm in gosrc
+	}
+	pdoc, err = doc.Get(http.DefaultClient, path, *etag)
+
+	if err != nil {
+		log.Fatal(err)
+	}
+	spew.Dump(pdoc)
+}
diff --git a/doc/util.go b/doc/util.go
new file mode 100644
index 0000000..e9a665e
--- /dev/null
+++ b/doc/util.go
@@ -0,0 +1,14 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package doc
+
+type sliceWriter struct{ p *[]byte } // io.Writer that appends everything written to *p
+
+func (w sliceWriter) Write(p []byte) (int, error) {
+	*w.p = append(*w.p, p...)
+	return len(p), nil // always succeeds and reports a full write
+}
diff --git a/doc/vet.go b/doc/vet.go
new file mode 100644
index 0000000..610ab44
--- /dev/null
+++ b/doc/vet.go
@@ -0,0 +1,81 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package doc
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"strconv"
+	"strings"
+
+	"github.com/golang/gddo/gosrc"
+)
+
+// This list of deprecated exports is used to find code that has not been
+// updated for Go 1.
+var deprecatedExports = map[string][]string{
+	`"bytes"`:         {"Add"},
+	`"crypto/aes"`:    {"Cipher"},
+	`"crypto/hmac"`:   {"NewSHA1", "NewSHA256"},
+	`"crypto/rand"`:   {"Seed"},
+	`"encoding/json"`: {"MarshalForHTML"},
+	`"encoding/xml"`:  {"Marshaler", "NewParser", "Parser"},
+	`"html"`:          {"NewTokenizer", "Parse"},
+	`"image"`:         {"Color", "NRGBAColor", "RGBAColor"},
+	`"io"`:            {"Copyn"},
+	`"log"`:           {"Exitf"},
+	`"math"`:          {"Fabs", "Fmax", "Fmod"},
+	`"os"`:            {"Envs", "Error", "Getenverror", "NewError", "Time", "UnixSignal", "Wait"},
+	`"reflect"`:       {"MapValue", "Typeof"},
+	`"runtime"`:       {"UpdateMemStats"},
+	`"strconv"`:       {"Atob", "Atof32", "Atof64", "AtofN", "Atoi64", "Atoui", "Atoui64", "Btoui64", "Ftoa64", "Itoa64", "Uitoa", "Uitoa64"},
+	`"time"`:          {"LocalTime", "Nanoseconds", "NanosecondsToLocalTime", "Seconds", "SecondsToLocalTime", "SecondsToUTC"},
+	`"unicode/utf8"`:  {"NewString"},
+}
+
+type vetVisitor struct {
+	errors map[string]token.Pos
+}
+
+func (v *vetVisitor) Visit(n ast.Node) ast.Visitor {
+	if sel, ok := n.(*ast.SelectorExpr); ok {
+		if x, _ := sel.X.(*ast.Ident); x != nil {
+			if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg { // selector qualified by an imported package
+				if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
+					for _, name := range deprecatedExports[spec.Path.Value] {
+						if name == sel.Sel.Name { // use of a pre-Go-1 export: record as an error
+							v.errors[fmt.Sprintf("%s.%s not found", spec.Path.Value, sel.Sel.Name)] = n.Pos()
+							return nil
+						}
+					}
+				}
+			}
+		}
+	}
+	return v
+}
+
+func (b *builder) vetPackage(pkg *Package, apkg *ast.Package) {
+	errors := make(map[string]token.Pos) // keyed by message so each problem is reported once
+	for _, file := range apkg.Files {
+		for _, is := range file.Imports {
+			importPath, _ := strconv.Unquote(is.Path.Value)
+			if !gosrc.IsValidPath(importPath) &&
+				!strings.HasPrefix(importPath, "exp/") &&
+				!strings.HasPrefix(importPath, "appengine") { // exp/ and appengine imports are tolerated
+				errors[fmt.Sprintf("Unrecognized import path %q", importPath)] = is.Pos()
+			}
+		}
+		v := vetVisitor{errors: errors}
+		ast.Walk(&v, file)
+	}
+	for message, pos := range errors {
+		pkg.Errors = append(pkg.Errors,
+			fmt.Sprintf("%s (%s)", message, b.fset.Position(pos)))
+	}
+}
diff --git a/gddo-admin/block.go b/gddo-admin/block.go
new file mode 100644
index 0000000..70fe4c1
--- /dev/null
+++ b/gddo-admin/block.go
@@ -0,0 +1,33 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"github.com/golang/gddo/database"
+	"log"
+	"os"
+)
+
+var blockCommand = &command{
+	name:  "block",
+	run:   block,
+	usage: "block path",
+}
+
+func block(c *command) {
+	if len(c.flag.Args()) != 1 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	if err := db.Block(c.flag.Args()[0]); err != nil {
+		log.Fatal(err)
+	}
+}
diff --git a/gddo-admin/crawl.go b/gddo-admin/crawl.go
new file mode 100644
index 0000000..62ebe71
--- /dev/null
+++ b/gddo-admin/crawl.go
@@ -0,0 +1,65 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"fmt"
+	"io/ioutil"
+	"log"
+	"os"
+	"strings"
+
+	"github.com/garyburd/redigo/redis"
+	"github.com/golang/gddo/database"
+)
+
+var crawlCommand = &command{
+	name:  "crawl",
+	run:   crawl,
+	usage: "crawl [new]",
+}
+
+func crawl(c *command) {
+	if len(c.flag.Args()) > 1 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	if len(c.flag.Args()) == 1 { // optional file argument: whitespace-separated import paths to queue
+		p, err := ioutil.ReadFile(c.flag.Args()[0])
+		if err != nil {
+			log.Fatal(err)
+		}
+		for _, p := range strings.Fields(string(p)) {
+			db.AddNewCrawl(p) // NOTE(review): error return is ignored — failures are silently dropped
+		}
+	}
+
+	conn := db.Pool.Get()
+	defer conn.Close()
+	paths, err := redis.Strings(conn.Do("SMEMBERS", "newCrawl"))
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println("NEW")
+	for _, path := range paths {
+		fmt.Println(path)
+	}
+
+	paths, err = redis.Strings(conn.Do("SMEMBERS", "badCrawl"))
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println("BAD")
+	for _, path := range paths {
+		fmt.Println(path)
+	}
+}
diff --git a/gddo-admin/dangle.go b/gddo-admin/dangle.go
new file mode 100644
index 0000000..b0473f6
--- /dev/null
+++ b/gddo-admin/dangle.go
@@ -0,0 +1,59 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/golang/gddo/database"
+	"github.com/golang/gddo/gosrc"
+)
+
+var dangleCommand = &command{
+	name:  "dangle",
+	run:   dangle,
+	usage: "dangle",
+}
+
+func dangle(c *command) {
+	if len(c.flag.Args()) != 0 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	m := make(map[string]int) // bit 1: package is in the database; bit 2: package is imported by someone
+	err = db.Do(func(pi *database.PackageInfo) error { // NOTE(review): err from db.Do is never checked — consider log.Fatal(err)
+		m[pi.PDoc.ImportPath] |= 1
+		for _, p := range pi.PDoc.Imports {
+			if gosrc.IsValidPath(p) {
+				m[p] |= 2
+			}
+		}
+		for _, p := range pi.PDoc.TestImports {
+			if gosrc.IsValidPath(p) {
+				m[p] |= 2
+			}
+		}
+		for _, p := range pi.PDoc.XTestImports {
+			if gosrc.IsValidPath(p) {
+				m[p] |= 2
+			}
+		}
+		return nil
+	})
+
+	for p, v := range m {
+		if v == 2 { // imported but not stored: a dangling reference
+			fmt.Println(p)
+		}
+	}
+}
diff --git a/gddo-admin/delete.go b/gddo-admin/delete.go
new file mode 100644
index 0000000..a9d188e
--- /dev/null
+++ b/gddo-admin/delete.go
@@ -0,0 +1,34 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"log"
+	"os"
+
+	"github.com/golang/gddo/database"
+)
+
+var deleteCommand = &command{
+	name:  "delete",
+	run:   del,
+	usage: "delete path",
+}
+
+func del(c *command) {
+	if len(c.flag.Args()) != 1 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	if err := db.Delete(c.flag.Args()[0]); err != nil {
+		log.Fatal(err)
+	}
+}
diff --git a/gddo-admin/main.go b/gddo-admin/main.go
new file mode 100644
index 0000000..223c94c
--- /dev/null
+++ b/gddo-admin/main.go
@@ -0,0 +1,70 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Command gddo-admin is the GoDoc.org command line administration tool.
+package main
+
+import (
+	"flag"
+	"fmt"
+	"os"
+	"strings"
+)
+
+type command struct {
+	name  string
+	run   func(c *command)
+	flag  flag.FlagSet
+	usage string
+}
+
+func (c *command) printUsage() {
+	fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], c.usage)
+	c.flag.PrintDefaults()
+}
+
+var commands = []*command{
+	blockCommand,
+	reindexCommand,
+	deleteCommand,
+	popularCommand,
+	dangleCommand,
+	crawlCommand,
+	statsCommand,
+}
+
+func printUsage() {
+	var n []string
+	for _, c := range commands {
+		n = append(n, c.name)
+	}
+	fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], strings.Join(n, "|"))
+	flag.PrintDefaults()
+	for _, c := range commands {
+		c.printUsage()
+	}
+}
+
+func main() {
+	flag.Usage = printUsage
+	flag.Parse()
+	args := flag.Args()
+	if len(args) >= 1 {
+		for _, c := range commands {
+			if args[0] == c.name {
+				c.flag.Usage = func() {
+					c.printUsage()
+					os.Exit(2)
+				}
+				c.flag.Parse(args[1:])
+				c.run(c)
+				return
+			}
+		}
+	}
+	printUsage()
+	os.Exit(2)
+}
diff --git a/gddo-admin/popular.go b/gddo-admin/popular.go
new file mode 100644
index 0000000..cdb678d
--- /dev/null
+++ b/gddo-admin/popular.go
@@ -0,0 +1,44 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/golang/gddo/database"
+)
+
+var (
+	popularCommand = &command{
+		name:  "popular",
+		usage: "popular",
+	}
+)
+
+func init() {
+	popularCommand.run = popular
+}
+
+func popular(c *command) {
+	if len(c.flag.Args()) != 0 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	pkgs, err := db.PopularWithScores()
+	if err != nil {
+		log.Fatal(err)
+	}
+	for _, pkg := range pkgs {
+		fmt.Println(pkg.Path, pkg.Synopsis)
+	}
+}
diff --git a/gddo-admin/reindex.go b/gddo-admin/reindex.go
new file mode 100644
index 0000000..44280f2
--- /dev/null
+++ b/gddo-admin/reindex.go
@@ -0,0 +1,68 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"log"
+	"os"
+	"time"
+
+	"github.com/golang/gddo/database"
+	"github.com/golang/gddo/doc"
+)
+
+// reindexCommand implements the "reindex" subcommand, which rewrites
+// every package document stored in the database (see reindex below).
+var reindexCommand = &command{
+	name:  "reindex",
+	run:   reindex,
+	usage: "reindex",
+}
+
+// fix is a placeholder for in-place repairs/migrations applied to each
+// stored package document as reindex walks the database. It is
+// currently a no-op; the commented-out skeleton below enumerates the
+// document fields a future migration would need to visit.
+func fix(pdoc *doc.Package) {
+	/*
+	   	for _, v := range pdoc.Consts {
+	   	}
+	   	for _, v := range pdoc.Vars {
+	   	}
+	   	for _, v := range pdoc.Funcs {
+	   	}
+	   	for _, t := range pdoc.Types {
+	   		for _, v := range t.Consts {
+	   		}
+	   		for _, v := range t.Vars {
+	   		}
+	   		for _, v := range t.Funcs {
+	   		}
+	   		for _, v := range t.Methods {
+	   		}
+	   	}
+	       for _, notes := range pdoc.Notes {
+	           for _, v := range notes {
+	           }
+	       }
+	*/
+}
+
+func reindex(c *command) {
+	if len(c.flag.Args()) != 0 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+	var n int
+	err = db.Do(func(pi *database.PackageInfo) error {
+		n += 1
+		fix(pi.PDoc)
+		return db.Put(pi.PDoc, time.Time{}, false)
+	})
+	if err != nil {
+		log.Fatal(err)
+	}
+	log.Printf("Updated %d documents", n)
+}
diff --git a/gddo-admin/stats.go b/gddo-admin/stats.go
new file mode 100644
index 0000000..987087f
--- /dev/null
+++ b/gddo-admin/stats.go
@@ -0,0 +1,78 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"os"
+	"sort"
+
+	"github.com/golang/gddo/database"
+)
+
+// statsCommand implements the "stats" subcommand, which reports storage
+// sizes per project and per package (see stats below).
+var statsCommand = &command{
+	name:  "stats",
+	run:   stats,
+	usage: "stats",
+}
+
// itemSize pairs an import path (or project root) with a byte size.
type itemSize struct {
	path string
	size int
}

// bySizeDesc implements sort.Interface, ordering itemSize slices from
// largest to smallest size.
type bySizeDesc []itemSize

func (s bySizeDesc) Len() int      { return len(s) }
func (s bySizeDesc) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// Less reports larger sizes as "smaller", yielding a descending sort.
func (s bySizeDesc) Less(i, j int) bool {
	return s[i].size > s[j].size
}
+
+func stats(c *command) {
+	if len(c.flag.Args()) != 0 {
+		c.printUsage()
+		os.Exit(1)
+	}
+	db, err := database.New()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	var packageSizes []itemSize
+	var truncatedPackages []string
+	projectSizes := make(map[string]int)
+	err = db.Do(func(pi *database.PackageInfo) error {
+		packageSizes = append(packageSizes, itemSize{pi.PDoc.ImportPath, pi.Size})
+		projectSizes[pi.PDoc.ProjectRoot] += pi.Size
+		if pi.PDoc.Truncated {
+			truncatedPackages = append(truncatedPackages, pi.PDoc.ImportPath)
+		}
+		return nil
+	})
+
+	var sizes []itemSize
+	for path, size := range projectSizes {
+		sizes = append(sizes, itemSize{path, size})
+	}
+	sort.Sort(bySizeDesc(sizes))
+	fmt.Println("PROJECT SIZES")
+	for _, size := range sizes {
+		fmt.Printf("%6d %s\n", size.size, size.path)
+	}
+
+	sort.Sort(bySizeDesc(packageSizes))
+	fmt.Println("PACKAGE SIZES")
+	for _, size := range packageSizes {
+		fmt.Printf("%6d %s\n", size.size, size.path)
+	}
+
+	sort.Sort(sort.StringSlice(truncatedPackages))
+	fmt.Println("TRUNCATED PACKAGES")
+	for _, p := range truncatedPackages {
+		fmt.Printf("%s\n", p)
+	}
+}
diff --git a/gddo-server/assets/BingSiteAuth.xml b/gddo-server/assets/BingSiteAuth.xml
new file mode 100644
index 0000000..0737e3e
--- /dev/null
+++ b/gddo-server/assets/BingSiteAuth.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<users>
+	<user>6F3E495D5591D0B1308072CA245E8849</user>
+</users>
\ No newline at end of file
diff --git a/gddo-server/assets/apiRobots.txt b/gddo-server/assets/apiRobots.txt
new file mode 100644
index 0000000..a4751e2
--- /dev/null
+++ b/gddo-server/assets/apiRobots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: *
diff --git a/gddo-server/assets/apple-touch-icon-120x120.png b/gddo-server/assets/apple-touch-icon-120x120.png
new file mode 100644
index 0000000..5b09a7a
--- /dev/null
+++ b/gddo-server/assets/apple-touch-icon-120x120.png
Binary files differ
diff --git a/gddo-server/assets/apple-touch-icon-152x152.png b/gddo-server/assets/apple-touch-icon-152x152.png
new file mode 100644
index 0000000..e08f5bf
--- /dev/null
+++ b/gddo-server/assets/apple-touch-icon-152x152.png
Binary files differ
diff --git a/gddo-server/assets/apple-touch-icon-76x76.png b/gddo-server/assets/apple-touch-icon-76x76.png
new file mode 100644
index 0000000..e956ffe
--- /dev/null
+++ b/gddo-server/assets/apple-touch-icon-76x76.png
Binary files differ
diff --git a/gddo-server/assets/apple-touch-icon.png b/gddo-server/assets/apple-touch-icon.png
new file mode 100644
index 0000000..e08f5bf
--- /dev/null
+++ b/gddo-server/assets/apple-touch-icon.png
Binary files differ
diff --git a/gddo-server/assets/favicon.ico b/gddo-server/assets/favicon.ico
new file mode 100644
index 0000000..6ddf354
--- /dev/null
+++ b/gddo-server/assets/favicon.ico
Binary files differ
diff --git a/gddo-server/assets/google3d2f3cd4cc2bb44b.html b/gddo-server/assets/google3d2f3cd4cc2bb44b.html
new file mode 100644
index 0000000..a1d57ce
--- /dev/null
+++ b/gddo-server/assets/google3d2f3cd4cc2bb44b.html
@@ -0,0 +1 @@
+google-site-verification: google3d2f3cd4cc2bb44b.html
\ No newline at end of file
diff --git a/gddo-server/assets/humans.txt b/gddo-server/assets/humans.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gddo-server/assets/humans.txt
diff --git a/gddo-server/assets/robots.txt b/gddo-server/assets/robots.txt
new file mode 100644
index 0000000..3280e04
--- /dev/null
+++ b/gddo-server/assets/robots.txt
@@ -0,0 +1,8 @@
+User-agent: *
+Disallow: /*?imports
+Disallow: /*?importers
+Disallow: /*?import-graph*
+Disallow: /*?gosrc*
+Disallow: /*?file*
+Disallow: /*?play*
+Disallow: /*?tools
diff --git a/gddo-server/assets/sidebar.css b/gddo-server/assets/sidebar.css
new file mode 100644
index 0000000..d5be4f2
--- /dev/null
+++ b/gddo-server/assets/sidebar.css
@@ -0,0 +1,82 @@
+.container { max-width: 970px; }
+
+.section-header {
+    padding-bottom: 4px;
+    margin: 20px 0 10px;
+    border-bottom: 1px solid #eeeeee;
+}
+
+/* Sidebar navigation (copied from bootstrap docs.css) */
+/* First level of nav */
+.gddo-sidebar {
+    margin-top: 5px;
+    margin-bottom: 30px;
+    padding-bottom: 10px;
+    text-shadow: 0 1px 0 #fff;
+    border-radius: 5px;
+}
+
+/* By default it's not affixed in mobile views, so undo that */
+.gddo-sidebar .nav.affix {
+    position: static;
+}
+
+.gddo-sidebar .nav {
+    overflow: auto;
+    height: 95%;
+}
+
+/* All levels of nav */
+.gddo-sidebar .nav > li > a {
+    display: block;
+    color: #716b7a;
+    padding: 5px 0px;
+}
+.gddo-sidebar .nav > li > a:hover,
+.gddo-sidebar .nav > li > a:focus {
+    text-decoration: none;
+    background-color: #e5e3e9;
+}
+.gddo-sidebar .nav > .active > a,
+.gddo-sidebar .nav > .active:hover > a,
+.gddo-sidebar .nav > .active:focus > a {
+    font-weight: bold;
+    color: #563d7c;
+    background-color: transparent;
+}
+
+/* Nav: second level (shown on .active) */
+.gddo-sidebar .nav .nav {
+    display: none; /* Hide by default, but at >768px, show it */
+    margin-bottom: 8px;
+}
+.gddo-sidebar .nav .nav > li > a {
+    padding-top:    3px;
+    padding-bottom: 3px;
+    padding-left: 15px;
+    font-size: 90%;
+}
+
+/* Show and affix the side nav when space allows it */
+@media screen and (min-width: 992px) {
+    .gddo-sidebar .nav > .active > ul {
+        display: block;
+    }
+    /* Widen the fixed sidebar */
+    .gddo-sidebar .nav.affix,
+    .gddo-sidebar .nav.affix-bottom {
+        width: 213px;
+    }
+    .gddo-sidebar .nav.affix {
+        position: fixed; /* Undo the static from mobile first approach */
+        top: 10px;
+    }
+    .gddo-sidebar .nav.affix-bottom {
+        position: absolute; /* Undo the static from mobile first approach */
+    }
+    .gddo-sidebar .nav.affix-bottom .bs-sidenav,
+    .gddo-sidebar .nav.affix .bs-sidenav {
+        margin-top: 0;
+        margin-bottom: 0;
+    }
+}
diff --git a/gddo-server/assets/site.css b/gddo-server/assets/site.css
new file mode 100644
index 0000000..0e61939
--- /dev/null
+++ b/gddo-server/assets/site.css
@@ -0,0 +1,117 @@
+html { background-color: whitesmoke; }
+body { background-color: white; }
+h4 { margin-top: 20px; }
+.container { max-width: 728px; }
+
+#x-projnav {
+    min-height: 20px;
+    margin-bottom: 20px;
+    background-color: #eee;
+    padding: 9px;
+    border-radius: 3px;
+}
+
+#x-footer {
+    padding-top: 14px;
+    padding-bottom: 15px;
+    margin-top: 5px;
+    background-color: #eee;
+    border-top-style: solid;
+    border-top-width: 1px;
+
+}
+
+#x-pkginfo {
+    margin-top: 25px;
+    border-top: 1px solid #ccc;
+    padding-top: 20px;
+    margin-bottom: 15px;
+}
+
+code {
+    background-color: inherit;
+    border: none;
+    color: inherit;
+    padding: 0;
+}
+
+pre {
+    overflow: auto;
+    white-space: pre;
+    word-break: normal;
+    word-wrap: normal;
+}
+
+.funcdecl > pre {
+    white-space: pre-wrap;
+    word-break: break-all;
+    word-wrap: break-word;
+}
+
+pre .com {
+  color: rgb(147, 161, 161);
+}
+
+.decl {
+    position: relative;
+}
+
+.decl > a {
+    position: absolute;
+    top: 0px;
+    right: 0px;
+    display: none;
+    border: 1px solid #ccc;
+    border-top-right-radius: 4px;
+    border-bottom-left-radius: 4px;
+    padding-left: 4px;
+    padding-right: 4px;
+}
+
+.decl > a:hover {
+    background-color: white;
+    text-decoration: none;
+}
+
+.decl:hover > a {
+    display: block;
+}
+
+a, .navbar-default .navbar-brand {
+    color: #375eab;
+}
+
+.navbar-default, #x-footer {
+    background-color: hsl(209, 51%, 92%);
+    border-color: hsl(209, 51%, 88%);
+}
+
+.navbar-default .navbar-nav > .active > a,
+.navbar-default .navbar-nav > .active > a:hover,
+.navbar-default .navbar-nav > .active > a:focus {
+    background-color: hsl(209, 51%, 88%);
+}
+
+.navbar-default .navbar-nav > li > a:hover,
+.navbar-default .navbar-nav > li > a:focus {
+    color: #000;
+}
+
+.panel-default > .panel-heading {
+    color: #333;
+    background-color: transparent;
+}
+
+a.permalink {
+    display: none;
+}
+
+h1:hover .permalink, h2:hover .permalink, h3:hover .permalink, h4:hover .permalink, h5:hover .permalink, h6:hover .permalink {
+    display: inline;
+}
+
+@media (max-width : 768px) {
+    .form-control {
+        font-size:16px;
+    }
+}
diff --git a/gddo-server/assets/site.js b/gddo-server/assets/site.js
new file mode 100644
index 0000000..0cca35c
--- /dev/null
+++ b/gddo-server/assets/site.js
@@ -0,0 +1,212 @@
+// Jump modal: the "jump to identifier" dialog. Lazily indexes every
+// anchored identifier on the page, filters the list as the user types,
+// and supports arrow-key/enter keyboard navigation of the matches.
+$(function() {
+
+    var all;             // every identifier on the page; built on first open
+    var visible;         // subset of `all` matching the current filter
+    var active = -1;     // index into `visible` of the highlighted entry
+    var lastFilter = '';
+    var $body = $('#x-jump-body');
+    var $list = $('#x-jump-list');
+    var $filter = $('#x-jump-filter');
+    var $modal = $('#x-jump');
+
+    // update rebuilds `visible` from `all` using the given filter text
+    // and re-renders the list with the matched text bolded.
+    var update = function(filter) {
+        lastFilter = filter;
+        if (active >= 0) {
+            visible[active].e.removeClass('active');
+            active = -1;
+        }
+        visible = []
+        // Escape regex metacharacters so the filter matches literally.
+        var re = new RegExp(filter.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1"), "gi");
+        all.forEach(function (id) {
+            id.e.detach();
+            var text = id.text;
+            if (filter) {
+                text = id.text.replace(re, function (s) { return '<b>' + s + '</b>'; });
+                // No replacement happened => no match; drop this entry.
+                if (text == id.text) {
+                    return
+                }
+            }
+            id.e.html(text + ' ' + '<i>' + id.kind + '</i>');
+            visible.push(id);
+        });
+        $body.scrollTop(0);
+        if (visible.length > 0) {
+            active = 0;
+            visible[active].e.addClass('active');
+        }
+        $list.append($.map(visible, function(identifier) { return identifier.e; }));
+    }
+
+    // incrActive moves the highlight by delta, clamping to the list
+    // bounds and scrolling the list body to keep the entry in view.
+    var incrActive = function(delta) {
+        if (visible.length == 0) {
+            return
+        }
+        visible[active].e.removeClass('active');
+        active += delta;
+        if (active < 0) {
+            active = 0;
+            $body.scrollTop(0);
+        } else if (active >= visible.length) {
+            active = visible.length - 1;
+            $body.scrollTop($body[0].scrollHeight - $body[0].clientHeight);
+        } else {
+            var $e = visible[active].e;
+            var t = $e.position().top;
+            var b = t + $e.outerHeight(false);
+            if (t <= 0) {
+                $body.scrollTop($body.scrollTop() + t);
+            } else if (b >= $body.outerHeight(false)) {
+                $body.scrollTop($body.scrollTop() + b - $body.outerHeight(false));
+            }
+        }
+        visible[active].e.addClass('active');
+    }
+
+    $modal.on('show.bs.modal', function() {
+        // Build the identifier index once, on first open.
+        if (!all) {
+            all = []
+            var kinds = {'c': 'constant', 'v': 'variable', 'f': 'function', 't': 'type', 'd': 'field', 'm': 'method'}
+            $('*[id]').each(function() {
+                var e = $(this);
+                var id = e.attr('id');
+                // Index only ids that don't start with "_" and contain no "-".
+                if (/^[^_][^-]*$/.test(id)) {
+                    all.push({
+                        text: id,
+                        ltext: id.toLowerCase(),
+                        kind: kinds[e.closest('[data-kind]').attr('data-kind')],
+                        e: $('<a/>', {href: '#' + id, 'class': 'list-group-item', tabindex: '-1'})
+                    });
+                }
+            });
+            // Case-insensitive alphabetical order.
+            all.sort(function (a, b) {
+                if (a.ltext > b.ltext) { return 1; }
+                if (a.ltext < b.ltext) { return -1; }
+                return 0
+            });
+        }
+    }).on('shown.bs.modal', function() {
+        update('');
+        $filter.val('').focus();
+    }).on('hide.bs.modal', function() {
+        $filter.blur();
+    }).on('click', '.list-group-item', function() {
+        $modal.modal('hide');
+    });
+
+    $filter.on('change keyup', function() {
+        // Re-filter only when the text changed case-insensitively.
+        var filter = $filter.val();
+        if (filter.toUpperCase() != lastFilter.toUpperCase()) {
+            update(filter);
+        }
+    }).on('keydown', function(e) {
+        switch(e.which) {
+        case 38: // up
+            incrActive(-1);
+            e.preventDefault();
+            break;
+        case 40: // down
+            incrActive(1);
+            e.preventDefault();
+            break;
+        case 13: // enter
+            if (active >= 0) {
+                visible[active].e[0].click();
+            }
+            break
+        }
+    });
+
+});
+
+// keyboard shortcuts
+$(function() {
+    var prevCh = null, prevTime = 0, modal = false;
+
+    $('.modal').on({
+        show: function() { modal = true; },
+        hidden: function() { modal = false; }
+    });
+
+    $(document).on('keypress', function(e) {
+        var combo = e.timeStamp - prevTime <= 1000;
+        prevTime = 0;
+
+        if (modal) {
+            return true;
+        }
+
+        var t = e.target.tagName
+        if (t == 'INPUT' ||
+            t == 'SELECT' ||
+            t == 'TEXTAREA' ) {
+            return true;
+        }
+
+        if (e.target.contentEditable && e.target.contentEditable == 'true') {
+            return true;
+        }
+
+        if (e.metaKey || e.ctrlKey) {
+            return true;
+        }
+
+        var ch = String.fromCharCode(e.which);
+
+        if (combo) {
+            switch (prevCh + ch) {
+            case "gg":
+                $('html,body').animate({scrollTop: 0},'fast');
+                return false;
+            case "gb":
+                $('html,body').animate({scrollTop: $(document).height()},'fast');
+                return false;
+            case "gi":
+                if ($('#pkg-index').length > 0) {
+                    $('html,body').animate({scrollTop: $("#pkg-index").offset().top},'fast');
+                    return false;
+                }
+            case "ge":
+                if ($('#pkg-examples').length > 0) {
+                    $('html,body').animate({scrollTop: $("#pkg-examples").offset().top},'fast');
+                    return false;
+                }
+            }
+        }
+
+        switch (ch) {
+        case "/":
+            $('#x-search-query').focus();
+            return false;
+        case "?":
+            $('#x-shortcuts').modal();
+            return false;
+        case  "f":
+            if ($('#x-jump').length > 0) {
+                $('#x-jump').modal();
+                return false;
+            }
+        }
+
+        prevCh = ch
+        prevTime = e.timeStamp
+        return true;
+    });
+});
+
+// misc
+$(function() {
+    $('span.timeago').timeago();
+    if (window.location.hash.substring(0, 9) == '#example-') {
+       $(id).addClass('in').removeClass('collapse').height('auto');
+    }
+
+    $(document).on("click", "input.click-select", function(e) {
+        $(e.target).select();
+    });
+
+    $('body').scrollspy({
+        target: '.gddo-sidebar',
+        offset: 10
+    });
+});
diff --git a/gddo-server/assets/status.png b/gddo-server/assets/status.png
new file mode 100644
index 0000000..3aff0cb
--- /dev/null
+++ b/gddo-server/assets/status.png
Binary files differ
diff --git a/gddo-server/assets/status.svg b/gddo-server/assets/status.svg
new file mode 100644
index 0000000..3952e41
--- /dev/null
+++ b/gddo-server/assets/status.svg
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="109" height="20"><linearGradient id="a" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><rect rx="3" width="109" height="20" fill="#555"/><rect rx="3" x="44" width="65" height="20" fill="#5272B4"/><path fill="#5272B4" d="M44 0h4v20h-4z"/><rect rx="3" width="109" height="20" fill="url(#a)"/><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="23" y="15" fill="#010101" fill-opacity=".3">godoc</text><text x="23" y="14">godoc</text><text x="75.5" y="15" fill="#010101" fill-opacity=".3">reference</text><text x="75.5" y="14">reference</text></g></svg>
\ No newline at end of file
diff --git a/gddo-server/assets/templates/about.html b/gddo-server/assets/templates/about.html
new file mode 100644
index 0000000..96c5283
--- /dev/null
+++ b/gddo-server/assets/templates/about.html
@@ -0,0 +1,72 @@
+{{define "Head"}}<title>About - GoDoc</title>{{end}}
+
+{{define "Body"}}
+<h1>About</h1>
+
+<p>GoDoc hosts documentation for <a href="http://golang.org/">Go</a>
+packages on <a href="https://bitbucket.org/">Bitbucket</a>, <a
+  href="https://github.com/">GitHub</a>, <a
+  href="https://launchpad.net/">Launchpad</a> and <a
+  href="http://code.google.com/hosting/">Google Project Hosting</a>.
+
+<p>The source code for GoDoc is available <a
+  href="https://github.com/golang/gddo">on GitHub</a>.
+
+<p>GoDoc displays documentation for GOOS=linux unless otherwise noted at the
+bottom of the documentation page.
+
+<h4 id="howto">Add a package to GoDoc</h4>
+
+<p>GoDoc generates documentation from Go source code. The <a
+  href="http://blog.golang.org/godoc-documenting-go-code">guidelines</a>
+for writing documentation for the <a
+  href="http://golang.org/cmd/godoc/">godoc</a> tool apply to GoDoc.
+
+<p>It's important to write a good summary of the package in the first sentence
+of the package comment. GoDoc indexes the first sentence and displays the first
+sentence in package lists.
+
+<p>To add a package to GoDoc, <a href="/">search</a> for the package by import
+path. If GoDoc does not already have the documentation for the package, then
+GoDoc will fetch the source from the version control system on the fly and add
+the documentation.
+
+<p>GoDoc checks for package updates once per day. You can force GoDoc to update
+the documentation immediately by clicking the refresh link at the bottom of the
+package documentation page. 
+
+<p>GoDoc crawls package imports and child directories to find new packages.
+
+<h4 id="remove">Remove a package from GoDoc</h4>
+
+GoDoc automatically removes packages deleted from the version control system
+when GoDoc checks for updates to the package. You can force GoDoc to remove a
+deleted package immediately by clicking the refresh link at the bottom of the
+package documentation page.
+
+If you do not want GoDoc to display documentation for your package, send mail
+to golang-dev@googlegroups.com with the import path of the package that you
+want to remove.
+
+<h4 id="feedback">Feedback</h4>
+
+<p>Send your ideas, feature requests and questions to the <a href="https://groups.google.com/group/golang-dev">golang-dev mailing list</a>.
+Report bugs using the <a href="https://github.com/golang/gddo/issues/new">GitHub Issue Tracker</a>. 
+
+<h4 id="shortcuts">Keyboard Shortcuts</h4>
+
+<p>GoDoc has keyboard shortcuts for navigating package documentation
+pages. Type '?' on a package page for help.
+
+<h4 id="bookmarklet">Bookmarklet</h4>
+
+<p>The GoDoc bookmarklet navigates from pages on Bitbucket, GitHub, Launchpad
+and Google Project Hosting to the package documentation. To install the
+bookmarklet, click and drag the following link to your bookmark bar: <a
+ href="javascript:window.location='http://{{.Host}}/?q='+encodeURIComponent(window.location)">GoDoc</a>
+
+<h4>More Documentation</h4>
+
+<p>More documentation about GoDoc is available on <a href="https://github.com/golang/gddo/wiki">the project's GitHub wiki</a>.
+
+{{end}}
diff --git a/gddo-server/assets/templates/bot.html b/gddo-server/assets/templates/bot.html
new file mode 100644
index 0000000..5fc9c9f
--- /dev/null
+++ b/gddo-server/assets/templates/bot.html
@@ -0,0 +1,6 @@
+{{define "Head"}}<title>Bot - GoDoc</title>{{end}}
+
+{{define "Body"}}
+  <p>GoDocBot is godoc.org's robot for fetching Go documentation from version control systems.
+  <p>Contact: golang-dev@googlegroups.com
+{{end}}
diff --git a/gddo-server/assets/templates/cmd.html b/gddo-server/assets/templates/cmd.html
new file mode 100644
index 0000000..dbb80b7
--- /dev/null
+++ b/gddo-server/assets/templates/cmd.html
@@ -0,0 +1,8 @@
+{{define "Head"}}{{template "PkgCmdHeader" $}}{{end}}
+
+{{define "Body"}}
+  {{template "ProjectNav" $}}
+  <h2>Command {{$.pdoc.PageName}}</h2>
+  {{$.pdoc.Doc|comment}}
+  {{template "PkgCmdFooter" $}}
+{{end}}
diff --git a/gddo-server/assets/templates/cmd.txt b/gddo-server/assets/templates/cmd.txt
new file mode 100644
index 0000000..4ed5306
--- /dev/null
+++ b/gddo-server/assets/templates/cmd.txt
@@ -0,0 +1,5 @@
+{{define "ROOT"}}{{with .pdoc}}
+COMMAND DOCUMENTATION
+
+{{.Doc|comment}}
+{{template "Subdirs" $}}{{end}}{{end}}
diff --git a/gddo-server/assets/templates/common.html b/gddo-server/assets/templates/common.html
new file mode 100644
index 0000000..e3376c0
--- /dev/null
+++ b/gddo-server/assets/templates/common.html
@@ -0,0 +1,98 @@
+{{define "Analytics"}}{{with gaAccount}}<script type="text/javascript">
+  var _gaq = _gaq || [];
+  _gaq.push(['_setAccount', '{{.}}']);
+  _gaq.push(['_trackPageview']);
+  (function() {
+    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
+    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
+    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
+  })();
+</script>{{end}}{{end}}
+
+{{define "SearchBox"}}
+  <form>
+    <div class="input-group">
+      <input class="form-control" name="q" autofocus="autofocus" value="{{.}}" placeholder="Search for package by import path or keyword." type="text">
+      <span class="input-group-btn">
+        <button class="btn btn-default" type="submit">Go!</button>
+      </span>
+    </div>
+  </form>
+{{end}}
+
+{{define "ProjectNav"}}{{template "FlashMessages" .flashMessages}}<div class="clearfix" id="x-projnav">
+  {{if .pdoc.ProjectRoot}}{{if .pdoc.ProjectURL}}<a href="{{.pdoc.ProjectURL}}"><strong>{{.pdoc.ProjectName}}:</strong></a>{{else}}<strong>{{.pdoc.ProjectName}}:</strong>{{end}}{{else}}<a href="/-/go">Go:</a>{{end}}
+  {{.pdoc.Breadcrumbs templateName}}
+  {{if and .pdoc.Name (equal templateName "pkg.html")}}
+  <span class="pull-right">
+    <a href="#pkg-index">Index</a>
+    {{if .pdoc.AllExamples}}<span class="text-muted">|</span> <a href="#pkg-examples">Examples</a>{{end}}
+    <span class="text-muted">|</span> <a href="#pkg-files">Files</a>
+    {{if .pkgs}}<span class="text-muted">|</span> <a href="#pkg-subdirectories">Directories</a>{{end}}
+  </span>
+  {{end}}
+</div>{{end}}
+
+{{define "Pkgs"}}
+    <table class="table table-condensed">
+    <thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
+    <tbody>{{range .}}<tr><td>{{if .Path|isValidImportPath}}<a href="/{{.Path}}">{{.Path|importPath}}</a>{{else}}{{.Path|importPath}}{{end}}</td><td>{{.Synopsis|importPath}}</td></tr>
+    {{end}}</tbody>
+    </table>
+{{end}}
+
+{{define "PkgCmdHeader"}}{{with .pdoc}}
+  <title>{{.PageName}} - GoDoc</title>
+  {{if .Synopsis}}
+    <meta name="twitter:title" content="{{if .IsCmd}}Command{{else}}Package{{end}} {{.PageName}}">
+    <meta property="og:title" content="{{if .IsCmd}}Command{{else}}Package{{end}} {{.PageName}}">
+    <meta name="description" content="{{.Synopsis}}">
+    <meta name="twitter:description" content="{{.Synopsis}}">
+    <meta property="og:description" content="{{.Synopsis}}">
+    <meta name="twitter:card" content="summary">
+    <meta name="twitter:site" content="@golang">
+  {{end}}
+  {{if .Errors}}<meta name="robots" content="NOINDEX">{{end}}
+{{end}}{{end}}
+
+{{define "PkgCmdFooter"}}
+<!-- Bugs -->
+{{with .pdoc}}{{with .Notes}}{{with .BUG}}
+  <h3 id="pkg-note-bug">Bugs <a class="permalink" href="#pkg-note-bug">&para;</a></h3>{{range .}}<p>{{$.pdoc.SourceLink .Pos "☞" true}} {{.Body}}{{end}}
+{{end}}{{end}}{{end}}
+
+{{if $.pkgs}}<h3 id="pkg-subdirectories">Directories <a class="permalink" href="#pkg-subdirectories">&para;</a></h3>
+    <table class="table table-condensed">
+    <thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
+    <tbody>{{range $.pkgs}}<tr><td><a href="/{{.Path}}">{{relativePath .Path $.pdoc.ImportPath}}</a><td>{{.Synopsis}}</td></tr>{{end}}</tbody>
+    </table>
+{{end}}
+<div id="x-pkginfo">
+{{with $.pdoc}}
+  <form name="x-refresh" method="POST" action="/-/refresh"><input type="hidden" name="path" value="{{.ImportPath}}"></form>
+  <p>{{if or .Imports $.importerCount}}Package {{.Name}} {{if .Imports}}imports <a href="?imports">{{.Imports|len}} packages</a> (<a href="?import-graph">graph</a>){{end}}{{if and .Imports $.importerCount}} and {{end}}{{if $.importerCount}}is imported by <a href="?importers">{{$.importerCount}} packages</a>{{end}}.{{end}}
+  {{if not .Updated.IsZero}}Updated <span class="timeago" title="{{.Updated.Format "2006-01-02T15:04:05Z"}}">{{.Updated.Format "2006-01-02"}}</span>{{if or (equal .GOOS "windows") (equal .GOOS "darwin")}} with GOOS={{.GOOS}}{{end}}.{{end}}
+  <a href="javascript:document.getElementsByName('x-refresh')[0].submit();" title="Refresh this page from the source.">Refresh now</a>.
+  <a href="?tools">Tools</a> for package owners.
+  {{if .DeadEndFork}}This is a dead-end fork (no commits since the fork).{{end}}
+{{end}}
+{{with $.pdoc.Errors}}
+    <p>The <a href="http://golang.org/cmd/go/#Download_and_install_packages_and_dependencies">go get</a>
+    command cannot install this package because of the following issues:
+    <ul>
+      {{range .}}<li>{{.}}{{end}}
+  </ul>
+{{end}}
+</div>
+{{end}}
+
+{{define "Bootstrap.css"}}<link href="//maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css" rel="stylesheet">{{end}}
+{{define "Bootstrap.js"}}<script src="//maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>{{end}}
+{{define "jQuery"}}<script src="//ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js"></script>{{end}}
+
+{{define "FlashMessages"}}{{range .}}
+  {{if eq .ID "redir"}}{{if eq (len .Args) 1}}<div class="alert alert-warning">Redirected from {{index .Args 0}}.</div>{{end}}
+  {{else if eq .ID "refresh"}}{{if eq (len .Args) 1}}<div class="alert alert-danger">Error refreshing package: {{index .Args 0}}</div>{{end}}
+  {{end}}
+{{end}}{{end}}
+
diff --git a/gddo-server/assets/templates/common.txt b/gddo-server/assets/templates/common.txt
new file mode 100644
index 0000000..234f652
--- /dev/null
+++ b/gddo-server/assets/templates/common.txt
@@ -0,0 +1,3 @@
+{{define "Subdirs"}}{{with $.pkgs}}SUBDIRECTORIES
+{{range .}}
+      {{.Path}}{{end}}{{end}}{{end}}
diff --git a/gddo-server/assets/templates/dir.html b/gddo-server/assets/templates/dir.html
new file mode 100644
index 0000000..e0eea63
--- /dev/null
+++ b/gddo-server/assets/templates/dir.html
@@ -0,0 +1,10 @@
+{{define "Head"}}
+  {{template "PkgCmdHeader" $}}
+  <meta name="robots" content="NOINDEX">
+{{end}}
+
+{{define "Body"}}
+{{template "ProjectNav" $}}
+{{template "PkgCmdFooter" $}}
+
+{{end}}
diff --git a/gddo-server/assets/templates/dir.txt b/gddo-server/assets/templates/dir.txt
new file mode 100644
index 0000000..4930f58
--- /dev/null
+++ b/gddo-server/assets/templates/dir.txt
@@ -0,0 +1 @@
+{{define "ROOT"}}{{with .pdoc}}{{template "Subdirs" $}}{{end}}{{end}}
diff --git a/gddo-server/assets/templates/graph.html b/gddo-server/assets/templates/graph.html
new file mode 100644
index 0000000..a7a4c54
--- /dev/null
+++ b/gddo-server/assets/templates/graph.html
@@ -0,0 +1,23 @@
+{{define "ROOT"}}<!DOCTYPE html><html lang="en">
+    <head>
+      <title>{{.pdoc.PageName}} graph - GoDoc</title>
+      <meta name="robots" content="NOINDEX, NOFOLLOW">
+      {{template "Bootstrap.css"}}
+      <link href="{{staticPath "/-/site.css"}}" rel="stylesheet">
+    </head>
+    <body>
+      <div class="well-small">
+        Package <a href="/{{.pdoc.ImportPath}}">{{.pdoc.Name}}</a>
+        {{if .pdoc.ProjectRoot}}<span class="text-muted">|</span> 
+            {{if .hide}}
+                <a href="?import-graph">Show</a>
+            {{else}}
+                <a href="?import-graph&hide=1">Hide</a> (<a href="?import-graph&hide=2">all</a>)
+            {{end}} 
+            standard package dependencies.
+        {{end}}
+      </div>
+      {{.svg}}
+  </body>
+  {{template "Analytics"}}
+</html>{{end}}
diff --git a/gddo-server/assets/templates/home.html b/gddo-server/assets/templates/home.html
new file mode 100644
index 0000000..dfdcd32
--- /dev/null
+++ b/gddo-server/assets/templates/home.html
@@ -0,0 +1,38 @@
+{{define "Head"}}<title>GoDoc</title>
+{{/* <link type="application/opensearchdescription+xml" rel="search" href="/-/opensearch.xml?v={{fileHash "templates/opensearch.xml"}}"/> */}}{{end}}
+
+{{define "Body"}}
+<div class="jumbotron">
+    <h2>Search for Go Packages</h2>
+    {{template "SearchBox" ""}}
+</div>
+
+<p>GoDoc hosts documentation for <a href="http://golang.org/">Go</a> packages
+on Bitbucket, GitHub, Google Project Hosting and Launchpad.  Read the <a
+  href="/-/about">About Page</a> for information about adding packages to GoDoc
+and more.
+
+<div class="row">
+  <div class="col-sm-6">
+    {{with .Popular}}
+      <h4>Popular Packages</h4>
+      <ul class="list-unstyled">
+        {{range .}}<li><a href="/{{.Path}}">{{.Path}}</a>{{end}}
+      </ul>
+    {{end}}
+  </div>
+  <div class="col-sm-6">
+    <h4>More Packages</h4>
+    <ul class="list-unstyled">
+      <li><a href="/-/index">Index</a>
+      <li><a href="/-/go">Go Standard Packages</a>
+      <li><a href="/-/subrepo">Go Sub-repository Packages</a>
+      <li><a href="https://golang.org/wiki/Projects">Projects @ go-wiki</a>
+      <li><a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=stars&amp;type=Repositories">Most stars</a>, 
+        <a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=forks&amp;type=Repositories">most forks</a>, 
+        <a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=updated&amp;type=Repositories">recently updated</a> on GitHub
+    </ul>
+  </div>
+</div>
+
+{{end}}
diff --git a/gddo-server/assets/templates/home.txt b/gddo-server/assets/templates/home.txt
new file mode 100644
index 0000000..37d2dfc
--- /dev/null
+++ b/gddo-server/assets/templates/home.txt
@@ -0,0 +1,2 @@
+{{define "ROOT"}}
+{{end}}
diff --git a/gddo-server/assets/templates/importers.html b/gddo-server/assets/templates/importers.html
new file mode 100644
index 0000000..d8e3d17
--- /dev/null
+++ b/gddo-server/assets/templates/importers.html
@@ -0,0 +1,7 @@
+{{define "Head"}}<title>{{.pdoc.PageName}} importers - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
+
+{{define "Body"}}
+  {{template "ProjectNav" $}}
+  <h3>Packages that import {{$.pdoc.Name}}</h3>
+  {{template "Pkgs" $.pkgs}}
+{{end}}
diff --git a/gddo-server/assets/templates/importers_robot.html b/gddo-server/assets/templates/importers_robot.html
new file mode 100644
index 0000000..8d62e98
--- /dev/null
+++ b/gddo-server/assets/templates/importers_robot.html
@@ -0,0 +1,10 @@
+{{define "Head"}}<title>{{.pdoc.PageName}} importers - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
+
+{{define "Body"}}
+  {{template "ProjectNav" $}}
+  <h3>Packages that import {{$.pdoc.Name}}</h3>
+  <table class="table table-condensed">
+    <thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
+    <tbody>{{range .pkgs}}<tr><td>{{.Path|importPath}}</td><td>{{.Synopsis|importPath}}</td></tr>{{end}}</tbody>
+  </table>
+{{end}}
diff --git a/gddo-server/assets/templates/imports.html b/gddo-server/assets/templates/imports.html
new file mode 100644
index 0000000..1fcc10b
--- /dev/null
+++ b/gddo-server/assets/templates/imports.html
@@ -0,0 +1,7 @@
+{{define "Head"}}<title>{{.pdoc.PageName}} imports - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
+
+{{define "Body"}}
+  {{template "ProjectNav" $}}
+  <h3>Packages imported by {{.pdoc.Name}}</h3>
+  {{template "Pkgs" $.pkgs}}
+{{end}}
diff --git a/gddo-server/assets/templates/index.html b/gddo-server/assets/templates/index.html
new file mode 100644
index 0000000..a3768dc
--- /dev/null
+++ b/gddo-server/assets/templates/index.html
@@ -0,0 +1,12 @@
+{{define "Head"}}<title>Index - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
+
+{{define "Body"}}
+  <h1>Index</h1>
+  <p>The following is a list of '<a
+    href="http://golang.org/cmd/go/#hdr-Download_and_install_packages_and_dependencies">go
+    get</a>'able packages on godoc.org. A <a href="/-/go">list of Go standard packages</a> is also available.
+
+  {{htmlComment "\nPlease use http://api.godoc.org/packages instead of scraping this page.\n"}}
+  {{template "Pkgs" .pkgs}}
+  <p>Number of packages: {{len .pkgs}}.
+{{end}}
diff --git a/gddo-server/assets/templates/layout.html b/gddo-server/assets/templates/layout.html
new file mode 100644
index 0000000..4a9af94
--- /dev/null
+++ b/gddo-server/assets/templates/layout.html
@@ -0,0 +1,74 @@
+{{define "ROOT"}}<!DOCTYPE html><html lang="en">
+<head profile="http://a9.com/-/spec/opensearch/1.1/">
+  <meta charset="utf-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  {{template "Bootstrap.css"}}
+  <link href="{{staticPath "/-/site.css"}}" rel="stylesheet">
+  {{template "Head" $}}
+</head>
+<body>
+<nav class="navbar navbar-default" role="navigation">
+  <div class="container">
+  <div class="navbar-header">
+    <button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
+      <span class="sr-only">Toggle navigation</span>
+      <span class="icon-bar"></span>
+      <span class="icon-bar"></span>
+      <span class="icon-bar"></span>
+    </button>
+    <a class="navbar-brand" href="/"><strong>GoDoc</strong></a>
+  </div>
+  <div class="collapse navbar-collapse">
+    <ul class="nav navbar-nav">
+        <li{{if equal "home.html" templateName}} class="active"{{end}}><a href="/">Home</a></li>
+        <li{{if equal "index.html" templateName}} class="active"{{end}}><a href="/-/index">Index</a></li>
+        <li{{if equal "about.html" templateName}} class="active"{{end}}><a href="/-/about">About</a></li>
+    </ul>
+    <form class="navbar-nav navbar-form navbar-right" id="x-search" action="/" role="search"><input class="form-control" id="x-search-query" type="text" name="q" placeholder="Search"></form>
+  </div>
+</div>
+</nav>
+
+<div class="container">
+  {{template "Body" $}}
+</div>
+<div id="x-footer" class="clearfix">
+  <div class="container">
+    <a href="https://github.com/golang/gddo/issues">Website Issues</a>
+    <span class="text-muted">|</span> <a href="http://golang.org/">Go Language</a>
+    <span class="pull-right"><a href="#">Back to top</a></span>
+  </div>
+</div>
+
+<div id="x-shortcuts" tabindex="-1" class="modal">
+    <div class="modal-dialog">
+      <div class="modal-content">
+        <div class="modal-header">
+          <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
+          <h4 class="modal-title">Keyboard shortcuts</h4>
+        </div>
+        <div class="modal-body">
+          <table>{{$mutePkg := not (equal "pkg.html" templateName)}}
+          <tr><td align="right"><b>?</b></td><td> : This menu</td></tr>
+          <tr><td align="right"><b>/</b></td><td> : Search site</td></tr>
+          <tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>f</b></td><td> : Jump to identifier</td></tr>
+          <tr><td align="right"><b>g</b> then <b>g</b></td><td> : Go to top of page</td></tr>
+          <tr><td align="right"><b>g</b> then <b>b</b></td><td> : Go to end of page</td></tr>
+          <tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>g</b> then <b>i</b></td><td> : Go to index</td></tr>
+          <tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>g</b> then <b>e</b></td><td> : Go to examples</td></tr>
+          </table>
+        </div>
+        <div class="modal-footer">
+          <button type="button" class="btn" data-dismiss="modal">Close</button>
+      </div>
+    </div>
+  </div>
+</div>
+{{template "jQuery"}}
+{{template "Bootstrap.js"}}
+<script src="{{staticPath "/-/site.js"}}"></script>
+{{template "Analytics"}}
+</body>
+</html>
+{{end}}
+
diff --git a/gddo-server/assets/templates/notfound.html b/gddo-server/assets/templates/notfound.html
new file mode 100644
index 0000000..684d208
--- /dev/null
+++ b/gddo-server/assets/templates/notfound.html
@@ -0,0 +1,10 @@
+{{define "Head"}}<title>Not Found - GoDoc</title>{{end}}
+
+{{define "Body"}}
+  <h1>Not Found</h1>
+  <p>Oh snap! Our team of gophers could not find the web page you are looking for. Try one of these pages:
+  <ul>
+    <li><a href="/">Home</a>
+    <li><a href="/-/index">Package Index</a>
+  </ul>
+{{end}}
diff --git a/gddo-server/assets/templates/notfound.txt b/gddo-server/assets/templates/notfound.txt
new file mode 100644
index 0000000..a3456ba
--- /dev/null
+++ b/gddo-server/assets/templates/notfound.txt
@@ -0,0 +1,2 @@
+{{define "ROOT"}}NOT FOUND
+{{end}}
diff --git a/gddo-server/assets/templates/opensearch.xml b/gddo-server/assets/templates/opensearch.xml
new file mode 100644
index 0000000..dad5682
--- /dev/null
+++ b/gddo-server/assets/templates/opensearch.xml
@@ -0,0 +1,9 @@
+{{define "ROOT"}}<?xml version="1.0"?>
+<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
+    <InputEncoding>UTF-8</InputEncoding>
+    <ShortName>GoDoc</ShortName>
+    <Description>GoDoc: Go Documentation Service</Description>
+    <Url type="text/html" method="get" template="http://{{.}}/?q={searchTerms}"/>
+    <Url type="application/x-suggestions+json" template="http://{{.}}/-/suggest?q={searchTerms}"/>
+</OpenSearchDescription>
+{{end}}
diff --git a/gddo-server/assets/templates/pkg.html b/gddo-server/assets/templates/pkg.html
new file mode 100644
index 0000000..c98b74d
--- /dev/null
+++ b/gddo-server/assets/templates/pkg.html
@@ -0,0 +1,188 @@
+{{define "Head"}}
+  {{template "PkgCmdHeader" $}}
+  {{if sidebarEnabled}}
+    <link href="{{staticPath "/-/sidebar.css"}}" rel="stylesheet">
+  {{end}}
+{{end}}
+
+{{define "Body"}}
+  {{with .pdoc}}
+
+{{if sidebarEnabled}}
+    <div class="row">
+
+      <!-- Sidebar -->
+      <div class="gddo-sidebar col-md-3 hidden-xs hidden-sm">
+        <ul id="sidebar-nav" class="nav" data-spy="affix" data-offset-top="70">
+          <li class="active"><a href="#pkg-overview">Overview</a></li>
+          <li><a href="#pkg-index">Index</a></li>
+          {{if .Examples}}<li><a href="#pkg-examples">Examples</a></li>{{end}}
+          {{if .Consts}}<li><a href="#pkg-constants">Constants</a></li>{{end}}
+          {{if .Vars}}<li><a href="#pkg-variables">Variables</a></li>{{end}}
+
+          {{if .Funcs}}
+            <li>
+              <a href="#pkg-functions">Functions</a>
+              <ul class="nav">
+                {{range .Funcs}}<li><a href="#{{.Name}}">{{.Name}}</a></li>{{end}}
+              </ul>
+            </li>
+          {{end}}
+
+          {{if .Types}}
+            <li>
+              <a href="#pkg-types">Types</a>
+              <ul class="nav">
+                {{range .Types}}<li><a href="#{{.Name}}">{{.Name}}</a></li>{{end}}
+              </ul>
+            </li>
+          {{end}}
+
+          {{if .Notes.BUG}}<li><a href="#pkg-note-bug">Bugs</a></li>{{end}}
+          {{if $.pkgs}}<li><a href="#pkg-subdirectories">Directories</a></li>{{end}}
+        </ul>
+      </div>
+
+      <!-- Content -->
+      <div class="col-md-9">
+
+{{end}}<!-- end sidebarEnabled -->
+
+        {{template "ProjectNav" $}}
+
+        <h2 id="pkg-overview">package {{.Name}}</h2>
+
+        <p><code>import "{{.ImportPath}}"</code>
+
+        {{.Doc|comment}}
+
+        {{template "Examples" .|$.pdoc.ObjExamples}}
+
+        <!-- Index -->
+        <h3 id="pkg-index" class="section-header">Index <a class="permalink" href="#pkg-index">&para;</a></h3>
+
+        {{if .Truncated}}
+          <div class="alert">The documentation displayed here is incomplete. Use the godoc command to read the complete documentation.</div>
+        {{end}}
+
+        <ul class="list-unstyled">
+          {{if .Consts}}<li><a href="#pkg-constants">Constants</a></li>{{end}}
+          {{if .Vars}}<li><a href="#pkg-variables">Variables</a></li>{{end}}
+          {{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
+          {{range $t := .Types}}
+            <li><a href="#{{.Name}}">type {{.Name}}</a></li>
+            {{if or .Funcs .Methods}}<ul>{{end}}
+            {{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
+            {{range .Methods}}<li><a href="#{{$t.Name}}.{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
+            {{if or .Funcs .Methods}}</ul>{{end}}
+          {{end}}
+        </ul>
+
+        <!-- Examples -->
+        {{with .AllExamples}}
+          <h4 id="pkg-examples">Examples <a class="permalink" href="#pkg-examples">&para;</a></h4>
+          <ul class="list-unstyled">
+            {{range . }}<li><a href="#example-{{.ID}}" onclick="$('#ex-{{.ID}}').addClass('in').removeClass('collapse').height('auto')">{{.Label}}</a></li>{{end}}
+          </ul>
+        {{else}}
+          <span id="pkg-examples"></span>
+        {{end}}
+
+        <!-- Files -->
+        <h4 id="pkg-files">
+          {{with .BrowseURL}}<a href="{{.}}">Package Files</a>{{else}}Package Files{{end}}
+          <a class="permalink" href="#pkg-files">&para;</a>
+        </h4>
+
+        <p>{{range .Files}}{{if .URL}}<a href="{{.URL}}">{{.Name}}</a>{{else}}{{.Name}}{{end}} {{end}}</p>
+
+        <!-- Constants -->
+        {{if .Consts}}
+          <h3 id="pkg-constants">Constants <a class="permalink" href="#pkg-constants">&para;</a></h3>
+          {{range .Consts}}<div class="decl" data-kind="c">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
+        {{end}}
+
+        <!-- Variables -->
+        {{if .Vars}}
+          <h3 id="pkg-variables">Variables <a class="permalink" href="#pkg-variables">&para;</a></h3>
+          {{range .Vars}}<div class="decl" data-kind="v">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
+        {{end}}
+
+        <!-- Functions -->
+        {{if sidebarEnabled}}{{if .Funcs}}
+            <h3 id="pkg-functions" class="section-header">Functions <a class="permalink" href="#pkg-functions">&para;</a></h3>
+        {{end}}{{end}}
+        {{range .Funcs}}
+          <h3 id="{{.Name}}" data-kind="f">func {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a></h3>
+          <div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
+          {{template "Examples" .|$.pdoc.ObjExamples}}
+        {{end}}
+
+        <!-- Types -->
+        {{if sidebarEnabled}}{{if .Types}}
+            <h3 id="pkg-types" class="section-header">Types <a class="permalink" href="#pkg-types">&para;</a></h3>
+        {{end}}{{end}}
+
+        {{range $t := .Types}}
+          <h3 id="{{.Name}}" data-kind="t">type {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a></h3>
+          <div class="decl" data-kind="{{if isInterface $t}}m{{else}}d{{end}}">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl $t}}</div>{{.Doc|comment}}
+          {{range .Consts}}<div class="decl" data-kind="c">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
+          {{range .Vars}}<div class="decl" data-kind="v">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
+          {{template "Examples" .|$.pdoc.ObjExamples}}
+
+          {{range .Funcs}}
+            <h4 id="{{.Name}}" data-kind="f">func {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a></h4>
+            <div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
+            {{template "Examples" .|$.pdoc.ObjExamples}}
+          {{end}}
+
+          {{range .Methods}}
+            <h4 id="{{$t.Name}}.{{.Name}}" data-kind="m">func ({{.Recv}}) {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{$t.Name}}.{{.Name}}">&para;</a></h4>
+            <div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
+            {{template "Examples" .|$.pdoc.ObjExamples}}
+          {{end}}
+        {{end}}
+        {{template "PkgCmdFooter" $}}
+        <div id="x-jump" tabindex="-1" class="modal">
+            <div class="modal-dialog">
+            <div class="modal-content">
+              <div class="modal-header">
+                <h4 class="modal-title">Jump to identifier</h4>
+                <br class="clearfix">
+                <input id="x-jump-filter" class="form-control" autocomplete="off" type="text">
+              </div>
+              <div id="x-jump-body" class="modal-body" style="height: 260px; overflow: auto;">
+                <div id="x-jump-list" class="list-group" style="margin-bottom: 0;"></div>
+              </div>
+              <div class="modal-footer">
+                <button type="button" class="btn" data-dismiss="modal">Close</button>
+            </div>
+          </div>
+        </div>
+      </div>
+
+{{if sidebarEnabled}}
+      </div>
+    </div>
+{{end}}
+
+  {{end}}
+{{end}}
+
+{{define "Examples"}}
+  {{if .}}
+    <div class="panel-group">
+    {{range .}}
+      <div class="panel panel-default" id="example-{{.ID}}">
+        <div class="panel-heading"><a class="accordion-toggle" data-toggle="collapse" href="#ex-{{.ID}}">Example{{with .Example.Name}} ({{.}}){{end}}</a></div>
+        <div id="ex-{{.ID}}" class="panel-collapse collapse"><div class="panel-body">
+          {{with .Example.Doc}}<p>{{.|comment}}{{end}}
+          <p>Code:{{if .Example.Play}}<span class="pull-right"><a href="?play={{.ID}}">play</a>&nbsp;</span>{{end}}
+          <pre>{{code .Example.Code nil}}</pre>
+          {{with .Example.Output}}<p>Output:<pre>{{.}}</pre>{{end}}
+        </div></div>
+      </div>
+    {{end}}
+    </div>
+  {{end}}
+{{end}}
diff --git a/gddo-server/assets/templates/pkg.txt b/gddo-server/assets/templates/pkg.txt
new file mode 100644
index 0000000..5de2389
--- /dev/null
+++ b/gddo-server/assets/templates/pkg.txt
@@ -0,0 +1,38 @@
+{{define "ROOT"}}{{with .pdoc}}PACKAGE{{if .Name}}
+
+package {{.Name}}
+    import "{{.ImportPath}}"
+
+{{.Doc|comment}}
+{{if .Consts}}
+CONSTANTS
+
+{{range .Consts}}{{.Decl.Text}}
+{{.Doc|comment}}{{end}}
+{{end}}{{if .Vars}}
+VARIABLES
+
+{{range .Vars}}{{.Decl.Text}}
+{{.Doc|comment}}{{end}}
+{{end}}{{if .Funcs}}
+FUNCTIONS
+
+{{range .Funcs}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{end}}{{end}}{{if .Types}}
+TYPES
+
+{{range .Types}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{range .Consts}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{end}}{{range .Vars}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{end}}{{range .Funcs}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{end}}{{range .Methods}}{{.Decl.Text}}
+{{.Doc|comment}}
+{{end}}{{end}}
+{{end}}
+{{template "Subdirs" $}}
+{{end}}{{end}}{{end}}
diff --git a/gddo-server/assets/templates/results.html b/gddo-server/assets/templates/results.html
new file mode 100644
index 0000000..5160a9b
--- /dev/null
+++ b/gddo-server/assets/templates/results.html
@@ -0,0 +1,14 @@
+{{define "Head"}}<title>{{.q}} - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
+
+{{define "Body"}}
+  <div class="well">
+    {{template "SearchBox" .q}}
+  </div>
+  <p>Try this search on <a href="http://go-search.org/search?q={{.q}}">Go-Search</a> 
+  or <a href="https://github.com/search?q={{.q}}+language:go">GitHub</a>.
+  {{if .pkgs}}
+    {{template "Pkgs" .pkgs}}
+  {{else}}
+    <p>No packages found.
+  {{end}}
+{{end}}
diff --git a/gddo-server/assets/templates/results.txt b/gddo-server/assets/templates/results.txt
new file mode 100644
index 0000000..02ce749
--- /dev/null
+++ b/gddo-server/assets/templates/results.txt
@@ -0,0 +1,2 @@
+{{define "ROOT"}}{{range .pkgs}}{{.Path}} {{.Synopsis}}
+{{end}}{{end}}
diff --git a/gddo-server/assets/templates/std.html b/gddo-server/assets/templates/std.html
new file mode 100644
index 0000000..42463e0
--- /dev/null
+++ b/gddo-server/assets/templates/std.html
@@ -0,0 +1,8 @@
+{{define "Head"}}<title>Standard Packages - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
+
+{{define "Body"}}
+  <h1>Go Standard Packages</h1>
+  {{template "Pkgs" .pkgs}}
+  <p>View the official documentation at <a href="http://golang.org/pkg/">golang.org</a>.
+{{end}}
+
diff --git a/gddo-server/assets/templates/subrepo.html b/gddo-server/assets/templates/subrepo.html
new file mode 100644
index 0000000..1f1bc4f
--- /dev/null
+++ b/gddo-server/assets/templates/subrepo.html
@@ -0,0 +1,23 @@
+{{define "Head"}}<title>Go Sub-Repository Packages - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
+
+{{define "Body"}}
+  <h1>Go Sub-repository Packages</h1>
+  These packages are part of the Go Project but outside the main Go tree. They are developed under looser compatibility requirements than the Go core.
+  <h2>Repositories</h2>
+  <ul class="list-unstyled">
+    {{template "subrepo" map "name" "blog" "desc" "the content and server program for blog.golang.org."}}
+    {{template "subrepo" map "name" "crypto" "desc" "additional cryptography packages."}}
+    {{template "subrepo" map "name" "exp" "desc" "experimental code (handle with care)."}}
+    {{template "subrepo" map "name" "image" "desc" "additional imaging packages."}}
+    {{template "subrepo" map "name" "mobile" "desc" "libraries and build tools for Go on Android."}}
+    {{template "subrepo" map "name" "net" "desc" "additional networking packages."}}
+    {{template "subrepo" map "name" "sys" "desc" "for low-level interactions with the operating system."}}
+    {{template "subrepo" map "name" "talks" "desc" "the content and server program for talks.golang.org."}}
+    {{template "subrepo" map "name" "text" "desc" "packages for working with text."}}
+    {{template "subrepo" map "name" "tools" "desc" "godoc, vet, cover, and other tools."}}
+  </ul>
+  <h2>Packages</h2>
+  {{template "Pkgs" .pkgs}}
+{{end}}
+
+{{define "subrepo"}}<li><a href="https://go.googlesource.com/{{.name}}/+/master">golang.org/x/{{.name}}</a> — {{.desc}}{{end}}
diff --git a/gddo-server/assets/templates/tools.html b/gddo-server/assets/templates/tools.html
new file mode 100644
index 0000000..d067942
--- /dev/null
+++ b/gddo-server/assets/templates/tools.html
@@ -0,0 +1,36 @@
+{{define "Head"}}<title>{{.pdoc.PageName}} tools - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
+
+{{define "Body"}}
+  {{template "ProjectNav" $}}
+  <h2>Tools for {{$.pdoc.PageName}}</h2>
+
+  <h3>Badge</h3>
+
+  <p><a href="{{.uri}}"><img src="{{.uri}}?status.svg" alt="GoDoc"></a>
+
+  <p>Use one of the snippets below to add a link to GoDoc from your project
+  website or README file:
+
+  <h5>HTML</h5>
+  <input type="text" value='<a href="{{.uri}}"><img src="{{.uri}}?status.svg" alt="GoDoc"></a>' class="click-select form-control">
+
+  <h5>Markdown</h5>
+  <input type="text" value="[![GoDoc]({{.uri}}?status.svg)]({{.uri}})" class="click-select form-control">
+
+  {{if .pdoc.Name}}
+    <h3>Lint</h3>
+    <form name="x-lint" method="POST" action="http://go-lint.appspot.com/-/refresh"><input name="importPath" type="hidden" value="{{.pdoc.ImportPath}}"></form>
+    <p><a href="javascript:document.getElementsByName('x-lint')[0].submit();">Run lint</a> on {{.pdoc.PageName}}.
+
+    {{if and (not .pdoc.IsCmd) (not .pdoc.Doc)}}
+      <p>The {{.pdoc.Name}} package does not have a package declaration
+      comment.  See the <a
+        href="http://blog.golang.org/godoc-documenting-go-code">Go
+        documentation guidelines</a> for information on how to write a package
+      comment. It's important to write a good summary of the package in the
+      first sentence of the package comment. GoDoc indexes the first sentence
+      and displays the first sentence in package lists.
+    {{end}}
+  {{end}}
+  <p>&nbsp;
+{{end}}
diff --git a/gddo-server/assets/third_party/jquery.timeago.js b/gddo-server/assets/third_party/jquery.timeago.js
new file mode 100644
index 0000000..e3731b2
--- /dev/null
+++ b/gddo-server/assets/third_party/jquery.timeago.js
@@ -0,0 +1,184 @@
+/**
+ * Timeago is a jQuery plugin that makes it easy to support automatically
+ * updating fuzzy timestamps (e.g. "4 minutes ago" or "about 1 day ago").
+ *
+ * @name timeago
+ * @version 1.1.0
+ * @requires jQuery v1.2.3+
+ * @author Ryan McGeary
+ * @license MIT License - http://www.opensource.org/licenses/mit-license.php
+ *
+ * For usage and examples, visit:
+ * http://timeago.yarp.com/
+ *
+ * Copyright (c) 2008-2013, Ryan McGeary (ryan -[at]- mcgeary [*dot*] org)
+ */
+
+(function (factory) {
+  if (typeof define === 'function' && define.amd) {
+    // AMD. Register as an anonymous module.
+    define(['jquery'], factory);
+  } else {
+    // Browser globals
+    factory(jQuery);
+  }
+}(function ($) {
+  $.timeago = function(timestamp) {
+    if (timestamp instanceof Date) {
+      return inWords(timestamp);
+    } else if (typeof timestamp === "string") {
+      return inWords($.timeago.parse(timestamp));
+    } else if (typeof timestamp === "number") {
+      return inWords(new Date(timestamp));
+    } else {
+      return inWords($.timeago.datetime(timestamp));
+    }
+  };
+  var $t = $.timeago;
+
+  $.extend($.timeago, {
+    settings: {
+      refreshMillis: 60000,
+      allowFuture: false,
+      localeTitle: false,
+      strings: {
+        prefixAgo: null,
+        prefixFromNow: null,
+        suffixAgo: "ago",
+        suffixFromNow: "from now",
+        seconds: "less than a minute",
+        minute: "about a minute",
+        minutes: "%d minutes",
+        hour: "about an hour",
+        hours: "about %d hours",
+        day: "a day",
+        days: "%d days",
+        month: "about a month",
+        months: "%d months",
+        year: "about a year",
+        years: "%d years",
+        wordSeparator: " ",
+        numbers: []
+      }
+    },
+    inWords: function(distanceMillis) {
+      var $l = this.settings.strings;
+      var prefix = $l.prefixAgo;
+      var suffix = $l.suffixAgo;
+      if (this.settings.allowFuture) {
+        if (distanceMillis < 0) {
+          prefix = $l.prefixFromNow;
+          suffix = $l.suffixFromNow;
+        }
+      }
+
+      var seconds = Math.abs(distanceMillis) / 1000;
+      var minutes = seconds / 60;
+      var hours = minutes / 60;
+      var days = hours / 24;
+      var years = days / 365;
+
+      function substitute(stringOrFunction, number) {
+        var string = $.isFunction(stringOrFunction) ? stringOrFunction(number, distanceMillis) : stringOrFunction;
+        var value = ($l.numbers && $l.numbers[number]) || number;
+        return string.replace(/%d/i, value);
+      }
+
+      var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) ||
+        seconds < 90 && substitute($l.minute, 1) ||
+        minutes < 45 && substitute($l.minutes, Math.round(minutes)) ||
+        minutes < 90 && substitute($l.hour, 1) ||
+        hours < 24 && substitute($l.hours, Math.round(hours)) ||
+        hours < 42 && substitute($l.day, 1) ||
+        days < 30 && substitute($l.days, Math.round(days)) ||
+        days < 45 && substitute($l.month, 1) ||
+        days < 365 && substitute($l.months, Math.round(days / 30)) ||
+        years < 1.5 && substitute($l.year, 1) ||
+        substitute($l.years, Math.round(years));
+
+      var separator = $l.wordSeparator || "";
+      if ($l.wordSeparator === undefined) { separator = " "; }
+      return $.trim([prefix, words, suffix].join(separator));
+    },
+    parse: function(iso8601) {
+      var s = $.trim(iso8601);
+      s = s.replace(/\.\d+/,""); // remove milliseconds
+      s = s.replace(/-/,"/").replace(/-/,"/");
+      s = s.replace(/T/," ").replace(/Z/," UTC");
+      s = s.replace(/([\+\-]\d\d)\:?(\d\d)/," $1$2"); // -04:00 -> -0400
+      return new Date(s);
+    },
+    datetime: function(elem) {
+      var iso8601 = $t.isTime(elem) ? $(elem).attr("datetime") : $(elem).attr("title");
+      return $t.parse(iso8601);
+    },
+    isTime: function(elem) {
+      // jQuery's `is()` doesn't play well with HTML5 in IE
+      return $(elem).get(0).tagName.toLowerCase() === "time"; // $(elem).is("time");
+    }
+  });
+
+  // functions that can be called via $(el).timeago('action')
+  // init is default when no action is given
+  // functions are called with context of a single element
+  var functions = {
+    init: function(){
+      var refresh_el = $.proxy(refresh, this);
+      refresh_el();
+      var $s = $t.settings;
+      if ($s.refreshMillis > 0) {
+        setInterval(refresh_el, $s.refreshMillis);
+      }
+    },
+    update: function(time){
+      $(this).data('timeago', { datetime: $t.parse(time) });
+      refresh.apply(this);
+    }
+  };
+
+  $.fn.timeago = function(action, options) {
+    var fn = action ? functions[action] : functions.init;
+    if(!fn){
+      throw new Error("Unknown function name '"+ action +"' for timeago");
+    }
+    // each over objects here and call the requested function
+    this.each(function(){
+      fn.call(this, options);
+    });
+    return this;
+  };
+
+  function refresh() {
+    var data = prepareData(this);
+    if (!isNaN(data.datetime)) {
+      $(this).text(inWords(data.datetime));
+    }
+    return this;
+  }
+
+  function prepareData(element) {
+    element = $(element);
+    if (!element.data("timeago")) {
+      element.data("timeago", { datetime: $t.datetime(element) });
+      var text = $.trim(element.text());
+      if ($t.settings.localeTitle) {
+        element.attr("title", element.data('timeago').datetime.toLocaleString());
+      } else if (text.length > 0 && !($t.isTime(element) && element.attr("title"))) {
+        element.attr("title", text);
+      }
+    }
+    return element.data("timeago");
+  }
+
+  function inWords(date) {
+    return $t.inWords(distance(date));
+  }
+
+  function distance(date) {
+    return (new Date().getTime() - date.getTime());
+  }
+
+  // fix for IE6 suckage
+  document.createElement("abbr");
+  document.createElement("time");
+}));
diff --git a/gddo-server/background.go b/gddo-server/background.go
new file mode 100644
index 0000000..b9adec9
--- /dev/null
+++ b/gddo-server/background.go
@@ -0,0 +1,114 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"flag"
+	"github.com/golang/gddo/gosrc"
+	"log"
+	"time"
+)
+
+var backgroundTasks = []*struct {
+	name     string
+	fn       func() error
+	interval *time.Duration
+	next     time.Time
+}{
+	{
+		name:     "GitHub updates",
+		fn:       readGitHubUpdates,
+		interval: flag.Duration("github_interval", 0, "Github updates crawler sleeps for this duration between fetches. Zero disables the crawler."),
+	},
+	{
+		name:     "Crawl",
+		fn:       doCrawl,
+		interval: flag.Duration("crawl_interval", 0, "Package updater sleeps for this duration between package updates. Zero disables updates."),
+	},
+}
+
+func runBackgroundTasks() {
+	defer log.Println("ERROR: Background exiting!")
+
+	sleep := time.Minute
+	for _, task := range backgroundTasks {
+		if *task.interval > 0 && sleep > *task.interval {
+			sleep = *task.interval
+		}
+	}
+
+	for {
+		for _, task := range backgroundTasks {
+			start := time.Now()
+			if *task.interval > 0 && start.After(task.next) {
+				if err := task.fn(); err != nil {
+					log.Printf("Task %s: %v", task.name, err)
+				}
+				task.next = time.Now().Add(*task.interval)
+			}
+		}
+		time.Sleep(sleep)
+	}
+}
+
+func doCrawl() error {
+	// Look for new package to crawl.
+	importPath, hasSubdirs, err := db.PopNewCrawl()
+	if err != nil {
+		log.Printf("db.PopNewCrawl() returned error %v", err)
+		return nil
+	}
+	if importPath != "" {
+		if pdoc, err := crawlDoc("new", importPath, nil, hasSubdirs, time.Time{}); pdoc == nil && err == nil {
+			if err := db.AddBadCrawl(importPath); err != nil {
+				log.Printf("ERROR db.AddBadCrawl(%q): %v", importPath, err)
+			}
+		}
+		return nil
+	}
+
+	// Crawl existing doc.
+	pdoc, pkgs, nextCrawl, err := db.Get("-")
+	if err != nil {
+		log.Printf("db.Get(\"-\") returned error %v", err)
+		return nil
+	}
+	if pdoc == nil || nextCrawl.After(time.Now()) {
+		return nil
+	}
+	if _, err = crawlDoc("crawl", pdoc.ImportPath, pdoc, len(pkgs) > 0, nextCrawl); err != nil {
+		// Touch package so that crawl advances to next package.
+		if err := db.SetNextCrawlEtag(pdoc.ProjectRoot, pdoc.Etag, time.Now().Add(*maxAge/3)); err != nil {
+			log.Printf("ERROR db.SetNextCrawlEtag(%q): %v", pdoc.ImportPath, err)
+		}
+	}
+	return nil
+}
+
+func readGitHubUpdates() error {
+	const key = "gitHubUpdates"
+	var last string
+	if err := db.GetGob(key, &last); err != nil {
+		return err
+	}
+	last, names, err := gosrc.GetGitHubUpdates(httpClient, last)
+	if err != nil {
+		return err
+	}
+
+	for _, name := range names {
+		log.Printf("bump crawl github.com/%s", name)
+		if err := db.BumpCrawl("github.com/" + name); err != nil {
+			log.Println("ERROR force crawl:", err)
+		}
+	}
+
+	if err := db.PutGob(key, last); err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/gddo-server/browse.go b/gddo-server/browse.go
new file mode 100644
index 0000000..e74128b
--- /dev/null
+++ b/gddo-server/browse.go
@@ -0,0 +1,97 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"net/url"
+	"path"
+	"regexp"
+	"strings"
+)
+
+// importPathFromGoogleBrowse converts submatches of the Google Code source
+// browser URL pattern (project, directory, query, fragment) to a
+// code.google.com/p/... import path, including any sub-repository named in
+// the "repo" query parameter.
+func importPathFromGoogleBrowse(m []string) string {
+	project := m[1]
+	dir := m[2]
+	if dir == "" {
+		dir = "/"
+	} else if dir[len(dir)-1] == '/' {
+		dir = dir[:len(dir)-1]
+	}
+	subrepo := ""
+	if len(m[3]) > 0 {
+		// m[3] is the raw query string (with leading '?').
+		v, _ := url.ParseQuery(m[3][1:])
+		subrepo = v.Get("repo")
+		if len(subrepo) > 0 {
+			subrepo = "." + subrepo
+		}
+	}
+	// A fragment of the form "#hg%2F<dir>..." encodes a directory within the
+	// repository; decode it up to the next escape and append it to the path.
+	if strings.HasPrefix(m[4], "#hg%2F") {
+		d, _ := url.QueryUnescape(m[4][len("#hg%2f"):])
+		if i := strings.IndexRune(d, '%'); i >= 0 {
+			d = d[:i]
+		}
+		dir = dir + "/" + d
+	}
+	return "code.google.com/p/" + project + subrepo + dir
+}
+
+// browsePatterns maps URLs of well-known VCS source browsers to import
+// paths. Patterns are tried in order; the final catch-all strips the scheme
+// and surrounding slashes.
+var browsePatterns = []struct {
+	pat *regexp.Regexp
+	fn  func([]string) string
+}{
+	{
+		// GitHub tree browser.
+		regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)(?:/tree/[^/]+(/.*))?$`),
+		func(m []string) string { return m[1] + m[2] },
+	},
+	{
+		// GitHub file browser.
+		regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)/blob/[^/]+/(.*)$`),
+		func(m []string) string {
+			d := path.Dir(m[2])
+			if d == "." {
+				return m[1]
+			}
+			return m[1] + "/" + d
+		},
+	},
+	{
+		// GitHub issues, pulls, etc.
+		regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)(.*)$`),
+		func(m []string) string { return m[1] },
+	},
+	{
+		// Bitbucket source browser.
+		regexp.MustCompile(`^https?://(bitbucket\.org/[^/]+/[^/]+)(?:/src/[^/]+(/[^?]+)?)?`),
+		func(m []string) string { return m[1] + m[2] },
+	},
+	{
+		// Google Project Hosting source browser.
+		regexp.MustCompile(`^http:/+code\.google\.com/p/([^/]+)/source/browse(/[^?#]*)?(\?[^#]*)?(#.*)?$`),
+		importPathFromGoogleBrowse,
+	},
+	{
+		// Launchpad source browser.
+		regexp.MustCompile(`^https?:/+bazaar\.(launchpad\.net/.*)/files$`),
+		func(m []string) string { return m[1] },
+	},
+	{
+		// Catch-all: strip the scheme and any surrounding slashes.
+		regexp.MustCompile(`^https?://(.+)$`),
+		func(m []string) string { return strings.Trim(m[1], "/") },
+	},
+}
+
+// isBrowseURL returns importPath and true if URL looks like a URL for a VCS
+// source browser.
+func isBrowseURL(s string) (importPath string, ok bool) {
+	for _, c := range browsePatterns {
+		if m := c.pat.FindStringSubmatch(s); m != nil {
+			return c.fn(m), true
+		}
+	}
+	return "", false
+}
diff --git a/gddo-server/browse_test.go b/gddo-server/browse_test.go
new file mode 100644
index 0000000..67ef2d0
--- /dev/null
+++ b/gddo-server/browse_test.go
@@ -0,0 +1,40 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"testing"
+)
+
+// isBrowseURLTests lists source-browser URLs together with the import path
+// each should map to via isBrowseURL.
+var isBrowseURLTests = []struct {
+	s          string
+	importPath string
+	ok         bool
+}{
+	{"https://github.com/garyburd/gddo/blob/master/doc/code.go", "github.com/garyburd/gddo/doc", true},
+	{"https://github.com/garyburd/go-oauth/blob/master/.gitignore", "github.com/garyburd/go-oauth", true},
+	{"https://github.com/garyburd/gddo/issues/154", "github.com/garyburd/gddo", true},
+	{"https://bitbucket.org/user/repo/src/bd0b661a263e/p1/p2?at=default", "bitbucket.org/user/repo/p1/p2", true},
+	{"https://bitbucket.org/user/repo/src", "bitbucket.org/user/repo", true},
+	{"https://bitbucket.org/user/repo", "bitbucket.org/user/repo", true},
+	{"https://github.com/user/repo", "github.com/user/repo", true},
+	{"https://github.com/user/repo/tree/master/p1", "github.com/user/repo/p1", true},
+	{"http://code.google.com/p/project", "code.google.com/p/project", true},
+}
+
+// TestIsBrowseURL checks isBrowseURL against the table above.
+func TestIsBrowseURL(t *testing.T) {
+	for _, tt := range isBrowseURLTests {
+		importPath, ok := isBrowseURL(tt.s)
+		if tt.ok {
+			if importPath != tt.importPath || ok != true {
+				t.Errorf("IsBrowseURL(%q) = %q, %v; want %q %v", tt.s, importPath, ok, tt.importPath, true)
+			}
+		} else if ok {
+			t.Errorf("IsBrowseURL(%q) = %q, %v; want _, false", tt.s, importPath, ok)
+		}
+	}
+}
diff --git a/gddo-server/client.go b/gddo-server/client.go
new file mode 100644
index 0000000..96ad2af
--- /dev/null
+++ b/gddo-server/client.go
@@ -0,0 +1,78 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// This file implements an http.Client with request timeouts set by command
+// line flags.
+
+package main
+
+import (
+	"flag"
+	"log"
+	"net"
+	"net/http"
+	"time"
+)
+
+var (
+	dialTimeout    = flag.Duration("dial_timeout", 5*time.Second, "Timeout for dialing an HTTP connection.")
+	requestTimeout = flag.Duration("request_timeout", 20*time.Second, "Time out for roundtripping an HTTP request.")
+)
+
+// timeoutConn wraps a net.Conn so that the deadline set in timeoutDial is
+// cleared once reads start succeeding.
+type timeoutConn struct {
+	net.Conn
+}
+
+// Read reads from the underlying connection and then clears the read
+// deadline (see the comment in timeoutDial for why a deadline was set).
+func (c timeoutConn) Read(p []byte) (int, error) {
+	n, err := c.Conn.Read(p)
+	c.Conn.SetReadDeadline(time.Time{})
+	return n, err
+}
+
+// timeoutDial dials with the -dial_timeout flag's timeout and returns a
+// connection whose initial deadline guards against servers that accept and
+// then ignore the connection.
+func timeoutDial(network, addr string) (net.Conn, error) {
+	c, err := net.DialTimeout(network, addr, *dialTimeout)
+	if err != nil {
+		return c, err
+	}
+	// The net/http transport CancelRequest feature does not work until after
+	// the TLS handshake is complete. To help catch hangs during the TLS
+	// handshake, we set a deadline on the connection here and clear the
+	// deadline when the first read on the connection completes. This is not
+	// perfect, but it does catch the case where the server accepts and ignores
+	// a connection.
+	c.SetDeadline(time.Now().Add(*requestTimeout))
+	return timeoutConn{c}, nil
+}
+
+// transport wraps http.Transport to enforce a total per-request timeout,
+// attach GitHub API credentials, and set the configured User-Agent header.
+type transport struct {
+	t http.Transport
+}
+
+// RoundTrip implements http.RoundTripper. The request is canceled if it does
+// not complete within -request_timeout.
+func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
+	timer := time.AfterFunc(*requestTimeout, func() {
+		t.t.CancelRequest(req)
+		log.Printf("Canceled request for %s", req.URL)
+	})
+	defer timer.Stop()
+	// Append client_id/client_secret to GitHub API calls — presumably for
+	// the higher authenticated rate limit.
+	if req.URL.Host == "api.github.com" && gitHubCredentials != "" {
+		if req.URL.RawQuery == "" {
+			req.URL.RawQuery = gitHubCredentials
+		} else {
+			req.URL.RawQuery += "&" + gitHubCredentials
+		}
+	}
+	if userAgent != "" {
+		req.Header.Set("User-Agent", userAgent)
+	}
+	return t.t.RoundTrip(req)
+}
+
+// httpClient is the shared client used for all outbound fetches.
+// NOTE(review): *requestTimeout is dereferenced at package init, before
+// flag.Parse runs, so ResponseHeaderTimeout always reflects the flag's
+// default value — confirm whether that is intended.
+var httpClient = &http.Client{Transport: &transport{
+	t: http.Transport{
+		Proxy: http.ProxyFromEnvironment,
+		Dial:  timeoutDial,
+		ResponseHeaderTimeout: *requestTimeout / 2,
+	}}}
diff --git a/gddo-server/config.go.template b/gddo-server/config.go.template
new file mode 100644
index 0000000..efea18b
--- /dev/null
+++ b/gddo-server/config.go.template
@@ -0,0 +1,7 @@
+package main
+
+// This file is a template: copy it to config.go and fill in real values.
+func init() {
+	// Register an application at https://github.com/settings/applications/new
+	// and enter the client ID and client secret here.
+	gitHubCredentials = "client_id=<id>&client_secret=<secret>"
+}
diff --git a/gddo-server/crawl.go b/gddo-server/crawl.go
new file mode 100644
index 0000000..d0a0645
--- /dev/null
+++ b/gddo-server/crawl.go
@@ -0,0 +1,98 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"log"
+	"regexp"
+	"strings"
+	"time"
+
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+)
+
+// testdataPat matches import paths containing a "testdata" path element.
+var testdataPat = regexp.MustCompile(`/testdata(?:/|$)`)
+
+// crawlDoc fetches the package documentation from the VCS and updates the database.
+// source is a short label used in the log line; pdoc is the currently stored
+// package (nil if none); nextCrawl is the previously scheduled crawl time and
+// is used only to log how late this crawl is running.
+func crawlDoc(source string, importPath string, pdoc *doc.Package, hasSubdirs bool, nextCrawl time.Time) (*doc.Package, error) {
+	// Accumulate log fields and emit a single line when the crawl finishes.
+	message := []interface{}{source}
+	defer func() {
+		message = append(message, importPath)
+		log.Println(message...)
+	}()
+
+	if !nextCrawl.IsZero() {
+		d := time.Since(nextCrawl) / time.Hour
+		if d > 0 {
+			message = append(message, "late:", int64(d))
+		}
+	}
+
+	etag := ""
+	if pdoc != nil {
+		etag = pdoc.Etag
+		message = append(message, "etag:", etag)
+	}
+
+	start := time.Now()
+	var err error
+	if strings.HasPrefix(importPath, "code.google.com/p/go.") {
+		// Old import path for Go sub-repository.
+		pdoc = nil
+		err = gosrc.NotFoundError{Message: "old Go sub-repo", Redirect: "golang.org/x/" + importPath[len("code.google.com/p/go."):]}
+	} else if blocked, e := db.IsBlocked(importPath); blocked && e == nil {
+		pdoc = nil
+		err = gosrc.NotFoundError{Message: "blocked."}
+	} else if testdataPat.MatchString(importPath) {
+		pdoc = nil
+		err = gosrc.NotFoundError{Message: "testdata."}
+	} else {
+		var pdocNew *doc.Package
+		pdocNew, err = doc.Get(httpClient, importPath, etag)
+		message = append(message, "fetch:", int64(time.Since(start)/time.Millisecond))
+		if err == nil && pdocNew.Name == "" && !hasSubdirs {
+			pdoc = nil
+			err = gosrc.NotFoundError{Message: "no Go files or subdirs"}
+		} else if err != gosrc.ErrNotModified {
+			// On ErrNotModified keep the stored pdoc; otherwise adopt the
+			// freshly fetched document (possibly nil on error).
+			pdoc = pdocNew
+		}
+	}
+
+	// Schedule the next crawl. GitHub paths and packages with errors are
+	// crawled less frequently; gists least of all.
+	nextCrawl = start.Add(*maxAge)
+	switch {
+	case strings.HasPrefix(importPath, "github.com/") || (pdoc != nil && len(pdoc.Errors) > 0):
+		nextCrawl = start.Add(*maxAge * 7)
+	case strings.HasPrefix(importPath, "gist.github.com/"):
+		// Don't spend time on gists. It's a silly thing to do.
+		nextCrawl = start.Add(*maxAge * 30)
+	}
+
+	switch {
+	case err == nil:
+		message = append(message, "put:", pdoc.Etag)
+		if err := db.Put(pdoc, nextCrawl, false); err != nil {
+			log.Printf("ERROR db.Put(%q): %v", importPath, err)
+		}
+		return pdoc, nil
+	case err == gosrc.ErrNotModified:
+		message = append(message, "touch")
+		if err := db.SetNextCrawlEtag(pdoc.ProjectRoot, pdoc.Etag, nextCrawl); err != nil {
+			log.Printf("ERROR db.SetNextCrawlEtag(%q): %v", importPath, err)
+		}
+		return pdoc, nil
+	case gosrc.IsNotFound(err):
+		message = append(message, "notfound:", err)
+		if err := db.Delete(importPath); err != nil {
+			log.Printf("ERROR db.Delete(%q): %v", importPath, err)
+		}
+		return nil, err
+	default:
+		message = append(message, "ERROR:", err)
+		return nil, err
+	}
+}
diff --git a/gddo-server/graph.go b/gddo-server/graph.go
new file mode 100644
index 0000000..e5bc488
--- /dev/null
+++ b/gddo-server/graph.go
@@ -0,0 +1,48 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"os/exec"
+	"strings"
+
+	"github.com/golang/gddo/database"
+	"github.com/golang/gddo/doc"
+)
+
+// renderGraph renders an import graph as SVG by piping a generated Graphviz
+// document through the external "dot" command. The returned bytes start at
+// the <svg element so they can be embedded directly in an HTML page.
+func renderGraph(pdoc *doc.Package, pkgs []database.Package, edges [][2]int) ([]byte, error) {
+	var in, out bytes.Buffer
+
+	fmt.Fprintf(&in, "digraph %s { \n", pdoc.Name)
+	for i, pkg := range pkgs {
+		fmt.Fprintf(&in, " n%d [label=\"%s\", URL=\"/%s\", tooltip=\"%s\"];\n",
+			i, pkg.Path, pkg.Path,
+			strings.Replace(pkg.Synopsis, `"`, `\"`, -1))
+	}
+	for _, edge := range edges {
+		fmt.Fprintf(&in, " n%d -> n%d;\n", edge[0], edge[1])
+	}
+	in.WriteString("}")
+
+	cmd := exec.Command("dot", "-Tsvg")
+	cmd.Stdin = &in
+	cmd.Stdout = &out
+	if err := cmd.Run(); err != nil {
+		return nil, err
+	}
+
+	// Strip the XML prologue emitted by dot; serve from the <svg element on.
+	p := out.Bytes()
+	i := bytes.Index(p, []byte("<svg"))
+	if i < 0 {
+		return nil, errors.New("<svg not found")
+	}
+	return p[i:], nil
+}
diff --git a/gddo-server/main.go b/gddo-server/main.go
new file mode 100644
index 0000000..486addd
--- /dev/null
+++ b/gddo-server/main.go
@@ -0,0 +1,904 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Command gddo-server is the GoPkgDoc server.
+package main
+
+import (
+	"bytes"
+	"crypto/md5"
+	"encoding/json"
+	"errors"
+	"flag"
+	"fmt"
+	"go/build"
+	"html/template"
+	"io"
+	"log"
+	"net/http"
+	"os"
+	"path"
+	"path/filepath"
+	"regexp"
+	"runtime/debug"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/golang/gddo/database"
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+	"github.com/golang/gddo/httputil"
+)
+
+// MIME types used in Content-Type response headers.
+const (
+	jsonMIMEType = "application/json; charset=utf-8"
+	textMIMEType = "text/plain; charset=utf-8"
+	htmlMIMEType = "text/html; charset=utf-8"
+)
+
+// errUpdateTimeout is returned when a package refresh does not complete
+// within the configured timeout.
+var errUpdateTimeout = errors.New("refresh timeout")
+
+// httpError carries an HTTP status code (and optional cause) from a handler
+// to the error-rendering code in runHandler.
+type httpError struct {
+	status int   // HTTP status code.
+	err    error // Optional reason for the HTTP error.
+}
+
+// Error implements the error interface. Both branches use the same
+// lowercase "status" prefix for consistency.
+func (err *httpError) Error() string {
+	if err.err != nil {
+		return fmt.Sprintf("status %d, reason %s", err.status, err.err.Error())
+	}
+	return fmt.Sprintf("status %d", err.status)
+}
+
+// Request classes used by getDoc to decide whether a crawl is needed.
+const (
+	humanRequest = iota
+	robotRequest
+	queryRequest
+	refreshRequest
+)
+
+// crawlResult carries the outcome of an asynchronous crawlDoc call.
+type crawlResult struct {
+	pdoc *doc.Package
+	err  error
+}
+
+// getDoc gets the package documentation from the database or from the version
+// control system as needed. The requestType selects the crawl policy, and a
+// crawl that overruns the timeout falls back to serving stale data when any
+// is available.
+func getDoc(path string, requestType int) (*doc.Package, []database.Package, error) {
+	if path == "-" {
+		// A hack in the database package uses the path "-" to represent the
+		// next document to crawl. Block "-" here so that requests to /- always
+		// return not found.
+		return nil, nil, &httpError{status: http.StatusNotFound}
+	}
+
+	pdoc, pkgs, nextCrawl, err := db.Get(path)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	needsCrawl := false
+	switch requestType {
+	case queryRequest:
+		needsCrawl = nextCrawl.IsZero() && len(pkgs) == 0
+	case humanRequest:
+		needsCrawl = nextCrawl.Before(time.Now())
+	case robotRequest:
+		needsCrawl = nextCrawl.IsZero() && len(pkgs) > 0
+	}
+
+	if !needsCrawl {
+		return pdoc, pkgs, nil
+	}
+
+	// Crawl asynchronously so we can bound how long the request waits.
+	c := make(chan crawlResult, 1)
+	go func() {
+		pdoc, err := crawlDoc("web  ", path, pdoc, len(pkgs) > 0, nextCrawl)
+		c <- crawlResult{pdoc, err}
+	}()
+
+	// Allow extra time when the package has never been fetched before.
+	timeout := *getTimeout
+	if pdoc == nil {
+		timeout = *firstGetTimeout
+	}
+
+	select {
+	case cr := <-c:
+		err = cr.err
+		if err == nil {
+			pdoc = cr.pdoc
+		}
+	case <-time.After(timeout):
+		err = errUpdateTimeout
+	}
+
+	switch {
+	case err == nil:
+		return pdoc, pkgs, nil
+	case gosrc.IsNotFound(err):
+		return nil, nil, err
+	case pdoc != nil:
+		// Prefer stale data over an error page.
+		log.Printf("Serving %q from database after error getting doc: %v", path, err)
+		return pdoc, pkgs, nil
+	case err == errUpdateTimeout:
+		log.Printf("Serving %q as not found after timeout getting doc", path)
+		return nil, nil, &httpError{status: http.StatusNotFound}
+	default:
+		return nil, nil, err
+	}
+}
+
+// templateExt selects the template suffix (".html" or ".txt") based on the
+// client's negotiated content type.
+func templateExt(req *http.Request) string {
+	ext := ".html"
+	if httputil.NegotiateContentType(req, []string{"text/html", "text/plain"}, "text/html") == "text/plain" {
+		ext = ".txt"
+	}
+	return ext
+}
+
+var (
+	// robotPat matches User-Agent strings of well-known crawlers: agents
+	// advertising a "+http(s)://..." info URL, anything containing the word
+	// "bot", and common HTTP library defaults (Python-urllib, Go, Java).
+	// The first group was previously written "(:?...)" — a capturing group
+	// with a stray optional colon — instead of the intended non-capturing
+	// group "(?:...)".
+	robotPat = regexp.MustCompile(`(?:\+https?://)|(?:\Wbot\W)|(?:^Python-urllib)|(?:^Go )|(?:^Java/)`)
+)
+
+// isRobot reports whether the request appears to come from a crawler, either
+// by User-Agent pattern or by the per-host request counter exceeding the
+// -robot threshold.
+func isRobot(req *http.Request) bool {
+	if robotPat.MatchString(req.Header.Get("User-Agent")) {
+		return true
+	}
+	host := httputil.StripPort(req.RemoteAddr)
+	n, err := db.IncrementCounter(host, 1)
+	if err != nil {
+		log.Printf("error incrementing counter for %s, %v", host, err)
+		return false
+	}
+	if n > *robot {
+		log.Printf("robot %.2f %s %s", n, host, req.Header.Get("User-Agent"))
+		return true
+	}
+	return false
+}
+
+// popularLinkReferral reports whether the request was referred from this
+// site's own home page; servePackage skips the popularity bump for these.
+func popularLinkReferral(req *http.Request) bool {
+	return strings.HasSuffix(req.Header.Get("Referer"), "//"+req.Host+"/")
+}
+
+// isView reports whether the request's raw query selects the named view:
+// the query is exactly key, or key followed by '=' or '&'.
+func isView(req *http.Request, key string) bool {
+	q := req.URL.RawQuery
+	if !strings.HasPrefix(q, key) {
+		return false
+	}
+	rest := q[len(key):]
+	return rest == "" || rest[0] == '=' || rest[0] == '&'
+}
+
+// httpEtag returns the package entity tag used in HTTP transactions.
+// It hashes the document's update time and etag, the (capped) importer
+// count, the sub-package list, the sidebar flag, and any flash messages.
+func httpEtag(pdoc *doc.Package, pkgs []database.Package, importerCount int, flashMessages []flashMessage) string {
+	b := make([]byte, 0, 128)
+	b = strconv.AppendInt(b, pdoc.Updated.Unix(), 16)
+	b = append(b, 0)
+	b = append(b, pdoc.Etag...)
+	// Cap the importer count so the etag stops changing once a package
+	// already has several importers.
+	if importerCount >= 8 {
+		importerCount = 8
+	}
+	b = append(b, 0)
+	b = strconv.AppendInt(b, int64(importerCount), 16)
+	for _, pkg := range pkgs {
+		b = append(b, 0)
+		b = append(b, pkg.Path...)
+		b = append(b, 0)
+		b = append(b, pkg.Synopsis...)
+	}
+	if *sidebarEnabled {
+		b = append(b, "\000xsb"...)
+	}
+	for _, m := range flashMessages {
+		b = append(b, 0)
+		b = append(b, m.ID...)
+		for _, a := range m.Args {
+			b = append(b, 1)
+			b = append(b, a...)
+		}
+	}
+	h := md5.New()
+	h.Write(b)
+	b = h.Sum(b[:0])
+	return fmt.Sprintf("\"%x\"", b)
+}
+
+// servePackage serves the package documentation page at /<importPath> and
+// its query-selected views (imports, importers, import-graph, tools, play,
+// status images), redirecting deprecated view= queries to the new forms.
+func servePackage(resp http.ResponseWriter, req *http.Request) error {
+	// Canonicalize the path and strip the legacy /pkg/ prefix.
+	p := path.Clean(req.URL.Path)
+	if strings.HasPrefix(p, "/pkg/") {
+		p = p[len("/pkg"):]
+	}
+	if p != req.URL.Path {
+		http.Redirect(resp, req, p, http.StatusMovedPermanently)
+		return nil
+	}
+
+	if isView(req, "status.svg") {
+		statusImageHandlerSVG.ServeHTTP(resp, req)
+		return nil
+	}
+
+	if isView(req, "status.png") {
+		statusImageHandlerPNG.ServeHTTP(resp, req)
+		return nil
+	}
+
+	requestType := humanRequest
+	if isRobot(req) {
+		requestType = robotRequest
+	}
+
+	importPath := strings.TrimPrefix(req.URL.Path, "/")
+	pdoc, pkgs, err := getDoc(importPath, requestType)
+
+	if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
+		// To prevent dumb clients from following redirect loops, respond with
+		// status 404 if the target document is not found.
+		if _, _, err := getDoc(e.Redirect, requestType); gosrc.IsNotFound(err) {
+			return &httpError{status: http.StatusNotFound}
+		}
+		u := "/" + e.Redirect
+		if req.URL.RawQuery != "" {
+			u += "?" + req.URL.RawQuery
+		}
+		setFlashMessages(resp, []flashMessage{{ID: "redir", Args: []string{importPath}}})
+		http.Redirect(resp, req, u, http.StatusFound)
+		return nil
+	}
+	if err != nil {
+		return err
+	}
+
+	flashMessages := getFlashMessages(resp, req)
+
+	if pdoc == nil {
+		if len(pkgs) == 0 {
+			return &httpError{status: http.StatusNotFound}
+		}
+		// Directory with no buildable Go files: synthesize a stub document
+		// from the first child package's project metadata.
+		pdocChild, _, _, err := db.Get(pkgs[0].Path)
+		if err != nil {
+			return err
+		}
+		pdoc = &doc.Package{
+			ProjectName: pdocChild.ProjectName,
+			ProjectRoot: pdocChild.ProjectRoot,
+			ProjectURL:  pdocChild.ProjectURL,
+			ImportPath:  importPath,
+		}
+	}
+
+	switch {
+	case len(req.Form) == 0:
+		// Plain package page (no query parameters).
+		importerCount := 0
+		if pdoc.Name != "" {
+			importerCount, err = db.ImporterCount(importPath)
+			if err != nil {
+				return err
+			}
+		}
+
+		etag := httpEtag(pdoc, pkgs, importerCount, flashMessages)
+		status := http.StatusOK
+		if req.Header.Get("If-None-Match") == etag {
+			status = http.StatusNotModified
+		}
+
+		if requestType == humanRequest &&
+			pdoc.Name != "" && // not a directory
+			pdoc.ProjectRoot != "" && // not a standard package
+			!pdoc.IsCmd &&
+			len(pdoc.Errors) == 0 &&
+			!popularLinkReferral(req) {
+			if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
+				log.Printf("ERROR db.IncrementPopularScore(%s): %v", pdoc.ImportPath, err)
+			}
+		}
+
+		template := "dir"
+		switch {
+		case pdoc.IsCmd:
+			template = "cmd"
+		case pdoc.Name != "":
+			template = "pkg"
+		}
+		template += templateExt(req)
+
+		return executeTemplate(resp, template, status, http.Header{"Etag": {etag}}, map[string]interface{}{
+			"flashMessages": flashMessages,
+			"pkgs":          pkgs,
+			"pdoc":          newTDoc(pdoc),
+			"importerCount": importerCount,
+		})
+	case isView(req, "imports"):
+		if pdoc.Name == "" {
+			break
+		}
+		pkgs, err = db.Packages(pdoc.Imports)
+		if err != nil {
+			return err
+		}
+		return executeTemplate(resp, "imports.html", http.StatusOK, nil, map[string]interface{}{
+			"flashMessages": flashMessages,
+			"pkgs":          pkgs,
+			"pdoc":          newTDoc(pdoc),
+		})
+	case isView(req, "tools"):
+		proto := "http"
+		if req.Host == "godoc.org" {
+			proto = "https"
+		}
+		return executeTemplate(resp, "tools.html", http.StatusOK, nil, map[string]interface{}{
+			"flashMessages": flashMessages,
+			"uri":           fmt.Sprintf("%s://%s/%s", proto, req.Host, importPath),
+			"pdoc":          newTDoc(pdoc),
+		})
+	case isView(req, "importers"):
+		if pdoc.Name == "" {
+			break
+		}
+		pkgs, err = db.Importers(importPath)
+		if err != nil {
+			return err
+		}
+		template := "importers.html"
+		if requestType == robotRequest {
+			// Hide back links from robots.
+			template = "importers_robot.html"
+		}
+		return executeTemplate(resp, template, http.StatusOK, nil, map[string]interface{}{
+			"flashMessages": flashMessages,
+			"pkgs":          pkgs,
+			"pdoc":          newTDoc(pdoc),
+		})
+	case isView(req, "import-graph"):
+		if pdoc.Name == "" {
+			break
+		}
+		hide := database.ShowAllDeps
+		switch req.Form.Get("hide") {
+		case "1":
+			hide = database.HideStandardDeps
+		case "2":
+			hide = database.HideStandardAll
+		}
+		pkgs, edges, err := db.ImportGraph(pdoc, hide)
+		if err != nil {
+			return err
+		}
+		b, err := renderGraph(pdoc, pkgs, edges)
+		if err != nil {
+			return err
+		}
+		return executeTemplate(resp, "graph.html", http.StatusOK, nil, map[string]interface{}{
+			"flashMessages": flashMessages,
+			"svg":           template.HTML(b),
+			"pdoc":          newTDoc(pdoc),
+			"hide":          hide,
+		})
+	case isView(req, "play"):
+		u, err := playURL(pdoc, req.Form.Get("play"))
+		if err != nil {
+			return err
+		}
+		http.Redirect(resp, req, u, http.StatusMovedPermanently)
+		return nil
+	case req.Form.Get("view") != "":
+		// Redirect deprecated view= queries.
+		var q string
+		switch view := req.Form.Get("view"); view {
+		case "imports", "importers":
+			q = view
+		case "import-graph":
+			if req.Form.Get("hide") == "1" {
+				q = "import-graph&hide=1"
+			} else {
+				q = "import-graph"
+			}
+		}
+		if q != "" {
+			u := *req.URL
+			u.RawQuery = q
+			http.Redirect(resp, req, u.String(), http.StatusMovedPermanently)
+			return nil
+		}
+	}
+	return &httpError{status: http.StatusNotFound}
+}
+
+// serveRefresh handles the refresh form: it re-crawls the package (bounded
+// by -get_timeout) and redirects back to the package page, recording any
+// failure or redirect as a flash message.
+func serveRefresh(resp http.ResponseWriter, req *http.Request) error {
+	importPath := req.Form.Get("path")
+	_, pkgs, _, err := db.Get(importPath)
+	if err != nil {
+		return err
+	}
+	c := make(chan error, 1)
+	go func() {
+		_, err := crawlDoc("rfrsh", importPath, nil, len(pkgs) > 0, time.Time{})
+		c <- err
+	}()
+	select {
+	case err = <-c:
+	case <-time.After(*getTimeout):
+		err = errUpdateTimeout
+	}
+	if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
+		setFlashMessages(resp, []flashMessage{{ID: "redir", Args: []string{importPath}}})
+		importPath = e.Redirect
+		err = nil
+	} else if err != nil {
+		setFlashMessages(resp, []flashMessage{{ID: "refresh", Args: []string{errorText(err)}}})
+	}
+	http.Redirect(resp, req, "/"+importPath, http.StatusFound)
+	return nil
+}
+
+// serveGoIndex renders the standard-library package index.
+func serveGoIndex(resp http.ResponseWriter, req *http.Request) error {
+	pkgs, err := db.GoIndex()
+	if err != nil {
+		return err
+	}
+	return executeTemplate(resp, "std.html", http.StatusOK, nil, map[string]interface{}{
+		"pkgs": pkgs,
+	})
+}
+
+// serveGoSubrepoIndex renders the index of Go sub-repository packages.
+func serveGoSubrepoIndex(resp http.ResponseWriter, req *http.Request) error {
+	pkgs, err := db.GoSubrepoIndex()
+	if err != nil {
+		return err
+	}
+	return executeTemplate(resp, "subrepo.html", http.StatusOK, nil, map[string]interface{}{
+		"pkgs": pkgs,
+	})
+}
+
+// serveIndex renders the full package index.
+func serveIndex(resp http.ResponseWriter, req *http.Request) error {
+	pkgs, err := db.Index()
+	if err != nil {
+		return err
+	}
+	return executeTemplate(resp, "index.html", http.StatusOK, nil, map[string]interface{}{
+		"pkgs": pkgs,
+	})
+}
+
+// byPath sorts pkgs by import path, keeping the parallel rank slice in sync.
+type byPath struct {
+	pkgs []database.Package
+	rank []int
+}
+
+func (bp *byPath) Len() int           { return len(bp.pkgs) }
+func (bp *byPath) Less(i, j int) bool { return bp.pkgs[i].Path < bp.pkgs[j].Path }
+func (bp *byPath) Swap(i, j int) {
+	bp.pkgs[i], bp.pkgs[j] = bp.pkgs[j], bp.pkgs[i]
+	bp.rank[i], bp.rank[j] = bp.rank[j], bp.rank[i]
+}
+
+// byRank sorts pkgs by rank, keeping the parallel rank slice in sync.
+type byRank struct {
+	pkgs []database.Package
+	rank []int
+}
+
+func (br *byRank) Len() int           { return len(br.pkgs) }
+func (br *byRank) Less(i, j int) bool { return br.rank[i] < br.rank[j] }
+func (br *byRank) Swap(i, j int) {
+	br.pkgs[i], br.pkgs[j] = br.pkgs[j], br.pkgs[i]
+	br.rank[i], br.rank[j] = br.rank[j], br.rank[i]
+}
+
+// popular returns up to 25 popular packages for the home page. It merges
+// each package's path-prefix sub-packages into it (keeping the higher rank
+// value), trims to the top n by rank, and finally orders the result by path
+// for display.
+func popular() ([]database.Package, error) {
+	const n = 25
+
+	// Fetch extra entries so merging sub-packages still leaves enough.
+	pkgs, err := db.Popular(2 * n)
+	if err != nil {
+		return nil, err
+	}
+
+	// rank[i] records each package's original position in the popular list.
+	rank := make([]int, len(pkgs))
+	for i := range pkgs {
+		rank[i] = i
+	}
+
+	sort.Sort(&byPath{pkgs, rank})
+
+	// Collapse entries that are sub-paths of the previously kept entry.
+	j := 0
+	prev := "."
+	for i, pkg := range pkgs {
+		if strings.HasPrefix(pkg.Path, prev) {
+			if rank[j-1] < rank[i] {
+				rank[j-1] = rank[i]
+			}
+			continue
+		}
+		prev = pkg.Path + "/"
+		pkgs[j] = pkg
+		rank[j] = rank[i]
+		j++
+	}
+	pkgs = pkgs[:j]
+
+	sort.Sort(&byRank{pkgs, rank})
+
+	if len(pkgs) > n {
+		pkgs = pkgs[:n]
+	}
+
+	sort.Sort(&byPath{pkgs, rank})
+
+	return pkgs, nil
+}
+
+// serveHome serves the home page: the popular-package list for an empty
+// query, a redirect when the query resolves to a package (or a recognized
+// source-browser URL), and full-text search results otherwise. Paths other
+// than "/" are delegated to servePackage.
+func serveHome(resp http.ResponseWriter, req *http.Request) error {
+	if req.URL.Path != "/" {
+		return servePackage(resp, req)
+	}
+
+	q := strings.TrimSpace(req.Form.Get("q"))
+	if q == "" {
+		pkgs, err := popular()
+		if err != nil {
+			return err
+		}
+
+		return executeTemplate(resp, "home"+templateExt(req), http.StatusOK, nil,
+			map[string]interface{}{"Popular": pkgs})
+	}
+
+	// A pasted source-browser URL is translated to its import path.
+	if path, ok := isBrowseURL(q); ok {
+		q = path
+	}
+
+	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
+		pdoc, pkgs, err := getDoc(q, queryRequest)
+		if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
+			http.Redirect(resp, req, "/"+e.Redirect, http.StatusFound)
+			return nil
+		}
+		if err == nil && (pdoc != nil || len(pkgs) > 0) {
+			http.Redirect(resp, req, "/"+q, http.StatusFound)
+			return nil
+		}
+	}
+
+	pkgs, err := db.Query(q)
+	if err != nil {
+		return err
+	}
+
+	return executeTemplate(resp, "results"+templateExt(req), http.StatusOK, nil,
+		map[string]interface{}{"q": q, "pkgs": pkgs})
+}
+
+// serveAbout renders the about page.
+func serveAbout(resp http.ResponseWriter, req *http.Request) error {
+	return executeTemplate(resp, "about.html", http.StatusOK, nil,
+		map[string]interface{}{"Host": req.Host})
+}
+
+// serveBot renders the page describing this site's crawler.
+func serveBot(resp http.ResponseWriter, req *http.Request) error {
+	return executeTemplate(resp, "bot.html", http.StatusOK, nil, nil)
+}
+
+// logError logs a request-handling error; rv, when non-nil, is a recovered
+// panic value and triggers a stack-trace dump.
+func logError(req *http.Request, err error, rv interface{}) {
+	if err != nil {
+		var buf bytes.Buffer
+		fmt.Fprintf(&buf, "Error serving %s: %v\n", req.URL, err)
+		if rv != nil {
+			fmt.Fprintln(&buf, rv)
+			buf.Write(debug.Stack())
+		}
+		log.Print(buf.String())
+	}
+}
+
+// serveAPISearch serves the JSON search API: an exact import-path match when
+// the query looks like one, otherwise full-text search results.
+func serveAPISearch(resp http.ResponseWriter, req *http.Request) error {
+	q := strings.TrimSpace(req.Form.Get("q"))
+
+	var pkgs []database.Package
+
+	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
+		pdoc, _, err := getDoc(q, robotRequest)
+		if err == nil && pdoc != nil {
+			pkgs = []database.Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
+		}
+	}
+
+	if pkgs == nil {
+		var err error
+		pkgs, err = db.Query(q)
+		if err != nil {
+			return err
+		}
+	}
+
+	var data = struct {
+		Results []database.Package `json:"results"`
+	}{
+		pkgs,
+	}
+	resp.Header().Set("Content-Type", jsonMIMEType)
+	return json.NewEncoder(resp).Encode(&data)
+}
+
+// serveAPIPackages serves the full package list as JSON.
+func serveAPIPackages(resp http.ResponseWriter, req *http.Request) error {
+	pkgs, err := db.AllPackages()
+	if err != nil {
+		return err
+	}
+	data := struct {
+		Results []database.Package `json:"results"`
+	}{
+		pkgs,
+	}
+	resp.Header().Set("Content-Type", jsonMIMEType)
+	return json.NewEncoder(resp).Encode(&data)
+}
+
+// serveAPIImporters serves the JSON list of packages importing the package
+// named by the /importers/<importPath> URL.
+func serveAPIImporters(resp http.ResponseWriter, req *http.Request) error {
+	importPath := strings.TrimPrefix(req.URL.Path, "/importers/")
+	pkgs, err := db.Importers(importPath)
+	if err != nil {
+		return err
+	}
+	data := struct {
+		Results []database.Package `json:"results"`
+	}{
+		pkgs,
+	}
+	resp.Header().Set("Content-Type", jsonMIMEType)
+	return json.NewEncoder(resp).Encode(&data)
+}
+
+// serveAPIImports serves the JSON lists of imports and test imports for the
+// package named by the /imports/<importPath> URL.
+func serveAPIImports(resp http.ResponseWriter, req *http.Request) error {
+	importPath := strings.TrimPrefix(req.URL.Path, "/imports/")
+	pdoc, _, err := getDoc(importPath, robotRequest)
+	if err != nil {
+		return err
+	}
+	if pdoc == nil || pdoc.Name == "" {
+		return &httpError{status: http.StatusNotFound}
+	}
+	imports, err := db.Packages(pdoc.Imports)
+	if err != nil {
+		return err
+	}
+	testImports, err := db.Packages(pdoc.TestImports)
+	if err != nil {
+		return err
+	}
+	data := struct {
+		Imports     []database.Package `json:"imports"`
+		TestImports []database.Package `json:"testImports"`
+	}{
+		imports,
+		testImports,
+	}
+	resp.Header().Set("Content-Type", jsonMIMEType)
+	return json.NewEncoder(resp).Encode(&data)
+}
+
+// serveAPIHome rejects requests to the API root with a 404.
+func serveAPIHome(resp http.ResponseWriter, req *http.Request) error {
+	return &httpError{status: http.StatusNotFound}
+}
+
+// runHandler wraps a handler with panic recovery, X-Real-Ip handling for a
+// local reverse proxy, request-body limiting, form parsing, response
+// buffering, and uniform error rendering via errfn.
+func runHandler(resp http.ResponseWriter, req *http.Request,
+	fn func(resp http.ResponseWriter, req *http.Request) error, errfn httputil.Error) {
+	defer func() {
+		if rv := recover(); rv != nil {
+			err := errors.New("handler panic")
+			logError(req, err, rv)
+			errfn(resp, req, http.StatusInternalServerError, err)
+		}
+	}()
+
+	// Trust X-Real-Ip only when the immediate peer is the local proxy.
+	if s := req.Header.Get("X-Real-Ip"); s != "" && httputil.StripPort(req.RemoteAddr) == "127.0.0.1" {
+		req.RemoteAddr = s
+	}
+
+	req.Body = http.MaxBytesReader(resp, req.Body, 2048)
+	req.ParseForm()
+	// Buffer the response so nothing is written to the client on error.
+	var rb httputil.ResponseBuffer
+	err := fn(&rb, req)
+	if err == nil {
+		rb.WriteTo(resp)
+	} else if e, ok := err.(*httpError); ok {
+		if e.status >= 500 {
+			logError(req, err, nil)
+		}
+		errfn(resp, req, e.status, e.err)
+	} else if gosrc.IsNotFound(err) {
+		errfn(resp, req, http.StatusNotFound, nil)
+	} else {
+		logError(req, err, nil)
+		errfn(resp, req, http.StatusInternalServerError, err)
+	}
+}
+
+// handler adapts an error-returning function to http.Handler using the HTML
+// error renderer.
+type handler func(resp http.ResponseWriter, req *http.Request) error
+
+func (h handler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
+	runHandler(resp, req, h, handleError)
+}
+
+// apiHandler adapts an error-returning function to http.Handler using the
+// JSON error renderer.
+type apiHandler func(resp http.ResponseWriter, req *http.Request) error
+
+func (h apiHandler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
+	runHandler(resp, req, h, handleAPIError)
+}
+
+func errorText(err error) string {
+	if err == errUpdateTimeout {
+		return "Timeout getting package files from the version control system."
+	}
+	if e, ok := err.(*gosrc.RemoteError); ok {
+		return "Error getting package files from " + e.Host + "."
+	}
+	return "Internal server error."
+}
+
+// handleError renders an error response for non-API requests. Not-found
+// errors get the HTML/text "notfound" template; all other statuses get a
+// plain-text body describing the failure.
+func handleError(resp http.ResponseWriter, req *http.Request, status int, err error) {
+	switch status {
+	case http.StatusNotFound:
+		executeTemplate(resp, "notfound"+templateExt(req), status, nil, nil)
+	default:
+		resp.Header().Set("Content-Type", textMIMEType)
+		// Report the caller-supplied status instead of unconditionally
+		// replying 500.
+		resp.WriteHeader(status)
+		io.WriteString(resp, errorText(err))
+	}
+}
+
+// handleAPIError writes a JSON error document whose message is the standard
+// text for the status; the err argument is not included in the response.
+func handleAPIError(resp http.ResponseWriter, req *http.Request, status int, err error) {
+	var data struct {
+		Error struct {
+			Message string `json:"message"`
+		} `json:"error"`
+	}
+	data.Error.Message = http.StatusText(status)
+	resp.Header().Set("Content-Type", jsonMIMEType)
+	resp.WriteHeader(status)
+	json.NewEncoder(resp).Encode(&data)
+}
+
+// hostMux dispatches on a prefix of the request's Host header.
+// NOTE(review): if no prefix matches, h stays nil and ServeHTTP panics —
+// confirm the last entry is always a catch-all (empty prefix).
+type hostMux []struct {
+	prefix string
+	h      http.Handler
+}
+
+func (m hostMux) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
+	var h http.Handler
+	for _, ph := range m {
+		if strings.HasPrefix(req.Host, ph.prefix) {
+			h = ph.h
+			break
+		}
+	}
+	h.ServeHTTP(resp, req)
+}
+
+// defaultBase locates the directory of the given package in the local Go
+// workspace, falling back to the current directory.
+func defaultBase(path string) string {
+	p, err := build.Default.Import(path, "", build.FindOnly)
+	if err != nil {
+		return "."
+	}
+	return p.Dir
+}
+
+// Server-wide state initialized in main.
+var (
+	db                    *database.Database
+	statusImageHandlerPNG http.Handler
+	statusImageHandlerSVG http.Handler
+)
+
+// Command-line flags and optional build-time configuration.
+// gitHubCredentials may be set by an init function in config.go (see
+// config.go.template); userAgent is presumably set the same way — confirm.
+var (
+	robot             = flag.Float64("robot", 100, "Request counter threshold for robots.")
+	assetsDir         = flag.String("assets", filepath.Join(defaultBase("github.com/golang/gddo/gddo-server"), "assets"), "Base directory for templates and static files.")
+	getTimeout        = flag.Duration("get_timeout", 8*time.Second, "Time to wait for package update from the VCS.")
+	firstGetTimeout   = flag.Duration("first_get_timeout", 5*time.Second, "Time to wait for first fetch of package from the VCS.")
+	maxAge            = flag.Duration("max_age", 24*time.Hour, "Update package documents older than this age.")
+	httpAddr          = flag.String("http", ":8080", "Listen for HTTP connections on this address.")
+	sidebarEnabled    = flag.Bool("sidebar", false, "Enable package page sidebar.")
+	defaultGOOS       = flag.String("default_goos", "", "Default GOOS to use when building package documents.")
+	gitHubCredentials = ""
+	userAgent         = ""
+)
+
// main wires up the gddo web server: it parses the HTML and text
// templates, opens the package database, starts the background tasks,
// and serves two virtual hosts — the JSON API on "api." host prefixes
// and the main site on everything else.
func main() {
	flag.Parse()
	doc.SetDefaultGOOS(*defaultGOOS)
	log.Printf("Starting server, os.Args=%s", strings.Join(os.Args, " "))

	// Each set is one page: the first file names the resulting template;
	// the remaining files are shared layout/helper templates.
	if err := parseHTMLTemplates([][]string{
		{"about.html", "common.html", "layout.html"},
		{"bot.html", "common.html", "layout.html"},
		{"cmd.html", "common.html", "layout.html"},
		{"dir.html", "common.html", "layout.html"},
		{"home.html", "common.html", "layout.html"},
		{"importers.html", "common.html", "layout.html"},
		{"importers_robot.html", "common.html", "layout.html"},
		{"imports.html", "common.html", "layout.html"},
		{"index.html", "common.html", "layout.html"},
		{"notfound.html", "common.html", "layout.html"},
		{"pkg.html", "common.html", "layout.html"},
		{"results.html", "common.html", "layout.html"},
		{"tools.html", "common.html", "layout.html"},
		{"std.html", "common.html", "layout.html"},
		{"subrepo.html", "common.html", "layout.html"},
		{"graph.html", "common.html"},
	}); err != nil {
		log.Fatal(err)
	}

	// Plain-text variants of the pages (served based on templateExt).
	if err := parseTextTemplates([][]string{
		{"cmd.txt", "common.txt"},
		{"dir.txt", "common.txt"},
		{"home.txt", "common.txt"},
		{"notfound.txt", "common.txt"},
		{"pkg.txt", "common.txt"},
		{"results.txt", "common.txt"},
	}); err != nil {
		log.Fatal(err)
	}

	var err error
	db, err = database.New()
	if err != nil {
		log.Fatalf("Error opening database: %v", err)
	}

	// Runs for the life of the process (defined elsewhere in this package).
	go runBackgroundTasks()

	staticServer := httputil.StaticServer{
		Dir:    *assetsDir,
		MaxAge: time.Hour,
		MIMETypes: map[string]string{
			".css": "text/css; charset=utf-8",
			".js":  "text/javascript; charset=utf-8",
		},
	}
	statusImageHandlerPNG = staticServer.FileHandler("status.png")
	statusImageHandlerSVG = staticServer.FileHandler("status.svg")

	// API virtual host. Note that it serves a different robots.txt asset
	// ("apiRobots.txt") than the main site.
	apiMux := http.NewServeMux()
	apiMux.Handle("/favicon.ico", staticServer.FileHandler("favicon.ico"))
	apiMux.Handle("/google3d2f3cd4cc2bb44b.html", staticServer.FileHandler("google3d2f3cd4cc2bb44b.html"))
	apiMux.Handle("/humans.txt", staticServer.FileHandler("humans.txt"))
	apiMux.Handle("/robots.txt", staticServer.FileHandler("apiRobots.txt"))
	apiMux.Handle("/search", apiHandler(serveAPISearch))
	apiMux.Handle("/packages", apiHandler(serveAPIPackages))
	apiMux.Handle("/importers/", apiHandler(serveAPIImporters))
	apiMux.Handle("/imports/", apiHandler(serveAPIImports))
	apiMux.Handle("/", apiHandler(serveAPIHome))

	// Main site.
	mux := http.NewServeMux()
	mux.Handle("/-/site.js", staticServer.FilesHandler(
		"third_party/jquery.timeago.js",
		"site.js"))
	mux.Handle("/-/site.css", staticServer.FilesHandler("site.css"))
	if *sidebarEnabled {
		mux.Handle("/-/sidebar.css", staticServer.FilesHandler("sidebar.css"))
	}

	mux.Handle("/-/about", handler(serveAbout))
	mux.Handle("/-/bot", handler(serveBot))
	mux.Handle("/-/go", handler(serveGoIndex))
	mux.Handle("/-/subrepo", handler(serveGoSubrepoIndex))
	mux.Handle("/-/index", handler(serveIndex))
	mux.Handle("/-/refresh", handler(serveRefresh))
	mux.Handle("/a/index", http.RedirectHandler("/-/index", http.StatusMovedPermanently))
	mux.Handle("/about", http.RedirectHandler("/-/about", http.StatusMovedPermanently))
	mux.Handle("/favicon.ico", staticServer.FileHandler("favicon.ico"))
	mux.Handle("/google3d2f3cd4cc2bb44b.html", staticServer.FileHandler("google3d2f3cd4cc2bb44b.html"))
	mux.Handle("/humans.txt", staticServer.FileHandler("humans.txt"))
	mux.Handle("/robots.txt", staticServer.FileHandler("robots.txt"))
	mux.Handle("/BingSiteAuth.xml", staticServer.FileHandler("BingSiteAuth.xml"))
	mux.Handle("/C", http.RedirectHandler("http://golang.org/doc/articles/c_go_cgo.html", http.StatusMovedPermanently))
	mux.Handle("/ajax.googleapis.com/", http.NotFoundHandler())
	mux.Handle("/", handler(serveHome))

	cacheBusters.Handler = mux

	// Route by Host header: "api." prefixes go to the API, everything
	// else falls through to the main site (the "" prefix always matches).
	if err := http.ListenAndServe(*httpAddr, hostMux{{"api.", apiMux}, {"", mux}}); err != nil {
		log.Fatal(err)
	}
}
diff --git a/gddo-server/main_test.go b/gddo-server/main_test.go
new file mode 100644
index 0000000..ec6c9ec
--- /dev/null
+++ b/gddo-server/main_test.go
@@ -0,0 +1,33 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"net/http"
+	"testing"
+)
+
// robotTests lists User-Agent strings that isRobot must classify as
// robots (crawlers, bots, and non-browser HTTP libraries).
var robotTests = []string{
	"Mozilla/5.0 (compatible; TweetedTimes Bot/1.0; +http://tweetedtimes.com)",
	"Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)",
	"Mozilla/5.0 (compatible; MJ12bot/v1.4.3; http://www.majestic12.co.uk/bot.php?+)",
	"Go 1.1 package http",
	"Java/1.7.0_25	0.003	0.003",
	"Python-urllib/2.6",
	"Mozilla/5.0 (compatible; archive.org_bot +http://www.archive.org/details/archive.org_bot)",
	"Mozilla/5.0 (compatible; Ezooms/1.0; ezooms.bot@gmail.com)",
	"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
}
+
+func TestRobots(t *testing.T) {
+	for _, tt := range robotTests {
+		req := http.Request{Header: http.Header{"User-Agent": {tt}}}
+		if !isRobot(&req) {
+			t.Errorf("%s not a robot", tt)
+		}
+	}
+}
diff --git a/gddo-server/play.go b/gddo-server/play.go
new file mode 100644
index 0000000..a957ae1
--- /dev/null
+++ b/gddo-server/play.go
@@ -0,0 +1,76 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"regexp"
+	"strings"
+
+	"github.com/golang/gddo/doc"
+)
+
+func findExamples(pdoc *doc.Package, export, method string) []*doc.Example {
+	if "package" == export {
+		return pdoc.Examples
+	}
+	for _, f := range pdoc.Funcs {
+		if f.Name == export {
+			return f.Examples
+		}
+	}
+	for _, t := range pdoc.Types {
+		for _, f := range t.Funcs {
+			if f.Name == export {
+				return f.Examples
+			}
+		}
+		if t.Name == export {
+			if method == "" {
+				return t.Examples
+			}
+			for _, m := range t.Methods {
+				if method == m.Name {
+					return m.Examples
+				}
+			}
+			return nil
+		}
+	}
+	return nil
+}
+
+func findExample(pdoc *doc.Package, export, method, name string) *doc.Example {
+	for _, e := range findExamples(pdoc, export, method) {
+		if name == e.Name {
+			return e
+		}
+	}
+	return nil
+}
+
// exampleIDPat splits an example fragment identifier of the form
// "export", "export-method", or "export-method-name" into its parts.
var exampleIDPat = regexp.MustCompile(`([^-]+)(?:-([^-]*)(?:-(.*))?)?`)
+
+func playURL(pdoc *doc.Package, id string) (string, error) {
+	if m := exampleIDPat.FindStringSubmatch(id); m != nil {
+		if e := findExample(pdoc, m[1], m[2], m[3]); e != nil && e.Play != "" {
+			resp, err := httpClient.Post("http://play.golang.org/share", "text/plain", strings.NewReader(e.Play))
+			if err != nil {
+				return "", err
+			}
+			defer resp.Body.Close()
+			p, err := ioutil.ReadAll(resp.Body)
+			if err != nil {
+				return "", err
+			}
+			return fmt.Sprintf("http://play.golang.org/p/%s", p), nil
+		}
+	}
+	return "", &httpError{status: http.StatusNotFound}
+}
diff --git a/gddo-server/template.go b/gddo-server/template.go
new file mode 100644
index 0000000..d877518
--- /dev/null
+++ b/gddo-server/template.go
@@ -0,0 +1,515 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"bytes"
+	"encoding/base64"
+	"errors"
+	"fmt"
+	godoc "go/doc"
+	htemp "html/template"
+	"io"
+	"net/http"
+	"net/url"
+	"path"
+	"path/filepath"
+	"reflect"
+	"regexp"
+	"sort"
+	"strings"
+	ttemp "text/template"
+	"time"
+
+	"github.com/golang/gddo/doc"
+	"github.com/golang/gddo/gosrc"
+	"github.com/golang/gddo/httputil"
+)
+
// cacheBusters appends content-version query parameters to static asset
// paths (used by the staticPath template function); its Handler is set
// to the site mux in main.
var cacheBusters httputil.CacheBusters
+
+type flashMessage struct {
+	ID   string
+	Args []string
+}
+
+// getFlashMessages retrieves flash messages from the request and clears the flash cookie if needed.
+func getFlashMessages(resp http.ResponseWriter, req *http.Request) []flashMessage {
+	c, err := req.Cookie("flash")
+	if err == http.ErrNoCookie {
+		return nil
+	}
+	http.SetCookie(resp, &http.Cookie{Name: "flash", Path: "/", MaxAge: -1, Expires: time.Now().Add(-100 * 24 * time.Hour)})
+	if err != nil {
+		return nil
+	}
+	p, err := base64.URLEncoding.DecodeString(c.Value)
+	if err != nil {
+		return nil
+	}
+	var messages []flashMessage
+	for _, s := range strings.Split(string(p), "\000") {
+		idArgs := strings.Split(s, "\001")
+		messages = append(messages, flashMessage{ID: idArgs[0], Args: idArgs[1:]})
+	}
+	return messages
+}
+
+// setFlashMessages sets a cookie with the given flash messages.
+func setFlashMessages(resp http.ResponseWriter, messages []flashMessage) {
+	var buf []byte
+	for i, message := range messages {
+		if i > 0 {
+			buf = append(buf, '\000')
+		}
+		buf = append(buf, message.ID...)
+		for _, arg := range message.Args {
+			buf = append(buf, '\001')
+			buf = append(buf, arg...)
+		}
+	}
+	value := base64.URLEncoding.EncodeToString(buf)
+	http.SetCookie(resp, &http.Cookie{Name: "flash", Value: value, Path: "/"})
+}
+
// tdoc wraps doc.Package with the helper methods used by the page
// templates, and memoizes the flattened example list.
type tdoc struct {
	*doc.Package
	allExamples []*texample
}

// texample pairs an example with the display label and anchor ID used
// on the package page; obj is the value (package, func, type, or
// method) the example belongs to.
type texample struct {
	ID      string
	Label   string
	Example *doc.Example
	obj     interface{}
}

// newTDoc wraps pdoc for rendering by the templates.
func newTDoc(pdoc *doc.Package) *tdoc {
	return &tdoc{Package: pdoc}
}
+
+func (pdoc *tdoc) SourceLink(pos doc.Pos, text string, textOnlyOK bool) htemp.HTML {
+	if pos.Line == 0 || pdoc.LineFmt == "" || pdoc.Files[pos.File].URL == "" {
+		if textOnlyOK {
+			return htemp.HTML(htemp.HTMLEscapeString(text))
+		} else {
+			return ""
+		}
+	}
+	return htemp.HTML(fmt.Sprintf(`<a title="View Source" href="%s">%s</a>`,
+		htemp.HTMLEscapeString(fmt.Sprintf(pdoc.LineFmt, pdoc.Files[pos.File].URL, pos.Line)),
+		htemp.HTMLEscapeString(text)))
+}
+
+func (pdoc *tdoc) PageName() string {
+	if pdoc.Name != "" && !pdoc.IsCmd {
+		return pdoc.Name
+	}
+	_, name := path.Split(pdoc.ImportPath)
+	return name
+}
+
+func (pdoc *tdoc) addExamples(obj interface{}, export, method string, examples []*doc.Example) {
+	label := export
+	id := export
+	if method != "" {
+		label += "." + method
+		id += "-" + method
+	}
+	for _, e := range examples {
+		te := &texample{Label: label, ID: id, Example: e, obj: obj}
+		if e.Name != "" {
+			te.Label += " (" + e.Name + ")"
+			if method == "" {
+				te.ID += "-"
+			}
+			te.ID += "-" + e.Name
+		}
+		pdoc.allExamples = append(pdoc.allExamples, te)
+	}
+}
+
// byExampleID sorts examples by their anchor ID for stable page output.
type byExampleID []*texample

func (e byExampleID) Len() int           { return len(e) }
func (e byExampleID) Less(i, j int) bool { return e[i].ID < e[j].ID }
func (e byExampleID) Swap(i, j int)      { e[i], e[j] = e[j], e[i] }
+
// AllExamples returns every example in the package (package-level,
// functions, types, constructors, and methods), sorted by anchor ID.
// The result is memoized; the non-nil empty slice distinguishes
// "computed, none found" from "not yet computed".
func (pdoc *tdoc) AllExamples() []*texample {
	if pdoc.allExamples != nil {
		return pdoc.allExamples
	}
	pdoc.allExamples = make([]*texample, 0)
	pdoc.addExamples(pdoc, "package", "", pdoc.Examples)
	for _, f := range pdoc.Funcs {
		pdoc.addExamples(f, f.Name, "", f.Examples)
	}
	for _, t := range pdoc.Types {
		pdoc.addExamples(t, t.Name, "", t.Examples)
		for _, f := range t.Funcs {
			pdoc.addExamples(f, f.Name, "", f.Examples)
		}
		for _, m := range t.Methods {
			if len(m.Examples) > 0 {
				pdoc.addExamples(m, t.Name, m.Name, m.Examples)
			}
		}
	}
	sort.Sort(byExampleID(pdoc.allExamples))
	return pdoc.allExamples
}
+
+func (pdoc *tdoc) ObjExamples(obj interface{}) []*texample {
+	var examples []*texample
+	for _, e := range pdoc.allExamples {
+		if e.obj == obj {
+			examples = append(examples, e)
+		}
+	}
+	return examples
+}
+
// Breadcrumbs renders the import path as a "/"-separated breadcrumb
// trail. Every segment links to its path prefix except the final
// segment on package/command/directory pages, which is shown as muted
// text. i and j delimit the current segment [i:j) of ImportPath.
func (pdoc *tdoc) Breadcrumbs(templateName string) htemp.HTML {
	if !strings.HasPrefix(pdoc.ImportPath, pdoc.ProjectRoot) {
		return ""
	}
	var buf bytes.Buffer
	i := 0
	j := len(pdoc.ProjectRoot)
	// With no project root, the first segment ends at the first "/"
	// (or the whole path if there is none).
	if j == 0 {
		j = strings.IndexRune(pdoc.ImportPath, '/')
		if j < 0 {
			j = len(pdoc.ImportPath)
		}
	}
	for {
		if i != 0 {
			buf.WriteString(`<span class="text-muted">/</span>`)
		}
		// The last segment is only a link on pages that are not the
		// package/command/directory page itself.
		link := j < len(pdoc.ImportPath) ||
			(templateName != "dir.html" && templateName != "cmd.html" && templateName != "pkg.html")
		if link {
			buf.WriteString(`<a href="`)
			buf.WriteString(formatPathFrag(pdoc.ImportPath[:j], ""))
			buf.WriteString(`">`)
		} else {
			buf.WriteString(`<span class="text-muted">`)
		}
		buf.WriteString(htemp.HTMLEscapeString(pdoc.ImportPath[i:j]))
		if link {
			buf.WriteString("</a>")
		} else {
			buf.WriteString("</span>")
		}
		// Advance past the "/" to the start of the next segment.
		i = j + 1
		if i >= len(pdoc.ImportPath) {
			break
		}
		j = strings.IndexRune(pdoc.ImportPath[i:], '/')
		if j < 0 {
			j = len(pdoc.ImportPath)
		} else {
			j += i
		}
	}
	return htemp.HTML(buf.String())
}
+
// formatPathFrag builds a root-relative URL "/path#fragment" with
// proper escaping, adding a leading slash to non-empty relative paths.
func formatPathFrag(path, fragment string) string {
	if path != "" && path[0] != '/' {
		path = "/" + path
	}
	u := url.URL{Path: path, Fragment: fragment}
	return u.String()
}
+
// hostFn extracts the host component of a URL for display, returning
// "" when the URL does not parse.
func hostFn(urlStr string) string {
	if u, err := url.Parse(urlStr); err == nil {
		return u.Host
	}
	return ""
}
+
// mapFn builds a map from alternating key/value arguments so templates
// can pass structured data, e.g. {{template "x" (map "k" .V)}}.
// Keys (the even-index arguments) must be strings.
func mapFn(kvs ...interface{}) (map[string]interface{}, error) {
	if len(kvs)%2 != 0 {
		return nil, errors.New("map requires even number of arguments")
	}
	result := make(map[string]interface{}, len(kvs)/2)
	for i := 0; i+1 < len(kvs); i += 2 {
		key, ok := kvs[i].(string)
		if !ok {
			return nil, errors.New("even args to map must be strings")
		}
		result[key] = kvs[i+1]
	}
	return result, nil
}
+
// relativePathFn returns path relative to parentPath when parentPath is
// a string and a path-segment prefix of path; otherwise it returns path
// unchanged.
//
// Fix: the original sliced path[len(p)+1:] whenever p was any string
// prefix, which panics when path == p (index out of range) and mangles
// paths where p is not a whole-segment prefix (e.g. p="foo",
// path="foobar" yielded "ar"). Requiring the "/" boundary fixes both.
func relativePathFn(path string, parentPath interface{}) string {
	if p, ok := parentPath.(string); ok && p != "" && strings.HasPrefix(path, p+"/") {
		return path[len(p)+1:]
	}
	return path
}
+
+// importPathFn formats an import with zero width space characters to allow for breaks.
+func importPathFn(path string) htemp.HTML {
+	path = htemp.HTMLEscapeString(path)
+	if len(path) > 45 {
+		// Allow long import paths to break following "/"
+		path = strings.Replace(path, "/", "/&#8203;", -1)
+	}
+	return htemp.HTML(path)
+}
+
// Patterns used by commentFn to post-process godoc HTML output.
var (
	// h3Pat matches the <h3> headings godoc emits for comment sections.
	h3Pat = regexp.MustCompile(`<h3 id="([^"]+)">([^<]+)</h3>`)
	// rfcPat matches "RFC nnnn" references with an optional
	// "Section x.y" suffix.
	rfcPat = regexp.MustCompile(`RFC\s+(\d{3,4})((,|)\s+[Ss]ection\s+(\d+)((\.\d+|)|)|)`)
	// packagePat matches "package <path>" mentions to be linkified.
	packagePat = regexp.MustCompile(`\s+package\s+([-a-z0-9]\S+)`)
)
+
+func replaceAll(src []byte, re *regexp.Regexp, replace func(out, src []byte, m []int) []byte) []byte {
+	var out []byte
+	for len(src) > 0 {
+		m := re.FindSubmatchIndex(src)
+		if m == nil {
+			break
+		}
+		out = append(out, src[:m[0]]...)
+		out = replace(out, src, m)
+		src = src[m[1]:]
+	}
+	if out == nil {
+		return src
+	}
+	return append(out, src...)
+}
+
// commentFn formats a source code comment as HTML: it renders the
// comment with godoc, then demotes <h3> headings to <h4> with permalink
// anchors, links RFC references to tools.ietf.org, and linkifies
// "package <path>" mentions. The m slice passed to each replace
// callback holds submatch index pairs: m[2k], m[2k+1] delimit group k.
func commentFn(v string) htemp.HTML {
	var buf bytes.Buffer
	godoc.ToHTML(&buf, v, nil)
	p := buf.Bytes()
	// Demote headings and append a pilcrow permalink.
	p = replaceAll(p, h3Pat, func(out, src []byte, m []int) []byte {
		out = append(out, `<h4 id="`...)
		out = append(out, src[m[2]:m[3]]...)
		out = append(out, `">`...)
		out = append(out, src[m[4]:m[5]]...)
		out = append(out, ` <a class="permalink" href="#`...)
		out = append(out, src[m[2]:m[3]]...)
		// NOTE(review): "&para" relies on legacy entity parsing without a
		// trailing semicolon; "&para;" would be safer — confirm intended.
		out = append(out, `">&para</a></h4>`...)
		return out
	})
	// Link RFC references, including an optional section fragment.
	p = replaceAll(p, rfcPat, func(out, src []byte, m []int) []byte {
		out = append(out, `<a href="http://tools.ietf.org/html/rfc`...)
		out = append(out, src[m[2]:m[3]]...)

		// If available, add major section fragment
		if m[6] != -1 {
			out = append(out, `#section-`...)
			out = append(out, src[m[8]:m[9]]...)

			// If available, add minor section fragment
			if m[13] != -1 {
				out = append(out, src[m[12]:m[13]]...)
			}
		}

		out = append(out, `">`...)
		out = append(out, src[m[0]:m[1]]...)
		out = append(out, `</a>`...)
		return out
	})
	// Linkify "package <path>" when the path is a valid import path;
	// trailing punctuation is excluded from the link.
	p = replaceAll(p, packagePat, func(out, src []byte, m []int) []byte {
		path := bytes.TrimRight(src[m[2]:m[3]], ".!?:")
		if !gosrc.IsValidPath(string(path)) {
			return append(out, src[m[0]:m[1]]...)
		}
		out = append(out, src[m[0]:m[2]]...)
		out = append(out, `<a href="/`...)
		out = append(out, path...)
		out = append(out, `">`...)
		out = append(out, path...)
		out = append(out, `</a>`...)
		out = append(out, src[m[2]+len(path):m[1]]...)
		return out
	})
	return htemp.HTML(p)
}
+
// commentTextFn formats a doc comment as wrapped plain text, indented
// by four spaces and wrapped to fit an 80-column terminal.
func commentTextFn(v string) string {
	const indent = "    "
	var b bytes.Buffer
	godoc.ToText(&b, v, indent, "\t", 80-2*len(indent))
	return b.String()
}
+
// period is used to split qualified names (pkg.Name) in annotations.
var period = []byte{'.'}

// codeFn renders annotated source code c as HTML. Annotations carry
// byte offsets (Pos, End) into c.Text; the text between annotations is
// escaped verbatim, and each annotation becomes a link, comment span,
// or anchor. typ, when non-nil, prefixes anchor IDs with the type name
// so method/field anchors are unique within the page.
func codeFn(c doc.Code, typ *doc.Type) htemp.HTML {
	var buf bytes.Buffer
	last := 0
	src := []byte(c.Text)
	buf.WriteString("<pre>")
	for _, a := range c.Annotations {
		// Escape the plain text between the previous annotation and this one.
		htemp.HTMLEscape(&buf, src[last:a.Pos])
		switch a.Kind {
		case doc.PackageLinkAnnotation:
			// Link to the referenced package's page.
			buf.WriteString(`<a href="`)
			buf.WriteString(formatPathFrag(c.Paths[a.PathIndex], ""))
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</a>`)
		case doc.LinkAnnotation, doc.BuiltinAnnotation:
			// Link to an identifier: builtins go to the "builtin"
			// pseudo-package; otherwise use the annotation's path (if any)
			// with the unqualified name as the fragment.
			var p string
			if a.Kind == doc.BuiltinAnnotation {
				p = "builtin"
			} else if a.PathIndex >= 0 {
				p = c.Paths[a.PathIndex]
			}
			n := src[a.Pos:a.End]
			n = n[bytes.LastIndex(n, period)+1:]
			buf.WriteString(`<a href="`)
			buf.WriteString(formatPathFrag(p, string(n)))
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</a>`)
		case doc.CommentAnnotation:
			buf.WriteString(`<span class="com">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</span>`)
		case doc.AnchorAnnotation:
			// Emit an id'd span so declarations can be deep-linked.
			buf.WriteString(`<span id="`)
			if typ != nil {
				htemp.HTMLEscape(&buf, []byte(typ.Name))
				buf.WriteByte('.')
			}
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</span>`)
		default:
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
		}
		last = int(a.End)
	}
	htemp.HTMLEscape(&buf, src[last:])
	buf.WriteString("</pre>")
	return htemp.HTML(buf.String())
}
+
// isInterfacePat matches the start of an interface type declaration.
var isInterfacePat = regexp.MustCompile(`^type [^ ]+ interface`)

// isInterfaceFn reports whether t's declaration text declares an
// interface type.
func isInterfaceFn(t *doc.Type) bool {
	return isInterfacePat.MatchString(t.Decl.Text)
}
+
// gaAccount holds the Google Analytics account ID exposed to templates.
var gaAccount string

// gaAccountFn returns the Google Analytics account ID for templates.
func gaAccountFn() string {
	return gaAccount
}
+
// noteTitleFn normalizes a note marker such as "BUG" or "TODO" to title
// case ("Bug", "Todo") for display.
func noteTitleFn(s string) string {
	lowered := strings.ToLower(s)
	return strings.Title(lowered)
}
+
// htmlCommentFn wraps s in an HTML comment without escaping; callers
// must supply trusted content.
func htmlCommentFn(s string) htemp.HTML {
	comment := "<!-- " + s + " -->"
	return htemp.HTML(comment)
}
+
// mimeTypes maps a template file extension to the Content-Type used
// when executeTemplate serves it.
var mimeTypes = map[string]string{
	".html": htmlMIMEType,
	".txt":  textMIMEType,
}
+
+func executeTemplate(resp http.ResponseWriter, name string, status int, header http.Header, data interface{}) error {
+	for k, v := range header {
+		resp.Header()[k] = v
+	}
+	mimeType, ok := mimeTypes[path.Ext(name)]
+	if !ok {
+		mimeType = textMIMEType
+	}
+	resp.Header().Set("Content-Type", mimeType)
+	t := templates[name]
+	if t == nil {
+		return fmt.Errorf("template %s not found", name)
+	}
+	resp.WriteHeader(status)
+	if status == http.StatusNotModified {
+		return nil
+	}
+	return t.Execute(resp, data)
+}
+
// templates maps a page name (the first file of each template set) to
// its parsed ROOT template. The interface covers both html/template
// and text/template values.
var templates = map[string]interface {
	Execute(io.Writer, interface{}) error
}{}
+
+func joinTemplateDir(base string, files []string) []string {
+	result := make([]string, len(files))
+	for i := range files {
+		result[i] = filepath.Join(base, "templates", files[i])
+	}
+	return result
+}
+
// parseHTMLTemplates parses each set of HTML template files, installs
// the template helper functions, and stores each set's "ROOT" template
// in templates under the set's first file name.
func parseHTMLTemplates(sets [][]string) error {
	for _, set := range sets {
		// Captured by the templateName helper so templates can vary
		// output by page (e.g. Breadcrumbs).
		templateName := set[0]
		t := htemp.New("")
		t.Funcs(htemp.FuncMap{
			"code":              codeFn,
			"comment":           commentFn,
			"equal":             reflect.DeepEqual,
			"gaAccount":         gaAccountFn,
			"host":              hostFn,
			"htmlComment":       htmlCommentFn,
			"importPath":        importPathFn,
			"isInterface":       isInterfaceFn,
			"isValidImportPath": gosrc.IsValidPath,
			"map":               mapFn,
			"noteTitle":         noteTitleFn,
			"relativePath":      relativePathFn,
			"sidebarEnabled":    func() bool { return *sidebarEnabled },
			"staticPath":        func(p string) string { return cacheBusters.AppendQueryParam(p, "v") },
			"templateName":      func() string { return templateName },
		})
		if _, err := t.ParseFiles(joinTemplateDir(*assetsDir, set)...); err != nil {
			return err
		}
		t = t.Lookup("ROOT")
		if t == nil {
			return fmt.Errorf("ROOT template not found in %v", set)
		}
		templates[set[0]] = t
	}
	return nil
}
+
+func parseTextTemplates(sets [][]string) error {
+	for _, set := range sets {
+		t := ttemp.New("")
+		t.Funcs(ttemp.FuncMap{
+			"comment": commentTextFn,
+		})
+		if _, err := t.ParseFiles(joinTemplateDir(*assetsDir, set)...); err != nil {
+			return err
+		}
+		t = t.Lookup("ROOT")
+		if t == nil {
+			return fmt.Errorf("ROOT template not found in %v", set)
+		}
+		templates[set[0]] = t
+	}
+	return nil
+}
diff --git a/gddo-server/template_test.go b/gddo-server/template_test.go
new file mode 100644
index 0000000..0f98d5b
--- /dev/null
+++ b/gddo-server/template_test.go
@@ -0,0 +1,33 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package main
+
+import (
+	"net/http"
+	"net/http/httptest"
+	"reflect"
+	"strings"
+	"testing"
+)
+
// TestFlashMessages round-trips messages through setFlashMessages and
// getFlashMessages by replaying the Set-Cookie header as a request
// Cookie header.
func TestFlashMessages(t *testing.T) {
	resp := httptest.NewRecorder()

	expectedMessages := []flashMessage{
		{ID: "a", Args: []string{"one"}},
		{ID: "b", Args: []string{"two", "three"}},
		{ID: "c", Args: []string{}},
	}

	setFlashMessages(resp, expectedMessages)
	// Strip cookie attributes; only "name=value" goes in the request.
	req := &http.Request{Header: http.Header{"Cookie": {strings.Split(resp.Header().Get("Set-Cookie"), ";")[0]}}}

	actualMessages := getFlashMessages(resp, req)
	if !reflect.DeepEqual(actualMessages, expectedMessages) {
		t.Errorf("got messages %+v, want %+v", actualMessages, expectedMessages)
	}
}
diff --git a/gosrc/LICENSE b/gosrc/LICENSE
new file mode 100644
index 0000000..65d761b
--- /dev/null
+++ b/gosrc/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2013 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/gosrc/README.markdown b/gosrc/README.markdown
new file mode 100644
index 0000000..7dc2a8a
--- /dev/null
+++ b/gosrc/README.markdown
@@ -0,0 +1,9 @@
+Package gosrc fetches Go package source code from version control services.
+
+Contributions
+-------------
+Contributions to this project are welcome, though please send mail before
+starting work on anything major. Contributors retain their copyright, so we
+need you to fill out a short form before we can accept your contribution:
+https://developers.google.com/open-source/cla/individual
+
diff --git a/gosrc/bitbucket.go b/gosrc/bitbucket.go
new file mode 100644
index 0000000..437349c
--- /dev/null
+++ b/gosrc/bitbucket.go
@@ -0,0 +1,148 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"net/http"
+	"path"
+	"regexp"
+	"time"
+)
+
// init registers the bitbucket.org service with the gosrc dispatcher.
func init() {
	addService(&service{
		pattern: regexp.MustCompile(`^bitbucket\.org/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`),
		prefix:  "bitbucket.org/",
		get:     getBitbucketDir,
	})
}
+
// bitbucketEtagRe extracts the VCS ("hg" or "git") from a saved etag of
// the form "<vcs>-<commit>".
var bitbucketEtagRe = regexp.MustCompile(`^(hg|git)-`)
+
// bitbucketRepo is the subset of the Bitbucket 1.0 API repository
// document that this package reads.
type bitbucketRepo struct {
	Scm         string
	CreatedOn   string `json:"created_on"`
	LastUpdated string `json:"last_updated"`
	ForkOf      struct {
		Scm string
	} `json:"fork_of"`
}
+
+func getBitbucketDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
+	var repo *bitbucketRepo
+	c := &httpClient{client: client}
+
+	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
+		match["vcs"] = m[1]
+	} else {
+		repo, err := getBitbucketRepo(c, match)
+		if err != nil {
+			return nil, err
+		}
+
+		match["vcs"] = repo.Scm
+	}
+
+	tags := make(map[string]string)
+	for _, nodeType := range []string{"branches", "tags"} {
+		var nodes map[string]struct {
+			Node string
+		}
+		if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
+			return nil, err
+		}
+		for t, n := range nodes {
+			tags[t] = n.Node
+		}
+	}
+
+	var err error
+	match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
+	if err != nil {
+		return nil, err
+	}
+
+	etag := expand("{vcs}-{commit}", match)
+	if etag == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	if repo == nil {
+		repo, err = getBitbucketRepo(c, match)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	var contents struct {
+		Directories []string
+		Files       []struct {
+			Path string
+		}
+	}
+
+	if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &contents); err != nil {
+		return nil, err
+	}
+
+	var files []*File
+	var dataURLs []string
+
+	for _, f := range contents.Files {
+		_, name := path.Split(f.Path)
+		if isDocFile(name) {
+			files = append(files, &File{Name: name, BrowseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path)})
+			dataURLs = append(dataURLs, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path))
+		}
+	}
+
+	if err := c.getFiles(dataURLs, files); err != nil {
+		return nil, err
+	}
+
+	return &Directory{
+		BrowseURL:      expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match),
+		Etag:           etag,
+		Files:          files,
+		LineFmt:        "%s#cl-%d",
+		ProjectName:    match["repo"],
+		ProjectRoot:    expand("bitbucket.org/{owner}/{repo}", match),
+		ProjectURL:     expand("https://bitbucket.org/{owner}/{repo}/", match),
+		Subdirectories: contents.Directories,
+		VCS:            match["vcs"],
+		DeadEndFork:    isBitbucketDeadEndFork(repo),
+	}, nil
+}
+
+func getBitbucketRepo(c *httpClient, match map[string]string) (*bitbucketRepo, error) {
+	var repo bitbucketRepo
+	if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
+		return nil, err
+	}
+
+	return &repo, nil
+}
+
+func isBitbucketDeadEndFork(repo *bitbucketRepo) bool {
+	l := "2006-01-02T15:04:05.999999999"
+	created, err := time.Parse(l, repo.CreatedOn)
+	if err != nil {
+		return false
+	}
+
+	updated, err := time.Parse(l, repo.LastUpdated)
+	if err != nil {
+		return false
+	}
+
+	isDeadEndFork := false
+	if repo.ForkOf.Scm != "" && created.Unix() >= updated.Unix() {
+		isDeadEndFork = true
+	}
+
+	return isDeadEndFork
+}
diff --git a/gosrc/build.go b/gosrc/build.go
new file mode 100644
index 0000000..5ffc969
--- /dev/null
+++ b/gosrc/build.go
@@ -0,0 +1,62 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"bytes"
+	"go/build"
+	"io"
+	"io/ioutil"
+	"os"
+	"path"
+	"strings"
+	"time"
+)
+
+// Import returns details about the package in the directory.
+func (dir *Directory) Import(ctx *build.Context, mode build.ImportMode) (*build.Package, error) {
+	safeCopy := *ctx
+	ctx = &safeCopy
+	ctx.JoinPath = path.Join
+	ctx.IsAbsPath = path.IsAbs
+	ctx.SplitPathList = func(list string) []string { return strings.Split(list, ":") }
+	ctx.IsDir = func(path string) bool { return false }
+	ctx.HasSubdir = func(root, dir string) (rel string, ok bool) { return "", false }
+	ctx.ReadDir = dir.readDir
+	ctx.OpenFile = dir.openFile
+	return ctx.ImportDir(".", mode)
+}
+
// fileInfo adapts a *File to os.FileInfo for build.Context.ReadDir.
// Mode and ModTime are zero values; only Name and Size carry data.
type fileInfo struct{ f *File }

func (fi fileInfo) Name() string       { return fi.f.Name }
func (fi fileInfo) Size() int64        { return int64(len(fi.f.Data)) }
func (fi fileInfo) Mode() os.FileMode  { return 0 }
func (fi fileInfo) ModTime() time.Time { return time.Time{} }
func (fi fileInfo) IsDir() bool        { return false }
func (fi fileInfo) Sys() interface{}   { return nil }
+
+func (dir *Directory) readDir(name string) ([]os.FileInfo, error) {
+	if name != "." {
+		return nil, os.ErrNotExist
+	}
+	fis := make([]os.FileInfo, len(dir.Files))
+	for i, f := range dir.Files {
+		fis[i] = fileInfo{f}
+	}
+	return fis, nil
+}
+
+func (dir *Directory) openFile(path string) (io.ReadCloser, error) {
+	name := strings.TrimPrefix(path, "./")
+	for _, f := range dir.Files {
+		if f.Name == name {
+			return ioutil.NopCloser(bytes.NewReader(f.Data)), nil
+		}
+	}
+	return nil, os.ErrNotExist
+}
diff --git a/gosrc/client.go b/gosrc/client.go
new file mode 100644
index 0000000..be19e0f
--- /dev/null
+++ b/gosrc/client.go
@@ -0,0 +1,124 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+)
+
+// httpClient wraps an *http.Client with extra request headers and an
+// optional hook for converting error responses into gosrc error values.
+type httpClient struct {
+	errFn  func(*http.Response) error // optional; maps a non-200 response to an error
+	header http.Header                // copied onto every outgoing request
+	client *http.Client
+}
+
+// err converts a non-200 response into an error: 404 becomes a
+// NotFoundError, otherwise the client's errFn hook (when set) decides, and
+// failing that a generic RemoteError naming the host is returned.
+func (c *httpClient) err(resp *http.Response) error {
+	switch {
+	case resp.StatusCode == 404:
+		return NotFoundError{Message: "Resource not found: " + resp.Request.URL.String()}
+	case c.errFn != nil:
+		return c.errFn(resp)
+	default:
+		return &RemoteError{resp.Request.URL.Host, fmt.Errorf("%d: (%s)", resp.StatusCode, resp.Request.URL.String())}
+	}
+}
+
+// get issues a GET request for url with the client's configured headers
+// applied. Transport-level failures are wrapped in a RemoteError naming
+// the target host; the response is returned unexamined otherwise.
+func (c *httpClient) get(url string) (*http.Response, error) {
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		return nil, err
+	}
+	for key, values := range c.header {
+		req.Header[key] = values
+	}
+	resp, err := c.client.Do(req)
+	if err != nil {
+		return nil, &RemoteError{req.URL.Host, err}
+	}
+	return resp, nil
+}
+
+// getBytes fetches url and returns the entire response body. A non-200
+// status is converted to an error via c.err; the body is always closed.
+func (c *httpClient) getBytes(url string) ([]byte, error) {
+	resp, err := c.get(url)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+	if resp.StatusCode != 200 {
+		return nil, c.err(resp)
+	}
+	// Return ReadAll's results directly; no intermediate variables needed.
+	return ioutil.ReadAll(resp.Body)
+}
+
+// getReader fetches url and hands the response body to the caller, who is
+// responsible for closing it. On a non-200 status the body is closed here
+// and the status is converted to an error via c.err.
+func (c *httpClient) getReader(url string) (io.ReadCloser, error) {
+	resp, err := c.get(url)
+	if err != nil {
+		return nil, err
+	}
+	if resp.StatusCode == 200 {
+		return resp.Body, nil
+	}
+	statusErr := c.err(resp)
+	resp.Body.Close()
+	return nil, statusErr
+}
+
+// getJSON fetches url and decodes the response body as JSON into v. A
+// JSON syntax error in the payload is reported as a NotFoundError. The
+// response is returned alongside the error so callers can inspect headers.
+func (c *httpClient) getJSON(url string, v interface{}) (*http.Response, error) {
+	resp, err := c.get(url)
+	if err != nil {
+		return resp, err
+	}
+	defer resp.Body.Close()
+	if resp.StatusCode != 200 {
+		return resp, c.err(resp)
+	}
+	if derr := json.NewDecoder(resp.Body).Decode(v); derr != nil {
+		if _, ok := derr.(*json.SyntaxError); ok {
+			return resp, NotFoundError{Message: "JSON syntax error at " + url}
+		}
+		return resp, derr
+	}
+	return resp, nil
+}
+
+// getFiles fetches urls[i] into files[i].Data, one goroutine per file.
+// It returns the first error received. Remaining goroutines are not
+// canceled on early return, but the channel is buffered to len(files), so
+// every goroutine can complete its send and none leaks blocked.
+func (c *httpClient) getFiles(urls []string, files []*File) error {
+	ch := make(chan error, len(files))
+	for i := range files {
+		// i is passed as an argument so each goroutine gets its own copy.
+		go func(i int) {
+			resp, err := c.get(urls[i])
+			if err != nil {
+				ch <- err
+				return
+			}
+			defer resp.Body.Close()
+			if resp.StatusCode != 200 {
+				// Shadowed err: build the status error without touching
+				// the outer fetch error.
+				var err error
+				if c.errFn != nil {
+					err = c.errFn(resp)
+				} else {
+					err = &RemoteError{resp.Request.URL.Host, fmt.Errorf("get %s -> %d", urls[i], resp.StatusCode)}
+				}
+				ch <- err
+				return
+			}
+			files[i].Data, err = ioutil.ReadAll(resp.Body)
+			if err != nil {
+				ch <- &RemoteError{resp.Request.URL.Host, err}
+				return
+			}
+			ch <- nil
+		}(i)
+	}
+	// Collect one result per file; bail out on the first failure.
+	for range files {
+		if err := <-ch; err != nil {
+			return err
+		}
+	}
+	return nil
+}
diff --git a/gosrc/data.go b/gosrc/data.go
new file mode 100644
index 0000000..69ce0d4
--- /dev/null
+++ b/gosrc/data.go
@@ -0,0 +1,917 @@
+// Created by go generate; DO NOT EDIT
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gosrc
+
+// Bit flags stored in pathFlags: goRepoPath marks a directory that exists
+// in the Go repository; packagePath marks an importable package path.
+// NOTE(review): this file is generated by gen.go — change the generator,
+// not this file.
+const (
+	goRepoPath  = 1
+	packagePath = 2
+)
+
+var pathFlags = map[string]int{
+	"C":                   2,
+	"archive":             1,
+	"archive/tar":         3,
+	"archive/zip":         3,
+	"bufio":               3,
+	"builtin":             3,
+	"bytes":               3,
+	"compress":            1,
+	"compress/bzip2":      3,
+	"compress/flate":      3,
+	"compress/gzip":       3,
+	"compress/lzw":        3,
+	"compress/zlib":       3,
+	"container":           1,
+	"container/heap":      3,
+	"container/list":      3,
+	"container/ring":      3,
+	"crypto":              3,
+	"crypto/aes":          3,
+	"crypto/cipher":       3,
+	"crypto/des":          3,
+	"crypto/dsa":          3,
+	"crypto/ecdsa":        3,
+	"crypto/elliptic":     3,
+	"crypto/hmac":         3,
+	"crypto/md5":          3,
+	"crypto/rand":         3,
+	"crypto/rc4":          3,
+	"crypto/rsa":          3,
+	"crypto/sha1":         3,
+	"crypto/sha256":       3,
+	"crypto/sha512":       3,
+	"crypto/subtle":       3,
+	"crypto/tls":          3,
+	"crypto/x509":         3,
+	"crypto/x509/pkix":    3,
+	"database":            1,
+	"database/sql":        3,
+	"database/sql/driver": 3,
+	"debug":               1,
+	"debug/dwarf":         3,
+	"debug/elf":           3,
+	"debug/goobj":         3,
+	"debug/gosym":         3,
+	"debug/macho":         3,
+	"debug/pe":            3,
+	"debug/plan9obj":      3,
+	"encoding":            3,
+	"encoding/ascii85":    3,
+	"encoding/asn1":       3,
+	"encoding/base32":     3,
+	"encoding/base64":     3,
+	"encoding/binary":     3,
+	"encoding/csv":        3,
+	"encoding/gob":        3,
+	"encoding/hex":        3,
+	"encoding/json":       3,
+	"encoding/pem":        3,
+	"encoding/xml":        3,
+	"errors":              3,
+	"expvar":              3,
+	"flag":                3,
+	"fmt":                 3,
+	"go":                  1,
+	"go/ast":              3,
+	"go/build":            3,
+	"go/doc":              3,
+	"go/format":           3,
+	"go/parser":           3,
+	"go/printer":          3,
+	"go/scanner":          3,
+	"go/token":            3,
+	"hash":                3,
+	"hash/adler32":        3,
+	"hash/crc32":          3,
+	"hash/crc64":          3,
+	"hash/fnv":            3,
+	"html":                3,
+	"html/template":       3,
+	"image":               3,
+	"image/color":         3,
+	"image/color/palette": 3,
+	"image/draw":          3,
+	"image/gif":           3,
+	"image/jpeg":          3,
+	"image/png":           3,
+	"index":               1,
+	"index/suffixarray":   3,
+	"io":                  3,
+	"io/ioutil":           3,
+	"log":                 3,
+	"log/syslog":          3,
+	"math":                3,
+	"math/big":            3,
+	"math/cmplx":          3,
+	"math/rand":           3,
+	"mime":                3,
+	"mime/multipart":      3,
+	"net":                 3,
+	"net/http":            3,
+	"net/http/cgi":        3,
+	"net/http/cookiejar":  3,
+	"net/http/fcgi":       3,
+	"net/http/httptest":   3,
+	"net/http/httputil":   3,
+	"net/http/internal":   3,
+	"net/http/pprof":      3,
+	"net/mail":            3,
+	"net/rpc":             3,
+	"net/rpc/jsonrpc":     3,
+	"net/smtp":            3,
+	"net/textproto":       3,
+	"net/url":             3,
+	"os":                  3,
+	"os/exec":             3,
+	"os/signal":           3,
+	"os/user":             3,
+	"path":                3,
+	"path/filepath":       3,
+	"reflect":             3,
+	"regexp":              3,
+	"regexp/syntax":       3,
+	"runtime":             3,
+	"runtime/cgo":         3,
+	"runtime/debug":       3,
+	"runtime/pprof":       3,
+	"runtime/race":        3,
+	"sort":                3,
+	"strconv":             3,
+	"strings":             3,
+	"sync":                3,
+	"sync/atomic":         3,
+	"syscall":             3,
+	"testing":             3,
+	"testing/iotest":      3,
+	"testing/quick":       3,
+	"text":                1,
+	"text/scanner":        3,
+	"text/tabwriter":      3,
+	"text/template":       3,
+	"text/template/parse": 3,
+	"time":                3,
+	"unicode":             3,
+	"unicode/utf16":       3,
+	"unicode/utf8":        3,
+	"unsafe":              3,
+}
+
+var validTLDs = map[string]bool{
+	".abogado":                  true,
+	".ac":                       true,
+	".academy":                  true,
+	".accountants":              true,
+	".active":                   true,
+	".actor":                    true,
+	".ad":                       true,
+	".ae":                       true,
+	".aero":                     true,
+	".af":                       true,
+	".ag":                       true,
+	".agency":                   true,
+	".ai":                       true,
+	".airforce":                 true,
+	".al":                       true,
+	".allfinanz":                true,
+	".alsace":                   true,
+	".am":                       true,
+	".an":                       true,
+	".android":                  true,
+	".ao":                       true,
+	".aq":                       true,
+	".ar":                       true,
+	".archi":                    true,
+	".army":                     true,
+	".arpa":                     true,
+	".as":                       true,
+	".asia":                     true,
+	".associates":               true,
+	".at":                       true,
+	".attorney":                 true,
+	".au":                       true,
+	".auction":                  true,
+	".audio":                    true,
+	".autos":                    true,
+	".aw":                       true,
+	".ax":                       true,
+	".axa":                      true,
+	".az":                       true,
+	".ba":                       true,
+	".band":                     true,
+	".bar":                      true,
+	".bargains":                 true,
+	".bayern":                   true,
+	".bb":                       true,
+	".bd":                       true,
+	".be":                       true,
+	".beer":                     true,
+	".berlin":                   true,
+	".best":                     true,
+	".bf":                       true,
+	".bg":                       true,
+	".bh":                       true,
+	".bi":                       true,
+	".bid":                      true,
+	".bike":                     true,
+	".bio":                      true,
+	".biz":                      true,
+	".bj":                       true,
+	".black":                    true,
+	".blackfriday":              true,
+	".bloomberg":                true,
+	".blue":                     true,
+	".bm":                       true,
+	".bmw":                      true,
+	".bn":                       true,
+	".bnpparibas":               true,
+	".bo":                       true,
+	".boo":                      true,
+	".boutique":                 true,
+	".br":                       true,
+	".brussels":                 true,
+	".bs":                       true,
+	".bt":                       true,
+	".budapest":                 true,
+	".build":                    true,
+	".builders":                 true,
+	".business":                 true,
+	".buzz":                     true,
+	".bv":                       true,
+	".bw":                       true,
+	".by":                       true,
+	".bz":                       true,
+	".bzh":                      true,
+	".ca":                       true,
+	".cab":                      true,
+	".cal":                      true,
+	".camera":                   true,
+	".camp":                     true,
+	".cancerresearch":           true,
+	".capetown":                 true,
+	".capital":                  true,
+	".caravan":                  true,
+	".cards":                    true,
+	".care":                     true,
+	".career":                   true,
+	".careers":                  true,
+	".casa":                     true,
+	".cash":                     true,
+	".cat":                      true,
+	".catering":                 true,
+	".cc":                       true,
+	".cd":                       true,
+	".center":                   true,
+	".ceo":                      true,
+	".cern":                     true,
+	".cf":                       true,
+	".cg":                       true,
+	".ch":                       true,
+	".channel":                  true,
+	".cheap":                    true,
+	".christmas":                true,
+	".chrome":                   true,
+	".church":                   true,
+	".ci":                       true,
+	".citic":                    true,
+	".city":                     true,
+	".ck":                       true,
+	".cl":                       true,
+	".claims":                   true,
+	".cleaning":                 true,
+	".click":                    true,
+	".clinic":                   true,
+	".clothing":                 true,
+	".club":                     true,
+	".cm":                       true,
+	".cn":                       true,
+	".co":                       true,
+	".codes":                    true,
+	".coffee":                   true,
+	".college":                  true,
+	".cologne":                  true,
+	".com":                      true,
+	".community":                true,
+	".company":                  true,
+	".computer":                 true,
+	".condos":                   true,
+	".construction":             true,
+	".consulting":               true,
+	".contractors":              true,
+	".cooking":                  true,
+	".cool":                     true,
+	".coop":                     true,
+	".country":                  true,
+	".cr":                       true,
+	".credit":                   true,
+	".creditcard":               true,
+	".cricket":                  true,
+	".crs":                      true,
+	".cruises":                  true,
+	".cu":                       true,
+	".cuisinella":               true,
+	".cv":                       true,
+	".cw":                       true,
+	".cx":                       true,
+	".cy":                       true,
+	".cymru":                    true,
+	".cz":                       true,
+	".dad":                      true,
+	".dance":                    true,
+	".dating":                   true,
+	".day":                      true,
+	".de":                       true,
+	".deals":                    true,
+	".degree":                   true,
+	".delivery":                 true,
+	".democrat":                 true,
+	".dental":                   true,
+	".dentist":                  true,
+	".desi":                     true,
+	".diamonds":                 true,
+	".diet":                     true,
+	".digital":                  true,
+	".direct":                   true,
+	".directory":                true,
+	".discount":                 true,
+	".dj":                       true,
+	".dk":                       true,
+	".dm":                       true,
+	".dnp":                      true,
+	".do":                       true,
+	".domains":                  true,
+	".durban":                   true,
+	".dvag":                     true,
+	".dz":                       true,
+	".eat":                      true,
+	".ec":                       true,
+	".edu":                      true,
+	".education":                true,
+	".ee":                       true,
+	".eg":                       true,
+	".email":                    true,
+	".emerck":                   true,
+	".energy":                   true,
+	".engineer":                 true,
+	".engineering":              true,
+	".enterprises":              true,
+	".equipment":                true,
+	".er":                       true,
+	".es":                       true,
+	".esq":                      true,
+	".estate":                   true,
+	".et":                       true,
+	".eu":                       true,
+	".eus":                      true,
+	".events":                   true,
+	".exchange":                 true,
+	".expert":                   true,
+	".exposed":                  true,
+	".fail":                     true,
+	".farm":                     true,
+	".feedback":                 true,
+	".fi":                       true,
+	".finance":                  true,
+	".financial":                true,
+	".firmdale":                 true,
+	".fish":                     true,
+	".fishing":                  true,
+	".fitness":                  true,
+	".fj":                       true,
+	".fk":                       true,
+	".flights":                  true,
+	".florist":                  true,
+	".flsmidth":                 true,
+	".fly":                      true,
+	".fm":                       true,
+	".fo":                       true,
+	".foo":                      true,
+	".forsale":                  true,
+	".foundation":               true,
+	".fr":                       true,
+	".frl":                      true,
+	".frogans":                  true,
+	".fund":                     true,
+	".furniture":                true,
+	".futbol":                   true,
+	".ga":                       true,
+	".gal":                      true,
+	".gallery":                  true,
+	".gb":                       true,
+	".gbiz":                     true,
+	".gd":                       true,
+	".ge":                       true,
+	".gent":                     true,
+	".gf":                       true,
+	".gg":                       true,
+	".gh":                       true,
+	".gi":                       true,
+	".gift":                     true,
+	".gifts":                    true,
+	".gives":                    true,
+	".gl":                       true,
+	".glass":                    true,
+	".gle":                      true,
+	".global":                   true,
+	".globo":                    true,
+	".gm":                       true,
+	".gmail":                    true,
+	".gmo":                      true,
+	".gmx":                      true,
+	".gn":                       true,
+	".google":                   true,
+	".gop":                      true,
+	".gov":                      true,
+	".gp":                       true,
+	".gq":                       true,
+	".gr":                       true,
+	".graphics":                 true,
+	".gratis":                   true,
+	".green":                    true,
+	".gripe":                    true,
+	".gs":                       true,
+	".gt":                       true,
+	".gu":                       true,
+	".guide":                    true,
+	".guitars":                  true,
+	".guru":                     true,
+	".gw":                       true,
+	".gy":                       true,
+	".hamburg":                  true,
+	".haus":                     true,
+	".healthcare":               true,
+	".help":                     true,
+	".here":                     true,
+	".hiphop":                   true,
+	".hiv":                      true,
+	".hk":                       true,
+	".hm":                       true,
+	".hn":                       true,
+	".holdings":                 true,
+	".holiday":                  true,
+	".homes":                    true,
+	".horse":                    true,
+	".host":                     true,
+	".hosting":                  true,
+	".house":                    true,
+	".how":                      true,
+	".hr":                       true,
+	".ht":                       true,
+	".hu":                       true,
+	".ibm":                      true,
+	".id":                       true,
+	".ie":                       true,
+	".il":                       true,
+	".im":                       true,
+	".immo":                     true,
+	".immobilien":               true,
+	".in":                       true,
+	".industries":               true,
+	".info":                     true,
+	".ing":                      true,
+	".ink":                      true,
+	".institute":                true,
+	".insure":                   true,
+	".int":                      true,
+	".international":            true,
+	".investments":              true,
+	".io":                       true,
+	".iq":                       true,
+	".ir":                       true,
+	".is":                       true,
+	".it":                       true,
+	".je":                       true,
+	".jetzt":                    true,
+	".jm":                       true,
+	".jo":                       true,
+	".jobs":                     true,
+	".joburg":                   true,
+	".jp":                       true,
+	".juegos":                   true,
+	".kaufen":                   true,
+	".ke":                       true,
+	".kg":                       true,
+	".kh":                       true,
+	".ki":                       true,
+	".kim":                      true,
+	".kitchen":                  true,
+	".kiwi":                     true,
+	".km":                       true,
+	".kn":                       true,
+	".koeln":                    true,
+	".kp":                       true,
+	".kr":                       true,
+	".krd":                      true,
+	".kred":                     true,
+	".kw":                       true,
+	".ky":                       true,
+	".kz":                       true,
+	".la":                       true,
+	".lacaixa":                  true,
+	".land":                     true,
+	".lawyer":                   true,
+	".lb":                       true,
+	".lc":                       true,
+	".lds":                      true,
+	".lease":                    true,
+	".lgbt":                     true,
+	".li":                       true,
+	".life":                     true,
+	".lighting":                 true,
+	".limited":                  true,
+	".limo":                     true,
+	".link":                     true,
+	".lk":                       true,
+	".loans":                    true,
+	".london":                   true,
+	".lotto":                    true,
+	".lr":                       true,
+	".ls":                       true,
+	".lt":                       true,
+	".ltda":                     true,
+	".lu":                       true,
+	".luxe":                     true,
+	".luxury":                   true,
+	".lv":                       true,
+	".ly":                       true,
+	".ma":                       true,
+	".madrid":                   true,
+	".maison":                   true,
+	".management":               true,
+	".mango":                    true,
+	".market":                   true,
+	".marketing":                true,
+	".mc":                       true,
+	".md":                       true,
+	".me":                       true,
+	".media":                    true,
+	".meet":                     true,
+	".melbourne":                true,
+	".meme":                     true,
+	".menu":                     true,
+	".mg":                       true,
+	".mh":                       true,
+	".miami":                    true,
+	".mil":                      true,
+	".mini":                     true,
+	".mk":                       true,
+	".ml":                       true,
+	".mm":                       true,
+	".mn":                       true,
+	".mo":                       true,
+	".mobi":                     true,
+	".moda":                     true,
+	".moe":                      true,
+	".monash":                   true,
+	".mormon":                   true,
+	".mortgage":                 true,
+	".moscow":                   true,
+	".motorcycles":              true,
+	".mov":                      true,
+	".mp":                       true,
+	".mq":                       true,
+	".mr":                       true,
+	".ms":                       true,
+	".mt":                       true,
+	".mu":                       true,
+	".museum":                   true,
+	".mv":                       true,
+	".mw":                       true,
+	".mx":                       true,
+	".my":                       true,
+	".mz":                       true,
+	".na":                       true,
+	".nagoya":                   true,
+	".name":                     true,
+	".navy":                     true,
+	".nc":                       true,
+	".ne":                       true,
+	".net":                      true,
+	".network":                  true,
+	".neustar":                  true,
+	".new":                      true,
+	".nexus":                    true,
+	".nf":                       true,
+	".ng":                       true,
+	".ngo":                      true,
+	".nhk":                      true,
+	".ni":                       true,
+	".ninja":                    true,
+	".nl":                       true,
+	".no":                       true,
+	".np":                       true,
+	".nr":                       true,
+	".nra":                      true,
+	".nrw":                      true,
+	".nu":                       true,
+	".nyc":                      true,
+	".nz":                       true,
+	".okinawa":                  true,
+	".om":                       true,
+	".ong":                      true,
+	".onl":                      true,
+	".ooo":                      true,
+	".org":                      true,
+	".organic":                  true,
+	".otsuka":                   true,
+	".ovh":                      true,
+	".pa":                       true,
+	".paris":                    true,
+	".partners":                 true,
+	".parts":                    true,
+	".party":                    true,
+	".pe":                       true,
+	".pf":                       true,
+	".pg":                       true,
+	".ph":                       true,
+	".pharmacy":                 true,
+	".photo":                    true,
+	".photography":              true,
+	".photos":                   true,
+	".physio":                   true,
+	".pics":                     true,
+	".pictures":                 true,
+	".pink":                     true,
+	".pizza":                    true,
+	".pk":                       true,
+	".pl":                       true,
+	".place":                    true,
+	".plumbing":                 true,
+	".pm":                       true,
+	".pn":                       true,
+	".pohl":                     true,
+	".poker":                    true,
+	".post":                     true,
+	".pr":                       true,
+	".praxi":                    true,
+	".press":                    true,
+	".pro":                      true,
+	".prod":                     true,
+	".productions":              true,
+	".prof":                     true,
+	".properties":               true,
+	".property":                 true,
+	".ps":                       true,
+	".pt":                       true,
+	".pub":                      true,
+	".pw":                       true,
+	".py":                       true,
+	".qa":                       true,
+	".qpon":                     true,
+	".quebec":                   true,
+	".re":                       true,
+	".realtor":                  true,
+	".recipes":                  true,
+	".red":                      true,
+	".rehab":                    true,
+	".reise":                    true,
+	".reisen":                   true,
+	".reit":                     true,
+	".ren":                      true,
+	".rentals":                  true,
+	".repair":                   true,
+	".report":                   true,
+	".republican":               true,
+	".rest":                     true,
+	".restaurant":               true,
+	".reviews":                  true,
+	".rich":                     true,
+	".rio":                      true,
+	".rip":                      true,
+	".ro":                       true,
+	".rocks":                    true,
+	".rodeo":                    true,
+	".rs":                       true,
+	".rsvp":                     true,
+	".ru":                       true,
+	".ruhr":                     true,
+	".rw":                       true,
+	".ryukyu":                   true,
+	".sa":                       true,
+	".saarland":                 true,
+	".sarl":                     true,
+	".sb":                       true,
+	".sc":                       true,
+	".sca":                      true,
+	".scb":                      true,
+	".schmidt":                  true,
+	".schule":                   true,
+	".science":                  true,
+	".scot":                     true,
+	".sd":                       true,
+	".se":                       true,
+	".services":                 true,
+	".sexy":                     true,
+	".sg":                       true,
+	".sh":                       true,
+	".shiksha":                  true,
+	".shoes":                    true,
+	".si":                       true,
+	".singles":                  true,
+	".sj":                       true,
+	".sk":                       true,
+	".sl":                       true,
+	".sm":                       true,
+	".sn":                       true,
+	".so":                       true,
+	".social":                   true,
+	".software":                 true,
+	".sohu":                     true,
+	".solar":                    true,
+	".solutions":                true,
+	".soy":                      true,
+	".space":                    true,
+	".spiegel":                  true,
+	".sr":                       true,
+	".st":                       true,
+	".su":                       true,
+	".supplies":                 true,
+	".supply":                   true,
+	".support":                  true,
+	".surf":                     true,
+	".surgery":                  true,
+	".suzuki":                   true,
+	".sv":                       true,
+	".sx":                       true,
+	".sy":                       true,
+	".sydney":                   true,
+	".systems":                  true,
+	".sz":                       true,
+	".taipei":                   true,
+	".tatar":                    true,
+	".tattoo":                   true,
+	".tax":                      true,
+	".tc":                       true,
+	".td":                       true,
+	".technology":               true,
+	".tel":                      true,
+	".tf":                       true,
+	".tg":                       true,
+	".th":                       true,
+	".tienda":                   true,
+	".tips":                     true,
+	".tirol":                    true,
+	".tj":                       true,
+	".tk":                       true,
+	".tl":                       true,
+	".tm":                       true,
+	".tn":                       true,
+	".to":                       true,
+	".today":                    true,
+	".tokyo":                    true,
+	".tools":                    true,
+	".top":                      true,
+	".town":                     true,
+	".toys":                     true,
+	".tp":                       true,
+	".tr":                       true,
+	".trade":                    true,
+	".training":                 true,
+	".travel":                   true,
+	".tt":                       true,
+	".tui":                      true,
+	".tv":                       true,
+	".tw":                       true,
+	".tz":                       true,
+	".ua":                       true,
+	".ug":                       true,
+	".uk":                       true,
+	".university":               true,
+	".uno":                      true,
+	".uol":                      true,
+	".us":                       true,
+	".uy":                       true,
+	".uz":                       true,
+	".va":                       true,
+	".vacations":                true,
+	".vc":                       true,
+	".ve":                       true,
+	".vegas":                    true,
+	".ventures":                 true,
+	".versicherung":             true,
+	".vet":                      true,
+	".vg":                       true,
+	".vi":                       true,
+	".viajes":                   true,
+	".villas":                   true,
+	".vision":                   true,
+	".vlaanderen":               true,
+	".vn":                       true,
+	".vodka":                    true,
+	".vote":                     true,
+	".voting":                   true,
+	".voto":                     true,
+	".voyage":                   true,
+	".vu":                       true,
+	".wales":                    true,
+	".wang":                     true,
+	".watch":                    true,
+	".webcam":                   true,
+	".website":                  true,
+	".wed":                      true,
+	".wedding":                  true,
+	".wf":                       true,
+	".whoswho":                  true,
+	".wien":                     true,
+	".wiki":                     true,
+	".williamhill":              true,
+	".wme":                      true,
+	".work":                     true,
+	".works":                    true,
+	".world":                    true,
+	".ws":                       true,
+	".wtc":                      true,
+	".wtf":                      true,
+	".xn--1qqw23a":              true,
+	".xn--3bst00m":              true,
+	".xn--3ds443g":              true,
+	".xn--3e0b707e":             true,
+	".xn--45brj9c":              true,
+	".xn--45q11c":               true,
+	".xn--4gbrim":               true,
+	".xn--55qw42g":              true,
+	".xn--55qx5d":               true,
+	".xn--6frz82g":              true,
+	".xn--6qq986b3xl":           true,
+	".xn--80adxhks":             true,
+	".xn--80ao21a":              true,
+	".xn--80asehdb":             true,
+	".xn--80aswg":               true,
+	".xn--90a3ac":               true,
+	".xn--c1avg":                true,
+	".xn--cg4bki":               true,
+	".xn--clchc0ea0b2g2a9gcd":   true,
+	".xn--czr694b":              true,
+	".xn--czru2d":               true,
+	".xn--d1acj3b":              true,
+	".xn--d1alf":                true,
+	".xn--fiq228c5hs":           true,
+	".xn--fiq64b":               true,
+	".xn--fiqs8s":               true,
+	".xn--fiqz9s":               true,
+	".xn--flw351e":              true,
+	".xn--fpcrj9c3d":            true,
+	".xn--fzc2c9e2c":            true,
+	".xn--gecrj9c":              true,
+	".xn--h2brj9c":              true,
+	".xn--i1b6b1a6a2e":          true,
+	".xn--io0a7i":               true,
+	".xn--j1amh":                true,
+	".xn--j6w193g":              true,
+	".xn--kprw13d":              true,
+	".xn--kpry57d":              true,
+	".xn--kput3i":               true,
+	".xn--l1acc":                true,
+	".xn--lgbbat1ad8j":          true,
+	".xn--mgb9awbf":             true,
+	".xn--mgba3a4f16a":          true,
+	".xn--mgbaam7a8h":           true,
+	".xn--mgbab2bd":             true,
+	".xn--mgbayh7gpa":           true,
+	".xn--mgbbh1a71e":           true,
+	".xn--mgbc0a9azcg":          true,
+	".xn--mgberp4a5d4ar":        true,
+	".xn--mgbx4cd0ab":           true,
+	".xn--ngbc5azd":             true,
+	".xn--node":                 true,
+	".xn--nqv7f":                true,
+	".xn--nqv7fs00ema":          true,
+	".xn--o3cw4h":               true,
+	".xn--ogbpf8fl":             true,
+	".xn--p1acf":                true,
+	".xn--p1ai":                 true,
+	".xn--pgbs0dh":              true,
+	".xn--q9jyb4c":              true,
+	".xn--qcka1pmc":             true,
+	".xn--rhqv96g":              true,
+	".xn--s9brj9c":              true,
+	".xn--ses554g":              true,
+	".xn--unup4y":               true,
+	".xn--vermgensberater-ctb":  true,
+	".xn--vermgensberatung-pwb": true,
+	".xn--vhquv":                true,
+	".xn--wgbh1c":               true,
+	".xn--wgbl6a":               true,
+	".xn--xhq521b":              true,
+	".xn--xkc2al3hye2a":         true,
+	".xn--xkc2dl3a5ee0h":        true,
+	".xn--yfro4i67o":            true,
+	".xn--ygbi2ammx":            true,
+	".xn--zfr164b":              true,
+	".xxx":                      true,
+	".xyz":                      true,
+	".yachts":                   true,
+	".yandex":                   true,
+	".ye":                       true,
+	".yoga":                     true,
+	".yokohama":                 true,
+	".youtube":                  true,
+	".yt":                       true,
+	".za":                       true,
+	".zip":                      true,
+	".zm":                       true,
+	".zone":                     true,
+	".zw":                       true,
+}
diff --git a/gosrc/gen.go b/gosrc/gen.go
new file mode 100644
index 0000000..3c9f791
--- /dev/null
+++ b/gosrc/gen.go
@@ -0,0 +1,127 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"go/format"
+	"io/ioutil"
+	"log"
+	"net/http"
+	"os"
+	"os/exec"
+	"strings"
+	"text/template"
+)
+
+// Flag bits describing an import path. These values are embedded into the
+// generated file via tmpl below, so the two must stay in sync.
+const (
+	goRepoPath = 1 << iota
+	packagePath
+)
+
+// tmpl is the template for the generated source file. Its output is run
+// through go/format before being written, so layout here is cosmetic.
+var tmpl = template.Must(template.New("").Parse(`// Created by go generate; DO NOT EDIT
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gosrc
+
+const (
+    goRepoPath = {{.goRepoPath}}
+    packagePath = {{.packagePath}}
+)
+
+var pathFlags = map[string]int{
+{{range $k, $v := .pathFlags}}{{printf "%q" $k}}: {{$v}},
+{{end}} }
+
+var validTLDs = map[string]bool{
+{{range  $v := .validTLDs}}{{printf "%q" $v}}: true,
+{{end}} }
+`))
+
+// output is the destination file name for the generated code.
+var output = flag.String("output", "data.go", "file name to write")
+
+// main generates a Go source file containing the standard-repository path
+// flags (from "go list std") and the set of valid top-level domains (from
+// IANA), then writes the gofmt-ed result to the -output file.
+func main() {
+	log.SetFlags(0)
+	log.SetPrefix("gen: ")
+	flag.Parse()
+	if flag.NArg() != 0 {
+		log.Fatal("usage: gen [--output filename]")
+	}
+
+	// Build map of standard repository path flags.
+
+	cmd := exec.Command("go", "list", "std")
+	p, err := cmd.Output()
+	if err != nil {
+		log.Fatal(err)
+	}
+	pathFlags := map[string]int{
+		"builtin": packagePath | goRepoPath,
+		"C":       packagePath,
+	}
+	for _, path := range strings.Fields(string(p)) {
+		if strings.HasPrefix(path, "cmd/") {
+			continue
+		}
+		pathFlags[path] |= packagePath | goRepoPath
+		// Every ancestor directory of a standard package is part of the
+		// Go repository, but is not itself importable.
+		for {
+			i := strings.LastIndex(path, "/")
+			if i < 0 {
+				break
+			}
+			path = path[:i]
+			pathFlags[path] |= goRepoPath
+		}
+	}
+
+	// Get list of valid TLDs.
+
+	resp, err := http.Get("http://data.iana.org/TLD/tlds-alpha-by-domain.txt")
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer resp.Body.Close()
+	p, err = ioutil.ReadAll(resp.Body)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	var validTLDs []string
+	for _, line := range strings.Split(string(p), "\n") {
+		line = strings.TrimSpace(line)
+		if len(line) == 0 || line[0] == '#' {
+			continue
+		}
+		validTLDs = append(validTLDs, "."+strings.ToLower(line))
+	}
+
+	// Generate output.
+
+	var buf bytes.Buffer
+	err = tmpl.Execute(&buf, map[string]interface{}{
+		"goRepoPath":  goRepoPath,
+		"packagePath": packagePath,
+		"pathFlags":   pathFlags,
+		"validTLDs":   validTLDs,
+	})
+	if err != nil {
+		log.Fatal("template error:", err)
+	}
+	source, err := format.Source(buf.Bytes())
+	if err != nil {
+		log.Fatal("source format error:", err)
+	}
+	// Check every step of writing the file; the original code overwrote
+	// the os.Create error with the Write error and never closed the file.
+	fd, err := os.Create(*output)
+	if err != nil {
+		log.Fatal(err)
+	}
+	if _, err := fd.Write(source); err != nil {
+		log.Fatal(err)
+	}
+	if err := fd.Close(); err != nil {
+		log.Fatal(err)
+	}
+}
diff --git a/gosrc/github.go b/gosrc/github.go
new file mode 100644
index 0000000..b612346
--- /dev/null
+++ b/gosrc/github.go
@@ -0,0 +1,329 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"net/url"
+	"regexp"
+	"strings"
+	"time"
+)
+
+func init() {
+	// Repository service: matches github.com/owner/repo with an optional
+	// subdirectory.
+	addService(&service{
+		pattern:         regexp.MustCompile(`^github\.com/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`),
+		prefix:          "github.com/",
+		get:             getGitHubDir,
+		getPresentation: getGitHubPresentation,
+		getProject:      getGitHubProject,
+	})
+
+	// Gist service: matches gist.github.com/<id>.git.
+	addService(&service{
+		pattern: regexp.MustCompile(`^gist\.github\.com/(?P<gist>[a-z0-9A-Z_.\-]+)\.git$`),
+		prefix:  "gist.github.com/",
+		get:     getGistDir,
+	})
+}
+
+var (
+	// gitHubRawHeader asks the GitHub API for raw blob content.
+	gitHubRawHeader = http.Header{"Accept": {"application/vnd.github-blob.raw"}}
+	// gitHubPreviewHeader opts in to the preview search API.
+	gitHubPreviewHeader = http.Header{"Accept": {"application/vnd.github.preview"}}
+	// ownerRepoPat extracts the canonical owner and repo names from an API
+	// URL. The dots in the host are escaped; previously they matched any
+	// character.
+	ownerRepoPat = regexp.MustCompile(`^https://api\.github\.com/repos/([^/]+)/([^/]+)/`)
+)
+
+// gitHubError converts a failed GitHub API response into a *RemoteError,
+// including the API's "message" field when the body decodes as JSON.
+func gitHubError(resp *http.Response) error {
+	var body struct {
+		Message string `json:"message"`
+	}
+	err := fmt.Errorf("%d: (%s)", resp.StatusCode, resp.Request.URL.String())
+	if json.NewDecoder(resp.Body).Decode(&body) == nil {
+		err = fmt.Errorf("%d: %s (%s)", resp.StatusCode, body.Message, resp.Request.URL.String())
+	}
+	return &RemoteError{resp.Request.URL.Host, err}
+}
+
+// getGitHubDir fetches a repository directory via the GitHub API. The
+// resolved commit SHA is used as the cache etag; ErrNotModified is
+// returned when it equals savedEtag.
+func getGitHubDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
+
+	c := &httpClient{client: client, errFn: gitHubError}
+
+	type refJSON struct {
+		Object struct {
+			Type string
+			Sha  string
+			URL  string
+		}
+		Ref string
+		URL string
+	}
+	var refs []*refJSON
+
+	resp, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}/git/refs", match), &refs)
+	if err != nil {
+		return nil, err
+	}
+
+	// If the response contains a Link header, then fallback to requesting "master" and "go1" by name.
+	if resp.Header.Get("Link") != "" {
+		var masterRef refJSON
+		if _, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}/git/refs/heads/master", match), &masterRef); err == nil {
+			refs = append(refs, &masterRef)
+		}
+
+		var go1Ref refJSON
+		if _, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}/git/refs/tags/go1", match), &go1Ref); err == nil {
+			refs = append(refs, &go1Ref)
+		}
+	}
+
+	// Index branch and tag names to their commit SHAs.
+	tags := make(map[string]string)
+	for _, ref := range refs {
+		switch {
+		case strings.HasPrefix(ref.Ref, "refs/heads/"):
+			tags[ref.Ref[len("refs/heads/"):]] = ref.Object.Sha
+		case strings.HasPrefix(ref.Ref, "refs/tags/"):
+			tags[ref.Ref[len("refs/tags/"):]] = ref.Object.Sha
+		}
+	}
+
+	// Choose the ref to fetch ("master" is the preferred candidate) and
+	// its commit SHA.
+	var commit string
+	match["tag"], commit, err = bestTag(tags, "master")
+	if err != nil {
+		return nil, err
+	}
+
+	if commit == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	var contents []*struct {
+		Type    string
+		Name    string
+		GitURL  string `json:"git_url"`
+		HTMLURL string `json:"html_url"`
+	}
+
+	if _, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}?ref={tag}", match), &contents); err != nil {
+		return nil, err
+	}
+
+	if len(contents) == 0 {
+		return nil, NotFoundError{Message: "No files in directory."}
+	}
+
+	// GitHub owner and repo names are case-insensitive. Redirect if requested
+	// names do not match the canonical names in API response.
+	if m := ownerRepoPat.FindStringSubmatch(contents[0].GitURL); m != nil && (m[1] != match["owner"] || m[2] != match["repo"]) {
+		match["owner"] = m[1]
+		match["repo"] = m[2]
+		return nil, NotFoundError{
+			Message:  "Github import path has incorrect case.",
+			Redirect: expand("github.com/{owner}/{repo}{dir}", match),
+		}
+	}
+
+	// Partition directory entries into subdirectories and files relevant
+	// to documentation.
+	var files []*File
+	var dataURLs []string
+	var subdirs []string
+
+	for _, item := range contents {
+		switch {
+		case item.Type == "dir":
+			if isValidPathElement(item.Name) {
+				subdirs = append(subdirs, item.Name)
+			}
+		case isDocFile(item.Name):
+			files = append(files, &File{Name: item.Name, BrowseURL: item.HTMLURL})
+			dataURLs = append(dataURLs, item.GitURL)
+		}
+	}
+
+	// Fetch raw file contents (dataURLs and files are parallel slices).
+	c.header = gitHubRawHeader
+	if err := c.getFiles(dataURLs, files); err != nil {
+		return nil, err
+	}
+
+	browseURL := expand("https://github.com/{owner}/{repo}", match)
+	if match["dir"] != "" {
+		browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
+	}
+
+	// Fetch repository metadata to detect dead-end forks: forks that have
+	// never been pushed to since they were created.
+	var repo = struct {
+		Fork      bool      `json:"fork"`
+		CreatedAt time.Time `json:"created_at"`
+		PushedAt  time.Time `json:"pushed_at"`
+	}{}
+
+	if _, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}", match), &repo); err != nil {
+		return nil, err
+	}
+
+	isDeadEndFork := repo.Fork && repo.PushedAt.Before(repo.CreatedAt)
+
+	return &Directory{
+		BrowseURL:      browseURL,
+		Etag:           commit,
+		Files:          files,
+		LineFmt:        "%s#L%d",
+		ProjectName:    match["repo"],
+		ProjectRoot:    expand("github.com/{owner}/{repo}", match),
+		ProjectURL:     expand("https://github.com/{owner}/{repo}", match),
+		Subdirectories: subdirs,
+		VCS:            "git",
+		DeadEndFork:    isDeadEndFork,
+	}, nil
+}
+
+// getGitHubPresentation fetches a .slide/.article file and its referenced
+// resources from a GitHub repository and builds a Presentation from them.
+func getGitHubPresentation(client *http.Client, match map[string]string) (*Presentation, error) {
+	c := &httpClient{client: client, header: gitHubRawHeader}
+
+	p, err := c.getBytes(expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/{file}", match))
+	if err != nil {
+		return nil, err
+	}
+
+	// apiBase resolves relative file names for fetching; rawBase resolves
+	// them for user-facing static URLs.
+	apiBase, err := url.Parse(expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/", match))
+	if err != nil {
+		return nil, err
+	}
+	rawBase, err := url.Parse(expand("https://raw.github.com/{owner}/{repo}/master{dir}/", match))
+	if err != nil {
+		return nil, err
+	}
+
+	// NOTE(review): c.header was already set to gitHubRawHeader when c was
+	// constructed above; this reassignment looks redundant — confirm.
+	c.header = gitHubRawHeader
+
+	b := &presBuilder{
+		data:     p,
+		filename: match["file"],
+		fetch: func(fnames []string) ([]*File, error) {
+			var files []*File
+			var dataURLs []string
+			for _, fname := range fnames {
+				u, err := apiBase.Parse(fname)
+				if err != nil {
+					return nil, err
+				}
+				u.RawQuery = apiBase.RawQuery
+				files = append(files, &File{Name: fname})
+				dataURLs = append(dataURLs, u.String())
+			}
+			err := c.getFiles(dataURLs, files)
+			return files, err
+		},
+		resolveURL: func(fname string) string {
+			u, err := rawBase.Parse(fname)
+			if err != nil {
+				return "/notfound"
+			}
+			// SVGs are served with the correct content type from rawgithub.com.
+			if strings.HasSuffix(fname, ".svg") {
+				u.Host = "rawgithub.com"
+			}
+			return u.String()
+		},
+	}
+
+	return b.build()
+}
+
+// GetGitHubUpdates returns the full names ("owner/repo") of GitHub
+// repositories containing Go code that were pushed after pushedAfter. It
+// also returns the maximum "pushed_at" timestamp seen, suitable for use as
+// the next call's pushedAfter. An empty pushedAfter defaults to 24 hours ago.
+func GetGitHubUpdates(client *http.Client, pushedAfter string) (maxPushedAt string, names []string, err error) {
+	c := httpClient{client: client, header: gitHubPreviewHeader}
+
+	if pushedAfter == "" {
+		pushedAfter = time.Now().Add(-24 * time.Hour).UTC().Format("2006-01-02T15:04:05Z")
+	}
+	u := "https://api.github.com/search/repositories?order=asc&sort=updated&q=fork:true+language:Go+pushed:>" + pushedAfter
+	var updates struct {
+		Items []struct {
+			FullName string `json:"full_name"`
+			PushedAt string `json:"pushed_at"`
+		}
+	}
+	_, err = c.getJSON(u, &updates)
+	if err != nil {
+		return pushedAfter, nil, err
+	}
+
+	maxPushedAt = pushedAfter
+	for _, item := range updates.Items {
+		names = append(names, item.FullName)
+		// NOTE(review): string comparison assumes pushed_at timestamps are
+		// RFC 3339 UTC, so lexical order equals chronological order — confirm.
+		if item.PushedAt > maxPushedAt {
+			maxPushedAt = item.PushedAt
+		}
+	}
+	return maxPushedAt, names, nil
+}
+
+// getGitHubProject fetches repository metadata for a GitHub project.
+func getGitHubProject(client *http.Client, match map[string]string) (*Project, error) {
+	c := &httpClient{client: client, errFn: gitHubError}
+
+	var repo struct {
+		Description string
+	}
+	_, err := c.getJSON(expand("https://api.github.com/repos/{owner}/{repo}", match), &repo)
+	if err != nil {
+		return nil, err
+	}
+	return &Project{Description: repo.Description}, nil
+}
+
+// getGistDir fetches the files of a GitHub gist and presents them as a
+// single directory. The latest history version serves as the cache etag;
+// ErrNotModified is returned when it equals savedEtag.
+func getGistDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
+	c := &httpClient{client: client, errFn: gitHubError}
+
+	var gist struct {
+		Files map[string]struct {
+			Content string
+		}
+		// Renamed from HtmlUrl to follow Go initialism conventions; the
+		// JSON tag keeps decoding unchanged.
+		HTMLURL string `json:"html_url"`
+		History []struct {
+			Version string
+		}
+	}
+
+	if _, err := c.getJSON(expand("https://api.github.com/gists/{gist}", match), &gist); err != nil {
+		return nil, err
+	}
+
+	if len(gist.History) == 0 {
+		return nil, NotFoundError{Message: "History not found."}
+	}
+	// The first history entry is the most recent revision.
+	commit := gist.History[0].Version
+
+	if commit == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	var files []*File
+
+	for name, file := range gist.Files {
+		if isDocFile(name) {
+			files = append(files, &File{
+				Name: name,
+				Data: []byte(file.Content),
+				// Gist pages anchor each file as "#file-" plus the name
+				// with dots replaced by dashes.
+				BrowseURL: gist.HTMLURL + "#file-" + strings.Replace(name, ".", "-", -1),
+			})
+		}
+	}
+
+	return &Directory{
+		BrowseURL:      gist.HTMLURL,
+		Etag:           commit,
+		Files:          files,
+		LineFmt:        "%s-L%d",
+		ProjectName:    match["gist"],
+		ProjectRoot:    expand("gist.github.com/{gist}.git", match),
+		ProjectURL:     gist.HTMLURL,
+		Subdirectories: nil,
+		VCS:            "git",
+	}, nil
+}
diff --git a/gosrc/golang.go b/gosrc/golang.go
new file mode 100644
index 0000000..f3d474d
--- /dev/null
+++ b/gosrc/golang.go
@@ -0,0 +1,71 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"errors"
+	"net/http"
+	"regexp"
+	"strings"
+)
+
+var (
+	// golangBuildVersionRe extracts the "Build version ..." string that
+	// golang.org embeds in served pages; it is used as the cache etag.
+	golangBuildVersionRe = regexp.MustCompile(`Build version ([-+:. 0-9A-Za-z]+)`)
+	// golangFileRe matches anchor hrefs in the source directory listing.
+	golangFileRe         = regexp.MustCompile(`<a href="([^"]+)"`)
+)
+
+// getStandardDir fetches a standard-library package directory by scraping
+// the golang.org source listing. The page's build version serves as the
+// etag; ErrNotModified is returned when it equals savedEtag.
+func getStandardDir(client *http.Client, importPath string, savedEtag string) (*Directory, error) {
+	c := &httpClient{client: client}
+
+	browseURL := "https://golang.org/src/" + importPath + "/"
+	p, err := c.getBytes(browseURL)
+	if IsNotFound(err) {
+		// Fallback to Go 1.3 directory structure.
+		// TODO(garyburd): Delete fallback after 1.4 is pushed to golang.org.
+		browseURL = "https://golang.org/src/pkg/" + importPath + "/"
+		p, err = c.getBytes(browseURL)
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	var etag string
+	m := golangBuildVersionRe.FindSubmatch(p)
+	if m == nil {
+		return nil, errors.New("Could not find revision for " + importPath)
+	}
+	etag = strings.Trim(string(m[1]), ". ")
+	if etag == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	// Collect documentation-relevant files; "?m=text" requests raw source.
+	var files []*File
+	var dataURLs []string
+	for _, m := range golangFileRe.FindAllSubmatch(p, -1) {
+		fname := string(m[1])
+		if isDocFile(fname) {
+			files = append(files, &File{Name: fname, BrowseURL: browseURL + fname})
+			dataURLs = append(dataURLs, browseURL+fname+"?m=text")
+		}
+	}
+
+	if err := c.getFiles(dataURLs, files); err != nil {
+		return nil, err
+	}
+
+	return &Directory{
+		BrowseURL:    browseURL,
+		Etag:         etag,
+		Files:        files,
+		ImportPath:   importPath,
+		LineFmt:      "%s#L%d",
+		ProjectName:  "Go",
+		ProjectRoot:  "",
+		ProjectURL:   "https://golang.org/",
+		ResolvedPath: importPath,
+	}, nil
+}
diff --git a/gosrc/google.go b/gosrc/google.go
new file mode 100644
index 0000000..c36c223
--- /dev/null
+++ b/gosrc/google.go
@@ -0,0 +1,168 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"errors"
+	"net/http"
+	"net/url"
+	"regexp"
+	"strings"
+)
+
+func init() {
+	// Matches code.google.com/{p,r}/repo[.subrepo][/dir]. The subrepo
+	// group uses a non-capturing "(?:" — the previous "(:?" was a typo
+	// that also accepted a stray literal colon before the dot.
+	addService(&service{
+		pattern:         regexp.MustCompile(`^code\.google\.com/(?P<pr>[pr])/(?P<repo>[a-z0-9\-]+)(?:\.(?P<subrepo>[a-z0-9\-]+))?(?P<dir>/[a-z0-9A-Z_.\-/]+)?$`),
+		prefix:          "code.google.com/",
+		get:             getGoogleDir,
+		getPresentation: getGooglePresentation,
+	})
+}
+
+var (
+	// googleRepoRe finds the VCS name on the project's checkout page.
+	googleRepoRe     = regexp.MustCompile(`id="checkoutcmd">(hg|git|svn)`)
+	// googleRevisionRe extracts the revision from the repo browser page.
+	googleRevisionRe = regexp.MustCompile(`<h2>(?:[^ ]+ - )?Revision *([^:]+):`)
+	// googleEtagRe recovers the VCS name from a previously saved etag.
+	googleEtagRe     = regexp.MustCompile(`^(hg|git|svn)-`)
+	// googleFileRe matches file links in the repo browser listing.
+	googleFileRe     = regexp.MustCompile(`<li><a href="([^"]+)"`)
+)
+
+// getGoogleDir fetches a directory from a Google Code project by scraping
+// the repository browser. The etag is "{vcs}-{revision}"; ErrNotModified
+// is returned when it equals savedEtag.
+func getGoogleDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
+	c := &httpClient{client: client}
+
+	setupGoogleMatch(match)
+	// Reuse the VCS name embedded in the saved etag to avoid an extra
+	// scrape of the checkout page.
+	if m := googleEtagRe.FindStringSubmatch(savedEtag); m != nil {
+		match["vcs"] = m[1]
+	} else if err := getGoogleVCS(c, match); err != nil {
+		return nil, err
+	}
+
+	// Scrape the repo browser to find the project revision and individual Go files.
+	p, err := c.getBytes(expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match))
+	if err != nil {
+		return nil, err
+	}
+
+	var etag string
+	m := googleRevisionRe.FindSubmatch(p)
+	if m == nil {
+		return nil, errors.New("Could not find revision for " + match["importPath"])
+	}
+	etag = expand("{vcs}-{0}", match, string(m[1]))
+	if etag == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	// Entries ending in "/" are subdirectories; the rest are candidate files.
+	var subdirs []string
+	var files []*File
+	var dataURLs []string
+	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
+		fname := string(m[1])
+		switch {
+		case strings.HasSuffix(fname, "/"):
+			fname = fname[:len(fname)-1]
+			if isValidPathElement(fname) {
+				subdirs = append(subdirs, fname)
+			}
+		case isDocFile(fname):
+			files = append(files, &File{Name: fname, BrowseURL: expand("http://code.google.com/{pr}/{repo}/source/browse{dir}/{0}{query}", match, fname)})
+			dataURLs = append(dataURLs, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}", match, fname))
+		}
+	}
+
+	if err := c.getFiles(dataURLs, files); err != nil {
+		return nil, err
+	}
+
+	var projectURL string
+	if match["subrepo"] == "" {
+		projectURL = expand("https://code.google.com/{pr}/{repo}/", match)
+	} else {
+		projectURL = expand("https://code.google.com/{pr}/{repo}/source/browse?repo={subrepo}", match)
+	}
+
+	return &Directory{
+		BrowseURL:   expand("http://code.google.com/{pr}/{repo}/source/browse{dir}/{query}", match),
+		Etag:        etag,
+		Files:       files,
+		LineFmt:     "%s#%d",
+		ProjectName: expand("{repo}{dot}{subrepo}", match),
+		ProjectRoot: expand("code.google.com/{pr}/{repo}{dot}{subrepo}", match),
+		ProjectURL:  projectURL,
+		VCS:         match["vcs"],
+	}, nil
+}
+
+// setupGoogleMatch derives the "dot" and "query" values used when
+// expanding Google Code URL templates for an optional sub-repository.
+func setupGoogleMatch(match map[string]string) {
+	match["dot"] = ""
+	match["query"] = ""
+	if s := match["subrepo"]; s != "" {
+		match["dot"] = "."
+		match["query"] = "?repo=" + s
+	}
+}
+
+// getGoogleVCS determines the project's version control system (hg, git,
+// or svn) by scraping its checkout page, and stores it in match["vcs"].
+func getGoogleVCS(c *httpClient, match map[string]string) error {
+	// Scrape the HTML project page to find the VCS.
+	p, err := c.getBytes(expand("http://code.google.com/{pr}/{repo}/source/checkout", match))
+	if err != nil {
+		return err
+	}
+	m := googleRepoRe.FindSubmatch(p)
+	if m == nil {
+		return NotFoundError{Message: "Could not find VCS on Google Code project page."}
+	}
+	match["vcs"] = string(m[1])
+	return nil
+}
+
+// getGooglePresentation fetches a .slide/.article file and its referenced
+// resources from a Google Code repository and builds a Presentation.
+func getGooglePresentation(client *http.Client, match map[string]string) (*Presentation, error) {
+	c := &httpClient{client: client}
+
+	setupGoogleMatch(match)
+	if err := getGoogleVCS(c, match); err != nil {
+		return nil, err
+	}
+
+	// rawBase resolves relative file names against the raw-content host.
+	rawBase, err := url.Parse(expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match))
+	if err != nil {
+		return nil, err
+	}
+
+	p, err := c.getBytes(expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{file}", match))
+	if err != nil {
+		return nil, err
+	}
+
+	b := &presBuilder{
+		data:     p,
+		filename: match["file"],
+		fetch: func(fnames []string) ([]*File, error) {
+			var files []*File
+			var dataURLs []string
+			for _, fname := range fnames {
+				u, err := rawBase.Parse(fname)
+				if err != nil {
+					return nil, err
+				}
+				files = append(files, &File{Name: fname})
+				dataURLs = append(dataURLs, u.String())
+			}
+			err := c.getFiles(dataURLs, files)
+			return files, err
+		},
+		resolveURL: func(fname string) string {
+			u, err := rawBase.Parse(fname)
+			if err != nil {
+				return "/notfound"
+			}
+			return u.String()
+		},
+	}
+
+	return b.build()
+}
diff --git a/gosrc/gosrc.go b/gosrc/gosrc.go
new file mode 100644
index 0000000..9ae9fca
--- /dev/null
+++ b/gosrc/gosrc.go
@@ -0,0 +1,476 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package gosrc fetches Go package source code from version control services.
+package gosrc
+
+import (
+	"encoding/xml"
+	"errors"
+	"fmt"
+	"io"
+	"net/http"
+	"path"
+	"regexp"
+	"strings"
+)
+
+// File represents a file.
+type File struct {
+	// File name with no directory.
+	Name string
+
+	// Contents of the file.
+	Data []byte
+
+	// Location of file on version control service website.
+	BrowseURL string
+}
+
+// Directory describes a directory on a version control service.
+type Directory struct {
+	// The import path for this package.
+	ImportPath string
+
+	// Import path of package after resolving go-import meta tags, if any.
+	ResolvedPath string
+
+	// Import path prefix for all packages in the project.
+	ProjectRoot string
+
+	// Name of the project.
+	ProjectName string
+
+	// Project home page.
+	ProjectURL string
+
+	// Version control system: git, hg, bzr, ...
+	VCS string
+
+	// True if the repository is a fork that has not been pushed to since
+	// it was created (see getGitHubDir).
+	DeadEndFork bool
+
+	// Cache validation tag. This tag is not necessarily an HTTP entity tag.
+	// The tag is "" if there is no meaningful cache validation for the VCS.
+	Etag string
+
+	// Files.
+	Files []*File
+
+	// Subdirectories, not guaranteed to contain Go code.
+	Subdirectories []string
+
+	// Location of directory on version control service website.
+	BrowseURL string
+
+	// Format specifier for link to source line. Example: "%s#L%d"
+	LineFmt string
+}
+
+// Project represents a repository.
+type Project struct {
+	Description string
+}
+
+// NotFoundError indicates that the directory or presentation was not found.
+type NotFoundError struct {
+	// Diagnostic message describing why the directory was not found.
+	Message string
+
+	// Redirect specifies the path where package can be found.
+	Redirect string
+}
+
+// Error implements the error interface.
+func (e NotFoundError) Error() string {
+	return e.Message
+}
+
+// IsNotFound returns true if err is of type NotFoundError.
+func IsNotFound(err error) bool {
+	_, ok := err.(NotFoundError)
+	return ok
+}
+
+// RemoteError wraps an error returned while talking to a remote host,
+// recording the host's name.
+type RemoteError struct {
+	Host string
+	err  error
+}
+
+// Error implements the error interface.
+func (e *RemoteError) Error() string {
+	return e.err.Error()
+}
+
+// ErrNotModified indicates that the directory matches the specified etag.
+var ErrNotModified = errors.New("package not modified")
+
+// errNoMatch is an internal sentinel: the import path matched no service.
+var errNoMatch = errors.New("no match")
+
+// service represents a source code control service.
+type service struct {
+	pattern         *regexp.Regexp
+	prefix          string
+	get             func(*http.Client, map[string]string, string) (*Directory, error)
+	getPresentation func(*http.Client, map[string]string) (*Presentation, error)
+	getProject      func(*http.Client, map[string]string) (*Project, error)
+}
+
+// services is the list of registered services. Services with a non-empty
+// prefix are kept at the front of the list (see addService), so they are
+// tried before prefix-less services.
+var services []*service
+
+// addService registers s, prepending it when it has a prefix and
+// appending it otherwise.
+func addService(s *service) {
+	if s.prefix == "" {
+		services = append(services, s)
+	} else {
+		services = append([]*service{s}, services...)
+	}
+}
+
+// match tests importPath against the service. It returns (nil, nil) when
+// the path does not belong to this service, a NotFoundError when the
+// prefix matches but the pattern does not, and otherwise a map of the
+// pattern's named submatches plus the original "importPath".
+func (s *service) match(importPath string) (map[string]string, error) {
+	if !strings.HasPrefix(importPath, s.prefix) {
+		return nil, nil
+	}
+	m := s.pattern.FindStringSubmatch(importPath)
+	if m == nil {
+		if s.prefix != "" {
+			return nil, NotFoundError{Message: "Import path prefix matches known service, but regexp does not."}
+		}
+		return nil, nil
+	}
+	match := map[string]string{"importPath": importPath}
+	for i, n := range s.pattern.SubexpNames() {
+		if n != "" {
+			match[n] = m[i]
+		}
+	}
+	return match, nil
+}
+
+// importMeta represents the values in a go-import meta tag.
+type importMeta struct {
+	projectRoot string // import path prefix covered by the repository
+	vcs         string // version control system: git, hg, svn, ...
+	repo        string // repository URL
+}
+
+// sourceMeta represents the values in a go-source meta tag.
+type sourceMeta struct {
+	projectRoot  string
+	projectURL   string // project home page URL
+	dirTemplate  string // directory URL template with {dir} / {/dir}
+	fileTemplate string // file URL template with {file} and {line}
+}
+
+// isHTTPURL reports whether s starts with an http or https scheme.
+func isHTTPURL(s string) bool {
+	for _, prefix := range []string{"https://", "http://"} {
+		if strings.HasPrefix(s, prefix) {
+			return true
+		}
+	}
+	return false
+}
+
+// replaceDir substitutes dir into the {dir} and {/dir} placeholders of s.
+// dir is stripped of surrounding slashes; {/dir} expands to "/"+dir, or to
+// "" when dir is empty.
+func replaceDir(s string, dir string) string {
+	dir = strings.Trim(dir, "/")
+	slashDir := ""
+	if dir != "" {
+		slashDir = "/" + dir
+	}
+	return strings.NewReplacer("{dir}", dir, "{/dir}", slashDir).Replace(s)
+}
+
+// attrValue returns the value of the first attribute whose local name
+// equals name (case-insensitively), or "" when there is none.
+func attrValue(attrs []xml.Attr, name string) string {
+	for i := range attrs {
+		if strings.EqualFold(attrs[i].Name.Local, name) {
+			return attrs[i].Value
+		}
+	}
+	return ""
+}
+
+// fetchMeta fetches the import path's page with "?go-get=1", trying https
+// first and falling back to http, then parses its go-import/go-source meta
+// tags. scheme reports which protocol succeeded; redir reports whether the
+// page refreshes back to godoc.org.
+func fetchMeta(client *http.Client, importPath string) (scheme string, im *importMeta, sm *sourceMeta, redir bool, err error) {
+	uri := importPath
+	if !strings.Contains(uri, "/") {
+		// Add slash for root of domain.
+		uri = uri + "/"
+	}
+	uri = uri + "?go-get=1"
+
+	c := httpClient{client: client}
+	scheme = "https"
+	resp, err := c.get(scheme + "://" + uri)
+	if err != nil || resp.StatusCode != 200 {
+		// https failed (error or non-200): discard any response body and
+		// retry over plain http.
+		if err == nil {
+			resp.Body.Close()
+		}
+		scheme = "http"
+		resp, err = c.get(scheme + "://" + uri)
+		if err != nil {
+			return scheme, nil, nil, false, err
+		}
+	}
+	defer resp.Body.Close()
+	im, sm, redir, err = parseMeta(scheme, importPath, resp.Body)
+	return scheme, im, sm, redir, err
+}
+
+// refreshToGodocPat matches an http-equiv refresh content value that
+// redirects back to godoc.org.
+var refreshToGodocPat = regexp.MustCompile(`(?i)^\d+; url=https?://godoc\.org/`)
+
+// parseMeta scans the HTML in r (up to </head> or <body>) for go-import
+// and go-source meta tags whose project root covers importPath. It returns
+// a NotFoundError when no usable go-import tag is found; a go-source tag
+// is dropped unless its project root matches the go-import tag's.
+func parseMeta(scheme, importPath string, r io.Reader) (im *importMeta, sm *sourceMeta, redir bool, err error) {
+	errorMessage := "go-import meta tag not found"
+
+	d := xml.NewDecoder(r)
+	// Tolerate real-world HTML that is not well-formed XML.
+	d.Strict = false
+metaScan:
+	for {
+		t, tokenErr := d.Token()
+		if tokenErr != nil {
+			break metaScan
+		}
+		switch t := t.(type) {
+		case xml.EndElement:
+			if strings.EqualFold(t.Name.Local, "head") {
+				break metaScan
+			}
+		case xml.StartElement:
+			if strings.EqualFold(t.Name.Local, "body") {
+				break metaScan
+			}
+			if !strings.EqualFold(t.Name.Local, "meta") {
+				continue metaScan
+			}
+			if strings.EqualFold(attrValue(t.Attr, "http-equiv"), "refresh") {
+				// Check for http-equiv refresh back to godoc.org.
+				redir = refreshToGodocPat.MatchString(attrValue(t.Attr, "content"))
+				continue metaScan
+			}
+			nameAttr := attrValue(t.Attr, "name")
+			if nameAttr != "go-import" && nameAttr != "go-source" {
+				continue metaScan
+			}
+			fields := strings.Fields(attrValue(t.Attr, "content"))
+			if len(fields) < 1 {
+				continue metaScan
+			}
+			projectRoot := fields[0]
+			if !strings.HasPrefix(importPath, projectRoot) ||
+				!(len(importPath) == len(projectRoot) || importPath[len(projectRoot)] == '/') {
+				// Ignore if root is not a prefix of the  path. This allows a
+				// site to use a single error page for multiple repositories.
+				continue metaScan
+			}
+			switch nameAttr {
+			case "go-import":
+				if len(fields) != 3 {
+					errorMessage = "go-import meta tag content attribute does not have three fields"
+					continue metaScan
+				}
+				if im != nil {
+					// Ambiguous: two applicable go-import tags is an error.
+					im = nil
+					errorMessage = "more than one go-import meta tag found"
+					break metaScan
+				}
+				im = &importMeta{
+					projectRoot: projectRoot,
+					vcs:         fields[1],
+					repo:        fields[2],
+				}
+			case "go-source":
+				if sm != nil {
+					// Ignore extra go-source meta tags.
+					continue metaScan
+				}
+				if len(fields) != 4 {
+					continue metaScan
+				}
+				sm = &sourceMeta{
+					projectRoot:  projectRoot,
+					projectURL:   fields[1],
+					dirTemplate:  fields[2],
+					fileTemplate: fields[3],
+				}
+			}
+		}
+	}
+	if im == nil {
+		return nil, nil, redir, NotFoundError{Message: fmt.Sprintf("%s at %s://%s", errorMessage, scheme, importPath)}
+	}
+	if sm != nil && sm.projectRoot != im.projectRoot {
+		sm = nil
+	}
+	return im, sm, redir, nil
+}
+
+// getVCSDirFn is called by getDynamic to fetch source using VCS commands.
+// The default value here does nothing. If the code is not built for App
+// Engine, then getVCSDirFn is set to getVCSDir, the function that actually
+// does the work.
+var getVCSDirFn = func(client *http.Client, m map[string]string, etag string) (*Directory, error) {
+	return nil, errNoMatch
+}
+
+// getDynamic gets a directory from a service that is not statically known,
+// by resolving go-import (and optional go-source) meta tags. When
+// importPath is not itself the project root, the root's meta tags are
+// fetched too and must agree, to prevent spoofed sub-path tags.
+func getDynamic(client *http.Client, importPath, etag string) (*Directory, error) {
+	metaProto, im, sm, redir, err := fetchMeta(client, importPath)
+	if err != nil {
+		return nil, err
+	}
+
+	if im.projectRoot != importPath {
+		var imRoot *importMeta
+		metaProto, imRoot, _, redir, err = fetchMeta(client, im.projectRoot)
+		if err != nil {
+			return nil, err
+		}
+		if *imRoot != *im {
+			return nil, NotFoundError{Message: "project root mismatch."}
+		}
+	}
+
+	// Split the repo URL into scheme and host/path, dropping a ".vcs"
+	// suffix if present (it is re-added below when retrying).
+	repo := strings.TrimSuffix(im.repo, "."+im.vcs)
+	i := strings.Index(repo, "://")
+	if i < 0 {
+		return nil, NotFoundError{Message: "bad repo URL: " + im.repo}
+	}
+	proto := repo[:i]
+	repo = repo[i+len("://"):]
+	dirName := importPath[len(im.projectRoot):]
+
+	// Try a statically known service for the resolved path first; fall
+	// back to fetching through VCS commands.
+	resolvedPath := repo + dirName
+	dir, err := getStatic(client, resolvedPath, etag)
+	if err == errNoMatch {
+		resolvedPath = repo + "." + im.vcs + dirName
+		match := map[string]string{
+			"dir":        dirName,
+			"importPath": importPath,
+			"repo":       repo,
+			"scheme":     proto,
+			"vcs":        im.vcs,
+		}
+		dir, err = getVCSDirFn(client, match, etag)
+	}
+	if err != nil || dir == nil {
+		return nil, err
+	}
+
+	dir.ImportPath = importPath
+	dir.ProjectRoot = im.projectRoot
+	dir.ResolvedPath = resolvedPath
+	dir.ProjectName = path.Base(im.projectRoot)
+	if !redir {
+		dir.ProjectURL = metaProto + "://" + im.projectRoot
+	}
+
+	if sm == nil {
+		return dir, nil
+	}
+
+	// Apply go-source overrides for the project URL, browse URL, and
+	// per-file line-number links.
+	if isHTTPURL(sm.projectURL) {
+		dir.ProjectURL = sm.projectURL
+	}
+
+	if isHTTPURL(sm.dirTemplate) {
+		dir.BrowseURL = replaceDir(sm.dirTemplate, dirName)
+	}
+
+	if isHTTPURL(sm.fileTemplate) {
+		fileTemplate := replaceDir(sm.fileTemplate, dirName)
+		parts := strings.SplitN(fileTemplate, "#", 2)
+		if strings.Contains(parts[0], "{file}") {
+			for _, f := range dir.Files {
+				f.BrowseURL = strings.Replace(parts[0], "{file}", f.Name, -1)
+			}
+			// Escape any "%" in the fragment before substituting the
+			// printf verb for {line}.
+			if len(parts) == 2 && strings.Count(parts[1], "{line}") == 1 {
+				s := strings.Replace(parts[1], "%", "%%", -1)
+				s = strings.Replace(s, "{line}", "%d", 1)
+				dir.LineFmt = "%s#" + s
+			}
+		}
+	}
+
+	return dir, nil
+}
+
+// getStatic gets a directory from a statically known service. getStatic
+// returns errNoMatch if the import path is not recognized.
+func getStatic(client *http.Client, importPath, etag string) (*Directory, error) {
+	for _, s := range services {
+		if s.get == nil {
+			continue
+		}
+		match, err := s.match(importPath)
+		if err != nil {
+			return nil, err
+		}
+		if match != nil {
+			dir, err := s.get(client, match, etag)
+			if dir != nil {
+				dir.ImportPath = importPath
+				dir.ResolvedPath = importPath
+			}
+			return dir, err
+		}
+	}
+	return nil, errNoMatch
+}
+
+// Get fetches the directory for importPath, dispatching to the local file
+// system, the standard library, a statically known service, or dynamic
+// go-import discovery, in that order. etag is the value from a previous
+// fetch; ErrNotModified is returned when the directory is unchanged.
+func Get(client *http.Client, importPath string, etag string) (dir *Directory, err error) {
+	switch {
+	case localPath != "":
+		dir, err = getLocal(importPath)
+	case IsGoRepoPath(importPath):
+		dir, err = getStandardDir(client, importPath, etag)
+	case IsValidRemotePath(importPath):
+		dir, err = getStatic(client, importPath, etag)
+		if err == errNoMatch {
+			dir, err = getDynamic(client, importPath, etag)
+		}
+	default:
+		err = errNoMatch
+	}
+
+	if err == errNoMatch {
+		// Include the offending path; the previous message ended at the colon.
+		err = NotFoundError{Message: "Import path not valid: " + importPath}
+	}
+
+	return dir, err
+}
+
+// GetPresentation gets a presentation (.slide or .article file) from the
+// given path.
+func GetPresentation(client *http.Client, importPath string) (*Presentation, error) {
+	ext := path.Ext(importPath)
+	if ext != ".slide" && ext != ".article" {
+		return nil, NotFoundError{Message: "unknown file extension."}
+	}
+
+	// Split the path into the containing directory and the file name.
+	importPath, file := path.Split(importPath)
+	importPath = strings.TrimSuffix(importPath, "/")
+	for _, s := range services {
+		if s.getPresentation == nil {
+			continue
+		}
+		match, err := s.match(importPath)
+		if err != nil {
+			return nil, err
+		}
+		if match != nil {
+			match["file"] = file
+			return s.getPresentation(client, match)
+		}
+	}
+	return nil, NotFoundError{Message: "path does not match registered service"}
+}
+
+// GetProject gets information about a repository.
+func GetProject(client *http.Client, importPath string) (*Project, error) {
+	for _, s := range services {
+		if s.getProject == nil {
+			continue
+		}
+		m, err := s.match(importPath)
+		switch {
+		case err != nil:
+			return nil, err
+		case m != nil:
+			return s.getProject(client, m)
+		}
+	}
+	return nil, NotFoundError{Message: "path does not match registered service"}
+}
diff --git a/gosrc/gosrc_test.go b/gosrc/gosrc_test.go
new file mode 100644
index 0000000..84e81f3
--- /dev/null
+++ b/gosrc/gosrc_test.go
@@ -0,0 +1,272 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"path"
+	"reflect"
+	"regexp"
+	"strings"
+	"testing"
+)
+
+var testWeb = map[string]string{
+	// Package at root of a GitHub repo.
+	"https://alice.org/pkg": `<head> <meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg"></head>`,
+	// Package in a sub-directory.
+	"https://alice.org/pkg/sub": `<head> <meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg"><body>`,
+	// Fallback to http.
+	"http://alice.org/pkg/http": `<head> <meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">`,
+	// Meta tag in sub-directory does not match meta tag at root.
+	"https://alice.org/pkg/mismatch": `<head> <meta name="go-import" content="alice.org/pkg hg https://github.com/alice/pkg">`,
+	// More than one matching meta tag.
+	"http://alice.org/pkg/multiple": `<head> ` +
+		`<meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">` +
+		`<meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">`,
+	// Package with go-source meta tag.
+	"https://alice.org/pkg/source": `<head>` +
+		`<meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">` +
+		`<meta name="go-source" content="alice.org/pkg http://alice.org/pkg http://alice.org/pkg{/dir} http://alice.org/pkg{/dir}?f={file}#Line{line}">`,
+	"https://alice.org/pkg/ignore": `<head>` +
+		`<title>Hello</title>` +
+		// Unknown meta name
+		`<meta name="go-junk" content="alice.org/pkg http://alice.org/pkg http://alice.org/pkg{/dir} http://alice.org/pkg{/dir}?f={file}#Line{line}">` +
+		// go-source before go-import
+		`<meta name="go-source" content="alice.org/pkg http://alice.org/pkg http://alice.org/pkg{/dir} http://alice.org/pkg{/dir}?f={file}#Line{line}">` +
+		// go-import tag for the package
+		`<meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">` +
+		// go-import with wrong number of fields
+		`<meta name="go-import" content="alice.org/pkg https://github.com/alice/pkg">` +
+		// go-import with no fields
+		`<meta name="go-import" content="">` +
+		// go-source with wrong number of fields
+		`<meta name="go-source" content="alice.org/pkg blah">` +
+		// meta tag for a different package
+		`<meta name="go-import" content="alice.org/other git https://github.com/alice/other">` +
+		// meta tag for a different package
+		`<meta name="go-import" content="alice.org/other git https://github.com/alice/other">` +
+		`</head>` +
+		// go-import outside of head
+		`<meta name="go-import" content="alice.org/pkg git https://github.com/alice/pkg">`,
+
+	// Package at root of a Git repo.
+	"https://bob.com/pkg": `<head> <meta name="go-import" content="bob.com/pkg git https://vcs.net/bob/pkg.git">`,
+	// Package in a sub-directory of a Git repo.
+	"https://bob.com/pkg/sub": `<head> <meta name="go-import" content="bob.com/pkg git https://vcs.net/bob/pkg.git">`,
+	// Package with go-source meta tag.
+	"https://bob.com/pkg/source": `<head>` +
+		`<meta name="go-import" content="bob.com/pkg git https://vcs.net/bob/pkg.git">` +
+		`<meta name="go-source" content="bob.com/pkg http://bob.com/pkg http://bob.com/pkg{/dir}/ http://bob.com/pkg{/dir}/?f={file}#Line{line}">`,
+	// Meta refresh to godoc.org
+	"http://rsc.io/benchstat": `<head>` +
+		`<!DOCTYPE html><html><head>` +
+		`<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>` +
+		`<meta name="go-import" content="rsc.io/benchstat git https://github.com/rsc/benchstat">` +
+		`<meta http-equiv="refresh" content="0; url=https://godoc.org/rsc.io/benchstat">` +
+		`</head>`,
+}
+
+var getDynamicTests = []struct {
+	importPath string
+	dir        *Directory
+}{
+	{"alice.org/pkg", &Directory{
+		BrowseURL:    "https://github.com/alice/pkg",
+		ImportPath:   "alice.org/pkg",
+		LineFmt:      "%s#L%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "alice.org/pkg",
+		ProjectURL:   "https://alice.org/pkg",
+		ResolvedPath: "github.com/alice/pkg",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "https://github.com/alice/pkg/blob/master/main.go"}},
+	}},
+	{"alice.org/pkg/sub", &Directory{
+		BrowseURL:    "https://github.com/alice/pkg/tree/master/sub",
+		ImportPath:   "alice.org/pkg/sub",
+		LineFmt:      "%s#L%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "alice.org/pkg",
+		ProjectURL:   "https://alice.org/pkg",
+		ResolvedPath: "github.com/alice/pkg/sub",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "https://github.com/alice/pkg/blob/master/sub/main.go"}},
+	}},
+	{"alice.org/pkg/http", &Directory{
+		BrowseURL:    "https://github.com/alice/pkg/tree/master/http",
+		ImportPath:   "alice.org/pkg/http",
+		LineFmt:      "%s#L%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "alice.org/pkg",
+		ProjectURL:   "https://alice.org/pkg",
+		ResolvedPath: "github.com/alice/pkg/http",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "https://github.com/alice/pkg/blob/master/http/main.go"}},
+	}},
+	{"alice.org/pkg/source", &Directory{
+		BrowseURL:    "http://alice.org/pkg/source",
+		ImportPath:   "alice.org/pkg/source",
+		LineFmt:      "%s#Line%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "alice.org/pkg",
+		ProjectURL:   "http://alice.org/pkg",
+		ResolvedPath: "github.com/alice/pkg/source",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "http://alice.org/pkg/source?f=main.go"}},
+	}},
+	{"alice.org/pkg/ignore", &Directory{
+		BrowseURL:    "http://alice.org/pkg/ignore",
+		ImportPath:   "alice.org/pkg/ignore",
+		LineFmt:      "%s#Line%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "alice.org/pkg",
+		ProjectURL:   "http://alice.org/pkg",
+		ResolvedPath: "github.com/alice/pkg/ignore",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "http://alice.org/pkg/ignore?f=main.go"}},
+	}},
+	{"alice.org/pkg/mismatch", nil},
+	{"alice.org/pkg/multiple", nil},
+	{"alice.org/pkg/notfound", nil},
+
+	{"bob.com/pkg", &Directory{
+		ImportPath:   "bob.com/pkg",
+		ProjectName:  "pkg",
+		ProjectRoot:  "bob.com/pkg",
+		ProjectURL:   "https://bob.com/pkg",
+		ResolvedPath: "vcs.net/bob/pkg.git",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go"}},
+	}},
+	{"bob.com/pkg/sub", &Directory{
+		ImportPath:   "bob.com/pkg/sub",
+		ProjectName:  "pkg",
+		ProjectRoot:  "bob.com/pkg",
+		ProjectURL:   "https://bob.com/pkg",
+		ResolvedPath: "vcs.net/bob/pkg.git/sub",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go"}},
+	}},
+	{"bob.com/pkg/source", &Directory{
+		BrowseURL:    "http://bob.com/pkg/source/",
+		ImportPath:   "bob.com/pkg/source",
+		LineFmt:      "%s#Line%d",
+		ProjectName:  "pkg",
+		ProjectRoot:  "bob.com/pkg",
+		ProjectURL:   "http://bob.com/pkg",
+		ResolvedPath: "vcs.net/bob/pkg.git/source",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "http://bob.com/pkg/source/?f=main.go"}},
+	}},
+	{"rsc.io/benchstat", &Directory{
+		BrowseURL:    "https://github.com/rsc/benchstat",
+		ImportPath:   "rsc.io/benchstat",
+		LineFmt:      "%s#L%d",
+		ProjectName:  "benchstat",
+		ProjectRoot:  "rsc.io/benchstat",
+		ProjectURL:   "https://github.com/rsc/benchstat",
+		ResolvedPath: "github.com/rsc/benchstat",
+		VCS:          "git",
+		Files:        []*File{{Name: "main.go", BrowseURL: "https://github.com/rsc/benchstat/blob/master/main.go"}},
+	}},
+}
+
+type testTransport map[string]string
+
+func (t testTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+	statusCode := http.StatusOK
+	req.URL.RawQuery = ""
+	body, ok := t[req.URL.String()]
+	if !ok {
+		statusCode = http.StatusNotFound
+	}
+	resp := &http.Response{
+		StatusCode: statusCode,
+		Body:       ioutil.NopCloser(strings.NewReader(body)),
+	}
+	return resp, nil
+}
+
+var githubPattern = regexp.MustCompile(`^github\.com/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)
+
+func testGet(client *http.Client, match map[string]string, etag string) (*Directory, error) {
+	importPath := match["importPath"]
+
+	if m := githubPattern.FindStringSubmatch(importPath); m != nil {
+		browseURL := fmt.Sprintf("https://github.com/%s/%s", m[1], m[2])
+		if m[3] != "" {
+			browseURL = fmt.Sprintf("%s/tree/master%s", browseURL, m[3])
+		}
+		return &Directory{
+			BrowseURL:   browseURL,
+			ImportPath:  importPath,
+			LineFmt:     "%s#L%d",
+			ProjectName: m[2],
+			ProjectRoot: fmt.Sprintf("github.com/%s/%s", m[1], m[2]),
+			ProjectURL:  fmt.Sprintf("https://github.com/%s/%s", m[1], m[2]),
+			VCS:         "git",
+			Files: []*File{{
+				Name:      "main.go",
+				BrowseURL: fmt.Sprintf("https://github.com/%s/%s/blob/master%s/main.go", m[1], m[2], m[3]),
+			}},
+		}, nil
+	}
+
+	if strings.HasPrefix(match["repo"], "vcs.net") {
+		return &Directory{
+			ImportPath:  importPath,
+			ProjectName: path.Base(match["repo"]),
+			ProjectRoot: fmt.Sprintf("%s.%s", match["repo"], match["vcs"]),
+			VCS:         match["vcs"],
+			Files:       []*File{{Name: "main.go"}},
+		}, nil
+	}
+
+	return nil, errNoMatch
+}
+
+func TestGetDynamic(t *testing.T) {
+	savedServices := services
+	savedGetVCSDirFn := getVCSDirFn
+	defer func() {
+		services = savedServices
+		getVCSDirFn = savedGetVCSDirFn
+	}()
+	services = []*service{{pattern: regexp.MustCompile(".*"), get: testGet}}
+	getVCSDirFn = testGet
+	client := &http.Client{Transport: testTransport(testWeb)}
+
+	for _, tt := range getDynamicTests {
+		dir, err := getDynamic(client, tt.importPath, "")
+
+		if tt.dir == nil {
+			if err == nil {
+				t.Errorf("getDynamic(client, %q, etag) did not return expected error", tt.importPath)
+			}
+			continue
+		}
+
+		if err != nil {
+			t.Errorf("getDynamic(client, %q, etag) returned unexpected error: %v", tt.importPath, err)
+			continue
+		}
+
+		if !reflect.DeepEqual(dir, tt.dir) {
+			t.Errorf("getDynamic(client, %q, etag) =\n     %+v,\nwant %+v", tt.importPath, dir, tt.dir)
+			for i, f := range dir.Files {
+				var want *File
+				if i < len(tt.dir.Files) {
+					want = tt.dir.Files[i]
+				}
+				t.Errorf("file %d = %+v, want %+v", i, f, want)
+			}
+		}
+	}
+}
diff --git a/gosrc/launchpad.go b/gosrc/launchpad.go
new file mode 100644
index 0000000..5cda037
--- /dev/null
+++ b/gosrc/launchpad.go
@@ -0,0 +1,135 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"archive/tar"
+	"bytes"
+	"compress/gzip"
+	"crypto/md5"
+	"encoding/hex"
+	"io"
+	"net/http"
+	"path"
+	"regexp"
+	"sort"
+	"strings"
+)
+
+func init() {
+	addService(&service{
+		pattern: regexp.MustCompile(`^launchpad\.net/(?P<repo>(?P<project>[a-z0-9A-Z_.\-]+)(?P<series>/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]+)*$`),
+		prefix:  "launchpad.net/",
+		get:     getLaunchpadDir,
+	})
+}
+
+type byHash []byte
+
+func (p byHash) Len() int { return len(p) / md5.Size }
+func (p byHash) Less(i, j int) bool {
+	return -1 == bytes.Compare(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:(j+1)*md5.Size])
+}
+func (p byHash) Swap(i, j int) {
+	var temp [md5.Size]byte
+	copy(temp[:], p[i*md5.Size:])
+	copy(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:])
+	copy(p[j*md5.Size:], temp[:])
+}
+
+func getLaunchpadDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
+	c := &httpClient{client: client}
+
+	if match["project"] != "" && match["series"] != "" {
+		rc, err := c.getReader(expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match))
+		switch {
+		case err == nil:
+			rc.Close()
+			// The structure of the import path is launchpad.net/{root}/{dir}.
+		case IsNotFound(err):
+			// The structure of the import path is launchpad.net/{project}/{dir}.
+			match["repo"] = match["project"]
+			match["dir"] = expand("{series}{dir}", match)
+		default:
+			return nil, err
+		}
+	}
+
+	p, err := c.getBytes(expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match))
+	if err != nil {
+		return nil, err
+	}
+
+	gzr, err := gzip.NewReader(bytes.NewReader(p))
+	if err != nil {
+		return nil, err
+	}
+	defer gzr.Close()
+
+	tr := tar.NewReader(gzr)
+
+	var hash []byte
+	inTree := false
+	dirPrefix := expand("+branch/{repo}{dir}/", match)
+	var files []*File
+	for {
+		h, err := tr.Next()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			return nil, err
+		}
+		d, f := path.Split(h.Name)
+		if !isDocFile(f) {
+			continue
+		}
+		b := make([]byte, h.Size)
+		if _, err := io.ReadFull(tr, b); err != nil {
+			return nil, err
+		}
+
+		m := md5.New()
+		m.Write(b)
+		hash = m.Sum(hash)
+
+		if !strings.HasPrefix(h.Name, dirPrefix) {
+			continue
+		}
+		inTree = true
+		if d == dirPrefix {
+			files = append(files, &File{
+				Name:      f,
+				BrowseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
+				Data:      b})
+		}
+	}
+
+	if !inTree {
+		return nil, NotFoundError{Message: "Directory tree does not contain Go files."}
+	}
+
+	sort.Sort(byHash(hash))
+	m := md5.New()
+	m.Write(hash)
+	hash = m.Sum(hash[:0])
+	etag := hex.EncodeToString(hash)
+	if etag == savedEtag {
+		return nil, ErrNotModified
+	}
+
+	return &Directory{
+		BrowseURL:   expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/", match),
+		Etag:        etag,
+		Files:       files,
+		LineFmt:     "%s#L%d",
+		ProjectName: match["repo"],
+		ProjectRoot: expand("launchpad.net/{repo}", match),
+		ProjectURL:  expand("https://launchpad.net/{repo}/", match),
+		VCS:         "bzr",
+	}, nil
+}
diff --git a/gosrc/local.go b/gosrc/local.go
new file mode 100644
index 0000000..d9ae236
--- /dev/null
+++ b/gosrc/local.go
@@ -0,0 +1,63 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"go/build"
+	"io/ioutil"
+	"path/filepath"
+	"strconv"
+	"time"
+)
+
+var localPath string
+
+// SetLocalDevMode sets the package to local development mode. In this mode,
+// the GOPATH specified by path is used to find directories instead of version
+// control services.
+func SetLocalDevMode(path string) {
+	localPath = path
+}
+
+func getLocal(importPath string) (*Directory, error) {
+	ctx := build.Default
+	if localPath != "" {
+		ctx.GOPATH = localPath
+	}
+	bpkg, err := ctx.Import(importPath, ".", build.FindOnly)
+	if err != nil {
+		return nil, err
+	}
+	dir := filepath.Join(bpkg.SrcRoot, filepath.FromSlash(importPath))
+	fis, err := ioutil.ReadDir(dir)
+	if err != nil {
+		return nil, err
+	}
+	var modTime time.Time
+	var files []*File
+	for _, fi := range fis {
+		if fi.IsDir() || !isDocFile(fi.Name()) {
+			continue
+		}
+		if fi.ModTime().After(modTime) {
+			modTime = fi.ModTime()
+		}
+		b, err := ioutil.ReadFile(filepath.Join(dir, fi.Name()))
+		if err != nil {
+			return nil, err
+		}
+		files = append(files, &File{
+			Name: fi.Name(),
+			Data: b,
+		})
+	}
+	return &Directory{
+		ImportPath: importPath,
+		Etag:       strconv.FormatInt(modTime.Unix(), 16),
+		Files:      files,
+	}, nil
+}
diff --git a/gosrc/path.go b/gosrc/path.go
new file mode 100644
index 0000000..6575df8
--- /dev/null
+++ b/gosrc/path.go
@@ -0,0 +1,59 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+//go:generate go run gen.go -output data.go
+
+package gosrc
+
+import (
+	"path"
+	"regexp"
+	"strings"
+)
+
+var validHost = regexp.MustCompile(`^[-a-z0-9]+(?:\.[-a-z0-9]+)+$`)
+var validPathElement = regexp.MustCompile(`^[-A-Za-z0-9~+_][-A-Za-z0-9_.]*$`)
+
+func isValidPathElement(s string) bool {
+	return validPathElement.MatchString(s)
+}
+
+// IsValidRemotePath returns true if importPath is structurally valid for "go get".
+func IsValidRemotePath(importPath string) bool {
+
+	parts := strings.Split(importPath, "/")
+
+	if len(parts) <= 1 {
+		// Import path must contain at least one "/".
+		return false
+	}
+
+	if !validTLDs[path.Ext(parts[0])] {
+		return false
+	}
+
+	if !validHost.MatchString(parts[0]) {
+		return false
+	}
+
+	for _, part := range parts[1:] {
+		if !isValidPathElement(part) {
+			return false
+		}
+	}
+
+	return true
+}
+
+// IsGoRepoPath returns true if path is in $GOROOT/src.
+func IsGoRepoPath(path string) bool {
+	return pathFlags[path]&goRepoPath != 0
+}
+
+// IsValidPath returns true if importPath is structurally valid.
+func IsValidPath(importPath string) bool {
+	return pathFlags[importPath]&packagePath != 0 || IsValidRemotePath(importPath)
+}
diff --git a/gosrc/path_test.go b/gosrc/path_test.go
new file mode 100644
index 0000000..be9fc0f
--- /dev/null
+++ b/gosrc/path_test.go
@@ -0,0 +1,45 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"testing"
+)
+
+var goodImportPaths = []string{
+	"github.com/user/repo",
+	"github.com/user/repo/src/pkg/compress/somethingelse",
+	"github.com/user/repo/src/compress/gzip",
+	"github.com/user/repo/src/pkg",
+	"camlistore.org/r/p/camlistore",
+	"example.com/foo.git",
+	"launchpad.net/~user/foo/trunk",
+	"launchpad.net/~user/+junk/version",
+	"github.com/user/repo/_ok/x",
+}
+
+var badImportPaths = []string{
+	"foobar",
+	"foo.",
+	".bar",
+	"favicon.ico",
+	"exmpple.com",
+	"github.com/user/repo/.ignore/x",
+}
+
+func TestIsValidRemotePath(t *testing.T) {
+	for _, importPath := range goodImportPaths {
+		if !IsValidRemotePath(importPath) {
+			t.Errorf("IsValidRemotePath(%q) -> false, want true", importPath)
+		}
+	}
+	for _, importPath := range badImportPaths {
+		if IsValidRemotePath(importPath) {
+			t.Errorf("IsValidRemotePath(%q) -> true, want false", importPath)
+		}
+	}
+}
diff --git a/gosrc/present.go b/gosrc/present.go
new file mode 100644
index 0000000..455804b
--- /dev/null
+++ b/gosrc/present.go
@@ -0,0 +1,73 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"regexp"
+	"time"
+)
+
+type Presentation struct {
+	Filename string
+	Files    map[string][]byte
+	Updated  time.Time
+}
+
+type presBuilder struct {
+	filename   string
+	data       []byte
+	resolveURL func(fname string) string
+	fetch      func(fnames []string) ([]*File, error)
+}
+
+var assetPat = regexp.MustCompile(`(?m)^\.(play|code|image|iframe|html)\s+(?:-\S+\s+)*(\S+)`)
+
+func (b *presBuilder) build() (*Presentation, error) {
+	var data []byte
+	var fnames []string
+	i := 0
+	for _, m := range assetPat.FindAllSubmatchIndex(b.data, -1) {
+		name := string(b.data[m[4]:m[5]])
+		switch string(b.data[m[2]:m[3]]) {
+		case "iframe", "image":
+			data = append(data, b.data[i:m[4]]...)
+			data = append(data, b.resolveURL(name)...)
+		case "html":
+			// TODO: sanitize and fix relative URLs in HTML.
+			data = append(data, "\nERROR: .html not supported\n"...)
+		case "play", "code":
+			data = append(data, b.data[i:m[5]]...)
+			found := false
+			for _, n := range fnames {
+				if n == name {
+					found = true
+					break
+				}
+			}
+			if !found {
+				fnames = append(fnames, name)
+			}
+		default:
+			data = append(data, "\nERROR: unknown command\n"...)
+		}
+		i = m[5]
+	}
+	data = append(data, b.data[i:]...)
+	files, err := b.fetch(fnames)
+	if err != nil {
+		return nil, err
+	}
+	pres := &Presentation{
+		Updated:  time.Now().UTC(),
+		Filename: b.filename,
+		Files:    map[string][]byte{b.filename: data},
+	}
+	for _, f := range files {
+		pres.Files[f.Name] = f.Data
+	}
+	return pres, nil
+}
diff --git a/gosrc/print.go b/gosrc/print.go
new file mode 100644
index 0000000..46de29c
--- /dev/null
+++ b/gosrc/print.go
@@ -0,0 +1,78 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// +build ignore
+
+// Command print fetches and prints package.
+//
+// Usage: go run print.go importPath
+package main
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"net/http"
+	"strings"
+
+	"github.com/golang/gddo/gosrc"
+)
+
+var (
+	etag    = flag.String("etag", "", "Etag")
+	local   = flag.String("local", "", "Get package from local workspace.")
+	present = flag.Bool("present", false, "Get presentation.")
+)
+
+func main() {
+	flag.Parse()
+	if len(flag.Args()) != 1 {
+		log.Fatal("Usage: go run print.go importPath")
+	}
+	if *present {
+		printPresentation(flag.Args()[0])
+	} else {
+		printDir(flag.Args()[0])
+	}
+}
+
+func printDir(path string) {
+	if *local != "" {
+		gosrc.SetLocalDevMode(*local)
+	}
+	dir, err := gosrc.Get(http.DefaultClient, path, *etag)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	fmt.Println("ImportPath    ", dir.ImportPath)
+	fmt.Println("ResolvedPath  ", dir.ResolvedPath)
+	fmt.Println("ProjectRoot   ", dir.ProjectRoot)
+	fmt.Println("ProjectName   ", dir.ProjectName)
+	fmt.Println("ProjectURL    ", dir.ProjectURL)
+	fmt.Println("VCS           ", dir.VCS)
+	fmt.Println("Etag          ", dir.Etag)
+	fmt.Println("BrowseURL     ", dir.BrowseURL)
+	fmt.Println("Subdirectories", strings.Join(dir.Subdirectories, ", "))
+	fmt.Println("LineFmt       ", dir.LineFmt)
+	fmt.Println("Files:")
+	for _, file := range dir.Files {
+		fmt.Printf("%30s %5d %s\n", file.Name, len(file.Data), file.BrowseURL)
+	}
+}
+
+func printPresentation(path string) {
+	pres, err := gosrc.GetPresentation(http.DefaultClient, path)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Printf("%s\n", pres.Files[pres.Filename])
+	for name, data := range pres.Files {
+		if name != pres.Filename {
+			fmt.Printf("---------- %s ----------\n%s\n", name, data)
+		}
+	}
+}
diff --git a/gosrc/util.go b/gosrc/util.go
new file mode 100644
index 0000000..1aa1eac
--- /dev/null
+++ b/gosrc/util.go
@@ -0,0 +1,70 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"regexp"
+	"strconv"
+	"strings"
+)
+
+var defaultTags = map[string]string{"git": "master", "hg": "default"}
+
+func bestTag(tags map[string]string, defaultTag string) (string, string, error) {
+	if commit, ok := tags["go1"]; ok {
+		return "go1", commit, nil
+	}
+	if commit, ok := tags[defaultTag]; ok {
+		return defaultTag, commit, nil
+	}
+	return "", "", NotFoundError{Message: "Tag or branch not found."}
+}
+
+// expand replaces {k} in template with match[k] or subs[atoi(k)] if k is not in match.
+func expand(template string, match map[string]string, subs ...string) string {
+	var p []byte
+	var i int
+	for {
+		i = strings.Index(template, "{")
+		if i < 0 {
+			break
+		}
+		p = append(p, template[:i]...)
+		template = template[i+1:]
+		i = strings.Index(template, "}")
+		if s, ok := match[template[:i]]; ok {
+			p = append(p, s...)
+		} else {
+			j, _ := strconv.Atoi(template[:i])
+			p = append(p, subs[j]...)
+		}
+		template = template[i+1:]
+	}
+	p = append(p, template...)
+	return string(p)
+}
+
+var readmePat = regexp.MustCompile(`(?i)^readme(?:$|\.)`)
+
+// isDocFile returns true if a file with name n should be included in the
+// documentation.
+func isDocFile(n string) bool {
+	if strings.HasSuffix(n, ".go") && n[0] != '_' && n[0] != '.' {
+		return true
+	}
+	return readmePat.MatchString(n)
+}
+
+var linePat = regexp.MustCompile(`(?m)^//line .*$`)
+
+func OverwriteLineComments(p []byte) {
+	for _, m := range linePat.FindAllIndex(p, -1) {
+		for i := m[0] + 2; i < m[1]; i++ {
+			p[i] = ' '
+		}
+	}
+}
diff --git a/gosrc/util_test.go b/gosrc/util_test.go
new file mode 100644
index 0000000..2561e3a
--- /dev/null
+++ b/gosrc/util_test.go
@@ -0,0 +1,31 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package gosrc
+
+import (
+	"testing"
+)
+
+var lineCommentTests = []struct {
+	in, out string
+}{
+	{"", ""},
+	{"//line  1", "//       "},
+	{"//line x\n//line y", "//      \n//      "},
+	{"x\n//line ", "x\n//     "},
+}
+
+func TestOverwriteLineComments(t *testing.T) {
+	for _, tt := range lineCommentTests {
+		p := []byte(tt.in)
+		OverwriteLineComments(p)
+		s := string(p)
+		if s != tt.out {
+			t.Errorf("in=%q, actual=%q, expect=%q", tt.in, s, tt.out)
+		}
+	}
+}
diff --git a/gosrc/vcs.go b/gosrc/vcs.go
new file mode 100644
index 0000000..95e7c4a
--- /dev/null
+++ b/gosrc/vcs.go
@@ -0,0 +1,349 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// +build !appengine
+
+package gosrc
+
+import (
+	"bytes"
+	"errors"
+	"io/ioutil"
+	"log"
+	"net/http"
+	"os"
+	"os/exec"
+	"path"
+	"path/filepath"
+	"regexp"
+	"strings"
+	"time"
+)
+
+func init() {
+	addService(&service{
+		pattern: regexp.MustCompile(`^(?P<repo>(?:[a-z0-9.\-]+\.)+[a-z0-9.\-]+(?::[0-9]+)?/[A-Za-z0-9_.\-/]*?)\.(?P<vcs>bzr|git|hg|svn)(?P<dir>/[A-Za-z0-9_.\-/]*)?$`),
+		prefix:  "",
+		get:     getVCSDir,
+	})
+	getVCSDirFn = getVCSDir
+}
+
+const (
+	lsRemoteTimeout = 5 * time.Minute
+	cloneTimeout    = 10 * time.Minute
+	fetchTimeout    = 5 * time.Minute
+	checkoutTimeout = 1 * time.Minute
+)
+
+// Store temporary data in this directory.
+var TempDir = filepath.Join(os.TempDir(), "gddo")
+
+type urlTemplates struct {
+	re         *regexp.Regexp
+	fileBrowse string
+	project    string
+	line       string
+}
+
+var vcsServices = []*urlTemplates{
+	{
+		regexp.MustCompile(`^git\.gitorious\.org/(?P<repo>[^/]+/[^/]+)$`),
+		"https://gitorious.org/{repo}/blobs/{tag}/{dir}{0}",
+		"https://gitorious.org/{repo}",
+		"%s#line%d",
+	},
+	{
+		regexp.MustCompile(`^git\.oschina\.net/(?P<repo>[^/]+/[^/]+)$`),
+		"http://git.oschina.net/{repo}/blob/{tag}/{dir}{0}",
+		"http://git.oschina.net/{repo}",
+		"%s#L%d",
+	},
+	{
+		regexp.MustCompile(`^(?P<r1>[^.]+)\.googlesource\.com/(?P<r2>[^./]+)$`),
+		"https://{r1}.googlesource.com/{r2}/+/{tag}/{dir}{0}",
+		"https://{r1}.googlesource.com/{r2}/+/{tag}",
+		"%s#%d",
+	},
+	{
+		regexp.MustCompile(`^gitcafe\.com/(?P<repo>[^/]+/.[^/]+)$`),
+		"https://gitcafe.com/{repo}/tree/{tag}/{dir}{0}",
+		"https://gitcafe.com/{repo}",
+		"",
+	},
+}
+
+// lookupURLTemplate finds an expand() template, match map and line number
+// format for well known repositories.
+func lookupURLTemplate(repo, dir, tag string) (*urlTemplates, map[string]string) {
+	if strings.HasPrefix(dir, "/") {
+		dir = dir[1:] + "/"
+	}
+	for _, t := range vcsServices {
+		if m := t.re.FindStringSubmatch(repo); m != nil {
+			match := map[string]string{
+				"dir": dir,
+				"tag": tag,
+			}
+			for i, name := range t.re.SubexpNames() {
+				if name != "" {
+					match[name] = m[i]
+				}
+			}
+			return t, match
+		}
+	}
+	return &urlTemplates{}, nil
+}
+
+type vcsCmd struct {
+	schemes  []string
+	download func([]string, string, string) (string, string, error)
+}
+
+var vcsCmds = map[string]*vcsCmd{
+	"git": {
+		schemes:  []string{"http", "https", "ssh", "git"},
+		download: downloadGit,
+	},
+	"svn": {
+		schemes:  []string{"http", "https", "svn"},
+		download: downloadSVN,
+	},
+}
+
+var lsremoteRe = regexp.MustCompile(`(?m)^([0-9a-f]{40})\s+refs/(?:tags|heads)/(.+)$`)
+
+func downloadGit(schemes []string, repo, savedEtag string) (string, string, error) {
+	var p []byte
+	var scheme string
+	for i := range schemes {
+		cmd := exec.Command("git", "ls-remote", "--heads", "--tags", schemes[i]+"://"+repo+".git")
+		log.Println(strings.Join(cmd.Args, " "))
+		var err error
+		p, err = outputWithTimeout(cmd, lsRemoteTimeout)
+		if err == nil {
+			scheme = schemes[i]
+			break
+		}
+	}
+
+	if scheme == "" {
+		return "", "", NotFoundError{Message: "VCS not found"}
+	}
+
+	tags := make(map[string]string)
+	for _, m := range lsremoteRe.FindAllSubmatch(p, -1) {
+		tags[string(m[2])] = string(m[1])
+	}
+
+	tag, commit, err := bestTag(tags, "master")
+	if err != nil {
+		return "", "", err
+	}
+
+	etag := scheme + "-" + commit
+
+	if etag == savedEtag {
+		return "", "", ErrNotModified
+	}
+
+	dir := path.Join(TempDir, repo+".git")
+	p, err = ioutil.ReadFile(path.Join(dir, ".git/HEAD"))
+	switch {
+	case err != nil:
+		if err := os.MkdirAll(dir, 0777); err != nil {
+			return "", "", err
+		}
+		cmd := exec.Command("git", "clone", scheme+"://"+repo+".git", dir)
+		log.Println(strings.Join(cmd.Args, " "))
+		if err := runWithTimeout(cmd, cloneTimeout); err != nil {
+			return "", "", err
+		}
+	case string(bytes.TrimRight(p, "\n")) == commit:
+		return tag, etag, nil
+	default:
+		cmd := exec.Command("git", "fetch")
+		log.Println(strings.Join(cmd.Args, " "))
+		cmd.Dir = dir
+		if err := runWithTimeout(cmd, fetchTimeout); err != nil {
+			return "", "", err
+		}
+	}
+
+	cmd := exec.Command("git", "checkout", "--detach", "--force", commit)
+	cmd.Dir = dir
+	if err := runWithTimeout(cmd, checkoutTimeout); err != nil {
+		return "", "", err
+	}
+
+	return tag, etag, nil
+}
+
+func downloadSVN(schemes []string, repo, savedEtag string) (string, string, error) {
+	var scheme string
+	var revno string
+	for i := range schemes {
+		var err error
+		revno, err = getSVNRevision(schemes[i] + "://" + repo)
+		if err == nil {
+			scheme = schemes[i]
+			break
+		}
+	}
+
+	if scheme == "" {
+		return "", "", NotFoundError{Message: "VCS not found"}
+	}
+
+	etag := scheme + "-" + revno
+	if etag == savedEtag {
+		return "", "", ErrNotModified
+	}
+
+	dir := filepath.Join(TempDir, repo+".svn")
+	localRevno, err := getSVNRevision(dir)
+	switch {
+	case err != nil:
+		log.Printf("err: %v", err)
+		if err := os.MkdirAll(dir, 0777); err != nil {
+			return "", "", err
+		}
+		cmd := exec.Command("svn", "checkout", scheme+"://"+repo, "-r", revno, dir)
+		log.Println(strings.Join(cmd.Args, " "))
+		if err := runWithTimeout(cmd, cloneTimeout); err != nil {
+			return "", "", err
+		}
+	case localRevno != revno:
+		cmd := exec.Command("svn", "update", "-r", revno)
+		log.Println(strings.Join(cmd.Args, " "))
+		cmd.Dir = dir
+		if err := runWithTimeout(cmd, fetchTimeout); err != nil {
+			return "", "", err
+		}
+	}
+
+	return "", etag, nil
+}
+
+var svnrevRe = regexp.MustCompile(`(?m)^Last Changed Rev: ([0-9]+)$`)
+
+func getSVNRevision(target string) (string, error) {
+	cmd := exec.Command("svn", "info", target)
+	log.Println(strings.Join(cmd.Args, " "))
+	out, err := outputWithTimeout(cmd, lsRemoteTimeout)
+	if err != nil {
+		return "", err
+	}
+	match := svnrevRe.FindStringSubmatch(string(out))
+	if match != nil {
+		return match[1], nil
+	}
+	return "", NotFoundError{Message: "Last changed revision not found"}
+}
+
+func getVCSDir(client *http.Client, match map[string]string, etagSaved string) (*Directory, error) {
+	cmd := vcsCmds[match["vcs"]]
+	if cmd == nil {
+		return nil, NotFoundError{Message: expand("VCS not supported: {vcs}", match)}
+	}
+
+	scheme := match["scheme"]
+	if scheme == "" {
+		i := strings.Index(etagSaved, "-")
+		if i > 0 {
+			scheme = etagSaved[:i]
+		}
+	}
+
+	schemes := cmd.schemes
+	if scheme != "" {
+		for i := range cmd.schemes {
+			if cmd.schemes[i] == scheme {
+				schemes = cmd.schemes[i : i+1]
+				break
+			}
+		}
+	}
+
+	// Download and checkout.
+
+	tag, etag, err := cmd.download(schemes, match["repo"], etagSaved)
+	if err != nil {
+		return nil, err
+	}
+
+	// Find source location.
+
+	template, urlMatch := lookupURLTemplate(match["repo"], match["dir"], tag)
+
+	// Slurp source files.
+
+	d := path.Join(TempDir, expand("{repo}.{vcs}", match), match["dir"])
+	f, err := os.Open(d)
+	if err != nil {
+		if os.IsNotExist(err) {
+			err = NotFoundError{Message: err.Error()}
+		}
+		return nil, err
+	}
+	fis, err := f.Readdir(-1)
+	if err != nil {
+		return nil, err
+	}
+
+	var files []*File
+	var subdirs []string
+	for _, fi := range fis {
+		switch {
+		case fi.IsDir():
+			if isValidPathElement(fi.Name()) {
+				subdirs = append(subdirs, fi.Name())
+			}
+		case isDocFile(fi.Name()):
+			b, err := ioutil.ReadFile(path.Join(d, fi.Name()))
+			if err != nil {
+				return nil, err
+			}
+			files = append(files, &File{
+				Name:      fi.Name(),
+				BrowseURL: expand(template.fileBrowse, urlMatch, fi.Name()),
+				Data:      b,
+			})
+		}
+	}
+
+	return &Directory{
+		LineFmt:        template.line,
+		ProjectRoot:    expand("{repo}.{vcs}", match),
+		ProjectName:    path.Base(match["repo"]),
+		ProjectURL:     expand(template.project, urlMatch),
+		BrowseURL:      "",
+		Etag:           etag,
+		VCS:            match["vcs"],
+		Subdirectories: subdirs,
+		Files:          files,
+	}, nil
+}
+
+func runWithTimeout(cmd *exec.Cmd, timeout time.Duration) error {
+	if err := cmd.Start(); err != nil {
+		return err
+	}
+	t := time.AfterFunc(timeout, func() { cmd.Process.Kill() })
+	defer t.Stop()
+	return cmd.Wait()
+}
+
+func outputWithTimeout(cmd *exec.Cmd, timeout time.Duration) ([]byte, error) {
+	if cmd.Stdout != nil {
+		return nil, errors.New("exec: Stdout already set")
+	}
+	var b bytes.Buffer
+	cmd.Stdout = &b
+	err := runWithTimeout(cmd, timeout)
+	return b.Bytes(), err
+}
diff --git a/httputil/buster.go b/httputil/buster.go
new file mode 100644
index 0000000..9bbf243
--- /dev/null
+++ b/httputil/buster.go
@@ -0,0 +1,95 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil
+
+import (
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"strings"
+	"sync"
+)
+
+// busterWriter is the http.ResponseWriter used by CacheBusters.Get to
+// capture the status and headers of a synthetic HEAD request while the
+// body is discarded through the embedded io.Writer.
+type busterWriter struct {
+	headerMap http.Header
+	status    int
+	io.Writer
+}
+
+// Header implements http.ResponseWriter.
+func (bw *busterWriter) Header() http.Header {
+	return bw.headerMap
+}
+
+// WriteHeader implements http.ResponseWriter by recording the status code.
+func (bw *busterWriter) WriteHeader(status int) {
+	bw.status = status
+}
+
+// CacheBusters maintains a cache of cache busting tokens for static resources served by Handler.
+type CacheBusters struct {
+	Handler http.Handler
+
+	mu     sync.Mutex        // guards tokens
+	tokens map[string]string // path -> token; lazily initialized by Get
+}
+
+// sanitizeTokenRune maps r to a rune that is safe to embed in a URL query
+// parameter: control characters, space and non-ASCII runes are dropped, and
+// percent-encoding reserved characters are replaced with '-'.
+func sanitizeTokenRune(r rune) rune {
+	switch {
+	case r <= ' ', r >= 127:
+		return -1
+	case strings.ContainsRune("!#$&'()*+,/:;=?@[]", r):
+		return '-'
+	default:
+		return r
+	}
+}
+
+// Get returns the cache busting token for path. If the token is not already
+// cached, Get issues a HEAD request on handler and uses the response ETag and
+// Last-Modified headers to compute a token.
+func (cb *CacheBusters) Get(path string) string {
+	cb.mu.Lock()
+	if cb.tokens == nil {
+		cb.tokens = make(map[string]string)
+	}
+	token, ok := cb.tokens[path]
+	cb.mu.Unlock()
+	if ok {
+		return token
+	}
+
+	// Probe the handler with a synthetic HEAD request so the resource's
+	// validators can be read without transferring the body.
+	w := busterWriter{
+		Writer:    ioutil.Discard,
+		headerMap: make(http.Header),
+	}
+	r := &http.Request{URL: &url.URL{Path: path}, Method: "HEAD"}
+	cb.Handler.ServeHTTP(&w, r)
+
+	if w.status == 200 {
+		token = w.headerMap.Get("Etag")
+		if token == "" {
+			token = w.headerMap.Get("Last-Modified")
+		}
+		token = strings.Trim(token, `" `)
+		token = strings.Map(sanitizeTokenRune, token)
+	}
+
+	// Cache the result even when empty so failing paths are not re-probed.
+	cb.mu.Lock()
+	cb.tokens[path] = token
+	cb.mu.Unlock()
+
+	return token
+}
+
+// AppendQueryParam appends the token as a query parameter to path.
+func (cb *CacheBusters) AppendQueryParam(path string, name string) string {
+	if token := cb.Get(path); token != "" {
+		return path + "?" + name + "=" + token
+	}
+	return path
+}
diff --git a/httputil/buster_test.go b/httputil/buster_test.go
new file mode 100644
index 0000000..0d2bb83
--- /dev/null
+++ b/httputil/buster_test.go
@@ -0,0 +1,29 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil
+
+import (
+	"net/http"
+	"testing"
+)
+
+// TestCacheBusters verifies that a token can be extracted from both a
+// stock http.FileServer and a StaticServer FileHandler, using this test
+// file itself as the served fixture.
+func TestCacheBusters(t *testing.T) {
+	cbs := CacheBusters{Handler: http.FileServer(http.Dir("."))}
+
+	token := cbs.Get("/buster_test.go")
+	if token == "" {
+		t.Errorf("could not extract token from http.FileServer")
+	}
+
+	var ss StaticServer
+	cbs = CacheBusters{Handler: ss.FileHandler("buster_test.go")}
+
+	// The FileHandler path is fixed at construction, so any request path works.
+	token = cbs.Get("/xxx")
+	if token == "" {
+		t.Errorf("could not extract token from StaticServer FileHandler")
+	}
+}
diff --git a/httputil/header/header.go b/httputil/header/header.go
new file mode 100644
index 0000000..bd30b33
--- /dev/null
+++ b/httputil/header/header.go
@@ -0,0 +1,297 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package header provides functions for parsing HTTP headers.
+package header
+
+import (
+	"net/http"
+	"strings"
+	"time"
+)
+
+// Octet types from RFC 2616.
+var octetTypes [256]octetType
+
+// octetType is a bit set classifying a single octet per the grammar below.
+type octetType byte
+
+const (
+	isToken octetType = 1 << iota // octet may appear in an HTTP token
+	isSpace                       // octet is linear whitespace
+)
+
+// init populates the octetTypes lookup table used by the parsers in this
+// package.
+func init() {
+	// OCTET      = <any 8-bit sequence of data>
+	// CHAR       = <any US-ASCII character (octets 0 - 127)>
+	// CTL        = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
+	// CR         = <US-ASCII CR, carriage return (13)>
+	// LF         = <US-ASCII LF, linefeed (10)>
+	// SP         = <US-ASCII SP, space (32)>
+	// HT         = <US-ASCII HT, horizontal-tab (9)>
+	// <">        = <US-ASCII double-quote mark (34)>
+	// CRLF       = CR LF
+	// LWS        = [CRLF] 1*( SP | HT )
+	// TEXT       = <any OCTET except CTLs, but including LWS>
+	// separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
+	//              | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
+	// token      = 1*<any CHAR except CTLs or separators>
+	// qdtext     = <any TEXT except <">>
+
+	for c := 0; c < 256; c++ {
+		var t octetType
+		isCtl := c <= 31 || c == 127
+		isChar := 0 <= c && c <= 127
+		isSeparator := strings.IndexRune(" \t\"(),/:;<=>?@[]\\{}", rune(c)) >= 0
+		if strings.IndexRune(" \t\r\n", rune(c)) >= 0 {
+			t |= isSpace
+		}
+		if isChar && !isCtl && !isSeparator {
+			t |= isToken
+		}
+		octetTypes[c] = t
+	}
+}
+
+// Copy returns a shallow copy of the header: the map itself is duplicated,
+// but the value slices are shared with the original.
+func Copy(header http.Header) http.Header {
+	result := make(http.Header, len(header))
+	for name, values := range header {
+		result[name] = values
+	}
+	return result
+}
+
+// timeLayouts lists the three date formats accepted for HTTP dates,
+// preferred form first. http.TimeFormat is "Mon, 02 Jan 2006 15:04:05 GMT".
+var timeLayouts = []string{http.TimeFormat, time.RFC850, time.ANSIC}
+
+// ParseTime parses the header as time. The zero value is returned if the
+// header is not present or there is an error parsing the
+// header.
+func ParseTime(header http.Header, key string) time.Time {
+	s := header.Get(key)
+	if s == "" {
+		return time.Time{}
+	}
+	for _, layout := range timeLayouts {
+		if t, err := time.Parse(layout, s); err == nil {
+			return t.UTC()
+		}
+	}
+	return time.Time{}
+}
+
+// ParseList parses a comma separated list of values. Commas are ignored in
+// quoted strings. Quoted values are not unescaped or unquoted. Whitespace is
+// trimmed.
+func ParseList(header http.Header, key string) []string {
+	var result []string
+	for _, s := range header[http.CanonicalHeaderKey(key)] {
+		// begin:end delimit the current element; begin skips leading
+		// whitespace and end grows only over non-space content, which is
+		// how trailing whitespace is trimmed.
+		begin := 0
+		end := 0
+		escape := false
+		quote := false
+		for i := 0; i < len(s); i++ {
+			b := s[i]
+			switch {
+			case escape:
+				// Byte after a backslash inside a quoted string.
+				escape = false
+				end = i + 1
+			case quote:
+				switch b {
+				case '\\':
+					escape = true
+				case '"':
+					quote = false
+				}
+				end = i + 1
+			case b == '"':
+				quote = true
+				end = i + 1
+			case octetTypes[b]&isSpace != 0:
+				// Whitespace before any content advances both markers.
+				if begin == end {
+					begin = i + 1
+					end = begin
+				}
+			case b == ',':
+				// Element separator (only reached outside quotes).
+				if begin < end {
+					result = append(result, s[begin:end])
+				}
+				begin = i + 1
+				end = begin
+			default:
+				end = i + 1
+			}
+		}
+		if begin < end {
+			result = append(result, s[begin:end])
+		}
+	}
+	return result
+}
+
+// ParseValueAndParams parses a comma separated list of values with optional
+// semicolon separated name-value pairs. Content-Type and Content-Disposition
+// headers are in this format.
+func ParseValueAndParams(header http.Header, key string) (value string, params map[string]string) {
+	params = make(map[string]string)
+	s := header.Get(key)
+	value, s = expectTokenSlash(s)
+	if value == "" {
+		return
+	}
+	value = strings.ToLower(value)
+	s = skipSpace(s)
+	// Each parameter is ";" token "=" (token | quoted-string). Parsing stops
+	// at the first malformed parameter, returning what was collected so far.
+	for strings.HasPrefix(s, ";") {
+		var pkey string
+		pkey, s = expectToken(skipSpace(s[1:]))
+		if pkey == "" {
+			return
+		}
+		if !strings.HasPrefix(s, "=") {
+			return
+		}
+		var pvalue string
+		pvalue, s = expectTokenOrQuoted(s[1:])
+		if pvalue == "" {
+			return
+		}
+		// Parameter names are case-insensitive; values keep their case.
+		pkey = strings.ToLower(pkey)
+		params[pkey] = pvalue
+		s = skipSpace(s)
+	}
+	return
+}
+
+// AcceptSpec describes one element of an Accept* header: a value and its
+// quality factor (1.0 when no q parameter is given).
+type AcceptSpec struct {
+	Value string
+	Q     float64
+}
+
+// ParseAccept parses Accept* headers.
+func ParseAccept(header http.Header, key string) (specs []AcceptSpec) {
+loop:
+	// NOTE(review): key is looked up verbatim (no CanonicalHeaderKey), so
+	// callers must pass the canonical form — confirm against callers.
+	for _, s := range header[key] {
+		for {
+			var spec AcceptSpec
+			spec.Value, s = expectTokenSlash(s)
+			if spec.Value == "" {
+				continue loop
+			}
+			// Quality defaults to 1 when no ";q=" parameter is present.
+			spec.Q = 1.0
+			s = skipSpace(s)
+			if strings.HasPrefix(s, ";") {
+				s = skipSpace(s[1:])
+				if !strings.HasPrefix(s, "q=") {
+					continue loop
+				}
+				spec.Q, s = expectQuality(s[2:])
+				if spec.Q < 0.0 {
+					// Malformed quality; abandon the rest of this header value.
+					continue loop
+				}
+			}
+			specs = append(specs, spec)
+			s = skipSpace(s)
+			if !strings.HasPrefix(s, ",") {
+				continue loop
+			}
+			s = skipSpace(s[1:])
+		}
+	}
+	return
+}
+
+// skipSpace returns s with any leading linear-whitespace octets removed.
+func skipSpace(s string) (rest string) {
+	n := 0
+	for n < len(s) && octetTypes[s[n]]&isSpace != 0 {
+		n++
+	}
+	return s[n:]
+}
+
+// expectToken splits s into a leading HTTP token (possibly empty) and the
+// remainder of the string.
+func expectToken(s string) (token, rest string) {
+	n := 0
+	for n < len(s) && octetTypes[s[n]]&isToken != 0 {
+		n++
+	}
+	return s[:n], s[n:]
+}
+
+// expectTokenSlash is like expectToken but also accepts '/', so media types
+// such as "text/html" parse as a single value.
+func expectTokenSlash(s string) (token, rest string) {
+	n := 0
+	for n < len(s) {
+		if b := s[n]; octetTypes[b]&isToken == 0 && b != '/' {
+			break
+		}
+		n++
+	}
+	return s[:n], s[n:]
+}
+
+// expectQuality parses a quality value ("0", "1", or either followed by a
+// fractional part) from the start of s. It returns -1 and "" on malformed
+// input.
+func expectQuality(s string) (q float64, rest string) {
+	if len(s) == 0 {
+		return -1, ""
+	}
+	switch s[0] {
+	case '0':
+		q = 0
+	case '1':
+		q = 1
+	default:
+		return -1, ""
+	}
+	s = s[1:]
+	if !strings.HasPrefix(s, ".") {
+		return q, s
+	}
+	// Accumulate the fractional digits as numerator/denominator.
+	s = s[1:]
+	num, den := 0, 1
+	i := 0
+	for i < len(s) && s[i] >= '0' && s[i] <= '9' {
+		num = num*10 + int(s[i]) - '0'
+		den *= 10
+		i++
+	}
+	return q + float64(num)/float64(den), s[i:]
+}
+
+// expectTokenOrQuoted parses a token or a quoted-string from the start of s,
+// returning the (unescaped) value and the remainder. An unterminated quoted
+// string yields "", "".
+func expectTokenOrQuoted(s string) (value string, rest string) {
+	if !strings.HasPrefix(s, "\"") {
+		return expectToken(s)
+	}
+	s = s[1:]
+	for i := 0; i < len(s); i++ {
+		switch s[i] {
+		case '"':
+			// Fast path: no backslash escapes, return the raw contents.
+			return s[:i], s[i+1:]
+		case '\\':
+			// Slow path: copy into p while resolving backslash escapes.
+			p := make([]byte, len(s)-1)
+			j := copy(p, s[:i])
+			escape := true
+			for i = i + 1; i < len(s); i++ {
+				b := s[i]
+				switch {
+				case escape:
+					escape = false
+					p[j] = b
+					j += 1
+				case b == '\\':
+					escape = true
+				case b == '"':
+					return string(p[:j]), s[i+1:]
+				default:
+					p[j] = b
+					j += 1
+				}
+			}
+			// Ran off the end while still inside the quoted string.
+			return "", ""
+		}
+	}
+	return "", ""
+}
diff --git a/httputil/header/header_test.go b/httputil/header/header_test.go
new file mode 100644
index 0000000..ee351ed
--- /dev/null
+++ b/httputil/header/header_test.go
@@ -0,0 +1,138 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package header
+
+import (
+	"net/http"
+	"reflect"
+	"testing"
+	"time"
+)
+
+var getHeaderListTests = []struct {
+	s string
+	l []string
+}{
+	{s: `a`, l: []string{`a`}},
+	{s: `a, b , c `, l: []string{`a`, `b`, `c`}},
+	{s: `a,, b , , c `, l: []string{`a`, `b`, `c`}},
+	{s: `a,b,c`, l: []string{`a`, `b`, `c`}},
+	{s: ` a b, c d `, l: []string{`a b`, `c d`}},
+	{s: `"a, b, c", d `, l: []string{`"a, b, c"`, "d"}},
+	{s: `","`, l: []string{`","`}},
+	{s: `"\""`, l: []string{`"\""`}},
+	{s: `" "`, l: []string{`" "`}},
+}
+
+// TestGetHeaderList runs ParseList over the table above, covering quoting,
+// escaping, empty elements and whitespace trimming.
+func TestGetHeaderList(t *testing.T) {
+	for _, tt := range getHeaderListTests {
+		header := http.Header{"Foo": {tt.s}}
+		if l := ParseList(header, "foo"); !reflect.DeepEqual(tt.l, l) {
+			t.Errorf("ParseList for %q = %q, want %q", tt.s, l, tt.l)
+		}
+	}
+}
+
+var parseValueAndParamsTests = []struct {
+	s      string
+	value  string
+	params map[string]string
+}{
+	{`text/html`, "text/html", map[string]string{}},
+	{`text/html  `, "text/html", map[string]string{}},
+	{`text/html ; `, "text/html", map[string]string{}},
+	{`tExt/htMl`, "text/html", map[string]string{}},
+	{`tExt/htMl; fOO=";"; hellO=world`, "text/html", map[string]string{
+		"hello": "world",
+		"foo":   `;`,
+	}},
+	{`text/html; foo=bar, hello=world`, "text/html", map[string]string{"foo": "bar"}},
+	{`text/html ; foo=bar `, "text/html", map[string]string{"foo": "bar"}},
+	{`text/html ;foo=bar `, "text/html", map[string]string{"foo": "bar"}},
+	{`text/html; foo="b\ar"`, "text/html", map[string]string{"foo": "bar"}},
+	{`text/html; foo="bar\"baz\"qux"`, "text/html", map[string]string{"foo": `bar"baz"qux`}},
+	{`text/html; foo="b,ar"`, "text/html", map[string]string{"foo": "b,ar"}},
+	{`text/html; foo="b;ar"`, "text/html", map[string]string{"foo": "b;ar"}},
+	{`text/html; FOO="bar"`, "text/html", map[string]string{"foo": "bar"}},
+	{`form-data; filename="file.txt"; name=file`, "form-data", map[string]string{"filename": "file.txt", "name": "file"}},
+}
+
+// TestParseValueAndParams checks value lowercasing, parameter parsing and
+// quoted-string unescaping against the table above.
+func TestParseValueAndParams(t *testing.T) {
+	for _, tt := range parseValueAndParamsTests {
+		header := http.Header{"Content-Type": {tt.s}}
+		value, params := ParseValueAndParams(header, "Content-Type")
+		if value != tt.value {
+			t.Errorf("%q, value=%q, want %q", tt.s, value, tt.value)
+		}
+		if !reflect.DeepEqual(params, tt.params) {
+			t.Errorf("%q, param=%#v, want %#v", tt.s, params, tt.params)
+		}
+	}
+}
+
+var parseTimeValidTests = []string{
+	"Sun, 06 Nov 1994 08:49:37 GMT",
+	"Sunday, 06-Nov-94 08:49:37 GMT",
+	"Sun Nov  6 08:49:37 1994",
+}
+
+var parseTimeInvalidTests = []string{
+	"junk",
+}
+
+// TestParseTime checks that all three permitted HTTP date formats parse to
+// the same instant and that malformed values yield the zero time.
+func TestParseTime(t *testing.T) {
+	expected := time.Date(1994, 11, 6, 8, 49, 37, 0, time.UTC)
+	for _, s := range parseTimeValidTests {
+		header := http.Header{"Date": {s}}
+		actual := ParseTime(header, "Date")
+		if actual != expected {
+			// Message fixed: the function under test is ParseTime, not GetTime.
+			t.Errorf("ParseTime(%q)=%v, want %v", s, actual, expected)
+		}
+	}
+	for _, s := range parseTimeInvalidTests {
+		header := http.Header{"Date": {s}}
+		actual := ParseTime(header, "Date")
+		if !actual.IsZero() {
+			t.Errorf("ParseTime(%q) did not return zero", s)
+		}
+	}
+}
+
+var parseAcceptTests = []struct {
+	s        string
+	expected []AcceptSpec
+}{
+	{"text/html", []AcceptSpec{{"text/html", 1}}},
+	{"text/html; q=0", []AcceptSpec{{"text/html", 0}}},
+	{"text/html; q=0.0", []AcceptSpec{{"text/html", 0}}},
+	{"text/html; q=1", []AcceptSpec{{"text/html", 1}}},
+	{"text/html; q=1.0", []AcceptSpec{{"text/html", 1}}},
+	{"text/html; q=0.1", []AcceptSpec{{"text/html", 0.1}}},
+	{"text/html;q=0.1", []AcceptSpec{{"text/html", 0.1}}},
+	{"text/html, text/plain", []AcceptSpec{{"text/html", 1}, {"text/plain", 1}}},
+	{"text/html; q=0.1, text/plain", []AcceptSpec{{"text/html", 0.1}, {"text/plain", 1}}},
+	{"iso-8859-5, unicode-1-1;q=0.8,iso-8859-1", []AcceptSpec{{"iso-8859-5", 1}, {"unicode-1-1", 0.8}, {"iso-8859-1", 1}}},
+	{"iso-8859-1", []AcceptSpec{{"iso-8859-1", 1}}},
+	{"*", []AcceptSpec{{"*", 1}}},
+	{"da, en-gb;q=0.8, en;q=0.7", []AcceptSpec{{"da", 1}, {"en-gb", 0.8}, {"en", 0.7}}},
+	{"da, q, en-gb;q=0.8", []AcceptSpec{{"da", 1}, {"q", 1}, {"en-gb", 0.8}}},
+	{"image/png, image/*;q=0.5", []AcceptSpec{{"image/png", 1}, {"image/*", 0.5}}},
+
+	// bad cases
+	{"value1; q=0.1.2", []AcceptSpec{{"value1", 0.1}}},
+	{"da, en-gb;q=foo", []AcceptSpec{{"da", 1}}},
+}
+
+// TestParseAccept runs ParseAccept over the table above, including the
+// malformed-quality cases at the end.
+func TestParseAccept(t *testing.T) {
+	for _, tt := range parseAcceptTests {
+		header := http.Header{"Accept": {tt.s}}
+		actual := ParseAccept(header, "Accept")
+		if !reflect.DeepEqual(actual, tt.expected) {
+			t.Errorf("ParseAccept(h, %q)=%v, want %v", tt.s, actual, tt.expected)
+		}
+	}
+}
diff --git a/httputil/httputil.go b/httputil/httputil.go
new file mode 100644
index 0000000..b077edf
--- /dev/null
+++ b/httputil/httputil.go
@@ -0,0 +1,23 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package httputil is a toolkit for the Go net/http package.
+package httputil
+
+import (
+	"net"
+	"net/http"
+)
+
+// StripPort removes the port specification from an address.
+func StripPort(s string) string {
+	host, _, err := net.SplitHostPort(s)
+	if err != nil {
+		// No port present (or unparseable); return the address unchanged.
+		return s
+	}
+	return host
+}
+
+// Error is the type of a function used to generate custom error responses.
+type Error func(w http.ResponseWriter, r *http.Request, status int, err error)
diff --git a/httputil/negotiate.go b/httputil/negotiate.go
new file mode 100644
index 0000000..6af3e4c
--- /dev/null
+++ b/httputil/negotiate.go
@@ -0,0 +1,79 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil
+
+import (
+	"github.com/golang/gddo/httputil/header"
+	"net/http"
+	"strings"
+)
+
+// NegotiateContentEncoding returns the best offered content encoding for the
+// request's Accept-Encoding header. If two offers match with equal weight,
+// then the offer earlier in the list is preferred. If no offers are
+// acceptable, then "" is returned.
+func NegotiateContentEncoding(r *http.Request, offers []string) string {
+	bestOffer := "identity"
+	bestQ := -1.0
+	specs := header.ParseAccept(r.Header, "Accept-Encoding")
+	for _, offer := range offers {
+		for _, spec := range specs {
+			if spec.Q > bestQ &&
+				(spec.Value == "*" || spec.Value == offer) {
+				bestQ = spec.Q
+				bestOffer = offer
+			}
+		}
+	}
+	// A best quality of zero means the client explicitly refused every
+	// matching offer (e.g. "*;q=0"), so nothing is acceptable.
+	if bestQ == 0 {
+		bestOffer = ""
+	}
+	return bestOffer
+}
+
+// NegotiateContentType returns the best offered content type for the request's
+// Accept header. If two offers match with equal weight, then the more specific
+// offer is preferred.  For example, text/* trumps */*. If two offers match
+// with equal weight and specificity, then the offer earlier in the list is
+// preferred. If no offers match, then defaultOffer is returned.
+func NegotiateContentType(r *http.Request, offers []string, defaultOffer string) string {
+	bestOffer := defaultOffer
+	bestQ := -1.0
+	// bestWild tracks specificity: 3 = none yet, 2 = */*, 1 = type/*, 0 = exact.
+	bestWild := 3
+	specs := header.ParseAccept(r.Header, "Accept")
+	for _, offer := range offers {
+		for _, spec := range specs {
+			switch {
+			case spec.Q == 0.0:
+				// ignore
+			case spec.Q < bestQ:
+				// worse than the current best match; skip
+			case spec.Value == "*/*":
+				if spec.Q > bestQ || bestWild > 2 {
+					bestQ = spec.Q
+					bestWild = 2
+					bestOffer = offer
+				}
+			case strings.HasSuffix(spec.Value, "/*"):
+				if strings.HasPrefix(offer, spec.Value[:len(spec.Value)-1]) &&
+					(spec.Q > bestQ || bestWild > 1) {
+					bestQ = spec.Q
+					bestWild = 1
+					bestOffer = offer
+				}
+			default:
+				if spec.Value == offer &&
+					(spec.Q > bestQ || bestWild > 0) {
+					bestQ = spec.Q
+					bestWild = 0
+					bestOffer = offer
+				}
+			}
+		}
+	}
+	return bestOffer
+}
diff --git a/httputil/negotiate_test.go b/httputil/negotiate_test.go
new file mode 100644
index 0000000..24bf4be
--- /dev/null
+++ b/httputil/negotiate_test.go
@@ -0,0 +1,71 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil_test
+
+import (
+	"github.com/golang/gddo/httputil"
+	"net/http"
+	"testing"
+)
+
+var negotiateContentEncodingTests = []struct {
+	s      string
+	offers []string
+	expect string
+}{
+	{"", []string{"identity", "gzip"}, "identity"},
+	{"*;q=0", []string{"identity", "gzip"}, ""},
+	{"gzip", []string{"identity", "gzip"}, "gzip"},
+}
+
+// TestNegotiateContentEncoding runs NegotiateContentEncoding over the table
+// above. (Function name fixed from the "ContentEnoding" typo; test names
+// have no external callers.)
+func TestNegotiateContentEncoding(t *testing.T) {
+	for _, tt := range negotiateContentEncodingTests {
+		r := &http.Request{Header: http.Header{"Accept-Encoding": {tt.s}}}
+		actual := httputil.NegotiateContentEncoding(r, tt.offers)
+		if actual != tt.expect {
+			t.Errorf("NegotiateContentEncoding(%q, %#v)=%q, want %q", tt.s, tt.offers, actual, tt.expect)
+		}
+	}
+}
+
+var negotiateContentTypeTests = []struct {
+	s            string
+	offers       []string
+	defaultOffer string
+	expect       string
+}{
+	{"text/html, */*;q=0", []string{"x/y"}, "", ""},
+	{"text/html, */*", []string{"x/y"}, "", "x/y"},
+	{"text/html, image/png", []string{"text/html", "image/png"}, "", "text/html"},
+	{"text/html, image/png", []string{"image/png", "text/html"}, "", "image/png"},
+	{"text/html, image/png; q=0.5", []string{"image/png"}, "", "image/png"},
+	{"text/html, image/png; q=0.5", []string{"text/html"}, "", "text/html"},
+	{"text/html, image/png; q=0.5", []string{"foo/bar"}, "", ""},
+	{"text/html, image/png; q=0.5", []string{"image/png", "text/html"}, "", "text/html"},
+	{"text/html, image/png; q=0.5", []string{"text/html", "image/png"}, "", "text/html"},
+	{"text/html;q=0.5, image/png", []string{"image/png"}, "", "image/png"},
+	{"text/html;q=0.5, image/png", []string{"text/html"}, "", "text/html"},
+	{"text/html;q=0.5, image/png", []string{"image/png", "text/html"}, "", "image/png"},
+	{"text/html;q=0.5, image/png", []string{"text/html", "image/png"}, "", "image/png"},
+	{"image/png, image/*;q=0.5", []string{"image/jpg", "image/png"}, "", "image/png"},
+	{"image/png, image/*;q=0.5", []string{"image/jpg"}, "", "image/jpg"},
+	{"image/png, image/*;q=0.5", []string{"image/jpg", "image/gif"}, "", "image/jpg"},
+	{"image/png, image/*", []string{"image/jpg", "image/gif"}, "", "image/jpg"},
+	{"image/png, image/*", []string{"image/gif", "image/jpg"}, "", "image/gif"},
+	{"image/png, image/*", []string{"image/gif", "image/png"}, "", "image/png"},
+	{"image/png, image/*", []string{"image/png", "image/gif"}, "", "image/png"},
+}
+
+// TestNegotiateContentType runs NegotiateContentType over the table above,
+// covering quality ordering, wildcard specificity and offer-order tiebreaks.
+func TestNegotiateContentType(t *testing.T) {
+	for _, tt := range negotiateContentTypeTests {
+		r := &http.Request{Header: http.Header{"Accept": {tt.s}}}
+		actual := httputil.NegotiateContentType(r, tt.offers, tt.defaultOffer)
+		if actual != tt.expect {
+			t.Errorf("NegotiateContentType(%q, %#v, %q)=%q, want %q", tt.s, tt.offers, tt.defaultOffer, actual, tt.expect)
+		}
+	}
+}
diff --git a/httputil/respbuf.go b/httputil/respbuf.go
new file mode 100644
index 0000000..8641c0f
--- /dev/null
+++ b/httputil/respbuf.go
@@ -0,0 +1,52 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil
+
+import (
+	"bytes"
+	"net/http"
+	"strconv"
+)
+
+// ResponseBuffer is an http.ResponseWriter that buffers the status, headers
+// and body of a response in memory for later replay with WriteTo.
+type ResponseBuffer struct {
+	buf    bytes.Buffer
+	status int
+	header http.Header
+}
+
+// Write implements http.ResponseWriter by appending p to the buffered body.
+func (rb *ResponseBuffer) Write(p []byte) (int, error) {
+	return rb.buf.Write(p)
+}
+
+// WriteHeader implements http.ResponseWriter by recording the status code.
+func (rb *ResponseBuffer) WriteHeader(status int) {
+	rb.status = status
+}
+
+// Header implements http.ResponseWriter, lazily allocating the header map.
+func (rb *ResponseBuffer) Header() http.Header {
+	if rb.header == nil {
+		rb.header = make(http.Header)
+	}
+	return rb.header
+}
+
+// WriteTo replays the buffered response to w: headers first, then a
+// Content-Length for non-empty bodies, the recorded status (if any), and
+// finally the body itself.
+func (rb *ResponseBuffer) WriteTo(w http.ResponseWriter) error {
+	for k, v := range rb.header {
+		w.Header()[k] = v
+	}
+	if rb.buf.Len() > 0 {
+		w.Header().Set("Content-Length", strconv.Itoa(rb.buf.Len()))
+	}
+	if rb.status != 0 {
+		w.WriteHeader(rb.status)
+	}
+	if rb.buf.Len() > 0 {
+		if _, err := w.Write(rb.buf.Bytes()); err != nil {
+			return err
+		}
+	}
+	return nil
+}
diff --git a/httputil/static.go b/httputil/static.go
new file mode 100644
index 0000000..6610dde
--- /dev/null
+++ b/httputil/static.go
@@ -0,0 +1,265 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil
+
+import (
+	"bytes"
+	"crypto/sha1"
+	"errors"
+	"fmt"
+	"github.com/golang/gddo/httputil/header"
+	"io"
+	"io/ioutil"
+	"mime"
+	"net/http"
+	"os"
+	"path"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+)
+
+// StaticServer serves static files.
+type StaticServer struct {
+	// Dir specifies the location of the directory containing the files to serve.
+	Dir string
+
+	// MaxAge specifies the maximum age for the cache control and expiration
+	// headers.
+	MaxAge time.Duration
+
+	// Error specifies the function used to generate error responses. If Error
+	// is nil, then http.Error is used to generate error responses.
+	Error Error
+
+	// MIMETypes is a map from file extensions to MIME types.
+	MIMETypes map[string]string
+
+	mu    sync.Mutex        // guards etags
+	etags map[string]string // resource id -> computed ETag; filled lazily
+}
+
+// resolve maps the slash-separated fname to a filesystem path under ss.Dir
+// (defaulting to "."). Absolute paths are rejected so a handler can never
+// be constructed outside the served tree.
+func (ss *StaticServer) resolve(fname string) string {
+	if path.IsAbs(fname) {
+		panic("Absolute path not allowed when creating a StaticServer handler")
+	}
+	dir := ss.Dir
+	if dir == "" {
+		dir = "."
+	}
+	return filepath.Join(dir, filepath.FromSlash(fname))
+}
+
+func (ss *StaticServer) mimeType(fname string) string {
+	ext := path.Ext(fname)
+	var mimeType string
+	if ss.MIMETypes != nil {
+		mimeType = ss.MIMETypes[ext]
+	}
+	if mimeType == "" {
+		mimeType = mime.TypeByExtension(ext)
+	}
+	if mimeType == "" {
+		mimeType = "application/octet-stream"
+	}
+	return mimeType
+}
+
+// openFile opens fname and returns a reader for it along with its size and
+// MIME type. Directories, symlinks and other special files are rejected so
+// the handler only ever streams regular files.
+func (ss *StaticServer) openFile(fname string) (io.ReadCloser, int64, string, error) {
+	f, err := os.Open(fname)
+	if err != nil {
+		return nil, 0, "", err
+	}
+	fi, err := f.Stat()
+	if err != nil {
+		f.Close()
+		return nil, 0, "", err
+	}
+	// Any of these mode bits means fname is not a plain file.
+	const modeType = os.ModeDir | os.ModeSymlink | os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
+	if fi.Mode()&modeType != 0 {
+		f.Close()
+		return nil, 0, "", errors.New("not a regular file")
+	}
+	return f, fi.Size(), ss.mimeType(fname), nil
+}
+
+// FileHandler returns a handler that serves a single file. The file is
+// specified by a slash separated path relative to the static server's Dir
+// field.
+func (ss *StaticServer) FileHandler(fileName string) http.Handler {
+	// The resolved path and cache id are fixed at construction time; the
+	// request path is ignored by both callbacks.
+	id := fileName
+	fileName = ss.resolve(fileName)
+	return &staticHandler{
+		ss:   ss,
+		id:   func(_ string) string { return id },
+		open: func(_ string) (io.ReadCloser, int64, string, error) { return ss.openFile(fileName) },
+	}
+}
+
+// DirectoryHandler returns a handler that serves files from a directory tree.
+// The directory is specified by a slash separated path relative to the static
+// server's Dir field.
+func (ss *StaticServer) DirectoryHandler(prefix, dirName string) http.Handler {
+	if !strings.HasSuffix(prefix, "/") {
+		prefix += "/"
+	}
+	idBase := dirName
+	dirName = ss.resolve(dirName)
+	return &staticHandler{
+		ss: ss,
+		id: func(p string) string {
+			// Paths outside the prefix collapse to ".", a shared cache key
+			// for requests this handler cannot serve.
+			if !strings.HasPrefix(p, prefix) {
+				return "."
+			}
+			return path.Join(idBase, p[len(prefix):])
+		},
+		open: func(p string) (io.ReadCloser, int64, string, error) {
+			if !strings.HasPrefix(p, prefix) {
+				return nil, 0, "", errors.New("request url does not match directory prefix")
+			}
+			p = p[len(prefix):]
+			return ss.openFile(filepath.Join(dirName, filepath.FromSlash(p)))
+		},
+	}
+}
+
+// FilesHandler returns a handler that serves the concatenation of the
+// specified files. The files are specified by slash separated paths relative
+// to the static server's Dir field.
+//
+// NOTE(review): fileNames must be non-empty; calling with no arguments
+// panics on the fileNames[0] access below — confirm callers guarantee this.
+func (ss *StaticServer) FilesHandler(fileNames ...string) http.Handler {
+
+	// todo: cache concatenated files on disk and serve from there.
+
+	mimeType := ss.mimeType(fileNames[0])
+	var buf []byte
+	var openErr error
+
+	// Concatenate everything up front; on failure the buffer is discarded
+	// and openErr is reported to every request via the open callback.
+	for _, fileName := range fileNames {
+		p, err := ioutil.ReadFile(ss.resolve(fileName))
+		if err != nil {
+			openErr = err
+			buf = nil
+			break
+		}
+		buf = append(buf, p...)
+	}
+
+	id := strings.Join(fileNames, " ")
+
+	return &staticHandler{
+		ss: ss,
+		id: func(_ string) string { return id },
+		open: func(p string) (io.ReadCloser, int64, string, error) {
+			return ioutil.NopCloser(bytes.NewReader(buf)), int64(len(buf)), mimeType, openErr
+		},
+	}
+}
+
+// staticHandler serves one logical resource whose identity and contents are
+// abstracted behind the id and open callbacks, letting a single handler
+// implementation back FileHandler, DirectoryHandler and FilesHandler.
+type staticHandler struct {
+	id   func(fname string) string // stable cache key for a request path
+	open func(p string) (io.ReadCloser, int64, string, error)
+	ss   *StaticServer
+}
+
+// error writes an error response for status. It delegates to the server's
+// configured Error function when one is set (previously the field was
+// documented but never consulted) and falls back to http.Error otherwise.
+func (h *staticHandler) error(w http.ResponseWriter, r *http.Request, status int, err error) {
+	if h.ss.Error != nil {
+		h.ss.Error(w, r, status, err)
+		return
+	}
+	http.Error(w, http.StatusText(status), status)
+}
+
+// etag returns the strong entity tag for the resource identified by p,
+// computing a SHA-1 of its contents on first use and caching the result on
+// the server keyed by h.id(p).
+func (h *staticHandler) etag(p string) (string, error) {
+	id := h.id(p)
+
+	h.ss.mu.Lock()
+	if h.ss.etags == nil {
+		h.ss.etags = make(map[string]string)
+	}
+	etag := h.ss.etags[id]
+	h.ss.mu.Unlock()
+
+	if etag != "" {
+		return etag, nil
+	}
+
+	// todo: if a concurrent goroutine is calculating the hash, then wait for
+	// it instead of computing it again here.
+
+	rc, _, _, err := h.open(p)
+	if err != nil {
+		return "", err
+	}
+
+	defer rc.Close()
+
+	w := sha1.New()
+	_, err = io.Copy(w, rc)
+	if err != nil {
+		return "", err
+	}
+
+	// Quoted hex digest is the strong-ETag wire format.
+	etag = fmt.Sprintf(`"%x"`, w.Sum(nil))
+
+	h.ss.mu.Lock()
+	h.ss.etags[id] = etag
+	h.ss.mu.Unlock()
+
+	return etag, nil
+}
+
+// ServeHTTP serves the static resource with cache validation: it redirects
+// unclean paths, answers matching If-None-Match with 304 Not Modified, and
+// otherwise sends the full entity (headers only for HEAD requests).
+func (h *staticHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	p := path.Clean(r.URL.Path)
+	if p != r.URL.Path {
+		http.Redirect(w, r, p, 301)
+		return
+	}
+
+	etag, err := h.etag(p)
+	if err != nil {
+		h.error(w, r, http.StatusNotFound, err)
+		return
+	}
+
+	maxAge := h.ss.MaxAge
+	if maxAge == 0 {
+		maxAge = 24 * time.Hour
+	}
+	// Cache-busted URLs (?v=token) are effectively immutable: cache a year.
+	if r.FormValue("v") != "" {
+		maxAge = 365 * 24 * time.Hour
+	}
+
+	cacheControl := fmt.Sprintf("public, max-age=%d", maxAge/time.Second)
+
+	for _, e := range header.ParseList(r.Header, "If-None-Match") {
+		if e == etag {
+			w.Header().Set("Cache-Control", cacheControl)
+			w.Header().Set("Etag", etag)
+			w.WriteHeader(http.StatusNotModified)
+			return
+		}
+	}
+
+	rc, cl, ct, err := h.open(p)
+	if err != nil {
+		h.error(w, r, http.StatusNotFound, err)
+		return
+	}
+	defer rc.Close()
+
+	w.Header().Set("Cache-Control", cacheControl)
+	w.Header().Set("Etag", etag)
+	if ct != "" {
+		w.Header().Set("Content-Type", ct)
+	}
+	if cl != 0 {
+		w.Header().Set("Content-Length", strconv.FormatInt(cl, 10))
+	}
+	w.WriteHeader(http.StatusOK)
+	if r.Method != "HEAD" {
+		// Copy errors are ignored: the status line has already been sent.
+		io.Copy(w, rc)
+	}
+}
diff --git a/httputil/static_test.go b/httputil/static_test.go
new file mode 100644
index 0000000..111b9a4
--- /dev/null
+++ b/httputil/static_test.go
@@ -0,0 +1,174 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package httputil_test
+
+import (
+	"crypto/sha1"
+	"encoding/hex"
+	"github.com/golang/gddo/httputil"
+	"io/ioutil"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"os"
+	"reflect"
+	"strconv"
+	"testing"
+	"time"
+)
+
+var (
+	testHash          = computeTestHash()
+	testEtag          = `"` + testHash + `"`
+	testContentLength = computeTestContentLength()
+)
+
+// mustParseURL parses urlStr and panics on failure; for test fixtures only.
+func mustParseURL(urlStr string) *url.URL {
+	u, err := url.Parse(urlStr)
+	if err != nil {
+		panic(err)
+	}
+	return u
+}
+
+// computeTestHash returns the hex SHA-1 of this test file, matching the
+// un-quoted form of the ETag the static server should produce for it.
+func computeTestHash() string {
+	p, err := ioutil.ReadFile("static_test.go")
+	if err != nil {
+		panic(err)
+	}
+	w := sha1.New()
+	w.Write(p)
+	return hex.EncodeToString(w.Sum(nil))
+}
+
+// computeTestContentLength returns this test file's size as a decimal
+// string, the expected Content-Length header value.
+func computeTestContentLength() string {
+	info, err := os.Stat("static_test.go")
+	if err != nil {
+		panic(err)
+	}
+	return strconv.FormatInt(info.Size(), 10)
+}
+
+var fileServerTests = []*struct {
+	name   string // test name for log
+	ss     *httputil.StaticServer
+	r      *http.Request
+	header http.Header // expected response headers
+	status int         // expected response status
+	empty  bool        // true if response body not expected.
+}{
+	{
+		name: "get",
+		ss:   &httputil.StaticServer{MaxAge: 3 * time.Second},
+		r: &http.Request{
+			URL:    mustParseURL("/dir/static_test.go"),
+			Method: "GET",
+		},
+		status: http.StatusOK,
+		header: http.Header{
+			"Etag":           {testEtag},
+			"Cache-Control":  {"public, max-age=3"},
+			"Content-Length": {testContentLength},
+			"Content-Type":   {"application/octet-stream"},
+		},
+	},
+	{
+		name: "get .",
+		ss:   &httputil.StaticServer{Dir: ".", MaxAge: 3 * time.Second},
+		r: &http.Request{
+			URL:    mustParseURL("/dir/static_test.go"),
+			Method: "GET",
+		},
+		status: http.StatusOK,
+		header: http.Header{
+			"Etag":           {testEtag},
+			"Cache-Control":  {"public, max-age=3"},
+			"Content-Length": {testContentLength},
+			"Content-Type":   {"application/octet-stream"},
+		},
+	},
+	{
+		name: "get with ?v=",
+		ss:   &httputil.StaticServer{MaxAge: 3 * time.Second},
+		r: &http.Request{
+			URL:    mustParseURL("/dir/static_test.go?v=xxxxx"),
+			Method: "GET",
+		},
+		status: http.StatusOK,
+		header: http.Header{
+			"Etag":           {testEtag},
+			"Cache-Control":  {"public, max-age=31536000"},
+			"Content-Length": {testContentLength},
+			"Content-Type":   {"application/octet-stream"},
+		},
+	},
+	{
+		name: "head",
+		ss:   &httputil.StaticServer{MaxAge: 3 * time.Second},
+		r: &http.Request{
+			URL:    mustParseURL("/dir/static_test.go"),
+			Method: "HEAD",
+		},
+		status: http.StatusOK,
+		header: http.Header{
+			"Etag":           {testEtag},
+			"Cache-Control":  {"public, max-age=3"},
+			"Content-Length": {testContentLength},
+			"Content-Type":   {"application/octet-stream"},
+		},
+		empty: true,
+	},
+	{
+		name: "if-none-match",
+		ss:   &httputil.StaticServer{MaxAge: 3 * time.Second},
+		r: &http.Request{
+			URL:    mustParseURL("/dir/static_test.go"),
+			Method: "GET",
+			Header: http.Header{"If-None-Match": {testEtag}},
+		},
+		status: http.StatusNotModified,
+		header: http.Header{
+			"Cache-Control": {"public, max-age=3"},
+			"Etag":          {testEtag},
+		},
+		empty: true,
+	},
+}
+
+// testStaticServer runs the fileServerTests table against the handler built
+// by f, checking status code, response headers and body emptiness.
+func testStaticServer(t *testing.T, f func(*httputil.StaticServer) http.Handler) {
+	for _, tt := range fileServerTests {
+		w := httptest.NewRecorder()
+
+		h := f(tt.ss)
+		h.ServeHTTP(w, tt.r)
+
+		if w.Code != tt.status {
+			t.Errorf("%s, status=%d, want %d", tt.name, w.Code, tt.status)
+		}
+
+		if !reflect.DeepEqual(w.HeaderMap, tt.header) {
+			t.Errorf("%s\n\theader=%v,\n\twant   %v", tt.name, w.HeaderMap, tt.header)
+		}
+
+		empty := w.Body.Len() == 0
+		if empty != tt.empty {
+			t.Errorf("%s empty=%v, want %v", tt.name, empty, tt.empty)
+		}
+	}
+}
+
+// The three handler constructors share the same table-driven checks.
+func TestFileHandler(t *testing.T) {
+	testStaticServer(t, func(ss *httputil.StaticServer) http.Handler { return ss.FileHandler("static_test.go") })
+}
+
+func TestDirectoryHandler(t *testing.T) {
+	testStaticServer(t, func(ss *httputil.StaticServer) http.Handler { return ss.DirectoryHandler("/dir", ".") })
+}
+
+func TestFilesHandler(t *testing.T) {
+	testStaticServer(t, func(ss *httputil.StaticServer) http.Handler { return ss.FilesHandler("static_test.go") })
+}
diff --git a/lintapp/README.md b/lintapp/README.md
new file mode 100644
index 0000000..0edff47
--- /dev/null
+++ b/lintapp/README.md
@@ -0,0 +1,11 @@
+lintapp
+=======
+
+This directory contains the source for [go-lint.appspot.com](http://go-lint.appspot.com).
+
+Development Environment Setup
+-----------------------------
+
+- Copy config.go.template to config.go and edit the file as described in the comments.
+- Install the Go App Engine SDK.
+- Run the server using the dev_appserver command.
diff --git a/lintapp/app.yaml b/lintapp/app.yaml
new file mode 100644
index 0000000..9894f1b
--- /dev/null
+++ b/lintapp/app.yaml
@@ -0,0 +1,16 @@
+application: go-lint
+version: 1
+runtime: go
+api_version: go1
+
+handlers:
+- url: /favicon\.ico
+  static_files: assets/favicon.ico
+  upload: assets/favicon\.ico
+
+- url: /robots\.txt
+  static_files: assets/robots.txt
+  upload: assets/robots\.txt
+
+- url: /.*
+  script: _go_app
diff --git a/lintapp/assets/favicon.ico b/lintapp/assets/favicon.ico
new file mode 100644
index 0000000..f19c04d
--- /dev/null
+++ b/lintapp/assets/favicon.ico
Binary files differ
diff --git a/lintapp/assets/robots.txt b/lintapp/assets/robots.txt
new file mode 100644
index 0000000..6ffbc30
--- /dev/null
+++ b/lintapp/assets/robots.txt
@@ -0,0 +1,3 @@
+User-agent: *
+Disallow: /
+
diff --git a/lintapp/assets/templates/common.html b/lintapp/assets/templates/common.html
new file mode 100644
index 0000000..b494dd8
--- /dev/null
+++ b/lintapp/assets/templates/common.html
@@ -0,0 +1,9 @@
+{{define "commonHead"}}
+  <meta charset="utf-8" />
+  <link rel="stylesheet" href="http://yui.yahooapis.com/pure/0.3.0/base-min.css">
+  <style>body { padding: 15px; }</style> 
+{{end}}
+
+{{define "commonFooter"}}
+<p><a href="/">Home</a> | <a href="mailto:{{contactEmail}}">Feedback</a> | <a href="https://github.com/golang/gddo/issues">Website Issues</a>
+{{end}}
diff --git a/lintapp/assets/templates/error.html b/lintapp/assets/templates/error.html
new file mode 100644
index 0000000..4ad7724
--- /dev/null
+++ b/lintapp/assets/templates/error.html
@@ -0,0 +1,10 @@
+{{define "ROOT"}}
+<!DOCTYPE html>
+<html> 
+<head> 
+  {{template "commonHead"}}
+  <title>{{.}}</title>
+</head>
+<body>
+    <p>{{.}}
+</body></html>
+{{end}}
diff --git a/lintapp/assets/templates/index.html b/lintapp/assets/templates/index.html
new file mode 100644
index 0000000..9f3d18f
--- /dev/null
+++ b/lintapp/assets/templates/index.html
@@ -0,0 +1,20 @@
+{{define "ROOT"}}
+<!DOCTYPE html>
+<html> 
+<head> 
+  {{template "commonHead"}}
+  <title>go-lint</title>
+</head>
+<body>
+  <h3>Go Lint</h3>
+  <p>Go Lint lints <a href="http://golang.org/">Go</a> source files on GitHub,
+  Bitbucket and Google Project Hosting using the <a
+    href="https://github.com/golang/lint">lint package</a>.
+  <form method="POST" action="/-/refresh">
+    <input type="text" size=60 name="importPath" autofocus="autofocus" placeholder="Package import path">
+    <input value="Lint" type="submit">
+  </form>
+  {{template "commonFooter"}}
+</body>
+</html>
+{{end}}
diff --git a/lintapp/assets/templates/package.html b/lintapp/assets/templates/package.html
new file mode 100644
index 0000000..57f4a86
--- /dev/null
+++ b/lintapp/assets/templates/package.html
@@ -0,0 +1,21 @@
+{{define "ROOT"}}
+<!DOCTYPE html>
+<html> 
+<head> 
+  {{template "commonHead"}}
+  <title>Lint {{.Path}}</title>
+</head>
+<body>
+  <h3>Lint for {{if .URL}}<a href="{{.URL}}">{{.Path}}</a>{{else}}{{.Path}}{{end}}</h3>
+  <form method="POST" action="/-/refresh">
+    <input type="hidden" name="importPath" value="{{.Path}}">
+    This report was generated {{.Updated|timeago}}. <input type="submit" value="Refresh">
+  </form>
+  {{range $f := .Files}}{{range .Problems}}
+    <p>{{if .Line}}<a href="{{printf $.LineFmt $f.URL .Line}}" title="{{.LineText}}">{{$f.Name}}:{{.Line}}</a>{{else}}{{$f.Name}}{{end}}: 
+      {{.Text}}
+      {{if .Link}} <a href="{{.Link}}">☞</a>{{end}}
+  {{end}}{{end}}
+  {{template "commonFooter"}}
+</body></html>
+{{end}}
diff --git a/lintapp/config.go.template b/lintapp/config.go.template
new file mode 100644
index 0000000..e3579fa
--- /dev/null
+++ b/lintapp/config.go.template
@@ -0,0 +1,10 @@
+// Site-specific configuration template: copy to config.go and fill in.
+package lintapp
+
+func init() {
+	// Register an application at https://github.com/settings/applications/new
+	// and enter the client ID and client secret here.
+	gitHubCredentials = "client_id=<id>&client_secret=<secret>"
+
+	// Set contact email for /-/bot.html
+	contactEmail = "example@example.com"
+}
diff --git a/lintapp/main.go b/lintapp/main.go
new file mode 100644
index 0000000..b969d5b
--- /dev/null
+++ b/lintapp/main.go
@@ -0,0 +1,313 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package lintapp implements the go-lint.appspot.com server.
+package lintapp
+
+import (
+	"bytes"
+	"encoding/gob"
+	"fmt"
+	"html/template"
+	"net/http"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"time"
+
+	"appengine"
+	"appengine/datastore"
+	"appengine/urlfetch"
+
+	"github.com/golang/gddo/gosrc"
+	"github.com/golang/lint"
+)
+
+// init registers the three HTTP endpoints served by the app.
+func init() {
+	http.Handle("/", handlerFunc(serveRoot))
+	http.Handle("/-/bot", handlerFunc(serveBot))
+	http.Handle("/-/refresh", handlerFunc(serveRefresh))
+}
+
+var (
+	// contactEmail is shown in page footers and /-/bot; overridden in config.go.
+	contactEmail    = "unknown@example.com"
+	homeTemplate    = parseTemplate("common.html", "index.html")
+	packageTemplate = parseTemplate("common.html", "package.html")
+	errorTemplate   = parseTemplate("common.html", "error.html")
+	templateFuncs   = template.FuncMap{
+		"timeago":      timeagoFn,
+		"contactEmail": contactEmailFn,
+	}
+	// gitHubCredentials holds "client_id=...&client_secret=..."; set in config.go.
+	gitHubCredentials = ""
+)
+
+func parseTemplate(fnames ...string) *template.Template {
+	paths := make([]string, len(fnames))
+	for i := range fnames {
+		paths[i] = filepath.Join("assets/templates", fnames[i])
+	}
+	t, err := template.New("").Funcs(templateFuncs).ParseFiles(paths...)
+	if err != nil {
+		panic(err)
+	}
+	t = t.Lookup("ROOT")
+	if t == nil {
+		panic(fmt.Sprintf("ROOT template not found in %v", fnames))
+	}
+	return t
+}
+
+// contactEmailFn is the "contactEmail" template function.
+func contactEmailFn() string {
+	return contactEmail
+}
+
+// timeagoFn is the "timeago" template function: it renders the elapsed time
+// since t as a rough human-readable phrase such as "5 minutes ago".
+func timeagoFn(t time.Time) string {
+	elapsed := time.Since(t)
+	if elapsed < time.Second {
+		return "just now"
+	}
+	if elapsed < 2*time.Second {
+		return "one second ago"
+	}
+	if elapsed < time.Minute {
+		return fmt.Sprintf("%d seconds ago", elapsed/time.Second)
+	}
+	if elapsed < 2*time.Minute {
+		return "one minute ago"
+	}
+	if elapsed < time.Hour {
+		return fmt.Sprintf("%d minutes ago", elapsed/time.Minute)
+	}
+	if elapsed < 2*time.Hour {
+		return "one hour ago"
+	}
+	if elapsed < 48*time.Hour {
+		return fmt.Sprintf("%d hours ago", elapsed/time.Hour)
+	}
+	return fmt.Sprintf("%d days ago", elapsed/(time.Hour*24))
+}
+
+// writeResponse renders t with data v and sends the result to w with the
+// given HTTP status. The template is executed into a buffer first so that a
+// rendering failure never produces a half-written page, and so that
+// Content-Length can be set exactly.
+func writeResponse(w http.ResponseWriter, status int, t *template.Template, v interface{}) error {
+	var body bytes.Buffer
+	if err := t.Execute(&body, v); err != nil {
+		return err
+	}
+	h := w.Header()
+	h.Set("Content-Type", "text/html; charset=utf-8")
+	h.Set("Content-Length", strconv.Itoa(body.Len()))
+	w.WriteHeader(status)
+	_, err := w.Write(body.Bytes())
+	return err
+}
+
+// writeErrorResponse renders the generic error page for the given status code.
+func writeErrorResponse(w http.ResponseWriter, status int) error {
+	return writeResponse(w, status, errorTemplate, http.StatusText(status))
+}
+
+// transport decorates an http.RoundTripper: it stamps every request with a
+// User-Agent and appends GitHub API credentials to api.github.com requests.
+type transport struct {
+	rt http.RoundTripper
+	ua string
+}
+
+// RoundTrip implements http.RoundTripper.
+// NOTE(review): it mutates the caller's request (header and query string) in
+// place rather than copying it, which the RoundTripper contract discourages —
+// confirm this is acceptable for the urlfetch-backed client used here.
+func (t transport) RoundTrip(r *http.Request) (*http.Response, error) {
+	r.Header.Set("User-Agent", t.ua)
+	if r.URL.Host == "api.github.com" && gitHubCredentials != "" {
+		if r.URL.RawQuery == "" {
+			r.URL.RawQuery = gitHubCredentials
+		} else {
+			r.URL.RawQuery += "&" + gitHubCredentials
+		}
+	}
+	return t.rt.RoundTrip(r)
+}
+
+// httpClient returns a client for fetching package source: App Engine
+// urlfetch with a 10-second deadline, wrapped in the transport above so every
+// request carries a descriptive User-Agent (and GitHub credentials where
+// applicable).
+func httpClient(r *http.Request) *http.Client {
+	c := appengine.NewContext(r)
+	return &http.Client{
+		Transport: &transport{
+			rt: &urlfetch.Transport{Context: c, Deadline: 10 * time.Second},
+			ua: fmt.Sprintf("%s (+http://%s/-/bot)", appengine.AppID(c), r.Host),
+		},
+	}
+}
+
+// version is the storePackage schema version; bump it to invalidate all
+// previously stored lint results.
+const version = 1
+
+// storePackage is the datastore entity: a gob-encoded lintPackage plus the
+// schema version it was written with.
+type storePackage struct {
+	Data    []byte
+	Version int
+}
+
+// lintPackage is the lint report for one package, rendered by package.html.
+type lintPackage struct {
+	Files   []*lintFile
+	Path    string
+	Updated time.Time
+	LineFmt string
+	URL     string
+}
+
+// lintFile holds the problems found in a single .go file.
+type lintFile struct {
+	Name     string
+	Problems []*lintProblem
+	URL      string
+}
+
+// lintProblem carries just the fields of a lint problem that the UI needs.
+type lintProblem struct {
+	Line       int
+	Text       string
+	LineText   string
+	Confidence float64
+	Link       string
+}
+
+// putPackage stores pkg in the datastore under its import path, gob-encoded
+// and tagged with the current schema version.
+func putPackage(c appengine.Context, importPath string, pkg *lintPackage) error {
+	var buf bytes.Buffer
+	if err := gob.NewEncoder(&buf).Encode(pkg); err != nil {
+		return err
+	}
+	_, err := datastore.Put(c,
+		datastore.NewKey(c, "Package", importPath, 0, nil),
+		&storePackage{Data: buf.Bytes(), Version: version})
+	return err
+}
+
+// getPackage loads the stored lint results for importPath. It returns
+// (nil, nil) when there is no usable cached entry — either the entity does
+// not exist or it was written with an older schema version.
+func getPackage(c appengine.Context, importPath string) (*lintPackage, error) {
+	var spkg storePackage
+	if err := datastore.Get(c, datastore.NewKey(c, "Package", importPath, 0, nil), &spkg); err != nil {
+		// Treat "no such entity" as a cache miss, not an error.
+		if err == datastore.ErrNoSuchEntity {
+			err = nil
+		}
+		return nil, err
+	}
+	if spkg.Version != version {
+		// Stale schema: report a miss so the caller re-lints.
+		return nil, nil
+	}
+	var pkg lintPackage
+	if err := gob.NewDecoder(bytes.NewReader(spkg.Data)).Decode(&pkg); err != nil {
+		return nil, err
+	}
+	return &pkg, nil
+}
+
+// runLint fetches the source for importPath, runs golint over every .go
+// file, stores the resulting report in the datastore, and returns it.
+func runLint(r *http.Request, importPath string) (*lintPackage, error) {
+	dir, err := gosrc.Get(httpClient(r), importPath, "")
+	if err != nil {
+		return nil, err
+	}
+
+	pkg := lintPackage{
+		Path:    importPath,
+		Updated: time.Now(),
+		LineFmt: dir.LineFmt,
+		URL:     dir.BrowseURL,
+	}
+	linter := lint.Linter{}
+	for _, f := range dir.Files {
+		if !strings.HasSuffix(f.Name, ".go") {
+			continue
+		}
+		problems, err := linter.Lint(f.Name, f.Data)
+		// Files that lint cleanly are omitted from the report entirely.
+		if err == nil && len(problems) == 0 {
+			continue
+		}
+		file := lintFile{Name: f.Name, URL: f.BrowseURL}
+		if err != nil {
+			// Surface lint/parse failures as a single pseudo-problem.
+			file.Problems = []*lintProblem{{Text: err.Error()}}
+		} else {
+			for _, p := range problems {
+				file.Problems = append(file.Problems, &lintProblem{
+					Line:       p.Position.Line,
+					Text:       p.Text,
+					LineText:   p.LineText,
+					Confidence: p.Confidence,
+					Link:       p.Link,
+				})
+			}
+		}
+		if len(file.Problems) > 0 {
+			pkg.Files = append(pkg.Files, &file)
+		}
+	}
+
+	// Cache the report so subsequent views skip the fetch and lint.
+	if err := putPackage(appengine.NewContext(r), importPath, &pkg); err != nil {
+		return nil, err
+	}
+
+	return &pkg, nil
+}
+
+// filterByConfidence removes problems below the confidence threshold given
+// by the request's "minConfidence" form value. Filtering is done in place,
+// reusing each file's Problems slice.
+func filterByConfidence(r *http.Request, pkg *lintPackage) {
+	minConfidence, err := strconv.ParseFloat(r.FormValue("minConfidence"), 64)
+	if err != nil {
+		// Missing or malformed value: fall back to golint's default threshold.
+		minConfidence = 0.8
+	}
+	for _, f := range pkg.Files {
+		j := 0
+		for i := range f.Problems {
+			if f.Problems[i].Confidence >= minConfidence {
+				f.Problems[j] = f.Problems[i]
+				j++
+			}
+		}
+		f.Problems = f.Problems[:j]
+	}
+}
+
+// handlerFunc is an http.Handler that converts the returned error into an
+// appropriate HTTP error response.
+type handlerFunc func(http.ResponseWriter, *http.Request) error
+
+// ServeHTTP runs f and maps its error: gosrc not-found errors become 404,
+// remote-host failures become a 500 naming the host, and anything else a
+// generic 500. The final branch is unconditional because err is known to be
+// non-nil by that point (the original "else if err != nil" was a dead check).
+func (f handlerFunc) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	c := appengine.NewContext(r)
+	err := f(w, r)
+	if err == nil {
+		return
+	}
+	if gosrc.IsNotFound(err) {
+		writeErrorResponse(w, http.StatusNotFound)
+	} else if e, ok := err.(*gosrc.RemoteError); ok {
+		c.Infof("Remote error %s: %v", e.Host, e)
+		writeResponse(w, http.StatusInternalServerError, errorTemplate, fmt.Sprintf("Error accessing %s.", e.Host))
+	} else {
+		c.Errorf("Internal error %v", err)
+		writeErrorResponse(w, http.StatusInternalServerError)
+	}
+}
+
+// serveRoot serves the home page for "/" and, for any other path, treats
+// the path as a package import path and renders its lint report, running
+// the linter on a cache miss.
+func serveRoot(w http.ResponseWriter, r *http.Request) error {
+	switch {
+	case r.Method != "GET" && r.Method != "HEAD":
+		return writeErrorResponse(w, 405)
+	case r.URL.Path == "/":
+		return writeResponse(w, 200, homeTemplate, nil)
+	default:
+		importPath := r.URL.Path[1:]
+		if !gosrc.IsValidPath(importPath) {
+			return gosrc.NotFoundError{Message: "bad path"}
+		}
+		c := appengine.NewContext(r)
+		pkg, err := getPackage(c, importPath)
+		// getPackage returns (nil, nil) on a cache miss; lint on demand.
+		if pkg == nil && err == nil {
+			pkg, err = runLint(r, importPath)
+		}
+		if err != nil {
+			return err
+		}
+		filterByConfidence(r, pkg)
+		return writeResponse(w, 200, packageTemplate, pkg)
+	}
+}
+
+// serveRefresh handles POST /-/refresh: it re-lints the submitted import
+// path and redirects to the package's report page.
+func serveRefresh(w http.ResponseWriter, r *http.Request) error {
+	if r.Method != "POST" {
+		return writeErrorResponse(w, http.StatusMethodNotAllowed)
+	}
+	importPath := r.FormValue("importPath")
+	pkg, err := runLint(r, importPath)
+	if err != nil {
+		return err
+	}
+	// Use a temporary redirect: the previous 301 (Moved Permanently) is
+	// cacheable, which could let user agents bypass future refreshes of the
+	// same package.
+	http.Redirect(w, r, "/"+pkg.Path, http.StatusFound)
+	return nil
+}
+
+// serveBot serves a plain-text page identifying the crawler; it is the
+// target of the "+http://.../-/bot" link in the User-Agent set by httpClient.
+func serveBot(w http.ResponseWriter, r *http.Request) error {
+	c := appengine.NewContext(r)
+	_, err := fmt.Fprintf(w, "Contact %s for help with the %s bot.", contactEmail, appengine.AppID(c))
+	return err
+}
diff --git a/.gitignore b/talksapp/.gitignore
similarity index 100%
rename from .gitignore
rename to talksapp/.gitignore
diff --git a/README.md b/talksapp/README.md
similarity index 100%
rename from README.md
rename to talksapp/README.md
diff --git a/app.yaml b/talksapp/app.yaml
similarity index 100%
rename from app.yaml
rename to talksapp/app.yaml
diff --git a/assets/home.article b/talksapp/assets/home.article
similarity index 100%
rename from assets/home.article
rename to talksapp/assets/home.article
diff --git a/assets/robots.txt b/talksapp/assets/robots.txt
similarity index 100%
rename from assets/robots.txt
rename to talksapp/assets/robots.txt
diff --git a/config.go.template b/talksapp/config.go.template
similarity index 100%
rename from config.go.template
rename to talksapp/config.go.template
diff --git a/main.go b/talksapp/main.go
similarity index 100%
rename from main.go
rename to talksapp/main.go
diff --git a/setup.sh b/talksapp/setup.sh
similarity index 100%
rename from setup.sh
rename to talksapp/setup.sh