go.tools: add missing files ssa/*.go

R=golang-dev, adonovan
CC=golang-dev
https://golang.org/cl/9500043
diff --git a/ssa/blockopt.go b/ssa/blockopt.go
new file mode 100644
index 0000000..f39635d
--- /dev/null
+++ b/ssa/blockopt.go
@@ -0,0 +1,173 @@
+package ssa
+
+// Simple block optimizations to simplify the control flow graph.
+
+// TODO(adonovan): opt: instead of creating several "unreachable" blocks
+// per function in the Builder, reuse a single one (e.g. at Blocks[1])
+// to reduce garbage.
+
+import (
+	"fmt"
+	"os"
+)
+
+// If true, perform sanity checking and show progress at each
+// successive iteration of optimizeBlocks.  Very verbose.
+const debugBlockOpt = false
+
+// markReachable sets Index=-1 for all blocks reachable from b.
+//
+// It reuses BasicBlock.Index as the visited mark: 0 means unvisited,
+// -1 means reachable (see deleteUnreachableBlocks' white/black names).
+func markReachable(b *BasicBlock) {
+	b.Index = -1
+	for _, succ := range b.Succs {
+		if succ.Index == 0 {
+			markReachable(succ)
+		}
+	}
+}
+
+// deleteUnreachableBlocks marks all reachable blocks of f and
+// eliminates (nils) all others, including possibly cyclic subgraphs.
+//
+func deleteUnreachableBlocks(f *Function) {
+	const white, black = 0, -1
+	// We borrow b.Index temporarily as the mark bit.
+	for _, b := range f.Blocks {
+		b.Index = white
+	}
+	markReachable(f.Blocks[0]) // Blocks[0] is the entry block
+	for i, b := range f.Blocks {
+		if b.Index == white {
+			// Keep the Preds lists of surviving (black) blocks
+			// consistent by unlinking edges from doomed blocks.
+			for _, c := range b.Succs {
+				if c.Index == black {
+					c.removePred(b) // delete white->black edge
+				}
+			}
+			if debugBlockOpt {
+				fmt.Fprintln(os.Stderr, "unreachable", b)
+			}
+			f.Blocks[i] = nil // delete b
+		}
+	}
+	f.removeNilBlocks()
+}
+
+// jumpThreading attempts to apply simple jump-threading to block b,
+// in which a->b->c become a->c if b is just a Jump.
+// The result is true if the optimization was applied.
+//
+func jumpThreading(f *Function, b *BasicBlock) bool {
+	if b.Index == 0 {
+		return false // don't apply to entry block
+	}
+	if b.Instrs == nil {
+		// NOTE(review): stray debug print to stdout in library code;
+		// consider removing it or routing to os.Stderr under debugBlockOpt.
+		fmt.Println("empty block ", b)
+		return false
+	}
+	if _, ok := b.Instrs[0].(*Jump); !ok {
+		return false // not just a jump
+	}
+	c := b.Succs[0]
+	if c == b {
+		return false // don't apply to degenerate jump-to-self.
+	}
+	if c.hasPhi() {
+		return false // not sound without more effort
+	}
+	for j, a := range b.Preds {
+		a.replaceSucc(b, c)
+
+		// If a now has two edges to c, replace its degenerate If by Jump.
+		if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c {
+			jump := new(Jump)
+			jump.SetBlock(a)
+			a.Instrs[len(a.Instrs)-1] = jump
+			a.Succs = a.Succs[:1]
+			c.removePred(b)
+		} else {
+			if j == 0 {
+				c.replacePred(b, a) // reuse b's slot in c.Preds
+			} else {
+				c.Preds = append(c.Preds, a)
+			}
+		}
+
+		if debugBlockOpt {
+			fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c)
+		}
+	}
+	f.Blocks[b.Index] = nil // delete b
+	return true
+}
+
+// fuseBlocks attempts to apply the block fusion optimization to block
+// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
+// The result is true if the optimization was applied.
+//
+func fuseBlocks(f *Function, a *BasicBlock) bool {
+	if len(a.Succs) != 1 {
+		return false
+	}
+	b := a.Succs[0]
+	if len(b.Preds) != 1 {
+		return false
+	}
+	// Eliminate jump at end of A, then copy all of B across.
+	a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...)
+	for _, instr := range b.Instrs {
+		instr.SetBlock(a)
+	}
+
+	// A inherits B's successors
+	// (a.succs2 is reused as backing storage when it suffices).
+	a.Succs = append(a.succs2[:0], b.Succs...)
+
+	// Fix up Preds links of all successors of B.
+	for _, c := range b.Succs {
+		c.replacePred(b, a)
+	}
+
+	if debugBlockOpt {
+		fmt.Fprintln(os.Stderr, "fuseBlocks", a, b)
+	}
+
+	f.Blocks[b.Index] = nil // delete b
+	return true
+}
+
+// optimizeBlocks() performs some simple block optimizations on a
+// completed function: dead block elimination, block fusion, jump
+// threading.
+//
+// It iterates to a fixed point: each pass may expose further
+// opportunities for the other optimizations.
+//
+func optimizeBlocks(f *Function) {
+	deleteUnreachableBlocks(f)
+
+	// Loop until no further progress.
+	changed := true
+	for changed {
+		changed = false
+
+		if debugBlockOpt {
+			f.DumpTo(os.Stderr)
+			MustSanityCheck(f, nil)
+		}
+
+		for _, b := range f.Blocks {
+			// f.Blocks will temporarily contain nils to indicate
+			// deleted blocks; we remove them at the end.
+			if b == nil {
+				continue
+			}
+
+			// Fuse blocks.  b->c becomes bc.
+			if fuseBlocks(f, b) {
+				changed = true
+			}
+
+			// a->b->c becomes a->c if b contains only a Jump.
+			if jumpThreading(f, b) {
+				changed = true
+				continue // (b was disconnected)
+			}
+		}
+	}
+	f.removeNilBlocks()
+}
diff --git a/ssa/builder.go b/ssa/builder.go
new file mode 100644
index 0000000..3604517
--- /dev/null
+++ b/ssa/builder.go
@@ -0,0 +1,2703 @@
+package ssa
+
+// This file defines the SSA builder.
+//
+// The builder has two phases, CREATE and BUILD.  In the CREATE
+// phase, all packages are constructed and type-checked and
+// definitions of all package members are created, method-sets are
+// computed, and bridge methods are synthesized.  The create phase
+// proceeds in topological order over the import dependency graph,
+// initiated by client calls to CreatePackage.
+//
+// In the BUILD phase, the Builder traverses the AST of each Go source
+// function and generates SSA instructions for the function body.
+// Within each package, building proceeds in a topological order over
+// the intra-package symbol reference graph, whose roots are the set
+// of package-level declarations in lexical order.  The BUILD phases
+// for distinct packages are independent and are executed in parallel.
+//
+// The Builder's and Program's indices (maps) are populated and
+// mutated during the CREATE phase, but during the BUILD phase they
+// remain constant.  The sole exception is Prog.methodSets, which is
+// protected by a dedicated mutex.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"os"
+	"strconv"
+	"sync"
+	"sync/atomic"
+
+	"code.google.com/p/go.tools/go/exact"
+	"code.google.com/p/go.tools/go/types"
+)
+
+var (
+	// varOk is the shared "ok" result variable used by tSelect and by
+	// the "value,ok" tuple types synthesized in exprN.
+	varOk = &types.Var{Name: "ok", Type: tBool}
+
+	// Type constants.
+	tBool       = types.Typ[types.Bool]
+	tByte       = types.Typ[types.Byte]
+	tFloat32    = types.Typ[types.Float32]
+	tFloat64    = types.Typ[types.Float64]
+	tComplex64  = types.Typ[types.Complex64]
+	tComplex128 = types.Typ[types.Complex128]
+	tInt        = types.Typ[types.Int]
+	tInvalid    = types.Typ[types.Invalid]
+	tUntypedNil = types.Typ[types.UntypedNil]
+	tRangeIter  = &types.Basic{Name: "iter"} // the type of all "range" iterators
+	tEface      = new(types.Interface)       // the empty interface type
+
+	// The result type of a "select".
+	tSelect = &types.Result{Values: []*types.Var{
+		{Name: "index", Type: tInt},
+		{Name: "recv", Type: tEface},
+		varOk,
+	}}
+
+	// SSA Value constants.
+	vZero  = intLiteral(0)
+	vOne   = intLiteral(1)
+	vTrue  = newLiteral(exact.MakeBool(true), tBool)
+	vFalse = newLiteral(exact.MakeBool(false), tBool)
+)
+
+// A Context specifies the supporting context for SSA construction.
+// It is passed to NewBuilder, which captures it as Builder.Context.
+//
+// TODO(adonovan): make it so empty => default behaviours?
+// Currently not the case for Loader.
+//
+type Context struct {
+	// Mode is a bitfield of options controlling verbosity,
+	// logging and additional sanity checks.
+	Mode BuilderMode
+
+	// Loader is a SourceLoader function that finds, loads and
+	// parses Go source files for a given import path.  (It is
+	// ignored if the mode bits include UseGCImporter.)
+	// See (e.g.) GoRootLoader.
+	Loader SourceLoader
+
+	// RetainAST is an optional user predicate that determines
+	// whether to retain (true) or discard (false) the AST and its
+	// type information for each package after BuildPackage has
+	// finished.
+	// Implementations must be thread-safe.
+	// If RetainAST is nil, all ASTs and TypeInfos are discarded.
+	RetainAST func(*Package) bool
+
+	// TypeChecker contains options relating to the type checker.
+	// The SSA Builder will override any user-supplied values for
+	// its Expr, Ident and Import fields; other fields will be
+	// passed through to the type checker.
+	TypeChecker types.Context
+}
+
+// BuilderMode is a bitmask of options for diagnostics and checking.
+// The flags below may be combined with bitwise-OR.
+type BuilderMode uint
+
+const (
+	LogPackages          BuilderMode = 1 << iota // Dump package inventory to stderr
+	LogFunctions                                 // Dump function SSA code to stderr
+	LogSource                                    // Show source locations as SSA builder progresses
+	SanityCheckFunctions                         // Perform sanity checking of function bodies
+	UseGCImporter                                // Ignore SourceLoader; use gc-compiled object code for all imports
+	NaiveForm                                    // Build naïve SSA form: don't replace local loads/stores with registers
+	BuildSerially                                // Build packages serially, not in parallel.
+)
+
+// A Builder creates the SSA representation of a single program.
+// Instances may be created using NewBuilder.
+//
+// The SSA Builder constructs a Program containing Package instances
+// for packages of Go source code, loading, parsing and recursively
+// constructing packages for all imported dependencies as well.
+//
+// If the UseGCImporter mode flag is specified, binary object files
+// produced by the gc compiler will be loaded instead of source code
+// for all imported packages.  Such files supply only the types of
+// package-level declarations and values of constants, but no code, so
+// this mode will not yield a whole program.  It is intended for
+// analyses that perform intraprocedural analysis of a single package.
+//
+// A typical client will create a Builder with NewBuilder; call
+// CreatePackage for the "root" package(s), e.g. main; then call
+// BuildPackage on the same set of packages to construct SSA-form code
+// for functions and methods.  After that, the representation of the
+// program (Builder.Prog) is complete and transitively closed, and the
+// Builder object can be discarded to reclaim its memory.  The
+// client's analysis may then begin.
+//
+type Builder struct {
+	Prog    *Program // the program being built
+	Context *Context // the client context
+
+	// The maps below are populated during the CREATE phase (see the
+	// file comment) and remain constant during the BUILD phase.
+	importErrs map[string]error            // across-packages import cache of failures
+	packages   map[*types.Package]*Package // SSA packages by types.Package
+	globals    map[types.Object]Value      // all package-level funcs and vars, and universal built-ins
+}
+
+// NewBuilder creates and returns a new SSA builder with options
+// specified by context.
+//
+func NewBuilder(context *Context) *Builder {
+	b := &Builder{
+		Prog: &Program{
+			Files:           token.NewFileSet(),
+			Packages:        make(map[string]*Package),
+			Builtins:        make(map[types.Object]*Builtin),
+			methodSets:      make(map[types.Type]MethodSet),
+			concreteMethods: make(map[*types.Method]*Function),
+			mode:            context.Mode,
+		},
+		Context:    context,
+		globals:    make(map[types.Object]Value),
+		importErrs: make(map[string]error),
+		packages:   make(map[*types.Package]*Package),
+	}
+
+	// Route the type checker's imports through the builder so that
+	// dependencies are created (and cached) via doImport.
+	b.Context.TypeChecker.Import = func(imports map[string]*types.Package, path string) (pkg *types.Package, err error) {
+		return b.doImport(imports, path)
+	}
+
+	// Create Values for built-in functions.
+	for _, obj := range types.Universe.Entries {
+		switch obj := obj.(type) {
+		case *types.Func:
+			v := &Builtin{obj}
+			b.globals[obj] = v
+			b.Prog.Builtins[obj] = v
+		}
+	}
+	return b
+}
+
+// lookup returns the package-level *Function or *Global (or universal
+// *Builtin) for the named object obj.  ok reports whether obj was found.
+//
+// Intra-package references are edges in the initialization dependency
+// graph.  If the result v is a Function or Global belonging to
+// 'from', the package on whose behalf this lookup occurs, then lookup
+// emits initialization code into from.Init if not already done.
+//
+func (b *Builder) lookup(from *Package, obj types.Object) (v Value, ok bool) {
+	v, ok = b.globals[obj]
+	if ok {
+		// Build intra-package references eagerly (see doc comment).
+		switch v := v.(type) {
+		case *Function:
+			if from == v.Pkg {
+				b.buildFunction(v)
+			}
+		case *Global:
+			if from == v.Pkg {
+				b.buildGlobal(v, obj)
+			}
+		}
+	}
+	return
+}
+
+// cond emits to fn code to evaluate boolean condition e and jump
+// to t or f depending on its value, performing various simplifications.
+//
+// Postcondition: fn.currentBlock is nil.
+//
+func (b *Builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
+	switch e := e.(type) {
+	case *ast.ParenExpr:
+		b.cond(fn, e.X, t, f)
+		return
+
+	case *ast.BinaryExpr:
+		switch e.Op {
+		case token.LAND:
+			// x && y: evaluate y only if x is true.
+			ltrue := fn.newBasicBlock("cond.true")
+			b.cond(fn, e.X, ltrue, f)
+			fn.currentBlock = ltrue
+			b.cond(fn, e.Y, t, f)
+			return
+
+		case token.LOR:
+			// x || y: evaluate y only if x is false.
+			lfalse := fn.newBasicBlock("cond.false")
+			b.cond(fn, e.X, t, lfalse)
+			fn.currentBlock = lfalse
+			b.cond(fn, e.Y, t, f)
+			return
+		}
+
+	case *ast.UnaryExpr:
+		if e.Op == token.NOT {
+			// !x: swap the branch targets.
+			b.cond(fn, e.X, f, t)
+			return
+		}
+	}
+
+	switch cond := b.expr(fn, e).(type) {
+	case *Literal:
+		// Dispatch constant conditions statically.
+		if exact.BoolVal(cond.Value) {
+			emitJump(fn, t)
+		} else {
+			emitJump(fn, f)
+		}
+	default:
+		emitIf(fn, cond, t, f)
+	}
+}
+
+// logicalBinop emits code to fn to evaluate e, a &&- or
+// ||-expression whose reified boolean value is wanted.
+// The value is returned.
+//
+func (b *Builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
+	rhs := fn.newBasicBlock("binop.rhs")
+	done := fn.newBasicBlock("binop.done")
+
+	var short Value // value of the short-circuit path
+	switch e.Op {
+	case token.LAND:
+		b.cond(fn, e.X, rhs, done)
+		short = vFalse
+	case token.LOR:
+		b.cond(fn, e.X, done, rhs)
+		short = vTrue
+	}
+
+	// Is rhs unreachable?
+	if rhs.Preds == nil {
+		// Simplify false&&y to false, true||y to true.
+		fn.currentBlock = done
+		return short
+	}
+
+	// Is done unreachable?
+	if done.Preds == nil {
+		// Simplify true&&y (or false||y) to y.
+		fn.currentBlock = rhs
+		return b.expr(fn, e.Y)
+	}
+
+	// All edges from e.X to done carry the short-circuit value.
+	var edges []Value
+	for _ = range done.Preds {
+		edges = append(edges, short)
+	}
+
+	// The edge from e.Y to done carries the value of e.Y.
+	fn.currentBlock = rhs
+	edges = append(edges, b.expr(fn, e.Y))
+	emitJump(fn, done)
+	fn.currentBlock = done
+
+	// Merge the incoming values with a phi node.
+	phi := &Phi{Edges: edges, Comment: e.Op.String()}
+	phi.Type_ = phi.Edges[0].Type()
+	return done.emit(phi)
+}
+
+// exprN lowers a multi-result expression e to SSA form, emitting code
+// to fn and returning a single Value whose type is a *types.Results
+// (tuple).  The caller must access the components via Extract.
+//
+// Multi-result expressions include CallExprs in a multi-value
+// assignment or return statement, and "value,ok" uses of
+// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
+// is token.ARROW).
+//
+func (b *Builder) exprN(fn *Function, e ast.Expr) Value {
+	var typ types.Type
+	var tuple Value
+	switch e := e.(type) {
+	case *ast.ParenExpr:
+		return b.exprN(fn, e.X)
+
+	case *ast.CallExpr:
+		// Currently, no built-in function nor type conversion
+		// has multiple results, so we can avoid some of the
+		// cases for single-valued CallExpr.
+		var c Call
+		b.setCall(fn, e, &c.Call)
+		c.Type_ = fn.Pkg.TypeOf(e)
+		return fn.emit(&c)
+
+	case *ast.IndexExpr:
+		// "v, ok := m[k]" map lookup.
+		mapt := underlyingType(fn.Pkg.TypeOf(e.X)).(*types.Map)
+		typ = mapt.Elt
+		tuple = fn.emit(&Lookup{
+			X:       b.expr(fn, e.X),
+			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key),
+			CommaOk: true,
+		})
+
+	case *ast.TypeAssertExpr:
+		return emitTypeTest(fn, b.expr(fn, e.X), fn.Pkg.TypeOf(e))
+
+	case *ast.UnaryExpr: // must be receive <-
+		typ = underlyingType(fn.Pkg.TypeOf(e.X)).(*types.Chan).Elt
+		tuple = fn.emit(&UnOp{
+			Op:      token.ARROW,
+			X:       b.expr(fn, e.X),
+			CommaOk: true,
+		})
+
+	default:
+		panic(fmt.Sprintf("unexpected exprN: %T", e))
+	}
+
+	// The typechecker sets the type of the expression to just the
+	// asserted type in the "value, ok" form, not to *types.Result
+	// (though it includes the valueOk operand in its error messages).
+
+	tuple.(interface {
+		setType(types.Type)
+	}).setType(&types.Result{Values: []*types.Var{
+		{Name: "value", Type: typ},
+		varOk,
+	}})
+	return tuple
+}
+
+// builtin emits to fn SSA instructions to implement a call to the
+// built-in function called name with the specified arguments
+// and return type.  It returns the value defined by the result.
+//
+// The result is nil if no special handling was required; in this case
+// the caller should treat this like an ordinary library function
+// call.
+//
+func (b *Builder) builtin(fn *Function, name string, args []ast.Expr, typ types.Type, pos token.Pos) Value {
+	switch name {
+	case "make":
+		switch underlyingType(typ).(type) {
+		case *types.Slice:
+			// make([]T, len[, cap]); cap defaults to len.
+			n := b.expr(fn, args[1])
+			m := n
+			if len(args) == 3 {
+				m = b.expr(fn, args[2])
+			}
+			v := &MakeSlice{
+				Len: n,
+				Cap: m,
+				Pos: pos,
+			}
+			v.setType(typ)
+			return fn.emit(v)
+
+		case *types.Map:
+			// make(map[K]V[, reserve]).
+			var res Value
+			if len(args) == 2 {
+				res = b.expr(fn, args[1])
+			}
+			v := &MakeMap{Reserve: res, Pos: pos}
+			v.setType(typ)
+			return fn.emit(v)
+
+		case *types.Chan:
+			// make(chan T[, size]); size defaults to zero.
+			var sz Value = vZero
+			if len(args) == 2 {
+				sz = b.expr(fn, args[1])
+			}
+			v := &MakeChan{Size: sz, Pos: pos}
+			v.setType(typ)
+			return fn.emit(v)
+		}
+
+	case "new":
+		return emitNew(fn, indirectType(underlyingType(typ)), pos)
+
+	case "len", "cap":
+		// Special case: len or cap of an array or *array is
+		// based on the type, not the value which may be nil.
+		// We must still evaluate the value, though.  (If it
+		// was side-effect free, the whole call would have
+		// been constant-folded.)
+		t := underlyingType(deref(fn.Pkg.TypeOf(args[0])))
+		if at, ok := t.(*types.Array); ok {
+			b.expr(fn, args[0]) // for effects only
+			return intLiteral(at.Len)
+		}
+		// Otherwise treat as normal.
+
+	case "panic":
+		fn.emit(&Panic{X: emitConv(fn, b.expr(fn, args[0]), tEface)})
+		fn.currentBlock = fn.newBasicBlock("unreachable")
+		return vFalse // any non-nil Value will do
+	}
+	return nil // treat all others as a regular function call
+}
+
+// selector evaluates the selector expression e and returns its value,
+// or if wantAddr is true, its address, in which case escaping
+// indicates whether the caller intends to use the resulting pointer
+// in a potentially escaping way.
+//
+func (b *Builder) selector(fn *Function, e *ast.SelectorExpr, wantAddr, escaping bool) Value {
+	id := makeId(e.Sel.Name, fn.Pkg.Types)
+	st := underlyingType(deref(fn.Pkg.TypeOf(e.X))).(*types.Struct)
+	// First try a direct (named) field of st.
+	index := -1
+	for i, f := range st.Fields {
+		if IdFromQualifiedName(f.QualifiedName) == id {
+			index = i
+			break
+		}
+	}
+	var path *anonFieldPath
+	if index == -1 {
+		// Not a named field.  Use breadth-first algorithm.
+		path, index = findPromotedField(st, id)
+		if path == nil {
+			panic("field not found, even with promotion: " + e.Sel.Name)
+		}
+	}
+	fieldType := fn.Pkg.TypeOf(e)
+	if wantAddr {
+		return b.fieldAddr(fn, e.X, path, index, fieldType, escaping)
+	}
+	return b.fieldExpr(fn, e.X, path, index, fieldType)
+}
+
+// fieldAddr evaluates the base expression (a struct or *struct),
+// applies to it any implicit field selections from path, and then
+// selects the field #index of type fieldType.
+// Its address is returned.
+//
+// (fieldType can be derived from base+index.)
+//
+func (b *Builder) fieldAddr(fn *Function, base ast.Expr, path *anonFieldPath, index int, fieldType types.Type, escaping bool) Value {
+	var x Value
+	if path != nil {
+		// Recurse to obtain the implicit anonymous-field prefix.
+		switch underlyingType(path.field.Type).(type) {
+		case *types.Struct:
+			x = b.fieldAddr(fn, base, path.tail, path.index, path.field.Type, escaping)
+		case *types.Pointer:
+			x = b.fieldExpr(fn, base, path.tail, path.index, path.field.Type)
+		}
+	} else {
+		switch underlyingType(fn.Pkg.TypeOf(base)).(type) {
+		case *types.Struct:
+			x = b.addr(fn, base, escaping).(address).addr
+		case *types.Pointer:
+			x = b.expr(fn, base)
+		}
+	}
+	v := &FieldAddr{
+		X:     x,
+		Field: index,
+	}
+	v.setType(pointer(fieldType))
+	return fn.emit(v)
+}
+
+// fieldExpr evaluates the base expression (a struct or *struct),
+// applies to it any implicit field selections from path, and then
+// selects the field #index of type fieldType.
+// Its value is returned.
+//
+// (fieldType can be derived from base+index.)
+//
+func (b *Builder) fieldExpr(fn *Function, base ast.Expr, path *anonFieldPath, index int, fieldType types.Type) Value {
+	var x Value
+	if path != nil {
+		// Recurse to obtain the implicit anonymous-field prefix.
+		x = b.fieldExpr(fn, base, path.tail, path.index, path.field.Type)
+	} else {
+		x = b.expr(fn, base)
+	}
+	switch underlyingType(x.Type()).(type) {
+	case *types.Struct:
+		v := &Field{
+			X:     x,
+			Field: index,
+		}
+		v.setType(fieldType)
+		return fn.emit(v)
+
+	case *types.Pointer: // *struct
+		v := &FieldAddr{
+			X:     x,
+			Field: index,
+		}
+		v.setType(pointer(fieldType))
+		return emitLoad(fn, fn.emit(v))
+	}
+	panic("unreachable")
+}
+
+// addr lowers a single-result addressable expression e to SSA form,
+// emitting code to fn and returning the location (an lvalue) defined
+// by the expression.
+//
+// If escaping is true, addr marks the base variable of the
+// addressable expression e as being a potentially escaping pointer
+// value.  For example, in this code:
+//
+//   a := A{
+//     b: [1]B{B{c: 1}}
+//   }
+//   return &a.b[0].c
+//
+// the application of & causes a.b[0].c to have its address taken,
+// which means that ultimately the local variable a must be
+// heap-allocated.  This is a simple but very conservative escape
+// analysis.
+//
+// Operations forming potentially escaping pointers include:
+// - &x, including when implicit in method call or composite literals.
+// - a[:] iff a is an array (not *array)
+// - references to variables in lexically enclosing functions.
+//
+func (b *Builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
+	switch e := e.(type) {
+	case *ast.Ident:
+		obj := fn.Pkg.ObjectOf(e)
+		v, ok := b.lookup(fn.Pkg, obj) // var (address)
+		if !ok {
+			v = fn.lookup(obj, escaping)
+		}
+		return address{v}
+
+	case *ast.CompositeLit:
+		t := deref(fn.Pkg.TypeOf(e))
+		var v Value
+		if escaping {
+			v = emitNew(fn, t, e.Lbrace)
+		} else {
+			v = fn.addLocal(t, e.Lbrace)
+		}
+		b.compLit(fn, v, e, t) // initialize in place
+		return address{v}
+
+	case *ast.ParenExpr:
+		return b.addr(fn, e.X, escaping)
+
+	case *ast.SelectorExpr:
+		// p.M where p is a package.
+		if obj := fn.Pkg.isPackageRef(e); obj != nil {
+			if v, ok := b.lookup(fn.Pkg, obj); ok {
+				return address{v}
+			}
+			panic("undefined package-qualified name: " + obj.GetName())
+		}
+
+		// e.f where e is an expression.
+		return address{b.selector(fn, e, true, escaping)}
+
+	case *ast.IndexExpr:
+		var x Value
+		var et types.Type // type of the element's address
+		switch t := underlyingType(fn.Pkg.TypeOf(e.X)).(type) {
+		case *types.Array:
+			x = b.addr(fn, e.X, escaping).(address).addr
+			et = pointer(t.Elt)
+		case *types.Pointer: // *array
+			x = b.expr(fn, e.X)
+			et = pointer(underlyingType(t.Base).(*types.Array).Elt)
+		case *types.Slice:
+			x = b.expr(fn, e.X)
+			et = pointer(t.Elt)
+		case *types.Map:
+			// Map elements are not addressable; return an
+			// element lvalue that updates via map ops.
+			return &element{
+				m: b.expr(fn, e.X),
+				k: emitConv(fn, b.expr(fn, e.Index), t.Key),
+				t: t.Elt,
+			}
+		default:
+			panic("unexpected container type in IndexExpr: " + t.String())
+		}
+		v := &IndexAddr{
+			X:     x,
+			Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+		}
+		v.setType(et)
+		return address{fn.emit(v)}
+
+	case *ast.StarExpr:
+		return address{b.expr(fn, e.X)}
+	}
+
+	panic(fmt.Sprintf("unexpected address expression: %T", e))
+}
+
+// exprInPlace emits to fn code to initialize the lvalue loc with the
+// value of expression e.
+//
+// This is equivalent to loc.store(fn, b.expr(fn, e)) but may
+// generate better code in some cases, e.g. for composite literals
+// in an addressable location.
+//
+func (b *Builder) exprInPlace(fn *Function, loc lvalue, e ast.Expr) {
+	if addr, ok := loc.(address); ok {
+		if e, ok := e.(*ast.CompositeLit); ok {
+			typ := addr.typ()
+			switch underlyingType(typ).(type) {
+			case *types.Pointer: // implicit & -- possibly escaping
+				ptr := b.addr(fn, e, true).(address).addr
+				addr.store(fn, ptr) // copy address
+				return
+
+			case *types.Interface:
+				// e.g. var x interface{} = T{...}
+				// Can't in-place initialize an interface value.
+				// Fall back to copying.
+
+			default:
+				b.compLit(fn, addr.addr, e, typ) // in place
+				return
+			}
+		}
+	}
+	loc.store(fn, b.expr(fn, e)) // copy value
+}
+
+// expr lowers a single-result expression e to SSA form, emitting code
+// to fn and returning the Value defined by the expression.
+//
+func (b *Builder) expr(fn *Function, e ast.Expr) Value {
+	// Constant expression?  Return its Literal without emitting code.
+	if lit := fn.Pkg.ValueOf(e); lit != nil {
+		return lit
+	}
+
+	switch e := e.(type) {
+	case *ast.BasicLit:
+		panic("non-constant BasicLit") // unreachable
+
+	case *ast.FuncLit:
+		posn := b.Prog.Files.Position(e.Type.Func)
+		fn2 := &Function{
+			Name_:     fmt.Sprintf("func@%d.%d", posn.Line, posn.Column),
+			Signature: underlyingType(fn.Pkg.TypeOf(e.Type)).(*types.Signature),
+			Pos:       e.Type.Func,
+			Enclosing: fn,
+			Pkg:       fn.Pkg,
+			Prog:      b.Prog,
+			syntax: &funcSyntax{
+				paramFields:  e.Type.Params,
+				resultFields: e.Type.Results,
+				body:         e.Body,
+			},
+		}
+		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
+		b.buildFunction(fn2)
+		if fn2.FreeVars == nil {
+			// No free variables: the function value needs no closure.
+			return fn2
+		}
+		v := &MakeClosure{Fn: fn2}
+		v.setType(fn.Pkg.TypeOf(e))
+		for _, fv := range fn2.FreeVars {
+			v.Bindings = append(v.Bindings, fv.Outer)
+		}
+		return fn.emit(v)
+
+	case *ast.ParenExpr:
+		return b.expr(fn, e.X)
+
+	case *ast.TypeAssertExpr: // single-result form only
+		return emitTypeAssert(fn, b.expr(fn, e.X), fn.Pkg.TypeOf(e))
+
+	case *ast.CallExpr:
+		typ := fn.Pkg.TypeOf(e)
+		if fn.Pkg.IsType(e.Fun) {
+			// Type conversion, e.g. string(x) or big.Int(x)
+			return emitConv(fn, b.expr(fn, e.Args[0]), typ)
+		}
+		// Call to "intrinsic" built-ins, e.g. new, make, panic.
+		if id, ok := e.Fun.(*ast.Ident); ok {
+			obj := fn.Pkg.ObjectOf(id)
+			if _, ok := fn.Prog.Builtins[obj]; ok {
+				if v := b.builtin(fn, id.Name, e.Args, typ, e.Lparen); v != nil {
+					return v
+				}
+			}
+		}
+		// Regular function call.
+		var v Call
+		b.setCall(fn, e, &v.Call)
+		v.setType(typ)
+		return fn.emit(&v)
+
+	case *ast.UnaryExpr:
+		switch e.Op {
+		case token.AND: // &X --- potentially escaping.
+			return b.addr(fn, e.X, true).(address).addr
+		case token.ADD:
+			return b.expr(fn, e.X)
+		case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
+			v := &UnOp{
+				Op: e.Op,
+				X:  b.expr(fn, e.X),
+			}
+			v.setType(fn.Pkg.TypeOf(e))
+			return fn.emit(v)
+		default:
+			panic(e.Op)
+		}
+
+	case *ast.BinaryExpr:
+		switch e.Op {
+		case token.LAND, token.LOR:
+			return b.logicalBinop(fn, e)
+		case token.SHL, token.SHR:
+			fallthrough
+		case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+			return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.Pkg.TypeOf(e))
+
+		case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
+			return emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y))
+		default:
+			panic("illegal op in BinaryExpr: " + e.Op.String())
+		}
+
+	case *ast.SliceExpr:
+		var low, high Value
+		var x Value
+		switch underlyingType(fn.Pkg.TypeOf(e.X)).(type) {
+		case *types.Array:
+			// Potentially escaping.
+			x = b.addr(fn, e.X, true).(address).addr
+		case *types.Basic, *types.Slice, *types.Pointer: // *array
+			x = b.expr(fn, e.X)
+		default:
+			unreachable()
+		}
+		if e.High != nil {
+			high = b.expr(fn, e.High)
+		}
+		if e.Low != nil {
+			low = b.expr(fn, e.Low)
+		}
+		v := &Slice{
+			X:    x,
+			Low:  low,
+			High: high,
+		}
+		v.setType(fn.Pkg.TypeOf(e))
+		return fn.emit(v)
+
+	case *ast.Ident:
+		obj := fn.Pkg.ObjectOf(e)
+		// Global or universal?
+		if v, ok := b.lookup(fn.Pkg, obj); ok {
+			if objKind(obj) == ast.Var {
+				v = emitLoad(fn, v) // var (address)
+			}
+			return v
+		}
+		// Local?
+		return emitLoad(fn, fn.lookup(obj, false)) // var (address)
+
+	case *ast.SelectorExpr:
+		// p.M where p is a package.
+		if obj := fn.Pkg.isPackageRef(e); obj != nil {
+			return b.expr(fn, e.Sel)
+		}
+
+		// (*T).f or T.f, the method f from the method-set of type T.
+		if fn.Pkg.IsType(e.X) {
+			id := makeId(e.Sel.Name, fn.Pkg.Types)
+			typ := fn.Pkg.TypeOf(e.X)
+			if m := b.Prog.MethodSet(typ)[id]; m != nil {
+				return m
+			}
+
+			// T must be an interface; return method thunk.
+			return makeImethodThunk(b.Prog, typ, id)
+		}
+
+		// e.f where e is an expression.
+		return b.selector(fn, e, false, false)
+
+	case *ast.IndexExpr:
+		switch t := underlyingType(fn.Pkg.TypeOf(e.X)).(type) {
+		case *types.Array:
+			// Non-addressable array (in a register).
+			v := &Index{
+				X:     b.expr(fn, e.X),
+				Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+			}
+			v.setType(t.Elt)
+			return fn.emit(v)
+
+		case *types.Map:
+			// Maps are not addressable.
+			mapt := underlyingType(fn.Pkg.TypeOf(e.X)).(*types.Map)
+			v := &Lookup{
+				X:     b.expr(fn, e.X),
+				Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key),
+			}
+			v.setType(mapt.Elt)
+			return fn.emit(v)
+
+		case *types.Basic: // => string
+			// Strings are not addressable.
+			v := &Lookup{
+				X:     b.expr(fn, e.X),
+				Index: b.expr(fn, e.Index),
+			}
+			v.setType(tByte)
+			return fn.emit(v)
+
+		case *types.Slice, *types.Pointer: // *array
+			// Addressable slice/array; use IndexAddr and Load.
+			return b.addr(fn, e, false).load(fn)
+
+		default:
+			panic("unexpected container type in IndexExpr: " + t.String())
+		}
+
+	case *ast.CompositeLit, *ast.StarExpr:
+		// Addressable types (lvalues)
+		return b.addr(fn, e, false).load(fn)
+	}
+
+	panic(fmt.Sprintf("unexpected expr: %T", e))
+}
+
+// stmtList emits to fn code for all statements in list, in order.
+func (b *Builder) stmtList(fn *Function, list []ast.Stmt) {
+	for _, s := range list {
+		b.stmt(fn, s)
+	}
+}
+
+// setCallFunc populates the function parts of a CallCommon structure
+// (Func, Method, Recv, Args[0]) based on the kind of invocation
+// occurring in e.
+//
+func (b *Builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
+	c.Pos = e.Lparen
+	c.HasEllipsis = e.Ellipsis != 0
+
+	// Is the call of the form x.f()?
+	sel, ok := noparens(e.Fun).(*ast.SelectorExpr)
+
+	// Case 0: e.Fun evaluates normally to a function.
+	if !ok {
+		c.Func = b.expr(fn, e.Fun)
+		return
+	}
+
+	// Case 1: call of form x.F() where x is a package name.
+	if obj := fn.Pkg.isPackageRef(sel); obj != nil {
+		// This is a specialization of expr(ast.Ident(obj)).
+		if v, ok := b.lookup(fn.Pkg, obj); ok {
+			if _, ok := v.(*Function); !ok {
+				v = emitLoad(fn, v) // var (address)
+			}
+			c.Func = v
+			return
+		}
+		panic("undefined package-qualified name: " + obj.GetName())
+	}
+
+	// Case 2a: X.f() or (*X).f(): a statically dispatched call to
+	// the method f in the method-set of X or *X.  X may be
+	// an interface.  Treat like case 0.
+	// TODO(adonovan): opt: inline expr() here, to make the call static
+	// and to avoid generation of a stub for an interface method.
+	if fn.Pkg.IsType(sel.X) {
+		c.Func = b.expr(fn, e.Fun)
+		return
+	}
+
+	// Let X be the type of x.
+	typ := fn.Pkg.TypeOf(sel.X)
+
+	// Case 2: x.f(): a statically dispatched call to a method
+	// from the method-set of X or perhaps *X (if x is addressable
+	// but not a pointer).
+	id := makeId(sel.Sel.Name, fn.Pkg.Types)
+	// Consult method-set of X.
+	if m := b.Prog.MethodSet(typ)[id]; m != nil {
+		var recv Value
+		aptr := isPointer(typ)
+		fptr := isPointer(m.Signature.Recv.Type)
+		if aptr == fptr {
+			// Actual's and formal's "pointerness" match.
+			recv = b.expr(fn, sel.X)
+		} else {
+			// Actual is a pointer, formal is not.
+			// Load a copy.
+			recv = emitLoad(fn, b.expr(fn, sel.X))
+		}
+		c.Func = m
+		c.Args = append(c.Args, recv) // receiver is the first argument
+		return
+	}
+	if !isPointer(typ) {
+		// Consult method-set of *X.
+		if m := b.Prog.MethodSet(pointer(typ))[id]; m != nil {
+			// A method found only in MS(*X) must have a
+			// pointer formal receiver; but the actual
+			// value is not a pointer.
+			// Implicit & -- possibly escaping.
+			recv := b.addr(fn, sel.X, true).(address).addr
+			c.Func = m
+			c.Args = append(c.Args, recv)
+			return
+		}
+	}
+
+	switch t := underlyingType(typ).(type) {
+	case *types.Struct, *types.Pointer:
+		// Case 3: x.f() where x.f is a function value in a
+		// struct field f; not a method call.  f is a 'var'
+		// (of function type) in the Fields of types.Struct X.
+		// Treat like case 0.
+		c.Func = b.expr(fn, e.Fun)
+
+	case *types.Interface:
+		// Case 4: x.f() where a dynamically dispatched call
+		// to an interface method f.  f is a 'func' object in
+		// the Methods of types.Interface X
+		c.Method, _ = methodIndex(t, t.Methods, id)
+		c.Recv = b.expr(fn, sel.X)
+
+	default:
+		panic(fmt.Sprintf("illegal (%s).%s() call; X:%T", t, sel.Sel.Name, sel.X))
+	}
+}
+
+// emitCallArgs emits to f code for the actual parameters of call e to
+// a (possibly built-in) function of effective type sig.
+// The argument values are appended to args, which is then returned.
+//
+func (b *Builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
+	// f(x, y, z...): pass slice z straight through.
+	if e.Ellipsis != 0 {
+		for i, arg := range e.Args {
+			// TODO(gri): annoyingly Signature.Params doesn't
+			// reflect the slice type for a final ...T param.
+			t := sig.Params[i].Type
+			if sig.IsVariadic && i == len(e.Args)-1 {
+				t = &types.Slice{Elt: t}
+			}
+			args = append(args, emitConv(fn, b.expr(fn, arg), t))
+		}
+		return args
+	}
+
+	offset := len(args) // 1 if call has receiver, 0 otherwise
+
+	// Evaluate actual parameter expressions.
+	//
+	// If this is a chained call of the form f(g()) where g has
+	// multiple return values (MRV), they are flattened out into
+	// args; a suffix of them may end up in a varargs slice.
+	for _, arg := range e.Args {
+		v := b.expr(fn, arg)
+		if ttuple, ok := v.Type().(*types.Result); ok { // MRV chain
+			for i, t := range ttuple.Values {
+				args = append(args, emitExtract(fn, v, i, t.Type))
+			}
+		} else {
+			args = append(args, v)
+		}
+	}
+
+	// Actual->formal assignability conversions for normal parameters.
+	np := len(sig.Params) // number of normal parameters
+	if sig.IsVariadic {
+		np--
+	}
+	for i := 0; i < np; i++ {
+		args[offset+i] = emitConv(fn, args[offset+i], sig.Params[i].Type)
+	}
+
+	// Actual->formal assignability conversions for variadic parameter,
+	// and construction of slice.
+	if sig.IsVariadic {
+		// varargs is the suffix of args destined for the final
+		// ...T parameter; it must be packed into a new []T.
+		varargs := args[offset+np:]
+		vt := sig.Params[np].Type
+		st := &types.Slice{Elt: vt}
+		if len(varargs) == 0 {
+			// No variadic actuals: pass a nil slice.
+			args = append(args, nilLiteral(st))
+		} else {
+			// Replace a suffix of args with a slice containing it.
+			at := &types.Array{
+				Elt: vt,
+				Len: int64(len(varargs)),
+			}
+			a := emitNew(fn, at, e.Lparen)
+			for i, arg := range varargs {
+				iaddr := &IndexAddr{
+					X:     a,
+					Index: intLiteral(int64(i)),
+				}
+				iaddr.setType(pointer(vt))
+				fn.emit(iaddr)
+				emitStore(fn, iaddr, arg)
+			}
+			s := &Slice{X: a}
+			s.setType(st)
+			args[offset+np] = fn.emit(s)
+			args = args[:offset+np+1]
+		}
+	}
+	return args
+}
+
+// setCall emits to fn code to evaluate all the parameters of a function
+// call e, and populates *c with those values.
+//
+func (b *Builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
+	// First deal with the f(...) part and optional receiver.
+	b.setCallFunc(fn, e, c)
+
+	// Then append the other actual parameters.
+	sig, _ := underlyingType(fn.Pkg.TypeOf(e.Fun)).(*types.Signature)
+	if sig == nil {
+		sig = builtinCallSignature(&fn.Pkg.TypeInfo, e)
+	}
+	c.Args = b.emitCallArgs(fn, sig, e, c.Args)
+}
+
+// assignOp emits to fn code to perform loc += incr or loc -= incr.
+func (b *Builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token) {
+	// Read-modify-write: load the current value, convert the
+	// increment to its type, apply op, and store the result back.
+	cur := loc.load(fn)
+	rhs := emitConv(fn, incr, cur.Type())
+	loc.store(fn, emitArith(fn, op, cur, rhs, loc.typ()))
+}
+
+// buildGlobal emits code to the g.Pkg.Init function for the variable
+// definition(s) of g.  Effects occur out of lexical order; see
+// explanation at globalValueSpec.
+// Precondition: g == b.globals[obj]
+//
+func (b *Builder) buildGlobal(g *Global, obj types.Object) {
+	// A nil spec marks a global that is already built (or whose
+	// build is currently in progress); do nothing in that case.
+	if s := g.spec; s != nil {
+		b.globalValueSpec(g.Pkg.Init, s, g, obj)
+	}
+}
+
+// globalValueSpec emits to init code to define one or all of the vars
+// in the package-level ValueSpec spec.
+//
+// It implements the build phase for a ValueSpec, ensuring that all
+// vars are initialized if not already visited by buildGlobal during
+// the reference graph traversal.
+//
+// This function may be called in two modes:
+// A) with g and obj non-nil, to initialize just a single global.
+//    This occurs during the reference graph traversal.
+// B) with g and obj nil, to initialize all globals in the same ValueSpec.
+//    This occurs during the left-to-right traversal over the ast.File.
+//
+// Precondition: g == b.globals[obj]
+//
+// Package-level var initialization order is quite subtle.
+// The side effects of:
+//   var a, b = f(), g()
+// are not observed left-to-right if b is referenced before a in the
+// reference graph traversal.  So, we track which Globals have been
+// initialized by setting Global.spec=nil.
+//
+// Blank identifiers make things more complex since they don't have
+// associated types.Objects or ssa.Globals yet we must still ensure
+// that their corresponding side effects are observed at the right
+// moment.  Consider:
+//   var a, _, b = f(), g(), h()
+// Here, the relative ordering of the call to g() is unspecified but
+// it must occur exactly once, during mode B.  So globalValueSpec for
+// blanks must special-case n:n assignments and just evaluate the RHS
+// g() for effect.
+//
+// In a n:1 assignment:
+//   var a, _, b = f()
+// a reference to either a or b causes both globals to be initialized
+// at the same time.  Furthermore, no further work is required to
+// ensure that the effects of the blank assignment occur.  We must
+// keep track of which n:1 specs have been evaluated, independent of
+// which Globals are on the LHS (possibly none, if all are blank).
+//
+// See also localValueSpec.
+//
+func (b *Builder) globalValueSpec(init *Function, spec *ast.ValueSpec, g *Global, obj types.Object) {
+	switch {
+	case len(spec.Values) == len(spec.Names):
+		// e.g. var x, y = 0, 1
+		// 1:1 assignment.
+		// Only the first time for a given GLOBAL has any effect.
+		for i, id := range spec.Names {
+			var lval lvalue = blank{}
+			if g != nil {
+				// Mode A: initialized only a single global, g
+				if isBlankIdent(id) || init.Pkg.ObjectOf(id) != obj {
+					continue
+				}
+				g.spec = nil
+				lval = address{g}
+			} else {
+				// Mode B: initialize all globals.
+				if !isBlankIdent(id) {
+					g2 := b.globals[init.Pkg.ObjectOf(id)].(*Global)
+					if g2.spec == nil {
+						continue // already done
+					}
+					g2.spec = nil
+					lval = address{g2}
+				}
+			}
+			if b.Context.Mode&LogSource != 0 {
+				fmt.Fprintln(os.Stderr, "build global", id.Name)
+			}
+			b.exprInPlace(init, lval, spec.Values[i])
+			if g != nil {
+				// Mode A: only the requested global g was
+				// wanted; stop after building it.
+				break
+			}
+		}
+
+	case len(spec.Values) == 0:
+		// e.g. var x, y int
+		// Globals are implicitly zero-initialized.
+
+	default:
+		// e.g. var x, _, y = f()
+		// n:1 assignment.
+		// Only the first time for a given SPEC has any effect.
+		if !init.Pkg.nTo1Vars[spec] {
+			init.Pkg.nTo1Vars[spec] = true
+			if b.Context.Mode&LogSource != 0 {
+				defer logStack("build globals %s", spec.Names)()
+			}
+			tuple := b.exprN(init, spec.Values[0])
+			rtypes := tuple.Type().(*types.Result).Values
+			for i, id := range spec.Names {
+				if !isBlankIdent(id) {
+					g := b.globals[init.Pkg.ObjectOf(id)].(*Global)
+					g.spec = nil // just an optimization
+					emitStore(init, g,
+						emitExtract(init, tuple, i, rtypes[i].Type))
+				}
+			}
+		}
+	}
+}
+
+// localValueSpec emits to fn code to define all of the vars in the
+// function-local ValueSpec, spec.
+//
+// See also globalValueSpec: the two routines are similar but local
+// ValueSpecs are much simpler since they are encountered once only,
+// in their entirety, in lexical order.
+//
+func (b *Builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
+	nvals := len(spec.Values)
+
+	if nvals == len(spec.Names) {
+		// 1:1 assignment, e.g. var x, y = 0, 1.
+		// Evaluate each RHS in place into its own fresh local,
+		// or discard it if the name is blank.
+		for i, id := range spec.Names {
+			var dst lvalue = blank{}
+			if !isBlankIdent(id) {
+				dst = address{fn.addNamedLocal(fn.Pkg.ObjectOf(id))}
+			}
+			b.exprInPlace(fn, dst, spec.Values[i])
+		}
+		return
+	}
+
+	if nvals == 0 {
+		// No initializer, e.g. var x, y int.
+		// Locals are implicitly zero-initialized, so just
+		// declare the non-blank ones.
+		for _, id := range spec.Names {
+			if !isBlankIdent(id) {
+				fn.addNamedLocal(fn.Pkg.ObjectOf(id))
+			}
+		}
+		return
+	}
+
+	// n:1 assignment, e.g. var x, y = pos().
+	// Evaluate the single multi-valued RHS once, then scatter its
+	// components into the non-blank locals.
+	tuple := b.exprN(fn, spec.Values[0])
+	rtypes := tuple.Type().(*types.Result).Values
+	for i, id := range spec.Names {
+		if !isBlankIdent(id) {
+			lhs := fn.addNamedLocal(fn.Pkg.ObjectOf(id))
+			emitStore(fn, lhs, emitExtract(fn, tuple, i, rtypes[i].Type))
+		}
+	}
+}
+
+// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
+// isDef is true if this is a short variable declaration (:=).
+//
+// Note the similarity with localValueSpec.
+//
+func (b *Builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
+	// Side effects of all LHSs and RHSs must occur in left-to-right order.
+	// So: evaluate every LHS to an lvalue first, deferring the stores.
+	var lvals []lvalue
+	for _, lhs := range lhss {
+		var lval lvalue = blank{}
+		if !isBlankIdent(lhs) {
+			if isDef {
+				// Local may be "redeclared" in the same
+				// scope, so don't blindly create anew.
+				obj := fn.Pkg.ObjectOf(lhs.(*ast.Ident))
+				if _, ok := fn.objects[obj]; !ok {
+					fn.addNamedLocal(obj)
+				}
+			}
+			lval = b.addr(fn, lhs, false) // non-escaping
+		}
+		lvals = append(lvals, lval)
+	}
+	if len(lhss) == len(rhss) {
+		// e.g. x, y = f(), g()
+		if len(lhss) == 1 {
+			// x = type{...}
+			// Optimization: in-place construction
+			// of composite literals.
+			b.exprInPlace(fn, lvals[0], rhss[0])
+		} else {
+			// Parallel assignment.  All reads must occur
+			// before all updates, precluding exprInPlace.
+			// TODO(adonovan): opt: is it sound to
+			// perform exprInPlace if !isDef?
+			var rvals []Value
+			for _, rval := range rhss {
+				rvals = append(rvals, b.expr(fn, rval))
+			}
+			for i, lval := range lvals {
+				lval.store(fn, rvals[i])
+			}
+		}
+	} else {
+		// n:1 assignment: single multi-valued RHS,
+		// e.g. x, y = pos()
+		tuple := b.exprN(fn, rhss[0])
+		rtypes := tuple.Type().(*types.Result).Values
+		for i, lval := range lvals {
+			lval.store(fn, emitExtract(fn, tuple, i, rtypes[i].Type))
+		}
+	}
+}
+
+// compLit emits to fn code to initialize a composite literal e at
+// address addr with type typ, typically allocated by Alloc.
+// Nested composite literals are recursively initialized in place
+// where possible.
+//
+func (b *Builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, typ types.Type) {
+	// TODO(adonovan): document how and why typ ever differs from
+	// fn.Pkg.TypeOf(e).
+
+	switch t := underlyingType(typ).(type) {
+	case *types.Struct:
+		for i, e := range e.Elts {
+			// By default the i'th element initializes the i'th
+			// field; a KeyValueExpr overrides this with the
+			// field selected by name.
+			fieldIndex := i
+			if kv, ok := e.(*ast.KeyValueExpr); ok {
+				fname := kv.Key.(*ast.Ident).Name
+				for i, sf := range t.Fields {
+					if sf.Name == fname {
+						fieldIndex = i
+						e = kv.Value
+						break
+					}
+				}
+			}
+			sf := t.Fields[fieldIndex]
+			faddr := &FieldAddr{
+				X:     addr,
+				Field: fieldIndex,
+			}
+			faddr.setType(pointer(sf.Type))
+			fn.emit(faddr)
+			b.exprInPlace(fn, address{faddr}, e)
+		}
+
+	case *types.Array, *types.Slice:
+		var at *types.Array
+		var array Value
+		switch t := t.(type) {
+		case *types.Slice:
+			at = &types.Array{Elt: t.Elt} // set Len later
+			array = emitNew(fn, at, e.Lbrace)
+		case *types.Array:
+			at = t
+			array = addr
+		}
+		// idx is the index of the current element: set explicitly
+		// by a KeyValueExpr key, otherwise the successor of the
+		// previous index.  max tracks the highest index seen; it
+		// determines the length of a slice literal.
+		var idx *Literal
+		var max int64 = -1
+		for _, e := range e.Elts {
+			if kv, ok := e.(*ast.KeyValueExpr); ok {
+				idx = b.expr(fn, kv.Key).(*Literal)
+				e = kv.Value
+			} else {
+				var idxval int64
+				if idx != nil {
+					idxval = idx.Int64() + 1
+				}
+				idx = intLiteral(idxval)
+			}
+			if idx.Int64() > max {
+				max = idx.Int64()
+			}
+			iaddr := &IndexAddr{
+				X:     array,
+				Index: idx,
+			}
+			iaddr.setType(pointer(at.Elt))
+			fn.emit(iaddr)
+			b.exprInPlace(fn, address{iaddr}, e)
+		}
+		if t != at { // slice
+			// Now that the length is known, fix up the backing
+			// array type and store a slice of it at addr.
+			at.Len = max + 1
+			s := &Slice{X: array}
+			s.setType(t)
+			emitStore(fn, addr, fn.emit(s))
+		}
+
+	case *types.Map:
+		m := &MakeMap{Reserve: intLiteral(int64(len(e.Elts))), Pos: e.Lbrace}
+		m.setType(typ)
+		emitStore(fn, addr, fn.emit(m))
+		for _, e := range e.Elts {
+			e := e.(*ast.KeyValueExpr)
+			up := &MapUpdate{
+				Map:   m,
+				Key:   emitConv(fn, b.expr(fn, e.Key), t.Key),
+				Value: emitConv(fn, b.expr(fn, e.Value), t.Elt),
+			}
+			fn.emit(up)
+		}
+
+	case *types.Pointer:
+		// Pointers can only occur in the recursive case; we
+		// strip them off in addr() before calling compLit
+		// again, so that we allocate space for a T not a *T.
+		// (Fixed: the message previously lacked its closing paren.)
+		panic("compLit(fn, addr, e, *types.Pointer)")
+
+	default:
+		panic("unexpected CompositeLit type: " + t.String())
+	}
+}
+
+// switchStmt emits to fn code for the switch statement s, optionally
+// labelled by label.
+//
+func (b *Builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
+	// We treat SwitchStmt like a sequential if-else chain.
+	// More efficient strategies (e.g. multiway dispatch)
+	// are possible if all cases are free of side effects.
+	if s.Init != nil {
+		b.stmt(fn, s.Init)
+	}
+	// A tagless switch is equivalent to "switch true {...}".
+	var tag Value = vTrue
+	if s.Tag != nil {
+		tag = b.expr(fn, s.Tag)
+	}
+	done := fn.newBasicBlock("switch.done")
+	if label != nil {
+		label._break = done
+	}
+	// We pull the default case (if present) down to the end.
+	// But each fallthrough label must point to the next
+	// body block in source order, so we preallocate a
+	// body block (fallthru) for the next case.
+	// Unfortunately this makes for a confusing block order.
+	var dfltBody *[]ast.Stmt
+	var dfltFallthrough *BasicBlock
+	var fallthru, dfltBlock *BasicBlock
+	ncases := len(s.Body.List)
+	for i, clause := range s.Body.List {
+		body := fallthru
+		if body == nil {
+			body = fn.newBasicBlock("switch.body") // first case only
+		}
+
+		// Preallocate body block for the next case.
+		fallthru = done
+		if i+1 < ncases {
+			fallthru = fn.newBasicBlock("switch.body")
+		}
+
+		cc := clause.(*ast.CaseClause)
+		if cc.List == nil {
+			// Default case.
+			dfltBody = &cc.Body
+			dfltFallthrough = fallthru
+			dfltBlock = body
+			continue
+		}
+
+		var nextCond *BasicBlock
+		for _, cond := range cc.List {
+			nextCond = fn.newBasicBlock("switch.next")
+			// TODO(adonovan): opt: when tag==vTrue, we'd
+			// get much better code if we use b.cond(cond)
+			// instead of BinOp(EQL, tag, b.expr(cond))
+			// followed by If.  Don't forget conversions
+			// though.
+			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond))
+			emitIf(fn, cond, body, nextCond)
+			fn.currentBlock = nextCond
+		}
+		fn.currentBlock = body
+		fn.targets = &targets{
+			tail:         fn.targets,
+			_break:       done,
+			_fallthrough: fallthru,
+		}
+		b.stmtList(fn, cc.Body)
+		fn.targets = fn.targets.tail
+		emitJump(fn, done)
+		fn.currentBlock = nextCond
+	}
+	if dfltBlock != nil {
+		emitJump(fn, dfltBlock)
+		fn.currentBlock = dfltBlock
+		fn.targets = &targets{
+			tail:         fn.targets,
+			_break:       done,
+			_fallthrough: dfltFallthrough,
+		}
+		b.stmtList(fn, *dfltBody)
+		fn.targets = fn.targets.tail
+	}
+	// Jump from the end of the default body (or from the final
+	// cond block, if there is no default) to done.
+	emitJump(fn, done)
+	fn.currentBlock = done
+}
+
+// typeSwitchStmt emits to fn code for the type switch statement s, optionally
+// labelled by label.
+//
+func (b *Builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
+	// We treat TypeSwitchStmt like a sequential if-else
+	// chain.  More efficient strategies (e.g. multiway
+	// dispatch) are possible.
+
+	// Typeswitch lowering:
+	//
+	// var x X
+	// switch y := x.(type) {
+	// case T1, T2: S1                  // >1 	(y := x)
+	// default:     SD                  // 0 types 	(y := x)
+	// case T3:     S3                  // 1 type 	(y := x.(T3))
+	// }
+	//
+	//      ...s.Init...
+	// 	x := eval x
+	//      y := x
+	// .caseT1:
+	// 	t1, ok1 := typeswitch,ok x <T1>
+	// 	if ok1 then goto S1 else goto .caseT2
+	// .caseT2:
+	// 	t2, ok2 := typeswitch,ok x <T2>
+	// 	if ok2 then goto S1 else goto .caseT3
+	// .S1:
+	// 	...S1...
+	// 	goto done
+	// .caseT3:
+	// 	t3, ok3 := typeswitch,ok x <T3>
+	// 	if ok3 then goto S3 else goto default
+	// .S3:
+	// 	y' := t3  // Kludge: within scope of S3, y resolves here
+	// 	...S3...
+	// 	goto done
+	// .default:
+	// 	goto done
+	// .done:
+
+	if s.Init != nil {
+		b.stmt(fn, s.Init)
+	}
+
+	var x, y Value
+	var id *ast.Ident
+	switch ass := s.Assign.(type) {
+	case *ast.ExprStmt: // x.(type)
+		x = b.expr(fn, noparens(ass.X).(*ast.TypeAssertExpr).X)
+	case *ast.AssignStmt: // y := x.(type)
+		x = b.expr(fn, noparens(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
+		id = ass.Lhs[0].(*ast.Ident)
+		y = fn.addNamedLocal(fn.Pkg.ObjectOf(id))
+		emitStore(fn, y, x)
+	}
+
+	done := fn.newBasicBlock("typeswitch.done")
+	if label != nil {
+		label._break = done
+	}
+	var dfltBody []ast.Stmt
+	for _, clause := range s.Body.List {
+		cc := clause.(*ast.CaseClause)
+		if cc.List == nil {
+			dfltBody = cc.Body
+			continue
+		}
+		body := fn.newBasicBlock("typeswitch.body")
+		var next *BasicBlock
+		var casetype types.Type
+		var ti Value // t_i, ok := typeassert,ok x <T_i>
+		for _, cond := range cc.List {
+			next = fn.newBasicBlock("typeswitch.next")
+			casetype = fn.Pkg.TypeOf(cond)
+			var condv Value
+			if casetype == tUntypedNil {
+				// "case nil:" compares x against nil
+				// rather than performing a type test.
+				condv = emitCompare(fn, token.EQL, x, nilLiteral(x.Type()))
+			} else {
+				yok := emitTypeTest(fn, x, casetype)
+				ti = emitExtract(fn, yok, 0, casetype)
+				condv = emitExtract(fn, yok, 1, tBool)
+			}
+			emitIf(fn, condv, body, next)
+			fn.currentBlock = next
+		}
+		fn.currentBlock = body
+		if id != nil && len(cc.List) == 1 && casetype != tUntypedNil {
+			// Declare a new shadow local variable of the
+			// same name but a more specific type.
+			// Side effect: reassociates binding for y's object.
+			y2 := fn.addNamedLocal(fn.Pkg.ObjectOf(id))
+			y2.Name_ += "'" // debugging aid
+			y2.Type_ = pointer(casetype)
+			emitStore(fn, y2, ti)
+		}
+		fn.targets = &targets{
+			tail:   fn.targets,
+			_break: done,
+		}
+		b.stmtList(fn, cc.Body)
+		fn.targets = fn.targets.tail
+		if id != nil {
+			fn.objects[fn.Pkg.ObjectOf(id)] = y // restore previous y binding
+		}
+		emitJump(fn, done)
+		fn.currentBlock = next
+	}
+	fn.targets = &targets{
+		tail:   fn.targets,
+		_break: done,
+	}
+	b.stmtList(fn, dfltBody)
+	fn.targets = fn.targets.tail
+	emitJump(fn, done)
+	fn.currentBlock = done
+}
+
+// selectStmt emits to fn code for the select statement s, optionally
+// labelled by label.
+//
+func (b *Builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
+	// A blocking select of a single case degenerates to a
+	// simple send or receive.
+	// TODO(adonovan): opt: is this optimization worth its weight?
+	if len(s.Body.List) == 1 {
+		clause := s.Body.List[0].(*ast.CommClause)
+		if clause.Comm != nil {
+			b.stmt(fn, clause.Comm)
+			done := fn.newBasicBlock("select.done")
+			if label != nil {
+				label._break = done
+			}
+			fn.targets = &targets{
+				tail:   fn.targets,
+				_break: done,
+			}
+			b.stmtList(fn, clause.Body)
+			fn.targets = fn.targets.tail
+			emitJump(fn, done)
+			fn.currentBlock = done
+			return
+		}
+	}
+
+	// First evaluate all channels in all cases, and find
+	// the directions of each state.
+	var states []SelectState
+	blocking := true
+	for _, clause := range s.Body.List {
+		switch comm := clause.(*ast.CommClause).Comm.(type) {
+		case nil: // default case
+			blocking = false
+
+		case *ast.SendStmt: // ch<- i
+			ch := b.expr(fn, comm.Chan)
+			states = append(states, SelectState{
+				Dir:  ast.SEND,
+				Chan: ch,
+				Send: emitConv(fn, b.expr(fn, comm.Value),
+					underlyingType(ch.Type()).(*types.Chan).Elt),
+			})
+
+		case *ast.AssignStmt: // x := <-ch
+			states = append(states, SelectState{
+				Dir:  ast.RECV,
+				Chan: b.expr(fn, noparens(comm.Rhs[0]).(*ast.UnaryExpr).X),
+			})
+
+		case *ast.ExprStmt: // <-ch
+			states = append(states, SelectState{
+				Dir:  ast.RECV,
+				Chan: b.expr(fn, noparens(comm.X).(*ast.UnaryExpr).X),
+			})
+		}
+	}
+
+	// We dispatch on the (fair) result of Select using a
+	// sequential if-else chain, in effect:
+	//
+	// idx, recv, recvOk := select(...)
+	// if idx == 0 {  // receive on channel 0
+	//     x, ok := recv.(T0), recvOk
+	//     ...state0...
+	// } else if v == 1 {   // send on channel 1
+	//     ...state1...
+	// } else {
+	//     ...default...
+	// }
+	triple := &Select{
+		States:   states,
+		Blocking: blocking,
+	}
+	triple.setType(tSelect)
+	fn.emit(triple)
+	idx := emitExtract(fn, triple, 0, tInt)
+
+	done := fn.newBasicBlock("select.done")
+	if label != nil {
+		label._break = done
+	}
+
+	var dfltBody *[]ast.Stmt
+	// state indexes states[]; the default clause (Comm==nil) is
+	// skipped below and so does not consume a state.
+	state := 0
+	for _, cc := range s.Body.List {
+		clause := cc.(*ast.CommClause)
+		if clause.Comm == nil {
+			dfltBody = &clause.Body
+			continue
+		}
+		body := fn.newBasicBlock("select.body")
+		next := fn.newBasicBlock("select.next")
+		emitIf(fn, emitCompare(fn, token.EQL, idx, intLiteral(int64(state))), body, next)
+		fn.currentBlock = body
+		fn.targets = &targets{
+			tail:   fn.targets,
+			_break: done,
+		}
+		switch comm := clause.Comm.(type) {
+		case *ast.AssignStmt: // x := <-states[state].Chan
+			xdecl := fn.addNamedLocal(fn.Pkg.ObjectOf(comm.Lhs[0].(*ast.Ident)))
+			recv := emitTypeAssert(fn, emitExtract(fn, triple, 1, tEface), indirectType(xdecl.Type()))
+			emitStore(fn, xdecl, recv)
+
+			if len(comm.Lhs) == 2 { // x, ok := ...
+				okdecl := fn.addNamedLocal(fn.Pkg.ObjectOf(comm.Lhs[1].(*ast.Ident)))
+				emitStore(fn, okdecl, emitExtract(fn, triple, 2, indirectType(okdecl.Type())))
+			}
+		}
+		b.stmtList(fn, clause.Body)
+		fn.targets = fn.targets.tail
+		emitJump(fn, done)
+		fn.currentBlock = next
+		state++
+	}
+	if dfltBody != nil {
+		fn.targets = &targets{
+			tail:   fn.targets,
+			_break: done,
+		}
+		b.stmtList(fn, *dfltBody)
+		fn.targets = fn.targets.tail
+	}
+	emitJump(fn, done)
+	fn.currentBlock = done
+}
+
+// forStmt emits to fn code for the for statement s, optionally
+// labelled by label.
+//
+func (b *Builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
+	//	...init...
+	//      jump loop
+	// loop:
+	//      if cond goto body else done
+	// body:
+	//      ...body...
+	//      jump post
+	// post:				 (target of continue)
+	//      ...post...
+	//      jump loop
+	// done:                                 (target of break)
+	if s.Init != nil {
+		b.stmt(fn, s.Init)
+	}
+	body := fn.newBasicBlock("for.body")
+	done := fn.newBasicBlock("for.done") // target of 'break'
+	loop := body                         // target of back-edge
+	if s.Cond != nil {
+		loop = fn.newBasicBlock("for.loop")
+	}
+	cont := loop // target of 'continue'
+	if s.Post != nil {
+		cont = fn.newBasicBlock("for.post")
+	}
+	if label != nil {
+		label._break = done
+		label._continue = cont
+	}
+	emitJump(fn, loop)
+	fn.currentBlock = loop
+	if loop != body {
+		b.cond(fn, s.Cond, body, done)
+		fn.currentBlock = body
+	}
+	fn.targets = &targets{
+		tail:      fn.targets,
+		_break:    done,
+		_continue: cont,
+	}
+	b.stmt(fn, s.Body)
+	fn.targets = fn.targets.tail
+	// If there is no post statement, cont == loop and this jump is
+	// itself the back-edge.
+	emitJump(fn, cont)
+
+	if s.Post != nil {
+		fn.currentBlock = cont
+		b.stmt(fn, s.Post)
+		emitJump(fn, loop) // back-edge
+	}
+	fn.currentBlock = done
+}
+
+// rangeIndexed emits to fn the header for an integer indexed loop
+// over array, *array or slice value x.
+// The v result is defined only if tv is non-nil.
+//
+func (b *Builder) rangeIndexed(fn *Function, x Value, tv types.Type) (k, v Value, loop, done *BasicBlock) {
+	//
+	//      length = len(x)
+	//      index = -1
+	// loop:                                   (target of continue)
+	//      index++
+	// 	if index < length goto body else done
+	// body:
+	//      k = index
+	//      v = x[index]
+	//      ...body...
+	// 	jump loop
+	// done:                                   (target of break)
+
+	// Determine number of iterations.
+	var length Value
+	if arr, ok := deref(x.Type()).(*types.Array); ok {
+		// For array or *array, the number of iterations is
+		// known statically thanks to the type.  We avoid a
+		// data dependence upon x, permitting later dead-code
+		// elimination if x is pure, static unrolling, etc.
+		// Ranging over a nil *array may have >0 iterations.
+		length = intLiteral(arr.Len)
+	} else {
+		// length = len(x).
+		var c Call
+		c.Call.Func = b.globals[types.Universe.Lookup("len")]
+		c.Call.Args = []Value{x}
+		c.setType(tInt)
+		length = fn.emit(&c)
+	}
+
+	index := fn.addLocal(tInt, token.NoPos)
+	emitStore(fn, index, intLiteral(-1))
+
+	loop = fn.newBasicBlock("rangeindex.loop")
+	emitJump(fn, loop)
+	fn.currentBlock = loop
+
+	incr := &BinOp{
+		Op: token.ADD,
+		X:  emitLoad(fn, index),
+		Y:  vOne,
+	}
+	incr.setType(tInt)
+	emitStore(fn, index, fn.emit(incr))
+
+	body := fn.newBasicBlock("rangeindex.body")
+	done = fn.newBasicBlock("rangeindex.done")
+	// Compare against incr (the just-incremented value) directly,
+	// avoiding a reload of index.
+	emitIf(fn, emitCompare(fn, token.LSS, incr, length), body, done)
+	fn.currentBlock = body
+
+	k = emitLoad(fn, index)
+	if tv != nil {
+		switch t := underlyingType(x.Type()).(type) {
+		case *types.Array:
+			instr := &Index{
+				X:     x,
+				Index: k,
+			}
+			instr.setType(t.Elt)
+			v = fn.emit(instr)
+
+		case *types.Pointer: // *array
+			instr := &IndexAddr{
+				X:     x,
+				Index: k,
+			}
+			instr.setType(pointer(t.Base.(*types.Array).Elt))
+			v = emitLoad(fn, fn.emit(instr))
+
+		case *types.Slice:
+			instr := &IndexAddr{
+				X:     x,
+				Index: k,
+			}
+			instr.setType(pointer(t.Elt))
+			v = emitLoad(fn, fn.emit(instr))
+
+		default:
+			panic("rangeIndexed x:" + t.String())
+		}
+	}
+	return
+}
+
+// rangeIter emits to fn the header for a loop using
+// Range/Next/Extract to iterate over map or string value x.
+// tk and tv are the types of the key/value results k and v, or nil
+// if the respective component is not wanted.
+//
+func (b *Builder) rangeIter(fn *Function, x Value, tk, tv types.Type) (k, v Value, loop, done *BasicBlock) {
+	//
+	//	it = range x
+	// loop:                                   (target of continue)
+	//	okv = next it                      (ok, key, value)
+	//  	ok = extract okv #0
+	// 	if ok goto body else done
+	// body:
+	// 	k = extract okv #1
+	// 	v = extract okv #2
+	//      ...body...
+	// 	jump loop
+	// done:                                   (target of break)
+	//
+
+	// tInvalid marks an unwanted component (see extraction below).
+	if tk == nil {
+		tk = tInvalid
+	}
+	if tv == nil {
+		tv = tInvalid
+	}
+
+	rng := &Range{X: x}
+	rng.setType(tRangeIter)
+	it := fn.emit(rng)
+
+	loop = fn.newBasicBlock("rangeiter.loop")
+	emitJump(fn, loop)
+	fn.currentBlock = loop
+
+	// Per the doc comment, x is a map or string; a *types.Basic
+	// underlying type here therefore indicates a string.
+	_, isString := underlyingType(x.Type()).(*types.Basic)
+
+	okv := &Next{
+		Iter:     it,
+		IsString: isString,
+	}
+	okv.setType(&types.Result{Values: []*types.Var{
+		varOk,
+		{Name: "k", Type: tk},
+		{Name: "v", Type: tv},
+	}})
+	fn.emit(okv)
+
+	body := fn.newBasicBlock("rangeiter.body")
+	done = fn.newBasicBlock("rangeiter.done")
+	emitIf(fn, emitExtract(fn, okv, 0, tBool), body, done)
+	fn.currentBlock = body
+
+	if tk != tInvalid {
+		k = emitExtract(fn, okv, 1, tk)
+	}
+	if tv != tInvalid {
+		v = emitExtract(fn, okv, 2, tv)
+	}
+	return
+}
+
+// rangeChan emits to fn the header for a loop that receives from
+// channel x until it fails.
+// tk is the channel's element type, or nil if the k result is
+// not wanted.
+//
+func (b *Builder) rangeChan(fn *Function, x Value, tk types.Type) (k Value, loop, done *BasicBlock) {
+	//
+	// loop:                                   (target of continue)
+	//      ko = <-x                           (key, ok)
+	//      ok = extract ko #1
+	//      if ok goto body else done
+	// body:
+	//      k = extract ko #0
+	//      ...
+	//      goto loop
+	// done:                                   (target of break)
+
+	loop = fn.newBasicBlock("rangechan.loop")
+	emitJump(fn, loop)
+	fn.currentBlock = loop
+	// Comma-ok receive: ok becomes false when x is closed and drained.
+	recv := &UnOp{
+		Op:      token.ARROW,
+		X:       x,
+		CommaOk: true,
+	}
+	recv.setType(&types.Result{Values: []*types.Var{
+		{Name: "k", Type: tk},
+		varOk,
+	}})
+	ko := fn.emit(recv)
+	body := fn.newBasicBlock("rangechan.body")
+	done = fn.newBasicBlock("rangechan.done")
+	emitIf(fn, emitExtract(fn, ko, 1, tBool), body, done)
+	fn.currentBlock = body
+	if tk != nil {
+		k = emitExtract(fn, ko, 0, tk)
+	}
+	return
+}
+
+// rangeStmt emits to fn code for the range statement s, optionally
+// labelled by label.
+//
+func (b *Builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
+	// tk/tv stay nil for a blank (or absent) key/value, which
+	// suppresses computation of the corresponding loop result.
+	var tk, tv types.Type
+	if !isBlankIdent(s.Key) {
+		tk = fn.Pkg.TypeOf(s.Key)
+	}
+	if s.Value != nil && !isBlankIdent(s.Value) {
+		tv = fn.Pkg.TypeOf(s.Value)
+	}
+
+	// If iteration variables are defined (:=), this
+	// occurs once outside the loop.
+	//
+	// Unlike a short variable declaration, a RangeStmt
+	// using := never redeclares an existing variable; it
+	// always creates a new one.
+	if s.Tok == token.DEFINE {
+		if tk != nil {
+			fn.addNamedLocal(fn.Pkg.ObjectOf(s.Key.(*ast.Ident)))
+		}
+		if tv != nil {
+			fn.addNamedLocal(fn.Pkg.ObjectOf(s.Value.(*ast.Ident)))
+		}
+	}
+
+	x := b.expr(fn, s.X)
+
+	var k, v Value
+	var loop, done *BasicBlock
+	switch rt := underlyingType(x.Type()).(type) {
+	case *types.Slice, *types.Array, *types.Pointer: // *array
+		k, v, loop, done = b.rangeIndexed(fn, x, tv)
+
+	case *types.Chan:
+		k, loop, done = b.rangeChan(fn, x, tk)
+
+	case *types.Map, *types.Basic: // string
+		k, v, loop, done = b.rangeIter(fn, x, tk, tv)
+
+	default:
+		panic("Cannot range over: " + rt.String())
+	}
+
+	// Evaluate both LHS expressions before we update either.
+	var kl, vl lvalue
+	if tk != nil {
+		kl = b.addr(fn, s.Key, false) // non-escaping
+	}
+	if tv != nil {
+		vl = b.addr(fn, s.Value, false) // non-escaping
+	}
+	if tk != nil {
+		kl.store(fn, k)
+	}
+	if tv != nil {
+		vl.store(fn, v)
+	}
+
+	if label != nil {
+		label._break = done
+		label._continue = loop
+	}
+
+	fn.targets = &targets{
+		tail:      fn.targets,
+		_break:    done,
+		_continue: loop,
+	}
+	b.stmt(fn, s.Body)
+	fn.targets = fn.targets.tail
+	emitJump(fn, loop) // back-edge
+	fn.currentBlock = done
+}
+
+// stmt lowers statement s to SSA form, emitting code to fn.
+// It advances fn.currentBlock as new basic blocks are created for
+// control-flow constructs.
+func (b *Builder) stmt(fn *Function, _s ast.Stmt) {
+	// The label of the current statement.  If non-nil, its _goto
+	// target is always set; its _break and _continue are set only
+	// within the body of switch/typeswitch/select/for/range.
+	// It is effectively an additional default-nil parameter of stmt().
+	var label *lblock
+start:
+	switch s := _s.(type) {
+	case *ast.EmptyStmt:
+		// ignore.  (Usually removed by gofmt.)
+
+	case *ast.DeclStmt: // Con, Var or Typ
+		// Only value specs (vars/consts) generate code; type
+		// specs have no run-time effect.
+		d := s.Decl.(*ast.GenDecl)
+		for _, spec := range d.Specs {
+			if vs, ok := spec.(*ast.ValueSpec); ok {
+				b.localValueSpec(fn, vs)
+			}
+		}
+
+	case *ast.LabeledStmt:
+		label = fn.labelledBlock(s.Label)
+		emitJump(fn, label._goto)
+		fn.currentBlock = label._goto
+		_s = s.Stmt
+		goto start // effectively: tailcall stmt(fn, s.Stmt, label)
+
+	case *ast.ExprStmt:
+		b.expr(fn, s.X)
+
+	case *ast.SendStmt:
+		// The sent value is converted to the channel's element type.
+		fn.emit(&Send{
+			Chan: b.expr(fn, s.Chan),
+			X: emitConv(fn, b.expr(fn, s.Value),
+				underlyingType(fn.Pkg.TypeOf(s.Chan)).(*types.Chan).Elt),
+		})
+
+	case *ast.IncDecStmt:
+		// x++ / x-- is lowered as x += 1 / x -= 1.
+		op := token.ADD
+		if s.Tok == token.DEC {
+			op = token.SUB
+		}
+		b.assignOp(fn, b.addr(fn, s.X, false), vOne, op)
+
+	case *ast.AssignStmt:
+		switch s.Tok {
+		case token.ASSIGN, token.DEFINE:
+			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
+
+		default: // +=, etc.
+			// The token constants for the assignment operators
+			// (ADD_ASSIGN, SUB_ASSIGN, ...) parallel the binary
+			// operators (ADD, SUB, ...), so this offset maps
+			// one to the other.
+			op := s.Tok + token.ADD - token.ADD_ASSIGN
+			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op)
+		}
+
+	case *ast.GoStmt:
+		// The "intrinsics" new/make/len/cap are forbidden here.
+		// panic is treated like an ordinary function call.
+		var v Go
+		b.setCall(fn, s.Call, &v.Call)
+		fn.emit(&v)
+
+	case *ast.DeferStmt:
+		// The "intrinsics" new/make/len/cap are forbidden here.
+		// panic is treated like an ordinary function call.
+		var v Defer
+		b.setCall(fn, s.Call, &v.Call)
+		fn.emit(&v)
+
+	case *ast.ReturnStmt:
+		if fn == fn.Pkg.Init {
+			// A "return" within an init block is treated
+			// like a "goto" to the next init block.  We
+			// use the outermost BREAK target for this purpose.
+			var block *BasicBlock
+			for t := fn.targets; t != nil; t = t.tail {
+				if t._break != nil {
+					block = t._break
+				}
+			}
+			// Run function calls deferred in this init
+			// block when explicitly returning from it.
+			fn.emit(new(RunDefers))
+			emitJump(fn, block)
+			fn.currentBlock = fn.newBasicBlock("unreachable")
+			return
+		}
+
+		var results []Value
+		if len(s.Results) == 1 && len(fn.Signature.Results) > 1 {
+			// Return of one expression in a multi-valued function.
+			tuple := b.exprN(fn, s.Results[0])
+			for i, v := range tuple.Type().(*types.Result).Values {
+				results = append(results,
+					emitConv(fn, emitExtract(fn, tuple, i, v.Type),
+						fn.Signature.Results[i].Type))
+			}
+		} else {
+			// 1:1 return, or no-arg return in non-void function.
+			for i, r := range s.Results {
+				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results[i].Type)
+				results = append(results, v)
+			}
+		}
+		if fn.namedResults != nil {
+			// Function has named result parameters (NRPs).
+			// Perform parallel assignment of return operands to NRPs.
+			for i, r := range results {
+				emitStore(fn, fn.namedResults[i], r)
+			}
+		}
+		// Run function calls deferred in this
+		// function when explicitly returning from it.
+		fn.emit(new(RunDefers))
+		if fn.namedResults != nil {
+			// Reload NRPs to form the result tuple.
+			// (A deferred function may have mutated them.)
+			results = results[:0]
+			for _, r := range fn.namedResults {
+				results = append(results, emitLoad(fn, r))
+			}
+		}
+		fn.emit(&Ret{Results: results})
+		fn.currentBlock = fn.newBasicBlock("unreachable")
+
+	case *ast.BranchStmt:
+		// Resolve the target block for break/continue/fallthrough/goto.
+		// Unlabelled break/continue/fallthrough search the innermost
+		// enclosing targets frame that defines the relevant target.
+		var block *BasicBlock
+		switch s.Tok {
+		case token.BREAK:
+			if s.Label != nil {
+				block = fn.labelledBlock(s.Label)._break
+			} else {
+				for t := fn.targets; t != nil && block == nil; t = t.tail {
+					block = t._break
+				}
+			}
+
+		case token.CONTINUE:
+			if s.Label != nil {
+				block = fn.labelledBlock(s.Label)._continue
+			} else {
+				for t := fn.targets; t != nil && block == nil; t = t.tail {
+					block = t._continue
+				}
+			}
+
+		case token.FALLTHROUGH:
+			for t := fn.targets; t != nil && block == nil; t = t.tail {
+				block = t._fallthrough
+			}
+
+		case token.GOTO:
+			block = fn.labelledBlock(s.Label)._goto
+		}
+		if block == nil {
+			// TODO(gri): fix: catch these in the typechecker.
+			fmt.Printf("ignoring illegal branch: %s %s\n", s.Tok, s.Label)
+		} else {
+			emitJump(fn, block)
+			fn.currentBlock = fn.newBasicBlock("unreachable")
+		}
+
+	case *ast.BlockStmt:
+		b.stmtList(fn, s.List)
+
+	case *ast.IfStmt:
+		if s.Init != nil {
+			b.stmt(fn, s.Init)
+		}
+		then := fn.newBasicBlock("if.then")
+		done := fn.newBasicBlock("if.done")
+		els := done // without an else-arm, the false edge goes straight to done
+		if s.Else != nil {
+			els = fn.newBasicBlock("if.else")
+		}
+		b.cond(fn, s.Cond, then, els)
+		fn.currentBlock = then
+		b.stmt(fn, s.Body)
+		emitJump(fn, done)
+
+		if s.Else != nil {
+			fn.currentBlock = els
+			b.stmt(fn, s.Else)
+			emitJump(fn, done)
+		}
+
+		fn.currentBlock = done
+
+	case *ast.SwitchStmt:
+		b.switchStmt(fn, s, label)
+
+	case *ast.TypeSwitchStmt:
+		b.typeSwitchStmt(fn, s, label)
+
+	case *ast.SelectStmt:
+		b.selectStmt(fn, s, label)
+
+	case *ast.ForStmt:
+		b.forStmt(fn, s, label)
+
+	case *ast.RangeStmt:
+		b.rangeStmt(fn, s, label)
+
+	default:
+		panic(fmt.Sprintf("unexpected statement kind: %T", s))
+	}
+}
+
+// buildFunction builds SSA code for the body of function fn.  Idempotent.
+//
+// Functions without Go source (synthetic, or loaded from object
+// files) and external (bodiless) functions are handled specially:
+// only their parameter lists are populated.
+//
+func (b *Builder) buildFunction(fn *Function) {
+	if fn.Blocks != nil {
+		return // building already started
+	}
+	if fn.syntax == nil {
+		return // not a Go source function.  (Synthetic, or from object file.)
+	}
+	if fn.syntax.body == nil {
+		// External function.
+		if fn.Params == nil {
+			// This condition ensures we add a non-empty
+			// params list once only, but we may attempt
+			// the degenerate empty case repeatedly.
+			// TODO(adonovan): opt: don't do that.
+
+			// We set Function.Params even though there is no body
+			// code to reference them.  This simplifies clients.
+			if recv := fn.Signature.Recv; recv != nil {
+				fn.addParam(recv.Name, recv.Type)
+			}
+			for _, param := range fn.Signature.Params {
+				fn.addParam(param.Name, param.Type)
+			}
+		}
+		return
+	}
+	if fn.Prog.mode&LogSource != 0 {
+		defer logStack("build function %s @ %s",
+			fn.FullName(), fn.Prog.Files.Position(fn.Pos))()
+	}
+	fn.startBody()
+	fn.createSyntacticParams(fn.Pkg.idents)
+	b.stmt(fn, fn.syntax.body)
+	// If control may fall off the end of the body (the current block
+	// is the entry block or still has predecessors), emit an implicit
+	// return, running any deferred calls first.
+	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) {
+		// Run function calls deferred in this function when
+		// falling off the end of the body block.
+		fn.emit(new(RunDefers))
+		fn.emit(new(Ret))
+	}
+	fn.finishBody()
+}
+
+// memberFromObject populates package pkg with a member for the
+// typechecker object obj.
+//
+// For objects from Go source code, syntax is the associated syntax
+// tree (for funcs and vars only); it will be used during the build
+// phase.
+//
+func (b *Builder) memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
+	name := obj.GetName()
+	switch obj := obj.(type) {
+	case *types.TypeName:
+		pkg.Members[name] = &Type{NamedType: obj.Type.(*types.NamedType)}
+
+	case *types.Const:
+		pkg.Members[name] = &Constant{
+			Name_: name,
+			Value: newLiteral(obj.Val, obj.Type),
+			Pos:   obj.GetPos(),
+		}
+
+	case *types.Var:
+		// syntax is nil for GC-imported packages; spec remains nil then.
+		spec, _ := syntax.(*ast.ValueSpec)
+		g := &Global{
+			Pkg:   pkg,
+			Name_: name,
+			Type_: pointer(obj.Type), // address
+			Pos:   obj.GetPos(),
+			spec:  spec,
+		}
+		b.globals[obj] = g
+		pkg.Members[name] = g
+
+	case *types.Func:
+		var fs *funcSyntax
+		var pos token.Pos
+		if decl, ok := syntax.(*ast.FuncDecl); ok {
+			fs = &funcSyntax{
+				recvField:    decl.Recv,
+				paramFields:  decl.Type.Params,
+				resultFields: decl.Type.Results,
+				body:         decl.Body,
+			}
+			// TODO(gri): make GcImported types.Object
+			// implement the full object interface
+			// including Pos().  Or at least not crash.
+			pos = obj.GetPos()
+		}
+		sig := obj.Type.(*types.Signature)
+		fn := &Function{
+			Name_:     name,
+			Signature: sig,
+			Pos:       pos,
+			Pkg:       pkg,
+			Prog:      b.Prog,
+			syntax:    fs,
+		}
+		if sig.Recv == nil {
+			// Function declaration.
+			b.globals[obj] = fn
+			pkg.Members[name] = fn
+		} else {
+			// Method declaration.
+			// Methods are not package members; they are recorded
+			// in the program-wide concrete-methods table instead.
+			nt := deref(sig.Recv.Type).(*types.NamedType)
+			_, method := methodIndex(nt, nt.Methods, makeId(name, pkg.Types))
+			b.Prog.concreteMethods[method] = fn
+		}
+
+	default: // (incl. *types.Package)
+		panic(fmt.Sprintf("unexpected Object type: %T", obj))
+	}
+}
+
+// membersFromDecl populates package pkg with members for each
+// typechecker object (var, func, const or type) associated with the
+// specified decl.
+//
+// Blank identifiers are skipped throughout: they declare no member.
+//
+func (b *Builder) membersFromDecl(pkg *Package, decl ast.Decl) {
+	switch decl := decl.(type) {
+	case *ast.GenDecl: // import, const, type or var
+		switch decl.Tok {
+		case token.CONST:
+			// Constants need no syntax at build time; their values
+			// come from the typechecker.
+			for _, spec := range decl.Specs {
+				for _, id := range spec.(*ast.ValueSpec).Names {
+					if !isBlankIdent(id) {
+						b.memberFromObject(pkg, pkg.ObjectOf(id), nil)
+					}
+				}
+			}
+
+		case token.VAR:
+			// Vars keep their ValueSpec: initializer expressions
+			// are compiled during the build phase.
+			for _, spec := range decl.Specs {
+				for _, id := range spec.(*ast.ValueSpec).Names {
+					if !isBlankIdent(id) {
+						b.memberFromObject(pkg, pkg.ObjectOf(id), spec)
+					}
+				}
+			}
+
+		case token.TYPE:
+			for _, spec := range decl.Specs {
+				id := spec.(*ast.TypeSpec).Name
+				if !isBlankIdent(id) {
+					b.memberFromObject(pkg, pkg.ObjectOf(id), nil)
+				}
+			}
+		}
+
+	case *ast.FuncDecl:
+		id := decl.Name
+		if decl.Recv == nil && id.Name == "init" {
+			// Record the position of the first init block only.
+			if !pkg.Init.Pos.IsValid() {
+				pkg.Init.Pos = decl.Name.Pos()
+			}
+			return // init blocks aren't functions
+		}
+		if !isBlankIdent(id) {
+			b.memberFromObject(pkg, pkg.ObjectOf(id), decl)
+		}
+	}
+}
+
+// typecheck invokes the type-checker on files and returns the
+// type-checker's package so formed, plus the AST type information
+// (expression types, identifier resolutions and constant values)
+// gathered via the checker's callbacks.
+//
+func (b *Builder) typecheck(files []*ast.File) (*types.Package, *TypeInfo, error) {
+	info := &TypeInfo{
+		types:     make(map[ast.Expr]types.Type),
+		idents:    make(map[*ast.Ident]types.Object),
+		constants: make(map[ast.Expr]*Literal),
+	}
+	tc := b.Context.TypeChecker
+	tc.Expr = func(x ast.Expr, typ types.Type, val exact.Value) {
+		info.types[x] = typ
+		if val != nil {
+			info.constants[x] = newLiteral(val, typ)
+		}
+	}
+	tc.Ident = func(ident *ast.Ident, obj types.Object) {
+		// Invariants:
+		// - obj is non-nil.
+		// - isBlankIdent(ident) <=> obj.GetType()==nil
+		info.idents[ident] = obj
+	}
+	typkg, firstErr := tc.Check(b.Prog.Files, files)
+	// Detach the callbacks now that checking is complete.
+	tc.Expr = nil
+	tc.Ident = nil
+	if firstErr != nil {
+		return nil, nil, firstErr
+	}
+	return typkg, info, nil
+}
+
+// CreatePackage creates a package from the specified set of files,
+// performs type-checking, and allocates all global SSA Values for the
+// package.  It returns a new SSA Package providing access to these
+// values.  The order of files determines the package initialization order.
+//
+// importPath is the full name under which this package is known, such
+// as appears in an import declaration. e.g. "sync/atomic".
+//
+// The ParseFiles() utility may be helpful for parsing a set of Go
+// source files.
+//
+// A non-nil error indicates a type-checking failure.
+//
+func (b *Builder) CreatePackage(importPath string, files []*ast.File) (*Package, error) {
+	typkg, info, err := b.typecheck(files)
+	if err != nil {
+		return nil, err
+	}
+	return b.createPackageImpl(typkg, importPath, files, info), nil
+}
+
+// createPackageImpl constructs an SSA Package from an error-free
+// types.Package typkg and populates its Members mapping.  It returns
+// the newly constructed ssa.Package.
+//
+// The real work of building SSA form for each function is not done
+// until a subsequent call to BuildPackage.
+//
+// If files is non-nil, its declarations will be used to generate code
+// for functions, methods and init blocks in a subsequent call to
+// BuildPackage; info must contain the type information for those files.
+// Otherwise, typkg is assumed to have been imported
+// from the gc compiler's object files; no code will be available.
+//
+func (b *Builder) createPackageImpl(typkg *types.Package, importPath string, files []*ast.File, info *TypeInfo) *Package {
+	// The typechecker sets types.Package.Path only for GcImported
+	// packages, since it doesn't know import path until after typechecking is done.
+	// Here we ensure it is always set, since we know the correct path.
+	if typkg.Path == "" {
+		typkg.Path = importPath
+	} else if typkg.Path != importPath {
+		panic(fmt.Sprintf("%s != %s", typkg.Path, importPath))
+	}
+
+	p := &Package{
+		Prog:     b.Prog,
+		Types:    typkg,
+		Members:  make(map[string]Member),
+		Files:    files,
+		nTo1Vars: make(map[*ast.ValueSpec]bool),
+	}
+
+	if files != nil {
+		p.TypeInfo = *info
+	}
+
+	// Register the package under both its types.Package and its path.
+	b.packages[typkg] = p
+	b.Prog.Packages[importPath] = p
+
+	// Add init() function (but not to Members since it can't be referenced).
+	p.Init = &Function{
+		Name_:     "init",
+		Signature: new(types.Signature),
+		Pkg:       p,
+		Prog:      b.Prog,
+	}
+
+	// CREATE phase.
+	// Allocate all package members: vars, funcs and consts and types.
+	if len(files) > 0 {
+		// Go source package.
+
+		// TODO(gri): make it a typechecker error for there to
+		// be duplicate (e.g.) main functions in the same package.
+		for _, file := range p.Files {
+			for _, decl := range file.Decls {
+				b.membersFromDecl(p, decl)
+			}
+		}
+	} else {
+		// GC-compiled binary package.
+		// No code.
+		// No position information.
+
+		for _, obj := range p.Types.Scope.Entries {
+			b.memberFromObject(p, obj, nil)
+		}
+	}
+
+	// Compute the method sets
+	for _, mem := range p.Members {
+		switch t := mem.(type) {
+		case *Type:
+			t.Methods = b.Prog.MethodSet(t.NamedType)
+			t.PtrMethods = b.Prog.MethodSet(pointer(t.NamedType))
+		}
+	}
+
+	// Add initializer guard variable.
+	// BuildPackage uses it to make init() idempotent at run time.
+	initguard := &Global{
+		Pkg:   p,
+		Name_: "init·guard",
+		Type_: pointer(tBool),
+	}
+	p.Members[initguard.Name()] = initguard
+
+	if b.Context.Mode&LogPackages != 0 {
+		p.DumpTo(os.Stderr)
+	}
+
+	return p
+}
+
+// buildDecl builds SSA code for all globals, functions or methods
+// declared by decl in package pkg.
+//
+func (b *Builder) buildDecl(pkg *Package, decl ast.Decl) {
+	switch decl := decl.(type) {
+	case *ast.GenDecl:
+		switch decl.Tok {
+		// Nothing to do for CONST, IMPORT.
+		case token.VAR:
+			// Global var initializers are emitted into pkg.Init.
+			for _, spec := range decl.Specs {
+				b.globalValueSpec(pkg.Init, spec.(*ast.ValueSpec), nil, nil)
+			}
+		case token.TYPE:
+			// Build all methods of each named type declared here.
+			for _, spec := range decl.Specs {
+				id := spec.(*ast.TypeSpec).Name
+				if isBlankIdent(id) {
+					continue
+				}
+				obj := pkg.ObjectOf(id).(*types.TypeName)
+				for _, method := range obj.Type.(*types.NamedType).Methods {
+					b.buildFunction(b.Prog.concreteMethods[method])
+				}
+			}
+		}
+
+	case *ast.FuncDecl:
+		id := decl.Name
+		if isBlankIdent(id) {
+			// no-op
+
+		} else if decl.Recv == nil && id.Name == "init" {
+			// init() block
+			if b.Context.Mode&LogSource != 0 {
+				fmt.Fprintln(os.Stderr, "build init block @", b.Prog.Files.Position(decl.Pos()))
+			}
+			init := pkg.Init
+
+			// A return statement within an init block is
+			// treated like a "goto" to the next init
+			// block, which we stuff in the outermost
+			// break label.
+			next := init.newBasicBlock("init.next")
+			init.targets = &targets{
+				tail:   init.targets,
+				_break: next,
+			}
+			b.stmt(init, decl.Body)
+			// Run function calls deferred in this init
+			// block when falling off the end of the block.
+			init.emit(new(RunDefers))
+			emitJump(init, next)
+			init.targets = init.targets.tail
+			init.currentBlock = next
+
+		} else if m, ok := b.globals[pkg.ObjectOf(id)]; ok {
+			// Package-level function.
+			b.buildFunction(m.(*Function))
+		}
+	}
+
+}
+
+// BuildAllPackages constructs the SSA representation of the bodies of
+// all functions in all packages known to the Builder.  Construction
+// occurs in parallel unless the BuildSerially mode flag was set.
+//
+// BuildAllPackages is idempotent and thread-safe.
+//
+func (b *Builder) BuildAllPackages() {
+	var wg sync.WaitGroup
+	for _, p := range b.Prog.Packages {
+		if b.Context.Mode&BuildSerially != 0 {
+			b.BuildPackage(p)
+		} else {
+			wg.Add(1)
+			// p is passed as an argument to avoid capturing
+			// the shared loop variable in the goroutine.
+			go func(p *Package) {
+				b.BuildPackage(p)
+				wg.Done()
+			}(p)
+		}
+	}
+	wg.Wait()
+}
+
+// BuildPackage builds SSA code for all functions and vars in package p.
+//
+// BuildPackage is idempotent and thread-safe.
+//
+func (b *Builder) BuildPackage(p *Package) {
+	// The CAS on p.started makes concurrent/repeated calls no-ops.
+	if !atomic.CompareAndSwapInt32(&p.started, 0, 1) {
+		return // already started
+	}
+	if p.Files == nil {
+		return // nothing to do
+	}
+	if b.Context.Mode&LogSource != 0 {
+		defer logStack("build package %s", p.Types.Path)()
+	}
+	init := p.Init
+	init.startBody()
+
+	// Make init() skip if package is already initialized.
+	initguard := p.Var("init·guard")
+	doinit := init.newBasicBlock("init.start")
+	done := init.newBasicBlock("init.done")
+	emitIf(init, emitLoad(init, initguard), done, doinit)
+	init.currentBlock = doinit
+	emitStore(init, initguard, vTrue)
+
+	// TODO(gri): fix: the types.Package.Imports map may contain
+	// entries for other package's import statements, if produced
+	// by GcImport.  Project it down to just the ones for us.
+	imports := make(map[string]*types.Package)
+	for _, file := range p.Files {
+		for _, imp := range file.Imports {
+			path, _ := strconv.Unquote(imp.Path.Value)
+			if path != "unsafe" {
+				imports[path] = p.Types.Imports[path]
+			}
+		}
+	}
+
+	// Call the init() function of each package we import.
+	// Order is unspecified (and is in fact nondeterministic).
+	for name, imported := range imports {
+		p2 := b.packages[imported]
+		if p2 == nil {
+			panic("Building " + p.Name() + ": CreatePackage has not been called for package " + name)
+		}
+
+		var v Call
+		v.Call.Func = p2.Init
+		v.Call.Pos = init.Pos
+		v.setType(new(types.Result))
+		init.emit(&v)
+	}
+
+	// Visit the package's var decls and init funcs in source
+	// order.  This causes init() code to be generated in
+	// topological order.  We visit them transitively through
+	// functions of the same package, but we don't treat functions
+	// as roots.
+	//
+	// We also ensure all functions and methods are built, even if
+	// they are unreachable.
+	for _, file := range p.Files {
+		for _, decl := range file.Decls {
+			b.buildDecl(p, decl)
+		}
+	}
+
+	// Clear out the typed ASTs unless otherwise requested.
+	if retain := b.Context.RetainAST; retain == nil || !retain(p) {
+		p.Files = nil
+		p.TypeInfo = TypeInfo{} // clear
+	}
+	p.nTo1Vars = nil
+
+	// Finish up.
+	emitJump(init, done)
+	init.currentBlock = done
+	init.emit(new(RunDefers))
+	init.emit(new(Ret))
+	init.finishBody()
+}
diff --git a/ssa/doc.go b/ssa/doc.go
new file mode 100644
index 0000000..11f93c5
--- /dev/null
+++ b/ssa/doc.go
@@ -0,0 +1,115 @@
+// Package ssa defines a representation of the elements of Go programs
+// (packages, types, functions, variables and constants) using a
+// static single-assignment (SSA) form intermediate representation
+// (IR) for the bodies of functions.
+//
+// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
+//
+// For an introduction to SSA form, see
+// http://en.wikipedia.org/wiki/Static_single_assignment_form.
+// This page provides a broader reading list:
+// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
+//
+// The level of abstraction of the SSA form is intentionally close to
+// the source language to facilitate construction of source analysis
+// tools.  It is not primarily intended for machine code generation.
+//
+// All looping, branching and switching constructs are replaced with
+// unstructured control flow.  We may add higher-level control flow
+// primitives in the future to facilitate constant-time dispatch of
+// switch statements, for example.
+//
+// Builder encapsulates the tasks of type-checking (using go/types)
+// abstract syntax trees (as defined by go/ast) for the source files
+// comprising a Go program, and the conversion of each function from
+// Go ASTs to the SSA representation.
+//
+// By supplying an instance of the SourceLocator function prototype,
+// clients may control how the builder locates, loads and parses Go
+// source files for imported packages.  This package provides
+// GorootLoader, which uses go/build to locate packages in the Go
+// source distribution, and go/parser to parse them.
+//
+// The builder initially builds a naive SSA form in which all local
+// variables are addresses of stack locations with explicit loads and
+// stores.  Registerisation of eligible locals and φ-node insertion
+// using dominance and dataflow are then performed as a second pass
+// called "lifting" to improve the accuracy and performance of
+// subsequent analyses; this pass can be skipped by setting the
+// NaiveForm builder flag.
+//
+// The program representation constructed by this package is fully
+// resolved internally, i.e. it does not rely on the names of Values,
+// Packages, Functions, Types or BasicBlocks for the correct
+// interpretation of the program.  Only the identities of objects and
+// the topology of the SSA and type graphs are semantically
+// significant.  (There is one exception: Ids, used to identify field
+// and method names, contain strings.)  Avoidance of name-based
+// operations simplifies the implementation of subsequent passes and
+// can make them very efficient.  Many objects are nonetheless named
+// to aid in debugging, but it is not essential that the names be
+// either accurate or unambiguous.  The public API exposes a number of
+// name-based maps for client convenience.
+//
+// Given a Go source package such as this:
+//
+//      package main
+//
+//      import "fmt"
+//
+//      const message = "Hello, World!"
+//
+//      func hello() {
+//              fmt.Println(message)
+//      }
+//
+// The SSA Builder creates a *Program containing a main *Package such
+// as this:
+//
+//      Package(Name: "main")
+//        Members:
+//          "message":          *Literal (Type: untyped string, Value: "Hello, World!")
+//          "init·guard":       *Global (Type: *bool)
+//          "hello":            *Function (Type: func())
+//        Init:                 *Function (Type: func())
+//
+// The printed representation of the function main.hello is shown
+// below.  Within the function listing, the name of each BasicBlock
+// such as ".0.entry" is printed left-aligned, followed by the block's
+// instructions, i.e. implementations of Instruction.
+// For each instruction that defines an SSA virtual register
+// (i.e. implements Value), the type of that value is shown in the
+// right column.
+//
+//      # Name: main.hello
+//      # Declared at hello.go:7:6
+//      # Type: func()
+//      func hello():
+//      .0.entry:
+//              t0 = new [1]interface{}                                                 *[1]interface{}
+//              t1 = &t0[0:untyped integer]                                             *interface{}
+//              t2 = make interface interface{} <- string ("Hello, World!":string)      interface{}
+//              *t1 = t2
+//              t3 = slice t0[:]                                                        []interface{}
+//              t4 = fmt.Println(t3)                                                    (n int, err error)
+//              ret
+//
+//
+// The ssadump utility is an example of an application that loads and
+// dumps the SSA form of a Go program, whether a single package or a
+// whole program.
+//
+// TODO(adonovan): demonstrate more features in the example:
+// parameters and control flow at the least.
+//
+// TODO(adonovan): Consider how token.Pos source location information
+// should be made available generally.  Currently it is only present in
+// Package, Function and CallCommon.
+//
+// TODO(adonovan): Consider the exceptional control-flow implications
+// of defer and recover().
+//
+// TODO(adonovan): build tables/functions that relate source variables
+// to SSA variables to assist user interfaces that make queries about
+// specific source entities.
+package ssa
diff --git a/ssa/dom.go b/ssa/dom.go
new file mode 100644
index 0000000..499ea9e
--- /dev/null
+++ b/ssa/dom.go
@@ -0,0 +1,296 @@
+package ssa
+
+// This file defines algorithms related to dominance.
+
+// Dominator tree construction ----------------------------------------
+//
+// We use the algorithm described in Lengauer & Tarjan. 1979.  A fast
+// algorithm for finding dominators in a flowgraph.
+// http://doi.acm.org/10.1145/357062.357071
+//
+// We also apply the optimizations to SLT described in Georgiadis et
+// al, Finding Dominators in Practice, JGAA 2006,
+// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf
+// to avoid the need for buckets of size > 1.
+
+import (
+	"fmt"
+	"io"
+	"math/big"
+	"os"
+)
+
+// domNode represents a node in the dominator tree.
+//
+// TODO(adonovan): export this, when ready.
+type domNode struct {
+	Block     *BasicBlock // the basic block; n.Block.dom == n
+	Idom      *domNode    // immediate dominator (parent in dominator tree)
+	Children  []*domNode  // nodes dominated by this one
+	Level     int         // level number of node within tree; zero for root
+	Pre, Post int         // pre- and post-order numbering within dominator tree
+
+	// Working state for Lengauer-Tarjan algorithm
+	// (during which Pre is repurposed for CFG DFS preorder number).
+	// TODO(adonovan): opt: measure allocating these as temps.
+	semi     *domNode // semidominator; nil means "not yet visited" during ltDfs
+	parent   *domNode // parent in DFS traversal of CFG
+	ancestor *domNode // ancestor with least sdom
+}
+
+// ltDfs implements the depth-first search part of the LT algorithm.
+// It records nodes into preorder starting at index i and returns the
+// next unused preorder number.
+func ltDfs(v *domNode, i int, preorder []*domNode) int {
+	preorder[i] = v
+	v.Pre = i // For now: DFS preorder of spanning tree of CFG
+	i++
+	v.semi = v // also serves as the "visited" mark
+	v.ancestor = nil
+	for _, succ := range v.Block.Succs {
+		if w := succ.dom; w.semi == nil {
+			w.parent = v
+			i = ltDfs(w, i, preorder)
+		}
+	}
+	return i
+}
+
+// ltEval implements the EVAL part of the LT algorithm: it returns the
+// node on the ancestor chain of v whose semidominator has the least
+// preorder number.
+func ltEval(v *domNode) *domNode {
+	// TODO(adonovan): opt: do path compression per simple LT.
+	u := v
+	for ; v.ancestor != nil; v = v.ancestor {
+		if v.semi.Pre < u.semi.Pre {
+			u = v
+		}
+	}
+	return u
+}
+
+// ltLink implements the LINK part of the LT algorithm: it makes v the
+// forest ancestor of w.
+func ltLink(v, w *domNode) {
+	w.ancestor = v
+}
+
+// buildDomTree computes the dominator tree of f using the LT algorithm.
+// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
+//
+func buildDomTree(f *Function) {
+	// The step numbers refer to the original LT paper; the
+	// reordering is due to Georgiadis.
+
+	// Initialize domNode nodes.
+	for _, b := range f.Blocks {
+		dom := b.dom
+		if dom == nil {
+			dom = &domNode{Block: b}
+			b.dom = dom
+		} else {
+			dom.Block = b // reuse
+		}
+	}
+
+	// Step 1.  Number vertices by depth-first preorder.
+	n := len(f.Blocks)
+	preorder := make([]*domNode, n)
+	root := f.Blocks[0].dom
+	ltDfs(root, 0, preorder)
+
+	// buckets replaces LT's per-node bucket sets: buckets[i] chains
+	// (via Pre indices) the nodes whose semidominator is preorder[i].
+	buckets := make([]*domNode, n)
+	copy(buckets, preorder)
+
+	// In reverse preorder...
+	for i := n - 1; i > 0; i-- {
+		w := preorder[i]
+
+		// Step 3. Implicitly define the immediate dominator of each node.
+		for v := buckets[i]; v != w; v = buckets[v.Pre] {
+			u := ltEval(v)
+			if u.semi.Pre < i {
+				v.Idom = u
+			} else {
+				v.Idom = w
+			}
+		}
+
+		// Step 2. Compute the semidominators of all nodes.
+		w.semi = w.parent
+		for _, pred := range w.Block.Preds {
+			v := pred.dom
+			u := ltEval(v)
+			if u.semi.Pre < w.semi.Pre {
+				w.semi = u.semi
+			}
+		}
+
+		ltLink(w.parent, w)
+
+		if w.parent == w.semi {
+			w.Idom = w.parent
+		} else {
+			buckets[i] = buckets[w.semi.Pre]
+			buckets[w.semi.Pre] = w
+		}
+	}
+
+	// The final 'Step 3' is now outside the loop.
+	for v := buckets[0]; v != root; v = buckets[v.Pre] {
+		v.Idom = root
+	}
+
+	// Step 4. Explicitly define the immediate dominator of each
+	// node, in preorder.
+	for _, w := range preorder[1:] {
+		if w == root {
+			w.Idom = nil
+		} else {
+			if w.Idom != w.semi {
+				w.Idom = w.Idom.Idom
+			}
+			// Calculate Children relation as inverse of Idom.
+			w.Idom.Children = append(w.Idom.Children, w)
+		}
+
+		// Clear working state.
+		w.semi = nil
+		w.parent = nil
+		w.ancestor = nil
+	}
+
+	// Assign Pre/Post/Level numbers for constant-time dominance queries.
+	numberDomTree(root, 0, 0, 0)
+
+	// printDomTreeDot(os.Stderr, f)        // debugging
+	// printDomTreeText(os.Stderr, root, 0) // debugging
+
+	if f.Prog.mode&SanityCheckFunctions != 0 {
+		sanityCheckDomTree(f)
+	}
+}
+
+// numberDomTree sets the pre- and post-order numbers of a depth-first
+// traversal of the dominator tree rooted at v.  These are used to
+// answer dominance queries in constant time.  Also, it sets the level
+// numbers (zero for the root) used for frontier computation.
+// It returns the next unused pre- and post-order numbers.
+//
+func numberDomTree(v *domNode, pre, post, level int) (int, int) {
+	v.Level = level
+	level++
+	v.Pre = pre
+	pre++
+	for _, child := range v.Children {
+		pre, post = numberDomTree(child, pre, post, level)
+	}
+	v.Post = post
+	post++
+	return pre, post
+}
+
+// dominates returns true if b dominates c.
+// The relation is reflexive: dominates(b, b) == true.
+// Requires that dominance information is up-to-date.
+//
+func dominates(b, c *BasicBlock) bool {
+	// b dominates c iff c's traversal interval nests within b's.
+	return b.dom.Pre <= c.dom.Pre && c.dom.Post <= b.dom.Post
+}
+
+// Testing utilities ----------------------------------------
+
+// sanityCheckDomTree checks the correctness of the dominator tree
+// computed by the LT algorithm by comparing against the dominance
+// relation computed by a naive Kildall-style forward dataflow
+// analysis (Algorithm 10.16 from the "Dragon" book).
+//
+// It panics if the two relations disagree.
+//
+func sanityCheckDomTree(f *Function) {
+	n := len(f.Blocks)
+
+	// D[i] is the set of blocks that dominate f.Blocks[i],
+	// represented as a bit-set of block indices.
+	D := make([]big.Int, n)
+
+	one := big.NewInt(1)
+
+	// all is the set of all blocks; constant.
+	var all big.Int
+	all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
+
+	// Initialization.
+	for i := range f.Blocks {
+		if i == 0 {
+			// The root is dominated only by itself.
+			D[i].SetBit(&D[0], 0, 1)
+		} else {
+			// All other blocks are (initially) dominated
+			// by every block.
+			D[i].Set(&all)
+		}
+	}
+
+	// Iteration until fixed point.
+	for changed := true; changed; {
+		changed = false
+		for i, b := range f.Blocks {
+			if i == 0 {
+				continue
+			}
+			// Compute intersection across predecessors.
+			var x big.Int
+			x.Set(&all)
+			for _, pred := range b.Preds {
+				x.And(&x, &D[pred.Index])
+			}
+			x.SetBit(&x, i, 1) // a block always dominates itself.
+			if D[i].Cmp(&x) != 0 {
+				D[i].Set(&x)
+				changed = true
+			}
+		}
+	}
+
+	// Check the entire relation.  O(n^2).
+	ok := true
+	for i := 0; i < n; i++ {
+		for j := 0; j < n; j++ {
+			b, c := f.Blocks[i], f.Blocks[j]
+			actual := dominates(b, c)
+			expected := D[j].Bit(i) == 1
+			if actual != expected {
+				fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected)
+				ok = false
+			}
+		}
+	}
+	if !ok {
+		panic("sanityCheckDomTree failed for " + f.FullName())
+	}
+}
+
+// Printing functions ----------------------------------------
+
+// printDomTreeText prints the dominator tree rooted at v as text,
+// using indentation to show nesting.
+func printDomTreeText(w io.Writer, v *domNode, indent int) {
+	fmt.Fprintf(w, "%*s%s\n", 4*indent, "", v.Block)
+	for _, child := range v.Children {
+		printDomTreeText(w, child, indent+1)
+	}
+}
+
+// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
+// (.dot) format.  Dominator-tree edges are solid; CFG edges dotted.
+func printDomTreeDot(w io.Writer, f *Function) {
+	fmt.Fprintln(w, "//", f.FullName())
+	fmt.Fprintln(w, "digraph domtree {")
+	for i, b := range f.Blocks {
+		v := b.dom
+		fmt.Fprintf(w, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.Pre, b, v.Pre, v.Post)
+		// TODO(adonovan): improve appearance of edges
+		// belonging to both dominator tree and CFG.
+
+		// Dominator tree edge.
+		// The root (i==0) has no Idom.
+		if i != 0 {
+			fmt.Fprintf(w, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.Idom.Pre, v.Pre)
+		}
+		// CFG edges.
+		for _, pred := range b.Preds {
+			fmt.Fprintf(w, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.Pre, v.Pre)
+		}
+	}
+	fmt.Fprintln(w, "}")
+}
diff --git a/ssa/emit.go b/ssa/emit.go
new file mode 100644
index 0000000..7b15288
--- /dev/null
+++ b/ssa/emit.go
@@ -0,0 +1,301 @@
+package ssa
+
+// Helpers for emitting SSA instructions.
+
+import (
+	"go/token"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+// emitNew emits to f a new (heap Alloc) instruction allocating an
+// object of type typ.  pos is the optional source location.
+//
+func emitNew(f *Function, typ types.Type, pos token.Pos) Value {
+	return f.emit(&Alloc{
+		Type_: pointer(typ),
+		Heap:  true,
+		Pos:   pos,
+	})
+}
+
+// emitLoad emits to f an instruction to load the address addr into a
+// new temporary, and returns the value so defined.
+//
+func emitLoad(f *Function, addr Value) Value {
+	v := &UnOp{Op: token.MUL, X: addr}
+	v.setType(indirectType(addr.Type()))
+	return f.emit(v)
+}
+
+// emitArith emits to f code to compute the binary operation op(x, y)
+// where op is an eager shift, logical or arithmetic operation.
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for
+// non-eager operations.)
+//
+func emitArith(f *Function, op token.Token, x, y Value, t types.Type) Value {
+	switch op {
+	case token.SHL, token.SHR:
+		x = emitConv(f, x, t)
+		y = emitConv(f, y, types.Typ[types.Uint64])
+
+	case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+		x = emitConv(f, x, t)
+		y = emitConv(f, y, t)
+
+	default:
+		panic("illegal op in emitArith: " + op.String())
+
+	}
+	v := &BinOp{
+		Op: op,
+		X:  x,
+		Y:  y,
+	}
+	v.setType(t)
+	return f.emit(v)
+}
+
+// emitCompare emits to f code compute the boolean result of
+// comparison comparison 'x op y'.
+//
+func emitCompare(f *Function, op token.Token, x, y Value) Value {
+	xt := underlyingType(x.Type())
+	yt := underlyingType(y.Type())
+
+	// Special case to optimise a tagless SwitchStmt so that
+	// these are equivalent
+	//   switch { case e: ...}
+	//   switch true { case e: ... }
+	//   if e==true { ... }
+	// even in the case when e's type is an interface.
+	// TODO(adonovan): opt: generalise to x==true, false!=y, etc.
+	if x == vTrue && op == token.EQL {
+		if yt, ok := yt.(*types.Basic); ok && yt.Info&types.IsBoolean != 0 {
+			return y
+		}
+	}
+
+	if types.IsIdentical(xt, yt) {
+		// no conversion necessary
+	} else if _, ok := xt.(*types.Interface); ok {
+		y = emitConv(f, y, x.Type())
+	} else if _, ok := yt.(*types.Interface); ok {
+		x = emitConv(f, x, y.Type())
+	} else if _, ok := x.(*Literal); ok {
+		x = emitConv(f, x, y.Type())
+	} else if _, ok := y.(*Literal); ok {
+		y = emitConv(f, y, x.Type())
+	} else {
+		// other cases, e.g. channels.  No-op.
+	}
+
+	v := &BinOp{
+		Op: op,
+		X:  x,
+		Y:  y,
+	}
+	v.setType(tBool)
+	return f.emit(v)
+}
+
// emitConv emits to f code to convert Value val to exactly type typ,
// and returns the converted value.  Implicit conversions are implied
// by language assignability rules in the following operations:
//
// - from rvalue type to lvalue type in assignments.
// - from actual- to formal-parameter types in function calls.
// - from return value type to result type in return statements.
// - population of struct fields, array and slice elements, and map
//   keys and values within composite literals
// - from index value to index type in indexing expressions.
// - for both arguments of comparisons.
// - from value type to channel type in send expressions.
//
func emitConv(f *Function, val Value, typ types.Type) Value {
	// fmt.Printf("emitConv %s -> %s, %T", val.Type(), typ, val) // debugging

	// Identical types?  Conversion is a no-op.
	if types.IsIdentical(val.Type(), typ) {
		return val
	}

	ut_dst := underlyingType(typ)
	ut_src := underlyingType(val.Type())

	// Identical underlying types?  Conversion is a name change.
	if types.IsIdentical(ut_dst, ut_src) {
		// TODO(adonovan): make this use a distinct
		// instruction, ChangeType.  This instruction must
		// also cover the cases of channel type restrictions and
		// conversions between pointers to identical base
		// types.
		c := &Conv{X: val}
		c.setType(typ)
		return f.emit(c)
	}

	// Conversion to, or construction of a value of, an interface type?
	if _, ok := ut_dst.(*types.Interface); ok {

		// Assignment from one interface type to another?
		if _, ok := ut_src.(*types.Interface); ok {
			return emitTypeAssert(f, val, typ)
		}

		// Untyped nil literal?  Return interface-typed nil literal.
		if ut_src == tUntypedNil {
			return nilLiteral(typ)
		}

		// Convert (non-nil) "untyped" literals to their default type.
		// TODO(gri): expose types.isUntyped().
		if t, ok := ut_src.(*types.Basic); ok && t.Info&types.IsUntyped != 0 {
			val = emitConv(f, val, DefaultType(ut_src))
		}

		// Box the (now concrete) operand into an interface value
		// carrying its method set.
		mi := &MakeInterface{
			X:       val,
			Methods: f.Prog.MethodSet(val.Type()),
		}
		mi.setType(typ)
		return f.emit(mi)
	}

	// Conversion of a literal to a non-interface type results in
	// a new literal of the destination type and (initially) the
	// same abstract value.  We don't compute the representation
	// change yet; this defers the point at which the number of
	// possible representations explodes.
	if l, ok := val.(*Literal); ok {
		return newLiteral(l.Value, typ)
	}

	// A representation-changing conversion.
	c := &Conv{X: val}
	c.setType(typ)
	return f.emit(c)
}
+
+// emitStore emits to f an instruction to store value val at location
+// addr, applying implicit conversions as required by assignabilty rules.
+//
+func emitStore(f *Function, addr, val Value) {
+	f.emit(&Store{
+		Addr: addr,
+		Val:  emitConv(f, val, indirectType(addr.Type())),
+	})
+}
+
+// emitJump emits to f a jump to target, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitJump(f *Function, target *BasicBlock) {
+	b := f.currentBlock
+	b.emit(new(Jump))
+	addEdge(b, target)
+	f.currentBlock = nil
+}
+
+// emitIf emits to f a conditional jump to tblock or fblock based on
+// cond, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
+	b := f.currentBlock
+	b.emit(&If{Cond: cond})
+	addEdge(b, tblock)
+	addEdge(b, fblock)
+	f.currentBlock = nil
+}
+
+// emitExtract emits to f an instruction to extract the index'th
+// component of tuple, ascribing it type typ.  It returns the
+// extracted value.
+//
+func emitExtract(f *Function, tuple Value, index int, typ types.Type) Value {
+	e := &Extract{Tuple: tuple, Index: index}
+	// In all cases but one (tSelect's recv), typ is redundant w.r.t.
+	// tuple.Type().(*types.Result).Values[index].Type.
+	e.setType(typ)
+	return f.emit(e)
+}
+
+// emitTypeAssert emits to f a type assertion value := x.(t) and
+// returns the value.  x.Type() must be an interface.
+//
+func emitTypeAssert(f *Function, x Value, t types.Type) Value {
+	// Simplify infallible assertions.
+	txi := underlyingType(x.Type()).(*types.Interface)
+	if ti, ok := underlyingType(t).(*types.Interface); ok {
+		if types.IsIdentical(ti, txi) {
+			return x
+		}
+		if isSuperinterface(ti, txi) {
+			c := &ChangeInterface{X: x}
+			c.setType(t)
+			return f.emit(c)
+		}
+	}
+
+	a := &TypeAssert{X: x, AssertedType: t}
+	a.setType(t)
+	return f.emit(a)
+}
+
+// emitTypeTest emits to f a type test value,ok := x.(t) and returns
+// a (value, ok) tuple.  x.Type() must be an interface.
+//
+func emitTypeTest(f *Function, x Value, t types.Type) Value {
+	// TODO(adonovan): opt: simplify infallible tests as per
+	// emitTypeAssert, and return (x, vTrue).
+	// (Requires that exprN returns a slice of extracted values,
+	// not a single Value of type *types.Results.)
+	a := &TypeAssert{
+		X:            x,
+		AssertedType: t,
+		CommaOk:      true,
+	}
+	a.setType(&types.Result{Values: []*types.Var{
+		{Name: "value", Type: t},
+		varOk,
+	}})
+	return f.emit(a)
+}
+
// emitTailCall emits to f a function call in tail position,
// passing on all but the first formal parameter to f as actual
// values in the call.  Intended for delegating bridge methods.
// Precondition: f does/will not use deferred procedure calls.
// Postcondition: f.currentBlock is nil.
//
func emitTailCall(f *Function, call *Call) {
	// Forward every parameter of f except the first (the receiver).
	for _, arg := range f.Params[1:] {
		call.Call.Args = append(call.Call.Args, arg)
	}
	nr := len(f.Signature.Results)
	// Give the call the same result type as f: a single value,
	// or a tuple for zero/multiple results.
	if nr == 1 {
		call.Type_ = f.Signature.Results[0].Type
	} else {
		call.Type_ = &types.Result{Values: f.Signature.Results}
	}
	tuple := f.emit(call)
	var ret Ret
	switch nr {
	case 0:
		// no-op
	case 1:
		// A single result is returned directly.
		ret.Results = []Value{tuple}
	default:
		// Multiple results: extract each tuple component and
		// return them individually.
		for i, o := range call.Type().(*types.Result).Values {
			v := emitExtract(f, tuple, i, o.Type)
			// TODO(adonovan): in principle, this is required:
			//   v = emitConv(f, o.Type, f.Signature.Results[i].Type)
			// but in practice emitTailCall is only used when
			// the types exactly match.
			ret.Results = append(ret.Results, v)
		}
	}
	f.emit(&ret)
	f.currentBlock = nil
}
diff --git a/ssa/func.go b/ssa/func.go
new file mode 100644
index 0000000..4b93cf7
--- /dev/null
+++ b/ssa/func.go
@@ -0,0 +1,608 @@
+package ssa
+
+// This file implements the Function and BasicBlock types.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"io"
+	"os"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+// addEdge adds a control-flow graph edge from from to to.
+func addEdge(from, to *BasicBlock) {
+	from.Succs = append(from.Succs, to)
+	to.Preds = append(to.Preds, from)
+}
+
// String returns a human-readable label of this block, formatted
// as "Index.Comment" (e.g. "1.entry").
// It is not guaranteed unique within the function.
//
func (b *BasicBlock) String() string {
	return fmt.Sprintf("%d.%s", b.Index, b.Comment)
}
+
+// emit appends an instruction to the current basic block.
+// If the instruction defines a Value, it is returned.
+//
+func (b *BasicBlock) emit(i Instruction) Value {
+	i.SetBlock(b)
+	b.Instrs = append(b.Instrs, i)
+	v, _ := i.(Value)
+	return v
+}
+
+// predIndex returns the i such that b.Preds[i] == c or panics if
+// there is none.
+func (b *BasicBlock) predIndex(c *BasicBlock) int {
+	for i, pred := range b.Preds {
+		if pred == c {
+			return i
+		}
+	}
+	panic(fmt.Sprintf("no edge %s -> %s", c, b))
+}
+
+// hasPhi returns true if b.Instrs contains φ-nodes.
+func (b *BasicBlock) hasPhi() bool {
+	_, ok := b.Instrs[0].(*Phi)
+	return ok
+}
+
+// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
+func (b *BasicBlock) phis() []Instruction {
+	for i, instr := range b.Instrs {
+		if _, ok := instr.(*Phi); !ok {
+			return b.Instrs[:i]
+		}
+	}
+	return nil // unreachable in well-formed blocks
+}
+
+// replacePred replaces all occurrences of p in b's predecessor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replacePred(p, q *BasicBlock) {
+	for i, pred := range b.Preds {
+		if pred == p {
+			b.Preds[i] = q
+		}
+	}
+}
+
+// replaceSucc replaces all occurrences of p in b's successor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
+	for i, succ := range b.Succs {
+		if succ == p {
+			b.Succs[i] = q
+		}
+	}
+}
+
// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
//
// b.Preds and each φ-node's Edges are parallel arrays, so they are
// compacted in lockstep with a stable in-place filter.
//
func (b *BasicBlock) removePred(p *BasicBlock) {
	phis := b.phis()

	// We must preserve edge order for φ-nodes.
	j := 0
	for i, pred := range b.Preds {
		if pred != p {
			b.Preds[j] = b.Preds[i]
			// Strike out φ-edge too.
			for _, instr := range phis {
				phi := instr.(*Phi)
				phi.Edges[j] = phi.Edges[i]
			}
			j++
		}
	}
	// Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
	for i := j; i < len(b.Preds); i++ {
		b.Preds[i] = nil
		for _, instr := range phis {
			instr.(*Phi).Edges[i] = nil
		}
	}
	b.Preds = b.Preds[:j]
	for _, instr := range phis {
		phi := instr.(*Phi)
		phi.Edges = phi.Edges[:j]
	}
}
+
// Destinations associated with unlabelled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
//
type targets struct {
	tail         *targets    // rest of stack
	_break       *BasicBlock // destination of an unlabelled "break"
	_continue    *BasicBlock // destination of an unlabelled "continue"
	_fallthrough *BasicBlock // destination of "fallthrough"
}
+
// Destinations associated with a labelled block.
// We populate these as labels are encountered in forward gotos or
// labelled statements.
//
type lblock struct {
	_goto     *BasicBlock // destination of "goto label"
	_break    *BasicBlock // destination of "break label"
	_continue *BasicBlock // destination of "continue label"
}
+
// funcSyntax holds the syntax tree for the function declaration and body.
type funcSyntax struct {
	recvField    *ast.FieldList // receiver declaration, if a method
	paramFields  *ast.FieldList // parameter declarations
	resultFields *ast.FieldList // result declarations
	body         *ast.BlockStmt // function body
}
+
+// labelledBlock returns the branch target associated with the
+// specified label, creating it if needed.
+//
+func (f *Function) labelledBlock(label *ast.Ident) *lblock {
+	lb := f.lblocks[label.Obj]
+	if lb == nil {
+		lb = &lblock{_goto: f.newBasicBlock(label.Name)}
+		if f.lblocks == nil {
+			f.lblocks = make(map[*ast.Object]*lblock)
+		}
+		f.lblocks[label.Obj] = lb
+	}
+	return lb
+}
+
+// addParam adds a (non-escaping) parameter to f.Params of the
+// specified name and type.
+//
+func (f *Function) addParam(name string, typ types.Type) *Parameter {
+	v := &Parameter{
+		Name_: name,
+		Type_: typ,
+	}
+	f.Params = append(f.Params, v)
+	return v
+}
+
+// addSpilledParam declares a parameter that is pre-spilled to the
+// stack; the function body will load/store the spilled location.
+// Subsequent lifting will eliminate spills where possible.
+//
+func (f *Function) addSpilledParam(obj types.Object) {
+	name := obj.GetName()
+	param := f.addParam(name, obj.GetType())
+	spill := &Alloc{
+		Name_: name + "~", // "~" means "spilled"
+		Type_: pointer(obj.GetType()),
+	}
+	f.objects[obj] = spill
+	f.Locals = append(f.Locals, spill)
+	f.emit(spill)
+	f.emit(&Store{Addr: spill, Val: param})
+}
+
+// startBody initializes the function prior to generating SSA code for its body.
+// Precondition: f.Type() already set.
+//
+func (f *Function) startBody() {
+	f.currentBlock = f.newBasicBlock("entry")
+	f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
+}
+
+// createSyntacticParams populates f.Params and generates code (spills
+// and named result locals) for all the parameters declared in the
+// syntax.  In addition it populates the f.objects mapping.
+//
+// idents must be a mapping from syntactic identifiers to their
+// canonical type objects.
+//
+// Preconditions:
+// f.syntax != nil, i.e. this is a Go source function.
+// f.startBody() was called.
+// Postcondition:
+// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv ? 1 : 0)
+//
+func (f *Function) createSyntacticParams(idents map[*ast.Ident]types.Object) {
+	// Receiver (at most one inner iteration).
+	if f.syntax.recvField != nil {
+		for _, field := range f.syntax.recvField.List {
+			for _, n := range field.Names {
+				f.addSpilledParam(idents[n])
+			}
+			// Anonymous receiver?  No need to spill.
+			if field.Names == nil {
+				recvVar := f.Signature.Recv
+				f.addParam(recvVar.Name, recvVar.Type)
+			}
+		}
+	}
+
+	// Parameters.
+	if f.syntax.paramFields != nil {
+		n := len(f.Params) // 1 if has recv, 0 otherwise
+		for _, field := range f.syntax.paramFields.List {
+			for _, n := range field.Names {
+				f.addSpilledParam(idents[n])
+			}
+			// Anonymous parameter?  No need to spill.
+			if field.Names == nil {
+				paramVar := f.Signature.Params[len(f.Params)-n]
+				f.addParam(paramVar.Name, paramVar.Type)
+			}
+		}
+	}
+
+	// Named results.
+	if f.syntax.resultFields != nil {
+		for _, field := range f.syntax.resultFields.List {
+			// Implicit "var" decl of locals for named results.
+			for _, n := range field.Names {
+				f.namedResults = append(f.namedResults, f.addNamedLocal(idents[n]))
+			}
+		}
+	}
+}
+
// numberRegisters assigns numbers to all SSA registers
// (value-defining Instructions) in f, to aid debugging.
// (Non-Instruction Values are named at construction.)
// NB: named Allocs retain their existing name.
// TODO(adonovan): when we have source position info,
// preserve names only for source locals.
//
func numberRegisters(f *Function) {
	a, v := 0, 0 // independent counters for Allocs and other registers
	for _, b := range f.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *Alloc:
				// Allocs may be named at birth.
				if instr.Name_ == "" {
					instr.Name_ = fmt.Sprintf("a%d", a)
					a++
				}
			case Value:
				// Assert to an anonymous interface so each
				// value-defining instruction type need not be
				// enumerated; all are expected to implement
				// setNum.
				instr.(interface {
					setNum(int)
				}).setNum(v)
				v++
			}
		}
	}
}
+
+// buildReferrers populates the def/use information in all non-nil
+// Value.Referrers slice.
+// Precondition: all such slices are initially empty.
+func buildReferrers(f *Function) {
+	var rands []*Value
+	for _, b := range f.Blocks {
+		for _, instr := range b.Instrs {
+			rands = instr.Operands(rands[:0]) // recycle storage
+			for _, rand := range rands {
+				if r := *rand; r != nil {
+					if ref := r.Referrers(); ref != nil {
+						*ref = append(*ref, instr)
+					}
+				}
+			}
+		}
+	}
+}
+
// finishBody() finalizes the function after SSA code generation of its body:
// it discards builder-only state, prunes heap-allocated locals, runs the
// block optimizations, builds def/use info, optionally lifts Allocs to
// registers, and numbers the registers.
func (f *Function) finishBody() {
	// Discard builder-only state.
	f.objects = nil
	f.namedResults = nil
	f.currentBlock = nil
	f.lblocks = nil
	f.syntax = nil

	// Remove any f.Locals that are now heap-allocated.
	j := 0
	for _, l := range f.Locals {
		if !l.Heap {
			f.Locals[j] = l
			j++
		}
	}
	// Nil out f.Locals[j:] to aid GC.
	for i := j; i < len(f.Locals); i++ {
		f.Locals[i] = nil
	}
	f.Locals = f.Locals[:j]

	optimizeBlocks(f)

	// Populate Value.Referrers before any pass that may consult it.
	buildReferrers(f)

	if f.Prog.mode&NaiveForm == 0 {
		// For debugging pre-state of lifting pass:
		// numberRegisters(f)
		// f.DumpTo(os.Stderr)

		lift(f)
	}

	numberRegisters(f)

	if f.Prog.mode&LogFunctions != 0 {
		f.DumpTo(os.Stderr)
	}

	if f.Prog.mode&SanityCheckFunctions != 0 {
		MustSanityCheck(f, nil)
	}
}
+
+// removeNilBlocks eliminates nils from f.Blocks and updates each
+// BasicBlock.Index.  Use this after any pass that may delete blocks.
+//
+func (f *Function) removeNilBlocks() {
+	j := 0
+	for _, b := range f.Blocks {
+		if b != nil {
+			b.Index = j
+			f.Blocks[j] = b
+			j++
+		}
+	}
+	// Nil out f.Blocks[j:] to aid GC.
+	for i := j; i < len(f.Blocks); i++ {
+		f.Blocks[i] = nil
+	}
+	f.Blocks = f.Blocks[:j]
+}
+
+// addNamedLocal creates a local variable, adds it to function f and
+// returns it.  Its name and type are taken from obj.  Subsequent
+// calls to f.lookup(obj) will return the same local.
+//
+// Precondition: f.syntax != nil (i.e. a Go source function).
+//
+func (f *Function) addNamedLocal(obj types.Object) *Alloc {
+	l := f.addLocal(obj.GetType(), obj.GetPos())
+	l.Name_ = obj.GetName()
+	f.objects[obj] = l
+	return l
+}
+
+// addLocal creates an anonymous local variable of type typ, adds it
+// to function f and returns it.  pos is the optional source location.
+//
+func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
+	v := &Alloc{Type_: pointer(typ), Pos: pos}
+	f.Locals = append(f.Locals, v)
+	f.emit(v)
+	return v
+}
+
+// lookup returns the address of the named variable identified by obj
+// that is local to function f or one of its enclosing functions.
+// If escaping, the reference comes from a potentially escaping pointer
+// expression and the referent must be heap-allocated.
+//
+func (f *Function) lookup(obj types.Object, escaping bool) Value {
+	if v, ok := f.objects[obj]; ok {
+		if escaping {
+			// Walk up the chain of Captures.
+			x := v
+			for {
+				if c, ok := x.(*Capture); ok {
+					x = c.Outer
+				} else {
+					break
+				}
+			}
+			// By construction, all captures are ultimately Allocs in the
+			// naive SSA form.  Parameters are pre-spilled to the stack.
+			x.(*Alloc).Heap = true
+		}
+		return v // function-local var (address)
+	}
+
+	// Definition must be in an enclosing function;
+	// plumb it through intervening closures.
+	if f.Enclosing == nil {
+		panic("no Value for type.Object " + obj.GetName())
+	}
+	v := &Capture{Outer: f.Enclosing.lookup(obj, true)} // escaping
+	f.objects[obj] = v
+	f.FreeVars = append(f.FreeVars, v)
+	return v
+}
+
+// emit emits the specified instruction to function f, updating the
+// control-flow graph if required.
+//
+func (f *Function) emit(instr Instruction) Value {
+	return f.currentBlock.emit(instr)
+}
+
// FullName returns the full name of this function, qualified by
// package name, receiver type, etc.  It is equivalent to
// f.fullName(nil), i.e. with no "from" package suppressing
// qualification.
//
// The specific formatting rules are not guaranteed and may change.
//
// Examples:
//      "math.IsNaN"                // a package-level function
//      "IsNaN"                     // intra-package reference to same
//      "(*sync.WaitGroup).Add"     // a declared method
//      "(*exp/ssa.Ret).Block"      // a bridge method
//      "(ssa.Instruction).Block"   // an interface method thunk
//      "func@5.32"                 // an anonymous function
//
func (f *Function) FullName() string {
	return f.fullName(nil)
}
+
// Like FullName, but if from==f.Pkg, suppress package qualification.
func (f *Function) fullName(from *Package) string {
	// Anonymous?  Use the bare short name (e.g. "func@5.32").
	if f.Enclosing != nil {
		return f.Name_
	}

	recv := f.Signature.Recv

	// Synthetic?  (f.Pkg == nil distinguishes bridge methods and
	// interface method thunks, which have no declaring package.)
	if f.Pkg == nil {
		var recvType types.Type
		if recv != nil {
			recvType = recv.Type // bridge method
		} else {
			recvType = f.Params[0].Type() // interface method thunk
		}
		return fmt.Sprintf("(%s).%s", recvType, f.Name_)
	}

	// Declared method?  Qualify by the receiver type.
	if recv != nil {
		return fmt.Sprintf("(%s).%s", recv.Type, f.Name_)
	}

	// Package-level function.
	// Prefix with package name for cross-package references only.
	if from != f.Pkg {
		return fmt.Sprintf("%s.%s", f.Pkg.Types.Path, f.Name_)
	}
	return f.Name_
}
+
// writeSignature writes to w the signature sig in declaration syntax.
// name is the function name; params supplies the parameter names,
// with the receiver (if any) at index 0.
// Derived from types.Signature.String().
//
func writeSignature(w io.Writer, name string, sig *types.Signature, params []*Parameter) {
	io.WriteString(w, "func ")
	if sig.Recv != nil {
		// Receiver clause: "(name type) ".
		io.WriteString(w, "(")
		if n := params[0].Name(); n != "" {
			io.WriteString(w, n)
			io.WriteString(w, " ")
		}
		io.WriteString(w, params[0].Type().String())
		io.WriteString(w, ") ")
		params = params[1:]
	}
	io.WriteString(w, name)
	io.WriteString(w, "(")
	for i, v := range params {
		if i > 0 {
			io.WriteString(w, ", ")
		}
		io.WriteString(w, v.Name())
		io.WriteString(w, " ")
		// The final parameter of a variadic signature is shown as "...T".
		if sig.IsVariadic && i == len(params)-1 {
			io.WriteString(w, "...")
			io.WriteString(w, underlyingType(v.Type()).(*types.Slice).Elt.String())
		} else {
			io.WriteString(w, v.Type().String())
		}
	}
	io.WriteString(w, ")")
	if res := sig.Results; res != nil {
		io.WriteString(w, " ")
		var t types.Type
		// A single unnamed result prints bare; otherwise tuple syntax.
		if len(res) == 1 && res[0].Name == "" {
			t = res[0].Type
		} else {
			t = &types.Result{Values: res}
		}
		io.WriteString(w, t.String())
	}
}
+
// DumpTo prints to w a human readable "disassembly" of the SSA code of
// all basic blocks of function f: a header of name/position/free
// variables/locals, the signature, then each block's instructions.
//
func (f *Function) DumpTo(w io.Writer) {
	fmt.Fprintf(w, "# Name: %s\n", f.FullName())
	fmt.Fprintf(w, "# Declared at %s\n", f.Prog.Files.Position(f.Pos))

	if f.Enclosing != nil {
		fmt.Fprintf(w, "# Parent: %s\n", f.Enclosing.Name())
	}

	if f.FreeVars != nil {
		io.WriteString(w, "# Free variables:\n")
		for i, fv := range f.FreeVars {
			fmt.Fprintf(w, "# % 3d:\t%s %s\n", i, fv.Name(), fv.Type())
		}
	}

	if len(f.Locals) > 0 {
		io.WriteString(w, "# Locals:\n")
		for i, l := range f.Locals {
			fmt.Fprintf(w, "# % 3d:\t%s %s\n", i, l.Name(), indirectType(l.Type()))
		}
	}

	writeSignature(w, f.Name(), f.Signature, f.Params)
	io.WriteString(w, ":\n")

	if f.Blocks == nil {
		io.WriteString(w, "\t(external)\n")
	}

	// One section per block: label + pred/succ counts, then one
	// instruction per line.
	for _, b := range f.Blocks {
		if b == nil {
			// Corrupt CFG.
			fmt.Fprintf(w, ".nil:\n")
			continue
		}
		fmt.Fprintf(w, ".%s:\t\t\t\t\t\t\t       P:%d S:%d\n", b, len(b.Preds), len(b.Succs))
		if false { // CFG debugging
			fmt.Fprintf(w, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
		}
		for _, instr := range b.Instrs {
			io.WriteString(w, "\t")
			switch v := instr.(type) {
			case Value:
				l := 80 // for old time's sake.
				// Left-align the instruction.
				if name := v.Name(); name != "" {
					n, _ := fmt.Fprintf(w, "%s = ", name)
					l -= n
				}
				n, _ := io.WriteString(w, instr.String())
				l -= n
				// Right-align the type.
				if t := v.Type(); t != nil {
					fmt.Fprintf(w, " %*s", l-10, t)
				}
			case nil:
				// Be robust against bad transforms.
				io.WriteString(w, "<deleted>")
			default:
				io.WriteString(w, instr.String())
			}
			io.WriteString(w, "\n")
		}
	}
	fmt.Fprintf(w, "\n")
}
+
// newBasicBlock adds to f a new basic block and returns it.  It does
// not automatically become the current block for subsequent calls to emit.
// comment is an optional string for more readable debugging output.
//
func (f *Function) newBasicBlock(comment string) *BasicBlock {
	b := &BasicBlock{
		Index:   len(f.Blocks),
		Comment: comment,
		Func:    f,
	}
	// Back Succs with the block's own succs2 buffer — presumably an
	// inline array sized for the common two-successor case, avoiding
	// a separate allocation; confirm against the BasicBlock declaration.
	b.Succs = b.succs2[:0]
	f.Blocks = append(f.Blocks, b)
	return b
}
diff --git a/ssa/importer.go b/ssa/importer.go
new file mode 100644
index 0000000..d6b0106
--- /dev/null
+++ b/ssa/importer.go
@@ -0,0 +1,155 @@
+package ssa
+
+// This file defines an implementation of the types.Importer interface
+// (func) that loads the transitive closure of dependencies of a
+// "main" package.
+
+import (
+	"errors"
+	"go/ast"
+	"go/build"
+	"go/parser"
+	"go/token"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
// Prototype of a function that locates, reads and parses a set of
// source files given an import path.
//
// fset is the fileset to which the ASTs should be added.
// path is the imported path, e.g. "sync/atomic".
//
// On success, the function returns files, the set of ASTs produced;
// otherwise it returns the first error encountered.
//
type SourceLoader func(fset *token.FileSet, path string) (files []*ast.File, err error)
+
// doImport loads the typechecker package identified by path.
// Implements the types.Importer prototype.
//
// Results (and errors) are cached in b.Prog.Packages/b.importErrs,
// so each distinct path is loaded at most once.
//
func (b *Builder) doImport(imports map[string]*types.Package, path string) (typkg *types.Package, err error) {
	// Package unsafe is handled specially, and has no ssa.Package.
	if path == "unsafe" {
		return types.Unsafe, nil
	}

	if pkg := b.Prog.Packages[path]; pkg != nil {
		typkg = pkg.Types
		imports[path] = typkg
		return // positive cache hit
	}

	if err = b.importErrs[path]; err != nil {
		return // negative cache hit
	}
	var files []*ast.File
	var info *TypeInfo
	if b.Context.Mode&UseGCImporter != 0 {
		// Load from compiler export data; no syntax trees.
		typkg, err = types.GcImport(imports, path)
	} else {
		// Load, parse and type-check from source.
		files, err = b.Context.Loader(b.Prog.Files, path)
		if err == nil {
			typkg, info, err = b.typecheck(files)
		}
	}
	if err != nil {
		// Cache failure
		b.importErrs[path] = err
		return nil, err
	}

	// Cache success
	imports[path] = typkg                                                 // cache for just this package.
	b.Prog.Packages[path] = b.createPackageImpl(typkg, path, files, info) // cache across all packages

	return typkg, nil
}
+
+// GorootLoader is an implementation of the SourceLoader function
+// prototype that loads and parses Go source files from the package
+// directory beneath $GOROOT/src/pkg.
+//
+// TODO(adonovan): get rsc and adg (go/build owners) to review this.
+// TODO(adonovan): permit clients to specify a non-default go/build.Context.
+//
+func GorootLoader(fset *token.FileSet, path string) (files []*ast.File, err error) {
+	// TODO(adonovan): fix: Do we need cwd? Shouldn't ImportDir(path) / $GOROOT suffice?
+	srcDir, err := os.Getwd()
+	if err != nil {
+		return // serious misconfiguration
+	}
+	bp, err := build.Import(path, srcDir, 0)
+	if err != nil {
+		return // import failed
+	}
+	files, err = ParseFiles(fset, bp.Dir, bp.GoFiles...)
+	if err != nil {
+		return nil, err
+	}
+	return
+}
+
// ParseFiles parses the Go source files files within directory dir
// and returns their ASTs, or the first parse error if any.
// A file given as an absolute path is used directly; otherwise it is
// interpreted relative to dir.
//
// On error the returned slice is nil: no partially-parsed result is
// exposed (previously the files parsed before the failure were
// returned alongside the error).
//
// This utility function is provided to facilitate implementing a
// SourceLoader.
//
func ParseFiles(fset *token.FileSet, dir string, files ...string) ([]*ast.File, error) {
	parsed := make([]*ast.File, 0, len(files))
	for _, file := range files {
		if !filepath.IsAbs(file) {
			file = filepath.Join(dir, file)
		}
		f, err := parser.ParseFile(fset, file, nil, parser.DeclarationErrors)
		if err != nil {
			return nil, err // parsing failed
		}
		parsed = append(parsed, f)
	}
	return parsed, nil
}
+
+// CreatePackageFromArgs builds an initial Package from a list of
+// command-line arguments.
+// If args is a list of *.go files, they are parsed and type-checked.
+// If args is a Go package import path, that package is imported.
+// rest is the suffix of args that were not consumed.
+//
+// This utility is provided to facilitate construction of command-line
+// tools with a consistent user interface.
+//
+func CreatePackageFromArgs(builder *Builder, args []string) (pkg *Package, rest []string, err error) {
+	var pkgname string
+	var files []*ast.File
+
+	switch {
+	case len(args) == 0:
+		err = errors.New("No *.go source files nor package name was specified.")
+
+	case strings.HasSuffix(args[0], ".go"):
+		// % tool a.go b.go ...
+		// Leading consecutive *.go arguments constitute main package.
+		pkgname = "main"
+		i := 1
+		for ; i < len(args) && strings.HasSuffix(args[i], ".go"); i++ {
+		}
+		files, err = ParseFiles(builder.Prog.Files, ".", args[:i]...)
+		rest = args[i:]
+
+	default:
+		// % tool my/package ...
+		// First argument is import path of main package.
+		pkgname = args[0]
+		rest = args[1:]
+		files, err = builder.Context.Loader(builder.Prog.Files, pkgname)
+	}
+	if err == nil {
+		pkg, err = builder.CreatePackage(pkgname, files)
+	}
+	return
+}
diff --git a/ssa/lift.go b/ssa/lift.go
new file mode 100644
index 0000000..62f9993
--- /dev/null
+++ b/ssa/lift.go
@@ -0,0 +1,513 @@
+package ssa
+
+// This file defines the lifting pass which tries to "lift" Alloc
+// cells (new/local variables) into SSA registers, replacing loads
+// with the dominating stored value, eliminating loads and stores, and
+// inserting φ-nodes as needed.
+
+// Cited papers and resources:
+//
+// Ron Cytron et al. 1991. Efficiently computing SSA form...
+// http://doi.acm.org/10.1145/115372.115320
+//
+// Cooper, Harvey, Kennedy.  2001.  A Simple, Fast Dominance Algorithm.
+// Software Practice and Experience 2001, 4:1-10.
+// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf
+//
+// Daniel Berlin, llvmdev mailing list, 2012.
+// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
+// (Be sure to expand the whole thread.)
+
+// TODO(adonovan): opt: there are many optimizations worth evaluating, and
+// the conventional wisdom for SSA construction is that a simple
+// algorithm well engineered often beats those of better asymptotic
+// complexity on all but the most egregious inputs.
+//
+// Danny Berlin suggests that the Cooper et al. algorithm for
+// computing the dominance frontier is superior to Cytron et al.
+// Furthermore he recommends that rather than computing the DF for the
+// whole function then renaming all alloc cells, it may be cheaper to
+// compute the DF for each alloc cell separately and throw it away.
+//
+// Consider exploiting liveness information to avoid creating dead
+// φ-nodes which we then immediately remove.
+//
+// Integrate lifting with scalar replacement of aggregates (SRA) since
+// the two are synergistic.
+//
+// Also see many other "TODO: opt" suggestions in the code.
+
+import (
+	"fmt"
+	"go/token"
+	"math/big"
+	"os"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+// If true, perform sanity checking and show diagnostic information at
+// each step of lifting.  Very verbose.
+const debugLifting = false
+
// domFrontier maps each block to the set of blocks in its dominance
// frontier.  The outer slice is conceptually a map keyed by
// Block.Index.  The inner slice is conceptually a set, possibly
// containing duplicates.
//
// The dominance frontier of block b is the set of blocks that are
// "just past" the region b dominates: b dominates a predecessor of
// each member, but does not strictly dominate the member itself.
//
// TODO(adonovan): opt: measure impact of dups; consider a packed bit
// representation, e.g. big.Int, and bitwise parallel operations for
// the union step in the Children loop.
//
// domFrontier's methods mutate the slice's elements but not its
// length, so their receivers needn't be pointers.
//
type domFrontier [][]*BasicBlock
+
+func (df domFrontier) add(u, v *domNode) {
+	p := &df[u.Block.Index]
+	*p = append(*p, v.Block)
+}
+
+// build builds the dominance frontier df for the dominator (sub)tree
+// rooted at u, using the Cytron et al. algorithm.
+//
+// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
+// by pruning the entire IDF computation, rather than merely pruning
+// the DF -> IDF step.
+func (df domFrontier) build(u *domNode) {
+	// Encounter each node u in postorder of dom tree.
+	for _, child := range u.Children {
+		df.build(child)
+	}
+	for _, vb := range u.Block.Succs {
+		if v := vb.dom; v.Idom != u {
+			df.add(u, v)
+		}
+	}
+	for _, w := range u.Children {
+		for _, vb := range df[w.Block.Index] {
+			// TODO(adonovan): opt: use word-parallel bitwise union.
+			if v := vb.dom; v.Idom != u {
+				df.add(u, v)
+			}
+		}
+	}
+}
+
+func buildDomFrontier(fn *Function) domFrontier {
+	df := make(domFrontier, len(fn.Blocks))
+	df.build(fn.Blocks[0].dom)
+	return df
+}
+
+// lift attempts to replace local and new Allocs accessed only with
+// load/store by SSA registers, inserting φ-nodes where necessary.
+// The result is a program in classical pruned SSA form.
+//
+// Preconditions:
+// - fn has no dead blocks (blockopt has run).
+// - Def/use info (Operands and Referrers) is up-to-date.
+//
+func lift(fn *Function) {
+	// TODO(adonovan): opt: lots of little optimizations may be
+	// worthwhile here, especially if they cause us to avoid
+	// buildDomTree.  For example:
+	//
+	// - Alloc never loaded?  Eliminate.
+	// - Alloc never stored?  Replace all loads with a zero literal.
+	// - Alloc stored once?  Replace loads with dominating store;
+	//   don't forget that an Alloc is itself an effective store
+	//   of zero.
+	// - Alloc used only within a single block?
+	//   Use degenerate algorithm avoiding φ-nodes.
+	// - Consider synergy with scalar replacement of aggregates (SRA).
+	//   e.g. *(&x.f) where x is an Alloc.
+	//   Perhaps we'd get better results if we generated this as x.f
+	//   i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)).
+	//   Unclear.
+	//
+	// But we will start with the simplest correct code to make
+	// life easier for reviewers.
+
+	buildDomTree(fn)
+
+	df := buildDomFrontier(fn)
+
+	if debugLifting {
+		title := false
+		for i, blocks := range df {
+			if blocks != nil {
+				if !title {
+					fmt.Fprintln(os.Stderr, "Dominance frontier:")
+					title = true
+				}
+				fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks)
+			}
+		}
+	}
+
+	newPhis := make(newPhiMap)
+
+	// During this pass we will replace some BasicBlock.Instrs
+	// (allocs, loads and stores) with nil, keeping a count in
+	// BasicBlock.gaps.  At the end we will reset Instrs to the
+	// concatenation of all non-dead newPhis and non-nil Instrs
+	// for the block, reusing the original array if space permits.
+
+	// While we're here, we also eliminate 'rundefers'
+	// instructions in functions that contain no 'defer'
+	// instructions.
+	usesDefer := false
+
+	// Determine which allocs we can lift and number them densely.
+	// The renaming phase uses this numbering for compact maps.
+	numAllocs := 0
+	for _, b := range fn.Blocks {
+		b.gaps = 0
+		b.rundefers = 0
+		for i, instr := range b.Instrs {
+			switch instr := instr.(type) {
+			case *Alloc:
+				if liftAlloc(df, instr, newPhis) {
+					instr.index = numAllocs
+					numAllocs++
+					// Delete the alloc.
+					b.Instrs[i] = nil
+					b.gaps++
+				} else {
+					instr.index = -1
+				}
+			case *Defer:
+				usesDefer = true
+			case *RunDefers:
+				b.rundefers++
+			}
+		}
+	}
+
+	// renaming maps an alloc (keyed by index) to its replacement
+	// value.  Initially the renaming contains nil, signifying the
+	// zero literal of the appropriate type; we construct the
+	// Literal lazily at most once on each path through the domtree.
+	// TODO(adonovan): opt: cache per-function not per subtree.
+	renaming := make([]Value, numAllocs)
+
+	// Renaming.
+	rename(fn.Blocks[0], renaming, newPhis)
+
+	// Eliminate dead new phis, then prepend the live ones to each block.
+	for _, b := range fn.Blocks {
+
+		// Compress the newPhis slice to eliminate unused phis.
+		// TODO(adonovan): opt: compute liveness to avoid
+		// placing phis in blocks for which the alloc cell is
+		// not live.
+		nps := newPhis[b]
+		j := 0
+		for _, np := range nps {
+			if len(*np.phi.Referrers()) == 0 {
+				continue // unreferenced phi
+			}
+			nps[j] = np
+			j++
+		}
+		nps = nps[:j]
+
+		rundefersToKill := b.rundefers
+		if usesDefer {
+			rundefersToKill = 0
+		}
+
+		if j+b.gaps+rundefersToKill == 0 {
+			continue // fast path: no new phis or gaps
+		}
+
+		// Compact nps + non-nil Instrs into a new slice.
+		// TODO(adonovan): opt: compact in situ if there is
+		// sufficient space or slack in the slice.
+		dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
+		for i, np := range nps {
+			dst[i] = np.phi
+		}
+		for _, instr := range b.Instrs {
+			if instr == nil {
+				continue
+			}
+			if !usesDefer {
+				if _, ok := instr.(*RunDefers); ok {
+					continue
+				}
+			}
+			dst[j] = instr
+			j++
+		}
+		for i, np := range nps {
+			dst[i] = np.phi
+		}
+		b.Instrs = dst
+	}
+
+	// Remove any fn.Locals that were lifted.
+	j := 0
+	for _, l := range fn.Locals {
+		if l.index == -1 {
+			fn.Locals[j] = l
+			j++
+		}
+	}
+	// Nil out fn.Locals[j:] to aid GC.
+	for i := j; i < len(fn.Locals); i++ {
+		fn.Locals[i] = nil
+	}
+	fn.Locals = fn.Locals[:j]
+}
+
// blockSet is a set of BasicBlocks, represented as a bitmap
// indexed by BasicBlock.Index.
type blockSet struct{ big.Int } // (inherit methods from Int)
+
+// add adds b to the set and returns true if the set changed.
+func (s *blockSet) add(b *BasicBlock) bool {
+	i := b.Index
+	if s.Bit(i) != 0 {
+		return false
+	}
+	s.SetBit(&s.Int, i, 1)
+	return true
+}
+
+// take removes an arbitrary element from a set s and
+// returns its index, or returns -1 if empty.
+func (s *blockSet) take() int {
+	l := s.BitLen()
+	for i := 0; i < l; i++ {
+		if s.Bit(i) == 1 {
+			s.SetBit(&s.Int, i, 0)
+			return i
+		}
+	}
+	return -1
+}
+
// newPhi is a pair of a newly introduced φ-node and the lifted Alloc
// it replaces.
type newPhi struct {
	phi   *Phi   // the φ-node inserted during lifting
	alloc *Alloc // the alloc cell the φ-node renames
}

// newPhiMap records for each basic block, the set of newPhis that
// must be prepended to the block.
type newPhiMap map[*BasicBlock][]newPhi
+
// liftAlloc determines whether alloc can be lifted into registers,
// and if so, it populates newPhis with all the φ-nodes it may require
// and returns true.
//
// An alloc is liftable only if its address is used exclusively as the
// operand of loads (*UnOp with token.MUL) and stores (*Store.Addr).
//
func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool {
	// Don't lift aggregates into registers.
	// We'll need a separate SRA pass for that.
	switch underlyingType(indirectType(alloc.Type())).(type) {
	case *types.Array, *types.Struct:
		return false
	}

	// Compute defblocks, the set of blocks containing a
	// definition of the alloc cell.
	var defblocks blockSet
	for _, instr := range *alloc.Referrers() {
		// Bail out if we discover the alloc is not liftable;
		// the only operations permitted to use the alloc are
		// loads/stores into the cell.
		switch instr := instr.(type) {
		case *Store:
			if instr.Val == alloc {
				return false // address used as value
			}
			if instr.Addr != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
			defblocks.add(instr.Block())
		case *UnOp:
			if instr.Op != token.MUL {
				return false // not a load
			}
			if instr.X != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
		default:
			return false // some other instruction
		}
	}
	// The Alloc itself counts as a (zero) definition of the cell.
	defblocks.add(alloc.Block())

	if debugLifting {
		fmt.Fprintln(os.Stderr, "liftAlloc: lifting ", alloc, alloc.Name())
	}

	fn := alloc.Block().Func

	// Φ-insertion.
	//
	// What follows is the body of the main loop of the insert-φ
	// function described by Cytron et al, but instead of using
	// counter tricks, we just reset the 'hasAlready' and 'work'
	// sets each iteration.  These are bitmaps so it's pretty cheap.
	//
	// TODO(adonovan): opt: recycle slice storage for W,
	// hasAlready, defBlocks across liftAlloc calls.
	var hasAlready blockSet // blocks that already have a φ-node for this cell

	// Initialize W and work to defblocks.
	var work blockSet = defblocks // blocks seen
	var W blockSet                // blocks to do
	W.Set(&defblocks.Int)

	// Traverse iterated dominance frontier, inserting φ-nodes.
	for i := W.take(); i != -1; i = W.take() {
		u := fn.Blocks[i]
		for _, v := range df[u.Index] {
			if hasAlready.add(v) {
				// Create φ-node.
				// It will be prepended to v.Instrs later, if needed.
				phi := &Phi{
					Edges:   make([]Value, len(v.Preds)),
					Comment: alloc.Name(),
				}
				phi.setType(indirectType(alloc.Type()))
				phi.Block_ = v
				if debugLifting {
					fmt.Fprintf(os.Stderr, "place %s = %s at block %s\n", phi.Name(), phi, v)
				}
				newPhis[v] = append(newPhis[v], newPhi{phi, alloc})

				// v now defines the cell too, so its frontier
				// must also be processed.
				if work.add(v) {
					W.add(v)
				}
			}
		}
	}

	return true
}
+
+// replaceAll replaces all intraprocedural uses of x with y,
+// updating x.Referrers and y.Referrers.
+// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
+//
+func replaceAll(x, y Value) {
+	var rands []*Value
+	pxrefs := x.Referrers()
+	pyrefs := y.Referrers()
+	for _, instr := range *pxrefs {
+		rands = instr.Operands(rands[:0]) // recycle storage
+		for _, rand := range rands {
+			if *rand != nil {
+				if *rand == x {
+					*rand = y
+				}
+			}
+		}
+		if pyrefs != nil {
+			*pyrefs = append(*pyrefs, instr) // dups ok
+		}
+	}
+	*pxrefs = nil // x is now unreferenced
+}
+
+// renamed returns the value to which alloc is being renamed,
+// constructing it lazily if it's the implicit zero initialization.
+//
+func renamed(renaming []Value, alloc *Alloc) Value {
+	v := renaming[alloc.index]
+	if v == nil {
+		v = zeroLiteral(indirectType(alloc.Type()))
+		renaming[alloc.index] = v
+	}
+	return v
+}
+
+// rename implements the (Cytron et al) SSA renaming algorithm, a
+// preorder traversal of the dominator tree replacing all loads of
+// Alloc cells with the value stored to that cell by the dominating
+// store instruction.  For lifting, we need only consider loads,
+// stores and φ-nodes.
+//
+// renaming is a map from *Alloc (keyed by index number) to its
+// dominating stored value; newPhis[x] is the set of new φ-nodes to be
+// prepended to block x.
+//
+func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
+	// Each φ-node becomes the new name for its associated Alloc.
+	for _, np := range newPhis[u] {
+		phi := np.phi
+		alloc := np.alloc
+		renaming[alloc.index] = phi
+	}
+
+	// Rename loads and stores of allocs.
+	for i, instr := range u.Instrs {
+		_ = i
+		switch instr := instr.(type) {
+		case *Store:
+			if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index != -1 { // store to Alloc cell
+				// Delete the Store.
+				u.Instrs[i] = nil
+				u.gaps++
+				// Replace dominated loads by the
+				// stored value.
+				renaming[alloc.index] = instr.Val
+				if debugLifting {
+					fmt.Fprintln(os.Stderr, "Kill store ", instr, "; current value is now ", instr.Val.Name())
+				}
+			}
+		case *UnOp:
+			if instr.Op == token.MUL {
+				if alloc, ok := instr.X.(*Alloc); ok && alloc.index != -1 { // load of Alloc cell
+					newval := renamed(renaming, alloc)
+					if debugLifting {
+						fmt.Fprintln(os.Stderr, "Replace refs to load", instr.Name(), "=", instr, "with", newval.Name())
+					}
+					// Replace all references to
+					// the loaded value by the
+					// dominating stored value.
+					replaceAll(instr, newval)
+					// Delete the Load.
+					u.Instrs[i] = nil
+					u.gaps++
+				}
+			}
+		}
+	}
+
+	// For each φ-node in a CFG successor, rename the edge.
+	for _, v := range u.Succs {
+		phis := newPhis[v]
+		if len(phis) == 0 {
+			continue
+		}
+		i := v.predIndex(u)
+		for _, np := range phis {
+			phi := np.phi
+			alloc := np.alloc
+			newval := renamed(renaming, alloc)
+			if debugLifting {
+				fmt.Fprintf(os.Stderr, "setphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n \n",
+					phi.Name(), u, v, i, alloc.Name(), newval.Name())
+			}
+			phi.Edges[i] = newval
+			if prefs := newval.Referrers(); prefs != nil {
+				*prefs = append(*prefs, phi)
+			}
+		}
+	}
+
+	// Continue depth-first recursion over domtree, pushing a
+	// fresh copy of the renaming map for each subtree.
+	for _, v := range u.dom.Children {
+		// TODO(adonovan): opt: avoid copy on final iteration; use destructive update.
+		r := make([]Value, len(renaming))
+		copy(r, renaming)
+		rename(v.Block, r, newPhis)
+	}
+}
diff --git a/ssa/literal.go b/ssa/literal.go
new file mode 100644
index 0000000..d34d43d
--- /dev/null
+++ b/ssa/literal.go
@@ -0,0 +1,139 @@
+package ssa
+
+// This file defines the Literal SSA value type.
+
+import (
+	"fmt"
+	"strconv"
+
+	"code.google.com/p/go.tools/go/exact"
+	"code.google.com/p/go.tools/go/types"
+)
+
// newLiteral returns a new literal of the specified value and type.
// val must be valid according to the specification of Literal.Value.
//
func newLiteral(val exact.Value, typ types.Type) *Literal {
	// This constructor exists to provide a single place to
	// insert logging/assertions during debugging.
	return &Literal{typ, val}
}

// intLiteral returns an untyped integer literal that evaluates to i.
func intLiteral(i int64) *Literal {
	// The type is UntypedInt so the literal is assignable wherever
	// an untyped integer constant would be.
	return newLiteral(exact.MakeInt64(i), types.Typ[types.UntypedInt])
}

// nilLiteral returns a nil literal of the specified type, which may
// be any reference type, including interfaces.
//
func nilLiteral(typ types.Type) *Literal {
	return newLiteral(exact.MakeNil(), typ)
}
+
+// zeroLiteral returns a new "zero" literal of the specified type,
+// which must not be an array or struct type: the zero values of
+// aggregates are well-defined but cannot be represented by Literal.
+//
+func zeroLiteral(t types.Type) *Literal {
+	switch t := t.(type) {
+	case *types.Basic:
+		switch {
+		case t.Info&types.IsBoolean != 0:
+			return newLiteral(exact.MakeBool(false), t)
+		case t.Info&types.IsNumeric != 0:
+			return newLiteral(exact.MakeInt64(0), t)
+		case t.Info&types.IsString != 0:
+			return newLiteral(exact.MakeString(""), t)
+		case t.Kind == types.UnsafePointer:
+			fallthrough
+		case t.Kind == types.UntypedNil:
+			return nilLiteral(t)
+		default:
+			panic(fmt.Sprint("zeroLiteral for unexpected type:", t))
+		}
+	case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
+		return nilLiteral(t)
+	case *types.NamedType:
+		return newLiteral(zeroLiteral(t.Underlying).Value, t)
+	case *types.Array, *types.Struct:
+		panic(fmt.Sprint("zeroLiteral applied to aggregate:", t))
+	}
+	panic(fmt.Sprint("zeroLiteral: unexpected ", t))
+}
+
+func (l *Literal) Name() string {
+	s := l.Value.String()
+	if l.Value.Kind() == exact.String {
+		const n = 20
+		if len(s) > n {
+			s = s[:n-3] + "..." // abbreviate
+		}
+		s = strconv.Quote(s)
+	}
+	return s + ":" + l.Type_.String()
+}
+
// Type returns the type of the literal.
func (l *Literal) Type() types.Type {
	return l.Type_
}

// Referrers returns nil: referrer tracking is only maintained for
// values local to a function, which literals are not.
func (l *Literal) Referrers() *[]Instruction {
	return nil
}

// IsNil returns true if this literal represents a typed or untyped nil value.
func (l *Literal) IsNil() bool {
	return l.Value.Kind() == exact.Nil
}
+
+// Int64 returns the numeric value of this literal truncated to fit
+// a signed 64-bit integer.
+//
+func (l *Literal) Int64() int64 {
+	switch x := l.Value; x.Kind() {
+	case exact.Int:
+		if i, ok := exact.Int64Val(x); ok {
+			return i
+		}
+		return 0
+	case exact.Float:
+		f, _ := exact.Float64Val(x)
+		return int64(f)
+	}
+	panic(fmt.Sprintf("unexpected literal value: %T", l.Value))
+}
+
+// Uint64 returns the numeric value of this literal truncated to fit
+// an unsigned 64-bit integer.
+//
+func (l *Literal) Uint64() uint64 {
+	switch x := l.Value; x.Kind() {
+	case exact.Int:
+		if u, ok := exact.Uint64Val(x); ok {
+			return u
+		}
+		return 0
+	case exact.Float:
+		f, _ := exact.Float64Val(x)
+		return uint64(f)
+	}
+	panic(fmt.Sprintf("unexpected literal value: %T", l.Value))
+}
+
// Float64 returns the numeric value of this literal truncated to fit
// a float64.  Rounding error from the conversion is ignored.
//
func (l *Literal) Float64() float64 {
	f, _ := exact.Float64Val(l.Value)
	return f
}

// Complex128 returns the complex value of this literal truncated to
// fit a complex128.  Rounding error from the conversion is ignored.
//
func (l *Literal) Complex128() complex128 {
	re, _ := exact.Float64Val(exact.Real(l.Value))
	im, _ := exact.Float64Val(exact.Imag(l.Value))
	return complex(re, im)
}
diff --git a/ssa/lvalue.go b/ssa/lvalue.go
new file mode 100644
index 0000000..358deea
--- /dev/null
+++ b/ssa/lvalue.go
@@ -0,0 +1,86 @@
+package ssa
+
+// lvalues are the union of addressable expressions and map-index
+// expressions.
+
+import (
+	"code.google.com/p/go.tools/go/types"
+)
+
// An lvalue represents an assignable location that may appear on the
// left-hand side of an assignment.  This is a generalization of a
// pointer to permit updates to elements of maps.
//
type lvalue interface {
	store(fn *Function, v Value) // stores v into the location
	load(fn *Function) Value     // loads the contents of the location
	typ() types.Type             // returns the type of the location
}

// An address is an lvalue represented by a true pointer.
type address struct {
	addr Value // the pointer to the location
}

// load emits a load through the pointer and returns the result.
func (a address) load(fn *Function) Value {
	return emitLoad(fn, a.addr)
}

// store emits a store of v through the pointer.
func (a address) store(fn *Function, v Value) {
	emitStore(fn, a.addr, v)
}

// typ returns the pointee type.
func (a address) typ() types.Type {
	return indirectType(a.addr.Type())
}
+
// An element is an lvalue represented by m[k], the location of an
// element of a map or string.  These locations are not addressable
// since pointers cannot be formed from them, but they do support
// load(), and in the case of maps, store().
//
type element struct {
	m, k Value      // map or string, and key/index
	t    types.Type // map element type or string byte type
}

// load emits a Lookup instruction m[k] and returns the result.
func (e *element) load(fn *Function) Value {
	l := &Lookup{
		X:     e.m,
		Index: e.k,
	}
	l.setType(e.t)
	return fn.emit(l)
}

// store emits a map update m[k] = v, converting v to the element type.
// (Only valid when e.m is a map; strings are immutable.)
func (e *element) store(fn *Function, v Value) {
	fn.emit(&MapUpdate{
		Map:   e.m,
		Key:   e.k,
		Value: emitConv(fn, v, e.t),
	})
}

// typ returns the element type.
func (e *element) typ() types.Type {
	return e.t
}
+
// A blank is a dummy variable whose name is "_".
// It is not reified: loads are illegal and stores are ignored.
//
type blank struct{}

// load panics: reading the blank identifier is illegal.
func (bl blank) load(fn *Function) Value {
	panic("blank.load is illegal")
}

// store discards v: assignment to "_" has no effect.
func (bl blank) store(fn *Function, v Value) {
	// no-op
}

// typ panics: the type of "_" is not currently needed.
func (bl blank) typ() types.Type {
	// This should be the type of the blank Ident; the typechecker
	// doesn't provide this yet, but fortunately, we don't need it
	// yet either.
	panic("blank.typ is unimplemented")
}
diff --git a/ssa/print.go b/ssa/print.go
new file mode 100644
index 0000000..e950c77
--- /dev/null
+++ b/ssa/print.go
@@ -0,0 +1,408 @@
+package ssa
+
+// This file implements the String() methods for all Value and
+// Instruction types.
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"io"
+	"sort"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+func (id Id) String() string {
+	if id.Pkg == nil {
+		return id.Name
+	}
+	return fmt.Sprintf("%s/%s", id.Pkg.Path, id.Name)
+}
+
+// relName returns the name of v relative to i.
+// In most cases, this is identical to v.Name(), but for references to
+// Functions (including methods) and Globals, the FullName is used
+// instead, explicitly package-qualified for cross-package references.
+//
+func relName(v Value, i Instruction) string {
+	switch v := v.(type) {
+	case *Global:
+		if i != nil && v.Pkg == i.Block().Func.Pkg {
+			return v.Name()
+		}
+		return v.FullName()
+	case *Function:
+		var pkg *Package
+		if i != nil {
+			pkg = i.Block().Func.Pkg
+		}
+		return v.fullName(pkg)
+	}
+	return v.Name()
+}
+
// Value.String()
//
// This method is provided only for debugging.
// It never appears in disassembly, which uses Value.Name().

// String returns a debug description including the dynamic type of
// the literal's exact.Value representation.
func (v *Literal) String() string {
	return fmt.Sprintf("literal %s rep=%T", v.Name(), v.Value)
}

// String returns a debug description of the parameter.
func (v *Parameter) String() string {
	return fmt.Sprintf("parameter %s : %s", v.Name(), v.Type())
}

// String returns a debug description of the capture.
func (v *Capture) String() string {
	return fmt.Sprintf("capture %s : %s", v.Name(), v.Type())
}

// String returns a debug description of the global.
func (v *Global) String() string {
	return fmt.Sprintf("global %s : %s", v.Name(), v.Type())
}

// String returns a debug description of the builtin.
func (v *Builtin) String() string {
	return fmt.Sprintf("builtin %s : %s", v.Name(), v.Type())
}

// String returns a debug description of the function.
func (v *Function) String() string {
	return fmt.Sprintf("function %s : %s", v.Name(), v.Type())
}

// FullName returns g's package-qualified name.
func (g *Global) FullName() string {
	return fmt.Sprintf("%s.%s", g.Pkg.Types.Path, g.Name_)
}
+
+// Instruction.String()
+
+func (v *Alloc) String() string {
+	op := "local"
+	if v.Heap {
+		op = "new"
+	}
+	return fmt.Sprintf("%s %s", op, indirectType(v.Type()))
+}
+
+func (v *Phi) String() string {
+	var b bytes.Buffer
+	b.WriteString("phi [")
+	for i, edge := range v.Edges {
+		if i > 0 {
+			b.WriteString(", ")
+		}
+		// Be robust against malformed CFG.
+		blockname := "?"
+		if v.Block_ != nil && i < len(v.Block_.Preds) {
+			blockname = v.Block_.Preds[i].String()
+		}
+		b.WriteString(blockname)
+		b.WriteString(": ")
+		edgeVal := "<nil>" // be robust
+		if edge != nil {
+			edgeVal = relName(edge, v)
+		}
+		b.WriteString(edgeVal)
+	}
+	b.WriteString("]")
+	if v.Comment != "" {
+		b.WriteString(" #")
+		b.WriteString(v.Comment)
+	}
+	return b.String()
+}
+
+func printCall(v *CallCommon, prefix string, instr Instruction) string {
+	var b bytes.Buffer
+	b.WriteString(prefix)
+	if !v.IsInvoke() {
+		b.WriteString(relName(v.Func, instr))
+	} else {
+		name := underlyingType(v.Recv.Type()).(*types.Interface).Methods[v.Method].Name
+		fmt.Fprintf(&b, "invoke %s.%s [#%d]", relName(v.Recv, instr), name, v.Method)
+	}
+	b.WriteString("(")
+	for i, arg := range v.Args {
+		if i > 0 {
+			b.WriteString(", ")
+		}
+		b.WriteString(relName(arg, instr))
+	}
+	if v.HasEllipsis {
+		b.WriteString("...")
+	}
+	b.WriteString(")")
+	return b.String()
+}
+
// String prints the call without a prefix or instruction context.
func (c *CallCommon) String() string {
	return printCall(c, "", nil)
}

// String prints the call instruction.
func (v *Call) String() string {
	return printCall(&v.Call, "", v)
}

// String prints "x op y".
func (v *BinOp) String() string {
	return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
}

// String prints "opx" with an optional ",ok" suffix.
func (v *UnOp) String() string {
	return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
}

// String prints the conversion with destination and source types.
func (v *Conv) String() string {
	return fmt.Sprintf("convert %s <- %s (%s)", v.Type(), v.X.Type(), relName(v.X, v))
}

// String prints the interface change with destination and source types.
func (v *ChangeInterface) String() string {
	return fmt.Sprintf("change interface %s <- %s (%s)", v.Type(), v.X.Type(), relName(v.X, v))
}

// String prints the interface construction with destination and source types.
func (v *MakeInterface) String() string {
	return fmt.Sprintf("make interface %s <- %s (%s)", v.Type(), v.X.Type(), relName(v.X, v))
}
+
+func (v *MakeClosure) String() string {
+	var b bytes.Buffer
+	fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
+	if v.Bindings != nil {
+		b.WriteString(" [")
+		for i, c := range v.Bindings {
+			if i > 0 {
+				b.WriteString(", ")
+			}
+			b.WriteString(relName(c, v))
+		}
+		b.WriteString("]")
+	}
+	return b.String()
+}
+
+func (v *MakeSlice) String() string {
+	var b bytes.Buffer
+	b.WriteString("make slice ")
+	b.WriteString(v.Type().String())
+	b.WriteString(" ")
+	b.WriteString(relName(v.Len, v))
+	b.WriteString(" ")
+	b.WriteString(relName(v.Cap, v))
+	return b.String()
+}
+
+func (v *Slice) String() string {
+	var b bytes.Buffer
+	b.WriteString("slice ")
+	b.WriteString(relName(v.X, v))
+	b.WriteString("[")
+	if v.Low != nil {
+		b.WriteString(relName(v.Low, v))
+	}
+	b.WriteString(":")
+	if v.High != nil {
+		b.WriteString(relName(v.High, v))
+	}
+	b.WriteString("]")
+	return b.String()
+}
+
// String prints "make T res" where res is the optional capacity
// reservation (empty when absent).
func (v *MakeMap) String() string {
	res := ""
	if v.Reserve != nil {
		res = relName(v.Reserve, v)
	}
	return fmt.Sprintf("make %s %s", v.Type(), res)
}

// String prints "make T size" where size is the buffer size operand.
func (v *MakeChan) String() string {
	return fmt.Sprintf("make %s %s", v.Type(), relName(v.Size, v))
}
+
+func (v *FieldAddr) String() string {
+	fields := underlyingType(indirectType(v.X.Type())).(*types.Struct).Fields
+	// Be robust against a bad index.
+	name := "?"
+	if v.Field >= 0 && v.Field < len(fields) {
+		name = fields[v.Field].Name
+	}
+	return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *Field) String() string {
+	fields := underlyingType(v.X.Type()).(*types.Struct).Fields
+	// Be robust against a bad index.
+	name := "?"
+	if v.Field >= 0 && v.Field < len(fields) {
+		name = fields[v.Field].Name
+	}
+	return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
// String prints "&x[i]" for an element-address computation.
func (v *IndexAddr) String() string {
	return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
}

// String prints "x[i]" for an array element selection.
func (v *Index) String() string {
	return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
}

// String prints "x[i]" with an optional ",ok" suffix for a map/string lookup.
func (v *Lookup) String() string {
	return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
}

// String prints "range x".
func (v *Range) String() string {
	return "range " + relName(v.X, v)
}

// String prints "next iter".
func (v *Next) String() string {
	return "next " + relName(v.Iter, v)
}

// String prints "typeassert[,ok] x.(T)".
func (v *TypeAssert) String() string {
	return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), v.AssertedType)
}

// String prints "extract tuple #i".
func (v *Extract) String() string {
	return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
}
+
+func (s *Jump) String() string {
+	// Be robust against malformed CFG.
+	blockname := "?"
+	if s.Block_ != nil && len(s.Block_.Succs) == 1 {
+		blockname = s.Block_.Succs[0].String()
+	}
+	return fmt.Sprintf("jump %s", blockname)
+}
+
+func (s *If) String() string {
+	// Be robust against malformed CFG.
+	tblockname, fblockname := "?", "?"
+	if s.Block_ != nil && len(s.Block_.Succs) == 2 {
+		tblockname = s.Block_.Succs[0].String()
+		fblockname = s.Block_.Succs[1].String()
+	}
+	return fmt.Sprintf("if %s goto %s else %s", relName(s.Cond, s), tblockname, fblockname)
+}
+
// String prints the call with a "go " prefix.
func (s *Go) String() string {
	return printCall(&s.Call, "go ", s)
}

// String prints "panic x".
func (s *Panic) String() string {
	return "panic " + relName(s.X, s)
}
+
+func (s *Ret) String() string {
+	var b bytes.Buffer
+	b.WriteString("ret")
+	for i, r := range s.Results {
+		if i == 0 {
+			b.WriteString(" ")
+		} else {
+			b.WriteString(", ")
+		}
+		b.WriteString(relName(r, s))
+	}
+	return b.String()
+}
+
// String prints "rundefers".
func (*RunDefers) String() string {
	return "rundefers"
}

// String prints "send ch <- x".
func (s *Send) String() string {
	return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
}

// String prints the call with a "defer " prefix.
func (s *Defer) String() string {
	return printCall(&s.Call, "defer ", s)
}
+
+func (s *Select) String() string {
+	var b bytes.Buffer
+	for i, st := range s.States {
+		if i > 0 {
+			b.WriteString(", ")
+		}
+		if st.Dir == ast.RECV {
+			b.WriteString("<-")
+			b.WriteString(relName(st.Chan, s))
+		} else {
+			b.WriteString(relName(st.Chan, s))
+			b.WriteString("<-")
+			b.WriteString(relName(st.Send, s))
+		}
+	}
+	non := ""
+	if !s.Blocking {
+		non = "non"
+	}
+	return fmt.Sprintf("select %sblocking [%s]", non, b.String())
+}
+
// String prints "*addr = val".
func (s *Store) String() string {
	return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
}

// String prints "m[k] = v".
func (s *MapUpdate) String() string {
	return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
}

// String prints "Package path".
func (p *Package) String() string {
	return "Package " + p.Types.Path
}
+
// DumpTo writes a human-readable summary of the package's members
// (constants, functions, types with their pointer-method sets, and
// globals) to w, sorted and column-aligned by member name.
func (p *Package) DumpTo(w io.Writer) {
	fmt.Fprintf(w, "Package %s:\n", p.Types.Path)

	// Collect member names and the width needed to align the columns.
	var names []string
	maxname := 0
	for name := range p.Members {
		if l := len(name); l > maxname {
			maxname = l
		}
		names = append(names, name)
	}

	sort.Strings(names)
	for _, name := range names {
		switch mem := p.Members[name].(type) {
		case *Constant:
			// NOTE(review): prints both the map key and mem.Name();
			// these look like they should always be identical — confirm.
			fmt.Fprintf(w, "  const %-*s %s = %s\n", maxname, name, mem.Name(), mem.Value.Name())

		case *Function:
			fmt.Fprintf(w, "  func  %-*s %s\n", maxname, name, mem.Type())

		case *Type:
			fmt.Fprintf(w, "  type  %-*s %s\n", maxname, name, mem.NamedType.Underlying)
			// We display only PtrMethods since its keys
			// are a superset of Methods' keys, though the
			// methods themselves may differ,
			// e.g. different bridge methods.
			// TODO(adonovan): show pointerness of receivers.
			var keys ids
			for id := range mem.PtrMethods {
				keys = append(keys, id)
			}
			sort.Sort(keys)
			for _, id := range keys {
				method := mem.PtrMethods[id]
				fmt.Fprintf(w, "    method %s %s\n", id, method.Signature)
			}

		case *Global:
			fmt.Fprintf(w, "  var   %-*s %s\n", maxname, name, mem.Type())

		}
	}
}
+
// commaOk returns the ",ok" suffix used in disassembly of comma-ok
// instructions when x is true, and the empty string otherwise.
func commaOk(x bool) string {
	if !x {
		return ""
	}
	return ",ok"
}
diff --git a/ssa/promote.go b/ssa/promote.go
new file mode 100644
index 0000000..89f9d95
--- /dev/null
+++ b/ssa/promote.go
@@ -0,0 +1,447 @@
+package ssa
+
+// This file defines algorithms related to "promotion" of field and
+// method selector expressions e.x, such as desugaring implicit field
+// and method selections, method-set computation, and construction of
+// synthetic "bridge" methods.
+
+import (
+	"fmt"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+// anonFieldPath is a linked list of the anonymous fields that a
+// breadth-first traversal has entered, rightmost (outermost) first.
+// e.g. "e.f" denoting "e.A.B.C.f" would have a path [C, B, A].
+// Common tails may be shared.
+//
+// It is used by various "promotion"-related algorithms.
+//
+type anonFieldPath struct {
+	tail  *anonFieldPath
+	index int // index of field within enclosing types.Struct.Fields
+	field *types.Field
+}
+
+// contains reports whether field f appears anywhere on the path p.
+func (p *anonFieldPath) contains(f *types.Field) bool {
+	for node := p; node != nil; node = node.tail {
+		if node.field == f {
+			return true
+		}
+	}
+	return false
+}
+
+// reverse returns the linked list reversed, as a slice.
+func (p *anonFieldPath) reverse() []*anonFieldPath {
+	n := 0
+	for q := p; q != nil; q = q.tail {
+		n++
+	}
+	s := make([]*anonFieldPath, n)
+	n = 0
+	for ; p != nil; p = p.tail {
+		s[len(s)-1-n] = p
+		n++
+	}
+	return s
+}
+
+// isIndirect reports whether any field on the path has pointer type,
+// i.e. whether following the path performs a pointer indirection.
+func (p *anonFieldPath) isIndirect() bool {
+	for node := p; node != nil; node = node.tail {
+		if isPointer(node.field.Type) {
+			return true
+		}
+	}
+	return false
+}
+
+// Method Set construction ----------------------------------------
+
+// A candidate is a method eligible for promotion: a method of an
+// abstract (interface) or concrete (anonymous struct or named) type,
+// along with the anonymous field path via which it is implicitly
+// reached.  If there is exactly one candidate for a given id, it will
+// be promoted to membership of the original type's method-set.
+//
+// Candidates with path=nil are trivially members of the original
+// type's method-set.
+//
+type candidate struct {
+	method   *types.Method  // method object of abstract or concrete type
+	concrete *Function      // actual method implementation (iff concrete); nil for interface methods
+	path     *anonFieldPath // desugared selector path; nil for a direct (unpromoted) method
+}
+
+// String returns a debugging description of the candidate: the
+// implicit selector path followed by the method name, e.g. "@.B.C.f".
+func (c candidate) String() string {
+	// Inefficient (quadratic prepending), but only used for debugging.
+	path := ""
+	for p := c.path; p != nil; p = p.tail {
+		path = "." + p.field.Name + path
+	}
+	return "@" + path + "." + c.method.Name
+}
+
+// ptrRecv reports whether this candidate's concrete method has a
+// pointer receiver.  It is always false for abstract (interface)
+// methods, which have no concrete implementation.
+func (c candidate) ptrRecv() bool {
+	if c.concrete == nil {
+		return false
+	}
+	return isPointer(c.concrete.Signature.Recv.Type)
+}
+
+// MethodSet returns the method set for type typ,
+// building bridge methods as needed for promoted methods.
+// A nil result indicates an empty set.
+//
+// Thread-safe.
+func (p *Program) MethodSet(typ types.Type) MethodSet {
+	if !canHaveConcreteMethods(typ, true) {
+		return nil
+	}
+
+	p.methodSetsMu.Lock()
+	defer p.methodSetsMu.Unlock()
+
+	// Return the memoized set, building it on first request.
+	//
+	// TODO(adonovan): Using Types as map keys doesn't properly
+	// de-dup.  e.g. *NamedType are canonical but *Struct and
+	// others are not.  Need to de-dup based on using a two-level
+	// hash-table with hash function types.Type.String and
+	// equivalence relation types.IsIdentical.
+	ms := p.methodSets[typ]
+	if ms == nil {
+		ms = buildMethodSet(p, typ)
+		p.methodSets[typ] = ms
+	}
+	return ms
+}
+
+// buildMethodSet computes the concrete method set for type typ.
+// It is the implementation of Program.MethodSet.
+//
+func buildMethodSet(prog *Program, typ types.Type) MethodSet {
+	if prog.mode&LogSource != 0 {
+		defer logStack("buildMethodSet %s %T", typ, typ)()
+	}
+
+	// cands maps ids (field and method names) encountered at any
+	// level of the breadth-first traversal to a unique
+	// promotion candidate.  A nil value indicates a "blocked" id
+	// (i.e. a field or ambiguous method).
+	//
+	// nextcands is the same but carries just the level in progress.
+	cands, nextcands := make(map[Id]*candidate), make(map[Id]*candidate)
+
+	var next, list []*anonFieldPath
+	list = append(list, nil) // hack: nil means "use typ"
+
+	// For each level of the type graph...
+	for len(list) > 0 {
+		// Invariant: next=[], nextcands={}.
+
+		// Collect selectors from one level into 'nextcands'.
+		// Record the next levels into 'next'.
+		for _, node := range list {
+			t := typ // first time only
+			if node != nil {
+				t = node.field.Type
+			}
+			t = deref(t)
+
+			if nt, ok := t.(*types.NamedType); ok {
+				// Declared methods of a named type are candidates.
+				for _, meth := range nt.Methods {
+					addCandidate(nextcands, IdFromQualifiedName(meth.QualifiedName), meth, prog.concreteMethods[meth], node)
+				}
+				t = nt.Underlying
+			}
+
+			switch t := t.(type) {
+			case *types.Interface:
+				// Interface methods are abstract candidates (no concrete fn).
+				for _, meth := range t.Methods {
+					addCandidate(nextcands, IdFromQualifiedName(meth.QualifiedName), meth, nil, node)
+				}
+
+			case *types.Struct:
+				for i, f := range t.Fields {
+					nextcands[IdFromQualifiedName(f.QualifiedName)] = nil // a field: block id
+					// Queue up anonymous fields for next iteration.
+					// Break cycles to ensure termination.
+					if f.IsAnonymous && !node.contains(f) {
+						next = append(next, &anonFieldPath{node, i, f})
+					}
+				}
+			}
+		}
+
+		// Examine collected selectors.
+		// Promote unique, non-blocked ones to cands.
+		for id, cand := range nextcands {
+			delete(nextcands, id)
+			if cand == nil {
+				// Update cands so we ignore it at all deeper levels.
+				// Don't clobber existing (shallower) binding!
+				if _, ok := cands[id]; !ok {
+					cands[id] = nil // block id
+				}
+				continue
+			}
+			if _, ok := cands[id]; ok {
+				// Ignore candidate: a shallower binding exists.
+			} else {
+				cands[id] = cand
+			}
+		}
+		list, next = next, list[:0] // reuse array
+	}
+
+	// Build method sets and bridge methods.
+	mset := make(MethodSet)
+	for id, cand := range cands {
+		if cand == nil {
+			continue // blocked; ignore
+		}
+		if cand.ptrRecv() && !(isPointer(typ) || cand.path.isIndirect()) {
+			// A candidate concrete method f with receiver
+			// *C is promoted into the method set of
+			// (non-pointer) E iff the implicit path selection
+			// is indirect, e.g. e.A->B.C.f
+			continue
+		}
+		var method *Function
+		if cand.path == nil {
+			// Trivial member of method-set; no bridge needed.
+			method = cand.concrete
+		} else {
+			method = makeBridgeMethod(prog, typ, cand)
+		}
+		if method == nil {
+			panic("unexpected nil method in method set")
+		}
+		mset[id] = method
+	}
+	return mset
+}
+
+// addCandidate records the promotion candidate (method, node) in m[id].
+// If m[id] was already present (whether nil or not), the id becomes
+// "blocked" (nil): two candidates at the same depth are ambiguous.
+// If method denotes a concrete method, concrete is its implementation.
+//
+func addCandidate(m map[Id]*candidate, id Id, method *types.Method, concrete *Function, node *anonFieldPath) {
+	prev, found := m[id]
+	if !found {
+		// First sighting of this id at this level: a viable candidate.
+		m[id] = &candidate{method, concrete, node}
+		return
+	}
+	if prev != nil {
+		// Two candidates for same selector: ambiguous; block it.
+		m[id] = nil
+	}
+	// Otherwise the id was already blocked; leave it so.
+}
+
+// makeBridgeMethod creates a synthetic Function that delegates to a
+// "promoted" method.  For example, given these decls:
+//
+//    type A struct {B}
+//    type B struct {*C}
+//    type C ...
+//    func (*C) f()
+//
+// then makeBridgeMethod(typ=A, cand={method:(*C).f, path:[B,*C]}) will
+// synthesize this bridge method:
+//
+//    func (a A) f() { return a.B.C->f() }
+//
+// prog is the program to which the synthesized method will belong.
+// typ is the receiver type of the bridge method.  cand is the
+// candidate method to be promoted; it may be concrete or an interface
+// method.
+//
+func makeBridgeMethod(prog *Program, typ types.Type, cand *candidate) *Function {
+	sig := *cand.method.Type // make a copy, sharing underlying Values
+	sig.Recv = &types.Var{Name: "recv", Type: typ}
+
+	if prog.mode&LogSource != 0 {
+		defer logStack("makeBridgeMethod %s, %s, type %s", typ, cand, &sig)()
+	}
+
+	fn := &Function{
+		Name_:     cand.method.Name,
+		Signature: &sig,
+		Prog:      prog,
+	}
+	fn.startBody()
+	fn.addSpilledParam(sig.Recv)
+	createParams(fn)
+
+	// Each bridge method performs a sequence of selections,
+	// then tailcalls the promoted method.
+	// We use pointer arithmetic (FieldAddr possibly followed by
+	// Load) in preference to value extraction (Field possibly
+	// preceded by Load).
+	var v Value = fn.Locals[0] // spilled receiver
+	if isPointer(typ) {
+		v = emitLoad(fn, v)
+	}
+	// Iterate over selections e.A.B.C.f in the natural order [A,B,C].
+	for _, p := range cand.path.reverse() {
+		// Loop invariant: v holds a pointer to a struct.
+		if _, ok := underlyingType(indirectType(v.Type())).(*types.Struct); !ok {
+			panic(fmt.Sprint("not a *struct: ", v.Type(), p.field.Type))
+		}
+		sel := &FieldAddr{
+			X:     v,
+			Field: p.index,
+		}
+		sel.setType(pointer(p.field.Type))
+		v = fn.emit(sel)
+		// Dereference intermediate pointer fields implicitly.
+		if isPointer(p.field.Type) {
+			v = emitLoad(fn, v)
+		}
+	}
+	// The promoted method has a value receiver: load the struct value.
+	if !cand.ptrRecv() {
+		v = emitLoad(fn, v)
+	}
+
+	// Tailcall the promoted method: statically if concrete,
+	// via the interface method table otherwise.
+	var c Call
+	if cand.concrete != nil {
+		c.Call.Func = cand.concrete
+		fn.Pos = c.Call.Func.(*Function).Pos // TODO(adonovan): fix: wrong.
+		c.Call.Pos = fn.Pos                  // TODO(adonovan): fix: wrong.
+		c.Call.Args = append(c.Call.Args, v)
+	} else {
+		c.Call.Recv = v
+		c.Call.Method = 0
+	}
+	emitTailCall(fn, &c)
+	fn.finishBody()
+	return fn
+}
+
+// createParams creates parameters for bridge method fn based on its
+// Signature.Params.  Unnamed parameters receive synthetic names
+// ("argN").  For a variadic signature, the final parameter's type is
+// changed to the corresponding slice type, as the body sees it.
+func createParams(fn *Function) {
+	var last *Parameter
+	for i, p := range fn.Signature.Params {
+		name := p.Name
+		if name == "" {
+			name = fmt.Sprintf("arg%d", i) // parameter was unnamed in the source
+		}
+		last = fn.addParam(name, p.Type)
+	}
+	if fn.Signature.IsVariadic {
+		last.Type_ = &types.Slice{Elt: last.Type_}
+	}
+}
+
+// Thunks for standalone interface methods ----------------------------------------
+
+// makeImethodThunk returns a synthetic thunk function permitting a
+// method id of interface typ to be called like a standalone function,
+// e.g.:
+//
+//   type I interface { f(x int) R }
+//   m := I.f  // thunk
+//   var i I
+//   m(i, 0)
+//
+// The thunk is defined as if by:
+//
+//   func I.f(i I, x int, ...) R {
+//     return i.f(x, ...)
+//   }
+//
+// The generated thunks do not belong to any package.  (Arguably they
+// belong in the package that defines the interface, but we have no
+// way to determine that on demand; we'd have to create all possible
+// thunks a priori.)
+//
+// TODO(adonovan): opt: currently the stub is created even when used
+// in call position: I.f(i, 0).  Clearly this is suboptimal.
+//
+// TODO(adonovan): memoize creation of these functions in the Program.
+//
+func makeImethodThunk(prog *Program, typ types.Type, id Id) *Function {
+	if prog.mode&LogSource != 0 {
+		defer logStack("makeImethodThunk %s.%s", typ, id)()
+	}
+	itf := underlyingType(typ).(*types.Interface)
+	index, meth := methodIndex(itf, itf.Methods, id)
+	sig := *meth.Type // copy; shared Values
+	fn := &Function{
+		Name_:     meth.Name,
+		Signature: &sig,
+		Prog:      prog,
+	}
+	// TODO(adonovan): set fn.Pos to location of interface method ast.Field.
+	fn.startBody()
+	fn.addParam("recv", typ)
+	createParams(fn)
+	// Tailcall the interface method on the receiver parameter.
+	var c Call
+	c.Call.Method = index // index of id within the interface's methods
+	c.Call.Recv = fn.Params[0]
+	emitTailCall(fn, &c)
+	fn.finishBody()
+	return fn
+}
+
+// Implicit field promotion ----------------------------------------
+
+// For a given struct type and (promoted) field Id, findEmbeddedField
+// returns the path of implicit anonymous field selections, and the
+// field index of the explicit (=outermost) selection.
+//
+// TODO(gri): if go/types/operand.go's lookupFieldBreadthFirst were to
+// record (e.g. call a client-provided callback) the implicit field
+// selection path discovered for a particular ast.SelectorExpr, we could
+// eliminate this function.
+//
+func findPromotedField(st *types.Struct, id Id) (*anonFieldPath, int) {
+	// visited records the types that have been searched already.
+	// Invariant: keys are all *types.NamedType.
+	// (types.Type is not a sound map key in general.)
+	visited := make(map[types.Type]bool)
+
+	// Seed the worklist with the struct's own anonymous fields.
+	var list, next []*anonFieldPath
+	for i, f := range st.Fields {
+		if f.IsAnonymous {
+			list = append(list, &anonFieldPath{nil, i, f})
+		}
+	}
+
+	// Search the current level if there is any work to do and collect
+	// embedded types of the next lower level in the next list.
+	for {
+		// look for name in all types at this level
+		for _, node := range list {
+			// Anonymous fields always have named (or pointer to
+			// named) types, so this assertion cannot fail.
+			typ := deref(node.field.Type).(*types.NamedType)
+			if visited[typ] {
+				continue
+			}
+			visited[typ] = true
+
+			switch typ := typ.Underlying.(type) {
+			case *types.Struct:
+				// Prefer a direct match at this level...
+				for i, f := range typ.Fields {
+					if IdFromQualifiedName(f.QualifiedName) == id {
+						return node, i
+					}
+				}
+				// ...before descending into anonymous fields.
+				for i, f := range typ.Fields {
+					if f.IsAnonymous {
+						next = append(next, &anonFieldPath{node, i, f})
+					}
+				}
+
+			}
+		}
+
+		if len(next) == 0 {
+			panic("field not found: " + id.String())
+		}
+
+		// No match so far.
+		list, next = next, list[:0] // reuse arrays
+	}
+	// Unreachable: the loop exits only via return or panic.
+	panic("unreachable")
+}
diff --git a/ssa/sanity.go b/ssa/sanity.go
new file mode 100644
index 0000000..9a6a81d
--- /dev/null
+++ b/ssa/sanity.go
@@ -0,0 +1,307 @@
+package ssa
+
+// An optional pass for sanity-checking invariants of the SSA representation.
+// Currently it checks CFG invariants but little at the instruction level.
+
+import (
+	"fmt"
+	"io"
+	"os"
+)
+
+// sanity holds the state of a single sanity-checking pass.
+type sanity struct {
+	reporter io.Writer   // destination for diagnostics
+	fn       *Function   // function currently under scrutiny
+	block    *BasicBlock // block currently under scrutiny; nil between blocks
+	insane   bool        // set if any error (as opposed to warning) was reported
+}
+
+// SanityCheck performs integrity checking of the SSA representation
+// of function fn, returning true if it was valid.  Diagnostics are
+// written to reporter, or to os.Stderr if reporter is nil.  Some
+// diagnostics are mere warnings and do not imply an invalid result.
+//
+// Sanity checking is intended to facilitate the debugging of code
+// transformation passes.
+//
+func SanityCheck(fn *Function, reporter io.Writer) bool {
+	s := &sanity{reporter: reporter}
+	if s.reporter == nil {
+		s.reporter = os.Stderr
+	}
+	return s.checkFunction(fn)
+}
+
+// MustSanityCheck is like SanityCheck but panics instead of returning
+// a negative result.
+//
+func MustSanityCheck(fn *Function, reporter io.Writer) {
+	if ok := SanityCheck(fn, reporter); !ok {
+		panic("SanityCheck failed")
+	}
+}
+
+// diagnostic writes a formatted message to s.reporter, prefixed by
+// prefix ("Error" or "Warning") and the function (and block, if any)
+// currently under scrutiny.
+func (s *sanity) diagnostic(prefix, format string, args ...interface{}) {
+	fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn.FullName())
+	if s.block != nil {
+		fmt.Fprintf(s.reporter, ", block %s", s.block)
+	}
+	fmt.Fprintf(s.reporter, ": "+format+"\n", args...)
+}
+
+// errorf reports a sanity error and marks the function insane,
+// causing the overall check to fail.
+func (s *sanity) errorf(format string, args ...interface{}) {
+	s.insane = true
+	s.diagnostic("Error", format, args...)
+}
+
+// warnf reports a warning; unlike errorf, it does not affect the
+// result of the check.
+func (s *sanity) warnf(format string, args ...interface{}) {
+	s.diagnostic("Warning", format, args...)
+}
+
+// findDuplicate returns an arbitrary basic block that occurs more
+// than once in blocks, or nil if all elements are distinct.
+func findDuplicate(blocks []*BasicBlock) *BasicBlock {
+	if len(blocks) < 2 {
+		return nil
+	}
+	// Fast path: check the first pair directly.
+	if blocks[0] == blocks[1] {
+		return blocks[0]
+	}
+	// Slow path: hash all elements.
+	seen := make(map[*BasicBlock]bool)
+	for _, b := range blocks {
+		if seen[b] {
+			return b
+		}
+		seen[b] = true
+	}
+	return nil
+}
+
+// checkInstr checks invariants of instr, a non-final instruction at
+// index idx within the current block s.block.
+func (s *sanity) checkInstr(idx int, instr Instruction) {
+	switch instr := instr.(type) {
+	case *If, *Jump, *Ret, *Panic:
+		s.errorf("control flow instruction not at end of block")
+	case *Phi:
+		if idx == 0 {
+			// It suffices to apply this check to just the first phi node.
+			if dup := findDuplicate(s.block.Preds); dup != nil {
+				s.errorf("phi node in block with duplicate predecessor %s", dup)
+			}
+		} else {
+			// Phi nodes must form a contiguous prefix of the block.
+			prev := s.block.Instrs[idx-1]
+			if _, ok := prev.(*Phi); !ok {
+				s.errorf("Phi instruction follows a non-Phi: %T", prev)
+			}
+		}
+		if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
+			s.errorf("phi node has %d edges but %d predecessors", ne, np)
+
+		} else {
+			for i, e := range instr.Edges {
+				if e == nil {
+					s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
+				}
+			}
+		}
+
+	case *Alloc:
+		// Non-heap (stack) allocs must be registered in Function.Locals.
+		if !instr.Heap {
+			found := false
+			for _, l := range s.fn.Locals {
+				if l == instr {
+					found = true
+					break
+				}
+			}
+			if !found {
+				s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr)
+			}
+		}
+
+	case *BinOp:
+	case *Call:
+	case *ChangeInterface:
+	case *Conv:
+	case *Defer:
+	case *Extract:
+	case *Field:
+	case *FieldAddr:
+	case *Go:
+	case *Index:
+	case *IndexAddr:
+	case *Lookup:
+	case *MakeChan:
+	case *MakeClosure:
+		// TODO(adonovan): check FreeVars count matches.
+	case *MakeInterface:
+	case *MakeMap:
+	case *MakeSlice:
+	case *MapUpdate:
+	case *Next:
+	case *Range:
+	case *RunDefers:
+	case *Select:
+	case *Send:
+	case *Slice:
+	case *Store:
+	case *TypeAssert:
+	case *UnOp:
+		// TODO(adonovan): implement checks.
+	default:
+		panic(fmt.Sprintf("Unknown instruction type: %T", instr))
+	}
+}
+
+// checkFinalInstr checks invariants of instr, the final (control
+// transfer) instruction of the current block, at index idx.
+func (s *sanity) checkFinalInstr(idx int, instr Instruction) {
+	switch instr.(type) {
+	case *If:
+		if nsuccs := len(s.block.Succs); nsuccs != 2 {
+			s.errorf("If-terminated block has %d successors; expected 2", nsuccs)
+			return
+		}
+		// Duplicate edges between the same pair of blocks are illegal.
+		if s.block.Succs[0] == s.block.Succs[1] {
+			s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0])
+			return
+		}
+
+	case *Jump:
+		if nsuccs := len(s.block.Succs); nsuccs != 1 {
+			s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs)
+			return
+		}
+
+	case *Ret:
+		if nsuccs := len(s.block.Succs); nsuccs != 0 {
+			s.errorf("Ret-terminated block has %d successors; expected none", nsuccs)
+			return
+		}
+		// TODO(adonovan): check number and types of results
+
+	case *Panic:
+		if nsuccs := len(s.block.Succs); nsuccs != 0 {
+			s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
+			return
+		}
+
+	default:
+		s.errorf("non-control flow instruction at end of block")
+	}
+}
+
+// checkBlock checks invariants of block b, which appears at position
+// index within its function's Blocks slice.
+func (s *sanity) checkBlock(b *BasicBlock, index int) {
+	s.block = b
+
+	if b.Index != index {
+		s.errorf("block has incorrect Index %d", b.Index)
+	}
+	if b.Func != s.fn {
+		s.errorf("block has incorrect Func %s", b.Func.FullName())
+	}
+
+	// Check all blocks are reachable.
+	// (The entry block is always implicitly reachable.)
+	if index > 0 && len(b.Preds) == 0 {
+		s.warnf("unreachable block")
+		if b.Instrs == nil {
+			// Since this block is about to be pruned,
+			// tolerating transient problems in it
+			// simplifies other optimizations.
+			return
+		}
+	}
+
+	// Check predecessor and successor relations are dual,
+	// and that all blocks in CFG belong to same function.
+	for _, a := range b.Preds {
+		found := false
+		for _, bb := range a.Succs {
+			if bb == b {
+				found = true
+				break
+			}
+		}
+		if !found {
+			s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs)
+		}
+		if a.Func != s.fn {
+			s.errorf("predecessor %s belongs to different function %s", a, a.Func.FullName())
+		}
+	}
+	for _, c := range b.Succs {
+		found := false
+		for _, bb := range c.Preds {
+			if bb == b {
+				found = true
+				break
+			}
+		}
+		if !found {
+			s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds)
+		}
+		if c.Func != s.fn {
+			s.errorf("successor %s belongs to different function %s", c, c.Func.FullName())
+		}
+	}
+
+	// Check each instruction is sane.
+	// TODO(adonovan): check Instruction invariants:
+	// - check Operands is dual to Value.Referrers.
+	// - check all Operands that are also Instructions belong to s.fn too
+	//   (and for bonus marks, that their block dominates block b).
+	n := len(b.Instrs)
+	if n == 0 {
+		s.errorf("basic block contains no instructions")
+	}
+	for j, instr := range b.Instrs {
+		if instr == nil {
+			s.errorf("nil instruction at index %d", j)
+			continue
+		}
+		if b2 := instr.Block(); b2 == nil {
+			s.errorf("nil Block() for instruction at index %d", j)
+			continue
+		} else if b2 != b {
+			s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j)
+			continue
+		}
+		// The final instruction must be a control transfer;
+		// all others must not be.
+		if j < n-1 {
+			s.checkInstr(j, instr)
+		} else {
+			s.checkFinalInstr(j, instr)
+		}
+	}
+}
+
+// checkFunction checks invariants of fn and all of its blocks,
+// returning true if no errors (as opposed to warnings) were found.
+func (s *sanity) checkFunction(fn *Function) bool {
+	// TODO(adonovan): check Function invariants:
+	// - check owning Package (if any) contains this (possibly anon) function
+	// - check params match signature
+	// - check transient fields are nil
+	// - warn if any fn.Locals do not appear among block instructions.
+	s.fn = fn
+	if fn.Prog == nil {
+		s.errorf("nil Prog")
+	}
+	// Every Local must be a stack (non-heap) allocation.
+	for i, l := range fn.Locals {
+		if l.Heap {
+			s.errorf("Local %s at index %d has Heap flag set", l.Name(), i)
+		}
+	}
+	if fn.Blocks != nil && len(fn.Blocks) == 0 {
+		// Function _had_ blocks (so it's not external) but
+		// they were "optimized" away, even the entry block.
+		s.errorf("Blocks slice is non-nil but empty")
+	}
+	for i, b := range fn.Blocks {
+		if b == nil {
+			s.warnf("nil *BasicBlock at f.Blocks[%d]", i)
+			continue
+		}
+		s.checkBlock(b, i)
+	}
+	s.block = nil
+	s.fn = nil
+	return !s.insane
+}
diff --git a/ssa/ssa.go b/ssa/ssa.go
new file mode 100644
index 0000000..a25766e
--- /dev/null
+++ b/ssa/ssa.go
@@ -0,0 +1,1400 @@
+package ssa
+
+// This package defines a high-level intermediate representation for
+// Go programs using static single-assignment (SSA) form.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"sync"
+
+	"code.google.com/p/go.tools/go/exact"
+	"code.google.com/p/go.tools/go/types"
+)
+
+// A Program is a partial or complete Go program converted to SSA form.
+// Each Builder creates and populates a single Program during its
+// lifetime.
+//
+type Program struct {
+	Files    *token.FileSet            // position information for the files of this Program
+	Packages map[string]*Package       // all loaded Packages, keyed by import path
+	Builtins map[types.Object]*Builtin // all built-in functions, keyed by typechecker objects.
+
+	methodSets      map[types.Type]MethodSet    // concrete method sets for all needed types  [TODO(adonovan): de-dup]
+	methodSetsMu    sync.Mutex                  // serializes all accesses to methodSets
+	concreteMethods map[*types.Method]*Function // maps named concrete methods to their code
+	mode            BuilderMode                 // set of mode bits
+}
+
+// A Package is a single analyzed Go package containing Members for
+// all package-level functions, variables, constants and types it
+// declares.  These may be accessed directly via Members, or via the
+// type-specific accessor methods Func, Type, Var and Const.
+//
+type Package struct {
+	Prog    *Program          // the owning program
+	Types   *types.Package    // the type checker's package object for this package.
+	Members map[string]Member // all exported and unexported members of the package
+	Init    *Function         // the package's (concatenated) init function
+
+	// These fields are available between package creation and SSA
+	// building, but are then cleared unless Context.RetainAST(pkg).
+	Files    []*ast.File // abstract syntax for the package's files
+	TypeInfo             // type-checker intermediate results
+
+	// The following fields are set transiently during building,
+	// then cleared.
+	started  int32                   // atomically tested and set at start of build phase
+	nTo1Vars map[*ast.ValueSpec]bool // set of n:1 ValueSpecs already built
+}
+
+// A Member is a member of a Go package, implemented by *Constant,
+// *Global, *Function, or *Type; they are created by package-level
+// const, var, func and type declarations respectively.
+//
+type Member interface {
+	Name() string      // the declared name of the package member
+	String() string    // human-readable information about the value
+	Posn() token.Pos   // position of member's declaration, if known
+	Type() types.Type  // the type of the package member
+	ImplementsMember() // dummy method to indicate the "implements" relation.
+}
+
+// An Id identifies the name of a field of a struct type, or the name
+// of a method of an interface or a named type.
+//
+// For exported names, i.e. those beginning with a Unicode upper-case
+// letter, a simple string is unambiguous.
+//
+// However, a method set or struct may contain multiple unexported
+// names with identical spelling that are logically distinct because
+// they originate in different packages.  Unexported names must
+// therefore be disambiguated by their package too.
+//
+// The Pkg field of an Id is therefore nil iff the name is exported.
+//
+// This type is suitable for use as a map key because the equivalence
+// relation == is consistent with identifier equality.
+type Id struct {
+	Pkg  *types.Package
+	Name string
+}
+
+// A MethodSet contains all the methods for a particular type.
+// The method sets for T and *T are distinct entities.
+// The methods for a non-pointer type T all have receiver type T, but
+// the methods for pointer type *T may have receiver type *T or T.
+//
+type MethodSet map[Id]*Function
+
+// A Type is a Member of a Package representing the name, underlying
+// type and method set of a named type declared at package scope.
+//
+type Type struct {
+	NamedType  *types.NamedType // the named type N this member declares
+	Methods    MethodSet        // concrete method set of the named type N
+	PtrMethods MethodSet        // concrete method set of (*N); its keys are a superset of Methods' keys
+}
+
+// A Constant is a Member of Package representing a package-level
+// constant value.
+//
+type Constant struct {
+	Name_ string    // declared name of the constant
+	Value *Literal  // the constant's value and type
+	Pos   token.Pos // position of the declaration, if known
+}
+
+// An SSA value that can be referenced by an instruction.
+type Value interface {
+	// Name returns the name of this value, and determines how
+	// this Value appears when used as an operand of an
+	// Instruction.
+	//
+	// This is the same as the source name for Parameters,
+	// Builtins, Functions, Captures, Globals and some Allocs.
+	// For literals, it is a representation of the literal's value
+	// and type.  For all other Values this is the name of the
+	// virtual register defined by the instruction.
+	//
+	// The name of an SSA Value is not semantically significant,
+	// and may not even be unique within a function.
+	Name() string
+
+	// If this value is an Instruction, String returns its
+	// disassembled form; otherwise it returns unspecified
+	// human-readable information about the Value, such as its
+	// kind, name and type.
+	String() string
+
+	// Type returns the type of this value.  Many instructions
+	// (e.g. IndexAddr) change the behaviour depending on the
+	// types of their operands.
+	Type() types.Type
+
+	// Referrers returns the list of instructions that have this
+	// value as one of their operands; it may contain duplicates
+	// if an instruction has a repeated operand.
+	//
+	// Referrers actually returns a pointer through which the
+	// caller may perform mutations to the object's state.
+	//
+	// Referrers is currently only defined for the function-local
+	// values Capture, Parameter and all value-defining instructions.
+	// It returns nil for Function, Builtin, Literal and Global.
+	//
+	// Instruction.Operands contains the inverse of this relation.
+	Referrers() *[]Instruction
+
+	// Dummy method to indicate the "implements" relation.
+	ImplementsValue()
+}
+
+// An Instruction is an SSA instruction that computes a new Value or
+// has some effect.
+//
+// An Instruction that defines a value (e.g. BinOp) also implements
+// the Value interface; an Instruction that only has an effect (e.g. Store)
+// does not.
+//
+type Instruction interface {
+	// String returns the disassembled form of this value.  e.g.
+	//
+	// Examples of Instructions that define a Value:
+	// e.g.  "x + y"     (BinOp)
+	//       "len([])"   (Call)
+	// Note that the name of the Value is not printed.
+	//
+	// Examples of Instructions that do not define a Value:
+	// e.g.  "ret x"     (Ret)
+	//       "*y = x"    (Store)
+	//
+	// (This separation is useful for some analyses which
+	// distinguish the operation from the value it
+	// defines. e.g. 'y = local int' is both an allocation of
+	// memory 'local int' and a definition of a pointer y.)
+	String() string
+
+	// Block returns the basic block to which this instruction
+	// belongs.
+	Block() *BasicBlock
+
+	// SetBlock sets the basic block to which this instruction
+	// belongs.
+	SetBlock(*BasicBlock)
+
+	// Operands returns the operands of this instruction: the
+	// set of Values it references.
+	//
+	// Specifically, it appends their addresses to rands, a
+	// user-provided slice, and returns the resulting slice,
+	// permitting avoidance of memory allocation.
+	//
+	// The operands are appended in undefined order; the addresses
+	// are always non-nil but may point to a nil Value.  Clients
+	// may store through the pointers, e.g. to effect a value
+	// renaming.
+	//
+	// Value.Referrers is a subset of the inverse of this
+	// relation.  (Referrers are not tracked for all types of
+	// Values.)
+	Operands(rands []*Value) []*Value
+
+	// Dummy method to indicate the "implements" relation.
+	ImplementsInstruction()
+}
+
+// Function represents the parameters, results and code of a function
+// or method.
+//
+// If Blocks is nil, this indicates an external function for which no
+// Go source code is available.  In this case, Captures and Locals
+// will be nil too.  Clients performing whole-program analysis must
+// handle external functions specially.
+//
+// Functions are immutable values; they do not have addresses.
+//
+// Blocks[0] is the function entry point; block order is not otherwise
+// semantically significant, though it may affect the readability of
+// the disassembly.
+//
+// A nested function that refers to one or more lexically enclosing
+// local variables ("free variables") has Capture parameters.  Such
+// functions cannot be called directly but require a value created by
+// MakeClosure which, via its Bindings, supplies values for these
+// parameters.  Captures are always addresses.
+//
+// If the function is a method (Signature.Recv != nil) then the first
+// element of Params is the receiver parameter.
+//
+// Type() returns the function's Signature.
+//
+type Function struct {
+	Name_     string           // declared name of the function
+	Signature *types.Signature // the type of the function
+
+	Pos       token.Pos    // location of the definition
+	Enclosing *Function    // enclosing function if anon; nil if global
+	Pkg       *Package     // enclosing package for Go source functions; otherwise nil
+	Prog      *Program     // enclosing program
+	Params    []*Parameter // function parameters; for methods, includes receiver
+	FreeVars  []*Capture   // free variables whose values must be supplied by closure
+	Locals    []*Alloc     // local variable allocations (Heap flag unset); nil => external
+	Blocks    []*BasicBlock // basic blocks of the function; nil => external
+	AnonFuncs []*Function   // anonymous functions directly beneath this one
+
+	// The following fields are set transiently during building,
+	// then cleared.
+	currentBlock *BasicBlock             // where to emit code
+	objects      map[types.Object]Value  // addresses of local variables
+	namedResults []*Alloc                // tuple of named results
+	syntax       *funcSyntax             // abstract syntax trees for Go source functions
+	targets      *targets                // linked stack of branch targets
+	lblocks      map[*ast.Object]*lblock // labelled blocks
+}
+
+// An SSA basic block.
+//
+// The final element of Instrs is always an explicit transfer of
+// control (If, Jump, Ret or Panic).
+//
+// A block may contain no Instructions only if it is unreachable,
+// i.e. Preds is nil.  Empty blocks are typically pruned.
+//
+// BasicBlocks and their Preds/Succs relation form a (possibly cyclic)
+// graph independent of the SSA Value graph.  It is illegal for
+// multiple edges to exist between the same pair of blocks.
+//
+// The order of Preds and Succs are significant (to Phi and If
+// instructions, respectively).
+//
+type BasicBlock struct {
+	Index        int            // index of this block within Func.Blocks (borrowed as a mark bit by block optimizations)
+	Comment      string         // optional label; no semantic significance
+	Func         *Function      // containing function
+	Instrs       []Instruction  // instructions in order
+	Preds, Succs []*BasicBlock  // predecessors and successors
+	succs2       [2]*BasicBlock // initial space for Succs.
+	dom          *domNode       // node in dominator tree; optional.
+	gaps         int            // number of nil Instrs (transient).
+	rundefers    int            // number of rundefers (transient)
+}
+
+// Pure values ----------------------------------------
+
+// A Capture is a pointer to a lexically enclosing local variable.
+//
+// The referent of a capture is an Alloc or another Capture and is
+// always considered potentially escaping, so Captures are always
+// addresses in the heap, and have pointer types.
+//
+type Capture struct {
+	Outer     Value // the Value captured from the enclosing context.
+	referrers []Instruction // instructions that refer to this value; see Referrers()
+}
+
+// A Parameter represents an input parameter of a function.
+//
+type Parameter struct {
+	Name_     string
+	Type_     types.Type
+	referrers []Instruction // instructions that refer to this value; see Referrers()
+}
+
+// A Literal represents a literal nil, boolean, string or numeric
+// (integer, fraction or complex) value.
+//
+// A literal's underlying Type() can be a basic type, possibly one of
+// the "untyped" types.  A nil literal can have any reference type:
+// interface, map, channel, pointer, slice, or function---but not
+// "untyped nil".
+//
+// All source-level constant expressions are represented by a Literal
+// of equal type and value.
+//
+// Value holds the exact value of the literal, independent of its
+// Type(), using the same representation as package go/types uses for
+// constants.
+//
+// Example printed form:
+// 	42:int
+//	"hello":untyped string
+//	3+4i:MyComplex
+//
+type Literal struct {
+	Type_ types.Type
+	Value exact.Value
+}
+
+// A Global is a named Value holding the address of a package-level
+// variable.
+//
+type Global struct {
+	Name_ string
+	Type_ types.Type
+	Pkg   *Package
+	Pos   token.Pos
+
+	// The following fields are set transiently during building,
+	// then cleared.
+	spec *ast.ValueSpec // explained at buildGlobal
+}
+
+// A built-in function, e.g. len.
+//
+// Builtins are immutable values; they do not have addresses.
+//
+// Type() returns an inscrutable *types.builtin.  Built-in functions
+// may have polymorphic or variadic types that are not expressible in
+// Go's type system.
+//
+type Builtin struct {
+	Object *types.Func // canonical types.Universe object for this built-in
+}
+
+// Value-defining instructions  ----------------------------------------
+
+// The Alloc instruction reserves space for a value of the given type,
+// zero-initializes it, and yields its address.
+//
+// Alloc values are always addresses, and have pointer types, so the
+// type of the allocated space is actually indirect(Type()).
+//
+// If Heap is false, Alloc allocates space in the function's
+// activation record (frame); we refer to an Alloc(Heap=false) as a
+// "local" alloc.  Each local Alloc returns the same address each time
+// it is executed within the same activation; the space is
+// re-initialized to zero.
+//
+// If Heap is true, Alloc allocates space in the heap; we refer to an
+// Alloc(Heap=true) as a "new" alloc.  Each new Alloc returns a
+// different address each time it is executed.
+//
+// When Alloc is applied to a channel, map or slice type, it returns
+// the address of an uninitialized (nil) reference of that kind; store
+// the result of MakeSlice, MakeMap or MakeChan in that location to
+// instantiate these types.
+//
+// Example printed form:
+// 	t0 = local int
+// 	t1 = new int
+//
+type Alloc struct {
+	anInstruction
+	Name_     string
+	Type_     types.Type
+	Heap      bool
+	Pos       token.Pos
+	referrers []Instruction
+	index     int // dense numbering; for lifting
+}
+
+// Phi represents an SSA φ-node, which combines values that differ
+// across incoming control-flow edges and yields a new value.  Within
+// a block, all φ-nodes must appear before all non-φ nodes.
+//
+// Example printed form:
+// 	t2 = phi [0.start: t0, 1.if.then: t1, ...]
+//
+type Phi struct {
+	Register
+	Comment string  // a hint as to its purpose
+	Edges   []Value // Edges[i] is value for Block().Preds[i]
+}
+
+// Call represents a function or method call.
+//
+// The Call instruction yields the function result, if there is
+// exactly one, or a tuple (empty or len>1) whose components are
+// accessed via Extract.
+//
+// See CallCommon for generic function call documentation.
+//
+// Example printed form:
+// 	t2 = println(t0, t1)
+// 	t4 = t3()
+// 	t7 = invoke t5.Println(...t6)
+//
+type Call struct {
+	Register
+	Call CallCommon // the shared parts of this call; see CallCommon
+}
+
+// BinOp yields the result of binary operation X Op Y.
+//
+// Example printed form:
+// 	t1 = t0 + 1:int
+//
+type BinOp struct {
+	Register
+	// One of:
+	// ADD SUB MUL QUO REM          + - * / %
+	// AND OR XOR SHL SHR AND_NOT   & | ^ << >> &^
+	// EQL LSS GTR NEQ LEQ GEQ      == < > != <= >=
+	Op   token.Token
+	X, Y Value
+}
+
+// UnOp yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
+// XOR is bitwise complement.
+// SUB is negation.
+//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive.  The
+// components of the tuple are accessed using Extract.
+//
+// Example printed form:
+// 	t0 = *x
+// 	t2 = <-t1,ok
+//
+type UnOp struct {
+	Register
+	Op      token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+	X       Value
+	CommaOk bool
+}
+
+// Conv yields the conversion of X to type Type().
+//
+// A conversion is one of the following kinds.  The behaviour of the
+// conversion operator may depend on both Type() and X.Type(), as well
+// as the dynamic value.
+//
+// A '+' indicates that a dynamic representation change may occur.
+// A '-' indicates that the conversion is a value-preserving change
+// to types only.
+//
+// 1. implicit conversions (arising from assignability rules):
+//    - adding/removing a name, same underlying types.
+//    - channel type restriction, possibly adding/removing a name.
+// 2. explicit conversions (in addition to the above):
+//    - changing a name, same underlying types.
+//    - between pointers to identical base types.
+//    + conversions between real numeric types.
+//    + conversions between complex numeric types.
+//    + integer/[]byte/[]rune -> string.
+//    + string -> []byte/[]rune.
+//
+// TODO(adonovan): split into two cases:
+// - rename value (ChangeType)
+// + value to type with different representation (Conv)
+//
+// Conversions of untyped string/number/bool constants to a specific
+// representation are eliminated during SSA construction.
+//
+// Example printed form:
+// 	t1 = convert interface{} <- int (t0)
+//
+type Conv struct {
+	Register
+	X Value // the value being converted
+}
+
+// ChangeInterface constructs a value of one interface type from a
+// value of another interface type known to be assignable to it.
+//
+// This operation cannot fail.  Use TypeAssert for interface
+// conversions that may fail dynamically.
+// TODO(adonovan): rename to "{Narrow,Restrict}Interface"?
+//
+// Example printed form:
+// 	t1 = change interface interface{} <- I (t0)
+//
+type ChangeInterface struct {
+	Register
+	X Value
+}
+
+// MakeInterface constructs an instance of an interface type from a
+// value and its method-set.
+//
+// To construct the zero value of an interface type T, use:
+// 	&Literal{types.nilType{}, T}
+//
+// Example printed form:
+// 	t1 = make interface interface{} <- int (42:int)
+//
+type MakeInterface struct {
+	Register
+	X       Value
+	Methods MethodSet // method set of (non-interface) X
+}
+
+// A MakeClosure instruction yields an anonymous function value whose
+// code is Fn and whose lexical capture slots are populated by Bindings.
+//
+// By construction, all captured variables are addresses of variables
+// allocated with 'new', i.e. Alloc(Heap=true).
+//
+// Type() returns a (possibly named) *types.Signature.
+//
+// Example printed form:
+// 	t0 = make closure anon@1.2 [x y z]
+//
+type MakeClosure struct {
+	Register
+	Fn       Value   // always a *Function
+	Bindings []Value // values for each free variable in Fn.FreeVars
+}
+
+// The MakeMap instruction creates a new hash-table-based map object
+// and yields a value of kind map.
+//
+// Type() returns a (possibly named) *types.Map.
+//
+// Example printed form:
+// 	t1 = make map[string]int t0
+//
+type MakeMap struct {
+	Register
+	Reserve Value // initial space reservation; nil => default
+	Pos     token.Pos // source position
+}
+
+// The MakeChan instruction creates a new channel object and yields a
+// value of kind chan.
+//
+// Type() returns a (possibly named) *types.Chan.
+//
+// Example printed form:
+// 	t0 = make chan int 0
+//
+type MakeChan struct {
+	Register
+	Size Value // int; size of buffer; zero => synchronous.
+	Pos  token.Pos // source position
+}
+
+// MakeSlice yields a slice of length Len backed by a newly allocated
+// array of length Cap.
+//
+// Both Len and Cap must be non-nil Values of integer type.
+//
+// (Alloc(types.Array) followed by Slice will not suffice because
+// Alloc can only create arrays of statically known length.)
+//
+// Type() returns a (possibly named) *types.Slice.
+//
+// Example printed form:
+// 	t1 = make slice []string 1:int t0
+//
+type MakeSlice struct {
+	Register
+	Len Value
+	Cap Value
+	Pos token.Pos // source position
+}
+
+// Slice yields a slice of an existing string, slice or *array X
+// between optional integer bounds Low and High.
+//
+// Type() returns string if the type of X was string, otherwise a
+// *types.Slice with the same element type as X.
+//
+// Example printed form:
+// 	t1 = slice t0[1:]
+//
+type Slice struct {
+	Register
+	X         Value // slice, string, or *array
+	Low, High Value // either may be nil
+}
+
+// FieldAddr yields the address of Field of *struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Example printed form:
+// 	t1 = &t0.name [#1]
+//
+type FieldAddr struct {
+	Register
+	X     Value // *struct
+	Field int   // index into X.Type().(*types.Struct).Fields
+}
+
+// Field yields the Field of struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X; by using numeric indices we avoid ambiguity of
+// package-local identifiers and permit compact representations.
+//
+// Example printed form:
+// 	t1 = t0.name [#1]
+//
+type Field struct {
+	Register
+	X     Value // struct
+	Field int   // index into X.Type().(*types.Struct).Fields
+}
+
+// IndexAddr yields the address of the element at index Index of
+// collection X.  Index is an integer expression.
+//
+// The elements of maps and strings are not addressable; use Lookup or
+// MapUpdate instead.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Example printed form:
+// 	t2 = &t0[t1]
+//
+type IndexAddr struct {
+	Register
+	X     Value // slice or *array
+	Index Value // numeric index
+}
+
+// Index yields element Index of array X.
+//
+// Example printed form:
+// 	t2 = t0[t1]
+//
+type Index struct {
+	Register
+	X     Value // array
+	Index Value // integer index
+}
+
+// Lookup yields element Index of collection X, a map or string.
+// Index is an integer expression if X is a string or the appropriate
+// key type if X is a map.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Example printed form:
+// 	t2 = t0[t1]
+// 	t5 = t3[t4],ok
+//
+type Lookup struct {
+	Register
+	X       Value // string or map
+	Index   Value // numeric or key-typed index
+	CommaOk bool  // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+//
+type SelectState struct {
+	Dir  ast.ChanDir // direction of case
+	Chan Value       // channel to use (for send or receive)
+	Send Value       // value to send (for send)
+}
+
+// Select tests whether (or blocks until) one or more of the specified
+// sent or received states is entered.
+//
+// It returns a triple (index int, recv interface{}, recvOk bool)
+// whose components, described below, must be accessed via the Extract
+// instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select chooses one among the ready states
+// pseudorandomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+//
+// If !Blocking, select doesn't block if no states hold; instead it
+// returns immediately with index equal to -1.
+//
+// If the chosen channel was used for a receive, 'recv' is set to the
+// received value; otherwise it is nil.
+//
+// The third component of the triple, recvOk, is a boolean whose value
+// is true iff the selected operation was a receive and the receive
+// successfully yielded a value.
+//
+// Example printed form:
+// 	t3 = select nonblocking [<-t0, t1<-t2, ...]
+// 	t4 = select blocking []
+//
+type Select struct {
+	Register
+	States   []SelectState // one per communication case
+	Blocking bool
+}
+
+// Range yields an iterator over the domain and range of X,
+// which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns a (possibly named) *types.Result (tuple type).
+//
+// Example printed form:
+// 	t0 = range "hello":string
+//
+type Range struct {
+	Register
+	X Value // string or map
+}
+
+// Next reads and advances the (map or string) iterator Iter and
+// returns a 3-tuple value (ok, k, v).  If the iterator is not
+// exhausted, ok is true and k and v are the next elements of the
+// domain and range, respectively.  Otherwise ok is false and k and v
+// are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Result (tuple type) for the triple
+// (ok, k, v).  The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+// 	t1 = next t0
+//
+type Next struct {
+	Register
+	Iter     Value // the iterator yielded by a Range instruction
+	IsString bool // true => string iterator; false => map iterator.
+}
+
+// TypeAssert tests whether interface value X has type AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value.  The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation
+// cannot fail; ChangeInterface is preferred in this case.
+//
+// Type() reflects the actual type of the result, possibly a pair
+// (types.Result); AssertedType is the asserted type.
+//
+// Example printed form:
+// 	t1 = typeassert t0.(int)
+// 	t3 = typeassert,ok t2.(T)
+//
+type TypeAssert struct {
+	Register
+	X            Value
+	AssertedType types.Type
+	CommaOk      bool
+}
+
+// Extract yields component Index of Tuple.
+//
+// This is used to access the results of instructions with multiple
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
+//
+// Example printed form:
+// 	t1 = extract t0 #1
+//
+type Extract struct {
+	Register
+	Tuple Value
+	Index int // component index within Tuple
+}
+
+// Instructions executed for effect.  They do not yield a value. --------------------
+
+// Jump transfers control to the sole successor of its owning block.
+//
+// A Jump instruction must be the last instruction of its containing
+// BasicBlock.
+//
+// Example printed form:
+// 	jump done
+//
+type Jump struct {
+	anInstruction
+}
+
+// The If instruction transfers control to one of the two successors
+// of its owning block, depending on the boolean Cond: the first if
+// true, the second if false.
+//
+// An If instruction must be the last instruction of its containing
+// BasicBlock.
+//
+// Example printed form:
+// 	if t0 goto done else body
+//
+type If struct {
+	anInstruction
+	Cond Value
+}
+
+// Ret returns values and control back to the calling function.
+//
+// len(Results) is always equal to the number of results in the
+// function's signature.
+//
+// If len(Results) > 1, Ret returns a tuple value with the specified
+// components which the caller must access using Extract instructions.
+//
+// There is no instruction to return a ready-made tuple like those
+// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or
+// a tail-call to a function with multiple result parameters.
+//
+// Ret must be the last instruction of its containing BasicBlock.
+// Such a block has no successors.
+//
+// Example printed form:
+// 	ret
+// 	ret nil:I, 2:int
+//
+type Ret struct {
+	anInstruction
+	Results []Value // the return operands
+}
+
+// RunDefers pops and invokes the entire stack of procedure calls
+// pushed by Defer instructions in this function.
+//
+// It is legal to encounter multiple 'rundefers' instructions in a
+// single control-flow path through a function; this is useful in
+// the combined init() function, for example.
+//
+// Example printed form:
+//	rundefers
+//
+type RunDefers struct {
+	anInstruction
+}
+
+// Panic initiates a panic with value X.
+//
+// A Panic instruction must be the last instruction of its containing
+// BasicBlock, which must have no successors.
+//
+// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction;
+// they are treated as calls to a built-in function.
+//
+// Example printed form:
+// 	panic t0
+//
+type Panic struct {
+	anInstruction
+	X Value // an interface{}
+}
+
+// Go creates a new goroutine and calls the specified function
+// within it.
+//
+// See CallCommon for generic function call documentation.
+//
+// Example printed form:
+// 	go println(t0, t1)
+// 	go t3()
+// 	go invoke t5.Println(...t6)
+//
+type Go struct {
+	anInstruction
+	Call CallCommon // the shared parts of this call; see CallCommon
+}
+
+// Defer pushes the specified call onto a stack of functions
+// to be called by a RunDefers instruction or by a panic.
+//
+// See CallCommon for generic function call documentation.
+//
+// Example printed form:
+// 	defer println(t0, t1)
+// 	defer t3()
+// 	defer invoke t5.Println(...t6)
+//
+type Defer struct {
+	anInstruction
+	Call CallCommon // the shared parts of this call; see CallCommon
+}
+
+// Send sends X on channel Chan.
+//
+// Example printed form:
+// 	send t0 <- t1
+//
+type Send struct {
+	anInstruction
+	Chan, X Value // the channel, and the value to send on it
+}
+
+// Store stores Val at address Addr.
+// Stores can be of arbitrary types.
+//
+// Example printed form:
+// 	*x = y
+//
+type Store struct {
+	anInstruction
+	Addr Value
+	Val  Value
+}
+
+// MapUpdate updates the association of Map[Key] to Value.
+//
+// Example printed form:
+//	t0[t1] = t2
+//
+type MapUpdate struct {
+	anInstruction
+	Map   Value
+	Key   Value
+	Value Value
+}
+
+// Embeddable mix-ins and helpers for common parts of other structs. -----------
+
+// Register is a mix-in embedded by all SSA values that are also
+// instructions, i.e. virtual registers, and provides implementations
+// of the Value interface's Name() and Type() methods: the name is
+// simply a numbered register (e.g. "t0") and the type is the Type_
+// field.
+//
+// Temporary names are automatically assigned to each Register on
+// completion of building a function in SSA form.
+//
+// Clients must not assume that the 'id' value (and the Name() derived
+// from it) is unique within a function.  As always in this API,
+// semantics are determined only by identity; names exist only to
+// facilitate debugging.
+//
+type Register struct {
+	anInstruction
+	num       int        // "name" of virtual register, e.g. "t0".  Not guaranteed unique.
+	Type_     types.Type // type of virtual register
+	referrers []Instruction
+}
+
+// anInstruction is a mix-in embedded by all Instructions.
+// It provides the implementations of the Block and SetBlock methods.
+// (Instructions that are also Values embed Register, which in turn
+// embeds anInstruction.)
+type anInstruction struct {
+	Block_ *BasicBlock // the basic block of this instruction
+}
+
+// CallCommon is contained by Go, Defer and Call to hold the
+// common parts of a function or method call.
+//
+// Each CallCommon exists in one of two modes, function call and
+// interface method invocation, or "call" and "invoke" for short.
+//
+// 1. "call" mode: when Recv is nil (!IsInvoke), a CallCommon
+// represents an ordinary function call of the value in Func.
+//
+// In the common case in which Func is a *Function, this indicates a
+// statically dispatched call to a package-level function, an
+// anonymous function, or a method of a named type.  Also statically
+// dispatched, but less common, Func may be a *MakeClosure, indicating
+// an immediately applied function literal with free variables.  Any
+// other Value of Func indicates a dynamically dispatched function
+// call.  The StaticCallee method returns the callee in these cases.
+//
+// Args contains the arguments to the call.  If Func is a method,
+// Args[0] contains the receiver parameter.  Recv and Method are not
+// used in this mode.
+//
+// Example printed form:
+// 	t2 = println(t0, t1)
+// 	go t3()
+//	defer t5(...t6)
+//
+// 2. "invoke" mode: when Recv is non-nil (IsInvoke), a CallCommon
+// represents a dynamically dispatched call to an interface method.
+// In this mode, Recv is the interface value and Method is the index
+// of the method within the interface type of the receiver.
+//
+// Recv is implicitly supplied to the concrete method implementation
+// as the receiver parameter; in other words, Args[0] holds not the
+// receiver but the first true argument.  Func is not used in this
+// mode.
+//
+// If the called method's receiver has non-pointer type T, but the
+// receiver supplied by the interface value has type *T, an implicit
+// load (copy) operation is performed.
+//
+// Example printed form:
+// 	t1 = invoke t0.String()
+// 	go invoke t3.Run(t2)
+// 	defer invoke t4.Handle(...t5)
+//
+// In both modes, HasEllipsis is true iff the last element of Args is
+// a slice value containing zero or more arguments to a variadic
+// function.  (This is not semantically significant since the type of
+// the called function is sufficient to determine this, but it aids
+// readability of the printed form.)
+//
+type CallCommon struct {
+	Recv        Value     // receiver, iff interface method invocation
+	Method      int       // index of interface method within Recv's interface type; call MethodId() for its Id
+	Func        Value     // target of call, iff function call
+	Args        []Value   // actual parameters, including receiver in invoke mode
+	HasEllipsis bool      // true iff last Args is a slice of '...' args (needed?)
+	Pos         token.Pos // position of call expression
+}
+
+// IsInvoke returns true if this call has "invoke" (not "call") mode,
+// i.e. Recv, the interface method receiver, is set.
+func (c *CallCommon) IsInvoke() bool {
+	return c.Recv != nil
+}
+
+// StaticCallee returns the called function if this is a trivially
+// static "call"-mode call.
+func (c *CallCommon) StaticCallee() *Function {
+	switch fn := c.Func.(type) {
+	case *Function:
+		return fn
+	case *MakeClosure:
+		return fn.Fn.(*Function)
+	}
+	return nil
+}
+
+// MethodId returns the Id for the method called by c, which must
+// have "invoke" mode: c.Recv must be non-nil and its underlying type
+// must be an interface, otherwise the assertions below panic.
+func (c *CallCommon) MethodId() Id {
+	meth := underlyingType(c.Recv.Type()).(*types.Interface).Methods[c.Method]
+	return IdFromQualifiedName(meth.QualifiedName)
+}
+
+// Description returns a description of the mode of this call suitable
+// for a user interface, e.g. "static method call".
+func (c *CallCommon) Description() string {
+	switch fn := c.Func.(type) {
+	case nil:
+		return "dynamic method call" // ("invoke" mode)
+	case *MakeClosure:
+		return "static function closure call"
+	case *Function:
+		if fn.Signature.Recv != nil {
+			return "static method call"
+		}
+		return "static function call"
+	}
+	return "dynamic function call"
+}
+
+// Trivial implementations of the Value, Member and Instruction
+// accessor methods for each of the types defined above.
+
+func (v *Builtin) Type() types.Type        { return v.Object.GetType() }
+func (v *Builtin) Name() string            { return v.Object.GetName() }
+func (*Builtin) Referrers() *[]Instruction { return nil }
+
+func (v *Capture) Type() types.Type          { return v.Outer.Type() }
+func (v *Capture) Name() string              { return v.Outer.Name() }
+func (v *Capture) Referrers() *[]Instruction { return &v.referrers }
+
+func (v *Global) Type() types.Type        { return v.Type_ }
+func (v *Global) Name() string            { return v.Name_ }
+func (v *Global) Posn() token.Pos         { return v.Pos }
+func (*Global) Referrers() *[]Instruction { return nil }
+
+func (v *Function) Name() string            { return v.Name_ }
+func (v *Function) Type() types.Type        { return v.Signature }
+func (v *Function) Posn() token.Pos         { return v.Pos }
+func (*Function) Referrers() *[]Instruction { return nil }
+
+func (v *Parameter) Type() types.Type          { return v.Type_ }
+func (v *Parameter) Name() string              { return v.Name_ }
+func (v *Parameter) Referrers() *[]Instruction { return &v.referrers }
+
+func (v *Alloc) Type() types.Type          { return v.Type_ }
+func (v *Alloc) Name() string              { return v.Name_ }
+func (v *Alloc) Referrers() *[]Instruction { return &v.referrers }
+
+func (v *Register) Type() types.Type          { return v.Type_ }
+func (v *Register) setType(typ types.Type)    { v.Type_ = typ }
+func (v *Register) Name() string              { return fmt.Sprintf("t%d", v.num) }
+func (v *Register) setNum(num int)            { v.num = num }
+func (v *Register) Referrers() *[]Instruction { return &v.referrers }
+func (v *Register) asRegister() *Register     { return v }
+
+func (v *anInstruction) Block() *BasicBlock         { return v.Block_ }
+func (v *anInstruction) SetBlock(block *BasicBlock) { v.Block_ = block }
+
+func (t *Type) Name() string     { return t.NamedType.Obj.Name }
+func (t *Type) Posn() token.Pos  { return t.NamedType.Obj.GetPos() }
+func (t *Type) String() string   { return t.Name() }
+func (t *Type) Type() types.Type { return t.NamedType }
+
+func (p *Package) Name() string { return p.Types.Name }
+
+func (c *Constant) Name() string     { return c.Name_ }
+func (c *Constant) Posn() token.Pos  { return c.Pos }
+func (c *Constant) String() string   { return c.Name() }
+func (c *Constant) Type() types.Type { return c.Value.Type() }
+
+// Func returns the package-level function of the specified name,
+// or nil if not found.
+//
+func (p *Package) Func(name string) *Function {
+	fn, _ := p.Members[name].(*Function)
+	return fn
+}
+
+// Var returns the package-level variable of the specified name,
+// or nil if not found.
+//
+func (p *Package) Var(name string) *Global {
+	gbl, _ := p.Members[name].(*Global)
+	return gbl
+}
+
+// Const returns the package-level constant of the specified name,
+// or nil if not found.
+//
+func (p *Package) Const(name string) *Constant {
+	cnst, _ := p.Members[name].(*Constant)
+	return cnst
+}
+
+// Type returns the package-level type of the specified name,
+// or nil if not found.
+//
+func (p *Package) Type(name string) *Type {
+	typ, _ := p.Members[name].(*Type)
+	return typ
+}
+
+// "Implements" relation boilerplate.
+// Don't try to factor this using promotion and mix-ins: the long-hand
+// form serves as better documentation, including in godoc.
+
+// Value implementations:
+func (*Alloc) ImplementsValue()           {}
+func (*BinOp) ImplementsValue()           {}
+func (*Builtin) ImplementsValue()         {}
+func (*Call) ImplementsValue()            {}
+func (*Capture) ImplementsValue()         {}
+func (*ChangeInterface) ImplementsValue() {}
+func (*Conv) ImplementsValue()            {}
+func (*Extract) ImplementsValue()         {}
+func (*Field) ImplementsValue()           {}
+func (*FieldAddr) ImplementsValue()       {}
+func (*Function) ImplementsValue()        {}
+func (*Global) ImplementsValue()          {}
+func (*Index) ImplementsValue()           {}
+func (*IndexAddr) ImplementsValue()       {}
+func (*Literal) ImplementsValue()         {}
+func (*Lookup) ImplementsValue()          {}
+func (*MakeChan) ImplementsValue()        {}
+func (*MakeClosure) ImplementsValue()     {}
+func (*MakeInterface) ImplementsValue()   {}
+func (*MakeMap) ImplementsValue()         {}
+func (*MakeSlice) ImplementsValue()       {}
+func (*Next) ImplementsValue()            {}
+func (*Parameter) ImplementsValue()       {}
+func (*Phi) ImplementsValue()             {}
+func (*Range) ImplementsValue()           {}
+func (*Select) ImplementsValue()          {}
+func (*Slice) ImplementsValue()           {}
+func (*TypeAssert) ImplementsValue()      {}
+func (*UnOp) ImplementsValue()            {}
+
+// Member implementations:
+func (*Constant) ImplementsMember() {}
+func (*Function) ImplementsMember() {}
+func (*Global) ImplementsMember()   {}
+func (*Type) ImplementsMember()     {}
+
+// Instruction implementations:
+func (*Alloc) ImplementsInstruction()           {}
+func (*BinOp) ImplementsInstruction()           {}
+func (*Call) ImplementsInstruction()            {}
+func (*ChangeInterface) ImplementsInstruction() {}
+func (*Conv) ImplementsInstruction()            {}
+func (*Defer) ImplementsInstruction()           {}
+func (*Extract) ImplementsInstruction()         {}
+func (*Field) ImplementsInstruction()           {}
+func (*FieldAddr) ImplementsInstruction()       {}
+func (*Go) ImplementsInstruction()              {}
+func (*If) ImplementsInstruction()              {}
+func (*Index) ImplementsInstruction()           {}
+func (*IndexAddr) ImplementsInstruction()       {}
+func (*Jump) ImplementsInstruction()            {}
+func (*Lookup) ImplementsInstruction()          {}
+func (*MakeChan) ImplementsInstruction()        {}
+func (*MakeClosure) ImplementsInstruction()     {}
+func (*MakeInterface) ImplementsInstruction()   {}
+func (*MakeMap) ImplementsInstruction()         {}
+func (*MakeSlice) ImplementsInstruction()       {}
+func (*MapUpdate) ImplementsInstruction()       {}
+func (*Next) ImplementsInstruction()            {}
+func (*Panic) ImplementsInstruction()           {}
+func (*Phi) ImplementsInstruction()             {}
+func (*Range) ImplementsInstruction()           {}
+func (*Ret) ImplementsInstruction()             {}
+func (*RunDefers) ImplementsInstruction()       {}
+func (*Select) ImplementsInstruction()          {}
+func (*Send) ImplementsInstruction()            {}
+func (*Slice) ImplementsInstruction()           {}
+func (*Store) ImplementsInstruction()           {}
+func (*TypeAssert) ImplementsInstruction()      {}
+func (*UnOp) ImplementsInstruction()            {}
+
+// Operands.
+
+// Each Operands method appends to rands the addresses of this
+// instruction's Value operands and returns the (possibly grown)
+// slice.  Addresses (not values) are appended so that callers may
+// mutate the operands in place.
+
+// REVIEWERS: Should this method be defined nearer each type to avoid skew?
+
+func (v *Alloc) Operands(rands []*Value) []*Value {
+	return rands
+}
+
+func (v *BinOp) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X, &v.Y)
+}
+
+func (c *CallCommon) Operands(rands []*Value) []*Value {
+	rands = append(rands, &c.Recv, &c.Func)
+	for i := range c.Args {
+		rands = append(rands, &c.Args[i])
+	}
+	return rands
+}
+
+func (s *Go) Operands(rands []*Value) []*Value {
+	return s.Call.Operands(rands)
+}
+
+func (s *Call) Operands(rands []*Value) []*Value {
+	return s.Call.Operands(rands)
+}
+
+func (s *Defer) Operands(rands []*Value) []*Value {
+	return s.Call.Operands(rands)
+}
+
+func (v *ChangeInterface) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (v *Conv) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (v *Extract) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Tuple)
+}
+
+func (v *Field) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (v *FieldAddr) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (s *If) Operands(rands []*Value) []*Value {
+	return append(rands, &s.Cond)
+}
+
+func (v *Index) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X, &v.Index)
+}
+
+func (v *IndexAddr) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X, &v.Index)
+}
+
+func (*Jump) Operands(rands []*Value) []*Value {
+	return rands
+}
+
+func (v *Lookup) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X, &v.Index)
+}
+
+func (v *MakeChan) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Size)
+}
+
+func (v *MakeClosure) Operands(rands []*Value) []*Value {
+	rands = append(rands, &v.Fn)
+	for i := range v.Bindings {
+		rands = append(rands, &v.Bindings[i])
+	}
+	return rands
+}
+
+func (v *MakeInterface) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (v *MakeMap) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Reserve)
+}
+
+func (v *MakeSlice) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Len, &v.Cap)
+}
+
+func (v *MapUpdate) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Map, &v.Key, &v.Value)
+}
+
+func (v *Next) Operands(rands []*Value) []*Value {
+	return append(rands, &v.Iter)
+}
+
+func (s *Panic) Operands(rands []*Value) []*Value {
+	return append(rands, &s.X)
+}
+
+func (v *Phi) Operands(rands []*Value) []*Value {
+	for i := range v.Edges {
+		rands = append(rands, &v.Edges[i])
+	}
+	return rands
+}
+
+func (v *Range) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (s *Ret) Operands(rands []*Value) []*Value {
+	for i := range s.Results {
+		rands = append(rands, &s.Results[i])
+	}
+	return rands
+}
+
+func (*RunDefers) Operands(rands []*Value) []*Value {
+	return rands
+}
+
+func (v *Select) Operands(rands []*Value) []*Value {
+	for i := range v.States {
+		rands = append(rands, &v.States[i].Chan, &v.States[i].Send)
+	}
+	return rands
+}
+
+func (s *Send) Operands(rands []*Value) []*Value {
+	return append(rands, &s.Chan, &s.X)
+}
+
+func (v *Slice) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X, &v.Low, &v.High)
+}
+
+func (s *Store) Operands(rands []*Value) []*Value {
+	return append(rands, &s.Addr, &s.Val)
+}
+
+func (v *TypeAssert) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
+func (v *UnOp) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
diff --git a/ssa/ssadump.go b/ssa/ssadump.go
new file mode 100644
index 0000000..d7f64aa
--- /dev/null
+++ b/ssa/ssadump.go
@@ -0,0 +1,117 @@
+// +build ignore
+
+package main
+
+// ssadump: a tool for displaying and interpreting the SSA form of Go programs.
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"os"
+	"runtime/pprof"
+
+	"code.google.com/p/go.tools/ssa"
+	"code.google.com/p/go.tools/ssa/interp"
+)
+
+// Command-line flags for the ssadump tool.
+var buildFlag = flag.String("build", "", `Options controlling the SSA builder.
+The value is a sequence of zero or more of these letters:
+C	perform sanity [C]hecking of the SSA form.
+P	log [P]ackage inventory.
+F	log [F]unction SSA code.
+S	log [S]ource locations as SSA builder progresses.
+G	use binary object files from gc to provide imports (no code).
+L	build distinct packages seria[L]ly instead of in parallel.
+N	build [N]aive SSA form: don't replace local loads/stores with registers.
+`)
+
+var runFlag = flag.Bool("run", false, "Invokes the SSA interpreter on the program.")
+
+var interpFlag = flag.String("interp", "", `Options controlling the SSA test interpreter.
+The value is a sequence of zero or more of these letters:
+R	disable [R]ecover() from panic; show interpreter crash instead.
+T	[T]race execution of the program.  Best for single-threaded programs!
+`)
+
+const usage = `SSA builder and interpreter.
+Usage: ssadump [<flag> ...] [<file.go> ...] [<arg> ...]
+       ssadump [<flag> ...] <import/path>   [<arg> ...]
+Use -help flag to display options.
+
+Examples:
+% ssadump -run -interp=T hello.go     # interpret a program, with tracing
+% ssadump -build=FPG hello.go         # quickly dump SSA form of a single package
+`
+
+var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to file")
+
+// buildModeBits maps each -build flag letter to its builder mode bits.
+var buildModeBits = map[rune]ssa.BuilderMode{
+	'P': ssa.LogPackages | ssa.BuildSerially,
+	'F': ssa.LogFunctions | ssa.BuildSerially,
+	'S': ssa.LogSource | ssa.BuildSerially,
+	'C': ssa.SanityCheckFunctions,
+	'N': ssa.NaiveForm,
+	'G': ssa.UseGCImporter,
+	'L': ssa.BuildSerially,
+}
+
+// interpModeBits maps each -interp flag letter to its interpreter mode bits.
+var interpModeBits = map[rune]interp.Mode{
+	'T': interp.EnableTracing,
+	'R': interp.DisableRecover,
+}
+
+// main parses the flags, builds SSA form for the requested packages,
+// and optionally runs the program under the SSA interpreter.
+func main() {
+	flag.Parse()
+	args := flag.Args()
+
+	// Decode the -build flag, one letter at a time.
+	var mode ssa.BuilderMode
+	for _, c := range *buildFlag {
+		bits, ok := buildModeBits[c]
+		if !ok {
+			log.Fatalf("Unknown -build option: '%c'.", c)
+		}
+		mode |= bits
+	}
+
+	// Decode the -interp flag likewise.
+	var interpMode interp.Mode
+	for _, c := range *interpFlag {
+		bits, ok := interpModeBits[c]
+		if !ok {
+			log.Fatalf("Unknown -interp option: '%c'.", c)
+		}
+		interpMode |= bits
+	}
+
+	if len(args) == 0 {
+		fmt.Fprint(os.Stderr, usage)
+		os.Exit(1)
+	}
+
+	// Profiling support.
+	if *cpuprofile != "" {
+		f, err := os.Create(*cpuprofile)
+		if err != nil {
+			log.Fatal(err)
+		}
+		pprof.StartCPUProfile(f)
+		defer pprof.StopCPUProfile()
+	}
+
+	context := &ssa.Context{
+		Mode:   mode,
+		Loader: ssa.GorootLoader,
+	}
+	b := ssa.NewBuilder(context)
+	mainpkg, args, err := ssa.CreatePackageFromArgs(b, args)
+	if err != nil {
+		log.Fatal(err.Error())
+	}
+	b.BuildAllPackages()
+	b = nil // discard Builder
+
+	if *runFlag {
+		interp.Interpret(mainpkg, interpMode, mainpkg.Name(), args)
+	}
+}
diff --git a/ssa/typeinfo.go b/ssa/typeinfo.go
new file mode 100644
index 0000000..38673fd
--- /dev/null
+++ b/ssa/typeinfo.go
@@ -0,0 +1,201 @@
+package ssa
+
+// This file defines utilities for querying the results of typechecker:
+// types of expressions, values of constant expressions, referents of identifiers.
+
+import (
+	"code.google.com/p/go.tools/go/types"
+	"fmt"
+	"go/ast"
+)
+
+// TypeInfo contains information provided by the type checker about
+// the abstract syntax for a single package: expression types, values
+// of constant expressions, and the objects denoted by identifiers.
+// Query it via the TypeOf, ValueOf and ObjectOf accessors.
+type TypeInfo struct {
+	types     map[ast.Expr]types.Type     // inferred types of expressions
+	constants map[ast.Expr]*Literal       // values of constant expressions
+	idents    map[*ast.Ident]types.Object // canonical type objects for named entities
+}
+
+// TypeOf returns the type of expression e.
+// Precondition: e belongs to the package's ASTs.
+func (info *TypeInfo) TypeOf(e ast.Expr) types.Type {
+	// For an Ident, the types map may record a more specific type
+	// than the identifier's object type, e.g. within a typeswitch.
+	if t, ok := info.types[e]; ok {
+		return t
+	}
+	// The typechecker doesn't notify us of all Idents,
+	// e.g. s.Key and s.Value in a RangeStmt.
+	// So we have this fallback.
+	// TODO(gri): This is a typechecker bug.  When fixed,
+	// eliminate this case and panic.
+	id, ok := e.(*ast.Ident)
+	if !ok {
+		panic("no type for expression")
+	}
+	return info.ObjectOf(id).GetType()
+}
+
+// ValueOf returns the value of expression e if it is a constant,
+// nil otherwise.
+//
+func (info *TypeInfo) ValueOf(e ast.Expr) *Literal {
+	// A missing map entry yields the zero value nil, i.e. "not a constant".
+	return info.constants[e]
+}
+
+// ObjectOf returns the typechecker object denoted by the specified id.
+// Precondition: id belongs to the package's ASTs.
+//
+func (info *TypeInfo) ObjectOf(id *ast.Ident) types.Object {
+	obj, ok := info.idents[id]
+	if !ok {
+		panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %p", id.Name, id))
+	}
+	return obj
+}
+
+// IsType returns true iff expression e denotes a type.
+// Precondition: e belongs to the package's ASTs.
+//
+func (info *TypeInfo) IsType(e ast.Expr) bool {
+	switch x := e.(type) {
+	case *ast.ParenExpr: // (T)
+		return info.IsType(x.X)
+	case *ast.StarExpr: // *T
+		return info.IsType(x.X)
+	case *ast.Ident:
+		return objKind(info.ObjectOf(x)) == ast.Typ
+	case *ast.SelectorExpr: // pkg.Type
+		obj := info.isPackageRef(x)
+		return obj != nil && objKind(obj) == ast.Typ
+	case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
+		return true
+	}
+	return false
+}
+
+// isPackageRef returns the identity of the object if sel is a
+// package-qualified reference to a named const, var, func or type.
+// Otherwise it returns nil.
+// Precondition: sel belongs to the package's ASTs.
+//
+func (info *TypeInfo) isPackageRef(sel *ast.SelectorExpr) types.Object {
+	id, ok := sel.X.(*ast.Ident)
+	if !ok {
+		return nil
+	}
+	obj := info.ObjectOf(id)
+	if objKind(obj) != ast.Pkg {
+		return nil
+	}
+	return obj.(*types.Package).Scope.Lookup(sel.Sel.Name)
+}
+
+// builtinCallSignature returns a new Signature describing the
+// effective type of a builtin operator for the particular call e.
+//
+// This requires ad-hoc typing rules for all variadic (append, print,
+// println) and polymorphic (append, copy, delete, close) built-ins.
+// This logic could be part of the typechecker, and should arguably
+// be moved there and made accessible via an additional types.Context
+// callback.
+//
+// The returned Signature is degenerate and only intended for use by
+// emitCallArgs.
+//
+func builtinCallSignature(info *TypeInfo, e *ast.CallExpr) *types.Signature {
+	var params []*types.Var
+	var isVariadic bool
+
+	switch builtin := noparens(e.Fun).(*ast.Ident).Name; builtin {
+	case "append":
+		var t0, t1 types.Type
+		t0 = info.TypeOf(e) // infer arg[0] type from result type
+		if e.Ellipsis != 0 {
+			// append([]T, []T) []T
+			// append([]byte, string) []byte
+			t1 = info.TypeOf(e.Args[1]) // no conversion
+		} else {
+			// append([]T, ...T) []T
+			t1 = underlyingType(t0).(*types.Slice).Elt
+			isVariadic = true
+		}
+		params = append(params,
+			&types.Var{Type: t0},
+			&types.Var{Type: t1})
+
+	case "print", "println": // print{,ln}(any, ...interface{})
+		isVariadic = true
+		// Note, arg0 may have any type, not necessarily tEface.
+		params = append(params,
+			&types.Var{Type: info.TypeOf(e.Args[0])},
+			&types.Var{Type: tEface})
+
+	case "close":
+		// close(c): parameter type taken directly from the argument.
+		params = append(params, &types.Var{Type: info.TypeOf(e.Args[0])})
+
+	case "copy":
+		// copy([]T, []T) int
+		// Infer arg types from each other.  Sleazy.
+		var st *types.Slice
+		if t, ok := underlyingType(info.TypeOf(e.Args[0])).(*types.Slice); ok {
+			st = t
+		} else if t, ok := underlyingType(info.TypeOf(e.Args[1])).(*types.Slice); ok {
+			st = t
+		} else {
+			panic("cannot infer types in call to copy()")
+		}
+		stvar := &types.Var{Type: st}
+		params = append(params, stvar, stvar)
+
+	case "delete":
+		// delete(map[K]V, K)
+		tmap := info.TypeOf(e.Args[0])
+		tkey := underlyingType(tmap).(*types.Map).Key
+		params = append(params,
+			&types.Var{Type: tmap},
+			&types.Var{Type: tkey})
+
+	case "len", "cap":
+		// len(x), cap(x): parameter type taken directly from the argument.
+		params = append(params, &types.Var{Type: info.TypeOf(e.Args[0])})
+
+	case "real", "imag":
+		// Reverse conversion to "complex" case below.
+		// The complex argument kind is derived from the float result kind.
+		var argType types.Type
+		switch info.TypeOf(e).(*types.Basic).Kind {
+		case types.UntypedFloat:
+			argType = types.Typ[types.UntypedComplex]
+		case types.Float64:
+			argType = tComplex128
+		case types.Float32:
+			argType = tComplex64
+		default:
+			unreachable()
+		}
+		params = append(params, &types.Var{Type: argType})
+
+	case "complex":
+		// complex(re, im): both float parameter kinds are derived
+		// from the complex result kind.
+		var argType types.Type
+		switch info.TypeOf(e).(*types.Basic).Kind {
+		case types.UntypedComplex:
+			argType = types.Typ[types.UntypedFloat]
+		case types.Complex128:
+			argType = tFloat64
+		case types.Complex64:
+			argType = tFloat32
+		default:
+			unreachable()
+		}
+		v := &types.Var{Type: argType}
+		params = append(params, v, v)
+
+	case "panic":
+		params = append(params, &types.Var{Type: tEface})
+
+	case "recover":
+		// no params
+
+	default:
+		panic("unknown builtin: " + builtin)
+	}
+
+	return &types.Signature{Params: params, IsVariadic: isVariadic}
+}
diff --git a/ssa/util.go b/ssa/util.go
new file mode 100644
index 0000000..09682d9
--- /dev/null
+++ b/ssa/util.go
@@ -0,0 +1,251 @@
+package ssa
+
+// This file defines a number of miscellaneous utility functions.
+
+import (
+	"fmt"
+	"go/ast"
+	"io"
+	"os"
+	"reflect"
+
+	"code.google.com/p/go.tools/go/types"
+)
+
+// unreachable panics; it marks code paths that are never expected to
+// execute, so reaching one indicates an internal inconsistency.
+func unreachable() {
+	panic("unreachable")
+}
+
+//// AST utilities
+
+// noparens returns e with any enclosing parentheses stripped.
+func noparens(e ast.Expr) ast.Expr {
+	// Peel off ParenExpr wrappers until none remain.
+	for p, ok := e.(*ast.ParenExpr); ok; p, ok = e.(*ast.ParenExpr) {
+		e = p.X
+	}
+	return e
+}
+
+// isBlankIdent returns true iff e is an Ident with name "_".
+// They have no associated types.Object, and thus no type.
+//
+// TODO(gri): consider making typechecker not treat them differently.
+// It's one less thing for clients like us to worry about.
+//
+func isBlankIdent(e ast.Expr) bool {
+	if id, ok := e.(*ast.Ident); ok {
+		return id.Name == "_"
+	}
+	return false
+}
+
+//// Type utilities.  Some of these belong in go/types.
+
+// underlyingType returns the underlying type of typ.
+// TODO(gri): this is a copy of go/types.underlying; export that function.
+//
+func underlyingType(typ types.Type) types.Type {
+	if typ, ok := typ.(*types.NamedType); ok {
+		return typ.Underlying // underlying types are never NamedTypes
+	}
+	if typ == nil {
+		panic("underlyingType(nil)")
+	}
+	return typ
+}
+
+// isPointer returns true for types whose underlying type is a pointer.
+func isPointer(typ types.Type) bool {
+	u := typ
+	if nt, ok := u.(*types.NamedType); ok {
+		u = nt.Underlying
+	}
+	_, isPtr := u.(*types.Pointer)
+	return isPtr
+}
+
+// pointer(typ) returns the type that is a pointer to typ.
+// A fresh Pointer node is allocated on each call.
+func pointer(typ types.Type) *types.Pointer {
+	return &types.Pointer{Base: typ}
+}
+
+// indirectType assumes that ptr is a pointer type,
+// or named alias thereof, and returns its base type.
+// Panic ensues if it is not a pointer.
+//
+func indirectType(ptr types.Type) types.Type {
+	if v, ok := underlyingType(ptr).(*types.Pointer); ok {
+		return v.Base
+	}
+	// When debugging it is convenient to comment out this line
+	// and let it continue to print the (illegal) SSA form.
+	panic("indirect() of non-pointer type: " + ptr.String())
+}
+
+// deref returns a pointer's base type; otherwise it returns typ.
+func deref(typ types.Type) types.Type {
+	if typ, ok := underlyingType(typ).(*types.Pointer); ok {
+		return typ.Base
+	}
+	return typ
+}
+
+// methodIndex returns the method (and its index) named id within the
+// method table methods of named or interface type typ.  If not found,
+// panic ensues.
+//
+func methodIndex(typ types.Type, methods []*types.Method, id Id) (int, *types.Method) {
+	for i, m := range methods {
+		if IdFromQualifiedName(m.QualifiedName) == id {
+			return i, m
+		}
+	}
+	panic(fmt.Sprint("method not found: ", id, " in interface ", typ))
+}
+
+// isSuperinterface returns true if x is a superinterface of y,
+// i.e.  x's methods are a subset of y's.
+//
+func isSuperinterface(x, y *types.Interface) bool {
+	if len(y.Methods) < len(x.Methods) {
+		return false
+	}
+	// TODO(adonovan): opt: this is quadratic.
+	for _, xm := range x.Methods {
+		xid := IdFromQualifiedName(xm.QualifiedName)
+		found := false
+		for _, ym := range y.Methods {
+			if xid == IdFromQualifiedName(ym.QualifiedName) {
+				if !types.IsIdentical(xm.Type, ym.Type) {
+					return false // common name but conflicting types
+				}
+				found = true
+				break
+			}
+		}
+		if !found {
+			return false // y doesn't have this method
+		}
+	}
+	return true
+}
+
+// objKind returns the syntactic category of the named entity denoted by obj.
+func objKind(obj types.Object) ast.ObjKind {
+	switch obj.(type) {
+	case *types.Func:
+		return ast.Fun
+	case *types.Var:
+		return ast.Var
+	case *types.Const:
+		return ast.Con
+	case *types.TypeName:
+		return ast.Typ
+	case *types.Package:
+		return ast.Pkg
+	}
+	panic(fmt.Sprintf("unexpected Object type: %T", obj))
+}
+
+// canHaveConcreteMethods returns true iff typ may have concrete
+// methods associated with it.  Callers must supply allowPtr=true.
+//
+// allowPtr permits exactly one level of pointer indirection: the
+// recursive call passes false, so **T is rejected.
+//
+// TODO(gri): consider putting this in go/types.  It's surprisingly subtle.
+func canHaveConcreteMethods(typ types.Type, allowPtr bool) bool {
+	switch typ := typ.(type) {
+	case *types.Pointer:
+		// *T only at the top level, and only if T itself qualifies.
+		return allowPtr && canHaveConcreteMethods(typ.Base, false)
+	case *types.NamedType:
+		// Named types qualify unless the underlying type is itself
+		// a pointer or an interface.
+		switch typ.Underlying.(type) {
+		case *types.Pointer, *types.Interface:
+			return false
+		}
+		return true
+	case *types.Struct:
+		return true
+	}
+	return false
+}
+
+// DefaultType returns the default "typed" type for an "untyped" type;
+// it returns the incoming type for all other types. If there is no
+// corresponding untyped type, the result is types.Typ[types.Invalid].
+//
+// Exported to exp/ssa/interp.
+//
+// TODO(gri): this is a copy of go/types.defaultType; export that function.
+//
+func DefaultType(typ types.Type) types.Type {
+	t, ok := typ.(*types.Basic)
+	if !ok {
+		return typ // non-basic types pass through unchanged
+	}
+	// There is no default type for UntypedNil. For a good error
+	// message, catch that case before calling this function.
+	k := types.Invalid
+	switch t.Kind {
+	case types.UntypedBool:
+		k = types.Bool
+	case types.UntypedInt:
+		k = types.Int
+	case types.UntypedRune:
+		k = types.Rune
+	case types.UntypedFloat:
+		k = types.Float64
+	case types.UntypedComplex:
+		k = types.Complex128
+	case types.UntypedString:
+		k = types.String
+	}
+	return types.Typ[k]
+}
+
+// makeId returns the Id (name, nil) if the name is exported, or
+// (name, pkg) otherwise: only unexported names are qualified by
+// their package.
+//
+func makeId(name string, pkg *types.Package) (id Id) {
+	id.Name = name
+	if !ast.IsExported(name) {
+		id.Pkg = pkg
+		// TODO(gri): fix
+		// if pkg.Path == "" {
+		// 	panic("Package " + pkg.Name + "has empty Path")
+		// }
+	}
+	return
+}
+
+// IdFromQualifiedName returns the Id (qn.Name, nil) if qn is an
+// exported name, or (qn.Name, qn.Pkg) otherwise; see makeId.
+//
+// Exported to exp/ssa/interp.
+//
+func IdFromQualifiedName(qn types.QualifiedName) Id {
+	return makeId(qn.Name, qn.Pkg)
+}
+
+type ids []Id // a sortable slice of Id
+
+// Len reports the number of Ids; part of sort.Interface.
+func (p ids) Len() int { return len(p) }
+
+// Less orders Ids first by the address of their *Package (pointers
+// are canonical, giving a consistent if arbitrary order), then by
+// name within a package; part of sort.Interface.
+func (p ids) Less(i, j int) bool {
+	x, y := p[i], p[j]
+	// *Package pointers are canonical so order by them.
+	// Don't use x.Pkg.ImportPath because sometimes it's empty.
+	// (TODO(gri): fix that.)
+	return reflect.ValueOf(x.Pkg).Pointer() < reflect.ValueOf(y.Pkg).Pointer() ||
+		x.Pkg == y.Pkg && x.Name < y.Name
+}
+
+// Swap exchanges elements i and j; part of sort.Interface.
+func (p ids) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
+
+// logStack prints the formatted "start" message to stderr and
+// returns a closure that prints the corresponding "end" message.
+// Call using 'defer logStack(...)()' to show builder stack on panic.
+// Don't forget trailing parens!
+//
+func logStack(format string, args ...interface{}) func() {
+	msg := fmt.Sprintf(format, args...)
+	io.WriteString(os.Stderr, msg+"\n")
+	return func() {
+		io.WriteString(os.Stderr, msg+" end\n")
+	}
+}