go/internal/gcimporter: write export data for go/types

Add an iexport.go file (and a corresponding iexport_test.go), adapted
from $GOROOT/src/cmd/compile/internal/gc/iexport.go. This code writes
indexed export data for a *go/types.Package.

Most of the code is copied directly from the compiler's iexport.go,
with its internal types replaced by their go/types equivalents; the
rest needed only minor modifications.

Updates golang/go#28260
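
Roughly, the new writer can be exercised as in the sketch below. The
sketch is only illustrative: the example function and the tiny package
"p" are invented here, and it must be compiled from within
golang.org/x/tools, since go/internal/gcimporter is an internal
package.

	package gcimporter_test

	import (
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"
		"log"

		"golang.org/x/tools/go/internal/gcimporter"
	)

	func ExampleIExportData() {
		// Parse and type-check a tiny package.
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", "package p; var X int", 0)
		if err != nil {
			log.Fatal(err)
		}
		var conf types.Config
		pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
		if err != nil {
			log.Fatal(err)
		}

		// Write indexed export data for the *types.Package.
		data, err := gcimporter.IExportData(fset, pkg)
		if err != nil {
			log.Fatal(err)
		}

		// IImportData expects the leading format byte 'i' to have
		// been consumed already, so trim it, as the tests do.
		data = data[1:]

		// Read the data back into a fresh FileSet and package map.
		imports := make(map[string]*types.Package)
		fset2 := token.NewFileSet()
		_, _, err = gcimporter.IImportData(fset2, imports, data, pkg.Path())
		if err != nil {
			log.Fatal(err)
		}
	}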

Change-Id: Ic7e8e99f0c6b886839280b410afffb037da8a79b
Reviewed-on: https://go-review.googlesource.com/c/156901
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Robert Griesemer <gri@golang.org>
diff --git a/go/internal/gcimporter/bexport.go b/go/internal/gcimporter/bexport.go
index 9f65049..a807d0a 100644
--- a/go/internal/gcimporter/bexport.go
+++ b/go/internal/gcimporter/bexport.go
@@ -127,10 +127,10 @@
 	// --- generic export data ---
 
 	// populate type map with predeclared "known" types
-	for index, typ := range predeclared {
+	for index, typ := range predeclared() {
 		p.typIndex[typ] = index
 	}
-	if len(p.typIndex) != len(predeclared) {
+	if len(p.typIndex) != len(predeclared()) {
 		return nil, internalError("duplicate entries in type map?")
 	}
 
diff --git a/go/internal/gcimporter/bimport.go b/go/internal/gcimporter/bimport.go
index b31eacf..e3c3107 100644
--- a/go/internal/gcimporter/bimport.go
+++ b/go/internal/gcimporter/bimport.go
@@ -126,7 +126,7 @@
 	// --- generic export data ---
 
 	// populate typList with predeclared "known" types
-	p.typList = append(p.typList, predeclared...)
+	p.typList = append(p.typList, predeclared()...)
 
 	// read package data
 	pkg = p.pkg()
@@ -976,50 +976,58 @@
 	aliasTag
 )
 
-var predeclared = []types.Type{
-	// basic types
-	types.Typ[types.Bool],
-	types.Typ[types.Int],
-	types.Typ[types.Int8],
-	types.Typ[types.Int16],
-	types.Typ[types.Int32],
-	types.Typ[types.Int64],
-	types.Typ[types.Uint],
-	types.Typ[types.Uint8],
-	types.Typ[types.Uint16],
-	types.Typ[types.Uint32],
-	types.Typ[types.Uint64],
-	types.Typ[types.Uintptr],
-	types.Typ[types.Float32],
-	types.Typ[types.Float64],
-	types.Typ[types.Complex64],
-	types.Typ[types.Complex128],
-	types.Typ[types.String],
+var predecl []types.Type // initialized lazily
 
-	// basic type aliases
-	types.Universe.Lookup("byte").Type(),
-	types.Universe.Lookup("rune").Type(),
+func predeclared() []types.Type {
+	if predecl == nil {
+		// initialize lazily to be sure that all
+		// elements have been initialized before use.
+		predecl = []types.Type{ // basic types
+			types.Typ[types.Bool],
+			types.Typ[types.Int],
+			types.Typ[types.Int8],
+			types.Typ[types.Int16],
+			types.Typ[types.Int32],
+			types.Typ[types.Int64],
+			types.Typ[types.Uint],
+			types.Typ[types.Uint8],
+			types.Typ[types.Uint16],
+			types.Typ[types.Uint32],
+			types.Typ[types.Uint64],
+			types.Typ[types.Uintptr],
+			types.Typ[types.Float32],
+			types.Typ[types.Float64],
+			types.Typ[types.Complex64],
+			types.Typ[types.Complex128],
+			types.Typ[types.String],
 
-	// error
-	types.Universe.Lookup("error").Type(),
+			// basic type aliases
+			types.Universe.Lookup("byte").Type(),
+			types.Universe.Lookup("rune").Type(),
 
-	// untyped types
-	types.Typ[types.UntypedBool],
-	types.Typ[types.UntypedInt],
-	types.Typ[types.UntypedRune],
-	types.Typ[types.UntypedFloat],
-	types.Typ[types.UntypedComplex],
-	types.Typ[types.UntypedString],
-	types.Typ[types.UntypedNil],
+			// error
+			types.Universe.Lookup("error").Type(),
 
-	// package unsafe
-	types.Typ[types.UnsafePointer],
+			// untyped types
+			types.Typ[types.UntypedBool],
+			types.Typ[types.UntypedInt],
+			types.Typ[types.UntypedRune],
+			types.Typ[types.UntypedFloat],
+			types.Typ[types.UntypedComplex],
+			types.Typ[types.UntypedString],
+			types.Typ[types.UntypedNil],
 
-	// invalid type
-	types.Typ[types.Invalid], // only appears in packages with errors
+			// package unsafe
+			types.Typ[types.UnsafePointer],
 
-	// used internally by gc; never used by this package or in .a files
-	anyType{},
+			// invalid type
+			types.Typ[types.Invalid], // only appears in packages with errors
+
+			// used internally by gc; never used by this package or in .a files
+			anyType{},
+		}
+	}
+	return predecl
 }
 
 type anyType struct{}
diff --git a/go/internal/gcimporter/iexport.go b/go/internal/gcimporter/iexport.go
new file mode 100644
index 0000000..be671c7
--- /dev/null
+++ b/go/internal/gcimporter/iexport.go
@@ -0,0 +1,723 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
+// see that file for specification of the format.
+
+// +build go1.11
+
+package gcimporter
+
+import (
+	"bytes"
+	"encoding/binary"
+	"go/ast"
+	"go/constant"
+	"go/token"
+	"go/types"
+	"io"
+	"math/big"
+	"reflect"
+	"sort"
+)
+
+// Current indexed export format version. Increase with each format change.
+// 0: Go1.11 encoding
+const iexportVersion = 0
+
+// IExportData returns the binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+	defer func() {
+		if e := recover(); e != nil {
+			if ierr, ok := e.(internalError); ok {
+				err = ierr
+				return
+			}
+			// Not an internal error; panic again.
+			panic(e)
+		}
+	}()
+
+	p := iexporter{
+		out:         bytes.NewBuffer(nil),
+		fset:        fset,
+		allPkgs:     map[*types.Package]bool{},
+		stringIndex: map[string]uint64{},
+		declIndex:   map[types.Object]uint64{},
+		typIndex:    map[types.Type]uint64{},
+	}
+
+	for i, pt := range predeclared() {
+		p.typIndex[pt] = uint64(i)
+	}
+	if len(p.typIndex) > predeclReserved {
+		panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved))
+	}
+
+	// Initialize work queue with exported declarations.
+	scope := pkg.Scope()
+	for _, name := range scope.Names() {
+		if ast.IsExported(name) {
+			p.pushDecl(scope.Lookup(name))
+		}
+	}
+
+	// Loop until no more work.
+	for !p.declTodo.empty() {
+		p.doDecl(p.declTodo.popHead())
+	}
+
+	// Append indices to data0 section.
+	dataLen := uint64(p.data0.Len())
+	w := p.newWriter()
+	w.writeIndex(p.declIndex, pkg)
+	w.flush()
+
+	// Assemble header.
+	var hdr intWriter
+	hdr.WriteByte('i')
+	hdr.uint64(iexportVersion)
+	hdr.uint64(uint64(p.strings.Len()))
+	hdr.uint64(dataLen)
+
+	// Flush output.
+	io.Copy(p.out, &hdr)
+	io.Copy(p.out, &p.strings)
+	io.Copy(p.out, &p.data0)
+
+	return p.out.Bytes(), nil
+}
+
+// writeIndex writes out an object index. Unlike the compiler, this
+// adaptation always writes the main index, which is also read by
+// non-compiler tools and includes a complete package description
+// (i.e., name and height).
+func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types.Package) {
+	// Build a map from packages to objects from that package.
+	pkgObjs := map[*types.Package][]types.Object{}
+
+	// For the main index, make sure to include every package that
+	// we reference, even if we're not exporting (or reexporting)
+	// any symbols from it.
+	pkgObjs[localpkg] = nil
+	for pkg := range w.p.allPkgs {
+		pkgObjs[pkg] = nil
+	}
+
+	for obj := range index {
+		pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
+	}
+
+	var pkgs []*types.Package
+	for pkg, objs := range pkgObjs {
+		pkgs = append(pkgs, pkg)
+
+		sort.Slice(objs, func(i, j int) bool {
+			return objs[i].Name() < objs[j].Name()
+		})
+	}
+
+	sort.Slice(pkgs, func(i, j int) bool {
+		return pkgs[i].Path() < pkgs[j].Path()
+	})
+
+	w.uint64(uint64(len(pkgs)))
+	for _, pkg := range pkgs {
+		w.string(pkg.Path())
+		w.string(pkg.Name())
+		w.uint64(uint64(0)) // package height is not needed for go/types
+
+		objs := pkgObjs[pkg]
+		w.uint64(uint64(len(objs)))
+		for _, obj := range objs {
+			w.string(obj.Name())
+			w.uint64(index[obj])
+		}
+	}
+}
+
+type iexporter struct {
+	fset *token.FileSet
+	out  *bytes.Buffer
+
+	// allPkgs tracks all packages that have been referenced by
+	// the export data, so we can be sure to include them in the
+	// main index.
+	allPkgs map[*types.Package]bool
+
+	declTodo objQueue
+
+	strings     intWriter
+	stringIndex map[string]uint64
+
+	data0     intWriter
+	declIndex map[types.Object]uint64
+	typIndex  map[types.Type]uint64
+}
+
+// stringOff returns the offset of s within the string section.
+// If not already present, it's added to the end.
+func (p *iexporter) stringOff(s string) uint64 {
+	off, ok := p.stringIndex[s]
+	if !ok {
+		off = uint64(p.strings.Len())
+		p.stringIndex[s] = off
+
+		p.strings.uint64(uint64(len(s)))
+		p.strings.WriteString(s)
+	}
+	return off
+}
+
+// pushDecl adds obj to the declaration work queue, if not already present.
+func (p *iexporter) pushDecl(obj types.Object) {
+	// Package unsafe is known to the compiler and predeclared.
+	assert(obj.Pkg() != types.Unsafe)
+
+	if _, ok := p.declIndex[obj]; ok {
+		return
+	}
+
+	p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
+	p.declTodo.pushTail(obj)
+}
+
+// exportWriter handles writing out individual data section chunks.
+type exportWriter struct {
+	p *iexporter
+
+	data     intWriter
+	currPkg  *types.Package
+	prevFile string
+	prevLine int64
+}
+
+func (p *iexporter) doDecl(obj types.Object) {
+	w := p.newWriter()
+	w.setPkg(obj.Pkg(), false)
+
+	switch obj := obj.(type) {
+	case *types.Var:
+		w.tag('V')
+		w.pos(obj.Pos())
+		w.typ(obj.Type(), obj.Pkg())
+
+	case *types.Func:
+		sig, _ := obj.Type().(*types.Signature)
+		if sig.Recv() != nil {
+			panic(internalErrorf("unexpected method: %v", sig))
+		}
+		w.tag('F')
+		w.pos(obj.Pos())
+		w.signature(sig)
+
+	case *types.Const:
+		w.tag('C')
+		w.pos(obj.Pos())
+		w.value(obj.Type(), obj.Val())
+
+	case *types.TypeName:
+		if obj.IsAlias() {
+			w.tag('A')
+			w.pos(obj.Pos())
+			w.typ(obj.Type(), obj.Pkg())
+			break
+		}
+
+		// Defined type.
+		w.tag('T')
+		w.pos(obj.Pos())
+
+		underlying := obj.Type().Underlying()
+		w.typ(underlying, obj.Pkg())
+
+		t := obj.Type()
+		if types.IsInterface(t) {
+			break
+		}
+
+		named, ok := t.(*types.Named)
+		if !ok {
+			panic(internalErrorf("%s is not a defined type", t))
+		}
+
+		n := named.NumMethods()
+		w.uint64(uint64(n))
+		for i := 0; i < n; i++ {
+			m := named.Method(i)
+			w.pos(m.Pos())
+			w.string(m.Name())
+			sig, _ := m.Type().(*types.Signature)
+			w.param(sig.Recv())
+			w.signature(sig)
+		}
+
+	default:
+		panic(internalErrorf("unexpected object: %v", obj))
+	}
+
+	p.declIndex[obj] = w.flush()
+}
+
+func (w *exportWriter) tag(tag byte) {
+	w.data.WriteByte(tag)
+}
+
+func (w *exportWriter) pos(pos token.Pos) {
+	p := w.p.fset.Position(pos)
+	file := p.Filename
+	line := int64(p.Line)
+
+	// When file is the same as the last position (common case),
+	// we can save a few bytes by delta encoding just the line
+	// number.
+	//
+	// Note: Because data objects may be read out of order (or not
+	// at all), we can only apply delta encoding within a single
+	// object. This is handled implicitly by tracking prevFile and
+	// prevLine as fields of exportWriter.
+
+	if file == w.prevFile {
+		delta := line - w.prevLine
+		w.int64(delta)
+		if delta == deltaNewFile {
+			w.int64(-1)
+		}
+	} else {
+		w.int64(deltaNewFile)
+		w.int64(line) // line >= 0
+		w.string(file)
+		w.prevFile = file
+	}
+	w.prevLine = line
+}
+
+func (w *exportWriter) pkg(pkg *types.Package) {
+	// Ensure any referenced packages are declared in the main index.
+	w.p.allPkgs[pkg] = true
+
+	w.string(pkg.Path())
+}
+
+func (w *exportWriter) qualifiedIdent(obj types.Object) {
+	// Ensure any referenced declarations are written out too.
+	w.p.pushDecl(obj)
+
+	w.string(obj.Name())
+	w.pkg(obj.Pkg())
+}
+
+func (w *exportWriter) typ(t types.Type, pkg *types.Package) {
+	w.data.uint64(w.p.typOff(t, pkg))
+}
+
+func (p *iexporter) newWriter() *exportWriter {
+	return &exportWriter{p: p}
+}
+
+func (w *exportWriter) flush() uint64 {
+	off := uint64(w.p.data0.Len())
+	io.Copy(&w.p.data0, &w.data)
+	return off
+}
+
+func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 {
+	off, ok := p.typIndex[t]
+	if !ok {
+		w := p.newWriter()
+		w.doTyp(t, pkg)
+		off = predeclReserved + w.flush()
+		p.typIndex[t] = off
+	}
+	return off
+}
+
+func (w *exportWriter) startType(k itag) {
+	w.data.uint64(uint64(k))
+}
+
+func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
+	switch t := t.(type) {
+	case *types.Named:
+		w.startType(definedType)
+		w.qualifiedIdent(t.Obj())
+
+	case *types.Pointer:
+		w.startType(pointerType)
+		w.typ(t.Elem(), pkg)
+
+	case *types.Slice:
+		w.startType(sliceType)
+		w.typ(t.Elem(), pkg)
+
+	case *types.Array:
+		w.startType(arrayType)
+		w.uint64(uint64(t.Len()))
+		w.typ(t.Elem(), pkg)
+
+	case *types.Chan:
+		w.startType(chanType)
+		// 1 RecvOnly; 2 SendOnly; 3 SendRecv
+		var dir uint64
+		switch t.Dir() {
+		case types.RecvOnly:
+			dir = 1
+		case types.SendOnly:
+			dir = 2
+		case types.SendRecv:
+			dir = 3
+		}
+		w.uint64(dir)
+		w.typ(t.Elem(), pkg)
+
+	case *types.Map:
+		w.startType(mapType)
+		w.typ(t.Key(), pkg)
+		w.typ(t.Elem(), pkg)
+
+	case *types.Signature:
+		w.startType(signatureType)
+		w.setPkg(pkg, true)
+		w.signature(t)
+
+	case *types.Struct:
+		w.startType(structType)
+		w.setPkg(pkg, true)
+
+		n := t.NumFields()
+		w.uint64(uint64(n))
+		for i := 0; i < n; i++ {
+			f := t.Field(i)
+			w.pos(f.Pos())
+			w.string(f.Name())
+			w.typ(f.Type(), pkg)
+			w.bool(f.Embedded())
+			w.string(t.Tag(i)) // note (or tag)
+		}
+
+	case *types.Interface:
+		w.startType(interfaceType)
+		w.setPkg(pkg, true)
+
+		n := t.NumEmbeddeds()
+		w.uint64(uint64(n))
+		for i := 0; i < n; i++ {
+			f := t.Embedded(i)
+			w.pos(f.Obj().Pos())
+			w.typ(f.Obj().Type(), f.Obj().Pkg())
+		}
+
+		n = t.NumExplicitMethods()
+		w.uint64(uint64(n))
+		for i := 0; i < n; i++ {
+			m := t.ExplicitMethod(i)
+			w.pos(m.Pos())
+			w.string(m.Name())
+			sig, _ := m.Type().(*types.Signature)
+			w.signature(sig)
+		}
+
+	default:
+		panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
+	}
+}
+
+func (w *exportWriter) setPkg(pkg *types.Package, write bool) {
+	if write {
+		w.pkg(pkg)
+	}
+
+	w.currPkg = pkg
+}
+
+func (w *exportWriter) signature(sig *types.Signature) {
+	w.paramList(sig.Params())
+	w.paramList(sig.Results())
+	if sig.Params().Len() > 0 {
+		w.bool(sig.Variadic())
+	}
+}
+
+func (w *exportWriter) paramList(tup *types.Tuple) {
+	n := tup.Len()
+	w.uint64(uint64(n))
+	for i := 0; i < n; i++ {
+		w.param(tup.At(i))
+	}
+}
+
+func (w *exportWriter) param(obj types.Object) {
+	w.pos(obj.Pos())
+	w.localIdent(obj)
+	w.typ(obj.Type(), obj.Pkg())
+}
+
+func (w *exportWriter) value(typ types.Type, v constant.Value) {
+	w.typ(typ, nil)
+
+	switch v.Kind() {
+	case constant.Bool:
+		w.bool(constant.BoolVal(v))
+	case constant.Int:
+		var i big.Int
+		if i64, exact := constant.Int64Val(v); exact {
+			i.SetInt64(i64)
+		} else if ui64, exact := constant.Uint64Val(v); exact {
+			i.SetUint64(ui64)
+		} else {
+			i.SetString(v.ExactString(), 10)
+		}
+		w.mpint(&i, typ)
+	case constant.Float:
+		f := constantToFloat(v)
+		w.mpfloat(f, typ)
+	case constant.Complex:
+		w.mpfloat(constantToFloat(constant.Real(v)), typ)
+		w.mpfloat(constantToFloat(constant.Imag(v)), typ)
+	case constant.String:
+		w.string(constant.StringVal(v))
+	case constant.Unknown:
+		// package contains type errors
+	default:
+		panic(internalErrorf("unexpected value %v (%T)", v, v))
+	}
+}
+
+// constantToFloat converts a constant.Value with kind constant.Float to a
+// big.Float.
+func constantToFloat(x constant.Value) *big.Float {
+	assert(x.Kind() == constant.Float)
+	// Use the same floating-point precision (512) as cmd/compile
+	// (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
+	const mpprec = 512
+	var f big.Float
+	f.SetPrec(mpprec)
+	if v, exact := constant.Float64Val(x); exact {
+		// float64
+		f.SetFloat64(v)
+	} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+		// TODO(gri): add big.Rat accessor to constant.Value.
+		n := valueToRat(num)
+		d := valueToRat(denom)
+		f.SetRat(n.Quo(n, d))
+	} else {
+		// Value too large to represent as a fraction => inaccessible.
+		// TODO(gri): add big.Float accessor to constant.Value.
+		_, ok := f.SetString(x.ExactString())
+		assert(ok)
+	}
+	return &f
+}
+
+// mpint exports a multi-precision integer.
+//
+// For unsigned types, small values are written out as a single
+// byte. Larger values are written out as a length-prefixed big-endian
+// byte string, where the length prefix is encoded as its complement.
+// For example, bytes 0, 1, and 2 directly represent the integer
+// values 0, 1, and 2; while bytes 255, 254, and 253 indicate that
+// 1-, 2-, and 3-byte big-endian strings follow, respectively.
+//
+// The encoding for signed types uses the same general approach as
+// for unsigned types, except small values use zig-zag encoding and
+// the bottom bit of the length prefix byte for large values is
+// reserved as a sign bit.
+//
+// The exact boundary between small and large encodings varies
+// according to the maximum number of bytes needed to encode a value
+// of type typ. As a special case, 8-bit types are always encoded as a
+// single byte.
+//
+// TODO(mdempsky): Is this level of complexity really worthwhile?
+func (w *exportWriter) mpint(x *big.Int, typ types.Type) {
+	basic, ok := typ.Underlying().(*types.Basic)
+	if !ok {
+		panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying()))
+	}
+
+	signed, maxBytes := intSize(basic)
+
+	negative := x.Sign() < 0
+	if !signed && negative {
+		panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x))
+	}
+
+	b := x.Bytes()
+	if len(b) > 0 && b[0] == 0 {
+		panic(internalErrorf("leading zeros"))
+	}
+	if uint(len(b)) > maxBytes {
+		panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x))
+	}
+
+	maxSmall := 256 - maxBytes
+	if signed {
+		maxSmall = 256 - 2*maxBytes
+	}
+	if maxBytes == 1 {
+		maxSmall = 256
+	}
+
+	// Check if x can use small value encoding.
+	if len(b) <= 1 {
+		var ux uint
+		if len(b) == 1 {
+			ux = uint(b[0])
+		}
+		if signed {
+			ux <<= 1
+			if negative {
+				ux--
+			}
+		}
+		if ux < maxSmall {
+			w.data.WriteByte(byte(ux))
+			return
+		}
+	}
+
+	n := 256 - uint(len(b))
+	if signed {
+		n = 256 - 2*uint(len(b))
+		if negative {
+			n |= 1
+		}
+	}
+	if n < maxSmall || n >= 256 {
+		panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n))
+	}
+
+	w.data.WriteByte(byte(n))
+	w.data.Write(b)
+}
+
+// mpfloat exports a multi-precision floating point number.
+//
+// The number's value is decomposed into mantissa × 2**exponent, where
+// mantissa is an integer. The value is written out as mantissa (as a
+// multi-precision integer) and then the exponent, except exponent is
+// omitted if mantissa is zero.
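+//
+// For example, 6.25 = 25 × 2**-2, so it is written as the mantissa 25
+// followed by the exponent -2.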
+func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) {
+	if f.IsInf() {
+		panic("infinite constant")
+	}
+
+	// Break into f = mant × 2**exp, with 0.5 <= mant < 1.
+	var mant big.Float
+	exp := int64(f.MantExp(&mant))
+
+	// Scale so that mant is an integer.
+	prec := mant.MinPrec()
+	mant.SetMantExp(&mant, int(prec))
+	exp -= int64(prec)
+
+	manti, acc := mant.Int(nil)
+	if acc != big.Exact {
+		panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc))
+	}
+	w.mpint(manti, typ)
+	if manti.Sign() != 0 {
+		w.int64(exp)
+	}
+}
+
+func (w *exportWriter) bool(b bool) bool {
+	var x uint64
+	if b {
+		x = 1
+	}
+	w.uint64(x)
+	return b
+}
+
+func (w *exportWriter) int64(x int64)   { w.data.int64(x) }
+func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
+func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
+
+func (w *exportWriter) localIdent(obj types.Object) {
+	// Anonymous parameters.
+	if obj == nil {
+		w.string("")
+		return
+	}
+
+	name := obj.Name()
+	if name == "_" {
+		w.string("_")
+		return
+	}
+
+	w.string(name)
+}
+
+type intWriter struct {
+	bytes.Buffer
+}
+
+func (w *intWriter) int64(x int64) {
+	var buf [binary.MaxVarintLen64]byte
+	n := binary.PutVarint(buf[:], x)
+	w.Write(buf[:n])
+}
+
+func (w *intWriter) uint64(x uint64) {
+	var buf [binary.MaxVarintLen64]byte
+	n := binary.PutUvarint(buf[:], x)
+	w.Write(buf[:n])
+}
+
+func assert(cond bool) {
+	if !cond {
+		panic("internal error: assertion failed")
+	}
+}
+
+// The below is copied from go/src/cmd/compile/internal/gc/syntax.go.
+
+// objQueue is a FIFO queue of types.Object. The zero value of objQueue is
+// a ready-to-use empty queue.
+type objQueue struct {
+	ring       []types.Object
+	head, tail int
+}
+
+// empty returns true if q contains no objects.
+func (q *objQueue) empty() bool {
+	return q.head == q.tail
+}
+
+// pushTail appends n to the tail of the queue.
+func (q *objQueue) pushTail(obj types.Object) {
+	if len(q.ring) == 0 {
+		q.ring = make([]types.Object, 16)
+	} else if q.head+len(q.ring) == q.tail {
+		// Grow the ring.
+		nring := make([]types.Object, len(q.ring)*2)
+		// Copy the old elements.
+		part := q.ring[q.head%len(q.ring):]
+		if q.tail-q.head <= len(part) {
+			part = part[:q.tail-q.head]
+			copy(nring, part)
+		} else {
+			pos := copy(nring, part)
+			copy(nring[pos:], q.ring[:q.tail%len(q.ring)])
+		}
+		q.ring, q.head, q.tail = nring, 0, q.tail-q.head
+	}
+
+	q.ring[q.tail%len(q.ring)] = obj
+	q.tail++
+}
+
+// popHead pops an object from the head of the queue. It panics if q is empty.
+func (q *objQueue) popHead() types.Object {
+	if q.empty() {
+		panic("dequeue empty")
+	}
+	obj := q.ring[q.head%len(q.ring)]
+	q.head++
+	return obj
+}
diff --git a/go/internal/gcimporter/iexport_test.go b/go/internal/gcimporter/iexport_test.go
new file mode 100644
index 0000000..efb8d75
--- /dev/null
+++ b/go/internal/gcimporter/iexport_test.go
@@ -0,0 +1,305 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This is a copy of bexport_test.go for iexport.go.
+
+// +build go1.11
+
+package gcimporter_test
+
+import (
+	"fmt"
+	"go/ast"
+	"go/build"
+	"go/constant"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"math/big"
+	"reflect"
+	"runtime"
+	"sort"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/go/buildutil"
+	"golang.org/x/tools/go/internal/gcimporter"
+	"golang.org/x/tools/go/loader"
+)
+
+func TestIExportData_stdlib(t *testing.T) {
+	if runtime.Compiler == "gccgo" {
+		t.Skip("gccgo standard library is inaccessible")
+	}
+	if runtime.GOOS == "android" {
+		t.Skipf("incomplete std lib on %s", runtime.GOOS)
+	}
+
+	// Load, parse and type-check the program.
+	ctxt := build.Default // copy
+	ctxt.GOPATH = ""      // disable GOPATH
+	conf := loader.Config{
+		Build:       &ctxt,
+		AllowErrors: true,
+	}
+	for _, path := range buildutil.AllPackages(conf.Build) {
+		conf.Import(path)
+	}
+
+	// Create a package containing type and value errors to ensure
+	// they are properly encoded/decoded.
+	f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors
+const UnknownValue = "" + 0
+type UnknownType undefined
+`)
+	if err != nil {
+		t.Fatal(err)
+	}
+	conf.CreateFromFiles("haserrors", f)
+
+	prog, err := conf.Load()
+	if err != nil {
+		t.Fatalf("Load failed: %v", err)
+	}
+
+	numPkgs := len(prog.AllPackages)
+	if want := 248; numPkgs < want {
+		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
+	}
+
+	var sorted []*types.Package
+	for pkg := range prog.AllPackages {
+		sorted = append(sorted, pkg)
+	}
+	sort.Slice(sorted, func(i, j int) bool {
+		return sorted[i].Path() < sorted[j].Path()
+	})
+
+	for _, pkg := range sorted {
+		info := prog.AllPackages[pkg]
+		if info.Files == nil {
+			continue // empty directory
+		}
+		exportdata, err := gcimporter.IExportData(conf.Fset, pkg)
+		if err != nil {
+			t.Fatal(err)
+		}
+		if exportdata[0] == 'i' {
+			exportdata = exportdata[1:] // trim the 'i' in the header
+		} else {
+			t.Fatalf("unexpected first character of export data: %v", exportdata[0])
+		}
+
+		imports := make(map[string]*types.Package)
+		fset2 := token.NewFileSet()
+		n, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
+		if err != nil {
+			t.Errorf("IImportData(%s): %v", pkg.Path(), err)
+			continue
+		}
+		if n != len(exportdata) {
+			t.Errorf("IImportData(%s) decoded %d bytes, want %d",
+				pkg.Path(), n, len(exportdata))
+		}
+
+		// Compare the packages' corresponding members.
+		for _, name := range pkg.Scope().Names() {
+			if !ast.IsExported(name) {
+				continue
+			}
+			obj1 := pkg.Scope().Lookup(name)
+			obj2 := pkg2.Scope().Lookup(name)
+			if obj2 == nil {
+				t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1)
+				continue
+			}
+
+			fl1 := fileLine(conf.Fset, obj1)
+			fl2 := fileLine(fset2, obj2)
+			if fl1 != fl2 {
+				t.Errorf("%s.%s: got posn %s, want %s",
+					pkg.Path(), name, fl2, fl1)
+			}
+
+			if err := cmpObj(obj1, obj2); err != nil {
+				t.Errorf("%s.%s: %s\ngot:  %s\nwant: %s",
+					pkg.Path(), name, err, obj2, obj1)
+			}
+		}
+	}
+}
+
+// TestIExportData_long tests the position of an imported object
+// declared in a very long input file.  Line numbers greater than
+// maxlines are reported as line 1, not garbage or token.NoPos.
+func TestIExportData_long(t *testing.T) {
+	// parse and typecheck
+	longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int"
+	fset1 := token.NewFileSet()
+	f, err := parser.ParseFile(fset1, "foo.go", longFile, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	var conf types.Config
+	pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// export
+	exportdata, err := gcimporter.IExportData(fset1, pkg)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if exportdata[0] == 'i' {
+		exportdata = exportdata[1:] // trim the 'i' in the header
+	} else {
+		t.Fatalf("unexpected first character of export data: %v", exportdata[0])
+	}
+
+	// import
+	imports := make(map[string]*types.Package)
+	fset2 := token.NewFileSet()
+	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
+	if err != nil {
+		t.Fatalf("IImportData(%s): %v", pkg.Path(), err)
+	}
+
+	// compare
+	posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos())
+	posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos())
+	if want := "foo.go:1:1"; posn2.String() != want {
+		t.Errorf("X position = %s, want %s (orig was %s)",
+			posn2, want, posn1)
+	}
+}
+
+func TestIExportData_typealiases(t *testing.T) {
+	// parse and typecheck
+	fset1 := token.NewFileSet()
+	f, err := parser.ParseFile(fset1, "p.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	var conf types.Config
+	pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
+	if err == nil {
+		// foo is undeclared in src; we should see an error
+		t.Fatal("invalid source type-checked without error")
+	}
+	if pkg1 == nil {
+		// despite incorrect src we should see a (partially) type-checked package
+		t.Fatal("nil package returned")
+	}
+	checkPkg(t, pkg1, "export")
+
+	// export
+	exportdata, err := gcimporter.IExportData(fset1, pkg1)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if exportdata[0] == 'i' {
+		exportdata = exportdata[1:] // trim the 'i' in the header
+	} else {
+		t.Fatalf("unexpected first character of export data: %v", exportdata[0])
+	}
+
+	// import
+	imports := make(map[string]*types.Package)
+	fset2 := token.NewFileSet()
+	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path())
+	if err != nil {
+		t.Fatalf("IImportData(%s): %v", pkg1.Path(), err)
+	}
+	checkPkg(t, pkg2, "import")
+}
+
+// cmpObj reports how x and y differ. They are assumed to belong to different
+// universes so cannot be compared directly. It is an adapted version of
+// equalObj in bexport_test.go.
+func cmpObj(x, y types.Object) error {
+	if reflect.TypeOf(x) != reflect.TypeOf(y) {
+		return fmt.Errorf("%T vs %T", x, y)
+	}
+	xt := x.Type()
+	yt := y.Type()
+	switch x.(type) {
+	case *types.Var, *types.Func:
+		// ok
+	case *types.Const:
+		xval := x.(*types.Const).Val()
+		yval := y.(*types.Const).Val()
+		equal := constant.Compare(xval, token.EQL, yval)
+		if !equal {
+			// try approx. comparison
+			xkind := xval.Kind()
+			ykind := yval.Kind()
+			if xkind == constant.Complex || ykind == constant.Complex {
+				equal = same(constant.Real(xval), constant.Real(yval)) &&
+					same(constant.Imag(xval), constant.Imag(yval))
+			} else if xkind == constant.Float || ykind == constant.Float {
+				equal = same(xval, yval)
+			} else if xkind == constant.Unknown && ykind == constant.Unknown {
+				equal = true
+			}
+		}
+		if !equal {
+			return fmt.Errorf("unequal constants %s vs %s", xval, yval)
+		}
+	case *types.TypeName:
+		xt = xt.Underlying()
+		yt = yt.Underlying()
+	default:
+		return fmt.Errorf("unexpected %T", x)
+	}
+	return equalType(xt, yt)
+}
+
+// Use the same floating-point precision (512) as cmd/compile
+// (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
+const mpprec = 512
+
+// same compares non-complex numeric values and reports if they are approximately equal.
+func same(x, y constant.Value) bool {
+	xf := constantToFloat(x)
+	yf := constantToFloat(y)
+	d := new(big.Float).Sub(xf, yf)
+	d.Abs(d)
+	eps := big.NewFloat(1.0 / (1 << (mpprec - 1))) // allow for 1 bit of error
+	return d.Cmp(eps) < 0
+}
+
+// copy of the function with the same name in iexport.go.
+func constantToFloat(x constant.Value) *big.Float {
+	var f big.Float
+	f.SetPrec(mpprec)
+	if v, exact := constant.Float64Val(x); exact {
+		// float64
+		f.SetFloat64(v)
+	} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+		// TODO(gri): add big.Rat accessor to constant.Value.
+		n := valueToRat(num)
+		d := valueToRat(denom)
+		f.SetRat(n.Quo(n, d))
+	} else {
+		// Value too large to represent as a fraction => inaccessible.
+		// TODO(gri): add big.Float accessor to constant.Value.
+		_, ok := f.SetString(x.ExactString())
+		if !ok {
+			panic("should not reach here")
+		}
+	}
+	return &f
+}
+
+// copy of the function with the same name in iexport.go.
+func valueToRat(x constant.Value) *big.Rat {
+	// Convert little-endian to big-endian.
+	// I can't believe this is necessary.
+	bytes := constant.Bytes(x)
+	for i := 0; i < len(bytes)/2; i++ {
+		bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+	}
+	return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
diff --git a/go/internal/gcimporter/iimport.go b/go/internal/gcimporter/iimport.go
index 0fd22bb..3cb7ae5 100644
--- a/go/internal/gcimporter/iimport.go
+++ b/go/internal/gcimporter/iimport.go
@@ -109,7 +109,7 @@
 		},
 	}
 
-	for i, pt := range predeclared {
+	for i, pt := range predeclared() {
 		p.typCache[uint64(i)] = pt
 	}
 
@@ -142,8 +142,12 @@
 		p.pkgIndex[pkg] = nameIndex
 		pkgList[i] = pkg
 	}
-
-	localpkg := pkgList[0]
+	var localpkg *types.Package
+	for _, pkg := range pkgList {
+		if pkg.Path() == path {
+			localpkg = pkg
+		}
+	}
 
 	names := make([]string, 0, len(p.pkgIndex[localpkg]))
 	for name := range p.pkgIndex[localpkg] {
@@ -330,6 +334,10 @@
 		val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
 
 	default:
+		if b.Kind() == types.Invalid {
+			val = constant.MakeUnknown()
+			return
+		}
 		errorf("unexpected type %v", typ) // panics
 		panic("unreachable")
 	}