[dev.cc] cmd/internal/gc, cmd/new6g etc: convert from cmd/gc, cmd/6g etc

First draft of converted Go compiler, using rsc.io/c2go rev 83d795a.

Change-Id: I29f4c7010de07d2ff1947bbca9865879d83c32c3
Reviewed-on: https://go-review.googlesource.com/4851
Reviewed-by: Rob Pike <r@golang.org>
diff --git a/src/cmd/internal/gc/walk.go b/src/cmd/internal/gc/walk.go
new file mode 100644
index 0000000..37299ca
--- /dev/null
+++ b/src/cmd/internal/gc/walk.go
@@ -0,0 +1,4531 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gc
+
+import (
+	"cmd/internal/obj"
+	"fmt"
+	"strings"
+)
+
+var mpzero Mpint
+
+// The constant is known to runtime.
+const (
+	tmpstringbufsize = 32
+)
+
+func walk(fn *Node) {
+	var s string
+	var l *NodeList
+	var lno int
+
+	Curfn = fn
+
+	if Debug['W'] != 0 {
+		s = fmt.Sprintf("\nbefore %v", Sconv(Curfn.Nname.Sym, 0))
+		dumplist(s, Curfn.Nbody)
+	}
+
+	lno = int(lineno)
+
+	// Final typecheck for any unused variables.
+	// It's hard to be on the heap when not used, but best to be consistent about &^PHEAP here and below.
+	for l = fn.Dcl; l != nil; l = l.Next {
+		if l.N.Op == ONAME && l.N.Class&^PHEAP == PAUTO {
+			typecheck(&l.N, Erv|Easgn)
+		}
+	}
+
+	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
+	for l = fn.Dcl; l != nil; l = l.Next {
+		if l.N.Op == ONAME && l.N.Class&^PHEAP == PAUTO && l.N.Defn != nil && l.N.Defn.Op == OTYPESW && l.N.Used != 0 {
+			l.N.Defn.Left.Used++
+		}
+	}
+
+	for l = fn.Dcl; l != nil; l = l.Next {
+		if l.N.Op != ONAME || l.N.Class&^PHEAP != PAUTO || l.N.Sym.Name[0] == '&' || l.N.Used != 0 {
+			continue
+		}
+		if l.N.Defn != nil && l.N.Defn.Op == OTYPESW {
+			if l.N.Defn.Left.Used != 0 {
+				continue
+			}
+			lineno = l.N.Defn.Left.Lineno
+			Yyerror("%v declared and not used", Sconv(l.N.Sym, 0))
+			l.N.Defn.Left.Used = 1 // suppress repeats
+		} else {
+			lineno = l.N.Lineno
+			Yyerror("%v declared and not used", Sconv(l.N.Sym, 0))
+		}
+	}
+
+	lineno = int32(lno)
+	if nerrors != 0 {
+		return
+	}
+	walkstmtlist(Curfn.Nbody)
+	if Debug['W'] != 0 {
+		s = fmt.Sprintf("after walk %v", Sconv(Curfn.Nname.Sym, 0))
+		dumplist(s, Curfn.Nbody)
+	}
+
+	heapmoves()
+	if Debug['W'] != 0 && Curfn.Enter != nil {
+		s = fmt.Sprintf("enter %v", Sconv(Curfn.Nname.Sym, 0))
+		dumplist(s, Curfn.Enter)
+	}
+}
+
+func walkstmtlist(l *NodeList) {
+	for ; l != nil; l = l.Next {
+		walkstmt(&l.N)
+	}
+}
+
+func samelist(a *NodeList, b *NodeList) int {
+	for ; a != nil && b != nil; (func() { a = a.Next; b = b.Next })() {
+		if a.N != b.N {
+			return 0
+		}
+	}
+	return bool2int(a == b)
+}
+
+func paramoutheap(fn *Node) int {
+	var l *NodeList
+
+	for l = fn.Dcl; l != nil; l = l.Next {
+		switch l.N.Class {
+		case PPARAMOUT,
+			PPARAMOUT | PHEAP:
+			return int(l.N.Addrtaken)
+
+			// stop early - parameters are over
+		case PAUTO,
+			PAUTO | PHEAP:
+			return 0
+		}
+	}
+
+	return 0
+}
+
+// adds "adjust" to all the argument locations for the call n.
+// n must be a defer or go node that has already been walked.
+func adjustargs(n *Node, adjust int) {
+	var callfunc *Node
+	var arg *Node
+	var lhs *Node
+	var args *NodeList
+
+	callfunc = n.Left
+	for args = callfunc.List; args != nil; args = args.Next {
+		arg = args.N
+		if arg.Op != OAS {
+			Yyerror("call arg not assignment")
+		}
+		lhs = arg.Left
+		if lhs.Op == ONAME {
+			// This is a temporary introduced by reorder1.
+			// The real store to the stack appears later in the arg list.
+			continue
+		}
+
+		if lhs.Op != OINDREG {
+			Yyerror("call argument store does not use OINDREG")
+		}
+
+		// can't really check this in machine-indep code.
+		//if(lhs->val.u.reg != D_SP)
+		//      yyerror("call arg assign not indreg(SP)");
+		lhs.Xoffset += int64(adjust)
+	}
+}
+
+func walkstmt(np **Node) {
+	var init *NodeList
+	var ll *NodeList
+	var rl *NodeList
+	var cl int
+	var n *Node
+	var f *Node
+
+	n = *np
+	if n == nil {
+		return
+	}
+	if n.Dodata == 2 { // don't walk, generated by anylit.
+		return
+	}
+
+	setlineno(n)
+
+	walkstmtlist(n.Ninit)
+
+	switch n.Op {
+	default:
+		if n.Op == ONAME {
+			Yyerror("%v is not a top level statement", Sconv(n.Sym, 0))
+		} else {
+			Yyerror("%v is not a top level statement", Oconv(int(n.Op), 0))
+		}
+		Dump("nottop", n)
+
+	case OAS,
+		OASOP,
+		OAS2,
+		OAS2DOTTYPE,
+		OAS2RECV,
+		OAS2FUNC,
+		OAS2MAPR,
+		OCLOSE,
+		OCOPY,
+		OCALLMETH,
+		OCALLINTER,
+		OCALL,
+		OCALLFUNC,
+		ODELETE,
+		OSEND,
+		OPRINT,
+		OPRINTN,
+		OPANIC,
+		OEMPTY,
+		ORECOVER:
+		if n.Typecheck == 0 {
+			Fatal("missing typecheck: %v", Nconv(n, obj.FmtSign))
+		}
+		init = n.Ninit
+		n.Ninit = nil
+		walkexpr(&n, &init)
+		addinit(&n, init)
+		if (*np).Op == OCOPY && n.Op == OCONVNOP {
+			n.Op = OEMPTY // don't leave plain values as statements.
+		}
+
+		// special case for a receive where we throw away
+	// the value received.
+	case ORECV:
+		if n.Typecheck == 0 {
+			Fatal("missing typecheck: %v", Nconv(n, obj.FmtSign))
+		}
+		init = n.Ninit
+		n.Ninit = nil
+
+		walkexpr(&n.Left, &init)
+		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil())
+		walkexpr(&n, &init)
+
+		addinit(&n, init)
+
+	case OBREAK,
+		ODCL,
+		OCONTINUE,
+		OFALL,
+		OGOTO,
+		OLABEL,
+		ODCLCONST,
+		ODCLTYPE,
+		OCHECKNIL,
+		OVARKILL:
+		break
+
+	case OBLOCK:
+		walkstmtlist(n.List)
+
+	case OXCASE:
+		Yyerror("case statement out of place")
+		n.Op = OCASE
+		fallthrough
+
+	case OCASE:
+		walkstmt(&n.Right)
+
+	case ODEFER:
+		Hasdefer = 1
+		switch n.Left.Op {
+		case OPRINT,
+			OPRINTN:
+			walkprintfunc(&n.Left, &n.Ninit)
+
+		case OCOPY:
+			n.Left = copyany(n.Left, &n.Ninit, 1)
+
+		default:
+			walkexpr(&n.Left, &n.Ninit)
+		}
+
+		// make room for size & fn arguments.
+		adjustargs(n, 2*Widthptr)
+
+	case OFOR:
+		if n.Ntest != nil {
+			walkstmtlist(n.Ntest.Ninit)
+			init = n.Ntest.Ninit
+			n.Ntest.Ninit = nil
+			walkexpr(&n.Ntest, &init)
+			addinit(&n.Ntest, init)
+		}
+
+		walkstmt(&n.Nincr)
+		walkstmtlist(n.Nbody)
+
+	case OIF:
+		walkexpr(&n.Ntest, &n.Ninit)
+		walkstmtlist(n.Nbody)
+		walkstmtlist(n.Nelse)
+
+	case OPROC:
+		switch n.Left.Op {
+		case OPRINT,
+			OPRINTN:
+			walkprintfunc(&n.Left, &n.Ninit)
+
+		case OCOPY:
+			n.Left = copyany(n.Left, &n.Ninit, 1)
+
+		default:
+			walkexpr(&n.Left, &n.Ninit)
+		}
+
+		// make room for size & fn arguments.
+		adjustargs(n, 2*Widthptr)
+
+	case ORETURN:
+		walkexprlist(n.List, &n.Ninit)
+		if n.List == nil {
+			break
+		}
+		if (Curfn.Type.Outnamed != 0 && count(n.List) > 1) || paramoutheap(Curfn) != 0 {
+			// assign to the function out parameters,
+			// so that reorder3 can fix up conflicts
+			rl = nil
+
+			for ll = Curfn.Dcl; ll != nil; ll = ll.Next {
+				cl = int(ll.N.Class) &^ PHEAP
+				if cl == PAUTO {
+					break
+				}
+				if cl == PPARAMOUT {
+					rl = list(rl, ll.N)
+				}
+			}
+
+			if samelist(rl, n.List) != 0 {
+				// special return in disguise
+				n.List = nil
+
+				break
+			}
+
+			if count(n.List) == 1 && count(rl) > 1 {
+				// OAS2FUNC in disguise
+				f = n.List.N
+
+				if f.Op != OCALLFUNC && f.Op != OCALLMETH && f.Op != OCALLINTER {
+					Fatal("expected return of call, have %v", Nconv(f, 0))
+				}
+				n.List = concat(list1(f), ascompatet(int(n.Op), rl, &f.Type, 0, &n.Ninit))
+				break
+			}
+
+			// move function calls out, to make reorder3's job easier.
+			walkexprlistsafe(n.List, &n.Ninit)
+
+			ll = ascompatee(int(n.Op), rl, n.List, &n.Ninit)
+			n.List = reorder3(ll)
+			break
+		}
+
+		ll = ascompatte(int(n.Op), nil, 0, Getoutarg(Curfn.Type), n.List, 1, &n.Ninit)
+		n.List = ll
+
+	case ORETJMP:
+		break
+
+	case OSELECT:
+		walkselect(n)
+
+	case OSWITCH:
+		walkswitch(n)
+
+	case ORANGE:
+		walkrange(n)
+
+	case OXFALL:
+		Yyerror("fallthrough statement out of place")
+		n.Op = OFALL
+	}
+
+	if n.Op == ONAME {
+		Fatal("walkstmt ended up with name: %v", Nconv(n, obj.FmtSign))
+	}
+
+	*np = n
+}
+
+/*
+ * walk the whole tree of the body of an
+ * expression or simple statement.
+ * the types of expressions are calculated.
+ * compile-time constants are evaluated.
+ * complex side effects like statements are appended to init
+ */
+func walkexprlist(l *NodeList, init **NodeList) {
+	for ; l != nil; l = l.Next {
+		walkexpr(&l.N, init)
+	}
+}
+
+func walkexprlistsafe(l *NodeList, init **NodeList) {
+	for ; l != nil; l = l.Next {
+		l.N = safeexpr(l.N, init)
+		walkexpr(&l.N, init)
+	}
+}
+
+func walkexprlistcheap(l *NodeList, init **NodeList) {
+	for ; l != nil; l = l.Next {
+		l.N = cheapexpr(l.N, init)
+		walkexpr(&l.N, init)
+	}
+}
+
+func walkexpr(np **Node, init **NodeList) {
+	var r *Node
+	var l *Node
+	var var_ *Node
+	var a *Node
+	var ok *Node
+	var map_ *Node
+	var key *Node
+	var ll *NodeList
+	var lr *NodeList
+	var t *Type
+	var et int
+	var old_safemode int
+	var v int64
+	var lno int32
+	var n *Node
+	var fn *Node
+	var n1 *Node
+	var n2 *Node
+	var sym *Sym
+	var buf string
+	var p string
+	var from string
+	var to string
+
+	n = *np
+
+	if n == nil {
+		return
+	}
+
+	if init == &n.Ninit {
+		// not okay to use n->ninit when walking n,
+		// because we might replace n with some other node
+		// and would lose the init list.
+		Fatal("walkexpr init == &n->ninit")
+	}
+
+	if n.Ninit != nil {
+		walkstmtlist(n.Ninit)
+		*init = concat(*init, n.Ninit)
+		n.Ninit = nil
+	}
+
+	// annoying case - not typechecked
+	if n.Op == OKEY {
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		return
+	}
+
+	lno = setlineno(n)
+
+	if Debug['w'] > 1 {
+		Dump("walk-before", n)
+	}
+
+	if n.Typecheck != 1 {
+		Fatal("missed typecheck: %v\n", Nconv(n, obj.FmtSign))
+	}
+
+	switch n.Op {
+	default:
+		Dump("walk", n)
+		Fatal("walkexpr: switch 1 unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+
+	case OTYPE,
+		ONONAME,
+		OINDREG,
+		OEMPTY,
+		OPARAM:
+		goto ret
+
+	case ONOT,
+		OMINUS,
+		OPLUS,
+		OCOM,
+		OREAL,
+		OIMAG,
+		ODOTMETH,
+		ODOTINTER:
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case OIND:
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case ODOT:
+		usefield(n)
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case ODOTPTR:
+		usefield(n)
+		if n.Op == ODOTPTR && n.Left.Type.Type.Width == 0 {
+			// No actual copy will be generated, so emit an explicit nil check.
+			n.Left = cheapexpr(n.Left, init)
+
+			checknil(n.Left, init)
+		}
+
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case OEFACE:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		goto ret
+
+	case OSPTR,
+		OITAB:
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case OLEN,
+		OCAP:
+		walkexpr(&n.Left, init)
+
+		// replace len(*[10]int) with 10.
+		// delayed until now to preserve side effects.
+		t = n.Left.Type
+
+		if Isptr[t.Etype] != 0 {
+			t = t.Type
+		}
+		if Isfixedarray(t) != 0 {
+			safeexpr(n.Left, init)
+			Nodconst(n, n.Type, t.Bound)
+			n.Typecheck = 1
+		}
+
+		goto ret
+
+	case OLSH,
+		ORSH:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		t = n.Left.Type
+		n.Bounded = uint8(bounded(n.Right, 8*t.Width))
+		if Debug['m'] != 0 && n.Etype != 0 && !(Isconst(n.Right, CTINT) != 0) {
+			Warn("shift bounds check elided")
+		}
+		goto ret
+
+		// Use results from call expression as arguments for complex.
+	case OAND,
+		OSUB,
+		OHMUL,
+		OLT,
+		OLE,
+		OGE,
+		OGT,
+		OADD,
+		OCOMPLEX,
+		OLROT:
+		if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil {
+			n.Left = n.List.N
+			n.Right = n.List.Next.N
+		}
+
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		goto ret
+
+	case OOR,
+		OXOR:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		walkrotate(&n)
+		goto ret
+
+	case OEQ,
+		ONE:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+
+		// Disable safemode while compiling this code: the code we
+		// generate internally can refer to unsafe.Pointer.
+		// In this case it can happen if we need to generate an ==
+		// for a struct containing a reflect.Value, which itself has
+		// an unexported field of type unsafe.Pointer.
+		old_safemode = safemode
+
+		safemode = 0
+		walkcompare(&n, init)
+		safemode = old_safemode
+		goto ret
+
+	case OANDAND,
+		OOROR:
+		walkexpr(&n.Left, init)
+
+		// cannot put side effects from n->right on init,
+		// because they cannot run before n->left is checked.
+		// save elsewhere and store on the eventual n->right.
+		ll = nil
+
+		walkexpr(&n.Right, &ll)
+		addinit(&n.Right, ll)
+		goto ret
+
+	case OPRINT,
+		OPRINTN:
+		walkexprlist(n.List, init)
+		n = walkprint(n, init)
+		goto ret
+
+	case OPANIC:
+		n = mkcall("gopanic", nil, init, n.Left)
+		goto ret
+
+	case ORECOVER:
+		n = mkcall("gorecover", n.Type, init, Nod(OADDR, nodfp, nil))
+		goto ret
+
+	case OLITERAL:
+		n.Addable = 1
+		goto ret
+
+	case OCLOSUREVAR,
+		OCFUNC:
+		n.Addable = 1
+		goto ret
+
+	case ONAME:
+		if !(n.Class&PHEAP != 0) && n.Class != PPARAMREF {
+			n.Addable = 1
+		}
+		goto ret
+
+	case OCALLINTER:
+		t = n.Left.Type
+		if n.List != nil && n.List.N.Op == OAS {
+			goto ret
+		}
+		walkexpr(&n.Left, init)
+		walkexprlist(n.List, init)
+		ll = ascompatte(int(n.Op), n, int(n.Isddd), getinarg(t), n.List, 0, init)
+		n.List = reorder1(ll)
+		goto ret
+
+	case OCALLFUNC:
+		if n.Left.Op == OCLOSURE {
+			// Transform direct call of a closure to call of a normal function.
+			// transformclosure already did all preparation work.
+
+			// Append captured variables to argument list.
+			n.List = concat(n.List, n.Left.Enter)
+
+			n.Left.Enter = nil
+
+			// Replace OCLOSURE with ONAME/PFUNC.
+			n.Left = n.Left.Closure.Nname
+
+			// Update type of OCALLFUNC node.
+			// Output arguments had not changed, but their offsets could.
+			if n.Left.Type.Outtuple == 1 {
+				t = getoutargx(n.Left.Type).Type
+				if t.Etype == TFIELD {
+					t = t.Type
+				}
+				n.Type = t
+			} else {
+				n.Type = getoutargx(n.Left.Type)
+			}
+		}
+
+		t = n.Left.Type
+		if n.List != nil && n.List.N.Op == OAS {
+			goto ret
+		}
+
+		walkexpr(&n.Left, init)
+		walkexprlist(n.List, init)
+
+		ll = ascompatte(int(n.Op), n, int(n.Isddd), getinarg(t), n.List, 0, init)
+		n.List = reorder1(ll)
+		goto ret
+
+	case OCALLMETH:
+		t = n.Left.Type
+		if n.List != nil && n.List.N.Op == OAS {
+			goto ret
+		}
+		walkexpr(&n.Left, init)
+		walkexprlist(n.List, init)
+		ll = ascompatte(int(n.Op), n, 0, getthis(t), list1(n.Left.Left), 0, init)
+		lr = ascompatte(int(n.Op), n, int(n.Isddd), getinarg(t), n.List, 0, init)
+		ll = concat(ll, lr)
+		n.Left.Left = nil
+		ullmancalc(n.Left)
+		n.List = reorder1(ll)
+		goto ret
+
+	case OAS:
+		*init = concat(*init, n.Ninit)
+		n.Ninit = nil
+
+		walkexpr(&n.Left, init)
+		n.Left = safeexpr(n.Left, init)
+
+		if oaslit(n, init) != 0 {
+			goto ret
+		}
+
+		if n.Right == nil || iszero(n.Right) != 0 && !(flag_race != 0) {
+			goto ret
+		}
+
+		switch n.Right.Op {
+		default:
+			walkexpr(&n.Right, init)
+
+			// x = i.(T); n->left is x, n->right->left is i.
+		// orderstmt made sure x is addressable.
+		case ODOTTYPE:
+			walkexpr(&n.Right.Left, init)
+
+			n1 = Nod(OADDR, n.Left, nil)
+			r = n.Right // i.(T)
+
+			from = "I"
+
+			to = "T"
+			if isnilinter(r.Left.Type) != 0 {
+				from = "E"
+			}
+			if isnilinter(r.Type) != 0 {
+				to = "E"
+			} else if Isinter(r.Type) != 0 {
+				to = "I"
+			}
+
+			buf = fmt.Sprintf("assert%s2%s", from, to)
+
+			fn = syslook(buf, 1)
+			argtype(fn, r.Left.Type)
+			argtype(fn, r.Type)
+
+			n = mkcall1(fn, nil, init, typename(r.Type), r.Left, n1)
+			walkexpr(&n, init)
+			goto ret
+
+			// x = <-c; n->left is x, n->right->left is c.
+		// orderstmt made sure x is addressable.
+		case ORECV:
+			walkexpr(&n.Right.Left, init)
+
+			n1 = Nod(OADDR, n.Left, nil)
+			r = n.Right.Left // the channel
+			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1)
+			walkexpr(&n, init)
+			goto ret
+		}
+
+		if n.Left != nil && n.Right != nil {
+			r = convas(Nod(OAS, n.Left, n.Right), init)
+			r.Dodata = n.Dodata
+			n = r
+			n = applywritebarrier(n, init)
+		}
+
+		goto ret
+
+	case OAS2:
+		*init = concat(*init, n.Ninit)
+		n.Ninit = nil
+		walkexprlistsafe(n.List, init)
+		walkexprlistsafe(n.Rlist, init)
+		ll = ascompatee(OAS, n.List, n.Rlist, init)
+		ll = reorder3(ll)
+		for lr = ll; lr != nil; lr = lr.Next {
+			lr.N = applywritebarrier(lr.N, init)
+		}
+		n = liststmt(ll)
+		goto ret
+
+		// a,b,... = fn()
+	case OAS2FUNC:
+		*init = concat(*init, n.Ninit)
+
+		n.Ninit = nil
+		r = n.Rlist.N
+		walkexprlistsafe(n.List, init)
+		walkexpr(&r, init)
+
+		ll = ascompatet(int(n.Op), n.List, &r.Type, 0, init)
+		for lr = ll; lr != nil; lr = lr.Next {
+			lr.N = applywritebarrier(lr.N, init)
+		}
+		n = liststmt(concat(list1(r), ll))
+		goto ret
+
+		// x, y = <-c
+	// orderstmt made sure x is addressable.
+	case OAS2RECV:
+		*init = concat(*init, n.Ninit)
+
+		n.Ninit = nil
+		r = n.Rlist.N
+		walkexprlistsafe(n.List, init)
+		walkexpr(&r.Left, init)
+		if isblank(n.List.N) {
+			n1 = nodnil()
+		} else {
+			n1 = Nod(OADDR, n.List.N, nil)
+		}
+		n1.Etype = 1 // addr does not escape
+		fn = chanfn("chanrecv2", 2, r.Left.Type)
+		r = mkcall1(fn, n.List.Next.N.Type, init, typename(r.Left.Type), r.Left, n1)
+		n = Nod(OAS, n.List.Next.N, r)
+		typecheck(&n, Etop)
+		goto ret
+
+		// a,b = m[i];
+	case OAS2MAPR:
+		*init = concat(*init, n.Ninit)
+
+		n.Ninit = nil
+		r = n.Rlist.N
+		walkexprlistsafe(n.List, init)
+		walkexpr(&r.Left, init)
+		walkexpr(&r.Right, init)
+		t = r.Left.Type
+		p = ""
+		if t.Type.Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing.
+			switch Simsimtype(t.Down) {
+			case TINT32,
+				TUINT32:
+				p = "mapaccess2_fast32"
+
+			case TINT64,
+				TUINT64:
+				p = "mapaccess2_fast64"
+
+			case TSTRING:
+				p = "mapaccess2_faststr"
+			}
+		}
+
+		if p != "" {
+			// fast versions take key by value
+			key = r.Right
+		} else {
+			// standard version takes key by reference
+			// orderexpr made sure key is addressable.
+			key = Nod(OADDR, r.Right, nil)
+
+			p = "mapaccess2"
+		}
+
+		// from:
+		//   a,b = m[i]
+		// to:
+		//   var,b = mapaccess2*(t, m, i)
+		//   a = *var
+		a = n.List.N
+
+		fn = mapfn(p, t)
+		r = mkcall1(fn, getoutargx(fn.Type), init, typename(t), r.Left, key)
+
+		// mapaccess2* returns a typed bool, but due to spec changes,
+		// the boolean result of i.(T) is now untyped so we make it the
+		// same type as the variable on the lhs.
+		if !isblank(n.List.Next.N) {
+			r.Type.Type.Down.Type = n.List.Next.N.Type
+		}
+		n.Rlist = list1(r)
+		n.Op = OAS2FUNC
+
+		// don't generate a = *var if a is _
+		if !isblank(a) {
+			var_ = temp(Ptrto(t.Type))
+			var_.Typecheck = 1
+			n.List.N = var_
+			walkexpr(&n, init)
+			*init = list(*init, n)
+			n = Nod(OAS, a, Nod(OIND, var_, nil))
+		}
+
+		typecheck(&n, Etop)
+		walkexpr(&n, init)
+
+		// mapaccess needs a zero value to be at least this big.
+		if zerosize < t.Type.Width {
+			zerosize = t.Type.Width
+		}
+
+		// TODO: ptr is always non-nil, so disable nil check for this OIND op.
+		goto ret
+
+	case ODELETE:
+		*init = concat(*init, n.Ninit)
+		n.Ninit = nil
+		map_ = n.List.N
+		key = n.List.Next.N
+		walkexpr(&map_, init)
+		walkexpr(&key, init)
+
+		// orderstmt made sure key is addressable.
+		key = Nod(OADDR, key, nil)
+
+		t = map_.Type
+		n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key)
+		goto ret
+
+		// a,b = i.(T)
+	// orderstmt made sure a is addressable.
+	case OAS2DOTTYPE:
+		*init = concat(*init, n.Ninit)
+
+		n.Ninit = nil
+		r = n.Rlist.N
+		walkexprlistsafe(n.List, init)
+		walkexpr(&r.Left, init)
+		if isblank(n.List.N) {
+			n1 = nodnil()
+		} else {
+			n1 = Nod(OADDR, n.List.N, nil)
+		}
+		n1.Etype = 1 // addr does not escape
+
+		from = "I"
+
+		to = "T"
+		if isnilinter(r.Left.Type) != 0 {
+			from = "E"
+		}
+		if isnilinter(r.Type) != 0 {
+			to = "E"
+		} else if Isinter(r.Type) != 0 {
+			to = "I"
+		}
+		buf = fmt.Sprintf("assert%s2%s2", from, to)
+
+		fn = syslook(buf, 1)
+		argtype(fn, r.Left.Type)
+		argtype(fn, r.Type)
+
+		t = Types[TBOOL]
+		ok = n.List.Next.N
+		if !isblank(ok) {
+			t = ok.Type
+		}
+		r = mkcall1(fn, t, init, typename(r.Type), r.Left, n1)
+		n = Nod(OAS, ok, r)
+		typecheck(&n, Etop)
+		goto ret
+
+	case ODOTTYPE,
+		ODOTTYPE2:
+		Fatal("walkexpr ODOTTYPE") // should see inside OAS or OAS2 only
+		fallthrough
+
+	case OCONVIFACE:
+		walkexpr(&n.Left, init)
+
+		// Optimize convT2E as a two-word copy when T is pointer-shaped.
+		if isnilinter(n.Type) != 0 && isdirectiface(n.Left.Type) != 0 {
+			l = Nod(OEFACE, typename(n.Left.Type), n.Left)
+			l.Type = n.Type
+			l.Typecheck = n.Typecheck
+			n = l
+			goto ret
+		}
+
+		// Build name of function: convI2E etc.
+		// Not all names are possible
+		// (e.g., we'll never generate convE2E or convE2I).
+		from = "T"
+
+		to = "I"
+		if isnilinter(n.Left.Type) != 0 {
+			from = "E"
+		} else if Isinter(n.Left.Type) != 0 {
+			from = "I"
+		}
+		if isnilinter(n.Type) != 0 {
+			to = "E"
+		}
+		buf = fmt.Sprintf("conv%s2%s", from, to)
+
+		fn = syslook(buf, 1)
+		ll = nil
+		if !(Isinter(n.Left.Type) != 0) {
+			ll = list(ll, typename(n.Left.Type))
+		}
+		if !(isnilinter(n.Type) != 0) {
+			ll = list(ll, typename(n.Type))
+		}
+		if !(Isinter(n.Left.Type) != 0) && !(isnilinter(n.Type) != 0) {
+			sym = Pkglookup(fmt.Sprintf("%v.%v", Tconv(n.Left.Type, obj.FmtLeft), Tconv(n.Type, obj.FmtLeft)), itabpkg)
+			if sym.Def == nil {
+				l = Nod(ONAME, nil, nil)
+				l.Sym = sym
+				l.Type = Ptrto(Types[TUINT8])
+				l.Addable = 1
+				l.Class = PEXTERN
+				l.Xoffset = 0
+				sym.Def = l
+				ggloblsym(sym, int32(Widthptr), obj.DUPOK|obj.NOPTR)
+			}
+
+			l = Nod(OADDR, sym.Def, nil)
+			l.Addable = 1
+			ll = list(ll, l)
+
+			if isdirectiface(n.Left.Type) != 0 {
+				/* For pointer types, we can make a special form of optimization
+				 *
+				 * These statements are put onto the expression init list:
+				 * 	Itab *tab = atomicloadtype(&cache);
+				 * 	if(tab == nil)
+				 * 		tab = typ2Itab(type, itype, &cache);
+				 *
+				 * The CONVIFACE expression is replaced with this:
+				 * 	OEFACE{tab, ptr};
+				 */
+				l = temp(Ptrto(Types[TUINT8]))
+
+				n1 = Nod(OAS, l, sym.Def)
+				typecheck(&n1, Etop)
+				*init = list(*init, n1)
+
+				fn = syslook("typ2Itab", 1)
+				n1 = Nod(OCALL, fn, nil)
+				n1.List = ll
+				typecheck(&n1, Erv)
+				walkexpr(&n1, init)
+
+				n2 = Nod(OIF, nil, nil)
+				n2.Ntest = Nod(OEQ, l, nodnil())
+				n2.Nbody = list1(Nod(OAS, l, n1))
+				n2.Likely = -1
+				typecheck(&n2, Etop)
+				*init = list(*init, n2)
+
+				l = Nod(OEFACE, l, n.Left)
+				l.Typecheck = n.Typecheck
+				l.Type = n.Type
+				n = l
+				goto ret
+			}
+		}
+
+		if Isinter(n.Left.Type) != 0 {
+			ll = list(ll, n.Left)
+		} else {
+			// regular types are passed by reference to avoid C vararg calls
+			// orderexpr arranged for n->left to be a temporary for all
+			// the conversions it could see. comparison of an interface
+			// with a non-interface, especially in a switch on interface value
+			// with non-interface cases, is not visible to orderstmt, so we
+			// have to fall back on allocating a temp here.
+			if islvalue(n.Left) != 0 {
+				ll = list(ll, Nod(OADDR, n.Left, nil))
+			} else {
+				ll = list(ll, Nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
+			}
+		}
+
+		argtype(fn, n.Left.Type)
+		argtype(fn, n.Type)
+		dowidth(fn.Type)
+		n = Nod(OCALL, fn, nil)
+		n.List = ll
+		typecheck(&n, Erv)
+		walkexpr(&n, init)
+		goto ret
+
+	case OCONV,
+		OCONVNOP:
+		if Thearch.Thechar == '5' {
+			if Isfloat[n.Left.Type.Etype] != 0 {
+				if n.Type.Etype == TINT64 {
+					n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
+					goto ret
+				}
+
+				if n.Type.Etype == TUINT64 {
+					n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
+					goto ret
+				}
+			}
+
+			if Isfloat[n.Type.Etype] != 0 {
+				if n.Left.Type.Etype == TINT64 {
+					n = mkcall("int64tofloat64", n.Type, init, conv(n.Left, Types[TINT64]))
+					goto ret
+				}
+
+				if n.Left.Type.Etype == TUINT64 {
+					n = mkcall("uint64tofloat64", n.Type, init, conv(n.Left, Types[TUINT64]))
+					goto ret
+				}
+			}
+		}
+
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case OANDNOT:
+		walkexpr(&n.Left, init)
+		n.Op = OAND
+		n.Right = Nod(OCOM, n.Right, nil)
+		typecheck(&n.Right, Erv)
+		walkexpr(&n.Right, init)
+		goto ret
+
+	case OMUL:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+		walkmul(&n, init)
+		goto ret
+
+	case ODIV,
+		OMOD:
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+
+		/*
+		 * rewrite complex div into function call.
+		 */
+		et = int(n.Left.Type.Etype)
+
+		if Iscomplex[et] != 0 && n.Op == ODIV {
+			t = n.Type
+			n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128]))
+			n = conv(n, t)
+			goto ret
+		}
+
+		// Nothing to do for float divisions.
+		if Isfloat[et] != 0 {
+			goto ret
+		}
+
+		// Try rewriting as shifts or magic multiplies.
+		walkdiv(&n, init)
+
+		/*
+		 * rewrite 64-bit div and mod into function calls
+		 * on 32-bit architectures.
+		 */
+		switch n.Op {
+		case OMOD,
+			ODIV:
+			if Widthreg >= 8 || (et != TUINT64 && et != TINT64) {
+				goto ret
+			}
+			if et == TINT64 {
+				namebuf = "int64"
+			} else {
+				namebuf = "uint64"
+			}
+			if n.Op == ODIV {
+				namebuf += "div"
+			} else {
+				namebuf += "mod"
+			}
+			n = mkcall(namebuf, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et]))
+
+		default:
+			break
+		}
+
+		goto ret
+
+	case OINDEX:
+		walkexpr(&n.Left, init)
+
+		// save the original node for bounds checking elision.
+		// If it was an ODIV/OMOD, walk might rewrite it.
+		r = n.Right
+
+		walkexpr(&n.Right, init)
+
+		// if range of type cannot exceed static array bound,
+		// disable bounds check.
+		if n.Bounded != 0 {
+			goto ret
+		}
+		t = n.Left.Type
+		if t != nil && Isptr[t.Etype] != 0 {
+			t = t.Type
+		}
+		if Isfixedarray(t) != 0 {
+			n.Bounded = uint8(bounded(r, t.Bound))
+			if Debug['m'] != 0 && n.Bounded != 0 && !(Isconst(n.Right, CTINT) != 0) {
+				Warn("index bounds check elided")
+			}
+			if Smallintconst(n.Right) != 0 && !(n.Bounded != 0) {
+				Yyerror("index out of bounds")
+			}
+		} else if Isconst(n.Left, CTSTR) != 0 {
+			n.Bounded = uint8(bounded(r, int64(len(n.Left.Val.U.Sval.S))))
+			if Debug['m'] != 0 && n.Bounded != 0 && !(Isconst(n.Right, CTINT) != 0) {
+				Warn("index bounds check elided")
+			}
+			if Smallintconst(n.Right) != 0 {
+				if !(n.Bounded != 0) {
+					Yyerror("index out of bounds")
+				} else {
+					// replace "abc"[1] with 'b'.
+					// delayed until now because "abc"[1] is not
+					// an ideal constant.
+					v = Mpgetfix(n.Right.Val.U.Xval)
+
+					Nodconst(n, n.Type, int64(n.Left.Val.U.Sval.S[v]))
+					n.Typecheck = 1
+				}
+			}
+		}
+
+		if Isconst(n.Right, CTINT) != 0 {
+			if Mpcmpfixfix(n.Right.Val.U.Xval, &mpzero) < 0 || Mpcmpfixfix(n.Right.Val.U.Xval, Maxintval[TINT]) > 0 {
+				Yyerror("index out of bounds")
+			}
+		}
+		goto ret
+
+	case OINDEXMAP:
+		if n.Etype == 1 {
+			goto ret
+		}
+		walkexpr(&n.Left, init)
+		walkexpr(&n.Right, init)
+
+		t = n.Left.Type
+		p = ""
+		if t.Type.Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing.
+			switch Simsimtype(t.Down) {
+			case TINT32,
+				TUINT32:
+				p = "mapaccess1_fast32"
+
+			case TINT64,
+				TUINT64:
+				p = "mapaccess1_fast64"
+
+			case TSTRING:
+				p = "mapaccess1_faststr"
+			}
+		}
+
+		if p != "" {
+			// fast versions take key by value
+			key = n.Right
+		} else {
+			// standard version takes key by reference.
+			// orderexpr made sure key is addressable.
+			key = Nod(OADDR, n.Right, nil)
+
+			p = "mapaccess1"
+		}
+
+		n = mkcall1(mapfn(p, t), Ptrto(t.Type), init, typename(t), n.Left, key)
+		n = Nod(OIND, n, nil)
+		n.Type = t.Type
+		n.Typecheck = 1
+
+		// mapaccess needs a zero value to be at least this big.
+		if zerosize < t.Type.Width {
+			zerosize = t.Type.Width
+		}
+		goto ret
+
+	case ORECV:
+		Fatal("walkexpr ORECV") // should see inside OAS only
+		fallthrough
+
+	case OSLICE:
+		if n.Right != nil && n.Right.Left == nil && n.Right.Right == nil { // noop
+			walkexpr(&n.Left, init)
+			n = n.Left
+			goto ret
+		}
+		fallthrough
+
+		// fallthrough
+	case OSLICEARR,
+		OSLICESTR:
+		if n.Right == nil { // already processed
+			goto ret
+		}
+
+		walkexpr(&n.Left, init)
+
+		// cgen_slice can't handle string literals as source
+		// TODO the OINDEX case is a bug elsewhere that needs to be traced.  it causes a crash on ([2][]int{ ... })[1][lo:hi]
+		if (n.Op == OSLICESTR && n.Left.Op == OLITERAL) || (n.Left.Op == OINDEX) {
+			n.Left = copyexpr(n.Left, n.Left.Type, init)
+		} else {
+			n.Left = safeexpr(n.Left, init)
+		}
+		walkexpr(&n.Right.Left, init)
+		n.Right.Left = safeexpr(n.Right.Left, init)
+		walkexpr(&n.Right.Right, init)
+		n.Right.Right = safeexpr(n.Right.Right, init)
+		n = sliceany(n, init) // chops n->right, sets n->list
+		goto ret
+
+	case OSLICE3,
+		OSLICE3ARR:
+		if n.Right == nil { // already processed
+			goto ret
+		}
+
+		walkexpr(&n.Left, init)
+
+		// TODO the OINDEX case is a bug elsewhere that needs to be traced.  it causes a crash on ([2][]int{ ... })[1][lo:hi]
+		// TODO the comment on the previous line was copied from case OSLICE. it might not even be true.
+		if n.Left.Op == OINDEX {
+			n.Left = copyexpr(n.Left, n.Left.Type, init)
+		} else {
+			n.Left = safeexpr(n.Left, init)
+		}
+		walkexpr(&n.Right.Left, init)
+		n.Right.Left = safeexpr(n.Right.Left, init)
+		walkexpr(&n.Right.Right.Left, init)
+		n.Right.Right.Left = safeexpr(n.Right.Right.Left, init)
+		walkexpr(&n.Right.Right.Right, init)
+		n.Right.Right.Right = safeexpr(n.Right.Right.Right, init)
+		n = sliceany(n, init) // chops n->right, sets n->list
+		goto ret
+
+	case OADDR:
+		walkexpr(&n.Left, init)
+		goto ret
+
+	case ONEW:
+		if n.Esc == EscNone && n.Type.Type.Width < 1<<16 {
+			r = temp(n.Type.Type)
+			r = Nod(OAS, r, nil) // zero temp
+			typecheck(&r, Etop)
+			*init = list(*init, r)
+			r = Nod(OADDR, r.Left, nil)
+			typecheck(&r, Erv)
+			n = r
+		} else {
+			n = callnew(n.Type.Type)
+		}
+
+		goto ret
+
+		// If one argument to the comparison is an empty string,
+	// comparing the lengths instead will yield the same result
+	// without the function call.
+	case OCMPSTR:
+		if (Isconst(n.Left, CTSTR) != 0 && len(n.Left.Val.U.Sval.S) == 0) || (Isconst(n.Right, CTSTR) != 0 && len(n.Right.Val.U.Sval.S) == 0) {
+			r = Nod(int(n.Etype), Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil))
+			typecheck(&r, Erv)
+			walkexpr(&r, init)
+			r.Type = n.Type
+			n = r
+			goto ret
+		}
+
+		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
+		if (n.Etype == OEQ || n.Etype == ONE) && Isconst(n.Right, CTSTR) != 0 && n.Left.Op == OADDSTR && count(n.Left.List) == 2 && Isconst(n.Left.List.Next.N, CTSTR) != 0 && cmpslit(n.Right, n.Left.List.Next.N) == 0 {
+			r = Nod(int(n.Etype), Nod(OLEN, n.Left.List.N, nil), Nodintconst(0))
+			typecheck(&r, Erv)
+			walkexpr(&r, init)
+			r.Type = n.Type
+			n = r
+			goto ret
+		}
+
+		if n.Etype == OEQ || n.Etype == ONE {
+			// prepare for rewrite below
+			n.Left = cheapexpr(n.Left, init)
+
+			n.Right = cheapexpr(n.Right, init)
+
+			r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
+
+			// quick check of len before full compare for == or !=
+			// eqstring assumes that the lengths are equal
+			if n.Etype == OEQ {
+				// len(left) == len(right) && eqstring(left, right)
+				r = Nod(OANDAND, Nod(OEQ, Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil)), r)
+			} else {
+				// len(left) != len(right) || !eqstring(left, right)
+				r = Nod(ONOT, r, nil)
+
+				r = Nod(OOROR, Nod(ONE, Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil)), r)
+			}
+
+			typecheck(&r, Erv)
+			walkexpr(&r, nil)
+		} else {
+			// sys_cmpstring(s1, s2) :: 0
+			r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
+
+			r = Nod(int(n.Etype), r, Nodintconst(0))
+		}
+
+		typecheck(&r, Erv)
+		if n.Type.Etype != TBOOL {
+			Fatal("cmp %v", Tconv(n.Type, 0))
+		}
+		r.Type = n.Type
+		n = r
+		goto ret
+
+	case OADDSTR:
+		n = addstr(n, init)
+		goto ret
+
+	case OAPPEND:
+		if n.Isddd != 0 {
+			n = appendslice(n, init) // also works for append(slice, string).
+		} else {
+			n = walkappend(n, init)
+		}
+		goto ret
+
+	case OCOPY:
+		n = copyany(n, init, flag_race)
+		goto ret
+
+		// cannot use chanfn - closechan takes any, not chan any
+	case OCLOSE:
+		fn = syslook("closechan", 1)
+
+		argtype(fn, n.Left.Type)
+		n = mkcall1(fn, nil, init, n.Left)
+		goto ret
+
+	case OMAKECHAN:
+		n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]))
+		goto ret
+
+	case OMAKEMAP:
+		t = n.Type
+
+		fn = syslook("makemap", 1)
+
+		a = nodnil() // hmap buffer
+		r = nodnil() // bucket buffer
+		if n.Esc == EscNone {
+			// Allocate hmap buffer on stack.
+			var_ = temp(hmap(t))
+
+			a = Nod(OAS, var_, nil) // zero temp
+			typecheck(&a, Etop)
+			*init = list(*init, a)
+			a = Nod(OADDR, var_, nil)
+
+			// Allocate one bucket on stack.
+			// Maximum key/value size is 128 bytes, larger objects
+			// are stored with an indirection. So max bucket size is 2048+eps.
+			var_ = temp(mapbucket(t))
+
+			r = Nod(OAS, var_, nil) // zero temp
+			typecheck(&r, Etop)
+			*init = list(*init, r)
+			r = Nod(OADDR, var_, nil)
+		}
+
+		argtype(fn, hmap(t))      // hmap buffer
+		argtype(fn, mapbucket(t)) // bucket buffer
+		argtype(fn, t.Down)       // key type
+		argtype(fn, t.Type)       // value type
+		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r)
+		goto ret
+
+	case OMAKESLICE:
+		l = n.Left
+		r = n.Right
+		if r == nil {
+			r = safeexpr(l, init)
+			l = r
+		}
+		t = n.Type
+		if n.Esc == EscNone && Smallintconst(l) != 0 && Smallintconst(r) != 0 && (t.Type.Width == 0 || Mpgetfix(r.Val.U.Xval) < (1<<16)/t.Type.Width) {
+			// var arr [r]T
+			// n = arr[:l]
+			t = aindex(r, t.Type) // [r]T
+			var_ = temp(t)
+			a = Nod(OAS, var_, nil) // zero temp
+			typecheck(&a, Etop)
+			*init = list(*init, a)
+			r = Nod(OSLICE, var_, Nod(OKEY, nil, l)) // arr[:l]
+			r = conv(r, n.Type)                      // in case n->type is named.
+			typecheck(&r, Erv)
+			walkexpr(&r, init)
+			n = r
+		} else {
+			// makeslice(t *Type, nel int64, max int64) (ary []any)
+			fn = syslook("makeslice", 1)
+
+			argtype(fn, t.Type) // any-1
+			n = mkcall1(fn, n.Type, init, typename(n.Type), conv(l, Types[TINT64]), conv(r, Types[TINT64]))
+		}
+
+		goto ret
+
+	case ORUNESTR:
+		a = nodnil()
+		if n.Esc == EscNone {
+			t = aindex(Nodintconst(4), Types[TUINT8])
+			var_ = temp(t)
+			a = Nod(OADDR, var_, nil)
+		}
+
+		// intstring(*[4]byte, rune)
+		n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64]))
+
+		goto ret
+
+	case OARRAYBYTESTR:
+		a = nodnil()
+		if n.Esc == EscNone {
+			// Create temporary buffer for string on stack.
+			t = aindex(Nodintconst(tmpstringbufsize), Types[TUINT8])
+
+			a = Nod(OADDR, temp(t), nil)
+		}
+
+		// slicebytetostring(*[32]byte, []byte) string;
+		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
+
+		goto ret
+
+		// slicebytetostringtmp([]byte) string;
+	case OARRAYBYTESTRTMP:
+		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
+
+		goto ret
+
+		// slicerunetostring(*[32]byte, []rune) string;
+	case OARRAYRUNESTR:
+		a = nodnil()
+
+		if n.Esc == EscNone {
+			// Create temporary buffer for string on stack.
+			t = aindex(Nodintconst(tmpstringbufsize), Types[TUINT8])
+
+			a = Nod(OADDR, temp(t), nil)
+		}
+
+		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
+		goto ret
+
+		// stringtoslicebyte(*[32]byte, string) []byte;
+	case OSTRARRAYBYTE:
+		a = nodnil()
+
+		if n.Esc == EscNone {
+			// Create temporary buffer for slice on stack.
+			t = aindex(Nodintconst(tmpstringbufsize), Types[TUINT8])
+
+			a = Nod(OADDR, temp(t), nil)
+		}
+
+		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING]))
+		goto ret
+
+		// stringtoslicebytetmp(string) []byte;
+	case OSTRARRAYBYTETMP:
+		n = mkcall("stringtoslicebytetmp", n.Type, init, conv(n.Left, Types[TSTRING]))
+
+		goto ret
+
+		// stringtoslicerune(*[32]rune, string) []rune
+	case OSTRARRAYRUNE:
+		a = nodnil()
+
+		if n.Esc == EscNone {
+			// Create temporary buffer for slice on stack.
+			t = aindex(Nodintconst(tmpstringbufsize), Types[TINT32])
+
+			a = Nod(OADDR, temp(t), nil)
+		}
+
+		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
+		goto ret
+
+		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
+	case OCMPIFACE:
+		if !Eqtype(n.Left.Type, n.Right.Type) {
+			Fatal("ifaceeq %v %v %v", Oconv(int(n.Op), 0), Tconv(n.Left.Type, 0), Tconv(n.Right.Type, 0))
+		}
+		if isnilinter(n.Left.Type) != 0 {
+			fn = syslook("efaceeq", 1)
+		} else {
+			fn = syslook("ifaceeq", 1)
+		}
+
+		n.Right = cheapexpr(n.Right, init)
+		n.Left = cheapexpr(n.Left, init)
+		argtype(fn, n.Right.Type)
+		argtype(fn, n.Left.Type)
+		r = mkcall1(fn, n.Type, init, n.Left, n.Right)
+		if n.Etype == ONE {
+			r = Nod(ONOT, r, nil)
+		}
+
+		// check itable/type before full compare.
+		if n.Etype == OEQ {
+			r = Nod(OANDAND, Nod(OEQ, Nod(OITAB, n.Left, nil), Nod(OITAB, n.Right, nil)), r)
+		} else {
+			r = Nod(OOROR, Nod(ONE, Nod(OITAB, n.Left, nil), Nod(OITAB, n.Right, nil)), r)
+		}
+		typecheck(&r, Erv)
+		walkexpr(&r, init)
+		r.Type = n.Type
+		n = r
+		goto ret
+
+	case OARRAYLIT,
+		OMAPLIT,
+		OSTRUCTLIT,
+		OPTRLIT:
+		var_ = temp(n.Type)
+		anylit(0, n, var_, init)
+		n = var_
+		goto ret
+
+	case OSEND:
+		n1 = n.Right
+		n1 = assignconv(n1, n.Left.Type.Type, "chan send")
+		walkexpr(&n1, init)
+		n1 = Nod(OADDR, n1, nil)
+		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1)
+		goto ret
+
+	case OCLOSURE:
+		n = walkclosure(n, init)
+		goto ret
+
+	case OCALLPART:
+		n = walkpartialcall(n, init)
+		goto ret
+	}
+
+	Fatal("missing switch %v", Oconv(int(n.Op), 0))
+
+	// Expressions that are constant at run time but not
+	// considered const by the language spec are not turned into
+	// constants until walk. For example, if n is y%1 == 0, the
+	// walk of y%1 may have replaced it by 0.
+	// Check whether n with its updated args is itself now a constant.
+ret:
+	t = n.Type
+
+	evconst(n)
+	n.Type = t
+	if n.Op == OLITERAL {
+		typecheck(&n, Erv)
+	}
+
+	ullmancalc(n)
+
+	if Debug['w'] != 0 && n != nil {
+		Dump("walk", n)
+	}
+
+	lineno = lno
+	*np = n
+}
+
+func ascompatee1(op int, l *Node, r *Node, init **NodeList) *Node {
+	var n *Node
+
+	// convas will turn map assigns into function calls,
+	// making it impossible for reorder3 to work.
+	n = Nod(OAS, l, r)
+
+	if l.Op == OINDEXMAP {
+		return n
+	}
+
+	return convas(n, init)
+}
+
+func ascompatee(op int, nl *NodeList, nr *NodeList, init **NodeList) *NodeList {
+	var ll *NodeList
+	var lr *NodeList
+	var nn *NodeList
+
+	/*
+	 * check assign expression list to
+	 * an expression list. called in
+	 *	expr-list = expr-list
+	 */
+
+	// ensure order of evaluation for function calls
+	for ll = nl; ll != nil; ll = ll.Next {
+		ll.N = safeexpr(ll.N, init)
+	}
+	for lr = nr; lr != nil; lr = lr.Next {
+		lr.N = safeexpr(lr.N, init)
+	}
+
+	nn = nil
+	ll = nl
+	lr = nr
+	for ; ll != nil && lr != nil; (func() { ll = ll.Next; lr = lr.Next })() {
+		// Do not generate 'x = x' during return. See issue 4014.
+		if op == ORETURN && ll.N == lr.N {
+			continue
+		}
+		nn = list(nn, ascompatee1(op, ll.N, lr.N, init))
+	}
+
+	// cannot happen: caller checked that lists had same length
+	if ll != nil || lr != nil {
+		Yyerror("error in shape across %v %v %v / %d %d [%s]", Hconv(nl, obj.FmtSign), Oconv(int(op), 0), Hconv(nr, obj.FmtSign), count(nl), count(nr), Curfn.Nname.Sym.Name)
+	}
+	return nn
+}
+
+/*
+ * l is an lv and rt is the type of an rv
+ * return 1 if this implies a function call
+ * evaluating the lv or a function call
+ * in the conversion of the types
+ */
+func fncall(l *Node, rt *Type) int {
+	var r Node
+
+	if l.Ullman >= UINF || l.Op == OINDEXMAP {
+		return 1
+	}
+	r = Node{}
+	if needwritebarrier(l, &r) != 0 {
+		return 1
+	}
+	if Eqtype(l.Type, rt) {
+		return 0
+	}
+	return 1
+}
+
+func ascompatet(op int, nl *NodeList, nr **Type, fp int, init **NodeList) *NodeList {
+	var l *Node
+	var tmp *Node
+	var a *Node
+	var ll *NodeList
+	var r *Type
+	var saver Iter
+	var ucount int
+	var nn *NodeList
+	var mm *NodeList
+
+	/*
+	 * check assign type list to
+	 * an expression list. called in
+	 *	expr-list = func()
+	 */
+	r = Structfirst(&saver, nr)
+
+	nn = nil
+	mm = nil
+	ucount = 0
+	for ll = nl; ll != nil; ll = ll.Next {
+		if r == nil {
+			break
+		}
+		l = ll.N
+		if isblank(l) {
+			r = structnext(&saver)
+			continue
+		}
+
+		// any lv that causes a fn call must be
+		// deferred until all the return arguments
+		// have been pulled from the output arguments
+		if fncall(l, r.Type) != 0 {
+			tmp = temp(r.Type)
+			typecheck(&tmp, Erv)
+			a = Nod(OAS, l, tmp)
+			a = convas(a, init)
+			mm = list(mm, a)
+			l = tmp
+		}
+
+		a = Nod(OAS, l, nodarg(r, fp))
+		a = convas(a, init)
+		ullmancalc(a)
+		if a.Ullman >= UINF {
+			Dump("ascompatet ucount", a)
+			ucount++
+		}
+
+		nn = list(nn, a)
+		r = structnext(&saver)
+	}
+
+	if ll != nil || r != nil {
+		Yyerror("ascompatet: assignment count mismatch: %d = %d", count(nl), structcount(*nr))
+	}
+
+	if ucount != 0 {
+		Fatal("ascompatet: too many function calls evaluating parameters")
+	}
+	return concat(nn, mm)
+}
+
+/*
+ * package all the arguments that match a ... T parameter into a []T.
+ */
+func mkdotargslice(lr0 *NodeList, nn *NodeList, l *Type, fp int, init **NodeList, ddd *Node) *NodeList {
+	var a *Node
+	var n *Node
+	var tslice *Type
+	var esc int
+
+	esc = EscUnknown
+	if ddd != nil {
+		esc = int(ddd.Esc)
+	}
+
+	tslice = typ(TARRAY)
+	tslice.Type = l.Type.Type
+	tslice.Bound = -1
+
+	if count(lr0) == 0 {
+		n = nodnil()
+		n.Type = tslice
+	} else {
+		n = Nod(OCOMPLIT, nil, typenod(tslice))
+		if ddd != nil {
+			n.Alloc = ddd.Alloc // temporary to use
+		}
+		n.List = lr0
+		n.Esc = uint(esc)
+		typecheck(&n, Erv)
+		if n.Type == nil {
+			Fatal("mkdotargslice: typecheck failed")
+		}
+		walkexpr(&n, init)
+	}
+
+	a = Nod(OAS, nodarg(l, fp), n)
+	nn = list(nn, convas(a, init))
+	return nn
+}
+
+/*
+ * helpers for shape errors
+ */
+func dumptypes(nl **Type, what string) string {
+	var first int
+	var l *Type
+	var savel Iter
+	var fmt_ string
+
+	fmt_ = ""
+	fmt_ += fmt.Sprintf("\t")
+	first = 1
+	for l = Structfirst(&savel, nl); l != nil; l = structnext(&savel) {
+		if first != 0 {
+			first = 0
+		} else {
+			fmt_ += fmt.Sprintf(", ")
+		}
+		fmt_ += fmt.Sprintf("%v", Tconv(l, 0))
+	}
+
+	if first != 0 {
+		fmt_ += fmt.Sprintf("[no arguments %s]", what)
+	}
+	return fmt_
+}
+
+func dumpnodetypes(l *NodeList, what string) string {
+	var first int
+	var r *Node
+	var fmt_ string
+
+	fmt_ = ""
+	fmt_ += fmt.Sprintf("\t")
+	first = 1
+	for ; l != nil; l = l.Next {
+		r = l.N
+		if first != 0 {
+			first = 0
+		} else {
+			fmt_ += fmt.Sprintf(", ")
+		}
+		fmt_ += fmt.Sprintf("%v", Tconv(r.Type, 0))
+	}
+
+	if first != 0 {
+		fmt_ += fmt.Sprintf("[no arguments %s]", what)
+	}
+	return fmt_
+}
+
+/*
+ * check assign expression list to
+ * a type list. called in
+ *	return expr-list
+ *	func(expr-list)
+ */
+func ascompatte(op int, call *Node, isddd int, nl **Type, lr *NodeList, fp int, init **NodeList) *NodeList {
+	var l *Type
+	var ll *Type
+	var r *Node
+	var a *Node
+	var nn *NodeList
+	var lr0 *NodeList
+	var alist *NodeList
+	var savel Iter
+	var l1 string
+	var l2 string
+
+	lr0 = lr
+	l = Structfirst(&savel, nl)
+	r = nil
+	if lr != nil {
+		r = lr.N
+	}
+	nn = nil
+
+	// f(g()) where g has multiple return values
+	if r != nil && lr.Next == nil && r.Type.Etype == TSTRUCT && r.Type.Funarg != 0 {
+		// optimization - can do block copy
+		if eqtypenoname(r.Type, *nl) != 0 {
+			a = nodarg(*nl, fp)
+			r = Nod(OCONVNOP, r, nil)
+			r.Type = a.Type
+			nn = list1(convas(Nod(OAS, a, r), init))
+			goto ret
+		}
+
+		// conversions involved.
+		// copy into temporaries.
+		alist = nil
+
+		for l = Structfirst(&savel, &r.Type); l != nil; l = structnext(&savel) {
+			a = temp(l.Type)
+			alist = list(alist, a)
+		}
+
+		a = Nod(OAS2, nil, nil)
+		a.List = alist
+		a.Rlist = lr
+		typecheck(&a, Etop)
+		walkstmt(&a)
+		*init = list(*init, a)
+		lr = alist
+		r = lr.N
+		l = Structfirst(&savel, nl)
+	}
+
+loop:
+	if l != nil && l.Isddd != 0 {
+		// the ddd parameter must be last
+		ll = structnext(&savel)
+
+		if ll != nil {
+			Yyerror("... must be last argument")
+		}
+
+		// special case --
+		// only if we are assigning a single ddd
+		// argument to a ddd parameter then it is
+		// passed thru unencapsulated
+		if r != nil && lr.Next == nil && isddd != 0 && Eqtype(l.Type, r.Type) {
+			a = Nod(OAS, nodarg(l, fp), r)
+			a = convas(a, init)
+			nn = list(nn, a)
+			goto ret
+		}
+
+		// normal case -- make a slice of all
+		// remaining arguments and pass it to
+		// the ddd parameter.
+		nn = mkdotargslice(lr, nn, l, fp, init, call.Right)
+
+		goto ret
+	}
+
+	if l == nil || r == nil {
+		if l != nil || r != nil {
+			l1 = dumptypes(nl, "expected")
+			l2 = dumpnodetypes(lr0, "given")
+			if l != nil {
+				Yyerror("not enough arguments to %v\n%s\n%s", Oconv(int(op), 0), l1, l2)
+			} else {
+				Yyerror("too many arguments to %v\n%s\n%s", Oconv(int(op), 0), l1, l2)
+			}
+		}
+
+		goto ret
+	}
+
+	a = Nod(OAS, nodarg(l, fp), r)
+	a = convas(a, init)
+	nn = list(nn, a)
+
+	l = structnext(&savel)
+	r = nil
+	lr = lr.Next
+	if lr != nil {
+		r = lr.N
+	}
+	goto loop
+
+ret:
+	for lr = nn; lr != nil; lr = lr.Next {
+		lr.N.Typecheck = 1
+	}
+	return nn
+}
+
+// generate code for print
+func walkprint(nn *Node, init **NodeList) *Node {
+	var r *Node
+	var n *Node
+	var l *NodeList
+	var all *NodeList
+	var on *Node
+	var t *Type
+	var notfirst int
+	var et int
+	var op int
+	var calls *NodeList
+
+	op = int(nn.Op)
+	all = nn.List
+	calls = nil
+	notfirst = 0
+
+	// Hoist all the argument evaluation up before the lock.
+	walkexprlistcheap(all, init)
+
+	calls = list(calls, mkcall("printlock", nil, init))
+
+	for l = all; l != nil; l = l.Next {
+		if notfirst != 0 {
+			calls = list(calls, mkcall("printsp", nil, init))
+		}
+
+		notfirst = bool2int(op == OPRINTN)
+
+		n = l.N
+		if n.Op == OLITERAL {
+			switch n.Val.Ctype {
+			case CTRUNE:
+				defaultlit(&n, runetype)
+
+			case CTINT:
+				defaultlit(&n, Types[TINT64])
+
+			case CTFLT:
+				defaultlit(&n, Types[TFLOAT64])
+			}
+		}
+
+		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
+			defaultlit(&n, Types[TINT64])
+		}
+		defaultlit(&n, nil)
+		l.N = n
+		if n.Type == nil || n.Type.Etype == TFORW {
+			continue
+		}
+
+		t = n.Type
+		et = int(n.Type.Etype)
+		if Isinter(n.Type) != 0 {
+			if isnilinter(n.Type) != 0 {
+				on = syslook("printeface", 1)
+			} else {
+				on = syslook("printiface", 1)
+			}
+			argtype(on, n.Type) // any-1
+		} else if Isptr[et] != 0 || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR {
+			on = syslook("printpointer", 1)
+			argtype(on, n.Type) // any-1
+		} else if Isslice(n.Type) != 0 {
+			on = syslook("printslice", 1)
+			argtype(on, n.Type) // any-1
+		} else if Isint[et] != 0 {
+			if et == TUINT64 {
+				if (t.Sym.Pkg == Runtimepkg || compiling_runtime != 0) && t.Sym.Name == "hex" {
+					on = syslook("printhex", 0)
+				} else {
+					on = syslook("printuint", 0)
+				}
+			} else {
+				on = syslook("printint", 0)
+			}
+		} else if Isfloat[et] != 0 {
+			on = syslook("printfloat", 0)
+		} else if Iscomplex[et] != 0 {
+			on = syslook("printcomplex", 0)
+		} else if et == TBOOL {
+			on = syslook("printbool", 0)
+		} else if et == TSTRING {
+			on = syslook("printstring", 0)
+		} else {
+			badtype(OPRINT, n.Type, nil)
+			continue
+		}
+
+		t = *getinarg(on.Type)
+		if t != nil {
+			t = t.Type
+		}
+		if t != nil {
+			t = t.Type
+		}
+
+		if !Eqtype(t, n.Type) {
+			n = Nod(OCONV, n, nil)
+			n.Type = t
+		}
+
+		r = Nod(OCALL, on, nil)
+		r.List = list1(n)
+		calls = list(calls, r)
+	}
+
+	if op == OPRINTN {
+		calls = list(calls, mkcall("printnl", nil, nil))
+	}
+
+	calls = list(calls, mkcall("printunlock", nil, init))
+
+	typechecklist(calls, Etop)
+	walkexprlist(calls, init)
+
+	r = Nod(OEMPTY, nil, nil)
+	typecheck(&r, Etop)
+	walkexpr(&r, init)
+	r.Ninit = calls
+	return r
+}
+
+func callnew(t *Type) *Node {
+	var fn *Node
+
+	dowidth(t)
+	fn = syslook("newobject", 1)
+	argtype(fn, t)
+	return mkcall1(fn, Ptrto(t), nil, typename(t))
+}
+
+func isstack(n *Node) int {
+	var defn *Node
+
+	n = outervalue(n)
+
+	// If n is *autotmp and autotmp = &foo, replace n with foo.
+	// We introduce such temps when initializing struct literals.
+	if n.Op == OIND && n.Left.Op == ONAME && strings.HasPrefix(n.Left.Sym.Name, "autotmp_") {
+		defn = n.Left.Defn
+		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
+			n = defn.Right.Left
+		}
+	}
+
+	switch n.Op {
+	// OINDREG only ends up in walk if it's indirect of SP.
+	case OINDREG:
+		return 1
+
+	case ONAME:
+		switch n.Class {
+		case PAUTO,
+			PPARAM,
+			PPARAMOUT:
+			return 1
+		}
+	}
+
+	return 0
+}
+
+func isglobal(n *Node) int {
+	n = outervalue(n)
+
+	switch n.Op {
+	case ONAME:
+		switch n.Class {
+		case PEXTERN:
+			return 1
+		}
+	}
+
+	return 0
+}
+
+// Do we need a write barrier for the assignment l = r?
+func needwritebarrier(l *Node, r *Node) int {
+	if !(use_writebarrier != 0) {
+		return 0
+	}
+
+	if l == nil || isblank(l) {
+		return 0
+	}
+
+	// No write barrier for write of non-pointers.
+	dowidth(l.Type)
+
+	if !haspointers(l.Type) {
+		return 0
+	}
+
+	// No write barrier for write to stack.
+	if isstack(l) != 0 {
+		return 0
+	}
+
+	// No write barrier for implicit or explicit zeroing.
+	if r == nil || iszero(r) != 0 {
+		return 0
+	}
+
+	// No write barrier for initialization to constant.
+	if r.Op == OLITERAL {
+		return 0
+	}
+
+	// No write barrier for storing static (read-only) data.
+	if r.Op == ONAME && strings.HasPrefix(r.Sym.Name, "statictmp_") {
+		return 0
+	}
+
+	// No write barrier for storing address of stack values,
+	// which are guaranteed only to be written to the stack.
+	if r.Op == OADDR && isstack(r.Left) != 0 {
+		return 0
+	}
+
+	// No write barrier for storing address of global, which
+	// is live no matter what.
+	if r.Op == OADDR && isglobal(r.Left) != 0 {
+		return 0
+	}
+
+	// No write barrier for reslice: x = x[0:y] or x = append(x, ...).
+	// Both are compiled to modify x directly.
+	// In the case of append, a write barrier may still be needed
+	// if the underlying array grows, but the append code can
+	// generate the write barrier directly in that case.
+	// (It does not yet, but the cost of the write barrier will be
+	// small compared to the cost of the allocation.)
+	if r.Reslice != 0 {
+		switch r.Op {
+		case OSLICE,
+			OSLICE3,
+			OSLICESTR,
+			OAPPEND:
+			break
+
+		default:
+			Dump("bad reslice-l", l)
+			Dump("bad reslice-r", r)
+		}
+
+		return 0
+	}
+
+	// Otherwise, be conservative and use write barrier.
+	return 1
+}
+
+// TODO(rsc): Perhaps componentgen should run before this.
+
+var applywritebarrier_bv *Bvec
+
+func applywritebarrier(n *Node, init **NodeList) *Node {
+	var l *Node
+	var r *Node
+	var t *Type
+	var x int64
+	var name string
+
+	if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) != 0 {
+		if Curfn != nil && Curfn.Nowritebarrier {
+			Yyerror("write barrier prohibited")
+		}
+		t = n.Left.Type
+		l = Nod(OADDR, n.Left, nil)
+		l.Etype = 1 // addr does not escape
+		if t.Width == int64(Widthptr) {
+			n = mkcall1(writebarrierfn("writebarrierptr", t, n.Right.Type), nil, init, l, n.Right)
+		} else if t.Etype == TSTRING {
+			n = mkcall1(writebarrierfn("writebarrierstring", t, n.Right.Type), nil, init, l, n.Right)
+		} else if Isslice(t) != 0 {
+			n = mkcall1(writebarrierfn("writebarrierslice", t, n.Right.Type), nil, init, l, n.Right)
+		} else if Isinter(t) != 0 {
+			n = mkcall1(writebarrierfn("writebarrieriface", t, n.Right.Type), nil, init, l, n.Right)
+		} else if t.Width <= int64(4*Widthptr) {
+			x = 0
+			if applywritebarrier_bv == nil {
+				applywritebarrier_bv = bvalloc(obj.BitsPerPointer * 4)
+			}
+			bvresetall(applywritebarrier_bv)
+			twobitwalktype1(t, &x, applywritebarrier_bv)
+			const (
+				PtrBit = 1
+			)
+			// The bvgets are looking for BitsPointer in successive slots.
+			if obj.BitsPointer != 1<<PtrBit {
+				Fatal("wrong PtrBit")
+			}
+			switch t.Width / int64(Widthptr) {
+			default:
+				Fatal("found writebarrierfat for %d-byte object of type %v", int(t.Width), Tconv(t, 0))
+				fallthrough
+
+			case 2:
+				name = fmt.Sprintf("writebarrierfat%d%d", bvget(applywritebarrier_bv, PtrBit), bvget(applywritebarrier_bv, obj.BitsPerPointer+PtrBit))
+
+			case 3:
+				name = fmt.Sprintf("writebarrierfat%d%d%d", bvget(applywritebarrier_bv, PtrBit), bvget(applywritebarrier_bv, obj.BitsPerPointer+PtrBit), bvget(applywritebarrier_bv, 2*obj.BitsPerPointer+PtrBit))
+
+			case 4:
+				name = fmt.Sprintf("writebarrierfat%d%d%d%d", bvget(applywritebarrier_bv, PtrBit), bvget(applywritebarrier_bv, obj.BitsPerPointer+PtrBit), bvget(applywritebarrier_bv, 2*obj.BitsPerPointer+PtrBit), bvget(applywritebarrier_bv, 3*obj.BitsPerPointer+PtrBit))
+			}
+
+			n = mkcall1(writebarrierfn(name, t, n.Right.Type), nil, init, l, nodnil(), n.Right)
+		} else {
+			r = n.Right
+			for r.Op == OCONVNOP {
+				r = r.Left
+			}
+			r = Nod(OADDR, r, nil)
+			r.Etype = 1 // addr does not escape
+
+			//warnl(n->lineno, "typedmemmove %T %N", t, r);
+			n = mkcall1(writebarrierfn("typedmemmove", t, r.Left.Type), nil, init, typename(t), l, r)
+		}
+	}
+
+	return n
+}
+
+func convas(n *Node, init **NodeList) *Node {
+	var lt *Type
+	var rt *Type
+	var map_ *Node
+	var key *Node
+	var val *Node
+
+	if n.Op != OAS {
+		Fatal("convas: not OAS %v", Oconv(int(n.Op), 0))
+	}
+
+	n.Typecheck = 1
+
+	if n.Left == nil || n.Right == nil {
+		goto out
+	}
+
+	lt = n.Left.Type
+	rt = n.Right.Type
+	if lt == nil || rt == nil {
+		goto out
+	}
+
+	if isblank(n.Left) {
+		defaultlit(&n.Right, nil)
+		goto out
+	}
+
+	if n.Left.Op == OINDEXMAP {
+		map_ = n.Left.Left
+		key = n.Left.Right
+		val = n.Right
+		walkexpr(&map_, init)
+		walkexpr(&key, init)
+		walkexpr(&val, init)
+
+		// orderexpr made sure key and val are addressable.
+		key = Nod(OADDR, key, nil)
+
+		val = Nod(OADDR, val, nil)
+		n = mkcall1(mapfn("mapassign1", map_.Type), nil, init, typename(map_.Type), map_, key, val)
+		goto out
+	}
+
+	if !Eqtype(lt, rt) {
+		n.Right = assignconv(n.Right, lt, "assignment")
+		walkexpr(&n.Right, init)
+	}
+
+out:
+	ullmancalc(n)
+	return n
+}
+
+/*
+ * from ascompat[te]
+ * evaluating actual function arguments.
+ *	f(a,b)
+ * if there is exactly one function expr,
+ * then it is done first. otherwise must
+ * make temp variables
+ */
+func reorder1(all *NodeList) *NodeList {
+	var f *Node
+	var a *Node
+	var n *Node
+	var l *NodeList
+	var r *NodeList
+	var g *NodeList
+	var c int
+	var d int
+	var t int
+
+	c = 0 // function calls
+	t = 0 // total parameters
+
+	for l = all; l != nil; l = l.Next {
+		n = l.N
+		t++
+		ullmancalc(n)
+		if n.Ullman >= UINF {
+			c++
+		}
+	}
+
+	if c == 0 || t == 1 {
+		return all
+	}
+
+	g = nil // fncalls assigned to tempnames
+	f = nil // last fncall assigned to stack
+	r = nil // non fncalls and tempnames assigned to stack
+	d = 0
+	for l = all; l != nil; l = l.Next {
+		n = l.N
+		if n.Ullman < UINF {
+			r = list(r, n)
+			continue
+		}
+
+		d++
+		if d == c {
+			f = n
+			continue
+		}
+
+		// make assignment of fncall to tempname
+		a = temp(n.Right.Type)
+
+		a = Nod(OAS, a, n.Right)
+		g = list(g, a)
+
+		// put normal arg assignment on list
+		// with fncall replaced by tempname
+		n.Right = a.Left
+
+		r = list(r, n)
+	}
+
+	if f != nil {
+		g = list(g, f)
+	}
+	return concat(g, r)
+}
+
+/*
+ * from ascompat[ee]
+ *	a,b = c,d
+ * simultaneous assignment. there cannot
+ * be later use of an earlier lvalue.
+ *
+ * function calls have been removed.
+ */
+func reorder3(all *NodeList) *NodeList {
+	var list *NodeList
+	var early *NodeList
+	var mapinit *NodeList
+	var l *Node
+
+	// If a needed expression may be affected by an
+	// earlier assignment, make an early copy of that
+	// expression and use the copy instead.
+	early = nil
+
+	mapinit = nil
+	for list = all; list != nil; list = list.Next {
+		l = list.N.Left
+
+		// Save subexpressions needed on left side.
+		// Drill through non-dereferences.
+		for {
+			if l.Op == ODOT || l.Op == OPAREN {
+				l = l.Left
+				continue
+			}
+
+			if l.Op == OINDEX && Isfixedarray(l.Left.Type) != 0 {
+				reorder3save(&l.Right, all, list, &early)
+				l = l.Left
+				continue
+			}
+
+			break
+		}
+
+		switch l.Op {
+		default:
+			Fatal("reorder3 unexpected lvalue %v", Oconv(int(l.Op), obj.FmtSharp))
+			fallthrough
+
+		case ONAME:
+			break
+
+		case OINDEX,
+			OINDEXMAP:
+			reorder3save(&l.Left, all, list, &early)
+			reorder3save(&l.Right, all, list, &early)
+			if l.Op == OINDEXMAP {
+				list.N = convas(list.N, &mapinit)
+			}
+
+		case OIND,
+			ODOTPTR:
+			reorder3save(&l.Left, all, list, &early)
+		}
+
+		// Save expression on right side.
+		reorder3save(&list.N.Right, all, list, &early)
+	}
+
+	early = concat(mapinit, early)
+	return concat(early, all)
+}
+
+/*
+ * if the evaluation of *np would be affected by the
+ * assignments in all up to but not including stop,
+ * copy into a temporary during *early and
+ * replace *np with that temp.
+ */
+func reorder3save(np **Node, all *NodeList, stop *NodeList, early **NodeList) {
+	var n *Node
+	var q *Node
+
+	n = *np
+	if !(aliased(n, all, stop) != 0) {
+		return
+	}
+
+	q = temp(n.Type)
+	q = Nod(OAS, q, n)
+	typecheck(&q, Etop)
+	*early = list(*early, q)
+	*np = q.Left
+}
+
+/*
+ * what's the outer value that a write to n affects?
+ * outer value means containing struct or array.
+ */
+func outervalue(n *Node) *Node {
+	for {
+		if n.Op == OXDOT {
+			Fatal("OXDOT in walk")
+		}
+		if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
+			n = n.Left
+			continue
+		}
+
+		if n.Op == OINDEX && Isfixedarray(n.Left.Type) != 0 {
+			n = n.Left
+			continue
+		}
+
+		break
+	}
+
+	return n
+}
+
+/*
+ * Is it possible that the computation of n might be
+ * affected by writes in all up to but not including stop?
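+ *
+ * For example, given the assignments in i, a[i] = 1, 2, the index i
+ * read on the left of the second assignment is affected by the first,
+ * so reorder3save copies it into a temporary.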
+ */
+func aliased(n *Node, all *NodeList, stop *NodeList) int {
+	var memwrite int
+	var varwrite int
+	var a *Node
+	var l *NodeList
+
+	if n == nil {
+		return 0
+	}
+
+	// Look for obvious aliasing: a variable being assigned
+	// during the all list and appearing in n.
+	// Also record whether there are any writes to main memory.
+	// Also record whether there are any writes to variables
+	// whose addresses have been taken.
+	memwrite = 0
+
+	varwrite = 0
+	for l = all; l != stop; l = l.Next {
+		a = outervalue(l.N.Left)
+		if a.Op != ONAME {
+			memwrite = 1
+			continue
+		}
+
+		switch n.Class {
+		default:
+			varwrite = 1
+			continue
+
+		case PAUTO,
+			PPARAM,
+			PPARAMOUT:
+			if n.Addrtaken != 0 {
+				varwrite = 1
+				continue
+			}
+
+			if vmatch2(a, n) != 0 {
+				// Direct hit.
+				return 1
+			}
+		}
+	}
+
+	// The variables being written do not appear in n.
+	// However, n might refer to computed addresses
+	// that are being written.
+
+	// If no computed addresses are affected by the writes, no aliasing.
+	if !(memwrite != 0) && !(varwrite != 0) {
+		return 0
+	}
+
+	// If n does not refer to computed addresses
+	// (that is, if n only refers to variables whose addresses
+	// have not been taken), no aliasing.
+	if varexpr(n) != 0 {
+		return 0
+	}
+
+	// Otherwise, both the writes and n refer to computed memory addresses.
+	// Assume that they might conflict.
+	return 1
+}
+
+/*
+ * does the evaluation of n only refer to variables
+ * whose addresses have not been taken?
+ * (and no other memory)
+ */
+func varexpr(n *Node) int {
+	if n == nil {
+		return 1
+	}
+
+	switch n.Op {
+	case OLITERAL:
+		return 1
+
+	case ONAME:
+		switch n.Class {
+		case PAUTO,
+			PPARAM,
+			PPARAMOUT:
+			if !(n.Addrtaken != 0) {
+				return 1
+			}
+		}
+
+		return 0
+
+	case OADD,
+		OSUB,
+		OOR,
+		OXOR,
+		OMUL,
+		ODIV,
+		OMOD,
+		OLSH,
+		ORSH,
+		OAND,
+		OANDNOT,
+		OPLUS,
+		OMINUS,
+		OCOM,
+		OPAREN,
+		OANDAND,
+		OOROR,
+		ODOT, // but not ODOTPTR
+		OCONV,
+		OCONVNOP,
+		OCONVIFACE,
+		ODOTTYPE:
+		return bool2int(varexpr(n.Left) != 0 && varexpr(n.Right) != 0)
+	}
+
+	// Be conservative.
+	return 0
+}
+
+/*
+ * is the name l mentioned in r?
+ */
+func vmatch2(l *Node, r *Node) int {
+	var ll *NodeList
+
+	if r == nil {
+		return 0
+	}
+	switch r.Op {
+	// match each right given left
+	case ONAME:
+		return bool2int(l == r)
+
+	case OLITERAL:
+		return 0
+	}
+
+	if vmatch2(l, r.Left) != 0 {
+		return 1
+	}
+	if vmatch2(l, r.Right) != 0 {
+		return 1
+	}
+	for ll = r.List; ll != nil; ll = ll.Next {
+		if vmatch2(l, ll.N) != 0 {
+			return 1
+		}
+	}
+	return 0
+}
+
+/*
+ * is any name mentioned in l also mentioned in r?
+ * called by sinit.go
+ */
+func vmatch1(l *Node, r *Node) int {
+	var ll *NodeList
+
+	/*
+	 * isolate all left sides
+	 */
+	if l == nil || r == nil {
+		return 0
+	}
+	switch l.Op {
+	case ONAME:
+		switch l.Class {
+		case PPARAM,
+			PPARAMREF,
+			PAUTO:
+			break
+
+		// assignment to non-stack variable
+		// must be delayed if right has function calls.
+		default:
+			if r.Ullman >= UINF {
+				return 1
+			}
+		}
+
+		return vmatch2(l, r)
+
+	case OLITERAL:
+		return 0
+	}
+
+	if vmatch1(l.Left, r) != 0 {
+		return 1
+	}
+	if vmatch1(l.Right, r) != 0 {
+		return 1
+	}
+	for ll = l.List; ll != nil; ll = ll.Next {
+		if vmatch1(ll.N, r) != 0 {
+			return 1
+		}
+	}
+	return 0
+}
+
+/*
+ * walk through argin parameters.
+ * generate and return code to allocate
+ * copies of escaped parameters to the heap.
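+ *
+ * For a parameter x of type T whose address escapes, the generated
+ * entry code roughly allocates a heap cell with new(T), points
+ * x.Heapaddr at it, and copies the incoming stack value (x.Stackparam)
+ * into it.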
+ */
+func paramstoheap(argin **Type, out int) *NodeList {
+	var t *Type
+	var savet Iter
+	var v *Node
+	var as *Node
+	var nn *NodeList
+
+	nn = nil
+	for t = Structfirst(&savet, argin); t != nil; t = structnext(&savet) {
+		v = t.Nname
+		if v != nil && v.Sym != nil && v.Sym.Name[0] == '~' && v.Sym.Name[1] == 'r' { // unnamed result
+			v = nil
+		}
+
+		// For precise stacks, the garbage collector assumes results
+		// are always live, so zero them always.
+		if out != 0 {
+			// Defer might stop a panic and show the
+			// return values as they exist at the time of panic.
+			// Make sure to zero them on entry to the function.
+			nn = list(nn, Nod(OAS, nodarg(t, 1), nil))
+		}
+
+		if v == nil || !(v.Class&PHEAP != 0) {
+			continue
+		}
+
+		// generate allocation & copying code
+		if compiling_runtime != 0 {
+			Yyerror("%v escapes to heap, not allowed in runtime.", Nconv(v, 0))
+		}
+		if v.Alloc == nil {
+			v.Alloc = callnew(v.Type)
+		}
+		nn = list(nn, Nod(OAS, v.Heapaddr, v.Alloc))
+		if v.Class&^PHEAP != PPARAMOUT {
+			as = Nod(OAS, v, v.Stackparam)
+			v.Stackparam.Typecheck = 1
+			typecheck(&as, Etop)
+			as = applywritebarrier(as, &nn)
+			nn = list(nn, as)
+		}
+	}
+
+	return nn
+}
+
+/*
+ * walk through argout parameters copying back to stack
+ */
+func returnsfromheap(argin **Type) *NodeList {
+	var t *Type
+	var savet Iter
+	var v *Node
+	var nn *NodeList
+
+	nn = nil
+	for t = Structfirst(&savet, argin); t != nil; t = structnext(&savet) {
+		v = t.Nname
+		if v == nil || v.Class != PHEAP|PPARAMOUT {
+			continue
+		}
+		nn = list(nn, Nod(OAS, v.Stackparam, v))
+	}
+
+	return nn
+}
+
+/*
+ * take care of migrating any function in/out args
+ * between the stack and the heap.  adds code to
+ * curfn's before and after lists.
+ */
+func heapmoves() {
+	var nn *NodeList
+	var lno int32
+
+	lno = lineno
+	lineno = Curfn.Lineno
+	nn = paramstoheap(getthis(Curfn.Type), 0)
+	nn = concat(nn, paramstoheap(getinarg(Curfn.Type), 0))
+	nn = concat(nn, paramstoheap(Getoutarg(Curfn.Type), 1))
+	Curfn.Enter = concat(Curfn.Enter, nn)
+	lineno = Curfn.Endlineno
+	Curfn.Exit = returnsfromheap(Getoutarg(Curfn.Type))
+	lineno = lno
+}
+
+func vmkcall(fn *Node, t *Type, init **NodeList, va []*Node) *Node {
+	var i int
+	var n int
+	var r *Node
+	var args *NodeList
+
+	if fn.Type == nil || fn.Type.Etype != TFUNC {
+		Fatal("mkcall %v %v", Nconv(fn, 0), Tconv(fn.Type, 0))
+	}
+
+	args = nil
+	n = fn.Type.Intuple
+	for i = 0; i < n; i++ {
+		args = list(args, va[i])
+	}
+
+	r = Nod(OCALL, fn, nil)
+	r.List = args
+	if fn.Type.Outtuple > 0 {
+		typecheck(&r, Erv|Efnstruct)
+	} else {
+		typecheck(&r, Etop)
+	}
+	walkexpr(&r, init)
+	r.Type = t
+	return r
+}
+
+func mkcall(name string, t *Type, init **NodeList, args ...*Node) *Node {
+	return vmkcall(syslook(name, 0), t, init, args)
+}
+
+func mkcall1(fn *Node, t *Type, init **NodeList, args ...*Node) *Node {
+	return vmkcall(fn, t, init, args)
+}
+
+func conv(n *Node, t *Type) *Node {
+	if Eqtype(n.Type, t) {
+		return n
+	}
+	n = Nod(OCONV, n, nil)
+	n.Type = t
+	typecheck(&n, Erv)
+	return n
+}
+
+func chanfn(name string, n int, t *Type) *Node {
+	var fn *Node
+	var i int
+
+	if t.Etype != TCHAN {
+		Fatal("chanfn %v", Tconv(t, 0))
+	}
+	fn = syslook(name, 1)
+	for i = 0; i < n; i++ {
+		argtype(fn, t.Type)
+	}
+	return fn
+}
+
+func mapfn(name string, t *Type) *Node {
+	var fn *Node
+
+	if t.Etype != TMAP {
+		Fatal("mapfn %v", Tconv(t, 0))
+	}
+	fn = syslook(name, 1)
+	argtype(fn, t.Down)
+	argtype(fn, t.Type)
+	argtype(fn, t.Down)
+	argtype(fn, t.Type)
+	return fn
+}
+
+func mapfndel(name string, t *Type) *Node {
+	var fn *Node
+
+	if t.Etype != TMAP {
+		Fatal("mapfndel %v", Tconv(t, 0))
+	}
+	fn = syslook(name, 1)
+	argtype(fn, t.Down)
+	argtype(fn, t.Type)
+	argtype(fn, t.Down)
+	return fn
+}
+
+func writebarrierfn(name string, l *Type, r *Type) *Node {
+	var fn *Node
+
+	fn = syslook(name, 1)
+	argtype(fn, l)
+	argtype(fn, r)
+	return fn
+}
+
+func addstr(n *Node, init **NodeList) *Node {
+	var r *Node
+	var cat *Node
+	var slice *Node
+	var buf *Node
+	var args *NodeList
+	var l *NodeList
+	var c int
+	var sz int64
+	var t *Type
+
+	// orderexpr rewrote OADDSTR to have a list of strings.
+	c = count(n.List)
+
+	if c < 2 {
+		Yyerror("addstr count %d too small", c)
+	}
+
+	buf = nodnil()
+	if n.Esc == EscNone {
+		sz = 0
+		for l = n.List; l != nil; l = l.Next {
+			if l.N.Op == OLITERAL {
+				sz += int64(len(l.N.Val.U.Sval.S))
+			}
+		}
+
+		// Don't allocate the buffer if the result won't fit.
+		if sz < tmpstringbufsize {
+			// Create temporary buffer for result string on stack.
+			t = aindex(Nodintconst(tmpstringbufsize), Types[TUINT8])
+
+			buf = Nod(OADDR, temp(t), nil)
+		}
+	}
+
+	// build list of string arguments
+	args = list1(buf)
+
+	for l = n.List; l != nil; l = l.Next {
+		args = list(args, conv(l.N, Types[TSTRING]))
+	}
+
+	if c <= 5 {
+		// small numbers of strings use direct runtime helpers.
+		// note: orderexpr knows this cutoff too.
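+		// For example, a+b+c is lowered to concatstring3(buf, a, b, c).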
+		namebuf = fmt.Sprintf("concatstring%d", c)
+	} else {
+		// large numbers of strings are passed to the runtime as a slice.
+		namebuf = "concatstrings"
+
+		t = typ(TARRAY)
+		t.Type = Types[TSTRING]
+		t.Bound = -1
+		slice = Nod(OCOMPLIT, nil, typenod(t))
+		slice.Alloc = n.Alloc
+		slice.List = args.Next // skip buf arg
+		args = list1(buf)
+		args = list(args, slice)
+		slice.Esc = EscNone
+	}
+
+	cat = syslook(namebuf, 1)
+	r = Nod(OCALL, cat, nil)
+	r.List = args
+	typecheck(&r, Erv)
+	walkexpr(&r, init)
+	r.Type = n.Type
+
+	return r
+}
+
+// expand append(l1, l2...) to
+//   init {
+//     s := l1
+//     if n := len(l1) + len(l2) - cap(s); n > 0 {
+//       s = growslice(s, n)
+//     }
+//     s = s[:len(l1)+len(l2)]
+//     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
+//   }
+//   s
+//
+// l2 is allowed to be a string.
+func appendslice(n *Node, init **NodeList) *Node {
+	var l *NodeList
+	var l1 *Node
+	var l2 *Node
+	var nt *Node
+	var nif *Node
+	var fn *Node
+	var nptr1 *Node
+	var nptr2 *Node
+	var nwid *Node
+	var s *Node
+
+	walkexprlistsafe(n.List, init)
+
+	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
+	// and n are name or literal, but those may index the slice we're
+	// modifying here.  Fix explicitly.
+	for l = n.List; l != nil; l = l.Next {
+		l.N = cheapexpr(l.N, init)
+	}
+
+	l1 = n.List.N
+	l2 = n.List.Next.N
+
+	s = temp(l1.Type) // var s []T
+	l = nil
+	l = list(l, Nod(OAS, s, l1)) // s = l1
+
+	nt = temp(Types[TINT])
+
+	nif = Nod(OIF, nil, nil)
+
+	// n := len(s) + len(l2) - cap(s)
+	nif.Ninit = list1(Nod(OAS, nt, Nod(OSUB, Nod(OADD, Nod(OLEN, s, nil), Nod(OLEN, l2, nil)), Nod(OCAP, s, nil))))
+
+	nif.Ntest = Nod(OGT, nt, Nodintconst(0))
+
+	// instantiate growslice(Type*, []any, int64) []any
+	fn = syslook("growslice", 1)
+
+	argtype(fn, s.Type.Type)
+	argtype(fn, s.Type.Type)
+
+	// s = growslice(T, s, n)
+	nif.Nbody = list1(Nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type), s, conv(nt, Types[TINT64]))))
+
+	l = list(l, nif)
+
+	if haspointers(l1.Type.Type) {
+		// copy(s[len(l1):len(l1)+len(l2)], l2)
+		nptr1 = Nod(OSLICE, s, Nod(OKEY, Nod(OLEN, l1, nil), Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))))
+
+		nptr1.Etype = 1
+		nptr2 = l2
+		fn = syslook("typedslicecopy", 1)
+		argtype(fn, l1.Type)
+		argtype(fn, l2.Type)
+		nt = mkcall1(fn, Types[TINT], &l, typename(l1.Type.Type), nptr1, nptr2)
+		l = list(l, nt)
+	} else if flag_race != 0 {
+		// rely on runtime to instrument copy.
+		// copy(s[len(l1):len(l1)+len(l2)], l2)
+		nptr1 = Nod(OSLICE, s, Nod(OKEY, Nod(OLEN, l1, nil), Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))))
+
+		nptr1.Etype = 1
+		nptr2 = l2
+		if l2.Type.Etype == TSTRING {
+			fn = syslook("slicestringcopy", 1)
+		} else {
+			fn = syslook("slicecopy", 1)
+		}
+		argtype(fn, l1.Type)
+		argtype(fn, l2.Type)
+		nt = mkcall1(fn, Types[TINT], &l, nptr1, nptr2, Nodintconst(s.Type.Type.Width))
+		l = list(l, nt)
+	} else {
+		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
+		nptr1 = Nod(OINDEX, s, Nod(OLEN, l1, nil))
+
+		nptr1.Bounded = 1
+		nptr1 = Nod(OADDR, nptr1, nil)
+
+		nptr2 = Nod(OSPTR, l2, nil)
+
+		fn = syslook("memmove", 1)
+		argtype(fn, s.Type.Type) // 1 old []any
+		argtype(fn, s.Type.Type) // 2 ret []any
+
+		nwid = cheapexpr(conv(Nod(OLEN, l2, nil), Types[TUINTPTR]), &l)
+
+		nwid = Nod(OMUL, nwid, Nodintconst(s.Type.Type.Width))
+		nt = mkcall1(fn, nil, &l, nptr1, nptr2, nwid)
+		l = list(l, nt)
+	}
+
+	// s = s[:len(l1)+len(l2)]
+	nt = Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))
+
+	nt = Nod(OSLICE, s, Nod(OKEY, nil, nt))
+	nt.Etype = 1
+	l = list(l, Nod(OAS, s, nt))
+
+	typechecklist(l, Etop)
+	walkstmtlist(l)
+	*init = concat(*init, l)
+	return s
+}
+
+// expand append(src, a [, b]* ) to
+//
+//   init {
+//     s := src
+//     const argc = len(args) - 1
+//     if cap(s) - len(s) < argc {
+//	    s = growslice(s, argc)
+//     }
+//     n := len(s)
+//     s = s[:n+argc]
+//     s[n] = a
+//     s[n+1] = b
+//     ...
+//   }
+//   s
+func walkappend(n *Node, init **NodeList) *Node {
+	var l *NodeList
+	var a *NodeList
+	var nsrc *Node
+	var ns *Node
+	var nn *Node
+	var na *Node
+	var nx *Node
+	var fn *Node
+	var argc int
+
+	walkexprlistsafe(n.List, init)
+
+	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
+	// and n are name or literal, but those may index the slice we're
+	// modifying here.  Fix explicitly.
+	for l = n.List; l != nil; l = l.Next {
+		l.N = cheapexpr(l.N, init)
+	}
+
+	nsrc = n.List.N
+
+	// Resolve slice type of multi-valued return.
+	if Istype(nsrc.Type, TSTRUCT) != 0 {
+		nsrc.Type = nsrc.Type.Type.Type
+	}
+	argc = count(n.List) - 1
+	if argc < 1 {
+		return nsrc
+	}
+
+	l = nil
+
+	ns = temp(nsrc.Type)
+	l = list(l, Nod(OAS, ns, nsrc)) // s = src
+
+	na = Nodintconst(int64(argc)) // const argc
+	nx = Nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
+	nx.Ntest = Nod(OLT, Nod(OSUB, Nod(OCAP, ns, nil), Nod(OLEN, ns, nil)), na)
+
+	fn = syslook("growslice", 1) //   growslice(<type>, old []T, n int64) (ret []T)
+	argtype(fn, ns.Type.Type)    // 1 old []any
+	argtype(fn, ns.Type.Type)    // 2 ret []any
+
+	nx.Nbody = list1(Nod(OAS, ns, mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type), ns, conv(na, Types[TINT64]))))
+
+	l = list(l, nx)
+
+	nn = temp(Types[TINT])
+	l = list(l, Nod(OAS, nn, Nod(OLEN, ns, nil))) // n = len(s)
+
+	nx = Nod(OSLICE, ns, Nod(OKEY, nil, Nod(OADD, nn, na))) // ...s[:n+argc]
+	nx.Etype = 1
+	l = list(l, Nod(OAS, ns, nx)) // s = s[:n+argc]
+
+	for a = n.List.Next; a != nil; a = a.Next {
+		nx = Nod(OINDEX, ns, nn) // s[n] ...
+		nx.Bounded = 1
+		l = list(l, Nod(OAS, nx, a.N)) // s[n] = arg
+		if a.Next != nil {
+			l = list(l, Nod(OAS, nn, Nod(OADD, nn, Nodintconst(1)))) // n = n + 1
+		}
+	}
+
+	typechecklist(l, Etop)
+	walkstmtlist(l)
+	*init = concat(*init, l)
+	return ns
+}
+
+// Lower copy(a, b) to a memmove call or a runtime call.
+//
+// init {
+//   n := len(a)
+//   if n > len(b) { n = len(b) }
+//   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
+// }
+// n;
+//
+// Also works if b is a string.
+//
+func copyany(n *Node, init **NodeList, runtimecall int) *Node {
+	var nl *Node
+	var nr *Node
+	var nfrm *Node
+	var nto *Node
+	var nif *Node
+	var nlen *Node
+	var nwid *Node
+	var fn *Node
+	var l *NodeList
+
+	if haspointers(n.Left.Type.Type) {
+		fn = writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
+		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Type), n.Left, n.Right)
+	}
+
+	if runtimecall != 0 {
+		if n.Right.Type.Etype == TSTRING {
+			fn = syslook("slicestringcopy", 1)
+		} else {
+			fn = syslook("slicecopy", 1)
+		}
+		argtype(fn, n.Left.Type)
+		argtype(fn, n.Right.Type)
+		return mkcall1(fn, n.Type, init, n.Left, n.Right, Nodintconst(n.Left.Type.Type.Width))
+	}
+
+	walkexpr(&n.Left, init)
+	walkexpr(&n.Right, init)
+	nl = temp(n.Left.Type)
+	nr = temp(n.Right.Type)
+	l = nil
+	l = list(l, Nod(OAS, nl, n.Left))
+	l = list(l, Nod(OAS, nr, n.Right))
+
+	nfrm = Nod(OSPTR, nr, nil)
+	nto = Nod(OSPTR, nl, nil)
+
+	nlen = temp(Types[TINT])
+
+	// n = len(to)
+	l = list(l, Nod(OAS, nlen, Nod(OLEN, nl, nil)))
+
+	// if n > len(frm) { n = len(frm) }
+	nif = Nod(OIF, nil, nil)
+
+	nif.Ntest = Nod(OGT, nlen, Nod(OLEN, nr, nil))
+	nif.Nbody = list(nif.Nbody, Nod(OAS, nlen, Nod(OLEN, nr, nil)))
+	l = list(l, nif)
+
+	// Call memmove.
+	fn = syslook("memmove", 1)
+
+	argtype(fn, nl.Type.Type)
+	argtype(fn, nl.Type.Type)
+	nwid = temp(Types[TUINTPTR])
+	l = list(l, Nod(OAS, nwid, conv(nlen, Types[TUINTPTR])))
+	nwid = Nod(OMUL, nwid, Nodintconst(nl.Type.Type.Width))
+	l = list(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
+
+	typechecklist(l, Etop)
+	walkstmtlist(l)
+	*init = concat(*init, l)
+	return nlen
+}
+
+// Generate frontend part for OSLICE[3][ARR|STR]
+//
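+// For s[lo:hi] it emits a bounds check roughly of the form
+//	if cap(s) < hi || hi < lo { panicslice() }
+// (len(s) instead of cap(s) when slicing a string) and then attaches
+// the new cap, len and byte offset to n.List for the back end's
+// cgen_slice.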
+func sliceany(n *Node, init **NodeList) *Node {
+	var bounded int
+	var slice3 int
+	var src *Node
+	var lb *Node
+	var hb *Node
+	var cb *Node
+	var bound *Node
+	var chk *Node
+	var chk0 *Node
+	var chk1 *Node
+	var chk2 *Node
+	var lbv int64
+	var hbv int64
+	var cbv int64
+	var bv int64
+	var w int64
+	var bt *Type
+
+	//	print("before sliceany: %+N\n", n);
+
+	src = n.Left
+
+	lb = n.Right.Left
+	slice3 = bool2int(n.Op == OSLICE3 || n.Op == OSLICE3ARR)
+	if slice3 != 0 {
+		hb = n.Right.Right.Left
+		cb = n.Right.Right.Right
+	} else {
+		hb = n.Right.Right
+		cb = nil
+	}
+
+	bounded = int(n.Etype)
+
+	if n.Op == OSLICESTR {
+		bound = Nod(OLEN, src, nil)
+	} else {
+		bound = Nod(OCAP, src, nil)
+	}
+
+	typecheck(&bound, Erv)
+	walkexpr(&bound, init) // if src is an array, bound will be a const now.
+
+	// static checks if possible
+	bv = 1 << 50
+
+	if Isconst(bound, CTINT) != 0 {
+		if !(Smallintconst(bound) != 0) {
+			Yyerror("array len too large")
+		} else {
+			bv = Mpgetfix(bound.Val.U.Xval)
+		}
+	}
+
+	if Isconst(cb, CTINT) != 0 {
+		cbv = Mpgetfix(cb.Val.U.Xval)
+		if cbv < 0 || cbv > bv {
+			Yyerror("slice index out of bounds")
+		}
+	}
+
+	if Isconst(hb, CTINT) != 0 {
+		hbv = Mpgetfix(hb.Val.U.Xval)
+		if hbv < 0 || hbv > bv {
+			Yyerror("slice index out of bounds")
+		}
+	}
+
+	if Isconst(lb, CTINT) != 0 {
+		lbv = Mpgetfix(lb.Val.U.Xval)
+		if lbv < 0 || lbv > bv {
+			Yyerror("slice index out of bounds")
+			lbv = -1
+		}
+
+		if lbv == 0 {
+			lb = nil
+		}
+	}
+
+	// Checking src[lb:hb:cb] or src[lb:hb].
+	// if chk0 || chk1 || chk2 { panicslice() }
+	chk = nil
+
+	chk0 = nil // cap(src) < cb
+	chk1 = nil // cb < hb for src[lb:hb:cb]; cap(src) < hb for src[lb:hb]
+	chk2 = nil // hb < lb
+
+	// All comparisons are unsigned to avoid testing < 0.
+	bt = Types[Simtype[TUINT]]
+
+	if cb != nil && cb.Type.Width > 4 {
+		bt = Types[TUINT64]
+	}
+	if hb != nil && hb.Type.Width > 4 {
+		bt = Types[TUINT64]
+	}
+	if lb != nil && lb.Type.Width > 4 {
+		bt = Types[TUINT64]
+	}
+
+	bound = cheapexpr(conv(bound, bt), init)
+
+	if cb != nil {
+		cb = cheapexpr(conv(cb, bt), init)
+		if !(bounded != 0) {
+			chk0 = Nod(OLT, bound, cb)
+		}
+	} else if slice3 != 0 {
+		// When we figure out what this means, implement it.
+		Fatal("slice3 with cb == N") // rejected by parser
+	}
+
+	if hb != nil {
+		hb = cheapexpr(conv(hb, bt), init)
+		if !(bounded != 0) {
+			if cb != nil {
+				chk1 = Nod(OLT, cb, hb)
+			} else {
+				chk1 = Nod(OLT, bound, hb)
+			}
+		}
+	} else if slice3 != 0 {
+		// When we figure out what this means, implement it.
+		Fatal("slice3 with hb == N") // rejected by parser
+	} else if n.Op == OSLICEARR {
+		hb = bound
+	} else {
+		hb = Nod(OLEN, src, nil)
+		typecheck(&hb, Erv)
+		walkexpr(&hb, init)
+		hb = cheapexpr(conv(hb, bt), init)
+	}
+
+	if lb != nil {
+		lb = cheapexpr(conv(lb, bt), init)
+		if !(bounded != 0) {
+			chk2 = Nod(OLT, hb, lb)
+		}
+	}
+
+	if chk0 != nil || chk1 != nil || chk2 != nil {
+		chk = Nod(OIF, nil, nil)
+		chk.Nbody = list1(mkcall("panicslice", nil, init))
+		chk.Likely = -1
+		if chk0 != nil {
+			chk.Ntest = chk0
+		}
+		if chk1 != nil {
+			if chk.Ntest == nil {
+				chk.Ntest = chk1
+			} else {
+				chk.Ntest = Nod(OOROR, chk.Ntest, chk1)
+			}
+		}
+
+		if chk2 != nil {
+			if chk.Ntest == nil {
+				chk.Ntest = chk2
+			} else {
+				chk.Ntest = Nod(OOROR, chk.Ntest, chk2)
+			}
+		}
+
+		typecheck(&chk, Etop)
+		walkstmt(&chk)
+		*init = concat(*init, chk.Ninit)
+		chk.Ninit = nil
+		*init = list(*init, chk)
+	}
+
+	// prepare new cap, len and offs for backend cgen_slice
+	// cap = bound [ - lo ]
+	n.Right = nil
+
+	n.List = nil
+	if !(slice3 != 0) {
+		cb = bound
+	}
+	if lb == nil {
+		bound = conv(cb, Types[Simtype[TUINT]])
+	} else {
+		bound = Nod(OSUB, conv(cb, Types[Simtype[TUINT]]), conv(lb, Types[Simtype[TUINT]]))
+	}
+	typecheck(&bound, Erv)
+	walkexpr(&bound, init)
+	n.List = list(n.List, bound)
+
+	// len = hi [ - lo]
+	if lb == nil {
+		hb = conv(hb, Types[Simtype[TUINT]])
+	} else {
+		hb = Nod(OSUB, conv(hb, Types[Simtype[TUINT]]), conv(lb, Types[Simtype[TUINT]]))
+	}
+	typecheck(&hb, Erv)
+	walkexpr(&hb, init)
+	n.List = list(n.List, hb)
+
+	// offs = [width *] lo, but omit if zero
+	if lb != nil {
+		if n.Op == OSLICESTR {
+			w = 1
+		} else {
+			w = n.Type.Type.Width
+		}
+		lb = conv(lb, Types[TUINTPTR])
+		if w > 1 {
+			lb = Nod(OMUL, Nodintconst(w), lb)
+		}
+		typecheck(&lb, Erv)
+		walkexpr(&lb, init)
+		n.List = list(n.List, lb)
+	}
+
+	//	print("after sliceany: %+N\n", n);
+
+	return n
+}
+
+func eqfor(t *Type, needsize *int) *Node {
+	var a int
+	var n *Node
+	var ntype *Node
+	var sym *Sym
+
+	// Should only arrive here with large memory or
+	// a struct/array containing a non-memory field/element.
+	// Small memory is handled inline, and single non-memory
+	// is handled during type check (OCMPSTR etc).
+	a = algtype1(t, nil)
+
+	if a != AMEM && a != -1 {
+		Fatal("eqfor %v", Tconv(t, 0))
+	}
+
+	if a == AMEM {
+		n = syslook("memequal", 1)
+		argtype(n, t)
+		argtype(n, t)
+		*needsize = 1
+		return n
+	}
+
+	sym = typesymprefix(".eq", t)
+	n = newname(sym)
+	n.Class = PFUNC
+	ntype = Nod(OTFUNC, nil, nil)
+	ntype.List = list(ntype.List, Nod(ODCLFIELD, nil, typenod(Ptrto(t))))
+	ntype.List = list(ntype.List, Nod(ODCLFIELD, nil, typenod(Ptrto(t))))
+	ntype.Rlist = list(ntype.Rlist, Nod(ODCLFIELD, nil, typenod(Types[TBOOL])))
+	typecheck(&ntype, Etype)
+	n.Type = ntype.Type
+	*needsize = 0
+	return n
+}
+
+func countfield(t *Type) int {
+	var t1 *Type
+	var n int
+
+	n = 0
+	for t1 = t.Type; t1 != nil; t1 = t1.Down {
+		n++
+	}
+	return n
+}
+
+func walkcompare(np **Node, init **NodeList) {
+	var n *Node
+	var l *Node
+	var r *Node
+	var call *Node
+	var a *Node
+	var li *Node
+	var ri *Node
+	var expr *Node
+	var cmpl *Node
+	var cmpr *Node
+	var x *Node
+	var ok *Node
+	var andor int
+	var i int
+	var needsize int
+	var t *Type
+	var t1 *Type
+
+	n = *np
+
+	// Given interface value l and concrete value r, rewrite
+	//   l == r
+	// to
+	//   x, ok := l.(type(r)); ok && x == r
+	// Handle != similarly.
+	// This avoids the allocation that would be required
+	// to convert r to l for comparison.
+	l = nil
+
+	r = nil
+	if Isinter(n.Left.Type) != 0 && !(Isinter(n.Right.Type) != 0) {
+		l = n.Left
+		r = n.Right
+	} else if !(Isinter(n.Left.Type) != 0) && Isinter(n.Right.Type) != 0 {
+		l = n.Right
+		r = n.Left
+	}
+
+	if l != nil {
+		x = temp(r.Type)
+		ok = temp(Types[TBOOL])
+
+		// l.(type(r))
+		a = Nod(ODOTTYPE, l, nil)
+
+		a.Type = r.Type
+
+		// x, ok := l.(type(r))
+		expr = Nod(OAS2, nil, nil)
+
+		expr.List = list1(x)
+		expr.List = list(expr.List, ok)
+		expr.Rlist = list1(a)
+		typecheck(&expr, Etop)
+		walkexpr(&expr, init)
+
+		if n.Op == OEQ {
+			r = Nod(OANDAND, ok, Nod(OEQ, x, r))
+		} else {
+			r = Nod(OOROR, Nod(ONOT, ok, nil), Nod(ONE, x, r))
+		}
+		*init = list(*init, expr)
+		goto ret
+	}
+
+	// Must be comparison of array or struct.
+	// Otherwise back end handles it.
+	t = n.Left.Type
+
+	switch t.Etype {
+	default:
+		return
+
+	case TARRAY:
+		if Isslice(t) != 0 {
+			return
+		}
+
+	case TSTRUCT:
+		break
+	}
+
+	cmpl = n.Left
+	for cmpl != nil && cmpl.Op == OCONVNOP {
+		cmpl = cmpl.Left
+	}
+	cmpr = n.Right
+	for cmpr != nil && cmpr.Op == OCONVNOP {
+		cmpr = cmpr.Left
+	}
+
+	if !(islvalue(cmpl) != 0) || !(islvalue(cmpr) != 0) {
+		Fatal("arguments of comparison must be lvalues - %v %v", Nconv(cmpl, 0), Nconv(cmpr, 0))
+	}
+
+	l = temp(Ptrto(t))
+	a = Nod(OAS, l, Nod(OADDR, cmpl, nil))
+	a.Right.Etype = 1 // addr does not escape
+	typecheck(&a, Etop)
+	*init = list(*init, a)
+
+	r = temp(Ptrto(t))
+	a = Nod(OAS, r, Nod(OADDR, cmpr, nil))
+	a.Right.Etype = 1 // addr does not escape
+	typecheck(&a, Etop)
+	*init = list(*init, a)
+
+	expr = nil
+	andor = OANDAND
+	if n.Op == ONE {
+		andor = OOROR
+	}
+
+	if t.Etype == TARRAY && t.Bound <= 4 && issimple[t.Type.Etype] != 0 {
+		// Four or fewer elements of a basic type.
+		// Unroll comparisons.
+		for i = 0; int64(i) < t.Bound; i++ {
+			li = Nod(OINDEX, l, Nodintconst(int64(i)))
+			ri = Nod(OINDEX, r, Nodintconst(int64(i)))
+			a = Nod(int(n.Op), li, ri)
+			if expr == nil {
+				expr = a
+			} else {
+				expr = Nod(andor, expr, a)
+			}
+		}
+
+		if expr == nil {
+			expr = Nodbool(bool2int(n.Op == OEQ))
+		}
+		r = expr
+		goto ret
+	}
+
+	if t.Etype == TSTRUCT && countfield(t) <= 4 {
+		// Struct of four or fewer fields.
+		// Inline comparisons.
+		for t1 = t.Type; t1 != nil; t1 = t1.Down {
+			if isblanksym(t1.Sym) {
+				continue
+			}
+			li = Nod(OXDOT, l, newname(t1.Sym))
+			ri = Nod(OXDOT, r, newname(t1.Sym))
+			a = Nod(int(n.Op), li, ri)
+			if expr == nil {
+				expr = a
+			} else {
+				expr = Nod(andor, expr, a)
+			}
+		}
+
+		if expr == nil {
+			expr = Nodbool(bool2int(n.Op == OEQ))
+		}
+		r = expr
+		goto ret
+	}
+
+	// Chose not to inline.  Call equality function directly.
+	call = Nod(OCALL, eqfor(t, &needsize), nil)
+
+	call.List = list(call.List, l)
+	call.List = list(call.List, r)
+	if needsize != 0 {
+		call.List = list(call.List, Nodintconst(t.Width))
+	}
+	r = call
+	if n.Op != OEQ {
+		r = Nod(ONOT, r, nil)
+	}
+	goto ret
+
+ret:
+	typecheck(&r, Erv)
+	walkexpr(&r, init)
+	if r.Type != n.Type {
+		r = Nod(OCONVNOP, r, nil)
+		r.Type = n.Type
+		r.Typecheck = 1
+	}
+
+	*np = r
+	return
+}
+
+func samecheap(a *Node, b *Node) int {
+	var ar *Node
+	var br *Node
+	for a != nil && b != nil && a.Op == b.Op {
+		switch a.Op {
+		default:
+			return 0
+
+		case ONAME:
+			return bool2int(a == b)
+
+		case ODOT,
+			ODOTPTR:
+			ar = a.Right
+			br = b.Right
+			if ar.Op != ONAME || br.Op != ONAME || ar.Sym != br.Sym {
+				return 0
+			}
+
+		case OINDEX:
+			ar = a.Right
+			br = b.Right
+			if !(Isconst(ar, CTINT) != 0) || !(Isconst(br, CTINT) != 0) || Mpcmpfixfix(ar.Val.U.Xval, br.Val.U.Xval) != 0 {
+				return 0
+			}
+		}
+
+		a = a.Left
+		b = b.Left
+	}
+
+	return 0
+}
+
+func walkrotate(np **Node) {
+	var w int
+	var sl int
+	var sr int
+	var s int
+	var l *Node
+	var r *Node
+	var n *Node
+
+	if Thearch.Thechar == '9' {
+		return
+	}
+
+	n = *np
+
+	// Want << | >> or >> | << or << ^ >> or >> ^ << on unsigned value.
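+	// For example, for a uint32 x, x<<5 | x>>27 is rewritten into a
+	// single rotate-left-by-5 (OLROT) node.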
+	l = n.Left
+
+	r = n.Right
+	if (n.Op != OOR && n.Op != OXOR) || (l.Op != OLSH && l.Op != ORSH) || (r.Op != OLSH && r.Op != ORSH) || n.Type == nil || Issigned[n.Type.Etype] != 0 || l.Op == r.Op {
+		return
+	}
+
+	// Want same, side effect-free expression on lhs of both shifts.
+	if !(samecheap(l.Left, r.Left) != 0) {
+		return
+	}
+
+	// Constants adding to width?
+	w = int(l.Type.Width * 8)
+
+	if Smallintconst(l.Right) != 0 && Smallintconst(r.Right) != 0 {
+		sl = int(Mpgetfix(l.Right.Val.U.Xval))
+		if sl >= 0 {
+			sr = int(Mpgetfix(r.Right.Val.U.Xval))
+			if sr >= 0 && sl+sr == w {
+				goto yes
+			}
+		}
+		return
+	}
+
+	// TODO: Could allow s and 32-s if s is bounded (maybe s&31 and 32-s&31).
+	return
+
+	// Rewrite left shift half to left rotate.
+yes:
+	if l.Op == OLSH {
+		n = l
+	} else {
+		n = r
+	}
+	n.Op = OLROT
+
+	// Remove rotate 0 and rotate w.
+	s = int(Mpgetfix(n.Right.Val.U.Xval))
+
+	if s == 0 || s == w {
+		n = n.Left
+	}
+
+	*np = n
+	return
+}
+
+/*
+ * walkmul rewrites integer multiplication by powers of two as shifts.
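+ * For example, x*8 becomes x<<3, x*(-16) becomes -(x<<4), x*1 is
+ * reduced to x, and x*0 to 0 (keeping any side effects of x).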
+ */
+func walkmul(np **Node, init **NodeList) {
+	var n *Node
+	var nl *Node
+	var nr *Node
+	var pow int
+	var neg int
+	var w int
+
+	n = *np
+	if !(Isint[n.Type.Etype] != 0) {
+		return
+	}
+
+	if n.Right.Op == OLITERAL {
+		nl = n.Left
+		nr = n.Right
+	} else if n.Left.Op == OLITERAL {
+		nl = n.Right
+		nr = n.Left
+	} else {
+		return
+	}
+
+	neg = 0
+
+	// x*0 is 0 (and side effects of x).
+	if Mpgetfix(nr.Val.U.Xval) == 0 {
+		cheapexpr(nl, init)
+		Nodconst(n, n.Type, 0)
+		goto ret
+	}
+
+	// nr is a constant.
+	pow = powtwo(nr)
+
+	if pow < 0 {
+		return
+	}
+	if pow >= 1000 {
+		// negative power of 2, like -16
+		neg = 1
+
+		pow -= 1000
+	}
+
+	w = int(nl.Type.Width * 8)
+	if pow+1 >= w { // too big, shouldn't happen
+		return
+	}
+
+	nl = cheapexpr(nl, init)
+
+	if pow == 0 {
+		// x*1 is x
+		n = nl
+
+		goto ret
+	}
+
+	n = Nod(OLSH, nl, Nodintconst(int64(pow)))
+
+ret:
+	if neg != 0 {
+		n = Nod(OMINUS, n, nil)
+	}
+
+	typecheck(&n, Erv)
+	walkexpr(&n, init)
+	*np = n
+}
+
+/*
+ * walkdiv rewrites division by a constant as less expensive
+ * operations.
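+ *
+ * For example, for unsigned x, x/8 becomes x>>3 and x%8 becomes x&7;
+ * division by other constants is rewritten as a high multiplication by
+ * a precomputed magic constant followed by shifts (see divbymul below
+ * and Hacker's Delight, chapter 10).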
+ */
+func walkdiv(np **Node, init **NodeList) {
+	var n *Node
+	var nl *Node
+	var nr *Node
+	var nc *Node
+	var n1 *Node
+	var n2 *Node
+	var n3 *Node
+	var n4 *Node
+	var pow int // if >= 0, nr is 1<<pow
+	var s int   // 1 if nr is negative.
+	var w int
+	var twide *Type
+	var m Magic
+
+	// TODO(minux)
+	if Thearch.Thechar == '9' {
+		return
+	}
+
+	n = *np
+	if n.Right.Op != OLITERAL {
+		return
+	}
+
+	// nr is a constant.
+	nl = cheapexpr(n.Left, init)
+
+	nr = n.Right
+
+	// special cases of mod/div
+	// by a constant
+	w = int(nl.Type.Width * 8)
+
+	s = 0
+	pow = powtwo(nr)
+	if pow >= 1000 {
+		// negative power of 2
+		s = 1
+
+		pow -= 1000
+	}
+
+	if pow+1 >= w {
+		// divisor too large.
+		return
+	}
+
+	if pow < 0 {
+		goto divbymul
+	}
+
+	switch pow {
+	case 0:
+		if n.Op == OMOD {
+			// nl % 1 is zero.
+			Nodconst(n, n.Type, 0)
+		} else if s != 0 {
+			// divide by -1
+			n.Op = OMINUS
+
+			n.Right = nil
+		} else {
+			// divide by 1
+			n = nl
+		}
+
+	default:
+		if Issigned[n.Type.Etype] != 0 {
+			if n.Op == OMOD {
+				// signed modulo 2^pow is like ANDing
+				// with the last pow bits, but if nl < 0,
+				// nl & (2^pow-1) is (nl+1)%2^pow - 1.
+				nc = Nod(OXXX, nil, nil)
+
+				Nodconst(nc, Types[Simtype[TUINT]], int64(w)-1)
+				n1 = Nod(ORSH, nl, nc) // n1 = -1 iff nl < 0.
+				if pow == 1 {
+					typecheck(&n1, Erv)
+					n1 = cheapexpr(n1, init)
+
+					// n = (nl+ε)&1 -ε where ε=1 iff nl<0.
+					n2 = Nod(OSUB, nl, n1)
+
+					nc = Nod(OXXX, nil, nil)
+					Nodconst(nc, nl.Type, 1)
+					n3 = Nod(OAND, n2, nc)
+					n = Nod(OADD, n3, n1)
+				} else {
+					// n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0.
+					nc = Nod(OXXX, nil, nil)
+
+					Nodconst(nc, nl.Type, (1<<uint(pow))-1)
+					n2 = Nod(OAND, n1, nc) // n2 = 2^pow-1 iff nl<0.
+					typecheck(&n2, Erv)
+					n2 = cheapexpr(n2, init)
+
+					n3 = Nod(OADD, nl, n2)
+					n4 = Nod(OAND, n3, nc)
+					n = Nod(OSUB, n4, n2)
+				}
+
+				break
+			} else {
+				// arithmetic right shift does not give the correct rounding.
+				// if nl >= 0, nl >> n == nl / nr
+				// if nl < 0, we want to add 2^n-1 first.
+				nc = Nod(OXXX, nil, nil)
+
+				Nodconst(nc, Types[Simtype[TUINT]], int64(w)-1)
+				n1 = Nod(ORSH, nl, nc) // n1 = -1 iff nl < 0.
+				if pow == 1 {
+					// nl+1 is nl-(-1)
+					n.Left = Nod(OSUB, nl, n1)
+				} else {
+					// Do a logical right shift on -1 to keep pow bits.
+					nc = Nod(OXXX, nil, nil)
+
+					Nodconst(nc, Types[Simtype[TUINT]], int64(w)-int64(pow))
+					n2 = Nod(ORSH, conv(n1, tounsigned(nl.Type)), nc)
+					n.Left = Nod(OADD, nl, conv(n2, nl.Type))
+				}
+
+				// n = (nl + 2^pow-1) >> pow
+				n.Op = ORSH
+
+				nc = Nod(OXXX, nil, nil)
+				Nodconst(nc, Types[Simtype[TUINT]], int64(pow))
+				n.Right = nc
+				n.Typecheck = 0
+			}
+
+			if s != 0 {
+				n = Nod(OMINUS, n, nil)
+			}
+			break
+		}
+
+		nc = Nod(OXXX, nil, nil)
+		if n.Op == OMOD {
+			// n = nl & (nr-1)
+			n.Op = OAND
+
+			Nodconst(nc, nl.Type, Mpgetfix(nr.Val.U.Xval)-1)
+		} else {
+			// n = nl >> pow
+			n.Op = ORSH
+
+			Nodconst(nc, Types[Simtype[TUINT]], int64(pow))
+		}
+
+		n.Typecheck = 0
+		n.Right = nc
+	}
+
+	goto ret
+
+	// try to do division by multiply by (2^w)/d
+	// see hacker's delight chapter 10
+	// TODO: support 64-bit magic multiply here.
+divbymul:
+	m.W = w
+
+	if Issigned[nl.Type.Etype] != 0 {
+		m.Sd = Mpgetfix(nr.Val.U.Xval)
+		Smagic(&m)
+	} else {
+		m.Ud = uint64(Mpgetfix(nr.Val.U.Xval))
+		Umagic(&m)
+	}
+
+	if m.Bad != 0 {
+		return
+	}
+
+	// We have a quick division method so use it
+	// for modulo too.
+	if n.Op == OMOD {
+		goto longmod
+	}
+
+	switch Simtype[nl.Type.Etype] {
+	default:
+		return
+
+		// n1 = nl * magic >> w (HMUL)
+	case TUINT8,
+		TUINT16,
+		TUINT32:
+		nc = Nod(OXXX, nil, nil)
+
+		Nodconst(nc, nl.Type, int64(m.Um))
+		n1 = Nod(OMUL, nl, nc)
+		typecheck(&n1, Erv)
+		n1.Op = OHMUL
+		if m.Ua != 0 {
+			// Select a Go type with (at least) twice the width.
+			switch Simtype[nl.Type.Etype] {
+			default:
+				return
+
+			case TUINT8,
+				TUINT16:
+				twide = Types[TUINT32]
+
+			case TUINT32:
+				twide = Types[TUINT64]
+
+			case TINT8,
+				TINT16:
+				twide = Types[TINT32]
+
+			case TINT32:
+				twide = Types[TINT64]
+			}
+
+			// add numerator (might overflow).
+			// n2 = (n1 + nl)
+			n2 = Nod(OADD, conv(n1, twide), conv(nl, twide))
+
+			// shift by m.s
+			nc = Nod(OXXX, nil, nil)
+
+			Nodconst(nc, Types[TUINT], int64(m.S))
+			n = conv(Nod(ORSH, n2, nc), nl.Type)
+		} else {
+			// n = n1 >> m.s
+			nc = Nod(OXXX, nil, nil)
+
+			Nodconst(nc, Types[TUINT], int64(m.S))
+			n = Nod(ORSH, n1, nc)
+		}
+
+		// n1 = nl * magic >> w
+	case TINT8,
+		TINT16,
+		TINT32:
+		nc = Nod(OXXX, nil, nil)
+
+		Nodconst(nc, nl.Type, m.Sm)
+		n1 = Nod(OMUL, nl, nc)
+		typecheck(&n1, Erv)
+		n1.Op = OHMUL
+		if m.Sm < 0 {
+			// add the numerator.
+			n1 = Nod(OADD, n1, nl)
+		}
+
+		// shift by m.s
+		nc = Nod(OXXX, nil, nil)
+
+		Nodconst(nc, Types[TUINT], int64(m.S))
+		n2 = conv(Nod(ORSH, n1, nc), nl.Type)
+
+		// add 1 iff nl is negative.
+		nc = Nod(OXXX, nil, nil)
+
+		Nodconst(nc, Types[TUINT], int64(w)-1)
+		n3 = Nod(ORSH, nl, nc) // n3 = -1 iff nl is negative.
+		n = Nod(OSUB, n2, n3)
+
+		// apply sign.
+		if m.Sd < 0 {
+			n = Nod(OMINUS, n, nil)
+		}
+	}
+
+	goto ret
+
+	// rewrite as A%B = A - (A/B*B).
+longmod:
+	n1 = Nod(ODIV, nl, nr)
+
+	n2 = Nod(OMUL, n1, nr)
+	n = Nod(OSUB, nl, n2)
+	goto ret
+
+ret:
+	typecheck(&n, Erv)
+	walkexpr(&n, init)
+	*np = n
+}
+
+// return 1 if integer n must be in range [0, max), 0 otherwise
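+// For example, x&15 is always in [0, 16), and for an unsigned x,
+// x%8 is always in [0, 8).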
+func bounded(n *Node, max int64) int {
+	var v int64
+	var bits int32
+	var sign int
+
+	if n.Type == nil || !(Isint[n.Type.Etype] != 0) {
+		return 0
+	}
+
+	sign = int(Issigned[n.Type.Etype])
+	bits = int32(8 * n.Type.Width)
+
+	if Smallintconst(n) != 0 {
+		v = Mpgetfix(n.Val.U.Xval)
+		return bool2int(0 <= v && v < max)
+	}
+
+	switch n.Op {
+	case OAND:
+		v = -1
+		if Smallintconst(n.Left) != 0 {
+			v = Mpgetfix(n.Left.Val.U.Xval)
+		} else if Smallintconst(n.Right) != 0 {
+			v = Mpgetfix(n.Right.Val.U.Xval)
+		}
+
+		if 0 <= v && v < max {
+			return 1
+		}
+
+	case OMOD:
+		if !(sign != 0) && Smallintconst(n.Right) != 0 {
+			v = Mpgetfix(n.Right.Val.U.Xval)
+			if 0 <= v && v <= max {
+				return 1
+			}
+		}
+
+	case ODIV:
+		if !(sign != 0) && Smallintconst(n.Right) != 0 {
+			v = Mpgetfix(n.Right.Val.U.Xval)
+			for bits > 0 && v >= 2 {
+				bits--
+				v >>= 1
+			}
+		}
+
+	case ORSH:
+		if !(sign != 0) && Smallintconst(n.Right) != 0 {
+			v = Mpgetfix(n.Right.Val.U.Xval)
+			if v > int64(bits) {
+				return 1
+			}
+			bits -= int32(v)
+		}
+	}
+
+	if !(sign != 0) && bits <= 62 && 1<<uint(bits) <= max {
+		return 1
+	}
+
+	return 0
+}
+
+func usefield(n *Node) {
+	var field *Type
+	var l *Type
+
+	if !(obj.Fieldtrack_enabled != 0) {
+		return
+	}
+
+	switch n.Op {
+	default:
+		Fatal("usefield %v", Oconv(int(n.Op), 0))
+		fallthrough
+
+	case ODOT,
+		ODOTPTR:
+		break
+	}
+
+	field = n.Paramfld
+	if field == nil {
+		Fatal("usefield %v %v without paramfld", Tconv(n.Left.Type, 0), Sconv(n.Right.Sym, 0))
+	}
+	if field.Note == nil || !strings.Contains(field.Note.S, "go:\"track\"") {
+		return
+	}
+
+	// dedup on list
+	if field.Lastfn == Curfn {
+		return
+	}
+	field.Lastfn = Curfn
+	field.Outer = n.Left.Type
+	if Isptr[field.Outer.Etype] != 0 {
+		field.Outer = field.Outer.Type
+	}
+	if field.Outer.Sym == nil {
+		Yyerror("tracked field must be in named struct type")
+	}
+	if !exportname(field.Sym.Name) {
+		Yyerror("tracked field must be exported (upper case)")
+	}
+
+	l = typ(0)
+	l.Type = field
+	l.Down = Curfn.Paramfld
+	Curfn.Paramfld = l
+}
+
+func candiscardlist(l *NodeList) int {
+	for ; l != nil; l = l.Next {
+		if !(candiscard(l.N) != 0) {
+			return 0
+		}
+	}
+	return 1
+}
+
+func candiscard(n *Node) int {
+	if n == nil {
+		return 1
+	}
+
+	switch n.Op {
+	default:
+		return 0
+
+		// Discardable as long as the subpieces are.
+	case ONAME,
+		ONONAME,
+		OTYPE,
+		OPACK,
+		OLITERAL,
+		OADD,
+		OSUB,
+		OOR,
+		OXOR,
+		OADDSTR,
+		OADDR,
+		OANDAND,
+		OARRAYBYTESTR,
+		OARRAYRUNESTR,
+		OSTRARRAYBYTE,
+		OSTRARRAYRUNE,
+		OCAP,
+		OCMPIFACE,
+		OCMPSTR,
+		OCOMPLIT,
+		OMAPLIT,
+		OSTRUCTLIT,
+		OARRAYLIT,
+		OPTRLIT,
+		OCONV,
+		OCONVIFACE,
+		OCONVNOP,
+		ODOT,
+		OEQ,
+		ONE,
+		OLT,
+		OLE,
+		OGT,
+		OGE,
+		OKEY,
+		OLEN,
+		OMUL,
+		OLSH,
+		ORSH,
+		OAND,
+		OANDNOT,
+		ONEW,
+		ONOT,
+		OCOM,
+		OPLUS,
+		OMINUS,
+		OOROR,
+		OPAREN,
+		ORUNESTR,
+		OREAL,
+		OIMAG,
+		OCOMPLEX:
+		break
+
+		// Discardable as long as we know it's not division by zero.
+	case ODIV,
+		OMOD:
+		if Isconst(n.Right, CTINT) != 0 && mpcmpfixc(n.Right.Val.U.Xval, 0) != 0 {
+			break
+		}
+		if Isconst(n.Right, CTFLT) != 0 && mpcmpfltc(n.Right.Val.U.Fval, 0) != 0 {
+			break
+		}
+		return 0
+
+		// Discardable as long as we know it won't fail because of a bad size.
+	case OMAKECHAN,
+		OMAKEMAP:
+		if Isconst(n.Left, CTINT) != 0 && mpcmpfixc(n.Left.Val.U.Xval, 0) == 0 {
+			break
+		}
+		return 0
+
+		// Difficult to tell what sizes are okay.
+	case OMAKESLICE:
+		return 0
+	}
+
+	if !(candiscard(n.Left) != 0) || !(candiscard(n.Right) != 0) || !(candiscard(n.Ntest) != 0) || !(candiscard(n.Nincr) != 0) || !(candiscardlist(n.Ninit) != 0) || !(candiscardlist(n.Nbody) != 0) || !(candiscardlist(n.Nelse) != 0) || !(candiscardlist(n.List) != 0) || !(candiscardlist(n.Rlist) != 0) {
+		return 0
+	}
+
+	return 1
+}
+
+// rewrite
+//	print(x, y, z)
+// into
+//	func(a1, a2, a3) {
+//		print(a1, a2, a3)
+//	}(x, y, z)
+// and same for println.
+
+var walkprintfunc_prgen int
+
+func walkprintfunc(np **Node, init **NodeList) {
+	var n *Node
+	var a *Node
+	var fn *Node
+	var t *Node
+	var oldfn *Node
+	var l *NodeList
+	var printargs *NodeList
+	var num int
+	var buf string
+
+	n = *np
+
+	if n.Ninit != nil {
+		walkstmtlist(n.Ninit)
+		*init = concat(*init, n.Ninit)
+		n.Ninit = nil
+	}
+
+	t = Nod(OTFUNC, nil, nil)
+	num = 0
+	printargs = nil
+	for l = n.List; l != nil; l = l.Next {
+		buf = fmt.Sprintf("a%d", num)
+		num++
+		a = Nod(ODCLFIELD, newname(Lookup(buf)), typenod(l.N.Type))
+		t.List = list(t.List, a)
+		printargs = list(printargs, a.Left)
+	}
+
+	fn = Nod(ODCLFUNC, nil, nil)
+	walkprintfunc_prgen++
+	buf = fmt.Sprintf("print·%d", walkprintfunc_prgen)
+	fn.Nname = newname(Lookup(buf))
+	fn.Nname.Defn = fn
+	fn.Nname.Ntype = t
+	declare(fn.Nname, PFUNC)
+
+	oldfn = Curfn
+	Curfn = nil
+	funchdr(fn)
+
+	a = Nod(int(n.Op), nil, nil)
+	a.List = printargs
+	typecheck(&a, Etop)
+	walkstmt(&a)
+
+	fn.Nbody = list1(a)
+
+	funcbody(fn)
+
+	typecheck(&fn, Etop)
+	typechecklist(fn.Nbody, Etop)
+	xtop = list(xtop, fn)
+	Curfn = oldfn
+
+	a = Nod(OCALL, nil, nil)
+	a.Left = fn.Nname
+	a.List = n.List
+	typecheck(&a, Etop)
+	walkexpr(&a, init)
+	*np = a
+}