cmd/internal/gc: move cgen, regalloc, et al. to portable code
This CL moves the bulk of the code that has been copy-and-pasted
since the initial 386 port back into a shared place, cutting 5 copies to 1.
The motivation here is not cleanup per se but instead to reduce the
cost of introducing changes in shared concepts like regalloc or general
expression evaluation. For example, a change after this one will
implement x.(*T) without a call into the runtime. This CL makes that
followup work 5x easier.
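For reference, the construct in question is the pointer type
assertion, as in this illustrative example (not code from this CL):
    var x interface{} = new(T)
    p := x.(*T) // the followup compiles this without a runtime call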
The single copy still has more special cases for architecture details
than I'd like, but having them called out explicitly like this at least
opens the door to generalizing the conditions and smoothing out
the distinctions in the future.
This is a LARGE CL. I started by trying to pull in one function at a time
in a sequence of CLs and it became clear that everything was so
interrelated that it had to be moved as a whole. Apologies for the size.
It is not clear how many more releases this code will matter for;
eventually it will be replaced by Keith's SSA work. But as noted above,
the deduplication was necessary to reduce the cost of working on
the current code while we have it.
Passes tests on amd64, 386, arm, and ppc64le.
arm64 binaries can be built but have not been tested there.
Being able to build binaries means it is probably very close.
Change-Id: I735977f04c0614f80215fb12966dfe9bbd1f5861
Reviewed-on: https://go-review.googlesource.com/7853
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
diff --git a/src/cmd/internal/gc/cgen.go b/src/cmd/internal/gc/cgen.go
new file mode 100644
index 0000000..610f251
--- /dev/null
+++ b/src/cmd/internal/gc/cgen.go
@@ -0,0 +1,2567 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gc
+
+import (
+ "cmd/internal/obj"
+ "fmt"
+)
+
+/*
+ * generate:
+ * res = n;
+ * simplifies and calls Thearch.Gmove.
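+ *
+ * For example (illustrative, not from this CL): for the assignment
+ * x = y + z, the caller passes the OADD node for y+z as n and the
+ * ONAME node for x as res.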
+ */
+func Cgen(n *Node, res *Node) {
+ if Debug['g'] != 0 {
+ Dump("\ncgen-n", n)
+ Dump("cgen-res", res)
+ }
+
+ if n == nil || n.Type == nil {
+ return
+ }
+
+ if res == nil || res.Type == nil {
+ Fatal("cgen: res nil")
+ }
+
+ for n.Op == OCONVNOP {
+ n = n.Left
+ }
+
+ switch n.Op {
+ case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
+ if res.Op != ONAME || res.Addable == 0 {
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen_slice(n, &n1)
+ Cgen(&n1, res)
+ } else {
+ Cgen_slice(n, res)
+ }
+ return
+
+ case OEFACE:
+ if res.Op != ONAME || res.Addable == 0 {
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen_eface(n, &n1)
+ Cgen(&n1, res)
+ } else {
+ Cgen_eface(n, res)
+ }
+ return
+ }
+
+ if n.Ullman >= UINF {
+ if n.Op == OINDREG {
+ Fatal("cgen: this is going to miscompile")
+ }
+ if res.Ullman >= UINF {
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen(n, &n1)
+ Cgen(&n1, res)
+ return
+ }
+ }
+
+ if Isfat(n.Type) {
+ if n.Type.Width < 0 {
+ Fatal("forgot to compute width for %v", Tconv(n.Type, 0))
+ }
+ sgen(n, res, n.Type.Width)
+ return
+ }
+
+ if res.Addable == 0 {
+ if n.Ullman > res.Ullman {
+ if Ctxt.Arch.Regsize == 4 && Is64(n.Type) {
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen(n, &n1)
+ Cgen(&n1, res)
+ return
+ }
+
+ var n1 Node
+ Regalloc(&n1, n.Type, res)
+ Cgen(n, &n1)
+ if n1.Ullman > res.Ullman {
+ Dump("n1", &n1)
+ Dump("res", res)
+ Fatal("loop in cgen")
+ }
+
+ Cgen(&n1, res)
+ Regfree(&n1)
+ return
+ }
+
+ var f int
+ if res.Ullman >= UINF {
+ goto gen
+ }
+
+ if Complexop(n, res) {
+ Complexgen(n, res)
+ return
+ }
+
+ f = 1 // gen thru register
+ switch n.Op {
+ case OLITERAL:
+ if Smallintconst(n) {
+ f = 0
+ }
+
+ case OREGISTER:
+ f = 0
+ }
+
+ if !Iscomplex[n.Type.Etype] && Ctxt.Arch.Regsize == 8 {
+ a := Thearch.Optoas(OAS, res.Type)
+ var addr obj.Addr
+ if Thearch.Sudoaddable(a, res, &addr) {
+ var p1 *obj.Prog
+ if f != 0 {
+ var n2 Node
+ Regalloc(&n2, res.Type, nil)
+ Cgen(n, &n2)
+ p1 = Thearch.Gins(a, &n2, nil)
+ Regfree(&n2)
+ } else {
+ p1 = Thearch.Gins(a, n, nil)
+ }
+ p1.To = addr
+ if Debug['g'] != 0 {
+ fmt.Printf("%v [ignore previous line]\n", p1)
+ }
+ Thearch.Sudoclean()
+ return
+ }
+ }
+
+ gen:
+ if Ctxt.Arch.Thechar == '8' {
+ // no registers to speak of
+ var n1, n2 Node
+ Tempname(&n1, n.Type)
+ Cgen(n, &n1)
+ Igen(res, &n2, nil)
+ Thearch.Gmove(&n1, &n2)
+ Regfree(&n2)
+ return
+ }
+
+ var n1 Node
+ Igen(res, &n1, nil)
+ Cgen(n, &n1)
+ Regfree(&n1)
+ return
+ }
+
+ // update addressability for string, slice
+ // can't do in walk because n.Left.Addable
+ // changes if n.Left is an escaping local variable.
+ switch n.Op {
+ case OSPTR,
+ OLEN:
+ if Isslice(n.Left.Type) || Istype(n.Left.Type, TSTRING) {
+ n.Addable = n.Left.Addable
+ }
+
+ case OCAP:
+ if Isslice(n.Left.Type) {
+ n.Addable = n.Left.Addable
+ }
+
+ case OITAB:
+ n.Addable = n.Left.Addable
+ }
+
+ if Ctxt.Arch.Thechar == '5' { // TODO(rsc): Maybe more often?
+ // if both are addressable, move
+ if n.Addable != 0 && res.Addable != 0 {
+ if Is64(n.Type) || Is64(res.Type) || n.Op == OREGISTER || res.Op == OREGISTER || Iscomplex[n.Type.Etype] || Iscomplex[res.Type.Etype] {
+ Thearch.Gmove(n, res)
+ } else {
+ var n1 Node
+ Regalloc(&n1, n.Type, nil)
+ Thearch.Gmove(n, &n1)
+ Cgen(&n1, res)
+ Regfree(&n1)
+ }
+
+ return
+ }
+
+ // if both are not addressable, use a temporary.
+ if n.Addable == 0 && res.Addable == 0 {
+ // could use regalloc here sometimes,
+ // but have to check for ullman >= UINF.
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen(n, &n1)
+ Cgen(&n1, res)
+ return
+ }
+
+ // if result is not addressable directly but n is,
+ // compute its address and then store via the address.
+ if res.Addable == 0 {
+ var n1 Node
+ Igen(res, &n1, nil)
+ Cgen(n, &n1)
+ Regfree(&n1)
+ return
+ }
+ }
+
+ if Complexop(n, res) {
+ Complexgen(n, res)
+ return
+ }
+
+ if (Ctxt.Arch.Thechar == '6' || Ctxt.Arch.Thechar == '8') && n.Addable != 0 {
+ Thearch.Gmove(n, res)
+ return
+ }
+
+ if Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9' {
+ // if both are addressable, move
+ if n.Addable != 0 {
+ if n.Op == OREGISTER || res.Op == OREGISTER {
+ Thearch.Gmove(n, res)
+ } else {
+ var n1 Node
+ Regalloc(&n1, n.Type, nil)
+ Thearch.Gmove(n, &n1)
+ Cgen(&n1, res)
+ Regfree(&n1)
+ }
+ return
+ }
+ }
+
+ // if n is sudoaddable generate addr and move
+ if Ctxt.Arch.Thechar == '5' && !Is64(n.Type) && !Is64(res.Type) && !Iscomplex[n.Type.Etype] && !Iscomplex[res.Type.Etype] {
+ a := Thearch.Optoas(OAS, n.Type)
+ var addr obj.Addr
+ if Thearch.Sudoaddable(a, n, &addr) {
+ if res.Op != OREGISTER {
+ var n2 Node
+ Regalloc(&n2, res.Type, nil)
+ p1 := Thearch.Gins(a, nil, &n2)
+ p1.From = addr
+ if Debug['g'] != 0 {
+ fmt.Printf("%v [ignore previous line]\n", p1)
+ }
+ Thearch.Gmove(&n2, res)
+ Regfree(&n2)
+ } else {
+ p1 := Thearch.Gins(a, nil, res)
+ p1.From = addr
+ if Debug['g'] != 0 {
+ fmt.Printf("%v [ignore previous line]\n", p1)
+ }
+ }
+ Thearch.Sudoclean()
+ return
+ }
+ }
+
+ nl := n.Left
+ nr := n.Right
+
+ if nl != nil && nl.Ullman >= UINF {
+ if nr != nil && nr.Ullman >= UINF {
+ var n1 Node
+ Tempname(&n1, nl.Type)
+ Cgen(nl, &n1)
+ n2 := *n
+ n2.Left = &n1
+ Cgen(&n2, res)
+ return
+ }
+ }
+
+ // 64-bit ops are hard on 32-bit machine.
+ if Ctxt.Arch.Regsize == 4 && (Is64(n.Type) || Is64(res.Type) || n.Left != nil && Is64(n.Left.Type)) {
+ switch n.Op {
+ // math goes to cgen64.
+ case OMINUS,
+ OCOM,
+ OADD,
+ OSUB,
+ OMUL,
+ OLROT,
+ OLSH,
+ ORSH,
+ OAND,
+ OOR,
+ OXOR:
+ Thearch.Cgen64(n, res)
+ return
+ }
+ }
+
+ if Thearch.Cgen_float != nil && nl != nil && Isfloat[n.Type.Etype] && Isfloat[nl.Type.Etype] {
+ Thearch.Cgen_float(n, res)
+ return
+ }
+
+ if !Iscomplex[n.Type.Etype] && Ctxt.Arch.Regsize == 8 {
+ a := Thearch.Optoas(OAS, n.Type)
+ var addr obj.Addr
+ if Thearch.Sudoaddable(a, n, &addr) {
+ if res.Op == OREGISTER {
+ p1 := Thearch.Gins(a, nil, res)
+ p1.From = addr
+ } else {
+ var n2 Node
+ Regalloc(&n2, n.Type, nil)
+ p1 := Thearch.Gins(a, nil, &n2)
+ p1.From = addr
+ Thearch.Gins(a, &n2, res)
+ Regfree(&n2)
+ }
+
+ Thearch.Sudoclean()
+ return
+ }
+ }
+
+ var a int
+ switch n.Op {
+ default:
+ Dump("cgen", n)
+ Dump("cgen-res", res)
+ Fatal("cgen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+
+ // these call bgen to get a bool value
+ case OOROR,
+ OANDAND,
+ OEQ,
+ ONE,
+ OLT,
+ OLE,
+ OGE,
+ OGT,
+ ONOT:
+ p1 := Gbranch(obj.AJMP, nil, 0)
+
+ p2 := Pc
+ Thearch.Gmove(Nodbool(true), res)
+ p3 := Gbranch(obj.AJMP, nil, 0)
+ Patch(p1, Pc)
+ Bgen(n, true, 0, p2)
+ Thearch.Gmove(Nodbool(false), res)
+ Patch(p3, Pc)
+ return
+
+ case OPLUS:
+ Cgen(nl, res)
+ return
+
+ // unary
+ case OCOM:
+ a := Thearch.Optoas(OXOR, nl.Type)
+
+ var n1 Node
+ Regalloc(&n1, nl.Type, nil)
+ Cgen(nl, &n1)
+ var n2 Node
+ Nodconst(&n2, nl.Type, -1)
+ Thearch.Gins(a, &n2, &n1)
+ cgen_norm(n, &n1, res)
+ return
+
+ case OMINUS:
+ if Isfloat[nl.Type.Etype] {
+ nr = Nodintconst(-1)
+ Convlit(&nr, n.Type)
+ a = Thearch.Optoas(OMUL, nl.Type)
+ goto sbop
+ }
+
+ a := Thearch.Optoas(int(n.Op), nl.Type)
+ // unary
+ var n1 Node
+ Regalloc(&n1, nl.Type, res)
+
+ Cgen(nl, &n1)
+ if Ctxt.Arch.Thechar == '5' {
+ var n2 Node
+ Nodconst(&n2, nl.Type, 0)
+ Thearch.Gins(a, &n2, &n1)
+ } else if Ctxt.Arch.Thechar == '7' {
+ Thearch.Gins(a, &n1, &n1)
+ } else {
+ Thearch.Gins(a, nil, &n1)
+ }
+ cgen_norm(n, &n1, res)
+ return
+
+ // symmetric binary
+ case OAND,
+ OOR,
+ OXOR,
+ OADD,
+ OMUL:
+ if n.Op == OMUL && Thearch.Cgen_bmul != nil && Thearch.Cgen_bmul(int(n.Op), nl, nr, res) {
+ break
+ }
+ a = Thearch.Optoas(int(n.Op), nl.Type)
+ goto sbop
+
+ // asymmetric binary
+ case OSUB:
+ a = Thearch.Optoas(int(n.Op), nl.Type)
+ goto abop
+
+ case OHMUL:
+ Thearch.Cgen_hmul(nl, nr, res)
+
+ case OCONV:
+ if Eqtype(n.Type, nl.Type) || Noconv(n.Type, nl.Type) {
+ Cgen(nl, res)
+ return
+ }
+
+ if Ctxt.Arch.Thechar == '8' {
+ var n1 Node
+ var n2 Node
+ Tempname(&n2, n.Type)
+ Mgen(nl, &n1, res)
+ Thearch.Gmove(&n1, &n2)
+ Thearch.Gmove(&n2, res)
+ Mfree(&n1)
+ break
+ }
+
+ var n1 Node
+ var n2 Node
+ if Ctxt.Arch.Thechar == '5' {
+ if nl.Addable != 0 && !Is64(nl.Type) {
+ Regalloc(&n1, nl.Type, res)
+ Thearch.Gmove(nl, &n1)
+ } else {
+ if n.Type.Width > int64(Widthptr) || Is64(nl.Type) || Isfloat[nl.Type.Etype] {
+ Tempname(&n1, nl.Type)
+ } else {
+ Regalloc(&n1, nl.Type, res)
+ }
+ Cgen(nl, &n1)
+ }
+ if n.Type.Width > int64(Widthptr) || Is64(n.Type) || Isfloat[n.Type.Etype] {
+ Tempname(&n2, n.Type)
+ } else {
+ Regalloc(&n2, n.Type, nil)
+ }
+ } else {
+ if n.Type.Width > nl.Type.Width {
+ // If loading from memory, do conversion during load,
+ // so as to avoid use of 8-bit register in, say, int(*byteptr).
+ switch nl.Op {
+ case ODOT, ODOTPTR, OINDEX, OIND, ONAME:
+ Igen(nl, &n1, res)
+ Regalloc(&n2, n.Type, res)
+ Thearch.Gmove(&n1, &n2)
+ Thearch.Gmove(&n2, res)
+ Regfree(&n2)
+ Regfree(&n1)
+ return
+ }
+ }
+ Regalloc(&n1, nl.Type, res)
+ Regalloc(&n2, n.Type, &n1)
+ Cgen(nl, &n1)
+ }
+
+ // if we do the conversion n1 -> n2 here
+ // reusing the register, then gmove won't
+ // have to allocate its own register.
+ Thearch.Gmove(&n1, &n2)
+ Thearch.Gmove(&n2, res)
+ if n2.Op == OREGISTER {
+ Regfree(&n2)
+ }
+ if n1.Op == OREGISTER {
+ Regfree(&n1)
+ }
+
+ case ODOT,
+ ODOTPTR,
+ OINDEX,
+ OIND,
+ ONAME: // PHEAP or PPARAMREF var
+ var n1 Node
+ Igen(n, &n1, res)
+
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+
+ // interface table is first word of interface value
+ case OITAB:
+ var n1 Node
+ Igen(nl, &n1, res)
+
+ n1.Type = n.Type
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+
+ case OSPTR:
+ // pointer is the first word of string or slice.
+ if Isconst(nl, CTSTR) {
+ var n1 Node
+ Regalloc(&n1, Types[Tptr], res)
+ p1 := Thearch.Gins(Thearch.Optoas(OAS, n1.Type), nil, &n1)
+ Datastring(nl.Val.U.Sval, &p1.From)
+ p1.From.Type = obj.TYPE_ADDR
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ break
+ }
+
+ var n1 Node
+ Igen(nl, &n1, res)
+ n1.Type = n.Type
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+
+ case OLEN:
+ if Istype(nl.Type, TMAP) || Istype(nl.Type, TCHAN) {
+ // map and chan have len in the first int-sized word.
+ // a zero pointer means zero length
+ var n1 Node
+ Regalloc(&n1, Types[Tptr], res)
+
+ Cgen(nl, &n1)
+
+ var n2 Node
+ Nodconst(&n2, Types[Tptr], 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Tptr]), &n1, &n2)
+ p1 := Gbranch(Thearch.Optoas(OEQ, Types[Tptr]), nil, 0)
+
+ n2 = n1
+ n2.Op = OINDREG
+ n2.Type = Types[Simtype[TINT]]
+ Thearch.Gmove(&n2, &n1)
+
+ Patch(p1, Pc)
+
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ break
+ }
+
+ if Istype(nl.Type, TSTRING) || Isslice(nl.Type) {
+ // both slice and string store len one pointer-width into the struct.
+ // a zero pointer means zero length
+ var n1 Node
+ Igen(nl, &n1, res)
+
+ n1.Type = Types[Simtype[TUINT]]
+ n1.Xoffset += int64(Array_nel)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ break
+ }
+
+ Fatal("cgen: OLEN: unknown type %v", Tconv(nl.Type, obj.FmtLong))
+
+ case OCAP:
+ if Istype(nl.Type, TCHAN) {
+ // chan has cap in the second int-sized word.
+ // a zero pointer means zero length
+ var n1 Node
+ Regalloc(&n1, Types[Tptr], res)
+
+ Cgen(nl, &n1)
+
+ var n2 Node
+ Nodconst(&n2, Types[Tptr], 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Tptr]), &n1, &n2)
+ p1 := Gbranch(Thearch.Optoas(OEQ, Types[Tptr]), nil, 0)
+
+ n2 = n1
+ n2.Op = OINDREG
+ n2.Xoffset = int64(Widthint)
+ n2.Type = Types[Simtype[TINT]]
+ Thearch.Gmove(&n2, &n1)
+
+ Patch(p1, Pc)
+
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ break
+ }
+
+ if Isslice(nl.Type) {
+ var n1 Node
+ Igen(nl, &n1, res)
+ n1.Type = Types[Simtype[TUINT]]
+ n1.Xoffset += int64(Array_cap)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ break
+ }
+
+ Fatal("cgen: OCAP: unknown type %v", Tconv(nl.Type, obj.FmtLong))
+
+ case OADDR:
+ if n.Bounded { // let race detector avoid nil checks
+ Disable_checknil++
+ }
+ Agen(nl, res)
+ if n.Bounded {
+ Disable_checknil--
+ }
+
+ case OCALLMETH:
+ cgen_callmeth(n, 0)
+ cgen_callret(n, res)
+
+ case OCALLINTER:
+ cgen_callinter(n, res, 0)
+ cgen_callret(n, res)
+
+ case OCALLFUNC:
+ cgen_call(n, 0)
+ cgen_callret(n, res)
+
+ case OMOD, ODIV:
+ if Isfloat[n.Type.Etype] || Thearch.Dodiv == nil {
+ a = Thearch.Optoas(int(n.Op), nl.Type)
+ goto abop
+ }
+
+ if nl.Ullman >= nr.Ullman {
+ var n1 Node
+ Regalloc(&n1, nl.Type, res)
+ Cgen(nl, &n1)
+ cgen_div(int(n.Op), &n1, nr, res)
+ Regfree(&n1)
+ } else {
+ var n2 Node
+ if !Smallintconst(nr) {
+ Regalloc(&n2, nr.Type, res)
+ Cgen(nr, &n2)
+ } else {
+ n2 = *nr
+ }
+
+ cgen_div(int(n.Op), nl, &n2, res)
+ if n2.Op != OLITERAL {
+ Regfree(&n2)
+ }
+ }
+
+ case OLSH,
+ ORSH,
+ OLROT:
+ Thearch.Cgen_shift(int(n.Op), n.Bounded, nl, nr, res)
+ }
+
+ return
+
+ /*
+ * put simplest on right - we'll generate into left
+ * and then adjust it using the computation of right.
+ * constants and variables have the same ullman
+ * count, so look for constants specially.
+ *
+ * an integer constant we can use as an immediate
+ * is simpler than a variable - we can use the immediate
+ * in the adjustment instruction directly - so it goes
+ * on the right.
+ *
+ * other constants, like big integers or floating point
+ * constants, require a mov into a register, so those
+ * might as well go on the left, so we can reuse that
+ * register for the computation.
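+ *
+ * for example (illustrative): in x + 3, the small constant 3
+ * stays on the right and is used as an immediate in the add,
+ * while x is evaluated into the register on the left.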
+ */
+sbop: // symmetric binary
+ if nl.Ullman < nr.Ullman || (nl.Ullman == nr.Ullman && (Smallintconst(nl) || (nr.Op == OLITERAL && !Smallintconst(nr)))) {
+ r := nl
+ nl = nr
+ nr = r
+ }
+
+abop: // asymmetric binary
+ var n1 Node
+ var n2 Node
+ if Ctxt.Arch.Thechar == '8' {
+ // no registers, sigh
+ if Smallintconst(nr) {
+ var n1 Node
+ Mgen(nl, &n1, res)
+ var n2 Node
+ Regalloc(&n2, nl.Type, &n1)
+ Thearch.Gmove(&n1, &n2)
+ Thearch.Gins(a, nr, &n2)
+ Thearch.Gmove(&n2, res)
+ Regfree(&n2)
+ Mfree(&n1)
+ } else if nl.Ullman >= nr.Ullman {
+ var nt Node
+ Tempname(&nt, nl.Type)
+ Cgen(nl, &nt)
+ var n2 Node
+ Mgen(nr, &n2, nil)
+ var n1 Node
+ Regalloc(&n1, nl.Type, res)
+ Thearch.Gmove(&nt, &n1)
+ Thearch.Gins(a, &n2, &n1)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ Mfree(&n2)
+ } else {
+ var n2 Node
+ Regalloc(&n2, nr.Type, res)
+ Cgen(nr, &n2)
+ var n1 Node
+ Regalloc(&n1, nl.Type, nil)
+ Cgen(nl, &n1)
+ Thearch.Gins(a, &n2, &n1)
+ Regfree(&n2)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ }
+ return
+ }
+
+ if nl.Ullman >= nr.Ullman {
+ Regalloc(&n1, nl.Type, res)
+ Cgen(nl, &n1)
+
+ if Smallintconst(nr) && Ctxt.Arch.Thechar != '5' && Ctxt.Arch.Thechar != '7' && Ctxt.Arch.Thechar != '9' { // TODO(rsc): Check opcode for arm
+ n2 = *nr
+ } else {
+ Regalloc(&n2, nr.Type, nil)
+ Cgen(nr, &n2)
+ }
+ } else {
+ if Smallintconst(nr) && Ctxt.Arch.Thechar != '5' && Ctxt.Arch.Thechar != '7' && Ctxt.Arch.Thechar != '9' { // TODO(rsc): Check opcode for arm
+ n2 = *nr
+ } else {
+ Regalloc(&n2, nr.Type, res)
+ Cgen(nr, &n2)
+ }
+
+ Regalloc(&n1, nl.Type, nil)
+ Cgen(nl, &n1)
+ }
+
+ Thearch.Gins(a, &n2, &n1)
+ if n2.Op != OLITERAL {
+ Regfree(&n2)
+ }
+ cgen_norm(n, &n1, res)
+}
+
+// cgen_norm moves n1 to res, truncating to expected type if necessary.
+// n1 is a register, and cgen_norm frees it.
+func cgen_norm(n, n1, res *Node) {
+ switch Ctxt.Arch.Thechar {
+ case '6', '8':
+ // We use sized math, so the result is already truncated.
+ default:
+ switch n.Op {
+ case OADD, OSUB, OMUL, ODIV, OCOM, OMINUS:
+ // TODO(rsc): What about left shift?
+ Thearch.Gins(Thearch.Optoas(OAS, n.Type), n1, n1)
+ }
+ }
+
+ Thearch.Gmove(n1, res)
+ Regfree(n1)
+}
+
+func Mgen(n *Node, n1 *Node, rg *Node) {
+ n1.Op = OEMPTY
+
+ if n.Addable != 0 {
+ *n1 = *n
+ if n1.Op == OREGISTER || n1.Op == OINDREG {
+ reg[n.Val.U.Reg-int16(Thearch.REGMIN)]++
+ }
+ return
+ }
+
+ Tempname(n1, n.Type)
+ Cgen(n, n1)
+ if n.Type.Width <= int64(Widthptr) || Isfloat[n.Type.Etype] {
+ n2 := *n1
+ Regalloc(n1, n.Type, rg)
+ Thearch.Gmove(&n2, n1)
+ }
+}
+
+func Mfree(n *Node) {
+ if n.Op == OREGISTER {
+ Regfree(n)
+ }
+}
+
+/*
+ * allocate a register (reusing res if possible) and generate
+ * a = n
+ * The caller must call Regfree(a).
+ */
+func Cgenr(n *Node, a *Node, res *Node) {
+ if Debug['g'] != 0 {
+ Dump("cgenr-n", n)
+ }
+
+ if Isfat(n.Type) {
+ Fatal("cgenr on fat node")
+ }
+
+ if n.Addable != 0 {
+ Regalloc(a, n.Type, res)
+ Thearch.Gmove(n, a)
+ return
+ }
+
+ switch n.Op {
+ case ONAME,
+ ODOT,
+ ODOTPTR,
+ OINDEX,
+ OCALLFUNC,
+ OCALLMETH,
+ OCALLINTER:
+ var n1 Node
+ Igen(n, &n1, res)
+ Regalloc(a, Types[Tptr], &n1)
+ Thearch.Gmove(&n1, a)
+ Regfree(&n1)
+
+ default:
+ Regalloc(a, n.Type, res)
+ Cgen(n, a)
+ }
+}
+
+/*
+ * allocate a register (reusing res if possible) and generate
+ * a = &n
+ * The caller must call Regfree(a).
+ * The generated code checks that the result is not nil.
+ */
+func Agenr(n *Node, a *Node, res *Node) {
+ if Debug['g'] != 0 {
+ Dump("\nagenr-n", n)
+ }
+
+ nl := n.Left
+ nr := n.Right
+
+ switch n.Op {
+ case ODOT, ODOTPTR, OCALLFUNC, OCALLMETH, OCALLINTER:
+ var n1 Node
+ Igen(n, &n1, res)
+ Regalloc(a, Types[Tptr], &n1)
+ Agen(&n1, a)
+ Regfree(&n1)
+
+ case OIND:
+ Cgenr(n.Left, a, res)
+ Cgen_checknil(a)
+
+ case OINDEX:
+ if Ctxt.Arch.Thechar == '5' {
+ var p2 *obj.Prog // to be patched to panicindex.
+ w := uint32(n.Type.Width)
+ bounded := Debug['B'] != 0 || n.Bounded
+ var n1 Node
+ var n3 Node
+ if nr.Addable != 0 {
+ var tmp Node
+ if !Isconst(nr, CTINT) {
+ Tempname(&tmp, Types[TINT32])
+ }
+ if !Isconst(nl, CTSTR) {
+ Agenr(nl, &n3, res)
+ }
+ if !Isconst(nr, CTINT) {
+ p2 = Thearch.Cgenindex(nr, &tmp, bounded)
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gmove(&tmp, &n1)
+ }
+ } else if nl.Addable != 0 {
+ if !Isconst(nr, CTINT) {
+ var tmp Node
+ Tempname(&tmp, Types[TINT32])
+ p2 = Thearch.Cgenindex(nr, &tmp, bounded)
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gmove(&tmp, &n1)
+ }
+
+ if !Isconst(nl, CTSTR) {
+ Agenr(nl, &n3, res)
+ }
+ } else {
+ var tmp Node
+ Tempname(&tmp, Types[TINT32])
+ p2 = Thearch.Cgenindex(nr, &tmp, bounded)
+ nr = &tmp
+ if !Isconst(nl, CTSTR) {
+ Agenr(nl, &n3, res)
+ }
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1)
+ }
+
+ // &a is in &n3 (allocated in res)
+ // i is in &n1 (if not constant)
+ // w is width
+
+ // constant index
+ if Isconst(nr, CTINT) {
+ if Isconst(nl, CTSTR) {
+ Fatal("constant string constant index")
+ }
+ v := uint64(Mpgetfix(nr.Val.U.Xval))
+ var n2 Node
+ if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ if Debug['B'] == 0 && !n.Bounded {
+ n1 = n3
+ n1.Op = OINDREG
+ n1.Type = Types[Tptr]
+ n1.Xoffset = int64(Array_nel)
+ var n4 Node
+ Regalloc(&n4, n1.Type, nil)
+ Thearch.Gmove(&n1, &n4)
+ Nodconst(&n2, Types[TUINT32], int64(v))
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[TUINT32]), &n4, &n2)
+ Regfree(&n4)
+ p1 := Gbranch(Thearch.Optoas(OGT, Types[TUINT32]), nil, +1)
+ Ginscall(Panicindex, 0)
+ Patch(p1, Pc)
+ }
+
+ n1 = n3
+ n1.Op = OINDREG
+ n1.Type = Types[Tptr]
+ n1.Xoffset = int64(Array_array)
+ Thearch.Gmove(&n1, &n3)
+ }
+
+ Nodconst(&n2, Types[Tptr], int64(v*uint64(w)))
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ *a = n3
+ break
+ }
+
+ var n2 Node
+ Regalloc(&n2, Types[TINT32], &n1) // i
+ Thearch.Gmove(&n1, &n2)
+ Regfree(&n1)
+
+ var n4 Node
+ if Debug['B'] == 0 && !n.Bounded {
+ // check bounds
+ if Isconst(nl, CTSTR) {
+ Nodconst(&n4, Types[TUINT32], int64(len(nl.Val.U.Sval)))
+ } else if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ n1 = n3
+ n1.Op = OINDREG
+ n1.Type = Types[Tptr]
+ n1.Xoffset = int64(Array_nel)
+ Regalloc(&n4, Types[TUINT32], nil)
+ Thearch.Gmove(&n1, &n4)
+ } else {
+ Nodconst(&n4, Types[TUINT32], nl.Type.Bound)
+ }
+
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[TUINT32]), &n2, &n4)
+ if n4.Op == OREGISTER {
+ Regfree(&n4)
+ }
+ p1 := Gbranch(Thearch.Optoas(OLT, Types[TUINT32]), nil, +1)
+ if p2 != nil {
+ Patch(p2, Pc)
+ }
+ Ginscall(Panicindex, 0)
+ Patch(p1, Pc)
+ }
+
+ if Isconst(nl, CTSTR) {
+ Regalloc(&n3, Types[Tptr], res)
+ p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3)
+ Datastring(nl.Val.U.Sval, &p1.From)
+ p1.From.Type = obj.TYPE_ADDR
+ } else if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ n1 = n3
+ n1.Op = OINDREG
+ n1.Type = Types[Tptr]
+ n1.Xoffset = int64(Array_array)
+ Thearch.Gmove(&n1, &n3)
+ }
+
+ if w == 0 {
+ // nothing to do
+ } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
+ // done by back end
+ } else if w == 1 {
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ } else {
+ Regalloc(&n4, Types[TUINT32], nil)
+ Nodconst(&n1, Types[TUINT32], int64(w))
+ Thearch.Gmove(&n1, &n4)
+ Thearch.Gins(Thearch.Optoas(OMUL, Types[TUINT32]), &n4, &n2)
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ Regfree(&n4)
+ }
+ *a = n3
+ Regfree(&n2)
+ break
+ }
+ if Ctxt.Arch.Thechar == '8' {
+ var p2 *obj.Prog // to be patched to panicindex.
+ w := uint32(n.Type.Width)
+ bounded := Debug['B'] != 0 || n.Bounded
+ var n3 Node
+ var tmp Node
+ var n1 Node
+ if nr.Addable != 0 {
+ // Generate &nl first, and move nr into register.
+ if !Isconst(nl, CTSTR) {
+ Igen(nl, &n3, res)
+ }
+ if !Isconst(nr, CTINT) {
+ p2 = Thearch.Igenindex(nr, &tmp, bounded)
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gmove(&tmp, &n1)
+ }
+ } else if nl.Addable != 0 {
+ // Generate nr first, and move &nl into register.
+ if !Isconst(nr, CTINT) {
+ p2 = Thearch.Igenindex(nr, &tmp, bounded)
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gmove(&tmp, &n1)
+ }
+
+ if !Isconst(nl, CTSTR) {
+ Igen(nl, &n3, res)
+ }
+ } else {
+ p2 = Thearch.Igenindex(nr, &tmp, bounded)
+ nr = &tmp
+ if !Isconst(nl, CTSTR) {
+ Igen(nl, &n3, res)
+ }
+ Regalloc(&n1, tmp.Type, nil)
+ Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1)
+ }
+
+ // For fixed array we really want the pointer in n3.
+ var n2 Node
+ if Isfixedarray(nl.Type) {
+ Regalloc(&n2, Types[Tptr], &n3)
+ Agen(&n3, &n2)
+ Regfree(&n3)
+ n3 = n2
+ }
+
+ // &a[0] is in n3 (allocated in res)
+ // i is in n1 (if not constant)
+ // len(a) is in nlen (if needed)
+ // w is width
+
+ // constant index
+ if Isconst(nr, CTINT) {
+ if Isconst(nl, CTSTR) {
+ Fatal("constant string constant index") // front end should handle
+ }
+ v := uint64(Mpgetfix(nr.Val.U.Xval))
+ if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ if Debug['B'] == 0 && !n.Bounded {
+ nlen := n3
+ nlen.Type = Types[TUINT32]
+ nlen.Xoffset += int64(Array_nel)
+ Nodconst(&n2, Types[TUINT32], int64(v))
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[TUINT32]), &nlen, &n2)
+ p1 := Gbranch(Thearch.Optoas(OGT, Types[TUINT32]), nil, +1)
+ Ginscall(Panicindex, -1)
+ Patch(p1, Pc)
+ }
+ }
+
+ // Load base pointer in n2 = n3.
+ Regalloc(&n2, Types[Tptr], &n3)
+
+ n3.Type = Types[Tptr]
+ n3.Xoffset += int64(Array_array)
+ Thearch.Gmove(&n3, &n2)
+ Regfree(&n3)
+ if v*uint64(w) != 0 {
+ Nodconst(&n1, Types[Tptr], int64(v*uint64(w)))
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n1, &n2)
+ }
+ *a = n2
+ break
+ }
+
+ // i is in register n1, extend to 32 bits.
+ t := Types[TUINT32]
+
+ if Issigned[n1.Type.Etype] {
+ t = Types[TINT32]
+ }
+
+ Regalloc(&n2, t, &n1) // i
+ Thearch.Gmove(&n1, &n2)
+ Regfree(&n1)
+
+ if Debug['B'] == 0 && !n.Bounded {
+ // check bounds
+ t := Types[TUINT32]
+
+ var nlen Node
+ if Isconst(nl, CTSTR) {
+ Nodconst(&nlen, t, int64(len(nl.Val.U.Sval)))
+ } else if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ nlen = n3
+ nlen.Type = t
+ nlen.Xoffset += int64(Array_nel)
+ } else {
+ Nodconst(&nlen, t, nl.Type.Bound)
+ }
+
+ Thearch.Gins(Thearch.Optoas(OCMP, t), &n2, &nlen)
+ p1 := Gbranch(Thearch.Optoas(OLT, t), nil, +1)
+ if p2 != nil {
+ Patch(p2, Pc)
+ }
+ Ginscall(Panicindex, -1)
+ Patch(p1, Pc)
+ }
+
+ if Isconst(nl, CTSTR) {
+ Regalloc(&n3, Types[Tptr], res)
+ p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3)
+ Datastring(nl.Val.U.Sval, &p1.From)
+ p1.From.Type = obj.TYPE_ADDR
+ Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3)
+ goto indexdone1
+ }
+
+ // Load base pointer in n3.
+ Regalloc(&tmp, Types[Tptr], &n3)
+
+ if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ n3.Type = Types[Tptr]
+ n3.Xoffset += int64(Array_array)
+ Thearch.Gmove(&n3, &tmp)
+ }
+
+ Regfree(&n3)
+ n3 = tmp
+
+ if w == 0 {
+ // nothing to do
+ } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
+ // done by back end
+ } else if w == 1 {
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ } else {
+ Nodconst(&tmp, Types[TUINT32], int64(w))
+ Thearch.Gins(Thearch.Optoas(OMUL, Types[TUINT32]), &tmp, &n2)
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ }
+
+ indexdone1:
+ *a = n3
+ Regfree(&n2)
+ break
+ }
+
+ freelen := 0
+ w := uint64(n.Type.Width)
+
+ // Generate the non-addressable child first.
+ var n3 Node
+ var nlen Node
+ var tmp Node
+ var n1 Node
+ if nr.Addable != 0 {
+ goto irad
+ }
+ if nl.Addable != 0 {
+ Cgenr(nr, &n1, nil)
+ if !Isconst(nl, CTSTR) {
+ if Isfixedarray(nl.Type) {
+ Agenr(nl, &n3, res)
+ } else {
+ Igen(nl, &nlen, res)
+ freelen = 1
+ nlen.Type = Types[Tptr]
+ nlen.Xoffset += int64(Array_array)
+ Regalloc(&n3, Types[Tptr], res)
+ Thearch.Gmove(&nlen, &n3)
+ nlen.Type = Types[Simtype[TUINT]]
+ nlen.Xoffset += int64(Array_nel) - int64(Array_array)
+ }
+ }
+
+ goto index
+ }
+
+ Tempname(&tmp, nr.Type)
+ Cgen(nr, &tmp)
+ nr = &tmp
+
+ irad:
+ if !Isconst(nl, CTSTR) {
+ if Isfixedarray(nl.Type) {
+ Agenr(nl, &n3, res)
+ } else {
+ if nl.Addable == 0 {
+ // igen will need an addressable node.
+ var tmp2 Node
+ Tempname(&tmp2, nl.Type)
+
+ Cgen(nl, &tmp2)
+ nl = &tmp2
+ }
+
+ Igen(nl, &nlen, res)
+ freelen = 1
+ nlen.Type = Types[Tptr]
+ nlen.Xoffset += int64(Array_array)
+ Regalloc(&n3, Types[Tptr], res)
+ Thearch.Gmove(&nlen, &n3)
+ nlen.Type = Types[Simtype[TUINT]]
+ nlen.Xoffset += int64(Array_nel) - int64(Array_array)
+ }
+ }
+
+ if !Isconst(nr, CTINT) {
+ Cgenr(nr, &n1, nil)
+ }
+
+ goto index
+
+ // &a is in &n3 (allocated in res)
+ // i is in &n1 (if not constant)
+ // len(a) is in nlen (if needed)
+ // w is width
+
+ // constant index
+ index:
+ if Isconst(nr, CTINT) {
+ if Isconst(nl, CTSTR) {
+ Fatal("constant string constant index") // front end should handle
+ }
+ v := uint64(Mpgetfix(nr.Val.U.Xval))
+ if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ if Debug['B'] == 0 && !n.Bounded {
+ if nlen.Op != OREGISTER && (Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9') {
+ var tmp2 Node
+ Regalloc(&tmp2, Types[Simtype[TUINT]], nil)
+ Thearch.Gmove(&nlen, &tmp2)
+ Regfree(&nlen) // in case it is OINDREG
+ nlen = tmp2
+ }
+ var n2 Node
+ Nodconst(&n2, Types[Simtype[TUINT]], int64(v))
+ if Smallintconst(nr) {
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Simtype[TUINT]]), &nlen, &n2)
+ } else {
+ Regalloc(&tmp, Types[Simtype[TUINT]], nil)
+ Thearch.Gmove(&n2, &tmp)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Simtype[TUINT]]), &nlen, &tmp)
+ Regfree(&tmp)
+ }
+
+ p1 := Gbranch(Thearch.Optoas(OGT, Types[Simtype[TUINT]]), nil, +1)
+ Ginscall(Panicindex, -1)
+ Patch(p1, Pc)
+ }
+
+ Regfree(&nlen)
+ }
+
+ if v*w != 0 {
+ Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), int64(v*w), &n3)
+ }
+ *a = n3
+ break
+ }
+
+ // type of the index
+ t := Types[TUINT64]
+
+ if Issigned[n1.Type.Etype] {
+ t = Types[TINT64]
+ }
+
+ var n2 Node
+ Regalloc(&n2, t, &n1) // i
+ Thearch.Gmove(&n1, &n2)
+ Regfree(&n1)
+
+ if Debug['B'] == 0 && !n.Bounded {
+ // check bounds
+ t = Types[Simtype[TUINT]]
+
+ if Is64(nr.Type) {
+ t = Types[TUINT64]
+ }
+ if Isconst(nl, CTSTR) {
+ Nodconst(&nlen, t, int64(len(nl.Val.U.Sval)))
+ } else if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
+ if Is64(nr.Type) || Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9' {
+ var n5 Node
+ Regalloc(&n5, t, nil)
+ Thearch.Gmove(&nlen, &n5)
+ Regfree(&nlen)
+ nlen = n5
+ }
+ } else {
+ Nodconst(&nlen, t, nl.Type.Bound)
+ if !Smallintconst(&nlen) {
+ var n5 Node
+ Regalloc(&n5, t, nil)
+ Thearch.Gmove(&nlen, &n5)
+ nlen = n5
+ freelen = 1
+ }
+ }
+
+ Thearch.Gins(Thearch.Optoas(OCMP, t), &n2, &nlen)
+ p1 := Gbranch(Thearch.Optoas(OLT, t), nil, +1)
+ Ginscall(Panicindex, -1)
+ Patch(p1, Pc)
+ }
+
+ if Isconst(nl, CTSTR) {
+ Regalloc(&n3, Types[Tptr], res)
+ p1 := Thearch.Gins(Thearch.Optoas(OAS, n3.Type), nil, &n3) // XXX was LEAQ!
+ Datastring(nl.Val.U.Sval, &p1.From)
+ p1.From.Type = obj.TYPE_ADDR
+ Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3)
+ goto indexdone
+ }
+
+ if w == 0 {
+ // nothing to do
+ } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
+ // done by back end
+ } else if w == 1 {
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ } else {
+ Thearch.Ginscon(Thearch.Optoas(OMUL, t), int64(w), &n2)
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
+ }
+
+ indexdone:
+ *a = n3
+ Regfree(&n2)
+ if freelen != 0 {
+ Regfree(&nlen)
+ }
+
+ default:
+ Regalloc(a, Types[Tptr], res)
+ Agen(n, a)
+ }
+}
+
+/*
+ * generate:
+ * res = &n;
+ * The generated code checks that the result is not nil.
+ */
+func Agen(n *Node, res *Node) {
+ if Debug['g'] != 0 {
+ Dump("\nagen-res", res)
+ Dump("agen-r", n)
+ }
+
+ if n == nil || n.Type == nil {
+ return
+ }
+
+ for n.Op == OCONVNOP {
+ n = n.Left
+ }
+
+ if Isconst(n, CTNIL) && n.Type.Width > int64(Widthptr) {
+ // Use of a nil interface or nil slice.
+ // Create a temporary we can take the address of and read.
+ // The generated code is just going to panic, so it need not
+ // be terribly efficient. See issue 3670.
+ var n1 Node
+ Tempname(&n1, n.Type)
+
+ Gvardef(&n1)
+ Thearch.Clearfat(&n1)
+ var n2 Node
+ Regalloc(&n2, Types[Tptr], res)
+ var n3 Node
+ n3.Op = OADDR
+ n3.Left = &n1
+ Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n3, &n2)
+ Thearch.Gmove(&n2, res)
+ Regfree(&n2)
+ return
+ }
+
+ if n.Addable != 0 {
+ if n.Op == OREGISTER {
+ Fatal("agen OREGISTER")
+ }
+ var n1 Node
+ n1.Op = OADDR
+ n1.Left = n
+ var n2 Node
+ Regalloc(&n2, Types[Tptr], res)
+ Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n1, &n2)
+ Thearch.Gmove(&n2, res)
+ Regfree(&n2)
+ return
+ }
+
+ nl := n.Left
+
+ switch n.Op {
+ default:
+ Fatal("agen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+
+ case OCALLMETH:
+ cgen_callmeth(n, 0)
+ cgen_aret(n, res)
+
+ case OCALLINTER:
+ cgen_callinter(n, res, 0)
+ cgen_aret(n, res)
+
+ case OCALLFUNC:
+ cgen_call(n, 0)
+ cgen_aret(n, res)
+
+ case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen_slice(n, &n1)
+ Agen(&n1, res)
+
+ case OEFACE:
+ var n1 Node
+ Tempname(&n1, n.Type)
+ Cgen_eface(n, &n1)
+ Agen(&n1, res)
+
+ case OINDEX:
+ var n1 Node
+ Agenr(n, &n1, res)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+
+ case ONAME:
+ // should only get here with names in this func.
+ if n.Funcdepth > 0 && n.Funcdepth != Funcdepth {
+ Dump("bad agen", n)
+ Fatal("agen: bad ONAME funcdepth %d != %d", n.Funcdepth, Funcdepth)
+ }
+
+ // should only get here for heap vars or paramref
+ if n.Class&PHEAP == 0 && n.Class != PPARAMREF {
+ Dump("bad agen", n)
+ Fatal("agen: bad ONAME class %#x", n.Class)
+ }
+
+ Cgen(n.Heapaddr, res)
+ if n.Xoffset != 0 {
+ addOffset(res, n.Xoffset)
+ }
+
+ case OIND:
+ Cgen(nl, res)
+ Cgen_checknil(res)
+
+ case ODOT:
+ Agen(nl, res)
+ if n.Xoffset != 0 {
+ addOffset(res, n.Xoffset)
+ }
+
+ case ODOTPTR:
+ Cgen(nl, res)
+ Cgen_checknil(res)
+ if n.Xoffset != 0 {
+ addOffset(res, n.Xoffset)
+ }
+ }
+}
+
+func addOffset(res *Node, offset int64) {
+ if Ctxt.Arch.Thechar == '6' || Ctxt.Arch.Thechar == '8' {
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), Nodintconst(offset), res)
+ return
+ }
+
+ var n1, n2 Node
+ Regalloc(&n1, Types[Tptr], nil)
+ Thearch.Gmove(res, &n1)
+ Regalloc(&n2, Types[Tptr], nil)
+ Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), Nodintconst(offset), &n2)
+ Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n1)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ Regfree(&n2)
+}
+
+/*
+ * generate:
+ * newreg = &n;
+ *
+ * on exit, a has been changed to be *newreg.
+ * res is only a register-allocation hint; nothing is stored in it.
+ * caller must Regfree(a).
+ * The generated code checks that the result is not *nil.
+ */
+func Igen(n *Node, a *Node, res *Node) {
+ if Debug['g'] != 0 {
+ Dump("\nigen-n", n)
+ }
+
+ switch n.Op {
+ case ONAME:
+ if (n.Class&PHEAP != 0) || n.Class == PPARAMREF {
+ break
+ }
+ *a = *n
+ return
+
+ case OINDREG:
+ // Increase the refcount of the register so that igen's caller
+ // has to call Regfree.
+ if n.Val.U.Reg != int16(Thearch.REGSP) {
+ reg[n.Val.U.Reg-int16(Thearch.REGMIN)]++
+ }
+ *a = *n
+ return
+
+ case ODOT:
+ Igen(n.Left, a, res)
+ a.Xoffset += n.Xoffset
+ a.Type = n.Type
+ Fixlargeoffset(a)
+ return
+
+ case ODOTPTR:
+ Cgenr(n.Left, a, res)
+ Cgen_checknil(a)
+ a.Op = OINDREG
+ a.Xoffset += n.Xoffset
+ a.Type = n.Type
+ Fixlargeoffset(a)
+ return
+
+ case OCALLFUNC,
+ OCALLMETH,
+ OCALLINTER:
+ switch n.Op {
+ case OCALLFUNC:
+ cgen_call(n, 0)
+
+ case OCALLMETH:
+ cgen_callmeth(n, 0)
+
+ case OCALLINTER:
+ cgen_callinter(n, nil, 0)
+ }
+
+ var flist Iter
+ fp := Structfirst(&flist, Getoutarg(n.Left.Type))
+ *a = Node{}
+ a.Op = OINDREG
+ a.Val.U.Reg = int16(Thearch.REGSP)
+ a.Addable = 1
+ a.Xoffset = fp.Width
+ if HasLinkRegister() {
+ a.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+ a.Type = n.Type
+ return
+
+ // Index of fixed-size array by constant can
+ // put the offset in the addressing.
+ // Could do the same for slice except that we need
+ // to use the real index for the bounds checking.
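+ // For example (illustrative): for a of type [10]int64, a[3]
+ // is addressed as the base of a plus constant offset 3*8.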
+ case OINDEX:
+ if Isfixedarray(n.Left.Type) || (Isptr[n.Left.Type.Etype] && Isfixedarray(n.Left.Left.Type)) {
+ if Isconst(n.Right, CTINT) {
+ // Compute &a.
+ if !Isptr[n.Left.Type.Etype] {
+ Igen(n.Left, a, res)
+ } else {
+ var n1 Node
+ Igen(n.Left, &n1, res)
+ Cgen_checknil(&n1)
+ Regalloc(a, Types[Tptr], res)
+ Thearch.Gmove(&n1, a)
+ Regfree(&n1)
+ a.Op = OINDREG
+ }
+
+ // Compute &a[i] as &a + i*width.
+ a.Type = n.Type
+
+ a.Xoffset += Mpgetfix(n.Right.Val.U.Xval) * n.Type.Width
+ Fixlargeoffset(a)
+ return
+ }
+ }
+ }
+
+ Agenr(n, a, res)
+ a.Op = OINDREG
+ a.Type = n.Type
+}
+
+/*
+ * generate:
+ * if(n == true) goto to;
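+ * (when true_ is false, the branch is taken if n is false.)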
+ */
+func Bgen(n *Node, true_ bool, likely int, to *obj.Prog) {
+ if Debug['g'] != 0 {
+ Dump("\nbgen", n)
+ }
+
+ if n == nil {
+ n = Nodbool(true)
+ }
+
+ if n.Ninit != nil {
+ Genlist(n.Ninit)
+ }
+
+ if n.Type == nil {
+ Convlit(&n, Types[TBOOL])
+ if n.Type == nil {
+ return
+ }
+ }
+
+ et := int(n.Type.Etype)
+ if et != TBOOL {
+ Yyerror("cgen: bad type %v for %v", Tconv(n.Type, 0), Oconv(int(n.Op), 0))
+ Patch(Thearch.Gins(obj.AEND, nil, nil), to)
+ return
+ }
+
+ for n.Op == OCONVNOP {
+ n = n.Left
+ if n.Ninit != nil {
+ Genlist(n.Ninit)
+ }
+ }
+
+ if Thearch.Bgen_float != nil && n.Left != nil && Isfloat[n.Left.Type.Etype] {
+ Thearch.Bgen_float(n, bool2int(true_), likely, to)
+ return
+ }
+
+ var nl *Node
+ var nr *Node
+ switch n.Op {
+ default:
+ goto def
+
+ // need to ask if it is bool?
+ case OLITERAL:
+ if !true_ == (n.Val.U.Bval == 0) {
+ Patch(Gbranch(obj.AJMP, nil, likely), to)
+ }
+ return
+
+ case ONAME:
+ if n.Addable == 0 || Ctxt.Arch.Thechar == '5' || Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9' {
+ goto def
+ }
+ var n1 Node
+ Nodconst(&n1, n.Type, 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, n.Type), n, &n1)
+ a := Thearch.Optoas(ONE, n.Type)
+ if !true_ {
+ a = Thearch.Optoas(OEQ, n.Type)
+ }
+ Patch(Gbranch(a, n.Type, likely), to)
+ return
+
+ case OANDAND, OOROR:
+ if (n.Op == OANDAND) == true_ {
+ p1 := Gbranch(obj.AJMP, nil, 0)
+ p2 := Gbranch(obj.AJMP, nil, 0)
+ Patch(p1, Pc)
+ Bgen(n.Left, !true_, -likely, p2)
+ Bgen(n.Right, !true_, -likely, p2)
+ p1 = Gbranch(obj.AJMP, nil, 0)
+ Patch(p1, to)
+ Patch(p2, Pc)
+ } else {
+ Bgen(n.Left, true_, likely, to)
+ Bgen(n.Right, true_, likely, to)
+ }
+
+ return
+
+ case OEQ, ONE, OLT, OGT, OLE, OGE:
+ nr = n.Right
+ if nr == nil || nr.Type == nil {
+ return
+ }
+ fallthrough
+
+ case ONOT: // unary
+ nl = n.Left
+
+ if nl == nil || nl.Type == nil {
+ return
+ }
+ }
+
+ switch n.Op {
+ case ONOT:
+ Bgen(nl, !true_, likely, to)
+ return
+
+ case OEQ, ONE, OLT, OGT, OLE, OGE:
+ a := int(n.Op)
+ if !true_ {
+ if Isfloat[nr.Type.Etype] {
+ // brcom is not valid on floats when NaN is involved.
+ p1 := Gbranch(obj.AJMP, nil, 0)
+ p2 := Gbranch(obj.AJMP, nil, 0)
+ Patch(p1, Pc)
+ ll := n.Ninit // avoid re-genning ninit
+ n.Ninit = nil
+ Bgen(n, true, -likely, p2)
+ n.Ninit = ll
+ Patch(Gbranch(obj.AJMP, nil, 0), to)
+ Patch(p2, Pc)
+ return
+ }
+
+ a = Brcom(a)
+ true_ = !true_
+ }
+
+ // make simplest on right
+ if nl.Op == OLITERAL || (nl.Ullman < nr.Ullman && nl.Ullman < UINF) {
+ a = Brrev(a)
+ r := nl
+ nl = nr
+ nr = r
+ }
+
+ if Isslice(nl.Type) {
+ // front end should only leave cmp to literal nil
+ if (a != OEQ && a != ONE) || nr.Op != OLITERAL {
+ Yyerror("illegal slice comparison")
+ break
+ }
+
+ a = Thearch.Optoas(a, Types[Tptr])
+ var n1 Node
+ Igen(nl, &n1, nil)
+ n1.Xoffset += int64(Array_array)
+ n1.Type = Types[Tptr]
+ var n2 Node
+ Regalloc(&n2, Types[Tptr], &n1)
+ Cgen(&n1, &n2)
+ Regfree(&n1)
+ var tmp Node
+ Nodconst(&tmp, Types[Tptr], 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Tptr]), &n2, &tmp)
+ Patch(Gbranch(a, Types[Tptr], likely), to)
+ Regfree(&n2)
+ break
+ }
+
+ if Isinter(nl.Type) {
+ // front end should only leave cmp to literal nil
+ if (a != OEQ && a != ONE) || nr.Op != OLITERAL {
+ Yyerror("illegal interface comparison")
+ break
+ }
+
+ a = Thearch.Optoas(a, Types[Tptr])
+ var n1 Node
+ Igen(nl, &n1, nil)
+ n1.Type = Types[Tptr]
+ var n2 Node
+ Regalloc(&n2, Types[Tptr], &n1)
+ Cgen(&n1, &n2)
+ Regfree(&n1)
+ var tmp Node
+ Nodconst(&tmp, Types[Tptr], 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[Tptr]), &n2, &tmp)
+ Patch(Gbranch(a, Types[Tptr], likely), to)
+ Regfree(&n2)
+ break
+ }
+
+ if Iscomplex[nl.Type.Etype] {
+ Complexbool(a, nl, nr, true_, likely, to)
+ break
+ }
+
+ if Ctxt.Arch.Regsize == 4 && Is64(nr.Type) {
+ if nl.Addable == 0 || Isconst(nl, CTINT) {
+ var n1 Node
+ Tempname(&n1, nl.Type)
+ Cgen(nl, &n1)
+ nl = &n1
+ }
+
+ if nr.Addable == 0 {
+ var n2 Node
+ Tempname(&n2, nr.Type)
+ Cgen(nr, &n2)
+ nr = &n2
+ }
+
+ Thearch.Cmp64(nl, nr, a, likely, to)
+ break
+ }
+
+ var n1 Node
+ var n2 Node
+ if nr.Ullman >= UINF {
+ Regalloc(&n1, nl.Type, nil)
+ Cgen(nl, &n1)
+
+ var tmp Node
+ Tempname(&tmp, nl.Type)
+ Thearch.Gmove(&n1, &tmp)
+ Regfree(&n1)
+
+ Regalloc(&n2, nr.Type, nil)
+ Cgen(nr, &n2)
+
+ Regalloc(&n1, nl.Type, nil)
+ Cgen(&tmp, &n1)
+
+ goto cmp
+ }
+
+ if nl.Addable == 0 && Ctxt.Arch.Thechar == '8' {
+ Tempname(&n1, nl.Type)
+ } else {
+ Regalloc(&n1, nl.Type, nil)
+ }
+ Cgen(nl, &n1)
+ nl = &n1
+
+ if Smallintconst(nr) && Ctxt.Arch.Thechar != '9' {
+ Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), nl, nr)
+ Patch(Gbranch(Thearch.Optoas(a, nr.Type), nr.Type, likely), to)
+ if n1.Op == OREGISTER {
+ Regfree(&n1)
+ }
+ break
+ }
+
+ if nr.Addable == 0 && Ctxt.Arch.Thechar == '8' {
+ var tmp Node
+ Tempname(&tmp, nr.Type)
+ Cgen(nr, &tmp)
+ nr = &tmp
+ }
+
+ Regalloc(&n2, nr.Type, nil)
+ Cgen(nr, &n2)
+ nr = &n2
+
+ cmp:
+ l, r := nl, nr
+ // On x86, only < and <= work right with NaN; reverse if needed
+ if Ctxt.Arch.Thechar == '6' && Isfloat[nl.Type.Etype] && (a == OGT || a == OGE) {
+ l, r = r, l
+ a = Brrev(a)
+ }
+
+ Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), l, r)
+
+ if Ctxt.Arch.Thechar == '6' && Isfloat[nr.Type.Etype] && (n.Op == OEQ || n.Op == ONE) {
+ if n.Op == OEQ {
+ // neither NE nor P
+ p1 := Gbranch(Thearch.Optoas(ONE, nr.Type), nil, -likely)
+ p2 := Gbranch(Thearch.Optoas(OPS, nr.Type), nil, -likely)
+ Patch(Gbranch(obj.AJMP, nil, 0), to)
+ Patch(p1, Pc)
+ Patch(p2, Pc)
+ } else {
+ // either NE or P
+ Patch(Gbranch(Thearch.Optoas(ONE, nr.Type), nil, likely), to)
+ Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nil, likely), to)
+ }
+ } else if Ctxt.Arch.Thechar == '5' && Isfloat[nl.Type.Etype] {
+ if n.Op == ONE {
+ Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, likely), to)
+ Patch(Gbranch(Thearch.Optoas(a, nr.Type), nr.Type, likely), to)
+ } else {
+ p := Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, -likely)
+ Patch(Gbranch(Thearch.Optoas(a, nr.Type), nr.Type, likely), to)
+ Patch(p, Pc)
+ }
+ } else if (Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9') && Isfloat[nl.Type.Etype] && (a == OLE || a == OGE) {
+ // On arm64 and ppc64, <= and >= mishandle NaN. Must decompose into < or > and =.
+ if a == OLE {
+ a = OLT
+ } else {
+ a = OGT
+ }
+ Patch(Gbranch(Thearch.Optoas(a, nr.Type), nr.Type, likely), to)
+ Patch(Gbranch(Thearch.Optoas(OEQ, nr.Type), nr.Type, likely), to)
+ } else {
+ Patch(Gbranch(Thearch.Optoas(a, nr.Type), nr.Type, likely), to)
+ }
+ if n1.Op == OREGISTER {
+ Regfree(&n1)
+ }
+ if n2.Op == OREGISTER {
+ Regfree(&n2)
+ }
+ }
+
+ return
+
+def:
+ // TODO: Optimize on systems that can compare to zero easily.
+ var n1 Node
+ Regalloc(&n1, n.Type, nil)
+ Cgen(n, &n1)
+ var n2 Node
+ Nodconst(&n2, n.Type, 0)
+ Thearch.Gins(Thearch.Optoas(OCMP, n.Type), &n1, &n2)
+ a := Thearch.Optoas(ONE, n.Type)
+ if !true_ {
+ a = Thearch.Optoas(OEQ, n.Type)
+ }
+ Patch(Gbranch(a, n.Type, likely), to)
+ Regfree(&n1)
+ return
+}
+
+/*
+ * n is on stack, either local variable
+ * or return value from function call.
+ * return n's offset from SP.
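+ * returns -1000 if n is not known to be on the stack,
+ * 1000 if it is on the stack at an offset not known at
+ * compile time (see the overlap check in sgen).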
+ */
+func stkof(n *Node) int64 {
+ switch n.Op {
+ case OINDREG:
+ return n.Xoffset
+
+ case ODOT:
+ t := n.Left.Type
+ if Isptr[t.Etype] {
+ break
+ }
+ off := stkof(n.Left)
+ if off == -1000 || off == 1000 {
+ return off
+ }
+ return off + n.Xoffset
+
+ case OINDEX:
+ t := n.Left.Type
+ if !Isfixedarray(t) {
+ break
+ }
+ off := stkof(n.Left)
+ if off == -1000 || off == 1000 {
+ return off
+ }
+ if Isconst(n.Right, CTINT) {
+ return off + t.Type.Width*Mpgetfix(n.Right.Val.U.Xval)
+ }
+ return 1000
+
+ case OCALLMETH, OCALLINTER, OCALLFUNC:
+ t := n.Left.Type
+ if Isptr[t.Etype] {
+ t = t.Type
+ }
+
+ var flist Iter
+ t = Structfirst(&flist, Getoutarg(t))
+ if t != nil {
+ w := t.Width
+ if HasLinkRegister() {
+ w += int64(Ctxt.Arch.Ptrsize)
+ }
+ return w
+ }
+ }
+
+ // botch - probably failing to recognize address
+ // arithmetic on the above. eg INDEX and DOT
+ return -1000
+}
+
+/*
+ * block copy:
+ * memmove(&ns, &n, w);
+ */
+func sgen(n *Node, ns *Node, w int64) {
+ if Debug['g'] != 0 {
+ fmt.Printf("\nsgen w=%d\n", w)
+ Dump("r", n)
+ Dump("res", ns)
+ }
+
+ if n.Ullman >= UINF && ns.Ullman >= UINF {
+ Fatal("sgen UINF")
+ }
+
+ if w < 0 {
+ Fatal("sgen copy %d", w)
+ }
+
+ // If copying .args, that's all the results, so record definition sites
+ // for them for the liveness analysis.
+ if ns.Op == ONAME && ns.Sym.Name == ".args" {
+ for l := Curfn.Dcl; l != nil; l = l.Next {
+ if l.N.Class == PPARAMOUT {
+ Gvardef(l.N)
+ }
+ }
+ }
+
+ // Avoid taking the address for simple enough types.
+ if Componentgen(n, ns) {
+ return
+ }
+
+ if w == 0 {
+ // evaluate side effects only
+ var nodr Node
+ Regalloc(&nodr, Types[Tptr], nil)
+ Agen(ns, &nodr)
+ Agen(n, &nodr)
+ Regfree(&nodr)
+ return
+ }
+
+ // offset on the stack
+ osrc := stkof(n)
+ odst := stkof(ns)
+
+ if osrc != -1000 && odst != -1000 && (osrc == 1000 || odst == 1000) {
+ // osrc and odst both on stack, and at least one is in
+ // an unknown position. Could generate code to test
+ // for forward/backward copy, but instead just copy
+ // to a temporary location first.
+ var tmp Node
+ Tempname(&tmp, n.Type)
+ sgen(n, &tmp, w)
+ sgen(&tmp, ns, w)
+ return
+ }
+
+ Thearch.Stackcopy(n, ns, osrc, odst, w)
+}
+
+/*
+ * generate:
+ * call f
+ * proc=-1 normal call but no return
+ * proc=0 normal call
+ * proc=1 goroutine run in new proc
+ * proc=2 defer call save away stack
+ * proc=3 normal call to C pointer (not Go func value)
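+ * proc=3 is set internally by cgen_callinter; other callers
+ * pass -1, 0, 1, or 2.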
+ */
+func Ginscall(f *Node, proc int) {
+ if f.Type != nil {
+ extra := int32(0)
+ if proc == 1 || proc == 2 {
+ extra = 2 * int32(Widthptr)
+ }
+ Setmaxarg(f.Type, extra)
+ }
+
+ switch proc {
+ default:
+ Fatal("Ginscall: bad proc %d", proc)
+
+ case 0, // normal call
+ -1: // normal call but no return
+ if f.Op == ONAME && f.Class == PFUNC {
+ if f == Deferreturn {
+ // Deferred calls will appear to be returning to
+ // the CALL deferreturn(SB) that we are about to emit.
+ // However, the stack trace code will show the line
+ // of the instruction byte before the return PC.
+ // To avoid that being an unrelated instruction,
+ // insert an actual hardware NOP that will have the right line number.
+ // This is different from obj.ANOP, which is a virtual no-op
+ // that doesn't make it into the instruction stream.
+ Thearch.Ginsnop()
+ }
+
+ p := Thearch.Gins(obj.ACALL, nil, f)
+ Afunclit(&p.To, f)
+ if proc == -1 || Noreturn(p) {
+ Thearch.Gins(obj.AUNDEF, nil, nil)
+ }
+ break
+ }
+
+ var reg Node
+ Nodreg(®, Types[Tptr], Thearch.REGCTXT)
+ var r1 Node
+ Nodreg(&r1, Types[Tptr], Thearch.REGCALLX)
+ Thearch.Gmove(f, ®)
+ reg.Op = OINDREG
+ Thearch.Gmove(®, &r1)
+ reg.Op = OREGISTER
+ Thearch.Gins(obj.ACALL, ®, &r1)
+
+ case 3: // normal call of c function pointer
+ Thearch.Gins(obj.ACALL, nil, f)
+
+ case 1, // call in new proc (go)
+ 2: // deferred call (defer)
+ var stk Node
+
+ // size of arguments at 0(SP)
+ stk.Op = OINDREG
+ stk.Val.U.Reg = int16(Thearch.REGSP)
+ stk.Xoffset = 0
+ if HasLinkRegister() {
+ stk.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+ Thearch.Ginscon(Thearch.Optoas(OAS, Types[Tptr]), int64(Argsize(f.Type)), &stk)
+
+ // FuncVal* at 8(SP)
+ stk.Xoffset = int64(Widthptr)
+ if HasLinkRegister() {
+ stk.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+
+ var reg Node
+ Nodreg(®, Types[Tptr], Thearch.REGCALLX2)
+ Thearch.Gmove(f, ®)
+ Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), ®, &stk)
+
+ if proc == 1 {
+ Ginscall(Newproc, 0)
+ } else {
+ if Hasdefer == 0 {
+ Fatal("hasdefer=0 but has defer")
+ }
+ Ginscall(Deferproc, 0)
+ }
+
+ if proc == 2 {
+ Nodreg(®, Types[TINT32], Thearch.REGRETURN)
+ Thearch.Gins(Thearch.Optoas(OCMP, Types[TINT32]), ®, Nodintconst(0))
+ p := Gbranch(Thearch.Optoas(OEQ, Types[TINT32]), nil, +1)
+ cgen_ret(nil)
+ Patch(p, Pc)
+ }
+ }
+}
+
+/*
+ * n is call to interface method.
+ * generate res = n.
+ */
+func cgen_callinter(n *Node, res *Node, proc int) {
+ i := n.Left
+ if i.Op != ODOTINTER {
+ Fatal("cgen_callinter: not ODOTINTER %v", Oconv(int(i.Op), 0))
+ }
+
+ f := i.Right // field
+ if f.Op != ONAME {
+ Fatal("cgen_callinter: not ONAME %v", Oconv(int(f.Op), 0))
+ }
+
+ i = i.Left // interface
+
+ if i.Addable == 0 {
+ var tmpi Node
+ Tempname(&tmpi, i.Type)
+ Cgen(i, &tmpi)
+ i = &tmpi
+ }
+
+ Genlist(n.List) // assign the args
+
+ // i is now addable, prepare an indirected
+ // register to hold its address.
+ var nodi Node
+ Igen(i, &nodi, res) // REG = &inter
+
+ var nodsp Node
+ Nodindreg(&nodsp, Types[Tptr], Thearch.REGSP)
+ nodsp.Xoffset = 0
+ if HasLinkRegister() {
+ nodsp.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+ if proc != 0 {
+ nodsp.Xoffset += 2 * int64(Widthptr) // leave room for size & fn
+ }
+ nodi.Type = Types[Tptr]
+ nodi.Xoffset += int64(Widthptr)
+ Cgen(&nodi, &nodsp) // {0, 8(nacl), or 16}(SP) = 8(REG) -- i.data
+
+ var nodo Node
+ Regalloc(&nodo, Types[Tptr], res)
+
+ nodi.Type = Types[Tptr]
+ nodi.Xoffset -= int64(Widthptr)
+ Cgen(&nodi, &nodo) // REG = 0(REG) -- i.tab
+ Regfree(&nodi)
+
+ var nodr Node
+ Regalloc(&nodr, Types[Tptr], &nodo)
+ if n.Left.Xoffset == BADWIDTH {
+ Fatal("cgen_callinter: badwidth")
+ }
+ Cgen_checknil(&nodo) // in case offset is huge
+ nodo.Op = OINDREG
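+ // Skip the itab header to reach the fun array: assumed layout is
+ // three pointer fields followed by two int32s (3*Widthptr + 8).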
+ nodo.Xoffset = n.Left.Xoffset + 3*int64(Widthptr) + 8
+ if proc == 0 {
+ // plain call: use direct c function pointer - more efficient
+ Cgen(&nodo, &nodr) // REG = 32+offset(REG) -- i.tab->fun[f]
+ proc = 3
+ } else {
+ // go/defer. generate go func value.
+ Agen(&nodo, &nodr) // REG = &(32+offset(REG)) -- i.tab->fun[f]
+ }
+
+ nodr.Type = n.Left.Type
+ Ginscall(&nodr, proc)
+
+ Regfree(&nodr)
+ Regfree(&nodo)
+}
+
+/*
+ * generate function call;
+ * proc=0 normal call
+ * proc=1 goroutine run in new proc
+ * proc=2 defer call save away stack
+ */
+func cgen_call(n *Node, proc int) {
+ if n == nil {
+ return
+ }
+
+ var afun Node
+ if n.Left.Ullman >= UINF {
+ // if name involves a fn call
+ // precompute the address of the fn
+ Tempname(&afun, Types[Tptr])
+
+ Cgen(n.Left, &afun)
+ }
+
+ Genlist(n.List) // assign the args
+ t := n.Left.Type
+
+ // call tempname pointer
+ if n.Left.Ullman >= UINF {
+ var nod Node
+ Regalloc(&nod, Types[Tptr], nil)
+ Cgen_as(&nod, &afun)
+ nod.Type = t
+ Ginscall(&nod, proc)
+ Regfree(&nod)
+ return
+ }
+
+ // call pointer
+ if n.Left.Op != ONAME || n.Left.Class != PFUNC {
+ var nod Node
+ Regalloc(&nod, Types[Tptr], nil)
+ Cgen_as(&nod, n.Left)
+ nod.Type = t
+ Ginscall(&nod, proc)
+ Regfree(&nod)
+ return
+ }
+
+ // call direct
+ n.Left.Method = 1
+
+ Ginscall(n.Left, proc)
+}
+
+func HasLinkRegister() bool {
+ c := Ctxt.Arch.Thechar
+ return c != '6' && c != '8'
+}
+
+/*
+ * call to n has already been generated.
+ * generate:
+ * res = return value from call.
+ */
+func cgen_callret(n *Node, res *Node) {
+ t := n.Left.Type
+ if t.Etype == TPTR32 || t.Etype == TPTR64 {
+ t = t.Type
+ }
+
+ var flist Iter
+ fp := Structfirst(&flist, Getoutarg(t))
+ if fp == nil {
+ Fatal("cgen_callret: nil")
+ }
+
+ var nod Node
+ nod.Op = OINDREG
+ nod.Val.U.Reg = int16(Thearch.REGSP)
+ nod.Addable = 1
+
+ nod.Xoffset = fp.Width
+ if HasLinkRegister() {
+ nod.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+ nod.Type = fp.Type
+ Cgen_as(res, &nod)
+}
+
+/*
+ * call to n has already been generated.
+ * generate:
+ * res = &return value from call.
+ */
+func cgen_aret(n *Node, res *Node) {
+ t := n.Left.Type
+ if Isptr[t.Etype] {
+ t = t.Type
+ }
+
+ var flist Iter
+ fp := Structfirst(&flist, Getoutarg(t))
+ if fp == nil {
+ Fatal("cgen_aret: nil")
+ }
+
+ var nod1 Node
+ nod1.Op = OINDREG
+ nod1.Val.U.Reg = int16(Thearch.REGSP)
+ nod1.Addable = 1
+ nod1.Xoffset = fp.Width
+ if HasLinkRegister() {
+ nod1.Xoffset += int64(Ctxt.Arch.Ptrsize)
+ }
+ nod1.Type = fp.Type
+
+ if res.Op != OREGISTER {
+ var nod2 Node
+ Regalloc(&nod2, Types[Tptr], res)
+ Agen(&nod1, &nod2)
+ Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &nod2, res)
+ Regfree(&nod2)
+ } else {
+ Agen(&nod1, res)
+ }
+}
+
+/*
+ * generate return.
+ * n.List holds the assignments to the return values.
+ */
+func cgen_ret(n *Node) {
+ if n != nil {
+ Genlist(n.List) // copy out args
+ }
+ if Hasdefer != 0 {
+ Ginscall(Deferreturn, 0)
+ }
+ Genlist(Curfn.Exit)
+ p := Thearch.Gins(obj.ARET, nil, nil)
+ if n != nil && n.Op == ORETJMP {
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_EXTERN
+ p.To.Sym = Linksym(n.Left.Sym)
+ }
+}
+
+/*
+ * generate division according to op, one of:
+ * res = nl / nr
+ * res = nl % nr
+ */
+func cgen_div(op int, nl *Node, nr *Node, res *Node) {
+ var w int
+
+ // TODO(rsc): arm64 needs to support the relevant instructions
+ // in peep and optoas in order to enable this.
+ // TODO(rsc): ppc64 needs to support the relevant instructions
+ // in peep and optoas in order to enable this.
+ if nr.Op != OLITERAL || Ctxt.Arch.Thechar == '7' || Ctxt.Arch.Thechar == '9' {
+ goto longdiv
+ }
+ w = int(nl.Type.Width * 8)
+
+ // Front end handled 32-bit division. We only need to handle 64-bit.
+ // try to do division by multiply by (2^w)/d
+ // see hacker's delight chapter 10
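+ // The idea (sketch): find a magic multiplier m and shift s with
+ // n/d == (n*m) >> (w+s) for all w-bit n; Umagic/Smagic compute
+ // m and s, or set m.Bad if no such pair exists.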
+ switch Simtype[nl.Type.Etype] {
+ default:
+ goto longdiv
+
+ case TUINT64:
+ var m Magic
+ m.W = w
+ m.Ud = uint64(Mpgetfix(nr.Val.U.Xval))
+ Umagic(&m)
+ if m.Bad != 0 {
+ break
+ }
+ if op == OMOD {
+ goto longmod
+ }
+
+ var n1 Node
+ Cgenr(nl, &n1, nil)
+ var n2 Node
+ Nodconst(&n2, nl.Type, int64(m.Um))
+ var n3 Node
+ Regalloc(&n3, nl.Type, res)
+ Thearch.Cgen_hmul(&n1, &n2, &n3)
+
+ if m.Ua != 0 {
+ // need to add numerator accounting for overflow
+ Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3)
+
+ Nodconst(&n2, nl.Type, 1)
+ Thearch.Gins(Thearch.Optoas(ORROTC, nl.Type), &n2, &n3)
+ Nodconst(&n2, nl.Type, int64(m.S)-1)
+ Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3)
+ } else {
+ Nodconst(&n2, nl.Type, int64(m.S))
+ Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift dx
+ }
+
+ Thearch.Gmove(&n3, res)
+ Regfree(&n1)
+ Regfree(&n3)
+ return
+
+ case TINT64:
+ var m Magic
+ m.W = w
+ m.Sd = Mpgetfix(nr.Val.U.Xval)
+ Smagic(&m)
+ if m.Bad != 0 {
+ break
+ }
+ if op == OMOD {
+ goto longmod
+ }
+
+ var n1 Node
+ Cgenr(nl, &n1, res)
+ var n2 Node
+ Nodconst(&n2, nl.Type, m.Sm)
+ var n3 Node
+ Regalloc(&n3, nl.Type, nil)
+ Thearch.Cgen_hmul(&n1, &n2, &n3)
+
+ if m.Sm < 0 {
+ // need to add numerator
+ Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3)
+ }
+
+ Nodconst(&n2, nl.Type, int64(m.S))
+ Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift n3
+
+ Nodconst(&n2, nl.Type, int64(w)-1)
+
+ Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n1) // -1 iff num is neg
+ Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n1, &n3) // added
+
+ if m.Sd < 0 {
+ // this could probably be removed
+ // by factoring it into the multiplier
+ Thearch.Gins(Thearch.Optoas(OMINUS, nl.Type), nil, &n3)
+ }
+
+ Thearch.Gmove(&n3, res)
+ Regfree(&n1)
+ Regfree(&n3)
+ return
+ }
+
+ goto longdiv
+
+ // division and mod using (slow) hardware instruction
+longdiv:
+ Thearch.Dodiv(op, nl, nr, res)
+
+ return
+
+ // mod using formula A%B = A-(A/B*B) but
+ // we know that there is a fast algorithm for A/B
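+ // (for example, 17%5 = 17 - (17/5)*5 = 17 - 3*5 = 2)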
+longmod:
+ var n1 Node
+ Regalloc(&n1, nl.Type, res)
+
+ Cgen(nl, &n1)
+ var n2 Node
+ Regalloc(&n2, nl.Type, nil)
+ cgen_div(ODIV, &n1, nr, &n2)
+ a := Thearch.Optoas(OMUL, nl.Type)
+ if w == 8 {
+ // use 2-operand 16-bit multiply
+ // because there is no 2-operand 8-bit multiply
+ a = Thearch.Optoas(OMUL, Types[TINT16]) // XXX was IMULW
+ }
+
+ if !Smallintconst(nr) {
+ var n3 Node
+ Regalloc(&n3, nl.Type, nil)
+ Cgen(nr, &n3)
+ Thearch.Gins(a, &n3, &n2)
+ Regfree(&n3)
+ } else {
+ Thearch.Gins(a, nr, &n2)
+ }
+ Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n2, &n1)
+ Thearch.Gmove(&n1, res)
+ Regfree(&n1)
+ Regfree(&n2)
+}
+
+func Fixlargeoffset(n *Node) {
+ if n == nil {
+ return
+ }
+ if n.Op != OINDREG {
+ return
+ }
+ if n.Val.U.Reg == int16(Thearch.REGSP) { // stack offset cannot be large
+ return
+ }
+ if n.Xoffset != int64(int32(n.Xoffset)) {
+ // offset too large, add to register instead.
+ a := *n
+
+ a.Op = OREGISTER
+ a.Type = Types[Tptr]
+ a.Xoffset = 0
+ Cgen_checknil(&a)
+ Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), n.Xoffset, &a)
+ n.Xoffset = 0
+ }
+}
diff --git a/src/cmd/internal/gc/cplx.go b/src/cmd/internal/gc/cplx.go
index 0097571..7c0a150 100644
--- a/src/cmd/internal/gc/cplx.go
+++ b/src/cmd/internal/gc/cplx.go
@@ -25,21 +25,21 @@
if nr != nil {
if nl.Ullman > nr.Ullman && nl.Addable == 0 {
Tempname(&tnl, nl.Type)
- Thearch.Cgen(nl, &tnl)
+ Cgen(nl, &tnl)
nl = &tnl
}
if nr.Addable == 0 {
var tnr Node
Tempname(&tnr, nr.Type)
- Thearch.Cgen(nr, &tnr)
+ Cgen(nr, &tnr)
nr = &tnr
}
}
if nl.Addable == 0 {
Tempname(&tnl, nl.Type)
- Thearch.Cgen(nl, &tnl)
+ Cgen(nl, &tnl)
nl = &tnl
}
@@ -78,7 +78,7 @@
true_ = !true_
}
- Thearch.Bgen(&na, true_, likely, to)
+ Bgen(&na, true_, likely, to)
}
// break addable nc-complex into nr-real and ni-imaginary
@@ -111,7 +111,7 @@
ra.Op = OMINUS
ra.Left = nl
ra.Type = nl.Type
- Thearch.Cgen(&ra, res)
+ Cgen(&ra, res)
}
// build and execute tree
@@ -150,14 +150,14 @@
ra.Left = &n1
ra.Right = &n3
ra.Type = n1.Type
- Thearch.Cgen(&ra, &n5)
+ Cgen(&ra, &n5)
ra = Node{}
ra.Op = uint8(op)
ra.Left = &n2
ra.Right = &n4
ra.Type = n2.Type
- Thearch.Cgen(&ra, &n6)
+ Cgen(&ra, &n6)
}
// build and execute tree
@@ -197,7 +197,7 @@
ra.Left = &rm1
ra.Right = &rm2
ra.Type = rm1.Type
- Thearch.Cgen(&ra, &tmp)
+ Cgen(&ra, &tmp)
// imag part
rm1 = Node{}
@@ -218,10 +218,10 @@
ra.Left = &rm1
ra.Right = &rm2
ra.Type = rm1.Type
- Thearch.Cgen(&ra, &n6)
+ Cgen(&ra, &n6)
// tmp ->real part
- Thearch.Cgen(&tmp, &n5)
+ Cgen(&tmp, &n5)
}
func nodfconst(n *Node, t *Type, fval *Mpflt) {
@@ -322,8 +322,8 @@
var n3 Node
subnode(&n3, &n4, t)
- Thearch.Cgen(&n1, &n3)
- Thearch.Cgen(&n2, &n4)
+ Cgen(&n1, &n3)
+ Cgen(&n2, &n4)
}
}
@@ -346,9 +346,9 @@
subnode(&n1, &n2, res)
var tmp Node
Tempname(&tmp, n1.Type)
- Thearch.Cgen(n.Left, &tmp)
- Thearch.Cgen(n.Right, &n2)
- Thearch.Cgen(&tmp, &n1)
+ Cgen(n.Left, &tmp)
+ Cgen(n.Right, &n2)
+ Cgen(&tmp, &n1)
return
}
@@ -366,11 +366,11 @@
var n2 Node
subnode(&n1, &n2, nl)
if n.Op == OREAL {
- Thearch.Cgen(&n1, res)
+ Cgen(&n1, res)
return
}
- Thearch.Cgen(&n2, res)
+ Cgen(&n2, res)
return
}
@@ -394,9 +394,9 @@
if res.Addable == 0 {
var n1 Node
- Thearch.Igen(res, &n1, nil)
- Thearch.Cgen(n, &n1)
- Thearch.Regfree(&n1)
+ Igen(res, &n1, nil)
+ Cgen(n, &n1)
+ Regfree(&n1)
return
}
@@ -419,10 +419,10 @@
OCALLMETH,
OCALLINTER:
var n1 Node
- Thearch.Igen(n, &n1, res)
+ Igen(n, &n1, res)
Complexmove(&n1, res)
- Thearch.Regfree(&n1)
+ Regfree(&n1)
return
case OCONV,
@@ -447,21 +447,21 @@
if nr != nil {
if nl.Ullman > nr.Ullman && nl.Addable == 0 {
Tempname(&tnl, nl.Type)
- Thearch.Cgen(nl, &tnl)
+ Cgen(nl, &tnl)
nl = &tnl
}
if nr.Addable == 0 {
var tnr Node
Tempname(&tnr, nr.Type)
- Thearch.Cgen(nr, &tnr)
+ Cgen(nr, &tnr)
nr = &tnr
}
}
if nl.Addable == 0 {
Tempname(&tnl, nl.Type)
- Thearch.Cgen(nl, &tnl)
+ Cgen(nl, &tnl)
nl = &tnl
}
diff --git a/src/cmd/internal/gc/gen.go b/src/cmd/internal/gc/gen.go
index 314f6c1..9686092 100644
--- a/src/cmd/internal/gc/gen.go
+++ b/src/cmd/internal/gc/gen.go
@@ -235,13 +235,13 @@
Fatal("cgen_proc: unknown call %v", Oconv(int(n.Left.Op), 0))
case OCALLMETH:
- Cgen_callmeth(n.Left, proc)
+ cgen_callmeth(n.Left, proc)
case OCALLINTER:
- Thearch.Cgen_callinter(n.Left, nil, proc)
+ cgen_callinter(n.Left, nil, proc)
case OCALLFUNC:
- Thearch.Cgen_call(n.Left, proc)
+ cgen_call(n.Left, proc)
}
}
@@ -377,7 +377,7 @@
}
ullmancalc(&z)
- Thearch.Cgen(&z, n)
+ Cgen(&z, n)
}
/*
@@ -393,17 +393,17 @@
*/
tmp := temp(Types[Tptr])
- Thearch.Cgen(n.Right, tmp)
+ Cgen(n.Right, tmp)
Gvardef(res)
dst := *res
dst.Type = Types[Tptr]
dst.Xoffset += int64(Widthptr)
- Thearch.Cgen(tmp, &dst)
+ Cgen(tmp, &dst)
dst.Xoffset -= int64(Widthptr)
- Thearch.Cgen(n.Left, &dst)
+ Cgen(n.Left, &dst)
}
/*
@@ -443,7 +443,7 @@
var src Node
if isnil(n.Left) {
Tempname(&src, n.Left.Type)
- Thearch.Cgen(n.Left, &src)
+ Cgen(n.Left, &src)
} else {
src = *n.Left
}
@@ -455,11 +455,11 @@
if !Isptr[n.Left.Type.Etype] {
Fatal("slicearr is supposed to work on pointer: %v\n", Nconv(n, obj.FmtSign))
}
- Thearch.Cgen(&src, base)
+ Cgen(&src, base)
Cgen_checknil(base)
} else {
src.Type = Types[Tptr]
- Thearch.Cgen(&src, base)
+ Cgen(&src, base)
}
// committed to the update
@@ -468,10 +468,10 @@
// compute len and cap.
// len = n-i, cap = m-i, and offs = i*width.
// computing offs last lets the multiply overwrite i.
- Thearch.Cgen((*Node)(len), tmplen)
+ Cgen((*Node)(len), tmplen)
if n.Op != OSLICESTR {
- Thearch.Cgen(cap, tmpcap)
+ Cgen(cap, tmpcap)
}
// if new cap != 0 { base += add }
@@ -489,11 +489,11 @@
Nodconst(&con, tmpcap.Type, 0)
cmp := Nod(OEQ, tmpcap, &con)
typecheck(&cmp, Erv)
- Thearch.Bgen(cmp, true, -1, p2)
+ Bgen(cmp, true, -1, p2)
add := Nod(OADD, base, offs)
typecheck(&add, Erv)
- Thearch.Cgen(add, base)
+ Cgen(add, base)
Patch(p2, Pc)
}
@@ -503,14 +503,14 @@
dst.Xoffset += int64(Array_array)
dst.Type = Types[Tptr]
- Thearch.Cgen(base, &dst)
+ Cgen(base, &dst)
// dst.len = hi [ - lo ]
dst = *res
dst.Xoffset += int64(Array_nel)
dst.Type = Types[Simtype[TUINT]]
- Thearch.Cgen(tmplen, &dst)
+ Cgen(tmplen, &dst)
if n.Op != OSLICESTR {
// dst.cap = cap [ - lo ]
@@ -518,7 +518,7 @@
dst.Xoffset += int64(Array_cap)
dst.Type = Types[Simtype[TUINT]]
- Thearch.Cgen(tmpcap, &dst)
+ Cgen(tmpcap, &dst)
}
}
@@ -620,7 +620,7 @@
lno := setlineno(n)
- wasregalloc := Thearch.Anyregalloc()
+ wasregalloc := Anyregalloc()
if n == nil {
goto ret
@@ -760,10 +760,10 @@
lab.Continpc = continpc
}
- gen(n.Nincr) // contin: incr
- Patch(p1, Pc) // test:
- Thearch.Bgen(n.Ntest, false, -1, breakpc) // if(!test) goto break
- Genlist(n.Nbody) // body
+ gen(n.Nincr) // contin: incr
+ Patch(p1, Pc) // test:
+ Bgen(n.Ntest, false, -1, breakpc) // if(!test) goto break
+ Genlist(n.Nbody) // body
gjmp(continpc)
Patch(breakpc, Pc) // done:
continpc = scontin
@@ -774,15 +774,15 @@
}
case OIF:
- p1 := gjmp(nil) // goto test
- p2 := gjmp(nil) // p2: goto else
- Patch(p1, Pc) // test:
- Thearch.Bgen(n.Ntest, false, int(-n.Likely), p2) // if(!test) goto p2
- Genlist(n.Nbody) // then
- p3 := gjmp(nil) // goto done
- Patch(p2, Pc) // else:
- Genlist(n.Nelse) // else
- Patch(p3, Pc) // done:
+ p1 := gjmp(nil) // goto test
+ p2 := gjmp(nil) // p2: goto else
+ Patch(p1, Pc) // test:
+ Bgen(n.Ntest, false, int(-n.Likely), p2) // if(!test) goto p2
+ Genlist(n.Nbody) // then
+ p3 := gjmp(nil) // goto done
+ Patch(p2, Pc) // else:
+ Genlist(n.Nelse) // else
+ Patch(p3, Pc) // done:
case OSWITCH:
sbreak := breakpc
@@ -832,13 +832,13 @@
Cgen_as(n.Left, n.Right)
case OCALLMETH:
- Cgen_callmeth(n, 0)
+ cgen_callmeth(n, 0)
case OCALLINTER:
- Thearch.Cgen_callinter(n, nil, 0)
+ cgen_callinter(n, nil, 0)
case OCALLFUNC:
- Thearch.Cgen_call(n, 0)
+ cgen_call(n, 0)
case OPROC:
cgen_proc(n, 1)
@@ -848,7 +848,7 @@
case ORETURN,
ORETJMP:
- Thearch.Cgen_ret(n)
+ cgen_ret(n)
case OCHECKNIL:
Cgen_checknil(n.Left)
@@ -858,7 +858,7 @@
}
ret:
- if Thearch.Anyregalloc() != wasregalloc {
+ if Anyregalloc() != wasregalloc {
Dump("node", n)
Fatal("registers left allocated")
}
@@ -908,10 +908,10 @@
return
}
- Thearch.Cgen(nr, nl)
+ Cgen(nr, nl)
}
-func Cgen_callmeth(n *Node, proc int) {
+func cgen_callmeth(n *Node, proc int) {
// generate a rewrite in n2 for the method call
// (p.f)(...) goes to (f)(p,...)
@@ -929,7 +929,7 @@
if n2.Left.Op == ONAME {
n2.Left.Class = PFUNC
}
- Thearch.Cgen_call(&n2, proc)
+ cgen_call(&n2, proc)
}
func checklabels() {
@@ -1020,14 +1020,14 @@
if nr != nil && !cadable(nr) {
goto no
}
- Thearch.Igen(nl, &nodl, nil)
+ Igen(nl, &nodl, nil)
freel = 1
}
if nr != nil {
nodr = *nr
if !cadable(nr) {
- Thearch.Igen(nr, &nodr, nil)
+ Igen(nr, &nodr, nil)
freer = 1
}
} else {
@@ -1035,7 +1035,7 @@
var tmp Node
Nodconst(&tmp, nl.Type, 0)
- Thearch.Regalloc(&nodr, Types[TUINT], nil)
+ Regalloc(&nodr, Types[TUINT], nil)
Thearch.Gmove(&tmp, &nodr)
freer = 1
}
@@ -1190,19 +1190,19 @@
no:
if freer != 0 {
- Thearch.Regfree(&nodr)
+ Regfree(&nodr)
}
if freel != 0 {
- Thearch.Regfree(&nodl)
+ Regfree(&nodl)
}
return false
yes:
if freer != 0 {
- Thearch.Regfree(&nodr)
+ Regfree(&nodr)
}
if freel != 0 {
- Thearch.Regfree(&nodl)
+ Regfree(&nodl)
}
return true
}
diff --git a/src/cmd/internal/gc/go.go b/src/cmd/internal/gc/go.go
index 26f545a..c33664f 100644
--- a/src/cmd/internal/gc/go.go
+++ b/src/cmd/internal/gc/go.go
@@ -777,47 +777,60 @@
)
type Arch struct {
- Thechar int
- Thestring string
- Thelinkarch *obj.LinkArch
- Typedefs []Typedef
- REGSP int
- REGCTXT int
- MAXWIDTH int64
- Anyregalloc func() bool
- Betypeinit func()
- Bgen func(*Node, bool, int, *obj.Prog)
- Cgen func(*Node, *Node)
- Cgen_call func(*Node, int)
- Cgen_callinter func(*Node, *Node, int)
- Cgen_ret func(*Node)
- Clearfat func(*Node)
- Defframe func(*obj.Prog)
- Excise func(*Flow)
- Expandchecks func(*obj.Prog)
- Gclean func()
- Ginit func()
- Gins func(int, *Node, *Node) *obj.Prog
- Ginscall func(*Node, int)
- Gmove func(*Node, *Node)
- Igen func(*Node, *Node, *Node)
- Linkarchinit func()
- Peep func(*obj.Prog)
- Proginfo func(*obj.Prog) // fills in Prog.Info
- Regalloc func(*Node, *Type, *Node)
- Regfree func(*Node)
- Regtyp func(*obj.Addr) bool
- Sameaddr func(*obj.Addr, *obj.Addr) bool
- Smallindir func(*obj.Addr, *obj.Addr) bool
- Stackaddr func(*obj.Addr) bool
- Excludedregs func() uint64
- RtoB func(int) uint64
- FtoB func(int) uint64
- BtoR func(uint64) int
- BtoF func(uint64) int
- Optoas func(int, *Type) int
- Doregbits func(int) uint64
- Regnames func(*int) []string
+ Thechar int
+ Thestring string
+ Thelinkarch *obj.LinkArch
+ Typedefs []Typedef
+ REGSP int
+ REGCTXT int
+ REGCALLX int // BX
+ REGCALLX2 int // AX
+ REGRETURN int // AX
+ REGMIN int
+ REGMAX int
+ FREGMIN int
+ FREGMAX int
+ MAXWIDTH int64
+ ReservedRegs []int
+
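+ // The remaining fields are code-generation hooks supplied by each
+ // architecture back end. Hooks marked "optional" may be left nil.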
+ AddIndex func(*Node, int64, *Node) bool // optional
+ Betypeinit func()
+ Bgen_float func(*Node, int, int, *obj.Prog) // optional
+ Cgen64 func(*Node, *Node) // only on 32-bit systems
+ Cgenindex func(*Node, *Node, bool) *obj.Prog
+ Cgen_bmul func(int, *Node, *Node, *Node) bool
+ Cgen_float func(*Node, *Node) // optional
+ Cgen_hmul func(*Node, *Node, *Node)
+ Cgen_shift func(int, bool, *Node, *Node, *Node)
+ Clearfat func(*Node)
+ Cmp64 func(*Node, *Node, int, int, *obj.Prog) // only on 32-bit systems
+ Defframe func(*obj.Prog)
+ Dodiv func(int, *Node, *Node, *Node)
+ Excise func(*Flow)
+ Expandchecks func(*obj.Prog)
+ Gins func(int, *Node, *Node) *obj.Prog
+ Ginscon func(int, int64, *Node)
+ Ginsnop func()
+ Gmove func(*Node, *Node)
+ Igenindex func(*Node, *Node, bool) *obj.Prog
+ Linkarchinit func()
+ Peep func(*obj.Prog)
+ Proginfo func(*obj.Prog) // fills in Prog.Info
+ Regtyp func(*obj.Addr) bool
+ Sameaddr func(*obj.Addr, *obj.Addr) bool
+ Smallindir func(*obj.Addr, *obj.Addr) bool
+ Stackaddr func(*obj.Addr) bool
+ Stackcopy func(*Node, *Node, int64, int64, int64)
+ Sudoaddable func(int, *Node, *obj.Addr) bool
+ Sudoclean func()
+ Excludedregs func() uint64
+ RtoB func(int) uint64
+ FtoB func(int) uint64
+ BtoR func(uint64) int
+ BtoF func(uint64) int
+ Optoas func(int, *Type) int
+ Doregbits func(int) uint64
+ Regnames func(*int) []string
}
var pcloc int32
diff --git a/src/cmd/internal/gc/gsubr.go b/src/cmd/internal/gc/gsubr.go
index f5d7621..05642d6 100644
--- a/src/cmd/internal/gc/gsubr.go
+++ b/src/cmd/internal/gc/gsubr.go
@@ -30,7 +30,12 @@
package gc
-import "cmd/internal/obj"
+import (
+ "cmd/internal/obj"
+ "fmt"
+ "runtime"
+ "strings"
+)
var ddumped int
@@ -295,6 +300,8 @@
switch n.Op {
default:
a := a // copy to let escape into Ctxt.Dconv
+ Debug['h'] = 1
+ Dump("naddr", n)
Fatal("naddr: bad %v %v", Oconv(int(n.Op), 0), Ctxt.Dconv(a))
case OREGISTER:
@@ -559,11 +566,8 @@
n.Op = OINDREG
n.Val.U.Reg = int16(Thearch.REGSP)
- if Thearch.Thechar == '5' {
- n.Xoffset += 4
- }
- if Thearch.Thechar == '7' || Thearch.Thechar == '9' {
- n.Xoffset += 8
+ if HasLinkRegister() {
+ n.Xoffset += int64(Ctxt.Arch.Ptrsize)
}
case 1: // input arg
@@ -571,10 +575,6 @@
case 2: // offset output arg
Fatal("shouldn't be used")
-
- n.Op = OINDREG
- n.Val.U.Reg = int16(Thearch.REGSP)
- n.Xoffset += Types[Tptr].Width
}
n.Typecheck = 1
@@ -598,3 +598,228 @@
p.To.Offset = 0
return q
}
+
+var reg [100]int // count of references to each register
+var regstk [100][]byte // allocation sites, when -v is given
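+
+// Both arrays are indexed by r-Thearch.REGMIN, for floating-point as
+// well as integer registers (see the note in Regalloc below). ginit
+// seeds every slot to 1 so registers outside the usable ranges never
+// look free, clears the usable ranges, and then marks the reserved
+// registers as in use.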
+
+func ginit() {
+ for r := range reg {
+ reg[r] = 1
+ }
+
+ for r := Thearch.REGMIN; r <= Thearch.REGMAX; r++ {
+ reg[r-Thearch.REGMIN] = 0
+ }
+ for r := Thearch.FREGMIN; r <= Thearch.FREGMAX; r++ {
+ reg[r-Thearch.REGMIN] = 0
+ }
+
+ for _, r := range Thearch.ReservedRegs {
+ reg[r-Thearch.REGMIN] = 1
+ }
+}
+
+func gclean() {
+ for _, r := range Thearch.ReservedRegs {
+ reg[r-Thearch.REGMIN]--
+ }
+
+ for r := Thearch.REGMIN; r <= Thearch.REGMAX; r++ {
+ n := reg[r-Thearch.REGMIN]
+ if n != 0 {
+ Yyerror("reg %v left allocated", obj.Rconv(r))
+ if Debug['v'] != 0 {
+ Regdump()
+ }
+ }
+ }
+
+ for r := Thearch.FREGMIN; r <= Thearch.FREGMAX; r++ {
+ n := reg[r-Thearch.REGMIN]
+ if n != 0 {
+ Yyerror("reg %v left allocated", obj.Rconv(r))
+ if Debug['v'] != 0 {
+ Regdump()
+ }
+ }
+ }
+}
+
+func Anyregalloc() bool {
+ n := 0
+ for r := Thearch.REGMIN; r <= Thearch.REGMAX; r++ {
+ if reg[r-Thearch.REGMIN] != 0 {
+ n++
+ }
+ }
+ // Reserved registers are always marked in use, so any count beyond
+ // len(ReservedRegs) means a register is still allocated.
+ return n > len(Thearch.ReservedRegs)
+}
+
+/*
+ * allocate register of type t, leave in n.
+ * if o != N, o may be reusable register.
+ * caller must Regfree(n).
+ */
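+//
+// A typical calling pattern, as a sketch:
+//
+// var n1 Node
+// Regalloc(&n1, n.Type, res) // res may be reused if it is a register
+// Cgen(n, &n1)
+// Regfree(&n1)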
+func Regalloc(n *Node, t *Type, o *Node) {
+ if t == nil {
+ Fatal("regalloc: t nil")
+ }
+ et := int(Simtype[t.Etype])
+ if Ctxt.Arch.Regsize == 4 && (et == TINT64 || et == TUINT64) {
+ Fatal("regalloc 64bit")
+ }
+
+ var i int
+Switch:
+ switch et {
+ default:
+ Fatal("regalloc: unknown type %v", Tconv(t, 0))
+
+ case TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TPTR32, TPTR64, TBOOL:
+ if o != nil && o.Op == OREGISTER {
+ i = int(o.Val.U.Reg)
+ if Thearch.REGMIN <= i && i <= Thearch.REGMAX {
+ break Switch
+ }
+ }
+ for i = Thearch.REGMIN; i <= Thearch.REGMAX; i++ {
+ if reg[i-Thearch.REGMIN] == 0 {
+ break Switch
+ }
+ }
+ Flusherrors()
+ Regdump()
+ Fatal("out of fixed registers")
+
+ case TFLOAT32, TFLOAT64:
+ if o != nil && o.Op == OREGISTER {
+ i = int(o.Val.U.Reg)
+ if Thearch.FREGMIN <= i && i <= Thearch.FREGMAX {
+ break Switch
+ }
+ }
+ for i = Thearch.FREGMIN; i <= Thearch.FREGMAX; i++ {
+ if reg[i-Thearch.REGMIN] == 0 { // note: REGMIN, not FREGMIN
+ break Switch
+ }
+ }
+ Flusherrors()
+ Regdump()
+ Fatal("out of floating registers")
+
+ case TCOMPLEX64, TCOMPLEX128:
+ Tempname(n, t)
+ return
+ }
+
+ ix := i - Thearch.REGMIN
+ if reg[ix] == 0 && Debug['v'] > 0 {
+ if regstk[ix] == nil {
+ regstk[ix] = make([]byte, 4096)
+ }
+ stk := regstk[ix]
+ n := runtime.Stack(stk[:cap(stk)], false)
+ regstk[ix] = stk[:n]
+ }
+ reg[ix]++
+ Nodreg(n, t, i)
+}
+
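+// Regfree releases the register in n allocated by Regalloc.
+// ONAME nodes (for example, the stack temporaries Regalloc returns
+// for complex types) are ignored.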
+func Regfree(n *Node) {
+ if n.Op == ONAME {
+ return
+ }
+ if n.Op != OREGISTER && n.Op != OINDREG {
+ Fatal("regfree: not a register")
+ }
+ i := int(n.Val.U.Reg)
+ if i == Thearch.REGSP {
+ return
+ }
+ switch {
+ case Thearch.REGMIN <= i && i <= Thearch.REGMAX,
+ Thearch.FREGMIN <= i && i <= Thearch.FREGMAX:
+ // ok
+ default:
+ Fatal("regfree: reg out of range")
+ }
+
+ i -= Thearch.REGMIN
+ if reg[i] <= 0 {
+ Fatal("regfree: reg not allocated")
+ }
+ reg[i]--
+ if reg[i] == 0 {
+ regstk[i] = regstk[i][:0]
+ }
+}
+
+// Reginuse reports whether r is in use.
+func Reginuse(r int) bool {
+ switch {
+ case Thearch.REGMIN <= r && r <= Thearch.REGMAX,
+ Thearch.FREGMIN <= r && r <= Thearch.FREGMAX:
+ // ok
+ default:
+ Fatal("reginuse: reg out of range")
+ }
+
+ return reg[r-Thearch.REGMIN] > 0
+}
+
+// Regrealloc(n) undoes the effect of Regfree(n),
+// so that a register can be given up but then reclaimed.
+func Regrealloc(n *Node) {
+ if n.Op != OREGISTER && n.Op != OINDREG {
+ Fatal("regrealloc: not a register")
+ }
+ i := int(n.Val.U.Reg)
+ if i == Thearch.REGSP {
+ return
+ }
+ switch {
+ case Thearch.REGMIN <= i && i <= Thearch.REGMAX,
+ Thearch.FREGMIN <= i && i <= Thearch.FREGMAX:
+ // ok
+ default:
+ Fatal("regrealloc: reg out of range")
+ }
+
+ i -= Thearch.REGMIN
+ if reg[i] == 0 && Debug['v'] > 0 {
+ if regstk[i] == nil {
+ regstk[i] = make([]byte, 4096)
+ }
+ stk := regstk[i]
+ n := runtime.Stack(stk[:cap(stk)], false)
+ regstk[i] = stk[:n]
+ }
+ reg[i]++
+}
+
+func Regdump() {
+ if Debug['v'] == 0 {
+ fmt.Printf("run compiler with -v for register allocation sites\n")
+ return
+ }
+
+ dump := func(r int) {
+ stk := regstk[r-Thearch.REGMIN]
+ if len(stk) == 0 {
+ return
+ }
+ fmt.Printf("reg %v allocated at:\n", obj.Rconv(r))
+ fmt.Printf("\t%s\n", strings.Replace(strings.TrimSpace(string(stk)), "\n", "\n\t", -1))
+ }
+
+ for r := Thearch.REGMIN; r <= Thearch.REGMAX; r++ {
+ if reg[r-Thearch.REGMIN] != 0 {
+ dump(r)
+ }
+ }
+ for r := Thearch.FREGMIN; r <= Thearch.FREGMAX; r++ {
+ if reg[r-Thearch.REGMIN] != 0 {
+ dump(r)
+ }
+ }
+}
diff --git a/src/cmd/internal/gc/pgen.go b/src/cmd/internal/gc/pgen.go
index ab55911..de106ad 100644
--- a/src/cmd/internal/gc/pgen.go
+++ b/src/cmd/internal/gc/pgen.go
@@ -335,10 +335,10 @@
if ((Thearch.Thechar == '5' || Thearch.Thechar == '7' || Thearch.Thechar == '9') && n.Op != OREGISTER) || n.Addable == 0 || n.Op == OLITERAL {
var reg Node
- Thearch.Regalloc(®, Types[Tptr], n)
- Thearch.Cgen(n, ®)
+ Regalloc(®, Types[Tptr], n)
+ Cgen(n, ®)
Thearch.Gins(obj.ACHECKNIL, ®, nil)
- Thearch.Regfree(®)
+ Regfree(®)
return
}
@@ -458,7 +458,7 @@
Afunclit(&ptxt.From, Curfn.Nname)
- Thearch.Ginit()
+ ginit()
gcargs = makefuncdatasym("gcargs·%d", obj.FUNCDATA_ArgsPointerMaps)
gclocals = makefuncdatasym("gclocals·%d", obj.FUNCDATA_LocalsPointerMaps)
@@ -484,7 +484,7 @@
Genlist(Curfn.Enter)
Genlist(Curfn.Nbody)
- Thearch.Gclean()
+ gclean()
checklabels()
if nerrors != 0 {
goto ret
@@ -494,13 +494,13 @@
}
if Curfn.Type.Outtuple != 0 {
- Thearch.Ginscall(throwreturn, 0)
+ Ginscall(throwreturn, 0)
}
- Thearch.Ginit()
+ ginit()
// TODO: Determine when the final cgen_ret can be omitted. Perhaps always?
- Thearch.Cgen_ret(nil)
+ cgen_ret(nil)
if Hasdefer != 0 {
// deferreturn pretends to have one uintptr argument.
@@ -510,7 +510,7 @@
}
}
- Thearch.Gclean()
+ gclean()
if nerrors != 0 {
goto ret
}
diff --git a/src/cmd/internal/gc/popt.go b/src/cmd/internal/gc/popt.go
index ac6dd5e..2428284 100644
--- a/src/cmd/internal/gc/popt.go
+++ b/src/cmd/internal/gc/popt.go
@@ -924,8 +924,6 @@
// Assume that stack variables with address not taken can be loaded multiple times
// from memory without being rechecked. Other variables need to be checked on
// each load.
-type NilVar struct {
-}
var killed int // f->data is either nil or &killed
diff --git a/src/cmd/internal/gc/reg.go b/src/cmd/internal/gc/reg.go
index 37fd3c3..5546960 100644
--- a/src/cmd/internal/gc/reg.go
+++ b/src/cmd/internal/gc/reg.go
@@ -1344,7 +1344,7 @@
}
}
- if Debug['v'] != 0 && strings.Contains(Curfn.Nname.Sym.Name, "Parse") {
+ if false && Debug['v'] != 0 && strings.Contains(Curfn.Nname.Sym.Name, "Parse") {
Warn("regions: %d\n", nregion)
}
if nregion >= MaxRgn {
diff --git a/src/cmd/internal/gc/syntax.go b/src/cmd/internal/gc/syntax.go
index ac7c285..1c84aa1 100644
--- a/src/cmd/internal/gc/syntax.go
+++ b/src/cmd/internal/gc/syntax.go
@@ -294,6 +294,7 @@
OLROT // left rotate: AROL.
ORROTC // right rotate-carry: ARCR.
ORETJMP // return to other function
+ OPS // compare parity set (for x86 NaN check)
OEND
)
diff --git a/src/cmd/internal/obj/arm64/asm7.go b/src/cmd/internal/obj/arm64/asm7.go
index d8f39fe..05bf679 100644
--- a/src/cmd/internal/obj/arm64/asm7.go
+++ b/src/cmd/internal/obj/arm64/asm7.go
@@ -232,6 +232,7 @@
{ABL, C_NONE, C_NONE, C_SBRA, 5, 4, 0, 0, 0},
{AB, C_NONE, C_NONE, C_ZOREG, 6, 4, 0, 0, 0},
{ABL, C_NONE, C_NONE, C_REG, 6, 4, 0, 0, 0},
+ {ABL, C_REG, C_NONE, C_REG, 6, 4, 0, 0, 0},
{ABL, C_NONE, C_NONE, C_ZOREG, 6, 4, 0, 0, 0},
{obj.ARET, C_NONE, C_NONE, C_REG, 6, 4, 0, 0, 0},
{obj.ARET, C_NONE, C_NONE, C_ZOREG, 6, 4, 0, 0, 0},
diff --git a/src/cmd/internal/obj/go.go b/src/cmd/internal/obj/go.go
index 496a5b8..2b65ee6 100644
--- a/src/cmd/internal/obj/go.go
+++ b/src/cmd/internal/obj/go.go
@@ -49,7 +49,7 @@
os.Exit(2)
}
-func linksetexp() {
+func init() {
for _, f := range strings.Split(goexperiment, ",") {
if f != "" {
addexp(f)
diff --git a/src/cmd/internal/obj/sym.go b/src/cmd/internal/obj/sym.go
index 410ed84..ca3ffc1 100644
--- a/src/cmd/internal/obj/sym.go
+++ b/src/cmd/internal/obj/sym.go
@@ -123,8 +123,6 @@
}
func Linknew(arch *LinkArch) *Link {
- linksetexp()
-
ctxt := new(Link)
ctxt.Hash = make(map[SymVer]*LSym)
ctxt.Arch = arch
diff --git a/src/cmd/internal/obj/x86/obj6.go b/src/cmd/internal/obj/x86/obj6.go
index 6a0baa6..58b1c30 100644
--- a/src/cmd/internal/obj/x86/obj6.go
+++ b/src/cmd/internal/obj/x86/obj6.go
@@ -190,6 +190,18 @@
}
}
+ // Rewrite MOVL/MOVQ $XXX(FP/SP) as LEAL/LEAQ.
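+ // A $XXX(FP/SP) operand is an address that depends on the stack
+ // pointer at run time, so it cannot be a MOV immediate; the LEA
+ // form computes it as base plus offset. Rewriting here lets the
+ // portable compiler emit the MOV form without knowing this detail.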
+ if p.From.Type == obj.TYPE_ADDR && (ctxt.Arch.Thechar == '6' || p.From.Name != obj.NAME_EXTERN && p.From.Name != obj.NAME_STATIC) {
+ switch p.As {
+ case AMOVL:
+ p.As = ALEAL
+ p.From.Type = obj.TYPE_MEM
+ case AMOVQ:
+ p.As = ALEAQ
+ p.From.Type = obj.TYPE_MEM
+ }
+ }
+
if ctxt.Headtype == obj.Hnacl && p.Mode == 64 {
nacladdr(ctxt, p, &p.From3)
nacladdr(ctxt, p, &p.From)