// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
)
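
// walkSelect lowers a select statement: it hands the cases to
// walkSelectCases, stores the resulting statements in sel.Compiled, and
// walks them. A select must only be walked once.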
func walkSelect(sel *ir.SelectStmt) {
lno := ir.SetPos(sel)
if sel.Walked() {
base.Fatalf("double walkSelect")
}
sel.SetWalked(true)
init := ir.TakeInit(sel)
init = append(init, walkSelectCases(sel.Cases)...)
sel.Cases = nil
sel.Compiled = init
walkStmtList(sel.Compiled)
base.Pos = lno
}
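
// walkSelectCases returns the statements that implement the given select
// cases. Small selects (no cases, a single case, or two cases where one is
// default) are rewritten into simpler constructs; the general form becomes
// a call to runtime.selectgo followed by a dispatch on the chosen case.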
func walkSelectCases(cases []*ir.CommClause) []ir.Node {
ncas := len(cases)
sellineno := base.Pos
// optimization: zero-case select
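// For illustration: an empty select,
//
//	select {}
//
// lowers to a single call to runtime.block, which parks the goroutine forever.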
if ncas == 0 {
return []ir.Node{mkcallstmt("block")}
}
// optimization: one-case select: single op.
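// For illustration (a rough sketch of the rewrite): a select such as
//
//	select {
//	case v, ok := <-ch:
//		body
//	}
//
// behaves like the channel operation itself, so it is lowered to
// approximately "v, ok = <-ch; body; break".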
if ncas == 1 {
cas := cases[0]
ir.SetPos(cas)
l := cas.Init()
if cas.Comm != nil { // not default:
n := cas.Comm
l = append(l, ir.TakeInit(n)...)
switch n.Op() {
default:
base.Fatalf("select %v", n.Op())
case ir.OSEND:
// already ok
case ir.OSELRECV2:
r := n.(*ir.AssignListStmt)
if ir.IsBlank(r.Lhs[0]) && ir.IsBlank(r.Lhs[1]) {
n = r.Rhs[0]
break
}
r.SetOp(ir.OAS2RECV)
}
l = append(l, n)
}
l = append(l, cas.Body...)
l = append(l, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
return l
}
// convert case value arguments to addresses.
// this rewrite is used by both the general code and the next optimization.
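// For example, in "case v := <-ch" the destination v becomes &v, and in
// "case ch <- x" the value x becomes &x, so that the runtime helpers can
// read or write the element through a pointer.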
var dflt *ir.CommClause
for _, cas := range cases {
ir.SetPos(cas)
n := cas.Comm
if n == nil {
dflt = cas
continue
}
switch n.Op() {
case ir.OSEND:
n := n.(*ir.SendStmt)
n.Value = typecheck.NodAddr(n.Value)
n.Value = typecheck.Expr(n.Value)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[0]) {
n.Lhs[0] = typecheck.NodAddr(n.Lhs[0])
n.Lhs[0] = typecheck.Expr(n.Lhs[0])
}
}
}
// optimization: two-case select but one is default: single non-blocking op.
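// For illustration (schematically):
//
//	select {
//	case ch <- v:
//		body
//	default:
//		dfltBody
//	}
//
// is lowered to an if statement around the non-blocking runtime helper:
//
//	if selectnbsend(ch, v) {
//		body
//	} else {
//		dfltBody
//	}
//
// The receive form uses selectnbrecv in the same way.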
if ncas == 2 && dflt != nil {
cas := cases[0]
if cas == dflt {
cas = cases[1]
}
n := cas.Comm
ir.SetPos(n)
r := ir.NewIfStmt(base.Pos, nil, nil, nil)
r.SetInit(cas.Init())
var cond ir.Node
switch n.Op() {
default:
base.Fatalf("select %v", n.Op())
case ir.OSEND:
// if selectnbsend(c, v) { body } else { default body }
n := n.(*ir.SendStmt)
ch := n.Chan
cond = mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Value)
case ir.OSELRECV2:
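// Roughly: if selected, ok = selectnbrecv(&v, ch); selected { body } else { default body }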
n := n.(*ir.AssignListStmt)
recv := n.Rhs[0].(*ir.UnaryExpr)
ch := recv.X
elem := n.Lhs[0]
if ir.IsBlank(elem) {
elem = typecheck.NodNil()
}
cond = typecheck.Temp(types.Types[types.TBOOL])
fn := chanfn("selectnbrecv", 2, ch.Type())
call := mkcall1(fn, fn.Type().Results(), r.PtrInit(), elem, ch)
as := ir.NewAssignListStmt(r.Pos(), ir.OAS2, []ir.Node{cond, n.Lhs[1]}, []ir.Node{call})
r.PtrInit().Append(typecheck.Stmt(as))
}
r.Cond = typecheck.Expr(cond)
r.Body = cas.Body
r.Else = append(dflt.Init(), dflt.Body...)
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
}
if dflt != nil {
ncas--
}
casorder := make([]*ir.CommClause, ncas)
nsends, nrecvs := 0, 0
var init []ir.Node
// generate sel-struct
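// selv is an array with one scase-shaped {c, elem} entry per non-default
// case; a pointer to its first element is passed to runtime.selectgo below.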
base.Pos = sellineno
selv := typecheck.Temp(types.NewArray(scasetype(), int64(ncas)))
init = append(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, selv, nil)))
// No initialization for order; runtime.selectgo is responsible for that.
order := typecheck.Temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
var pc0, pcs ir.Node
if base.Flag.Race {
pcs = typecheck.Temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
pc0 = typecheck.Expr(typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))))
} else {
pc0 = typecheck.NodNil()
}
// register cases
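// Sends are stored at the front of selv (indices 0..nsends-1) and receives
// at the back (indices ncas-1 down to nsends), which is the layout
// runtime.selectgo expects.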
for _, cas := range cases {
ir.SetPos(cas)
init = append(init, ir.TakeInit(cas)...)
n := cas.Comm
if n == nil { // default:
continue
}
var i int
var c, elem ir.Node
switch n.Op() {
default:
base.Fatalf("select %v", n.Op())
case ir.OSEND:
n := n.(*ir.SendStmt)
i = nsends
nsends++
c = n.Chan
elem = n.Value
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
nrecvs++
i = ncas - nrecvs
recv := n.Rhs[0].(*ir.UnaryExpr)
c = recv.X
elem = n.Lhs[0]
}
casorder[i] = cas
setField := func(f string, val ir.Node) {
r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), typecheck.Lookup(f)), val)
init = append(init, typecheck.Stmt(r))
}
c = typecheck.ConvNop(c, types.Types[types.TUNSAFEPTR])
setField("c", c)
if !ir.IsBlank(elem) {
elem = typecheck.ConvNop(elem, types.Types[types.TUNSAFEPTR])
setField("elem", elem)
}
// TODO(mdempsky): There should be a cleaner way to
// handle this.
if base.Flag.Race {
r := mkcallstmt("selectsetpc", typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
init = append(init, r)
}
}
if nsends+nrecvs != ncas {
base.Fatalf("walkSelectCases: miscount: %v + %v != %v", nsends, nrecvs, ncas)
}
// run the select
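// Schematically, the generated call is
//
//	chosen, recvOK = selectgo(&selv[0], &order[0], pc0, nsends, nrecvs, dflt == nil)
//
// where chosen is the index of the selected case (-1 for the default) and
// recvOK reports, for receive cases, whether the value came from a send
// rather than from a closed channel.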
base.Pos = sellineno
chosen := typecheck.Temp(types.Types[types.TINT])
recvOK := typecheck.Temp(types.Types[types.TBOOL])
r := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
r.Lhs = []ir.Node{chosen, recvOK}
fn := typecheck.LookupRuntime("selectgo")
var fnInit ir.Nodes
r.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), &fnInit, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, ir.NewInt(int64(nsends)), ir.NewInt(int64(nrecvs)), ir.NewBool(dflt == nil))}
init = append(init, fnInit...)
init = append(init, typecheck.Stmt(r))
// selv and order are no longer alive after selectgo.
init = append(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, selv))
init = append(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, order))
if base.Flag.Race {
init = append(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, pcs))
}
// dispatch cases
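// Schematically, the dispatch becomes
//
//	if chosen < 0 { ...default body...; break }
//	if chosen == 0 { ...case 0 body...; break }
//	...
//	{ ...last case body...; break }
//
// For a receive case with an ok variable, the recvOK temp is copied into
// that variable before the body runs.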
dispatch := func(cond ir.Node, cas *ir.CommClause) {
var list ir.Nodes
if n := cas.Comm; n != nil && n.Op() == ir.OSELRECV2 {
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[1]) {
x := ir.NewAssignStmt(base.Pos, n.Lhs[1], recvOK)
list.Append(typecheck.Stmt(x))
}
}
list.Append(cas.Body.Take()...)
list.Append(ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
var r ir.Node
if cond != nil {
cond = typecheck.Expr(cond)
cond = typecheck.DefaultLit(cond, nil)
r = ir.NewIfStmt(base.Pos, cond, list, nil)
} else {
r = ir.NewBlockStmt(base.Pos, list)
}
init = append(init, r)
}
if dflt != nil {
ir.SetPos(dflt)
dispatch(ir.NewBinaryExpr(base.Pos, ir.OLT, chosen, ir.NewInt(0)), dflt)
}
for i, cas := range casorder {
ir.SetPos(cas)
if i == len(casorder)-1 {
dispatch(nil, cas)
break
}
dispatch(ir.NewBinaryExpr(base.Pos, ir.OEQ, chosen, ir.NewInt(int64(i))), cas)
}
return init
}

// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
func bytePtrToIndex(n ir.Node, i int64) ir.Node {
s := typecheck.NodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
t := types.NewPtr(types.Types[types.TUINT8])
return typecheck.ConvNop(s, t)
}

var scase *types.Type

// Keep in sync with src/runtime/select.go.
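// The runtime's scase currently consists of just a channel pointer and an
// element pointer; this struct mirrors that layout using unsafe.Pointer
// for both fields.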
func scasetype() *types.Type {
if scase == nil {
scase = types.NewStruct(types.NoPkg, []*types.Field{
types.NewField(base.Pos, typecheck.Lookup("c"), types.Types[types.TUNSAFEPTR]),
types.NewField(base.Pos, typecheck.Lookup("elem"), types.Types[types.TUNSAFEPTR]),
})
scase.SetNoalg(true)
}
return scase
}