// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"bytes"
	"crypto/sha1"
	"fmt"
	"html"
	"math"
	"os"
	"strings"

	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/x86"
)

// buildssa builds an SSA function
// and reports whether it should be used.
// Once the SSA implementation is complete,
// it will never return nil, and the bool can be removed.
func buildssa(fn *Node) (ssafn *ssa.Func, usessa bool) {
	name := fn.Func.Nname.Sym.Name
	usessa = strings.HasSuffix(name, "_ssa") || strings.Contains(name, "_ssa.") || name == os.Getenv("GOSSAFUNC")

	if usessa {
		fmt.Println("generating SSA for", name)
		dumplist("buildssa-enter", fn.Func.Enter)
		dumplist("buildssa-body", fn.Nbody)
		dumplist("buildssa-exit", fn.Func.Exit)
	}

	var s state
	s.pushLine(fn.Lineno)
	defer s.popLine()

	// TODO(khr): build config just once at the start of the compiler binary

	var e ssaExport
	e.log = usessa
	s.config = ssa.NewConfig(Thearch.Thestring, &e)
	s.f = s.config.NewFunc()
	s.f.Name = name
	s.exitCode = fn.Func.Exit

	if name == os.Getenv("GOSSAFUNC") {
		// TODO: tempfile? it is handy to have the location
		// of this file be stable, so you can just reload in the browser.
		s.config.HTML = ssa.NewHTMLWriter("ssa.html", &s, name)
		// TODO: generate and print a mapping from nodes to values and blocks
	}
	defer func() {
		if !usessa {
			s.config.HTML.Close()
		}
	}()

	// If SSA support for the function is incomplete,
	// assume that any panics are due to violated
	// invariants. Swallow them silently.
	defer func() {
		if err := recover(); err != nil {
			if !e.unimplemented {
				panic(err)
			}
		}
	}()

	// We construct SSA using an algorithm similar to
	// Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau
	// http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf
	// TODO: check this comment

	// Allocate starting block
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)

	// Allocate starting values
	s.labels = map[string]*ssaLabel{}
	s.labeledNodes = map[*Node]*ssaLabel{}
	s.startmem = s.entryNewValue0(ssa.OpArg, ssa.TypeMem)
	s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
	s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[&memVar] = s.startmem

	// Generate addresses of local declarations
	s.decladdrs = map[*Node]*ssa.Value{}
	for d := fn.Func.Dcl; d != nil; d = d.Next {
		n := d.N
		switch n.Class {
		case PPARAM:
			aux := &ssa.ArgSymbol{Typ: n.Type, Node: n}
			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
		case PAUTO | PHEAP:
			// TODO this looks wrong for PAUTO|PHEAP, no vardef, but also no definition
			aux := &ssa.AutoSymbol{Typ: n.Type, Node: n}
			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
		case PPARAM | PHEAP, PPARAMOUT | PHEAP:
			// This ends up wrong, have to do it at the PARAM node instead.
		case PAUTO, PPARAMOUT:
			// processed at each use, to prevent Addr coming
			// before the decl.
		case PFUNC:
			// local function - already handled by frontend
		default:
			str := ""
			if n.Class&PHEAP != 0 {
				str = ",heap"
			}
			s.Unimplementedf("local variable with class %s%s unimplemented", classnames[n.Class&^PHEAP], str)
		}
	}
	// nodfp is a special argument which is the function's FP.
	aux := &ssa.ArgSymbol{Typ: Types[TUINTPTR], Node: nodfp}
	s.decladdrs[nodfp] = s.entryNewValue1A(ssa.OpAddr, Types[TUINTPTR], aux, s.sp)

	// Convert the AST-based IR to the SSA-based IR
	s.stmtList(fn.Func.Enter)
	s.stmtList(fn.Nbody)

	// fallthrough to exit
	if s.curBlock != nil {
		s.stmtList(s.exitCode)
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRet
		b.Control = m
	}

	// Check that we used all labels
	for name, lab := range s.labels {
		if !lab.used() && !lab.reported {
			yyerrorl(int(lab.defNode.Lineno), "label %v defined and not used", name)
			lab.reported = true
		}
		if lab.used() && !lab.defined() && !lab.reported {
			yyerrorl(int(lab.useNode.Lineno), "label %v not defined", name)
			lab.reported = true
		}
	}

	// Check any forward gotos. Non-forward gotos have already been checked.
	for _, n := range s.fwdGotos {
		lab := s.labels[n.Left.Sym.Name]
		// If the label is undefined, we have already printed an error.
		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		}
	}

	if nerrors > 0 {
		return nil, false
	}

	// Link up variable uses to variable definitions
	s.linkForwardReferences()

	// Don't carry a reference to this around longer than necessary
	s.exitCode = nil

	// Main call to ssa package to compile function
	ssa.Compile(s.f)

	// Calculate stats about what percentage of functions SSA handles.
	if false {
		fmt.Printf("SSA implemented: %t\n", !e.unimplemented)
	}

	if e.unimplemented {
		return nil, false
	}

	// TODO: enable codegen more broadly once the codegen stabilizes
	// and runtime support is in (gc maps, write barriers, etc.)
	if usessa {
		return s.f, true
	}
	if localpkg.Name != os.Getenv("GOSSAPKG") {
		return s.f, false
	}
	if os.Getenv("GOSSAHASH") == "" {
		// Use everything in the package
		return s.f, true
	}
	// Check the hash of the name against a partial input hash.
	// We use this feature to do a binary search within a package to
	// find a function that is incorrectly compiled.
	hstr := ""
	for _, b := range sha1.Sum([]byte(name)) {
		hstr += fmt.Sprintf("%08b", b)
	}
	if strings.HasSuffix(hstr, os.Getenv("GOSSAHASH")) {
		fmt.Printf("GOSSAHASH triggered %s\n", name)
		return s.f, true
	}
	return s.f, false
}

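// state holds the state needed while converting a single function's AST to SSA.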
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// labels and labeled control flow nodes (OFOR, OSWITCH, OSELECT) in f
	labels       map[string]*ssaLabel
	labeledNodes map[*Node]*ssaLabel

	// gotos that jump forward; required for deferred checkgoto calls
	fwdGotos []*Node
	// Code that must precede any return
	// (e.g., copying value of heap-escaped paramout back to true paramout)
	exitCode *NodeList

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	vars map[*Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[*Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables.
	decladdrs map[*Node]*ssa.Value

237 // starting values. Memory, frame pointer, and stack pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// line number stack. The current line number is top of stack
	line []int32
}

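// An ssaLabel keeps track of a label's target block, the break/continue
// targets of the control flow node it names, and enough information to
// report definition and use errors.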
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
	defNode        *Node      // label definition Node (OLABEL)
	// Label use Node (OGOTO, OBREAK, OCONTINUE).
	// Used only for error detection and reporting.
	// There might be multiple uses, but we only need to track one.
	useNode  *Node
	reported bool // reported indicates whether an error has already been reported for this label
}

// defined reports whether the label has a definition (OLABEL node).
func (l *ssaLabel) defined() bool { return l.defNode != nil }

// used reports whether the label has a use (OGOTO, OBREAK, or OCONTINUE node).
func (l *ssaLabel) used() bool { return l.useNode != nil }

// label returns the label associated with sym, creating it if necessary.
func (s *state) label(sym *Sym) *ssaLabel {
	lab := s.labels[sym.Name]
	if lab == nil {
		lab = new(ssaLabel)
		s.labels[sym.Name] = lab
	}
	return lab
}

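// Logf, Fatalf, and Unimplementedf forward to the corresponding methods on s.config.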
func (s *state) Logf(msg string, args ...interface{})           { s.config.Logf(msg, args...) }
func (s *state) Fatalf(msg string, args ...interface{})         { s.config.Fatalf(msg, args...) }
func (s *state) Unimplementedf(msg string, args ...interface{}) { s.config.Unimplementedf(msg, args...) }

var (
	// dummy node for the memory variable
	memVar = Node{Op: ONAME, Sym: &Sym{Name: "mem"}}

	// dummy nodes for temporary variables
	ptrVar   = Node{Op: ONAME, Sym: &Sym{Name: "ptr"}}
	capVar   = Node{Op: ONAME, Sym: &Sym{Name: "cap"}}
	typVar   = Node{Op: ONAME, Sym: &Sym{Name: "typ"}}
	idataVar = Node{Op: ONAME, Sym: &Sym{Name: "idata"}}
	okVar    = Node{Op: ONAME, Sym: &Sym{Name: "ok"}}
)

// startBlock sets the current block we're generating code in to b.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[*Node]*ssa.Value{}
}

// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	b.Line = s.peekLine()
	return b
}

// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line int32) {
	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekLine peeks at the top of the line number stack.
func (s *state) peekLine() int32 {
	return s.line[len(s.line)-1]
}

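// Error reports a compile-time error at the source line currently on top of the line number stack.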
func (s *state) Error(msg string, args ...interface{}) {
	yyerrorl(int(s.peekLine()), msg, args...)
}

// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekLine(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekLine(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekLine(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekLine(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t ssa.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekLine(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekLine(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t ssa.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekLine(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekLine(), op, t, aux, arg0, arg1, arg2)
}

// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.f.Entry.NewValue0(s.peekLine(), op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.f.Entry.NewValue0A(s.peekLine(), op, t, aux)
}

// entryNewValue0I adds a new value with no arguments and an auxint value to the entry block.
func (s *state) entryNewValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.f.Entry.NewValue0I(s.peekLine(), op, t, auxint)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1(s.peekLine(), op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t ssa.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1I(s.peekLine(), op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// const* routines add a new const value to the entry block.
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(s.peekLine(), Types[TBOOL], c)
}
func (s *state) constInt8(t ssa.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(s.peekLine(), t, c)
}
func (s *state) constInt16(t ssa.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(s.peekLine(), t, c)
}
func (s *state) constInt32(t ssa.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(s.peekLine(), t, c)
}
func (s *state) constInt64(t ssa.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(s.peekLine(), t, c)
}
func (s *state) constFloat32(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(s.peekLine(), t, c)
}
func (s *state) constFloat64(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(s.peekLine(), t, c)
}
func (s *state) constIntPtr(t ssa.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 4 && int64(int32(c)) != c {
		s.Fatalf("pointer constant too big %d", c)
	}
	return s.f.ConstIntPtr(s.peekLine(), t, c)
}
func (s *state) constInt(t ssa.Type, c int64) *ssa.Value {
	if s.config.IntSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}

// stmtList converts the statement list l to SSA and adds it to s.
func (s *state) stmtList(l *NodeList) {
	for ; l != nil; l = l.Next {
		s.stmt(l.N)
	}
}

// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n *Node) {
	s.pushLine(n.Lineno)
	defer s.popLine()

	// If s.curBlock is nil, then we're about to generate dead code.
	// We can't just short-circuit here, though,
	// because we check labels and gotos as part of SSA generation.
	// Provide a block for the dead code so that we don't have
	// to add special cases everywhere else.
	if s.curBlock == nil {
		dead := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(dead)
	}

	s.stmtList(n.Ninit)
	switch n.Op {

	case OBLOCK:
		s.stmtList(n.List)

	// No-ops
	case OEMPTY, ODCLCONST, ODCLTYPE, OFALL:

	// Expression statements
	case OCALLFUNC, OCALLMETH, OCALLINTER:
		s.call(n, callNormal)
	case ODEFER:
		s.call(n.Left, callDefer)
	case OPROC:
		s.call(n.Left, callGo)

	case OAS2DOTTYPE:
		res, resok := s.dottype(n.Rlist.N, true)
		s.assign(n.List.N, res, false)
		s.assign(n.List.Next.N, resok, false)
		return

	case ODCL:
		if n.Left.Class&PHEAP == 0 {
			return
		}
		if compiling_runtime != 0 {
			Fatalf("%v escapes to heap, not allowed in runtime.", n)
		}

		// TODO: the old pass hides the details of PHEAP
		// variables behind ONAME nodes. Figure out if it's better
		// to rewrite the tree and make the heapaddr construct explicit
		// or to keep this detail hidden behind the scenes.
		palloc := prealloc[n.Left]
		if palloc == nil {
			palloc = callnew(n.Left.Type)
			prealloc[n.Left] = palloc
		}
		r := s.expr(palloc)
		s.assign(n.Left.Name.Heapaddr, r, false)

	case OLABEL:
		sym := n.Left.Sym

		if isblanksym(sym) {
			// Empty identifier is valid but useless.
			// See issues 11589, 11593.
			return
		}

		lab := s.label(sym)

		// Associate label with its control flow node, if any
		if ctl := n.Name.Defn; ctl != nil {
			switch ctl.Op {
			case OFOR, OSWITCH, OSELECT:
				s.labeledNodes[ctl] = lab
			}
		}

		if !lab.defined() {
			lab.defNode = n
		} else {
			s.Error("label %v already defined at %v", sym, Ctxt.Line(int(lab.defNode.Lineno)))
			lab.reported = true
		}
		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// go to that label (we pretend "label:" is preceded by "goto label")
		b := s.endBlock()
		b.AddEdgeTo(lab.target)
		s.startBlock(lab.target)

	case OGOTO:
		sym := n.Left.Sym

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}
		if !lab.used() {
			lab.useNode = n
		}

		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		} else {
			s.fwdGotos = append(s.fwdGotos, n)
		}

		b := s.endBlock()
		b.AddEdgeTo(lab.target)

	case OAS, OASWB:
		// Check whether we can generate static data rather than code.
		// If so, ignore n and defer data generation until codegen.
		// Failure to do this causes writes to readonly symbols.
		if gen_as_init(n, true) {
			var data []*Node
			if s.f.StaticData != nil {
				data = s.f.StaticData.([]*Node)
			}
			s.f.StaticData = append(data, n)
			return
		}
		var r *ssa.Value
		if n.Right != nil {
			if n.Right.Op == OSTRUCTLIT || n.Right.Op == OARRAYLIT {
				// All literals with nonzero fields have already been
				// rewritten during walk. Any that remain are just T{}
				// or equivalents. Leave r = nil to get zeroing behavior.
				if !iszero(n.Right) {
					Fatalf("literal with nonzero value in SSA: %v", n.Right)
				}
			} else {
				r = s.expr(n.Right)
			}
		}
		if n.Right != nil && n.Right.Op == OAPPEND {
			// Yuck! The frontend gets rid of the write barrier, but we need it!
			// At least, we need it in the case where growslice is called.
			// TODO: Do the write barrier on just the growslice branch.
			// TODO: just add a ptr graying to the end of growslice?
			// TODO: check whether we need to do this for ODOTTYPE and ORECV also.
			// They get similar wb-removal treatment in walk.go:OAS.
			s.assign(n.Left, r, true)
			return
		}
		s.assign(n.Left, r, n.Op == OASWB)

	case OIF:
		cond := s.expr(n.Left)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.Control = cond
		b.Likely = ssa.BranchPrediction(n.Likely) // gc and ssa both use -1/0/+1 for likeliness

		bThen := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var bElse *ssa.Block

		if n.Rlist == nil {
			b.AddEdgeTo(bThen)
			b.AddEdgeTo(bEnd)
		} else {
			bElse = s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bThen)
			b.AddEdgeTo(bElse)
		}

		s.startBlock(bThen)
		s.stmtList(n.Nbody)
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}

		if n.Rlist != nil {
			s.startBlock(bElse)
			s.stmtList(n.Rlist)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ORETURN:
		s.stmtList(n.List)
		s.stmtList(s.exitCode)
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRet
		b.Control = m
	case ORETJMP:
		s.stmtList(n.List)
		s.stmtList(s.exitCode)
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.Aux = n.Left.Sym
		b.Control = m

	case OCONTINUE, OBREAK:
		var op string
		var to *ssa.Block
		switch n.Op {
		case OCONTINUE:
			op = "continue"
			to = s.continueTo
		case OBREAK:
			op = "break"
			to = s.breakTo
		}
		if n.Left == nil {
			// plain break/continue
			if to == nil {
				s.Error("%s is not in a loop", op)
				return
			}
			// nothing to do; "to" is already the correct target
		} else {
			// labeled break/continue; look up the target
			sym := n.Left.Sym
			lab := s.label(sym)
			if !lab.used() {
				lab.useNode = n.Left
			}
			if !lab.defined() {
				s.Error("%s label not defined: %v", op, sym)
				lab.reported = true
				return
			}
			switch n.Op {
			case OCONTINUE:
				to = lab.continueTarget
			case OBREAK:
				to = lab.breakTarget
			}
			if to == nil {
				// Valid label but not usable with a break/continue here, e.g.:
				// for {
				// 	continue abc
				// }
				// abc:
				// for {}
				s.Error("invalid %s label %v", op, sym)
				lab.reported = true
				return
			}
		}

		b := s.endBlock()
		b.AddEdgeTo(to)

	case OFOR:
		// OFOR: for Ninit; Left; Right { Nbody }
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		var cond *ssa.Value
		if n.Left != nil {
			cond = s.expr(n.Left)
		} else {
			cond = s.constBool(true)
		}
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.Control = cond
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bBody)
		b.AddEdgeTo(bEnd)

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled for loop
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Nbody)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Right != nil {
			s.stmt(n.Right)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
		}
		s.startBlock(bEnd)

	case OSWITCH, OSELECT:
		// These have been mostly rewritten by the front end into their Nbody fields.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(n.Nbody)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}
		s.startBlock(bEnd)

	case OVARKILL:
		// Insert a varkill op to record that a variable is no longer live.
		// We only care about liveness info at call sites, so putting the
		// varkill in the store chain is enough to keep it correctly ordered
		// with respect to call ops.
		if !canSSA(n.Left) {
			s.vars[&memVar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, n.Left, s.mem())
		}

	case OCHECKNIL:
		p := s.expr(n.Left)
		s.nilCheck(p)

	default:
		s.Unimplementedf("unhandled stmt %s", opnames[n.Op])
	}
}

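// opAndType pairs a gc operator with the concrete etype of its operands.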
type opAndType struct {
	op    uint8
	etype uint8
}

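// opToSSA maps a gc operator and operand etype to the SSA opcode that implements it.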
var opToSSA = map[opAndType]ssa.Op{
	opAndType{OADD, TINT8}: ssa.OpAdd8,
	opAndType{OADD, TUINT8}: ssa.OpAdd8,
	opAndType{OADD, TINT16}: ssa.OpAdd16,
	opAndType{OADD, TUINT16}: ssa.OpAdd16,
	opAndType{OADD, TINT32}: ssa.OpAdd32,
	opAndType{OADD, TUINT32}: ssa.OpAdd32,
	opAndType{OADD, TPTR32}: ssa.OpAdd32,
	opAndType{OADD, TINT64}: ssa.OpAdd64,
	opAndType{OADD, TUINT64}: ssa.OpAdd64,
	opAndType{OADD, TPTR64}: ssa.OpAdd64,
	opAndType{OADD, TFLOAT32}: ssa.OpAdd32F,
	opAndType{OADD, TFLOAT64}: ssa.OpAdd64F,

	opAndType{OSUB, TINT8}: ssa.OpSub8,
	opAndType{OSUB, TUINT8}: ssa.OpSub8,
	opAndType{OSUB, TINT16}: ssa.OpSub16,
	opAndType{OSUB, TUINT16}: ssa.OpSub16,
	opAndType{OSUB, TINT32}: ssa.OpSub32,
	opAndType{OSUB, TUINT32}: ssa.OpSub32,
	opAndType{OSUB, TINT64}: ssa.OpSub64,
	opAndType{OSUB, TUINT64}: ssa.OpSub64,
	opAndType{OSUB, TFLOAT32}: ssa.OpSub32F,
	opAndType{OSUB, TFLOAT64}: ssa.OpSub64F,

	opAndType{ONOT, TBOOL}: ssa.OpNot,

	opAndType{OMINUS, TINT8}: ssa.OpNeg8,
	opAndType{OMINUS, TUINT8}: ssa.OpNeg8,
	opAndType{OMINUS, TINT16}: ssa.OpNeg16,
	opAndType{OMINUS, TUINT16}: ssa.OpNeg16,
	opAndType{OMINUS, TINT32}: ssa.OpNeg32,
	opAndType{OMINUS, TUINT32}: ssa.OpNeg32,
	opAndType{OMINUS, TINT64}: ssa.OpNeg64,
	opAndType{OMINUS, TUINT64}: ssa.OpNeg64,
	opAndType{OMINUS, TFLOAT32}: ssa.OpNeg32F,
	opAndType{OMINUS, TFLOAT64}: ssa.OpNeg64F,

	opAndType{OCOM, TINT8}: ssa.OpCom8,
	opAndType{OCOM, TUINT8}: ssa.OpCom8,
	opAndType{OCOM, TINT16}: ssa.OpCom16,
	opAndType{OCOM, TUINT16}: ssa.OpCom16,
	opAndType{OCOM, TINT32}: ssa.OpCom32,
	opAndType{OCOM, TUINT32}: ssa.OpCom32,
	opAndType{OCOM, TINT64}: ssa.OpCom64,
	opAndType{OCOM, TUINT64}: ssa.OpCom64,

	opAndType{OIMAG, TCOMPLEX64}: ssa.OpComplexImag,
	opAndType{OIMAG, TCOMPLEX128}: ssa.OpComplexImag,
	opAndType{OREAL, TCOMPLEX64}: ssa.OpComplexReal,
	opAndType{OREAL, TCOMPLEX128}: ssa.OpComplexReal,

	opAndType{OMUL, TINT8}: ssa.OpMul8,
	opAndType{OMUL, TUINT8}: ssa.OpMul8,
	opAndType{OMUL, TINT16}: ssa.OpMul16,
	opAndType{OMUL, TUINT16}: ssa.OpMul16,
	opAndType{OMUL, TINT32}: ssa.OpMul32,
	opAndType{OMUL, TUINT32}: ssa.OpMul32,
	opAndType{OMUL, TINT64}: ssa.OpMul64,
	opAndType{OMUL, TUINT64}: ssa.OpMul64,
	opAndType{OMUL, TFLOAT32}: ssa.OpMul32F,
	opAndType{OMUL, TFLOAT64}: ssa.OpMul64F,

	opAndType{ODIV, TFLOAT32}: ssa.OpDiv32F,
	opAndType{ODIV, TFLOAT64}: ssa.OpDiv64F,

	opAndType{OHMUL, TINT8}: ssa.OpHmul8,
	opAndType{OHMUL, TUINT8}: ssa.OpHmul8u,
	opAndType{OHMUL, TINT16}: ssa.OpHmul16,
	opAndType{OHMUL, TUINT16}: ssa.OpHmul16u,
	opAndType{OHMUL, TINT32}: ssa.OpHmul32,
	opAndType{OHMUL, TUINT32}: ssa.OpHmul32u,

	opAndType{ODIV, TINT8}: ssa.OpDiv8,
	opAndType{ODIV, TUINT8}: ssa.OpDiv8u,
	opAndType{ODIV, TINT16}: ssa.OpDiv16,
	opAndType{ODIV, TUINT16}: ssa.OpDiv16u,
	opAndType{ODIV, TINT32}: ssa.OpDiv32,
	opAndType{ODIV, TUINT32}: ssa.OpDiv32u,
	opAndType{ODIV, TINT64}: ssa.OpDiv64,
	opAndType{ODIV, TUINT64}: ssa.OpDiv64u,

	opAndType{OMOD, TINT8}: ssa.OpMod8,
	opAndType{OMOD, TUINT8}: ssa.OpMod8u,
	opAndType{OMOD, TINT16}: ssa.OpMod16,
	opAndType{OMOD, TUINT16}: ssa.OpMod16u,
	opAndType{OMOD, TINT32}: ssa.OpMod32,
	opAndType{OMOD, TUINT32}: ssa.OpMod32u,
	opAndType{OMOD, TINT64}: ssa.OpMod64,
	opAndType{OMOD, TUINT64}: ssa.OpMod64u,

	opAndType{OAND, TINT8}: ssa.OpAnd8,
	opAndType{OAND, TUINT8}: ssa.OpAnd8,
	opAndType{OAND, TINT16}: ssa.OpAnd16,
	opAndType{OAND, TUINT16}: ssa.OpAnd16,
	opAndType{OAND, TINT32}: ssa.OpAnd32,
	opAndType{OAND, TUINT32}: ssa.OpAnd32,
	opAndType{OAND, TINT64}: ssa.OpAnd64,
	opAndType{OAND, TUINT64}: ssa.OpAnd64,

	opAndType{OOR, TINT8}: ssa.OpOr8,
	opAndType{OOR, TUINT8}: ssa.OpOr8,
	opAndType{OOR, TINT16}: ssa.OpOr16,
	opAndType{OOR, TUINT16}: ssa.OpOr16,
	opAndType{OOR, TINT32}: ssa.OpOr32,
	opAndType{OOR, TUINT32}: ssa.OpOr32,
	opAndType{OOR, TINT64}: ssa.OpOr64,
	opAndType{OOR, TUINT64}: ssa.OpOr64,

	opAndType{OXOR, TINT8}: ssa.OpXor8,
	opAndType{OXOR, TUINT8}: ssa.OpXor8,
	opAndType{OXOR, TINT16}: ssa.OpXor16,
	opAndType{OXOR, TUINT16}: ssa.OpXor16,
	opAndType{OXOR, TINT32}: ssa.OpXor32,
	opAndType{OXOR, TUINT32}: ssa.OpXor32,
	opAndType{OXOR, TINT64}: ssa.OpXor64,
	opAndType{OXOR, TUINT64}: ssa.OpXor64,

	opAndType{OEQ, TBOOL}: ssa.OpEq8,
	opAndType{OEQ, TINT8}: ssa.OpEq8,
	opAndType{OEQ, TUINT8}: ssa.OpEq8,
	opAndType{OEQ, TINT16}: ssa.OpEq16,
	opAndType{OEQ, TUINT16}: ssa.OpEq16,
	opAndType{OEQ, TINT32}: ssa.OpEq32,
	opAndType{OEQ, TUINT32}: ssa.OpEq32,
	opAndType{OEQ, TINT64}: ssa.OpEq64,
	opAndType{OEQ, TUINT64}: ssa.OpEq64,
	opAndType{OEQ, TINTER}: ssa.OpEqInter,
	opAndType{OEQ, TARRAY}: ssa.OpEqSlice,
	opAndType{OEQ, TFUNC}: ssa.OpEqPtr,
	opAndType{OEQ, TMAP}: ssa.OpEqPtr,
	opAndType{OEQ, TCHAN}: ssa.OpEqPtr,
	opAndType{OEQ, TPTR64}: ssa.OpEqPtr,
	opAndType{OEQ, TUINTPTR}: ssa.OpEqPtr,
	opAndType{OEQ, TUNSAFEPTR}: ssa.OpEqPtr,
	opAndType{OEQ, TFLOAT64}: ssa.OpEq64F,
	opAndType{OEQ, TFLOAT32}: ssa.OpEq32F,

	opAndType{ONE, TBOOL}: ssa.OpNeq8,
	opAndType{ONE, TINT8}: ssa.OpNeq8,
	opAndType{ONE, TUINT8}: ssa.OpNeq8,
	opAndType{ONE, TINT16}: ssa.OpNeq16,
	opAndType{ONE, TUINT16}: ssa.OpNeq16,
	opAndType{ONE, TINT32}: ssa.OpNeq32,
	opAndType{ONE, TUINT32}: ssa.OpNeq32,
	opAndType{ONE, TINT64}: ssa.OpNeq64,
	opAndType{ONE, TUINT64}: ssa.OpNeq64,
	opAndType{ONE, TINTER}: ssa.OpNeqInter,
	opAndType{ONE, TARRAY}: ssa.OpNeqSlice,
	opAndType{ONE, TFUNC}: ssa.OpNeqPtr,
	opAndType{ONE, TMAP}: ssa.OpNeqPtr,
	opAndType{ONE, TCHAN}: ssa.OpNeqPtr,
	opAndType{ONE, TPTR64}: ssa.OpNeqPtr,
	opAndType{ONE, TUINTPTR}: ssa.OpNeqPtr,
	opAndType{ONE, TUNSAFEPTR}: ssa.OpNeqPtr,
	opAndType{ONE, TFLOAT64}: ssa.OpNeq64F,
	opAndType{ONE, TFLOAT32}: ssa.OpNeq32F,

	opAndType{OLT, TINT8}: ssa.OpLess8,
	opAndType{OLT, TUINT8}: ssa.OpLess8U,
	opAndType{OLT, TINT16}: ssa.OpLess16,
	opAndType{OLT, TUINT16}: ssa.OpLess16U,
	opAndType{OLT, TINT32}: ssa.OpLess32,
	opAndType{OLT, TUINT32}: ssa.OpLess32U,
	opAndType{OLT, TINT64}: ssa.OpLess64,
	opAndType{OLT, TUINT64}: ssa.OpLess64U,
	opAndType{OLT, TFLOAT64}: ssa.OpLess64F,
	opAndType{OLT, TFLOAT32}: ssa.OpLess32F,

	opAndType{OGT, TINT8}: ssa.OpGreater8,
	opAndType{OGT, TUINT8}: ssa.OpGreater8U,
	opAndType{OGT, TINT16}: ssa.OpGreater16,
	opAndType{OGT, TUINT16}: ssa.OpGreater16U,
	opAndType{OGT, TINT32}: ssa.OpGreater32,
	opAndType{OGT, TUINT32}: ssa.OpGreater32U,
	opAndType{OGT, TINT64}: ssa.OpGreater64,
	opAndType{OGT, TUINT64}: ssa.OpGreater64U,
	opAndType{OGT, TFLOAT64}: ssa.OpGreater64F,
	opAndType{OGT, TFLOAT32}: ssa.OpGreater32F,

	opAndType{OLE, TINT8}: ssa.OpLeq8,
	opAndType{OLE, TUINT8}: ssa.OpLeq8U,
	opAndType{OLE, TINT16}: ssa.OpLeq16,
	opAndType{OLE, TUINT16}: ssa.OpLeq16U,
	opAndType{OLE, TINT32}: ssa.OpLeq32,
	opAndType{OLE, TUINT32}: ssa.OpLeq32U,
	opAndType{OLE, TINT64}: ssa.OpLeq64,
	opAndType{OLE, TUINT64}: ssa.OpLeq64U,
	opAndType{OLE, TFLOAT64}: ssa.OpLeq64F,
	opAndType{OLE, TFLOAT32}: ssa.OpLeq32F,

	opAndType{OGE, TINT8}: ssa.OpGeq8,
	opAndType{OGE, TUINT8}: ssa.OpGeq8U,
	opAndType{OGE, TINT16}: ssa.OpGeq16,
	opAndType{OGE, TUINT16}: ssa.OpGeq16U,
	opAndType{OGE, TINT32}: ssa.OpGeq32,
	opAndType{OGE, TUINT32}: ssa.OpGeq32U,
	opAndType{OGE, TINT64}: ssa.OpGeq64,
	opAndType{OGE, TUINT64}: ssa.OpGeq64U,
	opAndType{OGE, TFLOAT64}: ssa.OpGeq64F,
	opAndType{OGE, TFLOAT32}: ssa.OpGeq32F,

	opAndType{OLROT, TUINT8}: ssa.OpLrot8,
	opAndType{OLROT, TUINT16}: ssa.OpLrot16,
	opAndType{OLROT, TUINT32}: ssa.OpLrot32,
	opAndType{OLROT, TUINT64}: ssa.OpLrot64,

	opAndType{OSQRT, TFLOAT64}: ssa.OpSqrt,
}

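// concreteEtype returns t's etype with the platform-dependent TINT, TUINT,
// and TUINTPTR resolved to their fixed-size equivalents.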
func (s *state) concreteEtype(t *Type) uint8 {
	e := t.Etype
	switch e {
	default:
		return e
	case TINT:
		if s.config.IntSize == 8 {
			return TINT64
		}
		return TINT32
	case TUINT:
		if s.config.IntSize == 8 {
			return TUINT64
		}
		return TUINT32
	case TUINTPTR:
		if s.config.PtrSize == 8 {
			return TUINT64
		}
		return TUINT32
	}
}

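// ssaOp returns the SSA opcode for op applied to operands of type t;
// it reports an unimplemented op if there is no mapping.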
func (s *state) ssaOp(op uint8, t *Type) ssa.Op {
	etype := s.concreteEtype(t)
	x, ok := opToSSA[opAndType{op, etype}]
	if !ok {
		s.Unimplementedf("unhandled binary op %s %s", opnames[op], Econv(int(etype), 0))
	}
	return x
}

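// floatForComplex returns the float type of the real and imaginary parts of the complex type t.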
func floatForComplex(t *Type) *Type {
	if t.Size() == 8 {
		return Types[TFLOAT32]
	} else {
		return Types[TFLOAT64]
	}
}

type opAndTwoTypes struct {
	op     uint8
	etype1 uint8
	etype2 uint8
}

type twoTypes struct {
	etype1 uint8
	etype2 uint8
}

type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType uint8
}

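// fpConvOpToSSA maps a (from, to) pair of types to the two conversion ops,
// and the intermediate type, used to convert between them.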
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{

	twoTypes{TINT8, TFLOAT32}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT16, TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, TINT64},

	twoTypes{TINT8, TFLOAT64}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT16, TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, TINT64},

	twoTypes{TFLOAT32, TINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT32, TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT32, TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, TINT32},
	twoTypes{TFLOAT32, TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, TINT64},

	twoTypes{TFLOAT64, TINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT64, TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT64, TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, TINT32},
	twoTypes{TFLOAT64, TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, TINT64},
	// unsigned
	twoTypes{TUINT8, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TUINT16, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, TINT64}, // go wide to dodge unsigned
	twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64},            // Cvt64Uto32F, branchy code expansion instead

	twoTypes{TUINT8, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TUINT16, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, TINT64}, // go wide to dodge unsigned
	twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64},            // Cvt64Uto64F, branchy code expansion instead

	twoTypes{TFLOAT32, TUINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT32, TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
	twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64},          // Cvt32Fto64U, branchy code expansion instead

	twoTypes{TFLOAT64, TUINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT64, TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
	twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64},          // Cvt64Fto64U, branchy code expansion instead

	// float
	twoTypes{TFLOAT64, TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, TFLOAT32},
	twoTypes{TFLOAT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT64},
	twoTypes{TFLOAT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT32},
	twoTypes{TFLOAT32, TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, TFLOAT64},
}

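// shiftOpToSSA maps a shift operator, the etype of the shifted value, and the
// etype of the shift count to the SSA opcode that implements it.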
Keith Randall4b803152015-07-29 17:07:09 -07001149var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
1150 opAndTwoTypes{OLSH, TINT8, TUINT8}: ssa.OpLsh8x8,
1151 opAndTwoTypes{OLSH, TUINT8, TUINT8}: ssa.OpLsh8x8,
1152 opAndTwoTypes{OLSH, TINT8, TUINT16}: ssa.OpLsh8x16,
1153 opAndTwoTypes{OLSH, TUINT8, TUINT16}: ssa.OpLsh8x16,
1154 opAndTwoTypes{OLSH, TINT8, TUINT32}: ssa.OpLsh8x32,
1155 opAndTwoTypes{OLSH, TUINT8, TUINT32}: ssa.OpLsh8x32,
1156 opAndTwoTypes{OLSH, TINT8, TUINT64}: ssa.OpLsh8x64,
1157 opAndTwoTypes{OLSH, TUINT8, TUINT64}: ssa.OpLsh8x64,
1158
1159 opAndTwoTypes{OLSH, TINT16, TUINT8}: ssa.OpLsh16x8,
1160 opAndTwoTypes{OLSH, TUINT16, TUINT8}: ssa.OpLsh16x8,
1161 opAndTwoTypes{OLSH, TINT16, TUINT16}: ssa.OpLsh16x16,
1162 opAndTwoTypes{OLSH, TUINT16, TUINT16}: ssa.OpLsh16x16,
1163 opAndTwoTypes{OLSH, TINT16, TUINT32}: ssa.OpLsh16x32,
1164 opAndTwoTypes{OLSH, TUINT16, TUINT32}: ssa.OpLsh16x32,
1165 opAndTwoTypes{OLSH, TINT16, TUINT64}: ssa.OpLsh16x64,
1166 opAndTwoTypes{OLSH, TUINT16, TUINT64}: ssa.OpLsh16x64,
1167
1168 opAndTwoTypes{OLSH, TINT32, TUINT8}: ssa.OpLsh32x8,
1169 opAndTwoTypes{OLSH, TUINT32, TUINT8}: ssa.OpLsh32x8,
1170 opAndTwoTypes{OLSH, TINT32, TUINT16}: ssa.OpLsh32x16,
1171 opAndTwoTypes{OLSH, TUINT32, TUINT16}: ssa.OpLsh32x16,
1172 opAndTwoTypes{OLSH, TINT32, TUINT32}: ssa.OpLsh32x32,
1173 opAndTwoTypes{OLSH, TUINT32, TUINT32}: ssa.OpLsh32x32,
1174 opAndTwoTypes{OLSH, TINT32, TUINT64}: ssa.OpLsh32x64,
1175 opAndTwoTypes{OLSH, TUINT32, TUINT64}: ssa.OpLsh32x64,
1176
1177 opAndTwoTypes{OLSH, TINT64, TUINT8}: ssa.OpLsh64x8,
1178 opAndTwoTypes{OLSH, TUINT64, TUINT8}: ssa.OpLsh64x8,
1179 opAndTwoTypes{OLSH, TINT64, TUINT16}: ssa.OpLsh64x16,
1180 opAndTwoTypes{OLSH, TUINT64, TUINT16}: ssa.OpLsh64x16,
1181 opAndTwoTypes{OLSH, TINT64, TUINT32}: ssa.OpLsh64x32,
1182 opAndTwoTypes{OLSH, TUINT64, TUINT32}: ssa.OpLsh64x32,
1183 opAndTwoTypes{OLSH, TINT64, TUINT64}: ssa.OpLsh64x64,
1184 opAndTwoTypes{OLSH, TUINT64, TUINT64}: ssa.OpLsh64x64,
1185
1186 opAndTwoTypes{ORSH, TINT8, TUINT8}: ssa.OpRsh8x8,
1187 opAndTwoTypes{ORSH, TUINT8, TUINT8}: ssa.OpRsh8Ux8,
1188 opAndTwoTypes{ORSH, TINT8, TUINT16}: ssa.OpRsh8x16,
1189 opAndTwoTypes{ORSH, TUINT8, TUINT16}: ssa.OpRsh8Ux16,
1190 opAndTwoTypes{ORSH, TINT8, TUINT32}: ssa.OpRsh8x32,
1191 opAndTwoTypes{ORSH, TUINT8, TUINT32}: ssa.OpRsh8Ux32,
1192 opAndTwoTypes{ORSH, TINT8, TUINT64}: ssa.OpRsh8x64,
1193 opAndTwoTypes{ORSH, TUINT8, TUINT64}: ssa.OpRsh8Ux64,
1194
1195 opAndTwoTypes{ORSH, TINT16, TUINT8}: ssa.OpRsh16x8,
1196 opAndTwoTypes{ORSH, TUINT16, TUINT8}: ssa.OpRsh16Ux8,
1197 opAndTwoTypes{ORSH, TINT16, TUINT16}: ssa.OpRsh16x16,
1198 opAndTwoTypes{ORSH, TUINT16, TUINT16}: ssa.OpRsh16Ux16,
1199 opAndTwoTypes{ORSH, TINT16, TUINT32}: ssa.OpRsh16x32,
1200 opAndTwoTypes{ORSH, TUINT16, TUINT32}: ssa.OpRsh16Ux32,
1201 opAndTwoTypes{ORSH, TINT16, TUINT64}: ssa.OpRsh16x64,
1202 opAndTwoTypes{ORSH, TUINT16, TUINT64}: ssa.OpRsh16Ux64,
1203
1204 opAndTwoTypes{ORSH, TINT32, TUINT8}: ssa.OpRsh32x8,
1205 opAndTwoTypes{ORSH, TUINT32, TUINT8}: ssa.OpRsh32Ux8,
1206 opAndTwoTypes{ORSH, TINT32, TUINT16}: ssa.OpRsh32x16,
1207 opAndTwoTypes{ORSH, TUINT32, TUINT16}: ssa.OpRsh32Ux16,
1208 opAndTwoTypes{ORSH, TINT32, TUINT32}: ssa.OpRsh32x32,
1209 opAndTwoTypes{ORSH, TUINT32, TUINT32}: ssa.OpRsh32Ux32,
1210 opAndTwoTypes{ORSH, TINT32, TUINT64}: ssa.OpRsh32x64,
1211 opAndTwoTypes{ORSH, TUINT32, TUINT64}: ssa.OpRsh32Ux64,
1212
1213 opAndTwoTypes{ORSH, TINT64, TUINT8}: ssa.OpRsh64x8,
1214 opAndTwoTypes{ORSH, TUINT64, TUINT8}: ssa.OpRsh64Ux8,
1215 opAndTwoTypes{ORSH, TINT64, TUINT16}: ssa.OpRsh64x16,
1216 opAndTwoTypes{ORSH, TUINT64, TUINT16}: ssa.OpRsh64Ux16,
1217 opAndTwoTypes{ORSH, TINT64, TUINT32}: ssa.OpRsh64x32,
1218 opAndTwoTypes{ORSH, TUINT64, TUINT32}: ssa.OpRsh64Ux32,
1219 opAndTwoTypes{ORSH, TINT64, TUINT64}: ssa.OpRsh64x64,
1220 opAndTwoTypes{ORSH, TUINT64, TUINT64}: ssa.OpRsh64Ux64,
1221}
1222
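// ssaShiftOp returns the shift op for shifting a value of type t by a count of
// type u. For example, an int32 shifted right by a uint8 count uses OpRsh32x8,
// while a uint32 shifted right by the same count uses OpRsh32Ux8 (a logical,
// i.e. unsigned, shift).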
1223func (s *state) ssaShiftOp(op uint8, t *Type, u *Type) ssa.Op {
1224 etype1 := s.concreteEtype(t)
1225 etype2 := s.concreteEtype(u)
1226 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
1227 if !ok {
1228 s.Unimplementedf("unhandled shift op %s etype=%s/%s", opnames[op], Econv(int(etype1), 0), Econv(int(etype2), 0))
1229 }
1230 return x
1231}
1232
David Chase40aba8c2015-08-05 22:11:14 -04001233func (s *state) ssaRotateOp(op uint8, t *Type) ssa.Op {
1234 etype1 := s.concreteEtype(t)
1235 x, ok := opToSSA[opAndType{op, etype1}]
1236 if !ok {
1237 s.Unimplementedf("unhandled rotate op %s etype=%s", opnames[op], Econv(int(etype1), 0))
1238 }
1239 return x
1240}
1241
Keith Randalld2fd43a2015-04-15 15:51:25 -07001242// expr converts the expression n to ssa, adds it to s and returns the ssa result.
Keith Randallcfc2aa52015-05-18 16:44:20 -07001243func (s *state) expr(n *Node) *ssa.Value {
Michael Matloob81ccf502015-05-30 01:03:06 -04001244 s.pushLine(n.Lineno)
1245 defer s.popLine()
1246
Keith Randall06f32922015-07-11 11:39:12 -07001247 s.stmtList(n.Ninit)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001248 switch n.Op {
Todd Nealdef7c652015-09-07 19:07:02 -05001249 case OCFUNC:
1250 aux := &ssa.ExternSymbol{n.Type, n.Left.Sym}
1251 return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb)
David Chase956f3192015-09-11 16:40:05 -04001252 case OPARAM:
David Chase32ffbf72015-10-08 17:14:12 -04001253 addr := s.addr(n)
1254 return s.newValue2(ssa.OpLoad, n.Left.Type, addr, s.mem())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001255 case ONAME:
Keith Randall290d8fc2015-06-10 15:03:06 -07001256 if n.Class == PFUNC {
1257 // "value" of a function is the address of the function's closure
Keith Randall8c46aa52015-06-19 21:02:28 -07001258 sym := funcsym(n.Sym)
1259 aux := &ssa.ExternSymbol{n.Type, sym}
1260 return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb)
Keith Randall23df95b2015-05-12 15:16:52 -07001261 }
Keith Randall290d8fc2015-06-10 15:03:06 -07001262 if canSSA(n) {
Keith Randall8c46aa52015-06-19 21:02:28 -07001263 return s.variable(n, n.Type)
Keith Randall290d8fc2015-06-10 15:03:06 -07001264 }
1265 addr := s.addr(n)
Keith Randall8f22b522015-06-11 21:29:25 -07001266 return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
David Chase956f3192015-09-11 16:40:05 -04001267 case OCLOSUREVAR:
1268 addr := s.addr(n)
1269 return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001270 case OLITERAL:
Keith Randalle707fbe2015-06-11 10:20:39 -07001271 switch n.Val().Ctype() {
Keith Randalld2fd43a2015-04-15 15:51:25 -07001272 case CTINT:
Keith Randall9cb332e2015-07-28 14:19:20 -07001273 i := Mpgetfix(n.Val().U.(*Mpint))
1274 switch n.Type.Size() {
1275 case 1:
1276 return s.constInt8(n.Type, int8(i))
1277 case 2:
1278 return s.constInt16(n.Type, int16(i))
1279 case 4:
1280 return s.constInt32(n.Type, int32(i))
1281 case 8:
1282 return s.constInt64(n.Type, i)
1283 default:
1284 s.Fatalf("bad integer size %d", n.Type.Size())
1285 return nil
1286 }
1287 case CTSTR:
1288 return s.entryNewValue0A(ssa.OpConstString, n.Type, n.Val().U)
1289 case CTBOOL:
Josh Bleecher Snydercea44142015-09-08 16:52:25 -07001290 return s.constBool(n.Val().U.(bool))
Brad Fitzpatrick337b7e72015-07-13 17:30:42 -06001291 case CTNIL:
Keith Randall9f954db2015-08-18 10:26:28 -07001292 t := n.Type
1293 switch {
1294 case t.IsSlice():
1295 return s.entryNewValue0(ssa.OpConstSlice, t)
1296 case t.IsInterface():
1297 return s.entryNewValue0(ssa.OpConstInterface, t)
1298 default:
1299 return s.entryNewValue0(ssa.OpConstNil, t)
1300 }
David Chase997a9f32015-08-12 16:38:11 -04001301 case CTFLT:
1302 f := n.Val().U.(*Mpflt)
1303 switch n.Type.Size() {
1304 case 4:
Todd Nealadba6c42015-09-08 07:50:25 -04001305				// -0.0 literals need to be treated as if they were 0.0; adding 0.0 here
1306				// accomplishes this while not affecting other values.
1307 return s.constFloat32(n.Type, mpgetflt32(f)+0.0)
David Chase997a9f32015-08-12 16:38:11 -04001308 case 8:
Todd Nealadba6c42015-09-08 07:50:25 -04001309 return s.constFloat64(n.Type, mpgetflt(f)+0.0)
David Chase997a9f32015-08-12 16:38:11 -04001310 default:
1311 s.Fatalf("bad float size %d", n.Type.Size())
1312 return nil
1313 }
David Chase52578582015-08-28 14:24:10 -04001314 case CTCPLX:
1315 c := n.Val().U.(*Mpcplx)
1316 r := &c.Real
1317 i := &c.Imag
1318 switch n.Type.Size() {
1319 case 8:
1320 {
1321 pt := Types[TFLOAT32]
Todd Nealadba6c42015-09-08 07:50:25 -04001322					// -0.0 literals need to be treated as if they were 0.0; adding 0.0 here
1323					// accomplishes this while not affecting other values.
David Chase52578582015-08-28 14:24:10 -04001324 return s.newValue2(ssa.OpComplexMake, n.Type,
Todd Nealadba6c42015-09-08 07:50:25 -04001325 s.constFloat32(pt, mpgetflt32(r)+0.0),
1326 s.constFloat32(pt, mpgetflt32(i)+0.0))
David Chase52578582015-08-28 14:24:10 -04001327 }
1328 case 16:
1329 {
1330 pt := Types[TFLOAT64]
1331 return s.newValue2(ssa.OpComplexMake, n.Type,
Todd Nealadba6c42015-09-08 07:50:25 -04001332 s.constFloat64(pt, mpgetflt(r)+0.0),
1333 s.constFloat64(pt, mpgetflt(i)+0.0))
David Chase52578582015-08-28 14:24:10 -04001334 }
1335 default:
1336				s.Fatalf("bad complex size %d", n.Type.Size())
1337 return nil
1338 }
David Chase997a9f32015-08-12 16:38:11 -04001339
Keith Randalld2fd43a2015-04-15 15:51:25 -07001340 default:
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07001341 s.Unimplementedf("unhandled OLITERAL %v", n.Val().Ctype())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001342 return nil
1343 }
Keith Randall0ad9c8c2015-06-12 16:24:33 -07001344 case OCONVNOP:
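		// OCONVNOP is a conversion that does not change the representation,
		// e.g. between a named type and its underlying type or between
		// unsafe.Pointer and *T. It compiles to a Copy carrying the new type;
		// the code below just sanity-checks that the conversion really is a no-op.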
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001345 to := n.Type
1346 from := n.Left.Type
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001347
1348 // Assume everything will work out, so set up our return value.
1349 // Anything interesting that happens from here is a fatal.
Keith Randall0ad9c8c2015-06-12 16:24:33 -07001350 x := s.expr(n.Left)
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001351 v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
1352
Todd Nealdef7c652015-09-07 19:07:02 -05001353 // CONVNOP closure
1354 if to.Etype == TFUNC && from.IsPtr() {
1355 return v
1356 }
1357
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001358 // named <--> unnamed type or typed <--> untyped const
1359 if from.Etype == to.Etype {
1360 return v
1361 }
1362 // unsafe.Pointer <--> *T
1363 if to.Etype == TUNSAFEPTR && from.IsPtr() || from.Etype == TUNSAFEPTR && to.IsPtr() {
1364 return v
1365 }
1366
1367 dowidth(from)
1368 dowidth(to)
1369 if from.Width != to.Width {
1370 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width)
1371 return nil
1372 }
1373 if etypesign(from.Etype) != etypesign(to.Etype) {
1374 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, Econv(int(from.Etype), 0), to, Econv(int(to.Etype), 0))
1375 return nil
1376 }
1377
1378 if flag_race != 0 {
1379 s.Unimplementedf("questionable CONVNOP from race detector %v -> %v\n", from, to)
1380 return nil
1381 }
1382
1383 if etypesign(from.Etype) == 0 {
1384 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
1385 return nil
1386 }
1387
1388 // integer, same width, same sign
1389 return v
1390
Michael Matloob73054f52015-06-14 11:38:46 -07001391 case OCONV:
1392 x := s.expr(n.Left)
Keith Randall2a5e6c42015-07-23 14:35:02 -07001393 ft := n.Left.Type // from type
1394 tt := n.Type // to type
1395 if ft.IsInteger() && tt.IsInteger() {
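			// The switches below key on 10*fromSize+toSize (sizes in bytes), so,
			// for example, int64->int32 is case 84 (Trunc64to32) and int8->int64
			// is case 18 (SignExt8to64 if signed, ZeroExt8to64 if unsigned).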
1396 var op ssa.Op
1397 if tt.Size() == ft.Size() {
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001398 op = ssa.OpCopy
Keith Randall2a5e6c42015-07-23 14:35:02 -07001399 } else if tt.Size() < ft.Size() {
1400 // truncation
1401 switch 10*ft.Size() + tt.Size() {
1402 case 21:
1403 op = ssa.OpTrunc16to8
1404 case 41:
1405 op = ssa.OpTrunc32to8
1406 case 42:
1407 op = ssa.OpTrunc32to16
1408 case 81:
1409 op = ssa.OpTrunc64to8
1410 case 82:
1411 op = ssa.OpTrunc64to16
1412 case 84:
1413 op = ssa.OpTrunc64to32
1414 default:
1415 s.Fatalf("weird integer truncation %s -> %s", ft, tt)
1416 }
1417 } else if ft.IsSigned() {
1418 // sign extension
1419 switch 10*ft.Size() + tt.Size() {
1420 case 12:
1421 op = ssa.OpSignExt8to16
1422 case 14:
1423 op = ssa.OpSignExt8to32
1424 case 18:
1425 op = ssa.OpSignExt8to64
1426 case 24:
1427 op = ssa.OpSignExt16to32
1428 case 28:
1429 op = ssa.OpSignExt16to64
1430 case 48:
1431 op = ssa.OpSignExt32to64
1432 default:
1433 s.Fatalf("bad integer sign extension %s -> %s", ft, tt)
1434 }
1435 } else {
1436 // zero extension
1437 switch 10*ft.Size() + tt.Size() {
1438 case 12:
1439 op = ssa.OpZeroExt8to16
1440 case 14:
1441 op = ssa.OpZeroExt8to32
1442 case 18:
1443 op = ssa.OpZeroExt8to64
1444 case 24:
1445 op = ssa.OpZeroExt16to32
1446 case 28:
1447 op = ssa.OpZeroExt16to64
1448 case 48:
1449 op = ssa.OpZeroExt32to64
1450 default:
1451					s.Fatalf("weird integer zero extension %s -> %s", ft, tt)
1452 }
1453 }
1454 return s.newValue1(op, n.Type, x)
1455 }
David Chase42825882015-08-20 15:14:20 -04001456
David Chased052bbd2015-09-01 17:09:00 -04001457 if ft.IsFloat() || tt.IsFloat() {
1458 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
1459 if !ok {
1460 s.Fatalf("weird float conversion %s -> %s", ft, tt)
David Chase42825882015-08-20 15:14:20 -04001461 }
David Chased052bbd2015-09-01 17:09:00 -04001462 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
1463
1464 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
1465 // normal case, not tripping over unsigned 64
1466 if op1 == ssa.OpCopy {
1467 if op2 == ssa.OpCopy {
1468 return x
1469 }
1470 return s.newValue1(op2, n.Type, x)
1471 }
1472 if op2 == ssa.OpCopy {
1473 return s.newValue1(op1, n.Type, x)
1474 }
1475 return s.newValue1(op2, n.Type, s.newValue1(op1, Types[it], x))
1476 }
1477 // Tricky 64-bit unsigned cases.
1478 if ft.IsInteger() {
1479 // therefore tt is float32 or float64, and ft is also unsigned
David Chase42825882015-08-20 15:14:20 -04001480 if tt.Size() == 4 {
1481 return s.uint64Tofloat32(n, x, ft, tt)
1482 }
1483 if tt.Size() == 8 {
1484 return s.uint64Tofloat64(n, x, ft, tt)
1485 }
David Chased052bbd2015-09-01 17:09:00 -04001486 s.Fatalf("weird unsigned integer to float conversion %s -> %s", ft, tt)
David Chase42825882015-08-20 15:14:20 -04001487 }
David Chased052bbd2015-09-01 17:09:00 -04001488 // therefore ft is float32 or float64, and tt is unsigned integer
David Chase73151062015-08-26 14:25:40 -04001489 if ft.Size() == 4 {
David Chased052bbd2015-09-01 17:09:00 -04001490 return s.float32ToUint64(n, x, ft, tt)
David Chase73151062015-08-26 14:25:40 -04001491 }
David Chased052bbd2015-09-01 17:09:00 -04001492 if ft.Size() == 8 {
1493 return s.float64ToUint64(n, x, ft, tt)
David Chase73151062015-08-26 14:25:40 -04001494 }
David Chased052bbd2015-09-01 17:09:00 -04001495 s.Fatalf("weird float to unsigned integer conversion %s -> %s", ft, tt)
1496 return nil
David Chase42825882015-08-20 15:14:20 -04001497 }
David Chase3a9d0ac2015-08-28 14:24:10 -04001498
1499 if ft.IsComplex() && tt.IsComplex() {
1500 var op ssa.Op
1501 if ft.Size() == tt.Size() {
1502 op = ssa.OpCopy
1503 } else if ft.Size() == 8 && tt.Size() == 16 {
1504 op = ssa.OpCvt32Fto64F
1505 } else if ft.Size() == 16 && tt.Size() == 8 {
1506 op = ssa.OpCvt64Fto32F
1507 } else {
1508 s.Fatalf("weird complex conversion %s -> %s", ft, tt)
1509 }
1510 ftp := floatForComplex(ft)
1511 ttp := floatForComplex(tt)
1512 return s.newValue2(ssa.OpComplexMake, tt,
1513 s.newValue1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, x)),
1514 s.newValue1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x)))
1515 }
David Chase42825882015-08-20 15:14:20 -04001516
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07001517 s.Unimplementedf("unhandled OCONV %s -> %s", Econv(int(n.Left.Type.Etype), 0), Econv(int(n.Type.Etype), 0))
Keith Randall2a5e6c42015-07-23 14:35:02 -07001518 return nil
Keith Randallcfc2aa52015-05-18 16:44:20 -07001519
Keith Randall269baa92015-09-17 10:31:16 -07001520 case ODOTTYPE:
1521 res, _ := s.dottype(n, false)
1522 return res
1523
Josh Bleecher Snyder46815b92015-06-24 17:48:22 -07001524 // binary ops
1525 case OLT, OEQ, ONE, OLE, OGE, OGT:
Keith Randalld2fd43a2015-04-15 15:51:25 -07001526 a := s.expr(n.Left)
1527 b := s.expr(n.Right)
Keith Randalldb380bf2015-09-10 11:05:42 -07001528 if n.Left.Type.IsComplex() {
Keith Randallc244ce02015-09-10 14:59:00 -07001529 pt := floatForComplex(n.Left.Type)
Keith Randalldb380bf2015-09-10 11:05:42 -07001530 op := s.ssaOp(OEQ, pt)
1531 r := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
1532 i := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
1533 c := s.newValue2(ssa.OpAnd8, Types[TBOOL], r, i)
1534 switch n.Op {
1535 case OEQ:
1536 return c
1537 case ONE:
1538 return s.newValue1(ssa.OpNot, Types[TBOOL], c)
1539 default:
1540 s.Fatalf("ordered complex compare %s", opnames[n.Op])
1541 }
Keith Randalldb380bf2015-09-10 11:05:42 -07001542 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001543 return s.newValue2(s.ssaOp(n.Op, n.Left.Type), Types[TBOOL], a, b)
David Chase3a9d0ac2015-08-28 14:24:10 -04001544 case OMUL:
1545 a := s.expr(n.Left)
1546 b := s.expr(n.Right)
1547 if n.Type.IsComplex() {
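			// The identity being implemented here is
			//	(ar+ai*i) * (br+bi*i) = (ar*br - ai*bi) + (ar*bi + ai*br)*i
			// computed in float64 (wt) and narrowed back to pt for complex64.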
1548 mulop := ssa.OpMul64F
1549 addop := ssa.OpAdd64F
1550 subop := ssa.OpSub64F
1551 pt := floatForComplex(n.Type) // Could be Float32 or Float64
1552 wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error
1553
1554 areal := s.newValue1(ssa.OpComplexReal, pt, a)
1555 breal := s.newValue1(ssa.OpComplexReal, pt, b)
1556 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
1557 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
1558
1559 if pt != wt { // Widen for calculation
1560 areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
1561 breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
1562 aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
1563 bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
1564 }
1565
1566 xreal := s.newValue2(subop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
1567 ximag := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, bimag), s.newValue2(mulop, wt, aimag, breal))
1568
1569 if pt != wt { // Narrow to store back
1570 xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
1571 ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
1572 }
1573
1574 return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
1575 }
1576 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1577
1578 case ODIV:
1579 a := s.expr(n.Left)
1580 b := s.expr(n.Right)
1581 if n.Type.IsComplex() {
1582 // TODO this is not executed because the front-end substitutes a runtime call.
1583 // That probably ought to change; with modest optimization the widen/narrow
1584 // conversions could all be elided in larger expression trees.
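			// The identity being implemented here is
			//	(ar+ai*i) / (br+bi*i) = ((ar*br + ai*bi) + (ai*br - ar*bi)*i) / (br*br + bi*bi)
			// denom below is br*br + bi*bi; xreal and ximag are the two numerators.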
1585 mulop := ssa.OpMul64F
1586 addop := ssa.OpAdd64F
1587 subop := ssa.OpSub64F
1588 divop := ssa.OpDiv64F
1589 pt := floatForComplex(n.Type) // Could be Float32 or Float64
1590 wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error
1591
1592 areal := s.newValue1(ssa.OpComplexReal, pt, a)
1593 breal := s.newValue1(ssa.OpComplexReal, pt, b)
1594 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
1595 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
1596
1597 if pt != wt { // Widen for calculation
1598 areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
1599 breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
1600 aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
1601 bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
1602 }
1603
1604 denom := s.newValue2(addop, wt, s.newValue2(mulop, wt, breal, breal), s.newValue2(mulop, wt, bimag, bimag))
1605 xreal := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
1606 ximag := s.newValue2(subop, wt, s.newValue2(mulop, wt, aimag, breal), s.newValue2(mulop, wt, areal, bimag))
1607
1608			// TODO: not sure if this is best done in wide precision or narrow.
1609 // Double-rounding might be an issue.
1610 // Note that the pre-SSA implementation does the entire calculation
1611 // in wide format, so wide is compatible.
1612 xreal = s.newValue2(divop, wt, xreal, denom)
1613 ximag = s.newValue2(divop, wt, ximag, denom)
1614
1615 if pt != wt { // Narrow to store back
1616 xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
1617 ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
1618 }
1619
1620 return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
1621 }
1622 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1623 case OADD, OSUB:
1624 a := s.expr(n.Left)
1625 b := s.expr(n.Right)
1626 if n.Type.IsComplex() {
1627 pt := floatForComplex(n.Type)
1628 op := s.ssaOp(n.Op, pt)
1629 return s.newValue2(ssa.OpComplexMake, n.Type,
1630 s.newValue2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
1631 s.newValue2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
1632 }
1633 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1634 case OAND, OOR, OMOD, OHMUL, OXOR:
Keith Randalld2fd43a2015-04-15 15:51:25 -07001635 a := s.expr(n.Left)
1636 b := s.expr(n.Right)
Keith Randall67fdb0d2015-07-19 15:48:20 -07001637 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
Keith Randall4b803152015-07-29 17:07:09 -07001638 case OLSH, ORSH:
1639 a := s.expr(n.Left)
1640 b := s.expr(n.Right)
1641 return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b)
David Chase40aba8c2015-08-05 22:11:14 -04001642 case OLROT:
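		// Rotate left by a constant amount. The front end presumably forms
		// OLROT from shift-or patterns such as x<<k | x>>(width-k); here we
		// only check that the constant distance is in range.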
1643 a := s.expr(n.Left)
1644 i := n.Right.Int()
1645 if i <= 0 || i >= n.Type.Size()*8 {
1646 s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i)
1647 }
1648 return s.newValue1I(s.ssaRotateOp(n.Op, n.Type), a.Type, i, a)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001649 case OANDAND, OOROR:
1650 // To implement OANDAND (and OOROR), we introduce a
1651 // new temporary variable to hold the result. The
1652 // variable is associated with the OANDAND node in the
1653 // s.vars table (normally variables are only
1654 // associated with ONAME nodes). We convert
1655 // A && B
1656 // to
1657 // var = A
1658 // if var {
1659 // var = B
1660 // }
1661 // Using var in the subsequent block introduces the
1662 // necessary phi variable.
1663 el := s.expr(n.Left)
1664 s.vars[n] = el
1665
1666 b := s.endBlock()
1667 b.Kind = ssa.BlockIf
1668 b.Control = el
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07001669 // In theory, we should set b.Likely here based on context.
1670 // However, gc only gives us likeliness hints
1671 // in a single place, for plain OIF statements,
1672		// and passing around context is finicky, so don't bother for now.
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001673
1674 bRight := s.f.NewBlock(ssa.BlockPlain)
1675 bResult := s.f.NewBlock(ssa.BlockPlain)
1676 if n.Op == OANDAND {
Todd Neal47d67992015-08-28 21:36:29 -05001677 b.AddEdgeTo(bRight)
1678 b.AddEdgeTo(bResult)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001679 } else if n.Op == OOROR {
Todd Neal47d67992015-08-28 21:36:29 -05001680 b.AddEdgeTo(bResult)
1681 b.AddEdgeTo(bRight)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001682 }
1683
1684 s.startBlock(bRight)
1685 er := s.expr(n.Right)
1686 s.vars[n] = er
1687
1688 b = s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05001689 b.AddEdgeTo(bResult)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001690
1691 s.startBlock(bResult)
Josh Bleecher Snyder35ad1fc2015-08-27 10:11:08 -07001692 return s.variable(n, Types[TBOOL])
Keith Randall7e390722015-09-12 14:14:02 -07001693 case OCOMPLEX:
1694 r := s.expr(n.Left)
1695 i := s.expr(n.Right)
1696 return s.newValue2(ssa.OpComplexMake, n.Type, r, i)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001697
Josh Bleecher Snyder4178f202015-09-05 19:28:00 -07001698 // unary ops
David Chase3a9d0ac2015-08-28 14:24:10 -04001699 case OMINUS:
1700 a := s.expr(n.Left)
1701 if n.Type.IsComplex() {
1702 tp := floatForComplex(n.Type)
1703 negop := s.ssaOp(n.Op, tp)
1704 return s.newValue2(ssa.OpComplexMake, n.Type,
1705 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
1706 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
1707 }
1708 return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
Keith Randalla329e212015-09-12 13:26:57 -07001709 case ONOT, OCOM, OSQRT:
Brad Fitzpatrickd9c72d72015-07-10 11:25:48 -06001710 a := s.expr(n.Left)
Alexandru Moșoi954d5ad2015-07-21 16:58:18 +02001711 return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
Keith Randall2f518072015-09-10 11:37:09 -07001712 case OIMAG, OREAL:
1713 a := s.expr(n.Left)
1714 return s.newValue1(s.ssaOp(n.Op, n.Left.Type), n.Type, a)
Josh Bleecher Snyder4178f202015-09-05 19:28:00 -07001715 case OPLUS:
1716 return s.expr(n.Left)
Brad Fitzpatrickd9c72d72015-07-10 11:25:48 -06001717
Keith Randallcfc2aa52015-05-18 16:44:20 -07001718 case OADDR:
1719 return s.addr(n.Left)
1720
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07001721 case OINDREG:
1722 if int(n.Reg) != Thearch.REGSP {
1723 s.Unimplementedf("OINDREG of non-SP register %s in expr: %v", obj.Rconv(int(n.Reg)), n)
1724 return nil
1725 }
1726 addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp)
1727 return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
1728
Keith Randalld2fd43a2015-04-15 15:51:25 -07001729 case OIND:
1730 p := s.expr(n.Left)
Keith Randallcfc2aa52015-05-18 16:44:20 -07001731 s.nilCheck(p)
Keith Randall8f22b522015-06-11 21:29:25 -07001732 return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
Keith Randallcfc2aa52015-05-18 16:44:20 -07001733
Keith Randallcd7e0592015-07-15 21:33:49 -07001734 case ODOT:
1735 v := s.expr(n.Left)
1736 return s.newValue1I(ssa.OpStructSelect, n.Type, n.Xoffset, v)
1737
Keith Randalld2fd43a2015-04-15 15:51:25 -07001738 case ODOTPTR:
1739 p := s.expr(n.Left)
Keith Randallcfc2aa52015-05-18 16:44:20 -07001740 s.nilCheck(p)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001741 p = s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
Keith Randall8f22b522015-06-11 21:29:25 -07001742 return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001743
1744 case OINDEX:
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001745 if n.Left.Type.Bound >= 0 { // array or string
Keith Randallcfc2aa52015-05-18 16:44:20 -07001746 a := s.expr(n.Left)
1747 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07001748 i = s.extendIndex(i)
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001749 if n.Left.Type.IsString() {
Keith Randallc7081402015-09-10 10:01:15 -07001750 if !n.Bounded {
1751 len := s.newValue1(ssa.OpStringLen, Types[TINT], a)
1752 s.boundsCheck(i, len)
1753 }
1754 ptrtyp := Ptrto(Types[TUINT8])
1755 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
1756 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
1757 return s.newValue2(ssa.OpLoad, Types[TUINT8], ptr, s.mem())
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001758 } else {
Keith Randallc7081402015-09-10 10:01:15 -07001759 if !n.Bounded {
1760 len := s.constInt(Types[TINT], n.Left.Type.Bound)
1761 s.boundsCheck(i, len)
1762 }
1763 return s.newValue2(ssa.OpArrayIndex, n.Left.Type.Type, a, i)
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001764 }
Keith Randallcfc2aa52015-05-18 16:44:20 -07001765 } else { // slice
1766 p := s.addr(n)
Keith Randall8f22b522015-06-11 21:29:25 -07001767 return s.newValue2(ssa.OpLoad, n.Left.Type.Type, p, s.mem())
Keith Randallcfc2aa52015-05-18 16:44:20 -07001768 }
Keith Randalld2fd43a2015-04-15 15:51:25 -07001769
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001770 case OLEN, OCAP:
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001771 switch {
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001772 case n.Left.Type.IsSlice():
1773 op := ssa.OpSliceLen
1774 if n.Op == OCAP {
1775 op = ssa.OpSliceCap
1776 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001777 return s.newValue1(op, Types[TINT], s.expr(n.Left))
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001778 case n.Left.Type.IsString(): // string; not reachable for OCAP
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001779 return s.newValue1(ssa.OpStringLen, Types[TINT], s.expr(n.Left))
Todd Neal707af252015-08-28 15:56:43 -05001780 case n.Left.Type.IsMap(), n.Left.Type.IsChan():
1781 return s.referenceTypeBuiltin(n, s.expr(n.Left))
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001782 default: // array
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001783 return s.constInt(Types[TINT], n.Left.Type.Bound)
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001784 }
1785
Josh Bleecher Snydera2d15802015-08-12 10:12:14 -07001786 case OSPTR:
1787 a := s.expr(n.Left)
1788 if n.Left.Type.IsSlice() {
1789 return s.newValue1(ssa.OpSlicePtr, n.Type, a)
1790 } else {
1791 return s.newValue1(ssa.OpStringPtr, n.Type, a)
1792 }
1793
Keith Randalld1c15a02015-08-04 15:47:22 -07001794 case OITAB:
1795 a := s.expr(n.Left)
1796 return s.newValue1(ssa.OpITab, n.Type, a)
1797
Josh Bleecher Snyder1792b362015-09-05 19:28:27 -07001798 case OEFACE:
1799 tab := s.expr(n.Left)
1800 data := s.expr(n.Right)
Keith Randall808d7c72015-10-07 14:35:25 -07001801 // The frontend allows putting things like struct{*byte} in
1802 // the data portion of an eface. But we don't want struct{*byte}
1803 // as a register type because (among other reasons) the liveness
1804 // analysis is confused by the "fat" variables that result from
1805 // such types being spilled.
1806 // So here we ensure that we are selecting the underlying pointer
1807 // when we build an eface.
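		// For example, a data value of type struct{p *byte} contributes its
		// p field, so the eface data word ends up being a plain *byte.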
1808 for !data.Type.IsPtr() {
1809 switch {
1810 case data.Type.IsArray():
1811 data = s.newValue2(ssa.OpArrayIndex, data.Type.Elem(), data, s.constInt(Types[TINT], 0))
1812 case data.Type.IsStruct():
1813 for i := data.Type.NumFields() - 1; i >= 0; i-- {
1814 f := data.Type.FieldType(i)
1815 if f.Size() == 0 {
1816 // eface type could also be struct{p *byte; q [0]int}
1817 continue
1818 }
1819 data = s.newValue1I(ssa.OpStructSelect, f, data.Type.FieldOff(i), data)
1820 break
1821 }
1822 default:
1823 s.Fatalf("type being put into an eface isn't a pointer")
1824 }
1825 }
Josh Bleecher Snyder1792b362015-09-05 19:28:27 -07001826 return s.newValue2(ssa.OpIMake, n.Type, tab, data)
1827
Keith Randall5505e8c2015-09-12 23:27:26 -07001828 case OSLICE, OSLICEARR:
1829 v := s.expr(n.Left)
1830 var i, j *ssa.Value
1831 if n.Right.Left != nil {
1832 i = s.extendIndex(s.expr(n.Right.Left))
1833 }
1834 if n.Right.Right != nil {
1835 j = s.extendIndex(s.expr(n.Right.Right))
1836 }
1837 p, l, c := s.slice(n.Left.Type, v, i, j, nil)
1838 return s.newValue3(ssa.OpSliceMake, n.Type, p, l, c)
Keith Randall3526cf52015-08-24 23:52:03 -07001839 case OSLICESTR:
Keith Randall5505e8c2015-09-12 23:27:26 -07001840 v := s.expr(n.Left)
1841 var i, j *ssa.Value
1842 if n.Right.Left != nil {
1843 i = s.extendIndex(s.expr(n.Right.Left))
Keith Randall3526cf52015-08-24 23:52:03 -07001844 }
Keith Randall5505e8c2015-09-12 23:27:26 -07001845 if n.Right.Right != nil {
1846 j = s.extendIndex(s.expr(n.Right.Right))
Keith Randall3526cf52015-08-24 23:52:03 -07001847 }
Keith Randall5505e8c2015-09-12 23:27:26 -07001848 p, l, _ := s.slice(n.Left.Type, v, i, j, nil)
1849 return s.newValue2(ssa.OpStringMake, n.Type, p, l)
1850 case OSLICE3, OSLICE3ARR:
1851 v := s.expr(n.Left)
1852 var i *ssa.Value
1853 if n.Right.Left != nil {
1854 i = s.extendIndex(s.expr(n.Right.Left))
Keith Randall3526cf52015-08-24 23:52:03 -07001855 }
Keith Randall5505e8c2015-09-12 23:27:26 -07001856 j := s.extendIndex(s.expr(n.Right.Right.Left))
1857 k := s.extendIndex(s.expr(n.Right.Right.Right))
1858 p, l, c := s.slice(n.Left.Type, v, i, j, k)
1859 return s.newValue3(ssa.OpSliceMake, n.Type, p, l, c)
Keith Randall3526cf52015-08-24 23:52:03 -07001860
Keith Randalld24768e2015-09-09 23:56:59 -07001861 case OCALLFUNC, OCALLINTER, OCALLMETH:
1862 return s.call(n, callNormal)
Josh Bleecher Snyder3d23afb2015-08-12 11:22:16 -07001863
1864 case OGETG:
1865 return s.newValue0(ssa.OpGetG, n.Type)
1866
Keith Randall9d22c102015-09-11 11:02:57 -07001867 case OAPPEND:
1868 // append(s, e1, e2, e3). Compile like:
1869 // ptr,len,cap := s
1870 // newlen := len + 3
1871 // if newlen > s.cap {
1872 // ptr,_,cap = growslice(s, newlen)
1873 // }
1874 // *(ptr+len) = e1
1875 // *(ptr+len+1) = e2
1876 // *(ptr+len+2) = e3
1877 // makeslice(ptr,newlen,cap)
1878
1879 et := n.Type.Type
1880 pt := Ptrto(et)
1881
1882 // Evaluate slice
1883 slice := s.expr(n.List.N)
1884
Keith Randall9d22c102015-09-11 11:02:57 -07001885 // Allocate new blocks
1886 grow := s.f.NewBlock(ssa.BlockPlain)
Keith Randall9d22c102015-09-11 11:02:57 -07001887 assign := s.f.NewBlock(ssa.BlockPlain)
1888
1889 // Decide if we need to grow
Keith Randall9aba7e72015-10-05 13:48:40 -07001890 nargs := int64(count(n.List) - 1)
Keith Randall9d22c102015-09-11 11:02:57 -07001891 p := s.newValue1(ssa.OpSlicePtr, pt, slice)
1892 l := s.newValue1(ssa.OpSliceLen, Types[TINT], slice)
1893 c := s.newValue1(ssa.OpSliceCap, Types[TINT], slice)
1894 nl := s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], l, s.constInt(Types[TINT], nargs))
1895 cmp := s.newValue2(s.ssaOp(OGT, Types[TINT]), Types[TBOOL], nl, c)
Keith Randallb32217a2015-09-17 16:45:10 -07001896 s.vars[&ptrVar] = p
1897 s.vars[&capVar] = c
Keith Randall9d22c102015-09-11 11:02:57 -07001898 b := s.endBlock()
1899 b.Kind = ssa.BlockIf
1900 b.Likely = ssa.BranchUnlikely
1901 b.Control = cmp
1902 b.AddEdgeTo(grow)
1903 b.AddEdgeTo(assign)
1904
1905 // Call growslice
1906 s.startBlock(grow)
1907 taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(n.Type)}, s.sb)
1908
Keith Randall8c5bfcc2015-09-18 15:11:30 -07001909 r := s.rtcall(growslice, true, []*Type{pt, Types[TINT], Types[TINT]}, taddr, p, l, c, nl)
Keith Randall9d22c102015-09-11 11:02:57 -07001910
Keith Randall8c5bfcc2015-09-18 15:11:30 -07001911 s.vars[&ptrVar] = r[0]
1912 // Note: we don't need to read r[1], the result's length. It will be nl.
1913 // (or maybe we should, we just have to spill/restore nl otherwise?)
1914 s.vars[&capVar] = r[2]
Keith Randall9d22c102015-09-11 11:02:57 -07001915 b = s.endBlock()
1916 b.AddEdgeTo(assign)
1917
1918 // assign new elements to slots
1919 s.startBlock(assign)
Keith Randall9aba7e72015-10-05 13:48:40 -07001920
1921 // Evaluate args
1922 args := make([]*ssa.Value, 0, nargs)
Keith Randall808d7c72015-10-07 14:35:25 -07001923 store := make([]bool, 0, nargs)
Keith Randall9aba7e72015-10-05 13:48:40 -07001924 for l := n.List.Next; l != nil; l = l.Next {
Keith Randall808d7c72015-10-07 14:35:25 -07001925 if canSSAType(l.N.Type) {
1926 args = append(args, s.expr(l.N))
1927 store = append(store, true)
1928 } else {
1929 args = append(args, s.addr(l.N))
1930 store = append(store, false)
1931 }
Keith Randall9aba7e72015-10-05 13:48:40 -07001932 }
1933
Keith Randallb32217a2015-09-17 16:45:10 -07001934 p = s.variable(&ptrVar, pt) // generates phi for ptr
1935 c = s.variable(&capVar, Types[TINT]) // generates phi for cap
Keith Randall9d22c102015-09-11 11:02:57 -07001936 p2 := s.newValue2(ssa.OpPtrIndex, pt, p, l)
1937 for i, arg := range args {
1938 addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(Types[TUINTPTR], int64(i)))
Keith Randall808d7c72015-10-07 14:35:25 -07001939 if store[i] {
1940 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, et.Size(), addr, arg, s.mem())
1941 } else {
1942 s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, et.Size(), addr, arg, s.mem())
1943 }
Keith Randall9d22c102015-09-11 11:02:57 -07001944 if haspointers(et) {
1945 // TODO: just one write barrier call for all of these writes?
1946 // TODO: maybe just one writeBarrierEnabled check?
1947 s.insertWB(et, addr)
1948 }
1949 }
1950
1951 // make result
Keith Randallb32217a2015-09-17 16:45:10 -07001952 delete(s.vars, &ptrVar)
1953 delete(s.vars, &capVar)
Keith Randall8c5bfcc2015-09-18 15:11:30 -07001954 return s.newValue3(ssa.OpSliceMake, n.Type, p, nl, c)
Keith Randall9d22c102015-09-11 11:02:57 -07001955
Keith Randalld2fd43a2015-04-15 15:51:25 -07001956 default:
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07001957 s.Unimplementedf("unhandled expr %s", opnames[n.Op])
Keith Randalld2fd43a2015-04-15 15:51:25 -07001958 return nil
1959 }
1960}
1961
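// assign does left = right. A nil right means the zero value of left's type.
// If wb is true and the assignment is compiled as a store, a write barrier
// is inserted after the store.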
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001962func (s *state) assign(left *Node, right *ssa.Value, wb bool) {
Keith Randalld4cc51d2015-08-14 21:47:20 -07001963 if left.Op == ONAME && isblank(left) {
Keith Randalld4cc51d2015-08-14 21:47:20 -07001964 return
1965 }
Keith Randalld4cc51d2015-08-14 21:47:20 -07001966 t := left.Type
1967 dowidth(t)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001968 if right == nil {
1969 // right == nil means use the zero value of the assigned type.
Daniel Morsing66b47812015-06-27 15:45:20 +01001970 if !canSSA(left) {
1971 // if we can't ssa this memory, treat it as just zeroing out the backing memory
1972 addr := s.addr(left)
Keith Randalld2107fc2015-08-24 02:16:19 -07001973 if left.Op == ONAME {
Keith Randallb32217a2015-09-17 16:45:10 -07001974 s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
Keith Randalld2107fc2015-08-24 02:16:19 -07001975 }
Keith Randallb32217a2015-09-17 16:45:10 -07001976 s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, t.Size(), addr, s.mem())
Daniel Morsing66b47812015-06-27 15:45:20 +01001977 return
1978 }
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001979 right = s.zeroVal(t)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001980 }
1981 if left.Op == ONAME && canSSA(left) {
1982 // Update variable assignment.
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001983 s.vars[left] = right
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001984 return
1985 }
1986 // not ssa-able. Treat as a store.
1987 addr := s.addr(left)
Keith Randalld2107fc2015-08-24 02:16:19 -07001988 if left.Op == ONAME {
Keith Randallb32217a2015-09-17 16:45:10 -07001989 s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
Keith Randalld2107fc2015-08-24 02:16:19 -07001990 }
Keith Randallb32217a2015-09-17 16:45:10 -07001991 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, right, s.mem())
Keith Randalle3869a62015-09-07 23:18:02 -07001992 if wb {
Keith Randall9d22c102015-09-11 11:02:57 -07001993 s.insertWB(left.Type, addr)
Keith Randalle3869a62015-09-07 23:18:02 -07001994 }
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001995}
1996
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07001997// zeroVal returns the zero value for type t.
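// For example, the zero value of a complex128 is built as an OpComplexMake of
// two float64 zero constants, and the zero value of a string is an empty
// OpConstString.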
1998func (s *state) zeroVal(t *Type) *ssa.Value {
1999 switch {
Keith Randall9cb332e2015-07-28 14:19:20 -07002000 case t.IsInteger():
2001 switch t.Size() {
2002 case 1:
2003 return s.constInt8(t, 0)
2004 case 2:
2005 return s.constInt16(t, 0)
2006 case 4:
2007 return s.constInt32(t, 0)
2008 case 8:
2009 return s.constInt64(t, 0)
2010 default:
2011 s.Fatalf("bad sized integer type %s", t)
2012 }
Todd Neal752fe4d2015-08-25 19:21:45 -05002013 case t.IsFloat():
2014 switch t.Size() {
2015 case 4:
2016 return s.constFloat32(t, 0)
2017 case 8:
2018 return s.constFloat64(t, 0)
2019 default:
2020 s.Fatalf("bad sized float type %s", t)
2021 }
David Chase52578582015-08-28 14:24:10 -04002022 case t.IsComplex():
2023 switch t.Size() {
2024 case 8:
2025 z := s.constFloat32(Types[TFLOAT32], 0)
Keith Randalla5cffb62015-08-28 13:52:26 -07002026 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
David Chase52578582015-08-28 14:24:10 -04002027 case 16:
2028 z := s.constFloat64(Types[TFLOAT64], 0)
Keith Randalla5cffb62015-08-28 13:52:26 -07002029 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
David Chase52578582015-08-28 14:24:10 -04002030 default:
2031 s.Fatalf("bad sized complex type %s", t)
2032 }
2033
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07002034 case t.IsString():
Keith Randall9cb332e2015-07-28 14:19:20 -07002035 return s.entryNewValue0A(ssa.OpConstString, t, "")
2036 case t.IsPtr():
2037 return s.entryNewValue0(ssa.OpConstNil, t)
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07002038 case t.IsBoolean():
Josh Bleecher Snydercea44142015-09-08 16:52:25 -07002039 return s.constBool(false)
Keith Randall9f954db2015-08-18 10:26:28 -07002040 case t.IsInterface():
2041 return s.entryNewValue0(ssa.OpConstInterface, t)
2042 case t.IsSlice():
2043 return s.entryNewValue0(ssa.OpConstSlice, t)
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07002044 }
2045 s.Unimplementedf("zero for type %v not implemented", t)
2046 return nil
2047}
2048
Keith Randalld24768e2015-09-09 23:56:59 -07002049type callKind int8
2050
2051const (
2052 callNormal callKind = iota
2053 callDefer
2054 callGo
2055)
2056
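// call lowers a function, method, or interface call n according to k.
// For callDefer and callGo it also writes the argument size and closure
// pointer consumed by Deferproc/Newproc. It returns the value of the call's
// first result, or nil if there are no results or k != callNormal.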
2057func (s *state) call(n *Node, k callKind) *ssa.Value {
2058 var sym *Sym // target symbol (if static)
2059 var closure *ssa.Value // ptr to closure to run (if dynamic)
2060 var codeptr *ssa.Value // ptr to target code (if dynamic)
2061 var rcvr *ssa.Value // receiver to set
2062 fn := n.Left
2063 switch n.Op {
2064 case OCALLFUNC:
2065 if k == callNormal && fn.Op == ONAME && fn.Class == PFUNC {
2066 sym = fn.Sym
2067 break
2068 }
2069 closure = s.expr(fn)
2070 if closure == nil {
2071 return nil // TODO: remove when expr always returns non-nil
2072 }
2073 case OCALLMETH:
2074 if fn.Op != ODOTMETH {
2075 Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
2076 }
2077 if fn.Right.Op != ONAME {
2078 Fatalf("OCALLMETH: n.Left.Right not a ONAME: %v", fn.Right)
2079 }
2080 if k == callNormal {
2081 sym = fn.Right.Sym
2082 break
2083 }
2084 n2 := *fn.Right
2085 n2.Class = PFUNC
2086 closure = s.expr(&n2)
2087 // Note: receiver is already assigned in n.List, so we don't
2088 // want to set it here.
2089 case OCALLINTER:
2090 if fn.Op != ODOTINTER {
2091 Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", Oconv(int(fn.Op), 0))
2092 }
2093 i := s.expr(fn.Left)
2094 itab := s.newValue1(ssa.OpITab, Types[TUINTPTR], i)
2095 itabidx := fn.Xoffset + 3*int64(Widthptr) + 8 // offset of fun field in runtime.itab
2096 itab = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], itabidx, itab)
2097 if k == callNormal {
2098 codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], itab, s.mem())
2099 } else {
2100 closure = itab
2101 }
2102 rcvr = s.newValue1(ssa.OpIData, Types[TUINTPTR], i)
2103 }
2104 dowidth(fn.Type)
2105 stksize := fn.Type.Argwid // includes receiver
2106
2107 // Run all argument assignments. The arg slots have already
2108 // been offset by the appropriate amount (+2*widthptr for go/defer,
2109 // +widthptr for interface calls).
2110 // For OCALLMETH, the receiver is set in these statements.
2111 s.stmtList(n.List)
2112
2113 // Set receiver (for interface calls)
2114 if rcvr != nil {
2115 var argStart int64
2116 if HasLinkRegister() {
2117 argStart += int64(Widthptr)
2118 }
2119 if k != callNormal {
2120 argStart += int64(2 * Widthptr)
2121 }
2122 addr := s.entryNewValue1I(ssa.OpOffPtr, Types[TUINTPTR], argStart, s.sp)
Keith Randallb32217a2015-09-17 16:45:10 -07002123 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, rcvr, s.mem())
Keith Randalld24768e2015-09-09 23:56:59 -07002124 }
2125
2126 // Defer/go args
2127 if k != callNormal {
2128 // Write argsize and closure (args to Newproc/Deferproc).
2129 argsize := s.constInt32(Types[TUINT32], int32(stksize))
Keith Randallb32217a2015-09-17 16:45:10 -07002130 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, s.sp, argsize, s.mem())
Keith Randalld24768e2015-09-09 23:56:59 -07002131 addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(Types[TUINTPTR]), int64(Widthptr), s.sp)
Keith Randallb32217a2015-09-17 16:45:10 -07002132 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem())
Keith Randalld24768e2015-09-09 23:56:59 -07002133 stksize += 2 * int64(Widthptr)
2134 }
2135
2136 // call target
2137 bNext := s.f.NewBlock(ssa.BlockPlain)
2138 var call *ssa.Value
2139 switch {
2140 case k == callDefer:
2141 call = s.newValue1(ssa.OpDeferCall, ssa.TypeMem, s.mem())
2142 case k == callGo:
2143 call = s.newValue1(ssa.OpGoCall, ssa.TypeMem, s.mem())
2144 case closure != nil:
2145 codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], closure, s.mem())
2146 call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, codeptr, closure, s.mem())
2147 case codeptr != nil:
2148 call = s.newValue2(ssa.OpInterCall, ssa.TypeMem, codeptr, s.mem())
2149 case sym != nil:
2150 call = s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, sym, s.mem())
2151 default:
2152 Fatalf("bad call type %s %v", opnames[n.Op], n)
2153 }
2154 call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
2155
2156 // Finish call block
Keith Randallb32217a2015-09-17 16:45:10 -07002157 s.vars[&memVar] = call
Keith Randalld24768e2015-09-09 23:56:59 -07002158 b := s.endBlock()
2159 b.Kind = ssa.BlockCall
2160 b.Control = call
2161 b.AddEdgeTo(bNext)
2162
2163 // Read result from stack at the start of the fallthrough block
2164 s.startBlock(bNext)
2165 var titer Iter
2166 fp := Structfirst(&titer, Getoutarg(n.Left.Type))
2167 if fp == nil || k != callNormal {
2168 // call has no return value. Continue with the next statement.
2169 return nil
2170 }
2171 a := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(fp.Type), fp.Width, s.sp)
2172 return s.newValue2(ssa.OpLoad, fp.Type, a, call)
2173}
2174
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07002175// etypesign returns the signed-ness of e, for integer/pointer etypes.
2176// -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer.
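// For example, etypesign(TINT32) == -1, etypesign(TUINTPTR) == +1, and
// etypesign(TFLOAT64) == 0.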
2177func etypesign(e uint8) int8 {
2178 switch e {
2179 case TINT8, TINT16, TINT32, TINT64, TINT:
2180 return -1
2181 case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR, TUNSAFEPTR:
2182 return +1
2183 }
2184 return 0
2185}
2186
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07002187// addr converts the address of the expression n to SSA, adds it to s and returns the SSA result.
Keith Randallc3c84a22015-07-13 15:55:37 -07002188// The value that the returned Value represents is guaranteed to be non-nil.
Keith Randallcfc2aa52015-05-18 16:44:20 -07002189func (s *state) addr(n *Node) *ssa.Value {
2190 switch n.Op {
2191 case ONAME:
Keith Randall290d8fc2015-06-10 15:03:06 -07002192 switch n.Class {
2193 case PEXTERN:
Keith Randallcfc2aa52015-05-18 16:44:20 -07002194 // global variable
Keith Randall8c46aa52015-06-19 21:02:28 -07002195 aux := &ssa.ExternSymbol{n.Type, n.Sym}
Josh Bleecher Snyder67df7932015-07-28 11:08:44 -07002196 v := s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb)
2197 // TODO: Make OpAddr use AuxInt as well as Aux.
2198 if n.Xoffset != 0 {
2199 v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v)
2200 }
2201 return v
David Chase956f3192015-09-11 16:40:05 -04002202 case PPARAM:
2203 // parameter slot
Josh Bleecher Snyder596ddf42015-06-29 11:56:28 -07002204 v := s.decladdrs[n]
2205 if v == nil {
Josh Bleecher Snyder0a133cdd2015-07-03 20:28:56 -07002206 if flag_race != 0 && n.String() == ".fp" {
2207 s.Unimplementedf("race detector mishandles nodfp")
2208 }
Josh Bleecher Snyder596ddf42015-06-29 11:56:28 -07002209 s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
2210 }
2211 return v
Keith Randalld2107fc2015-08-24 02:16:19 -07002212 case PAUTO:
2213 // We need to regenerate the address of autos
2214 // at every use. This prevents LEA instructions
2215 // from occurring before the corresponding VarDef
2216 // op and confusing the liveness analysis into thinking
2217 // the variable is live at function entry.
2218 // TODO: I'm not sure if this really works or we're just
2219 // getting lucky. We might need a real dependency edge
2220 // between vardef and addr ops.
2221 aux := &ssa.AutoSymbol{Typ: n.Type, Node: n}
2222 return s.newValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
David Chase956f3192015-09-11 16:40:05 -04002223 case PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
2224 aux := &ssa.ArgSymbol{Typ: n.Type, Node: n}
2225 return s.newValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
2226 case PAUTO | PHEAP, PPARAM | PHEAP, PPARAMOUT | PHEAP, PPARAMREF:
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01002227 return s.expr(n.Name.Heapaddr)
Keith Randall290d8fc2015-06-10 15:03:06 -07002228 default:
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07002229 s.Unimplementedf("variable address class %v not implemented", n.Class)
Keith Randall290d8fc2015-06-10 15:03:06 -07002230 return nil
Keith Randallcfc2aa52015-05-18 16:44:20 -07002231 }
Keith Randallcfc2aa52015-05-18 16:44:20 -07002232 case OINDREG:
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07002233 // indirect off a register
Keith Randallcfc2aa52015-05-18 16:44:20 -07002234 // used for storing/loading arguments/returns to/from callees
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07002235 if int(n.Reg) != Thearch.REGSP {
2236 s.Unimplementedf("OINDREG of non-SP register %s in addr: %v", obj.Rconv(int(n.Reg)), n)
2237 return nil
2238 }
Keith Randall8f22b522015-06-11 21:29:25 -07002239 return s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002240 case OINDEX:
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002241 if n.Left.Type.IsSlice() {
Keith Randallcfc2aa52015-05-18 16:44:20 -07002242 a := s.expr(n.Left)
2243 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07002244 i = s.extendIndex(i)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002245 len := s.newValue1(ssa.OpSliceLen, Types[TUINTPTR], a)
Keith Randall46e62f82015-08-18 14:17:30 -07002246 if !n.Bounded {
2247 s.boundsCheck(i, len)
2248 }
Keith Randall8f22b522015-06-11 21:29:25 -07002249 p := s.newValue1(ssa.OpSlicePtr, Ptrto(n.Left.Type.Type), a)
2250 return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), p, i)
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002251 } else { // array
2252 a := s.addr(n.Left)
2253 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07002254 i = s.extendIndex(i)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002255 len := s.constInt(Types[TINT], n.Left.Type.Bound)
Keith Randall46e62f82015-08-18 14:17:30 -07002256 if !n.Bounded {
2257 s.boundsCheck(i, len)
2258 }
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002259 return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), a, i)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002260 }
Todd Nealb383de22015-07-13 21:22:16 -05002261 case OIND:
2262 p := s.expr(n.Left)
2263 s.nilCheck(p)
2264 return p
Keith Randallc3c84a22015-07-13 15:55:37 -07002265 case ODOT:
2266 p := s.addr(n.Left)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002267 return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
Keith Randallc3c84a22015-07-13 15:55:37 -07002268 case ODOTPTR:
2269 p := s.expr(n.Left)
2270 s.nilCheck(p)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002271 return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
David Chase956f3192015-09-11 16:40:05 -04002272 case OCLOSUREVAR:
2273 return s.newValue2(ssa.OpAddPtr, Ptrto(n.Type),
2274 s.entryNewValue0(ssa.OpGetClosurePtr, Types[TUINTPTR]),
2275 s.constIntPtr(Types[TUINTPTR], n.Xoffset))
David Chase32ffbf72015-10-08 17:14:12 -04002276 case OPARAM:
2277 p := n.Left
2278 if p.Op != ONAME || !(p.Class == PPARAM|PHEAP || p.Class == PPARAMOUT|PHEAP) {
2279 s.Fatalf("OPARAM not of ONAME,{PPARAM,PPARAMOUT}|PHEAP, instead %s", nodedump(p, 0))
2280 }
2281
2282 // Recover original offset to address passed-in param value.
2283 original_p := *p
2284 original_p.Xoffset = n.Xoffset
2285 aux := &ssa.ArgSymbol{Typ: n.Type, Node: &original_p}
2286 return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002287 default:
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07002288 s.Unimplementedf("unhandled addr %v", Oconv(int(n.Op), 0))
Keith Randallcfc2aa52015-05-18 16:44:20 -07002289 return nil
2290 }
2291}
2292
Keith Randall290d8fc2015-06-10 15:03:06 -07002293// canSSA reports whether n is SSA-able.
2294// n must be an ONAME.
2295func canSSA(n *Node) bool {
2296 if n.Op != ONAME {
Daniel Morsing66b47812015-06-27 15:45:20 +01002297 return false
Keith Randall290d8fc2015-06-10 15:03:06 -07002298 }
2299 if n.Addrtaken {
2300 return false
2301 }
2302 if n.Class&PHEAP != 0 {
2303 return false
2304 }
Josh Bleecher Snyder96548732015-08-28 13:35:32 -07002305 switch n.Class {
2306 case PEXTERN, PPARAMOUT, PPARAMREF:
Keith Randall290d8fc2015-06-10 15:03:06 -07002307 return false
2308 }
Keith Randall8a1f6212015-09-08 21:28:44 -07002309 if n.Class == PPARAM && n.String() == ".this" {
2310 // wrappers generated by genwrapper need to update
2311 // the .this pointer in place.
2312 return false
2313 }
Keith Randall9f954db2015-08-18 10:26:28 -07002314 return canSSAType(n.Type)
2315 // TODO: try to make more variables SSAable?
2316}
2317
2318// canSSAType reports whether variables of type t are SSA-able.
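// For example, strings, slices, and pointers are SSA-able, while arrays and
// (for now) structs are not; anything wider than 4*Widthptr is also rejected.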
2319func canSSAType(t *Type) bool {
2320 dowidth(t)
2321 if t.Width > int64(4*Widthptr) {
2322 // 4*Widthptr is an arbitrary constant. We want it
2323 // to be at least 3*Widthptr so slices can be registerized.
2324 // Too big and we'll introduce too much register pressure.
Daniel Morsing66b47812015-06-27 15:45:20 +01002325 return false
2326 }
Keith Randall9f954db2015-08-18 10:26:28 -07002327 switch t.Etype {
2328 case TARRAY:
2329 if Isslice(t) {
2330 return true
2331 }
2332 // We can't do arrays because dynamic indexing is
2333 // not supported on SSA variables.
2334 // TODO: maybe allow if length is <=1? All indexes
2335 // are constant? Might be good for the arrays
2336 // introduced by the compiler for variadic functions.
2337 return false
2338 case TSTRUCT:
2339 if countfield(t) > 4 {
2340 // 4 is an arbitrary constant. Same reasoning
2341 // as above, lots of small fields would waste
2342 // register space needed by other values.
2343 return false
2344 }
2345 for t1 := t.Type; t1 != nil; t1 = t1.Down {
2346 if !canSSAType(t1.Type) {
2347 return false
2348 }
2349 }
2350 return false // until it is implemented
2351 //return true
2352 default:
2353 return true
2354 }
Keith Randall290d8fc2015-06-10 15:03:06 -07002355}
2356
Keith Randallcfc2aa52015-05-18 16:44:20 -07002357// nilCheck generates nil pointer checking code.
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002358// Starts a new block on return, unless nil checks are disabled.
Josh Bleecher Snyder7e74e432015-07-24 11:55:52 -07002359// Used only for automatically inserted nil checks,
2360// not for user code like 'x != nil'.
Keith Randallcfc2aa52015-05-18 16:44:20 -07002361func (s *state) nilCheck(ptr *ssa.Value) {
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002362 if Disable_checknil != 0 {
2363 return
2364 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002365 c := s.newValue1(ssa.OpIsNonNil, Types[TBOOL], ptr)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002366 b := s.endBlock()
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002367 b.Kind = ssa.BlockIf
Keith Randallcfc2aa52015-05-18 16:44:20 -07002368 b.Control = c
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002369 b.Likely = ssa.BranchLikely
Keith Randallcfc2aa52015-05-18 16:44:20 -07002370 bNext := s.f.NewBlock(ssa.BlockPlain)
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002371 bPanic := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05002372 b.AddEdgeTo(bNext)
2373 b.AddEdgeTo(bPanic)
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002374 s.startBlock(bPanic)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002375 // TODO: implicit nil checks somehow?
Keith Randallf5c53e02015-09-09 18:03:41 -07002376 chk := s.newValue2(ssa.OpPanicNilCheck, ssa.TypeMem, ptr, s.mem())
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002377 s.endBlock()
Keith Randallf5c53e02015-09-09 18:03:41 -07002378 bPanic.Kind = ssa.BlockExit
2379 bPanic.Control = chk
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002380 s.startBlock(bNext)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002381}
2382
2383// boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not.
2384// Starts a new block on return.
2385func (s *state) boundsCheck(idx, len *ssa.Value) {
Keith Randall8d236812015-08-18 15:25:40 -07002386 if Debug['B'] != 0 {
2387 return
2388 }
Keith Randallcfc2aa52015-05-18 16:44:20 -07002389 // TODO: convert index to full width?
2390 // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero.
2391
2392 // bounds check
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002393 cmp := s.newValue2(ssa.OpIsInBounds, Types[TBOOL], idx, len)
Keith Randall3a70bf92015-09-17 16:54:15 -07002394 s.check(cmp, Panicindex)
Keith Randall3526cf52015-08-24 23:52:03 -07002395}
2396
2397// sliceBoundsCheck generates slice bounds checking code. Checks if 0 <= idx <= len, branches to exit if not.
2398// Starts a new block on return.
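// Unlike boundsCheck, the upper bound here is inclusive: for a 5-element
// slice s, s[5] fails boundsCheck but s[5:] passes sliceBoundsCheck.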
2399func (s *state) sliceBoundsCheck(idx, len *ssa.Value) {
2400 if Debug['B'] != 0 {
2401 return
2402 }
2403 // TODO: convert index to full width?
2404 // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero.
2405
2406 // bounds check
2407 cmp := s.newValue2(ssa.OpIsSliceInBounds, Types[TBOOL], idx, len)
Keith Randall3a70bf92015-09-17 16:54:15 -07002408 s.check(cmp, panicslice)
Keith Randall3526cf52015-08-24 23:52:03 -07002409}
2410
Keith Randall3a70bf92015-09-17 16:54:15 -07002411// If cmp (a bool) is false, panic using the given function.
2412func (s *state) check(cmp *ssa.Value, fn *Node) {
Keith Randallcfc2aa52015-05-18 16:44:20 -07002413 b := s.endBlock()
2414 b.Kind = ssa.BlockIf
2415 b.Control = cmp
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002416 b.Likely = ssa.BranchLikely
Keith Randallcfc2aa52015-05-18 16:44:20 -07002417 bNext := s.f.NewBlock(ssa.BlockPlain)
Keith Randall8d236812015-08-18 15:25:40 -07002418 bPanic := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05002419 b.AddEdgeTo(bNext)
2420 b.AddEdgeTo(bPanic)
Keith Randall8d236812015-08-18 15:25:40 -07002421 s.startBlock(bPanic)
Keith Randall3a70bf92015-09-17 16:54:15 -07002422 // The panic call takes/returns memory to ensure that the right
Keith Randall8d236812015-08-18 15:25:40 -07002423 // memory state is observed if the panic happens.
Keith Randall3a70bf92015-09-17 16:54:15 -07002424 s.rtcall(fn, false, nil)
2425
Keith Randallcfc2aa52015-05-18 16:44:20 -07002426 s.startBlock(bNext)
2427}
2428
Keith Randall8c5bfcc2015-09-18 15:11:30 -07002429// rtcall issues a call to the given runtime function fn with the listed args.
2430// Returns a slice of results of the given result types.
2431// The call is added to the end of the current block.
2432// If returns is false, the block is marked as an exit block.
2433// If returns is true, the block is marked as a call block. A new block
2434// is started to load the return values.
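// For example, the OAPPEND case above grows a slice with
//	s.rtcall(growslice, true, []*Type{pt, Types[TINT], Types[TINT]}, taddr, p, l, c, nl)
// and reads the new ptr and cap back from results 0 and 2 (result 1 is the new length).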
2435func (s *state) rtcall(fn *Node, returns bool, results []*Type, args ...*ssa.Value) []*ssa.Value {
2436 // Write args to the stack
2437 var off int64 // TODO: arch-dependent starting offset?
2438 for _, arg := range args {
2439 t := arg.Type
2440 off = Rnd(off, t.Alignment())
2441 ptr := s.sp
2442 if off != 0 {
2443 ptr = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], off, s.sp)
2444 }
2445 size := t.Size()
2446 s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, size, ptr, arg, s.mem())
2447 off += size
2448 }
2449 off = Rnd(off, int64(Widthptr))
2450
2451 // Issue call
2452 call := s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, fn.Sym, s.mem())
2453 s.vars[&memVar] = call
2454
2455 // Finish block
2456 b := s.endBlock()
2457 if !returns {
2458 b.Kind = ssa.BlockExit
2459 b.Control = call
2460 call.AuxInt = off
2461 if len(results) > 0 {
2462 Fatalf("panic call can't have results")
2463 }
2464 return nil
2465 }
2466 b.Kind = ssa.BlockCall
2467 b.Control = call
2468 bNext := s.f.NewBlock(ssa.BlockPlain)
2469 b.AddEdgeTo(bNext)
2470 s.startBlock(bNext)
2471
2472 // Load results
2473 res := make([]*ssa.Value, len(results))
2474 for i, t := range results {
2475 off = Rnd(off, t.Alignment())
2476 ptr := s.sp
2477 if off != 0 {
2478 ptr = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], off, s.sp)
2479 }
2480 res[i] = s.newValue2(ssa.OpLoad, t, ptr, s.mem())
2481 off += t.Size()
2482 }
2483 off = Rnd(off, int64(Widthptr))
2484
2485 // Remember how much callee stack space we needed.
2486 call.AuxInt = off
2487
2488 return res
2489}
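// Usage sketch (fn, arg0, and arg1 are placeholder names, not code from
// this file): a runtime helper with two results would be issued as
//
//	res := s.rtcall(fn, true, []*Type{Types[TINT], Types[TBOOL]}, arg0, arg1)
//	x, ok := res[0], res[1]
//
// while a never-returning panic helper passes returns=false and nil results,
// as check does above.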
2490
Keith Randall9d22c102015-09-11 11:02:57 -07002491// insertWB inserts a write barrier. A value of type t has already
2492// been stored at location p. Tell the runtime about this write.
2493// Note: there must be no GC suspension points between the write and
2494// the call that this function inserts.
2495func (s *state) insertWB(t *Type, p *ssa.Value) {
2496 // if writeBarrierEnabled {
2497 // typedmemmove_nostore(&t, p)
2498 // }
2499 bThen := s.f.NewBlock(ssa.BlockPlain)
Keith Randall9d22c102015-09-11 11:02:57 -07002500
2501 aux := &ssa.ExternSymbol{Types[TBOOL], syslook("writeBarrierEnabled", 0).Sym}
2502 flagaddr := s.newValue1A(ssa.OpAddr, Ptrto(Types[TBOOL]), aux, s.sb)
2503 flag := s.newValue2(ssa.OpLoad, Types[TBOOL], flagaddr, s.mem())
2504 b := s.endBlock()
2505 b.Kind = ssa.BlockIf
2506 b.Likely = ssa.BranchUnlikely
2507 b.Control = flag
2508 b.AddEdgeTo(bThen)
Keith Randall9d22c102015-09-11 11:02:57 -07002509
2510 s.startBlock(bThen)
2511 // TODO: writebarrierptr_nostore if just one pointer word (or a few?)
2512 taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(t)}, s.sb)
Keith Randall8c5bfcc2015-09-18 15:11:30 -07002513 s.rtcall(typedmemmove_nostore, true, nil, taddr, p)
Keith Randall9d22c102015-09-11 11:02:57 -07002514
Keith Randall8c5bfcc2015-09-18 15:11:30 -07002515 b.AddEdgeTo(s.curBlock)
Keith Randall9d22c102015-09-11 11:02:57 -07002516}
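// Typical use, sketched (addr and val are placeholder names): the caller
// first emits the pointer store and then calls insertWB, so the flag test
// and the typedmemmove_nostore call immediately follow the write, e.g.
//
//	s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, val, s.mem())
//	s.insertWB(t, addr)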
2517
Keith Randall5505e8c2015-09-12 23:27:26 -07002518// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
2519// i,j,k may be nil, in which case they are set to their default value.
2520// t is a slice, ptr to array, or string type.
2521func (s *state) slice(t *Type, v, i, j, k *ssa.Value) (p, l, c *ssa.Value) {
2522 var elemtype *Type
2523 var ptrtype *Type
2524 var ptr *ssa.Value
2525 var len *ssa.Value
2526 var cap *ssa.Value
2527 zero := s.constInt(Types[TINT], 0)
2528 switch {
2529 case t.IsSlice():
2530 elemtype = t.Type
2531 ptrtype = Ptrto(elemtype)
2532 ptr = s.newValue1(ssa.OpSlicePtr, ptrtype, v)
2533 len = s.newValue1(ssa.OpSliceLen, Types[TINT], v)
2534 cap = s.newValue1(ssa.OpSliceCap, Types[TINT], v)
2535 case t.IsString():
2536 elemtype = Types[TUINT8]
2537 ptrtype = Ptrto(elemtype)
2538 ptr = s.newValue1(ssa.OpStringPtr, ptrtype, v)
2539 len = s.newValue1(ssa.OpStringLen, Types[TINT], v)
2540 cap = len
2541 case t.IsPtr():
2542 if !t.Type.IsArray() {
2543 s.Fatalf("bad ptr to array in slice %v\n", t)
2544 }
2545 elemtype = t.Type.Type
2546 ptrtype = Ptrto(elemtype)
2547 s.nilCheck(v)
2548 ptr = v
2549 len = s.constInt(Types[TINT], t.Type.Bound)
2550 cap = len
2551 default:
2552 s.Fatalf("bad type in slice %v\n", t)
2553 }
2554
2555 // Set default values
2556 if i == nil {
2557 i = zero
2558 }
2559 if j == nil {
2560 j = len
2561 }
2562 if k == nil {
2563 k = cap
2564 }
2565
2566 // Panic if slice indices are not in bounds.
2567 s.sliceBoundsCheck(i, j)
2568 if j != k {
2569 s.sliceBoundsCheck(j, k)
2570 }
2571 if k != cap {
2572 s.sliceBoundsCheck(k, cap)
2573 }
2574
2575 // Generate the following code assuming that indexes are in bounds.
2576 // The conditional is to make sure that we don't generate a slice
2577 // that points to the next object in memory.
2578 // rlen = (SubPtr j i)
2579 // rcap = (SubPtr k i)
2580 // p = ptr
2581 // if rcap != 0 {
2582 //   p = (AddPtr ptr (MulPtr i (ConstPtr elemsize)))
2583 // }
2584 // result = (SliceMake p rlen rcap)
2585 rlen := s.newValue2(ssa.OpSubPtr, Types[TINT], j, i)
2586 var rcap *ssa.Value
2587 switch {
2588 case t.IsString():
2589 // Capacity of the result is unimportant. However, we use
2590 // rcap to test whether we've generated a zero-length slice.
2591 // For strings, use the length for that test.
2592 rcap = rlen
2593 case j == k:
2594 rcap = rlen
2595 default:
2596 rcap = s.newValue2(ssa.OpSubPtr, Types[TINT], k, i)
2597 }
2598
Keith Randallb32217a2015-09-17 16:45:10 -07002599 s.vars[&ptrVar] = ptr
Keith Randall5505e8c2015-09-12 23:27:26 -07002600
2601 // Generate code to test the resulting slice length.
2602 var cmp *ssa.Value
2603 if s.config.IntSize == 8 {
2604 cmp = s.newValue2(ssa.OpNeq64, Types[TBOOL], rcap, s.constInt(Types[TINT], 0))
2605 } else {
2606 cmp = s.newValue2(ssa.OpNeq32, Types[TBOOL], rcap, s.constInt(Types[TINT], 0))
2607 }
2608
2609 b := s.endBlock()
2610 b.Kind = ssa.BlockIf
2611 b.Likely = ssa.BranchLikely
2612 b.Control = cmp
2613
2614 // Generate code for non-zero length slice case.
2615 nz := s.f.NewBlock(ssa.BlockPlain)
2616 b.AddEdgeTo(nz)
2617 s.startBlock(nz)
2618 var inc *ssa.Value
2619 if elemtype.Width == 1 {
2620 inc = i
2621 } else {
2622 inc = s.newValue2(ssa.OpMulPtr, Types[TUINTPTR], i, s.constInt(Types[TINT], elemtype.Width))
2623 }
Keith Randallb32217a2015-09-17 16:45:10 -07002624 s.vars[&ptrVar] = s.newValue2(ssa.OpAddPtr, ptrtype, ptr, inc)
Keith Randall5505e8c2015-09-12 23:27:26 -07002625 s.endBlock()
2626
2627 // All done.
2628 merge := s.f.NewBlock(ssa.BlockPlain)
2629 b.AddEdgeTo(merge)
2630 nz.AddEdgeTo(merge)
2631 s.startBlock(merge)
Keith Randallb32217a2015-09-17 16:45:10 -07002632 rptr := s.variable(&ptrVar, ptrtype)
2633 delete(s.vars, &ptrVar)
Keith Randall5505e8c2015-09-12 23:27:26 -07002634 return rptr, rlen, rcap
2635}
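// For example (a sketch of the common case): a Go expression v[i:j] on a
// slice reaches here with k == nil, so k defaults to cap and the only
// checks emitted are 0 <= i <= j and j <= cap; the caller then packages
// the returned ptr/len/cap with OpSliceMake.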
2636
David Chase42825882015-08-20 15:14:20 -04002637type u2fcvtTab struct {
2638 geq, cvt2F, and, rsh, or, add ssa.Op
2639 one func(*state, ssa.Type, int64) *ssa.Value
2640}
2641
2642var u64_f64 u2fcvtTab = u2fcvtTab{
2643 geq: ssa.OpGeq64,
2644 cvt2F: ssa.OpCvt64to64F,
2645 and: ssa.OpAnd64,
2646 rsh: ssa.OpRsh64Ux64,
2647 or: ssa.OpOr64,
2648 add: ssa.OpAdd64F,
2649 one: (*state).constInt64,
2650}
2651
2652var u64_f32 u2fcvtTab = u2fcvtTab{
2653 geq: ssa.OpGeq64,
2654 cvt2F: ssa.OpCvt64to32F,
2655 and: ssa.OpAnd64,
2656 rsh: ssa.OpRsh64Ux64,
2657 or: ssa.OpOr64,
2658 add: ssa.OpAdd32F,
2659 one: (*state).constInt64,
2660}
2661
2662// Excess generality on a machine with 64-bit integer registers.
2663// Not used on AMD64.
2664var u32_f32 u2fcvtTab = u2fcvtTab{
2665 geq: ssa.OpGeq32,
2666 cvt2F: ssa.OpCvt32to32F,
2667 and: ssa.OpAnd32,
2668 rsh: ssa.OpRsh32Ux32,
2669 or: ssa.OpOr32,
2670 add: ssa.OpAdd32F,
2671 one: func(s *state, t ssa.Type, x int64) *ssa.Value {
2672 return s.constInt32(t, int32(x))
2673 },
2674}
2675
2676func (s *state) uint64Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2677 return s.uintTofloat(&u64_f64, n, x, ft, tt)
2678}
2679
2680func (s *state) uint64Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2681 return s.uintTofloat(&u64_f32, n, x, ft, tt)
2682}
2683
2684func (s *state) uintTofloat(cvttab *u2fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2685 // if x >= 0 {
2686 // result = (floatY) x
2687 // } else {
2688 // y = uintX(x) ; y = x & 1
2689 // z = uintX(x) ; z = z >> 1
2691 // z = z | y
David Chase73151062015-08-26 14:25:40 -04002692 // result = floatY(z)
2693 // result = result + result
David Chase42825882015-08-20 15:14:20 -04002694 // }
2695 //
2696 // Code borrowed from old code generator.
2697 // What's going on: large 64-bit "unsigned" looks like
2698 // negative number to hardware's integer-to-float
2699 // conversion. However, because the mantissa is only
2700 // 63 bits, we don't need the LSB, so instead we do an
2701 // unsigned right shift (divide by two), convert, and
2702 // double. However, before we do that, we need to be
2703 // sure that we do not lose a "1" if that made the
2704 // difference in the resulting rounding. Therefore, we
2705 // preserve it, and OR (not ADD) it back in. The case
2706 // that matters is when the eleven discarded bits are
2707 // equal to 10000000001; that rounds up, and the 1 cannot
2708 // be lost else it would round down if the LSB of the
2709 // candidate mantissa is 0.
2710 cmp := s.newValue2(cvttab.geq, Types[TBOOL], x, s.zeroVal(ft))
2711 b := s.endBlock()
2712 b.Kind = ssa.BlockIf
2713 b.Control = cmp
2714 b.Likely = ssa.BranchLikely
2715
2716 bThen := s.f.NewBlock(ssa.BlockPlain)
2717 bElse := s.f.NewBlock(ssa.BlockPlain)
2718 bAfter := s.f.NewBlock(ssa.BlockPlain)
2719
Todd Neal47d67992015-08-28 21:36:29 -05002720 b.AddEdgeTo(bThen)
David Chase42825882015-08-20 15:14:20 -04002721 s.startBlock(bThen)
2722 a0 := s.newValue1(cvttab.cvt2F, tt, x)
2723 s.vars[n] = a0
2724 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002725 bThen.AddEdgeTo(bAfter)
David Chase42825882015-08-20 15:14:20 -04002726
Todd Neal47d67992015-08-28 21:36:29 -05002727 b.AddEdgeTo(bElse)
David Chase42825882015-08-20 15:14:20 -04002728 s.startBlock(bElse)
2729 one := cvttab.one(s, ft, 1)
2730 y := s.newValue2(cvttab.and, ft, x, one)
2731 z := s.newValue2(cvttab.rsh, ft, x, one)
2732 z = s.newValue2(cvttab.or, ft, z, y)
2733 a := s.newValue1(cvttab.cvt2F, tt, z)
2734 a1 := s.newValue2(cvttab.add, tt, a, a)
2735 s.vars[n] = a1
2736 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002737 bElse.AddEdgeTo(bAfter)
David Chase42825882015-08-20 15:14:20 -04002738
2739 s.startBlock(bAfter)
2740 return s.variable(n, n.Type)
2741}
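// The same trick written as ordinary Go, for illustration only (not part
// of the compiler, and covering only the 64-bit-to-float64 case):
//
//	func u64toF64(x uint64) float64 {
//		if int64(x) >= 0 {
//			return float64(int64(x))
//		}
//		y := x & 1    // save the bit we are about to shift out
//		z := x>>1 | y // halve, preserving the rounding bit
//		return 2 * float64(int64(z))
//	}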
2742
Todd Neal707af252015-08-28 15:56:43 -05002743// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
2744func (s *state) referenceTypeBuiltin(n *Node, x *ssa.Value) *ssa.Value {
2745 if !n.Left.Type.IsMap() && !n.Left.Type.IsChan() {
2746 s.Fatalf("node must be a map or a channel")
2747 }
Todd Neale0e40682015-08-26 18:40:52 -05002748 // if n == nil {
2749 // return 0
2750 // } else {
Todd Neal707af252015-08-28 15:56:43 -05002751 // // len
Todd Neale0e40682015-08-26 18:40:52 -05002752 // return *((*int)n)
Todd Neal707af252015-08-28 15:56:43 -05002753 // // cap
2754 // return *(((*int)n)+1)
Todd Neale0e40682015-08-26 18:40:52 -05002755 // }
2756 lenType := n.Type
Todd Neal67ac8a32015-08-28 15:20:54 -05002757 nilValue := s.newValue0(ssa.OpConstNil, Types[TUINTPTR])
2758 cmp := s.newValue2(ssa.OpEqPtr, Types[TBOOL], x, nilValue)
Todd Neale0e40682015-08-26 18:40:52 -05002759 b := s.endBlock()
2760 b.Kind = ssa.BlockIf
2761 b.Control = cmp
2762 b.Likely = ssa.BranchUnlikely
2763
2764 bThen := s.f.NewBlock(ssa.BlockPlain)
2765 bElse := s.f.NewBlock(ssa.BlockPlain)
2766 bAfter := s.f.NewBlock(ssa.BlockPlain)
2767
Todd Neal707af252015-08-28 15:56:43 -05002768 // length/capacity of a nil map/chan is zero
Todd Neal47d67992015-08-28 21:36:29 -05002769 b.AddEdgeTo(bThen)
Todd Neale0e40682015-08-26 18:40:52 -05002770 s.startBlock(bThen)
2771 s.vars[n] = s.zeroVal(lenType)
2772 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002773 bThen.AddEdgeTo(bAfter)
Todd Neale0e40682015-08-26 18:40:52 -05002774
Todd Neal47d67992015-08-28 21:36:29 -05002775 b.AddEdgeTo(bElse)
Todd Neale0e40682015-08-26 18:40:52 -05002776 s.startBlock(bElse)
Todd Neal707af252015-08-28 15:56:43 -05002777 if n.Op == OLEN {
2778 // length is stored in the first word for map/chan
2779 s.vars[n] = s.newValue2(ssa.OpLoad, lenType, x, s.mem())
2780 } else if n.Op == OCAP {
2781 // capacity is stored in the second word for chan
2782 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Width, x)
2783 s.vars[n] = s.newValue2(ssa.OpLoad, lenType, sw, s.mem())
2784 } else {
2785 s.Fatalf("op must be OLEN or OCAP")
2786 }
Todd Neale0e40682015-08-26 18:40:52 -05002787 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002788 bElse.AddEdgeTo(bAfter)
Todd Neale0e40682015-08-26 18:40:52 -05002789
2790 s.startBlock(bAfter)
2791 return s.variable(n, lenType)
2792}
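// Runtime layout assumed by the loads above (a sketch; see the runtime's
// hchan and hmap definitions):
//
//	hchan: { qcount uint; dataqsiz uint; ... }   // len word, then cap word
//	hmap:  { count int; ... }                    // len word only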
2793
David Chase73151062015-08-26 14:25:40 -04002794type f2uCvtTab struct {
2795 ltf, cvt2U, subf ssa.Op
2796 value func(*state, ssa.Type, float64) *ssa.Value
2797}
2798
2799var f32_u64 f2uCvtTab = f2uCvtTab{
2800 ltf: ssa.OpLess32F,
2801 cvt2U: ssa.OpCvt32Fto64,
2802 subf: ssa.OpSub32F,
2803 value: (*state).constFloat32,
2804}
2805
2806var f64_u64 f2uCvtTab = f2uCvtTab{
2807 ltf: ssa.OpLess64F,
2808 cvt2U: ssa.OpCvt64Fto64,
2809 subf: ssa.OpSub64F,
2810 value: (*state).constFloat64,
2811}
2812
2813func (s *state) float32ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2814 return s.floatToUint(&f32_u64, n, x, ft, tt)
2815}
2816func (s *state) float64ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2817 return s.floatToUint(&f64_u64, n, x, ft, tt)
2818}
2819
2820func (s *state) floatToUint(cvttab *f2uCvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2821 // if x < 9223372036854775808.0 {
2822 // result = uintY(x)
2823 // } else {
2824 // y = x - 9223372036854775808.0
2825 // z = uintY(y)
2826 // result = z | -9223372036854775808
2827 // }
2828 twoToThe63 := cvttab.value(s, ft, 9223372036854775808.0)
2829 cmp := s.newValue2(cvttab.ltf, Types[TBOOL], x, twoToThe63)
2830 b := s.endBlock()
2831 b.Kind = ssa.BlockIf
2832 b.Control = cmp
2833 b.Likely = ssa.BranchLikely
2834
2835 bThen := s.f.NewBlock(ssa.BlockPlain)
2836 bElse := s.f.NewBlock(ssa.BlockPlain)
2837 bAfter := s.f.NewBlock(ssa.BlockPlain)
2838
Todd Neal47d67992015-08-28 21:36:29 -05002839 b.AddEdgeTo(bThen)
David Chase73151062015-08-26 14:25:40 -04002840 s.startBlock(bThen)
2841 a0 := s.newValue1(cvttab.cvt2U, tt, x)
2842 s.vars[n] = a0
2843 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002844 bThen.AddEdgeTo(bAfter)
David Chase73151062015-08-26 14:25:40 -04002845
Todd Neal47d67992015-08-28 21:36:29 -05002846 b.AddEdgeTo(bElse)
David Chase73151062015-08-26 14:25:40 -04002847 s.startBlock(bElse)
2848 y := s.newValue2(cvttab.subf, ft, x, twoToThe63)
2849 y = s.newValue1(cvttab.cvt2U, tt, y)
2850 z := s.constInt64(tt, -9223372036854775808)
2851 a1 := s.newValue2(ssa.OpOr64, tt, y, z)
2852 s.vars[n] = a1
2853 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002854 bElse.AddEdgeTo(bAfter)
David Chase73151062015-08-26 14:25:40 -04002855
2856 s.startBlock(bAfter)
2857 return s.variable(n, n.Type)
2858}
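// The same conversion written as ordinary Go, for illustration only (not
// part of the compiler):
//
//	func f64toU64(x float64) uint64 {
//		const two63 = 9223372036854775808.0
//		if x < two63 {
//			return uint64(int64(x))
//		}
//		return uint64(int64(x-two63)) | (1 << 63)
//	}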
2859
Keith Randall269baa92015-09-17 10:31:16 -07002860// ifaceType returns the value for the word containing the type.
2861// n is the node for the interface expression.
2862// v is the corresponding value.
2863func (s *state) ifaceType(n *Node, v *ssa.Value) *ssa.Value {
2864 byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
2865
2866 if isnilinter(n.Type) {
2867 // Have *eface. The type is the first word in the struct.
2868 return s.newValue1(ssa.OpITab, byteptr, v)
2869 }
2870
2871 // Have *iface.
2872 // The first word in the struct is the *itab.
2873 // If the *itab is nil, return 0.
2874 // Otherwise, the second word in the *itab is the type.
2875
2876 tab := s.newValue1(ssa.OpITab, byteptr, v)
2877 s.vars[&typVar] = tab
2878 isnonnil := s.newValue2(ssa.OpNeqPtr, Types[TBOOL], tab, s.entryNewValue0(ssa.OpConstNil, byteptr))
2879 b := s.endBlock()
2880 b.Kind = ssa.BlockIf
2881 b.Control = isnonnil
2882 b.Likely = ssa.BranchLikely
2883
2884 bLoad := s.f.NewBlock(ssa.BlockPlain)
2885 bEnd := s.f.NewBlock(ssa.BlockPlain)
2886
2887 b.AddEdgeTo(bLoad)
2888 b.AddEdgeTo(bEnd)
2889 bLoad.AddEdgeTo(bEnd)
2890
2891 s.startBlock(bLoad)
2892 off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(Widthptr), tab)
2893 s.vars[&typVar] = s.newValue2(ssa.OpLoad, byteptr, off, s.mem())
2894 s.endBlock()
2895
2896 s.startBlock(bEnd)
2897 typ := s.variable(&typVar, byteptr)
2898 delete(s.vars, &typVar)
2899 return typ
2900}
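// Interface layout assumed above (a sketch; field names follow the runtime
// but are shown here only for orientation):
//
//	eface: { _type *_type; data unsafe.Pointer }          // empty interface
//	iface: { tab *itab;    data unsafe.Pointer }          // nonempty interface
//	itab:  { inter *interfacetype; _type *_type; ... }    // type is the second word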
2901
2902// dottype generates SSA for a type assertion node.
2903// commaok indicates whether to panic or return a bool.
2904// If commaok is false, resok will be nil.
2905func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) {
2906 iface := s.expr(n.Left)
2907 typ := s.ifaceType(n.Left, iface) // actual concrete type
2908 target := s.expr(typename(n.Type)) // target type
2909 if !isdirectiface(n.Type) {
2910 // walk rewrites ODOTTYPE/OAS2DOTTYPE into runtime calls except for this case.
2911 Fatalf("dottype needs a direct iface type %s", n.Type)
2912 }
2913
2914 // TODO: If we have a nonempty interface and its itab field is nil,
2915 // then this test is redundant and ifaceType should just branch directly to bFail.
2916 cond := s.newValue2(ssa.OpEqPtr, Types[TBOOL], typ, target)
2917 b := s.endBlock()
2918 b.Kind = ssa.BlockIf
2919 b.Control = cond
2920 b.Likely = ssa.BranchLikely
2921
2922 byteptr := Ptrto(Types[TUINT8])
2923
2924 bOk := s.f.NewBlock(ssa.BlockPlain)
2925 bFail := s.f.NewBlock(ssa.BlockPlain)
2926 b.AddEdgeTo(bOk)
2927 b.AddEdgeTo(bFail)
2928
2929 if !commaok {
2930 // on failure, panic by calling panicdottype
2931 s.startBlock(bFail)
Keith Randall269baa92015-09-17 10:31:16 -07002932 taddr := s.newValue1A(ssa.OpAddr, byteptr, &ssa.ExternSymbol{byteptr, typenamesym(n.Left.Type)}, s.sb)
Keith Randall8c5bfcc2015-09-18 15:11:30 -07002933 s.rtcall(panicdottype, false, nil, typ, target, taddr)
Keith Randall269baa92015-09-17 10:31:16 -07002934
2935 // on success, return idata field
2936 s.startBlock(bOk)
2937 return s.newValue1(ssa.OpIData, n.Type, iface), nil
2938 }
2939
2940 // commaok is the more complicated case because we have
2941 // a control flow merge point.
2942 bEnd := s.f.NewBlock(ssa.BlockPlain)
2943
2944 // type assertion succeeded
2945 s.startBlock(bOk)
2946 s.vars[&idataVar] = s.newValue1(ssa.OpIData, n.Type, iface)
2947 s.vars[&okVar] = s.constBool(true)
2948 s.endBlock()
2949 bOk.AddEdgeTo(bEnd)
2950
2951 // type assertion failed
2952 s.startBlock(bFail)
2953 s.vars[&idataVar] = s.entryNewValue0(ssa.OpConstNil, byteptr)
2954 s.vars[&okVar] = s.constBool(false)
2955 s.endBlock()
2956 bFail.AddEdgeTo(bEnd)
2957
2958 // merge point
2959 s.startBlock(bEnd)
2960 res = s.variable(&idataVar, byteptr)
2961 resok = s.variable(&okVar, Types[TBOOL])
2962 delete(s.vars, &idataVar)
2963 delete(s.vars, &okVar)
2964 return res, resok
2965}
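// For example (sketch): "x := i.(*T)" takes the commaok=false path above and
// panics via panicdottype on mismatch, while "x, ok := i.(*T)" takes the
// commaok path and merges a nil/false result instead of panicking.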
2966
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -07002967// checkgoto checks that the goto statement from, targeting the label to, does not
2968// jump into a block or jump over variable declarations.
2969// It is a copy of checkgoto in the pre-SSA backend,
2970// modified only for line number handling.
2971// TODO: document how this works and why it is designed the way it is.
2972func (s *state) checkgoto(from *Node, to *Node) {
2973 if from.Sym == to.Sym {
2974 return
2975 }
2976
2977 nf := 0
2978 for fs := from.Sym; fs != nil; fs = fs.Link {
2979 nf++
2980 }
2981 nt := 0
2982 for fs := to.Sym; fs != nil; fs = fs.Link {
2983 nt++
2984 }
2985 fs := from.Sym
2986 for ; nf > nt; nf-- {
2987 fs = fs.Link
2988 }
2989 if fs != to.Sym {
2990 // decide what to complain about.
2991 // prefer to complain about 'into block' over declarations,
2992 // so scan backward to find most recent block or else dcl.
2993 var block *Sym
2994
2995 var dcl *Sym
2996 ts := to.Sym
2997 for ; nt > nf; nt-- {
2998 if ts.Pkg == nil {
2999 block = ts
3000 } else {
3001 dcl = ts
3002 }
3003 ts = ts.Link
3004 }
3005
3006 for ts != fs {
3007 if ts.Pkg == nil {
3008 block = ts
3009 } else {
3010 dcl = ts
3011 }
3012 ts = ts.Link
3013 fs = fs.Link
3014 }
3015
3016 lno := int(from.Left.Lineno)
3017 if block != nil {
3018 yyerrorl(lno, "goto %v jumps into block starting at %v", from.Left.Sym, Ctxt.Line(int(block.Lastlineno)))
3019 } else {
3020 yyerrorl(lno, "goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, Ctxt.Line(int(dcl.Lastlineno)))
3021 }
3022 }
3023}
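// Example of code this check rejects (a sketch):
//
//	goto L
//	x := 1 // goto jumps over declaration of x
//	L:
//	_ = x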
3024
Keith Randalld2fd43a2015-04-15 15:51:25 -07003025// variable returns the value of a variable at the current location.
Keith Randall8c46aa52015-06-19 21:02:28 -07003026func (s *state) variable(name *Node, t ssa.Type) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07003027 v := s.vars[name]
3028 if v == nil {
3029 // TODO: get type? Take Sym as arg?
Keith Randall8f22b522015-06-11 21:29:25 -07003030 v = s.newValue0A(ssa.OpFwdRef, t, name)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003031 s.vars[name] = v
3032 }
3033 return v
3034}
3035
Keith Randallcfc2aa52015-05-18 16:44:20 -07003036func (s *state) mem() *ssa.Value {
Keith Randallb32217a2015-09-17 16:45:10 -07003037 return s.variable(&memVar, ssa.TypeMem)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003038}
3039
Keith Randallcfc2aa52015-05-18 16:44:20 -07003040func (s *state) linkForwardReferences() {
Keith Randalld2fd43a2015-04-15 15:51:25 -07003041 // Build ssa graph. Each variable on its first use in a basic block
3042 // leaves a FwdRef in that block representing the incoming value
3043 // of that variable. This function links that ref up with possible definitions,
3044 // inserting Phi values as needed. This is essentially the algorithm
3045 // described by Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau:
3046 // http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf
3047 for _, b := range s.f.Blocks {
3048 for _, v := range b.Values {
3049 if v.Op != ssa.OpFwdRef {
3050 continue
3051 }
Keith Randall8c46aa52015-06-19 21:02:28 -07003052 name := v.Aux.(*Node)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003053 v.Op = ssa.OpCopy
3054 v.Aux = nil
3055 v.SetArgs1(s.lookupVarIncoming(b, v.Type, name))
3056 }
3057 }
3058}
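// Sketch of the result (illustrative value numbers, not literal output):
// a variable read in a block whose two predecessors assign different
// values ends up as
//
//	b: v3 = Phi <t> v1 v2   // v1 from the first pred, v2 from the second
//	   v4 = Copy <t> v3     // the rewritten FwdRef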
3059
3060// lookupVarIncoming finds the variable's value at the start of block b.
Keith Randall8c46aa52015-06-19 21:02:28 -07003061func (s *state) lookupVarIncoming(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07003062 // TODO(khr): have lookupVarIncoming overwrite the FwdRef or Copy value it
3063 // will be used in, instead of having the result used in a new Copy value.
3064 if b == s.f.Entry {
Keith Randallb32217a2015-09-17 16:45:10 -07003065 if name == &memVar {
Keith Randallcfc2aa52015-05-18 16:44:20 -07003066 return s.startmem
Keith Randalld2fd43a2015-04-15 15:51:25 -07003067 }
3068 // variable is live at the entry block. Load it.
Keith Randall8c46aa52015-06-19 21:02:28 -07003069 addr := s.decladdrs[name]
3070 if addr == nil {
3071 // TODO: closure args reach here.
David Chase32ffbf72015-10-08 17:14:12 -04003072 s.Unimplementedf("unhandled closure arg %s at entry to function %s", name, b.Func.Name)
Keith Randall8c46aa52015-06-19 21:02:28 -07003073 }
3074 if _, ok := addr.Aux.(*ssa.ArgSymbol); !ok {
3075 s.Fatalf("variable live at start of function %s is not an argument %s", b.Func.Name, name)
3076 }
Keith Randall8f22b522015-06-11 21:29:25 -07003077 return s.entryNewValue2(ssa.OpLoad, t, addr, s.startmem)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003078 }
3079 var vals []*ssa.Value
3080 for _, p := range b.Preds {
3081 vals = append(vals, s.lookupVarOutgoing(p, t, name))
3082 }
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003083 if len(vals) == 0 {
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -07003084 // This block is dead; we have no predecessors and we're not the entry block.
3085 // It doesn't matter what we use here as long as it is well-formed,
3086 // so use the default/zero value.
Keith Randallb32217a2015-09-17 16:45:10 -07003087 if name == &memVar {
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -07003088 return s.startmem
3089 }
3090 return s.zeroVal(name.Type)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003091 }
Keith Randalld2fd43a2015-04-15 15:51:25 -07003092 v0 := vals[0]
3093 for i := 1; i < len(vals); i++ {
3094 if vals[i] != v0 {
3095 // need a phi value
Keith Randall8f22b522015-06-11 21:29:25 -07003096 v := b.NewValue0(s.peekLine(), ssa.OpPhi, t)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003097 v.AddArgs(vals...)
3098 return v
3099 }
3100 }
3101 return v0
3102}
3103
3104// lookupVarOutgoing finds the variable's value at the end of block b.
Keith Randall8c46aa52015-06-19 21:02:28 -07003105func (s *state) lookupVarOutgoing(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07003106 m := s.defvars[b.ID]
3107 if v, ok := m[name]; ok {
3108 return v
3109 }
3110 // The variable is not defined by b and we haven't
3111 // looked it up yet. Generate v, a copy value which
3112 // will be the outgoing value of the variable. Then
3113 // look up w, the incoming value of the variable.
3114 // Make v = copy(w). We need the extra copy to
3115 // prevent infinite recursion when looking up the
3116 // incoming value of the variable.
Keith Randall8f22b522015-06-11 21:29:25 -07003117 v := b.NewValue0(s.peekLine(), ssa.OpCopy, t)
Keith Randalld2fd43a2015-04-15 15:51:25 -07003118 m[name] = v
3119 v.AddArg(s.lookupVarIncoming(b, t, name))
3120 return v
3121}
3122
3123// TODO: the above mutually recursive functions can lead to very deep stacks. Fix that.
3124
Keith Randall083a6462015-05-12 11:06:44 -07003125// an unresolved branch
3126type branch struct {
3127 p *obj.Prog // branch instruction
3128 b *ssa.Block // target
3129}
3130
Keith Randall9569b952015-08-28 22:51:01 -07003131type genState struct {
3132 // branches remembers all the branch instructions we've seen
3133 // and where they would like to go.
3134 branches []branch
3135
3136 // bstart remembers where each block starts (indexed by block ID)
3137 bstart []*obj.Prog
3138
3139 // deferBranches remembers all the defer branches we've seen.
3140 deferBranches []*obj.Prog
3141
3142 // deferTarget remembers the (last) deferreturn call site.
3143 deferTarget *obj.Prog
3144}
3145
Keith Randall083a6462015-05-12 11:06:44 -07003146// genssa appends entries to ptxt for each instruction in f.
3147// gcargs and gclocals are filled in with pointer maps for the frame.
3148func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) {
Keith Randall9569b952015-08-28 22:51:01 -07003149 var s genState
3150
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003151 e := f.Config.Frontend().(*ssaExport)
3152 // We're about to emit a bunch of Progs.
3153 // Since the only way to get here is to explicitly request it,
3154 // just fail on unimplemented instead of trying to unwind our mess.
3155 e.mustImplement = true
3156
Keith Randall083a6462015-05-12 11:06:44 -07003157 // Remember where each block starts.
Keith Randall9569b952015-08-28 22:51:01 -07003158 s.bstart = make([]*obj.Prog, f.NumBlocks())
Keith Randall083a6462015-05-12 11:06:44 -07003159
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003160 var valueProgs map[*obj.Prog]*ssa.Value
3161 var blockProgs map[*obj.Prog]*ssa.Block
3162 const logProgs = true
3163 if logProgs {
3164 valueProgs = make(map[*obj.Prog]*ssa.Value, f.NumValues())
3165 blockProgs = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
3166 f.Logf("genssa %s\n", f.Name)
3167 blockProgs[Pc] = f.Blocks[0]
3168 }
3169
Keith Randall083a6462015-05-12 11:06:44 -07003170 // Emit basic blocks
3171 for i, b := range f.Blocks {
Keith Randall9569b952015-08-28 22:51:01 -07003172 s.bstart[b.ID] = Pc
Keith Randall083a6462015-05-12 11:06:44 -07003173 // Emit values in block
3174 for _, v := range b.Values {
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003175 x := Pc
Keith Randall9569b952015-08-28 22:51:01 -07003176 s.genValue(v)
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003177 if logProgs {
3178 for ; x != Pc; x = x.Link {
3179 valueProgs[x] = v
3180 }
3181 }
Keith Randall083a6462015-05-12 11:06:44 -07003182 }
3183 // Emit control flow instructions for block
3184 var next *ssa.Block
3185 if i < len(f.Blocks)-1 {
3186 next = f.Blocks[i+1]
3187 }
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003188 x := Pc
Keith Randall9569b952015-08-28 22:51:01 -07003189 s.genBlock(b, next)
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003190 if logProgs {
3191 for ; x != Pc; x = x.Link {
3192 blockProgs[x] = b
3193 }
3194 }
Keith Randall083a6462015-05-12 11:06:44 -07003195 }
3196
3197 // Resolve branches
Keith Randall9569b952015-08-28 22:51:01 -07003198 for _, br := range s.branches {
3199 br.p.To.Val = s.bstart[br.b.ID]
3200 }
Keith Randallca9e4502015-09-08 08:59:57 -07003201 if s.deferBranches != nil && s.deferTarget == nil {
3202 // This can happen when the function has a defer but
3203 // no return (because it has an infinite loop).
3204 s.deferReturn()
3205 Prog(obj.ARET)
3206 }
Keith Randall9569b952015-08-28 22:51:01 -07003207 for _, p := range s.deferBranches {
3208 p.To.Val = s.deferTarget
Keith Randall083a6462015-05-12 11:06:44 -07003209 }
3210
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003211 if logProgs {
3212 for p := ptxt; p != nil; p = p.Link {
3213 var s string
3214 if v, ok := valueProgs[p]; ok {
3215 s = v.String()
3216 } else if b, ok := blockProgs[p]; ok {
3217 s = b.String()
3218 } else {
3219 s = " " // most value and branch strings are 2-3 characters long
3220 }
3221 f.Logf("%s\t%s\n", s, p)
3222 }
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -07003223 if f.Config.HTML != nil {
3224 saved := ptxt.Ctxt.LineHist.PrintFilenameOnly
3225 ptxt.Ctxt.LineHist.PrintFilenameOnly = true
3226 var buf bytes.Buffer
3227 buf.WriteString("<code>")
3228 buf.WriteString("<dl class=\"ssa-gen\">")
3229 for p := ptxt; p != nil; p = p.Link {
3230 buf.WriteString("<dt class=\"ssa-prog-src\">")
3231 if v, ok := valueProgs[p]; ok {
3232 buf.WriteString(v.HTML())
3233 } else if b, ok := blockProgs[p]; ok {
3234 buf.WriteString(b.HTML())
3235 }
3236 buf.WriteString("</dt>")
3237 buf.WriteString("<dd class=\"ssa-prog\">")
3238 buf.WriteString(html.EscapeString(p.String()))
3239 buf.WriteString("</dd>")
3241 }
3242 buf.WriteString("</dl>")
3243 buf.WriteString("</code>")
3244 f.Config.HTML.WriteColumn("genssa", buf.String())
3245 ptxt.Ctxt.LineHist.PrintFilenameOnly = saved
3246 }
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07003247 }
3248
Josh Bleecher Snyder6b416652015-07-28 10:56:39 -07003249 // Emit static data
3250 if f.StaticData != nil {
3251 for _, n := range f.StaticData.([]*Node) {
3252 if !gen_as_init(n, false) {
Keith Randall0ec72b62015-09-08 15:42:53 -07003253 Fatalf("non-static data marked as static: %v\n\n%v", n, f)
Josh Bleecher Snyder6b416652015-07-28 10:56:39 -07003254 }
3255 }
3256 }
3257
Keith Randalld2107fc2015-08-24 02:16:19 -07003258 // Allocate stack frame
3259 allocauto(ptxt)
Keith Randall083a6462015-05-12 11:06:44 -07003260
Keith Randalld2107fc2015-08-24 02:16:19 -07003261 // Generate gc bitmaps.
3262 liveness(Curfn, ptxt, gcargs, gclocals)
3263 gcsymdup(gcargs)
3264 gcsymdup(gclocals)
Keith Randall083a6462015-05-12 11:06:44 -07003265
Keith Randalld2107fc2015-08-24 02:16:19 -07003266 // Add frame prologue. Zero ambiguously live variables.
3267 Thearch.Defframe(ptxt)
3268 if Debug['f'] != 0 {
3269 frame(0)
3270 }
3271
3272 // Remove leftover instrumentation from the instruction stream.
3273 removevardef(ptxt)
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -07003274
3275 f.Config.HTML.Close()
Keith Randall083a6462015-05-12 11:06:44 -07003276}
3277
David Chase997a9f32015-08-12 16:38:11 -04003278// opregreg emits instructions for
David Chase8e601b22015-08-18 14:39:26 -04003279// dest := dest(To) op src(From)
David Chase997a9f32015-08-12 16:38:11 -04003280// and also returns the created obj.Prog so it
3281// may be further adjusted (offset, scale, etc).
3282func opregreg(op int, dest, src int16) *obj.Prog {
3283 p := Prog(op)
3284 p.From.Type = obj.TYPE_REG
3285 p.To.Type = obj.TYPE_REG
3286 p.To.Reg = dest
3287 p.From.Reg = src
3288 return p
3289}
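// For example (a sketch), opregreg(x86.AUCOMISD, a, b) emits "UCOMISD b, a":
// the Go assembler's operand order is "op src, dst", hence the swapped
// operands noted at the UCOMISx call site below.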
3290
Keith Randall9569b952015-08-28 22:51:01 -07003291func (s *genState) genValue(v *ssa.Value) {
Michael Matloob81ccf502015-05-30 01:03:06 -04003292 lineno = v.Line
Keith Randall083a6462015-05-12 11:06:44 -07003293 switch v.Op {
Keith Randall0dca7352015-06-06 16:03:33 -07003294 case ssa.OpAMD64ADDQ:
Keith Randall083a6462015-05-12 11:06:44 -07003295 // TODO: use addq instead of leaq if target is in the right register.
3296 p := Prog(x86.ALEAQ)
3297 p.From.Type = obj.TYPE_MEM
3298 p.From.Reg = regnum(v.Args[0])
3299 p.From.Scale = 1
3300 p.From.Index = regnum(v.Args[1])
3301 p.To.Type = obj.TYPE_REG
3302 p.To.Reg = regnum(v)
Michael Matloob73054f52015-06-14 11:38:46 -07003303 case ssa.OpAMD64ADDL:
3304 p := Prog(x86.ALEAL)
3305 p.From.Type = obj.TYPE_MEM
3306 p.From.Reg = regnum(v.Args[0])
3307 p.From.Scale = 1
3308 p.From.Index = regnum(v.Args[1])
3309 p.To.Type = obj.TYPE_REG
3310 p.To.Reg = regnum(v)
3311 case ssa.OpAMD64ADDW:
3312 p := Prog(x86.ALEAW)
3313 p.From.Type = obj.TYPE_MEM
3314 p.From.Reg = regnum(v.Args[0])
3315 p.From.Scale = 1
3316 p.From.Index = regnum(v.Args[1])
3317 p.To.Type = obj.TYPE_REG
3318 p.To.Reg = regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07003319 // 2-address opcode arithmetic, symmetric
David Chase997a9f32015-08-12 16:38:11 -04003320 case ssa.OpAMD64ADDB, ssa.OpAMD64ADDSS, ssa.OpAMD64ADDSD,
Alexandru Moșoiedff8812015-07-28 14:58:49 +02003321 ssa.OpAMD64ANDQ, ssa.OpAMD64ANDL, ssa.OpAMD64ANDW, ssa.OpAMD64ANDB,
Keith Randall20550cb2015-07-28 16:04:50 -07003322 ssa.OpAMD64ORQ, ssa.OpAMD64ORL, ssa.OpAMD64ORW, ssa.OpAMD64ORB,
3323 ssa.OpAMD64XORQ, ssa.OpAMD64XORL, ssa.OpAMD64XORW, ssa.OpAMD64XORB,
David Chase997a9f32015-08-12 16:38:11 -04003324 ssa.OpAMD64MULQ, ssa.OpAMD64MULL, ssa.OpAMD64MULW, ssa.OpAMD64MULB,
David Chase3a9d0ac2015-08-28 14:24:10 -04003325 ssa.OpAMD64MULSS, ssa.OpAMD64MULSD, ssa.OpAMD64PXOR:
Michael Matloob73054f52015-06-14 11:38:46 -07003326 r := regnum(v)
3327 x := regnum(v.Args[0])
3328 y := regnum(v.Args[1])
3329 if x != r && y != r {
David Chase997a9f32015-08-12 16:38:11 -04003330 opregreg(regMoveByTypeAMD64(v.Type), r, x)
Michael Matloob73054f52015-06-14 11:38:46 -07003331 x = r
3332 }
3333 p := Prog(v.Op.Asm())
3334 p.From.Type = obj.TYPE_REG
3335 p.To.Type = obj.TYPE_REG
3336 p.To.Reg = r
3337 if x == r {
3338 p.From.Reg = y
3339 } else {
3340 p.From.Reg = x
3341 }
Keith Randall20550cb2015-07-28 16:04:50 -07003342 // 2-address opcode arithmetic, not symmetric
3343 case ssa.OpAMD64SUBQ, ssa.OpAMD64SUBL, ssa.OpAMD64SUBW, ssa.OpAMD64SUBB:
Keith Randallbe1eb572015-07-22 13:46:15 -07003344 r := regnum(v)
3345 x := regnum(v.Args[0])
Keith Randall20550cb2015-07-28 16:04:50 -07003346 y := regnum(v.Args[1])
3347 var neg bool
3348 if y == r {
3349 // compute -(y-x) instead
3350 x, y = y, x
3351 neg = true
Keith Randallbe1eb572015-07-22 13:46:15 -07003352 }
Keith Randall083a6462015-05-12 11:06:44 -07003353 if x != r {
David Chase997a9f32015-08-12 16:38:11 -04003354 opregreg(regMoveByTypeAMD64(v.Type), r, x)
Keith Randall083a6462015-05-12 11:06:44 -07003355 }
David Chase997a9f32015-08-12 16:38:11 -04003356 opregreg(v.Op.Asm(), r, y)
Keith Randall20550cb2015-07-28 16:04:50 -07003357
Keith Randall20550cb2015-07-28 16:04:50 -07003358 if neg {
3359 p := Prog(x86.ANEGQ) // TODO: use correct size? This is mostly a hack until regalloc does 2-address correctly
Keith Randall20550cb2015-07-28 16:04:50 -07003360 p.To.Type = obj.TYPE_REG
3361 p.To.Reg = r
3362 }
David Chase997a9f32015-08-12 16:38:11 -04003363 case ssa.OpAMD64SUBSS, ssa.OpAMD64SUBSD, ssa.OpAMD64DIVSS, ssa.OpAMD64DIVSD:
3364 r := regnum(v)
3365 x := regnum(v.Args[0])
3366 y := regnum(v.Args[1])
3367 if y == r && x != r {
3368 // r/y := x op r/y, need to preserve x and rewrite to
3369 // r/y := r/y op x15
3370 x15 := int16(x86.REG_X15)
3371 // register move y to x15
3372 // register move x to y
3373 // rename y with x15
3374 opregreg(regMoveByTypeAMD64(v.Type), x15, y)
3375 opregreg(regMoveByTypeAMD64(v.Type), r, x)
3376 y = x15
3377 } else if x != r {
3378 opregreg(regMoveByTypeAMD64(v.Type), r, x)
3379 }
3380 opregreg(v.Op.Asm(), r, y)
3381
Todd Neala45f2d82015-08-17 17:46:06 -05003382 case ssa.OpAMD64DIVQ, ssa.OpAMD64DIVL, ssa.OpAMD64DIVW,
Todd Neal57d9e7e2015-08-18 19:51:44 -05003383 ssa.OpAMD64DIVQU, ssa.OpAMD64DIVLU, ssa.OpAMD64DIVWU,
3384 ssa.OpAMD64MODQ, ssa.OpAMD64MODL, ssa.OpAMD64MODW,
3385 ssa.OpAMD64MODQU, ssa.OpAMD64MODLU, ssa.OpAMD64MODWU:
Todd Neala45f2d82015-08-17 17:46:06 -05003386
3387 // Arg[0] is already in AX as it's the only register we allow
3388 // and AX is the only output
3389 x := regnum(v.Args[1])
3390
3391 // CPU faults upon signed overflow, which occurs when the most
Todd Neal57d9e7e2015-08-18 19:51:44 -05003392 // negative int is divided by -1.
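// In Go, the most negative int divided by -1 must wrap to itself and the
// corresponding remainder must be 0, so a -1 divisor is special-cased
// below rather than letting the divide instruction trap.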
Todd Neala45f2d82015-08-17 17:46:06 -05003393 var j *obj.Prog
3394 if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL ||
Todd Neal57d9e7e2015-08-18 19:51:44 -05003395 v.Op == ssa.OpAMD64DIVW || v.Op == ssa.OpAMD64MODQ ||
3396 v.Op == ssa.OpAMD64MODL || v.Op == ssa.OpAMD64MODW {
Todd Neala45f2d82015-08-17 17:46:06 -05003397
3398 var c *obj.Prog
3399 switch v.Op {
Todd Neal57d9e7e2015-08-18 19:51:44 -05003400 case ssa.OpAMD64DIVQ, ssa.OpAMD64MODQ:
Todd Neala45f2d82015-08-17 17:46:06 -05003401 c = Prog(x86.ACMPQ)
Todd Neal57d9e7e2015-08-18 19:51:44 -05003402 j = Prog(x86.AJEQ)
3403 // go ahead and sign extend to save doing it later
3404 Prog(x86.ACQO)
3405
3406 case ssa.OpAMD64DIVL, ssa.OpAMD64MODL:
Todd Neala45f2d82015-08-17 17:46:06 -05003407 c = Prog(x86.ACMPL)
Todd Neal57d9e7e2015-08-18 19:51:44 -05003408 j = Prog(x86.AJEQ)
3409 Prog(x86.ACDQ)
3410
3411 case ssa.OpAMD64DIVW, ssa.OpAMD64MODW:
Todd Neala45f2d82015-08-17 17:46:06 -05003412 c = Prog(x86.ACMPW)
Todd Neal57d9e7e2015-08-18 19:51:44 -05003413 j = Prog(x86.AJEQ)
3414 Prog(x86.ACWD)
Todd Neala45f2d82015-08-17 17:46:06 -05003415 }
3416 c.From.Type = obj.TYPE_REG
3417 c.From.Reg = x
3418 c.To.Type = obj.TYPE_CONST
3419 c.To.Offset = -1
3420
Todd Neala45f2d82015-08-17 17:46:06 -05003421 j.To.Type = obj.TYPE_BRANCH
3422
3423 }
3424
Todd Neal57d9e7e2015-08-18 19:51:44 -05003425 // for unsigned ints, we sign extend by setting DX = 0
3426 // signed ints were sign extended above
3427 if v.Op == ssa.OpAMD64DIVQU || v.Op == ssa.OpAMD64MODQU ||
3428 v.Op == ssa.OpAMD64DIVLU || v.Op == ssa.OpAMD64MODLU ||
3429 v.Op == ssa.OpAMD64DIVWU || v.Op == ssa.OpAMD64MODWU {
Todd Neala45f2d82015-08-17 17:46:06 -05003430 c := Prog(x86.AXORQ)
3431 c.From.Type = obj.TYPE_REG
3432 c.From.Reg = x86.REG_DX
3433 c.To.Type = obj.TYPE_REG
3434 c.To.Reg = x86.REG_DX
Todd Neala45f2d82015-08-17 17:46:06 -05003435 }
3436
3437 p := Prog(v.Op.Asm())
3438 p.From.Type = obj.TYPE_REG
3439 p.From.Reg = x
3440
3441 // signed division, rest of the check for -1 case
3442 if j != nil {
3443 j2 := Prog(obj.AJMP)
3444 j2.To.Type = obj.TYPE_BRANCH
3445
Todd Neal57d9e7e2015-08-18 19:51:44 -05003446 var n *obj.Prog
3447 if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL ||
3448 v.Op == ssa.OpAMD64DIVW {
3449 // n * -1 = -n
3450 n = Prog(x86.ANEGQ)
3451 n.To.Type = obj.TYPE_REG
3452 n.To.Reg = x86.REG_AX
3453 } else {
3454 // n % -1 == 0
3455 n = Prog(x86.AXORQ)
3456 n.From.Type = obj.TYPE_REG
3457 n.From.Reg = x86.REG_DX
3458 n.To.Type = obj.TYPE_REG
3459 n.To.Reg = x86.REG_DX
3460 }
Todd Neala45f2d82015-08-17 17:46:06 -05003461
3462 j.To.Val = n
3463 j2.To.Val = Pc
3464 }
3465
Todd Neal67cbd5b2015-08-18 19:14:47 -05003466 case ssa.OpAMD64HMULL, ssa.OpAMD64HMULW, ssa.OpAMD64HMULB,
3467 ssa.OpAMD64HMULLU, ssa.OpAMD64HMULWU, ssa.OpAMD64HMULBU:
3468 // the frontend rewrites constant division by 8/16/32 bit integers into
3469 // HMUL by a constant
3470
3471 // Arg[0] is already in AX as it's the only register we allow
3472 // and DX is the only output we care about (the high bits)
3473 p := Prog(v.Op.Asm())
3474 p.From.Type = obj.TYPE_REG
3475 p.From.Reg = regnum(v.Args[1])
3476
3477 // IMULB puts the high portion in AH instead of DL,
3478 // so move it to DL for consistency
3479 if v.Type.Size() == 1 {
3480 m := Prog(x86.AMOVB)
3481 m.From.Type = obj.TYPE_REG
3482 m.From.Reg = x86.REG_AH
3483 m.To.Type = obj.TYPE_REG
3484 m.To.Reg = x86.REG_DX
3485 }
3486
Keith Randall20550cb2015-07-28 16:04:50 -07003487 case ssa.OpAMD64SHLQ, ssa.OpAMD64SHLL, ssa.OpAMD64SHLW, ssa.OpAMD64SHLB,
3488 ssa.OpAMD64SHRQ, ssa.OpAMD64SHRL, ssa.OpAMD64SHRW, ssa.OpAMD64SHRB,
3489 ssa.OpAMD64SARQ, ssa.OpAMD64SARL, ssa.OpAMD64SARW, ssa.OpAMD64SARB:
Keith Randall6f188472015-06-10 10:39:57 -07003490 x := regnum(v.Args[0])
3491 r := regnum(v)
3492 if x != r {
3493 if r == x86.REG_CX {
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003494 v.Fatalf("can't implement %s, target and shift both in CX", v.LongString())
Keith Randall6f188472015-06-10 10:39:57 -07003495 }
Keith Randall20550cb2015-07-28 16:04:50 -07003496 p := Prog(regMoveAMD64(v.Type.Size()))
Keith Randall6f188472015-06-10 10:39:57 -07003497 p.From.Type = obj.TYPE_REG
3498 p.From.Reg = x
3499 p.To.Type = obj.TYPE_REG
3500 p.To.Reg = r
Keith Randall6f188472015-06-10 10:39:57 -07003501 }
Michael Matloob703ef062015-06-16 11:11:16 -07003502 p := Prog(v.Op.Asm())
Keith Randall6f188472015-06-10 10:39:57 -07003503 p.From.Type = obj.TYPE_REG
3504 p.From.Reg = regnum(v.Args[1]) // should be CX
3505 p.To.Type = obj.TYPE_REG
3506 p.To.Reg = r
Keith Randall20550cb2015-07-28 16:04:50 -07003507 case ssa.OpAMD64ADDQconst, ssa.OpAMD64ADDLconst, ssa.OpAMD64ADDWconst:
3508 // TODO: use addq instead of leaq if target is in the right register.
3509 var asm int
3510 switch v.Op {
3511 case ssa.OpAMD64ADDQconst:
3512 asm = x86.ALEAQ
3513 case ssa.OpAMD64ADDLconst:
3514 asm = x86.ALEAL
3515 case ssa.OpAMD64ADDWconst:
3516 asm = x86.ALEAW
3517 }
3518 p := Prog(asm)
3519 p.From.Type = obj.TYPE_MEM
3520 p.From.Reg = regnum(v.Args[0])
3521 p.From.Offset = v.AuxInt
3522 p.To.Type = obj.TYPE_REG
3523 p.To.Reg = regnum(v)
Alexandru Moșoi7a6de6d2015-08-14 13:23:11 +02003524 case ssa.OpAMD64MULQconst, ssa.OpAMD64MULLconst, ssa.OpAMD64MULWconst, ssa.OpAMD64MULBconst:
Keith Randall20550cb2015-07-28 16:04:50 -07003525 r := regnum(v)
3526 x := regnum(v.Args[0])
3527 if r != x {
3528 p := Prog(regMoveAMD64(v.Type.Size()))
3529 p.From.Type = obj.TYPE_REG
3530 p.From.Reg = x
3531 p.To.Type = obj.TYPE_REG
3532 p.To.Reg = r
3533 }
3534 p := Prog(v.Op.Asm())
3535 p.From.Type = obj.TYPE_CONST
3536 p.From.Offset = v.AuxInt
3537 p.To.Type = obj.TYPE_REG
3538 p.To.Reg = r
3539 // TODO: Teach doasm to compile the three-address multiply imul $c, r1, r2
3540 // instead of using the MOVQ above.
3541 //p.From3 = new(obj.Addr)
3542 //p.From3.Type = obj.TYPE_REG
3543 //p.From3.Reg = regnum(v.Args[0])
3544 case ssa.OpAMD64ADDBconst,
3545 ssa.OpAMD64ANDQconst, ssa.OpAMD64ANDLconst, ssa.OpAMD64ANDWconst, ssa.OpAMD64ANDBconst,
3546 ssa.OpAMD64ORQconst, ssa.OpAMD64ORLconst, ssa.OpAMD64ORWconst, ssa.OpAMD64ORBconst,
3547 ssa.OpAMD64XORQconst, ssa.OpAMD64XORLconst, ssa.OpAMD64XORWconst, ssa.OpAMD64XORBconst,
3548 ssa.OpAMD64SUBQconst, ssa.OpAMD64SUBLconst, ssa.OpAMD64SUBWconst, ssa.OpAMD64SUBBconst,
3549 ssa.OpAMD64SHLQconst, ssa.OpAMD64SHLLconst, ssa.OpAMD64SHLWconst, ssa.OpAMD64SHLBconst,
3550 ssa.OpAMD64SHRQconst, ssa.OpAMD64SHRLconst, ssa.OpAMD64SHRWconst, ssa.OpAMD64SHRBconst,
David Chase40aba8c2015-08-05 22:11:14 -04003551 ssa.OpAMD64SARQconst, ssa.OpAMD64SARLconst, ssa.OpAMD64SARWconst, ssa.OpAMD64SARBconst,
3552 ssa.OpAMD64ROLQconst, ssa.OpAMD64ROLLconst, ssa.OpAMD64ROLWconst, ssa.OpAMD64ROLBconst:
Keith Randall20550cb2015-07-28 16:04:50 -07003553 // This code compensates for the fact that the register allocator
3554 // doesn't understand 2-address instructions yet. TODO: fix that.
Keith Randall247786c2015-05-28 10:47:24 -07003555 x := regnum(v.Args[0])
3556 r := regnum(v)
3557 if x != r {
Keith Randall20550cb2015-07-28 16:04:50 -07003558 p := Prog(regMoveAMD64(v.Type.Size()))
Keith Randall247786c2015-05-28 10:47:24 -07003559 p.From.Type = obj.TYPE_REG
3560 p.From.Reg = x
3561 p.To.Type = obj.TYPE_REG
3562 p.To.Reg = r
Keith Randall247786c2015-05-28 10:47:24 -07003563 }
Michael Matloob703ef062015-06-16 11:11:16 -07003564 p := Prog(v.Op.Asm())
Keith Randall247786c2015-05-28 10:47:24 -07003565 p.From.Type = obj.TYPE_CONST
Keith Randall8f22b522015-06-11 21:29:25 -07003566 p.From.Offset = v.AuxInt
Keith Randall247786c2015-05-28 10:47:24 -07003567 p.To.Type = obj.TYPE_REG
Keith Randalldbd83c42015-06-28 06:08:50 -07003568 p.To.Reg = r
Keith Randall4b803152015-07-29 17:07:09 -07003569 case ssa.OpAMD64SBBQcarrymask, ssa.OpAMD64SBBLcarrymask:
Keith Randall6f188472015-06-10 10:39:57 -07003570 r := regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07003571 p := Prog(v.Op.Asm())
Keith Randall6f188472015-06-10 10:39:57 -07003572 p.From.Type = obj.TYPE_REG
3573 p.From.Reg = r
3574 p.To.Type = obj.TYPE_REG
3575 p.To.Reg = r
Todd Neald90e0482015-07-23 20:01:40 -05003576 case ssa.OpAMD64LEAQ1, ssa.OpAMD64LEAQ2, ssa.OpAMD64LEAQ4, ssa.OpAMD64LEAQ8:
Keith Randall247786c2015-05-28 10:47:24 -07003577 p := Prog(x86.ALEAQ)
3578 p.From.Type = obj.TYPE_MEM
3579 p.From.Reg = regnum(v.Args[0])
Todd Neald90e0482015-07-23 20:01:40 -05003580 switch v.Op {
3581 case ssa.OpAMD64LEAQ1:
3582 p.From.Scale = 1
3583 case ssa.OpAMD64LEAQ2:
3584 p.From.Scale = 2
3585 case ssa.OpAMD64LEAQ4:
3586 p.From.Scale = 4
3587 case ssa.OpAMD64LEAQ8:
3588 p.From.Scale = 8
3589 }
Keith Randall247786c2015-05-28 10:47:24 -07003590 p.From.Index = regnum(v.Args[1])
Keith Randall8c46aa52015-06-19 21:02:28 -07003591 addAux(&p.From, v)
3592 p.To.Type = obj.TYPE_REG
3593 p.To.Reg = regnum(v)
3594 case ssa.OpAMD64LEAQ:
3595 p := Prog(x86.ALEAQ)
3596 p.From.Type = obj.TYPE_MEM
3597 p.From.Reg = regnum(v.Args[0])
3598 addAux(&p.From, v)
Keith Randall247786c2015-05-28 10:47:24 -07003599 p.To.Type = obj.TYPE_REG
3600 p.To.Reg = regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07003601 case ssa.OpAMD64CMPQ, ssa.OpAMD64CMPL, ssa.OpAMD64CMPW, ssa.OpAMD64CMPB,
3602 ssa.OpAMD64TESTQ, ssa.OpAMD64TESTL, ssa.OpAMD64TESTW, ssa.OpAMD64TESTB:
David Chase8e601b22015-08-18 14:39:26 -04003603 opregreg(v.Op.Asm(), regnum(v.Args[1]), regnum(v.Args[0]))
3604 case ssa.OpAMD64UCOMISS, ssa.OpAMD64UCOMISD:
3605 // Go assembler has swapped operands for UCOMISx relative to CMP,
3606 // must account for that right here.
3607 opregreg(v.Op.Asm(), regnum(v.Args[0]), regnum(v.Args[1]))
Keith Randall20550cb2015-07-28 16:04:50 -07003608 case ssa.OpAMD64CMPQconst, ssa.OpAMD64CMPLconst, ssa.OpAMD64CMPWconst, ssa.OpAMD64CMPBconst,
3609 ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst:
3610 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003611 p.From.Type = obj.TYPE_REG
3612 p.From.Reg = regnum(v.Args[0])
3613 p.To.Type = obj.TYPE_CONST
Keith Randall8f22b522015-06-11 21:29:25 -07003614 p.To.Offset = v.AuxInt
Keith Randall9cb332e2015-07-28 14:19:20 -07003615 case ssa.OpAMD64MOVBconst, ssa.OpAMD64MOVWconst, ssa.OpAMD64MOVLconst, ssa.OpAMD64MOVQconst:
Keith Randall083a6462015-05-12 11:06:44 -07003616 x := regnum(v)
Keith Randall9cb332e2015-07-28 14:19:20 -07003617 p := Prog(v.Op.Asm())
Keith Randall083a6462015-05-12 11:06:44 -07003618 p.From.Type = obj.TYPE_CONST
Keith Randall9cb332e2015-07-28 14:19:20 -07003619 var i int64
3620 switch v.Op {
3621 case ssa.OpAMD64MOVBconst:
3622 i = int64(int8(v.AuxInt))
3623 case ssa.OpAMD64MOVWconst:
3624 i = int64(int16(v.AuxInt))
3625 case ssa.OpAMD64MOVLconst:
3626 i = int64(int32(v.AuxInt))
3627 case ssa.OpAMD64MOVQconst:
3628 i = v.AuxInt
3629 }
3630 p.From.Offset = i
Keith Randall083a6462015-05-12 11:06:44 -07003631 p.To.Type = obj.TYPE_REG
3632 p.To.Reg = x
David Chase997a9f32015-08-12 16:38:11 -04003633 case ssa.OpAMD64MOVSSconst, ssa.OpAMD64MOVSDconst:
3634 x := regnum(v)
3635 p := Prog(v.Op.Asm())
3636 p.From.Type = obj.TYPE_FCONST
Todd Neal19447a62015-09-04 06:33:56 -05003637 p.From.Val = math.Float64frombits(uint64(v.AuxInt))
David Chase997a9f32015-08-12 16:38:11 -04003638 p.To.Type = obj.TYPE_REG
3639 p.To.Reg = x
3640 case ssa.OpAMD64MOVQload, ssa.OpAMD64MOVSSload, ssa.OpAMD64MOVSDload, ssa.OpAMD64MOVLload, ssa.OpAMD64MOVWload, ssa.OpAMD64MOVBload, ssa.OpAMD64MOVBQSXload, ssa.OpAMD64MOVBQZXload:
Michael Matloob703ef062015-06-16 11:11:16 -07003641 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003642 p.From.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003643 p.From.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003644 addAux(&p.From, v)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003645 p.To.Type = obj.TYPE_REG
3646 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003647 case ssa.OpAMD64MOVQloadidx8, ssa.OpAMD64MOVSDloadidx8:
3648 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003649 p.From.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003650 p.From.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003651 addAux(&p.From, v)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003652 p.From.Scale = 8
3653 p.From.Index = regnum(v.Args[1])
3654 p.To.Type = obj.TYPE_REG
3655 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003656 case ssa.OpAMD64MOVSSloadidx4:
3657 p := Prog(v.Op.Asm())
3658 p.From.Type = obj.TYPE_MEM
3659 p.From.Reg = regnum(v.Args[0])
3660 addAux(&p.From, v)
3661 p.From.Scale = 4
3662 p.From.Index = regnum(v.Args[1])
3663 p.To.Type = obj.TYPE_REG
3664 p.To.Reg = regnum(v)
3665 case ssa.OpAMD64MOVQstore, ssa.OpAMD64MOVSSstore, ssa.OpAMD64MOVSDstore, ssa.OpAMD64MOVLstore, ssa.OpAMD64MOVWstore, ssa.OpAMD64MOVBstore:
Michael Matloob73054f52015-06-14 11:38:46 -07003666 p := Prog(v.Op.Asm())
Keith Randall083a6462015-05-12 11:06:44 -07003667 p.From.Type = obj.TYPE_REG
Keith Randallcfc2aa52015-05-18 16:44:20 -07003668 p.From.Reg = regnum(v.Args[1])
Keith Randall083a6462015-05-12 11:06:44 -07003669 p.To.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003670 p.To.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003671 addAux(&p.To, v)
David Chase997a9f32015-08-12 16:38:11 -04003672 case ssa.OpAMD64MOVQstoreidx8, ssa.OpAMD64MOVSDstoreidx8:
3673 p := Prog(v.Op.Asm())
Josh Bleecher Snyder3e3d1622015-07-27 16:36:36 -07003674 p.From.Type = obj.TYPE_REG
3675 p.From.Reg = regnum(v.Args[2])
3676 p.To.Type = obj.TYPE_MEM
3677 p.To.Reg = regnum(v.Args[0])
3678 p.To.Scale = 8
3679 p.To.Index = regnum(v.Args[1])
3680 addAux(&p.To, v)
David Chase997a9f32015-08-12 16:38:11 -04003681 case ssa.OpAMD64MOVSSstoreidx4:
3682 p := Prog(v.Op.Asm())
3683 p.From.Type = obj.TYPE_REG
3684 p.From.Reg = regnum(v.Args[2])
3685 p.To.Type = obj.TYPE_MEM
3686 p.To.Reg = regnum(v.Args[0])
3687 p.To.Scale = 4
3688 p.To.Index = regnum(v.Args[1])
3689 addAux(&p.To, v)
David Chase42825882015-08-20 15:14:20 -04003690 case ssa.OpAMD64MOVLQSX, ssa.OpAMD64MOVWQSX, ssa.OpAMD64MOVBQSX, ssa.OpAMD64MOVLQZX, ssa.OpAMD64MOVWQZX, ssa.OpAMD64MOVBQZX,
3691 ssa.OpAMD64CVTSL2SS, ssa.OpAMD64CVTSL2SD, ssa.OpAMD64CVTSQ2SS, ssa.OpAMD64CVTSQ2SD,
Todd Neal634b50c2015-09-01 19:05:44 -05003692 ssa.OpAMD64CVTTSS2SL, ssa.OpAMD64CVTTSD2SL, ssa.OpAMD64CVTTSS2SQ, ssa.OpAMD64CVTTSD2SQ,
David Chase42825882015-08-20 15:14:20 -04003693 ssa.OpAMD64CVTSS2SD, ssa.OpAMD64CVTSD2SS:
3694 opregreg(v.Op.Asm(), regnum(v), regnum(v.Args[0]))
Keith Randall04d6edc2015-09-18 18:23:34 -07003695 case ssa.OpAMD64DUFFZERO:
3696 p := Prog(obj.ADUFFZERO)
3697 p.To.Type = obj.TYPE_ADDR
3698 p.To.Sym = Linksym(Pkglookup("duffzero", Runtimepkg))
3699 p.To.Offset = v.AuxInt
3700
Keith Randallf7f604e2015-05-27 14:52:22 -07003701 case ssa.OpCopy: // TODO: lower to MOVQ earlier?
3702 if v.Type.IsMemory() {
3703 return
3704 }
Keith Randall083a6462015-05-12 11:06:44 -07003705 x := regnum(v.Args[0])
3706 y := regnum(v)
3707 if x != y {
David Chase997a9f32015-08-12 16:38:11 -04003708 opregreg(regMoveByTypeAMD64(v.Type), y, x)
Keith Randall083a6462015-05-12 11:06:44 -07003709 }
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003710 case ssa.OpLoadReg:
Josh Bleecher Snyder26f135d2015-07-20 15:22:34 -07003711 if v.Type.IsFlags() {
3712 v.Unimplementedf("load flags not implemented: %v", v.LongString())
3713 return
3714 }
David Chase997a9f32015-08-12 16:38:11 -04003715 p := Prog(movSizeByType(v.Type))
Keith Randalld2107fc2015-08-24 02:16:19 -07003716 n := autoVar(v.Args[0])
Keith Randall083a6462015-05-12 11:06:44 -07003717 p.From.Type = obj.TYPE_MEM
Keith Randalld2107fc2015-08-24 02:16:19 -07003718 p.From.Name = obj.NAME_AUTO
3719 p.From.Node = n
3720 p.From.Sym = Linksym(n.Sym)
Keith Randall083a6462015-05-12 11:06:44 -07003721 p.To.Type = obj.TYPE_REG
3722 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003723
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003724 case ssa.OpStoreReg:
Josh Bleecher Snyder26f135d2015-07-20 15:22:34 -07003725 if v.Type.IsFlags() {
3726 v.Unimplementedf("store flags not implemented: %v", v.LongString())
3727 return
3728 }
David Chase997a9f32015-08-12 16:38:11 -04003729 p := Prog(movSizeByType(v.Type))
Keith Randall083a6462015-05-12 11:06:44 -07003730 p.From.Type = obj.TYPE_REG
3731 p.From.Reg = regnum(v.Args[0])
Keith Randalld2107fc2015-08-24 02:16:19 -07003732 n := autoVar(v)
Keith Randall083a6462015-05-12 11:06:44 -07003733 p.To.Type = obj.TYPE_MEM
Keith Randalld2107fc2015-08-24 02:16:19 -07003734 p.To.Name = obj.NAME_AUTO
3735 p.To.Node = n
3736 p.To.Sym = Linksym(n.Sym)
Keith Randall083a6462015-05-12 11:06:44 -07003737 case ssa.OpPhi:
Keith Randall0b46b422015-08-11 12:51:33 -07003738 // just check to make sure regalloc and stackalloc did it right
3739 if v.Type.IsMemory() {
3740 return
3741 }
Keith Randall083a6462015-05-12 11:06:44 -07003742 f := v.Block.Func
3743 loc := f.RegAlloc[v.ID]
3744 for _, a := range v.Args {
Josh Bleecher Snyder55845232015-08-05 16:43:49 -07003745 if aloc := f.RegAlloc[a.ID]; aloc != loc { // TODO: .Equal() instead?
3746 v.Fatalf("phi arg at different location than phi: %v @ %v, but arg %v @ %v\n%s\n", v, loc, a, aloc, v.Block.Func)
Keith Randall083a6462015-05-12 11:06:44 -07003747 }
3748 }
David Chase997a9f32015-08-12 16:38:11 -04003749 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64, ssa.OpConstString, ssa.OpConstNil, ssa.OpConstBool,
3750 ssa.OpConst32F, ssa.OpConst64F:
Keith Randall083a6462015-05-12 11:06:44 -07003751 if v.Block.Func.RegAlloc[v.ID] != nil {
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003752 v.Fatalf("const value %v shouldn't have a location", v)
Keith Randall083a6462015-05-12 11:06:44 -07003753 }
David Chase997a9f32015-08-12 16:38:11 -04003754
Keith Randall083a6462015-05-12 11:06:44 -07003755 case ssa.OpArg:
3756 // memory arg needs no code
Keith Randall8f22b522015-06-11 21:29:25 -07003757 // TODO: check that only mem arg goes here.
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07003758 case ssa.OpAMD64LoweredPanicNilCheck:
3759 if Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers
3760 Warnl(int(v.Line), "generated nil check")
3761 }
3762 // Write to memory address 0. It doesn't matter what we write; use AX.
Keith Randall8d236812015-08-18 15:25:40 -07003763 // Input 0 is the pointer we just checked, use it as the destination.
3764 r := regnum(v.Args[0])
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07003765 q := Prog(x86.AMOVL)
3766 q.From.Type = obj.TYPE_REG
3767 q.From.Reg = x86.REG_AX
3768 q.To.Type = obj.TYPE_MEM
Keith Randall8d236812015-08-18 15:25:40 -07003769 q.To.Reg = r
David Chase956f3192015-09-11 16:40:05 -04003770 case ssa.OpAMD64LoweredGetClosurePtr:
3771 // Output is hardwired to DX only,
3772 // and DX contains the closure pointer on
3773 // closure entry, and this "instruction"
3774 // is scheduled to the very beginning
3775 // of the entry block.
Josh Bleecher Snyder3d23afb2015-08-12 11:22:16 -07003776 case ssa.OpAMD64LoweredGetG:
3777 r := regnum(v)
3778 // See the comments in cmd/internal/obj/x86/obj6.go
3779 // near CanUse1InsnTLS for a detailed explanation of these instructions.
3780 if x86.CanUse1InsnTLS(Ctxt) {
3781 // MOVQ (TLS), r
3782 p := Prog(x86.AMOVQ)
3783 p.From.Type = obj.TYPE_MEM
3784 p.From.Reg = x86.REG_TLS
3785 p.To.Type = obj.TYPE_REG
3786 p.To.Reg = r
3787 } else {
3788 // MOVQ TLS, r
3789 // MOVQ (r)(TLS*1), r
3790 p := Prog(x86.AMOVQ)
3791 p.From.Type = obj.TYPE_REG
3792 p.From.Reg = x86.REG_TLS
3793 p.To.Type = obj.TYPE_REG
3794 p.To.Reg = r
3795 q := Prog(x86.AMOVQ)
3796 q.From.Type = obj.TYPE_MEM
3797 q.From.Reg = r
3798 q.From.Index = x86.REG_TLS
3799 q.From.Scale = 1
3800 q.To.Type = obj.TYPE_REG
3801 q.To.Reg = r
3802 }
Keith Randall290d8fc2015-06-10 15:03:06 -07003803 case ssa.OpAMD64CALLstatic:
Keith Randall247786c2015-05-28 10:47:24 -07003804 p := Prog(obj.ACALL)
3805 p.To.Type = obj.TYPE_MEM
3806 p.To.Name = obj.NAME_EXTERN
3807 p.To.Sym = Linksym(v.Aux.(*Sym))
Keith Randalld2107fc2015-08-24 02:16:19 -07003808 if Maxarg < v.AuxInt {
3809 Maxarg = v.AuxInt
3810 }
Keith Randall290d8fc2015-06-10 15:03:06 -07003811 case ssa.OpAMD64CALLclosure:
3812 p := Prog(obj.ACALL)
3813 p.To.Type = obj.TYPE_REG
3814 p.To.Reg = regnum(v.Args[0])
Keith Randalld2107fc2015-08-24 02:16:19 -07003815 if Maxarg < v.AuxInt {
3816 Maxarg = v.AuxInt
3817 }
Keith Randall9569b952015-08-28 22:51:01 -07003818 case ssa.OpAMD64CALLdefer:
3819 p := Prog(obj.ACALL)
3820 p.To.Type = obj.TYPE_MEM
3821 p.To.Name = obj.NAME_EXTERN
3822 p.To.Sym = Linksym(Deferproc.Sym)
3823 if Maxarg < v.AuxInt {
3824 Maxarg = v.AuxInt
3825 }
3826 // defer returns in rax:
3827 // 0 if we should continue executing
3828 // 1 if we should jump to deferreturn call
3829 p = Prog(x86.ATESTL)
3830 p.From.Type = obj.TYPE_REG
3831 p.From.Reg = x86.REG_AX
3832 p.To.Type = obj.TYPE_REG
3833 p.To.Reg = x86.REG_AX
3834 p = Prog(x86.AJNE)
3835 p.To.Type = obj.TYPE_BRANCH
3836 s.deferBranches = append(s.deferBranches, p)
3837 case ssa.OpAMD64CALLgo:
3838 p := Prog(obj.ACALL)
3839 p.To.Type = obj.TYPE_MEM
3840 p.To.Name = obj.NAME_EXTERN
3841 p.To.Sym = Linksym(Newproc.Sym)
3842 if Maxarg < v.AuxInt {
3843 Maxarg = v.AuxInt
3844 }
Keith Randalld24768e2015-09-09 23:56:59 -07003845 case ssa.OpAMD64CALLinter:
3846 p := Prog(obj.ACALL)
3847 p.To.Type = obj.TYPE_REG
3848 p.To.Reg = regnum(v.Args[0])
3849 if Maxarg < v.AuxInt {
3850 Maxarg = v.AuxInt
3851 }
Keith Randall4b803152015-07-29 17:07:09 -07003852 case ssa.OpAMD64NEGQ, ssa.OpAMD64NEGL, ssa.OpAMD64NEGW, ssa.OpAMD64NEGB,
3853 ssa.OpAMD64NOTQ, ssa.OpAMD64NOTL, ssa.OpAMD64NOTW, ssa.OpAMD64NOTB:
Josh Bleecher Snyder93c354b62015-07-30 17:15:16 -07003854 x := regnum(v.Args[0])
3855 r := regnum(v)
3856 if x != r {
3857 p := Prog(regMoveAMD64(v.Type.Size()))
3858 p.From.Type = obj.TYPE_REG
3859 p.From.Reg = x
3860 p.To.Type = obj.TYPE_REG
3861 p.To.Reg = r
3862 }
Alexandru Moșoi954d5ad2015-07-21 16:58:18 +02003863 p := Prog(v.Op.Asm())
3864 p.To.Type = obj.TYPE_REG
Josh Bleecher Snyder93c354b62015-07-30 17:15:16 -07003865 p.To.Reg = r
Keith Randalla329e212015-09-12 13:26:57 -07003866 case ssa.OpAMD64SQRTSD:
3867 p := Prog(v.Op.Asm())
3868 p.From.Type = obj.TYPE_REG
3869 p.From.Reg = regnum(v.Args[0])
3870 p.To.Type = obj.TYPE_REG
3871 p.To.Reg = regnum(v)
Keith Randall8c46aa52015-06-19 21:02:28 -07003872 case ssa.OpSP, ssa.OpSB:
Keith Randallcfc2aa52015-05-18 16:44:20 -07003873 // nothing to do
Josh Bleecher Snydera7940742015-07-20 15:21:49 -07003874 case ssa.OpAMD64SETEQ, ssa.OpAMD64SETNE,
3875 ssa.OpAMD64SETL, ssa.OpAMD64SETLE,
3876 ssa.OpAMD64SETG, ssa.OpAMD64SETGE,
David Chase8e601b22015-08-18 14:39:26 -04003877 ssa.OpAMD64SETGF, ssa.OpAMD64SETGEF,
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003878 ssa.OpAMD64SETB, ssa.OpAMD64SETBE,
David Chase8e601b22015-08-18 14:39:26 -04003879 ssa.OpAMD64SETORD, ssa.OpAMD64SETNAN,
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003880 ssa.OpAMD64SETA, ssa.OpAMD64SETAE:
Josh Bleecher Snydera7940742015-07-20 15:21:49 -07003881 p := Prog(v.Op.Asm())
3882 p.To.Type = obj.TYPE_REG
3883 p.To.Reg = regnum(v)
David Chase8e601b22015-08-18 14:39:26 -04003884
3885 case ssa.OpAMD64SETNEF:
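		// SETNE alone is not enough for floats: an unordered compare (NaN operand)
		// sets both ZF and PF, and x != y must be true when either operand is NaN,
		// so OR in the parity bit captured by SETPS below.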
3886 p := Prog(v.Op.Asm())
3887 p.To.Type = obj.TYPE_REG
3888 p.To.Reg = regnum(v)
3889 q := Prog(x86.ASETPS)
3890 q.To.Type = obj.TYPE_REG
3891 q.To.Reg = x86.REG_AX
3892 // TODO AORQ copied from old code generator, why not AORB?
3893 opregreg(x86.AORQ, regnum(v), x86.REG_AX)
3894
3895 case ssa.OpAMD64SETEQF:
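		// SETEQ alone would report NaN == NaN as true, because unordered compares
		// set ZF. AND the result with the inverted parity bit (SETPC) so the answer
		// is false whenever either operand is NaN.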
3896 p := Prog(v.Op.Asm())
3897 p.To.Type = obj.TYPE_REG
3898 p.To.Reg = regnum(v)
3899 q := Prog(x86.ASETPC)
3900 q.To.Type = obj.TYPE_REG
3901 q.To.Reg = x86.REG_AX
3902 // TODO AANDQ copied from old code generator, why not AANDB?
3903 opregreg(x86.AANDQ, regnum(v), x86.REG_AX)
3904
Keith Randall20550cb2015-07-28 16:04:50 -07003905 case ssa.OpAMD64InvertFlags:
3906 v.Fatalf("InvertFlags should never make it to codegen %v", v)
3907 case ssa.OpAMD64REPSTOSQ:
3908 Prog(x86.AREP)
3909 Prog(x86.ASTOSQ)
Keith Randall20550cb2015-07-28 16:04:50 -07003910 case ssa.OpAMD64REPMOVSB:
3911 Prog(x86.AREP)
3912 Prog(x86.AMOVSB)
Keith Randalld2107fc2015-08-24 02:16:19 -07003913 case ssa.OpVarDef:
3914 Gvardef(v.Aux.(*Node))
3915 case ssa.OpVarKill:
3916 gvarkill(v.Aux.(*Node))
Keith Randall083a6462015-05-12 11:06:44 -07003917 default:
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003918 v.Unimplementedf("genValue not implemented: %s", v.LongString())
Keith Randall083a6462015-05-12 11:06:44 -07003919 }
3920}
3921
David Chase997a9f32015-08-12 16:38:11 -04003922// movSizeByType returns the MOV instruction for values of the given type.
3923func movSizeByType(t ssa.Type) (asm int) {
3924 // For x86, there's no difference between reg move opcodes
3925 // and memory move opcodes.
3926 asm = regMoveByTypeAMD64(t)
3927 return
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003928}
3929
Daniel Morsing66b47812015-06-27 15:45:20 +01003930// movZero generates a register-indirect move with a zero immediate, and returns the number of bytes left to zero and the next offset.
3931func movZero(as int, width int64, nbytes int64, offset int64, regnum int16) (nleft int64, noff int64) {
3932 p := Prog(as)
3933 // TODO: use zero register on archs that support it.
3934 p.From.Type = obj.TYPE_CONST
3935 p.From.Offset = 0
3936 p.To.Type = obj.TYPE_MEM
3937 p.To.Reg = regnum
3938 p.To.Offset = offset
3939 offset += width
3940 nleft = nbytes - width
3941 return nleft, offset
3942}
3943
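// blockJump maps a conditional block kind to its branch instruction (asm) and
// the branch for the inverted condition (invasm). genBlock uses the inverted
// form when the block's first successor is the fallthrough block.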
David Chase8e601b22015-08-18 14:39:26 -04003944var blockJump = [...]struct {
3945 asm, invasm int
3946}{
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003947 ssa.BlockAMD64EQ: {x86.AJEQ, x86.AJNE},
3948 ssa.BlockAMD64NE: {x86.AJNE, x86.AJEQ},
3949 ssa.BlockAMD64LT: {x86.AJLT, x86.AJGE},
3950 ssa.BlockAMD64GE: {x86.AJGE, x86.AJLT},
3951 ssa.BlockAMD64LE: {x86.AJLE, x86.AJGT},
3952 ssa.BlockAMD64GT: {x86.AJGT, x86.AJLE},
3953 ssa.BlockAMD64ULT: {x86.AJCS, x86.AJCC},
3954 ssa.BlockAMD64UGE: {x86.AJCC, x86.AJCS},
3955 ssa.BlockAMD64UGT: {x86.AJHI, x86.AJLS},
3956 ssa.BlockAMD64ULE: {x86.AJLS, x86.AJHI},
David Chase8e601b22015-08-18 14:39:26 -04003957 ssa.BlockAMD64ORD: {x86.AJPC, x86.AJPS},
3958 ssa.BlockAMD64NAN: {x86.AJPS, x86.AJPC},
3959}
3960
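// floatingEQNEJump describes one branch of the two-instruction sequence needed
// for a floating-point equality or inequality block: jump is the instruction
// to emit and index selects which of the block's successors it targets.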
3961type floatingEQNEJump struct {
3962 jump, index int
3963}
3964
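// eqfJumps and nefJumps encode the branch pairs for floating-point equality
// and inequality. Two jumps are needed because unordered comparisons (NaN
// operands) set the parity flag: equality requires ZF set and PF clear, while
// inequality holds if ZF is clear or PF is set. The outer index is selected by
// which successor is the fallthrough block, mirroring genFPJump below.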
3965var eqfJumps = [2][2]floatingEQNEJump{
3966 {{x86.AJNE, 1}, {x86.AJPS, 1}}, // next == b.Succs[0]
3967 {{x86.AJNE, 1}, {x86.AJPC, 0}}, // next == b.Succs[1]
3968}
3969var nefJumps = [2][2]floatingEQNEJump{
3970 {{x86.AJNE, 0}, {x86.AJPC, 1}}, // next == b.Succs[0]
3971 {{x86.AJNE, 0}, {x86.AJPS, 0}}, // next == b.Succs[1]
3972}
3973
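// oneFPJump emits a single conditional branch described by jumps and appends
// it to branches. The branch-prediction hint is negated when the branch
// targets b.Succs[1], since b.Likely describes the likelihood of Succs[0].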
3974func oneFPJump(b *ssa.Block, jumps *floatingEQNEJump, likely ssa.BranchPrediction, branches []branch) []branch {
3975 p := Prog(jumps.jump)
3976 p.To.Type = obj.TYPE_BRANCH
3977 to := jumps.index
3978 branches = append(branches, branch{p, b.Succs[to]})
3979 if to == 1 {
3980 likely = -likely
3981 }
3982 // liblink reorders the instruction stream as it sees fit.
3983 // Pass along what we know so liblink can make use of it.
3984 // TODO: Once we've fully switched to SSA,
3985 // make liblink leave our output alone.
3986 switch likely {
3987 case ssa.BranchUnlikely:
3988 p.From.Type = obj.TYPE_CONST
3989 p.From.Offset = 0
3990 case ssa.BranchLikely:
3991 p.From.Type = obj.TYPE_CONST
3992 p.From.Offset = 1
3993 }
3994 return branches
3995}
3996
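// genFPJump emits the branch sequence for a floating-point EQF or NEF block,
// choosing the row of jumps that matches whichever successor is the
// fallthrough block, plus an unconditional JMP when neither successor is next.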
Keith Randall9569b952015-08-28 22:51:01 -07003997func genFPJump(s *genState, b, next *ssa.Block, jumps *[2][2]floatingEQNEJump) {
David Chase8e601b22015-08-18 14:39:26 -04003998 likely := b.Likely
3999 switch next {
4000 case b.Succs[0]:
Keith Randall9569b952015-08-28 22:51:01 -07004001 s.branches = oneFPJump(b, &jumps[0][0], likely, s.branches)
4002 s.branches = oneFPJump(b, &jumps[0][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04004003 case b.Succs[1]:
Keith Randall9569b952015-08-28 22:51:01 -07004004 s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches)
4005 s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04004006 default:
Keith Randall9569b952015-08-28 22:51:01 -07004007 s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches)
4008 s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04004009 q := Prog(obj.AJMP)
4010 q.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004011 s.branches = append(s.branches, branch{q, b.Succs[1]})
David Chase8e601b22015-08-18 14:39:26 -04004012 }
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004013}
4014
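// genBlock emits the control-flow code for block b. next is the block that
// will be laid out immediately after b, so branches to it can be omitted or
// inverted into fallthroughs.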
Keith Randall9569b952015-08-28 22:51:01 -07004015func (s *genState) genBlock(b, next *ssa.Block) {
Michael Matloob81ccf502015-05-30 01:03:06 -04004016 lineno = b.Line
Keith Randall8d236812015-08-18 15:25:40 -07004017
Keith Randall083a6462015-05-12 11:06:44 -07004018 switch b.Kind {
Keith Randallf5c53e02015-09-09 18:03:41 -07004019 case ssa.BlockPlain, ssa.BlockCall:
Keith Randall083a6462015-05-12 11:06:44 -07004020 if b.Succs[0] != next {
4021 p := Prog(obj.AJMP)
4022 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004023 s.branches = append(s.branches, branch{p, b.Succs[0]})
Keith Randall083a6462015-05-12 11:06:44 -07004024 }
4025 case ssa.BlockExit:
Keith Randall5f105732015-09-17 15:19:23 -07004026 Prog(obj.AUNDEF) // tell plive.go that we never reach here
Keith Randall10f38f52015-09-03 09:09:59 -07004027 case ssa.BlockRet:
Keith Randall0ec72b62015-09-08 15:42:53 -07004028 if hasdefer {
Keith Randallca9e4502015-09-08 08:59:57 -07004029 s.deferReturn()
Keith Randall9569b952015-08-28 22:51:01 -07004030 }
Keith Randall083a6462015-05-12 11:06:44 -07004031 Prog(obj.ARET)
Keith Randall8a1f6212015-09-08 21:28:44 -07004032 case ssa.BlockRetJmp:
4033 p := Prog(obj.AJMP)
4034 p.To.Type = obj.TYPE_MEM
4035 p.To.Name = obj.NAME_EXTERN
4036 p.To.Sym = Linksym(b.Aux.(*Sym))
David Chase8e601b22015-08-18 14:39:26 -04004037
4038 case ssa.BlockAMD64EQF:
Keith Randall9569b952015-08-28 22:51:01 -07004039 genFPJump(s, b, next, &eqfJumps)
David Chase8e601b22015-08-18 14:39:26 -04004040
4041 case ssa.BlockAMD64NEF:
Keith Randall9569b952015-08-28 22:51:01 -07004042 genFPJump(s, b, next, &nefJumps)
David Chase8e601b22015-08-18 14:39:26 -04004043
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004044 case ssa.BlockAMD64EQ, ssa.BlockAMD64NE,
4045 ssa.BlockAMD64LT, ssa.BlockAMD64GE,
4046 ssa.BlockAMD64LE, ssa.BlockAMD64GT,
4047 ssa.BlockAMD64ULT, ssa.BlockAMD64UGT,
4048 ssa.BlockAMD64ULE, ssa.BlockAMD64UGE:
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004049 jmp := blockJump[b.Kind]
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07004050 likely := b.Likely
4051 var p *obj.Prog
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004052 switch next {
4053 case b.Succs[0]:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07004054 p = Prog(jmp.invasm)
4055 likely *= -1
Keith Randallcfc2aa52015-05-18 16:44:20 -07004056 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004057 s.branches = append(s.branches, branch{p, b.Succs[1]})
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004058 case b.Succs[1]:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07004059 p = Prog(jmp.asm)
Keith Randallcfc2aa52015-05-18 16:44:20 -07004060 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004061 s.branches = append(s.branches, branch{p, b.Succs[0]})
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07004062 default:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07004063 p = Prog(jmp.asm)
Keith Randallcfc2aa52015-05-18 16:44:20 -07004064 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004065 s.branches = append(s.branches, branch{p, b.Succs[0]})
Keith Randallcfc2aa52015-05-18 16:44:20 -07004066 q := Prog(obj.AJMP)
4067 q.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07004068 s.branches = append(s.branches, branch{q, b.Succs[1]})
Keith Randallcfc2aa52015-05-18 16:44:20 -07004069 }
4070
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07004071 // liblink reorders the instruction stream as it sees fit.
4072 // Pass along what we know so liblink can make use of it.
4073 // TODO: Once we've fully switched to SSA,
4074 // make liblink leave our output alone.
4075 switch likely {
4076 case ssa.BranchUnlikely:
4077 p.From.Type = obj.TYPE_CONST
4078 p.From.Offset = 0
4079 case ssa.BranchLikely:
4080 p.From.Type = obj.TYPE_CONST
4081 p.From.Offset = 1
4082 }
4083
Keith Randall083a6462015-05-12 11:06:44 -07004084 default:
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07004085 b.Unimplementedf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
Keith Randall083a6462015-05-12 11:06:44 -07004086 }
Keith Randall083a6462015-05-12 11:06:44 -07004087}
4088
Keith Randallca9e4502015-09-08 08:59:57 -07004089func (s *genState) deferReturn() {
4090 // Deferred calls will appear to be returning to
4091 // the CALL deferreturn(SB) that we are about to emit.
4092 // However, the stack trace code will show the line
4093 // of the instruction byte before the return PC.
4094 // To avoid that being an unrelated instruction,
4095 // insert an actual hardware NOP that will have the right line number.
4096 // This is different from obj.ANOP, which is a virtual no-op
4097 // that doesn't make it into the instruction stream.
4098 s.deferTarget = Pc
4099 Thearch.Ginsnop()
4100 p := Prog(obj.ACALL)
4101 p.To.Type = obj.TYPE_MEM
4102 p.To.Name = obj.NAME_EXTERN
4103 p.To.Sym = Linksym(Deferreturn.Sym)
4104}
4105
Keith Randall8c46aa52015-06-19 21:02:28 -07004106// addAux adds the offset in the aux fields (AuxInt and Aux) of v to a.
4107func addAux(a *obj.Addr, v *ssa.Value) {
4108 if a.Type != obj.TYPE_MEM {
4109 v.Fatalf("bad addAux addr %s", a)
4110 }
4111 // add integer offset
4112 a.Offset += v.AuxInt
4113
4114 // If no additional symbol offset, we're done.
4115 if v.Aux == nil {
4116 return
4117 }
4118 // Add symbol's offset from its base register.
4119 switch sym := v.Aux.(type) {
4120 case *ssa.ExternSymbol:
4121 a.Name = obj.NAME_EXTERN
4122 a.Sym = Linksym(sym.Sym.(*Sym))
4123 case *ssa.ArgSymbol:
Keith Randalld2107fc2015-08-24 02:16:19 -07004124 n := sym.Node.(*Node)
4125 a.Name = obj.NAME_PARAM
4126 a.Node = n
4127 a.Sym = Linksym(n.Orig.Sym)
4128 a.Offset += n.Xoffset // TODO: why do I have to add this here? I don't for auto variables.
Keith Randall8c46aa52015-06-19 21:02:28 -07004129 case *ssa.AutoSymbol:
Keith Randalld2107fc2015-08-24 02:16:19 -07004130 n := sym.Node.(*Node)
4131 a.Name = obj.NAME_AUTO
4132 a.Node = n
4133 a.Sym = Linksym(n.Sym)
Keith Randall8c46aa52015-06-19 21:02:28 -07004134 default:
4135 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
4136 }
4137}
4138
Keith Randall2a5e6c42015-07-23 14:35:02 -07004139// extendIndex extends v to a full pointer width.
4140func (s *state) extendIndex(v *ssa.Value) *ssa.Value {
4141 size := v.Type.Size()
4142 if size == s.config.PtrSize {
4143 return v
4144 }
4145 if size > s.config.PtrSize {
4146 // TODO: truncate 64-bit indexes on 32-bit pointer archs. We'd need to test
4147 // the high word and branch to out-of-bounds failure if it is not 0.
4148 s.Unimplementedf("64->32 index truncation not implemented")
4149 return v
4150 }
4151
4152 // Extend value to the required size
4153 var op ssa.Op
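	// The switch keys below encode both widths as 10*size + PtrSize; for
	// example, 14 means extending a 1-byte index on an arch with 4-byte pointers.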
4154 if v.Type.IsSigned() {
4155 switch 10*size + s.config.PtrSize {
4156 case 14:
4157 op = ssa.OpSignExt8to32
4158 case 18:
4159 op = ssa.OpSignExt8to64
4160 case 24:
4161 op = ssa.OpSignExt16to32
4162 case 28:
4163 op = ssa.OpSignExt16to64
4164 case 48:
4165 op = ssa.OpSignExt32to64
4166 default:
4167 s.Fatalf("bad signed index extension %s", v.Type)
4168 }
4169 } else {
4170 switch 10*size + s.config.PtrSize {
4171 case 14:
4172 op = ssa.OpZeroExt8to32
4173 case 18:
4174 op = ssa.OpZeroExt8to64
4175 case 24:
4176 op = ssa.OpZeroExt16to32
4177 case 28:
4178 op = ssa.OpZeroExt16to64
4179 case 48:
4180 op = ssa.OpZeroExt32to64
4181 default:
4182 s.Fatalf("bad unsigned index extension %s", v.Type)
4183 }
4184 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07004185 return s.newValue1(op, Types[TUINTPTR], v)
Keith Randall2a5e6c42015-07-23 14:35:02 -07004186}
4187
Keith Randall083a6462015-05-12 11:06:44 -07004188// ssaRegToReg maps ssa register numbers to obj register numbers.
4189var ssaRegToReg = [...]int16{
4190 x86.REG_AX,
4191 x86.REG_CX,
4192 x86.REG_DX,
4193 x86.REG_BX,
4194 x86.REG_SP,
4195 x86.REG_BP,
4196 x86.REG_SI,
4197 x86.REG_DI,
4198 x86.REG_R8,
4199 x86.REG_R9,
4200 x86.REG_R10,
4201 x86.REG_R11,
4202 x86.REG_R12,
4203 x86.REG_R13,
4204 x86.REG_R14,
4205 x86.REG_R15,
Keith Randall8c46aa52015-06-19 21:02:28 -07004206 x86.REG_X0,
4207 x86.REG_X1,
4208 x86.REG_X2,
4209 x86.REG_X3,
4210 x86.REG_X4,
4211 x86.REG_X5,
4212 x86.REG_X6,
4213 x86.REG_X7,
4214 x86.REG_X8,
4215 x86.REG_X9,
4216 x86.REG_X10,
4217 x86.REG_X11,
4218 x86.REG_X12,
4219 x86.REG_X13,
4220 x86.REG_X14,
4221 x86.REG_X15,
4222 0, // SB isn't a real register. We fill an Addr.Reg field with 0 in this case.
Keith Randall083a6462015-05-12 11:06:44 -07004223 // TODO: arch-dependent
4224}
4225
Keith Randall9cb332e2015-07-28 14:19:20 -07004226// regMoveAMD64 returns the register->register move opcode for the given width.
4227// TODO: generalize for all architectures?
4228func regMoveAMD64(width int64) int {
4229 switch width {
4230 case 1:
4231 return x86.AMOVB
4232 case 2:
4233 return x86.AMOVW
4234 case 4:
4235 return x86.AMOVL
4236 case 8:
4237 return x86.AMOVQ
4238 default:
David Chase997a9f32015-08-12 16:38:11 -04004239 panic("bad int register width")
Keith Randall9cb332e2015-07-28 14:19:20 -07004240 }
4241}
4242
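// regMoveByTypeAMD64 returns the register->register move opcode for values of
// type t, using the SSE moves (MOVSS/MOVSD) for floating-point types.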
David Chase997a9f32015-08-12 16:38:11 -04004243func regMoveByTypeAMD64(t ssa.Type) int {
4244 width := t.Size()
4245 if t.IsFloat() {
4246 switch width {
4247 case 4:
4248 return x86.AMOVSS
4249 case 8:
4250 return x86.AMOVSD
4251 default:
4252 panic("bad float register width")
4253 }
4254 } else {
4255 switch width {
4256 case 1:
4257 return x86.AMOVB
4258 case 2:
4259 return x86.AMOVW
4260 case 4:
4261 return x86.AMOVL
4262 case 8:
4263 return x86.AMOVQ
4264 default:
4265 panic("bad int register width")
4266 }
4267 }
4268
4269 panic("bad register type")
4270}
4271
Keith Randall083a6462015-05-12 11:06:44 -07004272// regnum returns the register (in cmd/internal/obj numbering) to
4273// which v has been allocated. Panics if v is not assigned to a
4274// register.
Josh Bleecher Snydere1395492015-08-05 16:06:39 -07004275// TODO: Make this panic again once it stops happening routinely.
Keith Randall083a6462015-05-12 11:06:44 -07004276func regnum(v *ssa.Value) int16 {
Josh Bleecher Snydere1395492015-08-05 16:06:39 -07004277 reg := v.Block.Func.RegAlloc[v.ID]
4278 if reg == nil {
4279 v.Unimplementedf("nil regnum for value: %s\n%s\n", v.LongString(), v.Block.Func)
4280 return 0
4281 }
4282 return ssaRegToReg[reg.(*ssa.Register).Num]
Keith Randall083a6462015-05-12 11:06:44 -07004283}
4284
Keith Randalld2107fc2015-08-24 02:16:19 -07004285// autoVar returns a *Node representing the auto variable assigned to v.
4286func autoVar(v *ssa.Value) *Node {
4287 return v.Block.Func.RegAlloc[v.ID].(*ssa.LocalSlot).N.(*Node)
Keith Randall083a6462015-05-12 11:06:44 -07004288}
Keith Randallf7f604e2015-05-27 14:52:22 -07004289
4290// ssaExport exports a bunch of compiler services for the ssa backend.
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004291type ssaExport struct {
4292 log bool
4293 unimplemented bool
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07004294 mustImplement bool
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004295}
Keith Randallf7f604e2015-05-27 14:52:22 -07004296
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07004297func (s *ssaExport) TypeBool() ssa.Type { return Types[TBOOL] }
4298func (s *ssaExport) TypeInt8() ssa.Type { return Types[TINT8] }
4299func (s *ssaExport) TypeInt16() ssa.Type { return Types[TINT16] }
4300func (s *ssaExport) TypeInt32() ssa.Type { return Types[TINT32] }
4301func (s *ssaExport) TypeInt64() ssa.Type { return Types[TINT64] }
4302func (s *ssaExport) TypeUInt8() ssa.Type { return Types[TUINT8] }
4303func (s *ssaExport) TypeUInt16() ssa.Type { return Types[TUINT16] }
4304func (s *ssaExport) TypeUInt32() ssa.Type { return Types[TUINT32] }
4305func (s *ssaExport) TypeUInt64() ssa.Type { return Types[TUINT64] }
David Chase52578582015-08-28 14:24:10 -04004306func (s *ssaExport) TypeFloat32() ssa.Type { return Types[TFLOAT32] }
4307func (s *ssaExport) TypeFloat64() ssa.Type { return Types[TFLOAT64] }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07004308func (s *ssaExport) TypeInt() ssa.Type { return Types[TINT] }
4309func (s *ssaExport) TypeUintptr() ssa.Type { return Types[TUINTPTR] }
4310func (s *ssaExport) TypeString() ssa.Type { return Types[TSTRING] }
4311func (s *ssaExport) TypeBytePtr() ssa.Type { return Ptrto(Types[TUINT8]) }
4312
Josh Bleecher Snyder8d31df18a2015-07-24 11:28:12 -07004313// StringData returns a symbol (a *Sym wrapped in an interface) which
4314// is the data component of a global string constant containing s.
4315func (*ssaExport) StringData(s string) interface{} {
Keith Randall8c46aa52015-06-19 21:02:28 -07004316 // TODO: is idealstring correct? It might not matter...
Josh Bleecher Snyder8d31df18a2015-07-24 11:28:12 -07004317 _, data := stringsym(s)
4318 return &ssa.ExternSymbol{Typ: idealstring, Sym: data}
Keith Randallf7f604e2015-05-27 14:52:22 -07004319}
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004320
Keith Randalld2107fc2015-08-24 02:16:19 -07004321func (e *ssaExport) Auto(t ssa.Type) fmt.Stringer {
4322 n := temp(t.(*Type)) // Note: adds new auto to Curfn.Func.Dcl list
4323 e.mustImplement = true // This modifies the input to SSA, so we want to make sure we succeed from here!
4324 return n
4325}
4326
Keith Randall37590bd2015-09-18 22:58:10 -07004327func (e ssaExport) CanSSA(t ssa.Type) bool {
4328 return canSSAType(t.(*Type))
4329}
4330
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004331// Logf logs a message from the compiler.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07004332func (e *ssaExport) Logf(msg string, args ...interface{}) {
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004333 // If e was marked as unimplemented, anything could happen. Ignore.
4334 if e.log && !e.unimplemented {
4335 fmt.Printf(msg, args...)
4336 }
4337}
4338
 4339// Fatalf reports a compiler error and exits.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07004340func (e *ssaExport) Fatalf(msg string, args ...interface{}) {
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004341 // If e was marked as unimplemented, anything could happen. Ignore.
4342 if !e.unimplemented {
Keith Randall0ec72b62015-09-08 15:42:53 -07004343 Fatalf(msg, args...)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004344 }
4345}
4346
 4347// Unimplementedf reports that the function cannot be compiled.
4348// It will be removed once SSA work is complete.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07004349func (e *ssaExport) Unimplementedf(msg string, args ...interface{}) {
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07004350 if e.mustImplement {
Keith Randall0ec72b62015-09-08 15:42:53 -07004351 Fatalf(msg, args...)
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07004352 }
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07004353 const alwaysLog = false // enable to calculate top unimplemented features
4354 if !e.unimplemented && (e.log || alwaysLog) {
4355 // first implementation failure, print explanation
4356 fmt.Printf("SSA unimplemented: "+msg+"\n", args...)
4357 }
4358 e.unimplemented = true
4359}