Keith Randalld2fd43a2015-04-15 15:51:25 -07001// Copyright 2015 The Go Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
5package gc
6
7import (
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -07008 "bytes"
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07009 "fmt"
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -070010 "html"
Todd Neal19447a62015-09-04 06:33:56 -050011 "math"
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -070012 "os"
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -060013 "strings"
Keith Randalld2fd43a2015-04-15 15:51:25 -070014
Keith Randall067e8df2015-05-28 13:49:20 -070015 "cmd/compile/internal/ssa"
Keith Randall083a6462015-05-12 11:06:44 -070016 "cmd/internal/obj"
Keith Randall8c46aa52015-06-19 21:02:28 -070017 "cmd/internal/obj/x86"
Keith Randalld2fd43a2015-04-15 15:51:25 -070018)
19
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070020// buildssa builds an SSA function
21// and reports whether it should be used.
22// Once the SSA implementation is complete,
23// it will never return nil, and the bool can be removed.
24func buildssa(fn *Node) (ssafn *ssa.Func, usessa bool) {
25 name := fn.Func.Nname.Sym.Name
Josh Bleecher Snyder9495e452015-08-04 11:13:56 -070026 usessa = strings.HasSuffix(name, "_ssa") || name == os.Getenv("GOSSAFUNC")
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070027
28 if usessa {
Josh Bleecher Snydere0ac5c52015-07-20 18:42:45 -070029 fmt.Println("generating SSA for", name)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070030 dumplist("buildssa-enter", fn.Func.Enter)
31 dumplist("buildssa-body", fn.Nbody)
32 }
Keith Randalld2fd43a2015-04-15 15:51:25 -070033
Keith Randallcfc2aa52015-05-18 16:44:20 -070034 var s state
Michael Matloob81ccf502015-05-30 01:03:06 -040035 s.pushLine(fn.Lineno)
36 defer s.popLine()
37
Keith Randalld2fd43a2015-04-15 15:51:25 -070038 // TODO(khr): build config just once at the start of the compiler binary
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070039
40 var e ssaExport
41 e.log = usessa
42 s.config = ssa.NewConfig(Thearch.Thestring, &e)
Keith Randalld2fd43a2015-04-15 15:51:25 -070043 s.f = s.config.NewFunc()
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070044 s.f.Name = name
45
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -070046 if name == os.Getenv("GOSSAFUNC") {
47 // TODO: tempfile? it is handy to have the location
48 // of this file be stable, so you can just reload in the browser.
49 s.config.HTML = ssa.NewHTMLWriter("ssa.html", &s, name)
50 // TODO: generate and print a mapping from nodes to values and blocks
51 }
52 defer func() {
53 if !usessa {
54 s.config.HTML.Close()
55 }
56 }()
57
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -070058 // If SSA support for the function is incomplete,
59 // assume that any panics are due to violated
60 // invariants. Swallow them silently.
61 defer func() {
62 if err := recover(); err != nil {
63 if !e.unimplemented {
64 panic(err)
65 }
66 }
67 }()
Keith Randalld2fd43a2015-04-15 15:51:25 -070068
69 // We construct SSA using an algorithm similar to
 70 // Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau
71 // http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf
72 // TODO: check this comment
73
74 // Allocate starting block
75 s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
76
Keith Randallcfc2aa52015-05-18 16:44:20 -070077 // Allocate starting values
Keith Randall8c46aa52015-06-19 21:02:28 -070078 s.vars = map[*Node]*ssa.Value{}
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -070079 s.labels = map[string]*ssaLabel{}
80 s.labeledNodes = map[*Node]*ssaLabel{}
Keith Randall8c46aa52015-06-19 21:02:28 -070081 s.startmem = s.entryNewValue0(ssa.OpArg, ssa.TypeMem)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -070082 s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
83 s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])
Keith Randall8c46aa52015-06-19 21:02:28 -070084
85 // Generate addresses of local declarations
86 s.decladdrs = map[*Node]*ssa.Value{}
87 for d := fn.Func.Dcl; d != nil; d = d.Next {
88 n := d.N
89 switch n.Class {
90 case PPARAM, PPARAMOUT:
Keith Randalld2107fc2015-08-24 02:16:19 -070091 aux := &ssa.ArgSymbol{Typ: n.Type, Node: n}
Keith Randall8c46aa52015-06-19 21:02:28 -070092 s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
93 case PAUTO:
Keith Randalld2107fc2015-08-24 02:16:19 -070094 // processed at each use, to prevent Addr coming
95 // before the decl.
Keith Randallc3eb1a72015-09-06 13:42:26 -070096 case PFUNC:
97 // local function - already handled by frontend
Daniel Morsingbe2a3e22015-07-01 20:37:25 +010098 default:
99 str := ""
100 if n.Class&PHEAP != 0 {
101 str = ",heap"
102 }
Josh Bleecher Snyder58446032015-08-23 20:29:43 -0700103 s.Unimplementedf("local variable with class %s%s unimplemented", classnames[n.Class&^PHEAP], str)
Keith Randall8c46aa52015-06-19 21:02:28 -0700104 }
105 }
106 // nodfp is a special argument which is the function's FP.
Keith Randalld2107fc2015-08-24 02:16:19 -0700107 aux := &ssa.ArgSymbol{Typ: Types[TUINTPTR], Node: nodfp}
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -0700108 s.decladdrs[nodfp] = s.entryNewValue1A(ssa.OpAddr, Types[TUINTPTR], aux, s.sp)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700109
110 // Convert the AST-based IR to the SSA-based IR
111 s.startBlock(s.f.Entry)
Keith Randallf7f604e2015-05-27 14:52:22 -0700112 s.stmtList(fn.Func.Enter)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700113 s.stmtList(fn.Nbody)
114
Keith Randallcfc2aa52015-05-18 16:44:20 -0700115 // fallthrough to exit
Keith Randalla7cfc7592015-09-08 16:04:37 -0700116 if s.curBlock != nil {
117 m := s.mem()
118 b := s.endBlock()
Keith Randalld9f2caf2015-09-03 14:28:52 -0700119 b.Kind = ssa.BlockRet
Keith Randalla7cfc7592015-09-08 16:04:37 -0700120 b.Control = m
Keith Randallcfc2aa52015-05-18 16:44:20 -0700121 }
122
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700123 // Check that we used all labels
124 for name, lab := range s.labels {
125 if !lab.used() && !lab.reported {
126 yyerrorl(int(lab.defNode.Lineno), "label %v defined and not used", name)
127 lab.reported = true
128 }
129 if lab.used() && !lab.defined() && !lab.reported {
130 yyerrorl(int(lab.useNode.Lineno), "label %v not defined", name)
131 lab.reported = true
132 }
133 }
134
135 // Check any forward gotos. Non-forward gotos have already been checked.
136 for _, n := range s.fwdGotos {
137 lab := s.labels[n.Left.Sym.Name]
 138 // If the label is undefined, we have already printed an error.
139 if lab.defined() {
140 s.checkgoto(n, lab.defNode)
141 }
142 }
143
144 if nerrors > 0 {
145 return nil, false
146 }
147
Keith Randalld2fd43a2015-04-15 15:51:25 -0700148 // Link up variable uses to variable definitions
149 s.linkForwardReferences()
150
Josh Bleecher Snyder983bc8d2015-07-17 16:47:43 +0000151 // Main call to ssa package to compile function
152 ssa.Compile(s.f)
153
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -0700154 // Calculate stats about what percentage of functions SSA handles.
155 if false {
Josh Bleecher Snyder983bc8d2015-07-17 16:47:43 +0000156 fmt.Printf("SSA implemented: %t\n", !e.unimplemented)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -0700157 }
158
159 if e.unimplemented {
160 return nil, false
161 }
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -0700162
163 // TODO: enable codegen more broadly once the codegen stabilizes
164 // and runtime support is in (gc maps, write barriers, etc.)
Josh Bleecher Snyder9495e452015-08-04 11:13:56 -0700165 return s.f, usessa || localpkg.Name == os.Getenv("GOSSAPKG")
Keith Randalld2fd43a2015-04-15 15:51:25 -0700166}
167
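// state holds the per-function working state used while converting a
// function's AST to SSA form: the config, the function under construction,
// label bookkeeping, the current block, and per-block variable definitions.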
Keith Randallcfc2aa52015-05-18 16:44:20 -0700168type state struct {
Keith Randalld2fd43a2015-04-15 15:51:25 -0700169 // configuration (arch) information
170 config *ssa.Config
171
172 // function we're building
173 f *ssa.Func
174
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700175 // labels and labeled control flow nodes (OFOR, OSWITCH, OSELECT) in f
176 labels map[string]*ssaLabel
177 labeledNodes map[*Node]*ssaLabel
178
179 // gotos that jump forward; required for deferred checkgoto calls
180 fwdGotos []*Node
181
182 // unlabeled break and continue statement tracking
183 breakTo *ssa.Block // current target for plain break statement
184 continueTo *ssa.Block // current target for plain continue statement
Keith Randalld2fd43a2015-04-15 15:51:25 -0700185
186 // current location where we're interpreting the AST
187 curBlock *ssa.Block
188
Keith Randall8c46aa52015-06-19 21:02:28 -0700189 // variable assignments in the current block (map from variable symbol to ssa value)
190 // *Node is the unique identifier (an ONAME Node) for the variable.
191 vars map[*Node]*ssa.Value
Keith Randalld2fd43a2015-04-15 15:51:25 -0700192
193 // all defined variables at the end of each block. Indexed by block ID.
Keith Randall8c46aa52015-06-19 21:02:28 -0700194 defvars []map[*Node]*ssa.Value
Keith Randalld2fd43a2015-04-15 15:51:25 -0700195
Keith Randalld2107fc2015-08-24 02:16:19 -0700196 // addresses of PPARAM and PPARAMOUT variables.
Keith Randall8c46aa52015-06-19 21:02:28 -0700197 decladdrs map[*Node]*ssa.Value
Keith Randallcfc2aa52015-05-18 16:44:20 -0700198
 199 // starting values. Memory, stack pointer, and static base pointer (SB)
200 startmem *ssa.Value
Keith Randallcfc2aa52015-05-18 16:44:20 -0700201 sp *ssa.Value
Keith Randall8c46aa52015-06-19 21:02:28 -0700202 sb *ssa.Value
Michael Matloob81ccf502015-05-30 01:03:06 -0400203
 204 // line number stack. The current line number is the top of the stack
205 line []int32
Keith Randalld2fd43a2015-04-15 15:51:25 -0700206}
207
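// An ssaLabel records what is known about a Go label in the function being
// compiled: the block it targets, the break/continue targets of a labeled
// control flow node, and the definition/use nodes needed for error reporting.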
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700208type ssaLabel struct {
209 target *ssa.Block // block identified by this label
210 breakTarget *ssa.Block // block to break to in control flow node identified by this label
211 continueTarget *ssa.Block // block to continue to in control flow node identified by this label
212 defNode *Node // label definition Node (OLABEL)
213 // Label use Node (OGOTO, OBREAK, OCONTINUE).
214 // Used only for error detection and reporting.
215 // There might be multiple uses, but we only need to track one.
216 useNode *Node
217 reported bool // reported indicates whether an error has already been reported for this label
218}
219
220// defined reports whether the label has a definition (OLABEL node).
221func (l *ssaLabel) defined() bool { return l.defNode != nil }
222
223// used reports whether the label has a use (OGOTO, OBREAK, or OCONTINUE node).
224func (l *ssaLabel) used() bool { return l.useNode != nil }
225
226// label returns the label associated with sym, creating it if necessary.
227func (s *state) label(sym *Sym) *ssaLabel {
228 lab := s.labels[sym.Name]
229 if lab == nil {
230 lab = new(ssaLabel)
231 s.labels[sym.Name] = lab
232 }
233 return lab
234}
235
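// Logf, Fatalf, and Unimplementedf forward diagnostics to the underlying ssa.Config.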
Josh Bleecher Snyder1edf4892015-07-03 20:29:11 -0700236func (s *state) Logf(msg string, args ...interface{}) { s.config.Logf(msg, args...) }
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -0700237func (s *state) Fatalf(msg string, args ...interface{}) { s.config.Fatalf(msg, args...) }
238func (s *state) Unimplementedf(msg string, args ...interface{}) { s.config.Unimplementedf(msg, args...) }
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -0700239
Keith Randall8c46aa52015-06-19 21:02:28 -0700240// dummy node for the memory variable
241var memvar = Node{Op: ONAME, Sym: &Sym{Name: "mem"}}
242
Keith Randalld2fd43a2015-04-15 15:51:25 -0700243// startBlock sets the current block we're generating code in to b.
Keith Randallcfc2aa52015-05-18 16:44:20 -0700244func (s *state) startBlock(b *ssa.Block) {
245 if s.curBlock != nil {
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -0700246 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
Keith Randallcfc2aa52015-05-18 16:44:20 -0700247 }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700248 s.curBlock = b
Keith Randall8c46aa52015-06-19 21:02:28 -0700249 s.vars = map[*Node]*ssa.Value{}
Keith Randalld2fd43a2015-04-15 15:51:25 -0700250}
251
252// endBlock marks the end of generating code for the current block.
253// Returns the (former) current block. Returns nil if there is no current
254// block, i.e. if no code flows to the current execution point.
Keith Randallcfc2aa52015-05-18 16:44:20 -0700255func (s *state) endBlock() *ssa.Block {
Keith Randalld2fd43a2015-04-15 15:51:25 -0700256 b := s.curBlock
257 if b == nil {
258 return nil
259 }
260 for len(s.defvars) <= int(b.ID) {
261 s.defvars = append(s.defvars, nil)
262 }
263 s.defvars[b.ID] = s.vars
264 s.curBlock = nil
265 s.vars = nil
Michael Matloob81ccf502015-05-30 01:03:06 -0400266 b.Line = s.peekLine()
Keith Randalld2fd43a2015-04-15 15:51:25 -0700267 return b
268}
269
Michael Matloob81ccf502015-05-30 01:03:06 -0400270// pushLine pushes a line number on the line number stack.
271func (s *state) pushLine(line int32) {
272 s.line = append(s.line, line)
273}
274
275// popLine pops the top of the line number stack.
276func (s *state) popLine() {
277 s.line = s.line[:len(s.line)-1]
278}
279
 280// peekLine peeks at the top of the line number stack.
281func (s *state) peekLine() int32 {
282 return s.line[len(s.line)-1]
283}
284
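// Error reports a compile-time error at the source line currently being converted.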
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700285func (s *state) Error(msg string, args ...interface{}) {
286 yyerrorl(int(s.peekLine()), msg, args...)
287}
288
Keith Randall8f22b522015-06-11 21:29:25 -0700289// newValue0 adds a new value with no arguments to the current block.
290func (s *state) newValue0(op ssa.Op, t ssa.Type) *ssa.Value {
291 return s.curBlock.NewValue0(s.peekLine(), op, t)
292}
293
294// newValue0A adds a new value with no arguments and an aux value to the current block.
295func (s *state) newValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
296 return s.curBlock.NewValue0A(s.peekLine(), op, t, aux)
Michael Matloob81ccf502015-05-30 01:03:06 -0400297}
298
Todd Neal991036a2015-09-03 18:24:22 -0500299// newValue0I adds a new value with no arguments and an auxint value to the current block.
300func (s *state) newValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
301 return s.curBlock.NewValue0I(s.peekLine(), op, t, auxint)
302}
303
Michael Matloob81ccf502015-05-30 01:03:06 -0400304// newValue1 adds a new value with one argument to the current block.
Keith Randall8f22b522015-06-11 21:29:25 -0700305func (s *state) newValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
306 return s.curBlock.NewValue1(s.peekLine(), op, t, arg)
307}
308
309// newValue1A adds a new value with one argument and an aux value to the current block.
310func (s *state) newValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
311 return s.curBlock.NewValue1A(s.peekLine(), op, t, aux, arg)
Michael Matloob81ccf502015-05-30 01:03:06 -0400312}
313
Keith Randallcd7e0592015-07-15 21:33:49 -0700314// newValue1I adds a new value with one argument and an auxint value to the current block.
315func (s *state) newValue1I(op ssa.Op, t ssa.Type, aux int64, arg *ssa.Value) *ssa.Value {
316 return s.curBlock.NewValue1I(s.peekLine(), op, t, aux, arg)
317}
318
Michael Matloob81ccf502015-05-30 01:03:06 -0400319// newValue2 adds a new value with two arguments to the current block.
Keith Randall8f22b522015-06-11 21:29:25 -0700320func (s *state) newValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
321 return s.curBlock.NewValue2(s.peekLine(), op, t, arg0, arg1)
Michael Matloob81ccf502015-05-30 01:03:06 -0400322}
323
Daniel Morsing66b47812015-06-27 15:45:20 +0100324// newValue2I adds a new value with two arguments and an auxint value to the current block.
325func (s *state) newValue2I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
326 return s.curBlock.NewValue2I(s.peekLine(), op, t, aux, arg0, arg1)
327}
328
Michael Matloob81ccf502015-05-30 01:03:06 -0400329// newValue3 adds a new value with three arguments to the current block.
Keith Randall8f22b522015-06-11 21:29:25 -0700330func (s *state) newValue3(op ssa.Op, t ssa.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
331 return s.curBlock.NewValue3(s.peekLine(), op, t, arg0, arg1, arg2)
Michael Matloob81ccf502015-05-30 01:03:06 -0400332}
333
Keith Randalld4cc51d2015-08-14 21:47:20 -0700334// newValue3I adds a new value with three arguments and an auxint value to the current block.
335func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
336 return s.curBlock.NewValue3I(s.peekLine(), op, t, aux, arg0, arg1, arg2)
337}
338
Todd Neal991036a2015-09-03 18:24:22 -0500339// entryNewValue0 adds a new value with no arguments to the entry block.
Keith Randall8f22b522015-06-11 21:29:25 -0700340func (s *state) entryNewValue0(op ssa.Op, t ssa.Type) *ssa.Value {
341 return s.f.Entry.NewValue0(s.peekLine(), op, t)
342}
343
Todd Neal991036a2015-09-03 18:24:22 -0500344// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
Keith Randall8f22b522015-06-11 21:29:25 -0700345func (s *state) entryNewValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
346 return s.f.Entry.NewValue0A(s.peekLine(), op, t, aux)
Michael Matloob81ccf502015-05-30 01:03:06 -0400347}
348
Todd Neal991036a2015-09-03 18:24:22 -0500349// entryNewValue0I adds a new value with no arguments and an auxint value to the entry block.
350func (s *state) entryNewValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
351 return s.f.Entry.NewValue0I(s.peekLine(), op, t, auxint)
352}
353
Michael Matloob81ccf502015-05-30 01:03:06 -0400354// entryNewValue1 adds a new value with one argument to the entry block.
Keith Randall8f22b522015-06-11 21:29:25 -0700355func (s *state) entryNewValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
356 return s.f.Entry.NewValue1(s.peekLine(), op, t, arg)
357}
358
 359// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
360func (s *state) entryNewValue1I(op ssa.Op, t ssa.Type, auxint int64, arg *ssa.Value) *ssa.Value {
361 return s.f.Entry.NewValue1I(s.peekLine(), op, t, auxint, arg)
Michael Matloob81ccf502015-05-30 01:03:06 -0400362}
363
Keith Randall8c46aa52015-06-19 21:02:28 -0700364// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
365func (s *state) entryNewValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
366 return s.f.Entry.NewValue1A(s.peekLine(), op, t, aux, arg)
367}
368
Michael Matloob81ccf502015-05-30 01:03:06 -0400369// entryNewValue2 adds a new value with two arguments to the entry block.
Keith Randall8f22b522015-06-11 21:29:25 -0700370func (s *state) entryNewValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
371 return s.f.Entry.NewValue2(s.peekLine(), op, t, arg0, arg1)
Michael Matloob81ccf502015-05-30 01:03:06 -0400372}
373
Josh Bleecher Snydercea44142015-09-08 16:52:25 -0700374// const* routines add a new const value to the entry block.
375func (s *state) constBool(c bool) *ssa.Value {
376 return s.f.ConstBool(s.peekLine(), Types[TBOOL], c)
377}
Keith Randall9cb332e2015-07-28 14:19:20 -0700378func (s *state) constInt8(t ssa.Type, c int8) *ssa.Value {
379 return s.f.ConstInt8(s.peekLine(), t, c)
380}
381func (s *state) constInt16(t ssa.Type, c int16) *ssa.Value {
382 return s.f.ConstInt16(s.peekLine(), t, c)
383}
384func (s *state) constInt32(t ssa.Type, c int32) *ssa.Value {
385 return s.f.ConstInt32(s.peekLine(), t, c)
386}
387func (s *state) constInt64(t ssa.Type, c int64) *ssa.Value {
388 return s.f.ConstInt64(s.peekLine(), t, c)
389}
David Chase997a9f32015-08-12 16:38:11 -0400390func (s *state) constFloat32(t ssa.Type, c float64) *ssa.Value {
391 return s.f.ConstFloat32(s.peekLine(), t, c)
392}
393func (s *state) constFloat64(t ssa.Type, c float64) *ssa.Value {
394 return s.f.ConstFloat64(s.peekLine(), t, c)
395}
Keith Randall9cb332e2015-07-28 14:19:20 -0700396func (s *state) constIntPtr(t ssa.Type, c int64) *ssa.Value {
397 if s.config.PtrSize == 4 && int64(int32(c)) != c {
398 s.Fatalf("pointer constant too big %d", c)
399 }
400 return s.f.ConstIntPtr(s.peekLine(), t, c)
401}
Michael Matloob81ccf502015-05-30 01:03:06 -0400402func (s *state) constInt(t ssa.Type, c int64) *ssa.Value {
Keith Randall9cb332e2015-07-28 14:19:20 -0700403 if s.config.IntSize == 8 {
404 return s.constInt64(t, c)
405 }
406 if int64(int32(c)) != c {
407 s.Fatalf("integer constant too big %d", c)
408 }
409 return s.constInt32(t, int32(c))
Michael Matloob81ccf502015-05-30 01:03:06 -0400410}
411
Keith Randalld2fd43a2015-04-15 15:51:25 -0700412// stmtList converts the statement list l to SSA and adds it to s.
Keith Randallcfc2aa52015-05-18 16:44:20 -0700413func (s *state) stmtList(l *NodeList) {
Keith Randalld2fd43a2015-04-15 15:51:25 -0700414 for ; l != nil; l = l.Next {
415 s.stmt(l.N)
416 }
417}
418
 419// stmt converts the statement n to SSA and adds it to s.
Keith Randallcfc2aa52015-05-18 16:44:20 -0700420func (s *state) stmt(n *Node) {
Michael Matloob81ccf502015-05-30 01:03:06 -0400421 s.pushLine(n.Lineno)
422 defer s.popLine()
423
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700424 // If s.curBlock is nil, then we're about to generate dead code.
425 // We can't just short-circuit here, though,
426 // because we check labels and gotos as part of SSA generation.
427 // Provide a block for the dead code so that we don't have
428 // to add special cases everywhere else.
429 if s.curBlock == nil {
430 dead := s.f.NewBlock(ssa.BlockPlain)
431 s.startBlock(dead)
432 }
433
Keith Randalld2fd43a2015-04-15 15:51:25 -0700434 s.stmtList(n.Ninit)
435 switch n.Op {
436
437 case OBLOCK:
438 s.stmtList(n.List)
439
Josh Bleecher Snyder2574e4a2015-07-16 13:25:36 -0600440 // No-ops
Todd Neal67e43c12015-08-28 21:19:40 -0500441 case OEMPTY, ODCLCONST, ODCLTYPE, OFALL:
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -0600442
Josh Bleecher Snyder2574e4a2015-07-16 13:25:36 -0600443 // Expression statements
444 case OCALLFUNC, OCALLMETH, OCALLINTER:
445 s.expr(n)
446
Keith Randalld2fd43a2015-04-15 15:51:25 -0700447 case ODCL:
Daniel Morsingc31b6dd2015-06-12 14:23:29 +0100448 if n.Left.Class&PHEAP == 0 {
449 return
450 }
451 if compiling_runtime != 0 {
Keith Randall0ec72b62015-09-08 15:42:53 -0700452 Fatalf("%v escapes to heap, not allowed in runtime.", n)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +0100453 }
454
455 // TODO: the old pass hides the details of PHEAP
456 // variables behind ONAME nodes. Figure out if it's better
457 // to rewrite the tree and make the heapaddr construct explicit
458 // or to keep this detail hidden behind the scenes.
459 palloc := prealloc[n.Left]
460 if palloc == nil {
461 palloc = callnew(n.Left.Type)
462 prealloc[n.Left] = palloc
463 }
Josh Bleecher Snyder07269312015-08-29 14:54:45 -0700464 r := s.expr(palloc)
465 s.assign(n.Left.Name.Heapaddr, r, false)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700466
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700467 case OLABEL:
468 sym := n.Left.Sym
469
470 if isblanksym(sym) {
Keith Randall7e4c06d2015-07-12 11:52:09 -0700471 // Empty identifier is valid but useless.
472 // See issues 11589, 11593.
473 return
474 }
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700475
476 lab := s.label(sym)
477
478 // Associate label with its control flow node, if any
479 if ctl := n.Name.Defn; ctl != nil {
480 switch ctl.Op {
481 case OFOR, OSWITCH, OSELECT:
482 s.labeledNodes[ctl] = lab
483 }
Keith Randall0ad9c8c2015-06-12 16:24:33 -0700484 }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700485
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700486 if !lab.defined() {
487 lab.defNode = n
488 } else {
489 s.Error("label %v already defined at %v", sym, Ctxt.Line(int(lab.defNode.Lineno)))
490 lab.reported = true
Keith Randalld2fd43a2015-04-15 15:51:25 -0700491 }
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700492 // The label might already have a target block via a goto.
493 if lab.target == nil {
494 lab.target = s.f.NewBlock(ssa.BlockPlain)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -0700495 }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700496
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700497 // go to that label (we pretend "label:" is preceded by "goto label")
498 b := s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -0500499 b.AddEdgeTo(lab.target)
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700500 s.startBlock(lab.target)
501
502 case OGOTO:
503 sym := n.Left.Sym
504
505 lab := s.label(sym)
506 if lab.target == nil {
507 lab.target = s.f.NewBlock(ssa.BlockPlain)
508 }
509 if !lab.used() {
510 lab.useNode = n
511 }
512
513 if lab.defined() {
514 s.checkgoto(n, lab.defNode)
515 } else {
516 s.fwdGotos = append(s.fwdGotos, n)
517 }
518
519 b := s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -0500520 b.AddEdgeTo(lab.target)
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700521
Keith Randall290d8fc2015-06-10 15:03:06 -0700522 case OAS, OASWB:
Josh Bleecher Snyder6b416652015-07-28 10:56:39 -0700523 // Check whether we can generate static data rather than code.
524 // If so, ignore n and defer data generation until codegen.
525 // Failure to do this causes writes to readonly symbols.
526 if gen_as_init(n, true) {
527 var data []*Node
528 if s.f.StaticData != nil {
529 data = s.f.StaticData.([]*Node)
530 }
531 s.f.StaticData = append(data, n)
532 return
533 }
Josh Bleecher Snyder07269312015-08-29 14:54:45 -0700534 var r *ssa.Value
535 if n.Right != nil {
536 r = s.expr(n.Right)
537 }
538 s.assign(n.Left, r, n.Op == OASWB)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +0100539
Keith Randalld2fd43a2015-04-15 15:51:25 -0700540 case OIF:
Keith Randalle707fbe2015-06-11 10:20:39 -0700541 cond := s.expr(n.Left)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700542 b := s.endBlock()
543 b.Kind = ssa.BlockIf
544 b.Control = cond
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -0700545 b.Likely = ssa.BranchPrediction(n.Likely) // gc and ssa both use -1/0/+1 for likeliness
Keith Randalld2fd43a2015-04-15 15:51:25 -0700546
547 bThen := s.f.NewBlock(ssa.BlockPlain)
548 bEnd := s.f.NewBlock(ssa.BlockPlain)
549 var bElse *ssa.Block
550
Keith Randalle707fbe2015-06-11 10:20:39 -0700551 if n.Rlist == nil {
Todd Neal47d67992015-08-28 21:36:29 -0500552 b.AddEdgeTo(bThen)
553 b.AddEdgeTo(bEnd)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700554 } else {
555 bElse = s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -0500556 b.AddEdgeTo(bThen)
557 b.AddEdgeTo(bElse)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700558 }
559
560 s.startBlock(bThen)
561 s.stmtList(n.Nbody)
Josh Bleecher Snydere0ac5c52015-07-20 18:42:45 -0700562 if b := s.endBlock(); b != nil {
Todd Neal47d67992015-08-28 21:36:29 -0500563 b.AddEdgeTo(bEnd)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700564 }
565
Keith Randalle707fbe2015-06-11 10:20:39 -0700566 if n.Rlist != nil {
Keith Randalld2fd43a2015-04-15 15:51:25 -0700567 s.startBlock(bElse)
Keith Randalle707fbe2015-06-11 10:20:39 -0700568 s.stmtList(n.Rlist)
Josh Bleecher Snydere0ac5c52015-07-20 18:42:45 -0700569 if b := s.endBlock(); b != nil {
Todd Neal47d67992015-08-28 21:36:29 -0500570 b.AddEdgeTo(bEnd)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700571 }
572 }
573 s.startBlock(bEnd)
574
575 case ORETURN:
576 s.stmtList(n.List)
Keith Randalla7cfc7592015-09-08 16:04:37 -0700577 m := s.mem()
Keith Randalld2fd43a2015-04-15 15:51:25 -0700578 b := s.endBlock()
Keith Randall10f38f52015-09-03 09:09:59 -0700579 b.Kind = ssa.BlockRet
Keith Randalla7cfc7592015-09-08 16:04:37 -0700580 b.Control = m
Keith Randall8a1f6212015-09-08 21:28:44 -0700581 case ORETJMP:
582 s.stmtList(n.List)
583 m := s.mem()
584 b := s.endBlock()
585 b.Kind = ssa.BlockRetJmp
586 b.Aux = n.Left.Sym
587 b.Control = m
Keith Randalld2fd43a2015-04-15 15:51:25 -0700588
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700589 case OCONTINUE, OBREAK:
590 var op string
591 var to *ssa.Block
592 switch n.Op {
593 case OCONTINUE:
594 op = "continue"
595 to = s.continueTo
596 case OBREAK:
597 op = "break"
598 to = s.breakTo
599 }
600 if n.Left == nil {
601 // plain break/continue
602 if to == nil {
603 s.Error("%s is not in a loop", op)
604 return
605 }
606 // nothing to do; "to" is already the correct target
607 } else {
608 // labeled break/continue; look up the target
609 sym := n.Left.Sym
610 lab := s.label(sym)
611 if !lab.used() {
612 lab.useNode = n.Left
613 }
614 if !lab.defined() {
615 s.Error("%s label not defined: %v", op, sym)
616 lab.reported = true
617 return
618 }
619 switch n.Op {
620 case OCONTINUE:
621 to = lab.continueTarget
622 case OBREAK:
623 to = lab.breakTarget
624 }
625 if to == nil {
626 // Valid label but not usable with a break/continue here, e.g.:
627 // for {
628 // continue abc
629 // }
630 // abc:
631 // for {}
632 s.Error("invalid %s label %v", op, sym)
633 lab.reported = true
634 return
635 }
636 }
637
638 b := s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -0500639 b.AddEdgeTo(to)
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700640
Keith Randalld2fd43a2015-04-15 15:51:25 -0700641 case OFOR:
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700642 // OFOR: for Ninit; Left; Right { Nbody }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700643 bCond := s.f.NewBlock(ssa.BlockPlain)
644 bBody := s.f.NewBlock(ssa.BlockPlain)
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700645 bIncr := s.f.NewBlock(ssa.BlockPlain)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700646 bEnd := s.f.NewBlock(ssa.BlockPlain)
647
648 // first, jump to condition test
649 b := s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -0500650 b.AddEdgeTo(bCond)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700651
652 // generate code to test condition
Keith Randalld2fd43a2015-04-15 15:51:25 -0700653 s.startBlock(bCond)
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700654 var cond *ssa.Value
655 if n.Left != nil {
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700656 cond = s.expr(n.Left)
657 } else {
Josh Bleecher Snydercea44142015-09-08 16:52:25 -0700658 cond = s.constBool(true)
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700659 }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700660 b = s.endBlock()
661 b.Kind = ssa.BlockIf
662 b.Control = cond
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -0700663 b.Likely = ssa.BranchLikely
Todd Neal47d67992015-08-28 21:36:29 -0500664 b.AddEdgeTo(bBody)
665 b.AddEdgeTo(bEnd)
Keith Randalld2fd43a2015-04-15 15:51:25 -0700666
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700667 // set up for continue/break in body
668 prevContinue := s.continueTo
669 prevBreak := s.breakTo
670 s.continueTo = bIncr
671 s.breakTo = bEnd
672 lab := s.labeledNodes[n]
673 if lab != nil {
674 // labeled for loop
675 lab.continueTarget = bIncr
676 lab.breakTarget = bEnd
677 }
678
Keith Randalld2fd43a2015-04-15 15:51:25 -0700679 // generate body
680 s.startBlock(bBody)
681 s.stmtList(n.Nbody)
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700682
683 // tear down continue/break
684 s.continueTo = prevContinue
685 s.breakTo = prevBreak
686 if lab != nil {
687 lab.continueTarget = nil
688 lab.breakTarget = nil
689 }
690
691 // done with body, goto incr
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700692 if b := s.endBlock(); b != nil {
Todd Neal47d67992015-08-28 21:36:29 -0500693 b.AddEdgeTo(bIncr)
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700694 }
695
696 // generate incr
697 s.startBlock(bIncr)
Josh Bleecher Snyder46815b92015-06-24 17:48:22 -0700698 if n.Right != nil {
699 s.stmt(n.Right)
700 }
Josh Bleecher Snyder51738682015-07-06 15:29:39 -0700701 if b := s.endBlock(); b != nil {
Todd Neal47d67992015-08-28 21:36:29 -0500702 b.AddEdgeTo(bCond)
Josh Bleecher Snyder6c140592015-07-04 09:07:54 -0700703 }
Keith Randalld2fd43a2015-04-15 15:51:25 -0700704 s.startBlock(bEnd)
705
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700706 case OSWITCH, OSELECT:
707 // These have been mostly rewritten by the front end into their Nbody fields.
708 // Our main task is to correctly hook up any break statements.
709 bEnd := s.f.NewBlock(ssa.BlockPlain)
710
711 prevBreak := s.breakTo
712 s.breakTo = bEnd
713 lab := s.labeledNodes[n]
714 if lab != nil {
715 // labeled
716 lab.breakTarget = bEnd
717 }
718
719 // generate body code
720 s.stmtList(n.Nbody)
721
722 s.breakTo = prevBreak
723 if lab != nil {
724 lab.breakTarget = nil
725 }
726
727 if b := s.endBlock(); b != nil {
Todd Neal47d67992015-08-28 21:36:29 -0500728 b.AddEdgeTo(bEnd)
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -0700729 }
730 s.startBlock(bEnd)
731
Keith Randalld2fd43a2015-04-15 15:51:25 -0700732 case OVARKILL:
Keith Randalld2107fc2015-08-24 02:16:19 -0700733 // Insert a varkill op to record that a variable is no longer live.
734 // We only care about liveness info at call sites, so putting the
735 // varkill in the store chain is enough to keep it correctly ordered
736 // with respect to call ops.
737 s.vars[&memvar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, n.Left, s.mem())
Keith Randall9569b952015-08-28 22:51:01 -0700738
739 case OPROC, ODEFER:
740 call := n.Left
741 fn := call.Left
742 if call.Op != OCALLFUNC {
743 s.Unimplementedf("defer/go of %s", opnames[call.Op])
744 }
745
Keith Randallfd8c71b2015-09-08 21:37:37 -0700746 // Run all argument assignments. The arg slots have already
747 // been offset by 2*widthptr.
748 s.stmtList(call.List)
749
Keith Randall9569b952015-08-28 22:51:01 -0700750 // Write argsize and closure (args to Newproc/Deferproc)
751 argsize := s.constInt32(Types[TUINT32], int32(fn.Type.Argwid))
752 s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, s.sp, argsize, s.mem())
753 closure := s.expr(fn)
754 addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(Types[TUINTPTR]), int64(Widthptr), s.sp)
755 s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem())
756
Keith Randall9569b952015-08-28 22:51:01 -0700757 // Call deferproc or newproc
758 bNext := s.f.NewBlock(ssa.BlockPlain)
759 var op ssa.Op
760 switch n.Op {
761 case ODEFER:
762 op = ssa.OpDeferCall
763 case OPROC:
764 op = ssa.OpGoCall
765 }
766 r := s.newValue1(op, ssa.TypeMem, s.mem())
767 r.AuxInt = fn.Type.Argwid + 2*int64(Widthptr) // total stack space used
768 s.vars[&memvar] = r
769 b := s.endBlock()
770 b.Kind = ssa.BlockCall
771 b.Control = r
772 b.AddEdgeTo(bNext)
Keith Randall9569b952015-08-28 22:51:01 -0700773 s.startBlock(bNext)
774
Keith Randall46ffb022015-09-12 14:06:44 -0700775 case OCHECKNIL:
776 p := s.expr(n.Left)
777 s.nilCheck(p)
778
Keith Randalld2fd43a2015-04-15 15:51:25 -0700779 default:
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -0700780 s.Unimplementedf("unhandled stmt %s", opnames[n.Op])
Keith Randalld2fd43a2015-04-15 15:51:25 -0700781 }
782}
783
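// opAndType pairs a gc operator with the concrete etype of its operands;
// it is the key type of the opToSSA table below.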
Keith Randall67fdb0d2015-07-19 15:48:20 -0700784type opAndType struct {
785 op uint8
786 etype uint8
787}
788
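// opToSSA maps a gc operator and concrete operand type to the ssa.Op that implements it.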
789var opToSSA = map[opAndType]ssa.Op{
David Chase997a9f32015-08-12 16:38:11 -0400790 opAndType{OADD, TINT8}: ssa.OpAdd8,
791 opAndType{OADD, TUINT8}: ssa.OpAdd8,
792 opAndType{OADD, TINT16}: ssa.OpAdd16,
793 opAndType{OADD, TUINT16}: ssa.OpAdd16,
794 opAndType{OADD, TINT32}: ssa.OpAdd32,
795 opAndType{OADD, TUINT32}: ssa.OpAdd32,
796 opAndType{OADD, TPTR32}: ssa.OpAdd32,
797 opAndType{OADD, TINT64}: ssa.OpAdd64,
798 opAndType{OADD, TUINT64}: ssa.OpAdd64,
799 opAndType{OADD, TPTR64}: ssa.OpAdd64,
800 opAndType{OADD, TFLOAT32}: ssa.OpAdd32F,
801 opAndType{OADD, TFLOAT64}: ssa.OpAdd64F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700802
David Chase997a9f32015-08-12 16:38:11 -0400803 opAndType{OSUB, TINT8}: ssa.OpSub8,
804 opAndType{OSUB, TUINT8}: ssa.OpSub8,
805 opAndType{OSUB, TINT16}: ssa.OpSub16,
806 opAndType{OSUB, TUINT16}: ssa.OpSub16,
807 opAndType{OSUB, TINT32}: ssa.OpSub32,
808 opAndType{OSUB, TUINT32}: ssa.OpSub32,
809 opAndType{OSUB, TINT64}: ssa.OpSub64,
810 opAndType{OSUB, TUINT64}: ssa.OpSub64,
811 opAndType{OSUB, TFLOAT32}: ssa.OpSub32F,
812 opAndType{OSUB, TFLOAT64}: ssa.OpSub64F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700813
Josh Bleecher Snydere61e7c92015-07-22 19:19:40 -0700814 opAndType{ONOT, TBOOL}: ssa.OpNot,
815
David Chase3a9d0ac2015-08-28 14:24:10 -0400816 opAndType{OMINUS, TINT8}: ssa.OpNeg8,
817 opAndType{OMINUS, TUINT8}: ssa.OpNeg8,
818 opAndType{OMINUS, TINT16}: ssa.OpNeg16,
819 opAndType{OMINUS, TUINT16}: ssa.OpNeg16,
820 opAndType{OMINUS, TINT32}: ssa.OpNeg32,
821 opAndType{OMINUS, TUINT32}: ssa.OpNeg32,
822 opAndType{OMINUS, TINT64}: ssa.OpNeg64,
823 opAndType{OMINUS, TUINT64}: ssa.OpNeg64,
824 opAndType{OMINUS, TFLOAT32}: ssa.OpNeg32F,
825 opAndType{OMINUS, TFLOAT64}: ssa.OpNeg64F,
Alexandru Moșoi954d5ad2015-07-21 16:58:18 +0200826
Keith Randall4b803152015-07-29 17:07:09 -0700827 opAndType{OCOM, TINT8}: ssa.OpCom8,
828 opAndType{OCOM, TUINT8}: ssa.OpCom8,
829 opAndType{OCOM, TINT16}: ssa.OpCom16,
830 opAndType{OCOM, TUINT16}: ssa.OpCom16,
831 opAndType{OCOM, TINT32}: ssa.OpCom32,
832 opAndType{OCOM, TUINT32}: ssa.OpCom32,
833 opAndType{OCOM, TINT64}: ssa.OpCom64,
834 opAndType{OCOM, TUINT64}: ssa.OpCom64,
835
Josh Bleecher Snyderfa5fe192015-09-06 19:24:59 -0700836 opAndType{OIMAG, TCOMPLEX64}: ssa.OpComplexImag,
837 opAndType{OIMAG, TCOMPLEX128}: ssa.OpComplexImag,
838 opAndType{OREAL, TCOMPLEX64}: ssa.OpComplexReal,
839 opAndType{OREAL, TCOMPLEX128}: ssa.OpComplexReal,
840
David Chase997a9f32015-08-12 16:38:11 -0400841 opAndType{OMUL, TINT8}: ssa.OpMul8,
842 opAndType{OMUL, TUINT8}: ssa.OpMul8,
843 opAndType{OMUL, TINT16}: ssa.OpMul16,
844 opAndType{OMUL, TUINT16}: ssa.OpMul16,
845 opAndType{OMUL, TINT32}: ssa.OpMul32,
846 opAndType{OMUL, TUINT32}: ssa.OpMul32,
847 opAndType{OMUL, TINT64}: ssa.OpMul64,
848 opAndType{OMUL, TUINT64}: ssa.OpMul64,
849 opAndType{OMUL, TFLOAT32}: ssa.OpMul32F,
850 opAndType{OMUL, TFLOAT64}: ssa.OpMul64F,
851
852 opAndType{ODIV, TFLOAT32}: ssa.OpDiv32F,
853 opAndType{ODIV, TFLOAT64}: ssa.OpDiv64F,
Keith Randallbe1eb572015-07-22 13:46:15 -0700854
Todd Neal67cbd5b2015-08-18 19:14:47 -0500855 opAndType{OHMUL, TINT8}: ssa.OpHmul8,
856 opAndType{OHMUL, TUINT8}: ssa.OpHmul8u,
857 opAndType{OHMUL, TINT16}: ssa.OpHmul16,
858 opAndType{OHMUL, TUINT16}: ssa.OpHmul16u,
859 opAndType{OHMUL, TINT32}: ssa.OpHmul32,
860 opAndType{OHMUL, TUINT32}: ssa.OpHmul32u,
861
Todd Neala45f2d82015-08-17 17:46:06 -0500862 opAndType{ODIV, TINT8}: ssa.OpDiv8,
863 opAndType{ODIV, TUINT8}: ssa.OpDiv8u,
864 opAndType{ODIV, TINT16}: ssa.OpDiv16,
865 opAndType{ODIV, TUINT16}: ssa.OpDiv16u,
866 opAndType{ODIV, TINT32}: ssa.OpDiv32,
867 opAndType{ODIV, TUINT32}: ssa.OpDiv32u,
868 opAndType{ODIV, TINT64}: ssa.OpDiv64,
869 opAndType{ODIV, TUINT64}: ssa.OpDiv64u,
870
Todd Neal57d9e7e2015-08-18 19:51:44 -0500871 opAndType{OMOD, TINT8}: ssa.OpMod8,
872 opAndType{OMOD, TUINT8}: ssa.OpMod8u,
873 opAndType{OMOD, TINT16}: ssa.OpMod16,
874 opAndType{OMOD, TUINT16}: ssa.OpMod16u,
875 opAndType{OMOD, TINT32}: ssa.OpMod32,
876 opAndType{OMOD, TUINT32}: ssa.OpMod32u,
877 opAndType{OMOD, TINT64}: ssa.OpMod64,
878 opAndType{OMOD, TUINT64}: ssa.OpMod64u,
879
Alexandru Moșoiedff8812015-07-28 14:58:49 +0200880 opAndType{OAND, TINT8}: ssa.OpAnd8,
Keith Randall2a5e6c42015-07-23 14:35:02 -0700881 opAndType{OAND, TUINT8}: ssa.OpAnd8,
Alexandru Moșoiedff8812015-07-28 14:58:49 +0200882 opAndType{OAND, TINT16}: ssa.OpAnd16,
Keith Randall2a5e6c42015-07-23 14:35:02 -0700883 opAndType{OAND, TUINT16}: ssa.OpAnd16,
Alexandru Moșoiedff8812015-07-28 14:58:49 +0200884 opAndType{OAND, TINT32}: ssa.OpAnd32,
Keith Randall2a5e6c42015-07-23 14:35:02 -0700885 opAndType{OAND, TUINT32}: ssa.OpAnd32,
Alexandru Moșoiedff8812015-07-28 14:58:49 +0200886 opAndType{OAND, TINT64}: ssa.OpAnd64,
Keith Randall2a5e6c42015-07-23 14:35:02 -0700887 opAndType{OAND, TUINT64}: ssa.OpAnd64,
Alexandru Moșoiedff8812015-07-28 14:58:49 +0200888
Alexandru Moșoi74024162015-07-29 17:52:25 +0200889 opAndType{OOR, TINT8}: ssa.OpOr8,
890 opAndType{OOR, TUINT8}: ssa.OpOr8,
891 opAndType{OOR, TINT16}: ssa.OpOr16,
892 opAndType{OOR, TUINT16}: ssa.OpOr16,
893 opAndType{OOR, TINT32}: ssa.OpOr32,
894 opAndType{OOR, TUINT32}: ssa.OpOr32,
895 opAndType{OOR, TINT64}: ssa.OpOr64,
896 opAndType{OOR, TUINT64}: ssa.OpOr64,
897
Alexandru Moșoi6d9362a12015-07-30 12:33:36 +0200898 opAndType{OXOR, TINT8}: ssa.OpXor8,
899 opAndType{OXOR, TUINT8}: ssa.OpXor8,
900 opAndType{OXOR, TINT16}: ssa.OpXor16,
901 opAndType{OXOR, TUINT16}: ssa.OpXor16,
902 opAndType{OXOR, TINT32}: ssa.OpXor32,
903 opAndType{OXOR, TUINT32}: ssa.OpXor32,
904 opAndType{OXOR, TINT64}: ssa.OpXor64,
905 opAndType{OXOR, TUINT64}: ssa.OpXor64,
906
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700907 opAndType{OEQ, TBOOL}: ssa.OpEq8,
908 opAndType{OEQ, TINT8}: ssa.OpEq8,
909 opAndType{OEQ, TUINT8}: ssa.OpEq8,
910 opAndType{OEQ, TINT16}: ssa.OpEq16,
911 opAndType{OEQ, TUINT16}: ssa.OpEq16,
912 opAndType{OEQ, TINT32}: ssa.OpEq32,
913 opAndType{OEQ, TUINT32}: ssa.OpEq32,
914 opAndType{OEQ, TINT64}: ssa.OpEq64,
915 opAndType{OEQ, TUINT64}: ssa.OpEq64,
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700916 opAndType{OEQ, TINTER}: ssa.OpEqFat, // e == nil only
917 opAndType{OEQ, TARRAY}: ssa.OpEqFat, // slice only; a == nil only
918 opAndType{OEQ, TFUNC}: ssa.OpEqPtr,
919 opAndType{OEQ, TMAP}: ssa.OpEqPtr,
920 opAndType{OEQ, TCHAN}: ssa.OpEqPtr,
Todd Neal5fdd4fe2015-08-30 20:47:26 -0500921 opAndType{OEQ, TPTR64}: ssa.OpEqPtr,
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700922 opAndType{OEQ, TUINTPTR}: ssa.OpEqPtr,
923 opAndType{OEQ, TUNSAFEPTR}: ssa.OpEqPtr,
David Chase8e601b22015-08-18 14:39:26 -0400924 opAndType{OEQ, TFLOAT64}: ssa.OpEq64F,
925 opAndType{OEQ, TFLOAT32}: ssa.OpEq32F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700926
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700927 opAndType{ONE, TBOOL}: ssa.OpNeq8,
928 opAndType{ONE, TINT8}: ssa.OpNeq8,
929 opAndType{ONE, TUINT8}: ssa.OpNeq8,
930 opAndType{ONE, TINT16}: ssa.OpNeq16,
931 opAndType{ONE, TUINT16}: ssa.OpNeq16,
932 opAndType{ONE, TINT32}: ssa.OpNeq32,
933 opAndType{ONE, TUINT32}: ssa.OpNeq32,
934 opAndType{ONE, TINT64}: ssa.OpNeq64,
935 opAndType{ONE, TUINT64}: ssa.OpNeq64,
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700936 opAndType{ONE, TINTER}: ssa.OpNeqFat, // e != nil only
937 opAndType{ONE, TARRAY}: ssa.OpNeqFat, // slice only; a != nil only
938 opAndType{ONE, TFUNC}: ssa.OpNeqPtr,
939 opAndType{ONE, TMAP}: ssa.OpNeqPtr,
940 opAndType{ONE, TCHAN}: ssa.OpNeqPtr,
Todd Neal5fdd4fe2015-08-30 20:47:26 -0500941 opAndType{ONE, TPTR64}: ssa.OpNeqPtr,
Josh Bleecher Snyder1bab5b92015-07-28 14:14:25 -0700942 opAndType{ONE, TUINTPTR}: ssa.OpNeqPtr,
943 opAndType{ONE, TUNSAFEPTR}: ssa.OpNeqPtr,
David Chase8e601b22015-08-18 14:39:26 -0400944 opAndType{ONE, TFLOAT64}: ssa.OpNeq64F,
945 opAndType{ONE, TFLOAT32}: ssa.OpNeq32F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700946
David Chase8e601b22015-08-18 14:39:26 -0400947 opAndType{OLT, TINT8}: ssa.OpLess8,
948 opAndType{OLT, TUINT8}: ssa.OpLess8U,
949 opAndType{OLT, TINT16}: ssa.OpLess16,
950 opAndType{OLT, TUINT16}: ssa.OpLess16U,
951 opAndType{OLT, TINT32}: ssa.OpLess32,
952 opAndType{OLT, TUINT32}: ssa.OpLess32U,
953 opAndType{OLT, TINT64}: ssa.OpLess64,
954 opAndType{OLT, TUINT64}: ssa.OpLess64U,
955 opAndType{OLT, TFLOAT64}: ssa.OpLess64F,
956 opAndType{OLT, TFLOAT32}: ssa.OpLess32F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700957
David Chase8e601b22015-08-18 14:39:26 -0400958 opAndType{OGT, TINT8}: ssa.OpGreater8,
959 opAndType{OGT, TUINT8}: ssa.OpGreater8U,
960 opAndType{OGT, TINT16}: ssa.OpGreater16,
961 opAndType{OGT, TUINT16}: ssa.OpGreater16U,
962 opAndType{OGT, TINT32}: ssa.OpGreater32,
963 opAndType{OGT, TUINT32}: ssa.OpGreater32U,
964 opAndType{OGT, TINT64}: ssa.OpGreater64,
965 opAndType{OGT, TUINT64}: ssa.OpGreater64U,
966 opAndType{OGT, TFLOAT64}: ssa.OpGreater64F,
967 opAndType{OGT, TFLOAT32}: ssa.OpGreater32F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700968
David Chase8e601b22015-08-18 14:39:26 -0400969 opAndType{OLE, TINT8}: ssa.OpLeq8,
970 opAndType{OLE, TUINT8}: ssa.OpLeq8U,
971 opAndType{OLE, TINT16}: ssa.OpLeq16,
972 opAndType{OLE, TUINT16}: ssa.OpLeq16U,
973 opAndType{OLE, TINT32}: ssa.OpLeq32,
974 opAndType{OLE, TUINT32}: ssa.OpLeq32U,
975 opAndType{OLE, TINT64}: ssa.OpLeq64,
976 opAndType{OLE, TUINT64}: ssa.OpLeq64U,
977 opAndType{OLE, TFLOAT64}: ssa.OpLeq64F,
978 opAndType{OLE, TFLOAT32}: ssa.OpLeq32F,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700979
David Chase8e601b22015-08-18 14:39:26 -0400980 opAndType{OGE, TINT8}: ssa.OpGeq8,
981 opAndType{OGE, TUINT8}: ssa.OpGeq8U,
982 opAndType{OGE, TINT16}: ssa.OpGeq16,
983 opAndType{OGE, TUINT16}: ssa.OpGeq16U,
984 opAndType{OGE, TINT32}: ssa.OpGeq32,
985 opAndType{OGE, TUINT32}: ssa.OpGeq32U,
986 opAndType{OGE, TINT64}: ssa.OpGeq64,
987 opAndType{OGE, TUINT64}: ssa.OpGeq64U,
988 opAndType{OGE, TFLOAT64}: ssa.OpGeq64F,
989 opAndType{OGE, TFLOAT32}: ssa.OpGeq32F,
David Chase40aba8c2015-08-05 22:11:14 -0400990
991 opAndType{OLROT, TUINT8}: ssa.OpLrot8,
992 opAndType{OLROT, TUINT16}: ssa.OpLrot16,
993 opAndType{OLROT, TUINT32}: ssa.OpLrot32,
994 opAndType{OLROT, TUINT64}: ssa.OpLrot64,
Keith Randalla329e212015-09-12 13:26:57 -0700995
996 opAndType{OSQRT, TFLOAT64}: ssa.OpSqrt,
Keith Randall67fdb0d2015-07-19 15:48:20 -0700997}
998
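// concreteEtype returns the fixed-size etype for t, resolving the
// platform-dependent TINT, TUINT, and TUINTPTR to 32- or 64-bit variants.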
Keith Randall2a5e6c42015-07-23 14:35:02 -0700999func (s *state) concreteEtype(t *Type) uint8 {
1000 e := t.Etype
1001 switch e {
1002 default:
1003 return e
Keith Randall67fdb0d2015-07-19 15:48:20 -07001004 case TINT:
Keith Randall2a5e6c42015-07-23 14:35:02 -07001005 if s.config.IntSize == 8 {
1006 return TINT64
Keith Randall67fdb0d2015-07-19 15:48:20 -07001007 }
Keith Randall2a5e6c42015-07-23 14:35:02 -07001008 return TINT32
Keith Randall67fdb0d2015-07-19 15:48:20 -07001009 case TUINT:
Keith Randall2a5e6c42015-07-23 14:35:02 -07001010 if s.config.IntSize == 8 {
1011 return TUINT64
Keith Randall67fdb0d2015-07-19 15:48:20 -07001012 }
Keith Randall2a5e6c42015-07-23 14:35:02 -07001013 return TUINT32
1014 case TUINTPTR:
1015 if s.config.PtrSize == 8 {
1016 return TUINT64
1017 }
1018 return TUINT32
Keith Randall67fdb0d2015-07-19 15:48:20 -07001019 }
Keith Randall2a5e6c42015-07-23 14:35:02 -07001020}
1021
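// ssaOp returns the ssa.Op for gc operator op applied to (the concrete form of) type t.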
1022func (s *state) ssaOp(op uint8, t *Type) ssa.Op {
1023 etype := s.concreteEtype(t)
Keith Randall67fdb0d2015-07-19 15:48:20 -07001024 x, ok := opToSSA[opAndType{op, etype}]
1025 if !ok {
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07001026 s.Unimplementedf("unhandled binary op %s %s", opnames[op], Econv(int(etype), 0))
Keith Randall67fdb0d2015-07-19 15:48:20 -07001027 }
1028 return x
Josh Bleecher Snyder46815b92015-06-24 17:48:22 -07001029}
1030
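// floatForComplex returns the type of the real and imaginary parts of the complex type t.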
David Chase3a9d0ac2015-08-28 14:24:10 -04001031func floatForComplex(t *Type) *Type {
1032 if t.Size() == 8 {
1033 return Types[TFLOAT32]
1034 } else {
1035 return Types[TFLOAT64]
1036 }
1037}
1038
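// opAndTwoTypes pairs a shift operator with the etypes of the value being
// shifted and of the shift count; it is the key type of the shiftOpToSSA table.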
Keith Randall4b803152015-07-29 17:07:09 -07001039type opAndTwoTypes struct {
1040 op uint8
1041 etype1 uint8
1042 etype2 uint8
1043}
1044
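// twoTypes keys the fpConvOpToSSA table below; twoOpsAndType describes a
// conversion as op1 followed by op2, passing through intermediateType.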
David Chased052bbd2015-09-01 17:09:00 -04001045type twoTypes struct {
1046 etype1 uint8
1047 etype2 uint8
1048}
1049
1050type twoOpsAndType struct {
1051 op1 ssa.Op
1052 op2 ssa.Op
1053 intermediateType uint8
1054}
1055
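// fpConvOpToSSA maps a (from, to) pair of types to the two ssa ops that
// implement the floating-point conversion and the intermediate type between
// them. OpInvalid marks conversions handled instead by a branchy code expansion.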
1056var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
1057
1058 twoTypes{TINT8, TFLOAT32}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, TINT32},
1059 twoTypes{TINT16, TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, TINT32},
1060 twoTypes{TINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, TINT32},
1061 twoTypes{TINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, TINT64},
1062
1063 twoTypes{TINT8, TFLOAT64}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, TINT32},
1064 twoTypes{TINT16, TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, TINT32},
1065 twoTypes{TINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, TINT32},
1066 twoTypes{TINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, TINT64},
1067
1068 twoTypes{TFLOAT32, TINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
1069 twoTypes{TFLOAT32, TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
1070 twoTypes{TFLOAT32, TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, TINT32},
1071 twoTypes{TFLOAT32, TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, TINT64},
1072
1073 twoTypes{TFLOAT64, TINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
1074 twoTypes{TFLOAT64, TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
1075 twoTypes{TFLOAT64, TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, TINT32},
1076 twoTypes{TFLOAT64, TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, TINT64},
1077 // unsigned
1078 twoTypes{TUINT8, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, TINT32},
1079 twoTypes{TUINT16, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, TINT32},
1080 twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, TINT64}, // go wide to dodge unsigned
1081 twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto32F, branchy code expansion instead
1082
1083 twoTypes{TUINT8, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, TINT32},
1084 twoTypes{TUINT16, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, TINT32},
1085 twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, TINT64}, // go wide to dodge unsigned
1086 twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto64F, branchy code expansion instead
1087
1088 twoTypes{TFLOAT32, TUINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
1089 twoTypes{TFLOAT32, TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
1090 twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
1091 twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt32Fto64U, branchy code expansion instead
1092
1093 twoTypes{TFLOAT64, TUINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
1094 twoTypes{TFLOAT64, TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
1095 twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
1096 twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt64Fto64U, branchy code expansion instead
1097
1098 // float
1099 twoTypes{TFLOAT64, TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, TFLOAT32},
1100 twoTypes{TFLOAT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT64},
1101 twoTypes{TFLOAT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT32},
1102 twoTypes{TFLOAT32, TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, TFLOAT64},
1103}
1104
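// shiftOpToSSA maps a shift operator and the concrete etypes of its operands
// to the corresponding ssa.Op.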
Keith Randall4b803152015-07-29 17:07:09 -07001105var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
1106 opAndTwoTypes{OLSH, TINT8, TUINT8}: ssa.OpLsh8x8,
1107 opAndTwoTypes{OLSH, TUINT8, TUINT8}: ssa.OpLsh8x8,
1108 opAndTwoTypes{OLSH, TINT8, TUINT16}: ssa.OpLsh8x16,
1109 opAndTwoTypes{OLSH, TUINT8, TUINT16}: ssa.OpLsh8x16,
1110 opAndTwoTypes{OLSH, TINT8, TUINT32}: ssa.OpLsh8x32,
1111 opAndTwoTypes{OLSH, TUINT8, TUINT32}: ssa.OpLsh8x32,
1112 opAndTwoTypes{OLSH, TINT8, TUINT64}: ssa.OpLsh8x64,
1113 opAndTwoTypes{OLSH, TUINT8, TUINT64}: ssa.OpLsh8x64,
1114
1115 opAndTwoTypes{OLSH, TINT16, TUINT8}: ssa.OpLsh16x8,
1116 opAndTwoTypes{OLSH, TUINT16, TUINT8}: ssa.OpLsh16x8,
1117 opAndTwoTypes{OLSH, TINT16, TUINT16}: ssa.OpLsh16x16,
1118 opAndTwoTypes{OLSH, TUINT16, TUINT16}: ssa.OpLsh16x16,
1119 opAndTwoTypes{OLSH, TINT16, TUINT32}: ssa.OpLsh16x32,
1120 opAndTwoTypes{OLSH, TUINT16, TUINT32}: ssa.OpLsh16x32,
1121 opAndTwoTypes{OLSH, TINT16, TUINT64}: ssa.OpLsh16x64,
1122 opAndTwoTypes{OLSH, TUINT16, TUINT64}: ssa.OpLsh16x64,
1123
1124 opAndTwoTypes{OLSH, TINT32, TUINT8}: ssa.OpLsh32x8,
1125 opAndTwoTypes{OLSH, TUINT32, TUINT8}: ssa.OpLsh32x8,
1126 opAndTwoTypes{OLSH, TINT32, TUINT16}: ssa.OpLsh32x16,
1127 opAndTwoTypes{OLSH, TUINT32, TUINT16}: ssa.OpLsh32x16,
1128 opAndTwoTypes{OLSH, TINT32, TUINT32}: ssa.OpLsh32x32,
1129 opAndTwoTypes{OLSH, TUINT32, TUINT32}: ssa.OpLsh32x32,
1130 opAndTwoTypes{OLSH, TINT32, TUINT64}: ssa.OpLsh32x64,
1131 opAndTwoTypes{OLSH, TUINT32, TUINT64}: ssa.OpLsh32x64,
1132
1133 opAndTwoTypes{OLSH, TINT64, TUINT8}: ssa.OpLsh64x8,
1134 opAndTwoTypes{OLSH, TUINT64, TUINT8}: ssa.OpLsh64x8,
1135 opAndTwoTypes{OLSH, TINT64, TUINT16}: ssa.OpLsh64x16,
1136 opAndTwoTypes{OLSH, TUINT64, TUINT16}: ssa.OpLsh64x16,
1137 opAndTwoTypes{OLSH, TINT64, TUINT32}: ssa.OpLsh64x32,
1138 opAndTwoTypes{OLSH, TUINT64, TUINT32}: ssa.OpLsh64x32,
1139 opAndTwoTypes{OLSH, TINT64, TUINT64}: ssa.OpLsh64x64,
1140 opAndTwoTypes{OLSH, TUINT64, TUINT64}: ssa.OpLsh64x64,
1141
1142 opAndTwoTypes{ORSH, TINT8, TUINT8}: ssa.OpRsh8x8,
1143 opAndTwoTypes{ORSH, TUINT8, TUINT8}: ssa.OpRsh8Ux8,
1144 opAndTwoTypes{ORSH, TINT8, TUINT16}: ssa.OpRsh8x16,
1145 opAndTwoTypes{ORSH, TUINT8, TUINT16}: ssa.OpRsh8Ux16,
1146 opAndTwoTypes{ORSH, TINT8, TUINT32}: ssa.OpRsh8x32,
1147 opAndTwoTypes{ORSH, TUINT8, TUINT32}: ssa.OpRsh8Ux32,
1148 opAndTwoTypes{ORSH, TINT8, TUINT64}: ssa.OpRsh8x64,
1149 opAndTwoTypes{ORSH, TUINT8, TUINT64}: ssa.OpRsh8Ux64,
1150
1151 opAndTwoTypes{ORSH, TINT16, TUINT8}: ssa.OpRsh16x8,
1152 opAndTwoTypes{ORSH, TUINT16, TUINT8}: ssa.OpRsh16Ux8,
1153 opAndTwoTypes{ORSH, TINT16, TUINT16}: ssa.OpRsh16x16,
1154 opAndTwoTypes{ORSH, TUINT16, TUINT16}: ssa.OpRsh16Ux16,
1155 opAndTwoTypes{ORSH, TINT16, TUINT32}: ssa.OpRsh16x32,
1156 opAndTwoTypes{ORSH, TUINT16, TUINT32}: ssa.OpRsh16Ux32,
1157 opAndTwoTypes{ORSH, TINT16, TUINT64}: ssa.OpRsh16x64,
1158 opAndTwoTypes{ORSH, TUINT16, TUINT64}: ssa.OpRsh16Ux64,
1159
1160 opAndTwoTypes{ORSH, TINT32, TUINT8}: ssa.OpRsh32x8,
1161 opAndTwoTypes{ORSH, TUINT32, TUINT8}: ssa.OpRsh32Ux8,
1162 opAndTwoTypes{ORSH, TINT32, TUINT16}: ssa.OpRsh32x16,
1163 opAndTwoTypes{ORSH, TUINT32, TUINT16}: ssa.OpRsh32Ux16,
1164 opAndTwoTypes{ORSH, TINT32, TUINT32}: ssa.OpRsh32x32,
1165 opAndTwoTypes{ORSH, TUINT32, TUINT32}: ssa.OpRsh32Ux32,
1166 opAndTwoTypes{ORSH, TINT32, TUINT64}: ssa.OpRsh32x64,
1167 opAndTwoTypes{ORSH, TUINT32, TUINT64}: ssa.OpRsh32Ux64,
1168
1169 opAndTwoTypes{ORSH, TINT64, TUINT8}: ssa.OpRsh64x8,
1170 opAndTwoTypes{ORSH, TUINT64, TUINT8}: ssa.OpRsh64Ux8,
1171 opAndTwoTypes{ORSH, TINT64, TUINT16}: ssa.OpRsh64x16,
1172 opAndTwoTypes{ORSH, TUINT64, TUINT16}: ssa.OpRsh64Ux16,
1173 opAndTwoTypes{ORSH, TINT64, TUINT32}: ssa.OpRsh64x32,
1174 opAndTwoTypes{ORSH, TUINT64, TUINT32}: ssa.OpRsh64Ux32,
1175 opAndTwoTypes{ORSH, TINT64, TUINT64}: ssa.OpRsh64x64,
1176 opAndTwoTypes{ORSH, TUINT64, TUINT64}: ssa.OpRsh64Ux64,
1177}
1178
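// ssaShiftOp returns the ssa.Op for shift operator op applied to a value of
// type t shifted by a count of type u.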
1179func (s *state) ssaShiftOp(op uint8, t *Type, u *Type) ssa.Op {
1180 etype1 := s.concreteEtype(t)
1181 etype2 := s.concreteEtype(u)
1182 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
1183 if !ok {
1184 s.Unimplementedf("unhandled shift op %s etype=%s/%s", opnames[op], Econv(int(etype1), 0), Econv(int(etype2), 0))
1185 }
1186 return x
1187}
1188
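// ssaRotateOp returns the ssa.Op for left-rotate operator op (OLROT) applied to type t.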
David Chase40aba8c2015-08-05 22:11:14 -04001189func (s *state) ssaRotateOp(op uint8, t *Type) ssa.Op {
1190 etype1 := s.concreteEtype(t)
1191 x, ok := opToSSA[opAndType{op, etype1}]
1192 if !ok {
1193 s.Unimplementedf("unhandled rotate op %s etype=%s", opnames[op], Econv(int(etype1), 0))
1194 }
1195 return x
1196}
1197
Keith Randalld2fd43a2015-04-15 15:51:25 -07001198// expr converts the expression n to ssa, adds it to s and returns the ssa result.
Keith Randallcfc2aa52015-05-18 16:44:20 -07001199func (s *state) expr(n *Node) *ssa.Value {
Michael Matloob81ccf502015-05-30 01:03:06 -04001200 s.pushLine(n.Lineno)
1201 defer s.popLine()
1202
Keith Randall06f32922015-07-11 11:39:12 -07001203 s.stmtList(n.Ninit)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001204 switch n.Op {
Todd Nealdef7c652015-09-07 19:07:02 -05001205 case OCFUNC:
1206 aux := &ssa.ExternSymbol{n.Type, n.Left.Sym}
1207 return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001208 case ONAME:
Keith Randall290d8fc2015-06-10 15:03:06 -07001209 if n.Class == PFUNC {
1210 // "value" of a function is the address of the function's closure
Keith Randall8c46aa52015-06-19 21:02:28 -07001211 sym := funcsym(n.Sym)
1212 aux := &ssa.ExternSymbol{n.Type, sym}
1213 return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb)
Keith Randall23df95b2015-05-12 15:16:52 -07001214 }
Keith Randall290d8fc2015-06-10 15:03:06 -07001215 if canSSA(n) {
Keith Randall8c46aa52015-06-19 21:02:28 -07001216 return s.variable(n, n.Type)
Keith Randall290d8fc2015-06-10 15:03:06 -07001217 }
1218 addr := s.addr(n)
Keith Randall8f22b522015-06-11 21:29:25 -07001219 return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001220 case OLITERAL:
Keith Randalle707fbe2015-06-11 10:20:39 -07001221 switch n.Val().Ctype() {
Keith Randalld2fd43a2015-04-15 15:51:25 -07001222 case CTINT:
Keith Randall9cb332e2015-07-28 14:19:20 -07001223 i := Mpgetfix(n.Val().U.(*Mpint))
1224 switch n.Type.Size() {
1225 case 1:
1226 return s.constInt8(n.Type, int8(i))
1227 case 2:
1228 return s.constInt16(n.Type, int16(i))
1229 case 4:
1230 return s.constInt32(n.Type, int32(i))
1231 case 8:
1232 return s.constInt64(n.Type, i)
1233 default:
1234 s.Fatalf("bad integer size %d", n.Type.Size())
1235 return nil
1236 }
1237 case CTSTR:
1238 return s.entryNewValue0A(ssa.OpConstString, n.Type, n.Val().U)
1239 case CTBOOL:
Josh Bleecher Snydercea44142015-09-08 16:52:25 -07001240 return s.constBool(n.Val().U.(bool))
Brad Fitzpatrick337b7e72015-07-13 17:30:42 -06001241 case CTNIL:
Keith Randall9f954db2015-08-18 10:26:28 -07001242 t := n.Type
1243 switch {
1244 case t.IsSlice():
1245 return s.entryNewValue0(ssa.OpConstSlice, t)
1246 case t.IsInterface():
1247 return s.entryNewValue0(ssa.OpConstInterface, t)
1248 default:
1249 return s.entryNewValue0(ssa.OpConstNil, t)
1250 }
David Chase997a9f32015-08-12 16:38:11 -04001251 case CTFLT:
1252 f := n.Val().U.(*Mpflt)
1253 switch n.Type.Size() {
1254 case 4:
Todd Nealadba6c42015-09-08 07:50:25 -04001255 // -0.0 literals need to be treated as if they were 0.0, adding 0.0 here
1256 // accomplishes this while not affecting other values.
1257 return s.constFloat32(n.Type, mpgetflt32(f)+0.0)
David Chase997a9f32015-08-12 16:38:11 -04001258 case 8:
Todd Nealadba6c42015-09-08 07:50:25 -04001259 return s.constFloat64(n.Type, mpgetflt(f)+0.0)
David Chase997a9f32015-08-12 16:38:11 -04001260 default:
1261 s.Fatalf("bad float size %d", n.Type.Size())
1262 return nil
1263 }
David Chase52578582015-08-28 14:24:10 -04001264 case CTCPLX:
1265 c := n.Val().U.(*Mpcplx)
1266 r := &c.Real
1267 i := &c.Imag
1268 switch n.Type.Size() {
1269 case 8:
1270 {
1271 pt := Types[TFLOAT32]
Todd Nealadba6c42015-09-08 07:50:25 -04001272 // -0.0 literals need to be treated as if they were 0.0, adding 0.0 here
1273 // accomplishes this while not affecting other values.
David Chase52578582015-08-28 14:24:10 -04001274 return s.newValue2(ssa.OpComplexMake, n.Type,
Todd Nealadba6c42015-09-08 07:50:25 -04001275 s.constFloat32(pt, mpgetflt32(r)+0.0),
1276 s.constFloat32(pt, mpgetflt32(i)+0.0))
David Chase52578582015-08-28 14:24:10 -04001277 }
1278 case 16:
1279 {
1280 pt := Types[TFLOAT64]
1281 return s.newValue2(ssa.OpComplexMake, n.Type,
Todd Nealadba6c42015-09-08 07:50:25 -04001282 s.constFloat64(pt, mpgetflt(r)+0.0),
1283 s.constFloat64(pt, mpgetflt(i)+0.0))
David Chase52578582015-08-28 14:24:10 -04001284 }
1285 default:
1286			s.Fatalf("bad complex size %d", n.Type.Size())
1287 return nil
1288 }
David Chase997a9f32015-08-12 16:38:11 -04001289
Keith Randalld2fd43a2015-04-15 15:51:25 -07001290 default:
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07001291 s.Unimplementedf("unhandled OLITERAL %v", n.Val().Ctype())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001292 return nil
1293 }
Keith Randall0ad9c8c2015-06-12 16:24:33 -07001294 case OCONVNOP:
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001295 to := n.Type
1296 from := n.Left.Type
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001297
1298 // Assume everything will work out, so set up our return value.
1299 // Anything interesting that happens from here is a fatal.
Keith Randall0ad9c8c2015-06-12 16:24:33 -07001300 x := s.expr(n.Left)
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001301 v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
1302
Todd Nealdef7c652015-09-07 19:07:02 -05001303 // CONVNOP closure
1304 if to.Etype == TFUNC && from.IsPtr() {
1305 return v
1306 }
1307
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001308 // named <--> unnamed type or typed <--> untyped const
1309 if from.Etype == to.Etype {
1310 return v
1311 }
1312 // unsafe.Pointer <--> *T
1313 if to.Etype == TUNSAFEPTR && from.IsPtr() || from.Etype == TUNSAFEPTR && to.IsPtr() {
1314 return v
1315 }
1316
1317 dowidth(from)
1318 dowidth(to)
1319 if from.Width != to.Width {
1320 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width)
1321 return nil
1322 }
1323 if etypesign(from.Etype) != etypesign(to.Etype) {
1324 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, Econv(int(from.Etype), 0), to, Econv(int(to.Etype), 0))
1325 return nil
1326 }
1327
1328 if flag_race != 0 {
1329 s.Unimplementedf("questionable CONVNOP from race detector %v -> %v\n", from, to)
1330 return nil
1331 }
1332
1333 if etypesign(from.Etype) == 0 {
1334 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
1335 return nil
1336 }
1337
1338 // integer, same width, same sign
1339 return v
1340
Michael Matloob73054f52015-06-14 11:38:46 -07001341 case OCONV:
1342 x := s.expr(n.Left)
Keith Randall2a5e6c42015-07-23 14:35:02 -07001343 ft := n.Left.Type // from type
1344 tt := n.Type // to type
1345 if ft.IsInteger() && tt.IsInteger() {
1346 var op ssa.Op
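			// Note: the switch cases below encode the operand sizes as
			// 10*from-size + to-size; e.g. 84 selects the 64-bit to
			// 32-bit truncation.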
1347 if tt.Size() == ft.Size() {
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001348 op = ssa.OpCopy
Keith Randall2a5e6c42015-07-23 14:35:02 -07001349 } else if tt.Size() < ft.Size() {
1350 // truncation
1351 switch 10*ft.Size() + tt.Size() {
1352 case 21:
1353 op = ssa.OpTrunc16to8
1354 case 41:
1355 op = ssa.OpTrunc32to8
1356 case 42:
1357 op = ssa.OpTrunc32to16
1358 case 81:
1359 op = ssa.OpTrunc64to8
1360 case 82:
1361 op = ssa.OpTrunc64to16
1362 case 84:
1363 op = ssa.OpTrunc64to32
1364 default:
1365 s.Fatalf("weird integer truncation %s -> %s", ft, tt)
1366 }
1367 } else if ft.IsSigned() {
1368 // sign extension
1369 switch 10*ft.Size() + tt.Size() {
1370 case 12:
1371 op = ssa.OpSignExt8to16
1372 case 14:
1373 op = ssa.OpSignExt8to32
1374 case 18:
1375 op = ssa.OpSignExt8to64
1376 case 24:
1377 op = ssa.OpSignExt16to32
1378 case 28:
1379 op = ssa.OpSignExt16to64
1380 case 48:
1381 op = ssa.OpSignExt32to64
1382 default:
1383 s.Fatalf("bad integer sign extension %s -> %s", ft, tt)
1384 }
1385 } else {
1386 // zero extension
1387 switch 10*ft.Size() + tt.Size() {
1388 case 12:
1389 op = ssa.OpZeroExt8to16
1390 case 14:
1391 op = ssa.OpZeroExt8to32
1392 case 18:
1393 op = ssa.OpZeroExt8to64
1394 case 24:
1395 op = ssa.OpZeroExt16to32
1396 case 28:
1397 op = ssa.OpZeroExt16to64
1398 case 48:
1399 op = ssa.OpZeroExt32to64
1400 default:
1401				s.Fatalf("weird integer zero extension %s -> %s", ft, tt)
1402 }
1403 }
1404 return s.newValue1(op, n.Type, x)
1405 }
David Chase42825882015-08-20 15:14:20 -04001406
David Chased052bbd2015-09-01 17:09:00 -04001407 if ft.IsFloat() || tt.IsFloat() {
1408 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
1409 if !ok {
1410 s.Fatalf("weird float conversion %s -> %s", ft, tt)
David Chase42825882015-08-20 15:14:20 -04001411 }
David Chased052bbd2015-09-01 17:09:00 -04001412 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
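			// The table entry gives a one- or two-step conversion:
			// op1 converts the source to the intermediate type it, and
			// op2 finishes the conversion; ssa.OpCopy marks a step that
			// is not needed.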
1413
1414 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
1415 // normal case, not tripping over unsigned 64
1416 if op1 == ssa.OpCopy {
1417 if op2 == ssa.OpCopy {
1418 return x
1419 }
1420 return s.newValue1(op2, n.Type, x)
1421 }
1422 if op2 == ssa.OpCopy {
1423 return s.newValue1(op1, n.Type, x)
1424 }
1425 return s.newValue1(op2, n.Type, s.newValue1(op1, Types[it], x))
1426 }
1427 // Tricky 64-bit unsigned cases.
1428 if ft.IsInteger() {
1429 // therefore tt is float32 or float64, and ft is also unsigned
David Chase42825882015-08-20 15:14:20 -04001430 if tt.Size() == 4 {
1431 return s.uint64Tofloat32(n, x, ft, tt)
1432 }
1433 if tt.Size() == 8 {
1434 return s.uint64Tofloat64(n, x, ft, tt)
1435 }
David Chased052bbd2015-09-01 17:09:00 -04001436 s.Fatalf("weird unsigned integer to float conversion %s -> %s", ft, tt)
David Chase42825882015-08-20 15:14:20 -04001437 }
David Chased052bbd2015-09-01 17:09:00 -04001438 // therefore ft is float32 or float64, and tt is unsigned integer
David Chase73151062015-08-26 14:25:40 -04001439 if ft.Size() == 4 {
David Chased052bbd2015-09-01 17:09:00 -04001440 return s.float32ToUint64(n, x, ft, tt)
David Chase73151062015-08-26 14:25:40 -04001441 }
David Chased052bbd2015-09-01 17:09:00 -04001442 if ft.Size() == 8 {
1443 return s.float64ToUint64(n, x, ft, tt)
David Chase73151062015-08-26 14:25:40 -04001444 }
David Chased052bbd2015-09-01 17:09:00 -04001445 s.Fatalf("weird float to unsigned integer conversion %s -> %s", ft, tt)
1446 return nil
David Chase42825882015-08-20 15:14:20 -04001447 }
David Chase3a9d0ac2015-08-28 14:24:10 -04001448
1449 if ft.IsComplex() && tt.IsComplex() {
1450 var op ssa.Op
1451 if ft.Size() == tt.Size() {
1452 op = ssa.OpCopy
1453 } else if ft.Size() == 8 && tt.Size() == 16 {
1454 op = ssa.OpCvt32Fto64F
1455 } else if ft.Size() == 16 && tt.Size() == 8 {
1456 op = ssa.OpCvt64Fto32F
1457 } else {
1458 s.Fatalf("weird complex conversion %s -> %s", ft, tt)
1459 }
1460 ftp := floatForComplex(ft)
1461 ttp := floatForComplex(tt)
1462 return s.newValue2(ssa.OpComplexMake, tt,
1463 s.newValue1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, x)),
1464 s.newValue1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x)))
1465 }
David Chase42825882015-08-20 15:14:20 -04001466
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07001467 s.Unimplementedf("unhandled OCONV %s -> %s", Econv(int(n.Left.Type.Etype), 0), Econv(int(n.Type.Etype), 0))
Keith Randall2a5e6c42015-07-23 14:35:02 -07001468 return nil
Keith Randallcfc2aa52015-05-18 16:44:20 -07001469
Josh Bleecher Snyder46815b92015-06-24 17:48:22 -07001470 // binary ops
1471 case OLT, OEQ, ONE, OLE, OGE, OGT:
Keith Randalld2fd43a2015-04-15 15:51:25 -07001472 a := s.expr(n.Left)
1473 b := s.expr(n.Right)
Keith Randalldb380bf2015-09-10 11:05:42 -07001474 if n.Left.Type.IsComplex() {
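			// Complex values compare equal exactly when both the real and
			// the imaginary parts are equal, so compare componentwise and
			// AND the two boolean results; ONE is the negation of that.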
Keith Randallc244ce02015-09-10 14:59:00 -07001475 pt := floatForComplex(n.Left.Type)
Keith Randalldb380bf2015-09-10 11:05:42 -07001476 op := s.ssaOp(OEQ, pt)
1477 r := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
1478 i := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
1479 c := s.newValue2(ssa.OpAnd8, Types[TBOOL], r, i)
1480 switch n.Op {
1481 case OEQ:
1482 return c
1483 case ONE:
1484 return s.newValue1(ssa.OpNot, Types[TBOOL], c)
1485 default:
1486 s.Fatalf("ordered complex compare %s", opnames[n.Op])
1487 }
1488
1489 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001490 return s.newValue2(s.ssaOp(n.Op, n.Left.Type), Types[TBOOL], a, b)
David Chase3a9d0ac2015-08-28 14:24:10 -04001491 case OMUL:
1492 a := s.expr(n.Left)
1493 b := s.expr(n.Right)
1494 if n.Type.IsComplex() {
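			// Complex multiplication:
			//   (areal + aimag*i) * (breal + bimag*i)
			//     = (areal*breal - aimag*bimag) + (areal*bimag + aimag*breal)*i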
1495 mulop := ssa.OpMul64F
1496 addop := ssa.OpAdd64F
1497 subop := ssa.OpSub64F
1498 pt := floatForComplex(n.Type) // Could be Float32 or Float64
1499 wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error
1500
1501 areal := s.newValue1(ssa.OpComplexReal, pt, a)
1502 breal := s.newValue1(ssa.OpComplexReal, pt, b)
1503 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
1504 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
1505
1506 if pt != wt { // Widen for calculation
1507 areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
1508 breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
1509 aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
1510 bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
1511 }
1512
1513 xreal := s.newValue2(subop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
1514 ximag := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, bimag), s.newValue2(mulop, wt, aimag, breal))
1515
1516 if pt != wt { // Narrow to store back
1517 xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
1518 ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
1519 }
1520
1521 return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
1522 }
1523 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1524
1525 case ODIV:
1526 a := s.expr(n.Left)
1527 b := s.expr(n.Right)
1528 if n.Type.IsComplex() {
1529			// TODO: this is not executed because the front-end substitutes a runtime call.
1530 // That probably ought to change; with modest optimization the widen/narrow
1531 // conversions could all be elided in larger expression trees.
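			// Complex division (before the final division by the denominator):
			//   (areal + aimag*i) / (breal + bimag*i)
			//     = ((areal*breal + aimag*bimag) + (aimag*breal - areal*bimag)*i)
			//       / (breal*breal + bimag*bimag)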
1532 mulop := ssa.OpMul64F
1533 addop := ssa.OpAdd64F
1534 subop := ssa.OpSub64F
1535 divop := ssa.OpDiv64F
1536 pt := floatForComplex(n.Type) // Could be Float32 or Float64
1537 wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error
1538
1539 areal := s.newValue1(ssa.OpComplexReal, pt, a)
1540 breal := s.newValue1(ssa.OpComplexReal, pt, b)
1541 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
1542 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
1543
1544 if pt != wt { // Widen for calculation
1545 areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
1546 breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
1547 aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
1548 bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
1549 }
1550
1551 denom := s.newValue2(addop, wt, s.newValue2(mulop, wt, breal, breal), s.newValue2(mulop, wt, bimag, bimag))
1552 xreal := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
1553 ximag := s.newValue2(subop, wt, s.newValue2(mulop, wt, aimag, breal), s.newValue2(mulop, wt, areal, bimag))
1554
1555 // TODO not sure if this is best done in wide precision or narrow
1556 // Double-rounding might be an issue.
1557 // Note that the pre-SSA implementation does the entire calculation
1558 // in wide format, so wide is compatible.
1559 xreal = s.newValue2(divop, wt, xreal, denom)
1560 ximag = s.newValue2(divop, wt, ximag, denom)
1561
1562 if pt != wt { // Narrow to store back
1563 xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
1564 ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
1565 }
1566
1567 return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
1568 }
1569 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1570 case OADD, OSUB:
1571 a := s.expr(n.Left)
1572 b := s.expr(n.Right)
1573 if n.Type.IsComplex() {
1574 pt := floatForComplex(n.Type)
1575 op := s.ssaOp(n.Op, pt)
1576 return s.newValue2(ssa.OpComplexMake, n.Type,
1577 s.newValue2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
1578 s.newValue2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
1579 }
1580 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
1581 case OAND, OOR, OMOD, OHMUL, OXOR:
Keith Randalld2fd43a2015-04-15 15:51:25 -07001582 a := s.expr(n.Left)
1583 b := s.expr(n.Right)
Keith Randall67fdb0d2015-07-19 15:48:20 -07001584 return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
Keith Randall4b803152015-07-29 17:07:09 -07001585 case OLSH, ORSH:
1586 a := s.expr(n.Left)
1587 b := s.expr(n.Right)
1588 return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b)
David Chase40aba8c2015-08-05 22:11:14 -04001589 case OLROT:
1590 a := s.expr(n.Left)
1591 i := n.Right.Int()
1592 if i <= 0 || i >= n.Type.Size()*8 {
1593 s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i)
1594 }
1595 return s.newValue1I(s.ssaRotateOp(n.Op, n.Type), a.Type, i, a)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001596 case OANDAND, OOROR:
1597 // To implement OANDAND (and OOROR), we introduce a
1598 // new temporary variable to hold the result. The
1599 // variable is associated with the OANDAND node in the
1600 // s.vars table (normally variables are only
1601 // associated with ONAME nodes). We convert
1602 // A && B
1603 // to
1604 // var = A
1605 // if var {
1606 // var = B
1607 // }
1608 // Using var in the subsequent block introduces the
1609 // necessary phi variable.
1610 el := s.expr(n.Left)
1611 s.vars[n] = el
1612
1613 b := s.endBlock()
1614 b.Kind = ssa.BlockIf
1615 b.Control = el
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07001616 // In theory, we should set b.Likely here based on context.
1617 // However, gc only gives us likeliness hints
1618 // in a single place, for plain OIF statements,
1619		// and passing around context is finicky, so don't bother for now.
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001620
1621 bRight := s.f.NewBlock(ssa.BlockPlain)
1622 bResult := s.f.NewBlock(ssa.BlockPlain)
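		// For OANDAND the true edge goes to bRight (evaluate B) and the
		// false edge goes straight to bResult; for OOROR the edges are
		// swapped, so B is evaluated only when A is false.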
1623 if n.Op == OANDAND {
Todd Neal47d67992015-08-28 21:36:29 -05001624 b.AddEdgeTo(bRight)
1625 b.AddEdgeTo(bResult)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001626 } else if n.Op == OOROR {
Todd Neal47d67992015-08-28 21:36:29 -05001627 b.AddEdgeTo(bResult)
1628 b.AddEdgeTo(bRight)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001629 }
1630
1631 s.startBlock(bRight)
1632 er := s.expr(n.Right)
1633 s.vars[n] = er
1634
1635 b = s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05001636 b.AddEdgeTo(bResult)
Brad Fitzpatricke8167112015-07-10 12:58:53 -06001637
1638 s.startBlock(bResult)
Josh Bleecher Snyder35ad1fc2015-08-27 10:11:08 -07001639 return s.variable(n, Types[TBOOL])
Keith Randall7e390722015-09-12 14:14:02 -07001640 case OCOMPLEX:
1641 r := s.expr(n.Left)
1642 i := s.expr(n.Right)
1643 return s.newValue2(ssa.OpComplexMake, n.Type, r, i)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001644
Josh Bleecher Snyder4178f202015-09-05 19:28:00 -07001645 // unary ops
David Chase3a9d0ac2015-08-28 14:24:10 -04001646 case OMINUS:
1647 a := s.expr(n.Left)
1648 if n.Type.IsComplex() {
1649 tp := floatForComplex(n.Type)
1650 negop := s.ssaOp(n.Op, tp)
1651 return s.newValue2(ssa.OpComplexMake, n.Type,
1652 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
1653 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
1654 }
1655 return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
Keith Randalla329e212015-09-12 13:26:57 -07001656 case ONOT, OCOM, OSQRT:
Brad Fitzpatrickd9c72d72015-07-10 11:25:48 -06001657 a := s.expr(n.Left)
Alexandru Moșoi954d5ad2015-07-21 16:58:18 +02001658 return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
Keith Randall2f518072015-09-10 11:37:09 -07001659 case OIMAG, OREAL:
1660 a := s.expr(n.Left)
1661 return s.newValue1(s.ssaOp(n.Op, n.Left.Type), n.Type, a)
Josh Bleecher Snyder4178f202015-09-05 19:28:00 -07001662 case OPLUS:
1663 return s.expr(n.Left)
Brad Fitzpatrickd9c72d72015-07-10 11:25:48 -06001664
Keith Randallcfc2aa52015-05-18 16:44:20 -07001665 case OADDR:
1666 return s.addr(n.Left)
1667
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07001668 case OINDREG:
1669 if int(n.Reg) != Thearch.REGSP {
1670 s.Unimplementedf("OINDREG of non-SP register %s in expr: %v", obj.Rconv(int(n.Reg)), n)
1671 return nil
1672 }
1673 addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp)
1674 return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
1675
Keith Randalld2fd43a2015-04-15 15:51:25 -07001676 case OIND:
1677 p := s.expr(n.Left)
Keith Randallcfc2aa52015-05-18 16:44:20 -07001678 s.nilCheck(p)
Keith Randall8f22b522015-06-11 21:29:25 -07001679 return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
Keith Randallcfc2aa52015-05-18 16:44:20 -07001680
Keith Randallcd7e0592015-07-15 21:33:49 -07001681 case ODOT:
1682 v := s.expr(n.Left)
1683 return s.newValue1I(ssa.OpStructSelect, n.Type, n.Xoffset, v)
1684
Keith Randalld2fd43a2015-04-15 15:51:25 -07001685 case ODOTPTR:
1686 p := s.expr(n.Left)
Keith Randallcfc2aa52015-05-18 16:44:20 -07001687 s.nilCheck(p)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001688 p = s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
Keith Randall8f22b522015-06-11 21:29:25 -07001689 return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
Keith Randalld2fd43a2015-04-15 15:51:25 -07001690
1691 case OINDEX:
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001692 if n.Left.Type.Bound >= 0 { // array or string
Keith Randallcfc2aa52015-05-18 16:44:20 -07001693 a := s.expr(n.Left)
1694 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07001695 i = s.extendIndex(i)
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001696 var elemtype *Type
1697 var len *ssa.Value
1698 if n.Left.Type.IsString() {
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001699 len = s.newValue1(ssa.OpStringLen, Types[TINT], a)
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001700 elemtype = Types[TUINT8]
1701 } else {
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001702 len = s.constInt(Types[TINT], n.Left.Type.Bound)
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001703 elemtype = n.Left.Type.Type
1704 }
Keith Randall46e62f82015-08-18 14:17:30 -07001705 if !n.Bounded {
1706 s.boundsCheck(i, len)
1707 }
Keith Randall8f22b522015-06-11 21:29:25 -07001708 return s.newValue2(ssa.OpArrayIndex, elemtype, a, i)
Keith Randallcfc2aa52015-05-18 16:44:20 -07001709 } else { // slice
1710 p := s.addr(n)
Keith Randall8f22b522015-06-11 21:29:25 -07001711 return s.newValue2(ssa.OpLoad, n.Left.Type.Type, p, s.mem())
Keith Randallcfc2aa52015-05-18 16:44:20 -07001712 }
Keith Randalld2fd43a2015-04-15 15:51:25 -07001713
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001714 case OLEN, OCAP:
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001715 switch {
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001716 case n.Left.Type.IsSlice():
1717 op := ssa.OpSliceLen
1718 if n.Op == OCAP {
1719 op = ssa.OpSliceCap
1720 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001721 return s.newValue1(op, Types[TINT], s.expr(n.Left))
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06001722 case n.Left.Type.IsString(): // string; not reachable for OCAP
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001723 return s.newValue1(ssa.OpStringLen, Types[TINT], s.expr(n.Left))
Todd Neal707af252015-08-28 15:56:43 -05001724 case n.Left.Type.IsMap(), n.Left.Type.IsChan():
1725 return s.referenceTypeBuiltin(n, s.expr(n.Left))
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001726 default: // array
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001727 return s.constInt(Types[TINT], n.Left.Type.Bound)
Josh Bleecher Snydercc3f0312015-07-03 18:41:28 -07001728 }
1729
Josh Bleecher Snydera2d15802015-08-12 10:12:14 -07001730 case OSPTR:
1731 a := s.expr(n.Left)
1732 if n.Left.Type.IsSlice() {
1733 return s.newValue1(ssa.OpSlicePtr, n.Type, a)
1734 } else {
1735 return s.newValue1(ssa.OpStringPtr, n.Type, a)
1736 }
1737
Keith Randalld1c15a02015-08-04 15:47:22 -07001738 case OITAB:
1739 a := s.expr(n.Left)
1740 return s.newValue1(ssa.OpITab, n.Type, a)
1741
Josh Bleecher Snyder1792b362015-09-05 19:28:27 -07001742 case OEFACE:
1743 tab := s.expr(n.Left)
1744 data := s.expr(n.Right)
1745 return s.newValue2(ssa.OpIMake, n.Type, tab, data)
1746
Keith Randall3526cf52015-08-24 23:52:03 -07001747 case OSLICESTR:
1748 // Evaluate the string once.
1749 str := s.expr(n.Left)
1750 ptr := s.newValue1(ssa.OpStringPtr, Ptrto(Types[TUINT8]), str)
1751 len := s.newValue1(ssa.OpStringLen, Types[TINT], str)
1752 zero := s.constInt(Types[TINT], 0)
1753
1754 // Evaluate the slice indexes.
1755 var low, high *ssa.Value
1756 if n.Right.Left == nil {
1757 low = zero
1758 } else {
Alexandru Moșoic684d4d2015-09-08 18:18:59 +02001759 low = s.extendIndex(s.expr(n.Right.Left))
Keith Randall3526cf52015-08-24 23:52:03 -07001760 }
1761 if n.Right.Right == nil {
1762 high = len
1763 } else {
Alexandru Moșoic684d4d2015-09-08 18:18:59 +02001764 high = s.extendIndex(s.expr(n.Right.Right))
Keith Randall3526cf52015-08-24 23:52:03 -07001765 }
1766
1767 // Panic if slice indices are not in bounds.
1768 s.sliceBoundsCheck(low, high)
1769 s.sliceBoundsCheck(high, len)
1770
1771 // Generate the following code assuming that indexes are in bounds.
1772 // The conditional is to make sure that we don't generate a string
1773 // that points to the next object in memory.
1774 // rlen = (SubPtr high low)
1775 // p = ptr
1776 // if rlen != 0 {
1777 // p = (AddPtr ptr low)
1778 // }
1779 // result = (StringMake p size)
1780 rlen := s.newValue2(ssa.OpSubPtr, Types[TINT], high, low)
1781
1782 // Use n as the "variable" for p.
1783 s.vars[n] = ptr
1784
1785 // Generate code to test the resulting slice length.
1786 var cmp *ssa.Value
1787 if s.config.IntSize == 8 {
1788 cmp = s.newValue2(ssa.OpNeq64, Types[TBOOL], rlen, zero)
1789 } else {
1790 cmp = s.newValue2(ssa.OpNeq32, Types[TBOOL], rlen, zero)
1791 }
1792
1793 b := s.endBlock()
1794 b.Kind = ssa.BlockIf
1795 b.Likely = ssa.BranchLikely
1796 b.Control = cmp
1797
1798 // Generate code for non-zero length slice case.
1799 nz := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05001800 b.AddEdgeTo(nz)
Keith Randall3526cf52015-08-24 23:52:03 -07001801 s.startBlock(nz)
1802 s.vars[n] = s.newValue2(ssa.OpAddPtr, Ptrto(Types[TUINT8]), ptr, low)
1803 s.endBlock()
1804
1805 // All done.
1806 merge := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05001807 b.AddEdgeTo(merge)
1808 nz.AddEdgeTo(merge)
Keith Randall3526cf52015-08-24 23:52:03 -07001809 s.startBlock(merge)
1810 return s.newValue2(ssa.OpStringMake, Types[TSTRING], s.variable(n, Ptrto(Types[TUINT8])), rlen)
1811
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001812 case OCALLFUNC, OCALLMETH:
1813 left := n.Left
1814 static := left.Op == ONAME && left.Class == PFUNC
1815
1816 if n.Op == OCALLMETH {
1817 // Rewrite to an OCALLFUNC: (p.f)(...) becomes (f)(p, ...)
1818 // Take care not to modify the original AST.
1819 if left.Op != ODOTMETH {
Keith Randall0ec72b62015-09-08 15:42:53 -07001820 Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", left)
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001821 }
1822
1823 newLeft := *left.Right
1824 newLeft.Type = left.Type
1825 if newLeft.Op == ONAME {
1826 newLeft.Class = PFUNC
1827 }
1828 left = &newLeft
1829 static = true
1830 }
Keith Randall290d8fc2015-06-10 15:03:06 -07001831
1832 // evaluate closure
1833 var closure *ssa.Value
1834 if !static {
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001835 closure = s.expr(left)
Keith Randall290d8fc2015-06-10 15:03:06 -07001836 }
1837
Keith Randalld2fd43a2015-04-15 15:51:25 -07001838 // run all argument assignments
Keith Randalld2fd43a2015-04-15 15:51:25 -07001839 s.stmtList(n.List)
1840
Keith Randalld2fd43a2015-04-15 15:51:25 -07001841 bNext := s.f.NewBlock(ssa.BlockPlain)
Keith Randall290d8fc2015-06-10 15:03:06 -07001842 var call *ssa.Value
1843 if static {
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001844 call = s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, left.Sym, s.mem())
Keith Randall290d8fc2015-06-10 15:03:06 -07001845 } else {
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07001846 entry := s.newValue2(ssa.OpLoad, Types[TUINTPTR], closure, s.mem())
Keith Randall8f22b522015-06-11 21:29:25 -07001847 call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, entry, closure, s.mem())
Keith Randall290d8fc2015-06-10 15:03:06 -07001848 }
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001849 dowidth(left.Type)
1850 call.AuxInt = left.Type.Argwid // call operations carry the argsize of the callee along with them
Keith Randalld56d2fa2015-08-12 12:54:47 -07001851 s.vars[&memvar] = call
Keith Randalld2fd43a2015-04-15 15:51:25 -07001852 b := s.endBlock()
1853 b.Kind = ssa.BlockCall
1854 b.Control = call
Todd Neal47d67992015-08-28 21:36:29 -05001855 b.AddEdgeTo(bNext)
Keith Randalld2fd43a2015-04-15 15:51:25 -07001856
1857 // read result from stack at the start of the fallthrough block
1858 s.startBlock(bNext)
1859 var titer Iter
Josh Bleecher Snyder15dcdfb2015-07-21 07:37:47 -07001860 fp := Structfirst(&titer, Getoutarg(left.Type))
Michael Matloob2aabacd2015-06-16 17:58:03 -07001861 if fp == nil {
1862 // CALLFUNC has no return value. Continue with the next statement.
1863 return nil
1864 }
Keith Randall8f22b522015-06-11 21:29:25 -07001865 a := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(fp.Type), fp.Width, s.sp)
1866 return s.newValue2(ssa.OpLoad, fp.Type, a, call)
Josh Bleecher Snyder3d23afb2015-08-12 11:22:16 -07001867
1868 case OGETG:
1869 return s.newValue0(ssa.OpGetG, n.Type)
1870
Keith Randalld2fd43a2015-04-15 15:51:25 -07001871 default:
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07001872 s.Unimplementedf("unhandled expr %s", opnames[n.Op])
Keith Randalld2fd43a2015-04-15 15:51:25 -07001873 return nil
1874 }
1875}
1876
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001877func (s *state) assign(left *Node, right *ssa.Value, wb bool) {
Keith Randalld4cc51d2015-08-14 21:47:20 -07001878 if left.Op == ONAME && isblank(left) {
Keith Randalld4cc51d2015-08-14 21:47:20 -07001879 return
1880 }
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001881 // TODO: do write barrier
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001882 // if wb
Keith Randalld4cc51d2015-08-14 21:47:20 -07001883 t := left.Type
1884 dowidth(t)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001885 if right == nil {
1886 // right == nil means use the zero value of the assigned type.
Daniel Morsing66b47812015-06-27 15:45:20 +01001887 if !canSSA(left) {
1888 // if we can't ssa this memory, treat it as just zeroing out the backing memory
1889 addr := s.addr(left)
Keith Randalld2107fc2015-08-24 02:16:19 -07001890 if left.Op == ONAME {
1891 s.vars[&memvar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
1892 }
Daniel Morsing66b47812015-06-27 15:45:20 +01001893 s.vars[&memvar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, t.Size(), addr, s.mem())
1894 return
1895 }
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001896 right = s.zeroVal(t)
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001897 }
1898 if left.Op == ONAME && canSSA(left) {
1899 // Update variable assignment.
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001900 s.vars[left] = right
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001901 return
1902 }
1903 // not ssa-able. Treat as a store.
1904 addr := s.addr(left)
Keith Randalld2107fc2015-08-24 02:16:19 -07001905 if left.Op == ONAME {
1906 s.vars[&memvar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
1907 }
Josh Bleecher Snyder07269312015-08-29 14:54:45 -07001908 s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, right, s.mem())
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01001909}
1910
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07001911// zeroVal returns the zero value for type t.
1912func (s *state) zeroVal(t *Type) *ssa.Value {
1913 switch {
Keith Randall9cb332e2015-07-28 14:19:20 -07001914 case t.IsInteger():
1915 switch t.Size() {
1916 case 1:
1917 return s.constInt8(t, 0)
1918 case 2:
1919 return s.constInt16(t, 0)
1920 case 4:
1921 return s.constInt32(t, 0)
1922 case 8:
1923 return s.constInt64(t, 0)
1924 default:
1925 s.Fatalf("bad sized integer type %s", t)
1926 }
Todd Neal752fe4d2015-08-25 19:21:45 -05001927 case t.IsFloat():
1928 switch t.Size() {
1929 case 4:
1930 return s.constFloat32(t, 0)
1931 case 8:
1932 return s.constFloat64(t, 0)
1933 default:
1934 s.Fatalf("bad sized float type %s", t)
1935 }
David Chase52578582015-08-28 14:24:10 -04001936 case t.IsComplex():
1937 switch t.Size() {
1938 case 8:
1939 z := s.constFloat32(Types[TFLOAT32], 0)
Keith Randalla5cffb62015-08-28 13:52:26 -07001940 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
David Chase52578582015-08-28 14:24:10 -04001941 case 16:
1942 z := s.constFloat64(Types[TFLOAT64], 0)
Keith Randalla5cffb62015-08-28 13:52:26 -07001943 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
David Chase52578582015-08-28 14:24:10 -04001944 default:
1945 s.Fatalf("bad sized complex type %s", t)
1946 }
1947
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07001948 case t.IsString():
Keith Randall9cb332e2015-07-28 14:19:20 -07001949 return s.entryNewValue0A(ssa.OpConstString, t, "")
1950 case t.IsPtr():
1951 return s.entryNewValue0(ssa.OpConstNil, t)
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07001952 case t.IsBoolean():
Josh Bleecher Snydercea44142015-09-08 16:52:25 -07001953 return s.constBool(false)
Keith Randall9f954db2015-08-18 10:26:28 -07001954 case t.IsInterface():
1955 return s.entryNewValue0(ssa.OpConstInterface, t)
1956 case t.IsSlice():
1957 return s.entryNewValue0(ssa.OpConstSlice, t)
Josh Bleecher Snyder21bd4832015-07-20 15:30:52 -07001958 }
1959 s.Unimplementedf("zero for type %v not implemented", t)
1960 return nil
1961}
1962
Josh Bleecher Snyder95aff4d2015-07-28 14:31:25 -07001963// etypesign returns the signed-ness of e, for integer/pointer etypes.
1964// -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer.
1965func etypesign(e uint8) int8 {
1966 switch e {
1967 case TINT8, TINT16, TINT32, TINT64, TINT:
1968 return -1
1969 case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR, TUNSAFEPTR:
1970 return +1
1971 }
1972 return 0
1973}
1974
Josh Bleecher Snydere00d6092015-06-02 09:16:22 -07001975// addr converts the address of the expression n to SSA, adds it to s and returns the SSA result.
Keith Randallc3c84a22015-07-13 15:55:37 -07001976// The value that the returned Value represents is guaranteed to be non-nil.
Keith Randallcfc2aa52015-05-18 16:44:20 -07001977func (s *state) addr(n *Node) *ssa.Value {
1978 switch n.Op {
1979 case ONAME:
Keith Randall290d8fc2015-06-10 15:03:06 -07001980 switch n.Class {
1981 case PEXTERN:
Keith Randallcfc2aa52015-05-18 16:44:20 -07001982 // global variable
Keith Randall8c46aa52015-06-19 21:02:28 -07001983 aux := &ssa.ExternSymbol{n.Type, n.Sym}
Josh Bleecher Snyder67df7932015-07-28 11:08:44 -07001984 v := s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb)
1985 // TODO: Make OpAddr use AuxInt as well as Aux.
1986 if n.Xoffset != 0 {
1987 v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v)
1988 }
1989 return v
Keith Randalld2107fc2015-08-24 02:16:19 -07001990 case PPARAM, PPARAMOUT:
Keith Randall8c46aa52015-06-19 21:02:28 -07001991 // parameter/result slot or local variable
Josh Bleecher Snyder596ddf42015-06-29 11:56:28 -07001992 v := s.decladdrs[n]
1993 if v == nil {
Josh Bleecher Snyder0a133cdd2015-07-03 20:28:56 -07001994 if flag_race != 0 && n.String() == ".fp" {
1995 s.Unimplementedf("race detector mishandles nodfp")
1996 }
Josh Bleecher Snyder596ddf42015-06-29 11:56:28 -07001997 s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
1998 }
1999 return v
Keith Randalld2107fc2015-08-24 02:16:19 -07002000 case PAUTO:
2001 // We need to regenerate the address of autos
2002 // at every use. This prevents LEA instructions
2003 // from occurring before the corresponding VarDef
2004 // op and confusing the liveness analysis into thinking
2005 // the variable is live at function entry.
2006 // TODO: I'm not sure if this really works or we're just
2007 // getting lucky. We might need a real dependency edge
2008 // between vardef and addr ops.
2009 aux := &ssa.AutoSymbol{Typ: n.Type, Node: n}
2010 return s.newValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
Josh Bleecher Snyder96548732015-08-28 13:35:32 -07002011 case PAUTO | PHEAP, PPARAMREF:
Daniel Morsingc31b6dd2015-06-12 14:23:29 +01002012 return s.expr(n.Name.Heapaddr)
Keith Randall290d8fc2015-06-10 15:03:06 -07002013 default:
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07002014 s.Unimplementedf("variable address class %v not implemented", n.Class)
Keith Randall290d8fc2015-06-10 15:03:06 -07002015 return nil
Keith Randallcfc2aa52015-05-18 16:44:20 -07002016 }
Keith Randallcfc2aa52015-05-18 16:44:20 -07002017 case OINDREG:
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07002018 // indirect off a register
Keith Randallcfc2aa52015-05-18 16:44:20 -07002019 // used for storing/loading arguments/returns to/from callees
Josh Bleecher Snyder25d19162015-07-28 12:37:46 -07002020 if int(n.Reg) != Thearch.REGSP {
2021 s.Unimplementedf("OINDREG of non-SP register %s in addr: %v", obj.Rconv(int(n.Reg)), n)
2022 return nil
2023 }
Keith Randall8f22b522015-06-11 21:29:25 -07002024 return s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002025 case OINDEX:
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002026 if n.Left.Type.IsSlice() {
Keith Randallcfc2aa52015-05-18 16:44:20 -07002027 a := s.expr(n.Left)
2028 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07002029 i = s.extendIndex(i)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002030 len := s.newValue1(ssa.OpSliceLen, Types[TUINTPTR], a)
Keith Randall46e62f82015-08-18 14:17:30 -07002031 if !n.Bounded {
2032 s.boundsCheck(i, len)
2033 }
Keith Randall8f22b522015-06-11 21:29:25 -07002034 p := s.newValue1(ssa.OpSlicePtr, Ptrto(n.Left.Type.Type), a)
2035 return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), p, i)
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002036 } else { // array
2037 a := s.addr(n.Left)
2038 i := s.expr(n.Right)
Keith Randall2a5e6c42015-07-23 14:35:02 -07002039 i = s.extendIndex(i)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002040 len := s.constInt(Types[TINT], n.Left.Type.Bound)
Keith Randall46e62f82015-08-18 14:17:30 -07002041 if !n.Bounded {
2042 s.boundsCheck(i, len)
2043 }
Brad Fitzpatrick7af53d92015-07-10 10:47:28 -06002044 return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), a, i)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002045 }
Todd Nealb383de22015-07-13 21:22:16 -05002046 case OIND:
2047 p := s.expr(n.Left)
2048 s.nilCheck(p)
2049 return p
Keith Randallc3c84a22015-07-13 15:55:37 -07002050 case ODOT:
2051 p := s.addr(n.Left)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002052 return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
Keith Randallc3c84a22015-07-13 15:55:37 -07002053 case ODOTPTR:
2054 p := s.expr(n.Left)
2055 s.nilCheck(p)
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002056 return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset))
Keith Randallcfc2aa52015-05-18 16:44:20 -07002057 default:
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07002058 s.Unimplementedf("unhandled addr %v", Oconv(int(n.Op), 0))
Keith Randallcfc2aa52015-05-18 16:44:20 -07002059 return nil
2060 }
2061}
2062
Keith Randall290d8fc2015-06-10 15:03:06 -07002063// canSSA reports whether n is SSA-able.
2064// n must be an ONAME.
2065func canSSA(n *Node) bool {
2066 if n.Op != ONAME {
Daniel Morsing66b47812015-06-27 15:45:20 +01002067 return false
Keith Randall290d8fc2015-06-10 15:03:06 -07002068 }
2069 if n.Addrtaken {
2070 return false
2071 }
2072 if n.Class&PHEAP != 0 {
2073 return false
2074 }
Josh Bleecher Snyder96548732015-08-28 13:35:32 -07002075 switch n.Class {
2076 case PEXTERN, PPARAMOUT, PPARAMREF:
Keith Randall290d8fc2015-06-10 15:03:06 -07002077 return false
2078 }
Keith Randall8a1f6212015-09-08 21:28:44 -07002079 if n.Class == PPARAM && n.String() == ".this" {
2080 // wrappers generated by genwrapper need to update
2081 // the .this pointer in place.
2082 return false
2083 }
Keith Randall9f954db2015-08-18 10:26:28 -07002084 return canSSAType(n.Type)
2085 // TODO: try to make more variables SSAable?
2086}
2087
2088// canSSAType reports whether variables of type t are SSA-able.
2089func canSSAType(t *Type) bool {
2090 dowidth(t)
2091 if t.Width > int64(4*Widthptr) {
2092 // 4*Widthptr is an arbitrary constant. We want it
2093 // to be at least 3*Widthptr so slices can be registerized.
2094 // Too big and we'll introduce too much register pressure.
Daniel Morsing66b47812015-06-27 15:45:20 +01002095 return false
2096 }
Keith Randall9f954db2015-08-18 10:26:28 -07002097 switch t.Etype {
2098 case TARRAY:
2099 if Isslice(t) {
2100 return true
2101 }
2102 // We can't do arrays because dynamic indexing is
2103 // not supported on SSA variables.
2104 // TODO: maybe allow if length is <=1? All indexes
2105 // are constant? Might be good for the arrays
2106 // introduced by the compiler for variadic functions.
2107 return false
2108 case TSTRUCT:
2109 if countfield(t) > 4 {
2110 // 4 is an arbitrary constant. Same reasoning
2111 // as above, lots of small fields would waste
2112 // register space needed by other values.
2113 return false
2114 }
2115 for t1 := t.Type; t1 != nil; t1 = t1.Down {
2116 if !canSSAType(t1.Type) {
2117 return false
2118 }
2119 }
2120 return false // until it is implemented
2121 //return true
2122 default:
2123 return true
2124 }
Keith Randall290d8fc2015-06-10 15:03:06 -07002125}
2126
Keith Randallcfc2aa52015-05-18 16:44:20 -07002127// nilCheck generates nil pointer checking code.
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002128// Starts a new block on return, unless nil checks are disabled.
Josh Bleecher Snyder7e74e432015-07-24 11:55:52 -07002129// Used only for automatically inserted nil checks,
2130// not for user code like 'x != nil'.
Keith Randallcfc2aa52015-05-18 16:44:20 -07002131func (s *state) nilCheck(ptr *ssa.Value) {
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002132 if Disable_checknil != 0 {
2133 return
2134 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002135 c := s.newValue1(ssa.OpIsNonNil, Types[TBOOL], ptr)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002136 b := s.endBlock()
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002137 b.Kind = ssa.BlockIf
Keith Randallcfc2aa52015-05-18 16:44:20 -07002138 b.Control = c
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002139 b.Likely = ssa.BranchLikely
Keith Randallcfc2aa52015-05-18 16:44:20 -07002140 bNext := s.f.NewBlock(ssa.BlockPlain)
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002141 bPanic := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05002142 b.AddEdgeTo(bNext)
2143 b.AddEdgeTo(bPanic)
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002144 s.startBlock(bPanic)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002145 // TODO: implicit nil checks somehow?
Keith Randallf5c53e02015-09-09 18:03:41 -07002146 chk := s.newValue2(ssa.OpPanicNilCheck, ssa.TypeMem, ptr, s.mem())
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002147 s.endBlock()
Keith Randallf5c53e02015-09-09 18:03:41 -07002148 bPanic.Kind = ssa.BlockExit
2149 bPanic.Control = chk
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07002150 s.startBlock(bNext)
Keith Randallcfc2aa52015-05-18 16:44:20 -07002151}
2152
2153// boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not.
2154// Starts a new block on return.
2155func (s *state) boundsCheck(idx, len *ssa.Value) {
Keith Randall8d236812015-08-18 15:25:40 -07002156 if Debug['B'] != 0 {
2157 return
2158 }
Keith Randallcfc2aa52015-05-18 16:44:20 -07002159 // TODO: convert index to full width?
2160 // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero.
2161
2162 // bounds check
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07002163 cmp := s.newValue2(ssa.OpIsInBounds, Types[TBOOL], idx, len)
Keith Randall3526cf52015-08-24 23:52:03 -07002164 s.check(cmp, ssa.OpPanicIndexCheck)
2165}
2166
2167// sliceBoundsCheck generates slice bounds checking code. Checks if 0 <= idx <= len, branches to exit if not.
2168// Starts a new block on return.
2169func (s *state) sliceBoundsCheck(idx, len *ssa.Value) {
2170 if Debug['B'] != 0 {
2171 return
2172 }
2173 // TODO: convert index to full width?
2174 // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero.
2175
2176 // bounds check
2177 cmp := s.newValue2(ssa.OpIsSliceInBounds, Types[TBOOL], idx, len)
2178 s.check(cmp, ssa.OpPanicSliceCheck)
2179}
2180
2181// If cmp (a bool) is false, panic using the given op.
2182func (s *state) check(cmp *ssa.Value, panicOp ssa.Op) {
Keith Randallcfc2aa52015-05-18 16:44:20 -07002183 b := s.endBlock()
2184 b.Kind = ssa.BlockIf
2185 b.Control = cmp
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07002186 b.Likely = ssa.BranchLikely
Keith Randallcfc2aa52015-05-18 16:44:20 -07002187 bNext := s.f.NewBlock(ssa.BlockPlain)
Keith Randall8d236812015-08-18 15:25:40 -07002188 bPanic := s.f.NewBlock(ssa.BlockPlain)
Todd Neal47d67992015-08-28 21:36:29 -05002189 b.AddEdgeTo(bNext)
2190 b.AddEdgeTo(bPanic)
Keith Randall8d236812015-08-18 15:25:40 -07002191 s.startBlock(bPanic)
2192 // The panic check takes/returns memory to ensure that the right
2193 // memory state is observed if the panic happens.
Keith Randallf5c53e02015-09-09 18:03:41 -07002194 chk := s.newValue1(panicOp, ssa.TypeMem, s.mem())
Keith Randall8d236812015-08-18 15:25:40 -07002195 s.endBlock()
Keith Randallf5c53e02015-09-09 18:03:41 -07002196 bPanic.Kind = ssa.BlockExit
2197 bPanic.Control = chk
Keith Randallcfc2aa52015-05-18 16:44:20 -07002198 s.startBlock(bNext)
2199}
2200
David Chase42825882015-08-20 15:14:20 -04002201type u2fcvtTab struct {
2202 geq, cvt2F, and, rsh, or, add ssa.Op
2203 one func(*state, ssa.Type, int64) *ssa.Value
2204}
2205
2206var u64_f64 u2fcvtTab = u2fcvtTab{
2207 geq: ssa.OpGeq64,
2208 cvt2F: ssa.OpCvt64to64F,
2209 and: ssa.OpAnd64,
2210 rsh: ssa.OpRsh64Ux64,
2211 or: ssa.OpOr64,
2212 add: ssa.OpAdd64F,
2213 one: (*state).constInt64,
2214}
2215
2216var u64_f32 u2fcvtTab = u2fcvtTab{
2217 geq: ssa.OpGeq64,
2218 cvt2F: ssa.OpCvt64to32F,
2219 and: ssa.OpAnd64,
2220 rsh: ssa.OpRsh64Ux64,
2221 or: ssa.OpOr64,
2222 add: ssa.OpAdd32F,
2223 one: (*state).constInt64,
2224}
2225
2226// Excess generality on a machine with 64-bit integer registers.
2227// Not used on AMD64.
2228var u32_f32 u2fcvtTab = u2fcvtTab{
2229 geq: ssa.OpGeq32,
2230 cvt2F: ssa.OpCvt32to32F,
2231 and: ssa.OpAnd32,
2232 rsh: ssa.OpRsh32Ux32,
2233 or: ssa.OpOr32,
2234 add: ssa.OpAdd32F,
2235 one: func(s *state, t ssa.Type, x int64) *ssa.Value {
2236 return s.constInt32(t, int32(x))
2237 },
2238}
2239
2240func (s *state) uint64Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2241 return s.uintTofloat(&u64_f64, n, x, ft, tt)
2242}
2243
2244func (s *state) uint64Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2245 return s.uintTofloat(&u64_f32, n, x, ft, tt)
2246}
2247
2248func (s *state) uintTofloat(cvttab *u2fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2249 // if x >= 0 {
2250 // result = (floatY) x
2251 // } else {
2252 // y = uintX(x) ; y = x & 1
2253	//    z = uintX(x)
2254 // z = z >> 1
2255 // z = z | y
David Chase73151062015-08-26 14:25:40 -04002256 // result = floatY(z)
2257 // result = result + result
David Chase42825882015-08-20 15:14:20 -04002258 // }
2259 //
2260 // Code borrowed from old code generator.
2261 // What's going on: large 64-bit "unsigned" looks like
2262 // negative number to hardware's integer-to-float
2263 // conversion. However, because the mantissa is only
2264 // 63 bits, we don't need the LSB, so instead we do an
2265 // unsigned right shift (divide by two), convert, and
2266 // double. However, before we do that, we need to be
2267 // sure that we do not lose a "1" if that made the
2268 // difference in the resulting rounding. Therefore, we
2269 // preserve it, and OR (not ADD) it back in. The case
2270 // that matters is when the eleven discarded bits are
2271 // equal to 10000000001; that rounds up, and the 1 cannot
2272 // be lost else it would round down if the LSB of the
2273 // candidate mantissa is 0.
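	// As an illustrative sketch (not part of the generated code itself),
	// the 64-bit case computes the equivalent of:
	//	if int64(x) >= 0 {
	//		result = float64(x)
	//	} else {
	//		z := x>>1 | x&1
	//		result = float64(z)
	//		result = result + result
	//	}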
2274 cmp := s.newValue2(cvttab.geq, Types[TBOOL], x, s.zeroVal(ft))
2275 b := s.endBlock()
2276 b.Kind = ssa.BlockIf
2277 b.Control = cmp
2278 b.Likely = ssa.BranchLikely
2279
2280 bThen := s.f.NewBlock(ssa.BlockPlain)
2281 bElse := s.f.NewBlock(ssa.BlockPlain)
2282 bAfter := s.f.NewBlock(ssa.BlockPlain)
2283
Todd Neal47d67992015-08-28 21:36:29 -05002284 b.AddEdgeTo(bThen)
David Chase42825882015-08-20 15:14:20 -04002285 s.startBlock(bThen)
2286 a0 := s.newValue1(cvttab.cvt2F, tt, x)
2287 s.vars[n] = a0
2288 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002289 bThen.AddEdgeTo(bAfter)
David Chase42825882015-08-20 15:14:20 -04002290
Todd Neal47d67992015-08-28 21:36:29 -05002291 b.AddEdgeTo(bElse)
David Chase42825882015-08-20 15:14:20 -04002292 s.startBlock(bElse)
2293 one := cvttab.one(s, ft, 1)
2294 y := s.newValue2(cvttab.and, ft, x, one)
2295 z := s.newValue2(cvttab.rsh, ft, x, one)
2296 z = s.newValue2(cvttab.or, ft, z, y)
2297 a := s.newValue1(cvttab.cvt2F, tt, z)
2298 a1 := s.newValue2(cvttab.add, tt, a, a)
2299 s.vars[n] = a1
2300 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002301 bElse.AddEdgeTo(bAfter)
David Chase42825882015-08-20 15:14:20 -04002302
2303 s.startBlock(bAfter)
2304 return s.variable(n, n.Type)
2305}
2306
Todd Neal707af252015-08-28 15:56:43 -05002307// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
2308func (s *state) referenceTypeBuiltin(n *Node, x *ssa.Value) *ssa.Value {
2309 if !n.Left.Type.IsMap() && !n.Left.Type.IsChan() {
2310 s.Fatalf("node must be a map or a channel")
2311 }
Todd Neale0e40682015-08-26 18:40:52 -05002312 // if n == nil {
2313 // return 0
2314 // } else {
Todd Neal707af252015-08-28 15:56:43 -05002315 // // len
Todd Neale0e40682015-08-26 18:40:52 -05002316 // return *((*int)n)
Todd Neal707af252015-08-28 15:56:43 -05002317 // // cap
2318 // return *(((*int)n)+1)
Todd Neale0e40682015-08-26 18:40:52 -05002319 // }
2320 lenType := n.Type
Todd Neal67ac8a32015-08-28 15:20:54 -05002321 nilValue := s.newValue0(ssa.OpConstNil, Types[TUINTPTR])
2322 cmp := s.newValue2(ssa.OpEqPtr, Types[TBOOL], x, nilValue)
Todd Neale0e40682015-08-26 18:40:52 -05002323 b := s.endBlock()
2324 b.Kind = ssa.BlockIf
2325 b.Control = cmp
2326 b.Likely = ssa.BranchUnlikely
2327
2328 bThen := s.f.NewBlock(ssa.BlockPlain)
2329 bElse := s.f.NewBlock(ssa.BlockPlain)
2330 bAfter := s.f.NewBlock(ssa.BlockPlain)
2331
Todd Neal707af252015-08-28 15:56:43 -05002332 // length/capacity of a nil map/chan is zero
Todd Neal47d67992015-08-28 21:36:29 -05002333 b.AddEdgeTo(bThen)
Todd Neale0e40682015-08-26 18:40:52 -05002334 s.startBlock(bThen)
2335 s.vars[n] = s.zeroVal(lenType)
2336 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002337 bThen.AddEdgeTo(bAfter)
Todd Neale0e40682015-08-26 18:40:52 -05002338
Todd Neal47d67992015-08-28 21:36:29 -05002339 b.AddEdgeTo(bElse)
Todd Neale0e40682015-08-26 18:40:52 -05002340 s.startBlock(bElse)
Todd Neal707af252015-08-28 15:56:43 -05002341 if n.Op == OLEN {
2342 // length is stored in the first word for map/chan
2343 s.vars[n] = s.newValue2(ssa.OpLoad, lenType, x, s.mem())
2344 } else if n.Op == OCAP {
2345 // capacity is stored in the second word for chan
2346 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Width, x)
2347 s.vars[n] = s.newValue2(ssa.OpLoad, lenType, sw, s.mem())
2348 } else {
2349 s.Fatalf("op must be OLEN or OCAP")
2350 }
Todd Neale0e40682015-08-26 18:40:52 -05002351 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002352 bElse.AddEdgeTo(bAfter)
Todd Neale0e40682015-08-26 18:40:52 -05002353
2354 s.startBlock(bAfter)
2355 return s.variable(n, lenType)
2356}
2357
David Chase73151062015-08-26 14:25:40 -04002358type f2uCvtTab struct {
2359 ltf, cvt2U, subf ssa.Op
2360 value func(*state, ssa.Type, float64) *ssa.Value
2361}
2362
2363var f32_u64 f2uCvtTab = f2uCvtTab{
2364 ltf: ssa.OpLess32F,
2365 cvt2U: ssa.OpCvt32Fto64,
2366 subf: ssa.OpSub32F,
2367 value: (*state).constFloat32,
2368}
2369
2370var f64_u64 f2uCvtTab = f2uCvtTab{
2371 ltf: ssa.OpLess64F,
2372 cvt2U: ssa.OpCvt64Fto64,
2373 subf: ssa.OpSub64F,
2374 value: (*state).constFloat64,
2375}
2376
2377func (s *state) float32ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2378 return s.floatToUint(&f32_u64, n, x, ft, tt)
2379}
2380func (s *state) float64ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2381 return s.floatToUint(&f64_u64, n, x, ft, tt)
2382}
2383
2384func (s *state) floatToUint(cvttab *f2uCvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
2385 // if x < 9223372036854775808.0 {
2386 // result = uintY(x)
2387 // } else {
2388 // y = x - 9223372036854775808.0
2389 // z = uintY(y)
2390 // result = z | -9223372036854775808
2391 // }
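	// Subtracting 2^63 brings x into signed-convertible range so the
	// hardware conversion can be used; OR-ing the converted result with
	// 1<<63 (the bit pattern of -9223372036854775808) adds 2^63 back in
	// the integer domain.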
2392 twoToThe63 := cvttab.value(s, ft, 9223372036854775808.0)
2393 cmp := s.newValue2(cvttab.ltf, Types[TBOOL], x, twoToThe63)
2394 b := s.endBlock()
2395 b.Kind = ssa.BlockIf
2396 b.Control = cmp
2397 b.Likely = ssa.BranchLikely
2398
2399 bThen := s.f.NewBlock(ssa.BlockPlain)
2400 bElse := s.f.NewBlock(ssa.BlockPlain)
2401 bAfter := s.f.NewBlock(ssa.BlockPlain)
2402
Todd Neal47d67992015-08-28 21:36:29 -05002403 b.AddEdgeTo(bThen)
David Chase73151062015-08-26 14:25:40 -04002404 s.startBlock(bThen)
2405 a0 := s.newValue1(cvttab.cvt2U, tt, x)
2406 s.vars[n] = a0
2407 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002408 bThen.AddEdgeTo(bAfter)
David Chase73151062015-08-26 14:25:40 -04002409
Todd Neal47d67992015-08-28 21:36:29 -05002410 b.AddEdgeTo(bElse)
David Chase73151062015-08-26 14:25:40 -04002411 s.startBlock(bElse)
2412 y := s.newValue2(cvttab.subf, ft, x, twoToThe63)
2413 y = s.newValue1(cvttab.cvt2U, tt, y)
2414 z := s.constInt64(tt, -9223372036854775808)
2415 a1 := s.newValue2(ssa.OpOr64, tt, y, z)
2416 s.vars[n] = a1
2417 s.endBlock()
Todd Neal47d67992015-08-28 21:36:29 -05002418 bElse.AddEdgeTo(bAfter)
David Chase73151062015-08-26 14:25:40 -04002419
2420 s.startBlock(bAfter)
2421 return s.variable(n, n.Type)
2422}
2423
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -07002424// checkgoto checks that a goto from from to to does not
2425// jump into a block or jump over variable declarations.
2426// It is a copy of checkgoto in the pre-SSA backend,
2427// modified only for line number handling.
2428// TODO: document how this works and why it is designed the way it is.
2429func (s *state) checkgoto(from *Node, to *Node) {
2430 if from.Sym == to.Sym {
2431 return
2432 }
2433
2434 nf := 0
2435 for fs := from.Sym; fs != nil; fs = fs.Link {
2436 nf++
2437 }
2438 nt := 0
2439 for fs := to.Sym; fs != nil; fs = fs.Link {
2440 nt++
2441 }
2442 fs := from.Sym
2443 for ; nf > nt; nf-- {
2444 fs = fs.Link
2445 }
2446 if fs != to.Sym {
2447 // decide what to complain about.
2448 // prefer to complain about 'into block' over declarations,
2449 // so scan backward to find most recent block or else dcl.
2450 var block *Sym
2451
2452 var dcl *Sym
2453 ts := to.Sym
2454 for ; nt > nf; nt-- {
2455 if ts.Pkg == nil {
2456 block = ts
2457 } else {
2458 dcl = ts
2459 }
2460 ts = ts.Link
2461 }
2462
2463 for ts != fs {
2464 if ts.Pkg == nil {
2465 block = ts
2466 } else {
2467 dcl = ts
2468 }
2469 ts = ts.Link
2470 fs = fs.Link
2471 }
2472
2473 lno := int(from.Left.Lineno)
2474 if block != nil {
2475 yyerrorl(lno, "goto %v jumps into block starting at %v", from.Left.Sym, Ctxt.Line(int(block.Lastlineno)))
2476 } else {
2477 yyerrorl(lno, "goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, Ctxt.Line(int(dcl.Lastlineno)))
2478 }
2479 }
2480}
2481
Keith Randalld2fd43a2015-04-15 15:51:25 -07002482// variable returns the value of a variable at the current location.
Keith Randall8c46aa52015-06-19 21:02:28 -07002483func (s *state) variable(name *Node, t ssa.Type) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07002484 v := s.vars[name]
2485 if v == nil {
2486 // TODO: get type? Take Sym as arg?
Keith Randall8f22b522015-06-11 21:29:25 -07002487 v = s.newValue0A(ssa.OpFwdRef, t, name)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002488 s.vars[name] = v
2489 }
2490 return v
2491}
2492
Keith Randallcfc2aa52015-05-18 16:44:20 -07002493func (s *state) mem() *ssa.Value {
Keith Randall8c46aa52015-06-19 21:02:28 -07002494 return s.variable(&memvar, ssa.TypeMem)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002495}
2496
Keith Randallcfc2aa52015-05-18 16:44:20 -07002497func (s *state) linkForwardReferences() {
Keith Randalld2fd43a2015-04-15 15:51:25 -07002498 // Build ssa graph. Each variable on its first use in a basic block
2499 // leaves a FwdRef in that block representing the incoming value
2500 // of that variable. This function links that ref up with possible definitions,
2501 // inserting Phi values as needed. This is essentially the algorithm
2502	// described by Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau:
2503 // http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf
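	// For example, a variable assigned in both arms of an if statement
	// leaves an OpFwdRef at its first use in the join block;
	// lookupVarIncoming then consults both predecessors and, because they
	// disagree, materializes an OpPhi joining the two definitions.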
2504 for _, b := range s.f.Blocks {
2505 for _, v := range b.Values {
2506 if v.Op != ssa.OpFwdRef {
2507 continue
2508 }
Keith Randall8c46aa52015-06-19 21:02:28 -07002509 name := v.Aux.(*Node)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002510 v.Op = ssa.OpCopy
2511 v.Aux = nil
2512 v.SetArgs1(s.lookupVarIncoming(b, v.Type, name))
2513 }
2514 }
2515}
2516
2517// lookupVarIncoming finds the variable's value at the start of block b.
Keith Randall8c46aa52015-06-19 21:02:28 -07002518func (s *state) lookupVarIncoming(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07002519 // TODO(khr): have lookupVarIncoming overwrite the fwdRef or copy it
2520 // will be used in, instead of having the result used in a copy value.
2521 if b == s.f.Entry {
Keith Randall8c46aa52015-06-19 21:02:28 -07002522 if name == &memvar {
Keith Randallcfc2aa52015-05-18 16:44:20 -07002523 return s.startmem
Keith Randalld2fd43a2015-04-15 15:51:25 -07002524 }
2525 // variable is live at the entry block. Load it.
Keith Randall8c46aa52015-06-19 21:02:28 -07002526 addr := s.decladdrs[name]
2527 if addr == nil {
2528 // TODO: closure args reach here.
Josh Bleecher Snyder58446032015-08-23 20:29:43 -07002529 s.Unimplementedf("unhandled closure arg")
Keith Randall8c46aa52015-06-19 21:02:28 -07002530 }
2531 if _, ok := addr.Aux.(*ssa.ArgSymbol); !ok {
2532 s.Fatalf("variable live at start of function %s is not an argument %s", b.Func.Name, name)
2533 }
Keith Randall8f22b522015-06-11 21:29:25 -07002534 return s.entryNewValue2(ssa.OpLoad, t, addr, s.startmem)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002535 }
2536 var vals []*ssa.Value
2537 for _, p := range b.Preds {
2538 vals = append(vals, s.lookupVarOutgoing(p, t, name))
2539 }
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07002540 if len(vals) == 0 {
Josh Bleecher Snyder61aa0952015-07-20 15:39:14 -07002541 // This block is dead; we have no predecessors and we're not the entry block.
2542 // It doesn't matter what we use here as long as it is well-formed,
2543 // so use the default/zero value.
2544 if name == &memvar {
2545 return s.startmem
2546 }
2547 return s.zeroVal(name.Type)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07002548 }
Keith Randalld2fd43a2015-04-15 15:51:25 -07002549 v0 := vals[0]
2550 for i := 1; i < len(vals); i++ {
2551 if vals[i] != v0 {
2552 // need a phi value
Keith Randall8f22b522015-06-11 21:29:25 -07002553 v := b.NewValue0(s.peekLine(), ssa.OpPhi, t)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002554 v.AddArgs(vals...)
2555 return v
2556 }
2557 }
2558 return v0
2559}
2560
2561// lookupVarOutgoing finds the variable's value at the end of block b.
Keith Randall8c46aa52015-06-19 21:02:28 -07002562func (s *state) lookupVarOutgoing(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value {
Keith Randalld2fd43a2015-04-15 15:51:25 -07002563 m := s.defvars[b.ID]
2564 if v, ok := m[name]; ok {
2565 return v
2566 }
2567 // The variable is not defined by b and we haven't
2568 // looked it up yet. Generate v, a copy value which
2569 // will be the outgoing value of the variable. Then
2570 // look up w, the incoming value of the variable.
2571 // Make v = copy(w). We need the extra copy to
2572 // prevent infinite recursion when looking up the
2573 // incoming value of the variable.
Keith Randall8f22b522015-06-11 21:29:25 -07002574 v := b.NewValue0(s.peekLine(), ssa.OpCopy, t)
Keith Randalld2fd43a2015-04-15 15:51:25 -07002575 m[name] = v
2576 v.AddArg(s.lookupVarIncoming(b, t, name))
2577 return v
2578}
2579
2580// TODO: the above mutually recursive functions can lead to very deep stacks. Fix that.
2581
Keith Randall083a6462015-05-12 11:06:44 -07002582// an unresolved branch
2583type branch struct {
2584 p *obj.Prog // branch instruction
2585 b *ssa.Block // target
2586}
2587
Keith Randall9569b952015-08-28 22:51:01 -07002588type genState struct {
2589 // branches remembers all the branch instructions we've seen
2590 // and where they would like to go.
2591 branches []branch
2592
2593 // bstart remembers where each block starts (indexed by block ID)
2594 bstart []*obj.Prog
2595
2596 // deferBranches remembers all the defer branches we've seen.
2597 deferBranches []*obj.Prog
2598
2599 // deferTarget remembers the (last) deferreturn call site.
2600 deferTarget *obj.Prog
2601}
2602
Keith Randall083a6462015-05-12 11:06:44 -07002603// genssa appends entries to ptxt for each instruction in f.
2604// gcargs and gclocals are filled in with pointer maps for the frame.
2605func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) {
Keith Randall9569b952015-08-28 22:51:01 -07002606 var s genState
2607
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07002608 e := f.Config.Frontend().(*ssaExport)
2609 // We're about to emit a bunch of Progs.
2610 // Since the only way to get here is to explicitly request it,
2611 // just fail on unimplemented instead of trying to unwind our mess.
2612 e.mustImplement = true
2613
Keith Randall083a6462015-05-12 11:06:44 -07002614 // Remember where each block starts.
Keith Randall9569b952015-08-28 22:51:01 -07002615 s.bstart = make([]*obj.Prog, f.NumBlocks())
Keith Randall083a6462015-05-12 11:06:44 -07002616
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002617 var valueProgs map[*obj.Prog]*ssa.Value
2618 var blockProgs map[*obj.Prog]*ssa.Block
2619 const logProgs = true
2620 if logProgs {
2621 valueProgs = make(map[*obj.Prog]*ssa.Value, f.NumValues())
2622 blockProgs = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
2623 f.Logf("genssa %s\n", f.Name)
2624 blockProgs[Pc] = f.Blocks[0]
2625 }
2626
Keith Randall083a6462015-05-12 11:06:44 -07002627 // Emit basic blocks
2628 for i, b := range f.Blocks {
Keith Randall9569b952015-08-28 22:51:01 -07002629 s.bstart[b.ID] = Pc
Keith Randall083a6462015-05-12 11:06:44 -07002630 // Emit values in block
2631 for _, v := range b.Values {
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002632 x := Pc
Keith Randall9569b952015-08-28 22:51:01 -07002633 s.genValue(v)
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002634 if logProgs {
2635 for ; x != Pc; x = x.Link {
2636 valueProgs[x] = v
2637 }
2638 }
Keith Randall083a6462015-05-12 11:06:44 -07002639 }
2640 // Emit control flow instructions for block
2641 var next *ssa.Block
2642 if i < len(f.Blocks)-1 {
2643 next = f.Blocks[i+1]
2644 }
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002645 x := Pc
Keith Randall9569b952015-08-28 22:51:01 -07002646 s.genBlock(b, next)
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002647 if logProgs {
2648 for ; x != Pc; x = x.Link {
2649 blockProgs[x] = b
2650 }
2651 }
Keith Randall083a6462015-05-12 11:06:44 -07002652 }
2653
2654 // Resolve branches
Keith Randall9569b952015-08-28 22:51:01 -07002655 for _, br := range s.branches {
2656 br.p.To.Val = s.bstart[br.b.ID]
2657 }
Keith Randallca9e4502015-09-08 08:59:57 -07002658 if s.deferBranches != nil && s.deferTarget == nil {
2659 // This can happen when the function has a defer but
2660 // no return (because it has an infinite loop).
2661 s.deferReturn()
2662 Prog(obj.ARET)
2663 }
Keith Randall9569b952015-08-28 22:51:01 -07002664 for _, p := range s.deferBranches {
2665 p.To.Val = s.deferTarget
Keith Randall083a6462015-05-12 11:06:44 -07002666 }
2667
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002668 if logProgs {
2669 for p := ptxt; p != nil; p = p.Link {
2670 var s string
2671 if v, ok := valueProgs[p]; ok {
2672 s = v.String()
2673 } else if b, ok := blockProgs[p]; ok {
2674 s = b.String()
2675 } else {
2676 s = " " // most value and branch strings are 2-3 characters long
2677 }
2678 f.Logf("%s\t%s\n", s, p)
2679 }
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -07002680 if f.Config.HTML != nil {
2681 saved := ptxt.Ctxt.LineHist.PrintFilenameOnly
2682 ptxt.Ctxt.LineHist.PrintFilenameOnly = true
2683 var buf bytes.Buffer
2684 buf.WriteString("<code>")
2685 buf.WriteString("<dl class=\"ssa-gen\">")
2686 for p := ptxt; p != nil; p = p.Link {
2687 buf.WriteString("<dt class=\"ssa-prog-src\">")
2688 if v, ok := valueProgs[p]; ok {
2689 buf.WriteString(v.HTML())
2690 } else if b, ok := blockProgs[p]; ok {
2691 buf.WriteString(b.HTML())
2692 }
2693 buf.WriteString("</dt>")
2694 buf.WriteString("<dd class=\"ssa-prog\">")
2695 buf.WriteString(html.EscapeString(p.String()))
2696 buf.WriteString("</dd>")
2698 }
2699 buf.WriteString("</dl>")
2700 buf.WriteString("</code>")
2701 f.Config.HTML.WriteColumn("genssa", buf.String())
2702 ptxt.Ctxt.LineHist.PrintFilenameOnly = saved
2703 }
Josh Bleecher Snyderb8efee02015-07-31 14:37:15 -07002704 }
2705
Josh Bleecher Snyder6b416652015-07-28 10:56:39 -07002706 // Emit static data
2707 if f.StaticData != nil {
2708 for _, n := range f.StaticData.([]*Node) {
2709 if !gen_as_init(n, false) {
Keith Randall0ec72b62015-09-08 15:42:53 -07002710				Fatalf("non-static data marked as static: %v\n\n%v", n, f)
Josh Bleecher Snyder6b416652015-07-28 10:56:39 -07002711 }
2712 }
2713 }
2714
Keith Randalld2107fc2015-08-24 02:16:19 -07002715 // Allocate stack frame
2716 allocauto(ptxt)
Keith Randall083a6462015-05-12 11:06:44 -07002717
Keith Randalld2107fc2015-08-24 02:16:19 -07002718 // Generate gc bitmaps.
2719 liveness(Curfn, ptxt, gcargs, gclocals)
2720 gcsymdup(gcargs)
2721 gcsymdup(gclocals)
Keith Randall083a6462015-05-12 11:06:44 -07002722
Keith Randalld2107fc2015-08-24 02:16:19 -07002723 // Add frame prologue. Zero ambiguously live variables.
2724 Thearch.Defframe(ptxt)
2725 if Debug['f'] != 0 {
2726 frame(0)
2727 }
2728
2729 // Remove leftover instrumentation from the instruction stream.
2730 removevardef(ptxt)
Josh Bleecher Snyder35fb5142015-08-10 12:15:52 -07002731
2732 f.Config.HTML.Close()
Keith Randall083a6462015-05-12 11:06:44 -07002733}
2734
David Chase997a9f32015-08-12 16:38:11 -04002735// opregreg emits instructions for
David Chase8e601b22015-08-18 14:39:26 -04002736// dest := dest(To) op src(From)
David Chase997a9f32015-08-12 16:38:11 -04002737// and also returns the created obj.Prog so it
2738// may be further adjusted (offset, scale, etc).
2739func opregreg(op int, dest, src int16) *obj.Prog {
2740 p := Prog(op)
2741 p.From.Type = obj.TYPE_REG
2742 p.To.Type = obj.TYPE_REG
2743 p.To.Reg = dest
2744 p.From.Reg = src
2745 return p
2746}
2747
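// genValue emits machine instructions for a single SSA value.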
Keith Randall9569b952015-08-28 22:51:01 -07002748func (s *genState) genValue(v *ssa.Value) {
Michael Matloob81ccf502015-05-30 01:03:06 -04002749 lineno = v.Line
Keith Randall083a6462015-05-12 11:06:44 -07002750 switch v.Op {
Keith Randall0dca7352015-06-06 16:03:33 -07002751 case ssa.OpAMD64ADDQ:
Keith Randall083a6462015-05-12 11:06:44 -07002752 // TODO: use addq instead of leaq if target is in the right register.
2753 p := Prog(x86.ALEAQ)
2754 p.From.Type = obj.TYPE_MEM
2755 p.From.Reg = regnum(v.Args[0])
2756 p.From.Scale = 1
2757 p.From.Index = regnum(v.Args[1])
2758 p.To.Type = obj.TYPE_REG
2759 p.To.Reg = regnum(v)
Michael Matloob73054f52015-06-14 11:38:46 -07002760 case ssa.OpAMD64ADDL:
2761 p := Prog(x86.ALEAL)
2762 p.From.Type = obj.TYPE_MEM
2763 p.From.Reg = regnum(v.Args[0])
2764 p.From.Scale = 1
2765 p.From.Index = regnum(v.Args[1])
2766 p.To.Type = obj.TYPE_REG
2767 p.To.Reg = regnum(v)
2768 case ssa.OpAMD64ADDW:
2769 p := Prog(x86.ALEAW)
2770 p.From.Type = obj.TYPE_MEM
2771 p.From.Reg = regnum(v.Args[0])
2772 p.From.Scale = 1
2773 p.From.Index = regnum(v.Args[1])
2774 p.To.Type = obj.TYPE_REG
2775 p.To.Reg = regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07002776 // 2-address opcode arithmetic, symmetric
David Chase997a9f32015-08-12 16:38:11 -04002777 case ssa.OpAMD64ADDB, ssa.OpAMD64ADDSS, ssa.OpAMD64ADDSD,
Alexandru Moșoiedff8812015-07-28 14:58:49 +02002778 ssa.OpAMD64ANDQ, ssa.OpAMD64ANDL, ssa.OpAMD64ANDW, ssa.OpAMD64ANDB,
Keith Randall20550cb2015-07-28 16:04:50 -07002779 ssa.OpAMD64ORQ, ssa.OpAMD64ORL, ssa.OpAMD64ORW, ssa.OpAMD64ORB,
2780 ssa.OpAMD64XORQ, ssa.OpAMD64XORL, ssa.OpAMD64XORW, ssa.OpAMD64XORB,
David Chase997a9f32015-08-12 16:38:11 -04002781 ssa.OpAMD64MULQ, ssa.OpAMD64MULL, ssa.OpAMD64MULW, ssa.OpAMD64MULB,
David Chase3a9d0ac2015-08-28 14:24:10 -04002782 ssa.OpAMD64MULSS, ssa.OpAMD64MULSD, ssa.OpAMD64PXOR:
Michael Matloob73054f52015-06-14 11:38:46 -07002783 r := regnum(v)
2784 x := regnum(v.Args[0])
2785 y := regnum(v.Args[1])
2786 if x != r && y != r {
David Chase997a9f32015-08-12 16:38:11 -04002787 opregreg(regMoveByTypeAMD64(v.Type), r, x)
Michael Matloob73054f52015-06-14 11:38:46 -07002788 x = r
2789 }
2790 p := Prog(v.Op.Asm())
2791 p.From.Type = obj.TYPE_REG
2792 p.To.Type = obj.TYPE_REG
2793 p.To.Reg = r
2794 if x == r {
2795 p.From.Reg = y
2796 } else {
2797 p.From.Reg = x
2798 }
Keith Randall20550cb2015-07-28 16:04:50 -07002799 // 2-address opcode arithmetic, not symmetric
2800 case ssa.OpAMD64SUBQ, ssa.OpAMD64SUBL, ssa.OpAMD64SUBW, ssa.OpAMD64SUBB:
Keith Randallbe1eb572015-07-22 13:46:15 -07002801 r := regnum(v)
2802 x := regnum(v.Args[0])
Keith Randall20550cb2015-07-28 16:04:50 -07002803 y := regnum(v.Args[1])
2804 var neg bool
2805 if y == r {
2806 // compute -(y-x) instead
2807 x, y = y, x
2808 neg = true
Keith Randallbe1eb572015-07-22 13:46:15 -07002809 }
Keith Randall083a6462015-05-12 11:06:44 -07002810 if x != r {
David Chase997a9f32015-08-12 16:38:11 -04002811 opregreg(regMoveByTypeAMD64(v.Type), r, x)
Keith Randall083a6462015-05-12 11:06:44 -07002812 }
David Chase997a9f32015-08-12 16:38:11 -04002813 opregreg(v.Op.Asm(), r, y)
Keith Randall20550cb2015-07-28 16:04:50 -07002814
Keith Randall20550cb2015-07-28 16:04:50 -07002815 if neg {
2816 p := Prog(x86.ANEGQ) // TODO: use correct size? This is mostly a hack until regalloc does 2-address correctly
Keith Randall20550cb2015-07-28 16:04:50 -07002817 p.To.Type = obj.TYPE_REG
2818 p.To.Reg = r
2819 }
David Chase997a9f32015-08-12 16:38:11 -04002820 case ssa.OpAMD64SUBSS, ssa.OpAMD64SUBSD, ssa.OpAMD64DIVSS, ssa.OpAMD64DIVSD:
2821 r := regnum(v)
2822 x := regnum(v.Args[0])
2823 y := regnum(v.Args[1])
2824 if y == r && x != r {
2825 // r/y := x op r/y, need to preserve x and rewrite to
2826 // r/y := r/y op x15
2827 x15 := int16(x86.REG_X15)
2828 // register move y to x15
2829 // register move x to y
2830 // rename y with x15
2831 opregreg(regMoveByTypeAMD64(v.Type), x15, y)
2832 opregreg(regMoveByTypeAMD64(v.Type), r, x)
2833 y = x15
2834 } else if x != r {
2835 opregreg(regMoveByTypeAMD64(v.Type), r, x)
2836 }
2837 opregreg(v.Op.Asm(), r, y)
2838
Todd Neala45f2d82015-08-17 17:46:06 -05002839 case ssa.OpAMD64DIVQ, ssa.OpAMD64DIVL, ssa.OpAMD64DIVW,
Todd Neal57d9e7e2015-08-18 19:51:44 -05002840 ssa.OpAMD64DIVQU, ssa.OpAMD64DIVLU, ssa.OpAMD64DIVWU,
2841 ssa.OpAMD64MODQ, ssa.OpAMD64MODL, ssa.OpAMD64MODW,
2842 ssa.OpAMD64MODQU, ssa.OpAMD64MODLU, ssa.OpAMD64MODWU:
Todd Neala45f2d82015-08-17 17:46:06 -05002843
2844 // Arg[0] is already in AX as it's the only register we allow
2845 // and AX is the only output
2846 x := regnum(v.Args[1])
2847
2848		// CPU faults upon signed overflow, which occurs when the most
Todd Neal57d9e7e2015-08-18 19:51:44 -05002849 // negative int is divided by -1.
Todd Neala45f2d82015-08-17 17:46:06 -05002850 var j *obj.Prog
2851 if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL ||
Todd Neal57d9e7e2015-08-18 19:51:44 -05002852 v.Op == ssa.OpAMD64DIVW || v.Op == ssa.OpAMD64MODQ ||
2853 v.Op == ssa.OpAMD64MODL || v.Op == ssa.OpAMD64MODW {
Todd Neala45f2d82015-08-17 17:46:06 -05002854
2855 var c *obj.Prog
2856 switch v.Op {
Todd Neal57d9e7e2015-08-18 19:51:44 -05002857 case ssa.OpAMD64DIVQ, ssa.OpAMD64MODQ:
Todd Neala45f2d82015-08-17 17:46:06 -05002858 c = Prog(x86.ACMPQ)
Todd Neal57d9e7e2015-08-18 19:51:44 -05002859 j = Prog(x86.AJEQ)
2860 // go ahead and sign extend to save doing it later
2861 Prog(x86.ACQO)
2862
2863 case ssa.OpAMD64DIVL, ssa.OpAMD64MODL:
Todd Neala45f2d82015-08-17 17:46:06 -05002864 c = Prog(x86.ACMPL)
Todd Neal57d9e7e2015-08-18 19:51:44 -05002865 j = Prog(x86.AJEQ)
2866 Prog(x86.ACDQ)
2867
2868 case ssa.OpAMD64DIVW, ssa.OpAMD64MODW:
Todd Neala45f2d82015-08-17 17:46:06 -05002869 c = Prog(x86.ACMPW)
Todd Neal57d9e7e2015-08-18 19:51:44 -05002870 j = Prog(x86.AJEQ)
2871 Prog(x86.ACWD)
Todd Neala45f2d82015-08-17 17:46:06 -05002872 }
2873 c.From.Type = obj.TYPE_REG
2874 c.From.Reg = x
2875 c.To.Type = obj.TYPE_CONST
2876 c.To.Offset = -1
2877
Todd Neala45f2d82015-08-17 17:46:06 -05002878 j.To.Type = obj.TYPE_BRANCH
2879
2880 }
2881
Todd Neal57d9e7e2015-08-18 19:51:44 -05002882		// for unsigned ints, we zero extend by setting DX = 0
2883 // signed ints were sign extended above
2884 if v.Op == ssa.OpAMD64DIVQU || v.Op == ssa.OpAMD64MODQU ||
2885 v.Op == ssa.OpAMD64DIVLU || v.Op == ssa.OpAMD64MODLU ||
2886 v.Op == ssa.OpAMD64DIVWU || v.Op == ssa.OpAMD64MODWU {
Todd Neala45f2d82015-08-17 17:46:06 -05002887 c := Prog(x86.AXORQ)
2888 c.From.Type = obj.TYPE_REG
2889 c.From.Reg = x86.REG_DX
2890 c.To.Type = obj.TYPE_REG
2891 c.To.Reg = x86.REG_DX
Todd Neala45f2d82015-08-17 17:46:06 -05002892 }
2893
2894 p := Prog(v.Op.Asm())
2895 p.From.Type = obj.TYPE_REG
2896 p.From.Reg = x
2897
2898 // signed division, rest of the check for -1 case
2899 if j != nil {
2900 j2 := Prog(obj.AJMP)
2901 j2.To.Type = obj.TYPE_BRANCH
2902
Todd Neal57d9e7e2015-08-18 19:51:44 -05002903 var n *obj.Prog
2904 if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL ||
2905 v.Op == ssa.OpAMD64DIVW {
2906 // n * -1 = -n
2907 n = Prog(x86.ANEGQ)
2908 n.To.Type = obj.TYPE_REG
2909 n.To.Reg = x86.REG_AX
2910 } else {
2911 // n % -1 == 0
2912 n = Prog(x86.AXORQ)
2913 n.From.Type = obj.TYPE_REG
2914 n.From.Reg = x86.REG_DX
2915 n.To.Type = obj.TYPE_REG
2916 n.To.Reg = x86.REG_DX
2917 }
Todd Neala45f2d82015-08-17 17:46:06 -05002918
2919 j.To.Val = n
2920 j2.To.Val = Pc
2921 }
2922
Todd Neal67cbd5b2015-08-18 19:14:47 -05002923 case ssa.OpAMD64HMULL, ssa.OpAMD64HMULW, ssa.OpAMD64HMULB,
2924 ssa.OpAMD64HMULLU, ssa.OpAMD64HMULWU, ssa.OpAMD64HMULBU:
2925 // the frontend rewrites constant division by 8/16/32 bit integers into
2926 // HMUL by a constant
2927
2928 // Arg[0] is already in AX as it's the only register we allow
2929 // and DX is the only output we care about (the high bits)
2930 p := Prog(v.Op.Asm())
2931 p.From.Type = obj.TYPE_REG
2932 p.From.Reg = regnum(v.Args[1])
2933
2934 // IMULB puts the high portion in AH instead of DL,
2935 // so move it to DL for consistency
2936 if v.Type.Size() == 1 {
2937 m := Prog(x86.AMOVB)
2938 m.From.Type = obj.TYPE_REG
2939 m.From.Reg = x86.REG_AH
2940 m.To.Type = obj.TYPE_REG
2941 m.To.Reg = x86.REG_DX
2942 }
2943
Keith Randall20550cb2015-07-28 16:04:50 -07002944 case ssa.OpAMD64SHLQ, ssa.OpAMD64SHLL, ssa.OpAMD64SHLW, ssa.OpAMD64SHLB,
2945 ssa.OpAMD64SHRQ, ssa.OpAMD64SHRL, ssa.OpAMD64SHRW, ssa.OpAMD64SHRB,
2946 ssa.OpAMD64SARQ, ssa.OpAMD64SARL, ssa.OpAMD64SARW, ssa.OpAMD64SARB:
Keith Randall6f188472015-06-10 10:39:57 -07002947 x := regnum(v.Args[0])
2948 r := regnum(v)
2949 if x != r {
2950 if r == x86.REG_CX {
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07002951 v.Fatalf("can't implement %s, target and shift both in CX", v.LongString())
Keith Randall6f188472015-06-10 10:39:57 -07002952 }
Keith Randall20550cb2015-07-28 16:04:50 -07002953 p := Prog(regMoveAMD64(v.Type.Size()))
Keith Randall6f188472015-06-10 10:39:57 -07002954 p.From.Type = obj.TYPE_REG
2955 p.From.Reg = x
2956 p.To.Type = obj.TYPE_REG
2957 p.To.Reg = r
Keith Randall6f188472015-06-10 10:39:57 -07002958 }
Michael Matloob703ef062015-06-16 11:11:16 -07002959 p := Prog(v.Op.Asm())
Keith Randall6f188472015-06-10 10:39:57 -07002960 p.From.Type = obj.TYPE_REG
2961 p.From.Reg = regnum(v.Args[1]) // should be CX
2962 p.To.Type = obj.TYPE_REG
2963 p.To.Reg = r
Keith Randall20550cb2015-07-28 16:04:50 -07002964 case ssa.OpAMD64ADDQconst, ssa.OpAMD64ADDLconst, ssa.OpAMD64ADDWconst:
2965 // TODO: use addq instead of leaq if target is in the right register.
2966 var asm int
2967 switch v.Op {
2968 case ssa.OpAMD64ADDQconst:
2969 asm = x86.ALEAQ
2970 case ssa.OpAMD64ADDLconst:
2971 asm = x86.ALEAL
2972 case ssa.OpAMD64ADDWconst:
2973 asm = x86.ALEAW
2974 }
2975 p := Prog(asm)
2976 p.From.Type = obj.TYPE_MEM
2977 p.From.Reg = regnum(v.Args[0])
2978 p.From.Offset = v.AuxInt
2979 p.To.Type = obj.TYPE_REG
2980 p.To.Reg = regnum(v)
Alexandru Moșoi7a6de6d2015-08-14 13:23:11 +02002981 case ssa.OpAMD64MULQconst, ssa.OpAMD64MULLconst, ssa.OpAMD64MULWconst, ssa.OpAMD64MULBconst:
Keith Randall20550cb2015-07-28 16:04:50 -07002982 r := regnum(v)
2983 x := regnum(v.Args[0])
2984 if r != x {
2985 p := Prog(regMoveAMD64(v.Type.Size()))
2986 p.From.Type = obj.TYPE_REG
2987 p.From.Reg = x
2988 p.To.Type = obj.TYPE_REG
2989 p.To.Reg = r
2990 }
2991 p := Prog(v.Op.Asm())
2992 p.From.Type = obj.TYPE_CONST
2993 p.From.Offset = v.AuxInt
2994 p.To.Type = obj.TYPE_REG
2995 p.To.Reg = r
2996 // TODO: Teach doasm to compile the three-address multiply imul $c, r1, r2
2997 // instead of using the MOVQ above.
2998 //p.From3 = new(obj.Addr)
2999 //p.From3.Type = obj.TYPE_REG
3000 //p.From3.Reg = regnum(v.Args[0])
3001 case ssa.OpAMD64ADDBconst,
3002 ssa.OpAMD64ANDQconst, ssa.OpAMD64ANDLconst, ssa.OpAMD64ANDWconst, ssa.OpAMD64ANDBconst,
3003 ssa.OpAMD64ORQconst, ssa.OpAMD64ORLconst, ssa.OpAMD64ORWconst, ssa.OpAMD64ORBconst,
3004 ssa.OpAMD64XORQconst, ssa.OpAMD64XORLconst, ssa.OpAMD64XORWconst, ssa.OpAMD64XORBconst,
3005 ssa.OpAMD64SUBQconst, ssa.OpAMD64SUBLconst, ssa.OpAMD64SUBWconst, ssa.OpAMD64SUBBconst,
3006 ssa.OpAMD64SHLQconst, ssa.OpAMD64SHLLconst, ssa.OpAMD64SHLWconst, ssa.OpAMD64SHLBconst,
3007 ssa.OpAMD64SHRQconst, ssa.OpAMD64SHRLconst, ssa.OpAMD64SHRWconst, ssa.OpAMD64SHRBconst,
David Chase40aba8c2015-08-05 22:11:14 -04003008 ssa.OpAMD64SARQconst, ssa.OpAMD64SARLconst, ssa.OpAMD64SARWconst, ssa.OpAMD64SARBconst,
3009 ssa.OpAMD64ROLQconst, ssa.OpAMD64ROLLconst, ssa.OpAMD64ROLWconst, ssa.OpAMD64ROLBconst:
Keith Randall20550cb2015-07-28 16:04:50 -07003010 // This code compensates for the fact that the register allocator
3011 // doesn't understand 2-address instructions yet. TODO: fix that.
Keith Randall247786c2015-05-28 10:47:24 -07003012 x := regnum(v.Args[0])
3013 r := regnum(v)
3014 if x != r {
Keith Randall20550cb2015-07-28 16:04:50 -07003015 p := Prog(regMoveAMD64(v.Type.Size()))
Keith Randall247786c2015-05-28 10:47:24 -07003016 p.From.Type = obj.TYPE_REG
3017 p.From.Reg = x
3018 p.To.Type = obj.TYPE_REG
3019 p.To.Reg = r
Keith Randall247786c2015-05-28 10:47:24 -07003020 }
Michael Matloob703ef062015-06-16 11:11:16 -07003021 p := Prog(v.Op.Asm())
Keith Randall247786c2015-05-28 10:47:24 -07003022 p.From.Type = obj.TYPE_CONST
Keith Randall8f22b522015-06-11 21:29:25 -07003023 p.From.Offset = v.AuxInt
Keith Randall247786c2015-05-28 10:47:24 -07003024 p.To.Type = obj.TYPE_REG
Keith Randalldbd83c42015-06-28 06:08:50 -07003025 p.To.Reg = r
Keith Randall4b803152015-07-29 17:07:09 -07003026 case ssa.OpAMD64SBBQcarrymask, ssa.OpAMD64SBBLcarrymask:
Keith Randall6f188472015-06-10 10:39:57 -07003027 r := regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07003028 p := Prog(v.Op.Asm())
Keith Randall6f188472015-06-10 10:39:57 -07003029 p.From.Type = obj.TYPE_REG
3030 p.From.Reg = r
3031 p.To.Type = obj.TYPE_REG
3032 p.To.Reg = r
Todd Neald90e0482015-07-23 20:01:40 -05003033 case ssa.OpAMD64LEAQ1, ssa.OpAMD64LEAQ2, ssa.OpAMD64LEAQ4, ssa.OpAMD64LEAQ8:
Keith Randall247786c2015-05-28 10:47:24 -07003034 p := Prog(x86.ALEAQ)
3035 p.From.Type = obj.TYPE_MEM
3036 p.From.Reg = regnum(v.Args[0])
Todd Neald90e0482015-07-23 20:01:40 -05003037 switch v.Op {
3038 case ssa.OpAMD64LEAQ1:
3039 p.From.Scale = 1
3040 case ssa.OpAMD64LEAQ2:
3041 p.From.Scale = 2
3042 case ssa.OpAMD64LEAQ4:
3043 p.From.Scale = 4
3044 case ssa.OpAMD64LEAQ8:
3045 p.From.Scale = 8
3046 }
Keith Randall247786c2015-05-28 10:47:24 -07003047 p.From.Index = regnum(v.Args[1])
Keith Randall8c46aa52015-06-19 21:02:28 -07003048 addAux(&p.From, v)
3049 p.To.Type = obj.TYPE_REG
3050 p.To.Reg = regnum(v)
3051 case ssa.OpAMD64LEAQ:
3052 p := Prog(x86.ALEAQ)
3053 p.From.Type = obj.TYPE_MEM
3054 p.From.Reg = regnum(v.Args[0])
3055 addAux(&p.From, v)
Keith Randall247786c2015-05-28 10:47:24 -07003056 p.To.Type = obj.TYPE_REG
3057 p.To.Reg = regnum(v)
Keith Randall20550cb2015-07-28 16:04:50 -07003058 case ssa.OpAMD64CMPQ, ssa.OpAMD64CMPL, ssa.OpAMD64CMPW, ssa.OpAMD64CMPB,
3059 ssa.OpAMD64TESTQ, ssa.OpAMD64TESTL, ssa.OpAMD64TESTW, ssa.OpAMD64TESTB:
David Chase8e601b22015-08-18 14:39:26 -04003060 opregreg(v.Op.Asm(), regnum(v.Args[1]), regnum(v.Args[0]))
3061 case ssa.OpAMD64UCOMISS, ssa.OpAMD64UCOMISD:
3062 // Go assembler has swapped operands for UCOMISx relative to CMP,
3063 // must account for that right here.
3064 opregreg(v.Op.Asm(), regnum(v.Args[0]), regnum(v.Args[1]))
Keith Randall20550cb2015-07-28 16:04:50 -07003065 case ssa.OpAMD64CMPQconst, ssa.OpAMD64CMPLconst, ssa.OpAMD64CMPWconst, ssa.OpAMD64CMPBconst,
3066 ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst:
3067 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003068 p.From.Type = obj.TYPE_REG
3069 p.From.Reg = regnum(v.Args[0])
3070 p.To.Type = obj.TYPE_CONST
Keith Randall8f22b522015-06-11 21:29:25 -07003071 p.To.Offset = v.AuxInt
Keith Randall9cb332e2015-07-28 14:19:20 -07003072 case ssa.OpAMD64MOVBconst, ssa.OpAMD64MOVWconst, ssa.OpAMD64MOVLconst, ssa.OpAMD64MOVQconst:
Keith Randall083a6462015-05-12 11:06:44 -07003073 x := regnum(v)
Keith Randall9cb332e2015-07-28 14:19:20 -07003074 p := Prog(v.Op.Asm())
Keith Randall083a6462015-05-12 11:06:44 -07003075 p.From.Type = obj.TYPE_CONST
Keith Randall9cb332e2015-07-28 14:19:20 -07003076 var i int64
3077 switch v.Op {
3078 case ssa.OpAMD64MOVBconst:
3079 i = int64(int8(v.AuxInt))
3080 case ssa.OpAMD64MOVWconst:
3081 i = int64(int16(v.AuxInt))
3082 case ssa.OpAMD64MOVLconst:
3083 i = int64(int32(v.AuxInt))
3084 case ssa.OpAMD64MOVQconst:
3085 i = v.AuxInt
3086 }
3087 p.From.Offset = i
Keith Randall083a6462015-05-12 11:06:44 -07003088 p.To.Type = obj.TYPE_REG
3089 p.To.Reg = x
David Chase997a9f32015-08-12 16:38:11 -04003090 case ssa.OpAMD64MOVSSconst, ssa.OpAMD64MOVSDconst:
3091 x := regnum(v)
3092 p := Prog(v.Op.Asm())
3093 p.From.Type = obj.TYPE_FCONST
Todd Neal19447a62015-09-04 06:33:56 -05003094 p.From.Val = math.Float64frombits(uint64(v.AuxInt))
David Chase997a9f32015-08-12 16:38:11 -04003095 p.To.Type = obj.TYPE_REG
3096 p.To.Reg = x
3097 case ssa.OpAMD64MOVQload, ssa.OpAMD64MOVSSload, ssa.OpAMD64MOVSDload, ssa.OpAMD64MOVLload, ssa.OpAMD64MOVWload, ssa.OpAMD64MOVBload, ssa.OpAMD64MOVBQSXload, ssa.OpAMD64MOVBQZXload:
Michael Matloob703ef062015-06-16 11:11:16 -07003098 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003099 p.From.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003100 p.From.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003101 addAux(&p.From, v)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003102 p.To.Type = obj.TYPE_REG
3103 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003104 case ssa.OpAMD64MOVQloadidx8, ssa.OpAMD64MOVSDloadidx8:
3105 p := Prog(v.Op.Asm())
Keith Randallcfc2aa52015-05-18 16:44:20 -07003106 p.From.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003107 p.From.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003108 addAux(&p.From, v)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003109 p.From.Scale = 8
3110 p.From.Index = regnum(v.Args[1])
3111 p.To.Type = obj.TYPE_REG
3112 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003113 case ssa.OpAMD64MOVSSloadidx4:
3114 p := Prog(v.Op.Asm())
3115 p.From.Type = obj.TYPE_MEM
3116 p.From.Reg = regnum(v.Args[0])
3117 addAux(&p.From, v)
3118 p.From.Scale = 4
3119 p.From.Index = regnum(v.Args[1])
3120 p.To.Type = obj.TYPE_REG
3121 p.To.Reg = regnum(v)
3122 case ssa.OpAMD64MOVQstore, ssa.OpAMD64MOVSSstore, ssa.OpAMD64MOVSDstore, ssa.OpAMD64MOVLstore, ssa.OpAMD64MOVWstore, ssa.OpAMD64MOVBstore:
Michael Matloob73054f52015-06-14 11:38:46 -07003123 p := Prog(v.Op.Asm())
Keith Randall083a6462015-05-12 11:06:44 -07003124 p.From.Type = obj.TYPE_REG
Keith Randallcfc2aa52015-05-18 16:44:20 -07003125 p.From.Reg = regnum(v.Args[1])
Keith Randall083a6462015-05-12 11:06:44 -07003126 p.To.Type = obj.TYPE_MEM
Keith Randall247786c2015-05-28 10:47:24 -07003127 p.To.Reg = regnum(v.Args[0])
Keith Randall8c46aa52015-06-19 21:02:28 -07003128 addAux(&p.To, v)
David Chase997a9f32015-08-12 16:38:11 -04003129 case ssa.OpAMD64MOVQstoreidx8, ssa.OpAMD64MOVSDstoreidx8:
3130 p := Prog(v.Op.Asm())
Josh Bleecher Snyder3e3d1622015-07-27 16:36:36 -07003131 p.From.Type = obj.TYPE_REG
3132 p.From.Reg = regnum(v.Args[2])
3133 p.To.Type = obj.TYPE_MEM
3134 p.To.Reg = regnum(v.Args[0])
3135 p.To.Scale = 8
3136 p.To.Index = regnum(v.Args[1])
3137 addAux(&p.To, v)
David Chase997a9f32015-08-12 16:38:11 -04003138 case ssa.OpAMD64MOVSSstoreidx4:
3139 p := Prog(v.Op.Asm())
3140 p.From.Type = obj.TYPE_REG
3141 p.From.Reg = regnum(v.Args[2])
3142 p.To.Type = obj.TYPE_MEM
3143 p.To.Reg = regnum(v.Args[0])
3144 p.To.Scale = 4
3145 p.To.Index = regnum(v.Args[1])
3146 addAux(&p.To, v)
David Chase42825882015-08-20 15:14:20 -04003147 case ssa.OpAMD64MOVLQSX, ssa.OpAMD64MOVWQSX, ssa.OpAMD64MOVBQSX, ssa.OpAMD64MOVLQZX, ssa.OpAMD64MOVWQZX, ssa.OpAMD64MOVBQZX,
3148 ssa.OpAMD64CVTSL2SS, ssa.OpAMD64CVTSL2SD, ssa.OpAMD64CVTSQ2SS, ssa.OpAMD64CVTSQ2SD,
Todd Neal634b50c2015-09-01 19:05:44 -05003149 ssa.OpAMD64CVTTSS2SL, ssa.OpAMD64CVTTSD2SL, ssa.OpAMD64CVTTSS2SQ, ssa.OpAMD64CVTTSD2SQ,
David Chase42825882015-08-20 15:14:20 -04003150 ssa.OpAMD64CVTSS2SD, ssa.OpAMD64CVTSD2SS:
3151 opregreg(v.Op.Asm(), regnum(v), regnum(v.Args[0]))
Daniel Morsing66b47812015-06-27 15:45:20 +01003152 case ssa.OpAMD64MOVXzero:
3153 nb := v.AuxInt
3154 offset := int64(0)
3155 reg := regnum(v.Args[0])
3156 for nb >= 8 {
3157 nb, offset = movZero(x86.AMOVQ, 8, nb, offset, reg)
3158 }
3159 for nb >= 4 {
3160 nb, offset = movZero(x86.AMOVL, 4, nb, offset, reg)
3161 }
3162 for nb >= 2 {
3163 nb, offset = movZero(x86.AMOVW, 2, nb, offset, reg)
3164 }
3165 for nb >= 1 {
3166 nb, offset = movZero(x86.AMOVB, 1, nb, offset, reg)
3167 }
Keith Randallf7f604e2015-05-27 14:52:22 -07003168 case ssa.OpCopy: // TODO: lower to MOVQ earlier?
3169 if v.Type.IsMemory() {
3170 return
3171 }
Keith Randall083a6462015-05-12 11:06:44 -07003172 x := regnum(v.Args[0])
3173 y := regnum(v)
3174 if x != y {
David Chase997a9f32015-08-12 16:38:11 -04003175 opregreg(regMoveByTypeAMD64(v.Type), y, x)
Keith Randall083a6462015-05-12 11:06:44 -07003176 }
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003177 case ssa.OpLoadReg:
Josh Bleecher Snyder26f135d2015-07-20 15:22:34 -07003178 if v.Type.IsFlags() {
3179 v.Unimplementedf("load flags not implemented: %v", v.LongString())
3180 return
3181 }
David Chase997a9f32015-08-12 16:38:11 -04003182 p := Prog(movSizeByType(v.Type))
Keith Randalld2107fc2015-08-24 02:16:19 -07003183 n := autoVar(v.Args[0])
Keith Randall083a6462015-05-12 11:06:44 -07003184 p.From.Type = obj.TYPE_MEM
Keith Randalld2107fc2015-08-24 02:16:19 -07003185 p.From.Name = obj.NAME_AUTO
3186 p.From.Node = n
3187 p.From.Sym = Linksym(n.Sym)
Keith Randall083a6462015-05-12 11:06:44 -07003188 p.To.Type = obj.TYPE_REG
3189 p.To.Reg = regnum(v)
David Chase997a9f32015-08-12 16:38:11 -04003190
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003191 case ssa.OpStoreReg:
Josh Bleecher Snyder26f135d2015-07-20 15:22:34 -07003192 if v.Type.IsFlags() {
3193 v.Unimplementedf("store flags not implemented: %v", v.LongString())
3194 return
3195 }
David Chase997a9f32015-08-12 16:38:11 -04003196 p := Prog(movSizeByType(v.Type))
Keith Randall083a6462015-05-12 11:06:44 -07003197 p.From.Type = obj.TYPE_REG
3198 p.From.Reg = regnum(v.Args[0])
Keith Randalld2107fc2015-08-24 02:16:19 -07003199 n := autoVar(v)
Keith Randall083a6462015-05-12 11:06:44 -07003200 p.To.Type = obj.TYPE_MEM
Keith Randalld2107fc2015-08-24 02:16:19 -07003201 p.To.Name = obj.NAME_AUTO
3202 p.To.Node = n
3203 p.To.Sym = Linksym(n.Sym)
Keith Randall083a6462015-05-12 11:06:44 -07003204 case ssa.OpPhi:
Keith Randall0b46b422015-08-11 12:51:33 -07003205 // just check to make sure regalloc and stackalloc did it right
3206 if v.Type.IsMemory() {
3207 return
3208 }
Keith Randall083a6462015-05-12 11:06:44 -07003209 f := v.Block.Func
3210 loc := f.RegAlloc[v.ID]
3211 for _, a := range v.Args {
Josh Bleecher Snyder55845232015-08-05 16:43:49 -07003212 if aloc := f.RegAlloc[a.ID]; aloc != loc { // TODO: .Equal() instead?
3213 v.Fatalf("phi arg at different location than phi: %v @ %v, but arg %v @ %v\n%s\n", v, loc, a, aloc, v.Block.Func)
Keith Randall083a6462015-05-12 11:06:44 -07003214 }
3215 }
David Chase997a9f32015-08-12 16:38:11 -04003216 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64, ssa.OpConstString, ssa.OpConstNil, ssa.OpConstBool,
3217 ssa.OpConst32F, ssa.OpConst64F:
Keith Randall083a6462015-05-12 11:06:44 -07003218 if v.Block.Func.RegAlloc[v.ID] != nil {
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003219 v.Fatalf("const value %v shouldn't have a location", v)
Keith Randall083a6462015-05-12 11:06:44 -07003220 }
David Chase997a9f32015-08-12 16:38:11 -04003221
Keith Randall083a6462015-05-12 11:06:44 -07003222 case ssa.OpArg:
3223 // memory arg needs no code
Keith Randall8f22b522015-06-11 21:29:25 -07003224 // TODO: check that only mem arg goes here.
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07003225 case ssa.OpAMD64LoweredPanicNilCheck:
3226 if Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers
3227 Warnl(int(v.Line), "generated nil check")
3228 }
3229		// Write to the pointer we just checked; if it is nil, the store faults (at address 0).
Keith Randall8d236812015-08-18 15:25:40 -07003230		// It doesn't matter what we write, so use AX as the source; input 0 is the destination.
3231 r := regnum(v.Args[0])
Josh Bleecher Snyder463858e2015-08-11 09:47:45 -07003232 q := Prog(x86.AMOVL)
3233 q.From.Type = obj.TYPE_REG
3234 q.From.Reg = x86.REG_AX
3235 q.To.Type = obj.TYPE_MEM
Keith Randall8d236812015-08-18 15:25:40 -07003236 q.To.Reg = r
Keith Randalld2107fc2015-08-24 02:16:19 -07003237 Prog(obj.AUNDEF) // tell plive.go that we never reach here
Keith Randall8d236812015-08-18 15:25:40 -07003238 case ssa.OpAMD64LoweredPanicIndexCheck:
3239 p := Prog(obj.ACALL)
3240 p.To.Type = obj.TYPE_MEM
3241 p.To.Name = obj.NAME_EXTERN
3242 p.To.Sym = Linksym(Panicindex.Sym)
Keith Randalld2107fc2015-08-24 02:16:19 -07003243 Prog(obj.AUNDEF)
Keith Randall8d236812015-08-18 15:25:40 -07003244 case ssa.OpAMD64LoweredPanicSliceCheck:
3245 p := Prog(obj.ACALL)
3246 p.To.Type = obj.TYPE_MEM
3247 p.To.Name = obj.NAME_EXTERN
3248 p.To.Sym = Linksym(panicslice.Sym)
Keith Randalld2107fc2015-08-24 02:16:19 -07003249 Prog(obj.AUNDEF)
Josh Bleecher Snyder3d23afb2015-08-12 11:22:16 -07003250 case ssa.OpAMD64LoweredGetG:
3251 r := regnum(v)
3252 // See the comments in cmd/internal/obj/x86/obj6.go
3253 // near CanUse1InsnTLS for a detailed explanation of these instructions.
3254 if x86.CanUse1InsnTLS(Ctxt) {
3255 // MOVQ (TLS), r
3256 p := Prog(x86.AMOVQ)
3257 p.From.Type = obj.TYPE_MEM
3258 p.From.Reg = x86.REG_TLS
3259 p.To.Type = obj.TYPE_REG
3260 p.To.Reg = r
3261 } else {
3262 // MOVQ TLS, r
3263 // MOVQ (r)(TLS*1), r
3264 p := Prog(x86.AMOVQ)
3265 p.From.Type = obj.TYPE_REG
3266 p.From.Reg = x86.REG_TLS
3267 p.To.Type = obj.TYPE_REG
3268 p.To.Reg = r
3269 q := Prog(x86.AMOVQ)
3270 q.From.Type = obj.TYPE_MEM
3271 q.From.Reg = r
3272 q.From.Index = x86.REG_TLS
3273 q.From.Scale = 1
3274 q.To.Type = obj.TYPE_REG
3275 q.To.Reg = r
3276 }
Keith Randall290d8fc2015-06-10 15:03:06 -07003277 case ssa.OpAMD64CALLstatic:
Keith Randall247786c2015-05-28 10:47:24 -07003278 p := Prog(obj.ACALL)
3279 p.To.Type = obj.TYPE_MEM
3280 p.To.Name = obj.NAME_EXTERN
3281 p.To.Sym = Linksym(v.Aux.(*Sym))
Keith Randalld2107fc2015-08-24 02:16:19 -07003282 if Maxarg < v.AuxInt {
3283 Maxarg = v.AuxInt
3284 }
Keith Randall290d8fc2015-06-10 15:03:06 -07003285 case ssa.OpAMD64CALLclosure:
3286 p := Prog(obj.ACALL)
3287 p.To.Type = obj.TYPE_REG
3288 p.To.Reg = regnum(v.Args[0])
Keith Randalld2107fc2015-08-24 02:16:19 -07003289 if Maxarg < v.AuxInt {
3290 Maxarg = v.AuxInt
3291 }
Keith Randall9569b952015-08-28 22:51:01 -07003292 case ssa.OpAMD64CALLdefer:
3293 p := Prog(obj.ACALL)
3294 p.To.Type = obj.TYPE_MEM
3295 p.To.Name = obj.NAME_EXTERN
3296 p.To.Sym = Linksym(Deferproc.Sym)
3297 if Maxarg < v.AuxInt {
3298 Maxarg = v.AuxInt
3299 }
3300 // defer returns in rax:
3301 // 0 if we should continue executing
3302 // 1 if we should jump to deferreturn call
3303 p = Prog(x86.ATESTL)
3304 p.From.Type = obj.TYPE_REG
3305 p.From.Reg = x86.REG_AX
3306 p.To.Type = obj.TYPE_REG
3307 p.To.Reg = x86.REG_AX
3308 p = Prog(x86.AJNE)
3309 p.To.Type = obj.TYPE_BRANCH
3310 s.deferBranches = append(s.deferBranches, p)
3311 case ssa.OpAMD64CALLgo:
3312 p := Prog(obj.ACALL)
3313 p.To.Type = obj.TYPE_MEM
3314 p.To.Name = obj.NAME_EXTERN
3315 p.To.Sym = Linksym(Newproc.Sym)
3316 if Maxarg < v.AuxInt {
3317 Maxarg = v.AuxInt
3318 }
Keith Randall4b803152015-07-29 17:07:09 -07003319 case ssa.OpAMD64NEGQ, ssa.OpAMD64NEGL, ssa.OpAMD64NEGW, ssa.OpAMD64NEGB,
3320 ssa.OpAMD64NOTQ, ssa.OpAMD64NOTL, ssa.OpAMD64NOTW, ssa.OpAMD64NOTB:
Josh Bleecher Snyder93c354b62015-07-30 17:15:16 -07003321 x := regnum(v.Args[0])
3322 r := regnum(v)
3323 if x != r {
3324 p := Prog(regMoveAMD64(v.Type.Size()))
3325 p.From.Type = obj.TYPE_REG
3326 p.From.Reg = x
3327 p.To.Type = obj.TYPE_REG
3328 p.To.Reg = r
3329 }
Alexandru Moșoi954d5ad2015-07-21 16:58:18 +02003330 p := Prog(v.Op.Asm())
3331 p.To.Type = obj.TYPE_REG
Josh Bleecher Snyder93c354b62015-07-30 17:15:16 -07003332 p.To.Reg = r
Keith Randalla329e212015-09-12 13:26:57 -07003333 case ssa.OpAMD64SQRTSD:
3334 p := Prog(v.Op.Asm())
3335 p.From.Type = obj.TYPE_REG
3336 p.From.Reg = regnum(v.Args[0])
3337 p.To.Type = obj.TYPE_REG
3338 p.To.Reg = regnum(v)
Keith Randall8c46aa52015-06-19 21:02:28 -07003339 case ssa.OpSP, ssa.OpSB:
Keith Randallcfc2aa52015-05-18 16:44:20 -07003340 // nothing to do
Josh Bleecher Snydera7940742015-07-20 15:21:49 -07003341 case ssa.OpAMD64SETEQ, ssa.OpAMD64SETNE,
3342 ssa.OpAMD64SETL, ssa.OpAMD64SETLE,
3343 ssa.OpAMD64SETG, ssa.OpAMD64SETGE,
David Chase8e601b22015-08-18 14:39:26 -04003344 ssa.OpAMD64SETGF, ssa.OpAMD64SETGEF,
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003345 ssa.OpAMD64SETB, ssa.OpAMD64SETBE,
David Chase8e601b22015-08-18 14:39:26 -04003346 ssa.OpAMD64SETORD, ssa.OpAMD64SETNAN,
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003347 ssa.OpAMD64SETA, ssa.OpAMD64SETAE:
Josh Bleecher Snydera7940742015-07-20 15:21:49 -07003348 p := Prog(v.Op.Asm())
3349 p.To.Type = obj.TYPE_REG
3350 p.To.Reg = regnum(v)
David Chase8e601b22015-08-18 14:39:26 -04003351
3352 case ssa.OpAMD64SETNEF:
3353 p := Prog(v.Op.Asm())
3354 p.To.Type = obj.TYPE_REG
3355 p.To.Reg = regnum(v)
3356 q := Prog(x86.ASETPS)
3357 q.To.Type = obj.TYPE_REG
3358 q.To.Reg = x86.REG_AX
3359 // TODO AORQ copied from old code generator, why not AORB?
3360 opregreg(x86.AORQ, regnum(v), x86.REG_AX)
3361
3362 case ssa.OpAMD64SETEQF:
3363 p := Prog(v.Op.Asm())
3364 p.To.Type = obj.TYPE_REG
3365 p.To.Reg = regnum(v)
3366 q := Prog(x86.ASETPC)
3367 q.To.Type = obj.TYPE_REG
3368 q.To.Reg = x86.REG_AX
3369 // TODO AANDQ copied from old code generator, why not AANDB?
3370 opregreg(x86.AANDQ, regnum(v), x86.REG_AX)
3371
Keith Randall20550cb2015-07-28 16:04:50 -07003372 case ssa.OpAMD64InvertFlags:
3373 v.Fatalf("InvertFlags should never make it to codegen %v", v)
3374 case ssa.OpAMD64REPSTOSQ:
Keith Randall0b46b422015-08-11 12:51:33 -07003375 p := Prog(x86.AXORL) // TODO: lift out zeroing into its own instruction?
3376 p.From.Type = obj.TYPE_REG
3377 p.From.Reg = x86.REG_AX
3378 p.To.Type = obj.TYPE_REG
3379 p.To.Reg = x86.REG_AX
Keith Randall20550cb2015-07-28 16:04:50 -07003380 Prog(x86.AREP)
3381 Prog(x86.ASTOSQ)
Keith Randall20550cb2015-07-28 16:04:50 -07003382 case ssa.OpAMD64REPMOVSB:
3383 Prog(x86.AREP)
3384 Prog(x86.AMOVSB)
Keith Randalld2107fc2015-08-24 02:16:19 -07003385 case ssa.OpVarDef:
3386 Gvardef(v.Aux.(*Node))
3387 case ssa.OpVarKill:
3388 gvarkill(v.Aux.(*Node))
Keith Randall083a6462015-05-12 11:06:44 -07003389 default:
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003390 v.Unimplementedf("genValue not implemented: %s", v.LongString())
Keith Randall083a6462015-05-12 11:06:44 -07003391 }
3392}
3393
David Chase997a9f32015-08-12 16:38:11 -04003394// movSizeByType returns the MOV instruction for the given type.
3395func movSizeByType(t ssa.Type) (asm int) {
3396 // For x86, there's no difference between reg move opcodes
3397 // and memory move opcodes.
3398 asm = regMoveByTypeAMD64(t)
3399 return
Josh Bleecher Snyder0bb2a502015-07-24 14:51:51 -07003400}
3401
Daniel Morsing66b47812015-06-27 15:45:20 +01003402// movZero generates a register-indirect move with a 0 immediate and keeps track of the bytes left and the next offset.
3403func movZero(as int, width int64, nbytes int64, offset int64, regnum int16) (nleft int64, noff int64) {
3404 p := Prog(as)
3405 // TODO: use zero register on archs that support it.
3406 p.From.Type = obj.TYPE_CONST
3407 p.From.Offset = 0
3408 p.To.Type = obj.TYPE_MEM
3409 p.To.Reg = regnum
3410 p.To.Offset = offset
3411 offset += width
3412 nleft = nbytes - width
3413 return nleft, offset
3414}
3415
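// blockJump maps a conditional block kind to its jump instruction (asm) and
// to the inverted jump (invasm) used when the block's first successor is the
// fallthrough block.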
David Chase8e601b22015-08-18 14:39:26 -04003416var blockJump = [...]struct {
3417 asm, invasm int
3418}{
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003419 ssa.BlockAMD64EQ: {x86.AJEQ, x86.AJNE},
3420 ssa.BlockAMD64NE: {x86.AJNE, x86.AJEQ},
3421 ssa.BlockAMD64LT: {x86.AJLT, x86.AJGE},
3422 ssa.BlockAMD64GE: {x86.AJGE, x86.AJLT},
3423 ssa.BlockAMD64LE: {x86.AJLE, x86.AJGT},
3424 ssa.BlockAMD64GT: {x86.AJGT, x86.AJLE},
3425 ssa.BlockAMD64ULT: {x86.AJCS, x86.AJCC},
3426 ssa.BlockAMD64UGE: {x86.AJCC, x86.AJCS},
3427 ssa.BlockAMD64UGT: {x86.AJHI, x86.AJLS},
3428 ssa.BlockAMD64ULE: {x86.AJLS, x86.AJHI},
David Chase8e601b22015-08-18 14:39:26 -04003429 ssa.BlockAMD64ORD: {x86.AJPC, x86.AJPS},
3430 ssa.BlockAMD64NAN: {x86.AJPS, x86.AJPC},
3431}
3432
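// floatingEQNEJump describes one jump of the two-jump sequence emitted for a
// floating-point equality block: jump is the obj opcode to emit and index
// selects which successor it targets.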
3433type floatingEQNEJump struct {
3434 jump, index int
3435}
3436
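// UCOMISS/UCOMISD set PF when either operand is NaN, so floating-point ==
// is true only when ZF is set and PF is clear, and != is true when PF is set
// or ZF is clear. eqfJumps and nefJumps encode the corresponding two-jump
// sequences for each possible fallthrough successor.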
3437var eqfJumps = [2][2]floatingEQNEJump{
3438 {{x86.AJNE, 1}, {x86.AJPS, 1}}, // next == b.Succs[0]
3439 {{x86.AJNE, 1}, {x86.AJPC, 0}}, // next == b.Succs[1]
3440}
3441var nefJumps = [2][2]floatingEQNEJump{
3442 {{x86.AJNE, 0}, {x86.AJPC, 1}}, // next == b.Succs[0]
3443 {{x86.AJNE, 0}, {x86.AJPS, 0}}, // next == b.Succs[1]
3444}
3445
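// oneFPJump emits a single conditional jump of a floating-point EQ/NE
// sequence, records it for later branch resolution, and passes the branch
// prediction hint along to liblink.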
3446func oneFPJump(b *ssa.Block, jumps *floatingEQNEJump, likely ssa.BranchPrediction, branches []branch) []branch {
3447 p := Prog(jumps.jump)
3448 p.To.Type = obj.TYPE_BRANCH
3449 to := jumps.index
3450 branches = append(branches, branch{p, b.Succs[to]})
3451 if to == 1 {
3452 likely = -likely
3453 }
3454 // liblink reorders the instruction stream as it sees fit.
3455 // Pass along what we know so liblink can make use of it.
3456 // TODO: Once we've fully switched to SSA,
3457 // make liblink leave our output alone.
3458 switch likely {
3459 case ssa.BranchUnlikely:
3460 p.From.Type = obj.TYPE_CONST
3461 p.From.Offset = 0
3462 case ssa.BranchLikely:
3463 p.From.Type = obj.TYPE_CONST
3464 p.From.Offset = 1
3465 }
3466 return branches
3467}
3468
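// genFPJump emits the branches ending a floating-point EQF/NEF block,
// choosing the jump pair from jumps according to which successor, if either,
// is the fallthrough block next.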
Keith Randall9569b952015-08-28 22:51:01 -07003469func genFPJump(s *genState, b, next *ssa.Block, jumps *[2][2]floatingEQNEJump) {
David Chase8e601b22015-08-18 14:39:26 -04003470 likely := b.Likely
3471 switch next {
3472 case b.Succs[0]:
Keith Randall9569b952015-08-28 22:51:01 -07003473 s.branches = oneFPJump(b, &jumps[0][0], likely, s.branches)
3474 s.branches = oneFPJump(b, &jumps[0][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04003475 case b.Succs[1]:
Keith Randall9569b952015-08-28 22:51:01 -07003476 s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches)
3477 s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04003478 default:
Keith Randall9569b952015-08-28 22:51:01 -07003479 s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches)
3480 s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches)
David Chase8e601b22015-08-18 14:39:26 -04003481 q := Prog(obj.AJMP)
3482 q.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003483 s.branches = append(s.branches, branch{q, b.Succs[1]})
David Chase8e601b22015-08-18 14:39:26 -04003484 }
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003485}
3486
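// genBlock emits the control-flow instructions that end block b.
// next is the block laid out immediately after b, so a jump to it can be omitted.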
Keith Randall9569b952015-08-28 22:51:01 -07003487func (s *genState) genBlock(b, next *ssa.Block) {
Michael Matloob81ccf502015-05-30 01:03:06 -04003488 lineno = b.Line
Keith Randall8d236812015-08-18 15:25:40 -07003489
Keith Randall083a6462015-05-12 11:06:44 -07003490 switch b.Kind {
Keith Randallf5c53e02015-09-09 18:03:41 -07003491 case ssa.BlockPlain, ssa.BlockCall:
Keith Randall083a6462015-05-12 11:06:44 -07003492 if b.Succs[0] != next {
3493 p := Prog(obj.AJMP)
3494 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003495 s.branches = append(s.branches, branch{p, b.Succs[0]})
Keith Randall083a6462015-05-12 11:06:44 -07003496 }
3497 case ssa.BlockExit:
Keith Randall10f38f52015-09-03 09:09:59 -07003498 case ssa.BlockRet:
Keith Randall0ec72b62015-09-08 15:42:53 -07003499 if hasdefer {
Keith Randallca9e4502015-09-08 08:59:57 -07003500 s.deferReturn()
Keith Randall9569b952015-08-28 22:51:01 -07003501 }
Keith Randall083a6462015-05-12 11:06:44 -07003502 Prog(obj.ARET)
Keith Randall8a1f6212015-09-08 21:28:44 -07003503 case ssa.BlockRetJmp:
3504 p := Prog(obj.AJMP)
3505 p.To.Type = obj.TYPE_MEM
3506 p.To.Name = obj.NAME_EXTERN
3507 p.To.Sym = Linksym(b.Aux.(*Sym))
David Chase8e601b22015-08-18 14:39:26 -04003508
3509 case ssa.BlockAMD64EQF:
Keith Randall9569b952015-08-28 22:51:01 -07003510 genFPJump(s, b, next, &eqfJumps)
David Chase8e601b22015-08-18 14:39:26 -04003511
3512 case ssa.BlockAMD64NEF:
Keith Randall9569b952015-08-28 22:51:01 -07003513 genFPJump(s, b, next, &nefJumps)
David Chase8e601b22015-08-18 14:39:26 -04003514
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003515 case ssa.BlockAMD64EQ, ssa.BlockAMD64NE,
3516 ssa.BlockAMD64LT, ssa.BlockAMD64GE,
3517 ssa.BlockAMD64LE, ssa.BlockAMD64GT,
3518 ssa.BlockAMD64ULT, ssa.BlockAMD64UGT,
3519 ssa.BlockAMD64ULE, ssa.BlockAMD64UGE:
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003520 jmp := blockJump[b.Kind]
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07003521 likely := b.Likely
3522 var p *obj.Prog
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003523 switch next {
3524 case b.Succs[0]:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07003525 p = Prog(jmp.invasm)
3526 likely *= -1
Keith Randallcfc2aa52015-05-18 16:44:20 -07003527 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003528 s.branches = append(s.branches, branch{p, b.Succs[1]})
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003529 case b.Succs[1]:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07003530 p = Prog(jmp.asm)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003531 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003532 s.branches = append(s.branches, branch{p, b.Succs[0]})
Josh Bleecher Snyder71b57072015-07-24 12:47:00 -07003533 default:
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07003534 p = Prog(jmp.asm)
Keith Randallcfc2aa52015-05-18 16:44:20 -07003535 p.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003536 s.branches = append(s.branches, branch{p, b.Succs[0]})
Keith Randallcfc2aa52015-05-18 16:44:20 -07003537 q := Prog(obj.AJMP)
3538 q.To.Type = obj.TYPE_BRANCH
Keith Randall9569b952015-08-28 22:51:01 -07003539 s.branches = append(s.branches, branch{q, b.Succs[1]})
Keith Randallcfc2aa52015-05-18 16:44:20 -07003540 }
3541
Josh Bleecher Snyderbbf8c5c2015-08-11 17:28:56 -07003542 // liblink reorders the instruction stream as it sees fit.
3543 // Pass along what we know so liblink can make use of it.
3544 // TODO: Once we've fully switched to SSA,
3545 // make liblink leave our output alone.
3546 switch likely {
3547 case ssa.BranchUnlikely:
3548 p.From.Type = obj.TYPE_CONST
3549 p.From.Offset = 0
3550 case ssa.BranchLikely:
3551 p.From.Type = obj.TYPE_CONST
3552 p.From.Offset = 1
3553 }
3554
Keith Randall083a6462015-05-12 11:06:44 -07003555 default:
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003556 b.Unimplementedf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
Keith Randall083a6462015-05-12 11:06:44 -07003557 }
Keith Randall083a6462015-05-12 11:06:44 -07003558}
3559
Keith Randallca9e4502015-09-08 08:59:57 -07003560func (s *genState) deferReturn() {
3561 // Deferred calls will appear to be returning to
3562 // the CALL deferreturn(SB) that we are about to emit.
3563 // However, the stack trace code will show the line
3564 // of the instruction byte before the return PC.
3565 // To avoid that being an unrelated instruction,
3566 // insert an actual hardware NOP that will have the right line number.
3567 // This is different from obj.ANOP, which is a virtual no-op
3568 // that doesn't make it into the instruction stream.
3569 s.deferTarget = Pc
3570 Thearch.Ginsnop()
3571 p := Prog(obj.ACALL)
3572 p.To.Type = obj.TYPE_MEM
3573 p.To.Name = obj.NAME_EXTERN
3574 p.To.Sym = Linksym(Deferreturn.Sym)
3575}
3576
Keith Randall8c46aa52015-06-19 21:02:28 -07003577// addAux adds the offset in the aux fields (AuxInt and Aux) of v to a.
3578func addAux(a *obj.Addr, v *ssa.Value) {
3579 if a.Type != obj.TYPE_MEM {
3580 v.Fatalf("bad addAux addr %s", a)
3581 }
3582 // add integer offset
3583 a.Offset += v.AuxInt
3584
3585 // If no additional symbol offset, we're done.
3586 if v.Aux == nil {
3587 return
3588 }
3589 // Add symbol's offset from its base register.
3590 switch sym := v.Aux.(type) {
3591 case *ssa.ExternSymbol:
3592 a.Name = obj.NAME_EXTERN
3593 a.Sym = Linksym(sym.Sym.(*Sym))
3594 case *ssa.ArgSymbol:
Keith Randalld2107fc2015-08-24 02:16:19 -07003595 n := sym.Node.(*Node)
3596 a.Name = obj.NAME_PARAM
3597 a.Node = n
3598 a.Sym = Linksym(n.Orig.Sym)
3599 a.Offset += n.Xoffset // TODO: why do I have to add this here? I don't for auto variables.
Keith Randall8c46aa52015-06-19 21:02:28 -07003600 case *ssa.AutoSymbol:
Keith Randalld2107fc2015-08-24 02:16:19 -07003601 n := sym.Node.(*Node)
3602 a.Name = obj.NAME_AUTO
3603 a.Node = n
3604 a.Sym = Linksym(n.Sym)
Keith Randall8c46aa52015-06-19 21:02:28 -07003605 default:
3606 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
3607 }
3608}
3609
Keith Randall2a5e6c42015-07-23 14:35:02 -07003610// extendIndex extends v to a full pointer width.
3611func (s *state) extendIndex(v *ssa.Value) *ssa.Value {
3612 size := v.Type.Size()
3613 if size == s.config.PtrSize {
3614 return v
3615 }
3616 if size > s.config.PtrSize {
3617 // TODO: truncate 64-bit indexes on 32-bit pointer archs. We'd need to test
3618 // the high word and branch to out-of-bounds failure if it is not 0.
3619 s.Unimplementedf("64->32 index truncation not implemented")
3620 return v
3621 }
3622
3623 // Extend value to the required size
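	// The case values below encode (index size, pointer size) as 10*size+PtrSize:
	// for example, 14 is a 1-byte index on a 4-byte-pointer arch and 48 is a
	// 4-byte index on an 8-byte-pointer arch.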
3624 var op ssa.Op
3625 if v.Type.IsSigned() {
3626 switch 10*size + s.config.PtrSize {
3627 case 14:
3628 op = ssa.OpSignExt8to32
3629 case 18:
3630 op = ssa.OpSignExt8to64
3631 case 24:
3632 op = ssa.OpSignExt16to32
3633 case 28:
3634 op = ssa.OpSignExt16to64
3635 case 48:
3636 op = ssa.OpSignExt32to64
3637 default:
3638 s.Fatalf("bad signed index extension %s", v.Type)
3639 }
3640 } else {
3641 switch 10*size + s.config.PtrSize {
3642 case 14:
3643 op = ssa.OpZeroExt8to32
3644 case 18:
3645 op = ssa.OpZeroExt8to64
3646 case 24:
3647 op = ssa.OpZeroExt16to32
3648 case 28:
3649 op = ssa.OpZeroExt16to64
3650 case 48:
3651 op = ssa.OpZeroExt32to64
3652 default:
3653 s.Fatalf("bad unsigned index extension %s", v.Type)
3654 }
3655 }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07003656 return s.newValue1(op, Types[TUINTPTR], v)
Keith Randall2a5e6c42015-07-23 14:35:02 -07003657}
3658
Keith Randall083a6462015-05-12 11:06:44 -07003659// ssaRegToReg maps ssa register numbers to obj register numbers.
3660var ssaRegToReg = [...]int16{
3661 x86.REG_AX,
3662 x86.REG_CX,
3663 x86.REG_DX,
3664 x86.REG_BX,
3665 x86.REG_SP,
3666 x86.REG_BP,
3667 x86.REG_SI,
3668 x86.REG_DI,
3669 x86.REG_R8,
3670 x86.REG_R9,
3671 x86.REG_R10,
3672 x86.REG_R11,
3673 x86.REG_R12,
3674 x86.REG_R13,
3675 x86.REG_R14,
3676 x86.REG_R15,
Keith Randall8c46aa52015-06-19 21:02:28 -07003677 x86.REG_X0,
3678 x86.REG_X1,
3679 x86.REG_X2,
3680 x86.REG_X3,
3681 x86.REG_X4,
3682 x86.REG_X5,
3683 x86.REG_X6,
3684 x86.REG_X7,
3685 x86.REG_X8,
3686 x86.REG_X9,
3687 x86.REG_X10,
3688 x86.REG_X11,
3689 x86.REG_X12,
3690 x86.REG_X13,
3691 x86.REG_X14,
3692 x86.REG_X15,
3693 0, // SB isn't a real register. We fill an Addr.Reg field with 0 in this case.
Keith Randall083a6462015-05-12 11:06:44 -07003694 // TODO: arch-dependent
3695}
3696
Keith Randall9cb332e2015-07-28 14:19:20 -07003697// regMoveAMD64 returns the register->register move opcode for the given width.
3698// TODO: generalize for all architectures?
3699func regMoveAMD64(width int64) int {
3700 switch width {
3701 case 1:
3702 return x86.AMOVB
3703 case 2:
3704 return x86.AMOVW
3705 case 4:
3706 return x86.AMOVL
3707 case 8:
3708 return x86.AMOVQ
3709 default:
David Chase997a9f32015-08-12 16:38:11 -04003710 panic("bad int register width")
Keith Randall9cb332e2015-07-28 14:19:20 -07003711 }
3712}
3713
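// regMoveByTypeAMD64 returns the register->register move opcode for the given
// type, using the SSE moves for floating-point values.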
David Chase997a9f32015-08-12 16:38:11 -04003714func regMoveByTypeAMD64(t ssa.Type) int {
3715 width := t.Size()
3716 if t.IsFloat() {
3717 switch width {
3718 case 4:
3719 return x86.AMOVSS
3720 case 8:
3721 return x86.AMOVSD
3722 default:
3723 panic("bad float register width")
3724 }
3725 } else {
3726 switch width {
3727 case 1:
3728 return x86.AMOVB
3729 case 2:
3730 return x86.AMOVW
3731 case 4:
3732 return x86.AMOVL
3733 case 8:
3734 return x86.AMOVQ
3735 default:
3736 panic("bad int register width")
3737 }
3738 }
3739
3740 panic("bad register type")
3741}
3742
Keith Randall083a6462015-05-12 11:06:44 -07003743// regnum returns the register (in cmd/internal/obj numbering) to
3744// which v has been allocated. Panics if v is not assigned to a
3745// register.
Josh Bleecher Snydere1395492015-08-05 16:06:39 -07003746// TODO: Make this panic again once it stops happening routinely.
Keith Randall083a6462015-05-12 11:06:44 -07003747func regnum(v *ssa.Value) int16 {
Josh Bleecher Snydere1395492015-08-05 16:06:39 -07003748 reg := v.Block.Func.RegAlloc[v.ID]
3749 if reg == nil {
3750 v.Unimplementedf("nil regnum for value: %s\n%s\n", v.LongString(), v.Block.Func)
3751 return 0
3752 }
3753 return ssaRegToReg[reg.(*ssa.Register).Num]
Keith Randall083a6462015-05-12 11:06:44 -07003754}
3755
Keith Randalld2107fc2015-08-24 02:16:19 -07003756// autoVar returns a *Node representing the auto variable assigned to v.
3757func autoVar(v *ssa.Value) *Node {
3758 return v.Block.Func.RegAlloc[v.ID].(*ssa.LocalSlot).N.(*Node)
Keith Randall083a6462015-05-12 11:06:44 -07003759}
Keith Randallf7f604e2015-05-27 14:52:22 -07003760
3761// ssaExport exports a bunch of compiler services for the ssa backend.
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003762type ssaExport struct {
3763 log bool
3764 unimplemented bool
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003765 mustImplement bool
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003766}
Keith Randallf7f604e2015-05-27 14:52:22 -07003767
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07003768func (s *ssaExport) TypeBool() ssa.Type { return Types[TBOOL] }
3769func (s *ssaExport) TypeInt8() ssa.Type { return Types[TINT8] }
3770func (s *ssaExport) TypeInt16() ssa.Type { return Types[TINT16] }
3771func (s *ssaExport) TypeInt32() ssa.Type { return Types[TINT32] }
3772func (s *ssaExport) TypeInt64() ssa.Type { return Types[TINT64] }
3773func (s *ssaExport) TypeUInt8() ssa.Type { return Types[TUINT8] }
3774func (s *ssaExport) TypeUInt16() ssa.Type { return Types[TUINT16] }
3775func (s *ssaExport) TypeUInt32() ssa.Type { return Types[TUINT32] }
3776func (s *ssaExport) TypeUInt64() ssa.Type { return Types[TUINT64] }
David Chase52578582015-08-28 14:24:10 -04003777func (s *ssaExport) TypeFloat32() ssa.Type { return Types[TFLOAT32] }
3778func (s *ssaExport) TypeFloat64() ssa.Type { return Types[TFLOAT64] }
Josh Bleecher Snyder85e03292015-07-30 11:03:05 -07003779func (s *ssaExport) TypeInt() ssa.Type { return Types[TINT] }
3780func (s *ssaExport) TypeUintptr() ssa.Type { return Types[TUINTPTR] }
3781func (s *ssaExport) TypeString() ssa.Type { return Types[TSTRING] }
3782func (s *ssaExport) TypeBytePtr() ssa.Type { return Ptrto(Types[TUINT8]) }
3783
Josh Bleecher Snyder8d31df18a2015-07-24 11:28:12 -07003784// StringData returns a symbol (a *Sym wrapped in an interface) which
3785// is the data component of a global string constant containing s.
3786func (*ssaExport) StringData(s string) interface{} {
Keith Randall8c46aa52015-06-19 21:02:28 -07003787 // TODO: is idealstring correct? It might not matter...
Josh Bleecher Snyder8d31df18a2015-07-24 11:28:12 -07003788 _, data := stringsym(s)
3789 return &ssa.ExternSymbol{Typ: idealstring, Sym: data}
Keith Randallf7f604e2015-05-27 14:52:22 -07003790}
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003791
Keith Randalld2107fc2015-08-24 02:16:19 -07003792func (e *ssaExport) Auto(t ssa.Type) fmt.Stringer {
3793 n := temp(t.(*Type)) // Note: adds new auto to Curfn.Func.Dcl list
3794 e.mustImplement = true // This modifies the input to SSA, so we want to make sure we succeed from here!
3795 return n
3796}
3797
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003798// Logf logs a message from the compiler.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003799func (e *ssaExport) Logf(msg string, args ...interface{}) {
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003800 // If e was marked as unimplemented, anything could happen. Ignore.
3801 if e.log && !e.unimplemented {
3802 fmt.Printf(msg, args...)
3803 }
3804}
3805
3806// Fatalf reports a compiler error and exits.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003807func (e *ssaExport) Fatalf(msg string, args ...interface{}) {
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003808 // If e was marked as unimplemented, anything could happen. Ignore.
3809 if !e.unimplemented {
Keith Randall0ec72b62015-09-08 15:42:53 -07003810 Fatalf(msg, args...)
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003811 }
3812}
3813
3814// Unimplementedf reports that the function cannot be compiled.
3815// It will be removed once SSA work is complete.
Josh Bleecher Snyder37ddc272015-06-24 14:03:39 -07003816func (e *ssaExport) Unimplementedf(msg string, args ...interface{}) {
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003817 if e.mustImplement {
Keith Randall0ec72b62015-09-08 15:42:53 -07003818 Fatalf(msg, args...)
Josh Bleecher Snyderd2982092015-07-22 13:13:53 -07003819 }
Josh Bleecher Snyder8c6abfe2015-06-12 11:01:13 -07003820 const alwaysLog = false // enable to calculate top unimplemented features
3821 if !e.unimplemented && (e.log || alwaysLog) {
3822 // first implementation failure, print explanation
3823 fmt.Printf("SSA unimplemented: "+msg+"\n", args...)
3824 }
3825 e.unimplemented = true
3826}