// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"bytes"
	"fmt"
	"html"
	"math"
	"os"
	"strings"

	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/x86"
)

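// ssaEnabled controls whether functions are compiled with the SSA back end;
// when it is false, shouldssa reports false for every function. (It is
// presumably toggled elsewhere in the compiler, e.g. by a flag; that is an
// assumption, not shown in this file.)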
var ssaEnabled = true

// Smallest possible faulting page at address zero.
const minZeroPage = 4096

var ssaConfig *ssa.Config
var ssaExp ssaExport

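// initssa returns the shared SSA back-end configuration, creating it on first
// use, and resets the exporter's unimplemented/mustImplement flags for the
// next function to be compiled.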
func initssa() *ssa.Config {
	ssaExp.unimplemented = false
	ssaExp.mustImplement = true
	if ssaConfig == nil {
		ssaConfig = ssa.NewConfig(Thearch.Thestring, &ssaExp, Ctxt, Debug['N'] == 0)
	}
	return ssaConfig
}

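// shouldssa reports whether the function fn should be compiled with the SSA
// back end, based on the target architecture and the environment variables
// described below.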
func shouldssa(fn *Node) bool {
	if Thearch.Thestring != "amd64" {
		return false
	}
	if !ssaEnabled {
		return false
	}

	// Environment variable control of SSA code generation.
	// 1. IF GOSSAFUNC == current function name THEN
	//       compile this function with SSA and log output to ssa.html

	// 2. IF GOSSAHASH == "" THEN
	//       compile this function (and everything else) with SSA

	// 3. IF GOSSAHASH == "n" or "N"
	//       IF GOSSAPKG == current package name THEN
	//          compile this function (and everything in this package) with SSA
	//       ELSE
	//          use the old back end for this function.
	//       This is for compatibility with the existing test harness and should go away.

	// 4. IF GOSSAHASH is a suffix of the binary-rendered SHA1 hash of the function name THEN
	//       compile this function with SSA
	//    ELSE
	//       compile this function with the old back end.

	// The plan is for case 3 to be removed once the tests are revised.
	// SSA is now the default, and is disabled by setting
	// GOSSAHASH to n or N, or selectively with strings of
	// 0s and 1s.
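	//
	// For example (an illustrative invocation, not mandated by this file),
	// compiling a package with
	//	GOSSAFUNC=MyFunc go tool compile pkg.go
	// builds only the hypothetical MyFunc with SSA and writes its
	// pass-by-pass dump to ssa.html.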

	name := fn.Func.Nname.Sym.Name

	funcname := os.Getenv("GOSSAFUNC")
	if funcname != "" {
		// If GOSSAFUNC is set, compile only that function.
		return name == funcname
	}

	pkg := os.Getenv("GOSSAPKG")
	if pkg != "" {
		// If GOSSAPKG is set, compile only that package.
		return localpkg.Name == pkg
	}

	return initssa().DebugHashMatch("GOSSAHASH", name)
}

// buildssa builds an SSA function.
func buildssa(fn *Node) *ssa.Func {
	name := fn.Func.Nname.Sym.Name
	printssa := name == os.Getenv("GOSSAFUNC")
	if printssa {
		fmt.Println("generating SSA for", name)
		dumplist("buildssa-enter", fn.Func.Enter)
		dumplist("buildssa-body", fn.Nbody)
		dumplist("buildssa-exit", fn.Func.Exit)
	}

	var s state
	s.pushLine(fn.Lineno)
	defer s.popLine()

	if fn.Func.Pragma&CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	// TODO(khr): build config just once at the start of the compiler binary

	ssaExp.log = printssa

	s.config = initssa()
	s.f = s.config.NewFunc()
	s.f.Name = name
	s.exitCode = fn.Func.Exit
	s.panics = map[funcLine]*ssa.Block{}

	if name == os.Getenv("GOSSAFUNC") {
		// TODO: tempfile? it is handy to have the location
		// of this file be stable, so you can just reload in the browser.
		s.config.HTML = ssa.NewHTMLWriter("ssa.html", s.config, name)
		// TODO: generate and print a mapping from nodes to values and blocks
	}
	defer func() {
		if !printssa {
			s.config.HTML.Close()
		}
	}()

	// Allocate starting block
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)

	// Allocate starting values
	s.labels = map[string]*ssaLabel{}
	s.labeledNodes = map[*Node]*ssaLabel{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, ssa.TypeMem)
	s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
	s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[&memVar] = s.startmem

	s.varsyms = map[*Node]interface{}{}

	// Generate addresses of local declarations
	s.decladdrs = map[*Node]*ssa.Value{}
	for _, n := range fn.Func.Dcl {
		switch n.Class {
		case PPARAM, PPARAMOUT:
			aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
			if n.Class == PPARAMOUT && s.canSSA(n) {
				// Save ssa-able PPARAMOUT variables so we can
				// store them back to the stack at the end of
				// the function.
				s.returns = append(s.returns, n)
			}
		case PAUTO | PHEAP:
			// TODO this looks wrong for PAUTO|PHEAP, no vardef, but also no definition
			aux := s.lookupSymbol(n, &ssa.AutoSymbol{Typ: n.Type, Node: n})
			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
		case PPARAM | PHEAP, PPARAMOUT | PHEAP:
			// This ends up wrong, have to do it at the PARAM node instead.
		case PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		case PFUNC:
			// local function - already handled by frontend
		default:
			str := ""
			if n.Class&PHEAP != 0 {
				str = ",heap"
			}
			s.Unimplementedf("local variable with class %s%s unimplemented", classnames[n.Class&^PHEAP], str)
		}
	}

	// Convert the AST-based IR to the SSA-based IR
	s.stmts(fn.Func.Enter)
	s.stmts(fn.Nbody)

	// fallthrough to exit
	if s.curBlock != nil {
		s.pushLine(fn.Func.Endlineno)
		s.exit()
		s.popLine()
	}

	// Check that we used all labels
	for name, lab := range s.labels {
		if !lab.used() && !lab.reported {
			yyerrorl(lab.defNode.Lineno, "label %v defined and not used", name)
			lab.reported = true
		}
		if lab.used() && !lab.defined() && !lab.reported {
			yyerrorl(lab.useNode.Lineno, "label %v not defined", name)
			lab.reported = true
		}
	}

	// Check any forward gotos. Non-forward gotos have already been checked.
	for _, n := range s.fwdGotos {
		lab := s.labels[n.Left.Sym.Name]
		// If the label is undefined, we have already printed an error.
		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		}
	}

	if nerrors > 0 {
		s.f.Free()
		return nil
	}

	// Link up variable uses to variable definitions
	s.linkForwardReferences()

	// Don't carry a reference to this around longer than necessary.
	s.exitCode = Nodes{}

	// Main call to ssa package to compile function
	ssa.Compile(s.f)

	return s.f
}

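// state holds the per-function state used while converting a function's
// AST-based IR to SSA form.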
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// labels and labeled control flow nodes (OFOR, OSWITCH, OSELECT) in f
	labels       map[string]*ssaLabel
	labeledNodes map[*Node]*ssaLabel

	// gotos that jump forward; required for deferred checkgoto calls
	fwdGotos []*Node
	// Code that must precede any return
	// (e.g., copying value of heap-escaped paramout back to true paramout)
	exitCode Nodes

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	vars map[*Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[*Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables.
	decladdrs map[*Node]*ssa.Value

	// symbols for PEXTERN, PAUTO and PPARAMOUT variables so they can be reused.
	varsyms map[*Node]interface{}

	// starting values. Memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// line number stack. The current line number is top of stack
	line []int32

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	// list of FwdRef values.
	fwdRefs []*ssa.Value

	// list of PPARAMOUT (return) variables. Does not include PPARAM|PHEAP vars.
	returns []*Node

	cgoUnsafeArgs bool
}

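// funcLine identifies a panic call by called function and line number.
// It is used as the key for deduplicating panic blocks (see state.panics).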
type funcLine struct {
	f    *Node
	line int32
}

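// ssaLabel records the SSA blocks associated with a Go label: the block the
// label names and, for labeled control flow statements, the blocks that
// labeled break and continue statements jump to.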
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
	defNode        *Node      // label definition Node (OLABEL)
	// Label use Node (OGOTO, OBREAK, OCONTINUE).
	// Used only for error detection and reporting.
	// There might be multiple uses, but we only need to track one.
	useNode  *Node
	reported bool // reported indicates whether an error has already been reported for this label
}

// defined reports whether the label has a definition (OLABEL node).
func (l *ssaLabel) defined() bool { return l.defNode != nil }

// used reports whether the label has a use (OGOTO, OBREAK, or OCONTINUE node).
func (l *ssaLabel) used() bool { return l.useNode != nil }

// label returns the label associated with sym, creating it if necessary.
func (s *state) label(sym *Sym) *ssaLabel {
	lab := s.labels[sym.Name]
	if lab == nil {
		lab = new(ssaLabel)
		s.labels[sym.Name] = lab
	}
	return lab
}

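// The following methods forward logging and diagnostics to the underlying
// ssa.Config, supplying the current line number where one is required.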
func (s *state) Logf(msg string, args ...interface{})   { s.config.Logf(msg, args...) }
func (s *state) Log() bool                              { return s.config.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) { s.config.Fatalf(s.peekLine(), msg, args...) }
func (s *state) Unimplementedf(msg string, args ...interface{}) {
	s.config.Unimplementedf(s.peekLine(), msg, args...)
}
func (s *state) Warnl(line int32, msg string, args ...interface{}) { s.config.Warnl(line, msg, args...) }
func (s *state) Debug_checknil() bool                              { return s.config.Debug_checknil() }

var (
	// dummy node for the memory variable
	memVar = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "mem"}}

	// dummy nodes for temporary variables
	ptrVar   = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "ptr"}}
	capVar   = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "cap"}}
	typVar   = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "typ"}}
	idataVar = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "idata"}}
	okVar    = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "ok"}}
)

// startBlock sets the current block we're generating code in to b.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[*Node]*ssa.Value{}
}

// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	b.Line = s.peekLine()
	return b
}

// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line int32) {
	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekLine peeks at the top of the line number stack.
func (s *state) peekLine() int32 {
	return s.line[len(s.line)-1]
}

func (s *state) Error(msg string, args ...interface{}) {
	yyerrorl(s.peekLine(), msg, args...)
}

// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekLine(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekLine(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekLine(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekLine(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t ssa.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekLine(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekLine(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t ssa.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekLine(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekLine(), op, t, aux, arg0, arg1, arg2)
}

// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.f.Entry.NewValue0(s.peekLine(), op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.f.Entry.NewValue0A(s.peekLine(), op, t, aux)
}

// entryNewValue0I adds a new value with no arguments and an auxint value to the entry block.
func (s *state) entryNewValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.f.Entry.NewValue0I(s.peekLine(), op, t, auxint)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1(s.peekLine(), op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t ssa.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1I(s.peekLine(), op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// const* routines add a new const value to the entry block.
func (s *state) constSlice(t ssa.Type) *ssa.Value       { return s.f.ConstSlice(s.peekLine(), t) }
func (s *state) constInterface(t ssa.Type) *ssa.Value   { return s.f.ConstInterface(s.peekLine(), t) }
func (s *state) constNil(t ssa.Type) *ssa.Value         { return s.f.ConstNil(s.peekLine(), t) }
func (s *state) constEmptyString(t ssa.Type) *ssa.Value { return s.f.ConstEmptyString(s.peekLine(), t) }
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(s.peekLine(), Types[TBOOL], c)
}
func (s *state) constInt8(t ssa.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(s.peekLine(), t, c)
}
func (s *state) constInt16(t ssa.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(s.peekLine(), t, c)
}
func (s *state) constInt32(t ssa.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(s.peekLine(), t, c)
}
func (s *state) constInt64(t ssa.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(s.peekLine(), t, c)
}
func (s *state) constFloat32(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(s.peekLine(), t, c)
}
func (s *state) constFloat64(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(s.peekLine(), t, c)
}
func (s *state) constInt(t ssa.Type, c int64) *ssa.Value {
	if s.config.IntSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}

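// stmts converts each statement in the list a to SSA and adds it to s.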
func (s *state) stmts(a Nodes) {
	for _, x := range a.Slice() {
		s.stmt(x)
	}
}

// stmtList converts each statement in the list l to SSA and adds it to s.
func (s *state) stmtList(l Nodes) {
	for _, n := range l.Slice() {
		s.stmt(n)
	}
}

// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n *Node) {
	s.pushLine(n.Lineno)
	defer s.popLine()

	// If s.curBlock is nil, then we're about to generate dead code.
	// We can't just short-circuit here, though,
	// because we check labels and gotos as part of SSA generation.
	// Provide a block for the dead code so that we don't have
	// to add special cases everywhere else.
	if s.curBlock == nil {
		dead := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(dead)
	}

	s.stmtList(n.Ninit)
	switch n.Op {

	case OBLOCK:
		s.stmtList(n.List)

	// No-ops
	case OEMPTY, ODCLCONST, ODCLTYPE, OFALL:

	// Expression statements
	case OCALLFUNC, OCALLMETH, OCALLINTER:
		s.call(n, callNormal)
		if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class == PFUNC &&
			(compiling_runtime != 0 && n.Left.Sym.Name == "throw" ||
				n.Left.Sym.Pkg == Runtimepkg && (n.Left.Sym.Name == "gopanic" || n.Left.Sym.Name == "selectgo" || n.Left.Sym.Name == "block")) {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.Control = m
			// TODO: never rewrite OPANIC to OCALLFUNC in the
			// first place. Need to wait until all backends
			// go through SSA.
		}
	case ODEFER:
		s.call(n.Left, callDefer)
	case OPROC:
		s.call(n.Left, callGo)

	case OAS2DOTTYPE:
		res, resok := s.dottype(n.Rlist.First(), true)
		s.assign(n.List.First(), res, needwritebarrier(n.List.First(), n.Rlist.First()), false, n.Lineno)
		s.assign(n.List.Second(), resok, false, false, n.Lineno)
		return

	case ODCL:
		if n.Left.Class&PHEAP == 0 {
			return
		}
		if compiling_runtime != 0 {
			Fatalf("%v escapes to heap, not allowed in runtime.", n)
		}

		// TODO: the old pass hides the details of PHEAP
		// variables behind ONAME nodes. Figure out if it's better
		// to rewrite the tree and make the heapaddr construct explicit
		// or to keep this detail hidden behind the scenes.
		palloc := prealloc[n.Left]
		if palloc == nil {
			palloc = callnew(n.Left.Type)
			prealloc[n.Left] = palloc
		}
		r := s.expr(palloc)
		s.assign(n.Left.Name.Heapaddr, r, false, false, n.Lineno)

	case OLABEL:
		sym := n.Left.Sym

		if isblanksym(sym) {
			// Empty identifier is valid but useless.
			// See issues 11589, 11593.
			return
		}

		lab := s.label(sym)

		// Associate label with its control flow node, if any
		if ctl := n.Name.Defn; ctl != nil {
			switch ctl.Op {
			case OFOR, OSWITCH, OSELECT:
				s.labeledNodes[ctl] = lab
			}
		}

		if !lab.defined() {
			lab.defNode = n
		} else {
			s.Error("label %v already defined at %v", sym, linestr(lab.defNode.Lineno))
			lab.reported = true
		}
		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// go to that label (we pretend "label:" is preceded by "goto label")
		b := s.endBlock()
		b.AddEdgeTo(lab.target)
		s.startBlock(lab.target)

	case OGOTO:
		sym := n.Left.Sym

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}
		if !lab.used() {
			lab.useNode = n
		}

		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		} else {
			s.fwdGotos = append(s.fwdGotos, n)
		}

		b := s.endBlock()
		b.AddEdgeTo(lab.target)

	case OAS, OASWB:
		// Check whether we can generate static data rather than code.
		// If so, ignore n and defer data generation until codegen.
		// Failure to do this causes writes to readonly symbols.
		if gen_as_init(n, true) {
			var data []*Node
			if s.f.StaticData != nil {
				data = s.f.StaticData.([]*Node)
			}
			s.f.StaticData = append(data, n)
			return
		}

		var t *Type
		if n.Right != nil {
			t = n.Right.Type
		} else {
			t = n.Left.Type
		}

		// Evaluate RHS.
		rhs := n.Right
		if rhs != nil && (rhs.Op == OSTRUCTLIT || rhs.Op == OARRAYLIT) {
			// All literals with nonzero fields have already been
			// rewritten during walk. Any that remain are just T{}
			// or equivalents. Use the zero value.
			if !iszero(rhs) {
				Fatalf("literal with nonzero value in SSA: %v", rhs)
			}
			rhs = nil
		}
		var r *ssa.Value
		needwb := n.Op == OASWB && rhs != nil
		deref := !canSSAType(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs, false)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}
		if rhs != nil && rhs.Op == OAPPEND {
			// Yuck! The frontend gets rid of the write barrier, but we need it!
			// At least, we need it in the case where growslice is called.
			// TODO: Do the write barrier on just the growslice branch.
			// TODO: just add a ptr graying to the end of growslice?
			// TODO: check whether we need to do this for ODOTTYPE and ORECV also.
			// They get similar wb-removal treatment in walk.go:OAS.
			needwb = true
		}

		s.assign(n.Left, r, needwb, deref, n.Lineno)

	case OIF:
		bThen := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var bElse *ssa.Block
		if n.Rlist.Len() != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
			s.condBranch(n.Left, bThen, bElse, n.Likely)
		} else {
			s.condBranch(n.Left, bThen, bEnd, n.Likely)
		}

		s.startBlock(bThen)
		s.stmts(n.Nbody)
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}

		if n.Rlist.Len() != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Rlist)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ORETURN:
		s.stmtList(n.List)
		s.exit()
	case ORETJMP:
		s.stmtList(n.List)
		b := s.exit()
		b.Kind = ssa.BlockRetJmp // override BlockRet
		b.Aux = n.Left.Sym

	case OCONTINUE, OBREAK:
		var op string
		var to *ssa.Block
		switch n.Op {
		case OCONTINUE:
			op = "continue"
			to = s.continueTo
		case OBREAK:
			op = "break"
			to = s.breakTo
		}
		if n.Left == nil {
			// plain break/continue
			if to == nil {
				s.Error("%s is not in a loop", op)
				return
			}
			// nothing to do; "to" is already the correct target
		} else {
			// labeled break/continue; look up the target
			sym := n.Left.Sym
			lab := s.label(sym)
			if !lab.used() {
				lab.useNode = n.Left
			}
			if !lab.defined() {
				s.Error("%s label not defined: %v", op, sym)
				lab.reported = true
				return
			}
			switch n.Op {
			case OCONTINUE:
				to = lab.continueTarget
			case OBREAK:
				to = lab.breakTarget
			}
			if to == nil {
				// Valid label but not usable with a break/continue here, e.g.:
				// for {
				// 	continue abc
				// }
				// abc:
				// for {}
				s.Error("invalid %s label %v", op, sym)
				lab.reported = true
				return
			}
		}

		b := s.endBlock()
		b.AddEdgeTo(to)

	case OFOR:
		// OFOR: for Ninit; Left; Right { Nbody }
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Left != nil {
			s.condBranch(n.Left, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled for loop
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmts(n.Nbody)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Right != nil {
			s.stmt(n.Right)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
		}
		s.startBlock(bEnd)

	case OSWITCH, OSELECT:
		// These have been mostly rewritten by the front end into their Nbody fields.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmts(n.Nbody)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// OSWITCH never falls through (s.curBlock == nil here).
		// OSELECT does not fall through if we're calling selectgo.
		// OSELECT does fall through if we're calling selectnb{send,recv}[2].
		// In those latter cases, go to the code after the select.
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}
		s.startBlock(bEnd)

	case OVARKILL:
		// Insert a varkill op to record that a variable is no longer live.
		// We only care about liveness info at call sites, so putting the
		// varkill in the store chain is enough to keep it correctly ordered
		// with respect to call ops.
		if !s.canSSA(n.Left) {
			s.vars[&memVar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, n.Left, s.mem())
		}

	case OVARLIVE:
		// Insert a varlive op to record that a variable is still live.
		if !n.Left.Addrtaken {
			s.Fatalf("VARLIVE variable %s must have Addrtaken set", n.Left)
		}
		s.vars[&memVar] = s.newValue1A(ssa.OpVarLive, ssa.TypeMem, n.Left, s.mem())

	case OCHECKNIL:
		p := s.expr(n.Left)
		s.nilCheck(p)

	default:
		s.Unimplementedf("unhandled stmt %s", opnames[n.Op])
	}
}

// exit processes any code that needs to be generated just before returning.
// It returns a BlockRet block that ends the control flow. Its control value
// will be set to the final memory state.
func (s *state) exit() *ssa.Block {
	if hasdefer {
		s.rtcall(Deferreturn, true, nil)
	}

	// Run exit code. Typically, this code copies heap-allocated PPARAMOUT
	// variables back to the stack.
	s.stmts(s.exitCode)

	// Store SSAable PPARAMOUT variables back to stack locations.
	for _, n := range s.returns {
		aux := &ssa.ArgSymbol{Typ: n.Type, Node: n}
		addr := s.newValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
		val := s.variable(n, n.Type)
		s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, n, s.mem())
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, n.Type.Size(), addr, val, s.mem())
		// TODO: if val is ever spilled, we'd like to use the
		// PPARAMOUT slot for spilling it. That won't happen
		// currently.
	}

	// Do actual return.
	m := s.mem()
	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.Control = m
	return b
}

type opAndType struct {
	op    Op
	etype EType
}

| 937 | var opToSSA = map[opAndType]ssa.Op{ |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 938 | opAndType{OADD, TINT8}: ssa.OpAdd8, |
| 939 | opAndType{OADD, TUINT8}: ssa.OpAdd8, |
| 940 | opAndType{OADD, TINT16}: ssa.OpAdd16, |
| 941 | opAndType{OADD, TUINT16}: ssa.OpAdd16, |
| 942 | opAndType{OADD, TINT32}: ssa.OpAdd32, |
| 943 | opAndType{OADD, TUINT32}: ssa.OpAdd32, |
| 944 | opAndType{OADD, TPTR32}: ssa.OpAdd32, |
| 945 | opAndType{OADD, TINT64}: ssa.OpAdd64, |
| 946 | opAndType{OADD, TUINT64}: ssa.OpAdd64, |
| 947 | opAndType{OADD, TPTR64}: ssa.OpAdd64, |
| 948 | opAndType{OADD, TFLOAT32}: ssa.OpAdd32F, |
| 949 | opAndType{OADD, TFLOAT64}: ssa.OpAdd64F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 950 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 951 | opAndType{OSUB, TINT8}: ssa.OpSub8, |
| 952 | opAndType{OSUB, TUINT8}: ssa.OpSub8, |
| 953 | opAndType{OSUB, TINT16}: ssa.OpSub16, |
| 954 | opAndType{OSUB, TUINT16}: ssa.OpSub16, |
| 955 | opAndType{OSUB, TINT32}: ssa.OpSub32, |
| 956 | opAndType{OSUB, TUINT32}: ssa.OpSub32, |
| 957 | opAndType{OSUB, TINT64}: ssa.OpSub64, |
| 958 | opAndType{OSUB, TUINT64}: ssa.OpSub64, |
| 959 | opAndType{OSUB, TFLOAT32}: ssa.OpSub32F, |
| 960 | opAndType{OSUB, TFLOAT64}: ssa.OpSub64F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 961 | |
Josh Bleecher Snyder | e61e7c9 | 2015-07-22 19:19:40 -0700 | [diff] [blame] | 962 | opAndType{ONOT, TBOOL}: ssa.OpNot, |
| 963 | |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 964 | opAndType{OMINUS, TINT8}: ssa.OpNeg8, |
| 965 | opAndType{OMINUS, TUINT8}: ssa.OpNeg8, |
| 966 | opAndType{OMINUS, TINT16}: ssa.OpNeg16, |
| 967 | opAndType{OMINUS, TUINT16}: ssa.OpNeg16, |
| 968 | opAndType{OMINUS, TINT32}: ssa.OpNeg32, |
| 969 | opAndType{OMINUS, TUINT32}: ssa.OpNeg32, |
| 970 | opAndType{OMINUS, TINT64}: ssa.OpNeg64, |
| 971 | opAndType{OMINUS, TUINT64}: ssa.OpNeg64, |
| 972 | opAndType{OMINUS, TFLOAT32}: ssa.OpNeg32F, |
| 973 | opAndType{OMINUS, TFLOAT64}: ssa.OpNeg64F, |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 974 | |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 975 | opAndType{OCOM, TINT8}: ssa.OpCom8, |
| 976 | opAndType{OCOM, TUINT8}: ssa.OpCom8, |
| 977 | opAndType{OCOM, TINT16}: ssa.OpCom16, |
| 978 | opAndType{OCOM, TUINT16}: ssa.OpCom16, |
| 979 | opAndType{OCOM, TINT32}: ssa.OpCom32, |
| 980 | opAndType{OCOM, TUINT32}: ssa.OpCom32, |
| 981 | opAndType{OCOM, TINT64}: ssa.OpCom64, |
| 982 | opAndType{OCOM, TUINT64}: ssa.OpCom64, |
| 983 | |
Josh Bleecher Snyder | fa5fe19 | 2015-09-06 19:24:59 -0700 | [diff] [blame] | 984 | opAndType{OIMAG, TCOMPLEX64}: ssa.OpComplexImag, |
| 985 | opAndType{OIMAG, TCOMPLEX128}: ssa.OpComplexImag, |
| 986 | opAndType{OREAL, TCOMPLEX64}: ssa.OpComplexReal, |
| 987 | opAndType{OREAL, TCOMPLEX128}: ssa.OpComplexReal, |
| 988 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 989 | opAndType{OMUL, TINT8}: ssa.OpMul8, |
| 990 | opAndType{OMUL, TUINT8}: ssa.OpMul8, |
| 991 | opAndType{OMUL, TINT16}: ssa.OpMul16, |
| 992 | opAndType{OMUL, TUINT16}: ssa.OpMul16, |
| 993 | opAndType{OMUL, TINT32}: ssa.OpMul32, |
| 994 | opAndType{OMUL, TUINT32}: ssa.OpMul32, |
| 995 | opAndType{OMUL, TINT64}: ssa.OpMul64, |
| 996 | opAndType{OMUL, TUINT64}: ssa.OpMul64, |
| 997 | opAndType{OMUL, TFLOAT32}: ssa.OpMul32F, |
| 998 | opAndType{OMUL, TFLOAT64}: ssa.OpMul64F, |
| 999 | |
| 1000 | opAndType{ODIV, TFLOAT32}: ssa.OpDiv32F, |
| 1001 | opAndType{ODIV, TFLOAT64}: ssa.OpDiv64F, |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 1002 | |
Todd Neal | 67cbd5b | 2015-08-18 19:14:47 -0500 | [diff] [blame] | 1003 | opAndType{OHMUL, TINT8}: ssa.OpHmul8, |
| 1004 | opAndType{OHMUL, TUINT8}: ssa.OpHmul8u, |
| 1005 | opAndType{OHMUL, TINT16}: ssa.OpHmul16, |
| 1006 | opAndType{OHMUL, TUINT16}: ssa.OpHmul16u, |
| 1007 | opAndType{OHMUL, TINT32}: ssa.OpHmul32, |
| 1008 | opAndType{OHMUL, TUINT32}: ssa.OpHmul32u, |
| 1009 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 1010 | opAndType{ODIV, TINT8}: ssa.OpDiv8, |
| 1011 | opAndType{ODIV, TUINT8}: ssa.OpDiv8u, |
| 1012 | opAndType{ODIV, TINT16}: ssa.OpDiv16, |
| 1013 | opAndType{ODIV, TUINT16}: ssa.OpDiv16u, |
| 1014 | opAndType{ODIV, TINT32}: ssa.OpDiv32, |
| 1015 | opAndType{ODIV, TUINT32}: ssa.OpDiv32u, |
| 1016 | opAndType{ODIV, TINT64}: ssa.OpDiv64, |
| 1017 | opAndType{ODIV, TUINT64}: ssa.OpDiv64u, |
| 1018 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 1019 | opAndType{OMOD, TINT8}: ssa.OpMod8, |
| 1020 | opAndType{OMOD, TUINT8}: ssa.OpMod8u, |
| 1021 | opAndType{OMOD, TINT16}: ssa.OpMod16, |
| 1022 | opAndType{OMOD, TUINT16}: ssa.OpMod16u, |
| 1023 | opAndType{OMOD, TINT32}: ssa.OpMod32, |
| 1024 | opAndType{OMOD, TUINT32}: ssa.OpMod32u, |
| 1025 | opAndType{OMOD, TINT64}: ssa.OpMod64, |
| 1026 | opAndType{OMOD, TUINT64}: ssa.OpMod64u, |
| 1027 | |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 1028 | opAndType{OAND, TINT8}: ssa.OpAnd8, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1029 | opAndType{OAND, TUINT8}: ssa.OpAnd8, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 1030 | opAndType{OAND, TINT16}: ssa.OpAnd16, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1031 | opAndType{OAND, TUINT16}: ssa.OpAnd16, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 1032 | opAndType{OAND, TINT32}: ssa.OpAnd32, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1033 | opAndType{OAND, TUINT32}: ssa.OpAnd32, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 1034 | opAndType{OAND, TINT64}: ssa.OpAnd64, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1035 | opAndType{OAND, TUINT64}: ssa.OpAnd64, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 1036 | |
Alexandru Moșoi | 7402416 | 2015-07-29 17:52:25 +0200 | [diff] [blame] | 1037 | opAndType{OOR, TINT8}: ssa.OpOr8, |
| 1038 | opAndType{OOR, TUINT8}: ssa.OpOr8, |
| 1039 | opAndType{OOR, TINT16}: ssa.OpOr16, |
| 1040 | opAndType{OOR, TUINT16}: ssa.OpOr16, |
| 1041 | opAndType{OOR, TINT32}: ssa.OpOr32, |
| 1042 | opAndType{OOR, TUINT32}: ssa.OpOr32, |
| 1043 | opAndType{OOR, TINT64}: ssa.OpOr64, |
| 1044 | opAndType{OOR, TUINT64}: ssa.OpOr64, |
| 1045 | |
Alexandru Moșoi | 6d9362a1 | 2015-07-30 12:33:36 +0200 | [diff] [blame] | 1046 | opAndType{OXOR, TINT8}: ssa.OpXor8, |
| 1047 | opAndType{OXOR, TUINT8}: ssa.OpXor8, |
| 1048 | opAndType{OXOR, TINT16}: ssa.OpXor16, |
| 1049 | opAndType{OXOR, TUINT16}: ssa.OpXor16, |
| 1050 | opAndType{OXOR, TINT32}: ssa.OpXor32, |
| 1051 | opAndType{OXOR, TUINT32}: ssa.OpXor32, |
| 1052 | opAndType{OXOR, TINT64}: ssa.OpXor64, |
| 1053 | opAndType{OXOR, TUINT64}: ssa.OpXor64, |
| 1054 | |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1055 | opAndType{OEQ, TBOOL}: ssa.OpEq8, |
| 1056 | opAndType{OEQ, TINT8}: ssa.OpEq8, |
| 1057 | opAndType{OEQ, TUINT8}: ssa.OpEq8, |
| 1058 | opAndType{OEQ, TINT16}: ssa.OpEq16, |
| 1059 | opAndType{OEQ, TUINT16}: ssa.OpEq16, |
| 1060 | opAndType{OEQ, TINT32}: ssa.OpEq32, |
| 1061 | opAndType{OEQ, TUINT32}: ssa.OpEq32, |
| 1062 | opAndType{OEQ, TINT64}: ssa.OpEq64, |
| 1063 | opAndType{OEQ, TUINT64}: ssa.OpEq64, |
Keith Randall | 1e4ebfd | 2015-09-10 13:53:27 -0700 | [diff] [blame] | 1064 | opAndType{OEQ, TINTER}: ssa.OpEqInter, |
| 1065 | opAndType{OEQ, TARRAY}: ssa.OpEqSlice, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1066 | opAndType{OEQ, TFUNC}: ssa.OpEqPtr, |
| 1067 | opAndType{OEQ, TMAP}: ssa.OpEqPtr, |
| 1068 | opAndType{OEQ, TCHAN}: ssa.OpEqPtr, |
Todd Neal | 5fdd4fe | 2015-08-30 20:47:26 -0500 | [diff] [blame] | 1069 | opAndType{OEQ, TPTR64}: ssa.OpEqPtr, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1070 | opAndType{OEQ, TUINTPTR}: ssa.OpEqPtr, |
| 1071 | opAndType{OEQ, TUNSAFEPTR}: ssa.OpEqPtr, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1072 | opAndType{OEQ, TFLOAT64}: ssa.OpEq64F, |
| 1073 | opAndType{OEQ, TFLOAT32}: ssa.OpEq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1074 | |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1075 | opAndType{ONE, TBOOL}: ssa.OpNeq8, |
| 1076 | opAndType{ONE, TINT8}: ssa.OpNeq8, |
| 1077 | opAndType{ONE, TUINT8}: ssa.OpNeq8, |
| 1078 | opAndType{ONE, TINT16}: ssa.OpNeq16, |
| 1079 | opAndType{ONE, TUINT16}: ssa.OpNeq16, |
| 1080 | opAndType{ONE, TINT32}: ssa.OpNeq32, |
| 1081 | opAndType{ONE, TUINT32}: ssa.OpNeq32, |
| 1082 | opAndType{ONE, TINT64}: ssa.OpNeq64, |
| 1083 | opAndType{ONE, TUINT64}: ssa.OpNeq64, |
Keith Randall | 1e4ebfd | 2015-09-10 13:53:27 -0700 | [diff] [blame] | 1084 | opAndType{ONE, TINTER}: ssa.OpNeqInter, |
| 1085 | opAndType{ONE, TARRAY}: ssa.OpNeqSlice, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1086 | opAndType{ONE, TFUNC}: ssa.OpNeqPtr, |
| 1087 | opAndType{ONE, TMAP}: ssa.OpNeqPtr, |
| 1088 | opAndType{ONE, TCHAN}: ssa.OpNeqPtr, |
Todd Neal | 5fdd4fe | 2015-08-30 20:47:26 -0500 | [diff] [blame] | 1089 | opAndType{ONE, TPTR64}: ssa.OpNeqPtr, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 1090 | opAndType{ONE, TUINTPTR}: ssa.OpNeqPtr, |
| 1091 | opAndType{ONE, TUNSAFEPTR}: ssa.OpNeqPtr, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1092 | opAndType{ONE, TFLOAT64}: ssa.OpNeq64F, |
| 1093 | opAndType{ONE, TFLOAT32}: ssa.OpNeq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1094 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1095 | opAndType{OLT, TINT8}: ssa.OpLess8, |
| 1096 | opAndType{OLT, TUINT8}: ssa.OpLess8U, |
| 1097 | opAndType{OLT, TINT16}: ssa.OpLess16, |
| 1098 | opAndType{OLT, TUINT16}: ssa.OpLess16U, |
| 1099 | opAndType{OLT, TINT32}: ssa.OpLess32, |
| 1100 | opAndType{OLT, TUINT32}: ssa.OpLess32U, |
| 1101 | opAndType{OLT, TINT64}: ssa.OpLess64, |
| 1102 | opAndType{OLT, TUINT64}: ssa.OpLess64U, |
| 1103 | opAndType{OLT, TFLOAT64}: ssa.OpLess64F, |
| 1104 | opAndType{OLT, TFLOAT32}: ssa.OpLess32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1105 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1106 | opAndType{OGT, TINT8}: ssa.OpGreater8, |
| 1107 | opAndType{OGT, TUINT8}: ssa.OpGreater8U, |
| 1108 | opAndType{OGT, TINT16}: ssa.OpGreater16, |
| 1109 | opAndType{OGT, TUINT16}: ssa.OpGreater16U, |
| 1110 | opAndType{OGT, TINT32}: ssa.OpGreater32, |
| 1111 | opAndType{OGT, TUINT32}: ssa.OpGreater32U, |
| 1112 | opAndType{OGT, TINT64}: ssa.OpGreater64, |
| 1113 | opAndType{OGT, TUINT64}: ssa.OpGreater64U, |
| 1114 | opAndType{OGT, TFLOAT64}: ssa.OpGreater64F, |
| 1115 | opAndType{OGT, TFLOAT32}: ssa.OpGreater32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1116 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1117 | opAndType{OLE, TINT8}: ssa.OpLeq8, |
| 1118 | opAndType{OLE, TUINT8}: ssa.OpLeq8U, |
| 1119 | opAndType{OLE, TINT16}: ssa.OpLeq16, |
| 1120 | opAndType{OLE, TUINT16}: ssa.OpLeq16U, |
| 1121 | opAndType{OLE, TINT32}: ssa.OpLeq32, |
| 1122 | opAndType{OLE, TUINT32}: ssa.OpLeq32U, |
| 1123 | opAndType{OLE, TINT64}: ssa.OpLeq64, |
| 1124 | opAndType{OLE, TUINT64}: ssa.OpLeq64U, |
| 1125 | opAndType{OLE, TFLOAT64}: ssa.OpLeq64F, |
| 1126 | opAndType{OLE, TFLOAT32}: ssa.OpLeq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1127 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 1128 | opAndType{OGE, TINT8}: ssa.OpGeq8, |
| 1129 | opAndType{OGE, TUINT8}: ssa.OpGeq8U, |
| 1130 | opAndType{OGE, TINT16}: ssa.OpGeq16, |
| 1131 | opAndType{OGE, TUINT16}: ssa.OpGeq16U, |
| 1132 | opAndType{OGE, TINT32}: ssa.OpGeq32, |
| 1133 | opAndType{OGE, TUINT32}: ssa.OpGeq32U, |
| 1134 | opAndType{OGE, TINT64}: ssa.OpGeq64, |
| 1135 | opAndType{OGE, TUINT64}: ssa.OpGeq64U, |
| 1136 | opAndType{OGE, TFLOAT64}: ssa.OpGeq64F, |
| 1137 | opAndType{OGE, TFLOAT32}: ssa.OpGeq32F, |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1138 | |
| 1139 | opAndType{OLROT, TUINT8}: ssa.OpLrot8, |
| 1140 | opAndType{OLROT, TUINT16}: ssa.OpLrot16, |
| 1141 | opAndType{OLROT, TUINT32}: ssa.OpLrot32, |
| 1142 | opAndType{OLROT, TUINT64}: ssa.OpLrot64, |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 1143 | |
| 1144 | opAndType{OSQRT, TFLOAT64}: ssa.OpSqrt, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1145 | } |
| 1146 | |
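// concreteEtype returns the fixed-width equivalent of t's element type:
// TINT, TUINT and TUINTPTR become their 32- or 64-bit forms depending on
// the target's int and pointer sizes; all other types are returned as-is.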
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1147 | func (s *state) concreteEtype(t *Type) EType { |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1148 | e := t.Etype |
| 1149 | switch e { |
| 1150 | default: |
| 1151 | return e |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1152 | case TINT: |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1153 | if s.config.IntSize == 8 { |
| 1154 | return TINT64 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1155 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1156 | return TINT32 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1157 | case TUINT: |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1158 | if s.config.IntSize == 8 { |
| 1159 | return TUINT64 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1160 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1161 | return TUINT32 |
| 1162 | case TUINTPTR: |
| 1163 | if s.config.PtrSize == 8 { |
| 1164 | return TUINT64 |
| 1165 | } |
| 1166 | return TUINT32 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1167 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1168 | } |
| 1169 | |
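// ssaOp returns the generic SSA opcode implementing the Go operator op on
// operands of type t. Platform-sized types (int, uint, uintptr) are first
// resolved to their concrete widths, so e.g. OADD on int maps to
// ssa.OpAdd64 on a 64-bit target.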
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1170 | func (s *state) ssaOp(op Op, t *Type) ssa.Op { |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1171 | etype := s.concreteEtype(t) |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1172 | x, ok := opToSSA[opAndType{op, etype}] |
| 1173 | if !ok { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1174 | s.Unimplementedf("unhandled op %s %s", opnames[op], Econv(etype))
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1175 | } |
| 1176 | return x |
Josh Bleecher Snyder | 46815b9 | 2015-06-24 17:48:22 -0700 | [diff] [blame] | 1177 | } |
| 1178 | |
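// floatForComplex returns the type of one component of the complex type t:
// TFLOAT32 for complex64, TFLOAT64 for complex128.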
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1179 | func floatForComplex(t *Type) *Type { |
| 1180 | if t.Size() == 8 { |
| 1181 | return Types[TFLOAT32] |
| 1182 | } else { |
| 1183 | return Types[TFLOAT64] |
| 1184 | } |
| 1185 | } |
| 1186 | |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1187 | type opAndTwoTypes struct { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1188 | op Op |
| 1189 | etype1 EType |
| 1190 | etype2 EType |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1191 | } |
| 1192 | |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1193 | type twoTypes struct { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1194 | etype1 EType |
| 1195 | etype2 EType |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1196 | } |
| 1197 | |
| 1198 | type twoOpsAndType struct { |
| 1199 | op1 ssa.Op |
| 1200 | op2 ssa.Op |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1201 | intermediateType EType |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1202 | } |
| 1203 | |
| 1204 | var fpConvOpToSSA = map[twoTypes]twoOpsAndType{ |
| 1205 | |
| 1206 | twoTypes{TINT8, TFLOAT32}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, TINT32}, |
| 1207 | twoTypes{TINT16, TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, TINT32}, |
| 1208 | twoTypes{TINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, TINT32}, |
| 1209 | twoTypes{TINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, TINT64}, |
| 1210 | |
| 1211 | twoTypes{TINT8, TFLOAT64}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, TINT32}, |
| 1212 | twoTypes{TINT16, TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, TINT32}, |
| 1213 | twoTypes{TINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, TINT32}, |
| 1214 | twoTypes{TINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, TINT64}, |
| 1215 | |
| 1216 | twoTypes{TFLOAT32, TINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1217 | twoTypes{TFLOAT32, TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1218 | twoTypes{TFLOAT32, TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, TINT32}, |
| 1219 | twoTypes{TFLOAT32, TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, TINT64}, |
| 1220 | |
| 1221 | twoTypes{TFLOAT64, TINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1222 | twoTypes{TFLOAT64, TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1223 | twoTypes{TFLOAT64, TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, TINT32}, |
| 1224 | twoTypes{TFLOAT64, TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, TINT64}, |
| 1225 | // unsigned |
| 1226 | twoTypes{TUINT8, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, TINT32}, |
| 1227 | twoTypes{TUINT16, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, TINT32}, |
| 1228 | twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, TINT64}, // go wide to dodge unsigned |
| 1229 | twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto32F, branchy code expansion instead |
| 1230 | |
| 1231 | twoTypes{TUINT8, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, TINT32}, |
| 1232 | twoTypes{TUINT16, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, TINT32}, |
| 1233 | twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, TINT64}, // go wide to dodge unsigned |
| 1234 | twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto64F, branchy code expansion instead |
| 1235 | |
| 1236 | twoTypes{TFLOAT32, TUINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1237 | twoTypes{TFLOAT32, TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1238 | twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned |
| 1239 | twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt32Fto64U, branchy code expansion instead |
| 1240 | |
| 1241 | twoTypes{TFLOAT64, TUINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1242 | twoTypes{TFLOAT64, TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1243 | twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned |
| 1244 | twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt64Fto64U, branchy code expansion instead |
| 1245 | |
| 1246 | // float |
| 1247 | twoTypes{TFLOAT64, TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, TFLOAT32}, |
| 1248 | twoTypes{TFLOAT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT64}, |
| 1249 | twoTypes{TFLOAT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT32}, |
| 1250 | twoTypes{TFLOAT32, TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, TFLOAT64}, |
| 1251 | } |
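
// Illustrative sketch (not used by the compiler): the table entry
// {TUINT8, TFLOAT64} -> {ZeroExt8to32, Cvt32to64F, TINT32} corresponds to
// the following source-level two-step conversion through the intermediate
// 32-bit integer type. The helper name is made up for this example.
func exampleUint8ToFloat64(x uint8) float64 {
	widened := int32(x)     // OpZeroExt8to32: zero-extend to the intermediate width
	return float64(widened) // OpCvt32to64F: one hardware convert, int32 -> float64
}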
| 1252 | |
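// Shift ops are keyed on both the shifted type and the shift-count type:
// the count may be any unsigned integer type, and the signedness of the
// shifted type selects arithmetic (Rsh..x..) versus logical (Rsh..Ux..)
// right shifts.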
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1253 | var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{ |
| 1254 | opAndTwoTypes{OLSH, TINT8, TUINT8}: ssa.OpLsh8x8, |
| 1255 | opAndTwoTypes{OLSH, TUINT8, TUINT8}: ssa.OpLsh8x8, |
| 1256 | opAndTwoTypes{OLSH, TINT8, TUINT16}: ssa.OpLsh8x16, |
| 1257 | opAndTwoTypes{OLSH, TUINT8, TUINT16}: ssa.OpLsh8x16, |
| 1258 | opAndTwoTypes{OLSH, TINT8, TUINT32}: ssa.OpLsh8x32, |
| 1259 | opAndTwoTypes{OLSH, TUINT8, TUINT32}: ssa.OpLsh8x32, |
| 1260 | opAndTwoTypes{OLSH, TINT8, TUINT64}: ssa.OpLsh8x64, |
| 1261 | opAndTwoTypes{OLSH, TUINT8, TUINT64}: ssa.OpLsh8x64, |
| 1262 | |
| 1263 | opAndTwoTypes{OLSH, TINT16, TUINT8}: ssa.OpLsh16x8, |
| 1264 | opAndTwoTypes{OLSH, TUINT16, TUINT8}: ssa.OpLsh16x8, |
| 1265 | opAndTwoTypes{OLSH, TINT16, TUINT16}: ssa.OpLsh16x16, |
| 1266 | opAndTwoTypes{OLSH, TUINT16, TUINT16}: ssa.OpLsh16x16, |
| 1267 | opAndTwoTypes{OLSH, TINT16, TUINT32}: ssa.OpLsh16x32, |
| 1268 | opAndTwoTypes{OLSH, TUINT16, TUINT32}: ssa.OpLsh16x32, |
| 1269 | opAndTwoTypes{OLSH, TINT16, TUINT64}: ssa.OpLsh16x64, |
| 1270 | opAndTwoTypes{OLSH, TUINT16, TUINT64}: ssa.OpLsh16x64, |
| 1271 | |
| 1272 | opAndTwoTypes{OLSH, TINT32, TUINT8}: ssa.OpLsh32x8, |
| 1273 | opAndTwoTypes{OLSH, TUINT32, TUINT8}: ssa.OpLsh32x8, |
| 1274 | opAndTwoTypes{OLSH, TINT32, TUINT16}: ssa.OpLsh32x16, |
| 1275 | opAndTwoTypes{OLSH, TUINT32, TUINT16}: ssa.OpLsh32x16, |
| 1276 | opAndTwoTypes{OLSH, TINT32, TUINT32}: ssa.OpLsh32x32, |
| 1277 | opAndTwoTypes{OLSH, TUINT32, TUINT32}: ssa.OpLsh32x32, |
| 1278 | opAndTwoTypes{OLSH, TINT32, TUINT64}: ssa.OpLsh32x64, |
| 1279 | opAndTwoTypes{OLSH, TUINT32, TUINT64}: ssa.OpLsh32x64, |
| 1280 | |
| 1281 | opAndTwoTypes{OLSH, TINT64, TUINT8}: ssa.OpLsh64x8, |
| 1282 | opAndTwoTypes{OLSH, TUINT64, TUINT8}: ssa.OpLsh64x8, |
| 1283 | opAndTwoTypes{OLSH, TINT64, TUINT16}: ssa.OpLsh64x16, |
| 1284 | opAndTwoTypes{OLSH, TUINT64, TUINT16}: ssa.OpLsh64x16, |
| 1285 | opAndTwoTypes{OLSH, TINT64, TUINT32}: ssa.OpLsh64x32, |
| 1286 | opAndTwoTypes{OLSH, TUINT64, TUINT32}: ssa.OpLsh64x32, |
| 1287 | opAndTwoTypes{OLSH, TINT64, TUINT64}: ssa.OpLsh64x64, |
| 1288 | opAndTwoTypes{OLSH, TUINT64, TUINT64}: ssa.OpLsh64x64, |
| 1289 | |
| 1290 | opAndTwoTypes{ORSH, TINT8, TUINT8}: ssa.OpRsh8x8, |
| 1291 | opAndTwoTypes{ORSH, TUINT8, TUINT8}: ssa.OpRsh8Ux8, |
| 1292 | opAndTwoTypes{ORSH, TINT8, TUINT16}: ssa.OpRsh8x16, |
| 1293 | opAndTwoTypes{ORSH, TUINT8, TUINT16}: ssa.OpRsh8Ux16, |
| 1294 | opAndTwoTypes{ORSH, TINT8, TUINT32}: ssa.OpRsh8x32, |
| 1295 | opAndTwoTypes{ORSH, TUINT8, TUINT32}: ssa.OpRsh8Ux32, |
| 1296 | opAndTwoTypes{ORSH, TINT8, TUINT64}: ssa.OpRsh8x64, |
| 1297 | opAndTwoTypes{ORSH, TUINT8, TUINT64}: ssa.OpRsh8Ux64, |
| 1298 | |
| 1299 | opAndTwoTypes{ORSH, TINT16, TUINT8}: ssa.OpRsh16x8, |
| 1300 | opAndTwoTypes{ORSH, TUINT16, TUINT8}: ssa.OpRsh16Ux8, |
| 1301 | opAndTwoTypes{ORSH, TINT16, TUINT16}: ssa.OpRsh16x16, |
| 1302 | opAndTwoTypes{ORSH, TUINT16, TUINT16}: ssa.OpRsh16Ux16, |
| 1303 | opAndTwoTypes{ORSH, TINT16, TUINT32}: ssa.OpRsh16x32, |
| 1304 | opAndTwoTypes{ORSH, TUINT16, TUINT32}: ssa.OpRsh16Ux32, |
| 1305 | opAndTwoTypes{ORSH, TINT16, TUINT64}: ssa.OpRsh16x64, |
| 1306 | opAndTwoTypes{ORSH, TUINT16, TUINT64}: ssa.OpRsh16Ux64, |
| 1307 | |
| 1308 | opAndTwoTypes{ORSH, TINT32, TUINT8}: ssa.OpRsh32x8, |
| 1309 | opAndTwoTypes{ORSH, TUINT32, TUINT8}: ssa.OpRsh32Ux8, |
| 1310 | opAndTwoTypes{ORSH, TINT32, TUINT16}: ssa.OpRsh32x16, |
| 1311 | opAndTwoTypes{ORSH, TUINT32, TUINT16}: ssa.OpRsh32Ux16, |
| 1312 | opAndTwoTypes{ORSH, TINT32, TUINT32}: ssa.OpRsh32x32, |
| 1313 | opAndTwoTypes{ORSH, TUINT32, TUINT32}: ssa.OpRsh32Ux32, |
| 1314 | opAndTwoTypes{ORSH, TINT32, TUINT64}: ssa.OpRsh32x64, |
| 1315 | opAndTwoTypes{ORSH, TUINT32, TUINT64}: ssa.OpRsh32Ux64, |
| 1316 | |
| 1317 | opAndTwoTypes{ORSH, TINT64, TUINT8}: ssa.OpRsh64x8, |
| 1318 | opAndTwoTypes{ORSH, TUINT64, TUINT8}: ssa.OpRsh64Ux8, |
| 1319 | opAndTwoTypes{ORSH, TINT64, TUINT16}: ssa.OpRsh64x16, |
| 1320 | opAndTwoTypes{ORSH, TUINT64, TUINT16}: ssa.OpRsh64Ux16, |
| 1321 | opAndTwoTypes{ORSH, TINT64, TUINT32}: ssa.OpRsh64x32, |
| 1322 | opAndTwoTypes{ORSH, TUINT64, TUINT32}: ssa.OpRsh64Ux32, |
| 1323 | opAndTwoTypes{ORSH, TINT64, TUINT64}: ssa.OpRsh64x64, |
| 1324 | opAndTwoTypes{ORSH, TUINT64, TUINT64}: ssa.OpRsh64Ux64, |
| 1325 | } |
| 1326 | |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1327 | func (s *state) ssaShiftOp(op Op, t *Type, u *Type) ssa.Op { |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1328 | etype1 := s.concreteEtype(t) |
| 1329 | etype2 := s.concreteEtype(u) |
| 1330 | x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}] |
| 1331 | if !ok { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1332 | s.Unimplementedf("unhandled shift op %s etype=%s/%s", opnames[op], Econv(etype1), Econv(etype2)) |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1333 | } |
| 1334 | return x |
| 1335 | } |
| 1336 | |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1337 | func (s *state) ssaRotateOp(op Op, t *Type) ssa.Op { |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1338 | etype1 := s.concreteEtype(t) |
| 1339 | x, ok := opToSSA[opAndType{op, etype1}] |
| 1340 | if !ok { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1341 | s.Unimplementedf("unhandled rotate op %s etype=%s", opnames[op], Econv(etype1)) |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1342 | } |
| 1343 | return x |
| 1344 | } |
| 1345 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1346 | // expr converts the expression n to ssa, adds it to s and returns the ssa result. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1347 | func (s *state) expr(n *Node) *ssa.Value { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 1348 | s.pushLine(n.Lineno) |
| 1349 | defer s.popLine() |
| 1350 | |
Keith Randall | 06f3292 | 2015-07-11 11:39:12 -0700 | [diff] [blame] | 1351 | s.stmtList(n.Ninit) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1352 | switch n.Op { |
Todd Neal | def7c65 | 2015-09-07 19:07:02 -0500 | [diff] [blame] | 1353 | case OCFUNC: |
Todd Neal | d076ef7 | 2015-10-15 20:25:32 -0500 | [diff] [blame] | 1354 | aux := s.lookupSymbol(n, &ssa.ExternSymbol{n.Type, n.Left.Sym}) |
Todd Neal | def7c65 | 2015-09-07 19:07:02 -0500 | [diff] [blame] | 1355 | return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 1356 | case OPARAM: |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1357 | addr := s.addr(n, false) |
David Chase | 32ffbf7 | 2015-10-08 17:14:12 -0400 | [diff] [blame] | 1358 | return s.newValue2(ssa.OpLoad, n.Left.Type, addr, s.mem()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1359 | case ONAME: |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1360 | if n.Class == PFUNC { |
| 1361 | // "value" of a function is the address of the function's closure |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1362 | sym := funcsym(n.Sym) |
| 1363 | aux := &ssa.ExternSymbol{n.Type, sym} |
| 1364 | return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb) |
Keith Randall | 23df95b | 2015-05-12 15:16:52 -0700 | [diff] [blame] | 1365 | } |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 1366 | if s.canSSA(n) { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1367 | return s.variable(n, n.Type) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1368 | } |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1369 | addr := s.addr(n, false) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1370 | return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem()) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 1371 | case OCLOSUREVAR: |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1372 | addr := s.addr(n, false) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 1373 | return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1374 | case OLITERAL: |
Keith Randall | e707fbe | 2015-06-11 10:20:39 -0700 | [diff] [blame] | 1375 | switch n.Val().Ctype() { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1376 | case CTINT: |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 1377 | i := Mpgetfix(n.Val().U.(*Mpint)) |
| 1378 | switch n.Type.Size() { |
| 1379 | case 1: |
| 1380 | return s.constInt8(n.Type, int8(i)) |
| 1381 | case 2: |
| 1382 | return s.constInt16(n.Type, int16(i)) |
| 1383 | case 4: |
| 1384 | return s.constInt32(n.Type, int32(i)) |
| 1385 | case 8: |
| 1386 | return s.constInt64(n.Type, i) |
| 1387 | default: |
| 1388 | s.Fatalf("bad integer size %d", n.Type.Size()) |
| 1389 | return nil |
| 1390 | } |
| 1391 | case CTSTR: |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 1392 | if n.Val().U == "" { |
| 1393 | return s.constEmptyString(n.Type) |
| 1394 | } |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 1395 | return s.entryNewValue0A(ssa.OpConstString, n.Type, n.Val().U) |
| 1396 | case CTBOOL: |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 1397 | v := s.constBool(n.Val().U.(bool)) |
| 1398 | // For some reason the frontend gets the line numbers of |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 1399 | // CTBOOL literals totally wrong. Fix it here by grabbing |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 1400 | // the line number of the enclosing AST node. |
| 1401 | if len(s.line) >= 2 { |
| 1402 | v.Line = s.line[len(s.line)-2] |
| 1403 | } |
| 1404 | return v |
Brad Fitzpatrick | 337b7e7 | 2015-07-13 17:30:42 -0600 | [diff] [blame] | 1405 | case CTNIL: |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1406 | t := n.Type |
| 1407 | switch { |
| 1408 | case t.IsSlice(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 1409 | return s.constSlice(t) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1410 | case t.IsInterface(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 1411 | return s.constInterface(t) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1412 | default: |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 1413 | return s.constNil(t) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1414 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1415 | case CTFLT: |
| 1416 | f := n.Val().U.(*Mpflt) |
| 1417 | switch n.Type.Size() { |
| 1418 | case 4: |
Keith Randall | 733bf6e | 2016-01-25 20:26:06 -0800 | [diff] [blame] | 1419 | return s.constFloat32(n.Type, mpgetflt32(f)) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1420 | case 8: |
Keith Randall | 733bf6e | 2016-01-25 20:26:06 -0800 | [diff] [blame] | 1421 | return s.constFloat64(n.Type, mpgetflt(f)) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1422 | default: |
| 1423 | s.Fatalf("bad float size %d", n.Type.Size()) |
| 1424 | return nil |
| 1425 | } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1426 | case CTCPLX: |
| 1427 | c := n.Val().U.(*Mpcplx) |
| 1428 | r := &c.Real |
| 1429 | i := &c.Imag |
| 1430 | switch n.Type.Size() { |
| 1431 | case 8: |
| 1432 | { |
| 1433 | pt := Types[TFLOAT32] |
| 1434 | return s.newValue2(ssa.OpComplexMake, n.Type, |
Keith Randall | 733bf6e | 2016-01-25 20:26:06 -0800 | [diff] [blame] | 1435 | s.constFloat32(pt, mpgetflt32(r)), |
| 1436 | s.constFloat32(pt, mpgetflt32(i))) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1437 | } |
| 1438 | case 16: |
| 1439 | { |
| 1440 | pt := Types[TFLOAT64] |
| 1441 | return s.newValue2(ssa.OpComplexMake, n.Type, |
Keith Randall | 733bf6e | 2016-01-25 20:26:06 -0800 | [diff] [blame] | 1442 | s.constFloat64(pt, mpgetflt(r)), |
| 1443 | s.constFloat64(pt, mpgetflt(i))) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1444 | } |
| 1445 | default: |
| 1446 | s.Fatalf("bad float size %d", n.Type.Size()) |
| 1447 | return nil |
| 1448 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1449 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1450 | default: |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 1451 | s.Unimplementedf("unhandled OLITERAL %v", n.Val().Ctype()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1452 | return nil |
| 1453 | } |
Keith Randall | 0ad9c8c | 2015-06-12 16:24:33 -0700 | [diff] [blame] | 1454 | case OCONVNOP: |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1455 | to := n.Type |
| 1456 | from := n.Left.Type |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1457 | |
| 1458 | // Assume everything will work out, so set up our return value. |
| 1459 | // Anything interesting that happens from here is a fatal. |
Keith Randall | 0ad9c8c | 2015-06-12 16:24:33 -0700 | [diff] [blame] | 1460 | x := s.expr(n.Left) |
David Chase | e99dd52 | 2015-10-19 11:36:07 -0400 | [diff] [blame] | 1461 | |
| 1462 | // Special case to avoid confusing GC and liveness.
| 1463 | // We don't want pointers accidentally classified |
| 1464 | // as not-pointers or vice-versa because of copy |
| 1465 | // elision. |
| 1466 | if to.IsPtr() != from.IsPtr() { |
Keith Randall | 7807bda | 2015-11-10 15:35:36 -0800 | [diff] [blame] | 1467 | return s.newValue2(ssa.OpConvert, to, x, s.mem()) |
David Chase | e99dd52 | 2015-10-19 11:36:07 -0400 | [diff] [blame] | 1468 | } |
| 1469 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1470 | v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type |
| 1471 | |
Todd Neal | def7c65 | 2015-09-07 19:07:02 -0500 | [diff] [blame] | 1472 | // CONVNOP closure |
| 1473 | if to.Etype == TFUNC && from.IsPtr() { |
| 1474 | return v |
| 1475 | } |
| 1476 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1477 | // named <--> unnamed type or typed <--> untyped const |
| 1478 | if from.Etype == to.Etype { |
| 1479 | return v |
| 1480 | } |
David Chase | e99dd52 | 2015-10-19 11:36:07 -0400 | [diff] [blame] | 1481 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1482 | // unsafe.Pointer <--> *T |
| 1483 | if to.Etype == TUNSAFEPTR && from.IsPtr() || from.Etype == TUNSAFEPTR && to.IsPtr() { |
| 1484 | return v |
| 1485 | } |
| 1486 | |
| 1487 | dowidth(from) |
| 1488 | dowidth(to) |
| 1489 | if from.Width != to.Width { |
| 1490 | s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width) |
| 1491 | return nil |
| 1492 | } |
| 1493 | if etypesign(from.Etype) != etypesign(to.Etype) { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1494 | s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, Econv(from.Etype), to, Econv(to.Etype)) |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1495 | return nil |
| 1496 | } |
| 1497 | |
Ian Lance Taylor | 88e1803 | 2016-03-01 15:17:34 -0800 | [diff] [blame] | 1498 | if instrumenting { |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1499 | // These appear to be fine, but they fail the |
| 1500 | // integer constraint below, so okay them here. |
| 1501 | // Sample non-integer conversion: map[string]string -> *uint8 |
| 1502 | return v |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1503 | } |
| 1504 | |
| 1505 | if etypesign(from.Etype) == 0 { |
| 1506 | s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to) |
| 1507 | return nil |
| 1508 | } |
| 1509 | |
| 1510 | // integer, same width, same sign |
| 1511 | return v |
| 1512 | |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 1513 | case OCONV: |
| 1514 | x := s.expr(n.Left) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1515 | ft := n.Left.Type // from type |
| 1516 | tt := n.Type // to type |
| 1517 | if ft.IsInteger() && tt.IsInteger() { |
| 1518 | var op ssa.Op |
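// The switches below key on 10*ft.Size() + tt.Size(), encoding the source
// width in the tens digit and the destination width in the ones digit;
// e.g. 84 means an 8-byte source converted to a 4-byte destination.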
| 1519 | if tt.Size() == ft.Size() { |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1520 | op = ssa.OpCopy |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1521 | } else if tt.Size() < ft.Size() { |
| 1522 | // truncation |
| 1523 | switch 10*ft.Size() + tt.Size() { |
| 1524 | case 21: |
| 1525 | op = ssa.OpTrunc16to8 |
| 1526 | case 41: |
| 1527 | op = ssa.OpTrunc32to8 |
| 1528 | case 42: |
| 1529 | op = ssa.OpTrunc32to16 |
| 1530 | case 81: |
| 1531 | op = ssa.OpTrunc64to8 |
| 1532 | case 82: |
| 1533 | op = ssa.OpTrunc64to16 |
| 1534 | case 84: |
| 1535 | op = ssa.OpTrunc64to32 |
| 1536 | default: |
| 1537 | s.Fatalf("weird integer truncation %s -> %s", ft, tt) |
| 1538 | } |
| 1539 | } else if ft.IsSigned() { |
| 1540 | // sign extension |
| 1541 | switch 10*ft.Size() + tt.Size() { |
| 1542 | case 12: |
| 1543 | op = ssa.OpSignExt8to16 |
| 1544 | case 14: |
| 1545 | op = ssa.OpSignExt8to32 |
| 1546 | case 18: |
| 1547 | op = ssa.OpSignExt8to64 |
| 1548 | case 24: |
| 1549 | op = ssa.OpSignExt16to32 |
| 1550 | case 28: |
| 1551 | op = ssa.OpSignExt16to64 |
| 1552 | case 48: |
| 1553 | op = ssa.OpSignExt32to64 |
| 1554 | default: |
| 1555 | s.Fatalf("bad integer sign extension %s -> %s", ft, tt) |
| 1556 | } |
| 1557 | } else { |
| 1558 | // zero extension |
| 1559 | switch 10*ft.Size() + tt.Size() { |
| 1560 | case 12: |
| 1561 | op = ssa.OpZeroExt8to16 |
| 1562 | case 14: |
| 1563 | op = ssa.OpZeroExt8to32 |
| 1564 | case 18: |
| 1565 | op = ssa.OpZeroExt8to64 |
| 1566 | case 24: |
| 1567 | op = ssa.OpZeroExt16to32 |
| 1568 | case 28: |
| 1569 | op = ssa.OpZeroExt16to64 |
| 1570 | case 48: |
| 1571 | op = ssa.OpZeroExt32to64 |
| 1572 | default: |
| 1573 | s.Fatalf("weird integer sign extension %s -> %s", ft, tt) |
| 1574 | } |
| 1575 | } |
| 1576 | return s.newValue1(op, n.Type, x) |
| 1577 | } |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1578 | |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1579 | if ft.IsFloat() || tt.IsFloat() { |
| 1580 | conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}] |
| 1581 | if !ok { |
| 1582 | s.Fatalf("weird float conversion %s -> %s", ft, tt) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1583 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1584 | op1, op2, it := conv.op1, conv.op2, conv.intermediateType |
| 1585 | |
| 1586 | if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid { |
| 1587 | // normal case, not tripping over unsigned 64 |
| 1588 | if op1 == ssa.OpCopy { |
| 1589 | if op2 == ssa.OpCopy { |
| 1590 | return x |
| 1591 | } |
| 1592 | return s.newValue1(op2, n.Type, x) |
| 1593 | } |
| 1594 | if op2 == ssa.OpCopy { |
| 1595 | return s.newValue1(op1, n.Type, x) |
| 1596 | } |
| 1597 | return s.newValue1(op2, n.Type, s.newValue1(op1, Types[it], x)) |
| 1598 | } |
| 1599 | // Tricky 64-bit unsigned cases. |
| 1600 | if ft.IsInteger() { |
| 1601 | // therefore tt is float32 or float64, and ft is also unsigned |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1602 | if tt.Size() == 4 { |
| 1603 | return s.uint64Tofloat32(n, x, ft, tt) |
| 1604 | } |
| 1605 | if tt.Size() == 8 { |
| 1606 | return s.uint64Tofloat64(n, x, ft, tt) |
| 1607 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1608 | s.Fatalf("weird unsigned integer to float conversion %s -> %s", ft, tt) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1609 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1610 | // therefore ft is float32 or float64, and tt is unsigned integer |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1611 | if ft.Size() == 4 { |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1612 | return s.float32ToUint64(n, x, ft, tt) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1613 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1614 | if ft.Size() == 8 { |
| 1615 | return s.float64ToUint64(n, x, ft, tt) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1616 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1617 | s.Fatalf("weird float to unsigned integer conversion %s -> %s", ft, tt) |
| 1618 | return nil |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1619 | } |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1620 | |
| 1621 | if ft.IsComplex() && tt.IsComplex() { |
| 1622 | var op ssa.Op |
| 1623 | if ft.Size() == tt.Size() { |
| 1624 | op = ssa.OpCopy |
| 1625 | } else if ft.Size() == 8 && tt.Size() == 16 { |
| 1626 | op = ssa.OpCvt32Fto64F |
| 1627 | } else if ft.Size() == 16 && tt.Size() == 8 { |
| 1628 | op = ssa.OpCvt64Fto32F |
| 1629 | } else { |
| 1630 | s.Fatalf("weird complex conversion %s -> %s", ft, tt) |
| 1631 | } |
| 1632 | ftp := floatForComplex(ft) |
| 1633 | ttp := floatForComplex(tt) |
| 1634 | return s.newValue2(ssa.OpComplexMake, tt, |
| 1635 | s.newValue1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, x)), |
| 1636 | s.newValue1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x))) |
| 1637 | } |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1638 | |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 1639 | s.Unimplementedf("unhandled OCONV %s -> %s", Econv(n.Left.Type.Etype), Econv(n.Type.Etype)) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1640 | return nil |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1641 | |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 1642 | case ODOTTYPE: |
| 1643 | res, _ := s.dottype(n, false) |
| 1644 | return res |
| 1645 | |
Josh Bleecher Snyder | 46815b9 | 2015-06-24 17:48:22 -0700 | [diff] [blame] | 1646 | // binary ops |
| 1647 | case OLT, OEQ, ONE, OLE, OGE, OGT: |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1648 | a := s.expr(n.Left) |
| 1649 | b := s.expr(n.Right) |
Keith Randall | db380bf | 2015-09-10 11:05:42 -0700 | [diff] [blame] | 1650 | if n.Left.Type.IsComplex() { |
Keith Randall | c244ce0 | 2015-09-10 14:59:00 -0700 | [diff] [blame] | 1651 | pt := floatForComplex(n.Left.Type) |
Keith Randall | db380bf | 2015-09-10 11:05:42 -0700 | [diff] [blame] | 1652 | op := s.ssaOp(OEQ, pt) |
| 1653 | r := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)) |
| 1654 | i := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)) |
| 1655 | c := s.newValue2(ssa.OpAnd8, Types[TBOOL], r, i) |
| 1656 | switch n.Op { |
| 1657 | case OEQ: |
| 1658 | return c |
| 1659 | case ONE: |
| 1660 | return s.newValue1(ssa.OpNot, Types[TBOOL], c) |
| 1661 | default: |
| 1662 | s.Fatalf("ordered complex compare %s", opnames[n.Op]) |
| 1663 | } |
Keith Randall | db380bf | 2015-09-10 11:05:42 -0700 | [diff] [blame] | 1664 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1665 | return s.newValue2(s.ssaOp(n.Op, n.Left.Type), Types[TBOOL], a, b) |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1666 | case OMUL: |
| 1667 | a := s.expr(n.Left) |
| 1668 | b := s.expr(n.Right) |
| 1669 | if n.Type.IsComplex() { |
| 1670 | mulop := ssa.OpMul64F |
| 1671 | addop := ssa.OpAdd64F |
| 1672 | subop := ssa.OpSub64F |
| 1673 | pt := floatForComplex(n.Type) // Could be Float32 or Float64 |
| 1674 | wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error |
| 1675 | |
| 1676 | areal := s.newValue1(ssa.OpComplexReal, pt, a) |
| 1677 | breal := s.newValue1(ssa.OpComplexReal, pt, b) |
| 1678 | aimag := s.newValue1(ssa.OpComplexImag, pt, a) |
| 1679 | bimag := s.newValue1(ssa.OpComplexImag, pt, b) |
| 1680 | |
| 1681 | if pt != wt { // Widen for calculation |
| 1682 | areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal) |
| 1683 | breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal) |
| 1684 | aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag) |
| 1685 | bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag) |
| 1686 | } |
| 1687 | |
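// Complex multiplication: (a+bi)*(c+di) = (ac-bd) + (ad+bc)i,
// with a,b = areal,aimag and c,d = breal,bimag.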
| 1688 | xreal := s.newValue2(subop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag)) |
| 1689 | ximag := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, bimag), s.newValue2(mulop, wt, aimag, breal)) |
| 1690 | |
| 1691 | if pt != wt { // Narrow to store back |
| 1692 | xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal) |
| 1693 | ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag) |
| 1694 | } |
| 1695 | |
| 1696 | return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag) |
| 1697 | } |
| 1698 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1699 | |
| 1700 | case ODIV: |
| 1701 | a := s.expr(n.Left) |
| 1702 | b := s.expr(n.Right) |
| 1703 | if n.Type.IsComplex() { |
| 1704 | // TODO this is not executed because the front-end substitutes a runtime call. |
| 1705 | // That probably ought to change; with modest optimization the widen/narrow |
| 1706 | // conversions could all be elided in larger expression trees. |
| 1707 | mulop := ssa.OpMul64F |
| 1708 | addop := ssa.OpAdd64F |
| 1709 | subop := ssa.OpSub64F |
| 1710 | divop := ssa.OpDiv64F |
| 1711 | pt := floatForComplex(n.Type) // Could be Float32 or Float64 |
| 1712 | wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error |
| 1713 | |
| 1714 | areal := s.newValue1(ssa.OpComplexReal, pt, a) |
| 1715 | breal := s.newValue1(ssa.OpComplexReal, pt, b) |
| 1716 | aimag := s.newValue1(ssa.OpComplexImag, pt, a) |
| 1717 | bimag := s.newValue1(ssa.OpComplexImag, pt, b) |
| 1718 | |
| 1719 | if pt != wt { // Widen for calculation |
| 1720 | areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal) |
| 1721 | breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal) |
| 1722 | aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag) |
| 1723 | bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag) |
| 1724 | } |
| 1725 | |
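// Complex division: (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c + d*d),
// with a,b = areal,aimag and c,d = breal,bimag.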
| 1726 | denom := s.newValue2(addop, wt, s.newValue2(mulop, wt, breal, breal), s.newValue2(mulop, wt, bimag, bimag)) |
| 1727 | xreal := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag)) |
| 1728 | ximag := s.newValue2(subop, wt, s.newValue2(mulop, wt, aimag, breal), s.newValue2(mulop, wt, areal, bimag)) |
| 1729 | |
| 1730 | // TODO not sure if this is best done in wide precision or narrow.
| 1731 | // Double-rounding might be an issue. |
| 1732 | // Note that the pre-SSA implementation does the entire calculation |
| 1733 | // in wide format, so wide is compatible. |
| 1734 | xreal = s.newValue2(divop, wt, xreal, denom) |
| 1735 | ximag = s.newValue2(divop, wt, ximag, denom) |
| 1736 | |
| 1737 | if pt != wt { // Narrow to store back |
| 1738 | xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal) |
| 1739 | ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag) |
| 1740 | } |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1741 | return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag) |
| 1742 | } |
David Chase | 18559e2 | 2015-10-28 13:55:46 -0400 | [diff] [blame] | 1743 | if n.Type.IsFloat() { |
| 1744 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1745 | } else { |
| 1746 | // do a size-appropriate check for zero |
| 1747 | cmp := s.newValue2(s.ssaOp(ONE, n.Type), Types[TBOOL], b, s.zeroVal(n.Type)) |
| 1748 | s.check(cmp, panicdivide) |
| 1749 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1750 | } |
| 1751 | case OMOD: |
| 1752 | a := s.expr(n.Left) |
| 1753 | b := s.expr(n.Right) |
| 1754 | // do a size-appropriate check for zero |
| 1755 | cmp := s.newValue2(s.ssaOp(ONE, n.Type), Types[TBOOL], b, s.zeroVal(n.Type)) |
| 1756 | s.check(cmp, panicdivide) |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1757 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1758 | case OADD, OSUB: |
| 1759 | a := s.expr(n.Left) |
| 1760 | b := s.expr(n.Right) |
| 1761 | if n.Type.IsComplex() { |
| 1762 | pt := floatForComplex(n.Type) |
| 1763 | op := s.ssaOp(n.Op, pt) |
| 1764 | return s.newValue2(ssa.OpComplexMake, n.Type, |
| 1765 | s.newValue2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)), |
| 1766 | s.newValue2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))) |
| 1767 | } |
| 1768 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
David Chase | 18559e2 | 2015-10-28 13:55:46 -0400 | [diff] [blame] | 1769 | case OAND, OOR, OHMUL, OXOR: |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1770 | a := s.expr(n.Left) |
| 1771 | b := s.expr(n.Right) |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1772 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1773 | case OLSH, ORSH: |
| 1774 | a := s.expr(n.Left) |
| 1775 | b := s.expr(n.Right) |
| 1776 | return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b) |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1777 | case OLROT: |
| 1778 | a := s.expr(n.Left) |
| 1779 | i := n.Right.Int() |
| 1780 | if i <= 0 || i >= n.Type.Size()*8 { |
| 1781 | s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i) |
| 1782 | } |
| 1783 | return s.newValue1I(s.ssaRotateOp(n.Op, n.Type), a.Type, i, a) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1784 | case OANDAND, OOROR: |
| 1785 | // To implement OANDAND (and OOROR), we introduce a |
| 1786 | // new temporary variable to hold the result. The |
| 1787 | // variable is associated with the OANDAND node in the |
| 1788 | // s.vars table (normally variables are only |
| 1789 | // associated with ONAME nodes). We convert |
| 1790 | // A && B |
| 1791 | // to |
| 1792 | // var = A |
| 1793 | // if var { |
| 1794 | // var = B |
| 1795 | // } |
| 1796 | // Using var in the subsequent block introduces the |
| 1797 | // necessary phi variable. |
| 1798 | el := s.expr(n.Left) |
| 1799 | s.vars[n] = el |
| 1800 | |
| 1801 | b := s.endBlock() |
| 1802 | b.Kind = ssa.BlockIf |
| 1803 | b.Control = el |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 1804 | // In theory, we should set b.Likely here based on context. |
| 1805 | // However, gc only gives us likeliness hints |
| 1806 | // in a single place, for plain OIF statements, |
| 1807 | // and passing around context is finicky, so don't bother for now.
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1808 | |
| 1809 | bRight := s.f.NewBlock(ssa.BlockPlain) |
| 1810 | bResult := s.f.NewBlock(ssa.BlockPlain) |
| 1811 | if n.Op == OANDAND { |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1812 | b.AddEdgeTo(bRight) |
| 1813 | b.AddEdgeTo(bResult) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1814 | } else if n.Op == OOROR { |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1815 | b.AddEdgeTo(bResult) |
| 1816 | b.AddEdgeTo(bRight) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1817 | } |
| 1818 | |
| 1819 | s.startBlock(bRight) |
| 1820 | er := s.expr(n.Right) |
| 1821 | s.vars[n] = er |
| 1822 | |
| 1823 | b = s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1824 | b.AddEdgeTo(bResult) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1825 | |
| 1826 | s.startBlock(bResult) |
Josh Bleecher Snyder | 35ad1fc | 2015-08-27 10:11:08 -0700 | [diff] [blame] | 1827 | return s.variable(n, Types[TBOOL]) |
Keith Randall | 7e39072 | 2015-09-12 14:14:02 -0700 | [diff] [blame] | 1828 | case OCOMPLEX: |
| 1829 | r := s.expr(n.Left) |
| 1830 | i := s.expr(n.Right) |
| 1831 | return s.newValue2(ssa.OpComplexMake, n.Type, r, i) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1832 | |
Josh Bleecher Snyder | 4178f20 | 2015-09-05 19:28:00 -0700 | [diff] [blame] | 1833 | // unary ops |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1834 | case OMINUS: |
| 1835 | a := s.expr(n.Left) |
| 1836 | if n.Type.IsComplex() { |
| 1837 | tp := floatForComplex(n.Type) |
| 1838 | negop := s.ssaOp(n.Op, tp) |
| 1839 | return s.newValue2(ssa.OpComplexMake, n.Type, |
| 1840 | s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)), |
| 1841 | s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a))) |
| 1842 | } |
| 1843 | return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a) |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 1844 | case ONOT, OCOM, OSQRT: |
Brad Fitzpatrick | d9c72d7 | 2015-07-10 11:25:48 -0600 | [diff] [blame] | 1845 | a := s.expr(n.Left) |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 1846 | return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a) |
Keith Randall | 2f51807 | 2015-09-10 11:37:09 -0700 | [diff] [blame] | 1847 | case OIMAG, OREAL: |
| 1848 | a := s.expr(n.Left) |
| 1849 | return s.newValue1(s.ssaOp(n.Op, n.Left.Type), n.Type, a) |
Josh Bleecher Snyder | 4178f20 | 2015-09-05 19:28:00 -0700 | [diff] [blame] | 1850 | case OPLUS: |
| 1851 | return s.expr(n.Left) |
Brad Fitzpatrick | d9c72d7 | 2015-07-10 11:25:48 -0600 | [diff] [blame] | 1852 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1853 | case OADDR: |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1854 | return s.addr(n.Left, n.Bounded) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1855 | |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 1856 | case OINDREG: |
| 1857 | if int(n.Reg) != Thearch.REGSP { |
| 1858 | s.Unimplementedf("OINDREG of non-SP register %s in expr: %v", obj.Rconv(int(n.Reg)), n) |
| 1859 | return nil |
| 1860 | } |
| 1861 | addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp) |
| 1862 | return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem()) |
| 1863 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1864 | case OIND: |
| 1865 | p := s.expr(n.Left) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1866 | s.nilCheck(p) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1867 | return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1868 | |
Keith Randall | cd7e059 | 2015-07-15 21:33:49 -0700 | [diff] [blame] | 1869 | case ODOT: |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 1870 | t := n.Left.Type |
| 1871 | if canSSAType(t) { |
| 1872 | v := s.expr(n.Left) |
| 1873 | return s.newValue1I(ssa.OpStructSelect, n.Type, fieldIdx(n), v) |
| 1874 | } |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1875 | p := s.addr(n, false) |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1876 | return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()) |
Keith Randall | cd7e059 | 2015-07-15 21:33:49 -0700 | [diff] [blame] | 1877 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1878 | case ODOTPTR: |
| 1879 | p := s.expr(n.Left) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1880 | s.nilCheck(p) |
Josh Bleecher Snyder | da1802f | 2016-03-04 12:34:43 -0800 | [diff] [blame] | 1881 | p = s.newValue1I(ssa.OpOffPtr, p.Type, n.Xoffset, p) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1882 | return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1883 | |
| 1884 | case OINDEX: |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1885 | switch { |
| 1886 | case n.Left.Type.IsString(): |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1887 | a := s.expr(n.Left) |
| 1888 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1889 | i = s.extendIndex(i) |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1890 | if !n.Bounded { |
| 1891 | len := s.newValue1(ssa.OpStringLen, Types[TINT], a) |
| 1892 | s.boundsCheck(i, len) |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1893 | } |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1894 | ptrtyp := Ptrto(Types[TUINT8]) |
| 1895 | ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a) |
Josh Bleecher Snyder | da1802f | 2016-03-04 12:34:43 -0800 | [diff] [blame] | 1896 | if Isconst(n.Right, CTINT) { |
| 1897 | ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int(), ptr) |
| 1898 | } else { |
| 1899 | ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i) |
| 1900 | } |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1901 | return s.newValue2(ssa.OpLoad, Types[TUINT8], ptr, s.mem()) |
| 1902 | case n.Left.Type.IsSlice(): |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1903 | p := s.addr(n, false) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1904 | return s.newValue2(ssa.OpLoad, n.Left.Type.Type, p, s.mem()) |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1905 | case n.Left.Type.IsArray(): |
| 1906 | // TODO: fix when we can SSA arrays of length 1. |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 1907 | p := s.addr(n, false) |
Keith Randall | 9703564 | 2015-10-09 09:33:29 -0700 | [diff] [blame] | 1908 | return s.newValue2(ssa.OpLoad, n.Left.Type.Type, p, s.mem()) |
| 1909 | default: |
| 1910 | s.Fatalf("bad type for index %v", n.Left.Type) |
| 1911 | return nil |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1912 | } |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1913 | |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1914 | case OLEN, OCAP: |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1915 | switch { |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1916 | case n.Left.Type.IsSlice(): |
| 1917 | op := ssa.OpSliceLen |
| 1918 | if n.Op == OCAP { |
| 1919 | op = ssa.OpSliceCap |
| 1920 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1921 | return s.newValue1(op, Types[TINT], s.expr(n.Left)) |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1922 | case n.Left.Type.IsString(): // string; not reachable for OCAP |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1923 | return s.newValue1(ssa.OpStringLen, Types[TINT], s.expr(n.Left)) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 1924 | case n.Left.Type.IsMap(), n.Left.Type.IsChan(): |
| 1925 | return s.referenceTypeBuiltin(n, s.expr(n.Left)) |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1926 | default: // array |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1927 | return s.constInt(Types[TINT], n.Left.Type.Bound) |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1928 | } |
| 1929 | |
Josh Bleecher Snyder | a2d1580 | 2015-08-12 10:12:14 -0700 | [diff] [blame] | 1930 | case OSPTR: |
| 1931 | a := s.expr(n.Left) |
| 1932 | if n.Left.Type.IsSlice() { |
| 1933 | return s.newValue1(ssa.OpSlicePtr, n.Type, a) |
| 1934 | } else { |
| 1935 | return s.newValue1(ssa.OpStringPtr, n.Type, a) |
| 1936 | } |
| 1937 | |
Keith Randall | d1c15a0 | 2015-08-04 15:47:22 -0700 | [diff] [blame] | 1938 | case OITAB: |
| 1939 | a := s.expr(n.Left) |
| 1940 | return s.newValue1(ssa.OpITab, n.Type, a) |
| 1941 | |
Josh Bleecher Snyder | 1792b36 | 2015-09-05 19:28:27 -0700 | [diff] [blame] | 1942 | case OEFACE: |
| 1943 | tab := s.expr(n.Left) |
| 1944 | data := s.expr(n.Right) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 1945 | // The frontend allows putting things like struct{*byte} in |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 1946 | // the data portion of an eface. But we don't want struct{*byte} |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 1947 | // as a register type because (among other reasons) the liveness |
| 1948 | // analysis is confused by the "fat" variables that result from |
| 1949 | // such types being spilled. |
| 1950 | // So here we ensure that we are selecting the underlying pointer |
| 1951 | // when we build an eface. |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 1952 | // TODO: get rid of this now that structs can be SSA'd? |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 1953 | for !data.Type.IsPtr() { |
| 1954 | switch { |
| 1955 | case data.Type.IsArray(): |
Matthew Dempsky | 0b28187 | 2016-03-10 14:35:39 -0800 | [diff] [blame] | 1956 | data = s.newValue1I(ssa.OpArrayIndex, data.Type.ElemType(), 0, data) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 1957 | case data.Type.IsStruct(): |
| 1958 | for i := data.Type.NumFields() - 1; i >= 0; i-- { |
| 1959 | f := data.Type.FieldType(i) |
| 1960 | if f.Size() == 0 { |
| 1961 | // eface type could also be struct{p *byte; q [0]int} |
| 1962 | continue |
| 1963 | } |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 1964 | data = s.newValue1I(ssa.OpStructSelect, f, i, data) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 1965 | break |
| 1966 | } |
| 1967 | default: |
| 1968 | s.Fatalf("type being put into an eface isn't a pointer") |
| 1969 | } |
| 1970 | } |
Josh Bleecher Snyder | 1792b36 | 2015-09-05 19:28:27 -0700 | [diff] [blame] | 1971 | return s.newValue2(ssa.OpIMake, n.Type, tab, data) |
| 1972 | |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 1973 | case OSLICE, OSLICEARR: |
| 1974 | v := s.expr(n.Left) |
| 1975 | var i, j *ssa.Value |
| 1976 | if n.Right.Left != nil { |
| 1977 | i = s.extendIndex(s.expr(n.Right.Left)) |
| 1978 | } |
| 1979 | if n.Right.Right != nil { |
| 1980 | j = s.extendIndex(s.expr(n.Right.Right)) |
| 1981 | } |
| 1982 | p, l, c := s.slice(n.Left.Type, v, i, j, nil) |
| 1983 | return s.newValue3(ssa.OpSliceMake, n.Type, p, l, c) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1984 | case OSLICESTR: |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 1985 | v := s.expr(n.Left) |
| 1986 | var i, j *ssa.Value |
| 1987 | if n.Right.Left != nil { |
| 1988 | i = s.extendIndex(s.expr(n.Right.Left)) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1989 | } |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 1990 | if n.Right.Right != nil { |
| 1991 | j = s.extendIndex(s.expr(n.Right.Right)) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1992 | } |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 1993 | p, l, _ := s.slice(n.Left.Type, v, i, j, nil) |
| 1994 | return s.newValue2(ssa.OpStringMake, n.Type, p, l) |
| 1995 | case OSLICE3, OSLICE3ARR: |
| 1996 | v := s.expr(n.Left) |
| 1997 | var i *ssa.Value |
| 1998 | if n.Right.Left != nil { |
| 1999 | i = s.extendIndex(s.expr(n.Right.Left)) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2000 | } |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 2001 | j := s.extendIndex(s.expr(n.Right.Right.Left)) |
| 2002 | k := s.extendIndex(s.expr(n.Right.Right.Right)) |
| 2003 | p, l, c := s.slice(n.Left.Type, v, i, j, k) |
| 2004 | return s.newValue3(ssa.OpSliceMake, n.Type, p, l, c) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2005 | |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2006 | case OCALLFUNC, OCALLINTER, OCALLMETH: |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2007 | a := s.call(n, callNormal) |
| 2008 | return s.newValue2(ssa.OpLoad, n.Type, a, s.mem()) |
Josh Bleecher Snyder | 3d23afb | 2015-08-12 11:22:16 -0700 | [diff] [blame] | 2009 | |
| 2010 | case OGETG: |
Keith Randall | d694f83 | 2015-10-19 18:54:40 -0700 | [diff] [blame] | 2011 | return s.newValue1(ssa.OpGetG, n.Type, s.mem()) |
Josh Bleecher Snyder | 3d23afb | 2015-08-12 11:22:16 -0700 | [diff] [blame] | 2012 | |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2013 | case OAPPEND: |
| 2014 | // append(s, e1, e2, e3). Compile like: |
| 2015 | // ptr,len,cap := s |
| 2016 | // newlen := len + 3 |
| 2017 | // if newlen > s.cap { |
| 2018 | // ptr,_,cap = growslice(s, newlen) |
| 2019 | // } |
| 2020 | // *(ptr+len) = e1 |
| 2021 | // *(ptr+len+1) = e2 |
| 2022 | // *(ptr+len+2) = e3 |
| 2023 | // makeslice(ptr,newlen,cap) |
| 2024 | |
| 2025 | et := n.Type.Type |
| 2026 | pt := Ptrto(et) |
| 2027 | |
| 2028 | // Evaluate slice |
Ian Lance Taylor | 38921b3 | 2016-03-08 15:10:26 -0800 | [diff] [blame] | 2029 | slice := s.expr(n.List.First()) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2030 | |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2031 | // Allocate new blocks |
| 2032 | grow := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2033 | assign := s.f.NewBlock(ssa.BlockPlain) |
| 2034 | |
| 2035 | // Decide if we need to grow |
Ian Lance Taylor | 38921b3 | 2016-03-08 15:10:26 -0800 | [diff] [blame] | 2036 | nargs := int64(n.List.Len() - 1) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2037 | p := s.newValue1(ssa.OpSlicePtr, pt, slice) |
| 2038 | l := s.newValue1(ssa.OpSliceLen, Types[TINT], slice) |
| 2039 | c := s.newValue1(ssa.OpSliceCap, Types[TINT], slice) |
| 2040 | nl := s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], l, s.constInt(Types[TINT], nargs)) |
| 2041 | cmp := s.newValue2(s.ssaOp(OGT, Types[TINT]), Types[TBOOL], nl, c) |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2042 | s.vars[&ptrVar] = p |
| 2043 | s.vars[&capVar] = c |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2044 | b := s.endBlock() |
| 2045 | b.Kind = ssa.BlockIf |
| 2046 | b.Likely = ssa.BranchUnlikely |
| 2047 | b.Control = cmp |
| 2048 | b.AddEdgeTo(grow) |
| 2049 | b.AddEdgeTo(assign) |
| 2050 | |
| 2051 | // Call growslice |
| 2052 | s.startBlock(grow) |
| 2053 | taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(n.Type)}, s.sb) |
| 2054 | |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2055 | r := s.rtcall(growslice, true, []*Type{pt, Types[TINT], Types[TINT]}, taddr, p, l, c, nl) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2056 | |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2057 | s.vars[&ptrVar] = r[0] |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2058 | // Note: we don't need to read r[1], the result's length. It will be nl. |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2059 | // (or maybe we should, we just have to spill/restore nl otherwise?) |
| 2060 | s.vars[&capVar] = r[2] |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2061 | b = s.endBlock() |
| 2062 | b.AddEdgeTo(assign) |
| 2063 | |
| 2064 | // assign new elements to slots |
| 2065 | s.startBlock(assign) |
Keith Randall | 9aba7e7 | 2015-10-05 13:48:40 -0700 | [diff] [blame] | 2066 | |
| 2067 | // Evaluate args |
| 2068 | args := make([]*ssa.Value, 0, nargs) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 2069 | store := make([]bool, 0, nargs) |
Ian Lance Taylor | cd6619d | 2016-03-09 12:39:36 -0800 | [diff] [blame] | 2070 | for _, n := range n.List.Slice()[1:] { |
| 2071 | if canSSAType(n.Type) { |
| 2072 | args = append(args, s.expr(n)) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 2073 | store = append(store, true) |
| 2074 | } else { |
Ian Lance Taylor | cd6619d | 2016-03-09 12:39:36 -0800 | [diff] [blame] | 2075 | args = append(args, s.addr(n, false)) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 2076 | store = append(store, false) |
| 2077 | } |
Keith Randall | 9aba7e7 | 2015-10-05 13:48:40 -0700 | [diff] [blame] | 2078 | } |
| 2079 | |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2080 | p = s.variable(&ptrVar, pt) // generates phi for ptr |
| 2081 | c = s.variable(&capVar, Types[TINT]) // generates phi for cap |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2082 | p2 := s.newValue2(ssa.OpPtrIndex, pt, p, l) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2083 | // TODO: just one write barrier call for all of these writes? |
| 2084 | // TODO: maybe just one writeBarrier.enabled check? |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2085 | for i, arg := range args { |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 2086 | addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(Types[TINT], int64(i))) |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 2087 | if store[i] { |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2088 | if haspointers(et) { |
| 2089 | s.insertWBstore(et, addr, arg, n.Lineno) |
| 2090 | } else { |
| 2091 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, et.Size(), addr, arg, s.mem()) |
| 2092 | } |
Keith Randall | 808d7c7 | 2015-10-07 14:35:25 -0700 | [diff] [blame] | 2093 | } else { |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2094 | if haspointers(et) { |
| 2095 | s.insertWBmove(et, addr, arg, n.Lineno) |
| 2096 | } else { |
| 2097 | s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, et.Size(), addr, arg, s.mem()) |
| 2098 | } |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2099 | } |
| 2100 | } |
| 2101 | |
| 2102 | // make result |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2103 | delete(s.vars, &ptrVar) |
| 2104 | delete(s.vars, &capVar) |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2105 | return s.newValue3(ssa.OpSliceMake, n.Type, p, nl, c) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2106 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2107 | default: |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 2108 | s.Unimplementedf("unhandled expr %s", opnames[n.Op]) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2109 | return nil |
| 2110 | } |
| 2111 | } |
| 2112 | |
Keith Randall | 9918731 | 2015-11-02 16:56:53 -0800 | [diff] [blame] | 2113 | // condBranch evaluates the boolean expression cond and branches to yes |
| 2114 | // if cond is true and no if cond is false. |
| 2115 | // This function is intended to handle && and || better than just calling |
| 2116 | // s.expr(cond) and branching on the result. |
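// For example (an illustrative sketch; the block names here are invented),
// condBranch(a && b, yes, no, 0) lowers roughly to:
//
//	current: evaluate a; BlockIf -> (mid, no)
//	mid:     evaluate b; BlockIf -> (yes, no)
//
// so b is evaluated only when a is true, preserving short-circuit order.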
| 2117 | func (s *state) condBranch(cond *Node, yes, no *ssa.Block, likely int8) { |
| 2118 | if cond.Op == OANDAND { |
| 2119 | mid := s.f.NewBlock(ssa.BlockPlain) |
| 2120 | s.stmtList(cond.Ninit) |
| 2121 | s.condBranch(cond.Left, mid, no, max8(likely, 0)) |
| 2122 | s.startBlock(mid) |
| 2123 | s.condBranch(cond.Right, yes, no, likely) |
| 2124 | return |
| 2125 | // Note: if likely==1, then both recursive calls pass 1. |
| 2126 | // If likely==-1, then we don't have enough information to decide |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2127 | // whether the first branch is likely or not. So we pass 0 for |
Keith Randall | 9918731 | 2015-11-02 16:56:53 -0800 | [diff] [blame] | 2128 | // the likeliness of the first branch. |
| 2129 | // TODO: have the frontend give us branch prediction hints for |
| 2130 | // OANDAND and OOROR nodes (if it ever has such info). |
| 2131 | } |
| 2132 | if cond.Op == OOROR { |
| 2133 | mid := s.f.NewBlock(ssa.BlockPlain) |
| 2134 | s.stmtList(cond.Ninit) |
| 2135 | s.condBranch(cond.Left, yes, mid, min8(likely, 0)) |
| 2136 | s.startBlock(mid) |
| 2137 | s.condBranch(cond.Right, yes, no, likely) |
| 2138 | return |
| 2139 | // Note: if likely==-1, then both recursive calls pass -1. |
| 2140 | // If likely==1, then we don't have enough info to decide |
| 2141 | // the likelihood of the first branch. |
| 2142 | } |
Keith Randall | d19bfc3 | 2015-11-03 09:30:17 -0800 | [diff] [blame] | 2143 | if cond.Op == ONOT { |
| 2144 | s.stmtList(cond.Ninit) |
| 2145 | s.condBranch(cond.Left, no, yes, -likely) |
| 2146 | return |
| 2147 | } |
Keith Randall | 9918731 | 2015-11-02 16:56:53 -0800 | [diff] [blame] | 2148 | c := s.expr(cond) |
| 2149 | b := s.endBlock() |
| 2150 | b.Kind = ssa.BlockIf |
| 2151 | b.Control = c |
| 2152 | b.Likely = ssa.BranchPrediction(likely) // gc and ssa both use -1/0/+1 for likeliness |
| 2153 | b.AddEdgeTo(yes) |
| 2154 | b.AddEdgeTo(no) |
| 2155 | } |
| 2156 | |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2157 | // assign does left = right. |
| 2158 | // Right has already been evaluated to ssa, left has not. |
| 2159 | // If deref is true, then we do left = *right instead (and right has already been nil-checked). |
| 2160 | // If deref is true and right == nil, just do left = 0. |
| 2161 | // Include a write barrier if wb is true. |
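// For example (illustrative only; the exact caller shapes vary):
//	x = y   -> assign(x, s.expr(y), wb, false, line)  // plain store or SSA update
//	x = *p  -> assign(x, p, wb, true, line)           // deref: mem->mem move from p
//	x = T{} -> assign(x, nil, wb, true, line)         // deref with nil right: zero x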
| 2162 | func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, line int32) { |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 2163 | if left.Op == ONAME && isblank(left) { |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 2164 | return |
| 2165 | } |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 2166 | t := left.Type |
| 2167 | dowidth(t) |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 2168 | if s.canSSA(left) { |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2169 | if deref { |
| 2170 | s.Fatalf("can SSA LHS %s but not RHS %s", left, right) |
| 2171 | } |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2172 | if left.Op == ODOT { |
| 2173 | // We're assigning to a field of an ssa-able value. |
| 2174 | // We need to build a new structure with the new value for the |
| 2175 | // field we're assigning and the old values for the other fields. |
| 2176 | // For instance: |
| 2177 | // type T struct {a, b, c int} |
| 2178 | // var x T |
| 2179 | // x.b = 5 |
| 2180 | // For the x.b = 5 assignment we want to generate x = T{x.a, 5, x.c} |
| 2181 | |
| 2182 | // Grab information about the structure type. |
| 2183 | t := left.Left.Type |
| 2184 | nf := t.NumFields() |
| 2185 | idx := fieldIdx(left) |
| 2186 | |
| 2187 | // Grab old value of structure. |
| 2188 | old := s.expr(left.Left) |
| 2189 | |
| 2190 | // Make new structure. |
| 2191 | new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t) |
| 2192 | |
| 2193 | // Add fields as args. |
| 2194 | for i := int64(0); i < nf; i++ { |
| 2195 | if i == idx { |
| 2196 | new.AddArg(right) |
| 2197 | } else { |
| 2198 | new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), i, old)) |
| 2199 | } |
| 2200 | } |
| 2201 | |
| 2202 | // Recursively assign the new value we've made to the base of the dot op. |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2203 | s.assign(left.Left, new, false, false, line) |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2204 | // TODO: do we need to update named values here? |
| 2205 | return |
| 2206 | } |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 2207 | // Update variable assignment. |
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 2208 | s.vars[left] = right |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2209 | s.addNamedValue(left, right) |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 2210 | return |
| 2211 | } |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2212 | // Left is not ssa-able. Compute its address. |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2213 | addr := s.addr(left, false) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2214 | if left.Op == ONAME { |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2215 | s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem()) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2216 | } |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2217 | if deref { |
| 2218 | // Treat as a mem->mem move. |
| 2219 | if right == nil { |
| 2220 | s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, t.Size(), addr, s.mem()) |
| 2221 | return |
| 2222 | } |
| 2223 | if wb { |
| 2224 | s.insertWBmove(t, addr, right, line) |
| 2225 | return |
| 2226 | } |
| 2227 | s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, t.Size(), addr, right, s.mem()) |
| 2228 | return |
Keith Randall | e3869a6 | 2015-09-07 23:18:02 -0700 | [diff] [blame] | 2229 | } |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2230 | // Treat as a store. |
| 2231 | if wb { |
| 2232 | s.insertWBstore(t, addr, right, line) |
| 2233 | return |
| 2234 | } |
| 2235 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, right, s.mem()) |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 2236 | } |
| 2237 | |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 2238 | // zeroVal returns the zero value for type t. |
| 2239 | func (s *state) zeroVal(t *Type) *ssa.Value { |
| 2240 | switch { |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 2241 | case t.IsInteger(): |
| 2242 | switch t.Size() { |
| 2243 | case 1: |
| 2244 | return s.constInt8(t, 0) |
| 2245 | case 2: |
| 2246 | return s.constInt16(t, 0) |
| 2247 | case 4: |
| 2248 | return s.constInt32(t, 0) |
| 2249 | case 8: |
| 2250 | return s.constInt64(t, 0) |
| 2251 | default: |
| 2252 | s.Fatalf("bad sized integer type %s", t) |
| 2253 | } |
Todd Neal | 752fe4d | 2015-08-25 19:21:45 -0500 | [diff] [blame] | 2254 | case t.IsFloat(): |
| 2255 | switch t.Size() { |
| 2256 | case 4: |
| 2257 | return s.constFloat32(t, 0) |
| 2258 | case 8: |
| 2259 | return s.constFloat64(t, 0) |
| 2260 | default: |
| 2261 | s.Fatalf("bad sized float type %s", t) |
| 2262 | } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 2263 | case t.IsComplex(): |
| 2264 | switch t.Size() { |
| 2265 | case 8: |
| 2266 | z := s.constFloat32(Types[TFLOAT32], 0) |
Keith Randall | a5cffb6 | 2015-08-28 13:52:26 -0700 | [diff] [blame] | 2267 | return s.entryNewValue2(ssa.OpComplexMake, t, z, z) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 2268 | case 16: |
| 2269 | z := s.constFloat64(Types[TFLOAT64], 0) |
Keith Randall | a5cffb6 | 2015-08-28 13:52:26 -0700 | [diff] [blame] | 2270 | return s.entryNewValue2(ssa.OpComplexMake, t, z, z) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 2271 | default: |
| 2272 | s.Fatalf("bad sized complex type %s", t) |
| 2273 | } |
| 2274 | |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 2275 | case t.IsString(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 2276 | return s.constEmptyString(t) |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 2277 | case t.IsPtr(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 2278 | return s.constNil(t) |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 2279 | case t.IsBoolean(): |
Josh Bleecher Snyder | cea4414 | 2015-09-08 16:52:25 -0700 | [diff] [blame] | 2280 | return s.constBool(false) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2281 | case t.IsInterface(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 2282 | return s.constInterface(t) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2283 | case t.IsSlice(): |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 2284 | return s.constSlice(t) |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2285 | case t.IsStruct(): |
| 2286 | n := t.NumFields() |
| 2287 | v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t) |
| 2288 | for i := int64(0); i < n; i++ { |
| 2289 | v.AddArg(s.zeroVal(t.FieldType(i).(*Type))) |
| 2290 | } |
| 2291 | return v |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 2292 | } |
| 2293 | s.Unimplementedf("zero for type %v not implemented", t) |
| 2294 | return nil |
| 2295 | } |
| 2296 | |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2297 | type callKind int8 |
| 2298 | |
| 2299 | const ( |
| 2300 | callNormal callKind = iota |
| 2301 | callDefer |
| 2302 | callGo |
| 2303 | ) |
| 2304 | |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2305 | // call generates code to call the function n, using the specified call kind k. |
| 2306 | // Returns the address of the return value (or nil if none). |
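// Sketch of the cases handled below (illustrative):
//	f(x) with static f     -> OpStaticCall on f's Sym
//	fn(x) with fn a value  -> load code pointer from the closure, OpClosureCall
//	i.M(x) interface call  -> load fun pointer from the itab, OpInterCall
//	defer f(x) / go f(x)   -> also store argsize and closure for Deferproc/Newproc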
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2307 | func (s *state) call(n *Node, k callKind) *ssa.Value { |
| 2308 | var sym *Sym // target symbol (if static) |
| 2309 | var closure *ssa.Value // ptr to closure to run (if dynamic) |
| 2310 | var codeptr *ssa.Value // ptr to target code (if dynamic) |
| 2311 | var rcvr *ssa.Value // receiver to set |
| 2312 | fn := n.Left |
| 2313 | switch n.Op { |
| 2314 | case OCALLFUNC: |
| 2315 | if k == callNormal && fn.Op == ONAME && fn.Class == PFUNC { |
| 2316 | sym = fn.Sym |
| 2317 | break |
| 2318 | } |
| 2319 | closure = s.expr(fn) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2320 | case OCALLMETH: |
| 2321 | if fn.Op != ODOTMETH { |
| 2322 | Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn) |
| 2323 | } |
| 2324 | if fn.Right.Op != ONAME { |
| 2325 | Fatalf("OCALLMETH: n.Left.Right not an ONAME: %v", fn.Right) |
| 2326 | } |
| 2327 | if k == callNormal { |
| 2328 | sym = fn.Right.Sym |
| 2329 | break |
| 2330 | } |
| 2331 | n2 := *fn.Right |
| 2332 | n2.Class = PFUNC |
| 2333 | closure = s.expr(&n2) |
| 2334 | // Note: receiver is already assigned in n.List, so we don't |
| 2335 | // want to set it here. |
| 2336 | case OCALLINTER: |
| 2337 | if fn.Op != ODOTINTER { |
Matthew Dempsky | c3dfad5 | 2016-03-07 08:23:55 -0800 | [diff] [blame] | 2338 | Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", Oconv(fn.Op, 0)) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2339 | } |
| 2340 | i := s.expr(fn.Left) |
| 2341 | itab := s.newValue1(ssa.OpITab, Types[TUINTPTR], i) |
| 2342 | itabidx := fn.Xoffset + 3*int64(Widthptr) + 8 // offset of fun field in runtime.itab |
| 2343 | itab = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], itabidx, itab) |
| 2344 | if k == callNormal { |
| 2345 | codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], itab, s.mem()) |
| 2346 | } else { |
| 2347 | closure = itab |
| 2348 | } |
| 2349 | rcvr = s.newValue1(ssa.OpIData, Types[TUINTPTR], i) |
| 2350 | } |
| 2351 | dowidth(fn.Type) |
| 2352 | stksize := fn.Type.Argwid // includes receiver |
| 2353 | |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2354 | // Run all argument assignments. The arg slots have already |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2355 | // been offset by the appropriate amount (+2*widthptr for go/defer, |
| 2356 | // +widthptr for interface calls). |
| 2357 | // For OCALLMETH, the receiver is set in these statements. |
| 2358 | s.stmtList(n.List) |
| 2359 | |
| 2360 | // Set receiver (for interface calls) |
| 2361 | if rcvr != nil { |
Keith Randall | 7c4fbb6 | 2015-10-19 13:56:55 -0700 | [diff] [blame] | 2362 | argStart := Ctxt.FixedFrameSize() |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2363 | if k != callNormal { |
| 2364 | argStart += int64(2 * Widthptr) |
| 2365 | } |
| 2366 | addr := s.entryNewValue1I(ssa.OpOffPtr, Types[TUINTPTR], argStart, s.sp) |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2367 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, rcvr, s.mem()) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2368 | } |
| 2369 | |
| 2370 | // Defer/go args |
| 2371 | if k != callNormal { |
| 2372 | // Write argsize and closure (args to Newproc/Deferproc). |
| 2373 | argsize := s.constInt32(Types[TUINT32], int32(stksize)) |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2374 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, s.sp, argsize, s.mem()) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2375 | addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(Types[TUINTPTR]), int64(Widthptr), s.sp) |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2376 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem()) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2377 | stksize += 2 * int64(Widthptr) |
| 2378 | } |
| 2379 | |
| 2380 | // call target |
| 2381 | bNext := s.f.NewBlock(ssa.BlockPlain) |
| 2382 | var call *ssa.Value |
| 2383 | switch { |
| 2384 | case k == callDefer: |
| 2385 | call = s.newValue1(ssa.OpDeferCall, ssa.TypeMem, s.mem()) |
| 2386 | case k == callGo: |
| 2387 | call = s.newValue1(ssa.OpGoCall, ssa.TypeMem, s.mem()) |
| 2388 | case closure != nil: |
| 2389 | codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], closure, s.mem()) |
| 2390 | call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, codeptr, closure, s.mem()) |
| 2391 | case codeptr != nil: |
| 2392 | call = s.newValue2(ssa.OpInterCall, ssa.TypeMem, codeptr, s.mem()) |
| 2393 | case sym != nil: |
| 2394 | call = s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, sym, s.mem()) |
| 2395 | default: |
| 2396 | Fatalf("bad call type %s %v", opnames[n.Op], n) |
| 2397 | } |
| 2398 | call.AuxInt = stksize // Call operations carry the argsize of the callee along with them |
| 2399 | |
| 2400 | // Finish call block |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 2401 | s.vars[&memVar] = call |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2402 | b := s.endBlock() |
| 2403 | b.Kind = ssa.BlockCall |
| 2404 | b.Control = call |
| 2405 | b.AddEdgeTo(bNext) |
Keith Randall | ddc6b64 | 2016-03-09 19:27:57 -0800 | [diff] [blame] | 2406 | if k == callDefer { |
| 2407 | // Add recover edge to exit code. |
| 2408 | b.Kind = ssa.BlockDefer |
| 2409 | r := s.f.NewBlock(ssa.BlockPlain) |
| 2410 | s.startBlock(r) |
| 2411 | s.exit() |
| 2412 | b.AddEdgeTo(r) |
| 2413 | b.Likely = ssa.BranchLikely |
| 2414 | } |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2415 | |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2416 | // Start exit block, find address of result. |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2417 | s.startBlock(bNext) |
Matthew Dempsky | 0cff505 | 2016-03-09 20:45:18 -0800 | [diff] [blame] | 2418 | fp := n.Left.Type.Results().Field(0) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2419 | if fp == nil || k != callNormal { |
| 2420 | // call has no return value. Continue with the next statement. |
| 2421 | return nil |
| 2422 | } |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2423 | return s.entryNewValue1I(ssa.OpOffPtr, Ptrto(fp.Type), fp.Width, s.sp) |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 2424 | } |
| 2425 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 2426 | // etypesign returns the signed-ness of e, for integer/pointer etypes. |
| 2427 | // -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer. |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 2428 | func etypesign(e EType) int8 { |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 2429 | switch e { |
| 2430 | case TINT8, TINT16, TINT32, TINT64, TINT: |
| 2431 | return -1 |
| 2432 | case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR, TUNSAFEPTR: |
| 2433 | return +1 |
| 2434 | } |
| 2435 | return 0 |
| 2436 | } |
| 2437 | |
Todd Neal | d076ef7 | 2015-10-15 20:25:32 -0500 | [diff] [blame] | 2438 | // lookupSymbol retrieves the symbol (Extern, Arg or Auto) used for a particular node. |
| 2439 | // This improves the effectiveness of cse by using the same Aux values for the |
| 2440 | // same symbols. |
| 2441 | func (s *state) lookupSymbol(n *Node, sym interface{}) interface{} { |
| 2442 | switch sym.(type) { |
| 2443 | default: |
| 2444 | s.Fatalf("sym %v is of unknown type %T", sym, sym) |
| 2445 | case *ssa.ExternSymbol, *ssa.ArgSymbol, *ssa.AutoSymbol: |
| 2446 | // these are the only valid types |
| 2447 | } |
| 2448 | |
| 2449 | if lsym, ok := s.varsyms[n]; ok { |
| 2450 | return lsym |
| 2451 | } else { |
| 2452 | s.varsyms[n] = sym |
| 2453 | return sym |
| 2454 | } |
| 2455 | } |
| 2456 | |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 2457 | // addr converts the address of the expression n to SSA, adds it to s and returns the SSA result. |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 2458 | // The value that the returned Value represents is guaranteed to be non-nil. |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2459 | // If bounded is true then this address does not require a nil check for its operand |
| 2460 | // even if that would otherwise be implied. |
| 2461 | func (s *state) addr(n *Node, bounded bool) *ssa.Value { |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2462 | t := Ptrto(n.Type) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2463 | switch n.Op { |
| 2464 | case ONAME: |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2465 | switch n.Class { |
| 2466 | case PEXTERN: |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2467 | // global variable |
Todd Neal | 74180dd | 2015-10-27 21:35:48 -0500 | [diff] [blame] | 2468 | aux := s.lookupSymbol(n, &ssa.ExternSymbol{n.Type, n.Sym}) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2469 | v := s.entryNewValue1A(ssa.OpAddr, t, aux, s.sb) |
Josh Bleecher Snyder | 67df793 | 2015-07-28 11:08:44 -0700 | [diff] [blame] | 2470 | // TODO: Make OpAddr use AuxInt as well as Aux. |
| 2471 | if n.Xoffset != 0 { |
| 2472 | v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v) |
| 2473 | } |
| 2474 | return v |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 2475 | case PPARAM: |
| 2476 | // parameter slot |
Josh Bleecher Snyder | 596ddf4 | 2015-06-29 11:56:28 -0700 | [diff] [blame] | 2477 | v := s.decladdrs[n] |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 2478 | if v != nil { |
| 2479 | return v |
Josh Bleecher Snyder | 596ddf4 | 2015-06-29 11:56:28 -0700 | [diff] [blame] | 2480 | } |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 2481 | if n.String() == ".fp" { |
| 2482 | // Special arg that points to the frame pointer. |
| 2483 | // (Used by the race detector, others?) |
| 2484 | aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n}) |
| 2485 | return s.entryNewValue1A(ssa.OpAddr, t, aux, s.sp) |
| 2486 | } |
| 2487 | s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs) |
| 2488 | return nil |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2489 | case PAUTO: |
Todd Neal | 40bfec0 | 2016-03-11 20:03:17 -0600 | [diff] [blame] | 2490 | aux := s.lookupSymbol(n, &ssa.AutoSymbol{Typ: n.Type, Node: n}) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2491 | return s.newValue1A(ssa.OpAddr, t, aux, s.sp) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 2492 | case PPARAMOUT: // Same as PAUTO -- cannot generate LEA early. |
Todd Neal | d076ef7 | 2015-10-15 20:25:32 -0500 | [diff] [blame] | 2493 | // ensure that we reuse symbols for out parameters so |
| 2494 | // that cse works on their addresses |
| 2495 | aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n}) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2496 | return s.newValue1A(ssa.OpAddr, t, aux, s.sp) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 2497 | case PAUTO | PHEAP, PPARAM | PHEAP, PPARAMOUT | PHEAP, PPARAMREF: |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 2498 | return s.expr(n.Name.Heapaddr) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2499 | default: |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 2500 | s.Unimplementedf("variable address class %v not implemented", n.Class) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2501 | return nil |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2502 | } |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2503 | case OINDREG: |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 2504 | // indirect off a register |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2505 | // used for storing/loading arguments/returns to/from callees |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 2506 | if int(n.Reg) != Thearch.REGSP { |
| 2507 | s.Unimplementedf("OINDREG of non-SP register %s in addr: %v", obj.Rconv(int(n.Reg)), n) |
| 2508 | return nil |
| 2509 | } |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2510 | return s.entryNewValue1I(ssa.OpOffPtr, t, n.Xoffset, s.sp) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2511 | case OINDEX: |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2512 | if n.Left.Type.IsSlice() { |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2513 | a := s.expr(n.Left) |
| 2514 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 2515 | i = s.extendIndex(i) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2516 | len := s.newValue1(ssa.OpSliceLen, Types[TINT], a) |
Keith Randall | 46e62f8 | 2015-08-18 14:17:30 -0700 | [diff] [blame] | 2517 | if !n.Bounded { |
| 2518 | s.boundsCheck(i, len) |
| 2519 | } |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2520 | p := s.newValue1(ssa.OpSlicePtr, t, a) |
| 2521 | return s.newValue2(ssa.OpPtrIndex, t, p, i) |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2522 | } else { // array |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2523 | a := s.addr(n.Left, bounded) |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2524 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 2525 | i = s.extendIndex(i) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2526 | len := s.constInt(Types[TINT], n.Left.Type.Bound) |
Keith Randall | 46e62f8 | 2015-08-18 14:17:30 -0700 | [diff] [blame] | 2527 | if !n.Bounded { |
| 2528 | s.boundsCheck(i, len) |
| 2529 | } |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2530 | return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), a, i) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2531 | } |
Todd Neal | b383de2 | 2015-07-13 21:22:16 -0500 | [diff] [blame] | 2532 | case OIND: |
| 2533 | p := s.expr(n.Left) |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2534 | if !bounded { |
| 2535 | s.nilCheck(p) |
| 2536 | } |
Todd Neal | b383de2 | 2015-07-13 21:22:16 -0500 | [diff] [blame] | 2537 | return p |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 2538 | case ODOT: |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2539 | p := s.addr(n.Left, bounded) |
Josh Bleecher Snyder | da1802f | 2016-03-04 12:34:43 -0800 | [diff] [blame] | 2540 | return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p) |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 2541 | case ODOTPTR: |
| 2542 | p := s.expr(n.Left) |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2543 | if !bounded { |
| 2544 | s.nilCheck(p) |
| 2545 | } |
Josh Bleecher Snyder | da1802f | 2016-03-04 12:34:43 -0800 | [diff] [blame] | 2546 | return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p) |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 2547 | case OCLOSUREVAR: |
Josh Bleecher Snyder | da1802f | 2016-03-04 12:34:43 -0800 | [diff] [blame] | 2548 | return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, |
| 2549 | s.entryNewValue0(ssa.OpGetClosurePtr, Ptrto(Types[TUINT8]))) |
David Chase | 32ffbf7 | 2015-10-08 17:14:12 -0400 | [diff] [blame] | 2550 | case OPARAM: |
| 2551 | p := n.Left |
| 2552 | if p.Op != ONAME || !(p.Class == PPARAM|PHEAP || p.Class == PPARAMOUT|PHEAP) { |
| 2553 | s.Fatalf("OPARAM not of ONAME,{PPARAM,PPARAMOUT}|PHEAP, instead %s", nodedump(p, 0)) |
| 2554 | } |
| 2555 | |
| 2556 | // Recover original offset to address passed-in param value. |
| 2557 | original_p := *p |
| 2558 | original_p.Xoffset = n.Xoffset |
| 2559 | aux := &ssa.ArgSymbol{Typ: n.Type, Node: &original_p} |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2560 | return s.entryNewValue1A(ssa.OpAddr, t, aux, s.sp) |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2561 | case OCONVNOP: |
| 2562 | addr := s.addr(n.Left, bounded) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 2563 | return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2564 | case OCALLFUNC, OCALLINTER, OCALLMETH: |
| 2565 | return s.call(n, callNormal) |
David Chase | 57670ad | 2015-10-09 16:48:30 -0400 | [diff] [blame] | 2566 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2567 | default: |
Matthew Dempsky | c3dfad5 | 2016-03-07 08:23:55 -0800 | [diff] [blame] | 2568 | s.Unimplementedf("unhandled addr %v", Oconv(n.Op, 0)) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2569 | return nil |
| 2570 | } |
| 2571 | } |
| 2572 | |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2573 | // canSSA reports whether n is SSA-able. |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2574 | // n must be an ONAME (or an ODOT sequence with an ONAME base). |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 2575 | func (s *state) canSSA(n *Node) bool { |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2576 | for n.Op == ODOT { |
| 2577 | n = n.Left |
| 2578 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2579 | if n.Op != ONAME { |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 2580 | return false |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2581 | } |
| 2582 | if n.Addrtaken { |
| 2583 | return false |
| 2584 | } |
| 2585 | if n.Class&PHEAP != 0 { |
| 2586 | return false |
| 2587 | } |
Josh Bleecher Snyder | 9654873 | 2015-08-28 13:35:32 -0700 | [diff] [blame] | 2588 | switch n.Class { |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 2589 | case PEXTERN, PPARAMREF: |
| 2590 | // TODO: maybe treat PPARAMREF with an Arg-like op to read from closure? |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2591 | return false |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 2592 | case PPARAMOUT: |
| 2593 | if hasdefer { |
| 2594 | // TODO: handle this case? Named return values must be |
| 2595 | // in memory so that the deferred function can see them. |
| 2596 | // Maybe do: if !strings.HasPrefix(n.String(), "~") { return false } |
| 2597 | return false |
| 2598 | } |
| 2599 | if s.cgoUnsafeArgs { |
| 2600 | // Cgo effectively takes the address of all result args, |
| 2601 | // but the compiler can't see that. |
| 2602 | return false |
| 2603 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2604 | } |
Keith Randall | 8a1f621 | 2015-09-08 21:28:44 -0700 | [diff] [blame] | 2605 | if n.Class == PPARAM && n.String() == ".this" { |
| 2606 | // wrappers generated by genwrapper need to update |
| 2607 | // the .this pointer in place. |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 2608 | // TODO: treat as a PPARMOUT? |
Keith Randall | 8a1f621 | 2015-09-08 21:28:44 -0700 | [diff] [blame] | 2609 | return false |
| 2610 | } |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2611 | return canSSAType(n.Type) |
| 2612 | // TODO: try to make more variables SSAable? |
| 2613 | } |
| 2614 | |
| 2615 | // canSSAType reports whether variables of type t are SSA-able. |
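// For example, assuming 8-byte pointers (so the size cutoff below is 32 bytes):
//	int64, *T, string, []T -> SSA-able
//	[4]int                 -> not SSA-able (fixed-size array)
//	[40]byte               -> not SSA-able (wider than 4*Widthptr)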
| 2616 | func canSSAType(t *Type) bool { |
| 2617 | dowidth(t) |
| 2618 | if t.Width > int64(4*Widthptr) { |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2619 | // 4*Widthptr is an arbitrary constant. We want it |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2620 | // to be at least 3*Widthptr so slices can be registerized. |
| 2621 | // Too big and we'll introduce too much register pressure. |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 2622 | return false |
| 2623 | } |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2624 | switch t.Etype { |
| 2625 | case TARRAY: |
| 2626 | if Isslice(t) { |
| 2627 | return true |
| 2628 | } |
| 2629 | // We can't do arrays because dynamic indexing is |
| 2630 | // not supported on SSA variables. |
| 2631 | // TODO: maybe allow if length is <=1? All indexes |
| 2632 | // are constant? Might be good for the arrays |
| 2633 | // introduced by the compiler for variadic functions. |
| 2634 | return false |
| 2635 | case TSTRUCT: |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2636 | if countfield(t) > ssa.MaxStruct { |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2637 | return false |
| 2638 | } |
Matthew Dempsky | fe5b4a6 | 2016-03-10 01:50:58 -0800 | [diff] [blame] | 2639 | for t1, it := IterFields(t); t1 != nil; t1 = it.Next() { |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2640 | if !canSSAType(t1.Type) { |
| 2641 | return false |
| 2642 | } |
| 2643 | } |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 2644 | return true |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2645 | default: |
| 2646 | return true |
| 2647 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2648 | } |
| 2649 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2650 | // nilCheck generates nil pointer checking code. |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2651 | // Starts a new block on return, unless nil checks are disabled. |
Josh Bleecher Snyder | 7e74e43 | 2015-07-24 11:55:52 -0700 | [diff] [blame] | 2652 | // Used only for automatically inserted nil checks, |
| 2653 | // not for user code like 'x != nil'. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2654 | func (s *state) nilCheck(ptr *ssa.Value) { |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2655 | if Disable_checknil != 0 { |
| 2656 | return |
| 2657 | } |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 2658 | chk := s.newValue2(ssa.OpNilCheck, ssa.TypeVoid, ptr, s.mem()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2659 | b := s.endBlock() |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 2660 | b.Kind = ssa.BlockCheck |
| 2661 | b.Control = chk |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2662 | bNext := s.f.NewBlock(ssa.BlockPlain) |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2663 | b.AddEdgeTo(bNext) |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2664 | s.startBlock(bNext) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2665 | } |
| 2666 | |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2667 | // boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2668 | // Starts a new block on return. |
| 2669 | func (s *state) boundsCheck(idx, len *ssa.Value) { |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 2670 | if Debug['B'] != 0 { |
| 2671 | return |
| 2672 | } |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2673 | // TODO: convert index to full width? |
| 2674 | // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero. |
| 2675 | |
| 2676 | // bounds check |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2677 | cmp := s.newValue2(ssa.OpIsInBounds, Types[TBOOL], idx, len) |
Keith Randall | 3a70bf9 | 2015-09-17 16:54:15 -0700 | [diff] [blame] | 2678 | s.check(cmp, Panicindex) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2679 | } |
| 2680 | |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2681 | // sliceBoundsCheck generates slice bounds checking code. Checks if 0 <= idx <= len, branches to exit if not. |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2682 | // Starts a new block on return. |
| 2683 | func (s *state) sliceBoundsCheck(idx, len *ssa.Value) { |
| 2684 | if Debug['B'] != 0 { |
| 2685 | return |
| 2686 | } |
| 2687 | // TODO: convert index to full width? |
| 2688 | // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero. |
| 2689 | |
| 2690 | // bounds check |
| 2691 | cmp := s.newValue2(ssa.OpIsSliceInBounds, Types[TBOOL], idx, len) |
Keith Randall | 3a70bf9 | 2015-09-17 16:54:15 -0700 | [diff] [blame] | 2692 | s.check(cmp, panicslice) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2693 | } |
| 2694 | |
Keith Randall | 3a70bf9 | 2015-09-17 16:54:15 -0700 | [diff] [blame] | 2695 | // If cmp (a bool) is true, panic using the given function. |
| 2696 | func (s *state) check(cmp *ssa.Value, fn *Node) { |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2697 | b := s.endBlock() |
| 2698 | b.Kind = ssa.BlockIf |
| 2699 | b.Control = cmp |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 2700 | b.Likely = ssa.BranchLikely |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2701 | bNext := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 74e568f | 2015-11-09 21:35:40 -0800 | [diff] [blame] | 2702 | line := s.peekLine() |
| 2703 | bPanic := s.panics[funcLine{fn, line}] |
| 2704 | if bPanic == nil { |
| 2705 | bPanic = s.f.NewBlock(ssa.BlockPlain) |
| 2706 | s.panics[funcLine{fn, line}] = bPanic |
| 2707 | s.startBlock(bPanic) |
| 2708 | // The panic call takes/returns memory to ensure that the right |
| 2709 | // memory state is observed if the panic happens. |
| 2710 | s.rtcall(fn, false, nil) |
| 2711 | } |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2712 | b.AddEdgeTo(bNext) |
| 2713 | b.AddEdgeTo(bPanic) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2714 | s.startBlock(bNext) |
| 2715 | } |
| 2716 | |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2717 | // rtcall issues a call to the given runtime function fn with the listed args. |
| 2718 | // Returns a slice of results of the given result types. |
| 2719 | // The call is added to the end of the current block. |
| 2720 | // If returns is false, the block is marked as an exit block. |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2721 | // If returns is true, the block is marked as a call block. A new block |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 2722 | // is started to load the return values. |
| 2723 | func (s *state) rtcall(fn *Node, returns bool, results []*Type, args ...*ssa.Value) []*ssa.Value { |
| 2724 | // Write args to the stack |
| 2725 | var off int64 // TODO: arch-dependent starting offset? |
| 2726 | for _, arg := range args { |
| 2727 | t := arg.Type |
| 2728 | off = Rnd(off, t.Alignment()) |
| 2729 | ptr := s.sp |
| 2730 | if off != 0 { |
| 2731 | ptr = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], off, s.sp) |
| 2732 | } |
| 2733 | size := t.Size() |
| 2734 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, size, ptr, arg, s.mem()) |
| 2735 | off += size |
| 2736 | } |
| 2737 | off = Rnd(off, int64(Widthptr)) |
| 2738 | |
| 2739 | // Issue call |
| 2740 | call := s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, fn.Sym, s.mem()) |
| 2741 | s.vars[&memVar] = call |
| 2742 | |
| 2743 | // Finish block |
| 2744 | b := s.endBlock() |
| 2745 | if !returns { |
| 2746 | b.Kind = ssa.BlockExit |
| 2747 | b.Control = call |
| 2748 | call.AuxInt = off |
| 2749 | if len(results) > 0 { |
| 2750 | Fatalf("panic call can't have results") |
| 2751 | } |
| 2752 | return nil |
| 2753 | } |
| 2754 | b.Kind = ssa.BlockCall |
| 2755 | b.Control = call |
| 2756 | bNext := s.f.NewBlock(ssa.BlockPlain) |
| 2757 | b.AddEdgeTo(bNext) |
| 2758 | s.startBlock(bNext) |
| 2759 | |
| 2760 | // Load results |
| 2761 | res := make([]*ssa.Value, len(results)) |
| 2762 | for i, t := range results { |
| 2763 | off = Rnd(off, t.Alignment()) |
| 2764 | ptr := s.sp |
| 2765 | if off != 0 { |
| 2766 | ptr = s.newValue1I(ssa.OpOffPtr, Types[TUINTPTR], off, s.sp) |
| 2767 | } |
| 2768 | res[i] = s.newValue2(ssa.OpLoad, t, ptr, s.mem()) |
| 2769 | off += t.Size() |
| 2770 | } |
| 2771 | off = Rnd(off, int64(Widthptr)) |
| 2772 | |
| 2773 | // Remember how much callee stack space we needed. |
| 2774 | call.AuxInt = off |
| 2775 | |
| 2776 | return res |
| 2777 | } |
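
// A worked example (hypothetical signature, assuming amd64 with 8-byte
// ints and pointers): for a runtime call fn(p *byte, n int) (r int),
// the code above stores p at 0(SP) and n at 8(SP), rounds off up to 16,
// issues the StaticCall, then loads r from 16(SP) in bNext. off ends at
// 24, which is recorded in call.AuxInt as the callee argument space.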
| 2778 | |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2779 | // insertWBmove inserts the assignment *left = *right including a write barrier. |
| 2780 | // t is the type being assigned. |
| 2781 | func (s *state) insertWBmove(t *Type, left, right *ssa.Value, line int32) { |
Keith Randall | 4304fbc | 2015-11-16 13:20:16 -0800 | [diff] [blame] | 2782 | // if writeBarrier.enabled { |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2783 | // typedmemmove(&t, left, right) |
| 2784 | // } else { |
| 2785 | // *left = *right |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2786 | // } |
| 2787 | bThen := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2788 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 2789 | bEnd := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2790 | |
Matthew Dempsky | dafbcf6 | 2016-03-04 15:19:06 -0800 | [diff] [blame] | 2791 | aux := &ssa.ExternSymbol{Types[TBOOL], syslook("writeBarrier").Sym} |
David Chase | 8107b00 | 2016-02-28 11:15:22 -0500 | [diff] [blame] | 2792 | flagaddr := s.newValue1A(ssa.OpAddr, Ptrto(Types[TUINT32]), aux, s.sb) |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2793 | // TODO: select the .enabled field. It is currently first, so not needed for now. |
David Chase | 8107b00 | 2016-02-28 11:15:22 -0500 | [diff] [blame] | 2794 | // Load word, test byte, avoiding partial register write from load byte. |
| 2795 | flag := s.newValue2(ssa.OpLoad, Types[TUINT32], flagaddr, s.mem()) |
| 2796 | flag = s.newValue1(ssa.OpTrunc64to8, Types[TBOOL], flag) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2797 | b := s.endBlock() |
| 2798 | b.Kind = ssa.BlockIf |
| 2799 | b.Likely = ssa.BranchUnlikely |
| 2800 | b.Control = flag |
| 2801 | b.AddEdgeTo(bThen) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2802 | b.AddEdgeTo(bElse) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2803 | |
| 2804 | s.startBlock(bThen) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2805 | taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(t)}, s.sb) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2806 | s.rtcall(typedmemmove, true, nil, taddr, left, right) |
| 2807 | s.endBlock().AddEdgeTo(bEnd) |
| 2808 | |
| 2809 | s.startBlock(bElse) |
| 2810 | s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, t.Size(), left, right, s.mem()) |
| 2811 | s.endBlock().AddEdgeTo(bEnd) |
| 2812 | |
| 2813 | s.startBlock(bEnd) |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2814 | |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 2815 | if Debug_wb > 0 { |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 2816 | Warnl(line, "write barrier") |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 2817 | } |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2818 | } |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 2819 | |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2820 | // insertWBstore inserts the assignment *left = right including a write barrier. |
| 2821 | // t is the type being assigned. |
| 2822 | func (s *state) insertWBstore(t *Type, left, right *ssa.Value, line int32) { |
| 2823 | // store scalar fields |
| 2824 | // if writeBarrier.enabled { |
| 2825 | // writebarrierptr for pointer fields |
| 2826 | // } else { |
| 2827 | // store pointer fields |
| 2828 | // } |
| 2829 | |
Keith Randall | aebf661 | 2016-01-29 21:57:57 -0800 | [diff] [blame] | 2830 | s.storeTypeScalars(t, left, right) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2831 | |
| 2832 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 2833 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 2834 | bEnd := s.f.NewBlock(ssa.BlockPlain) |
| 2835 | |
Matthew Dempsky | dafbcf6 | 2016-03-04 15:19:06 -0800 | [diff] [blame] | 2836 | aux := &ssa.ExternSymbol{Types[TBOOL], syslook("writeBarrier").Sym} |
David Chase | 8107b00 | 2016-02-28 11:15:22 -0500 | [diff] [blame] | 2837 | flagaddr := s.newValue1A(ssa.OpAddr, Ptrto(Types[TUINT32]), aux, s.sb) |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 2838 | // TODO: select the .enabled field. It is currently first, so not needed for now. |
David Chase | 8107b00 | 2016-02-28 11:15:22 -0500 | [diff] [blame] | 2839 | // Load word, test byte, avoiding partial register write from load byte. |
| 2840 | flag := s.newValue2(ssa.OpLoad, Types[TUINT32], flagaddr, s.mem()) |
| 2841 | flag = s.newValue1(ssa.OpTrunc64to8, Types[TBOOL], flag) |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2842 | b := s.endBlock() |
| 2843 | b.Kind = ssa.BlockIf |
| 2844 | b.Likely = ssa.BranchUnlikely |
| 2845 | b.Control = flag |
| 2846 | b.AddEdgeTo(bThen) |
| 2847 | b.AddEdgeTo(bElse) |
| 2848 | |
| 2849 | // Issue write barriers for pointer writes. |
| 2850 | s.startBlock(bThen) |
Keith Randall | aebf661 | 2016-01-29 21:57:57 -0800 | [diff] [blame] | 2851 | s.storeTypePtrsWB(t, left, right) |
| 2852 | s.endBlock().AddEdgeTo(bEnd) |
| 2853 | |
| 2854 | // Issue regular stores for pointer writes. |
| 2855 | s.startBlock(bElse) |
| 2856 | s.storeTypePtrs(t, left, right) |
| 2857 | s.endBlock().AddEdgeTo(bEnd) |
| 2858 | |
| 2859 | s.startBlock(bEnd) |
| 2860 | |
| 2861 | if Debug_wb > 0 { |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 2862 | Warnl(line, "write barrier") |
Keith Randall | aebf661 | 2016-01-29 21:57:57 -0800 | [diff] [blame] | 2863 | } |
| 2864 | } |
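
// Sketch of the decomposition above for a slice assignment *left = right
// (illustrative, using the helpers defined below): the len and cap words
// are stored unconditionally by storeTypeScalars, and the data pointer is
// stored either via the writebarrierptr runtime call (storeTypePtrsWB)
// or with a plain Store (storeTypePtrs), depending on writeBarrier.enabled.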
| 2865 | |
| 2866 | // do *left = right for all scalar (non-pointer) parts of t. |
| 2867 | func (s *state) storeTypeScalars(t *Type, left, right *ssa.Value) { |
| 2868 | switch { |
| 2869 | case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex(): |
| 2870 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), left, right, s.mem()) |
| 2871 | case t.IsPtr() || t.IsMap() || t.IsChan(): |
| 2872 | // no scalar fields. |
| 2873 | case t.IsString(): |
| 2874 | len := s.newValue1(ssa.OpStringLen, Types[TINT], right) |
| 2875 | lenAddr := s.newValue1I(ssa.OpOffPtr, Ptrto(Types[TINT]), s.config.IntSize, left) |
| 2876 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem()) |
| 2877 | case t.IsSlice(): |
| 2878 | len := s.newValue1(ssa.OpSliceLen, Types[TINT], right) |
| 2879 | cap := s.newValue1(ssa.OpSliceCap, Types[TINT], right) |
| 2880 | lenAddr := s.newValue1I(ssa.OpOffPtr, Ptrto(Types[TINT]), s.config.IntSize, left) |
| 2881 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem()) |
| 2882 | capAddr := s.newValue1I(ssa.OpOffPtr, Ptrto(Types[TINT]), 2*s.config.IntSize, left) |
| 2883 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capAddr, cap, s.mem()) |
| 2884 | case t.IsInterface(): |
| 2885 | // itab field doesn't need a write barrier (even though it is a pointer). |
| 2886 | itab := s.newValue1(ssa.OpITab, Ptrto(Types[TUINT8]), right) |
| 2887 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, left, itab, s.mem()) |
| 2888 | case t.IsStruct(): |
| 2889 | n := t.NumFields() |
| 2890 | for i := int64(0); i < n; i++ { |
| 2891 | ft := t.FieldType(i) |
| 2892 | addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left) |
| 2893 | val := s.newValue1I(ssa.OpStructSelect, ft, i, right) |
| 2894 | s.storeTypeScalars(ft.(*Type), addr, val) |
| 2895 | } |
| 2896 | default: |
| 2897 | s.Fatalf("bad write barrier type %s", t) |
| 2898 | } |
| 2899 | } |
| 2900 | |
| 2901 | // do *left = right for all pointer parts of t. |
| 2902 | func (s *state) storeTypePtrs(t *Type, left, right *ssa.Value) { |
| 2903 | switch { |
| 2904 | case t.IsPtr() || t.IsMap() || t.IsChan(): |
| 2905 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, right, s.mem()) |
| 2906 | case t.IsString(): |
| 2907 | ptr := s.newValue1(ssa.OpStringPtr, Ptrto(Types[TUINT8]), right) |
| 2908 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem()) |
| 2909 | case t.IsSlice(): |
| 2910 | ptr := s.newValue1(ssa.OpSlicePtr, Ptrto(Types[TUINT8]), right) |
| 2911 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem()) |
| 2912 | case t.IsInterface(): |
| 2913 | // itab field is treated as a scalar. |
| 2914 | idata := s.newValue1(ssa.OpIData, Ptrto(Types[TUINT8]), right) |
| 2915 | idataAddr := s.newValue1I(ssa.OpOffPtr, Ptrto(Types[TUINT8]), s.config.PtrSize, left) |
| 2916 | s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, idataAddr, idata, s.mem()) |
| 2917 | case t.IsStruct(): |
| 2918 | n := t.NumFields() |
| 2919 | for i := int64(0); i < n; i++ { |
| 2920 | ft := t.FieldType(i) |
| 2921 | if !haspointers(ft.(*Type)) { |
| 2922 | continue |
| 2923 | } |
| 2924 | addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left) |
| 2925 | val := s.newValue1I(ssa.OpStructSelect, ft, i, right) |
| 2926 | s.storeTypePtrs(ft.(*Type), addr, val) |
| 2927 | } |
| 2928 | default: |
| 2929 | s.Fatalf("bad write barrier type %s", t) |
| 2930 | } |
| 2931 | } |
| 2932 | |
| 2933 | // do *left = right with a write barrier for all pointer parts of t. |
| 2934 | func (s *state) storeTypePtrsWB(t *Type, left, right *ssa.Value) { |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2935 | switch { |
| 2936 | case t.IsPtr() || t.IsMap() || t.IsChan(): |
| 2937 | s.rtcall(writebarrierptr, true, nil, left, right) |
| 2938 | case t.IsString(): |
| 2939 | ptr := s.newValue1(ssa.OpStringPtr, Ptrto(Types[TUINT8]), right) |
| 2940 | s.rtcall(writebarrierptr, true, nil, left, ptr) |
| 2941 | case t.IsSlice(): |
| 2942 | ptr := s.newValue1(ssa.OpSlicePtr, Ptrto(Types[TUINT8]), right) |
| 2943 | s.rtcall(writebarrierptr, true, nil, left, ptr) |
| 2944 | case t.IsInterface(): |
| 2945 | idata := s.newValue1(ssa.OpIData, Ptrto(Types[TUINT8]), right) |
| 2946 | idataAddr := s.newValue1I(ssa.OpOffPtr, Ptrto(Types[TUINT8]), s.config.PtrSize, left) |
| 2947 | s.rtcall(writebarrierptr, true, nil, idataAddr, idata) |
Keith Randall | aebf661 | 2016-01-29 21:57:57 -0800 | [diff] [blame] | 2948 | case t.IsStruct(): |
| 2949 | n := t.NumFields() |
| 2950 | for i := int64(0); i < n; i++ { |
| 2951 | ft := t.FieldType(i) |
| 2952 | if !haspointers(ft.(*Type)) { |
| 2953 | continue |
| 2954 | } |
| 2955 | addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left) |
| 2956 | val := s.newValue1I(ssa.OpStructSelect, ft, i, right) |
| 2957 | s.storeTypePtrsWB(ft.(*Type), addr, val) |
| 2958 | } |
Keith Randall | 5ba3194 | 2016-01-25 17:06:54 -0800 | [diff] [blame] | 2959 | default: |
| 2960 | s.Fatalf("bad write barrier type %s", t) |
| 2961 | } |
Keith Randall | 9d22c10 | 2015-09-11 11:02:57 -0700 | [diff] [blame] | 2962 | } |
| 2963 | |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 2964 | // slice computes the slice v[i:j:k] and returns ptr, len, and cap of result. |
| 2965 | // i,j,k may be nil, in which case they are set to their default value. |
| 2966 | // t is a slice, ptr to array, or string type. |
| 2967 | func (s *state) slice(t *Type, v, i, j, k *ssa.Value) (p, l, c *ssa.Value) { |
| 2968 | var elemtype *Type |
| 2969 | var ptrtype *Type |
| 2970 | var ptr *ssa.Value |
| 2971 | var len *ssa.Value |
| 2972 | var cap *ssa.Value |
| 2973 | zero := s.constInt(Types[TINT], 0) |
| 2974 | switch { |
| 2975 | case t.IsSlice(): |
| 2976 | elemtype = t.Type |
| 2977 | ptrtype = Ptrto(elemtype) |
| 2978 | ptr = s.newValue1(ssa.OpSlicePtr, ptrtype, v) |
| 2979 | len = s.newValue1(ssa.OpSliceLen, Types[TINT], v) |
| 2980 | cap = s.newValue1(ssa.OpSliceCap, Types[TINT], v) |
| 2981 | case t.IsString(): |
| 2982 | elemtype = Types[TUINT8] |
| 2983 | ptrtype = Ptrto(elemtype) |
| 2984 | ptr = s.newValue1(ssa.OpStringPtr, ptrtype, v) |
| 2985 | len = s.newValue1(ssa.OpStringLen, Types[TINT], v) |
| 2986 | cap = len |
| 2987 | case t.IsPtr(): |
| 2988 | if !t.Type.IsArray() { |
| 2989 | s.Fatalf("bad ptr to array in slice %v\n", t) |
| 2990 | } |
| 2991 | elemtype = t.Type.Type |
| 2992 | ptrtype = Ptrto(elemtype) |
| 2993 | s.nilCheck(v) |
| 2994 | ptr = v |
| 2995 | len = s.constInt(Types[TINT], t.Type.Bound) |
| 2996 | cap = len |
| 2997 | default: |
| 2998 | s.Fatalf("bad type in slice %v\n", t) |
| 2999 | } |
| 3000 | |
| 3001 | // Set default values |
| 3002 | if i == nil { |
| 3003 | i = zero |
| 3004 | } |
| 3005 | if j == nil { |
| 3006 | j = len |
| 3007 | } |
| 3008 | if k == nil { |
| 3009 | k = cap |
| 3010 | } |
| 3011 | |
| 3012 | // Panic if slice indices are not in bounds. |
| 3013 | s.sliceBoundsCheck(i, j) |
| 3014 | if j != k { |
| 3015 | s.sliceBoundsCheck(j, k) |
| 3016 | } |
| 3017 | if k != cap { |
| 3018 | s.sliceBoundsCheck(k, cap) |
| 3019 | } |
| 3020 | |
| 3021 | // Generate the following code assuming that indexes are in bounds. |
| 3022 | // The conditional is to make sure that we don't generate a slice |
| 3023 | // that points to the next object in memory. |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3024 | // rlen = (Sub64 j i) |
| 3025 | // rcap = (Sub64 k i) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3026 | // p = ptr |
| 3027 | // if rcap != 0 { |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3028 | // p = (AddPtr ptr (Mul64 i (Const64 size)))
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3029 | // } |
| 3030 | // result = (SliceMake p rlen rcap)
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3031 | subOp := s.ssaOp(OSUB, Types[TINT]) |
| 3032 | neqOp := s.ssaOp(ONE, Types[TINT]) |
| 3033 | mulOp := s.ssaOp(OMUL, Types[TINT]) |
| 3034 | rlen := s.newValue2(subOp, Types[TINT], j, i) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3035 | var rcap *ssa.Value |
| 3036 | switch { |
| 3037 | case t.IsString(): |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3038 | // Capacity of the result is unimportant. However, we use |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3039 | // rcap to test if we've generated a zero-length slice. |
| 3040 | // For strings, use the length for that test.
| 3041 | rcap = rlen |
| 3042 | case j == k: |
| 3043 | rcap = rlen |
| 3044 | default: |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3045 | rcap = s.newValue2(subOp, Types[TINT], k, i) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3046 | } |
| 3047 | |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 3048 | s.vars[&ptrVar] = ptr |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3049 | |
| 3050 | // Generate code to test the resulting slice length. |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3051 | cmp := s.newValue2(neqOp, Types[TBOOL], rcap, s.constInt(Types[TINT], 0)) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3052 | |
| 3053 | b := s.endBlock() |
| 3054 | b.Kind = ssa.BlockIf |
| 3055 | b.Likely = ssa.BranchLikely |
| 3056 | b.Control = cmp |
| 3057 | |
| 3058 | // Generate code for non-zero length slice case. |
| 3059 | nz := s.f.NewBlock(ssa.BlockPlain) |
| 3060 | b.AddEdgeTo(nz) |
| 3061 | s.startBlock(nz) |
| 3062 | var inc *ssa.Value |
| 3063 | if elemtype.Width == 1 { |
| 3064 | inc = i |
| 3065 | } else { |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 3066 | inc = s.newValue2(mulOp, Types[TINT], i, s.constInt(Types[TINT], elemtype.Width)) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3067 | } |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 3068 | s.vars[&ptrVar] = s.newValue2(ssa.OpAddPtr, ptrtype, ptr, inc) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3069 | s.endBlock() |
| 3070 | |
| 3071 | // All done. |
| 3072 | merge := s.f.NewBlock(ssa.BlockPlain) |
| 3073 | b.AddEdgeTo(merge) |
| 3074 | nz.AddEdgeTo(merge) |
| 3075 | s.startBlock(merge) |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 3076 | rptr := s.variable(&ptrVar, ptrtype) |
| 3077 | delete(s.vars, &ptrVar) |
Keith Randall | 5505e8c | 2015-09-12 23:27:26 -0700 | [diff] [blame] | 3078 | return rptr, rlen, rcap |
| 3079 | } |
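
// A worked example with illustrative values: slicing a []int32 (element
// width 4) as v[2:5:7] gives rlen = 5-2 = 3 and rcap = 7-2 = 5. Since
// rcap != 0, the pointer is advanced by inc = 2*4 = 8 bytes, so the
// function returns (ptr+8, 3, 5) for the caller to assemble into a
// slice value (OpSliceMake).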
| 3080 | |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3081 | type u2fcvtTab struct { |
| 3082 | geq, cvt2F, and, rsh, or, add ssa.Op |
| 3083 | one func(*state, ssa.Type, int64) *ssa.Value |
| 3084 | } |
| 3085 | |
| 3086 | var u64_f64 u2fcvtTab = u2fcvtTab{ |
| 3087 | geq: ssa.OpGeq64, |
| 3088 | cvt2F: ssa.OpCvt64to64F, |
| 3089 | and: ssa.OpAnd64, |
| 3090 | rsh: ssa.OpRsh64Ux64, |
| 3091 | or: ssa.OpOr64, |
| 3092 | add: ssa.OpAdd64F, |
| 3093 | one: (*state).constInt64, |
| 3094 | } |
| 3095 | |
| 3096 | var u64_f32 u2fcvtTab = u2fcvtTab{ |
| 3097 | geq: ssa.OpGeq64, |
| 3098 | cvt2F: ssa.OpCvt64to32F, |
| 3099 | and: ssa.OpAnd64, |
| 3100 | rsh: ssa.OpRsh64Ux64, |
| 3101 | or: ssa.OpOr64, |
| 3102 | add: ssa.OpAdd32F, |
| 3103 | one: (*state).constInt64, |
| 3104 | } |
| 3105 | |
| 3106 | // Excess generality on a machine with 64-bit integer registers. |
| 3107 | // Not used on AMD64. |
| 3108 | var u32_f32 u2fcvtTab = u2fcvtTab{ |
| 3109 | geq: ssa.OpGeq32, |
| 3110 | cvt2F: ssa.OpCvt32to32F, |
| 3111 | and: ssa.OpAnd32, |
| 3112 | rsh: ssa.OpRsh32Ux32, |
| 3113 | or: ssa.OpOr32, |
| 3114 | add: ssa.OpAdd32F, |
| 3115 | one: func(s *state, t ssa.Type, x int64) *ssa.Value { |
| 3116 | return s.constInt32(t, int32(x)) |
| 3117 | }, |
| 3118 | } |
| 3119 | |
| 3120 | func (s *state) uint64Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3121 | return s.uintTofloat(&u64_f64, n, x, ft, tt) |
| 3122 | } |
| 3123 | |
| 3124 | func (s *state) uint64Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3125 | return s.uintTofloat(&u64_f32, n, x, ft, tt) |
| 3126 | } |
| 3127 | |
| 3128 | func (s *state) uintTofloat(cvttab *u2fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3129 | // if x >= 0 { |
| 3130 | // result = (floatY) x |
| 3131 | // } else { |
| 3132 | // y = uintX(x) ; y = y & 1
| 3133 | // z = uintX(x)
| 3134 | // z = z >> 1
| 3135 | // z = z | y |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3136 | // result = floatY(z) |
| 3137 | // result = result + result |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3138 | // } |
| 3139 | // |
| 3140 | // Code borrowed from old code generator. |
| 3141 | // What's going on: large 64-bit "unsigned" looks like |
| 3142 | // negative number to hardware's integer-to-float |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3143 | // conversion. However, because the float mantissa cannot
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3144 | // hold all 64 bits anyway, we don't need the LSB, so instead we do an
| 3145 | // unsigned right shift (divide by two), convert, and |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3146 | // double. However, before we do that, we need to be |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3147 | // sure that we do not lose a "1" if that made the |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3148 | // difference in the resulting rounding. Therefore, we |
| 3149 | // preserve it, and OR (not ADD) it back in. The case |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3150 | // that matters is when the eleven discarded bits are |
| 3151 | // equal to 10000000001; that rounds up, and the 1 cannot |
| 3152 | // be lost else it would round down if the LSB of the |
| 3153 | // candidate mantissa is 0. |
| 3154 | cmp := s.newValue2(cvttab.geq, Types[TBOOL], x, s.zeroVal(ft)) |
| 3155 | b := s.endBlock() |
| 3156 | b.Kind = ssa.BlockIf |
| 3157 | b.Control = cmp |
| 3158 | b.Likely = ssa.BranchLikely |
| 3159 | |
| 3160 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 3161 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 3162 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 3163 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3164 | b.AddEdgeTo(bThen) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3165 | s.startBlock(bThen) |
| 3166 | a0 := s.newValue1(cvttab.cvt2F, tt, x) |
| 3167 | s.vars[n] = a0 |
| 3168 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3169 | bThen.AddEdgeTo(bAfter) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3170 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3171 | b.AddEdgeTo(bElse) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3172 | s.startBlock(bElse) |
| 3173 | one := cvttab.one(s, ft, 1) |
| 3174 | y := s.newValue2(cvttab.and, ft, x, one) |
| 3175 | z := s.newValue2(cvttab.rsh, ft, x, one) |
| 3176 | z = s.newValue2(cvttab.or, ft, z, y) |
| 3177 | a := s.newValue1(cvttab.cvt2F, tt, z) |
| 3178 | a1 := s.newValue2(cvttab.add, tt, a, a) |
| 3179 | s.vars[n] = a1 |
| 3180 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3181 | bElse.AddEdgeTo(bAfter) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3182 | |
| 3183 | s.startBlock(bAfter) |
| 3184 | return s.variable(n, n.Type) |
| 3185 | } |
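
// A worked example: for x = 1<<63, which is negative as a signed int64,
// the else branch runs: y = x&1 = 0, z = x>>1 = 1<<62, z|y = 1<<62,
// float64(1<<62) is exact, and doubling yields 9223372036854775808.0,
// the correct value of float64(uint64(1<<63)).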
| 3186 | |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 3187 | // referenceTypeBuiltin generates code for the len/cap builtins for maps and channels. |
| 3188 | func (s *state) referenceTypeBuiltin(n *Node, x *ssa.Value) *ssa.Value { |
| 3189 | if !n.Left.Type.IsMap() && !n.Left.Type.IsChan() { |
| 3190 | s.Fatalf("node must be a map or a channel") |
| 3191 | } |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3192 | // if n == nil { |
| 3193 | // return 0 |
| 3194 | // } else { |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 3195 | // // len |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3196 | // return *((*int)n) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 3197 | // // cap |
| 3198 | // return *(((*int)n)+1) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3199 | // } |
| 3200 | lenType := n.Type |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 3201 | nilValue := s.constNil(Types[TUINTPTR]) |
Todd Neal | 67ac8a3 | 2015-08-28 15:20:54 -0500 | [diff] [blame] | 3202 | cmp := s.newValue2(ssa.OpEqPtr, Types[TBOOL], x, nilValue) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3203 | b := s.endBlock() |
| 3204 | b.Kind = ssa.BlockIf |
| 3205 | b.Control = cmp |
| 3206 | b.Likely = ssa.BranchUnlikely |
| 3207 | |
| 3208 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 3209 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 3210 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 3211 | |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 3212 | // length/capacity of a nil map/chan is zero |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3213 | b.AddEdgeTo(bThen) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3214 | s.startBlock(bThen) |
| 3215 | s.vars[n] = s.zeroVal(lenType) |
| 3216 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3217 | bThen.AddEdgeTo(bAfter) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3218 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3219 | b.AddEdgeTo(bElse) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3220 | s.startBlock(bElse) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 3221 | if n.Op == OLEN { |
| 3222 | // length is stored in the first word for map/chan |
| 3223 | s.vars[n] = s.newValue2(ssa.OpLoad, lenType, x, s.mem()) |
| 3224 | } else if n.Op == OCAP { |
| 3225 | // capacity is stored in the second word for chan |
| 3226 | sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Width, x) |
| 3227 | s.vars[n] = s.newValue2(ssa.OpLoad, lenType, sw, s.mem()) |
| 3228 | } else { |
| 3229 | s.Fatalf("op must be OLEN or OCAP") |
| 3230 | } |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3231 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3232 | bElse.AddEdgeTo(bAfter) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 3233 | |
| 3234 | s.startBlock(bAfter) |
| 3235 | return s.variable(n, lenType) |
| 3236 | } |
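
// Sketch with illustrative values: for ch := make(chan int, 4), len(ch)
// loads the first word of the channel header (the current element count)
// and cap(ch) loads the second word (the buffer capacity), at offset
// lenType.Width; for a nil map or channel, the bThen branch above yields 0.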
| 3237 | |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3238 | type f2uCvtTab struct { |
| 3239 | ltf, cvt2U, subf ssa.Op |
| 3240 | value func(*state, ssa.Type, float64) *ssa.Value |
| 3241 | } |
| 3242 | |
| 3243 | var f32_u64 f2uCvtTab = f2uCvtTab{ |
| 3244 | ltf: ssa.OpLess32F, |
| 3245 | cvt2U: ssa.OpCvt32Fto64, |
| 3246 | subf: ssa.OpSub32F, |
| 3247 | value: (*state).constFloat32, |
| 3248 | } |
| 3249 | |
| 3250 | var f64_u64 f2uCvtTab = f2uCvtTab{ |
| 3251 | ltf: ssa.OpLess64F, |
| 3252 | cvt2U: ssa.OpCvt64Fto64, |
| 3253 | subf: ssa.OpSub64F, |
| 3254 | value: (*state).constFloat64, |
| 3255 | } |
| 3256 | |
| 3257 | func (s *state) float32ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3258 | return s.floatToUint(&f32_u64, n, x, ft, tt) |
| 3259 | } |
| 3260 | func (s *state) float64ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3261 | return s.floatToUint(&f64_u64, n, x, ft, tt) |
| 3262 | } |
| 3263 | |
| 3264 | func (s *state) floatToUint(cvttab *f2uCvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 3265 | // if x < 9223372036854775808.0 { |
| 3266 | // result = uintY(x) |
| 3267 | // } else { |
| 3268 | // y = x - 9223372036854775808.0 |
| 3269 | // z = uintY(y) |
| 3270 | // result = z | -9223372036854775808 |
| 3271 | // } |
| 3272 | twoToThe63 := cvttab.value(s, ft, 9223372036854775808.0) |
| 3273 | cmp := s.newValue2(cvttab.ltf, Types[TBOOL], x, twoToThe63) |
| 3274 | b := s.endBlock() |
| 3275 | b.Kind = ssa.BlockIf |
| 3276 | b.Control = cmp |
| 3277 | b.Likely = ssa.BranchLikely |
| 3278 | |
| 3279 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 3280 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 3281 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 3282 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3283 | b.AddEdgeTo(bThen) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3284 | s.startBlock(bThen) |
| 3285 | a0 := s.newValue1(cvttab.cvt2U, tt, x) |
| 3286 | s.vars[n] = a0 |
| 3287 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3288 | bThen.AddEdgeTo(bAfter) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3289 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3290 | b.AddEdgeTo(bElse) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3291 | s.startBlock(bElse) |
| 3292 | y := s.newValue2(cvttab.subf, ft, x, twoToThe63) |
| 3293 | y = s.newValue1(cvttab.cvt2U, tt, y) |
| 3294 | z := s.constInt64(tt, -9223372036854775808) |
| 3295 | a1 := s.newValue2(ssa.OpOr64, tt, y, z) |
| 3296 | s.vars[n] = a1 |
| 3297 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 3298 | bElse.AddEdgeTo(bAfter) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 3299 | |
| 3300 | s.startBlock(bAfter) |
| 3301 | return s.variable(n, n.Type) |
| 3302 | } |
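
// A worked example: converting x = 9223372036854775808.0 (2^63) takes the
// else branch: y = x - 2^63 = 0.0, z = uint64(0.0) = 0, and OR'ing in the
// constant -9223372036854775808 (bit pattern 1<<63) gives 1<<63, which is
// the correct uint64 result.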
| 3303 | |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3304 | // ifaceType returns the value for the word containing the type. |
| 3305 | // n is the node for the interface expression. |
| 3306 | // v is the corresponding value. |
| 3307 | func (s *state) ifaceType(n *Node, v *ssa.Value) *ssa.Value { |
| 3308 | byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte) |
| 3309 | |
| 3310 | if isnilinter(n.Type) { |
| 3311 | // Have *eface. The type is the first word in the struct. |
| 3312 | return s.newValue1(ssa.OpITab, byteptr, v) |
| 3313 | } |
| 3314 | |
| 3315 | // Have *iface. |
| 3316 | // The first word in the struct is the *itab. |
| 3317 | // If the *itab is nil, return 0. |
| 3318 | // Otherwise, the second word in the *itab is the type. |
| 3319 | |
| 3320 | tab := s.newValue1(ssa.OpITab, byteptr, v) |
| 3321 | s.vars[&typVar] = tab |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 3322 | isnonnil := s.newValue2(ssa.OpNeqPtr, Types[TBOOL], tab, s.constNil(byteptr)) |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3323 | b := s.endBlock() |
| 3324 | b.Kind = ssa.BlockIf |
| 3325 | b.Control = isnonnil |
| 3326 | b.Likely = ssa.BranchLikely |
| 3327 | |
| 3328 | bLoad := s.f.NewBlock(ssa.BlockPlain) |
| 3329 | bEnd := s.f.NewBlock(ssa.BlockPlain) |
| 3330 | |
| 3331 | b.AddEdgeTo(bLoad) |
| 3332 | b.AddEdgeTo(bEnd) |
| 3333 | bLoad.AddEdgeTo(bEnd) |
| 3334 | |
| 3335 | s.startBlock(bLoad) |
| 3336 | off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(Widthptr), tab) |
| 3337 | s.vars[&typVar] = s.newValue2(ssa.OpLoad, byteptr, off, s.mem()) |
| 3338 | s.endBlock() |
| 3339 | |
| 3340 | s.startBlock(bEnd) |
| 3341 | typ := s.variable(&typVar, byteptr) |
| 3342 | delete(s.vars, &typVar) |
| 3343 | return typ |
| 3344 | } |
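
// Layout sketch (restating the comments above): an empty interface is the
// pair (type, data) and a nonempty one is (itab, data), with the concrete
// type stored in the itab's second word. Hence the eface case returns the
// first word directly, while the iface case loads *(itab + Widthptr) when
// the itab is non-nil and otherwise returns the nil itab itself (0).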
| 3345 | |
| 3346 | // dottype generates SSA for a type assertion node. |
| 3347 | // commaok indicates whether to panic or return a bool. |
| 3348 | // If commaok is false, resok will be nil. |
| 3349 | func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) { |
| 3350 | iface := s.expr(n.Left) |
| 3351 | typ := s.ifaceType(n.Left, iface) // actual concrete type |
| 3352 | target := s.expr(typename(n.Type)) // target type |
| 3353 | if !isdirectiface(n.Type) { |
| 3354 | // walk rewrites ODOTTYPE/OAS2DOTTYPE into runtime calls except for this case. |
| 3355 | Fatalf("dottype needs a direct iface type %s", n.Type) |
| 3356 | } |
| 3357 | |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 3358 | if Debug_typeassert > 0 { |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 3359 | Warnl(n.Lineno, "type assertion inlined") |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 3360 | } |
| 3361 | |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3362 | // TODO: If we have a nonempty interface and its itab field is nil, |
| 3363 | // then this test is redundant and ifaceType should just branch directly to bFail. |
| 3364 | cond := s.newValue2(ssa.OpEqPtr, Types[TBOOL], typ, target) |
| 3365 | b := s.endBlock() |
| 3366 | b.Kind = ssa.BlockIf |
| 3367 | b.Control = cond |
| 3368 | b.Likely = ssa.BranchLikely |
| 3369 | |
| 3370 | byteptr := Ptrto(Types[TUINT8]) |
| 3371 | |
| 3372 | bOk := s.f.NewBlock(ssa.BlockPlain) |
| 3373 | bFail := s.f.NewBlock(ssa.BlockPlain) |
| 3374 | b.AddEdgeTo(bOk) |
| 3375 | b.AddEdgeTo(bFail) |
| 3376 | |
| 3377 | if !commaok { |
| 3378 | // on failure, panic by calling panicdottype |
| 3379 | s.startBlock(bFail) |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3380 | taddr := s.newValue1A(ssa.OpAddr, byteptr, &ssa.ExternSymbol{byteptr, typenamesym(n.Left.Type)}, s.sb) |
Keith Randall | 8c5bfcc | 2015-09-18 15:11:30 -0700 | [diff] [blame] | 3381 | s.rtcall(panicdottype, false, nil, typ, target, taddr) |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3382 | |
| 3383 | // on success, return idata field |
| 3384 | s.startBlock(bOk) |
| 3385 | return s.newValue1(ssa.OpIData, n.Type, iface), nil |
| 3386 | } |
| 3387 | |
| 3388 | // commaok is the more complicated case because we have |
| 3389 | // a control flow merge point. |
| 3390 | bEnd := s.f.NewBlock(ssa.BlockPlain) |
| 3391 | |
| 3392 | // type assertion succeeded |
| 3393 | s.startBlock(bOk) |
| 3394 | s.vars[&idataVar] = s.newValue1(ssa.OpIData, n.Type, iface) |
| 3395 | s.vars[&okVar] = s.constBool(true) |
| 3396 | s.endBlock() |
| 3397 | bOk.AddEdgeTo(bEnd) |
| 3398 | |
| 3399 | // type assertion failed |
| 3400 | s.startBlock(bFail) |
Josh Bleecher Snyder | 3921427 | 2016-03-06 18:06:09 -0800 | [diff] [blame] | 3401 | s.vars[&idataVar] = s.constNil(byteptr) |
Keith Randall | 269baa9 | 2015-09-17 10:31:16 -0700 | [diff] [blame] | 3402 | s.vars[&okVar] = s.constBool(false) |
| 3403 | s.endBlock() |
| 3404 | bFail.AddEdgeTo(bEnd) |
| 3405 | |
| 3406 | // merge point |
| 3407 | s.startBlock(bEnd) |
| 3408 | res = s.variable(&idataVar, byteptr) |
| 3409 | resok = s.variable(&okVar, Types[TBOOL]) |
| 3410 | delete(s.vars, &idataVar) |
| 3411 | delete(s.vars, &okVar) |
| 3412 | return res, resok |
| 3413 | } |
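
// Sketch of the generated code for v, ok := i.(*T) where *T is a direct
// (pointer-shaped) type: the interface's type word is compared against the
// runtime descriptor for *T; on success the data word is returned untouched
// (OpIData). In the commaok form the failure path yields a nil result with
// ok = false, while the non-commaok form calls panicdottype instead.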
| 3414 | |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 3415 | // checkgoto checks that a goto from node "from" to node "to" does not
| 3416 | // jump into a block or jump over variable declarations. |
| 3417 | // It is a copy of checkgoto in the pre-SSA backend, |
| 3418 | // modified only for line number handling. |
| 3419 | // TODO: document how this works and why it is designed the way it is. |
| 3420 | func (s *state) checkgoto(from *Node, to *Node) { |
| 3421 | if from.Sym == to.Sym { |
| 3422 | return |
| 3423 | } |
| 3424 | |
| 3425 | nf := 0 |
| 3426 | for fs := from.Sym; fs != nil; fs = fs.Link { |
| 3427 | nf++ |
| 3428 | } |
| 3429 | nt := 0 |
| 3430 | for fs := to.Sym; fs != nil; fs = fs.Link { |
| 3431 | nt++ |
| 3432 | } |
| 3433 | fs := from.Sym |
| 3434 | for ; nf > nt; nf-- { |
| 3435 | fs = fs.Link |
| 3436 | } |
| 3437 | if fs != to.Sym { |
| 3438 | // decide what to complain about. |
| 3439 | // prefer to complain about 'into block' over declarations, |
| 3440 | // so scan backward to find most recent block or else dcl. |
| 3441 | var block *Sym |
| 3442 | |
| 3443 | var dcl *Sym |
| 3444 | ts := to.Sym |
| 3445 | for ; nt > nf; nt-- { |
| 3446 | if ts.Pkg == nil { |
| 3447 | block = ts |
| 3448 | } else { |
| 3449 | dcl = ts |
| 3450 | } |
| 3451 | ts = ts.Link |
| 3452 | } |
| 3453 | |
| 3454 | for ts != fs { |
| 3455 | if ts.Pkg == nil { |
| 3456 | block = ts |
| 3457 | } else { |
| 3458 | dcl = ts |
| 3459 | } |
| 3460 | ts = ts.Link |
| 3461 | fs = fs.Link |
| 3462 | } |
| 3463 | |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 3464 | lno := from.Left.Lineno |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 3465 | if block != nil { |
Robert Griesemer | 2faf5bc | 2016-03-02 11:30:29 -0800 | [diff] [blame] | 3466 | yyerrorl(lno, "goto %v jumps into block starting at %v", from.Left.Sym, linestr(block.Lastlineno)) |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 3467 | } else { |
Robert Griesemer | 2faf5bc | 2016-03-02 11:30:29 -0800 | [diff] [blame] | 3468 | yyerrorl(lno, "goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, linestr(dcl.Lastlineno)) |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 3469 | } |
| 3470 | } |
| 3471 | } |
| 3472 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3473 | // variable returns the value of a variable at the current location. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3474 | func (s *state) variable(name *Node, t ssa.Type) *ssa.Value { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3475 | v := s.vars[name] |
| 3476 | if v == nil { |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 3477 | v = s.newValue0A(ssa.OpFwdRef, t, name) |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3478 | s.fwdRefs = append(s.fwdRefs, v) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3479 | s.vars[name] = v |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3480 | s.addNamedValue(name, v) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3481 | } |
| 3482 | return v |
| 3483 | } |
| 3484 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3485 | func (s *state) mem() *ssa.Value { |
Keith Randall | b32217a | 2015-09-17 16:45:10 -0700 | [diff] [blame] | 3486 | return s.variable(&memVar, ssa.TypeMem) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3487 | } |
| 3488 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3489 | func (s *state) linkForwardReferences() { |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3490 | // Build SSA graph. Each variable on its first use in a basic block |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3491 | // leaves a FwdRef in that block representing the incoming value |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3492 | // of that variable. This function links that ref up with possible definitions, |
| 3493 | // inserting Phi values as needed. This is essentially the algorithm |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3494 | // described by Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau: |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3495 | // http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3496 | // Differences: |
| 3497 | // - We use FwdRef nodes to postpone phi building until the CFG is |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3498 | // completely built. That way we can avoid the notion of "sealed" |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3499 | // blocks. |
| 3500 | // - Phi optimization is a separate pass (in ../ssa/phielim.go). |
| 3501 | for len(s.fwdRefs) > 0 { |
| 3502 | v := s.fwdRefs[len(s.fwdRefs)-1] |
| 3503 | s.fwdRefs = s.fwdRefs[:len(s.fwdRefs)-1] |
| 3504 | s.resolveFwdRef(v) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3505 | } |
| 3506 | } |
| 3507 | |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3508 | // resolveFwdRef modifies v to be the variable's value at the start of its block. |
| 3509 | // v must be a FwdRef op. |
| 3510 | func (s *state) resolveFwdRef(v *ssa.Value) { |
| 3511 | b := v.Block |
| 3512 | name := v.Aux.(*Node) |
| 3513 | v.Aux = nil |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3514 | if b == s.f.Entry { |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3515 | // Live variable at start of function. |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 3516 | if s.canSSA(name) { |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3517 | v.Op = ssa.OpArg |
| 3518 | v.Aux = name |
| 3519 | return |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 3520 | } |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3521 | // Not SSAable. Load it. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3522 | addr := s.decladdrs[name] |
| 3523 | if addr == nil { |
| 3524 | // TODO: closure args reach here. |
David Chase | 32ffbf7 | 2015-10-08 17:14:12 -0400 | [diff] [blame] | 3525 | s.Unimplementedf("unhandled closure arg %s at entry to function %s", name, b.Func.Name) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3526 | } |
| 3527 | if _, ok := addr.Aux.(*ssa.ArgSymbol); !ok { |
| 3528 | s.Fatalf("variable live at start of function %s is not an argument %s", b.Func.Name, name) |
| 3529 | } |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3530 | v.Op = ssa.OpLoad |
| 3531 | v.AddArgs(addr, s.startmem) |
| 3532 | return |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3533 | } |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3534 | if len(b.Preds) == 0 { |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 3535 | // This block is dead; we have no predecessors and we're not the entry block. |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3536 | // It doesn't matter what we use here as long as it is well-formed. |
| 3537 | v.Op = ssa.OpUnknown |
| 3538 | return |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3539 | } |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3540 | // Find variable value on each predecessor. |
| 3541 | var argstore [4]*ssa.Value |
| 3542 | args := argstore[:0] |
| 3543 | for _, p := range b.Preds { |
| 3544 | args = append(args, s.lookupVarOutgoing(p, v.Type, name, v.Line)) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3545 | } |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3546 | |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3547 | // Decide if we need a phi or not. We need a phi if there |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3548 | // are two different args (which are both not v). |
| 3549 | var w *ssa.Value |
| 3550 | for _, a := range args { |
| 3551 | if a == v { |
| 3552 | continue // self-reference |
| 3553 | } |
| 3554 | if a == w { |
| 3555 | continue // already have this witness |
| 3556 | } |
| 3557 | if w != nil { |
| 3558 | // two witnesses, need a phi value |
| 3559 | v.Op = ssa.OpPhi |
| 3560 | v.AddArgs(args...) |
| 3561 | return |
| 3562 | } |
| 3563 | w = a // save witness |
| 3564 | } |
| 3565 | if w == nil { |
| 3566 | s.Fatalf("no witness for reachable phi %s", v) |
| 3567 | } |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3568 | // One witness. Make v a copy of w. |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3569 | v.Op = ssa.OpCopy |
| 3570 | v.AddArg(w) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3571 | } |
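
// Example (illustrative): for
//	if c { x = 1 } else { x = 2 }
//	use(x)
// the use of x leaves an OpFwdRef in the merge block. resolveFwdRef looks up
// x on both predecessors and, seeing two distinct witnesses, rewrites the
// ref in place into a Phi(1, 2); with a single witness it becomes an OpCopy.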
| 3572 | |
| 3573 | // lookupVarOutgoing finds the variable's value at the end of block b. |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3574 | func (s *state) lookupVarOutgoing(b *ssa.Block, t ssa.Type, name *Node, line int32) *ssa.Value { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3575 | m := s.defvars[b.ID] |
| 3576 | if v, ok := m[name]; ok { |
| 3577 | return v |
| 3578 | } |
| 3579 | // The variable is not defined by b and we haven't |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3580 | // looked it up yet. Generate a FwdRef for the variable and return that. |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3581 | v := b.NewValue0A(line, ssa.OpFwdRef, t, name) |
| 3582 | s.fwdRefs = append(s.fwdRefs, v) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3583 | m[name] = v |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 3584 | s.addNamedValue(name, v) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 3585 | return v |
| 3586 | } |
| 3587 | |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3588 | func (s *state) addNamedValue(n *Node, v *ssa.Value) { |
| 3589 | if n.Class == Pxxx { |
| 3590 | // Don't track our dummy nodes (&memVar etc.). |
| 3591 | return |
| 3592 | } |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3593 | if strings.HasPrefix(n.Sym.Name, "autotmp_") { |
| 3594 | // Don't track autotmp_ variables. |
| 3595 | return |
| 3596 | } |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 3597 | if n.Class == PAUTO && (v.Type.IsString() || v.Type.IsSlice() || v.Type.IsInterface()) { |
| 3598 | // TODO: can't handle auto compound objects with pointers yet. |
| 3599 | // The live variable analysis barfs because we don't put VARDEF |
| 3600 | // pseudos in the right place when we spill to these nodes. |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3601 | return |
| 3602 | } |
Keith Randall | 31d13f4 | 2016-03-08 20:09:48 -0800 | [diff] [blame] | 3603 | if n.Class == PPARAMOUT { |
| 3604 | // Don't track named output values. This prevents return values |
| 3605 | // from being assigned too early. See #14591 and #14762. TODO: allow this. |
| 3606 | return |
| 3607 | } |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3608 | if n.Class == PAUTO && n.Xoffset != 0 { |
| 3609 | s.Fatalf("AUTO var with offset %s %d", n, n.Xoffset) |
| 3610 | } |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 3611 | loc := ssa.LocalSlot{N: n, Type: n.Type, Off: 0} |
| 3612 | values, ok := s.f.NamedValues[loc] |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3613 | if !ok { |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 3614 | s.f.Names = append(s.f.Names, loc) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3615 | } |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 3616 | s.f.NamedValues[loc] = append(values, v) |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 3617 | } |
| 3618 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3619 | // an unresolved branch |
| 3620 | type branch struct { |
| 3621 | p *obj.Prog // branch instruction |
| 3622 | b *ssa.Block // target |
| 3623 | } |
| 3624 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3625 | type genState struct { |
| 3626 | // branches remembers all the branch instructions we've seen |
| 3627 | // and where they would like to go. |
| 3628 | branches []branch |
| 3629 | |
| 3630 | // bstart remembers where each block starts (indexed by block ID) |
| 3631 | bstart []*obj.Prog |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3632 | } |
| 3633 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3634 | // genssa appends entries to ptxt for each instruction in f. |
| 3635 | // gcargs and gclocals are filled in with pointer maps for the frame. |
| 3636 | func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) { |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3637 | var s genState |
| 3638 | |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3639 | e := f.Config.Frontend().(*ssaExport) |
| 3640 | // We're about to emit a bunch of Progs. |
| 3641 | // Since the only way to get here is to explicitly request it, |
| 3642 | // just fail on unimplemented instead of trying to unwind our mess. |
| 3643 | e.mustImplement = true |
| 3644 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3645 | // Remember where each block starts. |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3646 | s.bstart = make([]*obj.Prog, f.NumBlocks()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3647 | |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3648 | var valueProgs map[*obj.Prog]*ssa.Value |
| 3649 | var blockProgs map[*obj.Prog]*ssa.Block |
| 3650 | const logProgs = true |
| 3651 | if logProgs { |
| 3652 | valueProgs = make(map[*obj.Prog]*ssa.Value, f.NumValues()) |
| 3653 | blockProgs = make(map[*obj.Prog]*ssa.Block, f.NumBlocks()) |
| 3654 | f.Logf("genssa %s\n", f.Name) |
| 3655 | blockProgs[Pc] = f.Blocks[0] |
| 3656 | } |
| 3657 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3658 | // Emit basic blocks |
| 3659 | for i, b := range f.Blocks { |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3660 | s.bstart[b.ID] = Pc |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3661 | // Emit values in block |
Keith Randall | 7b77394 | 2016-01-22 13:44:58 -0800 | [diff] [blame] | 3662 | s.markMoves(b) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3663 | for _, v := range b.Values { |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3664 | x := Pc |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3665 | s.genValue(v) |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3666 | if logProgs { |
| 3667 | for ; x != Pc; x = x.Link { |
| 3668 | valueProgs[x] = v |
| 3669 | } |
| 3670 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3671 | } |
| 3672 | // Emit control flow instructions for block |
| 3673 | var next *ssa.Block |
Keith Randall | 91f69c6 | 2016-02-26 16:32:01 -0800 | [diff] [blame] | 3674 | if i < len(f.Blocks)-1 && (Debug['N'] == 0 || b.Kind == ssa.BlockCall) { |
Keith Randall | 8906d2a | 2016-02-22 23:19:00 -0800 | [diff] [blame] | 3675 | // If -N, leave next==nil so every block with successors |
Keith Randall | 91f69c6 | 2016-02-26 16:32:01 -0800 | [diff] [blame] | 3676 | // ends in a JMP (except call blocks - plive doesn't like |
| 3677 | // select{send,recv} followed by a JMP call). Helps keep |
| 3678 | // line numbers for otherwise empty blocks. |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3679 | next = f.Blocks[i+1] |
| 3680 | } |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3681 | x := Pc |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3682 | s.genBlock(b, next) |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3683 | if logProgs { |
| 3684 | for ; x != Pc; x = x.Link { |
| 3685 | blockProgs[x] = b |
| 3686 | } |
| 3687 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3688 | } |
| 3689 | |
| 3690 | // Resolve branches |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3691 | for _, br := range s.branches { |
| 3692 | br.p.To.Val = s.bstart[br.b.ID] |
| 3693 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3694 | |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3695 | if logProgs { |
| 3696 | for p := ptxt; p != nil; p = p.Link { |
| 3697 | var s string |
| 3698 | if v, ok := valueProgs[p]; ok { |
| 3699 | s = v.String() |
| 3700 | } else if b, ok := blockProgs[p]; ok { |
| 3701 | s = b.String() |
| 3702 | } else { |
| 3703 | s = "   " // most value and branch strings are 2-3 characters long
| 3704 | } |
| 3705 | f.Logf("%s\t%s\n", s, p) |
| 3706 | } |
Josh Bleecher Snyder | 35fb514 | 2015-08-10 12:15:52 -0700 | [diff] [blame] | 3707 | if f.Config.HTML != nil { |
| 3708 | saved := ptxt.Ctxt.LineHist.PrintFilenameOnly |
| 3709 | ptxt.Ctxt.LineHist.PrintFilenameOnly = true |
| 3710 | var buf bytes.Buffer |
| 3711 | buf.WriteString("<code>") |
| 3712 | buf.WriteString("<dl class=\"ssa-gen\">") |
| 3713 | for p := ptxt; p != nil; p = p.Link { |
| 3714 | buf.WriteString("<dt class=\"ssa-prog-src\">") |
| 3715 | if v, ok := valueProgs[p]; ok { |
| 3716 | buf.WriteString(v.HTML()) |
| 3717 | } else if b, ok := blockProgs[p]; ok { |
| 3718 | buf.WriteString(b.HTML()) |
| 3719 | } |
| 3720 | buf.WriteString("</dt>") |
| 3721 | buf.WriteString("<dd class=\"ssa-prog\">") |
| 3722 | buf.WriteString(html.EscapeString(p.String())) |
| 3723 | buf.WriteString("</dd>") |
| 3725 | } |
| 3726 | buf.WriteString("</dl>") |
| 3727 | buf.WriteString("</code>") |
| 3728 | f.Config.HTML.WriteColumn("genssa", buf.String()) |
| 3729 | ptxt.Ctxt.LineHist.PrintFilenameOnly = saved |
| 3730 | } |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 3731 | } |
| 3732 | |
Josh Bleecher Snyder | 6b41665 | 2015-07-28 10:56:39 -0700 | [diff] [blame] | 3733 | // Emit static data |
| 3734 | if f.StaticData != nil { |
| 3735 | for _, n := range f.StaticData.([]*Node) { |
| 3736 | if !gen_as_init(n, false) { |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 3737 | Fatalf("non-static data marked as static: %v\n\n", n)
Josh Bleecher Snyder | 6b41665 | 2015-07-28 10:56:39 -0700 | [diff] [blame] | 3738 | } |
| 3739 | } |
| 3740 | } |
| 3741 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3742 | // Allocate stack frame |
| 3743 | allocauto(ptxt) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3744 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3745 | // Generate gc bitmaps. |
| 3746 | liveness(Curfn, ptxt, gcargs, gclocals) |
| 3747 | gcsymdup(gcargs) |
| 3748 | gcsymdup(gclocals) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3749 | |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 3750 | // Add frame prologue. Zero ambiguously live variables. |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3751 | Thearch.Defframe(ptxt) |
| 3752 | if Debug['f'] != 0 { |
| 3753 | frame(0) |
| 3754 | } |
| 3755 | |
| 3756 | // Remove leftover instrumentation from the instruction stream. |
| 3757 | removevardef(ptxt) |
Josh Bleecher Snyder | 35fb514 | 2015-08-10 12:15:52 -0700 | [diff] [blame] | 3758 | |
| 3759 | f.Config.HTML.Close() |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3760 | } |
| 3761 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3762 | // opregreg emits instructions for |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3763 | // dest := dest(To) op src(From) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3764 | // and also returns the created obj.Prog so it |
| 3765 | // may be further adjusted (offset, scale, etc). |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 3766 | func opregreg(op obj.As, dest, src int16) *obj.Prog { |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3767 | p := Prog(op) |
| 3768 | p.From.Type = obj.TYPE_REG |
| 3769 | p.To.Type = obj.TYPE_REG |
| 3770 | p.To.Reg = dest |
| 3771 | p.From.Reg = src |
| 3772 | return p |
| 3773 | } |
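
// Usage sketch (hypothetical registers): opregreg(x86.AADDQ, x86.REG_AX,
// x86.REG_BX) emits "ADDQ BX, AX", i.e. AX += BX, since the destination
// goes in p.To and the source in p.From.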
| 3774 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3775 | func (s *genState) genValue(v *ssa.Value) { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 3776 | lineno = v.Line |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3777 | switch v.Op { |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 3778 | case ssa.OpAMD64ADDQ, ssa.OpAMD64ADDL, ssa.OpAMD64ADDW: |
| 3779 | r := regnum(v) |
| 3780 | r1 := regnum(v.Args[0]) |
| 3781 | r2 := regnum(v.Args[1]) |
| 3782 | switch { |
| 3783 | case r == r1: |
| 3784 | p := Prog(v.Op.Asm()) |
| 3785 | p.From.Type = obj.TYPE_REG |
| 3786 | p.From.Reg = r2 |
| 3787 | p.To.Type = obj.TYPE_REG |
| 3788 | p.To.Reg = r |
| 3789 | case r == r2: |
| 3790 | p := Prog(v.Op.Asm()) |
| 3791 | p.From.Type = obj.TYPE_REG |
| 3792 | p.From.Reg = r1 |
| 3793 | p.To.Type = obj.TYPE_REG |
| 3794 | p.To.Reg = r |
| 3795 | default: |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 3796 | var asm obj.As |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 3797 | switch v.Op { |
| 3798 | case ssa.OpAMD64ADDQ: |
| 3799 | asm = x86.ALEAQ |
| 3800 | case ssa.OpAMD64ADDL: |
| 3801 | asm = x86.ALEAL |
| 3802 | case ssa.OpAMD64ADDW: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 3803 | asm = x86.ALEAL |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 3804 | } |
| 3805 | p := Prog(asm) |
| 3806 | p.From.Type = obj.TYPE_MEM |
| 3807 | p.From.Reg = r1 |
| 3808 | p.From.Scale = 1 |
| 3809 | p.From.Index = r2 |
| 3810 | p.To.Type = obj.TYPE_REG |
| 3811 | p.To.Reg = r |
| 3812 | } |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3813 | // 2-address opcode arithmetic, symmetric |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3814 | case ssa.OpAMD64ADDB, ssa.OpAMD64ADDSS, ssa.OpAMD64ADDSD, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 3815 | ssa.OpAMD64ANDQ, ssa.OpAMD64ANDL, ssa.OpAMD64ANDW, ssa.OpAMD64ANDB, |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3816 | ssa.OpAMD64ORQ, ssa.OpAMD64ORL, ssa.OpAMD64ORW, ssa.OpAMD64ORB, |
| 3817 | ssa.OpAMD64XORQ, ssa.OpAMD64XORL, ssa.OpAMD64XORW, ssa.OpAMD64XORB, |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3818 | ssa.OpAMD64MULQ, ssa.OpAMD64MULL, ssa.OpAMD64MULW, ssa.OpAMD64MULB, |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 3819 | ssa.OpAMD64MULSS, ssa.OpAMD64MULSD, ssa.OpAMD64PXOR: |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 3820 | r := regnum(v) |
| 3821 | x := regnum(v.Args[0]) |
| 3822 | y := regnum(v.Args[1]) |
| 3823 | if x != r && y != r { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 3824 | opregreg(moveByType(v.Type), r, x) |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 3825 | x = r |
| 3826 | } |
| 3827 | p := Prog(v.Op.Asm()) |
| 3828 | p.From.Type = obj.TYPE_REG |
| 3829 | p.To.Type = obj.TYPE_REG |
| 3830 | p.To.Reg = r |
| 3831 | if x == r { |
| 3832 | p.From.Reg = y |
| 3833 | } else { |
| 3834 | p.From.Reg = x |
| 3835 | } |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3836 | // 2-address opcode arithmetic, not symmetric |
| 3837 | case ssa.OpAMD64SUBQ, ssa.OpAMD64SUBL, ssa.OpAMD64SUBW, ssa.OpAMD64SUBB: |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 3838 | r := regnum(v) |
| 3839 | x := regnum(v.Args[0]) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3840 | y := regnum(v.Args[1]) |
| 3841 | var neg bool |
| 3842 | if y == r { |
| 3843 | // compute -(y-x) instead |
| 3844 | x, y = y, x |
| 3845 | neg = true |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 3846 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3847 | if x != r { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 3848 | opregreg(moveByType(v.Type), r, x) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3849 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3850 | opregreg(v.Op.Asm(), r, y) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3851 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3852 | if neg { |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 3853 | if v.Op == ssa.OpAMD64SUBQ { |
| 3854 | p := Prog(x86.ANEGQ) |
| 3855 | p.To.Type = obj.TYPE_REG |
| 3856 | p.To.Reg = r |
|          | 3857 | 			} else { // Avoids partial register write
| 3858 | p := Prog(x86.ANEGL) |
| 3859 | p.To.Type = obj.TYPE_REG |
| 3860 | p.To.Reg = r |
| 3861 | } |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3862 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3863 | case ssa.OpAMD64SUBSS, ssa.OpAMD64SUBSD, ssa.OpAMD64DIVSS, ssa.OpAMD64DIVSD: |
| 3864 | r := regnum(v) |
| 3865 | x := regnum(v.Args[0]) |
| 3866 | y := regnum(v.Args[1]) |
| 3867 | if y == r && x != r { |
| 3868 | // r/y := x op r/y, need to preserve x and rewrite to |
| 3869 | // r/y := r/y op x15 |
| 3870 | x15 := int16(x86.REG_X15) |
| 3871 | // register move y to x15 |
| 3872 | // register move x to y |
| 3873 | // rename y with x15 |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 3874 | opregreg(moveByType(v.Type), x15, y) |
| 3875 | opregreg(moveByType(v.Type), r, x) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3876 | y = x15 |
| 3877 | } else if x != r { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 3878 | opregreg(moveByType(v.Type), r, x) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3879 | } |
| 3880 | opregreg(v.Op.Asm(), r, y) |
| 3881 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3882 | case ssa.OpAMD64DIVQ, ssa.OpAMD64DIVL, ssa.OpAMD64DIVW, |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3883 | ssa.OpAMD64DIVQU, ssa.OpAMD64DIVLU, ssa.OpAMD64DIVWU, |
| 3884 | ssa.OpAMD64MODQ, ssa.OpAMD64MODL, ssa.OpAMD64MODW, |
| 3885 | ssa.OpAMD64MODQU, ssa.OpAMD64MODLU, ssa.OpAMD64MODWU: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3886 | |
| 3887 | // Arg[0] is already in AX as it's the only register we allow |
| 3888 | // and AX is the only output |
| 3889 | x := regnum(v.Args[1]) |
| 3890 | |
|          | 3891 | 		// CPU faults upon signed overflow, which occurs when the most
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3892 | // negative int is divided by -1. |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3893 | var j *obj.Prog |
| 3894 | if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL || |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3895 | v.Op == ssa.OpAMD64DIVW || v.Op == ssa.OpAMD64MODQ || |
| 3896 | v.Op == ssa.OpAMD64MODL || v.Op == ssa.OpAMD64MODW { |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3897 | |
| 3898 | var c *obj.Prog |
| 3899 | switch v.Op { |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3900 | case ssa.OpAMD64DIVQ, ssa.OpAMD64MODQ: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3901 | c = Prog(x86.ACMPQ) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3902 | j = Prog(x86.AJEQ) |
| 3903 | // go ahead and sign extend to save doing it later |
| 3904 | Prog(x86.ACQO) |
| 3905 | |
| 3906 | case ssa.OpAMD64DIVL, ssa.OpAMD64MODL: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3907 | c = Prog(x86.ACMPL) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3908 | j = Prog(x86.AJEQ) |
| 3909 | Prog(x86.ACDQ) |
| 3910 | |
| 3911 | case ssa.OpAMD64DIVW, ssa.OpAMD64MODW: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3912 | c = Prog(x86.ACMPW) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3913 | j = Prog(x86.AJEQ) |
| 3914 | Prog(x86.ACWD) |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3915 | } |
| 3916 | c.From.Type = obj.TYPE_REG |
| 3917 | c.From.Reg = x |
| 3918 | c.To.Type = obj.TYPE_CONST |
| 3919 | c.To.Offset = -1 |
| 3920 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3921 | j.To.Type = obj.TYPE_BRANCH |
| 3922 | |
| 3923 | } |
| 3924 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3925 | 		// for unsigned ints, we extend by setting DX = 0 (zero extension)
| 3926 | // signed ints were sign extended above |
| 3927 | if v.Op == ssa.OpAMD64DIVQU || v.Op == ssa.OpAMD64MODQU || |
| 3928 | v.Op == ssa.OpAMD64DIVLU || v.Op == ssa.OpAMD64MODLU || |
| 3929 | v.Op == ssa.OpAMD64DIVWU || v.Op == ssa.OpAMD64MODWU { |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3930 | c := Prog(x86.AXORQ) |
| 3931 | c.From.Type = obj.TYPE_REG |
| 3932 | c.From.Reg = x86.REG_DX |
| 3933 | c.To.Type = obj.TYPE_REG |
| 3934 | c.To.Reg = x86.REG_DX |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3935 | } |
| 3936 | |
| 3937 | p := Prog(v.Op.Asm()) |
| 3938 | p.From.Type = obj.TYPE_REG |
| 3939 | p.From.Reg = x |
| 3940 | |
| 3941 | // signed division, rest of the check for -1 case |
| 3942 | if j != nil { |
| 3943 | j2 := Prog(obj.AJMP) |
| 3944 | j2.To.Type = obj.TYPE_BRANCH |
| 3945 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 3946 | var n *obj.Prog |
| 3947 | if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL || |
| 3948 | v.Op == ssa.OpAMD64DIVW { |
| 3949 | // n * -1 = -n |
| 3950 | n = Prog(x86.ANEGQ) |
| 3951 | n.To.Type = obj.TYPE_REG |
| 3952 | n.To.Reg = x86.REG_AX |
| 3953 | } else { |
| 3954 | // n % -1 == 0 |
| 3955 | n = Prog(x86.AXORQ) |
| 3956 | n.From.Type = obj.TYPE_REG |
| 3957 | n.From.Reg = x86.REG_DX |
| 3958 | n.To.Type = obj.TYPE_REG |
| 3959 | n.To.Reg = x86.REG_DX |
| 3960 | } |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 3961 | |
| 3962 | j.To.Val = n |
| 3963 | j2.To.Val = Pc |
| 3964 | } |
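		// Sketch of the special case handled above: for math.MinInt64 / -1 the
		// hardware IDIV would fault (#DE: the quotient 1<<63 does not fit in int64),
		// so the JEQ emitted after the CMP $-1 skips the IDIV and instead runs
		// NEGQ AX (quotient: -MinInt64 wraps back to MinInt64) or XORQ DX, DX
		// (remainder: 0).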
| 3965 | |
Keith Randall | a3055af | 2016-02-05 20:26:18 -0800 | [diff] [blame] | 3966 | case ssa.OpAMD64HMULQ, ssa.OpAMD64HMULL, ssa.OpAMD64HMULW, ssa.OpAMD64HMULB, |
| 3967 | ssa.OpAMD64HMULQU, ssa.OpAMD64HMULLU, ssa.OpAMD64HMULWU, ssa.OpAMD64HMULBU: |
Todd Neal | 67cbd5b | 2015-08-18 19:14:47 -0500 | [diff] [blame] | 3968 | 		// the frontend rewrites constant division by 8/16/32-bit integers into
|          | 3969 | 		// HMUL by a constant.
Keith Randall | a3055af | 2016-02-05 20:26:18 -0800 | [diff] [blame] | 3970 | 		// SSA rewrites generate the 64-bit versions.
Todd Neal | 67cbd5b | 2015-08-18 19:14:47 -0500 | [diff] [blame] | 3971 | |
| 3972 | // Arg[0] is already in AX as it's the only register we allow |
| 3973 | // and DX is the only output we care about (the high bits) |
| 3974 | p := Prog(v.Op.Asm()) |
| 3975 | p.From.Type = obj.TYPE_REG |
| 3976 | p.From.Reg = regnum(v.Args[1]) |
| 3977 | |
| 3978 | // IMULB puts the high portion in AH instead of DL, |
| 3979 | // so move it to DL for consistency |
| 3980 | if v.Type.Size() == 1 { |
| 3981 | m := Prog(x86.AMOVB) |
| 3982 | m.From.Type = obj.TYPE_REG |
| 3983 | m.From.Reg = x86.REG_AH |
| 3984 | m.To.Type = obj.TYPE_REG |
| 3985 | m.To.Reg = x86.REG_DX |
| 3986 | } |
| 3987 | |
Keith Randall | a3055af | 2016-02-05 20:26:18 -0800 | [diff] [blame] | 3988 | case ssa.OpAMD64AVGQU: |
| 3989 | // compute (x+y)/2 unsigned. |
| 3990 | // Do a 64-bit add, the overflow goes into the carry. |
| 3991 | // Shift right once and pull the carry back into the 63rd bit. |
| 3992 | r := regnum(v) |
| 3993 | x := regnum(v.Args[0]) |
| 3994 | y := regnum(v.Args[1]) |
| 3995 | if x != r && y != r { |
| 3996 | opregreg(moveByType(v.Type), r, x) |
| 3997 | x = r |
| 3998 | } |
| 3999 | p := Prog(x86.AADDQ) |
| 4000 | p.From.Type = obj.TYPE_REG |
| 4001 | p.To.Type = obj.TYPE_REG |
| 4002 | p.To.Reg = r |
| 4003 | if x == r { |
| 4004 | p.From.Reg = y |
| 4005 | } else { |
| 4006 | p.From.Reg = x |
| 4007 | } |
| 4008 | p = Prog(x86.ARCRQ) |
| 4009 | p.From.Type = obj.TYPE_CONST |
| 4010 | p.From.Offset = 1 |
| 4011 | p.To.Type = obj.TYPE_REG |
| 4012 | p.To.Reg = r |
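		// Numeric sketch of the sequence above: with x = 1<<64 - 1 and y = 1, the
		// ADDQ leaves 0 with CF = 1, and RCRQ $1 rotates the carry back into bit 63,
		// yielding 1<<63 -- exactly (x+y)/2 with the overflowed bit preserved.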
| 4013 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4014 | case ssa.OpAMD64SHLQ, ssa.OpAMD64SHLL, ssa.OpAMD64SHLW, ssa.OpAMD64SHLB, |
| 4015 | ssa.OpAMD64SHRQ, ssa.OpAMD64SHRL, ssa.OpAMD64SHRW, ssa.OpAMD64SHRB, |
| 4016 | ssa.OpAMD64SARQ, ssa.OpAMD64SARL, ssa.OpAMD64SARW, ssa.OpAMD64SARB: |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4017 | x := regnum(v.Args[0]) |
| 4018 | r := regnum(v) |
| 4019 | if x != r { |
| 4020 | if r == x86.REG_CX { |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 4021 | v.Fatalf("can't implement %s, target and shift both in CX", v.LongString()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4022 | } |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4023 | p := Prog(moveByType(v.Type)) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4024 | p.From.Type = obj.TYPE_REG |
| 4025 | p.From.Reg = x |
| 4026 | p.To.Type = obj.TYPE_REG |
| 4027 | p.To.Reg = r |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4028 | } |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 4029 | p := Prog(v.Op.Asm()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4030 | p.From.Type = obj.TYPE_REG |
| 4031 | p.From.Reg = regnum(v.Args[1]) // should be CX |
| 4032 | p.To.Type = obj.TYPE_REG |
| 4033 | p.To.Reg = r |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4034 | case ssa.OpAMD64ADDQconst, ssa.OpAMD64ADDLconst, ssa.OpAMD64ADDWconst: |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 4035 | r := regnum(v) |
| 4036 | a := regnum(v.Args[0]) |
| 4037 | if r == a { |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4038 | if v.AuxInt2Int64() == 1 { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4039 | var asm obj.As |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4040 | switch v.Op { |
| 4041 | // Software optimization manual recommends add $1,reg. |
|          | 4042 | 				// But inc/dec is 1 byte smaller. ICC always uses inc.
|          | 4043 | 				// Clang/GCC choose depending on flags, but prefer add.
|          | 4044 | 				// Experiments show that inc/dec is a little faster
|          | 4045 | 				// and makes the binary a little smaller.
| 4046 | case ssa.OpAMD64ADDQconst: |
| 4047 | asm = x86.AINCQ |
| 4048 | case ssa.OpAMD64ADDLconst: |
| 4049 | asm = x86.AINCL |
| 4050 | case ssa.OpAMD64ADDWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4051 | asm = x86.AINCL |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4052 | } |
| 4053 | p := Prog(asm) |
| 4054 | p.To.Type = obj.TYPE_REG |
| 4055 | p.To.Reg = r |
| 4056 | return |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4057 | } else if v.AuxInt2Int64() == -1 { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4058 | var asm obj.As |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4059 | switch v.Op { |
| 4060 | case ssa.OpAMD64ADDQconst: |
| 4061 | asm = x86.ADECQ |
| 4062 | case ssa.OpAMD64ADDLconst: |
| 4063 | asm = x86.ADECL |
| 4064 | case ssa.OpAMD64ADDWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4065 | asm = x86.ADECL |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4066 | } |
| 4067 | p := Prog(asm) |
| 4068 | p.To.Type = obj.TYPE_REG |
| 4069 | p.To.Reg = r |
| 4070 | return |
| 4071 | } else { |
| 4072 | p := Prog(v.Op.Asm()) |
| 4073 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4074 | p.From.Offset = v.AuxInt2Int64() |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4075 | p.To.Type = obj.TYPE_REG |
| 4076 | p.To.Reg = r |
| 4077 | return |
| 4078 | } |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 4079 | } |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4080 | var asm obj.As |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4081 | switch v.Op { |
| 4082 | case ssa.OpAMD64ADDQconst: |
| 4083 | asm = x86.ALEAQ |
| 4084 | case ssa.OpAMD64ADDLconst: |
| 4085 | asm = x86.ALEAL |
| 4086 | case ssa.OpAMD64ADDWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4087 | asm = x86.ALEAL |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4088 | } |
| 4089 | p := Prog(asm) |
| 4090 | p.From.Type = obj.TYPE_MEM |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 4091 | p.From.Reg = a |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4092 | p.From.Offset = v.AuxInt2Int64() |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4093 | p.To.Type = obj.TYPE_REG |
Keith Randall | a0da2d2 | 2016-02-04 15:08:47 -0800 | [diff] [blame] | 4094 | p.To.Reg = r |
Alexandru Moșoi | 7a6de6d | 2015-08-14 13:23:11 +0200 | [diff] [blame] | 4095 | case ssa.OpAMD64MULQconst, ssa.OpAMD64MULLconst, ssa.OpAMD64MULWconst, ssa.OpAMD64MULBconst: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4096 | r := regnum(v) |
| 4097 | x := regnum(v.Args[0]) |
| 4098 | if r != x { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4099 | p := Prog(moveByType(v.Type)) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4100 | p.From.Type = obj.TYPE_REG |
| 4101 | p.From.Reg = x |
| 4102 | p.To.Type = obj.TYPE_REG |
| 4103 | p.To.Reg = r |
| 4104 | } |
| 4105 | p := Prog(v.Op.Asm()) |
| 4106 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4107 | p.From.Offset = v.AuxInt2Int64() |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4108 | p.To.Type = obj.TYPE_REG |
| 4109 | p.To.Reg = r |
| 4110 | // TODO: Teach doasm to compile the three-address multiply imul $c, r1, r2 |
| 4111 | // instead of using the MOVQ above. |
| 4112 | //p.From3 = new(obj.Addr) |
| 4113 | //p.From3.Type = obj.TYPE_REG |
| 4114 | //p.From3.Reg = regnum(v.Args[0]) |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4115 | case ssa.OpAMD64SUBQconst, ssa.OpAMD64SUBLconst, ssa.OpAMD64SUBWconst: |
| 4116 | x := regnum(v.Args[0]) |
| 4117 | r := regnum(v) |
|          | 4118 | 		// We have a 3-operand add (LEA), so transforming a = b - const into
|          | 4119 | 		// a = b + (-const) saves us one instruction. We can't fit
|          | 4120 | 		// -(-1 << 31) into the 4-byte offset field of LEA,
|          | 4121 | 		// but the 2-address form below handles that case just fine.
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4122 | if v.AuxInt2Int64() == -1<<31 || x == r { |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4123 | if x != r { |
| 4124 | // This code compensates for the fact that the register allocator |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 4125 | // doesn't understand 2-address instructions yet. TODO: fix that. |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4126 | p := Prog(moveByType(v.Type)) |
| 4127 | p.From.Type = obj.TYPE_REG |
| 4128 | p.From.Reg = x |
| 4129 | p.To.Type = obj.TYPE_REG |
| 4130 | p.To.Reg = r |
| 4131 | } |
| 4132 | p := Prog(v.Op.Asm()) |
| 4133 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4134 | p.From.Offset = v.AuxInt2Int64() |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4135 | p.To.Type = obj.TYPE_REG |
| 4136 | p.To.Reg = r |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4137 | } else if x == r && v.AuxInt2Int64() == -1 { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4138 | var asm obj.As |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4139 | // x = x - (-1) is the same as x++ |
| 4140 | // See OpAMD64ADDQconst comments about inc vs add $1,reg |
| 4141 | switch v.Op { |
| 4142 | case ssa.OpAMD64SUBQconst: |
| 4143 | asm = x86.AINCQ |
| 4144 | case ssa.OpAMD64SUBLconst: |
| 4145 | asm = x86.AINCL |
| 4146 | case ssa.OpAMD64SUBWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4147 | asm = x86.AINCL |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4148 | } |
| 4149 | p := Prog(asm) |
| 4150 | p.To.Type = obj.TYPE_REG |
| 4151 | p.To.Reg = r |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4152 | } else if x == r && v.AuxInt2Int64() == 1 { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4153 | var asm obj.As |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4154 | switch v.Op { |
| 4155 | case ssa.OpAMD64SUBQconst: |
| 4156 | asm = x86.ADECQ |
| 4157 | case ssa.OpAMD64SUBLconst: |
| 4158 | asm = x86.ADECL |
| 4159 | case ssa.OpAMD64SUBWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4160 | asm = x86.ADECL |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4161 | } |
| 4162 | p := Prog(asm) |
| 4163 | p.To.Type = obj.TYPE_REG |
| 4164 | p.To.Reg = r |
| 4165 | } else { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4166 | var asm obj.As |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4167 | switch v.Op { |
| 4168 | case ssa.OpAMD64SUBQconst: |
| 4169 | asm = x86.ALEAQ |
| 4170 | case ssa.OpAMD64SUBLconst: |
| 4171 | asm = x86.ALEAL |
| 4172 | case ssa.OpAMD64SUBWconst: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4173 | asm = x86.ALEAL |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4174 | } |
| 4175 | p := Prog(asm) |
| 4176 | p.From.Type = obj.TYPE_MEM |
| 4177 | p.From.Reg = x |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4178 | p.From.Offset = -v.AuxInt2Int64() |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4179 | p.To.Type = obj.TYPE_REG |
| 4180 | p.To.Reg = r |
| 4181 | } |
| 4182 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4183 | case ssa.OpAMD64ADDBconst, |
| 4184 | ssa.OpAMD64ANDQconst, ssa.OpAMD64ANDLconst, ssa.OpAMD64ANDWconst, ssa.OpAMD64ANDBconst, |
| 4185 | ssa.OpAMD64ORQconst, ssa.OpAMD64ORLconst, ssa.OpAMD64ORWconst, ssa.OpAMD64ORBconst, |
| 4186 | ssa.OpAMD64XORQconst, ssa.OpAMD64XORLconst, ssa.OpAMD64XORWconst, ssa.OpAMD64XORBconst, |
Ilya Tocar | e93410d | 2016-02-05 19:24:53 +0300 | [diff] [blame] | 4187 | ssa.OpAMD64SUBBconst, ssa.OpAMD64SHLQconst, ssa.OpAMD64SHLLconst, ssa.OpAMD64SHLWconst, |
| 4188 | ssa.OpAMD64SHLBconst, ssa.OpAMD64SHRQconst, ssa.OpAMD64SHRLconst, ssa.OpAMD64SHRWconst, |
| 4189 | ssa.OpAMD64SHRBconst, ssa.OpAMD64SARQconst, ssa.OpAMD64SARLconst, ssa.OpAMD64SARWconst, |
| 4190 | ssa.OpAMD64SARBconst, ssa.OpAMD64ROLQconst, ssa.OpAMD64ROLLconst, ssa.OpAMD64ROLWconst, |
| 4191 | ssa.OpAMD64ROLBconst: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4192 | // This code compensates for the fact that the register allocator |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 4193 | // doesn't understand 2-address instructions yet. TODO: fix that. |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4194 | x := regnum(v.Args[0]) |
| 4195 | r := regnum(v) |
| 4196 | if x != r { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4197 | p := Prog(moveByType(v.Type)) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4198 | p.From.Type = obj.TYPE_REG |
| 4199 | p.From.Reg = x |
| 4200 | p.To.Type = obj.TYPE_REG |
| 4201 | p.To.Reg = r |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4202 | } |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 4203 | p := Prog(v.Op.Asm()) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4204 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4205 | p.From.Offset = v.AuxInt2Int64() |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4206 | p.To.Type = obj.TYPE_REG |
Keith Randall | dbd83c4 | 2015-06-28 06:08:50 -0700 | [diff] [blame] | 4207 | p.To.Reg = r |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 4208 | case ssa.OpAMD64SBBQcarrymask, ssa.OpAMD64SBBLcarrymask: |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4209 | r := regnum(v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4210 | p := Prog(v.Op.Asm()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 4211 | p.From.Type = obj.TYPE_REG |
| 4212 | p.From.Reg = r |
| 4213 | p.To.Type = obj.TYPE_REG |
| 4214 | p.To.Reg = r |
Todd Neal | d90e048 | 2015-07-23 20:01:40 -0500 | [diff] [blame] | 4215 | case ssa.OpAMD64LEAQ1, ssa.OpAMD64LEAQ2, ssa.OpAMD64LEAQ4, ssa.OpAMD64LEAQ8: |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4216 | p := Prog(x86.ALEAQ) |
| 4217 | p.From.Type = obj.TYPE_MEM |
| 4218 | p.From.Reg = regnum(v.Args[0]) |
Todd Neal | d90e048 | 2015-07-23 20:01:40 -0500 | [diff] [blame] | 4219 | switch v.Op { |
| 4220 | case ssa.OpAMD64LEAQ1: |
| 4221 | p.From.Scale = 1 |
| 4222 | case ssa.OpAMD64LEAQ2: |
| 4223 | p.From.Scale = 2 |
| 4224 | case ssa.OpAMD64LEAQ4: |
| 4225 | p.From.Scale = 4 |
| 4226 | case ssa.OpAMD64LEAQ8: |
| 4227 | p.From.Scale = 8 |
| 4228 | } |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4229 | p.From.Index = regnum(v.Args[1]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4230 | addAux(&p.From, v) |
| 4231 | p.To.Type = obj.TYPE_REG |
| 4232 | p.To.Reg = regnum(v) |
| 4233 | case ssa.OpAMD64LEAQ: |
| 4234 | p := Prog(x86.ALEAQ) |
| 4235 | p.From.Type = obj.TYPE_MEM |
| 4236 | p.From.Reg = regnum(v.Args[0]) |
| 4237 | addAux(&p.From, v) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4238 | p.To.Type = obj.TYPE_REG |
| 4239 | p.To.Reg = regnum(v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4240 | case ssa.OpAMD64CMPQ, ssa.OpAMD64CMPL, ssa.OpAMD64CMPW, ssa.OpAMD64CMPB, |
| 4241 | ssa.OpAMD64TESTQ, ssa.OpAMD64TESTL, ssa.OpAMD64TESTW, ssa.OpAMD64TESTB: |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4242 | opregreg(v.Op.Asm(), regnum(v.Args[1]), regnum(v.Args[0])) |
| 4243 | case ssa.OpAMD64UCOMISS, ssa.OpAMD64UCOMISD: |
|          | 4244 | 		// The Go assembler has swapped operands for UCOMISx relative to CMP,
|          | 4245 | 		// so we must account for that right here.
| 4246 | opregreg(v.Op.Asm(), regnum(v.Args[0]), regnum(v.Args[1])) |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4247 | case ssa.OpAMD64CMPQconst, ssa.OpAMD64CMPLconst, ssa.OpAMD64CMPWconst, ssa.OpAMD64CMPBconst: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4248 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4249 | p.From.Type = obj.TYPE_REG |
| 4250 | p.From.Reg = regnum(v.Args[0]) |
| 4251 | p.To.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4252 | p.To.Offset = v.AuxInt2Int64() |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4253 | case ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst: |
| 4254 | p := Prog(v.Op.Asm()) |
| 4255 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4256 | p.From.Offset = v.AuxInt2Int64() |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4257 | p.To.Type = obj.TYPE_REG |
| 4258 | p.To.Reg = regnum(v.Args[0]) |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 4259 | case ssa.OpAMD64MOVBconst, ssa.OpAMD64MOVWconst, ssa.OpAMD64MOVLconst, ssa.OpAMD64MOVQconst: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4260 | x := regnum(v) |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 4261 | p := Prog(v.Op.Asm()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4262 | p.From.Type = obj.TYPE_CONST |
Todd Neal | c17b6b4 | 2016-02-19 16:58:21 -0600 | [diff] [blame] | 4263 | p.From.Offset = v.AuxInt2Int64() |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4264 | p.To.Type = obj.TYPE_REG |
| 4265 | p.To.Reg = x |
Keith Randall | 7b77394 | 2016-01-22 13:44:58 -0800 | [diff] [blame] | 4266 | // If flags are live at this instruction, suppress the |
| 4267 | // MOV $0,AX -> XOR AX,AX optimization. |
| 4268 | if v.Aux != nil { |
| 4269 | p.Mark |= x86.PRESERVEFLAGS |
| 4270 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4271 | case ssa.OpAMD64MOVSSconst, ssa.OpAMD64MOVSDconst: |
| 4272 | x := regnum(v) |
| 4273 | p := Prog(v.Op.Asm()) |
| 4274 | p.From.Type = obj.TYPE_FCONST |
Todd Neal | 19447a6 | 2015-09-04 06:33:56 -0500 | [diff] [blame] | 4275 | p.From.Val = math.Float64frombits(uint64(v.AuxInt)) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4276 | p.To.Type = obj.TYPE_REG |
| 4277 | p.To.Reg = x |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4278 | case ssa.OpAMD64MOVQload, ssa.OpAMD64MOVSSload, ssa.OpAMD64MOVSDload, ssa.OpAMD64MOVLload, ssa.OpAMD64MOVWload, ssa.OpAMD64MOVBload, ssa.OpAMD64MOVBQSXload, ssa.OpAMD64MOVBQZXload, ssa.OpAMD64MOVWQSXload, ssa.OpAMD64MOVWQZXload, ssa.OpAMD64MOVLQSXload, ssa.OpAMD64MOVLQZXload, ssa.OpAMD64MOVOload: |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 4279 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4280 | p.From.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4281 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4282 | addAux(&p.From, v) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4283 | p.To.Type = obj.TYPE_REG |
| 4284 | p.To.Reg = regnum(v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4285 | case ssa.OpAMD64MOVQloadidx8, ssa.OpAMD64MOVSDloadidx8: |
| 4286 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4287 | p.From.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4288 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4289 | addAux(&p.From, v) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4290 | p.From.Scale = 8 |
| 4291 | p.From.Index = regnum(v.Args[1]) |
| 4292 | p.To.Type = obj.TYPE_REG |
| 4293 | p.To.Reg = regnum(v) |
Keith Randall | 9278a04 | 2016-02-02 11:13:50 -0800 | [diff] [blame] | 4294 | case ssa.OpAMD64MOVLloadidx4, ssa.OpAMD64MOVSSloadidx4: |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4295 | p := Prog(v.Op.Asm()) |
| 4296 | p.From.Type = obj.TYPE_MEM |
| 4297 | p.From.Reg = regnum(v.Args[0]) |
| 4298 | addAux(&p.From, v) |
| 4299 | p.From.Scale = 4 |
| 4300 | p.From.Index = regnum(v.Args[1]) |
| 4301 | p.To.Type = obj.TYPE_REG |
| 4302 | p.To.Reg = regnum(v) |
Keith Randall | 9278a04 | 2016-02-02 11:13:50 -0800 | [diff] [blame] | 4303 | case ssa.OpAMD64MOVWloadidx2: |
| 4304 | p := Prog(v.Op.Asm()) |
| 4305 | p.From.Type = obj.TYPE_MEM |
| 4306 | p.From.Reg = regnum(v.Args[0]) |
| 4307 | addAux(&p.From, v) |
| 4308 | p.From.Scale = 2 |
| 4309 | p.From.Index = regnum(v.Args[1]) |
| 4310 | p.To.Type = obj.TYPE_REG |
| 4311 | p.To.Reg = regnum(v) |
| 4312 | case ssa.OpAMD64MOVBloadidx1: |
| 4313 | p := Prog(v.Op.Asm()) |
| 4314 | p.From.Type = obj.TYPE_MEM |
| 4315 | p.From.Reg = regnum(v.Args[0]) |
| 4316 | addAux(&p.From, v) |
| 4317 | p.From.Scale = 1 |
| 4318 | p.From.Index = regnum(v.Args[1]) |
| 4319 | p.To.Type = obj.TYPE_REG |
| 4320 | p.To.Reg = regnum(v) |
Keith Randall | 10462eb | 2015-10-21 17:18:07 -0700 | [diff] [blame] | 4321 | case ssa.OpAMD64MOVQstore, ssa.OpAMD64MOVSSstore, ssa.OpAMD64MOVSDstore, ssa.OpAMD64MOVLstore, ssa.OpAMD64MOVWstore, ssa.OpAMD64MOVBstore, ssa.OpAMD64MOVOstore: |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 4322 | p := Prog(v.Op.Asm()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4323 | p.From.Type = obj.TYPE_REG |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4324 | p.From.Reg = regnum(v.Args[1]) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4325 | p.To.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4326 | p.To.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4327 | addAux(&p.To, v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4328 | case ssa.OpAMD64MOVQstoreidx8, ssa.OpAMD64MOVSDstoreidx8: |
| 4329 | p := Prog(v.Op.Asm()) |
Josh Bleecher Snyder | 3e3d162 | 2015-07-27 16:36:36 -0700 | [diff] [blame] | 4330 | p.From.Type = obj.TYPE_REG |
| 4331 | p.From.Reg = regnum(v.Args[2]) |
| 4332 | p.To.Type = obj.TYPE_MEM |
| 4333 | p.To.Reg = regnum(v.Args[0]) |
| 4334 | p.To.Scale = 8 |
| 4335 | p.To.Index = regnum(v.Args[1]) |
| 4336 | addAux(&p.To, v) |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4337 | case ssa.OpAMD64MOVSSstoreidx4, ssa.OpAMD64MOVLstoreidx4: |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4338 | p := Prog(v.Op.Asm()) |
| 4339 | p.From.Type = obj.TYPE_REG |
| 4340 | p.From.Reg = regnum(v.Args[2]) |
| 4341 | p.To.Type = obj.TYPE_MEM |
| 4342 | p.To.Reg = regnum(v.Args[0]) |
| 4343 | p.To.Scale = 4 |
| 4344 | p.To.Index = regnum(v.Args[1]) |
| 4345 | addAux(&p.To, v) |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4346 | case ssa.OpAMD64MOVWstoreidx2: |
| 4347 | p := Prog(v.Op.Asm()) |
| 4348 | p.From.Type = obj.TYPE_REG |
| 4349 | p.From.Reg = regnum(v.Args[2]) |
| 4350 | p.To.Type = obj.TYPE_MEM |
| 4351 | p.To.Reg = regnum(v.Args[0]) |
| 4352 | p.To.Scale = 2 |
| 4353 | p.To.Index = regnum(v.Args[1]) |
| 4354 | addAux(&p.To, v) |
| 4355 | case ssa.OpAMD64MOVBstoreidx1: |
| 4356 | p := Prog(v.Op.Asm()) |
| 4357 | p.From.Type = obj.TYPE_REG |
| 4358 | p.From.Reg = regnum(v.Args[2]) |
| 4359 | p.To.Type = obj.TYPE_MEM |
| 4360 | p.To.Reg = regnum(v.Args[0]) |
| 4361 | p.To.Scale = 1 |
| 4362 | p.To.Index = regnum(v.Args[1]) |
| 4363 | addAux(&p.To, v) |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4364 | case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst: |
| 4365 | p := Prog(v.Op.Asm()) |
| 4366 | p.From.Type = obj.TYPE_CONST |
Keith Randall | 16b1fce | 2016-01-31 11:39:39 -0800 | [diff] [blame] | 4367 | sc := v.AuxValAndOff() |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4368 | i := sc.Val() |
| 4369 | switch v.Op { |
| 4370 | case ssa.OpAMD64MOVBstoreconst: |
| 4371 | i = int64(int8(i)) |
| 4372 | case ssa.OpAMD64MOVWstoreconst: |
| 4373 | i = int64(int16(i)) |
| 4374 | case ssa.OpAMD64MOVLstoreconst: |
| 4375 | i = int64(int32(i)) |
| 4376 | case ssa.OpAMD64MOVQstoreconst: |
| 4377 | } |
| 4378 | p.From.Offset = i |
| 4379 | p.To.Type = obj.TYPE_MEM |
| 4380 | p.To.Reg = regnum(v.Args[0]) |
| 4381 | addAux2(&p.To, v, sc.Off()) |
Keith Randall | a6fb514 | 2016-02-04 15:53:33 -0800 | [diff] [blame] | 4382 | case ssa.OpAMD64MOVQstoreconstidx8, ssa.OpAMD64MOVLstoreconstidx4, ssa.OpAMD64MOVWstoreconstidx2, ssa.OpAMD64MOVBstoreconstidx1: |
| 4383 | p := Prog(v.Op.Asm()) |
| 4384 | p.From.Type = obj.TYPE_CONST |
| 4385 | sc := v.AuxValAndOff() |
| 4386 | switch v.Op { |
| 4387 | case ssa.OpAMD64MOVBstoreconstidx1: |
| 4388 | p.From.Offset = int64(int8(sc.Val())) |
| 4389 | p.To.Scale = 1 |
| 4390 | case ssa.OpAMD64MOVWstoreconstidx2: |
| 4391 | p.From.Offset = int64(int16(sc.Val())) |
| 4392 | p.To.Scale = 2 |
| 4393 | case ssa.OpAMD64MOVLstoreconstidx4: |
| 4394 | p.From.Offset = int64(int32(sc.Val())) |
| 4395 | p.To.Scale = 4 |
| 4396 | case ssa.OpAMD64MOVQstoreconstidx8: |
| 4397 | p.From.Offset = sc.Val() |
| 4398 | p.To.Scale = 8 |
| 4399 | } |
| 4400 | p.To.Type = obj.TYPE_MEM |
| 4401 | p.To.Reg = regnum(v.Args[0]) |
| 4402 | p.To.Index = regnum(v.Args[1]) |
| 4403 | addAux2(&p.To, v, sc.Off()) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 4404 | case ssa.OpAMD64MOVLQSX, ssa.OpAMD64MOVWQSX, ssa.OpAMD64MOVBQSX, ssa.OpAMD64MOVLQZX, ssa.OpAMD64MOVWQZX, ssa.OpAMD64MOVBQZX, |
| 4405 | ssa.OpAMD64CVTSL2SS, ssa.OpAMD64CVTSL2SD, ssa.OpAMD64CVTSQ2SS, ssa.OpAMD64CVTSQ2SD, |
Todd Neal | 634b50c | 2015-09-01 19:05:44 -0500 | [diff] [blame] | 4406 | ssa.OpAMD64CVTTSS2SL, ssa.OpAMD64CVTTSD2SL, ssa.OpAMD64CVTTSS2SQ, ssa.OpAMD64CVTTSD2SQ, |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 4407 | ssa.OpAMD64CVTSS2SD, ssa.OpAMD64CVTSD2SS: |
| 4408 | opregreg(v.Op.Asm(), regnum(v), regnum(v.Args[0])) |
Keith Randall | 04d6edc | 2015-09-18 18:23:34 -0700 | [diff] [blame] | 4409 | case ssa.OpAMD64DUFFZERO: |
| 4410 | p := Prog(obj.ADUFFZERO) |
| 4411 | p.To.Type = obj.TYPE_ADDR |
| 4412 | p.To.Sym = Linksym(Pkglookup("duffzero", Runtimepkg)) |
| 4413 | p.To.Offset = v.AuxInt |
Keith Randall | 7c4fbb6 | 2015-10-19 13:56:55 -0700 | [diff] [blame] | 4414 | case ssa.OpAMD64MOVOconst: |
| 4415 | if v.AuxInt != 0 { |
| 4416 | v.Unimplementedf("MOVOconst can only do constant=0") |
| 4417 | } |
| 4418 | r := regnum(v) |
| 4419 | opregreg(x86.AXORPS, r, r) |
Keith Randall | 10462eb | 2015-10-21 17:18:07 -0700 | [diff] [blame] | 4420 | case ssa.OpAMD64DUFFCOPY: |
| 4421 | p := Prog(obj.ADUFFCOPY) |
| 4422 | p.To.Type = obj.TYPE_ADDR |
| 4423 | p.To.Sym = Linksym(Pkglookup("duffcopy", Runtimepkg)) |
| 4424 | p.To.Offset = v.AuxInt |
Keith Randall | 04d6edc | 2015-09-18 18:23:34 -0700 | [diff] [blame] | 4425 | |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 4426 | case ssa.OpCopy, ssa.OpAMD64MOVQconvert: // TODO: use MOVQreg for reg->reg copies instead of OpCopy? |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 4427 | if v.Type.IsMemory() { |
| 4428 | return |
| 4429 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4430 | x := regnum(v.Args[0]) |
| 4431 | y := regnum(v) |
| 4432 | if x != y { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4433 | opregreg(moveByType(v.Type), y, x) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4434 | } |
Josh Bleecher Snyder | 0bb2a50 | 2015-07-24 14:51:51 -0700 | [diff] [blame] | 4435 | case ssa.OpLoadReg: |
Josh Bleecher Snyder | 26f135d | 2015-07-20 15:22:34 -0700 | [diff] [blame] | 4436 | if v.Type.IsFlags() { |
| 4437 | v.Unimplementedf("load flags not implemented: %v", v.LongString()) |
| 4438 | return |
| 4439 | } |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4440 | p := Prog(loadByType(v.Type)) |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4441 | n, off := autoVar(v.Args[0]) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4442 | p.From.Type = obj.TYPE_MEM |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4443 | p.From.Node = n |
| 4444 | p.From.Sym = Linksym(n.Sym) |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4445 | p.From.Offset = off |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 4446 | if n.Class == PPARAM || n.Class == PPARAMOUT { |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4447 | p.From.Name = obj.NAME_PARAM |
| 4448 | p.From.Offset += n.Xoffset |
| 4449 | } else { |
| 4450 | p.From.Name = obj.NAME_AUTO |
| 4451 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4452 | p.To.Type = obj.TYPE_REG |
| 4453 | p.To.Reg = regnum(v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 4454 | |
Josh Bleecher Snyder | 0bb2a50 | 2015-07-24 14:51:51 -0700 | [diff] [blame] | 4455 | case ssa.OpStoreReg: |
Josh Bleecher Snyder | 26f135d | 2015-07-20 15:22:34 -0700 | [diff] [blame] | 4456 | if v.Type.IsFlags() { |
| 4457 | v.Unimplementedf("store flags not implemented: %v", v.LongString()) |
| 4458 | return |
| 4459 | } |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4460 | p := Prog(storeByType(v.Type)) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4461 | p.From.Type = obj.TYPE_REG |
| 4462 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4463 | n, off := autoVar(v) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4464 | p.To.Type = obj.TYPE_MEM |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4465 | p.To.Node = n |
| 4466 | p.To.Sym = Linksym(n.Sym) |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4467 | p.To.Offset = off |
Keith Randall | 6a8a9da | 2016-02-27 17:49:31 -0800 | [diff] [blame] | 4468 | if n.Class == PPARAM || n.Class == PPARAMOUT { |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4469 | p.To.Name = obj.NAME_PARAM |
| 4470 | p.To.Offset += n.Xoffset |
| 4471 | } else { |
| 4472 | p.To.Name = obj.NAME_AUTO |
| 4473 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4474 | case ssa.OpPhi: |
Keith Randall | 0b46b42 | 2015-08-11 12:51:33 -0700 | [diff] [blame] | 4475 | // just check to make sure regalloc and stackalloc did it right |
| 4476 | if v.Type.IsMemory() { |
| 4477 | return |
| 4478 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4479 | f := v.Block.Func |
| 4480 | loc := f.RegAlloc[v.ID] |
| 4481 | for _, a := range v.Args { |
Josh Bleecher Snyder | 5584523 | 2015-08-05 16:43:49 -0700 | [diff] [blame] | 4482 | if aloc := f.RegAlloc[a.ID]; aloc != loc { // TODO: .Equal() instead? |
| 4483 | v.Fatalf("phi arg at different location than phi: %v @ %v, but arg %v @ %v\n%s\n", v, loc, a, aloc, v.Block.Func) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4484 | } |
| 4485 | } |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4486 | case ssa.OpInitMem: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4487 | // memory arg needs no code |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 4488 | case ssa.OpArg: |
| 4489 | // input args need no code |
David Chase | 956f319 | 2015-09-11 16:40:05 -0400 | [diff] [blame] | 4490 | case ssa.OpAMD64LoweredGetClosurePtr: |
|          | 4491 | 		// Output is hardwired to DX only;
|          | 4492 | 		// DX contains the closure pointer on
| 4493 | // closure entry, and this "instruction" |
| 4494 | // is scheduled to the very beginning |
| 4495 | // of the entry block. |
Josh Bleecher Snyder | 3d23afb | 2015-08-12 11:22:16 -0700 | [diff] [blame] | 4496 | case ssa.OpAMD64LoweredGetG: |
| 4497 | r := regnum(v) |
| 4498 | // See the comments in cmd/internal/obj/x86/obj6.go |
| 4499 | // near CanUse1InsnTLS for a detailed explanation of these instructions. |
| 4500 | if x86.CanUse1InsnTLS(Ctxt) { |
| 4501 | // MOVQ (TLS), r |
| 4502 | p := Prog(x86.AMOVQ) |
| 4503 | p.From.Type = obj.TYPE_MEM |
| 4504 | p.From.Reg = x86.REG_TLS |
| 4505 | p.To.Type = obj.TYPE_REG |
| 4506 | p.To.Reg = r |
| 4507 | } else { |
| 4508 | // MOVQ TLS, r |
| 4509 | // MOVQ (r)(TLS*1), r |
| 4510 | p := Prog(x86.AMOVQ) |
| 4511 | p.From.Type = obj.TYPE_REG |
| 4512 | p.From.Reg = x86.REG_TLS |
| 4513 | p.To.Type = obj.TYPE_REG |
| 4514 | p.To.Reg = r |
| 4515 | q := Prog(x86.AMOVQ) |
| 4516 | q.From.Type = obj.TYPE_MEM |
| 4517 | q.From.Reg = r |
| 4518 | q.From.Index = x86.REG_TLS |
| 4519 | q.From.Scale = 1 |
| 4520 | q.To.Type = obj.TYPE_REG |
| 4521 | q.To.Reg = r |
| 4522 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 4523 | case ssa.OpAMD64CALLstatic: |
Keith Randall | ddc6b64 | 2016-03-09 19:27:57 -0800 | [diff] [blame] | 4524 | if v.Aux.(*Sym) == Deferreturn.Sym { |
| 4525 | // Deferred calls will appear to be returning to |
| 4526 | // the CALL deferreturn(SB) that we are about to emit. |
| 4527 | // However, the stack trace code will show the line |
| 4528 | // of the instruction byte before the return PC. |
| 4529 | // To avoid that being an unrelated instruction, |
| 4530 | // insert an actual hardware NOP that will have the right line number. |
| 4531 | // This is different from obj.ANOP, which is a virtual no-op |
| 4532 | // that doesn't make it into the instruction stream. |
| 4533 | Thearch.Ginsnop() |
| 4534 | } |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 4535 | p := Prog(obj.ACALL) |
| 4536 | p.To.Type = obj.TYPE_MEM |
| 4537 | p.To.Name = obj.NAME_EXTERN |
| 4538 | p.To.Sym = Linksym(v.Aux.(*Sym)) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4539 | if Maxarg < v.AuxInt { |
| 4540 | Maxarg = v.AuxInt |
| 4541 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 4542 | case ssa.OpAMD64CALLclosure: |
| 4543 | p := Prog(obj.ACALL) |
| 4544 | p.To.Type = obj.TYPE_REG |
| 4545 | p.To.Reg = regnum(v.Args[0]) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4546 | if Maxarg < v.AuxInt { |
| 4547 | Maxarg = v.AuxInt |
| 4548 | } |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4549 | case ssa.OpAMD64CALLdefer: |
| 4550 | p := Prog(obj.ACALL) |
| 4551 | p.To.Type = obj.TYPE_MEM |
| 4552 | p.To.Name = obj.NAME_EXTERN |
| 4553 | p.To.Sym = Linksym(Deferproc.Sym) |
| 4554 | if Maxarg < v.AuxInt { |
| 4555 | Maxarg = v.AuxInt |
| 4556 | } |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4557 | case ssa.OpAMD64CALLgo: |
| 4558 | p := Prog(obj.ACALL) |
| 4559 | p.To.Type = obj.TYPE_MEM |
| 4560 | p.To.Name = obj.NAME_EXTERN |
| 4561 | p.To.Sym = Linksym(Newproc.Sym) |
| 4562 | if Maxarg < v.AuxInt { |
| 4563 | Maxarg = v.AuxInt |
| 4564 | } |
Keith Randall | d24768e | 2015-09-09 23:56:59 -0700 | [diff] [blame] | 4565 | case ssa.OpAMD64CALLinter: |
| 4566 | p := Prog(obj.ACALL) |
| 4567 | p.To.Type = obj.TYPE_REG |
| 4568 | p.To.Reg = regnum(v.Args[0]) |
| 4569 | if Maxarg < v.AuxInt { |
| 4570 | Maxarg = v.AuxInt |
| 4571 | } |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 4572 | case ssa.OpAMD64NEGQ, ssa.OpAMD64NEGL, ssa.OpAMD64NEGW, ssa.OpAMD64NEGB, |
| 4573 | ssa.OpAMD64NOTQ, ssa.OpAMD64NOTL, ssa.OpAMD64NOTW, ssa.OpAMD64NOTB: |
Josh Bleecher Snyder | 93c354b6 | 2015-07-30 17:15:16 -0700 | [diff] [blame] | 4574 | x := regnum(v.Args[0]) |
| 4575 | r := regnum(v) |
| 4576 | if x != r { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 4577 | p := Prog(moveByType(v.Type)) |
Josh Bleecher Snyder | 93c354b6 | 2015-07-30 17:15:16 -0700 | [diff] [blame] | 4578 | p.From.Type = obj.TYPE_REG |
| 4579 | p.From.Reg = x |
| 4580 | p.To.Type = obj.TYPE_REG |
| 4581 | p.To.Reg = r |
| 4582 | } |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 4583 | p := Prog(v.Op.Asm()) |
| 4584 | p.To.Type = obj.TYPE_REG |
Josh Bleecher Snyder | 93c354b6 | 2015-07-30 17:15:16 -0700 | [diff] [blame] | 4585 | p.To.Reg = r |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 4586 | case ssa.OpAMD64SQRTSD: |
| 4587 | p := Prog(v.Op.Asm()) |
| 4588 | p.From.Type = obj.TYPE_REG |
| 4589 | p.From.Reg = regnum(v.Args[0]) |
| 4590 | p.To.Type = obj.TYPE_REG |
| 4591 | p.To.Reg = regnum(v) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4592 | case ssa.OpSP, ssa.OpSB: |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4593 | // nothing to do |
Josh Bleecher Snyder | a794074 | 2015-07-20 15:21:49 -0700 | [diff] [blame] | 4594 | case ssa.OpAMD64SETEQ, ssa.OpAMD64SETNE, |
| 4595 | ssa.OpAMD64SETL, ssa.OpAMD64SETLE, |
| 4596 | ssa.OpAMD64SETG, ssa.OpAMD64SETGE, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4597 | ssa.OpAMD64SETGF, ssa.OpAMD64SETGEF, |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4598 | ssa.OpAMD64SETB, ssa.OpAMD64SETBE, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4599 | ssa.OpAMD64SETORD, ssa.OpAMD64SETNAN, |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4600 | ssa.OpAMD64SETA, ssa.OpAMD64SETAE: |
Josh Bleecher Snyder | a794074 | 2015-07-20 15:21:49 -0700 | [diff] [blame] | 4601 | p := Prog(v.Op.Asm()) |
| 4602 | p.To.Type = obj.TYPE_REG |
| 4603 | p.To.Reg = regnum(v) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4604 | |
| 4605 | case ssa.OpAMD64SETNEF: |
| 4606 | p := Prog(v.Op.Asm()) |
| 4607 | p.To.Type = obj.TYPE_REG |
| 4608 | p.To.Reg = regnum(v) |
| 4609 | q := Prog(x86.ASETPS) |
| 4610 | q.To.Type = obj.TYPE_REG |
| 4611 | q.To.Reg = x86.REG_AX |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4612 | 		// ORL avoids a partial register write and is smaller than the ORQ used by the old compiler
| 4613 | opregreg(x86.AORL, regnum(v), x86.REG_AX) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4614 | |
| 4615 | case ssa.OpAMD64SETEQF: |
| 4616 | p := Prog(v.Op.Asm()) |
| 4617 | p.To.Type = obj.TYPE_REG |
| 4618 | p.To.Reg = regnum(v) |
| 4619 | q := Prog(x86.ASETPC) |
| 4620 | q.To.Type = obj.TYPE_REG |
| 4621 | q.To.Reg = x86.REG_AX |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 4622 | 		// ANDL avoids a partial register write and is smaller than the ANDQ used by the old compiler
| 4623 | opregreg(x86.AANDL, regnum(v), x86.REG_AX) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4624 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4625 | case ssa.OpAMD64InvertFlags: |
| 4626 | v.Fatalf("InvertFlags should never make it to codegen %v", v) |
Keith Randall | 3425295 | 2016-01-05 14:56:26 -0800 | [diff] [blame] | 4627 | case ssa.OpAMD64FlagEQ, ssa.OpAMD64FlagLT_ULT, ssa.OpAMD64FlagLT_UGT, ssa.OpAMD64FlagGT_ULT, ssa.OpAMD64FlagGT_UGT: |
| 4628 | v.Fatalf("Flag* ops should never make it to codegen %v", v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4629 | case ssa.OpAMD64REPSTOSQ: |
| 4630 | Prog(x86.AREP) |
| 4631 | Prog(x86.ASTOSQ) |
Keith Randall | 10462eb | 2015-10-21 17:18:07 -0700 | [diff] [blame] | 4632 | case ssa.OpAMD64REPMOVSQ: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 4633 | Prog(x86.AREP) |
Keith Randall | 10462eb | 2015-10-21 17:18:07 -0700 | [diff] [blame] | 4634 | Prog(x86.AMOVSQ) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4635 | case ssa.OpVarDef: |
| 4636 | Gvardef(v.Aux.(*Node)) |
| 4637 | case ssa.OpVarKill: |
| 4638 | gvarkill(v.Aux.(*Node)) |
Keith Randall | 23d5810 | 2016-01-19 09:59:21 -0800 | [diff] [blame] | 4639 | case ssa.OpVarLive: |
| 4640 | gvarlive(v.Aux.(*Node)) |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4641 | case ssa.OpAMD64LoweredNilCheck: |
| 4642 | // Optimization - if the subsequent block has a load or store |
| 4643 | // at the same address, we don't need to issue this instruction. |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4644 | mem := v.Args[1] |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4645 | for _, w := range v.Block.Succs[0].Values { |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4646 | if w.Op == ssa.OpPhi { |
| 4647 | if w.Type.IsMemory() { |
| 4648 | mem = w |
| 4649 | } |
| 4650 | continue |
| 4651 | } |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4652 | if len(w.Args) == 0 || !w.Args[len(w.Args)-1].Type.IsMemory() { |
| 4653 | // w doesn't use a store - can't be a memory op. |
| 4654 | continue |
| 4655 | } |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4656 | if w.Args[len(w.Args)-1] != mem { |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4657 | v.Fatalf("wrong store after nilcheck v=%s w=%s", v, w) |
| 4658 | } |
| 4659 | switch w.Op { |
| 4660 | case ssa.OpAMD64MOVQload, ssa.OpAMD64MOVLload, ssa.OpAMD64MOVWload, ssa.OpAMD64MOVBload, |
Keith Randall | 1cc5789 | 2016-01-30 11:25:38 -0800 | [diff] [blame] | 4661 | ssa.OpAMD64MOVQstore, ssa.OpAMD64MOVLstore, ssa.OpAMD64MOVWstore, ssa.OpAMD64MOVBstore, |
| 4662 | ssa.OpAMD64MOVBQSXload, ssa.OpAMD64MOVBQZXload, ssa.OpAMD64MOVWQSXload, |
Keith Randall | 4a346e7 | 2016-02-25 13:45:22 -0800 | [diff] [blame] | 4663 | ssa.OpAMD64MOVWQZXload, ssa.OpAMD64MOVLQSXload, ssa.OpAMD64MOVLQZXload, |
| 4664 | ssa.OpAMD64MOVSSload, ssa.OpAMD64MOVSDload, ssa.OpAMD64MOVOload, |
| 4665 | ssa.OpAMD64MOVSSstore, ssa.OpAMD64MOVSDstore, ssa.OpAMD64MOVOstore: |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4666 | if w.Args[0] == v.Args[0] && w.Aux == nil && w.AuxInt >= 0 && w.AuxInt < minZeroPage { |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4667 | if Debug_checknil != 0 && int(v.Line) > 1 { |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 4668 | Warnl(v.Line, "removed nil check") |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4669 | } |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4670 | return |
| 4671 | } |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4672 | case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst: |
Keith Randall | f94e074 | 2016-01-26 15:47:08 -0800 | [diff] [blame] | 4673 | off := ssa.ValAndOff(v.AuxInt).Off() |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4674 | if w.Args[0] == v.Args[0] && w.Aux == nil && off >= 0 && off < minZeroPage { |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4675 | if Debug_checknil != 0 && int(v.Line) > 1 { |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 4676 | Warnl(v.Line, "removed nil check") |
Keith Randall | 3c26c0d | 2016-01-21 13:27:01 -0800 | [diff] [blame] | 4677 | } |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4678 | return |
| 4679 | } |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4680 | } |
| 4681 | if w.Type.IsMemory() { |
Keith Randall | 4a346e7 | 2016-02-25 13:45:22 -0800 | [diff] [blame] | 4682 | if w.Op == ssa.OpVarDef || w.Op == ssa.OpVarKill || w.Op == ssa.OpVarLive { |
| 4683 | // these ops are OK |
| 4684 | mem = w |
| 4685 | continue |
| 4686 | } |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4687 | // We can't delay the nil check past the next store. |
| 4688 | break |
| 4689 | } |
| 4690 | } |
| 4691 | // Issue a load which will fault if the input is nil. |
| 4692 | // TODO: We currently use the 2-byte instruction TESTB AX, (reg). |
| 4693 | // Should we use the 3-byte TESTB $0, (reg) instead? It is larger |
| 4694 | // but it doesn't have false dependency on AX. |
|          | 4695 | 		// but it doesn't have a false dependency on AX.
| 4696 | // That trades clobbering flags for clobbering a register. |
| 4697 | p := Prog(x86.ATESTB) |
| 4698 | p.From.Type = obj.TYPE_REG |
| 4699 | p.From.Reg = x86.REG_AX |
| 4700 | p.To.Type = obj.TYPE_MEM |
| 4701 | p.To.Reg = regnum(v.Args[0]) |
| 4702 | addAux(&p.To, v) |
| 4703 | if Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers |
Robert Griesemer | b83f397 | 2016-03-02 11:01:25 -0800 | [diff] [blame] | 4704 | Warnl(v.Line, "generated nil check") |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4705 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4706 | default: |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 4707 | v.Unimplementedf("genValue not implemented: %s", v.LongString()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4708 | } |
| 4709 | } |
| 4710 | |
Keith Randall | 7b77394 | 2016-01-22 13:44:58 -0800 | [diff] [blame] | 4711 | // markMoves marks any MOVXconst ops that need to avoid clobbering flags. |
| 4712 | func (s *genState) markMoves(b *ssa.Block) { |
| 4713 | flive := b.FlagsLiveAtEnd |
| 4714 | if b.Control != nil && b.Control.Type.IsFlags() { |
| 4715 | flive = true |
| 4716 | } |
| 4717 | for i := len(b.Values) - 1; i >= 0; i-- { |
| 4718 | v := b.Values[i] |
Keith Randall | f1f366c | 2016-02-29 11:10:08 -0800 | [diff] [blame] | 4719 | if flive && (v.Op == ssa.OpAMD64MOVBconst || v.Op == ssa.OpAMD64MOVWconst || v.Op == ssa.OpAMD64MOVLconst || v.Op == ssa.OpAMD64MOVQconst) { |
Keith Randall | 7b77394 | 2016-01-22 13:44:58 -0800 | [diff] [blame] | 4720 | // The "mark" is any non-nil Aux value. |
| 4721 | v.Aux = v |
| 4722 | } |
| 4723 | if v.Type.IsFlags() { |
| 4724 | flive = false |
| 4725 | } |
| 4726 | for _, a := range v.Args { |
| 4727 | if a.Type.IsFlags() { |
| 4728 | flive = true |
| 4729 | } |
| 4730 | } |
| 4731 | } |
| 4732 | } |
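// Illustrative scenario (not code from this file): in a sequence such as
// CMPQ x, y; MOVQ $0, AX; JLT target, the MOVQ sits between the compare and
// the jump that consumes its flags, so markMoves tags it and genValue sets
// PRESERVEFLAGS above, suppressing the MOV $0 -> XOR rewrite that would
// clobber those flags.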
| 4733 | |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 4734 | // movZero generates a register-indirect move with a 0 immediate and keeps track of the bytes left and the next offset.
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4735 | func movZero(as obj.As, width int64, nbytes int64, offset int64, regnum int16) (nleft int64, noff int64) { |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 4736 | p := Prog(as) |
| 4737 | // TODO: use zero register on archs that support it. |
| 4738 | p.From.Type = obj.TYPE_CONST |
| 4739 | p.From.Offset = 0 |
| 4740 | p.To.Type = obj.TYPE_MEM |
| 4741 | p.To.Reg = regnum |
| 4742 | p.To.Offset = offset |
| 4743 | offset += width |
| 4744 | nleft = nbytes - width |
| 4745 | return nleft, offset |
| 4746 | } |
| 4747 | |
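// blockJump maps a conditional block kind to the jump instruction for its
// condition and for the inverted condition, so the code below can branch on
// whichever form lets the fallthrough successor come next.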
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4748 | var blockJump = [...]struct { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4749 | asm, invasm obj.As |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4750 | }{ |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4751 | ssa.BlockAMD64EQ: {x86.AJEQ, x86.AJNE}, |
| 4752 | ssa.BlockAMD64NE: {x86.AJNE, x86.AJEQ}, |
| 4753 | ssa.BlockAMD64LT: {x86.AJLT, x86.AJGE}, |
| 4754 | ssa.BlockAMD64GE: {x86.AJGE, x86.AJLT}, |
| 4755 | ssa.BlockAMD64LE: {x86.AJLE, x86.AJGT}, |
| 4756 | ssa.BlockAMD64GT: {x86.AJGT, x86.AJLE}, |
| 4757 | ssa.BlockAMD64ULT: {x86.AJCS, x86.AJCC}, |
| 4758 | ssa.BlockAMD64UGE: {x86.AJCC, x86.AJCS}, |
| 4759 | ssa.BlockAMD64UGT: {x86.AJHI, x86.AJLS}, |
| 4760 | ssa.BlockAMD64ULE: {x86.AJLS, x86.AJHI}, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4761 | ssa.BlockAMD64ORD: {x86.AJPC, x86.AJPS}, |
| 4762 | ssa.BlockAMD64NAN: {x86.AJPS, x86.AJPC}, |
| 4763 | } |
| 4764 | |
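// floatingEQNEJump describes one conditional jump of the two-jump sequence
// used for floating-point equality tests: the jump instruction and the index
// of the successor block it targets.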
| 4765 | type floatingEQNEJump struct { |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 4766 | jump obj.As |
| 4767 | index int |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4768 | } |
| 4769 | |
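// eqfJumps and nefJumps hold the jump pairs for the EQF and NEF block kinds,
// indexed first by which successor is the fallthrough block (see the inline
// comments) and then by position within the pair.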
| 4770 | var eqfJumps = [2][2]floatingEQNEJump{ |
| 4771 | {{x86.AJNE, 1}, {x86.AJPS, 1}}, // next == b.Succs[0] |
| 4772 | {{x86.AJNE, 1}, {x86.AJPC, 0}}, // next == b.Succs[1] |
| 4773 | } |
| 4774 | var nefJumps = [2][2]floatingEQNEJump{ |
| 4775 | {{x86.AJNE, 0}, {x86.AJPC, 1}}, // next == b.Succs[0] |
| 4776 | {{x86.AJNE, 0}, {x86.AJPS, 0}}, // next == b.Succs[1] |
| 4777 | } |
| 4778 | |
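// oneFPJump emits one conditional jump from the tables above, records it for
// later branch resolution, and passes the branch likelihood along to liblink.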
| 4779 | func oneFPJump(b *ssa.Block, jumps *floatingEQNEJump, likely ssa.BranchPrediction, branches []branch) []branch { |
| 4780 | p := Prog(jumps.jump) |
| 4781 | p.To.Type = obj.TYPE_BRANCH |
| 4782 | to := jumps.index |
| 4783 | branches = append(branches, branch{p, b.Succs[to]}) |
| 4784 | if to == 1 { |
| 4785 | likely = -likely |
| 4786 | } |
| 4787 | // liblink reorders the instruction stream as it sees fit. |
| 4788 | // Pass along what we know so liblink can make use of it. |
| 4789 | // TODO: Once we've fully switched to SSA, |
| 4790 | // make liblink leave our output alone. |
| 4791 | switch likely { |
| 4792 | case ssa.BranchUnlikely: |
| 4793 | p.From.Type = obj.TYPE_CONST |
| 4794 | p.From.Offset = 0 |
| 4795 | case ssa.BranchLikely: |
| 4796 | p.From.Type = obj.TYPE_CONST |
| 4797 | p.From.Offset = 1 |
| 4798 | } |
| 4799 | return branches |
| 4800 | } |
| 4801 | |
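// genFPJump emits the two-jump sequence for a floating-point EQF or NEF block,
// picking the variant based on whether the next block laid out is Succs[0],
// Succs[1], or neither (in which case an extra unconditional jump is needed).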
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4802 | func genFPJump(s *genState, b, next *ssa.Block, jumps *[2][2]floatingEQNEJump) { |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4803 | likely := b.Likely |
| 4804 | switch next { |
| 4805 | case b.Succs[0]: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4806 | s.branches = oneFPJump(b, &jumps[0][0], likely, s.branches) |
| 4807 | s.branches = oneFPJump(b, &jumps[0][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4808 | case b.Succs[1]: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4809 | s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches) |
| 4810 | s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4811 | default: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4812 | s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches) |
| 4813 | s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4814 | q := Prog(obj.AJMP) |
| 4815 | q.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4816 | s.branches = append(s.branches, branch{q, b.Succs[1]}) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4817 | } |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4818 | } |
| 4819 | |
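// genBlock generates code for the end of block b, given that block next will
// be laid out immediately after it, so that jumps to next can be omitted.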
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4820 | func (s *genState) genBlock(b, next *ssa.Block) { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 4821 | lineno = b.Line |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 4822 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4823 | switch b.Kind { |
Keith Randall | 31115a5 | 2015-10-23 19:12:49 -0700 | [diff] [blame] | 4824 | case ssa.BlockPlain, ssa.BlockCall, ssa.BlockCheck: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4825 | if b.Succs[0] != next { |
| 4826 | p := Prog(obj.AJMP) |
| 4827 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4828 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4829 | } |
Keith Randall | ddc6b64 | 2016-03-09 19:27:57 -0800 | [diff] [blame] | 4830 | case ssa.BlockDefer: |
| 4831 | // defer returns in rax: |
| 4832 | // 0 if we should continue executing |
| 4833 | // 1 if we should jump to deferreturn call |
| 4834 | p := Prog(x86.ATESTL) |
| 4835 | p.From.Type = obj.TYPE_REG |
| 4836 | p.From.Reg = x86.REG_AX |
| 4837 | p.To.Type = obj.TYPE_REG |
| 4838 | p.To.Reg = x86.REG_AX |
| 4839 | p = Prog(x86.AJNE) |
| 4840 | p.To.Type = obj.TYPE_BRANCH |
| 4841 | s.branches = append(s.branches, branch{p, b.Succs[1]}) |
| 4842 | if b.Succs[0] != next { |
| 4843 | p := Prog(obj.AJMP) |
| 4844 | p.To.Type = obj.TYPE_BRANCH |
| 4845 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
| 4846 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4847 | case ssa.BlockExit: |
Keith Randall | 5f10573 | 2015-09-17 15:19:23 -0700 | [diff] [blame] | 4848 | Prog(obj.AUNDEF) // tell plive.go that we never reach here |
Keith Randall | 10f38f5 | 2015-09-03 09:09:59 -0700 | [diff] [blame] | 4849 | case ssa.BlockRet: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4850 | Prog(obj.ARET) |
Keith Randall | 8a1f621 | 2015-09-08 21:28:44 -0700 | [diff] [blame] | 4851 | case ssa.BlockRetJmp: |
| 4852 | p := Prog(obj.AJMP) |
| 4853 | p.To.Type = obj.TYPE_MEM |
| 4854 | p.To.Name = obj.NAME_EXTERN |
| 4855 | p.To.Sym = Linksym(b.Aux.(*Sym)) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4856 | |
| 4857 | case ssa.BlockAMD64EQF: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4858 | genFPJump(s, b, next, &eqfJumps) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4859 | |
| 4860 | case ssa.BlockAMD64NEF: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4861 | genFPJump(s, b, next, &nefJumps) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 4862 | |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4863 | case ssa.BlockAMD64EQ, ssa.BlockAMD64NE, |
| 4864 | ssa.BlockAMD64LT, ssa.BlockAMD64GE, |
| 4865 | ssa.BlockAMD64LE, ssa.BlockAMD64GT, |
| 4866 | ssa.BlockAMD64ULT, ssa.BlockAMD64UGT, |
| 4867 | ssa.BlockAMD64ULE, ssa.BlockAMD64UGE: |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4868 | jmp := blockJump[b.Kind] |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 4869 | likely := b.Likely |
| 4870 | var p *obj.Prog |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4871 | switch next { |
| 4872 | case b.Succs[0]: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 4873 | p = Prog(jmp.invasm) |
| 4874 | likely *= -1 |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4875 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4876 | s.branches = append(s.branches, branch{p, b.Succs[1]}) |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4877 | case b.Succs[1]: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 4878 | p = Prog(jmp.asm) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4879 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4880 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 4881 | default: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 4882 | p = Prog(jmp.asm) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4883 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4884 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4885 | q := Prog(obj.AJMP) |
| 4886 | q.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 4887 | s.branches = append(s.branches, branch{q, b.Succs[1]}) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 4888 | } |
| 4889 | |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 4890 | // liblink reorders the instruction stream as it sees fit. |
| 4891 | // Pass along what we know so liblink can make use of it. |
| 4892 | // TODO: Once we've fully switched to SSA, |
| 4893 | // make liblink leave our output alone. |
| 4894 | switch likely { |
| 4895 | case ssa.BranchUnlikely: |
| 4896 | p.From.Type = obj.TYPE_CONST |
| 4897 | p.From.Offset = 0 |
| 4898 | case ssa.BranchLikely: |
| 4899 | p.From.Type = obj.TYPE_CONST |
| 4900 | p.From.Offset = 1 |
| 4901 | } |
| 4902 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4903 | default: |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 4904 | b.Unimplementedf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4905 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4906 | } |
| 4907 | |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4908 | // addAux adds the offset in the aux fields (AuxInt and Aux) of v to a. |
| 4909 | func addAux(a *obj.Addr, v *ssa.Value) { |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4910 | addAux2(a, v, v.AuxInt) |
| 4911 | } |
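// addAux2 does the same, but with the integer offset supplied by the caller
// instead of taken from v.AuxInt.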
| 4912 | func addAux2(a *obj.Addr, v *ssa.Value, offset int64) { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4913 | if a.Type != obj.TYPE_MEM { |
| 4914 | v.Fatalf("bad addAux addr %s", a) |
| 4915 | } |
| 4916 | // add integer offset |
Keith Randall | d43f2e3 | 2015-10-21 13:13:56 -0700 | [diff] [blame] | 4917 | a.Offset += offset |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4918 | |
| 4919 | // If no additional symbol offset, we're done. |
| 4920 | if v.Aux == nil { |
| 4921 | return |
| 4922 | } |
| 4923 | // Add symbol's offset from its base register. |
| 4924 | switch sym := v.Aux.(type) { |
| 4925 | case *ssa.ExternSymbol: |
| 4926 | a.Name = obj.NAME_EXTERN |
| 4927 | a.Sym = Linksym(sym.Sym.(*Sym)) |
| 4928 | case *ssa.ArgSymbol: |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4929 | n := sym.Node.(*Node) |
| 4930 | a.Name = obj.NAME_PARAM |
| 4931 | a.Node = n |
| 4932 | a.Sym = Linksym(n.Orig.Sym) |
| 4933 | a.Offset += n.Xoffset // TODO: why do I have to add this here? I don't for auto variables. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4934 | case *ssa.AutoSymbol: |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 4935 | n := sym.Node.(*Node) |
| 4936 | a.Name = obj.NAME_AUTO |
| 4937 | a.Node = n |
| 4938 | a.Sym = Linksym(n.Sym) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 4939 | default: |
| 4940 | v.Fatalf("aux in %s not implemented %#v", v, v.Aux) |
| 4941 | } |
| 4942 | } |
| 4943 | |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4944 | // extendIndex extends v to a full int width. |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4945 | func (s *state) extendIndex(v *ssa.Value) *ssa.Value { |
| 4946 | size := v.Type.Size() |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4947 | if size == s.config.IntSize { |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4948 | return v |
| 4949 | } |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4950 | if size > s.config.IntSize { |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 4951 | // TODO: truncate 64-bit indexes on 32-bit pointer archs. We'd need to test |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4952 | // the high word and branch to out-of-bounds failure if it is not 0. |
| 4953 | s.Unimplementedf("64->32 index truncation not implemented") |
| 4954 | return v |
| 4955 | } |
| 4956 | |
| 4957 | // Extend value to the required size |
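// The switch keys below encode (index size, int size) as 10*size + s.config.IntSize;
// for example, case 28 extends a 2-byte index to an 8-byte int.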
| 4958 | var op ssa.Op |
| 4959 | if v.Type.IsSigned() { |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4960 | switch 10*size + s.config.IntSize { |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4961 | case 14: |
| 4962 | op = ssa.OpSignExt8to32 |
| 4963 | case 18: |
| 4964 | op = ssa.OpSignExt8to64 |
| 4965 | case 24: |
| 4966 | op = ssa.OpSignExt16to32 |
| 4967 | case 28: |
| 4968 | op = ssa.OpSignExt16to64 |
| 4969 | case 48: |
| 4970 | op = ssa.OpSignExt32to64 |
| 4971 | default: |
| 4972 | s.Fatalf("bad signed index extension %s", v.Type) |
| 4973 | } |
| 4974 | } else { |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4975 | switch 10*size + s.config.IntSize { |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4976 | case 14: |
| 4977 | op = ssa.OpZeroExt8to32 |
| 4978 | case 18: |
| 4979 | op = ssa.OpZeroExt8to64 |
| 4980 | case 24: |
| 4981 | op = ssa.OpZeroExt16to32 |
| 4982 | case 28: |
| 4983 | op = ssa.OpZeroExt16to64 |
| 4984 | case 48: |
| 4985 | op = ssa.OpZeroExt32to64 |
| 4986 | default: |
| 4987 | s.Fatalf("bad unsigned index extension %s", v.Type) |
| 4988 | } |
| 4989 | } |
Keith Randall | 582baae | 2015-11-02 21:28:13 -0800 | [diff] [blame] | 4990 | return s.newValue1(op, Types[TINT], v) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 4991 | } |
| 4992 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 4993 | // ssaRegToReg maps ssa register numbers to obj register numbers. |
| 4994 | var ssaRegToReg = [...]int16{ |
| 4995 | x86.REG_AX, |
| 4996 | x86.REG_CX, |
| 4997 | x86.REG_DX, |
| 4998 | x86.REG_BX, |
| 4999 | x86.REG_SP, |
| 5000 | x86.REG_BP, |
| 5001 | x86.REG_SI, |
| 5002 | x86.REG_DI, |
| 5003 | x86.REG_R8, |
| 5004 | x86.REG_R9, |
| 5005 | x86.REG_R10, |
| 5006 | x86.REG_R11, |
| 5007 | x86.REG_R12, |
| 5008 | x86.REG_R13, |
| 5009 | x86.REG_R14, |
| 5010 | x86.REG_R15, |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 5011 | x86.REG_X0, |
| 5012 | x86.REG_X1, |
| 5013 | x86.REG_X2, |
| 5014 | x86.REG_X3, |
| 5015 | x86.REG_X4, |
| 5016 | x86.REG_X5, |
| 5017 | x86.REG_X6, |
| 5018 | x86.REG_X7, |
| 5019 | x86.REG_X8, |
| 5020 | x86.REG_X9, |
| 5021 | x86.REG_X10, |
| 5022 | x86.REG_X11, |
| 5023 | x86.REG_X12, |
| 5024 | x86.REG_X13, |
| 5025 | x86.REG_X14, |
| 5026 | x86.REG_X15, |
| 5027 | 0, // SB isn't a real register. We fill an Addr.Reg field with 0 in this case. |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5028 | // TODO: arch-dependent |
| 5029 | } |
| 5030 | |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5031 | // loadByType returns the load instruction of the given type. |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 5032 | func loadByType(t ssa.Type) obj.As { |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 5033 | // Avoid partial register write |
| 5034 | if !t.IsFloat() && t.Size() <= 2 { |
| 5035 | if t.Size() == 1 { |
| 5036 | return x86.AMOVBLZX |
| 5037 | } else { |
| 5038 | return x86.AMOVWLZX |
| 5039 | } |
| 5040 | } |
| 5041 | // Otherwise, there's no difference between load and store opcodes. |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5042 | return storeByType(t) |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 5043 | } |
| 5044 | |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5045 | // storeByType returns the store instruction of the given type. |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 5046 | func storeByType(t ssa.Type) obj.As { |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 5047 | width := t.Size() |
| 5048 | if t.IsFloat() { |
| 5049 | switch width { |
| 5050 | case 4: |
| 5051 | return x86.AMOVSS |
| 5052 | case 8: |
| 5053 | return x86.AMOVSD |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 5054 | } |
| 5055 | } else { |
| 5056 | switch width { |
| 5057 | case 1: |
| 5058 | return x86.AMOVB |
| 5059 | case 2: |
| 5060 | return x86.AMOVW |
| 5061 | case 4: |
| 5062 | return x86.AMOVL |
| 5063 | case 8: |
| 5064 | return x86.AMOVQ |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5065 | } |
| 5066 | } |
| 5067 | panic("bad store type") |
| 5068 | } |
| 5069 | |
| 5070 | // moveByType returns the reg->reg move instruction of the given type. |
Matthew Dempsky | 0d9258a | 2016-03-07 18:00:08 -0800 | [diff] [blame] | 5071 | func moveByType(t ssa.Type) obj.As { |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5072 | if t.IsFloat() { |
| 5073 | // Moving the whole sse2 register is faster |
| 5074 | // than moving just the correct low portion of it. |
Ilya Tocar | 1b1d0a9 | 2016-02-26 16:48:16 +0300 | [diff] [blame] | 5075 | // There is no xmm->xmm move with 1 byte opcode, |
| 5076 | // so use movups, which has 2 byte opcode. |
| 5077 | return x86.AMOVUPS |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5078 | } else { |
| 5079 | switch t.Size() { |
| 5080 | case 1: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 5081 | // Avoids partial register write |
| 5082 | return x86.AMOVL |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5083 | case 2: |
Ilya Tocar | e96b232 | 2016-02-15 17:01:26 +0300 | [diff] [blame] | 5084 | return x86.AMOVL |
Keith Randall | 90065ea | 2016-01-15 08:45:47 -0800 | [diff] [blame] | 5085 | case 4: |
| 5086 | return x86.AMOVL |
| 5087 | case 8: |
| 5088 | return x86.AMOVQ |
Keith Randall | c03ed49 | 2016-03-02 15:18:40 -0800 | [diff] [blame] | 5089 | case 16: |
| 5090 | return x86.AMOVUPS // int128s are in SSE registers |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 5091 | default: |
Keith Randall | c03ed49 | 2016-03-02 15:18:40 -0800 | [diff] [blame] | 5092 | panic(fmt.Sprintf("bad int register width %d:%s", t.Size(), t)) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 5093 | } |
| 5094 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 5095 | panic("bad register type") |
| 5096 | } |
| 5097 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5098 | // regnum returns the register (in cmd/internal/obj numbering) to |
Brad Fitzpatrick | 5fea2cc | 2016-03-01 23:21:55 +0000 | [diff] [blame] | 5099 | // which v has been allocated. Reports an unimplemented error (rather than
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5100 | // panicking, for now) if v is not assigned to a register.
Josh Bleecher Snyder | e139549 | 2015-08-05 16:06:39 -0700 | [diff] [blame] | 5101 | // TODO: Make this panic again once it stops happening routinely. |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5102 | func regnum(v *ssa.Value) int16 { |
Josh Bleecher Snyder | e139549 | 2015-08-05 16:06:39 -0700 | [diff] [blame] | 5103 | reg := v.Block.Func.RegAlloc[v.ID] |
| 5104 | if reg == nil { |
| 5105 | v.Unimplementedf("nil regnum for value: %s\n%s\n", v.LongString(), v.Block.Func) |
| 5106 | return 0 |
| 5107 | } |
| 5108 | return ssaRegToReg[reg.(*ssa.Register).Num] |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5109 | } |
| 5110 | |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 5111 | // autoVar returns a *Node and int64 representing the auto variable and offset within it |
| 5112 | // where v should be spilled. |
| 5113 | func autoVar(v *ssa.Value) (*Node, int64) { |
| 5114 | loc := v.Block.Func.RegAlloc[v.ID].(ssa.LocalSlot) |
Keith Randall | 9094e3a | 2016-01-04 13:34:54 -0800 | [diff] [blame] | 5115 | if v.Type.Size() > loc.Type.Size() { |
| 5116 | v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type) |
| 5117 | } |
Keith Randall | 02f4d0a | 2015-11-02 08:10:26 -0800 | [diff] [blame] | 5118 | return loc.N.(*Node), loc.Off |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 5119 | } |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 5120 | |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 5121 | // fieldIdx finds the index of the field referred to by the ODOT node n. |
| 5122 | func fieldIdx(n *Node) int64 { |
| 5123 | t := n.Left.Type |
| 5124 | f := n.Right |
| 5125 | if t.Etype != TSTRUCT { |
| 5126 | panic("ODOT's LHS is not a struct") |
| 5127 | } |
| 5128 | |
| 5129 | var i int64 |
Matthew Dempsky | fe5b4a6 | 2016-03-10 01:50:58 -0800 | [diff] [blame] | 5130 | for t1, it := IterFields(t); t1 != nil; t1 = it.Next() { |
Keith Randall | a734bbc | 2016-01-11 21:05:33 -0800 | [diff] [blame] | 5131 | if t1.Etype != TFIELD { |
| 5132 | panic("non-TFIELD in TSTRUCT") |
| 5133 | } |
| 5134 | if t1.Sym != f.Sym { |
| 5135 | i++ |
| 5136 | continue |
| 5137 | } |
| 5138 | if t1.Width != n.Xoffset { |
| 5139 | panic("field offset doesn't match") |
| 5140 | } |
| 5141 | return i |
| 5142 | } |
| 5143 | panic(fmt.Sprintf("can't find field in expr %s\n", n)) |
| 5144 | |
| 5145 | 	// TODO: keep the result of this function somewhere in the ODOT Node
| 5146 | // so we don't have to recompute it each time we need it. |
| 5147 | } |
| 5148 | |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 5149 | // ssaExport exports a bunch of compiler services for the ssa backend. |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5150 | type ssaExport struct { |
| 5151 | log bool |
| 5152 | unimplemented bool |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 5153 | mustImplement bool |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5154 | } |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 5155 | |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 5156 | func (s *ssaExport) TypeBool() ssa.Type { return Types[TBOOL] } |
| 5157 | func (s *ssaExport) TypeInt8() ssa.Type { return Types[TINT8] } |
| 5158 | func (s *ssaExport) TypeInt16() ssa.Type { return Types[TINT16] } |
| 5159 | func (s *ssaExport) TypeInt32() ssa.Type { return Types[TINT32] } |
| 5160 | func (s *ssaExport) TypeInt64() ssa.Type { return Types[TINT64] } |
| 5161 | func (s *ssaExport) TypeUInt8() ssa.Type { return Types[TUINT8] } |
| 5162 | func (s *ssaExport) TypeUInt16() ssa.Type { return Types[TUINT16] } |
| 5163 | func (s *ssaExport) TypeUInt32() ssa.Type { return Types[TUINT32] } |
| 5164 | func (s *ssaExport) TypeUInt64() ssa.Type { return Types[TUINT64] } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 5165 | func (s *ssaExport) TypeFloat32() ssa.Type { return Types[TFLOAT32] } |
| 5166 | func (s *ssaExport) TypeFloat64() ssa.Type { return Types[TFLOAT64] } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 5167 | func (s *ssaExport) TypeInt() ssa.Type { return Types[TINT] } |
| 5168 | func (s *ssaExport) TypeUintptr() ssa.Type { return Types[TUINTPTR] } |
| 5169 | func (s *ssaExport) TypeString() ssa.Type { return Types[TSTRING] } |
| 5170 | func (s *ssaExport) TypeBytePtr() ssa.Type { return Ptrto(Types[TUINT8]) } |
| 5171 | |
Josh Bleecher Snyder | 8d31df18a | 2015-07-24 11:28:12 -0700 | [diff] [blame] | 5172 | // StringData returns a symbol (a *Sym wrapped in an interface) which |
| 5173 | // is the data component of a global string constant containing s. |
| 5174 | func (*ssaExport) StringData(s string) interface{} { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 5175 | // TODO: is idealstring correct? It might not matter... |
Josh Bleecher Snyder | 8d31df18a | 2015-07-24 11:28:12 -0700 | [diff] [blame] | 5176 | _, data := stringsym(s) |
| 5177 | return &ssa.ExternSymbol{Typ: idealstring, Sym: data} |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 5178 | } |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5179 | |
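// Auto allocates a new AUTO variable of type t in the current function and
// returns it as an ssa.GCNode.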
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 5180 | func (e *ssaExport) Auto(t ssa.Type) ssa.GCNode { |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 5181 | n := temp(t.(*Type)) // Note: adds new auto to Curfn.Func.Dcl list |
| 5182 | e.mustImplement = true // This modifies the input to SSA, so we want to make sure we succeed from here! |
| 5183 | return n |
| 5184 | } |
| 5185 | |
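// CanSSA reports whether values of type t can be represented as SSA values
// rather than having to live in memory.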
Keith Randall | 7d61246 | 2015-10-22 13:07:38 -0700 | [diff] [blame] | 5186 | func (e *ssaExport) CanSSA(t ssa.Type) bool { |
Keith Randall | 37590bd | 2015-09-18 22:58:10 -0700 | [diff] [blame] | 5187 | return canSSAType(t.(*Type)) |
| 5188 | } |
| 5189 | |
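// Line returns the given line number formatted as a source position string.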
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 5190 | func (e *ssaExport) Line(line int32) string { |
Robert Griesemer | 2faf5bc | 2016-03-02 11:30:29 -0800 | [diff] [blame] | 5191 | return linestr(line) |
Keith Randall | b5c5efd | 2016-01-14 16:02:23 -0800 | [diff] [blame] | 5192 | } |
| 5193 | |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 5194 | // Logf logs a message from the compiler.
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 5195 | func (e *ssaExport) Logf(msg string, args ...interface{}) { |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5196 | // If e was marked as unimplemented, anything could happen. Ignore. |
| 5197 | if e.log && !e.unimplemented { |
| 5198 | fmt.Printf(msg, args...) |
| 5199 | } |
| 5200 | } |
| 5201 | |
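// Log reports whether compiler logging is enabled.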
David Chase | 88b230e | 2016-01-29 14:44:15 -0500 | [diff] [blame] | 5202 | func (e *ssaExport) Log() bool { |
| 5203 | return e.log |
| 5204 | } |
| 5205 | |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5206 | // Fatalf reports a compiler error and exits.
Keith Randall | da8af47 | 2016-01-13 11:14:57 -0800 | [diff] [blame] | 5207 | func (e *ssaExport) Fatalf(line int32, msg string, args ...interface{}) { |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5208 | // If e was marked as unimplemented, anything could happen. Ignore. |
| 5209 | if !e.unimplemented { |
Keith Randall | da8af47 | 2016-01-13 11:14:57 -0800 | [diff] [blame] | 5210 | lineno = line |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 5211 | Fatalf(msg, args...) |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5212 | } |
| 5213 | } |
| 5214 | |
| 5215 | // Unimplementedf reports that the function cannot be compiled.
| 5216 | // It will be removed once SSA work is complete. |
Keith Randall | da8af47 | 2016-01-13 11:14:57 -0800 | [diff] [blame] | 5217 | func (e *ssaExport) Unimplementedf(line int32, msg string, args ...interface{}) { |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 5218 | if e.mustImplement { |
Keith Randall | da8af47 | 2016-01-13 11:14:57 -0800 | [diff] [blame] | 5219 | lineno = line |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 5220 | Fatalf(msg, args...) |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 5221 | } |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 5222 | const alwaysLog = false // enable to calculate top unimplemented features |
| 5223 | if !e.unimplemented && (e.log || alwaysLog) { |
| 5224 | // first implementation failure, print explanation |
| 5225 | fmt.Printf("SSA unimplemented: "+msg+"\n", args...) |
| 5226 | } |
| 5227 | e.unimplemented = true |
| 5228 | } |
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 5229 | |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 5230 | // Warnl reports a "warning", which is usually flag-triggered |
| 5231 | // logging output for the benefit of tests. |
Todd Neal | 98b88de | 2016-03-13 23:04:31 -0500 | [diff] [blame^] | 5232 | func (e *ssaExport) Warnl(line int32, fmt_ string, args ...interface{}) { |
| 5233 | Warnl(line, fmt_, args...) |
David Chase | 729abfa | 2015-10-26 17:34:06 -0400 | [diff] [blame] | 5234 | } |
| 5235 | |
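// Debug_checknil reports whether nil-check debugging output is enabled.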
| 5236 | func (e *ssaExport) Debug_checknil() bool { |
| 5237 | return Debug_checknil != 0 |
| 5238 | } |
| 5239 | |
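// Typ returns n's type as an ssa.Type, presumably so that *Node can satisfy
// the ssa.GCNode interface that Auto returns.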
Keith Randall | c24681a | 2015-10-22 14:22:38 -0700 | [diff] [blame] | 5240 | func (n *Node) Typ() ssa.Type { |
| 5241 | return n.Type |
| 5242 | } |