// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"bytes"
	"fmt"
	"html"
	"math"
	"os"
	"strings"

	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/x86"
)

// buildssa builds an SSA function
// and reports whether it should be used.
// Once the SSA implementation is complete,
// it will never return nil, and the bool can be removed.
func buildssa(fn *Node) (ssafn *ssa.Func, usessa bool) {
	name := fn.Func.Nname.Sym.Name
	usessa = strings.HasSuffix(name, "_ssa") || name == os.Getenv("GOSSAFUNC")
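	// For example (hypothetical names): writing a function as func fib_ssa(...)
	// or building with GOSSAFUNC=fib selects fib for SSA-based compilation.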

	if usessa {
		fmt.Println("generating SSA for", name)
		dumplist("buildssa-enter", fn.Func.Enter)
		dumplist("buildssa-body", fn.Nbody)
	}

	var s state
	s.pushLine(fn.Lineno)
	defer s.popLine()

	// TODO(khr): build config just once at the start of the compiler binary

	var e ssaExport
	e.log = usessa
	s.config = ssa.NewConfig(Thearch.Thestring, &e)
	s.f = s.config.NewFunc()
	s.f.Name = name

	if name == os.Getenv("GOSSAFUNC") {
		// TODO: tempfile? it is handy to have the location
		// of this file be stable, so you can just reload in the browser.
		s.config.HTML = ssa.NewHTMLWriter("ssa.html", &s, name)
		// TODO: generate and print a mapping from nodes to values and blocks
	}
	defer func() {
		if !usessa {
			s.config.HTML.Close()
		}
	}()

	// If SSA support for the function is incomplete,
	// assume that any panics are due to violated
	// invariants. Swallow them silently.
	defer func() {
		if err := recover(); err != nil {
			if !e.unimplemented {
				panic(err)
			}
		}
	}()

	// We construct SSA using an algorithm similar to
	// Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau
	// http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf
	// TODO: check this comment

	// Allocate starting block
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)

	// Allocate starting values
	s.vars = map[*Node]*ssa.Value{}
	s.labels = map[string]*ssaLabel{}
	s.labeledNodes = map[*Node]*ssaLabel{}
	s.startmem = s.entryNewValue0(ssa.OpArg, ssa.TypeMem)
	s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
	s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])
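	// SP and SB are pseudo-registers exposed as SSA values: SP is the stack
	// pointer, SB the static base used to address global symbols. The
	// addresses built below are formed relative to one of them.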

	// Generate addresses of local declarations
	s.decladdrs = map[*Node]*ssa.Value{}
	for d := fn.Func.Dcl; d != nil; d = d.Next {
		n := d.N
		switch n.Class {
		case PPARAM, PPARAMOUT:
			aux := &ssa.ArgSymbol{Typ: n.Type, Node: n}
			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp)
		case PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		case PFUNC:
			// local function - already handled by frontend
		default:
			str := ""
			if n.Class&PHEAP != 0 {
				str = ",heap"
			}
			s.Unimplementedf("local variable with class %s%s unimplemented", classnames[n.Class&^PHEAP], str)
		}
	}
	// nodfp is a special argument which is the function's FP.
	aux := &ssa.ArgSymbol{Typ: Types[TUINTPTR], Node: nodfp}
	s.decladdrs[nodfp] = s.entryNewValue1A(ssa.OpAddr, Types[TUINTPTR], aux, s.sp)

	// Convert the AST-based IR to the SSA-based IR
	s.startBlock(s.f.Entry)
	s.stmtList(fn.Func.Enter)
	s.stmtList(fn.Nbody)

	// fallthrough to exit
	if s.curBlock != nil {
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRet
		b.Control = m
	}

	// Check that we used all labels
	for name, lab := range s.labels {
		if !lab.used() && !lab.reported {
			yyerrorl(int(lab.defNode.Lineno), "label %v defined and not used", name)
			lab.reported = true
		}
		if lab.used() && !lab.defined() && !lab.reported {
			yyerrorl(int(lab.useNode.Lineno), "label %v not defined", name)
			lab.reported = true
		}
	}

	// Check any forward gotos. Non-forward gotos have already been checked.
	for _, n := range s.fwdGotos {
		lab := s.labels[n.Left.Sym.Name]
		// If the label is undefined, we have already printed an error.
		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		}
	}

	if nerrors > 0 {
		return nil, false
	}

	// Link up variable uses to variable definitions
	s.linkForwardReferences()

	// Main call to ssa package to compile function
	ssa.Compile(s.f)

	// Calculate stats about what percentage of functions SSA handles.
	if false {
		fmt.Printf("SSA implemented: %t\n", !e.unimplemented)
	}

	if e.unimplemented {
		return nil, false
	}

	// TODO: enable codegen more broadly once the codegen stabilizes
	// and runtime support is in (gc maps, write barriers, etc.)
	return s.f, usessa || localpkg.Name == os.Getenv("GOSSAPKG")
}

type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// labels and labeled control flow nodes (OFOR, OSWITCH, OSELECT) in f
	labels       map[string]*ssaLabel
	labeledNodes map[*Node]*ssaLabel

	// gotos that jump forward; required for deferred checkgoto calls
	fwdGotos []*Node

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	vars map[*Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[*Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables.
	decladdrs map[*Node]*ssa.Value

| 199 | // starting values. Memory, frame pointer, and stack pointer |
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// line number stack. The current line number is top of stack
	line []int32
}

type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
	defNode        *Node      // label definition Node (OLABEL)
	// Label use Node (OGOTO, OBREAK, OCONTINUE).
	// Used only for error detection and reporting.
	// There might be multiple uses, but we only need to track one.
	useNode  *Node
	reported bool // reported indicates whether an error has already been reported for this label
}

// defined reports whether the label has a definition (OLABEL node).
func (l *ssaLabel) defined() bool { return l.defNode != nil }

// used reports whether the label has a use (OGOTO, OBREAK, or OCONTINUE node).
func (l *ssaLabel) used() bool { return l.useNode != nil }

// label returns the label associated with sym, creating it if necessary.
func (s *state) label(sym *Sym) *ssaLabel {
	lab := s.labels[sym.Name]
	if lab == nil {
		lab = new(ssaLabel)
		s.labels[sym.Name] = lab
	}
	return lab
}

func (s *state) Logf(msg string, args ...interface{})   { s.config.Logf(msg, args...) }
func (s *state) Fatalf(msg string, args ...interface{}) { s.config.Fatalf(msg, args...) }
func (s *state) Unimplementedf(msg string, args ...interface{}) { s.config.Unimplementedf(msg, args...) }

// dummy node for the memory variable
var memvar = Node{Op: ONAME, Sym: &Sym{Name: "mem"}}
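// The SSA builder threads the memory state through the function as if it were
// an ordinary variable: the current memory value is kept in s.vars under this
// dummy ONAME node, so the same forward-reference machinery that resolves real
// variables across blocks also resolves memory.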

// startBlock sets the current block we're generating code in to b.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[*Node]*ssa.Value{}
}

// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	b.Line = s.peekLine()
	return b
}

// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line int32) {
	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekLine peeks at the top of the line number stack.
func (s *state) peekLine() int32 {
	return s.line[len(s.line)-1]
}

func (s *state) Error(msg string, args ...interface{}) {
	yyerrorl(int(s.peekLine()), msg, args...)
}
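
// The newValue* and entryNewValue* helpers below wrap the corresponding ssa
// constructors on the current block (or on the entry block). The digit in the
// name is the number of SSA arguments; an A suffix adds an aux operand and an
// I suffix adds an auxint operand. They exist mainly so that every value is
// stamped with the current source line (peekLine).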

// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekLine(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekLine(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekLine(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekLine(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t ssa.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekLine(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekLine(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t ssa.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekLine(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekLine(), op, t, aux, arg0, arg1, arg2)
}

// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.f.Entry.NewValue0(s.peekLine(), op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.f.Entry.NewValue0A(s.peekLine(), op, t, aux)
}

// entryNewValue0I adds a new value with no arguments and an auxint value to the entry block.
func (s *state) entryNewValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.f.Entry.NewValue0I(s.peekLine(), op, t, auxint)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1(s.peekLine(), op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t ssa.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1I(s.peekLine(), op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue2(s.peekLine(), op, t, arg0, arg1)
}

// const* routines add a new const value to the entry block.
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(s.peekLine(), Types[TBOOL], c)
}
func (s *state) constInt8(t ssa.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(s.peekLine(), t, c)
}
func (s *state) constInt16(t ssa.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(s.peekLine(), t, c)
}
func (s *state) constInt32(t ssa.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(s.peekLine(), t, c)
}
func (s *state) constInt64(t ssa.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(s.peekLine(), t, c)
}
func (s *state) constFloat32(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(s.peekLine(), t, c)
}
func (s *state) constFloat64(t ssa.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(s.peekLine(), t, c)
}
func (s *state) constIntPtr(t ssa.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 4 && int64(int32(c)) != c {
		s.Fatalf("pointer constant too big %d", c)
	}
	return s.f.ConstIntPtr(s.peekLine(), t, c)
}
func (s *state) constInt(t ssa.Type, c int64) *ssa.Value {
	if s.config.IntSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
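// For example, s.constInt(Types[TINT], 1) produces a 64-bit integer constant
// on a target with 8-byte ints and a 32-bit constant otherwise (a sketch of
// the intent; the exact opcode comes from the ssa package's Const* helpers).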

// stmtList converts the statement list l to SSA and adds it to s.
func (s *state) stmtList(l *NodeList) {
	for ; l != nil; l = l.Next {
		s.stmt(l.N)
	}
}

// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n *Node) {
	s.pushLine(n.Lineno)
	defer s.popLine()

	// If s.curBlock is nil, then we're about to generate dead code.
	// We can't just short-circuit here, though,
	// because we check labels and gotos as part of SSA generation.
	// Provide a block for the dead code so that we don't have
	// to add special cases everywhere else.
	if s.curBlock == nil {
		dead := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(dead)
	}
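	// For example, statements that follow an unconditional goto are
	// unreachable, but they must still be walked so that their labels are
	// recorded; the dead block gives that code somewhere to live.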

	s.stmtList(n.Ninit)
	switch n.Op {

	case OBLOCK:
		s.stmtList(n.List)

	// No-ops
	case OEMPTY, ODCLCONST, ODCLTYPE, OFALL:

	// Expression statements
	case OCALLFUNC, OCALLMETH, OCALLINTER:
		s.expr(n)

	case ODCL:
		if n.Left.Class&PHEAP == 0 {
			return
		}
		if compiling_runtime != 0 {
			Fatalf("%v escapes to heap, not allowed in runtime.", n)
		}

		// TODO: the old pass hides the details of PHEAP
		// variables behind ONAME nodes. Figure out if it's better
		// to rewrite the tree and make the heapaddr construct explicit
		// or to keep this detail hidden behind the scenes.
		palloc := prealloc[n.Left]
		if palloc == nil {
			palloc = callnew(n.Left.Type)
			prealloc[n.Left] = palloc
		}
		r := s.expr(palloc)
		s.assign(n.Left.Name.Heapaddr, r, false)

	case OLABEL:
		sym := n.Left.Sym

		if isblanksym(sym) {
			// Empty identifier is valid but useless.
			// See issues 11589, 11593.
			return
		}

		lab := s.label(sym)

		// Associate label with its control flow node, if any
		if ctl := n.Name.Defn; ctl != nil {
			switch ctl.Op {
			case OFOR, OSWITCH, OSELECT:
				s.labeledNodes[ctl] = lab
			}
		}

		if !lab.defined() {
			lab.defNode = n
		} else {
			s.Error("label %v already defined at %v", sym, Ctxt.Line(int(lab.defNode.Lineno)))
			lab.reported = true
		}
		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// go to that label (we pretend "label:" is preceded by "goto label")
		b := s.endBlock()
		b.AddEdgeTo(lab.target)
		s.startBlock(lab.target)

	case OGOTO:
		sym := n.Left.Sym

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}
		if !lab.used() {
			lab.useNode = n
		}

		if lab.defined() {
			s.checkgoto(n, lab.defNode)
		} else {
			s.fwdGotos = append(s.fwdGotos, n)
		}

		b := s.endBlock()
		b.AddEdgeTo(lab.target)

	case OAS, OASWB:
		// Check whether we can generate static data rather than code.
		// If so, ignore n and defer data generation until codegen.
		// Failure to do this causes writes to readonly symbols.
		if gen_as_init(n, true) {
			var data []*Node
			if s.f.StaticData != nil {
				data = s.f.StaticData.([]*Node)
			}
			s.f.StaticData = append(data, n)
			return
		}
		var r *ssa.Value
		if n.Right != nil {
			r = s.expr(n.Right)
		}
		s.assign(n.Left, r, n.Op == OASWB)

	case OIF:
		cond := s.expr(n.Left)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.Control = cond
		b.Likely = ssa.BranchPrediction(n.Likely) // gc and ssa both use -1/0/+1 for likeliness

		bThen := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var bElse *ssa.Block
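		// The wiring below builds the usual if/else diamond (a sketch):
		//
		//	     b (BlockIf, control = cond)
		//	    /  \
		//	 bThen  bElse (or bEnd directly when there is no else)
		//	    \  /
		//	    bEnd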

		if n.Rlist == nil {
			b.AddEdgeTo(bThen)
			b.AddEdgeTo(bEnd)
		} else {
			bElse = s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bThen)
			b.AddEdgeTo(bElse)
		}

		s.startBlock(bThen)
		s.stmtList(n.Nbody)
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}

		if n.Rlist != nil {
			s.startBlock(bElse)
			s.stmtList(n.Rlist)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ORETURN:
		s.stmtList(n.List)
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRet
		b.Control = m
	case ORETJMP:
		s.stmtList(n.List)
		m := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.Aux = n.Left.Sym
		b.Control = m

	case OCONTINUE, OBREAK:
		var op string
		var to *ssa.Block
		switch n.Op {
		case OCONTINUE:
			op = "continue"
			to = s.continueTo
		case OBREAK:
			op = "break"
			to = s.breakTo
		}
		if n.Left == nil {
			// plain break/continue
			if to == nil {
				s.Error("%s is not in a loop", op)
				return
			}
			// nothing to do; "to" is already the correct target
		} else {
			// labeled break/continue; look up the target
			sym := n.Left.Sym
			lab := s.label(sym)
			if !lab.used() {
				lab.useNode = n.Left
			}
			if !lab.defined() {
				s.Error("%s label not defined: %v", op, sym)
				lab.reported = true
				return
			}
			switch n.Op {
			case OCONTINUE:
				to = lab.continueTarget
			case OBREAK:
				to = lab.breakTarget
			}
			if to == nil {
				// Valid label but not usable with a break/continue here, e.g.:
				// for {
				//	continue abc
				// }
				// abc:
				// for {}
				s.Error("invalid %s label %v", op, sym)
				lab.reported = true
				return
			}
		}

		b := s.endBlock()
		b.AddEdgeTo(to)

	case OFOR:
		// OFOR: for Ninit; Left; Right { Nbody }
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)
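		// The loop is lowered to this shape (a sketch):
		//	entry -> bCond
		//	bCond -> bBody (likely) or bEnd
		//	bBody -> bIncr (bIncr is the continue target)
		//	bIncr -> bCond
		//	bEnd is the break target and the code after the loop.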

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		var cond *ssa.Value
		if n.Left != nil {
			cond = s.expr(n.Left)
		} else {
			cond = s.constBool(true)
		}
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.Control = cond
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bBody)
		b.AddEdgeTo(bEnd)

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled for loop
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Nbody)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Right != nil {
			s.stmt(n.Right)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
		}
		s.startBlock(bEnd)

	case OSWITCH, OSELECT:
		// These have been mostly rewritten by the front end into their Nbody fields.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		lab := s.labeledNodes[n]
		if lab != nil {
			// labeled
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(n.Nbody)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bEnd)
		}
		s.startBlock(bEnd)

	case OVARKILL:
		// Insert a varkill op to record that a variable is no longer live.
		// We only care about liveness info at call sites, so putting the
		// varkill in the store chain is enough to keep it correctly ordered
		// with respect to call ops.
		s.vars[&memvar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, n.Left, s.mem())

	case OPROC, ODEFER:
		call := n.Left
		fn := call.Left
		if call.Op != OCALLFUNC {
			s.Unimplementedf("defer/go of %s", opnames[call.Op])
		}

		// Run all argument assignments. The arg slots have already
		// been offset by 2*widthptr.
		s.stmtList(call.List)

		// Write argsize and closure (args to Newproc/Deferproc)
		argsize := s.constInt32(Types[TUINT32], int32(fn.Type.Argwid))
		s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, s.sp, argsize, s.mem())
		closure := s.expr(fn)
		addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(Types[TUINTPTR]), int64(Widthptr), s.sp)
		s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem())
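		// At this point the out-args area holds, in order (a sketch of the
		// layout the runtime expects): the argument size at SP, the closure
		// pointer at SP+Widthptr, and the call's own arguments starting at
		// SP+2*Widthptr (stored by the stmtList above).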

		// Call deferproc or newproc
		bNext := s.f.NewBlock(ssa.BlockPlain)
		var op ssa.Op
		switch n.Op {
		case ODEFER:
			op = ssa.OpDeferCall
		case OPROC:
			op = ssa.OpGoCall
		}
		r := s.newValue1(op, ssa.TypeMem, s.mem())
		r.AuxInt = fn.Type.Argwid + 2*int64(Widthptr) // total stack space used
		s.vars[&memvar] = r
		b := s.endBlock()
		b.Kind = ssa.BlockCall
		b.Control = r
		b.AddEdgeTo(bNext)
		s.startBlock(bNext)

	case OCHECKNIL:
		p := s.expr(n.Left)
		s.nilCheck(p)

	default:
		s.Unimplementedf("unhandled stmt %s", opnames[n.Op])
	}
}

type opAndType struct {
	op    uint8
	etype uint8
}

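// opToSSA maps a gc operator applied to operands of a given concrete type to
// the corresponding sized SSA opcode. ssaOp (below) first narrows TINT, TUINT,
// and TUINTPTR to a concrete width via concreteEtype, then consults this table.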
| 789 | var opToSSA = map[opAndType]ssa.Op{ |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 790 | opAndType{OADD, TINT8}: ssa.OpAdd8, |
| 791 | opAndType{OADD, TUINT8}: ssa.OpAdd8, |
| 792 | opAndType{OADD, TINT16}: ssa.OpAdd16, |
| 793 | opAndType{OADD, TUINT16}: ssa.OpAdd16, |
| 794 | opAndType{OADD, TINT32}: ssa.OpAdd32, |
| 795 | opAndType{OADD, TUINT32}: ssa.OpAdd32, |
| 796 | opAndType{OADD, TPTR32}: ssa.OpAdd32, |
| 797 | opAndType{OADD, TINT64}: ssa.OpAdd64, |
| 798 | opAndType{OADD, TUINT64}: ssa.OpAdd64, |
| 799 | opAndType{OADD, TPTR64}: ssa.OpAdd64, |
| 800 | opAndType{OADD, TFLOAT32}: ssa.OpAdd32F, |
| 801 | opAndType{OADD, TFLOAT64}: ssa.OpAdd64F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 802 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 803 | opAndType{OSUB, TINT8}: ssa.OpSub8, |
| 804 | opAndType{OSUB, TUINT8}: ssa.OpSub8, |
| 805 | opAndType{OSUB, TINT16}: ssa.OpSub16, |
| 806 | opAndType{OSUB, TUINT16}: ssa.OpSub16, |
| 807 | opAndType{OSUB, TINT32}: ssa.OpSub32, |
| 808 | opAndType{OSUB, TUINT32}: ssa.OpSub32, |
| 809 | opAndType{OSUB, TINT64}: ssa.OpSub64, |
| 810 | opAndType{OSUB, TUINT64}: ssa.OpSub64, |
| 811 | opAndType{OSUB, TFLOAT32}: ssa.OpSub32F, |
| 812 | opAndType{OSUB, TFLOAT64}: ssa.OpSub64F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 813 | |
Josh Bleecher Snyder | e61e7c9 | 2015-07-22 19:19:40 -0700 | [diff] [blame] | 814 | opAndType{ONOT, TBOOL}: ssa.OpNot, |
| 815 | |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 816 | opAndType{OMINUS, TINT8}: ssa.OpNeg8, |
| 817 | opAndType{OMINUS, TUINT8}: ssa.OpNeg8, |
| 818 | opAndType{OMINUS, TINT16}: ssa.OpNeg16, |
| 819 | opAndType{OMINUS, TUINT16}: ssa.OpNeg16, |
| 820 | opAndType{OMINUS, TINT32}: ssa.OpNeg32, |
| 821 | opAndType{OMINUS, TUINT32}: ssa.OpNeg32, |
| 822 | opAndType{OMINUS, TINT64}: ssa.OpNeg64, |
| 823 | opAndType{OMINUS, TUINT64}: ssa.OpNeg64, |
| 824 | opAndType{OMINUS, TFLOAT32}: ssa.OpNeg32F, |
| 825 | opAndType{OMINUS, TFLOAT64}: ssa.OpNeg64F, |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 826 | |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 827 | opAndType{OCOM, TINT8}: ssa.OpCom8, |
| 828 | opAndType{OCOM, TUINT8}: ssa.OpCom8, |
| 829 | opAndType{OCOM, TINT16}: ssa.OpCom16, |
| 830 | opAndType{OCOM, TUINT16}: ssa.OpCom16, |
| 831 | opAndType{OCOM, TINT32}: ssa.OpCom32, |
| 832 | opAndType{OCOM, TUINT32}: ssa.OpCom32, |
| 833 | opAndType{OCOM, TINT64}: ssa.OpCom64, |
| 834 | opAndType{OCOM, TUINT64}: ssa.OpCom64, |
| 835 | |
Josh Bleecher Snyder | fa5fe19 | 2015-09-06 19:24:59 -0700 | [diff] [blame] | 836 | opAndType{OIMAG, TCOMPLEX64}: ssa.OpComplexImag, |
| 837 | opAndType{OIMAG, TCOMPLEX128}: ssa.OpComplexImag, |
| 838 | opAndType{OREAL, TCOMPLEX64}: ssa.OpComplexReal, |
| 839 | opAndType{OREAL, TCOMPLEX128}: ssa.OpComplexReal, |
| 840 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 841 | opAndType{OMUL, TINT8}: ssa.OpMul8, |
| 842 | opAndType{OMUL, TUINT8}: ssa.OpMul8, |
| 843 | opAndType{OMUL, TINT16}: ssa.OpMul16, |
| 844 | opAndType{OMUL, TUINT16}: ssa.OpMul16, |
| 845 | opAndType{OMUL, TINT32}: ssa.OpMul32, |
| 846 | opAndType{OMUL, TUINT32}: ssa.OpMul32, |
| 847 | opAndType{OMUL, TINT64}: ssa.OpMul64, |
| 848 | opAndType{OMUL, TUINT64}: ssa.OpMul64, |
| 849 | opAndType{OMUL, TFLOAT32}: ssa.OpMul32F, |
| 850 | opAndType{OMUL, TFLOAT64}: ssa.OpMul64F, |
| 851 | |
| 852 | opAndType{ODIV, TFLOAT32}: ssa.OpDiv32F, |
| 853 | opAndType{ODIV, TFLOAT64}: ssa.OpDiv64F, |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 854 | |
Todd Neal | 67cbd5b | 2015-08-18 19:14:47 -0500 | [diff] [blame] | 855 | opAndType{OHMUL, TINT8}: ssa.OpHmul8, |
| 856 | opAndType{OHMUL, TUINT8}: ssa.OpHmul8u, |
| 857 | opAndType{OHMUL, TINT16}: ssa.OpHmul16, |
| 858 | opAndType{OHMUL, TUINT16}: ssa.OpHmul16u, |
| 859 | opAndType{OHMUL, TINT32}: ssa.OpHmul32, |
| 860 | opAndType{OHMUL, TUINT32}: ssa.OpHmul32u, |
| 861 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 862 | opAndType{ODIV, TINT8}: ssa.OpDiv8, |
| 863 | opAndType{ODIV, TUINT8}: ssa.OpDiv8u, |
| 864 | opAndType{ODIV, TINT16}: ssa.OpDiv16, |
| 865 | opAndType{ODIV, TUINT16}: ssa.OpDiv16u, |
| 866 | opAndType{ODIV, TINT32}: ssa.OpDiv32, |
| 867 | opAndType{ODIV, TUINT32}: ssa.OpDiv32u, |
| 868 | opAndType{ODIV, TINT64}: ssa.OpDiv64, |
| 869 | opAndType{ODIV, TUINT64}: ssa.OpDiv64u, |
| 870 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 871 | opAndType{OMOD, TINT8}: ssa.OpMod8, |
| 872 | opAndType{OMOD, TUINT8}: ssa.OpMod8u, |
| 873 | opAndType{OMOD, TINT16}: ssa.OpMod16, |
| 874 | opAndType{OMOD, TUINT16}: ssa.OpMod16u, |
| 875 | opAndType{OMOD, TINT32}: ssa.OpMod32, |
| 876 | opAndType{OMOD, TUINT32}: ssa.OpMod32u, |
| 877 | opAndType{OMOD, TINT64}: ssa.OpMod64, |
| 878 | opAndType{OMOD, TUINT64}: ssa.OpMod64u, |
| 879 | |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 880 | opAndType{OAND, TINT8}: ssa.OpAnd8, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 881 | opAndType{OAND, TUINT8}: ssa.OpAnd8, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 882 | opAndType{OAND, TINT16}: ssa.OpAnd16, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 883 | opAndType{OAND, TUINT16}: ssa.OpAnd16, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 884 | opAndType{OAND, TINT32}: ssa.OpAnd32, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 885 | opAndType{OAND, TUINT32}: ssa.OpAnd32, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 886 | opAndType{OAND, TINT64}: ssa.OpAnd64, |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 887 | opAndType{OAND, TUINT64}: ssa.OpAnd64, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 888 | |
Alexandru Moșoi | 7402416 | 2015-07-29 17:52:25 +0200 | [diff] [blame] | 889 | opAndType{OOR, TINT8}: ssa.OpOr8, |
| 890 | opAndType{OOR, TUINT8}: ssa.OpOr8, |
| 891 | opAndType{OOR, TINT16}: ssa.OpOr16, |
| 892 | opAndType{OOR, TUINT16}: ssa.OpOr16, |
| 893 | opAndType{OOR, TINT32}: ssa.OpOr32, |
| 894 | opAndType{OOR, TUINT32}: ssa.OpOr32, |
| 895 | opAndType{OOR, TINT64}: ssa.OpOr64, |
| 896 | opAndType{OOR, TUINT64}: ssa.OpOr64, |
| 897 | |
Alexandru Moșoi | 6d9362a1 | 2015-07-30 12:33:36 +0200 | [diff] [blame] | 898 | opAndType{OXOR, TINT8}: ssa.OpXor8, |
| 899 | opAndType{OXOR, TUINT8}: ssa.OpXor8, |
| 900 | opAndType{OXOR, TINT16}: ssa.OpXor16, |
| 901 | opAndType{OXOR, TUINT16}: ssa.OpXor16, |
| 902 | opAndType{OXOR, TINT32}: ssa.OpXor32, |
| 903 | opAndType{OXOR, TUINT32}: ssa.OpXor32, |
| 904 | opAndType{OXOR, TINT64}: ssa.OpXor64, |
| 905 | opAndType{OXOR, TUINT64}: ssa.OpXor64, |
| 906 | |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 907 | opAndType{OEQ, TBOOL}: ssa.OpEq8, |
| 908 | opAndType{OEQ, TINT8}: ssa.OpEq8, |
| 909 | opAndType{OEQ, TUINT8}: ssa.OpEq8, |
| 910 | opAndType{OEQ, TINT16}: ssa.OpEq16, |
| 911 | opAndType{OEQ, TUINT16}: ssa.OpEq16, |
| 912 | opAndType{OEQ, TINT32}: ssa.OpEq32, |
| 913 | opAndType{OEQ, TUINT32}: ssa.OpEq32, |
| 914 | opAndType{OEQ, TINT64}: ssa.OpEq64, |
| 915 | opAndType{OEQ, TUINT64}: ssa.OpEq64, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 916 | opAndType{OEQ, TINTER}: ssa.OpEqFat, // e == nil only |
| 917 | opAndType{OEQ, TARRAY}: ssa.OpEqFat, // slice only; a == nil only |
| 918 | opAndType{OEQ, TFUNC}: ssa.OpEqPtr, |
| 919 | opAndType{OEQ, TMAP}: ssa.OpEqPtr, |
| 920 | opAndType{OEQ, TCHAN}: ssa.OpEqPtr, |
Todd Neal | 5fdd4fe | 2015-08-30 20:47:26 -0500 | [diff] [blame] | 921 | opAndType{OEQ, TPTR64}: ssa.OpEqPtr, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 922 | opAndType{OEQ, TUINTPTR}: ssa.OpEqPtr, |
| 923 | opAndType{OEQ, TUNSAFEPTR}: ssa.OpEqPtr, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 924 | opAndType{OEQ, TFLOAT64}: ssa.OpEq64F, |
| 925 | opAndType{OEQ, TFLOAT32}: ssa.OpEq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 926 | |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 927 | opAndType{ONE, TBOOL}: ssa.OpNeq8, |
| 928 | opAndType{ONE, TINT8}: ssa.OpNeq8, |
| 929 | opAndType{ONE, TUINT8}: ssa.OpNeq8, |
| 930 | opAndType{ONE, TINT16}: ssa.OpNeq16, |
| 931 | opAndType{ONE, TUINT16}: ssa.OpNeq16, |
| 932 | opAndType{ONE, TINT32}: ssa.OpNeq32, |
| 933 | opAndType{ONE, TUINT32}: ssa.OpNeq32, |
| 934 | opAndType{ONE, TINT64}: ssa.OpNeq64, |
| 935 | opAndType{ONE, TUINT64}: ssa.OpNeq64, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 936 | opAndType{ONE, TINTER}: ssa.OpNeqFat, // e != nil only |
| 937 | opAndType{ONE, TARRAY}: ssa.OpNeqFat, // slice only; a != nil only |
| 938 | opAndType{ONE, TFUNC}: ssa.OpNeqPtr, |
| 939 | opAndType{ONE, TMAP}: ssa.OpNeqPtr, |
| 940 | opAndType{ONE, TCHAN}: ssa.OpNeqPtr, |
Todd Neal | 5fdd4fe | 2015-08-30 20:47:26 -0500 | [diff] [blame] | 941 | opAndType{ONE, TPTR64}: ssa.OpNeqPtr, |
Josh Bleecher Snyder | 1bab5b9 | 2015-07-28 14:14:25 -0700 | [diff] [blame] | 942 | opAndType{ONE, TUINTPTR}: ssa.OpNeqPtr, |
| 943 | opAndType{ONE, TUNSAFEPTR}: ssa.OpNeqPtr, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 944 | opAndType{ONE, TFLOAT64}: ssa.OpNeq64F, |
| 945 | opAndType{ONE, TFLOAT32}: ssa.OpNeq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 946 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 947 | opAndType{OLT, TINT8}: ssa.OpLess8, |
| 948 | opAndType{OLT, TUINT8}: ssa.OpLess8U, |
| 949 | opAndType{OLT, TINT16}: ssa.OpLess16, |
| 950 | opAndType{OLT, TUINT16}: ssa.OpLess16U, |
| 951 | opAndType{OLT, TINT32}: ssa.OpLess32, |
| 952 | opAndType{OLT, TUINT32}: ssa.OpLess32U, |
| 953 | opAndType{OLT, TINT64}: ssa.OpLess64, |
| 954 | opAndType{OLT, TUINT64}: ssa.OpLess64U, |
| 955 | opAndType{OLT, TFLOAT64}: ssa.OpLess64F, |
| 956 | opAndType{OLT, TFLOAT32}: ssa.OpLess32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 957 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 958 | opAndType{OGT, TINT8}: ssa.OpGreater8, |
| 959 | opAndType{OGT, TUINT8}: ssa.OpGreater8U, |
| 960 | opAndType{OGT, TINT16}: ssa.OpGreater16, |
| 961 | opAndType{OGT, TUINT16}: ssa.OpGreater16U, |
| 962 | opAndType{OGT, TINT32}: ssa.OpGreater32, |
| 963 | opAndType{OGT, TUINT32}: ssa.OpGreater32U, |
| 964 | opAndType{OGT, TINT64}: ssa.OpGreater64, |
| 965 | opAndType{OGT, TUINT64}: ssa.OpGreater64U, |
| 966 | opAndType{OGT, TFLOAT64}: ssa.OpGreater64F, |
| 967 | opAndType{OGT, TFLOAT32}: ssa.OpGreater32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 968 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 969 | opAndType{OLE, TINT8}: ssa.OpLeq8, |
| 970 | opAndType{OLE, TUINT8}: ssa.OpLeq8U, |
| 971 | opAndType{OLE, TINT16}: ssa.OpLeq16, |
| 972 | opAndType{OLE, TUINT16}: ssa.OpLeq16U, |
| 973 | opAndType{OLE, TINT32}: ssa.OpLeq32, |
| 974 | opAndType{OLE, TUINT32}: ssa.OpLeq32U, |
| 975 | opAndType{OLE, TINT64}: ssa.OpLeq64, |
| 976 | opAndType{OLE, TUINT64}: ssa.OpLeq64U, |
| 977 | opAndType{OLE, TFLOAT64}: ssa.OpLeq64F, |
| 978 | opAndType{OLE, TFLOAT32}: ssa.OpLeq32F, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 979 | |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 980 | opAndType{OGE, TINT8}: ssa.OpGeq8, |
| 981 | opAndType{OGE, TUINT8}: ssa.OpGeq8U, |
| 982 | opAndType{OGE, TINT16}: ssa.OpGeq16, |
| 983 | opAndType{OGE, TUINT16}: ssa.OpGeq16U, |
| 984 | opAndType{OGE, TINT32}: ssa.OpGeq32, |
| 985 | opAndType{OGE, TUINT32}: ssa.OpGeq32U, |
| 986 | opAndType{OGE, TINT64}: ssa.OpGeq64, |
| 987 | opAndType{OGE, TUINT64}: ssa.OpGeq64U, |
| 988 | opAndType{OGE, TFLOAT64}: ssa.OpGeq64F, |
| 989 | opAndType{OGE, TFLOAT32}: ssa.OpGeq32F, |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 990 | |
| 991 | opAndType{OLROT, TUINT8}: ssa.OpLrot8, |
| 992 | opAndType{OLROT, TUINT16}: ssa.OpLrot16, |
| 993 | opAndType{OLROT, TUINT32}: ssa.OpLrot32, |
| 994 | opAndType{OLROT, TUINT64}: ssa.OpLrot64, |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 995 | |
| 996 | opAndType{OSQRT, TFLOAT64}: ssa.OpSqrt, |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 997 | } |
| 998 | |
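// concreteEtype returns the etype actually used to represent t, resolving
// the platform-sized TINT, TUINT, and TUINTPTR to their fixed-size
// equivalents. For example, with IntSize == 8 and PtrSize == 8 (amd64) all
// three resolve to their 64-bit counterparts; on a 32-bit target they
// resolve to the 32-bit ones. All other etypes are returned unchanged.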
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 999 | func (s *state) concreteEtype(t *Type) uint8 { |
| 1000 | e := t.Etype |
| 1001 | switch e { |
| 1002 | default: |
| 1003 | return e |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1004 | case TINT: |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1005 | if s.config.IntSize == 8 { |
| 1006 | return TINT64 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1007 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1008 | return TINT32 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1009 | case TUINT: |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1010 | if s.config.IntSize == 8 { |
| 1011 | return TUINT64 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1012 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1013 | return TUINT32 |
| 1014 | case TUINTPTR: |
| 1015 | if s.config.PtrSize == 8 { |
| 1016 | return TUINT64 |
| 1017 | } |
| 1018 | return TUINT32 |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1019 | } |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1020 | } |
| 1021 | |
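// ssaOp returns the SSA opcode implementing the Go operator op on operands
// of type t, consulting opToSSA after resolving t to a concrete etype.
// For example, on a 64-bit target ssaOp(OLT, Types[TINT]) yields
// ssa.OpLess64, while ssaOp(OLT, Types[TUINT8]) yields ssa.OpLess8U.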
| 1022 | func (s *state) ssaOp(op uint8, t *Type) ssa.Op { |
| 1023 | etype := s.concreteEtype(t) |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1024 | x, ok := opToSSA[opAndType{op, etype}] |
| 1025 | if !ok { |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 1026 | s.Unimplementedf("unhandled binary op %s %s", opnames[op], Econv(int(etype), 0)) |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1027 | } |
| 1028 | return x |
Josh Bleecher Snyder | 46815b9 | 2015-06-24 17:48:22 -0700 | [diff] [blame] | 1029 | } |
| 1030 | |
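// floatForComplex returns the float type holding the real and imaginary
// parts of a complex value of type t: float32 for complex64 (size 8),
// float64 for complex128 (size 16).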
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1031 | func floatForComplex(t *Type) *Type { |
| 1032 | if t.Size() == 8 { |
| 1033 | return Types[TFLOAT32] |
| 1034 | } else { |
| 1035 | return Types[TFLOAT64] |
| 1036 | } |
| 1037 | } |
| 1038 | |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1039 | type opAndTwoTypes struct { |
| 1040 | op uint8 |
| 1041 | etype1 uint8 |
| 1042 | etype2 uint8 |
| 1043 | } |
| 1044 | |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1045 | type twoTypes struct { |
| 1046 | etype1 uint8 |
| 1047 | etype2 uint8 |
| 1048 | } |
| 1049 | |
| 1050 | type twoOpsAndType struct { |
| 1051 | op1 ssa.Op |
| 1052 | op2 ssa.Op |
| 1053 | intermediateType uint8 |
| 1054 | } |
| 1055 | |
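// fpConvOpToSSA maps a (from, to) pair of etypes to the two SSA ops that,
// composed through intermediateType, perform the conversion. For example,
// uint16 -> float64 is ZeroExt16to32 into a TINT32 intermediate followed by
// Cvt32to64F, and float64 -> int8 is Cvt64Fto32 followed by Trunc32to8.
// OpCopy marks a leg that needs no work, and OpInvalid marks the uint64
// cases that are instead expanded into branchy code in expr below.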
| 1056 | var fpConvOpToSSA = map[twoTypes]twoOpsAndType{ |
| 1057 | |
| 1058 | twoTypes{TINT8, TFLOAT32}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, TINT32}, |
| 1059 | twoTypes{TINT16, TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, TINT32}, |
| 1060 | twoTypes{TINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, TINT32}, |
| 1061 | twoTypes{TINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, TINT64}, |
| 1062 | |
| 1063 | twoTypes{TINT8, TFLOAT64}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, TINT32}, |
| 1064 | twoTypes{TINT16, TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, TINT32}, |
| 1065 | twoTypes{TINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, TINT32}, |
| 1066 | twoTypes{TINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, TINT64}, |
| 1067 | |
| 1068 | twoTypes{TFLOAT32, TINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1069 | twoTypes{TFLOAT32, TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1070 | twoTypes{TFLOAT32, TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, TINT32}, |
| 1071 | twoTypes{TFLOAT32, TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, TINT64}, |
| 1072 | |
| 1073 | twoTypes{TFLOAT64, TINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1074 | twoTypes{TFLOAT64, TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1075 | twoTypes{TFLOAT64, TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, TINT32}, |
| 1076 | twoTypes{TFLOAT64, TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, TINT64}, |
| 1077 | // unsigned |
| 1078 | twoTypes{TUINT8, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, TINT32}, |
| 1079 | twoTypes{TUINT16, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, TINT32}, |
| 1080 | twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, TINT64}, // go wide to dodge unsigned |
| 1081 | twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto32F, branchy code expansion instead |
| 1082 | |
| 1083 | twoTypes{TUINT8, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, TINT32}, |
| 1084 | twoTypes{TUINT16, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, TINT32}, |
| 1085 | twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, TINT64}, // go wide to dodge unsigned |
| 1086 | twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64}, // Cvt64Uto64F, branchy code expansion instead |
| 1087 | |
| 1088 | twoTypes{TFLOAT32, TUINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1089 | twoTypes{TFLOAT32, TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1090 | twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned |
| 1091 | twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt32Fto64U, branchy code expansion instead |
| 1092 | |
| 1093 | twoTypes{TFLOAT64, TUINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32}, |
| 1094 | twoTypes{TFLOAT64, TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32}, |
| 1095 | twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned |
| 1096 | twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64}, // Cvt64Fto64U, branchy code expansion instead |
| 1097 | |
| 1098 | // float |
| 1099 | twoTypes{TFLOAT64, TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, TFLOAT32}, |
| 1100 | twoTypes{TFLOAT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT64}, |
| 1101 | twoTypes{TFLOAT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT32}, |
| 1102 | twoTypes{TFLOAT32, TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, TFLOAT64}, |
| 1103 | } |
| 1104 | |
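// shiftOpToSSA is keyed by the operator, the type of the shifted operand,
// and the (unsigned) type of the shift count. For example, an int32 shifted
// left by a uint8 count uses Lsh32x8; only right shifts distinguish signed
// (Rsh32x8) from unsigned (Rsh32Ux8) operands, since left shifts behave the
// same either way.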
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1105 | var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{ |
| 1106 | opAndTwoTypes{OLSH, TINT8, TUINT8}: ssa.OpLsh8x8, |
| 1107 | opAndTwoTypes{OLSH, TUINT8, TUINT8}: ssa.OpLsh8x8, |
| 1108 | opAndTwoTypes{OLSH, TINT8, TUINT16}: ssa.OpLsh8x16, |
| 1109 | opAndTwoTypes{OLSH, TUINT8, TUINT16}: ssa.OpLsh8x16, |
| 1110 | opAndTwoTypes{OLSH, TINT8, TUINT32}: ssa.OpLsh8x32, |
| 1111 | opAndTwoTypes{OLSH, TUINT8, TUINT32}: ssa.OpLsh8x32, |
| 1112 | opAndTwoTypes{OLSH, TINT8, TUINT64}: ssa.OpLsh8x64, |
| 1113 | opAndTwoTypes{OLSH, TUINT8, TUINT64}: ssa.OpLsh8x64, |
| 1114 | |
| 1115 | opAndTwoTypes{OLSH, TINT16, TUINT8}: ssa.OpLsh16x8, |
| 1116 | opAndTwoTypes{OLSH, TUINT16, TUINT8}: ssa.OpLsh16x8, |
| 1117 | opAndTwoTypes{OLSH, TINT16, TUINT16}: ssa.OpLsh16x16, |
| 1118 | opAndTwoTypes{OLSH, TUINT16, TUINT16}: ssa.OpLsh16x16, |
| 1119 | opAndTwoTypes{OLSH, TINT16, TUINT32}: ssa.OpLsh16x32, |
| 1120 | opAndTwoTypes{OLSH, TUINT16, TUINT32}: ssa.OpLsh16x32, |
| 1121 | opAndTwoTypes{OLSH, TINT16, TUINT64}: ssa.OpLsh16x64, |
| 1122 | opAndTwoTypes{OLSH, TUINT16, TUINT64}: ssa.OpLsh16x64, |
| 1123 | |
| 1124 | opAndTwoTypes{OLSH, TINT32, TUINT8}: ssa.OpLsh32x8, |
| 1125 | opAndTwoTypes{OLSH, TUINT32, TUINT8}: ssa.OpLsh32x8, |
| 1126 | opAndTwoTypes{OLSH, TINT32, TUINT16}: ssa.OpLsh32x16, |
| 1127 | opAndTwoTypes{OLSH, TUINT32, TUINT16}: ssa.OpLsh32x16, |
| 1128 | opAndTwoTypes{OLSH, TINT32, TUINT32}: ssa.OpLsh32x32, |
| 1129 | opAndTwoTypes{OLSH, TUINT32, TUINT32}: ssa.OpLsh32x32, |
| 1130 | opAndTwoTypes{OLSH, TINT32, TUINT64}: ssa.OpLsh32x64, |
| 1131 | opAndTwoTypes{OLSH, TUINT32, TUINT64}: ssa.OpLsh32x64, |
| 1132 | |
| 1133 | opAndTwoTypes{OLSH, TINT64, TUINT8}: ssa.OpLsh64x8, |
| 1134 | opAndTwoTypes{OLSH, TUINT64, TUINT8}: ssa.OpLsh64x8, |
| 1135 | opAndTwoTypes{OLSH, TINT64, TUINT16}: ssa.OpLsh64x16, |
| 1136 | opAndTwoTypes{OLSH, TUINT64, TUINT16}: ssa.OpLsh64x16, |
| 1137 | opAndTwoTypes{OLSH, TINT64, TUINT32}: ssa.OpLsh64x32, |
| 1138 | opAndTwoTypes{OLSH, TUINT64, TUINT32}: ssa.OpLsh64x32, |
| 1139 | opAndTwoTypes{OLSH, TINT64, TUINT64}: ssa.OpLsh64x64, |
| 1140 | opAndTwoTypes{OLSH, TUINT64, TUINT64}: ssa.OpLsh64x64, |
| 1141 | |
| 1142 | opAndTwoTypes{ORSH, TINT8, TUINT8}: ssa.OpRsh8x8, |
| 1143 | opAndTwoTypes{ORSH, TUINT8, TUINT8}: ssa.OpRsh8Ux8, |
| 1144 | opAndTwoTypes{ORSH, TINT8, TUINT16}: ssa.OpRsh8x16, |
| 1145 | opAndTwoTypes{ORSH, TUINT8, TUINT16}: ssa.OpRsh8Ux16, |
| 1146 | opAndTwoTypes{ORSH, TINT8, TUINT32}: ssa.OpRsh8x32, |
| 1147 | opAndTwoTypes{ORSH, TUINT8, TUINT32}: ssa.OpRsh8Ux32, |
| 1148 | opAndTwoTypes{ORSH, TINT8, TUINT64}: ssa.OpRsh8x64, |
| 1149 | opAndTwoTypes{ORSH, TUINT8, TUINT64}: ssa.OpRsh8Ux64, |
| 1150 | |
| 1151 | opAndTwoTypes{ORSH, TINT16, TUINT8}: ssa.OpRsh16x8, |
| 1152 | opAndTwoTypes{ORSH, TUINT16, TUINT8}: ssa.OpRsh16Ux8, |
| 1153 | opAndTwoTypes{ORSH, TINT16, TUINT16}: ssa.OpRsh16x16, |
| 1154 | opAndTwoTypes{ORSH, TUINT16, TUINT16}: ssa.OpRsh16Ux16, |
| 1155 | opAndTwoTypes{ORSH, TINT16, TUINT32}: ssa.OpRsh16x32, |
| 1156 | opAndTwoTypes{ORSH, TUINT16, TUINT32}: ssa.OpRsh16Ux32, |
| 1157 | opAndTwoTypes{ORSH, TINT16, TUINT64}: ssa.OpRsh16x64, |
| 1158 | opAndTwoTypes{ORSH, TUINT16, TUINT64}: ssa.OpRsh16Ux64, |
| 1159 | |
| 1160 | opAndTwoTypes{ORSH, TINT32, TUINT8}: ssa.OpRsh32x8, |
| 1161 | opAndTwoTypes{ORSH, TUINT32, TUINT8}: ssa.OpRsh32Ux8, |
| 1162 | opAndTwoTypes{ORSH, TINT32, TUINT16}: ssa.OpRsh32x16, |
| 1163 | opAndTwoTypes{ORSH, TUINT32, TUINT16}: ssa.OpRsh32Ux16, |
| 1164 | opAndTwoTypes{ORSH, TINT32, TUINT32}: ssa.OpRsh32x32, |
| 1165 | opAndTwoTypes{ORSH, TUINT32, TUINT32}: ssa.OpRsh32Ux32, |
| 1166 | opAndTwoTypes{ORSH, TINT32, TUINT64}: ssa.OpRsh32x64, |
| 1167 | opAndTwoTypes{ORSH, TUINT32, TUINT64}: ssa.OpRsh32Ux64, |
| 1168 | |
| 1169 | opAndTwoTypes{ORSH, TINT64, TUINT8}: ssa.OpRsh64x8, |
| 1170 | opAndTwoTypes{ORSH, TUINT64, TUINT8}: ssa.OpRsh64Ux8, |
| 1171 | opAndTwoTypes{ORSH, TINT64, TUINT16}: ssa.OpRsh64x16, |
| 1172 | opAndTwoTypes{ORSH, TUINT64, TUINT16}: ssa.OpRsh64Ux16, |
| 1173 | opAndTwoTypes{ORSH, TINT64, TUINT32}: ssa.OpRsh64x32, |
| 1174 | opAndTwoTypes{ORSH, TUINT64, TUINT32}: ssa.OpRsh64Ux32, |
| 1175 | opAndTwoTypes{ORSH, TINT64, TUINT64}: ssa.OpRsh64x64, |
| 1176 | opAndTwoTypes{ORSH, TUINT64, TUINT64}: ssa.OpRsh64Ux64, |
| 1177 | } |
| 1178 | |
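// ssaShiftOp returns the SSA opcode for shift op, where t is the type of
// the shifted operand and u is the type of the shift count.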
| 1179 | func (s *state) ssaShiftOp(op uint8, t *Type, u *Type) ssa.Op { |
| 1180 | etype1 := s.concreteEtype(t) |
| 1181 | etype2 := s.concreteEtype(u) |
| 1182 | x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}] |
| 1183 | if !ok { |
| 1184 | s.Unimplementedf("unhandled shift op %s etype=%s/%s", opnames[op], Econv(int(etype1), 0), Econv(int(etype2), 0)) |
| 1185 | } |
| 1186 | return x |
| 1187 | } |
| 1188 | |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1189 | func (s *state) ssaRotateOp(op uint8, t *Type) ssa.Op { |
| 1190 | etype1 := s.concreteEtype(t) |
| 1191 | x, ok := opToSSA[opAndType{op, etype1}] |
| 1192 | if !ok { |
| 1193 | s.Unimplementedf("unhandled rotate op %s etype=%s", opnames[op], Econv(int(etype1), 0)) |
| 1194 | } |
| 1195 | return x |
| 1196 | } |
| 1197 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1198 | // expr converts the expression n to ssa, adds it to s and returns the ssa result. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1199 | func (s *state) expr(n *Node) *ssa.Value { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 1200 | s.pushLine(n.Lineno) |
| 1201 | defer s.popLine() |
| 1202 | |
Keith Randall | 06f3292 | 2015-07-11 11:39:12 -0700 | [diff] [blame] | 1203 | s.stmtList(n.Ninit) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1204 | switch n.Op { |
Todd Neal | def7c65 | 2015-09-07 19:07:02 -0500 | [diff] [blame] | 1205 | case OCFUNC: |
| 1206 | aux := &ssa.ExternSymbol{n.Type, n.Left.Sym} |
| 1207 | return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1208 | case ONAME: |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1209 | if n.Class == PFUNC { |
| 1210 | // "value" of a function is the address of the function's closure |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1211 | sym := funcsym(n.Sym) |
| 1212 | aux := &ssa.ExternSymbol{n.Type, sym} |
| 1213 | return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb) |
Keith Randall | 23df95b | 2015-05-12 15:16:52 -0700 | [diff] [blame] | 1214 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1215 | if canSSA(n) { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1216 | return s.variable(n, n.Type) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1217 | } |
| 1218 | addr := s.addr(n) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1219 | return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1220 | case OLITERAL: |
Keith Randall | e707fbe | 2015-06-11 10:20:39 -0700 | [diff] [blame] | 1221 | switch n.Val().Ctype() { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1222 | case CTINT: |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 1223 | i := Mpgetfix(n.Val().U.(*Mpint)) |
| 1224 | switch n.Type.Size() { |
| 1225 | case 1: |
| 1226 | return s.constInt8(n.Type, int8(i)) |
| 1227 | case 2: |
| 1228 | return s.constInt16(n.Type, int16(i)) |
| 1229 | case 4: |
| 1230 | return s.constInt32(n.Type, int32(i)) |
| 1231 | case 8: |
| 1232 | return s.constInt64(n.Type, i) |
| 1233 | default: |
| 1234 | s.Fatalf("bad integer size %d", n.Type.Size()) |
| 1235 | return nil |
| 1236 | } |
| 1237 | case CTSTR: |
| 1238 | return s.entryNewValue0A(ssa.OpConstString, n.Type, n.Val().U) |
| 1239 | case CTBOOL: |
Josh Bleecher Snyder | cea4414 | 2015-09-08 16:52:25 -0700 | [diff] [blame] | 1240 | return s.constBool(n.Val().U.(bool)) |
Brad Fitzpatrick | 337b7e7 | 2015-07-13 17:30:42 -0600 | [diff] [blame] | 1241 | case CTNIL: |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1242 | t := n.Type |
| 1243 | switch { |
| 1244 | case t.IsSlice(): |
| 1245 | return s.entryNewValue0(ssa.OpConstSlice, t) |
| 1246 | case t.IsInterface(): |
| 1247 | return s.entryNewValue0(ssa.OpConstInterface, t) |
| 1248 | default: |
| 1249 | return s.entryNewValue0(ssa.OpConstNil, t) |
| 1250 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1251 | case CTFLT: |
| 1252 | f := n.Val().U.(*Mpflt) |
| 1253 | switch n.Type.Size() { |
| 1254 | case 4: |
Todd Neal | adba6c4 | 2015-09-08 07:50:25 -0400 | [diff] [blame] | 1255 | // -0.0 literals need to be treated as if they were 0.0; adding 0.0 here |
| 1256 | // accomplishes this while not affecting other values. |
| 1257 | return s.constFloat32(n.Type, mpgetflt32(f)+0.0) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1258 | case 8: |
Todd Neal | adba6c4 | 2015-09-08 07:50:25 -0400 | [diff] [blame] | 1259 | return s.constFloat64(n.Type, mpgetflt(f)+0.0) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1260 | default: |
| 1261 | s.Fatalf("bad float size %d", n.Type.Size()) |
| 1262 | return nil |
| 1263 | } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1264 | case CTCPLX: |
| 1265 | c := n.Val().U.(*Mpcplx) |
| 1266 | r := &c.Real |
| 1267 | i := &c.Imag |
| 1268 | switch n.Type.Size() { |
| 1269 | case 8: |
| 1270 | { |
| 1271 | pt := Types[TFLOAT32] |
Todd Neal | adba6c4 | 2015-09-08 07:50:25 -0400 | [diff] [blame] | 1272 | // -0.0 literals need to be treated as if they were 0.0; adding 0.0 here |
| 1273 | // accomplishes this while not affecting other values. |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1274 | return s.newValue2(ssa.OpComplexMake, n.Type, |
Todd Neal | adba6c4 | 2015-09-08 07:50:25 -0400 | [diff] [blame] | 1275 | s.constFloat32(pt, mpgetflt32(r)+0.0), |
| 1276 | s.constFloat32(pt, mpgetflt32(i)+0.0)) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1277 | } |
| 1278 | case 16: |
| 1279 | { |
| 1280 | pt := Types[TFLOAT64] |
| 1281 | return s.newValue2(ssa.OpComplexMake, n.Type, |
Todd Neal | adba6c4 | 2015-09-08 07:50:25 -0400 | [diff] [blame] | 1282 | s.constFloat64(pt, mpgetflt(r)+0.0), |
| 1283 | s.constFloat64(pt, mpgetflt(i)+0.0)) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1284 | } |
| 1285 | default: |
| 1286 | s.Fatalf("bad float size %d", n.Type.Size()) |
| 1287 | return nil |
| 1288 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 1289 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1290 | default: |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 1291 | s.Unimplementedf("unhandled OLITERAL %v", n.Val().Ctype()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1292 | return nil |
| 1293 | } |
Keith Randall | 0ad9c8c | 2015-06-12 16:24:33 -0700 | [diff] [blame] | 1294 | case OCONVNOP: |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1295 | to := n.Type |
| 1296 | from := n.Left.Type |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1297 | |
| 1298 | // Assume everything will work out, so set up our return value. |
| 1299 | // Anything interesting that happens from here on is a fatal error. |
Keith Randall | 0ad9c8c | 2015-06-12 16:24:33 -0700 | [diff] [blame] | 1300 | x := s.expr(n.Left) |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1301 | v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type |
| 1302 | |
Todd Neal | def7c65 | 2015-09-07 19:07:02 -0500 | [diff] [blame] | 1303 | // CONVNOP closure |
| 1304 | if to.Etype == TFUNC && from.IsPtr() { |
| 1305 | return v |
| 1306 | } |
| 1307 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1308 | // named <--> unnamed type or typed <--> untyped const |
| 1309 | if from.Etype == to.Etype { |
| 1310 | return v |
| 1311 | } |
| 1312 | // unsafe.Pointer <--> *T |
| 1313 | if to.Etype == TUNSAFEPTR && from.IsPtr() || from.Etype == TUNSAFEPTR && to.IsPtr() { |
| 1314 | return v |
| 1315 | } |
| 1316 | |
| 1317 | dowidth(from) |
| 1318 | dowidth(to) |
| 1319 | if from.Width != to.Width { |
| 1320 | s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width) |
| 1321 | return nil |
| 1322 | } |
| 1323 | if etypesign(from.Etype) != etypesign(to.Etype) { |
| 1324 | s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, Econv(int(from.Etype), 0), to, Econv(int(to.Etype), 0)) |
| 1325 | return nil |
| 1326 | } |
| 1327 | |
| 1328 | if flag_race != 0 { |
| 1329 | s.Unimplementedf("questionable CONVNOP from race detector %v -> %v\n", from, to) |
| 1330 | return nil |
| 1331 | } |
| 1332 | |
| 1333 | if etypesign(from.Etype) == 0 { |
| 1334 | s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to) |
| 1335 | return nil |
| 1336 | } |
| 1337 | |
| 1338 | // integer, same width, same sign |
| 1339 | return v |
| 1340 | |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 1341 | case OCONV: |
| 1342 | x := s.expr(n.Left) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1343 | ft := n.Left.Type // from type |
| 1344 | tt := n.Type // to type |
| 1345 | if ft.IsInteger() && tt.IsInteger() { |
| 1346 | var op ssa.Op |
| 1347 | if tt.Size() == ft.Size() { |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1348 | op = ssa.OpCopy |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1349 | } else if tt.Size() < ft.Size() { |
| 1350 | // truncation |
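// The switch key packs the two operand sizes as 10*from+to,
// so e.g. case 84 is an 8-byte to 4-byte truncation.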
| 1351 | switch 10*ft.Size() + tt.Size() { |
| 1352 | case 21: |
| 1353 | op = ssa.OpTrunc16to8 |
| 1354 | case 41: |
| 1355 | op = ssa.OpTrunc32to8 |
| 1356 | case 42: |
| 1357 | op = ssa.OpTrunc32to16 |
| 1358 | case 81: |
| 1359 | op = ssa.OpTrunc64to8 |
| 1360 | case 82: |
| 1361 | op = ssa.OpTrunc64to16 |
| 1362 | case 84: |
| 1363 | op = ssa.OpTrunc64to32 |
| 1364 | default: |
| 1365 | s.Fatalf("weird integer truncation %s -> %s", ft, tt) |
| 1366 | } |
| 1367 | } else if ft.IsSigned() { |
| 1368 | // sign extension |
| 1369 | switch 10*ft.Size() + tt.Size() { |
| 1370 | case 12: |
| 1371 | op = ssa.OpSignExt8to16 |
| 1372 | case 14: |
| 1373 | op = ssa.OpSignExt8to32 |
| 1374 | case 18: |
| 1375 | op = ssa.OpSignExt8to64 |
| 1376 | case 24: |
| 1377 | op = ssa.OpSignExt16to32 |
| 1378 | case 28: |
| 1379 | op = ssa.OpSignExt16to64 |
| 1380 | case 48: |
| 1381 | op = ssa.OpSignExt32to64 |
| 1382 | default: |
| 1383 | s.Fatalf("bad integer sign extension %s -> %s", ft, tt) |
| 1384 | } |
| 1385 | } else { |
| 1386 | // zero extension |
| 1387 | switch 10*ft.Size() + tt.Size() { |
| 1388 | case 12: |
| 1389 | op = ssa.OpZeroExt8to16 |
| 1390 | case 14: |
| 1391 | op = ssa.OpZeroExt8to32 |
| 1392 | case 18: |
| 1393 | op = ssa.OpZeroExt8to64 |
| 1394 | case 24: |
| 1395 | op = ssa.OpZeroExt16to32 |
| 1396 | case 28: |
| 1397 | op = ssa.OpZeroExt16to64 |
| 1398 | case 48: |
| 1399 | op = ssa.OpZeroExt32to64 |
| 1400 | default: |
| 1401 | s.Fatalf("weird integer zero extension %s -> %s", ft, tt) |
| 1402 | } |
| 1403 | } |
| 1404 | return s.newValue1(op, n.Type, x) |
| 1405 | } |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1406 | |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1407 | if ft.IsFloat() || tt.IsFloat() { |
| 1408 | conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}] |
| 1409 | if !ok { |
| 1410 | s.Fatalf("weird float conversion %s -> %s", ft, tt) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1411 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1412 | op1, op2, it := conv.op1, conv.op2, conv.intermediateType |
| 1413 | |
| 1414 | if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid { |
| 1415 | // normal case, not tripping over unsigned 64 |
| 1416 | if op1 == ssa.OpCopy { |
| 1417 | if op2 == ssa.OpCopy { |
| 1418 | return x |
| 1419 | } |
| 1420 | return s.newValue1(op2, n.Type, x) |
| 1421 | } |
| 1422 | if op2 == ssa.OpCopy { |
| 1423 | return s.newValue1(op1, n.Type, x) |
| 1424 | } |
| 1425 | return s.newValue1(op2, n.Type, s.newValue1(op1, Types[it], x)) |
| 1426 | } |
| 1427 | // Tricky 64-bit unsigned cases. |
| 1428 | if ft.IsInteger() { |
| 1429 | // therefore tt is float32 or float64, and ft is a 64-bit unsigned integer |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1430 | if tt.Size() == 4 { |
| 1431 | return s.uint64Tofloat32(n, x, ft, tt) |
| 1432 | } |
| 1433 | if tt.Size() == 8 { |
| 1434 | return s.uint64Tofloat64(n, x, ft, tt) |
| 1435 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1436 | s.Fatalf("weird unsigned integer to float conversion %s -> %s", ft, tt) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1437 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1438 | // therefore ft is float32 or float64, and tt is a 64-bit unsigned integer |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1439 | if ft.Size() == 4 { |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1440 | return s.float32ToUint64(n, x, ft, tt) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1441 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1442 | if ft.Size() == 8 { |
| 1443 | return s.float64ToUint64(n, x, ft, tt) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 1444 | } |
David Chase | d052bbd | 2015-09-01 17:09:00 -0400 | [diff] [blame] | 1445 | s.Fatalf("weird float to unsigned integer conversion %s -> %s", ft, tt) |
| 1446 | return nil |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1447 | } |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1448 | |
| 1449 | if ft.IsComplex() && tt.IsComplex() { |
| 1450 | var op ssa.Op |
| 1451 | if ft.Size() == tt.Size() { |
| 1452 | op = ssa.OpCopy |
| 1453 | } else if ft.Size() == 8 && tt.Size() == 16 { |
| 1454 | op = ssa.OpCvt32Fto64F |
| 1455 | } else if ft.Size() == 16 && tt.Size() == 8 { |
| 1456 | op = ssa.OpCvt64Fto32F |
| 1457 | } else { |
| 1458 | s.Fatalf("weird complex conversion %s -> %s", ft, tt) |
| 1459 | } |
| 1460 | ftp := floatForComplex(ft) |
| 1461 | ttp := floatForComplex(tt) |
| 1462 | return s.newValue2(ssa.OpComplexMake, tt, |
| 1463 | s.newValue1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, x)), |
| 1464 | s.newValue1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x))) |
| 1465 | } |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 1466 | |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 1467 | s.Unimplementedf("unhandled OCONV %s -> %s", Econv(int(n.Left.Type.Etype), 0), Econv(int(n.Type.Etype), 0)) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1468 | return nil |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1469 | |
Josh Bleecher Snyder | 46815b9 | 2015-06-24 17:48:22 -0700 | [diff] [blame] | 1470 | // binary ops |
| 1471 | case OLT, OEQ, ONE, OLE, OGE, OGT: |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1472 | a := s.expr(n.Left) |
| 1473 | b := s.expr(n.Right) |
Keith Randall | db380bf | 2015-09-10 11:05:42 -0700 | [diff] [blame] | 1474 | if n.Left.Type.IsComplex() { |
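// Complex values are equal iff both their real and imaginary parts are
// equal, so a == b is lowered to real(a)==real(b) && imag(a)==imag(b),
// and a != b is the negation of that.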
Keith Randall | c244ce0 | 2015-09-10 14:59:00 -0700 | [diff] [blame] | 1475 | pt := floatForComplex(n.Left.Type) |
Keith Randall | db380bf | 2015-09-10 11:05:42 -0700 | [diff] [blame] | 1476 | op := s.ssaOp(OEQ, pt) |
| 1477 | r := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)) |
| 1478 | i := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)) |
| 1479 | c := s.newValue2(ssa.OpAnd8, Types[TBOOL], r, i) |
| 1480 | switch n.Op { |
| 1481 | case OEQ: |
| 1482 | return c |
| 1483 | case ONE: |
| 1484 | return s.newValue1(ssa.OpNot, Types[TBOOL], c) |
| 1485 | default: |
| 1486 | s.Fatalf("ordered complex compare %s", opnames[n.Op]) |
| 1487 | } |
| 1488 | |
| 1489 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1490 | return s.newValue2(s.ssaOp(n.Op, n.Left.Type), Types[TBOOL], a, b) |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1491 | case OMUL: |
| 1492 | a := s.expr(n.Left) |
| 1493 | b := s.expr(n.Right) |
| 1494 | if n.Type.IsComplex() { |
| 1495 | mulop := ssa.OpMul64F |
| 1496 | addop := ssa.OpAdd64F |
| 1497 | subop := ssa.OpSub64F |
| 1498 | pt := floatForComplex(n.Type) // Could be Float32 or Float64 |
| 1499 | wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error |
| 1500 | |
| 1501 | areal := s.newValue1(ssa.OpComplexReal, pt, a) |
| 1502 | breal := s.newValue1(ssa.OpComplexReal, pt, b) |
| 1503 | aimag := s.newValue1(ssa.OpComplexImag, pt, a) |
| 1504 | bimag := s.newValue1(ssa.OpComplexImag, pt, b) |
| 1505 | |
| 1506 | if pt != wt { // Widen for calculation |
| 1507 | areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal) |
| 1508 | breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal) |
| 1509 | aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag) |
| 1510 | bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag) |
| 1511 | } |
| 1512 | |
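// (areal + aimag*i) * (breal + bimag*i) =
//   (areal*breal - aimag*bimag) + (areal*bimag + aimag*breal)*i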
| 1513 | xreal := s.newValue2(subop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag)) |
| 1514 | ximag := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, bimag), s.newValue2(mulop, wt, aimag, breal)) |
| 1515 | |
| 1516 | if pt != wt { // Narrow to store back |
| 1517 | xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal) |
| 1518 | ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag) |
| 1519 | } |
| 1520 | |
| 1521 | return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag) |
| 1522 | } |
| 1523 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1524 | |
| 1525 | case ODIV: |
| 1526 | a := s.expr(n.Left) |
| 1527 | b := s.expr(n.Right) |
| 1528 | if n.Type.IsComplex() { |
| 1529 | // TODO this is not executed because the front-end substitutes a runtime call. |
| 1530 | // That probably ought to change; with modest optimization the widen/narrow |
| 1531 | // conversions could all be elided in larger expression trees. |
| 1532 | mulop := ssa.OpMul64F |
| 1533 | addop := ssa.OpAdd64F |
| 1534 | subop := ssa.OpSub64F |
| 1535 | divop := ssa.OpDiv64F |
| 1536 | pt := floatForComplex(n.Type) // Could be Float32 or Float64 |
| 1537 | wt := Types[TFLOAT64] // Compute in Float64 to minimize cancellation error |
| 1538 | |
| 1539 | areal := s.newValue1(ssa.OpComplexReal, pt, a) |
| 1540 | breal := s.newValue1(ssa.OpComplexReal, pt, b) |
| 1541 | aimag := s.newValue1(ssa.OpComplexImag, pt, a) |
| 1542 | bimag := s.newValue1(ssa.OpComplexImag, pt, b) |
| 1543 | |
| 1544 | if pt != wt { // Widen for calculation |
| 1545 | areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal) |
| 1546 | breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal) |
| 1547 | aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag) |
| 1548 | bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag) |
| 1549 | } |
| 1550 | |
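// Multiply the numerator and denominator by the conjugate of the
// denominator, breal - bimag*i, giving
//   denom = breal*breal + bimag*bimag
//   xreal = (areal*breal + aimag*bimag) / denom
//   ximag = (aimag*breal - areal*bimag) / denom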
| 1551 | denom := s.newValue2(addop, wt, s.newValue2(mulop, wt, breal, breal), s.newValue2(mulop, wt, bimag, bimag)) |
| 1552 | xreal := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag)) |
| 1553 | ximag := s.newValue2(subop, wt, s.newValue2(mulop, wt, aimag, breal), s.newValue2(mulop, wt, areal, bimag)) |
| 1554 | |
| 1555 | // TODO not sure if this is best done in wide precision or narrow |
| 1556 | // Double-rounding might be an issue. |
| 1557 | // Note that the pre-SSA implementation does the entire calculation |
| 1558 | // in wide format, so wide is compatible. |
| 1559 | xreal = s.newValue2(divop, wt, xreal, denom) |
| 1560 | ximag = s.newValue2(divop, wt, ximag, denom) |
| 1561 | |
| 1562 | if pt != wt { // Narrow to store back |
| 1563 | xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal) |
| 1564 | ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag) |
| 1565 | } |
| 1566 | |
| 1567 | return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag) |
| 1568 | } |
| 1569 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1570 | case OADD, OSUB: |
| 1571 | a := s.expr(n.Left) |
| 1572 | b := s.expr(n.Right) |
| 1573 | if n.Type.IsComplex() { |
| 1574 | pt := floatForComplex(n.Type) |
| 1575 | op := s.ssaOp(n.Op, pt) |
| 1576 | return s.newValue2(ssa.OpComplexMake, n.Type, |
| 1577 | s.newValue2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)), |
| 1578 | s.newValue2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))) |
| 1579 | } |
| 1580 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
| 1581 | case OAND, OOR, OMOD, OHMUL, OXOR: |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1582 | a := s.expr(n.Left) |
| 1583 | b := s.expr(n.Right) |
Keith Randall | 67fdb0d | 2015-07-19 15:48:20 -0700 | [diff] [blame] | 1584 | return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b) |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 1585 | case OLSH, ORSH: |
| 1586 | a := s.expr(n.Left) |
| 1587 | b := s.expr(n.Right) |
| 1588 | return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b) |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 1589 | case OLROT: |
| 1590 | a := s.expr(n.Left) |
| 1591 | i := n.Right.Int() |
| 1592 | if i <= 0 || i >= n.Type.Size()*8 { |
| 1593 | s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i) |
| 1594 | } |
| 1595 | return s.newValue1I(s.ssaRotateOp(n.Op, n.Type), a.Type, i, a) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1596 | case OANDAND, OOROR: |
| 1597 | // To implement OANDAND (and OOROR), we introduce a |
| 1598 | // new temporary variable to hold the result. The |
| 1599 | // variable is associated with the OANDAND node in the |
| 1600 | // s.vars table (normally variables are only |
| 1601 | // associated with ONAME nodes). We convert |
| 1602 | // A && B |
| 1603 | // to |
| 1604 | // var = A |
| 1605 | // if var { |
| 1606 | // var = B |
| 1607 | // } |
| 1608 | // Using var in the subsequent block introduces the |
| 1609 | // necessary phi variable. |
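// Similarly, A || B becomes
//   var = A
//   if !var {
//       var = B
//   }
// encoded below by swapping the order of the two successor edges
// added to the BlockIf rather than by negating var.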
| 1610 | el := s.expr(n.Left) |
| 1611 | s.vars[n] = el |
| 1612 | |
| 1613 | b := s.endBlock() |
| 1614 | b.Kind = ssa.BlockIf |
| 1615 | b.Control = el |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 1616 | // In theory, we should set b.Likely here based on context. |
| 1617 | // However, gc only gives us likeliness hints |
| 1618 | // in a single place, for plain OIF statements, |
| 1619 | // and passing around context is finicky, so don't bother for now. |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1620 | |
| 1621 | bRight := s.f.NewBlock(ssa.BlockPlain) |
| 1622 | bResult := s.f.NewBlock(ssa.BlockPlain) |
| 1623 | if n.Op == OANDAND { |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1624 | b.AddEdgeTo(bRight) |
| 1625 | b.AddEdgeTo(bResult) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1626 | } else if n.Op == OOROR { |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1627 | b.AddEdgeTo(bResult) |
| 1628 | b.AddEdgeTo(bRight) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1629 | } |
| 1630 | |
| 1631 | s.startBlock(bRight) |
| 1632 | er := s.expr(n.Right) |
| 1633 | s.vars[n] = er |
| 1634 | |
| 1635 | b = s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1636 | b.AddEdgeTo(bResult) |
Brad Fitzpatrick | e816711 | 2015-07-10 12:58:53 -0600 | [diff] [blame] | 1637 | |
| 1638 | s.startBlock(bResult) |
Josh Bleecher Snyder | 35ad1fc | 2015-08-27 10:11:08 -0700 | [diff] [blame] | 1639 | return s.variable(n, Types[TBOOL]) |
Keith Randall | 7e39072 | 2015-09-12 14:14:02 -0700 | [diff] [blame] | 1640 | case OCOMPLEX: |
| 1641 | r := s.expr(n.Left) |
| 1642 | i := s.expr(n.Right) |
| 1643 | return s.newValue2(ssa.OpComplexMake, n.Type, r, i) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1644 | |
Josh Bleecher Snyder | 4178f20 | 2015-09-05 19:28:00 -0700 | [diff] [blame] | 1645 | // unary ops |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1646 | case OMINUS: |
| 1647 | a := s.expr(n.Left) |
| 1648 | if n.Type.IsComplex() { |
| 1649 | tp := floatForComplex(n.Type) |
| 1650 | negop := s.ssaOp(n.Op, tp) |
| 1651 | return s.newValue2(ssa.OpComplexMake, n.Type, |
| 1652 | s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)), |
| 1653 | s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a))) |
| 1654 | } |
| 1655 | return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a) |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 1656 | case ONOT, OCOM, OSQRT: |
Brad Fitzpatrick | d9c72d7 | 2015-07-10 11:25:48 -0600 | [diff] [blame] | 1657 | a := s.expr(n.Left) |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 1658 | return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a) |
Keith Randall | 2f51807 | 2015-09-10 11:37:09 -0700 | [diff] [blame] | 1659 | case OIMAG, OREAL: |
| 1660 | a := s.expr(n.Left) |
| 1661 | return s.newValue1(s.ssaOp(n.Op, n.Left.Type), n.Type, a) |
Josh Bleecher Snyder | 4178f20 | 2015-09-05 19:28:00 -0700 | [diff] [blame] | 1662 | case OPLUS: |
| 1663 | return s.expr(n.Left) |
Brad Fitzpatrick | d9c72d7 | 2015-07-10 11:25:48 -0600 | [diff] [blame] | 1664 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1665 | case OADDR: |
| 1666 | return s.addr(n.Left) |
| 1667 | |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 1668 | case OINDREG: |
| 1669 | if int(n.Reg) != Thearch.REGSP { |
| 1670 | s.Unimplementedf("OINDREG of non-SP register %s in expr: %v", obj.Rconv(int(n.Reg)), n) |
| 1671 | return nil |
| 1672 | } |
| 1673 | addr := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp) |
| 1674 | return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem()) |
| 1675 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1676 | case OIND: |
| 1677 | p := s.expr(n.Left) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1678 | s.nilCheck(p) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1679 | return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1680 | |
Keith Randall | cd7e059 | 2015-07-15 21:33:49 -0700 | [diff] [blame] | 1681 | case ODOT: |
| 1682 | v := s.expr(n.Left) |
| 1683 | return s.newValue1I(ssa.OpStructSelect, n.Type, n.Xoffset, v) |
| 1684 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1685 | case ODOTPTR: |
| 1686 | p := s.expr(n.Left) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1687 | s.nilCheck(p) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1688 | p = s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset)) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1689 | return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1690 | |
| 1691 | case OINDEX: |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1692 | if n.Left.Type.Bound >= 0 { // array or string |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1693 | a := s.expr(n.Left) |
| 1694 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 1695 | i = s.extendIndex(i) |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1696 | var elemtype *Type |
| 1697 | var len *ssa.Value |
| 1698 | if n.Left.Type.IsString() { |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1699 | len = s.newValue1(ssa.OpStringLen, Types[TINT], a) |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1700 | elemtype = Types[TUINT8] |
| 1701 | } else { |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1702 | len = s.constInt(Types[TINT], n.Left.Type.Bound) |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1703 | elemtype = n.Left.Type.Type |
| 1704 | } |
Keith Randall | 46e62f8 | 2015-08-18 14:17:30 -0700 | [diff] [blame] | 1705 | if !n.Bounded { |
| 1706 | s.boundsCheck(i, len) |
| 1707 | } |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1708 | return s.newValue2(ssa.OpArrayIndex, elemtype, a, i) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1709 | } else { // slice |
| 1710 | p := s.addr(n) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1711 | return s.newValue2(ssa.OpLoad, n.Left.Type.Type, p, s.mem()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1712 | } |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1713 | |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1714 | case OLEN, OCAP: |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1715 | switch { |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1716 | case n.Left.Type.IsSlice(): |
| 1717 | op := ssa.OpSliceLen |
| 1718 | if n.Op == OCAP { |
| 1719 | op = ssa.OpSliceCap |
| 1720 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1721 | return s.newValue1(op, Types[TINT], s.expr(n.Left)) |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 1722 | case n.Left.Type.IsString(): // string; not reachable for OCAP |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1723 | return s.newValue1(ssa.OpStringLen, Types[TINT], s.expr(n.Left)) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 1724 | case n.Left.Type.IsMap(), n.Left.Type.IsChan(): |
| 1725 | return s.referenceTypeBuiltin(n, s.expr(n.Left)) |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1726 | default: // array |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1727 | return s.constInt(Types[TINT], n.Left.Type.Bound) |
Josh Bleecher Snyder | cc3f031 | 2015-07-03 18:41:28 -0700 | [diff] [blame] | 1728 | } |
| 1729 | |
Josh Bleecher Snyder | a2d1580 | 2015-08-12 10:12:14 -0700 | [diff] [blame] | 1730 | case OSPTR: |
| 1731 | a := s.expr(n.Left) |
| 1732 | if n.Left.Type.IsSlice() { |
| 1733 | return s.newValue1(ssa.OpSlicePtr, n.Type, a) |
| 1734 | } else { |
| 1735 | return s.newValue1(ssa.OpStringPtr, n.Type, a) |
| 1736 | } |
| 1737 | |
Keith Randall | d1c15a0 | 2015-08-04 15:47:22 -0700 | [diff] [blame] | 1738 | case OITAB: |
| 1739 | a := s.expr(n.Left) |
| 1740 | return s.newValue1(ssa.OpITab, n.Type, a) |
| 1741 | |
Josh Bleecher Snyder | 1792b36 | 2015-09-05 19:28:27 -0700 | [diff] [blame] | 1742 | case OEFACE: |
| 1743 | tab := s.expr(n.Left) |
| 1744 | data := s.expr(n.Right) |
| 1745 | return s.newValue2(ssa.OpIMake, n.Type, tab, data) |
| 1746 | |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1747 | case OSLICESTR: |
| 1748 | // Evaluate the string once. |
| 1749 | str := s.expr(n.Left) |
| 1750 | ptr := s.newValue1(ssa.OpStringPtr, Ptrto(Types[TUINT8]), str) |
| 1751 | len := s.newValue1(ssa.OpStringLen, Types[TINT], str) |
| 1752 | zero := s.constInt(Types[TINT], 0) |
| 1753 | |
| 1754 | // Evaluate the slice indexes. |
| 1755 | var low, high *ssa.Value |
| 1756 | if n.Right.Left == nil { |
| 1757 | low = zero |
| 1758 | } else { |
Alexandru Moșoi | c684d4d | 2015-09-08 18:18:59 +0200 | [diff] [blame] | 1759 | low = s.extendIndex(s.expr(n.Right.Left)) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1760 | } |
| 1761 | if n.Right.Right == nil { |
| 1762 | high = len |
| 1763 | } else { |
Alexandru Moșoi | c684d4d | 2015-09-08 18:18:59 +0200 | [diff] [blame] | 1764 | high = s.extendIndex(s.expr(n.Right.Right)) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1765 | } |
| 1766 | |
| 1767 | // Panic if slice indices are not in bounds. |
| 1768 | s.sliceBoundsCheck(low, high) |
| 1769 | s.sliceBoundsCheck(high, len) |
| 1770 | |
| 1771 | // Generate the following code assuming that indexes are in bounds. |
| 1772 | // The conditional is to make sure that we don't generate a string |
| 1773 | // that points to the next object in memory. |
| 1774 | // rlen = (SubPtr high low) |
| 1775 | // p = ptr |
| 1776 | // if rlen != 0 { |
| 1777 | // p = (AddPtr ptr low) |
| 1778 | // } |
| 1779 | // result = (StringMake p size) |
| 1780 | rlen := s.newValue2(ssa.OpSubPtr, Types[TINT], high, low) |
| 1781 | |
| 1782 | // Use n as the "variable" for p. |
| 1783 | s.vars[n] = ptr |
| 1784 | |
| 1785 | // Generate code to test the resulting slice length. |
| 1786 | var cmp *ssa.Value |
| 1787 | if s.config.IntSize == 8 { |
| 1788 | cmp = s.newValue2(ssa.OpNeq64, Types[TBOOL], rlen, zero) |
| 1789 | } else { |
| 1790 | cmp = s.newValue2(ssa.OpNeq32, Types[TBOOL], rlen, zero) |
| 1791 | } |
| 1792 | |
| 1793 | b := s.endBlock() |
| 1794 | b.Kind = ssa.BlockIf |
| 1795 | b.Likely = ssa.BranchLikely |
| 1796 | b.Control = cmp |
| 1797 | |
| 1798 | // Generate code for non-zero length slice case. |
| 1799 | nz := s.f.NewBlock(ssa.BlockPlain) |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1800 | b.AddEdgeTo(nz) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1801 | s.startBlock(nz) |
| 1802 | s.vars[n] = s.newValue2(ssa.OpAddPtr, Ptrto(Types[TUINT8]), ptr, low) |
| 1803 | s.endBlock() |
| 1804 | |
| 1805 | // All done. |
| 1806 | merge := s.f.NewBlock(ssa.BlockPlain) |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1807 | b.AddEdgeTo(merge) |
| 1808 | nz.AddEdgeTo(merge) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 1809 | s.startBlock(merge) |
| 1810 | return s.newValue2(ssa.OpStringMake, Types[TSTRING], s.variable(n, Ptrto(Types[TUINT8])), rlen) |
| 1811 | |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1812 | case OCALLFUNC, OCALLMETH: |
| 1813 | left := n.Left |
| 1814 | static := left.Op == ONAME && left.Class == PFUNC |
| 1815 | |
| 1816 | if n.Op == OCALLMETH { |
| 1817 | // Rewrite to an OCALLFUNC: (p.f)(...) becomes (f)(p, ...) |
| 1818 | // Take care not to modify the original AST. |
| 1819 | if left.Op != ODOTMETH { |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 1820 | Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", left) |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1821 | } |
| 1822 | |
| 1823 | newLeft := *left.Right |
| 1824 | newLeft.Type = left.Type |
| 1825 | if newLeft.Op == ONAME { |
| 1826 | newLeft.Class = PFUNC |
| 1827 | } |
| 1828 | left = &newLeft |
| 1829 | static = true |
| 1830 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1831 | |
| 1832 | // evaluate closure |
| 1833 | var closure *ssa.Value |
| 1834 | if !static { |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1835 | closure = s.expr(left) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1836 | } |
| 1837 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1838 | // run all argument assignments |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1839 | s.stmtList(n.List) |
| 1840 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1841 | bNext := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1842 | var call *ssa.Value |
| 1843 | if static { |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1844 | call = s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, left.Sym, s.mem()) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1845 | } else { |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 1846 | entry := s.newValue2(ssa.OpLoad, Types[TUINTPTR], closure, s.mem()) |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1847 | call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, entry, closure, s.mem()) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1848 | } |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1849 | dowidth(left.Type) |
| 1850 | call.AuxInt = left.Type.Argwid // call operations carry the argsize of the callee along with them |
Keith Randall | d56d2fa | 2015-08-12 12:54:47 -0700 | [diff] [blame] | 1851 | s.vars[&memvar] = call |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1852 | b := s.endBlock() |
| 1853 | b.Kind = ssa.BlockCall |
| 1854 | b.Control = call |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 1855 | b.AddEdgeTo(bNext) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1856 | |
| 1857 | // read result from stack at the start of the fallthrough block |
| 1858 | s.startBlock(bNext) |
| 1859 | var titer Iter |
Josh Bleecher Snyder | 15dcdfb | 2015-07-21 07:37:47 -0700 | [diff] [blame] | 1860 | fp := Structfirst(&titer, Getoutarg(left.Type)) |
Michael Matloob | 2aabacd | 2015-06-16 17:58:03 -0700 | [diff] [blame] | 1861 | if fp == nil { |
| 1862 | // CALLFUNC has no return value. Continue with the next statement. |
| 1863 | return nil |
| 1864 | } |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 1865 | a := s.entryNewValue1I(ssa.OpOffPtr, Ptrto(fp.Type), fp.Width, s.sp) |
| 1866 | return s.newValue2(ssa.OpLoad, fp.Type, a, call) |
Josh Bleecher Snyder | 3d23afb | 2015-08-12 11:22:16 -0700 | [diff] [blame] | 1867 | |
| 1868 | case OGETG: |
| 1869 | return s.newValue0(ssa.OpGetG, n.Type) |
| 1870 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1871 | default: |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 1872 | s.Unimplementedf("unhandled expr %s", opnames[n.Op]) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 1873 | return nil |
| 1874 | } |
| 1875 | } |
| 1876 | |
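// assign does left = right, where right may be nil to mean the zero value
// of left's type. wb indicates that a write barrier may be required; it is
// not yet acted upon (see the TODO below).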
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 1877 | func (s *state) assign(left *Node, right *ssa.Value, wb bool) { |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 1878 | if left.Op == ONAME && isblank(left) { |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 1879 | return |
| 1880 | } |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 1881 | // TODO: do write barrier |
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 1882 | // if wb |
Keith Randall | d4cc51d | 2015-08-14 21:47:20 -0700 | [diff] [blame] | 1883 | t := left.Type |
| 1884 | dowidth(t) |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 1885 | if right == nil { |
| 1886 | // right == nil means use the zero value of the assigned type. |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 1887 | if !canSSA(left) { |
| 1888 | // if we can't ssa this memory, treat it as just zeroing out the backing memory |
| 1889 | addr := s.addr(left) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 1890 | if left.Op == ONAME { |
| 1891 | s.vars[&memvar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem()) |
| 1892 | } |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 1893 | s.vars[&memvar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, t.Size(), addr, s.mem()) |
| 1894 | return |
| 1895 | } |
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 1896 | right = s.zeroVal(t) |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 1897 | } |
| 1898 | if left.Op == ONAME && canSSA(left) { |
| 1899 | // Update variable assignment. |
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 1900 | s.vars[left] = right |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 1901 | return |
| 1902 | } |
| 1903 | // not ssa-able. Treat as a store. |
| 1904 | addr := s.addr(left) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 1905 | if left.Op == ONAME { |
| 1906 | s.vars[&memvar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem()) |
| 1907 | } |
Josh Bleecher Snyder | 0726931 | 2015-08-29 14:54:45 -0700 | [diff] [blame] | 1908 | s.vars[&memvar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, right, s.mem()) |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 1909 | } |
| 1910 | |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 1911 | // zeroVal returns the zero value for type t. |
| 1912 | func (s *state) zeroVal(t *Type) *ssa.Value { |
| 1913 | switch { |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 1914 | case t.IsInteger(): |
| 1915 | switch t.Size() { |
| 1916 | case 1: |
| 1917 | return s.constInt8(t, 0) |
| 1918 | case 2: |
| 1919 | return s.constInt16(t, 0) |
| 1920 | case 4: |
| 1921 | return s.constInt32(t, 0) |
| 1922 | case 8: |
| 1923 | return s.constInt64(t, 0) |
| 1924 | default: |
| 1925 | s.Fatalf("bad sized integer type %s", t) |
| 1926 | } |
Todd Neal | 752fe4d | 2015-08-25 19:21:45 -0500 | [diff] [blame] | 1927 | case t.IsFloat(): |
| 1928 | switch t.Size() { |
| 1929 | case 4: |
| 1930 | return s.constFloat32(t, 0) |
| 1931 | case 8: |
| 1932 | return s.constFloat64(t, 0) |
| 1933 | default: |
| 1934 | s.Fatalf("bad sized float type %s", t) |
| 1935 | } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1936 | case t.IsComplex(): |
| 1937 | switch t.Size() { |
| 1938 | case 8: |
| 1939 | z := s.constFloat32(Types[TFLOAT32], 0) |
Keith Randall | a5cffb6 | 2015-08-28 13:52:26 -0700 | [diff] [blame] | 1940 | return s.entryNewValue2(ssa.OpComplexMake, t, z, z) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1941 | case 16: |
| 1942 | z := s.constFloat64(Types[TFLOAT64], 0) |
Keith Randall | a5cffb6 | 2015-08-28 13:52:26 -0700 | [diff] [blame] | 1943 | return s.entryNewValue2(ssa.OpComplexMake, t, z, z) |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 1944 | default: |
| 1945 | s.Fatalf("bad sized complex type %s", t) |
| 1946 | } |
| 1947 | |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 1948 | case t.IsString(): |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 1949 | return s.entryNewValue0A(ssa.OpConstString, t, "") |
| 1950 | case t.IsPtr(): |
| 1951 | return s.entryNewValue0(ssa.OpConstNil, t) |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 1952 | case t.IsBoolean(): |
Josh Bleecher Snyder | cea4414 | 2015-09-08 16:52:25 -0700 | [diff] [blame] | 1953 | return s.constBool(false) |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 1954 | case t.IsInterface(): |
| 1955 | return s.entryNewValue0(ssa.OpConstInterface, t) |
| 1956 | case t.IsSlice(): |
| 1957 | return s.entryNewValue0(ssa.OpConstSlice, t) |
Josh Bleecher Snyder | 21bd483 | 2015-07-20 15:30:52 -0700 | [diff] [blame] | 1958 | } |
| 1959 | s.Unimplementedf("zero for type %v not implemented", t) |
| 1960 | return nil |
| 1961 | } |
| 1962 | |
Josh Bleecher Snyder | 95aff4d | 2015-07-28 14:31:25 -0700 | [diff] [blame] | 1963 | // etypesign returns the signed-ness of e, for integer/pointer etypes. |
| 1964 | // -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer. |
| 1965 | func etypesign(e uint8) int8 { |
| 1966 | switch e { |
| 1967 | case TINT8, TINT16, TINT32, TINT64, TINT: |
| 1968 | return -1 |
| 1969 | case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR, TUNSAFEPTR: |
| 1970 | return +1 |
| 1971 | } |
| 1972 | return 0 |
| 1973 | } |
| 1974 | |
Josh Bleecher Snyder | e00d609 | 2015-06-02 09:16:22 -0700 | [diff] [blame] | 1975 | // addr converts the address of the expression n to SSA, adds it to s and returns the SSA result. |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 1976 | // The value that the returned Value represents is guaranteed to be non-nil. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1977 | func (s *state) addr(n *Node) *ssa.Value { |
| 1978 | switch n.Op { |
| 1979 | case ONAME: |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 1980 | switch n.Class { |
| 1981 | case PEXTERN: |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 1982 | // global variable |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1983 | aux := &ssa.ExternSymbol{n.Type, n.Sym} |
Josh Bleecher Snyder | 67df793 | 2015-07-28 11:08:44 -0700 | [diff] [blame] | 1984 | v := s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb) |
| 1985 | // TODO: Make OpAddr use AuxInt as well as Aux. |
| 1986 | if n.Xoffset != 0 { |
| 1987 | v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v) |
| 1988 | } |
| 1989 | return v |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 1990 | case PPARAM, PPARAMOUT: |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 1991 | // parameter/result slot or local variable |
Josh Bleecher Snyder | 596ddf4 | 2015-06-29 11:56:28 -0700 | [diff] [blame] | 1992 | v := s.decladdrs[n] |
| 1993 | if v == nil { |
Josh Bleecher Snyder | 0a133cdd | 2015-07-03 20:28:56 -0700 | [diff] [blame] | 1994 | if flag_race != 0 && n.String() == ".fp" { |
| 1995 | s.Unimplementedf("race detector mishandles nodfp") |
| 1996 | } |
Josh Bleecher Snyder | 596ddf4 | 2015-06-29 11:56:28 -0700 | [diff] [blame] | 1997 | s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs) |
| 1998 | } |
| 1999 | return v |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2000 | case PAUTO: |
| 2001 | // We need to regenerate the address of autos |
| 2002 | // at every use. This prevents LEA instructions |
| 2003 | // from occurring before the corresponding VarDef |
| 2004 | // op and confusing the liveness analysis into thinking |
| 2005 | // the variable is live at function entry. |
| 2006 | // TODO: I'm not sure if this really works or we're just |
| 2007 | // getting lucky. We might need a real dependency edge |
| 2008 | // between vardef and addr ops. |
| 2009 | aux := &ssa.AutoSymbol{Typ: n.Type, Node: n} |
| 2010 | return s.newValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sp) |
Josh Bleecher Snyder | 9654873 | 2015-08-28 13:35:32 -0700 | [diff] [blame] | 2011 | case PAUTO | PHEAP, PPARAMREF: |
Daniel Morsing | c31b6dd | 2015-06-12 14:23:29 +0100 | [diff] [blame] | 2012 | return s.expr(n.Name.Heapaddr) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2013 | default: |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 2014 | s.Unimplementedf("variable address class %v not implemented", n.Class) |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2015 | return nil |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2016 | } |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2017 | case OINDREG: |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 2018 | // indirect off a register |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2019 | // used for storing/loading arguments/returns to/from callees |
Josh Bleecher Snyder | 25d1916 | 2015-07-28 12:37:46 -0700 | [diff] [blame] | 2020 | if int(n.Reg) != Thearch.REGSP { |
| 2021 | s.Unimplementedf("OINDREG of non-SP register %s in addr: %v", obj.Rconv(int(n.Reg)), n) |
| 2022 | return nil |
| 2023 | } |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2024 | return s.entryNewValue1I(ssa.OpOffPtr, Ptrto(n.Type), n.Xoffset, s.sp) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2025 | case OINDEX: |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2026 | if n.Left.Type.IsSlice() { |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2027 | a := s.expr(n.Left) |
| 2028 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 2029 | i = s.extendIndex(i) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2030 | len := s.newValue1(ssa.OpSliceLen, Types[TUINTPTR], a) |
Keith Randall | 46e62f8 | 2015-08-18 14:17:30 -0700 | [diff] [blame] | 2031 | if !n.Bounded { |
| 2032 | s.boundsCheck(i, len) |
| 2033 | } |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2034 | p := s.newValue1(ssa.OpSlicePtr, Ptrto(n.Left.Type.Type), a) |
| 2035 | return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), p, i) |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2036 | } else { // array |
| 2037 | a := s.addr(n.Left) |
| 2038 | i := s.expr(n.Right) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 2039 | i = s.extendIndex(i) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2040 | len := s.constInt(Types[TINT], n.Left.Type.Bound) |
Keith Randall | 46e62f8 | 2015-08-18 14:17:30 -0700 | [diff] [blame] | 2041 | if !n.Bounded { |
| 2042 | s.boundsCheck(i, len) |
| 2043 | } |
Brad Fitzpatrick | 7af53d9 | 2015-07-10 10:47:28 -0600 | [diff] [blame] | 2044 | return s.newValue2(ssa.OpPtrIndex, Ptrto(n.Left.Type.Type), a, i) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2045 | } |
Todd Neal | b383de2 | 2015-07-13 21:22:16 -0500 | [diff] [blame] | 2046 | case OIND: |
| 2047 | p := s.expr(n.Left) |
| 2048 | s.nilCheck(p) |
| 2049 | return p |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 2050 | case ODOT: |
| 2051 | p := s.addr(n.Left) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2052 | return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset)) |
Keith Randall | c3c84a2 | 2015-07-13 15:55:37 -0700 | [diff] [blame] | 2053 | case ODOTPTR: |
| 2054 | p := s.expr(n.Left) |
| 2055 | s.nilCheck(p) |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2056 | return s.newValue2(ssa.OpAddPtr, p.Type, p, s.constIntPtr(Types[TUINTPTR], n.Xoffset)) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2057 | default: |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 2058 | s.Unimplementedf("unhandled addr %v", Oconv(int(n.Op), 0)) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2059 | return nil |
| 2060 | } |
| 2061 | } |
| 2062 | |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2063 | // canSSA reports whether n is SSA-able. |
| 2064 | // n must be an ONAME. |
| 2065 | func canSSA(n *Node) bool { |
| 2066 | if n.Op != ONAME { |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 2067 | return false |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2068 | } |
| 2069 | if n.Addrtaken { |
| 2070 | return false |
| 2071 | } |
| 2072 | if n.Class&PHEAP != 0 { |
| 2073 | return false |
| 2074 | } |
Josh Bleecher Snyder | 9654873 | 2015-08-28 13:35:32 -0700 | [diff] [blame] | 2075 | switch n.Class { |
| 2076 | case PEXTERN, PPARAMOUT, PPARAMREF: |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2077 | return false |
| 2078 | } |
Keith Randall | 8a1f621 | 2015-09-08 21:28:44 -0700 | [diff] [blame] | 2079 | if n.Class == PPARAM && n.String() == ".this" { |
| 2080 | // wrappers generated by genwrapper need to update |
| 2081 | // the .this pointer in place. |
| 2082 | return false |
| 2083 | } |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2084 | return canSSAType(n.Type) |
| 2085 | // TODO: try to make more variables SSAable? |
| 2086 | } |
| 2087 | |
| 2088 | // canSSAType reports whether variables of type t are SSA-able.
| 2089 | func canSSAType(t *Type) bool { |
| 2090 | dowidth(t) |
| 2091 | if t.Width > int64(4*Widthptr) { |
| 2092 | // 4*Widthptr is an arbitrary constant. We want it |
| 2093 | // to be at least 3*Widthptr so slices can be registerized. |
| 2094 | // Too big and we'll introduce too much register pressure. |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 2095 | return false |
| 2096 | } |
Keith Randall | 9f954db | 2015-08-18 10:26:28 -0700 | [diff] [blame] | 2097 | switch t.Etype { |
| 2098 | case TARRAY: |
| 2099 | if Isslice(t) { |
| 2100 | return true |
| 2101 | } |
| 2102 | // We can't do arrays because dynamic indexing is |
| 2103 | // not supported on SSA variables. |
| 2104 | // TODO: maybe allow if length is <=1? All indexes |
| 2105 | // are constant? Might be good for the arrays |
| 2106 | // introduced by the compiler for variadic functions. |
| 2107 | return false |
| 2108 | case TSTRUCT: |
| 2109 | if countfield(t) > 4 { |
| 2110 | // 4 is an arbitrary constant. Same reasoning |
| 2111 | // as above, lots of small fields would waste |
| 2112 | // register space needed by other values. |
| 2113 | return false |
| 2114 | } |
| 2115 | for t1 := t.Type; t1 != nil; t1 = t1.Down { |
| 2116 | if !canSSAType(t1.Type) { |
| 2117 | return false |
| 2118 | } |
| 2119 | } |
| 2120 | return false // until it is implemented |
| 2121 | //return true |
| 2122 | default: |
| 2123 | return true |
| 2124 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 2125 | } |
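// Worked example of the size cutoff above, as a hedged sketch rather than
// anything the compiler computes: assuming a 64-bit target where Widthptr
// is 8, the limit is 4*8 = 32 bytes. A string header (2*8 = 16 bytes) and a
// slice header (3*8 = 24 bytes) fit under it; a struct of five pointers
// (5*8 = 40 bytes) does not, and it also exceeds the 4-field cap above.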
| 2126 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2127 | // nilCheck generates nil pointer checking code. |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2128 | // Starts a new block on return, unless nil checks are disabled. |
Josh Bleecher Snyder | 7e74e43 | 2015-07-24 11:55:52 -0700 | [diff] [blame] | 2129 | // Used only for automatically inserted nil checks, |
| 2130 | // not for user code like 'x != nil'. |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2131 | func (s *state) nilCheck(ptr *ssa.Value) { |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2132 | if Disable_checknil != 0 { |
| 2133 | return |
| 2134 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2135 | c := s.newValue1(ssa.OpIsNonNil, Types[TBOOL], ptr) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2136 | b := s.endBlock() |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 2137 | b.Kind = ssa.BlockIf |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2138 | b.Control = c |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 2139 | b.Likely = ssa.BranchLikely |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2140 | bNext := s.f.NewBlock(ssa.BlockPlain) |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2141 | bPanic := s.f.NewBlock(ssa.BlockPlain) |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2142 | b.AddEdgeTo(bNext) |
| 2143 | b.AddEdgeTo(bPanic) |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2144 | s.startBlock(bPanic) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2145 | // TODO: implicit nil checks somehow? |
Keith Randall | f5c53e0 | 2015-09-09 18:03:41 -0700 | [diff] [blame] | 2146 | chk := s.newValue2(ssa.OpPanicNilCheck, ssa.TypeMem, ptr, s.mem()) |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2147 | s.endBlock() |
Keith Randall | f5c53e0 | 2015-09-09 18:03:41 -0700 | [diff] [blame] | 2148 | bPanic.Kind = ssa.BlockExit |
| 2149 | bPanic.Control = chk |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 2150 | s.startBlock(bNext) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2151 | } |
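// Sketch of the control flow nilCheck emits, mirroring the code above
// (an illustration, not generated output):
//
//	b:      If (IsNonNil ptr) -> bNext [likely], bPanic
//	bPanic: Exit (PanicNilCheck ptr mem)
//	bNext:  ...compilation of the current statement continues here...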
| 2152 | |
| 2153 | // boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not. |
| 2154 | // Starts a new block on return. |
| 2155 | func (s *state) boundsCheck(idx, len *ssa.Value) { |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 2156 | if Debug['B'] != 0 { |
| 2157 | return |
| 2158 | } |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2159 | // TODO: convert index to full width? |
| 2160 | // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero. |
| 2161 | |
| 2162 | // bounds check |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 2163 | cmp := s.newValue2(ssa.OpIsInBounds, Types[TBOOL], idx, len) |
Keith Randall | 3526cf5 | 2015-08-24 23:52:03 -0700 | [diff] [blame] | 2164 | s.check(cmp, ssa.OpPanicIndexCheck) |
| 2165 | } |
| 2166 | |
| 2167 | // sliceBoundsCheck generates slice bounds checking code. Checks if 0 <= idx <= len, branches to exit if not. |
| 2168 | // Starts a new block on return. |
| 2169 | func (s *state) sliceBoundsCheck(idx, len *ssa.Value) { |
| 2170 | if Debug['B'] != 0 { |
| 2171 | return |
| 2172 | } |
| 2173 | // TODO: convert index to full width? |
| 2174 | // TODO: if index is 64-bit and we're compiling to 32-bit, check that high 32 bits are zero. |
| 2175 | |
| 2176 | // bounds check |
| 2177 | cmp := s.newValue2(ssa.OpIsSliceInBounds, Types[TBOOL], idx, len) |
| 2178 | s.check(cmp, ssa.OpPanicSliceCheck) |
| 2179 | } |
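// Illustrative sketch, not compiler code: the two predicates guarded by
// boundsCheck and sliceBoundsCheck, written as ordinary Go. The function
// names are hypothetical.
func exampleInBounds(idx, length int) bool      { return 0 <= idx && idx < length }
func exampleSliceInBounds(idx, length int) bool { return 0 <= idx && idx <= length }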
| 2180 | |
| 2181 | // If cmp (a bool) is false, panic using the given op.
| 2182 | func (s *state) check(cmp *ssa.Value, panicOp ssa.Op) { |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2183 | b := s.endBlock() |
| 2184 | b.Kind = ssa.BlockIf |
| 2185 | b.Control = cmp |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 2186 | b.Likely = ssa.BranchLikely |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2187 | bNext := s.f.NewBlock(ssa.BlockPlain) |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 2188 | bPanic := s.f.NewBlock(ssa.BlockPlain) |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2189 | b.AddEdgeTo(bNext) |
| 2190 | b.AddEdgeTo(bPanic) |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 2191 | s.startBlock(bPanic) |
| 2192 | // The panic check takes/returns memory to ensure that the right |
| 2193 | // memory state is observed if the panic happens. |
Keith Randall | f5c53e0 | 2015-09-09 18:03:41 -0700 | [diff] [blame] | 2194 | chk := s.newValue1(panicOp, ssa.TypeMem, s.mem()) |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 2195 | s.endBlock() |
Keith Randall | f5c53e0 | 2015-09-09 18:03:41 -0700 | [diff] [blame] | 2196 | bPanic.Kind = ssa.BlockExit |
| 2197 | bPanic.Control = chk |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2198 | s.startBlock(bNext) |
| 2199 | } |
| 2200 | |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2201 | type u2fcvtTab struct { |
| 2202 | geq, cvt2F, and, rsh, or, add ssa.Op |
| 2203 | one func(*state, ssa.Type, int64) *ssa.Value |
| 2204 | } |
| 2205 | |
| 2206 | var u64_f64 u2fcvtTab = u2fcvtTab{ |
| 2207 | geq: ssa.OpGeq64, |
| 2208 | cvt2F: ssa.OpCvt64to64F, |
| 2209 | and: ssa.OpAnd64, |
| 2210 | rsh: ssa.OpRsh64Ux64, |
| 2211 | or: ssa.OpOr64, |
| 2212 | add: ssa.OpAdd64F, |
| 2213 | one: (*state).constInt64, |
| 2214 | } |
| 2215 | |
| 2216 | var u64_f32 u2fcvtTab = u2fcvtTab{ |
| 2217 | geq: ssa.OpGeq64, |
| 2218 | cvt2F: ssa.OpCvt64to32F, |
| 2219 | and: ssa.OpAnd64, |
| 2220 | rsh: ssa.OpRsh64Ux64, |
| 2221 | or: ssa.OpOr64, |
| 2222 | add: ssa.OpAdd32F, |
| 2223 | one: (*state).constInt64, |
| 2224 | } |
| 2225 | |
| 2226 | // Excess generality on a machine with 64-bit integer registers. |
| 2227 | // Not used on AMD64. |
| 2228 | var u32_f32 u2fcvtTab = u2fcvtTab{ |
| 2229 | geq: ssa.OpGeq32, |
| 2230 | cvt2F: ssa.OpCvt32to32F, |
| 2231 | and: ssa.OpAnd32, |
| 2232 | rsh: ssa.OpRsh32Ux32, |
| 2233 | or: ssa.OpOr32, |
| 2234 | add: ssa.OpAdd32F, |
| 2235 | one: func(s *state, t ssa.Type, x int64) *ssa.Value { |
| 2236 | return s.constInt32(t, int32(x)) |
| 2237 | }, |
| 2238 | } |
| 2239 | |
| 2240 | func (s *state) uint64Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2241 | return s.uintTofloat(&u64_f64, n, x, ft, tt) |
| 2242 | } |
| 2243 | |
| 2244 | func (s *state) uint64Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2245 | return s.uintTofloat(&u64_f32, n, x, ft, tt) |
| 2246 | } |
| 2247 | |
| 2248 | func (s *state) uintTofloat(cvttab *u2fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2249 | // if x >= 0 { |
| 2250 | // result = (floatY) x |
| 2251 | // } else { |
| 2252 | // y = uintX(x) ; y = x & 1 |
| 2253 | // z = uintX(x) ; z = z >> 1 |
| 2255 | // z = z | y |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2256 | // result = floatY(z) |
| 2257 | // result = result + result |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2258 | // } |
| 2259 | // |
| 2260 | // Code borrowed from old code generator. |
| 2261 | // What's going on: a large 64-bit "unsigned" looks like a
| 2262 | // negative number to the hardware's integer-to-float
| 2263 | // conversion. However, because the mantissa is only |
| 2264 | // 63 bits, we don't need the LSB, so instead we do an |
| 2265 | // unsigned right shift (divide by two), convert, and |
| 2266 | // double. However, before we do that, we need to be |
| 2267 | // sure that we do not lose a "1" if that made the |
| 2268 | // difference in the resulting rounding. Therefore, we |
| 2269 | // preserve it, and OR (not ADD) it back in. The case |
| 2270 | // that matters is when the eleven discarded bits are |
| 2271 | // equal to 10000000001; that rounds up, and the 1 cannot |
| 2272 | // be lost else it would round down if the LSB of the |
| 2273 | // candidate mantissa is 0. |
| 2274 | cmp := s.newValue2(cvttab.geq, Types[TBOOL], x, s.zeroVal(ft)) |
| 2275 | b := s.endBlock() |
| 2276 | b.Kind = ssa.BlockIf |
| 2277 | b.Control = cmp |
| 2278 | b.Likely = ssa.BranchLikely |
| 2279 | |
| 2280 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 2281 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 2282 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 2283 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2284 | b.AddEdgeTo(bThen) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2285 | s.startBlock(bThen) |
| 2286 | a0 := s.newValue1(cvttab.cvt2F, tt, x) |
| 2287 | s.vars[n] = a0 |
| 2288 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2289 | bThen.AddEdgeTo(bAfter) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2290 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2291 | b.AddEdgeTo(bElse) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2292 | s.startBlock(bElse) |
| 2293 | one := cvttab.one(s, ft, 1) |
| 2294 | y := s.newValue2(cvttab.and, ft, x, one) |
| 2295 | z := s.newValue2(cvttab.rsh, ft, x, one) |
| 2296 | z = s.newValue2(cvttab.or, ft, z, y) |
| 2297 | a := s.newValue1(cvttab.cvt2F, tt, z) |
| 2298 | a1 := s.newValue2(cvttab.add, tt, a, a) |
| 2299 | s.vars[n] = a1 |
| 2300 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2301 | bElse.AddEdgeTo(bAfter) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 2302 | |
| 2303 | s.startBlock(bAfter) |
| 2304 | return s.variable(n, n.Type) |
| 2305 | } |
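// Illustrative sketch, not compiler code: the same halve/convert/double
// trick written in ordinary Go. The helper name is hypothetical; it should
// agree with float64(x) for every x, including values with the high bit set.
func exampleUint64Tofloat64(x uint64) float64 {
	if int64(x) >= 0 {
		return float64(int64(x)) // fits in 63 bits, convert directly
	}
	y := x & 1                   // preserve the low bit so rounding is unaffected
	z := x>>1 | y                // halve, then OR (not ADD) the bit back in
	return 2 * float64(int64(z)) // convert and double
}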
| 2306 | |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 2307 | // referenceTypeBuiltin generates code for the len/cap builtins for maps and channels. |
| 2308 | func (s *state) referenceTypeBuiltin(n *Node, x *ssa.Value) *ssa.Value { |
| 2309 | if !n.Left.Type.IsMap() && !n.Left.Type.IsChan() { |
| 2310 | s.Fatalf("node must be a map or a channel") |
| 2311 | } |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2312 | // if n == nil { |
| 2313 | // return 0 |
| 2314 | // } else { |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 2315 | // // len |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2316 | // return *((*int)n) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 2317 | // // cap |
| 2318 | // return *(((*int)n)+1) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2319 | // } |
| 2320 | lenType := n.Type |
Todd Neal | 67ac8a3 | 2015-08-28 15:20:54 -0500 | [diff] [blame] | 2321 | nilValue := s.newValue0(ssa.OpConstNil, Types[TUINTPTR]) |
| 2322 | cmp := s.newValue2(ssa.OpEqPtr, Types[TBOOL], x, nilValue) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2323 | b := s.endBlock() |
| 2324 | b.Kind = ssa.BlockIf |
| 2325 | b.Control = cmp |
| 2326 | b.Likely = ssa.BranchUnlikely |
| 2327 | |
| 2328 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 2329 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 2330 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 2331 | |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 2332 | // length/capacity of a nil map/chan is zero |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2333 | b.AddEdgeTo(bThen) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2334 | s.startBlock(bThen) |
| 2335 | s.vars[n] = s.zeroVal(lenType) |
| 2336 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2337 | bThen.AddEdgeTo(bAfter) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2338 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2339 | b.AddEdgeTo(bElse) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2340 | s.startBlock(bElse) |
Todd Neal | 707af25 | 2015-08-28 15:56:43 -0500 | [diff] [blame] | 2341 | if n.Op == OLEN { |
| 2342 | // length is stored in the first word for map/chan |
| 2343 | s.vars[n] = s.newValue2(ssa.OpLoad, lenType, x, s.mem()) |
| 2344 | } else if n.Op == OCAP { |
| 2345 | // capacity is stored in the second word for chan |
| 2346 | sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Width, x) |
| 2347 | s.vars[n] = s.newValue2(ssa.OpLoad, lenType, sw, s.mem()) |
| 2348 | } else { |
| 2349 | s.Fatalf("op must be OLEN or OCAP") |
| 2350 | } |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2351 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2352 | bElse.AddEdgeTo(bAfter) |
Todd Neal | e0e4068 | 2015-08-26 18:40:52 -0500 | [diff] [blame] | 2353 | |
| 2354 | s.startBlock(bAfter) |
| 2355 | return s.variable(n, lenType) |
| 2356 | } |
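// Behavior the nil check above implements, observable from ordinary Go
// (shown as a comment-only sketch):
//
//	var m map[string]int
//	var c chan int
//	_ = len(m) // 0: the nil branch yields zeroVal, no load is issued
//	_ = len(c) // 0
//	_ = cap(c) // 0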
| 2357 | |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2358 | type f2uCvtTab struct { |
| 2359 | ltf, cvt2U, subf ssa.Op |
| 2360 | value func(*state, ssa.Type, float64) *ssa.Value |
| 2361 | } |
| 2362 | |
| 2363 | var f32_u64 f2uCvtTab = f2uCvtTab{ |
| 2364 | ltf: ssa.OpLess32F, |
| 2365 | cvt2U: ssa.OpCvt32Fto64, |
| 2366 | subf: ssa.OpSub32F, |
| 2367 | value: (*state).constFloat32, |
| 2368 | } |
| 2369 | |
| 2370 | var f64_u64 f2uCvtTab = f2uCvtTab{ |
| 2371 | ltf: ssa.OpLess64F, |
| 2372 | cvt2U: ssa.OpCvt64Fto64, |
| 2373 | subf: ssa.OpSub64F, |
| 2374 | value: (*state).constFloat64, |
| 2375 | } |
| 2376 | |
| 2377 | func (s *state) float32ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2378 | return s.floatToUint(&f32_u64, n, x, ft, tt) |
| 2379 | } |
| 2380 | func (s *state) float64ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2381 | return s.floatToUint(&f64_u64, n, x, ft, tt) |
| 2382 | } |
| 2383 | |
| 2384 | func (s *state) floatToUint(cvttab *f2uCvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value { |
| 2385 | // if x < 9223372036854775808.0 { |
| 2386 | // result = uintY(x) |
| 2387 | // } else { |
| 2388 | // y = x - 9223372036854775808.0 |
| 2389 | // z = uintY(y) |
| 2390 | // result = z | -9223372036854775808 |
| 2391 | // } |
| 2392 | twoToThe63 := cvttab.value(s, ft, 9223372036854775808.0) |
| 2393 | cmp := s.newValue2(cvttab.ltf, Types[TBOOL], x, twoToThe63) |
| 2394 | b := s.endBlock() |
| 2395 | b.Kind = ssa.BlockIf |
| 2396 | b.Control = cmp |
| 2397 | b.Likely = ssa.BranchLikely |
| 2398 | |
| 2399 | bThen := s.f.NewBlock(ssa.BlockPlain) |
| 2400 | bElse := s.f.NewBlock(ssa.BlockPlain) |
| 2401 | bAfter := s.f.NewBlock(ssa.BlockPlain) |
| 2402 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2403 | b.AddEdgeTo(bThen) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2404 | s.startBlock(bThen) |
| 2405 | a0 := s.newValue1(cvttab.cvt2U, tt, x) |
| 2406 | s.vars[n] = a0 |
| 2407 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2408 | bThen.AddEdgeTo(bAfter) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2409 | |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2410 | b.AddEdgeTo(bElse) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2411 | s.startBlock(bElse) |
| 2412 | y := s.newValue2(cvttab.subf, ft, x, twoToThe63) |
| 2413 | y = s.newValue1(cvttab.cvt2U, tt, y) |
| 2414 | z := s.constInt64(tt, -9223372036854775808) |
| 2415 | a1 := s.newValue2(ssa.OpOr64, tt, y, z) |
| 2416 | s.vars[n] = a1 |
| 2417 | s.endBlock() |
Todd Neal | 47d6799 | 2015-08-28 21:36:29 -0500 | [diff] [blame] | 2418 | bElse.AddEdgeTo(bAfter) |
David Chase | 7315106 | 2015-08-26 14:25:40 -0400 | [diff] [blame] | 2419 | |
| 2420 | s.startBlock(bAfter) |
| 2421 | return s.variable(n, n.Type) |
| 2422 | } |
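// Illustrative sketch, not compiler code: the same subtract-and-OR trick in
// ordinary Go. The helper name is hypothetical, and the input is assumed to
// be non-negative and representable as a uint64; the compiler makes no
// promise about out-of-range inputs.
func exampleFloat64ToUint64(x float64) uint64 {
	const twoToThe63 = 9223372036854775808.0 // 2^63
	if x < twoToThe63 {
		return uint64(int64(x))
	}
	y := x - twoToThe63             // now 0 <= y < 2^63
	return uint64(int64(y)) | 1<<63 // fold the high bit back in
}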
| 2423 | |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 2424 | // checkgoto checks that a goto from "from" to "to" does not
| 2425 | // jump into a block or jump over variable declarations. |
| 2426 | // It is a copy of checkgoto in the pre-SSA backend, |
| 2427 | // modified only for line number handling. |
| 2428 | // TODO: document how this works and why it is designed the way it is. |
| 2429 | func (s *state) checkgoto(from *Node, to *Node) { |
| 2430 | if from.Sym == to.Sym { |
| 2431 | return |
| 2432 | } |
| 2433 | |
| 2434 | nf := 0 |
| 2435 | for fs := from.Sym; fs != nil; fs = fs.Link { |
| 2436 | nf++ |
| 2437 | } |
| 2438 | nt := 0 |
| 2439 | for fs := to.Sym; fs != nil; fs = fs.Link { |
| 2440 | nt++ |
| 2441 | } |
| 2442 | fs := from.Sym |
| 2443 | for ; nf > nt; nf-- { |
| 2444 | fs = fs.Link |
| 2445 | } |
| 2446 | if fs != to.Sym { |
| 2447 | // decide what to complain about. |
| 2448 | // prefer to complain about 'into block' over declarations, |
| 2449 | // so scan backward to find most recent block or else dcl. |
| 2450 | var block *Sym |
| 2451 | |
| 2452 | var dcl *Sym |
| 2453 | ts := to.Sym |
| 2454 | for ; nt > nf; nt-- { |
| 2455 | if ts.Pkg == nil { |
| 2456 | block = ts |
| 2457 | } else { |
| 2458 | dcl = ts |
| 2459 | } |
| 2460 | ts = ts.Link |
| 2461 | } |
| 2462 | |
| 2463 | for ts != fs { |
| 2464 | if ts.Pkg == nil { |
| 2465 | block = ts |
| 2466 | } else { |
| 2467 | dcl = ts |
| 2468 | } |
| 2469 | ts = ts.Link |
| 2470 | fs = fs.Link |
| 2471 | } |
| 2472 | |
| 2473 | lno := int(from.Left.Lineno) |
| 2474 | if block != nil { |
| 2475 | yyerrorl(lno, "goto %v jumps into block starting at %v", from.Left.Sym, Ctxt.Line(int(block.Lastlineno))) |
| 2476 | } else { |
| 2477 | yyerrorl(lno, "goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, Ctxt.Line(int(dcl.Lastlineno))) |
| 2478 | } |
| 2479 | } |
| 2480 | } |
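// Example of a program checkgoto rejects (a comment-only sketch; the exact
// wording comes from the yyerrorl calls above):
//
//	goto L    // error: goto L jumps over declaration of x at ...
//	x := f()
//	L:
//	_ = x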
| 2481 | |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2482 | // variable returns the value of a variable at the current location. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2483 | func (s *state) variable(name *Node, t ssa.Type) *ssa.Value { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2484 | v := s.vars[name] |
| 2485 | if v == nil { |
| 2486 | // TODO: get type? Take Sym as arg? |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2487 | v = s.newValue0A(ssa.OpFwdRef, t, name) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2488 | s.vars[name] = v |
| 2489 | } |
| 2490 | return v |
| 2491 | } |
| 2492 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2493 | func (s *state) mem() *ssa.Value { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2494 | return s.variable(&memvar, ssa.TypeMem) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2495 | } |
| 2496 | |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2497 | func (s *state) linkForwardReferences() { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2498 | // Build ssa graph. Each variable on its first use in a basic block |
| 2499 | // leaves a FwdRef in that block representing the incoming value |
| 2500 | // of that variable. This function links that ref up with possible definitions, |
| 2501 | // inserting Phi values as needed. This is essentially the algorithm |
| 2502 | // described by Braun, Buchwald, Hack, Leißa, Mallon, and Zwinkau:
| 2503 | // http://pp.info.uni-karlsruhe.de/uploads/publikationen/braun13cc.pdf |
| 2504 | for _, b := range s.f.Blocks { |
| 2505 | for _, v := range b.Values { |
| 2506 | if v.Op != ssa.OpFwdRef { |
| 2507 | continue |
| 2508 | } |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2509 | name := v.Aux.(*Node) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2510 | v.Op = ssa.OpCopy |
| 2511 | v.Aux = nil |
| 2512 | v.SetArgs1(s.lookupVarIncoming(b, v.Type, name)) |
| 2513 | } |
| 2514 | } |
| 2515 | } |
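// Comment-only sketch of where a phi arises: both branches below define v,
// so the FwdRef left at the use of v after the if is resolved to a Phi of
// the two incoming definitions, while straight-line code just forwards the
// single reaching definition.
//
//	v := 0
//	if cond {
//		v = 1
//	} else {
//		v = 2
//	}
//	return v // resolved to Phi(1, 2)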
| 2516 | |
| 2517 | // lookupVarIncoming finds the variable's value at the start of block b. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2518 | func (s *state) lookupVarIncoming(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2519 | // TODO(khr): have lookupVarIncoming overwrite the fwdRef or copy it |
| 2520 | // will be used in, instead of having the result used in a copy value. |
| 2521 | if b == s.f.Entry { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2522 | if name == &memvar { |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 2523 | return s.startmem |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2524 | } |
| 2525 | // variable is live at the entry block. Load it. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2526 | addr := s.decladdrs[name] |
| 2527 | if addr == nil { |
| 2528 | // TODO: closure args reach here. |
Josh Bleecher Snyder | 5844603 | 2015-08-23 20:29:43 -0700 | [diff] [blame] | 2529 | s.Unimplementedf("unhandled closure arg") |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2530 | } |
| 2531 | if _, ok := addr.Aux.(*ssa.ArgSymbol); !ok { |
| 2532 | s.Fatalf("variable live at start of function %s is not an argument %s", b.Func.Name, name) |
| 2533 | } |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2534 | return s.entryNewValue2(ssa.OpLoad, t, addr, s.startmem) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2535 | } |
| 2536 | var vals []*ssa.Value |
| 2537 | for _, p := range b.Preds { |
| 2538 | vals = append(vals, s.lookupVarOutgoing(p, t, name)) |
| 2539 | } |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 2540 | if len(vals) == 0 { |
Josh Bleecher Snyder | 61aa095 | 2015-07-20 15:39:14 -0700 | [diff] [blame] | 2541 | // This block is dead; we have no predecessors and we're not the entry block. |
| 2542 | // It doesn't matter what we use here as long as it is well-formed, |
| 2543 | // so use the default/zero value. |
| 2544 | if name == &memvar { |
| 2545 | return s.startmem |
| 2546 | } |
| 2547 | return s.zeroVal(name.Type) |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 2548 | } |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2549 | v0 := vals[0] |
| 2550 | for i := 1; i < len(vals); i++ { |
| 2551 | if vals[i] != v0 { |
| 2552 | // need a phi value |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2553 | v := b.NewValue0(s.peekLine(), ssa.OpPhi, t) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2554 | v.AddArgs(vals...) |
| 2555 | return v |
| 2556 | } |
| 2557 | } |
| 2558 | return v0 |
| 2559 | } |
| 2560 | |
| 2561 | // lookupVarOutgoing finds the variable's value at the end of block b. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 2562 | func (s *state) lookupVarOutgoing(b *ssa.Block, t ssa.Type, name *Node) *ssa.Value { |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2563 | m := s.defvars[b.ID] |
| 2564 | if v, ok := m[name]; ok { |
| 2565 | return v |
| 2566 | } |
| 2567 | // The variable is not defined by b and we haven't |
| 2568 | // looked it up yet. Generate v, a copy value which |
| 2569 | // will be the outgoing value of the variable. Then |
| 2570 | // look up w, the incoming value of the variable. |
| 2571 | // Make v = copy(w). We need the extra copy to |
| 2572 | // prevent infinite recursion when looking up the |
| 2573 | // incoming value of the variable. |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 2574 | v := b.NewValue0(s.peekLine(), ssa.OpCopy, t) |
Keith Randall | d2fd43a | 2015-04-15 15:51:25 -0700 | [diff] [blame] | 2575 | m[name] = v |
| 2576 | v.AddArg(s.lookupVarIncoming(b, t, name)) |
| 2577 | return v |
| 2578 | } |
| 2579 | |
| 2580 | // TODO: the above mutually recursive functions can lead to very deep stacks. Fix that. |
| 2581 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2582 | // an unresolved branch |
| 2583 | type branch struct { |
| 2584 | p *obj.Prog // branch instruction |
| 2585 | b *ssa.Block // target |
| 2586 | } |
| 2587 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2588 | type genState struct { |
| 2589 | // branches remembers all the branch instructions we've seen |
| 2590 | // and where they would like to go. |
| 2591 | branches []branch |
| 2592 | |
| 2593 | // bstart remembers where each block starts (indexed by block ID) |
| 2594 | bstart []*obj.Prog |
| 2595 | |
| 2596 | // deferBranches remembers all the defer branches we've seen. |
| 2597 | deferBranches []*obj.Prog |
| 2598 | |
| 2599 | // deferTarget remembers the (last) deferreturn call site. |
| 2600 | deferTarget *obj.Prog |
| 2601 | } |
| 2602 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2603 | // genssa appends entries to ptxt for each instruction in f. |
| 2604 | // gcargs and gclocals are filled in with pointer maps for the frame. |
| 2605 | func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) { |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2606 | var s genState |
| 2607 | |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 2608 | e := f.Config.Frontend().(*ssaExport) |
| 2609 | // We're about to emit a bunch of Progs. |
| 2610 | // Since the only way to get here is to explicitly request it, |
| 2611 | // just fail on unimplemented instead of trying to unwind our mess. |
| 2612 | e.mustImplement = true |
| 2613 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2614 | // Remember where each block starts. |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2615 | s.bstart = make([]*obj.Prog, f.NumBlocks()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2616 | |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2617 | var valueProgs map[*obj.Prog]*ssa.Value |
| 2618 | var blockProgs map[*obj.Prog]*ssa.Block |
| 2619 | const logProgs = true |
| 2620 | if logProgs { |
| 2621 | valueProgs = make(map[*obj.Prog]*ssa.Value, f.NumValues()) |
| 2622 | blockProgs = make(map[*obj.Prog]*ssa.Block, f.NumBlocks()) |
| 2623 | f.Logf("genssa %s\n", f.Name) |
| 2624 | blockProgs[Pc] = f.Blocks[0] |
| 2625 | } |
| 2626 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2627 | // Emit basic blocks |
| 2628 | for i, b := range f.Blocks { |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2629 | s.bstart[b.ID] = Pc |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2630 | // Emit values in block |
| 2631 | for _, v := range b.Values { |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2632 | x := Pc |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2633 | s.genValue(v) |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2634 | if logProgs { |
| 2635 | for ; x != Pc; x = x.Link { |
| 2636 | valueProgs[x] = v |
| 2637 | } |
| 2638 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2639 | } |
| 2640 | // Emit control flow instructions for block |
| 2641 | var next *ssa.Block |
| 2642 | if i < len(f.Blocks)-1 { |
| 2643 | next = f.Blocks[i+1] |
| 2644 | } |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2645 | x := Pc |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2646 | s.genBlock(b, next) |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2647 | if logProgs { |
| 2648 | for ; x != Pc; x = x.Link { |
| 2649 | blockProgs[x] = b |
| 2650 | } |
| 2651 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2652 | } |
| 2653 | |
| 2654 | // Resolve branches |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2655 | for _, br := range s.branches { |
| 2656 | br.p.To.Val = s.bstart[br.b.ID] |
| 2657 | } |
Keith Randall | ca9e450 | 2015-09-08 08:59:57 -0700 | [diff] [blame] | 2658 | if s.deferBranches != nil && s.deferTarget == nil { |
| 2659 | // This can happen when the function has a defer but |
| 2660 | // no return (because it has an infinite loop). |
| 2661 | s.deferReturn() |
| 2662 | Prog(obj.ARET) |
| 2663 | } |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2664 | for _, p := range s.deferBranches { |
| 2665 | p.To.Val = s.deferTarget |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2666 | } |
| 2667 | |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2668 | if logProgs { |
| 2669 | for p := ptxt; p != nil; p = p.Link { |
| 2670 | var s string |
| 2671 | if v, ok := valueProgs[p]; ok { |
| 2672 | s = v.String() |
| 2673 | } else if b, ok := blockProgs[p]; ok { |
| 2674 | s = b.String() |
| 2675 | } else { |
| 2676 | s = " " // most value and branch strings are 2-3 characters long |
| 2677 | } |
| 2678 | f.Logf("%s\t%s\n", s, p) |
| 2679 | } |
Josh Bleecher Snyder | 35fb514 | 2015-08-10 12:15:52 -0700 | [diff] [blame] | 2680 | if f.Config.HTML != nil { |
| 2681 | saved := ptxt.Ctxt.LineHist.PrintFilenameOnly |
| 2682 | ptxt.Ctxt.LineHist.PrintFilenameOnly = true |
| 2683 | var buf bytes.Buffer |
| 2684 | buf.WriteString("<code>") |
| 2685 | buf.WriteString("<dl class=\"ssa-gen\">") |
| 2686 | for p := ptxt; p != nil; p = p.Link { |
| 2687 | buf.WriteString("<dt class=\"ssa-prog-src\">") |
| 2688 | if v, ok := valueProgs[p]; ok { |
| 2689 | buf.WriteString(v.HTML()) |
| 2690 | } else if b, ok := blockProgs[p]; ok { |
| 2691 | buf.WriteString(b.HTML()) |
| 2692 | } |
| 2693 | buf.WriteString("</dt>") |
| 2694 | buf.WriteString("<dd class=\"ssa-prog\">") |
| 2695 | buf.WriteString(html.EscapeString(p.String())) |
| 2696 | buf.WriteString("</dd>") |
| 2698 | } |
| 2699 | buf.WriteString("</dl>") |
| 2700 | buf.WriteString("</code>") |
| 2701 | f.Config.HTML.WriteColumn("genssa", buf.String()) |
| 2702 | ptxt.Ctxt.LineHist.PrintFilenameOnly = saved |
| 2703 | } |
Josh Bleecher Snyder | b8efee0 | 2015-07-31 14:37:15 -0700 | [diff] [blame] | 2704 | } |
| 2705 | |
Josh Bleecher Snyder | 6b41665 | 2015-07-28 10:56:39 -0700 | [diff] [blame] | 2706 | // Emit static data |
| 2707 | if f.StaticData != nil { |
| 2708 | for _, n := range f.StaticData.([]*Node) { |
| 2709 | if !gen_as_init(n, false) { |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 2710 | Fatalf("non-static data marked as static: %v, func: %v\n", n, f)
Josh Bleecher Snyder | 6b41665 | 2015-07-28 10:56:39 -0700 | [diff] [blame] | 2711 | } |
| 2712 | } |
| 2713 | } |
| 2714 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2715 | // Allocate stack frame |
| 2716 | allocauto(ptxt) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2717 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2718 | // Generate gc bitmaps. |
| 2719 | liveness(Curfn, ptxt, gcargs, gclocals) |
| 2720 | gcsymdup(gcargs) |
| 2721 | gcsymdup(gclocals) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2722 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 2723 | // Add frame prologue. Zero ambiguously live variables. |
| 2724 | Thearch.Defframe(ptxt) |
| 2725 | if Debug['f'] != 0 { |
| 2726 | frame(0) |
| 2727 | } |
| 2728 | |
| 2729 | // Remove leftover instrumentation from the instruction stream. |
| 2730 | removevardef(ptxt) |
Josh Bleecher Snyder | 35fb514 | 2015-08-10 12:15:52 -0700 | [diff] [blame] | 2731 | |
| 2732 | f.Config.HTML.Close() |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2733 | } |
| 2734 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2735 | // opregreg emits instructions for |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 2736 | // dest := dest(To) op src(From) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2737 | // and also returns the created obj.Prog so it |
| 2738 | // may be further adjusted (offset, scale, etc). |
| 2739 | func opregreg(op int, dest, src int16) *obj.Prog { |
| 2740 | p := Prog(op) |
| 2741 | p.From.Type = obj.TYPE_REG |
| 2742 | p.To.Type = obj.TYPE_REG |
| 2743 | p.To.Reg = dest |
| 2744 | p.From.Reg = src |
| 2745 | return p |
| 2746 | } |
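// Hypothetical usage sketch, matching how the callers below use it:
//
//	p := opregreg(x86.AADDSD, regnum(v), regnum(v.Args[1])) // v := v + args[1]
//	_ = p // the returned Prog can be further adjusted if needed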
| 2747 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 2748 | func (s *genState) genValue(v *ssa.Value) { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 2749 | lineno = v.Line |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2750 | switch v.Op { |
Keith Randall | 0dca735 | 2015-06-06 16:03:33 -0700 | [diff] [blame] | 2751 | case ssa.OpAMD64ADDQ: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2752 | // TODO: use addq instead of leaq if target is in the right register. |
| 2753 | p := Prog(x86.ALEAQ) |
| 2754 | p.From.Type = obj.TYPE_MEM |
| 2755 | p.From.Reg = regnum(v.Args[0]) |
| 2756 | p.From.Scale = 1 |
| 2757 | p.From.Index = regnum(v.Args[1]) |
| 2758 | p.To.Type = obj.TYPE_REG |
| 2759 | p.To.Reg = regnum(v) |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 2760 | case ssa.OpAMD64ADDL: |
| 2761 | p := Prog(x86.ALEAL) |
| 2762 | p.From.Type = obj.TYPE_MEM |
| 2763 | p.From.Reg = regnum(v.Args[0]) |
| 2764 | p.From.Scale = 1 |
| 2765 | p.From.Index = regnum(v.Args[1]) |
| 2766 | p.To.Type = obj.TYPE_REG |
| 2767 | p.To.Reg = regnum(v) |
| 2768 | case ssa.OpAMD64ADDW: |
| 2769 | p := Prog(x86.ALEAW) |
| 2770 | p.From.Type = obj.TYPE_MEM |
| 2771 | p.From.Reg = regnum(v.Args[0]) |
| 2772 | p.From.Scale = 1 |
| 2773 | p.From.Index = regnum(v.Args[1]) |
| 2774 | p.To.Type = obj.TYPE_REG |
| 2775 | p.To.Reg = regnum(v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2776 | // 2-address opcode arithmetic, symmetric |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2777 | case ssa.OpAMD64ADDB, ssa.OpAMD64ADDSS, ssa.OpAMD64ADDSD, |
Alexandru Moșoi | edff881 | 2015-07-28 14:58:49 +0200 | [diff] [blame] | 2778 | ssa.OpAMD64ANDQ, ssa.OpAMD64ANDL, ssa.OpAMD64ANDW, ssa.OpAMD64ANDB, |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2779 | ssa.OpAMD64ORQ, ssa.OpAMD64ORL, ssa.OpAMD64ORW, ssa.OpAMD64ORB, |
| 2780 | ssa.OpAMD64XORQ, ssa.OpAMD64XORL, ssa.OpAMD64XORW, ssa.OpAMD64XORB, |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2781 | ssa.OpAMD64MULQ, ssa.OpAMD64MULL, ssa.OpAMD64MULW, ssa.OpAMD64MULB, |
David Chase | 3a9d0ac | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 2782 | ssa.OpAMD64MULSS, ssa.OpAMD64MULSD, ssa.OpAMD64PXOR: |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 2783 | r := regnum(v) |
| 2784 | x := regnum(v.Args[0]) |
| 2785 | y := regnum(v.Args[1]) |
| 2786 | if x != r && y != r { |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2787 | opregreg(regMoveByTypeAMD64(v.Type), r, x) |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 2788 | x = r |
| 2789 | } |
| 2790 | p := Prog(v.Op.Asm()) |
| 2791 | p.From.Type = obj.TYPE_REG |
| 2792 | p.To.Type = obj.TYPE_REG |
| 2793 | p.To.Reg = r |
| 2794 | if x == r { |
| 2795 | p.From.Reg = y |
| 2796 | } else { |
| 2797 | p.From.Reg = x |
| 2798 | } |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2799 | // 2-address opcode arithmetic, not symmetric |
| 2800 | case ssa.OpAMD64SUBQ, ssa.OpAMD64SUBL, ssa.OpAMD64SUBW, ssa.OpAMD64SUBB: |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 2801 | r := regnum(v) |
| 2802 | x := regnum(v.Args[0]) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2803 | y := regnum(v.Args[1]) |
| 2804 | var neg bool |
| 2805 | if y == r { |
| 2806 | // compute -(y-x) instead |
| 2807 | x, y = y, x |
| 2808 | neg = true |
Keith Randall | be1eb57 | 2015-07-22 13:46:15 -0700 | [diff] [blame] | 2809 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2810 | if x != r { |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2811 | opregreg(regMoveByTypeAMD64(v.Type), r, x) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 2812 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2813 | opregreg(v.Op.Asm(), r, y) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2814 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2815 | if neg { |
| 2816 | p := Prog(x86.ANEGQ) // TODO: use correct size? This is mostly a hack until regalloc does 2-address correctly |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2817 | p.To.Type = obj.TYPE_REG |
| 2818 | p.To.Reg = r |
| 2819 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 2820 | case ssa.OpAMD64SUBSS, ssa.OpAMD64SUBSD, ssa.OpAMD64DIVSS, ssa.OpAMD64DIVSD: |
| 2821 | r := regnum(v) |
| 2822 | x := regnum(v.Args[0]) |
| 2823 | y := regnum(v.Args[1]) |
| 2824 | if y == r && x != r { |
| 2825 | // r/y := x op r/y, need to preserve x and rewrite to |
| 2826 | // r/y := r/y op x15 |
| 2827 | x15 := int16(x86.REG_X15) |
| 2828 | // register move y to x15 |
| 2829 | // register move x to y |
| 2830 | // rename y with x15 |
| 2831 | opregreg(regMoveByTypeAMD64(v.Type), x15, y) |
| 2832 | opregreg(regMoveByTypeAMD64(v.Type), r, x) |
| 2833 | y = x15 |
| 2834 | } else if x != r { |
| 2835 | opregreg(regMoveByTypeAMD64(v.Type), r, x) |
| 2836 | } |
| 2837 | opregreg(v.Op.Asm(), r, y) |
| 2838 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2839 | case ssa.OpAMD64DIVQ, ssa.OpAMD64DIVL, ssa.OpAMD64DIVW, |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2840 | ssa.OpAMD64DIVQU, ssa.OpAMD64DIVLU, ssa.OpAMD64DIVWU, |
| 2841 | ssa.OpAMD64MODQ, ssa.OpAMD64MODL, ssa.OpAMD64MODW, |
| 2842 | ssa.OpAMD64MODQU, ssa.OpAMD64MODLU, ssa.OpAMD64MODWU: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2843 | |
| 2844 | // Arg[0] is already in AX as it's the only register we allow |
| 2845 | // and AX is the only output |
| 2846 | x := regnum(v.Args[1]) |
| 2847 | |
| 2848 | // CPU faults upon signed overflow, which occurs when most |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2849 | // negative int is divided by -1. |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2850 | var j *obj.Prog |
| 2851 | if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL || |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2852 | v.Op == ssa.OpAMD64DIVW || v.Op == ssa.OpAMD64MODQ || |
| 2853 | v.Op == ssa.OpAMD64MODL || v.Op == ssa.OpAMD64MODW { |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2854 | |
| 2855 | var c *obj.Prog |
| 2856 | switch v.Op { |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2857 | case ssa.OpAMD64DIVQ, ssa.OpAMD64MODQ: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2858 | c = Prog(x86.ACMPQ) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2859 | j = Prog(x86.AJEQ) |
| 2860 | // go ahead and sign extend to save doing it later |
| 2861 | Prog(x86.ACQO) |
| 2862 | |
| 2863 | case ssa.OpAMD64DIVL, ssa.OpAMD64MODL: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2864 | c = Prog(x86.ACMPL) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2865 | j = Prog(x86.AJEQ) |
| 2866 | Prog(x86.ACDQ) |
| 2867 | |
| 2868 | case ssa.OpAMD64DIVW, ssa.OpAMD64MODW: |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2869 | c = Prog(x86.ACMPW) |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2870 | j = Prog(x86.AJEQ) |
| 2871 | Prog(x86.ACWD) |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2872 | } |
| 2873 | c.From.Type = obj.TYPE_REG |
| 2874 | c.From.Reg = x |
| 2875 | c.To.Type = obj.TYPE_CONST |
| 2876 | c.To.Offset = -1 |
| 2877 | |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2878 | j.To.Type = obj.TYPE_BRANCH |
| 2879 | |
| 2880 | } |
| 2881 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2882 | // for unsigned ints, we zero-extend by setting DX = 0;
| 2883 | // signed ints were sign extended above |
| 2884 | if v.Op == ssa.OpAMD64DIVQU || v.Op == ssa.OpAMD64MODQU || |
| 2885 | v.Op == ssa.OpAMD64DIVLU || v.Op == ssa.OpAMD64MODLU || |
| 2886 | v.Op == ssa.OpAMD64DIVWU || v.Op == ssa.OpAMD64MODWU { |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2887 | c := Prog(x86.AXORQ) |
| 2888 | c.From.Type = obj.TYPE_REG |
| 2889 | c.From.Reg = x86.REG_DX |
| 2890 | c.To.Type = obj.TYPE_REG |
| 2891 | c.To.Reg = x86.REG_DX |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2892 | } |
| 2893 | |
| 2894 | p := Prog(v.Op.Asm()) |
| 2895 | p.From.Type = obj.TYPE_REG |
| 2896 | p.From.Reg = x |
| 2897 | |
| 2898 | // signed division, rest of the check for -1 case |
| 2899 | if j != nil { |
| 2900 | j2 := Prog(obj.AJMP) |
| 2901 | j2.To.Type = obj.TYPE_BRANCH |
| 2902 | |
Todd Neal | 57d9e7e | 2015-08-18 19:51:44 -0500 | [diff] [blame] | 2903 | var n *obj.Prog |
| 2904 | if v.Op == ssa.OpAMD64DIVQ || v.Op == ssa.OpAMD64DIVL || |
| 2905 | v.Op == ssa.OpAMD64DIVW { |
| 2906 | // n * -1 = -n |
| 2907 | n = Prog(x86.ANEGQ) |
| 2908 | n.To.Type = obj.TYPE_REG |
| 2909 | n.To.Reg = x86.REG_AX |
| 2910 | } else { |
| 2911 | // n % -1 == 0 |
| 2912 | n = Prog(x86.AXORQ) |
| 2913 | n.From.Type = obj.TYPE_REG |
| 2914 | n.From.Reg = x86.REG_DX |
| 2915 | n.To.Type = obj.TYPE_REG |
| 2916 | n.To.Reg = x86.REG_DX |
| 2917 | } |
Todd Neal | a45f2d8 | 2015-08-17 17:46:06 -0500 | [diff] [blame] | 2918 | |
| 2919 | j.To.Val = n |
| 2920 | j2.To.Val = Pc |
| 2921 | } |
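		// Comment-only sketch of the overflow case these branches guard,
		// in source-language terms (math.MinInt64 used only for exposition):
		//
		//	math.MinInt64 / -1 // hardware faults; the spec defines the result as math.MinInt64
		//	math.MinInt64 % -1 // defined to be 0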
| 2922 | |
Todd Neal | 67cbd5b | 2015-08-18 19:14:47 -0500 | [diff] [blame] | 2923 | case ssa.OpAMD64HMULL, ssa.OpAMD64HMULW, ssa.OpAMD64HMULB, |
| 2924 | ssa.OpAMD64HMULLU, ssa.OpAMD64HMULWU, ssa.OpAMD64HMULBU: |
| 2925 | // the frontend rewrites constant division of 8/16/32 bit integers into
| 2926 | // HMUL by a constant |
| 2927 | |
| 2928 | // Arg[0] is already in AX as it's the only register we allow |
| 2929 | // and DX is the only output we care about (the high bits) |
| 2930 | p := Prog(v.Op.Asm()) |
| 2931 | p.From.Type = obj.TYPE_REG |
| 2932 | p.From.Reg = regnum(v.Args[1]) |
| 2933 | |
| 2934 | // IMULB puts the high portion in AH instead of DL, |
| 2935 | // so move it to DL for consistency |
| 2936 | if v.Type.Size() == 1 { |
| 2937 | m := Prog(x86.AMOVB) |
| 2938 | m.From.Type = obj.TYPE_REG |
| 2939 | m.From.Reg = x86.REG_AH |
| 2940 | m.To.Type = obj.TYPE_REG |
| 2941 | m.To.Reg = x86.REG_DX |
| 2942 | } |
| 2943 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2944 | case ssa.OpAMD64SHLQ, ssa.OpAMD64SHLL, ssa.OpAMD64SHLW, ssa.OpAMD64SHLB, |
| 2945 | ssa.OpAMD64SHRQ, ssa.OpAMD64SHRL, ssa.OpAMD64SHRW, ssa.OpAMD64SHRB, |
| 2946 | ssa.OpAMD64SARQ, ssa.OpAMD64SARL, ssa.OpAMD64SARW, ssa.OpAMD64SARB: |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 2947 | x := regnum(v.Args[0]) |
| 2948 | r := regnum(v) |
| 2949 | if x != r { |
| 2950 | if r == x86.REG_CX { |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 2951 | v.Fatalf("can't implement %s, target and shift both in CX", v.LongString()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 2952 | } |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2953 | p := Prog(regMoveAMD64(v.Type.Size())) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 2954 | p.From.Type = obj.TYPE_REG |
| 2955 | p.From.Reg = x |
| 2956 | p.To.Type = obj.TYPE_REG |
| 2957 | p.To.Reg = r |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 2958 | } |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 2959 | p := Prog(v.Op.Asm()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 2960 | p.From.Type = obj.TYPE_REG |
| 2961 | p.From.Reg = regnum(v.Args[1]) // should be CX |
| 2962 | p.To.Type = obj.TYPE_REG |
| 2963 | p.To.Reg = r |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2964 | case ssa.OpAMD64ADDQconst, ssa.OpAMD64ADDLconst, ssa.OpAMD64ADDWconst: |
| 2965 | // TODO: use addq instead of leaq if target is in the right register. |
| 2966 | var asm int |
| 2967 | switch v.Op { |
| 2968 | case ssa.OpAMD64ADDQconst: |
| 2969 | asm = x86.ALEAQ |
| 2970 | case ssa.OpAMD64ADDLconst: |
| 2971 | asm = x86.ALEAL |
| 2972 | case ssa.OpAMD64ADDWconst: |
| 2973 | asm = x86.ALEAW |
| 2974 | } |
| 2975 | p := Prog(asm) |
| 2976 | p.From.Type = obj.TYPE_MEM |
| 2977 | p.From.Reg = regnum(v.Args[0]) |
| 2978 | p.From.Offset = v.AuxInt |
| 2979 | p.To.Type = obj.TYPE_REG |
| 2980 | p.To.Reg = regnum(v) |
Alexandru Moșoi | 7a6de6d | 2015-08-14 13:23:11 +0200 | [diff] [blame] | 2981 | case ssa.OpAMD64MULQconst, ssa.OpAMD64MULLconst, ssa.OpAMD64MULWconst, ssa.OpAMD64MULBconst: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 2982 | r := regnum(v) |
| 2983 | x := regnum(v.Args[0]) |
| 2984 | if r != x { |
| 2985 | p := Prog(regMoveAMD64(v.Type.Size())) |
| 2986 | p.From.Type = obj.TYPE_REG |
| 2987 | p.From.Reg = x |
| 2988 | p.To.Type = obj.TYPE_REG |
| 2989 | p.To.Reg = r |
| 2990 | } |
| 2991 | p := Prog(v.Op.Asm()) |
| 2992 | p.From.Type = obj.TYPE_CONST |
| 2993 | p.From.Offset = v.AuxInt |
| 2994 | p.To.Type = obj.TYPE_REG |
| 2995 | p.To.Reg = r |
| 2996 | // TODO: Teach doasm to compile the three-address multiply imul $c, r1, r2 |
| 2997 | // instead of using the MOVQ above. |
| 2998 | //p.From3 = new(obj.Addr) |
| 2999 | //p.From3.Type = obj.TYPE_REG |
| 3000 | //p.From3.Reg = regnum(v.Args[0]) |
| 3001 | case ssa.OpAMD64ADDBconst, |
| 3002 | ssa.OpAMD64ANDQconst, ssa.OpAMD64ANDLconst, ssa.OpAMD64ANDWconst, ssa.OpAMD64ANDBconst, |
| 3003 | ssa.OpAMD64ORQconst, ssa.OpAMD64ORLconst, ssa.OpAMD64ORWconst, ssa.OpAMD64ORBconst, |
| 3004 | ssa.OpAMD64XORQconst, ssa.OpAMD64XORLconst, ssa.OpAMD64XORWconst, ssa.OpAMD64XORBconst, |
| 3005 | ssa.OpAMD64SUBQconst, ssa.OpAMD64SUBLconst, ssa.OpAMD64SUBWconst, ssa.OpAMD64SUBBconst, |
| 3006 | ssa.OpAMD64SHLQconst, ssa.OpAMD64SHLLconst, ssa.OpAMD64SHLWconst, ssa.OpAMD64SHLBconst, |
| 3007 | ssa.OpAMD64SHRQconst, ssa.OpAMD64SHRLconst, ssa.OpAMD64SHRWconst, ssa.OpAMD64SHRBconst, |
David Chase | 40aba8c | 2015-08-05 22:11:14 -0400 | [diff] [blame] | 3008 | ssa.OpAMD64SARQconst, ssa.OpAMD64SARLconst, ssa.OpAMD64SARWconst, ssa.OpAMD64SARBconst, |
| 3009 | ssa.OpAMD64ROLQconst, ssa.OpAMD64ROLLconst, ssa.OpAMD64ROLWconst, ssa.OpAMD64ROLBconst: |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3010 | // This code compensates for the fact that the register allocator |
| 3011 | // doesn't understand 2-address instructions yet. TODO: fix that. |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3012 | x := regnum(v.Args[0]) |
| 3013 | r := regnum(v) |
| 3014 | if x != r { |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3015 | p := Prog(regMoveAMD64(v.Type.Size())) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3016 | p.From.Type = obj.TYPE_REG |
| 3017 | p.From.Reg = x |
| 3018 | p.To.Type = obj.TYPE_REG |
| 3019 | p.To.Reg = r |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3020 | } |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 3021 | p := Prog(v.Op.Asm()) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3022 | p.From.Type = obj.TYPE_CONST |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 3023 | p.From.Offset = v.AuxInt |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3024 | p.To.Type = obj.TYPE_REG |
Keith Randall | dbd83c4 | 2015-06-28 06:08:50 -0700 | [diff] [blame] | 3025 | p.To.Reg = r |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 3026 | case ssa.OpAMD64SBBQcarrymask, ssa.OpAMD64SBBLcarrymask: |
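// SBB r, r computes r - r - CF, i.e. 0 if the carry flag is clear and
// all ones if it is set, which is exactly the carry mask we want.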
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 3027 | r := regnum(v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3028 | p := Prog(v.Op.Asm()) |
Keith Randall | 6f18847 | 2015-06-10 10:39:57 -0700 | [diff] [blame] | 3029 | p.From.Type = obj.TYPE_REG |
| 3030 | p.From.Reg = r |
| 3031 | p.To.Type = obj.TYPE_REG |
| 3032 | p.To.Reg = r |
Todd Neal | d90e048 | 2015-07-23 20:01:40 -0500 | [diff] [blame] | 3033 | case ssa.OpAMD64LEAQ1, ssa.OpAMD64LEAQ2, ssa.OpAMD64LEAQ4, ssa.OpAMD64LEAQ8: |
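// LEAQx computes Args[0] + scale*Args[1] (plus any aux offset);
// the numeric suffix selects the scale factor set below.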
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3034 | p := Prog(x86.ALEAQ) |
| 3035 | p.From.Type = obj.TYPE_MEM |
| 3036 | p.From.Reg = regnum(v.Args[0]) |
Todd Neal | d90e048 | 2015-07-23 20:01:40 -0500 | [diff] [blame] | 3037 | switch v.Op { |
| 3038 | case ssa.OpAMD64LEAQ1: |
| 3039 | p.From.Scale = 1 |
| 3040 | case ssa.OpAMD64LEAQ2: |
| 3041 | p.From.Scale = 2 |
| 3042 | case ssa.OpAMD64LEAQ4: |
| 3043 | p.From.Scale = 4 |
| 3044 | case ssa.OpAMD64LEAQ8: |
| 3045 | p.From.Scale = 8 |
| 3046 | } |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3047 | p.From.Index = regnum(v.Args[1]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3048 | addAux(&p.From, v) |
| 3049 | p.To.Type = obj.TYPE_REG |
| 3050 | p.To.Reg = regnum(v) |
| 3051 | case ssa.OpAMD64LEAQ: |
| 3052 | p := Prog(x86.ALEAQ) |
| 3053 | p.From.Type = obj.TYPE_MEM |
| 3054 | p.From.Reg = regnum(v.Args[0]) |
| 3055 | addAux(&p.From, v) |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3056 | p.To.Type = obj.TYPE_REG |
| 3057 | p.To.Reg = regnum(v) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3058 | case ssa.OpAMD64CMPQ, ssa.OpAMD64CMPL, ssa.OpAMD64CMPW, ssa.OpAMD64CMPB, |
| 3059 | ssa.OpAMD64TESTQ, ssa.OpAMD64TESTL, ssa.OpAMD64TESTW, ssa.OpAMD64TESTB: |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3060 | opregreg(v.Op.Asm(), regnum(v.Args[1]), regnum(v.Args[0])) |
| 3061 | case ssa.OpAMD64UCOMISS, ssa.OpAMD64UCOMISD: |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3062 | // The Go assembler has swapped operands for UCOMISx relative to CMP;
 | 3063 | // account for that here.
| 3064 | opregreg(v.Op.Asm(), regnum(v.Args[0]), regnum(v.Args[1])) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3065 | case ssa.OpAMD64CMPQconst, ssa.OpAMD64CMPLconst, ssa.OpAMD64CMPWconst, ssa.OpAMD64CMPBconst, |
| 3066 | ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst: |
| 3067 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3068 | p.From.Type = obj.TYPE_REG |
| 3069 | p.From.Reg = regnum(v.Args[0]) |
| 3070 | p.To.Type = obj.TYPE_CONST |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 3071 | p.To.Offset = v.AuxInt |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 3072 | case ssa.OpAMD64MOVBconst, ssa.OpAMD64MOVWconst, ssa.OpAMD64MOVLconst, ssa.OpAMD64MOVQconst: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3073 | x := regnum(v) |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 3074 | p := Prog(v.Op.Asm()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3075 | p.From.Type = obj.TYPE_CONST |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 3076 | var i int64 |
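// Only the low-order bits of AuxInt matter for the narrower moves;
// truncate to the operand width and sign-extend back to 64 bits for the assembler.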
| 3077 | switch v.Op { |
| 3078 | case ssa.OpAMD64MOVBconst: |
| 3079 | i = int64(int8(v.AuxInt)) |
| 3080 | case ssa.OpAMD64MOVWconst: |
| 3081 | i = int64(int16(v.AuxInt)) |
| 3082 | case ssa.OpAMD64MOVLconst: |
| 3083 | i = int64(int32(v.AuxInt)) |
| 3084 | case ssa.OpAMD64MOVQconst: |
| 3085 | i = v.AuxInt |
| 3086 | } |
| 3087 | p.From.Offset = i |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3088 | p.To.Type = obj.TYPE_REG |
| 3089 | p.To.Reg = x |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3090 | case ssa.OpAMD64MOVSSconst, ssa.OpAMD64MOVSDconst: |
| 3091 | x := regnum(v) |
| 3092 | p := Prog(v.Op.Asm()) |
| 3093 | p.From.Type = obj.TYPE_FCONST |
Todd Neal | 19447a6 | 2015-09-04 06:33:56 -0500 | [diff] [blame] | 3094 | p.From.Val = math.Float64frombits(uint64(v.AuxInt)) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3095 | p.To.Type = obj.TYPE_REG |
| 3096 | p.To.Reg = x |
| 3097 | case ssa.OpAMD64MOVQload, ssa.OpAMD64MOVSSload, ssa.OpAMD64MOVSDload, ssa.OpAMD64MOVLload, ssa.OpAMD64MOVWload, ssa.OpAMD64MOVBload, ssa.OpAMD64MOVBQSXload, ssa.OpAMD64MOVBQZXload: |
Michael Matloob | 703ef06 | 2015-06-16 11:11:16 -0700 | [diff] [blame] | 3098 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3099 | p.From.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3100 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3101 | addAux(&p.From, v) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3102 | p.To.Type = obj.TYPE_REG |
| 3103 | p.To.Reg = regnum(v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3104 | case ssa.OpAMD64MOVQloadidx8, ssa.OpAMD64MOVSDloadidx8: |
| 3105 | p := Prog(v.Op.Asm()) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3106 | p.From.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3107 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3108 | addAux(&p.From, v) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3109 | p.From.Scale = 8 |
| 3110 | p.From.Index = regnum(v.Args[1]) |
| 3111 | p.To.Type = obj.TYPE_REG |
| 3112 | p.To.Reg = regnum(v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3113 | case ssa.OpAMD64MOVSSloadidx4: |
| 3114 | p := Prog(v.Op.Asm()) |
| 3115 | p.From.Type = obj.TYPE_MEM |
| 3116 | p.From.Reg = regnum(v.Args[0]) |
| 3117 | addAux(&p.From, v) |
| 3118 | p.From.Scale = 4 |
| 3119 | p.From.Index = regnum(v.Args[1]) |
| 3120 | p.To.Type = obj.TYPE_REG |
| 3121 | p.To.Reg = regnum(v) |
| 3122 | case ssa.OpAMD64MOVQstore, ssa.OpAMD64MOVSSstore, ssa.OpAMD64MOVSDstore, ssa.OpAMD64MOVLstore, ssa.OpAMD64MOVWstore, ssa.OpAMD64MOVBstore: |
Michael Matloob | 73054f5 | 2015-06-14 11:38:46 -0700 | [diff] [blame] | 3123 | p := Prog(v.Op.Asm()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3124 | p.From.Type = obj.TYPE_REG |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3125 | p.From.Reg = regnum(v.Args[1]) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3126 | p.To.Type = obj.TYPE_MEM |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3127 | p.To.Reg = regnum(v.Args[0]) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3128 | addAux(&p.To, v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3129 | case ssa.OpAMD64MOVQstoreidx8, ssa.OpAMD64MOVSDstoreidx8: |
| 3130 | p := Prog(v.Op.Asm()) |
Josh Bleecher Snyder | 3e3d162 | 2015-07-27 16:36:36 -0700 | [diff] [blame] | 3131 | p.From.Type = obj.TYPE_REG |
| 3132 | p.From.Reg = regnum(v.Args[2]) |
| 3133 | p.To.Type = obj.TYPE_MEM |
| 3134 | p.To.Reg = regnum(v.Args[0]) |
| 3135 | p.To.Scale = 8 |
| 3136 | p.To.Index = regnum(v.Args[1]) |
| 3137 | addAux(&p.To, v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3138 | case ssa.OpAMD64MOVSSstoreidx4: |
| 3139 | p := Prog(v.Op.Asm()) |
| 3140 | p.From.Type = obj.TYPE_REG |
| 3141 | p.From.Reg = regnum(v.Args[2]) |
| 3142 | p.To.Type = obj.TYPE_MEM |
| 3143 | p.To.Reg = regnum(v.Args[0]) |
| 3144 | p.To.Scale = 4 |
| 3145 | p.To.Index = regnum(v.Args[1]) |
| 3146 | addAux(&p.To, v) |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3147 | case ssa.OpAMD64MOVLQSX, ssa.OpAMD64MOVWQSX, ssa.OpAMD64MOVBQSX, ssa.OpAMD64MOVLQZX, ssa.OpAMD64MOVWQZX, ssa.OpAMD64MOVBQZX, |
| 3148 | ssa.OpAMD64CVTSL2SS, ssa.OpAMD64CVTSL2SD, ssa.OpAMD64CVTSQ2SS, ssa.OpAMD64CVTSQ2SD, |
Todd Neal | 634b50c | 2015-09-01 19:05:44 -0500 | [diff] [blame] | 3149 | ssa.OpAMD64CVTTSS2SL, ssa.OpAMD64CVTTSD2SL, ssa.OpAMD64CVTTSS2SQ, ssa.OpAMD64CVTTSD2SQ, |
David Chase | 4282588 | 2015-08-20 15:14:20 -0400 | [diff] [blame] | 3150 | ssa.OpAMD64CVTSS2SD, ssa.OpAMD64CVTSD2SS: |
| 3151 | opregreg(v.Op.Asm(), regnum(v), regnum(v.Args[0])) |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 3152 | case ssa.OpAMD64MOVXzero: |
| 3153 | nb := v.AuxInt |
| 3154 | offset := int64(0) |
| 3155 | reg := regnum(v.Args[0]) |
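// Zero the AuxInt bytes starting at (reg), using the widest stores first
// and narrower ones for any remainder.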
| 3156 | for nb >= 8 { |
| 3157 | nb, offset = movZero(x86.AMOVQ, 8, nb, offset, reg) |
| 3158 | } |
| 3159 | for nb >= 4 { |
| 3160 | nb, offset = movZero(x86.AMOVL, 4, nb, offset, reg) |
| 3161 | } |
| 3162 | for nb >= 2 { |
| 3163 | nb, offset = movZero(x86.AMOVW, 2, nb, offset, reg) |
| 3164 | } |
| 3165 | for nb >= 1 { |
| 3166 | nb, offset = movZero(x86.AMOVB, 1, nb, offset, reg) |
| 3167 | } |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 3168 | case ssa.OpCopy: // TODO: lower to MOVQ earlier? |
| 3169 | if v.Type.IsMemory() { |
| 3170 | return |
| 3171 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3172 | x := regnum(v.Args[0]) |
| 3173 | y := regnum(v) |
| 3174 | if x != y { |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3175 | opregreg(regMoveByTypeAMD64(v.Type), y, x) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3176 | } |
Josh Bleecher Snyder | 0bb2a50 | 2015-07-24 14:51:51 -0700 | [diff] [blame] | 3177 | case ssa.OpLoadReg: |
Josh Bleecher Snyder | 26f135d | 2015-07-20 15:22:34 -0700 | [diff] [blame] | 3178 | if v.Type.IsFlags() { |
| 3179 | v.Unimplementedf("load flags not implemented: %v", v.LongString()) |
| 3180 | return |
| 3181 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3182 | p := Prog(movSizeByType(v.Type)) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3183 | n := autoVar(v.Args[0]) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3184 | p.From.Type = obj.TYPE_MEM |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3185 | p.From.Name = obj.NAME_AUTO |
| 3186 | p.From.Node = n |
| 3187 | p.From.Sym = Linksym(n.Sym) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3188 | p.To.Type = obj.TYPE_REG |
| 3189 | p.To.Reg = regnum(v) |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3190 | |
Josh Bleecher Snyder | 0bb2a50 | 2015-07-24 14:51:51 -0700 | [diff] [blame] | 3191 | case ssa.OpStoreReg: |
Josh Bleecher Snyder | 26f135d | 2015-07-20 15:22:34 -0700 | [diff] [blame] | 3192 | if v.Type.IsFlags() { |
| 3193 | v.Unimplementedf("store flags not implemented: %v", v.LongString()) |
| 3194 | return |
| 3195 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3196 | p := Prog(movSizeByType(v.Type)) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3197 | p.From.Type = obj.TYPE_REG |
| 3198 | p.From.Reg = regnum(v.Args[0]) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3199 | n := autoVar(v) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3200 | p.To.Type = obj.TYPE_MEM |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3201 | p.To.Name = obj.NAME_AUTO |
| 3202 | p.To.Node = n |
| 3203 | p.To.Sym = Linksym(n.Sym) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3204 | case ssa.OpPhi: |
Keith Randall | 0b46b42 | 2015-08-11 12:51:33 -0700 | [diff] [blame] | 3205 | // just check to make sure regalloc and stackalloc did it right |
| 3206 | if v.Type.IsMemory() { |
| 3207 | return |
| 3208 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3209 | f := v.Block.Func |
| 3210 | loc := f.RegAlloc[v.ID] |
| 3211 | for _, a := range v.Args { |
Josh Bleecher Snyder | 5584523 | 2015-08-05 16:43:49 -0700 | [diff] [blame] | 3212 | if aloc := f.RegAlloc[a.ID]; aloc != loc { // TODO: .Equal() instead? |
| 3213 | v.Fatalf("phi arg at different location than phi: %v @ %v, but arg %v @ %v\n%s\n", v, loc, a, aloc, v.Block.Func) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3214 | } |
| 3215 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3216 | case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64, ssa.OpConstString, ssa.OpConstNil, ssa.OpConstBool, |
| 3217 | ssa.OpConst32F, ssa.OpConst64F: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3218 | if v.Block.Func.RegAlloc[v.ID] != nil { |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 3219 | v.Fatalf("const value %v shouldn't have a location", v) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3220 | } |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3221 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3222 | case ssa.OpArg: |
| 3223 | // memory arg needs no code |
Keith Randall | 8f22b52 | 2015-06-11 21:29:25 -0700 | [diff] [blame] | 3224 | // TODO: check that only mem arg goes here. |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 3225 | case ssa.OpAMD64LoweredPanicNilCheck: |
| 3226 | if Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers |
| 3227 | Warnl(int(v.Line), "generated nil check") |
| 3228 | } |
| 3229 | // Write to memory address 0. It doesn't matter what we write; use AX. |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 3230 | // Input 0 is the pointer we just checked, use it as the destination. |
| 3231 | r := regnum(v.Args[0]) |
Josh Bleecher Snyder | 463858e | 2015-08-11 09:47:45 -0700 | [diff] [blame] | 3232 | q := Prog(x86.AMOVL) |
| 3233 | q.From.Type = obj.TYPE_REG |
| 3234 | q.From.Reg = x86.REG_AX |
| 3235 | q.To.Type = obj.TYPE_MEM |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 3236 | q.To.Reg = r |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3237 | Prog(obj.AUNDEF) // tell plive.go that we never reach here |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 3238 | case ssa.OpAMD64LoweredPanicIndexCheck: |
| 3239 | p := Prog(obj.ACALL) |
| 3240 | p.To.Type = obj.TYPE_MEM |
| 3241 | p.To.Name = obj.NAME_EXTERN |
| 3242 | p.To.Sym = Linksym(Panicindex.Sym) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3243 | Prog(obj.AUNDEF) |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 3244 | case ssa.OpAMD64LoweredPanicSliceCheck: |
| 3245 | p := Prog(obj.ACALL) |
| 3246 | p.To.Type = obj.TYPE_MEM |
| 3247 | p.To.Name = obj.NAME_EXTERN |
| 3248 | p.To.Sym = Linksym(panicslice.Sym) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3249 | Prog(obj.AUNDEF) |
Josh Bleecher Snyder | 3d23afb | 2015-08-12 11:22:16 -0700 | [diff] [blame] | 3250 | case ssa.OpAMD64LoweredGetG: |
| 3251 | r := regnum(v) |
| 3252 | // See the comments in cmd/internal/obj/x86/obj6.go |
| 3253 | // near CanUse1InsnTLS for a detailed explanation of these instructions. |
| 3254 | if x86.CanUse1InsnTLS(Ctxt) { |
| 3255 | // MOVQ (TLS), r |
| 3256 | p := Prog(x86.AMOVQ) |
| 3257 | p.From.Type = obj.TYPE_MEM |
| 3258 | p.From.Reg = x86.REG_TLS |
| 3259 | p.To.Type = obj.TYPE_REG |
| 3260 | p.To.Reg = r |
| 3261 | } else { |
| 3262 | // MOVQ TLS, r |
| 3263 | // MOVQ (r)(TLS*1), r |
| 3264 | p := Prog(x86.AMOVQ) |
| 3265 | p.From.Type = obj.TYPE_REG |
| 3266 | p.From.Reg = x86.REG_TLS |
| 3267 | p.To.Type = obj.TYPE_REG |
| 3268 | p.To.Reg = r |
| 3269 | q := Prog(x86.AMOVQ) |
| 3270 | q.From.Type = obj.TYPE_MEM |
| 3271 | q.From.Reg = r |
| 3272 | q.From.Index = x86.REG_TLS |
| 3273 | q.From.Scale = 1 |
| 3274 | q.To.Type = obj.TYPE_REG |
| 3275 | q.To.Reg = r |
| 3276 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 3277 | case ssa.OpAMD64CALLstatic: |
Keith Randall | 247786c | 2015-05-28 10:47:24 -0700 | [diff] [blame] | 3278 | p := Prog(obj.ACALL) |
| 3279 | p.To.Type = obj.TYPE_MEM |
| 3280 | p.To.Name = obj.NAME_EXTERN |
| 3281 | p.To.Sym = Linksym(v.Aux.(*Sym)) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3282 | if Maxarg < v.AuxInt { |
| 3283 | Maxarg = v.AuxInt |
| 3284 | } |
Keith Randall | 290d8fc | 2015-06-10 15:03:06 -0700 | [diff] [blame] | 3285 | case ssa.OpAMD64CALLclosure: |
| 3286 | p := Prog(obj.ACALL) |
| 3287 | p.To.Type = obj.TYPE_REG |
| 3288 | p.To.Reg = regnum(v.Args[0]) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3289 | if Maxarg < v.AuxInt { |
| 3290 | Maxarg = v.AuxInt |
| 3291 | } |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3292 | case ssa.OpAMD64CALLdefer: |
| 3293 | p := Prog(obj.ACALL) |
| 3294 | p.To.Type = obj.TYPE_MEM |
| 3295 | p.To.Name = obj.NAME_EXTERN |
| 3296 | p.To.Sym = Linksym(Deferproc.Sym) |
| 3297 | if Maxarg < v.AuxInt { |
| 3298 | Maxarg = v.AuxInt |
| 3299 | } |
 | 3300 | // deferproc returns in AX:
 | 3301 | // 0 if we should continue executing,
 | 3302 | // 1 if we should jump to the deferreturn call.
| 3303 | p = Prog(x86.ATESTL) |
| 3304 | p.From.Type = obj.TYPE_REG |
| 3305 | p.From.Reg = x86.REG_AX |
| 3306 | p.To.Type = obj.TYPE_REG |
| 3307 | p.To.Reg = x86.REG_AX |
| 3308 | p = Prog(x86.AJNE) |
| 3309 | p.To.Type = obj.TYPE_BRANCH |
| 3310 | s.deferBranches = append(s.deferBranches, p) |
| 3311 | case ssa.OpAMD64CALLgo: |
| 3312 | p := Prog(obj.ACALL) |
| 3313 | p.To.Type = obj.TYPE_MEM |
| 3314 | p.To.Name = obj.NAME_EXTERN |
| 3315 | p.To.Sym = Linksym(Newproc.Sym) |
| 3316 | if Maxarg < v.AuxInt { |
| 3317 | Maxarg = v.AuxInt |
| 3318 | } |
Keith Randall | 4b80315 | 2015-07-29 17:07:09 -0700 | [diff] [blame] | 3319 | case ssa.OpAMD64NEGQ, ssa.OpAMD64NEGL, ssa.OpAMD64NEGW, ssa.OpAMD64NEGB, |
| 3320 | ssa.OpAMD64NOTQ, ssa.OpAMD64NOTL, ssa.OpAMD64NOTW, ssa.OpAMD64NOTB: |
Josh Bleecher Snyder | 93c354b6 | 2015-07-30 17:15:16 -0700 | [diff] [blame] | 3321 | x := regnum(v.Args[0]) |
| 3322 | r := regnum(v) |
| 3323 | if x != r { |
| 3324 | p := Prog(regMoveAMD64(v.Type.Size())) |
| 3325 | p.From.Type = obj.TYPE_REG |
| 3326 | p.From.Reg = x |
| 3327 | p.To.Type = obj.TYPE_REG |
| 3328 | p.To.Reg = r |
| 3329 | } |
Alexandru Moșoi | 954d5ad | 2015-07-21 16:58:18 +0200 | [diff] [blame] | 3330 | p := Prog(v.Op.Asm()) |
| 3331 | p.To.Type = obj.TYPE_REG |
Josh Bleecher Snyder | 93c354b6 | 2015-07-30 17:15:16 -0700 | [diff] [blame] | 3332 | p.To.Reg = r |
Keith Randall | a329e21 | 2015-09-12 13:26:57 -0700 | [diff] [blame] | 3333 | case ssa.OpAMD64SQRTSD: |
| 3334 | p := Prog(v.Op.Asm()) |
| 3335 | p.From.Type = obj.TYPE_REG |
| 3336 | p.From.Reg = regnum(v.Args[0]) |
| 3337 | p.To.Type = obj.TYPE_REG |
| 3338 | p.To.Reg = regnum(v) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3339 | case ssa.OpSP, ssa.OpSB: |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3340 | // nothing to do |
Josh Bleecher Snyder | a794074 | 2015-07-20 15:21:49 -0700 | [diff] [blame] | 3341 | case ssa.OpAMD64SETEQ, ssa.OpAMD64SETNE, |
| 3342 | ssa.OpAMD64SETL, ssa.OpAMD64SETLE, |
| 3343 | ssa.OpAMD64SETG, ssa.OpAMD64SETGE, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3344 | ssa.OpAMD64SETGF, ssa.OpAMD64SETGEF, |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3345 | ssa.OpAMD64SETB, ssa.OpAMD64SETBE, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3346 | ssa.OpAMD64SETORD, ssa.OpAMD64SETNAN, |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3347 | ssa.OpAMD64SETA, ssa.OpAMD64SETAE: |
Josh Bleecher Snyder | a794074 | 2015-07-20 15:21:49 -0700 | [diff] [blame] | 3348 | p := Prog(v.Op.Asm()) |
| 3349 | p.To.Type = obj.TYPE_REG |
| 3350 | p.To.Reg = regnum(v) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3351 | |
| 3352 | case ssa.OpAMD64SETNEF: |
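// x != y is also true when the comparison is unordered (NaN), which sets PF,
// so OR the SETNE result with SETPS below.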
| 3353 | p := Prog(v.Op.Asm()) |
| 3354 | p.To.Type = obj.TYPE_REG |
| 3355 | p.To.Reg = regnum(v) |
| 3356 | q := Prog(x86.ASETPS) |
| 3357 | q.To.Type = obj.TYPE_REG |
| 3358 | q.To.Reg = x86.REG_AX |
 | 3359 | // TODO: AORQ copied from the old code generator; why not AORB?
| 3360 | opregreg(x86.AORQ, regnum(v), x86.REG_AX) |
| 3361 | |
| 3362 | case ssa.OpAMD64SETEQF: |
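// x == y must be false when the comparison is unordered (NaN), so AND the
// SETEQ result with SETPC (parity clear, i.e. ordered) below.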
| 3363 | p := Prog(v.Op.Asm()) |
| 3364 | p.To.Type = obj.TYPE_REG |
| 3365 | p.To.Reg = regnum(v) |
| 3366 | q := Prog(x86.ASETPC) |
| 3367 | q.To.Type = obj.TYPE_REG |
| 3368 | q.To.Reg = x86.REG_AX |
 | 3369 | // TODO: AANDQ copied from the old code generator; why not AANDB?
| 3370 | opregreg(x86.AANDQ, regnum(v), x86.REG_AX) |
| 3371 | |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3372 | case ssa.OpAMD64InvertFlags: |
| 3373 | v.Fatalf("InvertFlags should never make it to codegen %v", v) |
| 3374 | case ssa.OpAMD64REPSTOSQ: |
Keith Randall | 0b46b42 | 2015-08-11 12:51:33 -0700 | [diff] [blame] | 3375 | p := Prog(x86.AXORL) // TODO: lift out zeroing into its own instruction? |
| 3376 | p.From.Type = obj.TYPE_REG |
| 3377 | p.From.Reg = x86.REG_AX |
| 3378 | p.To.Type = obj.TYPE_REG |
| 3379 | p.To.Reg = x86.REG_AX |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3380 | Prog(x86.AREP) |
| 3381 | Prog(x86.ASTOSQ) |
Keith Randall | 20550cb | 2015-07-28 16:04:50 -0700 | [diff] [blame] | 3382 | case ssa.OpAMD64REPMOVSB: |
| 3383 | Prog(x86.AREP) |
| 3384 | Prog(x86.AMOVSB) |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3385 | case ssa.OpVarDef: |
| 3386 | Gvardef(v.Aux.(*Node)) |
| 3387 | case ssa.OpVarKill: |
| 3388 | gvarkill(v.Aux.(*Node)) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3389 | default: |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3390 | v.Unimplementedf("genValue not implemented: %s", v.LongString()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3391 | } |
| 3392 | } |
| 3393 | |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3394 | // movSizeByType returns the MOV instruction for operands of the given type.
| 3395 | func movSizeByType(t ssa.Type) (asm int) { |
| 3396 | // For x86, there's no difference between reg move opcodes |
| 3397 | // and memory move opcodes. |
| 3398 | asm = regMoveByTypeAMD64(t) |
| 3399 | return |
Josh Bleecher Snyder | 0bb2a50 | 2015-07-24 14:51:51 -0700 | [diff] [blame] | 3400 | } |
| 3401 | |
Daniel Morsing | 66b4781 | 2015-06-27 15:45:20 +0100 | [diff] [blame] | 3402 | // movZero generates a register-indirect store of a 0 immediate and returns the remaining byte count and the next offset.
| 3403 | func movZero(as int, width int64, nbytes int64, offset int64, regnum int16) (nleft int64, noff int64) { |
| 3404 | p := Prog(as) |
| 3405 | // TODO: use zero register on archs that support it. |
| 3406 | p.From.Type = obj.TYPE_CONST |
| 3407 | p.From.Offset = 0 |
| 3408 | p.To.Type = obj.TYPE_MEM |
| 3409 | p.To.Reg = regnum |
| 3410 | p.To.Offset = offset |
| 3411 | offset += width |
| 3412 | nleft = nbytes - width |
| 3413 | return nleft, offset |
| 3414 | } |
| 3415 | |
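// blockJump maps a conditional block kind to its jump instruction (asm) and
// the inverted jump (invasm), used when the true successor is the fallthrough block.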
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3416 | var blockJump = [...]struct { |
| 3417 | asm, invasm int |
| 3418 | }{ |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3419 | ssa.BlockAMD64EQ: {x86.AJEQ, x86.AJNE}, |
| 3420 | ssa.BlockAMD64NE: {x86.AJNE, x86.AJEQ}, |
| 3421 | ssa.BlockAMD64LT: {x86.AJLT, x86.AJGE}, |
| 3422 | ssa.BlockAMD64GE: {x86.AJGE, x86.AJLT}, |
| 3423 | ssa.BlockAMD64LE: {x86.AJLE, x86.AJGT}, |
| 3424 | ssa.BlockAMD64GT: {x86.AJGT, x86.AJLE}, |
| 3425 | ssa.BlockAMD64ULT: {x86.AJCS, x86.AJCC}, |
| 3426 | ssa.BlockAMD64UGE: {x86.AJCC, x86.AJCS}, |
| 3427 | ssa.BlockAMD64UGT: {x86.AJHI, x86.AJLS}, |
| 3428 | ssa.BlockAMD64ULE: {x86.AJLS, x86.AJHI}, |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3429 | ssa.BlockAMD64ORD: {x86.AJPC, x86.AJPS}, |
| 3430 | ssa.BlockAMD64NAN: {x86.AJPS, x86.AJPC}, |
| 3431 | } |
| 3432 | |
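// floatingEQNEJump describes one branch used to lower a floating-point
// EQF/NEF block: jump is the assembler opcode and index selects which of
// b.Succs the branch targets.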
| 3433 | type floatingEQNEJump struct { |
| 3434 | jump, index int |
| 3435 | } |
| 3436 | |
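// After UCOMISx, equal operands leave ZF set with PF clear, while an
// unordered (NaN) comparison sets PF. The tables below pick the jump pair
// needed for == and != depending on which successor is the fallthrough block.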
| 3437 | var eqfJumps = [2][2]floatingEQNEJump{ |
| 3438 | {{x86.AJNE, 1}, {x86.AJPS, 1}}, // next == b.Succs[0] |
| 3439 | {{x86.AJNE, 1}, {x86.AJPC, 0}}, // next == b.Succs[1] |
| 3440 | } |
| 3441 | var nefJumps = [2][2]floatingEQNEJump{ |
| 3442 | {{x86.AJNE, 0}, {x86.AJPC, 1}}, // next == b.Succs[0] |
| 3443 | {{x86.AJNE, 0}, {x86.AJPS, 0}}, // next == b.Succs[1] |
| 3444 | } |
| 3445 | |
| 3446 | func oneFPJump(b *ssa.Block, jumps *floatingEQNEJump, likely ssa.BranchPrediction, branches []branch) []branch { |
| 3447 | p := Prog(jumps.jump) |
| 3448 | p.To.Type = obj.TYPE_BRANCH |
| 3449 | to := jumps.index |
| 3450 | branches = append(branches, branch{p, b.Succs[to]}) |
| 3451 | if to == 1 { |
| 3452 | likely = -likely |
| 3453 | } |
| 3454 | // liblink reorders the instruction stream as it sees fit. |
| 3455 | // Pass along what we know so liblink can make use of it. |
| 3456 | // TODO: Once we've fully switched to SSA, |
| 3457 | // make liblink leave our output alone. |
| 3458 | switch likely { |
| 3459 | case ssa.BranchUnlikely: |
| 3460 | p.From.Type = obj.TYPE_CONST |
| 3461 | p.From.Offset = 0 |
| 3462 | case ssa.BranchLikely: |
| 3463 | p.From.Type = obj.TYPE_CONST |
| 3464 | p.From.Offset = 1 |
| 3465 | } |
| 3466 | return branches |
| 3467 | } |
| 3468 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3469 | func genFPJump(s *genState, b, next *ssa.Block, jumps *[2][2]floatingEQNEJump) { |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3470 | likely := b.Likely |
| 3471 | switch next { |
| 3472 | case b.Succs[0]: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3473 | s.branches = oneFPJump(b, &jumps[0][0], likely, s.branches) |
| 3474 | s.branches = oneFPJump(b, &jumps[0][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3475 | case b.Succs[1]: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3476 | s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches) |
| 3477 | s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3478 | default: |
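// Neither successor is the fallthrough block: emit the conditional jumps as
// for the next == Succs[1] layout, then jump unconditionally to Succs[1].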
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3479 | s.branches = oneFPJump(b, &jumps[1][0], likely, s.branches) |
| 3480 | s.branches = oneFPJump(b, &jumps[1][1], likely, s.branches) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3481 | q := Prog(obj.AJMP) |
| 3482 | q.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3483 | s.branches = append(s.branches, branch{q, b.Succs[1]}) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3484 | } |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3485 | } |
| 3486 | |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3487 | func (s *genState) genBlock(b, next *ssa.Block) { |
Michael Matloob | 81ccf50 | 2015-05-30 01:03:06 -0400 | [diff] [blame] | 3488 | lineno = b.Line |
Keith Randall | 8d23681 | 2015-08-18 15:25:40 -0700 | [diff] [blame] | 3489 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3490 | switch b.Kind { |
Keith Randall | f5c53e0 | 2015-09-09 18:03:41 -0700 | [diff] [blame] | 3491 | case ssa.BlockPlain, ssa.BlockCall: |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3492 | if b.Succs[0] != next { |
| 3493 | p := Prog(obj.AJMP) |
| 3494 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3495 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3496 | } |
| 3497 | case ssa.BlockExit: |
Keith Randall | 10f38f5 | 2015-09-03 09:09:59 -0700 | [diff] [blame] | 3498 | case ssa.BlockRet: |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 3499 | if hasdefer { |
Keith Randall | ca9e450 | 2015-09-08 08:59:57 -0700 | [diff] [blame] | 3500 | s.deferReturn() |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3501 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3502 | Prog(obj.ARET) |
Keith Randall | 8a1f621 | 2015-09-08 21:28:44 -0700 | [diff] [blame] | 3503 | case ssa.BlockRetJmp: |
| 3504 | p := Prog(obj.AJMP) |
| 3505 | p.To.Type = obj.TYPE_MEM |
| 3506 | p.To.Name = obj.NAME_EXTERN |
| 3507 | p.To.Sym = Linksym(b.Aux.(*Sym)) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3508 | |
| 3509 | case ssa.BlockAMD64EQF: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3510 | genFPJump(s, b, next, &eqfJumps) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3511 | |
| 3512 | case ssa.BlockAMD64NEF: |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3513 | genFPJump(s, b, next, &nefJumps) |
David Chase | 8e601b2 | 2015-08-18 14:39:26 -0400 | [diff] [blame] | 3514 | |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3515 | case ssa.BlockAMD64EQ, ssa.BlockAMD64NE, |
| 3516 | ssa.BlockAMD64LT, ssa.BlockAMD64GE, |
| 3517 | ssa.BlockAMD64LE, ssa.BlockAMD64GT, |
| 3518 | ssa.BlockAMD64ULT, ssa.BlockAMD64UGT, |
| 3519 | ssa.BlockAMD64ULE, ssa.BlockAMD64UGE: |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3520 | jmp := blockJump[b.Kind] |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 3521 | likely := b.Likely |
| 3522 | var p *obj.Prog |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3523 | switch next { |
| 3524 | case b.Succs[0]: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 3525 | p = Prog(jmp.invasm) |
| 3526 | likely *= -1 |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3527 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3528 | s.branches = append(s.branches, branch{p, b.Succs[1]}) |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3529 | case b.Succs[1]: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 3530 | p = Prog(jmp.asm) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3531 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3532 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Josh Bleecher Snyder | 71b5707 | 2015-07-24 12:47:00 -0700 | [diff] [blame] | 3533 | default: |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 3534 | p = Prog(jmp.asm) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3535 | p.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3536 | s.branches = append(s.branches, branch{p, b.Succs[0]}) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3537 | q := Prog(obj.AJMP) |
| 3538 | q.To.Type = obj.TYPE_BRANCH |
Keith Randall | 9569b95 | 2015-08-28 22:51:01 -0700 | [diff] [blame] | 3539 | s.branches = append(s.branches, branch{q, b.Succs[1]}) |
Keith Randall | cfc2aa5 | 2015-05-18 16:44:20 -0700 | [diff] [blame] | 3540 | } |
| 3541 | |
Josh Bleecher Snyder | bbf8c5c | 2015-08-11 17:28:56 -0700 | [diff] [blame] | 3542 | // liblink reorders the instruction stream as it sees fit. |
| 3543 | // Pass along what we know so liblink can make use of it. |
| 3544 | // TODO: Once we've fully switched to SSA, |
| 3545 | // make liblink leave our output alone. |
| 3546 | switch likely { |
| 3547 | case ssa.BranchUnlikely: |
| 3548 | p.From.Type = obj.TYPE_CONST |
| 3549 | p.From.Offset = 0 |
| 3550 | case ssa.BranchLikely: |
| 3551 | p.From.Type = obj.TYPE_CONST |
| 3552 | p.From.Offset = 1 |
| 3553 | } |
| 3554 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3555 | default: |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3556 | b.Unimplementedf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString()) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3557 | } |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3558 | } |
| 3559 | |
Keith Randall | ca9e450 | 2015-09-08 08:59:57 -0700 | [diff] [blame] | 3560 | func (s *genState) deferReturn() { |
| 3561 | // Deferred calls will appear to be returning to |
| 3562 | // the CALL deferreturn(SB) that we are about to emit. |
| 3563 | // However, the stack trace code will show the line |
| 3564 | // of the instruction byte before the return PC. |
| 3565 | // To avoid that being an unrelated instruction, |
| 3566 | // insert an actual hardware NOP that will have the right line number. |
| 3567 | // This is different from obj.ANOP, which is a virtual no-op |
| 3568 | // that doesn't make it into the instruction stream. |
| 3569 | s.deferTarget = Pc |
| 3570 | Thearch.Ginsnop() |
| 3571 | p := Prog(obj.ACALL) |
| 3572 | p.To.Type = obj.TYPE_MEM |
| 3573 | p.To.Name = obj.NAME_EXTERN |
| 3574 | p.To.Sym = Linksym(Deferreturn.Sym) |
| 3575 | } |
| 3576 | |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3577 | // addAux adds the offset in the aux fields (AuxInt and Aux) of v to a. |
| 3578 | func addAux(a *obj.Addr, v *ssa.Value) { |
| 3579 | if a.Type != obj.TYPE_MEM { |
| 3580 | v.Fatalf("bad addAux addr %s", a) |
| 3581 | } |
| 3582 | // add integer offset |
| 3583 | a.Offset += v.AuxInt |
| 3584 | |
| 3585 | // If no additional symbol offset, we're done. |
| 3586 | if v.Aux == nil { |
| 3587 | return |
| 3588 | } |
| 3589 | // Add symbol's offset from its base register. |
| 3590 | switch sym := v.Aux.(type) { |
| 3591 | case *ssa.ExternSymbol: |
| 3592 | a.Name = obj.NAME_EXTERN |
| 3593 | a.Sym = Linksym(sym.Sym.(*Sym)) |
| 3594 | case *ssa.ArgSymbol: |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3595 | n := sym.Node.(*Node) |
| 3596 | a.Name = obj.NAME_PARAM |
| 3597 | a.Node = n |
| 3598 | a.Sym = Linksym(n.Orig.Sym) |
| 3599 | a.Offset += n.Xoffset // TODO: why do I have to add this here? I don't for auto variables. |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3600 | case *ssa.AutoSymbol: |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3601 | n := sym.Node.(*Node) |
| 3602 | a.Name = obj.NAME_AUTO |
| 3603 | a.Node = n |
| 3604 | a.Sym = Linksym(n.Sym) |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3605 | default: |
| 3606 | v.Fatalf("aux in %s not implemented %#v", v, v.Aux) |
| 3607 | } |
| 3608 | } |
| 3609 | |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 3610 | // extendIndex extends v to a full pointer width. |
| 3611 | func (s *state) extendIndex(v *ssa.Value) *ssa.Value { |
| 3612 | size := v.Type.Size() |
| 3613 | if size == s.config.PtrSize { |
| 3614 | return v |
| 3615 | } |
| 3616 | if size > s.config.PtrSize { |
| 3617 | // TODO: truncate 64-bit indexes on 32-bit pointer archs. We'd need to test |
| 3618 | // the high word and branch to out-of-bounds failure if it is not 0. |
| 3619 | s.Unimplementedf("64->32 index truncation not implemented") |
| 3620 | return v |
| 3621 | } |
| 3622 | |
| 3623 | // Extend value to the required size |
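// The switch key encodes the index size and pointer size as 10*size+PtrSize,
// e.g. 14 extends a 1-byte index for 4-byte pointers, 48 a 4-byte index for
// 8-byte pointers.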
| 3624 | var op ssa.Op |
| 3625 | if v.Type.IsSigned() { |
| 3626 | switch 10*size + s.config.PtrSize { |
| 3627 | case 14: |
| 3628 | op = ssa.OpSignExt8to32 |
| 3629 | case 18: |
| 3630 | op = ssa.OpSignExt8to64 |
| 3631 | case 24: |
| 3632 | op = ssa.OpSignExt16to32 |
| 3633 | case 28: |
| 3634 | op = ssa.OpSignExt16to64 |
| 3635 | case 48: |
| 3636 | op = ssa.OpSignExt32to64 |
| 3637 | default: |
| 3638 | s.Fatalf("bad signed index extension %s", v.Type) |
| 3639 | } |
| 3640 | } else { |
| 3641 | switch 10*size + s.config.PtrSize { |
| 3642 | case 14: |
| 3643 | op = ssa.OpZeroExt8to32 |
| 3644 | case 18: |
| 3645 | op = ssa.OpZeroExt8to64 |
| 3646 | case 24: |
| 3647 | op = ssa.OpZeroExt16to32 |
| 3648 | case 28: |
| 3649 | op = ssa.OpZeroExt16to64 |
| 3650 | case 48: |
| 3651 | op = ssa.OpZeroExt32to64 |
| 3652 | default: |
| 3653 | s.Fatalf("bad unsigned index extension %s", v.Type) |
| 3654 | } |
| 3655 | } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 3656 | return s.newValue1(op, Types[TUINTPTR], v) |
Keith Randall | 2a5e6c4 | 2015-07-23 14:35:02 -0700 | [diff] [blame] | 3657 | } |
| 3658 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3659 | // ssaRegToReg maps ssa register numbers to obj register numbers. |
| 3660 | var ssaRegToReg = [...]int16{ |
| 3661 | x86.REG_AX, |
| 3662 | x86.REG_CX, |
| 3663 | x86.REG_DX, |
| 3664 | x86.REG_BX, |
| 3665 | x86.REG_SP, |
| 3666 | x86.REG_BP, |
| 3667 | x86.REG_SI, |
| 3668 | x86.REG_DI, |
| 3669 | x86.REG_R8, |
| 3670 | x86.REG_R9, |
| 3671 | x86.REG_R10, |
| 3672 | x86.REG_R11, |
| 3673 | x86.REG_R12, |
| 3674 | x86.REG_R13, |
| 3675 | x86.REG_R14, |
| 3676 | x86.REG_R15, |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3677 | x86.REG_X0, |
| 3678 | x86.REG_X1, |
| 3679 | x86.REG_X2, |
| 3680 | x86.REG_X3, |
| 3681 | x86.REG_X4, |
| 3682 | x86.REG_X5, |
| 3683 | x86.REG_X6, |
| 3684 | x86.REG_X7, |
| 3685 | x86.REG_X8, |
| 3686 | x86.REG_X9, |
| 3687 | x86.REG_X10, |
| 3688 | x86.REG_X11, |
| 3689 | x86.REG_X12, |
| 3690 | x86.REG_X13, |
| 3691 | x86.REG_X14, |
| 3692 | x86.REG_X15, |
| 3693 | 0, // SB isn't a real register. We fill an Addr.Reg field with 0 in this case. |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3694 | // TODO: arch-dependent |
| 3695 | } |
| 3696 | |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 3697 | // regMoveAMD64 returns the register->register move opcode for the given width. |
| 3698 | // TODO: generalize for all architectures? |
| 3699 | func regMoveAMD64(width int64) int { |
| 3700 | switch width { |
| 3701 | case 1: |
| 3702 | return x86.AMOVB |
| 3703 | case 2: |
| 3704 | return x86.AMOVW |
| 3705 | case 4: |
| 3706 | return x86.AMOVL |
| 3707 | case 8: |
| 3708 | return x86.AMOVQ |
| 3709 | default: |
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3710 | panic("bad int register width") |
Keith Randall | 9cb332e | 2015-07-28 14:19:20 -0700 | [diff] [blame] | 3711 | } |
| 3712 | } |
| 3713 | |
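// regMoveByTypeAMD64 returns the register->register move opcode for values
// of type t, choosing between integer and floating-point moves.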
David Chase | 997a9f3 | 2015-08-12 16:38:11 -0400 | [diff] [blame] | 3714 | func regMoveByTypeAMD64(t ssa.Type) int { |
| 3715 | width := t.Size() |
| 3716 | if t.IsFloat() { |
| 3717 | switch width { |
| 3718 | case 4: |
| 3719 | return x86.AMOVSS |
| 3720 | case 8: |
| 3721 | return x86.AMOVSD |
| 3722 | default: |
| 3723 | panic("bad float register width") |
| 3724 | } |
| 3725 | } else { |
| 3726 | switch width { |
| 3727 | case 1: |
| 3728 | return x86.AMOVB |
| 3729 | case 2: |
| 3730 | return x86.AMOVW |
| 3731 | case 4: |
| 3732 | return x86.AMOVL |
| 3733 | case 8: |
| 3734 | return x86.AMOVQ |
| 3735 | default: |
| 3736 | panic("bad int register width") |
| 3737 | } |
| 3738 | } |
| 3739 | |
| 3740 | panic("bad register type") |
| 3741 | } |
| 3742 | |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3743 | // regnum returns the register (in cmd/internal/obj numbering) to |
 | 3744 | // which v has been allocated. Reports an unimplemented error and
 | 3745 | // returns 0 if v is not assigned to a register.
Josh Bleecher Snyder | e139549 | 2015-08-05 16:06:39 -0700 | [diff] [blame] | 3746 | // TODO: Make this panic again once it stops happening routinely. |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3747 | func regnum(v *ssa.Value) int16 { |
Josh Bleecher Snyder | e139549 | 2015-08-05 16:06:39 -0700 | [diff] [blame] | 3748 | reg := v.Block.Func.RegAlloc[v.ID] |
| 3749 | if reg == nil { |
| 3750 | v.Unimplementedf("nil regnum for value: %s\n%s\n", v.LongString(), v.Block.Func) |
| 3751 | return 0 |
| 3752 | } |
| 3753 | return ssaRegToReg[reg.(*ssa.Register).Num] |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3754 | } |
| 3755 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3756 | // autoVar returns a *Node representing the auto variable assigned to v. |
| 3757 | func autoVar(v *ssa.Value) *Node { |
| 3758 | return v.Block.Func.RegAlloc[v.ID].(*ssa.LocalSlot).N.(*Node) |
Keith Randall | 083a646 | 2015-05-12 11:06:44 -0700 | [diff] [blame] | 3759 | } |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 3760 | |
| 3761 | // ssaExport exports a bunch of compiler services for the ssa backend. |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3762 | type ssaExport struct { |
| 3763 | log bool |
| 3764 | unimplemented bool |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3765 | mustImplement bool |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3766 | } |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 3767 | |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 3768 | func (s *ssaExport) TypeBool() ssa.Type { return Types[TBOOL] } |
| 3769 | func (s *ssaExport) TypeInt8() ssa.Type { return Types[TINT8] } |
| 3770 | func (s *ssaExport) TypeInt16() ssa.Type { return Types[TINT16] } |
| 3771 | func (s *ssaExport) TypeInt32() ssa.Type { return Types[TINT32] } |
| 3772 | func (s *ssaExport) TypeInt64() ssa.Type { return Types[TINT64] } |
| 3773 | func (s *ssaExport) TypeUInt8() ssa.Type { return Types[TUINT8] } |
| 3774 | func (s *ssaExport) TypeUInt16() ssa.Type { return Types[TUINT16] } |
| 3775 | func (s *ssaExport) TypeUInt32() ssa.Type { return Types[TUINT32] } |
| 3776 | func (s *ssaExport) TypeUInt64() ssa.Type { return Types[TUINT64] } |
David Chase | 5257858 | 2015-08-28 14:24:10 -0400 | [diff] [blame] | 3777 | func (s *ssaExport) TypeFloat32() ssa.Type { return Types[TFLOAT32] } |
| 3778 | func (s *ssaExport) TypeFloat64() ssa.Type { return Types[TFLOAT64] } |
Josh Bleecher Snyder | 85e0329 | 2015-07-30 11:03:05 -0700 | [diff] [blame] | 3779 | func (s *ssaExport) TypeInt() ssa.Type { return Types[TINT] } |
| 3780 | func (s *ssaExport) TypeUintptr() ssa.Type { return Types[TUINTPTR] } |
| 3781 | func (s *ssaExport) TypeString() ssa.Type { return Types[TSTRING] } |
| 3782 | func (s *ssaExport) TypeBytePtr() ssa.Type { return Ptrto(Types[TUINT8]) } |
| 3783 | |
Josh Bleecher Snyder | 8d31df18a | 2015-07-24 11:28:12 -0700 | [diff] [blame] | 3784 | // StringData returns an *ssa.ExternSymbol (wrapped in an interface) referring to
 | 3785 | // the data component of a global string constant containing s.
| 3786 | func (*ssaExport) StringData(s string) interface{} { |
Keith Randall | 8c46aa5 | 2015-06-19 21:02:28 -0700 | [diff] [blame] | 3787 | // TODO: is idealstring correct? It might not matter... |
Josh Bleecher Snyder | 8d31df18a | 2015-07-24 11:28:12 -0700 | [diff] [blame] | 3788 | _, data := stringsym(s) |
| 3789 | return &ssa.ExternSymbol{Typ: idealstring, Sym: data} |
Keith Randall | f7f604e | 2015-05-27 14:52:22 -0700 | [diff] [blame] | 3790 | } |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3791 | |
Keith Randall | d2107fc | 2015-08-24 02:16:19 -0700 | [diff] [blame] | 3792 | func (e *ssaExport) Auto(t ssa.Type) fmt.Stringer { |
| 3793 | n := temp(t.(*Type)) // Note: adds new auto to Curfn.Func.Dcl list |
| 3794 | e.mustImplement = true // This modifies the input to SSA, so we want to make sure we succeed from here! |
| 3795 | return n |
| 3796 | } |
| 3797 | |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3798 | // Logf logs a message from the compiler.
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 3799 | func (e *ssaExport) Logf(msg string, args ...interface{}) { |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3800 | // If e was marked as unimplemented, anything could happen. Ignore. |
| 3801 | if e.log && !e.unimplemented { |
| 3802 | fmt.Printf(msg, args...) |
| 3803 | } |
| 3804 | } |
| 3805 | |
 | 3806 | // Fatalf reports a compiler error and exits.
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 3807 | func (e *ssaExport) Fatalf(msg string, args ...interface{}) { |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3808 | // If e was marked as unimplemented, anything could happen. Ignore. |
| 3809 | if !e.unimplemented { |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 3810 | Fatalf(msg, args...) |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3811 | } |
| 3812 | } |
| 3813 | |
 | 3814 | // Unimplementedf reports that the function cannot be compiled.
| 3815 | // It will be removed once SSA work is complete. |
Josh Bleecher Snyder | 37ddc27 | 2015-06-24 14:03:39 -0700 | [diff] [blame] | 3816 | func (e *ssaExport) Unimplementedf(msg string, args ...interface{}) { |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3817 | if e.mustImplement { |
Keith Randall | 0ec72b6 | 2015-09-08 15:42:53 -0700 | [diff] [blame] | 3818 | Fatalf(msg, args...) |
Josh Bleecher Snyder | d298209 | 2015-07-22 13:13:53 -0700 | [diff] [blame] | 3819 | } |
Josh Bleecher Snyder | 8c6abfe | 2015-06-12 11:01:13 -0700 | [diff] [blame] | 3820 | const alwaysLog = false // enable to calculate top unimplemented features |
| 3821 | if !e.unimplemented && (e.log || alwaysLog) { |
| 3822 | // first implementation failure, print explanation |
| 3823 | fmt.Printf("SSA unimplemented: "+msg+"\n", args...) |
| 3824 | } |
| 3825 | e.unimplemented = true |
| 3826 | } |