[dev.ssa] cmd/compile: be safer about uintptr/unsafe.Pointer conversions

Make sure that when a pointer value is live across a function
call, we save it as a pointer.  (And similarly, a uintptr live
across a function call must not be saved as a pointer.)

Add a nasty test case.

This is probably what is preventing the merge from master
to dev.ssa.  Signs point to something like this bug happening
in mallocgc.
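
In miniature, the hazard looks like the sketch below (identifiers
here are illustrative; the real reproducer is the new
testdata/unsafe_ssa.go, which also branches on a global bool so
the two conversions cannot be folded together):

	package main

	import (
		"runtime"
		"unsafe"
	)

	var a *[8]uint

	func f() *[8]uint {
		x := uintptr(unsafe.Pointer(a)) // strip pointerness
		a = nil                         // object now reachable only via x
		p := unsafe.Pointer(x)          // restore pointerness
		runtime.GC()                    // p must be in the stack map here
		return (*[8]uint)(p)
	}

	func main() {
		a = new([8]uint)
		_ = f()
	}

If copy elision leaves p in a slot typed as uintptr, the stack
map does not mark it as a pointer and the GC can collect the
object while it is still in use.  The fix gives the generic
Convert op (and its AMD64 lowering, MOVQconvert) a memory
argument, so the conversion is ordered with respect to GC
safepoints instead of being rewritten to a plain LEAQ.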

Change-Id: Ib23fa1251b8d1c50d82c6a448cb4a4fc28219029
Reviewed-on: https://go-review.googlesource.com/16830
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 0b67480..4cdfa5c 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -1375,7 +1375,7 @@
 		// as not-pointers or vice-versa because of copy
 		// elision.
 		if to.IsPtr() != from.IsPtr() {
-			return s.newValue1(ssa.OpConvert, to, x)
+			return s.newValue2(ssa.OpConvert, to, x, s.mem())
 		}
 
 		v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
@@ -3886,7 +3886,7 @@
 		p.To.Sym = Linksym(Pkglookup("duffcopy", Runtimepkg))
 		p.To.Offset = v.AuxInt
 
-	case ssa.OpCopy: // TODO: lower to MOVQ earlier?
+	case ssa.OpCopy, ssa.OpAMD64MOVQconvert: // TODO: lower Copy to MOVQ earlier?
 		if v.Type.IsMemory() {
 			return
 		}
diff --git a/src/cmd/compile/internal/gc/ssa_test.go b/src/cmd/compile/internal/gc/ssa_test.go
index 5a881ed..74fa847 100644
--- a/src/cmd/compile/internal/gc/ssa_test.go
+++ b/src/cmd/compile/internal/gc/ssa_test.go
@@ -93,3 +93,5 @@
 func TestAddressed(t *testing.T) { runTest(t, "addressed_ssa.go") }
 
 func TestCopy(t *testing.T) { runTest(t, "copy_ssa.go") }
+
+func TestUnsafe(t *testing.T) { runTest(t, "unsafe_ssa.go") }
diff --git a/src/cmd/compile/internal/gc/testdata/unsafe_ssa.go b/src/cmd/compile/internal/gc/testdata/unsafe_ssa.go
new file mode 100644
index 0000000..bc29282
--- /dev/null
+++ b/src/cmd/compile/internal/gc/testdata/unsafe_ssa.go
@@ -0,0 +1,129 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"runtime"
+	"unsafe"
+)
+
+// global pointer slot
+var a *[8]uint
+
+// b is always true, but the compiler cannot prove it, so tests of b are not folded away.
+var b = true
+
+// Test to make sure that a pointer value which is alive
+// across a call is retained, even when there are matching
+// conversions to/from uintptr around the call.
+// We arrange things very carefully to have to/from
+// conversions on either side of the call which cannot be
+// combined with any other conversions.
+func f_ssa() *[8]uint {
+	// Make x a uintptr pointing to where a points.
+	var x uintptr
+	if b {
+		x = uintptr(unsafe.Pointer(a))
+	} else {
+		x = 0
+	}
+	// Clobber the global pointer.  The only live ref
+	// to the allocated object is now x.
+	a = nil
+
+	// Convert back to a pointer; it must keep
+	// the object live across the GC call below.
+	p := unsafe.Pointer(x)
+
+	// Call gc.
+	runtime.GC()
+
+	// Convert back to uintptr.
+	y := uintptr(p)
+
+	// Mess with y so that the subsequent cast
+	// to unsafe.Pointer can't be combined with the
+	// uintptr cast above.
+	var z uintptr
+	if b {
+		z = y
+	} else {
+		z = 0
+	}
+	return (*[8]uint)(unsafe.Pointer(z))
+}
+
+// g_ssa is the same as f_ssa, but with a bit of pointer
+// arithmetic for added insanity.
+func g_ssa() *[7]uint {
+	// Make x a uintptr pointing to where a points.
+	var x uintptr
+	if b {
+		x = uintptr(unsafe.Pointer(a))
+	} else {
+		x = 0
+	}
+	// Clobber the global pointer.  The only live ref
+	// to the allocated object is now x.
+	a = nil
+
+	// Offset x by one int.
+	x += unsafe.Sizeof(int(0))
+
+	// Convert back to a pointer; it must keep
+	// the object live across the GC call below.
+	p := unsafe.Pointer(x)
+
+	// Call gc.
+	runtime.GC()
+
+	// Convert back to uintptr.
+	y := uintptr(p)
+
+	// Mess with y so that the subsequent cast
+	// to unsafe.Pointer can't be combined with the
+	// uintptr cast above.
+	var z uintptr
+	if b {
+		z = y
+	} else {
+		z = 0
+	}
+	return (*[7]uint)(unsafe.Pointer(z))
+}
+
+func testf() {
+	a = new([8]uint)
+	for i := 0; i < 8; i++ {
+		a[i] = 0xabcd
+	}
+	c := f_ssa()
+	for i := 0; i < 8; i++ {
+		if c[i] != 0xabcd {
+			fmt.Printf("%d:%x\n", i, c[i])
+			panic("bad c")
+		}
+	}
+}
+
+func testg() {
+	a = new([8]uint)
+	for i := 0; i < 8; i++ {
+		a[i] = 0xabcd
+	}
+	c := g_ssa()
+	for i := 0; i < 7; i++ {
+		if c[i] != 0xabcd {
+			fmt.Printf("%d:%x\n", i, c[i])
+			panic("bad c")
+		}
+	}
+}
+
+func main() {
+	testf()
+	testg()
+}
diff --git a/src/cmd/compile/internal/ssa/gen/AMD64.rules b/src/cmd/compile/internal/ssa/gen/AMD64.rules
index 4364022..7d0aa4b 100644
--- a/src/cmd/compile/internal/ssa/gen/AMD64.rules
+++ b/src/cmd/compile/internal/ssa/gen/AMD64.rules
@@ -281,7 +281,7 @@
 (Store [1] ptr val mem) -> (MOVBstore ptr val mem)
 
 // We want this to stick out so the to/from ptr conversion is obvious
-(Convert <t> x) -> (LEAQ <t> x)
+(Convert <t> x mem) -> (MOVQconvert <t> x mem)
 
 // checks
 (IsNonNil p) -> (SETNE (TESTQ p p))
diff --git a/src/cmd/compile/internal/ssa/gen/AMD64Ops.go b/src/cmd/compile/internal/ssa/gen/AMD64Ops.go
index fa5072f..ba53e81 100644
--- a/src/cmd/compile/internal/ssa/gen/AMD64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/AMD64Ops.go
@@ -465,6 +465,13 @@
 		{name: "LoweredGetClosurePtr", reg: regInfo{outputs: []regMask{buildReg("DX")}}},
 		//arg0=ptr,arg1=mem, returns void.  Faults if ptr is nil.
 		{name: "LoweredNilCheck", reg: regInfo{inputs: []regMask{gpsp}, clobbers: flags}},
+
+		// MOVQconvert converts between pointers and integers.
+		// We have a special op for this so as to not confuse GC
+		// (particularly stack maps).  It takes a memory arg so it
+		// gets correctly ordered with respect to GC safepoints.
+		// arg0=ptr/int arg1=mem, output=int/ptr
+		{name: "MOVQconvert", reg: gp11nf, asm: "MOVQ"},
 	}
 
 	var AMD64blocks = []blockData{
diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index d3de24d..5de877d 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -274,7 +274,8 @@
 (If (ConstBool [c]) yes no) && c == 0 -> (First nil no yes)
 
 // Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
-(Convert (Add64 (Convert ptr) off)) -> (Add64 ptr off)
+(Convert (Add64 (Convert ptr mem) off) mem) -> (Add64 ptr off)
+(Convert (Convert ptr mem) mem) -> ptr
 
 // Decompose compound argument values
 (Arg {n} [off]) && v.Type.IsString() ->
diff --git a/src/cmd/compile/internal/ssa/gen/genericOps.go b/src/cmd/compile/internal/ssa/gen/genericOps.go
index ead0cfd..e57dd93 100644
--- a/src/cmd/compile/internal/ssa/gen/genericOps.go
+++ b/src/cmd/compile/internal/ssa/gen/genericOps.go
@@ -236,9 +236,14 @@
 	{name: "Sqrt"}, // sqrt(arg0), float64 only
 
 	// Data movement
-	{name: "Phi"},     // select an argument based on which predecessor block we came from
-	{name: "Copy"},    // output = arg0
-	{name: "Convert"}, // output = arg0 -- a copy that converts to/from a pointer
+	{name: "Phi"},  // select an argument based on which predecessor block we came from
+	{name: "Copy"}, // output = arg0
+	// Convert converts between pointers and integers.
+	// We have a special op for this so as to not confuse GC
+	// (particularly stack maps).  It takes a memory arg so it
+	// gets correctly ordered with respect to GC safepoints.
+	// arg0=ptr/int arg1=mem, output=int/ptr
+	{name: "Convert"},
 
 	// constants.  Constant values are stored in the aux field.
 	// booleans have a bool aux field, strings have a string aux
diff --git a/src/cmd/compile/internal/ssa/opGen.go b/src/cmd/compile/internal/ssa/opGen.go
index d043e07..132ca83 100644
--- a/src/cmd/compile/internal/ssa/opGen.go
+++ b/src/cmd/compile/internal/ssa/opGen.go
@@ -282,6 +282,7 @@
 	OpAMD64LoweredGetG
 	OpAMD64LoweredGetClosurePtr
 	OpAMD64LoweredNilCheck
+	OpAMD64MOVQconvert
 
 	OpAdd8
 	OpAdd16
@@ -3219,6 +3220,18 @@
 			clobbers: 8589934592, // .FLAGS
 		},
 	},
+	{
+		name: "MOVQconvert",
+		asm:  x86.AMOVQ,
+		reg: regInfo{
+			inputs: []inputInfo{
+				{0, 65535}, // .AX .CX .DX .BX .SP .BP .SI .DI .R8 .R9 .R10 .R11 .R12 .R13 .R14 .R15
+			},
+			outputs: []regMask{
+				65519, // .AX .CX .DX .BX .BP .SI .DI .R8 .R9 .R10 .R11 .R12 .R13 .R14 .R15
+			},
+		},
+	},
 
 	{
 		name:    "Add8",
diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64.go b/src/cmd/compile/internal/ssa/rewriteAMD64.go
index cfdd5a2..3be94e3 100644
--- a/src/cmd/compile/internal/ssa/rewriteAMD64.go
+++ b/src/cmd/compile/internal/ssa/rewriteAMD64.go
@@ -2585,22 +2585,24 @@
 func rewriteValueAMD64_OpConvert(v *Value, config *Config) bool {
 	b := v.Block
 	_ = b
-	// match: (Convert <t> x)
+	// match: (Convert <t> x mem)
 	// cond:
-	// result: (LEAQ <t> x)
+	// result: (MOVQconvert <t> x mem)
 	{
 		t := v.Type
 		x := v.Args[0]
-		v.Op = OpAMD64LEAQ
+		mem := v.Args[1]
+		v.Op = OpAMD64MOVQconvert
 		v.AuxInt = 0
 		v.Aux = nil
 		v.resetArgs()
 		v.Type = t
 		v.AddArg(x)
+		v.AddArg(mem)
 		return true
 	}
-	goto end1cac40a6074914d6ae3d4aa039a625ed
-end1cac40a6074914d6ae3d4aa039a625ed:
+	goto end0aa5cd28888761ffab21bce45db361c8
+end0aa5cd28888761ffab21bce45db361c8:
 	;
 	return false
 }
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 174967a..9563e87 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -926,18 +926,22 @@
 func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
 	b := v.Block
 	_ = b
-	// match: (Convert (Add64 (Convert ptr) off))
+	// match: (Convert (Add64 (Convert ptr mem) off) mem)
 	// cond:
 	// result: (Add64 ptr off)
 	{
 		if v.Args[0].Op != OpAdd64 {
-			goto end913a7ecf456c00ffbee36c2dbbf0e1af
+			goto endbbc9f1666b4d39a130e1b86f109e7c1b
 		}
 		if v.Args[0].Args[0].Op != OpConvert {
-			goto end913a7ecf456c00ffbee36c2dbbf0e1af
+			goto endbbc9f1666b4d39a130e1b86f109e7c1b
 		}
 		ptr := v.Args[0].Args[0].Args[0]
+		mem := v.Args[0].Args[0].Args[1]
 		off := v.Args[0].Args[1]
+		if v.Args[1] != mem {
+			goto endbbc9f1666b4d39a130e1b86f109e7c1b
+		}
 		v.Op = OpAdd64
 		v.AuxInt = 0
 		v.Aux = nil
@@ -946,8 +950,31 @@
 		v.AddArg(off)
 		return true
 	}
-	goto end913a7ecf456c00ffbee36c2dbbf0e1af
-end913a7ecf456c00ffbee36c2dbbf0e1af:
+	goto endbbc9f1666b4d39a130e1b86f109e7c1b
+endbbc9f1666b4d39a130e1b86f109e7c1b:
+	;
+	// match: (Convert (Convert ptr mem) mem)
+	// cond:
+	// result: ptr
+	{
+		if v.Args[0].Op != OpConvert {
+			goto end98c5e0ca257eb216989171786f91b42d
+		}
+		ptr := v.Args[0].Args[0]
+		mem := v.Args[0].Args[1]
+		if v.Args[1] != mem {
+			goto end98c5e0ca257eb216989171786f91b42d
+		}
+		v.Op = OpCopy
+		v.AuxInt = 0
+		v.Aux = nil
+		v.resetArgs()
+		v.Type = ptr.Type
+		v.AddArg(ptr)
+		return true
+	}
+	goto end98c5e0ca257eb216989171786f91b42d
+end98c5e0ca257eb216989171786f91b42d:
 	;
 	return false
 }