go.tools/pointer: reflect, part 2: channels.
(reflect.Value).Send
(reflect.Value).TrySend
(reflect.Value).Recv
(reflect.Value).TryRecv
(reflect.Type).ChanOf
(reflect.Type).In
(reflect.Type).Out
reflect.Indirect
reflect.MakeChan
Also:
- Specialize genInvoke when the receiver is a reflect.Type, under the
assumption that there's only one possible concrete type. This
makes all reflect.Type operations context-sensitive since the calls
are no longer dynamic.
- Rename all variables to match the actual parameter names used in
the reflect API.
- Add pointer.Config.Reflection flag
(exposed in oracle as --reflect, default false) to enable reflection.
It currently adds about 20% to running time; I'll make it true once
the presolver is implemented. (See the usage sketch at the end of
this description.)
- Simplify the worklist datatype and solver main loop slightly
(~10% speed improvement).
- Use the addLabel() utility to add a label to a points-to set.
(Working on my 3 yr old 2x2GHz+4GB Mac vs 8x4GHz+24GB workstation,
one really notices the cost of pointer analysis.
Note to self: time to implement presolver.)
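
For reference, a minimal sketch of enabling the new flag from client
code (the ssa setup is elided; assume main is an *ssa.Package the
caller has already built):

	package client

	import (
		"code.google.com/p/go.tools/pointer"
		"code.google.com/p/go.tools/ssa"
	)

	func analyze(main *ssa.Package) {
		config := &pointer.Config{
			Mains:      []*ssa.Package{main},
			Reflection: true, // sound but ~20% slower; hence default false
		}
		pointer.Analyze(config) // results arrive via the Config callbacks
	}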
R=crawshaw
CC=golang-dev
https://golang.org/cl/13242062
diff --git a/pointer/TODO b/pointer/TODO
index b7976f6..d5f12bc 100644
--- a/pointer/TODO
+++ b/pointer/TODO
@@ -18,7 +18,6 @@
PRESOLVER OPTIMISATIONS
- use HVN, HRU, LE, PE, HCD, LCD.
- But: LE would lose the precise detail we currently enjoy in each label.
SOLVER:
- use BDDs and/or sparse bitvectors for ptsets
diff --git a/pointer/analysis.go b/pointer/analysis.go
index 319d976..b825bb1 100644
--- a/pointer/analysis.go
+++ b/pointer/analysis.go
@@ -185,7 +185,8 @@
hasher typemap.Hasher // cache of type hashes
reflectValueObj types.Object // type symbol for reflect.Value (if present)
reflectRtypeObj types.Object // *types.TypeName for reflect.rtype (if present)
- reflectRtype *types.Pointer // *reflect.rtype
+ reflectRtypePtr *types.Pointer // *reflect.rtype
+ reflectType *types.Named // reflect.Type
rtypes typemap.M // nodeid of canonical *rtype-tagged object for type T
reflectZeros typemap.M // nodeid of canonical T-tagged object for zero value
}
@@ -244,8 +245,9 @@
if reflect := a.prog.ImportedPackage("reflect"); reflect != nil {
a.reflectValueObj = reflect.Object.Scope().Lookup("Value")
+ a.reflectType = reflect.Object.Scope().Lookup("Type").Type().(*types.Named)
a.reflectRtypeObj = reflect.Object.Scope().Lookup("rtype")
- a.reflectRtype = types.NewPointer(a.reflectRtypeObj.Type())
+ a.reflectRtypePtr = types.NewPointer(a.reflectRtypeObj.Type())
// Override flattening of reflect.Value, treating it like a basic type.
tReflectValue := a.reflectValueObj.Type()
@@ -265,11 +267,7 @@
root := a.generate()
- // ---------- Presolver ----------
-
- // TODO(adonovan): opt: presolver optimisations.
-
- // ---------- Solver ----------
+ //a.optimize()
a.solve()
diff --git a/pointer/api.go b/pointer/api.go
index c11b908..f1f4e47 100644
--- a/pointer/api.go
+++ b/pointer/api.go
@@ -20,6 +20,13 @@
Mains []*ssa.Package // set of 'main' packages to analyze
root *ssa.Function // synthetic analysis root
+ // Reflection determines whether to handle reflection
+ // operators soundly, which is currently rather slow since it
+	// causes constraints to be generated during solving in
+	// proportion to the number of constraint variables, which
+	// has not yet been reduced by presolver optimisation.
+ Reflection bool
+
// -------- Optional callbacks invoked by the analysis --------
// Call is invoked for each discovered call-graph edge. The
diff --git a/pointer/doc.go b/pointer/doc.go
index a497cba..760961c 100644
--- a/pointer/doc.go
+++ b/pointer/doc.go
@@ -10,11 +10,13 @@
The implementation is similar to that described in (Pearce et al,
PASTE'04). Unlike many algorithms which interleave constraint
generation and solving, constructing the callgraph as they go, this
-implementation has a strict phase ordering: generation before solving.
-Only simple (copy) constraints may be generated during solving. This
-improves the traction of presolver optimisations, but imposes certain
-restrictions, e.g. potential context sensitivity is limited since all
-variants must be created a priori.
+implementation for the most part observes a phase ordering (generation
+before solving), with only simple (copy) constraints being generated
+during solving. (The exception is reflection, which creates various
+constraints during solving as new types flow to reflect.Value
+operations.) This improves the traction of presolver optimisations,
+but imposes certain restrictions, e.g. potential context sensitivity
+is limited since all variants must be created a priori.
We intend to add various presolving optimisations such as Pointer and
Location Equivalence from (Hardekopf & Lin, SAS '07) and solver
diff --git a/pointer/gen.go b/pointer/gen.go
index 0c448fa..62fb103 100644
--- a/pointer/gen.go
+++ b/pointer/gen.go
@@ -235,7 +235,7 @@
a.addOneNode(T, "reflect.rtype", nil)
a.endObject(obj, nil, nil).rtype = T
- id := a.makeTagged(a.reflectRtype, nil, nil)
+ id := a.makeTagged(a.reflectRtypePtr, nil, nil)
a.nodes[id].obj.rtype = T
a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
a.addressOf(id+1, obj)
@@ -244,6 +244,15 @@
return id
}
+// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
+func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
+ tDyn, t, _ := a.taggedValue(obj)
+ if tDyn != a.reflectRtypePtr {
+ panic(fmt.Sprintf("not a *reflect.rtype-tagged value: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
+ }
+ return a.nodes[t].typ
+}
+
// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
@@ -432,6 +441,11 @@
}
}
+// typeAssert creates a typeAssert constraint of the form dst = src.(T).
+func (a *analysis) typeAssert(T types.Type, dst, src nodeid) {
+ a.addConstraint(&typeAssertConstraint{T, dst, src})
+}
+
// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
a.constraints = append(a.constraints, c)
@@ -720,13 +734,11 @@
// It returns a node whose pts() will be the set of possible call targets.
//
func (a *analysis) genInvoke(call *ssa.CallCommon, result nodeid) nodeid {
- sig := call.Signature()
+ if call.Value.Type() == a.reflectType {
+ return a.genInvokeReflectType(call, result)
+ }
- // TODO(adonovan): optimise this into a static call when there
- // can be at most one type that implements the interface (due
- // to unexported methods). This is particularly important for
- // methods of interface reflect.Type (sole impl:
- // *reflect.rtype), so we can realize context sensitivity.
+ sig := call.Signature()
// Allocate a contiguous targets/params/results block for this call.
block := a.nextNode()
@@ -753,6 +765,63 @@
return targets
}
+// genInvokeReflectType is a specialization of genInvoke where the
+// receiver type is a reflect.Type, under the assumption that there
+// can be at most one implementation of this interface, *reflect.rtype.
+//
+// (Though this may appear to be an instance of a pattern---method
+// calls on interfaces known to have exactly one implementation---in
+// practice it occurs rarely, so we special-case reflect.Type.)
+//
+// In effect we treat this:
+// var rt reflect.Type = ...
+// rt.F()
+// as this:
+// rt.(*reflect.rtype).F()
+//
+// It returns a node whose pts() will be the (singleton) set of
+// possible call targets.
+//
+func (a *analysis) genInvokeReflectType(call *ssa.CallCommon, result nodeid) nodeid {
+ // Unpack receiver into rtype
+ rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
+ recv := a.valueNode(call.Value)
+ a.typeAssert(a.reflectRtypePtr, rtype, recv)
+
+ // Look up the concrete method.
+ meth := a.reflectRtypePtr.MethodSet().Lookup(call.Method.Pkg(), call.Method.Name())
+ fn := a.prog.Method(meth)
+
+ obj := a.makeFunctionObject(fn) // new contour for this call
+
+ // From now on, it's essentially a static call, but little is
+ // gained by factoring together the code for both cases.
+
+ sig := fn.Signature // concrete method
+ targets := a.addOneNode(sig, "call.targets", nil)
+ a.addressOf(targets, obj) // (a singleton)
+
+ // Copy receiver.
+ params := a.funcParams(obj)
+ a.copy(params, rtype, 1)
+ params++
+
+ // Copy actual parameters into formal params block.
+ // Must loop, since the actuals aren't contiguous.
+ for i, arg := range call.Args {
+ sz := a.sizeof(sig.Params().At(i).Type())
+ a.copy(params, a.valueNode(arg), sz)
+ params += nodeid(sz)
+ }
+
+ // Copy formal results block to actual result.
+ if result != 0 {
+ a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
+ }
+
+ return obj
+}
+
// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
call := instr.Common()
@@ -927,8 +996,7 @@
a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
case *ssa.TypeAssert:
- dst, src := a.valueNode(instr), a.valueNode(instr.X)
- a.addConstraint(&typeAssertConstraint{instr.AssertedType, dst, src})
+ a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X))
case *ssa.Slice:
a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
@@ -1117,7 +1185,9 @@
a.panicNode = a.addNodes(tEface, "panic")
// Create nodes and constraints for all methods of reflect.rtype.
- if rtype := a.reflectRtype; rtype != nil {
+ // (Shared contours are used by dynamic calls to reflect.Type
+ // methods---typically just String().)
+ if rtype := a.reflectRtypePtr; rtype != nil {
mset := rtype.MethodSet()
for i, n := 0, mset.Len(); i < n; i++ {
a.valueNode(a.prog.Method(mset.At(i)))
@@ -1135,9 +1205,5 @@
a.genFunc(cgn)
}
- // Create a dummy node to avoid out-of-range indexing in case
- // the last allocated type was of zero length.
- a.addNodes(tInvalid, "(max)")
-
return root
}
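
To make the new specialization concrete, a fragment like the following
(illustrative only; compare testdata/chanreflect.go further down) now
resolves its dynamic call statically:

	package main

	import "reflect"

	func main() {
		var t reflect.Type = reflect.TypeOf(42)
		// The sole concrete type reaching t is *reflect.rtype, so
		// genInvokeReflectType treats this dynamic call as if it were
		// written t.(*reflect.rtype).String(), giving the callee its
		// own contour (context sensitivity).
		println(t.String())
	}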
diff --git a/pointer/intrinsics.go b/pointer/intrinsics.go
index b59d330..917701f 100644
--- a/pointer/intrinsics.go
+++ b/pointer/intrinsics.go
@@ -70,6 +70,8 @@
"(reflect.Value).OverflowInt": ext۰NoEffect,
"(reflect.Value).OverflowUint": ext۰NoEffect,
"(reflect.Value).Pointer": ext۰NoEffect,
+ "(reflect.Value).Recv": ext۰reflect۰Value۰Recv,
+ "(reflect.Value).Send": ext۰reflect۰Value۰Send,
"(reflect.Value).Set": ext۰reflect۰Value۰Set,
"(reflect.Value).SetBool": ext۰NoEffect,
"(reflect.Value).SetBytes": ext۰reflect۰Value۰SetBytes,
@@ -83,6 +85,8 @@
"(reflect.Value).SetUint": ext۰NoEffect,
"(reflect.Value).Slice": ext۰reflect۰Value۰Slice,
"(reflect.Value).String": ext۰NoEffect,
+ "(reflect.Value).TryRecv": ext۰reflect۰Value۰Recv,
+ "(reflect.Value).TrySend": ext۰reflect۰Value۰Send,
"(reflect.Value).Type": ext۰NoEffect,
"(reflect.Value).Uint": ext۰NoEffect,
"(reflect.Value).UnsafeAddr": ext۰NoEffect,
@@ -275,9 +279,13 @@
if !ok {
impl = intrinsicsByName[fn.String()] // may be nil
- // Ensure all "reflect" code is treated intrinsically.
- if impl == nil && fn.Pkg != nil && a.reflectValueObj != nil && a.reflectValueObj.Pkg() == fn.Pkg.Object {
- impl = ext۰NotYetImplemented
+ if fn.Pkg != nil && a.reflectValueObj != nil && a.reflectValueObj.Pkg() == fn.Pkg.Object {
+ if !a.config.Reflection {
+ impl = ext۰NoEffect // reflection disabled
+ } else if impl == nil {
+ // Ensure all "reflect" code is treated intrinsically.
+ impl = ext۰NotYetImplemented
+ }
}
a.intrinsics[fn] = impl
diff --git a/pointer/pointer_test.go b/pointer/pointer_test.go
index 6f06242..8799e71 100644
--- a/pointer/pointer_test.go
+++ b/pointer/pointer_test.go
@@ -33,9 +33,11 @@
// "testdata/tmp.go",
// Working:
+ "testdata/a_test.go",
"testdata/another.go",
"testdata/arrays.go",
"testdata/channels.go",
+ "testdata/chanreflect.go",
"testdata/context.go",
"testdata/conv.go",
"testdata/flow.go",
@@ -43,21 +45,19 @@
"testdata/func.go",
"testdata/hello.go",
"testdata/interfaces.go",
+ "testdata/funcreflect.go",
+ "testdata/mapreflect.go",
"testdata/maps.go",
"testdata/panic.go",
"testdata/recur.go",
+ "testdata/reflect.go",
"testdata/structs.go",
- "testdata/a_test.go",
- "testdata/mapreflect.go",
// TODO(adonovan): get these tests (of reflection) passing.
// (The tests are mostly sound since they were used for a
// previous implementation.)
- // "testdata/funcreflect.go",
// "testdata/arrayreflect.go",
- // "testdata/chanreflect.go",
// "testdata/finalizer.go",
- // "testdata/reflect.go",
// "testdata/structreflect.go",
}
@@ -290,8 +290,9 @@
// Run the analysis.
config := &pointer.Config{
- Mains: []*ssa.Package{ptrmain},
- Log: &log,
+ Reflection: true,
+ Mains: []*ssa.Package{ptrmain},
+ Log: &log,
Print: func(site *ssa.CallCommon, p pointer.Pointer) {
probes = append(probes, probe{site, p})
},
diff --git a/pointer/reflect.go b/pointer/reflect.go
index 59b51a3..d55d0bb 100644
--- a/pointer/reflect.go
+++ b/pointer/reflect.go
@@ -4,10 +4,14 @@
// constraints arising from the use of reflection in the target
// program. See doc.go for explanation of the representation.
//
+// For consistency, the names of all parameters match those of the
+// actual functions in the "reflect" package.
+//
// TODO(adonovan): fix: most of the reflect API permits implicit
// conversions due to assignability, e.g. m.MapIndex(k) is ok if T(k)
// is assignable to T(M).key. It's not yet clear how best to model
-// that.
+// that; perhaps a more lenient version of typeAssertConstraint is
+// needed.
//
// To avoid proliferation of equivalent labels, intrinsics should
// memoize as much as possible, like TypeOf and Zero do for their
@@ -17,6 +21,7 @@
import (
"fmt"
+ "go/ast"
"code.google.com/p/go.tools/go/types"
)
@@ -37,25 +42,25 @@
// ---------- func (Value).Interface() interface{} ----------
-// result = rv.Interface()
+// result = v.Interface()
type rVInterfaceConstraint struct {
- rv nodeid // (ptr)
+ v nodeid // (ptr)
result nodeid
}
func (c *rVInterfaceConstraint) String() string {
- return fmt.Sprintf("n%d = reflect n%d.Interface()", c.result, c.rv)
+ return fmt.Sprintf("n%d = reflect n%d.Interface()", c.result, c.v)
}
func (c *rVInterfaceConstraint) ptr() nodeid {
- return c.rv
+ return c.v
}
func (c *rVInterfaceConstraint) solve(a *analysis, _ *node, delta nodeset) {
resultPts := &a.nodes[c.result].pts
changed := false
- for obj := range delta {
- tDyn, _, indirect := a.taggedValue(obj)
+ for vObj := range delta {
+ tDyn, _, indirect := a.taggedValue(vObj)
if tDyn == nil {
panic("not a tagged object")
}
@@ -65,7 +70,7 @@
panic("indirect tagged object")
}
- if resultPts.add(obj) {
+ if resultPts.add(vObj) {
changed = true
}
}
@@ -76,33 +81,33 @@
func ext۰reflect۰Value۰Interface(a *analysis, cgn *cgnode) {
a.addConstraint(&rVInterfaceConstraint{
- rv: a.funcParams(cgn.obj),
+ v: a.funcParams(cgn.obj),
result: a.funcResults(cgn.obj),
})
}
// ---------- func (Value).MapIndex(Value) Value ----------
-// result = rv.MapIndex(key)
+// result = v.MapIndex(_)
type rVMapIndexConstraint struct {
cgn *cgnode
- rv nodeid // (ptr)
+ v nodeid // (ptr)
result nodeid
}
func (c *rVMapIndexConstraint) String() string {
- return fmt.Sprintf("n%d = reflect n%d.MapIndex(_)", c.result, c.rv)
+ return fmt.Sprintf("n%d = reflect n%d.MapIndex(_)", c.result, c.v)
}
func (c *rVMapIndexConstraint) ptr() nodeid {
- return c.rv
+ return c.v
}
func (c *rVMapIndexConstraint) solve(a *analysis, _ *node, delta nodeset) {
changed := false
- for obj := range delta {
- tDyn, m, indirect := a.taggedValue(obj)
- tMap, _ := tDyn.(*types.Map)
+ for vObj := range delta {
+ tDyn, m, indirect := a.taggedValue(vObj)
+ tMap, _ := tDyn.Underlying().(*types.Map)
if tMap == nil {
continue // not a map
}
@@ -112,9 +117,9 @@
panic("indirect tagged object")
}
- vObj := a.makeTagged(tMap.Elem(), c.cgn, nil)
- a.loadOffset(vObj+1, m, a.sizeof(tMap.Key()), a.sizeof(tMap.Elem()))
- if a.nodes[c.result].pts.add(vObj) {
+ obj := a.makeTagged(tMap.Elem(), c.cgn, nil)
+ a.loadOffset(obj+1, m, a.sizeof(tMap.Key()), a.sizeof(tMap.Elem()))
+ if a.addLabel(c.result, obj) {
changed = true
}
}
@@ -126,33 +131,33 @@
func ext۰reflect۰Value۰MapIndex(a *analysis, cgn *cgnode) {
a.addConstraint(&rVMapIndexConstraint{
cgn: cgn,
- rv: a.funcParams(cgn.obj),
+ v: a.funcParams(cgn.obj),
result: a.funcResults(cgn.obj),
})
}
// ---------- func (Value).MapKeys() []Value ----------
-// result = rv.MapKeys()
+// result = v.MapKeys()
type rVMapKeysConstraint struct {
cgn *cgnode
- rv nodeid // (ptr)
+ v nodeid // (ptr)
result nodeid
}
func (c *rVMapKeysConstraint) String() string {
- return fmt.Sprintf("n%d = reflect n%d.MapKeys()", c.result, c.rv)
+ return fmt.Sprintf("n%d = reflect n%d.MapKeys()", c.result, c.v)
}
func (c *rVMapKeysConstraint) ptr() nodeid {
- return c.rv
+ return c.v
}
func (c *rVMapKeysConstraint) solve(a *analysis, _ *node, delta nodeset) {
changed := false
- for obj := range delta {
- tDyn, m, indirect := a.taggedValue(obj)
- tMap, _ := tDyn.(*types.Map)
+ for vObj := range delta {
+ tDyn, m, indirect := a.taggedValue(vObj)
+ tMap, _ := tDyn.Underlying().(*types.Map)
if tMap == nil {
continue // not a map
}
@@ -164,7 +169,7 @@
kObj := a.makeTagged(tMap.Key(), c.cgn, nil)
a.load(kObj+1, m, a.sizeof(tMap.Key()))
- if a.nodes[c.result].pts.add(kObj) {
+ if a.addLabel(c.result, kObj) {
changed = true
}
}
@@ -180,41 +185,139 @@
a.endObject(obj, cgn, nil)
a.addressOf(a.funcResults(cgn.obj), obj)
- // resolution rule attached to rv
a.addConstraint(&rVMapKeysConstraint{
cgn: cgn,
- rv: a.funcParams(cgn.obj),
+ v: a.funcParams(cgn.obj),
result: obj + 1, // result is stored in array elems
})
}
func ext۰reflect۰Value۰Method(a *analysis, cgn *cgnode) {}
func ext۰reflect۰Value۰MethodByName(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰Value۰Set(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰Value۰SetBytes(a *analysis, cgn *cgnode) {}
+
+// ---------- func (Value).Recv() (Value, bool) ----------
+
+// result, _ = v.Recv()
+type rVRecvConstraint struct {
+ cgn *cgnode
+ v nodeid // (ptr)
+ result nodeid
+}
+
+func (c *rVRecvConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect n%d.Recv()", c.result, c.v)
+}
+
+func (c *rVRecvConstraint) ptr() nodeid {
+ return c.v
+}
+
+func (c *rVRecvConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for vObj := range delta {
+ tDyn, ch, indirect := a.taggedValue(vObj)
+ tChan, _ := tDyn.Underlying().(*types.Chan)
+ if tChan == nil {
+ continue // not a channel
+ }
+ if indirect {
+ // TODO(adonovan): we'll need to implement this
+ // when we start creating indirect tagged objects.
+ panic("indirect tagged object")
+ }
+
+ tElem := tChan.Elem()
+ elemObj := a.makeTagged(tElem, c.cgn, nil)
+ a.load(elemObj+1, ch, a.sizeof(tElem))
+ if a.addLabel(c.result, elemObj) {
+ changed = true
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰Value۰Recv(a *analysis, cgn *cgnode) {
+ a.addConstraint(&rVRecvConstraint{
+ cgn: cgn,
+ v: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+// ---------- func (Value).Send(Value) ----------
+
+// v.Send(x)
+type rVSendConstraint struct {
+ cgn *cgnode
+ v nodeid // (ptr)
+ x nodeid
+}
+
+func (c *rVSendConstraint) String() string {
+ return fmt.Sprintf("reflect n%d.Send(n%d)", c.v, c.x)
+}
+
+func (c *rVSendConstraint) ptr() nodeid {
+ return c.v
+}
+
+func (c *rVSendConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ for vObj := range delta {
+ tDyn, ch, indirect := a.taggedValue(vObj)
+ tChan, _ := tDyn.Underlying().(*types.Chan)
+ if tChan == nil {
+ continue // not a channel
+ }
+ if indirect {
+ // TODO(adonovan): we'll need to implement this
+ // when we start creating indirect tagged objects.
+ panic("indirect tagged object")
+ }
+
+ // Extract x's payload to xtmp, then store to channel.
+ tElem := tChan.Elem()
+ xtmp := a.addNodes(tElem, "Send.xtmp")
+ a.typeAssert(tElem, xtmp, c.x)
+ a.store(ch, xtmp, a.sizeof(tElem))
+ }
+}
+
+func ext۰reflect۰Value۰Send(a *analysis, cgn *cgnode) {
+ params := a.funcParams(cgn.obj)
+ a.addConstraint(&rVSendConstraint{
+ cgn: cgn,
+ v: params,
+ x: params + 1,
+ })
+}
+
+func ext۰reflect۰Value۰Set(a *analysis, cgn *cgnode) {}
+func ext۰reflect۰Value۰SetBytes(a *analysis, cgn *cgnode) {}
// ---------- func (Value).SetMapIndex(key, val Value) ----------
-// rv.SetMapIndex(k, v)
+// v.SetMapIndex(key, val)
type rVSetMapIndexConstraint struct {
cgn *cgnode
- rv nodeid // (ptr)
- k nodeid
- v nodeid
+ v nodeid // (ptr)
+ key nodeid
+ val nodeid
}
func (c *rVSetMapIndexConstraint) String() string {
- return fmt.Sprintf("reflect n%d.SetMapIndex(n%d, n%d)", c.rv, c.k, c.v)
+ return fmt.Sprintf("reflect n%d.SetMapIndex(n%d, n%d)", c.v, c.key, c.val)
}
func (c *rVSetMapIndexConstraint) ptr() nodeid {
- return c.rv
+ return c.v
}
func (c *rVSetMapIndexConstraint) solve(a *analysis, _ *node, delta nodeset) {
- for obj := range delta {
- tDyn, m, indirect := a.taggedValue(obj)
- tMap, _ := tDyn.(*types.Map)
+ for vObj := range delta {
+ tDyn, m, indirect := a.taggedValue(vObj)
+ tMap, _ := tDyn.Underlying().(*types.Map)
if tMap == nil {
continue // not a map
}
@@ -224,28 +327,27 @@
panic("indirect tagged object")
}
- ksize := a.sizeof(tMap.Key())
+ keysize := a.sizeof(tMap.Key())
- // Extract k Value's payload to ktmp, then store to map key.
- ktmp := a.addNodes(tMap.Key(), "SetMapIndex.ktmp")
- a.addConstraint(&typeAssertConstraint{tMap.Key(), ktmp, c.k})
- a.store(m, ktmp, ksize)
+ // Extract key's payload to keytmp, then store to map key.
+ keytmp := a.addNodes(tMap.Key(), "SetMapIndex.keytmp")
+ a.typeAssert(tMap.Key(), keytmp, c.key)
+ a.store(m, keytmp, keysize)
- // Extract v Value's payload to vtmp, then store to map value.
- vtmp := a.addNodes(tMap.Elem(), "SetMapIndex.vtmp")
- a.addConstraint(&typeAssertConstraint{tMap.Elem(), vtmp, c.v})
- a.storeOffset(m, vtmp, ksize, a.sizeof(tMap.Elem()))
+		// Extract val's payload to valtmp, then store to map value.
+ valtmp := a.addNodes(tMap.Elem(), "SetMapIndex.valtmp")
+ a.typeAssert(tMap.Elem(), valtmp, c.val)
+ a.storeOffset(m, valtmp, keysize, a.sizeof(tMap.Elem()))
}
}
func ext۰reflect۰Value۰SetMapIndex(a *analysis, cgn *cgnode) {
- // resolution rule attached to rv
- rv := a.funcParams(cgn.obj)
+ params := a.funcParams(cgn.obj)
a.addConstraint(&rVSetMapIndexConstraint{
cgn: cgn,
- rv: rv,
- k: rv + 1,
- v: rv + 2,
+ v: params,
+ key: params + 1,
+ val: params + 2,
})
}
@@ -257,45 +359,300 @@
func ext۰reflect۰Append(a *analysis, cgn *cgnode) {}
func ext۰reflect۰AppendSlice(a *analysis, cgn *cgnode) {}
func ext۰reflect۰Copy(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰ChanOf(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰Indirect(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰MakeChan(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰MakeFunc(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰MakeMap(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰MakeSlice(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰MapOf(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰New(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰NewAt(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰PtrTo(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰Select(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰SliceOf(a *analysis, cgn *cgnode) {}
-// ---------- func TypeOf(v Value) Type ----------
+// ---------- func ChanOf(ChanDir, Type) Type ----------
-// result = TypeOf(v)
-type reflectTypeOfConstraint struct {
+// result = ChanOf(_, t)
+type reflectChanOfConstraint struct {
+ cgn *cgnode
+ t nodeid // (ptr)
+ result nodeid
+}
+
+func (c *reflectChanOfConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.ChanOf(n%d)", c.result, c.t)
+}
+
+func (c *reflectChanOfConstraint) ptr() nodeid {
+ return c.t
+}
+
+func (c *reflectChanOfConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for tObj := range delta {
+ T := a.rtypeTaggedValue(tObj)
+ // TODO(adonovan): use only the channel direction
+ // provided at the callsite, if constant.
+ for _, dir := range []ast.ChanDir{1, 2, 3} {
+ if a.addLabel(c.result, a.makeRtype(types.NewChan(dir, T))) {
+ changed = true
+ }
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰ChanOf(a *analysis, cgn *cgnode) {
+ params := a.funcParams(cgn.obj)
+ a.addConstraint(&reflectChanOfConstraint{
+ cgn: cgn,
+ t: params + 1,
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+// ---------- func Indirect(v Value) Value ----------
+
+// result = Indirect(v)
+type reflectIndirectConstraint struct {
cgn *cgnode
v nodeid // (ptr)
result nodeid
}
-func (c *reflectTypeOfConstraint) String() string {
- return fmt.Sprintf("n%d = reflect.TypeOf(n%d)", c.result, c.v)
+func (c *reflectIndirectConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.Indirect(n%d)", c.result, c.v)
}
-func (c *reflectTypeOfConstraint) ptr() nodeid {
+func (c *reflectIndirectConstraint) ptr() nodeid {
return c.v
}
-func (c *reflectTypeOfConstraint) solve(a *analysis, _ *node, delta nodeset) {
+func (c *reflectIndirectConstraint) solve(a *analysis, _ *node, delta nodeset) {
changed := false
- for obj := range delta {
- tDyn, _, _ := a.taggedValue(obj)
+ for vObj := range delta {
+ tDyn, _, _ := a.taggedValue(vObj)
if tDyn == nil {
panic("not a tagged value")
}
- if a.nodes[c.result].pts.add(a.makeRtype(tDyn)) {
+ var res nodeid
+ if tPtr, ok := tDyn.Underlying().(*types.Pointer); ok {
+ // load the payload of the pointer's tagged object
+ // into a new tagged object
+ res = a.makeTagged(tPtr.Elem(), c.cgn, nil)
+ a.load(res+1, vObj+1, a.sizeof(tPtr.Elem()))
+ } else {
+ res = vObj
+ }
+
+ if a.addLabel(c.result, res) {
+ changed = true
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰Indirect(a *analysis, cgn *cgnode) {
+ a.addConstraint(&reflectIndirectConstraint{
+ cgn: cgn,
+ v: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+// ---------- func MakeChan(Type, int) Value ----------
+
+// result = MakeChan(typ)
+type reflectMakeChanConstraint struct {
+ cgn *cgnode
+ typ nodeid // (ptr)
+ result nodeid
+}
+
+func (c *reflectMakeChanConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.MakeChan(n%d)", c.result, c.typ)
+}
+
+func (c *reflectMakeChanConstraint) ptr() nodeid {
+ return c.typ
+}
+
+func (c *reflectMakeChanConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for typObj := range delta {
+ T := a.rtypeTaggedValue(typObj)
+ tChan, ok := T.Underlying().(*types.Chan)
+ if !ok || tChan.Dir() != ast.SEND|ast.RECV {
+ continue // not a bidirectional channel type
+ }
+
+ obj := a.nextNode()
+ a.addNodes(tChan.Elem(), "reflect.MakeChan.value")
+ a.endObject(obj, c.cgn, nil)
+
+ // put its address in a new T-tagged object
+ id := a.makeTagged(T, c.cgn, nil)
+ a.addLabel(id+1, obj)
+
+ // flow the T-tagged object to the result
+ if a.addLabel(c.result, id) {
+ changed = true
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰MakeChan(a *analysis, cgn *cgnode) {
+ a.addConstraint(&reflectMakeChanConstraint{
+ cgn: cgn,
+ typ: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+func ext۰reflect۰MakeFunc(a *analysis, cgn *cgnode) {}
+
+// ---------- func MakeMap(Type) Value ----------
+
+// result = MakeMap(typ)
+type reflectMakeMapConstraint struct {
+ cgn *cgnode
+ typ nodeid // (ptr)
+ result nodeid
+}
+
+func (c *reflectMakeMapConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.MakeMap(n%d)", c.result, c.typ)
+}
+
+func (c *reflectMakeMapConstraint) ptr() nodeid {
+ return c.typ
+}
+
+func (c *reflectMakeMapConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for typObj := range delta {
+ T := a.rtypeTaggedValue(typObj)
+ tMap, ok := T.Underlying().(*types.Map)
+ if !ok {
+ continue // not a map type
+ }
+
+ mapObj := a.nextNode()
+ a.addNodes(tMap.Key(), "reflect.MakeMap.key")
+ a.addNodes(tMap.Elem(), "reflect.MakeMap.value")
+ a.endObject(mapObj, c.cgn, nil)
+
+ // put its address in a new T-tagged object
+ id := a.makeTagged(T, c.cgn, nil)
+ a.addLabel(id+1, mapObj)
+
+ // flow the T-tagged object to the result
+ if a.addLabel(c.result, id) {
+ changed = true
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰MakeMap(a *analysis, cgn *cgnode) {
+ a.addConstraint(&reflectMakeMapConstraint{
+ cgn: cgn,
+ typ: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+func ext۰reflect۰MakeSlice(a *analysis, cgn *cgnode) {}
+func ext۰reflect۰MapOf(a *analysis, cgn *cgnode) {}
+
+// ---------- func New(Type) Value ----------
+
+// result = New(typ)
+type reflectNewConstraint struct {
+ cgn *cgnode
+ typ nodeid // (ptr)
+ result nodeid
+}
+
+func (c *reflectNewConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.New(n%d)", c.result, c.typ)
+}
+
+func (c *reflectNewConstraint) ptr() nodeid {
+ return c.typ
+}
+
+func (c *reflectNewConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for typObj := range delta {
+ T := a.rtypeTaggedValue(typObj)
+
+ // allocate new T object
+ newObj := a.nextNode()
+ a.addNodes(T, "reflect.New")
+ a.endObject(newObj, c.cgn, nil)
+
+ // put its address in a new *T-tagged object
+ id := a.makeTagged(types.NewPointer(T), c.cgn, nil)
+ a.addLabel(id+1, newObj)
+
+ // flow the pointer to the result
+ if a.addLabel(c.result, id) {
+ changed = true
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰New(a *analysis, cgn *cgnode) {
+ a.addConstraint(&reflectNewConstraint{
+ cgn: cgn,
+ typ: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ })
+}
+
+func ext۰reflect۰NewAt(a *analysis, cgn *cgnode) {
+ ext۰reflect۰New(a, cgn)
+
+ // TODO(adonovan): make it easier to report errors of this form,
+ // which includes the callsite:
+ // a.warnf("unsound: main.reflectNewAt contains a reflect.NewAt() call")
+ a.warnf(cgn.Func().Pos(), "unsound: reflect.NewAt() call")
+}
+
+func ext۰reflect۰PtrTo(a *analysis, cgn *cgnode) {}
+func ext۰reflect۰Select(a *analysis, cgn *cgnode) {}
+func ext۰reflect۰SliceOf(a *analysis, cgn *cgnode) {}
+
+// ---------- func TypeOf(i interface{}) Type ----------
+
+// result = TypeOf(i)
+type reflectTypeOfConstraint struct {
+ cgn *cgnode
+ i nodeid // (ptr)
+ result nodeid
+}
+
+func (c *reflectTypeOfConstraint) String() string {
+ return fmt.Sprintf("n%d = reflect.TypeOf(n%d)", c.result, c.i)
+}
+
+func (c *reflectTypeOfConstraint) ptr() nodeid {
+ return c.i
+}
+
+func (c *reflectTypeOfConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for iObj := range delta {
+ tDyn, _, _ := a.taggedValue(iObj)
+ if tDyn == nil {
+ panic("not a tagged value")
+ }
+
+ if a.addLabel(c.result, a.makeRtype(tDyn)) {
changed = true
}
}
@@ -307,7 +664,7 @@
func ext۰reflect۰TypeOf(a *analysis, cgn *cgnode) {
a.addConstraint(&reflectTypeOfConstraint{
cgn: cgn,
- v: a.funcParams(cgn.obj),
+ i: a.funcParams(cgn.obj),
result: a.funcResults(cgn.obj),
})
}
@@ -323,29 +680,25 @@
// ---------- func Zero(Type) Value ----------
-// result = Zero(t)
+// result = Zero(typ)
type reflectZeroConstraint struct {
cgn *cgnode
- t nodeid // (ptr)
+ typ nodeid // (ptr)
result nodeid
}
func (c *reflectZeroConstraint) String() string {
- return fmt.Sprintf("n%d = reflect.Zero(n%d)", c.result, c.t)
+ return fmt.Sprintf("n%d = reflect.Zero(n%d)", c.result, c.typ)
}
func (c *reflectZeroConstraint) ptr() nodeid {
- return c.t
+ return c.typ
}
func (c *reflectZeroConstraint) solve(a *analysis, _ *node, delta nodeset) {
changed := false
- for obj := range delta {
- tDyn, v, _ := a.taggedValue(obj)
- if tDyn != a.reflectRtype {
- panic("not a *reflect.rtype-tagged value")
- }
- T := a.nodes[v].typ
+ for typObj := range delta {
+ T := a.rtypeTaggedValue(typObj)
// memoize using a.reflectZeros[T]
var id nodeid
@@ -355,7 +708,7 @@
id = a.makeTagged(T, c.cgn, nil)
a.reflectZeros.Set(T, id)
}
- if a.nodes[c.result].pts.add(id) {
+ if a.addLabel(c.result, id) {
changed = true
}
}
@@ -367,7 +720,7 @@
func ext۰reflect۰Zero(a *analysis, cgn *cgnode) {
a.addConstraint(&reflectZeroConstraint{
cgn: cgn,
- t: a.funcParams(cgn.obj),
+ typ: a.funcParams(cgn.obj),
result: a.funcResults(cgn.obj),
})
}
@@ -392,15 +745,15 @@
}
func (c *rtypeElemConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ // Implemented by *types.{Map,Chan,Array,Slice,Pointer}.
+ type hasElem interface {
+ Elem() types.Type
+ }
changed := false
- for obj := range delta {
- T := a.nodes[obj].typ // assume obj is an *rtype
-
- // Works for *types.{Map,Chan,Array,Slice,Pointer}.
- if T, ok := T.Underlying().(interface {
- Elem() types.Type
- }); ok {
- if a.nodes[c.result].pts.add(a.makeRtype(T.Elem())) {
+ for tObj := range delta {
+ T := a.nodes[tObj].obj.rtype
+ if tHasElem, ok := T.Underlying().(hasElem); ok {
+ if a.addLabel(c.result, a.makeRtype(tHasElem.Elem())) {
changed = true
}
}
@@ -422,7 +775,71 @@
func ext۰reflect۰rtype۰FieldByIndex(a *analysis, cgn *cgnode) {}
func ext۰reflect۰rtype۰FieldByName(a *analysis, cgn *cgnode) {}
func ext۰reflect۰rtype۰FieldByNameFunc(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰rtype۰In(a *analysis, cgn *cgnode) {}
+
+// ---------- func (*rtype) In/Out(i int) Type ----------
+
+// result = In/Out(t)
+type rtypeInOutConstraint struct {
+ cgn *cgnode
+ t nodeid // (ptr)
+ result nodeid
+ out bool
+}
+
+func (c *rtypeInOutConstraint) String() string {
+ return fmt.Sprintf("n%d = (*reflect.rtype).InOut(n%d)", c.result, c.t)
+}
+
+func (c *rtypeInOutConstraint) ptr() nodeid {
+ return c.t
+}
+
+func (c *rtypeInOutConstraint) solve(a *analysis, _ *node, delta nodeset) {
+ changed := false
+ for tObj := range delta {
+ T := a.nodes[tObj].obj.rtype
+ sig, ok := T.Underlying().(*types.Signature)
+ if !ok {
+ continue // not a func type
+ }
+
+ tuple := sig.Params()
+ if c.out {
+ tuple = sig.Results()
+ }
+ // TODO(adonovan): when a function is analyzed
+ // context-sensitively, we should be able to see its
+ // caller's actual parameter's ssa.Values. Refactor
+ // the intrinsic mechanism to allow this. Then if the
+ // value is an int const K, skip the loop and use
+ // tuple.At(K).
+ for i, n := 0, tuple.Len(); i < n; i++ {
+ if a.addLabel(c.result, a.makeRtype(tuple.At(i).Type())) {
+ changed = true
+ }
+ }
+ }
+ if changed {
+ a.addWork(c.result)
+ }
+}
+
+func ext۰reflect۰rtype۰InOut(a *analysis, cgn *cgnode, out bool) {
+ a.addConstraint(&rtypeInOutConstraint{
+ cgn: cgn,
+ t: a.funcParams(cgn.obj),
+ result: a.funcResults(cgn.obj),
+ out: out,
+ })
+}
+
+func ext۰reflect۰rtype۰In(a *analysis, cgn *cgnode) {
+ ext۰reflect۰rtype۰InOut(a, cgn, false)
+}
+
+func ext۰reflect۰rtype۰Out(a *analysis, cgn *cgnode) {
+ ext۰reflect۰rtype۰InOut(a, cgn, true)
+}
// ---------- func (*rtype) Key() Type ----------
@@ -443,11 +860,10 @@
func (c *rtypeKeyConstraint) solve(a *analysis, _ *node, delta nodeset) {
changed := false
- for obj := range delta {
- T := a.nodes[obj].typ // assume obj is an *rtype
-
+ for tObj := range delta {
+ T := a.nodes[tObj].obj.rtype
if tMap, ok := T.Underlying().(*types.Map); ok {
- if a.nodes[c.result].pts.add(a.makeRtype(tMap.Key())) {
+ if a.addLabel(c.result, a.makeRtype(tMap.Key())) {
changed = true
}
}
@@ -467,4 +883,3 @@
func ext۰reflect۰rtype۰Method(a *analysis, cgn *cgnode) {}
func ext۰reflect۰rtype۰MethodByName(a *analysis, cgn *cgnode) {}
-func ext۰reflect۰rtype۰Out(a *analysis, cgn *cgnode) {}
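
The net effect of the new Send/Recv constraints can be seen on a
fragment in the style of the testdata below (the comments are
illustrative, not verbatim assertions from the tests):

	package main

	import "reflect"

	var a int

	func main() {
		ch := make(chan *int, 1)
		v := reflect.ValueOf(ch)
		v.Send(reflect.ValueOf(&a)) // rVSendConstraint: &a flows into ch's elements
		x, _ := v.Recv()            // rVRecvConstraint: x may point to main.a
		print(x.Interface())        // the analysis should report {main.a} here
	}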
diff --git a/pointer/solve.go b/pointer/solve.go
index 252dc68..b9d5f68 100644
--- a/pointer/solve.go
+++ b/pointer/solve.go
@@ -14,27 +14,21 @@
)
func (a *analysis) solve() {
- a.work.swap()
-
// Solver main loop.
for round := 1; ; round++ {
+ if a.log != nil {
+ fmt.Fprintf(a.log, "Solving, round %d\n", round)
+ }
+
// Add new constraints to the graph:
// static constraints from SSA on round 1,
// dynamic constraints from reflection thereafter.
a.processNewConstraints()
- if a.work.swap() {
- if a.log != nil {
- fmt.Fprintf(a.log, "Solving, round %d\n", round)
- }
-
- // Next iteration.
- if a.work.empty() {
- break // done
- }
- }
-
id := a.work.take()
+ if id == empty {
+ break
+ }
if a.log != nil {
fmt.Fprintf(a.log, "\tnode n%d\n", id)
}
@@ -110,9 +104,6 @@
if len(n.prevPts) > 0 {
stale.add(id)
}
- if a.log != nil {
- fmt.Fprintf(a.log, "Adding to worklist n%d\n", id)
- }
a.addWork(id)
}
}
@@ -152,6 +143,11 @@
}
}
+// addLabel adds label to the points-to set of ptr and reports whether the set grew.
+func (a *analysis) addLabel(ptr, label nodeid) bool {
+ return a.nodes[ptr].pts.add(label)
+}
+
func (a *analysis) addWork(id nodeid) {
a.work.add(id)
if a.log != nil {
@@ -205,6 +201,10 @@
// Returns sizeof.
// Implicitly adds nodes to worklist.
+//
+// TODO(adonovan): now that we support a.copy() during solving, we
+// could replace onlineCopyN with a.copy, but a.copy is much slower. Investigate.
+//
func (a *analysis) onlineCopyN(dst, src nodeid, sizeof uint32) uint32 {
for i := uint32(0); i < sizeof; i++ {
if a.onlineCopy(dst, src) {
@@ -263,7 +263,7 @@
if tIface != nil {
if types.IsAssignableTo(tDyn, tIface) {
- if a.nodes[c.dst].pts.add(ifaceObj) {
+ if a.addLabel(c.dst, ifaceObj) {
a.addWork(c.dst)
}
}
@@ -316,7 +316,7 @@
// Make callsite's fn variable point to identity of
// concrete method. (There's no need to add it to
// worklist since it never has attached constraints.)
- a.nodes[c.params].pts.add(fnObj)
+ a.addLabel(c.params, fnObj)
// Extract value and connect to method's receiver.
// Copy payload to method's receiver param (arg0).
@@ -324,7 +324,6 @@
recvSize := a.sizeof(sig.Recv().Type())
a.onlineCopyN(arg0, v, recvSize)
- // Copy iface object payload to method receiver.
src := c.params + 1 // skip past identity
dst := arg0 + nodeid(recvSize)
diff --git a/pointer/testdata/chanreflect.go b/pointer/testdata/chanreflect.go
index bfbcc26..9f68e84 100644
--- a/pointer/testdata/chanreflect.go
+++ b/pointer/testdata/chanreflect.go
@@ -4,19 +4,16 @@
import "reflect"
-//
-// This test is very sensitive to line-number perturbations!
-
// Test of channels with reflection.
var a, b int
func chanreflect1() {
- ch := make(chan *int, 0)
+ ch := make(chan *int, 0) // @line cr1make
crv := reflect.ValueOf(ch)
crv.Send(reflect.ValueOf(&a))
print(crv.Interface()) // @types chan *int
- print(crv.Interface().(chan *int)) // @pointsto makechan@testdata/chanreflect.go:15:12
+ print(crv.Interface().(chan *int)) // @pointsto makechan@cr1make:12
print(<-ch) // @pointsto main.a
}
@@ -29,25 +26,31 @@
print(r.Interface().(*int)) // @pointsto main.b
}
+// TODO(adonovan): the analysis can't yet take advantage of the
+// ChanOf(dir) parameter so the results are less precise than they
+// should be: all three directions are returned.
+
func chanOfRecv() {
// MakeChan(<-chan) is a no-op.
t := reflect.ChanOf(reflect.RecvDir, reflect.TypeOf(&a))
- print(reflect.Zero(t).Interface()) // @types <-chan *int
+ print(reflect.Zero(t).Interface()) // @types <-chan *int | chan<- *int | chan *int
print(reflect.MakeChan(t, 0).Interface().(<-chan *int)) // @pointsto
+ print(reflect.MakeChan(t, 0).Interface().(chan *int)) // @pointsto <alloc in reflect.MakeChan>
}
func chanOfSend() {
// MakeChan(chan<-) is a no-op.
t := reflect.ChanOf(reflect.SendDir, reflect.TypeOf(&a))
- print(reflect.Zero(t).Interface()) // @types chan<- *int
+ print(reflect.Zero(t).Interface()) // @types <-chan *int | chan<- *int | chan *int
print(reflect.MakeChan(t, 0).Interface().(chan<- *int)) // @pointsto
+ print(reflect.MakeChan(t, 0).Interface().(chan *int)) // @pointsto <alloc in reflect.MakeChan>
}
func chanOfBoth() {
t := reflect.ChanOf(reflect.BothDir, reflect.TypeOf(&a))
- print(reflect.Zero(t).Interface()) // @types chan *int
+ print(reflect.Zero(t).Interface()) // @types <-chan *int | chan<- *int | chan *int
ch := reflect.MakeChan(t, 0)
- print(ch.Interface().(chan *int)) // @pointsto reflectMakechan@testdata/chanreflect.go:49:24
+ print(ch.Interface().(chan *int)) // @pointsto <alloc in reflect.MakeChan>
ch.Send(reflect.ValueOf(&b))
ch.Interface().(chan *int) <- &a
r, _ := ch.Recv()
diff --git a/pointer/testdata/funcreflect.go b/pointer/testdata/funcreflect.go
index d9275dc..7fe8ca8 100644
--- a/pointer/testdata/funcreflect.go
+++ b/pointer/testdata/funcreflect.go
@@ -2,29 +2,43 @@
package main
-//
-
import "reflect"
-var a, b int
+var zero, a, b int
-func f(p *int) *int {
- print(p) // @pointsto
- return &b
+// func f(p *int) *int {
+// print(p) // #@pointsto
+// return &b
+// }
+
+// func g(p *bool) {
+// }
+
+// func reflectValueCall() {
+// rvf := reflect.ValueOf(f)
+// res := rvf.Call([]reflect.Value{reflect.ValueOf(&a)})
+// print(res[0].Interface()) // #@types
+// print(res[0].Interface().(*int)) // #@pointsto
+// }
+
+// #@calls main.reflectValueCall -> main.f
+
+func reflectTypeInOut() {
+ var f func(float64, bool) (string, int)
+ // TODO(adonovan): when the In/Out argument is a valid index constant,
+ // only include a single type in the result. Needs some work.
+ print(reflect.Zero(reflect.TypeOf(f).In(0)).Interface()) // @types float64 | bool
+ print(reflect.Zero(reflect.TypeOf(f).In(1)).Interface()) // @types float64 | bool
+ print(reflect.Zero(reflect.TypeOf(f).In(-1)).Interface()) // @types float64 | bool
+ print(reflect.Zero(reflect.TypeOf(f).In(zero)).Interface()) // @types float64 | bool
+
+ print(reflect.Zero(reflect.TypeOf(f).Out(0)).Interface()) // @types string | int
+ print(reflect.Zero(reflect.TypeOf(f).Out(1)).Interface()) // @types string | int
+ print(reflect.Zero(reflect.TypeOf(f).Out(2)).Interface()) // @types string | int
+ print(reflect.Zero(reflect.TypeOf(3).Out(0)).Interface()) // @types
}
-func g(p *bool) {
-}
-
-func funcreflect1() {
- rvf := reflect.ValueOf(f)
- res := rvf.Call([]reflect.Value{reflect.ValueOf(&a)})
- print(res[0].Interface()) // @types
- print(res[0].Interface().(*int)) // @pointsto
-}
-
-// @calls main.funcreflect1 -> main.f
-
func main() {
- funcreflect1()
+ //reflectValueCall()
+ reflectTypeInOut()
}
diff --git a/pointer/testdata/mapreflect.go b/pointer/testdata/mapreflect.go
index 5305c20..721d5e4 100644
--- a/pointer/testdata/mapreflect.go
+++ b/pointer/testdata/mapreflect.go
@@ -9,7 +9,7 @@
var a int
var b bool
-func mapreflect1() {
+func reflectMapKeysIndex() {
m := make(map[*int]*bool) // @line mr1make
m[&a] = &b
@@ -33,7 +33,7 @@
}
}
-func mapreflect2() {
+func reflectSetMapIndex() {
m := make(map[*int]*bool)
mrv := reflect.ValueOf(m)
mrv.SetMapIndex(reflect.ValueOf(&a), reflect.ValueOf(&b))
@@ -64,7 +64,16 @@
print(reflect.Zero(tmap.Elem()).Interface()) // @types *bool
}
+func reflectMakeMap() {
+ t := reflect.TypeOf(map[*int]*bool(nil))
+ v := reflect.MakeMap(t)
+ print(v) // @types map[*int]*bool
+ print(v) // @pointsto <alloc in reflect.MakeMap>
+}
+
func main() {
- mapreflect1()
- mapreflect2()
+ reflectMapKeysIndex()
+ reflectSetMapIndex()
+ reflectMakeMap()
+ // TODO(adonovan): reflect.MapOf(Type)
}
diff --git a/pointer/testdata/reflect.go b/pointer/testdata/reflect.go
index 6aa83fb..306f764 100644
--- a/pointer/testdata/reflect.go
+++ b/pointer/testdata/reflect.go
@@ -6,6 +6,7 @@
import "unsafe"
var a, b int
+var unknown bool
func reflectIndirect() {
ptr := &a
@@ -20,19 +21,22 @@
print(reflect.NewAt(reflect.TypeOf(3), unsafe.Pointer(&x)).Interface()) // @types *int
}
-// @warning "unsound: main.reflectNewAt contains a reflect.NewAt.. call"
+// TODO(adonovan): report the location of the caller, not NewAt.
+// #warning "unsound: main.reflectNewAt contains a reflect.NewAt.. call"
+// @warning "unsound: reflect.NewAt.. call"
func reflectTypeOf() {
t := reflect.TypeOf(3)
if unknown {
t = reflect.TypeOf("foo")
}
- print(t) // @types *reflect.rtype
+ // TODO(adonovan): make types.Eval let us refer to unexported types.
+ print(t) // #@types *reflect.rtype
print(reflect.Zero(t).Interface()) // @types int | string
newint := reflect.New(t).Interface() // @line rtonew
print(newint) // @types *int | *string
- print(newint.(*int)) // @pointsto reflectAlloc@rtonew:23
- print(newint.(*string)) // @pointsto reflectAlloc@rtonew:23
+ print(newint.(*int)) // @pointsto <alloc in reflect.New>
+ print(newint.(*string)) // @pointsto <alloc in reflect.New>
}
func reflectTypeElem() {
@@ -44,26 +48,9 @@
print(reflect.Zero(reflect.TypeOf(3).Elem()).Interface()) // @types
}
-func reflectTypeInOut() {
- var f func(float64, bool) (string, int)
- print(reflect.Zero(reflect.TypeOf(f).In(0)).Interface()) // @types float64
- print(reflect.Zero(reflect.TypeOf(f).In(1)).Interface()) // @types bool
- print(reflect.Zero(reflect.TypeOf(f).In(-1)).Interface()) // @types float64 | bool
- print(reflect.Zero(reflect.TypeOf(f).In(zero)).Interface()) // @types float64 | bool
-
- print(reflect.Zero(reflect.TypeOf(f).Out(0)).Interface()) // @types string
- print(reflect.Zero(reflect.TypeOf(f).Out(1)).Interface()) // @types int
- print(reflect.Zero(reflect.TypeOf(f).Out(2)).Interface()) // @types string | int
- print(reflect.Zero(reflect.TypeOf(3).Out(0)).Interface()) // @types
-}
-
func main() {
reflectIndirect()
reflectNewAt()
reflectTypeOf()
reflectTypeElem()
- reflectTypeInOut()
}
-
-var unknown bool
-var zero int
diff --git a/pointer/util.go b/pointer/util.go
index 64d8a16..0d37a80 100644
--- a/pointer/util.go
+++ b/pointer/util.go
@@ -278,47 +278,30 @@
// Worklist -------------------------------------------------------------------
-// TODO(adonovan): interface may not be general enough for certain
-// implementations, e.g. priority queue
-//
-// Uses double-buffering so nodes can be added during iteration.
+const empty nodeid = 1<<32 - 1
+
type worklist interface {
- empty() bool // Reports whether active buffer is empty.
- swap() bool // Switches to the shadow buffer if empty().
- add(nodeid) // Adds a node to the shadow buffer.
- take() nodeid // Takes a node from the active buffer. Precondition: !empty().
+ add(nodeid) // Adds a node to the set
+ take() nodeid // Takes a node from the set and returns it, or empty
}
-// Horribly naive (and nondeterministic) worklist
-// based on two hash-sets.
+// Simple nondeterministic worklist based on a built-in map.
type mapWorklist struct {
- active, shadow nodeset
-}
-
-func (w *mapWorklist) empty() bool {
- return len(w.active) == 0
-}
-
-func (w *mapWorklist) swap() bool {
- if w.empty() {
- w.shadow, w.active = w.active, w.shadow
- return true
- }
- return false
+ set nodeset
}
func (w *mapWorklist) add(n nodeid) {
- w.shadow[n] = struct{}{}
+ w.set[n] = struct{}{}
}
func (w *mapWorklist) take() nodeid {
- for k := range w.active {
- delete(w.active, k)
+ for k := range w.set {
+ delete(w.set, k)
return k
}
- panic("worklist.take(): empty active buffer")
+ return empty
}
func makeMapWorklist() worklist {
- return &mapWorklist{make(nodeset), make(nodeset)}
+ return &mapWorklist{make(nodeset)}
}
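
As a sanity check, the simplified worklist contract can be exercised
standalone; this sketch copies the map-based implementation above into
a tiny program whose drain loop mirrors the new shape of solve():

	package main

	import "fmt"

	type nodeid uint32
	type nodeset map[nodeid]struct{}

	const empty nodeid = 1<<32 - 1

	type mapWorklist struct{ set nodeset }

	func (w *mapWorklist) add(n nodeid) { w.set[n] = struct{}{} }

	func (w *mapWorklist) take() nodeid {
		for k := range w.set {
			delete(w.set, k)
			return k
		}
		return empty // set exhausted
	}

	func main() {
		w := &mapWorklist{make(nodeset)}
		w.add(3)
		w.add(7)
		for {
			id := w.take() // take() until the empty sentinel, as solve() now does
			if id == empty {
				break
			}
			fmt.Println(id) // order is nondeterministic
		}
	}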