[dev.ssa] Merge remote-tracking branch 'origin/master' into mergebranch
Semi-regular merge from tip to dev.ssa.
Change-Id: If7d2269f267bcbc0ecd3a483d349951044470e3f
diff --git a/src/cmd/compile/internal/gc/bexport.go b/src/cmd/compile/internal/gc/bexport.go
index b49f0fb..ff0465f 100644
--- a/src/cmd/compile/internal/gc/bexport.go
+++ b/src/cmd/compile/internal/gc/bexport.go
@@ -877,7 +877,7 @@
// tracef is like fmt.Printf but it rewrites the format string
// to take care of indentation.
func (p *exporter) tracef(format string, args ...interface{}) {
- if strings.IndexAny(format, "<>\n") >= 0 {
+ if strings.ContainsAny(format, "<>\n") {
var buf bytes.Buffer
for i := 0; i < len(format); i++ {
// no need to deal with runes
@@ -1035,6 +1035,9 @@
// package unsafe
Types[TUNSAFEPTR],
+
+ // any type, for builtin export data
+ Types[TANY],
}
}
return predecl
diff --git a/src/cmd/compile/internal/gc/builtin.go b/src/cmd/compile/internal/gc/builtin.go
index 7f2e80b..4a6e56f 100644
--- a/src/cmd/compile/internal/gc/builtin.go
+++ b/src/cmd/compile/internal/gc/builtin.go
@@ -3,7 +3,7 @@
package gc
const runtimeimport = "" +
- "package runtime\n" +
+ "package runtime safe\n" +
"func @\"\".newobject (@\"\".typ·2 *byte) (? *any)\n" +
"func @\"\".panicindex ()\n" +
"func @\"\".panicslice ()\n" +
@@ -44,7 +44,7 @@
"func @\"\".stringtoslicerune (? *[32]rune, ? string) (? []rune)\n" +
"func @\"\".stringiter (? string, ? int) (? int)\n" +
"func @\"\".stringiter2 (? string, ? int) (@\"\".retk·1 int, @\"\".retv·2 rune)\n" +
- "func @\"\".slicecopy (@\"\".to·2 any, @\"\".fr·3 any, @\"\".wid·4 uintptr) (? int)\n" +
+ "func @\"\".slicecopy (@\"\".to·2 any, @\"\".fr·3 any, @\"\".wid·4 uintptr \"unsafe-uintptr\") (? int)\n" +
"func @\"\".slicestringcopy (@\"\".to·2 any, @\"\".fr·3 any) (? int)\n" +
"func @\"\".typ2Itab (@\"\".typ·2 *byte, @\"\".typ2·3 *byte, @\"\".cache·4 **byte) (@\"\".ret·1 *byte)\n" +
"func @\"\".convI2E (@\"\".elem·2 any) (@\"\".ret·1 any)\n" +
@@ -66,8 +66,6 @@
"func @\"\".panicdottype (@\"\".have·1 *byte, @\"\".want·2 *byte, @\"\".iface·3 *byte)\n" +
"func @\"\".ifaceeq (@\"\".i1·2 any, @\"\".i2·3 any) (@\"\".ret·1 bool)\n" +
"func @\"\".efaceeq (@\"\".i1·2 any, @\"\".i2·3 any) (@\"\".ret·1 bool)\n" +
- "func @\"\".ifacethash (@\"\".i1·2 any) (@\"\".ret·1 uint32)\n" +
- "func @\"\".efacethash (@\"\".i1·2 any) (@\"\".ret·1 uint32)\n" +
"func @\"\".makemap (@\"\".mapType·2 *byte, @\"\".hint·3 int64, @\"\".mapbuf·4 *any, @\"\".bucketbuf·5 *any) (@\"\".hmap·1 map[any]any)\n" +
"func @\"\".mapaccess1 (@\"\".mapType·2 *byte, @\"\".hmap·3 map[any]any, @\"\".key·4 *any) (@\"\".val·1 *any)\n" +
"func @\"\".mapaccess1_fast32 (@\"\".mapType·2 *byte, @\"\".hmap·3 map[any]any, @\"\".key·4 any) (@\"\".val·1 *any)\n" +
@@ -91,31 +89,31 @@
"func @\"\".writebarrierstring (@\"\".dst·1 *any, @\"\".src·2 any)\n" +
"func @\"\".writebarrierslice (@\"\".dst·1 *any, @\"\".src·2 any)\n" +
"func @\"\".writebarrieriface (@\"\".dst·1 *any, @\"\".src·2 any)\n" +
- "func @\"\".writebarrierfat01 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat10 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat11 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat001 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat010 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat011 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat100 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat101 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat110 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat111 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0001 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0010 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0011 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0100 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0101 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0110 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat0111 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1000 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1001 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1010 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1011 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1100 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1101 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1110 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
- "func @\"\".writebarrierfat1111 (@\"\".dst·1 *any, _ uintptr, @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat01 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat10 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat11 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat001 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat010 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat011 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat100 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat101 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat110 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat111 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0001 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0010 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0011 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0100 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0101 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0110 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat0111 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1000 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1001 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1010 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1011 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1100 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1101 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1110 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
+ "func @\"\".writebarrierfat1111 (@\"\".dst·1 *any, _ uintptr \"unsafe-uintptr\", @\"\".src·3 any)\n" +
"func @\"\".typedmemmove (@\"\".typ·1 *byte, @\"\".dst·2 *any, @\"\".src·3 *any)\n" +
"func @\"\".typedslicecopy (@\"\".typ·2 *byte, @\"\".dst·3 any, @\"\".src·4 any) (? int)\n" +
"func @\"\".selectnbsend (@\"\".chanType·2 *byte, @\"\".hchan·3 chan<- any, @\"\".elem·4 *any) (? bool)\n" +
@@ -131,9 +129,9 @@
"func @\"\".makeslice (@\"\".typ·2 *byte, @\"\".nel·3 int64, @\"\".cap·4 int64) (@\"\".ary·1 []any)\n" +
"func @\"\".growslice (@\"\".typ·2 *byte, @\"\".old·3 []any, @\"\".cap·4 int) (@\"\".ary·1 []any)\n" +
"func @\"\".growslice_n (@\"\".typ·2 *byte, @\"\".old·3 []any, @\"\".n·4 int) (@\"\".ary·1 []any)\n" +
- "func @\"\".memmove (@\"\".to·1 *any, @\"\".frm·2 *any, @\"\".length·3 uintptr)\n" +
- "func @\"\".memclr (@\"\".ptr·1 *byte, @\"\".length·2 uintptr)\n" +
- "func @\"\".memequal (@\"\".x·2 *any, @\"\".y·3 *any, @\"\".size·4 uintptr) (? bool)\n" +
+ "func @\"\".memmove (@\"\".to·1 *any, @\"\".frm·2 *any, @\"\".length·3 uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".memclr (@\"\".ptr·1 *byte, @\"\".length·2 uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".memequal (@\"\".x·2 *any, @\"\".y·3 *any, @\"\".size·4 uintptr \"unsafe-uintptr\") (? bool)\n" +
"func @\"\".memequal8 (@\"\".x·2 *any, @\"\".y·3 *any) (? bool)\n" +
"func @\"\".memequal16 (@\"\".x·2 *any, @\"\".y·3 *any) (? bool)\n" +
"func @\"\".memequal32 (@\"\".x·2 *any, @\"\".y·3 *any) (? bool)\n" +
@@ -148,15 +146,14 @@
"func @\"\".int64tofloat64 (? int64) (? float64)\n" +
"func @\"\".uint64tofloat64 (? uint64) (? float64)\n" +
"func @\"\".complex128div (@\"\".num·2 complex128, @\"\".den·3 complex128) (@\"\".quo·1 complex128)\n" +
- "func @\"\".racefuncenter (? uintptr)\n" +
- "func @\"\".racefuncenterfp (? *int32)\n" +
+ "func @\"\".racefuncenter (? uintptr \"unsafe-uintptr\")\n" +
"func @\"\".racefuncexit ()\n" +
- "func @\"\".raceread (? uintptr)\n" +
- "func @\"\".racewrite (? uintptr)\n" +
- "func @\"\".racereadrange (@\"\".addr·1 uintptr, @\"\".size·2 uintptr)\n" +
- "func @\"\".racewriterange (@\"\".addr·1 uintptr, @\"\".size·2 uintptr)\n" +
- "func @\"\".msanread (@\"\".addr·1 uintptr, @\"\".size·2 uintptr)\n" +
- "func @\"\".msanwrite (@\"\".addr·1 uintptr, @\"\".size·2 uintptr)\n" +
+ "func @\"\".raceread (? uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".racewrite (? uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".racereadrange (@\"\".addr·1 uintptr \"unsafe-uintptr\", @\"\".size·2 uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".racewriterange (@\"\".addr·1 uintptr \"unsafe-uintptr\", @\"\".size·2 uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".msanread (@\"\".addr·1 uintptr \"unsafe-uintptr\", @\"\".size·2 uintptr \"unsafe-uintptr\")\n" +
+ "func @\"\".msanwrite (@\"\".addr·1 uintptr \"unsafe-uintptr\", @\"\".size·2 uintptr \"unsafe-uintptr\")\n" +
"\n" +
"$$\n"
diff --git a/src/cmd/compile/internal/gc/builtin/runtime.go b/src/cmd/compile/internal/gc/builtin/runtime.go
index 70663ee..0fe6242 100644
--- a/src/cmd/compile/internal/gc/builtin/runtime.go
+++ b/src/cmd/compile/internal/gc/builtin/runtime.go
@@ -8,7 +8,7 @@
// +build ignore
-package PACKAGE
+package runtime
// emitted by compiler, not referred to by go programs
@@ -83,8 +83,6 @@
func ifaceeq(i1 any, i2 any) (ret bool)
func efaceeq(i1 any, i2 any) (ret bool)
-func ifacethash(i1 any) (ret uint32)
-func efacethash(i1 any) (ret uint32)
// *byte is really *runtime.Type
func makemap(mapType *byte, hint int64, mapbuf *any, bucketbuf *any) (hmap map[any]any)
@@ -192,7 +190,6 @@
// race detection
func racefuncenter(uintptr)
-func racefuncenterfp(*int32)
func racefuncexit()
func raceread(uintptr)
func racewrite(uintptr)
diff --git a/src/cmd/compile/internal/gc/builtin/unsafe.go b/src/cmd/compile/internal/gc/builtin/unsafe.go
index ce50869..a7fc8aa 100644
--- a/src/cmd/compile/internal/gc/builtin/unsafe.go
+++ b/src/cmd/compile/internal/gc/builtin/unsafe.go
@@ -8,7 +8,7 @@
// +build ignore
-package PACKAGE
+package unsafe
type Pointer uintptr // not really; filled in by compiler
diff --git a/src/cmd/compile/internal/gc/builtin_test.go b/src/cmd/compile/internal/gc/builtin_test.go
new file mode 100644
index 0000000..94111e6
--- /dev/null
+++ b/src/cmd/compile/internal/gc/builtin_test.go
@@ -0,0 +1,31 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gc_test
+
+import (
+ "bytes"
+ "internal/testenv"
+ "io/ioutil"
+ "os/exec"
+ "testing"
+)
+
+func TestBuiltin(t *testing.T) {
+ testenv.MustHaveGoRun(t)
+
+ old, err := ioutil.ReadFile("builtin.go")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ new, err := exec.Command("go", "run", "mkbuiltin.go", "-stdout").Output()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(old, new) {
+ t.Fatal("builtin.go out of date; run mkbuiltin.go")
+ }
+}
diff --git a/src/cmd/compile/internal/gc/esc.go b/src/cmd/compile/internal/gc/esc.go
index ff983e7..1a5a433 100644
--- a/src/cmd/compile/internal/gc/esc.go
+++ b/src/cmd/compile/internal/gc/esc.go
@@ -576,6 +576,12 @@
if n == nil {
return
}
+ if n.Type != nil && n.Type.Etype == TFIELD {
+ // This is the left side of x:y in a struct literal.
+ // x is syntax, not an expression.
+ // See #14405.
+ return
+ }
lno := int(setlineno(n))
@@ -602,9 +608,10 @@
// Big stuff escapes unconditionally
// "Big" conditions that were scattered around in walk have been gathered here
- if n.Esc != EscHeap && n.Type != nil && (n.Type.Width > MaxStackVarSize ||
- n.Op == ONEW && n.Type.Type.Width >= 1<<16 ||
- n.Op == OMAKESLICE && !isSmallMakeSlice(n)) {
+ if n.Esc != EscHeap && n.Type != nil &&
+ (n.Type.Width > MaxStackVarSize ||
+ n.Op == ONEW && n.Type.Type.Width >= 1<<16 ||
+ n.Op == OMAKESLICE && !isSmallMakeSlice(n)) {
if Debug['m'] > 1 {
Warnl(int(n.Lineno), "%v is too large for stack", n)
}
@@ -962,7 +969,7 @@
dst = &e.theSink
}
- case ODOT: // treat "dst.x = src" as "dst = src"
+ case ODOT: // treat "dst.x = src" as "dst = src"
escassign(e, dst.Left, src)
return
@@ -1042,7 +1049,6 @@
ODOTMETH,
// treat recv.meth as a value with recv in it, only happens in ODEFER and OPROC
// iface.method already leaks iface in esccall, no need to put in extra ODOTINTER edge here
- ODOTTYPE,
ODOTTYPE2,
OSLICE,
OSLICE3,
@@ -1052,6 +1058,12 @@
// Conversions, field access, slice all preserve the input value.
escassign(e, dst, src.Left)
+ case ODOTTYPE:
+ if src.Type != nil && !haspointers(src.Type) {
+ break
+ }
+ escassign(e, dst, src.Left)
+
case OAPPEND:
// Append returns first argument.
// Subsequent arguments are already leaked because they are operands to append.
@@ -1549,9 +1561,9 @@
// finding an OADDR just means we're following the upstream of a dereference,
// so this address doesn't leak (yet).
// If level == 0, it means the /value/ of this node can reach the root of this flood.
-// so if this node is an OADDR, it's argument should be marked as escaping iff
-// it's currfn/e->loopdepth are different from the flood's root.
-// Once an object has been moved to the heap, all of it's upstream should be considered
+// so if this node is an OADDR, its argument should be marked as escaping iff
+// its currfn/e->loopdepth are different from the flood's root.
+// Once an object has been moved to the heap, all of its upstream should be considered
// escaping to the global scope.
func escflood(e *EscState, dst *Node) {
switch dst.Op {
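
Two of the escape-analysis changes are easiest to see from the source constructs they handle: the key left of ":" in a struct literal is syntax rather than an expression (#14405), and a type assertion to a pointer-free type cannot leak its operand. A hedged illustration with made-up types:

    package main

    type S struct{ p *int }

    func f(i interface{}) int {
        v := new(int)
        _ = S{p: v}     // the "p" left of the colon is a field key, not an expression (#14405)
        n, _ := i.(int) // assertion to a pointer-free type: nothing can escape through n
        return n
    }

    func main() { _ = f(1) }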
diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go
index e50cf38..1b61d7f 100644
--- a/src/cmd/compile/internal/gc/export.go
+++ b/src/cmd/compile/internal/gc/export.go
@@ -442,7 +442,7 @@
// mark the symbol so it is not reexported
if s.Def == nil {
- if exportname(s.Name) || initname(s.Name) {
+ if Debug['A'] != 0 || exportname(s.Name) || initname(s.Name) {
s.Flags |= SymExport
} else {
s.Flags |= SymPackage // package scope
diff --git a/src/cmd/compile/internal/gc/fmt.go b/src/cmd/compile/internal/gc/fmt.go
index dbd8db1..6902281 100644
--- a/src/cmd/compile/internal/gc/fmt.go
+++ b/src/cmd/compile/internal/gc/fmt.go
@@ -749,7 +749,13 @@
if name != "" {
str = name + " " + typ
}
- if flag&obj.FmtShort == 0 && !fmtbody && t.Note != nil {
+
+ // The fmtbody flag is intended to suppress escape analysis annotations
+ // when printing a function type used in a function body.
+ // (The escape analysis tags do not apply to func vars.)
+ // But it must not suppress struct field tags.
+ // See golang.org/issue/13777 and golang.org/issue/14331.
+ if flag&obj.FmtShort == 0 && (!fmtbody || !t.Funarg) && t.Note != nil {
str += " " + strconv.Quote(*t.Note)
}
return str
@@ -1537,7 +1543,7 @@
} else {
fmt.Fprintf(&buf, "%v%v", Oconv(int(n.Op), 0), Jconv(n, 0))
}
- if recur && n.Type == nil && n.Name.Param.Ntype != nil {
+ if recur && n.Type == nil && n.Name != nil && n.Name.Param != nil && n.Name.Param.Ntype != nil {
indent(&buf)
fmt.Fprintf(&buf, "%v-ntype%v", Oconv(int(n.Op), 0), n.Name.Param.Ntype)
}
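
The formatting fix above separates function-parameter notes (which fmtbody may suppress) from struct field tags, which must survive even for types declared inside a function body (issues 13777 and 14331). A small illustrative program showing that such a tag is part of the type:

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        // A struct type declared inside a function body: its field tag is part
        // of the type and must not be dropped when the type is formatted.
        type T struct {
            Name string `json:"name"`
        }
        f, _ := reflect.TypeOf(T{}).FieldByName("Name")
        fmt.Println(f.Tag) // json:"name"
    }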
diff --git a/src/cmd/compile/internal/gc/gen.go b/src/cmd/compile/internal/gc/gen.go
index 60b93ef..721ef31 100644
--- a/src/cmd/compile/internal/gc/gen.go
+++ b/src/cmd/compile/internal/gc/gen.go
@@ -838,7 +838,7 @@
Cgen_as_wb(n.Left, n.Right, true)
case OAS2DOTTYPE:
- cgen_dottype(n.Rlist.N, n.List.N, n.List.Next.N, false)
+ cgen_dottype(n.Rlist.N, n.List.N, n.List.Next.N, needwritebarrier(n.List.N, n.Rlist.N))
case OCALLMETH:
cgen_callmeth(n, 0)
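
The gen.go change makes the two-result type assertion form consult needwritebarrier instead of assuming no barrier is needed. The statement shape involved looks roughly like this (the global and the type are illustrative):

    package main

    type T struct{ p *byte }

    var dst T // package-level destination that contains a pointer

    func g(i interface{}) bool {
        var ok bool
        dst, ok = i.(T) // OAS2DOTTYPE: the store into dst may need a write barrier
        return ok
    }

    func main() { _ = g(T{}) }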
diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go
index 08442a4..f721fab 100644
--- a/src/cmd/compile/internal/gc/go.go
+++ b/src/cmd/compile/internal/gc/go.go
@@ -28,30 +28,21 @@
const (
// These values are known by runtime.
- // The MEMx and NOEQx values must run in parallel. See algtype.
- AMEM = iota
+ ANOEQ = iota
AMEM0
AMEM8
AMEM16
AMEM32
AMEM64
AMEM128
- ANOEQ
- ANOEQ0
- ANOEQ8
- ANOEQ16
- ANOEQ32
- ANOEQ64
- ANOEQ128
ASTRING
AINTER
ANILINTER
- ASLICE
AFLOAT32
AFLOAT64
ACPLX64
ACPLX128
- AUNK = 100
+ AMEM = 100
)
const (
@@ -329,8 +320,7 @@
const (
// types of channel
- // must match ../../pkg/nreflect/type.go:/Chandir
- Cxxx = 0
+ // must match ../../../../reflect/type.go:/ChanDir
Crecv = 1 << 0
Csend = 1 << 1
Cboth = Crecv | Csend
@@ -385,27 +375,10 @@
offset int32
}
-type Io struct {
- infile string
- bin *obj.Biobuf
- cp string // used for content when bin==nil
- last int
- peekc int
- peekc1 int // second peekc for ...
- nlsemi bool
- eofnl bool
- importsafe bool
-}
-
type Dlist struct {
field *Type
}
-type Idir struct {
- link *Idir
- dir string
-}
-
// argument passing to/from
// smagic and umagic
type Magic struct {
@@ -452,10 +425,6 @@
var dotlist [10]Dlist // size is max depth of embeddeds
-var curio Io
-
-var pushedio Io
-
var lexlineno int32
var lineno int32
@@ -493,8 +462,6 @@
var Debug_checknil int
var Debug_typeassert int
-var importmyname *Sym // my name for package
-
var localpkg *Pkg // package being compiled
var importpkg *Pkg // package being imported
@@ -527,8 +494,6 @@
var myimportpath string
-var idirs *Idir
-
var localimport string
var asmhdr string
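
The channel-direction constants are required to match reflect.ChanDir, where RecvDir is 1, SendDir is 2 and BothDir is their union. A quick illustrative check:

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        // Crecv, Csend and Cboth in the compiler must line up with these values.
        fmt.Println(int(reflect.RecvDir), int(reflect.SendDir), int(reflect.BothDir)) // 1 2 3
    }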
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go
index 64afd67..a445f71 100644
--- a/src/cmd/compile/internal/gc/inl.go
+++ b/src/cmd/compile/internal/gc/inl.go
@@ -7,7 +7,7 @@
// saves a copy of the body. Then inlcalls walks each function body to
// expand calls to inlinable functions.
//
-// The debug['l'] flag controls the agressiveness. Note that main() swaps level 0 and 1,
+// The debug['l'] flag controls the aggressiveness. Note that main() swaps level 0 and 1,
// making 1 the default and -l disable. -ll and more is useful to flush out bugs.
// These additional levels (beyond -l) may be buggy and are not supported.
// 0: disabled
diff --git a/src/cmd/compile/internal/gc/lex.go b/src/cmd/compile/internal/gc/lex.go
index e4ce9c7..51ad616 100644
--- a/src/cmd/compile/internal/gc/lex.go
+++ b/src/cmd/compile/internal/gc/lex.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:generate go run mkbuiltin.go runtime unsafe
+//go:generate go run mkbuiltin.go
package gc
@@ -257,7 +257,7 @@
msanpkg.Name = "msan"
}
if flag_race != 0 && flag_msan != 0 {
- log.Fatal("can not use both -race and -msan")
+ log.Fatal("cannot use both -race and -msan")
} else if flag_race != 0 || flag_msan != 0 {
instrumenting = true
}
@@ -313,6 +313,8 @@
lexlineno = 1
const BOM = 0xFEFF
+ loadsys()
+
for _, infile = range flag.Args() {
if trace && Debug['x'] != 0 {
fmt.Printf("--- %s ---\n", infile)
@@ -320,23 +322,15 @@
linehistpush(infile)
- curio.infile = infile
- var err error
- curio.bin, err = obj.Bopenr(infile)
+ bin, err := obj.Bopenr(infile)
if err != nil {
fmt.Printf("open %s: %v\n", infile, err)
errorexit()
}
- curio.peekc = 0
- curio.peekc1 = 0
- curio.nlsemi = false
- curio.eofnl = false
- curio.last = 0
-
// Skip initial BOM if present.
- if obj.Bgetrune(curio.bin) != BOM {
- obj.Bungetrune(curio.bin)
+ if obj.Bgetrune(bin) != BOM {
+ obj.Bungetrune(bin)
}
block = 1
@@ -344,15 +338,18 @@
imported_unsafe = false
- parse_file()
+ parse_file(bin)
if nsyntaxerrors != 0 {
errorexit()
}
+ // Instead of converting EOF into '\n' in getc and count it as an extra line
+ // for the line history to work, and which then has to be corrected elsewhere,
+ // just add a line here.
+ lexlineno++
+
linehistpop()
- if curio.bin != nil {
- obj.Bterm(curio.bin)
- }
+ obj.Bterm(bin)
}
testdclstack()
@@ -571,17 +568,12 @@
return true
}
-func addidir(dir string) {
- if dir == "" {
- return
- }
+var idirs []string
- var pp **Idir
- for pp = &idirs; *pp != nil; pp = &(*pp).link {
+func addidir(dir string) {
+ if dir != "" {
+ idirs = append(idirs, dir)
}
- *pp = new(Idir)
- (*pp).link = nil
- (*pp).dir = dir
}
// is this path a local name? begins with ./ or ../ or /
@@ -620,12 +612,12 @@
return "", false
}
- for p := idirs; p != nil; p = p.link {
- file = fmt.Sprintf("%s/%s.a", p.dir, name)
+ for _, dir := range idirs {
+ file = fmt.Sprintf("%s/%s.a", dir, name)
if _, err := os.Stat(file); err == nil {
return file, true
}
- file = fmt.Sprintf("%s/%s.o", p.dir, name)
+ file = fmt.Sprintf("%s/%s.o", dir, name)
if _, err := os.Stat(file); err == nil {
return file, true
}
@@ -658,27 +650,45 @@
return "", false
}
-func fakeimport() {
- importpkg = mkpkg("fake")
- cannedimports("fake.o", "$$\n")
+// loadsys loads the definitions for the low-level runtime and unsafe functions,
+// so that the compiler can generate calls to them,
+// but does not make the names "runtime" or "unsafe" visible as packages.
+func loadsys() {
+ if Debug['A'] != 0 {
+ return
+ }
+
+ block = 1
+ iota_ = -1000000
+ incannedimport = 1
+
+ importpkg = Runtimepkg
+ parse_import(obj.Binitr(strings.NewReader(runtimeimport)), nil)
+
+ importpkg = unsafepkg
+ parse_import(obj.Binitr(strings.NewReader(unsafeimport)), nil)
+
+ importpkg = nil
+ incannedimport = 0
}
-// TODO(gri) line argument doesn't appear to be used
-func importfile(f *Val, line int) {
- if _, ok := f.U.(string); !ok {
+func importfile(f *Val, indent []byte) {
+ if importpkg != nil {
+ Fatalf("importpkg not nil")
+ }
+
+ path_, ok := f.U.(string)
+ if !ok {
Yyerror("import statement not a string")
- fakeimport()
return
}
- if len(f.U.(string)) == 0 {
+ if len(path_) == 0 {
Yyerror("import path is empty")
- fakeimport()
return
}
- if isbadimport(f.U.(string)) {
- fakeimport()
+ if isbadimport(path_) {
return
}
@@ -686,18 +696,16 @@
// but we reserve the import path "main" to identify
// the main package, just as we reserve the import
// path "math" to identify the standard math package.
- if f.U.(string) == "main" {
+ if path_ == "main" {
Yyerror("cannot import \"main\"")
errorexit()
}
- if myimportpath != "" && f.U.(string) == myimportpath {
- Yyerror("import %q while compiling that package (import cycle)", f.U.(string))
+ if myimportpath != "" && path_ == myimportpath {
+ Yyerror("import %q while compiling that package (import cycle)", path_)
errorexit()
}
- path_ := f.U.(string)
-
if mapped, ok := importMap[path_]; ok {
path_ = mapped
}
@@ -708,8 +716,7 @@
errorexit()
}
- importpkg = mkpkg(f.U.(string))
- cannedimports("unsafe.o", unsafeimport)
+ importpkg = unsafepkg
imported_unsafe = true
return
}
@@ -717,7 +724,6 @@
if islocalname(path_) {
if path_[0] == '/' {
Yyerror("import path cannot be absolute path")
- fakeimport()
return
}
@@ -725,48 +731,33 @@
if localimport != "" {
prefix = localimport
}
- cleanbuf := prefix
- cleanbuf += "/"
- cleanbuf += path_
- cleanbuf = path.Clean(cleanbuf)
- path_ = cleanbuf
+ path_ = path.Join(prefix, path_)
if isbadimport(path_) {
- fakeimport()
return
}
}
file, found := findpkg(path_)
if !found {
- Yyerror("can't find import: %q", f.U.(string))
+ Yyerror("can't find import: %q", path_)
errorexit()
}
importpkg = mkpkg(path_)
- // If we already saw that package, feed a dummy statement
- // to the lexer to avoid parsing export data twice.
if importpkg.Imported {
- tag := ""
- if importpkg.Safe {
- tag = "safe"
- }
-
- p := fmt.Sprintf("package %s %s\n$$\n", importpkg.Name, tag)
- cannedimports(file, p)
return
}
importpkg.Imported = true
- var err error
- var imp *obj.Biobuf
- imp, err = obj.Bopenr(file)
+ imp, err := obj.Bopenr(file)
if err != nil {
- Yyerror("can't open import: %q: %v", f.U.(string), err)
+ Yyerror("can't open import: %q: %v", path_, err)
errorexit()
}
+ defer obj.Bterm(imp)
if strings.HasSuffix(file, ".a") {
if !skiptopkgdef(imp) {
@@ -822,76 +813,21 @@
switch c {
case '\n':
// old export format
- pushedio = curio
-
- curio.bin = imp
- curio.peekc = 0
- curio.peekc1 = 0
- curio.infile = file
- curio.nlsemi = false
- typecheckok = true
-
- push_parser()
+ parse_import(imp, indent)
case 'B':
// new export format
obj.Bgetc(imp) // skip \n after $$B
Import(imp)
- // continue as if the package was imported before (see above)
- tag := ""
- if importpkg.Safe {
- tag = "safe"
- }
- p := fmt.Sprintf("package %s %s\n$$\n", importpkg.Name, tag)
- cannedimports(file, p)
- // Reset incannedimport flag (we are not truly in a
- // canned import) - this will cause importpkg.Direct to
- // be set via parser.import_package (was issue #13977).
- //
- // TODO(gri) Remove this global variable and convoluted
- // code in the process of streamlining the import code.
- incannedimport = 0
-
default:
- Yyerror("no import in %q", f.U.(string))
- }
-}
-
-func unimportfile() {
- pop_parser()
-
- if curio.bin != nil {
- obj.Bterm(curio.bin)
- curio.bin = nil
- } else {
- lexlineno-- // re correct sys.6 line number
+ Yyerror("no import in %q", path_)
+ errorexit()
}
- curio = pushedio
-
- pushedio.bin = nil
- incannedimport = 0
- typecheckok = false
-}
-
-func cannedimports(file string, cp string) {
- lexlineno++ // if sys.6 is included on line 1,
-
- pushedio = curio
-
- curio.bin = nil
- curio.peekc = 0
- curio.peekc1 = 0
- curio.infile = file
- curio.cp = cp
- curio.nlsemi = false
- curio.importsafe = false
-
- typecheckok = true
- incannedimport = 1
-
- push_parser()
+ if safemode != 0 && !importpkg.Safe {
+ Yyerror("cannot import unsafe package %q", importpkg.Path)
+ }
}
func isSpace(c int) bool {
@@ -899,7 +835,7 @@
}
func isAlpha(c int) bool {
- return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+ return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z'
}
func isDigit(c int) bool {
@@ -933,10 +869,19 @@
return false
}
-type yySymType struct {
- sym *Sym
- val Val
- op Op
+type lexer struct {
+ // source
+ bin *obj.Biobuf
+ peekc int
+ peekc1 int // second peekc for ...
+
+ nlsemi bool // if set, '\n' and EOF translate to ';'
+
+ // current token
+ tok int32
+ sym_ *Sym // valid if tok == LNAME
+ val Val // valid if tok == LLITERAL
+ op Op // valid if tok == LASOP
}
const (
@@ -987,7 +932,7 @@
LRSH
)
-func _yylex(yylval *yySymType) int32 {
+func (l *lexer) next() {
var c1 int
var op Op
var escflag int
@@ -998,27 +943,31 @@
prevlineno = lineno
+ nlsemi := l.nlsemi
+ l.nlsemi = false
+
l0:
- c := getc()
- if isSpace(c) {
- if c == '\n' && curio.nlsemi {
- ungetc(c)
+ // skip white space
+ c := l.getc()
+ for isSpace(c) {
+ if c == '\n' && nlsemi {
+ l.ungetc(c)
if Debug['x'] != 0 {
fmt.Printf("lex: implicit semi\n")
}
- return ';'
+ l.tok = ';'
+ return
}
-
- goto l0
+ c = l.getc()
}
- lineno = lexlineno // start of token
+ // start of token
+ lineno = lexlineno
if c >= utf8.RuneSelf {
// all multibyte runes are alpha
cp = &lexbuf
cp.Reset()
-
goto talph
}
@@ -1034,7 +983,7 @@
if c != '0' {
for {
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
if isDigit(c) {
continue
}
@@ -1052,11 +1001,11 @@
}
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
if c == 'x' || c == 'X' {
for {
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
if isDigit(c) {
continue
}
@@ -1089,7 +1038,7 @@
c1 = 1 // not octal
}
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
}
if c == '.' {
@@ -1110,8 +1059,18 @@
switch c {
case EOF:
lineno = prevlineno
- ungetc(EOF)
- return -1
+ l.ungetc(EOF)
+ // Treat EOF as "end of line" for the purposes
+ // of inserting a semicolon.
+ if nlsemi {
+ if Debug['x'] != 0 {
+ fmt.Printf("lex: implicit semi\n")
+ }
+ l.tok = ';'
+ return
+ }
+ l.tok = -1
+ return
case '_':
cp = &lexbuf
@@ -1119,7 +1078,7 @@
goto talph
case '.':
- c1 = getc()
+ c1 = l.getc()
if isDigit(c1) {
cp = &lexbuf
cp.Reset()
@@ -1129,13 +1088,13 @@
}
if c1 == '.' {
- c1 = getc()
+ c1 = l.getc()
if c1 == '.' {
c = LDDD
goto lx
}
- ungetc(c1)
+ l.ungetc(c1)
c1 = '.'
}
@@ -1148,7 +1107,7 @@
cp.Reset()
for {
- if escchar('"', &escflag, &v) {
+ if l.escchar('"', &escflag, &v) {
break
}
if v < utf8.RuneSelf || escflag != 0 {
@@ -1169,7 +1128,7 @@
cp.Reset()
for {
- c = int(getr())
+ c = int(l.getr())
if c == '\r' {
continue
}
@@ -1188,40 +1147,42 @@
// '.'
case '\'':
- if escchar('\'', &escflag, &v) {
+ if l.escchar('\'', &escflag, &v) {
Yyerror("empty character literal or unescaped ' in character literal")
v = '\''
}
- if !escchar('\'', &escflag, &v) {
+ if !l.escchar('\'', &escflag, &v) {
Yyerror("missing '")
- ungetc(int(v))
+ l.ungetc(int(v))
}
x := new(Mpint)
- yylval.val.U = x
+ l.val.U = x
Mpmovecfix(x, v)
x.Rune = true
if Debug['x'] != 0 {
fmt.Printf("lex: codepoint literal\n")
}
litbuf = "string literal"
- return LLITERAL
+ l.nlsemi = true
+ l.tok = LLITERAL
+ return
case '/':
- c1 = getc()
+ c1 = l.getc()
if c1 == '*' {
nl := false
for {
- c = int(getr())
+ c = int(l.getr())
if c == '\n' {
nl = true
}
for c == '*' {
- c = int(getr())
+ c = int(l.getr())
if c == '/' {
if nl {
- ungetc('\n')
+ l.ungetc('\n')
}
goto l0
}
@@ -1239,14 +1200,14 @@
}
if c1 == '/' {
- c = getlinepragma()
+ c = l.getlinepragma()
for {
if c == '\n' || c == EOF {
- ungetc(c)
+ l.ungetc(c)
goto l0
}
- c = int(getr())
+ c = int(l.getr())
}
}
@@ -1256,29 +1217,30 @@
}
case ':':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
c = int(LCOLAS)
goto lx
}
case '*':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = OMUL
goto asop
}
case '%':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = OMOD
goto asop
}
case '+':
- c1 = getc()
+ c1 = l.getc()
if c1 == '+' {
+ l.nlsemi = true
c = int(LINC)
goto lx
}
@@ -1289,8 +1251,9 @@
}
case '-':
- c1 = getc()
+ c1 = l.getc()
if c1 == '-' {
+ l.nlsemi = true
c = int(LDEC)
goto lx
}
@@ -1301,10 +1264,10 @@
}
case '>':
- c1 = getc()
+ c1 = l.getc()
if c1 == '>' {
c = int(LRSH)
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = ORSH
goto asop
@@ -1321,10 +1284,10 @@
c = int(LGT)
case '<':
- c1 = getc()
+ c1 = l.getc()
if c1 == '<' {
c = int(LLSH)
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = OLSH
goto asop
@@ -1346,21 +1309,21 @@
c = int(LLT)
case '=':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
c = int(LEQ)
goto lx
}
case '!':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
c = int(LNE)
goto lx
}
case '&':
- c1 = getc()
+ c1 = l.getc()
if c1 == '&' {
c = int(LANDAND)
goto lx
@@ -1368,7 +1331,7 @@
if c1 == '^' {
c = int(LANDNOT)
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = OANDNOT
goto asop
@@ -1383,7 +1346,7 @@
}
case '|':
- c1 = getc()
+ c1 = l.getc()
if c1 == '|' {
c = int(LOROR)
goto lx
@@ -1395,17 +1358,21 @@
}
case '^':
- c1 = getc()
+ c1 = l.getc()
if c1 == '=' {
op = OXOR
goto asop
}
+ case ')', ']', '}':
+ l.nlsemi = true
+ goto lx
+
default:
goto lx
}
- ungetc(c1)
+ l.ungetc(c1)
lx:
if Debug['x'] != 0 {
@@ -1425,22 +1392,24 @@
goto l0
}
- return int32(c)
+ l.tok = int32(c)
+ return
asop:
- yylval.op = op
+ l.op = op
if Debug['x'] != 0 {
fmt.Printf("lex: TOKEN ASOP %s=\n", goopnames[op])
}
- return LASOP
+ l.tok = LASOP
+ return
// cp is set to lexbuf and some
// prefix has been stored
talph:
for {
if c >= utf8.RuneSelf {
- ungetc(c)
- r := rune(getr())
+ l.ungetc(c)
+ r := rune(l.getr())
// 0xb7 · is used for internal names
if !unicode.IsLetter(r) && !unicode.IsDigit(r) && (importpkg == nil || r != 0xb7) {
@@ -1455,11 +1424,11 @@
} else {
cp.WriteByte(byte(c))
}
- c = getc()
+ c = l.getc()
}
cp = nil
- ungetc(c)
+ l.ungetc(c)
s = LookupBytes(lexbuf.Bytes())
if s.Lexical == LIGNORE {
@@ -1469,31 +1438,38 @@
if Debug['x'] != 0 {
fmt.Printf("lex: %s %s\n", s, lexname(int(s.Lexical)))
}
- yylval.sym = s
- return int32(s.Lexical)
+ l.sym_ = s
+ switch s.Lexical {
+ case LNAME, LRETURN, LBREAK, LCONTINUE, LFALL:
+ l.nlsemi = true
+ }
+ l.tok = int32(s.Lexical)
+ return
ncu:
cp = nil
- ungetc(c)
+ l.ungetc(c)
str = lexbuf.String()
- yylval.val.U = new(Mpint)
- mpatofix(yylval.val.U.(*Mpint), str)
- if yylval.val.U.(*Mpint).Ovf {
+ l.val.U = new(Mpint)
+ mpatofix(l.val.U.(*Mpint), str)
+ if l.val.U.(*Mpint).Ovf {
Yyerror("overflow in constant")
- Mpmovecfix(yylval.val.U.(*Mpint), 0)
+ Mpmovecfix(l.val.U.(*Mpint), 0)
}
if Debug['x'] != 0 {
fmt.Printf("lex: integer literal\n")
}
litbuf = "literal " + str
- return LLITERAL
+ l.nlsemi = true
+ l.tok = LLITERAL
+ return
casedot:
for {
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
if !isDigit(c) {
break
}
@@ -1513,10 +1489,10 @@
Yyerror("malformed floating point constant")
}
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
if c == '+' || c == '-' {
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
}
if !isDigit(c) {
@@ -1524,7 +1500,7 @@
}
for isDigit(c) {
cp.WriteByte(byte(c))
- c = getc()
+ c = l.getc()
}
if c == 'i' {
@@ -1537,56 +1513,60 @@
cp = nil
str = lexbuf.String()
- yylval.val.U = new(Mpcplx)
- Mpmovecflt(&yylval.val.U.(*Mpcplx).Real, 0.0)
- mpatoflt(&yylval.val.U.(*Mpcplx).Imag, str)
- if yylval.val.U.(*Mpcplx).Imag.Val.IsInf() {
+ l.val.U = new(Mpcplx)
+ Mpmovecflt(&l.val.U.(*Mpcplx).Real, 0.0)
+ mpatoflt(&l.val.U.(*Mpcplx).Imag, str)
+ if l.val.U.(*Mpcplx).Imag.Val.IsInf() {
Yyerror("overflow in imaginary constant")
- Mpmovecflt(&yylval.val.U.(*Mpcplx).Imag, 0.0)
+ Mpmovecflt(&l.val.U.(*Mpcplx).Imag, 0.0)
}
if Debug['x'] != 0 {
fmt.Printf("lex: imaginary literal\n")
}
litbuf = "literal " + str
- return LLITERAL
+ l.nlsemi = true
+ l.tok = LLITERAL
+ return
caseout:
cp = nil
- ungetc(c)
+ l.ungetc(c)
str = lexbuf.String()
- yylval.val.U = newMpflt()
- mpatoflt(yylval.val.U.(*Mpflt), str)
- if yylval.val.U.(*Mpflt).Val.IsInf() {
+ l.val.U = newMpflt()
+ mpatoflt(l.val.U.(*Mpflt), str)
+ if l.val.U.(*Mpflt).Val.IsInf() {
Yyerror("overflow in float constant")
- Mpmovecflt(yylval.val.U.(*Mpflt), 0.0)
+ Mpmovecflt(l.val.U.(*Mpflt), 0.0)
}
if Debug['x'] != 0 {
fmt.Printf("lex: floating literal\n")
}
litbuf = "literal " + str
- return LLITERAL
+ l.nlsemi = true
+ l.tok = LLITERAL
+ return
strlit:
- yylval.val.U = internString(cp.Bytes())
+ l.val.U = internString(cp.Bytes())
if Debug['x'] != 0 {
fmt.Printf("lex: string literal\n")
}
litbuf = "string literal"
- return LLITERAL
+ l.nlsemi = true
+ l.tok = LLITERAL
}
var internedStrings = map[string]string{}
func internString(b []byte) string {
s, ok := internedStrings[string(b)] // string(b) here doesn't allocate
- if ok {
- return s
+ if !ok {
+ s = string(b)
+ internedStrings[s] = s
}
- s = string(b)
- internedStrings[s] = s
return s
}
@@ -1603,16 +1583,16 @@
// //line parse.y:15
// as a discontinuity in sequential line numbers.
// the next line of input comes from parse.y:15
-func getlinepragma() int {
+func (l *lexer) getlinepragma() int {
var cmd, verb, name string
- c := int(getr())
+ c := int(l.getr())
if c == 'g' {
cp := &lexbuf
cp.Reset()
cp.WriteByte('g') // already read
for {
- c = int(getr())
+ c = int(l.getr())
if c == EOF || c >= utf8.RuneSelf {
return c
}
@@ -1704,7 +1684,7 @@
return c
}
for i := 1; i < 5; i++ {
- c = int(getr())
+ c = int(l.getr())
if c != int("line "[i]) {
return c
}
@@ -1714,7 +1694,7 @@
cp.Reset()
linep := 0
for {
- c = int(getr())
+ c = int(l.getr())
if c == EOF {
return c
}
@@ -1895,113 +1875,57 @@
}
}
-func yylex(yylval *yySymType) int32 {
- lx := _yylex(yylval)
-
- if curio.nlsemi && lx == EOF {
- // Treat EOF as "end of line" for the purposes
- // of inserting a semicolon.
- lx = ';'
- }
-
- switch lx {
- case LNAME,
- LLITERAL,
- LBREAK,
- LCONTINUE,
- LFALL,
- LRETURN,
- LINC,
- LDEC,
- ')',
- '}',
- ']':
- curio.nlsemi = true
-
- default:
- curio.nlsemi = false
- }
-
- return lx
-}
-
-func getc() int {
- c := curio.peekc
+func (l *lexer) getc() int {
+ c := l.peekc
if c != 0 {
- curio.peekc = curio.peekc1
- curio.peekc1 = 0
+ l.peekc = l.peekc1
+ l.peekc1 = 0
goto check
}
- if curio.bin == nil {
- if len(curio.cp) == 0 {
- c = 0
- } else {
- c = int(curio.cp[0])
- curio.cp = curio.cp[1:]
+loop:
+ c = obj.Bgetc(l.bin)
+ // recognize BOM (U+FEFF): UTF-8 encoding is 0xef 0xbb 0xbf
+ if c == 0xef {
+ buf, err := l.bin.Peek(2)
+ if err != nil {
+ yyerrorl(int(lexlineno), "illegal UTF-8 sequence ef % x followed by read error (%v)", string(buf), err)
+ errorexit()
}
- } else {
- loop:
- c = obj.Bgetc(curio.bin)
- // recognize BOM (U+FEFF): UTF-8 encoding is 0xef 0xbb 0xbf
- if c == 0xef {
- buf, err := curio.bin.Peek(2)
- if err != nil {
- yyerrorl(int(lexlineno), "illegal UTF-8 sequence ef % x followed by read error (%v)", string(buf), err)
- errorexit()
- }
- if buf[0] == 0xbb && buf[1] == 0xbf {
- yyerrorl(int(lexlineno), "Unicode (UTF-8) BOM in middle of file")
+ if buf[0] == 0xbb && buf[1] == 0xbf {
+ yyerrorl(int(lexlineno), "Unicode (UTF-8) BOM in middle of file")
- // consume BOM bytes
- obj.Bgetc(curio.bin)
- obj.Bgetc(curio.bin)
- goto loop
- }
+ // consume BOM bytes
+ obj.Bgetc(l.bin)
+ obj.Bgetc(l.bin)
+ goto loop
}
}
check:
- switch c {
- case 0:
- if curio.bin != nil {
- Yyerror("illegal NUL byte")
- break
- }
- fallthrough
-
- // insert \n at EOF
- case EOF:
- if curio.eofnl || curio.last == '\n' {
- return EOF
- }
- curio.eofnl = true
- c = '\n'
- fallthrough
-
- case '\n':
- if pushedio.bin == nil {
- lexlineno++
- }
+ if c == 0 {
+ Yyerror("illegal NUL byte")
+ return 0
}
-
- curio.last = c
+ if c == '\n' && importpkg == nil {
+ lexlineno++
+ }
return c
}
-func ungetc(c int) {
- curio.peekc1 = curio.peekc
- curio.peekc = c
- if c == '\n' && pushedio.bin == nil {
+func (l *lexer) ungetc(c int) {
+ l.peekc1 = l.peekc
+ l.peekc = c
+ if c == '\n' && importpkg == nil {
lexlineno--
}
}
-func getr() int32 {
+func (l *lexer) getr() int32 {
var buf [utf8.UTFMax]byte
for i := 0; ; i++ {
- c := getc()
+ c := l.getc()
if i == 0 && c < utf8.RuneSelf {
return int32(c)
}
@@ -2020,10 +1944,10 @@
}
}
-func escchar(e int, escflg *int, val *int64) bool {
+func (l *lexer) escchar(e int, escflg *int, val *int64) bool {
*escflg = 0
- c := int(getr())
+ c := int(l.getr())
switch c {
case EOF:
Yyerror("eof in string")
@@ -2045,7 +1969,7 @@
}
u := 0
- c = int(getr())
+ c = int(l.getr())
var i int
switch c {
case 'x':
@@ -2072,23 +1996,23 @@
'6',
'7':
*escflg = 1 // it's a byte
- l := int64(c) - '0'
+ x := int64(c) - '0'
for i := 2; i > 0; i-- {
- c = getc()
+ c = l.getc()
if c >= '0' && c <= '7' {
- l = l*8 + int64(c) - '0'
+ x = x*8 + int64(c) - '0'
continue
}
Yyerror("non-octal character in escape sequence: %c", c)
- ungetc(c)
+ l.ungetc(c)
}
- if l > 255 {
- Yyerror("octal escape value > 255: %d", l)
+ if x > 255 {
+ Yyerror("octal escape value > 255: %d", x)
}
- *val = l
+ *val = x
return false
case 'a':
@@ -2118,35 +2042,35 @@
return false
hex:
- l := int64(0)
+ x := int64(0)
for ; i > 0; i-- {
- c = getc()
+ c = l.getc()
if c >= '0' && c <= '9' {
- l = l*16 + int64(c) - '0'
+ x = x*16 + int64(c) - '0'
continue
}
if c >= 'a' && c <= 'f' {
- l = l*16 + int64(c) - 'a' + 10
+ x = x*16 + int64(c) - 'a' + 10
continue
}
if c >= 'A' && c <= 'F' {
- l = l*16 + int64(c) - 'A' + 10
+ x = x*16 + int64(c) - 'A' + 10
continue
}
Yyerror("non-hex character in escape sequence: %c", c)
- ungetc(c)
+ l.ungetc(c)
break
}
- if u != 0 && (l > utf8.MaxRune || (0xd800 <= l && l < 0xe000)) {
- Yyerror("invalid Unicode code point in escape sequence: %#x", l)
- l = utf8.RuneError
+ if u != 0 && (x > utf8.MaxRune || (0xd800 <= x && x < 0xe000)) {
+ Yyerror("invalid Unicode code point in escape sequence: %#x", x)
+ x = utf8.RuneError
}
- *val = l
+ *val = x
return false
}
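
The lexer rewrite moves automatic semicolon insertion into the nlsemi flag: it is set after identifiers, literals, break/continue/fallthrough/return, the ++ and -- operators, and a closing ), ] or }, and a following newline or EOF then reads as ';'. The familiar source-level effect:

    package main

    import "fmt"

    func main() {
        x := 41        // newline after the literal ends the statement
        x++            // newline after ++ ends the statement
        fmt.Println(x) // newline after ) ends the statement
    }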
diff --git a/src/cmd/compile/internal/gc/mkbuiltin.go b/src/cmd/compile/internal/gc/mkbuiltin.go
index b1e4458..13cde5e 100644
--- a/src/cmd/compile/internal/gc/mkbuiltin.go
+++ b/src/cmd/compile/internal/gc/mkbuiltin.go
@@ -4,95 +4,90 @@
// +build ignore
-// Generate builtin.go from builtin/runtime.go and builtin/unsafe.go
-// (passed as arguments on the command line by a go:generate comment).
+// Generate builtin.go from builtin/runtime.go and builtin/unsafe.go.
// Run this after changing builtin/runtime.go and builtin/unsafe.go
// or after changing the export metadata format in the compiler.
// Either way, you need to have a working compiler binary first.
package main
import (
- "bufio"
+ "bytes"
+ "flag"
"fmt"
"io"
+ "io/ioutil"
"log"
"os"
"os/exec"
- "strings"
)
+var stdout = flag.Bool("stdout", false, "write to stdout instead of builtin.go")
+
func main() {
- f, err := os.Create("builtin.go")
+ flag.Parse()
+
+ var b bytes.Buffer
+ fmt.Fprintln(&b, "// AUTO-GENERATED by mkbuiltin.go; DO NOT EDIT")
+ fmt.Fprintln(&b, "")
+ fmt.Fprintln(&b, "package gc")
+
+ mkbuiltin(&b, "runtime")
+ mkbuiltin(&b, "unsafe")
+
+ var err error
+ if *stdout {
+ _, err = os.Stdout.Write(b.Bytes())
+ } else {
+ err = ioutil.WriteFile("builtin.go", b.Bytes(), 0666)
+ }
if err != nil {
log.Fatal(err)
}
- defer f.Close()
- w := bufio.NewWriter(f)
-
- fmt.Fprintln(w, "// AUTO-GENERATED by mkbuiltin.go; DO NOT EDIT")
- fmt.Fprintln(w, "")
- fmt.Fprintln(w, "package gc")
-
- for _, name := range os.Args[1:] {
- mkbuiltin(w, name)
- }
-
- if err := w.Flush(); err != nil {
- log.Fatal(err)
- }
}
-// Compile .go file, import data from .6 file, and write Go string version.
+// Compile .go file, import data from .o file, and write Go string version.
func mkbuiltin(w io.Writer, name string) {
- if err := exec.Command("go", "tool", "compile", "-A", "builtin/"+name+".go").Run(); err != nil {
+ args := []string{"tool", "compile", "-A"}
+ if name == "runtime" {
+ args = append(args, "-u")
+ }
+ args = append(args, "builtin/"+name+".go")
+
+ if err := exec.Command("go", args...).Run(); err != nil {
log.Fatal(err)
}
obj := name + ".o"
defer os.Remove(obj)
- r, err := os.Open(obj)
+ b, err := ioutil.ReadFile(obj)
if err != nil {
log.Fatal(err)
}
- defer r.Close()
- scanner := bufio.NewScanner(r)
// Look for $$ that introduces imports.
- for scanner.Scan() {
- if strings.Contains(scanner.Text(), "$$") {
- goto Begin
- }
+ i := bytes.Index(b, []byte("\n$$\n"))
+ if i < 0 {
+ log.Fatal("did not find beginning of imports")
}
- log.Fatal("did not find beginning of imports")
+ i += 4
-Begin:
- initfunc := fmt.Sprintf("init_%s_function", name)
+ // Look for $$ that closes imports.
+ j := bytes.Index(b[i:], []byte("\n$$\n"))
+ if j < 0 {
+ log.Fatal("did not find end of imports")
+ }
+ j += i + 4
- fmt.Fprintf(w, "\nconst %simport = \"\" +\n", name)
-
- // sys.go claims to be in package PACKAGE to avoid
- // conflicts during "go tool compile sys.go". Rename PACKAGE to $2.
- replacer := strings.NewReplacer("PACKAGE", name)
-
- // Process imports, stopping at $$ that closes them.
- for scanner.Scan() {
- p := scanner.Text()
- if strings.Contains(p, "$$") {
- goto End
- }
-
+ // Process and reformat imports.
+ fmt.Fprintf(w, "\nconst %simport = \"\"", name)
+ for _, p := range bytes.SplitAfter(b[i:j], []byte("\n")) {
// Chop leading white space.
- p = strings.TrimLeft(p, " \t")
-
- // Cut out decl of init_$1_function - it doesn't exist.
- if strings.Contains(p, initfunc) {
+ p = bytes.TrimLeft(p, " \t")
+ if len(p) == 0 {
continue
}
- fmt.Fprintf(w, "\t%q +\n", replacer.Replace(p)+"\n")
+ fmt.Fprintf(w, " +\n\t%q", p)
}
- log.Fatal("did not find end of imports")
-
-End:
- fmt.Fprintf(w, "\t\"$$\\n\"\n")
+ fmt.Fprintf(w, "\n")
}
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 66549be..0a96da6 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -233,8 +233,7 @@
off = dsname(symdata, off, s[n:n+m])
}
- off = duint8(symdata, off, 0) // terminating NUL for runtime
- off = (off + Widthptr - 1) &^ (Widthptr - 1) // round to pointer alignment
+ off = duint8(symdata, off, 0) // terminating NUL for runtime
ggloblsym(symdata, int32(off), obj.DUPOK|obj.RODATA|obj.LOCAL)
return symhdr, symdata
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index 05cd53a..12405d5 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -42,8 +42,7 @@
// Order holds state during the ordering process.
type Order struct {
out *NodeList // list of generated statements
- temp *NodeList // head of stack of temporary variables
- free *NodeList // free list of NodeList* structs (for use in temp)
+ temp []*Node // stack of temporary variables
}
// Order rewrites fn->nbody to apply the ordering constraints
@@ -68,14 +67,7 @@
order.out = list(order.out, a)
}
- l := order.free
- if l == nil {
- l = new(NodeList)
- }
- order.free = l.Next
- l.Next = order.temp
- l.N = var_
- order.temp = l
+ order.temp = append(order.temp, var_)
return var_
}
@@ -215,42 +207,35 @@
*np = ordercopyexpr(n, n.Type, order, 0)
}
+type ordermarker int
+
// Marktemp returns the top of the temporary variable stack.
-func marktemp(order *Order) *NodeList {
- return order.temp
+func marktemp(order *Order) ordermarker {
+ return ordermarker(len(order.temp))
}
// Poptemp pops temporaries off the stack until reaching the mark,
// which must have been returned by marktemp.
-func poptemp(mark *NodeList, order *Order) {
- var l *NodeList
-
- for {
- l = order.temp
- if l == mark {
- break
- }
- order.temp = l.Next
- l.Next = order.free
- order.free = l
- }
+func poptemp(mark ordermarker, order *Order) {
+ order.temp = order.temp[:mark]
}
// Cleantempnopop emits to *out VARKILL instructions for each temporary
// above the mark on the temporary stack, but it does not pop them
// from the stack.
-func cleantempnopop(mark *NodeList, order *Order, out **NodeList) {
+func cleantempnopop(mark ordermarker, order *Order, out **NodeList) {
var kill *Node
- for l := order.temp; l != mark; l = l.Next {
- if l.N.Name.Keepalive {
- l.N.Name.Keepalive = false
- l.N.Addrtaken = true // ensure SSA keeps the l.N variable
- kill = Nod(OVARLIVE, l.N, nil)
+ for i := len(order.temp) - 1; i >= int(mark); i-- {
+ n := order.temp[i]
+ if n.Name.Keepalive {
+ n.Name.Keepalive = false
+ n.Addrtaken = true // ensure SSA keeps the n variable
+ kill = Nod(OVARLIVE, n, nil)
typecheck(&kill, Etop)
*out = list(*out, kill)
}
- kill = Nod(OVARKILL, l.N, nil)
+ kill = Nod(OVARKILL, n, nil)
typecheck(&kill, Etop)
*out = list(*out, kill)
}
@@ -258,7 +243,7 @@
// Cleantemp emits VARKILL instructions for each temporary above the
// mark on the temporary stack and removes them from the stack.
-func cleantemp(top *NodeList, order *Order) {
+func cleantemp(top ordermarker, order *Order) {
cleantempnopop(top, order, &order.out)
poptemp(top, order)
}
@@ -290,13 +275,7 @@
// insert new temporaries from order
// at head of outer list.
- lp := &order.temp
-
- for *lp != nil {
- lp = &(*lp).Next
- }
- *lp = outer.temp
- outer.temp = order.temp
+ outer.temp = append(outer.temp, order.temp...)
*np = n
}
diff --git a/src/cmd/compile/internal/gc/parser.go b/src/cmd/compile/internal/gc/parser.go
index 054cf73..fbc5a5e 100644
--- a/src/cmd/compile/internal/gc/parser.go
+++ b/src/cmd/compile/internal/gc/parser.go
@@ -5,7 +5,7 @@
package gc
// The recursive-descent parser is built around a slighty modified grammar
-// of Go to accomodate for the constraints imposed by strict one token look-
+// of Go to accommodate for the constraints imposed by strict one token look-
// ahead, and for better error handling. Subsequent checks of the constructed
// syntax tree restrict the language accepted by the compiler to proper Go.
//
@@ -13,6 +13,7 @@
// to handle optional commas and semicolons before a closing ) or } .
import (
+ "cmd/internal/obj"
"fmt"
"strconv"
"strings"
@@ -20,81 +21,31 @@
const trace = false // if set, parse tracing can be enabled with -x
-// TODO(gri) Once we handle imports w/o redirecting the underlying
-// source of the lexer we can get rid of these. They are here for
-// compatibility with the existing yacc-based parser setup (issue 13242).
-var thenewparser parser // the parser in use
-var savedstate []parser // saved parser state, used during import
-
-func push_parser() {
- // Indentation (for tracing) must be preserved across parsers
- // since we are changing the lexer source (and parser state)
- // under foot, in the middle of productions. This won't be
- // needed anymore once we fix issue 13242, but neither will
- // be the push/pop_parser functionality.
- // (Instead we could just use a global variable indent, but
- // but eventually indent should be parser-specific anyway.)
- indent := thenewparser.indent
- savedstate = append(savedstate, thenewparser)
- thenewparser = parser{indent: indent} // preserve indentation
- thenewparser.next()
+// parse_import parses the export data of a package that is imported.
+func parse_import(bin *obj.Biobuf, indent []byte) {
+ newparser(bin, indent).import_package()
}
-func pop_parser() {
- indent := thenewparser.indent
- n := len(savedstate) - 1
- thenewparser = savedstate[n]
- thenewparser.indent = indent // preserve indentation
- savedstate = savedstate[:n]
-}
-
-// parse_file sets up a new parser and parses a single Go source file.
-func parse_file() {
- thenewparser = parser{}
- thenewparser.loadsys()
- thenewparser.next()
- thenewparser.file()
-}
-
-// loadsys loads the definitions for the low-level runtime functions,
-// so that the compiler can generate calls to them,
-// but does not make the name "runtime" visible as a package.
-func (p *parser) loadsys() {
- if trace && Debug['x'] != 0 {
- defer p.trace("loadsys")()
- }
-
- importpkg = Runtimepkg
-
- if Debug['A'] != 0 {
- cannedimports("runtime.Builtin", "package runtime\n\n$$\n\n")
- } else {
- cannedimports("runtime.Builtin", runtimeimport)
- }
- curio.importsafe = true
-
- p.import_package()
- p.import_there()
-
- importpkg = nil
+// parse_file parses a single Go source file.
+func parse_file(bin *obj.Biobuf) {
+ newparser(bin, nil).file()
}
type parser struct {
- tok int32 // next token (one-token look-ahead)
- op Op // valid if tok == LASOP
- val Val // valid if tok == LLITERAL
- sym_ *Sym // valid if tok == LNAME
- fnest int // function nesting level (for error handling)
- xnest int // expression nesting level (for complit ambiguity resolution)
- yy yySymType // for temporary use by next
- indent []byte // tracing support
+ lexer
+ fnest int // function nesting level (for error handling)
+ xnest int // expression nesting level (for complit ambiguity resolution)
+ indent []byte // tracing support
}
-func (p *parser) next() {
- p.tok = yylex(&p.yy)
- p.op = p.yy.op
- p.val = p.yy.val
- p.sym_ = p.yy.sym
+// newparser returns a new parser ready to parse from src.
+// indent is the initial indentation for tracing output.
+func newparser(src *obj.Biobuf, indent []byte) *parser {
+ var p parser
+ p.bin = src
+ p.indent = indent
+ p.next()
+ return &p
}
func (p *parser) got(tok int32) bool {
@@ -347,108 +298,87 @@
p.want(LIMPORT)
if p.got('(') {
for p.tok != EOF && p.tok != ')' {
- p.import_stmt()
+ p.importdcl()
if !p.osemi(')') {
break
}
}
p.want(')')
} else {
- p.import_stmt()
- }
-}
-
-func (p *parser) import_stmt() {
- if trace && Debug['x'] != 0 {
- defer p.trace("import_stmt")()
- }
-
- line := int32(p.import_here())
- if p.tok == LPACKAGE {
- p.import_package()
- p.import_there()
-
- ipkg := importpkg
- my := importmyname
- importpkg = nil
- importmyname = nil
-
- if my == nil {
- my = Lookup(ipkg.Name)
- }
-
- pack := Nod(OPACK, nil, nil)
- pack.Sym = my
- pack.Name.Pkg = ipkg
- pack.Lineno = line
-
- if strings.HasPrefix(my.Name, ".") {
- importdot(ipkg, pack)
- return
- }
- if my.Name == "init" {
- lineno = line
- Yyerror("cannot import package as init - init must be a func")
- return
- }
- if my.Name == "_" {
- return
- }
- if my.Def != nil {
- lineno = line
- redeclare(my, "as imported package name")
- }
- my.Def = pack
- my.Lastlineno = line
- my.Block = 1 // at top level
-
- return
- }
-
- p.import_there()
- // When an invalid import path is passed to importfile,
- // it calls Yyerror and then sets up a fake import with
- // no package statement. This allows us to test more
- // than one invalid import statement in a single file.
- if nerrors == 0 {
- Fatalf("phase error in import")
+ p.importdcl()
}
}
// ImportSpec = [ "." | PackageName ] ImportPath .
// ImportPath = string_lit .
-//
-// import_here switches the underlying lexed source to the export data
-// of the imported package.
-func (p *parser) import_here() int {
+func (p *parser) importdcl() {
if trace && Debug['x'] != 0 {
- defer p.trace("import_here")()
+ defer p.trace("importdcl")()
}
- importmyname = nil
+ var my *Sym
switch p.tok {
case LNAME, '@', '?':
// import with given name
- importmyname = p.sym()
+ my = p.sym()
case '.':
// import into my name space
- importmyname = Lookup(".")
+ my = Lookup(".")
p.next()
}
- var path Val
- if p.tok == LLITERAL {
- path = p.val
- p.next()
- } else {
+ if p.tok != LLITERAL {
p.syntax_error("missing import path; require quoted string")
p.advance(';', ')')
+ return
}
- line := parserline()
- importfile(&path, line)
- return line
+ line := int32(parserline())
+ path := p.val
+ p.next()
+
+ importfile(&path, p.indent)
+ if importpkg == nil {
+ if nerrors == 0 {
+ Fatalf("phase error in import")
+ }
+ return
+ }
+
+ ipkg := importpkg
+ importpkg = nil
+
+ ipkg.Direct = true
+
+ if my == nil {
+ my = Lookup(ipkg.Name)
+ }
+
+ pack := Nod(OPACK, nil, nil)
+ pack.Sym = my
+ pack.Name.Pkg = ipkg
+ pack.Lineno = line
+
+ if strings.HasPrefix(my.Name, ".") {
+ importdot(ipkg, pack)
+ return
+ }
+ if my.Name == "init" {
+ lineno = line
+ Yyerror("cannot import package as init - init must be a func")
+ return
+ }
+ if my.Name == "_" {
+ return
+ }
+ if my.Def != nil {
+ lineno = line
+ redeclare(my, "as imported package name")
+ }
+ my.Def = pack
+ my.Lastlineno = line
+ my.Block = 1 // at top level
}
// import_package parses the header of an imported package as exported
@@ -467,9 +397,10 @@
p.import_error()
}
+ importsafe := false
if p.tok == LNAME {
if p.sym_.Name == "safe" {
- curio.importsafe = true
+ importsafe = true
}
p.next()
}
@@ -481,23 +412,9 @@
} else if importpkg.Name != name {
Yyerror("conflicting names %s and %s for package %q", importpkg.Name, name, importpkg.Path)
}
- if incannedimport == 0 {
- importpkg.Direct = true
- }
- importpkg.Safe = curio.importsafe
+ importpkg.Safe = importsafe
- if safemode != 0 && !curio.importsafe {
- Yyerror("cannot import unsafe package %q", importpkg.Path)
- }
-}
-
-// import_there parses the imported package definitions and then switches
-// the underlying lexed source back to the importing package.
-func (p *parser) import_there() {
- if trace && Debug['x'] != 0 {
- defer p.trace("import_there")()
- }
-
+ typecheckok = true
defercheckwidth()
p.hidden_import_list()
@@ -508,7 +425,7 @@
}
resumecheckwidth()
- unimportfile()
+ typecheckok = false
}
// Declaration = ConstDecl | TypeDecl | VarDecl .
@@ -1136,65 +1053,16 @@
stmt.Nbody = p.loop_body("if clause")
- l := p.elseif_list_else() // does markdcl
-
- n := stmt
- popdcl()
- for nn := l; nn != nil; nn = nn.Next {
- if nn.N.Op == OIF {
- popdcl()
- }
- n.Rlist = list1(nn.N)
- n = nn.N
- }
-
- return stmt
-}
-
-func (p *parser) elseif() *NodeList {
- if trace && Debug['x'] != 0 {
- defer p.trace("elseif")()
- }
-
- // LELSE LIF already consumed
- markdcl() // matching popdcl in if_stmt
-
- stmt := p.if_header()
- if stmt.Left == nil {
- Yyerror("missing condition in if statement")
- }
-
- stmt.Nbody = p.loop_body("if clause")
-
- return list1(stmt)
-}
-
-func (p *parser) elseif_list_else() (l *NodeList) {
- if trace && Debug['x'] != 0 {
- defer p.trace("elseif_list_else")()
- }
-
- for p.got(LELSE) {
- if p.got(LIF) {
- l = concat(l, p.elseif())
+ if p.got(LELSE) {
+ if p.tok == LIF {
+ stmt.Rlist = list1(p.if_stmt())
} else {
- l = concat(l, p.else_())
- break
+ stmt.Rlist = list1(p.compound_stmt(true))
}
}
- return l
-}
-
-func (p *parser) else_() *NodeList {
- if trace && Debug['x'] != 0 {
- defer p.trace("else")()
- }
-
- l := &NodeList{N: p.compound_stmt(true)}
- l.End = l
- return l
-
+ popdcl()
+ return stmt
}
// switch_stmt parses both expression and type switch statements.
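
Note: the flattening helpers (elseif, elseif_list_else, else_) can go away because an "else if" chain is, by construction, just an "else" whose body is a single nested if statement; the recursive call to p.if_stmt hung off stmt.Rlist builds exactly that shape. A source-level illustration of the equivalence (both functions behave identically; illustration only, not compiler code):

	// pickChained uses the usual else-if chain.
	func pickChained(x int) string {
		if x == 0 {
			return "a"
		} else if x == 1 {
			return "b"
		} else {
			return "c"
		}
	}

	// pickNested spells out the nesting the parser now builds directly.
	func pickNested(x int) string {
		if x == 0 {
			return "a"
		} else {
			if x == 1 {
				return "b"
			} else {
				return "c"
			}
		}
	}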
diff --git a/src/cmd/compile/internal/gc/pgen.go b/src/cmd/compile/internal/gc/pgen.go
index f90f89a..987da2b 100644
--- a/src/cmd/compile/internal/gc/pgen.go
+++ b/src/cmd/compile/internal/gc/pgen.go
@@ -187,21 +187,12 @@
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *Node) bool {
- if a.Class != b.Class {
- if a.Class == PAUTO {
- return false
- }
- return true
+ if (a.Class == PAUTO) != (b.Class == PAUTO) {
+ return b.Class == PAUTO
}
if a.Class != PAUTO {
- if a.Xoffset < b.Xoffset {
- return true
- }
- if a.Xoffset > b.Xoffset {
- return false
- }
- return false
+ return a.Xoffset < b.Xoffset
}
if a.Used != b.Used {
@@ -220,11 +211,8 @@
return ap
}
- if a.Type.Width < b.Type.Width {
- return false
- }
- if a.Type.Width > b.Type.Width {
- return true
+ if a.Type.Width != b.Type.Width {
+ return a.Type.Width > b.Type.Width
}
return a.Sym.Name < b.Sym.Name
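
Note: the rewritten comparator leans on two small idioms: (a.Class == PAUTO) != (b.Class == PAUTO) is true exactly when one of the two is an auto, and returning b.Class == PAUTO then orders non-autos first; the remaining multi-way ifs collapse into direct comparisons. The same shape as a standalone sketch, with stand-in types (not the compiler's definitions):

	type class int

	const (
		pparam class = iota
		pauto
	)

	type stackVar struct {
		class  class
		offset int64
	}

	// less sorts non-autos (by offset) before autos; autos tie-break on
	// other criteria (used, size, name) in the real comparator.
	func less(a, b stackVar) bool {
		if (a.class == pauto) != (b.class == pauto) {
			return b.class == pauto // exactly one is an auto: the non-auto wins
		}
		if a.class != pauto {
			return a.offset < b.offset
		}
		return false
	}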
diff --git a/src/cmd/compile/internal/gc/pgen_test.go b/src/cmd/compile/internal/gc/pgen_test.go
index ebc91011..909b8a9 100644
--- a/src/cmd/compile/internal/gc/pgen_test.go
+++ b/src/cmd/compile/internal/gc/pgen_test.go
@@ -41,6 +41,16 @@
false,
},
{
+ Node{Class: PPARAM, Xoffset: 10},
+ Node{Class: PPARAMOUT, Xoffset: 20},
+ true,
+ },
+ {
+ Node{Class: PPARAMOUT, Xoffset: 10},
+ Node{Class: PPARAM, Xoffset: 20},
+ true,
+ },
+ {
Node{Class: PAUTO, Used: true},
Node{Class: PAUTO, Used: false},
true,
@@ -101,6 +111,10 @@
if got != d.lt {
t.Errorf("want %#v < %#v", d.a, d.b)
}
+ // If we expect a < b to be true, check that b < a is false.
+ if d.lt && cmpstackvarlt(&d.b, &d.a) {
+ t.Errorf("unexpected %#v < %#v", d.b, d.a)
+ }
}
}
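
Note: the added check asserts antisymmetry of cmpstackvarlt: for every table entry expected to satisfy a < b, it also requires that b < a is false. The same property can be checked for any less-than predicate; a small sketch of such a helper (hypothetical, not part of the test file, assuming it lives in a _test.go file in package gc with the usual testing import):

	// assertAntisymmetric fails the test if both less(a, b) and less(b, a)
	// hold for some pair, which would make the ordering unusable for sorting.
	func assertAntisymmetric(t *testing.T, less func(a, b *Node) bool, pairs [][2]*Node) {
		for _, p := range pairs {
			if less(p[0], p[1]) && less(p[1], p[0]) {
				t.Errorf("less is not antisymmetric for %#v and %#v", p[0], p[1])
			}
		}
	}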
diff --git a/src/cmd/compile/internal/gc/popt.go b/src/cmd/compile/internal/gc/popt.go
index 4d71ab6..b708222 100644
--- a/src/cmd/compile/internal/gc/popt.go
+++ b/src/cmd/compile/internal/gc/popt.go
@@ -241,6 +241,19 @@
// will not have flow graphs and consequently will not be optimized.
const MaxFlowProg = 50000
+var ffcache []Flow // reusable []Flow, to reduce allocation
+
+func growffcache(n int) {
+ if n > cap(ffcache) {
+ n = (n * 5) / 4
+ if n > MaxFlowProg {
+ n = MaxFlowProg
+ }
+ ffcache = make([]Flow, n)
+ }
+ ffcache = ffcache[:n]
+}
+
func Flowstart(firstp *obj.Prog, newData func() interface{}) *Graph {
// Count and mark instructions to annotate.
nf := 0
@@ -268,7 +281,9 @@
// Allocate annotations and assign to instructions.
graph := new(Graph)
- ff := make([]Flow, nf)
+
+ growffcache(nf)
+ ff := ffcache
start := &ff[0]
id := 0
var last *Flow
@@ -331,6 +346,10 @@
f.Prog.Info.Flags = 0 // drop cached proginfo
f.Prog.Opt = nil
}
+ clear := ffcache[:graph.Num]
+ for i := range clear {
+ clear[i] = Flow{}
+ }
}
// find looping structure
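
Note: growffcache keeps one package-level []Flow and reuses it across calls to Flowstart: when the requested length exceeds the capacity it reallocates with 25% headroom (capped at MaxFlowProg), otherwise it only reslices; Flowend then zeroes the used prefix so stale Prog pointers do not pin memory. The same scratch-buffer pattern, reduced to a standalone sketch (names and the int element type are illustrative, and the MaxFlowProg cap is omitted):

	// scratch is a reusable buffer; grow reslices it to n, reallocating with
	// 25% headroom only when the current capacity is too small.
	var scratch []int

	func grow(n int) []int {
		if n > cap(scratch) {
			scratch = make([]int, n+n/4)
		}
		scratch = scratch[:n]
		return scratch
	}

	// release zeroes the prefix that was used so the next caller starts
	// clean and anything referenced by the old entries can be collected.
	func release(used int) {
		for i := range scratch[:used] {
			scratch[i] = 0
		}
	}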
diff --git a/src/cmd/compile/internal/gc/reflect.go b/src/cmd/compile/internal/gc/reflect.go
index f6dd75e..37c3bc9 100644
--- a/src/cmd/compile/internal/gc/reflect.go
+++ b/src/cmd/compile/internal/gc/reflect.go
@@ -45,7 +45,7 @@
// the given map type. This type is not visible to users -
// we include only enough information to generate a correct GC
// program for it.
-// Make sure this stays in sync with ../../runtime/hashmap.go!
+// Make sure this stays in sync with ../../../../runtime/hashmap.go!
const (
BUCKETSIZE = 8
MAXKEYSIZE = 128
@@ -149,7 +149,7 @@
}
// Builds a type representing a Hmap structure for the given map type.
-// Make sure this stays in sync with ../../runtime/hashmap.go!
+// Make sure this stays in sync with ../../../../runtime/hashmap.go!
func hmap(t *Type) *Type {
if t.Hmap != nil {
return t.Hmap
@@ -186,7 +186,7 @@
}
// build a struct:
- // hash_iter {
+ // hiter {
// key *Key
// val *Value
// t *MapType
@@ -200,7 +200,7 @@
// bucket uintptr
// checkBucket uintptr
// }
- // must match ../../runtime/hashmap.go:hash_iter.
+ // must match ../../../../runtime/hashmap.go:hiter.
var field [12]*Type
field[0] = makefield("key", Ptrto(t.Down))
@@ -473,7 +473,7 @@
}
// uncommonType
-// ../../runtime/type.go:/uncommonType
+// ../../../../runtime/type.go:/uncommonType
func dextratype(sym *Sym, off int, t *Type, ptroff int) int {
m := methods(t)
if t.Sym == nil && len(m) == 0 {
@@ -513,7 +513,7 @@
// methods
for _, a := range m {
// method
- // ../../runtime/type.go:/method
+ // ../../../../runtime/type.go:/method
ot = dgostringptr(s, ot, a.name)
ot = dgopkgpath(s, ot, a.pkg)
@@ -710,21 +710,21 @@
gcsym, useGCProg, ptrdata := dgcsym(t)
- // ../../pkg/reflect/type.go:/^type.commonType
+ // ../../../../reflect/type.go:/^type.rtype
// actual type structure
- // type commonType struct {
+ // type rtype struct {
// size uintptr
- // ptrsize uintptr
+ // ptrdata uintptr
// hash uint32
// _ uint8
// align uint8
// fieldAlign uint8
// kind uint8
- // alg unsafe.Pointer
- // gcdata unsafe.Pointer
+ // alg *typeAlg
+ // gcdata *byte
// string *string
- // *extraType
- // ptrToThis *Type
+ // *uncommonType
+ // ptrToThis *rtype
// }
ot = duintptr(s, ot, uint64(t.Width))
ot = duintptr(s, ot, uint64(ptrdata))
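
For reference, the layout spelled out in the updated comment, written as a Go struct. This is only a reading aid: the authoritative definitions are reflect.rtype and runtime._type, and the two placeholder types below are assumptions standing in for the real runtime types.

	type typeAlg struct{ hash, equal uintptr } // placeholder for the runtime's function-pointer pair
	type uncommonType struct{}                 // placeholder: methods and package path of named types

	// rtype mirrors the commented layout that dcommontype emits, field by field.
	type rtype struct {
		size       uintptr
		ptrdata    uintptr
		hash       uint32
		_          uint8
		align      uint8
		fieldAlign uint8
		kind       uint8
		alg        *typeAlg
		gcdata     *byte
		string     *string
		*uncommonType
		ptrToThis *rtype
	}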
@@ -1010,7 +1010,7 @@
case TARRAY:
if t.Bound >= 0 {
- // ../../runtime/type.go:/ArrayType
+ // ../../../../runtime/type.go:/arrayType
s1 := dtypesym(t.Type)
t2 := typ(TARRAY)
@@ -1023,7 +1023,7 @@
ot = dsymptr(s, ot, s2, 0)
ot = duintptr(s, ot, uint64(t.Bound))
} else {
- // ../../runtime/type.go:/SliceType
+ // ../../../../runtime/type.go:/sliceType
s1 := dtypesym(t.Type)
ot = dcommontype(s, ot, t)
@@ -1031,7 +1031,7 @@
ot = dsymptr(s, ot, s1, 0)
}
- // ../../runtime/type.go:/ChanType
+ // ../../../../runtime/type.go:/chanType
case TCHAN:
s1 := dtypesym(t.Type)
@@ -1090,7 +1090,7 @@
dtypesym(a.type_)
}
- // ../../../runtime/type.go:/InterfaceType
+ // ../../../../runtime/type.go:/interfaceType
ot = dcommontype(s, ot, t)
xt = ot - 2*Widthptr
@@ -1098,14 +1098,14 @@
ot = duintxx(s, ot, uint64(n), Widthint)
ot = duintxx(s, ot, uint64(n), Widthint)
for _, a := range m {
- // ../../../runtime/type.go:/imethod
+ // ../../../../runtime/type.go:/imethod
ot = dgostringptr(s, ot, a.name)
ot = dgopkgpath(s, ot, a.pkg)
ot = dsymptr(s, ot, dtypesym(a.type_), 0)
}
- // ../../../runtime/type.go:/MapType
+ // ../../../../runtime/type.go:/mapType
case TMAP:
s1 := dtypesym(t.Down)
@@ -1140,20 +1140,20 @@
case TPTR32, TPTR64:
if t.Type.Etype == TANY {
- // ../../runtime/type.go:/UnsafePointerType
+ // ../../../../runtime/type.go:/UnsafePointerType
ot = dcommontype(s, ot, t)
break
}
- // ../../runtime/type.go:/PtrType
+ // ../../../../runtime/type.go:/ptrType
s1 := dtypesym(t.Type)
ot = dcommontype(s, ot, t)
xt = ot - 2*Widthptr
ot = dsymptr(s, ot, s1, 0)
- // ../../runtime/type.go:/StructType
+ // ../../../../runtime/type.go:/structType
// for security, only the exported fields.
case TSTRUCT:
n := 0
@@ -1169,7 +1169,7 @@
ot = duintxx(s, ot, uint64(n), Widthint)
ot = duintxx(s, ot, uint64(n), Widthint)
for t1 := t.Type; t1 != nil; t1 = t1.Down {
- // ../../runtime/type.go:/structField
+ // ../../../../runtime/type.go:/structField
if t1.Sym != nil && t1.Embedded == 0 {
ot = dgostringptr(s, ot, t1.Sym.Name)
if exportname(t1.Sym.Name) {
@@ -1349,7 +1349,7 @@
ggloblsym(eqfunc, int32(Widthptr), obj.DUPOK|obj.RODATA)
}
- // ../../runtime/alg.go:/typeAlg
+ // ../../../../runtime/alg.go:/typeAlg
ot := 0
ot = dsymptr(s, ot, hashfunc, 0)
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 19fda37..13d8663 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -561,7 +561,7 @@
case OAS2DOTTYPE:
res, resok := s.dottype(n.Rlist.N, true)
- s.assign(n.List.N, res, false, false, n.Lineno)
+ s.assign(n.List.N, res, needwritebarrier(n.List.N, n.Rlist.N), false, n.Lineno)
s.assign(n.List.Next.N, resok, false, false, n.Lineno)
return
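
Note: OAS2DOTTYPE (v, ok := i.(T)) now lets needwritebarrier decide whether storing the asserted value needs a write barrier instead of always skipping it. The kind of assignment this affects, written out in ordinary Go (illustrative only):

	// The first result of a two-result type assertion stored into a
	// pointer-typed, heap-visible location.
	var sink *int

	func store(i interface{}) bool {
		var ok bool
		sink, ok = i.(*int) // now gets a write barrier when one is required
		return ok
	}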
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index 0d25ddf..b6a2648 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -116,12 +116,6 @@
if strings.HasPrefix(msg, "syntax error") {
nsyntaxerrors++
- // An unexpected EOF caused a syntax error. Use the previous
- // line number since getc generated a fake newline character.
- if curio.eofnl {
- lexlineno = prevlineno
- }
-
// only one syntax error per line
if int32(yyerror_lastsyntax) == lexlineno {
return
@@ -465,6 +459,15 @@
return a
}
+ switch t.Bound {
+ case 0:
+ // We checked above that the element type is comparable.
+ return AMEM
+ case 1:
+ // Single-element array is same as its lone element.
+ return a
+ }
+
return -1 // needs special compare
case TSTRUCT:
@@ -500,28 +503,20 @@
func algtype(t *Type) int {
a := algtype1(t, nil)
- if a == AMEM || a == ANOEQ {
- if Isslice(t) {
- return ASLICE
- }
+ if a == AMEM {
switch t.Width {
case 0:
- return a + AMEM0 - AMEM
-
+ return AMEM0
case 1:
- return a + AMEM8 - AMEM
-
+ return AMEM8
case 2:
- return a + AMEM16 - AMEM
-
+ return AMEM16
case 4:
- return a + AMEM32 - AMEM
-
+ return AMEM32
case 8:
- return a + AMEM64 - AMEM
-
+ return AMEM64
case 16:
- return a + AMEM128 - AMEM
+ return AMEM128
}
}
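
Note: with slices and ANOEQ handled separately, algtype now only picks a width-specialized AMEM kind. The mapping as a standalone sketch (the returned strings are just labels for the AMEMxx constants used above):

	// memKind names the specialized memory-equality kind for a given width;
	// other widths fall back to generic AMEM, i.e. memequal with an explicit
	// size argument.
	func memKind(width int64) string {
		switch width {
		case 0:
			return "AMEM0"
		case 1:
			return "AMEM8"
		case 2:
			return "AMEM16"
		case 4:
			return "AMEM32"
		case 8:
			return "AMEM64"
		case 16:
			return "AMEM128"
		}
		return "AMEM"
	}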
@@ -2640,17 +2635,13 @@
safemode = old_safemode
}
-// Return node for
-// if p.field != q.field { return false }
+// eqfield returns the node
+// p.field == q.field
func eqfield(p *Node, q *Node, field *Node) *Node {
nx := Nod(OXDOT, p, field)
ny := Nod(OXDOT, q, field)
- nif := Nod(OIF, nil, nil)
- nif.Left = Nod(ONE, nx, ny)
- r := Nod(ORETURN, nil, nil)
- r.List = list(r.List, Nodbool(false))
- nif.Nbody = list(nif.Nbody, r)
- return nif
+ ne := Nod(OEQ, nx, ny)
+ return ne
}
func eqmemfunc(size int64, type_ *Type, needsize *int) *Node {
@@ -2671,8 +2662,8 @@
return fn
}
-// Return node for
-// if !memequal(&p.field, &q.field [, size]) { return false }
+// eqmem returns the node
+// memequal(&p.field, &q.field [, size])
func eqmem(p *Node, q *Node, field *Node, size int64) *Node {
var needsize int
@@ -2690,15 +2681,11 @@
call.List = list(call.List, Nodintconst(size))
}
- nif := Nod(OIF, nil, nil)
- nif.Left = Nod(ONOT, call, nil)
- r := Nod(ORETURN, nil, nil)
- r.List = list(r.List, Nodbool(false))
- nif.Nbody = list(nif.Nbody, r)
- return nif
+ return call
}
-// Generate a helper function to check equality of two values of type t.
+// geneq generates a helper function to
+// check equality of two values of type t.
func geneq(sym *Sym, t *Type) {
if Debug['r'] != 0 {
fmt.Printf("geneq %v %v\n", sym, t)
@@ -2768,12 +2755,18 @@
nrange.Nbody = list(nrange.Nbody, nif)
fn.Nbody = list(fn.Nbody, nrange)
- // Walk the struct using memequal for runs of AMEM
+ // return true
+ ret := Nod(ORETURN, nil, nil)
+ ret.List = list(ret.List, Nodbool(true))
+ fn.Nbody = list(fn.Nbody, ret)
+
+ // Walk the struct using memequal for runs of AMEM
// and calling specific equality tests for the others.
// Skip blank-named fields.
case TSTRUCT:
var first *Type
+ var conjuncts []*Node
offend := int64(0)
var size int64
for t1 := t.Type; ; t1 = t1.Down {
@@ -2796,17 +2789,17 @@
// cross-package unexported fields.
if first != nil {
if first.Down == t1 {
- fn.Nbody = list(fn.Nbody, eqfield(np, nq, newname(first.Sym)))
+ conjuncts = append(conjuncts, eqfield(np, nq, newname(first.Sym)))
} else if first.Down.Down == t1 {
- fn.Nbody = list(fn.Nbody, eqfield(np, nq, newname(first.Sym)))
+ conjuncts = append(conjuncts, eqfield(np, nq, newname(first.Sym)))
first = first.Down
if !isblanksym(first.Sym) {
- fn.Nbody = list(fn.Nbody, eqfield(np, nq, newname(first.Sym)))
+ conjuncts = append(conjuncts, eqfield(np, nq, newname(first.Sym)))
}
} else {
// More than two fields: use memequal.
size = offend - first.Width // first->width is offset
- fn.Nbody = list(fn.Nbody, eqmem(np, nq, newname(first.Sym), size))
+ conjuncts = append(conjuncts, eqmem(np, nq, newname(first.Sym), size))
}
first = nil
@@ -2820,16 +2813,27 @@
}
// Check this field, which is not just memory.
- fn.Nbody = list(fn.Nbody, eqfield(np, nq, newname(t1.Sym)))
+ conjuncts = append(conjuncts, eqfield(np, nq, newname(t1.Sym)))
}
+
+ var and *Node
+ switch len(conjuncts) {
+ case 0:
+ and = Nodbool(true)
+ case 1:
+ and = conjuncts[0]
+ default:
+ and = Nod(OANDAND, conjuncts[0], conjuncts[1])
+ for _, conjunct := range conjuncts[2:] {
+ and = Nod(OANDAND, and, conjunct)
+ }
+ }
+
+ ret := Nod(ORETURN, nil, nil)
+ ret.List = list(ret.List, and)
+ fn.Nbody = list(fn.Nbody, ret)
}
- // return true
- r := Nod(ORETURN, nil, nil)
-
- r.List = list(r.List, Nodbool(true))
- fn.Nbody = list(fn.Nbody, r)
-
if Debug['r'] != 0 {
dumplist("geneq body", fn.Nbody)
}
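
Note: geneq used to append one "if !eq { return false }" statement per field; it now gathers the per-field comparisons from eqfield/eqmem into conjuncts and folds them into a single && expression returned once, so a struct such as struct{ a int; b string } gets a body equivalent to "return p.a == q.a && p.b == q.b", and an empty struct simply returns true. A plain-Go sketch of that fold (the generated expression short-circuits, whereas this sketch evaluates its inputs up front):

	// allTrue mirrors the conjuncts fold: zero comparisons mean "equal",
	// a single one is returned as-is, and the rest are chained with &&.
	func allTrue(conjuncts []bool) bool {
		if len(conjuncts) == 0 {
			return true
		}
		result := conjuncts[0]
		for _, c := range conjuncts[1:] {
			result = result && c
		}
		return result
	}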
@@ -2847,10 +2851,18 @@
// for a struct containing a reflect.Value, which itself has
// an unexported field of type unsafe.Pointer.
old_safemode := safemode
-
safemode = 0
+
+ // Disable checknils while compiling this code.
+ // We are comparing a struct or an array,
+ // neither of which can be nil, and our comparisons
+ // are shallow.
+ Disable_checknil++
+
funccompile(fn)
+
safemode = old_safemode
+ Disable_checknil--
}
func ifacelookdot(s *Sym, t *Type, followptr *bool, ignorecase int) *Type {
diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go
index f0433f3..661b3ee 100644
--- a/src/cmd/compile/internal/gc/swt.go
+++ b/src/cmd/compile/internal/gc/swt.go
@@ -549,20 +549,6 @@
// set up labels and jumps
casebody(sw, s.facename)
- // calculate type hash
- t := cond.Right.Type
- if isnilinter(t) {
- a = syslook("efacethash", 1)
- } else {
- a = syslook("ifacethash", 1)
- }
- substArgTypes(a, t)
- a = Nod(OCALL, a, nil)
- a.List = list1(s.facename)
- a = Nod(OAS, s.hashname, a)
- typecheck(&a, Etop)
- cas = list(cas, a)
-
cc := caseClauses(sw, switchKindType)
sw.List = nil
var def *Node
@@ -572,22 +558,66 @@
} else {
def = Nod(OBREAK, nil, nil)
}
+ var typenil *Node
+ if len(cc) > 0 && cc[0].typ == caseKindTypeNil {
+ typenil = cc[0].node.Right
+ cc = cc[1:]
+ }
+
+ // For empty interfaces, do:
+ // if e._type == nil {
+ // do nil case if it exists, otherwise default
+ // }
+ // h := e._type.hash
+ // Use a similar strategy for non-empty interfaces.
+
+ // Get interface descriptor word.
+ typ := Nod(OITAB, s.facename, nil)
+
+ // Check for nil first.
+ i := Nod(OIF, nil, nil)
+ i.Left = Nod(OEQ, typ, nodnil())
+ if typenil != nil {
+ // Do explicit nil case right here.
+ i.Nbody = list1(typenil)
+ } else {
+ // Jump to default case.
+ lbl := newCaseLabel()
+ i.Nbody = list1(Nod(OGOTO, lbl, nil))
+ // Wrap default case with label.
+ blk := Nod(OBLOCK, nil, nil)
+ blk.List = list(list1(Nod(OLABEL, lbl, nil)), def)
+ def = blk
+ }
+ typecheck(&i.Left, Erv)
+ cas = list(cas, i)
+
+ if !isnilinter(cond.Right.Type) {
+ // Load type from itab.
+ typ = Nod(ODOTPTR, typ, nil)
+ typ.Type = Ptrto(Types[TUINT8])
+ typ.Typecheck = 1
+ typ.Xoffset = int64(Widthptr) // offset of _type in runtime.itab
+ typ.Bounded = true // guaranteed not to fault
+ }
+ // Load hash from type.
+ h := Nod(ODOTPTR, typ, nil)
+ h.Type = Types[TUINT32]
+ h.Typecheck = 1
+ h.Xoffset = int64(2 * Widthptr) // offset of hash in runtime._type
+ h.Bounded = true // guaranteed not to fault
+ a = Nod(OAS, s.hashname, h)
+ typecheck(&a, Etop)
+ cas = list(cas, a)
// insert type equality check into each case block
for _, c := range cc {
n := c.node
switch c.typ {
- case caseKindTypeNil:
- var v Val
- v.U = new(NilVal)
- a = Nod(OIF, nil, nil)
- a.Left = Nod(OEQ, s.facename, nodlit(v))
- typecheck(&a.Left, Erv)
- a.Nbody = list1(n.Right) // if i==nil { goto l }
- n.Right = a
-
case caseKindTypeVar, caseKindTypeConst:
n.Right = s.typeone(n)
+ default:
+ Fatalf("typeSwitch with bad kind: %d", c.typ)
}
}
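
Note: the walk now emits the nil check before touching the type word, so the later hash load can be marked Bounded (it cannot fault). In source terms the lowering has roughly this shape; the function below is a hand-written illustration, not compiler output:

	// typeSwitch sketches the lowered form: nil first, then hash-based dispatch.
	func typeSwitch(e interface{}) string {
		if e == nil { // the OITAB == nil check inserted above
			return "nil case, or default when there is no nil case"
		}
		// Here the compiler loads the type hash (from e._type, or from the
		// itab for non-empty interfaces) and compares it against each case's
		// precomputed hash before the full type check. Plain Go can only
		// approximate that with an ordinary type switch:
		switch e.(type) {
		case int:
			return "case int"
		case string:
			return "case string"
		default:
			return "default"
		}
	}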
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index f74bb33..8fd6f85 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -936,7 +936,6 @@
n.Type = n.Right.Type
n.Right = nil
if n.Type == nil {
- n.Type = nil
return
}
}
diff --git a/src/cmd/compile/internal/gc/util.go b/src/cmd/compile/internal/gc/util.go
index 7ed3b39..18e990a 100644
--- a/src/cmd/compile/internal/gc/util.go
+++ b/src/cmd/compile/internal/gc/util.go
@@ -1,3 +1,7 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
package gc
import (
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index dddcb68..45b85b9 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -3193,6 +3193,21 @@
return
}
+ if t.Etype == TARRAY {
+ // Zero- or single-element array, of any type.
+ switch t.Bound {
+ case 0:
+ finishcompare(np, n, Nodbool(n.Op == OEQ), init)
+ return
+ case 1:
+ l0 := Nod(OINDEX, l, Nodintconst(0))
+ r0 := Nod(OINDEX, r, Nodintconst(0))
+ a := Nod(n.Op, l0, r0)
+ finishcompare(np, n, a, init)
+ return
+ }
+ }
+
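
Note: the new TARRAY cases fold comparisons of zero-length arrays to a constant and reduce one-element arrays to a comparison of the lone element. Hand-written equivalents of what walkcompare now produces for == (illustrative only; != folds to the opposite constant in the zero-length case):

	func eq0(a, b [0]int) bool { return true }         // length 0: == is constantly true
	func eq1(a, b [1]int) bool { return a[0] == b[0] } // length 1: compare the lone element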
if t.Etype == TSTRUCT && countfield(t) <= 4 {
// Struct of four or fewer fields.
// Inline comparisons.