runtime: make it harder to find collisions in the 64-bit fallback hash

Currently the first argument to mix() can be controlled by an attacker,
as it is just the input bytes xored with some known constants. That lets
an attacker force the value being multiplied to 0. Since mix() multiplies
its two arguments, a zero argument zeroes the result and throws away all
hash state accumulated so far, which can lead to lots of collisions. To
fix, xor the first argument with the process-wide seed as well, so the
magic collision-generating value is no longer a constant known to the
attacker. (Maybe there's a timing attack that could recover the
process-wide seed, but that's a much harder attack.)
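
For illustration, a minimal standalone sketch of the weakness (not the
runtime code itself; it reimplements mix() with math/bits and assumes
only the m2 constant from hash64.go, the same value the new test uses):

    package main

    import (
        "fmt"
        "math/bits"
    )

    // mix mirrors the runtime's fallback mixer: a full 64x64->128-bit
    // multiply whose two halves are xored together.
    func mix(a, b uint64) uint64 {
        hi, lo := bits.Mul64(a, b)
        return hi ^ lo
    }

    const m2 = 0xe7037ed1a0b428db // same value as runtime.m2

    func main() {
        // Before this change, an input word equal to m2 made the first
        // argument r8(p)^m2 == 0, and mix(0, seed) == 0 for every seed:
        // the accumulated state is wiped out, so differing inputs collide.
        for _, seed := range []uint64{1, 12345, 0xdeadbeef} {
            fmt.Println(mix(m2^m2, seed)) // prints 0 every time
        }
    }

After this change the first argument is r8(p)^hashkey[1]^m2, so producing
a zero multiplicand requires knowing the per-process random hashkey.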

Fixes #66841

Change-Id: I33e073c78355d1cee08660de52074e6ccc38b426
Reviewed-on: https://go-review.googlesource.com/c/go/+/579115
Reviewed-by: M Zhuo <mengzhuo1203@gmail.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: Michael Pratt <mpratt@google.com>
diff --git a/src/runtime/hash64.go b/src/runtime/hash64.go
index 2864a4b..bd16b7c 100644
--- a/src/runtime/hash64.go
+++ b/src/runtime/hash64.go
@@ -50,32 +50,32 @@
 			seed1 := seed
 			seed2 := seed
 			for ; l > 48; l -= 48 {
-				seed = mix(r8(p)^m2, r8(add(p, 8))^seed)
-				seed1 = mix(r8(add(p, 16))^m3, r8(add(p, 24))^seed1)
-				seed2 = mix(r8(add(p, 32))^m4, r8(add(p, 40))^seed2)
+				seed = mix(r8(p)^hashkey[1]^m2, r8(add(p, 8))^seed)
+				seed1 = mix(r8(add(p, 16))^hashkey[2]^m3, r8(add(p, 24))^seed1)
+				seed2 = mix(r8(add(p, 32))^hashkey[3]^m4, r8(add(p, 40))^seed2)
 				p = add(p, 48)
 			}
 			seed ^= seed1 ^ seed2
 		}
 		for ; l > 16; l -= 16 {
-			seed = mix(r8(p)^m2, r8(add(p, 8))^seed)
+			seed = mix(r8(p)^hashkey[1]^m2, r8(add(p, 8))^seed)
 			p = add(p, 16)
 		}
 		a = r8(add(p, l-16))
 		b = r8(add(p, l-8))
 	}
 
-	return mix(m5^s, mix(a^m2, b^seed))
+	return mix(m5^s, mix(a^hashkey[1]^m2, b^seed))
 }
 
 func memhash32Fallback(p unsafe.Pointer, seed uintptr) uintptr {
 	a := r4(p)
-	return mix(m5^4, mix(a^m2, a^seed^hashkey[0]^m1))
+	return mix(m5^4, mix(a^hashkey[1]^m2, a^seed^hashkey[0]^m1))
 }
 
 func memhash64Fallback(p unsafe.Pointer, seed uintptr) uintptr {
 	a := r8(p)
-	return mix(m5^8, mix(a^m2, a^seed^hashkey[0]^m1))
+	return mix(m5^8, mix(a^hashkey[1]^m2, a^seed^hashkey[0]^m1))
 }
 
 func mix(a, b uintptr) uintptr {
diff --git a/src/runtime/hash_test.go b/src/runtime/hash_test.go
index 77f9167..36207e7 100644
--- a/src/runtime/hash_test.go
+++ b/src/runtime/hash_test.go
@@ -5,10 +5,13 @@
 package runtime_test
 
 import (
+	"encoding/binary"
 	"fmt"
 	"internal/race"
+	"internal/testenv"
 	"math"
 	"math/rand"
+	"os"
 	. "runtime"
 	"slices"
 	"strings"
@@ -625,6 +628,29 @@
 	h.check(t)
 }
 
+func TestIssue66841(t *testing.T) {
+	testenv.MustHaveExec(t)
+	if *UseAeshash && os.Getenv("TEST_ISSUE_66841") == "" {
+		// We want to test the backup hash, so if we're running on a machine
+		// that uses aeshash, exec ourselves while turning aes off.
+		cmd := testenv.CleanCmdEnv(testenv.Command(t, os.Args[0], "-test.run=^TestIssue66841$"))
+		cmd.Env = append(cmd.Env, "GODEBUG=cpu.aes=off", "TEST_ISSUE_66841=1")
+		out, err := cmd.CombinedOutput()
+		if err != nil {
+			t.Errorf("%s", string(out))
+		}
+		// Fall through. Might as well run this test when aeshash is on also.
+	}
+	h := newHashSet()
+	var b [16]byte
+	binary.LittleEndian.PutUint64(b[:8], 0xe7037ed1a0b428db) // runtime.m2
+	for i := 0; i < 1000; i++ {
+		binary.LittleEndian.PutUint64(b[8:], uint64(i))
+		h.addB(b[:])
+	}
+	h.check(t)
+}
+
 // size of the hash output (32 or 64 bits)
 const hashSize = 32 + int(^uintptr(0)>>63<<5)