aboutsummaryrefslogtreecommitdiff
path: root/src/runtime/export_test.go
diff options
context:
space:
mode:
Diffstat (limited to 'src/runtime/export_test.go')
-rw-r--r--src/runtime/export_test.go23
1 file changed, 17 insertions, 6 deletions
diff --git a/src/runtime/export_test.go b/src/runtime/export_test.go
index 81542deb59..1f55717f0a 100644
--- a/src/runtime/export_test.go
+++ b/src/runtime/export_test.go
@@ -9,6 +9,7 @@ package runtime
import (
"internal/abi"
"internal/goarch"
+ "internal/goexperiment"
"internal/goos"
"internal/runtime/atomic"
"internal/runtime/gc"
@@ -58,9 +59,6 @@ const CrashStackImplemented = crashStackImplemented
const TracebackInnerFrames = tracebackInnerFrames
const TracebackOuterFrames = tracebackOuterFrames
-var MapKeys = keys
-var MapValues = values
-
var LockPartialOrder = lockPartialOrder
type TimeTimer = timeTimer
@@ -417,7 +415,8 @@ func ReadMemStatsSlow() (base, slow MemStats) {
slow.HeapReleased += uint64(pg) * pageSize
}
for _, p := range allp {
- pg := sys.OnesCount64(p.pcache.scav)
+ // Only count scav bits for pages in the cache
+ pg := sys.OnesCount64(p.pcache.cache & p.pcache.scav)
slow.HeapReleased += uint64(pg) * pageSize
}
@@ -1122,12 +1121,16 @@ func CheckScavengedBitsCleared(mismatches []BitsMismatch) (n int, ok bool) {
// Lock so that we can safely access the bitmap.
lock(&mheap_.lock)
+
+ heapBase := mheap_.pages.inUse.ranges[0].base.addr()
+ secondArenaBase := arenaBase(arenaIndex(heapBase) + 1)
chunkLoop:
for i := mheap_.pages.start; i < mheap_.pages.end; i++ {
chunk := mheap_.pages.tryChunkOf(i)
if chunk == nil {
continue
}
+ cb := chunkBase(i)
for j := 0; j < pallocChunkPages/64; j++ {
// Run over each 64-bit bitmap section and ensure
// scavenged is being cleared properly on allocation.
@@ -1137,12 +1140,20 @@ func CheckScavengedBitsCleared(mismatches []BitsMismatch) (n int, ok bool) {
want := chunk.scavenged[j] &^ chunk.pallocBits[j]
got := chunk.scavenged[j]
if want != got {
+ // When goexperiment.RandomizedHeapBase64 is set we use a
+ // series of padding pages to generate a randomized heap
+ // base address; these pages have both the alloc and scav
+ // bits set. If we see this for a chunk between the heap
+ // base address and the start of the second arena, continue.
+ if goexperiment.RandomizedHeapBase64 && (cb >= heapBase && cb < secondArenaBase) {
+ continue
+ }
ok = false
if n >= len(mismatches) {
break chunkLoop
}
mismatches[n] = BitsMismatch{
- Base: chunkBase(i) + uintptr(j)*64*pageSize,
+ Base: cb + uintptr(j)*64*pageSize,
Got: got,
Want: want,
}
@@ -1761,7 +1772,7 @@ func NewUserArena() *UserArena {
func (a *UserArena) New(out *any) {
i := efaceOf(out)
typ := i._type
- if typ.Kind_&abi.KindMask != abi.Pointer {
+ if typ.Kind() != abi.Pointer {
panic("new result of non-ptr type")
}
typ = (*ptrtype)(unsafe.Pointer(typ)).Elem