Diffstat (limited to 'src/runtime/malloc.go')
 src/runtime/malloc.go | 40 +++++++++++++++++++++++-----------------
 1 file changed, 23 insertions(+), 17 deletions(-)
diff --git a/src/runtime/malloc.go b/src/runtime/malloc.go
index 70a13d0576..c7335c55c6 100644
--- a/src/runtime/malloc.go
+++ b/src/runtime/malloc.go
@@ -813,24 +813,22 @@ retry:
// base address for all 0-byte allocations
var zerobase uintptr
-// nextFreeFast returns the next free object if one is quickly available.
-// Otherwise it returns 0.
-func nextFreeFast(s *mspan) gclinkptr {
+// nextFreeFast returns the next free object if one is quickly available,
+// and the corresponding free index. Otherwise it returns 0, 0.
+func nextFreeFast(s *mspan) (gclinkptr, uintptr) {
theBit := sys.TrailingZeros64(s.allocCache) // Is there a free object in the allocCache?
if theBit < 64 {
result := s.freeindex + uintptr(theBit)
if result < s.nelems {
- freeidx := result + 1
- if freeidx%64 == 0 && freeidx != s.nelems {
- return 0
- }
s.allocCache >>= uint(theBit + 1)
- s.freeindex = freeidx
+ // NOTE: s.freeindex is not updated for now (although allocCache
+ // is updated). mallocgc will update s.freeindex later after the
+ // memory is initialized.
s.allocCount++
- return gclinkptr(result*s.elemsize + s.base())
+ return gclinkptr(result*s.elemsize + s.base()), result
}
}
- return 0
+ return 0, 0
}
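
The fast path above hinges on one trick: s.allocCache is a 64-bit window of the span's free bitmap, and sys.TrailingZeros64 finds the lowest set bit, i.e. the nearest free slot at or after s.freeindex. Below is a minimal standalone sketch of that scan, using math/bits in place of the runtime-internal sys package; cache, freeIndex, and nelems are illustrative stand-ins for the mspan fields, and freeIndex advances immediately here, which is the pre-patch behavior this change defers to mallocgc.

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// Each 1 bit marks a free slot at freeIndex + bit offset.
	cache := uint64(0b10110100)
	freeIndex := uint64(128)
	nelems := uint64(512)

	for {
		bit := bits.TrailingZeros64(cache)
		if bit == 64 {
			break // window exhausted; the runtime would refill it
		}
		idx := freeIndex + uint64(bit)
		if idx >= nelems {
			break // past the end of the span
		}
		// Consume the slot by shifting the window past it. The
		// patch above does this shift but defers the freeindex
		// update to mallocgc; here we advance it immediately.
		cache >>= uint(bit + 1)
		freeIndex = idx + 1
		fmt.Println("allocated slot", idx)
	}
}
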
// nextFree returns the next free object from the cached span if one is available.
@@ -842,10 +840,10 @@ func nextFreeFast(s *mspan) gclinkptr {
//
// Must run in a non-preemptible context since otherwise the owner of
// c could change.
-func (c *mcache) nextFree(spc spanClass) (v gclinkptr, s *mspan, shouldhelpgc bool) {
+func (c *mcache) nextFree(spc spanClass) (v gclinkptr, s *mspan, freeIndex uintptr, shouldhelpgc bool) {
s = c.alloc[spc]
shouldhelpgc = false
- freeIndex := s.nextFreeIndex()
+ freeIndex = s.nextFreeIndex()
if freeIndex == s.nelems {
// The span is full.
if uintptr(s.allocCount) != s.nelems {
@@ -953,6 +951,7 @@ func mallocgc(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
// In some cases block zeroing can profitably (for latency reduction purposes)
// be delayed till preemption is possible; delayedZeroing tracks that state.
delayedZeroing := false
+ var freeidx uintptr
if size <= maxSmallSize {
if noscan && size < maxTinySize {
// Tiny allocator.
@@ -1012,9 +1011,10 @@ func mallocgc(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
}
// Allocate a new maxTinySize block.
span = c.alloc[tinySpanClass]
- v := nextFreeFast(span)
+ var v gclinkptr
+ v, freeidx = nextFreeFast(span)
if v == 0 {
- v, span, shouldhelpgc = c.nextFree(tinySpanClass)
+ v, span, freeidx, shouldhelpgc = c.nextFree(tinySpanClass)
}
x = unsafe.Pointer(v)
(*[2]uint64)(x)[0] = 0
@@ -1037,9 +1037,10 @@ func mallocgc(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
size = uintptr(class_to_size[sizeclass])
spc := makeSpanClass(sizeclass, noscan)
span = c.alloc[spc]
- v := nextFreeFast(span)
+ var v gclinkptr
+ v, freeidx = nextFreeFast(span)
if v == 0 {
- v, span, shouldhelpgc = c.nextFree(spc)
+ v, span, freeidx, shouldhelpgc = c.nextFree(spc)
}
x = unsafe.Pointer(v)
if needzero && span.needzero != 0 {
@@ -1051,7 +1052,7 @@ func mallocgc(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
// For large allocations, keep track of zeroed state so that
// bulk zeroing can happen later in a preemptible context.
span = c.allocLarge(size, noscan)
- span.freeindex = 1
+ freeidx = 0
span.allocCount = 1
size = span.elemsize
x = unsafe.Pointer(span.base())
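
The large-object path now records freeidx = 0 (the single object occupies element 0 of its span) and leaves the freeindex publication to the common code below; the zeroed-state comment refers to delayed zeroing, where the expensive clear of a large block is postponed until preemption is possible. Here is a toy sketch of that general pattern, with all names (allocator, alloc) invented for illustration, not taken from the runtime.

package main

import (
	"fmt"
	"sync"
)

// allocator is a toy arena: alloc hands out possibly dirty memory
// under a lock and lets the caller zero it afterwards.
type allocator struct {
	mu  sync.Mutex
	buf []byte
}

// alloc returns a block and whether the caller still owes a zeroing
// pass, loosely mirroring how mallocgc tracks delayedZeroing for
// large allocations.
func (a *allocator) alloc(n int) (p []byte, needsZero bool) {
	a.mu.Lock()
	defer a.mu.Unlock()
	if n > len(a.buf) {
		return nil, false // toy arena exhausted
	}
	p, a.buf = a.buf[:n], a.buf[n:]
	return p, true
}

func main() {
	a := &allocator{buf: make([]byte, 1<<20)}
	p, needsZero := a.alloc(4096)
	if needsZero {
		// The expensive clear runs here, outside the critical
		// section, where blocking or preemption is acceptable.
		for i := range p {
			p[i] = 0
		}
	}
	fmt.Println("allocated", len(p), "bytes, zeroed lazily")
}
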
@@ -1093,6 +1094,11 @@ func mallocgc(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
// but see uninitialized memory or stale heap bits.
publicationBarrier()
+ // Now that x and the heap bits are initialized, update
+ // freeindex so x is seen by the GC (including the
+ // conservative scanner) as an allocated object.
+ span.updateFreeIndex(freeidx + 1)
+
// Allocate black during GC.
// All slots hold nil so no scanning is needed.
// This may be racing with GC so do it atomically if there can be
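
Taken together, the hunks enforce one ordering: initialize the object's memory and heap bits, run publicationBarrier, and only then advance freeindex, so a concurrent scanner (including the conservative one) never treats an uninitialized slot as a live object. Below is a minimal sketch of that init-then-publish discipline using sync/atomic in place of publicationBarrier; slots, freeindex, alloc, and scan are all invented names.

package main

import (
	"fmt"
	"sync/atomic"
)

var (
	slots     [64]uint64
	freeindex atomic.Uint64 // slots below this index count as allocated
)

// alloc initializes slot idx first, then publishes it by advancing
// freeindex; the atomic store plays the role of publicationBarrier.
func alloc(idx, val uint64) {
	slots[idx] = val // initialize the object
	// A scanner that observes freeindex > idx also observes the
	// initialized slot contents, by the release semantics of the
	// atomic store.
	freeindex.Store(idx + 1)
}

// scan mimics a scanner (which in the runtime runs concurrently):
// it only trusts slots below freeindex, so it can never read an
// uninitialized slot as live.
func scan() {
	n := freeindex.Load()
	for i := uint64(0); i < n; i++ {
		fmt.Println("slot", i, "=", slots[i])
	}
}

func main() {
	alloc(0, 42)
	alloc(1, 7)
	scan()
}
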