aboutsummaryrefslogtreecommitdiff
path: root/src/runtime/malloc_generated.go
diff options
context:
space:
mode:
authorthepudds <thepudds1460@gmail.com>2025-11-09 09:24:22 -0500
committerthepudds <thepudds1460@gmail.com>2025-11-14 14:23:16 -0800
commit50128a21541e3fd712ad717a223aaa109cb86d43 (patch)
tree984e45288bdebd7dd2533a8ab62c4154cec244cf /src/runtime/malloc_generated.go
parentc3708350a417a3149bf9191878c3ad945063d439 (diff)
downloadgo-50128a21541e3fd712ad717a223aaa109cb86d43.tar.xz
runtime: support runtime.freegc in size-specialized mallocs for noscan objects
This CL is part of a set of CLs that attempt to reduce how much work the GC must do. See the design in https://go.dev/design/74299-runtime-freegc. This CL updates the smallNoScanStub stub in malloc_stubs.go to reuse heap objects that have been freed by runtime.freegc calls, and generates the corresponding size-specialized code in malloc_generated.go. This CL only adds support in the specialized mallocs for noscan heap objects (objects without pointers). A later CL handles objects with pointers. While we are here, we leave a couple of breadcrumbs in mkmalloc.go on how to do the generation. Updates #74299 Change-Id: I2657622601a27211554ee862fce057e101767a70 Reviewed-on: https://go-review.googlesource.com/c/go/+/715761 Reviewed-by: Junyang Shao <shaojunyang@google.com> LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com> Reviewed-by: Michael Knyszek <mknyszek@google.com>
Diffstat (limited to 'src/runtime/malloc_generated.go')
-rw-r--r--src/runtime/malloc_generated.go651
1 files changed, 651 insertions, 0 deletions
diff --git a/src/runtime/malloc_generated.go b/src/runtime/malloc_generated.go
index 2215dbaddb..5abb61257a 100644
--- a/src/runtime/malloc_generated.go
+++ b/src/runtime/malloc_generated.go
@@ -1,4 +1,5 @@
// Code generated by mkmalloc.go; DO NOT EDIT.
+// See overview in malloc_stubs.go.
package runtime
@@ -6400,6 +6401,32 @@ func mallocgcSmallNoScanSC2(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6497,6 +6524,32 @@ func mallocgcSmallNoScanSC3(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6594,6 +6647,32 @@ func mallocgcSmallNoScanSC4(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6691,6 +6770,32 @@ func mallocgcSmallNoScanSC5(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6788,6 +6893,32 @@ func mallocgcSmallNoScanSC6(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6885,6 +7016,32 @@ func mallocgcSmallNoScanSC7(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -6982,6 +7139,32 @@ func mallocgcSmallNoScanSC8(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7079,6 +7262,32 @@ func mallocgcSmallNoScanSC9(size uintptr, typ *_type, needzero bool) unsafe.Poin
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7176,6 +7385,32 @@ func mallocgcSmallNoScanSC10(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7273,6 +7508,32 @@ func mallocgcSmallNoScanSC11(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7370,6 +7631,32 @@ func mallocgcSmallNoScanSC12(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7467,6 +7754,32 @@ func mallocgcSmallNoScanSC13(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7564,6 +7877,32 @@ func mallocgcSmallNoScanSC14(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7661,6 +8000,32 @@ func mallocgcSmallNoScanSC15(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7758,6 +8123,32 @@ func mallocgcSmallNoScanSC16(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7855,6 +8246,32 @@ func mallocgcSmallNoScanSC17(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -7952,6 +8369,32 @@ func mallocgcSmallNoScanSC18(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8049,6 +8492,32 @@ func mallocgcSmallNoScanSC19(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8146,6 +8615,32 @@ func mallocgcSmallNoScanSC20(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8243,6 +8738,32 @@ func mallocgcSmallNoScanSC21(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8340,6 +8861,32 @@ func mallocgcSmallNoScanSC22(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8437,6 +8984,32 @@ func mallocgcSmallNoScanSC23(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8534,6 +9107,32 @@ func mallocgcSmallNoScanSC24(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8631,6 +9230,32 @@ func mallocgcSmallNoScanSC25(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)
@@ -8728,6 +9353,32 @@ func mallocgcSmallNoScanSC26(size uintptr, typ *_type, needzero bool) unsafe.Poi
const spc = spanClass(sizeclass<<1) | spanClass(1)
span := c.alloc[spc]
+ if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+ v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+ mp.mallocing = 0
+ releasem(mp)
+ x := v
+ {
+
+ if valgrindenabled {
+ valgrindMalloc(x, size)
+ }
+
+ if gcBlackenEnabled != 0 && elemsize != 0 {
+ if assistG := getg().m.curg; assistG != nil {
+ assistG.gcAssistBytes -= int64(elemsize - size)
+ }
+ }
+
+ if debug.malloc {
+ postMallocgcDebug(x, elemsize, typ)
+ }
+ return x
+ }
+
+ }
+
var nextFreeFastResult gclinkptr
if span.allocCache != 0 {
theBit := sys.TrailingZeros64(span.allocCache)