aboutsummaryrefslogtreecommitdiff
path: root/src/pkg/runtime/malloc.goc
diff options
context:
space:
mode:
authorRuss Cox <rsc@golang.org>2013-02-15 14:27:03 -0500
committerRuss Cox <rsc@golang.org>2013-02-15 14:27:03 -0500
commit8a6ff3ab3469ea6b448d682ac7ebc3b818208634 (patch)
treebe0d4d94d611d072d7e2c615a6096b820e532dbf /src/pkg/runtime/malloc.goc
parentf87b7f67b232db252a527dbc000533a27ccb8cd2 (diff)
downloadgo-8a6ff3ab3469ea6b448d682ac7ebc3b818208634.tar.xz
runtime: allocate heap metadata at run time
Before, the mheap structure was in the bss, but it's quite large (today, 256 MB, much of which is never actually paged in), and it makes Go binaries run afoul of exec-time bss size limits on some BSD systems.

Fixes #4447.

R=golang-dev, dave, minux.ma, remyoudompheng, iant
CC=golang-dev
https://golang.org/cl/7307122
Diffstat (limited to 'src/pkg/runtime/malloc.goc')
-rw-r--r--src/pkg/runtime/malloc.goc54
1 file changed, 28 insertions, 26 deletions
diff --git a/src/pkg/runtime/malloc.goc b/src/pkg/runtime/malloc.goc
index 09367ec174..b5849766c2 100644
--- a/src/pkg/runtime/malloc.goc
+++ b/src/pkg/runtime/malloc.goc
@@ -14,8 +14,7 @@ package runtime
#include "typekind.h"
#include "race.h"
-#pragma dataflag 16 /* mark mheap as 'no pointers', hiding from garbage collector */
-MHeap runtime·mheap;
+MHeap *runtime·mheap;
int32 runtime·checking;
@@ -66,7 +65,7 @@ runtime·mallocgc(uintptr size, uint32 flag, int32 dogc, int32 zeroed)
npages = size >> PageShift;
if((size & PageMask) != 0)
npages++;
- s = runtime·MHeap_Alloc(&runtime·mheap, npages, 0, 1, zeroed);
+ s = runtime·MHeap_Alloc(runtime·mheap, npages, 0, 1, zeroed);
if(s == nil)
runtime·throw("out of memory");
size = npages<<PageShift;
@@ -80,9 +79,9 @@ runtime·mallocgc(uintptr size, uint32 flag, int32 dogc, int32 zeroed)
if (sizeof(void*) == 4 && c->local_total_alloc >= (1<<30)) {
// purge cache stats to prevent overflow
- runtime·lock(&runtime·mheap);
+ runtime·lock(runtime·mheap);
runtime·purgecachedstats(c);
- runtime·unlock(&runtime·mheap);
+ runtime·unlock(runtime·mheap);
}
if(!(flag & FlagNoGC))
@@ -166,7 +165,7 @@ runtime·free(void *v)
// they might coalesce v into other spans and change the bitmap further.
runtime·markfreed(v, size);
runtime·unmarkspan(v, 1<<PageShift);
- runtime·MHeap_Free(&runtime·mheap, s, 1);
+ runtime·MHeap_Free(runtime·mheap, s, 1);
} else {
// Small object.
size = runtime·class_to_size[sizeclass];
@@ -196,12 +195,12 @@ runtime·mlookup(void *v, byte **base, uintptr *size, MSpan **sp)
m->mcache->local_nlookup++;
if (sizeof(void*) == 4 && m->mcache->local_nlookup >= (1<<30)) {
// purge cache stats to prevent overflow
- runtime·lock(&runtime·mheap);
+ runtime·lock(runtime·mheap);
runtime·purgecachedstats(m->mcache);
- runtime·unlock(&runtime·mheap);
+ runtime·unlock(runtime·mheap);
}
- s = runtime·MHeap_LookupMaybe(&runtime·mheap, v);
+ s = runtime·MHeap_LookupMaybe(runtime·mheap, v);
if(sp)
*sp = s;
if(s == nil) {
@@ -245,11 +244,11 @@ runtime·allocmcache(void)
intgo rate;
MCache *c;
- runtime·lock(&runtime·mheap);
- c = runtime·FixAlloc_Alloc(&runtime·mheap.cachealloc);
- mstats.mcache_inuse = runtime·mheap.cachealloc.inuse;
- mstats.mcache_sys = runtime·mheap.cachealloc.sys;
- runtime·unlock(&runtime·mheap);
+ runtime·lock(runtime·mheap);
+ c = runtime·FixAlloc_Alloc(&runtime·mheap->cachealloc);
+ mstats.mcache_inuse = runtime·mheap->cachealloc.inuse;
+ mstats.mcache_sys = runtime·mheap->cachealloc.sys;
+ runtime·unlock(runtime·mheap);
runtime·memclr((byte*)c, sizeof(*c));
// Set first allocation sample size.
@@ -266,10 +265,10 @@ void
runtime·freemcache(MCache *c)
{
runtime·MCache_ReleaseAll(c);
- runtime·lock(&runtime·mheap);
+ runtime·lock(runtime·mheap);
runtime·purgecachedstats(c);
- runtime·FixAlloc_Free(&runtime·mheap.cachealloc, c);
- runtime·unlock(&runtime·mheap);
+ runtime·FixAlloc_Free(&runtime·mheap->cachealloc, c);
+ runtime·unlock(runtime·mheap);
}
void
@@ -314,6 +313,9 @@ runtime·mallocinit(void)
USED(arena_size);
USED(bitmap_size);
+ if((runtime·mheap = runtime·SysAlloc(sizeof(*runtime·mheap))) == nil)
+ runtime·throw("runtime: cannot allocate heap metadata");
+
runtime·InitSizes();
limit = runtime·memlimit();
@@ -392,13 +394,13 @@ runtime·mallocinit(void)
if((uintptr)p & (((uintptr)1<<PageShift)-1))
runtime·throw("runtime: SysReserve returned unaligned address");
- runtime·mheap.bitmap = p;
- runtime·mheap.arena_start = p + bitmap_size;
- runtime·mheap.arena_used = runtime·mheap.arena_start;
- runtime·mheap.arena_end = runtime·mheap.arena_start + arena_size;
+ runtime·mheap->bitmap = p;
+ runtime·mheap->arena_start = p + bitmap_size;
+ runtime·mheap->arena_used = runtime·mheap->arena_start;
+ runtime·mheap->arena_end = runtime·mheap->arena_start + arena_size;
// Initialize the rest of the allocator.
- runtime·MHeap_Init(&runtime·mheap, runtime·SysAlloc);
+ runtime·MHeap_Init(runtime·mheap, runtime·SysAlloc);
m->mcache = runtime·allocmcache();
// See if it works.
@@ -496,8 +498,8 @@ runtime·settype_flush(M *mp, bool sysalloc)
// (Manually inlined copy of runtime·MHeap_Lookup)
p = (uintptr)v>>PageShift;
if(sizeof(void*) == 8)
- p -= (uintptr)runtime·mheap.arena_start >> PageShift;
- s = runtime·mheap.map[p];
+ p -= (uintptr)runtime·mheap->arena_start >> PageShift;
+ s = runtime·mheap->map[p];
if(s->sizeclass == 0) {
s->types.compression = MTypes_Single;
@@ -610,7 +612,7 @@ runtime·settype(void *v, uintptr t)
}
if(DebugTypeAtBlockEnd) {
- s = runtime·MHeap_Lookup(&runtime·mheap, v);
+ s = runtime·MHeap_Lookup(runtime·mheap, v);
*(uintptr*)((uintptr)v+s->elemsize-sizeof(uintptr)) = t;
}
}
@@ -649,7 +651,7 @@ runtime·gettype(void *v)
uintptr t, ofs;
byte *data;
- s = runtime·MHeap_LookupMaybe(&runtime·mheap, v);
+ s = runtime·MHeap_LookupMaybe(runtime·mheap, v);
if(s != nil) {
t = 0;
switch(s->types.compression) {