diff options
| author | Matthew Dempsky <mdempsky@google.com> | 2015-10-15 15:59:49 -0700 |
|---|---|---|
| committer | Matthew Dempsky <mdempsky@google.com> | 2015-10-22 17:12:06 +0000 |
| commit | 1652a2c316ff81a8b3c0363db92aaecdf2273540 (patch) | |
| tree | 33dbb0b171d90d4c542af9d87bc70db3646e474c /src/runtime/stack.go | |
| parent | 151f4ec95d26ed702160f61760cc80aa29b6fd3b (diff) | |
| download | go-1652a2c316ff81a8b3c0363db92aaecdf2273540.tar.xz | |
runtime: add mSpanList type to represent lists of mspans
This CL introduces a new mSpanList type to replace the empty mspan
variables that were previously used as list heads.
To be type safe, the previous circular linked list data structure is
now a tail queue instead. One complication of this is that
mSpanList_Remove needs to know the list a span is being removed from,
but this appears to be computable in all circumstances.
As a temporary sanity check, mSpanList_Insert and mSpanList_InsertBack
record the list that an mspan has been inserted into so that
mSpanList_Remove can verify that the correct list was specified.
Whereas mspan is 112 bytes on amd64, mSpanList is only 16 bytes. This
shrinks the size of mheap from 50216 bytes to 12584 bytes.
Change-Id: I8146364753dbc3b4ab120afbb9c7b8740653c216
Reviewed-on: https://go-review.googlesource.com/15906
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Austin Clements <austin@google.com>
Diffstat (limited to 'src/runtime/stack.go')
| -rw-r--r-- | src/runtime/stack.go | 22 |
1 file changed, 11 insertions, 11 deletions
diff --git a/src/runtime/stack.go b/src/runtime/stack.go
index e5cd09498c..56efc2eb4a 100644
--- a/src/runtime/stack.go
+++ b/src/runtime/stack.go
@@ -142,12 +142,12 @@ const (
 // order = log_2(size/FixedStack)
 // There is a free list for each order.
 // TODO: one lock per order?
-var stackpool [_NumStackOrders]mspan
+var stackpool [_NumStackOrders]mSpanList
 var stackpoolmu mutex
 
 // List of stack spans to be freed at the end of GC. Protected by
 // stackpoolmu.
-var stackFreeQueue mspan
+var stackFreeQueue mSpanList
 
 // Cached value of haveexperiment("framepointer")
 var framepointer_enabled bool
@@ -166,8 +166,8 @@ func stackinit() {
 // stackpoolmu held.
 func stackpoolalloc(order uint8) gclinkptr {
 	list := &stackpool[order]
-	s := list.next
-	if s == list {
+	s := list.first
+	if s == nil {
 		// no free stacks. Allocate another span worth.
 		s = mHeap_AllocStack(&mheap_, _StackCacheSize>>_PageShift)
 		if s == nil {
@@ -194,7 +194,7 @@ func stackpoolalloc(order uint8) gclinkptr {
 	s.ref++
 	if s.freelist.ptr() == nil {
 		// all stacks in s are allocated.
-		mSpanList_Remove(s)
+		mSpanList_Remove(list, s)
 	}
 	return x
 }
@@ -228,7 +228,7 @@ func stackpoolfree(x gclinkptr, order uint8) {
 		// pointer into a free span.
 		//
 		// By not freeing, we prevent step #4 until GC is done.
-		mSpanList_Remove(s)
+		mSpanList_Remove(&stackpool[order], s)
 		s.freelist = 0
 		mHeap_FreeStack(&mheap_, s)
 	}
@@ -994,10 +994,10 @@ func freeStackSpans() {
 	// Scan stack pools for empty stack spans.
 	for order := range stackpool {
 		list := &stackpool[order]
-		for s := list.next; s != list; {
+		for s := list.first; s != nil; {
 			next := s.next
 			if s.ref == 0 {
-				mSpanList_Remove(s)
+				mSpanList_Remove(list, s)
 				s.freelist = 0
 				mHeap_FreeStack(&mheap_, s)
 			}
@@ -1006,9 +1006,9 @@ func freeStackSpans() {
 	}
 
 	// Free queued stack spans.
-	for stackFreeQueue.next != &stackFreeQueue {
-		s := stackFreeQueue.next
-		mSpanList_Remove(s)
+	for !mSpanList_IsEmpty(&stackFreeQueue) {
+		s := stackFreeQueue.first
+		mSpanList_Remove(&stackFreeQueue, s)
 		mHeap_FreeStack(&mheap_, s)
 	}
