// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// See malloc.h for an overview.
//
// The MCentral doesn't actually contain the list of free objects; the MSpan does.
// Each MCentral is two lists of MSpans: those with free objects (c->nonempty)
// and those that are completely allocated (c->mempty).
//
// TODO(rsc): tcmalloc uses a "transfer cache" to split the list
// into sections of class_to_transfercount[sizeclass] objects
// so that it is faster to move those lists between MCaches and MCentrals.

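// For orientation, a rough sketch of the MCentral fields this file relies on
// (the authoritative definition is in malloc.h; the exact layout there may differ):
//
//	struct MCentral
//	{
//		Lock;			// protects everything below
//		int32 sizeclass;	// size class this central list serves
//		MSpan nonempty;		// list head: spans with at least one free object
//		MSpan mempty;		// list head: fully allocated spans (and spans cached in an MCache)
//		int32 nfree;		// number of free objects across the nonempty spans
//	};
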
#include "runtime.h"
#include "arch.h"
#include "malloc.h"

static bool MCentral_Grow(MCentral *c);
static void MCentral_Free(MCentral *c, MLink *v);
static void MCentral_ReturnToHeap(MCentral *c, MSpan *s);

// Initialize a single central free list.
void
runtime_MCentral_Init(MCentral *c, int32 sizeclass)
{
	c->sizeclass = sizeclass;
	runtime_MSpanList_Init(&c->nonempty);
	runtime_MSpanList_Init(&c->mempty);
}

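// The sweepgen tests in MCentral_CacheSpan below follow the usual span
// sweep-state convention (see the MSpan comments in malloc.h); roughly:
//
//	s->sweepgen == sg-2	// span needs sweeping
//	s->sweepgen == sg-1	// span is currently being swept
//	s->sweepgen == sg	// span has been swept and is ready to use
//
// where sg is runtime_mheap.sweepgen, which grows by 2 at every GC, so a
// successful CAS from sg-2 to sg-1 claims the span for sweeping here.
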
// Allocate a span to use in an MCache.
MSpan*
runtime_MCentral_CacheSpan(MCentral *c)
{
	MSpan *s;
	int32 cap, n;
	uint32 sg;

	runtime_lock(c);
	sg = runtime_mheap.sweepgen;
retry:
	for(s = c->nonempty.next; s != &c->nonempty; s = s->next) {
		if(s->sweepgen == sg-2 && runtime_cas(&s->sweepgen, sg-2, sg-1)) {
			runtime_unlock(c);
			runtime_MSpan_Sweep(s);
			runtime_lock(c);
			// the span could have been moved to heap, retry
			goto retry;
		}
		if(s->sweepgen == sg-1) {
			// the span is being swept by background sweeper, skip
			continue;
		}
		// we have a nonempty span that does not require sweeping, allocate from it
		goto havespan;
	}

	for(s = c->mempty.next; s != &c->mempty; s = s->next) {
		if(s->sweepgen == sg-2 && runtime_cas(&s->sweepgen, sg-2, sg-1)) {
			// we have an empty span that requires sweeping,
			// sweep it and see if we can free some space in it
			runtime_MSpanList_Remove(s);
			// swept spans are at the end of the list
			runtime_MSpanList_InsertBack(&c->mempty, s);
			runtime_unlock(c);
			runtime_MSpan_Sweep(s);
			runtime_lock(c);
			// the span could be moved to nonempty or heap, retry
			goto retry;
		}
		if(s->sweepgen == sg-1) {
			// the span is being swept by background sweeper, skip
			continue;
		}
		// already swept empty span,
		// all subsequent ones must also be either swept or in process of sweeping
		break;
	}

	// Replenish central list if empty.
	if(!MCentral_Grow(c)) {
		runtime_unlock(c);
		return nil;
	}
	goto retry;

havespan:
	cap = (s->npages << PageShift) / s->elemsize;
	n = cap - s->ref;
	if(n == 0)
		runtime_throw("empty span");
	if(s->freelist == nil)
		runtime_throw("freelist empty");
	c->nfree -= n;
	runtime_MSpanList_Remove(s);
	runtime_MSpanList_InsertBack(&c->mempty, s);
	s->incache = true;
	runtime_unlock(c);
	return s;
}

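// Caller-side sketch (illustrative only; the real code is in mcache.c and the
// helper and field names used here are assumptions): an MCache that has
// exhausted its span for a size class refills roughly like
//
//	s = runtime_MCentral_CacheSpan(&runtime_mheap.central[sizeclass]);
//	mcache->alloc[sizeclass] = s;	// span is now owned (incache) by that MCache
//
// and later hands the span back via runtime_MCentral_UncacheSpan, which is
// what drains the freebuf accumulated by MCentral_Free below.
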
// Return span from an MCache.
void
runtime_MCentral_UncacheSpan(MCentral *c, MSpan *s)
{
	MLink *v;
	int32 cap, n;

	runtime_lock(c);

	s->incache = false;

	// Move any explicitly freed items from the freebuf to the freelist.
	while((v = s->freebuf) != nil) {
		s->freebuf = v->next;
		runtime_markfreed(v);
		v->next = s->freelist;
		s->freelist = v;
		s->ref--;
	}

	if(s->ref == 0) {
		// Free back to heap. Unlikely, but possible.
		MCentral_ReturnToHeap(c, s); // unlocks c
		return;
	}

	cap = (s->npages << PageShift) / s->elemsize;
	n = cap - s->ref;
	if(n > 0) {
		c->nfree += n;
		runtime_MSpanList_Remove(s);
		runtime_MSpanList_Insert(&c->nonempty, s);
	}
	runtime_unlock(c);
}

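// Note on the freebuf drained above (derived from the code in this file):
// while a span is cached (s->incache set), MCentral_Free below must not touch
// s->freelist, which the owning MCache uses without holding the central lock,
// so freed objects are parked on s->freebuf instead. They are only marked
// freed, linked onto s->freelist, and counted against s->ref once the MCache
// returns the span through runtime_MCentral_UncacheSpan.
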
// Free the list of objects back into the central free list c.
// Called from runtime_free.
void
runtime_MCentral_FreeList(MCentral *c, MLink *start)
{
	MLink *next;

	runtime_lock(c);
	for(; start != nil; start = next) {
		next = start->next;
		MCentral_Free(c, start);
	}
	runtime_unlock(c);
}

// Helper: free one object back into the central free list.
// Caller must hold lock on c on entry. Holds lock on exit.
static void
MCentral_Free(MCentral *c, MLink *v)
{
	MSpan *s;

	// Find span for v.
	s = runtime_MHeap_Lookup(&runtime_mheap, v);
	if(s == nil || s->ref == 0)
		runtime_throw("invalid free");
	if(s->sweepgen != runtime_mheap.sweepgen)
		runtime_throw("free into unswept span");

	// If the span is currently being used unsynchronized by an MCache,
	// we can't modify the freelist. Add to the freebuf instead. The
	// items will get moved to the freelist when the span is returned
	// by the MCache.
	if(s->incache) {
		v->next = s->freebuf;
		s->freebuf = v;
		return;
	}

	// Move span to nonempty if necessary.
	if(s->freelist == nil) {
		runtime_MSpanList_Remove(s);
		runtime_MSpanList_Insert(&c->nonempty, s);
	}

	// Add the object to span's free list.
	runtime_markfreed(v);
	v->next = s->freelist;
	s->freelist = v;
	s->ref--;
	c->nfree++;

	// If s is completely freed, return it to the heap.
	if(s->ref == 0) {
		MCentral_ReturnToHeap(c, s); // unlocks c
		runtime_lock(c);
	}
}

// Free n objects from a span s back into the central free list c.
// Called during sweep.
// Returns true if the span was returned to heap. Sets sweepgen to
// the latest generation.
bool
runtime_MCentral_FreeSpan(MCentral *c, MSpan *s, int32 n, MLink *start, MLink *end)
{
	if(s->incache)
		runtime_throw("freespan into cached span");
	runtime_lock(c);

	// Move to nonempty if necessary.
	if(s->freelist == nil) {
		runtime_MSpanList_Remove(s);
		runtime_MSpanList_Insert(&c->nonempty, s);
	}

	// Add the objects back to s's free list.
	end->next = s->freelist;
	s->freelist = start;
	s->ref -= n;
	c->nfree += n;

	// delay updating sweepgen until here. This is the signal that
	// the span may be used in an MCache, so it must come after the
	// linked list operations above (actually, just after the
	// lock of c above.)
	runtime_atomicstore(&s->sweepgen, runtime_mheap.sweepgen);

	if(s->ref != 0) {
		runtime_unlock(c);
		return false;
	}

	// s is completely freed, return it to the heap.
	MCentral_ReturnToHeap(c, s); // unlocks c
	return true;
}

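// Sweep-side sketch (illustrative; the actual caller is the span sweeper,
// runtime_MSpan_Sweep, and the local names here are assumptions): the sweeper
// gathers the objects it reclaims from a span into one linked list and returns
// them in a single call, roughly
//
//	// head.next .. end is the chain of nfree reclaimed objects
//	runtime_MCentral_FreeSpan(&runtime_mheap.central[cl], s, nfree, head.next, end);
//
// so the central lock is taken once per span rather than once per object.
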
void
runtime_MGetSizeClassInfo(int32 sizeclass, uintptr *sizep, int32 *npagesp, int32 *nobj)
{
	int32 size;
	int32 npages;

	npages = runtime_class_to_allocnpages[sizeclass];
	size = runtime_class_to_size[sizeclass];
	*npagesp = npages;
	*sizep = size;
	*nobj = (npages << PageShift) / size;
}

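// Worked example with hypothetical numbers, assuming 8K pages (PageShift == 13):
// for a size class with runtime_class_to_size == 48 and
// runtime_class_to_allocnpages == 1,
//
//	*nobj = (1 << 13) / 48 = 170
//
// objects fit in the span, leaving 8192 - 170*48 = 32 bytes of tail waste.
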
// Fetch a new span from the heap and
// carve into objects for the free list.
static bool
MCentral_Grow(MCentral *c)
{
	int32 i, n, npages;
	uintptr size;
	MLink **tailp, *v;
	byte *p;
	MSpan *s;

	runtime_unlock(c);
	runtime_MGetSizeClassInfo(c->sizeclass, &size, &npages, &n);
	s = runtime_MHeap_Alloc(&runtime_mheap, npages, c->sizeclass, 0, 1);
	if(s == nil) {
		// TODO(rsc): Log out of memory
		runtime_lock(c);
		return false;
	}

	// Carve span into sequence of blocks.
	tailp = &s->freelist;
	p = (byte*)(s->start << PageShift);
	s->limit = (uintptr)(p + size*n);
	for(i=0; i<n; i++) {
		v = (MLink*)p;
		*tailp = v;
		tailp = &v->next;
		p += size;
	}
	*tailp = nil;
	runtime_markspan((byte*)(s->start<<PageShift), size, n, size*n < (s->npages<<PageShift));

	runtime_lock(c);
	c->nfree += n;
	runtime_MSpanList_Insert(&c->nonempty, s);
	return true;
}

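// After the carving loop above, the span and its free list look roughly like
// this (illustrative, for object size "size" and n objects):
//
//	p          p+size     p+2*size          p+(n-1)*size       s->limit
//	| obj 0    | obj 1    | obj 2    | ...  | obj n-1      |   unused tail (if any)
//
//	s->freelist -> obj 0 -> obj 1 -> ... -> obj n-1 -> nil
//
// The first word of each free object serves as its MLink next pointer while
// the object sits on the free list.
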
// Return s to the heap. s must be unused (s->ref == 0). Unlocks c.
static void
MCentral_ReturnToHeap(MCentral *c, MSpan *s)
{
	int32 size;

	size = runtime_class_to_size[c->sizeclass];
	runtime_MSpanList_Remove(s);
	s->needzero = 1;
	s->freelist = nil;
	if(s->ref != 0)
		runtime_throw("ref wrong");
	c->nfree -= (s->npages << PageShift) / size;
	runtime_unlock(c);
	runtime_unmarkspan((byte*)(s->start<<PageShift), s->npages<<PageShift);
	runtime_MHeap_Free(&runtime_mheap, s, 0);
}