LEFT | RIGHT |
1 // Copyright 2009 The Go Authors. All rights reserved. | 1 // Copyright 2009 The Go Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style | 2 // Use of this source code is governed by a BSD-style |
3 // license that can be found in the LICENSE file. | 3 // license that can be found in the LICENSE file. |
4 | 4 |
5 // Memory allocator, based on tcmalloc. | 5 // Memory allocator, based on tcmalloc. |
6 // http://goog-perftools.sourceforge.net/doc/tcmalloc.html | 6 // http://goog-perftools.sourceforge.net/doc/tcmalloc.html |
7 | 7 |
8 // The main allocator works in runs of pages. | 8 // The main allocator works in runs of pages. |
9 // Small allocation sizes (up to and including 32 kB) are | 9 // Small allocation sizes (up to and including 32 kB) are |
10 // rounded to one of about 100 size classes, each of which | 10 // rounded to one of about 100 size classes, each of which |
(...skipping 105 matching lines...) |
116 // On 64-bit, we limit the arena to 16G, so 22 bits suffices. | 116 // On 64-bit, we limit the arena to 16G, so 22 bits suffices. |
117 // On 32-bit, we don't bother limiting anything: 20 bits for 4G. | 117 // On 32-bit, we don't bother limiting anything: 20 bits for 4G. |
118 #ifdef _64BIT | 118 #ifdef _64BIT |
119 MHeapMap_Bits = 22, | 119 MHeapMap_Bits = 22, |
120 #else | 120 #else |
121 MHeapMap_Bits = 20, | 121 MHeapMap_Bits = 20, |
122 #endif | 122 #endif |
123 | 123 |
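The bit counts follow from the page size: assuming the 4 kB page (PageShift = 12) defined earlier in this header, a 16 GB arena spans 2^34 / 2^12 = 2^22 pages, so 22 bits of page number suffice on 64-bit, while a full 4 GB address space spans 2^32 / 2^12 = 2^20 pages, hence 20 bits on 32-bit.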
124 // Max number of threads to run garbage collection. | 124 // Max number of threads to run garbage collection. |
125 // 2, 3, and 4 are all plausible maximums depending | 125 // 2, 3, and 4 are all plausible maximums depending |
126 » // on the hardware details of the machine. The second | 126 » // on the hardware details of the machine. The garbage |
127 » // proc is the one that helps the most (after the first), | 127 » // collector scales well to 4 cpus. |
128 » // so start with just 2 for now. | 128 » MaxGcproc = 4, |
129 » MaxGcproc = 2, | |
130 }; | 129 }; |
131 | 130 |
132 // A generic linked list of blocks. (Typically the block is bigger than sizeof(MLink).) | 131 // A generic linked list of blocks. (Typically the block is bigger than sizeof(MLink).) |
133 struct MLink | 132 struct MLink |
134 { | 133 { |
135 MLink *next; | 134 MLink *next; |
136 }; | 135 }; |
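MLink keeps the list pointer in the first word of the free block itself, so the list needs no extra storage. A minimal sketch of the pattern in the runtime's C dialect (the helper names are illustrative, not runtime functions):

	// Push a free block: reuse its first word as the link.
	static void
	mlink_push(MLink **list, void *block)
	{
		MLink *l;

		l = (MLink*)block;
		l->next = *list;
		*list = l;
	}

	// Pop a block, or nil if the list is empty.
	static void*
	mlink_pop(MLink **list)
	{
		MLink *l;

		l = *list;
		if(l == nil)
			return nil;
		*list = l->next;
		return l;
	}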
137 | 136 |
138 // SysAlloc obtains a large chunk of zeroed memory from the | 137 // SysAlloc obtains a large chunk of zeroed memory from the |
139 // operating system, typically on the order of a hundred kilobytes | 138 // operating system, typically on the order of a hundred kilobytes |
(...skipping 59 matching lines...) |
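On Unix systems SysAlloc amounts to an anonymous private mapping, which the kernel hands back zero-filled. A sketch under the assumption that the runtime's usual mmap wrapper and the PROT_*/MAP_* constants from its per-OS defs are in scope (the real per-OS versions also account the bytes in mstats):

	void*
	runtime·SysAlloc(uintptr n)
	{
		// Anonymous, private mapping: pages come back zeroed.
		return runtime·mmap(nil, n, PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE, -1, 0);
	}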
199 uint64 nlookup; // number of pointer lookups | 198 uint64 nlookup; // number of pointer lookups |
200 uint64 nmalloc; // number of mallocs | 199 uint64 nmalloc; // number of mallocs |
201 uint64 nfree; // number of frees | 200 uint64 nfree; // number of frees |
202 | 201 |
203 // Statistics about malloc heap. | 202 // Statistics about malloc heap. |
204 // protected by mheap.Lock | 203 // protected by mheap.Lock |
205 uint64 heap_alloc; // bytes allocated and still in use | 204 uint64 heap_alloc; // bytes allocated and still in use |
206 uint64 heap_sys; // bytes obtained from system | 205 uint64 heap_sys; // bytes obtained from system |
207 uint64 heap_idle; // bytes in idle spans | 206 uint64 heap_idle; // bytes in idle spans |
208 uint64 heap_inuse; // bytes in non-idle spans | 207 uint64 heap_inuse; // bytes in non-idle spans |
| 208 uint64 heap_released; // bytes released to the OS |
209 uint64 heap_objects; // total number of allocated objects | 209 uint64 heap_objects; // total number of allocated objects |
210 | 210 |
211 // Statistics about allocation of low-level fixed-size structures. | 211 // Statistics about allocation of low-level fixed-size structures. |
212 // Protected by FixAlloc locks. | 212 // Protected by FixAlloc locks. |
213 uint64 stacks_inuse; // bootstrap stacks | 213 uint64 stacks_inuse; // bootstrap stacks |
214 uint64 stacks_sys; | 214 uint64 stacks_sys; |
215 uint64 mspan_inuse; // MSpan structures | 215 uint64 mspan_inuse; // MSpan structures |
216 uint64 mspan_sys; | 216 uint64 mspan_sys; |
217 uint64 mcache_inuse; // MCache structures | 217 uint64 mcache_inuse; // MCache structures |
218 uint64 mcache_sys; | 218 uint64 mcache_sys; |
219 uint64 buckhash_sys; // profiling bucket hash table | 219 uint64 buckhash_sys; // profiling bucket hash table |
220 | 220 |
221 // Statistics about garbage collector. | 221 // Statistics about garbage collector. |
222 // Protected by stopping the world during GC. | 222 // Protected by stopping the world during GC. |
223 uint64 next_gc; // next GC (in heap_alloc time) | 223 uint64 next_gc; // next GC (in heap_alloc time) |
| 224 uint64 last_gc; // last GC (in absolute time) |
224 uint64 pause_total_ns; | 225 uint64 pause_total_ns; |
225 uint64 pause_ns[256]; | 226 uint64 pause_ns[256]; |
226 uint32 numgc; | 227 uint32 numgc; |
227 bool enablegc; | 228 bool enablegc; |
228 bool debuggc; | 229 bool debuggc; |
229 | 230 |
230 // Statistics about allocation size classes. | 231 // Statistics about allocation size classes. |
231 struct { | 232 struct { |
232 uint32 size; | 233 uint32 size; |
233 uint64 nmalloc; | 234 uint64 nmalloc; |
234 uint64 nfree; | 235 uint64 nfree; |
235 } by_size[NumSizeClasses]; | 236 } by_size[NumSizeClasses]; |
236 }; | 237 }; |
237 | 238 |
238 #define mstats runtime·MemStats»/* name shared with Go */ | 239 #define mstats runtime·memStats»/* name shared with Go */ |
239 extern MStats mstats; | 240 extern MStats mstats; |
240 | 241 |
241 | 242 |
242 // Size classes. Computed and initialized by InitSizes. | 243 // Size classes. Computed and initialized by InitSizes. |
243 // | 244 // |
244 // SizeToClass(0 <= n <= MaxSmallSize) returns the size class, | 245 // SizeToClass(0 <= n <= MaxSmallSize) returns the size class, |
245 // 1 <= sizeclass < NumSizeClasses, for n. | 246 // 1 <= sizeclass < NumSizeClasses, for n. |
246 // Size class 0 is reserved to mean "not small". | 247 // Size class 0 is reserved to mean "not small". |
247 // | 248 // |
248 // class_to_size[i] = largest size in class i | 249 // class_to_size[i] = largest size in class i |
(...skipping 24 matching lines...) |
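Taken together, the tables round a small request up to the largest size in its class. A sketch of the lookup, assuming the runtime· prefix these symbols carry in their declarations; sizeclass, blocksize, and nbytes are illustrative locals:

	sizeclass = runtime·SizeToClass(nbytes);       // 1 <= sizeclass < NumSizeClasses for small nbytes
	blocksize = runtime·class_to_size[sizeclass];  // nbytes is rounded up to blocksize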
273 { | 274 { |
274 MCacheList list[NumSizeClasses]; | 275 MCacheList list[NumSizeClasses]; |
275 uint64 size; | 276 uint64 size; |
276 int64 local_cachealloc; // bytes allocated (or freed) from cache since last lock of heap | 277 int64 local_cachealloc; // bytes allocated (or freed) from cache since last lock of heap |
277 int64 local_objects; // objects allocated (or freed) from cache since last lock of heap | 278 int64 local_objects; // objects allocated (or freed) from cache since last lock of heap |
278 int64 local_alloc; // bytes allocated (or freed) since last lock of heap | 279 int64 local_alloc; // bytes allocated (or freed) since last lock of heap |
279 int64 local_total_alloc; // bytes allocated (even if freed) since last lock of heap | 280 int64 local_total_alloc; // bytes allocated (even if freed) since last lock of heap |
280 int64 local_nmalloc; // number of mallocs since last lock of heap | 281 int64 local_nmalloc; // number of mallocs since last lock of heap |
281 int64 local_nfree; // number of frees since last lock of heap | 282 int64 local_nfree; // number of frees since last lock of heap |
282 int64 local_nlookup; // number of pointer lookups since last lock of heap | 283 int64 local_nlookup; // number of pointer lookups since last lock of heap |
| 284 int64 local_marked; // number of bytes marked by GC |
| 285 int64 local_nmarked; // number of objects marked by GC |
283 int32 next_sample; // trigger heap sample after allocating this many bytes | 286 int32 next_sample; // trigger heap sample after allocating this many bytes |
284 // Statistics about allocation size classes since last lock of heap | 287 // Statistics about allocation size classes since last lock of heap |
285 struct { | 288 struct { |
286 int64 nmalloc; | 289 int64 nmalloc; |
287 int64 nfree; | 290 int64 nfree; |
288 } local_by_size[NumSizeClasses]; | 291 } local_by_size[NumSizeClasses]; |
289 | |
290 }; | 292 }; |
291 | 293 |
292 void* runtime·MCache_Alloc(MCache *c, int32 sizeclass, uintptr size, int32 zeroed); | 294 void* runtime·MCache_Alloc(MCache *c, int32 sizeclass, uintptr size, int32 zeroed); |
293 void runtime·MCache_Free(MCache *c, void *p, int32 sizeclass, uintptr size); | 295 void runtime·MCache_Free(MCache *c, void *p, int32 sizeclass, uintptr size); |
294 void runtime·MCache_ReleaseAll(MCache *c); | 296 void runtime·MCache_ReleaseAll(MCache *c); |
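The intended calling pattern, roughly: the allocator's small-object path allocates from the current M's cache using a size class computed from the tables above, and the matching free returns the block to the same cache. A sketch (c, v, sizeclass, blocksize, and zeroed are illustrative locals, with c standing for m->mcache):

	v = runtime·MCache_Alloc(c, sizeclass, blocksize, zeroed);
	// ... later, when the object dies:
	runtime·MCache_Free(c, v, sizeclass, blocksize);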
295 // An MSpan is a run of pages. | 297 // An MSpan is a run of pages. |
296 enum | 298 enum |
297 { | 299 { |
298 MSpanInUse = 0, | 300 MSpanInUse = 0, |
299 MSpanFree, | 301 MSpanFree, |
300 MSpanListHead, | 302 MSpanListHead, |
301 MSpanDead, | 303 MSpanDead, |
302 }; | 304 }; |
303 struct MSpan | 305 struct MSpan |
304 { | 306 { |
305 MSpan *next; // in a span linked list | 307 MSpan *next; // in a span linked list |
306 MSpan *prev; // in a span linked list | 308 MSpan *prev; // in a span linked list |
307 » MSpan» *allnext;» » // in the list of all spans | 309 » MSpan» *allnext;» // in the list of all spans |
308 PageID start; // starting page number | 310 PageID start; // starting page number |
309 uintptr npages; // number of pages in span | 311 uintptr npages; // number of pages in span |
310 MLink *freelist; // list of free objects | 312 MLink *freelist; // list of free objects |
311 uint32 ref; // number of allocated objects in this span | 313 uint32 ref; // number of allocated objects in this span |
312 uint32 needssweep; // the span is marked by GC but not yet swept | 314 uint32 needssweep; // the span is marked by GC but not yet swept |
313 uint32 sizeclass; // size class | 315 uint32 sizeclass; // size class |
314 uint32 state; // MSpanInUse etc | 316 uint32 state; // MSpanInUse etc |
315 » byte» *limit;»// end of data in span | 317 » uintptr size;» » // size in bytes (actual size for large objects) |
| 318 » int64 unusedsince;» // First time spotted by GC in MSpanFree state |
| 319 » uintptr npreleased;» // number of pages released to the OS |
| 320 » byte» *limit;»» // end of data in span |
316 }; | 321 }; |
317 | 322 |
318 void runtime·MSpan_Init(MSpan *span, PageID start, uintptr npages); | 323 void runtime·MSpan_Init(MSpan *span, PageID start, uintptr npages); |
319 void runtime·MSpan_Sweep(MSpan *span, bool dolock); | 324 void runtime·MSpan_Sweep(MSpan *span, bool dolock); |
320 | 325 |
321 // Every MSpan is in one doubly-linked list, | 326 // Every MSpan is in one doubly-linked list, |
322 // either one of the MHeap's free lists or one of the | 327 // either one of the MHeap's free lists or one of the |
323 // MCentral's span lists. We use empty MSpan structures as list heads. | 328 // MCentral's span lists. We use empty MSpan structures as list heads. |
324 void runtime·MSpanList_Init(MSpan *list); | 329 void runtime·MSpanList_Init(MSpan *list); |
325 bool runtime·MSpanList_IsEmpty(MSpan *list); | 330 bool runtime·MSpanList_IsEmpty(MSpan *list); |
326 void runtime·MSpanList_Insert(MSpan *list, MSpan *span); | 331 void runtime·MSpanList_Insert(MSpan *list, MSpan *span); |
| 332 void runtime·MSpanList_InsertBack(MSpan *list, MSpan *span); |
327 void runtime·MSpanList_Remove(MSpan *span); // from whatever list it is in | 333 void runtime·MSpanList_Remove(MSpan *span); // from whatever list it is in |
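The list-head convention makes each list circular and doubly linked, with the empty MSpan as a sentinel, so insert and remove never need nil checks. A sketch of the pattern (not necessarily the runtime's exact code):

	static void
	mspanlist_insert(MSpan *list, MSpan *span)
	{
		span->next = list->next;
		span->prev = list;
		span->next->prev = span;
		span->prev->next = span;
	}

	static void
	mspanlist_remove(MSpan *span)
	{
		span->prev->next = span->next;
		span->next->prev = span->prev;
		span->next = nil;
		span->prev = nil;
	}

	// A list is empty when the sentinel points at itself: list->next == list.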
328 | 334 |
329 | 335 |
330 // Central list of free objects of a given size. | 336 // Central list of free objects of a given size. |
331 struct MCentral | 337 struct MCentral |
332 { | 338 { |
333 Lock; | 339 Lock; |
334 int32 sizeclass; | 340 int32 sizeclass; |
335 MSpan nonempty; | 341 MSpan nonempty; |
336 MSpan empty; | 342 MSpan empty; |
(...skipping 40 matching lines...) |
377 extern MHeap runtime·mheap; | 383 extern MHeap runtime·mheap; |
378 | 384 |
379 void runtime·MHeap_Init(MHeap *h, void *(*allocator)(uintptr)); | 385 void runtime·MHeap_Init(MHeap *h, void *(*allocator)(uintptr)); |
380 MSpan* runtime·MHeap_Alloc(MHeap *h, uintptr npage, int32 sizeclass, int32 acct); | 386 MSpan* runtime·MHeap_Alloc(MHeap *h, uintptr npage, int32 sizeclass, int32 acct); |
381 void runtime·MHeap_Free(MHeap *h, MSpan *s, int32 acct); | 387 void runtime·MHeap_Free(MHeap *h, MSpan *s, int32 acct); |
382 MSpan* runtime·MHeap_Lookup(MHeap *h, void *v); | 388 MSpan* runtime·MHeap_Lookup(MHeap *h, void *v); |
383 MSpan* runtime·MHeap_LookupMaybe(MHeap *h, void *v); | 389 MSpan* runtime·MHeap_LookupMaybe(MHeap *h, void *v); |
384 void runtime·MGetSizeClassInfo(int32 sizeclass, uintptr *size, int32 *npages, int32 *nobj); | 390 void runtime·MGetSizeClassInfo(int32 sizeclass, uintptr *size, int32 *npages, int32 *nobj); |
385 void* runtime·MHeap_SysAlloc(MHeap *h, uintptr n); | 391 void* runtime·MHeap_SysAlloc(MHeap *h, uintptr n); |
386 void runtime·MHeap_MapBits(MHeap *h); | 392 void runtime·MHeap_MapBits(MHeap *h); |
| 393 void runtime·MHeap_Scavenger(void); |
387 | 394 |
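MHeap_Scavenger ties together the new heap_released, unusedsince, and npreleased fields: a background task periodically walks the heap's free spans and returns long-idle pages to the OS. A rough sketch of the per-span step, assuming runtime·SysUnused is the release call declared alongside SysAlloc, and with now and limit standing for the scavenger's clock and idle threshold:

	if(s->unusedsince != 0 && now - s->unusedsince > limit && s->npreleased != s->npages) {
		mstats.heap_released += (s->npages - s->npreleased) << PageShift;
		s->npreleased = s->npages;
		runtime·SysUnused((void*)(s->start << PageShift), s->npages << PageShift);
	}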
388 void* runtime·mallocgc(uintptr size, uint32 flag, int32 dogc, int32 zeroed); | 395 void* runtime·mallocgc(uintptr size, uint32 flag, int32 dogc, int32 zeroed); |
389 int32 runtime·mlookup(void *v, byte **base, uintptr *size, MSpan **s); | 396 int32 runtime·mlookup(void *v, byte **base, uintptr *size, MSpan **s); |
390 void runtime·gc(int32 force); | 397 void runtime·gc(int32 force); |
391 void runtime·markallocated(void *v, uintptr n, bool noptr); | 398 void runtime·markallocated(void *v, uintptr n, bool noptr); |
392 void runtime·checkallocated(void *v, uintptr n); | 399 void runtime·checkallocated(void *v, uintptr n); |
393 void runtime·markfreed(void *v, uintptr n); | 400 void runtime·markfreed(void *v, uintptr n); |
394 void runtime·checkfreed(void *v, uintptr n); | 401 void runtime·checkfreed(void *v, uintptr n); |
395 int32 runtime·checking; | 402 int32 runtime·checking; |
396 void runtime·markspan(void *v, uintptr size, uintptr n, bool leftover); | 403 void runtime·markspan(void *v, uintptr size, uintptr n, bool leftover); |
397 void runtime·unmarkspan(void *v, uintptr size); | 404 void runtime·unmarkspan(void *v, uintptr size); |
398 bool runtime·blockspecial(void*); | 405 bool runtime·blockspecial(void*); |
399 void runtime·setblockspecial(void*, bool); | 406 void runtime·setblockspecial(void*, bool); |
400 void runtime·purgecachedstats(M*); | 407 void runtime·purgecachedstats(M*); |
401 | 408 |
402 enum | 409 enum |
403 { | 410 { |
404 // flags to malloc | 411 // flags to malloc |
405 FlagNoPointers = 1<<0, // no pointers here | 412 FlagNoPointers = 1<<0, // no pointers here |
406 FlagNoProfiling = 1<<1, // must not profile | 413 FlagNoProfiling = 1<<1, // must not profile |
407 FlagNoGC = 1<<2, // must not free or scan for pointers | 414 FlagNoGC = 1<<2, // must not free or scan for pointers |
408 }; | 415 }; |
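These flags are the second argument to runtime·mallocgc declared above. For example, an allocation known to contain no pointers, allowed to trigger a collection, and wanting zeroed memory might be requested as (an illustrative call, not a quote from the runtime):

	v = runtime·mallocgc(size, FlagNoPointers, 1, 1);  // dogc=1, zeroed=1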
409 | 416 |
410 void runtime·MProf_Malloc(void*, uintptr); | 417 void runtime·MProf_Malloc(void*, uintptr); |
411 void runtime·MProf_Free(void*, uintptr); | 418 void runtime·MProf_Free(void*, uintptr); |
| 419 void runtime·MProf_GC(void); |
412 int32 runtime·helpgc(bool*); | 420 int32 runtime·helpgc(bool*); |
413 void runtime·gchelper(void); | 421 void runtime·gchelper(void); |
414 | 422 |
415 // Malloc profiling settings. | |
416 // Must match definition in extern.go. | |
417 enum { | |
418 MProf_None = 0, | |
419 MProf_Sample = 1, | |
420 MProf_All = 2, | |
421 }; | |
422 extern int32 runtime·malloc_profile; | |
423 | |
424 bool runtime·getfinalizer(void *p, bool del, void (**fn)(void*), int32 *nret); | 423 bool runtime·getfinalizer(void *p, bool del, void (**fn)(void*), int32 *nret); |
425 void runtime·walkfintab(void (*fn)(void*)); | 424 void runtime·walkfintab(void (*fn)(void*)); |