Bug 524743 - Shape regeneration still does not touch most empty scopes. r=brendan.
[mozilla-central.git] / js / src / jsgc.cpp
blob 6243808cf175ea7299a81b8dfa8c4de5bb842d41
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=78:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
25 * Contributor(s):
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS Mark-and-Sweep Garbage Collector.
44 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
45 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
46 * using malloc. It uses an ideally parallel array of flag bytes to hold the
47 * mark bit, finalizer type index, etc.
49 * XXX swizzle page to freelist for better locality of reference
51 #include <stdlib.h> /* for free */
52 #include <math.h>
53 #include <string.h> /* for memset used when DEBUG */
54 #include "jstypes.h"
55 #include "jsstdint.h"
56 #include "jsutil.h" /* Added by JSIFY */
57 #include "jshash.h" /* Added by JSIFY */
58 #include "jsbit.h"
59 #include "jsclist.h"
60 #include "jsprf.h"
61 #include "jsapi.h"
62 #include "jsatom.h"
63 #include "jscntxt.h"
64 #include "jsversion.h"
65 #include "jsdbgapi.h"
66 #include "jsexn.h"
67 #include "jsfun.h"
68 #include "jsgc.h"
69 #include "jsinterp.h"
70 #include "jsiter.h"
71 #include "jslock.h"
72 #include "jsnum.h"
73 #include "jsobj.h"
74 #include "jsparse.h"
75 #include "jsscope.h"
76 #include "jsscript.h"
77 #include "jsstaticcheck.h"
78 #include "jsstr.h"
79 #include "jstask.h"
80 #include "jstracer.h"
82 #if JS_HAS_XML_SUPPORT
83 #include "jsxml.h"
84 #endif
86 #ifdef INCLUDE_MOZILLA_DTRACE
87 #include "jsdtracef.h"
88 #endif
91 * Include the headers for mmap.
93 #if defined(XP_WIN)
94 # include <windows.h>
95 #endif
96 #if defined(XP_UNIX) || defined(XP_BEOS)
97 # include <unistd.h>
98 # include <sys/mman.h>
99 #endif
100 /* On Mac OS X MAP_ANONYMOUS is not defined. */
101 #if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
102 # define MAP_ANONYMOUS MAP_ANON
103 #endif
104 #if !defined(MAP_ANONYMOUS)
105 # define MAP_ANONYMOUS 0
106 #endif
109 * Check JSTempValueUnion has the size of jsval and void * so we can
110 * reinterpret jsval as void* GC-thing pointer and use JSTVU_SINGLE for
111 * different GC-things.
113 JS_STATIC_ASSERT(sizeof(JSTempValueUnion) == sizeof(jsval));
114 JS_STATIC_ASSERT(sizeof(JSTempValueUnion) == sizeof(void *));
117 * Check that JSTRACE_XML follows JSTRACE_OBJECT, JSTRACE_DOUBLE and
118 * JSTRACE_STRING.
120 JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
121 JS_STATIC_ASSERT(JSTRACE_DOUBLE == 1);
122 JS_STATIC_ASSERT(JSTRACE_STRING == 2);
123 JS_STATIC_ASSERT(JSTRACE_XML == 3);
126 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
127 * trace kind when JS_HAS_XML_SUPPORT is false.
129 JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
132 * Check that we can use memset(p, 0, ...) to implement JS_CLEAR_WEAK_ROOTS.
134 JS_STATIC_ASSERT(JSVAL_NULL == 0);
137 * Check consistency of external string constants from JSFinalizeGCThingKind.
139 JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 ==
140 JS_EXTERNAL_STRING_LIMIT - 1);
143 * A GC arena contains a fixed number of flag bits for each thing in its heap,
144 * and supports O(1) lookup of a flag given its thing's address.
146 * To implement this, we allocate things of the same size from a GC arena
147 * containing GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary. The
148 * following picture shows arena's layout:
150 * +------------------------------+--------------------+---------------+
151 * | allocation area for GC thing | flags of GC things | JSGCArenaInfo |
152 * +------------------------------+--------------------+---------------+
154 * To find the flag bits for the thing we calculate the thing index counting
155 * from arena's start using:
157 * thingIndex = (thingAddress & GC_ARENA_MASK) / thingSize
159 * The details of the flag lookup depend on the thing's kind. For all GC things
160 * except doubles we use one byte of flags where 4 bits determine the thing's
161 * type and the rest are used to implement GC marking, finalization and
162 * locking. We calculate the address of the flag byte using:
164 * flagByteAddress =
165 * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - thingIndex
167 * where
169 * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo)
171 * is the last byte of flags' area.
173 * This implies that the things are allocated from the start of their area and
174 * flags are allocated from the end. This arrangement avoids a relatively
175 * expensive calculation of the location of the boundary separating things and
176 * flags. The boundary's offset from the start of the arena is given by:
178 * thingsPerArena * thingSize
180 * where thingsPerArena is the number of things that the arena can hold:
182 * (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / (thingSize + 1).
184 * To allocate doubles we use a specialized arena. It can contain only numbers
185 * so we do not need the type bits. Moreover, since the doubles do not require
186 * a finalizer and very few of them are locked via js_LockGCThing API, we use
187 * just one bit of flags per double to denote if it was marked during the
188 * marking phase of the GC. The locking is implemented via a hash table. Thus
189 * for doubles the flag area becomes a bitmap.
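/*
 * A worked example of the layout above, using purely illustrative numbers
 * (the real sizeof(JSGCArenaInfo) and thing sizes are platform dependent).
 * Suppose GC_ARENA_SIZE == 4096, sizeof(JSGCArenaInfo) == 16 and
 * thingSize == 32. Then:
 *
 *   thingsPerArena = (4096 - 16) / (32 + 1) = 123
 *
 * so things occupy arena offsets [0, 3936), the 123 flag bytes occupy
 * [3957, 4080) counting backwards from the JSGCArenaInfo at [4080, 4096),
 * and the 21 bytes in between stay unused. For a thing at arena offset 64:
 *
 *   thingIndex      = (64 & GC_ARENA_MASK) / 32 = 2
 *   flagByteAddress = (arenaStart + 4095) - 16 - 2 = arenaStart + 4077
 */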
192 static const jsuword GC_ARENAS_PER_CHUNK = 16;
193 static const jsuword GC_ARENA_SHIFT = 12;
194 static const jsuword GC_ARENA_MASK = JS_BITMASK(GC_ARENA_SHIFT);
195 static const jsuword GC_ARENA_SIZE = JS_BIT(GC_ARENA_SHIFT);
197 struct JSGCArenaInfo {
199 * Allocation list for the arena or NULL if the arena holds double values.
201 JSGCArenaList *list;
204 * Pointer to the previous arena in a linked list. The arena can either
205 * belong to one of the JSRuntime.gcArenaList lists or, when it does not have
206 * any allocated GC things, to the list of free arenas in the chunk with
207 * head stored in JSGCChunkInfo.lastFreeArena.
209 JSGCArenaInfo *prev;
212 * A link field for the list of arenas with marked but not yet traced
213 * things. The field is encoded as arena's page to share the space with
214 * firstArena and arenaIndex fields.
216 jsuword prevUntracedPage : JS_BITS_PER_WORD - GC_ARENA_SHIFT;
219 * When firstArena is false, the index of the arena in the chunk. When
220 * firstArena is true, the index of a free arena holding JSGCChunkInfo or
221 * NO_FREE_ARENAS if there are no free arenas in the chunk.
223 * GET_ARENA_INDEX and GET_CHUNK_INFO_INDEX are convenience macros to
224 * access either of the indexes.
226 jsuword arenaIndex : GC_ARENA_SHIFT - 1;
228 /* Flag indicating if the arena is the first in the chunk. */
229 jsuword firstArena : 1;
231 union {
232 struct {
233 JSGCThing *freeList;
234 jsuword untracedThings; /* bitset for fast search of marked
235 but not yet traced things */
236 } finalizable;
238 bool hasMarkedDoubles; /* the arena has marked doubles */
242 /* GC flag definitions, must fit in 8 bits. */
243 const uint8 GCF_MARK = JS_BIT(0);
244 const uint8 GCF_LOCK = JS_BIT(1); /* lock request bit in API */
245 const uint8 GCF_CHILDREN = JS_BIT(2); /* GC things with children to be
246 marked later. */
249 * The private JSGCThing struct, which describes a GC free list element.
251 struct JSGCThing {
252 JSGCThing *link;
256 * Macros to convert between JSGCArenaInfo, the start address of the arena and
257 * arena's page defined as (start address) >> GC_ARENA_SHIFT.
259 #define ARENA_INFO_OFFSET (GC_ARENA_SIZE - (uint32) sizeof(JSGCArenaInfo))
261 #define IS_ARENA_INFO_ADDRESS(arena) \
262 (((jsuword) (arena) & GC_ARENA_MASK) == ARENA_INFO_OFFSET)
264 #define ARENA_START_TO_INFO(arenaStart) \
265 (JS_ASSERT(((arenaStart) & (jsuword) GC_ARENA_MASK) == 0), \
266 (JSGCArenaInfo *) ((arenaStart) + (jsuword) ARENA_INFO_OFFSET))
268 #define ARENA_INFO_TO_START(arena) \
269 (JS_ASSERT(IS_ARENA_INFO_ADDRESS(arena)), \
270 (jsuword) (arena) & ~(jsuword) GC_ARENA_MASK)
272 #define ARENA_PAGE_TO_INFO(arenaPage) \
273 (JS_ASSERT(arenaPage != 0), \
274 JS_ASSERT(!((jsuword)(arenaPage) >> (JS_BITS_PER_WORD-GC_ARENA_SHIFT))), \
275 ARENA_START_TO_INFO((arenaPage) << GC_ARENA_SHIFT))
277 #define ARENA_INFO_TO_PAGE(arena) \
278 (JS_ASSERT(IS_ARENA_INFO_ADDRESS(arena)), \
279 ((jsuword) (arena) >> GC_ARENA_SHIFT))
281 #define GET_ARENA_INFO(chunk, index) \
282 (JS_ASSERT((index) < GC_ARENAS_PER_CHUNK), \
283 ARENA_START_TO_INFO(chunk + ((index) << GC_ARENA_SHIFT)))
286 * Definitions for allocating arenas in chunks.
288 * All chunks that have at least one free arena are put on the doubly-linked
289 * list with the head stored in JSRuntime.gcChunkList. JSGCChunkInfo contains
290 * the head of the chunk's free arena list together with the link fields for
291 * gcChunkList.
293 * Structure stored in one of the chunk's free arenas. GET_CHUNK_INFO_INDEX
294 * gives the index of this arena. When all arenas in the chunk are used, the
295 * chunk is removed from the list and the index is set to NO_FREE_ARENAS,
296 * indicating that the chunk is not on gcChunkList and has no JSGCChunkInfo available.
299 struct JSGCChunkInfo {
300 JSGCChunkInfo **prevp;
301 JSGCChunkInfo *next;
302 JSGCArenaInfo *lastFreeArena;
303 uint32 numFreeArenas;
306 #define NO_FREE_ARENAS JS_BITMASK(GC_ARENA_SHIFT - 1)
308 JS_STATIC_ASSERT(1 <= GC_ARENAS_PER_CHUNK &&
309 GC_ARENAS_PER_CHUNK <= NO_FREE_ARENAS);
311 #define GET_ARENA_CHUNK(arena, index) \
312 (JS_ASSERT(GET_ARENA_INDEX(arena) == index), \
313 ARENA_INFO_TO_START(arena) - ((index) << GC_ARENA_SHIFT))
315 #define GET_ARENA_INDEX(arena) \
316 ((arena)->firstArena ? 0 : (uint32) (arena)->arenaIndex)
318 #define GET_CHUNK_INFO_INDEX(chunk) \
319 ((uint32) ARENA_START_TO_INFO(chunk)->arenaIndex)
321 #define SET_CHUNK_INFO_INDEX(chunk, index) \
322 (JS_ASSERT((index) < GC_ARENAS_PER_CHUNK || (index) == NO_FREE_ARENAS), \
323 (void) (ARENA_START_TO_INFO(chunk)->arenaIndex = (jsuword) (index)))
325 #define GET_CHUNK_INFO(chunk, infoIndex) \
326 (JS_ASSERT(GET_CHUNK_INFO_INDEX(chunk) == (infoIndex)), \
327 JS_ASSERT((uint32) (infoIndex) < GC_ARENAS_PER_CHUNK), \
328 (JSGCChunkInfo *) ((chunk) + ((infoIndex) << GC_ARENA_SHIFT)))
330 #define CHUNK_INFO_TO_INDEX(ci) \
331 GET_ARENA_INDEX(ARENA_START_TO_INFO((jsuword)ci))
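/*
 * For illustration (GC_ARENAS_PER_CHUNK == 16, GC_ARENA_SHIFT == 12): a chunk
 * is 16 arenas, i.e. 64 KB. Arena 0 always has firstArena set, and its
 * arenaIndex field doubles as the chunk info index. If, say, the free arena
 * at index 3 currently holds the JSGCChunkInfo, then
 *
 *   GET_CHUNK_INFO_INDEX(chunk) == 3
 *   GET_CHUNK_INFO(chunk, 3)    == (JSGCChunkInfo *) (chunk + (3 << 12))
 *
 * and once the chunk has no free arenas left, the index is set to
 * NO_FREE_ARENAS instead.
 */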
334 * Macros for GC-thing operations.
336 #define THINGS_PER_ARENA(thingSize) \
337 ((GC_ARENA_SIZE - (uint32) sizeof(JSGCArenaInfo)) / ((thingSize) + 1U))
339 #define THING_TO_ARENA(thing) \
340 (JS_ASSERT(!JSString::isStatic(thing)), \
341 (JSGCArenaInfo *)(((jsuword) (thing) | GC_ARENA_MASK) \
342 + 1 - sizeof(JSGCArenaInfo)))
344 #define THING_TO_INDEX(thing, thingSize) \
345 ((uint32) ((jsuword) (thing) & GC_ARENA_MASK) / (uint32) (thingSize))
347 #define THING_FLAGP(arena, thingIndex) \
348 (JS_ASSERT((jsuword) (thingIndex) \
349 < (jsuword) THINGS_PER_ARENA((arena)->list->thingSize)), \
350 (uint8 *)(arena) - 1 - (thingIndex))
352 #define THING_TO_FLAGP(thing, thingSize) \
353 THING_FLAGP(THING_TO_ARENA(thing), THING_TO_INDEX(thing, thingSize))
355 #define FLAGP_TO_ARENA(flagp) THING_TO_ARENA(flagp)
357 #define FLAGP_TO_INDEX(flagp) \
358 (JS_ASSERT(((jsuword) (flagp) & GC_ARENA_MASK) < ARENA_INFO_OFFSET), \
359 (ARENA_INFO_OFFSET - 1 - (uint32) ((jsuword) (flagp) & GC_ARENA_MASK)))
361 #define FLAGP_TO_THING(flagp, thingSize) \
362 (JS_ASSERT(((jsuword) (flagp) & GC_ARENA_MASK) >= \
363 (ARENA_INFO_OFFSET - THINGS_PER_ARENA(thingSize))), \
364 (JSGCThing *)(((jsuword) (flagp) & ~GC_ARENA_MASK) + \
365 (thingSize) * FLAGP_TO_INDEX(flagp)))
367 static inline JSGCThing *
368 NextThing(JSGCThing *thing, size_t thingSize)
370 return reinterpret_cast<JSGCThing *>(reinterpret_cast<jsuword>(thing) +
371 thingSize);
374 static inline JSGCThing *
375 MakeNewArenaFreeList(JSGCArenaInfo *a, unsigned thingSize, size_t nthings)
377 JS_ASSERT(nthings * thingSize < GC_ARENA_SIZE - sizeof(JSGCArenaInfo));
379 jsuword thingsStart = ARENA_INFO_TO_START(a);
380 JSGCThing *first = reinterpret_cast<JSGCThing *>(thingsStart);
381 JSGCThing *last = reinterpret_cast<JSGCThing *>(thingsStart +
382 (nthings - 1) * thingSize);
383 for (JSGCThing *thing = first; thing != last;) {
384 JSGCThing *next = NextThing(thing, thingSize);
385 thing->link = next;
386 thing = next;
388 last->link = NULL;
389 return first;
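/*
 * For example, with the hypothetical thingSize of 32 and nthings of 123 used
 * above, MakeNewArenaFreeList links the cells at arena offsets 0, 32, 64, ...,
 * 3904 into a singly linked list: the thing at offset 0 is returned as the
 * head, each thing's link points to the next higher cell, and the thing at
 * offset 3904 (the 123rd) ends the list with a NULL link.
 */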
393 * Macros for the specialized arena for doubles.
395 * DOUBLES_PER_ARENA defines the maximum number of doubles that the arena can
396 * hold. We find it as follows. Let n be the number of doubles in the
397 * arena. Together with the bitmap of flags and JSGCArenaInfo they should fit
398 * in the arena. Hence DOUBLES_PER_ARENA, or n_max, is the maximum value of n
399 * for which the following holds:
401 * n*s + ceil(n/B) <= M (1)
403 * where "/" denotes normal real division,
404 * ceil(r) gives the least integer not smaller than the number r,
405 * s is the number of words in jsdouble,
406 * B is the number of bits per word, or B == JS_BITS_PER_WORD
407 * M is the number of words in the arena before JSGCArenaInfo or
408 * M == (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / sizeof(jsuword).
409 * M == ARENA_INFO_OFFSET / sizeof(jsuword)
411 * We rewrite the inequality as
413 * n*B*s/B + ceil(n/B) <= M,
414 * ceil(n*B*s/B + n/B) <= M,
415 * ceil(n*(B*s + 1)/B) <= M (2)
417 * We define a helper function e(n, s, B),
419 * e(n, s, B) := ceil(n*(B*s + 1)/B) - n*(B*s + 1)/B, 0 <= e(n, s, B) < 1.
421 * It gives:
423 * n*(B*s + 1)/B + e(n, s, B) <= M,
424 * n + e*B/(B*s + 1) <= M*B/(B*s + 1)
426 * We apply the floor function to both sides of the last inequality, where
427 * floor(r) gives the biggest integer not greater than r. As a consequence we
428 * have:
430 * floor(n + e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
431 * n + floor(e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
432 * n <= floor(M*B/(B*s + 1)), (3)
434 * where floor(e*B/(B*s + 1)) is zero as e*B/(B*s + 1) < B/(B*s + 1) < 1.
435 * Thus any n that satisfies the original constraint (1) or its equivalent (2)
436 * must also satisfy (3). That is, we have an upper estimate for the maximum
437 * value of n. Let's show that this upper estimate,
439 * floor(M*B/(B*s + 1)), (4)
441 * also satisfies (1) and, as such, gives the required maximum value.
442 * Substituting it into (2) gives:
444 * ceil(floor(M*B/(B*s + 1))*(B*s + 1)/B) == ceil(floor(M/X)*X)
446 * where X == (B*s + 1)/B > 1. But then floor(M/X)*X <= M/X*X == M and
448 * ceil(floor(M/X)*X) <= ceil(M) == M.
450 * Thus the value of (4) gives the maximum n satisfying (1).
452 * For the final result we observe that in (4)
454 * M*B == ARENA_INFO_OFFSET / sizeof(jsuword) * JS_BITS_PER_WORD
455 * == ARENA_INFO_OFFSET * JS_BITS_PER_BYTE
457 * and
459 * B*s == JS_BITS_PER_WORD * sizeof(jsdouble) / sizeof(jsuword)
460 * == JS_BITS_PER_DOUBLE.
462 const size_t DOUBLES_PER_ARENA =
463 (ARENA_INFO_OFFSET * JS_BITS_PER_BYTE) / (JS_BITS_PER_DOUBLE + 1);
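/*
 * A numeric illustration of the formula, continuing the hypothetical layout
 * used above (ARENA_INFO_OFFSET == 4080, i.e. sizeof(JSGCArenaInfo) == 16,
 * with 32-bit words and JS_BITS_PER_DOUBLE == 64):
 *
 *   DOUBLES_PER_ARENA = 4080 * 8 / 65 = 502
 *
 * Checking (1): 502 doubles take 4016 bytes and the bitmap needs
 * ceil(502/32) == 16 words, i.e. 64 bytes, for a total of 4080 <= 4080,
 * while 503 doubles would already need 4024 + 64 = 4088 bytes.
 */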
466 * Check that sizeof(jsuword) divides both ARENA_INFO_OFFSET and sizeof(jsdouble).
468 JS_STATIC_ASSERT(ARENA_INFO_OFFSET % sizeof(jsuword) == 0);
469 JS_STATIC_ASSERT(sizeof(jsdouble) % sizeof(jsuword) == 0);
470 JS_STATIC_ASSERT(sizeof(jsbitmap) == sizeof(jsuword));
472 const size_t DOUBLES_ARENA_BITMAP_WORDS =
473 JS_HOWMANY(DOUBLES_PER_ARENA, JS_BITS_PER_WORD);
475 /* Check that DOUBLES_PER_ARENA is indeed the maximum n satisfying (1). */
476 JS_STATIC_ASSERT(DOUBLES_PER_ARENA * sizeof(jsdouble) +
477 DOUBLES_ARENA_BITMAP_WORDS * sizeof(jsuword) <=
478 ARENA_INFO_OFFSET);
480 JS_STATIC_ASSERT((DOUBLES_PER_ARENA + 1) * sizeof(jsdouble) +
481 sizeof(jsuword) *
482 JS_HOWMANY((DOUBLES_PER_ARENA + 1), JS_BITS_PER_WORD) >
483 ARENA_INFO_OFFSET);
486 * When DOUBLES_PER_ARENA % JS_BITS_PER_WORD != 0, some bits in the
487 * last word of the occupation bitmap are unused.
489 const size_t UNUSED_DOUBLE_BITMAP_BITS =
490 DOUBLES_ARENA_BITMAP_WORDS * JS_BITS_PER_WORD - DOUBLES_PER_ARENA;
492 JS_STATIC_ASSERT(UNUSED_DOUBLE_BITMAP_BITS < JS_BITS_PER_WORD);
494 const size_t DOUBLES_ARENA_BITMAP_OFFSET =
495 ARENA_INFO_OFFSET - DOUBLES_ARENA_BITMAP_WORDS * sizeof(jsuword);
497 #define CHECK_DOUBLE_ARENA_INFO(arenaInfo) \
498 (JS_ASSERT(IS_ARENA_INFO_ADDRESS(arenaInfo)), \
499 JS_ASSERT(!(arenaInfo)->list)) \
502 * Get the start of the bitmap area containing double mark flags in the arena.
503 * To access the flag the code uses
505 * JS_TEST_BIT(bitmapStart, index)
507 * That is, compared with the case of arenas with non-double things, we count
508 * flags from the start of the bitmap area, not from the end.
510 #define DOUBLE_ARENA_BITMAP(arenaInfo) \
511 (CHECK_DOUBLE_ARENA_INFO(arenaInfo), \
512 (jsbitmap *) arenaInfo - DOUBLES_ARENA_BITMAP_WORDS)
514 #define DOUBLE_ARENA_BITMAP_END(arenaInfo) \
515 (CHECK_DOUBLE_ARENA_INFO(arenaInfo), (jsbitmap *) (arenaInfo))
517 #define DOUBLE_THING_TO_INDEX(thing) \
518 (CHECK_DOUBLE_ARENA_INFO(THING_TO_ARENA(thing)), \
519 JS_ASSERT(((jsuword) (thing) & GC_ARENA_MASK) < \
520 DOUBLES_ARENA_BITMAP_OFFSET), \
521 ((uint32) (((jsuword) (thing) & GC_ARENA_MASK) / sizeof(jsdouble))))
523 static void
524 ClearDoubleArenaFlags(JSGCArenaInfo *a)
527 * When some high bits in the last word of the double occupation bitmap
528 * are unused, we must set them. Otherwise TurnUsedArenaIntoDoubleList
529 * will assume that they correspond to some free cells and try to
530 * allocate them.
532 * Note that the code works correctly with UNUSED_DOUBLE_BITMAP_BITS == 0.
534 jsbitmap *bitmap = DOUBLE_ARENA_BITMAP(a);
535 memset(bitmap, 0, (DOUBLES_ARENA_BITMAP_WORDS - 1) * sizeof *bitmap);
536 jsbitmap mask = ((jsbitmap) 1 << UNUSED_DOUBLE_BITMAP_BITS) - 1;
537 size_t nused = JS_BITS_PER_WORD - UNUSED_DOUBLE_BITMAP_BITS;
538 bitmap[DOUBLES_ARENA_BITMAP_WORDS - 1] = mask << nused;
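/*
 * Continuing the hypothetical 502-double arena from above with 32-bit words:
 * UNUSED_DOUBLE_BITMAP_BITS == 16 * 32 - 502 == 10, so mask == (1 << 10) - 1
 * == 0x3ff, nused == 22, and the last bitmap word becomes 0x3ff << 22 ==
 * 0xffc00000, i.e. the 10 nonexistent cells are permanently marked as used.
 */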
541 static JS_ALWAYS_INLINE JSBool
542 IsMarkedDouble(JSGCArenaInfo *a, uint32 index)
544 jsbitmap *bitmap;
546 JS_ASSERT(a->hasMarkedDoubles);
547 bitmap = DOUBLE_ARENA_BITMAP(a);
548 return JS_TEST_BIT(bitmap, index);
551 JS_STATIC_ASSERT(sizeof(JSStackHeader) >= 2 * sizeof(jsval));
553 JS_STATIC_ASSERT(sizeof(JSGCThing) <= sizeof(JSString));
554 JS_STATIC_ASSERT(sizeof(JSGCThing) <= sizeof(jsdouble));
556 /* We want to use all the available GC thing space for object's slots. */
557 JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(JSGCThing) == 0);
559 #ifdef JS_GCMETER
560 # define METER(x) ((void) (x))
561 # define METER_IF(condition, x) ((void) ((condition) && (x)))
562 #else
563 # define METER(x) ((void) 0)
564 # define METER_IF(condition, x) ((void) 0)
565 #endif
567 #define METER_UPDATE_MAX(maxLval, rval) \
568 METER_IF((maxLval) < (rval), (maxLval) = (rval))
570 static jsuword
571 NewGCChunk(void)
573 void *p;
575 #if defined(XP_WIN)
576 p = VirtualAlloc(NULL, GC_ARENAS_PER_CHUNK << GC_ARENA_SHIFT,
577 MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
578 return (jsuword) p;
579 #else
580 p = mmap(NULL, GC_ARENAS_PER_CHUNK << GC_ARENA_SHIFT,
581 PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
582 return (p == MAP_FAILED) ? 0 : (jsuword) p;
583 #endif
586 static void
587 DestroyGCChunk(jsuword chunk)
589 JS_ASSERT((chunk & GC_ARENA_MASK) == 0);
590 #if defined(XP_WIN)
591 VirtualFree((void *) chunk, 0, MEM_RELEASE);
592 #elif defined(SOLARIS)
593 munmap((char *) chunk, GC_ARENAS_PER_CHUNK << GC_ARENA_SHIFT);
594 #else
595 munmap((void *) chunk, GC_ARENAS_PER_CHUNK << GC_ARENA_SHIFT);
596 #endif
599 static void
600 AddChunkToList(JSRuntime *rt, JSGCChunkInfo *ci)
602 ci->prevp = &rt->gcChunkList;
603 ci->next = rt->gcChunkList;
604 if (rt->gcChunkList) {
605 JS_ASSERT(rt->gcChunkList->prevp == &rt->gcChunkList);
606 rt->gcChunkList->prevp = &ci->next;
608 rt->gcChunkList = ci;
611 static void
612 RemoveChunkFromList(JSRuntime *rt, JSGCChunkInfo *ci)
614 *ci->prevp = ci->next;
615 if (ci->next) {
616 JS_ASSERT(ci->next->prevp == &ci->next);
617 ci->next->prevp = ci->prevp;
621 static JSGCArenaInfo *
622 NewGCArena(JSContext *cx)
624 jsuword chunk;
625 JSGCArenaInfo *a;
627 JSRuntime *rt = cx->runtime;
628 if (rt->gcBytes >= rt->gcMaxBytes) {
630 * FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
631 * as a workaround we allow the max bytes limit to be breached here and
632 * schedule the GC later.
634 if (!JS_ON_TRACE(cx))
635 return NULL;
636 js_TriggerGC(cx, true);
639 JSGCChunkInfo *ci;
640 uint32 i;
641 JSGCArenaInfo *aprev;
643 ci = rt->gcChunkList;
644 if (!ci) {
645 chunk = NewGCChunk();
646 if (chunk == 0)
647 return NULL;
648 JS_ASSERT((chunk & GC_ARENA_MASK) == 0);
649 a = GET_ARENA_INFO(chunk, 0);
650 a->firstArena = JS_TRUE;
651 a->arenaIndex = 0;
652 aprev = NULL;
653 i = 0;
654 do {
655 a->prev = aprev;
656 aprev = a;
657 ++i;
658 a = GET_ARENA_INFO(chunk, i);
659 a->firstArena = JS_FALSE;
660 a->arenaIndex = i;
661 } while (i != GC_ARENAS_PER_CHUNK - 1);
662 ci = GET_CHUNK_INFO(chunk, 0);
663 ci->lastFreeArena = aprev;
664 ci->numFreeArenas = GC_ARENAS_PER_CHUNK - 1;
665 AddChunkToList(rt, ci);
666 } else {
667 JS_ASSERT(ci->prevp == &rt->gcChunkList);
668 a = ci->lastFreeArena;
669 aprev = a->prev;
670 if (!aprev) {
671 JS_ASSERT(ci->numFreeArenas == 1);
672 JS_ASSERT(ARENA_INFO_TO_START(a) == (jsuword) ci);
673 RemoveChunkFromList(rt, ci);
674 chunk = GET_ARENA_CHUNK(a, GET_ARENA_INDEX(a));
675 SET_CHUNK_INFO_INDEX(chunk, NO_FREE_ARENAS);
676 } else {
677 JS_ASSERT(ci->numFreeArenas >= 2);
678 JS_ASSERT(ARENA_INFO_TO_START(a) != (jsuword) ci);
679 ci->lastFreeArena = aprev;
680 ci->numFreeArenas--;
684 rt->gcBytes += GC_ARENA_SIZE;
685 a->prevUntracedPage = 0;
687 return a;
690 static void
691 DestroyGCArenas(JSRuntime *rt, JSGCArenaInfo *last)
693 JSGCArenaInfo *a;
695 while (last) {
696 a = last;
697 last = last->prev;
699 METER(rt->gcStats.afree++);
700 JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE);
701 rt->gcBytes -= GC_ARENA_SIZE;
703 uint32 arenaIndex;
704 jsuword chunk;
705 uint32 chunkInfoIndex;
706 JSGCChunkInfo *ci;
707 #ifdef DEBUG
708 jsuword firstArena;
710 firstArena = a->firstArena;
711 arenaIndex = a->arenaIndex;
712 memset((void *) ARENA_INFO_TO_START(a), JS_FREE_PATTERN, GC_ARENA_SIZE);
713 a->firstArena = firstArena;
714 a->arenaIndex = arenaIndex;
715 #endif
716 arenaIndex = GET_ARENA_INDEX(a);
717 chunk = GET_ARENA_CHUNK(a, arenaIndex);
718 chunkInfoIndex = GET_CHUNK_INFO_INDEX(chunk);
719 if (chunkInfoIndex == NO_FREE_ARENAS) {
720 chunkInfoIndex = arenaIndex;
721 SET_CHUNK_INFO_INDEX(chunk, arenaIndex);
722 ci = GET_CHUNK_INFO(chunk, chunkInfoIndex);
723 a->prev = NULL;
724 ci->lastFreeArena = a;
725 ci->numFreeArenas = 1;
726 AddChunkToList(rt, ci);
727 } else {
728 JS_ASSERT(chunkInfoIndex != arenaIndex);
729 ci = GET_CHUNK_INFO(chunk, chunkInfoIndex);
730 JS_ASSERT(ci->numFreeArenas != 0);
731 JS_ASSERT(ci->lastFreeArena);
732 JS_ASSERT(a != ci->lastFreeArena);
733 if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK - 1) {
734 RemoveChunkFromList(rt, ci);
735 DestroyGCChunk(chunk);
736 } else {
737 ++ci->numFreeArenas;
738 a->prev = ci->lastFreeArena;
739 ci->lastFreeArena = a;
745 static inline size_t
746 GetFinalizableThingSize(unsigned thingKind)
748 JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
750 static const uint8 map[FINALIZE_LIMIT] = {
751 sizeof(JSObject), /* FINALIZE_OBJECT */
752 sizeof(JSFunction), /* FINALIZE_FUNCTION */
753 #if JS_HAS_XML_SUPPORT
754 sizeof(JSXML), /* FINALIZE_XML */
755 #endif
756 sizeof(JSString), /* FINALIZE_STRING */
757 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING0 */
758 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING1 */
759 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING2 */
760 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING3 */
761 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING4 */
762 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING5 */
763 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING6 */
764 sizeof(JSString), /* FINALIZE_EXTERNAL_STRING7 */
767 JS_ASSERT(thingKind < FINALIZE_LIMIT);
768 return map[thingKind];
771 static inline size_t
772 GetFinalizableTraceKind(size_t thingKind)
774 JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
776 static const uint8 map[FINALIZE_LIMIT] = {
777 JSTRACE_OBJECT, /* FINALIZE_OBJECT */
778 JSTRACE_OBJECT, /* FINALIZE_FUNCTION */
779 #if JS_HAS_XML_SUPPORT
780 JSTRACE_XML, /* FINALIZE_XML */
781 #endif
782 JSTRACE_STRING, /* FINALIZE_STRING */
783 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING0 */
784 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING1 */
785 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING2 */
786 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING3 */
787 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING4 */
788 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING5 */
789 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING6 */
790 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING7 */
793 JS_ASSERT(thingKind < FINALIZE_LIMIT);
794 return map[thingKind];
797 static inline size_t
798 GetFinalizableArenaTraceKind(JSGCArenaInfo *a)
800 JS_ASSERT(a->list);
801 return GetFinalizableTraceKind(a->list->thingKind);
804 static void
805 InitGCArenaLists(JSRuntime *rt)
807 for (unsigned i = 0; i != FINALIZE_LIMIT; ++i) {
808 JSGCArenaList *arenaList = &rt->gcArenaList[i];
809 arenaList->head = NULL;
810 arenaList->cursor = NULL;
811 arenaList->thingKind = i;
812 arenaList->thingSize = GetFinalizableThingSize(i);
814 rt->gcDoubleArenaList.head = NULL;
815 rt->gcDoubleArenaList.cursor = NULL;
818 static void
819 FinishGCArenaLists(JSRuntime *rt)
821 for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
822 JSGCArenaList *arenaList = &rt->gcArenaList[i];
823 DestroyGCArenas(rt, arenaList->head);
824 arenaList->head = NULL;
825 arenaList->cursor = NULL;
827 DestroyGCArenas(rt, rt->gcDoubleArenaList.head);
828 rt->gcDoubleArenaList.head = NULL;
829 rt->gcDoubleArenaList.cursor = NULL;
831 rt->gcBytes = 0;
832 JS_ASSERT(rt->gcChunkList == 0);
836 * This function must not be called when the thing is a jsdouble.
838 static uint8 *
839 GetGCThingFlags(void *thing)
841 JSGCArenaInfo *a;
842 uint32 index;
844 a = THING_TO_ARENA(thing);
845 index = THING_TO_INDEX(thing, a->list->thingSize);
846 return THING_FLAGP(a, index);
849 intN
850 js_GetExternalStringGCType(JSString *str)
852 JS_STATIC_ASSERT(FINALIZE_STRING + 1 == FINALIZE_EXTERNAL_STRING0);
853 JS_ASSERT(!JSString::isStatic(str));
855 unsigned thingKind = THING_TO_ARENA(str)->list->thingKind;
856 JS_ASSERT(IsFinalizableStringKind(thingKind));
857 return intN(thingKind) - intN(FINALIZE_EXTERNAL_STRING0);
860 JS_FRIEND_API(uint32)
861 js_GetGCThingTraceKind(void *thing)
863 if (JSString::isStatic(thing))
864 return JSTRACE_STRING;
866 JSGCArenaInfo *a = THING_TO_ARENA(thing);
867 if (!a->list)
868 return JSTRACE_DOUBLE;
869 return GetFinalizableArenaTraceKind(a);
872 JSRuntime*
873 js_GetGCStringRuntime(JSString *str)
875 JSGCArenaList *list = THING_TO_ARENA(str)->list;
876 JS_ASSERT(list->thingSize == sizeof(JSString));
878 unsigned i = list->thingKind;
879 JS_ASSERT(i == FINALIZE_STRING ||
880 (FINALIZE_EXTERNAL_STRING0 <= i &&
881 i < FINALIZE_EXTERNAL_STRING0 + JS_EXTERNAL_STRING_LIMIT));
882 return (JSRuntime *)((uint8 *)(list - i) -
883 offsetof(JSRuntime, gcArenaList));
886 JSBool
887 js_IsAboutToBeFinalized(JSContext *cx, void *thing)
889 JSGCArenaInfo *a;
890 uint32 index, flags;
892 a = THING_TO_ARENA(thing);
893 if (!a->list) {
895 * Check if arena has no marked doubles. In that case the bitmap with
896 * the mark flags contains all garbage as it is initialized only when
897 * marking the first double in the arena.
899 if (!a->hasMarkedDoubles)
900 return JS_TRUE;
901 index = DOUBLE_THING_TO_INDEX(thing);
902 return !IsMarkedDouble(a, index);
904 index = THING_TO_INDEX(thing, a->list->thingSize);
905 flags = *THING_FLAGP(a, index);
906 return !(flags & (GCF_MARK | GCF_LOCK));
909 /* This is compatible with JSDHashEntryStub. */
910 typedef struct JSGCRootHashEntry {
911 JSDHashEntryHdr hdr;
912 void *root;
913 const char *name;
914 } JSGCRootHashEntry;
916 /* Initial size of the gcRootsHash table (SWAG, small enough to amortize). */
917 #define GC_ROOTS_SIZE 256
920 * For a CPU with extremely large pages, using them for GC things wastes
921 * too much memory.
923 #define GC_ARENAS_PER_CPU_PAGE_LIMIT JS_BIT(18 - GC_ARENA_SHIFT)
925 JS_STATIC_ASSERT(GC_ARENAS_PER_CPU_PAGE_LIMIT <= NO_FREE_ARENAS);
927 JSBool
928 js_InitGC(JSRuntime *rt, uint32 maxbytes)
930 InitGCArenaLists(rt);
931 if (!JS_DHashTableInit(&rt->gcRootsHash, JS_DHashGetStubOps(), NULL,
932 sizeof(JSGCRootHashEntry), GC_ROOTS_SIZE)) {
933 rt->gcRootsHash.ops = NULL;
934 return JS_FALSE;
936 rt->gcLocksHash = NULL; /* create lazily */
939 * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
940 * for default backward API compatibility.
942 rt->gcMaxBytes = maxbytes;
943 rt->setGCMaxMallocBytes(maxbytes);
945 rt->gcEmptyArenaPoolLifespan = 30000;
948 * By default the trigger factor gets the maximum possible value. This
949 * means that the GC will not be triggered by growth of GC memory (gcBytes).
951 rt->setGCTriggerFactor((uint32) -1);
954 * The assigned value prevents GC from running when GC memory is too low
955 * (during JS engine start).
957 rt->setGCLastBytes(8192);
959 METER(memset(&rt->gcStats, 0, sizeof rt->gcStats));
960 return JS_TRUE;
963 #ifdef JS_GCMETER
965 static void
966 UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
967 uint32 nthings)
969 size_t narenas;
971 narenas = nlivearenas + nkilledArenas;
972 JS_ASSERT(narenas >= st->livearenas);
974 st->newarenas = narenas - st->livearenas;
975 st->narenas = narenas;
976 st->livearenas = nlivearenas;
977 if (st->maxarenas < narenas)
978 st->maxarenas = narenas;
979 st->totalarenas += narenas;
981 st->nthings = nthings;
982 if (st->maxthings < nthings)
983 st->maxthings = nthings;
984 st->totalthings += nthings;
987 JS_FRIEND_API(void)
988 js_DumpGCStats(JSRuntime *rt, FILE *fp)
990 int i;
991 size_t sumArenas, sumTotalArenas;
992 size_t sumThings, sumMaxThings;
993 size_t sumThingSize, sumTotalThingSize;
994 size_t sumArenaCapacity, sumTotalArenaCapacity;
995 JSGCArenaStats *st;
996 size_t thingSize, thingsPerArena;
997 size_t sumAlloc, sumLocalAlloc, sumFail, sumRetry;
999 fprintf(fp, "\nGC allocation statistics:\n");
1001 #define UL(x) ((unsigned long)(x))
1002 #define ULSTAT(x) UL(rt->gcStats.x)
1003 #define PERCENT(x,y) (100.0 * (double) (x) / (double) (y))
1005 sumArenas = 0;
1006 sumTotalArenas = 0;
1007 sumThings = 0;
1008 sumMaxThings = 0;
1009 sumThingSize = 0;
1010 sumTotalThingSize = 0;
1011 sumArenaCapacity = 0;
1012 sumTotalArenaCapacity = 0;
1013 sumAlloc = 0;
1014 sumLocalAlloc = 0;
1015 sumFail = 0;
1016 sumRetry = 0;
1017 for (i = -1; i < (int) GC_NUM_FREELISTS; i++) {
1018 if (i == -1) {
1019 thingSize = sizeof(jsdouble);
1020 thingsPerArena = DOUBLES_PER_ARENA;
1021 st = &rt->gcStats.doubleArenaStats;
1022 fprintf(fp,
1023 "Arena list for double values (%lu doubles per arena):",
1024 UL(thingsPerArena));
1025 } else {
1026 thingSize = rt->gcArenaList[i].thingSize;
1027 thingsPerArena = THINGS_PER_ARENA(thingSize);
1028 st = &rt->gcStats.arenaStats[i];
1029 fprintf(fp,
1030 "Arena list %d (thing size %lu, %lu things per arena):",
1031 i, UL(GC_FREELIST_NBYTES(i)), UL(thingsPerArena));
1033 if (st->maxarenas == 0) {
1034 fputs(" NEVER USED\n", fp);
1035 continue;
1037 putc('\n', fp);
1038 fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas));
1039 fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n",
1040 UL(st->newarenas), PERCENT(st->newarenas, st->narenas));
1041 fprintf(fp, " arenas after GC: %lu (%.1f%%)\n",
1042 UL(st->livearenas), PERCENT(st->livearenas, st->narenas));
1043 fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas));
1044 fprintf(fp, " things: %lu\n", UL(st->nthings));
1045 fprintf(fp, " GC cell utilization: %.1f%%\n",
1046 PERCENT(st->nthings, thingsPerArena * st->narenas));
1047 fprintf(fp, " average cell utilization: %.1f%%\n",
1048 PERCENT(st->totalthings, thingsPerArena * st->totalarenas));
1049 fprintf(fp, " max things: %lu\n", UL(st->maxthings));
1050 fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc));
1051 fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
1052 UL(st->localalloc), PERCENT(st->localalloc, st->alloc));
1053 sumArenas += st->narenas;
1054 sumTotalArenas += st->totalarenas;
1055 sumThings += st->nthings;
1056 sumMaxThings += st->maxthings;
1057 sumThingSize += thingSize * st->nthings;
1058 sumTotalThingSize += thingSize * st->totalthings;
1059 sumArenaCapacity += thingSize * thingsPerArena * st->narenas;
1060 sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas;
1061 sumAlloc += st->alloc;
1062 sumLocalAlloc += st->localalloc;
1063 sumFail += st->fail;
1064 sumRetry += st->retry;
1066 fprintf(fp, "TOTAL STATS:\n");
1067 fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
1068 fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas));
1069 fprintf(fp, " total GC things: %lu\n", UL(sumThings));
1070 fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings));
1071 fprintf(fp, " GC cell utilization: %.1f%%\n",
1072 PERCENT(sumThingSize, sumArenaCapacity));
1073 fprintf(fp, " average cell utilization: %.1f%%\n",
1074 PERCENT(sumTotalThingSize, sumTotalArenaCapacity));
1075 fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry));
1076 fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc));
1077 fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
1078 UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc));
1079 fprintf(fp, " allocation failures: %lu\n", UL(sumFail));
1080 fprintf(fp, " things born locked: %lu\n", ULSTAT(lockborn));
1081 fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
1082 fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
1083 fprintf(fp, " mark recursion depth: %lu\n", ULSTAT(depth));
1084 fprintf(fp, " maximum mark recursion: %lu\n", ULSTAT(maxdepth));
1085 fprintf(fp, " mark C recursion depth: %lu\n", ULSTAT(cdepth));
1086 fprintf(fp, " maximum mark C recursion: %lu\n", ULSTAT(maxcdepth));
1087 fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(untraced));
1088 #ifdef DEBUG
1089 fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxuntraced));
1090 #endif
1091 fprintf(fp, " maximum GC nesting level: %lu\n", ULSTAT(maxlevel));
1092 fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke));
1093 fprintf(fp, " thing arenas freed so far: %lu\n", ULSTAT(afree));
1094 fprintf(fp, " stack segments scanned: %lu\n", ULSTAT(stackseg));
1095 fprintf(fp, "stack segment slots scanned: %lu\n", ULSTAT(segslots));
1096 fprintf(fp, "reachable closeable objects: %lu\n", ULSTAT(nclose));
1097 fprintf(fp, " max reachable closeable: %lu\n", ULSTAT(maxnclose));
1098 fprintf(fp, " scheduled close hooks: %lu\n", ULSTAT(closelater));
1099 fprintf(fp, " max scheduled close hooks: %lu\n", ULSTAT(maxcloselater));
1101 #undef UL
1102 #undef ULSTAT
1103 #undef PERCENT
1105 #ifdef JS_ARENAMETER
1106 JS_DumpArenaStats(fp);
1107 #endif
1109 #endif
1111 #ifdef DEBUG
1112 static void
1113 CheckLeakedRoots(JSRuntime *rt);
1114 #endif
1116 void
1117 js_FinishGC(JSRuntime *rt)
1119 #ifdef JS_ARENAMETER
1120 JS_DumpArenaStats(stdout);
1121 #endif
1122 #ifdef JS_GCMETER
1123 js_DumpGCStats(rt, stdout);
1124 #endif
1126 rt->gcIteratorTable.clear();
1127 FinishGCArenaLists(rt);
1129 if (rt->gcRootsHash.ops) {
1130 #ifdef DEBUG
1131 CheckLeakedRoots(rt);
1132 #endif
1133 JS_DHashTableFinish(&rt->gcRootsHash);
1134 rt->gcRootsHash.ops = NULL;
1136 if (rt->gcLocksHash) {
1137 JS_DHashTableDestroy(rt->gcLocksHash);
1138 rt->gcLocksHash = NULL;
1142 JSBool
1143 js_AddRoot(JSContext *cx, void *rp, const char *name)
1145 JSBool ok = js_AddRootRT(cx->runtime, rp, name);
1146 if (!ok)
1147 JS_ReportOutOfMemory(cx);
1148 return ok;
1151 JSBool
1152 js_AddRootRT(JSRuntime *rt, void *rp, const char *name)
1154 JSBool ok;
1155 JSGCRootHashEntry *rhe;
1158 * Due to the long-standing, but now removed, use of rt->gcLock across the
1159 * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
1160 * properly with a racing GC, without calling JS_AddRoot from a request.
1161 * We have to preserve API compatibility here, now that we avoid holding
1162 * rt->gcLock across the mark phase (including the root hashtable mark).
1164 JS_LOCK_GC(rt);
1165 js_WaitForGC(rt);
1166 rhe = (JSGCRootHashEntry *)
1167 JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_ADD);
1168 if (rhe) {
1169 rhe->root = rp;
1170 rhe->name = name;
1171 ok = JS_TRUE;
1172 } else {
1173 ok = JS_FALSE;
1175 JS_UNLOCK_GC(rt);
1176 return ok;
1179 JSBool
1180 js_RemoveRoot(JSRuntime *rt, void *rp)
1183 * Due to the JS_RemoveRootRT API, we may be called outside of a request.
1184 * Same synchronization drill as above in js_AddRoot.
1186 JS_LOCK_GC(rt);
1187 js_WaitForGC(rt);
1188 (void) JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_REMOVE);
1189 rt->gcPoke = JS_TRUE;
1190 JS_UNLOCK_GC(rt);
1191 return JS_TRUE;
1194 #ifdef DEBUG
1196 static JSDHashOperator
1197 js_root_printer(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 i, void *arg)
1199 uint32 *leakedroots = (uint32 *)arg;
1200 JSGCRootHashEntry *rhe = (JSGCRootHashEntry *)hdr;
1202 (*leakedroots)++;
1203 fprintf(stderr,
1204 "JS engine warning: leaking GC root \'%s\' at %p\n",
1205 rhe->name ? (char *)rhe->name : "", rhe->root);
1207 return JS_DHASH_NEXT;
1210 static void
1211 CheckLeakedRoots(JSRuntime *rt)
1213 uint32 leakedroots = 0;
1215 /* Warn (but don't assert) debug builds of any remaining roots. */
1216 JS_DHashTableEnumerate(&rt->gcRootsHash, js_root_printer,
1217 &leakedroots);
1218 if (leakedroots > 0) {
1219 if (leakedroots == 1) {
1220 fprintf(stderr,
1221 "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
1222 " This root may point to freed memory. Objects reachable\n"
1223 " through it have not been finalized.\n",
1224 (void *) rt);
1225 } else {
1226 fprintf(stderr,
1227 "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
1228 " These roots may point to freed memory. Objects reachable\n"
1229 " through them have not been finalized.\n",
1230 (unsigned long) leakedroots, (void *) rt);
1235 typedef struct NamedRootDumpArgs {
1236 void (*dump)(const char *name, void *rp, void *data);
1237 void *data;
1238 } NamedRootDumpArgs;
1240 static JSDHashOperator
1241 js_named_root_dumper(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
1242 void *arg)
1244 NamedRootDumpArgs *args = (NamedRootDumpArgs *) arg;
1245 JSGCRootHashEntry *rhe = (JSGCRootHashEntry *)hdr;
1247 if (rhe->name)
1248 args->dump(rhe->name, rhe->root, args->data);
1249 return JS_DHASH_NEXT;
1252 JS_BEGIN_EXTERN_C
1253 void
1254 js_DumpNamedRoots(JSRuntime *rt,
1255 void (*dump)(const char *name, void *rp, void *data),
1256 void *data)
1258 NamedRootDumpArgs args;
1260 args.dump = dump;
1261 args.data = data;
1262 JS_DHashTableEnumerate(&rt->gcRootsHash, js_named_root_dumper, &args);
1264 JS_END_EXTERN_C
1266 #endif /* DEBUG */
1268 typedef struct GCRootMapArgs {
1269 JSGCRootMapFun map;
1270 void *data;
1271 } GCRootMapArgs;
1273 static JSDHashOperator
1274 js_gcroot_mapper(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
1275 void *arg)
1277 GCRootMapArgs *args = (GCRootMapArgs *) arg;
1278 JSGCRootHashEntry *rhe = (JSGCRootHashEntry *)hdr;
1279 intN mapflags;
1280 int op;
1282 mapflags = args->map(rhe->root, rhe->name, args->data);
1284 #if JS_MAP_GCROOT_NEXT == JS_DHASH_NEXT && \
1285 JS_MAP_GCROOT_STOP == JS_DHASH_STOP && \
1286 JS_MAP_GCROOT_REMOVE == JS_DHASH_REMOVE
1287 op = (JSDHashOperator)mapflags;
1288 #else
1289 op = JS_DHASH_NEXT;
1290 if (mapflags & JS_MAP_GCROOT_STOP)
1291 op |= JS_DHASH_STOP;
1292 if (mapflags & JS_MAP_GCROOT_REMOVE)
1293 op |= JS_DHASH_REMOVE;
1294 #endif
1296 return (JSDHashOperator) op;
1299 uint32
1300 js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
1302 GCRootMapArgs args;
1303 uint32 rv;
1305 args.map = map;
1306 args.data = data;
1307 JS_LOCK_GC(rt);
1308 rv = JS_DHashTableEnumerate(&rt->gcRootsHash, js_gcroot_mapper, &args);
1309 JS_UNLOCK_GC(rt);
1310 return rv;
1313 JSBool
1314 js_RegisterCloseableIterator(JSContext *cx, JSObject *obj)
1316 JSRuntime *rt;
1317 JSBool ok;
1319 rt = cx->runtime;
1320 JS_ASSERT(!rt->gcRunning);
1322 JS_LOCK_GC(rt);
1323 ok = rt->gcIteratorTable.append(obj);
1324 JS_UNLOCK_GC(rt);
1325 return ok;
1328 static void
1329 CloseNativeIterators(JSContext *cx)
1331 JSRuntime *rt = cx->runtime;
1332 size_t length = rt->gcIteratorTable.length();
1333 JSObject **array = rt->gcIteratorTable.begin();
1335 size_t newLength = 0;
1336 for (size_t i = 0; i < length; ++i) {
1337 JSObject *obj = array[i];
1338 if (js_IsAboutToBeFinalized(cx, obj))
1339 js_CloseNativeIterator(cx, obj);
1340 else
1341 array[newLength++] = obj;
1343 rt->gcIteratorTable.resize(newLength);
1346 void
1347 JSRuntime::setGCTriggerFactor(uint32 factor)
1349 JS_ASSERT(factor >= 100);
1351 gcTriggerFactor = factor;
1352 setGCLastBytes(gcLastBytes);
1355 void
1356 JSRuntime::setGCLastBytes(size_t lastBytes)
1358 gcLastBytes = lastBytes;
1359 uint64 triggerBytes = uint64(lastBytes) * uint64(gcTriggerFactor / 100);
1360 if (triggerBytes != size_t(triggerBytes))
1361 triggerBytes = size_t(-1);
1362 gcTriggerBytes = size_t(triggerBytes);
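/*
 * As written, the multiplier is the integer part of gcTriggerFactor / 100.
 * For example (illustrative numbers only), with gcTriggerFactor == 300 and
 * a GC that leaves gcLastBytes at 8 MB, gcTriggerBytes becomes 24 MB, so
 * IsGCThresholdReached starts returning true once gcBytes reaches 24 MB.
 */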
1365 void
1366 JSGCFreeLists::purge()
1369 * Return the free lists back to the arenas so GC finalization will not
1370 * run the finalizers over uninitialized bytes from free things.
1372 for (JSGCThing **p = finalizables; p != JS_ARRAY_END(finalizables); ++p) {
1373 JSGCThing *freeListHead = *p;
1374 if (freeListHead) {
1375 JSGCArenaInfo *a = THING_TO_ARENA(freeListHead);
1376 JS_ASSERT(!a->finalizable.freeList);
1377 a->finalizable.freeList = freeListHead;
1378 *p = NULL;
1381 doubles = NULL;
1384 static inline bool
1385 IsGCThresholdReached(JSRuntime *rt)
1387 #ifdef JS_GC_ZEAL
1388 if (rt->gcZeal >= 1)
1389 return true;
1390 #endif
1393 * Since the initial value of the gcLastBytes parameter is not equal to
1394 * zero (see the js_InitGC function), the return value is false when
1395 * the gcBytes value is close to zero at the JS engine start.
1397 return rt->isGCMallocLimitReached() || rt->gcBytes >= rt->gcTriggerBytes;
1400 static JSGCThing *
1401 RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
1403 JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind]);
1404 JSRuntime *rt = cx->runtime;
1405 JS_LOCK_GC(rt);
1406 JS_ASSERT(!rt->gcRunning);
1407 if (rt->gcRunning) {
1408 METER(rt->gcStats.finalfail++);
1409 JS_UNLOCK_GC(rt);
1410 return NULL;
1413 METER(JSGCArenaStats *astats = &cx->runtime->gcStats.arenaStats[thingKind]);
1414 bool canGC = !JS_ON_TRACE(cx);
1415 bool doGC = canGC && IsGCThresholdReached(rt);
1416 JSGCArenaList *arenaList = &rt->gcArenaList[thingKind];
1417 JSGCArenaInfo *a;
1418 for (;;) {
1419 if (doGC) {
1421 * Keep rt->gcLock across the call into js_GC so we don't starve
1422 * and lose to racing threads who deplete the heap just after
1423 * js_GC has replenished it (or has synchronized with a racing
1424 * GC that collected a bunch of garbage). This unfair scheduling
1425 * can happen on certain operating systems. For the gory details,
1426 * see bug 162779 at https://bugzilla.mozilla.org/.
1428 js_GC(cx, GC_LAST_DITCH);
1429 METER(astats->retry++);
1430 canGC = false;
1433 * The JSGC_END callback can legitimately allocate new GC things
1434 * and populate the free list. If that happens, just return that
1435 * list head.
1437 JSGCThing *freeList = JS_THREAD_DATA(cx)->gcFreeLists.
1438 finalizables[thingKind];
1439 if (freeList) {
1440 JS_UNLOCK_GC(rt);
1441 return freeList;
1445 while ((a = arenaList->cursor) != NULL) {
1446 arenaList->cursor = a->prev;
1447 JSGCThing *freeList = a->finalizable.freeList;
1448 if (freeList) {
1449 a->finalizable.freeList = NULL;
1450 JS_UNLOCK_GC(rt);
1451 return freeList;
1455 a = NewGCArena(cx);
1456 if (a)
1457 break;
1458 if (!canGC) {
1459 METER(astats->fail++);
1460 JS_UNLOCK_GC(rt);
1461 return NULL;
1463 doGC = true;
1467 * Do only minimal initialization of the arena inside the GC lock. We
1468 * can do the rest outside the lock because no other threads will see
1469 * the arena until the GC is run.
1471 a->list = arenaList;
1472 a->prev = arenaList->head;
1473 a->prevUntracedPage = 0;
1474 a->finalizable.untracedThings = 0;
1475 a->finalizable.freeList = NULL;
1476 arenaList->head = a;
1477 JS_UNLOCK_GC(rt);
1479 unsigned nthings = THINGS_PER_ARENA(arenaList->thingSize);
1480 uint8 *flagsStart = THING_FLAGP(a, nthings - 1);
1481 memset(flagsStart, 0, nthings);
1483 /* Turn all things in the arena into a free list. */
1484 return MakeNewArenaFreeList(a, arenaList->thingSize, nthings);
1487 void *
1488 NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
1490 JS_ASSERT(thingKind < FINALIZE_LIMIT);
1491 #ifdef JS_THREADSAFE
1492 JS_ASSERT(cx->thread);
1493 #endif
1495 /* Updates of metering counters here may not be thread-safe. */
1496 METER(cx->runtime->gcStats.arenaStats[thingKind].alloc++);
1498 JSGCThing **freeListp =
1499 JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind;
1500 JSGCThing *thing = *freeListp;
1501 #ifdef JS_TRACER
1502 bool fromTraceReserve = false;
1503 #endif
1505 for (;;) {
1506 if (thing) {
1507 *freeListp = thing->link;
1508 METER(astats->localalloc++);
1509 break;
1512 #if defined JS_GC_ZEAL && defined JS_TRACER
1513 if (cx->runtime->gcZeal >= 1 && JS_TRACE_MONITOR(cx).useReservedObjects)
1514 goto testReservedObjects;
1515 #endif
1517 thing = RefillFinalizableFreeList(cx, thingKind);
1518 if (thing) {
1520 * See comments in RefillFinalizableFreeList about a possibility
1521 * of *freeListp == thing.
1523 JS_ASSERT(!*freeListp || *freeListp == thing);
1524 *freeListp = thing->link;
1525 break;
1528 #ifdef JS_TRACER
1529 if (JS_TRACE_MONITOR(cx).useReservedObjects) {
1530 #ifdef JS_GC_ZEAL
1531 testReservedObjects:
1532 #endif
1533 JS_ASSERT(JS_ON_TRACE(cx));
1534 JS_ASSERT(thingKind == FINALIZE_OBJECT);
1535 JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
1536 thing = (JSGCThing *) tm->reservedObjects;
1537 JS_ASSERT(thing);
1538 tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]);
1539 fromTraceReserve = true;
1540 break;
1542 #endif
1544 js_ReportOutOfMemory(cx);
1545 return NULL;
1548 JSLocalRootStack *lrs = cx->localRootStack;
1549 if (lrs) {
1551 * If we're in a local root scope, don't set newborn[type] at all, to
1552 * avoid entraining garbage from it for an unbounded amount of time
1553 * on this context. A caller will leave the local root scope and pop
1554 * this reference, allowing thing to be GC'd if it has no other refs.
1555 * See JS_EnterLocalRootScope and related APIs.
1557 if (js_PushLocalRoot(cx, lrs, (jsval) thing) < 0) {
1559 * When we fail for a thing allocated from a free list, not from
1560 * the reserved pool, the thing is not initialized. To prevent GC
1561 * running the finalizer on the thing, we add the thing back to
1562 * the free list. See bug 337407.
1564 #ifdef JS_TRACER
1565 if (!fromTraceReserve)
1566 #endif
1568 JS_ASSERT(thing->link == *freeListp);
1569 *freeListp = thing;
1571 return NULL;
1573 } else {
1575 * No local root scope, so we're stuck with the old, fragile model of
1576 * depending on a pigeon-hole newborn per type per context.
1578 cx->weakRoots.finalizableNewborns[thingKind] = thing;
1581 return thing;
1584 static JSGCThing *
1585 TurnUsedArenaIntoDoubleList(JSGCArenaInfo *a)
1587 JSGCThing *head;
1588 JSGCThing **tailp = &head;
1589 jsuword thing = ARENA_INFO_TO_START(a);
1592 * When m below points to the last word of the bitmap in the arena, its high
1593 * bits correspond to non-existing cells and thing is outside the space
1594 * allocated for doubles. ClearDoubleArenaFlags sets such bits to 1. Thus
1595 * even for this last word a bit is unset iff the corresponding cell
1596 * exists and is free.
1598 for (jsbitmap *m = DOUBLE_ARENA_BITMAP(a);
1599 m != DOUBLE_ARENA_BITMAP_END(a);
1600 ++m) {
1601 JS_ASSERT(thing < reinterpret_cast<jsuword>(DOUBLE_ARENA_BITMAP(a)));
1602 JS_ASSERT((thing - ARENA_INFO_TO_START(a)) %
1603 (JS_BITS_PER_WORD * sizeof(jsdouble)) == 0);
1605 jsbitmap bits = *m;
1606 if (bits == jsbitmap(-1)) {
1607 thing += JS_BITS_PER_WORD * sizeof(jsdouble);
1608 } else {
1610 * We have some zero bits. Turn the corresponding cells into a list,
1611 * unrolling the loop for better performance.
1613 const unsigned unroll = 4;
1614 const jsbitmap unrollMask = (jsbitmap(1) << unroll) - 1;
1615 JS_STATIC_ASSERT((JS_BITS_PER_WORD & unrollMask) == 0);
1617 for (unsigned n = 0; n != JS_BITS_PER_WORD; n += unroll) {
1618 jsbitmap bitsChunk = bits & unrollMask;
1619 bits >>= unroll;
1620 if (bitsChunk == unrollMask) {
1621 thing += unroll * sizeof(jsdouble);
1622 } else {
1623 #define DO_BIT(bit) \
1624 if (!(bitsChunk & (jsbitmap(1) << (bit)))) { \
1625 JS_ASSERT(thing - ARENA_INFO_TO_START(a) <= \
1626 (DOUBLES_PER_ARENA - 1) * sizeof(jsdouble));\
1627 JSGCThing *t = reinterpret_cast<JSGCThing *>(thing); \
1628 *tailp = t; \
1629 tailp = &t->link; \
1631 thing += sizeof(jsdouble);
1632 DO_BIT(0);
1633 DO_BIT(1);
1634 DO_BIT(2);
1635 DO_BIT(3);
1636 #undef DO_BIT
1641 *tailp = NULL;
1642 return head;
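/*
 * For illustration: if a 4-bit chunk of the bitmap is 0101 (bits 0 and 2 set,
 * i.e. those two doubles are marked), the unrolled DO_BIT expansion skips the
 * cells at positions 0 and 2 of that group of four and appends the cells at
 * positions 1 and 3 to the free list, so only unmarked doubles are reused.
 */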
1645 static JSGCThing *
1646 RefillDoubleFreeList(JSContext *cx)
1648 JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.doubles);
1650 JSRuntime *rt = cx->runtime;
1651 JS_ASSERT(!rt->gcRunning);
1653 JS_LOCK_GC(rt);
1655 JSGCArenaInfo *a;
1656 bool canGC = !JS_ON_TRACE(cx);
1657 bool doGC = canGC && IsGCThresholdReached(rt);
1658 for (;;) {
1659 if (doGC) {
1660 js_GC(cx, GC_LAST_DITCH);
1661 METER(rt->gcStats.doubleArenaStats.retry++);
1662 canGC = false;
1666 * Loop until we find an arena with some free doubles. We turn arenas
1667 * into free lists outside the lock to minimize contention between
1668 * threads.
1670 while (!!(a = rt->gcDoubleArenaList.cursor)) {
1671 JS_ASSERT(!a->hasMarkedDoubles);
1672 rt->gcDoubleArenaList.cursor = a->prev;
1673 JS_UNLOCK_GC(rt);
1674 JSGCThing *list = TurnUsedArenaIntoDoubleList(a);
1675 if (list)
1676 return list;
1677 JS_LOCK_GC(rt);
1679 a = NewGCArena(cx);
1680 if (a)
1681 break;
1682 if (!canGC) {
1683 METER(rt->gcStats.doubleArenaStats.fail++);
1684 JS_UNLOCK_GC(rt);
1686 if (!JS_ON_TRACE(cx)) {
1687 /* Trace code handles this on its own. */
1688 js_ReportOutOfMemory(cx);
1690 return NULL;
1692 doGC = true;
1695 a->list = NULL;
1696 a->hasMarkedDoubles = false;
1697 a->prev = rt->gcDoubleArenaList.head;
1698 rt->gcDoubleArenaList.head = a;
1699 JS_UNLOCK_GC(rt);
1700 return MakeNewArenaFreeList(a, sizeof(jsdouble), DOUBLES_PER_ARENA);
1703 JSBool
1704 js_NewDoubleInRootedValue(JSContext *cx, jsdouble d, jsval *vp)
1706 /* Updates of metering counters here are not thread-safe. */
1707 METER(JSGCArenaStats *astats = &cx->runtime->gcStats.doubleArenaStats);
1708 METER(astats->alloc++);
1710 JSGCThing *thing = JS_THREAD_DATA(cx)->gcFreeLists.doubles;
1711 if (!thing) {
1712 thing = RefillDoubleFreeList(cx);
1713 if (!thing) {
1714 METER(astats->fail++);
1715 return false;
1717 } else {
1718 METER(astats->localalloc++);
1722 * The GC things on a free list come from one arena and the things on
1723 * the list are strictly in ascending order.
1725 JS_ASSERT_IF(thing->link,
1726 THING_TO_ARENA(thing) == THING_TO_ARENA(thing->link));
1727 JS_ASSERT_IF(thing->link, thing < thing->link);
1729 JS_THREAD_DATA(cx)->gcFreeLists.doubles = thing->link;
1730 jsdouble *dp = reinterpret_cast<jsdouble *>(thing);
1731 *dp = d;
1732 *vp = DOUBLE_TO_JSVAL(dp);
1733 return true;
1736 jsdouble *
1737 js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d)
1739 jsval v;
1740 jsdouble *dp;
1742 if (!js_NewDoubleInRootedValue(cx, d, &v))
1743 return NULL;
1745 JS_ASSERT(JSVAL_IS_DOUBLE(v));
1746 dp = JSVAL_TO_DOUBLE(v);
1747 if (cx->localRootStack) {
1748 if (js_PushLocalRoot(cx, cx->localRootStack, v) < 0)
1749 return NULL;
1750 } else {
1751 cx->weakRoots.newbornDouble = dp;
1753 return dp;
1756 #ifdef JS_TRACER
1757 JSBool
1758 js_ReserveObjects(JSContext *cx, size_t nobjects)
1761 * Ensure that at least nobjects objects are in the list. fslots[1] of each
1762 * object on the reservedObjects list is the length of the list up to and
1763 * including that object.
1765 JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
1766 size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
1767 while (i < nobjects) {
1768 JSObject *obj = js_NewGCObject(cx);
1769 if (!obj)
1770 return JS_FALSE;
1771 memset(obj, 0, sizeof(JSObject));
1773 /* The class must be set to something for finalization. */
1774 obj->classword = (jsuword) &js_ObjectClass;
1775 obj->fslots[0] = OBJECT_TO_JSVAL(head);
1776 i++;
1777 obj->fslots[1] = INT_TO_JSVAL(i);
1778 head = obj;
1781 return JS_TRUE;
1784 #endif
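/*
 * For example, after js_ReserveObjects(cx, 3) on an empty reserve list the
 * list head has fslots[1] == INT_TO_JSVAL(3), its fslots[0] points to an
 * object with fslots[1] == INT_TO_JSVAL(2), and so on down to the first
 * reserved object, whose fslots[0] is OBJECT_TO_JSVAL(NULL).
 */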
1787 * Shallow GC-things can be locked just by setting the GCF_LOCK bit, because
1788 * they have no descendants to mark during the GC. Currently the optimization
1789 * is only used for non-dependent strings.
1791 static uint8 *
1792 GetShallowGCThingFlag(void *thing)
1794 if (JSString::isStatic(thing))
1795 return NULL;
1796 JSGCArenaInfo *a = THING_TO_ARENA(thing);
1797 if (!a->list || !IsFinalizableStringKind(a->list->thingKind))
1798 return NULL;
1800 JS_ASSERT(sizeof(JSString) == a->list->thingSize);
1801 JSString *str = (JSString *) thing;
1802 if (str->isDependent())
1803 return NULL;
1805 uint32 index = THING_TO_INDEX(thing, sizeof(JSString));
1806 return THING_FLAGP(a, index);
1809 /* This is compatible with JSDHashEntryStub. */
1810 typedef struct JSGCLockHashEntry {
1811 JSDHashEntryHdr hdr;
1812 const void *thing;
1813 uint32 count;
1814 } JSGCLockHashEntry;
1816 JSBool
1817 js_LockGCThingRT(JSRuntime *rt, void *thing)
1819 if (!thing)
1820 return JS_TRUE;
1822 JS_LOCK_GC(rt);
1825 * Avoid adding a rt->gcLocksHash entry for shallow things until someone
1826 * nests a lock.
1828 uint8 *flagp = GetShallowGCThingFlag(thing);
1829 JSBool ok;
1830 JSGCLockHashEntry *lhe;
1831 if (flagp && !(*flagp & GCF_LOCK)) {
1832 *flagp |= GCF_LOCK;
1833 METER(rt->gcStats.lock++);
1834 ok = JS_TRUE;
1835 goto out;
1838 if (!rt->gcLocksHash) {
1839 rt->gcLocksHash = JS_NewDHashTable(JS_DHashGetStubOps(), NULL,
1840 sizeof(JSGCLockHashEntry),
1841 GC_ROOTS_SIZE);
1842 if (!rt->gcLocksHash) {
1843 ok = JS_FALSE;
1844 goto out;
1848 lhe = (JSGCLockHashEntry *)
1849 JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_ADD);
1850 if (!lhe) {
1851 ok = JS_FALSE;
1852 goto out;
1854 if (!lhe->thing) {
1855 lhe->thing = thing;
1856 lhe->count = 1;
1857 } else {
1858 JS_ASSERT(lhe->count >= 1);
1859 lhe->count++;
1862 METER(rt->gcStats.lock++);
1863 ok = JS_TRUE;
1864 out:
1865 JS_UNLOCK_GC(rt);
1866 return ok;
1869 JSBool
1870 js_UnlockGCThingRT(JSRuntime *rt, void *thing)
1872 if (!thing)
1873 return JS_TRUE;
1875 JS_LOCK_GC(rt);
1877 uint8 *flagp = GetShallowGCThingFlag(thing);
1878 JSGCLockHashEntry *lhe;
1879 if (flagp && !(*flagp & GCF_LOCK))
1880 goto out;
1881 if (!rt->gcLocksHash ||
1882 (lhe = (JSGCLockHashEntry *)
1883 JS_DHashTableOperate(rt->gcLocksHash, thing,
1884 JS_DHASH_LOOKUP),
1885 JS_DHASH_ENTRY_IS_FREE(&lhe->hdr))) {
1886 /* Shallow entry is not in the hash -> clear its lock bit. */
1887 if (flagp)
1888 *flagp &= ~GCF_LOCK;
1889 else
1890 goto out;
1891 } else {
1892 if (--lhe->count != 0)
1893 goto out;
1894 JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_REMOVE);
1897 rt->gcPoke = JS_TRUE;
1898 METER(rt->gcStats.unlock++);
1899 out:
1900 JS_UNLOCK_GC(rt);
1901 return JS_TRUE;
1904 JS_PUBLIC_API(void)
1905 JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
1907 switch (kind) {
1908 case JSTRACE_OBJECT: {
1909 /* If obj has no map, it must be a newborn. */
1910 JSObject *obj = (JSObject *) thing;
1911 if (!obj->map)
1912 break;
1913 obj->map->ops->trace(trc, obj);
1914 break;
1917 case JSTRACE_STRING: {
1918 JSString *str = (JSString *) thing;
1919 if (str->isDependent())
1920 JS_CALL_STRING_TRACER(trc, str->dependentBase(), "base");
1921 break;
1924 #if JS_HAS_XML_SUPPORT
1925 case JSTRACE_XML:
1926 js_TraceXML(trc, (JSXML *)thing);
1927 break;
1928 #endif
1933 * Number of things covered by a single bit of JSGCArenaInfo.untracedThings.
1935 #define THINGS_PER_UNTRACED_BIT(thingSize) \
1936 JS_HOWMANY(THINGS_PER_ARENA(thingSize), JS_BITS_PER_WORD)
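/*
 * A rough sketch of how an untraced bit maps back to a range of thing
 * indexes within an arena (see DelayTracingChildren and
 * TraceDelayedChildren below):
 *
 *   bitIndex   = thingIndex / THINGS_PER_UNTRACED_BIT(thingSize)
 *   firstThing = bitIndex * THINGS_PER_UNTRACED_BIT(thingSize)
 *   endThing   = JS_MIN(firstThing + THINGS_PER_UNTRACED_BIT(thingSize),
 *                       THINGS_PER_ARENA(thingSize))
 */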
1938 static void
1939 DelayTracingChildren(JSRuntime *rt, uint8 *flagp)
1941 JSGCArenaInfo *a;
1942 uint32 untracedBitIndex;
1943 jsuword bit;
1945 JS_ASSERT(!(*flagp & GCF_CHILDREN));
1946 *flagp |= GCF_CHILDREN;
1948 METER(rt->gcStats.untraced++);
1949 #ifdef DEBUG
1950 ++rt->gcTraceLaterCount;
1951 METER_UPDATE_MAX(rt->gcStats.maxuntraced, rt->gcTraceLaterCount);
1952 #endif
1954 a = FLAGP_TO_ARENA(flagp);
1955 untracedBitIndex = FLAGP_TO_INDEX(flagp) /
1956 THINGS_PER_UNTRACED_BIT(a->list->thingSize);
1957 JS_ASSERT(untracedBitIndex < JS_BITS_PER_WORD);
1958 bit = (jsuword)1 << untracedBitIndex;
1959 if (a->finalizable.untracedThings != 0) {
1960 JS_ASSERT(rt->gcUntracedArenaStackTop);
1961 if (a->finalizable.untracedThings & bit) {
1962 /* bit already covers things with children to trace later. */
1963 return;
1965 a->finalizable.untracedThings |= bit;
1966 } else {
1968 * The thing is the first thing in the whole arena whose children are
1969 * not yet traced, so push the arena on the stack of arenas with things
1970 * to be traced later, unless the arena has already been pushed. We
1971 * detect that by checking prevUntracedPage, as the field is 0 only for
1972 * arenas that have not been pushed yet. To ensure that
1973 * prevUntracedPage != 0
1974 * even when the stack contains one element, we make prevUntracedPage
1975 * for the arena at the bottom point to itself.
1977 * See comments in TraceDelayedChildren.
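*
* A rough sketch of the resulting stack, where arrows denote the
* prevUntracedPage links (stored as arena page numbers):
*
*   rt->gcUntracedArenaStackTop -> a3 -> a2 -> a1 -> a1 (bottom links to itself)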
1979 a->finalizable.untracedThings = bit;
1980 if (a->prevUntracedPage == 0) {
1981 if (!rt->gcUntracedArenaStackTop) {
1982 /* Stack was empty, mark the arena as the bottom element. */
1983 a->prevUntracedPage = ARENA_INFO_TO_PAGE(a);
1984 } else {
1985 JS_ASSERT(rt->gcUntracedArenaStackTop->prevUntracedPage != 0);
1986 a->prevUntracedPage =
1987 ARENA_INFO_TO_PAGE(rt->gcUntracedArenaStackTop);
1989 rt->gcUntracedArenaStackTop = a;
1992 JS_ASSERT(rt->gcUntracedArenaStackTop);
1995 static void
1996 TraceDelayedChildren(JSTracer *trc)
1998 JSRuntime *rt;
1999 JSGCArenaInfo *a, *aprev;
2000 uint32 thingSize, traceKind;
2001 uint32 thingsPerUntracedBit;
2002 uint32 untracedBitIndex, thingIndex, indexLimit, endIndex;
2003 JSGCThing *thing;
2004 uint8 *flagp;
2006 rt = trc->context->runtime;
2007 a = rt->gcUntracedArenaStackTop;
2008 if (!a) {
2009 JS_ASSERT(rt->gcTraceLaterCount == 0);
2010 return;
2013 for (;;) {
2015 * The following assert verifies that the current arena belongs to the
2016 * untraced stack: DelayTracingChildren ensures that even for the
2017 * stack's bottom arena prevUntracedPage is not 0 but points to the arena itself.
2019 JS_ASSERT(a->prevUntracedPage != 0);
2020 JS_ASSERT(rt->gcUntracedArenaStackTop->prevUntracedPage != 0);
2021 thingSize = a->list->thingSize;
2022 traceKind = GetFinalizableArenaTraceKind(a);
2023 indexLimit = THINGS_PER_ARENA(thingSize);
2024 thingsPerUntracedBit = THINGS_PER_UNTRACED_BIT(thingSize);
2027 * We cannot use a do-while loop here because a->untracedThings can be
2028 * zero before the loop as a leftover from previous iterations. See the
2029 * comments after the loop.
2031 while (a->finalizable.untracedThings != 0) {
2032 untracedBitIndex = JS_FLOOR_LOG2W(a->finalizable.untracedThings);
2033 a->finalizable.untracedThings &=
2034 ~((jsuword)1 << untracedBitIndex);
2035 thingIndex = untracedBitIndex * thingsPerUntracedBit;
2036 endIndex = thingIndex + thingsPerUntracedBit;
2039 * endIndex can go beyond the last allocated thing as the real
2040 * limit can be "inside" the bit.
2042 if (endIndex > indexLimit)
2043 endIndex = indexLimit;
2044 JS_ASSERT(thingIndex < indexLimit);
2046 do {
2048 * Skip free or already traced things that share the bit
2049 * with untraced ones.
2051 flagp = THING_FLAGP(a, thingIndex);
2052 if (!(*flagp & GCF_CHILDREN))
2053 continue;
2054 *flagp &= ~GCF_CHILDREN;
2055 #ifdef DEBUG
2056 JS_ASSERT(rt->gcTraceLaterCount != 0);
2057 --rt->gcTraceLaterCount;
2058 #endif
2059 thing = FLAGP_TO_THING(flagp, thingSize);
2060 JS_TraceChildren(trc, thing, traceKind);
2061 } while (++thingIndex != endIndex);
2065 * We have finished tracing all things in the arena, but we can only
2066 * pop it from the stack if the arena is the stack's top.
2068 * When JS_TraceChildren above calls JS_CallTracer, which in turn (on
2069 * a low C stack) calls DelayTracingChildren, and the latter pushes
2070 * new arenas onto the untraced stack, we have to skip popping this
2071 * arena until it becomes the top of the stack again.
2073 if (a == rt->gcUntracedArenaStackTop) {
2074 aprev = ARENA_PAGE_TO_INFO(a->prevUntracedPage);
2075 a->prevUntracedPage = 0;
2076 if (a == aprev) {
2078 * prevUntracedPage points to itself and we reached the
2079 * bottom of the stack.
2081 break;
2083 rt->gcUntracedArenaStackTop = a = aprev;
2084 } else {
2085 a = rt->gcUntracedArenaStackTop;
2088 JS_ASSERT(rt->gcUntracedArenaStackTop);
2089 JS_ASSERT(rt->gcUntracedArenaStackTop->prevUntracedPage == 0);
2090 rt->gcUntracedArenaStackTop = NULL;
2091 JS_ASSERT(rt->gcTraceLaterCount == 0);
2094 JS_PUBLIC_API(void)
2095 JS_CallTracer(JSTracer *trc, void *thing, uint32 kind)
2097 JSContext *cx;
2098 JSRuntime *rt;
2099 JSGCArenaInfo *a;
2100 uintN index;
2101 uint8 *flagp;
2103 JS_ASSERT(thing);
2104 JS_ASSERT(JS_IS_VALID_TRACE_KIND(kind));
2105 JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
2107 if (!IS_GC_MARKING_TRACER(trc)) {
2108 trc->callback(trc, thing, kind);
2109 goto out;
2112 cx = trc->context;
2113 rt = cx->runtime;
2114 JS_ASSERT(rt->gcMarkingTracer == trc);
2115 JS_ASSERT(rt->gcLevel > 0);
2118 * Optimize for string and double as their size is known and their tracing
2119 * is not recursive.
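*
* Doubles are marked by setting the corresponding bit in the arena's
* double bitmap; strings are marked by setting GCF_MARK and, for dependent
* strings, by iteratively walking the dependentBase() chain.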
2121 switch (kind) {
2122 case JSTRACE_DOUBLE:
2123 a = THING_TO_ARENA(thing);
2124 JS_ASSERT(!a->list);
2125 if (!a->hasMarkedDoubles) {
2126 ClearDoubleArenaFlags(a);
2127 a->hasMarkedDoubles = true;
2129 index = DOUBLE_THING_TO_INDEX(thing);
2130 JS_SET_BIT(DOUBLE_ARENA_BITMAP(a), index);
2131 goto out;
2133 case JSTRACE_STRING:
2134 for (;;) {
2135 if (JSString::isStatic(thing))
2136 goto out;
2137 a = THING_TO_ARENA(thing);
2138 flagp = THING_FLAGP(a, THING_TO_INDEX(thing, sizeof(JSString)));
2139 JS_ASSERT(kind == GetFinalizableArenaTraceKind(a));
2140 if (!((JSString *) thing)->isDependent()) {
2141 *flagp |= GCF_MARK;
2142 goto out;
2144 if (*flagp & GCF_MARK)
2145 goto out;
2146 *flagp |= GCF_MARK;
2147 thing = ((JSString *) thing)->dependentBase();
2149 /* NOTREACHED */
2152 JS_ASSERT(kind == GetFinalizableArenaTraceKind(THING_TO_ARENA(thing)));
2153 flagp = GetGCThingFlags(thing);
2154 if (*flagp & GCF_MARK)
2155 goto out;
2157 *flagp |= GCF_MARK;
2158 if (!cx->insideGCMarkCallback) {
2160 * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC always
2161 * uses the non-recursive code that otherwise would be called only on
2162 * a low C stack condition.
2164 #ifdef JS_GC_ASSUME_LOW_C_STACK
2165 # define RECURSION_TOO_DEEP() JS_TRUE
2166 #else
2167 int stackDummy;
2168 # define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(cx, stackDummy))
2169 #endif
2170 if (RECURSION_TOO_DEEP())
2171 DelayTracingChildren(rt, flagp);
2172 else
2173 JS_TraceChildren(trc, thing, kind);
2174 } else {
2176 * For API compatibility we allow the callback to assume that
2177 * after it calls JS_MarkGCThing for the last time, the callback can
2178 * start to finalize its own objects that are only referenced by
2179 * unmarked GC things.
2181 * Since we do not know which call from inside the callback is the
2182 * last, we ensure that children of all marked things are traced and
2183 * call TraceDelayedChildren(trc) after tracing the thing.
2185 * As TraceDelayedChildren unconditionally invokes JS_TraceChildren
2186 * for the things with untraced children, calling DelayTracingChildren
2187 * is useless here. Hence we always trace the thing's children even with a
2188 * low native stack.
2190 cx->insideGCMarkCallback = JS_FALSE;
2191 JS_TraceChildren(trc, thing, kind);
2192 TraceDelayedChildren(trc);
2193 cx->insideGCMarkCallback = JS_TRUE;
2196 out:
2197 #ifdef DEBUG
2198 trc->debugPrinter = NULL;
2199 trc->debugPrintArg = NULL;
2200 #endif
2201 return; /* to avoid out: right_curl when DEBUG is not defined */
2204 void
2205 js_CallValueTracerIfGCThing(JSTracer *trc, jsval v)
2207 void *thing;
2208 uint32 kind;
2210 if (JSVAL_IS_DOUBLE(v) || JSVAL_IS_STRING(v)) {
2211 thing = JSVAL_TO_TRACEABLE(v);
2212 kind = JSVAL_TRACE_KIND(v);
2213 JS_ASSERT(kind == js_GetGCThingTraceKind(JSVAL_TO_GCTHING(v)));
2214 } else if (JSVAL_IS_OBJECT(v) && v != JSVAL_NULL) {
2215 /* v can be an arbitrary GC thing reinterpreted as an object. */
2216 thing = JSVAL_TO_OBJECT(v);
2217 kind = js_GetGCThingTraceKind(thing);
2218 } else {
2219 return;
2221 JS_CallTracer(trc, thing, kind);
2224 static JSDHashOperator
2225 gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num,
2226 void *arg)
2228 JSGCRootHashEntry *rhe = (JSGCRootHashEntry *)hdr;
2229 JSTracer *trc = (JSTracer *)arg;
2230 jsval *rp = (jsval *)rhe->root;
2231 jsval v = *rp;
2233 /* Ignore null reference, scalar values, and static strings. */
2234 if (!JSVAL_IS_NULL(v) &&
2235 JSVAL_IS_GCTHING(v) &&
2236 !JSString::isStatic(JSVAL_TO_GCTHING(v))) {
2237 #ifdef DEBUG
2238 bool root_points_to_gcArenaList = false;
2239 jsuword thing = (jsuword) JSVAL_TO_GCTHING(v);
2240 JSRuntime *rt = trc->context->runtime;
2241 for (unsigned i = 0; i != FINALIZE_LIMIT; i++) {
2242 JSGCArenaList *arenaList = &rt->gcArenaList[i];
2243 size_t thingSize = arenaList->thingSize;
2244 size_t limit = THINGS_PER_ARENA(thingSize) * thingSize;
2245 for (JSGCArenaInfo *a = arenaList->head; a; a = a->prev) {
2246 if (thing - ARENA_INFO_TO_START(a) < limit) {
2247 root_points_to_gcArenaList = true;
2248 break;
2252 if (!root_points_to_gcArenaList) {
2253 for (JSGCArenaInfo *a = rt->gcDoubleArenaList.head; a; a = a->prev) {
2254 if (thing - ARENA_INFO_TO_START(a) <
2255 DOUBLES_PER_ARENA * sizeof(jsdouble)) {
2256 root_points_to_gcArenaList = true;
2257 break;
2261 if (!root_points_to_gcArenaList && rhe->name) {
2262 fprintf(stderr,
2263 "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
2264 "invalid jsval. This is usually caused by a missing call to JS_RemoveRoot.\n"
2265 "The root's name is \"%s\".\n",
2266 rhe->name);
2268 JS_ASSERT(root_points_to_gcArenaList);
2269 #endif
2270 JS_SET_TRACING_NAME(trc, rhe->name ? rhe->name : "root");
2271 js_CallValueTracerIfGCThing(trc, v);
2274 return JS_DHASH_NEXT;
2277 static JSDHashOperator
2278 gc_lock_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num,
2279 void *arg)
2281 JSGCLockHashEntry *lhe = (JSGCLockHashEntry *)hdr;
2282 void *thing = (void *)lhe->thing;
2283 JSTracer *trc = (JSTracer *)arg;
2284 uint32 traceKind;
2286 JS_ASSERT(lhe->count >= 1);
2287 traceKind = js_GetGCThingTraceKind(thing);
2288 JS_CALL_TRACER(trc, thing, traceKind, "locked object");
2289 return JS_DHASH_NEXT;
2292 #define TRACE_JSVALS(trc, len, vec, name) \
2293 JS_BEGIN_MACRO \
2294 jsval _v, *_vp, *_end; \
2296 for (_vp = vec, _end = _vp + len; _vp < _end; _vp++) { \
2297 _v = *_vp; \
2298 if (JSVAL_IS_TRACEABLE(_v)) { \
2299 JS_SET_TRACING_INDEX(trc, name, _vp - (vec)); \
2300 JS_CallTracer(trc, JSVAL_TO_TRACEABLE(_v), \
2301 JSVAL_TRACE_KIND(_v)); \
2304 JS_END_MACRO
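/*
 * Typical use (see js_TraceStackFrame below): trace nslots values starting
 * at fp->slots, naming each traced value "slot":
 *
 *   TRACE_JSVALS(trc, nslots, fp->slots, "slot");
 */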
2306 void
2307 js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
2309 uintN nslots, minargs, skip;
2311 if (fp->callobj)
2312 JS_CALL_OBJECT_TRACER(trc, fp->callobj, "call");
2313 if (fp->argsobj)
2314 JS_CALL_OBJECT_TRACER(trc, JSVAL_TO_OBJECT(fp->argsobj), "arguments");
2315 if (fp->varobj)
2316 JS_CALL_OBJECT_TRACER(trc, fp->varobj, "variables");
2317 if (fp->script) {
2318 js_TraceScript(trc, fp->script);
2320 /* fp->slots is null for watch pseudo-frames, see js_watch_set. */
2321 if (fp->slots) {
2323 * Don't mark what has not been pushed yet, or what has been
2324 * popped already.
2326 if (fp->regs && fp->regs->sp) {
2327 nslots = (uintN) (fp->regs->sp - fp->slots);
2328 JS_ASSERT(nslots >= fp->script->nfixed);
2329 } else {
2330 nslots = fp->script->nfixed;
2332 TRACE_JSVALS(trc, nslots, fp->slots, "slot");
2334 } else {
2335 JS_ASSERT(!fp->slots);
2336 JS_ASSERT(!fp->regs);
2339 /* Allow for primitive this parameter due to JSFUN_THISP_* flags. */
2340 JS_CALL_VALUE_TRACER(trc, fp->thisv, "this");
2342 if (fp->argv) {
2343 JS_CALL_VALUE_TRACER(trc, fp->calleeValue(), "callee");
2344 nslots = fp->argc;
2345 skip = 0;
2346 if (fp->fun) {
2347 minargs = FUN_MINARGS(fp->fun);
2348 if (minargs > nslots)
2349 nslots = minargs;
2350 if (!FUN_INTERPRETED(fp->fun)) {
2351 JS_ASSERT(!(fp->fun->flags & JSFUN_FAST_NATIVE));
2352 nslots += fp->fun->u.n.extra;
2354 if (fp->fun->flags & JSFRAME_ROOTED_ARGV)
2355 skip = 2 + fp->argc;
2357 TRACE_JSVALS(trc, 2 + nslots - skip, fp->argv - 2 + skip, "operand");
2360 JS_CALL_VALUE_TRACER(trc, fp->rval, "rval");
2361 if (fp->scopeChain)
2362 JS_CALL_OBJECT_TRACER(trc, fp->scopeChain, "scope chain");
2365 void
2366 JSWeakRoots::mark(JSTracer *trc)
2368 #ifdef DEBUG
2369 const char * const newbornNames[] = {
2370 "newborn_object", /* FINALIZE_OBJECT */
2371 "newborn_function", /* FINALIZE_FUNCTION */
2372 #if JS_HAS_XML_SUPPORT
2373 "newborn_xml", /* FINALIZE_XML */
2374 #endif
2375 "newborn_string", /* FINALIZE_STRING */
2376 "newborn_external_string0", /* FINALIZE_EXTERNAL_STRING0 */
2377 "newborn_external_string1", /* FINALIZE_EXTERNAL_STRING1 */
2378 "newborn_external_string2", /* FINALIZE_EXTERNAL_STRING2 */
2379 "newborn_external_string3", /* FINALIZE_EXTERNAL_STRING3 */
2380 "newborn_external_string4", /* FINALIZE_EXTERNAL_STRING4 */
2381 "newborn_external_string5", /* FINALIZE_EXTERNAL_STRING5 */
2382 "newborn_external_string6", /* FINALIZE_EXTERNAL_STRING6 */
2383 "newborn_external_string7", /* FINALIZE_EXTERNAL_STRING7 */
2385 #endif
2386 for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizableNewborns); ++i) {
2387 void *newborn = finalizableNewborns[i];
2388 if (newborn) {
2389 JS_CALL_TRACER(trc, newborn, GetFinalizableTraceKind(i),
2390 newbornNames[i]);
2393 if (newbornDouble)
2394 JS_CALL_DOUBLE_TRACER(trc, newbornDouble, "newborn_double");
2395 JS_CALL_VALUE_TRACER(trc, lastAtom, "lastAtom");
2396 JS_SET_TRACING_NAME(trc, "lastInternalResult");
2397 js_CallValueTracerIfGCThing(trc, lastInternalResult);
2400 JS_REQUIRES_STACK JS_FRIEND_API(void)
2401 js_TraceContext(JSTracer *trc, JSContext *acx)
2403 JSStackFrame *fp, *nextChain;
2404 JSStackHeader *sh;
2405 JSTempValueRooter *tvr;
2407 if (IS_GC_MARKING_TRACER(trc)) {
2409 #define FREE_OLD_ARENAS(pool) \
2410 JS_BEGIN_MACRO \
2411 int64 _age; \
2412 JSArena * _a = (pool).current; \
2413 if (_a == (pool).first.next && \
2414 _a->avail == _a->base + sizeof(int64)) { \
2415 _age = JS_Now() - *(int64 *) _a->base; \
2416 if (_age > (int64) acx->runtime->gcEmptyArenaPoolLifespan * \
2417 1000) \
2418 JS_FreeArenaPool(&(pool)); \
2420 JS_END_MACRO
2423 * Release the stackPool's arenas if the stackPool has existed for
2424 * longer than the limit specified by gcEmptyArenaPoolLifespan.
2426 FREE_OLD_ARENAS(acx->stackPool);
2429 * Release the regexpPool's arenas based on the same criterion as for
2430 * the stackPool.
2432 FREE_OLD_ARENAS(acx->regexpPool);
2436 * Iterate frame chain and dormant chains.
2438 * (NB: see comment on this whole "dormant" thing in js_Execute.)
2440 * Since js_GetTopStackFrame needs to dereference cx->thread to check for
2441 * JIT frames, we check for non-null thread here and avoid null checks
2442 * there. See bug 471197.
2444 #ifdef JS_THREADSAFE
2445 if (acx->thread)
2446 #endif
2448 fp = js_GetTopStackFrame(acx);
2449 nextChain = acx->dormantFrameChain;
2450 if (!fp)
2451 goto next_chain;
2453 /* The top frame must not be dormant. */
2454 JS_ASSERT(!fp->dormantNext);
2455 for (;;) {
2456 do {
2457 js_TraceStackFrame(trc, fp);
2458 } while ((fp = fp->down) != NULL);
2460 next_chain:
2461 if (!nextChain)
2462 break;
2463 fp = nextChain;
2464 nextChain = nextChain->dormantNext;
2468 /* Mark other roots-by-definition in acx. */
2469 if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
2470 JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object");
2471 acx->weakRoots.mark(trc);
2472 if (acx->throwing) {
2473 JS_CALL_VALUE_TRACER(trc, acx->exception, "exception");
2474 } else {
2475 /* Avoid keeping GC-ed junk stored in JSContext.exception. */
2476 acx->exception = JSVAL_NULL;
2479 for (sh = acx->stackHeaders; sh; sh = sh->down) {
2480 METER(trc->context->runtime->gcStats.stackseg++);
2481 METER(trc->context->runtime->gcStats.segslots += sh->nslots);
2482 TRACE_JSVALS(trc, sh->nslots, JS_STACK_SEGMENT(sh), "stack");
2485 if (acx->localRootStack)
2486 js_TraceLocalRoots(trc, acx->localRootStack);
2488 for (tvr = acx->tempValueRooters; tvr; tvr = tvr->down) {
2489 switch (tvr->count) {
2490 case JSTVU_SINGLE:
2491 JS_SET_TRACING_NAME(trc, "tvr->u.value");
2492 js_CallValueTracerIfGCThing(trc, tvr->u.value);
2493 break;
2494 case JSTVU_TRACE:
2495 tvr->u.trace(trc, tvr);
2496 break;
2497 case JSTVU_SPROP:
2498 tvr->u.sprop->trace(trc);
2499 break;
2500 case JSTVU_WEAK_ROOTS:
2501 tvr->u.weakRoots->mark(trc);
2502 break;
2503 case JSTVU_COMPILER:
2504 tvr->u.compiler->trace(trc);
2505 break;
2506 case JSTVU_SCRIPT:
2507 js_TraceScript(trc, tvr->u.script);
2508 break;
2509 case JSTVU_ENUMERATOR:
2510 static_cast<JSAutoEnumStateRooter *>(tvr)->mark(trc);
2511 break;
2512 default:
2513 JS_ASSERT(tvr->count >= 0);
2514 TRACE_JSVALS(trc, tvr->count, tvr->u.array, "tvr->u.array");
2518 if (acx->sharpObjectMap.depth > 0)
2519 js_TraceSharpMap(trc, &acx->sharpObjectMap);
2521 js_TraceRegExpStatics(trc, acx);
2523 #ifdef JS_TRACER
2524 InterpState* state = acx->interpState;
2525 while (state) {
2526 if (state->nativeVp)
2527 TRACE_JSVALS(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
2528 state = state->prev;
2530 #endif
2533 #ifdef JS_TRACER
2535 static void
2536 MarkReservedGCThings(JSTraceMonitor *tm)
2538 /* Keep reserved doubles. */
2539 for (jsval *ptr = tm->reservedDoublePool; ptr < tm->reservedDoublePoolPtr; ++ptr) {
2540 jsdouble* dp = JSVAL_TO_DOUBLE(*ptr);
2541 JS_ASSERT(js_GetGCThingTraceKind(dp) == JSTRACE_DOUBLE);
2543 JSGCArenaInfo *a = THING_TO_ARENA(dp);
2544 JS_ASSERT(!a->list);
2545 if (!a->hasMarkedDoubles) {
2546 ClearDoubleArenaFlags(a);
2547 a->hasMarkedDoubles = JS_TRUE;
2549 jsuint index = DOUBLE_THING_TO_INDEX(dp);
2550 JS_SET_BIT(DOUBLE_ARENA_BITMAP(a), index);
2552 /* Keep reserved objects. */
2553 for (JSObject *obj = tm->reservedObjects; obj; obj = JSVAL_TO_OBJECT(obj->fslots[0])) {
2554 JS_ASSERT(js_GetGCThingTraceKind(obj) == JSTRACE_OBJECT);
2556 uint8 *flagp = GetGCThingFlags(obj);
2557 *flagp |= GCF_MARK;
2561 #ifdef JS_THREADSAFE
2562 static JSDHashOperator
2563 reserved_gcthings_marker(JSDHashTable *table, JSDHashEntryHdr *hdr,
2564 uint32, void *)
2566 JSThread *thread = ((JSThreadsHashEntry *) hdr)->thread;
2568 MarkReservedGCThings(&thread->data.traceMonitor);
2569 return JS_DHASH_NEXT;
2571 #endif
2573 #endif
2575 JS_REQUIRES_STACK void
2576 js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
2578 JSRuntime *rt = trc->context->runtime;
2579 JSContext *iter, *acx;
2581 JS_DHashTableEnumerate(&rt->gcRootsHash, gc_root_traversal, trc);
2582 if (rt->gcLocksHash)
2583 JS_DHashTableEnumerate(rt->gcLocksHash, gc_lock_traversal, trc);
2584 js_TraceAtomState(trc, allAtoms);
2585 js_TraceRuntimeNumberState(trc);
2587 iter = NULL;
2588 while ((acx = js_ContextIterator(rt, JS_TRUE, &iter)) != NULL)
2589 js_TraceContext(trc, acx);
2591 js_TraceThreads(rt, trc);
2593 if (rt->gcExtraRootsTraceOp)
2594 rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);
2596 #ifdef JS_TRACER
2597 for (int i = 0; i < JSBUILTIN_LIMIT; i++) {
2598 if (rt->builtinFunctions[i])
2599 JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function");
2602 /* Mark reserved gcthings unless we are shutting down. */
2603 if (IS_GC_MARKING_TRACER(trc) && rt->state != JSRTS_LANDING) {
2604 #ifdef JS_THREADSAFE
2605 JS_DHashTableEnumerate(&rt->threads, reserved_gcthings_marker, NULL);
2606 #else
2607 MarkReservedGCThings(&rt->threadData.traceMonitor);
2608 #endif
2611 #endif
2614 void
2615 js_TriggerGC(JSContext *cx, JSBool gcLocked)
2617 JSRuntime *rt = cx->runtime;
2619 #ifdef JS_THREADSAFE
2620 JS_ASSERT(cx->requestDepth > 0);
2621 #endif
2622 JS_ASSERT(!rt->gcRunning);
2623 if (rt->gcIsNeeded)
2624 return;
2626 js_TriggerAllOperationCallbacks(rt, gcLocked);
2629 static void
2630 ProcessSetSlotRequest(JSContext *cx, JSSetSlotRequest *ssr)
2632 JSObject *obj = ssr->obj;
2633 JSObject *pobj = ssr->pobj;
2634 uint32 slot = ssr->slot;
2636 while (pobj) {
2637 pobj = js_GetWrappedObject(cx, pobj);
2638 if (pobj == obj) {
2639 ssr->cycle = true;
2640 return;
2642 pobj = JSVAL_TO_OBJECT(STOBJ_GET_SLOT(pobj, slot));
2645 pobj = ssr->pobj;
2646 if (slot == JSSLOT_PROTO) {
2647 obj->setProto(pobj);
2648 } else {
2649 JS_ASSERT(slot == JSSLOT_PARENT);
2650 obj->setParent(pobj);
2654 void
2655 js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data)
2657 JSScript **listp, *script;
2659 for (size_t i = 0; i != JS_ARRAY_LENGTH(data->scriptsToGC); ++i) {
2660 listp = &data->scriptsToGC[i];
2661 while ((script = *listp) != NULL) {
2662 *listp = script->u.nextToGC;
2663 script->u.nextToGC = NULL;
2664 js_DestroyScript(cx, script);
2669 static inline void
2670 FinalizeGCThing(JSContext *cx, JSObject *obj, unsigned thingKind)
2672 JS_ASSERT(thingKind == FINALIZE_FUNCTION || thingKind == FINALIZE_OBJECT);
2674 /* Cope with stillborn objects that have no map. */
2675 if (!obj->map)
2676 return;
2678 if (JS_UNLIKELY(cx->debugHooks->objectHook != NULL)) {
2679 cx->debugHooks->objectHook(cx, obj, JS_FALSE,
2680 cx->debugHooks->objectHookData);
2683 /* Finalize obj first, in case it needs map and slots. */
2684 JSClass *clasp = STOBJ_GET_CLASS(obj);
2685 if (clasp->finalize)
2686 clasp->finalize(cx, obj);
2688 #ifdef INCLUDE_MOZILLA_DTRACE
2689 if (JAVASCRIPT_OBJECT_FINALIZE_ENABLED())
2690 jsdtrace_object_finalize(obj);
2691 #endif
2693 if (JS_LIKELY(OBJ_IS_NATIVE(obj)))
2694 OBJ_SCOPE(obj)->drop(cx, obj);
2695 js_FreeSlots(cx, obj);
2698 static inline void
2699 FinalizeGCThing(JSContext *cx, JSFunction *fun, unsigned thingKind)
2701 JS_ASSERT(thingKind == FINALIZE_FUNCTION);
2702 FinalizeGCThing(cx, FUN_OBJECT(fun), thingKind);
2705 #if JS_HAS_XML_SUPPORT
2706 static inline void
2707 FinalizeGCThing(JSContext *cx, JSXML *xml, unsigned thingKind)
2709 js_FinalizeXML(cx, xml);
2711 #endif
2713 JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
2714 static JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = {
2715 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
2718 intN
2719 js_ChangeExternalStringFinalizer(JSStringFinalizeOp oldop,
2720 JSStringFinalizeOp newop)
2722 for (uintN i = 0; i != JS_ARRAY_LENGTH(str_finalizers); i++) {
2723 if (str_finalizers[i] == oldop) {
2724 str_finalizers[i] = newop;
2725 return intN(i);
2728 return -1;
2732 * cx is NULL when we are called from js_FinishAtomState to force the
2733 * finalization of the permanently interned strings.
2735 static void
2736 FinalizeString(JSRuntime *rt, JSString *str, unsigned thingKind, JSContext *cx)
2738 jschar *chars;
2739 JSBool valid;
2740 JSStringFinalizeOp finalizer;
2742 JS_RUNTIME_UNMETER(rt, liveStrings);
2743 JS_ASSERT(!JSString::isStatic(str));
2744 JS_ASSERT(IsFinalizableStringKind(thingKind));
2745 if (str->isDependent()) {
2746 /* A dependent string cannot be external and must be valid. */
2747 JS_ASSERT(thingKind == FINALIZE_STRING);
2748 JS_ASSERT(str->dependentBase());
2749 JS_RUNTIME_UNMETER(rt, liveDependentStrings);
2750 valid = JS_TRUE;
2751 } else {
2752 /* A stillborn string has null chars, so is not valid. */
2753 chars = str->flatChars();
2754 valid = (chars != NULL);
2755 if (valid) {
2756 if (thingKind == FINALIZE_STRING) {
2757 if (cx)
2758 cx->free(chars);
2759 else
2760 rt->free(chars);
2761 } else {
2762 unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0;
2763 JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
2764 finalizer = str_finalizers[type];
2765 if (finalizer) {
2767 * Assume that the finalizer for the permanently interned
2768 * string knows how to deal with a null context.
2770 finalizer(cx, str);
2775 if (valid && str->isDeflated())
2776 js_PurgeDeflatedStringCache(rt, str);
2779 static inline void
2780 FinalizeGCThing(JSContext *cx, JSString *str, unsigned thingKind)
2782 return FinalizeString(cx->runtime, str, thingKind, cx);
2785 void
2786 js_FinalizeStringRT(JSRuntime *rt, JSString *str)
2788 JS_ASSERT(!JSString::isStatic(str));
2790 unsigned thingKind = THING_TO_ARENA(str)->list->thingKind;
2791 FinalizeString(rt, str, thingKind, NULL);
2794 template<typename T>
2795 static void
2796 FinalizeArenaList(JSContext *cx, unsigned thingKind,
2797 JSGCArenaInfo **emptyArenas)
2799 JSGCArenaList *arenaList = &cx->runtime->gcArenaList[thingKind];
2800 JS_ASSERT(sizeof(T) == arenaList->thingSize);
2802 JSGCArenaInfo **ap = &arenaList->head;
2803 JSGCArenaInfo *a = *ap;
2804 if (!a)
2805 return;
2807 #ifdef JS_GCMETER
2808 uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
2809 #endif
2810 for (;;) {
2811 JS_ASSERT(a->list == arenaList);
2812 JS_ASSERT(a->prevUntracedPage == 0);
2813 JS_ASSERT(a->finalizable.untracedThings == 0);
2815 JSGCThing *freeList = NULL;
2816 JSGCThing **tailp = &freeList;
2817 bool allClear = true;
2818 uint8 *flagp = THING_FLAGP(a, 0);
2819 JSGCThing *thing =
2820 reinterpret_cast<JSGCThing *>(ARENA_INFO_TO_START(a));
2821 JSGCThing *thingsEnd =
2822 reinterpret_cast<JSGCThing *>(ARENA_INFO_TO_START(a) +
2823 THINGS_PER_ARENA(sizeof(T)) *
2824 sizeof(T));
2825 JSGCThing* nextFree = a->finalizable.freeList
2826 ? a->finalizable.freeList
2827 : thingsEnd;
2828 for (;; thing = NextThing(thing, sizeof(T)), --flagp) {
2829 if (thing == nextFree) {
2830 if (thing == thingsEnd)
2831 break;
2832 JS_ASSERT(!*flagp);
2833 nextFree = nextFree->link;
2834 if (!nextFree) {
2835 nextFree = thingsEnd;
2836 } else {
2837 JS_ASSERT(thing < nextFree);
2838 JS_ASSERT(nextFree < thingsEnd);
2840 } else {
2841 JS_ASSERT(!(*flagp & ~(GCF_MARK | GCF_LOCK)));
2842 if (*flagp) {
2843 *flagp &= ~GCF_MARK;
2844 allClear = false;
2845 METER(nthings++);
2846 continue;
2850 * Call the finalizer with cleared flags so
2851 * js_IsAboutToBeFinalized will be false.
2853 *flagp = 0;
2854 FinalizeGCThing(cx, reinterpret_cast<T *>(thing), thingKind);
2855 #ifdef DEBUG
2856 memset(thing, JS_FREE_PATTERN, sizeof(T));
2857 #endif
2859 *tailp = thing;
2860 tailp = &thing->link;
2863 #ifdef DEBUG
2864 /* Check that the free list is consistent. */
2865 unsigned nfree = 0;
2866 if (freeList) {
2867 JS_ASSERT(tailp != &freeList);
2868 JSGCThing *thing = freeList;
2869 for (;;) {
2870 ++nfree;
2871 if (&thing->link == tailp)
2872 break;
2873 JS_ASSERT(thing < thing->link);
2874 thing = thing->link;
2877 #endif
2878 if (allClear) {
2880 * Forget the just-assembled free list head for the arena and
2881 * add the arena itself to the destroy list.
2883 JS_ASSERT(nfree == THINGS_PER_ARENA(sizeof(T)));
2884 *ap = a->prev;
2885 a->prev = *emptyArenas;
2886 *emptyArenas = a;
2887 METER(nkilledarenas++);
2888 } else {
2889 JS_ASSERT(nfree < THINGS_PER_ARENA(sizeof(T)));
2890 *tailp = NULL;
2891 a->finalizable.freeList = freeList;
2892 ap = &a->prev;
2893 METER(nlivearenas++);
2895 if (!(a = *ap))
2896 break;
2898 arenaList->cursor = arenaList->head;
2900 METER(UpdateArenaStats(&cx->runtime->gcStats.arenaStats[thingKind],
2901 nlivearenas, nkilledarenas, nthings));
2905 * The gckind flag bit GC_LOCK_HELD indicates a call from js_NewGCThing with
2906 * rt->gcLock already held, so the lock should be kept on return.
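*
* A rough summary of the other gckind values handled below:
* GC_SET_SLOT_REQUEST only processes rt->setSlotRequests, GC_LAST_CONTEXT
* forces a final collection at shutdown, and the GC_KEEP_ATOMS flag bit
* preserves atoms and weak roots across the collection.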
2908 void
2909 js_GC(JSContext *cx, JSGCInvocationKind gckind)
2911 JSRuntime *rt;
2912 JSBool keepAtoms;
2913 JSGCCallback callback;
2914 JSTracer trc;
2915 JSGCArenaInfo *emptyArenas, *a, **ap;
2916 #ifdef JS_THREADSAFE
2917 uint32 requestDebit;
2918 #endif
2919 #ifdef JS_GCMETER
2920 uint32 nlivearenas, nkilledarenas, nthings;
2921 #endif
2923 JS_ASSERT_IF(gckind == GC_LAST_DITCH, !JS_ON_TRACE(cx));
2924 rt = cx->runtime;
2926 #ifdef JS_THREADSAFE
2928 * We allow js_GC calls outside a request but the context must be bound
2929 * to the current thread.
2931 JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
2933 /* Avoid deadlock. */
2934 JS_ASSERT(!JS_IS_RUNTIME_LOCKED(rt));
2935 #endif
2937 if (gckind & GC_KEEP_ATOMS) {
2939 * The set slot request and last ditch GC kinds preserve all atoms and
2940 * weak roots.
2942 keepAtoms = JS_TRUE;
2943 } else {
2944 /* Keep atoms when a suspended compile is running on another context. */
2945 keepAtoms = (rt->gcKeepAtoms != 0);
2946 JS_CLEAR_WEAK_ROOTS(&cx->weakRoots);
2950 * Don't collect garbage if the runtime isn't up, and cx is not the last
2951 * context in the runtime. The last context must force a GC, and nothing
2952 * should suppress that final collection or there may be shutdown leaks,
2953 * or runtime bloat until the next context is created.
2955 if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
2956 return;
2958 restart_at_beginning:
2960 * Let the API user decide to defer a GC if it wants to (unless this
2961 * is the last context). Invoke the callback regardless. Sample the
2962 * callback in case we are freely racing with a JS_SetGCCallback{,RT} on
2963 * another thread.
2965 if (gckind != GC_SET_SLOT_REQUEST && (callback = rt->gcCallback)) {
2966 JSBool ok;
2968 if (gckind & GC_LOCK_HELD)
2969 JS_UNLOCK_GC(rt);
2970 ok = callback(cx, JSGC_BEGIN);
2971 if (gckind & GC_LOCK_HELD)
2972 JS_LOCK_GC(rt);
2973 if (!ok && gckind != GC_LAST_CONTEXT) {
2975 * It's possible that we've looped back to this code from the 'goto
2976 * restart_at_beginning' below in the GC_SET_SLOT_REQUEST code and
2977 * that rt->gcLevel is now 0. Don't return without notifying!
2979 if (rt->gcLevel == 0 && (gckind & GC_LOCK_HELD))
2980 JS_NOTIFY_GC_DONE(rt);
2981 return;
2985 /* Lock out other GC allocator and collector invocations. */
2986 if (!(gckind & GC_LOCK_HELD))
2987 JS_LOCK_GC(rt);
2989 METER(rt->gcStats.poke++);
2990 rt->gcPoke = JS_FALSE;
2992 #ifdef JS_THREADSAFE
2994 * Check if the GC is already running on this or another thread and
2995 * delegate the job to it.
2997 if (rt->gcLevel > 0) {
2998 JS_ASSERT(rt->gcThread);
3000 /* Bump gcLevel to restart the current GC, so it finds new garbage. */
3001 rt->gcLevel++;
3002 METER_UPDATE_MAX(rt->gcStats.maxlevel, rt->gcLevel);
3005 * If the GC runs on another thread, temporarily suspend the current
3006 * request and wait until the GC is done.
3008 if (rt->gcThread != cx->thread) {
3009 requestDebit = js_DiscountRequestsForGC(cx);
3010 js_RecountRequestsAfterGC(rt, requestDebit);
3012 if (!(gckind & GC_LOCK_HELD))
3013 JS_UNLOCK_GC(rt);
3014 return;
3017 /* No other thread is in GC, so indicate that we're now in GC. */
3018 rt->gcLevel = 1;
3019 rt->gcThread = cx->thread;
3022 * Notify all operation callbacks, which will give them a chance to
3023 * yield their current request. Contexts that are not currently
3024 * executing will perform their callback at some later point,
3025 * which by then will be unnecessary but harmless.
3027 js_NudgeOtherContexts(cx);
3030 * Discount all the requests on the current thread from contributing
3031 * to rt->requestCount before we wait for all other requests to finish.
3032 * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
3033 * rt->requestCount transitions to 0.
3035 requestDebit = js_CountThreadRequests(cx);
3036 JS_ASSERT_IF(cx->requestDepth != 0, requestDebit >= 1);
3037 rt->requestCount -= requestDebit;
3038 while (rt->requestCount > 0)
3039 JS_AWAIT_REQUEST_DONE(rt);
3040 rt->requestCount += requestDebit;
3042 #else /* !JS_THREADSAFE */
3044 /* Bump gcLevel and return rather than nest; the outer gc will restart. */
3045 rt->gcLevel++;
3046 METER_UPDATE_MAX(rt->gcStats.maxlevel, rt->gcLevel);
3047 if (rt->gcLevel > 1)
3048 return;
3050 #endif /* !JS_THREADSAFE */
3053 * Set rt->gcRunning here within the GC lock, and after waiting for any
3054 * active requests to end, so that new requests that try to JS_AddRoot,
3055 * JS_RemoveRoot, or JS_RemoveRootRT block in JS_BeginRequest waiting for
3056 * rt->gcLevel to drop to zero, while request-less calls to the *Root*
3057 * APIs block in js_AddRoot or js_RemoveRoot (see above in this file),
3058 * waiting for GC to finish.
3060 rt->gcRunning = JS_TRUE;
3062 if (gckind == GC_SET_SLOT_REQUEST) {
3063 JSSetSlotRequest *ssr;
3065 while ((ssr = rt->setSlotRequests) != NULL) {
3066 rt->setSlotRequests = ssr->next;
3067 JS_UNLOCK_GC(rt);
3068 ssr->next = NULL;
3069 ProcessSetSlotRequest(cx, ssr);
3070 JS_LOCK_GC(rt);
3074 * We assume here that killing links to parent and prototype objects
3075 * does not create garbage (such objects typically are long-lived and
3076 * widely shared, e.g. global objects, Function.prototype, etc.). We
3077 * collect garbage only if a racing thread attempted GC and is waiting
3078 * for us to finish (gcLevel > 1) or if someone already poked us.
3080 if (rt->gcLevel == 1 && !rt->gcPoke && !rt->gcIsNeeded)
3081 goto done_running;
3083 rt->gcLevel = 0;
3084 rt->gcPoke = JS_FALSE;
3085 rt->gcRunning = JS_FALSE;
3086 #ifdef JS_THREADSAFE
3087 rt->gcThread = NULL;
3088 #endif
3089 gckind = GC_LOCK_HELD;
3090 goto restart_at_beginning;
3093 JS_UNLOCK_GC(rt);
3095 #ifdef JS_TRACER
3096 if (JS_ON_TRACE(cx))
3097 goto out;
3098 #endif
3099 VOUCH_HAVE_STACK();
3101 /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
3102 rt->gcIsNeeded = JS_FALSE;
3104 /* Reset malloc counter. */
3105 rt->resetGCMallocBytes();
3107 #ifdef JS_DUMP_SCOPE_METERS
3108 { extern void js_DumpScopeMeters(JSRuntime *rt);
3109 js_DumpScopeMeters(rt);
3111 #endif
3113 #ifdef JS_TRACER
3114 js_PurgeJITOracle();
3115 #endif
3117 restart:
3118 rt->gcNumber++;
3119 JS_ASSERT(!rt->gcUntracedArenaStackTop);
3120 JS_ASSERT(rt->gcTraceLaterCount == 0);
3123 * Reset the property cache's type id generator so we can compress ids.
3124 * Same for the protoHazardShape proxy-shape standing in for all object
3125 * prototypes having readonly or setter properties.
3127 if (rt->shapeGen & SHAPE_OVERFLOW_BIT
3128 #ifdef JS_GC_ZEAL
3129 || rt->gcZeal >= 1
3130 #endif
3132 rt->gcRegenShapes = true;
3133 rt->gcRegenShapesScopeFlag ^= JSScope::SHAPE_REGEN;
3134 rt->shapeGen = 0;
3135 rt->protoHazardShape = 0;
3138 js_PurgeThreads(cx);
3139 #ifdef JS_TRACER
3140 if (gckind == GC_LAST_CONTEXT) {
3141 /* Clear builtin functions, which are recreated on demand. */
3142 memset(rt->builtinFunctions, 0, sizeof rt->builtinFunctions);
3144 #endif
3147 * Mark phase.
3149 JS_TRACER_INIT(&trc, cx, NULL);
3150 rt->gcMarkingTracer = &trc;
3151 JS_ASSERT(IS_GC_MARKING_TRACER(&trc));
3153 #ifdef DEBUG
3154 for (a = rt->gcDoubleArenaList.head; a; a = a->prev)
3155 JS_ASSERT(!a->hasMarkedDoubles);
3156 #endif
3158 js_TraceRuntime(&trc, keepAtoms);
3159 js_MarkScriptFilenames(rt, keepAtoms);
3162 * Mark children of things that caused too deep recursion during the above
3163 * tracing.
3165 TraceDelayedChildren(&trc);
3167 JS_ASSERT(!cx->insideGCMarkCallback);
3168 if (rt->gcCallback) {
3169 cx->insideGCMarkCallback = JS_TRUE;
3170 (void) rt->gcCallback(cx, JSGC_MARK_END);
3171 JS_ASSERT(cx->insideGCMarkCallback);
3172 cx->insideGCMarkCallback = JS_FALSE;
3174 JS_ASSERT(rt->gcTraceLaterCount == 0);
3176 rt->gcMarkingTracer = NULL;
3178 #ifdef JS_THREADSAFE
3179 cx->createDeallocatorTask();
3180 #endif
3183 * Sweep phase.
3185 * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
3186 * so that any attempt to allocate a GC-thing from a finalizer will fail,
3187 * rather than nest badly and leave the unmarked newborn to be swept.
3189 * We first sweep atom state so we can use js_IsAboutToBeFinalized on
3190 * JSString or jsdouble held in a hashtable to check if the hashtable
3191 * entry can be freed. Note that even after the entry is freed, JSObject
3192 * finalizers can continue to access the corresponding jsdouble* and
3193 * JSString* assuming that they are unique. This works since the
3194 * atomization API must not be called during GC.
3196 js_SweepAtomState(cx);
3198 /* Finalize iterator states before the objects they iterate over. */
3199 CloseNativeIterators(cx);
3201 /* Finalize watch points associated with unreachable objects. */
3202 js_SweepWatchPoints(cx);
3204 #ifdef DEBUG
3205 /* Save the pre-sweep count of scope-mapped properties. */
3206 rt->liveScopePropsPreSweep = rt->liveScopeProps;
3207 #endif
3210 * We finalize JSObject instances before JSString, double and other GC
3211 * things to ensure that an object's finalizer can access them even if
3212 * they will be freed.
3214 * To minimize the number of checks per object or function to be freed,
3215 * we use separate list finalizers when a debug hook is
3216 * installed.
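*
* The resulting order below is: objects, functions, XML, strings and
* external strings; double arenas are swept last.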
3218 emptyArenas = NULL;
3219 FinalizeArenaList<JSObject>(cx, FINALIZE_OBJECT, &emptyArenas);
3220 FinalizeArenaList<JSFunction>(cx, FINALIZE_FUNCTION, &emptyArenas);
3221 #if JS_HAS_XML_SUPPORT
3222 FinalizeArenaList<JSXML>(cx, FINALIZE_XML, &emptyArenas);
3223 #endif
3224 FinalizeArenaList<JSString>(cx, FINALIZE_STRING, &emptyArenas);
3225 for (unsigned i = FINALIZE_EXTERNAL_STRING0;
3226 i <= FINALIZE_EXTERNAL_STRING_LAST;
3227 ++i) {
3228 FinalizeArenaList<JSString>(cx, i, &emptyArenas);
3231 ap = &rt->gcDoubleArenaList.head;
3232 METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0));
3233 while ((a = *ap) != NULL) {
3234 if (!a->hasMarkedDoubles) {
3235 /* No marked double values in the arena. */
3236 *ap = a->prev;
3237 a->prev = emptyArenas;
3238 emptyArenas = a;
3239 METER(nkilledarenas++);
3240 } else {
3241 a->hasMarkedDoubles = false;
3242 ap = &a->prev;
3243 #ifdef JS_GCMETER
3244 for (size_t i = 0; i != DOUBLES_PER_ARENA; ++i) {
3245 if (IsMarkedDouble(a, i))
3246 METER(nthings++);
3248 METER(nlivearenas++);
3249 #endif
3252 METER(UpdateArenaStats(&rt->gcStats.doubleArenaStats,
3253 nlivearenas, nkilledarenas, nthings));
3254 rt->gcDoubleArenaList.cursor = rt->gcDoubleArenaList.head;
3257 * Sweep the runtime's property tree after finalizing objects, in case any
3258 * had watchpoints referencing tree nodes.
3260 js_SweepScopeProperties(cx);
3263 * Sweep script filenames after sweeping functions in the generic loop
3264 * above. In this way when a scripted function's finalizer destroys the
3265 * script and calls rt->destroyScriptHook, the hook can still access the
3266 * script's filename. See bug 323267.
3268 js_SweepScriptFilenames(rt);
3271 * Destroy arenas after we have finished sweeping so finalizers can safely
3272 * use js_IsAboutToBeFinalized().
3274 DestroyGCArenas(rt, emptyArenas);
3276 #ifdef JS_THREADSAFE
3277 cx->submitDeallocatorTask();
3278 #endif
3280 if (rt->gcCallback)
3281 (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
3282 #ifdef DEBUG_srcnotesize
3283 { extern void DumpSrcNoteSizeHist();
3284 DumpSrcNoteSizeHist();
3285 printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
3287 #endif
3289 #ifdef JS_SCOPE_DEPTH_METER
3290 { static FILE *fp;
3291 if (!fp)
3292 fp = fopen("/tmp/scopedepth.stats", "w");
3294 if (fp) {
3295 JS_DumpBasicStats(&rt->protoLookupDepthStats, "proto-lookup depth", fp);
3296 JS_DumpBasicStats(&rt->scopeSearchDepthStats, "scope-search depth", fp);
3297 JS_DumpBasicStats(&rt->hostenvScopeDepthStats, "hostenv scope depth", fp);
3298 JS_DumpBasicStats(&rt->lexicalScopeDepthStats, "lexical scope depth", fp);
3300 putc('\n', fp);
3301 fflush(fp);
3304 #endif /* JS_SCOPE_DEPTH_METER */
3306 #ifdef JS_DUMP_LOOP_STATS
3307 { static FILE *lsfp;
3308 if (!lsfp)
3309 lsfp = fopen("/tmp/loopstats", "w");
3310 if (lsfp) {
3311 JS_DumpBasicStats(&rt->loopStats, "loops", lsfp);
3312 fflush(lsfp);
3315 #endif /* JS_DUMP_LOOP_STATS */
3317 #ifdef JS_TRACER
3318 out:
3319 #endif
3320 JS_LOCK_GC(rt);
3323 * We want to restart GC if js_GC was called recursively or if any of the
3324 * finalizers called js_RemoveRoot or js_UnlockGCThingRT.
3326 if (!JS_ON_TRACE(cx) && (rt->gcLevel > 1 || rt->gcPoke)) {
3327 VOUCH_HAVE_STACK();
3328 rt->gcLevel = 1;
3329 rt->gcPoke = JS_FALSE;
3330 JS_UNLOCK_GC(rt);
3331 goto restart;
3334 rt->setGCLastBytes(rt->gcBytes);
3335 done_running:
3336 rt->gcLevel = 0;
3337 rt->gcRunning = rt->gcRegenShapes = false;
3339 #ifdef JS_THREADSAFE
3340 rt->gcThread = NULL;
3341 JS_NOTIFY_GC_DONE(rt);
3344 * Unlock unless we have GC_LOCK_HELD which requires locked GC on return.
3346 if (!(gckind & GC_LOCK_HELD))
3347 JS_UNLOCK_GC(rt);
3348 #endif
3351 * Execute JSGC_END callback outside the lock. Again, sample the callback
3352 * pointer in case it changes, since we are outside of the GC vs. requests
3353 * interlock mechanism here.
3355 if (gckind != GC_SET_SLOT_REQUEST && (callback = rt->gcCallback)) {
3356 JSWeakRoots savedWeakRoots;
3357 JSTempValueRooter tvr;
3359 if (gckind & GC_KEEP_ATOMS) {
3361 * We allow the JSGC_END implementation to force a full GC or allocate
3362 * new GC things. Thus we must protect the weak roots from garbage
3363 * collection and overwrites.
3365 savedWeakRoots = cx->weakRoots;
3366 JS_PUSH_TEMP_ROOT_WEAK_COPY(cx, &savedWeakRoots, &tvr);
3367 JS_KEEP_ATOMS(rt);
3368 JS_UNLOCK_GC(rt);
3371 (void) callback(cx, JSGC_END);
3373 if (gckind & GC_KEEP_ATOMS) {
3374 JS_LOCK_GC(rt);
3375 JS_UNKEEP_ATOMS(rt);
3376 JS_POP_TEMP_ROOT(cx, &tvr);
3377 } else if (gckind == GC_LAST_CONTEXT && rt->gcPoke) {
3379 * On shutdown iterate until JSGC_END callback stops creating
3380 * garbage.
3382 goto restart_at_beginning;