/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
 * using malloc. It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
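/*
 * Editorial sketch, not part of the original file: a highly simplified view
 * of one collection cycle as this file implements it. The real entry point
 * is js_GC; the Example* name below is hypothetical and the block is
 * compiled out.
 */
#if 0
static void
ExampleMarkAndSweepCycle(JSTracer *trc, JSContext *cx)
{
    MarkRuntime(trc);                               /* mark: trace everything reachable from roots */
    cx->compartment->finalizeObjectArenaLists(cx);  /* sweep: finalize unmarked objects */
    cx->compartment->finalizeStringArenaLists(cx);  /* ... and unmarked strings */
    ExpireGCChunks(cx->runtime);                    /* release chunks that stayed empty long enough */
}
#endif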
#include <stdlib.h>     /* for free */
#include <math.h>
#include <string.h>     /* for memset used when DEBUG */
#include "jstypes.h"
#include "jsstdint.h"
#include "jsutil.h"
#include "jshash.h"
#include "jsbit.h"
#include "jsclist.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsdbgapi.h"
#include "jsexn.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcchunk.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
#include "jsparse.h"
#include "jsproxy.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#include "jstracer.h"
#include "methodjit/MethodJIT.h"

#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "jsprobes.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"

#include "jsstrinlines.h"
#include "jscompartment.h"

#ifdef MOZ_VALGRIND
# define JS_VALGRIND
#endif
#ifdef JS_VALGRIND
# include <valgrind/memcheck.h>
#endif

using namespace js;
using namespace js::gc;

/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_XML    == 2);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
/*
 * Everything we store in the heap must be a multiple of the cell size.
 */
JS_STATIC_ASSERT(sizeof(JSString)      % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSShortString) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSObject)      % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSFunction)    % sizeof(FreeCell) == 0);
#if JS_HAS_XML_SUPPORT
JS_STATIC_ASSERT(sizeof(JSXML)         % sizeof(FreeCell) == 0);
#endif
/*
 * All arenas must be exactly 4k.
 */
JS_STATIC_ASSERT(sizeof(Arena<JSString>)         == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSExternalString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSShortString>)    == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSObject>)         == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSFunction>)       == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSXML>)            == 4096);

#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif

# define METER_UPDATE_MAX(maxLval, rval)                                      \
    METER_IF((maxLval) < (rval), (maxLval) = (rval))
namespace js {
namespace gc {

/* This array should be const, but that doesn't link right under GCC. */
FinalizeKind slotsToThingKind[] = {
    /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
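/*
 * Editorial sketch, not part of the original file: slotsToThingKind rounds a
 * fixed-slot count up to the next finalize kind, so objects share arenas
 * with other objects of the same size class. Compiled out; the Example*
 * name is hypothetical.
 */
#if 0
static void
ExampleSlotRounding()
{
    JS_ASSERT(slotsToThingKind[5] == FINALIZE_OBJECT8);    /* 5 slots -> 8-slot class */
    JS_ASSERT(slotsToThingKind[13] == FINALIZE_OBJECT16);  /* 13 slots -> 16-slot class */
}
#endif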
/* Initialize the arena and set up the free list. */
template <typename T>
void
Arena<T>::init(JSCompartment *compartment, unsigned thingKind)
{
    aheader.compartment = compartment;
    aheader.thingKind = thingKind;
    aheader.freeList = &t.things[0].cell;
    aheader.thingSize = sizeof(T);
    aheader.isUsed = true;
    JS_ASSERT(sizeof(T) == sizeof(ThingOrCell<T>));
    ThingOrCell<T> *thing = &t.things[0];
    ThingOrCell<T> *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1];
    while (thing < last) {
        thing->cell.link = &(thing + 1)->cell;
        ++thing;
    }
    last->cell.link = NULL;
#ifdef DEBUG
    aheader.hasFreeThings = true;
#endif
}
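/*
 * Editorial sketch, not part of the original file: init() threads the free
 * list through the cells in address order, the invariant that inFreeList()
 * below exploits for its early-exit comparison. Compiled out; Example* is
 * hypothetical.
 */
#if 0
static void
ExampleFreeListIsAddressOrdered(Arena<JSObject> *a)
{
    for (FreeCell *c = a->header()->freeList; c && c->link; c = c->link)
        JS_ASSERT(c < c->link);     /* strictly increasing addresses */
}
#endif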
template <typename T>
bool
Arena<T>::inFreeList(void *thing) const
{
    FreeCell *cursor = aheader.freeList;
    while (cursor) {
        JS_ASSERT(aheader.thingSize == sizeof(T));
        JS_ASSERT(!cursor->isMarked());

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return true;

        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }
    return false;
}

template<typename T>
inline ConservativeGCTest
Arena<T>::mark(T *thing, JSTracer *trc)
{
    JS_ASSERT(sizeof(T) == aheader.thingSize);

    T *alignedThing = getAlignedThing(thing);

    if (alignedThing > &t.things[ThingsPerArena-1].t || alignedThing < &t.things[0].t)
        return CGCT_NOTARENA;

    if (!aheader.isUsed || inFreeList(alignedThing))
        return CGCT_NOTLIVE;

    JS_SET_TRACING_NAME(trc, "machine stack");
    Mark(trc, alignedThing);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (alignedThing != thing)
        return CGCT_VALIDWITHOFFSET;
#endif
    return CGCT_VALID;
}
#ifdef DEBUG
bool
checkArenaListsForThing(JSCompartment *comp, void *thing) {
    if (comp->arenas[FINALIZE_OBJECT0].arenasContainThing<JSObject>(thing) ||
        comp->arenas[FINALIZE_OBJECT2].arenasContainThing<JSObject_Slots2>(thing) ||
        comp->arenas[FINALIZE_OBJECT4].arenasContainThing<JSObject_Slots4>(thing) ||
        comp->arenas[FINALIZE_OBJECT8].arenasContainThing<JSObject_Slots8>(thing) ||
        comp->arenas[FINALIZE_OBJECT12].arenasContainThing<JSObject_Slots12>(thing) ||
        comp->arenas[FINALIZE_OBJECT16].arenasContainThing<JSObject_Slots16>(thing) ||
        comp->arenas[FINALIZE_FUNCTION].arenasContainThing<JSFunction>(thing) ||
#if JS_HAS_XML_SUPPORT
        comp->arenas[FINALIZE_XML].arenasContainThing<JSXML>(thing) ||
#endif
        comp->arenas[FINALIZE_STRING].arenasContainThing<JSString>(thing) ||
        comp->arenas[FINALIZE_EXTERNAL_STRING].arenasContainThing<JSExternalString>(thing) ||
        comp->arenas[FINALIZE_SHORT_STRING].arenasContainThing<JSShortString>(thing)) {
        return true;
    }

    return false;
}

bool
checkArenaListAllUnmarked(JSCompartment *comp) {
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
        if (comp->arenas[i].markedThingsInArenaList())
            return false;
    }
    return true;
}
#endif

} /* namespace gc */
} /* namespace js */

void
JSCompartment::finishArenaLists()
{
    for (int i = 0; i < FINALIZE_LIMIT; i++)
        arenas[i].releaseAll();
}

void
Chunk::clearMarkBitmap()
{
    PodZero(&bitmaps[0], ArenasPerChunk);
}
void
Chunk::init(JSRuntime *rt)
{
    info.runtime = rt;
    info.age = 0;
    info.emptyArenaLists.init();
    info.emptyArenaLists.cellFreeList = &arenas[0];
    Arena<FreeCell> *arena = &arenas[0];
    Arena<FreeCell> *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1];
    while (arena < last) {
        arena->header()->next = arena + 1;
        arena->header()->isUsed = false;
        ++arena;
    }
    last->header()->next = NULL;
    last->header()->isUsed = false;
    info.numFree = ArenasPerChunk;
}

bool
Chunk::unused()
{
    return info.numFree == ArenasPerChunk;
}

bool
Chunk::hasAvailableArenas()
{
    return info.numFree > 0;
}

bool
Chunk::withinArenasRange(Cell *cell)
{
    uintptr_t addr = uintptr_t(cell);
    if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk]))
        return true;
    return false;
}

template <typename T>
Arena<T> *
Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
{
    JS_ASSERT(hasAvailableArenas());
    Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind);
    JS_ASSERT(arena);
    JS_ASSERT(arena->header()->isUsed);
    --info.numFree;

    JSRuntime *rt = info.runtime;
    rt->gcBytes += sizeof(Arena<T>);
    comp->gcBytes += sizeof(Arena<T>);
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp);
    METER(rt->gcStats.nallarenas++);
    return arena;
}
template <typename T>
void
Chunk::releaseArena(Arena<T> *arena)
{
    JSRuntime *rt = info.runtime;
    JSCompartment *comp = arena->header()->compartment;
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);
    JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>));
    JS_ASSERT(comp->gcBytes >= sizeof(Arena<T>));

    rt->gcBytes -= sizeof(Arena<T>);
    comp->gcBytes -= sizeof(Arena<T>);
    info.emptyArenaLists.insert((Arena<Cell> *)arena);
    arena->header()->isUsed = false;
    ++info.numFree;
    if (unused())
        info.age = 0;
}

bool
Chunk::expire()
{
    if (!unused())
        return false;
    return info.age++ > MaxAge;
}

JSRuntime *
Chunk::getRuntime()
{
    return info.runtime;
}

inline jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}

inline void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
inline Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *p = (Chunk *)rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    return p;
}

inline void
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
{
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}

static Chunk *
PickChunk(JSRuntime *rt)
{
    Chunk *chunk;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
        if (r.front()->hasAvailableArenas())
            return r.front();
    }

    chunk = AllocateGCChunk(rt);
    if (!chunk)
        return NULL;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->init(rt);

    return chunk;
}

static void
ExpireGCChunks(JSRuntime *rt)
{
    /* Remove unused chunks. */
    AutoLockGC lock(rt);

    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        Chunk *chunk = e.front();
        JS_ASSERT(chunk->info.runtime == rt);
        if (chunk->expire()) {
            e.removeFront();
            ReleaseGCChunk(rt, chunk);
            continue;
        }
    }
}
template <typename T>
static Arena<T> *
AllocateArena(JSContext *cx, unsigned thingKind)
{
    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    Chunk *chunk = cx->compartment->chunk;
    if (!chunk || !chunk->hasAvailableArenas()) {
        chunk = PickChunk(rt);
        if (!chunk) {
            TriggerGC(rt);
            return NULL;
        }
        cx->compartment->chunk = chunk;
    }
    return chunk->allocateArena<T>(cx->compartment, thingKind);
}
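/*
 * Editorial sketch, not part of the original file: the three-level shape of
 * the allocator that AllocateArena above completes. A compartment allocates
 * cells from arenas, arenas from its current chunk, and chunks from the
 * chunk allocator; on exhaustion a GC is triggered rather than growing
 * without bound. Compiled out; Example* is hypothetical.
 */
#if 0
static Arena<JSObject> *
ExampleArenaAllocationPath(JSContext *cx)
{
    /* chunk -> arena; PickChunk() runs inside on a full or missing chunk */
    return AllocateArena<JSObject>(cx, FINALIZE_OBJECT0);
}
#endif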
JS_FRIEND_API(bool)
IsAboutToBeFinalized(JSContext *cx, void *thing)
{
    if (JSString::isStatic(thing))
        return false;
    JS_ASSERT(cx);

    JSCompartment *thingCompartment = reinterpret_cast<Cell *>(thing)->compartment();
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt == thingCompartment->rt);
    if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
        return false;

    return !reinterpret_cast<Cell *>(thing)->isMarked();
}

JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uintN color = BLACK)
{
    JS_ASSERT(thing);
    AssertValidColor(thing, color);
    return reinterpret_cast<Cell *>(thing)->isMarked(color);
}

/*
 * One-eighth life for JIT code: after this number of microseconds have passed
 * (120 * 1000 * 1000 us = two minutes), 1/8 of all JIT code is discarded in
 * inactive compartments, regardless of how often that code runs.
 */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcEmptyArenaPoolLifespan = 30000;

    rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);

    rt->atomsCompartment->setGCLastBytes(8192);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);

    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;

    METER(PodZero(&rt->gcStats));
    return true;
}
namespace js {

template <typename T>
static inline ConservativeGCTest
MarkCell(Cell *cell, JSTracer *trc)
{
    return GetArena<T>(cell)->mark((T *)cell, trc);
}

/*
 * Returns CGCT_VALID or CGCT_VALIDWITHOFFSET and marks the thing if w can be
 * a live GC thing, setting thingKind accordingly. Otherwise returns the
 * reason for rejection.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind)
{
    JSRuntime *rt = trc->context->runtime;
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and to avoid altering valgrind's
     * view of the memory, we mark the argument, a copy of the original word,
     * as memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Cell *cell = reinterpret_cast<Cell *>(payload);
    Chunk *chunk = cell->chunk();

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    if (!chunk->withinArenasRange(cell))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = cell->arena()->header();

    if (!aheader->isUsed)
        return CGCT_FREEARENA;

    ConservativeGCTest test;
    thingKind = aheader->thingKind;

    switch (thingKind) {
      case FINALIZE_OBJECT0:
        test = MarkCell<JSObject>(cell, trc);
        break;
      case FINALIZE_OBJECT2:
        test = MarkCell<JSObject_Slots2>(cell, trc);
        break;
      case FINALIZE_OBJECT4:
        test = MarkCell<JSObject_Slots4>(cell, trc);
        break;
      case FINALIZE_OBJECT8:
        test = MarkCell<JSObject_Slots8>(cell, trc);
        break;
      case FINALIZE_OBJECT12:
        test = MarkCell<JSObject_Slots12>(cell, trc);
        break;
      case FINALIZE_OBJECT16:
        test = MarkCell<JSObject_Slots16>(cell, trc);
        break;
      case FINALIZE_STRING:
        test = MarkCell<JSString>(cell, trc);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkCell<JSExternalString>(cell, trc);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkCell<JSShortString>(cell, trc);
        break;
      case FINALIZE_FUNCTION:
        test = MarkCell<JSFunction>(cell, trc);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkCell<JSXML>(cell, trc);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
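/*
 * Editorial sketch, not part of the original file: the cheap bit tests the
 * conservative scanner applies before any heap lookup. On a 32-bit build a
 * word survives only if its low two bits are clear; the object-jsid tag is
 * then stripped to recover a candidate Cell address. Compiled out;
 * Example* is hypothetical.
 */
#if 0
static bool
ExamplePassesLowBitFilter(jsuword w)
{
    if (w & 0x3)
        return false;                                   /* CGCT_LOWBITSET */
    jsuword payload = w & ~jsuword(JSID_TYPE_MASK);     /* untag object jsids */
    return payload != 0;                                /* candidate Cell address */
}
#endif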
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    uint32 thingKind;
    return MarkIfGCThingWord(trc, w, thingKind);
}

static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and to avoid altering valgrind's
     * view of the memory, we mark the argument, a copy of the original word,
     * as memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    uint32 thingKind;
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    ConservativeGCTest test =
#endif
    MarkIfGCThingWord(trc, w, thingKind);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (test == CGCT_VALID || test == CGCT_VALIDWITHOFFSET) {
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
            jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
            jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif
            void *thing = (test == CGCT_VALIDWITHOFFSET)
                          ? GetAlignedThing((void *)payload, thingKind)
                          : (void *)payload;
            GCMarker::ConservativeRoot root = {thing, thingKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
    }
#endif

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}
static void
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
{
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; ++i)
        MarkWordConservatively(trc, *i);
}

static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const jsuword *begin = beginv->payloadWord();
    const jsuword *end = endv->payloadWord();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
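/*
 * Editorial note, not part of the original file: under JS_NUNBOX32 a Value
 * occupies two machine words (tag + payload), so the stride above is
 * sizeof(Value)/sizeof(jsuword) == 8/4 == 2 and only the payload word of
 * each Value is inspected, halving the conservative lookups. Compiled out.
 */
#if 0
JS_STATIC_ASSERT(sizeof(Value) == 2 * sizeof(jsuword));  /* holds on 32-bit builds */
#endif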
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * We use setjmp to record and update the register snapshot for the
     * conservative scanning with the latest values.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
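/*
 * Editorial sketch, not part of the original file: the trick above relies on
 * setjmp spilling the callee-saved registers into the jmp_buf, so a GC
 * pointer held only in a register becomes visible when the buffer is later
 * scanned word by word. Compiled out; Example* is hypothetical.
 */
#if 0
static void
ExampleScanRegisterSnapshot(JSTracer *trc, ConservativeGCThreadData *ctd)
{
    (void) setjmp(ctd->registerSnapshot.jmpbuf);    /* spill registers */
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
#endif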
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}

} /* namespace js */

#ifdef DEBUG
static void
CheckLeakedRoots(JSRuntime *rt);
#endif

void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

    /* Delete all remaining Compartments. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        comp->finishArenaLists();
        js_delete(comp);
    }
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();

#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish(rt);
#endif

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}

JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}

JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}

typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
#ifdef DEBUG

static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* In debug builds, warn (but don't assert) about any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
"JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
"                   This root may point to freed memory. Objects reachable\n"
"                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
"JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
"                   These roots may point to freed memory. Objects reachable\n"
"                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}

void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}

#endif /* DEBUG */

uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);
    int ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);

    for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) {
        (*c)->setGCLastBytes(gcLastBytes);
    }
}

void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(gcMaxBytes) < maxtrigger) ? gcMaxBytes : size_t(maxtrigger);
}

void
JSCompartment::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(rt->gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(rt->gcMaxBytes) < maxtrigger) ? rt->gcMaxBytes : size_t(maxtrigger);
}
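/*
 * Editorial sketch, not part of the original file (the concrete numbers are
 * assumptions for illustration only): with gcTriggerFactor == 300 and
 * lastBytes == 10 MB, trigger1 is 10 MB * 300 / 100 = 30 MB; the next GC
 * fires once usage reaches the larger trigger, clamped to gcMaxBytes.
 * Compiled out; Example* is hypothetical.
 */
#if 0
static size_t
ExampleTriggerBytes(size_t lastBytes, uint32 factor, size_t maxBytes)
{
    float trigger = float(lastBytes) * float(factor) / 100.0f;
    return (float(maxBytes) < trigger) ? maxBytes : size_t(trigger);
}
#endif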
void
FreeLists::purge()
{
    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    for (FreeCell ***p = finalizables; p != JS_ARRAY_END(finalizables); ++p)
        *p = NULL;
}

class JSShortString;

ArenaList *
GetFinalizableArenaList(JSCompartment *c, unsigned thingKind) {
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return &c->arenas[thingKind];
}

#ifdef DEBUG
bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
#endif

inline bool
NeedLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1)
        return true;
#endif
    return rt->gcIsNeeded;
}
/*
 * Return false only if the GC ran but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
    Conditionally<AutoUnlockAtomsCompartment>
        unlockAtomsCompartmentIf(cx->compartment == rt->atomsCompartment &&
                                 rt->atomsCompartmentIsLocked, cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);

    return rt->gcBytes < rt->gcMaxBytes;
}
template <typename T>
inline bool
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JSCompartment *compartment = cx->compartment;
    JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind],
                 !*compartment->freeLists.finalizables[thingKind]);

    JS_ASSERT(!cx->runtime->gcRunning);
    if (cx->runtime->gcRunning)
        return false;

    bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
    do {
        if (canGC && JS_UNLIKELY(NeedLastDitchGC(cx))) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (compartment->freeLists.finalizables[thingKind])
                return true;
            canGC = false;
        }

        ArenaList *arenaList = GetFinalizableArenaList(compartment, thingKind);
        Arena<T> *a = reinterpret_cast<Arena<T> *>(arenaList->getNextWithFreeList());
        if (a) {
            JS_ASSERT(a->header()->freeList);
            JS_ASSERT(sizeof(T) == a->header()->thingSize);
            compartment->freeLists.populate(a, thingKind);
            return true;
        }

        /*
         * If the allocation fails rt->gcIsNeeded will be set and we will run
         * the GC on the next loop iteration if the last ditch GC is allowed.
         */
        a = AllocateArena<T>(cx, thingKind);
        if (a) {
            compartment->freeLists.populate(a, thingKind);
            arenaList->insert((Arena<FreeCell> *) a);
            a->getMarkingDelay()->init();
            return true;
        }
    } while (canGC);

    METER(cx->runtime->gcStats.fail++);
    js_ReportOutOfMemory(cx);
    return false;
}
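/*
 * Editorial sketch, not part of the original file: the order of attempts
 * RefillTypedFreeList makes above, flattened for readability. Compiled out;
 * Example* is hypothetical.
 */
#if 0
static bool
ExampleRefillOrder(JSContext *cx, unsigned thingKind)
{
    /* 1. Reuse an arena on this list that still has free cells.       */
    /* 2. Otherwise allocate a fresh arena from the compartment chunk. */
    /* 3. Otherwise run the last-ditch GC once and retry steps 1-2.    */
    return RefillFinalizableFreeList(cx, thingKind);
}
#endif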
bool
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        return RefillTypedFreeList<JSObject>(cx, thingKind);
      case FINALIZE_OBJECT2:
        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
      case FINALIZE_OBJECT4:
        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
      case FINALIZE_OBJECT8:
        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
      case FINALIZE_OBJECT12:
        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
      case FINALIZE_OBJECT16:
        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
      case FINALIZE_STRING:
        return RefillTypedFreeList<JSString>(cx, thingKind);
      case FINALIZE_EXTERNAL_STRING:
        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
      case FINALIZE_SHORT_STRING:
        return RefillTypedFreeList<JSShortString>(cx, thingKind);
      case FINALIZE_FUNCTION:
        return RefillTypedFreeList<JSFunction>(cx, thingKind);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return RefillTypedFreeList<JSXML>(cx, thingKind);
#endif
      default:
        JS_NOT_REACHED("bad finalize kind");
        return false;
    }
}

intN
js_GetExternalStringGCType(JSString *str) {
    return GetExternalStringGCType((JSExternalString *)str);
}

uint32
js_GetGCThingTraceKind(void *thing) {
    return GetGCThingTraceKind(thing);
}
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    GCLocks *locks;

    if (!thing)
        return true;
    locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (!p) {
        if (!locks->add(p, thing, 1))
            return false;
    } else {
        JS_ASSERT(p->value >= 1);
        p->value++;
    }

    METER(rt->gcStats.lock++);
    return true;
}

void
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return;

    AutoLockGC lock(rt);
    GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);

    if (p) {
        rt->gcPoke = true;
        if (--p->value == 0)
            rt->gcLocksHash.remove(p);
    }
    METER(rt->gcStats.unlock++);
}

JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        MarkChildren(trc, (JSObject *)thing);
        break;
      }

      case JSTRACE_STRING: {
        MarkChildren(trc, (JSString *)thing);
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        MarkChildren(trc, (JSXML *)thing);
        break;
#endif
    }
}
namespace js {

/*
 * When the native stack is low, the GC does not call JS_TraceChildren to mark
 * the reachable "children" of the thing. Rather the thing is put aside and
 * JS_TraceChildren is called later with more space on the C stack.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient native stack, the code adds a field per
 * arena. The field markingDelay->link links all arenas with delayed things
 * into a stack list with the pointer to stack top in
 * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
 */
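/*
 * Editorial sketch, not part of the original file: the push half of the
 * delayed-marking protocol described above. The stack is threaded through
 * the arenas' MarkingDelay fields; the bottom arena links to itself so a
 * null link can still mean "not scheduled". Compiled out; Example* is
 * hypothetical.
 */
#if 0
static void
ExamplePushUnmarkedArena(GCMarker *gcmarker, Arena<Cell> *a)
{
    MarkingDelay *md = a->getMarkingDelay();
    if (md->link)
        return;                                     /* already scheduled */
    Arena<Cell> *tos = gcmarker->unmarkedArenaStackTop;
    md->link = tos ? tos : a;                       /* self-link marks the bottom */
    gcmarker->unmarkedArenaStackTop = a;
}
#endif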
GCMarker::GCMarker(JSContext *cx)
  : color(0), stackLimit(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterCount = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}

GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}

void
GCMarker::delayMarkingChildren(void *thing)
{
    Cell *cell = reinterpret_cast<Cell *>(thing);
    Arena<Cell> *a = cell->arena();
    JS_ASSERT(cell->isMarked());
    METER(cell->compartment()->rt->gcStats.unmarked++);
    MarkingDelay *markingDelay = a->getMarkingDelay();

    if (markingDelay->link) {
        if (markingDelay->start > (jsuword)cell)
            markingDelay->start = (jsuword)cell;
        /* Arena already scheduled to be marked again. */
        return;
    }
    markingDelay->start = (jsuword)cell;
    Arena<Cell> *tos = unmarkedArenaStackTop;
    markingDelay->link = tos ? tos : a;
    unmarkedArenaStackTop = a;
#ifdef DEBUG
    JSCompartment *comp = cell->compartment();
    markLaterCount += Arena<FreeCell>::ThingsPerArena;
    METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount);
#endif
}

template<typename T>
void
Arena<T>::markDelayedChildren(JSTracer *trc)
{
    T *thing = (T *)getMarkingDelay()->start;
    T *thingsEnd = &t.things[ThingsPerArena-1].t;
    JS_ASSERT(thing == getAlignedThing(thing));
    while (thing <= thingsEnd) {
        if (thing->asCell()->isMarked())
            MarkChildren(trc, thing);

        thing++;
    }
}

void
GCMarker::markDelayedChildren()
{
    while (Arena<Cell> *a = unmarkedArenaStackTop) {
        /*
         * markingDelay->link == current arena indicates the last arena on the
         * stack. If marking gets delayed at the same arena again, the arena
         * is pushed again in delayMarkingChildren. markingDelay->link has to
         * be cleared, otherwise the arena is not pushed again.
         */
        MarkingDelay *markingDelay = a->getMarkingDelay();
        unmarkedArenaStackTop = (markingDelay->link != a)
                                ? markingDelay->link
                                : NULL;
        markingDelay->link = NULL;
#ifdef DEBUG
        markLaterCount -= Arena<FreeCell>::ThingsPerArena;
#endif

        switch (a->header()->thingKind) {
          case FINALIZE_OBJECT0:
            reinterpret_cast<Arena<JSObject> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT2:
            reinterpret_cast<Arena<JSObject_Slots2> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT4:
            reinterpret_cast<Arena<JSObject_Slots4> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT8:
            reinterpret_cast<Arena<JSObject_Slots8> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT12:
            reinterpret_cast<Arena<JSObject_Slots12> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT16:
            reinterpret_cast<Arena<JSObject_Slots16> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_STRING:
            reinterpret_cast<Arena<JSString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_EXTERNAL_STRING:
            reinterpret_cast<Arena<JSExternalString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_SHORT_STRING:
            JS_ASSERT(false);
            break;
          case FINALIZE_FUNCTION:
            reinterpret_cast<Arena<JSFunction> *>(a)->markDelayedChildren(this);
            break;
#if JS_HAS_XML_SUPPORT
          case FINALIZE_XML:
            reinterpret_cast<Arena<JSXML> *>(a)->markDelayedChildren(this);
            break;
#endif
          default:
            JS_NOT_REACHED("wrong thingkind");
        }
    }
    JS_ASSERT(markLaterCount == 0);
    JS_ASSERT(!unmarkedArenaStackTop);
}
} /* namespace js */

static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr) {
        if (!JSString::isStatic(ptr)) {
            bool root_points_to_gcArenaList = false;
            JSCompartment **c = trc->context->runtime->compartments.begin();
            for (; c != trc->context->runtime->compartments.end(); ++c) {
                JSCompartment *comp = *c;
                if (checkArenaListsForThing(comp, ptr)) {
                    root_points_to_gcArenaList = true;
                    break;
                }
            }
            if (!root_points_to_gcArenaList && entry.value.name) {
                fprintf(stderr,
"JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
"invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
"The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(root_points_to_gcArenaList);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}

static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}

void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;

    if (fp->hasCallObj())
        MarkObject(trc, fp->callObj(), "call");
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    if (fp->isScriptFrame()) {
        js_TraceScript(trc, fp->script());
        fp->script()->compartment->active = true;
    }

    MarkValue(trc, fp->returnValue(), "rval");
}

void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}

void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    js::gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SHAPE:
        static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
        return;

      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
            MarkId(trc, desc.id, "PropDesc::id");
        }
        return;
      }

      case DESCRIPTOR: {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;

      case VALVECTOR: {
        Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        Vector<const Shape *, 8> &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
        return;
      }

      case BINDINGS: {
        static_cast<js::AutoBindingsRooter *>(this)->bindings.trace(trc);
        return;
      }
    }

    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
namespace js {

JS_FRIEND_API(void)
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");

    if (acx->compartment)
        acx->compartment->mark(trc);
}
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that we do not have at this point unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter;
#if 0
    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT_IF(!acx->thread->data.requestDepth, acx->thread->suspendCount);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
            void *thing;
            switch (gcr->tag) {
              default:
                continue;
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.toGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
            }

            if (JSString::isStatic(thing))
                continue;

            if (!reinterpret_cast<Cell *>(thing)->isMarked()) {
                ConservativeGCTest test = MarkIfGCThingWord(trc, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        " Aborting.\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
                JS_ASSERT(false);
                abort();
            }
        }
    }
#endif

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);
    js_MarkTraps(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

    rt->atomsCompartment->mark(trc);

#ifdef JS_TRACER
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->traceMonitor.mark(trc);
#endif

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots last so the trace op can use additional colors to
     * implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef DEBUG
    if (rt->functionMeterFilename) {
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}
void
TriggerGC(JSRuntime *rt)
{
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = NULL;
    TriggerAllOperationCallbacks(rt);
}

void
TriggerCompartmentGC(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    JS_ASSERT(!rt->gcRunning);

#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1) {
        TriggerGC(rt);
        return;
    }
#endif

    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
        /* We can't do a compartmental GC of the default compartment. */
        TriggerGC(rt);
        return;
    }

    if (rt->gcIsNeeded) {
        /* If we need to GC more than one compartment, run a full GC. */
        if (rt->gcTriggerCompartment != comp)
            rt->gcTriggerCompartment = NULL;
        return;
    }

    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
        /* If we're using significantly more than our quota, do a full GC. */
        TriggerGC(rt);
        return;
    }

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = comp;
    TriggerAllOperationCallbacks(comp->rt);
}
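/*
 * Editorial note, not part of the original file: 3 * (rt->gcTriggerBytes / 2)
 * above computes 1.5x the trigger threshold in integer arithmetic, so the
 * compartmental GC escalates to a full GC once the runtime is 50% over its
 * quota. Compiled out; Example* is hypothetical.
 */
#if 0
static bool
ExampleSignificantlyOverQuota(size_t gcBytes, size_t gcTriggerBytes)
{
    return gcBytes > 8192 && gcBytes >= 3 * (gcTriggerBytes / 2);
}
#endif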
void
MaybeGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_GC_ZEAL
    if (rt->gcZeal > 0) {
        js_GC(cx, NULL, GC_NORMAL);
        return;
    }
#endif

    JSCompartment *comp = cx->compartment;
    if (rt->gcIsNeeded) {
        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL);
        return;
    }

    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4))
        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL);
}

} /* namespace js */

void
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
{
    JSScript **listp, *script;

    for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
        listp = &comp->scriptsToGC[i];
        while ((script = *listp) != NULL) {
            *listp = script->u.nextToGC;
            script->u.nextToGC = NULL;
            js_DestroyCachedScript(cx, script);
        }
    }
}
/*
 * This function is called from js_FinishAtomState to force the finalization
 * of the permanently interned strings when cx is not available.
 */
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string cannot be external and must be valid. */
        JS_ASSERT(str->asCell()->arena()->header()->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        unsigned thingKind = str->asCell()->arena()->header()->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so is not valid. */
        jschar *chars = const_cast<jschar *>(str->flatChars());
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            rt->stringMemoryUsed -= str->length() * 2;
            rt->free(chars);
        } else if (thingKind == FINALIZE_EXTERNAL_STRING) {
            ((JSExternalString *)str)->finalize();
        }
    }
}
template<typename T>
static void
FinalizeArenaList(JSCompartment *comp, JSContext *cx, unsigned thingKind)
{
    JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask));
    ArenaList *arenaList = GetFinalizableArenaList(comp, thingKind);
    Arena<FreeCell> **ap = &arenaList->head;
    Arena<T> *a = (Arena<T> *) *ap;
    if (!a)
        return;
    JS_ASSERT(sizeof(T) == arenaList->head->header()->thingSize);

#ifdef JS_GCMETER
    uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
#endif
    for (;;) {
        ArenaHeader *header = a->header();
        JS_ASSERT_IF(header->hasFreeThings, header->freeList);
        JS_ASSERT(header->thingKind == thingKind);
        JS_ASSERT(!a->getMarkingDelay()->link);
        JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0);
        JS_ASSERT(a->header()->isUsed);

        FreeCell *nextFree = header->freeList;
        FreeCell *freeList = NULL;
        FreeCell **tailp = &freeList;
        bool allClear = true;

        T *thingsEnd = &a->t.things[a->ThingsPerArena-1].t;
        T *thing = &a->t.things[0].t;
        thingsEnd++;

        if (!nextFree) {
            nextFree = thingsEnd->asFreeCell();
        } else {
            JS_ASSERT(thing->asCell() <= nextFree);
            JS_ASSERT(nextFree < thingsEnd->asCell());
        }

        for (;; thing++) {
            if (thing->asCell() == nextFree) {
                if (thing == thingsEnd)
                    break;
                nextFree = nextFree->link;
                if (!nextFree) {
                    nextFree = thingsEnd->asFreeCell();
                } else {
                    JS_ASSERT(thing->asCell() < nextFree);
                    JS_ASSERT(nextFree < thingsEnd->asFreeCell());
                }
            } else if (thing->asCell()->isMarked()) {
                allClear = false;
                METER(nthings++);
                continue;
            } else {
                thing->finalize(cx);
#ifdef DEBUG
                memset(thing, JS_FREE_PATTERN, sizeof(T));
#endif
            }
            FreeCell *t = thing->asFreeCell();
            *tailp = t;
            tailp = &t->link;
        }

#ifdef DEBUG
        /* Check that the free list is consistent. */
        unsigned nfree = 0;
        if (freeList) {
            JS_ASSERT(tailp != &freeList);
            FreeCell *t = freeList;
            for (;;) {
                ++nfree;
                if (&t->link == tailp)
                    break;
                JS_ASSERT(t < t->link);
                t = t->link;
            }
        }
#endif
        if (allClear) {
            /*
             * Forget just assembled free list head for the arena and
             * add the arena itself to the destroy list.
             */
            JS_ASSERT(nfree == a->ThingsPerArena);
            JS_ASSERT((T *)tailp == &a->t.things[a->ThingsPerArena-1].t);
            *tailp = NULL;
            header->freeList = freeList;
#ifdef DEBUG
            header->hasFreeThings = true;
#endif
            *ap = (header->next);
            JS_ASSERT((T *)header->freeList == &a->t.things[0].t);
            a->chunk()->releaseArena(a);
            METER(nkilledarenas++);
        } else {
            JS_ASSERT(nfree < a->ThingsPerArena);
            *tailp = NULL;
            header->freeList = freeList;
#ifdef DEBUG
            header->hasFreeThings = (nfree == 0) ? false : true;
#endif
            ap = &header->next;
            METER(nlivearenas++);
        }

        if (!(a = (Arena<T> *) *ap))
            break;
    }
    arenaList->cursor = arenaList->head;
    METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings));
}
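/*
 * Editorial sketch, not part of the original file: the per-cell decision the
 * sweep loop above makes while merging the old free list with newly dead
 * things into one address-ordered list. Compiled out; Example* is
 * hypothetical.
 */
#if 0
static bool
ExampleSweepDecision(JSContext *cx, JSObject *thing, FreeCell *nextFree)
{
    if (thing->asCell() == nextFree)
        return false;               /* was already free: relink into the new list */
    if (thing->asCell()->isMarked())
        return true;                /* survivor: keep in place, do not relink */
    thing->finalize(cx);            /* newly dead: finalize, then relink */
    return false;
}
#endif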
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSObject>(this, cx, FINALIZE_OBJECT0);
    FinalizeArenaList<JSObject_Slots2>(this, cx, FINALIZE_OBJECT2);
    FinalizeArenaList<JSObject_Slots4>(this, cx, FINALIZE_OBJECT4);
    FinalizeArenaList<JSObject_Slots8>(this, cx, FINALIZE_OBJECT8);
    FinalizeArenaList<JSObject_Slots12>(this, cx, FINALIZE_OBJECT12);
    FinalizeArenaList<JSObject_Slots16>(this, cx, FINALIZE_OBJECT16);
    FinalizeArenaList<JSFunction>(this, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
    FinalizeArenaList<JSXML>(this, cx, FINALIZE_XML);
#endif
}
void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSShortString>(this, cx, FINALIZE_SHORT_STRING);
    FinalizeArenaList<JSString>(this, cx, FINALIZE_STRING);
    FinalizeArenaList<JSExternalString>(this, cx, FINALIZE_EXTERNAL_STRING);
}
#ifdef JS_THREADSAFE

namespace js {

bool
GCHelperThread::init(JSRuntime *rt)
{
    if (!(wakeup = PR_NewCondVar(rt->gcLock)))
        return false;
    if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
        return false;

    thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
                             PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    return !!thread;
}
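
/*
 * NB: both condition variables above are created on rt->gcLock, and NSPR
 * requires that this lock be held around every PR_WaitCondVar and
 * PR_NotifyCondVar call on wakeup and sweepingDone below.
 */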
void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}
/* static */
void
GCHelperThread::threadMain(void *arg)
{
    /* PR_CreateThread passes the runtime as an opaque pointer. */
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}
void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}
void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}
void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
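
/*
 * Sketch of the handshake implemented by the three methods above: the GC
 * thread sets sweeping and notifies wakeup while holding the GC lock; the
 * helper wakes up, drops the lock only for the actual doSweep() work, then
 * clears sweeping and broadcasts sweepingDone. waitBackgroundSweepEnd loops
 * on sweeping rather than waiting once, so spurious wakeups are harmless.
 */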
JS_FRIEND_API(void)
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    js_free(ptr);
}
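
/*
 * Illustration of the batching scheme above: pointers queued for background
 * freeing are collected into malloc'ed arrays of FREE_ARRAY_LENGTH slots.
 * The partially filled current array is addressed only through freeCursor
 * and freeCursorEnd; when it fills up, its base (freeCursorEnd -
 * FREE_ARRAY_LENGTH) is pushed onto freeVector and a fresh array is
 * allocated here. If either the append or the malloc fails, we fall through
 * and free ptr synchronously instead of deferring it.
 */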
void
GCHelperThread::doSweep()
{
    if (freeCursor) {
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}

} /* namespace js */

#endif /* JS_THREADSAFE */
static void
SweepCrossCompartmentWrappers(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Figure out how much JIT code should be released from inactive
     * compartments. If multiple eighth-lives have passed, compound the
     * release interval linearly; if enough time has passed, all inactive
     * JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }

    /* Remove dead wrappers from the compartment map. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->sweep(cx, releaseInterval);
}
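
/*
 * Worked example of the release-interval loop (illustrative only): with
 * JIT_SCRIPT_EIGHTH_LIFETIME = E and now == gcJitReleaseTime + 2*E, the
 * loop runs three times, taking releaseInterval from 8 down to 5 while
 * advancing gcJitReleaseTime by E each iteration until it passes now. Once
 * seven or more eighth-lives have elapsed, releaseInterval reaches 1,
 * gcJitReleaseTime is rebased to now + E, and all inactive JIT code
 * qualifies for release.
 */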
static void
SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;
    JSCompartmentCallback callback = rt->compartmentCallback;

    /* Skip the atomsCompartment. */
    JSCompartment **read = rt->compartments.begin() + 1;
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;
    JS_ASSERT(rt->compartments.length() >= 1);
    JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);

    while (read < end) {
        JSCompartment *compartment = *read++;

        /*
         * Unmarked compartments containing marked objects don't get deleted,
         * except when a LAST_CONTEXT GC is performed.
         */
        if ((!compartment->isMarked() && compartment->arenaListsAreEmpty())
            || gckind == GC_LAST_CONTEXT)
        {
            JS_ASSERT(compartment->freeLists.isEmpty());
            if (callback)
                (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            js_delete(compartment);
            continue;
        }
        *write++ = compartment;
    }
    rt->compartments.resize(write - rt->compartments.begin());
}
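
/*
 * The read/write pointer pair above is the usual in-place filtering idiom:
 * surviving compartments are copied down over the destroyed ones and the
 * vector is truncated at the write position, preserving relative order
 * without any auxiliary allocation.
 */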
/*
 * Common cache invalidation and so forth that must be done before GC. Even
 * if GCUntilDone calls GC several times, this work needs to be done only
 * once.
 */
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = false;
    rt->gcTriggerCompartment = NULL;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT
#ifdef JS_GC_ZEAL
        || rt->gcZeal >= 1
#endif
        ) {
        rt->gcRegenShapes = true;
        rt->shapeGen = 0;
        rt->protoHazardShape = 0;
    }

    if (rt->gcCurrentCompartment) {
        rt->gcCurrentCompartment->purge(cx);
    } else {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->purge(cx);
    }

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }
}
static void
MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;
    JS_ASSERT(!rt->gcRegenShapes);
    JS_ASSERT(gckind != GC_LAST_CONTEXT);
    JS_ASSERT(comp != rt->atomsCompartment);
    JS_ASSERT(!comp->isMarked());
    JS_ASSERT(comp->rt->gcMode == JSGC_MODE_COMPARTMENT);

    /*
     * Mark phase.
     */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->markCrossCompartment(&gcmarker);

    comp->mark(&gcmarker);

    MarkRuntime(&gcmarker);

    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops
     * to finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif
#ifdef DEBUG
    /* Make sure that we didn't mark an object in another compartment. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT_IF(*c != comp, checkArenaListAllUnmarked(*c));
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif

    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access them even if they will be freed.
     */
    comp->sweep(cx, 0);

    comp->finalizeObjectArenaLists(cx);
    TIMESTAMP(sweepObjectEnd);

    comp->finalizeStringArenaLists(cx);
    TIMESTAMP(sweepStringEnd);

#ifdef DEBUG
    /* Make sure that we didn't mark a Shape in another compartment. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT_IF(*c != comp, (*c)->propertyTree.checkShapesAllUnmarked(cx));

    PropertyTree::dumpShapes(cx);
#endif

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

    comp->clearMark();

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
}
/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 */
static void
MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;

    /*
     * Mark phase.
     */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    MarkRuntime(&gcmarker);
    js_MarkScriptFilenames(rt);

    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops
     * to finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif

    SweepCrossCompartmentWrappers(cx);

    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access them even if they will be freed.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
        (*c)->finalizeObjectArenaLists(cx);

    TIMESTAMP(sweepObjectEnd);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
        (*c)->finalizeStringArenaLists(cx);

    TIMESTAMP(sweepStringEnd);

    /*
     * Sweep the runtime's property trees after finalizing objects, in case
     * any had watchpoints referencing tree nodes.
     *
     * Do this before sweeping compartments, so that we sweep all shapes in
     * unreachable compartments.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->propertyTree.sweepShapes(cx);

    PropertyTree::dumpShapes(cx);

    SweepCompartments(cx, gckind);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    js_SweepScriptFilenames(rt);

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->clearMark();

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
    { extern void DumpSrcNoteSizeHist();
      DumpSrcNoteSizeHist();
      printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
    }
#endif

#ifdef JS_SCOPE_DEPTH_METER
    DumpScopeDepthMeter(rt);
#endif

#ifdef JS_DUMP_LOOP_STATS
    DumpLoopStats(rt);
#endif
}
#ifdef JS_THREADSAFE

/*
 * If the GC is running and we're called on another thread, wait for this GC
 * activation to finish. We can safely wait here without fear of deadlock (in
 * the case where we are called within a request on another thread's context)
 * because the GC doesn't set rt->gcRunning until after it has waited for all
 * active requests to end.
 *
 * We call js_CurrentThreadId() only after checking rt->gcRunning to avoid an
 * expensive call when the GC is not running.
 */
void
js_WaitForGC(JSRuntime *rt)
{
    if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
        do {
            JS_AWAIT_GC_DONE(rt);
        } while (rt->gcRunning);
    }
}
/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock
             * while this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /*
     * Check that we did not release the GC lock above and let the GC finish
     * before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for GC to finish on the other thread, even if requestDebit is 0
     * and even if GC has not started yet because the gcThread is waiting in
     * AutoGCSession. This ensures that js_GC never returns without a full GC
     * cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    rt->requestCount += requestDebit;
}

#endif
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    JSContext *context;

    /* Disable copy constructor or assignments */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    if (rt->gcThread && rt->gcThread != cx->thread)
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }

#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end, so that js_WaitForGC called outside a request
     * does not block on a GC that is still waiting for other requests to
     * finish with rt->gcThread set; JS_BeginRequest performs that wait
     * instead.
     */
    rt->gcRunning = true;
}

/* End the current GC session and allow other threads to proceed. */
AutoGCSession::~AutoGCSession()
{
    JSRuntime *rt = context->runtime;
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread);
    rt->gcThread = NULL;
    JS_NOTIFY_GC_DONE(rt);
#endif
}
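
/*
 * Typical use of AutoGCSession outside the collector itself (a sketch; see
 * SetProtoCheckingForCycles and TraceRuntime below): take the GC lock,
 * enter a session to quiesce all other threads, then drop the lock for the
 * actual work.
 *
 *     AutoLockGC lock(rt);
 *     AutoGCSession gcsession(cx);
 *     AutoUnlockGC unlock(rt);
 *     // ... operate on the runtime with the world stopped ...
 */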
/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
    if (rt->gcMarkAndSweep) {
        rt->gcPoke = true;
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT(!(*c)->isMarked());

    /*
     * We should not be depending on cx->compartment in the GC, so set it to
     * NULL to look for violations.
     */
    SwitchToCompartment(cx, (JSCompartment *)NULL);

    JS_ASSERT(!rt->gcCurrentCompartment);
    rt->gcCurrentCompartment = comp;

    METER(rt->gcStats.poke++);

    bool firstRun = true;
    rt->gcMarkAndSweep = true;
#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->gcBackgroundFree);
#endif
    do {
        rt->gcPoke = false;

        AutoUnlockGC unlock(rt);
        if (firstRun) {
            PreGCCleanup(cx, gckind);
            TIMESTAMP(startMark);
            firstRun = false;
        }

        if (comp)
            MarkAndSweepCompartment(cx, comp, gckind GCTIMER_ARG);
        else
            MarkAndSweep(cx, gckind GCTIMER_ARG);

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

#ifdef JS_THREADSAFE
    JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
    cx->gcBackgroundFree = NULL;
    rt->gcHelperThread.startBackgroundSweep(rt);
#endif

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
    rt->gcCurrentCompartment = NULL;

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->setGCLastBytes((*c)->gcBytes);
}
void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    RecordNativeStackTopForGC(cx);

#ifdef DEBUG
    int stackDummy;
# if JS_STACK_GROWTH_DIRECTION > 0
    /* cx->stackLimit is set to jsuword(-1) by default. */
    JS_ASSERT_IF(cx->stackLimit != jsuword(-1),
                 JS_CHECK_STACK_SIZE(cx->stackLimit + (1 << 14), &stackDummy));
# else
    /* -16k because it is possible to perform a GC during an overrecursion report. */
    JS_ASSERT_IF(cx->stackLimit, JS_CHECK_STACK_SIZE(cx->stackLimit - (1 << 14), &stackDummy));
# endif
#endif

    GCTIMER_BEGIN();

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);

            GCUntilDone(cx, comp, gckind GCTIMER_ARG);
        }

        /* We re-sample the callback as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until the JSGC_END callback stops creating
         * garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);
#ifdef JS_GCMETER
    js_DumpGCStats(cx->runtime, stderr);
#endif
    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}
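
/*
 * Embedding note (a sketch of the call paths, not a new API): js_GC is the
 * internal entry point behind the public JS_GC(cx) call. comp is NULL for a
 * full GC and non-NULL to collect a single compartment, while
 * GC_LAST_CONTEXT is used when the final context of the runtime goes away
 * and the collection must not be suppressed.
 */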
namespace js {
namespace gc {

bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
    /*
     * This function cannot be called during the GC and always requires a
     * request.
     */
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread->data.requestDepth);

    /*
     * This is only necessary if AutoGCSession below would wait for GC to
     * finish on another thread, but to capture the minimal stack space and
     * for code simplicity we do it here unconditionally.
     */
    RecordNativeStackTopForGC(cx);
#endif

    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
    AutoUnlockGC unlock(rt);

    bool cycle = false;
    for (JSObject *obj2 = proto; obj2;) {
        if (obj2 == obj) {
            cycle = true;
            break;
        }
        obj2 = obj2->getProto();
    }
    if (!cycle)
        obj->setProto(proto);

    return !cycle;
}
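
/*
 * Example of the hazard handled above (illustrative only): two threads
 * concurrently evaluating a.__proto__ = b and b.__proto__ = a could each
 * pass an unsynchronized cycle check and then close a prototype loop.
 * Walking the chain inside an AutoGCSession stops the world, making the
 * check and the setProto call atomic with respect to other threads.
 */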
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = js_new<JSCompartment>(rt);
    if (!compartment || !compartment->init()) {
        js_delete(compartment);
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    compartment->setGCLastBytes(8192);

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            JS_ReportOutOfMemory(cx);
            return NULL;
        }
    }

    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        AutoLockGC lock(rt);
        rt->compartments.popBack();
        return NULL;
    }
    return compartment;
}
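
/*
 * Usage note (a sketch of the embedder-facing path): JSAPI entry points
 * such as JS_NewCompartmentAndGlobalObject are expected to reach this
 * function to create the compartment before allocating a global object in
 * it; on failure it reports out-of-memory and returns NULL.
 */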
} /* namespace gc */

void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        AutoLockGC lock(rt);

        if (rt->gcThread != cx->thread) {
            AutoGCSession gcsession(cx);
            AutoUnlockGC unlock(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}

} /* namespace js */