/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
 * using malloc. It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
#include <stdlib.h>     /* for free */
#include <math.h>
#include <string.h>     /* for memset used when DEBUG */
#include "jstypes.h"
#include "jsstdint.h"
#include "jsutil.h"
#include "jshash.h"
#include "jsbit.h"
#include "jsclist.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsdbgapi.h"
#include "jsexn.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcchunk.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
#include "jsparse.h"
#include "jsproxy.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#include "jstracer.h"
#include "methodjit/MethodJIT.h"

#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "jsprobes.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"

#include "jsstrinlines.h"
#include "jscompartment.h"

#ifdef MOZ_VALGRIND
# define JS_VALGRIND
#endif
#ifdef JS_VALGRIND
# include <valgrind/memcheck.h>
#endif
using namespace js;
using namespace js::gc;

/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_XML == 2);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
/*
 * Everything we store in the heap must be a multiple of the cell size.
 */
JS_STATIC_ASSERT(sizeof(JSString) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSShortString) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSFunction) % sizeof(FreeCell) == 0);
#if JS_HAS_XML_SUPPORT
JS_STATIC_ASSERT(sizeof(JSXML) % sizeof(FreeCell) == 0);
#endif
/*
 * All arenas must be exactly 4k.
 */
JS_STATIC_ASSERT(sizeof(Arena<JSString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSExternalString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSShortString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSObject>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSFunction>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSXML>) == 4096);

#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif

# define METER_UPDATE_MAX(maxLval, rval)                                      \
    METER_IF((maxLval) < (rval), (maxLval) = (rval))
namespace js {
namespace gc {

/* This array should be const, but that doesn't link right under GCC. */
FinalizeKind slotsToThingKind[] = {
    /*  0 */ FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /*  4 */ FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /*  8 */ FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
/* Initialize the arena and setup the free list. */
template <typename T>
void
Arena<T>::init(JSCompartment *compartment, unsigned thingKind)
{
    aheader.compartment = compartment;
    aheader.thingKind = thingKind;
    aheader.freeList = &t.things[0].cell;
    aheader.thingSize = sizeof(T);
    aheader.isUsed = true;
    JS_ASSERT(sizeof(T) == sizeof(ThingOrCell<T>));
    ThingOrCell<T> *thing = &t.things[0];
    ThingOrCell<T> *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1];
    while (thing < last) {
        thing->cell.link = &(thing + 1)->cell;
        ++thing;
    }
    last->cell.link = NULL;
#ifdef DEBUG
    aheader.hasFreeThings = true;
#endif
}
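
/*
 * The free list is kept sorted by address (the asserts below rely on this),
 * so the scan can stop as soon as the cursor passes |thing|.
 */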
template <typename T>
bool
Arena<T>::inFreeList(void *thing) const
{
    FreeCell *cursor = aheader.freeList;
    while (cursor) {
        JS_ASSERT(aheader.thingSize == sizeof(T));
        JS_ASSERT(!cursor->isMarked());

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return true;
        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }
    return false;
}
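
/*
 * Conservatively mark a possibly-interior pointer: align it down to a thing
 * boundary, reject it if it lies outside this arena's things or on the free
 * list, and otherwise mark it as a root found on the machine stack.
 */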
template<typename T>
inline ConservativeGCTest
Arena<T>::mark(T *thing, JSTracer *trc)
{
    JS_ASSERT(sizeof(T) == aheader.thingSize);

    T *alignedThing = getAlignedThing(thing);

    if (alignedThing > &t.things[ThingsPerArena-1].t || alignedThing < &t.things[0].t)
        return CGCT_NOTARENA;

    if (!aheader.isUsed || inFreeList(alignedThing))
        return CGCT_NOTLIVE;

    JS_SET_TRACING_NAME(trc, "machine stack");
    Mark(trc, alignedThing);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (alignedThing != thing)
        return CGCT_VALIDWITHOFFSET;
#endif
    return CGCT_VALID;
}
#ifdef DEBUG
bool
checkArenaListsForThing(JSCompartment *comp, void *thing) {
    if (comp->arenas[FINALIZE_OBJECT0].arenasContainThing<JSObject>(thing) ||
        comp->arenas[FINALIZE_OBJECT2].arenasContainThing<JSObject_Slots2>(thing) ||
        comp->arenas[FINALIZE_OBJECT4].arenasContainThing<JSObject_Slots4>(thing) ||
        comp->arenas[FINALIZE_OBJECT8].arenasContainThing<JSObject_Slots8>(thing) ||
        comp->arenas[FINALIZE_OBJECT12].arenasContainThing<JSObject_Slots12>(thing) ||
        comp->arenas[FINALIZE_OBJECT16].arenasContainThing<JSObject_Slots16>(thing) ||
        comp->arenas[FINALIZE_FUNCTION].arenasContainThing<JSFunction>(thing) ||
#if JS_HAS_XML_SUPPORT
        comp->arenas[FINALIZE_XML].arenasContainThing<JSXML>(thing) ||
#endif
        comp->arenas[FINALIZE_STRING].arenasContainThing<JSString>(thing) ||
        comp->arenas[FINALIZE_EXTERNAL_STRING].arenasContainThing<JSExternalString>(thing) ||
        comp->arenas[FINALIZE_SHORT_STRING].arenasContainThing<JSShortString>(thing)) {
        return true;
    }

    return false;
}
#endif

} /* namespace gc */
} /* namespace js */
void
JSCompartment::finishArenaLists()
{
    for (int i = 0; i < FINALIZE_LIMIT; i++)
        arenas[i].releaseAll();
}

void
Chunk::clearMarkBitmap()
{
    PodZero(&bitmaps[0], ArenasPerChunk);
}
void
Chunk::init(JSRuntime *rt)
{
    info.runtime = rt;
    info.age = 0;
    info.emptyArenaLists.init();
    info.emptyArenaLists.cellFreeList = &arenas[0];
    Arena<FreeCell> *arena = &arenas[0];
    Arena<FreeCell> *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1];
    while (arena < last) {
        arena->header()->next = arena + 1;
        arena->header()->isUsed = false;
        ++arena;
    }
    last->header()->next = NULL;
    last->header()->isUsed = false;
    info.numFree = ArenasPerChunk;
}

bool
Chunk::unused()
{
    return info.numFree == ArenasPerChunk;
}

bool
Chunk::hasAvailableArenas()
{
    return info.numFree > 0;
}

bool
Chunk::withinArenasRange(Cell *cell)
{
    uintptr_t addr = uintptr_t(cell);
    if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk]))
        return true;
    return false;
}
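
/*
 * Hand out an arena from this chunk's empty-arena lists, charging its size
 * against the runtime's GC heap and requesting a GC once the trigger
 * threshold is crossed.
 */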
template <typename T>
Arena<T> *
Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
{
    JS_ASSERT(hasAvailableArenas());
    Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind);
    JS_ASSERT(arena);
    JS_ASSERT(arena->header()->isUsed);
    --info.numFree;

    JSRuntime *rt = info.runtime;
    rt->gcBytes += sizeof(Arena<T>);
    if (rt->gcBytes >= rt->gcTriggerBytes)
        TriggerGC(rt);
    METER(rt->gcStats.nallarenas++);
    return arena;
}

template <typename T>
void
Chunk::releaseArena(Arena<T> *arena)
{
    JSRuntime *rt = info.runtime;
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);
    JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>));

    rt->gcBytes -= sizeof(Arena<T>);
    info.emptyArenaLists.insert((Arena<Cell> *)arena);
    arena->header()->isUsed = false;
    ++info.numFree;
    if (unused())
        info.age = 0;
}

bool
Chunk::expire()
{
    if (!unused())
        return false;
    return info.age++ > MaxAge;
}

JSRuntime *
Chunk::getRuntime()
{
    return info.runtime;
}
inline jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}

inline void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}

inline Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *p = (Chunk *)rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    return p;
}

inline void
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
{
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
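
/*
 * Prefer an existing chunk that still has free arenas; allocate and register
 * a new chunk only when every chunk in the set is full.
 */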
static Chunk *
PickChunk(JSRuntime *rt)
{
    Chunk *chunk;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
        if (r.front()->hasAvailableArenas())
            return r.front();
    }

    chunk = AllocateGCChunk(rt);
    if (!chunk)
        return NULL;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->init(rt);

    return chunk;
}
static void
ExpireGCChunks(JSRuntime *rt)
{
    /* Remove unused chunks. */
    AutoLockGC lock(rt);

    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        Chunk *chunk = e.front();
        JS_ASSERT(chunk->info.runtime == rt);
        if (chunk->expire()) {
            e.removeFront();
            ReleaseGCChunk(rt, chunk);
            continue;
        }
    }
}
template <typename T>
static Arena<T> *
AllocateArena(JSContext *cx, unsigned thingKind)
{
    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    Chunk *chunk = cx->compartment->chunk;
    if (!chunk || !chunk->hasAvailableArenas()) {
        chunk = PickChunk(rt);
        if (!chunk) {
            TriggerGC(rt);
            return NULL;
        }
        cx->compartment->chunk = chunk;
    }
    return chunk->allocateArena<T>(cx->compartment, thingKind);
}

JS_FRIEND_API(bool)
IsAboutToBeFinalized(void *thing)
{
    if (JSString::isStatic(thing))
        return false;

    return !reinterpret_cast<Cell *>(thing)->isMarked();
}

JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uint32 color = BLACK)
{
    JS_ASSERT(thing);
    AssertValidColor(thing, color);
    return reinterpret_cast<Cell *>(thing)->isMarked(color);
}
/*
 * 1/8 life for JIT code. After this number of microseconds have passed, 1/8
 * of all JIT code is discarded in inactive compartments, regardless of how
 * often that code runs.
 */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;

JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcEmptyArenaPoolLifespan = 30000;

    rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);

    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;

    METER(PodZero(&rt->gcStats));
    return true;
}
namespace js {

template <typename T>
static inline ConservativeGCTest
MarkCell(Cell *cell, JSTracer *trc)
{
    return GetArena<T>(cell)->mark((T *)cell, trc);
}

/*
 * Returns CGCT_VALID or CGCT_VALIDWITHOFFSET and marks the thing if w can be
 * a live GC thing, setting thingKind accordingly. Otherwise returns the
 * reason for rejection.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind)
{
    JSRuntime *rt = trc->context->runtime;
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and to leave valgrind's view of
     * the memory unaltered, we mark the argument, a copy of the original
     * word, as memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Cell *cell = reinterpret_cast<Cell *>(payload);
    Chunk *chunk = cell->chunk();

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    if (!chunk->withinArenasRange(cell))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = cell->arena()->header();

    if (!aheader->isUsed)
        return CGCT_FREEARENA;

    ConservativeGCTest test;
    thingKind = aheader->thingKind;
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        test = MarkCell<JSObject>(cell, trc);
        break;
      case FINALIZE_OBJECT2:
        test = MarkCell<JSObject_Slots2>(cell, trc);
        break;
      case FINALIZE_OBJECT4:
        test = MarkCell<JSObject_Slots4>(cell, trc);
        break;
      case FINALIZE_OBJECT8:
        test = MarkCell<JSObject_Slots8>(cell, trc);
        break;
      case FINALIZE_OBJECT12:
        test = MarkCell<JSObject_Slots12>(cell, trc);
        break;
      case FINALIZE_OBJECT16:
        test = MarkCell<JSObject_Slots16>(cell, trc);
        break;
      case FINALIZE_STRING:
        test = MarkCell<JSString>(cell, trc);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkCell<JSExternalString>(cell, trc);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkCell<JSShortString>(cell, trc);
        break;
      case FINALIZE_FUNCTION:
        test = MarkCell<JSFunction>(cell, trc);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkCell<JSXML>(cell, trc);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    uint32 thingKind;
    return MarkIfGCThingWord(trc, w, thingKind);
}
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and to leave valgrind's view of
     * the memory unaltered, we mark the argument, a copy of the original
     * word, as memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    uint32 thingKind;
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    ConservativeGCTest test =
#endif
    MarkIfGCThingWord(trc, w, thingKind);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (test == CGCT_VALID || test == CGCT_VALIDWITHOFFSET) {
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
            jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
            jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif
            void *thing = (test == CGCT_VALIDWITHOFFSET)
                          ? GetAlignedThing((void *)payload, thingKind)
                          : (void *)payload;
            GCMarker::ConservativeRoot root = {thing, thingKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
    }
#endif

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}
static void
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
{
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; ++i)
        MarkWordConservatively(trc, *i);
}

static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const jsuword *begin = beginv->payloadWord();
    const jsuword *end = endv->payloadWord();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}

static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
} /* namespace js */

#ifdef DEBUG
static void
CheckLeakedRoots(JSRuntime *rt);
#endif

void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

    /* Delete all remaining Compartments. Ideally only the defaultCompartment should be left. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        comp->finishArenaLists();
        delete comp;
    }
    rt->compartments.clear();
    rt->defaultCompartment = NULL;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();

#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish(rt);
#endif

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}

JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}

JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;

#ifdef DEBUG

static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
                    "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
                    "                   This root may point to freed memory. Objects reachable\n"
                    "                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
                    "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
                    "                   These roots may point to freed memory. Objects reachable\n"
                    "                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}

void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}

#endif /* DEBUG */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);
    int ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);
}

void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxTrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(gcMaxBytes) < maxTrigger) ? gcMaxBytes : size_t(maxTrigger);
}
void
FreeLists::purge()
{
    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    for (FreeCell ***p = finalizables; p != JS_ARRAY_END(finalizables); ++p)
        *p = NULL;
}

class JSShortString;

ArenaList *
GetFinalizableArenaList(JSCompartment *c, unsigned thingKind) {
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return &c->arenas[thingKind];
}
#ifdef DEBUG
bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
#endif
inline bool
NeedLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1)
        return true;
#endif
    return !!rt->gcIsNeeded;
}

/*
 * Return false only if the GC ran but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
    Conditionally<AutoUnlockDefaultCompartment>
        unlockDefaultCompartmentIf(cx->compartment == rt->defaultCompartment &&
                                   rt->defaultCompartmentIsLocked, cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    js_GC(cx, GC_NORMAL);

    return rt->gcBytes < rt->gcMaxBytes;
}
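
/*
 * Refill the compartment's free list for |thingKind|: first try an existing
 * arena that still has free things, then allocate a new arena, running a
 * last ditch GC beforehand if one is needed and allowed.
 */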
template <typename T>
inline bool
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JSCompartment *compartment = cx->compartment;
    JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind],
                 !*compartment->freeLists.finalizables[thingKind]);

    JS_ASSERT(!cx->runtime->gcRunning);
    if (cx->runtime->gcRunning)
        return false;

    bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
    do {
        if (canGC && JS_UNLIKELY(NeedLastDitchGC(cx))) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (compartment->freeLists.finalizables[thingKind])
                return true;
            canGC = false;
        }

        ArenaList *arenaList = GetFinalizableArenaList(compartment, thingKind);
        Arena<T> *a = reinterpret_cast<Arena<T> *>(arenaList->getNextWithFreeList());
        if (a) {
            JS_ASSERT(a->header()->freeList);
            JS_ASSERT(sizeof(T) == a->header()->thingSize);
            compartment->freeLists.populate(a, thingKind);
            return true;
        }

        /*
         * If the allocation fails rt->gcIsNeeded will be set and we will run
         * the GC on the next loop iteration if the last ditch GC is allowed.
         */
        a = AllocateArena<T>(cx, thingKind);
        if (a) {
            compartment->freeLists.populate(a, thingKind);
            arenaList->insert((Arena<FreeCell> *) a);
            a->getMarkingDelay()->init();
            return true;
        }
    } while (canGC);

    METER(cx->runtime->gcStats.fail++);
    js_ReportOutOfMemory(cx);
    return false;
}
bool
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        return RefillTypedFreeList<JSObject>(cx, thingKind);
      case FINALIZE_OBJECT2:
        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
      case FINALIZE_OBJECT4:
        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
      case FINALIZE_OBJECT8:
        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
      case FINALIZE_OBJECT12:
        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
      case FINALIZE_OBJECT16:
        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
      case FINALIZE_STRING:
        return RefillTypedFreeList<JSString>(cx, thingKind);
      case FINALIZE_EXTERNAL_STRING:
        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
      case FINALIZE_SHORT_STRING:
        return RefillTypedFreeList<JSShortString>(cx, thingKind);
      case FINALIZE_FUNCTION:
        return RefillTypedFreeList<JSFunction>(cx, thingKind);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return RefillTypedFreeList<JSXML>(cx, thingKind);
#endif
      default:
        JS_NOT_REACHED("bad finalize kind");
        return false;
    }
}
intN
js_GetExternalStringGCType(JSString *str) {
    return GetExternalStringGCType((JSExternalString *)str);
}

uint32
js_GetGCThingTraceKind(void *thing) {
    return GetGCThingTraceKind(thing);
}
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    GCLocks *locks;

    if (!thing)
        return true;
    locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (!p) {
        if (!locks->add(p, thing, 1))
            return false;
    } else {
        JS_ASSERT(p->value >= 1);
        p->value++;
    }

    METER(rt->gcStats.lock++);
    return true;
}

void
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return;

    AutoLockGC lock(rt);
    GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);

    if (p) {
        rt->gcPoke = true;
        if (--p->value == 0)
            rt->gcLocksHash.remove(p);
    }

    METER(rt->gcStats.unlock++);
}
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        MarkChildren(trc, (JSObject *)thing);
        break;
      }

      case JSTRACE_STRING: {
        MarkChildren(trc, (JSString *)thing);
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        MarkChildren(trc, (JSXML *)thing);
        break;
#endif
    }
}
namespace js {

/*
 * When the native stack is low, the GC does not call JS_TraceChildren to mark
 * the reachable "children" of the thing. Rather the thing is put aside and
 * JS_TraceChildren is called later with more space on the C stack.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient native stack, the code adds a field per
 * arena. The field markingDelay->link links all arenas with delayed things
 * into a stack list with the pointer to stack top in
 * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
 */

GCMarker::GCMarker(JSContext *cx)
  : color(0), stackLimit(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterCount = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}
void
GCMarker::delayMarkingChildren(void *thing)
{
    Cell *cell = reinterpret_cast<Cell *>(thing);
    Arena<Cell> *a = cell->arena();
    JS_ASSERT(cell->isMarked());
    METER(cell->compartment()->rt->gcStats.unmarked++);
    MarkingDelay *markingDelay = a->getMarkingDelay();

    if (markingDelay->link) {
        if (markingDelay->start > (jsuword)cell)
            markingDelay->start = (jsuword)cell;
        /* Arena already scheduled to be marked again. */
        return;
    }
    markingDelay->start = (jsuword)cell;
    Arena<Cell> *tos = unmarkedArenaStackTop;
    markingDelay->link = tos ? tos : a;
    unmarkedArenaStackTop = a;
#ifdef DEBUG
    JSCompartment *comp = cell->compartment();
    markLaterCount += Arena<FreeCell>::ThingsPerArena;
    METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount);
#endif
}
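
/*
 * Rescan the arena starting from the first delayed cell
 * (markingDelay->start) and mark the children of every thing that is itself
 * already marked.
 */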
template<typename T>
void
Arena<T>::markDelayedChildren(JSTracer *trc)
{
    T *thing = (T *)getMarkingDelay()->start;
    T *thingsEnd = &t.things[ThingsPerArena-1].t;
    JS_ASSERT(thing == getAlignedThing(thing));
    while (thing <= thingsEnd) {
        if (thing->asCell()->isMarked())
            MarkChildren(trc, thing);

        thing++;
    }
}
void
GCMarker::markDelayedChildren()
{
    while (Arena<Cell> *a = unmarkedArenaStackTop) {
        /*
         * markingDelay->link == current arena indicates last arena on stack.
         * If marking gets delayed at the same arena again, the arena is pushed
         * again in delayMarkingChildren. markingDelay->link has to be cleared,
         * otherwise the arena is not pushed again.
         */
        MarkingDelay *markingDelay = a->getMarkingDelay();
        unmarkedArenaStackTop = (markingDelay->link != a)
                                ? markingDelay->link
                                : NULL;
        markingDelay->link = NULL;
#ifdef DEBUG
        markLaterCount -= Arena<FreeCell>::ThingsPerArena;
#endif

        switch (a->header()->thingKind) {
          case FINALIZE_OBJECT0:
            reinterpret_cast<Arena<JSObject> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT2:
            reinterpret_cast<Arena<JSObject_Slots2> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT4:
            reinterpret_cast<Arena<JSObject_Slots4> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT8:
            reinterpret_cast<Arena<JSObject_Slots8> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT12:
            reinterpret_cast<Arena<JSObject_Slots12> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT16:
            reinterpret_cast<Arena<JSObject_Slots16> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_STRING:
            reinterpret_cast<Arena<JSString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_EXTERNAL_STRING:
            reinterpret_cast<Arena<JSExternalString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_SHORT_STRING:
            JS_ASSERT(false);
            break;
          case FINALIZE_FUNCTION:
            reinterpret_cast<Arena<JSFunction> *>(a)->markDelayedChildren(this);
            break;
#if JS_HAS_XML_SUPPORT
          case FINALIZE_XML:
            reinterpret_cast<Arena<JSXML> *>(a)->markDelayedChildren(this);
            break;
#endif
          default:
            JS_NOT_REACHED("wrong thingkind");
        }
    }
    JS_ASSERT(markLaterCount == 0);
    JS_ASSERT(!unmarkedArenaStackTop);
}
} /* namespace js */

static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr) {
        if (!JSString::isStatic(ptr)) {
            bool root_points_to_gcArenaList = false;
            JSCompartment **c = trc->context->runtime->compartments.begin();
            for (; c != trc->context->runtime->compartments.end(); ++c) {
                JSCompartment *comp = *c;
                if (checkArenaListsForThing(comp, ptr)) {
                    root_points_to_gcArenaList = true;
                    break;
                }
            }
            if (!root_points_to_gcArenaList && entry.value.name) {
                fprintf(stderr,
                        "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
                        "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
                        "The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(root_points_to_gcArenaList);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}

void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;

    if (fp->hasCallObj())
        MarkObject(trc, fp->callObj(), "call");
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    if (fp->isScriptFrame()) {
        js_TraceScript(trc, fp->script());
        fp->script()->compartment->active = true;
    }

    MarkValue(trc, fp->returnValue(), "rval");
}
void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}

void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    js::gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SHAPE:
        static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
        return;

      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
            MarkId(trc, desc.id, "PropDesc::id");
        }
        return;
      }

      case DESCRIPTOR: {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;

      case VALVECTOR: {
        Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }
    }

    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
namespace js {

void
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");

    if (acx->compartment)
        acx->compartment->marked = true;

#ifdef JS_TRACER
    TracerState* state = acx->tracerState;
    while (state) {
        if (state->nativeVp)
            MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
        state = state->prev;
    }
#endif
}
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that we do not have at this point unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter;
#if 1
    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT_IF(!acx->thread->data.requestDepth, acx->thread->suspendCount);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
            void *thing;
            switch (gcr->tag) {
              default:
                continue;
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.toGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
            }

            if (JSString::isStatic(thing))
                continue;

            if (!reinterpret_cast<Cell *>(thing)->isMarked()) {
                ConservativeGCTest test = MarkIfGCThingWord(trc, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        " Aborting.\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
                JS_ASSERT(false);
                abort();
            }
        }
    }
#endif

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);
    js_MarkTraps(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    if (rt->emptyArgumentsShape)
        rt->emptyArgumentsShape->trace(trc);
    if (rt->emptyBlockShape)
        rt->emptyBlockShape->trace(trc);
    if (rt->emptyCallShape)
        rt->emptyCallShape->trace(trc);
    if (rt->emptyDeclEnvShape)
        rt->emptyDeclEnvShape->trace(trc);
    if (rt->emptyEnumeratorShape)
        rt->emptyEnumeratorShape->trace(trc);
    if (rt->emptyWithShape)
        rt->emptyWithShape->trace(trc);

    /*
     * We mark extra roots last so the cycle collector can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef DEBUG
    if (rt->functionMeterFilename) {
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}
void
TriggerGC(JSRuntime *rt)
{
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    TriggerAllOperationCallbacks(rt);
}

} /* namespace js */
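
/*
 * Walk each list in the compartment's scriptsToGC table and destroy the
 * scripts queued there for delayed destruction.
 */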
void
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
{
    JSScript **listp, *script;

    for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
        listp = &comp->scriptsToGC[i];
        while ((script = *listp) != NULL) {
            *listp = script->u.nextToGC;
            script->u.nextToGC = NULL;
            js_DestroyScriptFromGC(cx, script);
        }
    }
}
/*
 * This function is called from js_FinishAtomState to force the finalization
 * of the permanently interned strings when cx is not available.
 */
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string cannot be external and must be valid. */
        JS_ASSERT(str->asCell()->arena()->header()->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        unsigned thingKind = str->asCell()->arena()->header()->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so is not valid. */
        jschar *chars = const_cast<jschar *>(str->flatChars());
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            rt->stringMemoryUsed -= str->length() * 2;
            rt->free(chars);
        } else if (thingKind == FINALIZE_EXTERNAL_STRING) {
            ((JSExternalString *)str)->finalize();
        }
    }
}
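
/*
 * Sweep one arena list: walk each arena's things and its address-sorted free
 * list in lock-step, finalizing everything that is neither free nor marked,
 * rebuilding the free list as we go, and returning arenas whose things are
 * all dead back to their chunk.
 */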
template<typename T>
static void
FinalizeArenaList(JSCompartment *comp, JSContext *cx, unsigned thingKind)
{
    JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask));
    ArenaList *arenaList = GetFinalizableArenaList(comp, thingKind);
    Arena<FreeCell> **ap = &arenaList->head;
    Arena<T> *a = (Arena<T> *) *ap;
    if (!a)
        return;
    JS_ASSERT(sizeof(T) == arenaList->head->header()->thingSize);

#ifdef JS_GCMETER
    uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
#endif
    for (;;) {
        ArenaHeader *header = a->header();
        JS_ASSERT_IF(header->hasFreeThings, header->freeList);
        JS_ASSERT(header->thingKind == thingKind);
        JS_ASSERT(!a->getMarkingDelay()->link);
        JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0);
        JS_ASSERT(a->header()->isUsed);

        FreeCell *nextFree = header->freeList;
        FreeCell *freeList = NULL;
        FreeCell **tailp = &freeList;
        bool allClear = true;

        T *thingsEnd = &a->t.things[a->ThingsPerArena-1].t;
        T *thing = &a->t.things[0].t;
        thingsEnd++;

        if (!nextFree) {
            nextFree = thingsEnd->asFreeCell();
        } else {
            JS_ASSERT(thing->asCell() <= nextFree);
            JS_ASSERT(nextFree < thingsEnd->asCell());
        }

        for (;; thing++) {
            if (thing->asCell() == nextFree) {
                if (thing == thingsEnd)
                    break;
                nextFree = nextFree->link;
                if (!nextFree) {
                    nextFree = thingsEnd->asFreeCell();
                } else {
                    JS_ASSERT(thing->asCell() < nextFree);
                    JS_ASSERT(nextFree < thingsEnd->asFreeCell());
                }
            } else if (thing->asCell()->isMarked()) {
                allClear = false;
                METER(nthings++);
                continue;
            } else {
                thing->finalize(cx);
#ifdef DEBUG
                memset(thing, JS_FREE_PATTERN, sizeof(T));
#endif
            }
            FreeCell *t = thing->asFreeCell();
            *tailp = t;
            tailp = &t->link;
        }

#ifdef DEBUG
        /* Check that the free list is consistent. */
        unsigned nfree = 0;
        if (freeList) {
            JS_ASSERT(tailp != &freeList);
            FreeCell *t = freeList;
            for (;;) {
                ++nfree;
                if (&t->link == tailp)
                    break;
                JS_ASSERT(t < t->link);
                t = t->link;
            }
        }
#endif
        if (allClear) {
            /*
             * Forget just assembled free list head for the arena and
             * add the arena itself to the destroy list.
             */
            JS_ASSERT(nfree == a->ThingsPerArena);
            JS_ASSERT((T *)tailp == &a->t.things[a->ThingsPerArena-1].t);
            *tailp = NULL;
            header->freeList = freeList;
#ifdef DEBUG
            header->hasFreeThings = true;
#endif
            *ap = (header->next);
            JS_ASSERT((T *)header->freeList == &a->t.things[0].t);
            a->chunk()->releaseArena(a);
            METER(nkilledarenas++);
        } else {
            JS_ASSERT(nfree < a->ThingsPerArena);
            *tailp = NULL;
            header->freeList = freeList;
#ifdef DEBUG
            header->hasFreeThings = (nfree == 0) ? false : true;
#endif
            ap = &header->next;
            METER(nlivearenas++);
        }

        if (!(a = (Arena<T> *) *ap))
            break;
    }
    arenaList->cursor = arenaList->head;
    METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings));
}
#ifdef JS_THREADSAFE

namespace js {

bool
GCHelperThread::init(JSRuntime *rt)
{
    if (!(wakeup = PR_NewCondVar(rt->gcLock)))
        return false;
    if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
        return false;

    thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
                             PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    return !!thread;
}
void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}
/* static */
void
GCHelperThread::threadMain(void *arg)
{
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}

void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}
void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}

void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
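
/*
 * Batch pointers into fixed-size arrays so the background thread can free
 * them in bulk; if the batch cannot be grown, free the pointer immediately.
 */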
JS_FRIEND_API(void)
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    js_free(ptr);
}
void
GCHelperThread::doSweep()
{
    if (freeCursor) {
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}

} /* namespace js */

#endif /* JS_THREADSAFE */
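
/*
 * Sweep the runtime's compartment vector in place: compact live compartments
 * to the front and destroy (or sweep) the unmarked ones.
 */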
2031 static void
2032 SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
2034 JSRuntime *rt = cx->runtime;
2035 JSCompartmentCallback callback = rt->compartmentCallback;
2036 JSCompartment **read = rt->compartments.begin();
2037 JSCompartment **end = rt->compartments.end();
2038 JSCompartment **write = read;
2040 /* Delete defaultCompartment only during runtime shutdown */
2041 rt->defaultCompartment->marked = true;
    /*
     * Figure out how much JIT code should be released from inactive
     * compartments. If multiple eighth-lives have passed, compound the
     * release interval linearly; once enough time has passed, all inactive
     * JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }
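    /*
     * Worked example (added for clarity): with T = rt->gcJitReleaseTime on
     * entry and E = JIT_SCRIPT_EIGHTH_LIFETIME, the loop above runs
     * floor((now - T) / E) + 1 times. One overdue eighth-life leaves
     * releaseInterval at 7, three leave it at 5, and after seven or more
     * the counter bottoms out at 1, the release time snaps forward past
     * now, and all inactive JIT code becomes eligible for release.
     */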
    while (read < end) {
        JSCompartment *compartment = *read++;
        if (compartment->marked) {
            compartment->marked = false;
            *write++ = compartment;

            /* Remove dead wrappers from the compartment map. */
            compartment->sweep(cx, releaseInterval);
        } else {
            JS_ASSERT(compartment->freeLists.isEmpty());
            if (compartment->arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT) {
                if (callback)
                    (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
                if (compartment->principals)
                    JSPRINCIPALS_DROP(cx, compartment->principals);
                delete compartment;
            } else {
                compartment->marked = false;
                *write++ = compartment;
                compartment->sweep(cx, releaseInterval);
            }
        }
    }
    rt->compartments.resize(write - rt->compartments.begin());
}
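/*
 * Illustrative sketch (added; not part of the original source): the
 * read/write-pointer compaction idiom used above, reduced to a generic
 * in-place filter. Survivors slide left in order; the caller shrinks the
 * container to the returned new end.
 */
#if 0
template <class T, class Pred>
static T *
CompactInPlace(T *begin, T *end, Pred keep)
{
    T *write = begin;
    for (T *read = begin; read != end; ++read) {
        if (keep(*read))
            *write++ = *read;   /* keep this element; drop the rest */
    }
    return write;               /* new end of the live range */
}
#endif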
/*
 * Common cache invalidation and so forth that must be done before GC. Even
 * if GCUntilDone calls GC several times, this work needs to be done only
 * once.
 */
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = JS_FALSE;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT
#ifdef JS_GC_ZEAL
        || rt->gcZeal >= 1
#endif
        ) {
        rt->gcRegenShapes = true;
        rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
        rt->protoHazardShape = 0;
    }

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->purge(cx);

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }
}
/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 */
static void
MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;

    /*
     * Mark phase.
     */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    MarkRuntime(&gcmarker);
    js_MarkScriptFilenames(rt);

    /*
     * Mark children of things that caused too-deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);
#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops
     * to finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif
    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif
    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access those things even if they will be
     * freed.
     */
    for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) {
        FinalizeArenaList<JSObject>(*comp, cx, FINALIZE_OBJECT0);
        FinalizeArenaList<JSObject_Slots2>(*comp, cx, FINALIZE_OBJECT2);
        FinalizeArenaList<JSObject_Slots4>(*comp, cx, FINALIZE_OBJECT4);
        FinalizeArenaList<JSObject_Slots8>(*comp, cx, FINALIZE_OBJECT8);
        FinalizeArenaList<JSObject_Slots12>(*comp, cx, FINALIZE_OBJECT12);
        FinalizeArenaList<JSObject_Slots16>(*comp, cx, FINALIZE_OBJECT16);
        FinalizeArenaList<JSFunction>(*comp, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
        FinalizeArenaList<JSXML>(*comp, cx, FINALIZE_XML);
#endif
    }
    TIMESTAMP(sweepObjectEnd);
    for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) {
        FinalizeArenaList<JSShortString>(*comp, cx, FINALIZE_SHORT_STRING);
        FinalizeArenaList<JSString>(*comp, cx, FINALIZE_STRING);
        FinalizeArenaList<JSExternalString>(*comp, cx, FINALIZE_EXTERNAL_STRING);
    }
    TIMESTAMP(sweepStringEnd);

    SweepCompartments(cx, gckind);

    /*
     * Sweep the runtime's property trees after finalizing objects, in case
     * any had watchpoints referencing tree nodes.
     */
    js::PropertyTree::sweepShapes(cx);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    js_SweepScriptFilenames(rt);

    /*
     * Destroy arenas only after we have finished sweeping, so finalizers
     * can safely use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
    {
        extern void DumpSrcNoteSizeHist();
        DumpSrcNoteSizeHist();
        printf("GC HEAP SIZE %lu\n", (unsigned long) rt->gcBytes);
    }
#endif

#ifdef JS_SCOPE_DEPTH_METER
    DumpScopeDepthMeter(rt);
#endif

#ifdef JS_DUMP_LOOP_STATS
    DumpLoopStats(rt);
#endif
}
#ifdef JS_THREADSAFE

/*
 * If the GC is running and we're called on another thread, wait for this GC
 * activation to finish. We can safely wait here without fear of deadlock (in
 * the case where we are called within a request on another thread's context)
 * because the GC doesn't set rt->gcRunning until after it has waited for all
 * active requests to end.
 *
 * We call js_CurrentThreadId() only after checking rt->gcRunning to avoid an
 * expensive call when the GC is not running.
 */
void
js_WaitForGC(JSRuntime *rt)
{
    if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
        do {
            JS_AWAIT_GC_DONE(rt);
        } while (rt->gcRunning);
    }
}
/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify
             * the GC. Otherwise the GC may start immediately after we
             * unlock while this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /*
     * Check that we did not release the GC lock above and let the GC finish
     * before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for the GC to finish on the other thread, even if requestDebit
     * is 0 and even if the GC has not started yet because the gcThread is
     * waiting in AutoGCSession. This ensures that js_GC never returns
     * without a full GC cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    rt->requestCount += requestDebit;
}

#endif
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    JSContext *context;

    /* Disable the copy constructor and assignment. */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
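/*
 * Illustrative sketch (added; not part of the original source): the RAII
 * shape of a stop-the-world section built from AutoLockGC, AutoGCSession,
 * and AutoUnlockGC. Compare SetProtoCheckingForCycles and TraceRuntime
 * below; the function name here is hypothetical.
 */
#if 0
static void
StopTheWorldSection(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);            /* take rt->gcLock */
    AutoGCSession gcsession(cx);    /* rendezvous: become the GC thread */
    AutoUnlockGC unlock(rt);        /* world is stopped; drop the lock */

    /* ... work requiring exclusive access to the runtime ... */

    /* Destructors re-lock, end the session, and notify waiting threads. */
}
#endif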
/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    if (rt->gcThread && rt->gcThread != cx->thread)
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform
     * their callback at some later point, which then will be unnecessary,
     * but harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called when
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }
#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here, within the GC lock and after waiting for any
     * active requests to end, so that js_WaitForGC called outside a request
     * does not block on a GC that is still waiting for other requests to
     * finish with rt->gcThread set, while JS_BeginRequest does perform such
     * a wait.
     */
    rt->gcRunning = true;
}
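/*
 * Worked example (added for clarity): suppose three threads hold requests,
 * including the would-be GC thread. The constructor above debits its own
 * request (requestDebit == 1), drops rt->requestCount from 3 to 2, and
 * sleeps in JS_AWAIT_REQUEST_DONE until the other two requests end and the
 * count reaches 0; it then restores its debit and proceeds with the world
 * stopped.
 */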
/* End the current GC session and allow other threads to proceed. */
AutoGCSession::~AutoGCSession()
{
    JSRuntime *rt = context->runtime;
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread);
    rt->gcThread = NULL;
    JS_NOTIFY_GC_DONE(rt);
#endif
}
/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
    if (rt->gcMarkAndSweep) {
        rt->gcPoke = true;
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    METER(rt->gcStats.poke++);

    bool firstRun = true;
    rt->gcMarkAndSweep = true;
#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->gcBackgroundFree);
#endif
    do {
        rt->gcPoke = false;

        AutoUnlockGC unlock(rt);
        if (firstRun) {
            PreGCCleanup(cx, gckind);
            TIMESTAMP(startMark);
            firstRun = false;
        }
        MarkAndSweep(cx, gckind GCTIMER_ARG);

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

#ifdef JS_THREADSAFE
    JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
    cx->gcBackgroundFree = NULL;
    rt->gcHelperThread.startBackgroundSweep(rt);
#endif

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
}
void
js_GC(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    RecordNativeStackTopForGC(cx);

#ifdef DEBUG
    int stackDummy;
# if JS_STACK_GROWTH_DIRECTION > 0
    /* cx->stackLimit is set to jsuword(-1) by default. */
    JS_ASSERT_IF(cx->stackLimit != jsuword(-1),
                 JS_CHECK_STACK_SIZE(cx->stackLimit + (1 << 14), &stackDummy));
# else
    /*
     * Require 16K (1 << 14) of headroom because it is possible to perform a
     * GC during an overrecursion report.
     */
    JS_ASSERT_IF(cx->stackLimit, JS_CHECK_STACK_SIZE(cx->stackLimit - (1 << 14), &stackDummy));
# endif
#endif

    GCTIMER_BEGIN();

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);

            GCUntilDone(cx, gckind GCTIMER_ARG);
        }

        /* Re-sample the callback, as finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until the JSGC_END callback stops creating
         * garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);

#ifdef JS_GCMETER
    js_DumpGCStats(cx->runtime, stderr);
#endif
    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}
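/*
 * Illustrative sketch (added; not part of the original source): a GC
 * callback that follows the protocol js_GC relies on above. Returning false
 * from the JSGC_BEGIN invocation defers the collection (except on the last
 * context); the JSGC_END invocation runs after finalization. The deferral
 * predicate is hypothetical; the JSGCCallback signature and JSGCStatus
 * values are real API.
 */
#if 0
extern bool InLatencyCriticalSection();     /* hypothetical embedder policy */

static JSBool
MyGCCallback(JSContext *cx, JSGCStatus status)
{
    switch (status) {
      case JSGC_BEGIN:
        /* Defer the GC while in latency-critical embedder code. */
        return InLatencyCriticalSection() ? JS_FALSE : JS_TRUE;
      case JSGC_END:
        /* Safe point after sweeping: refresh embedder caches, etc. */
        return JS_TRUE;
      default:
        return JS_TRUE;
    }
}
#endif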
namespace js {
namespace gc {

bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
    /*
     * This function cannot be called during the GC and always requires a
     * request.
     */
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread->data.requestDepth);

    /*
     * This is only necessary if AutoGCSession below would wait for GC to
     * finish on another thread, but to capture the minimal stack space and
     * for code simplicity we do it here unconditionally.
     */
    RecordNativeStackTopForGC(cx);
#endif

    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
    AutoUnlockGC unlock(rt);

    bool cycle = false;
    for (JSObject *obj2 = proto; obj2;) {
        if (obj2 == obj) {
            cycle = true;
            break;
        }
        obj2 = obj2->getProto();
    }
    if (!cycle)
        obj->setProto(proto);

    return !cycle;
}
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = new JSCompartment(rt);
    if (!compartment || !compartment->init()) {
        delete compartment;
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            JS_ReportOutOfMemory(cx);
            return NULL;
        }
    }

    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        AutoLockGC lock(rt);
        rt->compartments.popBack();
        return NULL;
    }
    return compartment;
}

} /* namespace gc */
void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        AutoLockGC lock(rt);

        if (rt->gcThread != cx->thread) {
            AutoGCSession gcsession(cx);
            AutoUnlockGC unlock(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}

} /* namespace js */