/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
 * using malloc. It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
#include <stdlib.h>     /* for free */
#include <string.h>     /* for memset used when DEBUG */

#include "jsversion.h"
#include "jsgcchunk.h"
#include "jsstaticcheck.h"
#include "methodjit/MethodJIT.h"
#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"
#include "jsstrinlines.h"
#include "jscompartment.h"

# include <valgrind/memcheck.h>
using namespace js::gc;
/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_XML    == 2);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
/*
 * Everything we store in the heap must be a multiple of the cell size.
 */
JS_STATIC_ASSERT(sizeof(JSString)         % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSShortString)    % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSObject)         % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSFunction)       % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSXML)            % sizeof(FreeCell) == 0);
/*
 * All arenas must be exactly 4k.
 */
JS_STATIC_ASSERT(sizeof(Arena<JSString>)         == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSExternalString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSShortString>)    == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSObject>)         == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSFunction>)       == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSXML>)            == 4096);
#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif

# define METER_UPDATE_MAX(maxLval, rval) \
    METER_IF((maxLval) < (rval), (maxLval) = (rval))
/* This array should be const, but that doesn't link right under GCC. */
FinalizeKind slotsToThingKind[] = {
    /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
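
/*
 * Example of the mapping above: an object that needs 3 fixed slots does not
 * fit a FINALIZE_OBJECT2 thing, so the table rounds it up to FINALIZE_OBJECT4;
 * slot counts past 16 fall outside the table, which is what the
 * SLOTS_TO_THING_KIND_LIMIT assertion enforces.
 */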
/* Initialize the arena and setup the free list. */
template <typename T>
void
Arena<T>::init(JSCompartment *compartment, unsigned thingKind)
{
    aheader.compartment = compartment;
    aheader.thingKind = thingKind;
    aheader.freeList = &t.things[0].cell;
    aheader.thingSize = sizeof(T);
    aheader.isUsed = true;
    JS_ASSERT(sizeof(T) == sizeof(ThingOrCell<T>));
    ThingOrCell<T> *thing = &t.things[0];
    ThingOrCell<T> *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1];
    while (thing < last) {
        thing->cell.link = &(thing + 1)->cell;
        ++thing;
    }
    last->cell.link = NULL;
    aheader.hasFreeThings = true;
}
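
/*
 * After init() the arena's cells form a singly linked free list threaded in
 * address order, things[0] -> things[1] -> ... -> things[N-1] -> NULL, with
 * aheader.freeList pointing at things[0], so allocation can simply pop cells
 * off the head of the list.
 */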
template <typename T>
bool
Arena<T>::inFreeList(void *thing) const
{
    FreeCell *cursor = aheader.freeList;
    while (cursor) {
        JS_ASSERT(aheader.thingSize == sizeof(T));
        JS_ASSERT(!cursor->isMarked());

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return true;

        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }
    return false;
}
template <typename T>
inline ConservativeGCTest
Arena<T>::mark(T *thing, JSTracer *trc)
{
    JS_ASSERT(sizeof(T) == aheader.thingSize);

    T *alignedThing = getAlignedThing(thing);

    if (alignedThing > &t.things[ThingsPerArena-1].t || alignedThing < &t.things[0].t)
        return CGCT_NOTARENA;

    if (!aheader.isUsed || inFreeList(alignedThing))
        return CGCT_NOTLIVE;

    JS_SET_TRACING_NAME(trc, "machine stack");
    Mark(trc, alignedThing);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (alignedThing != thing)
        return CGCT_VALIDWITHOFFSET;
#endif
    return CGCT_VALID;
}
bool
checkArenaListsForThing(JSCompartment *comp, void *thing) {
    if (comp->arenas[FINALIZE_OBJECT0].arenasContainThing<JSObject>(thing) ||
        comp->arenas[FINALIZE_OBJECT2].arenasContainThing<JSObject_Slots2>(thing) ||
        comp->arenas[FINALIZE_OBJECT4].arenasContainThing<JSObject_Slots4>(thing) ||
        comp->arenas[FINALIZE_OBJECT8].arenasContainThing<JSObject_Slots8>(thing) ||
        comp->arenas[FINALIZE_OBJECT12].arenasContainThing<JSObject_Slots12>(thing) ||
        comp->arenas[FINALIZE_OBJECT16].arenasContainThing<JSObject_Slots16>(thing) ||
        comp->arenas[FINALIZE_FUNCTION].arenasContainThing<JSFunction>(thing) ||
#if JS_HAS_XML_SUPPORT
        comp->arenas[FINALIZE_XML].arenasContainThing<JSXML>(thing) ||
#endif
        comp->arenas[FINALIZE_STRING].arenasContainThing<JSString>(thing) ||
        comp->arenas[FINALIZE_EXTERNAL_STRING].arenasContainThing<JSExternalString>(thing) ||
        comp->arenas[FINALIZE_SHORT_STRING].arenasContainThing<JSShortString>(thing)) {
        return true;
    }
    return false;
}
bool
checkArenaListAllUnmarked(JSCompartment *comp) {
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
        if (comp->arenas[i].markedThingsInArenaList())
            return false;
    }
    return true;
}
void
JSCompartment::finishArenaLists()
{
    for (int i = 0; i < FINALIZE_LIMIT; i++)
        arenas[i].releaseAll();
}
void
Chunk::clearMarkBitmap()
{
    PodZero(&bitmaps[0], ArenasPerChunk);
}
void
Chunk::init(JSRuntime *rt)
{
    info.runtime = rt;
    info.age = 0;
    info.emptyArenaLists.init();
    info.emptyArenaLists.cellFreeList = &arenas[0];
    Arena<FreeCell> *arena = &arenas[0];
    Arena<FreeCell> *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1];
    while (arena < last) {
        arena->header()->next = arena + 1;
        arena->header()->isUsed = false;
        ++arena;
    }
    last->header()->next = NULL;
    last->header()->isUsed = false;
    info.numFree = ArenasPerChunk;
}

bool
Chunk::unused()
{
    return info.numFree == ArenasPerChunk;
}
bool
Chunk::hasAvailableArenas()
{
    return info.numFree > 0;
}

bool
Chunk::withinArenasRange(Cell *cell)
{
    uintptr_t addr = uintptr_t(cell);
    if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk]))
        return true;
    return false;
}
template <typename T>
Arena<T> *
Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
{
    JS_ASSERT(hasAvailableArenas());
    Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind);
    JS_ASSERT(arena->header()->isUsed);
    --info.numFree;

    JSRuntime *rt = info.runtime;
    rt->gcBytes += sizeof(Arena<T>);
    comp->gcBytes += sizeof(Arena<T>);
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp);
    METER(rt->gcStats.nallarenas++);
    return arena;
}
template <typename T>
void
Chunk::releaseArena(Arena<T> *arena)
{
    JSRuntime *rt = info.runtime;
    JSCompartment *comp = arena->header()->compartment;
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);
    JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>));
    JS_ASSERT(comp->gcBytes >= sizeof(Arena<T>));

    rt->gcBytes -= sizeof(Arena<T>);
    comp->gcBytes -= sizeof(Arena<T>);
    info.emptyArenaLists.insert((Arena<Cell> *)arena);
    arena->header()->isUsed = false;
    ++info.numFree;
}

bool
Chunk::expire()
{
    if (!unused())
        return false;
    return info.age++ > MaxAge;
}
static jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
    JS_ATOMIC_INCREMENT(&newChunkCount);
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}
static void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
static Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *p = (Chunk *)rt->gcChunkAllocator->alloc();
    JS_ATOMIC_INCREMENT(&newChunkCount);
    METER_IF(p, rt->gcStats.nchunks++);
    return p;
}
static void
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
{
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
static Chunk *
PickChunk(JSRuntime *rt)
{
    Chunk *chunk;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
        if (r.front()->hasAvailableArenas())
            return r.front();
    }

    chunk = AllocateGCChunk(rt);
    if (!chunk)
        return NULL;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    if (!rt->gcChunkSet.add(p, chunk)) {
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->init(rt);
    return chunk;
}
static void
ExpireGCChunks(JSRuntime *rt)
{
    /* Remove unused chunks. */
    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        Chunk *chunk = e.front();
        JS_ASSERT(chunk->info.runtime == rt);
        if (chunk->expire()) {
            e.removeFront();
            ReleaseGCChunk(rt, chunk);
        }
    }
}
template <typename T>
static Arena<T> *
AllocateArena(JSContext *cx, unsigned thingKind)
{
    JSRuntime *rt = cx->runtime;
    Chunk *chunk = cx->compartment->chunk;
    if (!chunk || !chunk->hasAvailableArenas()) {
        chunk = PickChunk(rt);
        if (!chunk)
            return NULL;
        cx->compartment->chunk = chunk;
    }
    return chunk->allocateArena<T>(cx->compartment, thingKind);
}
bool
IsAboutToBeFinalized(JSContext *cx, void *thing)
{
    if (JSString::isStatic(thing))
        return false;

    JSCompartment *thingCompartment = reinterpret_cast<Cell *>(thing)->compartment();
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt == thingCompartment->rt);
    if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
        return false;

    return !reinterpret_cast<Cell *>(thing)->isMarked();
}
JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uintN color = BLACK)
{
    AssertValidColor(thing, color);
    return reinterpret_cast<Cell *>(thing)->isMarked(color);
}
/*
 * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
 * JIT code is discarded in inactive compartments, regardless of how often that
 * code runs.
 */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;
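
/*
 * In concrete terms, 120 * 1000 * 1000 microseconds is 120 seconds, so one
 * eighth-life is two minutes; a compartment that stays inactive for roughly
 * 8 * 2 = 16 minutes becomes eligible to have all of its JIT code released
 * (see SweepCrossCompartmentWrappers below).
 */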
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcEmptyArenaPoolLifespan = 30000;

    rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);

    rt->atomsCompartment->setGCLastBytes(8192);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);

    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;

    METER(PodZero(&rt->gcStats));
    return true;
}
template <typename T>
static inline ConservativeGCTest
MarkCell(Cell *cell, JSTracer *trc)
{
    return GetArena<T>(cell)->mark((T *)cell, trc);
}
/*
 * Returns CGCT_VALID or CGCT_VALIDWITHOFFSET and marks the thing if the word
 * can be a live GC thing and sets thingKind accordingly. Otherwise returns
 * the reason for rejection.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind)
{
    JSRuntime *rt = trc->context->runtime;

    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Cell *cell = reinterpret_cast<Cell *>(payload);
    Chunk *chunk = cell->chunk();

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    if (!chunk->withinArenasRange(cell))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = cell->arena()->header();

    if (!aheader->isUsed)
        return CGCT_FREEARENA;

    ConservativeGCTest test;
    thingKind = aheader->thingKind;

    switch (thingKind) {
      case FINALIZE_OBJECT0:
        test = MarkCell<JSObject>(cell, trc);
        break;
      case FINALIZE_OBJECT2:
        test = MarkCell<JSObject_Slots2>(cell, trc);
        break;
      case FINALIZE_OBJECT4:
        test = MarkCell<JSObject_Slots4>(cell, trc);
        break;
      case FINALIZE_OBJECT8:
        test = MarkCell<JSObject_Slots8>(cell, trc);
        break;
      case FINALIZE_OBJECT12:
        test = MarkCell<JSObject_Slots12>(cell, trc);
        break;
      case FINALIZE_OBJECT16:
        test = MarkCell<JSObject_Slots16>(cell, trc);
        break;
      case FINALIZE_STRING:
        test = MarkCell<JSString>(cell, trc);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkCell<JSExternalString>(cell, trc);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkCell<JSShortString>(cell, trc);
        break;
      case FINALIZE_FUNCTION:
        test = MarkCell<JSFunction>(cell, trc);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkCell<JSXML>(cell, trc);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
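
/*
 * The checks above form a cheap-to-expensive filter for a candidate word:
 * tag bits (CGCT_LOWBITSET), chunk membership (CGCT_NOTCHUNK), arena range
 * (CGCT_NOTARENA), arena liveness (CGCT_FREEARENA), and finally the per-kind
 * Arena<T>::mark, which also rejects cells sitting on the free list. Only
 * CGCT_VALID and CGCT_VALIDWITHOFFSET results cause anything to be marked.
 */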
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    uint32 thingKind;
    return MarkIfGCThingWord(trc, w, thingKind);
}
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));

    uint32 thingKind;
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    ConservativeGCTest test =
#endif
    MarkIfGCThingWord(trc, w, thingKind);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (test == CGCT_VALID || test == CGCT_VALIDWITHOFFSET) {
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
            jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
            jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif
            void *thing = (test == CGCT_VALIDWITHOFFSET)
                          ? GetAlignedThing((void *)payload, thingKind)
                          : (void *)payload;
            GCMarker::ConservativeRoot root = {thing, thingKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
    }
#endif

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}
static void
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
{
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; ++i)
        MarkWordConservatively(trc, *i);
}
static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const jsuword *begin = beginv->payloadWord();
    const jsuword *end = endv->payloadWord();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
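
/*
 * Besides the native stack itself, MarkThreadDataConservatively also scans
 * the registerSnapshot captured by recordStackTop below, since at the time
 * of a GC the only reference to a live GC thing may be sitting in a machine
 * register rather than in a stack slot.
 */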
void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
static void
CheckLeakedRoots(JSRuntime *rt);
void
js_FinishGC(JSRuntime *rt)
{
    JS_DumpArenaStats(stdout);
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);

    /* Delete all remaining Compartments. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        comp->finishArenaLists();
        js_delete(comp);
    }
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();

    rt->gcHelperThread.finish(rt);

    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}
JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}

JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}
JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
                    "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
                    "                   This root may point to freed memory. Objects reachable\n"
                    "                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
                    "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
                    "                   These roots may point to freed memory. Objects reachable\n"
                    "                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}
void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);

    uint32 ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);

    for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) {
        (*c)->setGCLastBytes(gcLastBytes);
    }
}
void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(gcMaxBytes) < maxtrigger) ? gcMaxBytes : size_t(maxtrigger);
}
void
JSCompartment::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(rt->gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(rt->gcMaxBytes) < maxtrigger) ? rt->gcMaxBytes : size_t(maxtrigger);
}
/*
 * Return the free list back to the arena so the GC finalization will not
 * run the finalizers over uninitialized bytes from free things.
 */
for (FreeCell ***p = finalizables; p != JS_ARRAY_END(finalizables); ++p)
    *p = NULL;

class JSShortString;
static inline ArenaList *
GetFinalizableArenaList(JSCompartment *c, unsigned thingKind) {
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return &c->arenas[thingKind];
}
static inline bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
inline bool
NeedLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    if (rt->gcZeal >= 1)
        return true;
    return rt->gcIsNeeded;
}
/*
 * Return false only if the GC ran but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
    Conditionally<AutoUnlockAtomsCompartment>
        unlockAtomsCompartmenIf(cx->compartment == rt->atomsCompartment &&
                                rt->atomsCompartmentIsLocked, cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);

    return rt->gcBytes < rt->gcMaxBytes;
}
template <typename T>
static bool
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JSCompartment *compartment = cx->compartment;
    JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind],
                 !*compartment->freeLists.finalizables[thingKind]);

    JS_ASSERT(!cx->runtime->gcRunning);
    if (cx->runtime->gcRunning)
        return false;

    bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
    do {
        if (canGC && JS_UNLIKELY(NeedLastDitchGC(cx))) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (compartment->freeLists.finalizables[thingKind])
                return true;
        }

        ArenaList *arenaList = GetFinalizableArenaList(compartment, thingKind);
        Arena<T> *a = reinterpret_cast<Arena<T> *>(arenaList->getNextWithFreeList());
        if (a) {
            JS_ASSERT(a->header()->freeList);
            JS_ASSERT(sizeof(T) == a->header()->thingSize);
            compartment->freeLists.populate(a, thingKind);
            return true;
        }

        /*
         * If the allocation fails rt->gcIsNeeded will be set and we will run
         * the GC on the next loop iteration if the last ditch GC is allowed.
         */
        a = AllocateArena<T>(cx, thingKind);
        if (a) {
            compartment->freeLists.populate(a, thingKind);
            arenaList->insert((Arena<FreeCell> *) a);
            a->getMarkingDelay()->init();
            return true;
        }
    } while (canGC);

    METER(cx->runtime->gcStats.fail++);
    js_ReportOutOfMemory(cx);
    return false;
}
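
/*
 * The refill path above tries, in order: an existing arena that still has a
 * non-empty free list, then a freshly allocated arena, and between loop
 * iterations (when canGC) a last-ditch GC; only when all of these fail is
 * an out-of-memory error reported.
 */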
bool
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        return RefillTypedFreeList<JSObject>(cx, thingKind);
      case FINALIZE_OBJECT2:
        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
      case FINALIZE_OBJECT4:
        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
      case FINALIZE_OBJECT8:
        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
      case FINALIZE_OBJECT12:
        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
      case FINALIZE_OBJECT16:
        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
      case FINALIZE_STRING:
        return RefillTypedFreeList<JSString>(cx, thingKind);
      case FINALIZE_EXTERNAL_STRING:
        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
      case FINALIZE_SHORT_STRING:
        return RefillTypedFreeList<JSShortString>(cx, thingKind);
      case FINALIZE_FUNCTION:
        return RefillTypedFreeList<JSFunction>(cx, thingKind);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return RefillTypedFreeList<JSXML>(cx, thingKind);
#endif
      default:
        JS_NOT_REACHED("bad finalize kind");
        return false;
    }
}
intN
js_GetExternalStringGCType(JSString *str) {
    return GetExternalStringGCType((JSExternalString *)str);
}

uint32
js_GetGCThingTraceKind(void *thing) {
    return GetGCThingTraceKind(thing);
}
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return true;

    GCLocks *locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (!p) {
        if (!locks->add(p, thing, 1))
            return false;
    } else {
        JS_ASSERT(p->value >= 1);
        p->value++;
    }

    METER(rt->gcStats.lock++);
    return true;
}

void
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return;

    AutoLockGC lock(rt);
    GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);

    if (p) {
        if (--p->value == 0)
            rt->gcLocksHash.remove(p);

        METER(rt->gcStats.unlock++);
    }
}
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        MarkChildren(trc, (JSObject *)thing);
        break;
      }

      case JSTRACE_STRING: {
        MarkChildren(trc, (JSString *)thing);
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        MarkChildren(trc, (JSXML *)thing);
        break;
#endif
    }
}
/*
 * When the native stack is low, the GC does not call JS_TraceChildren to mark
 * the reachable "children" of the thing. Rather the thing is put aside and
 * JS_TraceChildren is called later with more space on the C stack.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient native stack, the code adds a field per
 * arena. The field markingDelay->link links all arenas with delayed things
 * into a stack list with the pointer to stack top in
 * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
 */

GCMarker::GCMarker(JSContext *cx)
  : color(0), stackLimit(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
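
/*
 * Sketch of the delayed-marking stack described above: pushing arenas A and
 * then B leaves unmarkedArenaStackTop == B with B.link == A and A.link == A.
 * An arena linking to itself marks the bottom of the stack, which is why
 * markDelayedChildren must clear the link field when it pops an arena.
 */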
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
}
void
GCMarker::delayMarkingChildren(void *thing)
{
    Cell *cell = reinterpret_cast<Cell *>(thing);
    Arena<Cell> *a = cell->arena();
    JS_ASSERT(cell->isMarked());
    METER(cell->compartment()->rt->gcStats.unmarked++);
    MarkingDelay *markingDelay = a->getMarkingDelay();

    if (markingDelay->link) {
        if (markingDelay->start > (jsuword)cell)
            markingDelay->start = (jsuword)cell;
        /* Arena already scheduled to be marked again */
        return;
    }
    markingDelay->start = (jsuword)cell;
    Arena<Cell> *tos = unmarkedArenaStackTop;
    markingDelay->link = tos ? tos : a;
    unmarkedArenaStackTop = a;
    JSCompartment *comp = cell->compartment();
    markLaterCount += Arena<FreeCell>::ThingsPerArena;
    METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount);
}
template<typename T>
void
Arena<T>::markDelayedChildren(JSTracer *trc)
{
    T *thing = (T *)getMarkingDelay()->start;
    T *thingsEnd = &t.things[ThingsPerArena-1].t;
    JS_ASSERT(thing == getAlignedThing(thing));
    while (thing <= thingsEnd) {
        if (thing->asCell()->isMarked())
            MarkChildren(trc, thing);
        thing++;
    }
}
void
GCMarker::markDelayedChildren()
{
    while (Arena<Cell> *a = unmarkedArenaStackTop) {
        /*
         * markingDelay->link == current arena indicates last arena on stack.
         * If marking gets delayed at the same arena again, the arena is pushed
         * again in delayMarkingChildren. markingDelay->link has to be cleared,
         * otherwise the arena is not pushed again.
         */
        MarkingDelay *markingDelay = a->getMarkingDelay();
        unmarkedArenaStackTop = (markingDelay->link != a)
                                ? markingDelay->link
                                : NULL;
        markingDelay->link = NULL;
        markLaterCount -= Arena<FreeCell>::ThingsPerArena;

        switch (a->header()->thingKind) {
          case FINALIZE_OBJECT0:
            reinterpret_cast<Arena<JSObject> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT2:
            reinterpret_cast<Arena<JSObject_Slots2> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT4:
            reinterpret_cast<Arena<JSObject_Slots4> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT8:
            reinterpret_cast<Arena<JSObject_Slots8> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT12:
            reinterpret_cast<Arena<JSObject_Slots12> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT16:
            reinterpret_cast<Arena<JSObject_Slots16> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_STRING:
            reinterpret_cast<Arena<JSString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_EXTERNAL_STRING:
            reinterpret_cast<Arena<JSExternalString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_SHORT_STRING:
            break;
          case FINALIZE_FUNCTION:
            reinterpret_cast<Arena<JSFunction> *>(a)->markDelayedChildren(this);
            break;
#if JS_HAS_XML_SUPPORT
          case FINALIZE_XML:
            reinterpret_cast<Arena<JSXML> *>(a)->markDelayedChildren(this);
            break;
#endif
          default:
            JS_NOT_REACHED("wrong thingkind");
        }
    }
    JS_ASSERT(markLaterCount == 0);
    JS_ASSERT(!unmarkedArenaStackTop);
}

} /* namespace js */
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr && !JSString::isStatic(ptr)) {
        bool root_points_to_gcArenaList = false;
        JSCompartment **c = trc->context->runtime->compartments.begin();
        for (; c != trc->context->runtime->compartments.end(); ++c) {
            JSCompartment *comp = *c;
            if (checkArenaListsForThing(comp, ptr)) {
                root_points_to_gcArenaList = true;
                break;
            }
        }
        if (!root_points_to_gcArenaList && entry.value.name) {
            fprintf(stderr,
                    "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
                    "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
                    "The root's name is \"%s\".\n",
                    entry.value.name);
        }
        JS_ASSERT(root_points_to_gcArenaList);
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}
void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;

    if (fp->hasCallObj())
        MarkObject(trc, fp->callObj(), "call");
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    if (fp->isScriptFrame()) {
        js_TraceScript(trc, fp->script());
        fp->script()->compartment->active = true;
    }

    MarkValue(trc, fp->returnValue(), "rval");
}
void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}

void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    js::gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SHAPE:
        static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
        return;

      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
            MarkId(trc, desc.id, "PropDesc::id");
        }
        return;
      }

      case DESCRIPTOR: {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

#if JS_HAS_XML_SUPPORT
      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;
#endif

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;

      case VALVECTOR: {
        Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        Vector<const Shape *, 8> &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
        return;
      }

      case BINDINGS:
        static_cast<js::AutoBindingsRooter *>(this)->bindings.trace(trc);
        return;
    }

    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
void
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");

    if (acx->compartment)
        acx->compartment->mark(trc);
}
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that we do not have at this point unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT_IF(!acx->thread->data.requestDepth, acx->thread->suspendCount);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
            void *thing;
            switch (gcr->tag) {
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.toGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
              default:
                continue;
            }

            if (JSString::isStatic(thing))
                continue;

            if (!reinterpret_cast<Cell *>(thing)->isMarked()) {
                ConservativeGCTest test = MarkIfGCThingWord(trc, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        "\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
            }
        }
    }

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

    rt->atomsCompartment->mark(trc);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->traceMonitor.mark(trc);

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots as the last thing so it can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

    if (rt->functionMeterFilename) {
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
}
void
TriggerGC(JSRuntime *rt)
{
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = NULL;
    TriggerAllOperationCallbacks(rt);
}
void
TriggerCompartmentGC(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    JS_ASSERT(!rt->gcRunning);

    if (rt->gcZeal >= 1) {
        TriggerGC(rt);
        return;
    }

    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
        /* We can't do a compartmental GC of the default compartment. */
        TriggerGC(rt);
        return;
    }

    if (rt->gcIsNeeded) {
        /* If we need to GC more than one compartment, run a full GC. */
        if (rt->gcTriggerCompartment != comp)
            rt->gcTriggerCompartment = NULL;
        return;
    }

    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
        /* If we're using significantly more than our quota, do a full GC. */
        TriggerGC(rt);
        return;
    }

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = comp;
    TriggerAllOperationCallbacks(comp->rt);
}
void
MaybeGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

    if (rt->gcZeal > 0) {
        js_GC(cx, NULL, GC_NORMAL);
        return;
    }

    JSCompartment *comp = cx->compartment;
    if (rt->gcIsNeeded) {
        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL);
        return;
    }

    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4))
        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL);
}

} /* namespace js */
void
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
{
    JSScript **listp, *script;

    for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
        listp = &comp->scriptsToGC[i];
        while ((script = *listp) != NULL) {
            *listp = script->u.nextToGC;
            script->u.nextToGC = NULL;
            js_DestroyCachedScript(cx, script);
        }
    }
}
/*
 * This function is called from js_FinishAtomState to force the finalization
 * of the permanently interned strings when cx is not available.
 */
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string can not be external and must be valid. */
        JS_ASSERT(str->asCell()->arena()->header()->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        unsigned thingKind = str->asCell()->arena()->header()->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so is not valid. */
        jschar *chars = const_cast<jschar *>(str->flatChars());
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            rt->stringMemoryUsed -= str->length() * 2;
            rt->free(chars);
        } else if (thingKind == FINALIZE_EXTERNAL_STRING) {
            ((JSExternalString *)str)->finalize();
        }
    }
}
>
1893 FinalizeArenaList(JSCompartment
*comp
, JSContext
*cx
, unsigned thingKind
)
1895 JS_STATIC_ASSERT(!(sizeof(T
) & Cell::CellMask
));
1896 ArenaList
*arenaList
= GetFinalizableArenaList(comp
, thingKind
);
1897 Arena
<FreeCell
> **ap
= &arenaList
->head
;
1898 Arena
<T
> *a
= (Arena
<T
> *) *ap
;
1901 JS_ASSERT(sizeof(T
) == arenaList
->head
->header()->thingSize
);
1904 uint32 nlivearenas
= 0, nkilledarenas
= 0, nthings
= 0;
1907 ArenaHeader
*header
= a
->header();
1908 JS_ASSERT_IF(header
->hasFreeThings
, header
->freeList
);
1909 JS_ASSERT(header
->thingKind
== thingKind
);
1910 JS_ASSERT(!a
->getMarkingDelay()->link
);
1911 JS_ASSERT(a
->getMarkingDelay()->unmarkedChildren
== 0);
1912 JS_ASSERT(a
->header()->isUsed
);
1914 FreeCell
*nextFree
= header
->freeList
;
1915 FreeCell
*freeList
= NULL
;
1916 FreeCell
**tailp
= &freeList
;
1917 bool allClear
= true;
1919 T
*thingsEnd
= &a
->t
.things
[a
->ThingsPerArena
-1].t
;
1920 T
*thing
= &a
->t
.things
[0].t
;
1924 nextFree
= thingsEnd
->asFreeCell();
1926 JS_ASSERT(thing
->asCell() <= nextFree
);
1927 JS_ASSERT(nextFree
< thingsEnd
->asCell());
1931 if (thing
->asCell() == nextFree
) {
1932 if (thing
== thingsEnd
)
1934 nextFree
= nextFree
->link
;
1936 nextFree
= thingsEnd
->asFreeCell();
1938 JS_ASSERT(thing
->asCell() < nextFree
);
1939 JS_ASSERT(nextFree
< thingsEnd
->asFreeCell());
1941 } else if (thing
->asCell()->isMarked()) {
1946 thing
->finalize(cx
);
1948 memset(thing
, JS_FREE_PATTERN
, sizeof(T
));
1951 FreeCell
*t
= thing
->asFreeCell();
1957 /* Check that the free list is consistent. */
1960 JS_ASSERT(tailp
!= &freeList
);
1961 FreeCell
*t
= freeList
;
1964 if (&t
->link
== tailp
)
1966 JS_ASSERT(t
< t
->link
);
1973 * Forget just assembled free list head for the arena and
1974 * add the arena itself to the destroy list.
1976 JS_ASSERT(nfree
== a
->ThingsPerArena
);
1977 JS_ASSERT((T
*)tailp
== &a
->t
.things
[a
->ThingsPerArena
-1].t
);
1979 header
->freeList
= freeList
;
1981 header
->hasFreeThings
= true;
1983 *ap
= (header
->next
);
1984 JS_ASSERT((T
*)header
->freeList
== &a
->t
.things
[0].t
);
1985 a
->chunk()->releaseArena(a
);
1986 METER(nkilledarenas
++);
1988 JS_ASSERT(nfree
< a
->ThingsPerArena
);
1990 header
->freeList
= freeList
;
1992 header
->hasFreeThings
= (nfree
== 0) ? false : true;
1995 METER(nlivearenas
++);
1997 if (!(a
= (Arena
<T
> *) *ap
))
2000 arenaList
->cursor
= arenaList
->head
;
2001 METER(UpdateCompartmentStats(comp
, thingKind
, nlivearenas
, nkilledarenas
, nthings
));
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSObject>(this, cx, FINALIZE_OBJECT0);
    FinalizeArenaList<JSObject_Slots2>(this, cx, FINALIZE_OBJECT2);
    FinalizeArenaList<JSObject_Slots4>(this, cx, FINALIZE_OBJECT4);
    FinalizeArenaList<JSObject_Slots8>(this, cx, FINALIZE_OBJECT8);
    FinalizeArenaList<JSObject_Slots12>(this, cx, FINALIZE_OBJECT12);
    FinalizeArenaList<JSObject_Slots16>(this, cx, FINALIZE_OBJECT16);
    FinalizeArenaList<JSFunction>(this, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
    FinalizeArenaList<JSXML>(this, cx, FINALIZE_XML);
#endif
}

void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSShortString>(this, cx, FINALIZE_SHORT_STRING);
    FinalizeArenaList<JSString>(this, cx, FINALIZE_STRING);
    FinalizeArenaList<JSExternalString>(this, cx, FINALIZE_EXTERNAL_STRING);
}
#ifdef JS_THREADSAFE

bool
GCHelperThread::init(JSRuntime *rt)
{
    if (!(wakeup = PR_NewCondVar(rt->gcLock)))
        return false;
    if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
        return false;

    thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
                             PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    return !!thread;
}

void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}

/* static */ void
GCHelperThread::threadMain(void *arg)
{
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}

void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}

void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}

void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}

void
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    js_free(ptr);
}

void
GCHelperThread::doSweep()
{
    if (freeCursor) {
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}

#endif /* JS_THREADSAFE */
static void
SweepCrossCompartmentWrappers(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    /*
     * Figure out how much JIT code should be released from inactive compartments.
     * If multiple eighth-lives have passed, compound the release interval linearly;
     * if enough time has passed, all inactive JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }

    /* Remove dead wrappers from the compartment map. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->sweep(cx, releaseInterval);
}
static void
SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;
    JSCompartmentCallback callback = rt->compartmentCallback;

    /* Skip the atomsCompartment. */
    JSCompartment **read = rt->compartments.begin() + 1;
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;
    JS_ASSERT(rt->compartments.length() >= 1);
    JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);

    while (read < end) {
        JSCompartment *compartment = *read++;

        /*
         * Unmarked compartments containing marked objects don't get deleted,
         * except when a LAST_CONTEXT GC is performed.
         */
        if ((!compartment->isMarked() && compartment->arenaListsAreEmpty())
            || gckind == GC_LAST_CONTEXT)
        {
            JS_ASSERT(compartment->freeLists.isEmpty());
            if (callback)
                (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            js_delete(compartment);
            continue;
        }
        *write++ = compartment;
    }
    rt->compartments.resize(write - rt->compartments.begin());
}
/*
 * Common cache invalidation and so forth that must be done before GC. Even if
 * GCUntilDone calls GC several times, this work needs to be done only once.
 */
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = false;
    rt->gcTriggerCompartment = NULL;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT) {
        rt->gcRegenShapes = true;
        rt->protoHazardShape = 0;
    }

    if (rt->gcCurrentCompartment) {
        rt->gcCurrentCompartment->purge(cx);
    } else {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->purge(cx);
    }

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }
}
static void
MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(!rt->gcRegenShapes);
    JS_ASSERT(gckind != GC_LAST_CONTEXT);
    JS_ASSERT(comp != rt->atomsCompartment);
    JS_ASSERT(!comp->isMarked());
    JS_ASSERT(comp->rt->gcMode == JSGC_MODE_COMPARTMENT);

    /*
     * Mark phase.
     */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->markCrossCompartment(&gcmarker);

    comp->mark(&gcmarker);

    MarkRuntime(&gcmarker);

    /*
     * Mark children of things that caused too deep recursion during the above
     * tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops to
     * finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif

    /* Make sure that we didn't mark an object in another compartment */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT_IF(*c != comp, checkArenaListAllUnmarked(*c));

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers can
     * continue to access the corresponding JSString* assuming that they are
     * unique. This works since the atomization API must not be called during
     * the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;

    /*
     * We finalize iterators before other objects so the iterator can use the
     * object which properties it enumerates over to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that
     * object's finalizer can access them even if they will be freed.
     */
    comp->finalizeObjectArenaLists(cx);
    TIMESTAMP(sweepObjectEnd);

    comp->finalizeStringArenaLists(cx);
    TIMESTAMP(sweepStringEnd);

    /* Make sure that we didn't mark a Shape in another compartment. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT_IF(*c != comp, (*c)->propertyTree.checkShapesAllUnmarked(cx));

    PropertyTree::dumpShapes(cx);

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    TIMESTAMP(sweepDestroyEnd);

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
}

/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 */
static void
MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;

    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    MarkRuntime(&gcmarker);
    js_MarkScriptFilenames(rt);

    /*
     * Mark children of things that caused too deep recursion during the above
     * tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops to
     * finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif

    /*
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers can
     * continue to access the corresponding JSString* assuming that they are
     * unique. This works since the atomization API must not be called during
     * the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;

    SweepCrossCompartmentWrappers(cx);

    /*
     * We finalize iterators before other objects so the iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access them even if they will be freed.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
        (*c)->finalizeObjectArenaLists(cx);

    TIMESTAMP(sweepObjectEnd);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
        (*c)->finalizeStringArenaLists(cx);

    TIMESTAMP(sweepStringEnd);

    /*
     * Sweep the runtime's property trees after finalizing objects, in case
     * any had watchpoints referencing tree nodes.
     *
     * Do this before sweeping compartments, so that we sweep all shapes in
     * unreachable compartments.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->propertyTree.sweepShapes(cx);

    PropertyTree::dumpShapes(cx);

    SweepCompartments(cx, gckind);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    js_SweepScriptFilenames(rt);

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->clearMark();

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);

#ifdef DEBUG_srcnotesize
  { extern void DumpSrcNoteSizeHist();
    DumpSrcNoteSizeHist();
    printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
  }
#endif

#ifdef JS_SCOPE_DEPTH_METER
    DumpScopeDepthMeter(rt);
#endif

#ifdef JS_DUMP_LOOP_STATS
#endif
}

#ifdef JS_THREADSAFE

/*
 * If the GC is running and we're called on another thread, wait for this GC
 * activation to finish. We can safely wait here without fear of deadlock (in
 * the case where we are called within a request on another thread's context)
 * because the GC doesn't set rt->gcRunning until after it has waited for all
 * active requests to end.
 *
 * We call js_CurrentThreadId() here only after checking rt->gcRunning, to
 * avoid an expensive call when the GC is not running.
 */
void
js_WaitForGC(JSRuntime *rt)
{
    if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
        do {
            JS_AWAIT_GC_DONE(rt);
        } while (rt->gcRunning);
    }
}

/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));

    if (requestDebit != 0) {
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock while
             * this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }

        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /*
     * Check that we did not release the GC lock above and let the GC finish
     * before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for GC to finish on the other thread, even if requestDebit is 0
     * and even if GC has not started yet because the gcThread is waiting in
     * AutoGCSession. This ensures that js_GC never returns without a full GC
     * cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    rt->requestCount += requestDebit;
}

class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    JSContext *context;

    /* Disable copy constructor or assignments */
    AutoGCSession(const AutoGCSession &);
    void operator=(const AutoGCSession &);
};

/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    if (rt->gcThread && rt->gcThread != cx->thread)
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }
#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and only after waiting for
     * any active requests to end. This way a js_WaitForGC call outside a
     * request does not block on a GC that is itself still waiting (with
     * rt->gcThread set) for other requests to finish; JS_BeginRequest
     * performs that wait instead.
     */
    rt->gcRunning = true;
}

/* End the current GC session and allow other threads to proceed. */
AutoGCSession::~AutoGCSession()
{
    JSRuntime *rt = context->runtime;
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread);
    rt->gcThread = NULL;
    JS_NOTIFY_GC_DONE(rt);
#endif
}
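
/*
 * In JS_THREADSAFE builds cx->gcBackgroundFree points at rt->gcHelperThread
 * for the duration of the collection; once the mark-and-sweep loop below is
 * done, GCUntilDone clears it and calls startBackgroundSweep() so that the
 * helper thread can release the swept memory off the main thread.
 */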

/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
    if (rt->gcMarkAndSweep) {
        rt->gcPoke = true;
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT(!(*c)->isMarked());

    /*
     * We should not be depending on cx->compartment in the GC, so set it to
     * NULL to look for violations.
     */
    SwitchToCompartment(cx, (JSCompartment *)NULL);

    JS_ASSERT(!rt->gcCurrentCompartment);
    rt->gcCurrentCompartment = comp;

    METER(rt->gcStats.poke++);

    bool firstRun = true;
    rt->gcMarkAndSweep = true;
#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->gcBackgroundFree);
#endif
    do {
        rt->gcPoke = false;

        AutoUnlockGC unlock(rt);
        if (firstRun) {
            PreGCCleanup(cx, gckind);
            TIMESTAMP(startMark);
            firstRun = false;
        }

        if (comp)
            MarkAndSweepCompartment(cx, comp, gckind GCTIMER_ARG);
        else
            MarkAndSweep(cx, gckind GCTIMER_ARG);

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

#ifdef JS_THREADSAFE
    JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
    cx->gcBackgroundFree = NULL;
    rt->gcHelperThread.startBackgroundSweep(rt);
#endif

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
    rt->gcCurrentCompartment = NULL;

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->setGCLastBytes((*c)->gcBytes);
}
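
/*
 * Public entry point for a collection (used, e.g., by JS_GC and the
 * last-context shutdown GC). The JSGC_BEGIN callback sampled below lets an
 * embedding defer a non-shutdown collection by returning JS_FALSE. A rough,
 * hypothetical sketch of such a callback (only the JSGCStatus values are
 * real API; the other names are made up):
 *
 *   static JSBool MyGCCallback(JSContext *cx, JSGCStatus status)
 *   {
 *       if (status == JSGC_BEGIN)
 *           return !inCriticalSection;  // JS_FALSE defers this GC
 *       return JS_TRUE;                 // other statuses are informational
 *   }
 */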

void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up, and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    RecordNativeStackTopForGC(cx);

#ifdef DEBUG
    int stackDummy;
# if JS_STACK_GROWTH_DIRECTION > 0
    /* cx->stackLimit is set to jsuword(-1) by default. */
    JS_ASSERT_IF(cx->stackLimit != jsuword(-1),
                 JS_CHECK_STACK_SIZE(cx->stackLimit + (1 << 14), &stackDummy));
# else
    /* -16k because it is possible to perform a GC during an overrecursion report. */
    JS_ASSERT_IF(cx->stackLimit, JS_CHECK_STACK_SIZE(cx->stackLimit - (1 << 14), &stackDummy));
# endif
#endif

    GCTIMER_BEGIN();

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);

            GCUntilDone(cx, comp, gckind GCTIMER_ARG);
        }

        /* We re-sample the callback again as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until the JSGC_END callback stops creating
         * garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);

#ifdef JS_GCMETER
    js_DumpGCStats(cx->runtime, stderr);
#endif

    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}
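
/*
 * Set obj's [[Prototype]] while checking that doing so would not create a
 * cycle in the proto chain. The check runs inside a GC session so that no
 * other thread can mutate the chain while we walk it; on a detected cycle
 * obj is left untouched.
 */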

bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
    /*
     * This function cannot be called during the GC and always requires a
     * request.
     */
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread->data.requestDepth);

    /*
     * This is only necessary if AutoGCSession below would wait for GC to
     * finish on another thread, but to capture the minimal stack space and
     * for code simplicity we do it here unconditionally.
     */
    RecordNativeStackTopForGC(cx);
#endif

    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
    AutoUnlockGC unlock(rt);

    bool cycle = false;
    for (JSObject *obj2 = proto; obj2;) {
        if (obj2 == obj) {
            cycle = true;
            break;
        }
        obj2 = obj2->getProto();
    }
    if (!cycle)
        obj->setProto(proto);

    return !cycle;
}
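
/*
 * Create a compartment and register it with the runtime under the GC lock.
 * A JSCOMPARTMENT_NEW callback failure unregisters it again; the initial
 * setGCLastBytes(8192) call seeds the compartment's GC trigger.
 */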

JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = js_new<JSCompartment>(rt);
    if (!compartment || !compartment->init()) {
        js_delete(compartment);
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    compartment->setGCLastBytes(8192);

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            JS_ReportOutOfMemory(cx);
            return NULL;
        }
    }

    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        AutoLockGC lock(rt);
        rt->compartments.popBack();
        return NULL;
    }

    return compartment;
}

} /* namespace gc */
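
/*
 * Trace all GC roots for API clients such as JS_TraceRuntime. In a
 * JS_THREADSAFE build this must first rendezvous with any in-progress GC via
 * AutoGCSession before it can safely trace the heap from another thread.
 */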

void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        AutoLockGC lock(rt);

        if (rt->gcThread != cx->thread) {
            AutoGCSession gcsession(cx);
            AutoUnlockGC unlock(rt);

            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}

} /* namespace js */