1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
3 * ***** BEGIN LICENSE BLOCK *****
4 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 * The contents of this file are subject to the Mozilla Public License Version
7 * 1.1 (the "License"); you may not use this file except in compliance with
8 * the License. You may obtain a copy of the License at
9 * http://www.mozilla.org/MPL/
11 * Software distributed under the License is distributed on an "AS IS" basis,
12 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
13 * for the specific language governing rights and limitations under the
16 * The Original Code is Mozilla Communicator client code, released
19 * The Initial Developer of the Original Code is
20 * Netscape Communications Corporation.
21 * Portions created by the Initial Developer are Copyright (C) 1998
22 * the Initial Developer. All Rights Reserved.
26 * Alternatively, the contents of this file may be used under the terms of
27 * either of the GNU General Public License Version 2 or later (the "GPL"),
28 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 * in which case the provisions of the GPL or the LGPL are applicable instead
30 * of those above. If you wish to allow use of your version of this file only
31 * under the terms of either the GPL or the LGPL, and not to allow others to
32 * use your version of this file under the terms of the MPL, indicate your
33 * decision by deleting the provisions above and replace them with the notice
34 * and other provisions required by the GPL or the LGPL. If you do not delete
35 * the provisions above, a recipient may use your version of this file under
36 * the terms of any one of the MPL, the GPL or the LGPL.
38 * ***** END LICENSE BLOCK ***** */
43 /* Gross special case for Gecko, which defines malloc/calloc/free. */
44 #ifdef mozilla_mozalloc_macro_wrappers_h
45 # define JS_GC_UNDEFD_MOZALLOC_WRAPPERS
46 /* The "anti-header" */
47 # include "mozilla/mozalloc_undef_macro_wrappers.h"
51 * JS Garbage Collector.
60 #include "jsgcchunk.h"
63 #include "jsversion.h"
66 #include "jsgcstats.h"
72 js_TraceXML(JSTracer
*trc
, JSXML
* thing
);
74 #if JS_STACK_GROWTH_DIRECTION > 0
75 # define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) < limit)
77 # define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) > limit)
87 * The kind of GC thing with a finalizer. The external strings follow the
88 * ordinary string to simplify js_GetExternalStringGCType.
97 FINALIZE_OBJECT_LAST
= FINALIZE_OBJECT16
,
99 #if JS_HAS_XML_SUPPORT
102 FINALIZE_SHORT_STRING
,
104 FINALIZE_EXTERNAL_STRING
,
108 const uintN JS_FINALIZE_OBJECT_LIMIT
= 6;
110 /* Every arena has a header. */
112 JSCompartment
*compartment
;
113 Arena
<FreeCell
> *next
;
123 template <typename T
>
129 template <typename T
, size_t N
, size_t R
>
131 ThingOrCell
<T
> things
[N
];
135 template <typename T
, size_t N
>
136 struct Things
<T
, N
, 0> {
137 ThingOrCell
<T
> things
[N
];
140 template <typename T
>
142 static const size_t ArenaSize
= 4096;
144 struct AlignedArenaHeader
{
145 T align
[(sizeof(ArenaHeader
) + sizeof(T
) - 1) / sizeof(T
)];
148 /* We want things in the arena to be aligned, so align the header. */
151 AlignedArenaHeader align
;
154 static const size_t ThingsPerArena
= (ArenaSize
- sizeof(AlignedArenaHeader
)) / sizeof(T
);
155 static const size_t FillerSize
= ArenaSize
- sizeof(AlignedArenaHeader
) - sizeof(T
) * ThingsPerArena
;
156 Things
<T
, ThingsPerArena
, FillerSize
> t
;
158 inline Chunk
*chunk() const;
159 inline size_t arenaIndex() const;
161 inline ArenaHeader
*header() { return &aheader
; };
163 inline MarkingDelay
*getMarkingDelay() const;
164 inline ArenaBitmap
*bitmap() const;
166 inline ConservativeGCTest
mark(T
*thing
, JSTracer
*trc
);
167 void markDelayedChildren(JSTracer
*trc
);
168 inline bool inFreeList(void *thing
) const;
169 inline T
*getAlignedThing(void *thing
);
171 inline bool assureThingIsAligned(void *thing
);
174 void init(JSCompartment
*compartment
, unsigned thingKind
);
176 JS_STATIC_ASSERT(sizeof(Arena
<FreeCell
>) == 4096);
179 * Live objects are marked black. How many other additional colors are available
180 * depends on the size of the GCThing.
182 static const uint32 BLACK
= 0;
184 /* An arena bitmap contains enough mark bits for all the cells in an arena. */
186 static const size_t BitCount
= Arena
<FreeCell
>::ArenaSize
/ Cell::CellSize
;
187 static const size_t BitWords
= BitCount
/ JS_BITS_PER_WORD
;
189 uintptr_t bitmap
[BitWords
];
191 JS_ALWAYS_INLINE
bool isMarked(size_t bit
, uint32 color
) {
193 JS_ASSERT(bit
< BitCount
);
194 uintptr_t *word
= &bitmap
[bit
/ JS_BITS_PER_WORD
];
195 return *word
& (uintptr_t(1) << (bit
% JS_BITS_PER_WORD
));
198 JS_ALWAYS_INLINE
bool markIfUnmarked(size_t bit
, uint32 color
) {
199 JS_ASSERT(bit
+ color
< BitCount
);
200 uintptr_t *word
= &bitmap
[bit
/ JS_BITS_PER_WORD
];
201 uintptr_t mask
= (uintptr_t(1) << (bit
% JS_BITS_PER_WORD
));
205 if (color
!= BLACK
) {
207 word
= &bitmap
[bit
/ JS_BITS_PER_WORD
];
208 mask
= (uintptr_t(1) << (bit
% JS_BITS_PER_WORD
));
216 JS_ALWAYS_INLINE
void unmark(size_t bit
, uint32 color
) {
218 JS_ASSERT(bit
< BitCount
);
219 uintptr_t *word
= &bitmap
[bit
/ JS_BITS_PER_WORD
];
220 *word
&= ~(uintptr_t(1) << (bit
% JS_BITS_PER_WORD
));
225 for (unsigned i
= 0; i
< BitWords
; i
++) {
226 if (bitmap
[i
] != uintptr_t(0))
234 /* Ensure that bitmap covers the whole arena. */
235 JS_STATIC_ASSERT(Arena
<FreeCell
>::ArenaSize
% Cell::CellSize
== 0);
236 JS_STATIC_ASSERT(ArenaBitmap::BitCount
% JS_BITS_PER_WORD
== 0);
238 /* Marking delay is used to resume marking later when recursive marking uses too much stack. */
239 struct MarkingDelay
{
241 uintptr_t unmarkedChildren
;
247 unmarkedChildren
= 0;
251 struct EmptyArenaLists
{
252 /* Arenas with no internal freelist prepared. */
253 Arena
<FreeCell
> *cellFreeList
;
255 /* Arenas with internal freelists prepared for a given finalize kind. */
256 Arena
<FreeCell
> *freeLists
[FINALIZE_LIMIT
];
262 Arena
<FreeCell
> *getOtherArena() {
263 Arena
<FreeCell
> *arena
= cellFreeList
;
265 cellFreeList
= arena
->header()->next
;
268 for (int i
= 0; i
< FINALIZE_LIMIT
; i
++) {
269 if ((arena
= (Arena
<FreeCell
> *) freeLists
[i
])) {
270 freeLists
[i
] = freeLists
[i
]->header()->next
;
274 JS_NOT_REACHED("No arena");
278 template <typename T
>
279 inline Arena
<T
> *getTypedFreeList(unsigned thingKind
);
281 template <typename T
>
282 inline Arena
<T
> *getNext(JSCompartment
*comp
, unsigned thingKind
);
284 template <typename T
>
285 inline void insert(Arena
<T
> *arena
);
288 template <typename T
>
290 EmptyArenaLists::getTypedFreeList(unsigned thingKind
) {
291 JS_ASSERT(thingKind
< FINALIZE_LIMIT
);
292 Arena
<T
> *arena
= (Arena
<T
>*) freeLists
[thingKind
];
294 freeLists
[thingKind
] = freeLists
[thingKind
]->header()->next
;
302 EmptyArenaLists::getNext(JSCompartment
*comp
, unsigned thingKind
) {
303 Arena
<T
> *arena
= getTypedFreeList
<T
>(thingKind
);
305 JS_ASSERT(arena
->header()->isUsed
== false);
306 JS_ASSERT(arena
->header()->thingSize
== sizeof(T
));
307 arena
->header()->isUsed
= true;
308 arena
->header()->thingKind
= thingKind
;
309 arena
->header()->compartment
= comp
;
312 arena
= (Arena
<T
> *)getOtherArena();
313 JS_ASSERT(arena
->header()->isUsed
== false);
314 arena
->init(comp
, thingKind
);
318 template <typename T
>
320 EmptyArenaLists::insert(Arena
<T
> *arena
) {
321 unsigned thingKind
= arena
->header()->thingKind
;
322 JS_ASSERT(thingKind
< FINALIZE_LIMIT
);
323 arena
->header()->next
= freeLists
[thingKind
];
324 freeLists
[thingKind
] = (Arena
<FreeCell
> *) arena
;
327 /* The chunk header (located at the end of the chunk to preserve arena alignment). */
331 EmptyArenaLists emptyArenaLists
;
336 /* Chunks contain arenas and associated data structures (mark bitmap, delayed marking state). */
338 static const size_t BytesPerArena
= sizeof(Arena
<FreeCell
>) +
339 sizeof(ArenaBitmap
) +
340 sizeof(MarkingDelay
);
342 static const size_t ArenasPerChunk
= (GC_CHUNK_SIZE
- sizeof(ChunkInfo
)) / BytesPerArena
;
343 static const size_t MaxAge
= 3;
345 Arena
<FreeCell
> arenas
[ArenasPerChunk
];
346 ArenaBitmap bitmaps
[ArenasPerChunk
];
347 MarkingDelay markingDelay
[ArenasPerChunk
];
351 void clearMarkBitmap();
352 void init(JSRuntime
*rt
);
355 bool hasAvailableArenas();
356 bool withinArenasRange(Cell
*cell
);
358 template <typename T
>
359 Arena
<T
> *allocateArena(JSCompartment
*comp
, unsigned thingKind
);
361 template <typename T
>
362 void releaseArena(Arena
<T
> *a
);
364 JSRuntime
*getRuntime();
367 JS_STATIC_ASSERT(sizeof(Chunk
) <= GC_CHUNK_SIZE
);
368 JS_STATIC_ASSERT(sizeof(Chunk
) + Chunk::BytesPerArena
> GC_CHUNK_SIZE
);
373 uintptr_t addr
= uintptr_t(this);
374 JS_ASSERT(addr
% sizeof(FreeCell
) == 0);
375 addr
&= ~(Arena
<FreeCell
>::ArenaSize
- 1);
376 return reinterpret_cast<Arena
<Cell
> *>(addr
);
382 uintptr_t addr
= uintptr_t(this);
383 JS_ASSERT(addr
% sizeof(FreeCell
) == 0);
384 addr
&= ~(GC_CHUNK_SIZE
- 1);
385 return reinterpret_cast<Chunk
*>(addr
);
391 return &chunk()->bitmaps
[arena()->arenaIndex()];
394 STATIC_POSTCONDITION_ASSUME(return < ArenaBitmap::BitCount
)
396 Cell::cellIndex() const
398 return reinterpret_cast<const FreeCell
*>(this) - reinterpret_cast<FreeCell
*>(&arena()->t
);
401 template <typename T
>
403 Arena
<T
>::chunk() const
405 uintptr_t addr
= uintptr_t(this);
406 JS_ASSERT(addr
% sizeof(FreeCell
) == 0);
407 addr
&= ~(GC_CHUNK_SIZE
- 1);
408 return reinterpret_cast<Chunk
*>(addr
);
411 template <typename T
>
413 Arena
<T
>::arenaIndex() const
415 return reinterpret_cast<const Arena
<FreeCell
> *>(this) - chunk()->arenas
;
418 template <typename T
>
420 Arena
<T
>::getMarkingDelay() const
422 return &chunk()->markingDelay
[arenaIndex()];
425 template <typename T
>
427 Arena
<T
>::bitmap() const
429 return &chunk()->bitmaps
[arenaIndex()];
432 template <typename T
>
434 Arena
<T
>::getAlignedThing(void *thing
)
436 jsuword start
= reinterpret_cast<jsuword
>(&t
.things
[0]);
437 jsuword offset
= reinterpret_cast<jsuword
>(thing
) - start
;
438 offset
-= offset
% aheader
.thingSize
;
439 return reinterpret_cast<T
*>(start
+ offset
);
443 template <typename T
>
445 Arena
<T
>::assureThingIsAligned(void *thing
)
447 return (getAlignedThing(thing
) == thing
);
452 AssertValidColor(const void *thing
, uint32 color
)
454 JS_ASSERT_IF(color
, color
< reinterpret_cast<const js::gc::FreeCell
*>(thing
)->arena()->header()->thingSize
/ sizeof(FreeCell
));
458 Cell::isMarked(uint32 color
= BLACK
) const
460 AssertValidColor(this, color
);
461 return bitmap()->isMarked(cellIndex(), color
);
465 Cell::markIfUnmarked(uint32 color
= BLACK
) const
467 AssertValidColor(this, color
);
468 return bitmap()->markIfUnmarked(cellIndex(), color
);
472 Cell::unmark(uint32 color
) const
474 JS_ASSERT(color
!= BLACK
);
475 AssertValidColor(this, color
);
476 bitmap()->unmark(cellIndex(), color
);
480 Cell::compartment() const
482 return arena()->header()->compartment
;
485 template <typename T
>
490 return reinterpret_cast<Arena
<T
> *>(cell
->arena());
493 #define JSTRACE_XML 2
496 * One past the maximum trace kind.
498 #define JSTRACE_LIMIT 3
501 * Lower limit after which we limit the heap growth
503 const size_t GC_ARENA_ALLOCATION_TRIGGER
= 30 * js::GC_CHUNK_SIZE
;
506 * A GC is triggered once the number of newly allocated arenas
507 * is GC_HEAP_GROWTH_FACTOR times the number of live arenas after
508 * the last GC starting after the lower limit of
509 * GC_ARENA_ALLOCATION_TRIGGER.
511 const float GC_HEAP_GROWTH_FACTOR
= 3.0f
;
514 GetFinalizableTraceKind(size_t thingKind
)
516 JS_STATIC_ASSERT(JSExternalString::TYPE_LIMIT
== 8);
518 static const uint8 map
[FINALIZE_LIMIT
] = {
519 JSTRACE_OBJECT
, /* FINALIZE_OBJECT0 */
520 JSTRACE_OBJECT
, /* FINALIZE_OBJECT2 */
521 JSTRACE_OBJECT
, /* FINALIZE_OBJECT4 */
522 JSTRACE_OBJECT
, /* FINALIZE_OBJECT8 */
523 JSTRACE_OBJECT
, /* FINALIZE_OBJECT12 */
524 JSTRACE_OBJECT
, /* FINALIZE_OBJECT16 */
525 JSTRACE_OBJECT
, /* FINALIZE_FUNCTION */
526 #if JS_HAS_XML_SUPPORT /* FINALIZE_XML */
529 JSTRACE_STRING
, /* FINALIZE_SHORT_STRING */
530 JSTRACE_STRING
, /* FINALIZE_STRING */
531 JSTRACE_STRING
, /* FINALIZE_EXTERNAL_STRING */
534 JS_ASSERT(thingKind
< FINALIZE_LIMIT
);
535 return map
[thingKind
];
539 IsFinalizableStringKind(unsigned thingKind
)
541 return unsigned(FINALIZE_SHORT_STRING
) <= thingKind
&&
542 thingKind
<= unsigned(FINALIZE_EXTERNAL_STRING
);
546 * Get the type of the external string or -1 if the string was not created
547 * with JS_NewExternalString.
550 GetExternalStringGCType(JSExternalString
*str
)
552 JS_STATIC_ASSERT(FINALIZE_STRING
+ 1 == FINALIZE_EXTERNAL_STRING
);
553 JS_ASSERT(!JSString::isStatic(str
));
555 unsigned thingKind
= str
->externalStringType
;
556 JS_ASSERT(IsFinalizableStringKind(thingKind
));
557 return intN(thingKind
);
561 GetGCThingTraceKind(void *thing
)
564 if (JSString::isStatic(thing
))
565 return JSTRACE_STRING
;
566 Cell
*cell
= reinterpret_cast<Cell
*>(thing
);
567 return GetFinalizableTraceKind(cell
->arena()->header()->thingKind
);
570 static inline JSRuntime
*
571 GetGCThingRuntime(void *thing
)
573 return reinterpret_cast<FreeCell
*>(thing
)->chunk()->info
.runtime
;
578 checkArenaListsForThing(JSCompartment
*comp
, jsuword thing
);
581 /* The arenas in a list have uniform kind. */
583 Arena
<FreeCell
> *head
; /* list start */
584 Arena
<FreeCell
> *cursor
; /* arena with free things */
591 inline Arena
<FreeCell
> *getNextWithFreeList() {
593 while (cursor
!= NULL
) {
594 ArenaHeader
*aheader
= cursor
->header();
596 cursor
= aheader
->next
;
597 if (aheader
->freeList
)
604 template <typename T
>
605 bool arenasContainThing(void *thing
) {
606 for (Arena
<T
> *a
= (Arena
<T
> *) head
; a
; a
= (Arena
<T
> *) a
->header()->next
) {
607 JS_ASSERT(a
->header()->isUsed
);
608 if (thing
>= &a
->t
.things
[0] && thing
< &a
->t
.things
[a
->ThingsPerArena
])
614 bool markedThingsInArenaList() {
615 for (Arena
<FreeCell
> *a
= (Arena
<FreeCell
> *) head
; a
; a
= (Arena
<FreeCell
> *) a
->header()->next
) {
616 if (!a
->bitmap()->noBitsSet())
623 inline void insert(Arena
<FreeCell
> *a
) {
624 a
->header()->next
= head
;
630 Arena
<FreeCell
> *next
= head
->header()->next
;
631 head
->chunk()->releaseArena(head
);
638 inline bool isEmpty() const {
639 return (head
== NULL
);
644 FreeCell
**finalizables
[FINALIZE_LIMIT
];
648 inline FreeCell
*getNext(uint32 kind
) {
649 FreeCell
*top
= NULL
;
650 if (finalizables
[kind
]) {
651 top
= *finalizables
[kind
];
653 *finalizables
[kind
] = top
->link
;
655 finalizables
[kind
] = NULL
;
658 if (top
&& !top
->link
)
659 top
->arena()->header()->hasFreeThings
= false;
665 template <typename T
>
666 inline void populate(Arena
<T
> *a
, uint32 thingKind
) {
667 finalizables
[thingKind
] = &a
->header()->freeList
;
671 bool isEmpty() const {
672 for (size_t i
= 0; i
!= JS_ARRAY_LENGTH(finalizables
); ++i
) {
682 typedef Vector
<gc::Chunk
*, 32, SystemAllocPolicy
> GCChunks
;
686 typedef void *Lookup
;
688 static HashNumber
hash(void *key
) {
689 return HashNumber(uintptr_t(key
) >> JS_GCTHING_ZEROBITS
);
/* GC thing keys compare by pointer identity. */
static bool match(void *l, void *k) { return l == k; }
695 typedef HashMap
<void *, uint32
, GCPtrHasher
, SystemAllocPolicy
> GCLocks
;
699 RootInfo(const char *name
, JSGCRootType type
) : name(name
), type(type
) {}
704 typedef js::HashMap
<void *,
706 js::DefaultHasher
<void *>,
707 js::SystemAllocPolicy
> RootedValueMap
;
709 /* If HashNumber grows, need to change WrapperHasher. */
710 JS_STATIC_ASSERT(sizeof(HashNumber
) == 4);
714 typedef Value Lookup
;
716 static HashNumber
hash(Value key
) {
717 uint64 bits
= JSVAL_BITS(Jsvalify(key
));
718 return (uint32
)bits
^ (uint32
)(bits
>> 32);
721 static bool match(const Value
&l
, const Value
&k
) { return l
== k
; }
724 typedef HashMap
<Value
, Value
, WrapperHasher
, SystemAllocPolicy
> WrapperMap
;
726 class AutoValueVector
;
731 CheckGCFreeListLink(js::gc::FreeCell
*cell
)
734 * The GC things on the free lists come from one arena and the things on
735 * the free list are linked in ascending address order.
737 JS_ASSERT_IF(cell
->link
,
739 cell
->link
->arena());
740 JS_ASSERT_IF(cell
->link
, cell
< cell
->link
);
744 RefillFinalizableFreeList(JSContext
*cx
, unsigned thingKind
);
748 CheckAllocation(JSContext
*cx
);
752 * Get the type of the external string or -1 if the string was not created
753 * with JS_NewExternalString.
756 js_GetExternalStringGCType(JSString
*str
);
758 extern JS_FRIEND_API(uint32
)
759 js_GetGCThingTraceKind(void *thing
);
763 * Since we're forcing a GC from JS_GC anyway, don't bother wasting cycles
764 * loading oldval. XXX remove implied force, fix jsinterp.c's "second arg
767 #define GC_POKE(cx, oldval) ((cx)->runtime->gcPoke = JS_TRUE)
769 #define GC_POKE(cx, oldval) ((cx)->runtime->gcPoke = JSVAL_IS_GCTHING(oldval))
773 js_InitGC(JSRuntime
*rt
, uint32 maxbytes
);
776 js_FinishGC(JSRuntime
*rt
);
779 js_AddRoot(JSContext
*cx
, js::Value
*vp
, const char *name
);
782 js_AddGCThingRoot(JSContext
*cx
, void **rp
, const char *name
);
786 js_DumpNamedRoots(JSRuntime
*rt
,
787 void (*dump
)(const char *name
, void *rp
, JSGCRootType type
, void *data
),
792 js_MapGCRoots(JSRuntime
*rt
, JSGCRootMapFun map
, void *data
);
794 /* Table of pointers with count valid members. */
795 typedef struct JSPtrTable
{
801 js_RegisterCloseableIterator(JSContext
*cx
, JSObject
*obj
);
805 js_ReserveObjects(JSContext
*cx
, size_t nobjects
);
809 js_LockGCThingRT(JSRuntime
*rt
, void *thing
);
812 js_UnlockGCThingRT(JSRuntime
*rt
, void *thing
);
814 extern JS_FRIEND_API(bool)
815 IsAboutToBeFinalized(JSContext
*cx
, void *thing
);
817 extern JS_FRIEND_API(bool)
818 js_GCThingIsMarked(void *thing
, uintN color
);
821 js_TraceStackFrame(JSTracer
*trc
, JSStackFrame
*fp
);
825 extern JS_REQUIRES_STACK
void
826 MarkRuntime(JSTracer
*trc
);
829 TraceRuntime(JSTracer
*trc
);
831 extern JS_REQUIRES_STACK
JS_FRIEND_API(void)
832 MarkContext(JSTracer
*trc
, JSContext
*acx
);
834 /* Must be called with GC lock taken. */
836 TriggerGC(JSRuntime
*rt
);
838 /* Must be called with GC lock taken. */
840 TriggerCompartmentGC(JSCompartment
*comp
);
843 MaybeGC(JSContext
*cx
);
848 * Kinds of js_GC invocation.
850 typedef enum JSGCInvocationKind
{
851 /* Normal invocation. */
855 * Called from js_DestroyContext for last JSContext in a JSRuntime, when
856 * it is imperative that rt->gcPoke gets cleared early in js_GC.
859 } JSGCInvocationKind
;
861 /* Pass NULL for |comp| to get a full GC. */
863 js_GC(JSContext
*cx
, JSCompartment
*comp
, JSGCInvocationKind gckind
);
867 * This is a helper for code at can potentially run outside JS request to
868 * ensure that the GC is not running when the function returns.
870 * This function must be called with the GC lock held.
873 js_WaitForGC(JSRuntime
*rt
);
875 #else /* !JS_THREADSAFE */
877 # define js_WaitForGC(rt) ((void) 0)
882 js_DestroyScriptsToGC(JSContext
*cx
, JSCompartment
*comp
);
889 * During the finalization we do not free immediately. Rather we add the
890 * corresponding pointers to a buffer which we later release on a separated
893 * The buffer is implemented as a vector of 64K arrays of pointers, not as a
894 * simple vector, to avoid realloc calls during the vector growth and to not
895 * bloat the binary size of the inlined freeLater method. Any OOM during
896 * buffer growth results in the pointer being freed immediately.
898 class GCHelperThread
{
899 static const size_t FREE_ARRAY_SIZE
= size_t(1) << 16;
900 static const size_t FREE_ARRAY_LENGTH
= FREE_ARRAY_SIZE
/ sizeof(void *);
904 PRCondVar
* sweepingDone
;
908 Vector
<void **, 16, js::SystemAllocPolicy
> freeVector
;
910 void **freeCursorEnd
;
913 replenishAndFreeLater(void *ptr
);
915 static void freeElementsAndArray(void **array
, void **end
) {
916 JS_ASSERT(array
<= end
);
917 for (void **p
= array
; p
!= end
; ++p
)
922 static void threadMain(void* arg
);
924 void threadLoop(JSRuntime
*rt
);
935 freeCursorEnd(NULL
) { }
937 bool init(JSRuntime
*rt
);
938 void finish(JSRuntime
*rt
);
940 /* Must be called with GC lock taken. */
941 void startBackgroundSweep(JSRuntime
*rt
);
943 /* Must be called outside the GC lock. */
944 void waitBackgroundSweepEnd(JSRuntime
*rt
);
946 void freeLater(void *ptr
) {
947 JS_ASSERT(!sweeping
);
948 if (freeCursor
!= freeCursorEnd
)
951 replenishAndFreeLater(ptr
);
955 #endif /* JS_THREADSAFE */
957 struct GCChunkHasher
{
958 typedef gc::Chunk
*Lookup
;
961 * Strip zeros for better distribution after multiplying by the golden
964 static HashNumber
hash(gc::Chunk
*chunk
) {
965 JS_ASSERT(!(jsuword(chunk
) & GC_CHUNK_MASK
));
966 return HashNumber(jsuword(chunk
) >> GC_CHUNK_SHIFT
);
969 static bool match(gc::Chunk
*k
, gc::Chunk
*l
) {
970 JS_ASSERT(!(jsuword(k
) & GC_CHUNK_MASK
));
971 JS_ASSERT(!(jsuword(l
) & GC_CHUNK_MASK
));
976 typedef HashSet
<js::gc::Chunk
*, GCChunkHasher
, SystemAllocPolicy
> GCChunkSet
;
978 struct ConservativeGCThreadData
{
981 * The GC scans conservatively between JSThreadData::nativeStackBase and
982 * nativeStackTop unless the latter is NULL.
984 jsuword
*nativeStackTop
;
988 jsuword words
[JS_HOWMANY(sizeof(jmp_buf), sizeof(jsuword
))];
992 * Cycle collector uses this to communicate that the native stack of the
993 * GC thread should be scanned only if the thread have more than the given
994 * threshold of requests.
996 unsigned requestThreshold
;
998 JS_NEVER_INLINE
void recordStackTop();
1000 #ifdef JS_THREADSAFE
1001 void updateForRequestEnd(unsigned suspendCount
) {
1005 nativeStackTop
= NULL
;
1009 bool hasStackToScan() const {
1010 return !!nativeStackTop
;
1014 struct GCMarker
: public JSTracer
{
1016 /* The color is only applied to objects, functions and xml. */
1020 /* See comments before delayMarkingChildren is jsgc.cpp. */
1021 js::gc::Arena
<js::gc::Cell
> *unmarkedArenaStackTop
;
1023 size_t markLaterCount
;
1026 #if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER)
1027 js::gc::ConservativeGCStats conservativeStats
;
1030 #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
1031 struct ConservativeRoot
{ void *thing
; uint32 thingKind
; };
1032 Vector
<ConservativeRoot
, 0, SystemAllocPolicy
> conservativeRoots
;
1033 const char *conservativeDumpFileName
;
1035 void dumpConservativeRoots();
1039 explicit GCMarker(JSContext
*cx
);
1042 uint32
getMarkColor() const {
1046 void setMarkColor(uint32 newColor
) {
1048 * We must process any delayed marking here, otherwise we confuse
1051 markDelayedChildren();
1055 void delayMarkingChildren(void *thing
);
1057 JS_FRIEND_API(void) markDelayedChildren();
1061 MarkStackRangeConservatively(JSTracer
*trc
, Value
*begin
, Value
*end
);
1063 } /* namespace js */
1066 js_FinalizeStringRT(JSRuntime
*rt
, JSString
*str
);
1069 * This function is defined in jsdbgapi.cpp but is declared here to avoid
1070 * polluting jsdbgapi.h, a public API header, with internal functions.
1073 js_MarkTraps(JSTracer
*trc
);
1079 * Macro to test if a traversal is the marking phase of GC to avoid exposing
1080 * ScriptFilenameEntry to traversal implementations.
1082 #define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL)
1084 #if JS_HAS_XML_SUPPORT
1085 # define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) < JSTRACE_LIMIT)
1087 # define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) <= JSTRACE_STRING)
1091 * Set object's prototype while checking that doing so would not create
1092 * a cycle in the proto chain. The cycle check and proto change are done
1093 * only when all other requests are finished or suspended to ensure exclusive
1094 * access to the chain. If there is a cycle, return false without reporting
1095 * an error. Otherwise, set the proto and return true.
1098 SetProtoCheckingForCycles(JSContext
*cx
, JSObject
*obj
, JSObject
*proto
);
1101 NewCompartment(JSContext
*cx
, JSPrincipals
*principals
);
1103 } /* namespace js */
1104 } /* namespace gc */
1106 inline JSCompartment
*
1107 JSObject::getCompartment() const
1109 return compartment();
1112 #ifdef JS_GC_UNDEFD_MOZALLOC_WRAPPERS
1113 # include "mozilla/mozalloc_macro_wrappers.h"
1116 #endif /* jsgc_h___ */