1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 /* JS Garbage Collector. */
12 #include "mozilla/Atomics.h"
13 #include "mozilla/DebugOnly.h"
14 #include "mozilla/MemoryReporting.h"
15 #include "mozilla/TypeTraits.h"
21 #include "js/SliceBudget.h"
22 #include "js/Vector.h"
27 class ForkJoinNursery
;
30 unsigned GetCPUCount();
33 Idle
, // doing nothing with the GC heap
34 Tracing
, // tracing the GC heap without collecting, e.g. IterateCompartments()
35 MajorCollecting
, // doing a GC of the major heap
36 MinorCollecting
// doing a GC of the minor heap (nursery)
50 #ifdef JSGC_COMPACTING
55 static inline JSGCTraceKind
56 MapAllocToTraceKind(AllocKind kind
)
58 static const JSGCTraceKind map
[] = {
59 JSTRACE_OBJECT
, /* FINALIZE_OBJECT0 */
60 JSTRACE_OBJECT
, /* FINALIZE_OBJECT0_BACKGROUND */
61 JSTRACE_OBJECT
, /* FINALIZE_OBJECT2 */
62 JSTRACE_OBJECT
, /* FINALIZE_OBJECT2_BACKGROUND */
63 JSTRACE_OBJECT
, /* FINALIZE_OBJECT4 */
64 JSTRACE_OBJECT
, /* FINALIZE_OBJECT4_BACKGROUND */
65 JSTRACE_OBJECT
, /* FINALIZE_OBJECT8 */
66 JSTRACE_OBJECT
, /* FINALIZE_OBJECT8_BACKGROUND */
67 JSTRACE_OBJECT
, /* FINALIZE_OBJECT12 */
68 JSTRACE_OBJECT
, /* FINALIZE_OBJECT12_BACKGROUND */
69 JSTRACE_OBJECT
, /* FINALIZE_OBJECT16 */
70 JSTRACE_OBJECT
, /* FINALIZE_OBJECT16_BACKGROUND */
71 JSTRACE_SCRIPT
, /* FINALIZE_SCRIPT */
72 JSTRACE_LAZY_SCRIPT
,/* FINALIZE_LAZY_SCRIPT */
73 JSTRACE_SHAPE
, /* FINALIZE_SHAPE */
74 JSTRACE_BASE_SHAPE
, /* FINALIZE_BASE_SHAPE */
75 JSTRACE_TYPE_OBJECT
,/* FINALIZE_TYPE_OBJECT */
76 JSTRACE_STRING
, /* FINALIZE_FAT_INLINE_STRING */
77 JSTRACE_STRING
, /* FINALIZE_STRING */
78 JSTRACE_STRING
, /* FINALIZE_EXTERNAL_STRING */
79 JSTRACE_SYMBOL
, /* FINALIZE_SYMBOL */
80 JSTRACE_JITCODE
, /* FINALIZE_JITCODE */
82 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map
) == FINALIZE_LIMIT
);
86 /* Return a printable string for the given kind, for diagnostic purposes. */
88 TraceKindAsAscii(JSGCTraceKind kind
);
90 /* Map from C++ type to finalize kind. JSObject does not have a 1:1 mapping, so must use Arena::thingSize. */
91 template <typename T
> struct MapTypeToFinalizeKind
{};
92 template <> struct MapTypeToFinalizeKind
<JSScript
> { static const AllocKind kind
= FINALIZE_SCRIPT
; };
93 template <> struct MapTypeToFinalizeKind
<LazyScript
> { static const AllocKind kind
= FINALIZE_LAZY_SCRIPT
; };
94 template <> struct MapTypeToFinalizeKind
<Shape
> { static const AllocKind kind
= FINALIZE_SHAPE
; };
95 template <> struct MapTypeToFinalizeKind
<BaseShape
> { static const AllocKind kind
= FINALIZE_BASE_SHAPE
; };
96 template <> struct MapTypeToFinalizeKind
<types::TypeObject
> { static const AllocKind kind
= FINALIZE_TYPE_OBJECT
; };
97 template <> struct MapTypeToFinalizeKind
<JSFatInlineString
> { static const AllocKind kind
= FINALIZE_FAT_INLINE_STRING
; };
98 template <> struct MapTypeToFinalizeKind
<JSString
> { static const AllocKind kind
= FINALIZE_STRING
; };
99 template <> struct MapTypeToFinalizeKind
<JSExternalString
> { static const AllocKind kind
= FINALIZE_EXTERNAL_STRING
; };
100 template <> struct MapTypeToFinalizeKind
<JS::Symbol
> { static const AllocKind kind
= FINALIZE_SYMBOL
; };
101 template <> struct MapTypeToFinalizeKind
<jit::JitCode
> { static const AllocKind kind
= FINALIZE_JITCODE
; };
103 #if defined(JSGC_GENERATIONAL) || defined(DEBUG)
105 IsNurseryAllocable(AllocKind kind
)
107 JS_ASSERT(kind
>= 0 && unsigned(kind
) < FINALIZE_LIMIT
);
108 static const bool map
[] = {
109 false, /* FINALIZE_OBJECT0 */
110 true, /* FINALIZE_OBJECT0_BACKGROUND */
111 false, /* FINALIZE_OBJECT2 */
112 true, /* FINALIZE_OBJECT2_BACKGROUND */
113 false, /* FINALIZE_OBJECT4 */
114 true, /* FINALIZE_OBJECT4_BACKGROUND */
115 false, /* FINALIZE_OBJECT8 */
116 true, /* FINALIZE_OBJECT8_BACKGROUND */
117 false, /* FINALIZE_OBJECT12 */
118 true, /* FINALIZE_OBJECT12_BACKGROUND */
119 false, /* FINALIZE_OBJECT16 */
120 true, /* FINALIZE_OBJECT16_BACKGROUND */
121 false, /* FINALIZE_SCRIPT */
122 false, /* FINALIZE_LAZY_SCRIPT */
123 false, /* FINALIZE_SHAPE */
124 false, /* FINALIZE_BASE_SHAPE */
125 false, /* FINALIZE_TYPE_OBJECT */
126 false, /* FINALIZE_FAT_INLINE_STRING */
127 false, /* FINALIZE_STRING */
128 false, /* FINALIZE_EXTERNAL_STRING */
129 false, /* FINALIZE_SYMBOL */
130 false, /* FINALIZE_JITCODE */
132 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map
) == FINALIZE_LIMIT
);
137 #if defined(JSGC_FJGENERATIONAL)
138 // This is separate from IsNurseryAllocable() so that the latter can evolve
139 // without worrying about what the ForkJoinNursery's needs are, and vice
140 // versa to some extent.
142 IsFJNurseryAllocable(AllocKind kind
)
144 JS_ASSERT(kind
>= 0 && unsigned(kind
) < FINALIZE_LIMIT
);
145 static const bool map
[] = {
146 false, /* FINALIZE_OBJECT0 */
147 true, /* FINALIZE_OBJECT0_BACKGROUND */
148 false, /* FINALIZE_OBJECT2 */
149 true, /* FINALIZE_OBJECT2_BACKGROUND */
150 false, /* FINALIZE_OBJECT4 */
151 true, /* FINALIZE_OBJECT4_BACKGROUND */
152 false, /* FINALIZE_OBJECT8 */
153 true, /* FINALIZE_OBJECT8_BACKGROUND */
154 false, /* FINALIZE_OBJECT12 */
155 true, /* FINALIZE_OBJECT12_BACKGROUND */
156 false, /* FINALIZE_OBJECT16 */
157 true, /* FINALIZE_OBJECT16_BACKGROUND */
158 false, /* FINALIZE_SCRIPT */
159 false, /* FINALIZE_LAZY_SCRIPT */
160 false, /* FINALIZE_SHAPE */
161 false, /* FINALIZE_BASE_SHAPE */
162 false, /* FINALIZE_TYPE_OBJECT */
163 false, /* FINALIZE_FAT_INLINE_STRING */
164 false, /* FINALIZE_STRING */
165 false, /* FINALIZE_EXTERNAL_STRING */
166 false, /* FINALIZE_SYMBOL */
167 false, /* FINALIZE_JITCODE */
169 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map
) == FINALIZE_LIMIT
);
175 IsBackgroundFinalized(AllocKind kind
)
177 JS_ASSERT(kind
>= 0 && unsigned(kind
) < FINALIZE_LIMIT
);
178 static const bool map
[] = {
179 false, /* FINALIZE_OBJECT0 */
180 true, /* FINALIZE_OBJECT0_BACKGROUND */
181 false, /* FINALIZE_OBJECT2 */
182 true, /* FINALIZE_OBJECT2_BACKGROUND */
183 false, /* FINALIZE_OBJECT4 */
184 true, /* FINALIZE_OBJECT4_BACKGROUND */
185 false, /* FINALIZE_OBJECT8 */
186 true, /* FINALIZE_OBJECT8_BACKGROUND */
187 false, /* FINALIZE_OBJECT12 */
188 true, /* FINALIZE_OBJECT12_BACKGROUND */
189 false, /* FINALIZE_OBJECT16 */
190 true, /* FINALIZE_OBJECT16_BACKGROUND */
191 false, /* FINALIZE_SCRIPT */
192 false, /* FINALIZE_LAZY_SCRIPT */
193 true, /* FINALIZE_SHAPE */
194 true, /* FINALIZE_BASE_SHAPE */
195 true, /* FINALIZE_TYPE_OBJECT */
196 true, /* FINALIZE_FAT_INLINE_STRING */
197 true, /* FINALIZE_STRING */
198 false, /* FINALIZE_EXTERNAL_STRING */
199 true, /* FINALIZE_SYMBOL */
200 false, /* FINALIZE_JITCODE */
202 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map
) == FINALIZE_LIMIT
);
207 CanBeFinalizedInBackground(gc::AllocKind kind
, const Class
*clasp
)
209 JS_ASSERT(kind
<= gc::FINALIZE_OBJECT_LAST
);
210 /* If the class has no finalizer or a finalizer that is safe to call on
211 * a different thread, we change the finalize kind. For example,
212 * FINALIZE_OBJECT0 calls the finalizer on the main thread,
213 * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
214 * IsBackgroundFinalized is called to prevent recursively incrementing
215 * the finalize kind; kind may already be a background finalize kind.
217 return (!gc::IsBackgroundFinalized(kind
) &&
218 (!clasp
->finalize
|| (clasp
->flags
& JSCLASS_BACKGROUND_FINALIZE
)));
222 GetGCThingTraceKind(const void *thing
);
224 /* Capacity for slotsToThingKind */
225 const size_t SLOTS_TO_THING_KIND_LIMIT
= 17;
227 extern const AllocKind slotsToThingKind
[];
229 /* Get the best kind to use when making an object with the given slot count. */
230 static inline AllocKind
231 GetGCObjectKind(size_t numSlots
)
233 if (numSlots
>= SLOTS_TO_THING_KIND_LIMIT
)
234 return FINALIZE_OBJECT16
;
235 return slotsToThingKind
[numSlots
];
238 /* As for GetGCObjectKind, but for dense array allocation. */
239 static inline AllocKind
240 GetGCArrayKind(size_t numSlots
)
243 * Dense arrays can use their fixed slots to hold their elements array
244 * (less two Values worth of ObjectElements header), but if more than the
245 * maximum number of fixed slots is needed then the fixed slots will be
248 JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER
== 2);
249 if (numSlots
> JSObject::NELEMENTS_LIMIT
|| numSlots
+ 2 >= SLOTS_TO_THING_KIND_LIMIT
)
250 return FINALIZE_OBJECT2
;
251 return slotsToThingKind
[numSlots
+ 2];
254 static inline AllocKind
255 GetGCObjectFixedSlotsKind(size_t numFixedSlots
)
257 JS_ASSERT(numFixedSlots
< SLOTS_TO_THING_KIND_LIMIT
);
258 return slotsToThingKind
[numFixedSlots
];
261 static inline AllocKind
262 GetBackgroundAllocKind(AllocKind kind
)
264 JS_ASSERT(!IsBackgroundFinalized(kind
));
265 JS_ASSERT(kind
<= FINALIZE_OBJECT_LAST
);
266 return (AllocKind
) (kind
+ 1);
270 * Try to get the next larger size for an object, keeping BACKGROUND
274 TryIncrementAllocKind(AllocKind
*kindp
)
276 size_t next
= size_t(*kindp
) + 2;
277 if (next
>= size_t(FINALIZE_OBJECT_LIMIT
))
279 *kindp
= AllocKind(next
);
283 /* Get the number of fixed slots and initial capacity associated with a kind. */
285 GetGCKindSlots(AllocKind thingKind
)
287 /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
289 case FINALIZE_OBJECT0
:
290 case FINALIZE_OBJECT0_BACKGROUND
:
292 case FINALIZE_OBJECT2
:
293 case FINALIZE_OBJECT2_BACKGROUND
:
295 case FINALIZE_OBJECT4
:
296 case FINALIZE_OBJECT4_BACKGROUND
:
298 case FINALIZE_OBJECT8
:
299 case FINALIZE_OBJECT8_BACKGROUND
:
301 case FINALIZE_OBJECT12
:
302 case FINALIZE_OBJECT12_BACKGROUND
:
304 case FINALIZE_OBJECT16
:
305 case FINALIZE_OBJECT16_BACKGROUND
:
308 MOZ_ASSUME_UNREACHABLE("Bad object finalize kind");
313 GetGCKindSlots(AllocKind thingKind
, const Class
*clasp
)
315 size_t nslots
= GetGCKindSlots(thingKind
);
317 /* An object's private data uses the space taken by its last fixed slot. */
318 if (clasp
->flags
& JSCLASS_HAS_PRIVATE
) {
319 JS_ASSERT(nslots
> 0);
324 * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve
325 * space for the extra fields in JSFunction, but have no fixed slots.
327 if (clasp
== FunctionClassPtr
)
333 // Class to assist in triggering background chunk allocation. This cannot be done
334 // while holding the GC or worker thread state lock due to lock ordering issues.
335 // As a result, the triggering is delayed using this class until neither of the
336 // above locks is held.
337 class AutoMaybeStartBackgroundAllocation
;
340 * A single segment of a SortedArenaList. Each segment has a head and a tail,
341 * which track the start and end of a segment for O(1) append and concatenation.
343 struct SortedArenaListSegment
353 bool isEmpty() const {
354 return tailp
== &head
;
357 // Appends |aheader| to this segment.
358 void append(ArenaHeader
*aheader
) {
360 JS_ASSERT_IF(head
, head
->getAllocKind() == aheader
->getAllocKind());
362 tailp
= &aheader
->next
;
365 // Points the tail of this segment at |aheader|, which may be null. Note
366 // that this does not change the tail itself, but merely which arena
367 // follows it. This essentially turns the tail into a cursor (see also the
368 // description of ArenaList), but from the perspective of a SortedArenaList
369 // this makes no difference.
370 void linkTo(ArenaHeader
*aheader
) {
376 * Arena lists have a head and a cursor. The cursor conceptually lies on arena
377 * boundaries, i.e. before the first arena, between two arenas, or after the
380 * Normally the arena following the cursor is the first arena in the list with
381 * some free things and all arenas before the cursor are fully allocated. (And
382 * if the cursor is at the end of the list, then all the arenas are full.)
384 * However, the arena currently being allocated from is considered full while
385 * its list of free spans is moved into the freeList. Therefore, during GC or
386 * cell enumeration, when an unallocated freeList is moved back to the arena,
387 * we can see an arena with some free cells before the cursor.
389 * Arenas following the cursor should not be full.
392 // The cursor is implemented via an indirect pointer, |cursorp_|, to allow
393 // for efficient list insertion at the cursor point and other list
396 // - If the list is empty: |head| is null, |cursorp_| points to |head|, and
397 // therefore |*cursorp_| is null.
399 // - If the list is not empty: |head| is non-null, and...
401 // - If the cursor is at the start of the list: |cursorp_| points to
402 // |head|, and therefore |*cursorp_| points to the first arena.
404 // - If cursor is at the end of the list: |cursorp_| points to the |next|
405 // field of the last arena, and therefore |*cursorp_| is null.
407 // - If the cursor is at neither the start nor the end of the list:
408 // |cursorp_| points to the |next| field of the arena preceding the
409 // cursor, and therefore |*cursorp_| points to the arena following the
412 // |cursorp_| is never null.
415 ArenaHeader
**cursorp_
;
417 void copy(const ArenaList
&other
) {
420 cursorp_
= other
.isCursorAtHead() ? &head_
: other
.cursorp_
;
429 ArenaList(const ArenaList
&other
) {
433 ArenaList
&operator=(const ArenaList
&other
) {
438 explicit ArenaList(const SortedArenaListSegment
&segment
) {
439 head_
= segment
.head
;
440 cursorp_
= segment
.isEmpty() ? &head_
: segment
.tailp
;
444 // This does checking just of |head_| and |cursorp_|.
447 // If the list is empty, it must have this form.
448 JS_ASSERT_IF(!head_
, cursorp_
== &head_
);
450 // If there's an arena following the cursor, it must not be full.
451 ArenaHeader
*cursor
= *cursorp_
;
452 JS_ASSERT_IF(cursor
, cursor
->hasFreeThings());
462 bool isEmpty() const {
467 // This returns nullptr if the list is empty.
468 ArenaHeader
*head() const {
473 bool isCursorAtHead() const {
475 return cursorp_
== &head_
;
478 bool isCursorAtEnd() const {
483 // This can return nullptr.
484 ArenaHeader
*arenaAfterCursor() const {
489 // This moves the cursor past |aheader|. |aheader| must be an arena within
491 void moveCursorPast(ArenaHeader
*aheader
) {
492 cursorp_
= &aheader
->next
;
496 // This does two things.
497 // - Inserts |a| at the cursor.
498 // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
499 // after |a|, if |a| is full.
501 void insertAtCursor(ArenaHeader
*a
) {
505 // At this point, the cursor is sitting before |a|. Move it after |a|
507 if (!a
->hasFreeThings())
512 // This inserts |other|, which must be full, at the cursor of |this|.
513 ArenaList
&insertListWithCursorAtEnd(const ArenaList
&other
) {
516 JS_ASSERT(other
.isCursorAtEnd());
517 if (other
.isCursorAtHead())
519 // Insert the full arenas of |other| after those of |this|.
520 *other
.cursorp_
= *cursorp_
;
521 *cursorp_
= other
.head_
;
522 cursorp_
= other
.cursorp_
;
527 #ifdef JSGC_COMPACTING
528 ArenaHeader
*pickArenasToRelocate();
529 ArenaHeader
*relocateArenas(ArenaHeader
*toRelocate
, ArenaHeader
*relocated
);
534 * A class that holds arenas in sorted order by appending arenas to specific
535 * segments. Each segment has a head and a tail, which can be linked up to
536 * other segments to create a contiguous ArenaList.
538 class SortedArenaList
541 // The minimum size, in bytes, of a GC thing.
542 static const size_t MinThingSize
= 16;
544 static_assert(ArenaSize
<= 4096, "When increasing the Arena size, please consider how"\
545 " this will affect the size of a SortedArenaList.");
547 static_assert(MinThingSize
>= 16, "When decreasing the minimum thing size, please consider"\
548 " how this will affect the size of a SortedArenaList.");
551 // The maximum number of GC things that an arena can hold.
552 static const size_t MaxThingsPerArena
= (ArenaSize
- sizeof(ArenaHeader
)) / MinThingSize
;
554 size_t thingsPerArena_
;
555 SortedArenaListSegment segments
[MaxThingsPerArena
+ 1];
557 // Convenience functions to get the nth head and tail.
558 ArenaHeader
*headAt(size_t n
) { return segments
[n
].head
; }
559 ArenaHeader
**tailAt(size_t n
) { return segments
[n
].tailp
; }
562 explicit SortedArenaList(size_t thingsPerArena
= MaxThingsPerArena
) {
563 reset(thingsPerArena
);
566 void setThingsPerArena(size_t thingsPerArena
) {
567 JS_ASSERT(thingsPerArena
&& thingsPerArena
<= MaxThingsPerArena
);
568 thingsPerArena_
= thingsPerArena
;
571 // Resets the first |thingsPerArena| segments of this list for further use.
572 void reset(size_t thingsPerArena
= MaxThingsPerArena
) {
573 setThingsPerArena(thingsPerArena
);
574 // Initialize the segments.
575 for (size_t i
= 0; i
<= thingsPerArena
; ++i
)
579 // Inserts a header, which has room for |nfree| more things, in its segment.
580 void insertAt(ArenaHeader
*aheader
, size_t nfree
) {
581 JS_ASSERT(nfree
<= thingsPerArena_
);
582 segments
[nfree
].append(aheader
);
585 // Links up the tail of each non-empty segment to the head of the next
586 // non-empty segment, creating a contiguous list that is returned as an
587 // ArenaList. This is not a destructive operation: neither the head nor tail
588 // of any segment is modified. However, note that the ArenaHeaders in the
589 // resulting ArenaList should be treated as read-only unless the
590 // SortedArenaList is no longer needed: inserting or removing arenas would
591 // invalidate the SortedArenaList.
592 ArenaList
toArenaList() {
593 // Link the non-empty segment tails up to the non-empty segment heads.
594 size_t tailIndex
= 0;
595 for (size_t headIndex
= 1; headIndex
<= thingsPerArena_
; ++headIndex
) {
596 if (headAt(headIndex
)) {
597 segments
[tailIndex
].linkTo(headAt(headIndex
));
598 tailIndex
= headIndex
;
601 // Point the tail of the final non-empty segment at null. Note that if
602 // the list is empty, this will just set segments[0].head to null.
603 segments
[tailIndex
].linkTo(nullptr);
604 // Create an ArenaList with head and cursor set to the head and tail of
605 // the first segment (if that segment is empty, only the head is used).
606 return ArenaList(segments
[0]);
613 * For each arena kind its free list is represented as the first span with
614 * free things. Initially all the spans are initialized as empty. After we
615 * find a new arena with available things we move its first free span into
616 the list and set the arena as fully allocated. This way we do not need to
617 * update the arena header after the initial allocation. When starting the
618 GC we only move the head of the list of spans back to the arena
619 * only for the arena that was not fully allocated.
621 FreeList freeLists
[FINALIZE_LIMIT
];
623 ArenaList arenaLists
[FINALIZE_LIMIT
];
626 * The background finalization adds the finalized arenas to the list at
627 * the cursor position. backgroundFinalizeState controls the interaction
628 * between the GC lock and the access to the list from the allocation
631 * BFS_DONE indicates that the finalization is not running or cannot
632 * affect this arena list. The allocation thread can access the list
633 * outside the GC lock.
635 * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
636 * lock. The former indicates that the finalization still runs. The latter
637 * signals that finalization just added to the list finalized arenas. In
638 * that case the lock effectively serves as a read barrier to ensure that
639 * the allocation thread sees all the writes done during finalization.
641 enum BackgroundFinalizeStateEnum
{
647 typedef mozilla::Atomic
<BackgroundFinalizeStateEnum
, mozilla::ReleaseAcquire
>
648 BackgroundFinalizeState
;
650 BackgroundFinalizeState backgroundFinalizeState
[FINALIZE_LIMIT
];
653 /* For each arena kind, a list of arenas remaining to be swept. */
654 ArenaHeader
*arenaListsToSweep
[FINALIZE_LIMIT
];
656 /* During incremental sweeping, a list of the arenas already swept. */
657 unsigned incrementalSweptArenaKind
;
658 ArenaList incrementalSweptArenas
;
660 /* Shape arenas to be swept in the foreground. */
661 ArenaHeader
*gcShapeArenasToSweep
;
665 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
666 freeLists
[i
].initAsEmpty();
667 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
668 backgroundFinalizeState
[i
] = BFS_DONE
;
669 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
670 arenaListsToSweep
[i
] = nullptr;
671 incrementalSweptArenaKind
= FINALIZE_LIMIT
;
672 gcShapeArenasToSweep
= nullptr;
676 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
) {
678 * We can only call this during the shutdown after the last GC when
679 * the background finalization is disabled.
681 JS_ASSERT(backgroundFinalizeState
[i
] == BFS_DONE
);
683 for (ArenaHeader
*aheader
= arenaLists
[i
].head(); aheader
; aheader
= next
) {
684 // Copy aheader->next before releasing.
685 next
= aheader
->next
;
686 aheader
->chunk()->releaseArena(aheader
);
690 for (ArenaHeader
*aheader
= incrementalSweptArenas
.head(); aheader
; aheader
= next
) {
691 // Copy aheader->next before releasing.
692 next
= aheader
->next
;
693 aheader
->chunk()->releaseArena(aheader
);
697 static uintptr_t getFreeListOffset(AllocKind thingKind
) {
698 uintptr_t offset
= offsetof(ArenaLists
, freeLists
);
699 return offset
+ thingKind
* sizeof(FreeList
);
702 const FreeList
*getFreeList(AllocKind thingKind
) const {
703 return &freeLists
[thingKind
];
706 ArenaHeader
*getFirstArena(AllocKind thingKind
) const {
707 return arenaLists
[thingKind
].head();
710 ArenaHeader
*getFirstArenaToSweep(AllocKind thingKind
) const {
711 return arenaListsToSweep
[thingKind
];
714 ArenaHeader
*getFirstSweptArena(AllocKind thingKind
) const {
715 if (thingKind
!= incrementalSweptArenaKind
)
717 return incrementalSweptArenas
.head();
720 ArenaHeader
*getArenaAfterCursor(AllocKind thingKind
) const {
721 return arenaLists
[thingKind
].arenaAfterCursor();
724 bool arenaListsAreEmpty() const {
725 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
) {
727 * The arena cannot be empty if the background finalization is not yet
730 if (backgroundFinalizeState
[i
] != BFS_DONE
)
732 if (!arenaLists
[i
].isEmpty())
739 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
) {
740 /* The background finalization must have stopped at this point. */
741 JS_ASSERT(backgroundFinalizeState
[i
] == BFS_DONE
||
742 backgroundFinalizeState
[i
] == BFS_JUST_FINISHED
);
743 for (ArenaHeader
*aheader
= arenaLists
[i
].head(); aheader
; aheader
= aheader
->next
) {
744 uintptr_t *word
= aheader
->chunk()->bitmap
.arenaBits(aheader
);
745 memset(word
, 0, ArenaBitmapWords
* sizeof(uintptr_t));
750 bool doneBackgroundFinalize(AllocKind kind
) const {
751 return backgroundFinalizeState
[kind
] == BFS_DONE
||
752 backgroundFinalizeState
[kind
] == BFS_JUST_FINISHED
;
755 bool needBackgroundFinalizeWait(AllocKind kind
) const {
756 return backgroundFinalizeState
[kind
] != BFS_DONE
;
760 * Return the free list back to the arena so the GC finalization will not
761 run the finalizers over uninitialized bytes from free things.
764 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
768 void purge(AllocKind i
) {
769 FreeList
*freeList
= &freeLists
[i
];
770 if (!freeList
->isEmpty()) {
771 ArenaHeader
*aheader
= freeList
->arenaHeader();
772 aheader
->setFirstFreeSpan(freeList
->getHead());
773 freeList
->initAsEmpty();
777 inline void prepareForIncrementalGC(JSRuntime
*rt
);
780 * Temporarily copy the free list heads to the arenas so the code can see
781 * the proper value in ArenaHeader::freeList when accessing the latter
784 void copyFreeListsToArenas() {
785 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
786 copyFreeListToArena(AllocKind(i
));
789 void copyFreeListToArena(AllocKind thingKind
) {
790 FreeList
*freeList
= &freeLists
[thingKind
];
791 if (!freeList
->isEmpty()) {
792 ArenaHeader
*aheader
= freeList
->arenaHeader();
793 JS_ASSERT(!aheader
->hasFreeThings());
794 aheader
->setFirstFreeSpan(freeList
->getHead());
799 * Clear the free lists in arenas that were temporarily set there using
802 void clearFreeListsInArenas() {
803 for (size_t i
= 0; i
!= FINALIZE_LIMIT
; ++i
)
804 clearFreeListInArena(AllocKind(i
));
807 void clearFreeListInArena(AllocKind kind
) {
808 FreeList
*freeList
= &freeLists
[kind
];
809 if (!freeList
->isEmpty()) {
810 ArenaHeader
*aheader
= freeList
->arenaHeader();
811 JS_ASSERT(freeList
->isSameNonEmptySpan(aheader
->getFirstFreeSpan()));
812 aheader
->setAsFullyUsed();
817 * Check that the free list is either empty or was synchronized with the
818 * arena using copyToArena().
820 bool isSynchronizedFreeList(AllocKind kind
) {
821 FreeList
*freeList
= &freeLists
[kind
];
822 if (freeList
->isEmpty())
824 ArenaHeader
*aheader
= freeList
->arenaHeader();
825 if (aheader
->hasFreeThings()) {
827 * If the arena has a free list, it must be the same as one in
830 JS_ASSERT(freeList
->isSameNonEmptySpan(aheader
->getFirstFreeSpan()));
836 /* Check if |aheader|'s arena is in use. */
837 bool arenaIsInUse(ArenaHeader
*aheader
, AllocKind kind
) const {
839 const FreeList
&freeList
= freeLists
[kind
];
840 if (freeList
.isEmpty())
842 return aheader
== freeList
.arenaHeader();
845 MOZ_ALWAYS_INLINE
void *allocateFromFreeList(AllocKind thingKind
, size_t thingSize
) {
846 return freeLists
[thingKind
].allocate(thingSize
);
849 template <AllowGC allowGC
>
850 static void *refillFreeList(ThreadSafeContext
*cx
, AllocKind thingKind
);
852 static void *refillFreeListInGC(Zone
*zone
, AllocKind thingKind
);
855 * Moves all arenas from |fromArenaLists| into |this|. In
856 * parallel blocks, we temporarily create one ArenaLists per
857 * parallel thread. When the parallel block ends, we move
858 * whatever allocations may have been performed back into the
859 * compartment's main arena list using this function.
861 void adoptArenas(JSRuntime
*runtime
, ArenaLists
*fromArenaLists
);
863 /* True if the ArenaHeader in question is found in this ArenaLists */
864 bool containsArena(JSRuntime
*runtime
, ArenaHeader
*arenaHeader
);
866 void checkEmptyFreeLists() {
868 for (size_t i
= 0; i
< mozilla::ArrayLength(freeLists
); ++i
)
869 JS_ASSERT(freeLists
[i
].isEmpty());
873 void checkEmptyFreeList(AllocKind kind
) {
874 JS_ASSERT(freeLists
[kind
].isEmpty());
877 #ifdef JSGC_COMPACTING
878 ArenaHeader
*relocateArenas(ArenaHeader
*relocatedList
);
881 void queueObjectsForSweep(FreeOp
*fop
);
882 void queueStringsAndSymbolsForSweep(FreeOp
*fop
);
883 void queueShapesForSweep(FreeOp
*fop
);
884 void queueScriptsForSweep(FreeOp
*fop
);
885 void queueJitCodeForSweep(FreeOp
*fop
);
887 bool foregroundFinalize(FreeOp
*fop
, AllocKind thingKind
, SliceBudget
&sliceBudget
,
888 SortedArenaList
&sweepList
);
889 static void backgroundFinalize(FreeOp
*fop
, ArenaHeader
*listHead
, bool onBackgroundThread
);
891 void wipeDuringParallelExecution(JSRuntime
*rt
);
894 inline void finalizeNow(FreeOp
*fop
, AllocKind thingKind
);
895 inline void forceFinalizeNow(FreeOp
*fop
, AllocKind thingKind
);
896 inline void queueForForegroundSweep(FreeOp
*fop
, AllocKind thingKind
);
897 inline void queueForBackgroundSweep(FreeOp
*fop
, AllocKind thingKind
);
899 void *allocateFromArena(JS::Zone
*zone
, AllocKind thingKind
);
900 inline void *allocateFromArenaInline(JS::Zone
*zone
, AllocKind thingKind
,
901 AutoMaybeStartBackgroundAllocation
&maybeStartBackgroundAllocation
);
903 inline void normalizeBackgroundFinalizeState(AllocKind thingKind
);
905 friend class js::Nursery
;
906 friend class js::gc::ForkJoinNursery
;
910 * Initial allocation size for data structures holding chunks is set to hold
911 * chunks with total capacity of 16MB to avoid buffer resizes during browser
914 const size_t INITIAL_CHUNK_CAPACITY
= 16 * 1024 * 1024 / ChunkSize
;
916 /* The number of GC cycles an empty chunk can survive before been released. */
917 const size_t MAX_EMPTY_CHUNK_AGE
= 4;
921 typedef enum JSGCRootType
{
922 JS_GC_ROOT_VALUE_PTR
,
923 JS_GC_ROOT_STRING_PTR
,
924 JS_GC_ROOT_OBJECT_PTR
,
925 JS_GC_ROOT_SCRIPT_PTR
930 RootInfo(const char *name
, JSGCRootType type
) : name(name
), type(type
) {}
935 typedef js::HashMap
<void *,
937 js::DefaultHasher
<void *>,
938 js::SystemAllocPolicy
> RootedValueMap
;
941 AddValueRoot(JSContext
*cx
, js::Value
*vp
, const char *name
);
944 AddValueRootRT(JSRuntime
*rt
, js::Value
*vp
, const char *name
);
947 AddStringRoot(JSContext
*cx
, JSString
**rp
, const char *name
);
950 AddObjectRoot(JSContext
*cx
, JSObject
**rp
, const char *name
);
953 AddObjectRoot(JSRuntime
*rt
, JSObject
**rp
, const char *name
);
956 AddScriptRoot(JSContext
*cx
, JSScript
**rp
, const char *name
);
959 RemoveRoot(JSRuntime
*rt
, void *rp
);
964 js_InitGC(JSRuntime
*rt
, uint32_t maxbytes
);
967 js_FinishGC(JSRuntime
*rt
);
971 class InterpreterFrame
;
974 MarkCompartmentActive(js::InterpreterFrame
*fp
);
977 TraceRuntime(JSTracer
*trc
);
980 ReleaseAllJITCode(FreeOp
*op
);
983 * Kinds of js_GC invocation.
985 typedef enum JSGCInvocationKind
{
986 /* Normal invocation. */
989 /* Minimize GC triggers and release empty GC chunks right away. */
991 } JSGCInvocationKind
;
994 PrepareForDebugGC(JSRuntime
*rt
);
996 /* Functions for managing cross compartment gray pointers. */
999 DelayCrossCompartmentGrayMarking(JSObject
*src
);
1002 NotifyGCNukeWrapper(JSObject
*o
);
1005 NotifyGCPreSwap(JSObject
*a
, JSObject
*b
);
1008 NotifyGCPostSwap(JSObject
*a
, JSObject
*b
, unsigned preResult
);
1011 * Helper state for use when JS helper threads sweep and allocate GC thing kinds
1012 * that can be swept and allocated off the main thread.
1014 * In non-threadsafe builds, all actual sweeping and allocation is performed
1015 * on the main thread, but GCHelperState encapsulates this from clients as
1027 // Associated runtime.
1028 JSRuntime
*const rt
;
1030 // Condvar for notifying the main thread when work has finished. This is
1031 // associated with the runtime's GC lock --- the worker thread state
1032 // condvars can't be used here due to lock ordering issues.
1035 // Activity for the helper to do, protected by the GC lock.
1038 // Thread which work is being performed on, or null.
1041 void startBackgroundThread(State newState
);
1042 void waitForBackgroundThread();
1045 void setState(State state
);
1050 bool backgroundAllocation
;
1052 friend class js::gc::ArenaLists
;
1054 static void freeElementsAndArray(void **array
, void **end
) {
1055 JS_ASSERT(array
<= end
);
1056 for (void **p
= array
; p
!= end
; ++p
)
1061 /* Must be called with the GC lock taken. */
1065 explicit GCHelperState(JSRuntime
*rt
)
1072 backgroundAllocation(true)
1080 /* Must be called with the GC lock taken. */
1081 void startBackgroundSweep(bool shouldShrink
);
1083 /* Must be called with the GC lock taken. */
1084 void startBackgroundShrink();
1086 /* Must be called without the GC lock taken. */
1087 void waitBackgroundSweepEnd();
1089 /* Must be called without the GC lock taken. */
1090 void waitBackgroundSweepOrAllocEnd();
1092 /* Must be called with the GC lock taken. */
1093 void startBackgroundAllocationIfIdle();
1095 bool canBackgroundAllocate() const {
1096 return backgroundAllocation
;
1099 void disableBackgroundAllocation() {
1100 backgroundAllocation
= false;
1103 bool onBackgroundThread();
1106 * Outside the GC lock this may give a true answer when in fact the sweeping has
1109 bool isBackgroundSweeping() const {
1110 return state_
== SWEEPING
;
1113 bool shouldShrink() const {
1114 JS_ASSERT(isBackgroundSweeping());
1119 struct GCChunkHasher
{
1120 typedef gc::Chunk
*Lookup
;
1123 * Strip zeros for better distribution after multiplying by the golden
1126 static HashNumber
hash(gc::Chunk
*chunk
) {
1127 JS_ASSERT(!(uintptr_t(chunk
) & gc::ChunkMask
));
1128 return HashNumber(uintptr_t(chunk
) >> gc::ChunkShift
);
1131 static bool match(gc::Chunk
*k
, gc::Chunk
*l
) {
1132 JS_ASSERT(!(uintptr_t(k
) & gc::ChunkMask
));
1133 JS_ASSERT(!(uintptr_t(l
) & gc::ChunkMask
));
1138 typedef HashSet
<js::gc::Chunk
*, GCChunkHasher
, SystemAllocPolicy
> GCChunkSet
;
// Fields for printing a diagnostic name for this root; the debug* prefix
// suggests debug-only use — confirm against the full struct definition.
1144 JSTraceNamePrinter debugPrinter
;
1145 const void *debugPrintArg
;
1146 size_t debugPrintIndex
;
// Record a root at |thing| together with its trace kind.
1149 GrayRoot(void *thing
, JSGCTraceKind kind
)
1150 : thing(thing
), kind(kind
) {}
1154 MarkStackRangeConservatively(JSTracer
*trc
, Value
*begin
, Value
*end
);
1156 typedef void (*IterateChunkCallback
)(JSRuntime
*rt
, void *data
, gc::Chunk
*chunk
);
1157 typedef void (*IterateZoneCallback
)(JSRuntime
*rt
, void *data
, JS::Zone
*zone
);
1158 typedef void (*IterateArenaCallback
)(JSRuntime
*rt
, void *data
, gc::Arena
*arena
,
1159 JSGCTraceKind traceKind
, size_t thingSize
);
1160 typedef void (*IterateCellCallback
)(JSRuntime
*rt
, void *data
, void *thing
,
1161 JSGCTraceKind traceKind
, size_t thingSize
);
1164 * This function calls |zoneCallback| on every zone, |compartmentCallback| on
1165 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
1166 * on every in-use cell in the GC heap.
1169 IterateZonesCompartmentsArenasCells(JSRuntime
*rt
, void *data
,
1170 IterateZoneCallback zoneCallback
,
1171 JSIterateCompartmentCallback compartmentCallback
,
1172 IterateArenaCallback arenaCallback
,
1173 IterateCellCallback cellCallback
);
1176 * This function is like IterateZonesCompartmentsArenasCells, but does it for a
1180 IterateZoneCompartmentsArenasCells(JSRuntime
*rt
, Zone
*zone
, void *data
,
1181 IterateZoneCallback zoneCallback
,
1182 JSIterateCompartmentCallback compartmentCallback
,
1183 IterateArenaCallback arenaCallback
,
1184 IterateCellCallback cellCallback
);
1187 * Invoke chunkCallback on every in-use chunk.
1190 IterateChunks(JSRuntime
*rt
, void *data
, IterateChunkCallback chunkCallback
);
1192 typedef void (*IterateScriptCallback
)(JSRuntime
*rt
, void *data
, JSScript
*script
);
1195 * Invoke scriptCallback on every in-use script for
1196 * the given compartment or for all compartments if it is null.
1199 IterateScripts(JSRuntime
*rt
, JSCompartment
*compartment
,
1200 void *data
, IterateScriptCallback scriptCallback
);
1202 } /* namespace js */
1205 js_FinalizeStringRT(JSRuntime
*rt
, JSString
*str
);
1210 NewCompartment(JSContext
*cx
, JS::Zone
*zone
, JSPrincipals
*principals
,
1211 const JS::CompartmentOptions
&options
);
1216 * Merge all contents of source into target. This can only be used if source is
1217 * the only compartment in its zone.
1220 MergeCompartments(JSCompartment
*source
, JSCompartment
*target
);
1222 #ifdef JSGC_COMPACTING
1224 /* Functions for checking and updating things that might be moved by compacting GC. */
// Sentinel written into a forwarded cell's second word. Two definitions of
// different width appear here; presumably a word-size #if selects between the
// 8-byte and 4-byte patterns — NOTE(review): the guard lines are missing from
// this view, confirm against the original file.
1227 const uintptr_t ForwardedCellMagicValue
= 0xf1f1f1f1f1f1f1f1;
1229 const uintptr_t ForwardedCellMagicValue
= 0xf1f1f1f1;
1232 template <typename T
>
1236 static_assert(mozilla::IsBaseOf
<Cell
, T
>::value
, "T must be a subclass of Cell");
1237 uintptr_t *ptr
= reinterpret_cast<uintptr_t *>(t
);
1238 return ptr
[1] == ForwardedCellMagicValue
;
1242 IsForwarded(const JS::Value
&value
)
1244 if (value
.isObject())
1245 return IsForwarded(&value
.toObject());
1247 if (value
.isString())
1248 return IsForwarded(value
.toString());
1250 if (value
.isSymbol())
1251 return IsForwarded(value
.toSymbol());
1253 JS_ASSERT(!value
.isGCThing());
1257 template <typename T
>
1261 JS_ASSERT(IsForwarded(t
));
1262 uintptr_t *ptr
= reinterpret_cast<uintptr_t *>(t
);
1263 return reinterpret_cast<T
*>(ptr
[0]);
1267 Forwarded(const JS::Value
&value
)
1269 if (value
.isObject())
1270 return ObjectValue(*Forwarded(&value
.toObject()));
1271 else if (value
.isString())
1272 return StringValue(Forwarded(value
.toString()));
1273 else if (value
.isSymbol())
1274 return SymbolValue(Forwarded(value
.toSymbol()));
1276 JS_ASSERT(!value
.isGCThing());
1280 template <typename T
>
1284 return IsForwarded(t
) ? Forwarded(t
) : t
;
// Non-compacting builds: cells never move, so a cell is never forwarded.
template <typename T>
inline bool
IsForwarded(T thing)
{
    return false;
}

// Identity: without compaction a cell always lives at its original address.
template <typename T>
inline T
Forwarded(T thing)
{
    return thing;
}

// Identity: see Forwarded above.
template <typename T>
inline T
MaybeForwarded(T thing)
{
    return thing;
}
1293 #endif // JSGC_COMPACTING
1295 #ifdef JSGC_HASH_TABLE_CHECKS
1297 template <typename T
>
1299 CheckGCThingAfterMovingGC(T
*t
)
1301 JS_ASSERT_IF(t
, !IsInsideNursery(t
));
1302 #ifdef JSGC_COMPACTING
1303 JS_ASSERT_IF(t
, !IsForwarded(t
));
1308 CheckValueAfterMovingGC(const JS::Value
& value
)
1310 if (value
.isObject())
1311 return CheckGCThingAfterMovingGC(&value
.toObject());
1312 else if (value
.isString())
1313 return CheckGCThingAfterMovingGC(value
.toString());
1314 else if (value
.isSymbol())
1315 return CheckGCThingAfterMovingGC(value
.toSymbol());
1318 #endif // JSGC_HASH_TABLE_CHECKS
/*
 * GC zeal mode numbers. Each constant selects a debugging schedule for
 * forcing collections or verification passes (see the zeal handling in
 * jsgc.cpp — TODO confirm exact semantics of each mode there).
 */
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingValue = 6;
const int ZealGenerationalGCValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealCheckHashTablesOnMinorGC = 13;
const int ZealCompactValue = 14;
// Highest valid zeal mode; keep equal to the largest value above.
const int ZealLimit = 14;
1343 /* Check that write barriers have been used correctly. See jsgc.cpp. */
1345 VerifyBarriers(JSRuntime
*rt
, VerifierType type
);
1348 MaybeVerifyBarriers(JSContext
*cx
, bool always
= false);
1353 VerifyBarriers(JSRuntime
*rt
, VerifierType type
)
1358 MaybeVerifyBarriers(JSContext
*cx
, bool always
= false)
1365 * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
1366 * that they are live. Use of this class is highly discouraged. Please carefully
1367 * read the comment in jscntxt.h above |suppressGC| and take all appropriate
1368 * precautions before instantiating this class.
1370 class AutoSuppressGC
// Reference to the runtime's |suppressGC| counter manipulated for the
// lifetime of this guard (see the class comment above).
1372 int32_t &suppressGC_
;
// Each overload reaches the owning runtime's counter via a different handle.
1375 explicit AutoSuppressGC(ExclusiveContext
*cx
);
1376 explicit AutoSuppressGC(JSCompartment
*comp
);
1377 explicit AutoSuppressGC(JSRuntime
*rt
);
1386 /* Disable OOM testing in sections which are not OOM safe. */
1387 class AutoEnterOOMUnsafeRegion
// RAII: the constructor saves the current OOM_maxAllocations limit and raises
// it to UINT32_MAX so simulated OOM cannot fire inside this region.
1392 AutoEnterOOMUnsafeRegion() : saved_(OOM_maxAllocations
) {
1393 OOM_maxAllocations
= UINT32_MAX
;
// The destructor restores the saved limit on exit from the region.
1395 ~AutoEnterOOMUnsafeRegion() {
1396 OOM_maxAllocations
= saved_
;
// Builds without OOM testing: empty stub with no behavior.
1400 class AutoEnterOOMUnsafeRegion
{};
1403 // This tests whether something is inside the GGC's nursery only;
1404 // use sparingly, mostly testing for any nursery, using IsInsideNursery,
1407 IsInsideGGCNursery(const gc::Cell
*cell
);
1409 } /* namespace gc */
1412 /* Use this to avoid assertions when manipulating the wrapper map. */
1413 class AutoDisableProxyCheck
1415 MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
;
1419 explicit AutoDisableProxyCheck(JSRuntime
*rt
1420 MOZ_GUARD_OBJECT_NOTIFIER_PARAM
);
1421 ~AutoDisableProxyCheck();
1424 struct AutoDisableProxyCheck
1426 explicit AutoDisableProxyCheck(JSRuntime
*rt
) {}
// RAII guard that prevents compacting GC for its lifetime.
1430 struct AutoDisableCompactingGC
1432 #ifdef JSGC_COMPACTING
// Declarations only; the real implementation lives out of line.
1433 explicit AutoDisableCompactingGC(JSRuntime
*rt
);
1434 ~AutoDisableCompactingGC();
// Non-compacting builds: nothing to disable, so these are no-ops.
1439 explicit AutoDisableCompactingGC(JSRuntime
*rt
) {}
1440 ~AutoDisableCompactingGC() {}
1445 PurgeJITCaches(JS::Zone
*zone
);
1447 // This is the same as IsInsideNursery, but not inlined.
1449 UninlinedIsInsideNursery(const gc::Cell
*cell
);
1451 } /* namespace js */