Bumping manifests a=b2g-bump
[gecko.git] / js / src / jsgc.h
blob624480e12981c023e4e5e9e26b4944e7b5672f1b
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 /* JS Garbage Collector. */
9 #ifndef jsgc_h
10 #define jsgc_h
12 #include "mozilla/Atomics.h"
13 #include "mozilla/DebugOnly.h"
14 #include "mozilla/MemoryReporting.h"
15 #include "mozilla/TypeTraits.h"
17 #include "jslock.h"
18 #include "jsobj.h"
20 #include "js/GCAPI.h"
21 #include "js/SliceBudget.h"
22 #include "js/Vector.h"
24 namespace js {
26 namespace gc {
27 class ForkJoinNursery;
30 unsigned GetCPUCount();
/* What the GC heap is currently being used for, if anything. */
enum HeapState {
    Idle,             // doing nothing with the GC heap
    Tracing,          // tracing the GC heap without collecting, e.g. IterateCompartments()
    MajorCollecting,  // doing a GC of the major heap
    MinorCollecting   // doing a GC of the minor heap (nursery)
};
39 namespace jit {
40 class JitCode;
43 namespace gc {
/* Phases of an incremental GC, in the order they occur. */
enum State {
    NO_INCREMENTAL,
    MARK_ROOTS,
    MARK,
    SWEEP,
#ifdef JSGC_COMPACTING
    COMPACT
#endif
};
55 static inline JSGCTraceKind
56 MapAllocToTraceKind(AllocKind kind)
58 static const JSGCTraceKind map[] = {
59 JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
60 JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
61 JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
62 JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
63 JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
64 JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
65 JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
66 JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
67 JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
68 JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
69 JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
70 JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
71 JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
72 JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
73 JSTRACE_SHAPE, /* FINALIZE_SHAPE */
74 JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
75 JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
76 JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
77 JSTRACE_STRING, /* FINALIZE_STRING */
78 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
79 JSTRACE_SYMBOL, /* FINALIZE_SYMBOL */
80 JSTRACE_JITCODE, /* FINALIZE_JITCODE */
82 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
83 return map[kind];
86 /* Return a printable string for the given kind, for diagnostic purposes. */
87 const char *
88 TraceKindAsAscii(JSGCTraceKind kind);
90 /* Map from C++ type to finalize kind. JSObject does not have a 1:1 mapping, so must use Arena::thingSize. */
91 template <typename T> struct MapTypeToFinalizeKind {};
92 template <> struct MapTypeToFinalizeKind<JSScript> { static const AllocKind kind = FINALIZE_SCRIPT; };
93 template <> struct MapTypeToFinalizeKind<LazyScript> { static const AllocKind kind = FINALIZE_LAZY_SCRIPT; };
94 template <> struct MapTypeToFinalizeKind<Shape> { static const AllocKind kind = FINALIZE_SHAPE; };
95 template <> struct MapTypeToFinalizeKind<BaseShape> { static const AllocKind kind = FINALIZE_BASE_SHAPE; };
96 template <> struct MapTypeToFinalizeKind<types::TypeObject> { static const AllocKind kind = FINALIZE_TYPE_OBJECT; };
97 template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = FINALIZE_FAT_INLINE_STRING; };
98 template <> struct MapTypeToFinalizeKind<JSString> { static const AllocKind kind = FINALIZE_STRING; };
99 template <> struct MapTypeToFinalizeKind<JSExternalString> { static const AllocKind kind = FINALIZE_EXTERNAL_STRING; };
100 template <> struct MapTypeToFinalizeKind<JS::Symbol> { static const AllocKind kind = FINALIZE_SYMBOL; };
101 template <> struct MapTypeToFinalizeKind<jit::JitCode> { static const AllocKind kind = FINALIZE_JITCODE; };
#if defined(JSGC_GENERATIONAL) || defined(DEBUG)
/*
 * Whether things of the given AllocKind may be allocated in the generational
 * nursery. Per the table, only the background-finalized object kinds qualify.
 */
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
        false,     /* FINALIZE_FAT_INLINE_STRING */
        false,     /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_SYMBOL */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
#endif
#if defined(JSGC_FJGENERATIONAL)
// This is separate from IsNurseryAllocable() so that the latter can evolve
// without worrying about what the ForkJoinNursery's needs are, and vice
// versa to some extent. The tables currently agree entry-for-entry.
static inline bool
IsFJNurseryAllocable(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
        false,     /* FINALIZE_FAT_INLINE_STRING */
        false,     /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_SYMBOL */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
#endif
174 static inline bool
175 IsBackgroundFinalized(AllocKind kind)
177 JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
178 static const bool map[] = {
179 false, /* FINALIZE_OBJECT0 */
180 true, /* FINALIZE_OBJECT0_BACKGROUND */
181 false, /* FINALIZE_OBJECT2 */
182 true, /* FINALIZE_OBJECT2_BACKGROUND */
183 false, /* FINALIZE_OBJECT4 */
184 true, /* FINALIZE_OBJECT4_BACKGROUND */
185 false, /* FINALIZE_OBJECT8 */
186 true, /* FINALIZE_OBJECT8_BACKGROUND */
187 false, /* FINALIZE_OBJECT12 */
188 true, /* FINALIZE_OBJECT12_BACKGROUND */
189 false, /* FINALIZE_OBJECT16 */
190 true, /* FINALIZE_OBJECT16_BACKGROUND */
191 false, /* FINALIZE_SCRIPT */
192 false, /* FINALIZE_LAZY_SCRIPT */
193 true, /* FINALIZE_SHAPE */
194 true, /* FINALIZE_BASE_SHAPE */
195 true, /* FINALIZE_TYPE_OBJECT */
196 true, /* FINALIZE_FAT_INLINE_STRING */
197 true, /* FINALIZE_STRING */
198 false, /* FINALIZE_EXTERNAL_STRING */
199 true, /* FINALIZE_SYMBOL */
200 false, /* FINALIZE_JITCODE */
202 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
203 return map[kind];
206 static inline bool
207 CanBeFinalizedInBackground(gc::AllocKind kind, const Class *clasp)
209 JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
210 /* If the class has no finalizer or a finalizer that is safe to call on
211 * a different thread, we change the finalize kind. For example,
212 * FINALIZE_OBJECT0 calls the finalizer on the main thread,
213 * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
214 * IsBackgroundFinalized is called to prevent recursively incrementing
215 * the finalize kind; kind may already be a background finalize kind.
217 return (!gc::IsBackgroundFinalized(kind) &&
218 (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
221 inline JSGCTraceKind
222 GetGCThingTraceKind(const void *thing);
224 /* Capacity for slotsToThingKind */
225 const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
227 extern const AllocKind slotsToThingKind[];
229 /* Get the best kind to use when making an object with the given slot count. */
230 static inline AllocKind
231 GetGCObjectKind(size_t numSlots)
233 if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
234 return FINALIZE_OBJECT16;
235 return slotsToThingKind[numSlots];
238 /* As for GetGCObjectKind, but for dense array allocation. */
239 static inline AllocKind
240 GetGCArrayKind(size_t numSlots)
243 * Dense arrays can use their fixed slots to hold their elements array
244 * (less two Values worth of ObjectElements header), but if more than the
245 * maximum number of fixed slots is needed then the fixed slots will be
246 * unused.
248 JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
249 if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT)
250 return FINALIZE_OBJECT2;
251 return slotsToThingKind[numSlots + 2];
254 static inline AllocKind
255 GetGCObjectFixedSlotsKind(size_t numFixedSlots)
257 JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
258 return slotsToThingKind[numFixedSlots];
261 static inline AllocKind
262 GetBackgroundAllocKind(AllocKind kind)
264 JS_ASSERT(!IsBackgroundFinalized(kind));
265 JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
266 return (AllocKind) (kind + 1);
270 * Try to get the next larger size for an object, keeping BACKGROUND
271 * consistent.
273 static inline bool
274 TryIncrementAllocKind(AllocKind *kindp)
276 size_t next = size_t(*kindp) + 2;
277 if (next >= size_t(FINALIZE_OBJECT_LIMIT))
278 return false;
279 *kindp = AllocKind(next);
280 return true;
283 /* Get the number of fixed slots and initial capacity associated with a kind. */
284 static inline size_t
285 GetGCKindSlots(AllocKind thingKind)
287 /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
288 switch (thingKind) {
289 case FINALIZE_OBJECT0:
290 case FINALIZE_OBJECT0_BACKGROUND:
291 return 0;
292 case FINALIZE_OBJECT2:
293 case FINALIZE_OBJECT2_BACKGROUND:
294 return 2;
295 case FINALIZE_OBJECT4:
296 case FINALIZE_OBJECT4_BACKGROUND:
297 return 4;
298 case FINALIZE_OBJECT8:
299 case FINALIZE_OBJECT8_BACKGROUND:
300 return 8;
301 case FINALIZE_OBJECT12:
302 case FINALIZE_OBJECT12_BACKGROUND:
303 return 12;
304 case FINALIZE_OBJECT16:
305 case FINALIZE_OBJECT16_BACKGROUND:
306 return 16;
307 default:
308 MOZ_ASSUME_UNREACHABLE("Bad object finalize kind");
312 static inline size_t
313 GetGCKindSlots(AllocKind thingKind, const Class *clasp)
315 size_t nslots = GetGCKindSlots(thingKind);
317 /* An object's private data uses the space taken by its last fixed slot. */
318 if (clasp->flags & JSCLASS_HAS_PRIVATE) {
319 JS_ASSERT(nslots > 0);
320 nslots--;
324 * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve
325 * space for the extra fields in JSFunction, but have no fixed slots.
327 if (clasp == FunctionClassPtr)
328 nslots = 0;
330 return nslots;
333 // Class to assist in triggering background chunk allocation. This cannot be done
334 // while holding the GC or worker thread state lock due to lock ordering issues.
335 // As a result, the triggering is delayed using this class until neither of the
336 // above locks is held.
337 class AutoMaybeStartBackgroundAllocation;
340 * A single segment of a SortedArenaList. Each segment has a head and a tail,
341 * which track the start and end of a segment for O(1) append and concatenation.
343 struct SortedArenaListSegment
345 ArenaHeader *head;
346 ArenaHeader **tailp;
348 void clear() {
349 head = nullptr;
350 tailp = &head;
353 bool isEmpty() const {
354 return tailp == &head;
357 // Appends |aheader| to this segment.
358 void append(ArenaHeader *aheader) {
359 JS_ASSERT(aheader);
360 JS_ASSERT_IF(head, head->getAllocKind() == aheader->getAllocKind());
361 *tailp = aheader;
362 tailp = &aheader->next;
365 // Points the tail of this segment at |aheader|, which may be null. Note
366 // that this does not change the tail itself, but merely which arena
367 // follows it. This essentially turns the tail into a cursor (see also the
368 // description of ArenaList), but from the perspective of a SortedArenaList
369 // this makes no difference.
370 void linkTo(ArenaHeader *aheader) {
371 *tailp = aheader;
376 * Arena lists have a head and a cursor. The cursor conceptually lies on arena
377 * boundaries, i.e. before the first arena, between two arenas, or after the
378 * last arena.
380 * Normally the arena following the cursor is the first arena in the list with
381 * some free things and all arenas before the cursor are fully allocated. (And
382 * if the cursor is at the end of the list, then all the arenas are full.)
384 * However, the arena currently being allocated from is considered full while
385 * its list of free spans is moved into the freeList. Therefore, during GC or
386 * cell enumeration, when an unallocated freeList is moved back to the arena,
387 * we can see an arena with some free cells before the cursor.
389 * Arenas following the cursor should not be full.
391 class ArenaList {
392 // The cursor is implemented via an indirect pointer, |cursorp_|, to allow
393 // for efficient list insertion at the cursor point and other list
394 // manipulations.
396 // - If the list is empty: |head| is null, |cursorp_| points to |head|, and
397 // therefore |*cursorp_| is null.
399 // - If the list is not empty: |head| is non-null, and...
401 // - If the cursor is at the start of the list: |cursorp_| points to
402 // |head|, and therefore |*cursorp_| points to the first arena.
404 // - If cursor is at the end of the list: |cursorp_| points to the |next|
405 // field of the last arena, and therefore |*cursorp_| is null.
407 // - If the cursor is at neither the start nor the end of the list:
408 // |cursorp_| points to the |next| field of the arena preceding the
409 // cursor, and therefore |*cursorp_| points to the arena following the
410 // cursor.
412 // |cursorp_| is never null.
414 ArenaHeader *head_;
415 ArenaHeader **cursorp_;
417 void copy(const ArenaList &other) {
418 other.check();
419 head_ = other.head_;
420 cursorp_ = other.isCursorAtHead() ? &head_ : other.cursorp_;
421 check();
424 public:
425 ArenaList() {
426 clear();
429 ArenaList(const ArenaList &other) {
430 copy(other);
433 ArenaList &operator=(const ArenaList &other) {
434 copy(other);
435 return *this;
438 explicit ArenaList(const SortedArenaListSegment &segment) {
439 head_ = segment.head;
440 cursorp_ = segment.isEmpty() ? &head_ : segment.tailp;
441 check();
444 // This does checking just of |head_| and |cursorp_|.
445 void check() const {
446 #ifdef DEBUG
447 // If the list is empty, it must have this form.
448 JS_ASSERT_IF(!head_, cursorp_ == &head_);
450 // If there's an arena following the cursor, it must not be full.
451 ArenaHeader *cursor = *cursorp_;
452 JS_ASSERT_IF(cursor, cursor->hasFreeThings());
453 #endif
456 void clear() {
457 head_ = nullptr;
458 cursorp_ = &head_;
459 check();
462 bool isEmpty() const {
463 check();
464 return !head_;
467 // This returns nullptr if the list is empty.
468 ArenaHeader *head() const {
469 check();
470 return head_;
473 bool isCursorAtHead() const {
474 check();
475 return cursorp_ == &head_;
478 bool isCursorAtEnd() const {
479 check();
480 return !*cursorp_;
483 // This can return nullptr.
484 ArenaHeader *arenaAfterCursor() const {
485 check();
486 return *cursorp_;
489 // This moves the cursor past |aheader|. |aheader| must be an arena within
490 // this list.
491 void moveCursorPast(ArenaHeader *aheader) {
492 cursorp_ = &aheader->next;
493 check();
496 // This does two things.
497 // - Inserts |a| at the cursor.
498 // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
499 // after |a|, if |a| is full.
501 void insertAtCursor(ArenaHeader *a) {
502 check();
503 a->next = *cursorp_;
504 *cursorp_ = a;
505 // At this point, the cursor is sitting before |a|. Move it after |a|
506 // if necessary.
507 if (!a->hasFreeThings())
508 cursorp_ = &a->next;
509 check();
512 // This inserts |other|, which must be full, at the cursor of |this|.
513 ArenaList &insertListWithCursorAtEnd(const ArenaList &other) {
514 check();
515 other.check();
516 JS_ASSERT(other.isCursorAtEnd());
517 if (other.isCursorAtHead())
518 return *this;
519 // Insert the full arenas of |other| after those of |this|.
520 *other.cursorp_ = *cursorp_;
521 *cursorp_ = other.head_;
522 cursorp_ = other.cursorp_;
523 check();
524 return *this;
527 #ifdef JSGC_COMPACTING
528 ArenaHeader *pickArenasToRelocate();
529 ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated);
530 #endif
534 * A class that holds arenas in sorted order by appending arenas to specific
535 * segments. Each segment has a head and a tail, which can be linked up to
536 * other segments to create a contiguous ArenaList.
538 class SortedArenaList
540 public:
541 // The minimum size, in bytes, of a GC thing.
542 static const size_t MinThingSize = 16;
544 static_assert(ArenaSize <= 4096, "When increasing the Arena size, please consider how"\
545 " this will affect the size of a SortedArenaList.");
547 static_assert(MinThingSize >= 16, "When decreasing the minimum thing size, please consider"\
548 " how this will affect the size of a SortedArenaList.");
550 private:
551 // The maximum number of GC things that an arena can hold.
552 static const size_t MaxThingsPerArena = (ArenaSize - sizeof(ArenaHeader)) / MinThingSize;
554 size_t thingsPerArena_;
555 SortedArenaListSegment segments[MaxThingsPerArena + 1];
557 // Convenience functions to get the nth head and tail.
558 ArenaHeader *headAt(size_t n) { return segments[n].head; }
559 ArenaHeader **tailAt(size_t n) { return segments[n].tailp; }
561 public:
562 explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) {
563 reset(thingsPerArena);
566 void setThingsPerArena(size_t thingsPerArena) {
567 JS_ASSERT(thingsPerArena && thingsPerArena <= MaxThingsPerArena);
568 thingsPerArena_ = thingsPerArena;
571 // Resets the first |thingsPerArena| segments of this list for further use.
572 void reset(size_t thingsPerArena = MaxThingsPerArena) {
573 setThingsPerArena(thingsPerArena);
574 // Initialize the segments.
575 for (size_t i = 0; i <= thingsPerArena; ++i)
576 segments[i].clear();
579 // Inserts a header, which has room for |nfree| more things, in its segment.
580 void insertAt(ArenaHeader *aheader, size_t nfree) {
581 JS_ASSERT(nfree <= thingsPerArena_);
582 segments[nfree].append(aheader);
585 // Links up the tail of each non-empty segment to the head of the next
586 // non-empty segment, creating a contiguous list that is returned as an
587 // ArenaList. This is not a destructive operation: neither the head nor tail
588 // of any segment is modified. However, note that the ArenaHeaders in the
589 // resulting ArenaList should be treated as read-only unless the
590 // SortedArenaList is no longer needed: inserting or removing arenas would
591 // invalidate the SortedArenaList.
592 ArenaList toArenaList() {
593 // Link the non-empty segment tails up to the non-empty segment heads.
594 size_t tailIndex = 0;
595 for (size_t headIndex = 1; headIndex <= thingsPerArena_; ++headIndex) {
596 if (headAt(headIndex)) {
597 segments[tailIndex].linkTo(headAt(headIndex));
598 tailIndex = headIndex;
601 // Point the tail of the final non-empty segment at null. Note that if
602 // the list is empty, this will just set segments[0].head to null.
603 segments[tailIndex].linkTo(nullptr);
604 // Create an ArenaList with head and cursor set to the head and tail of
605 // the first segment (if that segment is empty, only the head is used).
606 return ArenaList(segments[0]);
610 class ArenaLists
613 * For each arena kind its free list is represented as the first span with
614 * free things. Initially all the spans are initialized as empty. After we
615 * find a new arena with available things we move its first free span into
616 * the list and set the arena as fully allocated. way we do not need to
617 * update the arena header after the initial allocation. When starting the
618 * GC we only move the head of the of the list of spans back to the arena
619 * only for the arena that was not fully allocated.
621 FreeList freeLists[FINALIZE_LIMIT];
623 ArenaList arenaLists[FINALIZE_LIMIT];
626 * The background finalization adds the finalized arenas to the list at
627 * the cursor position. backgroundFinalizeState controls the interaction
628 * between the GC lock and the access to the list from the allocation
629 * thread.
631 * BFS_DONE indicates that the finalizations is not running or cannot
632 * affect this arena list. The allocation thread can access the list
633 * outside the GC lock.
635 * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
636 * lock. The former indicates that the finalization still runs. The latter
637 * signals that finalization just added to the list finalized arenas. In
638 * that case the lock effectively serves as a read barrier to ensure that
639 * the allocation thread sees all the writes done during finalization.
641 enum BackgroundFinalizeStateEnum {
642 BFS_DONE,
643 BFS_RUN,
644 BFS_JUST_FINISHED
647 typedef mozilla::Atomic<BackgroundFinalizeStateEnum, mozilla::ReleaseAcquire>
648 BackgroundFinalizeState;
650 BackgroundFinalizeState backgroundFinalizeState[FINALIZE_LIMIT];
652 public:
653 /* For each arena kind, a list of arenas remaining to be swept. */
654 ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];
656 /* During incremental sweeping, a list of the arenas already swept. */
657 unsigned incrementalSweptArenaKind;
658 ArenaList incrementalSweptArenas;
660 /* Shape arenas to be swept in the foreground. */
661 ArenaHeader *gcShapeArenasToSweep;
663 public:
664 ArenaLists() {
665 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
666 freeLists[i].initAsEmpty();
667 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
668 backgroundFinalizeState[i] = BFS_DONE;
669 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
670 arenaListsToSweep[i] = nullptr;
671 incrementalSweptArenaKind = FINALIZE_LIMIT;
672 gcShapeArenasToSweep = nullptr;
675 ~ArenaLists() {
676 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
678 * We can only call this during the shutdown after the last GC when
679 * the background finalization is disabled.
681 JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
682 ArenaHeader *next;
683 for (ArenaHeader *aheader = arenaLists[i].head(); aheader; aheader = next) {
684 // Copy aheader->next before releasing.
685 next = aheader->next;
686 aheader->chunk()->releaseArena(aheader);
689 ArenaHeader *next;
690 for (ArenaHeader *aheader = incrementalSweptArenas.head(); aheader; aheader = next) {
691 // Copy aheader->next before releasing.
692 next = aheader->next;
693 aheader->chunk()->releaseArena(aheader);
697 static uintptr_t getFreeListOffset(AllocKind thingKind) {
698 uintptr_t offset = offsetof(ArenaLists, freeLists);
699 return offset + thingKind * sizeof(FreeList);
702 const FreeList *getFreeList(AllocKind thingKind) const {
703 return &freeLists[thingKind];
706 ArenaHeader *getFirstArena(AllocKind thingKind) const {
707 return arenaLists[thingKind].head();
710 ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
711 return arenaListsToSweep[thingKind];
714 ArenaHeader *getFirstSweptArena(AllocKind thingKind) const {
715 if (thingKind != incrementalSweptArenaKind)
716 return nullptr;
717 return incrementalSweptArenas.head();
720 ArenaHeader *getArenaAfterCursor(AllocKind thingKind) const {
721 return arenaLists[thingKind].arenaAfterCursor();
724 bool arenaListsAreEmpty() const {
725 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
727 * The arena cannot be empty if the background finalization is not yet
728 * done.
730 if (backgroundFinalizeState[i] != BFS_DONE)
731 return false;
732 if (!arenaLists[i].isEmpty())
733 return false;
735 return true;
738 void unmarkAll() {
739 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
740 /* The background finalization must have stopped at this point. */
741 JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
742 backgroundFinalizeState[i] == BFS_JUST_FINISHED);
743 for (ArenaHeader *aheader = arenaLists[i].head(); aheader; aheader = aheader->next) {
744 uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
745 memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
750 bool doneBackgroundFinalize(AllocKind kind) const {
751 return backgroundFinalizeState[kind] == BFS_DONE ||
752 backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
755 bool needBackgroundFinalizeWait(AllocKind kind) const {
756 return backgroundFinalizeState[kind] != BFS_DONE;
760 * Return the free list back to the arena so the GC finalization will not
761 * run the finalizers over unitialized bytes from free things.
763 void purge() {
764 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
765 purge(AllocKind(i));
768 void purge(AllocKind i) {
769 FreeList *freeList = &freeLists[i];
770 if (!freeList->isEmpty()) {
771 ArenaHeader *aheader = freeList->arenaHeader();
772 aheader->setFirstFreeSpan(freeList->getHead());
773 freeList->initAsEmpty();
777 inline void prepareForIncrementalGC(JSRuntime *rt);
780 * Temporarily copy the free list heads to the arenas so the code can see
781 * the proper value in ArenaHeader::freeList when accessing the latter
782 * outside the GC.
784 void copyFreeListsToArenas() {
785 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
786 copyFreeListToArena(AllocKind(i));
789 void copyFreeListToArena(AllocKind thingKind) {
790 FreeList *freeList = &freeLists[thingKind];
791 if (!freeList->isEmpty()) {
792 ArenaHeader *aheader = freeList->arenaHeader();
793 JS_ASSERT(!aheader->hasFreeThings());
794 aheader->setFirstFreeSpan(freeList->getHead());
799 * Clear the free lists in arenas that were temporarily set there using
800 * copyToArenas.
802 void clearFreeListsInArenas() {
803 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
804 clearFreeListInArena(AllocKind(i));
807 void clearFreeListInArena(AllocKind kind) {
808 FreeList *freeList = &freeLists[kind];
809 if (!freeList->isEmpty()) {
810 ArenaHeader *aheader = freeList->arenaHeader();
811 JS_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
812 aheader->setAsFullyUsed();
817 * Check that the free list is either empty or were synchronized with the
818 * arena using copyToArena().
820 bool isSynchronizedFreeList(AllocKind kind) {
821 FreeList *freeList = &freeLists[kind];
822 if (freeList->isEmpty())
823 return true;
824 ArenaHeader *aheader = freeList->arenaHeader();
825 if (aheader->hasFreeThings()) {
827 * If the arena has a free list, it must be the same as one in
828 * lists.
830 JS_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
831 return true;
833 return false;
836 /* Check if |aheader|'s arena is in use. */
837 bool arenaIsInUse(ArenaHeader *aheader, AllocKind kind) const {
838 JS_ASSERT(aheader);
839 const FreeList &freeList = freeLists[kind];
840 if (freeList.isEmpty())
841 return false;
842 return aheader == freeList.arenaHeader();
845 MOZ_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
846 return freeLists[thingKind].allocate(thingSize);
849 template <AllowGC allowGC>
850 static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind);
852 static void *refillFreeListInGC(Zone *zone, AllocKind thingKind);
855 * Moves all arenas from |fromArenaLists| into |this|. In
856 * parallel blocks, we temporarily create one ArenaLists per
857 * parallel thread. When the parallel block ends, we move
858 * whatever allocations may have been performed back into the
859 * compartment's main arena list using this function.
861 void adoptArenas(JSRuntime *runtime, ArenaLists *fromArenaLists);
863 /* True if the ArenaHeader in question is found in this ArenaLists */
864 bool containsArena(JSRuntime *runtime, ArenaHeader *arenaHeader);
866 void checkEmptyFreeLists() {
867 #ifdef DEBUG
868 for (size_t i = 0; i < mozilla::ArrayLength(freeLists); ++i)
869 JS_ASSERT(freeLists[i].isEmpty());
870 #endif
873 void checkEmptyFreeList(AllocKind kind) {
874 JS_ASSERT(freeLists[kind].isEmpty());
877 #ifdef JSGC_COMPACTING
878 ArenaHeader *relocateArenas(ArenaHeader *relocatedList);
879 #endif
881 void queueObjectsForSweep(FreeOp *fop);
882 void queueStringsAndSymbolsForSweep(FreeOp *fop);
883 void queueShapesForSweep(FreeOp *fop);
884 void queueScriptsForSweep(FreeOp *fop);
885 void queueJitCodeForSweep(FreeOp *fop);
887 bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget,
888 SortedArenaList &sweepList);
889 static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);
891 void wipeDuringParallelExecution(JSRuntime *rt);
893 private:
894 inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
895 inline void forceFinalizeNow(FreeOp *fop, AllocKind thingKind);
896 inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
897 inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
899 void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
900 inline void *allocateFromArenaInline(JS::Zone *zone, AllocKind thingKind,
901 AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation);
903 inline void normalizeBackgroundFinalizeState(AllocKind thingKind);
905 friend class js::Nursery;
906 friend class js::gc::ForkJoinNursery;
910 * Initial allocation size for data structures holding chunks is set to hold
911 * chunks with total capacity of 16MB to avoid buffer resizes during browser
912 * startup.
914 const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
916 /* The number of GC cycles an empty chunk can survive before been released. */
917 const size_t MAX_EMPTY_CHUNK_AGE = 4;
919 } /* namespace gc */
/* The kind of cell a dynamically-registered GC root points at. */
typedef enum JSGCRootType {
    JS_GC_ROOT_VALUE_PTR,
    JS_GC_ROOT_STRING_PTR,
    JS_GC_ROOT_OBJECT_PTR,
    JS_GC_ROOT_SCRIPT_PTR
} JSGCRootType;
928 struct RootInfo {
929 RootInfo() {}
930 RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
931 const char *name;
932 JSGCRootType type;
935 typedef js::HashMap<void *,
936 RootInfo,
937 js::DefaultHasher<void *>,
938 js::SystemAllocPolicy> RootedValueMap;
940 extern bool
941 AddValueRoot(JSContext *cx, js::Value *vp, const char *name);
943 extern bool
944 AddValueRootRT(JSRuntime *rt, js::Value *vp, const char *name);
946 extern bool
947 AddStringRoot(JSContext *cx, JSString **rp, const char *name);
949 extern bool
950 AddObjectRoot(JSContext *cx, JSObject **rp, const char *name);
952 extern bool
953 AddObjectRoot(JSRuntime *rt, JSObject **rp, const char *name);
955 extern bool
956 AddScriptRoot(JSContext *cx, JSScript **rp, const char *name);
958 extern void
959 RemoveRoot(JSRuntime *rt, void *rp);
961 } /* namespace js */
963 extern bool
964 js_InitGC(JSRuntime *rt, uint32_t maxbytes);
966 extern void
967 js_FinishGC(JSRuntime *rt);
969 namespace js {
971 class InterpreterFrame;
973 extern void
974 MarkCompartmentActive(js::InterpreterFrame *fp);
976 extern void
977 TraceRuntime(JSTracer *trc);
979 extern void
980 ReleaseAllJITCode(FreeOp *op);
983 * Kinds of js_GC invocation.
985 typedef enum JSGCInvocationKind {
986 /* Normal invocation. */
987 GC_NORMAL = 0,
989 /* Minimize GC triggers and release empty GC chunks right away. */
990 GC_SHRINK = 1
991 } JSGCInvocationKind;
extern void
PrepareForDebugGC(JSRuntime *rt);

/* Functions for managing cross compartment gray pointers. */

extern void
DelayCrossCompartmentGrayMarking(JSObject *src);

/* Notify the GC that the wrapper |o| is about to be nuked. */
extern void
NotifyGCNukeWrapper(JSObject *o);

/*
 * Notify the GC before/after swapping the guts of |a| and |b|. The value
 * returned by NotifyGCPreSwap must be passed as |preResult| to the matching
 * NotifyGCPostSwap call.
 */
extern unsigned
NotifyGCPreSwap(JSObject *a, JSObject *b);

extern void
NotifyGCPostSwap(JSObject *a, JSObject *b, unsigned preResult);
/*
 * Helper state for use when JS helper threads sweep and allocate GC thing kinds
 * that can be swept and allocated off the main thread.
 *
 * In non-threadsafe builds, all actual sweeping and allocation is performed
 * on the main thread, but GCHelperState encapsulates this from clients as
 * much as possible.
 */
class GCHelperState
    // Activities the helper thread can be asked to perform.
    enum State {
        IDLE,
        SWEEPING,
        ALLOCATING,
        CANCEL_ALLOCATION

    // Associated runtime.
    JSRuntime *const rt;

    // Condvar for notifying the main thread when work has finished. This is
    // associated with the runtime's GC lock --- the worker thread state
    // condvars can't be used here due to lock ordering issues.
    PRCondVar *done;

    // Activity for the helper to do, protected by the GC lock.
    State state_;

    // Thread which work is being performed on, or null.
    PRThread *thread;

    void startBackgroundThread(State newState);
    void waitForBackgroundThread();

    // Accessors for state_. NOTE(review): presumably these must be called
    // with the GC lock taken, matching the comment on state_ — confirm at
    // the definitions.
    State state();
    void setState(State state);

    // Pending-work flags; shrinkFlag additionally requests that empty chunks
    // be released (see shouldShrink()).
    bool sweepFlag;
    bool shrinkFlag;

    // Whether background (off-main-thread) chunk allocation is permitted.
    bool backgroundAllocation;

    friend class js::gc::ArenaLists;

    // Free every element in [array, end), then free the array itself.
    static void freeElementsAndArray(void **array, void **end) {
        JS_ASSERT(array <= end);
        for (void **p = array; p != end; ++p)
            js_free(*p);
        js_free(array);

    /* Must be called with the GC lock taken. */
    void doSweep();

  public:
    explicit GCHelperState(JSRuntime *rt)
      : rt(rt),
        done(nullptr),
        state_(IDLE),
        thread(nullptr),
        sweepFlag(false),
        shrinkFlag(false),
        backgroundAllocation(true)

    bool init();
    void finish();

    // Entry point executed on the helper thread.
    void work();

    /* Must be called with the GC lock taken. */
    void startBackgroundSweep(bool shouldShrink);

    /* Must be called with the GC lock taken. */
    void startBackgroundShrink();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepEnd();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepOrAllocEnd();

    /* Must be called with the GC lock taken. */
    void startBackgroundAllocationIfIdle();

    bool canBackgroundAllocate() const {
        return backgroundAllocation;

    void disableBackgroundAllocation() {
        backgroundAllocation = false;

    bool onBackgroundThread();

    /*
     * Outside the GC lock may give true answer when in fact the sweeping has
     * been done.
     */
    bool isBackgroundSweeping() const {
        return state_ == SWEEPING;

    bool shouldShrink() const {
        JS_ASSERT(isBackgroundSweeping());
        return shrinkFlag;
/* Hash policy for sets/maps keyed on aligned gc::Chunk pointers. */
struct GCChunkHasher {
    typedef gc::Chunk *Lookup;

    /*
     * Strip zeros for better distribution after multiplying by the golden
     * ratio.
     */
    static HashNumber hash(gc::Chunk *chunk) {
        JS_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
        return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);

    static bool match(gc::Chunk *k, gc::Chunk *l) {
        JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
        JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
        return k == l;

typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
/* A gray root: the GC thing plus, in DEBUG builds, how to describe it. */
struct GrayRoot {
    void *thing;          // the rooted GC thing
    JSGCTraceKind kind;   // trace kind of |thing|
#ifdef DEBUG
    JSTraceNamePrinter debugPrinter;
    const void *debugPrintArg;
    size_t debugPrintIndex;
#endif

    GrayRoot(void *thing, JSGCTraceKind kind)
        : thing(thing), kind(kind) {}
/* Conservatively mark every value in [begin, end). */
void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);

/* Callback signatures used by the Iterate* helpers below. */
typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
                                     JSGCTraceKind traceKind, size_t thingSize);
typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
                                    JSGCTraceKind traceKind, size_t thingSize);
/*
 * This function calls |zoneCallback| on every zone, |compartmentCallback| on
 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
 * on every in-use cell in the GC heap.
 */
extern void
IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
                                    IterateZoneCallback zoneCallback,
                                    JSIterateCompartmentCallback compartmentCallback,
                                    IterateArenaCallback arenaCallback,
                                    IterateCellCallback cellCallback);

/*
 * This function is like IterateZonesCompartmentsArenasCells, but does it for a
 * single zone.
 */
extern void
IterateZoneCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                   IterateZoneCallback zoneCallback,
                                   JSIterateCompartmentCallback compartmentCallback,
                                   IterateArenaCallback arenaCallback,
                                   IterateCellCallback cellCallback);

/*
 * Invoke chunkCallback on every in-use chunk.
 */
extern void
IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);

typedef void (*IterateScriptCallback)(JSRuntime *rt, void *data, JSScript *script);

/*
 * Invoke scriptCallback on every in-use script for
 * the given compartment or for all compartments if it is null.
 */
extern void
IterateScripts(JSRuntime *rt, JSCompartment *compartment,
               void *data, IterateScriptCallback scriptCallback);
1202 } /* namespace js */
/* Finalize a string belonging to the given runtime. */
extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str);

namespace js {

/* Create a new compartment in |zone| with the given principals and options. */
JSCompartment *
NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals,
               const JS::CompartmentOptions &options);

namespace gc {

/*
 * Merge all contents of source into target. This can only be used if source is
 * the only compartment in its zone.
 */
void
MergeCompartments(JSCompartment *source, JSCompartment *target);
#ifdef JSGC_COMPACTING

/* Functions for checking and updating things that might be moved by compacting GC. */

/*
 * Magic value written into the second word of a relocated cell to mark it as
 * forwarded; the first word then holds the new address (see IsForwarded and
 * Forwarded below). One byte pattern per pointer width.
 */
#ifdef JS_PUNBOX64
const uintptr_t ForwardedCellMagicValue = 0xf1f1f1f1f1f1f1f1;
#else
const uintptr_t ForwardedCellMagicValue = 0xf1f1f1f1;
#endif
1232 template <typename T>
1233 inline bool
1234 IsForwarded(T *t)
1236 static_assert(mozilla::IsBaseOf<Cell, T>::value, "T must be a subclass of Cell");
1237 uintptr_t *ptr = reinterpret_cast<uintptr_t *>(t);
1238 return ptr[1] == ForwardedCellMagicValue;
1241 inline bool
1242 IsForwarded(const JS::Value &value)
1244 if (value.isObject())
1245 return IsForwarded(&value.toObject());
1247 if (value.isString())
1248 return IsForwarded(value.toString());
1250 if (value.isSymbol())
1251 return IsForwarded(value.toSymbol());
1253 JS_ASSERT(!value.isGCThing());
1254 return false;
/*
 * Return the new location of cell |t|, read from its first word. The caller
 * must have established IsForwarded(t).
 */
template <typename T>
inline T *
Forwarded(T *t)
{
    JS_ASSERT(IsForwarded(t));
    const uintptr_t *words = reinterpret_cast<uintptr_t *>(t);
    return reinterpret_cast<T *>(words[0]);
}
1266 inline Value
1267 Forwarded(const JS::Value &value)
1269 if (value.isObject())
1270 return ObjectValue(*Forwarded(&value.toObject()));
1271 else if (value.isString())
1272 return StringValue(Forwarded(value.toString()));
1273 else if (value.isSymbol())
1274 return SymbolValue(Forwarded(value.toSymbol()));
1276 JS_ASSERT(!value.isGCThing());
1277 return value;
/* Follow the forwarding pointer if |t| has one; otherwise return it as-is. */
template <typename T>
inline T
MaybeForwarded(T t)
{
    if (IsForwarded(t))
        return Forwarded(t);
    return t;
}
1287 #else
/* Non-compacting builds: nothing is ever relocated, so these are no-ops. */
template <typename T>
inline bool
IsForwarded(T t)
{
    return false;
}

template <typename T>
inline T
Forwarded(T t)
{
    return t;
}

template <typename T>
inline T
MaybeForwarded(T t)
{
    return t;
}
1293 #endif // JSGC_COMPACTING
#ifdef JSGC_HASH_TABLE_CHECKS

/*
 * Assert that a GC thing found in a hash table is still valid after a moving
 * GC: it must not live in the nursery and, with compacting enabled, must not
 * be a forwarded cell. Null pointers are accepted.
 */
template <typename T>
inline void
CheckGCThingAfterMovingGC(T *t)
    JS_ASSERT_IF(t, !IsInsideNursery(t));
#ifdef JSGC_COMPACTING
    JS_ASSERT_IF(t, !IsForwarded(t));
#endif

/* Value overload: check the GC thing the value holds, if any. */
inline void
CheckValueAfterMovingGC(const JS::Value& value)
    if (value.isObject())
        return CheckGCThingAfterMovingGC(&value.toObject());
    else if (value.isString())
        return CheckGCThingAfterMovingGC(value.toString());
    else if (value.isSymbol())
        return CheckGCThingAfterMovingGC(value.toSymbol());

#endif // JSGC_HASH_TABLE_CHECKS
/*
 * GC zeal mode numbers. Each mode forces some GC-related activity to run far
 * more often than normal, to stress-test the collector (see jsgc.cpp).
 */
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingValue = 6;
const int ZealGenerationalGCValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealCheckHashTablesOnMinorGC = 13;
const int ZealCompactValue = 14;
// Highest valid zeal mode. Defined in terms of the last mode above so that
// adding a new mode cannot silently leave the limit stale.
const int ZealLimit = ZealCompactValue;
/* Which write-barrier verifier to run. */
enum VerifierType {
    PreBarrierVerifier,
    PostBarrierVerifier

#ifdef JS_GC_ZEAL

/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSRuntime *rt, VerifierType type);

void
MaybeVerifyBarriers(JSContext *cx, bool always = false);

#else

/* No-op stubs for builds without GC zeal. */
static inline void
VerifyBarriers(JSRuntime *rt, VerifierType type)

static inline void
MaybeVerifyBarriers(JSContext *cx, bool always = false)

#endif
/*
 * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
 * that they are live. Use of this class is highly discouraged. Please carefully
 * read the comment in jscntxt.h above |suppressGC| and take all appropriate
 * precautions before instantiating this class.
 */
class AutoSuppressGC
    // Reference to the relevant suppressGC counter.
    int32_t &suppressGC_;

  public:
    explicit AutoSuppressGC(ExclusiveContext *cx);
    explicit AutoSuppressGC(JSCompartment *comp);
    explicit AutoSuppressGC(JSRuntime *rt);

    // The out-of-line constructors presumably increment the counter; the
    // destructor undoes that increment — confirm in jsgc.cpp.
    ~AutoSuppressGC()
        suppressGC_--;
#ifdef DEBUG
/* Disable OOM testing in sections which are not OOM safe. */
class AutoEnterOOMUnsafeRegion
    uint32_t saved_;  // OOM_maxAllocations value to restore on exit

  public:
    AutoEnterOOMUnsafeRegion() : saved_(OOM_maxAllocations) {
        OOM_maxAllocations = UINT32_MAX;
    ~AutoEnterOOMUnsafeRegion() {
        OOM_maxAllocations = saved_;
#else
/* Non-DEBUG builds: OOM testing is compiled out, so this is a no-op. */
class AutoEnterOOMUnsafeRegion {};
#endif /* DEBUG */
// This tests whether something is inside the GGC's nursery only; use
// sparingly — in most cases testing for any nursery, via IsInsideNursery,
// is what you want.
bool
IsInsideGGCNursery(const gc::Cell *cell);
1409 } /* namespace gc */
#ifdef DEBUG
/* Use this to avoid assertions when manipulating the wrapper map. */
class AutoDisableProxyCheck
    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
    gc::GCRuntime &gc;

  public:
    explicit AutoDisableProxyCheck(JSRuntime *rt
                                   MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
    ~AutoDisableProxyCheck();
#else
/* Non-DEBUG builds: the proxy checks are compiled out, so this is a no-op. */
struct AutoDisableProxyCheck
    explicit AutoDisableProxyCheck(JSRuntime *rt) {}
#endif
/* Scoped guard: presumably prevents compacting GC while an instance is live
 * (no-op when compacting is compiled out) — confirm at the definitions. */
struct AutoDisableCompactingGC
#ifdef JSGC_COMPACTING
    explicit AutoDisableCompactingGC(JSRuntime *rt);
    ~AutoDisableCompactingGC();

  private:
    gc::GCRuntime &gc;
#else
    explicit AutoDisableCompactingGC(JSRuntime *rt) {}
    ~AutoDisableCompactingGC() {}
#endif
/* Discard JIT caches for the given zone. */
void
PurgeJITCaches(JS::Zone *zone);

// This is the same as IsInsideNursery, but not inlined.
bool
UninlinedIsInsideNursery(const gc::Cell *cell);
1451 } /* namespace js */
1453 #endif /* jsgc_h */