/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef js_HeapAPI_h
#define js_HeapAPI_h

#include "mozilla/Atomics.h"
#include "mozilla/BitSet.h"

#include <limits.h>
#include <type_traits>

#include "js/AllocPolicy.h"
#include "js/GCAnnotations.h"
#include "js/HashTable.h"
#include "js/shadow/String.h"  // JS::shadow::String
#include "js/shadow/Symbol.h"  // JS::shadow::Symbol
#include "js/shadow/Zone.h"    // JS::shadow::Zone
#include "js/TraceKind.h"
#include "js/TypeDecls.h"

/* These values are private to the JS engine. */
namespace js {

class NurseryDecommitTask;

JS_PUBLIC_API bool CurrentThreadCanAccessZone(JS::Zone* zone);

// To prevent false sharing, some data structures are aligned to a typical
// cache line size.
static constexpr size_t TypicalCacheLineSize = 64;

namespace gc {

class Arena;
struct Cell;
class TenuredChunk;
class StoreBuffer;
class TenuredCell;

const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#if defined(XP_MACOSX) && defined(__aarch64__)
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif

// Expected page size, so that we can initialize ArenasPerPage at compile time.
// The actual system page size should be queried with SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;

const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

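// Illustrative note (editor's addition, not in the original header): with the
// shifts above, arenas are 4 KiB and chunks are 1 MiB.
static_assert(ArenaSize == 4096, "illustrative check only");
static_assert(ChunkSize == 1024 * 1024, "illustrative check only");
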
const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;

/*
 * The minimum cell size ends up as twice the cell alignment because the mark
 * bitmap contains one bit per CellBytesPerMarkBit bytes (which is equal to
 * CellAlignBytes) and we need two mark bits per cell.
 */
const size_t MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell;

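// Illustrative note (editor's addition, not in the original header): one mark
// bit per 8 bytes (CellBytesPerMarkBit) times two bits per cell gives a
// 16-byte minimum cell size.
static_assert(MinCellSize == 16, "illustrative check only");
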
/*
 * The mark bitmap has one bit per each possible cell start position. This
 * wastes some space for larger GC things but allows us to avoid division by the
 * cell's size when accessing the bitmap.
 */
const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);

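// Illustrative note (editor's addition, not in the original header): a 4 KiB
// arena with one mark bit per 8 bytes needs 4096 / 8 = 512 bits = 64 bytes of
// bitmap, which is 8 words when JS_BITS_PER_WORD is 64.
static_assert(ArenaBitmapBits == 512, "illustrative check only");
static_assert(ArenaBitmapBytes == 64, "illustrative check only");
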
// The base class for all GC chunks, either in the nursery or in the tenured
// heap memory. This structure is locatable from any GC pointer by aligning to
// the chunk size.
class alignas(CellAlignBytes) ChunkBase {
 protected:
  ChunkBase(JSRuntime* rt, StoreBuffer* sb) {
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    initBase(rt, sb);
  }

  void initBase(JSRuntime* rt, StoreBuffer* sb) {
    runtime = rt;
    storeBuffer = sb;
  }

 public:
  // The store buffer for pointers from tenured things to things in this
  // chunk. Will be non-null if and only if this is a nursery chunk.
  StoreBuffer* storeBuffer;

  // Provide quick access to the runtime from absolutely anywhere.
  JSRuntime* runtime;
};

// Information about tenured heap chunks.
struct TenuredChunkInfo {
 private:
  friend class ChunkPool;
  TenuredChunk* next = nullptr;
  TenuredChunk* prev = nullptr;

 public:
  /* Number of free arenas, either committed or decommitted. */
  uint32_t numArenasFree;

  /* Number of free, committed arenas. */
  uint32_t numArenasFreeCommitted;
};

/*
 * Calculating ArenasPerChunk:
 *
 * To figure out how many Arenas will fit in a chunk we need to know how much
 * extra space is available after we allocate the header data. This is a problem
 * because the header size depends on the number of arenas in the chunk.
 *
 * The dependent fields are markBits, decommittedPages and
 * freeCommittedArenas. markBits needs ArenaBitmapBytes bytes per arena,
 * decommittedPages needs one bit per page and freeCommittedArenas needs one
 * bit per arena.
 *
 * We can calculate an approximate value by dividing the number of bits of free
 * space in the chunk by the number of bits needed per arena. This is an
 * approximation because it doesn't take into account the fact that the
 * variable-sized fields must be rounded up to a whole number of words, or any
 * padding the compiler adds between fields.
 *
 * Fortunately, for the chunk and arena size parameters we use, this
 * approximation turns out to be correct. If it were not we might need to adjust
 * the arena count down by one to allow more space for the padding.
 */
const size_t BitsPerPageWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * ArenasPerPage * CHAR_BIT + ArenasPerPage +
    1;
const size_t ChunkBitsAvailable =
    (ChunkSize - sizeof(ChunkBase) - sizeof(TenuredChunkInfo)) * CHAR_BIT;
const size_t PagesPerChunk = ChunkBitsAvailable / BitsPerPageWithHeaders;
const size_t ArenasPerChunk = PagesPerChunk * ArenasPerPage;
const size_t FreeCommittedBits = ArenasPerChunk;
const size_t DecommitBits = PagesPerChunk;
const size_t BitsPerArenaWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * CHAR_BIT +
    (DecommitBits / ArenasPerChunk) + 1;

const size_t CalculatedChunkSizeRequired =
    sizeof(ChunkBase) + sizeof(TenuredChunkInfo) +
    RoundUp(ArenasPerChunk * ArenaBitmapBytes, sizeof(uintptr_t)) +
    RoundUp(FreeCommittedBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    RoundUp(DecommitBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    ArenasPerChunk * ArenaSize;
static_assert(CalculatedChunkSizeRequired <= ChunkSize,
              "Calculated ArenasPerChunk is too large");

const size_t CalculatedChunkPadSize = ChunkSize - CalculatedChunkSizeRequired;
static_assert(CalculatedChunkPadSize * CHAR_BIT < BitsPerArenaWithHeaders,
              "Calculated ArenasPerChunk is too small");

static_assert(ArenasPerChunk == 252,
              "Do not accidentally change our heap's density.");

// Mark bitmaps are atomic because they can be written by gray unmarking on the
// main thread while read by sweeping on a background thread. The former does
// not affect the result of the latter.
using MarkBitmapWord = mozilla::Atomic<uintptr_t, mozilla::Relaxed>;

/*
 * Live objects are marked black or gray. Everything reachable from a JS root is
 * marked black. Objects marked gray are eligible for cycle collection.
 *
 *    BlackBit:     GrayOrBlackBit:  Color:
 *       0                 0         white
 *       0                 1         gray
 *       1                 0         black
 *       1                 1         black
 */
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

// Mark colors. Order is important here: the greater the value, the 'more
// marked' a cell is.
enum class MarkColor : uint8_t { Gray = 1, Black = 2 };

// Mark bitmap for a tenured heap chunk.
struct alignas(TypicalCacheLineSize) MarkBitmap {
  static constexpr size_t WordCount = ArenaBitmapWords * ArenasPerChunk;
  MarkBitmapWord bitmap[WordCount];

  inline void getMarkWordAndMask(const TenuredCell* cell, ColorBit colorBit,
                                 MarkBitmapWord** wordp, uintptr_t* maskp);

  // The following are not exported and are defined in gc/Heap.h:
  inline bool markBit(const TenuredCell* cell, ColorBit colorBit);
  inline bool isMarkedAny(const TenuredCell* cell);
  inline bool isMarkedBlack(const TenuredCell* cell);
  inline bool isMarkedGray(const TenuredCell* cell);
  inline bool markIfUnmarked(const TenuredCell* cell, MarkColor color);
  inline bool markIfUnmarkedAtomic(const TenuredCell* cell, MarkColor color);
  inline void markBlack(const TenuredCell* cell);
  inline void markBlackAtomic(const TenuredCell* cell);
  inline void copyMarkBit(TenuredCell* dst, const TenuredCell* src,
                          ColorBit colorBit);
  inline void unmark(const TenuredCell* cell);
  inline MarkBitmapWord* arenaBits(Arena* arena);
};

static_assert(ArenaBitmapBytes * ArenasPerChunk == sizeof(MarkBitmap),
              "Ensure our MarkBitmap actually covers all arenas.");

// Bitmap with one bit per page used for decommitted page set.
using ChunkPageBitmap = mozilla::BitSet<PagesPerChunk, uint32_t>;

// Bitmap with one bit per arena used for free committed arena set.
using ChunkArenaBitmap = mozilla::BitSet<ArenasPerChunk, uint32_t>;

// Base class containing data members for a tenured heap chunk.
class TenuredChunkBase : public ChunkBase {
 public:
  TenuredChunkInfo info;
  MarkBitmap markBits;
  ChunkArenaBitmap freeCommittedArenas;
  ChunkPageBitmap decommittedPages;

 protected:
  explicit TenuredChunkBase(JSRuntime* runtime) : ChunkBase(runtime, nullptr) {
    info.numArenasFree = ArenasPerChunk;
  }

  void initAsDecommitted();
};

/*
 * We sometimes use an index to refer to a cell in an arena. The index for a
 * cell is found by dividing by the cell alignment so not all indices refer to
 * valid cells.
 */
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;

const size_t MarkBitmapWordBits = sizeof(MarkBitmapWord) * CHAR_BIT;

constexpr size_t FirstArenaAdjustmentBits =
    RoundUp(sizeof(gc::TenuredChunkBase), ArenaSize) / gc::CellBytesPerMarkBit;

static_assert((FirstArenaAdjustmentBits % MarkBitmapWordBits) == 0);
constexpr size_t FirstArenaAdjustmentWords =
    FirstArenaAdjustmentBits / MarkBitmapWordBits;

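// Illustrative note (editor's addition, not in the original header): the chunk
// header occupies the start of the chunk, so a cell's bit index (computed from
// its chunk offset) overshoots the bitmap by the words that would have covered
// the header. E.g. if the header rounds up to 4 arenas (16 KiB), the
// adjustment is 16384 / 8 = 2048 bits, i.e. 32 words on a 64-bit platform.
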
const size_t ChunkStoreBufferOffset = offsetof(ChunkBase, storeBuffer);
const size_t ChunkMarkBitmapOffset = offsetof(TenuredChunkBase, markBits);

// Hardcoded offsets into Arena class.
const size_t ArenaZoneOffset = 2 * sizeof(uint32_t);
const size_t ArenaHeaderSize = ArenaZoneOffset + 2 * sizeof(uintptr_t) +
                               sizeof(size_t) + sizeof(uintptr_t);

// The first word of a GC thing has certain requirements from the GC and is
// used to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds,
// so these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;

#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_PUBLIC_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif

MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::TenuredCell* cell);

} /* namespace gc */
} /* namespace js */

namespace JS {

enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

JS_PUBLIC_API HeapState RuntimeHeapState();

static inline bool RuntimeHeapIsBusy() {
  return RuntimeHeapState() != HeapState::Idle;
}

static inline bool RuntimeHeapIsTracing() {
  return RuntimeHeapState() == HeapState::Tracing;
}

static inline bool RuntimeHeapIsMajorCollecting() {
  return RuntimeHeapState() == HeapState::MajorCollecting;
}

static inline bool RuntimeHeapIsMinorCollecting() {
  return RuntimeHeapState() == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting(HeapState state) {
  return state == HeapState::MajorCollecting ||
         state == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting() {
  return RuntimeHeapIsCollecting(RuntimeHeapState());
}

static inline bool RuntimeHeapIsCycleCollecting() {
  return RuntimeHeapState() == HeapState::CycleCollecting;
}

/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow different
 * stack limits depending on the type of code running.
 */
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount
};

/*
 * Default maximum size for the generational nursery in bytes. This is the
 * initial value. In the browser this is configured by the
 * javascript.options.mem.nursery.max_kb pref.
 */
const uint32_t DefaultNurseryMaxBytes = 16 * js::gc::ChunkSize;

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;

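// Illustrative note (editor's addition, not in the original header): with
// 1 MiB chunks the default nursery cap works out to 16 MiB. A hypothetical
// embedding might pass the heap default straight through:
//
//   JSContext* cx = JS_NewContext(JS::DefaultHeapMaxBytes);
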
/*
 * A GC pointer, tagged with the trace kind.
 *
 * In general, a GC pointer should be stored with an exact type. This class
 * is for use when that is not possible because a single pointer must point
 * to several kinds of GC thing.
 */
class JS_PUBLIC_API GCCellPtr {
 public:
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);

  JS::TraceKind kind() const {
    uintptr_t kindBits = ptr & OutOfLineTraceKindMask;
    if (kindBits != OutOfLineTraceKindMask) {
      return JS::TraceKind(kindBits);
    }
    return outOfLineKind();
  }

  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }

  // Simplify checks to the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }

  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |to| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }
  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }

  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }

 private:
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    auto* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Store the trace kind in the bottom bits of the pointer for common kinds.
    uintptr_t kindBits = uintptr_t(traceKind);
    if (kindBits >= OutOfLineTraceKindMask) {
      kindBits = OutOfLineTraceKindMask;
    }
    return uintptr_t(p) | kindBits;
  }

  JS::TraceKind outOfLineKind() const;

  uintptr_t ptr;
};

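// Illustrative note (editor's addition, not in the original header): tagging
// and recovering a pointer, where |script| stands for any live JSScript*.
//
//   JS::GCCellPtr cell(script);
//   MOZ_ASSERT(cell.kind() == JS::TraceKind::Script);
//   if (cell.is<JSScript>()) {
//     JSScript* again = &cell.as<JSScript>();
//   }
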
// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
#define JS_EXPAND_DEF(name, type, _, _1) \
  case JS::TraceKind::name:              \
    return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}

// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This function doesn't need any extra work here, but it is supplied for
  // symmetry with other MapGCThingTyped/ApplyGCThingTyped implementations
  // that have to wrap the functor to return a dummy value that is ignored.
  MapGCThingTyped(thing, f);
}

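// Illustrative note (editor's addition, not in the original header):
// dispatching on the dynamic kind of a GCCellPtr with a generic lambda.
// |thing| is any valid GCCellPtr; |Use| is a hypothetical overloaded helper.
//
//   JS::ApplyGCThingTyped(thing, [](auto* t) {
//     // |t| arrives as JSObject*, JSString*, JS::Symbol*, ... to match
//     // thing.kind().
//     Use(t);
//   });
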
} /* namespace JS */

// These are defined in the toplevel namespace instead of within JS so that
// they won't shadow other operator== overloads (see bug 1456512.)

inline bool operator==(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return ptr1.asCell() == ptr2.asCell();
}

inline bool operator!=(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return !(ptr1 == ptr2);
}

namespace js {
namespace gc {

MOZ_ALWAYS_INLINE void MarkBitmap::getMarkWordAndMask(const TenuredCell* cell,
                                                      ColorBit colorBit,
                                                      MarkBitmapWord** wordp,
                                                      uintptr_t* maskp) {
  // Note: the JIT pre-barrier trampolines inline this code. Update
  // MacroAssembler::emitPreBarrierFastPath code too when making changes here!

  MOZ_ASSERT(size_t(colorBit) < MarkBitsPerCell);

  size_t offset = uintptr_t(cell) & ChunkMask;
  const size_t bit = offset / CellBytesPerMarkBit + size_t(colorBit);
  size_t word = bit / MarkBitmapWordBits - FirstArenaAdjustmentWords;
  MOZ_ASSERT(word < WordCount);
  *wordp = &bitmap[word];
  *maskp = uintptr_t(1) << (bit % MarkBitmapWordBits);
}

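// Illustrative note (editor's addition, not in the original header): for a
// cell at chunk offset 0x10000 asking for ColorBit::BlackBit, bit = 0x10000 /
// 8 = 8192; with 64-bit words the word index is 8192 / 64 minus the header
// adjustment, and the mask selects bit 8192 % 64 = 0 of that word.
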
namespace detail {

static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
  MOZ_ASSERT(cell);
  auto* chunk = reinterpret_cast<ChunkBase*>(uintptr_t(cell) & ~ChunkMask);
  MOZ_ASSERT(chunk->runtime);
  return chunk;
}

static MOZ_ALWAYS_INLINE TenuredChunkBase* GetCellChunkBase(
    const TenuredCell* cell) {
  MOZ_ASSERT(cell);
  auto* chunk =
      reinterpret_cast<TenuredChunkBase*>(uintptr_t(cell) & ~ChunkMask);
  MOZ_ASSERT(chunk->runtime);
  return chunk;
}

static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const void* ptr) {
  // This takes a void* because the compiler can't see type relationships in
  // this header. |ptr| must be a pointer to a tenured GC thing.
  const uintptr_t zone_addr = (uintptr_t(ptr) & ~ArenaMask) | ArenaZoneOffset;
  return *reinterpret_cast<JS::Zone**>(zone_addr);
}

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedBlack(
    const TenuredCell* cell) {
  // Return true if BlackBit is set.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));

  MarkBitmapWord* blackWord;
  uintptr_t blackMask;
  TenuredChunkBase* chunk = GetCellChunkBase(cell);
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::BlackBit,
                                     &blackWord, &blackMask);
  return *blackWord & blackMask;
}

static MOZ_ALWAYS_INLINE bool NonBlackCellIsMarkedGray(
    const TenuredCell* cell) {
  // Return true if GrayOrBlackBit is set. Callers should check BlackBit first.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
  MOZ_ASSERT(!TenuredCellIsMarkedBlack(cell));

  MarkBitmapWord* grayWord;
  uintptr_t grayMask;
  TenuredChunkBase* chunk = GetCellChunkBase(cell);
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::GrayOrBlackBit,
                                     &grayWord, &grayMask);
  return *grayWord & grayMask;
}

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(
    const TenuredCell* cell) {
  return !TenuredCellIsMarkedBlack(cell) && NonBlackCellIsMarkedGray(cell);
}

static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
  MOZ_ASSERT(cell);
  if (js::gc::IsInsideNursery(cell)) {
    // Nursery cells have no mark bits and are never gray.
    return false;
  }
  return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell*>(cell));
}

extern JS_PUBLIC_API bool CanCheckGrayBits(const TenuredCell* cell);

extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const TenuredCell* cell);

extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);

MOZ_ALWAYS_INLINE bool CellHasStoreBuffer(const Cell* cell) {
  return GetCellChunkBase(cell)->storeBuffer;
}

} /* namespace detail */

MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
  MOZ_ASSERT(cell);
  return detail::CellHasStoreBuffer(cell);
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const TenuredCell* cell) {
  MOZ_ASSERT(cell);
  MOZ_ASSERT(!IsInsideNursery(reinterpret_cast<const Cell*>(cell)));
  return false;
}

// Allow use before the compiler knows the derivation of JSObject, JSString,
// and JS::BigInt.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}

MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* ptr) {
  auto addr = uintptr_t(ptr);
  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    return false;
  }

  auto* cell = reinterpret_cast<const Cell*>(ptr);
  if (!IsInsideNursery(cell)) {
    return detail::GetTenuredGCThingZone(cell) != nullptr;
  }

  return true;
}

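// Illustrative note (editor's addition, not in the original header): the range
// check above rejects anything in the first chunk-sized block of address space
// (including nullptr), and the alignment check rejects pointers that cannot
// point at a cell; e.g. IsCellPointerValid((void*)0x10) fails the range check
// because 0x10 < ChunkSize.
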
MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
  if (!cell) {
    return true;
  }
  return IsCellPointerValid(cell);
}

} /* namespace gc */
} /* namespace js */

namespace JS {

extern JS_PUBLIC_API Zone* GetTenuredGCThingZone(GCCellPtr thing);

extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);

static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
  if (!js::gc::IsInsideNursery(thing.asCell())) {
    return js::gc::detail::GetTenuredGCThingZone(thing.asCell());
  }

  return GetNurseryCellZone(thing.asCell());
}

static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
  if (!js::gc::IsInsideNursery(str)) {
    return js::gc::detail::GetTenuredGCThingZone(str);
  }

  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
}

extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);

static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  return js::gc::detail::CellIsMarkedGrayIfKnown(tenuredCell);
}

// Specialised gray marking check for use by the cycle collector. This is not
// called during incremental GC or when the gray bits are invalid.
static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGrayInCC(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  if (!js::gc::detail::TenuredCellIsMarkedGray(tenuredCell)) {
    return false;
  }

  MOZ_ASSERT(js::gc::detail::CanCheckGrayBits(tenuredCell));
  return true;
}

extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);

extern JS_PUBLIC_API void EnableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void EnableNurseryBigInts(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryBigInts(JSContext* cx);

/*
 * Returns true when writes to GC thing pointers (and reads from weak pointers)
 * must call an incremental barrier. This is generally only true when running
 * mutator code in-between GC slices. At other times, the barrier may be elided
 * for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);

/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);

/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);

/*
 * Unsets the gray bit for anything reachable from |thing|. |thing|'s kind
 * should not be JS::TraceKind::Shape. |thing| should be non-null. The return
 * value indicates if anything was unmarked.
 */
extern JS_PUBLIC_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);

} /* namespace JS */

namespace js {
namespace gc {

extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);

static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to tenured at the beginning of
  // each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  if (detail::TenuredCellIsMarkedBlack(cell)) {
    return;
  }

  // GC things owned by other runtimes are always black.
  MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());

  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
  } else if (!zone->isGCPreparing() && detail::NonBlackCellIsMarkedGray(cell)) {
    MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
  }

  MOZ_ASSERT_IF(!zone->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell));
}

static MOZ_ALWAYS_INLINE void IncrementalReadBarrier(JS::GCCellPtr thing) {
  // This is a lighter version of ExposeGCThingToActiveJS that doesn't do gray
  // unmarking.

  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
  if (zone->needsIncrementalBarrier() &&
      !detail::TenuredCellIsMarkedBlack(cell)) {
    // GC things owned by other runtimes are always black.
    MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());
    PerformIncrementalReadBarrier(thing);
  }
}

template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);

static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
  // This function does not handle updating nursery pointers. Raw JSObject
  // pointers should be updated separately or replaced with
  // JS::Heap<JSObject*> which handles this automatically.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
  if (IsInsideNursery(*objp)) {
    return false;
  }

  auto zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(*objp));
  if (!zone->isGCSweepingOrCompacting()) {
    return false;
  }

  return EdgeNeedsSweepUnbarrieredSlow(objp);
}

} /* namespace gc */
} /* namespace js */

namespace JS {

/*
 * This should be called when an object that is marked gray is exposed to the JS
 * engine (by handing it to running JS code or writing it into live JS
 * data). During incremental GC, since the gray bits haven't been computed yet,
 * we conservatively mark the object black.
 */
static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
  MOZ_ASSERT(obj);
  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
}

} /* namespace JS */

#endif /* js_HeapAPI_h */