1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
10 #include "mozilla/Atomics.h"
11 #include "mozilla/BitSet.h"
14 #include <type_traits>
16 #include "js/AllocPolicy.h"
17 #include "js/GCAnnotations.h"
18 #include "js/HashTable.h"
19 #include "js/shadow/String.h" // JS::shadow::String
20 #include "js/shadow/Symbol.h" // JS::shadow::Symbol
21 #include "js/shadow/Zone.h" // JS::shadow::Zone
22 #include "js/TraceKind.h"
23 #include "js/TypeDecls.h"
25 /* These values are private to the JS engine. */
28 class NurseryDecommitTask
;
30 JS_PUBLIC_API
bool CurrentThreadCanAccessZone(JS::Zone
* zone
);
// To prevent false sharing, some data structures are aligned to a typical
// cache line size.
static constexpr size_t TypicalCacheLineSize = 64;

// Arenas are the allocation unit within a chunk: 4 KiB each.
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#if defined(XP_MACOSX) && defined(__aarch64__)
// 16 KiB pages (Apple silicon).
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif
// Expected page size, so we could initialize ArenasPerPage at compile-time.
// The actual system page size should be queried by SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;
// GC chunks are 1 MiB and chunk-aligned, so any GC pointer can locate its
// chunk by masking with ChunkMask.
const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

// Every GC cell is aligned to this many bytes (8).
const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

// One mark bit covers each CellAlignBytes-sized slot; each cell uses two mark
// bits (black and gray-or-black).
const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;
70 * The mark bitmap has one bit per each possible cell start position. This
71 * wastes some space for larger GC things but allows us to avoid division by the
72 * cell's size when accessing the bitmap.
74 const size_t ArenaBitmapBits
= ArenaSize
/ CellBytesPerMarkBit
;
75 const size_t ArenaBitmapBytes
= HowMany(ArenaBitmapBits
, 8);
76 const size_t ArenaBitmapWords
= HowMany(ArenaBitmapBits
, JS_BITS_PER_WORD
);
78 // The base class for all GC chunks, either in the nursery or in the tenured
79 // heap memory. This structure is locatable from any GC pointer by aligning to
81 class alignas(CellAlignBytes
) ChunkBase
{
83 ChunkBase(JSRuntime
* rt
, StoreBuffer
* sb
) {
84 MOZ_ASSERT((uintptr_t(this) & ChunkMask
) == 0);
88 void initBase(JSRuntime
* rt
, StoreBuffer
* sb
) {
94 // The store buffer for pointers from tenured things to things in this
95 // chunk. Will be non-null if and only if this is a nursery chunk.
96 StoreBuffer
* storeBuffer
;
98 // Provide quick access to the runtime from absolutely anywhere.
102 // Information about tenured heap chunks.
103 struct TenuredChunkInfo
{
105 friend class ChunkPool
;
106 TenuredChunk
* next
= nullptr;
107 TenuredChunk
* prev
= nullptr;
110 /* Number of free arenas, either committed or decommitted. */
111 uint32_t numArenasFree
;
113 /* Number of free, committed arenas. */
114 uint32_t numArenasFreeCommitted
;
118 * Calculating ArenasPerChunk:
120 * To figure out how many Arenas will fit in a chunk we need to know how much
121 * extra space is available after we allocate the header data. This is a problem
122 * because the header size depends on the number of arenas in the chunk.
124 * The dependent fields are markBits, decommittedPages and
125 * freeCommittedArenas. markBits needs ArenaBitmapBytes bytes per arena,
126 * decommittedPages needs one bit per page and freeCommittedArenas needs one
129 * We can calculate an approximate value by dividing the number of bits of free
130 * space in the chunk by the number of bits needed per arena. This is an
131 * approximation because it doesn't take account of the fact that the variable
132 * sized fields must be rounded up to a whole number of words, or any padding
133 * the compiler adds between fields.
135 * Fortunately, for the chunk and arena size parameters we use this
136 * approximation turns out to be correct. If it were not we might need to adjust
137 * the arena count down by one to allow more space for the padding.
139 const size_t BitsPerPageWithHeaders
=
140 (ArenaSize
+ ArenaBitmapBytes
) * ArenasPerPage
* CHAR_BIT
+ ArenasPerPage
+
142 const size_t ChunkBitsAvailable
=
143 (ChunkSize
- sizeof(ChunkBase
) - sizeof(TenuredChunkInfo
)) * CHAR_BIT
;
144 const size_t PagesPerChunk
= ChunkBitsAvailable
/ BitsPerPageWithHeaders
;
145 const size_t ArenasPerChunk
= PagesPerChunk
* ArenasPerPage
;
146 const size_t FreeCommittedBits
= ArenasPerChunk
;
147 const size_t DecommitBits
= PagesPerChunk
;
148 const size_t BitsPerArenaWithHeaders
=
149 (ArenaSize
+ ArenaBitmapBytes
) * CHAR_BIT
+
150 (DecommitBits
/ ArenasPerChunk
) + 1;
152 const size_t CalculatedChunkSizeRequired
=
153 sizeof(ChunkBase
) + sizeof(TenuredChunkInfo
) +
154 RoundUp(ArenasPerChunk
* ArenaBitmapBytes
, sizeof(uintptr_t)) +
155 RoundUp(FreeCommittedBits
, sizeof(uint32_t) * CHAR_BIT
) / CHAR_BIT
+
156 RoundUp(DecommitBits
, sizeof(uint32_t) * CHAR_BIT
) / CHAR_BIT
+
157 ArenasPerChunk
* ArenaSize
;
158 static_assert(CalculatedChunkSizeRequired
<= ChunkSize
,
159 "Calculated ArenasPerChunk is too large");
161 const size_t CalculatedChunkPadSize
= ChunkSize
- CalculatedChunkSizeRequired
;
162 static_assert(CalculatedChunkPadSize
* CHAR_BIT
< BitsPerArenaWithHeaders
,
163 "Calculated ArenasPerChunk is too small");
165 static_assert(ArenasPerChunk
== 252,
166 "Do not accidentally change our heap's density.");
168 // Mark bitmaps are atomic because they can be written by gray unmarking on the
169 // main thread while read by sweeping on a background thread. The former does
170 // not affect the result of the latter.
171 using MarkBitmapWord
= mozilla::Atomic
<uintptr_t, mozilla::Relaxed
>;
174 * Live objects are marked black or gray. Everything reachable from a JS root is
175 * marked black. Objects marked gray are eligible for cycle collection.
177 * BlackBit: GrayOrBlackBit: Color:
183 enum class ColorBit
: uint32_t { BlackBit
= 0, GrayOrBlackBit
= 1 };
185 // Mark colors. Order is important here: the greater value the 'more marked' a
187 enum class MarkColor
: uint8_t { Gray
= 1, Black
= 2 };
189 // Mark bitmap for a tenured heap chunk.
190 struct alignas(TypicalCacheLineSize
) MarkBitmap
{
191 static constexpr size_t WordCount
= ArenaBitmapWords
* ArenasPerChunk
;
192 MarkBitmapWord bitmap
[WordCount
];
194 inline void getMarkWordAndMask(const TenuredCell
* cell
, ColorBit colorBit
,
195 MarkBitmapWord
** wordp
, uintptr_t* maskp
);
197 // The following are not exported and are defined in gc/Heap.h:
198 inline bool markBit(const TenuredCell
* cell
, ColorBit colorBit
);
199 inline bool isMarkedAny(const TenuredCell
* cell
);
200 inline bool isMarkedBlack(const TenuredCell
* cell
);
201 inline bool isMarkedGray(const TenuredCell
* cell
);
202 inline bool markIfUnmarked(const TenuredCell
* cell
, MarkColor color
);
203 inline bool markIfUnmarkedAtomic(const TenuredCell
* cell
, MarkColor color
);
204 inline void markBlack(const TenuredCell
* cell
);
205 inline void markBlackAtomic(const TenuredCell
* cell
);
206 inline void copyMarkBit(TenuredCell
* dst
, const TenuredCell
* src
,
208 inline void unmark(const TenuredCell
* cell
);
209 inline MarkBitmapWord
* arenaBits(Arena
* arena
);
212 static_assert(ArenaBitmapBytes
* ArenasPerChunk
== sizeof(MarkBitmap
),
213 "Ensure our MarkBitmap actually covers all arenas.");
215 // Bitmap with one bit per page used for decommitted page set.
216 using ChunkPageBitmap
= mozilla::BitSet
<PagesPerChunk
, uint32_t>;
218 // Bitmap with one bit per arena used for free committed arena set.
219 using ChunkArenaBitmap
= mozilla::BitSet
<ArenasPerChunk
, uint32_t>;
221 // Base class containing data members for a tenured heap chunk.
222 class TenuredChunkBase
: public ChunkBase
{
224 TenuredChunkInfo info
;
226 ChunkArenaBitmap freeCommittedArenas
;
227 ChunkPageBitmap decommittedPages
;
230 explicit TenuredChunkBase(JSRuntime
* runtime
) : ChunkBase(runtime
, nullptr) {
231 info
.numArenasFree
= ArenasPerChunk
;
234 void initAsDecommitted();
238 * We sometimes use an index to refer to a cell in an arena. The index for a
239 * cell is found by dividing by the cell alignment so not all indices refer to
242 const size_t ArenaCellIndexBytes
= CellAlignBytes
;
243 const size_t MaxArenaCellIndex
= ArenaSize
/ CellAlignBytes
;
245 const size_t MarkBitmapWordBits
= sizeof(MarkBitmapWord
) * CHAR_BIT
;
247 constexpr size_t FirstArenaAdjustmentBits
=
248 RoundUp(sizeof(gc::TenuredChunkBase
), ArenaSize
) / gc::CellBytesPerMarkBit
;
250 static_assert((FirstArenaAdjustmentBits
% MarkBitmapWordBits
) == 0);
251 constexpr size_t FirstArenaAdjustmentWords
=
252 FirstArenaAdjustmentBits
/ MarkBitmapWordBits
;
254 const size_t ChunkStoreBufferOffset
= offsetof(ChunkBase
, storeBuffer
);
255 const size_t ChunkMarkBitmapOffset
= offsetof(TenuredChunkBase
, markBits
);
257 // Hardcoded offsets into Arena class.
258 const size_t ArenaZoneOffset
= 2 * sizeof(uint32_t);
259 const size_t ArenaHeaderSize
= ArenaZoneOffset
+ 2 * sizeof(uintptr_t) +
260 sizeof(size_t) + sizeof(uintptr_t);
262 // The first word of a GC thing has certain requirements from the GC and is used
263 // to store flags in the low bits.
264 const size_t CellFlagBitsReservedForGC
= 3;
266 // The first word can be used to store JSClass pointers for some thing kinds, so
267 // these must be suitably aligned.
268 const size_t JSClassAlignBytes
= size_t(1) << CellFlagBitsReservedForGC
;
271 /* When downcasting, ensure we are actually the right type. */
272 extern JS_PUBLIC_API
void AssertGCThingHasType(js::gc::Cell
* cell
,
275 inline void AssertGCThingHasType(js::gc::Cell
* cell
, JS::TraceKind kind
) {}
278 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const js::gc::Cell
* cell
);
279 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const js::gc::TenuredCell
* cell
);
286 enum class HeapState
{
287 Idle
, // doing nothing with the GC heap
288 Tracing
, // tracing the GC heap without collecting, e.g.
289 // IterateCompartments()
290 MajorCollecting
, // doing a GC of the major heap
291 MinorCollecting
, // doing a GC of the minor heap (nursery)
292 CycleCollecting
// in the "Unlink" phase of cycle collection
295 JS_PUBLIC_API HeapState
RuntimeHeapState();
297 static inline bool RuntimeHeapIsBusy() {
298 return RuntimeHeapState() != HeapState::Idle
;
301 static inline bool RuntimeHeapIsTracing() {
302 return RuntimeHeapState() == HeapState::Tracing
;
305 static inline bool RuntimeHeapIsMajorCollecting() {
306 return RuntimeHeapState() == HeapState::MajorCollecting
;
309 static inline bool RuntimeHeapIsMinorCollecting() {
310 return RuntimeHeapState() == HeapState::MinorCollecting
;
313 static inline bool RuntimeHeapIsCollecting(HeapState state
) {
314 return state
== HeapState::MajorCollecting
||
315 state
== HeapState::MinorCollecting
;
318 static inline bool RuntimeHeapIsCollecting() {
319 return RuntimeHeapIsCollecting(RuntimeHeapState());
322 static inline bool RuntimeHeapIsCycleCollecting() {
323 return RuntimeHeapState() == HeapState::CycleCollecting
;
/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow different
 * stack limits depending on the type of code running.
 */
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount
};
339 * Default maximum size for the generational nursery in bytes. This is the
340 * initial value. In the browser this configured by the
341 * javascript.options.mem.nursery.max_kb pref.
343 const uint32_t DefaultNurseryMaxBytes
= 16 * js::gc::ChunkSize
;
345 /* Default maximum heap size in bytes to pass to JS_NewContext(). */
346 const uint32_t DefaultHeapMaxBytes
= 32 * 1024 * 1024;
349 * A GC pointer, tagged with the trace kind.
351 * In general, a GC pointer should be stored with an exact type. This class
352 * is for use when that is not possible because a single pointer must point
353 * to several kinds of GC thing.
355 class JS_PUBLIC_API GCCellPtr
{
357 GCCellPtr() : GCCellPtr(nullptr) {}
359 // Construction from a void* and trace kind.
360 GCCellPtr(void* gcthing
, JS::TraceKind traceKind
)
361 : ptr(checkedCast(gcthing
, traceKind
)) {}
363 // Automatically construct a null GCCellPtr from nullptr.
364 MOZ_IMPLICIT
GCCellPtr(decltype(nullptr))
365 : ptr(checkedCast(nullptr, JS::TraceKind::Null
)) {}
367 // Construction from an explicit type.
368 template <typename T
>
369 explicit GCCellPtr(T
* p
)
370 : ptr(checkedCast(p
, JS::MapTypeToTraceKind
<T
>::kind
)) {}
371 explicit GCCellPtr(JSFunction
* p
)
372 : ptr(checkedCast(p
, JS::TraceKind::Object
)) {}
373 explicit GCCellPtr(JSScript
* p
)
374 : ptr(checkedCast(p
, JS::TraceKind::Script
)) {}
375 explicit GCCellPtr(const Value
& v
);
377 JS::TraceKind
kind() const {
378 uintptr_t kindBits
= ptr
& OutOfLineTraceKindMask
;
379 if (kindBits
!= OutOfLineTraceKindMask
) {
380 return JS::TraceKind(kindBits
);
382 return outOfLineKind();
385 // Allow GCCellPtr to be used in a boolean context.
386 explicit operator bool() const {
387 MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null
));
391 // Simplify checks to the kind.
392 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
394 return kind() == JS::MapTypeToTraceKind
<T
>::kind
;
397 // Conversions to more specific types must match the kind. Access to
398 // further refined types is not allowed directly from a GCCellPtr.
399 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
401 MOZ_ASSERT(kind() == JS::MapTypeToTraceKind
<T
>::kind
);
402 // We can't use static_cast here, because the fact that JSObject
403 // inherits from js::gc::Cell is not part of the public API.
404 return *reinterpret_cast<T
*>(asCell());
407 // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
408 // (It would be more symmetrical with |to| for this to return a |Cell&|, but
409 // the result can be |nullptr|, and null references are undefined behavior.)
410 js::gc::Cell
* asCell() const {
411 return reinterpret_cast<js::gc::Cell
*>(ptr
& ~OutOfLineTraceKindMask
);
414 // The CC's trace logger needs an identity that is XPIDL serializable.
415 uint64_t unsafeAsInteger() const {
416 return static_cast<uint64_t>(unsafeAsUIntPtr());
418 // Inline mark bitmap access requires direct pointer arithmetic.
419 uintptr_t unsafeAsUIntPtr() const {
420 MOZ_ASSERT(asCell());
421 MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
422 return reinterpret_cast<uintptr_t>(asCell());
425 MOZ_ALWAYS_INLINE
bool mayBeOwnedByOtherRuntime() const {
426 if (!is
<JSString
>() && !is
<JS::Symbol
>()) {
429 if (is
<JSString
>()) {
430 return JS::shadow::String::isPermanentAtom(asCell());
432 MOZ_ASSERT(is
<JS::Symbol
>());
433 return JS::shadow::Symbol::isWellKnownSymbol(asCell());
437 static uintptr_t checkedCast(void* p
, JS::TraceKind traceKind
) {
438 auto* cell
= static_cast<js::gc::Cell
*>(p
);
439 MOZ_ASSERT((uintptr_t(p
) & OutOfLineTraceKindMask
) == 0);
440 AssertGCThingHasType(cell
, traceKind
);
441 // Store trace in the bottom bits of pointer for common kinds.
442 uintptr_t kindBits
= uintptr_t(traceKind
);
443 if (kindBits
>= OutOfLineTraceKindMask
) {
444 kindBits
= OutOfLineTraceKindMask
;
446 return uintptr_t(p
) | kindBits
;
449 JS::TraceKind
outOfLineKind() const;
454 // Unwraps the given GCCellPtr, calls the functor |f| with a template argument
455 // of the actual type of the pointer, and returns the result.
456 template <typename F
>
457 auto MapGCThingTyped(GCCellPtr thing
, F
&& f
) {
458 switch (thing
.kind()) {
459 #define JS_EXPAND_DEF(name, type, _, _1) \
460 case JS::TraceKind::name: \
461 return f(&thing.as<type>());
462 JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF
);
465 MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
469 // Unwraps the given GCCellPtr and calls the functor |f| with a template
470 // argument of the actual type of the pointer. Doesn't return anything.
471 template <typename F
>
472 void ApplyGCThingTyped(GCCellPtr thing
, F
&& f
) {
473 // This function doesn't do anything but is supplied for symmetry with other
474 // MapGCThingTyped/ApplyGCThingTyped implementations that have to wrap the
475 // functor to return a dummy value that is ignored.
476 MapGCThingTyped(thing
, f
);
481 // These are defined in the toplevel namespace instead of within JS so that
482 // they won't shadow other operator== overloads (see bug 1456512.)
484 inline bool operator==(JS::GCCellPtr ptr1
, JS::GCCellPtr ptr2
) {
485 return ptr1
.asCell() == ptr2
.asCell();
488 inline bool operator!=(JS::GCCellPtr ptr1
, JS::GCCellPtr ptr2
) {
489 return !(ptr1
== ptr2
);
496 MOZ_ALWAYS_INLINE
void MarkBitmap::getMarkWordAndMask(const TenuredCell
* cell
,
498 MarkBitmapWord
** wordp
,
500 // Note: the JIT pre-barrier trampolines inline this code. Update
501 // MacroAssembler::emitPreBarrierFastPath code too when making changes here!
503 MOZ_ASSERT(size_t(colorBit
) < MarkBitsPerCell
);
505 size_t offset
= uintptr_t(cell
) & ChunkMask
;
506 const size_t bit
= offset
/ CellBytesPerMarkBit
+ size_t(colorBit
);
507 size_t word
= bit
/ MarkBitmapWordBits
- FirstArenaAdjustmentWords
;
508 MOZ_ASSERT(word
< WordCount
);
509 *wordp
= &bitmap
[word
];
510 *maskp
= uintptr_t(1) << (bit
% MarkBitmapWordBits
);
515 static MOZ_ALWAYS_INLINE ChunkBase
* GetCellChunkBase(const Cell
* cell
) {
517 auto* chunk
= reinterpret_cast<ChunkBase
*>(uintptr_t(cell
) & ~ChunkMask
);
518 MOZ_ASSERT(chunk
->runtime
);
522 static MOZ_ALWAYS_INLINE TenuredChunkBase
* GetCellChunkBase(
523 const TenuredCell
* cell
) {
526 reinterpret_cast<TenuredChunkBase
*>(uintptr_t(cell
) & ~ChunkMask
);
527 MOZ_ASSERT(chunk
->runtime
);
531 static MOZ_ALWAYS_INLINE
JS::Zone
* GetTenuredGCThingZone(const uintptr_t addr
) {
533 const uintptr_t zone_addr
= (addr
& ~ArenaMask
) | ArenaZoneOffset
;
534 return *reinterpret_cast<JS::Zone
**>(zone_addr
);
537 static MOZ_ALWAYS_INLINE
bool TenuredCellIsMarkedBlack(
538 const TenuredCell
* cell
) {
539 // Return true if BlackBit is set.
542 MOZ_ASSERT(!js::gc::IsInsideNursery(cell
));
544 MarkBitmapWord
* blackWord
;
546 TenuredChunkBase
* chunk
= GetCellChunkBase(cell
);
547 chunk
->markBits
.getMarkWordAndMask(cell
, js::gc::ColorBit::BlackBit
,
548 &blackWord
, &blackMask
);
549 return *blackWord
& blackMask
;
552 static MOZ_ALWAYS_INLINE
bool NonBlackCellIsMarkedGray(
553 const TenuredCell
* cell
) {
554 // Return true if GrayOrBlackBit is set. Callers should check BlackBit first.
557 MOZ_ASSERT(!js::gc::IsInsideNursery(cell
));
558 MOZ_ASSERT(!TenuredCellIsMarkedBlack(cell
));
560 MarkBitmapWord
* grayWord
;
562 TenuredChunkBase
* chunk
= GetCellChunkBase(cell
);
563 chunk
->markBits
.getMarkWordAndMask(cell
, js::gc::ColorBit::GrayOrBlackBit
,
564 &grayWord
, &grayMask
);
565 return *grayWord
& grayMask
;
568 static MOZ_ALWAYS_INLINE
bool TenuredCellIsMarkedGray(const TenuredCell
* cell
) {
569 return !TenuredCellIsMarkedBlack(cell
) && NonBlackCellIsMarkedGray(cell
);
572 static MOZ_ALWAYS_INLINE
bool CellIsMarkedGray(const Cell
* cell
) {
574 if (js::gc::IsInsideNursery(cell
)) {
577 return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell
*>(cell
));
580 extern JS_PUBLIC_API
bool CanCheckGrayBits(const TenuredCell
* cell
);
582 extern JS_PUBLIC_API
bool CellIsMarkedGrayIfKnown(const TenuredCell
* cell
);
585 extern JS_PUBLIC_API
void AssertCellIsNotGray(const Cell
* cell
);
587 extern JS_PUBLIC_API
bool ObjectIsMarkedBlack(const JSObject
* obj
);
590 MOZ_ALWAYS_INLINE
bool CellHasStoreBuffer(const Cell
* cell
) {
591 return GetCellChunkBase(cell
)->storeBuffer
;
594 } /* namespace detail */
596 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const Cell
* cell
) {
598 return detail::CellHasStoreBuffer(cell
);
601 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const TenuredCell
* cell
) {
603 MOZ_ASSERT(!IsInsideNursery(reinterpret_cast<const Cell
*>(cell
)));
607 // Allow use before the compiler knows the derivation of JSObject, JSString, and
609 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const JSObject
* obj
) {
610 return IsInsideNursery(reinterpret_cast<const Cell
*>(obj
));
612 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const JSString
* str
) {
613 return IsInsideNursery(reinterpret_cast<const Cell
*>(str
));
615 MOZ_ALWAYS_INLINE
bool IsInsideNursery(const JS::BigInt
* bi
) {
616 return IsInsideNursery(reinterpret_cast<const Cell
*>(bi
));
619 MOZ_ALWAYS_INLINE
bool IsCellPointerValid(const void* ptr
) {
620 auto addr
= uintptr_t(ptr
);
621 if (addr
< ChunkSize
|| addr
% CellAlignBytes
!= 0) {
625 auto* cell
= reinterpret_cast<const Cell
*>(ptr
);
626 if (!IsInsideNursery(cell
)) {
627 return detail::GetTenuredGCThingZone(addr
) != nullptr;
633 MOZ_ALWAYS_INLINE
bool IsCellPointerValidOrNull(const void* cell
) {
637 return IsCellPointerValid(cell
);
645 static MOZ_ALWAYS_INLINE Zone
* GetTenuredGCThingZone(GCCellPtr thing
) {
646 MOZ_ASSERT(!js::gc::IsInsideNursery(thing
.asCell()));
647 return js::gc::detail::GetTenuredGCThingZone(thing
.unsafeAsUIntPtr());
650 extern JS_PUBLIC_API Zone
* GetNurseryCellZone(js::gc::Cell
* cell
);
652 static MOZ_ALWAYS_INLINE Zone
* GetGCThingZone(GCCellPtr thing
) {
653 if (!js::gc::IsInsideNursery(thing
.asCell())) {
654 return js::gc::detail::GetTenuredGCThingZone(thing
.unsafeAsUIntPtr());
657 return GetNurseryCellZone(thing
.asCell());
660 static MOZ_ALWAYS_INLINE Zone
* GetStringZone(JSString
* str
) {
661 if (!js::gc::IsInsideNursery(str
)) {
662 return js::gc::detail::GetTenuredGCThingZone(
663 reinterpret_cast<uintptr_t>(str
));
665 return GetNurseryCellZone(reinterpret_cast<js::gc::Cell
*>(str
));
668 extern JS_PUBLIC_API Zone
* GetObjectZone(JSObject
* obj
);
670 static MOZ_ALWAYS_INLINE
bool GCThingIsMarkedGray(GCCellPtr thing
) {
671 js::gc::Cell
* cell
= thing
.asCell();
672 if (IsInsideNursery(cell
)) {
676 auto* tenuredCell
= reinterpret_cast<js::gc::TenuredCell
*>(cell
);
677 return js::gc::detail::CellIsMarkedGrayIfKnown(tenuredCell
);
680 // Specialised gray marking check for use by the cycle collector. This is not
681 // called during incremental GC or when the gray bits are invalid.
682 static MOZ_ALWAYS_INLINE
bool GCThingIsMarkedGrayInCC(GCCellPtr thing
) {
683 js::gc::Cell
* cell
= thing
.asCell();
684 if (IsInsideNursery(cell
)) {
688 auto* tenuredCell
= reinterpret_cast<js::gc::TenuredCell
*>(cell
);
689 if (!js::gc::detail::TenuredCellIsMarkedGray(tenuredCell
)) {
693 MOZ_ASSERT(js::gc::detail::CanCheckGrayBits(tenuredCell
));
698 extern JS_PUBLIC_API
JS::TraceKind
GCThingTraceKind(void* thing
);
700 extern JS_PUBLIC_API
void EnableNurseryStrings(JSContext
* cx
);
702 extern JS_PUBLIC_API
void DisableNurseryStrings(JSContext
* cx
);
704 extern JS_PUBLIC_API
void EnableNurseryBigInts(JSContext
* cx
);
706 extern JS_PUBLIC_API
void DisableNurseryBigInts(JSContext
* cx
);
709 * Returns true when writes to GC thing pointers (and reads from weak pointers)
710 * must call an incremental barrier. This is generally only true when running
711 * mutator code in-between GC slices. At other times, the barrier may be elided
714 extern JS_PUBLIC_API
bool IsIncrementalBarrierNeeded(JSContext
* cx
);
717 * Notify the GC that a reference to a JSObject is about to be overwritten.
718 * This method must be called if IsIncrementalBarrierNeeded.
720 extern JS_PUBLIC_API
void IncrementalPreWriteBarrier(JSObject
* obj
);
723 * Notify the GC that a reference to a tenured GC cell is about to be
724 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
726 extern JS_PUBLIC_API
void IncrementalPreWriteBarrier(GCCellPtr thing
);
729 * Unsets the gray bit for anything reachable from |thing|. |kind| should not be
730 * JS::TraceKind::Shape. |thing| should be non-null. The return value indicates
731 * if anything was unmarked.
733 extern JS_PUBLIC_API
bool UnmarkGrayGCThingRecursively(GCCellPtr thing
);
740 extern JS_PUBLIC_API
void PerformIncrementalReadBarrier(JS::GCCellPtr thing
);
742 static MOZ_ALWAYS_INLINE
void ExposeGCThingToActiveJS(JS::GCCellPtr thing
) {
743 // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
744 // called while we are tracing the heap, e.g. during memory reporting
745 // (see bug 1313318).
746 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
748 // GC things residing in the nursery cannot be gray: they have no mark bits.
749 // All live objects in the nursery are moved to tenured at the beginning of
750 // each GC slice, so the gray marker never sees nursery things.
751 if (IsInsideNursery(thing
.asCell())) {
755 auto* cell
= reinterpret_cast<TenuredCell
*>(thing
.asCell());
756 if (detail::TenuredCellIsMarkedBlack(cell
)) {
760 // GC things owned by other runtimes are always black.
761 MOZ_ASSERT(!thing
.mayBeOwnedByOtherRuntime());
763 auto* zone
= JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing
));
764 if (zone
->needsIncrementalBarrier()) {
765 PerformIncrementalReadBarrier(thing
);
766 } else if (!zone
->isGCPreparing() && detail::NonBlackCellIsMarkedGray(cell
)) {
767 MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing
));
770 MOZ_ASSERT_IF(!zone
->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell
));
773 static MOZ_ALWAYS_INLINE
void IncrementalReadBarrier(JS::GCCellPtr thing
) {
774 // This is a lighter version of ExposeGCThingToActiveJS that doesn't do gray
777 if (IsInsideNursery(thing
.asCell())) {
781 auto* zone
= JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing
));
782 auto* cell
= reinterpret_cast<TenuredCell
*>(thing
.asCell());
783 if (zone
->needsIncrementalBarrier() &&
784 !detail::TenuredCellIsMarkedBlack(cell
)) {
785 // GC things owned by other runtimes are always black.
786 MOZ_ASSERT(!thing
.mayBeOwnedByOtherRuntime());
787 PerformIncrementalReadBarrier(thing
);
791 template <typename T
>
792 extern JS_PUBLIC_API
bool EdgeNeedsSweepUnbarrieredSlow(T
* thingp
);
794 static MOZ_ALWAYS_INLINE
bool EdgeNeedsSweepUnbarriered(JSObject
** objp
) {
795 // This function does not handle updating nursery pointers. Raw JSObject
796 // pointers should be updated separately or replaced with
797 // JS::Heap<JSObject*> which handles this automatically.
798 MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
799 if (IsInsideNursery(*objp
)) {
804 JS::shadow::Zone::from(detail::GetTenuredGCThingZone(uintptr_t(*objp
)));
805 if (!zone
->isGCSweepingOrCompacting()) {
809 return EdgeNeedsSweepUnbarrieredSlow(objp
);
818 * This should be called when an object that is marked gray is exposed to the JS
819 * engine (by handing it to running JS code or writing it into live JS
820 * data). During incremental GC, since the gray bits haven't been computed yet,
821 * we conservatively mark the object black.
823 static MOZ_ALWAYS_INLINE
void ExposeObjectToActiveJS(JSObject
* obj
) {
825 MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj
));
826 js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj
));
831 #endif /* js_HeapAPI_h */