/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef js_HeapAPI_h
#define js_HeapAPI_h

#include "mozilla/Atomics.h"
#include "mozilla/BitSet.h"

#include <limits.h>
#include <type_traits>

#include "js/AllocPolicy.h"
#include "js/GCAnnotations.h"
#include "js/HashTable.h"
#include "js/shadow/String.h"  // JS::shadow::String
#include "js/shadow/Symbol.h"  // JS::shadow::Symbol
#include "js/shadow/Zone.h"    // JS::shadow::Zone
#include "js/TraceKind.h"
#include "js/TypeDecls.h"
/* These values are private to the JS engine. */
namespace js {

class NurseryDecommitTask;

JS_PUBLIC_API bool CurrentThreadCanAccessZone(JS::Zone* zone);

namespace gc {

class Arena;
struct Cell;
class TenuredChunk;
class StoreBuffer;
class TenuredCell;
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#if defined(XP_MACOSX) && defined(__aarch64__)
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif
// Expected page size, so we can initialize ArenasPerPage at compile time.
// The actual system page size should be queried with SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;
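// For example: with the common 4 KiB pages, ArenasPerPage == 1, so each arena
// occupies exactly one page; with the 16 KiB pages used on Apple Silicon
// macOS, ArenasPerPage == 4.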

#ifdef JS_GC_SMALL_CHUNK_SIZE
const size_t ChunkShift = 18;
#else
const size_t ChunkShift = 20;
#endif
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;

/*
 * The mark bitmap has one bit for each possible cell start position. This
 * wastes some space for larger GC things but allows us to avoid division by the
 * cell's size when accessing the bitmap.
 */
const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);
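// For example, with CellAlignBytes == 8 and 4 KiB arenas this works out to
// 512 possible cell start positions per arena, i.e. 64 bytes of mark bitmap
// (8 words on a 64-bit platform) per arena.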

// The base class for all GC chunks, either in the nursery or in the tenured
// heap memory. This structure is locatable from any GC pointer by aligning to
// the chunk size.
class alignas(CellAlignBytes) ChunkBase {
 protected:
  ChunkBase(JSRuntime* rt, StoreBuffer* sb) {
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    initBase(rt, sb);
  }

  void initBase(JSRuntime* rt, StoreBuffer* sb) {
    runtime = rt;
    storeBuffer = sb;
  }

 public:
  // The store buffer for pointers from tenured things to things in this
  // chunk. Will be non-null if and only if this is a nursery chunk.
  StoreBuffer* storeBuffer;

  // Provide quick access to the runtime from absolutely anywhere.
  JSRuntime* runtime;
};
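
// For example, the chunk header for any cell can be recovered by masking off
// the low bits of its address, which is exactly what detail::GetCellChunkBase
// below does:
//
//   auto* chunk = reinterpret_cast<ChunkBase*>(uintptr_t(cell) & ~ChunkMask);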

// Information about tenured heap chunks.
struct TenuredChunkInfo {
 private:
  friend class ChunkPool;
  TenuredChunk* next = nullptr;
  TenuredChunk* prev = nullptr;

 public:
  /* Number of free arenas, either committed or decommitted. */
  uint32_t numArenasFree;

  /* Number of free, committed arenas. */
  uint32_t numArenasFreeCommitted;
};

/*
 * Calculating ArenasPerChunk:
 *
 * To figure out how many Arenas will fit in a chunk we need to know how much
 * extra space is available after we allocate the header data. This is a problem
 * because the header size depends on the number of arenas in the chunk.
 *
 * The dependent fields are markBits, decommittedPages and
 * freeCommittedArenas. markBits needs ArenaBitmapBytes bytes per arena,
 * decommittedPages needs one bit per page and freeCommittedArenas needs one
 * bit per arena.
 *
 * We can calculate an approximate value by dividing the number of bits of free
 * space in the chunk by the number of bits needed per arena. This is an
 * approximation because it doesn't take account of the fact that the variable
 * sized fields must be rounded up to a whole number of words, or any padding
 * the compiler adds between fields.
 *
 * Fortunately, for the chunk and arena size parameters we use, this
 * approximation turns out to be correct. If it were not we might need to adjust
 * the arena count down by one to allow more space for the padding.
 */
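// Worked example (illustrative, assuming a 64-bit build with 4 KiB pages and
// the default 1 MiB chunks): ArenasPerPage == 1, so BitsPerPageWithHeaders is
// (4096 + 64) * 1 * 8 + 1 + 1 == 33282. With sizeof(ChunkBase) == 16 and
// sizeof(TenuredChunkInfo) == 24, ChunkBitsAvailable is
// (1048576 - 40) * 8 == 8388288, giving PagesPerChunk == 252 and
// ArenasPerChunk == 252, which matches the EXPECTED_ARENA_COUNT assertion
// below.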
const size_t BitsPerPageWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * ArenasPerPage * CHAR_BIT + ArenasPerPage +
    1;
const size_t ChunkBitsAvailable =
    (ChunkSize - sizeof(ChunkBase) - sizeof(TenuredChunkInfo)) * CHAR_BIT;
const size_t PagesPerChunk = ChunkBitsAvailable / BitsPerPageWithHeaders;
const size_t ArenasPerChunk = PagesPerChunk * ArenasPerPage;
const size_t FreeCommittedBits = ArenasPerChunk;
const size_t DecommitBits = PagesPerChunk;
const size_t BitsPerArenaWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * CHAR_BIT +
    (DecommitBits / ArenasPerChunk) + 1;

const size_t CalculatedChunkSizeRequired =
    sizeof(ChunkBase) + sizeof(TenuredChunkInfo) +
    RoundUp(ArenasPerChunk * ArenaBitmapBytes, sizeof(uintptr_t)) +
    RoundUp(FreeCommittedBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    RoundUp(DecommitBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    ArenasPerChunk * ArenaSize;
static_assert(CalculatedChunkSizeRequired <= ChunkSize,
              "Calculated ArenasPerChunk is too large");

const size_t CalculatedChunkPadSize = ChunkSize - CalculatedChunkSizeRequired;
static_assert(CalculatedChunkPadSize * CHAR_BIT < BitsPerArenaWithHeaders,
              "Calculated ArenasPerChunk is too small");

// Define a macro for the expected number of arenas so its value appears in the
// error message if the assertion fails.
#ifdef JS_GC_SMALL_CHUNK_SIZE
# define EXPECTED_ARENA_COUNT 63
#else
# define EXPECTED_ARENA_COUNT 252
#endif
static_assert(ArenasPerChunk == EXPECTED_ARENA_COUNT,
              "Do not accidentally change our heap's density.");
#undef EXPECTED_ARENA_COUNT

// Mark bitmaps are atomic because they can be written by gray unmarking on the
// main thread while read by sweeping on a background thread. The former does
// not affect the result of the latter.
using MarkBitmapWord = mozilla::Atomic<uintptr_t, mozilla::Relaxed>;

/*
 * Live objects are marked black or gray. Everything reachable from a JS root is
 * marked black. Objects marked gray are eligible for cycle collection.
 *
 *    BlackBit:     GrayOrBlackBit:  Color:
 *       0               0           white
 *       0               1           gray
 *       1               0           black
 *       1               1           black
 */
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

// Mark colors. Order is important here: the greater the value, the 'more
// marked' a cell is.
enum class MarkColor : uint8_t { Gray = 1, Black = 2 };

// Mark bitmap for a tenured heap chunk.
struct MarkBitmap {
  static constexpr size_t WordCount = ArenaBitmapWords * ArenasPerChunk;
  MarkBitmapWord bitmap[WordCount];

  inline void getMarkWordAndMask(const TenuredCell* cell, ColorBit colorBit,
                                 MarkBitmapWord** wordp, uintptr_t* maskp);

  // The following are not exported and are defined in gc/Heap.h:
  inline bool markBit(const TenuredCell* cell, ColorBit colorBit);
  inline bool isMarkedAny(const TenuredCell* cell);
  inline bool isMarkedBlack(const TenuredCell* cell);
  inline bool isMarkedGray(const TenuredCell* cell);
  inline bool markIfUnmarked(const TenuredCell* cell, MarkColor color);
  inline void markBlack(const TenuredCell* cell);
  inline void copyMarkBit(TenuredCell* dst, const TenuredCell* src,
                          ColorBit colorBit);
  inline void unmark(const TenuredCell* cell);
  inline MarkBitmapWord* arenaBits(Arena* arena);
};

static_assert(ArenaBitmapBytes * ArenasPerChunk == sizeof(MarkBitmap),
              "Ensure our MarkBitmap actually covers all arenas.");

// Bitmap with one bit per page used for decommitted page set.
using ChunkPageBitmap = mozilla::BitSet<PagesPerChunk, uint32_t>;

// Bitmap with one bit per arena used for free committed arena set.
using ChunkArenaBitmap = mozilla::BitSet<ArenasPerChunk, uint32_t>;

// Base class containing data members for a tenured heap chunk.
class TenuredChunkBase : public ChunkBase {
 public:
  TenuredChunkInfo info;
  MarkBitmap markBits;
  ChunkArenaBitmap freeCommittedArenas;
  ChunkPageBitmap decommittedPages;

 protected:
  explicit TenuredChunkBase(JSRuntime* runtime) : ChunkBase(runtime, nullptr) {
    info.numArenasFree = ArenasPerChunk;
  }

  void initAsDecommitted();
};

/*
 * We sometimes use an index to refer to a cell in an arena. The index for a
 * cell is found by dividing by the cell alignment, so not all indices refer to
 * valid cells.
 */
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;

const size_t MarkBitmapWordBits = sizeof(MarkBitmapWord) * CHAR_BIT;

constexpr size_t FirstArenaAdjustmentBits =
    RoundUp(sizeof(gc::TenuredChunkBase), ArenaSize) / gc::CellBytesPerMarkBit;

static_assert((FirstArenaAdjustmentBits % MarkBitmapWordBits) == 0);
constexpr size_t FirstArenaAdjustmentWords =
    FirstArenaAdjustmentBits / MarkBitmapWordBits;
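
// getMarkWordAndMask below computes a bit index from a cell's offset within
// the whole chunk, but the bitmap has no words for the header area at the
// start of the chunk, so the index is adjusted down by
// FirstArenaAdjustmentWords. Illustrative numbers, assuming a 64-bit build
// with the default 1 MiB chunks: the header rounds up to four arena-sized
// slots (16 KiB), so FirstArenaAdjustmentBits == 16384 / 8 == 2048 and
// FirstArenaAdjustmentWords == 2048 / 64 == 32.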

const size_t ChunkStoreBufferOffset = offsetof(ChunkBase, storeBuffer);
const size_t ChunkMarkBitmapOffset = offsetof(TenuredChunkBase, markBits);

// Hardcoded offsets into Arena class.
const size_t ArenaZoneOffset = 2 * sizeof(uint32_t);
const size_t ArenaHeaderSize = ArenaZoneOffset + 2 * sizeof(uintptr_t) +
                               sizeof(size_t) + sizeof(uintptr_t);

// The first word of a GC thing has certain requirements from the GC and is used
// to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds, so
// these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;

#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_PUBLIC_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif

MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::TenuredCell* cell);

} /* namespace gc */
} /* namespace js */

namespace JS {

enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

JS_PUBLIC_API HeapState RuntimeHeapState();

static inline bool RuntimeHeapIsBusy() {
  return RuntimeHeapState() != HeapState::Idle;
}

static inline bool RuntimeHeapIsTracing() {
  return RuntimeHeapState() == HeapState::Tracing;
}

static inline bool RuntimeHeapIsMajorCollecting() {
  return RuntimeHeapState() == HeapState::MajorCollecting;
}

static inline bool RuntimeHeapIsMinorCollecting() {
  return RuntimeHeapState() == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting(HeapState state) {
  return state == HeapState::MajorCollecting ||
         state == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting() {
  return RuntimeHeapIsCollecting(RuntimeHeapState());
}

static inline bool RuntimeHeapIsCycleCollecting() {
  return RuntimeHeapState() == HeapState::CycleCollecting;
}
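
// Example (illustrative): code that must not run while the GC is active can
// guard itself with these predicates, as ExposeGCThingToActiveJS does below:
//
//   MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());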

/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow different
 * stack limits depending on the type of code running.
 */
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount
};

/*
 * Default maximum size for the generational nursery in bytes. This is the
 * initial value. In the browser this is configured by the
 * javascript.options.mem.nursery.max_kb pref.
 */
const uint32_t DefaultNurseryMaxBytes = 16 * js::gc::ChunkSize;

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;

/**
 * A GC pointer, tagged with the trace kind.
 *
 * In general, a GC pointer should be stored with an exact type. This class
 * is for use when that is not possible because a single pointer must point
 * to several kinds of GC thing.
 */
class JS_PUBLIC_API GCCellPtr {
 public:
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);

  JS::TraceKind kind() const {
    uintptr_t kindBits = ptr & OutOfLineTraceKindMask;
    if (kindBits != OutOfLineTraceKindMask) {
      return JS::TraceKind(kindBits);
    }
    return outOfLineKind();
  }

  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }

  // Simplify checks to the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }

  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |to| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }
  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }

  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }

 private:
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    auto* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Store the trace kind in the bottom bits of the pointer for common kinds.
    uintptr_t kindBits = uintptr_t(traceKind);
    if (kindBits >= OutOfLineTraceKindMask) {
      kindBits = OutOfLineTraceKindMask;
    }
    return uintptr_t(p) | kindBits;
  }

  JS::TraceKind outOfLineKind() const;

  uintptr_t ptr;
} JS_HAZ_GC_POINTER;
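
// Example usage (an illustrative sketch, not part of this header): wrap a
// concrete GC thing, then check the tag before converting back.
//
//   JSObject* obj = ...;
//   JS::GCCellPtr cellPtr(obj);  // kind() == JS::TraceKind::Object
//   if (cellPtr.is<JSObject>()) {
//     JSObject& unwrapped = cellPtr.as<JSObject>();
//   }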

// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
#define JS_EXPAND_DEF(name, type, _, _1) \
  case JS::TraceKind::name:              \
    return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}

// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This function doesn't do anything extra but is supplied for symmetry with
  // other MapGCThingTyped/ApplyGCThingTyped implementations that have to wrap
  // the functor to return a dummy value that is ignored.
  MapGCThingTyped(thing, f);
}
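
// Example (illustrative): dispatch a generic functor on the dynamic trace
// kind. |SizeOfThing| is a hypothetical helper template, instantiated once
// per trace kind:
//
//   size_t bytes =
//       MapGCThingTyped(thing, [](auto* t) { return SizeOfThing(t); });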

} /* namespace JS */

// These are defined in the toplevel namespace instead of within JS so that
// they won't shadow other operator== overloads (see bug 1456512).

inline bool operator==(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return ptr1.asCell() == ptr2.asCell();
}

inline bool operator!=(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return !(ptr1 == ptr2);
}

namespace js {
namespace gc {

/* static */
MOZ_ALWAYS_INLINE void MarkBitmap::getMarkWordAndMask(const TenuredCell* cell,
                                                      ColorBit colorBit,
                                                      MarkBitmapWord** wordp,
                                                      uintptr_t* maskp) {
  // Note: the JIT pre-barrier trampolines inline this code. Update
  // MacroAssembler::emitPreBarrierFastPath code too when making changes here!

  MOZ_ASSERT(size_t(colorBit) < MarkBitsPerCell);

  size_t offset = uintptr_t(cell) & ChunkMask;
  const size_t bit = offset / CellBytesPerMarkBit + size_t(colorBit);
  size_t word = bit / MarkBitmapWordBits - FirstArenaAdjustmentWords;
  MOZ_ASSERT(word < WordCount);
  *wordp = &bitmap[word];
  *maskp = uintptr_t(1) << (bit % MarkBitmapWordBits);
}
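
// Worked example (illustrative, assuming a 64-bit build where the chunk
// header occupies the first four arena-sized slots, as computed above): for a
// cell at chunk offset 0x4000 (the first arena) and ColorBit::BlackBit,
// bit == 0x4000 / 8 == 2048, word == 2048 / 64 - 32 == 0 and mask == 1, so
// the first arena's mark bits start at the first word of the bitmap.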

namespace detail {

static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
  MOZ_ASSERT(cell);
  return reinterpret_cast<ChunkBase*>(uintptr_t(cell) & ~ChunkMask);
}

static MOZ_ALWAYS_INLINE TenuredChunkBase* GetCellChunkBase(
    const TenuredCell* cell) {
  MOZ_ASSERT(cell);
  return reinterpret_cast<TenuredChunkBase*>(uintptr_t(cell) & ~ChunkMask);
}

static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const uintptr_t addr) {
  MOZ_ASSERT(addr);
  const uintptr_t zone_addr = (addr & ~ArenaMask) | ArenaZoneOffset;
  return *reinterpret_cast<JS::Zone**>(zone_addr);
}

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedBlack(
    const TenuredCell* cell) {
  // Return true if BlackBit is set.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));

  MarkBitmapWord* blackWord;
  uintptr_t blackMask;
  TenuredChunkBase* chunk = GetCellChunkBase(cell);
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::BlackBit,
                                     &blackWord, &blackMask);
  return *blackWord & blackMask;
}

static MOZ_ALWAYS_INLINE bool NonBlackCellIsMarkedGray(
    const TenuredCell* cell) {
  // Return true if GrayOrBlackBit is set. Callers should check BlackBit first.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
  MOZ_ASSERT(!TenuredCellIsMarkedBlack(cell));

  MarkBitmapWord* grayWord;
  uintptr_t grayMask;
  TenuredChunkBase* chunk = GetCellChunkBase(cell);
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::GrayOrBlackBit,
                                     &grayWord, &grayMask);
  return *grayWord & grayMask;
}

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(const TenuredCell* cell) {
  return !TenuredCellIsMarkedBlack(cell) && NonBlackCellIsMarkedGray(cell);
}

static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
  MOZ_ASSERT(cell);
  if (js::gc::IsInsideNursery(cell)) {
    return false;
  }
  return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell*>(cell));
}

extern JS_PUBLIC_API bool CanCheckGrayBits(const TenuredCell* cell);

extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const TenuredCell* cell);

#ifdef DEBUG
extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
#endif

MOZ_ALWAYS_INLINE bool CellHasStoreBuffer(const Cell* cell) {
  return GetCellChunkBase(cell)->storeBuffer;
}

} /* namespace detail */

MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
  MOZ_ASSERT(cell);
  return detail::CellHasStoreBuffer(cell);
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const TenuredCell* cell) {
  MOZ_ASSERT(cell);
  MOZ_ASSERT(!IsInsideNursery(reinterpret_cast<const Cell*>(cell)));
  return false;
}

// Allow use before the compiler knows the derivation of JSObject, JSString, and
// JS::BigInt.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}

MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* ptr) {
  auto addr = uintptr_t(ptr);
  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    return false;
  }

  auto* cell = reinterpret_cast<const Cell*>(ptr);
  if (!IsInsideNursery(cell)) {
    return detail::GetTenuredGCThingZone(addr) != nullptr;
  }

  return true;
}

MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
  if (!cell) {
    return true;
  }
  return IsCellPointerValid(cell);
}

} /* namespace gc */
} /* namespace js */

namespace JS {

static MOZ_ALWAYS_INLINE Zone* GetTenuredGCThingZone(GCCellPtr thing) {
  MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
  return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
}

extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);

static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
  if (!js::gc::IsInsideNursery(thing.asCell())) {
    return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
  }

  return GetNurseryCellZone(thing.asCell());
}

static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
  if (!js::gc::IsInsideNursery(str)) {
    return js::gc::detail::GetTenuredGCThingZone(
        reinterpret_cast<uintptr_t>(str));
  }

  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
}

extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);

static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  return js::gc::detail::CellIsMarkedGrayIfKnown(tenuredCell);
}

// Specialised gray marking check for use by the cycle collector. This is not
// called during incremental GC or when the gray bits are invalid.
static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGrayInCC(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  if (!js::gc::detail::TenuredCellIsMarkedGray(tenuredCell)) {
    return false;
  }

  MOZ_ASSERT(js::gc::detail::CanCheckGrayBits(tenuredCell));

  return true;
}

extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);

extern JS_PUBLIC_API void EnableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void EnableNurseryBigInts(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryBigInts(JSContext* cx);

/*
 * Returns true when writes to GC thing pointers (and reads from weak pointers)
 * must call an incremental barrier. This is generally only true when running
 * mutator code in-between GC slices. At other times, the barrier may be elided
 * for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);

/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);

/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);
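
// Example (illustrative): a manual pre-barriered overwrite of a strong object
// reference, following the contract documented above. |obj| and |newValue|
// are hypothetical:
//
//   if (JS::IsIncrementalBarrierNeeded(cx)) {
//     JS::IncrementalPreWriteBarrier(obj);  // |obj| is about to be replaced.
//   }
//   obj = newValue;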

/*
 * Unsets the gray bit for anything reachable from |thing|. The thing's trace
 * kind should not be JS::TraceKind::Shape. |thing| should be non-null. The
 * return value indicates if anything was unmarked.
 */
extern JS_PUBLIC_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);

}  // namespace JS

namespace js {
namespace gc {

extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);

static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to tenured at the beginning of
  // each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  if (detail::TenuredCellIsMarkedBlack(cell)) {
    return;
  }

  // GC things owned by other runtimes are always black.
  MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());

  auto* zone = JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing));
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
  } else if (!zone->isGCPreparing() && detail::NonBlackCellIsMarkedGray(cell)) {
    MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
  }

  MOZ_ASSERT_IF(!zone->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell));
}

static MOZ_ALWAYS_INLINE void IncrementalReadBarrier(JS::GCCellPtr thing) {
  // This is a lighter version of ExposeGCThingToActiveJS that doesn't do gray
  // unmarking.

  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* zone = JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing));
  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  if (zone->needsIncrementalBarrier() &&
      !detail::TenuredCellIsMarkedBlack(cell)) {
    // GC things owned by other runtimes are always black.
    MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());
    PerformIncrementalReadBarrier(thing);
  }
}

template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);

static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
  // This function does not handle updating nursery pointers. Raw JSObject
  // pointers should be updated separately or replaced with
  // JS::Heap<JSObject*>, which handles this automatically.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
  if (IsInsideNursery(*objp)) {
    return false;
  }

  auto zone =
      JS::shadow::Zone::from(detail::GetTenuredGCThingZone(uintptr_t(*objp)));
  if (!zone->isGCSweepingOrCompacting()) {
    return false;
  }

  return EdgeNeedsSweepUnbarrieredSlow(objp);
}
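
// Example (illustrative): dropping a raw weak JSObject* entry from a cache
// while the GC is sweeping; |entry| and |cache| are hypothetical:
//
//   if (EdgeNeedsSweepUnbarriered(&entry.object)) {
//     cache.remove(entry);
//   }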

}  // namespace gc
}  // namespace js

namespace JS {

/*
 * This should be called when an object that is marked gray is exposed to the JS
 * engine (by handing it to running JS code or writing it into live JS
 * data). During incremental GC, since the gray bits haven't been computed yet,
 * we conservatively mark the object black.
 */
static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
  MOZ_ASSERT(obj);
  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
}

} /* namespace JS */

#endif /* js_HeapAPI_h */