Bug 1877642 - Disable browser_fullscreen-tab-close-race.js on apple_silicon !debug...
[gecko.git] / js / src / gc / Cell.h
blobf91163e2f55cdd8dc3129df96c00c697e1158d97
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #ifndef gc_Cell_h
8 #define gc_Cell_h
10 #include "mozilla/Atomics.h"
11 #include "mozilla/EndianUtils.h"
13 #include <type_traits>
15 #include "gc/GCContext.h"
16 #include "gc/Heap.h"
17 #include "gc/TraceKind.h"
18 #include "js/GCAnnotations.h"
19 #include "js/shadow/Zone.h" // JS::shadow::Zone
20 #include "js/TypeDecls.h"
22 namespace JS {
23 enum class TraceKind;
24 } /* namespace JS */
26 namespace js {
28 class JS_PUBLIC_API GenericPrinter;
30 extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
31 JS::shadow::Zone* shadowZone);
33 #ifdef DEBUG
34 // Barriers can't be triggered during backend Ion compilation, which may run on
35 // a helper thread.
36 extern bool CurrentThreadIsIonCompiling();
37 #endif
39 extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
40 gc::Cell** thingp,
41 const char* name);
43 namespace gc {
45 enum class AllocKind : uint8_t;
46 class CellAllocator; // Declared so subtypes of Cell can friend it easily.
47 class StoreBuffer;
48 class TenuredCell;
50 extern void PerformIncrementalReadBarrier(TenuredCell* cell);
51 extern void PerformIncrementalPreWriteBarrier(TenuredCell* cell);
52 extern void PerformIncrementalBarrierDuringFlattening(JSString* str);
53 extern void UnmarkGrayGCThingRecursively(TenuredCell* cell);
55 // Like gc::MarkColor but allows the possibility of the cell being unmarked.
56 enum class CellColor : uint8_t { White = 0, Gray = 1, Black = 2 };
// The Gray/Black values must line up with gc::MarkColor so the two enums can
// be converted with simple casts (see AsMarkColor/AsCellColor below).
57 static_assert(uint8_t(CellColor::Gray) == uint8_t(MarkColor::Gray));
58 static_assert(uint8_t(CellColor::Black) == uint8_t(MarkColor::Black));
// A cell is considered marked if it has any color other than white.
60 inline bool IsMarked(CellColor color) { return color != CellColor::White; }
// Convert to MarkColor. The color must not be White (asserted).
61 inline MarkColor AsMarkColor(CellColor color) {
62 MOZ_ASSERT(IsMarked(color));
63 return MarkColor(color);
// Convert a MarkColor to the corresponding CellColor (always valid; a
// MarkColor is never White).
65 inline CellColor AsCellColor(MarkColor color) { return CellColor(color); }
// Human-readable name for a color, for debugging/diagnostic output.
66 extern const char* CellColorName(CellColor color);
68 // Cell header word. Stores GC flags and derived class data.
70 // Loads of GC flags + all stores are marked as (relaxed) atomic operations,
71 // to deal with the following benign data race during compacting GC:
73 // - Thread 1 checks isForwarded (which is always false in this situation).
74 // - Thread 2 updates the derived class data (without changing the forwarded
75 // flag).
77 // To improve performance, we don't use atomic operations for get() because
78 // atomic operations inhibit certain compiler optimizations: GCC and Clang are
79 // unable to fold multiple loads even if they're both relaxed atomics. This is
80 // especially a problem for chained loads such as obj->shape->base->clasp.
81 class HeaderWord {
82 // Indicates whether the cell has been forwarded (moved) by generational or
83 // compacting GC and is now a RelocationOverlay.
84 static constexpr uintptr_t FORWARD_BIT = Bit(0);
85 // Bits 1 and 2 are reserved for future use by the GC.
// The raw word: GC flag bits live in the low CellFlagBitsReservedForGC bits,
// derived-class data in the remaining bits.
87 uintptr_t value_;
// All stores go through this relaxed-atomic store; see the class comment for
// why get() deliberately uses a plain (non-atomic) load.
89 void setAtomic(uintptr_t value) {
90 __atomic_store_n(&value_, value, __ATOMIC_RELAXED);
93 public:
// Mask covering the GC-reserved low bits of the word.
94 static constexpr uintptr_t RESERVED_MASK =
95 BitMask(gc::CellFlagBitsReservedForGC);
96 static_assert(gc::CellFlagBitsReservedForGC >= 3,
97 "Not enough flag bits reserved for GC");
// Relaxed-atomic load of the whole word, including the GC flag bits.
99 uintptr_t getAtomic() const {
100 return __atomic_load_n(&value_, __ATOMIC_RELAXED);
103 // Accessors for derived class data.
104 uintptr_t get() const {
105 // Note: non-atomic load. See class comment.
106 uintptr_t value = value_;
107 MOZ_ASSERT((value & RESERVED_MASK) == 0);
108 return value;
// Store derived-class data; the GC-reserved bits must be clear.
110 void set(uintptr_t value) {
111 MOZ_ASSERT((value & RESERVED_MASK) == 0);
112 setAtomic(value);
115 // Accessors for GC data.
116 uintptr_t flags() const { return getAtomic() & RESERVED_MASK; }
117 bool isForwarded() const { return flags() & FORWARD_BIT; }
// Replace the word with a forwarding pointer plus FORWARD_BIT. The pointer
// must be aligned so its GC-reserved low bits are zero.
118 void setForwardingAddress(uintptr_t ptr) {
119 MOZ_ASSERT((ptr & RESERVED_MASK) == 0);
120 setAtomic(ptr | FORWARD_BIT);
// Retrieve the forwarding pointer; only valid after setForwardingAddress.
122 uintptr_t getForwardingAddress() const {
123 MOZ_ASSERT(isForwarded());
124 return getAtomic() & ~RESERVED_MASK;
128 // [SMDOC] GC Cell
130 // A GC cell is the ultimate base class for all GC things. All types allocated
131 // on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
132 // tenured, prefer the TenuredCell class as base.
134 // The first word of Cell is a HeaderWord (a uintptr_t) that reserves the low
135 // three bits for GC purposes. The remaining bits are available to sub-classes
136 // and can be used store a pointer to another gc::Cell. To make use of the
137 // remaining space, sub-classes derive from a helper class such as
138 // TenuredCellWithNonGCPointer.
140 // During moving GC operation a Cell may be marked as forwarded. This indicates
141 // that a gc::RelocationOverlay is currently stored in the Cell's memory and
142 // should be used to find the new location of the Cell.
143 struct Cell {
144 // Cell header word. Stores GC flags and derived class data.
145 HeaderWord header_;
147 public:
148 Cell() = default;
// Cells are managed entirely by the GC; they are never copied.
150 Cell(const Cell&) = delete;
151 void operator=(const Cell&) = delete;
// Forwarding state and GC flag bits, delegated to the header word.
153 bool isForwarded() const { return header_.isForwarded(); }
154 uintptr_t flags() const { return header_.flags(); }
// A cell is tenured iff it is not inside the nursery.
156 MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
157 MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
158 MOZ_ALWAYS_INLINE TenuredCell& asTenured();
// Mark-state queries; definitions below. Nursery cells report as black.
160 MOZ_ALWAYS_INLINE bool isMarkedAny() const;
161 MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
162 MOZ_ALWAYS_INLINE bool isMarkedGray() const;
163 MOZ_ALWAYS_INLINE bool isMarked(gc::MarkColor color) const;
164 MOZ_ALWAYS_INLINE bool isMarkedAtLeast(gc::MarkColor color) const;
165 MOZ_ALWAYS_INLINE CellColor color() const;
167 inline JSRuntime* runtimeFromMainThread() const;
169 // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
170 // thread can easily lead to races. Use this method very carefully.
171 inline JSRuntime* runtimeFromAnyThread() const;
173 // May be overridden by GC thing kinds that have a compartment pointer.
174 inline JS::Compartment* maybeCompartment() const { return nullptr; }
176 // The StoreBuffer used to record incoming pointers from the tenured heap.
177 // This will return nullptr for a tenured cell.
178 inline StoreBuffer* storeBuffer() const;
180 inline JS::TraceKind getTraceKind() const;
182 static MOZ_ALWAYS_INLINE bool needPreWriteBarrier(JS::Zone* zone);
// Checked downcast helpers, keyed on the cell's trace kind.
184 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
185 inline bool is() const {
186 return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
189 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
190 inline T* as() {
191 // |this|-qualify the |is| call below to avoid compile errors with even
192 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
193 MOZ_ASSERT(this->is<T>());
194 return static_cast<T*>(this);
197 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
198 inline const T* as() const {
199 // |this|-qualify the |is| call below to avoid compile errors with even
200 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
201 MOZ_ASSERT(this->is<T>());
202 return static_cast<const T*>(this);
205 inline JS::Zone* zone() const;
206 inline JS::Zone* zoneFromAnyThread() const;
208 // Get the zone for a cell known to be in the nursery.
209 inline JS::Zone* nurseryZone() const;
210 inline JS::Zone* nurseryZoneFromAnyThread() const;
212 inline ChunkBase* chunk() const;
214 // Default implementation for kinds that cannot be permanent. This may be
215 // overridden by derived classes.
216 MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }
218 #ifdef DEBUG
219 static inline void assertThingIsNotGray(Cell* cell);
220 inline bool isAligned() const;
221 void dump(GenericPrinter& out) const;
222 void dump() const;
223 #endif
225 protected:
226 uintptr_t address() const;
228 private:
229 // Cells are destroyed by the GC. Do not delete them directly.
230 void operator delete(void*) = delete;
231 } JS_HAZ_GC_THING;
233 // A GC TenuredCell gets behaviors that are valid for things in the Tenured
234 // heap, such as access to the arena and mark bits.
235 class TenuredCell : public Cell {
236 public:
// Shadows Cell::isTenured: a TenuredCell is tenured by definition.
237 MOZ_ALWAYS_INLINE bool isTenured() const {
238 MOZ_ASSERT(!IsInsideNursery(this));
239 return true;
// Shadows Cell::chunk with the more specific TenuredChunk type.
242 TenuredChunk* chunk() const {
243 return static_cast<TenuredChunk*>(Cell::chunk());
246 // Mark bit management.
247 MOZ_ALWAYS_INLINE bool isMarkedAny() const;
248 MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
249 MOZ_ALWAYS_INLINE bool isMarkedGray() const;
250 MOZ_ALWAYS_INLINE CellColor color() const;
252 // The return value indicates if the cell went from unmarked to marked.
253 MOZ_ALWAYS_INLINE bool markIfUnmarked(
254 MarkColor color = MarkColor::Black) const;
255 MOZ_ALWAYS_INLINE bool markIfUnmarkedAtomic(MarkColor color) const;
256 MOZ_ALWAYS_INLINE void markBlack() const;
257 MOZ_ALWAYS_INLINE void markBlackAtomic() const;
258 MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
259 MOZ_ALWAYS_INLINE void unmark();
261 // Access to the arena.
262 inline Arena* arena() const;
263 inline AllocKind getAllocKind() const;
264 inline JS::TraceKind getTraceKind() const;
265 inline JS::Zone* zone() const;
266 inline JS::Zone* zoneFromAnyThread() const;
267 inline bool isInsideZone(JS::Zone* zone) const;
// Convenience accessors for the zone viewed through the shadow (friend-API)
// interface used by barrier code.
269 MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
270 return JS::shadow::Zone::from(zone());
272 MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
273 return JS::shadow::Zone::from(zoneFromAnyThread());
// Checked downcast helpers, keyed on the cell's trace kind (parallel to the
// ones on Cell).
276 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
277 inline bool is() const {
278 return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
281 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
282 inline T* as() {
283 // |this|-qualify the |is| call below to avoid compile errors with even
284 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
285 MOZ_ASSERT(this->is<T>());
286 return static_cast<T*>(this);
289 template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
290 inline const T* as() const {
291 // |this|-qualify the |is| call below to avoid compile errors with even
292 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
293 MOZ_ASSERT(this->is<T>());
294 return static_cast<const T*>(this);
297 // Default implementation for kinds that don't require fixup.
298 void fixupAfterMovingGC() {}
// Read a cell's color out of an explicit mark bitmap; defined below.
300 static inline CellColor getColor(MarkBitmap* bitmap, const TenuredCell* cell);
302 #ifdef DEBUG
303 inline bool isAligned() const;
304 #endif
// --- Cell inline member definitions ---
// Checked casts to TenuredCell; valid only for cells outside the nursery.
307 MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
308 MOZ_ASSERT(isTenured());
309 return *static_cast<const TenuredCell*>(this);
312 MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
313 MOZ_ASSERT(isTenured());
314 return *static_cast<TenuredCell*>(this);
// Nursery cells count as marked (black) for these queries; only tenured
// cells consult the mark bitmap.
317 MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
318 return !isTenured() || asTenured().isMarkedAny();
321 MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
322 return !isTenured() || asTenured().isMarkedBlack();
// Nursery cells are never gray.
325 MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
326 return isTenured() && asTenured().isMarkedGray();
329 MOZ_ALWAYS_INLINE bool Cell::isMarked(gc::MarkColor color) const {
330 return color == MarkColor::Gray ? isMarkedGray() : isMarkedBlack();
333 MOZ_ALWAYS_INLINE bool Cell::isMarkedAtLeast(gc::MarkColor color) const {
334 return color == MarkColor::Gray ? isMarkedAny() : isMarkedBlack();
// Nursery cells report CellColor::Black.
337 MOZ_ALWAYS_INLINE CellColor Cell::color() const {
338 return isTenured() ? asTenured().color() : CellColor::Black;
// The runtime is found via the chunk header; the main-thread variant asserts
// the current thread may access it.
341 inline JSRuntime* Cell::runtimeFromMainThread() const {
342 JSRuntime* rt = chunk()->runtime;
343 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
344 return rt;
347 inline JSRuntime* Cell::runtimeFromAnyThread() const {
348 return chunk()->runtime;
// The cell's address, with alignment/range sanity asserts.
351 inline uintptr_t Cell::address() const {
352 uintptr_t addr = uintptr_t(this);
353 MOZ_ASSERT(addr % CellAlignBytes == 0);
354 MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
355 return addr;
// The chunk header is found by masking off the low bits of the cell address
// (chunks are ChunkMask+1 aligned).
358 ChunkBase* Cell::chunk() const {
359 uintptr_t addr = uintptr_t(this);
360 MOZ_ASSERT(addr % CellAlignBytes == 0);
361 addr &= ~ChunkMask;
362 return reinterpret_cast<ChunkBase*>(addr);
365 inline StoreBuffer* Cell::storeBuffer() const { return chunk()->storeBuffer; }
// Zone accessors: tenured cells find their zone via the arena, nursery cells
// via the NurseryCellHeader that precedes them.
367 JS::Zone* Cell::zone() const {
368 if (isTenured()) {
369 return asTenured().zone();
372 return nurseryZone();
375 JS::Zone* Cell::zoneFromAnyThread() const {
376 if (isTenured()) {
377 return asTenured().zoneFromAnyThread();
380 return nurseryZoneFromAnyThread();
383 JS::Zone* Cell::nurseryZone() const {
384 JS::Zone* zone = nurseryZoneFromAnyThread();
// Asserts thread access rules (GC marking is also allowed to touch it).
385 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
386 return zone;
389 JS::Zone* Cell::nurseryZoneFromAnyThread() const {
390 return NurseryCellHeader::from(this)->zone();
393 #ifdef DEBUG
// Out-of-line helper so the assert below can inspect a forwarded cell.
394 extern Cell* UninlinedForwarded(const Cell* cell);
395 #endif
397 inline JS::TraceKind Cell::getTraceKind() const {
398 if (isTenured()) {
// A forwarded cell must have the same trace kind at its new location.
399 MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
400 asTenured().getTraceKind());
401 return asTenured().getTraceKind();
404 return NurseryCellHeader::from(this)->traceKind();
// A pre-write barrier is needed only while the zone is doing incremental
// marking.
407 /* static */ MOZ_ALWAYS_INLINE bool Cell::needPreWriteBarrier(JS::Zone* zone) {
408 return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
// --- TenuredCell inline member definitions ---
// Mark-state queries consult the chunk's mark bitmap; the arena must still
// be allocated.
411 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedAny() const {
412 MOZ_ASSERT(arena()->allocated());
413 return chunk()->markBits.isMarkedAny(this);
416 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedBlack() const {
417 MOZ_ASSERT(arena()->allocated());
418 return chunk()->markBits.isMarkedBlack(this);
421 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedGray() const {
422 MOZ_ASSERT(arena()->allocated());
423 return chunk()->markBits.isMarkedGray(this);
426 MOZ_ALWAYS_INLINE CellColor TenuredCell::color() const {
427 return getColor(&chunk()->markBits, this);
430 /* static */
431 inline CellColor TenuredCell::getColor(MarkBitmap* bitmap,
432 const TenuredCell* cell) {
433 // Note that this method isn't synchronised so may give surprising results if
434 // the mark bitmap is being modified concurrently.
436 if (bitmap->isMarkedBlack(cell)) {
437 return CellColor::Black;
440 if (bitmap->isMarkedGray(cell)) {
441 return CellColor::Gray;
444 return CellColor::White;
// Marking operations delegate to the chunk's mark bitmap.
447 bool TenuredCell::markIfUnmarked(MarkColor color /* = Black */) const {
448 return chunk()->markBits.markIfUnmarked(this, color);
451 bool TenuredCell::markIfUnmarkedAtomic(MarkColor color) const {
452 return chunk()->markBits.markIfUnmarkedAtomic(this, color);
455 void TenuredCell::markBlack() const { chunk()->markBits.markBlack(this); }
456 void TenuredCell::markBlackAtomic() const {
457 chunk()->markBits.markBlackAtomic(this);
// Copy both mark bits from |src|, e.g. when a cell is moved.
460 void TenuredCell::copyMarkBitsFrom(const TenuredCell* src) {
461 MarkBitmap& markBits = chunk()->markBits;
462 markBits.copyMarkBit(this, src, ColorBit::BlackBit);
463 markBits.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
466 void TenuredCell::unmark() { chunk()->markBits.unmark(this); }
// The arena header is found by masking off the low bits of the address.
468 inline Arena* TenuredCell::arena() const {
469 MOZ_ASSERT(isTenured());
470 uintptr_t addr = address();
471 addr &= ~ArenaMask;
472 return reinterpret_cast<Arena*>(addr);
475 AllocKind TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
477 JS::TraceKind TenuredCell::getTraceKind() const {
478 return MapAllocToTraceKind(getAllocKind());
481 JS::Zone* TenuredCell::zone() const {
482 JS::Zone* zone = arena()->zone;
// Asserts thread access rules (GC marking is also allowed to touch it).
483 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
484 return zone;
487 JS::Zone* TenuredCell::zoneFromAnyThread() const { return arena()->zone; }
489 bool TenuredCell::isInsideZone(JS::Zone* zone) const {
490 return zone == arena()->zone;
493 // Read barrier and pre-write barrier implementation for GC cells.
// Entry point: null-checks, then dispatches to the Cell/TenuredCell overload.
// T must be a concrete GC thing type, not the Cell/TenuredCell bases.
495 template <typename T>
496 MOZ_ALWAYS_INLINE void ReadBarrier(T* thing) {
497 static_assert(std::is_base_of_v<Cell, T>);
498 static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);
500 if (thing) {
501 ReadBarrierImpl(thing);
// Tenured read barrier: mark during incremental marking, otherwise unmark
// gray things that are read (gray-unmarking invariant).
505 MOZ_ALWAYS_INLINE void ReadBarrierImpl(TenuredCell* thing) {
506 MOZ_ASSERT(CurrentThreadIsMainThread());
507 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
508 MOZ_ASSERT(thing);
510 JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
511 if (shadowZone->needsIncrementalBarrier()) {
512 PerformIncrementalReadBarrier(thing);
513 return;
516 if (thing->isMarkedGray()) {
517 UnmarkGrayGCThingRecursively(thing);
// Generic read barrier: nursery cells need no barrier.
521 MOZ_ALWAYS_INLINE void ReadBarrierImpl(Cell* thing) {
522 MOZ_ASSERT(!CurrentThreadIsGCMarking());
523 MOZ_ASSERT(thing);
525 if (thing->isTenured()) {
526 ReadBarrierImpl(&thing->asTenured());
// Tenured pre-write barrier: fires only while the zone is doing incremental
// marking.
530 MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(TenuredCell* thing) {
531 MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsGCSweeping() ||
532 CurrentThreadIsGCFinalizing());
533 MOZ_ASSERT(thing);
535 // Barriers can be triggered on the main thread while collecting, but are
536 // disabled. For example, this happens when sweeping HeapPtr wrappers. See
537 // AutoDisableBarriers.
539 JS::shadow::Zone* zone = thing->shadowZoneFromAnyThread();
540 if (zone->needsIncrementalBarrier()) {
541 PerformIncrementalPreWriteBarrier(thing);
// Generic pre-write barrier: nursery cells need no barrier.
545 MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(Cell* thing) {
546 MOZ_ASSERT(!CurrentThreadIsGCMarking());
547 MOZ_ASSERT(thing);
549 if (thing->isTenured()) {
550 PreWriteBarrierImpl(&thing->asTenured());
// Entry point: null-checks, then dispatches to the implementation above.
554 template <typename T>
555 MOZ_ALWAYS_INLINE void PreWriteBarrier(T* thing) {
556 static_assert(std::is_base_of_v<Cell, T>);
557 static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);
559 if (thing) {
560 PreWriteBarrierImpl(thing);
564 // Pre-write barrier implementation for structures containing GC cells, taking a
565 // functor to trace the structure.
566 template <typename T, typename F>
567 MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data,
568 const F& traceFn) {
569 MOZ_ASSERT(data);
570 MOZ_ASSERT(!CurrentThreadIsIonCompiling());
571 MOZ_ASSERT(!CurrentThreadIsGCMarking());
573 auto* shadowZone = JS::shadow::Zone::from(zone);
574 if (!shadowZone->needsIncrementalBarrier()) {
575 return;
578 MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromAnyThread()));
579 MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
// Trace the structure's contents with the zone's barrier tracer.
581 traceFn(shadowZone->barrierTracer(), data);
584 // Pre-write barrier implementation for structures containing GC cells. T must
585 // support a |trace| method.
586 template <typename T>
587 MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data) {
588 MOZ_ASSERT(data);
589 PreWriteBarrier(zone, data, [](JSTracer* trc, T* data) { data->trace(trc); });
592 #ifdef DEBUG
// Debug-only helpers, declared in the classes above.
594 /* static */ void Cell::assertThingIsNotGray(Cell* cell) {
595 JS::AssertCellIsNotGray(cell);
// Nursery cells are always treated as aligned; tenured cells are checked
// against their arena's thing size.
598 bool Cell::isAligned() const {
599 if (!isTenured()) {
600 return true;
602 return asTenured().isAligned();
605 bool TenuredCell::isAligned() const {
606 return Arena::isAligned(address(), arena()->getThingSize());
609 #endif
611 // Base class for nursery-allocatable GC things that have 32-bit length and
612 // 32-bit flags (currently JSString and BigInt).
614 // This tries to store both in Cell::header_, but if that isn't large enough the
615 // length is stored separately.
617 // 32 0
618 // ------------------
619 // | Length | Flags |
620 // ------------------
622 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
623 // for GC. Derived classes must ensure they don't use these flags for non-GC
624 // purposes.
625 class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
626 #if JS_BITS_PER_WORD == 32
627 // Additional storage for length if |header_| is too small to fit both.
628 uint32_t length_;
629 #endif
631 protected:
// On 64-bit, the length occupies the high 32 bits of the header word; on
// 32-bit it lives in the separate length_ field.
632 uint32_t headerLengthField() const {
633 #if JS_BITS_PER_WORD == 32
634 return length_;
635 #else
636 return uint32_t(header_.get() >> 32);
637 #endif
// The flags always occupy the low 32 bits of the header word.
640 uint32_t headerFlagsField() const { return uint32_t(header_.get()); }
// Bitwise flag manipulation; must not touch the GC-reserved low bits.
642 void setHeaderFlagBit(uint32_t flag) {
643 header_.set(header_.get() | uintptr_t(flag));
645 void clearHeaderFlagBit(uint32_t flag) {
646 header_.set(header_.get() & ~uintptr_t(flag));
648 void toggleHeaderFlagBit(uint32_t flag) {
649 header_.set(header_.get() ^ uintptr_t(flag));
// Set length and flags together in a single header store.
652 void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
653 #if JS_BITS_PER_WORD == 32
654 header_.set(flags);
655 length_ = len;
656 #else
657 header_.set((uint64_t(len) << 32) | uint64_t(flags));
658 #endif
661 public:
662 // Returns the offset of header_. JIT code should use offsetOfFlags
663 // below.
664 static constexpr size_t offsetOfRawHeaderFlagsField() {
665 return offsetof(CellWithLengthAndFlags, header_);
668 // Offsets for direct field from jit code. A number of places directly
669 // access 32-bit length and flags fields so do endian trickery here.
670 #if JS_BITS_PER_WORD == 32
671 static constexpr size_t offsetOfHeaderFlags() {
672 return offsetof(CellWithLengthAndFlags, header_);
674 static constexpr size_t offsetOfHeaderLength() {
675 return offsetof(CellWithLengthAndFlags, length_);
677 #elif MOZ_LITTLE_ENDIAN()
678 static constexpr size_t offsetOfHeaderFlags() {
679 return offsetof(CellWithLengthAndFlags, header_);
681 static constexpr size_t offsetOfHeaderLength() {
682 return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
684 #else
685 static constexpr size_t offsetOfHeaderFlags() {
686 return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
688 static constexpr size_t offsetOfHeaderLength() {
689 return offsetof(CellWithLengthAndFlags, header_);
691 #endif
694 // Base class for non-nursery-allocatable GC things that allows storing a non-GC
695 // thing pointer in the first word.
697 // The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
698 template <class PtrT>
699 class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
700 : public TenuredCell {
701 static_assert(!std::is_pointer_v<PtrT>,
702 "PtrT should be the type of the referent, not of the pointer");
703 static_assert(
704 !std::is_base_of_v<Cell, PtrT>,
705 "Don't use TenuredCellWithNonGCPointer for pointers to GC things");
707 protected:
708 TenuredCellWithNonGCPointer() = default;
// No barrier needed on initialization or update: the referent is not a GC
// thing.
709 explicit TenuredCellWithNonGCPointer(PtrT* initial) {
710 uintptr_t data = uintptr_t(initial);
711 header_.set(data);
// Read the stored pointer. Asserts that no GC flag bits are set, since the
// flag bits and pointer share the header word.
714 PtrT* headerPtr() const {
715 MOZ_ASSERT(flags() == 0);
716 return reinterpret_cast<PtrT*>(uintptr_t(header_.get()));
719 void setHeaderPtr(PtrT* newValue) {
720 // As above, no flags are expected to be set here.
721 uintptr_t data = uintptr_t(newValue);
722 MOZ_ASSERT(flags() == 0);
723 header_.set(data);
726 public:
// Offset of the pointer field, for use by JIT code.
727 static constexpr size_t offsetOfHeaderPtr() {
728 return offsetof(TenuredCellWithNonGCPointer, header_);
732 // Base class for non-nursery-allocatable GC things that allows storing flags
733 // in the first word.
735 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
736 // for GC.
737 class alignas(gc::CellAlignBytes) TenuredCellWithFlags : public TenuredCell {
738 protected:
739 TenuredCellWithFlags() { header_.set(0); }
740 explicit TenuredCellWithFlags(uintptr_t initial) { header_.set(initial); }
// Read the full derived-class flags word. Asserts the GC-reserved bits are
// clear.
742 uintptr_t headerFlagsField() const {
743 MOZ_ASSERT(flags() == 0);
744 return header_.get();
// Set/clear individual flag bits; callers must not touch the GC-reserved
// low bits.
747 void setHeaderFlagBits(uintptr_t flags) {
748 header_.set(header_.get() | flags);
750 void clearHeaderFlagBits(uintptr_t flags) {
751 header_.set(header_.get() & ~flags);
755 // Base class for GC things that have a tenured GC pointer as their first word.
757 // The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
758 // for GC.
760 // This includes a pre write barrier when the pointer is updated. No post
761 // barrier is necessary as the pointer is always tenured.
762 template <class BaseCell, class PtrT>
763 class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
764 static void staticAsserts() {
765 // These static asserts are not in class scope because the PtrT may not be
766 // defined when this class template is instantiated.
767 static_assert(
768 std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
769 "BaseCell must be either Cell or TenuredCell");
770 static_assert(
771 !std::is_pointer_v<PtrT>,
772 "PtrT should be the type of the referent, not of the pointer");
773 static_assert(
774 std::is_base_of_v<Cell, PtrT>,
775 "Only use CellWithTenuredGCPointer for pointers to GC things");
778 protected:
779 CellWithTenuredGCPointer() = default;
780 explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }
// Initialization: no pre-write barrier needed since there is no previous
// value. The referent must be tenured (asserted).
782 void initHeaderPtr(PtrT* initial) {
783 MOZ_ASSERT_IF(initial, !IsInsideNursery(initial));
784 uintptr_t data = uintptr_t(initial);
785 this->header_.set(data);
// Update: pre-write barrier the old value, then store the new one.
788 void setHeaderPtr(PtrT* newValue) {
789 // As above, no flags are expected to be set here.
790 MOZ_ASSERT_IF(newValue, !IsInsideNursery(newValue));
791 PreWriteBarrier(headerPtr());
792 unbarrieredSetHeaderPtr(newValue);
795 public:
// Read the stored pointer. Asserts no GC flag bits are set.
796 PtrT* headerPtr() const {
797 staticAsserts();
798 MOZ_ASSERT(this->flags() == 0);
799 return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
// As headerPtr() but uses an atomic load; see HeaderWord::getAtomic.
801 PtrT* headerPtrAtomic() const {
802 staticAsserts();
803 MOZ_ASSERT(this->flags() == 0);
804 return reinterpret_cast<PtrT*>(uintptr_t(this->header_.getAtomic()));
// Store without any barrier; callers are responsible for barriers.
807 void unbarrieredSetHeaderPtr(PtrT* newValue) {
808 uintptr_t data = uintptr_t(newValue);
809 MOZ_ASSERT(this->flags() == 0);
810 this->header_.set(data);
// Offset of the pointer field, for use by JIT code.
813 static constexpr size_t offsetOfHeaderPtr() {
814 return offsetof(CellWithTenuredGCPointer, header_);
// Post-write barrier for header pointers that may refer to nursery things;
// defined out of line.
818 void CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev, JSObject* next);
// Compile-time check: is T a GC type that is always tenured? True for
// TenuredCell subclasses and for atoms (JSAtom derives from JSString, which
// is not a TenuredCell, but atoms are always tenured).
820 template <typename T>
821 constexpr inline bool GCTypeIsTenured() {
822 static_assert(std::is_base_of_v<Cell, T>);
823 static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);
825 return std::is_base_of_v<TenuredCell, T> || std::is_base_of_v<JSAtom, T>;
// Base class for tenured GC things whose first word holds a pointer to a GC
// thing that may itself be in the nursery; includes a post-write barrier on
// initialization for nursery referents.
828 template <class PtrT>
829 class alignas(gc::CellAlignBytes) TenuredCellWithGCPointer
830 : public TenuredCell {
831 static void staticAsserts() {
832 // These static asserts are not in class scope because the PtrT may not be
833 // defined when this class template is instantiated.
834 static_assert(
835 !std::is_pointer_v<PtrT>,
836 "PtrT should be the type of the referent, not of the pointer");
837 static_assert(
838 std::is_base_of_v<Cell, PtrT>,
839 "Only use TenuredCellWithGCPointer for pointers to GC things");
840 static_assert(
841 !GCTypeIsTenured<PtrT>,
842 "Don't use TenuredCellWithGCPointer for always-tenured GC things");
845 protected:
846 TenuredCellWithGCPointer() = default;
847 explicit TenuredCellWithGCPointer(PtrT* initial) { initHeaderPtr(initial); }
// Initialization: post-write barrier if the referent is in the nursery, so
// the nursery knows about this tenured -> nursery edge.
849 void initHeaderPtr(PtrT* initial) {
850 uintptr_t data = uintptr_t(initial);
851 this->header_.set(data);
852 if (initial && IsInsideNursery(initial)) {
853 CellHeaderPostWriteBarrier(headerPtrAddress(), nullptr, initial);
// Address of the stored pointer, for the store buffer. Asserts no GC flag
// bits are set.
857 PtrT** headerPtrAddress() {
858 MOZ_ASSERT(this->flags() == 0);
859 return reinterpret_cast<PtrT**>(&this->header_);
862 public:
// Read the stored pointer. Asserts no GC flag bits are set.
863 PtrT* headerPtr() const {
864 MOZ_ASSERT(this->flags() == 0);
865 return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
// Store without any barrier; callers are responsible for barriers.
868 void unbarrieredSetHeaderPtr(PtrT* newValue) {
869 uintptr_t data = uintptr_t(newValue);
870 MOZ_ASSERT(this->flags() == 0);
871 this->header_.set(data);
// Offset of the pointer field, for use by JIT code.
874 static constexpr size_t offsetOfHeaderPtr() {
875 return offsetof(TenuredCellWithGCPointer, header_);
879 // Check whether a typed GC thing is marked at all. Doesn't check gray bits for
880 // kinds that can't be marked gray.
881 template <typename T>
882 static inline bool TenuredThingIsMarkedAny(T* thing) {
883 using BaseT = typename BaseGCType<T>::type;
884 TenuredCell* cell = &thing->asTenured();
885 if constexpr (TraceKindCanBeGray<BaseT>::value) {
886 return cell->isMarkedAny();
887 } else {
// This kind can never be gray, so checking the black bit suffices.
888 MOZ_ASSERT(!cell->isMarkedGray());
889 return cell->isMarkedBlack();
// Specialization for untyped Cell: the kind is unknown, so the gray bit must
// always be considered.
893 template <>
894 inline bool TenuredThingIsMarkedAny<Cell>(Cell* thing) {
895 return thing->asTenured().isMarkedAny();
898 } /* namespace gc */
899 } /* namespace js */
901 #endif /* gc_Cell_h */