// Source: gecko.git — js/src/gc/Cell.h
// (blob 5a89ad794d6206688b8168023e3e615ec9146f08; extracted from an l10n-bump
// commit view — the lines above the license header were repository metadata.)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #ifndef gc_Cell_h
8 #define gc_Cell_h
10 #include "mozilla/Atomics.h"
11 #include "mozilla/EndianUtils.h"
13 #include <type_traits>
15 #include "gc/GCContext.h"
16 #include "gc/Heap.h"
17 #include "gc/TraceKind.h"
18 #include "js/GCAnnotations.h"
19 #include "js/shadow/Zone.h" // JS::shadow::Zone
20 #include "js/TypeDecls.h"
22 namespace JS {
23 enum class TraceKind;
24 } /* namespace JS */
26 namespace js {
28 class JS_PUBLIC_API GenericPrinter;
30 extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
31 JS::shadow::Zone* shadowZone);
33 #ifdef DEBUG
34 // Barriers can't be triggered during backend Ion compilation, which may run on
35 // a helper thread.
36 extern bool CurrentThreadIsIonCompiling();
37 #endif
39 extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
40 gc::Cell** thingp,
41 const char* name);
43 namespace gc {
45 enum class AllocKind : uint8_t;
46 class CellAllocator; // Declared so subtypes of Cell can friend it easily.
47 class StoreBuffer;
48 class TenuredCell;
50 extern void PerformIncrementalReadBarrier(TenuredCell* cell);
51 extern void PerformIncrementalPreWriteBarrier(TenuredCell* cell);
52 extern void PerformIncrementalBarrierDuringFlattening(JSString* str);
53 extern void UnmarkGrayGCThingRecursively(TenuredCell* cell);
55 // Like gc::MarkColor but allows the possibility of the cell being unmarked.
56 enum class CellColor : uint8_t { White = 0, Gray = 1, Black = 2 };
57 static_assert(uint8_t(CellColor::Gray) == uint8_t(MarkColor::Gray));
58 static_assert(uint8_t(CellColor::Black) == uint8_t(MarkColor::Black));
60 inline bool IsMarked(CellColor color) { return color != CellColor::White; }
61 inline MarkColor AsMarkColor(CellColor color) {
62 MOZ_ASSERT(IsMarked(color));
63 return MarkColor(color);
65 inline CellColor AsCellColor(MarkColor color) { return CellColor(color); }
66 extern const char* CellColorName(CellColor color);
// Cell header word. Stores GC flags and derived class data.
//
// Loads of GC flags + all stores are marked as (relaxed) atomic operations,
// to deal with the following benign data race during compacting GC:
//
// - Thread 1 checks isForwarded (which is always false in this situation).
// - Thread 2 updates the derived class data (without changing the forwarded
//   flag).
//
// To improve performance, we don't use atomic operations for get() because
// atomic operations inhibit certain compiler optimizations: GCC and Clang are
// unable to fold multiple loads even if they're both relaxed atomics. This is
// especially a problem for chained loads such as obj->shape->base->clasp.
class HeaderWord {
  // Indicates whether the cell has been forwarded (moved) by generational or
  // compacting GC and is now a RelocationOverlay.
  static constexpr uintptr_t FORWARD_BIT = Bit(0);
  // Bits 1 and 2 are reserved for future use by the GC.

  uintptr_t value_;

  // Relaxed atomic store; pairs with getAtomic() (and the deliberately
  // non-atomic get()) per the benign-race rationale in the class comment.
  void setAtomic(uintptr_t value) {
    __atomic_store_n(&value_, value, __ATOMIC_RELAXED);
  }

 public:
  // Mask covering the low bits reserved for GC flag storage.
  static constexpr uintptr_t RESERVED_MASK =
      BitMask(gc::CellFlagBitsReservedForGC);
  static_assert(gc::CellFlagBitsReservedForGC >= 3,
                "Not enough flag bits reserved for GC");

  // Relaxed atomic load of the whole word, including GC flag bits.
  uintptr_t getAtomic() const {
    return __atomic_load_n(&value_, __ATOMIC_RELAXED);
  }

  // Accessors for derived class data.
  uintptr_t get() const {
    // Note: non-atomic load. See class comment.
    uintptr_t value = value_;
    MOZ_ASSERT((value & RESERVED_MASK) == 0);
    return value;
  }
  void set(uintptr_t value) {
    MOZ_ASSERT((value & RESERVED_MASK) == 0);
    setAtomic(value);
  }

  // Accessors for GC data.
  uintptr_t flags() const { return getAtomic() & RESERVED_MASK; }
  bool isForwarded() const { return flags() & FORWARD_BIT; }
  // Stores a RelocationOverlay address and raises the forwarded flag; the
  // address must be sufficiently aligned that the reserved bits are clear.
  void setForwardingAddress(uintptr_t ptr) {
    MOZ_ASSERT((ptr & RESERVED_MASK) == 0);
    setAtomic(ptr | FORWARD_BIT);
  }
  uintptr_t getForwardingAddress() const {
    MOZ_ASSERT(isForwarded());
    return getAtomic() & ~RESERVED_MASK;
  }
};
// [SMDOC] GC Cell
//
// A GC cell is the ultimate base class for all GC things. All types allocated
// on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
// tenured, prefer the TenuredCell class as base.
//
// The first word of Cell is a HeaderWord (a uintptr_t) that reserves the low
// three bits for GC purposes. The remaining bits are available to sub-classes
// and can be used store a pointer to another gc::Cell. To make use of the
// remaining space, sub-classes derive from a helper class such as
// TenuredCellWithNonGCPointer.
//
// During moving GC operation a Cell may be marked as forwarded. This indicates
// that a gc::RelocationOverlay is currently stored in the Cell's memory and
// should be used to find the new location of the Cell.
struct Cell {
  // Cell header word. Stores GC flags and derived class data.
  HeaderWord header_;

 public:
  Cell() = default;

  // Cells are allocated and reclaimed by the GC; they cannot be copied.
  Cell(const Cell&) = delete;
  void operator=(const Cell&) = delete;

  bool isForwarded() const { return header_.isForwarded(); }
  uintptr_t flags() const { return header_.flags(); }

  // A cell is tenured iff its address is not inside the nursery.
  MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
  MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
  MOZ_ALWAYS_INLINE TenuredCell& asTenured();

  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE bool isMarked(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE bool isMarkedAtLeast(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE CellColor color() const;

  inline JSRuntime* runtimeFromMainThread() const;

  // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
  // thread can easily lead to races. Use this method very carefully.
  inline JSRuntime* runtimeFromAnyThread() const;

  // May be overridden by GC thing kinds that have a compartment pointer.
  inline JS::Compartment* maybeCompartment() const { return nullptr; }

  // The StoreBuffer used to record incoming pointers from the tenured heap.
  // This will return nullptr for a tenured cell.
  inline StoreBuffer* storeBuffer() const;

  inline JS::TraceKind getTraceKind() const;

  static MOZ_ALWAYS_INLINE bool needPreWriteBarrier(JS::Zone* zone);

  // Type test: compares this cell's trace kind against T's static trace kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Checked (debug-asserted) downcast to a concrete GC thing type.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;

  // Get the zone for a cell known to be in the nursery.
  inline JS::Zone* nurseryZone() const;
  inline JS::Zone* nurseryZoneFromAnyThread() const;

  // Default implementation for kinds that cannot be permanent. This may be
  // overriden by derived classes.
  MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }

#ifdef DEBUG
  static inline void assertThingIsNotGray(Cell* cell);
  inline bool isAligned() const;
  void dump(GenericPrinter& out) const;
  void dump() const;
#endif

 protected:
  uintptr_t address() const;
  inline ChunkBase* chunk() const;

 private:
  // Cells are destroyed by the GC. Do not delete them directly.
  void operator delete(void*) = delete;
} JS_HAZ_GC_THING;
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena and mark bits.
class TenuredCell : public Cell {
 public:
  // Tenured cells are by definition never in the nursery.
  MOZ_ALWAYS_INLINE bool isTenured() const {
    MOZ_ASSERT(!IsInsideNursery(this));
    return true;
  }

  // Tenured cells always live in a TenuredChunk, so the base-class chunk
  // pointer can be downcast unconditionally.
  TenuredChunk* chunk() const {
    return static_cast<TenuredChunk*>(Cell::chunk());
  }

  // Mark bit management.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE CellColor color() const;

  // The return value indicates if the cell went from unmarked to marked.
  MOZ_ALWAYS_INLINE bool markIfUnmarked(
      MarkColor color = MarkColor::Black) const;
  MOZ_ALWAYS_INLINE bool markIfUnmarkedAtomic(MarkColor color) const;
  MOZ_ALWAYS_INLINE void markBlack() const;
  MOZ_ALWAYS_INLINE void markBlackAtomic() const;
  MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
  MOZ_ALWAYS_INLINE void unmark();

  // Access to the arena.
  inline Arena* arena() const;
  inline AllocKind getAllocKind() const;
  inline JS::TraceKind getTraceKind() const;
  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;
  inline bool isInsideZone(JS::Zone* zone) const;

  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
    return JS::shadow::Zone::from(zone());
  }
  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
    return JS::shadow::Zone::from(zoneFromAnyThread());
  }

  // Type test: compares this cell's trace kind against T's static trace kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Checked (debug-asserted) downcast to a concrete GC thing type.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  // Default implementation for kinds that don't require fixup.
  void fixupAfterMovingGC() {}

  static inline CellColor getColor(MarkBitmap* bitmap, const TenuredCell* cell);

#ifdef DEBUG
  inline bool isAligned() const;
#endif
};
306 MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
307 MOZ_ASSERT(isTenured());
308 return *static_cast<const TenuredCell*>(this);
311 MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
312 MOZ_ASSERT(isTenured());
313 return *static_cast<TenuredCell*>(this);
316 MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
317 return !isTenured() || asTenured().isMarkedAny();
320 MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
321 return !isTenured() || asTenured().isMarkedBlack();
324 MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
325 return isTenured() && asTenured().isMarkedGray();
328 MOZ_ALWAYS_INLINE bool Cell::isMarked(gc::MarkColor color) const {
329 return color == MarkColor::Gray ? isMarkedGray() : isMarkedBlack();
332 MOZ_ALWAYS_INLINE bool Cell::isMarkedAtLeast(gc::MarkColor color) const {
333 return color == MarkColor::Gray ? isMarkedAny() : isMarkedBlack();
336 MOZ_ALWAYS_INLINE CellColor Cell::color() const {
337 return isTenured() ? asTenured().color() : CellColor::Black;
340 inline JSRuntime* Cell::runtimeFromMainThread() const {
341 JSRuntime* rt = chunk()->runtime;
342 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
343 return rt;
346 inline JSRuntime* Cell::runtimeFromAnyThread() const {
347 return chunk()->runtime;
350 inline uintptr_t Cell::address() const {
351 uintptr_t addr = uintptr_t(this);
352 MOZ_ASSERT(addr % CellAlignBytes == 0);
353 MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
354 return addr;
357 ChunkBase* Cell::chunk() const {
358 uintptr_t addr = uintptr_t(this);
359 MOZ_ASSERT(addr % CellAlignBytes == 0);
360 addr &= ~ChunkMask;
361 return reinterpret_cast<ChunkBase*>(addr);
364 inline StoreBuffer* Cell::storeBuffer() const { return chunk()->storeBuffer; }
366 JS::Zone* Cell::zone() const {
367 if (isTenured()) {
368 return asTenured().zone();
371 return nurseryZone();
374 JS::Zone* Cell::zoneFromAnyThread() const {
375 if (isTenured()) {
376 return asTenured().zoneFromAnyThread();
379 return nurseryZoneFromAnyThread();
382 JS::Zone* Cell::nurseryZone() const {
383 JS::Zone* zone = nurseryZoneFromAnyThread();
384 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
385 return zone;
388 JS::Zone* Cell::nurseryZoneFromAnyThread() const {
389 return NurseryCellHeader::from(this)->zone();
392 #ifdef DEBUG
393 extern Cell* UninlinedForwarded(const Cell* cell);
394 #endif
396 inline JS::TraceKind Cell::getTraceKind() const {
397 if (isTenured()) {
398 MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
399 asTenured().getTraceKind());
400 return asTenured().getTraceKind();
403 return NurseryCellHeader::from(this)->traceKind();
406 /* static */ MOZ_ALWAYS_INLINE bool Cell::needPreWriteBarrier(JS::Zone* zone) {
407 return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
410 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedAny() const {
411 MOZ_ASSERT(arena()->allocated());
412 return chunk()->markBits.isMarkedAny(this);
415 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedBlack() const {
416 MOZ_ASSERT(arena()->allocated());
417 return chunk()->markBits.isMarkedBlack(this);
420 MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedGray() const {
421 MOZ_ASSERT(arena()->allocated());
422 return chunk()->markBits.isMarkedGray(this);
425 MOZ_ALWAYS_INLINE CellColor TenuredCell::color() const {
426 return getColor(&chunk()->markBits, this);
429 /* static */
430 inline CellColor TenuredCell::getColor(MarkBitmap* bitmap,
431 const TenuredCell* cell) {
432 // Note that this method isn't synchronised so may give surprising results if
433 // the mark bitmap is being modified concurrently.
435 if (bitmap->isMarkedBlack(cell)) {
436 return CellColor::Black;
439 if (bitmap->isMarkedGray(cell)) {
440 return CellColor::Gray;
443 return CellColor::White;
446 bool TenuredCell::markIfUnmarked(MarkColor color /* = Black */) const {
447 return chunk()->markBits.markIfUnmarked(this, color);
450 bool TenuredCell::markIfUnmarkedAtomic(MarkColor color) const {
451 return chunk()->markBits.markIfUnmarkedAtomic(this, color);
454 void TenuredCell::markBlack() const { chunk()->markBits.markBlack(this); }
455 void TenuredCell::markBlackAtomic() const {
456 chunk()->markBits.markBlackAtomic(this);
459 void TenuredCell::copyMarkBitsFrom(const TenuredCell* src) {
460 MarkBitmap& markBits = chunk()->markBits;
461 markBits.copyMarkBit(this, src, ColorBit::BlackBit);
462 markBits.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
465 void TenuredCell::unmark() { chunk()->markBits.unmark(this); }
467 inline Arena* TenuredCell::arena() const {
468 MOZ_ASSERT(isTenured());
469 uintptr_t addr = address();
470 addr &= ~ArenaMask;
471 return reinterpret_cast<Arena*>(addr);
474 AllocKind TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
476 JS::TraceKind TenuredCell::getTraceKind() const {
477 return MapAllocToTraceKind(getAllocKind());
480 JS::Zone* TenuredCell::zone() const {
481 JS::Zone* zone = arena()->zone;
482 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
483 return zone;
486 JS::Zone* TenuredCell::zoneFromAnyThread() const { return arena()->zone; }
488 bool TenuredCell::isInsideZone(JS::Zone* zone) const {
489 return zone == arena()->zone;
// Read barrier and pre-write barrier implementation for GC cells.

// Read barrier: call when handing out a reference to |thing| so that
// incremental marking and gray-unmarking stay correct.
template <typename T>
MOZ_ALWAYS_INLINE void ReadBarrier(T* thing) {
  // Only concrete GC thing types may be used here, not the base classes.
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  if (thing) {
    ReadBarrierImpl(thing);
  }
}

MOZ_ALWAYS_INLINE void ReadBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(CurrentThreadIsMainThread());
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(thing);

  // During incremental marking, the read barrier marks the cell instead of
  // unmarking gray below.
  JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
  if (shadowZone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
    return;
  }

  // Otherwise, reading a gray cell exposes it: unmark it and its children.
  if (thing->isMarkedGray()) {
    UnmarkGrayGCThingRecursively(thing);
  }
}

MOZ_ALWAYS_INLINE void ReadBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);

  // Nursery cells don't require a read barrier.
  if (thing->isTenured()) {
    ReadBarrierImpl(&thing->asTenured());
  }
}
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsGCSweeping() ||
             CurrentThreadIsGCFinalizing());
  MOZ_ASSERT(thing);

  // Barriers can be triggered on the main thread while collecting, but are
  // disabled. For example, this happens when sweeping HeapPtr wrappers. See
  // AutoDisableBarriers.

  JS::shadow::Zone* zone = thing->shadowZoneFromAnyThread();
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalPreWriteBarrier(thing);
  }
}

MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);

  // Nursery cells don't require a pre-write barrier.
  if (thing->isTenured()) {
    PreWriteBarrierImpl(&thing->asTenured());
  }
}

// Pre-write barrier: call before overwriting a pointer to |thing|.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(T* thing) {
  // Only concrete GC thing types may be used here, not the base classes.
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  if (thing) {
    PreWriteBarrierImpl(thing);
  }
}
// Pre-write barrier implementation for structures containing GC cells, taking a
// functor to trace the structure.
template <typename T, typename F>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data,
                                       const F& traceFn) {
  MOZ_ASSERT(data);
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());

  // The barrier is only needed while incremental marking is in progress.
  auto* shadowZone = JS::shadow::Zone::from(zone);
  if (!shadowZone->needsIncrementalBarrier()) {
    return;
  }

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromAnyThread()));
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));

  // Trace the structure with the zone's barrier tracer so every contained GC
  // pointer gets marked.
  traceFn(shadowZone->barrierTracer(), data);
}

// Pre-write barrier implementation for structures containing GC cells. T must
// support a |trace| method.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data) {
  MOZ_ASSERT(data);
  PreWriteBarrier(zone, data, [](JSTracer* trc, T* data) { data->trace(trc); });
}
#ifdef DEBUG

/* static */ void Cell::assertThingIsNotGray(Cell* cell) {
  JS::AssertCellIsNotGray(cell);
}

// Nursery cells are always considered aligned; tenured cells are checked
// against their arena's thing size.
bool Cell::isAligned() const {
  return !isTenured() || asTenured().isAligned();
}

bool TenuredCell::isAligned() const {
  return Arena::isAligned(address(), arena()->getThingSize());
}

#endif
// Base class for nursery-allocatable GC things that have 32-bit length and
// 32-bit flags (currently JSString and BigInt).
//
// This tries to store both in Cell::header_, but if that isn't large enough the
// length is stored separately.
//
//          32       0
//  ------------------
//  | Length | Flags |
//  ------------------
//
// The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
// for GC. Derived classes must ensure they don't use these flags for non-GC
// purposes.
class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
#if JS_BITS_PER_WORD == 32
  // Additional storage for length if |header_| is too small to fit both.
  uint32_t length_;
#endif

 protected:
  uint32_t headerLengthField() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    // On 64-bit, the length is packed into the high 32 bits of the header.
    return uint32_t(header_.get() >> 32);
#endif
  }

  // The flags occupy the low 32 bits of the header word on all platforms.
  uint32_t headerFlagsField() const { return uint32_t(header_.get()); }

  void setHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() | uintptr_t(flag));
  }
  void clearHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() & ~uintptr_t(flag));
  }
  void toggleHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() ^ uintptr_t(flag));
  }

  void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
#if JS_BITS_PER_WORD == 32
    header_.set(flags);
    length_ = len;
#else
    header_.set((uint64_t(len) << 32) | uint64_t(flags));
#endif
  }

 public:
  // Returns the offset of header_. JIT code should use offsetOfFlags
  // below.
  static constexpr size_t offsetOfRawHeaderFlagsField() {
    return offsetof(CellWithLengthAndFlags, header_);
  }

  // Offsets for direct field from jit code. A number of places directly
  // access 32-bit length and flags fields so do endian trickery here.
#if JS_BITS_PER_WORD == 32
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, length_);
  }
#elif MOZ_LITTLE_ENDIAN()
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
#else
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
#endif
};
// Base class for non-nursery-allocatable GC things that allows storing a non-GC
// thing pointer in the first word.
//
// The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
template <class PtrT>
class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
    : public TenuredCell {
  static_assert(!std::is_pointer_v<PtrT>,
                "PtrT should be the type of the referent, not of the pointer");
  static_assert(
      !std::is_base_of_v<Cell, PtrT>,
      "Don't use TenuredCellWithNonGCPointer for pointers to GC things");

 protected:
  TenuredCellWithNonGCPointer() = default;
  explicit TenuredCellWithNonGCPointer(PtrT* initial) {
    uintptr_t data = uintptr_t(initial);
    header_.set(data);
  }

  // Read the stored pointer. No GC flag bits may be set when these helpers
  // are used (asserted below).
  PtrT* headerPtr() const {
    MOZ_ASSERT(flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(header_.get()));
  }

  // No barriers are required: the referent is not a GC thing.
  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(flags() == 0);
    header_.set(data);
  }

 public:
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(TenuredCellWithNonGCPointer, header_);
  }
};
// Base class for non-nursery-allocatable GC things that allows storing flags
// in the first word.
//
// The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
// for GC.
class alignas(gc::CellAlignBytes) TenuredCellWithFlags : public TenuredCell {
 protected:
  TenuredCellWithFlags() { header_.set(0); }
  explicit TenuredCellWithFlags(uintptr_t initial) { header_.set(initial); }

  uintptr_t headerFlagsField() const {
    MOZ_ASSERT(flags() == 0);
    return header_.get();
  }

  // Set/clear derived-class flag bits; GC-reserved bits must stay clear
  // (header_.set asserts this).
  void setHeaderFlagBits(uintptr_t flags) {
    header_.set(header_.get() | flags);
  }
  void clearHeaderFlagBits(uintptr_t flags) {
    header_.set(header_.get() & ~flags);
  }
};
// Base class for GC things that have a tenured GC pointer as their first word.
//
// The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
// for GC.
//
// This includes a pre-write barrier when the pointer is updated. No post
// barrier is necessary as the pointer is always tenured.
template <class BaseCell, class PtrT>
class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
  static void staticAsserts() {
    // These static asserts are not in class scope because the PtrT may not be
    // defined when this class template is instantiated.
    static_assert(
        std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
        "BaseCell must be either Cell or TenuredCell");
    static_assert(
        !std::is_pointer_v<PtrT>,
        "PtrT should be the type of the referent, not of the pointer");
    static_assert(
        std::is_base_of_v<Cell, PtrT>,
        "Only use CellWithTenuredGCPointer for pointers to GC things");
  }

 protected:
  CellWithTenuredGCPointer() = default;
  explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }

  // Initialization does not need a pre-write barrier (nothing is overwritten).
  void initHeaderPtr(PtrT* initial) {
    MOZ_ASSERT_IF(initial, !IsInsideNursery(initial));
    uintptr_t data = uintptr_t(initial);
    this->header_.set(data);
  }

  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    MOZ_ASSERT_IF(newValue, !IsInsideNursery(newValue));
    PreWriteBarrier(headerPtr());
    unbarrieredSetHeaderPtr(newValue);
  }

 public:
  PtrT* headerPtr() const {
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
  }
  PtrT* headerPtrAtomic() const {
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_.getAtomic()));
  }

  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(this->flags() == 0);
    this->header_.set(data);
  }

  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(CellWithTenuredGCPointer, header_);
  }
};
817 void CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev, JSObject* next);
819 template <typename T>
820 constexpr inline bool GCTypeIsTenured() {
821 static_assert(std::is_base_of_v<Cell, T>);
822 static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);
824 return std::is_base_of_v<TenuredCell, T> || std::is_base_of_v<JSAtom, T>;
827 template <class PtrT>
828 class alignas(gc::CellAlignBytes) TenuredCellWithGCPointer
829 : public TenuredCell {
830 static void staticAsserts() {
831 // These static asserts are not in class scope because the PtrT may not be
832 // defined when this class template is instantiated.
833 static_assert(
834 !std::is_pointer_v<PtrT>,
835 "PtrT should be the type of the referent, not of the pointer");
836 static_assert(
837 std::is_base_of_v<Cell, PtrT>,
838 "Only use TenuredCellWithGCPointer for pointers to GC things");
839 static_assert(
840 !GCTypeIsTenured<PtrT>,
841 "Don't use TenuredCellWithGCPointer for always-tenured GC things");
844 protected:
845 TenuredCellWithGCPointer() = default;
846 explicit TenuredCellWithGCPointer(PtrT* initial) { initHeaderPtr(initial); }
848 void initHeaderPtr(PtrT* initial) {
849 uintptr_t data = uintptr_t(initial);
850 this->header_.set(data);
851 if (initial && IsInsideNursery(initial)) {
852 CellHeaderPostWriteBarrier(headerPtrAddress(), nullptr, initial);
856 PtrT** headerPtrAddress() {
857 MOZ_ASSERT(this->flags() == 0);
858 return reinterpret_cast<PtrT**>(&this->header_);
861 public:
862 PtrT* headerPtr() const {
863 MOZ_ASSERT(this->flags() == 0);
864 return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
867 void unbarrieredSetHeaderPtr(PtrT* newValue) {
868 uintptr_t data = uintptr_t(newValue);
869 MOZ_ASSERT(this->flags() == 0);
870 this->header_.set(data);
873 static constexpr size_t offsetOfHeaderPtr() {
874 return offsetof(TenuredCellWithGCPointer, header_);
878 // Check whether a typed GC thing is marked at all. Doesn't check gray bits for
879 // kinds that can't be marked gray.
880 template <typename T>
881 static inline bool TenuredThingIsMarkedAny(T* thing) {
882 using BaseT = typename BaseGCType<T>::type;
883 TenuredCell* cell = &thing->asTenured();
884 if constexpr (TraceKindCanBeGray<BaseT>::value) {
885 return cell->isMarkedAny();
886 } else {
887 MOZ_ASSERT(!cell->isMarkedGray());
888 return cell->isMarkedBlack();
892 template <>
893 inline bool TenuredThingIsMarkedAny<Cell>(Cell* thing) {
894 return thing->asTenured().isMarkedAny();
897 } /* namespace gc */
898 } /* namespace js */
900 #endif /* gc_Cell_h */