1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
10 #include "mozilla/Atomics.h"
11 #include "mozilla/EndianUtils.h"
13 #include <type_traits>
15 #include "gc/GCContext.h"
17 #include "gc/TraceKind.h"
18 #include "js/GCAnnotations.h"
19 #include "js/shadow/Zone.h" // JS::shadow::Zone
20 #include "js/TypeDecls.h"
28 class JS_PUBLIC_API GenericPrinter
;
30 extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
31 JS::shadow::Zone
* shadowZone
);
34 // Barriers can't be triggered during backend Ion compilation, which may run on
36 extern bool CurrentThreadIsIonCompiling();
39 extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer
* trc
,
45 enum class AllocKind
: uint8_t;
46 class CellAllocator
; // Declared so subtypes of Cell can friend it easily.
50 extern void PerformIncrementalReadBarrier(TenuredCell
* cell
);
51 extern void PerformIncrementalPreWriteBarrier(TenuredCell
* cell
);
52 extern void PerformIncrementalBarrierDuringFlattening(JSString
* str
);
53 extern void UnmarkGrayGCThingRecursively(TenuredCell
* cell
);
55 // Like gc::MarkColor but allows the possibility of the cell being unmarked.
57 // This class mimics an enum class, but supports operator overloading.
60 enum Color
{ White
= 0, Gray
= 1, Black
= 2 };
62 CellColor() : color(White
) {}
64 MOZ_IMPLICIT
CellColor(MarkColor markColor
)
65 : color(markColor
== MarkColor::Black
? Black
: Gray
) {}
67 MOZ_IMPLICIT
constexpr CellColor(Color c
) : color(c
) {}
69 MarkColor
asMarkColor() const {
70 MOZ_ASSERT(color
!= White
);
71 return color
== Black
? MarkColor::Black
: MarkColor::Gray
;
74 // Implement a total ordering for CellColor, with white being 'least marked'
75 // and black being 'most marked'.
76 bool operator<(const CellColor other
) const { return color
< other
.color
; }
77 bool operator>(const CellColor other
) const { return color
> other
.color
; }
78 bool operator<=(const CellColor other
) const { return color
<= other
.color
; }
79 bool operator>=(const CellColor other
) const { return color
>= other
.color
; }
80 bool operator!=(const CellColor other
) const { return color
!= other
.color
; }
81 bool operator==(const CellColor other
) const { return color
== other
.color
; }
82 explicit operator bool() const { return color
!= White
; }
84 #if defined(JS_GC_ZEAL) || defined(DEBUG)
85 const char* name() const {
87 case CellColor::White
:
89 case CellColor::Black
:
94 MOZ_CRASH("Unexpected cell color");
103 // Cell header word. Stores GC flags and derived class data.
105 // Loads of GC flags + all stores are marked as (relaxed) atomic operations,
106 // to deal with the following benign data race during compacting GC:
108 // - Thread 1 checks isForwarded (which is always false in this situation).
109 // - Thread 2 updates the derived class data (without changing the forwarded
112 // To improve performance, we don't use atomic operations for get() because
113 // atomic operations inhibit certain compiler optimizations: GCC and Clang are
114 // unable to fold multiple loads even if they're both relaxed atomics. This is
115 // especially a problem for chained loads such as obj->shape->base->clasp.
117 // Indicates whether the cell has been forwarded (moved) by generational or
118 // compacting GC and is now a RelocationOverlay.
119 static constexpr uintptr_t FORWARD_BIT
= Bit(0);
120 // Bits 1 and 2 are reserved for future use by the GC.
124 void setAtomic(uintptr_t value
) {
125 __atomic_store_n(&value_
, value
, __ATOMIC_RELAXED
);
129 static constexpr uintptr_t RESERVED_MASK
=
130 BitMask(gc::CellFlagBitsReservedForGC
);
131 static_assert(gc::CellFlagBitsReservedForGC
>= 3,
132 "Not enough flag bits reserved for GC");
134 uintptr_t getAtomic() const {
135 return __atomic_load_n(&value_
, __ATOMIC_RELAXED
);
138 // Accessors for derived class data.
139 uintptr_t get() const {
140 // Note: non-atomic load. See class comment.
141 uintptr_t value
= value_
;
142 MOZ_ASSERT((value
& RESERVED_MASK
) == 0);
145 void set(uintptr_t value
) {
146 MOZ_ASSERT((value
& RESERVED_MASK
) == 0);
150 // Accessors for GC data.
151 uintptr_t flags() const { return getAtomic() & RESERVED_MASK
; }
152 bool isForwarded() const { return flags() & FORWARD_BIT
; }
153 void setForwardingAddress(uintptr_t ptr
) {
154 MOZ_ASSERT((ptr
& RESERVED_MASK
) == 0);
155 setAtomic(ptr
| FORWARD_BIT
);
157 uintptr_t getForwardingAddress() const {
158 MOZ_ASSERT(isForwarded());
159 return getAtomic() & ~RESERVED_MASK
;
165 // A GC cell is the ultimate base class for all GC things. All types allocated
166 // on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
167 // tenured, prefer the TenuredCell class as base.
169 // The first word of Cell is a HeaderWord (a uintptr_t) that reserves the low
170 // three bits for GC purposes. The remaining bits are available to sub-classes
171 // and can be used store a pointer to another gc::Cell. To make use of the
172 // remaining space, sub-classes derive from a helper class such as
173 // TenuredCellWithNonGCPointer.
175 // During moving GC operation a Cell may be marked as forwarded. This indicates
176 // that a gc::RelocationOverlay is currently stored in the Cell's memory and
177 // should be used to find the new location of the Cell.
179 // Cell header word. Stores GC flags and derived class data.
185 Cell(const Cell
&) = delete;
186 void operator=(const Cell
&) = delete;
188 bool isForwarded() const { return header_
.isForwarded(); }
189 uintptr_t flags() const { return header_
.flags(); }
191 MOZ_ALWAYS_INLINE
bool isTenured() const { return !IsInsideNursery(this); }
192 MOZ_ALWAYS_INLINE
const TenuredCell
& asTenured() const;
193 MOZ_ALWAYS_INLINE TenuredCell
& asTenured();
195 MOZ_ALWAYS_INLINE
bool isMarkedAny() const;
196 MOZ_ALWAYS_INLINE
bool isMarkedBlack() const;
197 MOZ_ALWAYS_INLINE
bool isMarkedGray() const;
198 MOZ_ALWAYS_INLINE
bool isMarked(gc::MarkColor color
) const;
199 MOZ_ALWAYS_INLINE
bool isMarkedAtLeast(gc::MarkColor color
) const;
200 MOZ_ALWAYS_INLINE CellColor
color() const;
202 inline JSRuntime
* runtimeFromMainThread() const;
204 // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
205 // thread can easily lead to races. Use this method very carefully.
206 inline JSRuntime
* runtimeFromAnyThread() const;
208 // May be overridden by GC thing kinds that have a compartment pointer.
209 inline JS::Compartment
* maybeCompartment() const { return nullptr; }
211 // The StoreBuffer used to record incoming pointers from the tenured heap.
212 // This will return nullptr for a tenured cell.
213 inline StoreBuffer
* storeBuffer() const;
215 inline JS::TraceKind
getTraceKind() const;
217 static MOZ_ALWAYS_INLINE
bool needPreWriteBarrier(JS::Zone
* zone
);
219 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
220 inline bool is() const {
221 return getTraceKind() == JS::MapTypeToTraceKind
<T
>::kind
;
224 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
226 // |this|-qualify the |is| call below to avoid compile errors with even
227 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
228 MOZ_ASSERT(this->is
<T
>());
229 return static_cast<T
*>(this);
232 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
233 inline const T
* as() const {
234 // |this|-qualify the |is| call below to avoid compile errors with even
235 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
236 MOZ_ASSERT(this->is
<T
>());
237 return static_cast<const T
*>(this);
240 inline JS::Zone
* zone() const;
241 inline JS::Zone
* zoneFromAnyThread() const;
243 // Get the zone for a cell known to be in the nursery.
244 inline JS::Zone
* nurseryZone() const;
245 inline JS::Zone
* nurseryZoneFromAnyThread() const;
247 // Default implementation for kinds that cannot be permanent. This may be
248 // overriden by derived classes.
249 MOZ_ALWAYS_INLINE
bool isPermanentAndMayBeShared() const { return false; }
252 static inline void assertThingIsNotGray(Cell
* cell
);
253 inline bool isAligned() const;
254 void dump(GenericPrinter
& out
) const;
259 uintptr_t address() const;
260 inline ChunkBase
* chunk() const;
263 // Cells are destroyed by the GC. Do not delete them directly.
264 void operator delete(void*) = delete;
267 // A GC TenuredCell gets behaviors that are valid for things in the Tenured
268 // heap, such as access to the arena and mark bits.
269 class TenuredCell
: public Cell
{
271 MOZ_ALWAYS_INLINE
bool isTenured() const {
272 MOZ_ASSERT(!IsInsideNursery(this));
276 TenuredChunk
* chunk() const {
277 return static_cast<TenuredChunk
*>(Cell::chunk());
280 // Mark bit management.
281 MOZ_ALWAYS_INLINE
bool isMarkedAny() const;
282 MOZ_ALWAYS_INLINE
bool isMarkedBlack() const;
283 MOZ_ALWAYS_INLINE
bool isMarkedGray() const;
284 MOZ_ALWAYS_INLINE CellColor
color() const;
286 // The return value indicates if the cell went from unmarked to marked.
287 MOZ_ALWAYS_INLINE
bool markIfUnmarked(
288 MarkColor color
= MarkColor::Black
) const;
289 MOZ_ALWAYS_INLINE
bool markIfUnmarkedAtomic(MarkColor color
) const;
290 MOZ_ALWAYS_INLINE
void markBlack() const;
291 MOZ_ALWAYS_INLINE
void markBlackAtomic() const;
292 MOZ_ALWAYS_INLINE
void copyMarkBitsFrom(const TenuredCell
* src
);
293 MOZ_ALWAYS_INLINE
void unmark();
295 // Access to the arena.
296 inline Arena
* arena() const;
297 inline AllocKind
getAllocKind() const;
298 inline JS::TraceKind
getTraceKind() const;
299 inline JS::Zone
* zone() const;
300 inline JS::Zone
* zoneFromAnyThread() const;
301 inline bool isInsideZone(JS::Zone
* zone
) const;
303 MOZ_ALWAYS_INLINE
JS::shadow::Zone
* shadowZone() const {
304 return JS::shadow::Zone::from(zone());
306 MOZ_ALWAYS_INLINE
JS::shadow::Zone
* shadowZoneFromAnyThread() const {
307 return JS::shadow::Zone::from(zoneFromAnyThread());
310 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
311 inline bool is() const {
312 return getTraceKind() == JS::MapTypeToTraceKind
<T
>::kind
;
315 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
317 // |this|-qualify the |is| call below to avoid compile errors with even
318 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
319 MOZ_ASSERT(this->is
<T
>());
320 return static_cast<T
*>(this);
323 template <typename T
, typename
= std::enable_if_t
<JS::IsBaseTraceType_v
<T
>>>
324 inline const T
* as() const {
325 // |this|-qualify the |is| call below to avoid compile errors with even
326 // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
327 MOZ_ASSERT(this->is
<T
>());
328 return static_cast<const T
*>(this);
331 // Default implementation for kinds that don't require fixup.
332 void fixupAfterMovingGC() {}
334 static inline CellColor
getColor(MarkBitmap
* bitmap
, const TenuredCell
* cell
);
337 inline bool isAligned() const;
341 MOZ_ALWAYS_INLINE
const TenuredCell
& Cell::asTenured() const {
342 MOZ_ASSERT(isTenured());
343 return *static_cast<const TenuredCell
*>(this);
346 MOZ_ALWAYS_INLINE TenuredCell
& Cell::asTenured() {
347 MOZ_ASSERT(isTenured());
348 return *static_cast<TenuredCell
*>(this);
351 MOZ_ALWAYS_INLINE
bool Cell::isMarkedAny() const {
352 return !isTenured() || asTenured().isMarkedAny();
355 MOZ_ALWAYS_INLINE
bool Cell::isMarkedBlack() const {
356 return !isTenured() || asTenured().isMarkedBlack();
359 MOZ_ALWAYS_INLINE
bool Cell::isMarkedGray() const {
360 return isTenured() && asTenured().isMarkedGray();
363 MOZ_ALWAYS_INLINE
bool Cell::isMarked(gc::MarkColor color
) const {
364 return color
== MarkColor::Gray
? isMarkedGray() : isMarkedBlack();
367 MOZ_ALWAYS_INLINE
bool Cell::isMarkedAtLeast(gc::MarkColor color
) const {
368 return color
== MarkColor::Gray
? isMarkedAny() : isMarkedBlack();
371 MOZ_ALWAYS_INLINE CellColor
Cell::color() const {
372 return isTenured() ? asTenured().color() : CellColor::Black
;
375 inline JSRuntime
* Cell::runtimeFromMainThread() const {
376 JSRuntime
* rt
= chunk()->runtime
;
377 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt
));
381 inline JSRuntime
* Cell::runtimeFromAnyThread() const {
382 return chunk()->runtime
;
385 inline uintptr_t Cell::address() const {
386 uintptr_t addr
= uintptr_t(this);
387 MOZ_ASSERT(addr
% CellAlignBytes
== 0);
388 MOZ_ASSERT(TenuredChunk::withinValidRange(addr
));
392 ChunkBase
* Cell::chunk() const {
393 uintptr_t addr
= uintptr_t(this);
394 MOZ_ASSERT(addr
% CellAlignBytes
== 0);
396 return reinterpret_cast<ChunkBase
*>(addr
);
399 inline StoreBuffer
* Cell::storeBuffer() const { return chunk()->storeBuffer
; }
401 JS::Zone
* Cell::zone() const {
403 return asTenured().zone();
406 return nurseryZone();
409 JS::Zone
* Cell::zoneFromAnyThread() const {
411 return asTenured().zoneFromAnyThread();
414 return nurseryZoneFromAnyThread();
417 JS::Zone
* Cell::nurseryZone() const {
418 JS::Zone
* zone
= nurseryZoneFromAnyThread();
419 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone
));
423 JS::Zone
* Cell::nurseryZoneFromAnyThread() const {
424 return NurseryCellHeader::from(this)->zone();
428 extern Cell
* UninlinedForwarded(const Cell
* cell
);
431 inline JS::TraceKind
Cell::getTraceKind() const {
433 MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
434 asTenured().getTraceKind());
435 return asTenured().getTraceKind();
438 return NurseryCellHeader::from(this)->traceKind();
441 /* static */ MOZ_ALWAYS_INLINE
bool Cell::needPreWriteBarrier(JS::Zone
* zone
) {
442 return JS::shadow::Zone::from(zone
)->needsIncrementalBarrier();
445 MOZ_ALWAYS_INLINE
bool TenuredCell::isMarkedAny() const {
446 MOZ_ASSERT(arena()->allocated());
447 return chunk()->markBits
.isMarkedAny(this);
450 MOZ_ALWAYS_INLINE
bool TenuredCell::isMarkedBlack() const {
451 MOZ_ASSERT(arena()->allocated());
452 return chunk()->markBits
.isMarkedBlack(this);
455 MOZ_ALWAYS_INLINE
bool TenuredCell::isMarkedGray() const {
456 MOZ_ASSERT(arena()->allocated());
457 return chunk()->markBits
.isMarkedGray(this);
460 MOZ_ALWAYS_INLINE CellColor
TenuredCell::color() const {
461 return getColor(&chunk()->markBits
, this);
465 inline CellColor
TenuredCell::getColor(MarkBitmap
* bitmap
,
466 const TenuredCell
* cell
) {
467 // Note that this method isn't synchronised so may give surprising results if
468 // the mark bitmap is being modified concurrently.
470 if (bitmap
->isMarkedBlack(cell
)) {
471 return CellColor::Black
;
474 if (bitmap
->isMarkedGray(cell
)) {
475 return CellColor::Gray
;
478 return CellColor::White
;
481 bool TenuredCell::markIfUnmarked(MarkColor color
/* = Black */) const {
482 return chunk()->markBits
.markIfUnmarked(this, color
);
485 bool TenuredCell::markIfUnmarkedAtomic(MarkColor color
) const {
486 return chunk()->markBits
.markIfUnmarkedAtomic(this, color
);
489 void TenuredCell::markBlack() const { chunk()->markBits
.markBlack(this); }
490 void TenuredCell::markBlackAtomic() const {
491 chunk()->markBits
.markBlackAtomic(this);
494 void TenuredCell::copyMarkBitsFrom(const TenuredCell
* src
) {
495 MarkBitmap
& markBits
= chunk()->markBits
;
496 markBits
.copyMarkBit(this, src
, ColorBit::BlackBit
);
497 markBits
.copyMarkBit(this, src
, ColorBit::GrayOrBlackBit
);
500 void TenuredCell::unmark() { chunk()->markBits
.unmark(this); }
502 inline Arena
* TenuredCell::arena() const {
503 MOZ_ASSERT(isTenured());
504 uintptr_t addr
= address();
506 return reinterpret_cast<Arena
*>(addr
);
509 AllocKind
TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
511 JS::TraceKind
TenuredCell::getTraceKind() const {
512 return MapAllocToTraceKind(getAllocKind());
515 JS::Zone
* TenuredCell::zone() const {
516 JS::Zone
* zone
= arena()->zone
;
517 MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone
));
521 JS::Zone
* TenuredCell::zoneFromAnyThread() const { return arena()->zone
; }
523 bool TenuredCell::isInsideZone(JS::Zone
* zone
) const {
524 return zone
== arena()->zone
;
527 // Read barrier and pre-write barrier implementation for GC cells.
529 template <typename T
>
530 MOZ_ALWAYS_INLINE
void ReadBarrier(T
* thing
) {
531 static_assert(std::is_base_of_v
<Cell
, T
>);
532 static_assert(!std::is_same_v
<Cell
, T
> && !std::is_same_v
<TenuredCell
, T
>);
535 ReadBarrierImpl(thing
);
539 MOZ_ALWAYS_INLINE
void ReadBarrierImpl(TenuredCell
* thing
) {
540 MOZ_ASSERT(CurrentThreadIsMainThread());
541 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
544 JS::shadow::Zone
* shadowZone
= thing
->shadowZoneFromAnyThread();
545 if (shadowZone
->needsIncrementalBarrier()) {
546 PerformIncrementalReadBarrier(thing
);
550 if (thing
->isMarkedGray()) {
551 UnmarkGrayGCThingRecursively(thing
);
555 MOZ_ALWAYS_INLINE
void ReadBarrierImpl(Cell
* thing
) {
556 MOZ_ASSERT(!CurrentThreadIsGCMarking());
559 if (thing
->isTenured()) {
560 ReadBarrierImpl(&thing
->asTenured());
564 MOZ_ALWAYS_INLINE
void PreWriteBarrierImpl(TenuredCell
* thing
) {
565 MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsGCSweeping() ||
566 CurrentThreadIsGCFinalizing());
569 // Barriers can be triggered on the main thread while collecting, but are
570 // disabled. For example, this happens when sweeping HeapPtr wrappers. See
571 // AutoDisableBarriers.
573 JS::shadow::Zone
* zone
= thing
->shadowZoneFromAnyThread();
574 if (zone
->needsIncrementalBarrier()) {
575 PerformIncrementalPreWriteBarrier(thing
);
579 MOZ_ALWAYS_INLINE
void PreWriteBarrierImpl(Cell
* thing
) {
580 MOZ_ASSERT(!CurrentThreadIsGCMarking());
583 if (thing
->isTenured()) {
584 PreWriteBarrierImpl(&thing
->asTenured());
588 template <typename T
>
589 MOZ_ALWAYS_INLINE
void PreWriteBarrier(T
* thing
) {
590 static_assert(std::is_base_of_v
<Cell
, T
>);
591 static_assert(!std::is_same_v
<Cell
, T
> && !std::is_same_v
<TenuredCell
, T
>);
594 PreWriteBarrierImpl(thing
);
598 // Pre-write barrier implementation for structures containing GC cells, taking a
599 // functor to trace the structure.
600 template <typename T
, typename F
>
601 MOZ_ALWAYS_INLINE
void PreWriteBarrier(JS::Zone
* zone
, T
* data
,
604 MOZ_ASSERT(!CurrentThreadIsIonCompiling());
605 MOZ_ASSERT(!CurrentThreadIsGCMarking());
607 auto* shadowZone
= JS::shadow::Zone::from(zone
);
608 if (!shadowZone
->needsIncrementalBarrier()) {
612 MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone
->runtimeFromAnyThread()));
613 MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone
));
615 traceFn(shadowZone
->barrierTracer(), data
);
618 // Pre-write barrier implementation for structures containing GC cells. T must
619 // support a |trace| method.
620 template <typename T
>
621 MOZ_ALWAYS_INLINE
void PreWriteBarrier(JS::Zone
* zone
, T
* data
) {
623 PreWriteBarrier(zone
, data
, [](JSTracer
* trc
, T
* data
) { data
->trace(trc
); });
628 /* static */ void Cell::assertThingIsNotGray(Cell
* cell
) {
629 JS::AssertCellIsNotGray(cell
);
632 bool Cell::isAligned() const {
636 return asTenured().isAligned();
639 bool TenuredCell::isAligned() const {
640 return Arena::isAligned(address(), arena()->getThingSize());
645 // Base class for nursery-allocatable GC things that have 32-bit length and
646 // 32-bit flags (currently JSString and BigInt).
648 // This tries to store both in Cell::header_, but if that isn't large enough the
649 // length is stored separately.
652 // ------------------
653 // | Length | Flags |
654 // ------------------
656 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
657 // for GC. Derived classes must ensure they don't use these flags for non-GC
659 class alignas(gc::CellAlignBytes
) CellWithLengthAndFlags
: public Cell
{
660 #if JS_BITS_PER_WORD == 32
661 // Additional storage for length if |header_| is too small to fit both.
666 uint32_t headerLengthField() const {
667 #if JS_BITS_PER_WORD == 32
670 return uint32_t(header_
.get() >> 32);
674 uint32_t headerFlagsField() const { return uint32_t(header_
.get()); }
676 void setHeaderFlagBit(uint32_t flag
) {
677 header_
.set(header_
.get() | uintptr_t(flag
));
679 void clearHeaderFlagBit(uint32_t flag
) {
680 header_
.set(header_
.get() & ~uintptr_t(flag
));
682 void toggleHeaderFlagBit(uint32_t flag
) {
683 header_
.set(header_
.get() ^ uintptr_t(flag
));
686 void setHeaderLengthAndFlags(uint32_t len
, uint32_t flags
) {
687 #if JS_BITS_PER_WORD == 32
691 header_
.set((uint64_t(len
) << 32) | uint64_t(flags
));
696 // Returns the offset of header_. JIT code should use offsetOfFlags
698 static constexpr size_t offsetOfRawHeaderFlagsField() {
699 return offsetof(CellWithLengthAndFlags
, header_
);
702 // Offsets for direct field from jit code. A number of places directly
703 // access 32-bit length and flags fields so do endian trickery here.
704 #if JS_BITS_PER_WORD == 32
705 static constexpr size_t offsetOfHeaderFlags() {
706 return offsetof(CellWithLengthAndFlags
, header_
);
708 static constexpr size_t offsetOfHeaderLength() {
709 return offsetof(CellWithLengthAndFlags
, length_
);
711 #elif MOZ_LITTLE_ENDIAN()
712 static constexpr size_t offsetOfHeaderFlags() {
713 return offsetof(CellWithLengthAndFlags
, header_
);
715 static constexpr size_t offsetOfHeaderLength() {
716 return offsetof(CellWithLengthAndFlags
, header_
) + sizeof(uint32_t);
719 static constexpr size_t offsetOfHeaderFlags() {
720 return offsetof(CellWithLengthAndFlags
, header_
) + sizeof(uint32_t);
722 static constexpr size_t offsetOfHeaderLength() {
723 return offsetof(CellWithLengthAndFlags
, header_
);
728 // Base class for non-nursery-allocatable GC things that allows storing a non-GC
729 // thing pointer in the first word.
731 // The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
732 template <class PtrT
>
733 class alignas(gc::CellAlignBytes
) TenuredCellWithNonGCPointer
734 : public TenuredCell
{
735 static_assert(!std::is_pointer_v
<PtrT
>,
736 "PtrT should be the type of the referent, not of the pointer");
738 !std::is_base_of_v
<Cell
, PtrT
>,
739 "Don't use TenuredCellWithNonGCPointer for pointers to GC things");
742 TenuredCellWithNonGCPointer() = default;
743 explicit TenuredCellWithNonGCPointer(PtrT
* initial
) {
744 uintptr_t data
= uintptr_t(initial
);
748 PtrT
* headerPtr() const {
749 MOZ_ASSERT(flags() == 0);
750 return reinterpret_cast<PtrT
*>(uintptr_t(header_
.get()));
753 void setHeaderPtr(PtrT
* newValue
) {
754 // As above, no flags are expected to be set here.
755 uintptr_t data
= uintptr_t(newValue
);
756 MOZ_ASSERT(flags() == 0);
761 static constexpr size_t offsetOfHeaderPtr() {
762 return offsetof(TenuredCellWithNonGCPointer
, header_
);
766 // Base class for non-nursery-allocatable GC things that allows storing flags
767 // in the first word.
769 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
771 class alignas(gc::CellAlignBytes
) TenuredCellWithFlags
: public TenuredCell
{
773 TenuredCellWithFlags() { header_
.set(0); }
774 explicit TenuredCellWithFlags(uintptr_t initial
) { header_
.set(initial
); }
776 uintptr_t headerFlagsField() const {
777 MOZ_ASSERT(flags() == 0);
778 return header_
.get();
781 void setHeaderFlagBits(uintptr_t flags
) {
782 header_
.set(header_
.get() | flags
);
784 void clearHeaderFlagBits(uintptr_t flags
) {
785 header_
.set(header_
.get() & ~flags
);
789 // Base class for GC things that have a tenured GC pointer as their first word.
791 // The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
794 // This includes a pre-write barrier when the pointer is updated. No post barrier
795 // is necessary as the pointer is always tenured.
796 template <class BaseCell
, class PtrT
>
797 class alignas(gc::CellAlignBytes
) CellWithTenuredGCPointer
: public BaseCell
{
798 static void staticAsserts() {
799 // These static asserts are not in class scope because the PtrT may not be
800 // defined when this class template is instantiated.
802 std::is_same_v
<BaseCell
, Cell
> || std::is_same_v
<BaseCell
, TenuredCell
>,
803 "BaseCell must be either Cell or TenuredCell");
805 !std::is_pointer_v
<PtrT
>,
806 "PtrT should be the type of the referent, not of the pointer");
808 std::is_base_of_v
<Cell
, PtrT
>,
809 "Only use CellWithTenuredGCPointer for pointers to GC things");
813 CellWithTenuredGCPointer() = default;
814 explicit CellWithTenuredGCPointer(PtrT
* initial
) { initHeaderPtr(initial
); }
816 void initHeaderPtr(PtrT
* initial
) {
817 MOZ_ASSERT_IF(initial
, !IsInsideNursery(initial
));
818 uintptr_t data
= uintptr_t(initial
);
819 this->header_
.set(data
);
822 void setHeaderPtr(PtrT
* newValue
) {
823 // As above, no flags are expected to be set here.
824 MOZ_ASSERT_IF(newValue
, !IsInsideNursery(newValue
));
825 PreWriteBarrier(headerPtr());
826 unbarrieredSetHeaderPtr(newValue
);
830 PtrT
* headerPtr() const {
832 MOZ_ASSERT(this->flags() == 0);
833 return reinterpret_cast<PtrT
*>(uintptr_t(this->header_
.get()));
835 PtrT
* headerPtrAtomic() const {
837 MOZ_ASSERT(this->flags() == 0);
838 return reinterpret_cast<PtrT
*>(uintptr_t(this->header_
.getAtomic()));
841 void unbarrieredSetHeaderPtr(PtrT
* newValue
) {
842 uintptr_t data
= uintptr_t(newValue
);
843 MOZ_ASSERT(this->flags() == 0);
844 this->header_
.set(data
);
847 static constexpr size_t offsetOfHeaderPtr() {
848 return offsetof(CellWithTenuredGCPointer
, header_
);
852 void CellHeaderPostWriteBarrier(JSObject
** ptr
, JSObject
* prev
, JSObject
* next
);
854 template <typename T
>
855 constexpr inline bool GCTypeIsTenured() {
856 static_assert(std::is_base_of_v
<Cell
, T
>);
857 static_assert(!std::is_same_v
<Cell
, T
> && !std::is_same_v
<TenuredCell
, T
>);
859 return std::is_base_of_v
<TenuredCell
, T
> || std::is_base_of_v
<JSAtom
, T
>;
862 template <class PtrT
>
863 class alignas(gc::CellAlignBytes
) TenuredCellWithGCPointer
864 : public TenuredCell
{
865 static void staticAsserts() {
866 // These static asserts are not in class scope because the PtrT may not be
867 // defined when this class template is instantiated.
869 !std::is_pointer_v
<PtrT
>,
870 "PtrT should be the type of the referent, not of the pointer");
872 std::is_base_of_v
<Cell
, PtrT
>,
873 "Only use TenuredCellWithGCPointer for pointers to GC things");
875 !GCTypeIsTenured
<PtrT
>,
876 "Don't use TenuredCellWithGCPointer for always-tenured GC things");
880 TenuredCellWithGCPointer() = default;
881 explicit TenuredCellWithGCPointer(PtrT
* initial
) { initHeaderPtr(initial
); }
883 void initHeaderPtr(PtrT
* initial
) {
884 uintptr_t data
= uintptr_t(initial
);
885 this->header_
.set(data
);
886 if (initial
&& IsInsideNursery(initial
)) {
887 CellHeaderPostWriteBarrier(headerPtrAddress(), nullptr, initial
);
891 PtrT
** headerPtrAddress() {
892 MOZ_ASSERT(this->flags() == 0);
893 return reinterpret_cast<PtrT
**>(&this->header_
);
897 PtrT
* headerPtr() const {
898 MOZ_ASSERT(this->flags() == 0);
899 return reinterpret_cast<PtrT
*>(uintptr_t(this->header_
.get()));
902 void unbarrieredSetHeaderPtr(PtrT
* newValue
) {
903 uintptr_t data
= uintptr_t(newValue
);
904 MOZ_ASSERT(this->flags() == 0);
905 this->header_
.set(data
);
908 static constexpr size_t offsetOfHeaderPtr() {
909 return offsetof(TenuredCellWithGCPointer
, header_
);
913 // Check whether a typed GC thing is marked at all. Doesn't check gray bits for
914 // kinds that can't be marked gray.
915 template <typename T
>
916 static inline bool TenuredThingIsMarkedAny(T
* thing
) {
917 using BaseT
= typename BaseGCType
<T
>::type
;
918 TenuredCell
* cell
= &thing
->asTenured();
919 if constexpr (TraceKindCanBeGray
<BaseT
>::value
) {
920 return cell
->isMarkedAny();
922 MOZ_ASSERT(!cell
->isMarkedGray());
923 return cell
->isMarkedBlack();
930 #endif /* gc_Cell_h */