1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
10 #include "mozilla/DebugOnly.h"
12 #include "gc/AllocKind.h"
13 #include "gc/Pretenuring.h"
14 #include "js/HeapAPI.h"
15 #include "js/TypeDecls.h"
16 #include "util/Poison.h"
21 class AutoLockGCBgAlloc
;
30 class MarkingValidator
;
31 class SortedArenaList
;
// Cells are aligned to CellAlignShift, so the largest tagged null pointer is:
const uintptr_t LargestTaggedNullCellPointer = (1 << CellAlignShift) - 1;

// Arenas are carved into cell-aligned things, so the arena size must be an
// exact multiple of the cell alignment.
static_assert(ArenaSize % CellAlignBytes == 0,
              "Arena size must be a multiple of cell alignment");
41 * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
44 * - In an empty span, |first| and |last| are both zero.
46 * - In a non-empty span, |first| is the address of the first free thing in the
47 * span, and |last| is the address of the last free thing in the span.
48 * Furthermore, the memory pointed to by |last| holds a FreeSpan structure
49 * that points to the next span (which may be empty); this works because
50 * sizeof(FreeSpan) is less than the smallest thingSize.
54 friend class ArenaCellIter
;
55 friend class ArenaFreeCellIter
;
  // This inits just |first| and |last|; if the span is non-empty it doesn't
  // do anything with the next span stored at |last|.
  void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    // Validate the offsets against |arena|'s alloc kind before storing them.
    checkRange(firstArg, lastArg, arena);
  // This sets |first| and |last|, and also sets the next span stored at
  // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
  void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    initBounds(firstArg, lastArg, arena);
    // The free cell at |last| holds the next FreeSpan; fetch it unchecked
    // because it has not been initialized yet.
    FreeSpan* last = nextSpanUnchecked(arena);
84 bool isEmpty() const { return !first
; }
  // Unchecked cast of this FreeSpan's own address to its containing Arena.
  // Only meaningful for the FreeSpan stored at the start of an Arena
  // (see Arena::firstFreeSpan).
  Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
  // Like getArenaUnchecked(), but verifies the arena address; defined below.
  inline Arena* getArena();

  // Byte offsets of the |first| and |last| members within FreeSpan.
  static size_t offsetOfFirst() { return offsetof(FreeSpan, first); }

  static size_t offsetOfLast() { return offsetof(FreeSpan, last); }
  // Like nextSpan(), but no checking of the following span is done.
  FreeSpan* nextSpanUnchecked(const Arena* arena) const {
    MOZ_ASSERT(arena && !isEmpty());
    // |last| is an offset within |arena|; the FreeSpan describing the next
    // span lives in the free cell it designates (see the class comment).
    return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);

  const FreeSpan* nextSpan(const Arena* arena) const {
    return nextSpanUnchecked(arena);
  // Allocate one thing of |thingSize| bytes from the front of this span,
  // returning its address, or nullptr if the span is empty.
  MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
    // Eschew the usual checks, because this might be the placeholder span.
    // If this is somehow an invalid, non-empty span, checkSpan() will catch it.
    Arena* arena = getArenaUnchecked();
    // |first| is an offset within the arena: the thing's absolute address is
    // arena base + |first|.
    uintptr_t thing = uintptr_t(arena) + first;
    // We have space for at least two more things, so do a simple
    } else if (MOZ_LIKELY(first)) {
      // The last space points to the next free span (which may be empty).
      const FreeSpan* next = nextSpan(arena);
      return nullptr;  // The span is empty.
    // Debug-only poisoning of the newly allocated thing so use of
    // uninitialized cells is detectable.
    DebugOnlyPoison(reinterpret_cast<void*>(thing),
                    JS_ALLOCATED_TENURED_PATTERN, thingSize,
                    MemCheckKind::MakeUndefined);
    return reinterpret_cast<TenuredCell*>(thing);
129 inline void checkSpan(const Arena
* arena
) const;
130 inline void checkRange(uintptr_t first
, uintptr_t last
,
131 const Arena
* arena
) const;
135 * Arenas are the allocation units of the tenured heap in the GC. An arena
136 * is 4kiB in size and 4kiB-aligned. It starts with several header fields
137 * followed by some bytes of padding. The remainder of the arena is filled
138 * with GC things of a particular AllocKind. The padding ensures that the
139 * GC thing array ends exactly at the end of the arena:
141 * <----------------------------------------------> = ArenaSize bytes
142 * +---------------+---------+----+----+-----+----+
143 * | header fields | padding | T0 | T1 | ... | Tn |
144 * +---------------+---------+----+----+-----+----+
145 * <-------------------------> = first thing offset
class alignas(ArenaSize) Arena {
  // Per-AllocKind lookup tables, defined out of line; accessed via
  // thingSize(), firstThingOffset() and thingsPerArena() below.
  static JS_PUBLIC_DATA const uint8_t ThingSizes[];
  static JS_PUBLIC_DATA const uint8_t FirstThingOffsets[];
  static JS_PUBLIC_DATA const uint8_t ThingsPerArena[];

  /*
   * The first span of free things in the arena. Most of these spans are
   * stored as offsets in free regions of the data array, and most operations
   * on FreeSpans take an Arena pointer for safety. However, the FreeSpans
   * used for allocation are stored here, at the start of an Arena, and use
   * their own address to grab the next span within the same Arena.
   */
  FreeSpan firstFreeSpan;

  /*
   * One of the AllocKind constants or AllocKind::LIMIT when the arena does
   * not contain any GC things and is on the list of empty arenas in the GC
   */

  /*
   * The zone that this Arena is contained within, when allocated. The offset
   * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
   * as is statically asserted below.
   */

  /*
   * Arena::next has two purposes: when unallocated, it points to the next
   * available Arena. When allocated, it points to the next Arena in the same
   * zone and with the same alloc kind.
   */

  // Bit-field layout constants for the packed flags word below.
  static const size_t ARENA_FLAG_BITS = 4;
  static const size_t DELAYED_MARKING_ARENA_BITS =
      JS_BITS_PER_WORD - ArenaShift;
      ARENA_FLAG_BITS + DELAYED_MARKING_ARENA_BITS <= JS_BITS_PER_WORD,
      "Not enough space to pack flags and nextDelayedMarkingArena_ pointer "
      "into a single word.");

  /*
   * True until the arena is swept for the first time.
   */
  size_t isNewlyCreated_ : 1;

  /*
   * When recursive marking uses too much stack we delay marking of arenas and
   * link them into a list for later processing. This uses the following fields.
   */
  size_t onDelayedMarkingList_ : 1;
  size_t hasDelayedBlackMarking_ : 1;
  size_t hasDelayedGrayMarking_ : 1;
  // The next delayed-marking arena, stored compactly as its address shifted
  // right by ArenaShift (see setNextDelayedMarkingArena()).
  size_t nextDelayedMarkingArena_ : DELAYED_MARKING_ARENA_BITS;

  /*
   * For arenas in zones other than the atoms zone, if non-null, points
   * to an ArenaCellSet that represents the set of cells in this arena
   * that are in the nursery's store buffer.
   */
  ArenaCellSet* bufferedCells_;

  /*
   * For arenas in the atoms zone, the starting index into zone atom
   * marking bitmaps (see AtomMarking.h) of the things in this zone.
   * Atoms never refer to nursery things, so no store buffer index is
   */
  size_t atomBitmapStart_;

  /*
   * The size of data should be |ArenaSize - offsetof(data)|, but the offset
   * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
   * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
   * obvious. This constant is stored in js/HeapAPI.h.
   */
  uint8_t data[ArenaSize - ArenaHeaderSize];

  // Set this arena up as allocated to |zoneArg| with alloc kind |kind|.
  void init(JS::Zone* zoneArg, AllocKind kind, const AutoLockGC& lock);
  // Sets |firstFreeSpan| to the Arena's entire valid range, and
  // also sets the next span stored at |firstFreeSpan.last| as empty.
  void setAsFullyUnused() {
    AllocKind kind = getAllocKind();
    firstFreeSpan.first = firstThingOffset(kind);
    firstFreeSpan.last = lastThingOffset(kind);
    // The free cell at |firstFreeSpan.last| will hold the (empty) next span.
    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
  // Initialize an arena to its unallocated state. For arenas that were
  // previously allocated for some zone, use release() instead.
  void setAsNotAllocated() {
    firstFreeSpan.initAsEmpty();

    // Poison zone pointer to highlight UAF on released arenas in crash data.
    AlwaysPoison(&zone, JS_FREED_ARENA_PATTERN, sizeof(zone),
                 MemCheckKind::MakeNoAccess);

    // AllocKind::LIMIT is not a valid kind, so allocated() reports false.
    allocKind = AllocKind::LIMIT;
    // Clear all delayed-marking state and the store-buffer cell set pointer.
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
    bufferedCells_ = nullptr;

    MOZ_ASSERT(!allocated());
  // Return an allocated arena to its unallocated state.
  inline void release(const AutoLockGC& lock);

  // The arena's address is its own address (arenas are ArenaSize-aligned;
  // see checkAddress()).
  uintptr_t address() const {
    return uintptr_t(this);

  inline void checkAddress() const;

  inline TenuredChunk* chunk() const;

  // An arena is allocated iff its alloc kind is valid; setAsNotAllocated()
  // stores AllocKind::LIMIT, which is not a valid kind.
  bool allocated() const {
    MOZ_ASSERT(IsAllocKind(AllocKind(allocKind)));
    return IsValidAllocKind(AllocKind(allocKind));

  AllocKind getAllocKind() const {
    MOZ_ASSERT(allocated());

  FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }

  // Per-AllocKind geometry, backed by the static lookup tables above.
  static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
  static size_t thingsPerArena(AllocKind kind) {
    return ThingsPerArena[size_t(kind)];

  static size_t thingsSpan(AllocKind kind) {
    return thingsPerArena(kind) * thingSize(kind);

  static size_t firstThingOffset(AllocKind kind) {
    return FirstThingOffsets[size_t(kind)];

  // The last thing starts one thing-size before the end of the arena.
  static size_t lastThingOffset(AllocKind kind) {
    return ArenaSize - thingSize(kind);

  // Convenience wrappers that use this arena's own alloc kind.
  size_t getThingSize() const { return thingSize(getAllocKind()); }
  size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
  size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
  size_t getFirstThingOffset() const {
    return firstThingOffset(getAllocKind());

  // Address range occupied by GC things in this arena.
  uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
  uintptr_t thingsEnd() const { return address() + ArenaSize; }
  bool isEmpty() const {
    // Arena is empty if its first span covers the whole arena.
    firstFreeSpan.checkSpan(this);
    AllocKind kind = getAllocKind();
    return firstFreeSpan.first == firstThingOffset(kind) &&
           firstFreeSpan.last == lastThingOffset(kind);

  // True if at least one cell in this arena is free.
  bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }
  // Count free things by walking the free-span list. |thingSize| must be this
  // arena's thing size.
  size_t numFreeThings(size_t thingSize) const {
    firstFreeSpan.checkSpan(this);
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      // A span covering offsets [first, last] contains
      // (last - first) / thingSize + 1 things.
      numFree += (span->last - span->first) / thingSize + 1;

  size_t countFreeCells() { return numFreeThings(getThingSize()); }
  size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }
  // True if |thing| (an absolute address within this arena) is currently
  // covered by one of the arena's free spans.
  bool inFreeList(uintptr_t thing) {
    uintptr_t base = address();
    const FreeSpan* span = &firstFreeSpan;
    // Spans are ordered by increasing address (see FreeSpan::checkSpan), so
    // scan them in order.
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      // If the thing comes before the current span, it's not free.
      if (thing < base + span->first) {

      // If we find it before the end of the span, it's free.
      if (thing <= base + span->last) {
353 static bool isAligned(uintptr_t thing
, size_t thingSize
) {
354 /* Things ends at the arena end. */
355 uintptr_t tailOffset
= ArenaSize
- (thing
& ArenaMask
);
356 return tailOffset
% thingSize
== 0;
  bool isNewlyCreated() const { return isNewlyCreated_; }

  bool onDelayedMarkingList() const { return onDelayedMarkingList_; }

  // Decode the packed arena-index field back into an Arena pointer.
  Arena* getNextDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return reinterpret_cast<Arena*>(nextDelayedMarkingArena_ << ArenaShift);

  // Put this arena on the delayed marking list, linking it to |arena|.
  void setNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(!onDelayedMarkingList_);
    MOZ_ASSERT(!hasDelayedBlackMarking_);
    MOZ_ASSERT(!hasDelayedGrayMarking_);
    MOZ_ASSERT(!nextDelayedMarkingArena_);
    onDelayedMarkingList_ = 1;
    // Store the arena address compactly as an arena index.
    nextDelayedMarkingArena_ = arena->address() >> ArenaShift;

  // Update the list link for an arena already on the delayed marking list.
  // |arena| may be null, which stores a zero (empty) link.
  void updateNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(onDelayedMarkingList_);
    nextDelayedMarkingArena_ = arena ? arena->address() >> ArenaShift : 0;

  // Whether marking of |color| cells in this arena has been delayed.
  bool hasDelayedMarking(MarkColor color) const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return color == MarkColor::Black ? hasDelayedBlackMarking_
                                     : hasDelayedGrayMarking_;

  bool hasAnyDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return hasDelayedBlackMarking_ || hasDelayedGrayMarking_;

  // Set or clear the delayed-marking flag for the given color.
  void setHasDelayedMarking(MarkColor color, bool value) {
    MOZ_ASSERT(onDelayedMarkingList_);
    if (color == MarkColor::Black) {
      hasDelayedBlackMarking_ = value;
      hasDelayedGrayMarking_ = value;

  // Take this arena off the delayed marking list and reset related state.
  void clearDelayedMarkingState() {
    MOZ_ASSERT(onDelayedMarkingList_);
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
  // Accessors for the store-buffer cell set / atom bitmap start members above.
  inline ArenaCellSet*& bufferedCells();
  inline size_t& atomBitmapStart();

  // Finalization for things of kind |thingKind|; defined out of line.
  template <typename T>
  size_t finalize(JS::GCContext* gcx, AllocKind thingKind, size_t thingSize);

  static void staticAsserts();
  static void checkLookupTables();

  void unmarkPreMarkedFreeCells();

  void arenaAllocatedDuringGC();

  void checkNoMarkedFreeCells();
  void checkAllCellsMarkedBlack();

#if defined(DEBUG) || defined(JS_GC_ZEAL)
  void checkNoMarkedCells();
438 static_assert(ArenaZoneOffset
== offsetof(Arena
, zone
),
439 "The hardcoded API zone offset must match the actual offset.");
441 static_assert(sizeof(Arena
) == ArenaSize
,
442 "ArenaSize must match the actual size of the Arena structure.");
445 offsetof(Arena
, data
) == ArenaHeaderSize
,
446 "ArenaHeaderSize must match the actual size of the header fields.");
// Checked variant of FreeSpan::getArenaUnchecked(): also validates that the
// resulting pointer is a plausible arena address.
inline Arena* FreeSpan::getArena() {
  Arena* arena = getArenaUnchecked();
  arena->checkAddress();
// Debug validation of a span: an empty span must have both offsets zero (see
// the FreeSpan class comment); a non-empty span must be a valid range, and
// any following span must start at least two things later in the arena.
inline void FreeSpan::checkSpan(const Arena* arena) const {
  MOZ_ASSERT(!first && !last);

  arena->checkAddress();
  checkRange(first, last, arena);

  // If there's a following span, it must have a higher address,
  // and the gap must be at least 2 * thingSize.
  const FreeSpan* next = nextSpanUnchecked(arena);
  checkRange(next->first, next->last, arena);
  size_t thingSize = arena->getThingSize();
  MOZ_ASSERT(last + 2 * thingSize <= next->first);
// Debug validation that [first, last] is a plausible free range for |arena|:
// within the arena's thing area and a whole number of things long.
inline void FreeSpan::checkRange(uintptr_t first, uintptr_t last,
                                 const Arena* arena) const {
  MOZ_ASSERT(first <= last);
  AllocKind thingKind = arena->getAllocKind();
  MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
  MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
  // The distance between the bounds must be a multiple of the thing size.
  MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
// Read the |colorBit| mark bit for |cell|.
MOZ_ALWAYS_INLINE bool MarkBitmap::markBit(const TenuredCell* cell,
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, colorBit, &word, &mask);

// A cell is marked at all if either its black or its gray/black bit is set.
MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedAny(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit) ||
         markBit(cell, ColorBit::GrayOrBlackBit);

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedBlack(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit);

// Gray is encoded as: gray/black bit set, black bit clear.
MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedGray(const TenuredCell* cell) {
  return !markBit(cell, ColorBit::BlackBit) &&
         markBit(cell, ColorBit::GrayOrBlackBit);
// The following methods that update the mark bits are not thread safe and must
// not be called in parallel with each other.
//
// They use separate read and write operations to avoid an unnecessarily strict
// atomic update on the marking bitmap.
//
// They may be called in parallel with read operations on the mark bitmap where
// there is no required ordering between the operations. This happens when gray
// unmarking occurs in parallel with background sweeping.
//
// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarked(const TenuredCell* cell,
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (color == MarkColor::Black) {
    // Plain (non-atomic) read-modify-write of the black bit.
    uintptr_t bits = *word;
    // We use getMarkWordAndMask to recalculate both mask and word as doing just
    // mask << color may overflow the mask.
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
    uintptr_t bits = *word;

MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarkedAtomic(const TenuredCell* cell,
  // This version of the method is safe in the face of concurrent writes to the
  // mark bitmap but may return false positives. The extra synchronisation
  // necessary to avoid this resulted in worse performance overall.
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (color == MarkColor::Black) {
    // We use getMarkWordAndMask to recalculate both mask and word as doing just
    // mask << color may overflow the mask.
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
// Unconditionally set the black bit for |cell| with a plain (non-atomic)
// read-modify-write; see the thread-safety comment above.
MOZ_ALWAYS_INLINE void MarkBitmap::markBlack(const TenuredCell* cell) {
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  uintptr_t bits = *word;

// Atomic variant of markBlack().
MOZ_ALWAYS_INLINE void MarkBitmap::markBlackAtomic(const TenuredCell* cell) {
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
// Copy the |colorBit| mark bit for |src| (read from the bitmap of the chunk
// that contains |src|) into this bitmap's bit for |dst|.
MOZ_ALWAYS_INLINE void MarkBitmap::copyMarkBit(TenuredCell* dst,
                                               const TenuredCell* src,
  // Locate the source bit in |src|'s own chunk bitmap.
  TenuredChunkBase* srcChunk = detail::GetCellChunkBase(src);
  MarkBitmapWord* srcWord;
  srcChunk->markBits.getMarkWordAndMask(src, colorBit, &srcWord, &srcMask);

  // Locate the destination bit in this bitmap.
  MarkBitmapWord* dstWord;
  getMarkWordAndMask(dst, colorBit, &dstWord, &dstMask);

  uintptr_t bits = *dstWord;

  if (*srcWord & srcMask) {
// Clear both the black and the gray/black mark bits for |cell|.
MOZ_ALWAYS_INLINE void MarkBitmap::unmark(const TenuredCell* cell) {
  MarkBitmapWord* word;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word = bits & ~mask;
  getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
  *word = bits & ~mask;
// Return a pointer to the first bitmap word covering |arena|'s cells.
inline MarkBitmapWord* MarkBitmap::arenaBits(Arena* arena) {
      ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
      "We assume that the part of the bitmap corresponding to the arena "
      "has the exact number of words so we do not need to deal with a word "
      "that covers bits from two arenas.");

  MarkBitmapWord* word;
  // The word holding the bit for the arena's first cell is the start of the
  // arena's run of bitmap words.
  getMarkWordAndMask(reinterpret_cast<TenuredCell*>(arena->address()),
                     ColorBit::BlackBit, &word, &unused);
/*
 * A chunk in the tenured heap. TenuredChunks contain arenas and associated data
 * structures (mark bitmap, delayed marking state).
 */
class TenuredChunk : public TenuredChunkBase {
  Arena arenas[ArenasPerChunk];

  friend class GCRuntime;
  friend class MarkingValidator;

  // Recover the TenuredChunk for |addr|.
  static TenuredChunk* fromAddress(uintptr_t addr) {
    return reinterpret_cast<TenuredChunk*>(addr);

  // Whether |addr|'s offset within its chunk lies in the usable region: past
  // the ChunkBase header for nursery chunks, past the arenas offset otherwise.
  static bool withinValidRange(uintptr_t addr) {
    uintptr_t offset = addr & ChunkMask;
    if (TenuredChunk::fromAddress(addr)->isNurseryChunk()) {
      return offset >= sizeof(ChunkBase) && offset < ChunkSize;
    return offset >= offsetof(TenuredChunk, arenas) && offset < ChunkSize;

  // Index of |arena| within this chunk's |arenas| array.
  static size_t arenaIndex(const Arena* arena) {
    uintptr_t addr = arena->address();
    MOZ_ASSERT(!TenuredChunk::fromAddress(addr)->isNurseryChunk());
    MOZ_ASSERT(withinValidRange(addr));
    uintptr_t offset = addr & ChunkMask;
    return (offset - offsetof(TenuredChunk, arenas)) >> ArenaShift;

  explicit TenuredChunk(JSRuntime* runtime) : TenuredChunkBase(runtime) {}

  // The chunk's base address; chunks are ChunkSize-aligned.
  uintptr_t address() const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    MOZ_ASSERT(!(addr & ChunkMask));

  // All arenas free: nothing in this chunk is allocated.
  bool unused() const { return info.numArenasFree == ArenasPerChunk; }

  bool hasAvailableArenas() const { return info.numArenasFree != 0; }

  // Nursery chunks have a store buffer; tenured chunks do not.
  bool isNurseryChunk() const { return storeBuffer; }
  // Arena allocation / release within this chunk (defined out of line).
  Arena* allocateArena(GCRuntime* gc, JS::Zone* zone, AllocKind kind,
                       const AutoLockGC& lock);

  void releaseArena(GCRuntime* gc, Arena* arena, const AutoLockGC& lock);
  void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);

  void decommitFreeArenas(GCRuntime* gc, const bool& cancel, AutoLockGC& lock);
  [[nodiscard]] bool decommitOneFreePage(GCRuntime* gc, size_t pageIndex,
  void decommitAllArenas();

  // This will decommit each unused not-already decommitted arena. It performs a
  // system call for each arena but is only used during OOM.
  void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);

  static void* allocate(GCRuntime* gc);
  static TenuredChunk* emplace(void* ptr, GCRuntime* gc,
                               bool allMemoryCommitted);

  /* Unlink and return the freeArenasHead. */
  Arena* fetchNextFreeArena(GCRuntime* gc);

  void verify() const {}

  void commitOnePage(GCRuntime* gc);

  void updateChunkListAfterAlloc(GCRuntime* gc, const AutoLockGC& lock);
  void updateChunkListAfterFree(GCRuntime* gc, size_t numArenasFree,
                                const AutoLockGC& lock);

  // Check if all arenas in a page are free.
  bool canDecommitPage(size_t pageIndex) const;

  // Check the arena from freeArenasList is located in a free page.
  // Unlike the isPageFree(size_t) version, this isPageFree(Arena*) also checks
  // that the following arenas from the freeArenasHead are located in the same
  // page, to avoid touching arenas that are mprotect'ed during compaction in
  bool isPageFree(const Arena* arena) const;

  // Get the page index of the arena.
  size_t pageIndex(const Arena* arena) const {
    return pageIndex(arenaIndex(arena));

  size_t pageIndex(size_t arenaIndex) const {
    return arenaIndex / ArenasPerPage;

  // First arena of the page with the given index.
  Arena* pageAddress(size_t pageIndex) {
    return &arenas[pageIndex * ArenasPerPage];
// Debug validation that this Arena's address is arena-aligned and lies within
// the valid region of a chunk.
inline void Arena::checkAddress() const {
  mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
  MOZ_ASSERT(!(addr & ArenaMask));
  MOZ_ASSERT(TenuredChunk::withinValidRange(addr));

// The chunk containing this arena, recovered from the arena's address.
inline TenuredChunk* Arena::chunk() const {
  return TenuredChunk::fromAddress(address());
// Cell header stored before all nursery cells.
struct alignas(gc::CellAlignBytes) NurseryCellHeader {
  // Store the AllocSite pointer with the trace kind in the lowest bits.
  // (An earlier comment said "zone pointer"; the zone is reached indirectly
  // via allocSite()->zone() — see zone() below.)
  const uintptr_t allocSiteAndTraceKind;

  // We only need to store a subset of trace kinds so this doesn't cover the
  static const uintptr_t TraceKindMask = 3;

  // Pack |site| and |kind| into a single word; |site| must be sufficiently
  // aligned that its low bits are free to hold the kind.
  static uintptr_t MakeValue(AllocSite* const site, JS::TraceKind kind) {
    MOZ_ASSERT(uintptr_t(kind) < TraceKindMask);
    MOZ_ASSERT((uintptr_t(site) & TraceKindMask) == 0);
    return uintptr_t(site) | uintptr_t(kind);

  inline NurseryCellHeader(AllocSite* site, JS::TraceKind kind)
      : allocSiteAndTraceKind(MakeValue(site, kind)) {}

  AllocSite* allocSite() const {
    return reinterpret_cast<AllocSite*>(allocSiteAndTraceKind & ~TraceKindMask);

  JS::Zone* zone() const { return allocSite()->zone(); }

  JS::TraceKind traceKind() const {
    return JS::TraceKind(allocSiteAndTraceKind & TraceKindMask);

  // Recover the header stored immediately before a nursery cell.
  static const NurseryCellHeader* from(const Cell* cell) {
    MOZ_ASSERT(IsInsideNursery(cell));
    return reinterpret_cast<const NurseryCellHeader*>(
        uintptr_t(cell) - sizeof(NurseryCellHeader));

// NOTE(review): MakeValue asserts uintptr_t(kind) < TraceKindMask, while the
// static_asserts below allow kinds equal to the mask — confirm which bound is
// intended.
static_assert(uintptr_t(JS::TraceKind::Object) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::String) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::BigInt) <=
              NurseryCellHeader::TraceKindMask);
794 // Utility functions meant to be called from an interactive debugger.
795 enum class MarkInfo
: int {
803 // For calling from gdb only: given a pointer that is either in the nursery
804 // (possibly pointing to a buffer, not necessarily a Cell) or a tenured Cell,
// return its mark color or NURSERY or UNKNOWN. UNKNOWN is only for non-Cell
806 // pointers, and means it is not in the nursery (so could be malloced or stack
808 MOZ_NEVER_INLINE MarkInfo
GetMarkInfo(void* vp
);
810 // Sample usage from gdb:
812 // (gdb) p $word = js::debug::GetMarkWordAddress(obj)
813 // $1 = (uintptr_t *) 0x7fa56d5fe360
814 // (gdb) p/x $mask = js::debug::GetMarkMask(obj, js::gc::GRAY)
816 // (gdb) watch *$word
817 // Hardware watchpoint 7: *$word
818 // (gdb) cond 7 *$word & $mask
821 // Note that this is *not* a watchpoint on a single bit. It is a watchpoint on
822 // the whole word, which will trigger whenever the word changes and the
823 // selected bit is set after the change.
825 // So if the bit changing is the desired one, this is exactly what you want.
826 // But if a different bit changes (either set or cleared), you may still stop
827 // execution if the $mask bit happened to already be set. gdb does not expose
828 // enough information to restrict the watchpoint to just a single bit.
830 // Return the address of the word containing the mark bits for the given cell,
831 // or nullptr if the cell is in the nursery.
832 MOZ_NEVER_INLINE
uintptr_t* GetMarkWordAddress(js::gc::Cell
* cell
);
834 // Return the mask for the given cell and color bit, or 0 if the cell is in the
836 MOZ_NEVER_INLINE
uintptr_t GetMarkMask(js::gc::Cell
* cell
, uint32_t colorBit
);
838 } /* namespace debug */
841 #endif /* gc_Heap_h */