/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Heap_h
#define gc_Heap_h

#include "mozilla/DebugOnly.h"

#include "gc/AllocKind.h"
#include "gc/Pretenuring.h"
#include "js/HeapAPI.h"
#include "js/TypeDecls.h"
#include "util/Poison.h"

namespace js {

class AutoLockGC;
class AutoLockGCBgAlloc;
class Nursery;

namespace gc {

class Arena;
class ArenaCellSet;
class ArenaList;
class GCRuntime;
class MarkingValidator;
class SortedArenaList;
class TenuredCell;

// Cells are aligned to CellAlignShift, so the largest tagged null pointer is:
const uintptr_t LargestTaggedNullCellPointer = (1 << CellAlignShift) - 1;
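// For instance, assuming a hypothetical CellAlignShift of 3 (8-byte cell
// alignment), this constant would be 0x7: the largest value that fits entirely
// in the alignment bits of a cell pointer.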

static_assert(ArenaSize % CellAlignBytes == 0,
              "Arena size must be a multiple of cell alignment");

/*
 * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
 * can take two forms.
 *
 * - In an empty span, |first| and |last| are both zero.
 *
 * - In a non-empty span, |first| is the address of the first free thing in the
 *   span, and |last| is the address of the last free thing in the span.
 *   Furthermore, the memory pointed to by |last| holds a FreeSpan structure
 *   that points to the next span (which may be empty); this works because
 *   sizeof(FreeSpan) is less than the smallest thingSize.
 */
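// An illustrative sketch of the non-empty form, with hypothetical numbers: in
// an arena whose things are 32 bytes each, a span covering two free things at
// offsets 0x40 and 0x60 from the arena start would store first == 0x40 and
// last == 0x60, and the memory at the arena address plus 0x60 would begin with
// the FreeSpan describing the following (possibly empty) span.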
class FreeSpan {
  friend class Arena;
  friend class ArenaCellIter;
  friend class ArenaFreeCellIter;

  uint16_t first;
  uint16_t last;

 public:
  // This inits just |first| and |last|; if the span is non-empty it doesn't
  // do anything with the next span stored at |last|.
  void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    checkRange(firstArg, lastArg, arena);
    first = firstArg;
    last = lastArg;
  }

  void initAsEmpty() {
    first = 0;
    last = 0;
  }

  // This sets |first| and |last|, and also sets the next span stored at
  // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
  // an empty span.)
  void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    initBounds(firstArg, lastArg, arena);
    FreeSpan* last = nextSpanUnchecked(arena);
    last->initAsEmpty();
    checkSpan(arena);
  }

  bool isEmpty() const { return !first; }

  Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
  inline Arena* getArena();

  static size_t offsetOfFirst() { return offsetof(FreeSpan, first); }

  static size_t offsetOfLast() { return offsetof(FreeSpan, last); }

  // Like nextSpan(), but no checking of the following span is done.
  FreeSpan* nextSpanUnchecked(const Arena* arena) const {
    MOZ_ASSERT(arena && !isEmpty());
    return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
  }

  const FreeSpan* nextSpan(const Arena* arena) const {
    checkSpan(arena);
    return nextSpanUnchecked(arena);
  }

  MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
    // Eschew the usual checks, because this might be the placeholder span.
    // If this is somehow an invalid, non-empty span, checkSpan() will catch it.
    Arena* arena = getArenaUnchecked();
    checkSpan(arena);
    uintptr_t thing = uintptr_t(arena) + first;
    if (first < last) {
      // We have space for at least two more things, so do a simple
      // bump-allocate.
      first += thingSize;
    } else if (MOZ_LIKELY(first)) {
      // The last space points to the next free span (which may be empty).
      const FreeSpan* next = nextSpan(arena);
      first = next->first;
      last = next->last;
    } else {
      return nullptr;  // The span is empty.
    }
    checkSpan(arena);
    DebugOnlyPoison(reinterpret_cast<void*>(thing),
                    JS_ALLOCATED_TENURED_PATTERN, thingSize,
                    MemCheckKind::MakeUndefined);
    return reinterpret_cast<TenuredCell*>(thing);
  }

  inline void checkSpan(const Arena* arena) const;
  inline void checkRange(uintptr_t first, uintptr_t last,
                         const Arena* arena) const;
};

/*
 * Arenas are the allocation units of the tenured heap in the GC. An arena
 * is 4kiB in size and 4kiB-aligned. It starts with several header fields
 * followed by some bytes of padding. The remainder of the arena is filled
 * with GC things of a particular AllocKind. The padding ensures that the
 * GC thing array ends exactly at the end of the arena:
 *
 * <----------------------------------------------> = ArenaSize bytes
 * +---------------+---------+----+----+-----+----+
 * | header fields | padding | T0 | T1 | ... | Tn |
 * +---------------+---------+----+----+-----+----+
 * <-------------------------> = first thing offset
 */
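// A sketch of the arithmetic the diagram implies: for a given AllocKind,
// firstThingOffset(kind) == ArenaSize - thingsPerArena(kind) * thingSize(kind),
// where thingsPerArena(kind) is the largest count of things that fits in
// ArenaSize - ArenaHeaderSize bytes; the padding absorbs the remainder. The
// authoritative values are the precomputed lookup tables in the class below.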
class alignas(ArenaSize) Arena {
  static JS_PUBLIC_DATA const uint8_t ThingSizes[];
  static JS_PUBLIC_DATA const uint8_t FirstThingOffsets[];
  static JS_PUBLIC_DATA const uint8_t ThingsPerArena[];
  /*
   * The first span of free things in the arena. Most of these spans are
   * stored as offsets in free regions of the data array, and most operations
   * on FreeSpans take an Arena pointer for safety. However, the FreeSpans
   * used for allocation are stored here, at the start of an Arena, and use
   * their own address to grab the next span within the same Arena.
   */
  FreeSpan firstFreeSpan;

 public:
  /*
   * One of the AllocKind constants or AllocKind::LIMIT when the arena does
   * not contain any GC things and is on the list of empty arenas in the GC
   * chunk.
   */
  AllocKind allocKind;

  /*
   * The zone that this Arena is contained within, when allocated. The offset
   * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
   * as is statically asserted below.
   */
  JS::Zone* zone;

  /*
   * Arena::next has two purposes: when unallocated, it points to the next
   * available Arena. When allocated, it points to the next Arena in the same
   * zone and with the same alloc kind.
   */
  Arena* next;

 private:
  static const size_t ARENA_FLAG_BITS = 4;
  static const size_t DELAYED_MARKING_ARENA_BITS =
      JS_BITS_PER_WORD - ArenaShift;
  static_assert(
      ARENA_FLAG_BITS + DELAYED_MARKING_ARENA_BITS <= JS_BITS_PER_WORD,
      "Not enough space to pack flags and nextDelayedMarkingArena_ pointer "
      "into a single word.");

  /*
   * True until the arena is swept for the first time.
   */
  size_t isNewlyCreated_ : 1;

  /*
   * When recursive marking uses too much stack we delay marking of arenas and
   * link them into a list for later processing. This uses the following fields.
   */
  size_t onDelayedMarkingList_ : 1;
  size_t hasDelayedBlackMarking_ : 1;
  size_t hasDelayedGrayMarking_ : 1;
  size_t nextDelayedMarkingArena_ : DELAYED_MARKING_ARENA_BITS;

  union {
    /*
     * For arenas in zones other than the atoms zone, if non-null, points
     * to an ArenaCellSet that represents the set of cells in this arena
     * that are in the nursery's store buffer.
     */
    ArenaCellSet* bufferedCells_;

    /*
     * For arenas in the atoms zone, the starting index into zone atom
     * marking bitmaps (see AtomMarking.h) of the things in this zone.
     * Atoms never refer to nursery things, so no store buffer index is
     * needed.
     */
    size_t atomBitmapStart_;
  };

 public:
  /*
   * The size of data should be |ArenaSize - offsetof(data)|, but the offset
   * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
   * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
   * obvious. This constant is stored in js/HeapAPI.h.
   */
  uint8_t data[ArenaSize - ArenaHeaderSize];

  void init(JS::Zone* zoneArg, AllocKind kind, const AutoLockGC& lock);

  // Sets |firstFreeSpan| to the Arena's entire valid range, and
  // also sets the next span stored at |firstFreeSpan.last| as empty.
  void setAsFullyUnused() {
    AllocKind kind = getAllocKind();
    firstFreeSpan.first = firstThingOffset(kind);
    firstFreeSpan.last = lastThingOffset(kind);
    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
    last->initAsEmpty();
  }

  // Initialize an arena to its unallocated state. For arenas that were
  // previously allocated for some zone, use release() instead.
  void setAsNotAllocated() {
    firstFreeSpan.initAsEmpty();

    // Poison zone pointer to highlight UAF on released arenas in crash data.
    AlwaysPoison(&zone, JS_FREED_ARENA_PATTERN, sizeof(zone),
                 MemCheckKind::MakeNoAccess);

    allocKind = AllocKind::LIMIT;
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
    bufferedCells_ = nullptr;

    MOZ_ASSERT(!allocated());
  }

  // Return an allocated arena to its unallocated state.
  inline void release(const AutoLockGC& lock);

  uintptr_t address() const {
    checkAddress();
    return uintptr_t(this);
  }

  inline void checkAddress() const;

  inline TenuredChunk* chunk() const;

  bool allocated() const {
    MOZ_ASSERT(IsAllocKind(AllocKind(allocKind)));
    return IsValidAllocKind(AllocKind(allocKind));
  }

  AllocKind getAllocKind() const {
    MOZ_ASSERT(allocated());
    return allocKind;
  }

  FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }

  static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
  static size_t thingsPerArena(AllocKind kind) {
    return ThingsPerArena[size_t(kind)];
  }
  static size_t thingsSpan(AllocKind kind) {
    return thingsPerArena(kind) * thingSize(kind);
  }
  static size_t firstThingOffset(AllocKind kind) {
    return FirstThingOffsets[size_t(kind)];
  }
  static size_t lastThingOffset(AllocKind kind) {
    return ArenaSize - thingSize(kind);
  }

  size_t getThingSize() const { return thingSize(getAllocKind()); }
  size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
  size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
  size_t getFirstThingOffset() const {
    return firstThingOffset(getAllocKind());
  }

  uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
  uintptr_t thingsEnd() const { return address() + ArenaSize; }

  bool isEmpty() const {
    // Arena is empty if its first span covers the whole arena.
    firstFreeSpan.checkSpan(this);
    AllocKind kind = getAllocKind();
    return firstFreeSpan.first == firstThingOffset(kind) &&
           firstFreeSpan.last == lastThingOffset(kind);
  }

  bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }

  size_t numFreeThings(size_t thingSize) const {
    firstFreeSpan.checkSpan(this);
    size_t numFree = 0;
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      numFree += (span->last - span->first) / thingSize + 1;
    }
    return numFree;
  }

  size_t countFreeCells() { return numFreeThings(getThingSize()); }
  size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }

#ifdef DEBUG
  bool inFreeList(uintptr_t thing) {
    uintptr_t base = address();
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      // If the thing comes before the current span, it's not free.
      if (thing < base + span->first) {
        return false;
      }

      // If we find it before the end of the span, it's free.
      if (thing <= base + span->last) {
        return true;
      }
    }
    return false;
  }
#endif

  static bool isAligned(uintptr_t thing, size_t thingSize) {
    /* Things end at the arena end. */
    uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
    return tailOffset % thingSize == 0;
  }

  bool isNewlyCreated() const { return isNewlyCreated_; }

  bool onDelayedMarkingList() const { return onDelayedMarkingList_; }

  Arena* getNextDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return reinterpret_cast<Arena*>(nextDelayedMarkingArena_ << ArenaShift);
  }

  void setNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(!onDelayedMarkingList_);
    MOZ_ASSERT(!hasDelayedBlackMarking_);
    MOZ_ASSERT(!hasDelayedGrayMarking_);
    MOZ_ASSERT(!nextDelayedMarkingArena_);
    onDelayedMarkingList_ = 1;
    if (arena) {
      nextDelayedMarkingArena_ = arena->address() >> ArenaShift;
    }
  }

  void updateNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(onDelayedMarkingList_);
    nextDelayedMarkingArena_ = arena ? arena->address() >> ArenaShift : 0;
  }

  bool hasDelayedMarking(MarkColor color) const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return color == MarkColor::Black ? hasDelayedBlackMarking_
                                     : hasDelayedGrayMarking_;
  }

  bool hasAnyDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return hasDelayedBlackMarking_ || hasDelayedGrayMarking_;
  }

  void setHasDelayedMarking(MarkColor color, bool value) {
    MOZ_ASSERT(onDelayedMarkingList_);
    if (color == MarkColor::Black) {
      hasDelayedBlackMarking_ = value;
    } else {
      hasDelayedGrayMarking_ = value;
    }
  }

  void clearDelayedMarkingState() {
    MOZ_ASSERT(onDelayedMarkingList_);
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
  }

  inline ArenaCellSet*& bufferedCells();
  inline size_t& atomBitmapStart();

  template <typename T>
  size_t finalize(JS::GCContext* gcx, AllocKind thingKind, size_t thingSize);

  static void staticAsserts();
  static void checkLookupTables();

  void unmarkAll();
  void unmarkPreMarkedFreeCells();

  void arenaAllocatedDuringGC();

#ifdef DEBUG
  void checkNoMarkedFreeCells();
  void checkAllCellsMarkedBlack();
#endif

#if defined(DEBUG) || defined(JS_GC_ZEAL)
  void checkNoMarkedCells();
#endif
};

static_assert(ArenaZoneOffset == offsetof(Arena, zone),
              "The hardcoded API zone offset must match the actual offset.");

static_assert(sizeof(Arena) == ArenaSize,
              "ArenaSize must match the actual size of the Arena structure.");

static_assert(
    offsetof(Arena, data) == ArenaHeaderSize,
    "ArenaHeaderSize must match the actual size of the header fields.");

inline Arena* FreeSpan::getArena() {
  Arena* arena = getArenaUnchecked();
  arena->checkAddress();
  return arena;
}

inline void FreeSpan::checkSpan(const Arena* arena) const {
#ifdef DEBUG
  if (!first) {
    MOZ_ASSERT(!first && !last);
    return;
  }

  arena->checkAddress();
  checkRange(first, last, arena);

  // If there's a following span, it must have a higher address,
  // and the gap must be at least 2 * thingSize.
  const FreeSpan* next = nextSpanUnchecked(arena);
  if (next->first) {
    checkRange(next->first, next->last, arena);
    size_t thingSize = arena->getThingSize();
    MOZ_ASSERT(last + 2 * thingSize <= next->first);
  }
#endif
}

inline void FreeSpan::checkRange(uintptr_t first, uintptr_t last,
                                 const Arena* arena) const {
#ifdef DEBUG
  MOZ_ASSERT(arena);
  MOZ_ASSERT(first <= last);
  AllocKind thingKind = arena->getAllocKind();
  MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
  MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
  MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
#endif
}

// Mark bitmap API:

MOZ_ALWAYS_INLINE bool MarkBitmap::markBit(const TenuredCell* cell,
                                           ColorBit colorBit) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, colorBit, &word, &mask);
  return *word & mask;
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedAny(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit) ||
         markBit(cell, ColorBit::GrayOrBlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedBlack(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedGray(const TenuredCell* cell) {
  return !markBit(cell, ColorBit::BlackBit) &&
         markBit(cell, ColorBit::GrayOrBlackBit);
}
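
// Summary sketch of the per-cell mark bit encoding implied by the predicates
// above (not a new invariant, just the cases spelled out):
//
//   BlackBit  GrayOrBlackBit  state
//   --------  --------------  --------
//      0            0         unmarked
//      0            1         gray
//      1            0         black
//      1            1         black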

// The following methods that update the mark bits are not thread safe and must
// not be called in parallel with each other.
//
// They use separate read and write operations to avoid an unnecessarily strict
// atomic update on the marking bitmap.
//
// They may be called in parallel with read operations on the mark bitmap where
// there is no required ordering between the operations. This happens when gray
// unmarking occurs in parallel with background sweeping.
//
// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarked(const TenuredCell* cell,
                                                  MarkColor color) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (*word & mask) {
    return false;
  }
  if (color == MarkColor::Black) {
    uintptr_t bits = *word;
    *word = bits | mask;
  } else {
    // We use getMarkWordAndMask to recalculate both mask and word as doing just
    // mask << color may overflow the mask.
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
    if (*word & mask) {
      return false;
    }
    uintptr_t bits = *word;
    *word = bits | mask;
  }
  return true;
}

MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarkedAtomic(const TenuredCell* cell,
                                                        MarkColor color) {
  // This version of the method is safe in the face of concurrent writes to the
  // mark bitmap but may return false positives. The extra synchronisation
  // necessary to avoid this resulted in worse performance overall.

  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (*word & mask) {
    return false;
  }
  if (color == MarkColor::Black) {
    *word |= mask;
  } else {
    // We use getMarkWordAndMask to recalculate both mask and word as doing just
    // mask << color may overflow the mask.
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
    if (*word & mask) {
      return false;
    }
    *word |= mask;
  }
  return true;
}

MOZ_ALWAYS_INLINE void MarkBitmap::markBlack(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  uintptr_t bits = *word;
  *word = bits | mask;
}

MOZ_ALWAYS_INLINE void MarkBitmap::markBlackAtomic(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word |= mask;
}

MOZ_ALWAYS_INLINE void MarkBitmap::copyMarkBit(TenuredCell* dst,
                                               const TenuredCell* src,
                                               ColorBit colorBit) {
  TenuredChunkBase* srcChunk = detail::GetCellChunkBase(src);
  MarkBitmapWord* srcWord;
  uintptr_t srcMask;
  srcChunk->markBits.getMarkWordAndMask(src, colorBit, &srcWord, &srcMask);

  MarkBitmapWord* dstWord;
  uintptr_t dstMask;
  getMarkWordAndMask(dst, colorBit, &dstWord, &dstMask);

  uintptr_t bits = *dstWord;
  bits &= ~dstMask;
  if (*srcWord & srcMask) {
    bits |= dstMask;
  }
  *dstWord = bits;
}

MOZ_ALWAYS_INLINE void MarkBitmap::unmark(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  uintptr_t bits;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  bits = *word;
  *word = bits & ~mask;
  getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
  bits = *word;
  *word = bits & ~mask;
}

inline MarkBitmapWord* MarkBitmap::arenaBits(Arena* arena) {
  static_assert(
      ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
      "We assume that the part of the bitmap corresponding to the arena "
      "has the exact number of words so we do not need to deal with a word "
      "that covers bits from two arenas.");

  MarkBitmapWord* word;
  uintptr_t unused;
  getMarkWordAndMask(reinterpret_cast<TenuredCell*>(arena->address()),
                     ColorBit::BlackBit, &word, &unused);
  return word;
}

/*
 * A chunk in the tenured heap. TenuredChunks contain arenas and associated data
 * structures (mark bitmap, delayed marking state).
 */
class TenuredChunk : public TenuredChunkBase {
  Arena arenas[ArenasPerChunk];

  friend class GCRuntime;
  friend class MarkingValidator;

 public:
  static TenuredChunk* fromAddress(uintptr_t addr) {
    addr &= ~ChunkMask;
    return reinterpret_cast<TenuredChunk*>(addr);
  }

  static bool withinValidRange(uintptr_t addr) {
    uintptr_t offset = addr & ChunkMask;
    if (TenuredChunk::fromAddress(addr)->isNurseryChunk()) {
      return offset >= sizeof(ChunkBase) && offset < ChunkSize;
    }
    return offset >= offsetof(TenuredChunk, arenas) && offset < ChunkSize;
  }

  static size_t arenaIndex(const Arena* arena) {
    uintptr_t addr = arena->address();
    MOZ_ASSERT(!TenuredChunk::fromAddress(addr)->isNurseryChunk());
    MOZ_ASSERT(withinValidRange(addr));
    uintptr_t offset = addr & ChunkMask;
    return (offset - offsetof(TenuredChunk, arenas)) >> ArenaShift;
  }

  explicit TenuredChunk(JSRuntime* runtime) : TenuredChunkBase(runtime) {}

  uintptr_t address() const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    MOZ_ASSERT(!(addr & ChunkMask));
    return addr;
  }

  bool unused() const { return info.numArenasFree == ArenasPerChunk; }

  bool hasAvailableArenas() const { return info.numArenasFree != 0; }

  bool isNurseryChunk() const { return storeBuffer; }

  Arena* allocateArena(GCRuntime* gc, JS::Zone* zone, AllocKind kind,
                       const AutoLockGC& lock);

  void releaseArena(GCRuntime* gc, Arena* arena, const AutoLockGC& lock);
  void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);

  void decommitFreeArenas(GCRuntime* gc, const bool& cancel, AutoLockGC& lock);
  [[nodiscard]] bool decommitOneFreePage(GCRuntime* gc, size_t pageIndex,
                                         AutoLockGC& lock);
  void decommitAllArenas();

  // This decommits each unused, not-already-decommitted arena. It performs a
  // system call for each arena, but it is only used during OOM handling.
  void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);

  static void* allocate(GCRuntime* gc);
  static TenuredChunk* emplace(void* ptr, GCRuntime* gc,
                               bool allMemoryCommitted);

  /* Unlink and return the freeArenasHead. */
  Arena* fetchNextFreeArena(GCRuntime* gc);

#ifdef DEBUG
  void verify() const;
#else
  void verify() const {}
#endif

 private:
  void commitOnePage(GCRuntime* gc);

  void updateChunkListAfterAlloc(GCRuntime* gc, const AutoLockGC& lock);
  void updateChunkListAfterFree(GCRuntime* gc, size_t numArenasFree,
                                const AutoLockGC& lock);

  // Check if all arenas in a page are free.
  bool canDecommitPage(size_t pageIndex) const;

  // Check whether the arena, taken from the freeArenasHead list, is located in
  // a free page. Unlike the isPageFree(size_t) version, this overload also
  // checks that the arenas following it on the freeArenasHead list are in the
  // same page, so that we never touch arenas that were mprotect'ed during
  // compaction in debug builds.
  bool isPageFree(const Arena* arena) const;

  // Get the page index of the arena.
  size_t pageIndex(const Arena* arena) const {
    return pageIndex(arenaIndex(arena));
  }
  size_t pageIndex(size_t arenaIndex) const {
    return arenaIndex / ArenasPerPage;
  }

  Arena* pageAddress(size_t pageIndex) {
    return &arenas[pageIndex * ArenasPerPage];
  }
};

inline void Arena::checkAddress() const {
  mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
  MOZ_ASSERT(addr);
  MOZ_ASSERT(!(addr & ArenaMask));
  MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
}

inline TenuredChunk* Arena::chunk() const {
  return TenuredChunk::fromAddress(address());
}

// Cell header stored before all nursery cells.
struct alignas(gc::CellAlignBytes) NurseryCellHeader {
  // Store the AllocSite pointer with the trace kind packed into the low bits.
  const uintptr_t allocSiteAndTraceKind;

  // We only need to store a subset of trace kinds so this doesn't cover the
  // full range.
  static const uintptr_t TraceKindMask = 3;

  static uintptr_t MakeValue(AllocSite* const site, JS::TraceKind kind) {
    MOZ_ASSERT(uintptr_t(kind) < TraceKindMask);
    MOZ_ASSERT((uintptr_t(site) & TraceKindMask) == 0);
    return uintptr_t(site) | uintptr_t(kind);
  }
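
  // A sketch of the packing, using a hypothetical site address: MakeValue()
  // asserts that the TraceKindMask bits of |site| are clear, so ORing the kind
  // into them is lossless. For example, a site at 0x7f00deadbee0 packed with a
  // kind whose numeric value is 2 yields 0x7f00deadbee2; allocSite() recovers
  // the pointer with & ~TraceKindMask and traceKind() recovers the kind with
  // & TraceKindMask. (The address and kind value here are illustrative only.)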

  inline NurseryCellHeader(AllocSite* site, JS::TraceKind kind)
      : allocSiteAndTraceKind(MakeValue(site, kind)) {}

  AllocSite* allocSite() const {
    return reinterpret_cast<AllocSite*>(allocSiteAndTraceKind & ~TraceKindMask);
  }

  JS::Zone* zone() const { return allocSite()->zone(); }

  JS::TraceKind traceKind() const {
    return JS::TraceKind(allocSiteAndTraceKind & TraceKindMask);
  }

  static const NurseryCellHeader* from(const Cell* cell) {
    MOZ_ASSERT(IsInsideNursery(cell));
    return reinterpret_cast<const NurseryCellHeader*>(
        uintptr_t(cell) - sizeof(NurseryCellHeader));
  }
};

static_assert(uintptr_t(JS::TraceKind::Object) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::String) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::BigInt) <=
              NurseryCellHeader::TraceKindMask);

} /* namespace gc */

namespace debug {

// Utility functions meant to be called from an interactive debugger.
enum class MarkInfo : int {
  BLACK = 0,
  GRAY = 1,
  UNMARKED = -1,
  NURSERY = -2,
  UNKNOWN = -3,
};

// For calling from gdb only: given a pointer that is either in the nursery
// (possibly pointing to a buffer, not necessarily a Cell) or a tenured Cell,
// return its mark color or NURSERY or UNKNOWN. UNKNOWN is only returned for
// non-Cell pointers, and means the pointer is not in the nursery (so it could
// be malloc'd memory, stack memory, or something else).
MOZ_NEVER_INLINE MarkInfo GetMarkInfo(void* vp);

// Sample usage from gdb:
//
//   (gdb) p $word = js::debug::GetMarkWordAddress(obj)
//   $1 = (uintptr_t *) 0x7fa56d5fe360
//   (gdb) p/x $mask = js::debug::GetMarkMask(obj, js::gc::GRAY)
//   $2 = 0x200000000
//   (gdb) watch *$word
//   Hardware watchpoint 7: *$word
//   (gdb) cond 7 *$word & $mask
//   (gdb) cont
//
// Note that this is *not* a watchpoint on a single bit. It is a watchpoint on
// the whole word, which will trigger whenever the word changes and the
// selected bit is set after the change.
//
// So if the bit changing is the desired one, this is exactly what you want.
// But if a different bit changes (either set or cleared), you may still stop
// execution if the $mask bit happened to already be set. gdb does not expose
// enough information to restrict the watchpoint to just a single bit.

// Return the address of the word containing the mark bits for the given cell,
// or nullptr if the cell is in the nursery.
MOZ_NEVER_INLINE uintptr_t* GetMarkWordAddress(js::gc::Cell* cell);

// Return the mask for the given cell and color bit, or 0 if the cell is in the
// nursery.
MOZ_NEVER_INLINE uintptr_t GetMarkMask(js::gc::Cell* cell, uint32_t colorBit);

} /* namespace debug */
} /* namespace js */

#endif /* gc_Heap_h */