1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
10 #include <type_traits> // std::true_type
12 #include "NamespaceImports.h"
15 #include "gc/GCContext.h"
16 #include "gc/StoreBuffer.h"
17 #include "js/ComparisonOperators.h" // JS::detail::DefineComparisonOps
18 #include "js/experimental/TypedData.h" // js::EnableIfABOVType
19 #include "js/HeapAPI.h"
21 #include "js/RootingAPI.h"
23 #include "util/Poison.h"
28 * Several kinds of barrier are necessary to allow the GC to function correctly.
29 * These are triggered by reading or writing to GC pointers in the heap and
30 * serve to tell the collector about changes to the graph of reachable GC
33 * Since it would be awkward to change every write to memory into a function
34 * call, this file contains a bunch of C++ classes and templates that use
35 * operator overloading to take care of barriers automatically. In most cases,
36 * all that's necessary is to replace:
42 * HeapPtr<Type> field;
44 * All heap-based GC pointers and tagged pointers must use one of these classes,
45 * except in a couple of exceptional cases.
47 * These classes are designed to be used by the internals of the JS engine.
48 * Barriers designed to be used externally are provided in js/RootingAPI.h.
53 * This file implements the following concrete classes:
55 * HeapPtr General wrapper for heap-based pointers that provides pre- and
56 * post-write barriers. Most clients should use this.
58 * GCPtr An optimisation of HeapPtr for objects which are only destroyed
59 * by GC finalization (this rules out use in Vector, for example).
61 * PreBarriered Provides a pre-barrier but not a post-barrier. Necessary when
62 * generational GC updates are handled manually, e.g. for hash
63 * table keys that don't use StableCellHasher.
65 * HeapSlot Provides pre and post-barriers, optimised for use in JSObject
68 * WeakHeapPtr Provides read and post-write barriers, for use with weak
71 * UnsafeBarePtr Provides no barriers. Don't add new uses of this, or only if
72 * you really know what you are doing.
74 * The following classes are implemented in js/RootingAPI.h (in the JS
77 * Heap General wrapper for external clients. Like HeapPtr but also
78 * handles cycle collector concerns. Most external clients should
81 * Heap::Tenured Like Heap but doesn't allow nursery pointers. Allows storing
82 * flags in unused lower bits of the pointer.
87 * Answer the following questions to decide which barrier class is right for
90 * Is your code part of the JS engine?
91 * Yes, it's internal =>
92 * Is your pointer weak or strong?
94 * Do you want automatic handling of nursery pointers?
96 * Can your object be destroyed outside of a GC?
97 * Yes => Use HeapPtr<T>
98 * No => Use GCPtr<T> (optimization)
99 * No, I'll do this myself =>
100 * Do you want pre-barriers so incremental marking works?
101 * Yes, of course => Use PreBarriered<T>
102 * No, and I'll fix all the bugs myself => Use UnsafeBarePtr<T>
103 * Weak => Use WeakHeapPtr<T>
104 * No, it's external =>
105 * Can your pointer refer to nursery objects?
106 * Yes => Use JS::Heap<T>
107 * Never => Use JS::Heap::Tenured<T> (optimization)
109 * If in doubt, use HeapPtr<T>.
114 * A write barrier is a mechanism used by incremental or generational GCs to
115 * ensure that every value that needs to be marked is marked. In general, the
116 * write barrier should be invoked whenever a write can cause the set of things
117 * traced through by the GC to change. This includes:
119 * - writes to object properties
120 * - writes to array slots
121 * - writes to fields like JSObject::shape_ that we trace through
122 * - writes to fields in private data
123 * - writes to non-markable fields like JSObject::private that point to
126 * The last category is the trickiest. Even though the private pointer does not
127 * point to a GC thing, changing the private pointer may change the set of
128 * objects that are traced by the GC. Therefore it needs a write barrier.
130 * Every barriered write should have the following form:
133 * obj->field = value; // do the actual write
136 * The pre-barrier is used for incremental GC and the post-barrier is for
142 * To understand the pre-barrier, let's consider how incremental GC works. The
143 * GC itself is divided into "slices". Between each slice, JS code is allowed to
144 * run. Each slice should be short so that the user doesn't notice the
145 * interruptions. In our GC, the structure of the slices is as follows:
147 * 1. ... JS work, which leads to a request to do GC ...
148 * 2. [first GC slice, which performs all root marking and (maybe) more marking]
149 * 3. ... more JS work is allowed to run ...
150 * 4. [GC mark slice, which runs entirely in
151 * GCRuntime::markUntilBudgetExhausted]
152 * 5. ... more JS work ...
153 * 6. [GC mark slice, which runs entirely in
154 * GCRuntime::markUntilBudgetExhausted]
155 * 7. ... more JS work ...
156 * 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
157 * 9. ... JS continues uninterrupted now that the GC is finished ...
159 * Of course, there may be a different number of slices depending on how much
160 * marking is to be done.
162 * The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
163 * might change the heap in a way that causes the GC to collect an object that
164 * is actually reachable. The write barrier prevents this from happening. We use
165 * a variant of incremental GC called "snapshot at the beginning." This approach
166 * guarantees the invariant that if an object is reachable in step 2, then we
167 * will mark it eventually. The name comes from the idea that we take a
168 * theoretical "snapshot" of all reachable objects in step 2; all objects in
169 * that snapshot should eventually be marked. (Note that the write barrier
170 * verifier code takes an actual snapshot.)
172 * The basic correctness invariant of a snapshot-at-the-beginning collector is
173 * that any object reachable at the end of the GC (step 9) must either:
174 * (1) have been reachable at the beginning (step 2) and thus in the snapshot
175 * (2) or must have been newly allocated, in steps 3, 5, or 7.
176 * To deal with case (2), any objects allocated during an incremental GC are
177 * automatically marked black.
179 * This strategy is actually somewhat conservative: if an object becomes
180 * unreachable between steps 2 and 8, it would be safe to collect it. We won't,
181 * mainly for simplicity. (Also, note that the snapshot is entirely
182 * theoretical. We don't actually do anything special in step 2 that we wouldn't
183 * do in a non-incremental GC.)
185 * It's the pre-barrier's job to maintain the snapshot invariant. Consider the
186 * write "obj->field = value". Let the prior value of obj->field be
187 * value0. Since it's possible that value0 may have been what obj->field
188 * contained in step 2, when the snapshot was taken, the barrier marks
189 * value0. Note that it only does this if we're in the middle of an incremental
190 * GC. Since this is rare, the cost of the write barrier is usually just an
193 * In practice, we implement the pre-barrier differently based on the type of
194 * value0. E.g., see JSObject::preWriteBarrier, which is used if obj->field is
195 * a JSObject*. It takes value0 as a parameter.
200 * For generational GC, we want to be able to quickly collect the nursery in a
201 * minor collection. Part of the way this is achieved is to only mark the
202 * nursery itself; tenured things, which may form the majority of the heap, are
203 * not traced through or marked. This leads to the problem of what to do about
204 * tenured objects that have pointers into the nursery: if such things are not
205 * marked, they may be discarded while there are still live objects which
206 * reference them. The solution is to maintain information about these pointers,
207 * and mark their targets when we start a minor collection.
209 * The pointers can be thought of as edges in an object graph, and the set of
210 * edges from the tenured generation into the nursery is known as the remembered
211 * set. Post barriers are used to track this remembered set.
213 * Whenever a slot which could contain such a pointer is written, we check
214 * whether the pointed-to thing is in the nursery (if storeBuffer() returns a
215 * buffer). If so we add the cell into the store buffer, which is the
216 * collector's representation of the remembered set. This means that when we
217 * come to do a minor collection we can examine the contents of the store buffer
218 * and mark any edge targets that are in the nursery.
223 * Weak pointer read barrier
224 * -------------------------
226 * Weak pointers must have a read barrier to prevent the referent from being
227 * collected if it is read after the start of an incremental GC.
229 * The problem happens when, during an incremental GC, some code reads a weak
230 * pointer and writes it somewhere on the heap that has been marked black in a
231 * previous slice. Since the weak pointer will not otherwise be marked and will
232 * be swept and finalized in the last slice, this will leave the pointer just
233 * written dangling after the GC. To solve this, we immediately mark black all
234 * weak pointers that get read between slices so that it is safe to store them
235 * in an already marked part of the heap, e.g. in Rooted.
237 * Cycle collector read barrier
238 * ----------------------------
240 * Heap pointers external to the engine may be marked gray. The JS API has an
241 * invariant that no gray pointers may be passed, and this is maintained by a read
242 * barrier that calls ExposeGCThingToActiveJS on such pointers. This is
243 * implemented by JS::Heap<T> in js/RootingAPI.h.
245 * Implementation Details
246 * ======================
248 * One additional note: not all object writes need to be pre-barriered. Writes
249 * to newly allocated objects do not need a pre-barrier. In these cases, we use
250 * the "obj->field.init(value)" method instead of "obj->field = value". We use
251 * the init naming idiom in many places to signify that a field is being
252 * assigned for the first time.
254 * This file implements the following hierarchy of classes:
256 * BarrieredBase base class of all barriers
258 * | WriteBarriered base class which provides common write operations
260 * | | | | PreBarriered provides pre-barriers only
262 * | | | GCPtr provides pre- and post-barriers
264 * | | HeapPtr provides pre- and post-barriers; is relocatable
265 * | | and deletable for use inside C++ managed memory
267 * | HeapSlot similar to GCPtr, but tailored to slots storage
269 * ReadBarriered base class which provides common read operations
271 * WeakHeapPtr provides read barriers only
274 * The implementation of the barrier logic is implemented in the
275 * Cell/TenuredCell base classes, which are called via:
277 * WriteBarriered<T>::pre
278 * -> InternalBarrierMethods<T*>::preBarrier
279 * -> Cell::preWriteBarrier
280 * -> InternalBarrierMethods<Value>::preBarrier
281 * -> InternalBarrierMethods<jsid>::preBarrier
282 * -> InternalBarrierMethods<T*>::preBarrier
283 * -> Cell::preWriteBarrier
285 * GCPtr<T>::post and HeapPtr<T>::post
286 * -> InternalBarrierMethods<T*>::postBarrier
287 * -> gc::PostWriteBarrierImpl
288 * -> InternalBarrierMethods<Value>::postBarrier
289 * -> StoreBuffer::put
291 * Barriers for use outside of the JS engine call into the same barrier
292 * implementations at InternalBarrierMethods<T>::post via an indirect call to
293 * Heap(.+)PostWriteBarrier.
295 * These classes are designed to be used to wrap GC thing pointers or values that
296 * act like them (i.e. JS::Value and jsid). It is possible to use them for
297 * other types by supplying the necessary barrier implementations but this
298 * is not usually necessary and should be done with caution.
307 inline void ValueReadBarrier(const Value
& v
) {
308 MOZ_ASSERT(v
.isGCThing());
309 ReadBarrierImpl(v
.toGCThing());
312 inline void ValuePreWriteBarrier(const Value
& v
) {
313 MOZ_ASSERT(v
.isGCThing());
314 PreWriteBarrierImpl(v
.toGCThing());
317 inline void IdPreWriteBarrier(jsid id
) {
318 MOZ_ASSERT(id
.isGCThing());
319 PreWriteBarrierImpl(&id
.toGCThing()->asTenured());
322 inline void CellPtrPreWriteBarrier(JS::GCCellPtr thing
) {
324 PreWriteBarrierImpl(thing
.asCell());
327 inline void WasmAnyRefPreWriteBarrier(const wasm::AnyRef
& v
) {
328 MOZ_ASSERT(v
.isGCThing());
329 PreWriteBarrierImpl(v
.toGCThing());
336 bool CurrentThreadIsTouchingGrayThings();
338 bool IsMarkedBlack(JSObject
* obj
);
// Primary template: intentionally empty. The specializations below supply
// the barrier implementations for each supported GC reference type
// (T* to a GC thing, JS::Value, jsid, and ArrayBufferOrView subclasses).
template <typename T, typename Enable = void>
struct InternalBarrierMethods {};
345 template <typename T
>
346 struct InternalBarrierMethods
<T
*> {
347 static_assert(std::is_base_of_v
<gc::Cell
, T
>, "Expected a GC thing type");
349 static bool isMarkable(const T
* v
) { return v
!= nullptr; }
351 static void preBarrier(T
* v
) { gc::PreWriteBarrier(v
); }
353 static void postBarrier(T
** vp
, T
* prev
, T
* next
) {
354 gc::PostWriteBarrier(vp
, prev
, next
);
357 static void readBarrier(T
* v
) { gc::ReadBarrier(v
); }
360 static void assertThingIsNotGray(T
* v
) { return T::assertThingIsNotGray(v
); }
365 MOZ_ALWAYS_INLINE
void ValuePostWriteBarrier(Value
* vp
, const Value
& prev
,
367 MOZ_ASSERT(!CurrentThreadIsIonCompiling());
370 // If the target needs an entry, add it.
371 js::gc::StoreBuffer
* sb
;
372 if (next
.isGCThing() && (sb
= next
.toGCThing()->storeBuffer())) {
373 // If we know that the prev has already inserted an entry, we can
374 // skip doing the lookup to add the new entry. Note that we cannot
375 // safely assert the presence of the entry because it may have been
376 // added via a different store buffer.
377 if (prev
.isGCThing() && prev
.toGCThing()->storeBuffer()) {
383 // Remove the prev entry if the new value does not need it.
384 if (prev
.isGCThing() && (sb
= prev
.toGCThing()->storeBuffer())) {
391 struct InternalBarrierMethods
<Value
> {
392 static bool isMarkable(const Value
& v
) { return v
.isGCThing(); }
394 static void preBarrier(const Value
& v
) {
396 gc::ValuePreWriteBarrier(v
);
400 static MOZ_ALWAYS_INLINE
void postBarrier(Value
* vp
, const Value
& prev
,
402 gc::ValuePostWriteBarrier(vp
, prev
, next
);
405 static void readBarrier(const Value
& v
) {
407 gc::ValueReadBarrier(v
);
412 static void assertThingIsNotGray(const Value
& v
) {
413 JS::AssertValueIsNotGray(v
);
419 struct InternalBarrierMethods
<jsid
> {
420 static bool isMarkable(jsid id
) { return id
.isGCThing(); }
421 static void preBarrier(jsid id
) {
422 if (id
.isGCThing()) {
423 gc::IdPreWriteBarrier(id
);
426 static void postBarrier(jsid
* idp
, jsid prev
, jsid next
) {}
428 static void assertThingIsNotGray(jsid id
) { JS::AssertIdIsNotGray(id
); }
432 // Specialization for JS::ArrayBufferOrView subclasses.
433 template <typename T
>
434 struct InternalBarrierMethods
<T
, EnableIfABOVType
<T
>> {
435 using BM
= BarrierMethods
<T
>;
437 static bool isMarkable(const T
& thing
) { return bool(thing
); }
438 static void preBarrier(const T
& thing
) {
439 gc::PreWriteBarrier(thing
.asObjectUnbarriered());
441 static void postBarrier(T
* tp
, const T
& prev
, const T
& next
) {
442 BM::postWriteBarrier(tp
, prev
, next
);
444 static void readBarrier(const T
& thing
) { BM::readBarrier(thing
); }
446 static void assertThingIsNotGray(const T
& thing
) {
447 JSObject
* obj
= thing
.asObjectUnbarriered();
449 JS::AssertValueIsNotGray(JS::ObjectValue(*obj
));
455 template <typename T
>
456 static inline void AssertTargetIsNotGray(const T
& v
) {
458 if (!CurrentThreadIsTouchingGrayThings()) {
459 InternalBarrierMethods
<T
>::assertThingIsNotGray(v
);
464 // Base class of all barrier types.
466 // This is marked non-memmovable since post barriers added by derived classes
467 // can add pointers to class instances to the store buffer.
468 template <typename T
>
469 class MOZ_NON_MEMMOVABLE BarrieredBase
{
471 // BarrieredBase is not directly instantiable.
472 explicit BarrieredBase(const T
& v
) : value(v
) {}
474 // BarrieredBase subclasses cannot be copy constructed by default.
475 BarrieredBase(const BarrieredBase
<T
>& other
) = default;
477 // Storage for all barrier classes. |value| must be a GC thing reference
478 // type: either a direct pointer to a GC thing or a supported tagged
479 // pointer that can reference GC things, such as JS::Value or jsid. Nested
480 // barrier types are NOT supported. See assertTypeConstraints.
484 using ElementType
= T
;
486 // Note: this is public because C++ cannot friend to a specific template
487 // instantiation. Friending to the generic template leads to a number of
488 // unintended consequences, including template resolution ambiguity and a
489 // circular dependency with Tracing.h.
490 T
* unbarrieredAddress() const { return const_cast<T
*>(&value
); }
493 // Base class for barriered pointer types that intercept only writes.
495 class WriteBarriered
: public BarrieredBase
<T
>,
496 public WrappedPtrOperations
<T
, WriteBarriered
<T
>> {
498 using BarrieredBase
<T
>::value
;
500 // WriteBarriered is not directly instantiable.
501 explicit WriteBarriered(const T
& v
) : BarrieredBase
<T
>(v
) {}
504 DECLARE_POINTER_CONSTREF_OPS(T
);
506 // Use this if the automatic coercion to T isn't working.
507 const T
& get() const { return this->value
; }
509 // Use this if you want to change the value without invoking barriers.
510 // Obviously this is dangerous unless you know the barrier is not needed.
511 void unbarrieredSet(const T
& v
) { this->value
= v
; }
513 // For users who need to manually barrier the raw types.
514 static void preWriteBarrier(const T
& v
) {
515 InternalBarrierMethods
<T
>::preBarrier(v
);
519 void pre() { InternalBarrierMethods
<T
>::preBarrier(this->value
); }
520 MOZ_ALWAYS_INLINE
void post(const T
& prev
, const T
& next
) {
521 InternalBarrierMethods
<T
>::postBarrier(&this->value
, prev
, next
);
// Declares copy-assignment (via DECLARE_POINTER_ASSIGN_OPS) plus a move
// assignment that transfers the value out of |other| via release(), so the
// defunct edge does not fire barriers when |other| is later destroyed.
#define DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(Wrapper, T) \
  DECLARE_POINTER_ASSIGN_OPS(Wrapper, T)                \
  Wrapper<T>& operator=(Wrapper<T>&& other) noexcept {  \
    setUnchecked(other.release());                      \
    return *this;                                       \
  }
533 * PreBarriered only automatically handles pre-barriers. Post-barriers must be
534 * manually implemented when using this class. GCPtr and HeapPtr should be used
535 * in all cases that do not require explicit low-level control of moving
538 * This class is useful for example for HashMap keys where automatically
539 * updating a moved nursery pointer would break the hash table.
542 class PreBarriered
: public WriteBarriered
<T
> {
544 PreBarriered() : WriteBarriered
<T
>(JS::SafelyInitialized
<T
>::create()) {}
546 * Allow implicit construction for use in generic contexts.
548 MOZ_IMPLICIT
PreBarriered(const T
& v
) : WriteBarriered
<T
>(v
) {}
550 explicit PreBarriered(const PreBarriered
<T
>& other
)
551 : WriteBarriered
<T
>(other
.value
) {}
553 PreBarriered(PreBarriered
<T
>&& other
) noexcept
554 : WriteBarriered
<T
>(other
.release()) {}
556 ~PreBarriered() { this->pre(); }
558 void init(const T
& v
) { this->value
= v
; }
560 /* Use to set the pointer to nullptr. */
561 void clear() { set(JS::SafelyInitialized
<T
>::create()); }
563 DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(PreBarriered
, T
);
565 void set(const T
& v
) {
566 AssertTargetIsNotGray(v
);
571 void setUnchecked(const T
& v
) {
578 this->value
= JS::SafelyInitialized
<T
>::create();
585 namespace JS::detail
{
586 template <typename T
>
587 struct DefineComparisonOps
<js::PreBarriered
<T
>> : std::true_type
{
588 static const T
& get(const js::PreBarriered
<T
>& v
) { return v
.get(); }
590 } // namespace JS::detail
595 * A pre- and post-barriered heap pointer, for use inside the JS engine.
597 * It must only be stored in memory that has GC lifetime. GCPtr must not be
598 * used in contexts where it may be implicitly moved or deleted, e.g. most
601 * The post-barriers implemented by this class are faster than those
602 * implemented by js::HeapPtr<T> or JS::Heap<T> at the cost of not
603 * automatically handling deletion or movement.
606 class GCPtr
: public WriteBarriered
<T
> {
608 GCPtr() : WriteBarriered
<T
>(JS::SafelyInitialized
<T
>::create()) {}
610 explicit GCPtr(const T
& v
) : WriteBarriered
<T
>(v
) {
611 this->post(JS::SafelyInitialized
<T
>::create(), v
);
614 explicit GCPtr(const GCPtr
<T
>& v
) : WriteBarriered
<T
>(v
) {
615 this->post(JS::SafelyInitialized
<T
>::create(), v
);
620 // No barriers are necessary as this only happens when the GC is sweeping.
622 // If this assertion fails you may need to make the containing object use a
623 // HeapPtr instead, as this can be deleted from outside of GC.
624 MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing());
626 Poison(this, JS_FREED_HEAP_PTR_PATTERN
, sizeof(*this),
627 MemCheckKind::MakeNoAccess
);
632 * Unlike HeapPtr<T>, GCPtr<T> must be managed with GC lifetimes.
633 * Specifically, the memory used by the pointer itself must be live until
634 * at least the next minor GC. For that reason, move semantics are invalid
635 * and are deleted here. Please note that not all containers support move
636 * semantics, so this does not completely prevent invalid uses.
638 GCPtr(GCPtr
<T
>&&) = delete;
639 GCPtr
<T
>& operator=(GCPtr
<T
>&&) = delete;
641 void init(const T
& v
) {
642 AssertTargetIsNotGray(v
);
644 this->post(JS::SafelyInitialized
<T
>::create(), v
);
647 DECLARE_POINTER_ASSIGN_OPS(GCPtr
, T
);
649 void set(const T
& v
) {
650 AssertTargetIsNotGray(v
);
655 void setUnchecked(const T
& v
) {
659 this->post(tmp
, this->value
);
665 namespace JS::detail
{
666 template <typename T
>
667 struct DefineComparisonOps
<js::GCPtr
<T
>> : std::true_type
{
668 static const T
& get(const js::GCPtr
<T
>& v
) { return v
.get(); }
670 } // namespace JS::detail
675 * A pre- and post-barriered heap pointer, for use inside the JS engine. These
676 * heap pointers can be stored in C++ containers like GCVector and GCHashMap.
678 * The GC sometimes keeps pointers to pointers to GC things --- for example, to
679 * track references into the nursery. However, C++ containers like GCVector and
680 * GCHashMap usually reserve the right to relocate their elements any time
681 * they're modified, invalidating all pointers to the elements. HeapPtr
682 * has a move constructor which knows how to keep the GC up to date if it is
683 * moved to a new location.
685 * However, because of this additional communication with the GC, HeapPtr
686 * is somewhat slower, so it should only be used in contexts where this ability
689 * Obviously, JSObjects, JSStrings, and the like get tenured and compacted, so
690 * whatever pointers they contain get relocated, in the sense used here.
691 * However, since the GC itself is moving those values, it takes care of its
692 * internal pointers to those pointers itself. HeapPtr is only necessary
693 * when the relocation would otherwise occur without the GC's knowledge.
696 class HeapPtr
: public WriteBarriered
<T
> {
698 HeapPtr() : WriteBarriered
<T
>(JS::SafelyInitialized
<T
>::create()) {}
700 // Implicitly adding barriers is a reasonable default.
701 MOZ_IMPLICIT
HeapPtr(const T
& v
) : WriteBarriered
<T
>(v
) {
702 this->post(JS::SafelyInitialized
<T
>::create(), this->value
);
705 MOZ_IMPLICIT
HeapPtr(const HeapPtr
<T
>& other
) : WriteBarriered
<T
>(other
) {
706 this->post(JS::SafelyInitialized
<T
>::create(), this->value
);
709 HeapPtr(HeapPtr
<T
>&& other
) noexcept
: WriteBarriered
<T
>(other
.release()) {
710 this->post(JS::SafelyInitialized
<T
>::create(), this->value
);
715 this->post(this->value
, JS::SafelyInitialized
<T
>::create());
718 void init(const T
& v
) {
719 MOZ_ASSERT(this->value
== JS::SafelyInitialized
<T
>::create());
720 AssertTargetIsNotGray(v
);
722 this->post(JS::SafelyInitialized
<T
>::create(), this->value
);
725 DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(HeapPtr
, T
);
727 void set(const T
& v
) {
728 AssertTargetIsNotGray(v
);
732 /* Make this friend so it can access pre() and post(). */
733 template <class T1
, class T2
>
734 friend inline void BarrieredSetPair(Zone
* zone
, HeapPtr
<T1
*>& v1
, T1
* val1
,
735 HeapPtr
<T2
*>& v2
, T2
* val2
);
738 void setUnchecked(const T
& v
) {
743 void postBarrieredSet(const T
& v
) {
746 this->post(tmp
, this->value
);
751 postBarrieredSet(JS::SafelyInitialized
<T
>::create());
757 * A pre-barriered heap pointer, for use inside the JS engine.
759 * Similar to GCPtr, but used for a pointer to a malloc-allocated structure
760 * containing GC thing pointers.
762 * It must only be stored in memory that has GC lifetime. It must not be used in
763 * contexts where it may be implicitly moved or deleted, e.g. most containers.
765 * A post-barrier is unnecessary since malloc-allocated structures cannot be in
769 class GCStructPtr
: public BarrieredBase
<T
> {
771 // This is sometimes used to hold tagged pointers.
772 static constexpr uintptr_t MaxTaggedPointer
= 0x2;
774 GCStructPtr() : BarrieredBase
<T
>(JS::SafelyInitialized
<T
>::create()) {}
776 // Implicitly adding barriers is a reasonable default.
777 MOZ_IMPLICIT
GCStructPtr(const T
& v
) : BarrieredBase
<T
>(v
) {}
779 GCStructPtr(const GCStructPtr
<T
>& other
) : BarrieredBase
<T
>(other
) {}
781 GCStructPtr(GCStructPtr
<T
>&& other
) noexcept
782 : BarrieredBase
<T
>(other
.release()) {}
785 // No barriers are necessary as this only happens when the GC is sweeping.
786 MOZ_ASSERT_IF(isTraceable(),
787 CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing());
790 void init(const T
& v
) {
791 MOZ_ASSERT(this->get() == JS::SafelyInitialized
<T
>());
792 AssertTargetIsNotGray(v
);
796 void set(JS::Zone
* zone
, const T
& v
) {
801 T
get() const { return this->value
; }
802 operator T() const { return get(); }
803 T
operator->() const { return get(); }
806 bool isTraceable() const { return uintptr_t(get()) > MaxTaggedPointer
; }
808 void pre(JS::Zone
* zone
) {
810 PreWriteBarrier(zone
, get());
817 namespace JS::detail
{
818 template <typename T
>
819 struct DefineComparisonOps
<js::HeapPtr
<T
>> : std::true_type
{
820 static const T
& get(const js::HeapPtr
<T
>& v
) { return v
.get(); }
822 } // namespace JS::detail
826 // Base class for barriered pointer types that intercept reads and writes.
827 template <typename T
>
828 class ReadBarriered
: public BarrieredBase
<T
> {
830 // ReadBarriered is not directly instantiable.
831 explicit ReadBarriered(const T
& v
) : BarrieredBase
<T
>(v
) {}
833 void read() const { InternalBarrierMethods
<T
>::readBarrier(this->value
); }
834 void post(const T
& prev
, const T
& next
) {
835 InternalBarrierMethods
<T
>::postBarrier(&this->value
, prev
, next
);
839 // Incremental GC requires that weak pointers have read barriers. See the block
840 // comment at the top of Barrier.h for a complete discussion of why.
842 // Note that this class also has post-barriers, so is safe to use with nursery
843 // pointers. However, when used as a hashtable key, care must still be taken to
844 // insert manual post-barriers on the table for rekeying if the key is based in
845 // any way on the address of the object.
846 template <typename T
>
847 class WeakHeapPtr
: public ReadBarriered
<T
>,
848 public WrappedPtrOperations
<T
, WeakHeapPtr
<T
>> {
850 using ReadBarriered
<T
>::value
;
853 WeakHeapPtr() : ReadBarriered
<T
>(JS::SafelyInitialized
<T
>::create()) {}
855 // It is okay to add barriers implicitly.
856 MOZ_IMPLICIT
WeakHeapPtr(const T
& v
) : ReadBarriered
<T
>(v
) {
857 this->post(JS::SafelyInitialized
<T
>::create(), v
);
860 // The copy constructor creates a new weak edge but the wrapped pointer does
861 // not escape, so no read barrier is necessary.
862 explicit WeakHeapPtr(const WeakHeapPtr
& other
) : ReadBarriered
<T
>(other
) {
863 this->post(JS::SafelyInitialized
<T
>::create(), value
);
866 // Move retains the lifetime status of the source edge, so does not fire
867 // the read barrier of the defunct edge.
868 WeakHeapPtr(WeakHeapPtr
&& other
) noexcept
869 : ReadBarriered
<T
>(other
.release()) {
870 this->post(JS::SafelyInitialized
<T
>::create(), value
);
874 this->post(this->value
, JS::SafelyInitialized
<T
>::create());
877 WeakHeapPtr
& operator=(const WeakHeapPtr
& v
) {
878 AssertTargetIsNotGray(v
.value
);
879 T prior
= this->value
;
880 this->value
= v
.value
;
881 this->post(prior
, v
.value
);
885 const T
& get() const {
886 if (InternalBarrierMethods
<T
>::isMarkable(this->value
)) {
892 const T
& unbarrieredGet() const { return this->value
; }
894 explicit operator bool() const { return bool(this->value
); }
896 operator const T
&() const { return get(); }
898 const T
& operator->() const { return get(); }
900 void set(const T
& v
) {
901 AssertTargetIsNotGray(v
);
905 void unbarrieredSet(const T
& v
) {
906 AssertTargetIsNotGray(v
);
911 void setUnchecked(const T
& v
) {
919 set(JS::SafelyInitialized
<T
>::create());
924 // A wrapper for a bare pointer, with no barriers.
926 // This should only be necessary in a limited number of cases. Please don't add
927 // more uses of this if at all possible.
928 template <typename T
>
929 class UnsafeBarePtr
: public BarrieredBase
<T
> {
931 UnsafeBarePtr() : BarrieredBase
<T
>(JS::SafelyInitialized
<T
>::create()) {}
932 MOZ_IMPLICIT
UnsafeBarePtr(T v
) : BarrieredBase
<T
>(v
) {}
933 const T
& get() const { return this->value
; }
934 void set(T newValue
) { this->value
= newValue
; }
935 DECLARE_POINTER_CONSTREF_OPS(T
);
940 namespace JS::detail
{
941 template <typename T
>
942 struct DefineComparisonOps
<js::WeakHeapPtr
<T
>> : std::true_type
{
943 static const T
& get(const js::WeakHeapPtr
<T
>& v
) {
944 return v
.unbarrieredGet();
947 } // namespace JS::detail
951 // A pre- and post-barriered Value that is specialized to be aware that it
952 // resides in a slots or elements vector. This allows it to be relocated in
953 // memory, but with substantially less overhead than a HeapPtr.
// NOTE(review): this region is a garbled extraction; several original lines
// (e.g. 955, 957, 959, 968, 980, 982-983, 997-1003) appear to have been
// dropped, so some member bodies below are visibly incomplete.
954 class HeapSlot
: public WriteBarriered
<Value
> {
// Kind distinguishes fixed/dynamic slots from dense elements when reporting
// the write to the store buffer (see post() below).
956 enum Kind
{ Slot
= 0, Element
= 1 };
// init: initialize a freshly-created slot, then fire the post-write barrier.
// The assignment to |value| (original line 959, presumably `value = v;`)
// seems to be missing here — TODO confirm against upstream.
958 void init(NativeObject
* owner
, Kind kind
, uint32_t slot
, const Value
& v
) {
960 post(owner
, kind
, slot
, v
);
963 void initAsUndefined() { value
.setUndefined(); }
// destroy: run only the pre-write barrier before the slot goes away.
965 void destroy() { pre(); }
// setUndefinedUnchecked: clears to undefined; original line 968 (presumably
// a pre() call) looks dropped — TODO confirm.
967 void setUndefinedUnchecked() {
969 value
.setUndefined();
// Debug-only precondition checks; definitions live in a .cpp file.
973 bool preconditionForSet(NativeObject
* owner
, Kind kind
, uint32_t slot
) const;
974 void assertPreconditionForPostWriteBarrier(NativeObject
* obj
, Kind kind
,
976 const Value
& target
) const;
// set: asserts the slot/owner precondition, then (dropped lines, presumably
// pre() and `value = v;`) writes the value and fires the post barrier.
979 MOZ_ALWAYS_INLINE
void set(NativeObject
* owner
, Kind kind
, uint32_t slot
,
981 MOZ_ASSERT(preconditionForSet(owner
, kind
, slot
));
984 post(owner
, kind
, slot
, v
);
// post: if the stored value is a GC thing whose cell has a store buffer,
// record the slot write so the collector can find this edge later.
988 void post(NativeObject
* owner
, Kind kind
, uint32_t slot
,
989 const Value
& target
) {
991 assertPreconditionForPostWriteBarrier(owner
, kind
, slot
, target
);
993 if (this->value
.isGCThing()) {
994 gc::Cell
* cell
= this->value
.toGCThing();
995 if (cell
->storeBuffer()) {
996 cell
->storeBuffer()->putSlot(owner
, kind
, slot
, 1);
1004 namespace JS::detail
{
1006 struct DefineComparisonOps
<js::HeapSlot
> : std::true_type
{
1007 static const Value
& get(const js::HeapSlot
& v
) { return v
.get(); }
1009 } // namespace JS::detail
1013 class HeapSlotArray
{
1017 explicit HeapSlotArray(HeapSlot
* array
) : array(array
) {}
1019 HeapSlot
* begin() const { return array
; }
1021 operator const Value
*() const {
1022 static_assert(sizeof(GCPtr
<Value
>) == sizeof(Value
));
1023 static_assert(sizeof(HeapSlot
) == sizeof(Value
));
1024 return reinterpret_cast<const Value
*>(array
);
1026 operator HeapSlot
*() const { return begin(); }
1028 HeapSlotArray
operator+(int offset
) const {
1029 return HeapSlotArray(array
+ offset
);
1031 HeapSlotArray
operator+(uint32_t offset
) const {
1032 return HeapSlotArray(array
+ offset
);
1037 * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
1038 * barriers with only one branch to check if we're in an incremental GC.
1040 template <class T1
, class T2
>
1041 static inline void BarrieredSetPair(Zone
* zone
, HeapPtr
<T1
*>& v1
, T1
* val1
,
1042 HeapPtr
<T2
*>& v2
, T2
* val2
) {
1043 AssertTargetIsNotGray(val1
);
1044 AssertTargetIsNotGray(val2
);
1045 if (T1::needPreWriteBarrier(zone
)) {
1049 v1
.postBarrieredSet(val1
);
1050 v2
.postBarrieredSet(val2
);
1054 * ImmutableTenuredPtr is designed for one very narrow case: replacing
1055 * immutable raw pointers to GC-managed things, implicitly converting to a
1056 * handle type for ease of use. Pointers encapsulated by this type must:
1058 * be immutable (no incremental write barriers),
1059 * never point into the nursery (no generational write barriers), and
1060 * be traced via MarkRuntime (we use fromMarkedLocation).
1062 * In short: you *really* need to know what you're doing before you use this
1065 template <typename T
>
1066 class MOZ_HEAP_CLASS ImmutableTenuredPtr
{
1070 operator T() const { return value
; }
1071 T
operator->() const { return value
; }
1073 // `ImmutableTenuredPtr<T>` is implicitly convertible to `Handle<T>`.
1075 // In case you need to convert to `Handle<U>` where `U` is base class of `T`,
1076 // convert this to `Handle<T>` by `toHandle()` and then use implicit
1077 // conversion from `Handle<T>` to `Handle<U>`.
1078 operator Handle
<T
>() const { return toHandle(); }
1079 Handle
<T
> toHandle() const { return Handle
<T
>::fromMarkedLocation(&value
); }
1082 MOZ_ASSERT(ptr
->isTenured());
1083 AssertTargetIsNotGray(ptr
);
1087 T
get() const { return value
; }
1088 const T
* address() { return &value
; }
1091 // Template to remove any barrier wrapper and get the underlying type.
1092 template <typename T
>
1093 struct RemoveBarrier
{
1096 template <typename T
>
1097 struct RemoveBarrier
<HeapPtr
<T
>> {
1100 template <typename T
>
1101 struct RemoveBarrier
<GCPtr
<T
>> {
1104 template <typename T
>
1105 struct RemoveBarrier
<PreBarriered
<T
>> {
1108 template <typename T
>
1109 struct RemoveBarrier
<WeakHeapPtr
<T
>> {
1114 template struct JS_PUBLIC_API StableCellHasher
<JSObject
*>;
1117 template <typename T
>
1118 struct StableCellHasher
<PreBarriered
<T
>> {
1119 using Key
= PreBarriered
<T
>;
1122 static bool maybeGetHash(const Lookup
& l
, HashNumber
* hashOut
) {
1123 return StableCellHasher
<T
>::maybeGetHash(l
, hashOut
);
1125 static bool ensureHash(const Lookup
& l
, HashNumber
* hashOut
) {
1126 return StableCellHasher
<T
>::ensureHash(l
, hashOut
);
1128 static HashNumber
hash(const Lookup
& l
) {
1129 return StableCellHasher
<T
>::hash(l
);
1131 static bool match(const Key
& k
, const Lookup
& l
) {
1132 return StableCellHasher
<T
>::match(k
, l
);
1136 template <typename T
>
1137 struct StableCellHasher
<HeapPtr
<T
>> {
1138 using Key
= HeapPtr
<T
>;
1141 static bool maybeGetHash(const Lookup
& l
, HashNumber
* hashOut
) {
1142 return StableCellHasher
<T
>::maybeGetHash(l
, hashOut
);
1144 static bool ensureHash(const Lookup
& l
, HashNumber
* hashOut
) {
1145 return StableCellHasher
<T
>::ensureHash(l
, hashOut
);
1147 static HashNumber
hash(const Lookup
& l
) {
1148 return StableCellHasher
<T
>::hash(l
);
1150 static bool match(const Key
& k
, const Lookup
& l
) {
1151 return StableCellHasher
<T
>::match(k
, l
);
1155 template <typename T
>
1156 struct StableCellHasher
<WeakHeapPtr
<T
>> {
1157 using Key
= WeakHeapPtr
<T
>;
1160 static bool maybeGetHash(const Lookup
& l
, HashNumber
* hashOut
) {
1161 return StableCellHasher
<T
>::maybeGetHash(l
, hashOut
);
1163 static bool ensureHash(const Lookup
& l
, HashNumber
* hashOut
) {
1164 return StableCellHasher
<T
>::ensureHash(l
, hashOut
);
1166 static HashNumber
hash(const Lookup
& l
) {
1167 return StableCellHasher
<T
>::hash(l
);
1169 static bool match(const Key
& k
, const Lookup
& l
) {
1170 return StableCellHasher
<T
>::match(k
.unbarrieredGet(), l
);
1174 /* Useful for hashtables with a HeapPtr as key. */
1176 struct HeapPtrHasher
{
1177 using Key
= HeapPtr
<T
>;
1180 static HashNumber
hash(Lookup obj
) { return DefaultHasher
<T
>::hash(obj
); }
1181 static bool match(const Key
& k
, Lookup l
) { return k
.get() == l
; }
1182 static void rekey(Key
& k
, const Key
& newKey
) { k
.unbarrieredSet(newKey
); }
1186 struct PreBarrieredHasher
{
1187 using Key
= PreBarriered
<T
>;
1190 static HashNumber
hash(Lookup obj
) { return DefaultHasher
<T
>::hash(obj
); }
1191 static bool match(const Key
& k
, Lookup l
) { return k
.get() == l
; }
1192 static void rekey(Key
& k
, const Key
& newKey
) { k
.unbarrieredSet(newKey
); }
1195 /* Useful for hashtables with a WeakHeapPtr as key. */
1197 struct WeakHeapPtrHasher
{
1198 using Key
= WeakHeapPtr
<T
>;
1201 static HashNumber
hash(Lookup obj
) { return DefaultHasher
<T
>::hash(obj
); }
1202 static bool match(const Key
& k
, Lookup l
) { return k
.unbarrieredGet() == l
; }
1203 static void rekey(Key
& k
, const Key
& newKey
) {
1204 k
.set(newKey
.unbarrieredGet());
1209 struct UnsafeBarePtrHasher
{
1210 using Key
= UnsafeBarePtr
<T
>;
1213 static HashNumber
hash(const Lookup
& l
) { return DefaultHasher
<T
>::hash(l
); }
1214 static bool match(const Key
& k
, Lookup l
) { return k
.get() == l
; }
1215 static void rekey(Key
& k
, const Key
& newKey
) { k
.set(newKey
.get()); }
1218 // Set up descriptive type aliases.
1220 using PreBarrierWrapper
= PreBarriered
<T
>;
1222 using PreAndPostBarrierWrapper
= GCPtr
<T
>;
1229 struct DefaultHasher
<js::HeapPtr
<T
>> : js::HeapPtrHasher
<T
> {};
1232 struct DefaultHasher
<js::GCPtr
<T
>> {
1233 // Not implemented. GCPtr can't be used as a hash table key because it has a
1234 // post barrier but doesn't support relocation.
1238 struct DefaultHasher
<js::PreBarriered
<T
>> : js::PreBarrieredHasher
<T
> {};
1241 struct DefaultHasher
<js::WeakHeapPtr
<T
>> : js::WeakHeapPtrHasher
<T
> {};
1244 struct DefaultHasher
<js::UnsafeBarePtr
<T
>> : js::UnsafeBarePtrHasher
<T
> {};
1246 } // namespace mozilla
1248 #endif /* gc_Barrier_h */