1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
12 #include "gc/GCTrace.h"
14 #include "vm/ForkJoin.h"
// Fragment of ThreadSafeContext::allocator(): accessor for the context's
// cached Allocator. The debug assertion checks that, for a full JSContext,
// the cached allocator_ pointer agrees with the context's zone's allocator.
// NOTE(review): this file is a partial extraction -- statements are split
// mid-expression and original line numbers are embedded in the text; the
// function's braces and return statement are missing here.
21 ThreadSafeContext::allocator() const
23 MOZ_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator
== allocator_
);
// Fragment of ThreadSafeContext::isThreadLocal(T thing): decides whether
// |thing| may be touched by this thread without synchronization.
// Visible logic: if this is not a ForkJoinContext there is an early exit
// (its body is missing from this extraction -- presumably "return true");
// otherwise |thing| must not be in the nursery and its arena must be owned
// by this context's private allocator (containsArena check below).
29 ThreadSafeContext::isThreadLocal(T thing
) const
31 if (!isForkJoinContext())
35 MOZ_ASSERT(!IsInsideNursery(thing
));
37 // The thing is not in the nursery, but is it in the private tenured area?
38 if (allocator_
->arenas
.containsArena(runtime_
, thing
->asTenured().arenaHeader()))
40 // GC should be suppressed in preparation for mutating thread local
41 // objects, as we don't want to trip any barriers.
42 MOZ_ASSERT(!thing
->zoneFromAnyThread()->needsIncrementalBarrier())
43 MOZ_ASSERT(!thing
->runtimeFromAnyThread()->needsIncrementalBarrier());
// Fragment of GetGCObjectKind(const Class *clasp): selects the AllocKind for
// objects of a given class. Functions use JSFunction::FinalizeKind; any
// other class derives its kind from JSCLASS_RESERVED_SLOTS(clasp) via the
// nslots-based GetGCObjectKind overload.
// NOTE(review): the statement guarded by the JSCLASS_HAS_PRIVATE test
// (original line 60) is missing from this extraction -- presumably it bumps
// nslots to make room for the private field; confirm against full source.
53 static inline AllocKind
54 GetGCObjectKind(const Class
* clasp
)
56 if (clasp
== FunctionClassPtr
)
57 return JSFunction::FinalizeKind
;
58 uint32_t nslots
= JSCLASS_RESERVED_SLOTS(clasp
);
59 if (clasp
->flags
& JSCLASS_HAS_PRIVATE
)
61 return GetGCObjectKind(nslots
);
// Fragment of ShouldNurseryAllocateObject: an object may be allocated in the
// nursery only when the nursery is enabled and the caller did not explicitly
// request the tenured heap. (Return-type line and braces are missing from
// this extraction.)
65 ShouldNurseryAllocateObject(const Nursery
& nursery
, InitialHeap heap
)
67 return nursery
.isEnabled() && heap
!= TenuredHeap
;
// Fragment of GetGCThingTraceKind(const void* thing): maps an arbitrary GC
// cell pointer to its trace kind. Cells inside the nursery are always
// objects (JSTRACE_OBJECT); tenured cells map from their AllocKind via
// MapAllocToTraceKind.
71 GetGCThingTraceKind(const void* thing
)
74 const Cell
* cell
= static_cast<const Cell
*>(thing
);
75 if (IsInsideNursery(cell
))
76 return JSTRACE_OBJECT
;
77 return MapAllocToTraceKind(cell
->asTenured().getAllocKind());
// Fragment (original lines 86-87): part of a GC-poke / zeal check -- when
// the runtime's zeal mode equals ZealPokeValue a GC is scheduled "soon".
// NOTE(review): the enclosing function and the consequent statement are
// missing from this extraction; cannot tell which method this belongs to.
86 /* Schedule a GC to happen "soon" after a GC poke. */
87 if (zealMode
== ZealPokeValue
)
// Fragments of class ArenaIter: iterates the arenas holding a given
// AllocKind within a zone. It walks three lists in order: the normal arena
// list, then arenas still waiting to be swept, then already-swept arenas.
// (Class header, field |aheader|, and several guards are missing from this
// extraction.)
//
// Cursors into the to-sweep and already-swept arena lists.
95 ArenaHeader
* unsweptHeader
;
96 ArenaHeader
* sweptHeader
;
// Fragment of a default constructor / reset: both sweep cursors start null.
101 unsweptHeader
= nullptr;
102 sweptHeader
= nullptr;
// Convenience constructor: presumably delegates to init(zone, kind) below
// -- body missing from this extraction.
105 ArenaIter(JS::Zone
* zone
, AllocKind kind
) {
// Seed the iterator from an Allocator: current arena list head, then the
// unswept and swept list heads for |kind|.
109 void init(Allocator
* allocator
, AllocKind kind
) {
110 aheader
= allocator
->arenas
.getFirstArena(kind
);
111 unsweptHeader
= allocator
->arenas
.getFirstArenaToSweep(kind
);
112 sweptHeader
= allocator
->arenas
.getFirstSweptArena(kind
);
// Nothing to sweep: promote the swept list so unsweptHeader is the single
// fallback cursor.
113 if (!unsweptHeader
) {
114 unsweptHeader
= sweptHeader
;
115 sweptHeader
= nullptr;
// Shift the fallback lists into the current cursor. NOTE(review): the guard
// around these three assignments (original lines 116-117, presumably
// "if (!aheader)") is missing from this extraction.
118 aheader
= unsweptHeader
;
119 unsweptHeader
= sweptHeader
;
120 sweptHeader
= nullptr;
// Zone-based init simply forwards to the Allocator-based init above.
124 void init(JS::Zone
* zone
, AllocKind kind
) {
125 init(&zone
->allocator
, kind
);
// Accessor for the current arena (return statement missing here).
132 ArenaHeader
* get() const {
// Fragment of next(): advance along the current list; when exhausted, fall
// back to the unswept list, then the swept list (guards missing here).
138 aheader
= aheader
->next
;
140 aheader
= unsweptHeader
;
141 unsweptHeader
= sweptHeader
;
142 sweptHeader
= nullptr;
// Fragments of class ArenaCellIterImpl: iterates the allocated (non-free)
// cells within a single arena by walking addresses from the arena's first
// thing to its limit, skipping over the arena's free-span list.
147 class ArenaCellIterImpl
149 // These three are set in initUnsynchronized().
150 size_t firstThingOffset
;
156 // These three are set in reset() (which is called by init()).
161 // Upon entry, |thing| points to any thing (free or used) and finds the
162 // first used thing, which may be |thing|.
163 void moveForwardIfFree() {
166 // Note: if |span| is empty, this test will fail, which is what we want
167 // -- |span| being empty means that we're past the end of the last free
168 // thing, all the remaining things in the arena are used, and we'll
169 // never need to move forward.
// |thing| sits at the start of the current free span: hop over the whole
// span and advance to the next free span in the list.
170 if (thing
== span
.first
) {
171 thing
= span
.last
+ thingSize
;
172 span
= *span
.nextSpan();
// Fragment of the default constructor's init list: zero the geometry fields
// purely to squelch uninitialized-use warnings.
178 : firstThingOffset(0) // Squelch
179 , thingSize(0) // warnings
// Set up iteration geometry for |aheader|'s kind without requiring the
// free list to be synchronized (used under finalization).
184 void initUnsynchronized(ArenaHeader
* aheader
) {
185 AllocKind kind
= aheader
->getAllocKind();
189 firstThingOffset
= Arena::firstThingOffset(kind
);
190 thingSize
= Arena::thingSize(kind
);
// Synchronized init: asserts the zone's free list for this kind has been
// copied back to the arena, then delegates to initUnsynchronized.
194 void init(ArenaHeader
* aheader
) {
196 AllocKind kind
= aheader
->getAllocKind();
197 MOZ_ASSERT(aheader
->zone
->allocator
.arenas
.isSynchronizedFreeList(kind
));
199 initUnsynchronized(aheader
);
202 // Use this to move from an Arena of a particular kind to another Arena of
// reset(): re-point the iterator at a new arena of the same kind -- load its
// free-span list and compute the [first thing, limit) address range.
204 void reset(ArenaHeader
* aheader
) {
205 MOZ_ASSERT(isInited
);
206 span
= aheader
->getFirstFreeSpan();
207 uintptr_t arenaAddr
= aheader
->arenaAddress();
208 thing
= arenaAddr
+ firstThingOffset
;
209 limit
= arenaAddr
+ ArenaSize
;
// Fragment of done(): iteration is complete when the cursor reaches the
// arena's end address.
214 return thing
== limit
;
// Current cell, as an untyped tenured cell.
217 TenuredCell
* getCell() const {
219 return reinterpret_cast<TenuredCell
*>(thing
);
// Current cell, downcast to the caller's expected type.
222 template<typename T
> T
* get() const {
224 return static_cast<T
*>(getCell());
// Fragment (original line 237): tail of an out-of-class declaration of the
// JSObject specialization of get<> -- its leading tokens are missing here.
237 ArenaCellIterImpl::get
<JSObject
>() const;
// Fragment of ArenaCellIterUnderGC: arena-cell iterator for use while a GC
// is in progress -- the constructor asserts the heap is busy, so the free
// lists cannot change underneath the iteration.
239 class ArenaCellIterUnderGC
: public ArenaCellIterImpl
242 explicit ArenaCellIterUnderGC(ArenaHeader
* aheader
) {
243 MOZ_ASSERT(aheader
->zone
->runtimeFromAnyThread()->isHeapBusy());
// Fragment of ArenaCellIterUnderFinalize: arena-cell iterator used during
// finalization; skips the synchronized-free-list assertion by calling
// initUnsynchronized directly.
248 class ArenaCellIterUnderFinalize
: public ArenaCellIterImpl
251 explicit ArenaCellIterUnderFinalize(ArenaHeader
* aheader
) {
252 initUnsynchronized(aheader
);
// Fragments of class ZoneCellIterImpl: iterates every cell of a given
// AllocKind in a zone by composing an ArenaIter (outer, over arenas) with an
// ArenaCellIterImpl (inner, over cells within the current arena).
256 class ZoneCellIterImpl
259 ArenaCellIterImpl cellIter
;
262 ZoneCellIterImpl() {}
// Position both iterators at the start; requires the free list for |kind|
// to already be synchronized back into the arenas.
264 void init(JS::Zone
* zone
, AllocKind kind
) {
265 MOZ_ASSERT(zone
->allocator
.arenas
.isSynchronizedFreeList(kind
));
266 arenaIter
.init(zone
, kind
);
267 if (!arenaIter
.done())
268 cellIter
.init(arenaIter
.get());
// Fragment of done(): the whole iteration is finished when the arena
// iterator is exhausted.
273 return arenaIter
.done();
// Current cell, typed / untyped accessors delegate to the inner iterator.
276 template<typename T
> T
* get() const {
278 return cellIter
.get
<T
>();
281 Cell
* getCell() const {
283 return cellIter
.getCell();
// Fragment of next(): when the current arena's cells are exhausted, advance
// the arena iterator and reset the cell iterator onto the new arena.
// NOTE(review): the arenaIter.next() call (original line 291) is missing
// from this extraction.
289 if (cellIter
.done()) {
290 MOZ_ASSERT(!arenaIter
.done());
292 if (!arenaIter
.done())
293 cellIter
.reset(arenaIter
.get());
// Fragment of ZoneCellIterUnderGC: zone-cell iterator for use while the GC
// is running -- the constructor asserts the nursery is empty (everything is
// tenured and visible) and that the heap is busy.
298 class ZoneCellIterUnderGC
: public ZoneCellIterImpl
301 ZoneCellIterUnderGC(JS::Zone
* zone
, AllocKind kind
) {
302 MOZ_ASSERT(zone
->runtimeFromAnyThread()->gc
.nursery
.isEmpty());
303 MOZ_ASSERT(zone
->runtimeFromAnyThread()->isHeapBusy());
// Fragments of class ZoneCellIter: general-purpose zone-cell iterator usable
// outside of GC. Its constructor makes iteration safe by (1) waiting for
// background finalization of |kind| to finish, (2) evicting the nursery so
// all things are tenured, and (3) copying the thread-local free list back
// into the arena so every allocated cell is visible; allocation is then
// disallowed for the iterator's lifetime via AutoAssertNoAlloc.
308 class ZoneCellIter
: public ZoneCellIterImpl
310 JS::AutoAssertNoAlloc noAlloc
;
// Constructor: |lists| caches the zone's ArenaLists (member declaration is
// missing from this extraction).
315 ZoneCellIter(JS::Zone
* zone
, AllocKind kind
)
316 : lists(&zone
->allocator
.arenas
),
319 JSRuntime
* rt
= zone
->runtimeFromMainThread();
322 * We have a single-threaded runtime, so there's no need to protect
323 * against other threads iterating or allocating. However, we do have
324 * background finalization; we have to wait for this to finish if it's
327 if (IsBackgroundFinalized(kind
) &&
328 zone
->allocator
.arenas
.needBackgroundFinalizeWait(kind
))
330 rt
->gc
.waitBackgroundSweepEnd();
333 /* Evict the nursery before iterating so we can see all things. */
334 rt
->gc
.evictNursery();
// If the free list is already synchronized nothing needs copying; otherwise
// copy it into the arena (and remember to clear it on destruction).
// NOTE(review): the branch structure around lines 336-340 is incomplete in
// this extraction -- confirm which path performs the copy.
336 if (lists
->isSynchronizedFreeList(kind
)) {
339 MOZ_ASSERT(!rt
->isHeapBusy());
340 lists
->copyFreeListToArena(kind
);
343 /* Assert that no GCs can occur while a ZoneCellIter is live. */
344 noAlloc
.disallowAlloc(rt
);
// Fragment of the destructor: undo the free-list copy made above.
351 lists
->clearFreeListInArena(kind
);
// Fragments of class GCZonesIter: wraps a zones iterator (member |zone|,
// declaration missing here) and yields only zones that are currently being
// collected; non-collecting zones are skipped in the constructor and in
// next()'s do/while loop.
361 explicit GCZonesIter(JSRuntime
* rt
, ZoneSelector selector
= WithAtoms
)
364 if (!zone
->isCollecting())
368 bool done() const { return zone
.done(); }
// Fragment of next(): keep advancing until a collecting zone is found or the
// underlying iterator is exhausted.
374 } while (!zone
.done() && !zone
->isCollectingFromAnyThread());
377 JS::Zone
* get() const {
382 operator JS::Zone
*() const { return get(); }
383 JS::Zone
* operator->() const { return get(); }
// Compartment iteration over collecting zones, built on GCZonesIter.
386 typedef CompartmentsIterT
<GCZonesIter
> GCCompartmentsIter
;
388 /* Iterates over all zones in the current zone group. */
// Fragments of class GCZoneGroupIter: walks the linked list of zones in the
// GC's current zone group (member |current|, declaration missing here).
// Only valid while the heap is busy, as asserted in the constructor.
389 class GCZoneGroupIter
{
394 explicit GCZoneGroupIter(JSRuntime
* rt
) {
395 MOZ_ASSERT(rt
->isHeapBusy());
396 current
= rt
->gc
.getCurrentZoneGroup();
399 bool done() const { return !current
; }
// Fragment of next(): follow the intra-group linked list.
403 current
= current
->nextNodeInGroup();
406 JS::Zone
* get() const {
411 operator JS::Zone
*() const { return get(); }
412 JS::Zone
* operator->() const { return get(); }
// Compartment iteration over the current zone group.
415 typedef CompartmentsIterT
<GCZoneGroupIter
> GCCompartmentGroupIter
;
418 * Attempt to allocate a new GC thing out of the nursery. If there is not enough
419 * room in the nursery or there is an OOM, this method will return nullptr.
// Fragment of TryNewNurseryObject<allowGC>: first tries a plain nursery
// allocation; if that fails and GC is both allowed and not suppressed, runs
// a minor GC (OUT_OF_NURSERY) and -- if the nursery survived tenuring --
// retries once. Atoms-compartment objects must never go to the nursery.
// NOTE(review): return statements and braces are missing from this
// extraction.
421 template <AllowGC allowGC
>
423 TryNewNurseryObject(JSContext
* cx
, size_t thingSize
, size_t nDynamicSlots
, const js::Class
* clasp
)
425 MOZ_ASSERT(!IsAtomsCompartment(cx
->compartment()));
426 JSRuntime
* rt
= cx
->runtime();
427 Nursery
& nursery
= rt
->gc
.nursery
;
// First attempt: straight nursery allocation.
428 JSObject
* obj
= nursery
.allocateObject(cx
, thingSize
, nDynamicSlots
, clasp
);
// Allocation failed: collect the nursery and retry, unless GC is forbidden.
431 if (allowGC
&& !rt
->mainThread
.suppressGC
) {
432 cx
->minorGC(JS::gcreason::OUT_OF_NURSERY
);
434 /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
435 if (nursery
.isEnabled()) {
436 JSObject
* obj
= nursery
.allocateObject(cx
, thingSize
, nDynamicSlots
, clasp
);
// Fragment (original line 447): stray body line, presumably from the
// PossiblyFail() OOM-simulation helper that CheckAllocatorState calls below
// -- TODO confirm against the full source.
447 JS_OOM_POSSIBLY_FAIL_BOOL();
// Fragment of CheckAllocatorState<allowGC>(cx, kind): pre-allocation sanity
// and bookkeeping checks run before every tenured allocation on a JSContext.
// Non-JSContext (helper-thread) callers exit early. Debug/zeal builds assert
// the atoms compartment only allocates string/symbol/jitcode kinds, the heap
// is not busy, and allocation is allowed; then GC-safety is verified, a
// simulated OOM may be injected, a zealous GC may be triggered, and pending
// interrupts are serviced. (Early returns and braces are missing from this
// extraction.)
451 template <AllowGC allowGC
>
453 CheckAllocatorState(ThreadSafeContext
* cx
, AllocKind kind
)
455 if (!cx
->isJSContext())
458 JSContext
* ncx
= cx
->asJSContext();
459 JSRuntime
* rt
= ncx
->runtime();
460 #if defined(JS_GC_ZEAL) || defined(DEBUG)
461 MOZ_ASSERT_IF(rt
->isAtomsCompartment(ncx
->compartment()),
462 kind
== FINALIZE_STRING
||
463 kind
== FINALIZE_FAT_INLINE_STRING
||
464 kind
== FINALIZE_SYMBOL
||
465 kind
== FINALIZE_JITCODE
);
466 MOZ_ASSERT(!rt
->isHeapBusy());
467 MOZ_ASSERT(rt
->gc
.isAllocAllowed());
470 // Crash if we perform a GC action when it is not safe.
471 if (allowGC
&& !rt
->mainThread
.suppressGC
)
472 JS::AutoAssertOnGC::VerifyIsSafeToGC(rt
);
474 // For testing out of memory conditions
475 if (!PossiblyFail()) {
476 js_ReportOutOfMemory(cx
->asJSContext());
// Zeal mode: force a GC at this allocation point (consequent missing here).
482 if (rt
->gc
.needZealousGC())
486 if (rt
->hasPendingInterrupt()) {
487 // Invoking the interrupt callback can fail and we can't usefully
488 // handle that here. Just check in case we need to collect instead.
// Fragment of CheckIncrementalZoneState<T>(cx, t): debug check that a thing
// allocated while its zone is in an incremental GC (marking or sweeping) was
// placed in an arena flagged allocatedDuringIncremental, so the incremental
// marker will treat it correctly. No-op for non-JSContext callers.
496 template <typename T
>
498 CheckIncrementalZoneState(ThreadSafeContext
* cx
, T
* t
)
501 if (!cx
->isJSContext())
504 Zone
* zone
= cx
->asJSContext()->zone();
505 MOZ_ASSERT_IF(t
&& zone
->wasGCStarted() && (zone
->isGCMarking() || zone
->isGCSweeping()),
506 t
->asTenured().arenaHeader()->allocatedDuringIncremental
);
511 * Allocate a new GC thing. After a successful allocation the caller must
512 * fully initialize the thing before calling any function that can potentially
513 * trigger GC. This will ensure that GC tracing never sees junk values stored
514 * in the partially initialized thing.
// Fragment of AllocateObject<allowGC>(cx, kind, nDynamicSlots, heap, clasp):
// the central object-allocation path. Order of operations visible here:
// 1. run CheckAllocatorState;
// 2. on a JSContext, try the nursery via TryNewNurseryObject when
//    ShouldNurseryAllocateObject approves;
// 3. otherwise pre-allocate any dynamic slots (zone pod_malloc on an
//    exclusive context, plain js_pod_malloc elsewhere), poisoning them with
//    Debug_SetSlotRangeToCrashOnTouch;
// 4. take a tenured cell from the free list, refilling it from
//    GCRuntime::refillFreeListFromAnyThread on miss;
// 5. attach the slots, then run CheckIncrementalZoneState and the
//    allocation-trace hook.
// (Early returns, braces, and the failure-path slot cleanup are missing
// from this extraction.)
517 template <AllowGC allowGC
>
519 AllocateObject(ThreadSafeContext
* cx
, AllocKind kind
, size_t nDynamicSlots
, InitialHeap heap
,
520 const js::Class
* clasp
)
522 size_t thingSize
= Arena::thingSize(kind
);
524 MOZ_ASSERT(thingSize
== Arena::thingSize(kind
));
525 MOZ_ASSERT(thingSize
>= sizeof(JSObject
));
526 static_assert(sizeof(JSObject
) >= CellSize
,
527 "All allocations must be at least the allocator-imposed minimum size.");
529 if (!CheckAllocatorState
<allowGC
>(cx
, kind
))
532 if (cx
->isJSContext() &&
533 ShouldNurseryAllocateObject(cx
->asJSContext()->nursery(), heap
))
535 JSObject
* obj
= TryNewNurseryObject
<allowGC
>(cx
->asJSContext(), thingSize
, nDynamicSlots
,
541 HeapSlot
* slots
= nullptr;
543 if (cx
->isExclusiveContext())
544 slots
= cx
->asExclusiveContext()->zone()->pod_malloc
<HeapSlot
>(nDynamicSlots
);
546 slots
= js_pod_malloc
<HeapSlot
>(nDynamicSlots
);
547 if (MOZ_UNLIKELY(!slots
))
549 js::Debug_SetSlotRangeToCrashOnTouch(slots
, nDynamicSlots
);
552 JSObject
* obj
= reinterpret_cast<JSObject
*>(
553 cx
->allocator()->arenas
.allocateFromFreeList(kind
, thingSize
));
555 obj
= reinterpret_cast<JSObject
*>(GCRuntime::refillFreeListFromAnyThread
<allowGC
>(cx
, kind
));
558 obj
->setInitialSlotsMaybeNonNative(slots
);
562 CheckIncrementalZoneState(cx
, obj
);
563 js::gc::TraceTenuredAlloc(obj
, kind
);
// Fragment of AllocateNonObject<T, allowGC>(cx): tenured allocation path for
// all non-object GC things (strings, shapes, scripts, jitcode, ...). The
// AllocKind is derived statically from T via MapTypeToFinalizeKind and the
// size from sizeof(T). Same shape as AllocateObject's tenured path: check
// allocator state, take from the free list, refill on miss, then run the
// incremental-state check and the allocation-trace hook. (Early returns and
// braces are missing from this extraction.)
567 template <typename T
, AllowGC allowGC
>
569 AllocateNonObject(ThreadSafeContext
* cx
)
571 static_assert(sizeof(T
) >= CellSize
,
572 "All allocations must be at least the allocator-imposed minimum size.");
574 AllocKind kind
= MapTypeToFinalizeKind
<T
>::kind
;
575 size_t thingSize
= sizeof(T
);
577 MOZ_ASSERT(thingSize
== Arena::thingSize(kind
));
578 if (!CheckAllocatorState
<allowGC
>(cx
, kind
))
581 T
* t
= static_cast<T
*>(cx
->allocator()->arenas
.allocateFromFreeList(kind
, thingSize
));
583 t
= static_cast<T
*>(GCRuntime::refillFreeListFromAnyThread
<allowGC
>(cx
, kind
));
585 CheckIncrementalZoneState(cx
, t
);
586 js::gc::TraceTenuredAlloc(t
, kind
);
591 * When allocating for initialization from a cached object copy, we will
592 * potentially destroy the cache entry we want to copy if we allow GC. On the
593 * other hand, since these allocations are extremely common, we don't want to
594 * delay GC from these allocation sites. Instead we allow the GC, but still
595 * fail the allocation, forcing the non-cached path.
// Fragment of AllocateObjectForCacheHit<allowGC>(cx, kind, heap, clasp):
// nursery-eligible objects are attempted with NoGC (so the cache entry being
// copied cannot be collected mid-allocation); on failure, if the caller
// allows GC, a minor GC is run and nullptr is returned to force the
// non-cached path. Tenured objects go through AllocateObject<NoGC>, with
// maybeGC on failure for the same reason. (Early returns and braces are
// missing from this extraction.)
597 template <AllowGC allowGC
>
599 AllocateObjectForCacheHit(JSContext
* cx
, AllocKind kind
, InitialHeap heap
, const js::Class
* clasp
)
601 if (ShouldNurseryAllocateObject(cx
->nursery(), heap
)) {
602 size_t thingSize
= Arena::thingSize(kind
);
604 MOZ_ASSERT(thingSize
== Arena::thingSize(kind
));
605 if (!CheckAllocatorState
<NoGC
>(cx
, kind
))
608 JSObject
* obj
= TryNewNurseryObject
<NoGC
>(cx
, thingSize
, 0, clasp
);
609 if (!obj
&& allowGC
) {
610 cx
->minorGC(JS::gcreason::OUT_OF_NURSERY
);
616 JSObject
* obj
= AllocateObject
<NoGC
>(cx
, kind
, 0, heap
, clasp
);
617 if (!obj
&& allowGC
) {
618 cx
->runtime()->gc
.maybeGC(cx
->zone());
// Fragment of IsInsideGGCNursery(cell): tests whether a cell lives in the
// generational-GC nursery without dereferencing the cell itself. It masks
// the address down to the containing chunk (~ChunkMask), offsets to the
// chunk's location word (ChunkLocationOffset), and tests the nursery bit in
// that word. The word must be nonzero (i.e. a valid chunk location).
// (Null-check / #ifdef guard lines before 630 are missing from this
// extraction.)
626 IsInsideGGCNursery(const js::gc::Cell
* cell
)
630 uintptr_t addr
= uintptr_t(cell
);
631 addr
&= ~js::gc::ChunkMask
;
632 addr
|= js::gc::ChunkLocationOffset
;
633 uint32_t location
= *reinterpret_cast<uint32_t*>(addr
);
634 MOZ_ASSERT(location
!= 0);
635 return location
& js::gc::ChunkLocationBitNursery
;
// Fragment of NewGCObject<allowGC>: public wrapper over AllocateObject.
// Asserts |kind| is within the object AllocKind range, then forwards all
// arguments unchanged.
640 template <js::AllowGC allowGC
>
642 NewGCObject(js::ThreadSafeContext
* cx
, js::gc::AllocKind kind
, size_t nDynamicSlots
,
643 js::gc::InitialHeap heap
, const js::Class
* clasp
)
645 MOZ_ASSERT(kind
>= js::gc::FINALIZE_OBJECT0
&& kind
<= js::gc::FINALIZE_OBJECT_LAST
);
646 return js::gc::AllocateObject
<allowGC
>(cx
, kind
, nDynamicSlots
, heap
, clasp
);
// Wrapper: allocate a jit::JitCode cell via AllocateNonObject.
649 template <js::AllowGC allowGC
>
651 NewJitCode(js::ThreadSafeContext
* cx
)
653 return gc::AllocateNonObject
<jit::JitCode
, allowGC
>(cx
);
// Wrapper: allocate a types::TypeObject cell (always CanGC).
658 NewTypeObject(js::ThreadSafeContext
* cx
)
660 return gc::AllocateNonObject
<types::TypeObject
, js::CanGC
>(cx
);
// Wrapper: allocate a JSString cell via AllocateNonObject.
663 template <js::AllowGC allowGC
>
665 NewGCString(js::ThreadSafeContext
* cx
)
667 return js::gc::AllocateNonObject
<JSString
, allowGC
>(cx
);
// Wrapper: allocate a JSFatInlineString cell via AllocateNonObject.
670 template <js::AllowGC allowGC
>
671 inline JSFatInlineString
*
672 NewGCFatInlineString(js::ThreadSafeContext
* cx
)
674 return js::gc::AllocateNonObject
<JSFatInlineString
, allowGC
>(cx
);
// Wrapper: allocate a JSExternalString cell (always CanGC).
677 inline JSExternalString
*
678 NewGCExternalString(js::ThreadSafeContext
* cx
)
680 return js::gc::AllocateNonObject
<JSExternalString
, js::CanGC
>(cx
);
// Wrapper: allocate a Shape cell (always CanGC).
684 NewGCShape(ThreadSafeContext
* cx
)
686 return gc::AllocateNonObject
<Shape
, CanGC
>(cx
);
// Wrapper: allocate an AccessorShape cell (always CanGC).
690 NewGCAccessorShape(ThreadSafeContext
* cx
)
692 return gc::AllocateNonObject
<AccessorShape
, CanGC
>(cx
);
// Wrapper: allocate a JSScript cell (always CanGC).
698 js_NewGCScript(js::ThreadSafeContext
* cx
)
700 return js::gc::AllocateNonObject
<JSScript
, js::CanGC
>(cx
);
// Wrapper: allocate a js::LazyScript cell (always CanGC).
703 inline js::LazyScript
*
704 js_NewGCLazyScript(js::ThreadSafeContext
* cx
)
706 return js::gc::AllocateNonObject
<js::LazyScript
, js::CanGC
>(cx
);
// Wrapper: allocate a js::BaseShape cell via AllocateNonObject.
709 template <js::AllowGC allowGC
>
710 inline js::BaseShape
*
711 js_NewGCBaseShape(js::ThreadSafeContext
* cx
)
713 return js::gc::AllocateNonObject
<js::BaseShape
, allowGC
>(cx
);
716 #endif /* jsgcinlines_h */