1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
12 #include "gc/GCTrace.h"
14 #include "vm/ForkJoin.h"
21 ThreadSafeContext::allocator() const
23 JS_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator
== allocator_
);
// Fragment: ThreadSafeContext::isThreadLocal(thing) — decides whether |thing|
// may be touched by this thread without synchronization. The extraction
// dropped the template header, braces, and the return statements; the
// embedded leading numbers are original line numbers left by the extractor.
29 ThreadSafeContext::isThreadLocal(T thing) const
// Outside a ForkJoin section there is only one thread; presumably the
// dropped consequent returns true — TODO confirm.
31 if (!isForkJoinContext())
34 #ifdef JSGC_FJGENERATIONAL
// NOTE(review): const is cast away only to reach the ForkJoin nursery for a
// containment query — confirm no mutation occurs through |cx|.
35 ForkJoinContext *cx = static_cast<ForkJoinContext *>(const_cast<ThreadSafeContext *>(this));
// A thing inside this context's own newspace is thread-local by construction.
36 if (cx->nursery().isInsideNewspace(thing))
// Past this point the thing must not be in any nursery.
41 JS_ASSERT(!IsInsideNursery(thing));
43 // The thing is not in the nursery, but is it in the private tenured area?
44 if (allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
46 // GC should be suppressed in preparation for mutating thread local
47 // objects, as we don't want to trip any barriers.
48 JS_ASSERT(!thing->zoneFromAnyThread()->needsIncrementalBarrier());
49 JS_ASSERT(!thing->runtimeFromAnyThread()->needsIncrementalBarrier());
59 static inline AllocKind
60 GetGCObjectKind(const Class
* clasp
)
62 if (clasp
== FunctionClassPtr
)
63 return JSFunction::FinalizeKind
;
64 uint32_t nslots
= JSCLASS_RESERVED_SLOTS(clasp
);
65 if (clasp
->flags
& JSCLASS_HAS_PRIVATE
)
67 return GetGCObjectKind(nslots
);
70 #ifdef JSGC_GENERATIONAL
72 ShouldNurseryAllocate(const Nursery
& nursery
, AllocKind kind
, InitialHeap heap
)
74 return nursery
.isEnabled() && IsNurseryAllocable(kind
) && heap
!= TenuredHeap
;
78 #ifdef JSGC_FJGENERATIONAL
80 ShouldFJNurseryAllocate(const ForkJoinNursery
& nursery
, AllocKind kind
, InitialHeap heap
)
82 return IsFJNurseryAllocable(kind
) && heap
!= TenuredHeap
;
87 GetGCThingTraceKind(const void* thing
)
90 const Cell
* cell
= static_cast<const Cell
*>(thing
);
91 #ifdef JSGC_GENERATIONAL
92 if (IsInsideNursery(cell
))
93 return JSTRACE_OBJECT
;
95 return MapAllocToTraceKind(cell
->tenuredGetAllocKind());
// Fragment of a GC-zeal poke handler; the enclosing function is not visible
// in this extraction (presumably a GCRuntime poke method — TODO confirm).
104 /* Schedule a GC to happen "soon" after a GC poke. */
// NOTE(review): the consequent of this test was dropped by the extraction.
105 if (zealMode == ZealPokeValue)
// Fragment: class ArenaIter — walks all arenas of a given AllocKind in a
// zone, visiting the normal arena list first, then arenas still waiting to
// be swept, then already-swept arenas. The class header, "done()"/"next()"
// skeletons, braces and returns were dropped by the extraction; the leading
// numbers are original line numbers left by the extractor.
//
// Three cursors: |aheader| is the current position; the other two queue up
// the remaining phases.
112 ArenaHeader *aheader;
113 ArenaHeader *unsweptHeader;
114 ArenaHeader *sweptHeader;
// (default-construction path) — both phase queues start empty.
119 unsweptHeader = nullptr;
120 sweptHeader = nullptr;
// Convenience constructor; presumably forwards to init(zone, kind) below.
123 ArenaIter(JS::Zone *zone, AllocKind kind) {
// Prime all three cursors from the allocator's arena lists.
127 void init(Allocator *allocator, AllocKind kind) {
128 aheader = allocator->arenas.getFirstArena(kind);
129 unsweptHeader = allocator->arenas.getFirstArenaToSweep(kind);
130 sweptHeader = allocator->arenas.getFirstSweptArena(kind);
// If there is nothing to sweep, promote the swept list into the unswept
// slot so the phase hand-off below stays uniform.
131 if (!unsweptHeader) {
132 unsweptHeader = sweptHeader;
133 sweptHeader = nullptr;
// (presumably the !aheader case) — start directly on the next phase.
136 aheader = unsweptHeader;
137 unsweptHeader = sweptHeader;
138 sweptHeader = nullptr;
// Zone-based init: iterate the zone's own allocator.
142 void init(JS::Zone *zone, AllocKind kind) {
143 init(&zone->allocator, kind);
// Accessor for the current arena.
150 ArenaHeader *get() const {
// Advance within the current list...
156 aheader = aheader->next;
// ...and when a list is exhausted, shift to the next phase.
158 aheader = unsweptHeader;
159 unsweptHeader = sweptHeader;
160 sweptHeader = nullptr;
// Fragment: class ArenaCellIterImpl — iterates the live (allocated) cells of
// a single arena by walking thing addresses and skipping free spans. Member
// declarations for |thingSize|, |thing|, |limit| and |span|, plus braces and
// "done()"/"next()" skeletons, were dropped by the extraction.
165 class ArenaCellIterImpl
167 // These three are set in initUnsynchronized().
168 size_t firstThingOffset;
174 // These three are set in reset() (which is called by init()).
179 // Upon entry, |thing| points to any thing (free or used) and finds the
180 // first used thing, which may be |thing|.
181 void moveForwardIfFree() {
184 // Note: if |span| is empty, this test will fail, which is what we want
185 // -- |span| being empty means that we're past the end of the last free
186 // thing, all the remaining things in the arena are used, and we'll
187 // never need to move forward.
188 if (thing == span.first) {
// Jump past the whole free span and advance to the following span record.
189 thing = span.last + thingSize;
190 span = *span.nextSpan();
// Default constructor: zero the layout fields purely to silence warnings.
196 : firstThingOffset(0) // Squelch
197 , thingSize(0) // warnings
// Init without requiring the free list to be synchronized into the arena.
202 void initUnsynchronized(ArenaHeader *aheader) {
203 AllocKind kind = aheader->getAllocKind();
// Cache per-kind layout constants so iteration is simple address math.
207 firstThingOffset = Arena::firstThingOffset(kind);
208 thingSize = Arena::thingSize(kind);
// Checked init: callers must have synchronized the free list first.
212 void init(ArenaHeader *aheader) {
214 AllocKind kind = aheader->getAllocKind();
215 JS_ASSERT(aheader->zone->allocator.arenas.isSynchronizedFreeList(kind));
217 initUnsynchronized(aheader);
220 // Use this to move from an Arena of a particular kind to another Arena of
// Re-point the iterator at a new arena of the same kind.
222 void reset(ArenaHeader *aheader) {
224 span = aheader->getFirstFreeSpan();
225 uintptr_t arenaAddr = aheader->arenaAddress();
226 thing = arenaAddr + firstThingOffset;
227 limit = arenaAddr + ArenaSize;
// (done()) — iteration ends when the cursor reaches the arena's end.
232 return thing == limit;
235 Cell *getCell() const {
237 return reinterpret_cast<Cell *>(thing);
// Typed accessor over getCell().
240 template<typename T> T *get() const {
242 return static_cast<T *>(getCell());
253 class ArenaCellIterUnderGC
: public ArenaCellIterImpl
256 explicit ArenaCellIterUnderGC(ArenaHeader
* aheader
) {
257 JS_ASSERT(aheader
->zone
->runtimeFromAnyThread()->isHeapBusy());
262 class ArenaCellIterUnderFinalize
: public ArenaCellIterImpl
265 explicit ArenaCellIterUnderFinalize(ArenaHeader
* aheader
) {
266 initUnsynchronized(aheader
);
// Fragment: class ZoneCellIterImpl — iterates every live cell of a given
// AllocKind in a zone by composing an ArenaIter with an ArenaCellIterImpl.
// The |arenaIter| member declaration, "done()"/"next()" skeletons, braces
// and access specifiers were dropped by the extraction.
270 class ZoneCellIterImpl
273 ArenaCellIterImpl cellIter;
// Protected/default constructor: subclasses must call init() themselves.
276 ZoneCellIterImpl() {}
278 void init(JS::Zone *zone, AllocKind kind) {
// Precondition: the free list for |kind| has been synchronized into arenas.
279 JS_ASSERT(zone->allocator.arenas.isSynchronizedFreeList(kind));
280 arenaIter.init(zone, kind);
// Only prime the cell iterator if the zone has any arena of this kind.
281 if (!arenaIter.done())
282 cellIter.init(arenaIter.get());
// (done()) — exhausted when there are no more arenas.
287 return arenaIter.done();
290 template<typename T> T *get() const {
292 return cellIter.get<T>();
295 Cell *getCell() const {
297 return cellIter.getCell();
// (next()) — on arena exhaustion, advance the arena iterator and re-point
// the cell iterator at the new arena.
303 if (cellIter.done()) {
304 JS_ASSERT(!arenaIter.done());
306 if (!arenaIter.done())
307 cellIter.reset(arenaIter.get());
312 class ZoneCellIterUnderGC
: public ZoneCellIterImpl
315 ZoneCellIterUnderGC(JS::Zone
* zone
, AllocKind kind
) {
316 #ifdef JSGC_GENERATIONAL
317 JS_ASSERT(zone
->runtimeFromAnyThread()->gc
.nursery
.isEmpty());
319 JS_ASSERT(zone
->runtimeFromAnyThread()->isHeapBusy());
// Fragment: class AutoAssertNoAlloc — RAII guard that (in debug builds)
// forbids GC-thing allocation for its lifetime. The extraction dropped the
// |gc| member declaration, the method bodies, and the #ifdef that separates
// the debug variant (first group) from the no-op release variant (second
// group) — TODO confirm the conditional compilation against the original.
324 /* In debug builds, assert that no allocation occurs. */
325 class AutoAssertNoAlloc
// Debug variant: tracks the runtime's GC so the destructor can re-allow
// allocation.
331 AutoAssertNoAlloc() : gc(nullptr) {}
332 explicit AutoAssertNoAlloc(JSRuntime *rt) : gc(nullptr) {
335 void disallowAlloc(JSRuntime *rt) {
340 ~AutoAssertNoAlloc() {
// Release variant: all operations compile to nothing.
346 AutoAssertNoAlloc() {}
347 explicit AutoAssertNoAlloc(JSRuntime *) {}
348 void disallowAlloc(JSRuntime *rt) {}
// Fragment: class ZoneCellIter — mutator-side zone cell iterator. Unlike
// ZoneCellIterUnderGC it runs while the heap is not busy, so it must first
// quiesce background finalization, evict the nursery, and synchronize the
// free list. The |lists| member declaration, the constructor's closing
// logic, and the destructor skeleton were dropped by the extraction.
352 class ZoneCellIter : public ZoneCellIterImpl
// Forbids allocation (and hence GC) while the iterator is live.
354 AutoAssertNoAlloc noAlloc;
359 ZoneCellIter(JS::Zone *zone, AllocKind kind)
// |lists| caches the zone's arena lists (member declaration not visible).
360 : lists(&zone->allocator.arenas),
364 * We have a single-threaded runtime, so there's no need to protect
365 * against other threads iterating or allocating. However, we do have
366 * background finalization; we have to wait for this to finish if it's
// ...in progress for this kind (comment continuation dropped).
369 if (IsBackgroundFinalized(kind) &&
370 zone->allocator.arenas.needBackgroundFinalizeWait(kind))
372 zone->runtimeFromMainThread()->gc.waitBackgroundSweepEnd();
375 #ifdef JSGC_GENERATIONAL
376 /* Evict the nursery before iterating so we can see all things. */
377 JSRuntime *rt = zone->runtimeFromMainThread();
378 rt->gc.evictNursery();
// If the free list is already synchronized there is nothing to copy;
// presumably the else-branch records which kind to clear in ~ZoneCellIter.
381 if (lists->isSynchronizedFreeList(kind)) {
384 JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
385 lists->copyFreeListToArena(kind);
388 /* Assert that no GCs can occur while a ZoneCellIter is live. */
389 noAlloc.disallowAlloc(zone->runtimeFromMainThread());
// (destructor) — undo the free-list copy done in the constructor.
396 lists->clearFreeListInArena(kind);
// Fragment: class GCZonesIter — wraps a ZonesIter (member |zone|, declaration
// not visible) and visits only zones currently being collected. The class
// header, "next()" skeleton and get() body were dropped by the extraction.
// Constructor: skip ahead if the first zone is not being collected.
406 explicit GCZonesIter(JSRuntime *rt) : zone(rt, WithAtoms) {
407 if (!zone->isCollecting())
411 bool done() const { return zone.done(); }
// (next() loop tail) — advance until a collecting zone or the end.
417 } while (!zone.done() && !zone->isCollecting());
420 JS::Zone *get() const {
// Implicit conversions so the iterator can be used where a Zone* is expected.
425 operator JS::Zone *() const { return get(); }
426 JS::Zone *operator->() const { return get(); }
429 typedef CompartmentsIterT<GCZonesIter> GCCompartmentsIter;
// Fragment: class GCZoneGroupIter — walks the linked list of zones in the
// zone group currently being swept. The |current| member declaration and the
// "next()" skeleton were dropped by the extraction.
431 /* Iterates over all zones in the current zone group. */
432 class GCZoneGroupIter {
// Only valid while the GC is running (heap busy).
437 explicit GCZoneGroupIter(JSRuntime *rt) {
438 JS_ASSERT(rt->isHeapBusy());
439 current = rt->gc.getCurrentZoneGroup();
442 bool done() const { return !current; }
// (next()) — follow the intra-group link.
446 current = current->nextNodeInGroup();
449 JS::Zone *get() const {
454 operator JS::Zone *() const { return get(); }
455 JS::Zone *operator->() const { return get(); }
458 typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
460 #ifdef JSGC_GENERATIONAL
// Fragment: TryNewNurseryObject(JSContext*, ...) — the return type line, the
// success/failure returns and the closing braces were dropped by the
// extraction (presumably returns JSObject*, nullptr on failure — see the
// surviving doc comment below).
462 * Attempt to allocate a new GC thing out of the nursery. If there is not enough
463 * room in the nursery or there is an OOM, this method will return nullptr.
465 template <AllowGC allowGC>
467 TryNewNurseryObject(JSContext *cx, size_t thingSize, size_t nDynamicSlots)
// Atoms are never nursery-allocated.
469 JS_ASSERT(!IsAtomsCompartment(cx->compartment()));
470 JSRuntime *rt = cx->runtime();
471 Nursery &nursery = rt->gc.nursery;
// First attempt; presumably returned directly when non-null.
472 JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
// On failure, run a minor GC (if permitted) to free nursery space, then
// retry once.
475 if (allowGC && !rt->mainThread.suppressGC) {
476 cx->minorGC(JS::gcreason::OUT_OF_NURSERY);
478 /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
479 if (nursery.isEnabled()) {
480 JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
487 #endif /* JSGC_GENERATIONAL */
489 #ifdef JSGC_FJGENERATIONAL
// Fragment: TryNewNurseryObject(ForkJoinContext*, ...) — ForkJoin analogue
// of the JSContext overload above. Return type line, returns, the GC call
// between the two attempts, and braces were dropped by the extraction.
490 template <AllowGC allowGC>
492 TryNewNurseryObject(ForkJoinContext *cx, size_t thingSize, size_t nDynamicSlots)
494 ForkJoinNursery &nursery = cx->nursery();
// Out-parameter: set when the request is too large for the nursery.
495 bool tooLarge = false;
496 JSObject *obj = nursery.allocateObject(thingSize, nDynamicSlots, tooLarge);
// Retry only when the first failure was space (not size) related and GC is
// allowed; presumably a nursery collection happens between the attempts —
// TODO confirm.
500 if (!tooLarge && allowGC) {
502 obj = nursery.allocateObject(thingSize, nDynamicSlots, tooLarge);
509 #endif /* JSGC_FJGENERATIONAL */
// Fragment of an OOM-testing helper (presumably "static inline bool
// PossiblyFail()", judging by the call in CheckAllocatorState below — the
// enclosing definition was dropped by the extraction).
514 JS_OOM_POSSIBLY_FAIL();
518 template <AllowGC allowGC
>
520 CheckAllocatorState(ThreadSafeContext
* cx
, AllocKind kind
)
522 if (!cx
->isJSContext())
525 JSContext
* ncx
= cx
->asJSContext();
526 JSRuntime
* rt
= ncx
->runtime();
527 #if defined(JS_GC_ZEAL) || defined(DEBUG)
528 JS_ASSERT_IF(rt
->isAtomsCompartment(ncx
->compartment()),
529 kind
== FINALIZE_STRING
||
530 kind
== FINALIZE_FAT_INLINE_STRING
||
531 kind
== FINALIZE_SYMBOL
||
532 kind
== FINALIZE_JITCODE
);
533 JS_ASSERT(!rt
->isHeapBusy());
534 JS_ASSERT(rt
->gc
.isAllocAllowed());
537 // Crash if we perform a GC action when it is not safe.
538 if (allowGC
&& !rt
->mainThread
.suppressGC
)
539 JS::AutoAssertOnGC::VerifyIsSafeToGC(rt
);
541 // For testing out of memory conditions
542 if (!PossiblyFail()) {
543 js_ReportOutOfMemory(cx
);
549 if (rt
->gc
.needZealousGC())
554 // Invoking the interrupt callback can fail and we can't usefully
555 // handle that here. Just check in case we need to collect instead.
563 template <typename T
>
565 CheckIncrementalZoneState(ThreadSafeContext
* cx
, T
* t
)
568 if (!cx
->isJSContext())
571 Zone
* zone
= cx
->asJSContext()->zone();
572 JS_ASSERT_IF(t
&& zone
->wasGCStarted() && (zone
->isGCMarking() || zone
->isGCSweeping()),
573 t
->arenaHeader()->allocatedDuringIncremental
);
// Fragment: AllocateObject — the central object allocation path. The return
// type line (presumably JSObject*), the nursery-success returns, null checks
// after the tenured allocation, the slots-ownership hand-off on failure, and
// closing braces were dropped by the extraction.
578 * Allocate a new GC thing. After a successful allocation the caller must
579 * fully initialize the thing before calling any function that can potentially
580 * trigger GC. This will ensure that GC tracing never sees junk values stored
581 * in the partially initialized thing.
584 template <AllowGC allowGC>
586 AllocateObject(ThreadSafeContext *cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap)
588 size_t thingSize = Arena::thingSize(kind);
590 JS_ASSERT(thingSize == Arena::thingSize(kind));
591 JS_ASSERT(thingSize >= sizeof(JSObject));
592 static_assert(sizeof(JSObject) >= CellSize,
593 "All allocations must be at least the allocator-imposed minimum size.");
// Pre-allocation checks (OOM simulation, zeal, safety asserts).
595 if (!CheckAllocatorState<allowGC>(cx, kind))
// Fast path 1: GGC nursery allocation on the main thread.
598 #ifdef JSGC_GENERATIONAL
599 if (cx->isJSContext() &&
600 ShouldNurseryAllocate(cx->asJSContext()->nursery(), kind, heap)) {
601 JSObject *obj = TryNewNurseryObject<allowGC>(cx->asJSContext(), thingSize, nDynamicSlots);
// Fast path 2: ForkJoin nursery allocation on a worker thread.
606 #ifdef JSGC_FJGENERATIONAL
607 if (cx->isForkJoinContext() &&
608 ShouldFJNurseryAllocate(cx->asForkJoinContext()->nursery(), kind, heap))
611 TryNewNurseryObject<allowGC>(cx->asForkJoinContext(), thingSize, nDynamicSlots);
// Tenured path: dynamic slots are malloc'ed up front (presumably freed again
// if the thing allocation below fails — TODO confirm).
617 HeapSlot *slots = nullptr;
619 if (cx->isExclusiveContext())
620 slots = cx->asExclusiveContext()->zone()->pod_malloc<HeapSlot>(nDynamicSlots);
622 slots = js_pod_malloc<HeapSlot>(nDynamicSlots);
623 if (MOZ_UNLIKELY(!slots))
// Debug aid: poison the fresh slots so premature reads crash loudly.
625 js::Debug_SetSlotRangeToCrashOnTouch(slots, nDynamicSlots);
// Try the per-kind free list first; fall back to refilling it (which may GC
// when allowGC permits).
628 JSObject *obj = static_cast<JSObject *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
630 obj = static_cast<JSObject *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));
633 obj->setInitialSlots(slots);
// Post-allocation bookkeeping: incremental-GC invariant and tracing hook.
637 CheckIncrementalZoneState(cx, obj);
638 js::gc::TraceTenuredAlloc(obj, kind);
// Fragment: AllocateNonObject<T> — tenured allocation for non-object GC
// things (strings, scripts, shapes, ...). The return type line (presumably
// T*), early returns, the null check between the free-list attempt and the
// refill, and braces were dropped by the extraction.
642 template <typename T, AllowGC allowGC>
644 AllocateNonObject(ThreadSafeContext *cx)
646 static_assert(sizeof(T) >= CellSize,
647 "All allocations must be at least the allocator-imposed minimum size.");
// The finalize kind and size are fully determined by T at compile time.
649 AllocKind kind = MapTypeToFinalizeKind<T>::kind;
650 size_t thingSize = sizeof(T);
652 JS_ASSERT(thingSize == Arena::thingSize(kind));
653 if (!CheckAllocatorState<allowGC>(cx, kind))
// Free list first, refill (possibly GC'ing) as fallback — mirrors
// AllocateObject above.
656 T *t = static_cast<T *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
658 t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));
660 CheckIncrementalZoneState(cx, t);
661 js::gc::TraceTenuredAlloc(t, kind);
// Fragment: AllocateObjectForCacheHit — allocation used when initializing
// from a cached object copy (see surviving doc comment). Return type line,
// returns and closing braces were dropped by the extraction.
666 * When allocating for initialization from a cached object copy, we will
667 * potentially destroy the cache entry we want to copy if we allow GC. On the
668 * other hand, since these allocations are extremely common, we don't want to
669 * delay GC from these allocation sites. Instead we allow the GC, but still
670 * fail the allocation, forcing the non-cached path.
672 * Observe this won't be used for ForkJoin allocation, as it takes a JSContext*
674 template <AllowGC allowGC>
676 AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
678 #ifdef JSGC_GENERATIONAL
// Nursery path: allocate with NoGC so the cache entry cannot be destroyed
// mid-copy; on failure run a minor GC but still fail this allocation.
679 if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
680 size_t thingSize = Arena::thingSize(kind);
682 JS_ASSERT(thingSize == Arena::thingSize(kind));
683 if (!CheckAllocatorState<NoGC>(cx, kind))
686 JSObject *obj = TryNewNurseryObject<NoGC>(cx, thingSize, 0);
687 if (!obj && allowGC) {
688 cx->minorGC(JS::gcreason::OUT_OF_NURSERY);
// Tenured path: same NoGC-then-maybeGC-and-fail pattern.
695 JSObject *obj = AllocateObject<NoGC>(cx, kind, 0, heap);
696 if (!obj && allowGC) {
697 cx->runtime()->gc.maybeGC(cx->zone());
// Fragment: IsInsideGGCNursery — tests whether a cell lies in the GGC
// nursery by reading the location word stored at a fixed offset in the
// cell's chunk. The return type line (presumably bool), a dropped null/early
// check before the address math, and the non-generational #else/#endif tail
// were lost in the extraction.
705 IsInsideGGCNursery(const js::gc::Cell *cell)
707 #ifdef JSGC_GENERATIONAL
// Round the cell's address down to its chunk, then offset to the chunk's
// location word.
710 uintptr_t addr = uintptr_t(cell);
711 addr &= ~js::gc::ChunkMask;
712 addr |= js::gc::ChunkLocationOffset;
713 uint32_t location = *reinterpret_cast<uint32_t*>(addr);
// A zero location word would mean an uninitialized chunk trailer.
714 JS_ASSERT(location != 0);
715 return location & js::gc::ChunkLocationBitNursery;
723 template <js::AllowGC allowGC
>
725 NewGCObject(js::ThreadSafeContext
* cx
, js::gc::AllocKind kind
, size_t nDynamicSlots
, js::gc::InitialHeap heap
)
727 JS_ASSERT(kind
>= js::gc::FINALIZE_OBJECT0
&& kind
<= js::gc::FINALIZE_OBJECT_LAST
);
728 return js::gc::AllocateObject
<allowGC
>(cx
, kind
, nDynamicSlots
, heap
);
731 template <js::AllowGC allowGC
>
733 NewJitCode(js::ThreadSafeContext
* cx
)
735 return gc::AllocateNonObject
<jit::JitCode
, allowGC
>(cx
);
740 NewTypeObject(js::ThreadSafeContext
* cx
)
742 return gc::AllocateNonObject
<types::TypeObject
, js::CanGC
>(cx
);
745 template <js::AllowGC allowGC
>
747 NewGCString(js::ThreadSafeContext
* cx
)
749 return js::gc::AllocateNonObject
<JSString
, allowGC
>(cx
);
752 template <js::AllowGC allowGC
>
753 inline JSFatInlineString
*
754 NewGCFatInlineString(js::ThreadSafeContext
* cx
)
756 return js::gc::AllocateNonObject
<JSFatInlineString
, allowGC
>(cx
);
759 inline JSExternalString
*
760 NewGCExternalString(js::ThreadSafeContext
* cx
)
762 return js::gc::AllocateNonObject
<JSExternalString
, js::CanGC
>(cx
);
768 js_NewGCScript(js::ThreadSafeContext
* cx
)
770 return js::gc::AllocateNonObject
<JSScript
, js::CanGC
>(cx
);
773 inline js::LazyScript
*
774 js_NewGCLazyScript(js::ThreadSafeContext
* cx
)
776 return js::gc::AllocateNonObject
<js::LazyScript
, js::CanGC
>(cx
);
780 js_NewGCShape(js::ThreadSafeContext
* cx
)
782 return js::gc::AllocateNonObject
<js::Shape
, js::CanGC
>(cx
);
785 template <js::AllowGC allowGC
>
786 inline js::BaseShape
*
787 js_NewGCBaseShape(js::ThreadSafeContext
* cx
)
789 return js::gc::AllocateNonObject
<js::BaseShape
, allowGC
>(cx
);
792 #endif /* jsgcinlines_h */