/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jsgcinlines_h
#define jsgcinlines_h

#include "jsgc.h"

#include "gc/GCTrace.h"
#include "gc/Zone.h"
#include "vm/ForkJoin.h"

namespace js {

class Shape;

inline Allocator*
ThreadSafeContext::allocator() const
{
    JS_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator == allocator_);
    return allocator_;
}

template <typename T>
inline bool
ThreadSafeContext::isThreadLocal(T thing) const
{
    if (!isForkJoinContext())
        return true;

#ifdef JSGC_FJGENERATIONAL
    ForkJoinContext* cx = static_cast<ForkJoinContext*>(const_cast<ThreadSafeContext*>(this));
    if (cx->nursery().isInsideNewspace(thing))
        return true;
#endif

    // Global invariant
    JS_ASSERT(!IsInsideNursery(thing));

    // The thing is not in the nursery, but is it in the private tenured area?
    if (allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
    {
        // GC should be suppressed in preparation for mutating thread local
        // objects, as we don't want to trip any barriers.
        JS_ASSERT(!thing->zoneFromAnyThread()->needsIncrementalBarrier());
        JS_ASSERT(!thing->runtimeFromAnyThread()->needsIncrementalBarrier());

        return true;
    }

    return false;
}

namespace gc {

static inline AllocKind
GetGCObjectKind(const Class* clasp)
{
    if (clasp == FunctionClassPtr)
        return JSFunction::FinalizeKind;
    uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp);
    if (clasp->flags & JSCLASS_HAS_PRIVATE)
        nslots++;
    return GetGCObjectKind(nslots);
}

#ifdef JSGC_GENERATIONAL
inline bool
ShouldNurseryAllocate(const Nursery& nursery, AllocKind kind, InitialHeap heap)
{
    return nursery.isEnabled() && IsNurseryAllocable(kind) && heap != TenuredHeap;
}
#endif

#ifdef JSGC_FJGENERATIONAL
inline bool
ShouldFJNurseryAllocate(const ForkJoinNursery& nursery, AllocKind kind, InitialHeap heap)
{
    return IsFJNurseryAllocable(kind) && heap != TenuredHeap;
}
#endif

inline JSGCTraceKind
GetGCThingTraceKind(const void* thing)
{
    JS_ASSERT(thing);
    const Cell* cell = static_cast<const Cell*>(thing);
#ifdef JSGC_GENERATIONAL
    if (IsInsideNursery(cell))
        return JSTRACE_OBJECT;
#endif
    return MapAllocToTraceKind(cell->tenuredGetAllocKind());
}

inline void
GCRuntime::poke()
{
    poked = true;

#ifdef JS_GC_ZEAL
    /* Schedule a GC to happen "soon" after a GC poke. */
    if (zealMode == ZealPokeValue)
        nextScheduled = 1;
#endif
}

class ArenaIter
{
    ArenaHeader* aheader;
    ArenaHeader* unsweptHeader;
    ArenaHeader* sweptHeader;

  public:
    ArenaIter() {
        aheader = nullptr;
        unsweptHeader = nullptr;
        sweptHeader = nullptr;
    }

    ArenaIter(JS::Zone* zone, AllocKind kind) {
        init(zone, kind);
    }

    void init(Allocator* allocator, AllocKind kind) {
        aheader = allocator->arenas.getFirstArena(kind);
        unsweptHeader = allocator->arenas.getFirstArenaToSweep(kind);
        sweptHeader = allocator->arenas.getFirstSweptArena(kind);
        if (!unsweptHeader) {
            unsweptHeader = sweptHeader;
            sweptHeader = nullptr;
        }
        if (!aheader) {
            aheader = unsweptHeader;
            unsweptHeader = sweptHeader;
            sweptHeader = nullptr;
        }
    }

    void init(JS::Zone* zone, AllocKind kind) {
        init(&zone->allocator, kind);
    }

    bool done() const {
        return !aheader;
    }

    ArenaHeader* get() const {
        return aheader;
    }

    void next() {
        JS_ASSERT(!done());
        aheader = aheader->next;
        if (!aheader) {
            aheader = unsweptHeader;
            unsweptHeader = sweptHeader;
            sweptHeader = nullptr;
        }
    }
};

class ArenaCellIterImpl
{
    // These three are set in initUnsynchronized().
    size_t firstThingOffset;
    size_t thingSize;
#ifdef DEBUG
    bool isInited;
#endif

    // These three are set in reset() (which is called by init()).
    FreeSpan span;
    uintptr_t thing;
    uintptr_t limit;

    // Upon entry, |thing| points to any thing (free or used) and finds the
    // first used thing, which may be |thing|.
    void moveForwardIfFree() {
        JS_ASSERT(!done());
        JS_ASSERT(thing);
        // Note: if |span| is empty, this test will fail, which is what we want
        // -- |span| being empty means that we're past the end of the last free
        // thing, all the remaining things in the arena are used, and we'll
        // never need to move forward.
        if (thing == span.first) {
            thing = span.last + thingSize;
            span = *span.nextSpan();
        }
    }

  public:
    ArenaCellIterImpl()
      : firstThingOffset(0)     // Squelch
      , thingSize(0)            //   warnings
      , limit(0)
    {
    }

    void initUnsynchronized(ArenaHeader* aheader) {
        AllocKind kind = aheader->getAllocKind();
#ifdef DEBUG
        isInited = true;
#endif
        firstThingOffset = Arena::firstThingOffset(kind);
        thingSize = Arena::thingSize(kind);
        reset(aheader);
    }

    void init(ArenaHeader* aheader) {
#ifdef DEBUG
        AllocKind kind = aheader->getAllocKind();
        JS_ASSERT(aheader->zone->allocator.arenas.isSynchronizedFreeList(kind));
#endif
        initUnsynchronized(aheader);
    }

    // Use this to move from an Arena of a particular kind to another Arena of
    // the same kind.
    void reset(ArenaHeader* aheader) {
        JS_ASSERT(isInited);
        span = aheader->getFirstFreeSpan();
        uintptr_t arenaAddr = aheader->arenaAddress();
        thing = arenaAddr + firstThingOffset;
        limit = arenaAddr + ArenaSize;
        moveForwardIfFree();
    }

    bool done() const {
        return thing == limit;
    }

    Cell* getCell() const {
        JS_ASSERT(!done());
        return reinterpret_cast<Cell*>(thing);
    }

    template<typename T> T* get() const {
        JS_ASSERT(!done());
        return static_cast<T*>(getCell());
    }

    void next() {
        MOZ_ASSERT(!done());
        thing += thingSize;
        if (thing < limit)
            moveForwardIfFree();
    }
};

class ArenaCellIterUnderGC : public ArenaCellIterImpl
{
  public:
    explicit ArenaCellIterUnderGC(ArenaHeader* aheader) {
        JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
        init(aheader);
    }
};

class ArenaCellIterUnderFinalize : public ArenaCellIterImpl
{
  public:
    explicit ArenaCellIterUnderFinalize(ArenaHeader* aheader) {
        initUnsynchronized(aheader);
    }
};

class ZoneCellIterImpl
{
    ArenaIter arenaIter;
    ArenaCellIterImpl cellIter;

  protected:
    ZoneCellIterImpl() {}

    void init(JS::Zone* zone, AllocKind kind) {
        JS_ASSERT(zone->allocator.arenas.isSynchronizedFreeList(kind));
        arenaIter.init(zone, kind);
        if (!arenaIter.done())
            cellIter.init(arenaIter.get());
    }

  public:
    bool done() const {
        return arenaIter.done();
    }

    template<typename T> T* get() const {
        JS_ASSERT(!done());
        return cellIter.get<T>();
    }

    Cell* getCell() const {
        JS_ASSERT(!done());
        return cellIter.getCell();
    }

    void next() {
        JS_ASSERT(!done());
        cellIter.next();
        if (cellIter.done()) {
            JS_ASSERT(!arenaIter.done());
            arenaIter.next();
            if (!arenaIter.done())
                cellIter.reset(arenaIter.get());
        }
    }
};

class ZoneCellIterUnderGC : public ZoneCellIterImpl
{
  public:
    ZoneCellIterUnderGC(JS::Zone* zone, AllocKind kind) {
#ifdef JSGC_GENERATIONAL
        JS_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty());
#endif
        JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
        init(zone, kind);
    }
};

/* In debug builds, assert that no allocation occurs. */
class AutoAssertNoAlloc
{
#ifdef JS_DEBUG
    GCRuntime* gc;

  public:
    AutoAssertNoAlloc() : gc(nullptr) {}
    explicit AutoAssertNoAlloc(JSRuntime* rt) : gc(nullptr) {
        disallowAlloc(rt);
    }
    void disallowAlloc(JSRuntime* rt) {
        JS_ASSERT(!gc);
        gc = &rt->gc;
        gc->disallowAlloc();
    }
    ~AutoAssertNoAlloc() {
        if (gc)
            gc->allowAlloc();
    }
#else
  public:
    AutoAssertNoAlloc() {}
    explicit AutoAssertNoAlloc(JSRuntime*) {}
    void disallowAlloc(JSRuntime* rt) {}
#endif
};

class ZoneCellIter : public ZoneCellIterImpl
{
    AutoAssertNoAlloc noAlloc;
    ArenaLists* lists;
    AllocKind kind;

  public:
    ZoneCellIter(JS::Zone* zone, AllocKind kind)
      : lists(&zone->allocator.arenas),
        kind(kind)
    {
        /*
         * We have a single-threaded runtime, so there's no need to protect
         * against other threads iterating or allocating. However, we do have
         * background finalization; we have to wait for this to finish if it's
         * currently active.
         */
        if (IsBackgroundFinalized(kind) &&
            zone->allocator.arenas.needBackgroundFinalizeWait(kind))
        {
            zone->runtimeFromMainThread()->gc.waitBackgroundSweepEnd();
        }

#ifdef JSGC_GENERATIONAL
        /* Evict the nursery before iterating so we can see all things. */
        JSRuntime* rt = zone->runtimeFromMainThread();
        rt->gc.evictNursery();
#endif

        if (lists->isSynchronizedFreeList(kind)) {
            lists = nullptr;
        } else {
            JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
            lists->copyFreeListToArena(kind);
        }

        /* Assert that no GCs can occur while a ZoneCellIter is live. */
        noAlloc.disallowAlloc(zone->runtimeFromMainThread());

        init(zone, kind);
    }

    ~ZoneCellIter() {
        if (lists)
            lists->clearFreeListInArena(kind);
    }
};
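
// Illustrative sketch (not part of the original header): typical main-thread
// use of ZoneCellIter to walk every live cell of one kind in a zone. |zone|
// is an assumed JS::Zone* and the alloc kind is chosen arbitrarily; as the
// constructor above describes, the iterator evicts the nursery and forbids
// allocation for its lifetime.
//
//     for (ZoneCellIter iter(zone, FINALIZE_OBJECT0); !iter.done(); iter.next()) {
//         JSObject* obj = iter.get<JSObject>();
//         // ... inspect |obj|; do not allocate while the iterator is live ...
//     }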

class GCZonesIter
{
  private:
    ZonesIter zone;

  public:
    explicit GCZonesIter(JSRuntime* rt) : zone(rt, WithAtoms) {
        if (!zone->isCollecting())
            next();
    }

    bool done() const { return zone.done(); }

    void next() {
        JS_ASSERT(!done());
        do {
            zone.next();
        } while (!zone.done() && !zone->isCollecting());
    }

    JS::Zone* get() const {
        JS_ASSERT(!done());
        return zone;
    }

    operator JS::Zone*() const { return get(); }
    JS::Zone* operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZonesIter> GCCompartmentsIter;

/* Iterates over all zones in the current zone group. */
class GCZoneGroupIter {
  private:
    JS::Zone* current;

  public:
    explicit GCZoneGroupIter(JSRuntime* rt) {
        JS_ASSERT(rt->isHeapBusy());
        current = rt->gc.getCurrentZoneGroup();
    }

    bool done() const { return !current; }

    void next() {
        JS_ASSERT(!done());
        current = current->nextNodeInGroup();
    }

    JS::Zone* get() const {
        JS_ASSERT(!done());
        return current;
    }

    operator JS::Zone*() const { return get(); }
    JS::Zone* operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
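
// Illustrative sketch (not part of the original header): walking the zones of
// the zone group currently being processed, assuming |rt| is a JSRuntime* and
// the heap is busy (a collection is in progress), as the constructor asserts.
//
//     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
//         // |zone| converts implicitly to JS::Zone*; only zones in the
//         // current group are visited. GCCompartmentGroupIter walks their
//         // compartments the same way.
//     }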

#ifdef JSGC_GENERATIONAL
/*
 * Attempt to allocate a new GC thing out of the nursery. If there is not enough
 * room in the nursery or there is an OOM, this method will return nullptr.
 */
template <AllowGC allowGC>
inline JSObject*
TryNewNurseryObject(JSContext* cx, size_t thingSize, size_t nDynamicSlots)
{
    JS_ASSERT(!IsAtomsCompartment(cx->compartment()));
    JSRuntime* rt = cx->runtime();
    Nursery& nursery = rt->gc.nursery;
    JSObject* obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
    if (obj)
        return obj;
    if (allowGC && !rt->mainThread.suppressGC) {
        cx->minorGC(JS::gcreason::OUT_OF_NURSERY);

        /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
        if (nursery.isEnabled()) {
            JSObject* obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
            JS_ASSERT(obj);
            return obj;
        }
    }
    return nullptr;
}
#endif /* JSGC_GENERATIONAL */

#ifdef JSGC_FJGENERATIONAL
template <AllowGC allowGC>
inline JSObject*
TryNewNurseryObject(ForkJoinContext* cx, size_t thingSize, size_t nDynamicSlots)
{
    ForkJoinNursery& nursery = cx->nursery();
    bool tooLarge = false;
    JSObject* obj = nursery.allocateObject(thingSize, nDynamicSlots, tooLarge);
    if (obj)
        return obj;

    if (!tooLarge && allowGC) {
        nursery.minorGC();
        obj = nursery.allocateObject(thingSize, nDynamicSlots, tooLarge);
        if (obj)
            return obj;
    }

    return nullptr;
}
#endif /* JSGC_FJGENERATIONAL */

static inline bool
PossiblyFail()
{
    JS_OOM_POSSIBLY_FAIL();
    return true;
}

template <AllowGC allowGC>
static inline bool
CheckAllocatorState(ThreadSafeContext* cx, AllocKind kind)
{
    if (!cx->isJSContext())
        return true;

    JSContext* ncx = cx->asJSContext();
    JSRuntime* rt = ncx->runtime();
#if defined(JS_GC_ZEAL) || defined(DEBUG)
    JS_ASSERT_IF(rt->isAtomsCompartment(ncx->compartment()),
                 kind == FINALIZE_STRING ||
                 kind == FINALIZE_FAT_INLINE_STRING ||
                 kind == FINALIZE_SYMBOL ||
                 kind == FINALIZE_JITCODE);
    JS_ASSERT(!rt->isHeapBusy());
    JS_ASSERT(rt->gc.isAllocAllowed());
#endif

    // Crash if we perform a GC action when it is not safe.
    if (allowGC && !rt->mainThread.suppressGC)
        JS::AutoAssertOnGC::VerifyIsSafeToGC(rt);

    // For testing out of memory conditions
    if (!PossiblyFail()) {
        js_ReportOutOfMemory(cx);
        return false;
    }

    if (allowGC) {
#ifdef JS_GC_ZEAL
        if (rt->gc.needZealousGC())
            rt->gc.runDebugGC();
#endif

        if (rt->interrupt) {
            // Invoking the interrupt callback can fail and we can't usefully
            // handle that here. Just check in case we need to collect instead.
            ncx->gcIfNeeded();
        }
    }

    return true;
}

template <typename T>
static inline void
CheckIncrementalZoneState(ThreadSafeContext* cx, T* t)
{
#ifdef DEBUG
    if (!cx->isJSContext())
        return;

    Zone* zone = cx->asJSContext()->zone();
    JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
                 t->arenaHeader()->allocatedDuringIncremental);
#endif
}

/*
 * Allocate a new GC thing. After a successful allocation the caller must
 * fully initialize the thing before calling any function that can potentially
 * trigger GC. This will ensure that GC tracing never sees junk values stored
 * in the partially initialized thing.
 */

template <AllowGC allowGC>
inline JSObject*
AllocateObject(ThreadSafeContext* cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap)
{
    size_t thingSize = Arena::thingSize(kind);

    JS_ASSERT(thingSize == Arena::thingSize(kind));
    JS_ASSERT(thingSize >= sizeof(JSObject));
    static_assert(sizeof(JSObject) >= CellSize,
                  "All allocations must be at least the allocator-imposed minimum size.");

    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

#ifdef JSGC_GENERATIONAL
    if (cx->isJSContext() &&
        ShouldNurseryAllocate(cx->asJSContext()->nursery(), kind, heap)) {
        JSObject* obj = TryNewNurseryObject<allowGC>(cx->asJSContext(), thingSize, nDynamicSlots);
        if (obj)
            return obj;
    }
#endif
#ifdef JSGC_FJGENERATIONAL
    if (cx->isForkJoinContext() &&
        ShouldFJNurseryAllocate(cx->asForkJoinContext()->nursery(), kind, heap))
    {
        JSObject* obj =
            TryNewNurseryObject<allowGC>(cx->asForkJoinContext(), thingSize, nDynamicSlots);
        if (obj)
            return obj;
    }
#endif

    HeapSlot* slots = nullptr;
    if (nDynamicSlots) {
        if (cx->isExclusiveContext())
            slots = cx->asExclusiveContext()->zone()->pod_malloc<HeapSlot>(nDynamicSlots);
        else
            slots = js_pod_malloc<HeapSlot>(nDynamicSlots);
        if (MOZ_UNLIKELY(!slots))
            return nullptr;
        js::Debug_SetSlotRangeToCrashOnTouch(slots, nDynamicSlots);
    }

    JSObject* obj = static_cast<JSObject*>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!obj)
        obj = static_cast<JSObject*>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    if (obj)
        obj->setInitialSlots(slots);
    else
        js_free(slots);

    CheckIncrementalZoneState(cx, obj);
    js::gc::TraceTenuredAlloc(obj, kind);
    return obj;
}
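
// Illustrative sketch (not part of the original header) of the initialization
// contract stated in the comment above AllocateObject: a hypothetical caller
// must fully initialize the new thing before anything that can trigger a GC.
//
//     JSObject* obj = AllocateObject<CanGC>(cx, kind, 0, heap);
//     if (!obj)
//         return nullptr;
//     // Set up the object's shape, type and slots *before* any call that may
//     // allocate or otherwise run a GC, so tracing never sees junk values.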

template <typename T, AllowGC allowGC>
inline T*
AllocateNonObject(ThreadSafeContext* cx)
{
    static_assert(sizeof(T) >= CellSize,
                  "All allocations must be at least the allocator-imposed minimum size.");

    AllocKind kind = MapTypeToFinalizeKind<T>::kind;
    size_t thingSize = sizeof(T);

    JS_ASSERT(thingSize == Arena::thingSize(kind));
    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

    T* t = static_cast<T*>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!t)
        t = static_cast<T*>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    CheckIncrementalZoneState(cx, t);
    js::gc::TraceTenuredAlloc(t, kind);
    return t;
}

/*
 * When allocating for initialization from a cached object copy, we will
 * potentially destroy the cache entry we want to copy if we allow GC. On the
 * other hand, since these allocations are extremely common, we don't want to
 * delay GC from these allocation sites. Instead we allow the GC, but still
 * fail the allocation, forcing the non-cached path.
 *
 * Observe this won't be used for ForkJoin allocation, as it takes a JSContext*
 * argument.
 */
template <AllowGC allowGC>
inline JSObject*
AllocateObjectForCacheHit(JSContext* cx, AllocKind kind, InitialHeap heap)
{
#ifdef JSGC_GENERATIONAL
    if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
        size_t thingSize = Arena::thingSize(kind);

        JS_ASSERT(thingSize == Arena::thingSize(kind));
        if (!CheckAllocatorState<NoGC>(cx, kind))
            return nullptr;

        JSObject* obj = TryNewNurseryObject<NoGC>(cx, thingSize, 0);
        if (!obj && allowGC) {
            cx->minorGC(JS::gcreason::OUT_OF_NURSERY);
            return nullptr;
        }
        return obj;
    }
#endif

    JSObject* obj = AllocateObject<NoGC>(cx, kind, 0, heap);
    if (!obj && allowGC) {
        cx->runtime()->gc.maybeGC(cx->zone());
        return nullptr;
    }

    return obj;
}
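
// Illustrative sketch (not part of the original header) of the fallback
// pattern the comment above describes: when the cache-hit allocation fails,
// a GC may have run, so a hypothetical caller retries through the ordinary,
// non-cached allocation path rather than reusing the (possibly stale) cache
// entry.
//
//     JSObject* obj = AllocateObjectForCacheHit<CanGC>(cx, kind, heap);
//     if (!obj) {
//         // The cache entry may have been invalidated; take the slow path.
//         return NewObjectSlowPath(cx);   // hypothetical helper
//     }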

inline bool
IsInsideGGCNursery(const js::gc::Cell* cell)
{
#ifdef JSGC_GENERATIONAL
    if (!cell)
        return false;
    uintptr_t addr = uintptr_t(cell);
    addr &= ~js::gc::ChunkMask;
    addr |= js::gc::ChunkLocationOffset;
    uint32_t location = *reinterpret_cast<uint32_t*>(addr);
    JS_ASSERT(location != 0);
    return location & js::gc::ChunkLocationBitNursery;
#else
    return false;
#endif
}

} /* namespace gc */

template <js::AllowGC allowGC>
inline JSObject*
NewGCObject(js::ThreadSafeContext* cx, js::gc::AllocKind kind, size_t nDynamicSlots, js::gc::InitialHeap heap)
{
    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
    return js::gc::AllocateObject<allowGC>(cx, kind, nDynamicSlots, heap);
}

template <js::AllowGC allowGC>
inline jit::JitCode*
NewJitCode(js::ThreadSafeContext* cx)
{
    return gc::AllocateNonObject<jit::JitCode, allowGC>(cx);
}

inline
types::TypeObject*
NewTypeObject(js::ThreadSafeContext* cx)
{
    return gc::AllocateNonObject<types::TypeObject, js::CanGC>(cx);
}

template <js::AllowGC allowGC>
inline JSString*
NewGCString(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<JSString, allowGC>(cx);
}

template <js::AllowGC allowGC>
inline JSFatInlineString*
NewGCFatInlineString(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<JSFatInlineString, allowGC>(cx);
}

inline JSExternalString*
NewGCExternalString(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<JSExternalString, js::CanGC>(cx);
}

} /* namespace js */

inline JSScript*
js_NewGCScript(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<JSScript, js::CanGC>(cx);
}

inline js::LazyScript*
js_NewGCLazyScript(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<js::LazyScript, js::CanGC>(cx);
}

inline js::Shape*
js_NewGCShape(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<js::Shape, js::CanGC>(cx);
}

template <js::AllowGC allowGC>
inline js::BaseShape*
js_NewGCBaseShape(js::ThreadSafeContext* cx)
{
    return js::gc::AllocateNonObject<js::BaseShape, allowGC>(cx);
}

#endif /* jsgcinlines_h */