/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
 * using malloc. It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
#include <stdlib.h>     /* for free */
#include <math.h>
#include <string.h>     /* for memset used when DEBUG */
#include "jstypes.h"
#include "jsstdint.h"
#include "jsutil.h"     /* Added by JSIFY */
#include "jshash.h"     /* Added by JSIFY */
#include "jsbit.h"
#include "jsclist.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsdbgapi.h"
#include "jsexn.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcchunk.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
#include "jsparse.h"
#include "jsproxy.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#include "jstask.h"
#include "jstracer.h"

#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "jsdtracef.h"
#include "jscntxtinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"

#ifdef MOZ_VALGRIND
# define JS_VALGRIND
#endif
#ifdef JS_VALGRIND
# include <valgrind/memcheck.h>
#endif
using namespace js;

/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_XML    == 2);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);

/*
 * Check consistency of external string constants from JSFinalizeGCThingKind.
 */
JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 ==
                 JS_EXTERNAL_STRING_LIMIT - 1);
/*
 * GC memory is allocated in chunks. The size of each chunk is GC_CHUNK_SIZE.
 * The chunk contains an array of GC arenas holding GC things, an array of
 * the mark bitmaps for each arena, an array of JSGCArenaInfo arena
 * descriptors, an array of JSGCMarkingDelay descriptors, the GCChunkInfo
 * chunk descriptor and a bitmap indicating free arenas in the chunk. The
 * following picture demonstrates the layout:
 *
 * +--------+--------------+-------+--------+------------+-----------------+
 * | arenas | mark bitmaps | infos | delays | chunk info | free arena bits |
 * +--------+--------------+-------+--------+------------+-----------------+
 *
 * To ensure fast O(1) lookup of mark bits and arena descriptors each chunk
 * is allocated on a GC_CHUNK_SIZE boundary. This way a simple mask and shift
 * operation gives an arena index into the mark and JSGCArenaInfo arrays.
 *
 * All chunks that have at least one free arena are kept in the
 * JSRuntime::gcFreeArenaChunks vector; the per-chunk GCChunkInfo stores the
 * number of free arenas, and the chunk's free-arena bitmap records which
 * arenas are free.
 *
 * A GC arena contains GC_ARENA_SIZE bytes aligned on a GC_ARENA_SIZE
 * boundary and holds things of the same size and kind. The size of each
 * thing in the arena must be divisible by GC_CELL_SIZE, the minimal
 * allocation unit, and the size of the mark bitmap is fixed and is
 * independent of the thing's size with one bit for each GC_CELL_SIZE bytes.
 * For thing sizes that exceed GC_CELL_SIZE this implies that we waste space
 * in the mark bitmap. The advantage is that we can find the mark bit for the
 * thing using just integer shifts avoiding an expensive integer division. We
 * trade some space for speed here.
 *
 * The number of arenas in the chunk is given by GC_ARENAS_PER_CHUNK. We find
 * that number as follows. Suppose the chunk contains n arenas. Together with
 * the word-aligned free arena bitmap and GCChunkInfo they should fit into
 * the chunk. Hence GC_ARENAS_PER_CHUNK or n_max is the maximum value of n
 * for which the following holds:
 *
 *   n*s + ceil(n/B) <= M                                               (1)
 *
 * where "/" denotes normal real division,
 *       ceil(r) gives the least integer not smaller than the number r,
 *       s is the total number of words in a GC arena, its mark bitmap, its
 *       JSGCArenaInfo and its JSGCMarkingDelay, i.e. GC_ARENA_ALL_WORDS,
 *       B is the number of bits per word or B == JS_BITS_PER_WORD,
 *       M is the number of words in the chunk without GCChunkInfo or
 *       M == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword).
 *
 * We rewrite the inequality as
 *
 *   n*B*s/B + ceil(n/B) <= M,
 *   ceil(n*B*s/B + n/B) <= M,
 *   ceil(n*(B*s + 1)/B) <= M                                           (2)
 *
 * We define a helper function e(n, s, B),
 *
 *   e(n, s, B) := ceil(n*(B*s + 1)/B) - n*(B*s + 1)/B,
 *   0 <= e(n, s, B) < 1.
 *
 * It gives:
 *
 *   n*(B*s + 1)/B + e(n, s, B) <= M,
 *   n + e*B/(B*s + 1) <= M*B/(B*s + 1)
 *
 * We apply the floor function to both sides of the last inequality, where
 * floor(r) gives the biggest integer not greater than r. As a consequence we
 * have:
 *
 *   floor(n + e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
 *   n + floor(e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
 *   n <= floor(M*B/(B*s + 1)),                                         (3)
 *
 * where floor(e*B/(B*s + 1)) is zero as e*B/(B*s + 1) < B/(B*s + 1) < 1.
 * Thus any n that satisfies the original constraint (1) or its equivalent
 * (2) must also satisfy (3). That is, we got an upper estimate for the
 * maximum value of n. Let's show that this upper estimate,
 *
 *   floor(M*B/(B*s + 1)),                                              (4)
 *
 * also satisfies (1) and, as such, gives the required maximum value.
 * Substituting it into (2) gives:
 *
 *   ceil(floor(M*B/(B*s + 1))*(B*s + 1)/B) == ceil(floor(M/X)*X)
 *
 * where X == (B*s + 1)/B > 1. But then floor(M/X)*X <= M/X*X == M and
 *
 *   ceil(floor(M/X)*X) <= ceil(M) == M.
 *
 * Thus the value of (4) gives the maximum n satisfying (1).
 *
 * For the final result we observe that in (4)
 *
 *   M*B == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword) *
 *          JS_BITS_PER_WORD
 *       == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE
 *
 * since both GC_CHUNK_SIZE and sizeof(GCChunkInfo) are multiples of the
 * word size.
 */
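/*
 * A worked instance of (4), with illustrative numbers only (the real values
 * depend on the target's GC_CHUNK_SIZE and struct sizes). Assume a 32-bit
 * target (B == 32), a hypothetical 64 KiB chunk, per-arena overhead of a
 * 64-byte mark bitmap, a 12-byte JSGCArenaInfo and an 8-byte
 * JSGCMarkingDelay, and sizeof(GCChunkInfo) == 12:
 *
 *   s == (4096 + 64 + 12 + 8) / 4 == 1045 words,
 *   M == (65536 - 12) / 4 == 16381 words,
 *   n_max == floor(M*B / (B*s + 1)) == floor(16381*32 / 33441) == 15.
 *
 * Check against (1): 15 arenas need 15*1045 + ceil(15/32) == 15676 words,
 * which fits in M == 16381, while 16 arenas would need 16721 > 16381.
 */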
const jsuword GC_ARENA_SHIFT = 12;
const jsuword GC_ARENA_MASK = JS_BITMASK(GC_ARENA_SHIFT);
const jsuword GC_ARENA_SIZE = JS_BIT(GC_ARENA_SHIFT);

const jsuword GC_MAX_CHUNK_AGE = 3;

const size_t GC_CELL_SHIFT = 3;
const size_t GC_CELL_SIZE = size_t(1) << GC_CELL_SHIFT;
const size_t GC_CELL_MASK = GC_CELL_SIZE - 1;

const size_t BITS_PER_GC_CELL = GC_CELL_SIZE * JS_BITS_PER_BYTE;

const size_t GC_CELLS_PER_ARENA = size_t(1) << (GC_ARENA_SHIFT - GC_CELL_SHIFT);
const size_t GC_MARK_BITMAP_SIZE = GC_CELLS_PER_ARENA / JS_BITS_PER_BYTE;
const size_t GC_MARK_BITMAP_WORDS = GC_CELLS_PER_ARENA / JS_BITS_PER_WORD;

JS_STATIC_ASSERT(sizeof(jsbitmap) == sizeof(jsuword));

JS_STATIC_ASSERT(sizeof(JSString) % GC_CELL_SIZE == 0);
JS_STATIC_ASSERT(sizeof(JSObject) % GC_CELL_SIZE == 0);
JS_STATIC_ASSERT(sizeof(JSFunction) % GC_CELL_SIZE == 0);
#if JS_HAS_XML_SUPPORT
JS_STATIC_ASSERT(sizeof(JSXML) % GC_CELL_SIZE == 0);
#endif
#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif
struct JSGCArenaInfo {
    /*
     * Allocation list for the arena.
     */
    JSGCArenaList   *list;

    /*
     * Pointer to the previous arena in a linked list. The arena can either
     * belong to one of JSRuntime.gcArenaList lists or, when it does not
     * have any allocated GC things, to the DEBUG-only list of released
     * arenas with head stored in JSRuntime.gcEmptyArenaList.
     */
    JSGCArena       *prev;

    JSGCThing       *freeList;

    static inline JSGCArenaInfo *fromGCThing(void *thing);
};
/* See comments before ThingsPerUnmarkedBit below. */
struct JSGCMarkingDelay {
    JSGCArena       *link;
    jsuword         unmarkedChildren;
};

struct JSGCArena {
    uint8 data[GC_ARENA_SIZE];

    void checkAddress() const {
        JS_ASSERT(!(reinterpret_cast<jsuword>(this) & GC_ARENA_MASK));
    }

    jsuword toPageStart() const {
        checkAddress();
        return reinterpret_cast<jsuword>(this);
    }

    static inline JSGCArena *fromGCThing(void *thing);

    static inline JSGCArena *fromChunkAndIndex(jsuword chunk, size_t index);

    jsuword getChunk() {
        return toPageStart() & ~GC_CHUNK_MASK;
    }

    jsuword getIndex() {
        return (toPageStart() & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
    }

    inline JSGCArenaInfo *getInfo();

    inline JSGCMarkingDelay *getMarkingDelay();

    inline jsbitmap *getMarkBitmap();
};
namespace js {

struct GCChunkInfo {
    JSRuntime       *runtime;
    size_t          numFreeArenas;
    size_t          gcChunkAge;

    inline void init(JSRuntime *rt);

    inline jsbitmap *getFreeArenaBitmap();

    inline jsuword getChunk();

    inline void clearMarkBitmap();

    static inline GCChunkInfo *fromChunk(jsuword chunk);
};

} /* namespace js */
/* Check that all chunk arrays are at least word-aligned. */
JS_STATIC_ASSERT(sizeof(JSGCArena) == GC_ARENA_SIZE);
JS_STATIC_ASSERT(GC_MARK_BITMAP_WORDS % sizeof(jsuword) == 0);
JS_STATIC_ASSERT(sizeof(JSGCArenaInfo) % sizeof(jsuword) == 0);
JS_STATIC_ASSERT(sizeof(JSGCMarkingDelay) % sizeof(jsuword) == 0);

const size_t GC_ARENA_ALL_WORDS = (GC_ARENA_SIZE + GC_MARK_BITMAP_SIZE +
                                   sizeof(JSGCArenaInfo) +
                                   sizeof(JSGCMarkingDelay)) / sizeof(jsuword);

/* The value according to (4) above. */
const size_t GC_ARENAS_PER_CHUNK =
    (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE /
    (JS_BITS_PER_WORD * GC_ARENA_ALL_WORDS + 1);

const size_t GC_FREE_ARENA_BITMAP_WORDS = (GC_ARENAS_PER_CHUNK +
                                           JS_BITS_PER_WORD - 1) /
                                          JS_BITS_PER_WORD;

const size_t GC_FREE_ARENA_BITMAP_SIZE = GC_FREE_ARENA_BITMAP_WORDS *
                                         sizeof(jsuword);

/* Check that GC_ARENAS_PER_CHUNK indeed maximizes (1). */
JS_STATIC_ASSERT(GC_ARENAS_PER_CHUNK * GC_ARENA_ALL_WORDS +
                 GC_FREE_ARENA_BITMAP_WORDS <=
                 (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword));

JS_STATIC_ASSERT((GC_ARENAS_PER_CHUNK + 1) * GC_ARENA_ALL_WORDS +
                 (GC_ARENAS_PER_CHUNK + 1 + JS_BITS_PER_WORD - 1) /
                 JS_BITS_PER_WORD >
                 (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword));
const size_t GC_MARK_BITMAP_ARRAY_OFFSET = GC_ARENAS_PER_CHUNK
                                           << GC_ARENA_SHIFT;

const size_t GC_ARENA_INFO_ARRAY_OFFSET =
    GC_MARK_BITMAP_ARRAY_OFFSET + GC_MARK_BITMAP_SIZE * GC_ARENAS_PER_CHUNK;

const size_t GC_MARKING_DELAY_ARRAY_OFFSET =
    GC_ARENA_INFO_ARRAY_OFFSET + sizeof(JSGCArenaInfo) * GC_ARENAS_PER_CHUNK;

const size_t GC_CHUNK_INFO_OFFSET = GC_CHUNK_SIZE - GC_FREE_ARENA_BITMAP_SIZE -
                                    sizeof(GCChunkInfo);
inline jsuword
GCChunkInfo::getChunk() {
    jsuword addr = reinterpret_cast<jsuword>(this);
    JS_ASSERT((addr & GC_CHUNK_MASK) == GC_CHUNK_INFO_OFFSET);
    jsuword chunk = addr & ~GC_CHUNK_MASK;
    return chunk;
}

inline void
GCChunkInfo::clearMarkBitmap()
{
    PodZero(reinterpret_cast<jsbitmap *>(getChunk() + GC_MARK_BITMAP_ARRAY_OFFSET),
            GC_MARK_BITMAP_WORDS * GC_ARENAS_PER_CHUNK);
}

/* static */
inline GCChunkInfo *
GCChunkInfo::fromChunk(jsuword chunk) {
    JS_ASSERT(!(chunk & GC_CHUNK_MASK));
    jsuword addr = chunk | GC_CHUNK_INFO_OFFSET;
    return reinterpret_cast<GCChunkInfo *>(addr);
}

inline jsbitmap *
GCChunkInfo::getFreeArenaBitmap()
{
    jsuword addr = reinterpret_cast<jsuword>(this);
    return reinterpret_cast<jsbitmap *>(addr + sizeof(GCChunkInfo));
}

inline void
GCChunkInfo::init(JSRuntime *rt)
{
    runtime = rt;
    numFreeArenas = GC_ARENAS_PER_CHUNK;
    gcChunkAge = 0;

    /*
     * For simplicity we set all bits to 1 including the high bits in the
     * last word that correspond to nonexistent arenas. This is fine since
     * the allocation code scans the bitmap words from the lowest to the
     * highest bits and checks numFreeArenas before doing the search.
     */
    memset(getFreeArenaBitmap(), 0xFF, GC_FREE_ARENA_BITMAP_SIZE);
}
inline void
CheckValidGCThingPtr(void *thing)
{
#ifdef DEBUG
    JS_ASSERT(!JSString::isStatic(thing));
    jsuword addr = reinterpret_cast<jsuword>(thing);
    JS_ASSERT(!(addr & GC_CELL_MASK));
    JS_ASSERT((addr & GC_CHUNK_MASK) < GC_MARK_BITMAP_ARRAY_OFFSET);
#endif
}

/* static */
inline JSGCArenaInfo *
JSGCArenaInfo::fromGCThing(void *thing)
{
    CheckValidGCThingPtr(thing);
    jsuword addr = reinterpret_cast<jsuword>(thing);
    jsuword chunk = addr & ~GC_CHUNK_MASK;
    JSGCArenaInfo *array =
        reinterpret_cast<JSGCArenaInfo *>(chunk | GC_ARENA_INFO_ARRAY_OFFSET);
    size_t arenaIndex = (addr & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
    return array + arenaIndex;
}

/* static */
inline JSGCArena *
JSGCArena::fromGCThing(void *thing)
{
    CheckValidGCThingPtr(thing);
    jsuword addr = reinterpret_cast<jsuword>(thing);
    return reinterpret_cast<JSGCArena *>(addr & ~GC_ARENA_MASK);
}

/* static */
inline JSGCArena *
JSGCArena::fromChunkAndIndex(jsuword chunk, size_t index) {
    JS_ASSERT(chunk);
    JS_ASSERT(!(chunk & GC_CHUNK_MASK));
    JS_ASSERT(index < GC_ARENAS_PER_CHUNK);
    return reinterpret_cast<JSGCArena *>(chunk | (index << GC_ARENA_SHIFT));
}

inline JSGCArenaInfo *
JSGCArena::getInfo()
{
    jsuword chunk = getChunk();
    jsuword index = getIndex();
    jsuword offset = GC_ARENA_INFO_ARRAY_OFFSET + index * sizeof(JSGCArenaInfo);
    return reinterpret_cast<JSGCArenaInfo *>(chunk | offset);
}

inline JSGCMarkingDelay *
JSGCArena::getMarkingDelay()
{
    jsuword chunk = getChunk();
    jsuword index = getIndex();
    jsuword offset = GC_MARKING_DELAY_ARRAY_OFFSET +
                     index * sizeof(JSGCMarkingDelay);
    return reinterpret_cast<JSGCMarkingDelay *>(chunk | offset);
}

inline jsbitmap *
JSGCArena::getMarkBitmap()
{
    jsuword chunk = getChunk();
    jsuword index = getIndex();
    jsuword offset = GC_MARK_BITMAP_ARRAY_OFFSET + index * GC_MARK_BITMAP_SIZE;
    return reinterpret_cast<jsbitmap *>(chunk | offset);
}
/*
 * Helpers for GC-thing operations.
 */

inline jsbitmap *
GetGCThingMarkBit(void *thing, size_t &bitIndex)
{
    CheckValidGCThingPtr(thing);
    jsuword addr = reinterpret_cast<jsuword>(thing);
    jsuword chunk = addr & ~GC_CHUNK_MASK;
    bitIndex = (addr & GC_CHUNK_MASK) >> GC_CELL_SHIFT;
    return reinterpret_cast<jsbitmap *>(chunk | GC_MARK_BITMAP_ARRAY_OFFSET);
}
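/*
 * For illustration, how a thing address decomposes under the mask-and-shift
 * scheme implemented above, given GC_ARENA_SHIFT == 12 and
 * GC_CELL_SHIFT == 3:
 *
 *   chunk          == addr & ~GC_CHUNK_MASK
 *   arena index    == (addr & GC_CHUNK_MASK) >> GC_ARENA_SHIFT
 *   mark bit index == (addr & GC_CHUNK_MASK) >> GC_CELL_SHIFT
 *
 * So a hypothetical thing at chunk offset 0x2040 lives in arena 2 and its
 * mark bit is bit 0x408 of the chunk's mark bitmap array, all found with
 * shifts and masks and no division.
 */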
/*
 * Live objects are marked black. How many additional colors are available
 * depends on the size of the GCThing.
 */
static const uint32 BLACK = 0;

static void
AssertValidColor(void *thing, uint32 color)
{
    JS_ASSERT_IF(color, color < JSGCArenaInfo::fromGCThing(thing)->list->thingSize / GC_CELL_SIZE);
}

inline bool
IsMarkedGCThing(void *thing, uint32 color = BLACK)
{
    AssertValidColor(thing, color);

    size_t index;
    jsbitmap *markBitmap = GetGCThingMarkBit(thing, index);
    return !!JS_TEST_BIT(markBitmap, index + color);
}

/*
 * The GC always marks live objects BLACK. If color is not BLACK, we also
 * mark the object with that additional color.
 */
inline bool
MarkIfUnmarkedGCThing(void *thing, uint32 color = BLACK)
{
    AssertValidColor(thing, color);

    size_t index;
    jsbitmap *markBitmap = GetGCThingMarkBit(thing, index);
    if (JS_TEST_BIT(markBitmap, index))
        return false;
    JS_SET_BIT(markBitmap, index);
    if (color != BLACK)
        JS_SET_BIT(markBitmap, index + color);
    return true;
}
size_t
ThingsPerArena(size_t thingSize)
{
    JS_ASSERT(!(thingSize & GC_CELL_MASK));
    JS_ASSERT(thingSize <= GC_ARENA_SIZE);
    return GC_ARENA_SIZE / thingSize;
}

/* Can only be called if thing belongs to an arena where a->list is not null. */
inline size_t
GCThingToArenaIndex(void *thing)
{
    CheckValidGCThingPtr(thing);
    jsuword addr = reinterpret_cast<jsuword>(thing);
    jsuword offsetInArena = addr & GC_ARENA_MASK;
    JSGCArenaInfo *a = JSGCArenaInfo::fromGCThing(thing);
    JS_ASSERT(a->list);
    JS_ASSERT(offsetInArena % a->list->thingSize == 0);
    return offsetInArena / a->list->thingSize;
}

/* Applicable only to arenas where a->list is not null. */
inline uint8 *
GCArenaIndexToThing(JSGCArena *a, JSGCArenaInfo *ainfo, size_t index)
{
    JS_ASSERT(a->getInfo() == ainfo);

    /*
     * We use "<=" and not "<" in the assert so index can mean the limit.
     * For the same reason we use "+", not "|", when finding the thing
     * address as the limit address can start at the next arena.
     */
    JS_ASSERT(index <= ThingsPerArena(ainfo->list->thingSize));
    jsuword offsetInArena = index * ainfo->list->thingSize;
    return reinterpret_cast<uint8 *>(a->toPageStart() + offsetInArena);
}
/*
 * The private JSGCThing struct, which describes a GC free-list element.
 */
struct JSGCThing {
    JSGCThing   *link;
};

static inline JSGCThing *
MakeNewArenaFreeList(JSGCArena *a, size_t thingSize)
{
    jsuword thingsStart = a->toPageStart();
    jsuword lastThingMinAddr = thingsStart + GC_ARENA_SIZE - thingSize * 2 + 1;
    jsuword thingPtr = thingsStart;
    do {
        jsuword nextPtr = thingPtr + thingSize;
        JS_ASSERT((nextPtr & GC_ARENA_MASK) + thingSize <= GC_ARENA_SIZE);
        JSGCThing *thing = reinterpret_cast<JSGCThing *>(thingPtr);
        thing->link = reinterpret_cast<JSGCThing *>(nextPtr);
        thingPtr = nextPtr;
    } while (thingPtr < lastThingMinAddr);

    JSGCThing *lastThing = reinterpret_cast<JSGCThing *>(thingPtr);
    lastThing->link = NULL;
    return reinterpret_cast<JSGCThing *>(thingsStart);
}
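/*
 * The free list built above, sketched for a hypothetical arena holding four
 * things: each thing's first word points at the next-higher thing and the
 * last link is NULL, so the list hands out things in ascending address
 * order (the invariant that CheckGCFreeListLink below asserts):
 *
 *   thing 0 -> thing 1 -> thing 2 -> thing 3 -> NULL
 */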
inline jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}

inline void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
static JSGCArena *
NewGCArena(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    if (!JS_THREAD_DATA(cx)->waiveGCQuota && rt->gcBytes >= rt->gcMaxBytes) {
        /*
         * FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
         * just pretend we are out of memory which will throw us off trace and
         * we will re-try this code path from the interpreter.
         */
        if (!JS_ON_TRACE(cx))
            return NULL;
        js_TriggerGC(cx, true);
    }

    if (rt->gcFreeArenaChunks.empty()) {
#ifdef DEBUG
        for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
            JS_ASSERT(GCChunkInfo::fromChunk(r.front())->numFreeArenas == 0);
#endif
        /*
         * Make sure that after the GC we can append all allocated chunks to
         * gcFreeArenaChunks.
         *
         * FIXME bug 583729 - use the same for the rt->gcChunkSet.
         */
        if (!rt->gcFreeArenaChunks.reserve(rt->gcChunkSet.count() + 1))
            return NULL;
        jsuword chunk = GetGCChunk(rt);
        if (!chunk)
            return NULL;
        GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);
        ci->init(rt);

        /*
         * FIXME bug 583732 - chunk is newly allocated and cannot be present
         * in the table so using ordinary lookupForAdd is suboptimal here.
         */
        GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
        JS_ASSERT(!p);
        if (!rt->gcChunkSet.add(p, chunk)) {
            ReleaseGCChunk(rt, chunk);
            return NULL;
        }
        JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci));
    }

    GCChunkInfo *ci = rt->gcFreeArenaChunks.back();
    JS_ASSERT(ci->numFreeArenas);

    /* Scan the bitmap for the first non-zero bit. */
    jsbitmap *freeArenas = ci->getFreeArenaBitmap();
    size_t arenaIndex = 0;
    while (!*freeArenas) {
        arenaIndex += JS_BITS_PER_WORD;
        freeArenas++;
    }
    size_t bit = CountTrailingZeros(*freeArenas);
    arenaIndex += bit;
    JS_ASSERT(arenaIndex < GC_ARENAS_PER_CHUNK);
    JS_ASSERT(*freeArenas & (jsuword(1) << bit));
    *freeArenas &= ~(jsuword(1) << bit);
    --ci->numFreeArenas;
    if (ci->numFreeArenas == 0) {
        JS_ASSERT(ci == rt->gcFreeArenaChunks.back());
        rt->gcFreeArenaChunks.popBack();
    }

    rt->gcBytes += GC_ARENA_SIZE;
    METER(rt->gcStats.nallarenas++);
    METER_UPDATE_MAX(rt->gcStats.maxnallarenas, rt->gcStats.nallarenas);

    return JSGCArena::fromChunkAndIndex(ci->getChunk(), arenaIndex);
}
/*
 * This function does not touch the arena or release its memory so code can
 * still refer into it.
 */
static void
ReleaseGCArena(JSRuntime *rt, JSGCArena *a)
{
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE);
    rt->gcBytes -= GC_ARENA_SIZE;
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);

    jsuword chunk = a->getChunk();
    GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);
    JS_ASSERT(ci->numFreeArenas <= GC_ARENAS_PER_CHUNK - 1);
    jsbitmap *freeArenas = ci->getFreeArenaBitmap();
    JS_ASSERT(!JS_TEST_BIT(freeArenas, a->getIndex()));
    JS_SET_BIT(freeArenas, a->getIndex());
    ci->numFreeArenas++;
    if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK)
        ci->gcChunkAge = 0;

#ifdef DEBUG
    a->getInfo()->prev = rt->gcEmptyArenaList;
    rt->gcEmptyArenaList = a;
#endif
}

static void
FreeGCChunks(JSRuntime *rt)
{
#ifdef DEBUG
    while (rt->gcEmptyArenaList) {
        JSGCArena *next = rt->gcEmptyArenaList->getInfo()->prev;
        memset(rt->gcEmptyArenaList, JS_FREE_PATTERN, GC_ARENA_SIZE);
        rt->gcEmptyArenaList = next;
    }
#endif

    /* Remove unused chunks and rebuild gcFreeArenaChunks. */
    rt->gcFreeArenaChunks.clear();
    JS_ASSERT(rt->gcFreeArenaChunks.capacity() >= rt->gcChunkSet.count());
    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        GCChunkInfo *ci = GCChunkInfo::fromChunk(e.front());
        JS_ASSERT(ci->runtime == rt);
        if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK) {
            if (ci->gcChunkAge > GC_MAX_CHUNK_AGE) {
                e.removeFront();
                ReleaseGCChunk(rt, ci->getChunk());
                continue;
            }
            ci->gcChunkAge++;
        }

        if (ci->numFreeArenas)
            JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci));
    }
}
static inline size_t
GetFinalizableThingSize(unsigned thingKind)
{
    JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);

    static const uint8 map[FINALIZE_LIMIT] = {
        sizeof(JSObject),       /* FINALIZE_OBJECT */
        sizeof(JSFunction),     /* FINALIZE_FUNCTION */
#if JS_HAS_XML_SUPPORT
        sizeof(JSXML),          /* FINALIZE_XML */
#endif
        sizeof(JSShortString),  /* FINALIZE_SHORT_STRING */
        sizeof(JSString),       /* FINALIZE_STRING */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING0 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING1 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING2 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING3 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING4 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING5 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING6 */
        sizeof(JSString),       /* FINALIZE_EXTERNAL_STRING7 */
    };

    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return map[thingKind];
}

static inline size_t
GetFinalizableTraceKind(size_t thingKind)
{
    JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);

    static const uint8 map[FINALIZE_LIMIT] = {
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT */
        JSTRACE_OBJECT,     /* FINALIZE_FUNCTION */
#if JS_HAS_XML_SUPPORT      /* FINALIZE_XML */
        JSTRACE_XML,
#endif
        JSTRACE_STRING,     /* FINALIZE_SHORT_STRING */
        JSTRACE_STRING,     /* FINALIZE_STRING */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING0 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING1 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING2 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING3 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING4 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING5 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING6 */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING7 */
    };

    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return map[thingKind];
}

static inline size_t
GetFinalizableArenaTraceKind(JSGCArenaInfo *ainfo)
{
    JS_ASSERT(ainfo->list);
    return GetFinalizableTraceKind(ainfo->list->thingKind);
}

static inline size_t
GetArenaTraceKind(JSGCArenaInfo *ainfo)
{
    return GetFinalizableArenaTraceKind(ainfo);
}

static inline size_t
GetFinalizableThingTraceKind(void *thing)
{
    JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing);
    return GetFinalizableArenaTraceKind(ainfo);
}
static void
InitGCArenaLists(JSRuntime *rt)
{
    for (unsigned i = 0; i != FINALIZE_LIMIT; ++i) {
        JSGCArenaList *arenaList = &rt->gcArenaList[i];
        arenaList->head = NULL;
        arenaList->cursor = NULL;
        arenaList->thingKind = i;
        arenaList->thingSize = GetFinalizableThingSize(i);
    }
}

static void
FinishGCArenaLists(JSRuntime *rt)
{
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
        rt->gcArenaList[i].head = NULL;
        rt->gcArenaList[i].cursor = NULL;
    }

    rt->gcBytes = 0;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();
    rt->gcFreeArenaChunks.clear();
}

intN
js_GetExternalStringGCType(JSString *str)
{
    JS_STATIC_ASSERT(FINALIZE_STRING + 1 == FINALIZE_EXTERNAL_STRING0);
    JS_ASSERT(!JSString::isStatic(str));

    unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind;
    JS_ASSERT(IsFinalizableStringKind(thingKind));
    return intN(thingKind) - intN(FINALIZE_EXTERNAL_STRING0);
}

JS_FRIEND_API(uint32)
js_GetGCThingTraceKind(void *thing)
{
    if (JSString::isStatic(thing))
        return JSTRACE_STRING;

    JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing);
    return GetArenaTraceKind(ainfo);
}

JSRuntime *
js_GetGCThingRuntime(void *thing)
{
    jsuword chunk = JSGCArena::fromGCThing(thing)->getChunk();
    return GCChunkInfo::fromChunk(chunk)->runtime;
}

JS_FRIEND_API(bool)
js_IsAboutToBeFinalized(void *thing)
{
    if (JSString::isStatic(thing))
        return false;

    return !IsMarkedGCThing(thing);
}

JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uint32 color)
{
    return IsMarkedGCThing(thing, color);
}
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    InitGCArenaLists(rt);

    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    if (!rt->gcHelperThread.init())
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcEmptyArenaPoolLifespan = 30000;

    /*
     * By default the trigger factor gets the maximum possible value. This
     * means that GC will not be triggered by growth of GC memory (gcBytes).
     */
    rt->setGCTriggerFactor((uint32) -1);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);

    METER(PodZero(&rt->gcStats));
    return true;
}
namespace js {

/*
 * Returns CGCT_VALID if w can be a live GC thing and sets thing and traceKind
 * accordingly. Otherwise returns the reason for rejection.
 */
inline ConservativeGCTest
IsGCThingWord(JSRuntime *rt, jsuword w, void *&thing, uint32 &traceKind)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter the valgrind
     * view of the memory we make the argument, a copy of the original word,
     * memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    jsuword chunk = payload & ~GC_CHUNK_MASK;
    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);

    if ((payload & GC_CHUNK_MASK) >= GC_MARK_BITMAP_ARRAY_OFFSET)
        return CGCT_NOTARENA;

    size_t arenaIndex = (payload & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
    if (JS_TEST_BIT(ci->getFreeArenaBitmap(), arenaIndex))
        return CGCT_FREEARENA;

    JSGCArena *a = JSGCArena::fromChunkAndIndex(chunk, arenaIndex);
    JSGCArenaInfo *ainfo = a->getInfo();

    traceKind = GetFinalizableArenaTraceKind(ainfo);

    /*
     * On 64-bit we might consider using the tag bits in w to disqualify
     * additional false roots, however, the condition would have to look
     * something like:
     *
     *   if ((traceKind == JSTRACE_STRING && tag > 0 && tag != JSVAL_TAG_SHIFT) ||
     *       (traceKind == JSTRACE_OBJECT && tag > 0 && tag != JSVAL_TAG_OBJECT))
     *       return CGCT_WRONGTAG;
     *
     * However, it seems like we should measure how often this actually avoids
     * false roots.
     */

    jsuword start = a->toPageStart();
    jsuword offset = payload - start;
    size_t thingSize = ainfo->list->thingSize;
    offset -= offset % thingSize;

    /*
     * If GC_ARENA_SIZE % thingSize != 0, i.e. when thingSize is not a power
     * of two, a thingSize-aligned pointer may point at the end of the last
     * thing yet be inside the arena.
     */
    if (offset + thingSize > GC_ARENA_SIZE) {
        JS_ASSERT(thingSize & (thingSize - 1));
        return CGCT_NOTARENA;
    }

    thing = (JSGCThing *) (start + offset);

    /* Make sure the thing is not on the freelist of the arena. */
    JSGCThing *cursor = ainfo->freeList;
    while (cursor) {
        JS_ASSERT((((jsuword) cursor) & GC_ARENA_MASK) % thingSize == 0);
        JS_ASSERT(!IsMarkedGCThing(cursor));

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return CGCT_NOTLIVE;
        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }

    return CGCT_VALID;
}
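/*
 * To summarize, the tests above reject a candidate word in this order:
 *
 *   1. it has one of its two low bits set          -> CGCT_LOWBITSET
 *   2. its chunk is not in rt->gcChunkSet          -> CGCT_NOTCHUNK
 *   3. it points into the chunk's metadata area    -> CGCT_NOTARENA
 *      (or past the last whole thing in the arena)
 *   4. its arena is free per the free-arena bitmap -> CGCT_FREEARENA
 *   5. its cell is on the arena's free list        -> CGCT_NOTLIVE
 *   6. otherwise                                   -> CGCT_VALID
 */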
inline ConservativeGCTest
IsGCThingWord(JSRuntime *rt, jsuword w)
{
    void *thing;
    uint32 traceKind;
    return IsGCThingWord(rt, w, thing, traceKind);
}

static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter the valgrind
     * view of the memory we make the argument, a copy of the original word,
     * memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    void *thing;
    uint32 traceKind;
    ConservativeGCTest test = IsGCThingWord(trc->context->runtime, w, thing, traceKind);
    if (test == CGCT_VALID) {
        Mark(trc, thing, traceKind, "machine stack");
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            GCMarker::ConservativeRoot root = {thing, traceKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
#endif
    }

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}

static void
MarkRangeConservatively(JSTracer *trc, jsuword *begin, jsuword *end)
{
    JS_ASSERT(begin <= end);
    for (jsuword *i = begin; i != end; ++i)
        MarkWordConservatively(trc, *i);
}

void
MarkConservativeStackRoots(JSTracer *trc)
{
    /* Do conservative scanning of the stack and registers. */
    for (ThreadDataIter i(trc->context->runtime); !i.empty(); i.popFront()) {
        JSThreadData *td = i.threadData();
        ConservativeGCThreadData *ctd = &td->conservativeGC;
        if (ctd->isEnabled()) {
            jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
            stackMin = td->nativeStackBase;
            stackEnd = ctd->nativeStackTop;
#else
            stackMin = ctd->nativeStackTop + 1;
            stackEnd = td->nativeStackBase;
#endif
            JS_ASSERT(stackMin <= stackEnd);
            MarkRangeConservatively(trc, stackMin, stackEnd);
            MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                                    JS_ARRAY_END(ctd->registerSnapshot.words));
        }
    }
}
JS_NEVER_INLINE JS_FRIEND_API(void)
ConservativeGCThreadData::enable(bool knownStackBoundary)
{
    ++enableCount;
    if (enableCount <= 0)
        return;

    /* Update the native stack pointer if it points to a bigger stack. */
#if JS_STACK_GROWTH_DIRECTION > 0
# define CMP >
#else
# define CMP <
#endif
    jsuword dummy;
    if (knownStackBoundary || enableCount == 1 || &dummy CMP nativeStackTop)
        nativeStackTop = &dummy;
#undef CMP

    /* Update the register snapshot with the latest values. */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}

JS_NEVER_INLINE JS_FRIEND_API(void)
ConservativeGCThreadData::disable()
{
    --enableCount;
#ifdef DEBUG
    if (enableCount == 0)
        nativeStackTop = NULL;
#endif
}

} /* namespace js */
#ifdef DEBUG
static void
CheckLeakedRoots(JSRuntime *rt);
#endif

void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

#ifdef JS_THREADSAFE
    rt->gcHelperThread.cancel();
#endif
    FinishGCArenaLists(rt);

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}

JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}

JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;

#ifdef DEBUG

static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root '%s' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
                    "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
                    "                   This root may point to freed memory. Objects reachable\n"
                    "                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
                    "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
                    "                   These roots may point to freed memory. Objects reachable\n"
                    "                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}

void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}

#endif /* DEBUG */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);
    int ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}

void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);
}

void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;
    uint64 triggerBytes = uint64(lastBytes) * uint64(gcTriggerFactor / 100);
    if (triggerBytes != size_t(triggerBytes))
        triggerBytes = size_t(-1);
    gcTriggerBytes = size_t(triggerBytes);
}
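/*
 * For example, with a hypothetical gcTriggerFactor of 300 (i.e. 300%),
 * setGCLastBytes(10 * 1024 * 1024) sets gcTriggerBytes to 30 MB. The uint64
 * arithmetic guards against overflow: if lastBytes * (factor / 100) does
 * not fit in size_t, the trigger saturates at size_t(-1), so growth of
 * gcBytes alone never triggers a GC, matching the default factor of
 * (uint32) -1 set in js_InitGC.
 */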
void
JSGCFreeLists::purge()
{
    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    for (JSGCThing **p = finalizables; p != JS_ARRAY_END(finalizables); ++p) {
        JSGCThing *freeListHead = *p;
        if (freeListHead) {
            JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(freeListHead);
            JS_ASSERT(!ainfo->freeList);
            ainfo->freeList = freeListHead;
            *p = NULL;
        }
    }
}

void
JSGCFreeLists::moveTo(JSGCFreeLists *another)
{
    *another = *this;
    PodArrayZero(finalizables);
    JS_ASSERT(isEmpty());
}

static inline bool
IsGCThresholdReached(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1)
        return true;
#endif

    /*
     * Since the initial value of the gcLastBytes parameter is not equal to
     * zero (see the js_InitGC function) the return value is false when
     * the gcBytes value is close to zero at the JS engine start.
     */
    return rt->isGCMallocLimitReached() || rt->gcBytes >= rt->gcTriggerBytes;
}
static void
LastDitchGC(JSContext *cx)
{
    JS_ASSERT(!JS_ON_TRACE(cx));

    /* The last ditch GC preserves weak roots and all atoms. */
    AutoKeepAtoms keep(cx->runtime);

    /*
     * Keep rt->gcLock across the call into the GC so we don't starve and
     * lose to racing threads who deplete the heap just after the GC has
     * replenished it (or has synchronized with a racing GC that collected a
     * bunch of garbage). This unfair scheduling can happen on certain
     * operating systems. For the gory details, see bug 162779.
     */
    js_GC(cx, GC_LOCK_HELD);
}

static JSGCThing *
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind]);
    JSRuntime *rt = cx->runtime;
    JSGCArenaList *arenaList;
    JSGCArena *a;

    {
        AutoLockGC lock(rt);
        JS_ASSERT(!rt->gcRunning);
        if (rt->gcRunning)
            return NULL;

        bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
        bool doGC = canGC && IsGCThresholdReached(rt);
        arenaList = &rt->gcArenaList[thingKind];
        for (;;) {
            if (doGC) {
                LastDitchGC(cx);
                METER(cx->runtime->gcArenaStats[thingKind].retry++);
                canGC = false;

                /*
                 * The JSGC_END callback can legitimately allocate new GC
                 * things and populate the free list. If that happens, just
                 * return that list head.
                 */
                JSGCThing *freeList = JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind];
                if (freeList)
                    return freeList;
            }

            while ((a = arenaList->cursor) != NULL) {
                JSGCArenaInfo *ainfo = a->getInfo();
                arenaList->cursor = ainfo->prev;
                JSGCThing *freeList = ainfo->freeList;
                if (freeList) {
                    ainfo->freeList = NULL;
                    return freeList;
                }
            }

            a = NewGCArena(cx);
            if (a)
                break;
            if (!canGC) {
                METER(cx->runtime->gcArenaStats[thingKind].fail++);
                return NULL;
            }
            doGC = true;
        }

        /*
         * Do only minimal initialization of the arena inside the GC lock. We
         * can do the rest outside the lock because no other threads will see
         * the arena until the GC is run.
         */
        JSGCArenaInfo *ainfo = a->getInfo();
        ainfo->list = arenaList;
        ainfo->prev = arenaList->head;
        ainfo->freeList = NULL;
        arenaList->head = a;
    }

    JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
    markingDelay->link = NULL;
    markingDelay->unmarkedChildren = 0;

    return MakeNewArenaFreeList(a, arenaList->thingSize);
}
static inline void
CheckGCFreeListLink(JSGCThing *thing)
{
    /*
     * The GC things on the free lists come from one arena and the things on
     * the free list are linked in ascending address order.
     */
    JS_ASSERT_IF(thing->link,
                 JSGCArena::fromGCThing(thing) ==
                 JSGCArena::fromGCThing(thing->link));
    JS_ASSERT_IF(thing->link, thing < thing->link);
}

void *
js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif

    /* Updates of metering counters here may not be thread-safe. */
    METER(cx->runtime->gcArenaStats[thingKind].alloc++);

    JSGCThing **freeListp =
        JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind;
    JSGCThing *thing = *freeListp;
    if (thing) {
        *freeListp = thing->link;
        CheckGCFreeListLink(thing);
        METER(cx->runtime->gcArenaStats[thingKind].localalloc++);
        return thing;
    }

    thing = RefillFinalizableFreeList(cx, thingKind);
    if (!thing) {
        js_ReportOutOfMemory(cx);
        return NULL;
    }

    /*
     * See comments in RefillFinalizableFreeList about a possibility
     * of *freeListp == thing.
     */
    JS_ASSERT(!*freeListp || *freeListp == thing);
    *freeListp = thing->link;

    CheckGCFreeListLink(thing);

    return thing;
}
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    GCLocks *locks;

    if (!thing)
        return true;
    locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (!p) {
        if (!locks->add(p, thing, 1))
            return false;
    } else {
        JS_ASSERT(p->value >= 1);
        p->value++;
    }

    METER(rt->gcStats.lock++);
    return true;
}

void
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return;

    AutoLockGC lock(rt);
    GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);

    if (p) {
        rt->gcPoke = true;
        if (--p->value == 0)
            rt->gcLocksHash.remove(p);
    }

    METER(rt->gcStats.unlock++);
}
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        /* If obj has no map, it must be a newborn. */
        JSObject *obj = (JSObject *) thing;
        if (!obj->map)
            break;
        if (JSObject *proto = obj->getProto())
            JS_CALL_OBJECT_TRACER(trc, proto, "proto");
        if (JSObject *parent = obj->getParent())
            JS_CALL_OBJECT_TRACER(trc, parent, "parent");
        JSTraceOp op = obj->getOps()->trace;
        (op ? op : js_TraceObject)(trc, obj);
        break;
      }

      case JSTRACE_STRING: {
        JSString *str = (JSString *) thing;
        if (str->isDependent())
            JS_CALL_STRING_TRACER(trc, str->dependentBase(), "base");
        else if (str->isRope()) {
            if (str->isInteriorNode())
                JS_CALL_STRING_TRACER(trc, str->interiorNodeParent(), "parent");
            JS_CALL_STRING_TRACER(trc, str->ropeLeft(), "left child");
            JS_CALL_STRING_TRACER(trc, str->ropeRight(), "right child");
        }
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        js_TraceXML(trc, (JSXML *)thing);
        break;
#endif
    }
}
namespace js {

/*
 * When the native stack is low, the GC does not call JS_TraceChildren to mark
 * the reachable "children" of the thing. Rather the thing is put aside and
 * JS_TraceChildren is called later with more space on the C stack.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient native stack, the code uses two fields per
 * arena stored in JSGCMarkingDelay. The first field, JSGCMarkingDelay::link,
 * links all arenas with delayed things into a stack list with the pointer to
 * stack top in JSRuntime::gcUnmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
 *
 * The second field, JSGCMarkingDelay::unmarkedChildren, is a bitmap that
 * tells for which things the GC should call JS_TraceChildren later. The
 * bitmap is a single word. As such it does not pinpoint the delayed things
 * in the arena but rather tells the intervals containing
 * ThingsPerUnmarkedBit(thingSize) things. Later the code in
 * markDelayedChildren discovers such intervals and calls JS_TraceChildren on
 * any marked thing in the interval. This implies that JS_TraceChildren can be
 * called many times for a single thing if the thing shares the same interval
 * with some delayed things. This should be fine as any GC graph
 * marking/traversing hooks must allow repeated calls during the same GC cycle.
 * In particular, the xpcom cycle collector relies on this.
 *
 * Note that such repeated scanning may slow down the GC. In particular, it is
 * possible to construct an object graph where the GC calls JS_TraceChildren
 * ThingsPerUnmarkedBit(thingSize) times for almost all things in the graph.
 * We tolerate this as the max value for ThingsPerUnmarkedBit(thingSize) is 4.
 * This is achieved for JSObject on a 32-bit system as it is exactly JSObject
 * that has the smallest size among the GC things that can be delayed. On a
 * 32-bit CPU we have less than 128 objects per 4K GC arena so each bit in
 * unmarkedChildren covers 4 objects.
 */
inline unsigned
ThingsPerUnmarkedBit(unsigned thingSize)
{
    return JS_HOWMANY(ThingsPerArena(thingSize), JS_BITS_PER_WORD);
}
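/*
 * For example, for a hypothetical 32-byte thing on a 32-bit target:
 * ThingsPerArena(32) == 4096 / 32 == 128 and ThingsPerUnmarkedBit(32) ==
 * JS_HOWMANY(128, 32) == 4, so each unmarkedChildren bit stands for an
 * interval of 4 consecutive things and markDelayedChildren rescans at most
 * 4 things to find the delayed ones covered by that bit.
 */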
GCMarker::GCMarker(JSContext *cx)
  : color(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterCount = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}

GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}
void
GCMarker::delayMarkingChildren(void *thing)
{
    JS_ASSERT(this == context->runtime->gcMarkingTracer);
    JS_ASSERT(IsMarkedGCThing(thing));
    METER(context->runtime->gcStats.unmarked++);

    JSGCArena *a = JSGCArena::fromGCThing(thing);
    JSGCArenaInfo *ainfo = a->getInfo();
    JSGCMarkingDelay *markingDelay = a->getMarkingDelay();

    size_t thingArenaIndex = GCThingToArenaIndex(thing);
    size_t unmarkedBitIndex = thingArenaIndex /
                              ThingsPerUnmarkedBit(ainfo->list->thingSize);
    JS_ASSERT(unmarkedBitIndex < JS_BITS_PER_WORD);

    jsuword bit = jsuword(1) << unmarkedBitIndex;
    if (markingDelay->unmarkedChildren != 0) {
        JS_ASSERT(unmarkedArenaStackTop);
        if (markingDelay->unmarkedChildren & bit) {
            /* bit already covers things with children to mark later. */
            return;
        }
        markingDelay->unmarkedChildren |= bit;
    } else {
        /*
         * The thing is the first thing with not yet marked children in the
         * whole arena, so push the arena on the stack of arenas with things
         * to be marked later unless the arena has already been pushed. We
         * detect that through checking markingDelay->link as the field is
         * NULL only for not yet pushed arenas. To ensure that
         *   markingDelay->link != NULL
         * even when the stack contains one element, we make the link for
         * the arena at the bottom point to itself.
         *
         * See comments in markDelayedChildren.
         */
        markingDelay->unmarkedChildren = bit;
        if (!markingDelay->link) {
            if (!unmarkedArenaStackTop) {
                /* Stack was empty, mark the arena as the bottom element. */
                markingDelay->link = a;
            } else {
                JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
                markingDelay->link = unmarkedArenaStackTop;
            }
            unmarkedArenaStackTop = a;
        }
        JS_ASSERT(unmarkedArenaStackTop);
    }
#ifdef DEBUG
    markLaterCount += ThingsPerUnmarkedBit(ainfo->list->thingSize);
    METER_UPDATE_MAX(context->runtime->gcStats.maxunmarked, markLaterCount);
#endif
}
JS_FRIEND_API(void)
GCMarker::markDelayedChildren()
{
    JS_ASSERT(this == context->runtime->gcMarkingTracer);

    JSGCArena *a = unmarkedArenaStackTop;
    if (!a) {
        JS_ASSERT(markLaterCount == 0);
        return;
    }

    for (;;) {
        /*
         * The following assert verifies that the current arena belongs to
         * the unmarked stack, since delayMarkingChildren ensures that even
         * for the stack's bottom, markingDelay->link != NULL but rather
         * points to the arena itself.
         */
        JSGCArenaInfo *ainfo = a->getInfo();
        JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
        JS_ASSERT(markingDelay->link);
        JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
        unsigned thingSize = ainfo->list->thingSize;
        unsigned traceKind = GetFinalizableArenaTraceKind(ainfo);
        unsigned indexLimit = ThingsPerArena(thingSize);
        unsigned thingsPerUnmarkedBit = ThingsPerUnmarkedBit(thingSize);

        /*
         * We cannot use a do-while loop here as unmarkedChildren can be
         * zero before the loop as a leftover from the previous iterations.
         * See comments after the loop.
         */
        while (markingDelay->unmarkedChildren != 0) {
            unsigned unmarkedBitIndex = JS_FLOOR_LOG2W(markingDelay->unmarkedChildren);
            markingDelay->unmarkedChildren &= ~(jsuword(1) << unmarkedBitIndex);
#ifdef DEBUG
            JS_ASSERT(markLaterCount >= thingsPerUnmarkedBit);
            markLaterCount -= thingsPerUnmarkedBit;
#endif
            unsigned thingIndex = unmarkedBitIndex * thingsPerUnmarkedBit;
            unsigned endIndex = thingIndex + thingsPerUnmarkedBit;

            /*
             * endIndex can go beyond the last allocated thing as the real
             * limit can be "inside" the bit.
             */
            if (endIndex > indexLimit)
                endIndex = indexLimit;
            uint8 *thing = GCArenaIndexToThing(a, ainfo, thingIndex);
            uint8 *end = GCArenaIndexToThing(a, ainfo, endIndex);
            do {
                JS_ASSERT(thing < end);
                if (IsMarkedGCThing(thing))
                    JS_TraceChildren(this, thing, traceKind);
                thing += thingSize;
            } while (thing != end);
        }

        /*
         * We finished tracing of all things in the arena but we can only
         * pop it from the stack if the arena is the stack's top.
         *
         * When JS_TraceChildren from the above calls JS_CallTracer that in
         * turn on low C stack calls delayMarkingChildren and the latter
         * pushes new arenas to the unmarked stack, we have to skip popping
         * of this arena until it becomes the top of the stack again.
         */
        if (a == unmarkedArenaStackTop) {
            JSGCArena *aprev = markingDelay->link;
            markingDelay->link = NULL;
            if (a == aprev) {
                /*
                 * The link points to the arena itself and we reached the
                 * bottom of the stack.
                 */
                break;
            }
            unmarkedArenaStackTop = a = aprev;
        } else {
            a = unmarkedArenaStackTop;
        }
    }
    JS_ASSERT(unmarkedArenaStackTop);
    JS_ASSERT(!unmarkedArenaStackTop->getMarkingDelay()->link);
    unmarkedArenaStackTop = NULL;
    JS_ASSERT(markLaterCount == 0);
}
void
GCMarker::slowifyArrays()
{
    while (!arraysToSlowify.empty()) {
        JSObject *obj = arraysToSlowify.back();
        arraysToSlowify.popBack();
        if (IsMarkedGCThing(obj))
            obj->makeDenseArraySlow(context);
    }
}
1852 void
1853 Mark(JSTracer *trc, void *thing, uint32 kind)
1855 JS_ASSERT(thing);
1856 JS_ASSERT(JS_IS_VALID_TRACE_KIND(kind));
1857 JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
1858 JS_ASSERT_IF(!JSString::isStatic(thing), kind == GetFinalizableThingTraceKind(thing));
1859 #ifdef DEBUG
1860 if (IS_GC_MARKING_TRACER(trc)) {
1861 JSRuntime *rt = trc->context->runtime;
1862 JS_ASSERT(rt->gcMarkingTracer == trc);
1863 JS_ASSERT(rt->gcRunning);
1865 #endif
1867 if (!IS_GC_MARKING_TRACER(trc)) {
1868 trc->callback(trc, thing, kind);
1869 } else {
1870 GCMarker *gcmarker = static_cast<GCMarker *>(trc);
1872 if (kind == JSTRACE_STRING) {
1874 * Optimize for string as their marking is not recursive.
1876 * Iterate through all nodes and leaves in the rope if this is
1877 * part of a rope; otherwise, we only iterate once: on the string
1878 * itself.
1880 JSRopeNodeIterator iter((JSString *) thing);
1881 JSString *str = iter.init();
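
            /*
             * The outer loop visits each node of the rope; the inner loop
             * additionally walks the dependent-base chain of every visited
             * string, stopping as soon as it hits a static or an
             * already-marked string.
             */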
            do {
                for (;;) {
                    if (JSString::isStatic(str))
                        break;
                    JS_ASSERT(kind == GetFinalizableThingTraceKind(str));
                    if (!MarkIfUnmarkedGCThing(str))
                        break;
                    if (!str->isDependent())
                        break;
                    str = str->dependentBase();
                }
                str = iter.next();
            } while (str);

        } else if (MarkIfUnmarkedGCThing(thing, gcmarker->getMarkColor())) {
            /*
             * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC
             * always uses the non-recursive code that otherwise would be
             * called only on a low C stack condition.
             */
#ifdef JS_GC_ASSUME_LOW_C_STACK
# define RECURSION_TOO_DEEP() true
#else
            int stackDummy;
# define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(trc->context, stackDummy))
#endif
            if (RECURSION_TOO_DEEP())
                gcmarker->delayMarkingChildren(thing);
            else
                JS_TraceChildren(trc, thing, kind);
        }
    }
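
    /*
     * Clear the tracer's debug-print state so a stale name from this call
     * cannot be attributed to the next traced thing; the assertion at the
     * top of this function requires every caller to set it afresh.
     */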
#ifdef DEBUG
    trc->debugPrinter = NULL;
    trc->debugPrintArg = NULL;
#endif
}

void
MarkGCThing(JSTracer *trc, void *thing)
{
    JS_ASSERT(size_t(thing) % JS_GCTHING_ALIGN == 0);

    if (!thing)
        return;

    uint32 kind = js_GetGCThingTraceKind(thing);
    Mark(trc, thing, kind);
}

} /* namespace js */

static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->asGCThing() : NULL;
    }

    if (ptr) {
        if (!JSString::isStatic(ptr)) {
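            /*
             * Verify that the thing the root points at actually lies within
             * one of the runtime's GC arena lists; a failure here usually
             * means the embedding forgot to call JS_RemoveRoot.
             */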
            bool root_points_to_gcArenaList = false;
            jsuword thing = (jsuword) ptr;
            JSRuntime *rt = trc->context->runtime;
            for (unsigned i = 0; i != FINALIZE_LIMIT; i++) {
                JSGCArenaList *arenaList = &rt->gcArenaList[i];
                size_t thingSize = arenaList->thingSize;
                size_t limit = ThingsPerArena(thingSize) * thingSize;
                for (JSGCArena *a = arenaList->head;
                     a;
                     a = a->getInfo()->prev) {
                    if (thing - a->toPageStart() < limit) {
                        root_points_to_gcArenaList = true;
                        break;
                    }
                }
            }
            if (!root_points_to_gcArenaList && entry.value.name) {
                fprintf(stderr,
                        "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
                        "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
                        "The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(root_points_to_gcArenaList);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}

static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    uint32 traceKind;

    JS_ASSERT(entry.value >= 1);
    traceKind = js_GetGCThingTraceKind(entry.key);
    JS_CALL_TRACER(trc, entry.key, traceKind, "locked object");
}

void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    if (fp->hasCallObj())
        JS_CALL_OBJECT_TRACER(trc, fp->getCallObj(), "call");
    if (fp->hasArgsObj())
        JS_CALL_OBJECT_TRACER(trc, fp->getArgsObj(), "arguments");
    if (fp->hasScript())
        js_TraceScript(trc, fp->getScript());

    /* Allow for primitive this parameter due to JSFUN_THISP_* flags. */
    MarkValue(trc, fp->getThisValue(), "this");
    MarkValue(trc, fp->getReturnValue(), "rval");
    if (fp->hasScopeChain())
        JS_CALL_OBJECT_TRACER(trc, fp->getScopeChain(), "scope chain");
}

inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SPROP:
        static_cast<AutoScopePropertyRooter *>(this)->sprop->trace(trc);
        return;

      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
            MarkId(trc, desc.id, "PropDesc::id");
        }
        return;
      }

      case DESCRIPTOR: {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, CastAsObject(desc.getter), "Descriptor::get");
        if ((desc.attrs & JSPROP_SETTER) && desc.setter)
            MarkObject(trc, CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.id_");
        return;

      case VALVECTOR: {
        Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }
    }

    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array,
                   "js::AutoArrayRooter.array");
}
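
/*
 * Trace the per-context roots: the global object, any pending exception, the
 * chain of autorooters, the sharp object map, the regexp statics, and the
 * pending iterator value.
 */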
void
js_TraceContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
        JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object");
    if (acx->throwing) {
        MarkValue(trc, acx->exception, "exception");
    } else {
        /* Avoid keeping GC-ed junk stored in JSContext.exception. */
        acx->exception.setNull();
    }

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    js_TraceRegExpStatics(trc, acx);

    MarkValue(trc, acx->iterValue, "iterValue");

    acx->compartment->marked = true;

#ifdef JS_TRACER
    TracerState *state = acx->tracerState;
    while (state) {
        if (state->nativeVp)
            MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
        state = state->prev;
    }
#endif
}

JS_REQUIRES_STACK void
js_TraceRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that at this point we do not have unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter;
#if 1
    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT(acx->outstandingRequests != 0);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.isEnabled());
            void *thing;
            switch (gcr->tag) {
              default:
                continue;
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.asGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
            }

            if (JSString::isStatic(thing))
                continue;

            if (!IsMarkedGCThing(thing)) {
                ConservativeGCTest test = IsGCThingWord(rt, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        " Aborting.\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        JS_THREAD_DATA(acx)->conservativeGC.enableCount);
                JS_ASSERT(false);
                abort();
            }
        }
    }
#endif

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);
    js_MarkTraps(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        js_TraceContext(trc, acx);

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots last so that the trace op can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef DEBUG
    if (rt->functionMeterFilename) {
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}

void
js_TriggerGC(JSContext *cx, JSBool gcLocked)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    JS_ASSERT(cx->requestDepth > 0);
#endif
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = JS_TRUE;
    js_TriggerAllOperationCallbacks(rt, gcLocked);
}
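
/*
 * Destroy the scripts accumulated on the per-thread scriptsToGC lists; each
 * list is threaded through script->u.nextToGC.
 */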
void
js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data)
{
    JSScript **listp, *script;

    for (size_t i = 0; i != JS_ARRAY_LENGTH(data->scriptsToGC); ++i) {
        listp = &data->scriptsToGC[i];
        while ((script = *listp) != NULL) {
            *listp = script->u.nextToGC;
            script->u.nextToGC = NULL;
            js_DestroyScript(cx, script);
        }
    }
}

inline void
FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind)
{
    JS_ASSERT(thingKind == FINALIZE_OBJECT ||
              thingKind == FINALIZE_FUNCTION);

    /* Cope with stillborn objects that have no map. */
    if (!obj->map)
        return;

    /* Finalize obj first, in case it needs map and slots. */
    Class *clasp = obj->getClass();
    if (clasp->finalize)
        clasp->finalize(cx, obj);

    DTrace::finalizeObject(obj);

    if (JS_LIKELY(obj->isNative())) {
        JSScope *scope = obj->scope();
        if (scope->isSharedEmpty())
            static_cast<JSEmptyScope *>(scope)->dropFromGC(cx);
        else
            scope->destroy(cx);
    }
    if (obj->hasSlotsArray())
        obj->freeSlotsArray(cx);
}

inline void
FinalizeFunction(JSContext *cx, JSFunction *fun, unsigned thingKind)
{
    FinalizeObject(cx, FUN_OBJECT(fun), thingKind);
}

#if JS_HAS_XML_SUPPORT
inline void
FinalizeXML(JSContext *cx, JSXML *xml, unsigned thingKind)
{
    js_FinalizeXML(cx, xml);
}
#endif
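
/*
 * Finalizers for the external string types. The embedding registers and
 * unregisters them via js_ChangeExternalStringFinalizer below; a NULL entry
 * marks a free slot.
 */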
JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
static JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = {
    NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
};

intN
js_ChangeExternalStringFinalizer(JSStringFinalizeOp oldop,
                                 JSStringFinalizeOp newop)
{
    for (uintN i = 0; i != JS_ARRAY_LENGTH(str_finalizers); i++) {
        if (str_finalizers[i] == oldop) {
            str_finalizers[i] = newop;
            return intN(i);
        }
    }
    return -1;
}

inline void
FinalizeShortString(JSContext *cx, JSShortString *str, unsigned thingKind)
{
    JS_ASSERT(FINALIZE_SHORT_STRING == thingKind);
    JS_ASSERT(!JSString::isStatic(str->header()));
    JS_ASSERT(str->header()->isFlat());
    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
}

inline void
FinalizeString(JSContext *cx, JSString *str, unsigned thingKind)
{
    JS_ASSERT(FINALIZE_STRING == thingKind);
    JS_ASSERT(!JSString::isStatic(str));
    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
    if (str->isDependent()) {
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(cx->runtime, liveDependentStrings);
    } else if (str->isFlat()) {
        /*
         * flatChars for a stillborn string is null, but cx->free checks
         * for a null pointer on its own.
         */
        cx->free(str->flatChars());
    } else if (str->isTopNode()) {
        cx->free(str->topNodeBuffer());
    }
    /* Nothing to be done for rope interior nodes. */
}

inline void
FinalizeExternalString(JSContext *cx, JSString *str, unsigned thingKind)
{
    unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0;
    JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(str->isFlat());

    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);

    /* A stillborn string has null chars. */
    jschar *chars = str->flatChars();
    if (!chars)
        return;
    JSStringFinalizeOp finalizer = str_finalizers[type];
    if (finalizer)
        finalizer(cx, str);
}

/*
 * This function is called from js_FinishAtomState to force the finalization
 * of the permanently interned strings when cx is not available.
 */
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string cannot be external and must be valid. */
        JS_ASSERT(JSGCArenaInfo::fromGCThing(str)->list->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so it is not valid. */
        jschar *chars = str->flatChars();
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            rt->free(chars);
        } else if (thingKind != FINALIZE_SHORT_STRING) {
            unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0;
            JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
            JSStringFinalizeOp finalizer = str_finalizers[type];
            if (finalizer) {
                /*
                 * Assume that the finalizer for the permanently interned
                 * string knows how to deal with a null context.
                 */
                finalizer(NULL, str);
            }
        }
    }
}

template<typename T,
         void finalizer(JSContext *cx, T *thing, unsigned thingKind)>
static void
FinalizeArenaList(JSContext *cx, unsigned thingKind)
{
    JS_STATIC_ASSERT(!(sizeof(T) & GC_CELL_MASK));
    JSGCArenaList *arenaList = &cx->runtime->gcArenaList[thingKind];
    JS_ASSERT(sizeof(T) == arenaList->thingSize);

    JSGCArena **ap = &arenaList->head;
    JSGCArena *a = *ap;
    if (!a)
        return;

#ifdef JS_GCMETER
    uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
#endif
    for (;;) {
        JSGCArenaInfo *ainfo = a->getInfo();
        JS_ASSERT(ainfo->list == arenaList);
        JS_ASSERT(!a->getMarkingDelay()->link);
        JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0);

        JSGCThing *freeList = NULL;
        JSGCThing **tailp = &freeList;
        bool allClear = true;

        jsuword thing = a->toPageStart();
        jsuword thingsEnd = thing + GC_ARENA_SIZE / sizeof(T) * sizeof(T);

        jsuword nextFree = reinterpret_cast<jsuword>(ainfo->freeList);
        if (!nextFree) {
            nextFree = thingsEnd;
        } else {
            JS_ASSERT(thing <= nextFree);
            JS_ASSERT(nextFree < thingsEnd);
        }
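
        /*
         * Sweep the arena: visit every cell in address order. Cells already
         * on the arena's free list (the sorted nextFree chain) and cells
         * whose mark bit is clear are linked onto the new free list; marked
         * cells are skipped and keep the arena alive.
         */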
        jsuword gcCellIndex = 0;
        jsbitmap *bitmap = a->getMarkBitmap();
        for (;; thing += sizeof(T), gcCellIndex += sizeof(T) >> GC_CELL_SHIFT) {
            if (thing == nextFree) {
                if (thing == thingsEnd)
                    break;
                nextFree = reinterpret_cast<jsuword>(
                    reinterpret_cast<JSGCThing *>(nextFree)->link);
                if (!nextFree) {
                    nextFree = thingsEnd;
                } else {
                    JS_ASSERT(thing < nextFree);
                    JS_ASSERT(nextFree < thingsEnd);
                }
            } else if (JS_TEST_BIT(bitmap, gcCellIndex)) {
                allClear = false;
                METER(nthings++);
                continue;
            } else {
                T *t = reinterpret_cast<T *>(thing);
                finalizer(cx, t, thingKind);
#ifdef DEBUG
                memset(t, JS_FREE_PATTERN, sizeof(T));
#endif
            }
            JSGCThing *t = reinterpret_cast<JSGCThing *>(thing);
            *tailp = t;
            tailp = &t->link;
        }

#ifdef DEBUG
        /* Check that the free list is consistent. */
        unsigned nfree = 0;
        if (freeList) {
            JS_ASSERT(tailp != &freeList);
            JSGCThing *t = freeList;
            for (;;) {
                ++nfree;
                if (&t->link == tailp)
                    break;
                JS_ASSERT(t < t->link);
                t = t->link;
            }
        }
#endif
        if (allClear) {
            /*
             * Forget the just-assembled free list head for the arena and
             * add the arena itself to the destroy list.
             */
            JS_ASSERT(nfree == ThingsPerArena(sizeof(T)));
            *ap = ainfo->prev;
            ReleaseGCArena(cx->runtime, a);
            METER(nkilledarenas++);
        } else {
            JS_ASSERT(nfree < ThingsPerArena(sizeof(T)));
            *tailp = NULL;
            ainfo->freeList = freeList;
            ap = &ainfo->prev;
            METER(nlivearenas++);
        }
        if (!(a = *ap))
            break;
    }
    arenaList->cursor = arenaList->head;

    METER(UpdateArenaStats(&cx->runtime->gcArenaStats[thingKind],
                           nlivearenas, nkilledarenas, nthings));
}

#ifdef JS_THREADSAFE

namespace js {
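
/*
 * Buffer a pointer so it can be freed later on the background thread.
 * Pointers are packed into malloc'ed arrays of FREE_ARRAY_LENGTH slots; if
 * no array space can be obtained, fall back to freeing the pointer
 * immediately on the calling thread.
 */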
JS_FRIEND_API(void)
BackgroundSweepTask::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    js_free(ptr);
}

void
BackgroundSweepTask::run()
{
    if (freeCursor) {
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
}

} /* namespace js */

#endif /* JS_THREADSAFE */

static void
SweepCompartments(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JSCompartmentCallback callback = rt->compartmentCallback;
    JSCompartment **read = rt->compartments.begin();
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;

    /* Delete defaultCompartment only during runtime shutdown. */
    rt->defaultCompartment->marked = true;
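
    /*
     * Compact the surviving compartments to the front of the vector in
     * place: |read| visits every compartment while |write| receives only
     * those that were marked during tracing.
     */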
    while (read < end) {
        JSCompartment *compartment = (*read++);
        if (compartment->marked) {
            compartment->marked = false;
            *write++ = compartment;
            /* Remove dead wrappers from the compartment map. */
            compartment->sweep(cx);
        } else {
            if (callback)
                (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            delete compartment;
        }
    }
    rt->compartments.resize(write - rt->compartments.begin());
}

/*
 * Common cache invalidation and so forth that must be done before GC. Even
 * if GCUntilDone calls GC several times, this work needs to be done only
 * once.
 */
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = JS_FALSE;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT
#ifdef JS_GC_ZEAL
        || rt->gcZeal >= 1
#endif
        ) {
        rt->gcRegenShapes = true;
        rt->gcRegenShapesScopeFlag ^= JSScope::SHAPE_REGEN;
        rt->shapeGen = JSScope::LAST_RESERVED_SHAPE;
        rt->protoHazardShape = 0;
    }

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }
}

/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 */
static void
GC(JSContext *cx GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;

    /*
     * Mark phase.
     */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        GCChunkInfo::fromChunk(r.front())->clearMarkBitmap();
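
    /*
     * The mark bitmaps of all chunks were cleared above, so the tracing
     * below only ever needs to set mark bits.
     */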
    js_TraceRuntime(&gcmarker);
    js_MarkScriptFilenames(rt);

    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->gcSweepTask);
    if (!rt->gcHelperThread.busy())
        cx->gcSweepTask = new js::BackgroundSweepTask();
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSStrings held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveScopePropsPreSweep = rt->liveScopeProps;
#endif

    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize its enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access those things even if they are about to
     * be freed.
     */
    JS_ASSERT(!rt->gcEmptyArenaList);
    FinalizeArenaList<JSObject, FinalizeObject>(cx, FINALIZE_OBJECT);
    FinalizeArenaList<JSFunction, FinalizeFunction>(cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
    FinalizeArenaList<JSXML, FinalizeXML>(cx, FINALIZE_XML);
#endif
    TIMESTAMP(sweepObjectEnd);

    /*
     * We sweep the deflated cache before we finalize the strings so the
     * cache can safely use js_IsAboutToBeFinalized().
     */
    rt->deflatedStringCache->sweep(cx);

    FinalizeArenaList<JSShortString, FinalizeShortString>(cx, FINALIZE_SHORT_STRING);
    FinalizeArenaList<JSString, FinalizeString>(cx, FINALIZE_STRING);
    for (unsigned i = FINALIZE_EXTERNAL_STRING0;
         i <= FINALIZE_EXTERNAL_STRING_LAST;
         ++i) {
        FinalizeArenaList<JSString, FinalizeExternalString>(cx, i);
    }
    TIMESTAMP(sweepStringEnd);

    SweepCompartments(cx);

    /*
     * Sweep the runtime's property tree after finalizing objects, in case
     * any had watchpoints referencing tree nodes.
     */
    js::SweepScopeProperties(cx);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    js_SweepScriptFilenames(rt);

    /* Slowify arrays we have accumulated. */
    gcmarker.slowifyArrays();

    /*
     * Destroy arenas after we have finished sweeping so finalizers can
     * safely use js_IsAboutToBeFinalized().
     */
    FreeGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

#ifdef JS_THREADSAFE
    if (cx->gcSweepTask) {
        rt->gcHelperThread.schedule(cx->gcSweepTask);
        cx->gcSweepTask = NULL;
    }
#endif

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
    {
        extern void DumpSrcNoteSizeHist();
        DumpSrcNoteSizeHist();
        printf("GC HEAP SIZE %lu\n", (unsigned long) rt->gcBytes);
    }
#endif

#ifdef JS_SCOPE_DEPTH_METER
    DumpScopeDepthMeter(rt);
#endif

#ifdef JS_DUMP_LOOP_STATS
    DumpLoopStats(rt);
#endif
}

#ifdef JS_THREADSAFE

/*
 * If the GC is running and we're called on another thread, wait for this GC
 * activation to finish. We can safely wait here without fear of deadlock (in
 * the case where we are called within a request on another thread's context)
 * because the GC doesn't set rt->gcRunning until after it has waited for all
 * active requests to end.
 *
 * We call js_CurrentThreadId() here only after checking rt->gcRunning, to
 * avoid an expensive call when the GC is not running.
 */
void
js_WaitForGC(JSRuntime *rt)
{
    if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
        do {
            JS_AWAIT_GC_DONE(rt);
        } while (rt->gcRunning);
    }
}

/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    size_t requestDebit = cx->thread->requestContext ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify
             * the GC. Otherwise the GC may start immediately after we
             * unlock while this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /* See the comments before another call to js_ShareWaitingTitles below. */
    cx->thread->gcWaiting = true;
    js_ShareWaitingTitles(cx);

    /*
     * Check that we did not release the GC lock above and let the GC finish
     * before we wait.
     */
    JS_ASSERT(rt->gcThread);
    JS_THREAD_DATA(cx)->conservativeGC.enable(true);

    /*
     * Wait for the GC to finish on the other thread, even if requestDebit
     * is 0 and even if the GC has not started yet because the gcThread is
     * waiting in BeginGCSession. This ensures that js_GC never returns
     * without a full GC cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    JS_THREAD_DATA(cx)->conservativeGC.disable();
    cx->thread->gcWaiting = false;
    rt->requestCount += requestDebit;
}

#endif

/*
 * Start a new GC session assuming no GC is running on this or other threads.
 * Together with LetOtherGCFinish this function contains the rendezvous
 * algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return. The caller must call EndGCSession when
 * GC work is done.
 */
static void
BeginGCSession(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback();
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    JS_ASSERT_IF(cx->requestDepth != 0, cx->thread->requestContext);
    size_t requestDebit = cx->thread->requestContext ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        /*
         * Share any title that is owned by the GC thread before we wait, to
         * avoid a deadlock with ClaimTitle. We also set the gcWaiting flag so
         * that ClaimTitle can claim the title ownership from the GC thread if
         * that function is called while the GC is waiting.
         */
        cx->thread->gcWaiting = true;
        js_ShareWaitingTitles(cx);
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        cx->thread->gcWaiting = false;
        rt->requestCount += requestDebit;
    }

#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end. This way js_WaitForGC called outside a request
     * would not block on the GC that is waiting for other requests to finish
     * with rt->gcThread set while JS_BeginRequest would do such wait.
     */
    rt->gcRunning = true;
}

/* End the current GC session and allow other threads to proceed. */
static void
EndGCSession(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == cx->thread);
    rt->gcThread = NULL;
    JS_NOTIFY_GC_DONE(rt);
#endif
}

/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
#ifndef JS_THREADSAFE
    if (rt->gcRunning) {
        rt->gcPoke = true;
        return;
    }
#else /* JS_THREADSAFE */
    if (rt->gcThread) {
        rt->gcPoke = true;
        if (cx->thread == rt->gcThread) {
            JS_ASSERT(rt->gcRunning);
            return;
        }
        LetOtherGCFinish(cx);

        /*
         * Check if the GC on another thread has collected the garbage and
         * it was not a set slot request.
         */
        if (!rt->gcPoke)
            return;
    }
#endif /* JS_THREADSAFE */

    BeginGCSession(cx);

    METER(rt->gcStats.poke++);

    /*
     * Do not scan the current thread's stack during shutdown or when the GC
     * is called outside a request.
     */
    bool scanGCThreadStack = (rt->state != JSRTS_LANDING);
#ifdef JS_THREADSAFE
    scanGCThreadStack &= !!cx->thread->requestContext;
#endif
    if (scanGCThreadStack)
        JS_THREAD_DATA(cx)->conservativeGC.enable(true);
    bool firstRun = true;
    do {
        rt->gcPoke = false;
        {
            AutoUnlockGC unlock(rt);
            if (firstRun) {
                PreGCCleanup(cx, gckind);
                TIMESTAMP(startMark);
                firstRun = false;
            }
            GC(cx GCTIMER_ARG);
        }

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

    if (scanGCThreadStack)
        JS_THREAD_DATA(cx)->conservativeGC.disable();

    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);

    EndGCSession(cx);
}

/*
 * The gckind flag bit GC_LOCK_HELD indicates a call from js_NewGCThing with
 * rt->gcLock already held, so the lock should be kept on return.
 */
void
js_GC(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up, and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    GCTIMER_BEGIN();

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            Conditionally<AutoUnlockGC> unlockIf(!!(gckind & GC_LOCK_HELD), rt);
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
            /* Lock out other GC allocator and collector invocations. */
            Conditionally<AutoLockGC> lockIf(!(gckind & GC_LOCK_HELD), rt);

            GCUntilDone(cx, gckind GCTIMER_ARG);
        }

        /* We re-sample the callback as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback) {
            Conditionally<AutoUnlockGC> unlockIf(!!(gckind & GC_LOCK_HELD), rt);

            (void) callback(cx, JSGC_END);
        }

        /*
         * On shutdown, iterate until the JSGC_END callback stops creating
         * garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);

    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}

namespace js {

bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
    JSRuntime *rt = cx->runtime;

    /*
     * This function cannot be called during the GC and always requires a
     * request.
     */
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->requestDepth);
#endif

    AutoLockGC lock(rt);

    /*
     * The set slot request cannot be called recursively and must not be
     * called during a normal GC. So if at this point JSRuntime::gcThread is
     * set it must be a GC or a set slot request from another thread.
     */
#ifdef JS_THREADSAFE
    if (rt->gcThread) {
        JS_ASSERT(cx->thread != rt->gcThread);
        LetOtherGCFinish(cx);
    }
#endif

    BeginGCSession(cx);

    bool cycle;
    {
        AutoUnlockGC unlock(rt);
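
        /*
         * Walk the proto chain, unwrapping any wrapped objects along the
         * way; if we ever arrive back at obj, installing proto would create
         * a cycle.
         */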
        cycle = false;
        for (JSObject *obj2 = proto; obj2;) {
            obj2 = obj2->wrappedObject(cx);
            if (obj2 == obj) {
                cycle = true;
                break;
            }
            obj2 = obj2->getProto();
        }
        if (!cycle)
            obj->setProto(proto);
    }

    EndGCSession(cx);

    return !cycle;
}

JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = new JSCompartment(rt);
    if (!compartment || !compartment->init()) {
        /* Avoid leaking the compartment on failure; delete of NULL is a no-op. */
        delete compartment;
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            /* Nothing else owns the compartment yet, so destroy it here. */
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            delete compartment;
            JS_ReportOutOfMemory(cx);
            return NULL;
        }
    }
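
    /*
     * Give the embedding a chance to veto the new compartment via the
     * JSCOMPARTMENT_NEW callback; on veto the compartment must be unlinked
     * from the runtime again under the GC lock and destroyed.
     */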
    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        {
            AutoLockGC lock(rt);
            rt->compartments.popBack();
        }
        if (compartment->principals)
            JSPRINCIPALS_DROP(cx, compartment->principals);
        delete compartment;
        return NULL;
    }
    return compartment;
}