/* Remove temporary diagnostics (b=605754 r=dmandelin a=betaN+) */
/* [mozilla-central.git] / js / src / jscntxt.h */
/* blob a09352a102105cd28bb540a4d4d6c8e2968fb36f */
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=78:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
25 * Contributor(s):
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
41 #ifndef jscntxt_h___
42 #define jscntxt_h___
/* JS execution context. */
46 #include <string.h>
48 /* Gross special case for Gecko, which defines malloc/calloc/free. */
49 #ifdef mozilla_mozalloc_macro_wrappers_h
50 # define JS_UNDEFD_MOZALLOC_WRAPPERS
51 /* The "anti-header" */
52 # include "mozilla/mozalloc_undef_macro_wrappers.h"
53 #endif
55 #include "jsprvtd.h"
56 #include "jsarena.h"
57 #include "jsclist.h"
58 #include "jslong.h"
59 #include "jsatom.h"
60 #include "jsdhash.h"
61 #include "jsdtoa.h"
62 #include "jsfun.h"
63 #include "jsgc.h"
64 #include "jsgcchunk.h"
65 #include "jshashtable.h"
66 #include "jsinterp.h"
67 #include "jsmath.h"
68 #include "jsobj.h"
69 #include "jspropertycache.h"
70 #include "jspropertytree.h"
71 #include "jsstaticcheck.h"
72 #include "jsutil.h"
73 #include "jsarray.h"
74 #include "jsvector.h"
75 #include "prmjtime.h"
77 #ifdef _MSC_VER
78 #pragma warning(push)
79 #pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */
80 #pragma warning(push)
81 #pragma warning(disable:4355) /* Silence warning about "this" used in base member initializer list */
82 #endif
85 * js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
86 * given pc in a script. We use the script->code pointer to tag the cache,
87 * instead of the script address itself, so that source notes are always found
88 * by offset from the bytecode with which they were generated.
90 typedef struct JSGSNCache {
91 jsbytecode *code;
92 JSDHashTable table;
93 #ifdef JS_GSNMETER
94 uint32 hits;
95 uint32 misses;
96 uint32 fills;
97 uint32 purges;
98 # define GSN_CACHE_METER(cache,cnt) (++(cache)->cnt)
99 #else
100 # define GSN_CACHE_METER(cache,cnt) /* nothing */
101 #endif
102 } JSGSNCache;
104 #define js_FinishGSNCache(cache) js_PurgeGSNCache(cache)
106 extern void
107 js_PurgeGSNCache(JSGSNCache *cache);
109 /* These helper macros take a cx as parameter and operate on its GSN cache. */
110 #define JS_PURGE_GSN_CACHE(cx) js_PurgeGSNCache(&JS_GSN_CACHE(cx))
111 #define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
/* Forward declarations of nanojit types. */
namespace nanojit {

class Assembler;
class CodeAlloc;
class Fragment;
template<typename K> struct DefaultHash;
template<typename K, typename V, typename H> class HashMap;
template<typename T> class Seq;

} /* namespace nanojit */

/* Forward declaration of the JSC executable-memory allocator. */
namespace JSC {
class ExecutableAllocator;
} /* namespace JSC */
129 namespace js {
131 /* Tracer constants. */
132 static const size_t MONITOR_N_GLOBAL_STATES = 4;
133 static const size_t FRAGMENT_TABLE_SIZE = 512;
134 static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
135 static const size_t MAX_CALL_STACK_ENTRIES = 500;
136 static const size_t MAX_GLOBAL_SLOTS = 4096;
137 static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
138 static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16;
140 /* Forward declarations of tracer types. */
141 class VMAllocator;
142 class FrameInfoCache;
143 struct REHashFn;
144 struct REHashKey;
145 struct FrameInfo;
146 struct VMSideExit;
147 struct TreeFragment;
148 struct TracerState;
149 template<typename T> class Queue;
150 typedef Queue<uint16> SlotList;
151 class TypeMap;
152 struct REFragment;
153 typedef nanojit::HashMap<REHashKey, REFragment*, REHashFn> REHashMap;
154 class LoopProfile;
156 #if defined(JS_JIT_SPEW) || defined(DEBUG)
157 struct FragPI;
158 typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap;
159 #endif
161 namespace mjit {
162 class JaegerCompartment;
166 * Allocation policy that calls JSContext memory functions and reports errors
167 * to the context. Since the JSContext given on construction is stored for
168 * the lifetime of the container, this policy may only be used for containers
169 * whose lifetime is a shorter than the given JSContext.
171 class ContextAllocPolicy
173 JSContext *cx;
175 public:
176 ContextAllocPolicy(JSContext *cx) : cx(cx) {}
177 JSContext *context() const { return cx; }
179 /* Inline definitions below. */
180 void *malloc(size_t bytes);
181 void free(void *p);
182 void *realloc(void *p, size_t bytes);
183 void reportAllocOverflow() const;
186 /* Holds the execution state during trace execution. */
187 struct TracerState
189 JSContext* cx; // current VM context handle
190 double* stackBase; // native stack base
191 double* sp; // native stack pointer, stack[0] is spbase[0]
192 double* eos; // first unusable word after the native stack / begin of globals
193 FrameInfo** callstackBase; // call stack base
194 void* sor; // start of rp stack
195 FrameInfo** rp; // call stack pointer
196 void* eor; // first unusable word after the call stack
197 VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
198 VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
199 // call exit guard mismatched
200 void* rpAtLastTreeCall; // value of rp at innermost tree call guard
201 VMSideExit* outermostTreeExitGuard; // the last side exit returned by js_CallTree
202 TreeFragment* outermostTree; // the outermost tree we initially invoked
203 uintN* inlineCallCountp; // inline call count counter
204 VMSideExit** innermostNestedGuardp;
205 VMSideExit* innermost;
206 uint64 startTime;
207 TracerState* prev;
209 // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
210 // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
211 // if an error or exception occurred.
212 uint32 builtinStatus;
214 // Used to communicate the location of the return value in case of a deep bail.
215 double* deepBailSp;
217 // Used when calling natives from trace to root the vp vector.
218 uintN nativeVpLen;
219 js::Value* nativeVp;
221 TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti,
222 uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
223 ~TracerState();
227 * Storage for the execution state and store during trace execution. Generated
228 * code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
229 * doubles after the stack begins. Thus, on trace, |TracerState::eos| holds a
230 * pointer to the first global.
232 struct TraceNativeStorage
234 double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
235 FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];
237 double *stack() { return stack_global_buf; }
238 double *global() { return stack_global_buf + MAX_NATIVE_STACK_SLOTS; }
239 FrameInfo **callstack() { return callstack_buf; }
242 /* Holds data to track a single globa. */
243 struct GlobalState {
244 JSObject* globalObj;
245 uint32 globalShape;
246 SlotList* globalSlots;
250 * A StackSegment (referred to as just a 'segment') contains a prev-linked set
251 * of stack frames and the slots associated with each frame. A segment and its
252 * contained frames/slots also have a precise memory layout that is described
253 * in the js::StackSpace comment. A key layout invariant for segments is that
254 * prev-linked frames are adjacent in memory, separated only by the values that
255 * constitute the locals and expression stack of the prev-frame.
257 * The set of stack frames in a non-empty segment start at the segment's
258 * "current frame", which is the most recently pushed frame, and ends at the
259 * segment's "initial frame". Note that, while all stack frames in a segment
260 * are prev-linked, not all prev-linked frames are in the same segment. Hence,
261 * for a segment |ss|, |ss->getInitialFrame()->prev| may be non-null and in a
262 * different segment. This occurs when the VM reenters itself (via Invoke or
263 * Execute). In full generality, a single context may contain a forest of trees
264 * of stack frames. With respect to this forest, a segment contains a linear
265 * path along a single tree, not necessarily to the root.
267 * The frames of a non-empty segment must all be in the same context and thus
268 * each non-empty segment is referred to as being "in" a context. Segments in a
269 * context have an additional state of being either "active" or "suspended". A
270 * suspended segment |ss| has a "suspended frame" which is snapshot of |cx->regs|
271 * when the segment was suspended and serves as the current frame of |ss|.
272 * There is at most one active segment in a given context. Segments in a
273 * context execute LIFO and are maintained in a stack. The top of this stack
274 * is the context's "current segment". If a context |cx| has an active segment
275 * |ss|, then:
276 * 1. |ss| is |cx|'s current segment,
277 * 2. |cx->regs != NULL|, and
278 * 3. |ss|'s current frame is |cx->regs->fp|.
279 * Moreover, |cx->regs != NULL| iff |cx| has an active segment.
281 * An empty segment is not associated with any context. Empty segments are
282 * created when there is not an active segment for a context at the top of the
283 * stack and claim space for the arguments of an Invoke before the Invoke's
284 * stack frame is pushed. During the intervals when the arguments have been
285 * pushed, but not the stack frame, the segment cannot be pushed onto the
286 * context, since that would require some hack to deal with cx->fp not being
287 * the current frame of cx->currentSegment.
289 * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
290 * segment may or may not be "saved". Normally, when the active segment is
291 * popped, the previous segment (which is necessarily suspended) becomes
292 * active. If the previous segment was saved, however, then it stays suspended
293 * until it is made active by a call to JS_RestoreFrameChain. This is why a
294 * context may have a current segment, but not an active segment.
296 class StackSegment
298 /* The context to which this segment belongs. */
299 JSContext *cx;
301 /* Link for JSContext segment stack mentioned in big comment above. */
302 StackSegment *previousInContext;
304 /* Link for StackSpace segment stack mentioned in StackSpace comment. */
305 StackSegment *previousInMemory;
307 /* The first frame executed in this segment. null iff cx is null */
308 JSStackFrame *initialFrame;
310 /* If this segment is suspended, |cx->regs| when it was suspended. */
311 JSFrameRegs *suspendedRegs;
313 /* The varobj on entry to initialFrame. */
314 JSObject *initialVarObj;
316 /* Whether this segment was suspended by JS_SaveFrameChain. */
317 bool saved;
319 /* Align at 8 bytes on all platforms. */
320 #if JS_BITS_PER_WORD == 32
321 void *padding;
322 #endif
325 * To make isActive a single null-ness check, this non-null constant is
326 * assigned to suspendedRegs when !inContext.
328 #define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1)
330 public:
331 StackSegment()
332 : cx(NULL), previousInContext(NULL), previousInMemory(NULL),
333 initialFrame(NULL), suspendedRegs(NON_NULL_SUSPENDED_REGS),
334 initialVarObj(NULL), saved(false)
336 JS_ASSERT(!inContext());
339 /* Safe casts guaranteed by the contiguous-stack layout. */
341 Value *valueRangeBegin() const {
342 return (Value *)(this + 1);
346 * As described in the comment at the beginning of the class, a segment
347 * is in one of three states:
349 * !inContext: the segment has been created to root arguments for a
350 * future call to Invoke.
351 * isActive: the segment describes a set of stack frames in a context,
352 * where the top frame currently executing.
353 * isSuspended: like isActive, but the top frame has been suspended.
356 bool inContext() const {
357 JS_ASSERT(!!cx == !!initialFrame);
358 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS && !saved);
359 return cx;
362 bool isActive() const {
363 JS_ASSERT_IF(!suspendedRegs, cx && !saved);
364 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
365 return !suspendedRegs;
368 bool isSuspended() const {
369 JS_ASSERT_IF(!cx || !suspendedRegs, !saved);
370 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
371 return cx && suspendedRegs;
374 /* Substate of suspended, queryable in any state. */
376 bool isSaved() const {
377 JS_ASSERT_IF(saved, isSuspended());
378 return saved;
381 /* Transitioning between inContext <--> isActive */
383 void joinContext(JSContext *cx, JSStackFrame *f) {
384 JS_ASSERT(!inContext());
385 this->cx = cx;
386 initialFrame = f;
387 suspendedRegs = NULL;
388 JS_ASSERT(isActive());
391 void leaveContext() {
392 JS_ASSERT(isActive());
393 this->cx = NULL;
394 initialFrame = NULL;
395 suspendedRegs = NON_NULL_SUSPENDED_REGS;
396 JS_ASSERT(!inContext());
399 JSContext *maybeContext() const {
400 return cx;
403 #undef NON_NULL_SUSPENDED_REGS
405 /* Transitioning between isActive <--> isSuspended */
407 void suspend(JSFrameRegs *regs) {
408 JS_ASSERT(isActive());
409 JS_ASSERT(regs && regs->fp && contains(regs->fp));
410 suspendedRegs = regs;
411 JS_ASSERT(isSuspended());
414 void resume() {
415 JS_ASSERT(isSuspended());
416 suspendedRegs = NULL;
417 JS_ASSERT(isActive());
420 /* When isSuspended, transitioning isSaved <--> !isSaved */
422 void save(JSFrameRegs *regs) {
423 JS_ASSERT(!isSuspended());
424 suspend(regs);
425 saved = true;
426 JS_ASSERT(isSaved());
429 void restore() {
430 JS_ASSERT(isSaved());
431 saved = false;
432 resume();
433 JS_ASSERT(!isSuspended());
436 /* Data available when inContext */
438 JSStackFrame *getInitialFrame() const {
439 JS_ASSERT(inContext());
440 return initialFrame;
443 inline JSFrameRegs *getCurrentRegs() const;
444 inline JSStackFrame *getCurrentFrame() const;
446 /* Data available when isSuspended. */
448 JSFrameRegs *getSuspendedRegs() const {
449 JS_ASSERT(isSuspended());
450 return suspendedRegs;
453 JSStackFrame *getSuspendedFrame() const {
454 return suspendedRegs->fp;
457 /* JSContext / js::StackSpace bookkeeping. */
459 void setPreviousInContext(StackSegment *seg) {
460 previousInContext = seg;
463 StackSegment *getPreviousInContext() const {
464 return previousInContext;
467 void setPreviousInMemory(StackSegment *seg) {
468 previousInMemory = seg;
471 StackSegment *getPreviousInMemory() const {
472 return previousInMemory;
475 void setInitialVarObj(JSObject *obj) {
476 JS_ASSERT(inContext());
477 initialVarObj = obj;
480 bool hasInitialVarObj() {
481 JS_ASSERT(inContext());
482 return initialVarObj != NULL;
485 JSObject &getInitialVarObj() const {
486 JS_ASSERT(inContext() && initialVarObj);
487 return *initialVarObj;
490 #ifdef DEBUG
491 JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const;
492 #endif
495 static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value);
496 JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0);
498 /* See StackSpace::pushInvokeArgs. */
499 class InvokeArgsGuard : public CallArgs
501 friend class StackSpace;
502 JSContext *cx; /* null implies nothing pushed */
503 StackSegment *seg;
504 Value *prevInvokeArgEnd;
505 #ifdef DEBUG
506 StackSegment *prevInvokeSegment;
507 JSStackFrame *prevInvokeFrame;
508 #endif
509 public:
510 InvokeArgsGuard() : cx(NULL), seg(NULL) {}
511 ~InvokeArgsGuard();
512 bool pushed() const { return cx != NULL; }
516 * This type can be used to call Invoke when the arguments have already been
517 * pushed onto the stack as part of normal execution.
519 struct InvokeArgsAlreadyOnTheStack : CallArgs
521 InvokeArgsAlreadyOnTheStack(Value *vp, uintN argc) : CallArgs(vp + 2, argc) {}
524 /* See StackSpace::pushInvokeFrame. */
525 class InvokeFrameGuard
527 friend class StackSpace;
528 JSContext *cx_; /* null implies nothing pushed */
529 JSFrameRegs regs_;
530 JSFrameRegs *prevRegs_;
531 public:
532 InvokeFrameGuard() : cx_(NULL) {}
533 ~InvokeFrameGuard() { if (pushed()) pop(); }
534 bool pushed() const { return cx_ != NULL; }
535 void pop();
536 JSStackFrame *fp() const { return regs_.fp; }
539 /* Reusable base; not for direct use. */
540 class FrameGuard
542 friend class StackSpace;
543 JSContext *cx_; /* null implies nothing pushed */
544 StackSegment *seg_;
545 Value *vp_;
546 JSStackFrame *fp_;
547 public:
548 FrameGuard() : cx_(NULL), vp_(NULL), fp_(NULL) {}
549 JS_REQUIRES_STACK ~FrameGuard();
550 bool pushed() const { return cx_ != NULL; }
551 StackSegment *segment() const { return seg_; }
552 Value *vp() const { return vp_; }
553 JSStackFrame *fp() const { return fp_; }
556 /* See StackSpace::pushExecuteFrame. */
557 class ExecuteFrameGuard : public FrameGuard
559 friend class StackSpace;
560 JSFrameRegs regs_;
563 /* See StackSpace::pushDummyFrame. */
564 class DummyFrameGuard : public FrameGuard
566 friend class StackSpace;
567 JSFrameRegs regs_;
570 /* See StackSpace::pushGeneratorFrame. */
571 class GeneratorFrameGuard : public FrameGuard
575 * Stack layout
577 * Each JSThreadData has one associated StackSpace object which allocates all
578 * segments for the thread. StackSpace performs all such allocations in a
579 * single, fixed-size buffer using a specific layout scheme that allows some
580 * associations between segments, frames, and slots to be implicit, rather
581 * than explicitly stored as pointers. To maintain useful invariants, stack
582 * space is not given out arbitrarily, but rather allocated/deallocated for
583 * specific purposes. The use cases currently supported are: calling a function
584 * with arguments (e.g. Invoke), executing a script (e.g. Execute), inline
585 * interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See
586 * associated member functions below.
588 * First, we consider the layout of individual segments. (See the
589 * js::StackSegment comment for terminology.) A non-empty segment (i.e., a
590 * segment in a context) has the following layout:
592 * initial frame current frame ------. if regs,
593 * .------------. | | regs->sp
594 * | V V V
595 * |segment| slots |frame| slots |frame| slots |frame| slots |
596 * | ^ | ^ |
597 * ? <----------' `----------' `----------'
598 * prev prev prev
600 * Moreover, the bytes in the following ranges form a contiguous array of
601 * Values that are marked during GC:
602 * 1. between a segment and its first frame
603 * 2. between two adjacent frames in a segment
604 * 3. between a segment's current frame and (if fp->regs) fp->regs->sp
605 * Thus, the VM must ensure that all such Values are safe to be marked.
607 * An empty segment is followed by arguments that are rooted by the
608 * StackSpace::invokeArgEnd pointer:
610 * invokeArgEnd
613 * |segment| slots |
615 * Above the level of segments, a StackSpace is simply a contiguous sequence
616 * of segments kept in a linked list:
618 * base currentSegment firstUnused end
619 * | | | |
620 * V V V V
621 * |segment| --- |segment| --- |segment| ------- | |
622 * | ^ | ^ |
623 * 0 <---' `-----------' `-----------'
624 * previous previous previous
626 * Both js::StackSpace and JSContext maintain a stack of segments, the top of
627 * which is the "current segment" for that thread or context, respectively.
628 * Since different contexts can arbitrarily interleave execution in a single
629 * thread, these stacks are different enough that a segment needs both
630 * "previousInMemory" and "previousInContext".
632 * For example, in a single thread, a function in segment S1 in a context CX1
633 * may call out into C++ code that reenters the VM in a context CX2, which
634 * creates a new segment S2 in CX2, and CX1 may or may not equal CX2.
636 * Note that there is some structure to this interleaving of segments:
637 * 1. the inclusion from segments in a context to segments in a thread
638 * preserves order (in terms of previousInContext and previousInMemory,
639 * respectively).
640 * 2. the mapping from stack frames to their containing segment preserves
641 * order (in terms of prev and previousInContext, respectively).
643 class StackSpace
645 Value *base;
646 #ifdef XP_WIN
647 mutable Value *commitEnd;
648 #endif
649 Value *end;
650 StackSegment *currentSegment;
651 #ifdef DEBUG
653 * Keep track of which segment/frame bumped invokeArgEnd so that
654 * firstUnused() can assert that, when invokeArgEnd is used as the top of
655 * the stack, it is being used appropriately.
657 StackSegment *invokeSegment;
658 JSStackFrame *invokeFrame;
659 #endif
660 Value *invokeArgEnd;
662 friend class InvokeArgsGuard;
663 friend class InvokeFrameGuard;
664 friend class FrameGuard;
666 bool pushSegmentForInvoke(JSContext *cx, uintN argc, InvokeArgsGuard *ag);
667 void popSegmentForInvoke(const InvokeArgsGuard &ag);
669 bool pushInvokeFrameSlow(JSContext *cx, const InvokeArgsGuard &ag,
670 InvokeFrameGuard *fg);
671 void popInvokeFrameSlow(const CallArgs &args);
673 bool getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nfixed,
674 FrameGuard *fg) const;
675 void pushSegmentAndFrame(JSContext *cx, JSObject *initialVarObj,
676 JSFrameRegs *regs, FrameGuard *fg);
677 void popSegmentAndFrame(JSContext *cx);
679 struct EnsureSpaceCheck {
680 inline bool operator()(const StackSpace &, JSContext *, Value *, uintN);
683 struct LimitCheck {
684 JSStackFrame *base;
685 Value **limit;
686 LimitCheck(JSStackFrame *base, Value **limit) : base(base), limit(limit) {}
687 inline bool operator()(const StackSpace &, JSContext *, Value *, uintN);
690 template <class Check>
691 inline JSStackFrame *getCallFrame(JSContext *cx, Value *sp, uintN nactual,
692 JSFunction *fun, JSScript *script,
693 uint32 *pflags, Check check) const;
695 inline void popInvokeArgs(const InvokeArgsGuard &args);
696 inline void popInvokeFrame(const InvokeFrameGuard &ag);
698 inline Value *firstUnused() const;
700 inline bool isCurrentAndActive(JSContext *cx) const;
701 friend class AllFramesIter;
702 StackSegment *getCurrentSegment() const { return currentSegment; }
704 #ifdef XP_WIN
705 /* Commit more memory from the reserved stack space. */
706 JS_FRIEND_API(bool) bumpCommit(Value *from, ptrdiff_t nvals) const;
707 #endif
709 public:
710 static const size_t CAPACITY_VALS = 512 * 1024;
711 static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value);
712 static const size_t COMMIT_VALS = 16 * 1024;
713 static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value);
716 * SunSpider and v8bench have roughly an average of 9 slots per script.
717 * Our heuristic for a quick over-recursion check uses a generous slot
718 * count based on this estimate. We take this frame size and multiply it
719 * by the old recursion limit from the interpreter.
721 * Worst case, if an average size script (<=9 slots) over recurses, it'll
722 * effectively be the same as having increased the old inline call count
723 * to <= 5,000.
725 static const size_t STACK_QUOTA = (VALUES_PER_STACK_FRAME + 18) *
726 JS_MAX_INLINE_CALL_COUNT;
728 /* Kept as a member of JSThreadData; cannot use constructor/destructor. */
729 bool init();
730 void finish();
732 #ifdef DEBUG
733 template <class T>
734 bool contains(T *t) const {
735 char *v = (char *)t;
736 JS_ASSERT(size_t(-1) - uintptr_t(t) >= sizeof(T));
737 return v >= (char *)base && v + sizeof(T) <= (char *)end;
739 #endif
742 * When we LeaveTree, we need to rebuild the stack, which requires stack
743 * allocation. There is no good way to handle an OOM for these allocations,
744 * so this function checks that they cannot occur using the size of the
745 * TraceNativeStorage as a conservative upper bound.
747 inline bool ensureEnoughSpaceToEnterTrace();
749 /* +1 for slow native's stack frame. */
750 static const ptrdiff_t MAX_TRACE_SPACE_VALS =
751 MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME +
752 (VALUES_PER_STACK_SEGMENT + VALUES_PER_STACK_FRAME /* synthesized slow native */);
754 /* Mark all segments, frames, and slots on the stack. */
755 JS_REQUIRES_STACK void mark(JSTracer *trc);
758 * For all five use cases below:
759 * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
760 * - The "get*Frame" functions do not change any global state, they just
761 * check OOM and return pointers to an uninitialized frame with the
762 * requested missing arguments/slots. Only once the "push*Frame"
763 * function has been called is global state updated. Thus, between
764 * "get*Frame" and "push*Frame", the frame and slots are unrooted.
765 * - The "push*Frame" functions will set fp->prev; the caller needn't.
766 * - Functions taking "*Guard" arguments will use the guard's destructor
767 * to pop the allocation. The caller must ensure the guard has the
768 * appropriate lifetime.
769 * - The get*Frame functions put the 'nmissing' slots contiguously after
770 * the arguments.
774 * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
775 * the arguments to Invoke. A single allocation can be used for multiple
776 * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from
777 * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
779 bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag);
781 /* These functions are called inside Invoke, not Invoke clients. */
782 bool getInvokeFrame(JSContext *cx, const CallArgs &args, JSFunction *fun,
783 JSScript *script, uint32 *flags, InvokeFrameGuard *fg) const;
785 void pushInvokeFrame(JSContext *cx, const CallArgs &args, InvokeFrameGuard *fg);
787 /* These functions are called inside Execute, not Execute clients. */
788 bool getExecuteFrame(JSContext *cx, JSScript *script, ExecuteFrameGuard *fg) const;
789 void pushExecuteFrame(JSContext *cx, JSObject *initialVarObj, ExecuteFrameGuard *fg);
792 * Since RAII cannot be used for inline frames, callers must manually
793 * call pushInlineFrame/popInlineFrame.
795 inline JSStackFrame *getInlineFrame(JSContext *cx, Value *sp, uintN nactual,
796 JSFunction *fun, JSScript *script,
797 uint32 *flags) const;
798 inline void pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp,
799 JSFrameRegs *regs);
800 inline void popInlineFrame(JSContext *cx, JSStackFrame *prev, js::Value *newsp);
802 /* These functions are called inside SendToGenerator. */
803 bool getGeneratorFrame(JSContext *cx, uintN vplen, uintN nfixed,
804 GeneratorFrameGuard *fg);
805 void pushGeneratorFrame(JSContext *cx, JSFrameRegs *regs, GeneratorFrameGuard *fg);
807 /* Pushes a JSStackFrame::isDummyFrame. */
808 bool pushDummyFrame(JSContext *cx, JSObject &scopeChain, DummyFrameGuard *fg);
810 /* Check and bump the given stack limit. */
811 inline JSStackFrame *getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
812 JSFunction *fun, JSScript *script, uint32 *flags,
813 JSStackFrame *base, Value **limit) const;
816 * Compute a stack limit for entering method jit code which allows the
817 * method jit to check for end-of-stack and over-recursion with a single
818 * comparison. See STACK_QUOTA above.
820 inline Value *getStackLimit(JSContext *cx);
823 * Try to bump the given 'limit' by bumping the commit limit. Return false
824 * if fully committed or if 'limit' exceeds 'base' + STACK_QUOTA.
826 bool bumpCommitAndLimit(JSStackFrame *base, Value *from, uintN nvals, Value **limit) const;
829 * Allocate nvals on the top of the stack, report error on failure.
830 * N.B. the caller must ensure |from >= firstUnused()|.
832 inline bool ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const;
835 JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
838 * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved
839 * value of pc/sp for any other frame, it is necessary to know about that
840 * frame's next-frame. This iterator maintains this information when walking
841 * a chain of stack frames starting at |cx->fp|.
843 * Usage:
844 * for (FrameRegsIter i(cx); !i.done(); ++i)
845 * ... i.fp() ... i.sp() ... i.pc()
847 class FrameRegsIter
849 JSContext *cx;
850 StackSegment *curseg;
851 JSStackFrame *curfp;
852 Value *cursp;
853 jsbytecode *curpc;
855 void initSlow();
856 void incSlow(JSStackFrame *fp, JSStackFrame *prev);
858 public:
859 JS_REQUIRES_STACK inline FrameRegsIter(JSContext *cx);
861 bool done() const { return curfp == NULL; }
862 inline FrameRegsIter &operator++();
864 JSStackFrame *fp() const { return curfp; }
865 Value *sp() const { return cursp; }
866 jsbytecode *pc() const { return curpc; }
870 * Utility class for iteration over all active stack frames.
872 class AllFramesIter
874 public:
875 AllFramesIter(JSContext *cx);
877 bool done() const { return curfp == NULL; }
878 AllFramesIter& operator++();
880 JSStackFrame *fp() const { return curfp; }
882 private:
883 StackSegment *curcs;
884 JSStackFrame *curfp;
887 /* Holds the number of recording attemps for an address. */
888 typedef HashMap<jsbytecode*,
889 size_t,
890 DefaultHasher<jsbytecode*>,
891 SystemAllocPolicy> RecordAttemptMap;
893 /* Holds the profile data for loops. */
894 typedef HashMap<jsbytecode*,
895 LoopProfile*,
896 DefaultHasher<jsbytecode*>,
897 SystemAllocPolicy> LoopProfileMap;
899 class Oracle;
901 typedef HashSet<JSScript *,
902 DefaultHasher<JSScript *>,
903 SystemAllocPolicy> TracedScriptSet;
906 * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
907 * JS_THREADSAFE) has an associated trace monitor that keeps track of loop
908 * frequencies for all JavaScript code loaded into that runtime.
910 struct TraceMonitor {
912 * The context currently executing JIT-compiled code on this thread, or
913 * NULL if none. Among other things, this can in certain cases prevent
914 * last-ditch GC and suppress calls to JS_ReportOutOfMemory.
916 * !tracecx && !recorder: not on trace
917 * !tracecx && recorder: recording
918 * tracecx && !recorder: executing a trace
919 * tracecx && recorder: executing inner loop, recording outer loop
921 JSContext *tracecx;
924 * Cached storage to use when executing on trace. While we may enter nested
925 * traces, we always reuse the outer trace's storage, so never need more
926 * than of these.
928 TraceNativeStorage *storage;
931 * There are 5 allocators here. This might seem like overkill, but they
932 * have different lifecycles, and by keeping them separate we keep the
933 * amount of retained memory down significantly. They are flushed (ie.
934 * all the allocated memory is freed) periodically.
936 * - dataAlloc has the lifecycle of the monitor. It's flushed only when
937 * the monitor is flushed. It's used for fragments.
939 * - traceAlloc has the same flush lifecycle as the dataAlloc, but it is
940 * also *marked* when a recording starts and rewinds to the mark point
941 * if recording aborts. So you can put things in it that are only
942 * reachable on a successful record/compile cycle like GuardRecords and
943 * SideExits.
945 * - tempAlloc is flushed after each recording, successful or not. It's
946 * used to store LIR code and for all other elements in the LIR
947 * pipeline.
949 * - reTempAlloc is just like tempAlloc, but is used for regexp
950 * compilation in RegExpNativeCompiler rather than normal compilation in
951 * TraceRecorder.
953 * - codeAlloc has the same lifetime as dataAlloc, but its API is
954 * different (CodeAlloc vs. VMAllocator). It's used for native code.
955 * It's also a good idea to keep code and data separate to avoid I-cache
956 * vs. D-cache issues.
958 VMAllocator* dataAlloc;
959 VMAllocator* traceAlloc;
960 VMAllocator* tempAlloc;
961 VMAllocator* reTempAlloc;
962 nanojit::CodeAlloc* codeAlloc;
963 nanojit::Assembler* assembler;
964 FrameInfoCache* frameCache;
966 /* This gets incremented every time the monitor is flushed. */
967 uintN flushEpoch;
969 Oracle* oracle;
970 TraceRecorder* recorder;
972 /* If we are profiling a loop, this tracks the current profile. Otherwise NULL. */
973 LoopProfile* profile;
975 GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
976 TreeFragment* vmfragments[FRAGMENT_TABLE_SIZE];
977 RecordAttemptMap* recordAttempts;
979 /* A hashtable mapping PC values to loop profiles for those loops. */
980 LoopProfileMap* loopProfiles;
983 * Maximum size of the code cache before we start flushing. 1/16 of this
984 * size is used as threshold for the regular expression code cache.
986 uint32 maxCodeCacheBytes;
989 * If nonzero, do not flush the JIT cache after a deep bail. That would
990 * free JITted code pages that we will later return to. Instead, set the
991 * needFlush flag so that it can be flushed later.
993 JSBool needFlush;
996 * Fragment map for the regular expression compiler.
998 REHashMap* reFragments;
1000 // Cached temporary typemap to avoid realloc'ing every time we create one.
1001 // This must be used in only one place at a given time. It must be cleared
1002 // before use.
1003 TypeMap* cachedTempTypeMap;
1005 /* Scripts with recorded fragments. */
1006 TracedScriptSet tracedScripts;
1008 #ifdef DEBUG
1009 /* Fields needed for fragment/guard profiling. */
1010 nanojit::Seq<nanojit::Fragment*>* branches;
1011 uint32 lastFragID;
1013 * profAlloc has a lifetime which spans exactly from js_InitJIT to
1014 * js_FinishJIT.
1016 VMAllocator* profAlloc;
1017 FragStatsMap* profTab;
1018 #endif
1020 bool ontrace() const {
1021 return !!tracecx;
1024 /* Flush the JIT cache. */
1025 void flush();
1027 /* Sweep any cache entry pointing to dead GC things. */
1028 void sweep();
1030 bool outOfMemory() const;
1033 } /* namespace js */
/*
 * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
 * thread, regardless of whether cx is the context in which that trace is
 * executing. cx must be a context on the current thread.
 */
#ifdef JS_TRACER
# define JS_ON_TRACE(cx)    (JS_TRACE_MONITOR(cx).ontrace())
#else
# define JS_ON_TRACE(cx)    false
#endif
/* Number of potentially reusable scriptsToGC to search for the eval cache. */
#ifndef JS_EVAL_CACHE_SHIFT
# define JS_EVAL_CACHE_SHIFT    6
#endif
#define JS_EVAL_CACHE_SIZE      JS_BIT(JS_EVAL_CACHE_SHIFT)

#ifdef DEBUG
# define EVAL_CACHE_METER_LIST(_)   _(probe), _(hit), _(step), _(noscope)
# define identity(x)                x

/* Debug-only eval-cache counters: one uint64 per EVAL_CACHE_METER_LIST entry. */
struct JSEvalCacheMeter {
    uint64 EVAL_CACHE_METER_LIST(identity);
};

# undef identity
#endif
#ifdef DEBUG
# define FUNCTION_KIND_METER_LIST(_)                                          \
                        _(allfun), _(heavy), _(nofreeupvar), _(onlyfreevar),  \
                        _(display), _(flat), _(setupvar), _(badfunarg),       \
                        _(joinedsetmethod), _(joinedinitmethod),              \
                        _(joinedreplace), _(joinedsort), _(joinedmodulepat),  \
                        _(mreadbarrier), _(mwritebarrier), _(mwslotbarrier),  \
                        _(unjoined)
# define identity(x)    x

/* Debug-only function-kind counters: one int32 per FUNCTION_KIND_METER_LIST entry. */
struct JSFunctionMeter {
    int32 FUNCTION_KIND_METER_LIST(identity);
};

# undef identity

# define JS_FUNCTION_METER(cx,x) JS_RUNTIME_METER((cx)->runtime, functionMeter.x)
#else
# define JS_FUNCTION_METER(cx,x) ((void)0)
#endif
/* Geometry of the per-thread cache of recently used native iterators. */
#define NATIVE_ITER_CACHE_LOG2  8
#define NATIVE_ITER_CACHE_MASK  JS_BITMASK(NATIVE_ITER_CACHE_LOG2)
#define NATIVE_ITER_CACHE_SIZE  JS_BIT(NATIVE_ITER_CACHE_LOG2)
1089 struct JSPendingProxyOperation {
1090 JSPendingProxyOperation *next;
1091 JSObject *object;
1094 struct JSThreadData {
1095 #ifdef JS_THREADSAFE
1096 /* The request depth for this thread. */
1097 unsigned requestDepth;
1098 #endif
1101 * If non-zero, we were been asked to call the operation callback as soon
1102 * as possible. If the thread has an active request, this contributes
1103 * towards rt->interruptCounter.
1105 volatile int32 interruptFlags;
1107 /* Keeper of the contiguous stack used by all contexts in this thread. */
1108 js::StackSpace stackSpace;
1111 * Flag indicating that we are waiving any soft limits on the GC heap
1112 * because we want allocations to be infallible (except when we hit OOM).
1114 bool waiveGCQuota;
1117 * The GSN cache is per thread since even multi-cx-per-thread embeddings
1118 * do not interleave js_GetSrcNote calls.
1120 JSGSNCache gsnCache;
1122 /* Property cache for faster call/get/set invocation. */
1123 js::PropertyCache propertyCache;
1125 #ifdef JS_TRACER
1126 /* Trace-tree JIT recorder/interpreter state. */
1127 js::TraceMonitor traceMonitor;
1129 /* Counts the number of iterations run by a trace. */
1130 unsigned iterationCounter;
1131 #endif
1133 /* Lock-free hashed lists of scripts created by eval to garbage-collect. */
1134 JSScript *scriptsToGC[JS_EVAL_CACHE_SIZE];
1136 #ifdef DEBUG
1137 JSEvalCacheMeter evalCacheMeter;
1138 #endif
1140 /* State used by dtoa.c. */
1141 DtoaState *dtoaState;
1144 * A single-entry cache for some base-10 double-to-string conversions.
1145 * This helps date-format-xparb.js. It also avoids skewing the results
1146 * for v8-splay.js when measured by the SunSpider harness, where the splay
1147 * tree initialization (which includes many repeated double-to-string
1148 * conversions) is erroneously included in the measurement; see bug
1149 * 562553.
1151 struct {
1152 jsdouble d;
1153 jsint base;
1154 JSString *s; // if s==NULL, d and base are not valid
1155 } dtoaCache;
1157 /* Cached native iterators. */
1158 JSObject *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
1160 /* Native iterator most recently started. */
1161 JSObject *lastNativeIterator;
1163 /* Base address of the native stack for the current thread. */
1164 jsuword *nativeStackBase;
1166 /* List of currently pending operations on proxies. */
1167 JSPendingProxyOperation *pendingProxyOperation;
1169 js::ConservativeGCThreadData conservativeGC;
1171 private:
1172 js::MathCache *mathCache;
1174 js::MathCache *allocMathCache(JSContext *cx);
1175 public:
1177 js::MathCache *getMathCache(JSContext *cx) {
1178 return mathCache ? mathCache : allocMathCache(cx);
1181 bool init();
1182 void finish();
1183 void mark(JSTracer *trc);
1184 void purge(JSContext *cx);
1186 /* This must be called with the GC lock held. */
1187 inline void triggerOperationCallback(JSRuntime *rt);
#ifdef JS_THREADSAFE

/*
 * Structure uniquely representing a thread. It holds thread-private data
 * that can be accessed without a global lock.
 */
struct JSThread {
    typedef js::HashMap<void *,
                        JSThread *,
                        js::DefaultHasher<void *>,
                        js::SystemAllocPolicy> Map;

    /* Linked list of all contexts in use on this thread. */
    JSCList             contextList;

    /* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */
    void                *id;

    /* Number of JS_SuspendRequest calls without JS_ResumeRequest. */
    unsigned            suspendCount;

# ifdef DEBUG
    unsigned            checkRequestDepth;
# endif

    /* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
    JSThreadData        data;
};

#define JS_THREAD_DATA(cx)      (&(cx)->thread->data)

extern JSThread *
js_CurrentThread(JSRuntime *rt);

/*
 * The function takes the GC lock and does not release in successful return.
 * On error (out of memory) the function releases the lock but delegates
 * the error reporting to the caller.
 */
extern JSBool
js_InitContextThread(JSContext *cx);

/*
 * On entrance the GC lock must be held and it will be held on exit.
 */
extern void
js_ClearContextThread(JSContext *cx);

#endif /* JS_THREADSAFE */
1240 typedef enum JSDestroyContextMode {
1241 JSDCM_NO_GC,
1242 JSDCM_MAYBE_GC,
1243 JSDCM_FORCE_GC,
1244 JSDCM_NEW_FAILED
1245 } JSDestroyContextMode;
1247 typedef enum JSRuntimeState {
1248 JSRTS_DOWN,
1249 JSRTS_LAUNCHING,
1250 JSRTS_UP,
1251 JSRTS_LANDING
1252 } JSRuntimeState;
1254 typedef struct JSPropertyTreeEntry {
1255 JSDHashEntryHdr hdr;
1256 js::Shape *child;
1257 } JSPropertyTreeEntry;
1259 typedef void
1260 (* JSActivityCallback)(void *arg, JSBool active);
1262 namespace js {
1264 typedef js::Vector<JSCompartment *, 0, js::SystemAllocPolicy> WrapperVector;
1268 struct JSRuntime {
1269 /* Default compartment. */
1270 JSCompartment *defaultCompartment;
1271 #ifdef JS_THREADSAFE
1272 bool defaultCompartmentIsLocked;
1273 #endif
1275 /* List of compartments (protected by the GC lock). */
1276 js::WrapperVector compartments;
1278 /* Runtime state, synchronized by the stateChange/gcLock condvar/lock. */
1279 JSRuntimeState state;
1281 /* Context create/destroy callback. */
1282 JSContextCallback cxCallback;
1284 /* Compartment create/destroy callback. */
1285 JSCompartmentCallback compartmentCallback;
1288 * Sets a callback that is run whenever the runtime goes idle - the
1289 * last active request ceases - and begins activity - when it was
1290 * idle and a request begins. Note: The callback is called under the
1291 * GC lock.
1293 void setActivityCallback(JSActivityCallback cb, void *arg) {
1294 activityCallback = cb;
1295 activityCallbackArg = arg;
1298 JSActivityCallback activityCallback;
1299 void *activityCallbackArg;
1302 * Shape regenerated whenever a prototype implicated by an "add property"
1303 * property cache fill and induced trace guard has a readonly property or a
1304 * setter defined on it. This number proxies for the shapes of all objects
1305 * along the prototype chain of all objects in the runtime on which such an
1306 * add-property result has been cached/traced.
1308 * See bug 492355 for more details.
1310 * This comes early in JSRuntime to minimize the immediate format used by
1311 * trace-JITted code that reads it.
1313 uint32 protoHazardShape;
1315 /* Garbage collector state, used by jsgc.c. */
1316 js::GCChunkSet gcChunkSet;
1318 js::RootedValueMap gcRootsHash;
1319 js::GCLocks gcLocksHash;
1320 jsrefcount gcKeepAtoms;
1321 size_t gcBytes;
1322 size_t gcTriggerBytes;
1323 size_t gcLastBytes;
1324 size_t gcMaxBytes;
1325 size_t gcMaxMallocBytes;
1326 uint32 gcEmptyArenaPoolLifespan;
1327 uint32 gcNumber;
1328 js::GCMarker *gcMarkingTracer;
1329 uint32 gcTriggerFactor;
1330 volatile JSBool gcIsNeeded;
1333 * We can pack these flags as only the GC thread writes to them. Atomic
1334 * updates to packed bytes are not guaranteed, so stores issued by one
1335 * thread may be lost due to unsynchronized read-modify-write cycles on
1336 * other threads.
1338 bool gcPoke;
1339 bool gcMarkAndSweep;
1340 bool gcRunning;
1341 bool gcRegenShapes;
1343 #ifdef JS_GC_ZEAL
1344 jsrefcount gcZeal;
1345 #endif
1347 JSGCCallback gcCallback;
1349 private:
1351 * Malloc counter to measure memory pressure for GC scheduling. It runs
1352 * from gcMaxMallocBytes down to zero.
1354 volatile ptrdiff_t gcMallocBytes;
1356 public:
1357 js::GCChunkAllocator *gcChunkAllocator;
1359 void setCustomGCChunkAllocator(js::GCChunkAllocator *allocator) {
1360 JS_ASSERT(allocator);
1361 JS_ASSERT(state == JSRTS_DOWN);
1362 gcChunkAllocator = allocator;
1366 * The trace operation and its data argument to trace embedding-specific
1367 * GC roots.
1369 JSTraceDataOp gcExtraRootsTraceOp;
1370 void *gcExtraRootsData;
1372 /* Well-known numbers held for use by this runtime's contexts. */
1373 js::Value NaNValue;
1374 js::Value negativeInfinityValue;
1375 js::Value positiveInfinityValue;
1377 js::DeflatedStringCache *deflatedStringCache;
1379 JSString *emptyString;
1381 /* List of active contexts sharing this runtime; protected by gcLock. */
1382 JSCList contextList;
1384 /* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */
1385 JSDebugHooks globalDebugHooks;
1388 * Right now, we only support runtime-wide debugging.
1390 JSBool debugMode;
1392 #ifdef JS_TRACER
1393 /* True if any debug hooks not supported by the JIT are enabled. */
1394 bool debuggerInhibitsJIT() const {
1395 return (globalDebugHooks.interruptHook ||
1396 globalDebugHooks.callHook);
1398 #endif
1400 /* More debugging state, see jsdbgapi.c. */
1401 JSCList trapList;
1402 JSCList watchPointList;
1404 /* Client opaque pointers */
1405 void *data;
1407 #ifdef JS_THREADSAFE
1408 /* These combine to interlock the GC and new requests. */
1409 PRLock *gcLock;
1410 PRCondVar *gcDone;
1411 PRCondVar *requestDone;
1412 uint32 requestCount;
1413 JSThread *gcThread;
1415 js::GCHelperThread gcHelperThread;
1417 /* Lock and owning thread pointer for JS_LOCK_RUNTIME. */
1418 PRLock *rtLock;
1419 #ifdef DEBUG
1420 void * rtLockOwner;
1421 #endif
1423 /* Used to synchronize down/up state change; protected by gcLock. */
1424 PRCondVar *stateChange;
1427 * Lock serializing trapList and watchPointList accesses, and count of all
1428 * mutations to trapList and watchPointList made by debugger threads. To
1429 * keep the code simple, we define debuggerMutations for the thread-unsafe
1430 * case too.
1432 PRLock *debuggerLock;
1434 JSThread::Map threads;
1435 #endif /* JS_THREADSAFE */
1436 uint32 debuggerMutations;
1439 * Security callbacks set on the runtime are used by each context unless
1440 * an override is set on the context.
1442 JSSecurityCallbacks *securityCallbacks;
1444 /* Structured data callbacks are runtime-wide. */
1445 const JSStructuredCloneCallbacks *structuredCloneCallbacks;
1448 * Shared scope property tree, and arena-pool for allocating its nodes.
1449 * This really should be free of all locking overhead and allocated in
1450 * thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
1452 js::PropertyTree propertyTree;
1454 #define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
1457 * The propertyRemovals counter is incremented for every JSObject::clear,
1458 * and for each JSObject::remove method call that frees a slot in the given
1459 * object. See js_NativeGet and js_NativeSet in jsobj.cpp.
1461 int32 propertyRemovals;
1463 /* Script filename table. */
1464 struct JSHashTable *scriptFilenameTable;
1465 JSCList scriptFilenamePrefixes;
1466 #ifdef JS_THREADSAFE
1467 PRLock *scriptFilenameTableLock;
1468 #endif
1470 /* Number localization, used by jsnum.c */
1471 const char *thousandsSeparator;
1472 const char *decimalSeparator;
1473 const char *numGrouping;
1476 * Weak references to lazily-created, well-known XML singletons.
1478 * NB: Singleton objects must be carefully disconnected from the rest of
1479 * the object graph usually associated with a JSContext's global object,
1480 * including the set of standard class objects. See jsxml.c for details.
1482 JSObject *anynameObject;
1483 JSObject *functionNamespaceObject;
1485 #ifdef JS_THREADSAFE
1486 /* Number of threads with active requests and unhandled interrupts. */
1487 volatile int32 interruptCounter;
1488 #else
1489 JSThreadData threadData;
1491 #define JS_THREAD_DATA(cx) (&(cx)->runtime->threadData)
1492 #endif
1495 * Object shape (property cache structural type) identifier generator.
1497 * Type 0 stands for the empty scope, and must not be regenerated due to
1498 * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
1499 * atomic pre-increment, the initial value for the first typed non-empty
1500 * scope will be 1.
1502 * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
1503 * cache is disabled, to avoid aliasing two different types. It stays
1504 * disabled until a triggered GC at some later moment compresses live
1505 * types, minimizing rt->shapeGen in the process.
1507 volatile uint32 shapeGen;
1509 /* Literal table maintained by jsatom.c functions. */
1510 JSAtomState atomState;
1513 * Runtime-shared empty scopes for well-known built-in objects that lack
1514 * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
1516 js::EmptyShape *emptyArgumentsShape;
1517 js::EmptyShape *emptyBlockShape;
1518 js::EmptyShape *emptyCallShape;
1519 js::EmptyShape *emptyDeclEnvShape;
1520 js::EmptyShape *emptyEnumeratorShape;
1521 js::EmptyShape *emptyWithShape;
1524 * Various metering fields are defined at the end of JSRuntime. In this
1525 * way there is no need to recompile all the code that refers to other
1526 * fields of JSRuntime after enabling the corresponding metering macro.
1528 #ifdef JS_DUMP_ENUM_CACHE_STATS
1529 int32 nativeEnumProbes;
1530 int32 nativeEnumMisses;
1531 # define ENUM_CACHE_METER(name) JS_ATOMIC_INCREMENT(&cx->runtime->name)
1532 #else
1533 # define ENUM_CACHE_METER(name) ((void) 0)
1534 #endif
1536 #ifdef JS_DUMP_LOOP_STATS
1537 /* Loop statistics, to trigger trace recording and compiling. */
1538 JSBasicStats loopStats;
1539 #endif
1541 #ifdef DEBUG
1542 /* Function invocation metering. */
1543 jsrefcount inlineCalls;
1544 jsrefcount nativeCalls;
1545 jsrefcount nonInlineCalls;
1546 jsrefcount constructs;
1548 /* Property metering. */
1549 jsrefcount liveObjectProps;
1550 jsrefcount liveObjectPropsPreSweep;
1551 jsrefcount totalObjectProps;
1552 jsrefcount livePropTreeNodes;
1553 jsrefcount duplicatePropTreeNodes;
1554 jsrefcount totalPropTreeNodes;
1555 jsrefcount propTreeKidsChunks;
1556 jsrefcount liveDictModeNodes;
1559 * NB: emptyShapes is init'ed iff at least one of these envars is set:
1561 * JS_PROPTREE_STATFILE statistics on the property tree forest
1562 * JS_PROPTREE_DUMPFILE all paths in the property tree forest
1564 const char *propTreeStatFilename;
1565 const char *propTreeDumpFilename;
1567 bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
1569 typedef js::HashSet<js::EmptyShape *,
1570 js::DefaultHasher<js::EmptyShape *>,
1571 js::SystemAllocPolicy> EmptyShapeSet;
1573 EmptyShapeSet emptyShapes;
1575 /* String instrumentation. */
1576 jsrefcount liveStrings;
1577 jsrefcount totalStrings;
1578 jsrefcount liveDependentStrings;
1579 jsrefcount totalDependentStrings;
1580 jsrefcount badUndependStrings;
1581 double lengthSum;
1582 double lengthSquaredSum;
1583 double strdepLengthSum;
1584 double strdepLengthSquaredSum;
1586 /* Script instrumentation. */
1587 jsrefcount liveScripts;
1588 jsrefcount totalScripts;
1589 jsrefcount liveEmptyScripts;
1590 jsrefcount totalEmptyScripts;
1591 #endif /* DEBUG */
1593 #ifdef JS_SCOPE_DEPTH_METER
1595 * Stats on runtime prototype chain lookups and scope chain depths, i.e.,
1596 * counts of objects traversed on a chain until the wanted id is found.
1598 JSBasicStats protoLookupDepthStats;
1599 JSBasicStats scopeSearchDepthStats;
1602 * Stats on compile-time host environment and lexical scope chain lengths
1603 * (maximum depths).
1605 JSBasicStats hostenvScopeDepthStats;
1606 JSBasicStats lexicalScopeDepthStats;
1607 #endif
1609 #ifdef JS_GCMETER
1610 js::gc::JSGCStats gcStats;
1611 js::gc::JSGCArenaStats globalArenaStats[js::gc::FINALIZE_LIMIT];
1612 #endif
1614 #ifdef DEBUG
1616 * If functionMeterFilename, set from an envariable in JSRuntime's ctor, is
1617 * null, the remaining members in this ifdef'ed group are not initialized.
1619 const char *functionMeterFilename;
1620 JSFunctionMeter functionMeter;
1621 char lastScriptFilename[1024];
1623 typedef js::HashMap<JSFunction *,
1624 int32,
1625 js::DefaultHasher<JSFunction *>,
1626 js::SystemAllocPolicy> FunctionCountMap;
1628 FunctionCountMap methodReadBarrierCountMap;
1629 FunctionCountMap unjoinedFunctionCountMap;
1630 #endif
1632 JSWrapObjectCallback wrapObjectCallback;
1633 JSPreWrapCallback preWrapObjectCallback;
1635 JSC::ExecutableAllocator *regExpAllocator;
1637 JSRuntime();
1638 ~JSRuntime();
1640 bool init(uint32 maxbytes);
1642 void setGCTriggerFactor(uint32 factor);
1643 void setGCLastBytes(size_t lastBytes);
1646 * Call the system malloc while checking for GC memory pressure and
1647 * reporting OOM error when cx is not null.
1649 void* malloc(size_t bytes, JSContext *cx = NULL) {
1650 updateMallocCounter(bytes);
1651 void *p = ::js_malloc(bytes);
1652 return JS_LIKELY(!!p) ? p : onOutOfMemory(NULL, bytes, cx);
1656 * Call the system calloc while checking for GC memory pressure and
1657 * reporting OOM error when cx is not null.
1659 void* calloc(size_t bytes, JSContext *cx = NULL) {
1660 updateMallocCounter(bytes);
1661 void *p = ::js_calloc(bytes);
1662 return JS_LIKELY(!!p) ? p : onOutOfMemory(reinterpret_cast<void *>(1), bytes, cx);
1665 void* realloc(void* p, size_t bytes, JSContext *cx = NULL) {
1667 * For compatibility we do not account for realloc that increases
1668 * previously allocated memory.
1670 if (!p)
1671 updateMallocCounter(bytes);
1672 void *p2 = ::js_realloc(p, bytes);
1673 return JS_LIKELY(!!p2) ? p2 : onOutOfMemory(p, bytes, cx);
1676 void free(void* p) { ::js_free(p); }
1678 bool isGCMallocLimitReached() const { return gcMallocBytes <= 0; }
1680 void resetGCMallocBytes() { gcMallocBytes = ptrdiff_t(gcMaxMallocBytes); }
1682 void setGCMaxMallocBytes(size_t value) {
1684 * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
1685 * mean that value.
1687 gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
1688 resetGCMallocBytes();
1692 * Call this after allocating memory held by GC things, to update memory
1693 * pressure counters or report the OOM error if necessary. If oomError and
1694 * cx is not null the function also reports OOM error.
1696 * The function must be called outside the GC lock and in case of OOM error
1697 * the caller must ensure that no deadlock possible during OOM reporting.
1699 void updateMallocCounter(size_t nbytes) {
1700 /* We tolerate any thread races when updating gcMallocBytes. */
1701 ptrdiff_t newCount = gcMallocBytes - ptrdiff_t(nbytes);
1702 gcMallocBytes = newCount;
1703 if (JS_UNLIKELY(newCount <= 0))
1704 onTooMuchMalloc();
1707 private:
1709 * The function must be called outside the GC lock.
1711 JS_FRIEND_API(void) onTooMuchMalloc();
1714 * This should be called after system malloc/realloc returns NULL to try
1715 * to recove some memory or to report an error. Failures in malloc and
1716 * calloc are signaled by p == null and p == reinterpret_cast<void *>(1).
1717 * Other values of p mean a realloc failure.
1719 * The function must be called outside the GC lock.
1721 JS_FRIEND_API(void *) onOutOfMemory(void *p, size_t nbytes, JSContext *cx);
/* Common macros to access thread-local caches in JSThread or JSRuntime. */
#define JS_GSN_CACHE(cx)        (JS_THREAD_DATA(cx)->gsnCache)
#define JS_PROPERTY_CACHE(cx)   (JS_THREAD_DATA(cx)->propertyCache)
#define JS_TRACE_MONITOR(cx)    (JS_THREAD_DATA(cx)->traceMonitor)
#define JS_SCRIPTS_TO_GC(cx)    (JS_THREAD_DATA(cx)->scriptsToGC)

#ifdef DEBUG
# define EVAL_CACHE_METER(x)    (JS_THREAD_DATA(cx)->evalCacheMeter.x++)
#else
# define EVAL_CACHE_METER(x)    ((void) 0)
#endif

#ifdef DEBUG
# define JS_RUNTIME_METER(rt, which)    JS_ATOMIC_INCREMENT(&(rt)->which)
# define JS_RUNTIME_UNMETER(rt, which)  JS_ATOMIC_DECREMENT(&(rt)->which)
#else
# define JS_RUNTIME_METER(rt, which)    /* nothing */
# define JS_RUNTIME_UNMETER(rt, which)  /* nothing */
#endif

#define JS_KEEP_ATOMS(rt)   JS_ATOMIC_INCREMENT(&(rt)->gcKeepAtoms);
#define JS_UNKEEP_ATOMS(rt) JS_ATOMIC_DECREMENT(&(rt)->gcKeepAtoms);
#ifdef JS_ARGUMENT_FORMATTER_DEFINED
/*
 * Linked list mapping format strings for JS_{Convert,Push}Arguments{,VA} to
 * formatter functions. Elements are sorted in non-increasing format string
 * length order.
 */
struct JSArgumentFormatMap {
    const char          *format;
    size_t              length;
    JSArgumentFormatter formatter;
    JSArgumentFormatMap *next;
};
#endif
1762 * Key and entry types for the JSContext.resolvingTable hash table, typedef'd
1763 * here because all consumers need to see these declarations (and not just the
1764 * typedef names, as would be the case for an opaque pointer-to-typedef'd-type
1765 * declaration), along with cx->resolvingTable.
1767 typedef struct JSResolvingKey {
1768 JSObject *obj;
1769 jsid id;
1770 } JSResolvingKey;
1772 typedef struct JSResolvingEntry {
1773 JSDHashEntryHdr hdr;
1774 JSResolvingKey key;
1775 uint32 flags;
1776 } JSResolvingEntry;
1778 #define JSRESFLAG_LOOKUP 0x1 /* resolving id from lookup */
1779 #define JSRESFLAG_WATCH 0x2 /* resolving id from watch */
1780 #define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */
1782 extern const JSDebugHooks js_NullDebugHooks; /* defined in jsdbgapi.cpp */
1784 namespace js {
1786 class AutoGCRooter;
1788 #define JS_HAS_OPTION(cx,option) (((cx)->options & (option)) != 0)
1789 #define JS_HAS_STRICT_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_STRICT)
1790 #define JS_HAS_WERROR_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_WERROR)
1791 #define JS_HAS_COMPILE_N_GO_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_COMPILE_N_GO)
1792 #define JS_HAS_ATLINE_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_ATLINE)
1794 static inline bool
1795 OptionsHasXML(uint32 options)
1797 return !!(options & JSOPTION_XML);
1800 static inline bool
1801 OptionsHasAnonFunFix(uint32 options)
1803 return !!(options & JSOPTION_ANONFUNFIX);
1806 static inline bool
1807 OptionsSameVersionFlags(uint32 self, uint32 other)
1809 static const uint32 mask = JSOPTION_XML | JSOPTION_ANONFUNFIX;
1810 return !((self & mask) ^ (other & mask));
1814 * Flags accompany script version data so that a) dynamically created scripts
1815 * can inherit their caller's compile-time properties and b) scripts can be
1816 * appropriately compared in the eval cache across global option changes. An
1817 * example of the latter is enabling the top-level-anonymous-function-is-error
1818 * option: subsequent evals of the same, previously-valid script text may have
1819 * become invalid.
1821 namespace VersionFlags {
1822 static const uint32 MASK = 0x0FFF; /* see JSVersion in jspubtd.h */
1823 static const uint32 HAS_XML = 0x1000; /* flag induced by XML option */
1824 static const uint32 ANONFUNFIX = 0x2000; /* see jsapi.h comment on JSOPTION_ANONFUNFIX */
1827 static inline JSVersion
1828 VersionNumber(JSVersion version)
1830 return JSVersion(uint32(version) & VersionFlags::MASK);
1833 static inline bool
1834 VersionHasXML(JSVersion version)
1836 return !!(version & VersionFlags::HAS_XML);
1839 /* @warning This is a distinct condition from having the XML flag set. */
1840 static inline bool
1841 VersionShouldParseXML(JSVersion version)
1843 return VersionHasXML(version) || VersionNumber(version) >= JSVERSION_1_6;
1846 static inline bool
1847 VersionHasAnonFunFix(JSVersion version)
1849 return !!(version & VersionFlags::ANONFUNFIX);
1852 static inline void
1853 VersionSetXML(JSVersion *version, bool enable)
1855 if (enable)
1856 *version = JSVersion(uint32(*version) | VersionFlags::HAS_XML);
1857 else
1858 *version = JSVersion(uint32(*version) & ~VersionFlags::HAS_XML);
1861 static inline void
1862 VersionSetAnonFunFix(JSVersion *version, bool enable)
1864 if (enable)
1865 *version = JSVersion(uint32(*version) | VersionFlags::ANONFUNFIX);
1866 else
1867 *version = JSVersion(uint32(*version) & ~VersionFlags::ANONFUNFIX);
1870 static inline JSVersion
1871 VersionExtractFlags(JSVersion version)
1873 return JSVersion(uint32(version) & ~VersionFlags::MASK);
1876 static inline bool
1877 VersionHasFlags(JSVersion version)
1879 return !!VersionExtractFlags(version);
1882 static inline bool
1883 VersionIsKnown(JSVersion version)
1885 return VersionNumber(version) != JSVERSION_UNKNOWN;
1888 } /* namespace js */
1890 struct JSContext
1892 explicit JSContext(JSRuntime *rt);
1894 /* JSRuntime contextList linkage. */
1895 JSCList link;
1897 private:
1898 /* See JSContext::findVersion. */
1899 JSVersion defaultVersion; /* script compilation version */
1900 JSVersion versionOverride; /* supersedes defaultVersion when valid */
1901 bool hasVersionOverride;
1903 public:
1904 /* Per-context options. */
1905 uint32 options; /* see jsapi.h for JSOPTION_* */
1907 /* Locale specific callbacks for string conversion. */
1908 JSLocaleCallbacks *localeCallbacks;
1911 * cx->resolvingTable is non-null and non-empty if we are initializing
1912 * standard classes lazily, or if we are otherwise recursing indirectly
1913 * from js_LookupProperty through a Class.resolve hook. It is used to
1914 * limit runaway recursion (see jsapi.c and jsobj.c).
1916 JSDHashTable *resolvingTable;
1919 * True if generating an error, to prevent runaway recursion.
1920 * NB: generatingError packs with throwing below.
1922 JSPackedBool generatingError;
1924 /* Exception state -- the exception member is a GC root by definition. */
1925 JSBool throwing; /* is there a pending exception? */
1926 js::Value exception; /* most-recently-thrown exception */
1928 /* Limit pointer for checking native stack consumption during recursion. */
1929 jsuword stackLimit;
1931 /* Quota on the size of arenas used to compile and execute scripts. */
1932 size_t scriptStackQuota;
1934 /* Data shared by threads in an address space. */
1935 JSRuntime *const runtime;
1937 /* GC heap compartment. */
1938 JSCompartment *compartment;
1940 /* Currently executing frame and regs, set by stack operations. */
1941 JS_REQUIRES_STACK
1942 JSFrameRegs *regs;
1944 /* Current frame accessors. */
1946 JSStackFrame* fp() {
1947 JS_ASSERT(regs && regs->fp);
1948 return regs->fp;
1951 JSStackFrame* maybefp() {
1952 JS_ASSERT_IF(regs, regs->fp);
1953 return regs ? regs->fp : NULL;
1956 bool hasfp() {
1957 JS_ASSERT_IF(regs, regs->fp);
1958 return !!regs;
1961 public:
1962 friend class js::StackSpace;
1963 friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, JSInterpMode);
1965 void resetCompartment();
1967 /* 'regs' must only be changed by calling this function. */
1968 void setCurrentRegs(JSFrameRegs *regs) {
1969 JS_ASSERT_IF(regs, regs->fp);
1970 this->regs = regs;
1971 if (!regs)
1972 resetCompartment();
1975 /* Temporary arena pool used while compiling and decompiling. */
1976 JSArenaPool tempPool;
1978 /* Temporary arena pool used while evaluate regular expressions. */
1979 JSArenaPool regExpPool;
1981 /* Top-level object and pointer to top stack frame's scope chain. */
1982 JSObject *globalObject;
1984 /* State for object and array toSource conversion. */
1985 JSSharpObjectMap sharpObjectMap;
1986 js::HashSet<JSObject *> busyArrays;
1988 /* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */
1989 JSArgumentFormatMap *argumentFormatMap;
1991 /* Last message string and log file for debugging. */
1992 char *lastMessage;
1993 #ifdef DEBUG
1994 void *logfp;
1995 jsbytecode *logPrevPc;
1996 #endif
1998 /* Per-context optional error reporter. */
1999 JSErrorReporter errorReporter;
2001 /* Branch callback. */
2002 JSOperationCallback operationCallback;
2004 /* Interpreter activation count. */
2005 uintN interpLevel;
2007 /* Client opaque pointers. */
2008 void *data;
2009 void *data2;
2011 private:
2012 /* Linked list of segments. See StackSegment. */
2013 js::StackSegment *currentSegment;
2015 public:
2016 void assertSegmentsInSync() const {
2017 #ifdef DEBUG
2018 if (regs) {
2019 JS_ASSERT(currentSegment->isActive());
2020 if (js::StackSegment *prev = currentSegment->getPreviousInContext())
2021 JS_ASSERT(!prev->isActive());
2022 } else {
2023 JS_ASSERT_IF(currentSegment, !currentSegment->isActive());
2025 #endif
2028 /* Return whether this context has an active segment. */
2029 bool hasActiveSegment() const {
2030 assertSegmentsInSync();
2031 return !!regs;
2034 /* Assuming there is an active segment, return it. */
2035 js::StackSegment *activeSegment() const {
2036 JS_ASSERT(hasActiveSegment());
2037 return currentSegment;
2040 /* Return the current segment, which may or may not be active. */
2041 js::StackSegment *getCurrentSegment() const {
2042 assertSegmentsInSync();
2043 return currentSegment;
2046 inline js::RegExpStatics *regExpStatics();
2048 /* Add the given segment to the list as the new active segment. */
2049 void pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &regs);
2051 /* Remove the active segment and make the next segment active. */
2052 void popSegmentAndFrame();
2054 /* Mark the top segment as suspended, without pushing a new one. */
2055 void saveActiveSegment();
2057 /* Undoes calls to suspendActiveSegment. */
2058 void restoreSegment();
2060 /* Get the frame whose prev() is fp, which may be in any segment. */
2061 inline JSStackFrame *computeNextFrame(JSStackFrame *fp);
2064 * Perform a linear search of all frames in all segments in the given context
2065 * for the given frame, returning the segment, if found, and null otherwise.
2067 js::StackSegment *containingSegment(const JSStackFrame *target);
2069 /* Search the call stack for the nearest frame with static level targetLevel. */
2070 JSStackFrame *findFrameAtLevel(uintN targetLevel) const {
2071 JSStackFrame *fp = regs->fp;
2072 while (true) {
2073 JS_ASSERT(fp && fp->isScriptFrame());
2074 if (fp->script()->staticLevel == targetLevel)
2075 break;
2076 fp = fp->prev();
2078 return fp;
2081 private:
2083 * The default script compilation version can be set iff there is no code running.
2084 * This typically occurs via the JSAPI right after a context is constructed.
2086 bool canSetDefaultVersion() const {
2087 return !regs && !hasVersionOverride;
2090 /* Force a version for future script compilation. */
2091 void overrideVersion(JSVersion newVersion) {
2092 JS_ASSERT(!canSetDefaultVersion());
2093 versionOverride = newVersion;
2094 hasVersionOverride = true;
2097 public:
2098 void clearVersionOverride() {
2099 hasVersionOverride = false;
2102 bool isVersionOverridden() const {
2103 return hasVersionOverride;
2106 /* Set the default script compilation version. */
2107 void setDefaultVersion(JSVersion version) {
2108 defaultVersion = version;
2112 * Set the default version if possible; otherwise, force the version.
2113 * Return whether an override occurred.
2115 bool maybeOverrideVersion(JSVersion newVersion) {
2116 if (canSetDefaultVersion()) {
2117 setDefaultVersion(newVersion);
2118 return false;
2120 overrideVersion(newVersion);
2121 return true;
2125 * Return:
2126 * - The override version, if there is an override version.
2127 * - The newest scripted frame's version, if there is such a frame.
     *  - The default version.
2130 * @note If this ever shows up in a profile, just add caching!
2132 JSVersion findVersion() const {
2133 if (hasVersionOverride)
2134 return versionOverride;
2136 if (regs) {
2137 /* There may be a scripted function somewhere on the stack! */
2138 JSStackFrame *fp = regs->fp;
2139 while (fp && !fp->isScriptFrame())
2140 fp = fp->prev();
2141 if (fp)
2142 return fp->script()->getVersion();
2145 return defaultVersion;
2148 void optionFlagsToVersion(JSVersion *version) const {
2149 js::VersionSetXML(version, js::OptionsHasXML(options));
2150 js::VersionSetAnonFunFix(version, js::OptionsHasAnonFunFix(options));
2153 void checkOptionVersionSync() const {
2154 #ifdef DEBUG
2155 JSVersion version = findVersion();
2156 JS_ASSERT(js::VersionHasXML(version) == js::OptionsHasXML(options));
2157 JS_ASSERT(js::VersionHasAnonFunFix(version) == js::OptionsHasAnonFunFix(options));
2158 #endif
2161 /* Note: may override the version. */
2162 void syncOptionsToVersion() {
2163 JSVersion version = findVersion();
2164 if (js::OptionsHasXML(options) == js::VersionHasXML(version) &&
2165 js::OptionsHasAnonFunFix(options) == js::VersionHasAnonFunFix(version))
2166 return;
2167 js::VersionSetXML(&version, js::OptionsHasXML(options));
2168 js::VersionSetAnonFunFix(&version, js::OptionsHasAnonFunFix(options));
2169 maybeOverrideVersion(version);
2172 #ifdef JS_THREADSAFE
2173 JSThread *thread;
2174 unsigned outstandingRequests;/* number of JS_BeginRequest calls
2175 without the corresponding
2176 JS_EndRequest. */
2177 JSCList threadLinks; /* JSThread contextList linkage */
2179 #define CX_FROM_THREAD_LINKS(tl) \
2180 ((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
2181 #endif
2183 /* Stack of thread-stack-allocated GC roots. */
2184 js::AutoGCRooter *autoGCRooters;
2186 /* Debug hooks associated with the current context. */
2187 const JSDebugHooks *debugHooks;
2189 /* Security callbacks that override any defined on the runtime. */
2190 JSSecurityCallbacks *securityCallbacks;
2192 /* Stored here to avoid passing it around as a parameter. */
2193 uintN resolveFlags;
2195 /* Random number generator state, used by jsmath.cpp. */
2196 int64 rngSeed;
2198 /* Location to stash the iteration value between JSOP_MOREITER and JSOP_FOR*. */
2199 js::Value iterValue;
2201 #ifdef JS_TRACER
2203 * State for the current tree execution. bailExit is valid if the tree has
2204 * called back into native code via a _FAIL builtin and has not yet bailed,
2205 * else garbage (NULL in debug builds).
2207 js::TracerState *tracerState;
2208 js::VMSideExit *bailExit;
2211 * True if traces may be executed. Invariant: The value of traceJitenabled
2212 * is always equal to the expression in updateJITEnabled below.
2214 * This flag and the fields accessed by updateJITEnabled are written only
2215 * in runtime->gcLock, to avoid race conditions that would leave the wrong
2216 * value in traceJitEnabled. (But the interpreter reads this without
2217 * locking. That can race against another thread setting debug hooks, but
2218 * we always read cx->debugHooks without locking anyway.)
2220 bool traceJitEnabled;
2221 #endif
2223 #ifdef JS_METHODJIT
2224 bool methodJitEnabled;
2225 bool profilingEnabled;
2226 #endif
2228 /* Caller must be holding runtime->gcLock. */
2229 void updateJITEnabled();
2231 #ifdef MOZ_TRACE_JSCALLS
2232 /* Function entry/exit debugging callback. */
2233 JSFunctionCallback functionCallback;
2235 void doFunctionCallback(const JSFunction *fun,
2236 const JSScript *scr,
2237 int entering) const
2239 if (functionCallback)
2240 functionCallback(fun, scr, this, entering);
2242 #endif
2244 DSTOffsetCache dstOffsetCache;
2246 /* List of currently active non-escaping enumerators (for-in). */
2247 JSObject *enumerators;
2249 private:
2251 * To go from a live generator frame (on the stack) to its generator object
2252 * (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
2253 * generators, pushing and popping when entering and leaving generator
2254 * frames, respectively.
2256 js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
2258 public:
2259 #ifdef JS_METHODJIT
2260 inline js::mjit::JaegerCompartment *jaegerCompartment();
2261 #endif
2263 /* Return the generator object for the given generator frame. */
2264 JSGenerator *generatorFor(JSStackFrame *fp) const;
2266 /* Early OOM-check. */
2267 inline bool ensureGeneratorStackSpace();
2269 bool enterGenerator(JSGenerator *gen) {
2270 return genStack.append(gen);
2273 void leaveGenerator(JSGenerator *gen) {
2274 JS_ASSERT(genStack.back() == gen);
2275 genStack.popBack();
2278 #ifdef JS_THREADSAFE
2280 * When non-null JSContext::free delegates the job to the background
2281 * thread.
2283 js::GCHelperThread *gcBackgroundFree;
2284 #endif
2286 inline void* malloc(size_t bytes) {
2287 return runtime->malloc(bytes, this);
2290 inline void* mallocNoReport(size_t bytes) {
2291 JS_ASSERT(bytes != 0);
2292 return runtime->malloc(bytes, NULL);
2295 inline void* calloc(size_t bytes) {
2296 JS_ASSERT(bytes != 0);
2297 return runtime->calloc(bytes, this);
2300 inline void* realloc(void* p, size_t bytes) {
2301 return runtime->realloc(p, bytes, this);
2304 inline void free(void* p) {
2305 #ifdef JS_THREADSAFE
2306 if (gcBackgroundFree) {
2307 gcBackgroundFree->freeLater(p);
2308 return;
2310 #endif
2311 runtime->free(p);
2315 * In the common case that we'd like to allocate the memory for an object
2316 * with cx->malloc/free, we cannot use overloaded C++ operators (no
2317 * placement delete). Factor the common workaround into one place.
2319 #define CREATE_BODY(parms) \
2320 void *memory = this->malloc(sizeof(T)); \
2321 if (!memory) \
2322 return NULL; \
2323 return new(memory) T parms;
2325 template <class T>
2326 JS_ALWAYS_INLINE T *create() {
2327 CREATE_BODY(())
2330 template <class T, class P1>
2331 JS_ALWAYS_INLINE T *create(const P1 &p1) {
2332 CREATE_BODY((p1))
2335 template <class T, class P1, class P2>
2336 JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2) {
2337 CREATE_BODY((p1, p2))
2340 template <class T, class P1, class P2, class P3>
2341 JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2, const P3 &p3) {
2342 CREATE_BODY((p1, p2, p3))
2344 #undef CREATE_BODY
2346 template <class T>
2347 JS_ALWAYS_INLINE void destroy(T *p) {
2348 p->~T();
2349 this->free(p);
2352 void purge();
2354 js::StackSpace &stack() const {
2355 return JS_THREAD_DATA(this)->stackSpace;
2358 #ifdef DEBUG
2359 void assertValidStackDepth(uintN depth) {
2360 JS_ASSERT(0 <= regs->sp - regs->fp->base());
2361 JS_ASSERT(depth <= uintptr_t(regs->sp - regs->fp->base()));
2363 #else
2364 void assertValidStackDepth(uintN /*depth*/) {}
2365 #endif
2367 private:
2370 * The allocation code calls the function to indicate either OOM failure
2371 * when p is null or that a memory pressure counter has reached some
2372 * threshold when p is not null. The function takes the pointer and not
2373 * a boolean flag to minimize the amount of code in its inlined callers.
2375 JS_FRIEND_API(void) checkMallocGCPressure(void *p);
2377 /* To silence MSVC warning about using 'this' in a member initializer. */
2378 JSContext *thisInInitializer() { return this; }
2379 }; /* struct JSContext */
2381 #ifdef JS_THREADSAFE
2382 # define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0)
2383 #endif
2385 #if defined JS_THREADSAFE && defined DEBUG
2387 namespace js {
2389 class AutoCheckRequestDepth {
2390 JSContext *cx;
2391 public:
2392 AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->thread->checkRequestDepth++; }
2394 ~AutoCheckRequestDepth() {
2395 JS_ASSERT(cx->thread->checkRequestDepth != 0);
2396 cx->thread->checkRequestDepth--;
2402 # define CHECK_REQUEST(cx) \
2403 JS_ASSERT((cx)->thread); \
2404 JS_ASSERT((cx)->thread->data.requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
2405 AutoCheckRequestDepth _autoCheckRequestDepth(cx);
2407 #else
2408 # define CHECK_REQUEST(cx) ((void) 0)
2409 # define CHECK_REQUEST_THREAD(cx) ((void) 0)
2410 #endif
2412 static inline uintN
2413 FramePCOffset(JSContext *cx, JSStackFrame* fp)
2415 jsbytecode *pc = fp->hasImacropc() ? fp->imacropc() : fp->pc(cx);
2416 return uintN(pc - fp->script()->code);
2419 static inline JSAtom **
2420 FrameAtomBase(JSContext *cx, JSStackFrame *fp)
2422 return fp->hasImacropc()
2423 ? COMMON_ATOMS_START(&cx->runtime->atomState)
2424 : fp->script()->atomMap.vector;
2427 namespace js {
2429 class AutoGCRooter {
2430 public:
2431 AutoGCRooter(JSContext *cx, ptrdiff_t tag)
2432 : down(cx->autoGCRooters), tag(tag), context(cx)
2434 JS_ASSERT(this != cx->autoGCRooters);
2435 CHECK_REQUEST(cx);
2436 cx->autoGCRooters = this;
2439 ~AutoGCRooter() {
2440 JS_ASSERT(this == context->autoGCRooters);
2441 CHECK_REQUEST(context);
2442 context->autoGCRooters = down;
2445 /* Implemented in jsgc.cpp. */
2446 inline void trace(JSTracer *trc);
2448 #ifdef __GNUC__
2449 # pragma GCC visibility push(default)
2450 #endif
2451 friend void MarkContext(JSTracer *trc, JSContext *acx);
2452 friend void MarkRuntime(JSTracer *trc);
2453 #ifdef __GNUC__
2454 # pragma GCC visibility pop
2455 #endif
2457 protected:
2458 AutoGCRooter * const down;
2461 * Discriminates actual subclass of this being used. If non-negative, the
2462 * subclass roots an array of values of the length stored in this field.
2463 * If negative, meaning is indicated by the corresponding value in the enum
2464 * below. Any other negative value indicates some deeper problem such as
2465 * memory corruption.
2467 ptrdiff_t tag;
2469 JSContext * const context;
2471 enum {
2472 JSVAL = -1, /* js::AutoValueRooter */
2473 SHAPE = -2, /* js::AutoShapeRooter */
2474 PARSER = -3, /* js::Parser */
2475 SCRIPT = -4, /* js::AutoScriptRooter */
2476 ENUMERATOR = -5, /* js::AutoEnumStateRooter */
2477 IDARRAY = -6, /* js::AutoIdArray */
2478 DESCRIPTORS = -7, /* js::AutoPropDescArrayRooter */
2479 NAMESPACES = -8, /* js::AutoNamespaceArray */
2480 XML = -9, /* js::AutoXMLRooter */
2481 OBJECT = -10, /* js::AutoObjectRooter */
2482 ID = -11, /* js::AutoIdRooter */
2483 VALVECTOR = -12, /* js::AutoValueVector */
2484 DESCRIPTOR = -13, /* js::AutoPropertyDescriptorRooter */
2485 STRING = -14, /* js::AutoStringRooter */
2486 IDVECTOR = -15 /* js::AutoIdVector */
2489 private:
2490 /* No copy or assignment semantics. */
2491 AutoGCRooter(AutoGCRooter &ida);
2492 void operator=(AutoGCRooter &ida);
2495 /* FIXME(bug 332648): Move this into a public header. */
2496 class AutoValueRooter : private AutoGCRooter
2498 public:
2499 explicit AutoValueRooter(JSContext *cx
2500 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2501 : AutoGCRooter(cx, JSVAL), val(js::NullValue())
2503 JS_GUARD_OBJECT_NOTIFIER_INIT;
2506 AutoValueRooter(JSContext *cx, const Value &v
2507 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2508 : AutoGCRooter(cx, JSVAL), val(v)
2510 JS_GUARD_OBJECT_NOTIFIER_INIT;
2513 AutoValueRooter(JSContext *cx, jsval v
2514 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2515 : AutoGCRooter(cx, JSVAL), val(js::Valueify(v))
2517 JS_GUARD_OBJECT_NOTIFIER_INIT;
2521 * If you are looking for Object* overloads, use AutoObjectRooter instead;
2522 * rooting Object*s as a js::Value requires discerning whether or not it is
2523 * a function object. Also, AutoObjectRooter is smaller.
2526 void set(Value v) {
2527 JS_ASSERT(tag == JSVAL);
2528 val = v;
2531 void set(jsval v) {
2532 JS_ASSERT(tag == JSVAL);
2533 val = js::Valueify(v);
2536 const Value &value() const {
2537 JS_ASSERT(tag == JSVAL);
2538 return val;
2541 Value *addr() {
2542 JS_ASSERT(tag == JSVAL);
2543 return &val;
2546 const jsval &jsval_value() const {
2547 JS_ASSERT(tag == JSVAL);
2548 return Jsvalify(val);
2551 jsval *jsval_addr() {
2552 JS_ASSERT(tag == JSVAL);
2553 return Jsvalify(&val);
2556 friend void AutoGCRooter::trace(JSTracer *trc);
2557 friend void MarkRuntime(JSTracer *trc);
2559 private:
2560 Value val;
2561 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2564 class AutoObjectRooter : private AutoGCRooter {
2565 public:
2566 AutoObjectRooter(JSContext *cx, JSObject *obj = NULL
2567 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2568 : AutoGCRooter(cx, OBJECT), obj(obj)
2570 JS_GUARD_OBJECT_NOTIFIER_INIT;
2573 void setObject(JSObject *obj) {
2574 this->obj = obj;
2577 JSObject * object() const {
2578 return obj;
2581 JSObject ** addr() {
2582 return &obj;
2585 friend void AutoGCRooter::trace(JSTracer *trc);
2586 friend void MarkRuntime(JSTracer *trc);
2588 private:
2589 JSObject *obj;
2590 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2593 class AutoStringRooter : private AutoGCRooter {
2594 public:
2595 AutoStringRooter(JSContext *cx, JSString *str = NULL
2596 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2597 : AutoGCRooter(cx, STRING), str(str)
2599 JS_GUARD_OBJECT_NOTIFIER_INIT;
2602 void setString(JSString *str) {
2603 this->str = str;
2606 JSString * string() const {
2607 return str;
2610 JSString ** addr() {
2611 return &str;
2614 friend void AutoGCRooter::trace(JSTracer *trc);
2616 private:
2617 JSString *str;
2618 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2621 class AutoArrayRooter : private AutoGCRooter {
2622 public:
2623 AutoArrayRooter(JSContext *cx, size_t len, Value *vec
2624 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2625 : AutoGCRooter(cx, len), array(vec)
2627 JS_GUARD_OBJECT_NOTIFIER_INIT;
2628 JS_ASSERT(tag >= 0);
2631 AutoArrayRooter(JSContext *cx, size_t len, jsval *vec
2632 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2633 : AutoGCRooter(cx, len), array(Valueify(vec))
2635 JS_GUARD_OBJECT_NOTIFIER_INIT;
2636 JS_ASSERT(tag >= 0);
2639 void changeLength(size_t newLength) {
2640 tag = ptrdiff_t(newLength);
2641 JS_ASSERT(tag >= 0);
2644 void changeArray(Value *newArray, size_t newLength) {
2645 changeLength(newLength);
2646 array = newArray;
2649 Value *array;
2651 friend void AutoGCRooter::trace(JSTracer *trc);
2653 private:
2654 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2657 class AutoShapeRooter : private AutoGCRooter {
2658 public:
2659 AutoShapeRooter(JSContext *cx, const js::Shape *shape
2660 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2661 : AutoGCRooter(cx, SHAPE), shape(shape)
2663 JS_GUARD_OBJECT_NOTIFIER_INIT;
2666 friend void AutoGCRooter::trace(JSTracer *trc);
2667 friend void MarkRuntime(JSTracer *trc);
2669 private:
2670 const js::Shape * const shape;
2671 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2674 class AutoScriptRooter : private AutoGCRooter {
2675 public:
2676 AutoScriptRooter(JSContext *cx, JSScript *script
2677 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2678 : AutoGCRooter(cx, SCRIPT), script(script)
2680 JS_GUARD_OBJECT_NOTIFIER_INIT;
2683 void setScript(JSScript *script) {
2684 this->script = script;
2687 friend void AutoGCRooter::trace(JSTracer *trc);
2689 private:
2690 JSScript *script;
2691 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2694 class AutoIdRooter : private AutoGCRooter
2696 public:
2697 explicit AutoIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0)
2698 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2699 : AutoGCRooter(cx, ID), id_(id)
2701 JS_GUARD_OBJECT_NOTIFIER_INIT;
2704 jsid id() {
2705 return id_;
2708 jsid * addr() {
2709 return &id_;
2712 friend void AutoGCRooter::trace(JSTracer *trc);
2713 friend void MarkRuntime(JSTracer *trc);
2715 private:
2716 jsid id_;
2717 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2720 class AutoIdArray : private AutoGCRooter {
2721 public:
2722 AutoIdArray(JSContext *cx, JSIdArray *ida JS_GUARD_OBJECT_NOTIFIER_PARAM)
2723 : AutoGCRooter(cx, IDARRAY), idArray(ida)
2725 JS_GUARD_OBJECT_NOTIFIER_INIT;
2727 ~AutoIdArray() {
2728 if (idArray)
2729 JS_DestroyIdArray(context, idArray);
2731 bool operator!() {
2732 return idArray == NULL;
2734 jsid operator[](size_t i) const {
2735 JS_ASSERT(idArray);
2736 JS_ASSERT(i < size_t(idArray->length));
2737 return idArray->vector[i];
2739 size_t length() const {
2740 return idArray->length;
2743 friend void AutoGCRooter::trace(JSTracer *trc);
2745 JSIdArray *steal() {
2746 JSIdArray *copy = idArray;
2747 idArray = NULL;
2748 return copy;
2751 protected:
2752 inline void trace(JSTracer *trc);
2754 private:
2755 JSIdArray * idArray;
2756 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2758 /* No copy or assignment semantics. */
2759 AutoIdArray(AutoIdArray &ida);
2760 void operator=(AutoIdArray &ida);
2763 /* The auto-root for enumeration object and its state. */
2764 class AutoEnumStateRooter : private AutoGCRooter
2766 public:
2767 AutoEnumStateRooter(JSContext *cx, JSObject *obj
2768 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2769 : AutoGCRooter(cx, ENUMERATOR), obj(obj), stateValue()
2771 JS_GUARD_OBJECT_NOTIFIER_INIT;
2772 JS_ASSERT(obj);
2775 ~AutoEnumStateRooter() {
2776 if (!stateValue.isNull()) {
2777 #ifdef DEBUG
2778 JSBool ok =
2779 #endif
2780 obj->enumerate(context, JSENUMERATE_DESTROY, &stateValue, 0);
2781 JS_ASSERT(ok);
2785 friend void AutoGCRooter::trace(JSTracer *trc);
2787 const Value &state() const { return stateValue; }
2788 Value *addr() { return &stateValue; }
2790 protected:
2791 void trace(JSTracer *trc);
2793 JSObject * const obj;
2795 private:
2796 Value stateValue;
2797 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2800 #ifdef JS_HAS_XML_SUPPORT
2801 class AutoXMLRooter : private AutoGCRooter {
2802 public:
2803 AutoXMLRooter(JSContext *cx, JSXML *xml)
2804 : AutoGCRooter(cx, XML), xml(xml)
2806 JS_ASSERT(xml);
2809 friend void AutoGCRooter::trace(JSTracer *trc);
2810 friend void MarkRuntime(JSTracer *trc);
2812 private:
2813 JSXML * const xml;
2815 #endif /* JS_HAS_XML_SUPPORT */
2817 class AutoLockGC {
2818 private:
2819 JSRuntime *rt;
2820 public:
2821 explicit AutoLockGC(JSRuntime *rt) : rt(rt) { JS_LOCK_GC(rt); }
2822 ~AutoLockGC() { JS_UNLOCK_GC(rt); }
2825 class AutoUnlockGC {
2826 private:
2827 JSRuntime *rt;
2828 public:
2829 explicit AutoUnlockGC(JSRuntime *rt) : rt(rt) { JS_UNLOCK_GC(rt); }
2830 ~AutoUnlockGC() { JS_LOCK_GC(rt); }
2833 class AutoLockDefaultCompartment {
2834 private:
2835 JSContext *cx;
2836 public:
2837 AutoLockDefaultCompartment(JSContext *cx) : cx(cx) {
2838 JS_LOCK(cx, &cx->runtime->atomState.lock);
2839 #ifdef JS_THREADSAFE
2840 cx->runtime->defaultCompartmentIsLocked = true;
2841 #endif
2843 ~AutoLockDefaultCompartment() {
2844 JS_UNLOCK(cx, &cx->runtime->atomState.lock);
2845 #ifdef JS_THREADSAFE
2846 cx->runtime->defaultCompartmentIsLocked = false;
2847 #endif
2851 class AutoUnlockDefaultCompartment {
2852 private:
2853 JSContext *cx;
2854 public:
2855 AutoUnlockDefaultCompartment(JSContext *cx) : cx(cx) {
2856 JS_UNLOCK(cx, &cx->runtime->atomState.lock);
2857 #ifdef JS_THREADSAFE
2858 cx->runtime->defaultCompartmentIsLocked = false;
2859 #endif
2861 ~AutoUnlockDefaultCompartment() {
2862 JS_LOCK(cx, &cx->runtime->atomState.lock);
2863 #ifdef JS_THREADSAFE
2864 cx->runtime->defaultCompartmentIsLocked = true;
2865 #endif
2869 class AutoKeepAtoms {
2870 JSRuntime *rt;
2871 public:
2872 explicit AutoKeepAtoms(JSRuntime *rt) : rt(rt) { JS_KEEP_ATOMS(rt); }
2873 ~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); }
2876 class AutoArenaAllocator {
2877 JSArenaPool *pool;
2878 void *mark;
2879 public:
2880 explicit AutoArenaAllocator(JSArenaPool *pool) : pool(pool) { mark = JS_ARENA_MARK(pool); }
2881 ~AutoArenaAllocator() { JS_ARENA_RELEASE(pool, mark); }
2883 template <typename T>
2884 T *alloc(size_t elems) {
2885 void *ptr;
2886 JS_ARENA_ALLOCATE(ptr, pool, elems * sizeof(T));
2887 return static_cast<T *>(ptr);
2891 class AutoReleasePtr {
2892 JSContext *cx;
2893 void *ptr;
2894 AutoReleasePtr operator=(const AutoReleasePtr &other);
2895 public:
2896 explicit AutoReleasePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
2897 ~AutoReleasePtr() { cx->free(ptr); }
2901 * FIXME: bug 602774: cleaner API for AutoReleaseNullablePtr
2903 class AutoReleaseNullablePtr {
2904 JSContext *cx;
2905 void *ptr;
2906 AutoReleaseNullablePtr operator=(const AutoReleaseNullablePtr &other);
2907 public:
2908 explicit AutoReleaseNullablePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
2909 void reset(void *ptr2) {
2910 if (ptr)
2911 cx->free(ptr);
2912 ptr = ptr2;
2914 ~AutoReleaseNullablePtr() { if (ptr) cx->free(ptr); }
2917 class AutoLocalNameArray {
2918 public:
2919 explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun
2920 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2921 : context(cx),
2922 mark(JS_ARENA_MARK(&cx->tempPool)),
2923 names(fun->getLocalNameArray(cx, &cx->tempPool)),
2924 count(fun->countLocalNames())
2926 JS_GUARD_OBJECT_NOTIFIER_INIT;
2929 ~AutoLocalNameArray() {
2930 JS_ARENA_RELEASE(&context->tempPool, mark);
2933 operator bool() const { return !!names; }
2935 uint32 length() const { return count; }
2937 const jsuword &operator [](unsigned i) const { return names[i]; }
2939 private:
2940 JSContext *context;
2941 void *mark;
2942 jsuword *names;
2943 uint32 count;
2945 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2948 } /* namespace js */
2950 class JSAutoResolveFlags
2952 public:
2953 JSAutoResolveFlags(JSContext *cx, uintN flags
2954 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2955 : mContext(cx), mSaved(cx->resolveFlags)
2957 JS_GUARD_OBJECT_NOTIFIER_INIT;
2958 cx->resolveFlags = flags;
2961 ~JSAutoResolveFlags() { mContext->resolveFlags = mSaved; }
2963 private:
2964 JSContext *mContext;
2965 uintN mSaved;
2966 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2969 extern JSThreadData *
2970 js_CurrentThreadData(JSRuntime *rt);
2972 extern JSBool
2973 js_InitThreads(JSRuntime *rt);
2975 extern void
2976 js_FinishThreads(JSRuntime *rt);
2978 extern void
2979 js_PurgeThreads(JSContext *cx);
2981 namespace js {
2983 #ifdef JS_THREADSAFE
2985 /* Iterator over JSThreadData from all JSThread instances. */
2986 class ThreadDataIter : public JSThread::Map::Range
2988 public:
2989 ThreadDataIter(JSRuntime *rt) : JSThread::Map::Range(rt->threads.all()) {}
2991 JSThreadData *threadData() const {
2992 return &front().value->data;
2996 #else /* !JS_THREADSAFE */
2998 class ThreadDataIter
3000 JSRuntime *runtime;
3001 bool done;
3002 public:
3003 ThreadDataIter(JSRuntime *rt) : runtime(rt), done(false) {}
3005 bool empty() const {
3006 return done;
3009 void popFront() {
3010 JS_ASSERT(!done);
3011 done = true;
3014 JSThreadData *threadData() const {
3015 JS_ASSERT(!done);
3016 return &runtime->threadData;
3020 #endif /* !JS_THREADSAFE */
3022 } /* namespace js */
3025 * Create and destroy functions for JSContext, which is manually allocated
3026 * and exclusively owned.
3028 extern JSContext *
3029 js_NewContext(JSRuntime *rt, size_t stackChunkSize);
3031 extern void
3032 js_DestroyContext(JSContext *cx, JSDestroyContextMode mode);
3034 static JS_INLINE JSContext *
3035 js_ContextFromLinkField(JSCList *link)
3037 JS_ASSERT(link);
3038 return (JSContext *) ((uint8 *) link - offsetof(JSContext, link));
3042 * If unlocked, acquire and release rt->gcLock around *iterp update; otherwise
3043 * the caller must be holding rt->gcLock.
3045 extern JSContext *
3046 js_ContextIterator(JSRuntime *rt, JSBool unlocked, JSContext **iterp);
 * Iterate through contexts with active requests. The caller must be holding
 * rt->gcLock in case of a thread-safe build, or otherwise guarantee that the
 * context list is not altered asynchronously.
3053 extern JS_FRIEND_API(JSContext *)
3054 js_NextActiveContext(JSRuntime *, JSContext *);
3057 * Class.resolve and watchpoint recursion damping machinery.
3059 extern JSBool
3060 js_StartResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
3061 JSResolvingEntry **entryp);
3063 extern void
3064 js_StopResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
3065 JSResolvingEntry *entry, uint32 generation);
3068 * Report an exception, which is currently realized as a printf-style format
3069 * string and its arguments.
3071 typedef enum JSErrNum {
3072 #define MSG_DEF(name, number, count, exception, format) \
3073 name = number,
3074 #include "js.msg"
3075 #undef MSG_DEF
3076 JSErr_Limit
3077 } JSErrNum;
3079 extern JS_FRIEND_API(const JSErrorFormatString *)
3080 js_GetErrorMessage(void *userRef, const char *locale, const uintN errorNumber);
3082 #ifdef va_start
3083 extern JSBool
3084 js_ReportErrorVA(JSContext *cx, uintN flags, const char *format, va_list ap);
3086 extern JSBool
3087 js_ReportErrorNumberVA(JSContext *cx, uintN flags, JSErrorCallback callback,
3088 void *userRef, const uintN errorNumber,
3089 JSBool charArgs, va_list ap);
3091 extern JSBool
3092 js_ExpandErrorArguments(JSContext *cx, JSErrorCallback callback,
3093 void *userRef, const uintN errorNumber,
3094 char **message, JSErrorReport *reportp,
3095 bool charArgs, va_list ap);
3096 #endif
3098 extern void
3099 js_ReportOutOfMemory(JSContext *cx);
3102 * Report that cx->scriptStackQuota is exhausted.
3104 void
3105 js_ReportOutOfScriptQuota(JSContext *cx);
3107 extern JS_FRIEND_API(void)
3108 js_ReportOverRecursed(JSContext *cx);
3110 extern JS_FRIEND_API(void)
3111 js_ReportAllocationOverflow(JSContext *cx);
3113 #define JS_CHECK_RECURSION(cx, onerror) \
3114 JS_BEGIN_MACRO \
3115 int stackDummy_; \
3117 if (!JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy_)) { \
3118 js_ReportOverRecursed(cx); \
3119 onerror; \
3121 JS_END_MACRO
3124 * Report an exception using a previously composed JSErrorReport.
3125 * XXXbe remove from "friend" API
3127 extern JS_FRIEND_API(void)
3128 js_ReportErrorAgain(JSContext *cx, const char *message, JSErrorReport *report);
3130 extern void
3131 js_ReportIsNotDefined(JSContext *cx, const char *name);
3134 * Report an attempt to access the property of a null or undefined value (v).
3136 extern JSBool
3137 js_ReportIsNullOrUndefined(JSContext *cx, intN spindex, const js::Value &v,
3138 JSString *fallback);
3140 extern void
3141 js_ReportMissingArg(JSContext *cx, const js::Value &v, uintN arg);
3144 * Report error using js_DecompileValueGenerator(cx, spindex, v, fallback) as
 * the first argument for the error message. If the error message has fewer
 * than 3 arguments, use null for arg1 or arg2.
3148 extern JSBool
3149 js_ReportValueErrorFlags(JSContext *cx, uintN flags, const uintN errorNumber,
3150 intN spindex, const js::Value &v, JSString *fallback,
3151 const char *arg1, const char *arg2);
3153 #define js_ReportValueError(cx,errorNumber,spindex,v,fallback) \
3154 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3155 spindex, v, fallback, NULL, NULL))
3157 #define js_ReportValueError2(cx,errorNumber,spindex,v,fallback,arg1) \
3158 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3159 spindex, v, fallback, arg1, NULL))
3161 #define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2) \
3162 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3163 spindex, v, fallback, arg1, arg2))
3165 extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
3167 #ifdef JS_THREADSAFE
3168 # define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \
3169 JS_ASSERT((cx)->thread->data.requestDepth >= 1))
3170 #else
3171 # define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0)
3172 #endif
3175 * If the operation callback flag was set, call the operation callback.
3176 * This macro can run the full GC. Return true if it is OK to continue and
3177 * false otherwise.
3179 #define JS_CHECK_OPERATION_LIMIT(cx) \
3180 (JS_ASSERT_REQUEST_DEPTH(cx), \
3181 (!JS_THREAD_DATA(cx)->interruptFlags || js_InvokeOperationCallback(cx)))
3183 JS_ALWAYS_INLINE void
3184 JSThreadData::triggerOperationCallback(JSRuntime *rt)
3187 * Use JS_ATOMIC_SET and JS_ATOMIC_INCREMENT in the hope that it ensures
3188 * the write will become immediately visible to other processors polling
3189 * the flag. Note that we only care about visibility here, not read/write
3190 * ordering: this field can only be written with the GC lock held.
3192 if (interruptFlags)
3193 return;
3194 JS_ATOMIC_SET(&interruptFlags, 1);
3196 #ifdef JS_THREADSAFE
3197 /* rt->interruptCounter does not reflect suspended threads. */
3198 if (requestDepth != 0)
3199 JS_ATOMIC_INCREMENT(&rt->interruptCounter);
3200 #endif
3204 * Invoke the operation callback and return false if the current execution
3205 * is to be terminated.
3207 extern JSBool
3208 js_InvokeOperationCallback(JSContext *cx);
3210 extern JSBool
3211 js_HandleExecutionInterrupt(JSContext *cx);
3213 namespace js {
3215 /* These must be called with GC lock taken. */
3217 JS_FRIEND_API(void)
3218 TriggerOperationCallback(JSContext *cx);
3220 void
3221 TriggerAllOperationCallbacks(JSRuntime *rt);
3223 } /* namespace js */
3225 extern JSStackFrame *
3226 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
3228 extern jsbytecode*
3229 js_GetCurrentBytecodePC(JSContext* cx);
3231 extern bool
3232 js_CurrentPCIsInImacro(JSContext *cx);
namespace js {

#ifdef JS_TRACER

/*
 * Reconstruct the JS stack and clear cx->tracecx. We must be currently in a
 * _FAIL builtin from trace on cx or another context on the same thread. The
 * machine code for the trace remains on the C stack when js_DeepBail returns.
 *
 * Implemented in jstracer.cpp.
 */
JS_FORCES_STACK JS_FRIEND_API(void)
DeepBail(JSContext *cx);

#endif
/*
 * Ensure the interpreter state is usable: if we are currently executing
 * trace-compiled code, bail back to the interpreter (see DeepBail above).
 * A no-op in non-tracer builds.
 */
static JS_FORCES_STACK JS_INLINE void
LeaveTrace(JSContext *cx)
{
#ifdef JS_TRACER
    if (JS_ON_TRACE(cx))
        DeepBail(cx);
#endif
}
3257 static JS_INLINE void
3258 LeaveTraceIfGlobalObject(JSContext *cx, JSObject *obj)
3260 if (!obj->parent)
3261 LeaveTrace(cx);
/*
 * Whether the current trace activation recorded a bail-exit point, i.e.
 * whether DeepBail is possible right now.  Must only be called while on
 * trace; NOTE(review): the assert sits outside the #ifdef, so in non-tracer
 * builds this presumably is never reached — confirm against callers.
 */
static JS_INLINE JSBool
CanLeaveTrace(JSContext *cx)
{
    JS_ASSERT(JS_ON_TRACE(cx));
#ifdef JS_TRACER
    return cx->bailExit != NULL;
#else
    return JS_FALSE;
#endif
}
/* Set v as the pending exception on cx (defined elsewhere). */
extern void
SetPendingException(JSContext *cx, const Value &v);

/* Forward declaration; full definition lives in the regexp implementation. */
class RegExpStatics;

} /* namespace js */
/*
 * Get the current frame, first lazily instantiating stack frames if needed.
 * (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
 *
 * Defined in jstracer.cpp if JS_TRACER is defined.
 */
static JS_FORCES_STACK JS_INLINE JSStackFrame *
js_GetTopStackFrame(JSContext *cx)
{
    /* Leaving trace reconstructs the interpreter stack frames. */
    js::LeaveTrace(cx);
    return cx->maybefp();
}
3295 static JS_INLINE JSBool
3296 js_IsPropertyCacheDisabled(JSContext *cx)
3298 return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
3301 static JS_INLINE uint32
3302 js_RegenerateShapeForGC(JSContext *cx)
3304 JS_ASSERT(cx->runtime->gcRunning);
3305 JS_ASSERT(cx->runtime->gcRegenShapes);
3308 * Under the GC, compared with js_GenerateShape, we don't need to use
3309 * atomic increments but we still must make sure that after an overflow
3310 * the shape stays such.
3312 uint32 shape = cx->runtime->shapeGen;
3313 shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
3314 cx->runtime->shapeGen = shape;
3315 return shape;
namespace js {

/*
 * ContextAllocPolicy inline members: thin forwarders to the context's
 * allocation entry points so Vector/HashMap allocations are accounted to
 * (and reported through) cx.
 */

inline void *
ContextAllocPolicy::malloc(size_t bytes)
{
    return cx->malloc(bytes);
}

inline void
ContextAllocPolicy::free(void *p)
{
    cx->free(p);
}

inline void *
ContextAllocPolicy::realloc(void *p, size_t bytes)
{
    return cx->realloc(p, bytes);
}

inline void
ContextAllocPolicy::reportAllocOverflow() const
{
    js_ReportAllocationOverflow(cx);
}
/*
 * A growable vector of Values whose contents are kept alive across GC by
 * virtue of being an AutoGCRooter (traced via AutoGCRooter::trace).
 * Stack-allocate only, like all Auto* rooters.
 */
class AutoValueVector : private AutoGCRooter
{
  public:
    explicit AutoValueVector(JSContext *cx
                             JS_GUARD_OBJECT_NOTIFIER_PARAM)
        : AutoGCRooter(cx, VALVECTOR), vector(cx)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    size_t length() const { return vector.length(); }

    bool append(const Value &v) { return vector.append(v); }

    void popBack() { vector.popBack(); }

    bool growBy(size_t inc) {
        /* N.B. Value's default ctor leaves the Value undefined */
        size_t oldLength = vector.length();
        if (!vector.growByUninitialized(inc))
            return false;
        /* Overwrite the uninitialized tail so the GC never traces garbage. */
        MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool resize(size_t newLength) {
        size_t oldLength = vector.length();
        if (newLength <= oldLength) {
            vector.shrinkBy(oldLength - newLength);
            return true;
        }
        /* N.B. Value's default ctor leaves the Value undefined */
        if (!vector.growByUninitialized(newLength - oldLength))
            return false;
        MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool reserve(size_t newLength) {
        return vector.reserve(newLength);
    }

    Value &operator[](size_t i) { return vector[i]; }
    const Value &operator[](size_t i) const { return vector[i]; }

    const Value *begin() const { return vector.begin(); }
    Value *begin() { return vector.begin(); }

    const Value *end() const { return vector.end(); }
    Value *end() { return vector.end(); }

    /* Views of the same storage through the public jsval type. */
    const jsval *jsval_begin() const { return Jsvalify(begin()); }
    jsval *jsval_begin() { return Jsvalify(begin()); }

    const jsval *jsval_end() const { return Jsvalify(end()); }
    jsval *jsval_end() { return Jsvalify(end()); }

    const Value &back() const { return vector.back(); }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    Vector<Value, 8> vector;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
 * A growable vector of jsids rooted for GC, mirroring AutoValueVector:
 * same growth discipline (grow uninitialized, then make the tail GC-safe),
 * same stack-only usage.
 */
class AutoIdVector : private AutoGCRooter
{
  public:
    explicit AutoIdVector(JSContext *cx
                          JS_GUARD_OBJECT_NOTIFIER_PARAM)
        : AutoGCRooter(cx, IDVECTOR), vector(cx)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    size_t length() const { return vector.length(); }

    bool append(jsid id) { return vector.append(id); }

    void popBack() { vector.popBack(); }

    bool growBy(size_t inc) {
        /* N.B. jsid's default ctor leaves the jsid undefined */
        size_t oldLength = vector.length();
        if (!vector.growByUninitialized(inc))
            return false;
        /* Overwrite the uninitialized tail so the GC never traces garbage. */
        MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool resize(size_t newLength) {
        size_t oldLength = vector.length();
        if (newLength <= oldLength) {
            vector.shrinkBy(oldLength - newLength);
            return true;
        }
        /* N.B. jsid's default ctor leaves the jsid undefined */
        if (!vector.growByUninitialized(newLength - oldLength))
            return false;
        MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool reserve(size_t newLength) {
        return vector.reserve(newLength);
    }

    jsid &operator[](size_t i) { return vector[i]; }
    const jsid &operator[](size_t i) const { return vector[i]; }

    const jsid *begin() const { return vector.begin(); }
    jsid *begin() { return vector.begin(); }

    const jsid *end() const { return vector.end(); }
    jsid *end() { return vector.end(); }

    const jsid &back() const { return vector.back(); }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    Vector<jsid, 8> vector;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/* Allocate a JSIdArray of the given length (defined elsewhere). */
JSIdArray *
NewIdArray(JSContext *cx, jsint length);

} /* namespace js */
#ifdef _MSC_VER
/* Two pops to balance two pushes — presumably issued near the top of this
 * header, outside this view; verify they stay paired when editing. */
#pragma warning(pop)
#pragma warning(pop)
#endif

#ifdef JS_UNDEFD_MOZALLOC_WRAPPERS
/* Restore the mozalloc macro wrappers this header temporarily undefined. */
# include "mozilla/mozalloc_macro_wrappers.h"
#endif

#endif /* jscntxt_h___ */