Avoid staying in Interpret() after recording (bug 593532, r=dmandelin).
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
#ifndef jscntxt_h___
#define jscntxt_h___
/*
 * JS execution context.
 */
#include <string.h>
48 /* Gross special case for Gecko, which defines malloc/calloc/free. */
49 #ifdef mozilla_mozalloc_macro_wrappers_h
50 # define JS_UNDEFD_MOZALLOC_WRAPPERS
51 /* The "anti-header" */
52 # include "mozilla/mozalloc_undef_macro_wrappers.h"
53 #endif
55 #include "jsprvtd.h"
56 #include "jsarena.h" /* Added by JSIFY */
57 #include "jsclist.h"
58 #include "jslong.h"
59 #include "jsatom.h"
60 #include "jsdhash.h"
61 #include "jsdtoa.h"
62 #include "jsfun.h"
63 #include "jsgc.h"
64 #include "jsgcchunk.h"
65 #include "jshashtable.h"
66 #include "jsinterp.h"
67 #include "jsobj.h"
68 #include "jspropertycache.h"
69 #include "jspropertytree.h"
70 #include "jsregexp.h"
71 #include "jsutil.h"
72 #include "jsarray.h"
73 #include "jsvector.h"
74 #include "prmjtime.h"
76 #ifdef _MSC_VER
77 #pragma warning(push)
78 #pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */
79 #pragma warning(push)
80 #pragma warning(disable:4355) /* Silence warning about "this" used in base member initializer list */
81 #endif
/*
 * js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
 * given pc in a script. We use the script->code pointer to tag the cache,
 * instead of the script address itself, so that source notes are always found
 * by offset from the bytecode with which they were generated.
 */
typedef struct JSGSNCache {
90 jsbytecode *code;
91 JSDHashTable table;
92 #ifdef JS_GSNMETER
93 uint32 hits;
94 uint32 misses;
95 uint32 fills;
96 uint32 purges;
97 # define GSN_CACHE_METER(cache,cnt) (++(cache)->cnt)
98 #else
99 # define GSN_CACHE_METER(cache,cnt) /* nothing */
100 #endif
101 } JSGSNCache;
103 #define js_FinishGSNCache(cache) js_PurgeGSNCache(cache)
105 extern void
106 js_PurgeGSNCache(JSGSNCache *cache);
108 /* These helper macros take a cx as parameter and operate on its GSN cache. */
109 #define JS_PURGE_GSN_CACHE(cx) js_PurgeGSNCache(&JS_GSN_CACHE(cx))
110 #define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
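/*
 * Illustrative sketch (not part of this header) of how a js_GetSrcNote-style
 * caller might drive the cache and its metering macros; the real call sites
 * live elsewhere (see js_GetSrcNote and the GC):
 *
 *   if (JS_GSN_CACHE(cx).code == script->code)
 *       JS_METER_GSN_CACHE(cx, hits);      // fast path: hash-table lookup
 *   else
 *       JS_METER_GSN_CACHE(cx, misses);    // slow path: scan notes, then fill
 *   ...
 *   JS_PURGE_GSN_CACHE(cx);                // e.g. when scripts are destroyed
 */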
112 /* Forward declarations of nanojit types. */
113 namespace nanojit {
115 class Assembler;
116 class CodeAlloc;
117 class Fragment;
118 template<typename K> struct DefaultHash;
119 template<typename K, typename V, typename H> class HashMap;
120 template<typename T> class Seq;
122 } /* namespace nanojit */
124 namespace JSC {
125 class ExecutableAllocator;
128 namespace js {
130 #ifdef JS_METHODJIT
131 struct VMFrame;
132 #endif
134 /* Tracer constants. */
135 static const size_t MONITOR_N_GLOBAL_STATES = 4;
136 static const size_t FRAGMENT_TABLE_SIZE = 512;
137 static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
138 static const size_t MAX_CALL_STACK_ENTRIES = 500;
139 static const size_t MAX_GLOBAL_SLOTS = 4096;
140 static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
141 static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16;
143 /* Forward declarations of tracer types. */
144 class VMAllocator;
145 class FrameInfoCache;
146 struct REHashFn;
147 struct REHashKey;
148 struct FrameInfo;
149 struct VMSideExit;
150 struct TreeFragment;
151 struct TracerState;
152 template<typename T> class Queue;
153 typedef Queue<uint16> SlotList;
154 class TypeMap;
155 struct REFragment;
156 typedef nanojit::HashMap<REHashKey, REFragment*, REHashFn> REHashMap;
158 #if defined(JS_JIT_SPEW) || defined(DEBUG)
159 struct FragPI;
160 typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap;
161 #endif
163 namespace mjit {
164 class CallStackIterator;
/*
 * Allocation policy that calls JSContext memory functions and reports errors
 * to the context. Since the JSContext given on construction is stored for
 * the lifetime of the container, this policy may only be used for containers
 * whose lifetime is shorter than that of the given JSContext.
 */
class ContextAllocPolicy
{
175 JSContext *cx;
177 public:
178 ContextAllocPolicy(JSContext *cx) : cx(cx) {}
179 JSContext *context() const { return cx; }
181 /* Inline definitions below. */
182 void *malloc(size_t bytes);
183 void free(void *p);
184 void *realloc(void *p, size_t bytes);
void *realloc(void *p, size_t bytes);
void reportAllocOverflow() const;
};
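/*
 * Illustrative use (a sketch, not part of this header): ContextAllocPolicy is
 * intended as the allocation-policy template parameter of containers such as
 * js::Vector and js::HashMap (see jsvector.h/jshashtable.h), e.g.
 *
 *   js::Vector<js::Value, 8, js::ContextAllocPolicy> vec(cx);
 *   if (!vec.append(v))      // on OOM, the error was already reported via cx
 *       return false;
 */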
188 /* Holds the execution state during trace execution. */
189 struct TracerState
191 JSContext* cx; // current VM context handle
192 double* stackBase; // native stack base
193 double* sp; // native stack pointer, stack[0] is spbase[0]
194 double* eos; // first unusable word after the native stack / begin of globals
195 FrameInfo** callstackBase; // call stack base
196 void* sor; // start of rp stack
197 FrameInfo** rp; // call stack pointer
198 void* eor; // first unusable word after the call stack
199 VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
200 VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
201 // call exit guard mismatched
202 void* rpAtLastTreeCall; // value of rp at innermost tree call guard
203 VMSideExit* outermostTreeExitGuard; // the last side exit returned by js_CallTree
204 TreeFragment* outermostTree; // the outermost tree we initially invoked
205 uintN* inlineCallCountp; // inline call count counter
206 VMSideExit** innermostNestedGuardp;
207 VMSideExit* innermost;
208 uint64 startTime;
209 TracerState* prev;
211 // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
212 // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
213 // if an error or exception occurred.
214 uint32 builtinStatus;
216 // Used to communicate the location of the return value in case of a deep bail.
217 double* deepBailSp;
219 // Used when calling natives from trace to root the vp vector.
220 uintN nativeVpLen;
221 js::Value* nativeVp;
223 // The regs pointed to by cx->regs while a deep-bailed slow native
224 // completes execution.
225 JSFrameRegs bailedSlowNativeRegs;
227 TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti,
228 uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
229 ~TracerState();
232 #ifdef JS_METHODJIT
233 namespace mjit {
234 struct Trampolines
236 void (* forceReturn)();
237 JSC::ExecutablePool *forceReturnPool;
238 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
239 void (* forceReturnFast)();
240 JSC::ExecutablePool *forceReturnFastPool;
241 #endif
244 struct ThreadData
246 JSC::ExecutableAllocator *execPool;
248 // Trampolines for JIT code.
249 Trampolines trampolines;
251 VMFrame *activeFrame;
253 bool Initialize();
254 void Finish();
257 #endif /* JS_METHODJIT */
/*
 * Storage for the execution state and store during trace execution. Generated
 * code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
 * doubles after the stack begins. Thus, on trace, |TracerState::eos| holds a
 * pointer to the first global.
 */
struct TraceNativeStorage
{
267 double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
268 FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];
270 double *stack() { return stack_global_buf; }
271 double *global() { return stack_global_buf + MAX_NATIVE_STACK_SLOTS; }
FrameInfo **callstack() { return callstack_buf; }
};
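/*
 * Illustrative identity (restating the comment above in code): generated
 * trace code assumes
 *
 *   storage->global() == storage->stack() + MAX_NATIVE_STACK_SLOTS
 *
 * so TracerState::eos, which points just past the native stack, is also the
 * address of the first global slot.
 */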
/* Holds data to track a single global object. */
276 struct GlobalState {
277 JSObject* globalObj;
278 uint32 globalShape;
279 SlotList* globalSlots;
283 * A StackSegment (referred to as just a 'segment') contains a down-linked set
284 * of stack frames and the slots associated with each frame. A segment and its
285 * contained frames/slots also have a precise memory layout that is described
286 * in the js::StackSpace comment. A key layout invariant for segments is that
287 * down-linked frames are adjacent in memory, separated only by the values that
288 * constitute the locals and expression stack of the down-frame and arguments
289 * of the up-frame.
291 * The set of stack frames in a non-empty segment start at the segment's
292 * "current frame", which is the most recently pushed frame, and ends at the
293 * segment's "initial frame". Note that, while all stack frames in a segment
294 * are down-linked, not all down-linked frames are in the same segment. Hence,
295 * for a segment |ss|, |ss->getInitialFrame()->down| may be non-null and in a
296 * different segment. This occurs when the VM reenters itself (via Invoke or
297 * Execute). In full generality, a single context may contain a forest of trees
298 * of stack frames. With respect to this forest, a segment contains a linear
299 * path along a single tree, not necessarily to the root.
301 * The frames of a non-empty segment must all be in the same context and thus
302 * each non-empty segment is referred to as being "in" a context. Segments in a
303 * context have an additional state of being either "active" or "suspended". A
* suspended segment |ss| has a "suspended frame" which is a snapshot of |cx->regs|
305 * when the segment was suspended and serves as the current frame of |ss|.
306 * There is at most one active segment in a given context. Segments in a
307 * context execute LIFO and are maintained in a stack. The top of this stack
308 * is the context's "current segment". If a context |cx| has an active segment
309 * |ss|, then:
310 * 1. |ss| is |cx|'s current segment,
311 * 2. |cx->regs != NULL|, and
312 * 3. |ss|'s current frame is |cx->regs->fp|.
313 * Moreover, |cx->regs != NULL| iff |cx| has an active segment.
315 * An empty segment is not associated with any context. Empty segments are
316 * created when there is not an active segment for a context at the top of the
317 * stack and claim space for the arguments of an Invoke before the Invoke's
318 * stack frame is pushed. During the intervals when the arguments have been
319 * pushed, but not the stack frame, the segment cannot be pushed onto the
320 * context, since that would require some hack to deal with cx->fp not being
321 * the current frame of cx->currentSegment.
323 * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
324 * segment may or may not be "saved". Normally, when the active segment is
325 * popped, the previous segment (which is necessarily suspended) becomes
326 * active. If the previous segment was saved, however, then it stays suspended
327 * until it is made active by a call to JS_RestoreFrameChain. This is why a
328 * context may have a current segment, but not an active segment.
330 class StackSegment
332 /* The context to which this segment belongs. */
333 JSContext *cx;
335 /* Link for JSContext segment stack mentioned in big comment above. */
336 StackSegment *previousInContext;
338 /* Link for StackSpace segment stack mentioned in StackSpace comment. */
339 StackSegment *previousInMemory;
341 /* The first frame executed in this segment. null iff cx is null */
342 JSStackFrame *initialFrame;
344 /* If this segment is suspended, |cx->regs| when it was suspended. */
345 JSFrameRegs *suspendedRegs;
347 /* The varobj on entry to initialFrame. */
348 JSObject *initialVarObj;
350 /* Whether this segment was suspended by JS_SaveFrameChain. */
351 bool saved;
353 /* Align at 8 bytes on all platforms. */
354 #if JS_BITS_PER_WORD == 32
355 void *padding;
356 #endif
359 * To make isActive a single null-ness check, this non-null constant is
360 * assigned to suspendedRegs when !inContext.
362 #define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1)
364 public:
365 StackSegment()
366 : cx(NULL), previousInContext(NULL), previousInMemory(NULL),
367 initialFrame(NULL), suspendedRegs(NON_NULL_SUSPENDED_REGS),
368 initialVarObj(NULL), saved(false)
370 JS_ASSERT(!inContext());
373 /* Safe casts guaranteed by the contiguous-stack layout. */
375 Value *previousSegmentEnd() const {
376 return (Value *)this;
379 Value *getInitialArgBegin() const {
380 return (Value *)(this + 1);
384 * As described in the comment at the beginning of the class, a segment
385 * is in one of three states:
387 * !inContext: the segment has been created to root arguments for a
388 * future call to Invoke.
389 * isActive: the segment describes a set of stack frames in a context,
* where the top frame is currently executing.
391 * isSuspended: like isActive, but the top frame has been suspended.
394 bool inContext() const {
395 JS_ASSERT(!!cx == !!initialFrame);
396 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS && !saved);
397 return cx;
400 bool isActive() const {
401 JS_ASSERT_IF(!suspendedRegs, cx && !saved);
402 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
403 return !suspendedRegs;
406 bool isSuspended() const {
407 JS_ASSERT_IF(!cx || !suspendedRegs, !saved);
408 JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
409 return cx && suspendedRegs;
412 /* Substate of suspended, queryable in any state. */
414 bool isSaved() const {
415 JS_ASSERT_IF(saved, isSuspended());
416 return saved;
419 /* Transitioning between inContext <--> isActive */
421 void joinContext(JSContext *cx, JSStackFrame *f) {
422 JS_ASSERT(!inContext());
423 this->cx = cx;
424 initialFrame = f;
425 suspendedRegs = NULL;
426 JS_ASSERT(isActive());
429 void leaveContext() {
430 JS_ASSERT(isActive());
431 this->cx = NULL;
432 initialFrame = NULL;
433 suspendedRegs = NON_NULL_SUSPENDED_REGS;
434 JS_ASSERT(!inContext());
437 JSContext *maybeContext() const {
438 return cx;
441 #undef NON_NULL_SUSPENDED_REGS
443 /* Transitioning between isActive <--> isSuspended */
445 void suspend(JSFrameRegs *regs) {
446 JS_ASSERT(isActive());
447 JS_ASSERT(regs && regs->fp && contains(regs->fp));
448 suspendedRegs = regs;
449 JS_ASSERT(isSuspended());
452 void resume() {
453 JS_ASSERT(isSuspended());
454 suspendedRegs = NULL;
455 JS_ASSERT(isActive());
458 /* When isSuspended, transitioning isSaved <--> !isSaved */
460 void save(JSFrameRegs *regs) {
461 JS_ASSERT(!isSuspended());
462 suspend(regs);
463 saved = true;
464 JS_ASSERT(isSaved());
467 void restore() {
468 JS_ASSERT(isSaved());
469 saved = false;
470 resume();
471 JS_ASSERT(!isSuspended());
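/*
 * Lifecycle sketch (illustrative only, using the transitions above):
 *
 *   StackSegment seg;                 // !inContext: rooting Invoke arguments
 *   seg.joinContext(cx, initialFp);   // isActive
 *   seg.suspend(cx->regs);            // isSuspended: the VM reentered itself
 *   seg.resume();                     // isActive again
 *   seg.leaveContext();               // back to !inContext
 *
 * save()/restore() are the JS_SaveFrameChain/JS_RestoreFrameChain flavors of
 * suspend()/resume().
 */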
474 /* Data available when inContext */
476 JSStackFrame *getInitialFrame() const {
477 JS_ASSERT(inContext());
478 return initialFrame;
481 inline JSFrameRegs *getCurrentRegs() const;
482 inline JSStackFrame *getCurrentFrame() const;
484 /* Data available when isSuspended. */
486 JSFrameRegs *getSuspendedRegs() const {
487 JS_ASSERT(isSuspended());
488 return suspendedRegs;
491 JSStackFrame *getSuspendedFrame() const {
492 return suspendedRegs->fp;
495 /* JSContext / js::StackSpace bookkeeping. */
497 void setPreviousInContext(StackSegment *seg) {
498 previousInContext = seg;
501 StackSegment *getPreviousInContext() const {
502 return previousInContext;
505 void setPreviousInMemory(StackSegment *seg) {
506 previousInMemory = seg;
509 StackSegment *getPreviousInMemory() const {
510 return previousInMemory;
513 void setInitialVarObj(JSObject *obj) {
514 JS_ASSERT(inContext());
515 initialVarObj = obj;
518 JSObject *getInitialVarObj() const {
519 JS_ASSERT(inContext());
520 return initialVarObj;
523 #ifdef DEBUG
524 JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const;
525 #endif
528 static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value);
529 JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0);
531 /* See StackSpace::pushInvokeArgs. */
532 class InvokeArgsGuard : public CallArgs
534 friend class StackSpace;
535 JSContext *cx; /* null implies nothing pushed */
536 StackSegment *seg;
537 Value *prevInvokeArgEnd;
538 #ifdef DEBUG
539 StackSegment *prevInvokeSegment;
540 JSStackFrame *prevInvokeFrame;
541 #endif
542 public:
543 inline InvokeArgsGuard() : cx(NULL), seg(NULL) {}
544 inline InvokeArgsGuard(JSContext *cx, Value *vp, uintN argc);
545 inline ~InvokeArgsGuard();
546 bool pushed() const { return cx != NULL; }
550 * This type can be used to call Invoke when the arguments have already been
551 * pushed onto the stack as part of normal execution.
553 struct InvokeArgsAlreadyOnTheStack : CallArgs
555 InvokeArgsAlreadyOnTheStack(Value *vp, uintN argc) : CallArgs(vp + 2, argc) {}
558 /* See StackSpace::pushInvokeFrame. */
559 class InvokeFrameGuard
561 friend class StackSpace;
562 JSContext *cx; /* null implies nothing pushed */
563 JSFrameRegs regs;
564 JSFrameRegs *prevRegs;
565 public:
566 InvokeFrameGuard() : cx(NULL) {}
567 JS_REQUIRES_STACK ~InvokeFrameGuard();
568 bool pushed() const { return cx != NULL; }
569 JSFrameRegs &getRegs() { return regs; }
572 /* See StackSpace::pushExecuteFrame. */
573 class FrameGuard
575 friend class StackSpace;
576 JSContext *cx; /* null implies nothing pushed */
577 StackSegment *seg;
578 Value *vp;
579 JSStackFrame *fp;
580 JSStackFrame *down;
581 public:
582 FrameGuard() : cx(NULL), vp(NULL), fp(NULL) {}
583 JS_REQUIRES_STACK ~FrameGuard();
584 bool pushed() const { return cx != NULL; }
585 Value *getvp() const { return vp; }
586 JSStackFrame *getFrame() const { return fp; }
590 * Stack layout
592 * Each JSThreadData has one associated StackSpace object which allocates all
593 * segments for the thread. StackSpace performs all such allocations in a
594 * single, fixed-size buffer using a specific layout scheme that allows some
595 * associations between segments, frames, and slots to be implicit, rather
596 * than explicitly stored as pointers. To maintain useful invariants, stack
597 * space is not given out arbitrarily, but rather allocated/deallocated for
598 * specific purposes. The use cases currently supported are: calling a function
599 * with arguments (e.g. Invoke), executing a script (e.g. Execute), inline
600 * interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See
601 * associated member functions below.
603 * First, we consider the layout of individual segments. (See the
604 * js::StackSegment comment for terminology.) A non-empty segment (i.e., a
605 * segment in a context) has the following layout:
607 * initial frame current frame ------. if regs,
608 * .------------. | | regs->sp
609 * | V V V
610 * |segment| slots |frame| slots |frame| slots |frame| slots |
611 * | ^ | ^ |
612 * ? <----------' `----------' `----------'
613 * down down down
615 * Moreover, the bytes in the following ranges form a contiguous array of
616 * Values that are marked during GC:
617 * 1. between a segment and its first frame
618 * 2. between two adjacent frames in a segment
619 * 3. between a segment's current frame and (if fp->regs) fp->regs->sp
620 * Thus, the VM must ensure that all such Values are safe to be marked.
622 * An empty segment is followed by arguments that are rooted by the
623 * StackSpace::invokeArgEnd pointer:
625 * invokeArgEnd
628 * |segment| slots |
630 * Above the level of segments, a StackSpace is simply a contiguous sequence
631 * of segments kept in a linked list:
633 * base currentSegment firstUnused end
634 * | | | |
635 * V V V V
636 * |segment| --- |segment| --- |segment| ------- | |
637 * | ^ | ^ |
638 * 0 <---' `-----------' `-----------'
639 * previous previous previous
641 * Both js::StackSpace and JSContext maintain a stack of segments, the top of
642 * which is the "current segment" for that thread or context, respectively.
643 * Since different contexts can arbitrarily interleave execution in a single
644 * thread, these stacks are different enough that a segment needs both
645 * "previousInMemory" and "previousInContext".
647 * For example, in a single thread, a function in segment S1 in a context CX1
648 * may call out into C++ code that reenters the VM in a context CX2, which
649 * creates a new segment S2 in CX2, and CX1 may or may not equal CX2.
651 * Note that there is some structure to this interleaving of segments:
652 * 1. the inclusion from segments in a context to segments in a thread
653 * preserves order (in terms of previousInContext and previousInMemory,
654 * respectively).
655 * 2. the mapping from stack frames to their containing segment preserves
656 * order (in terms of down and previousInContext, respectively).
658 class StackSpace
660 Value *base;
661 #ifdef XP_WIN
662 mutable Value *commitEnd;
663 #endif
664 Value *end;
665 StackSegment *currentSegment;
666 #ifdef DEBUG
668 * Keep track of which segment/frame bumped invokeArgEnd so that
669 * firstUnused() can assert that, when invokeArgEnd is used as the top of
670 * the stack, it is being used appropriately.
672 StackSegment *invokeSegment;
673 JSStackFrame *invokeFrame;
674 #endif
675 Value *invokeArgEnd;
677 JS_REQUIRES_STACK bool pushSegmentForInvoke(JSContext *cx, uintN argc,
678 InvokeArgsGuard &ag);
679 JS_REQUIRES_STACK bool pushInvokeFrameSlow(JSContext *cx, const InvokeArgsGuard &ag,
680 InvokeFrameGuard &fg);
681 JS_REQUIRES_STACK void popInvokeFrameSlow(const CallArgs &args);
682 JS_REQUIRES_STACK void popSegmentForInvoke(const InvokeArgsGuard &ag);
684 /* Although guards are friends, XGuard should only call popX(). */
685 friend class InvokeArgsGuard;
686 JS_REQUIRES_STACK inline void popInvokeArgs(const InvokeArgsGuard &args);
687 friend class InvokeFrameGuard;
688 JS_REQUIRES_STACK void popInvokeFrame(const InvokeFrameGuard &ag);
689 friend class FrameGuard;
690 JS_REQUIRES_STACK void popFrame(JSContext *cx);
692 /* Return a pointer to the first unused slot. */
693 JS_REQUIRES_STACK
694 inline Value *firstUnused() const;
696 inline bool isCurrentAndActive(JSContext *cx) const;
697 friend class AllFramesIter;
698 StackSegment *getCurrentSegment() const { return currentSegment; }
701 * Allocate nvals on the top of the stack, report error on failure.
702 * N.B. the caller must ensure |from == firstUnused()|.
704 inline bool ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const;
706 #ifdef XP_WIN
707 /* Commit more memory from the reserved stack space. */
708 JS_FRIEND_API(bool) bumpCommit(Value *from, ptrdiff_t nvals) const;
709 #endif
711 public:
712 static const size_t CAPACITY_VALS = 512 * 1024;
713 static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value);
714 static const size_t COMMIT_VALS = 16 * 1024;
715 static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value);
717 /* Kept as a member of JSThreadData; cannot use constructor/destructor. */
718 bool init();
719 void finish();
721 #ifdef DEBUG
722 template <class T>
723 bool contains(T *t) const {
724 char *v = (char *)t;
725 JS_ASSERT(size_t(-1) - uintptr_t(t) >= sizeof(T));
726 return v >= (char *)base && v + sizeof(T) <= (char *)end;
728 #endif
731 * When we LeaveTree, we need to rebuild the stack, which requires stack
732 * allocation. There is no good way to handle an OOM for these allocations,
733 * so this function checks that they cannot occur using the size of the
734 * TraceNativeStorage as a conservative upper bound.
736 inline bool ensureEnoughSpaceToEnterTrace();
738 /* +1 for slow native's stack frame. */
739 static const ptrdiff_t MAX_TRACE_SPACE_VALS =
740 MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME +
741 (VALUES_PER_STACK_SEGMENT + VALUES_PER_STACK_FRAME /* synthesized slow native */);
743 /* Mark all segments, frames, and slots on the stack. */
744 JS_REQUIRES_STACK void mark(JSTracer *trc);
747 * For all four use cases below:
748 * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
749 * - The "get*Frame" functions do not change any global state, they just
750 * check OOM and return pointers to an uninitialized frame with the
751 * requested missing arguments/slots. Only once the "push*Frame"
752 * function has been called is global state updated. Thus, between
753 * "get*Frame" and "push*Frame", the frame and slots are unrooted.
754 * - The "push*Frame" functions will set fp->down; the caller needn't.
755 * - Functions taking "*Guard" arguments will use the guard's destructor
756 * to pop the allocation. The caller must ensure the guard has the
757 * appropriate lifetime.
758 * - The get*Frame functions put the 'nmissing' slots contiguously after
759 * the arguments.
763 * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
764 * the arguments to Invoke. A single allocation can be used for multiple
765 * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from
766 * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
768 JS_REQUIRES_STACK
769 bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag);
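/*
 * Call-pattern sketch (illustrative; given a StackSpace &stack, e.g. the one
 * kept in JSThreadData, and the CallArgs interface from jsinterp.h):
 *
 *   InvokeArgsGuard args;
 *   if (!stack.pushInvokeArgs(cx, argc, args))
 *       return false;
 *   // fill in callee, |this| and the argc argument values via CallArgs
 *   if (!Invoke(cx, args, 0))
 *       return false;
 *   // the guard's destructor pops the allocation when it goes out of scope
 */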
771 /* These functions are called inside Invoke, not Invoke clients. */
772 bool getInvokeFrame(JSContext *cx, const CallArgs &args,
773 uintN nmissing, uintN nfixed,
774 InvokeFrameGuard &fg) const;
776 JS_REQUIRES_STACK
777 void pushInvokeFrame(JSContext *cx, const CallArgs &args, InvokeFrameGuard &fg);
780 * For the simpler case when arguments are allocated at the same time as
781 * the frame and it is not necessary to have rooted argument values before
782 * pushing the frame.
784 JS_REQUIRES_STACK
785 bool getExecuteFrame(JSContext *cx, JSStackFrame *down,
786 uintN vplen, uintN nfixed,
787 FrameGuard &fg) const;
788 JS_REQUIRES_STACK
789 void pushExecuteFrame(JSContext *cx, FrameGuard &fg,
790 JSFrameRegs &regs, JSObject *initialVarObj);
793 * Since RAII cannot be used for inline frames, callers must manually
794 * call pushInlineFrame/popInlineFrame.
796 JS_REQUIRES_STACK
797 inline JSStackFrame *getInlineFrame(JSContext *cx, Value *sp,
798 uintN nmissing, uintN nfixed) const;
800 JS_REQUIRES_STACK
801 inline JSStackFrame *getInlineFrameUnchecked(JSContext *cx, Value *sp,
802 uintN nmissing) const;
804 JS_REQUIRES_STACK
805 inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc,
806 JSStackFrame *newfp);
808 JS_REQUIRES_STACK
809 inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down);
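/*
 * Pairing sketch (illustrative; this is roughly what the interpreter's
 * inline-call path does, with no RAII guard):
 *
 *   JSStackFrame *newfp = stack.getInlineFrame(cx, regs.sp, nmissing, nfixed);
 *   if (!newfp)
 *       return false;
 *   ... initialize newfp's arguments, scope chain, etc. ...
 *   stack.pushInlineFrame(cx, fp, pc, newfp);
 *   ...
 *   stack.popInlineFrame(cx, newfp, newfp->down);   // on return from callee
 */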
812 * For pushing a bookkeeping frame.
814 JS_REQUIRES_STACK
815 bool pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain);
818 * Ensure space based on an over-recursion limit.
820 inline bool ensureSpace(JSContext *maybecx, Value *start, Value *from,
821 Value *& limit, uint32 nslots) const;
824 * Create a stack limit for quickly detecting over-recursion and whether
825 * a commit bump is needed.
827 inline Value *makeStackLimit(Value *start) const;
830 JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
833 * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved
834 * value of pc/sp for any other frame, it is necessary to know about that
835 * frame's up-frame. This iterator maintains this information when walking down
836 * a chain of stack frames starting at |cx->fp|.
838 * Usage:
839 * for (FrameRegsIter i(cx); !i.done(); ++i)
840 * ... i.fp() ... i.sp() ... i.pc()
842 class FrameRegsIter
844 StackSegment *curseg;
845 JSStackFrame *curfp;
846 Value *cursp;
847 jsbytecode *curpc;
849 void initSlow();
850 void incSlow(JSStackFrame *up, JSStackFrame *down);
851 static inline Value *contiguousDownFrameSP(JSStackFrame *up);
853 public:
854 JS_REQUIRES_STACK inline FrameRegsIter(JSContext *cx);
856 bool done() const { return curfp == NULL; }
857 inline FrameRegsIter &operator++();
859 JSStackFrame *fp() const { return curfp; }
860 Value *sp() const { return cursp; }
861 jsbytecode *pc() const { return curpc; }
865 * Utility class for iteration over all active stack frames.
867 class AllFramesIter
869 public:
870 AllFramesIter(JSContext *cx);
872 bool done() const { return curfp == NULL; }
873 AllFramesIter& operator++();
875 JSStackFrame *fp() const { return curfp; }
877 private:
878 StackSegment *curcs;
879 JSStackFrame *curfp;
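/*
 * Usage (by analogy with FrameRegsIter above):
 *   for (AllFramesIter i(cx); !i.done(); ++i)
 *       ... i.fp() ...
 */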
/* Holds the number of recording attempts for an address. */
883 typedef HashMap<jsbytecode*,
884 size_t,
885 DefaultHasher<jsbytecode*>,
886 SystemAllocPolicy> RecordAttemptMap;
888 class Oracle;
891 * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
892 * JS_THREADSAFE) has an associated trace monitor that keeps track of loop
893 * frequencies for all JavaScript code loaded into that runtime.
895 struct TraceMonitor {
897 * The context currently executing JIT-compiled code on this thread, or
898 * NULL if none. Among other things, this can in certain cases prevent
899 * last-ditch GC and suppress calls to JS_ReportOutOfMemory.
901 * !tracecx && !recorder: not on trace
902 * !tracecx && recorder: recording
903 * tracecx && !recorder: executing a trace
904 * tracecx && recorder: executing inner loop, recording outer loop
906 JSContext *tracecx;
909 * Cached storage to use when executing on trace. While we may enter nested
* traces, we always reuse the outer trace's storage, so we never need more
* than one of these.
913 TraceNativeStorage *storage;
916 * There are 5 allocators here. This might seem like overkill, but they
917 * have different lifecycles, and by keeping them separate we keep the
918 * amount of retained memory down significantly. They are flushed (ie.
919 * all the allocated memory is freed) periodically.
921 * - dataAlloc has the lifecycle of the monitor. It's flushed only when
922 * the monitor is flushed. It's used for fragments.
924 * - traceAlloc has the same flush lifecycle as the dataAlloc, but it is
925 * also *marked* when a recording starts and rewinds to the mark point
926 * if recording aborts. So you can put things in it that are only
927 * reachable on a successful record/compile cycle like GuardRecords and
928 * SideExits.
930 * - tempAlloc is flushed after each recording, successful or not. It's
931 * used to store LIR code and for all other elements in the LIR
932 * pipeline.
934 * - reTempAlloc is just like tempAlloc, but is used for regexp
935 * compilation in RegExpNativeCompiler rather than normal compilation in
936 * TraceRecorder.
938 * - codeAlloc has the same lifetime as dataAlloc, but its API is
939 * different (CodeAlloc vs. VMAllocator). It's used for native code.
940 * It's also a good idea to keep code and data separate to avoid I-cache
941 * vs. D-cache issues.
943 VMAllocator* dataAlloc;
944 VMAllocator* traceAlloc;
945 VMAllocator* tempAlloc;
946 VMAllocator* reTempAlloc;
947 nanojit::CodeAlloc* codeAlloc;
948 nanojit::Assembler* assembler;
949 FrameInfoCache* frameCache;
951 Oracle* oracle;
952 TraceRecorder* recorder;
954 GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
955 TreeFragment* vmfragments[FRAGMENT_TABLE_SIZE];
956 RecordAttemptMap* recordAttempts;
959 * Maximum size of the code cache before we start flushing. 1/16 of this
960 * size is used as threshold for the regular expression code cache.
962 uint32 maxCodeCacheBytes;
965 * If nonzero, do not flush the JIT cache after a deep bail. That would
966 * free JITted code pages that we will later return to. Instead, set the
967 * needFlush flag so that it can be flushed later.
969 JSBool needFlush;
972 * Fragment map for the regular expression compiler.
974 REHashMap* reFragments;
976 // Cached temporary typemap to avoid realloc'ing every time we create one.
977 // This must be used in only one place at a given time. It must be cleared
978 // before use.
979 TypeMap* cachedTempTypeMap;
981 #ifdef DEBUG
982 /* Fields needed for fragment/guard profiling. */
983 nanojit::Seq<nanojit::Fragment*>* branches;
984 uint32 lastFragID;
986 * profAlloc has a lifetime which spans exactly from js_InitJIT to
987 * js_FinishJIT.
989 VMAllocator* profAlloc;
990 FragStatsMap* profTab;
991 #endif
993 /* Flush the JIT cache. */
994 void flush();
996 /* Mark all objects baked into native code in the code cache. */
997 void mark(JSTracer *trc);
999 bool outOfMemory() const;
1002 } /* namespace js */
/*
 * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
 * thread, regardless of whether cx is the context in which that trace is
 * executing. cx must be a context on the current thread.
 */
1009 #ifdef JS_TRACER
1010 # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).tracecx != NULL)
1011 #else
1012 # define JS_ON_TRACE(cx) JS_FALSE
1013 #endif
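/*
 * Typical guard (illustrative): code that must not run while JIT-compiled
 * code is on the stack in this thread checks the macro first, e.g.
 *
 *   if (!JS_ON_TRACE(cx))
 *       PurgeSomeCache(cx);   // hypothetical helper, named for illustration
 */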
1015 /* Number of potentially reusable scriptsToGC to search for the eval cache. */
1016 #ifndef JS_EVAL_CACHE_SHIFT
1017 # define JS_EVAL_CACHE_SHIFT 6
1018 #endif
1019 #define JS_EVAL_CACHE_SIZE JS_BIT(JS_EVAL_CACHE_SHIFT)
1021 #ifdef DEBUG
1022 # define EVAL_CACHE_METER_LIST(_) _(probe), _(hit), _(step), _(noscope)
1023 # define identity(x) x
1025 struct JSEvalCacheMeter {
1026 uint64 EVAL_CACHE_METER_LIST(identity);
1029 # undef identity
1030 #endif
1032 #ifdef DEBUG
1033 # define FUNCTION_KIND_METER_LIST(_) \
1034 _(allfun), _(heavy), _(nofreeupvar), _(onlyfreevar), \
1035 _(display), _(flat), _(setupvar), _(badfunarg), \
1036 _(joinedsetmethod), _(joinedinitmethod), \
1037 _(joinedreplace), _(joinedsort), _(joinedmodulepat), \
1038 _(mreadbarrier), _(mwritebarrier), _(mwslotbarrier), \
1039 _(unjoined)
1040 # define identity(x) x
1042 struct JSFunctionMeter {
1043 int32 FUNCTION_KIND_METER_LIST(identity);
1046 # undef identity
1048 # define JS_FUNCTION_METER(cx,x) JS_RUNTIME_METER((cx)->runtime, functionMeter.x)
1049 #else
1050 # define JS_FUNCTION_METER(cx,x) ((void)0)
1051 #endif
1054 #define NATIVE_ITER_CACHE_LOG2 8
1055 #define NATIVE_ITER_CACHE_MASK JS_BITMASK(NATIVE_ITER_CACHE_LOG2)
1056 #define NATIVE_ITER_CACHE_SIZE JS_BIT(NATIVE_ITER_CACHE_LOG2)
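/*
 * Index sketch (illustrative): the native-iterator cache below is a
 * direct-mapped table, so a lookup masks a hash of the iterated shapes down
 * to an index, roughly
 *
 *   JSObject **cachep =
 *       &JS_THREAD_DATA(cx)->cachedNativeIterators[key & NATIVE_ITER_CACHE_MASK];
 *
 * giving NATIVE_ITER_CACHE_SIZE == 1 << 8 == 256 entries; see jsiter.cpp.
 */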
1058 struct JSPendingProxyOperation {
1059 JSPendingProxyOperation *next;
1060 JSObject *object;
1063 struct JSThreadData {
1065 * If this flag is set, we were asked to call back the operation callback
1066 * as soon as possible.
1068 volatile jsword interruptFlags;
1070 JSGCFreeLists gcFreeLists;
1072 /* Keeper of the contiguous stack used by all contexts in this thread. */
1073 js::StackSpace stackSpace;
1076 * Flag indicating that we are waiving any soft limits on the GC heap
1077 * because we want allocations to be infallible (except when we hit
1078 * a hard quota).
1080 bool waiveGCQuota;
1083 * The GSN cache is per thread since even multi-cx-per-thread embeddings
1084 * do not interleave js_GetSrcNote calls.
1086 JSGSNCache gsnCache;
1088 /* Property cache for faster call/get/set invocation. */
1089 js::PropertyCache propertyCache;
1091 #ifdef JS_TRACER
1092 /* Trace-tree JIT recorder/interpreter state. */
1093 js::TraceMonitor traceMonitor;
1095 /* Counts the number of iterations run by a trace. */
1096 unsigned iterationCounter;
1097 #endif
1099 #ifdef JS_METHODJIT
1100 js::mjit::ThreadData jmData;
1101 #endif
1103 /* Lock-free hashed lists of scripts created by eval to garbage-collect. */
1104 JSScript *scriptsToGC[JS_EVAL_CACHE_SIZE];
1106 #ifdef DEBUG
1107 JSEvalCacheMeter evalCacheMeter;
1108 #endif
1110 /* State used by dtoa.c. */
1111 DtoaState *dtoaState;
1114 * State used to cache some double-to-string conversions. A stupid
1115 * optimization aimed directly at v8-splay.js, which stupidly converts
1116 * many doubles multiple times in a row.
1118 struct {
1119 jsdouble d;
1120 jsint base;
1121 JSString *s; // if s==NULL, d and base are not valid
1122 } dtoaCache;
1124 /* Cached native iterators. */
1125 JSObject *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
1127 /* Native iterator most recently started. */
1128 JSObject *lastNativeIterator;
1130 /* Base address of the native stack for the current thread. */
1131 jsuword *nativeStackBase;
1133 /* List of currently pending operations on proxies. */
1134 JSPendingProxyOperation *pendingProxyOperation;
1136 js::ConservativeGCThreadData conservativeGC;
1138 bool init();
1139 void finish();
1140 void mark(JSTracer *trc);
1141 void purge(JSContext *cx);
1143 static const jsword INTERRUPT_OPERATION_CALLBACK = 0x1;
1145 void triggerOperationCallback() {
1147 * Use JS_ATOMIC_SET in the hope that it will make sure the write will
1148 * become immediately visible to other processors polling the flag.
1149 * Note that we only care about visibility here, not read/write
1150 * ordering.
1152 JS_ATOMIC_SET_MASK(&interruptFlags, INTERRUPT_OPERATION_CALLBACK);
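/*
 * Consumption sketch (illustrative): the interpreter and JITs poll this flag
 * (e.g. on backward branches) and do roughly
 *
 *   if (JS_THREAD_DATA(cx)->interruptFlags && !js_InvokeOperationCallback(cx))
 *       goto error;   // the operation callback asked us to stop
 *
 * where js_InvokeOperationCallback clears the flag before invoking the
 * embedding's callback; see jscntxt.cpp for the authoritative code.
 */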
1156 #ifdef JS_THREADSAFE
1159 * Structure uniquely representing a thread. It holds thread-private data
1160 * that can be accessed without a global lock.
1162 struct JSThread {
1163 typedef js::HashMap<void *,
1164 JSThread *,
1165 js::DefaultHasher<void *>,
1166 js::SystemAllocPolicy> Map;
1168 /* Linked list of all contexts in use on this thread. */
1169 JSCList contextList;
1171 /* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */
1172 void *id;
1174 /* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */
1175 JSTitle *titleToShare;
1178 * This thread is inside js_GC, either waiting until it can start GC, or
1179 * waiting for GC to finish on another thread. This thread holds no locks;
1180 * other threads may steal titles from it.
1182 * Protected by rt->gcLock.
1184 bool gcWaiting;
1186 /* The request depth for this thread. */
1187 unsigned requestDepth;
/* Number of JS_SuspendRequest calls without JS_ResumeRequest. */
1190 unsigned suspendCount;
1192 # ifdef DEBUG
1193 unsigned checkRequestDepth;
1194 # endif
1196 /* Weak ref, for low-cost sealed title locking */
1197 JSTitle *lockedSealedTitle;
1199 /* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
1200 JSThreadData data;
1203 #define JS_THREAD_DATA(cx) (&(cx)->thread->data)
1205 extern JSThread *
1206 js_CurrentThread(JSRuntime *rt);
1209 * The function takes the GC lock and does not release in successful return.
1210 * On error (out of memory) the function releases the lock but delegates
1211 * the error reporting to the caller.
1213 extern JSBool
1214 js_InitContextThread(JSContext *cx);
1217 * On entrance the GC lock must be held and it will be held on exit.
1219 extern void
1220 js_ClearContextThread(JSContext *cx);
1222 #endif /* JS_THREADSAFE */
1224 typedef enum JSDestroyContextMode {
1225 JSDCM_NO_GC,
1226 JSDCM_MAYBE_GC,
1227 JSDCM_FORCE_GC,
1228 JSDCM_NEW_FAILED
1229 } JSDestroyContextMode;
1231 typedef enum JSRuntimeState {
1232 JSRTS_DOWN,
1233 JSRTS_LAUNCHING,
1234 JSRTS_UP,
1235 JSRTS_LANDING
1236 } JSRuntimeState;
1238 typedef struct JSPropertyTreeEntry {
1239 JSDHashEntryHdr hdr;
1240 js::Shape *child;
1241 } JSPropertyTreeEntry;
1244 namespace js {
1246 struct GCPtrHasher
1248 typedef void *Lookup;
1250 static HashNumber hash(void *key) {
1251 return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS);
1254 static bool match(void *l, void *k) {
1255 return l == k;
1259 typedef HashMap<void *, uint32, GCPtrHasher, SystemAllocPolicy> GCLocks;
1261 struct RootInfo {
1262 RootInfo() {}
1263 RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
1264 const char *name;
1265 JSGCRootType type;
1268 typedef js::HashMap<void *,
1269 RootInfo,
1270 js::DefaultHasher<void *>,
1271 js::SystemAllocPolicy> RootedValueMap;
1273 /* If HashNumber grows, need to change WrapperHasher. */
1274 JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
1276 struct WrapperHasher
1278 typedef Value Lookup;
1280 static HashNumber hash(Value key) {
1281 uint64 bits = JSVAL_BITS(Jsvalify(key));
1282 return (uint32)bits ^ (uint32)(bits >> 32);
1285 static bool match(const Value &l, const Value &k) {
1286 return l == k;
1290 typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
1292 class AutoValueVector;
1293 class AutoIdVector;
1295 } /* namespace js */
1297 struct JSCompartment {
1298 JSRuntime *rt;
1299 JSPrincipals *principals;
1300 void *data;
1301 bool marked;
1302 js::WrapperMap crossCompartmentWrappers;
1303 bool debugMode;
1305 /* List all scripts in this compartment. */
1306 JSCList scripts;
1308 JSCompartment(JSRuntime *cx);
1309 ~JSCompartment();
1311 bool init();
1313 bool wrap(JSContext *cx, js::Value *vp);
1314 bool wrap(JSContext *cx, JSString **strp);
1315 bool wrap(JSContext *cx, JSObject **objp);
1316 bool wrapId(JSContext *cx, jsid *idp);
1317 bool wrap(JSContext *cx, js::PropertyOp *op);
1318 bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
1319 bool wrap(JSContext *cx, js::AutoIdVector &props);
1320 bool wrapException(JSContext *cx);
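/*
 * Typical use (illustrative): anything that crosses a compartment boundary is
 * wrapped into the destination compartment before being exposed, e.g.
 *
 *   if (!cx->compartment->wrap(cx, &v))   // assumes cx->compartment is the
 *       return false;                     // destination compartment
 *
 * wrap() consults crossCompartmentWrappers so an object gets at most one
 * wrapper per compartment.
 */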
1322 void sweep(JSContext *cx);
1324 #ifdef JS_METHODJIT
1325 bool addScript(JSContext *cx, JSScript *script);
1326 void removeScript(JSScript *script);
1327 #endif
1328 void purge(JSContext *cx);
1331 typedef void
1332 (* JSActivityCallback)(void *arg, JSBool active);
1334 struct JSRuntime {
1335 /* Default compartment. */
1336 JSCompartment *defaultCompartment;
1338 /* List of compartments (protected by the GC lock). */
1339 js::Vector<JSCompartment *, 0, js::SystemAllocPolicy> compartments;
1341 /* Runtime state, synchronized by the stateChange/gcLock condvar/lock. */
1342 JSRuntimeState state;
1344 /* Context create/destroy callback. */
1345 JSContextCallback cxCallback;
1347 /* Compartment create/destroy callback. */
1348 JSCompartmentCallback compartmentCallback;
1351 * Sets a callback that is run whenever the runtime goes idle - the
1352 * last active request ceases - and begins activity - when it was
1353 * idle and a request begins. Note: The callback is called under the
1354 * GC lock.
1356 void setActivityCallback(JSActivityCallback cb, void *arg) {
1357 activityCallback = cb;
1358 activityCallbackArg = arg;
1361 JSActivityCallback activityCallback;
1362 void *activityCallbackArg;
1365 * Shape regenerated whenever a prototype implicated by an "add property"
1366 * property cache fill and induced trace guard has a readonly property or a
1367 * setter defined on it. This number proxies for the shapes of all objects
1368 * along the prototype chain of all objects in the runtime on which such an
1369 * add-property result has been cached/traced.
1371 * See bug 492355 for more details.
1373 * This comes early in JSRuntime to minimize the immediate format used by
1374 * trace-JITted code that reads it.
1376 uint32 protoHazardShape;
1378 /* Garbage collector state, used by jsgc.c. */
1379 js::GCChunkSet gcChunkSet;
1381 /* GC chunks with at least one free arena. */
1382 js::GCChunkInfoVector gcFreeArenaChunks;
1383 #ifdef DEBUG
1384 JSGCArena *gcEmptyArenaList;
1385 #endif
1386 JSGCArenaList gcArenaList[FINALIZE_LIMIT];
1387 js::RootedValueMap gcRootsHash;
1388 js::GCLocks gcLocksHash;
1389 jsrefcount gcKeepAtoms;
1390 size_t gcBytes;
1391 size_t gcLastBytes;
1392 size_t gcMaxBytes;
1393 size_t gcMaxMallocBytes;
1394 size_t gcNewArenaTriggerBytes;
1395 uint32 gcEmptyArenaPoolLifespan;
1396 uint32 gcNumber;
1397 js::GCMarker *gcMarkingTracer;
1398 uint32 gcTriggerFactor;
1399 size_t gcTriggerBytes;
1400 volatile JSBool gcIsNeeded;
1401 volatile JSBool gcFlushCodeCaches;
1404 * NB: do not pack another flag here by claiming gcPadding unless the new
1405 * flag is written only by the GC thread. Atomic updates to packed bytes
1406 * are not guaranteed, so stores issued by one thread may be lost due to
1407 * unsynchronized read-modify-write cycles on other threads.
1409 bool gcPoke;
1410 bool gcMarkAndSweep;
1411 bool gcRunning;
1412 bool gcRegenShapes;
1414 #ifdef JS_GC_ZEAL
1415 jsrefcount gcZeal;
1416 #endif
1418 JSGCCallback gcCallback;
1420 private:
1422 * Malloc counter to measure memory pressure for GC scheduling. It runs
1423 * from gcMaxMallocBytes down to zero.
1425 volatile ptrdiff_t gcMallocBytes;
1427 public:
1428 js::GCChunkAllocator *gcChunkAllocator;
1430 void setCustomGCChunkAllocator(js::GCChunkAllocator *allocator) {
1431 JS_ASSERT(allocator);
1432 JS_ASSERT(state == JSRTS_DOWN);
1433 gcChunkAllocator = allocator;
1437 * The trace operation and its data argument to trace embedding-specific
1438 * GC roots.
1440 JSTraceDataOp gcExtraRootsTraceOp;
1441 void *gcExtraRootsData;
1443 /* Well-known numbers held for use by this runtime's contexts. */
1444 js::Value NaNValue;
1445 js::Value negativeInfinityValue;
1446 js::Value positiveInfinityValue;
1448 js::DeflatedStringCache *deflatedStringCache;
1450 JSString *emptyString;
1452 /* List of active contexts sharing this runtime; protected by gcLock. */
1453 JSCList contextList;
1455 /* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */
1456 JSDebugHooks globalDebugHooks;
1458 #ifdef JS_TRACER
1459 /* True if any debug hooks not supported by the JIT are enabled. */
1460 bool debuggerInhibitsJIT() const {
1461 return (globalDebugHooks.interruptHook ||
1462 globalDebugHooks.callHook);
1464 #endif
1466 /* More debugging state, see jsdbgapi.c. */
1467 JSCList trapList;
1468 JSCList watchPointList;
1470 /* Client opaque pointers */
1471 void *data;
1473 #ifdef JS_THREADSAFE
1474 /* These combine to interlock the GC and new requests. */
1475 PRLock *gcLock;
1476 PRCondVar *gcDone;
1477 PRCondVar *requestDone;
1478 uint32 requestCount;
1479 JSThread *gcThread;
1481 js::GCHelperThread gcHelperThread;
1483 /* Lock and owning thread pointer for JS_LOCK_RUNTIME. */
1484 PRLock *rtLock;
1485 #ifdef DEBUG
1486 void * rtLockOwner;
1487 #endif
1489 /* Used to synchronize down/up state change; protected by gcLock. */
1490 PRCondVar *stateChange;
1493 * State for sharing single-threaded titles, once a second thread tries to
1494 * lock a title. The titleSharingDone condvar is protected by rt->gcLock
1495 * to minimize number of locks taken in JS_EndRequest.
1497 * The titleSharingTodo linked list is likewise "global" per runtime, not
1498 * one-list-per-context, to conserve space over all contexts, optimizing
1499 * for the likely case that titles become shared rarely, and among a very
1500 * small set of threads (contexts).
1502 PRCondVar *titleSharingDone;
1503 JSTitle *titleSharingTodo;
1506 * Magic terminator for the rt->titleSharingTodo linked list, threaded through
1507 * title->u.link. This hack allows us to test whether a title is on the list
1508 * by asking whether title->u.link is non-null. We use a large, likely bogus
1509 * pointer here to distinguish this value from any valid u.count (small int)
1510 * value.
1512 #define NO_TITLE_SHARING_TODO ((JSTitle *) 0xfeedbeef)
1515 * Lock serializing trapList and watchPointList accesses, and count of all
1516 * mutations to trapList and watchPointList made by debugger threads. To
1517 * keep the code simple, we define debuggerMutations for the thread-unsafe
1518 * case too.
1520 PRLock *debuggerLock;
1522 JSThread::Map threads;
1523 #endif /* JS_THREADSAFE */
1524 uint32 debuggerMutations;
1527 * Security callbacks set on the runtime are used by each context unless
1528 * an override is set on the context.
1530 JSSecurityCallbacks *securityCallbacks;
1533 * Shared scope property tree, and arena-pool for allocating its nodes.
1534 * This really should be free of all locking overhead and allocated in
1535 * thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
1537 js::PropertyTree propertyTree;
1539 #define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
1542 * The propertyRemovals counter is incremented for every JSObject::clear,
1543 * and for each JSObject::remove method call that frees a slot in the given
1544 * object. See js_NativeGet and js_NativeSet in jsobj.cpp.
1546 int32 propertyRemovals;
1548 /* Script filename table. */
1549 struct JSHashTable *scriptFilenameTable;
1550 JSCList scriptFilenamePrefixes;
1551 #ifdef JS_THREADSAFE
1552 PRLock *scriptFilenameTableLock;
1553 #endif
1555 /* Number localization, used by jsnum.c */
1556 const char *thousandsSeparator;
1557 const char *decimalSeparator;
1558 const char *numGrouping;
1561 * Weak references to lazily-created, well-known XML singletons.
1563 * NB: Singleton objects must be carefully disconnected from the rest of
1564 * the object graph usually associated with a JSContext's global object,
1565 * including the set of standard class objects. See jsxml.c for details.
1567 JSObject *anynameObject;
1568 JSObject *functionNamespaceObject;
1570 #ifndef JS_THREADSAFE
1571 JSThreadData threadData;
1573 #define JS_THREAD_DATA(cx) (&(cx)->runtime->threadData)
1574 #endif
1577 * Object shape (property cache structural type) identifier generator.
1579 * Type 0 stands for the empty scope, and must not be regenerated due to
1580 * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
1581 * atomic pre-increment, the initial value for the first typed non-empty
1582 * scope will be 1.
1584 * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
1585 * cache is disabled, to avoid aliasing two different types. It stays
1586 * disabled until a triggered GC at some later moment compresses live
1587 * types, minimizing rt->shapeGen in the process.
1589 volatile uint32 shapeGen;
1591 /* Literal table maintained by jsatom.c functions. */
1592 JSAtomState atomState;
1595 * Runtime-shared empty scopes for well-known built-in objects that lack
1596 * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
1598 js::EmptyShape *emptyArgumentsShape;
1599 js::EmptyShape *emptyBlockShape;
1600 js::EmptyShape *emptyCallShape;
1601 js::EmptyShape *emptyDeclEnvShape;
1602 js::EmptyShape *emptyEnumeratorShape;
1603 js::EmptyShape *emptyWithShape;
1606 * Various metering fields are defined at the end of JSRuntime. In this
1607 * way there is no need to recompile all the code that refers to other
1608 * fields of JSRuntime after enabling the corresponding metering macro.
1610 #ifdef JS_DUMP_ENUM_CACHE_STATS
1611 int32 nativeEnumProbes;
1612 int32 nativeEnumMisses;
1613 # define ENUM_CACHE_METER(name) JS_ATOMIC_INCREMENT(&cx->runtime->name)
1614 #else
1615 # define ENUM_CACHE_METER(name) ((void) 0)
1616 #endif
1618 #ifdef JS_DUMP_LOOP_STATS
1619 /* Loop statistics, to trigger trace recording and compiling. */
1620 JSBasicStats loopStats;
1621 #endif
1623 #ifdef DEBUG
1624 /* Function invocation metering. */
1625 jsrefcount inlineCalls;
1626 jsrefcount nativeCalls;
1627 jsrefcount nonInlineCalls;
1628 jsrefcount constructs;
1630 /* Title lock and scope property metering. */
1631 jsrefcount claimAttempts;
1632 jsrefcount claimedTitles;
1633 jsrefcount deadContexts;
1634 jsrefcount deadlocksAvoided;
1635 jsrefcount liveShapes;
1636 jsrefcount sharedTitles;
1637 jsrefcount totalShapes;
1638 jsrefcount liveObjectProps;
1639 jsrefcount liveObjectPropsPreSweep;
1640 jsrefcount totalObjectProps;
1641 jsrefcount livePropTreeNodes;
1642 jsrefcount duplicatePropTreeNodes;
1643 jsrefcount totalPropTreeNodes;
1644 jsrefcount propTreeKidsChunks;
1645 jsrefcount liveDictModeNodes;
1648 * NB: emptyShapes is init'ed iff at least one of these envars is set:
1650 * JS_PROPTREE_STATFILE statistics on the property tree forest
1651 * JS_PROPTREE_DUMPFILE all paths in the property tree forest
1653 const char *propTreeStatFilename;
1654 const char *propTreeDumpFilename;
1656 bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
1658 typedef js::HashSet<js::EmptyShape *,
1659 js::DefaultHasher<js::EmptyShape *>,
1660 js::SystemAllocPolicy> EmptyShapeSet;
1662 EmptyShapeSet emptyShapes;
1664 /* String instrumentation. */
1665 jsrefcount liveStrings;
1666 jsrefcount totalStrings;
1667 jsrefcount liveDependentStrings;
1668 jsrefcount totalDependentStrings;
1669 jsrefcount badUndependStrings;
1670 double lengthSum;
1671 double lengthSquaredSum;
1672 double strdepLengthSum;
1673 double strdepLengthSquaredSum;
1675 /* Script instrumentation. */
1676 jsrefcount liveScripts;
1677 jsrefcount totalScripts;
1678 jsrefcount liveEmptyScripts;
1679 jsrefcount totalEmptyScripts;
1680 #endif /* DEBUG */
1682 #ifdef JS_SCOPE_DEPTH_METER
1684 * Stats on runtime prototype chain lookups and scope chain depths, i.e.,
1685 * counts of objects traversed on a chain until the wanted id is found.
1687 JSBasicStats protoLookupDepthStats;
1688 JSBasicStats scopeSearchDepthStats;
1691 * Stats on compile-time host environment and lexical scope chain lengths
1692 * (maximum depths).
1694 JSBasicStats hostenvScopeDepthStats;
1695 JSBasicStats lexicalScopeDepthStats;
1696 #endif
1698 #ifdef JS_GCMETER
1699 JSGCStats gcStats;
1700 JSGCArenaStats gcArenaStats[FINALIZE_LIMIT];
1701 #endif
1703 #ifdef DEBUG
1705 * If functionMeterFilename, set from an envariable in JSRuntime's ctor, is
1706 * null, the remaining members in this ifdef'ed group are not initialized.
1708 const char *functionMeterFilename;
1709 JSFunctionMeter functionMeter;
1710 char lastScriptFilename[1024];
1712 typedef js::HashMap<JSFunction *,
1713 int32,
1714 js::DefaultHasher<JSFunction *>,
1715 js::SystemAllocPolicy> FunctionCountMap;
1717 FunctionCountMap methodReadBarrierCountMap;
1718 FunctionCountMap unjoinedFunctionCountMap;
1719 #endif
1721 JSWrapObjectCallback wrapObjectCallback;
1723 JSC::ExecutableAllocator *regExpAllocator;
1725 JSRuntime();
1726 ~JSRuntime();
1728 bool init(uint32 maxbytes);
1730 void setGCTriggerFactor(uint32 factor);
1731 void setGCLastBytes(size_t lastBytes);
1734 * Call the system malloc while checking for GC memory pressure and
1735 * reporting OOM error when cx is not null.
1737 void* malloc(size_t bytes, JSContext *cx = NULL) {
1738 updateMallocCounter(bytes);
1739 void *p = ::js_malloc(bytes);
1740 return JS_LIKELY(!!p) ? p : onOutOfMemory(NULL, bytes, cx);
1744 * Call the system calloc while checking for GC memory pressure and
1745 * reporting OOM error when cx is not null.
1747 void* calloc(size_t bytes, JSContext *cx = NULL) {
1748 updateMallocCounter(bytes);
1749 void *p = ::js_calloc(bytes);
1750 return JS_LIKELY(!!p) ? p : onOutOfMemory(reinterpret_cast<void *>(1), bytes, cx);
1753 void* realloc(void* p, size_t bytes, JSContext *cx = NULL) {
/*
 * For compatibility we do not account for realloc that increases
 * previously allocated memory.
 */
1758 if (!p)
1759 updateMallocCounter(bytes);
1760 void *p2 = ::js_realloc(p, bytes);
1761 return JS_LIKELY(!!p2) ? p2 : onOutOfMemory(p, bytes, cx);
1764 void free(void* p) { ::js_free(p); }
1766 bool isGCMallocLimitReached() const { return gcMallocBytes <= 0; }
1768 void resetGCMallocBytes() { gcMallocBytes = ptrdiff_t(gcMaxMallocBytes); }
1770 void setGCMaxMallocBytes(size_t value) {
/*
 * For compatibility, treat any value that exceeds PTRDIFF_T_MAX as
 * PTRDIFF_T_MAX.
 */
1775 gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
1776 resetGCMallocBytes();
/*
 * Call this after allocating memory held by GC things, to update memory
 * pressure counters or report the OOM error if necessary. If oomError and
 * cx is not null the function also reports the OOM error.
 *
 * The function must be called outside the GC lock, and in case of OOM error
 * the caller must ensure that no deadlock is possible during OOM reporting.
 */
1787 void updateMallocCounter(size_t nbytes) {
1788 /* We tolerate any thread races when updating gcMallocBytes. */
1789 ptrdiff_t newCount = gcMallocBytes - ptrdiff_t(nbytes);
1790 gcMallocBytes = newCount;
1791 if (JS_UNLIKELY(newCount <= 0))
1792 onTooMuchMalloc();
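/*
 * Illustrative use (a sketch): code that allocates GC-visible memory with the
 * raw system allocator keeps the GC scheduler informed, e.g.
 *
 *   void *buf = js_malloc(nbytes);
 *   if (buf)
 *       cx->runtime->updateMallocCounter(nbytes);
 *
 * whereas the malloc/calloc/realloc members above do this bookkeeping (and
 * OOM reporting) themselves.
 */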
1795 private:
/*
 * The function must be called outside the GC lock.
 */
1799 JS_FRIEND_API(void) onTooMuchMalloc();
/*
 * This should be called after system malloc/realloc returns NULL to try
 * to recover some memory or to report an error. Failures in malloc and
 * calloc are signaled by p == null and p == reinterpret_cast<void *>(1).
 * Other values of p mean a realloc failure.
 *
 * The function must be called outside the GC lock.
 */
JS_FRIEND_API(void *) onOutOfMemory(void *p, size_t nbytes, JSContext *cx);
1812 /* Common macros to access thread-local caches in JSThread or JSRuntime. */
1813 #define JS_GSN_CACHE(cx) (JS_THREAD_DATA(cx)->gsnCache)
1814 #define JS_PROPERTY_CACHE(cx) (JS_THREAD_DATA(cx)->propertyCache)
1815 #define JS_TRACE_MONITOR(cx) (JS_THREAD_DATA(cx)->traceMonitor)
1816 #define JS_METHODJIT_DATA(cx) (JS_THREAD_DATA(cx)->jmData)
1817 #define JS_SCRIPTS_TO_GC(cx) (JS_THREAD_DATA(cx)->scriptsToGC)
1819 #ifdef DEBUG
1820 # define EVAL_CACHE_METER(x) (JS_THREAD_DATA(cx)->evalCacheMeter.x++)
1821 #else
1822 # define EVAL_CACHE_METER(x) ((void) 0)
1823 #endif
1825 #ifdef DEBUG
1826 # define JS_RUNTIME_METER(rt, which) JS_ATOMIC_INCREMENT(&(rt)->which)
1827 # define JS_RUNTIME_UNMETER(rt, which) JS_ATOMIC_DECREMENT(&(rt)->which)
1828 #else
1829 # define JS_RUNTIME_METER(rt, which) /* nothing */
1830 # define JS_RUNTIME_UNMETER(rt, which) /* nothing */
1831 #endif
1833 #define JS_KEEP_ATOMS(rt) JS_ATOMIC_INCREMENT(&(rt)->gcKeepAtoms);
1834 #define JS_UNKEEP_ATOMS(rt) JS_ATOMIC_DECREMENT(&(rt)->gcKeepAtoms);
1836 #ifdef JS_ARGUMENT_FORMATTER_DEFINED
1838 * Linked list mapping format strings for JS_{Convert,Push}Arguments{,VA} to
1839 * formatter functions. Elements are sorted in non-increasing format string
1840 * length order.
1842 struct JSArgumentFormatMap {
1843 const char *format;
1844 size_t length;
1845 JSArgumentFormatter formatter;
1846 JSArgumentFormatMap *next;
1848 #endif
1851 * Key and entry types for the JSContext.resolvingTable hash table, typedef'd
1852 * here because all consumers need to see these declarations (and not just the
1853 * typedef names, as would be the case for an opaque pointer-to-typedef'd-type
1854 * declaration), along with cx->resolvingTable.
1856 typedef struct JSResolvingKey {
1857 JSObject *obj;
1858 jsid id;
1859 } JSResolvingKey;
1861 typedef struct JSResolvingEntry {
1862 JSDHashEntryHdr hdr;
1863 JSResolvingKey key;
1864 uint32 flags;
1865 } JSResolvingEntry;
1867 #define JSRESFLAG_LOOKUP 0x1 /* resolving id from lookup */
1868 #define JSRESFLAG_WATCH 0x2 /* resolving id from watch */
1869 #define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */
1871 extern const JSDebugHooks js_NullDebugHooks; /* defined in jsdbgapi.cpp */
1873 namespace js {
1875 class AutoGCRooter;
1877 class RegExpStatics
1879 js::Vector<int, 20> matchPairs;
1880 JSContext *cx;
1881 JSString *input;
1882 uintN flags;
1884 bool createDependent(size_t start, size_t end, Value *out) const;
1886 size_t pairCount() const {
1887 JS_ASSERT(matchPairs.length() % 2 == 0);
1888 return matchPairs.length() / 2;
1891 * Check whether the index at |checkValidIndex| is valid (>= 0).
1892 * If so, construct a string for it and place it in |*out|.
1893 * If not, place undefined in |*out|.
1895 bool makeMatch(size_t checkValidIndex, size_t pairNum, Value *out) const;
1896 static const uintN allFlags = JSREG_FOLD | JSREG_GLOB | JSREG_STICKY | JSREG_MULTILINE;
1897 friend class RegExp;
1899 public:
1900 explicit RegExpStatics(JSContext *cx) : matchPairs(cx), cx(cx) { clear(); }
1901 void clone(const RegExpStatics &other);
1903 /* Mutators. */
1905 void setMultiline(bool enabled) {
1906 if (enabled)
1907 flags = flags | JSREG_MULTILINE;
1908 else
1909 flags = flags & ~JSREG_MULTILINE;
1912 void clear() {
1913 input = 0;
1914 flags = 0;
1915 matchPairs.clear();
1918 void checkInvariants() {
1919 if (pairCount() > 0) {
1920 JS_ASSERT(input);
1921 JS_ASSERT(get(0, 0) <= get(0, 1));
1922 JS_ASSERT(get(0, 1) <= int(input->length()));
1926 void reset(JSString *newInput, bool newMultiline) {
1927 clear();
1928 input = newInput;
1929 setMultiline(newMultiline);
1930 checkInvariants();
1933 void setInput(JSString *newInput) {
1934 input = newInput;
1937 /* Accessors. */
1939 JSString *getInput() const { return input; }
1940 uintN getFlags() const { return flags; }
1941 bool multiline() const { return flags & JSREG_MULTILINE; }
1942 bool matched() const { JS_ASSERT(pairCount() > 0); return get(0, 1) - get(0, 0) > 0; }
1943 size_t getParenCount() const { JS_ASSERT(pairCount() > 0); return pairCount() - 1; }
1945 void mark(JSTracer *trc) const {
1946 if (input)
1947 JS_CALL_STRING_TRACER(trc, input, "res->input");
1950 size_t getParenLength(size_t parenNum) const {
1951 if (pairCount() <= parenNum + 1)
1952 return 0;
1953 return get(parenNum + 1, 1) - get(parenNum + 1, 0);
1956 int get(size_t pairNum, bool which) const {
1957 JS_ASSERT(pairNum < pairCount());
1958 return matchPairs[2 * pairNum + which];
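    /*
     * Illustrative sketch (not part of the original header): matchPairs is a
     * flat vector of (start, limit) pairs, so for some statics object |res|
     * the whole-match bounds come from pair 0:
     *
     *   int start = res.get(0, 0);          // which == 0 -> start index
     *   int limit = res.get(0, 1);          // which == 1 -> limit index
     *   size_t matchLen = size_t(limit - start);
     */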
1961 /* Value creators. */
1963 bool createInput(Value *out) const;
1964 bool createLastMatch(Value *out) const { return makeMatch(0, 0, out); }
1965 bool createLastParen(Value *out) const;
1966 bool createLeftContext(Value *out) const;
1967 bool createRightContext(Value *out) const;
1969 bool createParen(size_t parenNum, Value *out) const {
1970 return makeMatch((parenNum + 1) * 2, parenNum + 1, out);
1973 /* Substring creators. */
1975 void getParen(size_t num, JSSubString *out) const;
1976 void getLastMatch(JSSubString *out) const;
1977 void getLastParen(JSSubString *out) const;
1978 void getLeftContext(JSSubString *out) const;
1979 void getRightContext(JSSubString *out) const;
1982 } /* namespace js */
1984 struct JSContext
1986 explicit JSContext(JSRuntime *rt);
1988 /* JSRuntime contextList linkage. */
1989 JSCList link;
1991 /* Runtime version control identifier. */
1992 uint16 version;
1994 /* Per-context options. */
1995 uint32 options; /* see jsapi.h for JSOPTION_* */
1997 /* Locale specific callbacks for string conversion. */
1998 JSLocaleCallbacks *localeCallbacks;
2001 * cx->resolvingTable is non-null and non-empty if we are initializing
2002 * standard classes lazily, or if we are otherwise recursing indirectly
2003 * from js_LookupProperty through a Class.resolve hook. It is used to
2004 * limit runaway recursion (see jsapi.c and jsobj.c).
2006 JSDHashTable *resolvingTable;
2009 * True if generating an error, to prevent runaway recursion.
2010 * NB: generatingError packs with throwing below.
2012 JSPackedBool generatingError;
2014 /* Exception state -- the exception member is a GC root by definition. */
2015 JSBool throwing; /* is there a pending exception? */
2016 js::Value exception; /* most-recently-thrown exception */
2018 /* Limit pointer for checking native stack consumption during recursion. */
2019 jsuword stackLimit;
2021 /* Quota on the size of arenas used to compile and execute scripts. */
2022 size_t scriptStackQuota;
2024 /* Data shared by threads in an address space. */
2025 JSRuntime *const runtime;
2027 /* GC heap compartment. */
2028 JSCompartment *compartment;
2030 /* Currently executing frame and regs, set by stack operations. */
2031 JS_REQUIRES_STACK
2032 JSFrameRegs *regs;
2034 /* Current frame accessors. */
2036 JSStackFrame* fp() {
2037 JS_ASSERT(regs && regs->fp);
2038 return regs->fp;
2041 JSStackFrame* maybefp() {
2042 JS_ASSERT_IF(regs, regs->fp);
2043 return regs ? regs->fp : NULL;
2046 bool hasfp() {
2047 JS_ASSERT_IF(regs, regs->fp);
2048 return !!regs;
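    /*
     * Illustrative sketch (not part of the original header): code that may
     * run with no interpreter activation should use maybefp() or test
     * hasfp() first, since fp() asserts that regs is set:
     *
     *   JSStackFrame *fp = cx->maybefp();   // NULL when no frame is active
     *   if (fp && fp->hasScript())
     *       ... inspect fp->getScript() ...
     */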
2051 public:
2052 friend class js::StackSpace;
2053 friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, uintN);
2055 /* 'regs' must only be changed by calling this function. */
2056 void setCurrentRegs(JSFrameRegs *regs) {
2057 this->regs = regs;
2060 /* Temporary arena pool used while compiling and decompiling. */
2061 JSArenaPool tempPool;
2063 /* Temporary arena pool used while evaluating regular expressions. */
2064 JSArenaPool regExpPool;
2066 /* Top-level object and pointer to top stack frame's scope chain. */
2067 JSObject *globalObject;
2069 /* Regular expression class statics. */
2070 js::RegExpStatics regExpStatics;
2072 /* State for object and array toSource conversion. */
2073 JSSharpObjectMap sharpObjectMap;
2074 js::HashSet<JSObject *> busyArrays;
2076 /* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */
2077 JSArgumentFormatMap *argumentFormatMap;
2079 /* Last message string and trace file for debugging. */
2080 char *lastMessage;
2081 #ifdef DEBUG
2082 void *tracefp;
2083 jsbytecode *tracePrevPc;
2084 #endif
2086 /* Per-context optional error reporter. */
2087 JSErrorReporter errorReporter;
2089 /* Branch callback. */
2090 JSOperationCallback operationCallback;
2092 /* Interpreter activation count. */
2093 uintN interpLevel;
2095 /* Client opaque pointers. */
2096 void *data;
2097 void *data2;
2099 private:
2100 /* Linked list of segments. See StackSegment. */
2101 js::StackSegment *currentSegment;
2103 public:
2104 void assertSegmentsInSync() const {
2105 #ifdef DEBUG
2106 if (regs) {
2107 JS_ASSERT(currentSegment->isActive());
2108 if (js::StackSegment *prev = currentSegment->getPreviousInContext())
2109 JS_ASSERT(!prev->isActive());
2110 } else {
2111 JS_ASSERT_IF(currentSegment, !currentSegment->isActive());
2113 #endif
2116 /* Return whether this context has an active segment. */
2117 bool hasActiveSegment() const {
2118 assertSegmentsInSync();
2119 return !!regs;
2122 /* Assuming there is an active segment, return it. */
2123 js::StackSegment *activeSegment() const {
2124 JS_ASSERT(hasActiveSegment());
2125 return currentSegment;
2128 /* Return the current segment, which may or may not be active. */
2129 js::StackSegment *getCurrentSegment() const {
2130 assertSegmentsInSync();
2131 return currentSegment;
2134 /* Add the given segment to the list as the new active segment. */
2135 void pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &regs);
2137 /* Remove the active segment and make the next segment active. */
2138 void popSegmentAndFrame();
2140 /* Mark the top segment as suspended, without pushing a new one. */
2141 void saveActiveSegment();
2143 /* Undoes calls to suspendActiveSegment. */
2144 void restoreSegment();
2147 * Perform a linear search of all frames in all segments in the given context
2148 * for the given frame, returning the segment, if found, and null otherwise.
2150 js::StackSegment *containingSegment(const JSStackFrame *target);
2153 * Search the call stack for the nearest frame with static level targetLevel.
2155 JSStackFrame *findFrameAtLevel(uintN targetLevel) {
2156 JSStackFrame *fp = this->regs->fp;
2157 while (true) {
2158 JS_ASSERT(fp && fp->hasScript());
2159 if (fp->getScript()->staticLevel == targetLevel)
2160 break;
2161 fp = fp->down;
2163 return fp;
2166 #ifdef JS_THREADSAFE
2167 JSThread *thread;
2168 unsigned outstandingRequests;/* number of JS_BeginRequest calls
2169 without the corresponding
2170 JS_EndRequest. */
2171 JSCList threadLinks; /* JSThread contextList linkage */
2173 #define CX_FROM_THREAD_LINKS(tl) \
2174 ((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
2175 #endif
2177 /* Stack of thread-stack-allocated GC roots. */
2178 js::AutoGCRooter *autoGCRooters;
2180 /* Debug hooks associated with the current context. */
2181 const JSDebugHooks *debugHooks;
2183 /* Security callbacks that override any defined on the runtime. */
2184 JSSecurityCallbacks *securityCallbacks;
2186 /* Stored here to avoid passing it around as a parameter. */
2187 uintN resolveFlags;
2189 /* Random number generator state, used by jsmath.cpp. */
2190 int64 rngSeed;
2192 /* Location to stash the iteration value between JSOP_MOREITER and JSOP_FOR*. */
2193 js::Value iterValue;
2195 #ifdef JS_TRACER
2197 * State for the current tree execution. bailExit is valid if the tree has
2198 * called back into native code via a _FAIL builtin and has not yet bailed,
2199 * else garbage (NULL in debug builds).
2201 js::TracerState *tracerState;
2202 js::VMSideExit *bailExit;
2205 * True if traces may be executed. Invariant: The value of jitEnabled is
2206 * always equal to the expression in updateJITEnabled below.
2208 * This flag and the fields accessed by updateJITEnabled are written only
2209 * while holding runtime->gcLock, to avoid races that would leave the wrong
2210 * value in jitEnabled. (But the interpreter reads this without
2211 * locking. That can race against another thread setting debug hooks, but
2212 * we always read cx->debugHooks without locking anyway.)
2214 bool jitEnabled;
2215 #endif
2217 /* Caller must be holding runtime->gcLock. */
2218 void updateJITEnabled() {
2219 #ifdef JS_TRACER
2220 jitEnabled = ((options & JSOPTION_JIT) &&
2221 (debugHooks == &js_NullDebugHooks ||
2222 (debugHooks == &runtime->globalDebugHooks &&
2223 !runtime->debuggerInhibitsJIT())));
2224 #endif
2227 #ifdef MOZ_TRACE_JSCALLS
2228 /* Function entry/exit debugging callback. */
2229 JSFunctionCallback functionCallback;
2231 void doFunctionCallback(const JSFunction *fun,
2232 const JSScript *scr,
2233 JSBool entering) const
2235 if (functionCallback)
2236 functionCallback(fun, scr, this, entering);
2238 #endif
2240 DSTOffsetCache dstOffsetCache;
2242 /* List of currently active non-escaping enumerators (for-in). */
2243 JSObject *enumerators;
2245 private:
2247 * To go from a live generator frame (on the stack) to its generator object
2248 * (see the comment at js_FloatingFrameIfGenerator), we maintain a stack of active
2249 * generators, pushing and popping when entering and leaving generator
2250 * frames, respectively.
2252 js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
2254 public:
2255 /* Return the generator object for the given generator frame. */
2256 JSGenerator *generatorFor(JSStackFrame *fp) const;
2258 /* Early OOM-check. */
2259 inline bool ensureGeneratorStackSpace();
2261 bool enterGenerator(JSGenerator *gen) {
2262 return genStack.append(gen);
2265 void leaveGenerator(JSGenerator *gen) {
2266 JS_ASSERT(genStack.back() == gen);
2267 genStack.popBack();
2270 #ifdef JS_THREADSAFE
2272 * When non-null, JSContext::free delegates the job to the background
2273 * thread.
2275 js::GCHelperThread *gcBackgroundFree;
2276 #endif
2278 inline void* malloc(size_t bytes) {
2279 return runtime->malloc(bytes, this);
2282 inline void* mallocNoReport(size_t bytes) {
2283 JS_ASSERT(bytes != 0);
2284 return runtime->malloc(bytes, NULL);
2287 inline void* calloc(size_t bytes) {
2288 JS_ASSERT(bytes != 0);
2289 return runtime->calloc(bytes, this);
2292 inline void* realloc(void* p, size_t bytes) {
2293 return runtime->realloc(p, bytes, this);
2296 inline void free(void* p) {
2297 #ifdef JS_THREADSAFE
2298 if (gcBackgroundFree) {
2299 gcBackgroundFree->freeLater(p);
2300 return;
2302 #endif
2303 runtime->free(p);
2307 * In the common case that we'd like to allocate the memory for an object
2308 * with cx->malloc/free, we cannot use overloaded C++ operators (no
2309 * placement delete). Factor the common workaround into one place.
2311 #define CREATE_BODY(parms) \
2312 void *memory = this->malloc(sizeof(T)); \
2313 if (!memory) \
2314 return NULL; \
2315 return new(memory) T parms;
2317 template <class T>
2318 JS_ALWAYS_INLINE T *create() {
2319 CREATE_BODY(())
2322 template <class T, class P1>
2323 JS_ALWAYS_INLINE T *create(const P1 &p1) {
2324 CREATE_BODY((p1))
2327 template <class T, class P1, class P2>
2328 JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2) {
2329 CREATE_BODY((p1, p2))
2332 template <class T, class P1, class P2, class P3>
2333 JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2, const P3 &p3) {
2334 CREATE_BODY((p1, p2, p3))
2336 #undef CREATE_BODY
2338 template <class T>
2339 JS_ALWAYS_INLINE void destroy(T *p) {
2340 p->~T();
2341 this->free(p);
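    /*
     * Illustrative sketch (not part of the original header): create<T> and
     * destroy replace new/delete for objects whose storage must come from
     * cx->malloc/free. For a hypothetical type Foo with a (JSContext *, int)
     * constructor:
     *
     *   Foo *foo = cx->create<Foo>(cx, 42);
     *   if (!foo)
     *       return false;       // OOM was already reported
     *   ...
     *   cx->destroy(foo);       // runs ~Foo, then cx->free
     */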
2344 void purge();
2346 js::StackSpace &stack() const {
2347 return JS_THREAD_DATA(this)->stackSpace;
2350 #ifdef DEBUG
2351 void assertValidStackDepth(uintN depth) {
2352 JS_ASSERT(0 <= regs->sp - regs->fp->base());
2353 JS_ASSERT(depth <= uintptr_t(regs->sp - regs->fp->base()));
2355 #else
2356 void assertValidStackDepth(uintN /*depth*/) {}
2357 #endif
2359 private:
2362 * The allocation code calls the function to indicate either OOM failure
2363 * when p is null or that a memory pressure counter has reached some
2364 * threshold when p is not null. The function takes the pointer and not
2365 * a boolean flag to minimize the amount of code in its inlined callers.
2367 JS_FRIEND_API(void) checkMallocGCPressure(void *p);
2370 static inline void
2371 js_TraceRegExpStatics(JSTracer *trc, JSContext *acx)
2373 acx->regExpStatics.mark(trc);
2376 JS_ALWAYS_INLINE JSObject *
2377 JSStackFrame::varobj(js::StackSegment *seg) const
2379 JS_ASSERT(seg->contains(this));
2380 return hasFunction() ? maybeCallObj() : seg->getInitialVarObj();
2383 JS_ALWAYS_INLINE JSObject *
2384 JSStackFrame::varobj(JSContext *cx) const
2386 JS_ASSERT(cx->activeSegment()->contains(this));
2387 return hasFunction() ? maybeCallObj() : cx->activeSegment()->getInitialVarObj();
2390 JS_ALWAYS_INLINE jsbytecode *
2391 JSStackFrame::pc(JSContext *cx) const
2393 JS_ASSERT(cx->regs && cx->containingSegment(this) != NULL);
2394 return (cx->regs->fp == this) ? cx->regs->pc : savedPC;
2397 #ifdef JS_THREADSAFE
2398 # define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0)
2399 #endif
2401 #if defined JS_THREADSAFE && defined DEBUG
2403 namespace js {
2405 class AutoCheckRequestDepth {
2406 JSContext *cx;
2407 public:
2408 AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->thread->checkRequestDepth++; }
2410 ~AutoCheckRequestDepth() {
2411 JS_ASSERT(cx->thread->checkRequestDepth != 0);
2412 cx->thread->checkRequestDepth--;
2418 # define CHECK_REQUEST(cx) \
2419 JS_ASSERT((cx)->thread); \
2420 JS_ASSERT((cx)->thread->requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
2421 AutoCheckRequestDepth _autoCheckRequestDepth(cx);
2423 #else
2424 # define CHECK_REQUEST(cx) ((void) 0)
2425 # define CHECK_REQUEST_THREAD(cx) ((void) 0)
2426 #endif
2428 static inline uintN
2429 FramePCOffset(JSContext *cx, JSStackFrame* fp)
2431 jsbytecode *pc = fp->hasIMacroPC() ? fp->getIMacroPC() : fp->pc(cx);
2432 return uintN(pc - fp->getScript()->code);
2435 static inline JSAtom **
2436 FrameAtomBase(JSContext *cx, JSStackFrame *fp)
2438 return fp->hasIMacroPC()
2439 ? COMMON_ATOMS_START(&cx->runtime->atomState)
2440 : fp->getScript()->atomMap.vector;
2443 namespace js {
2445 class AutoGCRooter {
2446 public:
2447 AutoGCRooter(JSContext *cx, ptrdiff_t tag)
2448 : down(cx->autoGCRooters), tag(tag), context(cx)
2450 JS_ASSERT(this != cx->autoGCRooters);
2451 CHECK_REQUEST(cx);
2452 cx->autoGCRooters = this;
2455 ~AutoGCRooter() {
2456 JS_ASSERT(this == context->autoGCRooters);
2457 CHECK_REQUEST(context);
2458 context->autoGCRooters = down;
2461 /* Implemented in jsgc.cpp. */
2462 inline void trace(JSTracer *trc);
2464 #ifdef __GNUC__
2465 # pragma GCC visibility push(default)
2466 #endif
2467 friend void MarkContext(JSTracer *trc, JSContext *acx);
2468 friend void MarkRuntime(JSTracer *trc);
2469 #ifdef __GNUC__
2470 # pragma GCC visibility pop
2471 #endif
2473 protected:
2474 AutoGCRooter * const down;
2477 * Discriminates actual subclass of this being used. If non-negative, the
2478 * subclass roots an array of values of the length stored in this field.
2479 * If negative, meaning is indicated by the corresponding value in the enum
2480 * below. Any other negative value indicates some deeper problem such as
2481 * memory corruption.
2483 ptrdiff_t tag;
2485 JSContext * const context;
2487 enum {
2488 JSVAL = -1, /* js::AutoValueRooter */
2489 SHAPE = -2, /* js::AutoShapeRooter */
2490 PARSER = -3, /* js::Parser */
2491 SCRIPT = -4, /* js::AutoScriptRooter */
2492 ENUMERATOR = -5, /* js::AutoEnumStateRooter */
2493 IDARRAY = -6, /* js::AutoIdArray */
2494 DESCRIPTORS = -7, /* js::AutoPropDescArrayRooter */
2495 NAMESPACES = -8, /* js::AutoNamespaceArray */
2496 XML = -9, /* js::AutoXMLRooter */
2497 OBJECT = -10, /* js::AutoObjectRooter */
2498 ID = -11, /* js::AutoIdRooter */
2499 VALVECTOR = -12, /* js::AutoValueVector */
2500 DESCRIPTOR = -13, /* js::AutoPropertyDescriptorRooter */
2501 STRING = -14, /* js::AutoStringRooter */
2502 IDVECTOR = -15 /* js::AutoIdVector */
2505 private:
2506 /* No copy or assignment semantics. */
2507 AutoGCRooter(AutoGCRooter &ida);
2508 void operator=(AutoGCRooter &ida);
2511 /* FIXME(bug 332648): Move this into a public header. */
2512 class AutoValueRooter : private AutoGCRooter
2514 public:
2515 explicit AutoValueRooter(JSContext *cx
2516 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2517 : AutoGCRooter(cx, JSVAL), val(js::NullValue())
2519 JS_GUARD_OBJECT_NOTIFIER_INIT;
2522 AutoValueRooter(JSContext *cx, const Value &v
2523 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2524 : AutoGCRooter(cx, JSVAL), val(v)
2526 JS_GUARD_OBJECT_NOTIFIER_INIT;
2529 AutoValueRooter(JSContext *cx, jsval v
2530 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2531 : AutoGCRooter(cx, JSVAL), val(js::Valueify(v))
2533 JS_GUARD_OBJECT_NOTIFIER_INIT;
2537 * If you are looking for Object* overloads, use AutoObjectRooter instead;
2538 * rooting Object*s as a js::Value requires discerning whether or not it is
2539 * a function object. Also, AutoObjectRooter is smaller.
2542 void set(Value v) {
2543 JS_ASSERT(tag == JSVAL);
2544 val = v;
2547 void set(jsval v) {
2548 JS_ASSERT(tag == JSVAL);
2549 val = js::Valueify(v);
2552 const Value &value() const {
2553 JS_ASSERT(tag == JSVAL);
2554 return val;
2557 Value *addr() {
2558 JS_ASSERT(tag == JSVAL);
2559 return &val;
2562 const jsval &jsval_value() const {
2563 JS_ASSERT(tag == JSVAL);
2564 return Jsvalify(val);
2567 jsval *jsval_addr() {
2568 JS_ASSERT(tag == JSVAL);
2569 return Jsvalify(&val);
2572 friend void AutoGCRooter::trace(JSTracer *trc);
2573 friend void MarkRuntime(JSTracer *trc);
2575 private:
2576 Value val;
2577 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
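/*
 * Illustrative sketch (not part of the original header): an AutoValueRooter
 * keeps a single Value visible to the GC for the lifetime of a C++ scope,
 * e.g. in a hypothetical native (StringValue is assumed from jsvalue.h):
 *
 *   AutoValueRooter tvr(cx);
 *   tvr.set(StringValue(str));
 *   ... call code that can GC; tvr.value() stays rooted and traced ...
 */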
2580 class AutoObjectRooter : private AutoGCRooter {
2581 public:
2582 AutoObjectRooter(JSContext *cx, JSObject *obj = NULL
2583 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2584 : AutoGCRooter(cx, OBJECT), obj(obj)
2586 JS_GUARD_OBJECT_NOTIFIER_INIT;
2589 void setObject(JSObject *obj) {
2590 this->obj = obj;
2593 JSObject * object() const {
2594 return obj;
2597 JSObject ** addr() {
2598 return &obj;
2601 friend void AutoGCRooter::trace(JSTracer *trc);
2602 friend void MarkRuntime(JSTracer *trc);
2604 private:
2605 JSObject *obj;
2606 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2609 class AutoStringRooter : private AutoGCRooter {
2610 public:
2611 AutoStringRooter(JSContext *cx, JSString *str = NULL
2612 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2613 : AutoGCRooter(cx, STRING), str(str)
2615 JS_GUARD_OBJECT_NOTIFIER_INIT;
2618 void setString(JSString *str) {
2619 this->str = str;
2622 JSString * string() const {
2623 return str;
2626 JSString ** addr() {
2627 return &str;
2630 friend void AutoGCRooter::trace(JSTracer *trc);
2632 private:
2633 JSString *str;
2634 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2637 class AutoArrayRooter : private AutoGCRooter {
2638 public:
2639 AutoArrayRooter(JSContext *cx, size_t len, Value *vec
2640 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2641 : AutoGCRooter(cx, len), array(vec)
2643 JS_GUARD_OBJECT_NOTIFIER_INIT;
2644 JS_ASSERT(tag >= 0);
2647 AutoArrayRooter(JSContext *cx, size_t len, jsval *vec
2648 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2649 : AutoGCRooter(cx, len), array(Valueify(vec))
2651 JS_GUARD_OBJECT_NOTIFIER_INIT;
2652 JS_ASSERT(tag >= 0);
2655 void changeLength(size_t newLength) {
2656 tag = ptrdiff_t(newLength);
2657 JS_ASSERT(tag >= 0);
2660 void changeArray(Value *newArray, size_t newLength) {
2661 changeLength(newLength);
2662 array = newArray;
2665 Value *array;
2667 friend void AutoGCRooter::trace(JSTracer *trc);
2669 private:
2670 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2673 class AutoShapeRooter : private AutoGCRooter {
2674 public:
2675 AutoShapeRooter(JSContext *cx, const js::Shape *shape
2676 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2677 : AutoGCRooter(cx, SHAPE), shape(shape)
2679 JS_GUARD_OBJECT_NOTIFIER_INIT;
2682 friend void AutoGCRooter::trace(JSTracer *trc);
2683 friend void MarkRuntime(JSTracer *trc);
2685 private:
2686 const js::Shape * const shape;
2687 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2690 class AutoScriptRooter : private AutoGCRooter {
2691 public:
2692 AutoScriptRooter(JSContext *cx, JSScript *script
2693 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2694 : AutoGCRooter(cx, SCRIPT), script(script)
2696 JS_GUARD_OBJECT_NOTIFIER_INIT;
2699 void setScript(JSScript *script) {
2700 this->script = script;
2703 friend void AutoGCRooter::trace(JSTracer *trc);
2705 private:
2706 JSScript *script;
2707 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2710 class AutoIdRooter : private AutoGCRooter
2712 public:
2713 explicit AutoIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0)
2714 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2715 : AutoGCRooter(cx, ID), id_(id)
2717 JS_GUARD_OBJECT_NOTIFIER_INIT;
2720 jsid id() {
2721 return id_;
2724 jsid * addr() {
2725 return &id_;
2728 friend void AutoGCRooter::trace(JSTracer *trc);
2729 friend void MarkRuntime(JSTracer *trc);
2731 private:
2732 jsid id_;
2733 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2736 class AutoIdArray : private AutoGCRooter {
2737 public:
2738 AutoIdArray(JSContext *cx, JSIdArray *ida JS_GUARD_OBJECT_NOTIFIER_PARAM)
2739 : AutoGCRooter(cx, IDARRAY), idArray(ida)
2741 JS_GUARD_OBJECT_NOTIFIER_INIT;
2743 ~AutoIdArray() {
2744 if (idArray)
2745 JS_DestroyIdArray(context, idArray);
2747 bool operator!() {
2748 return idArray == NULL;
2750 jsid operator[](size_t i) const {
2751 JS_ASSERT(idArray);
2752 JS_ASSERT(i < size_t(idArray->length));
2753 return idArray->vector[i];
2755 size_t length() const {
2756 return idArray->length;
2759 friend void AutoGCRooter::trace(JSTracer *trc);
2761 JSIdArray *steal() {
2762 JSIdArray *copy = idArray;
2763 idArray = NULL;
2764 return copy;
2767 protected:
2768 inline void trace(JSTracer *trc);
2770 private:
2771 JSIdArray * idArray;
2772 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2774 /* No copy or assignment semantics. */
2775 AutoIdArray(AutoIdArray &ida);
2776 void operator=(AutoIdArray &ida);
2779 /* The auto-root for enumeration object and its state. */
2780 class AutoEnumStateRooter : private AutoGCRooter
2782 public:
2783 AutoEnumStateRooter(JSContext *cx, JSObject *obj
2784 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2785 : AutoGCRooter(cx, ENUMERATOR), obj(obj), stateValue()
2787 JS_GUARD_OBJECT_NOTIFIER_INIT;
2788 JS_ASSERT(obj);
2791 ~AutoEnumStateRooter() {
2792 if (!stateValue.isNull()) {
2793 #ifdef DEBUG
2794 JSBool ok =
2795 #endif
2796 obj->enumerate(context, JSENUMERATE_DESTROY, &stateValue, 0);
2797 JS_ASSERT(ok);
2801 friend void AutoGCRooter::trace(JSTracer *trc);
2803 const Value &state() const { return stateValue; }
2804 Value *addr() { return &stateValue; }
2806 protected:
2807 void trace(JSTracer *trc) {
2808 JS_CALL_OBJECT_TRACER(trc, obj, "js::AutoEnumStateRooter.obj");
2811 JSObject * const obj;
2813 private:
2814 Value stateValue;
2815 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2818 #ifdef JS_HAS_XML_SUPPORT
2819 class AutoXMLRooter : private AutoGCRooter {
2820 public:
2821 AutoXMLRooter(JSContext *cx, JSXML *xml)
2822 : AutoGCRooter(cx, XML), xml(xml)
2824 JS_ASSERT(xml);
2827 friend void AutoGCRooter::trace(JSTracer *trc);
2828 friend void MarkRuntime(JSTracer *trc);
2830 private:
2831 JSXML * const xml;
2833 #endif /* JS_HAS_XML_SUPPORT */
2835 class AutoLockGC {
2836 private:
2837 JSRuntime *rt;
2838 public:
2839 explicit AutoLockGC(JSRuntime *rt) : rt(rt) { JS_LOCK_GC(rt); }
2840 ~AutoLockGC() { JS_UNLOCK_GC(rt); }
2843 class AutoUnlockGC {
2844 private:
2845 JSRuntime *rt;
2846 public:
2847 explicit AutoUnlockGC(JSRuntime *rt) : rt(rt) { JS_UNLOCK_GC(rt); }
2848 ~AutoUnlockGC() { JS_LOCK_GC(rt); }
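/*
 * Illustrative sketch (not part of the original header): these RAII helpers
 * tie JS_LOCK_GC/JS_UNLOCK_GC to scope exit so early returns cannot leak the
 * lock:
 *
 *   {
 *       AutoLockGC lock(rt);
 *       ... touch state guarded by rt->gcLock ...
 *   }   // lock released here, even on early return
 */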
2851 class AutoKeepAtoms {
2852 JSRuntime *rt;
2853 public:
2854 explicit AutoKeepAtoms(JSRuntime *rt) : rt(rt) { JS_KEEP_ATOMS(rt); }
2855 ~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); }
2858 class AutoArenaAllocator {
2859 JSArenaPool *pool;
2860 void *mark;
2861 public:
2862 explicit AutoArenaAllocator(JSArenaPool *pool) : pool(pool) { mark = JS_ARENA_MARK(pool); }
2863 ~AutoArenaAllocator() { JS_ARENA_RELEASE(pool, mark); }
2865 template <typename T>
2866 T *alloc(size_t elems) {
2867 void *ptr;
2868 JS_ARENA_ALLOCATE(ptr, pool, elems * sizeof(T));
2869 return static_cast<T *>(ptr);
2873 class AutoReleasePtr {
2874 JSContext *cx;
2875 void *ptr;
2876 AutoReleasePtr operator=(const AutoReleasePtr &other);
2877 public:
2878 explicit AutoReleasePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
2879 ~AutoReleasePtr() { cx->free(ptr); }
2882 class AutoLocalNameArray {
2883 public:
2884 explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun
2885 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2886 : context(cx),
2887 mark(JS_ARENA_MARK(&cx->tempPool)),
2888 names(fun->getLocalNameArray(cx, &cx->tempPool)),
2889 count(fun->countLocalNames())
2891 JS_GUARD_OBJECT_NOTIFIER_INIT;
2894 ~AutoLocalNameArray() {
2895 JS_ARENA_RELEASE(&context->tempPool, mark);
2898 operator bool() const { return !!names; }
2900 uint32 length() const { return count; }
2902 const jsuword &operator [](unsigned i) const { return names[i]; }
2904 private:
2905 JSContext *context;
2906 void *mark;
2907 jsuword *names;
2908 uint32 count;
2910 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2913 } /* namespace js */
2915 class JSAutoResolveFlags
2917 public:
2918 JSAutoResolveFlags(JSContext *cx, uintN flags
2919 JS_GUARD_OBJECT_NOTIFIER_PARAM)
2920 : mContext(cx), mSaved(cx->resolveFlags)
2922 JS_GUARD_OBJECT_NOTIFIER_INIT;
2923 cx->resolveFlags = flags;
2926 ~JSAutoResolveFlags() { mContext->resolveFlags = mSaved; }
2928 private:
2929 JSContext *mContext;
2930 uintN mSaved;
2931 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2935 * Slightly more readable macros for testing per-context option settings (also
2936 * to hide bitset implementation detail).
2938 * JSOPTION_XML must be handled specially in order to propagate from compile-
2939 * to run-time (from cx->options to script->version/cx->version). To do that,
2940 * we copy JSOPTION_XML from cx->options into cx->version as JSVERSION_HAS_XML
2941 * whenever options are set, and preserve this XML flag across version number
2942 * changes done via the JS_SetVersion API.
2944 * But when executing a script or scripted function, the interpreter changes
2945 * cx->version, including the XML flag, to script->version. Thus JSOPTION_XML
2946 * is a compile-time option that causes a run-time version change during each
2947 * activation of the compiled script. That version change has the effect of
2948 * changing JS_HAS_XML_OPTION, so that any compiling done via eval enables XML
2949 * support. If an XML-enabled script or function calls a non-XML function,
2950 * the flag bit will be cleared during the callee's activation.
2952 * Note that JS_SetVersion API calls never pass JSVERSION_HAS_XML or'd into
2953 * that API's version parameter.
2955 * Note also that script->version must contain this XML option flag in order
2956 * for XDR'ed scripts to serialize and deserialize with that option preserved
2957 * for detection at run-time. We can't copy other compile-time options into
2958 * script->version because that would break backward compatibility (certain
2959 * other options, e.g. JSOPTION_VAROBJFIX, are analogous to JSOPTION_XML).
2961 #define JS_HAS_OPTION(cx,option) (((cx)->options & (option)) != 0)
2962 #define JS_HAS_STRICT_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_STRICT)
2963 #define JS_HAS_WERROR_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_WERROR)
2964 #define JS_HAS_COMPILE_N_GO_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_COMPILE_N_GO)
2965 #define JS_HAS_ATLINE_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_ATLINE)
2967 #define JSVERSION_MASK 0x0FFF /* see JSVersion in jspubtd.h */
2968 #define JSVERSION_HAS_XML 0x1000 /* flag induced by XML option */
2969 #define JSVERSION_ANONFUNFIX 0x2000 /* see jsapi.h, the comments
2970 for JSOPTION_ANONFUNFIX */
2972 #define JSVERSION_NUMBER(cx) ((JSVersion)((cx)->version & \
2973 JSVERSION_MASK))
2974 #define JS_HAS_XML_OPTION(cx) ((cx)->version & JSVERSION_HAS_XML || \
2975 JSVERSION_NUMBER(cx) >= JSVERSION_1_6)
2977 extern JSThreadData *
2978 js_CurrentThreadData(JSRuntime *rt);
2980 extern JSBool
2981 js_InitThreads(JSRuntime *rt);
2983 extern void
2984 js_FinishThreads(JSRuntime *rt);
2986 extern void
2987 js_PurgeThreads(JSContext *cx);
2989 namespace js {
2991 #ifdef JS_THREADSAFE
2993 /* Iterator over JSThreadData from all JSThread instances. */
2994 class ThreadDataIter : public JSThread::Map::Range
2996 public:
2997 ThreadDataIter(JSRuntime *rt) : JSThread::Map::Range(rt->threads.all()) {}
2999 JSThreadData *threadData() const {
3000 return &front().value->data;
3004 #else /* !JS_THREADSAFE */
3006 class ThreadDataIter
3008 JSRuntime *runtime;
3009 bool done;
3010 public:
3011 ThreadDataIter(JSRuntime *rt) : runtime(rt), done(false) {}
3013 bool empty() const {
3014 return done;
3017 void popFront() {
3018 JS_ASSERT(!done);
3019 done = true;
3022 JSThreadData *threadData() const {
3023 JS_ASSERT(!done);
3024 return &runtime->threadData;
3028 #endif /* !JS_THREADSAFE */
3030 } /* namespace js */
3033 * Ensures the JSOPTION_XML and JSOPTION_ANONFUNFIX bits of cx->options are
3034 * reflected in cx->version, since each bit must travel with a script that has
3035 * it set.
3037 extern void
3038 js_SyncOptionsToVersion(JSContext *cx);
3041 * Common subroutine of JS_SetVersion and js_SetVersion, to update per-context
3042 * data that depends on version.
3044 extern void
3045 js_OnVersionChange(JSContext *cx);
3048 * Unlike the JS_SetVersion API, this function stores JSVERSION_HAS_XML and
3049 * any future non-version-number flags induced by compiler options.
3051 extern void
3052 js_SetVersion(JSContext *cx, JSVersion version);
3055 * Create and destroy functions for JSContext, which is manually allocated
3056 * and exclusively owned.
3058 extern JSContext *
3059 js_NewContext(JSRuntime *rt, size_t stackChunkSize);
3061 extern void
3062 js_DestroyContext(JSContext *cx, JSDestroyContextMode mode);
3065 * Return true if cx points to a context in rt->contextList, else return false.
3066 * NB: the caller (see jslock.c:ClaimTitle) must hold rt->gcLock.
3068 extern JSBool
3069 js_ValidContextPointer(JSRuntime *rt, JSContext *cx);
3071 static JS_INLINE JSContext *
3072 js_ContextFromLinkField(JSCList *link)
3074 JS_ASSERT(link);
3075 return (JSContext *) ((uint8 *) link - offsetof(JSContext, link));
3079 * If unlocked, acquire and release rt->gcLock around *iterp update; otherwise
3080 * the caller must be holding rt->gcLock.
3082 extern JSContext *
3083 js_ContextIterator(JSRuntime *rt, JSBool unlocked, JSContext **iterp);
3086 * Iterate through contexts with active requests. The caller must be holding
3087 * rt->gcLock in a thread-safe build, or otherwise guarantee that the
3088 * context list is not altered asynchronously.
3090 extern JS_FRIEND_API(JSContext *)
3091 js_NextActiveContext(JSRuntime *, JSContext *);
3094 * Class.resolve and watchpoint recursion damping machinery.
3096 extern JSBool
3097 js_StartResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
3098 JSResolvingEntry **entryp);
3100 extern void
3101 js_StopResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
3102 JSResolvingEntry *entry, uint32 generation);
3105 * Report an exception, which is currently realized as a printf-style format
3106 * string and its arguments.
3108 typedef enum JSErrNum {
3109 #define MSG_DEF(name, number, count, exception, format) \
3110 name = number,
3111 #include "js.msg"
3112 #undef MSG_DEF
3113 JSErr_Limit
3114 } JSErrNum;
3116 extern JS_FRIEND_API(const JSErrorFormatString *)
3117 js_GetErrorMessage(void *userRef, const char *locale, const uintN errorNumber);
3119 #ifdef va_start
3120 extern JSBool
3121 js_ReportErrorVA(JSContext *cx, uintN flags, const char *format, va_list ap);
3123 extern JSBool
3124 js_ReportErrorNumberVA(JSContext *cx, uintN flags, JSErrorCallback callback,
3125 void *userRef, const uintN errorNumber,
3126 JSBool charArgs, va_list ap);
3128 extern JSBool
3129 js_ExpandErrorArguments(JSContext *cx, JSErrorCallback callback,
3130 void *userRef, const uintN errorNumber,
3131 char **message, JSErrorReport *reportp,
3132 bool charArgs, va_list ap);
3133 #endif
3135 extern void
3136 js_ReportOutOfMemory(JSContext *cx);
3139 * Report that cx->scriptStackQuota is exhausted.
3141 void
3142 js_ReportOutOfScriptQuota(JSContext *cx);
3144 extern JS_FRIEND_API(void)
3145 js_ReportOverRecursed(JSContext *cx);
3147 extern JS_FRIEND_API(void)
3148 js_ReportAllocationOverflow(JSContext *cx);
3150 #define JS_CHECK_RECURSION(cx, onerror) \
3151 JS_BEGIN_MACRO \
3152 int stackDummy_; \
3154 if (!JS_CHECK_STACK_SIZE(cx, stackDummy_)) { \
3155 js_ReportOverRecursed(cx); \
3156 onerror; \
3158 JS_END_MACRO
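/*
 * Illustrative sketch (not part of the original header): a deeply recursive
 * native would typically open with the recursion check, e.g.:
 *
 *   static JSBool
 *   HypotheticalRecursiveNative(JSContext *cx, uintN argc, jsval *vp)
 *   {
 *       JS_CHECK_RECURSION(cx, return JS_FALSE);
 *       ...
 *   }
 */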
3161 * Report an exception using a previously composed JSErrorReport.
3162 * XXXbe remove from "friend" API
3164 extern JS_FRIEND_API(void)
3165 js_ReportErrorAgain(JSContext *cx, const char *message, JSErrorReport *report);
3167 extern void
3168 js_ReportIsNotDefined(JSContext *cx, const char *name);
3171 * Report an attempt to access the property of a null or undefined value (v).
3173 extern JSBool
3174 js_ReportIsNullOrUndefined(JSContext *cx, intN spindex, const js::Value &v,
3175 JSString *fallback);
3177 extern void
3178 js_ReportMissingArg(JSContext *cx, const js::Value &v, uintN arg);
3181 * Report error using js_DecompileValueGenerator(cx, spindex, v, fallback) as
3182 * the first argument for the error message. If the error message has fewer
3183 * than 3 arguments, pass null for arg1 or arg2.
3185 extern JSBool
3186 js_ReportValueErrorFlags(JSContext *cx, uintN flags, const uintN errorNumber,
3187 intN spindex, const js::Value &v, JSString *fallback,
3188 const char *arg1, const char *arg2);
3190 #define js_ReportValueError(cx,errorNumber,spindex,v,fallback) \
3191 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3192 spindex, v, fallback, NULL, NULL))
3194 #define js_ReportValueError2(cx,errorNumber,spindex,v,fallback,arg1) \
3195 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3196 spindex, v, fallback, arg1, NULL))
3198 #define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2) \
3199 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3200 spindex, v, fallback, arg1, arg2))
3202 extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
3205 * See JS_SetThreadStackLimit in jsapi.c, where we check that the stack
3206 * grows in the expected direction.
3208 #if JS_STACK_GROWTH_DIRECTION > 0
3209 # define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) < (cx)->stackLimit)
3210 #else
3211 # define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) > (cx)->stackLimit)
3212 #endif
3214 #ifdef JS_THREADSAFE
3215 # define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \
3216 JS_ASSERT((cx)->thread->requestDepth >= 1))
3217 #else
3218 # define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0)
3219 #endif
3222 * If the operation callback flag was set, call the operation callback.
3223 * This macro can run a full GC. Return true if it is OK to continue and
3224 * false otherwise.
3226 #define JS_CHECK_OPERATION_LIMIT(cx) \
3227 (JS_ASSERT_REQUEST_DEPTH(cx), \
3228 (!(JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK) || js_InvokeOperationCallback(cx)))
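/*
 * Illustrative sketch (not part of the original header): long-running loops
 * poll the operation callback so the embedding can interrupt execution or
 * trigger a GC, e.g.:
 *
 *   while (moreWork) {
 *       if (!JS_CHECK_OPERATION_LIMIT(cx))
 *           return JS_FALSE;    // callback asked us to stop
 *       ...
 *   }
 */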
3231 * Invoke the operation callback and return false if the current execution
3232 * is to be terminated.
3234 extern JSBool
3235 js_InvokeOperationCallback(JSContext *cx);
3237 extern JSBool
3238 js_HandleExecutionInterrupt(JSContext *cx);
3240 namespace js {
3242 /* Must be called with GC lock taken. */
3243 void
3244 TriggerAllOperationCallbacks(JSRuntime *rt);
3246 } /* namespace js */
3248 extern JSStackFrame *
3249 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
3251 extern jsbytecode*
3252 js_GetCurrentBytecodePC(JSContext* cx);
3254 extern bool
3255 js_CurrentPCIsInImacro(JSContext *cx);
3257 namespace js {
3259 #ifdef JS_TRACER
3261 * Reconstruct the JS stack and clear cx->tracecx. We must currently be in a
3262 * _FAIL builtin called from a trace on cx or another context on the same thread. The
3263 * machine code for the trace remains on the C stack when js_DeepBail returns.
3265 * Implemented in jstracer.cpp.
3267 JS_FORCES_STACK JS_FRIEND_API(void)
3268 DeepBail(JSContext *cx);
3269 #endif
3271 static JS_FORCES_STACK JS_INLINE void
3272 LeaveTrace(JSContext *cx)
3274 #ifdef JS_TRACER
3275 if (JS_ON_TRACE(cx))
3276 DeepBail(cx);
3277 #endif
3280 static JS_INLINE void
3281 LeaveTraceIfGlobalObject(JSContext *cx, JSObject *obj)
3283 if (!obj->parent)
3284 LeaveTrace(cx);
3287 static JS_INLINE JSBool
3288 CanLeaveTrace(JSContext *cx)
3290 JS_ASSERT(JS_ON_TRACE(cx));
3291 #ifdef JS_TRACER
3292 return cx->bailExit != NULL;
3293 #else
3294 return JS_FALSE;
3295 #endif
3298 extern void
3299 SetPendingException(JSContext *cx, const Value &v);
3301 } /* namespace js */
3304 * Get the current frame, first lazily instantiating stack frames if needed.
3305 * (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
3307 * Defined in jstracer.cpp if JS_TRACER is defined.
3309 static JS_FORCES_STACK JS_INLINE JSStackFrame *
3310 js_GetTopStackFrame(JSContext *cx)
3312 js::LeaveTrace(cx);
3313 return cx->maybefp();
3316 static JS_INLINE JSBool
3317 js_IsPropertyCacheDisabled(JSContext *cx)
3319 return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
3322 static JS_INLINE uint32
3323 js_RegenerateShapeForGC(JSContext *cx)
3325 JS_ASSERT(cx->runtime->gcRunning);
3326 JS_ASSERT(cx->runtime->gcRegenShapes);
3329 * Under the GC, unlike in js_GenerateShape, we don't need to use
3330 * atomic increments, but we still must make sure that once the shape
3331 * overflows it stays in the overflowed state.
3333 uint32 shape = cx->runtime->shapeGen;
3334 shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
3335 cx->runtime->shapeGen = shape;
3336 return shape;
3339 namespace js {
3341 inline void *
3342 ContextAllocPolicy::malloc(size_t bytes)
3344 return cx->malloc(bytes);
3347 inline void
3348 ContextAllocPolicy::free(void *p)
3350 cx->free(p);
3353 inline void *
3354 ContextAllocPolicy::realloc(void *p, size_t bytes)
3356 return cx->realloc(p, bytes);
3359 inline void
3360 ContextAllocPolicy::reportAllocOverflow() const
3362 js_ReportAllocationOverflow(cx);
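/*
 * Illustrative sketch (not part of the original header): ContextAllocPolicy
 * routes a container's allocations through cx->malloc/free so failures are
 * reported on cx, e.g. with a Vector (here assumed to use ContextAllocPolicy
 * as its default allocation policy):
 *
 *   Vector<int, 8> v(cx);
 *   if (!v.append(1))
 *       return false;           // OOM was already reported via cx
 */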
3365 class AutoValueVector : private AutoGCRooter
3367 public:
3368 explicit AutoValueVector(JSContext *cx
3369 JS_GUARD_OBJECT_NOTIFIER_PARAM)
3370 : AutoGCRooter(cx, VALVECTOR), vector(cx)
3372 JS_GUARD_OBJECT_NOTIFIER_INIT;
3375 size_t length() const { return vector.length(); }
3377 bool append(const Value &v) { return vector.append(v); }
3379 void popBack() { vector.popBack(); }
3381 bool growBy(size_t inc) {
3382 /* N.B. Value's default ctor leaves the Value undefined */
3383 size_t oldLength = vector.length();
3384 if (!vector.growByUninitialized(inc))
3385 return false;
3386 MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
3387 return true;
3390 bool resize(size_t newLength) {
3391 size_t oldLength = vector.length();
3392 if (newLength <= oldLength) {
3393 vector.shrinkBy(oldLength - newLength);
3394 return true;
3396 /* N.B. Value's default ctor leaves the Value undefined */
3397 if (!vector.growByUninitialized(newLength - oldLength))
3398 return false;
3399 MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
3400 return true;
3403 bool reserve(size_t newLength) {
3404 return vector.reserve(newLength);
3407 Value &operator[](size_t i) { return vector[i]; }
3408 const Value &operator[](size_t i) const { return vector[i]; }
3410 const Value *begin() const { return vector.begin(); }
3411 Value *begin() { return vector.begin(); }
3413 const Value *end() const { return vector.end(); }
3414 Value *end() { return vector.end(); }
3416 const jsval *jsval_begin() const { return Jsvalify(begin()); }
3417 jsval *jsval_begin() { return Jsvalify(begin()); }
3419 const jsval *jsval_end() const { return Jsvalify(end()); }
3420 jsval *jsval_end() { return Jsvalify(end()); }
3422 const Value &back() const { return vector.back(); }
3424 friend void AutoGCRooter::trace(JSTracer *trc);
3426 private:
3427 Vector<Value, 8> vector;
3428 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
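/*
 * Illustrative sketch (not part of the original header): AutoValueVector is a
 * growable, GC-rooted array of Values, handy for building an argument list
 * before a hypothetical invoke-style call (Int32Value is assumed from
 * jsvalue.h):
 *
 *   AutoValueVector vals(cx);
 *   if (!vals.reserve(n))
 *       return false;
 *   for (size_t i = 0; i < n; i++)
 *       vals.append(Int32Value(int32(i)));   // cannot fail after reserve(n)
 *   ... pass vals.begin() and vals.length() to the callee ...
 */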
3431 class AutoIdVector : private AutoGCRooter
3433 public:
3434 explicit AutoIdVector(JSContext *cx
3435 JS_GUARD_OBJECT_NOTIFIER_PARAM)
3436 : AutoGCRooter(cx, IDVECTOR), vector(cx)
3438 JS_GUARD_OBJECT_NOTIFIER_INIT;
3441 size_t length() const { return vector.length(); }
3443 bool append(jsid id) { return vector.append(id); }
3445 void popBack() { vector.popBack(); }
3447 bool growBy(size_t inc) {
3448 /* N.B. jsid's default ctor leaves the jsid undefined */
3449 size_t oldLength = vector.length();
3450 if (!vector.growByUninitialized(inc))
3451 return false;
3452 MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
3453 return true;
3456 bool resize(size_t newLength) {
3457 size_t oldLength = vector.length();
3458 if (newLength <= oldLength) {
3459 vector.shrinkBy(oldLength - newLength);
3460 return true;
3462 /* N.B. jsid's default ctor leaves the jsid undefined */
3463 if (!vector.growByUninitialized(newLength - oldLength))
3464 return false;
3465 MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
3466 return true;
3469 bool reserve(size_t newLength) {
3470 return vector.reserve(newLength);
3473 jsid &operator[](size_t i) { return vector[i]; }
3474 const jsid &operator[](size_t i) const { return vector[i]; }
3476 const jsid *begin() const { return vector.begin(); }
3477 jsid *begin() { return vector.begin(); }
3479 const jsid *end() const { return vector.end(); }
3480 jsid *end() { return vector.end(); }
3482 const jsid &back() const { return vector.back(); }
3484 friend void AutoGCRooter::trace(JSTracer *trc);
3486 private:
3487 Vector<jsid, 8> vector;
3488 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
3491 JSIdArray *
3492 NewIdArray(JSContext *cx, jsint length);
3494 } /* namespace js */
3496 #ifdef _MSC_VER
3497 #pragma warning(pop)
3498 #pragma warning(pop)
3499 #endif
3501 #ifdef JS_UNDEFD_MOZALLOC_WRAPPERS
3502 # include "mozilla/mozalloc_macro_wrappers.h"
3503 #endif
3505 #endif /* jscntxt_h___ */