1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=78:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
44 * JS execution context.
48 /* Gross special case for Gecko, which defines malloc/calloc/free. */
49 #ifdef mozilla_mozalloc_macro_wrappers_h
50 # define JS_UNDEFD_MOZALLOC_WRAPPERS
51 /* The "anti-header" */
52 # include "mozilla/mozalloc_undef_macro_wrappers.h"
56 #include "jsarena.h" /* Added by JSIFY */
63 #include "jsgcchunk.h"
64 #include "jshashtable.h"
67 #include "jspropertycache.h"
68 #include "jspropertytree.h"
78 #pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */
80 #pragma warning(disable:4355) /* Silence warning about "this" used in base member initializer list */
84 * js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
85 * given pc in a script. We use the script->code pointer to tag the cache,
86 * instead of the script address itself, so that source notes are always found
87 * by offset from the bytecode with which they were generated.
89 typedef struct JSGSNCache {
97 # define GSN_CACHE_METER(cache,cnt) (++(cache)->cnt)
99 # define GSN_CACHE_METER(cache,cnt) /* nothing */
103 #define js_FinishGSNCache(cache) js_PurgeGSNCache(cache)
106 js_PurgeGSNCache(JSGSNCache *cache);
108 /* These helper macros take a cx as parameter and operate on its GSN cache. */
109 #define JS_PURGE_GSN_CACHE(cx) js_PurgeGSNCache(&JS_GSN_CACHE(cx))
110 #define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
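/*
 * Illustrative usage sketch (not part of the original header; the enclosing
 * function is hypothetical): the cache is tagged by a single script's
 * bytecode, so it is purged via the helper macro whenever that association
 * may become stale.
 *
 *   void dropSrcNoteCache(JSContext *cx) {
 *       JS_PURGE_GSN_CACHE(cx);   // forget the script->code tag and all entries
 *   }
 */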
112 /* Forward declarations of nanojit types. */
118 template<typename K> struct DefaultHash;
119 template<typename K, typename V, typename H> class HashMap;
120 template<typename T> class Seq;
122 } /* namespace nanojit */
125 class ExecutableAllocator;
130 /* Tracer constants. */
131 static const size_t MONITOR_N_GLOBAL_STATES = 4;
132 static const size_t FRAGMENT_TABLE_SIZE = 512;
133 static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
134 static const size_t MAX_CALL_STACK_ENTRIES = 500;
135 static const size_t MAX_GLOBAL_SLOTS = 4096;
136 static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
137 static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16;
139 /* Forward declarations of tracer types. */
141 class FrameInfoCache;
148 template<typename T> class Queue;
149 typedef Queue<uint16> SlotList;
152 typedef nanojit::HashMap<REHashKey, REFragment*, REHashFn> REHashMap;
154 #if defined(JS_JIT_SPEW) || defined(DEBUG)
156 typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap;
160 * Allocation policy that calls JSContext memory functions and reports errors
161 * to the context. Since the JSContext given on construction is stored for
162 * the lifetime of the container, this policy may only be used for containers
163 * whose lifetime is shorter than that of the given JSContext.
165 class ContextAllocPolicy
170 ContextAllocPolicy(JSContext *cx) : cx(cx) {}
171 JSContext *context() const { return cx; }
173 /* Inline definitions below. */
174 void *malloc(size_t bytes);
176 void *realloc(void *p, size_t bytes);
177 void reportAllocOverflow() const;
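/*
 * Illustrative usage sketch (not part of the original header; the enclosing
 * function and element type are assumptions): js::Vector is typically
 * instantiated with this policy, so constructing it with a cx routes
 * allocation failures and OOM reports to that context, as
 * RegExpStatics::matchPairs does later in this file.
 *
 *   void example(JSContext *cx) {
 *       js::Vector<int, 8> vec(cx);   // uses ContextAllocPolicy(cx)
 *       if (!vec.append(42))
 *           return;                   // error already reported against cx
 *   }
 */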
180 /* Holds the execution state during trace execution. */
183 JSContext*     cx;                  // current VM context handle
184 double*        stackBase;           // native stack base
185 double*        sp;                  // native stack pointer, stack[0] is spbase[0]
186 double*        eos;                 // first unusable word after the native stack / begin of globals
187 FrameInfo**    callstackBase;       // call stack base
188 void*          sor;                 // start of rp stack
189 FrameInfo**    rp;                  // call stack pointer
190 void*          eor;                 // first unusable word after the call stack
191 VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
192 VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
193                                     // call exit guard mismatched
194 void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
195 VMSideExit*    outermostTreeExitGuard; // the last side exit returned by js_CallTree
196 TreeFragment*  outermostTree;       // the outermost tree we initially invoked
197 uintN*         inlineCallCountp;    // inline call count counter
198 VMSideExit**   innermostNestedGuardp;
199 VMSideExit*    innermost;
203 // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
204 // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
205 // if an error or exception occurred.
206 uint32         builtinStatus;
208 // Used to communicate the location of the return value in case of a deep bail.
211 // Used when calling natives from trace to root the vp vector.
215 // The regs pointed to by cx->regs while a deep-bailed slow native
216 // completes execution.
217 JSFrameRegs    bailedSlowNativeRegs;
219 TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti,
220             uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
225 * Storage for the execution state and store during trace execution. Generated
226 * code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
227 * doubles after the stack begins. Thus, on trace, |TracerState::eos| holds a
228 * pointer to the first global.
230 struct TraceNativeStorage
232 double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
233 FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];
235 double *stack() { return stack_global_buf; }
236 double *global() { return stack_global_buf + MAX_NATIVE_STACK_SLOTS; }
237 FrameInfo **callstack() { return callstack_buf; }
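/*
 * Illustrative sketch (not part of the original header; the checking helper
 * is hypothetical): the layout invariant described above can be stated
 * directly in terms of the accessors, since the globals live
 * MAX_NATIVE_STACK_SLOTS doubles past the start of the stack buffer.
 *
 *   void checkLayout(TraceNativeStorage &storage) {
 *       JS_ASSERT(storage.global() == storage.stack() + MAX_NATIVE_STACK_SLOTS);
 *       JS_ASSERT(storage.callstack() == &storage.callstack_buf[0]);
 *   }
 */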
240 /* Holds data to track a single global. */
244 SlotList* globalSlots;
248 * A StackSegment (referred to as just a 'segment') contains a down-linked set
249 * of stack frames and the slots associated with each frame. A segment and its
250 * contained frames/slots also have a precise memory layout that is described
251 * in the js::StackSpace comment. A key layout invariant for segments is that
252 * down-linked frames are adjacent in memory, separated only by the values that
253 * constitute the locals and expression stack of the down-frame and arguments
256 * The set of stack frames in a non-empty segment starts at the segment's
257 * "current frame", which is the most recently pushed frame, and ends at the
258 * segment's "initial frame". Note that, while all stack frames in a segment
259 * are down-linked, not all down-linked frames are in the same segment. Hence,
260 * for a segment |ss|, |ss->getInitialFrame()->down| may be non-null and in a
261 * different segment. This occurs when the VM reenters itself (via Invoke or
262 * Execute). In full generality, a single context may contain a forest of trees
263 * of stack frames. With respect to this forest, a segment contains a linear
264 * path along a single tree, not necessarily to the root.
266 * The frames of a non-empty segment must all be in the same context and thus
267 * each non-empty segment is referred to as being "in" a context. Segments in a
268 * context have an additional state of being either "active" or "suspended". A
269 * suspended segment |ss| has a "suspended frame" which is snapshot of |cx->regs|
270 * when the segment was suspended and serves as the current frame of |ss|.
271 * There is at most one active segment in a given context. Segments in a
272 * context execute LIFO and are maintained in a stack. The top of this stack
273 * is the context's "current segment". If a context |cx| has an active segment
275 * 1. |ss| is |cx|'s current segment,
276 * 2. |cx->regs != NULL|, and
277 * 3. |ss|'s current frame is |cx->regs->fp|.
278 * Moreover, |cx->regs != NULL| iff |cx| has an active segment.
280 * An empty segment is not associated with any context. Empty segments are
281 * created when there is not an active segment for a context at the top of the
282 * stack and claim space for the arguments of an Invoke before the Invoke's
283 * stack frame is pushed. During the intervals when the arguments have been
284 * pushed, but not the stack frame, the segment cannot be pushed onto the
285 * context, since that would require some hack to deal with cx->fp not being
286 * the current frame of cx->currentSegment.
288 * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
289 * segment may or may not be "saved". Normally, when the active segment is
290 * popped, the previous segment (which is necessarily suspended) becomes
291 * active. If the previous segment was saved, however, then it stays suspended
292 * until it is made active by a call to JS_RestoreFrameChain. This is why a
293 * context may have a current segment, but not an active segment.
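/*
 * Illustrative sketch (not part of the original header; the checking helper
 * is hypothetical, the member names are taken from declarations in this
 * file): for a context cx with an active segment ss, the three numbered
 * invariants above can be written out directly.
 *
 *   void checkActiveSegmentInvariants(JSContext *cx, js::StackSegment *ss) {
 *       JS_ASSERT(ss == cx->getCurrentSegment());          // 1. current segment
 *       JS_ASSERT(cx->regs != NULL);                       // 2. regs are set
 *       JS_ASSERT(ss->getCurrentFrame() == cx->regs->fp);  // 3. current frame
 *   }
 */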
297 /* The context to which this segment belongs. */
300 /* Link for JSContext segment stack mentioned in big comment above. */
301 StackSegment *previousInContext;
303 /* Link for StackSpace segment stack mentioned in StackSpace comment. */
304 StackSegment *previousInMemory;
306 /* The first frame executed in this segment. null iff cx is null */
307 JSStackFrame *initialFrame;
309 /* If this segment is suspended, |cx->regs| when it was suspended. */
310 JSFrameRegs *suspendedRegs;
312 /* The varobj on entry to initialFrame. */
313 JSObject *initialVarObj;
315 /* Whether this segment was suspended by JS_SaveFrameChain. */
318 /* Align at 8 bytes on all platforms. */
319 #if JS_BITS_PER_WORD == 32
324 * To make isActive a single null-ness check, this non-null constant is
325 * assigned to suspendedRegs when !inContext.
327 #define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1)
331 : cx(NULL), previousInContext(NULL), previousInMemory(NULL),
332   initialFrame(NULL), suspendedRegs(NON_NULL_SUSPENDED_REGS),
333   initialVarObj(NULL), saved(false)
335 JS_ASSERT(!inContext());
338 /* Safe casts guaranteed by the contiguous-stack layout. */
340 Value *previousSegmentEnd() const {
341     return (Value *)this;
344 Value *getInitialArgBegin() const {
345     return (Value *)(this + 1);
349 * As described in the comment at the beginning of the class, a segment
350 * is in one of three states:
352 * !inContext: the segment has been created to root arguments for a
353 * future call to Invoke.
354 * isActive: the segment describes a set of stack frames in a context,
355 * where the top frame is currently executing.
356 * isSuspended: like isActive, but the top frame has been suspended.
359 bool inContext() const {
360     JS_ASSERT(!!cx == !!initialFrame);
361     JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS && !saved);
365 bool isActive() const {
366     JS_ASSERT_IF(!suspendedRegs, cx && !saved);
367     JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
368     return !suspendedRegs;
371 bool isSuspended() const {
372     JS_ASSERT_IF(!cx || !suspendedRegs, !saved);
373     JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
374     return cx && suspendedRegs;
377 /* Substate of suspended, queryable in any state. */
379 bool isSaved() const {
380     JS_ASSERT_IF(saved, isSuspended());
384 /* Transitioning between inContext <--> isActive */
386 void joinContext(JSContext *cx, JSStackFrame *f) {
387     JS_ASSERT(!inContext());
390     suspendedRegs = NULL;
391     JS_ASSERT(isActive());
394 void leaveContext() {
395     JS_ASSERT(isActive());
398     suspendedRegs = NON_NULL_SUSPENDED_REGS;
399     JS_ASSERT(!inContext());
402 JSContext *maybeContext() const {
406 #undef NON_NULL_SUSPENDED_REGS
408 /* Transitioning between isActive <--> isSuspended */
410 void suspend(JSFrameRegs *regs) {
411     JS_ASSERT(isActive());
412     JS_ASSERT(regs && regs->fp && contains(regs->fp));
413     suspendedRegs = regs;
414     JS_ASSERT(isSuspended());
418     JS_ASSERT(isSuspended());
419     suspendedRegs = NULL;
420     JS_ASSERT(isActive());
423 /* When isSuspended, transitioning isSaved <--> !isSaved */
425 void save(JSFrameRegs *regs) {
426     JS_ASSERT(!isSuspended());
429     JS_ASSERT(isSaved());
433     JS_ASSERT(isSaved());
436     JS_ASSERT(!isSuspended());
439 /* Data available when inContext */
441 JSStackFrame *getInitialFrame() const {
442     JS_ASSERT(inContext());
446 inline JSFrameRegs *getCurrentRegs() const;
447 inline JSStackFrame *getCurrentFrame() const;
449 /* Data available when isSuspended. */
451 JSFrameRegs *getSuspendedRegs() const {
452     JS_ASSERT(isSuspended());
453     return suspendedRegs;
456 JSStackFrame *getSuspendedFrame() const {
457     return suspendedRegs->fp;
460 /* JSContext / js::StackSpace bookkeeping. */
462 void setPreviousInContext(StackSegment *seg) {
463     previousInContext = seg;
466 StackSegment *getPreviousInContext() const {
467     return previousInContext;
470 void setPreviousInMemory(StackSegment *seg) {
471     previousInMemory = seg;
474 StackSegment *getPreviousInMemory() const {
475     return previousInMemory;
478 void setInitialVarObj(JSObject *obj) {
479     JS_ASSERT(inContext());
483 JSObject *getInitialVarObj() const {
484     JS_ASSERT(inContext());
485     return initialVarObj;
489 JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const;
493 static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value);
494 JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0);
496 /* See StackSpace::pushInvokeArgs. */
497 class InvokeArgsGuard : public CallArgs
499 friend class StackSpace;
500 JSContext *cx;  /* null implies nothing pushed */
502 Value *prevInvokeArgEnd;
504 StackSegment *prevInvokeSegment;
505 JSStackFrame *prevInvokeFrame;
508 inline InvokeArgsGuard() : cx(NULL), seg(NULL) {}
509 inline InvokeArgsGuard(JSContext *cx, Value *vp, uintN argc);
510 inline ~InvokeArgsGuard();
511 bool pushed() const { return cx != NULL; }
515 * This type can be used to call Invoke when the arguments have already been
516 * pushed onto the stack as part of normal execution.
518 struct InvokeArgsAlreadyOnTheStack : CallArgs
520 InvokeArgsAlreadyOnTheStack(Value *vp, uintN argc) : CallArgs(vp + 2, argc) {}
523 /* See StackSpace::pushInvokeFrame. */
524 class InvokeFrameGuard
526 friend class StackSpace;
527 JSContext *cx;  /* null implies nothing pushed */
529 JSFrameRegs *prevRegs;
531 InvokeFrameGuard() : cx(NULL) {}
532 JS_REQUIRES_STACK ~InvokeFrameGuard();
533 bool pushed() const { return cx != NULL; }
534 JSFrameRegs &getRegs() { return regs; }
537 /* See StackSpace::pushExecuteFrame. */
540 friend class StackSpace;
541 JSContext *cx;  /* null implies nothing pushed */
547 FrameGuard() : cx(NULL), vp(NULL), fp(NULL) {}
548 JS_REQUIRES_STACK ~FrameGuard();
549 bool pushed() const { return cx != NULL; }
550 Value *getvp() const { return vp; }
551 JSStackFrame *getFrame() const { return fp; }
557 * Each JSThreadData has one associated StackSpace object which allocates all
558 * segments for the thread. StackSpace performs all such allocations in a
559 * single, fixed-size buffer using a specific layout scheme that allows some
560 * associations between segments, frames, and slots to be implicit, rather
561 * than explicitly stored as pointers. To maintain useful invariants, stack
562 * space is not given out arbitrarily, but rather allocated/deallocated for
563 * specific purposes. The use cases currently supported are: calling a function
564 * with arguments (e.g. Invoke), executing a script (e.g. Execute), inline
565 * interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See
566 * associated member functions below.
568 * First, we consider the layout of individual segments. (See the
569 * js::StackSegment comment for terminology.) A non-empty segment (i.e., a
570 * segment in a context) has the following layout:
572 * initial frame current frame ------. if regs,
573 * .------------. | | regs->sp
575 * |segment| slots |frame| slots |frame| slots |frame| slots |
577 * ? <----------' `----------' `----------'
580 * Moreover, the bytes in the following ranges form a contiguous array of
581 * Values that are marked during GC:
582 * 1. between a segment and its first frame
583 * 2. between two adjacent frames in a segment
584 * 3. between a segment's current frame and (if fp->regs) fp->regs->sp
585 * Thus, the VM must ensure that all such Values are safe to be marked.
587 * An empty segment is followed by arguments that are rooted by the
588 * StackSpace::invokeArgEnd pointer:
595 * Above the level of segments, a StackSpace is simply a contiguous sequence
596 * of segments kept in a linked list:
598 * base currentSegment firstUnused end
601 * |segment| --- |segment| --- |segment| ------- | |
603 * 0 <---' `-----------' `-----------'
604 * previous previous previous
606 * Both js::StackSpace and JSContext maintain a stack of segments, the top of
607 * which is the "current segment" for that thread or context, respectively.
608 * Since different contexts can arbitrarily interleave execution in a single
609 * thread, these stacks are different enough that a segment needs both
610 * "previousInMemory" and "previousInContext".
612 * For example, in a single thread, a function in segment S1 in a context CX1
613 * may call out into C++ code that reenters the VM in a context CX2, which
614 * creates a new segment S2 in CX2, and CX1 may or may not equal CX2.
616 * Note that there is some structure to this interleaving of segments:
617 * 1. the inclusion from segments in a context to segments in a thread
618 * preserves order (in terms of previousInContext and previousInMemory,
620 * 2. the mapping from stack frames to their containing segment preserves
621 * order (in terms of down and previousInContext, respectively).
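/*
 * Illustrative sketch (not part of the original header; the enumeration
 * helper is hypothetical, the accessor names come from the declarations in
 * this file): walking every segment allocated in a thread's StackSpace
 * follows previousInMemory, while walking only the segments belonging to a
 * single context follows previousInContext.
 *
 *   void forEachSegmentInMemory(js::StackSpace &space) {
 *       for (js::StackSegment *seg = space.getCurrentSegment(); seg;
 *            seg = seg->getPreviousInMemory()) {
 *           // ... visit seg ...
 *       }
 *   }
 */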
627 mutable Value *commitEnd;
630 StackSegment *currentSegment;
633 * Keep track of which segment/frame bumped invokeArgEnd so that
634 * firstUnused() can assert that, when invokeArgEnd is used as the top of
635 * the stack, it is being used appropriately.
637 StackSegment *invokeSegment;
638 JSStackFrame *invokeFrame;
642 JS_REQUIRES_STACK bool pushSegmentForInvoke(JSContext *cx, uintN argc,
643                                             InvokeArgsGuard &ag);
644 JS_REQUIRES_STACK bool pushInvokeFrameSlow(JSContext *cx, const InvokeArgsGuard &ag,
645                                            InvokeFrameGuard &fg);
646 JS_REQUIRES_STACK void popInvokeFrameSlow(const CallArgs &args);
647 JS_REQUIRES_STACK void popSegmentForInvoke(const InvokeArgsGuard &ag);
649 /* Although guards are friends, XGuard should only call popX(). */
650 friend class InvokeArgsGuard;
651 JS_REQUIRES_STACK inline void popInvokeArgs(const InvokeArgsGuard &args);
652 friend class InvokeFrameGuard;
653 JS_REQUIRES_STACK void popInvokeFrame(const InvokeFrameGuard &ag);
654 friend class FrameGuard;
655 JS_REQUIRES_STACK void popFrame(JSContext *cx);
657 /* Return a pointer to the first unused slot. */
659 inline Value *firstUnused() const;
661 inline bool isCurrentAndActive(JSContext *cx) const;
663 StackSegment *getCurrentSegment() const { return currentSegment; }
667 * Allocate nvals on the top of the stack, report error on failure.
668 * N.B. the caller must ensure |from == firstUnused()|.
670 inline bool ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const;
673 /* Commit more memory from the reserved stack space. */
674 JS_FRIEND_API(bool) bumpCommit(Value *from, ptrdiff_t nvals) const;
678 static const size_t CAPACITY_VALS = 512 * 1024;
679 static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value);
680 static const size_t COMMIT_VALS = 16 * 1024;
681 static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value);
683 /* Kept as a member of JSThreadData; cannot use constructor/destructor. */
689 bool contains(T *t) const {
691     JS_ASSERT(size_t(-1) - uintptr_t(t) >= sizeof(T));
692     return v >= (char *)base && v + sizeof(T) <= (char *)end;
697 * When we LeaveTree, we need to rebuild the stack, which requires stack
698 * allocation. There is no good way to handle an OOM for these allocations,
699 * so this function checks that they cannot occur using the size of the
700 * TraceNativeStorage as a conservative upper bound.
702 inline bool ensureEnoughSpaceToEnterTrace();
704 /* +1 for slow native's stack frame. */
705 static const ptrdiff_t MAX_TRACE_SPACE_VALS =
706     MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME +
707     (VALUES_PER_STACK_SEGMENT + VALUES_PER_STACK_FRAME /* synthesized slow native */);
709 /* Mark all segments, frames, and slots on the stack. */
710 JS_REQUIRES_STACK void mark(JSTracer *trc);
713 * For all four use cases below:
714 * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
715 * - The "get*Frame" functions do not change any global state, they just
716 * check OOM and return pointers to an uninitialized frame with the
717 * requested missing arguments/slots. Only once the "push*Frame"
718 * function has been called is global state updated. Thus, between
719 * "get*Frame" and "push*Frame", the frame and slots are unrooted.
720 * - The "push*Frame" functions will set fp->down; the caller needn't.
721 * - Functions taking "*Guard" arguments will use the guard's destructor
722 * to pop the allocation. The caller must ensure the guard has the
723 * appropriate lifetime.
724 * - The get*Frame functions put the 'nmissing' slots contiguously after
729 * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
730 * the arguments to Invoke. A single allocation can be used for multiple
731 * Invoke calls. The InvokeArgsGuard passed to Invoke must come from
732 * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
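/*
 * Illustrative usage sketch (not part of the original header): the guard
 * owns the rooted argument slots and pops them when it goes out of scope.
 * The StackSpace is reached here via JS_THREAD_DATA(cx); the js::Invoke
 * signature and the way the callee is written through the CallArgs base are
 * assumptions for illustration only.
 *
 *   bool callWithNoArgs(JSContext *cx) {
 *       js::InvokeArgsGuard args;
 *       if (!JS_THREAD_DATA(cx)->stackSpace.pushInvokeArgs(cx, 0, args))
 *           return false;              // OOM/quota already reported
 *       // ... fill in callee/|this| through the CallArgs base ...
 *       // ... call js::Invoke(cx, args, 0) ...
 *       return true;                   // args pops its slots on destruction
 *   }
 */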
735 bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag);
737 /* These functions are called inside Invoke, not Invoke clients. */
738 bool getInvokeFrame(JSContext *cx, const CallArgs &args,
739                     uintN nmissing, uintN nfixed,
740                     InvokeFrameGuard &fg) const;
743 void pushInvokeFrame(JSContext *cx, const CallArgs &args, InvokeFrameGuard &fg);
746 * For the simpler case when arguments are allocated at the same time as
747 * the frame and it is not necessary to have rooted argument values before
751 bool getExecuteFrame(JSContext *cx, JSStackFrame *down,
752                      uintN vplen, uintN nfixed,
753                      FrameGuard &fg) const;
755 void pushExecuteFrame(JSContext *cx, FrameGuard &fg,
756                       JSFrameRegs &regs, JSObject *initialVarObj);
759 * Since RAII cannot be used for inline frames, callers must manually
760 * call pushInlineFrame/popInlineFrame.
763 inline JSStackFrame *getInlineFrame(JSContext *cx, Value *sp,
764                                     uintN nmissing, uintN nfixed) const;
767 inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc,
768                             JSStackFrame *newfp);
771 inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down);
774 * For the special case of the slow native stack frame pushed and popped by
775 * tracing deep bail logic.
778 void getSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *&seg, JSStackFrame *&fp);
781 void pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSFrameRegs &regs);
784 void popSynthesizedSlowNativeFrame(JSContext *cx);
787 * For pushing a bookkeeping frame.
790 bool pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain);
793 JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
796 * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved
797 * value of pc/sp for any other frame, it is necessary to know about that
798 * frame's up-frame. This iterator maintains this information when walking down
799 * a chain of stack frames starting at |cx->fp|.
802 * for (FrameRegsIter i(cx); !i.done(); ++i)
803 * ... i.fp() ... i.sp() ... i.pc()
807 StackSegment *curseg;
813 void incSlow(JSStackFrame *up, JSStackFrame *down);
814 static inline Value *contiguousDownFrameSP(JSStackFrame *up);
817 JS_REQUIRES_STACK inline FrameRegsIter(JSContext *cx);
819 bool done() const { return curfp == NULL; }
820 inline FrameRegsIter &operator++();
822 JSStackFrame *fp() const { return curfp; }
823 Value *sp() const { return cursp; }
824 jsbytecode *pc() const { return curpc; }
827 /* Holds the number of recording attempts for an address. */
828 typedef HashMap<jsbytecode*,
830                 DefaultHasher<jsbytecode*>,
831                 SystemAllocPolicy> RecordAttemptMap;
836 * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
837 * JS_THREADSAFE) has an associated trace monitor that keeps track of loop
838 * frequencies for all JavaScript code loaded into that runtime.
840 struct TraceMonitor
{
842 * The context currently executing JIT-compiled code on this thread, or
843 * NULL if none. Among other things, this can in certain cases prevent
844 * last-ditch GC and suppress calls to JS_ReportOutOfMemory.
846 * !tracecx && !recorder: not on trace
847 * !tracecx && recorder: recording
848 * tracecx && !recorder: executing a trace
849 * tracecx && recorder: executing inner loop, recording outer loop
854 * Cached storage to use when executing on trace. While we may enter nested
855 * traces, we always reuse the outer trace's storage, so never need more
858 TraceNativeStorage *storage;
861 * There are 5 allocators here. This might seem like overkill, but they
862 * have different lifecycles, and by keeping them separate we keep the
863 * amount of retained memory down significantly. They are flushed (ie.
864 * all the allocated memory is freed) periodically.
866 * - dataAlloc has the lifecycle of the monitor. It's flushed only when
867 * the monitor is flushed. It's used for fragments.
869 * - traceAlloc has the same flush lifecycle as the dataAlloc, but it is
870 * also *marked* when a recording starts and rewinds to the mark point
871 * if recording aborts. So you can put things in it that are only
872 * reachable on a successful record/compile cycle like GuardRecords and
875 * - tempAlloc is flushed after each recording, successful or not. It's
876 * used to store LIR code and for all other elements in the LIR
879 * - reTempAlloc is just like tempAlloc, but is used for regexp
880 * compilation in RegExpNativeCompiler rather than normal compilation in
883 * - codeAlloc has the same lifetime as dataAlloc, but its API is
884 * different (CodeAlloc vs. VMAllocator). It's used for native code.
885 * It's also a good idea to keep code and data separate to avoid I-cache
886 * vs. D-cache issues.
888 VMAllocator*            dataAlloc;
889 VMAllocator*            traceAlloc;
890 VMAllocator*            tempAlloc;
891 VMAllocator*            reTempAlloc;
892 nanojit::CodeAlloc*     codeAlloc;
893 nanojit::Assembler*     assembler;
894 FrameInfoCache*         frameCache;
897 TraceRecorder*          recorder;
899 GlobalState             globalStates[MONITOR_N_GLOBAL_STATES];
900 TreeFragment*           vmfragments[FRAGMENT_TABLE_SIZE];
901 RecordAttemptMap*       recordAttempts;
904 * Maximum size of the code cache before we start flushing. 1/16 of this
905 * size is used as threshold for the regular expression code cache.
907 uint32                  maxCodeCacheBytes;
910 * If nonzero, do not flush the JIT cache after a deep bail. That would
911 * free JITted code pages that we will later return to. Instead, set the
912 * needFlush flag so that it can be flushed later.
917 * Fragment map for the regular expression compiler.
919 REHashMap*              reFragments;
921 // Cached temporary typemap to avoid realloc'ing every time we create one.
922 // This must be used in only one place at a given time. It must be cleared
924 TypeMap*                cachedTempTypeMap;
927 /* Fields needed for fragment/guard profiling. */
928 nanojit::Seq<nanojit::Fragment*>* branches;
931 * profAlloc has a lifetime which spans exactly from js_InitJIT to
934 VMAllocator*            profAlloc;
935 FragStatsMap*           profTab;
938 /* Flush the JIT cache. */
941 /* Mark all objects baked into native code in the code cache. */
942 void mark(JSTracer *trc);
944 bool outOfMemory() const;
950 * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
951 * thread, regardless of whether cx is the context in which that trace is
952 * executing. cx must be a context on the current thread.
955 # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).tracecx != NULL)
957 # define JS_ON_TRACE(cx) JS_FALSE
960 /* Number of potentially reusable scriptsToGC to search for the eval cache. */
961 #ifndef JS_EVAL_CACHE_SHIFT
962 # define JS_EVAL_CACHE_SHIFT 6
964 #define JS_EVAL_CACHE_SIZE JS_BIT(JS_EVAL_CACHE_SHIFT)
967 # define EVAL_CACHE_METER_LIST(_) _(probe), _(hit), _(step), _(noscope)
968 # define identity(x) x
970 struct JSEvalCacheMeter {
971     uint64 EVAL_CACHE_METER_LIST(identity);
978 # define FUNCTION_KIND_METER_LIST(_) \
979 _(allfun), _(heavy), _(nofreeupvar), _(onlyfreevar), \
980 _(display), _(flat), _(setupvar), _(badfunarg), \
981 _(joinedsetmethod), _(joinedinitmethod), \
982 _(joinedreplace), _(joinedsort), _(joinedmodulepat), \
983 _(mreadbarrier), _(mwritebarrier), _(mwslotbarrier), \
985 # define identity(x) x
987 struct JSFunctionMeter {
988     int32 FUNCTION_KIND_METER_LIST(identity);
993 # define JS_FUNCTION_METER(cx,x) JS_RUNTIME_METER((cx)->runtime, functionMeter.x)
995 # define JS_FUNCTION_METER(cx,x) ((void)0)
999 #define NATIVE_ITER_CACHE_LOG2 8
1000 #define NATIVE_ITER_CACHE_MASK JS_BITMASK(NATIVE_ITER_CACHE_LOG2)
1001 #define NATIVE_ITER_CACHE_SIZE JS_BIT(NATIVE_ITER_CACHE_LOG2)
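/*
 * Illustrative note (not part of the original header): with
 * NATIVE_ITER_CACHE_LOG2 == 8 the cache holds JS_BIT(8) == 256 entries and
 * NATIVE_ITER_CACHE_MASK == 255, so a hash value is reduced to a cache
 * index by masking, e.g.
 *
 *   size_t bucket = hash & NATIVE_ITER_CACHE_MASK;  // always < NATIVE_ITER_CACHE_SIZE
 */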
1003 struct JSPendingProxyOperation {
1004     JSPendingProxyOperation *next;
1008 struct JSThreadData {
1010 * If this flag is set, we were asked to call back the operation callback
1011 * as soon as possible.
1013 volatile int32 operationCallbackFlag;
1015 JSGCFreeLists gcFreeLists;
1017 /* Keeper of the contiguous stack used by all contexts in this thread. */
1018 js::StackSpace stackSpace;
1021 * Flag indicating that we are waiving any soft limits on the GC heap
1022 * because we want allocations to be infallible (except when we hit
1028 * The GSN cache is per thread since even multi-cx-per-thread embeddings
1029 * do not interleave js_GetSrcNote calls.
1031 JSGSNCache gsnCache;
1033 /* Property cache for faster call/get/set invocation. */
1034 js::PropertyCache propertyCache;
1037 /* Trace-tree JIT recorder/interpreter state. */
1038 js::TraceMonitor traceMonitor;
1041 /* Lock-free hashed lists of scripts created by eval to garbage-collect. */
1042 JSScript *scriptsToGC[JS_EVAL_CACHE_SIZE];
1045 JSEvalCacheMeter evalCacheMeter;
1048 /* State used by dtoa.c. */
1049 DtoaState *dtoaState;
1052 * State used to cache some double-to-string conversions. A stupid
1053 * optimization aimed directly at v8-splay.js, which stupidly converts
1054 * many doubles multiple times in a row.
1059 JSString *s;  // if s==NULL, d and base are not valid
1062 /* Cached native iterators. */
1063 JSObject *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
1065 /* Base address of the native stack for the current thread. */
1066 jsuword *nativeStackBase;
1068 /* List of currently pending operations on proxies. */
1069 JSPendingProxyOperation *pendingProxyOperation;
1071 js::ConservativeGCThreadData conservativeGC;
1075 void mark(JSTracer *trc);
1076 void purge(JSContext *cx);
1078 void triggerOperationCallback() {
1080 * Use JS_ATOMIC_SET in the hope that it will make sure the write will
1081 * become immediately visible to other processors polling the flag.
1082 * Note that we only care about visibility here, not read/write
1085     JS_ATOMIC_SET(&operationCallbackFlag, 1);
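/*
 * Illustrative sketch (not part of the original header; the polling helper
 * below is hypothetical): other threads only poll the flag, so plain
 * visibility is all the atomic set above needs to provide.
 *
 *   bool operationCallbackRequested(const JSThreadData *data) {
 *       return data->operationCallbackFlag != 0;   // read without locking
 *   }
 */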
1089 #ifdef JS_THREADSAFE
1092 * Structure uniquely representing a thread. It holds thread-private data
1093 * that can be accessed without a global lock.
1096 typedef js::HashMap<void *,
1098                     js::DefaultHasher<void *>,
1099                     js::SystemAllocPolicy> Map;
1101 /* Linked list of all contexts in use on this thread. */
1102 JSCList contextList;
1104 /* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */
1107 /* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */
1108 JSTitle *titleToShare;
1111 * Thread-local version of JSRuntime.gcMallocBytes to avoid taking
1112 * locks on each JS_malloc.
1114 ptrdiff_t gcThreadMallocBytes;
1117 * This thread is inside js_GC, either waiting until it can start GC, or
1118 * waiting for GC to finish on another thread. This thread holds no locks;
1119 * other threads may steal titles from it.
1121 * Protected by rt->gcLock.
1126 * The context running the requests.
1128 JSContext *requestContext;
1130 /* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
1135 * Only when JSThread::gcThreadMallocBytes exhausts the following limit do we
1136 * update JSRuntime::gcMallocBytes.
1139 const size_t JS_GC_THREAD_MALLOC_LIMIT = 1 << 19;
1141 #define JS_THREAD_DATA(cx) (&(cx)->thread->data)
1144 js_CurrentThread(JSRuntime *rt);
1147 * The function takes the GC lock and does not release in successful return.
1148 * On error (out of memory) the function releases the lock but delegates
1149 * the error reporting to the caller.
1152 js_InitContextThread(JSContext *cx);
1155 * On entrance the GC lock must be held and it will be held on exit.
1158 js_ClearContextThread(JSContext *cx);
1160 #endif /* JS_THREADSAFE */
1162 typedef enum JSDestroyContextMode {
1167 } JSDestroyContextMode;
1169 typedef enum JSRuntimeState {
1176 typedef struct JSPropertyTreeEntry {
1177     JSDHashEntryHdr hdr;
1178     JSScopeProperty *child;
1179 } JSPropertyTreeEntry;
1186 typedef void *Lookup;
1188 static HashNumber hash(void *key) {
1189     return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS);
1192 static bool match(void *l, void *k) {
1197 typedef HashMap<void *, uint32, GCPtrHasher, SystemAllocPolicy> GCLocks;
1201 RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
1206 typedef js::HashMap<void *,
1208                     js::DefaultHasher<void *>,
1209                     js::SystemAllocPolicy> RootedValueMap;
1211 /* If HashNumber grows, need to change WrapperHasher. */
1212 JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
1214 struct WrapperHasher
1216 typedef Value Lookup;
1218 static HashNumber hash(Value key) {
1219     uint64 bits = JSVAL_BITS(Jsvalify(key));
1220     return (uint32)bits ^ (uint32)(bits >> 32);
1223 static bool match(const Value &l, const Value &k) {
1228 typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
1230 class AutoValueVector;
1233 } /* namespace js */
1235 struct JSCompartment {
1237 JSPrincipals *principals;
1240 js::WrapperMap crossCompartmentWrappers;
1242 JSCompartment(JSRuntime *cx);
1247 bool wrap(JSContext *cx, js::Value *vp);
1248 bool wrap(JSContext *cx, JSString **strp);
1249 bool wrap(JSContext *cx, JSObject **objp);
1250 bool wrapId(JSContext *cx, jsid *idp);
1251 bool wrap(JSContext *cx, js::PropertyOp *op);
1252 bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
1253 bool wrap(JSContext *cx, js::AutoIdVector &props);
1254 bool wrapException(JSContext *cx);
1256 void sweep(JSContext *cx);
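/*
 * Illustrative sketch (not part of the original header; the enclosing
 * function is hypothetical): before a value produced in one compartment is
 * handed to code running in another, the destination compartment rewrites
 * it in place, creating a cross-compartment wrapper when needed.
 *
 *   bool importValue(JSContext *cx, js::Value *vp) {
 *       return cx->compartment->wrap(cx, vp);   // wraps *vp for cx's compartment
 *   }
 */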
1260 (* JSActivityCallback)(void *arg, JSBool active);
1263 /* Default compartment. */
1264 JSCompartment *defaultCompartment;
1266 /* List of compartments (protected by the GC lock). */
1267 js::Vector<JSCompartment *, 0, js::SystemAllocPolicy> compartments;
1269 /* Runtime state, synchronized by the stateChange/gcLock condvar/lock. */
1270 JSRuntimeState state;
1272 /* Context create/destroy callback. */
1273 JSContextCallback cxCallback;
1275 /* Compartment create/destroy callback. */
1276 JSCompartmentCallback compartmentCallback;
1279 * Sets a callback that is run whenever the runtime goes idle (the last
1280 * active request ceases) and whenever it begins activity again (a request
1281 * begins while it was idle). Note: The callback is called under the
1284 void setActivityCallback(JSActivityCallback cb, void *arg) {
1285     activityCallback = cb;
1286     activityCallbackArg = arg;
1289 JSActivityCallback activityCallback;
1290 void *activityCallbackArg;
1293 * Shape regenerated whenever a prototype implicated by an "add property"
1294 * property cache fill and induced trace guard has a readonly property or a
1295 * setter defined on it. This number proxies for the shapes of all objects
1296 * along the prototype chain of all objects in the runtime on which such an
1297 * add-property result has been cached/traced.
1299 * See bug 492355 for more details.
1301 * This comes early in JSRuntime to minimize the immediate format used by
1302 * trace-JITted code that reads it.
1304 uint32 protoHazardShape;
1306 /* Garbage collector state, used by jsgc.c. */
1307 js::GCChunkSet gcChunkSet;
1309 /* GC chunks with at least one free arena. */
1310 js::GCChunkInfoVector gcFreeArenaChunks;
1312 JSGCArena *gcEmptyArenaList;
1314 JSGCArenaList gcArenaList[FINALIZE_LIMIT];
1315 js::RootedValueMap gcRootsHash;
1316 js::GCLocks gcLocksHash;
1317 jsrefcount gcKeepAtoms;
1321 size_t gcMaxMallocBytes;
1322 uint32 gcEmptyArenaPoolLifespan;
1324 js::GCMarker *gcMarkingTracer;
1325 uint32 gcTriggerFactor;
1326 size_t gcTriggerBytes;
1327 volatile JSBool gcIsNeeded;
1328 volatile JSBool gcFlushCodeCaches;
1331 * NB: do not pack another flag here by claiming gcPadding unless the new
1332 * flag is written only by the GC thread. Atomic updates to packed bytes
1333 * are not guaranteed, so stores issued by one thread may be lost due to
1334 * unsynchronized read-modify-write cycles on other threads.
1336 JSPackedBool gcPoke;
1337 JSPackedBool gcRunning;
1338 JSPackedBool gcRegenShapes;
1341 * During gc, if rt->gcRegenShapes &&
1342 * (scope->flags & JSScope::SHAPE_REGEN) == rt->gcRegenShapesScopeFlag,
1343 * then the scope's shape has already been regenerated during this GC.
1344 * To avoid having to sweep JSScopes, the bit's meaning toggles with each
1345 * shape-regenerating GC.
1347 * FIXME Once scopes are GC'd (bug 505004), this will be obsolete.
1349 uint8 gcRegenShapesScopeFlag;
1355 JSGCCallback gcCallback;
1358 * Malloc counter to measure memory pressure for GC scheduling. It runs
1359 * from gcMaxMallocBytes down to zero.
1361 ptrdiff_t gcMallocBytes;
1363 #ifdef JS_THREADSAFE
1364 JSBackgroundThread gcHelperThread;
1367 js::GCChunkAllocator *gcChunkAllocator;
1369 void setCustomGCChunkAllocator(js::GCChunkAllocator *allocator) {
1370     JS_ASSERT(allocator);
1371     JS_ASSERT(state == JSRTS_DOWN);
1372     gcChunkAllocator = allocator;
1376 * The trace operation and its data argument to trace embedding-specific
1379 JSTraceDataOp gcExtraRootsTraceOp;
1380 void *gcExtraRootsData;
1382 /* Well-known numbers held for use by this runtime's contexts. */
1384 js::Value negativeInfinityValue;
1385 js::Value positiveInfinityValue;
1387 js::DeflatedStringCache *deflatedStringCache;
1389 JSString *emptyString;
1391 /* List of active contexts sharing this runtime; protected by gcLock. */
1392 JSCList contextList;
1394 /* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */
1395 JSDebugHooks globalDebugHooks;
1398 /* True if any debug hooks not supported by the JIT are enabled. */
1399 bool debuggerInhibitsJIT() const {
1400     return (globalDebugHooks.interruptHook ||
1401             globalDebugHooks.callHook);
1405 /* More debugging state, see jsdbgapi.c. */
1407 JSCList watchPointList;
1409 /* Client opaque pointers */
1412 #ifdef JS_THREADSAFE
1413 /* These combine to interlock the GC and new requests. */
1416 PRCondVar *requestDone;
1417 uint32 requestCount;
1420 /* Lock and owning thread pointer for JS_LOCK_RUNTIME. */
1426 /* Used to synchronize down/up state change; protected by gcLock. */
1427 PRCondVar *stateChange;
1430 * State for sharing single-threaded titles, once a second thread tries to
1431 * lock a title. The titleSharingDone condvar is protected by rt->gcLock
1432 * to minimize number of locks taken in JS_EndRequest.
1434 * The titleSharingTodo linked list is likewise "global" per runtime, not
1435 * one-list-per-context, to conserve space over all contexts, optimizing
1436 * for the likely case that titles become shared rarely, and among a very
1437 * small set of threads (contexts).
1439 PRCondVar *titleSharingDone;
1440 JSTitle *titleSharingTodo;
1443 * Magic terminator for the rt->titleSharingTodo linked list, threaded through
1444 * title->u.link. This hack allows us to test whether a title is on the list
1445 * by asking whether title->u.link is non-null. We use a large, likely bogus
1446 * pointer here to distinguish this value from any valid u.count (small int)
1449 #define NO_TITLE_SHARING_TODO ((JSTitle *) 0xfeedbeef)
1452 * Lock serializing trapList and watchPointList accesses, and count of all
1453 * mutations to trapList and watchPointList made by debugger threads. To
1454 * keep the code simple, we define debuggerMutations for the thread-unsafe
1457 PRLock *debuggerLock;
1459 JSThread::Map threads;
1460 #endif /* JS_THREADSAFE */
1461 uint32 debuggerMutations;
1464 * Security callbacks set on the runtime are used by each context unless
1465 * an override is set on the context.
1467 JSSecurityCallbacks *securityCallbacks;
1470 * Shared scope property tree, and arena-pool for allocating its nodes.
1471 * This really should be free of all locking overhead and allocated in
1472 * thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
1474 js::PropertyTree propertyTree;
1476 #define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
1479 * The propertyRemovals counter is incremented for every JSScope::clear,
1480 * and for each JSScope::remove method call that frees a slot in an object.
1481 * See js_NativeGet and js_NativeSet in jsobj.cpp.
1483 int32 propertyRemovals;
1485 /* Script filename table. */
1486 struct JSHashTable *scriptFilenameTable;
1487 JSCList scriptFilenamePrefixes;
1488 #ifdef JS_THREADSAFE
1489 PRLock *scriptFilenameTableLock;
1492 /* Number localization, used by jsnum.c */
1493 const char *thousandsSeparator;
1494 const char *decimalSeparator;
1495 const char *numGrouping;
1498 * Weak references to lazily-created, well-known XML singletons.
1500 * NB: Singleton objects must be carefully disconnected from the rest of
1501 * the object graph usually associated with a JSContext's global object,
1502 * including the set of standard class objects. See jsxml.c for details.
1504 JSObject *anynameObject;
1505 JSObject *functionNamespaceObject;
1507 #ifndef JS_THREADSAFE
1508 JSThreadData threadData;
1510 #define JS_THREAD_DATA(cx) (&(cx)->runtime->threadData)
1514 * Object shape (property cache structural type) identifier generator.
1516 * Type 0 stands for the empty scope, and must not be regenerated due to
1517 * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
1518 * atomic pre-increment, the initial value for the first typed non-empty
1521 * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
1522 * cache is disabled, to avoid aliasing two different types. It stays
1523 * disabled until a triggered GC at some later moment compresses live
1524 * types, minimizing rt->shapeGen in the process.
1526 volatile uint32 shapeGen;
1528 /* Literal table maintained by jsatom.c functions. */
1529 JSAtomState atomState;
1532 * Runtime-shared empty scopes for well-known built-in objects that lack
1533 * class prototypes (the usual locus of an emptyScope). Mnemonic: ABCDEW
1535 JSEmptyScope *emptyArgumentsScope;
1536 JSEmptyScope *emptyBlockScope;
1537 JSEmptyScope *emptyCallScope;
1538 JSEmptyScope *emptyDeclEnvScope;
1539 JSEmptyScope *emptyEnumeratorScope;
1540 JSEmptyScope *emptyWithScope;
1543 * Various metering fields are defined at the end of JSRuntime. In this
1544 * way there is no need to recompile all the code that refers to other
1545 * fields of JSRuntime after enabling the corresponding metering macro.
1547 #ifdef JS_DUMP_ENUM_CACHE_STATS
1548 int32 nativeEnumProbes;
1549 int32 nativeEnumMisses;
1550 # define ENUM_CACHE_METER(name) JS_ATOMIC_INCREMENT(&cx->runtime->name)
1552 # define ENUM_CACHE_METER(name) ((void) 0)
1555 #ifdef JS_DUMP_LOOP_STATS
1556 /* Loop statistics, to trigger trace recording and compiling. */
1557 JSBasicStats loopStats;
1561 /* Function invocation metering. */
1562 jsrefcount inlineCalls;
1563 jsrefcount nativeCalls;
1564 jsrefcount nonInlineCalls;
1565 jsrefcount constructs;
1567 /* Title lock and scope property metering. */
1568 jsrefcount claimAttempts;
1569 jsrefcount claimedTitles;
1570 jsrefcount deadContexts;
1571 jsrefcount deadlocksAvoided;
1572 jsrefcount liveScopes;
1573 jsrefcount sharedTitles;
1574 jsrefcount totalScopes;
1575 jsrefcount liveScopeProps;
1576 jsrefcount liveScopePropsPreSweep;
1577 jsrefcount totalScopeProps;
1578 jsrefcount livePropTreeNodes;
1579 jsrefcount duplicatePropTreeNodes;
1580 jsrefcount totalPropTreeNodes;
1581 jsrefcount propTreeKidsChunks;
1583 /* String instrumentation. */
1584 jsrefcount liveStrings;
1585 jsrefcount totalStrings;
1586 jsrefcount liveDependentStrings;
1587 jsrefcount totalDependentStrings;
1588 jsrefcount badUndependStrings;
1590 double lengthSquaredSum;
1591 double strdepLengthSum;
1592 double strdepLengthSquaredSum;
1594 /* Script instrumentation. */
1595 jsrefcount liveScripts;
1596 jsrefcount totalScripts;
1597 jsrefcount liveEmptyScripts;
1598 jsrefcount totalEmptyScripts;
1601 #ifdef JS_SCOPE_DEPTH_METER
1603 * Stats on runtime prototype chain lookups and scope chain depths, i.e.,
1604 * counts of objects traversed on a chain until the wanted id is found.
1606 JSBasicStats protoLookupDepthStats;
1607 JSBasicStats scopeSearchDepthStats;
1610 * Stats on compile-time host environment and lexical scope chain lengths
1613 JSBasicStats hostenvScopeDepthStats;
1614 JSBasicStats lexicalScopeDepthStats;
1619 JSGCArenaStats gcArenaStats[FINALIZE_LIMIT];
1624 * If functionMeterFilename, set from an envariable in JSRuntime's ctor, is
1625 * null, the remaining members in this ifdef'ed group are not initialized.
1627 const char *functionMeterFilename;
1628 JSFunctionMeter functionMeter;
1629 char lastScriptFilename[1024];
1631 typedef js::HashMap<JSFunction *,
1633                     js::DefaultHasher<JSFunction *>,
1634                     js::SystemAllocPolicy> FunctionCountMap;
1636 FunctionCountMap methodReadBarrierCountMap;
1637 FunctionCountMap unjoinedFunctionCountMap;
1640 JSWrapObjectCallback wrapObjectCallback;
1642 JSC::ExecutableAllocator *regExpAllocator;
1647 bool init(uint32 maxbytes);
1649 void setGCTriggerFactor(uint32 factor);
1650 void setGCLastBytes(size_t lastBytes);
1652 void* malloc(size_t bytes) { return ::js_malloc(bytes); }
1654 void* calloc(size_t bytes) { return ::js_calloc(bytes); }
1656 void* realloc(void* p, size_t bytes) { return ::js_realloc(p, bytes); }
1658 void free(void* p) { ::js_free(p); }
1660 bool isGCMallocLimitReached() const { return gcMallocBytes <= 0; }
1662 void resetGCMallocBytes() { gcMallocBytes = ptrdiff_t(gcMaxMallocBytes); }
1664 void setGCMaxMallocBytes(size_t value) {
1666     * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
1669     gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
1670     resetGCMallocBytes();
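/*
 * Illustrative sketch (not part of the original header; updateMallocCounter
 * is a hypothetical helper): gcMallocBytes counts down from
 * gcMaxMallocBytes, so "limit reached" is simply the counter crossing zero.
 *
 *   void updateMallocCounter(JSRuntime *rt, size_t nbytes) {
 *       rt->gcMallocBytes -= ptrdiff_t(nbytes);
 *       if (rt->isGCMallocLimitReached()) {
 *           // memory pressure: a GC should be scheduled and the counter reset
 *           rt->resetGCMallocBytes();
 *       }
 *   }
 */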
1674 /* Common macros to access thread-local caches in JSThread or JSRuntime. */
1675 #define JS_GSN_CACHE(cx) (JS_THREAD_DATA(cx)->gsnCache)
1676 #define JS_PROPERTY_CACHE(cx) (JS_THREAD_DATA(cx)->propertyCache)
1677 #define JS_TRACE_MONITOR(cx) (JS_THREAD_DATA(cx)->traceMonitor)
1678 #define JS_SCRIPTS_TO_GC(cx) (JS_THREAD_DATA(cx)->scriptsToGC)
1681 # define EVAL_CACHE_METER(x) (JS_THREAD_DATA(cx)->evalCacheMeter.x++)
1683 # define EVAL_CACHE_METER(x) ((void) 0)
1687 # define JS_RUNTIME_METER(rt, which) JS_ATOMIC_INCREMENT(&(rt)->which)
1688 # define JS_RUNTIME_UNMETER(rt, which) JS_ATOMIC_DECREMENT(&(rt)->which)
1690 # define JS_RUNTIME_METER(rt, which) /* nothing */
1691 # define JS_RUNTIME_UNMETER(rt, which) /* nothing */
1694 #define JS_KEEP_ATOMS(rt) JS_ATOMIC_INCREMENT(&(rt)->gcKeepAtoms);
1695 #define JS_UNKEEP_ATOMS(rt) JS_ATOMIC_DECREMENT(&(rt)->gcKeepAtoms);
1697 #ifdef JS_ARGUMENT_FORMATTER_DEFINED
1699 * Linked list mapping format strings for JS_{Convert,Push}Arguments{,VA} to
1700 * formatter functions. Elements are sorted in non-increasing format string
1703 struct JSArgumentFormatMap {
1706     JSArgumentFormatter formatter;
1707     JSArgumentFormatMap *next;
1712 * Key and entry types for the JSContext.resolvingTable hash table, typedef'd
1713 * here because all consumers need to see these declarations (and not just the
1714 * typedef names, as would be the case for an opaque pointer-to-typedef'd-type
1715 * declaration), along with cx->resolvingTable.
1717 typedef struct JSResolvingKey {
1722 typedef struct JSResolvingEntry {
1723     JSDHashEntryHdr hdr;
1728 #define JSRESFLAG_LOOKUP 0x1 /* resolving id from lookup */
1729 #define JSRESFLAG_WATCH 0x2 /* resolving id from watch */
1730 #define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */
1732 extern const JSDebugHooks js_NullDebugHooks;  /* defined in jsdbgapi.cpp */
1744 js::Vector<int, 20> matchPairs;
1749 bool createDependent(size_t start, size_t end, Value *out) const;
1751 size_t pairCount() const {
1752     JS_ASSERT(matchPairs.length() % 2 == 0);
1753     return matchPairs.length() / 2;
1756 * Check whether the index at |checkValidIndex| is valid (>= 0).
1757 * If so, construct a string for it and place it in |*out|.
1758 * If not, place undefined in |*out|.
1760 bool makeMatch(size_t checkValidIndex, size_t pairNum, Value *out) const;
1761 static const uintN allFlags = JSREG_FOLD | JSREG_GLOB | JSREG_STICKY | JSREG_MULTILINE;
1762 friend class RegExp;
1765 explicit RegExpStatics(JSContext *cx) : matchPairs(cx), cx(cx) { clear(); }
1766 void clone(const RegExpStatics &other);
1770 void setMultiline(bool enabled) {
1772     flags = flags | JSREG_MULTILINE;
1774     flags = flags & ~JSREG_MULTILINE;
1783 void checkInvariants() {
1784 if (pairCount() > 0) {
1786 JS_ASSERT(get(0, 0) <= get(0, 1));
1787         JS_ASSERT(get(0, 1) <= int(input->length()));
1791 void reset(JSString *newInput, bool newMultiline) {
1794     setMultiline(newMultiline);
1798 void setInput(JSString *newInput) {
1804 JSString *getInput() const { return input; }
1805 uintN getFlags() const { return flags; }
1806 bool multiline() const { return flags & JSREG_MULTILINE; }
1807 bool matched() const { JS_ASSERT(pairCount() > 0); return get(0, 1) - get(0, 0) > 0; }
1808 size_t getParenCount() const { JS_ASSERT(pairCount() > 0); return pairCount() - 1; }
1810 void mark(JSTracer *trc) const {
1812     JS_CALL_STRING_TRACER(trc, input, "res->input");
1815 size_t getParenLength(size_t parenNum) const {
1816     if (pairCount() <= parenNum + 1)
1818     return get(parenNum + 1, 1) - get(parenNum + 1, 0);
1821 int get(size_t pairNum, bool which) const {
1822     JS_ASSERT(pairNum < pairCount());
1823     return matchPairs[2 * pairNum + which];
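/*
 * Illustrative sketch (not part of the original header): matchPairs stores
 * each (start, limit) pair flat, so the whole match is pair 0 and capturing
 * paren n is pair n + 1. For a hypothetical result of /(a)(b*)/ matching
 * "ab" at offset 0, the vector would hold:
 *
 *   matchPairs = { 0, 2,    // pair 0: whole match [0, 2)
 *                  0, 1,    // pair 1: paren 1 "(a)"
 *                  1, 2 };  // pair 2: paren 2 "(b*)"
 *   // get(1, 0) == 0, get(1, 1) == 1, getParenLength(0) == 1
 */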
1826 /* Value creators. */
1828 bool createInput(Value *out) const;
1829 bool createLastMatch(Value *out) const { return makeMatch(0, 0, out); }
1830 bool createLastParen(Value *out) const;
1831 bool createLeftContext(Value *out) const;
1832 bool createRightContext(Value *out) const;
1834 bool createParen(size_t parenNum, Value *out) const {
1835     return makeMatch((parenNum + 1) * 2, parenNum + 1, out);
1838 /* Substring creators. */
1840 void getParen(size_t num, JSSubString *out) const;
1841 void getLastMatch(JSSubString *out) const;
1842 void getLastParen(JSSubString *out) const;
1843 void getLeftContext(JSSubString *out) const;
1844 void getRightContext(JSSubString *out) const;
1851 explicit JSContext(JSRuntime *rt);
1853 /* JSRuntime contextList linkage. */
1856 /* Runtime version control identifier. */
1859 /* Per-context options. */
1860 uint32 options;  /* see jsapi.h for JSOPTION_* */
1862 /* Locale specific callbacks for string conversion. */
1863 JSLocaleCallbacks *localeCallbacks;
1866 * cx->resolvingTable is non-null and non-empty if we are initializing
1867 * standard classes lazily, or if we are otherwise recursing indirectly
1868 * from js_LookupProperty through a Class.resolve hook. It is used to
1869 * limit runaway recursion (see jsapi.c and jsobj.c).
1871 JSDHashTable *resolvingTable;
1874 * True if generating an error, to prevent runaway recursion.
1875 * NB: generatingError packs with throwing below.
1877 JSPackedBool generatingError;
1879 /* Exception state -- the exception member is a GC root by definition. */
1880 JSPackedBool throwing;     /* is there a pending exception? */
1881 js::Value exception;       /* most-recently-thrown exception */
1883 /* Limit pointer for checking native stack consumption during recursion. */
1886 /* Quota on the size of arenas used to compile and execute scripts. */
1887 size_t scriptStackQuota;
1889 /* Data shared by threads in an address space. */
1890 JSRuntime *const runtime;
1892 /* GC heap compartment. */
1893 JSCompartment *compartment;
1895 /* Currently executing frame and regs, set by stack operations. */
1899 /* Current frame accessors. */
1901 JSStackFrame* fp() {
1902     JS_ASSERT(regs && regs->fp);
1906 JSStackFrame* maybefp() {
1907     JS_ASSERT_IF(regs, regs->fp);
1908     return regs ? regs->fp : NULL;
1912     JS_ASSERT_IF(regs, regs->fp);
1917 friend class js::StackSpace;
1918 friend bool js::Interpret(JSContext *);
1920 /* 'regs' must only be changed by calling this function. */
1921 void setCurrentRegs(JSFrameRegs *regs) {
1926 /* Temporary arena pool used while compiling and decompiling. */
1927 JSArenaPool tempPool;
1929 /* Temporary arena pool used while evaluating regular expressions. */
1930 JSArenaPool regExpPool;
1932 /* Top-level object and pointer to top stack frame's scope chain. */
1933 JSObject *globalObject;
1935 /* Regular expression class statics. */
1936 js::RegExpStatics regExpStatics;
1938 /* State for object and array toSource conversion. */
1939 JSSharpObjectMap sharpObjectMap;
1940 js::HashSet<JSObject *> busyArrays;
1942 /* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */
1943 JSArgumentFormatMap *argumentFormatMap;
1945 /* Last message string and trace file for debugging. */
1949 jsbytecode *tracePrevPc;
1952 /* Per-context optional error reporter. */
1953 JSErrorReporter errorReporter;
1955 /* Branch callback. */
1956 JSOperationCallback operationCallback;
1958 /* Interpreter activation count. */
1961 /* Client opaque pointers. */
1966 /* Linked list of segments. See StackSegment. */
1967 js::StackSegment *currentSegment;
1970 void assertSegmentsInSync() const {
1973     JS_ASSERT(currentSegment->isActive());
1974     if (js::StackSegment *prev = currentSegment->getPreviousInContext())
1975         JS_ASSERT(!prev->isActive());
1977     JS_ASSERT_IF(currentSegment, !currentSegment->isActive());
1982 /* Return whether this context has an active segment. */
1983 bool hasActiveSegment() const {
1984     assertSegmentsInSync();
1988 /* Assuming there is an active segment, return it. */
1989 js::StackSegment *activeSegment() const {
1990     JS_ASSERT(hasActiveSegment());
1991     return currentSegment;
1994 /* Return the current segment, which may or may not be active. */
1995 js::StackSegment *getCurrentSegment() const {
1996     assertSegmentsInSync();
1997     return currentSegment;
    /* Add the given segment to the list as the new active segment. */
    void pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &regs);

    /* Remove the active segment and make the next segment active. */
    void popSegmentAndFrame();

    /* Mark the top segment as suspended, without pushing a new one. */
    void saveActiveSegment();

    /* Undoes calls to saveActiveSegment. */
    void restoreSegment();

    /*
     * Perform a linear search of all frames in all segments in the given context
     * for the given frame, returning the segment, if found, and null otherwise.
     */
    js::StackSegment *containingSegment(const JSStackFrame *target);
    /*
     * Search the call stack for the nearest frame with static level targetLevel.
     */
    JSStackFrame *findFrameAtLevel(uintN targetLevel) {
        JSStackFrame *fp = this->regs->fp;
        while (true) {
            JS_ASSERT(fp && fp->hasScript());
            if (fp->getScript()->staticLevel == targetLevel)
                break;
            fp = fp->down;
        }
        return fp;
    }
#ifdef JS_THREADSAFE
    JSThread            *thread;
    jsrefcount          requestDepth;
    /* Same as requestDepth but ignoring JS_SuspendRequest/JS_ResumeRequest */
    jsrefcount          outstandingRequests;
    JSContext           *prevRequestContext;
    jsrefcount          prevRequestDepth;
#ifdef DEBUG
    unsigned            checkRequestDepth;
#endif

    JSTitle             *lockedSealedTitle; /* weak ref, for low-cost sealed
                                               title locking */
    JSCList             threadLinks;        /* JSThread contextList linkage */

#define CX_FROM_THREAD_LINKS(tl) \
    ((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
#endif
    /* Stack of thread-stack-allocated GC roots. */
    js::AutoGCRooter    *autoGCRooters;

    /* Debug hooks associated with the current context. */
    const JSDebugHooks  *debugHooks;

    /* Security callbacks that override any defined on the runtime. */
    JSSecurityCallbacks *securityCallbacks;

    /* Stored here to avoid passing it around as a parameter. */
    uintN               resolveFlags;

    /* Random number generator state, used by jsmath.cpp. */
    int64               rngSeed;

    /* Location to stash the iteration value between JSOP_MOREITER and JSOP_FOR*. */
    js::Value           iterValue;
    /*
     * State for the current tree execution. bailExit is valid if the tree has
     * called back into native code via a _FAIL builtin and has not yet bailed,
     * else garbage (NULL in debug builds).
     */
    js::TracerState     *tracerState;
    js::VMSideExit      *bailExit;

    /*
     * True if traces may be executed. Invariant: The value of jitEnabled is
     * always equal to the expression in updateJITEnabled below.
     *
     * This flag and the fields accessed by updateJITEnabled are written only
     * while holding runtime->gcLock, to avoid race conditions that would leave
     * the wrong value in jitEnabled. (But the interpreter reads this without
     * locking. That can race against another thread setting debug hooks, but
     * we always read cx->debugHooks without locking anyway.)
     */
    bool                jitEnabled;
    /* Caller must be holding runtime->gcLock. */
    void updateJITEnabled() {
        jitEnabled = ((options & JSOPTION_JIT) &&
                      (debugHooks == &js_NullDebugHooks ||
                       (debugHooks == &runtime->globalDebugHooks &&
                        !runtime->debuggerInhibitsJIT())));
    }

#ifdef MOZ_TRACE_JSCALLS
    /* Function entry/exit debugging callback. */
    JSFunctionCallback  functionCallback;

    void doFunctionCallback(const JSFunction *fun,
                            const JSScript *scr,
                            JSBool entering) const
    {
        if (functionCallback)
            functionCallback(fun, scr, this, entering);
    }
#endif

    DSTOffsetCache      dstOffsetCache;

    /* List of currently active non-escaping enumerators (for-in). */
    JSObject            *enumerators;
  private:
    /*
     * To go from a live generator frame (on the stack) to its generator object
     * (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
     * generators, pushing and popping when entering and leaving generator
     * frames, respectively.
     */
    js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;

  public:
    /* Return the generator object for the given generator frame. */
    JSGenerator *generatorFor(JSStackFrame *fp) const;

    /* Early OOM-check. */
    inline bool ensureGeneratorStackSpace();

    bool enterGenerator(JSGenerator *gen) {
        return genStack.append(gen);
    }

    void leaveGenerator(JSGenerator *gen) {
        JS_ASSERT(genStack.back() == gen);
        genStack.popBack();
    }
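    /*
     * Illustrative pairing (sketch only): code that activates a generator
     * frame is expected to bracket execution with the two calls above, e.g.
     *
     *   if (!cx->enterGenerator(gen))               // OOM check on genStack
     *       return JS_FALSE;
     *   JSBool ok = ResumeGeneratorFrame(cx, gen);  // hypothetical helper
     *   cx->leaveGenerator(gen);                    // must match the enter above
     *   return ok;
     */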
#ifdef JS_THREADSAFE
    /*
     * The sweep task for this context.
     */
    js::BackgroundSweepTask *gcSweepTask;
#endif

    ptrdiff_t &getMallocCounter() {
#ifdef JS_THREADSAFE
        return thread->gcThreadMallocBytes;
#else
        return runtime->gcMallocBytes;
#endif
    }

    /*
     * Call this after allocating memory held by GC things, to update memory
     * pressure counters or report the OOM error if necessary.
     */
    inline void updateMallocCounter(void *p, size_t nbytes) {
        JS_ASSERT(ptrdiff_t(nbytes) >= 0);
        ptrdiff_t &counter = getMallocCounter();
        counter -= ptrdiff_t(nbytes);
        if (!p || counter <= 0)
            checkMallocGCPressure(p);
    }
    /*
     * Call this after successfully allocating memory held by GC things, to
     * update memory pressure counters.
     */
    inline void updateMallocCounter(size_t nbytes) {
        JS_ASSERT(ptrdiff_t(nbytes) >= 0);
        ptrdiff_t &counter = getMallocCounter();
        counter -= ptrdiff_t(nbytes);
        if (counter <= 0) {
            /*
             * Use 1 as an arbitrary non-null pointer indicating successful
             * allocation.
             */
            checkMallocGCPressure(reinterpret_cast<void *>(jsuword(1)));
        }
    }

    inline void* malloc(size_t bytes) {
        JS_ASSERT(bytes != 0);
        void *p = runtime->malloc(bytes);
        updateMallocCounter(p, bytes);
        return p;
    }
    inline void* mallocNoReport(size_t bytes) {
        JS_ASSERT(bytes != 0);
        void *p = runtime->malloc(bytes);
        if (!p)
            return NULL;
        updateMallocCounter(bytes);
        return p;
    }

    inline void* calloc(size_t bytes) {
        JS_ASSERT(bytes != 0);
        void *p = runtime->calloc(bytes);
        updateMallocCounter(p, bytes);
        return p;
    }

    inline void* realloc(void* p, size_t bytes) {
        void *orig = p;
        p = runtime->realloc(p, bytes);

        /*
         * For compatibility we do not account for realloc that increases
         * previously allocated memory.
         */
        updateMallocCounter(p, orig ? 0 : bytes);
        return p;
    }

    inline void free(void* p) {
#ifdef JS_THREADSAFE
        if (gcSweepTask) {
            gcSweepTask->freeLater(p);
            return;
        }
#endif
        runtime->free(p);
    }
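    /*
     * Typical pairing (sketch only): memory that should feed the GC's
     * pressure accounting goes through the cx allocator rather than the
     * system one, e.g.
     *
     *   char *buf = (char *) cx->malloc(len + 1);   // reports OOM for us
     *   if (!buf)
     *       return JS_FALSE;
     *   ...
     *   cx->free(buf);    // may defer to the background sweep task
     */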
    /*
     * In the common case that we'd like to allocate the memory for an object
     * with cx->malloc/free, we cannot use overloaded C++ operators (no
     * placement delete). Factor the common workaround into one place.
     */
#define CREATE_BODY(parms)                                                    \
    void *memory = this->malloc(sizeof(T));                                   \
    if (!memory)                                                              \
        return NULL;                                                          \
    return new(memory) T parms;

    template <class T>
    JS_ALWAYS_INLINE T *create() {
        CREATE_BODY(())
    }

    template <class T, class P1>
    JS_ALWAYS_INLINE T *create(const P1 &p1) {
        CREATE_BODY((p1))
    }

    template <class T, class P1, class P2>
    JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2) {
        CREATE_BODY((p1, p2))
    }

    template <class T, class P1, class P2, class P3>
    JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2, const P3 &p3) {
        CREATE_BODY((p1, p2, p3))
    }
#undef CREATE_BODY

    template <class T>
    JS_ALWAYS_INLINE void destroy(T *p) {
        p->~T();
        this->free(p);
    }
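    /*
     * Usage sketch (illustrative only; MyThing is a hypothetical C++ type):
     *
     *   MyThing *t = cx->create<MyThing>(arg1, arg2);  // cx->malloc + placement new
     *   if (!t)
     *       return JS_FALSE;
     *   ...
     *   cx->destroy(t);    // runs ~MyThing, then cx->free
     */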
    bool isConstructing();

    js::StackSpace &stack() const {
        return JS_THREAD_DATA(this)->stackSpace;
    }

#ifdef DEBUG
    void assertValidStackDepth(uintN depth) {
        JS_ASSERT(0 <= regs->sp - regs->fp->base());
        JS_ASSERT(depth <= uintptr_t(regs->sp - regs->fp->base()));
    }
#else
    void assertValidStackDepth(uintN /*depth*/) {}
#endif

  private:
    /*
     * The allocation code calls the function to indicate either OOM failure
     * when p is null or that a memory pressure counter has reached some
     * threshold when p is not null. The function takes the pointer and not
     * a boolean flag to minimize the amount of code in its inlined callers.
     */
    JS_FRIEND_API(void) checkMallocGCPressure(void *p);
};
static inline void
js_TraceRegExpStatics(JSTracer *trc, JSContext *acx)
{
    acx->regExpStatics.mark(trc);
}

JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(js::StackSegment *seg) const
{
    JS_ASSERT(seg->contains(this));
    return hasFunction() ? maybeCallObj() : seg->getInitialVarObj();
}

JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(JSContext *cx) const
{
    JS_ASSERT(cx->activeSegment()->contains(this));
    return hasFunction() ? maybeCallObj() : cx->activeSegment()->getInitialVarObj();
}

JS_ALWAYS_INLINE jsbytecode *
JSStackFrame::pc(JSContext *cx) const
{
    JS_ASSERT(cx->regs && cx->containingSegment(this) != NULL);
    return (cx->regs->fp == this) ? cx->regs->pc : savedPC;
}
#ifdef JS_THREADSAFE
# define JS_THREAD_ID(cx)       ((cx)->thread ? (cx)->thread->id : 0)
#endif

#if defined JS_THREADSAFE && defined DEBUG

class AutoCheckRequestDepth {
    JSContext *cx;
  public:
    AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->checkRequestDepth++; }

    ~AutoCheckRequestDepth() {
        JS_ASSERT(cx->checkRequestDepth != 0);
        cx->checkRequestDepth--;
    }
};

# define CHECK_REQUEST(cx)                                                    \
    JS_ASSERT((cx)->requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
    AutoCheckRequestDepth _autoCheckRequestDepth(cx);

#else
# define CHECK_REQUEST(cx)       ((void)0)
#endif
static inline uintN
FramePCOffset(JSContext *cx, JSStackFrame *fp)
{
    jsbytecode *pc = fp->hasIMacroPC() ? fp->getIMacroPC() : fp->pc(cx);
    return uintN(pc - fp->getScript()->code);
}

static inline JSAtom **
FrameAtomBase(JSContext *cx, JSStackFrame *fp)
{
    return fp->hasIMacroPC()
           ? COMMON_ATOMS_START(&cx->runtime->atomState)
           : fp->getScript()->atomMap.vector;
}
namespace js {

class AutoGCRooter {
  public:
    AutoGCRooter(JSContext *cx, ptrdiff_t tag)
      : down(cx->autoGCRooters), tag(tag), context(cx)
    {
        JS_ASSERT(this != cx->autoGCRooters);
#ifdef JS_THREADSAFE
        JS_ASSERT(cx->requestDepth != 0);
#endif
        cx->autoGCRooters = this;
    }

    ~AutoGCRooter() {
        JS_ASSERT(this == context->autoGCRooters);
#ifdef JS_THREADSAFE
        JS_ASSERT(context->requestDepth != 0);
#endif
        context->autoGCRooters = down;
    }

    /* Implemented in jsgc.cpp. */
    inline void trace(JSTracer *trc);

#ifdef __GNUC__
# pragma GCC visibility push(default)
#endif
    friend void ::js_TraceContext(JSTracer *trc, JSContext *acx);
    friend void ::js_TraceRuntime(JSTracer *trc);
#ifdef __GNUC__
# pragma GCC visibility pop
#endif

  protected:
    AutoGCRooter * const down;

    /*
     * Discriminates actual subclass of this being used. If non-negative, the
     * subclass roots an array of values of the length stored in this field.
     * If negative, meaning is indicated by the corresponding value in the enum
     * below. Any other negative value indicates some deeper problem such as
     * memory corruption.
     */
    ptrdiff_t tag;

    JSContext * const context;
  public:
    enum {
        JSVAL =        -1, /* js::AutoValueRooter */
        SPROP =        -2, /* js::AutoScopePropertyRooter */
        PARSER =       -3, /* js::Parser */
        SCRIPT =       -4, /* js::AutoScriptRooter */
        ENUMERATOR =   -5, /* js::AutoEnumStateRooter */
        IDARRAY =      -6, /* js::AutoIdArray */
        DESCRIPTORS =  -7, /* js::AutoPropDescArrayRooter */
        NAMESPACES =   -8, /* js::AutoNamespaceArray */
        XML =          -9, /* js::AutoXMLRooter */
        OBJECT =      -10, /* js::AutoObjectRooter */
        ID =          -11, /* js::AutoIdRooter */
        VALVECTOR =   -12, /* js::AutoValueVector */
        DESCRIPTOR =  -13, /* js::AutoPropertyDescriptorRooter */
        STRING =      -14, /* js::AutoStringRooter */
        IDVECTOR =    -15  /* js::AutoIdVector */
    };

  private:
    /* No copy or assignment semantics. */
    AutoGCRooter(AutoGCRooter &ida);
    void operator=(AutoGCRooter &ida);
};
/* FIXME(bug 332648): Move this into a public header. */
class AutoValueRooter : private AutoGCRooter
{
  public:
    explicit AutoValueRooter(JSContext *cx
                             JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, JSVAL), val(js::NullValue())
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    AutoValueRooter(JSContext *cx, const Value &v
                    JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, JSVAL), val(v)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    AutoValueRooter(JSContext *cx, jsval v
                    JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, JSVAL), val(js::Valueify(v))
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }
    /*
     * If you are looking for Object* overloads, use AutoObjectRooter instead;
     * rooting Object*s as a js::Value requires discerning whether or not it is
     * a function object. Also, AutoObjectRooter is smaller.
     */

    void set(Value v) {
        JS_ASSERT(tag == JSVAL);
        val = v;
    }

    void set(jsval v) {
        JS_ASSERT(tag == JSVAL);
        val = js::Valueify(v);
    }

    const Value &value() const {
        JS_ASSERT(tag == JSVAL);
        return val;
    }

    Value *addr() {
        JS_ASSERT(tag == JSVAL);
        return &val;
    }

    const jsval &jsval_value() const {
        JS_ASSERT(tag == JSVAL);
        return Jsvalify(val);
    }

    jsval *jsval_addr() {
        JS_ASSERT(tag == JSVAL);
        return Jsvalify(&val);
    }

    friend void AutoGCRooter::trace(JSTracer *trc);
    friend void ::js_TraceRuntime(JSTracer *trc);

  private:
    Value val;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
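/*
 * Usage sketch (illustrative only): root a freshly computed value across
 * calls that can trigger GC, then hand out its address, e.g.
 *
 *   AutoValueRooter tvr(cx);
 *   if (!SomeOperationThatGCs(cx, tvr.addr()))   // hypothetical callee
 *       return JS_FALSE;
 *   return CallerUse(cx, tvr.value());           // still rooted here
 */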
class AutoObjectRooter : private AutoGCRooter {
  public:
    AutoObjectRooter(JSContext *cx, JSObject *obj = NULL
                     JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, OBJECT), obj(obj)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    void setObject(JSObject *obj) {
        this->obj = obj;
    }

    JSObject * object() const {
        return obj;
    }

    JSObject ** addr() {
        return &obj;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);
    friend void ::js_TraceRuntime(JSTracer *trc);

  private:
    JSObject *obj;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};

class AutoStringRooter : private AutoGCRooter {
  public:
    AutoStringRooter(JSContext *cx, JSString *str = NULL
                     JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, STRING), str(str)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    void setString(JSString *str) {
        this->str = str;
    }

    JSString * string() const {
        return str;
    }

    JSString ** addr() {
        return &str;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    JSString *str;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoArrayRooter : private AutoGCRooter {
  public:
    AutoArrayRooter(JSContext *cx, size_t len, Value *vec
                    JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, len), array(vec)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
        JS_ASSERT(tag >= 0);
    }

    AutoArrayRooter(JSContext *cx, size_t len, jsval *vec
                    JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, len), array(Valueify(vec))
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
        JS_ASSERT(tag >= 0);
    }

    void changeLength(size_t newLength) {
        tag = ptrdiff_t(newLength);
        JS_ASSERT(tag >= 0);
    }

    void changeArray(Value *newArray, size_t newLength) {
        changeLength(newLength);
        array = newArray;
    }

    Value *array;

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
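/*
 * Usage sketch (illustrative only): keep a stack-allocated argument array
 * visible to the GC for as long as it is in use, e.g.
 *
 *   Value argv[2];
 *   argv[0] = argv[1] = NullValue();
 *   AutoArrayRooter tvr(cx, JS_ARRAY_LENGTH(argv), argv);
 *   ... fill argv and invoke the callee ...
 */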
class AutoScopePropertyRooter : private AutoGCRooter {
  public:
    AutoScopePropertyRooter(JSContext *cx, JSScopeProperty *sprop
                            JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, SPROP), sprop(sprop)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);
    friend void ::js_TraceRuntime(JSTracer *trc);

  private:
    JSScopeProperty * const sprop;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};

class AutoScriptRooter : private AutoGCRooter {
  public:
    AutoScriptRooter(JSContext *cx, JSScript *script
                     JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, SCRIPT), script(script)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    void setScript(JSScript *script) {
        this->script = script;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    JSScript *script;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoIdRooter : private AutoGCRooter
{
  public:
    explicit AutoIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0)
                          JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, ID), id_(id)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    jsid id() {
        return id_;
    }

    jsid * addr() {
        return &id_;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);
    friend void ::js_TraceRuntime(JSTracer *trc);

  private:
    jsid id_;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoIdArray : private AutoGCRooter {
  public:
    AutoIdArray(JSContext *cx, JSIdArray *ida JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, IDARRAY), idArray(ida)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    ~AutoIdArray() {
        if (idArray)
            JS_DestroyIdArray(context, idArray);
    }

    bool operator!() {
        return idArray == NULL;
    }

    jsid operator[](size_t i) const {
        JS_ASSERT(idArray);
        JS_ASSERT(i < size_t(idArray->length));
        return idArray->vector[i];
    }

    size_t length() const {
        return idArray->length;
    }

    friend void AutoGCRooter::trace(JSTracer *trc);

    JSIdArray *steal() {
        JSIdArray *copy = idArray;
        idArray = NULL;
        return copy;
    }

  protected:
    inline void trace(JSTracer *trc);

  private:
    JSIdArray * idArray;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER

    /* No copy or assignment semantics. */
    AutoIdArray(AutoIdArray &ida);
    void operator=(AutoIdArray &ida);
};
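/*
 * Usage sketch (illustrative only): take ownership of an id array returned
 * by the public API and let the destructor release it, e.g.
 *
 *   AutoIdArray ids(cx, JS_Enumerate(cx, obj));
 *   if (!ids)
 *       return JS_FALSE;
 *   for (size_t i = 0; i < ids.length(); i++)
 *       ... use ids[i] ...
 */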
/* The auto-root for enumeration object and its state. */
class AutoEnumStateRooter : private AutoGCRooter
{
  public:
    AutoEnumStateRooter(JSContext *cx, JSObject *obj
                        JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : AutoGCRooter(cx, ENUMERATOR), obj(obj), stateValue()
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
        JS_ASSERT(obj);
    }

    ~AutoEnumStateRooter() {
        if (!stateValue.isNull()) {
            obj->enumerate(context, JSENUMERATE_DESTROY, &stateValue, 0);
        }
    }

    friend void AutoGCRooter::trace(JSTracer *trc);

    const Value &state() const { return stateValue; }
    Value *addr() { return &stateValue; }

  protected:
    void trace(JSTracer *trc) {
        JS_CALL_OBJECT_TRACER(trc, obj, "js::AutoEnumStateRooter.obj");
    }

    JSObject * const obj;

  private:
    Value stateValue;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
#ifdef JS_HAS_XML_SUPPORT
class AutoXMLRooter : private AutoGCRooter {
  public:
    AutoXMLRooter(JSContext *cx, JSXML *xml)
      : AutoGCRooter(cx, XML), xml(xml)
    {
        JS_ASSERT(xml);
    }

    friend void AutoGCRooter::trace(JSTracer *trc);
    friend void ::js_TraceRuntime(JSTracer *trc);

  private:
    JSXML * const xml;
};
#endif /* JS_HAS_XML_SUPPORT */
class AutoLockGC {
  private:
    JSRuntime *rt;
  public:
    explicit AutoLockGC(JSRuntime *rt) : rt(rt) { JS_LOCK_GC(rt); }
    ~AutoLockGC() { JS_UNLOCK_GC(rt); }
};

class AutoUnlockGC {
  private:
    JSRuntime *rt;
  public:
    explicit AutoUnlockGC(JSRuntime *rt) : rt(rt) { JS_UNLOCK_GC(rt); }
    ~AutoUnlockGC() { JS_LOCK_GC(rt); }
};

class AutoKeepAtoms {
    JSRuntime *rt;
  public:
    explicit AutoKeepAtoms(JSRuntime *rt) : rt(rt) { JS_KEEP_ATOMS(rt); }
    ~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); }
};
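/*
 * Usage sketch (illustrative only): a block that must run with the GC lock
 * held, and a nested region that temporarily drops it, e.g.
 *
 *   {
 *       AutoLockGC lock(rt);             // JS_LOCK_GC on entry, unlock on exit
 *       ... inspect runtime state ...
 *       {
 *           AutoUnlockGC unlock(rt);     // drop the lock around a blocking call
 *           ... call out without holding the GC lock ...
 *       }                                // lock reacquired here
 *   }
 */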
class AutoArenaAllocator {
    JSArenaPool *pool;
    void        *mark;
  public:
    explicit AutoArenaAllocator(JSArenaPool *pool) : pool(pool) { mark = JS_ARENA_MARK(pool); }
    ~AutoArenaAllocator() { JS_ARENA_RELEASE(pool, mark); }

    template <typename T>
    T *alloc(size_t elems) {
        void *ptr;
        JS_ARENA_ALLOCATE(ptr, pool, elems * sizeof(T));
        return static_cast<T *>(ptr);
    }
};
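/*
 * Usage sketch (illustrative only): allocations made through the helper are
 * released in one shot when it goes out of scope, e.g.
 *
 *   AutoArenaAllocator alloc(&cx->tempPool);
 *   jschar *buf = alloc.alloc<jschar>(len);    // arena-backed, may be NULL
 *   if (!buf)
 *       return JS_FALSE;
 *   ... use buf; no explicit free, the arena mark is released on scope exit ...
 */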
class AutoReleasePtr {
    JSContext   *cx;
    void        *ptr;

    AutoReleasePtr operator=(const AutoReleasePtr &other);

  public:
    explicit AutoReleasePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
    ~AutoReleasePtr() { cx->free(ptr); }
};

} /* namespace js */
class JSAutoResolveFlags
{
  public:
    JSAutoResolveFlags(JSContext *cx, uintN flags
                       JS_GUARD_OBJECT_NOTIFIER_PARAM)
      : mContext(cx), mSaved(cx->resolveFlags)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
        cx->resolveFlags = flags;
    }

    ~JSAutoResolveFlags() { mContext->resolveFlags = mSaved; }

  private:
    JSContext *mContext;
    uintN mSaved;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
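/*
 * Usage sketch (illustrative only): temporarily widen the resolve flags for
 * a lookup and have them restored on every exit path, e.g.
 *
 *   {
 *       JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED | JSRESOLVE_ASSIGNING);
 *       if (!obj->lookupProperty(cx, id, &obj2, &prop))   // hypothetical call site
 *           return JS_FALSE;
 *   }   // cx->resolveFlags restored here
 */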
/*
 * Slightly more readable macros for testing per-context option settings (also
 * to hide bitset implementation detail).
 *
 * JSOPTION_XML must be handled specially in order to propagate from compile-
 * to run-time (from cx->options to script->version/cx->version). To do that,
 * we copy JSOPTION_XML from cx->options into cx->version as JSVERSION_HAS_XML
 * whenever options are set, and preserve this XML flag across version number
 * changes done via the JS_SetVersion API.
 *
 * But when executing a script or scripted function, the interpreter changes
 * cx->version, including the XML flag, to script->version. Thus JSOPTION_XML
 * is a compile-time option that causes a run-time version change during each
 * activation of the compiled script. That version change has the effect of
 * changing JS_HAS_XML_OPTION, so that any compiling done via eval enables XML
 * support. If an XML-enabled script or function calls a non-XML function,
 * the flag bit will be cleared during the callee's activation.
 *
 * Note that JS_SetVersion API calls never pass JSVERSION_HAS_XML or'd into
 * that API's version parameter.
 *
 * Note also that script->version must contain this XML option flag in order
 * for XDR'ed scripts to serialize and deserialize with that option preserved
 * for detection at run-time. We can't copy other compile-time options into
 * script->version because that would break backward compatibility (certain
 * other options, e.g. JSOPTION_VAROBJFIX, are analogous to JSOPTION_XML).
 */
#define JS_HAS_OPTION(cx,option)        (((cx)->options & (option)) != 0)
#define JS_HAS_STRICT_OPTION(cx)        JS_HAS_OPTION(cx, JSOPTION_STRICT)
#define JS_HAS_WERROR_OPTION(cx)        JS_HAS_OPTION(cx, JSOPTION_WERROR)
#define JS_HAS_COMPILE_N_GO_OPTION(cx)  JS_HAS_OPTION(cx, JSOPTION_COMPILE_N_GO)
#define JS_HAS_ATLINE_OPTION(cx)        JS_HAS_OPTION(cx, JSOPTION_ATLINE)

#define JSVERSION_MASK                  0x0FFF  /* see JSVersion in jspubtd.h */
#define JSVERSION_HAS_XML               0x1000  /* flag induced by XML option */
#define JSVERSION_ANONFUNFIX            0x2000  /* see jsapi.h, the comments
                                                   for JSOPTION_ANONFUNFIX */

#define JSVERSION_NUMBER(cx)            ((JSVersion)((cx)->version &          \
                                                     JSVERSION_MASK))
#define JS_HAS_XML_OPTION(cx)           ((cx)->version & JSVERSION_HAS_XML || \
                                         JSVERSION_NUMBER(cx) >= JSVERSION_1_6)
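/*
 * Illustrative reading of the macros above (sketch only): for a context whose
 * version number is JSVERSION_1_5 but whose options include JSOPTION_XML, the
 * JSVERSION_HAS_XML bit is expected in cx->version once options are synced, so
 *
 *   JSVERSION_NUMBER(cx)     // JSVERSION_1_5: the flag bits are masked off
 *   JS_HAS_XML_OPTION(cx)    // true: the 0x1000 flag satisfies the first test
 */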
extern JSThreadData *
js_CurrentThreadData(JSRuntime *rt);

extern JSBool
js_InitThreads(JSRuntime *rt);

extern void
js_FinishThreads(JSRuntime *rt);

extern void
js_PurgeThreads(JSContext *cx);

namespace js {

#ifdef JS_THREADSAFE

/* Iterator over JSThreadData from all JSThread instances. */
class ThreadDataIter : public JSThread::Map::Range
{
  public:
    ThreadDataIter(JSRuntime *rt) : JSThread::Map::Range(rt->threads.all()) {}

    JSThreadData *threadData() const {
        return &front().value->data;
    }
};

#else /* !JS_THREADSAFE */

class ThreadDataIter
{
    JSRuntime *runtime;
    bool done;
  public:
    ThreadDataIter(JSRuntime *rt) : runtime(rt), done(false) {}

    bool empty() const {
        return done;
    }

    void popFront() {
        JS_ASSERT(!done);
        done = true;
    }

    JSThreadData *threadData() const {
        return &runtime->threadData;
    }
};

#endif /* !JS_THREADSAFE */

} /* namespace js */
/*
 * Ensures the JSOPTION_XML and JSOPTION_ANONFUNFIX bits of cx->options are
 * reflected in cx->version, since each bit must travel with a script that has
 * it set.
 */
extern void
js_SyncOptionsToVersion(JSContext *cx);

/*
 * Common subroutine of JS_SetVersion and js_SetVersion, to update per-context
 * data that depends on version.
 */
extern void
js_OnVersionChange(JSContext *cx);

/*
 * Unlike the JS_SetVersion API, this function stores JSVERSION_HAS_XML and
 * any future non-version-number flags induced by compiler options.
 */
extern void
js_SetVersion(JSContext *cx, JSVersion version);

/*
 * Create and destroy functions for JSContext, which is manually allocated
 * and exclusively owned.
 */
extern JSContext *
js_NewContext(JSRuntime *rt, size_t stackChunkSize);

extern void
js_DestroyContext(JSContext *cx, JSDestroyContextMode mode);

/*
 * Return true if cx points to a context in rt->contextList, else return false.
 * NB: the caller (see jslock.c:ClaimTitle) must hold rt->gcLock.
 */
extern JSBool
js_ValidContextPointer(JSRuntime *rt, JSContext *cx);
static JS_INLINE JSContext *
js_ContextFromLinkField(JSCList *link)
{
    JS_ASSERT(link);
    return (JSContext *) ((uint8 *) link - offsetof(JSContext, link));
}

/*
 * If unlocked, acquire and release rt->gcLock around *iterp update; otherwise
 * the caller must be holding rt->gcLock.
 */
extern JSContext *
js_ContextIterator(JSRuntime *rt, JSBool unlocked, JSContext **iterp);
/*
 * Iterate through contexts with active requests. The caller must be holding
 * rt->gcLock in case of a thread-safe build, or otherwise guarantee that the
 * context list is not mutated asynchronously.
 */
extern JS_FRIEND_API(JSContext *)
js_NextActiveContext(JSRuntime *, JSContext *);

/*
 * Class.resolve and watchpoint recursion damping machinery.
 */
extern JSBool
js_StartResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
                  JSResolvingEntry **entryp);

extern void
js_StopResolving(JSContext *cx, JSResolvingKey *key, uint32 flag,
                 JSResolvingEntry *entry, uint32 generation);
/*
 * Report an exception, which is currently realized as a printf-style format
 * string and its arguments.
 */
typedef enum JSErrNum {
#define MSG_DEF(name, number, count, exception, format) \
    name = number,
#include "js.msg"
#undef MSG_DEF
    JSErr_Limit
} JSErrNum;

extern JS_FRIEND_API(const JSErrorFormatString *)
js_GetErrorMessage(void *userRef, const char *locale, const uintN errorNumber);
extern JSBool
js_ReportErrorVA(JSContext *cx, uintN flags, const char *format, va_list ap);

extern JSBool
js_ReportErrorNumberVA(JSContext *cx, uintN flags, JSErrorCallback callback,
                       void *userRef, const uintN errorNumber,
                       JSBool charArgs, va_list ap);

extern JSBool
js_ExpandErrorArguments(JSContext *cx, JSErrorCallback callback,
                        void *userRef, const uintN errorNumber,
                        char **message, JSErrorReport *reportp,
                        bool charArgs, va_list ap);

extern void
js_ReportOutOfMemory(JSContext *cx);

/*
 * Report that cx->scriptStackQuota is exhausted.
 */
extern void
js_ReportOutOfScriptQuota(JSContext *cx);

extern JS_FRIEND_API(void)
js_ReportOverRecursed(JSContext *cx);

extern JS_FRIEND_API(void)
js_ReportAllocationOverflow(JSContext *cx);
#define JS_CHECK_RECURSION(cx, onerror)                                       \
    JS_BEGIN_MACRO                                                            \
        int stackDummy_;                                                      \
                                                                              \
        if (!JS_CHECK_STACK_SIZE(cx, stackDummy_)) {                          \
            js_ReportOverRecursed(cx);                                        \
            onerror;                                                          \
        }                                                                     \
    JS_END_MACRO
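/*
 * Usage sketch (illustrative only): a recursive native guards each level of
 * C-stack recursion before descending, e.g.
 *
 *   static JSBool
 *   WalkTree(JSContext *cx, JSObject *node)
 *   {
 *       JS_CHECK_RECURSION(cx, return JS_FALSE);
 *       ... recurse into children via WalkTree(cx, child) ...
 *       return JS_TRUE;
 *   }
 */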
/*
 * Report an exception using a previously composed JSErrorReport.
 * XXXbe remove from "friend" API
 */
extern JS_FRIEND_API(void)
js_ReportErrorAgain(JSContext *cx, const char *message, JSErrorReport *report);

extern void
js_ReportIsNotDefined(JSContext *cx, const char *name);

/*
 * Report an attempt to access the property of a null or undefined value (v).
 */
extern JSBool
js_ReportIsNullOrUndefined(JSContext *cx, intN spindex, const js::Value &v,
                           JSString *fallback);

extern void
js_ReportMissingArg(JSContext *cx, const js::Value &v, uintN arg);
/*
 * Report error using js_DecompileValueGenerator(cx, spindex, v, fallback) as
 * the first argument for the error message. If the error message has fewer
 * than 3 arguments, use null for arg1 or arg2.
 */
extern JSBool
js_ReportValueErrorFlags(JSContext *cx, uintN flags, const uintN errorNumber,
                         intN spindex, const js::Value &v, JSString *fallback,
                         const char *arg1, const char *arg2);
#define js_ReportValueError(cx,errorNumber,spindex,v,fallback)               \
    ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber,         \
                                    spindex, v, fallback, NULL, NULL))

#define js_ReportValueError2(cx,errorNumber,spindex,v,fallback,arg1)         \
    ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber,         \
                                    spindex, v, fallback, arg1, NULL))

#define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2)    \
    ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber,         \
                                    spindex, v, fallback, arg1, arg2))

extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
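/*
 * Usage sketch (illustrative only; the message constant is just an example
 * js.msg entry): a helper that rejects non-object values might write
 *
 *   if (!v.isObject()) {
 *       js_ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_SEARCH_STACK,
 *                           v, NULL);
 *       return JS_FALSE;
 *   }
 */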
/*
 * See JS_SetThreadStackLimit in jsapi.c, where we check that the stack
 * grows in the expected direction.
 */
#if JS_STACK_GROWTH_DIRECTION > 0
# define JS_CHECK_STACK_SIZE(cx, lval)  ((jsuword)&(lval) < (cx)->stackLimit)
#else
# define JS_CHECK_STACK_SIZE(cx, lval)  ((jsuword)&(lval) > (cx)->stackLimit)
#endif

#ifdef JS_THREADSAFE
# define JS_ASSERT_REQUEST_DEPTH(cx)  JS_ASSERT((cx)->requestDepth >= 1)
#else
# define JS_ASSERT_REQUEST_DEPTH(cx)  ((void) 0)
#endif

/*
 * If the operation callback flag was set, call the operation callback.
 * This macro can run the full GC. Return true if it is OK to continue and
 * false otherwise.
 */
#define JS_CHECK_OPERATION_LIMIT(cx) \
    (JS_ASSERT_REQUEST_DEPTH(cx), \
     (!JS_THREAD_DATA(cx)->operationCallbackFlag || js_InvokeOperationCallback(cx)))
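/*
 * Usage sketch (illustrative only): long-running loops poll the limit once
 * per iteration so the operation callback (and a full GC) can interrupt them:
 *
 *   for (jsuint i = 0; i < length; i++) {
 *       if (!JS_CHECK_OPERATION_LIMIT(cx))
 *           return JS_FALSE;
 *       ... process element i ...
 *   }
 */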
/*
 * Invoke the operation callback and return false if the current execution
 * is to be terminated.
 */
extern JSBool
js_InvokeOperationCallback(JSContext *cx);

#ifndef JS_THREADSAFE
# define js_TriggerAllOperationCallbacks(rt, gcLocked) \
    js_TriggerAllOperationCallbacks (rt)
#endif

extern void
js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked);

extern JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);

extern jsbytecode *
js_GetCurrentBytecodePC(JSContext* cx);

extern bool
js_CurrentPCIsInImacro(JSContext *cx);
namespace js {

/*
 * Reconstruct the JS stack and clear cx->tracecx. We must be currently in a
 * _FAIL builtin from trace on cx or another context on the same thread. The
 * machine code for the trace remains on the C stack when js_DeepBail returns.
 *
 * Implemented in jstracer.cpp.
 */
JS_FORCES_STACK JS_FRIEND_API(void)
DeepBail(JSContext *cx);

static JS_FORCES_STACK JS_INLINE void
LeaveTrace(JSContext *cx)
{
#ifdef JS_TRACER
    if (JS_ON_TRACE(cx))
        DeepBail(cx);
#endif
}

static JS_INLINE void
LeaveTraceIfGlobalObject(JSContext *cx, JSObject *obj)
{
    if (!obj->getParent())
        LeaveTrace(cx);
}

static JS_INLINE JSBool
CanLeaveTrace(JSContext *cx)
{
    JS_ASSERT(JS_ON_TRACE(cx));
#ifdef JS_TRACER
    return cx->bailExit != NULL;
#else
    return JS_FALSE;
#endif
}

extern void
SetPendingException(JSContext *cx, const Value &v);

} /* namespace js */
/*
 * Get the current frame, first lazily instantiating stack frames if needed.
 * (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
 *
 * Defined in jstracer.cpp if JS_TRACER is defined.
 */
static JS_FORCES_STACK JS_INLINE JSStackFrame *
js_GetTopStackFrame(JSContext *cx)
{
    js::LeaveTrace(cx);
    return cx->maybefp();
}
static JS_INLINE JSBool
js_IsPropertyCacheDisabled(JSContext *cx)
{
    return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
}

static JS_INLINE uint32
js_RegenerateShapeForGC(JSContext *cx)
{
    JS_ASSERT(cx->runtime->gcRunning);
    JS_ASSERT(cx->runtime->gcRegenShapes);

    /*
     * Under the GC, compared with js_GenerateShape, we don't need to use
     * atomic increments but we still must make sure that after an overflow
     * the shape stays such.
     */
    uint32 shape = cx->runtime->shapeGen;
    shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
    cx->runtime->shapeGen = shape;
    return shape;
}
namespace js {

inline void *
ContextAllocPolicy::malloc(size_t bytes)
{
    return cx->malloc(bytes);
}

inline void
ContextAllocPolicy::free(void *p)
{
    cx->free(p);
}

inline void *
ContextAllocPolicy::realloc(void *p, size_t bytes)
{
    return cx->realloc(p, bytes);
}

inline void
ContextAllocPolicy::reportAllocOverflow() const
{
    js_ReportAllocationOverflow(cx);
}
class AutoValueVector : private AutoGCRooter
{
  public:
    explicit AutoValueVector(JSContext *cx
                             JS_GUARD_OBJECT_NOTIFIER_PARAM)
        : AutoGCRooter(cx, VALVECTOR), vector(cx)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    size_t length() const { return vector.length(); }

    bool append(const Value &v) { return vector.append(v); }

    void popBack() { vector.popBack(); }

    bool growBy(size_t inc) {
        /* N.B. Value's default ctor leaves the Value undefined */
        size_t oldLength = vector.length();
        if (!vector.growByUninitialized(inc))
            return false;
        MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool resize(size_t newLength) {
        size_t oldLength = vector.length();
        if (newLength <= oldLength) {
            vector.shrinkBy(oldLength - newLength);
            return true;
        }
        /* N.B. Value's default ctor leaves the Value undefined */
        if (!vector.growByUninitialized(newLength - oldLength))
            return false;
        MakeValueRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool reserve(size_t newLength) {
        return vector.reserve(newLength);
    }

    Value &operator[](size_t i) { return vector[i]; }
    const Value &operator[](size_t i) const { return vector[i]; }

    const Value *begin() const { return vector.begin(); }
    Value *begin() { return vector.begin(); }

    const Value *end() const { return vector.end(); }
    Value *end() { return vector.end(); }

    const jsval *jsval_begin() const { return Jsvalify(begin()); }
    jsval *jsval_begin() { return Jsvalify(begin()); }

    const jsval *jsval_end() const { return Jsvalify(end()); }
    jsval *jsval_end() { return Jsvalify(end()); }

    const Value &back() const { return vector.back(); }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    Vector<Value, 8> vector;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoIdVector : private AutoGCRooter
{
  public:
    explicit AutoIdVector(JSContext *cx
                          JS_GUARD_OBJECT_NOTIFIER_PARAM)
        : AutoGCRooter(cx, IDVECTOR), vector(cx)
    {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
    }

    size_t length() const { return vector.length(); }

    bool append(jsid id) { return vector.append(id); }

    void popBack() { vector.popBack(); }

    bool growBy(size_t inc) {
        /* N.B. jsid's default ctor leaves the jsid undefined */
        size_t oldLength = vector.length();
        if (!vector.growByUninitialized(inc))
            return false;
        MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool resize(size_t newLength) {
        size_t oldLength = vector.length();
        if (newLength <= oldLength) {
            vector.shrinkBy(oldLength - newLength);
            return true;
        }
        /* N.B. jsid's default ctor leaves the jsid undefined */
        if (!vector.growByUninitialized(newLength - oldLength))
            return false;
        MakeIdRangeGCSafe(vector.begin() + oldLength, vector.end());
        return true;
    }

    bool reserve(size_t newLength) {
        return vector.reserve(newLength);
    }

    jsid &operator[](size_t i) { return vector[i]; }
    const jsid &operator[](size_t i) const { return vector[i]; }

    const jsid *begin() const { return vector.begin(); }
    jsid *begin() { return vector.begin(); }

    const jsid *end() const { return vector.end(); }
    jsid *end() { return vector.end(); }

    const jsid &back() const { return vector.back(); }

    friend void AutoGCRooter::trace(JSTracer *trc);

  private:
    Vector<jsid, 8> vector;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
JSIdArray *
NewIdArray(JSContext *cx, jsint length);

} /* namespace js */

#ifdef _MSC_VER
#pragma warning(pop)
#pragma warning(pop)
#endif

#ifdef JS_UNDEFD_MOZALLOC_WRAPPERS
#  include "mozilla/mozalloc_macro_wrappers.h"
#endif

#endif /* jscntxt_h___ */