1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=78:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
44 * JS execution context.
48 /* Gross special case for Gecko, which defines malloc/calloc/free. */
49 #ifdef mozilla_mozalloc_macro_wrappers_h
50 # define JS_UNDEFD_MOZALLOC_WRAPPERS
51 /* The "anti-header" */
52 # include "mozilla/mozalloc_undef_macro_wrappers.h"
64 #include "jsgcchunk.h"
65 #include "jshashtable.h"
69 #include "jspropertycache.h"
70 #include "jspropertytree.h"
71 #include "jsstaticcheck.h"
79 #pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */
81 #pragma warning(disable:4355) /* Silence warning about "this" used in base member initializer list */
85 * js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
86 * given pc in a script. We use the script->code pointer to tag the cache,
87 * instead of the script address itself, so that source notes are always found
88 * by offset from the bytecode with which they were generated.
90 typedef struct JSGSNCache
{
98 # define GSN_CACHE_METER(cache,cnt) (++(cache)->cnt)
100 # define GSN_CACHE_METER(cache,cnt) /* nothing */
104 #define js_FinishGSNCache(cache) js_PurgeGSNCache(cache)
107 js_PurgeGSNCache(JSGSNCache
*cache
);
109 /* These helper macros take a cx as parameter and operate on its GSN cache. */
110 #define JS_PURGE_GSN_CACHE(cx) js_PurgeGSNCache(&JS_GSN_CACHE(cx))
111 #define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
113 /* Forward declarations of nanojit types. */
119 template<typename K
> struct DefaultHash
;
120 template<typename K
, typename V
, typename H
> class HashMap
;
121 template<typename T
> class Seq
;
123 } /* namespace nanojit */
127 /* Tracer constants. */
/* NOTE(review): presumably the number of per-monitor global states -- confirm against TraceMonitor. */
128 static const size_t MONITOR_N_GLOBAL_STATES
= 4;
/* NOTE(review): presumably the size of the trace-fragment hash table -- confirm. */
129 static const size_t FRAGMENT_TABLE_SIZE
= 512;
/* Capacity, in doubles, of the native stack (see TraceNativeStorage::stack_global_buf). */
130 static const size_t MAX_NATIVE_STACK_SLOTS
= 4096;
/* Capacity of the FrameInfo call stack (see TraceNativeStorage::callstack_buf). */
131 static const size_t MAX_CALL_STACK_ENTRIES
= 500;
/* Maximum number of global slots usable on trace. */
132 static const size_t MAX_GLOBAL_SLOTS
= 4096;
/* Global-slot buffer size: one extra slot beyond MAX_GLOBAL_SLOTS. */
133 static const size_t GLOBAL_SLOTS_BUFFER_SIZE
= MAX_GLOBAL_SLOTS
+ 1;
/* NOTE(review): extra slots reserved for slow natives -- confirm against jsbuiltins.h. */
134 static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS
= 16;
136 /* Forward declarations of tracer types. */
138 class FrameInfoCache
;
145 template<typename T
> class Queue
;
146 typedef Queue
<uint16
> SlotList
;
149 typedef nanojit::HashMap
<REHashKey
, REFragment
*, REHashFn
> REHashMap
;
152 #if defined(JS_JIT_SPEW) || defined(DEBUG)
154 typedef nanojit::HashMap
<uint32
, FragPI
, nanojit::DefaultHash
<uint32
> > FragStatsMap
;
158 class JaegerCompartment
;
162 * Allocation policy that calls JSContext memory functions and reports errors
163 * to the context. Since the JSContext given on construction is stored for
164 * the lifetime of the container, this policy may only be used for containers
165 * whose lifetime is a shorter than the given JSContext.
167 class ContextAllocPolicy
172 ContextAllocPolicy(JSContext
*cx
) : cx(cx
) {}
173 JSContext
*context() const { return cx
; }
175 /* Inline definitions below. */
176 void *malloc(size_t bytes
);
178 void *realloc(void *p
, size_t bytes
);
179 void reportAllocOverflow() const;
182 /* Holds the execution state during trace execution. */
185 JSContext
* cx
; // current VM context handle
186 double* stackBase
; // native stack base
187 double* sp
; // native stack pointer, stack[0] is spbase[0]
188 double* eos
; // first unusable word after the native stack / begin of globals
189 FrameInfo
** callstackBase
; // call stack base
190 void* sor
; // start of rp stack
191 FrameInfo
** rp
; // call stack pointer
192 void* eor
; // first unusable word after the call stack
193 VMSideExit
* lastTreeExitGuard
; // guard we exited on during a tree call
194 VMSideExit
* lastTreeCallGuard
; // guard we want to grow from if the tree
195 // call exit guard mismatched
196 void* rpAtLastTreeCall
; // value of rp at innermost tree call guard
197 VMSideExit
* outermostTreeExitGuard
; // the last side exit returned by js_CallTree
198 TreeFragment
* outermostTree
; // the outermost tree we initially invoked
199 uintN
* inlineCallCountp
; // inline call count counter
200 VMSideExit
** innermostNestedGuardp
;
201 VMSideExit
* innermost
;
205 // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
206 // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
207 // if an error or exception occurred.
208 uint32 builtinStatus
;
210 // Used to communicate the location of the return value in case of a deep bail.
213 // Used when calling natives from trace to root the vp vector.
217 TracerState(JSContext
*cx
, TraceMonitor
*tm
, TreeFragment
*ti
,
218 uintN
&inlineCallCountp
, VMSideExit
** innermostNestedGuardp
);
223 * Storage for the execution state and store during trace execution. Generated
224 * code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
225 * doubles after the stack begins. Thus, on trace, |TracerState::eos| holds a
226 * pointer to the first global.
228 struct TraceNativeStorage
/*
 * Fused buffer: the first MAX_NATIVE_STACK_SLOTS doubles are the native
 * stack; the following GLOBAL_SLOTS_BUFFER_SIZE doubles hold the globals.
 * Generated code relies on this fixed offset (see the struct comment above).
 */
230 double stack_global_buf
[MAX_NATIVE_STACK_SLOTS
+ GLOBAL_SLOTS_BUFFER_SIZE
];
/* Call-stack buffer of FrameInfo pointers. */
231 FrameInfo
*callstack_buf
[MAX_CALL_STACK_ENTRIES
];
/* Base of the native-stack portion of the fused buffer. */
233 double *stack() { return stack_global_buf
; }
/* Start of the globals: MAX_NATIVE_STACK_SLOTS doubles past the stack base. */
234 double *global() { return stack_global_buf
+ MAX_NATIVE_STACK_SLOTS
; }
/* Base of the call-stack buffer. */
235 FrameInfo
**callstack() { return callstack_buf
; }
238 /* Holds data to track a single global. */
242 SlotList
* globalSlots
;
246 * A StackSegment (referred to as just a 'segment') contains a prev-linked set
247 * of stack frames and the slots associated with each frame. A segment and its
248 * contained frames/slots also have a precise memory layout that is described
249 * in the js::StackSpace comment. A key layout invariant for segments is that
250 * prev-linked frames are adjacent in memory, separated only by the values that
251 * constitute the locals and expression stack of the prev-frame.
253 * The set of stack frames in a non-empty segment start at the segment's
254 * "current frame", which is the most recently pushed frame, and ends at the
255 * segment's "initial frame". Note that, while all stack frames in a segment
256 * are prev-linked, not all prev-linked frames are in the same segment. Hence,
257 * for a segment |ss|, |ss->getInitialFrame()->prev| may be non-null and in a
258 * different segment. This occurs when the VM reenters itself (via Invoke or
259 * Execute). In full generality, a single context may contain a forest of trees
260 * of stack frames. With respect to this forest, a segment contains a linear
261 * path along a single tree, not necessarily to the root.
263 * The frames of a non-empty segment must all be in the same context and thus
264 * each non-empty segment is referred to as being "in" a context. Segments in a
265 * context have an additional state of being either "active" or "suspended". A
266 * suspended segment |ss| has a "suspended frame" which is a snapshot of |cx->regs|
267 * when the segment was suspended and serves as the current frame of |ss|.
268 * There is at most one active segment in a given context. Segments in a
269 * context execute LIFO and are maintained in a stack. The top of this stack
270 * is the context's "current segment". If a context |cx| has an active segment
272 * 1. |ss| is |cx|'s current segment,
273 * 2. |cx->regs != NULL|, and
274 * 3. |ss|'s current frame is |cx->regs->fp|.
275 * Moreover, |cx->regs != NULL| iff |cx| has an active segment.
277 * An empty segment is not associated with any context. Empty segments are
278 * created when there is not an active segment for a context at the top of the
279 * stack and claim space for the arguments of an Invoke before the Invoke's
280 * stack frame is pushed. During the intervals when the arguments have been
281 * pushed, but not the stack frame, the segment cannot be pushed onto the
282 * context, since that would require some hack to deal with cx->fp not being
283 * the current frame of cx->currentSegment.
285 * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
286 * segment may or may not be "saved". Normally, when the active segment is
287 * popped, the previous segment (which is necessarily suspended) becomes
288 * active. If the previous segment was saved, however, then it stays suspended
289 * until it is made active by a call to JS_RestoreFrameChain. This is why a
290 * context may have a current segment, but not an active segment.
294 /* The context to which this segment belongs. */
297 /* Link for JSContext segment stack mentioned in big comment above. */
298 StackSegment
*previousInContext
;
300 /* Link for StackSpace segment stack mentioned in StackSpace comment. */
301 StackSegment
*previousInMemory
;
303 /* The first frame executed in this segment. null iff cx is null */
304 JSStackFrame
*initialFrame
;
306 /* If this segment is suspended, |cx->regs| when it was suspended. */
307 JSFrameRegs
*suspendedRegs
;
309 /* The varobj on entry to initialFrame. */
310 JSObject
*initialVarObj
;
312 /* Whether this segment was suspended by JS_SaveFrameChain. */
315 /* Align at 8 bytes on all platforms. */
316 #if JS_BITS_PER_WORD == 32
321 * To make isActive a single null-ness check, this non-null constant is
322 * assigned to suspendedRegs when !inContext.
324 #define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1)
328 : cx(NULL
), previousInContext(NULL
), previousInMemory(NULL
),
329 initialFrame(NULL
), suspendedRegs(NON_NULL_SUSPENDED_REGS
),
330 initialVarObj(NULL
), saved(false)
332 JS_ASSERT(!inContext());
335 /* Safe casts guaranteed by the contiguous-stack layout. */
337 Value
*valueRangeBegin() const {
338 return (Value
*)(this + 1);
342 * As described in the comment at the beginning of the class, a segment
343 * is in one of three states:
345 * !inContext: the segment has been created to root arguments for a
346 * future call to Invoke.
347 * isActive: the segment describes a set of stack frames in a context,
348 * where the top frame is currently executing.
349 * isSuspended: like isActive, but the top frame has been suspended.
352 bool inContext() const {
353 JS_ASSERT(!!cx
== !!initialFrame
);
354 JS_ASSERT_IF(!cx
, suspendedRegs
== NON_NULL_SUSPENDED_REGS
&& !saved
);
/*
 * True when this segment is the one currently executing. suspendedRegs is
 * NULL only in the active state: joinContext() clears it, while suspend()
 * and leaveContext() set it to regs or the NON_NULL_SUSPENDED_REGS sentinel.
 */
358 bool isActive() const {
359 JS_ASSERT_IF(!suspendedRegs
, cx
&& !saved
);
/* Outside a context, suspendedRegs must hold the non-null sentinel. */
360 JS_ASSERT_IF(!cx
, suspendedRegs
== NON_NULL_SUSPENDED_REGS
);
361 return !suspendedRegs
;
/*
 * True when in a context but not executing: cx is set and suspendedRegs
 * holds the registers recorded at suspension (see suspend()).
 */
364 bool isSuspended() const {
365 JS_ASSERT_IF(!cx
|| !suspendedRegs
, !saved
);
/* Outside a context, suspendedRegs must hold the non-null sentinel. */
366 JS_ASSERT_IF(!cx
, suspendedRegs
== NON_NULL_SUSPENDED_REGS
);
367 return cx
&& suspendedRegs
;
370 /* Substate of suspended, queryable in any state. */
372 bool isSaved() const {
373 JS_ASSERT_IF(saved
, isSuspended());
377 /* Transitioning between inContext <--> isActive */
379 void joinContext(JSContext
*cx
, JSStackFrame
*f
) {
380 JS_ASSERT(!inContext());
383 suspendedRegs
= NULL
;
384 JS_ASSERT(isActive());
387 void leaveContext() {
388 JS_ASSERT(isActive());
391 suspendedRegs
= NON_NULL_SUSPENDED_REGS
;
392 JS_ASSERT(!inContext());
395 JSContext
*maybeContext() const {
399 #undef NON_NULL_SUSPENDED_REGS
401 /* Transitioning between isActive <--> isSuspended */
403 void suspend(JSFrameRegs
*regs
) {
404 JS_ASSERT(isActive());
405 JS_ASSERT(regs
&& regs
->fp
&& contains(regs
->fp
));
406 suspendedRegs
= regs
;
407 JS_ASSERT(isSuspended());
411 JS_ASSERT(isSuspended());
412 suspendedRegs
= NULL
;
413 JS_ASSERT(isActive());
416 /* When isSuspended, transitioning isSaved <--> !isSaved */
418 void save(JSFrameRegs
*regs
) {
419 JS_ASSERT(!isSuspended());
422 JS_ASSERT(isSaved());
426 JS_ASSERT(isSaved());
429 JS_ASSERT(!isSuspended());
432 /* Data available when inContext */
434 JSStackFrame
*getInitialFrame() const {
435 JS_ASSERT(inContext());
439 inline JSFrameRegs
*getCurrentRegs() const;
440 inline JSStackFrame
*getCurrentFrame() const;
442 /* Data available when isSuspended. */
444 JSFrameRegs
*getSuspendedRegs() const {
445 JS_ASSERT(isSuspended());
446 return suspendedRegs
;
449 JSStackFrame
*getSuspendedFrame() const {
450 return suspendedRegs
->fp
;
453 /* JSContext / js::StackSpace bookkeeping. */
455 void setPreviousInContext(StackSegment
*seg
) {
456 previousInContext
= seg
;
459 StackSegment
*getPreviousInContext() const {
460 return previousInContext
;
463 void setPreviousInMemory(StackSegment
*seg
) {
464 previousInMemory
= seg
;
467 StackSegment
*getPreviousInMemory() const {
468 return previousInMemory
;
471 void setInitialVarObj(JSObject
*obj
) {
472 JS_ASSERT(inContext());
476 bool hasInitialVarObj() {
477 JS_ASSERT(inContext());
478 return initialVarObj
!= NULL
;
481 JSObject
&getInitialVarObj() const {
482 JS_ASSERT(inContext() && initialVarObj
);
483 return *initialVarObj
;
487 JS_REQUIRES_STACK
bool contains(const JSStackFrame
*fp
) const;
491 static const size_t VALUES_PER_STACK_SEGMENT
= sizeof(StackSegment
) / sizeof(Value
);
492 JS_STATIC_ASSERT(sizeof(StackSegment
) % sizeof(Value
) == 0);
494 /* See StackSpace::pushInvokeArgs. */
495 class InvokeArgsGuard
: public CallArgs
497 friend class StackSpace
;
498 JSContext
*cx
; /* null implies nothing pushed */
500 Value
*prevInvokeArgEnd
;
502 StackSegment
*prevInvokeSegment
;
503 JSStackFrame
*prevInvokeFrame
;
506 InvokeArgsGuard() : cx(NULL
), seg(NULL
) {}
508 bool pushed() const { return cx
!= NULL
; }
512 * This type can be used to call Invoke when the arguments have already been
513 * pushed onto the stack as part of normal execution.
515 struct InvokeArgsAlreadyOnTheStack
: CallArgs
517 InvokeArgsAlreadyOnTheStack(Value
*vp
, uintN argc
) : CallArgs(vp
+ 2, argc
) {}
520 /* See StackSpace::pushInvokeFrame. */
521 class InvokeFrameGuard
523 friend class StackSpace
;
524 JSContext
*cx_
; /* null implies nothing pushed */
526 JSFrameRegs
*prevRegs_
;
528 InvokeFrameGuard() : cx_(NULL
) {}
529 ~InvokeFrameGuard() { if (pushed()) pop(); }
530 bool pushed() const { return cx_
!= NULL
; }
532 JSStackFrame
*fp() const { return regs_
.fp
; }
535 /* Reusable base; not for direct use. */
538 friend class StackSpace
;
539 JSContext
*cx_
; /* null implies nothing pushed */
544 FrameGuard() : cx_(NULL
), vp_(NULL
), fp_(NULL
) {}
545 JS_REQUIRES_STACK
~FrameGuard();
546 bool pushed() const { return cx_
!= NULL
; }
547 StackSegment
*segment() const { return seg_
; }
548 Value
*vp() const { return vp_
; }
549 JSStackFrame
*fp() const { return fp_
; }
552 /* See StackSpace::pushExecuteFrame. */
553 class ExecuteFrameGuard
: public FrameGuard
555 friend class StackSpace
;
559 /* See StackSpace::pushDummyFrame. */
560 class DummyFrameGuard
: public FrameGuard
562 friend class StackSpace
;
566 /* See StackSpace::pushGeneratorFrame. */
567 class GeneratorFrameGuard
: public FrameGuard
573 * Each JSThreadData has one associated StackSpace object which allocates all
574 * segments for the thread. StackSpace performs all such allocations in a
575 * single, fixed-size buffer using a specific layout scheme that allows some
576 * associations between segments, frames, and slots to be implicit, rather
577 * than explicitly stored as pointers. To maintain useful invariants, stack
578 * space is not given out arbitrarily, but rather allocated/deallocated for
579 * specific purposes. The use cases currently supported are: calling a function
580 * with arguments (e.g. Invoke), executing a script (e.g. Execute), inline
581 * interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See
582 * associated member functions below.
584 * First, we consider the layout of individual segments. (See the
585 * js::StackSegment comment for terminology.) A non-empty segment (i.e., a
586 * segment in a context) has the following layout:
588 * initial frame current frame ------. if regs,
589 * .------------. | | regs->sp
591 * |segment| slots |frame| slots |frame| slots |frame| slots |
593 * ? <----------' `----------' `----------'
596 * Moreover, the bytes in the following ranges form a contiguous array of
597 * Values that are marked during GC:
598 * 1. between a segment and its first frame
599 * 2. between two adjacent frames in a segment
600 * 3. between a segment's current frame and (if fp->regs) fp->regs->sp
601 * Thus, the VM must ensure that all such Values are safe to be marked.
603 * An empty segment is followed by arguments that are rooted by the
604 * StackSpace::invokeArgEnd pointer:
611 * Above the level of segments, a StackSpace is simply a contiguous sequence
612 * of segments kept in a linked list:
614 * base currentSegment firstUnused end
617 * |segment| --- |segment| --- |segment| ------- | |
619 * 0 <---' `-----------' `-----------'
620 * previous previous previous
622 * Both js::StackSpace and JSContext maintain a stack of segments, the top of
623 * which is the "current segment" for that thread or context, respectively.
624 * Since different contexts can arbitrarily interleave execution in a single
625 * thread, these stacks are different enough that a segment needs both
626 * "previousInMemory" and "previousInContext".
628 * For example, in a single thread, a function in segment S1 in a context CX1
629 * may call out into C++ code that reenters the VM in a context CX2, which
630 * creates a new segment S2 in CX2, and CX1 may or may not equal CX2.
632 * Note that there is some structure to this interleaving of segments:
633 * 1. the inclusion from segments in a context to segments in a thread
634 * preserves order (in terms of previousInContext and previousInMemory,
636 * 2. the mapping from stack frames to their containing segment preserves
637 * order (in terms of prev and previousInContext, respectively).
643 mutable Value
*commitEnd
;
646 StackSegment
*currentSegment
;
649 * Keep track of which segment/frame bumped invokeArgEnd so that
650 * firstUnused() can assert that, when invokeArgEnd is used as the top of
651 * the stack, it is being used appropriately.
653 StackSegment
*invokeSegment
;
654 JSStackFrame
*invokeFrame
;
658 friend class InvokeArgsGuard
;
659 friend class InvokeFrameGuard
;
660 friend class FrameGuard
;
662 bool pushSegmentForInvoke(JSContext
*cx
, uintN argc
, InvokeArgsGuard
*ag
);
663 void popSegmentForInvoke(const InvokeArgsGuard
&ag
);
665 bool pushInvokeFrameSlow(JSContext
*cx
, const InvokeArgsGuard
&ag
,
666 InvokeFrameGuard
*fg
);
667 void popInvokeFrameSlow(const CallArgs
&args
);
669 bool getSegmentAndFrame(JSContext
*cx
, uintN vplen
, uintN nfixed
,
670 FrameGuard
*fg
) const;
671 void pushSegmentAndFrame(JSContext
*cx
, JSObject
*initialVarObj
,
672 JSFrameRegs
*regs
, FrameGuard
*fg
);
673 void popSegmentAndFrame(JSContext
*cx
);
675 struct EnsureSpaceCheck
{
676 inline bool operator()(const StackSpace
&, JSContext
*, Value
*, uintN
);
682 LimitCheck(JSStackFrame
*base
, Value
**limit
) : base(base
), limit(limit
) {}
683 inline bool operator()(const StackSpace
&, JSContext
*, Value
*, uintN
);
686 template <class Check
>
687 inline JSStackFrame
*getCallFrame(JSContext
*cx
, Value
*sp
, uintN nactual
,
688 JSFunction
*fun
, JSScript
*script
,
689 uint32
*pflags
, Check check
) const;
691 inline void popInvokeArgs(const InvokeArgsGuard
&args
);
692 inline void popInvokeFrame(const InvokeFrameGuard
&ag
);
694 inline Value
*firstUnused() const;
696 inline bool isCurrentAndActive(JSContext
*cx
) const;
697 friend class AllFramesIter
;
698 StackSegment
*getCurrentSegment() const { return currentSegment
; }
701 /* Commit more memory from the reserved stack space. */
702 JS_FRIEND_API(bool) bumpCommit(Value
*from
, ptrdiff_t nvals
) const;
/* Total reserved stack capacity: 512K Values. */
706 static const size_t CAPACITY_VALS
= 512 * 1024;
/* Reserved capacity expressed in bytes. */
707 static const size_t CAPACITY_BYTES
= CAPACITY_VALS
* sizeof(Value
);
/* Granularity, in Values, at which reserved memory is committed (see bumpCommit). */
708 static const size_t COMMIT_VALS
= 16 * 1024;
/* Commit granularity expressed in bytes. */
709 static const size_t COMMIT_BYTES
= COMMIT_VALS
* sizeof(Value
);
712 * SunSpider and v8bench have roughly an average of 9 slots per script.
713 * Our heuristic for a quick over-recursion check uses a generous slot
714 * count based on this estimate. We take this frame size and multiply it
715 * by the old recursion limit from the interpreter.
717 * Worst case, if an average size script (<=9 slots) over recurses, it'll
718 * effectively be the same as having increased the old inline call count
721 static const size_t STACK_QUOTA
= (VALUES_PER_STACK_FRAME
+ 18) *
722 JS_MAX_INLINE_CALL_COUNT
;
724 /* Kept as a member of JSThreadData; cannot use constructor/destructor. */
730 bool contains(T
*t
) const {
732 JS_ASSERT(size_t(-1) - uintptr_t(t
) >= sizeof(T
));
733 return v
>= (char *)base
&& v
+ sizeof(T
) <= (char *)end
;
738 * When we LeaveTree, we need to rebuild the stack, which requires stack
739 * allocation. There is no good way to handle an OOM for these allocations,
740 * so this function checks that they cannot occur using the size of the
741 * TraceNativeStorage as a conservative upper bound.
743 inline bool ensureEnoughSpaceToEnterTrace();
745 /* +1 for slow native's stack frame. */
746 static const ptrdiff_t MAX_TRACE_SPACE_VALS
=
747 MAX_NATIVE_STACK_SLOTS
+ MAX_CALL_STACK_ENTRIES
* VALUES_PER_STACK_FRAME
+
748 (VALUES_PER_STACK_SEGMENT
+ VALUES_PER_STACK_FRAME
/* synthesized slow native */);
750 /* Mark all segments, frames, and slots on the stack. */
751 JS_REQUIRES_STACK
void mark(JSTracer
*trc
);
754 * For all five use cases below:
755 * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
756 * - The "get*Frame" functions do not change any global state, they just
757 * check OOM and return pointers to an uninitialized frame with the
758 * requested missing arguments/slots. Only once the "push*Frame"
759 * function has been called is global state updated. Thus, between
760 * "get*Frame" and "push*Frame", the frame and slots are unrooted.
761 * - The "push*Frame" functions will set fp->prev; the caller needn't.
762 * - Functions taking "*Guard" arguments will use the guard's destructor
763 * to pop the allocation. The caller must ensure the guard has the
764 * appropriate lifetime.
765 * - The get*Frame functions put the 'nmissing' slots contiguously after
770 * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
771 * the arguments to Invoke. A single allocation can be used for multiple
772 * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from
773 * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
775 bool pushInvokeArgs(JSContext
*cx
, uintN argc
, InvokeArgsGuard
*ag
);
777 /* These functions are called inside Invoke, not Invoke clients. */
778 bool getInvokeFrame(JSContext
*cx
, const CallArgs
&args
, JSFunction
*fun
,
779 JSScript
*script
, uint32
*flags
, InvokeFrameGuard
*fg
) const;
781 void pushInvokeFrame(JSContext
*cx
, const CallArgs
&args
, InvokeFrameGuard
*fg
);
783 /* These functions are called inside Execute, not Execute clients. */
784 bool getExecuteFrame(JSContext
*cx
, JSScript
*script
, ExecuteFrameGuard
*fg
) const;
785 void pushExecuteFrame(JSContext
*cx
, JSObject
*initialVarObj
, ExecuteFrameGuard
*fg
);
788 * Since RAII cannot be used for inline frames, callers must manually
789 * call pushInlineFrame/popInlineFrame.
791 inline JSStackFrame
*getInlineFrame(JSContext
*cx
, Value
*sp
, uintN nactual
,
792 JSFunction
*fun
, JSScript
*script
,
793 uint32
*flags
) const;
794 inline void pushInlineFrame(JSContext
*cx
, JSScript
*script
, JSStackFrame
*fp
,
796 inline void popInlineFrame(JSContext
*cx
, JSStackFrame
*prev
, js::Value
*newsp
);
798 /* These functions are called inside SendToGenerator. */
799 bool getGeneratorFrame(JSContext
*cx
, uintN vplen
, uintN nfixed
,
800 GeneratorFrameGuard
*fg
);
801 void pushGeneratorFrame(JSContext
*cx
, JSFrameRegs
*regs
, GeneratorFrameGuard
*fg
);
803 /* Pushes a JSStackFrame::isDummyFrame. */
804 bool pushDummyFrame(JSContext
*cx
, JSObject
&scopeChain
, DummyFrameGuard
*fg
);
806 /* Check and bump the given stack limit. */
807 inline JSStackFrame
*getInlineFrameWithinLimit(JSContext
*cx
, Value
*sp
, uintN nactual
,
808 JSFunction
*fun
, JSScript
*script
, uint32
*flags
,
809 JSStackFrame
*base
, Value
**limit
) const;
812 * Compute a stack limit for entering method jit code which allows the
813 * method jit to check for end-of-stack and over-recursion with a single
814 * comparison. See STACK_QUOTA above.
816 inline Value
*getStackLimit(JSContext
*cx
);
819 * Try to bump the given 'limit' by bumping the commit limit. Return false
820 * if fully committed or if 'limit' exceeds 'base' + STACK_QUOTA.
822 bool bumpCommitAndLimit(JSStackFrame
*base
, Value
*from
, uintN nvals
, Value
**limit
) const;
825 * Allocate nvals on the top of the stack, report error on failure.
826 * N.B. the caller must ensure |from >= firstUnused()|.
828 inline bool ensureSpace(JSContext
*maybecx
, Value
*from
, ptrdiff_t nvals
) const;
831 JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS
% StackSpace::COMMIT_VALS
== 0);
834 * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved
835 * value of pc/sp for any other frame, it is necessary to know about that
836 * frame's next-frame. This iterator maintains this information when walking
837 * a chain of stack frames starting at |cx->fp|.
840 * for (FrameRegsIter i(cx); !i.done(); ++i)
841 * ... i.fp() ... i.sp() ... i.pc()
846 StackSegment
*curseg
;
852 void incSlow(JSStackFrame
*fp
, JSStackFrame
*prev
);
855 JS_REQUIRES_STACK
inline FrameRegsIter(JSContext
*cx
);
857 bool done() const { return curfp
== NULL
; }
858 inline FrameRegsIter
&operator++();
860 JSStackFrame
*fp() const { return curfp
; }
861 Value
*sp() const { return cursp
; }
862 jsbytecode
*pc() const { return curpc
; }
866 * Utility class for iteration over all active stack frames.
871 AllFramesIter(JSContext
*cx
);
873 bool done() const { return curfp
== NULL
; }
874 AllFramesIter
& operator++();
876 JSStackFrame
*fp() const { return curfp
; }
886 * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
887 * thread, regardless of whether cx is the context in which that trace is
888 * executing. cx must be a context on the current thread.
891 # define JS_ON_TRACE(cx) (cx->compartment && JS_TRACE_MONITOR(cx).ontrace())
893 # define JS_ON_TRACE(cx) false
897 # define FUNCTION_KIND_METER_LIST(_) \
898 _(allfun), _(heavy), _(nofreeupvar), _(onlyfreevar), \
899 _(display), _(flat), _(setupvar), _(badfunarg), \
900 _(joinedsetmethod), _(joinedinitmethod), \
901 _(joinedreplace), _(joinedsort), _(joinedmodulepat), \
902 _(mreadbarrier), _(mwritebarrier), _(mwslotbarrier), \
904 # define identity(x) x
906 struct JSFunctionMeter
{
907 int32
FUNCTION_KIND_METER_LIST(identity
);
912 # define JS_FUNCTION_METER(cx,x) JS_RUNTIME_METER((cx)->runtime, functionMeter.x)
914 # define JS_FUNCTION_METER(cx,x) ((void)0)
918 struct JSPendingProxyOperation
{
919 JSPendingProxyOperation
*next
;
923 struct JSThreadData
{
925 /* The request depth for this thread. */
926 unsigned requestDepth
;
930 * If non-zero, we have been asked to call the operation callback as soon
931 * as possible. If the thread has an active request, this contributes
932 * towards rt->interruptCounter.
934 volatile int32 interruptFlags
;
936 /* Keeper of the contiguous stack used by all contexts in this thread. */
937 js::StackSpace stackSpace
;
940 * Flag indicating that we are waiving any soft limits on the GC heap
941 * because we want allocations to be infallible (except when we hit OOM).
946 * The GSN cache is per thread since even multi-cx-per-thread embeddings
947 * do not interleave js_GetSrcNote calls.
951 /* Property cache for faster call/get/set invocation. */
952 js::PropertyCache propertyCache
;
955 /* Maximum size of the tracer's code cache before we start flushing. */
956 uint32 maxCodeCacheBytes
;
959 /* State used by dtoa.c. */
960 DtoaState
*dtoaState
;
963 * A single-entry cache for some base-10 double-to-string conversions.
964 * This helps date-format-xparb.js. It also avoids skewing the results
965 * for v8-splay.js when measured by the SunSpider harness, where the splay
966 * tree initialization (which includes many repeated double-to-string
967 * conversions) is erroneously included in the measurement; see bug
973 JSString
*s
; // if s==NULL, d and base are not valid
976 /* Base address of the native stack for the current thread. */
977 jsuword
*nativeStackBase
;
979 /* List of currently pending operations on proxies. */
980 JSPendingProxyOperation
*pendingProxyOperation
;
982 js::ConservativeGCThreadData conservativeGC
;
986 void mark(JSTracer
*trc
);
987 void purge(JSContext
*cx
);
989 /* This must be called with the GC lock held. */
990 inline void triggerOperationCallback(JSRuntime
*rt
);
996 * Structure uniquely representing a thread. It holds thread-private data
997 * that can be accessed without a global lock.
1000 typedef js::HashMap
<void *,
1002 js::DefaultHasher
<void *>,
1003 js::SystemAllocPolicy
> Map
;
1005 /* Linked list of all contexts in use on this thread. */
1006 JSCList contextList
;
1008 /* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */
1011 /* Number of JS_SuspendRequest calls without JS_ResumeRequest. */
1012 unsigned suspendCount
;
1015 unsigned checkRequestDepth
;
1018 /* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
1022 #define JS_THREAD_DATA(cx) (&(cx)->thread->data)
1025 js_CurrentThread(JSRuntime
*rt
);
1028 * The function takes the GC lock and does not release in successful return.
1029 * On error (out of memory) the function releases the lock but delegates
1030 * the error reporting to the caller.
1033 js_InitContextThread(JSContext
*cx
);
1036 * On entrance the GC lock must be held and it will be held on exit.
1039 js_ClearContextThread(JSContext
*cx
);
1041 #endif /* JS_THREADSAFE */
1043 typedef enum JSDestroyContextMode
{
1048 } JSDestroyContextMode
;
1050 typedef enum JSRuntimeState
{
1057 typedef struct JSPropertyTreeEntry
{
1058 JSDHashEntryHdr hdr
;
1060 } JSPropertyTreeEntry
;
1063 (* JSActivityCallback
)(void *arg
, JSBool active
);
1067 typedef js::Vector
<JSCompartment
*, 0, js::SystemAllocPolicy
> WrapperVector
;
1072 /* Default compartment. */
1073 JSCompartment
*defaultCompartment
;
1074 #ifdef JS_THREADSAFE
1075 bool defaultCompartmentIsLocked
;
1078 /* List of compartments (protected by the GC lock). */
1079 js::WrapperVector compartments
;
1081 /* Runtime state, synchronized by the stateChange/gcLock condvar/lock. */
1082 JSRuntimeState state
;
1084 /* Context create/destroy callback. */
1085 JSContextCallback cxCallback
;
1087 /* Compartment create/destroy callback. */
1088 JSCompartmentCallback compartmentCallback
;
1091 * Sets a callback that is run whenever the runtime goes idle - the
1092 * last active request ceases - and begins activity - when it was
1093 * idle and a request begins. Note: The callback is called under the
1096 void setActivityCallback(JSActivityCallback cb
, void *arg
) {
1097 activityCallback
= cb
;
1098 activityCallbackArg
= arg
;
1101 JSActivityCallback activityCallback
;
1102 void *activityCallbackArg
;
1105 * Shape regenerated whenever a prototype implicated by an "add property"
1106 * property cache fill and induced trace guard has a readonly property or a
1107 * setter defined on it. This number proxies for the shapes of all objects
1108 * along the prototype chain of all objects in the runtime on which such an
1109 * add-property result has been cached/traced.
1111 * See bug 492355 for more details.
1113 * This comes early in JSRuntime to minimize the immediate format used by
1114 * trace-JITted code that reads it.
1116 uint32 protoHazardShape
;
1118 /* Garbage collector state, used by jsgc.c. */
1119 js::GCChunkSet gcChunkSet
;
1121 js::RootedValueMap gcRootsHash
;
1122 js::GCLocks gcLocksHash
;
1123 jsrefcount gcKeepAtoms
;
1125 size_t gcTriggerBytes
;
1128 size_t gcMaxMallocBytes
;
1129 uint32 gcEmptyArenaPoolLifespan
;
1131 js::GCMarker
*gcMarkingTracer
;
1132 uint32 gcTriggerFactor
;
1133 int64 gcJitReleaseTime
;
1134 volatile JSBool gcIsNeeded
;
1137 * We can pack these flags as only the GC thread writes to them. Atomic
1138 * updates to packed bytes are not guaranteed, so stores issued by one
1139 * thread may be lost due to unsynchronized read-modify-write cycles on
1143 bool gcMarkAndSweep
;
1151 JSGCCallback gcCallback
;
1155 * Malloc counter to measure memory pressure for GC scheduling. It runs
1156 * from gcMaxMallocBytes down to zero.
1158 volatile ptrdiff_t gcMallocBytes
;
1161 js::GCChunkAllocator
*gcChunkAllocator
;
1163 void setCustomGCChunkAllocator(js::GCChunkAllocator
*allocator
) {
1164 JS_ASSERT(allocator
);
1165 JS_ASSERT(state
== JSRTS_DOWN
);
1166 gcChunkAllocator
= allocator
;
1170 * The trace operation and its data argument to trace embedding-specific
1173 JSTraceDataOp gcExtraRootsTraceOp
;
1174 void *gcExtraRootsData
;
1176 /* Well-known numbers held for use by this runtime's contexts. */
1178 js::Value negativeInfinityValue
;
1179 js::Value positiveInfinityValue
;
1181 JSFlatString
*emptyString
;
1183 /* List of active contexts sharing this runtime; protected by gcLock. */
1184 JSCList contextList
;
1186 /* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */
1187 JSDebugHooks globalDebugHooks
;
1190 * Right now, we only support runtime-wide debugging.
1195 /* True if any debug hooks not supported by the JIT are enabled. */
1196 bool debuggerInhibitsJIT() const {
1197 return (globalDebugHooks
.interruptHook
||
1198 globalDebugHooks
.callHook
);
1202 /* More debugging state, see jsdbgapi.c. */
1204 JSCList watchPointList
;
1206 /* Client opaque pointers */
1209 #ifdef JS_THREADSAFE
1210 /* These combine to interlock the GC and new requests. */
1213 PRCondVar
*requestDone
;
1214 uint32 requestCount
;
1217 js::GCHelperThread gcHelperThread
;
1219 /* Lock and owning thread pointer for JS_LOCK_RUNTIME. */
1225 /* Used to synchronize down/up state change; protected by gcLock. */
1226 PRCondVar
*stateChange
;
1229 * Lock serializing trapList and watchPointList accesses, and count of all
1230 * mutations to trapList and watchPointList made by debugger threads. To
1231 * keep the code simple, we define debuggerMutations for the thread-unsafe
1234 PRLock
*debuggerLock
;
1236 JSThread::Map threads
;
1237 #endif /* JS_THREADSAFE */
1238 uint32 debuggerMutations
;
1241 * Security callbacks set on the runtime are used by each context unless
1242 * an override is set on the context.
1244 JSSecurityCallbacks
*securityCallbacks
;
1246 /* Structured data callbacks are runtime-wide. */
1247 const JSStructuredCloneCallbacks
*structuredCloneCallbacks
;
1250 * Shared scope property tree, and arena-pool for allocating its nodes.
1251 * This really should be free of all locking overhead and allocated in
1252 * thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
1254 js::PropertyTree propertyTree
;
1256 #define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
1259 * The propertyRemovals counter is incremented for every JSObject::clear,
1260 * and for each JSObject::remove method call that frees a slot in the given
1261 * object. See js_NativeGet and js_NativeSet in jsobj.cpp.
1263 int32 propertyRemovals
;
1265 /* Script filename table. */
1266 struct JSHashTable
*scriptFilenameTable
;
1267 JSCList scriptFilenamePrefixes
;
1268 #ifdef JS_THREADSAFE
1269 PRLock
*scriptFilenameTableLock
;
1272 /* Number localization, used by jsnum.c */
1273 const char *thousandsSeparator
;
1274 const char *decimalSeparator
;
1275 const char *numGrouping
;
1278 * Weak references to lazily-created, well-known XML singletons.
1280 * NB: Singleton objects must be carefully disconnected from the rest of
1281 * the object graph usually associated with a JSContext's global object,
1282 * including the set of standard class objects. See jsxml.c for details.
1284 JSObject
*anynameObject
;
1285 JSObject
*functionNamespaceObject
;
1287 #ifdef JS_THREADSAFE
1288 /* Number of threads with active requests and unhandled interrupts. */
1289 volatile int32 interruptCounter
;
1291 JSThreadData threadData
;
1293 #define JS_THREAD_DATA(cx) (&(cx)->runtime->threadData)
1297 * Object shape (property cache structural type) identifier generator.
1299 * Type 0 stands for the empty scope, and must not be regenerated due to
1300 * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
1301 * atomic pre-increment, the initial value for the first typed non-empty
1304 * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
1305 * cache is disabled, to avoid aliasing two different types. It stays
1306 * disabled until a triggered GC at some later moment compresses live
1307 * types, minimizing rt->shapeGen in the process.
1309 volatile uint32 shapeGen
;
1311 /* Literal table maintained by jsatom.c functions. */
1312 JSAtomState atomState
;
1315 * Runtime-shared empty scopes for well-known built-in objects that lack
1316 * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
1318 js::EmptyShape
*emptyArgumentsShape
;
1319 js::EmptyShape
*emptyBlockShape
;
1320 js::EmptyShape
*emptyCallShape
;
1321 js::EmptyShape
*emptyDeclEnvShape
;
1322 js::EmptyShape
*emptyEnumeratorShape
;
1323 js::EmptyShape
*emptyWithShape
;
1326 * Various metering fields are defined at the end of JSRuntime. In this
1327 * way there is no need to recompile all the code that refers to other
1328 * fields of JSRuntime after enabling the corresponding metering macro.
1330 #ifdef JS_DUMP_ENUM_CACHE_STATS
1331 int32 nativeEnumProbes
;
1332 int32 nativeEnumMisses
;
1333 # define ENUM_CACHE_METER(name) JS_ATOMIC_INCREMENT(&cx->runtime->name)
1335 # define ENUM_CACHE_METER(name) ((void) 0)
1338 #ifdef JS_DUMP_LOOP_STATS
1339 /* Loop statistics, to trigger trace recording and compiling. */
1340 JSBasicStats loopStats
;
1344 /* Function invocation metering. */
1345 jsrefcount inlineCalls
;
1346 jsrefcount nativeCalls
;
1347 jsrefcount nonInlineCalls
;
1348 jsrefcount constructs
;
1350 /* Property metering. */
1351 jsrefcount liveObjectProps
;
1352 jsrefcount liveObjectPropsPreSweep
;
1353 jsrefcount totalObjectProps
;
1354 jsrefcount livePropTreeNodes
;
1355 jsrefcount duplicatePropTreeNodes
;
1356 jsrefcount totalPropTreeNodes
;
1357 jsrefcount propTreeKidsChunks
;
1358 jsrefcount liveDictModeNodes
;
1361 * NB: emptyShapes is init'ed iff at least one of these envars is set:
1363 * JS_PROPTREE_STATFILE statistics on the property tree forest
1364 * JS_PROPTREE_DUMPFILE all paths in the property tree forest
1366 const char *propTreeStatFilename
;
1367 const char *propTreeDumpFilename
;
1369 bool meterEmptyShapes() const { return propTreeStatFilename
|| propTreeDumpFilename
; }
1371 typedef js::HashSet
<js::EmptyShape
*,
1372 js::DefaultHasher
<js::EmptyShape
*>,
1373 js::SystemAllocPolicy
> EmptyShapeSet
;
1375 EmptyShapeSet emptyShapes
;
1377 /* String instrumentation. */
1378 jsrefcount liveStrings
;
1379 jsrefcount totalStrings
;
1380 jsrefcount liveDependentStrings
;
1381 jsrefcount totalDependentStrings
;
1382 jsrefcount badUndependStrings
;
1384 double lengthSquaredSum
;
1385 double strdepLengthSum
;
1386 double strdepLengthSquaredSum
;
1388 /* Script instrumentation. */
1389 jsrefcount liveScripts
;
1390 jsrefcount totalScripts
;
1391 jsrefcount liveEmptyScripts
;
1392 jsrefcount totalEmptyScripts
;
1393 jsrefcount highWaterLiveScripts
;
1396 #ifdef JS_SCOPE_DEPTH_METER
1398 * Stats on runtime prototype chain lookups and scope chain depths, i.e.,
1399 * counts of objects traversed on a chain until the wanted id is found.
1401 JSBasicStats protoLookupDepthStats
;
1402 JSBasicStats scopeSearchDepthStats
;
1405 * Stats on compile-time host environment and lexical scope chain lengths
1408 JSBasicStats hostenvScopeDepthStats
;
1409 JSBasicStats lexicalScopeDepthStats
;
1413 js::gc::JSGCStats gcStats
;
1414 js::gc::JSGCArenaStats globalArenaStats
[js::gc::FINALIZE_LIMIT
];
1419 * If functionMeterFilename, set from an envariable in JSRuntime's ctor, is
1420 * null, the remaining members in this ifdef'ed group are not initialized.
1422 const char *functionMeterFilename
;
1423 JSFunctionMeter functionMeter
;
1424 char lastScriptFilename
[1024];
1426 typedef js::HashMap
<JSFunction
*,
1428 js::DefaultHasher
<JSFunction
*>,
1429 js::SystemAllocPolicy
> FunctionCountMap
;
1431 FunctionCountMap methodReadBarrierCountMap
;
1432 FunctionCountMap unjoinedFunctionCountMap
;
1435 JSWrapObjectCallback wrapObjectCallback
;
1436 JSPreWrapCallback preWrapObjectCallback
;
1439 uint32 mjitMemoryUsed
;
1441 uint32 stringMemoryUsed
;
1446 bool init(uint32 maxbytes
);
1448 void setGCTriggerFactor(uint32 factor
);
1449 void setGCLastBytes(size_t lastBytes
);
1452 * Call the system malloc while checking for GC memory pressure and
1453 * reporting OOM error when cx is not null.
1455 void* malloc(size_t bytes
, JSContext
*cx
= NULL
) {
1456 updateMallocCounter(bytes
);
1457 void *p
= ::js_malloc(bytes
);
1458 return JS_LIKELY(!!p
) ? p
: onOutOfMemory(NULL
, bytes
, cx
);
1462 * Call the system calloc while checking for GC memory pressure and
1463 * reporting OOM error when cx is not null.
1465 void* calloc(size_t bytes
, JSContext
*cx
= NULL
) {
1466 updateMallocCounter(bytes
);
1467 void *p
= ::js_calloc(bytes
);
1468 return JS_LIKELY(!!p
) ? p
: onOutOfMemory(reinterpret_cast<void *>(1), bytes
, cx
);
1471 void* realloc(void* p
, size_t oldBytes
, size_t newBytes
, JSContext
*cx
= NULL
) {
1472 JS_ASSERT(oldBytes
< newBytes
);
1473 updateMallocCounter(newBytes
- oldBytes
);
1474 void *p2
= ::js_realloc(p
, newBytes
);
1475 return JS_LIKELY(!!p2
) ? p2
: onOutOfMemory(p
, newBytes
, cx
);
1478 void* realloc(void* p
, size_t bytes
, JSContext
*cx
= NULL
) {
1480 * For compatibility we do not account for realloc that increases
1481 * previously allocated memory.
1484 updateMallocCounter(bytes
);
1485 void *p2
= ::js_realloc(p
, bytes
);
1486 return JS_LIKELY(!!p2
) ? p2
: onOutOfMemory(p
, bytes
, cx
);
1489 void free(void* p
) { ::js_free(p
); }
1491 bool isGCMallocLimitReached() const { return gcMallocBytes
<= 0; }
1493 void resetGCMallocBytes() { gcMallocBytes
= ptrdiff_t(gcMaxMallocBytes
); }
1495 void setGCMaxMallocBytes(size_t value
) {
1497 * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
1500 gcMaxMallocBytes
= (ptrdiff_t(value
) >= 0) ? value
: size_t(-1) >> 1;
1501 resetGCMallocBytes();
1505 * Call this after allocating memory held by GC things, to update memory
1506 * pressure counters or report the OOM error if necessary. If oomError and
1507 * cx is not null the function also reports OOM error.
1509 * The function must be called outside the GC lock and in case of OOM error
1510 * the caller must ensure that no deadlock possible during OOM reporting.
1512 void updateMallocCounter(size_t nbytes
) {
1513 /* We tolerate any thread races when updating gcMallocBytes. */
1514 ptrdiff_t newCount
= gcMallocBytes
- ptrdiff_t(nbytes
);
1515 gcMallocBytes
= newCount
;
1516 if (JS_UNLIKELY(newCount
<= 0))
1522 * The function must be called outside the GC lock.
1524 JS_FRIEND_API(void) onTooMuchMalloc();
1527 * This should be called after system malloc/realloc returns NULL to try
1528 * to recover some memory or to report an error. Failures in malloc and
1529 * calloc are signaled by p == null and p == reinterpret_cast<void *>(1).
1530 * Other values of p mean a realloc failure.
1532 * The function must be called outside the GC lock.
1534 JS_FRIEND_API(void *) onOutOfMemory(void *p
, size_t nbytes
, JSContext
*cx
);
1537 /* Common macros to access thread-local caches in JSThread or JSRuntime. */
1538 #define JS_GSN_CACHE(cx) (JS_THREAD_DATA(cx)->gsnCache)
1539 #define JS_PROPERTY_CACHE(cx) (JS_THREAD_DATA(cx)->propertyCache)
1542 # define JS_RUNTIME_METER(rt, which) JS_ATOMIC_INCREMENT(&(rt)->which)
1543 # define JS_RUNTIME_UNMETER(rt, which) JS_ATOMIC_DECREMENT(&(rt)->which)
1545 # define JS_RUNTIME_METER(rt, which) /* nothing */
1546 # define JS_RUNTIME_UNMETER(rt, which) /* nothing */
1549 #define JS_KEEP_ATOMS(rt) JS_ATOMIC_INCREMENT(&(rt)->gcKeepAtoms);
1550 #define JS_UNKEEP_ATOMS(rt) JS_ATOMIC_DECREMENT(&(rt)->gcKeepAtoms);
1552 #ifdef JS_ARGUMENT_FORMATTER_DEFINED
1554 * Linked list mapping format strings for JS_{Convert,Push}Arguments{,VA} to
1555 * formatter functions. Elements are sorted in non-increasing format string
1558 struct JSArgumentFormatMap
{
1561 JSArgumentFormatter formatter
;
1562 JSArgumentFormatMap
*next
;
1567 * Key and entry types for the JSContext.resolvingTable hash table, typedef'd
1568 * here because all consumers need to see these declarations (and not just the
1569 * typedef names, as would be the case for an opaque pointer-to-typedef'd-type
1570 * declaration), along with cx->resolvingTable.
1572 typedef struct JSResolvingKey
{
1577 typedef struct JSResolvingEntry
{
1578 JSDHashEntryHdr hdr
;
1583 #define JSRESFLAG_LOOKUP 0x1 /* resolving id from lookup */
1584 #define JSRESFLAG_WATCH 0x2 /* resolving id from watch */
1585 #define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */
1587 extern const JSDebugHooks js_NullDebugHooks
; /* defined in jsdbgapi.cpp */
1593 #define JS_HAS_OPTION(cx,option) (((cx)->options & (option)) != 0)
1594 #define JS_HAS_STRICT_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_STRICT)
1595 #define JS_HAS_WERROR_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_WERROR)
1596 #define JS_HAS_COMPILE_N_GO_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_COMPILE_N_GO)
1597 #define JS_HAS_ATLINE_OPTION(cx) JS_HAS_OPTION(cx, JSOPTION_ATLINE)
1600 OptionsHasXML(uint32 options
)
1602 return !!(options
& JSOPTION_XML
);
1606 OptionsHasAnonFunFix(uint32 options
)
1608 return !!(options
& JSOPTION_ANONFUNFIX
);
1612 OptionsSameVersionFlags(uint32 self
, uint32 other
)
1614 static const uint32 mask
= JSOPTION_XML
| JSOPTION_ANONFUNFIX
;
1615 return !((self
& mask
) ^ (other
& mask
));
1619 * Flags accompany script version data so that a) dynamically created scripts
1620 * can inherit their caller's compile-time properties and b) scripts can be
1621 * appropriately compared in the eval cache across global option changes. An
1622 * example of the latter is enabling the top-level-anonymous-function-is-error
1623 * option: subsequent evals of the same, previously-valid script text may have
1626 namespace VersionFlags
{
1627 static const uint32 MASK
= 0x0FFF; /* see JSVersion in jspubtd.h */
1628 static const uint32 HAS_XML
= 0x1000; /* flag induced by XML option */
1629 static const uint32 ANONFUNFIX
= 0x2000; /* see jsapi.h comment on JSOPTION_ANONFUNFIX */
1632 static inline JSVersion
1633 VersionNumber(JSVersion version
)
1635 return JSVersion(uint32(version
) & VersionFlags::MASK
);
1639 VersionHasXML(JSVersion version
)
1641 return !!(version
& VersionFlags::HAS_XML
);
1644 /* @warning This is a distinct condition from having the XML flag set. */
1646 VersionShouldParseXML(JSVersion version
)
1648 return VersionHasXML(version
) || VersionNumber(version
) >= JSVERSION_1_6
;
1652 VersionHasAnonFunFix(JSVersion version
)
1654 return !!(version
& VersionFlags::ANONFUNFIX
);
1658 VersionSetXML(JSVersion
*version
, bool enable
)
1661 *version
= JSVersion(uint32(*version
) | VersionFlags::HAS_XML
);
1663 *version
= JSVersion(uint32(*version
) & ~VersionFlags::HAS_XML
);
1667 VersionSetAnonFunFix(JSVersion
*version
, bool enable
)
1670 *version
= JSVersion(uint32(*version
) | VersionFlags::ANONFUNFIX
);
1672 *version
= JSVersion(uint32(*version
) & ~VersionFlags::ANONFUNFIX
);
1675 static inline JSVersion
1676 VersionExtractFlags(JSVersion version
)
1678 return JSVersion(uint32(version
) & ~VersionFlags::MASK
);
1682 VersionHasFlags(JSVersion version
)
1684 return !!VersionExtractFlags(version
);
1688 VersionIsKnown(JSVersion version
)
1690 return VersionNumber(version
) != JSVERSION_UNKNOWN
;
1693 } /* namespace js */
1697 explicit JSContext(JSRuntime
*rt
);
1699 /* JSRuntime contextList linkage. */
1703 /* See JSContext::findVersion. */
1704 JSVersion defaultVersion
; /* script compilation version */
1705 JSVersion versionOverride
; /* supersedes defaultVersion when valid */
1706 bool hasVersionOverride
;
1708 /* Exception state -- the exception member is a GC root by definition. */
1709 JSBool throwing
; /* is there a pending exception? */
1710 js::Value exception
; /* most-recently-thrown exception */
1713 /* Per-context options. */
1714 uint32 options
; /* see jsapi.h for JSOPTION_* */
1716 /* Locale specific callbacks for string conversion. */
1717 JSLocaleCallbacks
*localeCallbacks
;
1720 * cx->resolvingTable is non-null and non-empty if we are initializing
1721 * standard classes lazily, or if we are otherwise recursing indirectly
1722 * from js_LookupProperty through a Class.resolve hook. It is used to
1723 * limit runaway recursion (see jsapi.c and jsobj.c).
1725 JSDHashTable
*resolvingTable
;
1728 * True if generating an error, to prevent runaway recursion.
1729 * NB: generatingError packs with throwing below.
1731 JSPackedBool generatingError
;
1733 /* Limit pointer for checking native stack consumption during recursion. */
1736 /* Quota on the size of arenas used to compile and execute scripts. */
1737 size_t scriptStackQuota
;
1739 /* Data shared by threads in an address space. */
1740 JSRuntime
*const runtime
;
1742 /* GC heap compartment. */
1743 JSCompartment
*compartment
;
1745 /* Currently executing frame and regs, set by stack operations. */
1749 /* Current frame accessors. */
1751 JSStackFrame
* fp() {
1752 JS_ASSERT(regs
&& regs
->fp
);
1756 JSStackFrame
* maybefp() {
1757 JS_ASSERT_IF(regs
, regs
->fp
);
1758 return regs
? regs
->fp
: NULL
;
1762 JS_ASSERT_IF(regs
, regs
->fp
);
1767 friend class js::StackSpace
;
1768 friend bool js::Interpret(JSContext
*, JSStackFrame
*, uintN
, JSInterpMode
);
1770 void resetCompartment();
1772 /* 'regs' must only be changed by calling this function. */
1773 void setCurrentRegs(JSFrameRegs
*regs
) {
1774 JS_ASSERT_IF(regs
, regs
->fp
);
1780 /* Temporary arena pool used while compiling and decompiling. */
1781 JSArenaPool tempPool
;
1783 /* Temporary arena pool used while evaluate regular expressions. */
1784 JSArenaPool regExpPool
;
1786 /* Top-level object and pointer to top stack frame's scope chain. */
1787 JSObject
*globalObject
;
1789 /* State for object and array toSource conversion. */
1790 JSSharpObjectMap sharpObjectMap
;
1791 js::HashSet
<JSObject
*> busyArrays
;
1793 /* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */
1794 JSArgumentFormatMap
*argumentFormatMap
;
1796 /* Last message string and log file for debugging. */
1800 jsbytecode
*logPrevPc
;
1803 /* Per-context optional error reporter. */
1804 JSErrorReporter errorReporter
;
1806 /* Branch callback. */
1807 JSOperationCallback operationCallback
;
1809 /* Interpreter activation count. */
1812 /* Client opaque pointers. */
1817 /* Linked list of segments. See StackSegment. */
1818 js::StackSegment
*currentSegment
;
1821 void assertSegmentsInSync() const {
1824 JS_ASSERT(currentSegment
->isActive());
1825 if (js::StackSegment
*prev
= currentSegment
->getPreviousInContext())
1826 JS_ASSERT(!prev
->isActive());
1828 JS_ASSERT_IF(currentSegment
, !currentSegment
->isActive());
1833 /* Return whether this context has an active segment. */
1834 bool hasActiveSegment() const {
1835 assertSegmentsInSync();
1839 /* Assuming there is an active segment, return it. */
1840 js::StackSegment
*activeSegment() const {
1841 JS_ASSERT(hasActiveSegment());
1842 return currentSegment
;
1845 /* Return the current segment, which may or may not be active. */
1846 js::StackSegment
*getCurrentSegment() const {
1847 assertSegmentsInSync();
1848 return currentSegment
;
1851 inline js::RegExpStatics
*regExpStatics();
1853 /* Add the given segment to the list as the new active segment. */
1854 void pushSegmentAndFrame(js::StackSegment
*newseg
, JSFrameRegs
®s
);
1856 /* Remove the active segment and make the next segment active. */
1857 void popSegmentAndFrame();
1859 /* Mark the top segment as suspended, without pushing a new one. */
1860 void saveActiveSegment();
1862 /* Undoes calls to suspendActiveSegment. */
1863 void restoreSegment();
1865 /* Get the frame whose prev() is fp, which may be in any segment. */
1866 inline JSStackFrame
*computeNextFrame(JSStackFrame
*fp
);
1869 * Perform a linear search of all frames in all segments in the given context
1870 * for the given frame, returning the segment, if found, and null otherwise.
1872 js::StackSegment
*containingSegment(const JSStackFrame
*target
);
1874 /* Search the call stack for the nearest frame with static level targetLevel. */
1875 JSStackFrame
*findFrameAtLevel(uintN targetLevel
) const {
1876 JSStackFrame
*fp
= regs
->fp
;
1878 JS_ASSERT(fp
&& fp
->isScriptFrame());
1879 if (fp
->script()->staticLevel
== targetLevel
)
1888 * The default script compilation version can be set iff there is no code running.
1889 * This typically occurs via the JSAPI right after a context is constructed.
1891 bool canSetDefaultVersion() const {
1892 return !regs
&& !hasVersionOverride
;
1895 /* Force a version for future script compilation. */
1896 void overrideVersion(JSVersion newVersion
) {
1897 JS_ASSERT(!canSetDefaultVersion());
1898 versionOverride
= newVersion
;
1899 hasVersionOverride
= true;
1903 void clearVersionOverride() {
1904 hasVersionOverride
= false;
1907 bool isVersionOverridden() const {
1908 return hasVersionOverride
;
1911 /* Set the default script compilation version. */
1912 void setDefaultVersion(JSVersion version
) {
1913 defaultVersion
= version
;
1917 * Set the default version if possible; otherwise, force the version.
1918 * Return whether an override occurred.
1920 bool maybeOverrideVersion(JSVersion newVersion
) {
1921 if (canSetDefaultVersion()) {
1922 setDefaultVersion(newVersion
);
1925 overrideVersion(newVersion
);
1931 * - The override version, if there is an override version.
1932 * - The newest scripted frame's version, if there is such a frame.
1933 * - The default version.
1935 * Note: if this ever shows up in a profile, just add caching!
1937 JSVersion
findVersion() const {
1938 if (hasVersionOverride
)
1939 return versionOverride
;
1942 /* There may be a scripted function somewhere on the stack! */
1943 JSStackFrame
*fp
= regs
->fp
;
1944 while (fp
&& !fp
->isScriptFrame())
1947 return fp
->script()->getVersion();
1950 return defaultVersion
;
1953 void optionFlagsToVersion(JSVersion
*version
) const {
1954 js::VersionSetXML(version
, js::OptionsHasXML(options
));
1955 js::VersionSetAnonFunFix(version
, js::OptionsHasAnonFunFix(options
));
1958 void checkOptionVersionSync() const {
1960 JSVersion version
= findVersion();
1961 JS_ASSERT(js::VersionHasXML(version
) == js::OptionsHasXML(options
));
1962 JS_ASSERT(js::VersionHasAnonFunFix(version
) == js::OptionsHasAnonFunFix(options
));
1966 /* Note: may override the version. */
1967 void syncOptionsToVersion() {
1968 JSVersion version
= findVersion();
1969 if (js::OptionsHasXML(options
) == js::VersionHasXML(version
) &&
1970 js::OptionsHasAnonFunFix(options
) == js::VersionHasAnonFunFix(version
))
1972 js::VersionSetXML(&version
, js::OptionsHasXML(options
));
1973 js::VersionSetAnonFunFix(&version
, js::OptionsHasAnonFunFix(options
));
1974 maybeOverrideVersion(version
);
1977 #ifdef JS_THREADSAFE
1979 unsigned outstandingRequests
;/* number of JS_BeginRequest calls
1980 without the corresponding
1982 JSCList threadLinks
; /* JSThread contextList linkage */
1984 #define CX_FROM_THREAD_LINKS(tl) \
1985 ((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
1988 /* Stack of thread-stack-allocated GC roots. */
1989 js::AutoGCRooter
*autoGCRooters
;
1991 /* Debug hooks associated with the current context. */
1992 const JSDebugHooks
*debugHooks
;
1994 /* Security callbacks that override any defined on the runtime. */
1995 JSSecurityCallbacks
*securityCallbacks
;
1997 /* Stored here to avoid passing it around as a parameter. */
2000 /* Random number generator state, used by jsmath.cpp. */
2003 /* Location to stash the iteration value between JSOP_MOREITER and JSOP_FOR*. */
2004 js::Value iterValue
;
2008 * State for the current tree execution. bailExit is valid if the tree has
2009 * called back into native code via a _FAIL builtin and has not yet bailed,
2010 * else garbage (NULL in debug builds).
2012 js::TracerState
*tracerState
;
2013 js::VMSideExit
*bailExit
;
2016 * True if traces may be executed. Invariant: The value of traceJitenabled
2017 * is always equal to the expression in updateJITEnabled below.
2019 * This flag and the fields accessed by updateJITEnabled are written only
2020 * in runtime->gcLock, to avoid race conditions that would leave the wrong
2021 * value in traceJitEnabled. (But the interpreter reads this without
2022 * locking. That can race against another thread setting debug hooks, but
2023 * we always read cx->debugHooks without locking anyway.)
2025 bool traceJitEnabled
;
2029 bool methodJitEnabled
;
2030 bool profilingEnabled
;
2033 /* Caller must be holding runtime->gcLock. */
2034 void updateJITEnabled();
2036 #ifdef MOZ_TRACE_JSCALLS
2037 /* Function entry/exit debugging callback. */
2038 JSFunctionCallback functionCallback
;
2040 void doFunctionCallback(const JSFunction
*fun
,
2041 const JSScript
*scr
,
2044 if (functionCallback
)
2045 functionCallback(fun
, scr
, this, entering
);
2049 DSTOffsetCache dstOffsetCache
;
2051 /* List of currently active non-escaping enumerators (for-in). */
2052 JSObject
*enumerators
;
2056 * To go from a live generator frame (on the stack) to its generator object
2057 * (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
2058 * generators, pushing and popping when entering and leaving generator
2059 * frames, respectively.
2061 js::Vector
<JSGenerator
*, 2, js::SystemAllocPolicy
> genStack
;
2065 inline js::mjit::JaegerCompartment
*jaegerCompartment();
2068 /* Return the generator object for the given generator frame. */
2069 JSGenerator
*generatorFor(JSStackFrame
*fp
) const;
2071 /* Early OOM-check. */
2072 inline bool ensureGeneratorStackSpace();
2074 bool enterGenerator(JSGenerator
*gen
) {
2075 return genStack
.append(gen
);
2078 void leaveGenerator(JSGenerator
*gen
) {
2079 JS_ASSERT(genStack
.back() == gen
);
2083 #ifdef JS_THREADSAFE
2085 * When non-null JSContext::free delegates the job to the background
2088 js::GCHelperThread
*gcBackgroundFree
;
2091 inline void* malloc(size_t bytes
) {
2092 return runtime
->malloc(bytes
, this);
2095 inline void* mallocNoReport(size_t bytes
) {
2096 JS_ASSERT(bytes
!= 0);
2097 return runtime
->malloc(bytes
, NULL
);
2100 inline void* calloc(size_t bytes
) {
2101 JS_ASSERT(bytes
!= 0);
2102 return runtime
->calloc(bytes
, this);
2105 inline void* realloc(void* p
, size_t bytes
) {
2106 return runtime
->realloc(p
, bytes
, this);
2109 inline void* realloc(void* p
, size_t oldBytes
, size_t newBytes
) {
2110 return runtime
->realloc(p
, oldBytes
, newBytes
, this);
2113 inline void free(void* p
) {
2114 #ifdef JS_THREADSAFE
2115 if (gcBackgroundFree
) {
2116 gcBackgroundFree
->freeLater(p
);
2124 * In the common case that we'd like to allocate the memory for an object
2125 * with cx->malloc/free, we cannot use overloaded C++ operators (no
2126 * placement delete). Factor the common workaround into one place.
2128 #define CREATE_BODY(parms) \
2129 void *memory = this->malloc(sizeof(T)); \
2132 return new(memory) T parms;
2135 JS_ALWAYS_INLINE T
*create() {
2139 template <class T
, class P1
>
2140 JS_ALWAYS_INLINE T
*create(const P1
&p1
) {
2144 template <class T
, class P1
, class P2
>
2145 JS_ALWAYS_INLINE T
*create(const P1
&p1
, const P2
&p2
) {
2146 CREATE_BODY((p1
, p2
))
2149 template <class T
, class P1
, class P2
, class P3
>
2150 JS_ALWAYS_INLINE T
*create(const P1
&p1
, const P2
&p2
, const P3
&p3
) {
2151 CREATE_BODY((p1
, p2
, p3
))
2156 JS_ALWAYS_INLINE
void destroy(T
*p
) {
2163 js::StackSpace
&stack() const {
2164 return JS_THREAD_DATA(this)->stackSpace
;
2168 void assertValidStackDepth(uintN depth
) {
2169 JS_ASSERT(0 <= regs
->sp
- regs
->fp
->base());
2170 JS_ASSERT(depth
<= uintptr_t(regs
->sp
- regs
->fp
->base()));
2173 void assertValidStackDepth(uintN
/*depth*/) {}
2176 bool isExceptionPending() {
2180 js::Value
getPendingException() {
2181 JS_ASSERT(throwing
);
2185 void setPendingException(js::Value v
);
2187 void clearPendingException() {
2188 this->throwing
= false;
2189 this->exception
.setUndefined();
2194 * The allocation code calls the function to indicate either OOM failure
2195 * when p is null or that a memory pressure counter has reached some
2196 * threshold when p is not null. The function takes the pointer and not
2197 * a boolean flag to minimize the amount of code in its inlined callers.
2199 JS_FRIEND_API(void) checkMallocGCPressure(void *p
);
2201 /* To silence MSVC warning about using 'this' in a member initializer. */
2202 JSContext
*thisInInitializer() { return this; }
2203 }; /* struct JSContext */
2205 #ifdef JS_THREADSAFE
2206 # define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0)
2209 #if defined JS_THREADSAFE && defined DEBUG
2213 class AutoCheckRequestDepth
{
2216 AutoCheckRequestDepth(JSContext
*cx
) : cx(cx
) { cx
->thread
->checkRequestDepth
++; }
2218 ~AutoCheckRequestDepth() {
2219 JS_ASSERT(cx
->thread
->checkRequestDepth
!= 0);
2220 cx
->thread
->checkRequestDepth
--;
2226 # define CHECK_REQUEST(cx) \
2227 JS_ASSERT((cx)->thread); \
2228 JS_ASSERT((cx)->thread->data.requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
2229 AutoCheckRequestDepth _autoCheckRequestDepth(cx);
2232 # define CHECK_REQUEST(cx) ((void) 0)
2233 # define CHECK_REQUEST_THREAD(cx) ((void) 0)
2237 FramePCOffset(JSContext
*cx
, JSStackFrame
* fp
)
2239 jsbytecode
*pc
= fp
->hasImacropc() ? fp
->imacropc() : fp
->pc(cx
);
2240 return uintN(pc
- fp
->script()->code
);
2243 static inline JSAtom
**
2244 FrameAtomBase(JSContext
*cx
, JSStackFrame
*fp
)
2246 return fp
->hasImacropc()
2247 ? COMMON_ATOMS_START(&cx
->runtime
->atomState
)
2248 : fp
->script()->atomMap
.vector
;
2253 class AutoGCRooter
{
2255 AutoGCRooter(JSContext
*cx
, ptrdiff_t tag
)
2256 : down(cx
->autoGCRooters
), tag(tag
), context(cx
)
2258 JS_ASSERT(this != cx
->autoGCRooters
);
2260 cx
->autoGCRooters
= this;
2264 JS_ASSERT(this == context
->autoGCRooters
);
2265 CHECK_REQUEST(context
);
2266 context
->autoGCRooters
= down
;
2269 /* Implemented in jsgc.cpp. */
2270 inline void trace(JSTracer
*trc
);
2273 # pragma GCC visibility push(default)
2275 friend void MarkContext(JSTracer
*trc
, JSContext
*acx
);
2276 friend void MarkRuntime(JSTracer
*trc
);
2278 # pragma GCC visibility pop
2282 AutoGCRooter
* const down
;
2285 * Discriminates actual subclass of this being used. If non-negative, the
2286 * subclass roots an array of values of the length stored in this field.
2287 * If negative, meaning is indicated by the corresponding value in the enum
2288 * below. Any other negative value indicates some deeper problem such as
2289 * memory corruption.
2293 JSContext
* const context
;
2296 JSVAL
= -1, /* js::AutoValueRooter */
2297 SHAPE
= -2, /* js::AutoShapeRooter */
2298 PARSER
= -3, /* js::Parser */
2299 SCRIPT
= -4, /* js::AutoScriptRooter */
2300 ENUMERATOR
= -5, /* js::AutoEnumStateRooter */
2301 IDARRAY
= -6, /* js::AutoIdArray */
2302 DESCRIPTORS
= -7, /* js::AutoPropDescArrayRooter */
2303 NAMESPACES
= -8, /* js::AutoNamespaceArray */
2304 XML
= -9, /* js::AutoXMLRooter */
2305 OBJECT
= -10, /* js::AutoObjectRooter */
2306 ID
= -11, /* js::AutoIdRooter */
2307 VALVECTOR
= -12, /* js::AutoValueVector */
2308 DESCRIPTOR
= -13, /* js::AutoPropertyDescriptorRooter */
2309 STRING
= -14, /* js::AutoStringRooter */
2310 IDVECTOR
= -15 /* js::AutoIdVector */
2314 /* No copy or assignment semantics. */
2315 AutoGCRooter(AutoGCRooter
&ida
);
2316 void operator=(AutoGCRooter
&ida
);
2319 /* FIXME(bug 332648): Move this into a public header. */
2320 class AutoValueRooter
: private AutoGCRooter
2323 explicit AutoValueRooter(JSContext
*cx
2324 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2325 : AutoGCRooter(cx
, JSVAL
), val(js::NullValue())
2327 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2330 AutoValueRooter(JSContext
*cx
, const Value
&v
2331 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2332 : AutoGCRooter(cx
, JSVAL
), val(v
)
2334 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2337 AutoValueRooter(JSContext
*cx
, jsval v
2338 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2339 : AutoGCRooter(cx
, JSVAL
), val(js::Valueify(v
))
2341 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2345 * If you are looking for Object* overloads, use AutoObjectRooter instead;
2346 * rooting Object*s as a js::Value requires discerning whether or not it is
2347 * a function object. Also, AutoObjectRooter is smaller.
2351 JS_ASSERT(tag
== JSVAL
);
2356 JS_ASSERT(tag
== JSVAL
);
2357 val
= js::Valueify(v
);
2360 const Value
&value() const {
2361 JS_ASSERT(tag
== JSVAL
);
2366 JS_ASSERT(tag
== JSVAL
);
2370 const jsval
&jsval_value() const {
2371 JS_ASSERT(tag
== JSVAL
);
2372 return Jsvalify(val
);
2375 jsval
*jsval_addr() {
2376 JS_ASSERT(tag
== JSVAL
);
2377 return Jsvalify(&val
);
2380 friend void AutoGCRooter::trace(JSTracer
*trc
);
2381 friend void MarkRuntime(JSTracer
*trc
);
2385 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2388 class AutoObjectRooter
: private AutoGCRooter
{
2390 AutoObjectRooter(JSContext
*cx
, JSObject
*obj
= NULL
2391 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2392 : AutoGCRooter(cx
, OBJECT
), obj(obj
)
2394 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2397 void setObject(JSObject
*obj
) {
2401 JSObject
* object() const {
2405 JSObject
** addr() {
2409 friend void AutoGCRooter::trace(JSTracer
*trc
);
2410 friend void MarkRuntime(JSTracer
*trc
);
2414 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2417 class AutoStringRooter
: private AutoGCRooter
{
2419 AutoStringRooter(JSContext
*cx
, JSString
*str
= NULL
2420 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2421 : AutoGCRooter(cx
, STRING
), str(str
)
2423 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2426 void setString(JSString
*str
) {
2430 JSString
* string() const {
2434 JSString
** addr() {
2438 friend void AutoGCRooter::trace(JSTracer
*trc
);
2442 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2445 class AutoArrayRooter
: private AutoGCRooter
{
2447 AutoArrayRooter(JSContext
*cx
, size_t len
, Value
*vec
2448 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2449 : AutoGCRooter(cx
, len
), array(vec
)
2451 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2452 JS_ASSERT(tag
>= 0);
2455 AutoArrayRooter(JSContext
*cx
, size_t len
, jsval
*vec
2456 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2457 : AutoGCRooter(cx
, len
), array(Valueify(vec
))
2459 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2460 JS_ASSERT(tag
>= 0);
2463 void changeLength(size_t newLength
) {
2464 tag
= ptrdiff_t(newLength
);
2465 JS_ASSERT(tag
>= 0);
2468 void changeArray(Value
*newArray
, size_t newLength
) {
2469 changeLength(newLength
);
2475 friend void AutoGCRooter::trace(JSTracer
*trc
);
2478 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2481 class AutoShapeRooter
: private AutoGCRooter
{
2483 AutoShapeRooter(JSContext
*cx
, const js::Shape
*shape
2484 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2485 : AutoGCRooter(cx
, SHAPE
), shape(shape
)
2487 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2490 friend void AutoGCRooter::trace(JSTracer
*trc
);
2491 friend void MarkRuntime(JSTracer
*trc
);
2494 const js::Shape
* const shape
;
2495 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2498 class AutoScriptRooter
: private AutoGCRooter
{
2500 AutoScriptRooter(JSContext
*cx
, JSScript
*script
2501 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2502 : AutoGCRooter(cx
, SCRIPT
), script(script
)
2504 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2507 void setScript(JSScript
*script
) {
2508 this->script
= script
;
2511 friend void AutoGCRooter::trace(JSTracer
*trc
);
2515 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2518 class AutoIdRooter
: private AutoGCRooter
2521 explicit AutoIdRooter(JSContext
*cx
, jsid id
= INT_TO_JSID(0)
2522 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2523 : AutoGCRooter(cx
, ID
), id_(id
)
2525 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2536 friend void AutoGCRooter::trace(JSTracer
*trc
);
2537 friend void MarkRuntime(JSTracer
*trc
);
2541 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2544 class AutoIdArray
: private AutoGCRooter
{
2546 AutoIdArray(JSContext
*cx
, JSIdArray
*ida JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2547 : AutoGCRooter(cx
, IDARRAY
), idArray(ida
)
2549 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2553 JS_DestroyIdArray(context
, idArray
);
2556 return idArray
== NULL
;
2558 jsid
operator[](size_t i
) const {
2560 JS_ASSERT(i
< size_t(idArray
->length
));
2561 return idArray
->vector
[i
];
2563 size_t length() const {
2564 return idArray
->length
;
2567 friend void AutoGCRooter::trace(JSTracer
*trc
);
2569 JSIdArray
*steal() {
2570 JSIdArray
*copy
= idArray
;
2576 inline void trace(JSTracer
*trc
);
2579 JSIdArray
* idArray
;
2580 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2582 /* No copy or assignment semantics. */
2583 AutoIdArray(AutoIdArray
&ida
);
2584 void operator=(AutoIdArray
&ida
);
2587 /* The auto-root for enumeration object and its state. */
2588 class AutoEnumStateRooter
: private AutoGCRooter
2591 AutoEnumStateRooter(JSContext
*cx
, JSObject
*obj
2592 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2593 : AutoGCRooter(cx
, ENUMERATOR
), obj(obj
), stateValue()
2595 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2599 ~AutoEnumStateRooter() {
2600 if (!stateValue
.isNull()) {
2604 obj
->enumerate(context
, JSENUMERATE_DESTROY
, &stateValue
, 0);
2609 friend void AutoGCRooter::trace(JSTracer
*trc
);
2611 const Value
&state() const { return stateValue
; }
2612 Value
*addr() { return &stateValue
; }
2615 void trace(JSTracer
*trc
);
2617 JSObject
* const obj
;
2621 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2624 #ifdef JS_HAS_XML_SUPPORT
2625 class AutoXMLRooter
: private AutoGCRooter
{
2627 AutoXMLRooter(JSContext
*cx
, JSXML
*xml
)
2628 : AutoGCRooter(cx
, XML
), xml(xml
)
2633 friend void AutoGCRooter::trace(JSTracer
*trc
);
2634 friend void MarkRuntime(JSTracer
*trc
);
2639 #endif /* JS_HAS_XML_SUPPORT */
2645 explicit AutoLockGC(JSRuntime
*rt
) : rt(rt
) { JS_LOCK_GC(rt
); }
2646 ~AutoLockGC() { JS_UNLOCK_GC(rt
); }
2649 class AutoUnlockGC
{
2653 explicit AutoUnlockGC(JSRuntime
*rt
) : rt(rt
) { JS_UNLOCK_GC(rt
); }
2654 ~AutoUnlockGC() { JS_LOCK_GC(rt
); }
2657 class AutoLockDefaultCompartment
{
2661 AutoLockDefaultCompartment(JSContext
*cx
) : cx(cx
) {
2662 JS_LOCK(cx
, &cx
->runtime
->atomState
.lock
);
2663 #ifdef JS_THREADSAFE
2664 cx
->runtime
->defaultCompartmentIsLocked
= true;
2667 ~AutoLockDefaultCompartment() {
2668 #ifdef JS_THREADSAFE
2669 cx
->runtime
->defaultCompartmentIsLocked
= false;
2671 JS_UNLOCK(cx
, &cx
->runtime
->atomState
.lock
);
2675 class AutoUnlockDefaultCompartment
{
2679 AutoUnlockDefaultCompartment(JSContext
*cx
) : cx(cx
) {
2680 JS_UNLOCK(cx
, &cx
->runtime
->atomState
.lock
);
2681 #ifdef JS_THREADSAFE
2682 cx
->runtime
->defaultCompartmentIsLocked
= false;
2685 ~AutoUnlockDefaultCompartment() {
2686 #ifdef JS_THREADSAFE
2687 cx
->runtime
->defaultCompartmentIsLocked
= true;
2689 JS_LOCK(cx
, &cx
->runtime
->atomState
.lock
);
2693 class AutoKeepAtoms
{
2696 explicit AutoKeepAtoms(JSRuntime
*rt
) : rt(rt
) { JS_KEEP_ATOMS(rt
); }
2697 ~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt
); }
2700 class AutoArenaAllocator
{
2704 explicit AutoArenaAllocator(JSArenaPool
*pool
) : pool(pool
) { mark
= JS_ARENA_MARK(pool
); }
2705 ~AutoArenaAllocator() { JS_ARENA_RELEASE(pool
, mark
); }
2707 template <typename T
>
2708 T
*alloc(size_t elems
) {
2710 JS_ARENA_ALLOCATE(ptr
, pool
, elems
* sizeof(T
));
2711 return static_cast<T
*>(ptr
);
2715 class AutoReleasePtr
{
2718 AutoReleasePtr
operator=(const AutoReleasePtr
&other
);
2720 explicit AutoReleasePtr(JSContext
*cx
, void *ptr
) : cx(cx
), ptr(ptr
) {}
2721 ~AutoReleasePtr() { cx
->free(ptr
); }
2725 * FIXME: bug 602774: cleaner API for AutoReleaseNullablePtr
2727 class AutoReleaseNullablePtr
{
2730 AutoReleaseNullablePtr
operator=(const AutoReleaseNullablePtr
&other
);
2732 explicit AutoReleaseNullablePtr(JSContext
*cx
, void *ptr
) : cx(cx
), ptr(ptr
) {}
2733 void reset(void *ptr2
) {
2738 ~AutoReleaseNullablePtr() { if (ptr
) cx
->free(ptr
); }
2741 class AutoLocalNameArray
{
2743 explicit AutoLocalNameArray(JSContext
*cx
, JSFunction
*fun
2744 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2746 mark(JS_ARENA_MARK(&cx
->tempPool
)),
2747 names(fun
->script()->bindings
.getLocalNameArray(cx
, &cx
->tempPool
)),
2748 count(fun
->script()->bindings
.countLocalNames())
2750 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2753 ~AutoLocalNameArray() {
2754 JS_ARENA_RELEASE(&context
->tempPool
, mark
);
2757 operator bool() const { return !!names
; }
2759 uint32
length() const { return count
; }
2761 const jsuword
&operator [](unsigned i
) const { return names
[i
]; }
2769 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2772 template <class RefCountable
>
2773 class AutoRefCount
{
2774 JSContext
* const cx
;
2778 explicit AutoRefCount(JSContext
*cx
) : cx(cx
), obj(NULL
) {}
2780 AutoRefCount(JSContext
*cx
, RefCountable
*obj
) : cx(cx
), obj(NULL
) {
2789 void reset(RefCountable
*aobj
) {
2799 RefCountable
*get() {
2804 } /* namespace js */
2806 class JSAutoResolveFlags
2809 JSAutoResolveFlags(JSContext
*cx
, uintN flags
2810 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
2811 : mContext(cx
), mSaved(cx
->resolveFlags
)
2813 JS_GUARD_OBJECT_NOTIFIER_INIT
;
2814 cx
->resolveFlags
= flags
;
2817 ~JSAutoResolveFlags() { mContext
->resolveFlags
= mSaved
; }
2820 JSContext
*mContext
;
2822 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
2825 extern JSThreadData
*
2826 js_CurrentThreadData(JSRuntime
*rt
);
2829 js_InitThreads(JSRuntime
*rt
);
2832 js_FinishThreads(JSRuntime
*rt
);
2835 js_PurgeThreads(JSContext
*cx
);
2839 #ifdef JS_THREADSAFE
2841 /* Iterator over JSThreadData from all JSThread instances. */
2842 class ThreadDataIter
: public JSThread::Map::Range
2845 ThreadDataIter(JSRuntime
*rt
) : JSThread::Map::Range(rt
->threads
.all()) {}
2847 JSThreadData
*threadData() const {
2848 return &front().value
->data
;
2852 #else /* !JS_THREADSAFE */
2854 class ThreadDataIter
2859 ThreadDataIter(JSRuntime
*rt
) : runtime(rt
), done(false) {}
2861 bool empty() const {
2870 JSThreadData
*threadData() const {
2872 return &runtime
->threadData
;
2876 #endif /* !JS_THREADSAFE */
2878 } /* namespace js */
2881 * Create and destroy functions for JSContext, which is manually allocated
2882 * and exclusively owned.
2885 js_NewContext(JSRuntime
*rt
, size_t stackChunkSize
);
2888 js_DestroyContext(JSContext
*cx
, JSDestroyContextMode mode
);
2890 static JS_INLINE JSContext
*
2891 js_ContextFromLinkField(JSCList
*link
)
2894 return (JSContext
*) ((uint8
*) link
- offsetof(JSContext
, link
));
2898 * If unlocked, acquire and release rt->gcLock around *iterp update; otherwise
2899 * the caller must be holding rt->gcLock.
2902 js_ContextIterator(JSRuntime
*rt
, JSBool unlocked
, JSContext
**iterp
);
2905 * Iterate through contexts with active requests. The caller must be holding
2906 * rt->gcLock in case of a thread-safe build, or otherwise guarantee that the
2907 * context list is not alternated asynchroniously.
2909 extern JS_FRIEND_API(JSContext
*)
2910 js_NextActiveContext(JSRuntime
*, JSContext
*);
2913 * Class.resolve and watchpoint recursion damping machinery.
2916 js_StartResolving(JSContext
*cx
, JSResolvingKey
*key
, uint32 flag
,
2917 JSResolvingEntry
**entryp
);
2920 js_StopResolving(JSContext
*cx
, JSResolvingKey
*key
, uint32 flag
,
2921 JSResolvingEntry
*entry
, uint32 generation
);
2924 * Report an exception, which is currently realized as a printf-style format
2925 * string and its arguments.
2927 typedef enum JSErrNum
{
2928 #define MSG_DEF(name, number, count, exception, format) \
2935 extern JS_FRIEND_API(const JSErrorFormatString
*)
2936 js_GetErrorMessage(void *userRef
, const char *locale
, const uintN errorNumber
);
2940 js_ReportErrorVA(JSContext
*cx
, uintN flags
, const char *format
, va_list ap
);
2943 js_ReportErrorNumberVA(JSContext
*cx
, uintN flags
, JSErrorCallback callback
,
2944 void *userRef
, const uintN errorNumber
,
2945 JSBool charArgs
, va_list ap
);
2948 js_ExpandErrorArguments(JSContext
*cx
, JSErrorCallback callback
,
2949 void *userRef
, const uintN errorNumber
,
2950 char **message
, JSErrorReport
*reportp
,
2951 bool charArgs
, va_list ap
);
2955 js_ReportOutOfMemory(JSContext
*cx
);
2958 * Report that cx->scriptStackQuota is exhausted.
2961 js_ReportOutOfScriptQuota(JSContext
*cx
);
2963 extern JS_FRIEND_API(void)
2964 js_ReportOverRecursed(JSContext
*cx
);
2966 extern JS_FRIEND_API(void)
2967 js_ReportAllocationOverflow(JSContext
*cx
);
2969 #define JS_CHECK_RECURSION(cx, onerror) \
2973 if (!JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy_)) { \
2974 js_ReportOverRecursed(cx); \
2980 * Report an exception using a previously composed JSErrorReport.
2981 * XXXbe remove from "friend" API
2983 extern JS_FRIEND_API(void)
2984 js_ReportErrorAgain(JSContext
*cx
, const char *message
, JSErrorReport
*report
);
2987 js_ReportIsNotDefined(JSContext
*cx
, const char *name
);
2990 * Report an attempt to access the property of a null or undefined value (v).
2993 js_ReportIsNullOrUndefined(JSContext
*cx
, intN spindex
, const js::Value
&v
,
2994 JSString
*fallback
);
2997 js_ReportMissingArg(JSContext
*cx
, const js::Value
&v
, uintN arg
);
3000 * Report error using js_DecompileValueGenerator(cx, spindex, v, fallback) as
3001 * the first argument for the error message. If the error message has less
3002 * then 3 arguments, use null for arg1 or arg2.
3005 js_ReportValueErrorFlags(JSContext
*cx
, uintN flags
, const uintN errorNumber
,
3006 intN spindex
, const js::Value
&v
, JSString
*fallback
,
3007 const char *arg1
, const char *arg2
);
3009 #define js_ReportValueError(cx,errorNumber,spindex,v,fallback) \
3010 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3011 spindex, v, fallback, NULL, NULL))
3013 #define js_ReportValueError2(cx,errorNumber,spindex,v,fallback,arg1) \
3014 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3015 spindex, v, fallback, arg1, NULL))
3017 #define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2) \
3018 ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
3019 spindex, v, fallback, arg1, arg2))
3021 extern JSErrorFormatString js_ErrorFormatString
[JSErr_Limit
];
3023 #ifdef JS_THREADSAFE
3024 # define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \
3025 JS_ASSERT((cx)->thread->data.requestDepth >= 1))
3027 # define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0)
3031 * If the operation callback flag was set, call the operation callback.
3032 * This macro can run the full GC. Return true if it is OK to continue and
3035 #define JS_CHECK_OPERATION_LIMIT(cx) \
3036 (JS_ASSERT_REQUEST_DEPTH(cx), \
3037 (!JS_THREAD_DATA(cx)->interruptFlags || js_InvokeOperationCallback(cx)))
3039 JS_ALWAYS_INLINE
void
3040 JSThreadData::triggerOperationCallback(JSRuntime
*rt
)
3043 * Use JS_ATOMIC_SET and JS_ATOMIC_INCREMENT in the hope that it ensures
3044 * the write will become immediately visible to other processors polling
3045 * the flag. Note that we only care about visibility here, not read/write
3046 * ordering: this field can only be written with the GC lock held.
3050 JS_ATOMIC_SET(&interruptFlags
, 1);
3052 #ifdef JS_THREADSAFE
3053 /* rt->interruptCounter does not reflect suspended threads. */
3054 if (requestDepth
!= 0)
3055 JS_ATOMIC_INCREMENT(&rt
->interruptCounter
);
3060 * Invoke the operation callback and return false if the current execution
3061 * is to be terminated.
3064 js_InvokeOperationCallback(JSContext
*cx
);
3067 js_HandleExecutionInterrupt(JSContext
*cx
);
3071 /* These must be called with GC lock taken. */
3074 TriggerOperationCallback(JSContext
*cx
);
3077 TriggerAllOperationCallbacks(JSRuntime
*rt
);
3079 } /* namespace js */
3081 extern JSStackFrame
*
3082 js_GetScriptedCaller(JSContext
*cx
, JSStackFrame
*fp
);
3085 js_GetCurrentBytecodePC(JSContext
* cx
);
3088 js_CurrentPCIsInImacro(JSContext
*cx
);
3092 class RegExpStatics
;
3094 extern JS_FORCES_STACK
JS_FRIEND_API(void)
3095 LeaveTrace(JSContext
*cx
);
3097 } /* namespace js */
3100 * Get the current frame, first lazily instantiating stack frames if needed.
3101 * (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
3103 * Defined in jstracer.cpp if JS_TRACER is defined.
3105 static JS_FORCES_STACK JS_INLINE JSStackFrame
*
3106 js_GetTopStackFrame(JSContext
*cx
)
3109 return cx
->maybefp();
3112 static JS_INLINE JSBool
3113 js_IsPropertyCacheDisabled(JSContext
*cx
)
3115 return cx
->runtime
->shapeGen
>= js::SHAPE_OVERFLOW_BIT
;
3118 static JS_INLINE uint32
3119 js_RegenerateShapeForGC(JSContext
*cx
)
3121 JS_ASSERT(cx
->runtime
->gcRunning
);
3122 JS_ASSERT(cx
->runtime
->gcRegenShapes
);
3125 * Under the GC, compared with js_GenerateShape, we don't need to use
3126 * atomic increments but we still must make sure that after an overflow
3127 * the shape stays such.
3129 uint32 shape
= cx
->runtime
->shapeGen
;
3130 shape
= (shape
+ 1) | (shape
& js::SHAPE_OVERFLOW_BIT
);
3131 cx
->runtime
->shapeGen
= shape
;
3138 ContextAllocPolicy::malloc(size_t bytes
)
3140 return cx
->malloc(bytes
);
3144 ContextAllocPolicy::free(void *p
)
3150 ContextAllocPolicy::realloc(void *p
, size_t bytes
)
3152 return cx
->realloc(p
, bytes
);
3156 ContextAllocPolicy::reportAllocOverflow() const
3158 js_ReportAllocationOverflow(cx
);
3161 class AutoValueVector
: private AutoGCRooter
3164 explicit AutoValueVector(JSContext
*cx
3165 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
3166 : AutoGCRooter(cx
, VALVECTOR
), vector(cx
)
3168 JS_GUARD_OBJECT_NOTIFIER_INIT
;
3171 size_t length() const { return vector
.length(); }
3173 bool append(const Value
&v
) { return vector
.append(v
); }
3175 void popBack() { vector
.popBack(); }
3177 bool growBy(size_t inc
) {
3178 /* N.B. Value's default ctor leaves the Value undefined */
3179 size_t oldLength
= vector
.length();
3180 if (!vector
.growByUninitialized(inc
))
3182 MakeValueRangeGCSafe(vector
.begin() + oldLength
, vector
.end());
3186 bool resize(size_t newLength
) {
3187 size_t oldLength
= vector
.length();
3188 if (newLength
<= oldLength
) {
3189 vector
.shrinkBy(oldLength
- newLength
);
3192 /* N.B. Value's default ctor leaves the Value undefined */
3193 if (!vector
.growByUninitialized(newLength
- oldLength
))
3195 MakeValueRangeGCSafe(vector
.begin() + oldLength
, vector
.end());
3199 bool reserve(size_t newLength
) {
3200 return vector
.reserve(newLength
);
3203 Value
&operator[](size_t i
) { return vector
[i
]; }
3204 const Value
&operator[](size_t i
) const { return vector
[i
]; }
3206 const Value
*begin() const { return vector
.begin(); }
3207 Value
*begin() { return vector
.begin(); }
3209 const Value
*end() const { return vector
.end(); }
3210 Value
*end() { return vector
.end(); }
3212 const jsval
*jsval_begin() const { return Jsvalify(begin()); }
3213 jsval
*jsval_begin() { return Jsvalify(begin()); }
3215 const jsval
*jsval_end() const { return Jsvalify(end()); }
3216 jsval
*jsval_end() { return Jsvalify(end()); }
3218 const Value
&back() const { return vector
.back(); }
3220 friend void AutoGCRooter::trace(JSTracer
*trc
);
3223 Vector
<Value
, 8> vector
;
3224 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
3227 class AutoIdVector
: private AutoGCRooter
3230 explicit AutoIdVector(JSContext
*cx
3231 JS_GUARD_OBJECT_NOTIFIER_PARAM
)
3232 : AutoGCRooter(cx
, IDVECTOR
), vector(cx
)
3234 JS_GUARD_OBJECT_NOTIFIER_INIT
;
3237 size_t length() const { return vector
.length(); }
3239 bool append(jsid id
) { return vector
.append(id
); }
3241 void popBack() { vector
.popBack(); }
3243 bool growBy(size_t inc
) {
3244 /* N.B. jsid's default ctor leaves the jsid undefined */
3245 size_t oldLength
= vector
.length();
3246 if (!vector
.growByUninitialized(inc
))
3248 MakeIdRangeGCSafe(vector
.begin() + oldLength
, vector
.end());
3252 bool resize(size_t newLength
) {
3253 size_t oldLength
= vector
.length();
3254 if (newLength
<= oldLength
) {
3255 vector
.shrinkBy(oldLength
- newLength
);
3258 /* N.B. jsid's default ctor leaves the jsid undefined */
3259 if (!vector
.growByUninitialized(newLength
- oldLength
))
3261 MakeIdRangeGCSafe(vector
.begin() + oldLength
, vector
.end());
3265 bool reserve(size_t newLength
) {
3266 return vector
.reserve(newLength
);
3269 jsid
&operator[](size_t i
) { return vector
[i
]; }
3270 const jsid
&operator[](size_t i
) const { return vector
[i
]; }
3272 const jsid
*begin() const { return vector
.begin(); }
3273 jsid
*begin() { return vector
.begin(); }
3275 const jsid
*end() const { return vector
.end(); }
3276 jsid
*end() { return vector
.end(); }
3278 const jsid
&back() const { return vector
.back(); }
3280 friend void AutoGCRooter::trace(JSTracer
*trc
);
3283 Vector
<jsid
, 8> vector
;
3284 JS_DECL_USE_GUARD_OBJECT_NOTIFIER
3288 NewIdArray(JSContext
*cx
, jsint length
);
3290 } /* namespace js */
3293 #pragma warning(pop)
3294 #pragma warning(pop)
3297 #ifdef JS_UNDEFD_MOZALLOC_WRAPPERS
3298 # include "mozilla/mozalloc_macro_wrappers.h"
3301 #endif /* jscntxt_h___ */