1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JavaScript bytecode interpreter.
49 #include "jsarena.h" /* Added by JSIFY */
50 #include "jsutil.h" /* Added by JSIFY */
58 #include "jsversion.h"
68 #include "jspropertycache.h"
73 #include "jsstaticcheck.h"
75 #include "jslibmath.h"
78 #include "jsatominlines.h"
79 #include "jspropertycacheinlines.h"
80 #include "jsobjinlines.h"
81 #include "jsscopeinlines.h"
82 #include "jsscriptinlines.h"
83 #include "jsstrinlines.h"
84 #include "jsdtracef.h"
86 #if JS_HAS_XML_SUPPORT
90 #include "jsautooplen.h"
94 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
95 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
98 * Check if the current arena has enough space to fit nslots after sp and, if
99 * so, reserve the necessary space.
101 static JS_REQUIRES_STACK JSBool
102 AllocateAfterSP(JSContext
*cx
, jsval
*sp
, uintN nslots
)
104 jsval
*avail
= (jsval
*) cx
->stackPool
.getCurrent()->getAvail();
105 JS_ASSERT((jsval
*) cx
->stackPool
.getCurrent()->getBase() <= sp
);
106 JS_ASSERT(sp
<= avail
);
108 uintN surplus
= avail
- sp
;
111 if (nslots
<= surplus
)
115 * No room before current->avail, check if the arena has enough space to
116 * fit the missing slots before the limit.
118 if (nslots
> (size_t) ((jsval
*) cx
->stackPool
.getCurrent()->getLimit() - sp
))
121 cx
->stackPool
.allocateCast
<jsval
*>(sp2
, (nslots
- surplus
) * sizeof(jsval
));
122 JS_ASSERT(sp2
== sp
+ surplus
);
126 JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval
*
127 js_AllocRawStack(JSContext
*cx
, uintN nslots
, void **markp
)
131 JS_ASSERT(nslots
!= 0);
132 JS_ASSERT_NOT_ON_TRACE(cx
);
134 if (!cx
->stackPool
.getSecond()) {
137 cx
->stackPool
.allocateCast
<int64
*>(timestamp
, sizeof *timestamp
);
139 js_ReportOutOfScriptQuota(cx
);
142 *timestamp
= JS_Now();
146 *markp
= cx
->stackPool
.getMark();
147 cx
->stackPool
.allocateCast
<jsval
*>(sp
, nslots
* sizeof(jsval
));
149 js_ReportOutOfScriptQuota(cx
);
153 JS_STATIC_INTERPRET JS_REQUIRES_STACK
void
154 js_FreeRawStack(JSContext
*cx
, void *mark
)
156 cx
->stackPool
.release(mark
);
159 JS_REQUIRES_STACK
JS_FRIEND_API(jsval
*)
160 js_AllocStack(JSContext
*cx
, uintN nslots
, void **markp
)
165 /* Callers don't check for zero nslots: we do to avoid empty segments. */
168 return (jsval
*) cx
->stackPool
.getMark();
171 /* Allocate 2 extra slots for the stack segment header we'll likely need. */
172 sp
= js_AllocRawStack(cx
, 2 + nslots
, markp
);
176 /* Try to avoid another header if we can piggyback on the last segment. */
177 JSArena
*a
= cx
->stackPool
.getCurrent();
178 sh
= cx
->stackHeaders
;
179 if (sh
&& JS_STACK_SEGMENT(sh
) + sh
->nslots
== sp
) {
180 /* Extend the last stack segment, give back the 2 header slots. */
181 sh
->nslots
+= nslots
;
182 a
->setAvail(a
->getAvail() - 2 * sizeof(jsval
));
185 * Need a new stack segment, so allocate and push a stack segment
186 * header from the 2 extra slots.
188 sh
= (JSStackHeader
*)sp
;
190 sh
->down
= cx
->stackHeaders
;
191 cx
->stackHeaders
= sh
;
196 * Store JSVAL_NULL using memset, to let compilers optimize as they see
197 * fit, in case a caller allocates and pushes GC-things one by one, which
198 * could nest a last-ditch GC that will scan this segment.
200 memset(sp
, 0, nslots
* sizeof(jsval
));
204 JS_REQUIRES_STACK
JS_FRIEND_API(void)
205 js_FreeStack(JSContext
*cx
, void *mark
)
210 /* Check for zero nslots allocation special case. */
214 /* We can assert because js_FreeStack always balances js_AllocStack. */
215 sh
= cx
->stackHeaders
;
218 /* If mark is in the current segment, reduce sh->nslots, else pop sh. */
219 slotdiff
= JS_UPTRDIFF(mark
, JS_STACK_SEGMENT(sh
)) / sizeof(jsval
);
220 if (slotdiff
< (jsuword
)sh
->nslots
)
221 sh
->nslots
= slotdiff
;
223 cx
->stackHeaders
= sh
->down
;
225 /* Release the stackPool space allocated since mark was set. */
226 cx
->stackPool
.release(mark
);
230 js_GetScopeChain(JSContext
*cx
, JSStackFrame
*fp
)
232 JSObject
*sharedBlock
= fp
->blockChain
;
236 * Don't force a call object for a lightweight function call, but do
237 * insist that there is a call object for a heavyweight function call.
239 JS_ASSERT(!fp
->fun
||
240 !(fp
->fun
->flags
& JSFUN_HEAVYWEIGHT
) ||
242 JS_ASSERT(fp
->scopeChain
);
243 return fp
->scopeChain
;
246 /* We don't handle cloning blocks on trace. */
250 * We have one or more lexical scopes to reflect into fp->scopeChain, so
251 * make sure there's a call object at the current head of the scope chain,
252 * if this frame is a call frame.
254 * Also, identify the innermost compiler-allocated block we needn't clone.
256 JSObject
*limitBlock
, *limitClone
;
257 if (fp
->fun
&& !fp
->callobj
) {
258 JS_ASSERT(fp
->scopeChain
->getClass() != &js_BlockClass
||
259 fp
->scopeChain
->getPrivate() != fp
);
260 if (!js_GetCallObject(cx
, fp
))
263 /* We know we must clone everything on blockChain. */
264 limitBlock
= limitClone
= NULL
;
267 * scopeChain includes all blocks whose static scope we're within that
268 * have already been cloned. Find the innermost such block. Its
269 * prototype should appear on blockChain; we'll clone blockChain up
270 * to, but not including, that prototype.
272 limitClone
= fp
->scopeChain
;
273 while (limitClone
->getClass() == &js_WithClass
)
274 limitClone
= limitClone
->getParent();
275 JS_ASSERT(limitClone
);
278 * It may seem like we don't know enough about limitClone to be able
279 * to just grab its prototype as we do here, but it's actually okay.
281 * If limitClone is a block object belonging to this frame, then its
282 * prototype is the innermost entry in blockChain that we have already
283 * cloned, and is thus the place to stop when we clone below.
285 * Otherwise, there are no blocks for this frame on scopeChain, and we
286 * need to clone the whole blockChain. In this case, limitBlock can
287 * point to any object known not to be on blockChain, since we simply
288 * loop until we hit limitBlock or NULL. If limitClone is a block, it
289 * isn't a block from this function, since blocks can't be nested
290 * within themselves on scopeChain (recursion is dynamic nesting, not
291 * static nesting). If limitClone isn't a block, its prototype won't
292 * be a block either. So we can just grab limitClone's prototype here
293 * regardless of its type or which frame it belongs to.
295 limitBlock
= limitClone
->getProto();
297 /* If the innermost block has already been cloned, we are done. */
298 if (limitBlock
== sharedBlock
)
299 return fp
->scopeChain
;
303 * Special-case cloning the innermost block; this doesn't have enough in
304 * common with subsequent steps to include in the loop.
306 * js_CloneBlockObject leaves the clone's parent slot uninitialized. We
309 JSObject
*innermostNewChild
= js_CloneBlockObject(cx
, sharedBlock
, fp
);
310 if (!innermostNewChild
)
312 AutoValueRooter
tvr(cx
, innermostNewChild
);
315 * Clone our way towards outer scopes until we reach the innermost
316 * enclosing function, or the innermost block we've already cloned.
318 JSObject
*newChild
= innermostNewChild
;
320 JS_ASSERT(newChild
->getProto() == sharedBlock
);
321 sharedBlock
= sharedBlock
->getParent();
323 /* Sometimes limitBlock will be NULL, so check that first. */
324 if (sharedBlock
== limitBlock
|| !sharedBlock
)
327 /* As in the call above, we don't know the real parent yet. */
329 = js_CloneBlockObject(cx
, sharedBlock
, fp
);
333 newChild
->setParent(clone
);
336 newChild
->setParent(fp
->scopeChain
);
340 * If we found a limit block belonging to this frame, then we should have
341 * found it in blockChain.
343 JS_ASSERT_IF(limitBlock
&&
344 limitBlock
->getClass() == &js_BlockClass
&&
345 limitClone
->getPrivate() == fp
,
348 /* Place our newly cloned blocks at the head of the scope chain. */
349 fp
->scopeChain
= innermostNewChild
;
350 return fp
->scopeChain
;
354 js_GetPrimitiveThis(JSContext
*cx
, jsval
*vp
, JSClass
*clasp
, jsval
*thisvp
)
360 if (JSVAL_IS_OBJECT(v
)) {
361 obj
= JS_THIS_OBJECT(cx
, vp
);
362 if (!JS_InstanceOf(cx
, obj
, clasp
, vp
+ 2))
364 v
= obj
->getPrimitiveThis();
370 /* Some objects (e.g., With) delegate 'this' to another object. */
371 static inline JSObject
*
372 CallThisObjectHook(JSContext
*cx
, JSObject
*obj
, jsval
*argv
)
374 JSObject
*thisp
= obj
->thisObject(cx
);
377 argv
[-1] = OBJECT_TO_JSVAL(thisp
);
382 * ECMA requires "the global object", but in embeddings such as the browser,
383 * which have multiple top-level objects (windows, frames, etc. in the DOM),
384 * we prefer fun's parent. An example that causes this code to run:
387 * function f() { return this }
388 * function g() { return f }
394 * The alert should display "true".
396 JS_STATIC_INTERPRET JSObject
*
397 js_ComputeGlobalThis(JSContext
*cx
, jsval
*argv
)
401 if (JSVAL_IS_PRIMITIVE(argv
[-2]) ||
402 !JSVAL_TO_OBJECT(argv
[-2])->getParent()) {
403 thisp
= cx
->globalObject
;
405 thisp
= JSVAL_TO_OBJECT(argv
[-2])->getGlobal();
408 return CallThisObjectHook(cx
, thisp
, argv
);
412 ComputeThis(JSContext
*cx
, jsval
*argv
)
416 JS_ASSERT(!JSVAL_IS_NULL(argv
[-1]));
417 if (!JSVAL_IS_OBJECT(argv
[-1])) {
418 if (!js_PrimitiveToObject(cx
, &argv
[-1]))
420 thisp
= JSVAL_TO_OBJECT(argv
[-1]);
424 thisp
= JSVAL_TO_OBJECT(argv
[-1]);
425 if (thisp
->getClass() == &js_CallClass
|| thisp
->getClass() == &js_BlockClass
)
426 return js_ComputeGlobalThis(cx
, argv
);
428 return CallThisObjectHook(cx
, thisp
, argv
);
432 js_ComputeThis(JSContext
*cx
, jsval
*argv
)
434 JS_ASSERT(argv
[-1] != JSVAL_HOLE
); // check for SynthesizeFrame poisoning
435 if (JSVAL_IS_NULL(argv
[-1]))
436 return js_ComputeGlobalThis(cx
, argv
);
437 return ComputeThis(cx
, argv
);
440 #if JS_HAS_NO_SUCH_METHOD
442 const uint32 JSSLOT_FOUND_FUNCTION
= JSSLOT_PRIVATE
;
443 const uint32 JSSLOT_SAVED_ID
= JSSLOT_PRIVATE
+ 1;
445 JSClass js_NoSuchMethodClass
= {
447 JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS
,
448 JS_PropertyStub
, JS_PropertyStub
, JS_PropertyStub
, JS_PropertyStub
,
449 JS_EnumerateStub
, JS_ResolveStub
, JS_ConvertStub
, NULL
,
450 NULL
, NULL
, NULL
, NULL
, NULL
, NULL
, NULL
, NULL
454 * When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of
455 * the base object, we search for the __noSuchMethod__ method in the base.
456 * If it exists, we store the method and the property's id into an object of
457 * NoSuchMethod class and store this object into the callee's stack slot.
458 * Later, js_Invoke will recognise such an object and transfer control to
459 * NoSuchMethod that invokes the method like:
461 * this.__noSuchMethod__(id, args)
463 * where id is the name of the method that this invocation attempted to
464 * call by name, and args is an Array containing this invocation's actual
467 JS_STATIC_INTERPRET JSBool
468 js_OnUnknownMethod(JSContext
*cx
, jsval
*vp
)
470 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp
[1]));
472 JSObject
*obj
= JSVAL_TO_OBJECT(vp
[1]);
473 jsid id
= ATOM_TO_JSID(cx
->runtime
->atomState
.noSuchMethodAtom
);
474 AutoValueRooter
tvr(cx
, JSVAL_NULL
);
475 if (!js_GetMethod(cx
, obj
, id
, JSGET_NO_METHOD_BARRIER
, tvr
.addr()))
477 if (JSVAL_IS_PRIMITIVE(tvr
.value())) {
480 #if JS_HAS_XML_SUPPORT
481 /* Extract the function name from function::name qname. */
482 if (!JSVAL_IS_PRIMITIVE(vp
[0])) {
483 obj
= JSVAL_TO_OBJECT(vp
[0]);
484 if (!js_IsFunctionQName(cx
, obj
, &id
))
487 vp
[0] = ID_TO_VALUE(id
);
490 obj
= NewObjectWithGivenProto(cx
, &js_NoSuchMethodClass
, NULL
, NULL
);
493 obj
->fslots
[JSSLOT_FOUND_FUNCTION
] = tvr
.value();
494 obj
->fslots
[JSSLOT_SAVED_ID
] = vp
[0];
495 vp
[0] = OBJECT_TO_JSVAL(obj
);
500 static JS_REQUIRES_STACK JSBool
501 NoSuchMethod(JSContext
*cx
, uintN argc
, jsval
*vp
, uint32 flags
)
506 JSObject
*obj
, *argsobj
;
508 invokevp
= js_AllocStack(cx
, 2 + 2, &mark
);
512 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp
[0]));
513 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp
[1]));
514 obj
= JSVAL_TO_OBJECT(vp
[0]);
515 JS_ASSERT(obj
->getClass() == &js_NoSuchMethodClass
);
517 invokevp
[0] = obj
->fslots
[JSSLOT_FOUND_FUNCTION
];
519 invokevp
[2] = obj
->fslots
[JSSLOT_SAVED_ID
];
520 argsobj
= js_NewArrayObject(cx
, argc
, vp
+ 2);
524 invokevp
[3] = OBJECT_TO_JSVAL(argsobj
);
525 ok
= (flags
& JSINVOKE_CONSTRUCT
)
526 ? js_InvokeConstructor(cx
, 2, JS_TRUE
, invokevp
)
527 : js_Invoke(cx
, 2, invokevp
, flags
);
530 js_FreeStack(cx
, mark
);
534 #endif /* JS_HAS_NO_SUCH_METHOD */
537 * We check if the function accepts a primitive value as |this|. For that we
538 * use a table that maps value's tag into the corresponding function flag.
540 JS_STATIC_ASSERT(JSVAL_INT
== 1);
541 JS_STATIC_ASSERT(JSVAL_DOUBLE
== 2);
542 JS_STATIC_ASSERT(JSVAL_STRING
== 4);
543 JS_STATIC_ASSERT(JSVAL_SPECIAL
== 6);
545 const uint16 js_PrimitiveTestFlags
[] = {
546 JSFUN_THISP_NUMBER
, /* INT */
547 JSFUN_THISP_NUMBER
, /* DOUBLE */
548 JSFUN_THISP_NUMBER
, /* INT */
549 JSFUN_THISP_STRING
, /* STRING */
550 JSFUN_THISP_NUMBER
, /* INT */
551 JSFUN_THISP_BOOLEAN
, /* BOOLEAN */
552 JSFUN_THISP_NUMBER
/* INT */
556 * Find a function reference and its 'this' object implicit first parameter
557 * under argc arguments on cx's stack, and call the function. Push missing
558 * required arguments, allocate declared local variables, and pop everything
559 * when done. Then push the return value.
561 JS_REQUIRES_STACK
JS_FRIEND_API(JSBool
)
562 js_Invoke(JSContext
*cx
, uintN argc
, jsval
*vp
, uintN flags
)
565 CallStack
callStack(cx
);
567 jsval
*sp
, *argv
, *newvp
;
569 JSObject
*funobj
, *parent
;
572 const JSObjectOps
*ops
;
577 uint32 rootedArgsFlag
;
578 JSInterpreterHook hook
;
582 JS_ASSERT(argc
<= JS_ARGS_LENGTH_MAX
);
584 /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */
585 JS_ASSERT((jsval
*) cx
->stackPool
.getCurrent()->getBase() <= vp
);
586 JS_ASSERT(vp
+ 2 + argc
<= (jsval
*) cx
->stackPool
.getCurrent()->getAvail());
588 /* Mark the top of stack and load frequently-used registers. */
589 mark
= cx
->stackPool
.getMark();
590 MUST_FLOW_THROUGH("out2");
593 if (JSVAL_IS_PRIMITIVE(v
))
596 funobj
= JSVAL_TO_OBJECT(v
);
597 parent
= funobj
->getParent();
598 clasp
= funobj
->getClass();
599 if (clasp
!= &js_FunctionClass
) {
600 #if JS_HAS_NO_SUCH_METHOD
601 if (clasp
== &js_NoSuchMethodClass
) {
602 ok
= NoSuchMethod(cx
, argc
, vp
, flags
);
607 /* Function is inlined, all other classes use object ops. */
608 ops
= funobj
->map
->ops
;
614 /* Try a call or construct native object op. */
615 if (flags
& JSINVOKE_CONSTRUCT
) {
616 if (!JSVAL_IS_OBJECT(vp
[1])) {
617 ok
= js_PrimitiveToObject(cx
, &vp
[1]);
621 native
= ops
->construct
;
628 /* Get private data and set derived locals from it. */
629 fun
= GET_FUNCTION_PRIVATE(cx
, funobj
);
630 nslots
= FUN_MINARGS(fun
);
631 nslots
= (nslots
> argc
) ? nslots
- argc
: 0;
632 if (FUN_INTERPRETED(fun
)) {
634 script
= fun
->u
.i
.script
;
637 if (script
->isEmpty()) {
638 if (flags
& JSINVOKE_CONSTRUCT
) {
639 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp
[1]));
648 native
= fun
->u
.n
.native
;
650 nslots
+= fun
->u
.n
.extra
;
653 if (JSFUN_BOUND_METHOD_TEST(fun
->flags
)) {
654 /* Handle bound method special case. */
655 vp
[1] = OBJECT_TO_JSVAL(parent
);
656 } else if (!JSVAL_IS_OBJECT(vp
[1])) {
657 JS_ASSERT(!(flags
& JSINVOKE_CONSTRUCT
));
658 if (PRIMITIVE_THIS_TEST(fun
, vp
[1]))
663 if (flags
& JSINVOKE_CONSTRUCT
) {
664 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp
[1]));
667 * We must call js_ComputeThis in case we are not called from the
668 * interpreter, where a prior bytecode has computed an appropriate
671 * But we need to compute |this| eagerly only for so-called "slow"
672 * (i.e., not fast) native functions. Fast natives must use either
673 * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through
674 * the appropriate this-computing bytecode, e.g., JSOP_THIS.
676 if (native
&& (!fun
|| !(fun
->flags
& JSFUN_FAST_NATIVE
))) {
677 if (!js_ComputeThis(cx
, vp
+ 2)) {
681 flags
|= JSFRAME_COMPUTED_THIS
;
686 if (native
&& fun
&& (fun
->flags
& JSFUN_FAST_NATIVE
)) {
687 #ifdef DEBUG_NOT_THROWING
688 JSBool alreadyThrowing
= cx
->throwing
;
690 JS_ASSERT(nslots
== 0);
691 ok
= ((JSFastNative
) native
)(cx
, argc
, vp
);
692 JS_RUNTIME_METER(cx
->runtime
, nativeCalls
);
693 #ifdef DEBUG_NOT_THROWING
694 if (ok
&& !alreadyThrowing
)
695 ASSERT_NOT_THROWING(cx
);
703 rootedArgsFlag
= JSFRAME_ROOTED_ARGV
;
706 * The extra slots required by the function continue with argument
707 * slots. Thus, when the last stack pool arena does not have room to
708 * fit nslots right after sp and AllocateAfterSP fails, we have to copy
709 * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy.
711 if (!AllocateAfterSP(cx
, sp
, nslots
)) {
713 newvp
= js_AllocRawStack(cx
, 2 + argc
+ nslots
, NULL
);
718 memcpy(newvp
, vp
, (2 + argc
) * sizeof(jsval
));
723 /* Push void to initialize missing args. */
730 /* Allocate space for local variables and stack of interpreted function. */
731 if (script
&& script
->nslots
!= 0) {
732 if (!AllocateAfterSP(cx
, sp
, script
->nslots
)) {
733 /* NB: Discontinuity between argv and slots, stack slots. */
734 sp
= js_AllocRawStack(cx
, script
->nslots
, NULL
);
741 /* Push void to initialize local variables. */
742 for (jsval
*end
= sp
+ fun
->u
.i
.nvars
; sp
!= end
; ++sp
)
747 * Initialize the frame.
750 frame
.callobj
= NULL
;
751 frame
.argsobj
= NULL
;
752 frame
.script
= script
;
757 /* Default return value for a constructor is the new object. */
758 frame
.rval
= (flags
& JSINVOKE_CONSTRUCT
) ? vp
[1] : JSVAL_VOID
;
760 frame
.annotation
= NULL
;
761 frame
.scopeChain
= NULL
; /* set below for real, after cx->fp is set */
762 frame
.blockChain
= NULL
;
766 frame
.flags
= flags
| rootedArgsFlag
;
767 frame
.displaySave
= NULL
;
769 MUST_FLOW_THROUGH("out");
773 * The initialVarObj is left NULL since fp->callobj is NULL and, for
774 * interpreted functions, fp->varobj() == fp->callobj.
776 callStack
.setInitialFrame(&frame
);
777 cx
->pushCallStack(&callStack
);
781 /* Init these now in case we goto out before first hook call. */
782 hook
= cx
->debugHooks
->callHook
;
786 /* Slow natives expect the caller's scopeChain as their scopeChain. */
788 JS_ASSERT(!pushCall
);
789 frame
.scopeChain
= frame
.down
->scopeChain
;
792 /* Ensure that we have a scope chain. */
793 if (!frame
.scopeChain
)
794 frame
.scopeChain
= parent
;
796 /* Use parent scope so js_GetCallObject can find the right "Call". */
797 frame
.scopeChain
= parent
;
798 if (JSFUN_HEAVYWEIGHT_TEST(fun
->flags
)) {
799 /* Scope with a call object parented by the callee's parent. */
800 if (!js_GetCallObject(cx
, &frame
)) {
805 frame
.slots
= sp
- fun
->u
.i
.nvars
;
808 /* Call the hook if present after we fully initialized the frame. */
810 hookData
= hook(cx
, &frame
, JS_TRUE
, 0, cx
->debugHooks
->callHookData
);
812 DTrace::enterJSFun(cx
, &frame
, fun
, frame
.down
, frame
.argc
, frame
.argv
);
814 /* Call the function, either a native method or an interpreted script. */
816 #ifdef DEBUG_NOT_THROWING
817 JSBool alreadyThrowing
= cx
->throwing
;
819 /* Primitive |this| should not be passed to slow natives. */
820 JSObject
*thisp
= JSVAL_TO_OBJECT(frame
.thisv
);
821 ok
= native(cx
, thisp
, argc
, frame
.argv
, &frame
.rval
);
822 JS_RUNTIME_METER(cx
->runtime
, nativeCalls
);
823 #ifdef DEBUG_NOT_THROWING
824 if (ok
&& !alreadyThrowing
)
825 ASSERT_NOT_THROWING(cx
);
829 ok
= js_Interpret(cx
);
832 DTrace::exitJSFun(cx
, &frame
, fun
, frame
.rval
);
836 hook
= cx
->debugHooks
->callHook
;
838 hook(cx
, &frame
, JS_FALSE
, &ok
, hookData
);
841 frame
.putActivationObjects(cx
);
845 /* Restore cx->fp now that we're done releasing frame objects. */
851 /* Pop everything we may have allocated off the stack. */
852 cx
->stackPool
.release(mark
);
858 js_ReportIsNotFunction(cx
, vp
, flags
& JSINVOKE_FUNFLAGS
);
864 js_InternalInvoke(JSContext
*cx
, JSObject
*obj
, jsval fval
, uintN flags
,
865 uintN argc
, jsval
*argv
, jsval
*rval
)
872 invokevp
= js_AllocStack(cx
, 2 + argc
, &mark
);
877 invokevp
[1] = OBJECT_TO_JSVAL(obj
);
878 memcpy(invokevp
+ 2, argv
, argc
* sizeof *argv
);
880 ok
= js_Invoke(cx
, argc
, invokevp
, flags
);
883 * Store *rval in the a scoped local root if a scope is open, else in
884 * the lastInternalResult pigeon-hole GC root, solely so users of
885 * js_InternalInvoke and its direct and indirect (js_ValueToString for
886 * example) callers do not need to manage roots for local, temporary
887 * references to such results.
890 if (JSVAL_IS_GCTHING(*rval
) && *rval
!= JSVAL_NULL
) {
891 JSLocalRootStack
*lrs
= JS_THREAD_DATA(cx
)->localRootStack
;
893 if (js_PushLocalRoot(cx
, lrs
, *rval
) < 0)
896 cx
->weakRoots
.lastInternalResult
= *rval
;
901 js_FreeStack(cx
, mark
);
906 js_InternalGetOrSet(JSContext
*cx
, JSObject
*obj
, jsid id
, jsval fval
,
907 JSAccessMode mode
, uintN argc
, jsval
*argv
, jsval
*rval
)
912 * js_InternalInvoke could result in another try to get or set the same id
913 * again, see bug 355497.
915 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
917 return js_InternalCall(cx
, obj
, fval
, argc
, argv
, rval
);
921 js_Execute(JSContext
*cx
, JSObject
*chain
, JSScript
*script
,
922 JSStackFrame
*down
, uintN flags
, jsval
*result
)
924 if (script
->isEmpty()) {
926 *result
= JSVAL_VOID
;
932 DTrace::ExecutionScope
executionScope(script
);
934 JSInterpreterHook hook
= cx
->debugHooks
->executeHook
;
935 void *hookData
= NULL
;
937 CallStack
callStack(cx
);
938 frame
.script
= script
;
940 /* Propagate arg state for eval and the debugger API. */
941 frame
.callobj
= down
->callobj
;
942 frame
.argsobj
= down
->argsobj
;
943 frame
.fun
= (script
->staticLevel
> 0) ? down
->fun
: NULL
;
944 frame
.thisv
= down
->thisv
;
945 if (down
->flags
& JSFRAME_COMPUTED_THIS
)
946 flags
|= JSFRAME_COMPUTED_THIS
;
947 frame
.argc
= down
->argc
;
948 frame
.argv
= down
->argv
;
949 frame
.annotation
= down
->annotation
;
952 * We want to call |down->varobj()|, but this requires knowing the
953 * CallStack of |down|. If |down == cx->fp|, the callstack is simply
954 * the context's active callstack, so we can use |down->varobj(cx)|.
955 * When |down != cx->fp|, we need to do a slow linear search. Luckily,
956 * this only happens with indirect eval and JS_EvaluateInStackFrame.
958 if (down
== cx
->fp
) {
959 callStack
.setInitialVarObj(down
->varobj(cx
));
961 CallStack
*cs
= cx
->containingCallStack(down
);
962 callStack
.setInitialVarObj(down
->varobj(cs
));
965 frame
.callobj
= NULL
;
966 frame
.argsobj
= NULL
;
967 JSObject
*obj
= chain
;
968 if (cx
->options
& JSOPTION_VAROBJFIX
) {
969 while (JSObject
*tmp
= obj
->getParent())
973 frame
.thisv
= OBJECT_TO_JSVAL(chain
);
976 frame
.annotation
= NULL
;
977 callStack
.setInitialVarObj(obj
);
982 struct RawStackGuard
{
985 RawStackGuard(JSContext
*cx
) : cx(cx
), mark(NULL
) {}
986 ~RawStackGuard() { if (mark
) js_FreeRawStack(cx
, mark
); }
989 if (script
->nslots
!= 0) {
990 frame
.slots
= js_AllocRawStack(cx
, script
->nslots
, &rawStackGuard
.mark
);
993 memset(frame
.slots
, 0, script
->nfixed
* sizeof(jsval
));
995 #if JS_HAS_SHARP_VARS
996 JS_STATIC_ASSERT(SHARP_NSLOTS
== 2);
998 if (script
->hasSharps
) {
999 JS_ASSERT(script
->nfixed
>= SHARP_NSLOTS
);
1000 jsval
*sharps
= &frame
.slots
[script
->nfixed
- SHARP_NSLOTS
];
1002 if (down
&& down
->script
&& down
->script
->hasSharps
) {
1003 JS_ASSERT(down
->script
->nfixed
>= SHARP_NSLOTS
);
1004 int base
= (down
->fun
&& !(down
->flags
& JSFRAME_SPECIAL
))
1005 ? down
->fun
->sharpSlotBase(cx
)
1006 : down
->script
->nfixed
- SHARP_NSLOTS
;
1009 sharps
[0] = down
->slots
[base
];
1010 sharps
[1] = down
->slots
[base
+ 1];
1012 sharps
[0] = sharps
[1] = JSVAL_VOID
;
1020 frame
.rval
= JSVAL_VOID
;
1022 frame
.scopeChain
= chain
;
1024 frame
.flags
= flags
;
1025 frame
.blockChain
= NULL
;
1028 * We need to push/pop a new callstack if there is no existing callstack
1029 * or the current callstack needs to be suspended (so that its frames are
1032 JSStackFrame
*oldfp
= cx
->fp
;
1033 bool newCallStack
= !oldfp
|| oldfp
!= down
;
1035 callStack
.setInitialFrame(&frame
);
1036 cx
->pushCallStack(&callStack
);
1040 struct FinishGuard
{
1042 JSStackFrame
*oldfp
;
1044 FinishGuard(JSContext
*cx
, JSStackFrame
*oldfp
, bool newCallStack
)
1045 : cx(cx
), oldfp(oldfp
), newCallStack(newCallStack
) {}
1051 } finishGuard(cx
, oldfp
, newCallStack
);
1054 OBJ_TO_INNER_OBJECT(cx
, chain
);
1057 frame
.scopeChain
= chain
;
1059 JSObject
*thisp
= JSVAL_TO_OBJECT(frame
.thisv
)->thisObject(cx
);
1062 frame
.thisv
= OBJECT_TO_JSVAL(thisp
);
1063 frame
.flags
|= JSFRAME_COMPUTED_THIS
;
1067 hookData
= hook(cx
, &frame
, JS_TRUE
, 0,
1068 cx
->debugHooks
->executeHookData
);
1071 JSBool ok
= js_Interpret(cx
);
1073 *result
= frame
.rval
;
1076 hook
= cx
->debugHooks
->executeHook
;
1078 hook(cx
, &frame
, JS_FALSE
, &ok
, hookData
);
1085 js_CheckRedeclaration(JSContext
*cx
, JSObject
*obj
, jsid id
, uintN attrs
,
1086 JSObject
**objp
, JSProperty
**propp
)
1090 uintN oldAttrs
, report
;
1093 const char *type
, *name
;
1096 * Both objp and propp must be either null or given. When given, *propp
1097 * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
1098 * the common case of a non-existing property.
1100 JS_ASSERT(!objp
== !propp
);
1101 JS_ASSERT_IF(propp
, !*propp
);
1103 /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
1104 * drop the property before reporting it, we insists on !propp to avoid
1105 * looking up the property again after the reporting is done.
1107 JS_ASSERT_IF(attrs
& JSPROP_INITIALIZER
, attrs
== JSPROP_INITIALIZER
);
1108 JS_ASSERT_IF(attrs
== JSPROP_INITIALIZER
, !propp
);
1110 if (!obj
->lookupProperty(cx
, id
, &obj2
, &prop
))
1115 /* Use prop as a speedup hint to obj->getAttributes. */
1116 if (!obj2
->getAttributes(cx
, id
, prop
, &oldAttrs
)) {
1117 obj2
->dropProperty(cx
, prop
);
1122 * If our caller doesn't want prop, drop it (we don't need it any longer).
1125 obj2
->dropProperty(cx
, prop
);
1132 if (attrs
== JSPROP_INITIALIZER
) {
1133 /* Allow the new object to override properties. */
1137 /* The property must be dropped already. */
1139 report
= JSREPORT_WARNING
| JSREPORT_STRICT
;
1142 isFunction
= false; /* suppress bogus gcc warnings */
1145 /* We allow redeclaring some non-readonly properties. */
1146 if (((oldAttrs
| attrs
) & JSPROP_READONLY
) == 0) {
1147 /* Allow redeclaration of variables and functions. */
1148 if (!(attrs
& (JSPROP_GETTER
| JSPROP_SETTER
)))
1152 * Allow adding a getter only if a property already has a setter
1153 * but no getter and similarly for adding a setter. That is, we
1154 * allow only the following transitions:
1156 * no-property --> getter --> getter + setter
1157 * no-property --> setter --> getter + setter
1159 if ((~(oldAttrs
^ attrs
) & (JSPROP_GETTER
| JSPROP_SETTER
)) == 0)
1163 * Allow redeclaration of an impermanent property (in which case
1164 * anyone could delete it and redefine it, willy-nilly).
1166 if (!(oldAttrs
& JSPROP_PERMANENT
))
1170 obj2
->dropProperty(cx
, prop
);
1172 report
= JSREPORT_ERROR
;
1173 isFunction
= (oldAttrs
& (JSPROP_GETTER
| JSPROP_SETTER
)) != 0;
1175 if (!obj
->getProperty(cx
, id
, &value
))
1177 isFunction
= VALUE_IS_FUNCTION(cx
, value
);
1181 type
= (attrs
== JSPROP_INITIALIZER
)
1183 : (oldAttrs
& attrs
& JSPROP_GETTER
)
1185 : (oldAttrs
& attrs
& JSPROP_SETTER
)
1187 : (oldAttrs
& JSPROP_READONLY
)
1192 name
= js_ValueToPrintableString(cx
, ID_TO_VALUE(id
));
1195 return JS_ReportErrorFlagsAndNumber(cx
, report
,
1196 js_GetErrorMessage
, NULL
,
1197 JSMSG_REDECLARED_VAR
,
1202 js_StrictlyEqual(JSContext
*cx
, jsval lval
, jsval rval
)
1204 jsval ltag
= JSVAL_TAG(lval
), rtag
= JSVAL_TAG(rval
);
1208 if (ltag
== JSVAL_STRING
) {
1209 JSString
*lstr
= JSVAL_TO_STRING(lval
),
1210 *rstr
= JSVAL_TO_STRING(rval
);
1211 return js_EqualStrings(lstr
, rstr
);
1213 if (ltag
== JSVAL_DOUBLE
) {
1214 ld
= *JSVAL_TO_DOUBLE(lval
);
1215 rd
= *JSVAL_TO_DOUBLE(rval
);
1216 return JSDOUBLE_COMPARE(ld
, ==, rd
, JS_FALSE
);
1218 if (ltag
== JSVAL_OBJECT
&&
1220 !JSVAL_IS_NULL(lval
) &&
1221 !JSVAL_IS_NULL(rval
)) {
1222 JSObject
*lobj
, *robj
;
1224 lobj
= js_GetWrappedObject(cx
, JSVAL_TO_OBJECT(lval
));
1225 robj
= js_GetWrappedObject(cx
, JSVAL_TO_OBJECT(rval
));
1226 lval
= OBJECT_TO_JSVAL(lobj
);
1227 rval
= OBJECT_TO_JSVAL(robj
);
1229 return lval
== rval
;
1231 if (ltag
== JSVAL_DOUBLE
&& JSVAL_IS_INT(rval
)) {
1232 ld
= *JSVAL_TO_DOUBLE(lval
);
1233 rd
= JSVAL_TO_INT(rval
);
1234 return JSDOUBLE_COMPARE(ld
, ==, rd
, JS_FALSE
);
1236 if (JSVAL_IS_INT(lval
) && rtag
== JSVAL_DOUBLE
) {
1237 ld
= JSVAL_TO_INT(lval
);
1238 rd
= *JSVAL_TO_DOUBLE(rval
);
1239 return JSDOUBLE_COMPARE(ld
, ==, rd
, JS_FALSE
);
1241 return lval
== rval
;
1245 IsNegativeZero(jsval v
)
1247 return JSVAL_IS_DOUBLE(v
) && JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v
));
1253 return JSVAL_IS_DOUBLE(v
) && JSDOUBLE_IS_NaN(*JSVAL_TO_DOUBLE(v
));
1257 js_SameValue(jsval v1
, jsval v2
, JSContext
*cx
)
1259 if (IsNegativeZero(v1
))
1260 return IsNegativeZero(v2
);
1261 if (IsNegativeZero(v2
))
1263 if (IsNaN(v1
) && IsNaN(v2
))
1265 return js_StrictlyEqual(cx
, v1
, v2
);
1268 JS_REQUIRES_STACK JSBool
1269 js_InvokeConstructor(JSContext
*cx
, uintN argc
, JSBool clampReturn
, jsval
*vp
)
1271 JSFunction
*fun
, *fun2
;
1272 JSObject
*obj
, *obj2
, *proto
, *parent
;
1279 if (!JSVAL_IS_OBJECT(lval
) ||
1280 (obj2
= JSVAL_TO_OBJECT(lval
)) == NULL
||
1281 /* XXX clean up to avoid special cases above ObjectOps layer */
1282 obj2
->getClass() == &js_FunctionClass
||
1283 !obj2
->map
->ops
->construct
)
1285 fun
= js_ValueToFunction(cx
, vp
, JSV2F_CONSTRUCT
);
1290 clasp
= &js_ObjectClass
;
1292 proto
= parent
= NULL
;
1296 * Get the constructor prototype object for this function.
1297 * Use the nominal 'this' parameter slot, vp[1], as a local
1298 * root to protect this prototype, in case it has no other
1301 if (!obj2
->getProperty(cx
, ATOM_TO_JSID(cx
->runtime
->atomState
.classPrototypeAtom
),
1306 proto
= JSVAL_IS_OBJECT(rval
) ? JSVAL_TO_OBJECT(rval
) : NULL
;
1307 parent
= obj2
->getParent();
1309 if (obj2
->getClass() == &js_FunctionClass
) {
1310 fun2
= GET_FUNCTION_PRIVATE(cx
, obj2
);
1311 if (!FUN_INTERPRETED(fun2
) && fun2
->u
.n
.clasp
)
1312 clasp
= fun2
->u
.n
.clasp
;
1315 obj
= NewObject(cx
, clasp
, proto
, parent
);
1319 /* Now we have an object with a constructor method; call it. */
1320 vp
[1] = OBJECT_TO_JSVAL(obj
);
1321 if (!js_Invoke(cx
, argc
, vp
, JSINVOKE_CONSTRUCT
))
1324 /* Check the return value and if it's primitive, force it to be obj. */
1326 if (clampReturn
&& JSVAL_IS_PRIMITIVE(rval
)) {
1328 /* native [[Construct]] returning primitive is error */
1329 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
1330 JSMSG_BAD_NEW_RESULT
,
1331 js_ValueToPrintableString(cx
, rval
));
1334 *vp
= OBJECT_TO_JSVAL(obj
);
1337 JS_RUNTIME_METER(cx
->runtime
, constructs
);
1342 js_InternNonIntElementId(JSContext
*cx
, JSObject
*obj
, jsval idval
, jsid
*idp
)
1344 JS_ASSERT(!JSVAL_IS_INT(idval
));
1346 #if JS_HAS_XML_SUPPORT
1347 if (!JSVAL_IS_PRIMITIVE(idval
)) {
1348 if (OBJECT_IS_XML(cx
, obj
)) {
1349 *idp
= OBJECT_JSVAL_TO_JSID(idval
);
1352 if (!js_IsFunctionQName(cx
, JSVAL_TO_OBJECT(idval
), idp
))
1359 return js_ValueToStringId(cx
, idval
, idp
);
1363 * Enter the new with scope using an object at sp[-1] and associate the depth
1364 * of the with block with sp + stackIndex.
1366 JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
1367 js_EnterWith(JSContext
*cx
, jsint stackIndex
)
1371 JSObject
*obj
, *parent
, *withobj
;
1375 JS_ASSERT(stackIndex
< 0);
1376 JS_ASSERT(StackBase(fp
) <= sp
+ stackIndex
);
1378 if (!JSVAL_IS_PRIMITIVE(sp
[-1])) {
1379 obj
= JSVAL_TO_OBJECT(sp
[-1]);
1381 obj
= js_ValueToNonNullObject(cx
, sp
[-1]);
1384 sp
[-1] = OBJECT_TO_JSVAL(obj
);
1387 parent
= js_GetScopeChain(cx
, fp
);
1391 OBJ_TO_INNER_OBJECT(cx
, obj
);
1395 withobj
= js_NewWithObject(cx
, obj
, parent
,
1396 sp
+ stackIndex
- StackBase(fp
));
1400 fp
->scopeChain
= withobj
;
1404 JS_STATIC_INTERPRET JS_REQUIRES_STACK
void
1405 js_LeaveWith(JSContext
*cx
)
1409 withobj
= cx
->fp
->scopeChain
;
1410 JS_ASSERT(withobj
->getClass() == &js_WithClass
);
1411 JS_ASSERT(withobj
->getPrivate() == cx
->fp
);
1412 JS_ASSERT(OBJ_BLOCK_DEPTH(cx
, withobj
) >= 0);
1413 cx
->fp
->scopeChain
= withobj
->getParent();
1414 withobj
->setPrivate(NULL
);
1417 JS_REQUIRES_STACK JSClass
*
1418 js_IsActiveWithOrBlock(JSContext
*cx
, JSObject
*obj
, int stackDepth
)
1422 clasp
= obj
->getClass();
1423 if ((clasp
== &js_WithClass
|| clasp
== &js_BlockClass
) &&
1424 obj
->getPrivate() == cx
->fp
&&
1425 OBJ_BLOCK_DEPTH(cx
, obj
) >= stackDepth
) {
1432 * Unwind block and scope chains to match the given depth. The function sets
1433 * fp->sp on return to stackDepth.
1435 JS_REQUIRES_STACK JSBool
1436 js_UnwindScope(JSContext
*cx
, JSStackFrame
*fp
, jsint stackDepth
,
1437 JSBool normalUnwind
)
1442 JS_ASSERT(stackDepth
>= 0);
1443 JS_ASSERT(StackBase(fp
) + stackDepth
<= fp
->regs
->sp
);
1445 for (obj
= fp
->blockChain
; obj
; obj
= obj
->getParent()) {
1446 JS_ASSERT(obj
->getClass() == &js_BlockClass
);
1447 if (OBJ_BLOCK_DEPTH(cx
, obj
) < stackDepth
)
1450 fp
->blockChain
= obj
;
1453 obj
= fp
->scopeChain
;
1454 clasp
= js_IsActiveWithOrBlock(cx
, obj
, stackDepth
);
1457 if (clasp
== &js_BlockClass
) {
1458 /* Don't fail until after we've updated all stacks. */
1459 normalUnwind
&= js_PutBlockObject(cx
, normalUnwind
);
1465 fp
->regs
->sp
= StackBase(fp
) + stackDepth
;
1466 return normalUnwind
;
1469 JS_STATIC_INTERPRET JSBool
1470 js_DoIncDec(JSContext
*cx
, const JSCodeSpec
*cs
, jsval
*vp
, jsval
*vp2
)
1472 if (cs
->format
& JOF_POST
) {
1474 if (!ValueToNumberValue(cx
, vp
, &d
))
1476 (cs
->format
& JOF_INC
) ? ++d
: --d
;
1477 return js_NewNumberInRootedValue(cx
, d
, vp2
);
1481 if (!ValueToNumber(cx
, *vp
, &d
))
1483 (cs
->format
& JOF_INC
) ? ++d
: --d
;
1484 if (!js_NewNumberInRootedValue(cx
, d
, vp2
))
1491 js_GetUpvar(JSContext
*cx
, uintN level
, uintN cookie
)
1493 level
-= UPVAR_FRAME_SKIP(cookie
);
1494 JS_ASSERT(level
< JS_DISPLAY_SIZE
);
1496 JSStackFrame
*fp
= cx
->display
[level
];
1497 JS_ASSERT(fp
->script
);
1499 uintN slot
= UPVAR_FRAME_SLOT(cookie
);
1502 if (!fp
->fun
|| (fp
->flags
& JSFRAME_EVAL
)) {
1503 vp
= fp
->slots
+ fp
->script
->nfixed
;
1504 } else if (slot
< fp
->fun
->nargs
) {
1506 } else if (slot
== CALLEE_UPVAR_SLOT
) {
1510 slot
-= fp
->fun
->nargs
;
1511 JS_ASSERT(slot
< fp
->script
->nslots
);
1520 JS_STATIC_INTERPRET JS_REQUIRES_STACK
void
1521 js_TraceOpcode(JSContext
*cx
)
1526 intN ndefs
, n
, nuses
;
1531 tracefp
= (FILE *) cx
->tracefp
;
1537 * Operations in prologues don't produce interesting values, and
1538 * js_DecompileValueGenerator isn't set up to handle them anyway.
1540 if (cx
->tracePrevPc
&& regs
->pc
>= fp
->script
->main
) {
1541 JSOp tracePrevOp
= JSOp(*cx
->tracePrevPc
);
1542 ndefs
= js_GetStackDefs(cx
, &js_CodeSpec
[tracePrevOp
], tracePrevOp
,
1543 fp
->script
, cx
->tracePrevPc
);
1546 * If there aren't that many elements on the stack, then we have
1547 * probably entered a new frame, and printing output would just be
1551 ndefs
< regs
->sp
- fp
->slots
) {
1552 for (n
= -ndefs
; n
< 0; n
++) {
1553 char *bytes
= js_DecompileValueGenerator(cx
, n
, regs
->sp
[n
],
1556 fprintf(tracefp
, "%s %s",
1557 (n
== -ndefs
) ? " output:" : ",",
1561 JS_ClearPendingException(cx
);
1564 fprintf(tracefp
, " @ %u\n", (uintN
) (regs
->sp
- StackBase(fp
)));
1566 fprintf(tracefp
, " stack: ");
1567 for (siter
= StackBase(fp
); siter
< regs
->sp
; siter
++) {
1568 str
= js_ValueToString(cx
, *siter
);
1570 fputs("<null>", tracefp
);
1572 JS_ClearPendingException(cx
);
1573 js_FileEscapedString(tracefp
, str
, 0);
1575 fputc(' ', tracefp
);
1577 fputc('\n', tracefp
);
1580 fprintf(tracefp
, "%4u: ",
1581 js_PCToLineNumber(cx
, fp
->script
, fp
->imacpc
? fp
->imacpc
: regs
->pc
));
1582 js_Disassemble1(cx
, fp
->script
, regs
->pc
,
1583 regs
->pc
- fp
->script
->code
,
1585 op
= (JSOp
) *regs
->pc
;
1586 nuses
= js_GetStackUses(&js_CodeSpec
[op
], op
, regs
->pc
);
1588 for (n
= -nuses
; n
< 0; n
++) {
1589 char *bytes
= js_DecompileValueGenerator(cx
, n
, regs
->sp
[n
],
1592 fprintf(tracefp
, "%s %s",
1593 (n
== -nuses
) ? " inputs:" : ",",
1597 JS_ClearPendingException(cx
);
1600 fprintf(tracefp
, " @ %u\n", (uintN
) (regs
->sp
- StackBase(fp
)));
1602 cx
->tracePrevPc
= regs
->pc
;
1604 /* It's nice to have complete traces when debugging a crash. */
1612 # include <stdlib.h>
1614 # define HIST_NSLOTS 8
1617 * The second dimension is hardcoded at 256 because we know that many bits fit
1618 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
1619 * any particular row.
1621 static uint32 succeeds
[JSOP_LIMIT
][256];
1622 static uint32 slot_ops
[JSOP_LIMIT
][HIST_NSLOTS
];
1624 JS_STATIC_INTERPRET
void
1625 js_MeterOpcodePair(JSOp op1
, JSOp op2
)
1627 if (op1
!= JSOP_STOP
)
1628 ++succeeds
[op1
][op2
];
1631 JS_STATIC_INTERPRET
void
1632 js_MeterSlotOpcode(JSOp op
, uint32 slot
)
1634 if (slot
< HIST_NSLOTS
)
1635 ++slot_ops
[op
][slot
];
1638 typedef struct Edge
{
1645 compare_edges(const void *a
, const void *b
)
1647 const Edge
*ea
= (const Edge
*) a
;
1648 const Edge
*eb
= (const Edge
*) b
;
1650 return (int32
)eb
->count
- (int32
)ea
->count
;
1656 const char *name
, *from
, *style
;
1658 uint32 total
, count
;
1659 uint32 i
, j
, nedges
;
1662 name
= getenv("JS_OPMETER_FILE");
1664 name
= "/tmp/ops.dot";
1665 fp
= fopen(name
, "w");
1672 for (i
= 0; i
< JSOP_LIMIT
; i
++) {
1673 for (j
= 0; j
< JSOP_LIMIT
; j
++) {
1674 count
= succeeds
[i
][j
];
1682 # define SIGNIFICANT(count,total) (200. * (count) >= (total))
1684 graph
= (Edge
*) js_calloc(nedges
* sizeof graph
[0]);
1685 for (i
= nedges
= 0; i
< JSOP_LIMIT
; i
++) {
1686 from
= js_CodeName
[i
];
1687 for (j
= 0; j
< JSOP_LIMIT
; j
++) {
1688 count
= succeeds
[i
][j
];
1689 if (count
!= 0 && SIGNIFICANT(count
, total
)) {
1690 graph
[nedges
].from
= from
;
1691 graph
[nedges
].to
= js_CodeName
[j
];
1692 graph
[nedges
].count
= count
;
1697 qsort(graph
, nedges
, sizeof(Edge
), compare_edges
);
1701 fputs("digraph {\n", fp
);
1702 for (i
= 0, style
= NULL
; i
< nedges
; i
++) {
1703 JS_ASSERT(i
== 0 || graph
[i
-1].count
>= graph
[i
].count
);
1704 if (!style
|| graph
[i
-1].count
!= graph
[i
].count
) {
1705 style
= (i
> nedges
* .75) ? "dotted" :
1706 (i
> nedges
* .50) ? "dashed" :
1707 (i
> nedges
* .25) ? "solid" : "bold";
1709 fprintf(fp
, " %s -> %s [label=\"%lu\" style=%s]\n",
1710 graph
[i
].from
, graph
[i
].to
,
1711 (unsigned long)graph
[i
].count
, style
);
1717 name
= getenv("JS_OPMETER_HIST");
1719 name
= "/tmp/ops.hist";
1720 fp
= fopen(name
, "w");
1725 fputs("bytecode", fp
);
1726 for (j
= 0; j
< HIST_NSLOTS
; j
++)
1727 fprintf(fp
, " slot %1u", (unsigned)j
);
1729 fputs("========", fp
);
1730 for (j
= 0; j
< HIST_NSLOTS
; j
++)
1731 fputs(" =======", fp
);
1733 for (i
= 0; i
< JSOP_LIMIT
; i
++) {
1734 for (j
= 0; j
< HIST_NSLOTS
; j
++) {
1735 if (slot_ops
[i
][j
] != 0) {
1736 /* Reuse j in the next loop, since we break after. */
1737 fprintf(fp
, "%-8.8s", js_CodeName
[i
]);
1738 for (j
= 0; j
< HIST_NSLOTS
; j
++)
1739 fprintf(fp
, " %7lu", (unsigned long)slot_ops
[i
][j
]);
1748 #endif /* JS_OPSMETER */
1750 #endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */
1752 #ifndef jsinvoke_cpp___
1755 // jsval representation metering: this measures the kinds of jsvals that
1756 // are used as inputs to each JSOp.
1757 namespace reprmeter
{
1767 FUNCTION_INTERPRETED
,
1768 FUNCTION_FASTNATIVE
,
1769 FUNCTION_SLOWNATIVE
,
1774 // Return the |repr| value giving the representation of the given jsval.
1778 if (JSVAL_IS_INT(v
))
1780 if (JSVAL_IS_DOUBLE(v
))
1782 if (JSVAL_IS_SPECIAL(v
)) {
1783 return (v
== JSVAL_TRUE
|| v
== JSVAL_FALSE
)
1787 if (JSVAL_IS_STRING(v
))
1790 JS_ASSERT(JSVAL_IS_OBJECT(v
));
1792 JSObject
*obj
= JSVAL_TO_OBJECT(v
);
1793 if (VALUE_IS_FUNCTION(cx
, v
)) {
1794 JSFunction
*fun
= GET_FUNCTION_PRIVATE(cx
, obj
);
1795 if (FUN_INTERPRETED(fun
))
1796 return FUNCTION_INTERPRETED
;
1797 if (fun
->flags
& JSFUN_FAST_NATIVE
)
1798 return FUNCTION_FASTNATIVE
;
1799 return FUNCTION_SLOWNATIVE
;
1801 // This must come before the general array test, because that
1802 // one subsumes this one.
1805 if (obj
->isDenseArray())
1809 return OBJECT_PLAIN
;
1812 static const char *reprName
[] = { "invalid", "int", "double", "bool", "special",
1813 "string", "null", "object",
1814 "fun:interp", "fun:fast", "fun:slow",
1815 "array:slow", "array:dense" };
1817 // Logically, a tuple of (JSOp, repr_1, ..., repr_n) where repr_i is
1818 // the |repr| of the ith input to the JSOp.
1820 enum { max_uses
= 16 };
1823 Repr uses
[max_uses
];
1825 OpInput() : op(JSOp(255)) {
1826 for (int i
= 0; i
< max_uses
; ++i
)
1830 OpInput(JSOp op
) : op(op
) {
1831 for (int i
= 0; i
< max_uses
; ++i
)
1836 operator uint32() const {
1838 for (int i
= 0; i
< max_uses
; ++i
)
1839 h
= h
* 7 + uses
[i
] * 13;
1843 bool operator==(const OpInput
&opinput
) const {
1844 if (op
!= opinput
.op
)
1846 for (int i
= 0; i
< max_uses
; ++i
) {
1847 if (uses
[i
] != opinput
.uses
[i
])
1853 OpInput
&operator=(const OpInput
&opinput
) {
1855 for (int i
= 0; i
< max_uses
; ++i
)
1856 uses
[i
] = opinput
.uses
[i
];
1861 typedef HashMap
<OpInput
, uint64
, DefaultHasher
<OpInput
>, SystemAllocPolicy
> OpInputHistogram
;
1863 OpInputHistogram opinputs
;
1864 bool opinputsInitialized
= false;
1866 // Record an OpInput for the current op. This should be called just
1867 // before executing the op.
1869 MeterRepr(JSStackFrame
*fp
)
1871 // Note that we simply ignore the possibility of errors (OOMs)
1872 // using the hash map, since this is only metering code.
1874 if (!opinputsInitialized
) {
1876 opinputsInitialized
= true;
1879 JSOp op
= JSOp(*fp
->regs
->pc
);
1880 unsigned nuses
= js_GetStackUses(&js_CodeSpec
[op
], op
, fp
->regs
->pc
);
1882 // Build the OpInput.
1883 OpInput
opinput(op
);
1884 for (unsigned i
= 0; i
< nuses
; ++i
) {
1885 jsval v
= fp
->regs
->sp
[-nuses
+i
];
1886 opinput
.uses
[i
] = GetRepr(v
);
1889 OpInputHistogram::AddPtr p
= opinputs
.lookupForAdd(opinput
);
1893 opinputs
.add(p
, opinput
, 1);
1899 FILE *f
= fopen("/tmp/reprmeter.txt", "w");
1901 for (OpInputHistogram::Range r
= opinputs
.all(); !r
.empty(); r
.popFront()) {
1902 const OpInput
&o
= r
.front().key
;
1903 uint64 c
= r
.front().value
;
1904 fprintf(f
, "%3d,%s", o
.op
, js_CodeName
[o
.op
]);
1905 for (int i
= 0; i
< OpInput::max_uses
&& o
.uses
[i
] != NONE
; ++i
)
1906 fprintf(f
, ",%s", reprName
[o
.uses
[i
]]);
1907 fprintf(f
, ",%llu\n", c
);
1912 #endif /* JS_REPRMETER */
1914 #define PUSH(v) (*regs.sp++ = (v))
1915 #define PUSH_OPND(v) PUSH(v)
1916 #define STORE_OPND(n,v) (regs.sp[n] = (v))
1917 #define POP() (*--regs.sp)
1918 #define POP_OPND() POP()
1919 #define FETCH_OPND(n) (regs.sp[n])
1922 * Push the jsdouble d using sp from the lexical environment. Try to convert d
1923 * to a jsint that fits in a jsval, otherwise GC-alloc space for it and push a
1926 #define STORE_NUMBER(cx, n, d) \
1930 if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_)) \
1931 regs.sp[n] = INT_TO_JSVAL(i_); \
1932 else if (!js_NewDoubleInRootedValue(cx, d, ®s.sp[n])) \
1936 #define STORE_INT(cx, n, i) \
1938 if (INT_FITS_IN_JSVAL(i)) \
1939 regs.sp[n] = INT_TO_JSVAL(i); \
1940 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), ®s.sp[n])) \
1944 #define STORE_UINT(cx, n, u) \
1946 if ((u) <= JSVAL_INT_MAX) \
1947 regs.sp[n] = INT_TO_JSVAL(u); \
1948 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), ®s.sp[n])) \
1952 #define FETCH_NUMBER(cx, n, d) \
1956 v_ = FETCH_OPND(n); \
1957 VALUE_TO_NUMBER(cx, v_, d); \
1960 #define FETCH_INT(cx, n, i) \
1962 if (!ValueToECMAInt32(cx, regs.sp[n], &i)) \
1966 #define FETCH_UINT(cx, n, ui) \
1968 if (!ValueToECMAUint32(cx, regs.sp[n], &ui)) \
1972 #define VALUE_TO_NUMBER(cx, v, d) \
1974 if (!ValueToNumber(cx, v, &d)) \
1978 #define POP_BOOLEAN(cx, v, b) \
1980 v = FETCH_OPND(-1); \
1981 if (v == JSVAL_NULL) { \
1983 } else if (JSVAL_IS_BOOLEAN(v)) { \
1984 b = JSVAL_TO_BOOLEAN(v); \
1986 b = js_ValueToBoolean(v); \
1991 #define VALUE_TO_OBJECT(cx, n, v, obj) \
1993 if (!JSVAL_IS_PRIMITIVE(v)) { \
1994 obj = JSVAL_TO_OBJECT(v); \
1996 obj = js_ValueToNonNullObject(cx, v); \
1999 STORE_OPND(n, OBJECT_TO_JSVAL(obj)); \
2003 #define FETCH_OBJECT(cx, n, v, obj) \
2005 v = FETCH_OPND(n); \
2006 VALUE_TO_OBJECT(cx, n, v, obj); \
2009 #define DEFAULT_VALUE(cx, n, hint, v) \
2011 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v)); \
2012 JS_ASSERT(v == regs.sp[n]); \
2013 if (!JSVAL_TO_OBJECT(v)->defaultValue(cx, hint, ®s.sp[n])) \
2019 * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when
2020 * the lowest bit of v is 1 and the bits 30 and 31 are both either 0 or 1. For
2021 * such v we can do increment or decrement via adding or subtracting two
2022 * without checking that the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX.
2024 #define CAN_DO_FAST_INC_DEC(v) (((((v) << 1) ^ v) & 0x80000001) == 1)
2026 JS_STATIC_ASSERT(JSVAL_INT
== 1);
2027 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MIN
)));
2028 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MAX
)));
2031 * Conditional assert to detect failure to clear a pending exception that is
2032 * suppressed (or unintentional suppression of a wanted exception).
2034 #if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver
2035 # define DEBUG_NOT_THROWING 1
2038 #ifdef DEBUG_NOT_THROWING
2039 # define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing)
2041 # define ASSERT_NOT_THROWING(cx) /* nothing */
2045 * Define JS_OPMETER to instrument bytecode succession, generating a .dot file
2046 * on shutdown that shows the graph of significant predecessor/successor pairs
2047 * executed, where the edge labels give the succession counts. The .dot file
2048 * is named by the JS_OPMETER_FILE envariable, and defaults to /tmp/ops.dot.
2050 * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops
2051 * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts
2052 * are written to JS_OPMETER_HIST, defaulting to /tmp/ops.hist.
2055 # define METER_OP_INIT(op) /* nothing */
2056 # define METER_OP_PAIR(op1,op2) /* nothing */
2057 # define METER_SLOT_OP(op,slot) /* nothing */
2061 * The second dimension is hardcoded at 256 because we know that many bits fit
2062 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
2063 * any particular row.
2065 # define METER_OP_INIT(op) ((op) = JSOP_STOP)
2066 # define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2))
2067 # define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot))
2072 # define METER_REPR(fp) (reprmeter::MeterRepr(fp))
2074 # define METER_REPR(fp) ((void) 0)
2075 #endif /* JS_REPRMETER */
2078 * Threaded interpretation via computed goto appears to be well-supported by
2079 * GCC 3 and higher. IBM's C compiler when run with the right options (e.g.,
2080 * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler.
2081 * Currently it's broken for JS_VERSION < 160, though this isn't worth fixing.
2082 * Add your compiler support macros here.
2084 #ifndef JS_THREADED_INTERP
2085 # if JS_VERSION >= 160 && ( \
2087 (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) || \
2088 __SUNPRO_C >= 0x570)
2089 # define JS_THREADED_INTERP 1
2091 # define JS_THREADED_INTERP 0
2096 * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
2097 * single-thread DEBUG js shell testing to verify property cache hits.
2099 #if defined DEBUG && !defined JS_THREADSAFE
2101 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
2103 if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
2110 AssertValidPropertyCacheHit(JSContext
*cx
, JSScript
*script
, JSFrameRegs
& regs
,
2111 ptrdiff_t pcoff
, JSObject
*start
, JSObject
*found
,
2112 PropertyCacheEntry
*entry
)
2114 uint32 sample
= cx
->runtime
->gcNumber
;
2118 GET_ATOM_FROM_BYTECODE(script
, regs
.pc
, pcoff
, atom
);
2120 atom
= cx
->runtime
->atomState
.lengthAtom
;
2122 JSObject
*obj
, *pobj
;
2126 if (JOF_OPMODE(*regs
.pc
) == JOF_NAME
) {
2127 ok
= js_FindProperty(cx
, ATOM_TO_JSID(atom
), &obj
, &pobj
, &prop
);
2130 ok
= js_LookupProperty(cx
, obj
, ATOM_TO_JSID(atom
), &pobj
, &prop
);
2134 if (cx
->runtime
->gcNumber
!= sample
|| entry
->vshape() != pobj
->shape()) {
2135 pobj
->dropProperty(cx
, prop
);
2139 JS_ASSERT(pobj
== found
);
2141 JSScopeProperty
*sprop
= (JSScopeProperty
*) prop
;
2142 if (entry
->vword
.isSlot()) {
2143 JS_ASSERT(entry
->vword
.toSlot() == sprop
->slot
);
2144 JS_ASSERT(!sprop
->isMethod());
2145 } else if (entry
->vword
.isSprop()) {
2146 JS_ASSERT(entry
->vword
.toSprop() == sprop
);
2147 JS_ASSERT_IF(sprop
->isMethod(),
2148 sprop
->methodValue() == pobj
->lockedGetSlot(sprop
->slot
));
2151 JS_ASSERT(entry
->vword
.isObject());
2152 JS_ASSERT(!entry
->vword
.isNull());
2153 JS_ASSERT(pobj
->scope()->brandedOrHasMethodBarrier());
2154 JS_ASSERT(sprop
->hasDefaultGetterOrIsMethod());
2155 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop
, pobj
->scope()));
2156 v
= pobj
->lockedGetSlot(sprop
->slot
);
2157 JS_ASSERT(VALUE_IS_FUNCTION(cx
, v
));
2158 JS_ASSERT(entry
->vword
.toObject() == JSVAL_TO_OBJECT(v
));
2160 if (sprop
->isMethod()) {
2161 JS_ASSERT(js_CodeSpec
[*regs
.pc
].format
& JOF_CALLOP
);
2162 JS_ASSERT(sprop
->methodValue() == v
);
2166 pobj
->dropProperty(cx
, prop
);
2171 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
2175  * Ensure that the interpreter switch can close call-bytecode cases in the
2176 * same way as non-call bytecodes.
2178 JS_STATIC_ASSERT(JSOP_NAME_LENGTH
== JSOP_CALLNAME_LENGTH
);
2179 JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH
== JSOP_CALLGVAR_LENGTH
);
2180 JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH
== JSOP_CALLUPVAR_LENGTH
);
2181 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH
== JSOP_CALLUPVAR_DBG_LENGTH
);
2182 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH
== JSOP_GETUPVAR_LENGTH
);
2183 JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH
== JSOP_CALLDSLOT_LENGTH
);
2184 JS_STATIC_ASSERT(JSOP_GETARG_LENGTH
== JSOP_CALLARG_LENGTH
);
2185 JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH
== JSOP_CALLLOCAL_LENGTH
);
2186 JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH
== JSOP_CALLXMLNAME_LENGTH
);
2189 * Same for debuggable flat closures defined at top level in another function
2190 * or program fragment.
2192 JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH
== JSOP_DEFFUN_DBGFC_LENGTH
);
2195 * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but
2196 * remain distinct for the decompiler. Likewise for JSOP_INIT{PROP,METHOD}.
2198 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH
== JSOP_SETPROP_LENGTH
);
2199 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH
== JSOP_SETMETHOD_LENGTH
);
2200 JS_STATIC_ASSERT(JSOP_INITPROP_LENGTH
== JSOP_INITMETHOD_LENGTH
);
2202 /* See TRY_BRANCH_AFTER_COND. */
2203 JS_STATIC_ASSERT(JSOP_IFNE_LENGTH
== JSOP_IFEQ_LENGTH
);
2204 JS_STATIC_ASSERT(JSOP_IFNE
== JSOP_IFEQ
+ 1);
2206 /* For the fastest case under JSOP_INCNAME, etc. */
2207 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH
== JSOP_DECNAME_LENGTH
);
2208 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH
== JSOP_NAMEINC_LENGTH
);
2209 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH
== JSOP_NAMEDEC_LENGTH
);
2212 # define ABORT_RECORDING(cx, reason) \
2214 if (TRACE_RECORDER(cx)) \
2215 AbortRecording(cx, reason); \
2218 # define ABORT_RECORDING(cx, reason) ((void) 0)
2222 * Inline fast paths for iteration. js_IteratorMore and js_IteratorNext handle
2223 * all cases, but we inline the most frequently taken paths here.
2226 IteratorMore(JSContext
*cx
, JSObject
*iterobj
, JSBool
*cond
, jsval
*rval
)
2228 if (iterobj
->getClass() == &js_IteratorClass
.base
) {
2229 NativeIterator
*ni
= (NativeIterator
*) iterobj
->getPrivate();
2230 *cond
= (ni
->props_cursor
< ni
->props_end
);
2232 if (!js_IteratorMore(cx
, iterobj
, rval
))
2234 *cond
= (*rval
== JSVAL_TRUE
);
2240 IteratorNext(JSContext
*cx
, JSObject
*iterobj
, jsval
*rval
)
2242 if (iterobj
->getClass() == &js_IteratorClass
.base
) {
2243 NativeIterator
*ni
= (NativeIterator
*) iterobj
->getPrivate();
2244 JS_ASSERT(ni
->props_cursor
< ni
->props_end
);
2245 *rval
= *ni
->props_cursor
;
2246 if (JSVAL_IS_STRING(*rval
) || (ni
->flags
& JSITER_FOREACH
)) {
2250 /* Take the slow path if we have to stringify a numeric property name. */
2252 return js_IteratorNext(cx
, iterobj
, rval
);
2255 JS_REQUIRES_STACK JSBool
2256 js_Interpret(JSContext
*cx
)
2259 TraceVisStateObj
tvso(cx
, S_INTERP
);
2265 uintN inlineCallCount
;
2267 JSVersion currentVersion
, originalVersion
;
2269 JSObject
*obj
, *obj2
, *parent
;
2272 jsbytecode
*endpc
, *pc2
;
2276 uintN argc
, attrs
, flags
;
2278 jsval
*vp
, lval
, rval
, ltmp
, rtmp
;
2281 JSScopeProperty
*sprop
;
2282 JSString
*str
, *str2
;
2288 jsint low
, high
, off
, npairs
;
2290 JSPropertyOp getter
, setter
;
2291 JSAutoResolveFlags
rf(cx
, JSRESOLVE_INFER
);
2295 * We call this macro from BEGIN_CASE in threaded interpreters,
2296 * and before entering the switch in non-threaded interpreters.
2297 * However, reaching such points doesn't mean we've actually
2298 * fetched an OP from the instruction stream: some opcodes use
2299 * 'op=x; DO_OP()' to let another opcode's implementation finish
2300 * their work, and many opcodes share entry points with a run of
2301 * consecutive BEGIN_CASEs.
2303 * Take care to trace OP only when it is the opcode fetched from
2304 * the instruction stream, so the trace matches what one would
2305 * expect from looking at the code. (We do omit POPs after SETs;
2306 * unfortunate, but not worth fixing.)
2308 # define TRACE_OPCODE(OP) JS_BEGIN_MACRO \
2309 if (JS_UNLIKELY(cx->tracefp != NULL) && \
2311 js_TraceOpcode(cx); \
2314 # define TRACE_OPCODE(OP) ((void) 0)
2317 #if JS_THREADED_INTERP
2318 static void *const normalJumpTable
[] = {
2319 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2320 JS_EXTENSION &&L_##op,
2321 # include "jsopcode.tbl"
2325 static void *const interruptJumpTable
[] = {
2326 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2327 JS_EXTENSION &&interrupt,
2328 # include "jsopcode.tbl"
2332 register void * const *jumpTable
= normalJumpTable
;
2334 METER_OP_INIT(op
); /* to nullify first METER_OP_PAIR */
2336 # define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable))
2339 # define CHECK_RECORDER() \
2340 JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable)
2342 # define CHECK_RECORDER() ((void)0)
2345 # define DO_OP() JS_BEGIN_MACRO \
2347 JS_EXTENSION_(goto *jumpTable[op]); \
2349 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2350 METER_OP_PAIR(op, JSOp(regs.pc[n])); \
2351 op = (JSOp) *(regs.pc += (n)); \
2356 # define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER();
2357 # define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH);
2358 # define END_VARLEN_CASE DO_NEXT_OP(len);
2359 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \
2360 JS_ASSERT(js_CodeSpec[OP].length == 1); \
2361 op = (JSOp) *++regs.pc; \
2364 # define END_EMPTY_CASES
2366 #else /* !JS_THREADED_INTERP */
2368 register intN switchMask
= 0;
2371 # define ENABLE_INTERRUPTS() ((void) (switchMask = -1))
2374 # define CHECK_RECORDER() \
2375 JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1)
2377 # define CHECK_RECORDER() ((void)0)
2380 # define DO_OP() goto do_op
2381 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2382 JS_ASSERT((n) == len); \
2386 # define BEGIN_CASE(OP) case OP: CHECK_RECORDER();
2387 # define END_CASE(OP) END_CASE_LEN(OP##_LENGTH)
2388 # define END_CASE_LEN(n) END_CASE_LENX(n)
2389 # define END_CASE_LENX(n) END_CASE_LEN##n
2392 * To share the code for all len == 1 cases we use the specialized label with
2393 * code that falls through to advance_pc: .
2395 # define END_CASE_LEN1 goto advance_pc_by_one;
2396 # define END_CASE_LEN2 len = 2; goto advance_pc;
2397 # define END_CASE_LEN3 len = 3; goto advance_pc;
2398 # define END_CASE_LEN4 len = 4; goto advance_pc;
2399 # define END_CASE_LEN5 len = 5; goto advance_pc;
2400 # define END_VARLEN_CASE goto advance_pc;
2401 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
2402 # define END_EMPTY_CASES goto advance_pc_by_one;
2404 #endif /* !JS_THREADED_INTERP */
2406 /* Check for too deep of a native thread stack. */
2407 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
2411 /* Set registerized frame pointer and derived script pointer. */
2413 script
= fp
->script
;
2414 JS_ASSERT(!script
->isEmpty());
2415 JS_ASSERT(script
->length
> 1);
2417 /* Count of JS function calls that nest in this C js_Interpret frame. */
2418 inlineCallCount
= 0;
2421 * Initialize the index segment register used by LOAD_ATOM and
2422 * GET_FULL_INDEX macros below. As a register we use a pointer based on
2423 * the atom map to turn frequently executed LOAD_ATOM into simple array
2424 * access. For less frequent object and regexp loads we have to recover
2425 * the segment from atoms pointer first.
2427 atoms
= script
->atomMap
.vector
;
2429 #define LOAD_ATOM(PCOFF) \
2431 JS_ASSERT(fp->imacpc \
2432 ? atoms == COMMON_ATOMS_START(&rt->atomState) && \
2433 GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \
2434 : (size_t)(atoms - script->atomMap.vector) < \
2435 (size_t)(script->atomMap.length - \
2436 GET_INDEX(regs.pc + PCOFF))); \
2437 atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \
2440 #define GET_FULL_INDEX(PCOFF) \
2441 (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
2443 #define LOAD_OBJECT(PCOFF) \
2444 (obj = script->getObject(GET_FULL_INDEX(PCOFF)))
2446 #define LOAD_FUNCTION(PCOFF) \
2447 (fun = script->getFunction(GET_FULL_INDEX(PCOFF)))
2452 #if JS_THREADED_INTERP
2453 #define MONITOR_BRANCH_TRACEVIS \
2455 if (jumpTable != interruptJumpTable) \
2456 EnterTraceVisState(cx, S_RECORD, R_NONE); \
2458 #else /* !JS_THREADED_INTERP */
2459 #define MONITOR_BRANCH_TRACEVIS \
2461 EnterTraceVisState(cx, S_RECORD, R_NONE); \
2465 #define MONITOR_BRANCH_TRACEVIS
2468 #define RESTORE_INTERP_VARS() \
2471 script = fp->script; \
2472 atoms = FrameAtomBase(cx, fp); \
2473 currentVersion = (JSVersion) script->version; \
2474 JS_ASSERT(fp->regs == ®s); \
2477 #define MONITOR_BRANCH(reason) \
2479 if (TRACING_ENABLED(cx)) { \
2480 MonitorResult r = MonitorLoopEdge(cx, inlineCallCount, reason); \
2481 if (r == MONITOR_RECORDING) { \
2482 JS_ASSERT(TRACE_RECORDER(cx)); \
2483 MONITOR_BRANCH_TRACEVIS; \
2484 ENABLE_INTERRUPTS(); \
2486 RESTORE_INTERP_VARS(); \
2487 JS_ASSERT_IF(cx->throwing, r == MONITOR_ERROR); \
2488 if (r == MONITOR_ERROR) \
2493 #else /* !JS_TRACER */
2495 #define MONITOR_BRANCH(reason) ((void) 0)
2497 #endif /* !JS_TRACER */
2500 * Prepare to call a user-supplied branch handler, and abort the script
2501 * if it returns false.
2503 #define CHECK_BRANCH() \
2505 if (!JS_CHECK_OPERATION_LIMIT(cx)) \
2509 #ifndef TRACE_RECORDER
2510 #define TRACE_RECORDER(cx) (false)
2516 op = (JSOp) *regs.pc; \
2519 if (op == JSOP_NOP) { \
2520 if (TRACE_RECORDER(cx)) { \
2521 MONITOR_BRANCH(Record_Branch); \
2522 op = (JSOp) *regs.pc; \
2524 op = (JSOp) *++regs.pc; \
2526 } else if (op == JSOP_TRACE) { \
2527 MONITOR_BRANCH(Record_Branch); \
2528 op = (JSOp) *regs.pc; \
2534 MUST_FLOW_THROUGH("exit");
2538 * Optimized Get and SetVersion for proper script language versioning.
2540 * If any native method or JSClass/JSObjectOps hook calls js_SetVersion
2541 * and changes cx->version, the effect will "stick" and we will stop
2542 * maintaining currentVersion. This is relied upon by testsuites, for
2543 * the most part -- web browsers select version before compiling and not
2546 currentVersion
= (JSVersion
) script
->version
;
2547 originalVersion
= (JSVersion
) cx
->version
;
2548 if (currentVersion
!= originalVersion
)
2549 js_SetVersion(cx
, currentVersion
);
2551 /* Update the static-link display. */
2552 if (script
->staticLevel
< JS_DISPLAY_SIZE
) {
2553 JSStackFrame
**disp
= &cx
->display
[script
->staticLevel
];
2554 fp
->displaySave
= *disp
;
2558 # define CHECK_INTERRUPT_HANDLER() \
2560 if (cx->debugHooks->interruptHook) \
2561 ENABLE_INTERRUPTS(); \
2565 * Load the debugger's interrupt hook here and after calling out to native
2566 * functions (but not to getters, setters, or other native hooks), so we do
2567 * not have to reload it each time through the interpreter loop -- we hope
2568 * the compiler can keep it in a register when it is non-null.
2570 CHECK_INTERRUPT_HANDLER();
2572 #if !JS_HAS_GENERATORS
2573 JS_ASSERT(!fp
->regs
);
2575 /* Initialize the pc and sp registers unless we're resuming a generator. */
2576 if (JS_LIKELY(!fp
->regs
)) {
2578 ASSERT_NOT_THROWING(cx
);
2579 regs
.pc
= script
->code
;
2580 regs
.sp
= StackBase(fp
);
2582 #if JS_HAS_GENERATORS
2586 JS_ASSERT(fp
->flags
& JSFRAME_GENERATOR
);
2587 gen
= FRAME_TO_GENERATOR(fp
);
2588 JS_ASSERT(fp
->regs
== &gen
->savedRegs
);
2589 regs
= gen
->savedRegs
;
2591 JS_ASSERT((size_t) (regs
.pc
- script
->code
) <= script
->length
);
2592 JS_ASSERT((size_t) (regs
.sp
- StackBase(fp
)) <= StackDepth(script
));
2595 * To support generator_throw and to catch ignored exceptions,
2596 * fail if cx->throwing is set.
2599 #ifdef DEBUG_NOT_THROWING
2600 if (cx
->exception
!= JSVAL_ARETURN
) {
2601 printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n",
2602 (unsigned long) cx
->exception
);
2608 #endif /* JS_HAS_GENERATORS */
2612 * We cannot reenter the interpreter while recording; wait to abort until
2613 * after cx->fp->regs is set.
2615 if (TRACE_RECORDER(cx
))
2616 AbortRecording(cx
, "attempt to reenter interpreter while recording");
2620 * It is important that "op" be initialized before calling DO_OP because
2621 * it is possible for "op" to be specially assigned during the normal
2622 * processing of an opcode while looping. We rely on DO_NEXT_OP to manage
2623 * "op" correctly in all other cases.
2628 #if JS_THREADED_INTERP
2630 * This is a loop, but it does not look like a loop. The loop-closing
2631 * jump is distributed throughout goto *jumpTable[op] inside of DO_OP.
2632 * When interrupts are enabled, jumpTable is set to interruptJumpTable
2633 * where all jumps point to the interrupt label. The latter, after
2634 * calling the interrupt handler, dispatches through normalJumpTable to
2635 * continue the normal bytecode processing.
2638 #else /* !JS_THREADED_INTERP */
2641 JS_ASSERT(js_CodeSpec
[op
].length
== 1);
2645 op
= (JSOp
) *regs
.pc
;
2650 switchOp
= intN(op
) | switchMask
;
2655 /********************** Here we include the operations ***********************/
2656 #include "jsops.cpp"
2657 /*****************************************************************************/
2659 #if !JS_THREADED_INTERP
2667 JS_snprintf(numBuf
, sizeof numBuf
, "%d", op
);
2668 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
2669 JSMSG_BAD_BYTECODE
, numBuf
);
2673 #if !JS_THREADED_INTERP
2676 #endif /* !JS_THREADED_INTERP */
2680 if (fp
->imacpc
&& cx
->throwing
) {
2681 // Handle other exceptions as if they came from the imacro-calling pc.
2682 regs
.pc
= fp
->imacpc
;
2684 atoms
= script
->atomMap
.vector
;
2688 JS_ASSERT((size_t)((fp
->imacpc
? fp
->imacpc
: regs
.pc
) - script
->code
) < script
->length
);
2692 * This abort could be weakened to permit tracing through exceptions that
2693 * are thrown and caught within a loop, with the co-operation of the tracer.
2694 * For now just bail on any sign of trouble.
2696 if (TRACE_RECORDER(cx
))
2697 AbortRecording(cx
, "error or exception while recording");
2700 if (!cx
->throwing
) {
2701 /* This is an error, not a catchable exception, quit the frame ASAP. */
2704 JSThrowHook handler
;
2705 JSTryNote
*tn
, *tnlimit
;
2708 /* Call debugger throw hook if set. */
2709 handler
= cx
->debugHooks
->throwHook
;
2711 switch (handler(cx
, script
, regs
.pc
, &rval
,
2712 cx
->debugHooks
->throwHookData
)) {
2714 cx
->throwing
= JS_FALSE
;
2717 cx
->throwing
= JS_FALSE
;
2722 cx
->exception
= rval
;
2723 case JSTRAP_CONTINUE
:
2726 CHECK_INTERRUPT_HANDLER();
2730 * Look for a try block in script that can catch this exception.
2732 if (script
->trynotesOffset
== 0)
2735 offset
= (uint32
)(regs
.pc
- script
->main
);
2736 tn
= script
->trynotes()->vector
;
2737 tnlimit
= tn
+ script
->trynotes()->length
;
2739 if (offset
- tn
->start
>= tn
->length
)
2743 * We have a note that covers the exception pc but we must check
2744 * whether the interpreter has already executed the corresponding
2745 * handler. This is possible when the executed bytecode
2746 * implements break or return from inside a for-in loop.
2748 * In this case the emitter generates additional [enditer] and
2749 * [gosub] opcodes to close all outstanding iterators and execute
2750 * the finally blocks. If such an [enditer] throws an exception,
2751 * its pc can still be inside several nested for-in loops and
2752 * try-finally statements even if we have already closed the
2753 * corresponding iterators and invoked the finally blocks.
2755 * To address this, we make [enditer] always decrease the stack
2756 * even when its implementation throws an exception. Thus already
2757 * executed [enditer] and [gosub] opcodes will have try notes
2758 * with the stack depth exceeding the current one and this
2759 * condition is what we use to filter them out.
2761 if (tn
->stackDepth
> regs
.sp
- StackBase(fp
))
2765 * Set pc to the first bytecode after the try note to point
2766 * to the beginning of catch or finally or to [enditer] closing
2769 regs
.pc
= (script
)->main
+ tn
->start
+ tn
->length
;
2771 ok
= js_UnwindScope(cx
, fp
, tn
->stackDepth
, JS_TRUE
);
2772 JS_ASSERT(fp
->regs
->sp
== StackBase(fp
) + tn
->stackDepth
);
2775 * Restart the handler search with updated pc and stack depth
2776 * to properly notify the debugger.
2783 JS_ASSERT(js_GetOpcode(cx
, fp
->script
, regs
.pc
) == JSOP_ENTERBLOCK
);
2785 #if JS_HAS_GENERATORS
2786 /* Catch cannot intercept the closing of a generator. */
2787 if (JS_UNLIKELY(cx
->exception
== JSVAL_ARETURN
))
2792 * Don't clear cx->throwing to save cx->exception from GC
2793 * until it is pushed to the stack via [exception] in the
2801 * Push (true, exception) pair for finally to indicate that
2802 * [retsub] should rethrow the exception.
2805 PUSH(cx
->exception
);
2806 cx
->throwing
= JS_FALSE
;
2811 /* This is similar to JSOP_ENDITER in the interpreter loop. */
2812 JS_ASSERT(js_GetOpcode(cx
, fp
->script
, regs
.pc
) == JSOP_ENDITER
);
2813 AutoValueRooter
tvr(cx
, cx
->exception
);
2814 cx
->throwing
= false;
2815 ok
= js_CloseIterator(cx
, regs
.sp
[-1]);
2819 cx
->throwing
= true;
2820 cx
->exception
= tvr
.value();
2823 } while (++tn
!= tnlimit
);
2827 * Propagate the exception or error to the caller unless the exception
2828 * is an asynchronous return from a generator.
2831 #if JS_HAS_GENERATORS
2832 if (JS_UNLIKELY(cx
->throwing
&& cx
->exception
== JSVAL_ARETURN
)) {
2833 cx
->throwing
= JS_FALSE
;
2835 fp
->rval
= JSVAL_VOID
;
2842 * Unwind the scope making sure that ok stays false even when UnwindScope
2845 * When a trap handler returns JSTRAP_RETURN, we jump here with ok set to
2846 * true bypassing any finally blocks.
2848 ok
&= js_UnwindScope(cx
, fp
, 0, ok
|| cx
->throwing
);
2849 JS_ASSERT(regs
.sp
== StackBase(fp
));
2852 cx
->tracePrevPc
= NULL
;
2855 if (inlineCallCount
)
2860 * At this point we are inevitably leaving an interpreted function or a
2861 * top-level script, and returning to one of:
2862 * (a) an "out of line" call made through js_Invoke;
2863 * (b) a js_Execute activation;
2864 * (c) a generator (SendToGenerator, jsiter.c).
2866 * We must not be in an inline frame. The check above ensures that for the
2867 * error case and for a normal return, the code jumps directly to parent's
2870 JS_ASSERT(inlineCallCount
== 0);
2871 JS_ASSERT(fp
->regs
== ®s
);
2873 if (TRACE_RECORDER(cx
))
2874 AbortRecording(cx
, "recording out of js_Interpret");
2876 #if JS_HAS_GENERATORS
2877 if (JS_UNLIKELY(fp
->flags
& JSFRAME_YIELDING
)) {
2880 gen
= FRAME_TO_GENERATOR(fp
);
2881 gen
->savedRegs
= regs
;
2882 gen
->frame
.regs
= &gen
->savedRegs
;
2884 #endif /* JS_HAS_GENERATORS */
2886 JS_ASSERT(!fp
->blockChain
);
2887 JS_ASSERT(!js_IsActiveWithOrBlock(cx
, fp
->scopeChain
, 0));
2891 /* Undo the remaining effects committed on entry to js_Interpret. */
2892 if (script
->staticLevel
< JS_DISPLAY_SIZE
)
2893 cx
->display
[script
->staticLevel
] = fp
->displaySave
;
2894 if (cx
->version
== currentVersion
&& currentVersion
!= originalVersion
)
2895 js_SetVersion(cx
, originalVersion
);
2902 const char *printable
;
2904 printable
= js_AtomToPrintableString(cx
, atom
);
2906 js_ReportIsNotDefined(cx
, printable
);
2911 #endif /* !defined jsinvoke_cpp___ */