Bug 559408: Arena pool macros to methods. (r=gal)
[mozilla-central.git] / js / src / jsinterp.cpp
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
25 * Contributor(s):
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JavaScript bytecode interpreter.
44 #include <stdio.h>
45 #include <string.h>
46 #include <math.h>
47 #include "jstypes.h"
48 #include "jsstdint.h"
49 #include "jsarena.h" /* Added by JSIFY */
50 #include "jsutil.h" /* Added by JSIFY */
51 #include "jsprf.h"
52 #include "jsapi.h"
53 #include "jsarray.h"
54 #include "jsatom.h"
55 #include "jsbool.h"
56 #include "jscntxt.h"
57 #include "jsdate.h"
58 #include "jsversion.h"
59 #include "jsdbgapi.h"
60 #include "jsfun.h"
61 #include "jsgc.h"
62 #include "jsinterp.h"
63 #include "jsiter.h"
64 #include "jslock.h"
65 #include "jsnum.h"
66 #include "jsobj.h"
67 #include "jsopcode.h"
68 #include "jspropertycache.h"
69 #include "jsscan.h"
70 #include "jsscope.h"
71 #include "jsscript.h"
72 #include "jsstr.h"
73 #include "jsstaticcheck.h"
74 #include "jstracer.h"
75 #include "jslibmath.h"
76 #include "jsvector.h"
78 #include "jsatominlines.h"
79 #include "jspropertycacheinlines.h"
80 #include "jsobjinlines.h"
81 #include "jsscopeinlines.h"
82 #include "jsscriptinlines.h"
83 #include "jsstrinlines.h"
84 #include "jsdtracef.h"
86 #if JS_HAS_XML_SUPPORT
87 #include "jsxml.h"
88 #endif
90 #include "jsautooplen.h"
92 using namespace js;
94 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
95 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
98 * Check if the current arena has enough space to fit nslots after sp and, if
99 * so, reserve the necessary space.
101 static JS_REQUIRES_STACK JSBool
102 AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
104 jsval *avail = (jsval *) cx->stackPool.getCurrent()->getAvail();
105 JS_ASSERT((jsval *) cx->stackPool.getCurrent()->getBase() <= sp);
106 JS_ASSERT(sp <= avail);
108 uintN surplus = avail - sp;
109 jsval *sp2;
111 if (nslots <= surplus)
112 return JS_TRUE;
115      * No room before current->avail; check if the arena has enough space to
116 * fit the missing slots before the limit.
118 if (nslots > (size_t) ((jsval *) cx->stackPool.getCurrent()->getLimit() - sp))
119 return JS_FALSE;
121 cx->stackPool.allocateCast<jsval *>(sp2, (nslots - surplus) * sizeof(jsval));
122 JS_ASSERT(sp2 == sp + surplus);
123 return JS_TRUE;
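/*
 * Note on the stackPool calls above and below: allocateCast<T>(p, nb) carves
 * nb bytes out of the arena pool and stores the result in p, cast to T,
 * leaving p null on failure (hence the !timestamp and !sp checks below). As
 * an illustrative correspondence only, the older jsarena.h macro spelling
 *
 *   JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool, nb);
 *
 * is equivalent to
 *
 *   cx->stackPool.allocateCast<jsval *>(sp2, nb);
 */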
126 JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval *
127 js_AllocRawStack(JSContext *cx, uintN nslots, void **markp)
129 jsval *sp;
131 JS_ASSERT(nslots != 0);
132 JS_ASSERT_NOT_ON_TRACE(cx);
134 if (!cx->stackPool.getSecond()) {
135 int64 *timestamp;
137 cx->stackPool.allocateCast<int64 *>(timestamp, sizeof *timestamp);
138 if (!timestamp) {
139 js_ReportOutOfScriptQuota(cx);
140 return NULL;
142 *timestamp = JS_Now();
145 if (markp)
146 *markp = cx->stackPool.getMark();
147 cx->stackPool.allocateCast<jsval *>(sp, nslots * sizeof(jsval));
148 if (!sp)
149 js_ReportOutOfScriptQuota(cx);
150 return sp;
153 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
154 js_FreeRawStack(JSContext *cx, void *mark)
156 cx->stackPool.release(mark);
159 JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
160 js_AllocStack(JSContext *cx, uintN nslots, void **markp)
162 jsval *sp;
163 JSStackHeader *sh;
165 /* Callers don't check for zero nslots: we do to avoid empty segments. */
166 if (nslots == 0) {
167 *markp = NULL;
168 return (jsval *) cx->stackPool.getMark();
171 /* Allocate 2 extra slots for the stack segment header we'll likely need. */
172 sp = js_AllocRawStack(cx, 2 + nslots, markp);
173 if (!sp)
174 return NULL;
176 /* Try to avoid another header if we can piggyback on the last segment. */
177 JSArena *a = cx->stackPool.getCurrent();
178 sh = cx->stackHeaders;
179 if (sh && JS_STACK_SEGMENT(sh) + sh->nslots == sp) {
180 /* Extend the last stack segment, give back the 2 header slots. */
181 sh->nslots += nslots;
182 a->setAvail(a->getAvail() - 2 * sizeof(jsval));
183 } else {
185 * Need a new stack segment, so allocate and push a stack segment
186 * header from the 2 extra slots.
188 sh = (JSStackHeader *)sp;
189 sh->nslots = nslots;
190 sh->down = cx->stackHeaders;
191 cx->stackHeaders = sh;
192 sp += 2;
196 * Store JSVAL_NULL using memset, to let compilers optimize as they see
197 * fit, in case a caller allocates and pushes GC-things one by one, which
198 * could nest a last-ditch GC that will scan this segment.
200 memset(sp, 0, nslots * sizeof(jsval));
201 return sp;
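/*
 * Illustrative sketch of the segment layout assumed here (assuming
 * JS_STACK_SEGMENT(sh) points just past the two header slots):
 *
 *   [ sh->nslots | sh->down ][ slot 0 | slot 1 | ... | slot nslots-1 ]
 *     ^ JSStackHeader (2 jsval-sized slots)
 *
 * When the raw allocation for the next js_AllocStack call lands exactly at
 * JS_STACK_SEGMENT(sh) + sh->nslots, the new slots extend the existing
 * segment and the two just-allocated header slots are handed back to the
 * arena via setAvail above.
 */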
204 JS_REQUIRES_STACK JS_FRIEND_API(void)
205 js_FreeStack(JSContext *cx, void *mark)
207 JSStackHeader *sh;
208 jsuword slotdiff;
210 /* Check for zero nslots allocation special case. */
211 if (!mark)
212 return;
214 /* We can assert because js_FreeStack always balances js_AllocStack. */
215 sh = cx->stackHeaders;
216 JS_ASSERT(sh);
218 /* If mark is in the current segment, reduce sh->nslots, else pop sh. */
219 slotdiff = JS_UPTRDIFF(mark, JS_STACK_SEGMENT(sh)) / sizeof(jsval);
220 if (slotdiff < (jsuword)sh->nslots)
221 sh->nslots = slotdiff;
222 else
223 cx->stackHeaders = sh->down;
225 /* Release the stackPool space allocated since mark was set. */
226 cx->stackPool.release(mark);
229 JSObject *
230 js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
232 JSObject *sharedBlock = fp->blockChain;
234 if (!sharedBlock) {
236 * Don't force a call object for a lightweight function call, but do
237 * insist that there is a call object for a heavyweight function call.
239 JS_ASSERT(!fp->fun ||
240 !(fp->fun->flags & JSFUN_HEAVYWEIGHT) ||
241 fp->callobj);
242 JS_ASSERT(fp->scopeChain);
243 return fp->scopeChain;
246 /* We don't handle cloning blocks on trace. */
247 LeaveTrace(cx);
250 * We have one or more lexical scopes to reflect into fp->scopeChain, so
251 * make sure there's a call object at the current head of the scope chain,
252 * if this frame is a call frame.
254 * Also, identify the innermost compiler-allocated block we needn't clone.
256 JSObject *limitBlock, *limitClone;
257 if (fp->fun && !fp->callobj) {
258 JS_ASSERT(fp->scopeChain->getClass() != &js_BlockClass ||
259 fp->scopeChain->getPrivate() != fp);
260 if (!js_GetCallObject(cx, fp))
261 return NULL;
263 /* We know we must clone everything on blockChain. */
264 limitBlock = limitClone = NULL;
265 } else {
267 * scopeChain includes all blocks whose static scope we're within that
268 * have already been cloned. Find the innermost such block. Its
269 * prototype should appear on blockChain; we'll clone blockChain up
270 * to, but not including, that prototype.
272 limitClone = fp->scopeChain;
273 while (limitClone->getClass() == &js_WithClass)
274 limitClone = limitClone->getParent();
275 JS_ASSERT(limitClone);
278 * It may seem like we don't know enough about limitClone to be able
279 * to just grab its prototype as we do here, but it's actually okay.
281 * If limitClone is a block object belonging to this frame, then its
282 * prototype is the innermost entry in blockChain that we have already
283 * cloned, and is thus the place to stop when we clone below.
285 * Otherwise, there are no blocks for this frame on scopeChain, and we
286 * need to clone the whole blockChain. In this case, limitBlock can
287 * point to any object known not to be on blockChain, since we simply
288 * loop until we hit limitBlock or NULL. If limitClone is a block, it
289 * isn't a block from this function, since blocks can't be nested
290 * within themselves on scopeChain (recursion is dynamic nesting, not
291 * static nesting). If limitClone isn't a block, its prototype won't
292 * be a block either. So we can just grab limitClone's prototype here
293 * regardless of its type or which frame it belongs to.
295 limitBlock = limitClone->getProto();
297 /* If the innermost block has already been cloned, we are done. */
298 if (limitBlock == sharedBlock)
299 return fp->scopeChain;
303 * Special-case cloning the innermost block; this doesn't have enough in
304 * common with subsequent steps to include in the loop.
306 * js_CloneBlockObject leaves the clone's parent slot uninitialized. We
307 * populate it below.
309 JSObject *innermostNewChild = js_CloneBlockObject(cx, sharedBlock, fp);
310 if (!innermostNewChild)
311 return NULL;
312 AutoValueRooter tvr(cx, innermostNewChild);
315 * Clone our way towards outer scopes until we reach the innermost
316 * enclosing function, or the innermost block we've already cloned.
318 JSObject *newChild = innermostNewChild;
319 for (;;) {
320 JS_ASSERT(newChild->getProto() == sharedBlock);
321 sharedBlock = sharedBlock->getParent();
323 /* Sometimes limitBlock will be NULL, so check that first. */
324 if (sharedBlock == limitBlock || !sharedBlock)
325 break;
327 /* As in the call above, we don't know the real parent yet. */
328 JSObject *clone
329 = js_CloneBlockObject(cx, sharedBlock, fp);
330 if (!clone)
331 return NULL;
333 newChild->setParent(clone);
334 newChild = clone;
336 newChild->setParent(fp->scopeChain);
340 * If we found a limit block belonging to this frame, then we should have
341 * found it in blockChain.
343 JS_ASSERT_IF(limitBlock &&
344 limitBlock->getClass() == &js_BlockClass &&
345 limitClone->getPrivate() == fp,
346 sharedBlock);
348 /* Place our newly cloned blocks at the head of the scope chain. */
349 fp->scopeChain = innermostNewChild;
350 return fp->scopeChain;
353 JSBool
354 js_GetPrimitiveThis(JSContext *cx, jsval *vp, JSClass *clasp, jsval *thisvp)
356 jsval v;
357 JSObject *obj;
359 v = vp[1];
360 if (JSVAL_IS_OBJECT(v)) {
361 obj = JS_THIS_OBJECT(cx, vp);
362 if (!JS_InstanceOf(cx, obj, clasp, vp + 2))
363 return JS_FALSE;
364 v = obj->getPrimitiveThis();
366 *thisvp = v;
367 return JS_TRUE;
370 /* Some objects (e.g., With) delegate 'this' to another object. */
371 static inline JSObject *
372 CallThisObjectHook(JSContext *cx, JSObject *obj, jsval *argv)
374 JSObject *thisp = obj->thisObject(cx);
375 if (!thisp)
376 return NULL;
377 argv[-1] = OBJECT_TO_JSVAL(thisp);
378 return thisp;
382 * ECMA requires "the global object", but in embeddings such as the browser,
383 * which have multiple top-level objects (windows, frames, etc. in the DOM),
384 * we prefer fun's parent. An example that causes this code to run:
386 * // in window w1
387 * function f() { return this }
388 * function g() { return f }
390 * // in window w2
391 * var h = w1.g()
392 * alert(h() == w1)
394 * The alert should display "true".
396 JS_STATIC_INTERPRET JSObject *
397 js_ComputeGlobalThis(JSContext *cx, jsval *argv)
399 JSObject *thisp;
401 if (JSVAL_IS_PRIMITIVE(argv[-2]) ||
402 !JSVAL_TO_OBJECT(argv[-2])->getParent()) {
403 thisp = cx->globalObject;
404 } else {
405 thisp = JSVAL_TO_OBJECT(argv[-2])->getGlobal();
408 return CallThisObjectHook(cx, thisp, argv);
411 static JSObject *
412 ComputeThis(JSContext *cx, jsval *argv)
414 JSObject *thisp;
416 JS_ASSERT(!JSVAL_IS_NULL(argv[-1]));
417 if (!JSVAL_IS_OBJECT(argv[-1])) {
418 if (!js_PrimitiveToObject(cx, &argv[-1]))
419 return NULL;
420 thisp = JSVAL_TO_OBJECT(argv[-1]);
421 return thisp;
424 thisp = JSVAL_TO_OBJECT(argv[-1]);
425 if (thisp->getClass() == &js_CallClass || thisp->getClass() == &js_BlockClass)
426 return js_ComputeGlobalThis(cx, argv);
428 return CallThisObjectHook(cx, thisp, argv);
431 JSObject *
432 js_ComputeThis(JSContext *cx, jsval *argv)
434 JS_ASSERT(argv[-1] != JSVAL_HOLE); // check for SynthesizeFrame poisoning
435 if (JSVAL_IS_NULL(argv[-1]))
436 return js_ComputeGlobalThis(cx, argv);
437 return ComputeThis(cx, argv);
440 #if JS_HAS_NO_SUCH_METHOD
442 const uint32 JSSLOT_FOUND_FUNCTION = JSSLOT_PRIVATE;
443 const uint32 JSSLOT_SAVED_ID = JSSLOT_PRIVATE + 1;
445 JSClass js_NoSuchMethodClass = {
446 "NoSuchMethod",
447 JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS,
448 JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub,
449 JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
450 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
454 * When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of
455 * the base object, we search for the __noSuchMethod__ method in the base.
456 * If it exists, we store the method and the property's id into an object of
457 * NoSuchMethod class and store this object into the callee's stack slot.
458 * Later, js_Invoke will recognise such an object and transfer control to
459 * NoSuchMethod that invokes the method like:
461 * this.__noSuchMethod__(id, args)
463 * where id is the name of the method that this invocation attempted to
464 * call by name, and args is an Array containing this invocation's actual
465 * parameters.
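 *
 * For example (illustrative only), given
 *
 *   obj.__noSuchMethod__ = function (id, args) { return [id, args.length]; };
 *
 * evaluating obj.foo(1, 2) when obj has no callable "foo" invokes the hook
 * and yields ["foo", 2].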
467 JS_STATIC_INTERPRET JSBool
468 js_OnUnknownMethod(JSContext *cx, jsval *vp)
470 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
472 JSObject *obj = JSVAL_TO_OBJECT(vp[1]);
473 jsid id = ATOM_TO_JSID(cx->runtime->atomState.noSuchMethodAtom);
474 AutoValueRooter tvr(cx, JSVAL_NULL);
475 if (!js_GetMethod(cx, obj, id, JSGET_NO_METHOD_BARRIER, tvr.addr()))
476 return false;
477 if (JSVAL_IS_PRIMITIVE(tvr.value())) {
478 vp[0] = tvr.value();
479 } else {
480 #if JS_HAS_XML_SUPPORT
481 /* Extract the function name from function::name qname. */
482 if (!JSVAL_IS_PRIMITIVE(vp[0])) {
483 obj = JSVAL_TO_OBJECT(vp[0]);
484 if (!js_IsFunctionQName(cx, obj, &id))
485 return false;
486 if (id != 0)
487 vp[0] = ID_TO_VALUE(id);
489 #endif
490 obj = NewObjectWithGivenProto(cx, &js_NoSuchMethodClass, NULL, NULL);
491 if (!obj)
492 return false;
493 obj->fslots[JSSLOT_FOUND_FUNCTION] = tvr.value();
494 obj->fslots[JSSLOT_SAVED_ID] = vp[0];
495 vp[0] = OBJECT_TO_JSVAL(obj);
497 return true;
500 static JS_REQUIRES_STACK JSBool
501 NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags)
503 jsval *invokevp;
504 void *mark;
505 JSBool ok;
506 JSObject *obj, *argsobj;
508 invokevp = js_AllocStack(cx, 2 + 2, &mark);
509 if (!invokevp)
510 return JS_FALSE;
512 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[0]));
513 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
514 obj = JSVAL_TO_OBJECT(vp[0]);
515 JS_ASSERT(obj->getClass() == &js_NoSuchMethodClass);
517 invokevp[0] = obj->fslots[JSSLOT_FOUND_FUNCTION];
518 invokevp[1] = vp[1];
519 invokevp[2] = obj->fslots[JSSLOT_SAVED_ID];
520 argsobj = js_NewArrayObject(cx, argc, vp + 2);
521 if (!argsobj) {
522 ok = JS_FALSE;
523 } else {
524 invokevp[3] = OBJECT_TO_JSVAL(argsobj);
525 ok = (flags & JSINVOKE_CONSTRUCT)
526 ? js_InvokeConstructor(cx, 2, JS_TRUE, invokevp)
527 : js_Invoke(cx, 2, invokevp, flags);
528 vp[0] = invokevp[0];
530 js_FreeStack(cx, mark);
531 return ok;
534 #endif /* JS_HAS_NO_SUCH_METHOD */
537 * We check if the function accepts a primitive value as |this|. For that we
538  * use a table that maps a value's tag to the corresponding function flag.
540 JS_STATIC_ASSERT(JSVAL_INT == 1);
541 JS_STATIC_ASSERT(JSVAL_DOUBLE == 2);
542 JS_STATIC_ASSERT(JSVAL_STRING == 4);
543 JS_STATIC_ASSERT(JSVAL_SPECIAL == 6);
545 const uint16 js_PrimitiveTestFlags[] = {
546 JSFUN_THISP_NUMBER, /* INT */
547 JSFUN_THISP_NUMBER, /* DOUBLE */
548 JSFUN_THISP_NUMBER, /* INT */
549 JSFUN_THISP_STRING, /* STRING */
550 JSFUN_THISP_NUMBER, /* INT */
551 JSFUN_THISP_BOOLEAN, /* BOOLEAN */
552 JSFUN_THISP_NUMBER /* INT */
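/*
 * Worked example (assuming the table is indexed by JSVAL_TAG(v) - 1, which
 * the static asserts above suggest): a string-valued |this| has tag
 * JSVAL_STRING == 4, so PRIMITIVE_THIS_TEST consults js_PrimitiveTestFlags[3]
 * == JSFUN_THISP_STRING, and the primitive |this| is passed through
 * unwrapped only for natives declared with that flag (see the
 * PRIMITIVE_THIS_TEST use in js_Invoke below). Odd tags denote tagged ints,
 * hence the repeated JSFUN_THISP_NUMBER entries.
 */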
556 * Find a function reference and its 'this' object implicit first parameter
557 * under argc arguments on cx's stack, and call the function. Push missing
558 * required arguments, allocate declared local variables, and pop everything
559 * when done. Then push the return value.
561 JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
562 js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags)
564 void *mark;
565 CallStack callStack(cx);
566 JSStackFrame frame;
567 jsval *sp, *argv, *newvp;
568 jsval v;
569 JSObject *funobj, *parent;
570 JSBool ok;
571 JSClass *clasp;
572 const JSObjectOps *ops;
573 JSNative native;
574 JSFunction *fun;
575 JSScript *script;
576 uintN nslots, i;
577 uint32 rootedArgsFlag;
578 JSInterpreterHook hook;
579 void *hookData;
580 bool pushCall;
582 JS_ASSERT(argc <= JS_ARGS_LENGTH_MAX);
584 /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */
585 JS_ASSERT((jsval *) cx->stackPool.getCurrent()->getBase() <= vp);
586 JS_ASSERT(vp + 2 + argc <= (jsval *) cx->stackPool.getCurrent()->getAvail());
588 /* Mark the top of stack and load frequently-used registers. */
589 mark = cx->stackPool.getMark();
590 MUST_FLOW_THROUGH("out2");
591 v = *vp;
593 if (JSVAL_IS_PRIMITIVE(v))
594 goto bad;
596 funobj = JSVAL_TO_OBJECT(v);
597 parent = funobj->getParent();
598 clasp = funobj->getClass();
599 if (clasp != &js_FunctionClass) {
600 #if JS_HAS_NO_SUCH_METHOD
601 if (clasp == &js_NoSuchMethodClass) {
602 ok = NoSuchMethod(cx, argc, vp, flags);
603 goto out2;
605 #endif
607 /* Function is inlined, all other classes use object ops. */
608 ops = funobj->map->ops;
610 fun = NULL;
611 script = NULL;
612 nslots = 0;
614 /* Try a call or construct native object op. */
615 if (flags & JSINVOKE_CONSTRUCT) {
616 if (!JSVAL_IS_OBJECT(vp[1])) {
617 ok = js_PrimitiveToObject(cx, &vp[1]);
618 if (!ok)
619 goto out2;
621 native = ops->construct;
622 } else {
623 native = ops->call;
625 if (!native)
626 goto bad;
627 } else {
628 /* Get private data and set derived locals from it. */
629 fun = GET_FUNCTION_PRIVATE(cx, funobj);
630 nslots = FUN_MINARGS(fun);
631 nslots = (nslots > argc) ? nslots - argc : 0;
632 if (FUN_INTERPRETED(fun)) {
633 native = NULL;
634 script = fun->u.i.script;
635 JS_ASSERT(script);
637 if (script->isEmpty()) {
638 if (flags & JSINVOKE_CONSTRUCT) {
639 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
640 *vp = vp[1];
641 } else {
642 *vp = JSVAL_VOID;
644 ok = JS_TRUE;
645 goto out2;
647 } else {
648 native = fun->u.n.native;
649 script = NULL;
650 nslots += fun->u.n.extra;
653 if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
654 /* Handle bound method special case. */
655 vp[1] = OBJECT_TO_JSVAL(parent);
656 } else if (!JSVAL_IS_OBJECT(vp[1])) {
657 JS_ASSERT(!(flags & JSINVOKE_CONSTRUCT));
658 if (PRIMITIVE_THIS_TEST(fun, vp[1]))
659 goto start_call;
663 if (flags & JSINVOKE_CONSTRUCT) {
664 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
665 } else {
667 * We must call js_ComputeThis in case we are not called from the
668 * interpreter, where a prior bytecode has computed an appropriate
669 * |this| already.
671 * But we need to compute |this| eagerly only for so-called "slow"
672 * (i.e., not fast) native functions. Fast natives must use either
673 * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through
674 * the appropriate this-computing bytecode, e.g., JSOP_THIS.
676 if (native && (!fun || !(fun->flags & JSFUN_FAST_NATIVE))) {
677 if (!js_ComputeThis(cx, vp + 2)) {
678 ok = JS_FALSE;
679 goto out2;
681 flags |= JSFRAME_COMPUTED_THIS;
685 start_call:
686 if (native && fun && (fun->flags & JSFUN_FAST_NATIVE)) {
687 #ifdef DEBUG_NOT_THROWING
688 JSBool alreadyThrowing = cx->throwing;
689 #endif
690 JS_ASSERT(nslots == 0);
691 ok = ((JSFastNative) native)(cx, argc, vp);
692 JS_RUNTIME_METER(cx->runtime, nativeCalls);
693 #ifdef DEBUG_NOT_THROWING
694 if (ok && !alreadyThrowing)
695 ASSERT_NOT_THROWING(cx);
696 #endif
697 goto out2;
700 argv = vp + 2;
701 sp = argv + argc;
703 rootedArgsFlag = JSFRAME_ROOTED_ARGV;
704 if (nslots != 0) {
706 * The extra slots required by the function continue with argument
707 * slots. Thus, when the last stack pool arena does not have room to
708 * fit nslots right after sp and AllocateAfterSP fails, we have to copy
709 * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy.
711 if (!AllocateAfterSP(cx, sp, nslots)) {
712 rootedArgsFlag = 0;
713 newvp = js_AllocRawStack(cx, 2 + argc + nslots, NULL);
714 if (!newvp) {
715 ok = JS_FALSE;
716 goto out2;
718 memcpy(newvp, vp, (2 + argc) * sizeof(jsval));
719 argv = newvp + 2;
720 sp = argv + argc;
723 /* Push void to initialize missing args. */
724 i = nslots;
725 do {
726 *sp++ = JSVAL_VOID;
727 } while (--i != 0);
730 /* Allocate space for local variables and stack of interpreted function. */
731 if (script && script->nslots != 0) {
732 if (!AllocateAfterSP(cx, sp, script->nslots)) {
733 /* NB: Discontinuity between argv and slots, stack slots. */
734 sp = js_AllocRawStack(cx, script->nslots, NULL);
735 if (!sp) {
736 ok = JS_FALSE;
737 goto out2;
741 /* Push void to initialize local variables. */
742 for (jsval *end = sp + fun->u.i.nvars; sp != end; ++sp)
743 *sp = JSVAL_VOID;
747 * Initialize the frame.
749 frame.thisv = vp[1];
750 frame.callobj = NULL;
751 frame.argsobj = NULL;
752 frame.script = script;
753 frame.fun = fun;
754 frame.argc = argc;
755 frame.argv = argv;
757 /* Default return value for a constructor is the new object. */
758 frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
759 frame.down = cx->fp;
760 frame.annotation = NULL;
761 frame.scopeChain = NULL; /* set below for real, after cx->fp is set */
762 frame.blockChain = NULL;
763 frame.regs = NULL;
764 frame.imacpc = NULL;
765 frame.slots = NULL;
766 frame.flags = flags | rootedArgsFlag;
767 frame.displaySave = NULL;
769 MUST_FLOW_THROUGH("out");
770 pushCall = !cx->fp;
771 if (pushCall) {
773 * The initialVarObj is left NULL since fp->callobj is NULL and, for
774 * interpreted functions, fp->varobj() == fp->callobj.
776 callStack.setInitialFrame(&frame);
777 cx->pushCallStack(&callStack);
779 cx->fp = &frame;
781 /* Init these now in case we goto out before first hook call. */
782 hook = cx->debugHooks->callHook;
783 hookData = NULL;
785 if (native) {
786 /* Slow natives expect the caller's scopeChain as their scopeChain. */
787 if (frame.down) {
788 JS_ASSERT(!pushCall);
789 frame.scopeChain = frame.down->scopeChain;
792 /* Ensure that we have a scope chain. */
793 if (!frame.scopeChain)
794 frame.scopeChain = parent;
795 } else {
796 /* Use parent scope so js_GetCallObject can find the right "Call". */
797 frame.scopeChain = parent;
798 if (JSFUN_HEAVYWEIGHT_TEST(fun->flags)) {
799 /* Scope with a call object parented by the callee's parent. */
800 if (!js_GetCallObject(cx, &frame)) {
801 ok = JS_FALSE;
802 goto out;
805 frame.slots = sp - fun->u.i.nvars;
808 /* Call the hook if present after we fully initialized the frame. */
809 if (hook)
810 hookData = hook(cx, &frame, JS_TRUE, 0, cx->debugHooks->callHookData);
812 DTrace::enterJSFun(cx, &frame, fun, frame.down, frame.argc, frame.argv);
814 /* Call the function, either a native method or an interpreted script. */
815 if (native) {
816 #ifdef DEBUG_NOT_THROWING
817 JSBool alreadyThrowing = cx->throwing;
818 #endif
819 /* Primitive |this| should not be passed to slow natives. */
820 JSObject *thisp = JSVAL_TO_OBJECT(frame.thisv);
821 ok = native(cx, thisp, argc, frame.argv, &frame.rval);
822 JS_RUNTIME_METER(cx->runtime, nativeCalls);
823 #ifdef DEBUG_NOT_THROWING
824 if (ok && !alreadyThrowing)
825 ASSERT_NOT_THROWING(cx);
826 #endif
827 } else {
828 JS_ASSERT(script);
829 ok = js_Interpret(cx);
832 DTrace::exitJSFun(cx, &frame, fun, frame.rval);
834 out:
835 if (hookData) {
836 hook = cx->debugHooks->callHook;
837 if (hook)
838 hook(cx, &frame, JS_FALSE, &ok, hookData);
841 frame.putActivationObjects(cx);
843 *vp = frame.rval;
845 /* Restore cx->fp now that we're done releasing frame objects. */
846 if (pushCall)
847 cx->popCallStack();
848 cx->fp = frame.down;
850 out2:
851 /* Pop everything we may have allocated off the stack. */
852 cx->stackPool.release(mark);
853 if (!ok)
854 *vp = JSVAL_NULL;
855 return ok;
857 bad:
858 js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
859 ok = JS_FALSE;
860 goto out2;
863 JSBool
864 js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
865 uintN argc, jsval *argv, jsval *rval)
867 jsval *invokevp;
868 void *mark;
869 JSBool ok;
871 LeaveTrace(cx);
872 invokevp = js_AllocStack(cx, 2 + argc, &mark);
873 if (!invokevp)
874 return JS_FALSE;
876 invokevp[0] = fval;
877 invokevp[1] = OBJECT_TO_JSVAL(obj);
878 memcpy(invokevp + 2, argv, argc * sizeof *argv);
880 ok = js_Invoke(cx, argc, invokevp, flags);
881 if (ok) {
883          * Store *rval in a scoped local root if a scope is open, else in
884 * the lastInternalResult pigeon-hole GC root, solely so users of
885 * js_InternalInvoke and its direct and indirect (js_ValueToString for
886 * example) callers do not need to manage roots for local, temporary
887 * references to such results.
889 *rval = *invokevp;
890 if (JSVAL_IS_GCTHING(*rval) && *rval != JSVAL_NULL) {
891 JSLocalRootStack *lrs = JS_THREAD_DATA(cx)->localRootStack;
892 if (lrs) {
893 if (js_PushLocalRoot(cx, lrs, *rval) < 0)
894 ok = JS_FALSE;
895 } else {
896 cx->weakRoots.lastInternalResult = *rval;
901 js_FreeStack(cx, mark);
902 return ok;
905 JSBool
906 js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
907 JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
909 LeaveTrace(cx);
912 * js_InternalInvoke could result in another try to get or set the same id
913 * again, see bug 355497.
915 JS_CHECK_RECURSION(cx, return JS_FALSE);
917 return js_InternalCall(cx, obj, fval, argc, argv, rval);
920 JSBool
921 js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
922 JSStackFrame *down, uintN flags, jsval *result)
924 if (script->isEmpty()) {
925 if (result)
926 *result = JSVAL_VOID;
927 return JS_TRUE;
930 LeaveTrace(cx);
932 DTrace::ExecutionScope executionScope(script);
934 JSInterpreterHook hook = cx->debugHooks->executeHook;
935 void *hookData = NULL;
936 JSStackFrame frame;
937 CallStack callStack(cx);
938 frame.script = script;
939 if (down) {
940 /* Propagate arg state for eval and the debugger API. */
941 frame.callobj = down->callobj;
942 frame.argsobj = down->argsobj;
943 frame.fun = (script->staticLevel > 0) ? down->fun : NULL;
944 frame.thisv = down->thisv;
945 if (down->flags & JSFRAME_COMPUTED_THIS)
946 flags |= JSFRAME_COMPUTED_THIS;
947 frame.argc = down->argc;
948 frame.argv = down->argv;
949 frame.annotation = down->annotation;
952 * We want to call |down->varobj()|, but this requires knowing the
953 * CallStack of |down|. If |down == cx->fp|, the callstack is simply
954 * the context's active callstack, so we can use |down->varobj(cx)|.
955 * When |down != cx->fp|, we need to do a slow linear search. Luckily,
956 * this only happens with indirect eval and JS_EvaluateInStackFrame.
958 if (down == cx->fp) {
959 callStack.setInitialVarObj(down->varobj(cx));
960 } else {
961 CallStack *cs = cx->containingCallStack(down);
962 callStack.setInitialVarObj(down->varobj(cs));
964 } else {
965 frame.callobj = NULL;
966 frame.argsobj = NULL;
967 JSObject *obj = chain;
968 if (cx->options & JSOPTION_VAROBJFIX) {
969 while (JSObject *tmp = obj->getParent())
970 obj = tmp;
972 frame.fun = NULL;
973 frame.thisv = OBJECT_TO_JSVAL(chain);
974 frame.argc = 0;
975 frame.argv = NULL;
976 frame.annotation = NULL;
977 callStack.setInitialVarObj(obj);
980 frame.imacpc = NULL;
982 struct RawStackGuard {
983 JSContext *cx;
984 void *mark;
985 RawStackGuard(JSContext *cx) : cx(cx), mark(NULL) {}
986 ~RawStackGuard() { if (mark) js_FreeRawStack(cx, mark); }
987 } rawStackGuard(cx);
989 if (script->nslots != 0) {
990 frame.slots = js_AllocRawStack(cx, script->nslots, &rawStackGuard.mark);
991 if (!frame.slots)
992 return false;
993 memset(frame.slots, 0, script->nfixed * sizeof(jsval));
995 #if JS_HAS_SHARP_VARS
996 JS_STATIC_ASSERT(SHARP_NSLOTS == 2);
998 if (script->hasSharps) {
999 JS_ASSERT(script->nfixed >= SHARP_NSLOTS);
1000 jsval *sharps = &frame.slots[script->nfixed - SHARP_NSLOTS];
1002 if (down && down->script && down->script->hasSharps) {
1003 JS_ASSERT(down->script->nfixed >= SHARP_NSLOTS);
1004 int base = (down->fun && !(down->flags & JSFRAME_SPECIAL))
1005 ? down->fun->sharpSlotBase(cx)
1006 : down->script->nfixed - SHARP_NSLOTS;
1007 if (base < 0)
1008 return false;
1009 sharps[0] = down->slots[base];
1010 sharps[1] = down->slots[base + 1];
1011 } else {
1012 sharps[0] = sharps[1] = JSVAL_VOID;
1015 #endif
1016 } else {
1017 frame.slots = NULL;
1020 frame.rval = JSVAL_VOID;
1021 frame.down = down;
1022 frame.scopeChain = chain;
1023 frame.regs = NULL;
1024 frame.flags = flags;
1025 frame.blockChain = NULL;
1028 * We need to push/pop a new callstack if there is no existing callstack
1029 * or the current callstack needs to be suspended (so that its frames are
1030 * marked by GC).
1032 JSStackFrame *oldfp = cx->fp;
1033 bool newCallStack = !oldfp || oldfp != down;
1034 if (newCallStack) {
1035 callStack.setInitialFrame(&frame);
1036 cx->pushCallStack(&callStack);
1038 cx->fp = &frame;
1040 struct FinishGuard {
1041 JSContext *cx;
1042 JSStackFrame *oldfp;
1043 bool newCallStack;
1044 FinishGuard(JSContext *cx, JSStackFrame *oldfp, bool newCallStack)
1045 : cx(cx), oldfp(oldfp), newCallStack(newCallStack) {}
1046 ~FinishGuard() {
1047 if (newCallStack)
1048 cx->popCallStack();
1049 cx->fp = oldfp;
1051 } finishGuard(cx, oldfp, newCallStack);
1053 if (!down) {
1054 OBJ_TO_INNER_OBJECT(cx, chain);
1055 if (!chain)
1056 return false;
1057 frame.scopeChain = chain;
1059 JSObject *thisp = JSVAL_TO_OBJECT(frame.thisv)->thisObject(cx);
1060 if (!thisp)
1061 return false;
1062 frame.thisv = OBJECT_TO_JSVAL(thisp);
1063 frame.flags |= JSFRAME_COMPUTED_THIS;
1066 if (hook) {
1067 hookData = hook(cx, &frame, JS_TRUE, 0,
1068 cx->debugHooks->executeHookData);
1071 JSBool ok = js_Interpret(cx);
1072 if (result)
1073 *result = frame.rval;
1075 if (hookData) {
1076 hook = cx->debugHooks->executeHook;
1077 if (hook)
1078 hook(cx, &frame, JS_FALSE, &ok, hookData);
1081 return ok;
1084 JSBool
1085 js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
1086 JSObject **objp, JSProperty **propp)
1088 JSObject *obj2;
1089 JSProperty *prop;
1090 uintN oldAttrs, report;
1091 bool isFunction;
1092 jsval value;
1093 const char *type, *name;
1096 * Both objp and propp must be either null or given. When given, *propp
1097 * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
1098 * the common case of a non-existing property.
1100 JS_ASSERT(!objp == !propp);
1101 JS_ASSERT_IF(propp, !*propp);
1103 /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
1104      * drop the property before reporting it, we insist on !propp to avoid
1105 * looking up the property again after the reporting is done.
1107 JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER);
1108 JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp);
1110 if (!obj->lookupProperty(cx, id, &obj2, &prop))
1111 return JS_FALSE;
1112 if (!prop)
1113 return JS_TRUE;
1115 /* Use prop as a speedup hint to obj->getAttributes. */
1116 if (!obj2->getAttributes(cx, id, prop, &oldAttrs)) {
1117 obj2->dropProperty(cx, prop);
1118 return JS_FALSE;
1122 * If our caller doesn't want prop, drop it (we don't need it any longer).
1124 if (!propp) {
1125 obj2->dropProperty(cx, prop);
1126 prop = NULL;
1127 } else {
1128 *objp = obj2;
1129 *propp = prop;
1132 if (attrs == JSPROP_INITIALIZER) {
1133 /* Allow the new object to override properties. */
1134 if (obj2 != obj)
1135 return JS_TRUE;
1137 /* The property must be dropped already. */
1138 JS_ASSERT(!prop);
1139 report = JSREPORT_WARNING | JSREPORT_STRICT;
1141 #ifdef __GNUC__
1142 isFunction = false; /* suppress bogus gcc warnings */
1143 #endif
1144 } else {
1145 /* We allow redeclaring some non-readonly properties. */
1146 if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) {
1147 /* Allow redeclaration of variables and functions. */
1148 if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER)))
1149 return JS_TRUE;
1152 * Allow adding a getter only if a property already has a setter
1153 * but no getter and similarly for adding a setter. That is, we
1154 * allow only the following transitions:
1156 * no-property --> getter --> getter + setter
1157 * no-property --> setter --> getter + setter
1159 if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0)
1160 return JS_TRUE;
1163 * Allow redeclaration of an impermanent property (in which case
1164 * anyone could delete it and redefine it, willy-nilly).
1166 if (!(oldAttrs & JSPROP_PERMANENT))
1167 return JS_TRUE;
1169 if (prop)
1170 obj2->dropProperty(cx, prop);
1172 report = JSREPORT_ERROR;
1173 isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0;
1174 if (!isFunction) {
1175 if (!obj->getProperty(cx, id, &value))
1176 return JS_FALSE;
1177 isFunction = VALUE_IS_FUNCTION(cx, value);
1181 type = (attrs == JSPROP_INITIALIZER)
1182 ? "property"
1183 : (oldAttrs & attrs & JSPROP_GETTER)
1184 ? js_getter_str
1185 : (oldAttrs & attrs & JSPROP_SETTER)
1186 ? js_setter_str
1187 : (oldAttrs & JSPROP_READONLY)
1188 ? js_const_str
1189 : isFunction
1190 ? js_function_str
1191 : js_var_str;
1192 name = js_ValueToPrintableString(cx, ID_TO_VALUE(id));
1193 if (!name)
1194 return JS_FALSE;
1195 return JS_ReportErrorFlagsAndNumber(cx, report,
1196 js_GetErrorMessage, NULL,
1197 JSMSG_REDECLARED_VAR,
1198 type, name);
1201 JSBool
1202 js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval)
1204 jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval);
1205 jsdouble ld, rd;
1207 if (ltag == rtag) {
1208 if (ltag == JSVAL_STRING) {
1209 JSString *lstr = JSVAL_TO_STRING(lval),
1210 *rstr = JSVAL_TO_STRING(rval);
1211 return js_EqualStrings(lstr, rstr);
1213 if (ltag == JSVAL_DOUBLE) {
1214 ld = *JSVAL_TO_DOUBLE(lval);
1215 rd = *JSVAL_TO_DOUBLE(rval);
1216 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1218 if (ltag == JSVAL_OBJECT &&
1219 lval != rval &&
1220 !JSVAL_IS_NULL(lval) &&
1221 !JSVAL_IS_NULL(rval)) {
1222 JSObject *lobj, *robj;
1224 lobj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(lval));
1225 robj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(rval));
1226 lval = OBJECT_TO_JSVAL(lobj);
1227 rval = OBJECT_TO_JSVAL(robj);
1229 return lval == rval;
1231 if (ltag == JSVAL_DOUBLE && JSVAL_IS_INT(rval)) {
1232 ld = *JSVAL_TO_DOUBLE(lval);
1233 rd = JSVAL_TO_INT(rval);
1234 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1236 if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) {
1237 ld = JSVAL_TO_INT(lval);
1238 rd = *JSVAL_TO_DOUBLE(rval);
1239 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1241 return lval == rval;
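/*
 * Illustrative note: the same number may be represented either as a tagged
 * int jsval or as a heap double, so the cross-tag cases above make
 * js_StrictlyEqual report the int jsval 1 and a double jsval holding 1.0 as
 * strictly equal, matching the numeric semantics of ===.
 */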
1244 static inline bool
1245 IsNegativeZero(jsval v)
1247 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v));
1250 static inline bool
1251 IsNaN(jsval v)
1253 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NaN(*JSVAL_TO_DOUBLE(v));
1256 JSBool
1257 js_SameValue(jsval v1, jsval v2, JSContext *cx)
1259 if (IsNegativeZero(v1))
1260 return IsNegativeZero(v2);
1261 if (IsNegativeZero(v2))
1262 return JS_FALSE;
1263 if (IsNaN(v1) && IsNaN(v2))
1264 return JS_TRUE;
1265 return js_StrictlyEqual(cx, v1, v2);
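/*
 * Illustrative note: this follows the ES5 SameValue algorithm, so a jsval
 * holding -0 is not the same value as one holding +0, while two jsvals
 * holding NaN are the same value; both results differ from js_StrictlyEqual.
 */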
1268 JS_REQUIRES_STACK JSBool
1269 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp)
1271 JSFunction *fun, *fun2;
1272 JSObject *obj, *obj2, *proto, *parent;
1273 jsval lval, rval;
1274 JSClass *clasp;
1276 fun = NULL;
1277 obj2 = NULL;
1278 lval = *vp;
1279 if (!JSVAL_IS_OBJECT(lval) ||
1280 (obj2 = JSVAL_TO_OBJECT(lval)) == NULL ||
1281 /* XXX clean up to avoid special cases above ObjectOps layer */
1282 obj2->getClass() == &js_FunctionClass ||
1283 !obj2->map->ops->construct)
1285 fun = js_ValueToFunction(cx, vp, JSV2F_CONSTRUCT);
1286 if (!fun)
1287 return JS_FALSE;
1290 clasp = &js_ObjectClass;
1291 if (!obj2) {
1292 proto = parent = NULL;
1293 fun = NULL;
1294 } else {
1296 * Get the constructor prototype object for this function.
1297 * Use the nominal 'this' parameter slot, vp[1], as a local
1298 * root to protect this prototype, in case it has no other
1299 * strong refs.
1301 if (!obj2->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
1302 &vp[1])) {
1303 return JS_FALSE;
1305 rval = vp[1];
1306 proto = JSVAL_IS_OBJECT(rval) ? JSVAL_TO_OBJECT(rval) : NULL;
1307 parent = obj2->getParent();
1309 if (obj2->getClass() == &js_FunctionClass) {
1310 fun2 = GET_FUNCTION_PRIVATE(cx, obj2);
1311 if (!FUN_INTERPRETED(fun2) && fun2->u.n.clasp)
1312 clasp = fun2->u.n.clasp;
1315 obj = NewObject(cx, clasp, proto, parent);
1316 if (!obj)
1317 return JS_FALSE;
1319 /* Now we have an object with a constructor method; call it. */
1320 vp[1] = OBJECT_TO_JSVAL(obj);
1321 if (!js_Invoke(cx, argc, vp, JSINVOKE_CONSTRUCT))
1322 return JS_FALSE;
1324 /* Check the return value and if it's primitive, force it to be obj. */
1325 rval = *vp;
1326 if (clampReturn && JSVAL_IS_PRIMITIVE(rval)) {
1327 if (!fun) {
1328 /* native [[Construct]] returning primitive is error */
1329 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1330 JSMSG_BAD_NEW_RESULT,
1331 js_ValueToPrintableString(cx, rval));
1332 return JS_FALSE;
1334 *vp = OBJECT_TO_JSVAL(obj);
1337 JS_RUNTIME_METER(cx->runtime, constructs);
1338 return JS_TRUE;
1341 JSBool
1342 js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp)
1344 JS_ASSERT(!JSVAL_IS_INT(idval));
1346 #if JS_HAS_XML_SUPPORT
1347 if (!JSVAL_IS_PRIMITIVE(idval)) {
1348 if (OBJECT_IS_XML(cx, obj)) {
1349 *idp = OBJECT_JSVAL_TO_JSID(idval);
1350 return JS_TRUE;
1352 if (!js_IsFunctionQName(cx, JSVAL_TO_OBJECT(idval), idp))
1353 return JS_FALSE;
1354 if (*idp != 0)
1355 return JS_TRUE;
1357 #endif
1359 return js_ValueToStringId(cx, idval, idp);
1363 * Enter the new with scope using an object at sp[-1] and associate the depth
1364 * of the with block with sp + stackIndex.
1366 JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
1367 js_EnterWith(JSContext *cx, jsint stackIndex)
1369 JSStackFrame *fp;
1370 jsval *sp;
1371 JSObject *obj, *parent, *withobj;
1373 fp = cx->fp;
1374 sp = fp->regs->sp;
1375 JS_ASSERT(stackIndex < 0);
1376 JS_ASSERT(StackBase(fp) <= sp + stackIndex);
1378 if (!JSVAL_IS_PRIMITIVE(sp[-1])) {
1379 obj = JSVAL_TO_OBJECT(sp[-1]);
1380 } else {
1381 obj = js_ValueToNonNullObject(cx, sp[-1]);
1382 if (!obj)
1383 return JS_FALSE;
1384 sp[-1] = OBJECT_TO_JSVAL(obj);
1387 parent = js_GetScopeChain(cx, fp);
1388 if (!parent)
1389 return JS_FALSE;
1391 OBJ_TO_INNER_OBJECT(cx, obj);
1392 if (!obj)
1393 return JS_FALSE;
1395 withobj = js_NewWithObject(cx, obj, parent,
1396 sp + stackIndex - StackBase(fp));
1397 if (!withobj)
1398 return JS_FALSE;
1400 fp->scopeChain = withobj;
1401 return JS_TRUE;
1404 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
1405 js_LeaveWith(JSContext *cx)
1407 JSObject *withobj;
1409 withobj = cx->fp->scopeChain;
1410 JS_ASSERT(withobj->getClass() == &js_WithClass);
1411 JS_ASSERT(withobj->getPrivate() == cx->fp);
1412 JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
1413 cx->fp->scopeChain = withobj->getParent();
1414 withobj->setPrivate(NULL);
1417 JS_REQUIRES_STACK JSClass *
1418 js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
1420 JSClass *clasp;
1422 clasp = obj->getClass();
1423 if ((clasp == &js_WithClass || clasp == &js_BlockClass) &&
1424 obj->getPrivate() == cx->fp &&
1425 OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) {
1426 return clasp;
1428 return NULL;
1432 * Unwind block and scope chains to match the given depth. The function sets
1433  * fp->regs->sp on return to StackBase(fp) + stackDepth.
1435 JS_REQUIRES_STACK JSBool
1436 js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
1437 JSBool normalUnwind)
1439 JSObject *obj;
1440 JSClass *clasp;
1442 JS_ASSERT(stackDepth >= 0);
1443 JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp);
1445 for (obj = fp->blockChain; obj; obj = obj->getParent()) {
1446 JS_ASSERT(obj->getClass() == &js_BlockClass);
1447 if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
1448 break;
1450 fp->blockChain = obj;
1452 for (;;) {
1453 obj = fp->scopeChain;
1454 clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth);
1455 if (!clasp)
1456 break;
1457 if (clasp == &js_BlockClass) {
1458 /* Don't fail until after we've updated all stacks. */
1459 normalUnwind &= js_PutBlockObject(cx, normalUnwind);
1460 } else {
1461 js_LeaveWith(cx);
1465 fp->regs->sp = StackBase(fp) + stackDepth;
1466 return normalUnwind;
1469 JS_STATIC_INTERPRET JSBool
1470 js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2)
1472 if (cs->format & JOF_POST) {
1473 double d;
1474 if (!ValueToNumberValue(cx, vp, &d))
1475 return JS_FALSE;
1476 (cs->format & JOF_INC) ? ++d : --d;
1477 return js_NewNumberInRootedValue(cx, d, vp2);
1480 double d;
1481 if (!ValueToNumber(cx, *vp, &d))
1482 return JS_FALSE;
1483 (cs->format & JOF_INC) ? ++d : --d;
1484 if (!js_NewNumberInRootedValue(cx, d, vp2))
1485 return JS_FALSE;
1486 *vp = *vp2;
1487 return JS_TRUE;
1490 jsval&
1491 js_GetUpvar(JSContext *cx, uintN level, uintN cookie)
1493 level -= UPVAR_FRAME_SKIP(cookie);
1494 JS_ASSERT(level < JS_DISPLAY_SIZE);
1496 JSStackFrame *fp = cx->display[level];
1497 JS_ASSERT(fp->script);
1499 uintN slot = UPVAR_FRAME_SLOT(cookie);
1500 jsval *vp;
1502 if (!fp->fun || (fp->flags & JSFRAME_EVAL)) {
1503 vp = fp->slots + fp->script->nfixed;
1504 } else if (slot < fp->fun->nargs) {
1505 vp = fp->argv;
1506 } else if (slot == CALLEE_UPVAR_SLOT) {
1507 vp = &fp->argv[-2];
1508 slot = 0;
1509 } else {
1510 slot -= fp->fun->nargs;
1511 JS_ASSERT(slot < fp->script->nslots);
1512 vp = fp->slots;
1515 return vp[slot];
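/*
 * Illustrative sketch (assuming, per the UPVAR_FRAME_SKIP/UPVAR_FRAME_SLOT
 * macros, that a cookie packs the frame-skip count in its upper 16 bits and
 * the slot in its lower 16 bits): for code at static level 3 reading an
 * upvar held in arg slot 2 of the frame one level out, the cookie would be
 * (1 << 16) | 2, so level becomes 3 - 1 == 2, fp is cx->display[2], and the
 * returned reference is fp->argv[2].
 */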
1518 #ifdef DEBUG
1520 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
1521 js_TraceOpcode(JSContext *cx)
1523 FILE *tracefp;
1524 JSStackFrame *fp;
1525 JSFrameRegs *regs;
1526 intN ndefs, n, nuses;
1527 jsval *siter;
1528 JSString *str;
1529 JSOp op;
1531 tracefp = (FILE *) cx->tracefp;
1532 JS_ASSERT(tracefp);
1533 fp = cx->fp;
1534 regs = fp->regs;
1537 * Operations in prologues don't produce interesting values, and
1538 * js_DecompileValueGenerator isn't set up to handle them anyway.
1540 if (cx->tracePrevPc && regs->pc >= fp->script->main) {
1541 JSOp tracePrevOp = JSOp(*cx->tracePrevPc);
1542 ndefs = js_GetStackDefs(cx, &js_CodeSpec[tracePrevOp], tracePrevOp,
1543 fp->script, cx->tracePrevPc);
1546 * If there aren't that many elements on the stack, then we have
1547 * probably entered a new frame, and printing output would just be
1548 * misleading.
1550 if (ndefs != 0 &&
1551 ndefs < regs->sp - fp->slots) {
1552 for (n = -ndefs; n < 0; n++) {
1553 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
1554 NULL);
1555 if (bytes) {
1556 fprintf(tracefp, "%s %s",
1557 (n == -ndefs) ? " output:" : ",",
1558 bytes);
1559 cx->free(bytes);
1560 } else {
1561 JS_ClearPendingException(cx);
1564 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
1566 fprintf(tracefp, " stack: ");
1567 for (siter = StackBase(fp); siter < regs->sp; siter++) {
1568 str = js_ValueToString(cx, *siter);
1569 if (!str) {
1570 fputs("<null>", tracefp);
1571 } else {
1572 JS_ClearPendingException(cx);
1573 js_FileEscapedString(tracefp, str, 0);
1575 fputc(' ', tracefp);
1577 fputc('\n', tracefp);
1580 fprintf(tracefp, "%4u: ",
1581 js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : regs->pc));
1582 js_Disassemble1(cx, fp->script, regs->pc,
1583 regs->pc - fp->script->code,
1584 JS_FALSE, tracefp);
1585 op = (JSOp) *regs->pc;
1586 nuses = js_GetStackUses(&js_CodeSpec[op], op, regs->pc);
1587 if (nuses != 0) {
1588 for (n = -nuses; n < 0; n++) {
1589 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
1590 NULL);
1591 if (bytes) {
1592 fprintf(tracefp, "%s %s",
1593 (n == -nuses) ? " inputs:" : ",",
1594 bytes);
1595 cx->free(bytes);
1596 } else {
1597 JS_ClearPendingException(cx);
1600 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
1602 cx->tracePrevPc = regs->pc;
1604 /* It's nice to have complete traces when debugging a crash. */
1605 fflush(tracefp);
1608 #endif /* DEBUG */
1610 #ifdef JS_OPMETER
1612 # include <stdlib.h>
1614 # define HIST_NSLOTS 8
1617  * The second dimension is hardcoded at 256 because we know that many opcode values fit
1618 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
1619 * any particular row.
1621 static uint32 succeeds[JSOP_LIMIT][256];
1622 static uint32 slot_ops[JSOP_LIMIT][HIST_NSLOTS];
1624 JS_STATIC_INTERPRET void
1625 js_MeterOpcodePair(JSOp op1, JSOp op2)
1627 if (op1 != JSOP_STOP)
1628 ++succeeds[op1][op2];
1631 JS_STATIC_INTERPRET void
1632 js_MeterSlotOpcode(JSOp op, uint32 slot)
1634 if (slot < HIST_NSLOTS)
1635 ++slot_ops[op][slot];
1638 typedef struct Edge {
1639 const char *from;
1640 const char *to;
1641 uint32 count;
1642 } Edge;
1644 static int
1645 compare_edges(const void *a, const void *b)
1647 const Edge *ea = (const Edge *) a;
1648 const Edge *eb = (const Edge *) b;
1650 return (int32)eb->count - (int32)ea->count;
1653 void
1654 js_DumpOpMeters()
1656 const char *name, *from, *style;
1657 FILE *fp;
1658 uint32 total, count;
1659 uint32 i, j, nedges;
1660 Edge *graph;
1662 name = getenv("JS_OPMETER_FILE");
1663 if (!name)
1664 name = "/tmp/ops.dot";
1665 fp = fopen(name, "w");
1666 if (!fp) {
1667 perror(name);
1668 return;
1671 total = nedges = 0;
1672 for (i = 0; i < JSOP_LIMIT; i++) {
1673 for (j = 0; j < JSOP_LIMIT; j++) {
1674 count = succeeds[i][j];
1675 if (count != 0) {
1676 total += count;
1677 ++nedges;
1682 # define SIGNIFICANT(count,total) (200. * (count) >= (total))
1684 graph = (Edge *) js_calloc(nedges * sizeof graph[0]);
1685 for (i = nedges = 0; i < JSOP_LIMIT; i++) {
1686 from = js_CodeName[i];
1687 for (j = 0; j < JSOP_LIMIT; j++) {
1688 count = succeeds[i][j];
1689 if (count != 0 && SIGNIFICANT(count, total)) {
1690 graph[nedges].from = from;
1691 graph[nedges].to = js_CodeName[j];
1692 graph[nedges].count = count;
1693 ++nedges;
1697 qsort(graph, nedges, sizeof(Edge), compare_edges);
1699 # undef SIGNIFICANT
1701 fputs("digraph {\n", fp);
1702 for (i = 0, style = NULL; i < nedges; i++) {
1703 JS_ASSERT(i == 0 || graph[i-1].count >= graph[i].count);
1704 if (!style || graph[i-1].count != graph[i].count) {
1705 style = (i > nedges * .75) ? "dotted" :
1706 (i > nedges * .50) ? "dashed" :
1707 (i > nedges * .25) ? "solid" : "bold";
1709 fprintf(fp, " %s -> %s [label=\"%lu\" style=%s]\n",
1710 graph[i].from, graph[i].to,
1711 (unsigned long)graph[i].count, style);
1713 js_free(graph);
1714 fputs("}\n", fp);
1715 fclose(fp);
1717 name = getenv("JS_OPMETER_HIST");
1718 if (!name)
1719 name = "/tmp/ops.hist";
1720 fp = fopen(name, "w");
1721 if (!fp) {
1722 perror(name);
1723 return;
1725 fputs("bytecode", fp);
1726 for (j = 0; j < HIST_NSLOTS; j++)
1727 fprintf(fp, " slot %1u", (unsigned)j);
1728 putc('\n', fp);
1729 fputs("========", fp);
1730 for (j = 0; j < HIST_NSLOTS; j++)
1731 fputs(" =======", fp);
1732 putc('\n', fp);
1733 for (i = 0; i < JSOP_LIMIT; i++) {
1734 for (j = 0; j < HIST_NSLOTS; j++) {
1735 if (slot_ops[i][j] != 0) {
1736 /* Reuse j in the next loop, since we break after. */
1737 fprintf(fp, "%-8.8s", js_CodeName[i]);
1738 for (j = 0; j < HIST_NSLOTS; j++)
1739 fprintf(fp, " %7lu", (unsigned long)slot_ops[i][j]);
1740 putc('\n', fp);
1741 break;
1745 fclose(fp);
1748 #endif /* JS_OPMETER */
1750 #endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */
1752 #ifndef jsinvoke_cpp___
1754 #ifdef JS_REPRMETER
1755 // jsval representation metering: this measures the kinds of jsvals that
1756 // are used as inputs to each JSOp.
1757 namespace reprmeter {
1758 enum Repr {
1759 NONE,
1760 INT,
1761 DOUBLE,
1762 BOOLEAN_PROPER,
1763 BOOLEAN_OTHER,
1764 STRING,
1765 OBJECT_NULL,
1766 OBJECT_PLAIN,
1767 FUNCTION_INTERPRETED,
1768 FUNCTION_FASTNATIVE,
1769 FUNCTION_SLOWNATIVE,
1770 ARRAY_SLOW,
1771 ARRAY_DENSE
1774 // Return the |repr| value giving the representation of the given jsval.
1775 static Repr
1776 GetRepr(jsval v)
1778 if (JSVAL_IS_INT(v))
1779 return INT;
1780 if (JSVAL_IS_DOUBLE(v))
1781 return DOUBLE;
1782 if (JSVAL_IS_SPECIAL(v)) {
1783 return (v == JSVAL_TRUE || v == JSVAL_FALSE)
1784 ? BOOLEAN_PROPER
1785 : BOOLEAN_OTHER;
1787 if (JSVAL_IS_STRING(v))
1788 return STRING;
1790 JS_ASSERT(JSVAL_IS_OBJECT(v));
1792 JSObject *obj = JSVAL_TO_OBJECT(v);
1793 if (VALUE_IS_FUNCTION(cx, v)) {
1794 JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
1795 if (FUN_INTERPRETED(fun))
1796 return FUNCTION_INTERPRETED;
1797 if (fun->flags & JSFUN_FAST_NATIVE)
1798 return FUNCTION_FASTNATIVE;
1799 return FUNCTION_SLOWNATIVE;
1801 // This must come before the general array test, because that
1802 // one subsumes this one.
1803 if (!obj)
1804 return OBJECT_NULL;
1805 if (obj->isDenseArray())
1806 return ARRAY_DENSE;
1807 if (obj->isArray())
1808 return ARRAY_SLOW;
1809 return OBJECT_PLAIN;
1812 static const char *reprName[] = { "invalid", "int", "double", "bool", "special",
1813 "string", "null", "object",
1814 "fun:interp", "fun:fast", "fun:slow",
1815 "array:slow", "array:dense" };
1817 // Logically, a tuple of (JSOp, repr_1, ..., repr_n) where repr_i is
1818 // the |repr| of the ith input to the JSOp.
1819 struct OpInput {
1820 enum { max_uses = 16 };
1822 JSOp op;
1823 Repr uses[max_uses];
1825 OpInput() : op(JSOp(255)) {
1826 for (int i = 0; i < max_uses; ++i)
1827 uses[i] = NONE;
1830 OpInput(JSOp op) : op(op) {
1831 for (int i = 0; i < max_uses; ++i)
1832 uses[i] = NONE;
1835 // Hash function
1836 operator uint32() const {
1837 uint32 h = op;
1838 for (int i = 0; i < max_uses; ++i)
1839 h = h * 7 + uses[i] * 13;
1840 return h;
1843 bool operator==(const OpInput &opinput) const {
1844 if (op != opinput.op)
1845 return false;
1846 for (int i = 0; i < max_uses; ++i) {
1847 if (uses[i] != opinput.uses[i])
1848 return false;
1850 return true;
1853 OpInput &operator=(const OpInput &opinput) {
1854 op = opinput.op;
1855 for (int i = 0; i < max_uses; ++i)
1856 uses[i] = opinput.uses[i];
1857 return *this;
1861 typedef HashMap<OpInput, uint64, DefaultHasher<OpInput>, SystemAllocPolicy> OpInputHistogram;
1863 OpInputHistogram opinputs;
1864 bool opinputsInitialized = false;
1866 // Record an OpInput for the current op. This should be called just
1867 // before executing the op.
1868 static void
1869 MeterRepr(JSStackFrame *fp)
1871 // Note that we simply ignore the possibility of errors (OOMs)
1872 // using the hash map, since this is only metering code.
1874 if (!opinputsInitialized) {
1875 opinputs.init();
1876 opinputsInitialized = true;
1879 JSOp op = JSOp(*fp->regs->pc);
1880 unsigned nuses = js_GetStackUses(&js_CodeSpec[op], op, fp->regs->pc);
1882 // Build the OpInput.
1883 OpInput opinput(op);
1884 for (unsigned i = 0; i < nuses; ++i) {
1885 jsval v = fp->regs->sp[-nuses+i];
1886 opinput.uses[i] = GetRepr(v);
1889 OpInputHistogram::AddPtr p = opinputs.lookupForAdd(opinput);
1890 if (p)
1891 ++p->value;
1892 else
1893 opinputs.add(p, opinput, 1);
1896 void
1897 js_DumpReprMeter()
1899 FILE *f = fopen("/tmp/reprmeter.txt", "w");
1900 JS_ASSERT(f);
1901 for (OpInputHistogram::Range r = opinputs.all(); !r.empty(); r.popFront()) {
1902 const OpInput &o = r.front().key;
1903 uint64 c = r.front().value;
1904 fprintf(f, "%3d,%s", o.op, js_CodeName[o.op]);
1905 for (int i = 0; i < OpInput::max_uses && o.uses[i] != NONE; ++i)
1906 fprintf(f, ",%s", reprName[o.uses[i]]);
1907 fprintf(f, ",%llu\n", c);
1909 fclose(f);
1912 #endif /* JS_REPRMETER */
1914 #define PUSH(v) (*regs.sp++ = (v))
1915 #define PUSH_OPND(v) PUSH(v)
1916 #define STORE_OPND(n,v) (regs.sp[n] = (v))
1917 #define POP() (*--regs.sp)
1918 #define POP_OPND() POP()
1919 #define FETCH_OPND(n) (regs.sp[n])
1922 * Push the jsdouble d using sp from the lexical environment. Try to convert d
1923 * to a jsint that fits in a jsval, otherwise GC-alloc space for it and push a
1924 * reference.
1926 #define STORE_NUMBER(cx, n, d) \
1927 JS_BEGIN_MACRO \
1928 jsint i_; \
1930 if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_)) \
1931 regs.sp[n] = INT_TO_JSVAL(i_); \
1932 else if (!js_NewDoubleInRootedValue(cx, d, &regs.sp[n])) \
1933 goto error; \
1934 JS_END_MACRO
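/*
 * For example, STORE_NUMBER(cx, -1, 7.0) stores INT_TO_JSVAL(7) directly
 * into regs.sp[-1], while STORE_NUMBER(cx, -1, 0.5) must allocate a GC'd
 * double via js_NewDoubleInRootedValue and jumps to the error label on OOM.
 */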
1936 #define STORE_INT(cx, n, i) \
1937 JS_BEGIN_MACRO \
1938 if (INT_FITS_IN_JSVAL(i)) \
1939 regs.sp[n] = INT_TO_JSVAL(i); \
1940 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), &regs.sp[n])) \
1941 goto error; \
1942 JS_END_MACRO
1944 #define STORE_UINT(cx, n, u) \
1945 JS_BEGIN_MACRO \
1946 if ((u) <= JSVAL_INT_MAX) \
1947 regs.sp[n] = INT_TO_JSVAL(u); \
1948 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), &regs.sp[n])) \
1949 goto error; \
1950 JS_END_MACRO
1952 #define FETCH_NUMBER(cx, n, d) \
1953 JS_BEGIN_MACRO \
1954 jsval v_; \
1956 v_ = FETCH_OPND(n); \
1957 VALUE_TO_NUMBER(cx, v_, d); \
1958 JS_END_MACRO
1960 #define FETCH_INT(cx, n, i) \
1961 JS_BEGIN_MACRO \
1962 if (!ValueToECMAInt32(cx, regs.sp[n], &i)) \
1963 goto error; \
1964 JS_END_MACRO
1966 #define FETCH_UINT(cx, n, ui) \
1967 JS_BEGIN_MACRO \
1968 if (!ValueToECMAUint32(cx, regs.sp[n], &ui)) \
1969 goto error; \
1970 JS_END_MACRO
1972 #define VALUE_TO_NUMBER(cx, v, d) \
1973 JS_BEGIN_MACRO \
1974 if (!ValueToNumber(cx, v, &d)) \
1975 goto error; \
1976 JS_END_MACRO
1978 #define POP_BOOLEAN(cx, v, b) \
1979 JS_BEGIN_MACRO \
1980 v = FETCH_OPND(-1); \
1981 if (v == JSVAL_NULL) { \
1982 b = JS_FALSE; \
1983 } else if (JSVAL_IS_BOOLEAN(v)) { \
1984 b = JSVAL_TO_BOOLEAN(v); \
1985 } else { \
1986 b = js_ValueToBoolean(v); \
1988 regs.sp--; \
1989 JS_END_MACRO
1991 #define VALUE_TO_OBJECT(cx, n, v, obj) \
1992 JS_BEGIN_MACRO \
1993 if (!JSVAL_IS_PRIMITIVE(v)) { \
1994 obj = JSVAL_TO_OBJECT(v); \
1995 } else { \
1996 obj = js_ValueToNonNullObject(cx, v); \
1997 if (!obj) \
1998 goto error; \
1999 STORE_OPND(n, OBJECT_TO_JSVAL(obj)); \
2001 JS_END_MACRO
2003 #define FETCH_OBJECT(cx, n, v, obj) \
2004 JS_BEGIN_MACRO \
2005 v = FETCH_OPND(n); \
2006 VALUE_TO_OBJECT(cx, n, v, obj); \
2007 JS_END_MACRO
2009 #define DEFAULT_VALUE(cx, n, hint, v) \
2010 JS_BEGIN_MACRO \
2011 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v)); \
2012 JS_ASSERT(v == regs.sp[n]); \
2013 if (!JSVAL_TO_OBJECT(v)->defaultValue(cx, hint, &regs.sp[n])) \
2014 goto error; \
2015 v = regs.sp[n]; \
2016 JS_END_MACRO
2019 * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when
2020 * the lowest bit of v is 1 and the bits 30 and 31 are both either 0 or 1. For
2021 * such v we can do increment or decrement via adding or subtracting two
2022 * without checking that the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX.
2024 #define CAN_DO_FAST_INC_DEC(v) (((((v) << 1) ^ v) & 0x80000001) == 1)
2026 JS_STATIC_ASSERT(JSVAL_INT == 1);
2027 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MIN)));
2028 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MAX)));
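/*
 * Worked example: INT_TO_JSVAL(5) == (5 << 1) | 1 == 0x0000000B, and
 * ((0xB << 1) ^ 0xB) & 0x80000001 == 0x1D & 0x80000001 == 1, so 5 takes the
 * fast path (its tagged value can be incremented or decremented by adding or
 * subtracting 2). By contrast INT_TO_JSVAL(JSVAL_INT_MAX) == 0x7FFFFFFF,
 * whose bits 30 and 31 differ: ((0x7FFFFFFF << 1) ^ 0x7FFFFFFF) & 0x80000001
 * == 0x80000001 != 1, matching the static assert above.
 */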
2031 * Conditional assert to detect failure to clear a pending exception that is
2032 * suppressed (or unintentional suppression of a wanted exception).
2034 #if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver
2035 # define DEBUG_NOT_THROWING 1
2036 #endif
2038 #ifdef DEBUG_NOT_THROWING
2039 # define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing)
2040 #else
2041 # define ASSERT_NOT_THROWING(cx) /* nothing */
2042 #endif
2044 /*
2045 * Define JS_OPMETER to instrument bytecode succession, generating a .dot file
2046 * on shutdown that shows the graph of significant predecessor/successor pairs
2047 * executed, where the edge labels give the succession counts. The .dot file
2048 * is named by the JS_OPMETER_FILE envariable, and defaults to /tmp/ops.dot.
2049 *
2050 * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops
2051 * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts
2052 * are written to JS_OPMETER_HIST, defaulting to /tmp/ops.hist.
2053 */
2054 #ifndef JS_OPMETER
2055 # define METER_OP_INIT(op) /* nothing */
2056 # define METER_OP_PAIR(op1,op2) /* nothing */
2057 # define METER_SLOT_OP(op,slot) /* nothing */
2058 #else
2060 /*
2061 * The second dimension is hardcoded at 256 because we know that many bits fit
2062 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
2063 * any particular row.
2064 */
2065 # define METER_OP_INIT(op) ((op) = JSOP_STOP)
2066 # define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2))
2067 # define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot))
2069 #endif
2071 #ifdef JS_REPRMETER
2072 # define METER_REPR(fp) (reprmeter::MeterRepr(fp))
2073 #else
2074 # define METER_REPR(fp) ((void) 0)
2075 #endif /* JS_REPRMETER */
2077 /*
2078 * Threaded interpretation via computed goto appears to be well-supported by
2079 * GCC 3 and higher. IBM's C compiler when run with the right options (e.g.,
2080 * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler.
2081 * Currently it's broken for JS_VERSION < 160, though this isn't worth fixing.
2082 * Add your compiler support macros here.
2083 */
2084 #ifndef JS_THREADED_INTERP
2085 # if JS_VERSION >= 160 && ( \
2086 __GNUC__ >= 3 || \
2087 (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) || \
2088 __SUNPRO_C >= 0x570)
2089 # define JS_THREADED_INTERP 1
2090 # else
2091 # define JS_THREADED_INTERP 0
2092 # endif
2093 #endif
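/*
 * Minimal sketch of the two dispatch styles selected by JS_THREADED_INTERP,
 * added for illustration only. The real dispatch is assembled from the
 * DO_OP/DO_NEXT_OP/BEGIN_CASE macros defined inside js_Interpret below; the
 * toy opcodes and jump table here are assumptions made up for the example.
 */
#if 0
static int
ToyDispatchThreaded(const unsigned char *pc)
{
    /* Threaded style: one indirect goto per opcode, no central switch. */
    static void *const toyJumpTable[] = { &&L_TOY_NOP, &&L_TOY_STOP };

    goto *toyJumpTable[*pc];

  L_TOY_NOP:
    ++pc;
    goto *toyJumpTable[*pc];

  L_TOY_STOP:
    return 0;
}

static int
ToyDispatchSwitch(const unsigned char *pc)
{
    /* Switch style: a for (;;) loop around a switch, as in the #else branch. */
    for (;;) {
        switch (*pc) {
          case 0: ++pc; break;      /* TOY_NOP */
          case 1: return 0;         /* TOY_STOP */
        }
    }
}
#endif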
2095 /*
2096 * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
2097 * single-thread DEBUG js shell testing to verify property cache hits.
2098 */
2099 #if defined DEBUG && !defined JS_THREADSAFE
2101 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
2102 JS_BEGIN_MACRO \
2103 if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
2104 entry)) { \
2105 goto error; \
2106 } \
2107 JS_END_MACRO
2109 static bool
2110 AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
2111 ptrdiff_t pcoff, JSObject *start, JSObject *found,
2112 PropertyCacheEntry *entry)
2113 {
2114 uint32 sample = cx->runtime->gcNumber;
2116 JSAtom *atom;
2117 if (pcoff >= 0)
2118 GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
2119 else
2120 atom = cx->runtime->atomState.lengthAtom;
2122 JSObject *obj, *pobj;
2123 JSProperty *prop;
2124 JSBool ok;
2126 if (JOF_OPMODE(*regs.pc) == JOF_NAME) {
2127 ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop);
2128 } else {
2129 obj = start;
2130 ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
2131 }
2132 if (!ok)
2133 return false;
2134 if (cx->runtime->gcNumber != sample || entry->vshape() != pobj->shape()) {
2135 pobj->dropProperty(cx, prop);
2136 return true;
2137 }
2138 JS_ASSERT(prop);
2139 JS_ASSERT(pobj == found);
2141 JSScopeProperty *sprop = (JSScopeProperty *) prop;
2142 if (entry->vword.isSlot()) {
2143 JS_ASSERT(entry->vword.toSlot() == sprop->slot);
2144 JS_ASSERT(!sprop->isMethod());
2145 } else if (entry->vword.isSprop()) {
2146 JS_ASSERT(entry->vword.toSprop() == sprop);
2147 JS_ASSERT_IF(sprop->isMethod(),
2148 sprop->methodValue() == pobj->lockedGetSlot(sprop->slot));
2149 } else {
2150 jsval v;
2151 JS_ASSERT(entry->vword.isObject());
2152 JS_ASSERT(!entry->vword.isNull());
2153 JS_ASSERT(pobj->scope()->brandedOrHasMethodBarrier());
2154 JS_ASSERT(sprop->hasDefaultGetterOrIsMethod());
2155 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, pobj->scope()));
2156 v = pobj->lockedGetSlot(sprop->slot);
2157 JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
2158 JS_ASSERT(entry->vword.toObject() == JSVAL_TO_OBJECT(v));
2160 if (sprop->isMethod()) {
2161 JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP);
2162 JS_ASSERT(sprop->methodValue() == v);
2163 }
2164 }
2166 pobj->dropProperty(cx, prop);
2167 return true;
2168 }
2170 #else
2171 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
2172 #endif
2174 /*
2175 * Ensure that the interpreter switch can close call-bytecode cases in the
2176 * same way as non-call bytecodes.
2177 */
2178 JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH);
2179 JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
2180 JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
2181 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH);
2182 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH);
2183 JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
2184 JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
2185 JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH);
2186 JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH);
2188 /*
2189 * Same for debuggable flat closures defined at top level in another function
2190 * or program fragment.
2191 */
2192 JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH == JSOP_DEFFUN_DBGFC_LENGTH);
2194 /*
2195 * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but
2196 * remain distinct for the decompiler. Likewise for JSOP_INIT{PROP,METHOD}.
2197 */
2198 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
2199 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETMETHOD_LENGTH);
2200 JS_STATIC_ASSERT(JSOP_INITPROP_LENGTH == JSOP_INITMETHOD_LENGTH);
2202 /* See TRY_BRANCH_AFTER_COND. */
2203 JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH);
2204 JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1);
2206 /* For the fastest case under JSOP_INCNAME, etc. */
2207 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH);
2208 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH);
2209 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH);
2211 #ifdef JS_TRACER
2212 # define ABORT_RECORDING(cx, reason) \
2213 JS_BEGIN_MACRO \
2214 if (TRACE_RECORDER(cx)) \
2215 AbortRecording(cx, reason); \
2216 JS_END_MACRO
2217 #else
2218 # define ABORT_RECORDING(cx, reason) ((void) 0)
2219 #endif
2221 /*
2222 * Inline fast paths for iteration. js_IteratorMore and js_IteratorNext handle
2223 * all cases, but we inline the most frequently taken paths here.
2224 */
2225 static inline bool
2226 IteratorMore(JSContext *cx, JSObject *iterobj, JSBool *cond, jsval *rval)
2227 {
2228 if (iterobj->getClass() == &js_IteratorClass.base) {
2229 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
2230 *cond = (ni->props_cursor < ni->props_end);
2231 } else {
2232 if (!js_IteratorMore(cx, iterobj, rval))
2233 return false;
2234 *cond = (*rval == JSVAL_TRUE);
2235 }
2236 return true;
2237 }
2239 static inline bool
2240 IteratorNext(JSContext *cx, JSObject *iterobj, jsval *rval)
2241 {
2242 if (iterobj->getClass() == &js_IteratorClass.base) {
2243 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
2244 JS_ASSERT(ni->props_cursor < ni->props_end);
2245 *rval = *ni->props_cursor;
2246 if (JSVAL_IS_STRING(*rval) || (ni->flags & JSITER_FOREACH)) {
2247 ni->props_cursor++;
2248 return true;
2249 }
2250 /* Take the slow path if we have to stringify a numeric property name. */
2251 }
2252 return js_IteratorNext(cx, iterobj, rval);
2253 }
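/*
 * Illustrative sketch of how a caller would drive these fast paths. The real
 * consumers are the iteration opcode cases in jsops.cpp; the function and
 * variable names below (ToyForInStep, itemval, done) are assumptions made
 * here for the example only.
 */
#if 0
static JSBool
ToyForInStep(JSContext *cx, JSObject *iterobj, jsval *itemval, JSBool *done)
{
    JSBool cond;
    jsval rval = JSVAL_VOID;

    /* Ask the iterator whether another property remains. */
    if (!IteratorMore(cx, iterobj, &cond, &rval))
        return JS_FALSE;
    if (!cond) {
        *done = JS_TRUE;
        return JS_TRUE;
    }

    /* Fetch the next property name (may fall back to js_IteratorNext). */
    if (!IteratorNext(cx, iterobj, itemval))
        return JS_FALSE;
    *done = JS_FALSE;
    return JS_TRUE;
}
#endif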
2255 JS_REQUIRES_STACK JSBool
2256 js_Interpret(JSContext *cx)
2257 {
2258 #ifdef MOZ_TRACEVIS
2259 TraceVisStateObj tvso(cx, S_INTERP);
2260 #endif
2262 JSRuntime *rt;
2263 JSStackFrame *fp;
2264 JSScript *script;
2265 uintN inlineCallCount;
2266 JSAtom **atoms;
2267 JSVersion currentVersion, originalVersion;
2268 JSFrameRegs regs;
2269 JSObject *obj, *obj2, *parent;
2270 JSBool ok, cond;
2271 jsint len;
2272 jsbytecode *endpc, *pc2;
2273 JSOp op, op2;
2274 jsatomid index;
2275 JSAtom *atom;
2276 uintN argc, attrs, flags;
2277 uint32 slot;
2278 jsval *vp, lval, rval, ltmp, rtmp;
2279 jsid id;
2280 JSProperty *prop;
2281 JSScopeProperty *sprop;
2282 JSString *str, *str2;
2283 int32_t i, j;
2284 jsdouble d, d2;
2285 JSClass *clasp;
2286 JSFunction *fun;
2287 JSType type;
2288 jsint low, high, off, npairs;
2289 JSBool match;
2290 JSPropertyOp getter, setter;
2291 JSAutoResolveFlags rf(cx, JSRESOLVE_INFER);
2293 # ifdef DEBUG
2294 /*
2295 * We call this macro from BEGIN_CASE in threaded interpreters,
2296 * and before entering the switch in non-threaded interpreters.
2297 * However, reaching such points doesn't mean we've actually
2298 * fetched an OP from the instruction stream: some opcodes use
2299 * 'op=x; DO_OP()' to let another opcode's implementation finish
2300 * their work, and many opcodes share entry points with a run of
2301 * consecutive BEGIN_CASEs.
2303 * Take care to trace OP only when it is the opcode fetched from
2304 * the instruction stream, so the trace matches what one would
2305 * expect from looking at the code. (We do omit POPs after SETs;
2306 * unfortunate, but not worth fixing.)
2307 */
2308 # define TRACE_OPCODE(OP) JS_BEGIN_MACRO \
2309 if (JS_UNLIKELY(cx->tracefp != NULL) && \
2310 (OP) == *regs.pc) \
2311 js_TraceOpcode(cx); \
2312 JS_END_MACRO
2313 # else
2314 # define TRACE_OPCODE(OP) ((void) 0)
2315 # endif
2317 #if JS_THREADED_INTERP
2318 static void *const normalJumpTable[] = {
2319 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2320 JS_EXTENSION &&L_##op,
2321 # include "jsopcode.tbl"
2322 # undef OPDEF
2323 };
2325 static void *const interruptJumpTable[] = {
2326 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2327 JS_EXTENSION &&interrupt,
2328 # include "jsopcode.tbl"
2329 # undef OPDEF
2330 };
2332 register void * const *jumpTable = normalJumpTable;
2334 METER_OP_INIT(op); /* to nullify first METER_OP_PAIR */
2336 # define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable))
2338 # ifdef JS_TRACER
2339 # define CHECK_RECORDER() \
2340 JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable)
2341 # else
2342 # define CHECK_RECORDER() ((void)0)
2343 # endif
2345 # define DO_OP() JS_BEGIN_MACRO \
2346 CHECK_RECORDER(); \
2347 JS_EXTENSION_(goto *jumpTable[op]); \
2348 JS_END_MACRO
2349 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2350 METER_OP_PAIR(op, JSOp(regs.pc[n])); \
2351 op = (JSOp) *(regs.pc += (n)); \
2352 METER_REPR(fp); \
2353 DO_OP(); \
2354 JS_END_MACRO
2356 # define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER();
2357 # define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH);
2358 # define END_VARLEN_CASE DO_NEXT_OP(len);
2359 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \
2360 JS_ASSERT(js_CodeSpec[OP].length == 1); \
2361 op = (JSOp) *++regs.pc; \
2362 DO_OP();
2364 # define END_EMPTY_CASES
2366 #else /* !JS_THREADED_INTERP */
2368 register intN switchMask = 0;
2369 intN switchOp;
2371 # define ENABLE_INTERRUPTS() ((void) (switchMask = -1))
2373 # ifdef JS_TRACER
2374 # define CHECK_RECORDER() \
2375 JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1)
2376 # else
2377 # define CHECK_RECORDER() ((void)0)
2378 # endif
2380 # define DO_OP() goto do_op
2381 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2382 JS_ASSERT((n) == len); \
2383 goto advance_pc; \
2384 JS_END_MACRO
2386 # define BEGIN_CASE(OP) case OP: CHECK_RECORDER();
2387 # define END_CASE(OP) END_CASE_LEN(OP##_LENGTH)
2388 # define END_CASE_LEN(n) END_CASE_LENX(n)
2389 # define END_CASE_LENX(n) END_CASE_LEN##n
2391 /*
2392 * To share the code for all len == 1 cases we use the specialized label with
2393 * code that falls through to advance_pc: .
2394 */
2395 # define END_CASE_LEN1 goto advance_pc_by_one;
2396 # define END_CASE_LEN2 len = 2; goto advance_pc;
2397 # define END_CASE_LEN3 len = 3; goto advance_pc;
2398 # define END_CASE_LEN4 len = 4; goto advance_pc;
2399 # define END_CASE_LEN5 len = 5; goto advance_pc;
2400 # define END_VARLEN_CASE goto advance_pc;
2401 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
2402 # define END_EMPTY_CASES goto advance_pc_by_one;
2404 #endif /* !JS_THREADED_INTERP */
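/*
 * Illustrative sketch (added for clarity) of what a one-byte opcode case
 * looks like once the macros above are expanded. JSOP_POP is used as the
 * example and the expansion is simplified (metering and assertions omitted),
 * so treat it as an approximation rather than the exact preprocessor output.
 *
 * Threaded (JS_THREADED_INTERP):
 *
 *   L_JSOP_POP: TRACE_OPCODE(JSOP_POP); CHECK_RECORDER();
 *       regs.sp--;
 *       op = (JSOp) *(regs.pc += JSOP_POP_LENGTH);    // DO_NEXT_OP
 *       JS_EXTENSION_(goto *jumpTable[op]);           // DO_OP
 *
 * Switch-based:
 *
 *   case JSOP_POP: CHECK_RECORDER();
 *       regs.sp--;
 *       goto advance_pc_by_one;                       // END_CASE, length 1
 */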
2406 /* Check for too deep of a native thread stack. */
2407 JS_CHECK_RECURSION(cx, return JS_FALSE);
2409 rt = cx->runtime;
2411 /* Set registerized frame pointer and derived script pointer. */
2412 fp = cx->fp;
2413 script = fp->script;
2414 JS_ASSERT(!script->isEmpty());
2415 JS_ASSERT(script->length > 1);
2417 /* Count of JS function calls that nest in this C js_Interpret frame. */
2418 inlineCallCount = 0;
2420 /*
2421 * Initialize the index segment register used by LOAD_ATOM and
2422 * GET_FULL_INDEX macros below. As a register we use a pointer based on
2423 * the atom map to turn frequently executed LOAD_ATOM into simple array
2424 * access. For less frequent object and regexp loads we have to recover
2425 * the segment from atoms pointer first.
2426 */
2427 atoms = script->atomMap.vector;
2429 #define LOAD_ATOM(PCOFF) \
2430 JS_BEGIN_MACRO \
2431 JS_ASSERT(fp->imacpc \
2432 ? atoms == COMMON_ATOMS_START(&rt->atomState) && \
2433 GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \
2434 : (size_t)(atoms - script->atomMap.vector) < \
2435 (size_t)(script->atomMap.length - \
2436 GET_INDEX(regs.pc + PCOFF))); \
2437 atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \
2438 JS_END_MACRO
2440 #define GET_FULL_INDEX(PCOFF) \
2441 (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
2443 #define LOAD_OBJECT(PCOFF) \
2444 (obj = script->getObject(GET_FULL_INDEX(PCOFF)))
2446 #define LOAD_FUNCTION(PCOFF) \
2447 (fun = script->getFunction(GET_FULL_INDEX(PCOFF)))
2449 #ifdef JS_TRACER
2451 #ifdef MOZ_TRACEVIS
2452 #if JS_THREADED_INTERP
2453 #define MONITOR_BRANCH_TRACEVIS \
2454 JS_BEGIN_MACRO \
2455 if (jumpTable != interruptJumpTable) \
2456 EnterTraceVisState(cx, S_RECORD, R_NONE); \
2457 JS_END_MACRO
2458 #else /* !JS_THREADED_INTERP */
2459 #define MONITOR_BRANCH_TRACEVIS \
2460 JS_BEGIN_MACRO \
2461 EnterTraceVisState(cx, S_RECORD, R_NONE); \
2462 JS_END_MACRO
2463 #endif
2464 #else
2465 #define MONITOR_BRANCH_TRACEVIS
2466 #endif
2468 #define RESTORE_INTERP_VARS() \
2469 JS_BEGIN_MACRO \
2470 fp = cx->fp; \
2471 script = fp->script; \
2472 atoms = FrameAtomBase(cx, fp); \
2473 currentVersion = (JSVersion) script->version; \
2474 JS_ASSERT(fp->regs == &regs); \
2475 JS_END_MACRO
2477 #define MONITOR_BRANCH(reason) \
2478 JS_BEGIN_MACRO \
2479 if (TRACING_ENABLED(cx)) { \
2480 MonitorResult r = MonitorLoopEdge(cx, inlineCallCount, reason); \
2481 if (r == MONITOR_RECORDING) { \
2482 JS_ASSERT(TRACE_RECORDER(cx)); \
2483 MONITOR_BRANCH_TRACEVIS; \
2484 ENABLE_INTERRUPTS(); \
2485 } \
2486 RESTORE_INTERP_VARS(); \
2487 JS_ASSERT_IF(cx->throwing, r == MONITOR_ERROR); \
2488 if (r == MONITOR_ERROR) \
2489 goto error; \
2490 } \
2491 JS_END_MACRO
2493 #else /* !JS_TRACER */
2495 #define MONITOR_BRANCH(reason) ((void) 0)
2497 #endif /* !JS_TRACER */
2499 /*
2500 * Prepare to call a user-supplied branch handler, and abort the script
2501 * if it returns false.
2502 */
2503 #define CHECK_BRANCH() \
2504 JS_BEGIN_MACRO \
2505 if (!JS_CHECK_OPERATION_LIMIT(cx)) \
2506 goto error; \
2507 JS_END_MACRO
2509 #ifndef TRACE_RECORDER
2510 #define TRACE_RECORDER(cx) (false)
2511 #endif
2513 #define BRANCH(n) \
2514 JS_BEGIN_MACRO \
2515 regs.pc += (n); \
2516 op = (JSOp) *regs.pc; \
2517 if ((n) <= 0) { \
2518 CHECK_BRANCH(); \
2519 if (op == JSOP_NOP) { \
2520 if (TRACE_RECORDER(cx)) { \
2521 MONITOR_BRANCH(Record_Branch); \
2522 op = (JSOp) *regs.pc; \
2523 } else { \
2524 op = (JSOp) *++regs.pc; \
2525 } \
2526 } else if (op == JSOP_TRACE) { \
2527 MONITOR_BRANCH(Record_Branch); \
2528 op = (JSOp) *regs.pc; \
2529 } \
2530 } \
2531 DO_OP(); \
2532 JS_END_MACRO
2534 MUST_FLOW_THROUGH("exit");
2535 ++cx->interpLevel;
2537 /*
2538 * Optimized Get and SetVersion for proper script language versioning.
2540 * If any native method or JSClass/JSObjectOps hook calls js_SetVersion
2541 * and changes cx->version, the effect will "stick" and we will stop
2542 * maintaining currentVersion. This is relied upon by testsuites, for
2543 * the most part -- web browsers select version before compiling and not
2544 * at run-time.
2545 */
2546 currentVersion = (JSVersion) script->version;
2547 originalVersion = (JSVersion) cx->version;
2548 if (currentVersion != originalVersion)
2549 js_SetVersion(cx, currentVersion);
2551 /* Update the static-link display. */
2552 if (script->staticLevel < JS_DISPLAY_SIZE) {
2553 JSStackFrame **disp = &cx->display[script->staticLevel];
2554 fp->displaySave = *disp;
2555 *disp = fp;
2556 }
2558 # define CHECK_INTERRUPT_HANDLER() \
2559 JS_BEGIN_MACRO \
2560 if (cx->debugHooks->interruptHook) \
2561 ENABLE_INTERRUPTS(); \
2562 JS_END_MACRO
2564 /*
2565 * Load the debugger's interrupt hook here and after calling out to native
2566 * functions (but not to getters, setters, or other native hooks), so we do
2567 * not have to reload it each time through the interpreter loop -- we hope
2568 * the compiler can keep it in a register when it is non-null.
2569 */
2570 CHECK_INTERRUPT_HANDLER();
2572 #if !JS_HAS_GENERATORS
2573 JS_ASSERT(!fp->regs);
2574 #else
2575 /* Initialize the pc and sp registers unless we're resuming a generator. */
2576 if (JS_LIKELY(!fp->regs)) {
2577 #endif
2578 ASSERT_NOT_THROWING(cx);
2579 regs.pc = script->code;
2580 regs.sp = StackBase(fp);
2581 fp->regs = &regs;
2582 #if JS_HAS_GENERATORS
2583 } else {
2584 JSGenerator *gen;
2586 JS_ASSERT(fp->flags & JSFRAME_GENERATOR);
2587 gen = FRAME_TO_GENERATOR(fp);
2588 JS_ASSERT(fp->regs == &gen->savedRegs);
2589 regs = gen->savedRegs;
2590 fp->regs = &regs;
2591 JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
2592 JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script));
2594 /*
2595 * To support generator_throw and to catch ignored exceptions,
2596 * fail if cx->throwing is set.
2597 */
2598 if (cx->throwing) {
2599 #ifdef DEBUG_NOT_THROWING
2600 if (cx->exception != JSVAL_ARETURN) {
2601 printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n",
2602 (unsigned long) cx->exception);
2603 }
2604 #endif
2605 goto error;
2606 }
2607 }
2608 #endif /* JS_HAS_GENERATORS */
2610 #ifdef JS_TRACER
2611 /*
2612 * We cannot reenter the interpreter while recording; wait to abort until
2613 * after cx->fp->regs is set.
2614 */
2615 if (TRACE_RECORDER(cx))
2616 AbortRecording(cx, "attempt to reenter interpreter while recording");
2617 #endif
2619 /*
2620 * It is important that "op" be initialized before calling DO_OP because
2621 * it is possible for "op" to be specially assigned during the normal
2622 * processing of an opcode while looping. We rely on DO_NEXT_OP to manage
2623 * "op" correctly in all other cases.
2624 */
2625 len = 0;
2626 DO_NEXT_OP(len);
2628 #if JS_THREADED_INTERP
2629 /*
2630 * This is a loop, but it does not look like a loop. The loop-closing
2631 * jump is distributed throughout goto *jumpTable[op] inside of DO_OP.
2632 * When interrupts are enabled, jumpTable is set to interruptJumpTable
2633 * where all jumps point to the interrupt label. The latter, after
2634 * calling the interrupt handler, dispatches through normalJumpTable to
2635 * continue the normal bytecode processing.
2636 */
2638 #else /* !JS_THREADED_INTERP */
2639 for (;;) {
2640 advance_pc_by_one:
2641 JS_ASSERT(js_CodeSpec[op].length == 1);
2642 len = 1;
2643 advance_pc:
2644 regs.pc += len;
2645 op = (JSOp) *regs.pc;
2647 do_op:
2648 CHECK_RECORDER();
2649 TRACE_OPCODE(op);
2650 switchOp = intN(op) | switchMask;
2651 do_switch:
2652 switch (switchOp) {
2653 #endif
2655 /********************** Here we include the operations ***********************/
2656 #include "jsops.cpp"
2657 /*****************************************************************************/
2659 #if !JS_THREADED_INTERP
2660 default:
2661 #endif
2662 #ifndef JS_TRACER
2663 bad_opcode:
2664 #endif
2665 {
2666 char numBuf[12];
2667 JS_snprintf(numBuf, sizeof numBuf, "%d", op);
2668 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
2669 JSMSG_BAD_BYTECODE, numBuf);
2670 goto error;
2671 }
2673 #if !JS_THREADED_INTERP
2674 } /* switch (op) */
2675 } /* for (;;) */
2676 #endif /* !JS_THREADED_INTERP */
2678 error:
2679 #ifdef JS_TRACER
2680 if (fp->imacpc && cx->throwing) {
2681 // Handle other exceptions as if they came from the imacro-calling pc.
2682 regs.pc = fp->imacpc;
2683 fp->imacpc = NULL;
2684 atoms = script->atomMap.vector;
2685 }
2686 #endif
2688 JS_ASSERT((size_t)((fp->imacpc ? fp->imacpc : regs.pc) - script->code) < script->length);
2690 #ifdef JS_TRACER
2691 /*
2692 * This abort could be weakened to permit tracing through exceptions that
2693 * are thrown and caught within a loop, with the co-operation of the tracer.
2694 * For now just bail on any sign of trouble.
2695 */
2696 if (TRACE_RECORDER(cx))
2697 AbortRecording(cx, "error or exception while recording");
2698 #endif
2700 if (!cx->throwing) {
2701 /* This is an error, not a catchable exception, quit the frame ASAP. */
2702 ok = JS_FALSE;
2703 } else {
2704 JSThrowHook handler;
2705 JSTryNote *tn, *tnlimit;
2706 uint32 offset;
2708 /* Call debugger throw hook if set. */
2709 handler = cx->debugHooks->throwHook;
2710 if (handler) {
2711 switch (handler(cx, script, regs.pc, &rval,
2712 cx->debugHooks->throwHookData)) {
2713 case JSTRAP_ERROR:
2714 cx->throwing = JS_FALSE;
2715 goto error;
2716 case JSTRAP_RETURN:
2717 cx->throwing = JS_FALSE;
2718 fp->rval = rval;
2719 ok = JS_TRUE;
2720 goto forced_return;
2721 case JSTRAP_THROW:
2722 cx->exception = rval;
2723 case JSTRAP_CONTINUE:
2724 default:;
2725 }
2726 CHECK_INTERRUPT_HANDLER();
2727 }
2729 /*
2730 * Look for a try block in script that can catch this exception.
2731 */
2732 if (script->trynotesOffset == 0)
2733 goto no_catch;
2735 offset = (uint32)(regs.pc - script->main);
2736 tn = script->trynotes()->vector;
2737 tnlimit = tn + script->trynotes()->length;
2738 do {
2739 if (offset - tn->start >= tn->length)
2740 continue;
2742 /*
2743 * We have a note that covers the exception pc but we must check
2744 * whether the interpreter has already executed the corresponding
2745 * handler. This is possible when the executed bytecode
2746 * implements break or return from inside a for-in loop.
2748 * In this case the emitter generates additional [enditer] and
2749 * [gosub] opcodes to close all outstanding iterators and execute
2750 * the finally blocks. If such an [enditer] throws an exception,
2751 * its pc can still be inside several nested for-in loops and
2752 * try-finally statements even if we have already closed the
2753 * corresponding iterators and invoked the finally blocks.
2755 * To address this, we make [enditer] always decrease the stack
2756 * even when its implementation throws an exception. Thus already
2757 * executed [enditer] and [gosub] opcodes will have try notes
2758 * with the stack depth exceeding the current one and this
2759 * condition is what we use to filter them out.
2760 */
2761 if (tn->stackDepth > regs.sp - StackBase(fp))
2762 continue;
2764 /*
2765 * Set pc to the first bytecode after the try note to point
2766 * to the beginning of catch or finally or to [enditer] closing
2767 * the for-in loop.
2768 */
2769 regs.pc = (script)->main + tn->start + tn->length;
2771 ok = js_UnwindScope(cx, fp, tn->stackDepth, JS_TRUE);
2772 JS_ASSERT(fp->regs->sp == StackBase(fp) + tn->stackDepth);
2773 if (!ok) {
2774 /*
2775 * Restart the handler search with updated pc and stack depth
2776 * to properly notify the debugger.
2777 */
2778 goto error;
2779 }
2781 switch (tn->kind) {
2782 case JSTRY_CATCH:
2783 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENTERBLOCK);
2785 #if JS_HAS_GENERATORS
2786 /* Catch cannot intercept the closing of a generator. */
2787 if (JS_UNLIKELY(cx->exception == JSVAL_ARETURN))
2788 break;
2789 #endif
2791 /*
2792 * Don't clear cx->throwing to save cx->exception from GC
2793 * until it is pushed to the stack via [exception] in the
2794 * catch block.
2795 */
2796 len = 0;
2797 DO_NEXT_OP(len);
2799 case JSTRY_FINALLY:
2800 /*
2801 * Push (true, exception) pair for finally to indicate that
2802 * [retsub] should rethrow the exception.
2803 */
2804 PUSH(JSVAL_TRUE);
2805 PUSH(cx->exception);
2806 cx->throwing = JS_FALSE;
2807 len = 0;
2808 DO_NEXT_OP(len);
2810 case JSTRY_ITER: {
2811 /* This is similar to JSOP_ENDITER in the interpreter loop. */
2812 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENDITER);
2813 AutoValueRooter tvr(cx, cx->exception);
2814 cx->throwing = false;
2815 ok = js_CloseIterator(cx, regs.sp[-1]);
2816 regs.sp -= 1;
2817 if (!ok)
2818 goto error;
2819 cx->throwing = true;
2820 cx->exception = tvr.value();
2821 }
2822 }
2823 } while (++tn != tnlimit);
2825 no_catch:
2826 /*
2827 * Propagate the exception or error to the caller unless the exception
2828 * is an asynchronous return from a generator.
2829 */
2830 ok = JS_FALSE;
2831 #if JS_HAS_GENERATORS
2832 if (JS_UNLIKELY(cx->throwing && cx->exception == JSVAL_ARETURN)) {
2833 cx->throwing = JS_FALSE;
2834 ok = JS_TRUE;
2835 fp->rval = JSVAL_VOID;
2836 }
2837 #endif
2838 }
2840 forced_return:
2841 /*
2842 * Unwind the scope making sure that ok stays false even when UnwindScope
2843 * returns true.
2844 *
2845 * When a trap handler returns JSTRAP_RETURN, we jump here with ok set to
2846 * true bypassing any finally blocks.
2847 */
2848 ok &= js_UnwindScope(cx, fp, 0, ok || cx->throwing);
2849 JS_ASSERT(regs.sp == StackBase(fp));
2851 #ifdef DEBUG
2852 cx->tracePrevPc = NULL;
2853 #endif
2855 if (inlineCallCount)
2856 goto inline_return;
2858 exit:
2859 /*
2860 * At this point we are inevitably leaving an interpreted function or a
2861 * top-level script, and returning to one of:
2862 * (a) an "out of line" call made through js_Invoke;
2863 * (b) a js_Execute activation;
2864 * (c) a generator (SendToGenerator, jsiter.c).
2865 *
2866 * We must not be in an inline frame. The check above ensures that for the
2867 * error case and for a normal return, the code jumps directly to parent's
2868 * frame pc.
2869 */
2870 JS_ASSERT(inlineCallCount == 0);
2871 JS_ASSERT(fp->regs == &regs);
2872 #ifdef JS_TRACER
2873 if (TRACE_RECORDER(cx))
2874 AbortRecording(cx, "recording out of js_Interpret");
2875 #endif
2876 #if JS_HAS_GENERATORS
2877 if (JS_UNLIKELY(fp->flags & JSFRAME_YIELDING)) {
2878 JSGenerator *gen;
2880 gen = FRAME_TO_GENERATOR(fp);
2881 gen->savedRegs = regs;
2882 gen->frame.regs = &gen->savedRegs;
2883 } else
2884 #endif /* JS_HAS_GENERATORS */
2885 {
2886 JS_ASSERT(!fp->blockChain);
2887 JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0));
2888 fp->regs = NULL;
2889 }
2891 /* Undo the remaining effects committed on entry to js_Interpret. */
2892 if (script->staticLevel < JS_DISPLAY_SIZE)
2893 cx->display[script->staticLevel] = fp->displaySave;
2894 if (cx->version == currentVersion && currentVersion != originalVersion)
2895 js_SetVersion(cx, originalVersion);
2896 --cx->interpLevel;
2898 return ok;
2900 atom_not_defined:
2901 {
2902 const char *printable;
2904 printable = js_AtomToPrintableString(cx, atom);
2905 if (printable)
2906 js_ReportIsNotDefined(cx, printable);
2907 goto error;
2908 }
2910 }
2911 #endif /* !defined jsinvoke_cpp___ */