Bug 505523 - Property cache can skip JSClass::resolve or JSClass::addProperty hooks...
[mozilla-central.git] js/src/jsinterp.cpp
blob 5893026231286d958b5f691696c43481ecb5f2b7
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
25 * Contributor(s):
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JavaScript bytecode interpreter.
44 #include <stdio.h>
45 #include <string.h>
46 #include <math.h>
47 #include "jstypes.h"
48 #include "jsstdint.h"
49 #include "jsarena.h" /* Added by JSIFY */
50 #include "jsutil.h" /* Added by JSIFY */
51 #include "jsprf.h"
52 #include "jsapi.h"
53 #include "jsarray.h"
54 #include "jsatom.h"
55 #include "jsbool.h"
56 #include "jscntxt.h"
57 #include "jsdate.h"
58 #include "jsversion.h"
59 #include "jsdbgapi.h"
60 #include "jsfun.h"
61 #include "jsgc.h"
62 #include "jsinterp.h"
63 #include "jsiter.h"
64 #include "jslock.h"
65 #include "jsnum.h"
66 #include "jsobj.h"
67 #include "jsopcode.h"
68 #include "jsscan.h"
69 #include "jsscope.h"
70 #include "jsscript.h"
71 #include "jsstr.h"
72 #include "jsstaticcheck.h"
73 #include "jstracer.h"
74 #include "jslibmath.h"
75 #include "jsvector.h"
77 #include "jsatominlines.h"
78 #include "jsscopeinlines.h"
79 #include "jsscriptinlines.h"
80 #include "jsstrinlines.h"
82 #ifdef INCLUDE_MOZILLA_DTRACE
83 #include "jsdtracef.h"
84 #endif
86 #if JS_HAS_XML_SUPPORT
87 #include "jsxml.h"
88 #endif
90 #include "jsautooplen.h"
92 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
93 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
95 JS_REQUIRES_STACK JSPropCacheEntry *
96 js_FillPropertyCache(JSContext *cx, JSObject *obj,
97 uintN scopeIndex, uintN protoIndex, JSObject *pobj,
98 JSScopeProperty *sprop, JSBool adding)
100 JSPropertyCache *cache;
101 jsbytecode *pc;
102 JSScope *scope;
103 jsuword kshape, vshape, khash;
104 JSOp op;
105 const JSCodeSpec *cs;
106 jsuword vword;
107 ptrdiff_t pcoff;
108 JSAtom *atom;
109 JSPropCacheEntry *entry;
111 JS_ASSERT(!cx->runtime->gcRunning);
112 cache = &JS_PROPERTY_CACHE(cx);
114 /* FIXME bug 489098: consider enabling the property cache for eval. */
115 if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
116 PCMETER(cache->disfills++);
117 return JS_NO_PROP_CACHE_FILL;
121 * Check for fill from js_SetPropertyHelper where the setter removed sprop
122 * from pobj's scope (e.g., via unwatch or delete).
124 scope = OBJ_SCOPE(pobj);
125 if (!scope->has(sprop)) {
126 PCMETER(cache->oddfills++);
127 return JS_NO_PROP_CACHE_FILL;
131 * Check for overdeep scope and prototype chain. Because resolve, getter,
132 * and setter hooks can change the prototype chain using JS_SetPrototype
133 * after js_LookupPropertyWithFlags has returned the nominal protoIndex,
134 * we have to validate protoIndex if it is non-zero. If it is zero, then
135 * we know thanks to the scope->has test above, combined with the fact that
136 * obj == pobj, that protoIndex is invariant.
138 * The scopeIndex can't be wrong. We require JS_SetParent calls to happen
139 * before any running script might consult a parent-linked scope chain. If
140 * this requirement is not satisfied, the fill in progress will never hit,
141 * but vcap vs. scope shape tests ensure nothing malfunctions.
143 JS_ASSERT_IF(scopeIndex == 0 && protoIndex == 0, obj == pobj);
145 if (protoIndex != 0) {
146 JSObject *tmp = obj;
148 for (uintN i = 0; i != scopeIndex; i++)
149 tmp = OBJ_GET_PARENT(cx, tmp);
150 JS_ASSERT(tmp != pobj);
152 protoIndex = 1;
153 for (;;) {
154 tmp = OBJ_GET_PROTO(cx, tmp);
157 * We cannot cache properties coming from native objects behind
158 * non-native ones on the prototype chain. The non-natives can
159 * mutate in arbitrary ways without changing any shapes.
161 if (!tmp || !OBJ_IS_NATIVE(tmp)) {
162 PCMETER(cache->noprotos++);
163 return JS_NO_PROP_CACHE_FILL;
165 if (tmp == pobj)
166 break;
167 ++protoIndex;
171 if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) {
172 PCMETER(cache->longchains++);
173 return JS_NO_PROP_CACHE_FILL;
177 * Optimize the cached vword based on our parameters and the current pc's
178 * opcode format flags.
180 pc = cx->fp->regs->pc;
181 op = js_GetOpcode(cx, cx->fp->script, pc);
182 cs = &js_CodeSpec[op];
183 kshape = 0;
185 do {
187 * Check for a prototype "plain old method" callee computation. What
188 * is a plain old method? It's a function-valued property with stub
189 * getter, so getting the function value is idempotent.
191 if (cs->format & JOF_CALLOP) {
192 jsval v;
194 if (sprop->isMethod()) {
196 * A compiler-created function object, AKA a method, already
197 * memoized in the property tree.
199 JS_ASSERT(scope->hasMethodBarrier());
200 v = sprop->methodValue();
201 JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
202 JS_ASSERT(v == LOCKED_OBJ_GET_SLOT(pobj, sprop->slot));
203 vword = JSVAL_OBJECT_TO_PCVAL(v);
204 break;
207 if (SPROP_HAS_STUB_GETTER(sprop) &&
208 SPROP_HAS_VALID_SLOT(sprop, scope)) {
209 v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
210 if (VALUE_IS_FUNCTION(cx, v)) {
212 * Great, we have a function-valued prototype property
213 * where the getter is JS_PropertyStub. The type id in
214 * pobj's scope does not evolve with changes to property
215 * values, however.
217 * So here, on first cache fill for this method, we brand
218 * the scope with a new shape and set the JSScope::BRANDED
219 * flag. Once this flag is set, any property assignment
220 * that changes the value from or to a different function
221 * object will result in shape being regenerated.
223 if (!scope->branded()) {
224 PCMETER(cache->brandfills++);
225 #ifdef DEBUG_notme
226 fprintf(stderr,
227 "branding %p (%s) for funobj %p (%s), shape %lu\n",
228 pobj, pobj->getClass()->name,
229 JSVAL_TO_OBJECT(v),
230 JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v))),
231 OBJ_SHAPE(obj));
232 #endif
233 scope->brandingShapeChange(cx, sprop->slot, v);
234 if (js_IsPropertyCacheDisabled(cx)) /* check for rt->shapeGen overflow */
235 return JS_NO_PROP_CACHE_FILL;
236 scope->setBranded();
238 vword = JSVAL_OBJECT_TO_PCVAL(v);
239 break;
244 /* If getting a value via a stub getter, we can cache the slot. */
245 if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) &&
246 SPROP_HAS_STUB_GETTER(sprop) &&
247 SPROP_HAS_VALID_SLOT(sprop, scope)) {
248 /* Great, let's cache sprop's slot and use it on cache hit. */
249 vword = SLOT_TO_PCVAL(sprop->slot);
250 } else {
251 /* Best we can do is to cache sprop (still a nice speedup). */
252 vword = SPROP_TO_PCVAL(sprop);
253 if (adding &&
254 sprop == scope->lastProp &&
255 scope->shape == sprop->shape) {
257 * Our caller added a new property. We also know that any setter
258 * js_NativeSet might have run has not mutated the scope,
259 * so the added property is still the last one added, and the
260 * scope is not branded.
262 * We want to cache under scope's shape before the property
263 * addition to bias for the case when the mutator opcode
264 * always adds the same property. This allows us to optimize
265 * periodic execution of object initializers or other explicit
266 * initialization sequences such as
268 * obj = {}; obj.x = 1; obj.y = 2;
270 * We assume that on average the win from this optimization is
271 * greater than the cost of an extra mismatch per loop owing to
272 * the bias for the following case:
274 * obj = {}; ... for (...) { ... obj.x = ... }
276 * On the first iteration of such a for loop, JSOP_SETPROP
277 * fills the cache with the shape of the newly created object
278 * obj, not the shape of obj after obj.x has been assigned.
279 * That mismatches obj's shape on the second iteration. Note
280 * that on the third and subsequent iterations the cache will
281 * be hit because the shape is no longer updated.
283 JS_ASSERT(scope->owned());
284 if (sprop->parent) {
285 kshape = sprop->parent->shape;
286 } else {
288 * If obj had its own empty scope before, with a unique
289 * shape, that is lost. Here we only attempt to find a
290 * matching empty scope. In unusual cases involving
291 * __proto__ assignment we may not find one.
293 JSObject *proto = STOBJ_GET_PROTO(obj);
294 if (!proto || !OBJ_IS_NATIVE(proto))
295 return JS_NO_PROP_CACHE_FILL;
296 JSScope *protoscope = OBJ_SCOPE(proto);
297 if (!protoscope->emptyScope ||
298 protoscope->emptyScope->clasp != obj->getClass()) {
299 return JS_NO_PROP_CACHE_FILL;
301 kshape = protoscope->emptyScope->shape;
305 * When adding we predict no prototype object will later gain a
306 * readonly property or setter.
308 vshape = cx->runtime->protoHazardShape;
311 } while (0);
313 if (kshape == 0) {
314 kshape = OBJ_SHAPE(obj);
315 vshape = scope->shape;
317 JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT);
319 khash = PROPERTY_CACHE_HASH_PC(pc, kshape);
320 if (obj == pobj) {
321 JS_ASSERT(scopeIndex == 0 && protoIndex == 0);
322 } else {
323 if (op == JSOP_LENGTH) {
324 atom = cx->runtime->atomState.lengthAtom;
325 } else {
326 pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
327 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
330 #ifdef DEBUG
331 if (scopeIndex == 0) {
332 JS_ASSERT(protoIndex != 0);
333 JS_ASSERT((protoIndex == 1) == (OBJ_GET_PROTO(cx, obj) == pobj));
335 #endif
337 if (scopeIndex != 0 || protoIndex != 1) {
338 khash = PROPERTY_CACHE_HASH_ATOM(atom, obj);
339 PCMETER(if (PCVCAP_TAG(cache->table[khash].vcap) <= 1)
340 cache->pcrecycles++);
341 pc = (jsbytecode *) atom;
342 kshape = (jsuword) obj;
345 * Make sure that a later shadowing assignment will enter
346 * PurgeProtoChain and invalidate this entry, bug 479198.
348 * This is thread-safe even though obj is not locked. Only the
349 * DELEGATE bit of obj->classword can change at runtime, given that
350 * obj is native; and the bit is only set, never cleared. And on
351 * platforms where another CPU can fail to see this write, it's OK
352 * because the property cache and JIT cache are thread-local.
354 obj->setDelegate();
357 JS_ASSERT(vshape < SHAPE_OVERFLOW_BIT);
359 entry = &cache->table[khash];
360 PCMETER(PCVAL_IS_NULL(entry->vword) || cache->recycles++);
361 entry->kpc = pc;
362 entry->kshape = kshape;
363 entry->vcap = PCVCAP_MAKE(vshape, scopeIndex, protoIndex);
364 entry->vword = vword;
366 cache->empty = JS_FALSE;
367 PCMETER(cache->fills++);
370 * The modfills counter is not exact. It increases if a getter or setter
371 * recurses into the interpreter.
373 PCMETER(entry == cache->pctestentry || cache->modfills++);
374 PCMETER(cache->pctestentry = NULL);
375 return entry;
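/*
 * Summary of the entry layout produced above (a reading of this function,
 * not guaranteed by any other comment): an entry is keyed by (kpc, kshape),
 * normally the current pc and obj's shape, or (atom, obj) for scope- or
 * proto-chain hits, and carries vcap = PCVCAP_MAKE(vshape, scopeIndex,
 * protoIndex) plus a vword holding a slot number, a sprop pointer, or a
 * function object, depending on which fast path a hit may take.
 */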
378 JS_REQUIRES_STACK JSAtom *
379 js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc,
380 JSObject **objp, JSObject **pobjp,
381 JSPropCacheEntry **entryp)
383 JSOp op;
384 const JSCodeSpec *cs;
385 ptrdiff_t pcoff;
386 JSAtom *atom;
387 JSObject *obj, *pobj, *tmp;
388 JSPropCacheEntry *entry;
389 uint32 vcap;
391 JS_ASSERT(uintN((cx->fp->imacpc ? cx->fp->imacpc : pc) - cx->fp->script->code)
392 < cx->fp->script->length);
394 op = js_GetOpcode(cx, cx->fp->script, pc);
395 cs = &js_CodeSpec[op];
396 if (op == JSOP_LENGTH) {
397 atom = cx->runtime->atomState.lengthAtom;
398 } else {
399 pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
400 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
403 obj = *objp;
404 JS_ASSERT(OBJ_IS_NATIVE(obj));
405 entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_ATOM(atom, obj)];
406 *entryp = entry;
407 vcap = entry->vcap;
409 if (entry->kpc != (jsbytecode *) atom) {
410 PCMETER(JS_PROPERTY_CACHE(cx).idmisses++);
412 #ifdef DEBUG_notme
413 entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_PC(pc, OBJ_SHAPE(obj))];
414 fprintf(stderr,
415 "id miss for %s from %s:%u"
416 " (pc %u, kpc %u, kshape %u, shape %u)\n",
417 js_AtomToPrintableString(cx, atom),
418 cx->fp->script->filename,
419 js_PCToLineNumber(cx, cx->fp->script, pc),
420 pc - cx->fp->script->code,
421 entry->kpc - cx->fp->script->code,
422 entry->kshape,
423 OBJ_SHAPE(obj));
424 js_Disassemble1(cx, cx->fp->script, pc,
425 pc - cx->fp->script->code,
426 JS_FALSE, stderr);
427 #endif
429 return atom;
432 if (entry->kshape != (jsuword) obj) {
433 PCMETER(JS_PROPERTY_CACHE(cx).komisses++);
434 return atom;
437 pobj = obj;
439 if (JOF_MODE(cs->format) == JOF_NAME) {
440 while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) {
441 tmp = OBJ_GET_PARENT(cx, pobj);
442 if (!tmp || !OBJ_IS_NATIVE(tmp))
443 break;
444 pobj = tmp;
445 vcap -= PCVCAP_PROTOSIZE;
448 *objp = pobj;
451 while (vcap & PCVCAP_PROTOMASK) {
452 tmp = OBJ_GET_PROTO(cx, pobj);
453 if (!tmp || !OBJ_IS_NATIVE(tmp))
454 break;
455 pobj = tmp;
456 --vcap;
459 if (JS_LOCK_OBJ_IF_SHAPE(cx, pobj, PCVCAP_SHAPE(vcap))) {
460 #ifdef DEBUG
461 jsid id = ATOM_TO_JSID(atom);
463 id = js_CheckForStringIndex(id);
464 JS_ASSERT(OBJ_SCOPE(pobj)->lookup(id));
465 JS_ASSERT_IF(OBJ_SCOPE(pobj)->object, OBJ_SCOPE(pobj)->object == pobj);
466 #endif
467 *pobjp = pobj;
468 return NULL;
471 PCMETER(JS_PROPERTY_CACHE(cx).vcmisses++);
472 return atom;
475 #ifdef DEBUG
476 #define ASSERT_CACHE_IS_EMPTY(cache) \
477 JS_BEGIN_MACRO \
478 JSPropertyCache *cache_ = (cache); \
479 uintN i_; \
480 JS_ASSERT(cache_->empty); \
481 for (i_ = 0; i_ < PROPERTY_CACHE_SIZE; i_++) { \
482 JS_ASSERT(!cache_->table[i_].kpc); \
483 JS_ASSERT(!cache_->table[i_].kshape); \
484 JS_ASSERT(!cache_->table[i_].vcap); \
485 JS_ASSERT(!cache_->table[i_].vword); \
487 JS_END_MACRO
488 #else
489 #define ASSERT_CACHE_IS_EMPTY(cache) ((void)0)
490 #endif
492 JS_STATIC_ASSERT(PCVAL_NULL == 0);
494 void
495 js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache)
497 if (cache->empty) {
498 ASSERT_CACHE_IS_EMPTY(cache);
499 return;
502 memset(cache->table, 0, sizeof cache->table);
503 cache->empty = JS_TRUE;
505 #ifdef JS_PROPERTY_CACHE_METERING
506 { static FILE *fp;
507 if (!fp)
508 fp = fopen("/tmp/propcache.stats", "w");
509 if (fp) {
510 fputs("Property cache stats for ", fp);
511 #ifdef JS_THREADSAFE
512 fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
513 #endif
514 fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
516 # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)cache->mem)
517 P(fills);
518 P(nofills);
519 P(rofills);
520 P(disfills);
521 P(oddfills);
522 P(modfills);
523 P(brandfills);
524 P(noprotos);
525 P(longchains);
526 P(recycles);
527 P(pcrecycles);
528 P(tests);
529 P(pchits);
530 P(protopchits);
531 P(initests);
532 P(inipchits);
533 P(inipcmisses);
534 P(settests);
535 P(addpchits);
536 P(setpchits);
537 P(setpcmisses);
538 P(slotchanges);
539 P(setmisses);
540 P(idmisses);
541 P(komisses);
542 P(vcmisses);
543 P(misses);
544 P(flushes);
545 P(pcpurges);
546 # undef P
548 fprintf(fp, "hit rates: pc %g%% (proto %g%%), set %g%%, ini %g%%, full %g%%\n",
549 (100. * cache->pchits) / cache->tests,
550 (100. * cache->protopchits) / cache->tests,
551 (100. * (cache->addpchits + cache->setpchits))
552 / cache->settests,
553 (100. * cache->inipchits) / cache->initests,
554 (100. * (cache->tests - cache->misses)) / cache->tests);
555 fflush(fp);
558 #endif
560 PCMETER(cache->flushes++);
563 void
564 js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script)
566 JSPropertyCache *cache;
567 JSPropCacheEntry *entry;
569 cache = &JS_PROPERTY_CACHE(cx);
570 for (entry = cache->table; entry < cache->table + PROPERTY_CACHE_SIZE;
571 entry++) {
572 if (JS_UPTRDIFF(entry->kpc, script->code) < script->length) {
573 entry->kpc = NULL;
574 entry->kshape = 0;
575 #ifdef DEBUG
576 entry->vcap = entry->vword = 0;
577 #endif
583 * Check if the current arena has enough space to fit nslots after sp and, if
584 * so, reserve the necessary space.
586 static JS_REQUIRES_STACK JSBool
587 AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
589 uintN surplus;
590 jsval *sp2;
592 JS_ASSERT((jsval *) cx->stackPool.current->base <= sp);
593 JS_ASSERT(sp <= (jsval *) cx->stackPool.current->avail);
594 surplus = (jsval *) cx->stackPool.current->avail - sp;
595 if (nslots <= surplus)
596 return JS_TRUE;
599 * No room before current->avail; check whether the arena has enough space
600 * to fit the missing slots before the limit.
602 if (nslots > (size_t) ((jsval *) cx->stackPool.current->limit - sp))
603 return JS_FALSE;
605 JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool,
606 (nslots - surplus) * sizeof(jsval));
607 JS_ASSERT(sp2 == sp + surplus);
608 return JS_TRUE;
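/*
 * Illustrative usage, as in js_Invoke below: a caller passes its current sp
 * and the number of extra slots it needs; on JS_TRUE the arena's avail
 * pointer has been advanced (if necessary) so that sp[0..nslots) is usable.
 */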
611 JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval *
612 js_AllocRawStack(JSContext *cx, uintN nslots, void **markp)
614 jsval *sp;
616 JS_ASSERT(nslots != 0);
617 JS_ASSERT_NOT_ON_TRACE(cx);
619 if (!cx->stackPool.first.next) {
620 int64 *timestamp;
622 JS_ARENA_ALLOCATE_CAST(timestamp, int64 *,
623 &cx->stackPool, sizeof *timestamp);
624 if (!timestamp) {
625 js_ReportOutOfScriptQuota(cx);
626 return NULL;
628 *timestamp = JS_Now();
631 if (markp)
632 *markp = JS_ARENA_MARK(&cx->stackPool);
633 JS_ARENA_ALLOCATE_CAST(sp, jsval *, &cx->stackPool, nslots * sizeof(jsval));
634 if (!sp)
635 js_ReportOutOfScriptQuota(cx);
636 return sp;
639 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
640 js_FreeRawStack(JSContext *cx, void *mark)
642 JS_ARENA_RELEASE(&cx->stackPool, mark);
645 JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
646 js_AllocStack(JSContext *cx, uintN nslots, void **markp)
648 jsval *sp;
649 JSArena *a;
650 JSStackHeader *sh;
652 /* Callers don't check for zero nslots: we do to avoid empty segments. */
653 if (nslots == 0) {
654 *markp = NULL;
655 return (jsval *) JS_ARENA_MARK(&cx->stackPool);
658 /* Allocate 2 extra slots for the stack segment header we'll likely need. */
659 sp = js_AllocRawStack(cx, 2 + nslots, markp);
660 if (!sp)
661 return NULL;
663 /* Try to avoid another header if we can piggyback on the last segment. */
664 a = cx->stackPool.current;
665 sh = cx->stackHeaders;
666 if (sh && JS_STACK_SEGMENT(sh) + sh->nslots == sp) {
667 /* Extend the last stack segment, give back the 2 header slots. */
668 sh->nslots += nslots;
669 a->avail -= 2 * sizeof(jsval);
670 } else {
672 * Need a new stack segment, so allocate and push a stack segment
673 * header from the 2 extra slots.
675 sh = (JSStackHeader *)sp;
676 sh->nslots = nslots;
677 sh->down = cx->stackHeaders;
678 cx->stackHeaders = sh;
679 sp += 2;
683 * Store JSVAL_NULL using memset, to let compilers optimize as they see
684 * fit, in case a caller allocates and pushes GC-things one by one, which
685 * could nest a last-ditch GC that will scan this segment.
687 memset(sp, 0, nslots * sizeof(jsval));
688 return sp;
691 JS_REQUIRES_STACK JS_FRIEND_API(void)
692 js_FreeStack(JSContext *cx, void *mark)
694 JSStackHeader *sh;
695 jsuword slotdiff;
697 /* Check for zero nslots allocation special case. */
698 if (!mark)
699 return;
701 /* We can assert because js_FreeStack always balances js_AllocStack. */
702 sh = cx->stackHeaders;
703 JS_ASSERT(sh);
705 /* If mark is in the current segment, reduce sh->nslots, else pop sh. */
706 slotdiff = JS_UPTRDIFF(mark, JS_STACK_SEGMENT(sh)) / sizeof(jsval);
707 if (slotdiff < (jsuword)sh->nslots)
708 sh->nslots = slotdiff;
709 else
710 cx->stackHeaders = sh->down;
712 /* Release the stackPool space allocated since mark was set. */
713 JS_ARENA_RELEASE(&cx->stackPool, mark);
716 JSObject *
717 js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
719 JSObject *sharedBlock = fp->blockChain;
721 if (!sharedBlock) {
723 * Don't force a call object for a lightweight function call, but do
724 * insist that there is a call object for a heavyweight function call.
726 JS_ASSERT(!fp->fun ||
727 !(fp->fun->flags & JSFUN_HEAVYWEIGHT) ||
728 fp->callobj);
729 JS_ASSERT(fp->scopeChain);
730 return fp->scopeChain;
733 /* We don't handle cloning blocks on trace. */
734 js_LeaveTrace(cx);
737 * We have one or more lexical scopes to reflect into fp->scopeChain, so
738 * make sure there's a call object at the current head of the scope chain,
739 * if this frame is a call frame.
741 * Also, identify the innermost compiler-allocated block we needn't clone.
743 JSObject *limitBlock, *limitClone;
744 if (fp->fun && !fp->callobj) {
745 JS_ASSERT(OBJ_GET_CLASS(cx, fp->scopeChain) != &js_BlockClass ||
746 fp->scopeChain->getPrivate() != fp);
747 if (!js_GetCallObject(cx, fp))
748 return NULL;
750 /* We know we must clone everything on blockChain. */
751 limitBlock = limitClone = NULL;
752 } else {
754 * scopeChain includes all blocks whose static scope we're within that
755 * have already been cloned. Find the innermost such block. Its
756 * prototype should appear on blockChain; we'll clone blockChain up
757 * to, but not including, that prototype.
759 limitClone = fp->scopeChain;
760 while (OBJ_GET_CLASS(cx, limitClone) == &js_WithClass)
761 limitClone = OBJ_GET_PARENT(cx, limitClone);
762 JS_ASSERT(limitClone);
765 * It may seem like we don't know enough about limitClone to be able
766 * to just grab its prototype as we do here, but it's actually okay.
768 * If limitClone is a block object belonging to this frame, then its
769 * prototype is the innermost entry in blockChain that we have already
770 * cloned, and is thus the place to stop when we clone below.
772 * Otherwise, there are no blocks for this frame on scopeChain, and we
773 * need to clone the whole blockChain. In this case, limitBlock can
774 * point to any object known not to be on blockChain, since we simply
775 * loop until we hit limitBlock or NULL. If limitClone is a block, it
776 * isn't a block from this function, since blocks can't be nested
777 * within themselves on scopeChain (recursion is dynamic nesting, not
778 * static nesting). If limitClone isn't a block, its prototype won't
779 * be a block either. So we can just grab limitClone's prototype here
780 * regardless of its type or which frame it belongs to.
782 limitBlock = OBJ_GET_PROTO(cx, limitClone);
784 /* If the innermost block has already been cloned, we are done. */
785 if (limitBlock == sharedBlock)
786 return fp->scopeChain;
790 * Special-case cloning the innermost block; this doesn't have enough in
791 * common with subsequent steps to include in the loop.
793 * js_CloneBlockObject leaves the clone's parent slot uninitialized. We
794 * populate it below.
796 JSObject *innermostNewChild = js_CloneBlockObject(cx, sharedBlock, fp);
797 if (!innermostNewChild)
798 return NULL;
799 JSAutoTempValueRooter tvr(cx, innermostNewChild);
802 * Clone our way towards outer scopes until we reach the innermost
803 * enclosing function, or the innermost block we've already cloned.
805 JSObject *newChild = innermostNewChild;
806 for (;;) {
807 JS_ASSERT(OBJ_GET_PROTO(cx, newChild) == sharedBlock);
808 sharedBlock = OBJ_GET_PARENT(cx, sharedBlock);
810 /* Sometimes limitBlock will be NULL, so check that first. */
811 if (sharedBlock == limitBlock || !sharedBlock)
812 break;
814 /* As in the call above, we don't know the real parent yet. */
815 JSObject *clone
816 = js_CloneBlockObject(cx, sharedBlock, fp);
817 if (!clone)
818 return NULL;
821 * Avoid OBJ_SET_PARENT overhead as newChild cannot escape to
822 * other threads.
824 STOBJ_SET_PARENT(newChild, clone);
825 newChild = clone;
827 STOBJ_SET_PARENT(newChild, fp->scopeChain);
831 * If we found a limit block belonging to this frame, then we should have
832 * found it in blockChain.
834 JS_ASSERT_IF(limitBlock &&
835 OBJ_GET_CLASS(cx, limitBlock) == &js_BlockClass &&
836 limitClone->getPrivate() == fp,
837 sharedBlock);
839 /* Place our newly cloned blocks at the head of the scope chain. */
840 fp->scopeChain = innermostNewChild;
841 return fp->scopeChain;
844 JSBool
845 js_GetPrimitiveThis(JSContext *cx, jsval *vp, JSClass *clasp, jsval *thisvp)
847 jsval v;
848 JSObject *obj;
850 v = vp[1];
851 if (JSVAL_IS_OBJECT(v)) {
852 obj = JS_THIS_OBJECT(cx, vp);
853 if (!JS_InstanceOf(cx, obj, clasp, vp + 2))
854 return JS_FALSE;
855 v = obj->fslots[JSSLOT_PRIMITIVE_THIS];
857 *thisvp = v;
858 return JS_TRUE;
861 /* Some objects (e.g., With) delegate 'this' to another object. */
862 static inline JSObject *
863 CallThisObjectHook(JSContext *cx, JSObject *obj, jsval *argv)
865 JSObject *thisp = obj->thisObject(cx);
866 if (!thisp)
867 return NULL;
868 argv[-1] = OBJECT_TO_JSVAL(thisp);
869 return thisp;
873 * ECMA requires "the global object", but in embeddings such as the browser,
874 * which have multiple top-level objects (windows, frames, etc. in the DOM),
875 * we prefer fun's parent. An example that causes this code to run:
877 * // in window w1
878 * function f() { return this }
879 * function g() { return f }
881 * // in window w2
882 * var h = w1.g()
883 * alert(h() == w1)
885 * The alert should display "true".
887 JS_STATIC_INTERPRET JSObject *
888 js_ComputeGlobalThis(JSContext *cx, JSBool lazy, jsval *argv)
890 JSObject *thisp;
892 if (JSVAL_IS_PRIMITIVE(argv[-2]) ||
893 !OBJ_GET_PARENT(cx, JSVAL_TO_OBJECT(argv[-2]))) {
894 thisp = cx->globalObject;
895 } else {
896 JSStackFrame *fp;
897 jsid id;
898 jsval v;
899 uintN attrs;
900 JSBool ok;
901 JSObject *parent;
904 * Walk up the parent chain, first checking that the running script
905 * has access to the callee's parent object. Note that if lazy, the
906 * running script whose principals we want to check is the script
907 * associated with fp->down, not with fp.
909 * FIXME: 417851 -- this access check should not be required, as it
910 * imposes a performance penalty on all js_ComputeGlobalThis calls,
911 * and it represents a maintenance hazard.
913 fp = js_GetTopStackFrame(cx); /* quell GCC overwarning */
914 if (lazy) {
915 JS_ASSERT(fp->argv == argv);
916 fp->dormantNext = cx->dormantFrameChain;
917 cx->dormantFrameChain = fp;
918 cx->fp = fp->down;
919 fp->down = NULL;
921 thisp = JSVAL_TO_OBJECT(argv[-2]);
922 id = ATOM_TO_JSID(cx->runtime->atomState.parentAtom);
924 ok = thisp->checkAccess(cx, id, JSACC_PARENT, &v, &attrs);
925 if (lazy) {
926 cx->dormantFrameChain = fp->dormantNext;
927 fp->dormantNext = NULL;
928 fp->down = cx->fp;
929 cx->fp = fp;
931 if (!ok)
932 return NULL;
934 thisp = JSVAL_IS_VOID(v)
935 ? OBJ_GET_PARENT(cx, thisp)
936 : JSVAL_TO_OBJECT(v);
937 while ((parent = OBJ_GET_PARENT(cx, thisp)) != NULL)
938 thisp = parent;
941 return CallThisObjectHook(cx, thisp, argv);
944 static JSObject *
945 ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
947 JSObject *thisp;
949 JS_ASSERT(!JSVAL_IS_NULL(argv[-1]));
950 if (!JSVAL_IS_OBJECT(argv[-1])) {
951 if (!js_PrimitiveToObject(cx, &argv[-1]))
952 return NULL;
953 thisp = JSVAL_TO_OBJECT(argv[-1]);
954 return thisp;
957 thisp = JSVAL_TO_OBJECT(argv[-1]);
958 if (OBJ_GET_CLASS(cx, thisp) == &js_CallClass || OBJ_GET_CLASS(cx, thisp) == &js_BlockClass)
959 return js_ComputeGlobalThis(cx, lazy, argv);
961 return CallThisObjectHook(cx, thisp, argv);
964 JSObject *
965 js_ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
967 if (JSVAL_IS_NULL(argv[-1]))
968 return js_ComputeGlobalThis(cx, lazy, argv);
969 return ComputeThis(cx, lazy, argv);
972 #if JS_HAS_NO_SUCH_METHOD
974 const uint32 JSSLOT_FOUND_FUNCTION = JSSLOT_PRIVATE;
975 const uint32 JSSLOT_SAVED_ID = JSSLOT_PRIVATE + 1;
977 JSClass js_NoSuchMethodClass = {
978 "NoSuchMethod",
979 JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS,
980 JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub,
981 JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
982 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
986 * When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of
987 * the base object, we search for the __noSuchMethod__ method in the base.
988 * If it exists, we store the method and the property's id into an object of
989 * NoSuchMethod class and store this object into the callee's stack slot.
990 * Later, js_Invoke will recognise such an object and transfer control to
991 * NoSuchMethod, which invokes the method as:
993 * this.__noSuchMethod__(id, args)
995 * where id is the name of the method that this invocation attempted to
996 * call by name, and args is an Array containing this invocation's actual
997 * parameters.
999 JS_STATIC_INTERPRET JSBool
1000 js_OnUnknownMethod(JSContext *cx, jsval *vp)
1002 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1004 JSObject *obj = JSVAL_TO_OBJECT(vp[1]);
1005 jsid id = ATOM_TO_JSID(cx->runtime->atomState.noSuchMethodAtom);
1006 JSAutoTempValueRooter tvr(cx, JSVAL_NULL);
1007 if (!js_GetMethod(cx, obj, id, JSGET_NO_METHOD_BARRIER, tvr.addr()))
1008 return false;
1009 if (JSVAL_IS_PRIMITIVE(tvr.value())) {
1010 vp[0] = tvr.value();
1011 } else {
1012 #if JS_HAS_XML_SUPPORT
1013 /* Extract the function name from function::name qname. */
1014 if (!JSVAL_IS_PRIMITIVE(vp[0])) {
1015 obj = JSVAL_TO_OBJECT(vp[0]);
1016 if (!js_IsFunctionQName(cx, obj, &id))
1017 return false;
1018 if (id != 0)
1019 vp[0] = ID_TO_VALUE(id);
1021 #endif
1022 obj = js_NewObjectWithGivenProto(cx, &js_NoSuchMethodClass,
1023 NULL, NULL);
1024 if (!obj)
1025 return false;
1026 obj->fslots[JSSLOT_FOUND_FUNCTION] = tvr.value();
1027 obj->fslots[JSSLOT_SAVED_ID] = vp[0];
1028 vp[0] = OBJECT_TO_JSVAL(obj);
1030 return true;
1033 static JS_REQUIRES_STACK JSBool
1034 NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags)
1036 jsval *invokevp;
1037 void *mark;
1038 JSBool ok;
1039 JSObject *obj, *argsobj;
1041 invokevp = js_AllocStack(cx, 2 + 2, &mark);
1042 if (!invokevp)
1043 return JS_FALSE;
1045 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[0]));
1046 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1047 obj = JSVAL_TO_OBJECT(vp[0]);
1048 JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_NoSuchMethodClass);
1050 invokevp[0] = obj->fslots[JSSLOT_FOUND_FUNCTION];
1051 invokevp[1] = vp[1];
1052 invokevp[2] = obj->fslots[JSSLOT_SAVED_ID];
1053 argsobj = js_NewArrayObject(cx, argc, vp + 2);
1054 if (!argsobj) {
1055 ok = JS_FALSE;
1056 } else {
1057 invokevp[3] = OBJECT_TO_JSVAL(argsobj);
1058 ok = (flags & JSINVOKE_CONSTRUCT)
1059 ? js_InvokeConstructor(cx, 2, JS_TRUE, invokevp)
1060 : js_Invoke(cx, 2, invokevp, flags);
1061 vp[0] = invokevp[0];
1063 js_FreeStack(cx, mark);
1064 return ok;
1067 #endif /* JS_HAS_NO_SUCH_METHOD */
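/*
 * Script-level illustration of the hook described above (a sketch; the
 * object and method names are made up):
 *
 *   var obj = {
 *       __noSuchMethod__: function (id, args) {
 *           return "no method " + id + " called with " + args.length + " args";
 *       }
 *   };
 *   obj.foo(1, 2);   // ends up calling obj.__noSuchMethod__("foo", [1, 2])
 */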
1070 * We check whether the function accepts a primitive value as |this|. For
1071 * that we use a table that maps a value's tag to the corresponding function flag.
1073 JS_STATIC_ASSERT(JSVAL_INT == 1);
1074 JS_STATIC_ASSERT(JSVAL_DOUBLE == 2);
1075 JS_STATIC_ASSERT(JSVAL_STRING == 4);
1076 JS_STATIC_ASSERT(JSVAL_SPECIAL == 6);
1078 const uint16 js_PrimitiveTestFlags[] = {
1079 JSFUN_THISP_NUMBER, /* INT */
1080 JSFUN_THISP_NUMBER, /* DOUBLE */
1081 JSFUN_THISP_NUMBER, /* INT */
1082 JSFUN_THISP_STRING, /* STRING */
1083 JSFUN_THISP_NUMBER, /* INT */
1084 JSFUN_THISP_BOOLEAN, /* BOOLEAN */
1085 JSFUN_THISP_NUMBER /* INT */
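/*
 * Reading of the table above (not stated elsewhere in this file): the rows
 * are arranged so a primitive value's tag selects the JSFUN_THISP_* flag for
 * its type -- the number tags map to JSFUN_THISP_NUMBER, the string tag to
 * JSFUN_THISP_STRING, and the special (boolean) tag to JSFUN_THISP_BOOLEAN.
 */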
1089 * Find a function reference and its 'this' object implicit first parameter
1090 * under argc arguments on cx's stack, and call the function. Push missing
1091 * required arguments, allocate declared local variables, and pop everything
1092 * when done. Then push the return value.
1094 JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
1095 js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags)
1097 void *mark;
1098 JSStackFrame frame;
1099 jsval *sp, *argv, *newvp;
1100 jsval v;
1101 JSObject *funobj, *parent;
1102 JSBool ok;
1103 JSClass *clasp;
1104 const JSObjectOps *ops;
1105 JSNative native;
1106 JSFunction *fun;
1107 JSScript *script;
1108 uintN nslots, i;
1109 uint32 rootedArgsFlag;
1110 JSInterpreterHook hook;
1111 void *hookData;
1113 JS_ASSERT(argc <= JS_ARGS_LENGTH_MAX);
1115 /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */
1116 JS_ASSERT((jsval *) cx->stackPool.current->base <= vp);
1117 JS_ASSERT(vp + 2 + argc <= (jsval *) cx->stackPool.current->avail);
1119 /* Mark the top of stack and load frequently-used registers. */
1120 mark = JS_ARENA_MARK(&cx->stackPool);
1121 MUST_FLOW_THROUGH("out2");
1122 v = *vp;
1124 if (JSVAL_IS_PRIMITIVE(v))
1125 goto bad;
1127 funobj = JSVAL_TO_OBJECT(v);
1128 parent = OBJ_GET_PARENT(cx, funobj);
1129 clasp = OBJ_GET_CLASS(cx, funobj);
1130 if (clasp != &js_FunctionClass) {
1131 #if JS_HAS_NO_SUCH_METHOD
1132 if (clasp == &js_NoSuchMethodClass) {
1133 ok = NoSuchMethod(cx, argc, vp, flags);
1134 goto out2;
1136 #endif
1138 /* Function is inlined, all other classes use object ops. */
1139 ops = funobj->map->ops;
1142 * XXX this makes no sense -- why convert to function if clasp->call?
1143 * XXX better to call that hook without converting
1145 * FIXME bug 408416: try converting to function, for API compatibility
1146 * if there is a call op defined.
1148 if ((ops == &js_ObjectOps) ? clasp->call : ops->call) {
1149 ok = clasp->convert(cx, funobj, JSTYPE_FUNCTION, &v);
1150 if (!ok)
1151 goto out2;
1153 if (VALUE_IS_FUNCTION(cx, v)) {
1154 /* Make vp refer to funobj to keep it available as argv[-2]. */
1155 *vp = v;
1156 funobj = JSVAL_TO_OBJECT(v);
1157 parent = OBJ_GET_PARENT(cx, funobj);
1158 goto have_fun;
1161 fun = NULL;
1162 script = NULL;
1163 nslots = 0;
1165 /* Try a call or construct native object op. */
1166 if (flags & JSINVOKE_CONSTRUCT) {
1167 if (!JSVAL_IS_OBJECT(vp[1])) {
1168 ok = js_PrimitiveToObject(cx, &vp[1]);
1169 if (!ok)
1170 goto out2;
1172 native = ops->construct;
1173 } else {
1174 native = ops->call;
1176 if (!native)
1177 goto bad;
1178 } else {
1179 have_fun:
1180 /* Get private data and set derived locals from it. */
1181 fun = GET_FUNCTION_PRIVATE(cx, funobj);
1182 nslots = FUN_MINARGS(fun);
1183 nslots = (nslots > argc) ? nslots - argc : 0;
1184 if (FUN_INTERPRETED(fun)) {
1185 native = NULL;
1186 script = fun->u.i.script;
1187 JS_ASSERT(script);
1189 if (script->isEmpty()) {
1190 if (flags & JSINVOKE_CONSTRUCT) {
1191 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1192 *vp = vp[1];
1193 } else {
1194 *vp = JSVAL_VOID;
1196 ok = JS_TRUE;
1197 goto out2;
1199 } else {
1200 native = fun->u.n.native;
1201 script = NULL;
1202 nslots += fun->u.n.extra;
1205 if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
1206 /* Handle bound method special case. */
1207 vp[1] = OBJECT_TO_JSVAL(parent);
1208 } else if (!JSVAL_IS_OBJECT(vp[1])) {
1209 JS_ASSERT(!(flags & JSINVOKE_CONSTRUCT));
1210 if (PRIMITIVE_THIS_TEST(fun, vp[1]))
1211 goto start_call;
1215 if (flags & JSINVOKE_CONSTRUCT) {
1216 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1217 } else {
1219 * We must call js_ComputeThis in case we are not called from the
1220 * interpreter, where a prior bytecode has computed an appropriate
1221 * |this| already.
1223 * But we need to compute |this| eagerly only for so-called "slow"
1224 * (i.e., not fast) native functions. Fast natives must use either
1225 * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through
1226 * the appropriate this-computing bytecode, e.g., JSOP_THIS.
1228 if (native && (!fun || !(fun->flags & JSFUN_FAST_NATIVE))) {
1229 if (!js_ComputeThis(cx, JS_FALSE, vp + 2)) {
1230 ok = JS_FALSE;
1231 goto out2;
1233 flags |= JSFRAME_COMPUTED_THIS;
1237 start_call:
1238 if (native && fun && (fun->flags & JSFUN_FAST_NATIVE)) {
1239 #ifdef DEBUG_NOT_THROWING
1240 JSBool alreadyThrowing = cx->throwing;
1241 #endif
1242 JS_ASSERT(nslots == 0);
1243 ok = ((JSFastNative) native)(cx, argc, vp);
1244 JS_RUNTIME_METER(cx->runtime, nativeCalls);
1245 #ifdef DEBUG_NOT_THROWING
1246 if (ok && !alreadyThrowing)
1247 ASSERT_NOT_THROWING(cx);
1248 #endif
1249 goto out2;
1252 argv = vp + 2;
1253 sp = argv + argc;
1255 rootedArgsFlag = JSFRAME_ROOTED_ARGV;
1256 if (nslots != 0) {
1258 * The extra slots required by the function are contiguous with the
1259 * argument slots. Thus, when the last stack pool arena does not have room to
1260 * fit nslots right after sp and AllocateAfterSP fails, we have to copy
1261 * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy.
1263 if (!AllocateAfterSP(cx, sp, nslots)) {
1264 rootedArgsFlag = 0;
1265 newvp = js_AllocRawStack(cx, 2 + argc + nslots, NULL);
1266 if (!newvp) {
1267 ok = JS_FALSE;
1268 goto out2;
1270 memcpy(newvp, vp, (2 + argc) * sizeof(jsval));
1271 argv = newvp + 2;
1272 sp = argv + argc;
1275 /* Push void to initialize missing args. */
1276 i = nslots;
1277 do {
1278 *sp++ = JSVAL_VOID;
1279 } while (--i != 0);
1282 /* Allocate space for local variables and stack of interpreted function. */
1283 if (script && script->nslots != 0) {
1284 if (!AllocateAfterSP(cx, sp, script->nslots)) {
1285 /* NB: Discontinuity between argv and slots, stack slots. */
1286 sp = js_AllocRawStack(cx, script->nslots, NULL);
1287 if (!sp) {
1288 ok = JS_FALSE;
1289 goto out2;
1293 /* Push void to initialize local variables. */
1294 for (jsval *end = sp + fun->u.i.nvars; sp != end; ++sp)
1295 *sp = JSVAL_VOID;
1299 * Initialize the frame.
1301 frame.thisv = vp[1];
1302 frame.varobj = NULL;
1303 frame.callobj = NULL;
1304 frame.argsobj = NULL;
1305 frame.script = script;
1306 frame.fun = fun;
1307 frame.argc = argc;
1308 frame.argv = argv;
1310 /* Default return value for a constructor is the new object. */
1311 frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
1312 frame.down = cx->fp;
1313 frame.annotation = NULL;
1314 frame.scopeChain = NULL; /* set below for real, after cx->fp is set */
1315 frame.blockChain = NULL;
1316 frame.regs = NULL;
1317 frame.imacpc = NULL;
1318 frame.slots = NULL;
1319 frame.flags = flags | rootedArgsFlag;
1320 frame.dormantNext = NULL;
1321 frame.displaySave = NULL;
1323 MUST_FLOW_THROUGH("out");
1324 cx->fp = &frame;
1326 /* Init these now in case we goto out before first hook call. */
1327 hook = cx->debugHooks->callHook;
1328 hookData = NULL;
1330 if (native) {
1331 /* If native, use caller varobj and scopeChain for eval. */
1332 JS_ASSERT(!frame.varobj);
1333 JS_ASSERT(!frame.scopeChain);
1334 if (frame.down) {
1335 frame.varobj = frame.down->varobj;
1336 frame.scopeChain = frame.down->scopeChain;
1339 /* But ensure that we have a scope chain. */
1340 if (!frame.scopeChain)
1341 frame.scopeChain = parent;
1342 } else {
1343 /* Use parent scope so js_GetCallObject can find the right "Call". */
1344 frame.scopeChain = parent;
1345 if (JSFUN_HEAVYWEIGHT_TEST(fun->flags)) {
1346 /* Scope with a call object parented by the callee's parent. */
1347 if (!js_GetCallObject(cx, &frame)) {
1348 ok = JS_FALSE;
1349 goto out;
1352 frame.slots = sp - fun->u.i.nvars;
1355 /* Call the hook if present after we fully initialized the frame. */
1356 if (hook)
1357 hookData = hook(cx, &frame, JS_TRUE, 0, cx->debugHooks->callHookData);
1359 #ifdef INCLUDE_MOZILLA_DTRACE
1360 /* DTrace function entry, non-inlines */
1361 if (JAVASCRIPT_FUNCTION_ENTRY_ENABLED())
1362 jsdtrace_function_entry(cx, &frame, fun);
1363 if (JAVASCRIPT_FUNCTION_INFO_ENABLED())
1364 jsdtrace_function_info(cx, &frame, frame.down, fun);
1365 if (JAVASCRIPT_FUNCTION_ARGS_ENABLED())
1366 jsdtrace_function_args(cx, &frame, fun, frame.argc, frame.argv);
1367 #endif
1369 /* Call the function, either a native method or an interpreted script. */
1370 if (native) {
1371 #ifdef DEBUG_NOT_THROWING
1372 JSBool alreadyThrowing = cx->throwing;
1373 #endif
1374 /* Primitive |this| should not be passed to slow natives. */
1375 JSObject *thisp = JSVAL_TO_OBJECT(frame.thisv);
1376 ok = native(cx, thisp, argc, frame.argv, &frame.rval);
1377 JS_RUNTIME_METER(cx->runtime, nativeCalls);
1378 #ifdef DEBUG_NOT_THROWING
1379 if (ok && !alreadyThrowing)
1380 ASSERT_NOT_THROWING(cx);
1381 #endif
1382 } else {
1383 JS_ASSERT(script);
1384 ok = js_Interpret(cx);
1387 #ifdef INCLUDE_MOZILLA_DTRACE
1388 /* DTrace function return, non-inlines */
1389 if (JAVASCRIPT_FUNCTION_RVAL_ENABLED())
1390 jsdtrace_function_rval(cx, &frame, fun, &frame.rval);
1391 if (JAVASCRIPT_FUNCTION_RETURN_ENABLED())
1392 jsdtrace_function_return(cx, &frame, fun);
1393 #endif
1395 out:
1396 if (hookData) {
1397 hook = cx->debugHooks->callHook;
1398 if (hook)
1399 hook(cx, &frame, JS_FALSE, &ok, hookData);
1402 frame.putActivationObjects(cx);
1404 *vp = frame.rval;
1406 /* Restore cx->fp now that we're done releasing frame objects. */
1407 cx->fp = frame.down;
1409 out2:
1410 /* Pop everything we may have allocated off the stack. */
1411 JS_ARENA_RELEASE(&cx->stackPool, mark);
1412 if (!ok)
1413 *vp = JSVAL_NULL;
1414 return ok;
1416 bad:
1417 js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
1418 ok = JS_FALSE;
1419 goto out2;
1422 JSBool
1423 js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
1424 uintN argc, jsval *argv, jsval *rval)
1426 jsval *invokevp;
1427 void *mark;
1428 JSBool ok;
1430 js_LeaveTrace(cx);
1431 invokevp = js_AllocStack(cx, 2 + argc, &mark);
1432 if (!invokevp)
1433 return JS_FALSE;
1435 invokevp[0] = fval;
1436 invokevp[1] = OBJECT_TO_JSVAL(obj);
1437 memcpy(invokevp + 2, argv, argc * sizeof *argv);
1439 ok = js_Invoke(cx, argc, invokevp, flags);
1440 if (ok) {
1442 * Store *rval in a scoped local root if a scope is open, else in
1443 * the lastInternalResult pigeon-hole GC root, solely so users of
1444 * js_InternalInvoke and its direct and indirect (js_ValueToString for
1445 * example) callers do not need to manage roots for local, temporary
1446 * references to such results.
1448 *rval = *invokevp;
1449 if (JSVAL_IS_GCTHING(*rval) && *rval != JSVAL_NULL) {
1450 JSLocalRootStack *lrs = JS_THREAD_DATA(cx)->localRootStack;
1451 if (lrs) {
1452 if (js_PushLocalRoot(cx, lrs, *rval) < 0)
1453 ok = JS_FALSE;
1454 } else {
1455 cx->weakRoots.lastInternalResult = *rval;
1460 js_FreeStack(cx, mark);
1461 return ok;
1464 JSBool
1465 js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
1466 JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
1468 js_LeaveTrace(cx);
1471 * js_InternalInvoke could result in another try to get or set the same id
1472 * again, see bug 355497.
1474 JS_CHECK_RECURSION(cx, return JS_FALSE);
1476 return js_InternalCall(cx, obj, fval, argc, argv, rval);
1479 JSBool
1480 js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
1481 JSStackFrame *down, uintN flags, jsval *result)
1483 JSInterpreterHook hook;
1484 void *hookData, *mark;
1485 JSStackFrame *oldfp, frame;
1486 JSObject *obj, *tmp;
1487 JSBool ok;
1489 if (script->isEmpty()) {
1490 if (result)
1491 *result = JSVAL_VOID;
1492 return JS_TRUE;
1495 js_LeaveTrace(cx);
1497 #ifdef INCLUDE_MOZILLA_DTRACE
1498 if (JAVASCRIPT_EXECUTE_START_ENABLED())
1499 jsdtrace_execute_start(script);
1500 #endif
1502 hook = cx->debugHooks->executeHook;
1503 hookData = mark = NULL;
1504 oldfp = js_GetTopStackFrame(cx);
1505 frame.script = script;
1506 if (down) {
1507 /* Propagate arg state for eval and the debugger API. */
1508 frame.callobj = down->callobj;
1509 frame.argsobj = down->argsobj;
1510 frame.varobj = down->varobj;
1511 frame.fun = (script->staticLevel > 0) ? down->fun : NULL;
1512 frame.thisv = down->thisv;
1513 if (down->flags & JSFRAME_COMPUTED_THIS)
1514 flags |= JSFRAME_COMPUTED_THIS;
1515 frame.argc = down->argc;
1516 frame.argv = down->argv;
1517 frame.annotation = down->annotation;
1518 } else {
1519 frame.callobj = NULL;
1520 frame.argsobj = NULL;
1521 obj = chain;
1522 if (cx->options & JSOPTION_VAROBJFIX) {
1523 while ((tmp = OBJ_GET_PARENT(cx, obj)) != NULL)
1524 obj = tmp;
1526 frame.varobj = obj;
1527 frame.fun = NULL;
1528 frame.thisv = OBJECT_TO_JSVAL(chain);
1529 frame.argc = 0;
1530 frame.argv = NULL;
1531 frame.annotation = NULL;
1534 frame.imacpc = NULL;
1535 if (script->nslots != 0) {
1536 frame.slots = js_AllocRawStack(cx, script->nslots, &mark);
1537 if (!frame.slots) {
1538 ok = JS_FALSE;
1539 goto out;
1541 memset(frame.slots, 0, script->nfixed * sizeof(jsval));
1543 #if JS_HAS_SHARP_VARS
1544 JS_STATIC_ASSERT(SHARP_NSLOTS == 2);
1546 if (script->hasSharps) {
1547 JS_ASSERT(script->nfixed >= SHARP_NSLOTS);
1548 jsval *sharps = &frame.slots[script->nfixed - SHARP_NSLOTS];
1550 if (down && down->script && down->script->hasSharps) {
1551 JS_ASSERT(down->script->nfixed >= SHARP_NSLOTS);
1552 int base = (down->fun && !(down->flags & JSFRAME_SPECIAL))
1553 ? down->fun->sharpSlotBase(cx)
1554 : down->script->nfixed - SHARP_NSLOTS;
1555 if (base < 0) {
1556 ok = JS_FALSE;
1557 goto out;
1559 sharps[0] = down->slots[base];
1560 sharps[1] = down->slots[base + 1];
1561 } else {
1562 sharps[0] = sharps[1] = JSVAL_VOID;
1565 #endif
1566 } else {
1567 frame.slots = NULL;
1570 frame.rval = JSVAL_VOID;
1571 frame.down = down;
1572 frame.scopeChain = chain;
1573 frame.regs = NULL;
1574 frame.flags = flags;
1575 frame.dormantNext = NULL;
1576 frame.blockChain = NULL;
1579 * Here we wrap the call to js_Interpret with code to (conditionally)
1580 * save and restore the old stack frame chain into a chain of 'dormant'
1581 * frame chains. Since we are replacing cx->fp, we were running into
1582 * the problem that if GC was called under this frame, some of the GC
1583 * things associated with the old frame chain (available here only in
1584 * the C variable 'oldfp') were not rooted and were being collected.
1586 * So, now we preserve the links to these 'dormant' frame chains in cx
1587 * before calling js_Interpret and cleanup afterwards. The GC walks
1588 * these dormant chains and marks objects in the same way that it marks
1589 * objects in the primary cx->fp chain.
1591 if (oldfp && oldfp != down) {
1592 JS_ASSERT(!oldfp->dormantNext);
1593 oldfp->dormantNext = cx->dormantFrameChain;
1594 cx->dormantFrameChain = oldfp;
1597 cx->fp = &frame;
1598 if (!down) {
1599 OBJ_TO_INNER_OBJECT(cx, chain);
1600 if (!chain)
1601 return JS_FALSE;
1602 frame.scopeChain = chain;
1604 JSObject *thisp = JSVAL_TO_OBJECT(frame.thisv)->thisObject(cx);
1605 if (!thisp) {
1606 ok = JS_FALSE;
1607 goto out2;
1609 frame.thisv = OBJECT_TO_JSVAL(thisp);
1610 frame.flags |= JSFRAME_COMPUTED_THIS;
1613 if (hook) {
1614 hookData = hook(cx, &frame, JS_TRUE, 0,
1615 cx->debugHooks->executeHookData);
1618 ok = js_Interpret(cx);
1619 if (result)
1620 *result = frame.rval;
1622 if (hookData) {
1623 hook = cx->debugHooks->executeHook;
1624 if (hook)
1625 hook(cx, &frame, JS_FALSE, &ok, hookData);
1628 out2:
1629 if (mark)
1630 js_FreeRawStack(cx, mark);
1631 cx->fp = oldfp;
1633 if (oldfp && oldfp != down) {
1634 JS_ASSERT(cx->dormantFrameChain == oldfp);
1635 cx->dormantFrameChain = oldfp->dormantNext;
1636 oldfp->dormantNext = NULL;
1639 out:
1640 #ifdef INCLUDE_MOZILLA_DTRACE
1641 if (JAVASCRIPT_EXECUTE_DONE_ENABLED())
1642 jsdtrace_execute_done(script);
1643 #endif
1644 return ok;
1647 JSBool
1648 js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
1649 JSObject **objp, JSProperty **propp)
1651 JSObject *obj2;
1652 JSProperty *prop;
1653 uintN oldAttrs, report;
1654 bool isFunction;
1655 jsval value;
1656 const char *type, *name;
1659 * Both objp and propp must be either null or given. When given, *propp
1660 * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
1661 * the common case of a non-existing property.
1663 JS_ASSERT(!objp == !propp);
1664 JS_ASSERT_IF(propp, !*propp);
1666 /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
1667 * drop the property before reporting it, we insist on !propp to avoid
1668 * looking up the property again after the reporting is done.
1670 JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER);
1671 JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp);
1673 if (!obj->lookupProperty(cx, id, &obj2, &prop))
1674 return JS_FALSE;
1675 if (!prop)
1676 return JS_TRUE;
1678 /* Use prop as a speedup hint to obj->getAttributes. */
1679 if (!obj2->getAttributes(cx, id, prop, &oldAttrs)) {
1680 obj2->dropProperty(cx, prop);
1681 return JS_FALSE;
1685 * If our caller doesn't want prop, drop it (we don't need it any longer).
1687 if (!propp) {
1688 obj2->dropProperty(cx, prop);
1689 prop = NULL;
1690 } else {
1691 *objp = obj2;
1692 *propp = prop;
1695 if (attrs == JSPROP_INITIALIZER) {
1696 /* Allow the new object to override properties. */
1697 if (obj2 != obj)
1698 return JS_TRUE;
1700 /* The property must be dropped already. */
1701 JS_ASSERT(!prop);
1702 report = JSREPORT_WARNING | JSREPORT_STRICT;
1704 #ifdef __GNUC__
1705 isFunction = false; /* suppress bogus gcc warnings */
1706 #endif
1707 } else {
1708 /* We allow redeclaring some non-readonly properties. */
1709 if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) {
1710 /* Allow redeclaration of variables and functions. */
1711 if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER)))
1712 return JS_TRUE;
1715 * Allow adding a getter only if a property already has a setter
1716 * but no getter and similarly for adding a setter. That is, we
1717 * allow only the following transitions:
1719 * no-property --> getter --> getter + setter
1720 * no-property --> setter --> getter + setter
1722 if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0)
1723 return JS_TRUE;
1726 * Allow redeclaration of an impermanent property (in which case
1727 * anyone could delete it and redefine it, willy-nilly).
1729 if (!(oldAttrs & JSPROP_PERMANENT))
1730 return JS_TRUE;
1732 if (prop)
1733 obj2->dropProperty(cx, prop);
1735 report = JSREPORT_ERROR;
1736 isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0;
1737 if (!isFunction) {
1738 if (!obj->getProperty(cx, id, &value))
1739 return JS_FALSE;
1740 isFunction = VALUE_IS_FUNCTION(cx, value);
1744 type = (attrs == JSPROP_INITIALIZER)
1745 ? "property"
1746 : (oldAttrs & attrs & JSPROP_GETTER)
1747 ? js_getter_str
1748 : (oldAttrs & attrs & JSPROP_SETTER)
1749 ? js_setter_str
1750 : (oldAttrs & JSPROP_READONLY)
1751 ? js_const_str
1752 : isFunction
1753 ? js_function_str
1754 : js_var_str;
1755 name = js_ValueToPrintableString(cx, ID_TO_VALUE(id));
1756 if (!name)
1757 return JS_FALSE;
1758 return JS_ReportErrorFlagsAndNumber(cx, report,
1759 js_GetErrorMessage, NULL,
1760 JSMSG_REDECLARED_VAR,
1761 type, name);
1764 JSBool
1765 js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval)
1767 jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval);
1768 jsdouble ld, rd;
1770 if (ltag == rtag) {
1771 if (ltag == JSVAL_STRING) {
1772 JSString *lstr = JSVAL_TO_STRING(lval),
1773 *rstr = JSVAL_TO_STRING(rval);
1774 return js_EqualStrings(lstr, rstr);
1776 if (ltag == JSVAL_DOUBLE) {
1777 ld = *JSVAL_TO_DOUBLE(lval);
1778 rd = *JSVAL_TO_DOUBLE(rval);
1779 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1781 if (ltag == JSVAL_OBJECT &&
1782 lval != rval &&
1783 !JSVAL_IS_NULL(lval) &&
1784 !JSVAL_IS_NULL(rval)) {
1785 JSObject *lobj, *robj;
1787 lobj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(lval));
1788 robj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(rval));
1789 lval = OBJECT_TO_JSVAL(lobj);
1790 rval = OBJECT_TO_JSVAL(robj);
1792 return lval == rval;
1794 if (ltag == JSVAL_DOUBLE && JSVAL_IS_INT(rval)) {
1795 ld = *JSVAL_TO_DOUBLE(lval);
1796 rd = JSVAL_TO_INT(rval);
1797 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1799 if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) {
1800 ld = JSVAL_TO_INT(lval);
1801 rd = *JSVAL_TO_DOUBLE(rval);
1802 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1804 return lval == rval;
1807 static inline bool
1808 IsNegativeZero(jsval v)
1810 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v));
1813 static inline bool
1814 IsNaN(jsval v)
1816 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NaN(*JSVAL_TO_DOUBLE(v));
1819 JSBool
1820 js_SameValue(jsval v1, jsval v2, JSContext *cx)
1822 if (IsNegativeZero(v1))
1823 return IsNegativeZero(v2);
1824 if (IsNegativeZero(v2))
1825 return JS_FALSE;
1826 if (IsNaN(v1) && IsNaN(v2))
1827 return JS_TRUE;
1828 return js_StrictlyEqual(cx, v1, v2);
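/*
 * The cases above give js_SameValue its only differences from strict
 * equality, for example:
 *
 *   js_SameValue(-0, +0, cx)   == JS_FALSE   // -0 and +0 are distinguished
 *   js_SameValue(NaN, NaN, cx) == JS_TRUE    // NaN is equal to itself
 *
 * (Illustrative jsval spellings; everything else falls through to
 * js_StrictlyEqual.)
 */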
1831 JS_REQUIRES_STACK JSBool
1832 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp)
1834 JSFunction *fun, *fun2;
1835 JSObject *obj, *obj2, *proto, *parent;
1836 jsval lval, rval;
1837 JSClass *clasp;
1839 fun = NULL;
1840 obj2 = NULL;
1841 lval = *vp;
1842 if (!JSVAL_IS_OBJECT(lval) ||
1843 (obj2 = JSVAL_TO_OBJECT(lval)) == NULL ||
1844 /* XXX clean up to avoid special cases above ObjectOps layer */
1845 OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass ||
1846 !obj2->map->ops->construct)
1848 fun = js_ValueToFunction(cx, vp, JSV2F_CONSTRUCT);
1849 if (!fun)
1850 return JS_FALSE;
1853 clasp = &js_ObjectClass;
1854 if (!obj2) {
1855 proto = parent = NULL;
1856 fun = NULL;
1857 } else {
1859 * Get the constructor prototype object for this function.
1860 * Use the nominal 'this' parameter slot, vp[1], as a local
1861 * root to protect this prototype, in case it has no other
1862 * strong refs.
1864 if (!obj2->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
1865 &vp[1])) {
1866 return JS_FALSE;
1868 rval = vp[1];
1869 proto = JSVAL_IS_OBJECT(rval) ? JSVAL_TO_OBJECT(rval) : NULL;
1870 parent = OBJ_GET_PARENT(cx, obj2);
1872 if (OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass) {
1873 fun2 = GET_FUNCTION_PRIVATE(cx, obj2);
1874 if (!FUN_INTERPRETED(fun2) && fun2->u.n.clasp)
1875 clasp = fun2->u.n.clasp;
1878 obj = js_NewObject(cx, clasp, proto, parent);
1879 if (!obj)
1880 return JS_FALSE;
1882 /* Now we have an object with a constructor method; call it. */
1883 vp[1] = OBJECT_TO_JSVAL(obj);
1884 if (!js_Invoke(cx, argc, vp, JSINVOKE_CONSTRUCT))
1885 return JS_FALSE;
1887 /* Check the return value and if it's primitive, force it to be obj. */
1888 rval = *vp;
1889 if (clampReturn && JSVAL_IS_PRIMITIVE(rval)) {
1890 if (!fun) {
1891 /* A native [[Construct]] returning a primitive is an error. */
1892 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1893 JSMSG_BAD_NEW_RESULT,
1894 js_ValueToPrintableString(cx, rval));
1895 return JS_FALSE;
1897 *vp = OBJECT_TO_JSVAL(obj);
1900 JS_RUNTIME_METER(cx->runtime, constructs);
1901 return JS_TRUE;
1904 JSBool
1905 js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp)
1907 JS_ASSERT(!JSVAL_IS_INT(idval));
1909 #if JS_HAS_XML_SUPPORT
1910 if (!JSVAL_IS_PRIMITIVE(idval)) {
1911 if (OBJECT_IS_XML(cx, obj)) {
1912 *idp = OBJECT_JSVAL_TO_JSID(idval);
1913 return JS_TRUE;
1915 if (!js_IsFunctionQName(cx, JSVAL_TO_OBJECT(idval), idp))
1916 return JS_FALSE;
1917 if (*idp != 0)
1918 return JS_TRUE;
1920 #endif
1922 return js_ValueToStringId(cx, idval, idp);
1926 * Enter the new with scope using an object at sp[-1] and associate the depth
1927 * of the with block with sp + stackIndex.
1929 JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
1930 js_EnterWith(JSContext *cx, jsint stackIndex)
1932 JSStackFrame *fp;
1933 jsval *sp;
1934 JSObject *obj, *parent, *withobj;
1936 fp = cx->fp;
1937 sp = fp->regs->sp;
1938 JS_ASSERT(stackIndex < 0);
1939 JS_ASSERT(StackBase(fp) <= sp + stackIndex);
1941 if (!JSVAL_IS_PRIMITIVE(sp[-1])) {
1942 obj = JSVAL_TO_OBJECT(sp[-1]);
1943 } else {
1944 obj = js_ValueToNonNullObject(cx, sp[-1]);
1945 if (!obj)
1946 return JS_FALSE;
1947 sp[-1] = OBJECT_TO_JSVAL(obj);
1950 parent = js_GetScopeChain(cx, fp);
1951 if (!parent)
1952 return JS_FALSE;
1954 OBJ_TO_INNER_OBJECT(cx, obj);
1955 if (!obj)
1956 return JS_FALSE;
1958 withobj = js_NewWithObject(cx, obj, parent,
1959 sp + stackIndex - StackBase(fp));
1960 if (!withobj)
1961 return JS_FALSE;
1963 fp->scopeChain = withobj;
1964 return JS_TRUE;
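/*
 * For illustration: a script fragment such as
 *
 *   with (o) { x = 1; }
 *
 * reaches this function with o at sp[-1]; the With object created above is
 * pushed onto fp->scopeChain and popped again by js_LeaveWith below.
 */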
1967 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
1968 js_LeaveWith(JSContext *cx)
1970 JSObject *withobj;
1972 withobj = cx->fp->scopeChain;
1973 JS_ASSERT(OBJ_GET_CLASS(cx, withobj) == &js_WithClass);
1974 JS_ASSERT(withobj->getPrivate() == cx->fp);
1975 JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
1976 cx->fp->scopeChain = OBJ_GET_PARENT(cx, withobj);
1977 withobj->setPrivate(NULL);
1980 JS_REQUIRES_STACK JSClass *
1981 js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
1983 JSClass *clasp;
1985 clasp = OBJ_GET_CLASS(cx, obj);
1986 if ((clasp == &js_WithClass || clasp == &js_BlockClass) &&
1987 obj->getPrivate() == cx->fp &&
1988 OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) {
1989 return clasp;
1991 return NULL;
1995 * Unwind block and scope chains to match the given depth. On return the
1996 * function sets fp->regs->sp to StackBase(fp) + stackDepth.
1998 JS_REQUIRES_STACK JSBool
1999 js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
2000 JSBool normalUnwind)
2002 JSObject *obj;
2003 JSClass *clasp;
2005 JS_ASSERT(stackDepth >= 0);
2006 JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp);
2008 for (obj = fp->blockChain; obj; obj = OBJ_GET_PARENT(cx, obj)) {
2009 JS_ASSERT(OBJ_GET_CLASS(cx, obj) == &js_BlockClass);
2010 if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
2011 break;
2013 fp->blockChain = obj;
2015 for (;;) {
2016 obj = fp->scopeChain;
2017 clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth);
2018 if (!clasp)
2019 break;
2020 if (clasp == &js_BlockClass) {
2021 /* Don't fail until after we've updated all stacks. */
2022 normalUnwind &= js_PutBlockObject(cx, normalUnwind);
2023 } else {
2024 js_LeaveWith(cx);
2028 fp->regs->sp = StackBase(fp) + stackDepth;
2029 return normalUnwind;
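/*
 * The first loop above trims fp->blockChain, the compiler's static chain of
 * block objects, while the second pops cloned Block and With objects off the
 * runtime scope chain. Only js_PutBlockObject can fail, and its failure is
 * folded into the returned normalUnwind value so that every stack is updated
 * before we report the error.
 */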
2032 JS_STATIC_INTERPRET JSBool
2033 js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2)
2035 jsval v;
2036 jsdouble d;
2038 v = *vp;
2039 if (JSVAL_IS_DOUBLE(v)) {
2040 d = *JSVAL_TO_DOUBLE(v);
2041 } else if (JSVAL_IS_INT(v)) {
2042 d = JSVAL_TO_INT(v);
2043 } else {
2044 d = js_ValueToNumber(cx, vp);
2045 if (JSVAL_IS_NULL(*vp))
2046 return JS_FALSE;
2047 JS_ASSERT(JSVAL_IS_NUMBER(*vp) || *vp == JSVAL_TRUE);
2049 /* Store the converted value of v back in *vp for post increments. */
2050 if ((cs->format & JOF_POST) &&
2051 *vp == JSVAL_TRUE
2052 && !js_NewNumberInRootedValue(cx, d, vp)) {
2053 return JS_FALSE;
2057 (cs->format & JOF_INC) ? d++ : d--;
2058 if (!js_NewNumberInRootedValue(cx, d, vp2))
2059 return JS_FALSE;
2061 if (!(cs->format & JOF_POST))
2062 *vp = *vp2;
2063 return JS_TRUE;
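/*
 * To summarize js_DoIncDec: *vp2 always receives the new numeric value,
 * while *vp keeps the pre-operation number for postfix forms (JOF_POST) and
 * is set to the new value for prefix forms, matching what the interpreter
 * leaves on the stack for x++/x-- versus ++x/--x.
 */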
2066 jsval&
2067 js_GetUpvar(JSContext *cx, uintN level, uintN cookie)
2069 level -= UPVAR_FRAME_SKIP(cookie);
2070 JS_ASSERT(level < JS_DISPLAY_SIZE);
2072 JSStackFrame *fp = cx->display[level];
2073 JS_ASSERT(fp->script);
2075 uintN slot = UPVAR_FRAME_SLOT(cookie);
2076 jsval *vp;
2078 if (!fp->fun) {
2079 vp = fp->slots + fp->script->nfixed;
2080 } else if (slot < fp->fun->nargs) {
2081 vp = fp->argv;
2082 } else if (slot == CALLEE_UPVAR_SLOT) {
2083 vp = &fp->argv[-2];
2084 slot = 0;
2085 } else {
2086 slot -= fp->fun->nargs;
2087 JS_ASSERT(slot < fp->script->nslots);
2088 vp = fp->slots;
2091 return vp[slot];
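/*
 * The upvar cookie packs two fields: UPVAR_FRAME_SKIP says how many static
 * levels up the defining frame sits, and UPVAR_FRAME_SLOT selects an
 * argument, a local, or the callee itself within that frame. Because
 * cx->display maps each static level to the innermost active frame at that
 * level, the lookup above is a constant-time array access rather than a
 * scope-chain walk.
 */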
2094 #ifdef DEBUG
2096 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
2097 js_TraceOpcode(JSContext *cx)
2099 FILE *tracefp;
2100 JSStackFrame *fp;
2101 JSFrameRegs *regs;
2102 intN ndefs, n, nuses;
2103 jsval *siter;
2104 JSString *str;
2105 JSOp op;
2107 tracefp = (FILE *) cx->tracefp;
2108 JS_ASSERT(tracefp);
2109 fp = cx->fp;
2110 regs = fp->regs;
2113 * Operations in prologues don't produce interesting values, and
2114 * js_DecompileValueGenerator isn't set up to handle them anyway.
2116 if (cx->tracePrevPc && regs->pc >= fp->script->main) {
2117 JSOp tracePrevOp = JSOp(*cx->tracePrevPc);
2118 ndefs = js_GetStackDefs(cx, &js_CodeSpec[tracePrevOp], tracePrevOp,
2119 fp->script, cx->tracePrevPc);
2122 * If there aren't that many elements on the stack, then we have
2123 * probably entered a new frame, and printing output would just be
2124 * misleading.
2126 if (ndefs != 0 &&
2127 ndefs < regs->sp - fp->slots) {
2128 for (n = -ndefs; n < 0; n++) {
2129 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
2130 NULL);
2131 if (bytes) {
2132 fprintf(tracefp, "%s %s",
2133 (n == -ndefs) ? " output:" : ",",
2134 bytes);
2135 cx->free(bytes);
2138 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
2140 fprintf(tracefp, " stack: ");
2141 for (siter = StackBase(fp); siter < regs->sp; siter++) {
2142 str = js_ValueToString(cx, *siter);
2143 if (!str)
2144 fputs("<null>", tracefp);
2145 else
2146 js_FileEscapedString(tracefp, str, 0);
2147 fputc(' ', tracefp);
2149 fputc('\n', tracefp);
2152 fprintf(tracefp, "%4u: ",
2153 js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : regs->pc));
2154 js_Disassemble1(cx, fp->script, regs->pc,
2155 regs->pc - fp->script->code,
2156 JS_FALSE, tracefp);
2157 op = (JSOp) *regs->pc;
2158 nuses = js_GetStackUses(&js_CodeSpec[op], op, regs->pc);
2159 if (nuses != 0) {
2160 for (n = -nuses; n < 0; n++) {
2161 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
2162 NULL);
2163 if (bytes) {
2164 fprintf(tracefp, "%s %s",
2165 (n == -nuses) ? " inputs:" : ",",
2166 bytes);
2167 cx->free(bytes);
2170 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
2172 cx->tracePrevPc = regs->pc;
2174 /* It's nice to have complete traces when debugging a crash. */
2175 fflush(tracefp);
2178 #endif /* DEBUG */
2180 #ifdef JS_OPMETER
2182 # include <stdlib.h>
2184 # define HIST_NSLOTS 8
2187 * The second dimension is hardcoded at 256 because an opcode value fits in
2188 * one byte (at most 256 distinct values), and mainly to optimize away
2189 * multiplying by JSOP_LIMIT when addressing any particular row.
2191 static uint32 succeeds[JSOP_LIMIT][256];
2192 static uint32 slot_ops[JSOP_LIMIT][HIST_NSLOTS];
2194 JS_STATIC_INTERPRET void
2195 js_MeterOpcodePair(JSOp op1, JSOp op2)
2197 if (op1 != JSOP_STOP)
2198 ++succeeds[op1][op2];
2201 JS_STATIC_INTERPRET void
2202 js_MeterSlotOpcode(JSOp op, uint32 slot)
2204 if (slot < HIST_NSLOTS)
2205 ++slot_ops[op][slot];
2208 typedef struct Edge {
2209 const char *from;
2210 const char *to;
2211 uint32 count;
2212 } Edge;
2214 static int
2215 compare_edges(const void *a, const void *b)
2217 const Edge *ea = (const Edge *) a;
2218 const Edge *eb = (const Edge *) b;
2220 return (int32)eb->count - (int32)ea->count;
2223 void
2224 js_DumpOpMeters()
2226 const char *name, *from, *style;
2227 FILE *fp;
2228 uint32 total, count;
2229 uint32 i, j, nedges;
2230 Edge *graph;
2232 name = getenv("JS_OPMETER_FILE");
2233 if (!name)
2234 name = "/tmp/ops.dot";
2235 fp = fopen(name, "w");
2236 if (!fp) {
2237 perror(name);
2238 return;
2241 total = nedges = 0;
2242 for (i = 0; i < JSOP_LIMIT; i++) {
2243 for (j = 0; j < JSOP_LIMIT; j++) {
2244 count = succeeds[i][j];
2245 if (count != 0) {
2246 total += count;
2247 ++nedges;
2252 # define SIGNIFICANT(count,total) (200. * (count) >= (total))
2254 graph = (Edge *) js_calloc(nedges * sizeof graph[0]);
2255 for (i = nedges = 0; i < JSOP_LIMIT; i++) {
2256 from = js_CodeName[i];
2257 for (j = 0; j < JSOP_LIMIT; j++) {
2258 count = succeeds[i][j];
2259 if (count != 0 && SIGNIFICANT(count, total)) {
2260 graph[nedges].from = from;
2261 graph[nedges].to = js_CodeName[j];
2262 graph[nedges].count = count;
2263 ++nedges;
2267 qsort(graph, nedges, sizeof(Edge), compare_edges);
2269 # undef SIGNIFICANT
2271 fputs("digraph {\n", fp);
2272 for (i = 0, style = NULL; i < nedges; i++) {
2273 JS_ASSERT(i == 0 || graph[i-1].count >= graph[i].count);
2274 if (!style || graph[i-1].count != graph[i].count) {
2275 style = (i > nedges * .75) ? "dotted" :
2276 (i > nedges * .50) ? "dashed" :
2277 (i > nedges * .25) ? "solid" : "bold";
2279 fprintf(fp, " %s -> %s [label=\"%lu\" style=%s]\n",
2280 graph[i].from, graph[i].to,
2281 (unsigned long)graph[i].count, style);
2283 js_free(graph);
2284 fputs("}\n", fp);
2285 fclose(fp);
2287 name = getenv("JS_OPMETER_HIST");
2288 if (!name)
2289 name = "/tmp/ops.hist";
2290 fp = fopen(name, "w");
2291 if (!fp) {
2292 perror(name);
2293 return;
2295 fputs("bytecode", fp);
2296 for (j = 0; j < HIST_NSLOTS; j++)
2297 fprintf(fp, " slot %1u", (unsigned)j);
2298 putc('\n', fp);
2299 fputs("========", fp);
2300 for (j = 0; j < HIST_NSLOTS; j++)
2301 fputs(" =======", fp);
2302 putc('\n', fp);
2303 for (i = 0; i < JSOP_LIMIT; i++) {
2304 for (j = 0; j < HIST_NSLOTS; j++) {
2305 if (slot_ops[i][j] != 0) {
2306 /* Reuse j in the next loop, since we break after. */
2307 fprintf(fp, "%-8.8s", js_CodeName[i]);
2308 for (j = 0; j < HIST_NSLOTS; j++)
2309 fprintf(fp, " %7lu", (unsigned long)slot_ops[i][j]);
2310 putc('\n', fp);
2311 break;
2315 fclose(fp);
2318 #endif /* JS_OPMETER */
2320 #endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */
2322 #ifndef jsinvoke_cpp___
2324 #define PUSH(v) (*regs.sp++ = (v))
2325 #define PUSH_OPND(v) PUSH(v)
2326 #define STORE_OPND(n,v) (regs.sp[n] = (v))
2327 #define POP() (*--regs.sp)
2328 #define POP_OPND() POP()
2329 #define FETCH_OPND(n) (regs.sp[n])
2332 * Push the jsdouble d using sp from the lexical environment. Try to convert d
2333 * to a jsint that fits in a jsval, otherwise GC-alloc space for it and push a
2334 * reference.
2336 #define STORE_NUMBER(cx, n, d) \
2337 JS_BEGIN_MACRO \
2338 jsint i_; \
2340 if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_)) \
2341 regs.sp[n] = INT_TO_JSVAL(i_); \
2342 else if (!js_NewDoubleInRootedValue(cx, d, &regs.sp[n])) \
2343 goto error; \
2344 JS_END_MACRO
2346 #define STORE_INT(cx, n, i) \
2347 JS_BEGIN_MACRO \
2348 if (INT_FITS_IN_JSVAL(i)) \
2349 regs.sp[n] = INT_TO_JSVAL(i); \
2350 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), &regs.sp[n])) \
2351 goto error; \
2352 JS_END_MACRO
2354 #define STORE_UINT(cx, n, u) \
2355 JS_BEGIN_MACRO \
2356 if ((u) <= JSVAL_INT_MAX) \
2357 regs.sp[n] = INT_TO_JSVAL(u); \
2358 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), &regs.sp[n])) \
2359 goto error; \
2360 JS_END_MACRO
2362 #define FETCH_NUMBER(cx, n, d) \
2363 JS_BEGIN_MACRO \
2364 jsval v_; \
2366 v_ = FETCH_OPND(n); \
2367 VALUE_TO_NUMBER(cx, n, v_, d); \
2368 JS_END_MACRO
2370 #define FETCH_INT(cx, n, i) \
2371 JS_BEGIN_MACRO \
2372 jsval v_; \
2374 v_= FETCH_OPND(n); \
2375 if (JSVAL_IS_INT(v_)) { \
2376 i = JSVAL_TO_INT(v_); \
2377 } else { \
2378 i = js_ValueToECMAInt32(cx, &regs.sp[n]); \
2379 if (JSVAL_IS_NULL(regs.sp[n])) \
2380 goto error; \
2382 JS_END_MACRO
2384 #define FETCH_UINT(cx, n, ui) \
2385 JS_BEGIN_MACRO \
2386 jsval v_; \
2388 v_= FETCH_OPND(n); \
2389 if (JSVAL_IS_INT(v_)) { \
2390 ui = (uint32) JSVAL_TO_INT(v_); \
2391 } else { \
2392 ui = js_ValueToECMAUint32(cx, &regs.sp[n]); \
2393 if (JSVAL_IS_NULL(regs.sp[n])) \
2394 goto error; \
2396 JS_END_MACRO
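/*
 * In the FETCH_* and STORE_* macros above, n is a negative offset from
 * regs.sp, so -1 names the top of the operand stack. The conversion helpers
 * are passed &regs.sp[n] so they can write the converted, rooted number back
 * into the stack slot; they report failure by leaving JSVAL_NULL there,
 * which is what the JSVAL_IS_NULL checks detect.
 */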
2399 * Optimized conversion macros that test for the desired type in v before
2400 * homing sp and calling a conversion function.
2402 #define VALUE_TO_NUMBER(cx, n, v, d) \
2403 JS_BEGIN_MACRO \
2404 JS_ASSERT(v == regs.sp[n]); \
2405 if (JSVAL_IS_INT(v)) { \
2406 d = (jsdouble)JSVAL_TO_INT(v); \
2407 } else if (JSVAL_IS_DOUBLE(v)) { \
2408 d = *JSVAL_TO_DOUBLE(v); \
2409 } else { \
2410 d = js_ValueToNumber(cx, &regs.sp[n]); \
2411 if (JSVAL_IS_NULL(regs.sp[n])) \
2412 goto error; \
2413 JS_ASSERT(JSVAL_IS_NUMBER(regs.sp[n]) || \
2414 regs.sp[n] == JSVAL_TRUE); \
2416 JS_END_MACRO
2418 #define POP_BOOLEAN(cx, v, b) \
2419 JS_BEGIN_MACRO \
2420 v = FETCH_OPND(-1); \
2421 if (v == JSVAL_NULL) { \
2422 b = JS_FALSE; \
2423 } else if (JSVAL_IS_BOOLEAN(v)) { \
2424 b = JSVAL_TO_BOOLEAN(v); \
2425 } else { \
2426 b = js_ValueToBoolean(v); \
2428 regs.sp--; \
2429 JS_END_MACRO
2431 #define VALUE_TO_OBJECT(cx, n, v, obj) \
2432 JS_BEGIN_MACRO \
2433 if (!JSVAL_IS_PRIMITIVE(v)) { \
2434 obj = JSVAL_TO_OBJECT(v); \
2435 } else { \
2436 obj = js_ValueToNonNullObject(cx, v); \
2437 if (!obj) \
2438 goto error; \
2439 STORE_OPND(n, OBJECT_TO_JSVAL(obj)); \
2441 JS_END_MACRO
2443 #define FETCH_OBJECT(cx, n, v, obj) \
2444 JS_BEGIN_MACRO \
2445 v = FETCH_OPND(n); \
2446 VALUE_TO_OBJECT(cx, n, v, obj); \
2447 JS_END_MACRO
2449 #define DEFAULT_VALUE(cx, n, hint, v) \
2450 JS_BEGIN_MACRO \
2451 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v)); \
2452 JS_ASSERT(v == regs.sp[n]); \
2453 if (!JSVAL_TO_OBJECT(v)->defaultValue(cx, hint, &regs.sp[n])) \
2454 goto error; \
2455 v = regs.sp[n]; \
2456 JS_END_MACRO
2459 * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when
2460 * the lowest bit of v is 1 and bits 30 and 31 are equal (both 0 or both 1). For
2461 * such v we can do increment or decrement via adding or subtracting two
2462 * without checking that the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX.
2464 #define CAN_DO_FAST_INC_DEC(v) (((((v) << 1) ^ v) & 0x80000001) == 1)
2466 JS_STATIC_ASSERT(JSVAL_INT == 1);
2467 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MIN)));
2468 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MAX)));
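/*
 * Worked example (illustrative, mirroring the assertions above): for i == 5
 * the tagged value is v == (5 << 1) | 1 == 0xB; then ((v << 1) ^ v) == 0x1D
 * and masking with 0x80000001 leaves 1, so the fast path applies. For
 * i == JSVAL_INT_MAX the mask leaves 0x80000001 instead, so the slow path
 * that allocates a double is taken.
 */
JS_STATIC_ASSERT(CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(5)));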
2471 * Conditional assert to detect failure to clear a pending exception that is
2472 * suppressed (or unintentional suppression of a wanted exception).
2474 #if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver
2475 # define DEBUG_NOT_THROWING 1
2476 #endif
2478 #ifdef DEBUG_NOT_THROWING
2479 # define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing)
2480 #else
2481 # define ASSERT_NOT_THROWING(cx) /* nothing */
2482 #endif
2485 * Define JS_OPMETER to instrument bytecode succession, generating a .dot file
2486 * on shutdown that shows the graph of significant predecessor/successor pairs
2487 * executed, where the edge labels give the succession counts. The .dot file
2488 * is named by the JS_OPMETER_FILE environment variable and defaults to /tmp/ops.dot.
2490 * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops
2491 * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts
2492 * are written to the file named by JS_OPMETER_HIST, defaulting to /tmp/ops.hist.
2494 #ifndef JS_OPMETER
2495 # define METER_OP_INIT(op) /* nothing */
2496 # define METER_OP_PAIR(op1,op2) /* nothing */
2497 # define METER_SLOT_OP(op,slot) /* nothing */
2498 #else
2501 * The second dimension is hardcoded at 256 because an opcode value fits in
2502 * one byte (at most 256 distinct values), and mainly to optimize away
2503 * multiplying by JSOP_LIMIT when addressing any particular row.
2505 # define METER_OP_INIT(op) ((op) = JSOP_STOP)
2506 # define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2))
2507 # define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot))
2509 #endif
2512 * Threaded interpretation via computed goto appears to be well-supported by
2513 * GCC 3 and higher. IBM's C compiler when run with the right options (e.g.,
2514 * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler.
2515 * Currently it's broken for JS_VERSION < 160, though this isn't worth fixing.
2516 * Add your compiler support macros here.
2518 #ifndef JS_THREADED_INTERP
2519 # if JS_VERSION >= 160 && ( \
2520 __GNUC__ >= 3 || \
2521 (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) || \
2522 __SUNPRO_C >= 0x570)
2523 # define JS_THREADED_INTERP 1
2524 # else
2525 # define JS_THREADED_INTERP 0
2526 # endif
2527 #endif
2530 * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
2531 * single-thread DEBUG js shell testing to verify property cache hits.
2533 #if defined DEBUG && !defined JS_THREADSAFE
2535 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
2536 JS_BEGIN_MACRO \
2537 if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
2538 entry)) { \
2539 goto error; \
2541 JS_END_MACRO
2543 static bool
2544 AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
2545 ptrdiff_t pcoff, JSObject *start, JSObject *found,
2546 JSPropCacheEntry *entry)
2548 uint32 sample = cx->runtime->gcNumber;
2550 JSAtom *atom;
2551 if (pcoff >= 0)
2552 GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
2553 else
2554 atom = cx->runtime->atomState.lengthAtom;
2556 JSObject *obj, *pobj;
2557 JSProperty *prop;
2558 JSBool ok;
2560 if (JOF_OPMODE(*regs.pc) == JOF_NAME) {
2561 ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop);
2562 } else {
2563 obj = start;
2564 ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
2566 if (!ok)
2567 return false;
2568 if (cx->runtime->gcNumber != sample ||
2569 PCVCAP_SHAPE(entry->vcap) != OBJ_SHAPE(pobj)) {
2570 pobj->dropProperty(cx, prop);
2571 return true;
2573 JS_ASSERT(prop);
2574 JS_ASSERT(pobj == found);
2576 JSScopeProperty *sprop = (JSScopeProperty *) prop;
2577 if (PCVAL_IS_SLOT(entry->vword)) {
2578 JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop->slot);
2579 JS_ASSERT(!sprop->isMethod());
2580 } else if (PCVAL_IS_SPROP(entry->vword)) {
2581 JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop);
2582 JS_ASSERT_IF(sprop->isMethod(),
2583 sprop->methodValue() == LOCKED_OBJ_GET_SLOT(pobj, sprop->slot));
2584 } else {
2585 jsval v;
2586 JS_ASSERT(PCVAL_IS_OBJECT(entry->vword));
2587 JS_ASSERT(entry->vword != PCVAL_NULL);
2588 JS_ASSERT(OBJ_SCOPE(pobj)->branded() || OBJ_SCOPE(pobj)->hasMethodBarrier());
2589 JS_ASSERT(SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop));
2590 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(pobj)));
2591 v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
2592 JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
2593 JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v));
2595 if (sprop->isMethod()) {
2596 JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP);
2597 JS_ASSERT(sprop->methodValue() == v);
2601 pobj->dropProperty(cx, prop);
2602 return true;
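/*
 * In short, the assertion above redoes the lookup through the uncached
 * js_FindProperty/js_LookupProperty path and checks that the cached vword
 * (slot, sprop, or branded function object) agrees with what the slow path
 * finds, bailing out harmlessly if a GC or a shape change happened during
 * the re-lookup.
 */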
2605 #else
2606 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
2607 #endif
2610 * Ensure that the interpreter switch can close call-bytecode cases in the
2611 * same way as non-call bytecodes.
2613 JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH);
2614 JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
2615 JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
2616 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH);
2617 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH);
2618 JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
2619 JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
2620 JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH);
2621 JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH);
2624 * Same for debuggable flat closures defined at top level in another function
2625 * or program fragment.
2627 JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH == JSOP_DEFFUN_DBGFC_LENGTH);
2630 * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but
2631 * remain distinct for the decompiler. Likewise for JSOP_INIT{PROP,METHOD}.
2633 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
2634 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETMETHOD_LENGTH);
2635 JS_STATIC_ASSERT(JSOP_INITPROP_LENGTH == JSOP_INITMETHOD_LENGTH);
2637 /* See TRY_BRANCH_AFTER_COND. */
2638 JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH);
2639 JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1);
2641 /* For the fastest case under JSOP_INCNAME, etc. */
2642 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH);
2643 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH);
2644 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH);
2646 #ifdef JS_TRACER
2647 # define ABORT_RECORDING(cx, reason) \
2648 JS_BEGIN_MACRO \
2649 if (TRACE_RECORDER(cx)) \
2650 js_AbortRecording(cx, reason); \
2651 JS_END_MACRO
2652 #else
2653 # define ABORT_RECORDING(cx, reason) ((void) 0)
2654 #endif
2656 JS_REQUIRES_STACK JSBool
2657 js_Interpret(JSContext *cx)
2659 #ifdef MOZ_TRACEVIS
2660 TraceVisStateObj tvso(cx, S_INTERP);
2661 #endif
2663 JSRuntime *rt;
2664 JSStackFrame *fp;
2665 JSScript *script;
2666 uintN inlineCallCount;
2667 JSAtom **atoms;
2668 JSVersion currentVersion, originalVersion;
2669 JSFrameRegs regs;
2670 JSObject *obj, *obj2, *parent;
2671 JSBool ok, cond;
2672 jsint len;
2673 jsbytecode *endpc, *pc2;
2674 JSOp op, op2;
2675 jsatomid index;
2676 JSAtom *atom;
2677 uintN argc, attrs, flags;
2678 uint32 slot;
2679 jsval *vp, lval, rval, ltmp, rtmp;
2680 jsid id;
2681 JSProperty *prop;
2682 JSScopeProperty *sprop;
2683 JSString *str, *str2;
2684 jsint i, j;
2685 jsdouble d, d2;
2686 JSClass *clasp;
2687 JSFunction *fun;
2688 JSType type;
2689 jsint low, high, off, npairs;
2690 JSBool match;
2691 #if JS_HAS_GETTER_SETTER
2692 JSPropertyOp getter, setter;
2693 #endif
2694 JSAutoResolveFlags rf(cx, JSRESOLVE_INFER);
2696 # ifdef DEBUG
2698 * We call this macro from BEGIN_CASE in threaded interpreters,
2699 * and before entering the switch in non-threaded interpreters.
2700 * However, reaching such points doesn't mean we've actually
2701 * fetched an OP from the instruction stream: some opcodes use
2702 * 'op=x; DO_OP()' to let another opcode's implementation finish
2703 * their work, and many opcodes share entry points with a run of
2704 * consecutive BEGIN_CASEs.
2706 * Take care to trace OP only when it is the opcode fetched from
2707 * the instruction stream, so the trace matches what one would
2708 * expect from looking at the code. (We do omit POPs after SETs;
2709 * unfortunate, but not worth fixing.)
2711 # define TRACE_OPCODE(OP) JS_BEGIN_MACRO \
2712 if (JS_UNLIKELY(cx->tracefp != NULL) && \
2713 (OP) == *regs.pc) \
2714 js_TraceOpcode(cx); \
2715 JS_END_MACRO
2716 # else
2717 # define TRACE_OPCODE(OP) ((void) 0)
2718 # endif
2720 #if JS_THREADED_INTERP
2721 static void *const normalJumpTable[] = {
2722 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2723 JS_EXTENSION &&L_##op,
2724 # include "jsopcode.tbl"
2725 # undef OPDEF
2728 static void *const interruptJumpTable[] = {
2729 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2730 JS_EXTENSION &&interrupt,
2731 # include "jsopcode.tbl"
2732 # undef OPDEF
2735 register void * const *jumpTable = normalJumpTable;
2737 METER_OP_INIT(op); /* to nullify first METER_OP_PAIR */
2739 # define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable))
2741 # ifdef JS_TRACER
2742 # define CHECK_RECORDER() \
2743 JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable)
2744 # else
2745 # define CHECK_RECORDER() ((void)0)
2746 # endif
2748 # define DO_OP() JS_BEGIN_MACRO \
2749 CHECK_RECORDER(); \
2750 JS_EXTENSION_(goto *jumpTable[op]); \
2751 JS_END_MACRO
2752 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2753 METER_OP_PAIR(op, regs.pc[n]); \
2754 op = (JSOp) *(regs.pc += (n)); \
2755 DO_OP(); \
2756 JS_END_MACRO
2758 # define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER();
2759 # define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH);
2760 # define END_VARLEN_CASE DO_NEXT_OP(len);
2761 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \
2762 JS_ASSERT(js_CodeSpec[OP].length == 1); \
2763 op = (JSOp) *++regs.pc; \
2764 DO_OP();
2766 # define END_EMPTY_CASES
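/*
 * Under threaded interpretation each BEGIN_CASE(OP) is a label and
 * DO_NEXT_OP jumps straight to the next opcode's implementation through
 * jumpTable, so there is no central dispatch switch; the switch-based
 * equivalents of these macros follow in the #else branch below.
 */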
2768 #else /* !JS_THREADED_INTERP */
2770 register intN switchMask = 0;
2771 intN switchOp;
2773 # define ENABLE_INTERRUPTS() ((void) (switchMask = -1))
2775 # ifdef JS_TRACER
2776 # define CHECK_RECORDER() \
2777 JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1)
2778 # else
2779 # define CHECK_RECORDER() ((void)0)
2780 # endif
2782 # define DO_OP() goto do_op
2783 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2784 JS_ASSERT((n) == len); \
2785 goto advance_pc; \
2786 JS_END_MACRO
2788 # define BEGIN_CASE(OP) case OP: CHECK_RECORDER();
2789 # define END_CASE(OP) END_CASE_LEN(OP##_LENGTH)
2790 # define END_CASE_LEN(n) END_CASE_LENX(n)
2791 # define END_CASE_LENX(n) END_CASE_LEN##n
2794 * To share the code for all len == 1 cases we use the specialized
2795 * advance_pc_by_one label, whose code falls through to advance_pc.
2797 # define END_CASE_LEN1 goto advance_pc_by_one;
2798 # define END_CASE_LEN2 len = 2; goto advance_pc;
2799 # define END_CASE_LEN3 len = 3; goto advance_pc;
2800 # define END_CASE_LEN4 len = 4; goto advance_pc;
2801 # define END_CASE_LEN5 len = 5; goto advance_pc;
2802 # define END_VARLEN_CASE goto advance_pc;
2803 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
2804 # define END_EMPTY_CASES goto advance_pc_by_one;
2806 #endif /* !JS_THREADED_INTERP */
2808 /* Check for a too-deep native thread stack. */
2809 JS_CHECK_RECURSION(cx, return JS_FALSE);
2811 rt = cx->runtime;
2813 /* Set registerized frame pointer and derived script pointer. */
2814 fp = cx->fp;
2815 script = fp->script;
2816 JS_ASSERT(!script->isEmpty());
2817 JS_ASSERT(script->length > 1);
2819 /* Count of JS function calls that nest in this C js_Interpret frame. */
2820 inlineCallCount = 0;
2823 * Initialize the index segment register used by LOAD_ATOM and
2824 * GET_FULL_INDEX macros below. As a register we use a pointer based on
2825 * the atom map to turn frequently executed LOAD_ATOM into simple array
2826 * access. For less frequent object and regexp loads we have to recover
2827 * the segment from the atoms pointer first.
2829 atoms = script->atomMap.vector;
2831 #define LOAD_ATOM(PCOFF) \
2832 JS_BEGIN_MACRO \
2833 JS_ASSERT(fp->imacpc \
2834 ? atoms == COMMON_ATOMS_START(&rt->atomState) && \
2835 GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \
2836 : (size_t)(atoms - script->atomMap.vector) < \
2837 (size_t)(script->atomMap.length - \
2838 GET_INDEX(regs.pc + PCOFF))); \
2839 atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \
2840 JS_END_MACRO
2842 #define GET_FULL_INDEX(PCOFF) \
2843 (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
2845 #define LOAD_OBJECT(PCOFF) \
2846 (obj = script->getObject(GET_FULL_INDEX(PCOFF)))
2848 #define LOAD_FUNCTION(PCOFF) \
2849 (fun = script->getFunction(GET_FULL_INDEX(PCOFF)))
2851 #ifdef JS_TRACER
2853 #ifdef MOZ_TRACEVIS
2854 #if JS_THREADED_INTERP
2855 #define MONITOR_BRANCH_TRACEVIS \
2856 JS_BEGIN_MACRO \
2857 if (jumpTable != interruptJumpTable) \
2858 js_EnterTraceVisState(cx, S_RECORD, R_NONE); \
2859 JS_END_MACRO
2860 #else /* !JS_THREADED_INTERP */
2861 #define MONITOR_BRANCH_TRACEVIS \
2862 JS_BEGIN_MACRO \
2863 js_EnterTraceVisState(cx, S_RECORD, R_NONE); \
2864 JS_END_MACRO
2865 #endif
2866 #else
2867 #define MONITOR_BRANCH_TRACEVIS
2868 #endif
2870 #define RESTORE_INTERP_VARS() \
2871 JS_BEGIN_MACRO \
2872 fp = cx->fp; \
2873 script = fp->script; \
2874 atoms = FrameAtomBase(cx, fp); \
2875 currentVersion = (JSVersion) script->version; \
2876 JS_ASSERT(fp->regs == &regs); \
2877 if (cx->throwing) \
2878 goto error; \
2879 JS_END_MACRO
2881 #define MONITOR_BRANCH(reason) \
2882 JS_BEGIN_MACRO \
2883 if (TRACING_ENABLED(cx)) { \
2884 if (js_MonitorLoopEdge(cx, inlineCallCount, reason)) { \
2885 JS_ASSERT(TRACE_RECORDER(cx)); \
2886 MONITOR_BRANCH_TRACEVIS; \
2887 ENABLE_INTERRUPTS(); \
2889 RESTORE_INTERP_VARS(); \
2891 JS_END_MACRO
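/*
 * js_MonitorLoopEdge may have executed a compiled trace, pushing and popping
 * frames behind the interpreter's back, which is why RESTORE_INTERP_VARS
 * reloads fp, script and atoms from cx before interpretation continues.
 */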
2893 #else /* !JS_TRACER */
2895 #define MONITOR_BRANCH(reason) ((void) 0)
2897 #endif /* !JS_TRACER */
2900 * Prepare to call a user-supplied branch handler, and abort the script
2901 * if it returns false.
2903 #define CHECK_BRANCH() \
2904 JS_BEGIN_MACRO \
2905 if (!JS_CHECK_OPERATION_LIMIT(cx)) \
2906 goto error; \
2907 JS_END_MACRO
2909 #ifndef TRACE_RECORDER
2910 #define TRACE_RECORDER(cx) (false)
2911 #endif
2913 #define BRANCH(n) \
2914 JS_BEGIN_MACRO \
2915 regs.pc += (n); \
2916 op = (JSOp) *regs.pc; \
2917 if ((n) <= 0) { \
2918 CHECK_BRANCH(); \
2919 if (op == JSOP_NOP) { \
2920 if (TRACE_RECORDER(cx)) { \
2921 MONITOR_BRANCH(Record_Branch); \
2922 op = (JSOp) *regs.pc; \
2923 } else { \
2924 op = (JSOp) *++regs.pc; \
2926 } else if (op == JSOP_TRACE) { \
2927 MONITOR_BRANCH(Record_Branch); \
2928 op = (JSOp) *regs.pc; \
2931 DO_OP(); \
2932 JS_END_MACRO
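/*
 * Only backward jumps (n <= 0) check the operation limit and consult the
 * trace monitor: landing on a loop head marked JSOP_TRACE starts or extends
 * recording via MONITOR_BRANCH, while a JSOP_NOP there is simply stepped
 * over unless a recorder is already active.
 */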
2934 MUST_FLOW_THROUGH("exit");
2935 ++cx->interpLevel;
2938 * Optimized Get and SetVersion for proper script language versioning.
2940 * If any native method or JSClass/JSObjectOps hook calls js_SetVersion
2941 * and changes cx->version, the effect will "stick" and we will stop
2942 * maintaining currentVersion. This is relied upon by testsuites, for
2943 * the most part -- web browsers select version before compiling and not
2944 * at run-time.
2946 currentVersion = (JSVersion) script->version;
2947 originalVersion = (JSVersion) cx->version;
2948 if (currentVersion != originalVersion)
2949 js_SetVersion(cx, currentVersion);
2951 /* Update the static-link display. */
2952 if (script->staticLevel < JS_DISPLAY_SIZE) {
2953 JSStackFrame **disp = &cx->display[script->staticLevel];
2954 fp->displaySave = *disp;
2955 *disp = fp;
2958 # define CHECK_INTERRUPT_HANDLER() \
2959 JS_BEGIN_MACRO \
2960 if (cx->debugHooks->interruptHandler) \
2961 ENABLE_INTERRUPTS(); \
2962 JS_END_MACRO
2965 * Load the debugger's interrupt hook here and after calling out to native
2966 * functions (but not to getters, setters, or other native hooks), so we do
2967 * not have to reload it each time through the interpreter loop -- we hope
2968 * the compiler can keep it in a register when it is non-null.
2970 CHECK_INTERRUPT_HANDLER();
2972 #if !JS_HAS_GENERATORS
2973 JS_ASSERT(!fp->regs);
2974 #else
2975 /* Initialize the pc and sp registers unless we're resuming a generator. */
2976 if (JS_LIKELY(!fp->regs)) {
2977 #endif
2978 ASSERT_NOT_THROWING(cx);
2979 regs.pc = script->code;
2980 regs.sp = StackBase(fp);
2981 fp->regs = &regs;
2982 #if JS_HAS_GENERATORS
2983 } else {
2984 JSGenerator *gen;
2986 JS_ASSERT(fp->flags & JSFRAME_GENERATOR);
2987 gen = FRAME_TO_GENERATOR(fp);
2988 JS_ASSERT(fp->regs == &gen->savedRegs);
2989 regs = gen->savedRegs;
2990 fp->regs = &regs;
2991 JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
2992 JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script));
2995 * To support generator_throw and to catch ignored exceptions,
2996 * fail if cx->throwing is set.
2998 if (cx->throwing) {
2999 #ifdef DEBUG_NOT_THROWING
3000 if (cx->exception != JSVAL_ARETURN) {
3001 printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n",
3002 (unsigned long) cx->exception);
3004 #endif
3005 goto error;
3008 #endif /* JS_HAS_GENERATORS */
3010 #ifdef JS_TRACER
3012 * We cannot reenter the interpreter while recording; wait to abort until
3013 * after cx->fp->regs is set.
3015 if (TRACE_RECORDER(cx))
3016 js_AbortRecording(cx, "attempt to reenter interpreter while recording");
3017 #endif
3020 * It is important that "op" be initialized before calling DO_OP because
3021 * it is possible for "op" to be specially assigned during the normal
3022 * processing of an opcode while looping. We rely on DO_NEXT_OP to manage
3023 * "op" correctly in all other cases.
3025 len = 0;
3026 DO_NEXT_OP(len);
3028 #if JS_THREADED_INTERP
3030 * This is a loop, but it does not look like a loop. The loop-closing
3031 * jump is distributed throughout goto *jumpTable[op] inside of DO_OP.
3032 * When interrupts are enabled, jumpTable is set to interruptJumpTable
3033 * where all jumps point to the interrupt label. The latter, after
3034 * calling the interrupt handler, dispatches through normalJumpTable to
3035 * continue the normal bytecode processing.
3038 #else /* !JS_THREADED_INTERP */
3039 for (;;) {
3040 advance_pc_by_one:
3041 JS_ASSERT(js_CodeSpec[op].length == 1);
3042 len = 1;
3043 advance_pc:
3044 regs.pc += len;
3045 op = (JSOp) *regs.pc;
3047 do_op:
3048 CHECK_RECORDER();
3049 TRACE_OPCODE(op);
3050 switchOp = intN(op) | switchMask;
3051 do_switch:
3052 switch (switchOp) {
3053 #endif
3055 /********************** Here we include the operations ***********************/
3056 #include "jsops.cpp"
3057 /*****************************************************************************/
3059 #if !JS_THREADED_INTERP
3060 default:
3061 #endif
3062 #ifndef JS_TRACER
3063 bad_opcode:
3064 #endif
3066 char numBuf[12];
3067 JS_snprintf(numBuf, sizeof numBuf, "%d", op);
3068 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
3069 JSMSG_BAD_BYTECODE, numBuf);
3070 goto error;
3073 #if !JS_THREADED_INTERP
3074 } /* switch (op) */
3075 } /* for (;;) */
3076 #endif /* !JS_THREADED_INTERP */
3078 error:
3079 if (fp->imacpc && cx->throwing) {
3080 // To keep things simple, we hard-code imacro exception handlers here.
3081 if (*fp->imacpc == JSOP_NEXTITER && js_ValueIsStopIteration(cx->exception)) {
3082 // pc may point to JSOP_DUP here due to bug 474854.
3083 JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP || *regs.pc == JSOP_TRUE);
3084 cx->throwing = JS_FALSE;
3085 cx->exception = JSVAL_VOID;
3086 regs.sp[-1] = JSVAL_HOLE;
3087 PUSH(JSVAL_FALSE);
3088 goto end_imacro;
3091 // Handle other exceptions as if they came from the imacro-calling pc.
3092 regs.pc = fp->imacpc;
3093 fp->imacpc = NULL;
3094 atoms = script->atomMap.vector;
3097 JS_ASSERT((size_t)((fp->imacpc ? fp->imacpc : regs.pc) - script->code) < script->length);
3099 #ifdef JS_TRACER
3101 * This abort could be weakened to permit tracing through exceptions that
3102 * are thrown and caught within a loop, with the co-operation of the tracer.
3103 * For now just bail on any sign of trouble.
3105 if (TRACE_RECORDER(cx))
3106 js_AbortRecording(cx, "error or exception while recording");
3107 #endif
3109 if (!cx->throwing) {
3110 /* This is an error, not a catchable exception, quit the frame ASAP. */
3111 ok = JS_FALSE;
3112 } else {
3113 JSTrapHandler handler;
3114 JSTryNote *tn, *tnlimit;
3115 uint32 offset;
3117 /* Call debugger throw hook if set. */
3118 handler = cx->debugHooks->throwHook;
3119 if (handler) {
3120 switch (handler(cx, script, regs.pc, &rval,
3121 cx->debugHooks->throwHookData)) {
3122 case JSTRAP_ERROR:
3123 cx->throwing = JS_FALSE;
3124 goto error;
3125 case JSTRAP_RETURN:
3126 cx->throwing = JS_FALSE;
3127 fp->rval = rval;
3128 ok = JS_TRUE;
3129 goto forced_return;
3130 case JSTRAP_THROW:
3131 cx->exception = rval;
3132 case JSTRAP_CONTINUE:
3133 default:;
3135 CHECK_INTERRUPT_HANDLER();
3139 * Look for a try block in the script that can catch this exception.
3141 if (script->trynotesOffset == 0)
3142 goto no_catch;
3144 offset = (uint32)(regs.pc - script->main);
3145 tn = script->trynotes()->vector;
3146 tnlimit = tn + script->trynotes()->length;
3147 do {
3148 if (offset - tn->start >= tn->length)
3149 continue;
3152 * We have a note that covers the exception pc but we must check
3153 * whether the interpreter has already executed the corresponding
3154 * handler. This is possible when the executed bytecode
3155 * implements break or return from inside a for-in loop.
3157 * In this case the emitter generates additional [enditer] and
3158 * [gosub] opcodes to close all outstanding iterators and execute
3159 * the finally blocks. If such an [enditer] throws an exception,
3160 * its pc can still be inside several nested for-in loops and
3161 * try-finally statements even if we have already closed the
3162 * corresponding iterators and invoked the finally blocks.
3164 * To address this, we make [enditer] always decrease the stack
3165 * even when its implementation throws an exception. Thus already
3166 * executed [enditer] and [gosub] opcodes will have try notes
3167 * with the stack depth exceeding the current one and this
3168 * condition is what we use to filter them out.
3170 if (tn->stackDepth > regs.sp - StackBase(fp))
3171 continue;
3174 * Set pc to the first bytecode after the try note to point
3175 * to the beginning of catch or finally or to [enditer] closing
3176 * the for-in loop.
3178 regs.pc = (script)->main + tn->start + tn->length;
3180 ok = js_UnwindScope(cx, fp, tn->stackDepth, JS_TRUE);
3181 JS_ASSERT(fp->regs->sp == StackBase(fp) + tn->stackDepth);
3182 if (!ok) {
3184 * Restart the handler search with updated pc and stack depth
3185 * to properly notify the debugger.
3187 goto error;
3190 switch (tn->kind) {
3191 case JSTRY_CATCH:
3192 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENTERBLOCK);
3194 #if JS_HAS_GENERATORS
3195 /* Catch cannot intercept the closing of a generator. */
3196 if (JS_UNLIKELY(cx->exception == JSVAL_ARETURN))
3197 break;
3198 #endif
3201 * Don't clear cx->throwing to save cx->exception from GC
3202 * until it is pushed to the stack via [exception] in the
3203 * catch block.
3205 len = 0;
3206 DO_NEXT_OP(len);
3208 case JSTRY_FINALLY:
3210 * Push (true, exception) pair for finally to indicate that
3211 * [retsub] should rethrow the exception.
3213 PUSH(JSVAL_TRUE);
3214 PUSH(cx->exception);
3215 cx->throwing = JS_FALSE;
3216 len = 0;
3217 DO_NEXT_OP(len);
3219 case JSTRY_ITER:
3221 * This is similar to JSOP_ENDITER in the interpreter loop,
3222 * except the code now uses the stack slot normally used by
3223 * JSOP_NEXTITER, namely regs.sp[-1] before the regs.sp -= 2
3224 * adjustment and regs.sp[1] after, to save and restore the
3225 * pending exception.
3227 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENDITER);
3228 regs.sp[-1] = cx->exception;
3229 cx->throwing = JS_FALSE;
3230 ok = js_CloseIterator(cx, regs.sp[-2]);
3231 regs.sp -= 2;
3232 if (!ok)
3233 goto error;
3234 cx->throwing = JS_TRUE;
3235 cx->exception = regs.sp[1];
3237 } while (++tn != tnlimit);
3239 no_catch:
3241 * Propagate the exception or error to the caller unless the exception
3242 * is an asynchronous return from a generator.
3244 ok = JS_FALSE;
3245 #if JS_HAS_GENERATORS
3246 if (JS_UNLIKELY(cx->throwing && cx->exception == JSVAL_ARETURN)) {
3247 cx->throwing = JS_FALSE;
3248 ok = JS_TRUE;
3249 fp->rval = JSVAL_VOID;
3251 #endif
3254 forced_return:
3256 * Unwind the scope, making sure that ok stays false even when js_UnwindScope
3257 * returns true.
3259 * When a trap handler returns JSTRAP_RETURN, we jump here with ok set to
3260 * true, bypassing any finally blocks.
3262 ok &= js_UnwindScope(cx, fp, 0, ok || cx->throwing);
3263 JS_ASSERT(regs.sp == StackBase(fp));
3265 #ifdef DEBUG
3266 cx->tracePrevPc = NULL;
3267 #endif
3269 if (inlineCallCount)
3270 goto inline_return;
3272 exit:
3274 * At this point we are inevitably leaving an interpreted function or a
3275 * top-level script, and returning to one of:
3276 * (a) an "out of line" call made through js_Invoke;
3277 * (b) a js_Execute activation;
3278 * (c) a generator (SendToGenerator, jsiter.c).
3280 * We must not be in an inline frame. The check above ensures that for the
3281 * error case and for a normal return, the code jumps directly to the parent's
3282 * frame pc.
3284 JS_ASSERT(inlineCallCount == 0);
3285 JS_ASSERT(fp->regs == &regs);
3286 #ifdef JS_TRACER
3287 if (TRACE_RECORDER(cx))
3288 js_AbortRecording(cx, "recording out of js_Interpret");
3289 #endif
3290 #if JS_HAS_GENERATORS
3291 if (JS_UNLIKELY(fp->flags & JSFRAME_YIELDING)) {
3292 JSGenerator *gen;
3294 gen = FRAME_TO_GENERATOR(fp);
3295 gen->savedRegs = regs;
3296 gen->frame.regs = &gen->savedRegs;
3297 } else
3298 #endif /* JS_HAS_GENERATORS */
3300 JS_ASSERT(!fp->blockChain);
3301 JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0));
3302 fp->regs = NULL;
3305 /* Undo the remaining effects committed on entry to js_Interpret. */
3306 if (script->staticLevel < JS_DISPLAY_SIZE)
3307 cx->display[script->staticLevel] = fp->displaySave;
3308 if (cx->version == currentVersion && currentVersion != originalVersion)
3309 js_SetVersion(cx, originalVersion);
3310 --cx->interpLevel;
3312 return ok;
3314 atom_not_defined:
3316 const char *printable;
3318 printable = js_AtomToPrintableString(cx, atom);
3319 if (printable)
3320 js_ReportIsNotDefined(cx, printable);
3321 goto error;
3325 #endif /* !defined jsinvoke_cpp___ */