/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=4 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   Andreas Gal <gal@mozilla.com>
 *   Mike Shaver <shaver@mozilla.org>
 *   David Anderson <danderson@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
#include "jsstdint.h"
#include "jsbit.h"              // low-level (NSPR-based) headers next
#include "jsprf.h"
#include <math.h>               // standard headers next

#if defined(_MSC_VER) || defined(__MINGW32__)
#include <malloc.h>
#ifdef _MSC_VER
#define alloca _alloca
#endif
#endif
#ifdef SOLARIS
#include <alloca.h>
#endif
#include <limits.h>

#include "nanojit/nanojit.h"
#include "jsapi.h"              // higher-level library and API headers
#include "jsarray.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsdbgapi.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jsobj.h"
#include "jsopcode.h"
#include "jsregexp.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsdate.h"
#include "jsstaticcheck.h"
#include "jstracer.h"
#include "jsxml.h"

#include "jsautooplen.h"        // generated headers last
#include "imacros.c.out"
#if JS_HAS_XML_SUPPORT
#define ABORT_IF_XML(v)                                                       \
    JS_BEGIN_MACRO                                                            \
    if (!JSVAL_IS_PRIMITIVE(v) && OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(v)))\
        ABORT_TRACE("xml detected");                                          \
    JS_END_MACRO
#else
#define ABORT_IF_XML(v) ((void) 0)
#endif
/* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
   the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then
   handle the undefined case properly (bug 457363). */
#undef JSVAL_IS_BOOLEAN
#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)

/* Use a fake tag to represent boxed values, borrowing from the integer tag
   range since we only use JSVAL_INT to indicate integers. */
#define JSVAL_BOXED 3

/* Another fake jsval tag, used to distinguish null from object values. */
#define JSVAL_TNULL 5

/* A last fake jsval tag distinguishing functions from non-function objects. */
#define JSVAL_TFUN 7
/* Map to translate a type tag into a printable representation. */
static const char typeChar[] = "OIDXSNBF";
static const char tagChar[]  = "OIDISIBI";
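
/*
 * A quick legend (inferred from the jsval tags and the fake tags defined
 * above; added here as an aid, not original commentary): indices 0..7 stand
 * for Object, Int, Double, boXed, String, Null, Boolean and Function, so
 * e.g. typeChar[JSVAL_BOXED] yields 'X' and typeChar[JSVAL_TFUN] yields 'F'
 * in the capture/import debug output below. tagChar is the companion map
 * giving the character for the underlying jsval tag of each type.
 */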
/* Blacklist parameters. */

/* Number of iterations of a loop where we start tracing. That is, we don't
   start tracing until the beginning of the HOTLOOP-th iteration. */
#define HOTLOOP 2

/* Attempt recording this many times before blacklisting permanently. */
#define BL_ATTEMPTS 2

/* Skip this many future hits before allowing recording again after blacklisting. */
#define BL_BACKOFF 32

/* Number of times we wait to exit on a side exit before we try to extend the tree. */
#define HOTEXIT 1

/* Number of times we try to extend the tree along a side exit. */
#define MAXEXIT 3

/* Maximum number of peer trees allowed. */
#define MAXPEERS 9

/* Max call depths for inlining. */
#define MAX_CALLDEPTH 10

/* Max native stack size. */
#define MAX_NATIVE_STACK_SLOTS 1024

/* Max call stack size. */
#define MAX_CALL_STACK_ENTRIES 64

/* Max global object size. */
#define MAX_GLOBAL_SLOTS 4096

/* Max memory you can allocate in a LIR buffer via a single insSkip() call. */
#define MAX_SKIP_BYTES (NJ_PAGE_SIZE - sizeof(LIns))

/* Max memory needed to rebuild the interpreter stack when falling off trace. */
#define MAX_INTERP_STACK_BYTES                                                \
    (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
     MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
     sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */

/* Max number of branches per tree. */
#define MAX_BRANCHES 32
#define CHECK_STATUS(expr)                                                    \
    JS_BEGIN_MACRO                                                            \
        JSRecordingStatus _status = (expr);                                   \
        if (_status != JSRS_CONTINUE)                                         \
            return _status;                                                   \
    JS_END_MACRO

#ifdef JS_JIT_SPEW
#define debug_only_a(x) if (js_verboseAbort || js_verboseDebug ) { x; }
#define ABORT_TRACE_RV(msg, value)                                            \
    JS_BEGIN_MACRO                                                            \
        debug_only_a(fprintf(stdout, "abort: %d: %s\n", __LINE__, (msg));)    \
        return (value);                                                       \
    JS_END_MACRO
#else
#define debug_only_a(x)
#define ABORT_TRACE_RV(msg, value)   return (value)
#endif

#define ABORT_TRACE(msg)         ABORT_TRACE_RV(msg, JSRS_STOP)
#define ABORT_TRACE_ERROR(msg)   ABORT_TRACE_RV(msg, JSRS_ERROR)
#ifdef JS_JIT_SPEW
struct __jitstats {
#define JITSTAT(x) uint64 x;
#include "jitstats.tbl"
#undef JITSTAT
} jitstats = { 0LL, };

JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);

enum jitstat_ids {
#define JITSTAT(x) STAT ## x ## ID,
#include "jitstats.tbl"
#undef JITSTAT
    STAT_IDS_TOTAL
};

static JSPropertySpec jitstats_props[] = {
#define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
#include "jitstats.tbl"
#undef JITSTAT
    { 0 }
};

static JSBool
jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
{
    int index = -1;

    if (JSVAL_IS_STRING(id)) {
        JSString* str = JSVAL_TO_STRING(id);
        if (strcmp(JS_GetStringBytes(str), "HOTLOOP") == 0) {
            *vp = INT_TO_JSVAL(HOTLOOP);
            return JS_TRUE;
        }
    }

    if (JSVAL_IS_INT(id))
        index = JSVAL_TO_INT(id);

    uint64 result = 0;
    switch (index) {
#define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
#include "jitstats.tbl"
#undef JITSTAT
      default:
        *vp = JSVAL_VOID;
        return JS_TRUE;
    }

    if (result < JSVAL_INT_MAX) {
        *vp = INT_TO_JSVAL(result);
        return JS_TRUE;
    }
    char retstr[64];
    JS_snprintf(retstr, sizeof retstr, "%llu", result);
    *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
    return JS_TRUE;
}

JSClass jitstats_class = {
    "jitstats",
    JSCLASS_HAS_PRIVATE,
    JS_PropertyStub,       JS_PropertyStub,
    jitstats_getProperty,  JS_PropertyStub,
    JS_EnumerateStub,      JS_ResolveStub,
    JS_ConvertStub,        JS_FinalizeStub,
    JSCLASS_NO_OPTIONAL_MEMBERS
};

void
js_InitJITStatsClass(JSContext *cx, JSObject *glob)
{
    JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
}

#define AUDIT(x) (jitstats.x++)
#else
#define AUDIT(x) ((void)0)
#endif /* JS_JIT_SPEW */
#define INS_CONST(c)        addName(lir->insImm(c), #c)
#define INS_CONSTPTR(p)     addName(lir->insImmPtr(p), #p)
#define INS_CONSTFUNPTR(p)  addName(lir->insImmPtr(JS_FUNC_TO_DATA_PTR(void*, p)), #p)
#define INS_CONSTWORD(v)    addName(lir->insImmPtr((void *) v), #v)

using namespace avmplus;
using namespace nanojit;

static GC gc = GC();
static avmplus::AvmCore s_core = avmplus::AvmCore();
static avmplus::AvmCore* core = &s_core;

#ifdef JS_JIT_SPEW
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
#endif

/* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
static bool did_we_check_processor_features = false;

#ifdef JS_JIT_SPEW
bool js_verboseDebug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
bool js_verboseStats = js_verboseDebug ||
    (getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "stats"));
bool js_verboseAbort = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "abort");
#endif

/* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
   case cause performance regressions. */
static Oracle oracle;
Tracker::Tracker()
{
    pagelist = 0;
}

Tracker::~Tracker()
{
    clear();
}

jsuword
Tracker::getPageBase(const void* v) const
{
    return jsuword(v) & ~jsuword(NJ_PAGE_SIZE-1);
}

struct Tracker::Page*
Tracker::findPage(const void* v) const
{
    jsuword base = getPageBase(v);
    struct Tracker::Page* p = pagelist;
    while (p) {
        if (p->base == base) {
            return p;
        }
        p = p->next;
    }
    return 0;
}

struct Tracker::Page*
Tracker::addPage(const void* v) {
    jsuword base = getPageBase(v);
    struct Tracker::Page* p = (struct Tracker::Page*)
        GC::Alloc(sizeof(*p) - sizeof(p->map) + (NJ_PAGE_SIZE >> 2) * sizeof(LIns*));
    p->base = base;
    p->next = pagelist;
    pagelist = p;
    return p;
}

void
Tracker::clear()
{
    while (pagelist) {
        Page* p = pagelist;
        pagelist = pagelist->next;
        GC::Free(p);
    }
}

bool
Tracker::has(const void *v) const
{
    return get(v) != NULL;
}

#if defined NANOJIT_64BIT
#define PAGEMASK 0x7ff
#else
#define PAGEMASK 0xfff
#endif

LIns*
Tracker::get(const void* v) const
{
    struct Tracker::Page* p = findPage(v);
    if (!p)
        return NULL;
    return p->map[(jsuword(v) & PAGEMASK) >> 2];
}

void
Tracker::set(const void* v, LIns* i)
{
    struct Tracker::Page* p = findPage(v);
    if (!p)
        p = addPage(v);
    p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
}
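
/*
 * A worked example of the addressing above (an added sketch, assuming
 * NJ_PAGE_SIZE is 4096 to match the 0xfff mask): for v = 0x1234abc8,
 * getPageBase() strips the low bits to 0x1234a000, and the page's map is
 * indexed with (0xbc8 >> 2) == 0x2f2, i.e. one LIns* slot per 4-byte word
 * in the page. This is also why addPage() sizes the map as
 * (NJ_PAGE_SIZE >> 2) entries.
 */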
static inline jsuint argSlots(JSStackFrame* fp)
{
    return JS_MAX(fp->argc, fp->fun->nargs);
}

static inline bool isNumber(jsval v)
{
    return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
}

static inline jsdouble asNumber(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_DOUBLE(v))
        return *JSVAL_TO_DOUBLE(v);
    return (jsdouble)JSVAL_TO_INT(v);
}

static inline bool isInt32(jsval v)
{
    if (!isNumber(v))
        return false;
    jsdouble d = asNumber(v);
    jsint i;
    return JSDOUBLE_IS_INT(d, i);
}

static inline jsint asInt32(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_INT(v))
        return JSVAL_TO_INT(v);
#ifdef DEBUG
    jsint i;
    JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i));
#endif
    return jsint(*JSVAL_TO_DOUBLE(v));
}

/* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */
static inline uint8 getPromotedType(jsval v)
{
    if (JSVAL_IS_INT(v))
        return JSVAL_DOUBLE;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return JSVAL_TNULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return JSVAL_TFUN;
        return JSVAL_OBJECT;
    }
    return uint8(JSVAL_TAG(v));
}

/* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */
static inline uint8 getCoercedType(jsval v)
{
    if (isInt32(v))
        return JSVAL_INT;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return JSVAL_TNULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return JSVAL_TFUN;
        return JSVAL_OBJECT;
    }
    return uint8(JSVAL_TAG(v));
}
/*
 * Constant seed and accumulate step borrowed from the DJB hash.
 */

#define ORACLE_MASK (ORACLE_SIZE - 1)
#define FRAGMENT_TABLE_MASK (FRAGMENT_TABLE_SIZE - 1)
#define HASH_SEED 5381

static inline void
hash_accum(uintptr_t& h, uintptr_t i, uintptr_t mask)
{
    h = ((h << 5) + h + (mask & i)) & mask;
}
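
/*
 * In other words, each step computes h = (h * 33 + (i & mask)) & mask, the
 * classic DJB accumulate (x*33 done as shift-and-add) kept in range by the
 * mask. For example, starting from HASH_SEED (5381) and folding in i = 7
 * under an all-ones mask gives 5381 * 33 + 7 = 177580. (This note is an
 * illustration added for clarity, not original commentary.)
 */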
JS_REQUIRES_STACK static inline int
stackSlotHash(JSContext* cx, unsigned slot)
{
    uintptr_t h = HASH_SEED;
    hash_accum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
    hash_accum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
    hash_accum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}

JS_REQUIRES_STACK static inline int
globalSlotHash(JSContext* cx, unsigned slot)
{
    uintptr_t h = HASH_SEED;
    JSStackFrame* fp = cx->fp;

    while (fp->down)
        fp = fp->down;

    hash_accum(h, uintptr_t(fp->script), ORACLE_MASK);
    hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))),
               ORACLE_MASK);
    hash_accum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}
Oracle::Oracle()
{
    /* Grow the oracle bitsets to their (fixed) size here, once. */
    _stackDontDemote.set(&gc, ORACLE_SIZE-1);
    _globalDontDemote.set(&gc, ORACLE_SIZE-1);
    clear();
}

/* Tell the oracle that a certain global variable should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
{
    _globalDontDemote.set(&gc, globalSlotHash(cx, slot));
}

/* Ask the oracle whether a certain global variable should be left undemoted. */
JS_REQUIRES_STACK bool
Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
{
    return _globalDontDemote.get(globalSlotHash(cx, slot));
}

/* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
{
    _stackDontDemote.set(&gc, stackSlotHash(cx, slot));
}

/* Ask the oracle whether a certain stack slot should be left undemoted. */
JS_REQUIRES_STACK bool
Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
{
    return _stackDontDemote.get(stackSlotHash(cx, slot));
}

void
Oracle::clearDemotability()
{
    _stackDontDemote.reset();
    _globalDontDemote.reset();
}

struct PCHashEntry : public JSDHashEntryStub {
    size_t          count;
};

#define PC_HASH_COUNT 1024
static void
js_Blacklist(jsbytecode* pc)
{
    JS_ASSERT(*pc == JSOP_LOOP || *pc == JSOP_NOP);
    *pc = JSOP_NOP;
}

static void
js_Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree=NULL)
{
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;

    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_ADD);

        if (entry) {
            if (!entry->key) {
                entry->key = pc;
                JS_ASSERT(entry->count == 0);
            }
            JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
            if (entry->count++ > (BL_ATTEMPTS * MAXPEERS)) {
                entry->count = 0;
                js_Blacklist(pc);
                return;
            }
        }
    }

    if (tree) {
        tree->hits() -= BL_BACKOFF;

        /*
         * In case there is no entry or no table (due to OOM) or some
         * serious imbalance in the recording-attempt distribution on a
         * multitree, give each tree another chance to blacklist here as
         * well.
         */
        if (++tree->recordAttempts > BL_ATTEMPTS)
            js_Blacklist(pc);
    }
}
static void
js_resetRecordingAttempts(JSContext *cx, jsbytecode* pc)
{
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_LOOKUP);

        if (JS_DHASH_ENTRY_IS_FREE(&(entry->hdr)))
            return;
        JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
        entry->count = 0;
    }
}

static inline size_t
fragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
{
    uintptr_t h = HASH_SEED;
    hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
    return size_t(h);
}
/*
 * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
 * pushed for the innermost JS frame. This is required as part of the fragment
 * key because the fragment will write those arguments back to the interpreter
 * stack when it exits, using its typemap, which implicitly incorporates a given
 * value of argc. Without this feature, a fragment could be called as an inner
 * tree with two different values of argc, and entry type checking or exit
 * frame synthesis could crash.
 */
struct VMFragment : public Fragment
{
    VMFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc) :
        Fragment(_ip),
        next(NULL),
        globalObj(_globalObj),
        globalShape(_globalShape),
        argc(_argc)
    {}
    VMFragment* next;
    JSObject* globalObj;
    uint32 globalShape;
    uint32 argc;
};
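
/*
 * So a fragment's identity is the full 4-tuple (ip, globalObj, globalShape,
 * argc): getVMFragment() below hashes all four components and then walks the
 * collision chain comparing all four, which is what keeps trees recorded
 * under different global shapes or argc values from being confused with one
 * another. (Added note for clarity.)
 */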
static VMFragment*
getVMFragment(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
              uint32 argc)
{
    size_t h = fragmentHash(ip, globalObj, globalShape, argc);
    VMFragment* vf = tm->vmfragments[h];
    while (vf &&
           ! (vf->globalObj == globalObj &&
              vf->globalShape == globalShape &&
              vf->ip == ip &&
              vf->argc == argc)) {
        vf = vf->next;
    }
    return vf;
}

static VMFragment*
getLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
        uint32 argc)
{
    return getVMFragment(tm, ip, globalObj, globalShape, argc);
}

static Fragment*
getAnchor(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
          uint32 argc)
{
    VMFragment *f = new (&gc) VMFragment(ip, globalObj, globalShape, argc);
    JS_ASSERT(f);

    Fragment *p = getVMFragment(tm, ip, globalObj, globalShape, argc);

    if (p) {
        f->first = p;
        /* append at the end of the peer list */
        Fragment* next;
        while ((next = p->peer) != NULL)
            p = next;
        p->peer = f;
    } else {
        /* this is the first fragment */
        f->first = f;
        size_t h = fragmentHash(ip, globalObj, globalShape, argc);
        f->next = tm->vmfragments[h];
        tm->vmfragments[h] = f;
    }
    f->anchor = f;
    f->root = f;
    f->kind = LoopTrace;
    return f;
}
static void
js_AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
                      uint32 argc)
{
    /*
     * If we already permanently blacklisted the location, undo that.
     */
    JS_ASSERT(*(jsbytecode*)pc == JSOP_NOP || *(jsbytecode*)pc == JSOP_LOOP);
    *(jsbytecode*)pc = JSOP_LOOP;
    js_resetRecordingAttempts(cx, pc);

    /*
     * Breathe new life into all peer fragments at the designated loop header.
     */
    Fragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj),
                                       argc);
    if (!f) {
        /*
         * If the global object's shape changed, we can't easily find the
         * corresponding loop header via a hash table lookup. In this case we
         * simply bail out here and hope that the fragment has another
         * outstanding compilation attempt. This case is extremely rare.
         */
        return;
    }
    JS_ASSERT(f->root == f);
    f = f->first;
    while (f) {
        JS_ASSERT(f->root == f);
        --f->recordAttempts;
        f->hits() = HOTLOOP;
        f = f->peer;
    }
}
// Forward declarations.
JS_DEFINE_CALLINFO_1(static, DOUBLE, i2f, INT32, 1, 1)
JS_DEFINE_CALLINFO_1(static, DOUBLE, u2f, UINT32, 1, 1)

static bool isi2f(LInsp i)
{
    if (i->isop(LIR_i2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &i2f_ci)
            return true;
    }

    return false;
}

static bool isu2f(LInsp i)
{
    if (i->isop(LIR_u2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &u2f_ci)
            return true;
    }

    return false;
}

static LInsp iu2fArg(LInsp i)
{
    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin))
    {
        return i->oprnd1()->arg(0);
    }

    return i->oprnd1();
}
static LIns* demote(LirWriter *out, LInsp i)
{
    if (i->isCall())
        return callArgN(i, 0);
    if (isi2f(i) || isu2f(i))
        return iu2fArg(i);
    if (i->isconst())
        return i;
    AvmAssert(i->isconstq());
    double cf = i->imm64f();
    int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
    return out->insImm(ci);
}

static bool isPromoteInt(LIns* i)
{
    if (isi2f(i) || i->isconst())
        return true;
    if (!i->isconstq())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool isPromoteUint(LIns* i)
{
    if (isu2f(i) || i->isconst())
        return true;
    if (!i->isconstq())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsuint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool isPromote(LIns* i)
{
    return isPromoteInt(i) || isPromoteUint(i);
}

static bool isconst(LIns* i, int32_t c)
{
    return i->isconst() && i->imm32() == c;
}

static bool overflowSafe(LIns* i)
{
    LIns* c;
    return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() & 0xc0000000) == 0)) ||
           (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() > 0)));
}
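
/*
 * Illustrative examples (added commentary, not in the original): a constant
 * double 5.0 satisfies isPromoteInt, since 5.0 == jsdouble(jsint(5.0)) and
 * it is not negative zero, while 5.5 and -0.0 do not, so only the former
 * may be demoted back to an integer. overflowSafe() is similar in spirit:
 * an operand of the form (x & 0x3fffffff) or (x >> 1) has its top bits
 * clear, so adding two such values cannot overflow a signed 32-bit integer
 * and the expression can safely stay in integer form.
 */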
/* soft float support */

static jsdouble FASTCALL
fneg(jsdouble x)
{
    return -x;
}
JS_DEFINE_CALLINFO_1(static, DOUBLE, fneg, DOUBLE, 1, 1)

static jsdouble FASTCALL
i2f(int32 i)
{
    return i;
}

static jsdouble FASTCALL
u2f(jsuint u)
{
    return u;
}

static int32 FASTCALL
fcmpeq(jsdouble x, jsdouble y)
{
    return x==y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpeq, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmplt(jsdouble x, jsdouble y)
{
    return x < y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmplt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmple(jsdouble x, jsdouble y)
{
    return x <= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmple, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpgt(jsdouble x, jsdouble y)
{
    return x > y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpgt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpge(jsdouble x, jsdouble y)
{
    return x >= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpge, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fmul(jsdouble x, jsdouble y)
{
    return x * y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fmul, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fadd(jsdouble x, jsdouble y)
{
    return x + y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fadd, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fdiv(jsdouble x, jsdouble y)
{
    return x / y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fdiv, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fsub(jsdouble x, jsdouble y)
{
    return x - y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fsub, DOUBLE, DOUBLE, 1, 1)
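
/*
 * How the filter below uses these helpers (an explanatory sketch added for
 * clarity, not original commentary): on soft-float targets a 64-bit double
 * cannot be produced by a float opcode directly, so each float operation is
 * rewritten into a call to one of the helpers above plus a LIR_callh that
 * retrieves the high 32 bits of the result, with the two halves stitched
 * back together by a LIR_qjoin. isi2f()/isu2f() above recognize exactly
 * that call/callh/qjoin shape so later passes can still see through the
 * conversion.
 */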
class SoftFloatFilter: public LirWriter
{
public:
    SoftFloatFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LInsp quadCall(const CallInfo *ci, LInsp args[]) {
        LInsp qlo, qhi;

        qlo = out->insCall(ci, args);
        qhi = out->ins1(LIR_callh, qlo);
        return out->qjoin(qlo, qhi);
    }

    LInsp ins1(LOpcode v, LInsp s0)
    {
        if (v == LIR_fneg)
            return quadCall(&fneg_ci, &s0);

        if (v == LIR_i2f)
            return quadCall(&i2f_ci, &s0);

        if (v == LIR_u2f)
            return quadCall(&u2f_ci, &s0);

        return out->ins1(v, s0);
    }

    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        LInsp args[2];
        LInsp bv;

        // change the numeric value and order of these LIR opcodes and die
        if (LIR_fadd <= v && v <= LIR_fdiv) {
            static const CallInfo *fmap[] = { &fadd_ci, &fsub_ci, &fmul_ci, &fdiv_ci };

            args[0] = s1;
            args[1] = s0;

            return quadCall(fmap[v - LIR_fadd], args);
        }

        if (LIR_feq <= v && v <= LIR_fge) {
            static const CallInfo *fmap[] = { &fcmpeq_ci, &fcmplt_ci, &fcmpgt_ci, &fcmple_ci, &fcmpge_ci };

            args[0] = s1;
            args[1] = s0;

            bv = out->insCall(fmap[v - LIR_feq], args);
            return out->ins2(LIR_eq, bv, out->insImm(1));
        }

        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        // if the return type is ARGSIZE_F, we have
        // to do a quadCall ( qjoin(call,callh) )
        if ((ci->_argtypes & 3) == ARGSIZE_F)
            return quadCall(ci, args);

        return out->insCall(ci, args);
    }
};
class FuncFilter: public LirWriter
{
public:
    FuncFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        if (s0 == s1 && v == LIR_feq) {
            if (isPromote(s0)) {
                // double(int) and double(uint) cannot be nan
                return insImm(1);
            }
            if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    // add/sub/mul promoted ints can't be nan
                    return insImm(1);
                }
            }
        } else if (LIR_feq <= v && v <= LIR_fge) {
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fcmp to cmp
                v = LOpcode(v + (LIR_eq - LIR_feq));
                return out->ins2(v, demote(out, s0), demote(out, s1));
            } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
                // uint compare
                v = LOpcode(v + (LIR_eq - LIR_feq));
                if (v != LIR_eq)
                    v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
                return out->ins2(v, demote(out, s0), demote(out, s1));
            }
        } else if (v == LIR_or &&
                   s0->isop(LIR_lsh) && isconst(s0->oprnd2(), 16) &&
                   s1->isop(LIR_and) && isconst(s1->oprnd2(), 0xffff)) {
            LIns* msw = s0->oprnd1();
            LIns* lsw = s1->oprnd1();
            LIns* x;
            LIns* y;
            if (lsw->isop(LIR_add) &&
                lsw->oprnd1()->isop(LIR_and) &&
                lsw->oprnd2()->isop(LIR_and) &&
                isconst(lsw->oprnd1()->oprnd2(), 0xffff) &&
                isconst(lsw->oprnd2()->oprnd2(), 0xffff) &&
                msw->isop(LIR_add) &&
                msw->oprnd1()->isop(LIR_add) &&
                msw->oprnd2()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
                isconst(msw->oprnd2()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
                (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
                (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
                lsw == msw->oprnd2()->oprnd1()) {
                return out->ins2(LIR_add, x, y);
            }
        }
        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        if (ci == &js_DoubleToUint32_ci) {
            LInsp s0 = args[0];
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAUint32(s0->imm64f()));
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
        } else if (ci == &js_DoubleToInt32_ci) {
            LInsp s0 = args[0];
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAInt32(s0->imm64f()));
            if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    LOpcode op = LOpcode(s0->opcode() & ~LIR64);
                    return out->ins2(op, demote(out, lhs), demote(out, rhs));
                }
            }
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
            // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
            if (s0->isCall()) {
                const CallInfo* ci2 = s0->callInfo();
                if (ci2 == &js_UnboxDouble_ci) {
                    LIns* args2[] = { callArgN(s0, 0) };
                    return out->insCall(&js_UnboxInt32_ci, args2);
                } else if (ci2 == &js_StringToNumber_ci) {
                    // callArgN's ordering is that as seen by the builtin, not as stored in
                    // args here. True story!
                    LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };
                    return out->insCall(&js_StringToInt32_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt0_ci) {
                    // Use a fast path builtin for a charCodeAt that converts to an int right away.
                    LIns* args2[] = { callArgN(s0, 0) };
                    return out->insCall(&js_String_p_charCodeAt0_int_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt_ci) {
                    LIns* idx = callArgN(s0, 1);
                    // If the index is not already an integer, force it to be an integer.
                    idx = isPromote(idx)
                        ? demote(out, idx)
                        : out->insCall(&js_DoubleToInt32_ci, &idx);
                    LIns* args2[] = { idx, callArgN(s0, 0) };
                    return out->insCall(&js_String_p_charCodeAt_int_ci, args2);
                }
            }
        } else if (ci == &js_BoxDouble_ci) {
            LInsp s0 = args[0];
            JS_ASSERT(s0->isQuad());
            if (isi2f(s0)) {
                LIns* args2[] = { iu2fArg(s0), args[1] };
                return out->insCall(&js_BoxInt32_ci, args2);
            }
            if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
                return callArgN(s0, 0);
        }
        return out->insCall(ci, args);
    }
};
/* In debug mode vpname contains a textual description of the type of the
   slot during the forall iteration over all slots. If JS_JIT_SPEW is not
   defined, vpnum is set to a very large integer to catch invalid uses of
   it. Non-debug code should never use vpnum. */
#ifdef JS_JIT_SPEW
#define DEF_VPNAME          const char* vpname; unsigned vpnum
#define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)
#define INC_VPNUM()         do { ++vpnum; } while(0)
#else
#define DEF_VPNAME          do {} while (0)
#define vpname ""
#define vpnum 0x40000000
#define SET_VPNAME(name)    ((void)0)
#define INC_VPNUM()         ((void)0)
#endif
/* Iterate over all interned global variables. */
#define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code)                        \
    JS_BEGIN_MACRO                                                            \
        DEF_VPNAME;                                                           \
        JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);  \
        unsigned n;                                                           \
        jsval* vp;                                                            \
        SET_VPNAME("global");                                                 \
        for (n = 0; n < ngslots; ++n) {                                       \
            vp = &STOBJ_GET_SLOT(globalObj, gslots[n]);                       \
            { code; }                                                         \
            INC_VPNUM();                                                      \
        }                                                                     \
    JS_END_MACRO
/* Iterate over all slots in the frame, consisting of args, vars, and stack
   (except for the top-level frame, which does not have args or vars). */
#define FORALL_FRAME_SLOTS(fp, depth, code)                                   \
    JS_BEGIN_MACRO                                                            \
        jsval* vp;                                                            \
        jsval* vpstop;                                                        \
        if (fp->callee) {                                                     \
            if (depth == 0) {                                                 \
                SET_VPNAME("callee");                                         \
                vp = &fp->argv[-2];                                           \
                { code; }                                                     \
                SET_VPNAME("this");                                           \
                vp = &fp->argv[-1];                                           \
                { code; }                                                     \
                SET_VPNAME("argv");                                           \
                vp = &fp->argv[0]; vpstop = &fp->argv[argSlots(fp)];          \
                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
            }                                                                 \
            SET_VPNAME("vars");                                               \
            vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed];          \
            while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                  \
        }                                                                     \
        SET_VPNAME("stack");                                                  \
        vp = StackBase(fp); vpstop = fp->regs->sp;                            \
        while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                      \
        if (fsp < fspstop - 1) {                                              \
            JSStackFrame* fp2 = fsp[1];                                       \
            int missing = fp2->fun->nargs - fp2->argc;                        \
            if (missing > 0) {                                                \
                SET_VPNAME("missing");                                        \
                vp = fp->regs->sp;                                            \
                vpstop = vp + missing;                                        \
                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
            }                                                                 \
        }                                                                     \
    JS_END_MACRO
/* Iterate over all slots in each pending frame. */
#define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code)                   \
    JS_BEGIN_MACRO                                                            \
        DEF_VPNAME;                                                           \
        unsigned n;                                                           \
        JSStackFrame* currentFrame = cx->fp;                                  \
        JSStackFrame* entryFrame;                                             \
        JSStackFrame* fp = currentFrame;                                      \
        for (n = 0; n < callDepth; ++n) { fp = fp->down; }                    \
        entryFrame = fp;                                                      \
        unsigned frames = callDepth+1;                                        \
        JSStackFrame** fstack =                                               \
            (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*));         \
        JSStackFrame** fspstop = &fstack[frames];                             \
        JSStackFrame** fsp = fspstop-1;                                       \
        fp = currentFrame;                                                    \
        for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }  \
        unsigned depth;                                                       \
        for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) {        \
            fp = *fsp;                                                        \
            FORALL_FRAME_SLOTS(fp, depth, code);                              \
        }                                                                     \
    JS_END_MACRO

#define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
    JS_BEGIN_MACRO                                                            \
        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
        FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
    JS_END_MACRO
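
/*
 * A note on ordering (added for clarity): FORALL_SLOTS visits the pending
 * stack frames first, from the entry frame out to the current frame (callee,
 * this, argv, vars, then the operand stack of each frame, plus any missing
 * args), and only then the interned global slots. The type maps captured
 * below are laid out in exactly this order, which is why they divide into a
 * stack section of nStackTypes entries followed by a globals section.
 */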
/* Calculate the total number of native frame slots we need from this frame
   all the way back to the entry frame, including the current stack usage. */
JS_REQUIRES_STACK unsigned
js_NativeStackSlots(JSContext *cx, unsigned callDepth)
{
    JSStackFrame* fp = cx->fp;
    unsigned slots = 0;
#if defined _DEBUG
    unsigned int origCallDepth = callDepth;
#endif
    for (;;) {
        unsigned operands = fp->regs->sp - StackBase(fp);
        slots += operands;
        if (fp->callee)
            slots += fp->script->nfixed;
        if (callDepth-- == 0) {
            if (fp->callee)
                slots += 2/*callee,this*/ + argSlots(fp);
#if defined _DEBUG
            unsigned int m = 0;
            FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
            JS_ASSERT(m == slots);
#endif
            return slots;
        }
        JSStackFrame* fp2 = fp;
        fp = fp->down;
        int missing = fp2->fun->nargs - fp2->argc;
        if (missing > 0)
            slots += missing;
    }
    JS_NOT_REACHED("js_NativeStackSlots");
}
/*
 * Capture the type map for the selected slots of the global object and currently pending
 * stack frames.
 */
JS_REQUIRES_STACK void
TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth)
{
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(js_NativeStackSlots(cx, callDepth) + ngslots);
    uint8* map = data();
    uint8* m = map;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map)))
            type = JSVAL_DOUBLE;
        JS_ASSERT(type != JSVAL_BOXED);
        debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
        JS_ASSERT(uintptr_t(m - map) < length());
        *m++ = type;
    );
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
            type = JSVAL_DOUBLE;
        JS_ASSERT(type != JSVAL_BOXED);
        debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
        JS_ASSERT(uintptr_t(m - map) < length());
        *m++ = type;
    );
    JS_ASSERT(uintptr_t(m - map) == length());
}
JS_REQUIRES_STACK void
TypeMap::captureMissingGlobalTypes(JSContext* cx, SlotList& slots, unsigned stackSlots)
{
    unsigned oldSlots = length() - stackSlots;
    int diff = slots.length() - oldSlots;
    JS_ASSERT(diff >= 0);
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(length() + diff);
    uint8* map = data() + stackSlots;
    uint8* m = map;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        if (n >= oldSlots) {
            uint8 type = getCoercedType(*vp);
            if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
                type = JSVAL_DOUBLE;
            JS_ASSERT(type != JSVAL_BOXED);
            debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
            *m = type;
            JS_ASSERT((m > map + oldSlots) || (*m == type));
        }
        m++;
    );
}
/* Compare this type map to another one and see whether they match. */
bool
TypeMap::matches(TypeMap& other) const
{
    if (length() != other.length())
        return false;
    return !memcmp(data(), other.data(), length());
}

/* Use the provided storage area to create a new type map that contains the partial type map
   with the rest of it filled up from the complete type map. */
static void
mergeTypeMaps(uint8** partial, unsigned* plength, uint8* complete, unsigned clength, uint8* mem)
{
    unsigned l = *plength;
    JS_ASSERT(l < clength);
    memcpy(mem, *partial, l * sizeof(uint8));
    memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
    *partial = mem;
    *plength = clength;
}
/* Specializes a tree to any missing globals, including any dependent trees. */
static JS_REQUIRES_STACK void
specializeTreesToMissingGlobals(JSContext* cx, TreeInfo* root)
{
    TreeInfo* ti = root;

    ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
    JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);

    for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
        ti = (TreeInfo*)root->dependentTrees.data()[i]->vmprivate;
        /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            specializeTreesToMissingGlobals(cx, ti);
    }
    for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
        ti = (TreeInfo*)root->linkedTrees.data()[i]->vmprivate;
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            specializeTreesToMissingGlobals(cx, ti);
    }
}

static void
js_TrashTree(JSContext* cx, Fragment* f);
JS_REQUIRES_STACK
TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
        TreeInfo* ti, unsigned stackSlots, unsigned ngslots, uint8* typeMap,
        VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc)
{
    JS_ASSERT(!_fragment->vmprivate && ti && cx->fp->regs->pc == (jsbytecode*)_fragment->ip);

    /* Reset the fragment state we care about in case we got a recycled fragment. */
    _fragment->lastIns = NULL;

    this->cx = cx;
    this->traceMonitor = &JS_TRACE_MONITOR(cx);
    this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    this->lexicalBlock = cx->fp->blockChain;
    this->anchor = _anchor;
    this->fragment = _fragment;
    this->lirbuf = _fragment->lirbuf;
    this->treeInfo = ti;
    this->callDepth = _anchor ? _anchor->calldepth : 0;
    this->atoms = FrameAtomBase(cx, cx->fp);
    this->deepAborted = false;
    this->trashSelf = false;
    this->global_dslots = this->globalObj->dslots;
    this->loop = true; /* default assumption is we are compiling a loop */
    this->wasRootFragment = _fragment == _fragment->root;
    this->outer = outer;
    this->outerArgc = outerArgc;
    this->pendingTraceableNative = NULL;
    this->newobj_ins = NULL;
    this->generatedTraceableNative = new JSTraceableNative();
    JS_ASSERT(generatedTraceableNative);

    debug_only_v(printf("recording starting from %s:%u@%u\n",
                        ti->treeFileName, ti->treeLineNumber, ti->treePCOffset);)
    debug_only_v(printf("globalObj=%p, shape=%d\n", (void*)this->globalObj, OBJ_SHAPE(this->globalObj));)

    lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
    debug_only_v(lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);)
    if (nanojit::AvmCore::config.soft_float)
        lir = float_filter = new (&gc) SoftFloatFilter(lir);
    else
        float_filter = 0;
    lir = cse_filter = new (&gc) CseFilter(lir, &gc);
    lir = expr_filter = new (&gc) ExprFilter(lir);
    lir = func_filter = new (&gc) FuncFilter(lir);
    lir->ins0(LIR_start);

    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment)
        lirbuf->state = addName(lir->insParam(0, 0), "state");

    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
    lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
    cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
    eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
    eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");

    /* If we came from exit, we might not have enough global types. */
    if (ti->globalSlots->length() > ti->nGlobalTypes())
        specializeTreesToMissingGlobals(cx, ti);

    /* read into registers all values on the stack and all globals we know so far */
    import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);

    if (fragment == fragment->root) {
        /*
         * We poll the operation callback request flag. It is updated asynchronously whenever
         * the callback is to be invoked.
         */
        LIns* x = lir->insLoadi(cx_ins, offsetof(JSContext, operationCallbackFlag));
        guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
    }

    /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
       is what we expect it to be. */
    if (_anchor && _anchor->exitType == NESTED_EXIT) {
        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
                                                offsetof(InterpState, lastTreeExitGuard)),
                                                "lastTreeExitGuard");
        guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
    }
}
TreeInfo::~TreeInfo()
{
    UnstableExit* temp;

    while (unstableExits) {
        temp = unstableExits->next;
        delete unstableExits;
        unstableExits = temp;
    }
}

TraceRecorder::~TraceRecorder()
{
    JS_ASSERT(nextRecorderToAbort == NULL);
    JS_ASSERT(treeInfo && (fragment || wasDeepAborted()));
#ifdef DEBUG
    TraceRecorder* tr = JS_TRACE_MONITOR(cx).abortStack;
    while (tr != NULL)
    {
        JS_ASSERT(this != tr);
        tr = tr->nextRecorderToAbort;
    }
#endif
    if (fragment) {
        if (wasRootFragment && !fragment->root->code()) {
            JS_ASSERT(!fragment->root->vmprivate);
            delete treeInfo;
        }

        if (trashSelf)
            js_TrashTree(cx, fragment->root);

        for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
            js_TrashTree(cx, whichTreesToTrash.get(i));
    } else if (wasRootFragment) {
        delete treeInfo;
    }
#ifdef DEBUG
    delete verbose_filter;
#endif
    delete cse_filter;
    delete expr_filter;
    delete func_filter;
    delete float_filter;
    delete lir_buf_writer;
    delete generatedTraceableNative;
}
void TraceRecorder::removeFragmentoReferences()
{
    fragment = NULL;
}

void TraceRecorder::deepAbort()
{
    debug_only_v(printf("deep abort");)
    deepAborted = true;
}

/* Add debug information to a LIR instruction as we emit it. */
inline LIns*
TraceRecorder::addName(LIns* ins, const char* name)
{
#ifdef JS_JIT_SPEW
    if (js_verboseDebug)
        lirbuf->names->addName(ins, name);
#endif
    return ins;
}

/* Determine the current call depth (starting with the entry frame.) */
unsigned
TraceRecorder::getCallDepth() const
{
    return callDepth;
}
/* Determine the offset in the native global frame for a jsval we track. */
ptrdiff_t
TraceRecorder::nativeGlobalOffset(jsval* p) const
{
    JS_ASSERT(isGlobal(p));
    if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
        return sizeof(InterpState) + size_t(p - globalObj->fslots) * sizeof(double);
    return sizeof(InterpState) + ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
}

/* Determine whether a value is a global stack slot. */
bool
TraceRecorder::isGlobal(jsval* p) const
{
    return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
            (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
}
/* Determine the offset in the native stack for a jsval we track. */
JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativeStackOffset(jsval* p) const
{
#ifdef DEBUG
    size_t slow_offset = 0;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        if (vp == p) goto done;
        slow_offset += sizeof(double)
    );

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
    slow_offset += size_t(p - cx->fp->regs->sp) * sizeof(double);

  done:
#define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
#else
#define RETURN(offset) { return offset; }
#endif
    size_t offset = 0;
    JSStackFrame* currentFrame = cx->fp;
    JSStackFrame* entryFrame;
    JSStackFrame* fp = currentFrame;
    for (unsigned n = 0; n < callDepth; ++n) { fp = fp->down; }
    entryFrame = fp;
    unsigned frames = callDepth+1;
    JSStackFrame** fstack = (JSStackFrame **)alloca(frames * sizeof (JSStackFrame *));
    JSStackFrame** fspstop = &fstack[frames];
    JSStackFrame** fsp = fspstop-1;
    fp = currentFrame;
    for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }
    for (fsp = fstack; fsp < fspstop; ++fsp) {
        fp = *fsp;
        if (fp->callee) {
            if (fsp == fstack) {
                if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + argSlots(fp)))
                    RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
                offset += (2/*callee,this*/ + argSlots(fp)) * sizeof(double);
            }
            if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
                RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
            offset += fp->script->nfixed * sizeof(double);
        }
        jsval* spbase = StackBase(fp);
        if (size_t(p - spbase) < size_t(fp->regs->sp - spbase))
            RETURN(offset + size_t(p - spbase) * sizeof(double));
        offset += size_t(fp->regs->sp - spbase) * sizeof(double);
        if (fsp < fspstop - 1) {
            JSStackFrame* fp2 = fsp[1];
            int missing = fp2->fun->nargs - fp2->argc;
            if (missing > 0) {
                if (size_t(p - fp->regs->sp) < size_t(missing))
                    RETURN(offset + size_t(p - fp->regs->sp) * sizeof(double));
                offset += size_t(missing) * sizeof(double);
            }
        }
    }

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - currentFrame->slots) < currentFrame->script->nslots);
    offset += size_t(p - currentFrame->regs->sp) * sizeof(double);
    RETURN(offset);
#undef RETURN
}
/* Track the maximum number of native frame slots we need during
   execution. */
void
TraceRecorder::trackNativeStackUse(unsigned slots)
{
    if (slots > treeInfo->maxNativeStackSlots)
        treeInfo->maxNativeStackSlots = slots;
}
/* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of
   storing a pointer to them). We now assert instead of type checking, the caller must ensure the
   types are compatible. */
static void
ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
{
    unsigned tag = JSVAL_TAG(v);
    switch (type) {
      case JSVAL_OBJECT:
        JS_ASSERT(tag == JSVAL_OBJECT);
        JS_ASSERT(!JSVAL_IS_NULL(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)));
        *(JSObject**)slot = JSVAL_TO_OBJECT(v);
        debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        return;
      case JSVAL_INT:
        jsint i;
        if (JSVAL_IS_INT(v))
            *(jsint*)slot = JSVAL_TO_INT(v);
        else if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
            *(jsint*)slot = i;
        else
            JS_ASSERT(JSVAL_IS_INT(v));
        debug_only_v(printf("int<%d> ", *(jsint*)slot);)
        return;
      case JSVAL_DOUBLE:
        jsdouble d;
        if (JSVAL_IS_INT(v))
            d = JSVAL_TO_INT(v);
        else
            d = *JSVAL_TO_DOUBLE(v);
        JS_ASSERT(JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v));
        *(jsdouble*)slot = d;
        debug_only_v(printf("double<%g> ", d);)
        return;
      case JSVAL_BOXED:
        JS_NOT_REACHED("found boxed type in an entry type map");
        return;
      case JSVAL_STRING:
        JS_ASSERT(tag == JSVAL_STRING);
        *(JSString**)slot = JSVAL_TO_STRING(v);
        debug_only_v(printf("string<%p> ", (void*)(*(JSString**)slot));)
        return;
      case JSVAL_TNULL:
        JS_ASSERT(tag == JSVAL_OBJECT);
        *(JSObject**)slot = NULL;
        debug_only_v(printf("null ");)
        return;
      case JSVAL_BOOLEAN:
        /* Watch out for pseudo-booleans. */
        JS_ASSERT(tag == JSVAL_BOOLEAN);
        *(JSBool*)slot = JSVAL_TO_PSEUDO_BOOLEAN(v);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        return;
      case JSVAL_TFUN: {
        JS_ASSERT(tag == JSVAL_OBJECT);
        JSObject* obj = JSVAL_TO_OBJECT(v);
        *(JSObject**)slot = obj;
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
        debug_only_v(printf("function<%p:%s> ", (void*) obj,
                            fun->atom
                            ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                            : "unnamed");)
#endif
        return;
      }
    }

    JS_NOT_REACHED("unexpected type");
}
/* We maintain an emergency pool of doubles so we can recover safely if a trace runs
   out of memory (doubles or objects). */
static jsval
AllocateDoubleFromReservedPool(JSContext* cx)
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    JS_ASSERT(tm->reservedDoublePoolPtr > tm->reservedDoublePool);
    return *--tm->reservedDoublePoolPtr;
}

static bool
js_ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
{
    /* We should not be called with a full pool. */
    JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
              MAX_NATIVE_STACK_SLOTS);

    /*
     * When the GC runs in js_NewDoubleInRootedValue, it resets
     * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
     */
    JSRuntime* rt = cx->runtime;
    uintN gcNumber = rt->gcNumber;
    uintN lastgcNumber = gcNumber;
    jsval* ptr = tm->reservedDoublePoolPtr;
    while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
        if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
            goto oom;

        /* Check if the last call to js_NewDoubleInRootedValue GC'd. */
        if (rt->gcNumber != lastgcNumber) {
            lastgcNumber = rt->gcNumber;
            JS_ASSERT(tm->reservedDoublePoolPtr == tm->reservedDoublePool);
            ptr = tm->reservedDoublePool;

            /*
             * Have we GC'd more than once? We're probably running really
             * low on memory, bail now.
             */
            if (uintN(rt->gcNumber - gcNumber) > uintN(1))
                goto oom;
            continue;
        }
        ++ptr;
    }
    tm->reservedDoublePoolPtr = ptr;
    return true;

oom:
    /*
     * Already massive GC pressure, no need to hold doubles back.
     * We won't run any native code anyway.
     */
    tm->reservedDoublePoolPtr = tm->reservedDoublePool;
    return false;
}
/* Box a value from the native stack back into the jsval format. Integers
   that are too large to fit into a jsval are automatically boxed into
   heap-allocated doubles. */
static void
NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)
{
    jsint i;
    jsdouble d;
    switch (type) {
      case JSVAL_OBJECT:
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        break;
      case JSVAL_INT:
        i = *(jsint*)slot;
        debug_only_v(printf("int<%d> ", i);)
      store_int:
        if (INT_FITS_IN_JSVAL(i)) {
            v = INT_TO_JSVAL(i);
            break;
        }
        d = (jsdouble)i;
        goto store_double;
      case JSVAL_DOUBLE:
        d = *slot;
        debug_only_v(printf("double<%g> ", d);)
        if (JSDOUBLE_IS_INT(d, i))
            goto store_int;
      store_double: {
        /* It's not safe to trigger the GC here, so use an emergency heap if we are out of
           double boxes. */
        if (cx->doubleFreeList) {
#ifdef DEBUG
            JSBool ok =
#endif
                js_NewDoubleInRootedValue(cx, d, &v);
            JS_ASSERT(ok);
            return;
        }
        v = AllocateDoubleFromReservedPool(cx);
        JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
        *JSVAL_TO_DOUBLE(v) = d;
        return;
      }
      case JSVAL_BOXED:
        v = *(jsval*)slot;
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_v(printf("box<%p> ", (void*)v));
        break;
      case JSVAL_STRING:
        v = STRING_TO_JSVAL(*(JSString**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING); /* if this fails the pointer was not aligned */
        debug_only_v(printf("string<%p> ", (void*)(*(JSString**)slot));)
        break;
      case JSVAL_TNULL:
        JS_ASSERT(*(JSObject**)slot == NULL);
        v = JSVAL_NULL;
        debug_only_v(printf("null<%p> ", (void*)(*(JSObject**)slot)));
        break;
      case JSVAL_BOOLEAN:
        /* Watch out for pseudo-booleans. */
        v = PSEUDO_BOOLEAN_TO_JSVAL(*(JSBool*)slot);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        break;
      case JSVAL_TFUN: {
        JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject**)slot));
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v));
        debug_only_v(printf("function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            fun->atom
                            ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                            : "unnamed");)
#endif
        break;
      }
    }
}
/* Attempt to unbox the given list of interned globals onto the native global frame. */
static JS_REQUIRES_STACK void
BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    debug_only_v(printf("global: ");)
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        ValueToNative(cx, *vp, *mp, np + gslots[n]);
        ++mp;
    );
    debug_only_v(printf("\n");)
}

/* Attempt to unbox the given JS frame onto a native frame. */
static JS_REQUIRES_STACK void
BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
{
    debug_only_v(printf("stack: ");)
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        ValueToNative(cx, *vp, *mp, np);
        ++mp; ++np;
    );
    debug_only_v(printf("\n");)
}

/* Box the given native global frame back into the JS global object. This is infallible. */
static JS_REQUIRES_STACK int
FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    uint8* mp_base = mp;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        NativeToValue(cx, *vp, *mp, np + gslots[n]);
        ++mp;
    );
    debug_only_v(printf("\n");)
    return mp - mp_base;
}
/*
 * Builtin to get an upvar on trace. See js_GetUpvar for the meaning
 * of the first three arguments. The value of the upvar is stored in
 * *result as an unboxed native. The return value is the typemap type.
 */
uint32 JS_FASTCALL
js_GetUpvarOnTrace(JSContext *cx, uint32 level, uint32 cookie, double* result)
{
    uintN skip = UPVAR_FRAME_SKIP(cookie);
    InterpState* state = cx->interpState;
    uintN callDepth = state->rp - state->callstackBase;

    /*
     * If we are skipping past all frames that are part of active traces,
     * then we simply get the value from the interpreter state.
     */
    if (skip > callDepth) {
        jsval v = js_GetUpvar(cx, level, cookie);
        uint8 type = getCoercedType(v);
        ValueToNative(cx, v, type, result);
        return type;
    }

    /*
     * The value we need is logically in a stack frame that is part of
     * an active trace. We reconstruct the value we need from the tracer
     * stack records.
     */
    uintN frameIndex = callDepth - skip; // pos of target frame in rp stack
    uintN nativeStackFramePos = 0;       // pos of target stack frame in sp stack
    for (uintN i = 0; i < frameIndex; ++i)
        nativeStackFramePos += state->callstackBase[i]->s.spdist;
    FrameInfo* fi = state->callstackBase[frameIndex];
    uint8* typemap = (uint8*) (fi+1);

    uintN slot = UPVAR_FRAME_SLOT(cookie);
    slot = slot == CALLEE_UPVAR_SLOT ? 0 : slot + 2;
    *result = state->stackBase[nativeStackFramePos + slot];
    return typemap[slot];
}
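
/*
 * Why slot + 2 above (an explanatory note added here, not original
 * commentary): the native stack image of a frame begins with the callee and
 * this slots before argv, matching the callee/this/argv order that
 * FORALL_FRAME_SLOTS uses, so a cookie slot s maps to native index s + 2,
 * while CALLEE_UPVAR_SLOT maps to index 0, the callee itself.
 */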
/*
 * Box the given native stack frame into the virtual machine stack. This
 * is infallible.
 *
 * @param callDepth the distance between the entry frame into our trace and
 *                  cx->fp when we make this call. If this is not called as a
 *                  result of a nested exit, callDepth is 0.
 * @param mp pointer to an array of type tags (JSVAL_INT, etc.) that indicate
 *           what the types of the things on the stack are.
 * @param np pointer to the native stack. We want to copy values from here to
 *           the JS stack as needed.
 * @param stopFrame if non-null, this frame and everything above it should not
 *                  be restored.
 * @return the number of things we popped off of np.
 */
static JS_REQUIRES_STACK int
FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
                      JSStackFrame* stopFrame)
{
    jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
    uint8* mp_base = mp;
    /* Root all string and object references first (we don't need to call the GC for this). */
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        if (vp == stopAt) goto skip;
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        NativeToValue(cx, *vp, *mp, np);
        ++mp; ++np
    );
  skip:
    // Restore thisp from the now-restored argv[-1] in each pending frame.
    // Keep in mind that we didn't restore frames at stopFrame and above!
    // Scope to keep |fp| from leaking into the macros we're using.
    {
        unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
        JSStackFrame* fp = cx->fp;
        if (stopFrame) {
            for (; fp != stopFrame; fp = fp->down) {
                JS_ASSERT(n != 0);
                --n;
            }

            // Skip over stopFrame itself.
            JS_ASSERT(n != 0);
            --n;
            fp = fp->down;
        }
        for (; n != 0; fp = fp->down) {
            --n;
            if (fp->callee) {
                JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
                fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
                if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
                    fp->flags |= JSFRAME_COMPUTED_THIS;
            }
        }
    }
    debug_only_v(printf("\n");)
    return mp - mp_base;
}
1941 /* Emit load instructions onto the trace that read the initial stack state. */
1942 JS_REQUIRES_STACK void
1943 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8 t,
1944 const char *prefix, uintN index, JSStackFrame *fp)
1946 LIns* ins;
1947 if (t == JSVAL_INT) { /* demoted */
1948 JS_ASSERT(isInt32(*p));
1949 /* Ok, we have a valid demotion attempt pending, so insert an integer
1950 read and promote it to double since all arithmetic operations expect
1951 to see doubles on entry. The first op to use this slot will emit an
1952 f2i cast which will cancel out the i2f we insert here. */
1953 ins = lir->insLoadi(base, offset);
1954 ins = lir->ins1(LIR_i2f, ins);
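// For example, an int-demoted slot is read as i2f(ld); the first op that
// needs the raw integer emits an f2i on top of it, and the two casts cancel
// as described above.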
1955 } else {
1956 JS_ASSERT_IF(t != JSVAL_BOXED, isNumber(*p) == (t == JSVAL_DOUBLE));
1957 if (t == JSVAL_DOUBLE) {
1958 ins = lir->insLoad(LIR_ldq, base, offset);
1959 } else if (t == JSVAL_BOOLEAN) {
1960 ins = lir->insLoad(LIR_ld, base, offset);
1961 } else {
1962 ins = lir->insLoad(LIR_ldp, base, offset);
1965 checkForGlobalObjectReallocation();
1966 tracker.set(p, ins);
1968 #ifdef DEBUG
1969 char name[64];
1970 JS_ASSERT(strlen(prefix) < 10);
1971 void* mark = NULL;
1972 jsuword* localNames = NULL;
1973 const char* funName = NULL;
1974 if (*prefix == 'a' || *prefix == 'v') {
1975 mark = JS_ARENA_MARK(&cx->tempPool);
1976 if (JS_GET_LOCAL_NAME_COUNT(fp->fun) != 0)
1977 localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
1978 funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
1980 if (!strcmp(prefix, "argv")) {
1981 if (index < fp->fun->nargs) {
1982 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
1983 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1984 } else {
1985 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
1987 } else if (!strcmp(prefix, "vars")) {
1988 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
1989 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1990 } else {
1991 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
1994 if (mark)
1995 JS_ARENA_RELEASE(&cx->tempPool, mark);
1996 addName(ins, name);
1998 static const char* typestr[] = {
1999 "object", "int", "double", "boxed", "string", "null", "boolean", "function"
2001 debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",
2002 (void*)p, name, typestr[t & 7], t >> 3);)
2003 #endif
2006 JS_REQUIRES_STACK void
2007 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
2008 unsigned callDepth, uint8* typeMap)
2010 /* If we get a partial list that doesn't have all the types (i.e. recording from a side
2011 exit that was recorded but we added more global slots later), merge the missing types
2012 from the entry type map. This is safe because at the loop edge we verify that we
2013 have compatible types for all globals (entry type and loop edge type match). While
2014 a different trace of the tree might have had a guard with a different type map for
2015 these slots we just filled in here (the guard we continue from didn't know about them),
2016 since we didn't take that particular guard, the only way we could have ended up here
2017 is if that other trace had at its end a compatible type distribution with the entry
2018 map. Since that's exactly what we used to fill in the types our current side exit
2019 didn't provide, this is always safe to do. */
2021 uint8* globalTypeMap = typeMap + stackSlots;
2022 unsigned length = treeInfo->nGlobalTypes();
2025 * This is potentially the typemap of the side exit and thus shorter than the tree's
2026 * global type map.
2028 if (ngslots < length) {
2029 mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/,
2030 treeInfo->globalTypeMap(), length,
2031 (uint8*)alloca(sizeof(uint8) * length));
2033 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2036 * Check whether there are any values on the stack we have to unbox and do that first
2037 * before we waste any time fetching the state from the stack.
2039 ptrdiff_t offset = -treeInfo->nativeStackBase;
2040 uint8* m = typeMap;
2041 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2042 if (*m == JSVAL_BOXED) {
2043 import(sp, offset, vp, JSVAL_BOXED, "boxed", vpnum, cx->fp);
2044 LIns* vp_ins = get(vp);
2045 unbox_jsval(*vp, vp_ins, copy(anchor));
2046 set(vp, vp_ins);
2048 m++; offset += sizeof(double);
2052 * The first time we compile a tree this will be empty as we add entries lazily.
2054 uint16* gslots = treeInfo->globalSlots->data();
2055 m = globalTypeMap;
2056 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2057 JS_ASSERT(*m != JSVAL_BOXED);
2058 import(lirbuf->state, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
2059 m++;
2061 offset = -treeInfo->nativeStackBase;
2062 m = typeMap;
2063 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2064 if (*m != JSVAL_BOXED)
2065 import(sp, offset, vp, *m, vpname, vpnum, fp);
2066 m++; offset += sizeof(double);
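/*
 * To recap the layout assumed throughout this function: a full type map is
 * the stack portion (one byte per slot in all pending frames) followed
 * immediately by the global portion, which is why globalTypeMap above is
 * simply typeMap + stackSlots.
 */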
2070 JS_REQUIRES_STACK bool
2071 TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
2073 uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
2075 if (setflags) {
2076 if (!SPROP_HAS_STUB_SETTER(sprop))
2077 ABORT_TRACE_RV("non-stub setter", false);
2078 if (sprop->attrs & JSPROP_READONLY)
2079 ABORT_TRACE_RV("writing to a read-only property", false);
2081 /* This check applies even when setflags == 0. */
2082 if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop))
2083 ABORT_TRACE_RV("non-stub getter", false);
2085 if (!SPROP_HAS_VALID_SLOT(sprop, scope))
2086 ABORT_TRACE_RV("slotless obj property", false);
2088 return true;
2091 /* Lazily import a global slot if we don't already have it in the tracker. */
2092 JS_REQUIRES_STACK bool
2093 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
2095 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
2096 return false;
2098 * If the global object grows too large, alloca in js_ExecuteTree might fail, so
2099 * abort tracing on global objects with unreasonably many slots.
2101 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
2102 return false;
2103 jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
2104 if (known(vp))
2105 return true; /* we already have it */
2106 unsigned index = treeInfo->globalSlots->length();
2107 /* Add the slot to the list of interned global slots. */
2108 JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
2109 treeInfo->globalSlots->add(slot);
2110 uint8 type = getCoercedType(*vp);
2111 if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot))
2112 type = JSVAL_DOUBLE;
2113 treeInfo->typeMap.add(type);
2114 import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
2115 vp, type, "global", index, NULL);
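// On trace, global slot n lives at state + sizeof(InterpState) +
// n * sizeof(double); it is this direct indexing that the MAX_GLOBAL_SLOTS
// cap above keeps affordable.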
2116 specializeTreesToMissingGlobals(cx, treeInfo);
2117 return true;
2120 /* Write back a value onto the stack or global frames. */
2121 LIns*
2122 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
2124 /* Sink all type casts targeting the stack into the side exit by simply storing the original
2125 (uncasted) value. Each guard generates the side exit map based on the types of the
2126 last stores to every stack location, so it's safe not to perform them on-trace. */
2127 if (isPromoteInt(i))
2128 i = ::demote(lir, i);
2129 return lir->insStorei(i, base, offset);
2132 /* Update the tracker, then issue a write back store. */
2133 JS_REQUIRES_STACK void
2134 TraceRecorder::set(jsval* p, LIns* i, bool initializing)
2136 JS_ASSERT(i != NULL);
2137 JS_ASSERT(initializing || known(p));
2138 checkForGlobalObjectReallocation();
2139 tracker.set(p, i);
2140 /* If we are writing to this location for the first time, calculate the offset into the
2141 native frame manually; otherwise just look up the last load or store associated with
2142 the same source address (p) and use the same offset/base. */
2143 LIns* x = nativeFrameTracker.get(p);
2144 if (!x) {
2145 if (isGlobal(p))
2146 x = writeBack(i, lirbuf->state, nativeGlobalOffset(p));
2147 else
2148 x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
2149 nativeFrameTracker.set(p, x);
2150 } else {
2151 #define ASSERT_VALID_CACHE_HIT(base, offset) \
2152 JS_ASSERT(base == lirbuf->sp || base == lirbuf->state); \
2153 JS_ASSERT(offset == ((base == lirbuf->sp) \
2154 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
2155 : nativeGlobalOffset(p))); \
2157 JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
2158 ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->immdisp());
2159 writeBack(i, x->oprnd2(), x->immdisp());
2161 #undef ASSERT_VALID_CACHE_HIT
2164 JS_REQUIRES_STACK LIns*
2165 TraceRecorder::get(jsval* p)
2167 checkForGlobalObjectReallocation();
2168 return tracker.get(p);
2171 JS_REQUIRES_STACK bool
2172 TraceRecorder::known(jsval* p)
2174 checkForGlobalObjectReallocation();
2175 return tracker.has(p);
2179 * The dslots of the global object are sometimes reallocated by the interpreter.
2180 * This function checks for that condition and re-maps the entries of the tracker
2181 * accordingly.
2183 JS_REQUIRES_STACK void
2184 TraceRecorder::checkForGlobalObjectReallocation()
2186 if (global_dslots != globalObj->dslots) {
2187 debug_only_v(printf("globalObj->dslots relocated, updating tracker\n");)
2188 jsval* src = global_dslots;
2189 jsval* dst = globalObj->dslots;
2190 jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
2191 LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
2192 for (jsuint n = 0; n < length; ++n) {
2193 map[n] = tracker.get(src);
2194 tracker.set(src++, NULL);
2196 for (jsuint n = 0; n < length; ++n)
2197 tracker.set(dst++, map[n]);
2198 global_dslots = globalObj->dslots;
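/*
 * The tracker is keyed by jsval address, so once the interpreter moves
 * dslots (e.g. when a new global is defined while we record), every tracked
 * global value has a stale key; the loops above re-key each entry from the
 * old array to the corresponding slot in the new one.
 */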
2202 /* Determine whether the current branch is a loop edge (taken or not taken). */
2203 static JS_REQUIRES_STACK bool
2204 js_IsLoopEdge(jsbytecode* pc, jsbytecode* header)
2206 switch (*pc) {
2207 case JSOP_IFEQ:
2208 case JSOP_IFNE:
2209 return ((pc + GET_JUMP_OFFSET(pc)) == header);
2210 case JSOP_IFEQX:
2211 case JSOP_IFNEX:
2212 return ((pc + GET_JUMPX_OFFSET(pc)) == header);
2213 default:
2214 JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
2215 (*pc == JSOP_OR) || (*pc == JSOP_ORX));
2217 return false;
2221 * Promote slots if necessary to match the called tree's type map. This function is
2222 * infallible and must only be called if we are certain that it is possible to
2223 * reconcile the types for each slot in the inner and outer trees.
2225 JS_REQUIRES_STACK void
2226 TraceRecorder::adjustCallerTypes(Fragment* f)
2228 uint16* gslots = treeInfo->globalSlots->data();
2229 unsigned ngslots = treeInfo->globalSlots->length();
2230 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2231 TreeInfo* ti = (TreeInfo*)f->vmprivate;
2232 uint8* map = ti->globalTypeMap();
2233 uint8* m = map;
2234 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2235 LIns* i = get(vp);
2236 bool isPromote = isPromoteInt(i);
2237 if (isPromote && *m == JSVAL_DOUBLE)
2238 lir->insStorei(get(vp), lirbuf->state, nativeGlobalOffset(vp));
2239 JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
2240 ++m;
2242 JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
2243 map = ti->stackTypeMap();
2244 m = map;
2245 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2246 LIns* i = get(vp);
2247 bool isPromote = isPromoteInt(i);
2248 if (isPromote && *m == JSVAL_DOUBLE) {
2249 lir->insStorei(get(vp), lirbuf->sp,
2250 -treeInfo->nativeStackBase + nativeStackOffset(vp));
2251 /* Aggressively undo speculation so the inner tree will compile if this fails. */
2252 oracle.markStackSlotUndemotable(cx, unsigned(m - map));
2254 JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
2255 ++m;
2257 JS_ASSERT(unsigned(m - map) == ti->nStackTypes);
2258 JS_ASSERT(f == f->root);
2261 JS_REQUIRES_STACK uint8
2262 TraceRecorder::determineSlotType(jsval* vp)
2264 uint8 m;
2265 LIns* i = get(vp);
2266 if (isNumber(*vp)) {
2267 m = isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE;
2268 } else if (JSVAL_IS_OBJECT(*vp)) {
2269 if (JSVAL_IS_NULL(*vp))
2270 m = JSVAL_TNULL;
2271 else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
2272 m = JSVAL_TFUN;
2273 else
2274 m = JSVAL_OBJECT;
2275 } else {
2276 m = JSVAL_TAG(*vp);
2278 JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
2279 return m;
2282 JS_REQUIRES_STACK VMSideExit*
2283 TraceRecorder::snapshot(ExitType exitType)
2285 JSStackFrame* fp = cx->fp;
2286 JSFrameRegs* regs = fp->regs;
2287 jsbytecode* pc = regs->pc;
2289 /* Check for a return-value opcode that needs to restart at the next instruction. */
2290 const JSCodeSpec& cs = js_CodeSpec[*pc];
2293 * When calling a _FAIL native, make the snapshot's pc point to the next
2294 * instruction after the CALL or APPLY. Even on failure, a _FAIL native must not
2295 * be called again from the interpreter.
2297 bool resumeAfter = (pendingTraceableNative &&
2298 JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
2299 if (resumeAfter) {
2300 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW);
2301 pc += cs.length;
2302 regs->pc = pc;
2303 MUST_FLOW_THROUGH("restore_pc");
2306 /* Generate the entry map for the (possibly advanced) pc and stash it in the trace. */
2307 unsigned stackSlots = js_NativeStackSlots(cx, callDepth);
2309 /* It's sufficient to track the native stack use here since all stores above the
2310 stack watermark defined by guards are killed. */
2311 trackNativeStackUse(stackSlots + 1);
2313 /* Capture the type map into a temporary location. */
2314 unsigned ngslots = treeInfo->globalSlots->length();
2315 unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);
2316 uint8* typemap = (uint8*)alloca(typemap_size);
2317 uint8* m = typemap;
2319 /* Determine the type of a store by looking at the current type of the actual value the
2320 interpreter is using. For numbers we have to check what kind of store we used last
2321 (integer or double) to figure out what the side exit should reflect in its typemap. */
2322 FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
2323 *m++ = determineSlotType(vp);
2325 JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
2328 * If we are currently executing a traceable native or we are attaching a second trace
2329 * to it, the value on top of the stack is boxed. Make a note of this in the typemap.
2331 if (pendingTraceableNative && (pendingTraceableNative->flags & JSTN_UNBOX_AFTER))
2332 typemap[stackSlots - 1] = JSVAL_BOXED;
2334 /* Now restore the original pc (after which early returns are ok). */
2335 if (resumeAfter) {
2336 MUST_FLOW_LABEL(restore_pc);
2337 regs->pc = pc - cs.length;
2338 } else {
2339 /* If we take a snapshot on a goto, advance to the target address. This avoids inner
2340 trees returning on a break goto, which the outer recorder would then confuse with
2341 a break in the outer tree. */
2342 if (*pc == JSOP_GOTO)
2343 pc += GET_JUMP_OFFSET(pc);
2344 else if (*pc == JSOP_GOTOX)
2345 pc += GET_JUMPX_OFFSET(pc);
2349 * Check if we already have a matching side exit; if so we can return that
2350 * side exit instead of creating a new one.
2352 VMSideExit** exits = treeInfo->sideExits.data();
2353 unsigned nexits = treeInfo->sideExits.length();
2354 if (exitType == LOOP_EXIT) {
2355 for (unsigned n = 0; n < nexits; ++n) {
2356 VMSideExit* e = exits[n];
2357 if (e->pc == pc && e->imacpc == fp->imacpc &&
2358 !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) {
2359 AUDIT(mergedLoopExits);
2360 return e;
2365 if (sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
2367 * ::snapshot() is infallible in the sense that callers don't
2368 * expect errors; but this is a trace-aborting error condition. So
2369 * mangle the request to consume zero slots, and mark the tree as
2370 * to-be-trashed. This should be safe as the trace will be aborted
2371 * before assembly or execution due to the call to
2372 * trackNativeStackUse above.
2374 stackSlots = 0;
2375 ngslots = 0;
2376 trashSelf = true;
2379 /* We couldn't find a matching side exit, so create a new one. */
2380 LIns* data = lir->insSkip(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8));
2381 VMSideExit* exit = (VMSideExit*) data->payload();
2383 /* Set up the side exit structure. */
2384 memset(exit, 0, sizeof(VMSideExit));
2385 exit->from = fragment;
2386 exit->calldepth = callDepth;
2387 exit->numGlobalSlots = ngslots;
2388 exit->numStackSlots = stackSlots;
2389 exit->numStackSlotsBelowCurrentFrame = cx->fp->callee
2390 ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
2391 : 0;
2392 exit->exitType = exitType;
2393 exit->block = fp->blockChain;
2394 exit->pc = pc;
2395 exit->imacpc = fp->imacpc;
2396 exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
2397 exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
2398 exit->nativeCalleeWord = 0;
2399 memcpy(getFullTypeMap(exit), typemap, typemap_size);
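/*
 * The side exit's memory layout, as getFullTypeMap and friends assume it:
 * the VMSideExit struct is followed in-line by numStackSlots stack type
 * bytes and then numGlobalSlots global type bytes.
 */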
2400 return exit;
2403 JS_REQUIRES_STACK LIns*
2404 TraceRecorder::createGuardRecord(VMSideExit* exit)
2406 LIns* guardRec = lir->insSkip(sizeof(GuardRecord));
2407 GuardRecord* gr = (GuardRecord*) guardRec->payload();
2409 memset(gr, 0, sizeof(GuardRecord));
2410 gr->exit = exit;
2411 exit->addGuard(gr);
2413 return guardRec;
2417 * Emit a guard for condition (cond), expecting to evaluate to boolean result
2418 * (expected) and using the supplied side exit if the condition doesn't hold.
2420 JS_REQUIRES_STACK void
2421 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
2423 LIns* guardRec = createGuardRecord(exit);
2426 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2427 * safe to keep references to the side exits here. If we ever start
2428 * rewinding those lirbufs, we have to make sure we purge the side exits
2429 * that then no longer will be in valid memory.
2431 if (exit->exitType == LOOP_EXIT)
2432 treeInfo->sideExits.add(exit);
2434 if (!cond->isCond()) {
2435 expected = !expected;
2436 cond = lir->ins_eq0(cond);
2439 LIns* guardIns =
2440 lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
2441 if (guardIns) {
2442 debug_only_v(printf(" SideExit=%p exitType=%d\n", (void*)exit, exit->exitType);)
2443 } else {
2444 debug_only_v(printf(" redundant guard, eliminated\n");)
2448 JS_REQUIRES_STACK VMSideExit*
2449 TraceRecorder::copy(VMSideExit* copy)
2451 size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
2452 LIns* data = lir->insSkip(sizeof(VMSideExit) + typemap_size * sizeof(uint8));
2453 VMSideExit* exit = (VMSideExit*) data->payload();
2455 /* Copy side exit structure. */
2456 memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(uint8));
2457 exit->guards = NULL;
2458 exit->from = fragment;
2459 exit->target = NULL;
2462 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2463 * safe to keep references to the side exits here. If we ever start
2464 * rewinding those lirbufs, we have to make sure we purge the side exits
2465 * that then no longer will be in valid memory.
2467 if (exit->exitType == LOOP_EXIT)
2468 treeInfo->sideExits.add(exit);
2469 return exit;
2472 /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
2473 and generate a side exit with type exitType to jump to if the condition does not hold. */
2474 JS_REQUIRES_STACK void
2475 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
2477 guard(expected, cond, snapshot(exitType));
2480 /* Try to match the type of a slot to type t. checkType is used to verify that the type of
2481 * values flowing into the loop edge is compatible with the type we expect in the loop header.
2483 * @param v Value.
2484 * @param t Typemap entry for value.
2485 * @param stage_val Outparam for set() address.
2486 * @param stage_ins Outparam for set() instruction.
2487 * @param stage_count Outparam for set() buffer count.
2488 * @return True if types are compatible, false otherwise.
2490 JS_REQUIRES_STACK bool
2491 TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins,
2492 unsigned& stage_count)
2494 if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
2495 debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n",
2497 isNumber(v),
2498 isPromoteInt(get(&v)),
2499 stage_count);)
2500 if (!isNumber(v))
2501 return false; /* not a number? type mismatch */
2502 LIns* i = get(&v);
2503 /* This is always a type mismatch; we can't close the loop with a double flowing into an int slot. */
2504 if (!isPromoteInt(i))
2505 return false;
2506 /* Looks good, slot is an int32, the last instruction should be promotable. */
2507 JS_ASSERT(isInt32(v) && isPromoteInt(i));
2508 /* Overwrite the value in this slot with the argument promoted back to an integer. */
2509 stage_val = &v;
2510 stage_ins = f2i(i);
2511 stage_count++;
2512 return true;
2514 if (t == JSVAL_DOUBLE) {
2515 debug_only_v(printf("checkType(tag=2, t=%d, isnum=%d, promote=%d) stage_count=%d\n",
2517 isNumber(v),
2518 isPromoteInt(get(&v)),
2519 stage_count);)
2520 if (!isNumber(v))
2521 return false; /* not a number? type mismatch */
2522 LIns* i = get(&v);
2523 /* We sink i2f conversions into the side exit, but at the loop edge we have to make
2524 sure we promote back to double if at loop entry we want a double. */
2525 if (isPromoteInt(i)) {
2526 stage_val = &v;
2527 stage_ins = lir->ins1(LIR_i2f, i);
2528 stage_count++;
2530 return true;
2532 if (t == JSVAL_TNULL)
2533 return JSVAL_IS_NULL(v);
2534 if (t == JSVAL_TFUN)
2535 return !JSVAL_IS_PRIMITIVE(v) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2536 if (t == JSVAL_OBJECT)
2537 return !JSVAL_IS_PRIMITIVE(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2539 /* for non-number types we expect a precise match of the type */
2540 uint8 vt = getCoercedType(v);
2541 #ifdef DEBUG
2542 if (vt != t) {
2543 debug_only_v(printf("Type mismatch: val %c, map %c ", typeChar[vt],
2544 typeChar[t]);)
2546 #endif
2547 debug_only_v(printf("checkType(vt=%d, t=%d) stage_count=%d\n",
2548 (int) vt, t, stage_count);)
2549 return vt == t;
2553 * Make sure that the current values in the given stack frame and all stack frames
2554 * up to and including entryFrame are type-compatible with the entry map.
2556 * @param root_peer First fragment in peer list.
2557 * @param stable_peer Outparam for first type stable peer.
2558 * @param demote True if stability was achieved through demotion.
2559 * @return True if type stable, false otherwise.
2561 JS_REQUIRES_STACK bool
2562 TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote)
2564 uint8* m;
2565 uint8* typemap;
2566 unsigned ngslots = treeInfo->globalSlots->length();
2567 uint16* gslots = treeInfo->globalSlots->data();
2568 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2570 if (stable_peer)
2571 *stable_peer = NULL;
2574 * Rather than calculating all of this twice, we cache it locally. The "stage" buffers
2575 * are for calls to set() that will change the exit types.
2577 bool success;
2578 unsigned stage_count;
2579 jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length()));
2580 LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length()));
2582 /* First run through and see if we can close ourselves - best case! */
2583 stage_count = 0;
2584 success = false;
2586 debug_only_v(printf("Checking type stability against self=%p\n", (void*)fragment);)
2588 m = typemap = treeInfo->globalTypeMap();
2589 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2590 debug_only_v(printf("%s%d ", vpname, vpnum);)
2591 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2592 /* If the failure was an int->double, tell the oracle. */
2593 if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2594 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2595 demote = true;
2596 } else {
2597 goto checktype_fail_1;
2600 ++m;
2602 m = typemap = treeInfo->stackTypeMap();
2603 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2604 debug_only_v(printf("%s%d ", vpname, vpnum);)
2605 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2606 if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2607 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2608 demote = true;
2609 } else {
2610 goto checktype_fail_1;
2613 ++m;
2616 success = true;
2618 checktype_fail_1:
2619 /* If we got a success and we don't need to recompile, we should just close here. */
2620 if (success && !demote) {
2621 for (unsigned i = 0; i < stage_count; i++)
2622 set(stage_vals[i], stage_ins[i]);
2623 return true;
2624 /* If we need to trash, don't bother checking peers. */
2625 } else if (trashSelf) {
2626 return false;
2629 demote = false;
2631 /* At this point the tree is about to be incomplete, so let's see if we can connect to any
2632 * peer fragment that is type stable.
2634 Fragment* f;
2635 TreeInfo* ti;
2636 for (f = root_peer; f != NULL; f = f->peer) {
2637 debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", (void*)f, f->code());)
2638 if (!f->code())
2639 continue;
2640 ti = (TreeInfo*)f->vmprivate;
2641 /* Don't allow varying stack depths */
2642 if ((ti->nStackTypes != treeInfo->nStackTypes) ||
2643 (ti->typeMap.length() != treeInfo->typeMap.length()) ||
2644 (ti->globalSlots->length() != treeInfo->globalSlots->length()))
2645 continue;
2646 stage_count = 0;
2647 success = false;
2649 m = ti->globalTypeMap();
2650 FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2651 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2652 goto checktype_fail_2;
2653 ++m;
2656 m = ti->stackTypeMap();
2657 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2658 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2659 goto checktype_fail_2;
2660 ++m;
2663 success = true;
2665 checktype_fail_2:
2666 if (success) {
2668 * There was a successful match. We don't care about restoring the saved staging, but
2669 * we do need to clear the original undemote list.
2671 for (unsigned i = 0; i < stage_count; i++)
2672 set(stage_vals[i], stage_ins[i]);
2673 if (stable_peer)
2674 *stable_peer = f;
2675 demote = false;
2676 return false;
2681 * If this is a loop trace and it would be stable with demotions, build an undemote list
2682 * and return true. Our caller should sniff this and trash the tree, recording a new one
2683 * that will presumably stabilize.
2685 if (demote && fragment->kind == LoopTrace) {
2686 typemap = m = treeInfo->globalTypeMap();
2687 FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2688 if (*m == JSVAL_INT) {
2689 JS_ASSERT(isNumber(*vp));
2690 if (!isPromoteInt(get(vp)))
2691 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2692 } else if (*m == JSVAL_DOUBLE) {
2693 JS_ASSERT(isNumber(*vp));
2694 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2695 } else {
2696 JS_ASSERT(*m == JSVAL_TAG(*vp));
2698 m++;
2701 typemap = m = treeInfo->stackTypeMap();
2702 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2703 if (*m == JSVAL_INT) {
2704 JS_ASSERT(isNumber(*vp));
2705 if (!isPromoteInt(get(vp)))
2706 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2707 } else if (*m == JSVAL_DOUBLE) {
2708 JS_ASSERT(isNumber(*vp));
2709 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2710 } else {
2711 JS_ASSERT((*m == JSVAL_TNULL)
2712 ? JSVAL_IS_NULL(*vp)
2713 : *m == JSVAL_TFUN
2714 ? !JSVAL_IS_PRIMITIVE(*vp) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))
2715 : *m == JSVAL_TAG(*vp));
2717 m++;
2719 return true;
2720 } else {
2721 demote = false;
2724 return false;
2727 static JS_REQUIRES_STACK void
2728 FlushJITCache(JSContext* cx)
2730 if (!TRACING_ENABLED(cx))
2731 return;
2732 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2733 debug_only_v(printf("Flushing cache.\n");)
2734 if (tm->recorder)
2735 js_AbortRecording(cx, "flush cache");
2736 TraceRecorder* tr;
2737 while ((tr = tm->abortStack) != NULL) {
2738 tr->removeFragmentoReferences();
2739 tr->deepAbort();
2740 tr->popAbortStack();
2742 Fragmento* fragmento = tm->fragmento;
2743 if (fragmento) {
2744 if (tm->prohibitFlush) {
2745 debug_only_v(printf("Deferring fragmento flush due to deep bail.\n");)
2746 tm->needFlush = JS_TRUE;
2747 return;
2750 fragmento->clearFrags();
2751 #ifdef DEBUG
2752 JS_ASSERT(fragmento->labels);
2753 fragmento->labels->clear();
2754 #endif
2755 tm->lirbuf->rewind();
2756 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2757 VMFragment* f = tm->vmfragments[i];
2758 while (f) {
2759 VMFragment* next = f->next;
2760 fragmento->clearFragment(f);
2761 f = next;
2763 tm->vmfragments[i] = NULL;
2765 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2766 tm->globalStates[i].globalShape = -1;
2767 tm->globalStates[i].globalSlots->clear();
2770 tm->needFlush = JS_FALSE;
2773 /* Compile the current fragment. */
2774 JS_REQUIRES_STACK void
2775 TraceRecorder::compile(JSTraceMonitor* tm)
2777 if (tm->needFlush) {
2778 FlushJITCache(cx);
2779 return;
2781 Fragmento* fragmento = tm->fragmento;
2782 if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
2783 debug_only_v(printf("Blacklist: excessive stack use.\n"));
2784 js_Blacklist((jsbytecode*) fragment->root->ip);
2785 return;
2787 if (anchor && anchor->exitType != CASE_EXIT)
2788 ++treeInfo->branchCount;
2789 if (lirbuf->outOMem()) {
2790 fragmento->assm()->setError(nanojit::OutOMem);
2791 return;
2793 ::compile(fragmento->assm(), fragment);
2794 if (fragmento->assm()->error() == nanojit::OutOMem)
2795 return;
2796 if (fragmento->assm()->error() != nanojit::None) {
2797 debug_only_v(printf("Blacklisted: error during compilation\n");)
2798 js_Blacklist((jsbytecode*) fragment->root->ip);
2799 return;
2801 js_resetRecordingAttempts(cx, (jsbytecode*) fragment->ip);
2802 js_resetRecordingAttempts(cx, (jsbytecode*) fragment->root->ip);
2803 if (anchor) {
2804 #ifdef NANOJIT_IA32
2805 if (anchor->exitType == CASE_EXIT)
2806 fragmento->assm()->patch(anchor, anchor->switchInfo);
2807 else
2808 #endif
2809 fragmento->assm()->patch(anchor);
2811 JS_ASSERT(fragment->code());
2812 JS_ASSERT(!fragment->vmprivate);
2813 if (fragment == fragment->root)
2814 fragment->vmprivate = treeInfo;
2815 /* :TODO: windows support */
2816 #if defined DEBUG && !defined WIN32
2817 const char* filename = cx->fp->script->filename;
2818 char* label = (char*)malloc((filename ? strlen(filename) : 7) + 16);
2819 sprintf(label, "%s:%u", filename ? filename : "<stdin>",
2820 js_FramePCToLineNumber(cx, cx->fp));
2821 fragmento->labels->add(fragment, sizeof(Fragment), 0, label);
2822 free(label);
2823 #endif
2824 AUDIT(traceCompleted);
2827 static bool
2828 js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree,
2829 VMSideExit* exit)
2831 JS_ASSERT(exit->numStackSlots == stableTree->nStackTypes);
2833 /* Must have a matching type-unstable exit. */
2834 if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
2835 memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
2836 return false;
2839 exit->target = stableFrag;
2840 frago->assm()->patch(exit);
2842 stableTree->dependentTrees.addUnique(exit->from->root);
2843 ((TreeInfo*)exit->from->root->vmprivate)->linkedTrees.addUnique(stableFrag);
2845 return true;
2848 /* Complete and compile a trace and link it to the existing tree if appropriate. */
2849 JS_REQUIRES_STACK void
2850 TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
2853 * We should have arrived back at the loop header, so we must not be in an imacro
2854 * here, and the opcode should be either JSOP_LOOP or, if this loop was blacklisted
2855 * in the meantime, JSOP_NOP.
2857 JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
2859 bool stable;
2860 Fragment* peer;
2861 VMFragment* peer_root;
2862 Fragmento* fragmento = tm->fragmento;
2864 if (callDepth != 0) {
2865 debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
2866 js_Blacklist((jsbytecode*) fragment->root->ip);
2867 trashSelf = true;
2868 return;
2871 VMSideExit* exit = snapshot(UNSTABLE_LOOP_EXIT);
2872 JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
2874 VMFragment* root = (VMFragment*)fragment->root;
2875 peer_root = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape, root->argc);
2876 JS_ASSERT(peer_root != NULL);
2878 stable = deduceTypeStability(peer_root, &peer, demote);
2880 #ifdef DEBUG
2881 if (!stable)
2882 AUDIT(unstableLoopVariable);
2883 #endif
2885 if (trashSelf) {
2886 debug_only_v(printf("Trashing tree from type instability.\n");)
2887 return;
2890 if (stable && demote) {
2891 JS_ASSERT(fragment->kind == LoopTrace);
2892 return;
2895 if (!stable) {
2896 fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
2899 * If we didn't find a type stable peer, we compile the loop anyway and
2900 * hope it becomes stable later.
2902 if (!peer) {
2904 * If such a fragment does not exist, let's compile the loop ahead
2905 * of time anyway. Later, if the loop becomes type stable, we will
2906 * connect these two fragments together.
2908 debug_only_v(printf("Trace has unstable loop variable with no stable peer, "
2909 "compiling anyway.\n");)
2910 UnstableExit* uexit = new UnstableExit;
2911 uexit->fragment = fragment;
2912 uexit->exit = exit;
2913 uexit->next = treeInfo->unstableExits;
2914 treeInfo->unstableExits = uexit;
2915 } else {
2916 JS_ASSERT(peer->code());
2917 exit->target = peer;
2918 debug_only_v(printf("Joining type-unstable trace to target fragment %p.\n", (void*)peer);)
2919 stable = true;
2920 ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);
2921 treeInfo->linkedTrees.addUnique(peer);
2923 } else {
2924 exit->target = fragment->root;
2925 fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), createGuardRecord(exit));
2927 compile(tm);
2929 if (fragmento->assm()->error() != nanojit::None)
2930 return;
2932 joinEdgesToEntry(fragmento, peer_root);
2934 debug_only_v(printf("updating specializations on dependent and linked trees\n"))
2935 if (fragment->root->vmprivate)
2936 specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
2939 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
2940 * should try to compile the outer tree again.
2942 if (outer)
2943 js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
2945 debug_only_v(printf("recording completed at %s:%u@%u via closeLoop\n",
2946 cx->fp->script->filename,
2947 js_FramePCToLineNumber(cx, cx->fp),
2948 FramePCOffset(cx->fp));)
2951 JS_REQUIRES_STACK void
2952 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
2954 if (fragment->kind == LoopTrace) {
2955 TreeInfo* ti;
2956 Fragment* peer;
2957 uint8* t1, *t2;
2958 UnstableExit* uexit, **unext;
2959 uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nStackTypes);
2960 uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nGlobalTypes());
2962 for (peer = peer_root; peer != NULL; peer = peer->peer) {
2963 if (!peer->code())
2964 continue;
2965 ti = (TreeInfo*)peer->vmprivate;
2966 uexit = ti->unstableExits;
2967 unext = &ti->unstableExits;
2968 while (uexit != NULL) {
2969 bool remove = js_JoinPeersIfCompatible(fragmento, fragment, treeInfo, uexit->exit);
2970 JS_ASSERT(!remove || fragment != peer);
2971 debug_only_v(if (remove) {
2972 printf("Joining type-stable trace to target exit %p->%p.\n",
2973 (void*)uexit->fragment, (void*)uexit->exit); });
2974 if (!remove) {
2975 /* See if this exit contains mismatch demotions, which imply trashing a tree.
2976 This is actually faster than trashing the original tree as soon as the
2977 instability is detected, since we could have compiled a fairly stable
2978 tree that ran faster with integers. */
2979 unsigned stackCount = 0;
2980 unsigned globalCount = 0;
2981 t1 = treeInfo->stackTypeMap();
2982 t2 = getStackTypeMap(uexit->exit);
2983 for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {
2984 if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
2985 stackDemotes[stackCount++] = i;
2986 } else if (t2[i] != t1[i]) {
2987 stackCount = 0;
2988 break;
2991 t1 = treeInfo->globalTypeMap();
2992 t2 = getGlobalTypeMap(uexit->exit);
2993 for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
2994 if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
2995 globalDemotes[globalCount++] = i;
2996 } else if (t2[i] != t1[i]) {
2997 globalCount = 0;
2998 stackCount = 0;
2999 break;
3002 if (stackCount || globalCount) {
3003 for (unsigned i = 0; i < stackCount; i++)
3004 oracle.markStackSlotUndemotable(cx, stackDemotes[i]);
3005 for (unsigned i = 0; i < globalCount; i++)
3006 oracle.markGlobalSlotUndemotable(cx, ti->globalSlots->data()[globalDemotes[i]]);
3007 JS_ASSERT(peer == uexit->fragment->root);
3008 if (fragment == peer)
3009 trashSelf = true;
3010 else
3011 whichTreesToTrash.addUnique(uexit->fragment->root);
3012 break;
3015 if (remove) {
3016 *unext = uexit->next;
3017 delete uexit;
3018 uexit = *unext;
3019 } else {
3020 unext = &uexit->next;
3021 uexit = uexit->next;
3027 debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, peer_root->globalObj,
3028 peer_root->globalShape, peer_root->argc);)
3031 /* Emit an always-exit guard and compile the tree (used for break statements). */
3032 JS_REQUIRES_STACK void
3033 TraceRecorder::endLoop(JSTraceMonitor* tm)
3035 if (callDepth != 0) {
3036 debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
3037 js_Blacklist((jsbytecode*) fragment->root->ip);
3038 trashSelf = true;
3039 return;
3042 fragment->lastIns =
3043 lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(snapshot(LOOP_EXIT)));
3044 compile(tm);
3046 if (tm->fragmento->assm()->error() != nanojit::None)
3047 return;
3049 VMFragment* root = (VMFragment*)fragment->root;
3050 joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape, root->argc));
3052 /* Note: this must always be done, in case we added new globals on trace and haven't yet
3053 propagated those to linked and dependent trees. */
3054 debug_only_v(printf("updating specializations on dependent and linked trees\n"))
3055 if (fragment->root->vmprivate)
3056 specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
3059 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
3060 * should try to compile the outer tree again.
3062 if (outer)
3063 js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
3065 debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",
3066 cx->fp->script->filename,
3067 js_FramePCToLineNumber(cx, cx->fp),
3068 FramePCOffset(cx->fp));)
3071 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
3072 JS_REQUIRES_STACK void
3073 TraceRecorder::prepareTreeCall(Fragment* inner)
3075 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
3076 inner_sp_ins = lirbuf->sp;
3077 /* The inner tree expects to be called from the current frame. If the outer tree (this
3078 trace) is currently inside an inlined function call (callDepth > 0), we have to advance
3079 the native stack pointer such that we match what the inner trace expects to see. We
3080 move it back when we come out of the inner tree call. */
3081 if (callDepth > 0) {
3082 /* Calculate the amount we have to lift the native stack pointer by to compensate for
3083 any outer frames that the inner tree doesn't expect but the outer tree has. */
3084 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
3085 /* Calculate the amount we have to lift the call stack by */
3086 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
3087 /* Guard that we have enough stack space for the tree we are trying to call on top
3088 of the new value for sp. */
3089 debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
3090 sp_adj, treeInfo->nativeStackBase, ti->nativeStackBase));
3091 LIns* sp_top = lir->ins2i(LIR_piadd, lirbuf->sp,
3092 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
3093 + sp_adj /* adjust for stack in outer frame inner tree can't see */
3094 + ti->maxNativeStackSlots * sizeof(double)); /* plus the inner tree's stack */
3095 guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
3096 /* Guard that we have enough call stack space. */
3097 LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
3098 ti->maxCallDepth * sizeof(FrameInfo*));
3099 guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
3100 /* We have enough space, so adjust sp and rp to their new level. */
3101 lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
3102 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
3103 + sp_adj /* adjust for stack in outer frame inner tree can't see */
3104 + ti->nativeStackBase), /* plus the inner tree's stack base */
3105 lirbuf->state, offsetof(InterpState, sp));
3106 lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
3107 lirbuf->state, offsetof(InterpState, rp));
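/*
 * In other words, the inner tree is entered with the native stack pointer
 * rebased to sp - outer nativeStackBase + sp_adj + inner nativeStackBase,
 * after guarding that this value plus the inner tree's maximum stack use
 * still fits below the end-of-stack watermark (eos_ins), and likewise for
 * the call stack via rp and eor_ins.
 */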
3111 /* Record a call to an inner tree. */
3112 JS_REQUIRES_STACK void
3113 TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
3115 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
3117 /* Invoke the inner tree. */
3118 LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
3119 LIns* ret = lir->insCall(&js_CallTree_ci, args);
3121 /* Read back all registers, in case the called tree changed any of them. */
3122 JS_ASSERT(!memchr(getGlobalTypeMap(exit), JSVAL_BOXED, exit->numGlobalSlots) &&
3123 !memchr(getStackTypeMap(exit), JSVAL_BOXED, exit->numStackSlots));
3124 import(ti, inner_sp_ins, exit->numStackSlots, exit->numGlobalSlots,
3125 exit->calldepth, getFullTypeMap(exit));
3127 /* Restore sp and rp to their original values (we still have them in a register). */
3128 if (callDepth > 0) {
3129 lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
3130 lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
3134 * Guard that we come out of the inner tree along the same side exit we came out when
3135 * we called the inner tree at recording time.
3137 guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), NESTED_EXIT);
3138 /* Register us as a dependent tree of the inner tree. */
3139 ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
3140 treeInfo->linkedTrees.addUnique(inner);
3143 /* Add an if/if-else control-flow merge point to the list of known merge points. */
3144 JS_REQUIRES_STACK void
3145 TraceRecorder::trackCfgMerges(jsbytecode* pc)
3147 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
3148 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
3149 jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
3150 if (sn != NULL) {
3151 if (SN_TYPE(sn) == SRC_IF) {
3152 cfgMerges.add((*pc == JSOP_IFEQ)
3153 ? pc + GET_JUMP_OFFSET(pc)
3154 : pc + GET_JUMPX_OFFSET(pc));
3155 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
3156 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
3160 /* Invert the direction of the guard if this is a loop edge that is not
3161 taken (thin loop). */
3162 JS_REQUIRES_STACK void
3163 TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
3165 ExitType exitType;
3166 if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3167 exitType = LOOP_EXIT;
3170 * If we are about to walk out of the loop, generate code for the inverse loop
3171 * condition, pretending we recorded the case that stays on trace.
3173 if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
3174 JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
3175 debug_only_v(printf("Walking out of the loop, terminating it anyway.\n");)
3176 cond = !cond;
3180 * Conditional guards do not have to be emitted if the condition is constant. We
3181 * make a note whether the loop condition is true or false here, so we later know
3182 * whether to emit a loop edge or a loop end.
3184 if (x->isconst()) {
3185 loop = (x->imm32() == cond);
3186 return;
3188 } else {
3189 exitType = BRANCH_EXIT;
3191 if (!x->isconst())
3192 guard(cond, x, exitType);
3195 /* Emit code for a fused IFEQ/IFNE. */
3196 JS_REQUIRES_STACK void
3197 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
3199 if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
3200 emitIf(pc, cond, x);
3201 if (*pc == JSOP_IFEQ)
3202 trackCfgMerges(pc);
3206 /* Check whether we have reached the end of the trace. */
3207 JS_REQUIRES_STACK JSRecordingStatus
3208 TraceRecorder::checkTraceEnd(jsbytecode *pc)
3210 if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3212 * If we compile a loop, the trace should have a zero stack balance at the loop
3213 * edge. Currently we are parked on a comparison op or IFNE/IFEQ, so advance
3214 * pc to the loop header and adjust the stack pointer and pretend we have
3215 * reached the loop header.
3217 if (loop) {
3218 JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
3219 bool fused = pc != cx->fp->regs->pc;
3220 JSFrameRegs orig = *cx->fp->regs;
3222 cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
3223 cx->fp->regs->sp -= fused ? 2 : 1;
3225 bool demote = false;
3226 closeLoop(traceMonitor, demote);
3228 *cx->fp->regs = orig;
3231 * If compiling this loop generated new oracle information which will likely
3232 * lead to a different compilation result, immediately trigger another
3233 * compiler run. This is guaranteed to converge since the oracle only
3234 * accumulates adverse information but never drops it (except when we
3235 * flush it during garbage collection).
3237 if (demote)
3238 js_AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);
3239 } else {
3240 endLoop(traceMonitor);
3242 return JSRS_STOP;
3244 return JSRS_CONTINUE;
3247 bool
3248 TraceRecorder::hasMethod(JSObject* obj, jsid id)
3250 if (!obj)
3251 return false;
3253 JSObject* pobj;
3254 JSProperty* prop;
3255 int protoIndex = OBJ_LOOKUP_PROPERTY(cx, obj, id, &pobj, &prop);
3256 if (protoIndex < 0 || !prop)
3257 return false;
3259 bool found = false;
3260 if (OBJ_IS_NATIVE(pobj)) {
3261 JSScope* scope = OBJ_SCOPE(pobj);
3262 JSScopeProperty* sprop = (JSScopeProperty*) prop;
3264 if (SPROP_HAS_STUB_GETTER(sprop) &&
3265 SPROP_HAS_VALID_SLOT(sprop, scope)) {
3266 jsval v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
3267 if (VALUE_IS_FUNCTION(cx, v)) {
3268 found = true;
3269 if (!SCOPE_IS_BRANDED(scope)) {
3270 js_MakeScopeShapeUnique(cx, scope);
3271 SCOPE_SET_BRANDED(scope);
3277 OBJ_DROP_PROPERTY(cx, pobj, prop);
3278 return found;
3281 JS_REQUIRES_STACK bool
3282 TraceRecorder::hasIteratorMethod(JSObject* obj)
3284 JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);
3286 return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom));
3290 nanojit::StackFilter::getTop(LInsp guard)
3292 VMSideExit* e = (VMSideExit*)guard->record()->exit;
3293 if (sp == lirbuf->sp)
3294 return e->sp_adj;
3295 JS_ASSERT(sp == lirbuf->rp);
3296 return e->rp_adj;
3299 #if defined NJ_VERBOSE
3300 void
3301 nanojit::LirNameMap::formatGuard(LIns *i, char *out)
3303 VMSideExit *x;
3305 x = (VMSideExit *)i->record()->exit;
3306 sprintf(out,
3307 "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld",
3308 formatRef(i),
3309 lirNames[i->opcode()],
3310 i->oprnd1()->isCond() ? formatRef(i->oprnd1()) : "",
3311 (void *)x->pc,
3312 (void *)x->imacpc,
3313 (long int)x->sp_adj,
3314 (long int)x->rp_adj);
3316 #endif
3318 void
3319 nanojit::Fragment::onDestroy()
3321 delete (TreeInfo *)vmprivate;
3324 static JS_REQUIRES_STACK bool
3325 js_DeleteRecorder(JSContext* cx)
3327 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3329 /* Aborting and completing a trace both end up here. */
3330 delete tm->recorder;
3331 tm->recorder = NULL;
3334 * If we ran out of memory, flush the code cache.
3336 if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem ||
3337 js_OverfullFragmento(tm, tm->fragmento)) {
3338 FlushJITCache(cx);
3339 return false;
3342 return true;
3346 * Checks whether the shape of the global object has changed.
3348 static JS_REQUIRES_STACK bool
3349 CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
3350 uint32 *shape=NULL, SlotList** slots=NULL)
3352 if (tm->needFlush) {
3353 FlushJITCache(cx);
3354 return false;
3357 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
3358 return false;
3360 uint32 globalShape = OBJ_SHAPE(globalObj);
3362 if (tm->recorder) {
3363 VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
3364 TreeInfo* ti = tm->recorder->getTreeInfo();
3365 /* Check the global shape matches the recorder's treeinfo's shape. */
3366 if (globalObj != root->globalObj || globalShape != root->globalShape) {
3367 AUDIT(globalShapeMismatchAtEntry);
3368 debug_only_v(printf("Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
3369 (void*)globalObj, globalShape, (void*)root->globalObj,
3370 root->globalShape);)
3371 js_Backoff(cx, (jsbytecode*) root->ip);
3372 FlushJITCache(cx);
3373 return false;
3375 if (shape)
3376 *shape = globalShape;
3377 if (slots)
3378 *slots = ti->globalSlots;
3379 return true;
3382 /* No recorder, search for a tracked global-state (or allocate one). */
3383 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
3384 GlobalState &state = tm->globalStates[i];
3386 if (state.globalShape == uint32(-1)) {
3387 state.globalObj = globalObj;
3388 state.globalShape = globalShape;
3389 JS_ASSERT(state.globalSlots);
3390 JS_ASSERT(state.globalSlots->length() == 0);
3393 if (state.globalObj == globalObj && state.globalShape == globalShape) {
3394 if (shape)
3395 *shape = globalShape;
3396 if (slots)
3397 *slots = state.globalSlots;
3398 return true;
3402 /* No currently tracked global found, and no room to allocate one; abort. */
3403 AUDIT(globalShapeMismatchAtEntry);
3404 debug_only_v(printf("No global slotlist for global shape %u, flushing cache.\n",
3405 globalShape));
3406 FlushJITCache(cx);
3407 return false;
3410 static JS_REQUIRES_STACK bool
3411 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
3412 unsigned stackSlots, unsigned ngslots, uint8* typeMap,
3413 VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
3415 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3416 if (JS_TRACE_MONITOR(cx).needFlush) {
3417 FlushJITCache(cx);
3418 return false;
3421 JS_ASSERT(f->root != f || !cx->fp->imacpc);
3423 /* start recording if no exception during construction */
3424 tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
3425 stackSlots, ngslots, typeMap,
3426 expectedInnerExit, outer, outerArgc);
3428 if (cx->throwing) {
3429 js_AbortRecording(cx, "setting up recorder failed");
3430 return false;
3432 /* clear any leftover error state */
3433 tm->fragmento->assm()->setError(None);
3434 return true;
3437 static void
3438 js_TrashTree(JSContext* cx, Fragment* f)
3440 JS_ASSERT((!f->code()) == (!f->vmprivate));
3441 JS_ASSERT(f == f->root);
3442 if (!f->code())
3443 return;
3444 AUDIT(treesTrashed);
3445 debug_only_v(printf("Trashing tree info.\n");)
3446 Fragmento* fragmento = JS_TRACE_MONITOR(cx).fragmento;
3447 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3448 f->vmprivate = NULL;
3449 f->releaseCode(fragmento);
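// dependentTrees holds trees whose compiled code jumps or calls directly
// into this one, so they must be trashed along with it.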
3450 Fragment** data = ti->dependentTrees.data();
3451 unsigned length = ti->dependentTrees.length();
3452 for (unsigned n = 0; n < length; ++n)
3453 js_TrashTree(cx, data[n]);
3454 delete ti;
3455 JS_ASSERT(!f->code() && !f->vmprivate);
3458 static int
3459 js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
3461 VOUCH_DOES_NOT_REQUIRE_STACK();
3463 JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));
3465 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);
3466 JS_ASSERT(FUN_INTERPRETED(fun));
3468 /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
3469 JSStackFrame* fp = cx->fp;
3470 JS_ASSERT_IF(!fi.imacpc,
3471 js_ReconstructStackDepth(cx, fp->script, fi.pc)
3472 == uintN(fi.s.spdist - fp->script->nfixed));
3474 uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
3475 JSScript* script = fun->u.i.script;
3476 size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);
3478 /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
3479 JSArena* a = cx->stackPool.current;
3480 void* newmark = (void*) a->avail;
3481 uintN argc = fi.s.argc & 0x7fff;
3482 jsval* vp = fp->slots + fi.s.spdist - (2 + argc);
3483 uintN missing = 0;
3484 jsval* newsp;
3486 if (fun->nargs > argc) {
3487 const JSFrameRegs& regs = *fp->regs;
3489 newsp = vp + 2 + fun->nargs;
3490 JS_ASSERT(newsp > regs.sp);
3491 if ((jsuword) newsp <= a->limit) {
3492 if ((jsuword) newsp > a->avail)
3493 a->avail = (jsuword) newsp;
3494 jsval* argsp = newsp;
3495 do {
3496 *--argsp = JSVAL_VOID;
3497 } while (argsp != regs.sp);
3498 missing = 0;
3499 } else {
3500 missing = fun->nargs - argc;
3501 nbytes += (2 + fun->nargs) * sizeof(jsval);
3505 /* Allocate the inline frame with its vars and operands. */
3506 if (a->avail + nbytes <= a->limit) {
3507 newsp = (jsval *) a->avail;
3508 a->avail += nbytes;
3509 JS_ASSERT(missing == 0);
3510 } else {
3511 /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3512 JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
3513 JS_ASSERT(newsp);
3516 * Move args if the missing ones overflow arena a, then push
3517 * undefined for the missing args.
3519 if (missing) {
3520 memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
3521 vp = newsp;
3522 newsp = vp + 2 + argc;
3523 do {
3524 *newsp++ = JSVAL_VOID;
3525 } while (--missing != 0);
3529 /* Claim space for the stack frame and initialize it. */
3530 JSInlineFrame* newifp = (JSInlineFrame *) newsp;
3531 newsp += nframeslots;
3533 newifp->frame.callobj = NULL;
3534 newifp->frame.argsobj = NULL;
3535 newifp->frame.varobj = NULL;
3536 newifp->frame.script = script;
3537 newifp->frame.callee = fi.callee;
3538 newifp->frame.fun = fun;
3540 bool constructing = (fi.s.argc & 0x8000) != 0;
3541 newifp->frame.argc = argc;
3542 newifp->callerRegs.pc = fi.pc;
3543 newifp->callerRegs.sp = fp->slots + fi.s.spdist;
3544 fp->imacpc = fi.imacpc;
3546 #ifdef DEBUG
3547 if (fi.block != fp->blockChain) {
3548 for (JSObject* obj = fi.block; obj != fp->blockChain; obj = STOBJ_GET_PARENT(obj))
3549 JS_ASSERT(obj);
3551 #endif
3552 fp->blockChain = fi.block;
3554 newifp->frame.argv = newifp->callerRegs.sp - argc;
3555 JS_ASSERT(newifp->frame.argv);
3556 #ifdef DEBUG
3557 // Initialize argv[-1] to a known-bogus value so we'll catch it if
3558 // someone forgets to initialize it later.
3559 newifp->frame.argv[-1] = JSVAL_HOLE;
3560 #endif
3561 JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);
3563 newifp->frame.rval = JSVAL_VOID;
3564 newifp->frame.down = fp;
3565 newifp->frame.annotation = NULL;
3566 newifp->frame.scopeChain = OBJ_GET_PARENT(cx, fi.callee);
3567 newifp->frame.sharpDepth = 0;
3568 newifp->frame.sharpArray = NULL;
3569 newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
3570 newifp->frame.dormantNext = NULL;
3571 newifp->frame.xmlNamespace = NULL;
3572 newifp->frame.blockChain = NULL;
3573 newifp->mark = newmark;
3574 newifp->frame.thisp = NULL; // will be updated in FlushNativeStackFrame
3576 newifp->frame.regs = fp->regs;
3577 newifp->frame.regs->pc = script->code;
3578 newifp->frame.regs->sp = newsp + script->nfixed;
3579 newifp->frame.imacpc = NULL;
3580 newifp->frame.slots = newsp;
3581 if (script->staticLevel < JS_DISPLAY_SIZE) {
3582 JSStackFrame **disp = &cx->display[script->staticLevel];
3583 newifp->frame.displaySave = *disp;
3584 *disp = &newifp->frame;
3585 }
3587 /*
3588 * Note that fp->script is still the caller's script; set the callee
3589 * inline frame's idea of caller version from its version.
3590 */
3591 newifp->callerVersion = (JSVersion) fp->script->version;
3593 // After this paragraph, fp and cx->fp point to the newly synthesized frame.
3594 fp->regs = &newifp->callerRegs;
3595 fp = cx->fp = &newifp->frame;
3597 if (fun->flags & JSFUN_HEAVYWEIGHT) {
3598 /*
3599 * Set hookData to null because the failure case for js_GetCallObject
3600 * involves it calling the debugger hook.
3601 *
3602 * Allocating the Call object must not fail, so use an object
3603 * previously reserved by js_ExecuteTree if needed.
3604 */
3605 newifp->hookData = NULL;
3606 JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
3607 JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
3608 #ifdef DEBUG
3609 JSObject *obj =
3610 #endif
3611 js_GetCallObject(cx, &newifp->frame);
3612 JS_ASSERT(obj);
3613 JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
3614 }
3616 /*
3617 * If there's a call hook, invoke it to compute the hookData used by
3618 * debuggers that cooperate with the interpreter.
3619 */
3620 JSInterpreterHook hook = cx->debugHooks->callHook;
3621 if (hook) {
3622 newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0,
3623 cx->debugHooks->callHookData);
3624 } else {
3625 newifp->hookData = NULL;
3626 }
3628 // FIXME? we must count stack slots from caller's operand stack up to (but not including)
3629 // callee's, including missing arguments. Could we shift everything down to the caller's
3630 // fp->slots (where vars start) and avoid some of the complexity?
3631 return (fi.s.spdist - fp->down->script->nfixed) +
3632 ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
3633 script->nfixed;
3634 }
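/*
 * Illustrative arithmetic for the return value above (assumed example values,
 * not from the original source): with a caller whose script->nfixed == 2,
 * fi.s.spdist == 7, a callee with fun->nargs == 3 invoked with argc == 2, and
 * callee script->nfixed == 4, the synthesized frame accounts for
 * (7 - 2) + (3 - 2) + 4 == 10 native stack slots.
 */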
3636 static void
3637 SynthesizeSlowNativeFrame(JSContext *cx, VMSideExit *exit)
3638 {
3639 VOUCH_DOES_NOT_REQUIRE_STACK();
3641 void *mark;
3642 JSInlineFrame *ifp;
3644 /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3645 mark = JS_ARENA_MARK(&cx->stackPool);
3646 JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
3647 JS_ASSERT(ifp);
3649 JSStackFrame *fp = &ifp->frame;
3650 fp->regs = NULL;
3651 fp->imacpc = NULL;
3652 fp->slots = NULL;
3653 fp->callobj = NULL;
3654 fp->argsobj = NULL;
3655 fp->varobj = cx->fp->varobj;
3656 fp->callee = exit->nativeCallee();
3657 fp->script = NULL;
3658 fp->fun = GET_FUNCTION_PRIVATE(cx, fp->callee);
3659 // fp->thisp is really a jsval, so reinterpret_cast here, not JSVAL_TO_OBJECT.
3660 fp->thisp = (JSObject *) cx->nativeVp[1];
3661 fp->argc = cx->nativeVpLen - 2;
3662 fp->argv = cx->nativeVp + 2;
3663 fp->rval = JSVAL_VOID;
3664 fp->down = cx->fp;
3665 fp->annotation = NULL;
3666 JS_ASSERT(cx->fp->scopeChain);
3667 fp->scopeChain = cx->fp->scopeChain;
3668 fp->blockChain = NULL;
3669 fp->sharpDepth = 0;
3670 fp->sharpArray = NULL;
3671 fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
3672 fp->dormantNext = NULL;
3673 fp->xmlNamespace = NULL;
3674 fp->displaySave = NULL;
3676 ifp->mark = mark;
3677 cx->fp = fp;
3678 }
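/*
 * js_RecordTree, below, starts recording a new root trace: it re-validates
 * the global object shape, picks an unused peer fragment (or allocates a
 * fresh anchor), builds a TreeInfo whose type map captures the coerced type
 * of every active slot, and then hands control to js_StartRecorder.
 */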
3680 JS_REQUIRES_STACK bool
3681 js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
3682 uint32 outerArgc, JSObject* globalObj, uint32 globalShape,
3683 SlotList* globalSlots, uint32 argc)
3684 {
3685 JS_ASSERT(f->root == f);
3687 /* Make sure the global type map didn't change on us. */
3688 if (!CheckGlobalObjectShape(cx, tm, globalObj)) {
3689 js_Backoff(cx, (jsbytecode*) f->root->ip);
3690 return false;
3691 }
3693 AUDIT(recorderStarted);
3695 /* Try to find an unused peer fragment, or allocate a new one. */
3696 while (f->code() && f->peer)
3697 f = f->peer;
3698 if (f->code())
3699 f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalObj, globalShape, argc);
3701 if (!f) {
3702 FlushJITCache(cx);
3703 return false;
3704 }
3706 f->root = f;
3707 f->lirbuf = tm->lirbuf;
3709 if (f->lirbuf->outOMem() || js_OverfullFragmento(tm, tm->fragmento)) {
3710 js_Backoff(cx, (jsbytecode*) f->root->ip);
3711 FlushJITCache(cx);
3712 debug_only_v(printf("Out of memory recording new tree, flushing cache.\n");)
3713 return false;
3714 }
3716 JS_ASSERT(!f->code() && !f->vmprivate);
3718 /* setup the VM-private treeInfo structure for this fragment */
3719 TreeInfo* ti = new (&gc) TreeInfo(f, globalSlots);
3721 /* capture the coerced type of each active slot in the type map */
3722 ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
3723 ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
3725 #ifdef DEBUG
3726 /*
3727 * Check for duplicate entry type maps. This is always wrong and hints at
3728 * trace explosion since we are trying to stabilize something without
3729 * properly connecting peer edges.
3730 */
3731 TreeInfo* ti_other;
3732 for (Fragment* peer = getLoop(tm, f->root->ip, globalObj, globalShape, argc); peer != NULL;
3733 peer = peer->peer) {
3734 if (!peer->code() || peer == f)
3735 continue;
3736 ti_other = (TreeInfo*)peer->vmprivate;
3737 JS_ASSERT(ti_other);
3738 JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
3739 }
3740 ti->treeFileName = cx->fp->script->filename;
3741 ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
3742 ti->treePCOffset = FramePCOffset(cx->fp);
3743 #endif
3745 /* determine the native frame layout at the entry point */
3746 unsigned entryNativeStackSlots = ti->nStackTypes;
3747 JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/));
3748 ti->nativeStackBase = (entryNativeStackSlots -
3749 (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
3750 ti->maxNativeStackSlots = entryNativeStackSlots;
3751 ti->maxCallDepth = 0;
3752 ti->script = cx->fp->script;
3754 /* recording primary trace */
3755 if (!js_StartRecorder(cx, NULL, f, ti,
3756 ti->nStackTypes,
3757 ti->globalSlots->length(),
3758 ti->typeMap.data(), NULL, outer, outerArgc)) {
3759 return false;
3760 }
3762 return true;
3763 }
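/*
 * The undemotion oracle is queried per slot below: stack slots are keyed
 * directly by their index in the native stack layout, while global slots are
 * first translated through the tree's global-slot table.
 */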
3765 JS_REQUIRES_STACK static inline bool
3766 isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
3767 {
3768 if (slot < ti->nStackTypes)
3769 return oracle.isStackSlotUndemotable(cx, slot);
3771 uint16* gslots = ti->globalSlots->data();
3772 return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
3773 }
3775 JS_REQUIRES_STACK static bool
3776 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer, uint32 outerArgc)
3777 {
3778 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3779 if (tm->needFlush) {
3780 FlushJITCache(cx);
3781 return false;
3782 }
3784 VMFragment* from = (VMFragment*)exit->from->root;
3785 TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
3787 JS_ASSERT(exit->from->root->code());
3789 /* Make sure any doubles are not accidentally undemoted */
3790 uint8* m = getStackTypeMap(exit);
3791 for (unsigned i = 0; i < exit->numStackSlots; i++) {
3792 if (m[i] == JSVAL_DOUBLE)
3793 oracle.markStackSlotUndemotable(cx, i);
3794 }
3795 m = getGlobalTypeMap(exit);
3796 for (unsigned i = 0; i < exit->numGlobalSlots; i++) {
3797 if (m[i] == JSVAL_DOUBLE)
3798 oracle.markGlobalSlotUndemotable(cx, from_ti->globalSlots->data()[i]);
3799 }
3801 /* If this exit does not have enough globals, there might exist a peer with more globals that we
3802 * can join to, but only if the parent's globals match.
3803 */
3804 m = getFullTypeMap(exit);
3805 if (exit->numGlobalSlots < from_ti->nGlobalTypes()) {
3806 uint32 partial = exit->numStackSlots + exit->numGlobalSlots;
3807 m = (uint8*)alloca(from_ti->typeMap.length());
3808 memcpy(m, getFullTypeMap(exit), partial);
3809 memcpy(m + partial, from_ti->globalTypeMap() + exit->numGlobalSlots,
3810 from_ti->nGlobalTypes() - exit->numGlobalSlots);
3811 }
3813 bool bound = false;
3814 for (Fragment* f = from->first; f != NULL; f = f->peer) {
3815 if (!f->code())
3816 continue;
3817 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3818 JS_ASSERT(exit->numStackSlots == ti->nStackTypes);
3819 /* Check the minimum number of slots that need to be compared. */
3820 unsigned checkSlots = JS_MIN(from_ti->typeMap.length(), ti->typeMap.length());
3821 uint8* m2 = ti->typeMap.data();
3822 /* Analyze the exit typemap against the peer typemap.
3823 * Two conditions are important:
3824 * 1) Typemaps are identical: these peers can be attached.
3825 * 2) Typemaps do not match, but only contain I->D mismatches.
3826 * In this case, the original tree must be trashed because it
3827 * will never connect to any peer.
3828 */
3829 bool matched = true;
3830 bool undemote = false;
3831 for (uint32 i = 0; i < checkSlots; i++) {
3832 /* If the types are equal we're okay. */
3833 if (m[i] == m2[i])
3834 continue;
3835 matched = false;
3836 /* If there's an I->D that cannot be resolved, flag it.
3837 * Otherwise, break and go to the next peer.
3838 */
3839 if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) {
3840 undemote = true;
3841 } else {
3842 undemote = false;
3843 break;
3844 }
3845 }
3846 if (matched) {
3847 JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
3848 JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
3849 /* Capture missing globals on both trees and link the fragments together. */
3850 if (from != f) {
3851 ti->dependentTrees.addUnique(from);
3852 from_ti->linkedTrees.addUnique(f);
3853 }
3854 if (ti->nGlobalTypes() < ti->globalSlots->length())
3855 specializeTreesToMissingGlobals(cx, ti);
3856 exit->target = f;
3857 tm->fragmento->assm()->patch(exit);
3858 /* Now erase this exit from the unstable exit list. */
3859 UnstableExit** tail = &from_ti->unstableExits;
3860 for (UnstableExit* uexit = from_ti->unstableExits; uexit != NULL; uexit = uexit->next) {
3861 if (uexit->exit == exit) {
3862 *tail = uexit->next;
3863 delete uexit;
3864 bound = true;
3865 break;
3866 }
3867 tail = &uexit->next;
3868 }
3869 JS_ASSERT(bound);
3870 debug_only_v(js_DumpPeerStability(tm, f->ip, from->globalObj, from->globalShape, from->argc);)
3871 break;
3872 } else if (undemote) {
3873 /* The original tree is unconnectable, so trash it. */
3874 js_TrashTree(cx, f);
3875 /* We shouldn't attempt to record now, since we'll hit a duplicate. */
3876 return false;
3877 }
3878 }
3879 if (bound)
3880 return false;
3882 VMFragment* root = (VMFragment*)from->root;
3883 return js_RecordTree(cx, tm, from->first, outer, outerArgc, root->globalObj,
3884 root->globalShape, from_ti->globalSlots, cx->fp->argc);
3885 }
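/*
 * js_AttemptToExtendTree, below, grows an existing tree at a side exit: it
 * reuses or creates a branch fragment anchored at the exit, then restarts
 * the recorder with a type map taken from that exit (or, when returning
 * through a nesting guard, merged from two exits as the inline comments
 * explain).
 */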
3887 static JS_REQUIRES_STACK bool
3888 js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer)
3889 {
3890 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3891 if (tm->needFlush) {
3892 FlushJITCache(cx);
3893 return false;
3894 }
3896 Fragment* f = anchor->from->root;
3897 JS_ASSERT(f->vmprivate);
3898 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3900 /* Don't grow trees above a certain size to avoid code explosion due to tail duplication. */
3901 if (ti->branchCount >= MAX_BRANCHES)
3902 return false;
3904 Fragment* c;
3905 if (!(c = anchor->target)) {
3906 c = JS_TRACE_MONITOR(cx).fragmento->createBranch(anchor, cx->fp->regs->pc);
3907 c->spawnedFrom = anchor;
3908 c->parent = f;
3909 anchor->target = c;
3910 c->root = f;
3911 }
3913 /*
3914 * If we are recycling a fragment, it might have a different ip so reset it here. This
3915 * can happen when attaching a branch to a NESTED_EXIT, which might extend along separate paths
3916 * (i.e. after the loop edge, and after a return statement).
3917 */
3918 c->ip = cx->fp->regs->pc;
3920 debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());)
3922 int32_t& hits = c->hits();
3923 if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
3924 /* start tracing secondary trace from this point */
3925 c->lirbuf = f->lirbuf;
3926 unsigned stackSlots;
3927 unsigned ngslots;
3928 uint8* typeMap;
3929 TypeMap fullMap;
3930 if (exitedFrom == NULL) {
3931 /* If we are coming straight from a simple side exit, just use that exit's type map
3932 as starting point. */
3933 ngslots = anchor->numGlobalSlots;
3934 stackSlots = anchor->numStackSlots;
3935 typeMap = getFullTypeMap(anchor);
3936 } else {
3937 /* If we side-exited on a loop exit and continue on a nesting guard, the nesting
3938 guard (anchor) has the type information for everything below the current scope,
3939 and the actual guard we exited from has the types for everything in the current
3940 scope (and whatever it inlined). We have to merge those maps here. */
3941 VMSideExit* e1 = anchor;
3942 VMSideExit* e2 = exitedFrom;
3943 fullMap.add(getStackTypeMap(e1), e1->numStackSlotsBelowCurrentFrame);
3944 fullMap.add(getStackTypeMap(e2), e2->numStackSlots);
3945 stackSlots = fullMap.length();
3946 fullMap.add(getGlobalTypeMap(e2), e2->numGlobalSlots);
3947 ngslots = e2->numGlobalSlots;
3948 typeMap = fullMap.data();
3949 }
3950 return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
3951 ngslots, typeMap, exitedFrom, outer, cx->fp->argc);
3952 }
3953 return false;
3954 }
3956 static JS_REQUIRES_STACK VMSideExit*
3957 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
3958 VMSideExit** innermostNestedGuardp);
3960 JS_REQUIRES_STACK bool
3961 js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
3962 {
3963 #ifdef JS_THREADSAFE
3964 if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
3965 js_AbortRecording(cx, "Global object not owned by this context");
3966 return false; /* we stay away from shared global objects */
3967 }
3968 #endif
3970 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3972 /* Process needFlush and deep abort requests. */
3973 if (tm->needFlush) {
3974 FlushJITCache(cx);
3975 return false;
3976 }
3977 if (r->wasDeepAborted()) {
3978 js_AbortRecording(cx, "deep abort requested");
3979 return false;
3980 }
3982 JS_ASSERT(r->getFragment() && !r->getFragment()->lastIns);
3983 VMFragment* root = (VMFragment*)r->getFragment()->root;
3985 /* Does this branch go to an inner loop? */
3986 Fragment* first = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc,
3987 root->globalObj, root->globalShape, cx->fp->argc);
3988 if (!first) {
3989 /* Not an inner loop we can call, abort trace. */
3990 AUDIT(returnToDifferentLoopHeader);
3991 JS_ASSERT(!cx->fp->imacpc);
3992 debug_only_v(printf("loop edge to %d, header %d\n",
3993 cx->fp->regs->pc - cx->fp->script->code,
3994 (jsbytecode*)r->getFragment()->root->ip - cx->fp->script->code));
3995 js_AbortRecording(cx, "Loop edge does not return to header");
3996 return false;
3997 }
3999 /* Make sure inner tree call will not run into an out-of-memory condition. */
4000 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
4001 !js_ReplenishReservedPool(cx, tm)) {
4002 js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
4003 return false;
4004 }
4006 /* Make sure the shape of the global object still matches (this might flush
4007 the JIT cache). */
4008 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4009 uint32 globalShape = -1;
4010 SlotList* globalSlots = NULL;
4011 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
4012 return false;
4014 debug_only_v(printf("Looking for type-compatible peer (%s:%d@%d)\n",
4015 cx->fp->script->filename,
4016 js_FramePCToLineNumber(cx, cx->fp),
4017 FramePCOffset(cx->fp));)
4019 // Find a matching inner tree. If none can be found, compile one.
4020 Fragment* f = r->findNestedCompatiblePeer(first);
4021 if (!f || !f->code()) {
4022 AUDIT(noCompatInnerTrees);
4024 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
4025 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
4026 uint32 outerArgc = outerFragment->argc;
4027 uint32 argc = cx->fp->argc;
4028 js_AbortRecording(cx, "No compatible inner tree");
4030 // Find an empty fragment we can recycle, or allocate a new one.
4031 for (f = first; f != NULL; f = f->peer) {
4032 if (!f->code())
4033 break;
4034 }
4035 if (!f || f->code()) {
4036 f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape, argc);
4037 if (!f) {
4038 FlushJITCache(cx);
4039 return false;
4040 }
4041 }
4042 return js_RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc);
4043 }
4045 r->adjustCallerTypes(f);
4046 r->prepareTreeCall(f);
4047 VMSideExit* innermostNestedGuard = NULL;
4048 VMSideExit* lr = js_ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
4049 if (!lr || r->wasDeepAborted()) {
4050 if (!lr)
4051 js_AbortRecording(cx, "Couldn't call inner tree");
4052 return false;
4053 }
4055 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
4056 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
4057 switch (lr->exitType) {
4058 case LOOP_EXIT:
4059 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
4060 if (innermostNestedGuard) {
4061 js_AbortRecording(cx, "Inner tree took different side exit, abort current "
4062 "recording and grow nesting tree");
4063 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, outer);
4064 }
4065 /* emit a call to the inner tree and continue recording the outer tree trace */
4066 r->emitTreeCall(f, lr);
4067 return true;
4068 case UNSTABLE_LOOP_EXIT:
4069 /* abort recording so the inner loop can become type stable. */
4070 js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
4071 return js_AttemptToStabilizeTree(cx, lr, outer, outerFragment->argc);
4072 case BRANCH_EXIT:
4073 /* abort recording the outer tree, extend the inner tree */
4074 js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
4075 return js_AttemptToExtendTree(cx, lr, NULL, outer);
4076 default:
4077 debug_only_v(printf("exit_type=%d\n", lr->exitType);)
4078 js_AbortRecording(cx, "Inner tree not suitable for calling");
4079 return false;
4080 }
4081 }
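/*
 * js_IsEntryTypeCompatible, below, decides whether an interpreter value can
 * enter a trace slot of the recorded type: ints are accepted where doubles
 * are expected (they can be promoted), a double is accepted for an int slot
 * only if its value is integral, and objects are split into null, function,
 * and non-function cases that must match exactly.
 */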
4083 static bool
4084 js_IsEntryTypeCompatible(jsval* vp, uint8* m)
4085 {
4086 unsigned tag = JSVAL_TAG(*vp);
4088 debug_only_v(printf("%c/%c ", tagChar[tag], typeChar[*m]);)
4090 switch (*m) {
4091 case JSVAL_OBJECT:
4092 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
4093 !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
4094 return true;
4095 }
4096 debug_only_v(printf("object != tag%u ", tag);)
4097 return false;
4098 case JSVAL_INT:
4099 jsint i;
4100 if (JSVAL_IS_INT(*vp))
4101 return true;
4102 if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(*vp), i))
4103 return true;
4104 debug_only_v(printf("int != tag%u(value=%lu) ", tag, (unsigned long)*vp);)
4105 return false;
4106 case JSVAL_DOUBLE:
4107 if (JSVAL_IS_INT(*vp) || tag == JSVAL_DOUBLE)
4108 return true;
4109 debug_only_v(printf("double != tag%u ", tag);)
4110 return false;
4111 case JSVAL_BOXED:
4112 JS_NOT_REACHED("shouldn't see boxed type in entry");
4113 return false;
4114 case JSVAL_STRING:
4115 if (tag == JSVAL_STRING)
4116 return true;
4117 debug_only_v(printf("string != tag%u ", tag);)
4118 return false;
4119 case JSVAL_TNULL:
4120 if (JSVAL_IS_NULL(*vp))
4121 return true;
4122 debug_only_v(printf("null != tag%u ", tag);)
4123 return false;
4124 case JSVAL_BOOLEAN:
4125 if (tag == JSVAL_BOOLEAN)
4126 return true;
4127 debug_only_v(printf("bool != tag%u ", tag);)
4128 return false;
4129 default:
4130 JS_ASSERT(*m == JSVAL_TFUN);
4131 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
4132 HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
4133 return true;
4134 }
4135 debug_only_v(printf("fun != tag%u ", tag);)
4136 return false;
4137 }
4138 }
4140 JS_REQUIRES_STACK Fragment*
4141 TraceRecorder::findNestedCompatiblePeer(Fragment* f)
4142 {
4143 JSTraceMonitor* tm;
4145 tm = &JS_TRACE_MONITOR(cx);
4146 unsigned int ngslots = treeInfo->globalSlots->length();
4147 uint16* gslots = treeInfo->globalSlots->data();
4149 TreeInfo* ti;
4150 for (; f != NULL; f = f->peer) {
4151 if (!f->code())
4152 continue;
4154 ti = (TreeInfo*)f->vmprivate;
4156 debug_only_v(printf("checking nested types %p: ", (void*)f);)
4158 if (ngslots > ti->nGlobalTypes())
4159 specializeTreesToMissingGlobals(cx, ti);
4161 uint8* typemap = ti->typeMap.data();
4163 /*
4164 * Determine whether the typemap of the inner tree matches the outer tree's
4165 * current state. If the inner tree expects an integer, but the outer tree
4166 * doesn't guarantee an integer for that slot, we mark the slot undemotable
4167 * and mismatch here. This will force a new tree to be compiled that accepts
4168 * a double for the slot. If the inner tree expects a double, but the outer
4169 * tree has an integer, we can proceed, but we mark the location undemotable.
4170 */
4171 bool ok = true;
4172 uint8* m = typemap;
4173 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
4174 debug_only_v(printf("%s%d=", vpname, vpnum);)
4175 if (!js_IsEntryTypeCompatible(vp, m)) {
4176 ok = false;
4177 } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
4178 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
4179 ok = false;
4180 } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
4181 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
4182 }
4183 m++;
4184 );
4185 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
4186 debug_only_v(printf("%s%d=", vpname, vpnum);)
4187 if (!js_IsEntryTypeCompatible(vp, m)) {
4188 ok = false;
4189 } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
4190 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
4191 ok = false;
4192 } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
4193 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
4194 }
4195 m++;
4196 );
4197 JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
4199 debug_only_v(printf(" %s\n", ok ? "match" : "");)
4201 if (ok)
4202 return f;
4203 }
4205 return NULL;
4206 }
4208 /**
4209 * Check if types are usable for trace execution.
4210 *
4211 * @param cx Context.
4212 * @param ti Tree info of peer we're testing.
4213 * @return True if compatible (with or without demotions), false otherwise.
4214 */
4215 static JS_REQUIRES_STACK bool
4216 js_CheckEntryTypes(JSContext* cx, TreeInfo* ti)
4217 {
4218 unsigned int ngslots = ti->globalSlots->length();
4219 uint16* gslots = ti->globalSlots->data();
4221 JS_ASSERT(ti->nStackTypes == js_NativeStackSlots(cx, 0));
4223 if (ngslots > ti->nGlobalTypes())
4224 specializeTreesToMissingGlobals(cx, ti);
4226 uint8* m = ti->typeMap.data();
4228 JS_ASSERT(ti->typeMap.length() == js_NativeStackSlots(cx, 0) + ngslots);
4229 JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
4230 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4231 FORALL_SLOTS(cx, ngslots, gslots, 0,
4232 debug_only_v(printf("%s%d=", vpname, vpnum);)
4233 JS_ASSERT(*m != 0xCD);
4234 if (!js_IsEntryTypeCompatible(vp, m))
4235 goto check_fail;
4236 m++;
4237 );
4238 JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
4240 debug_only_v(printf("\n");)
4241 return true;
4243 check_fail:
4244 debug_only_v(printf("\n");)
4245 return false;
4246 }
4248 /**
4249 * Find an acceptable entry tree given a PC.
4250 *
4251 * @param cx Context.
4252 * @param f First peer fragment.
4254 * @out count Number of fragments consulted.
4255 */
4256 static JS_REQUIRES_STACK Fragment*
4257 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f, uintN& count)
4258 {
4259 count = 0;
4260 for (; f != NULL; f = f->peer) {
4261 if (f->vmprivate == NULL)
4262 continue;
4263 debug_only_v(printf("checking vm types %p (ip: %p): ", (void*)f, f->ip);)
4264 if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
4265 return f;
4266 ++count;
4267 }
4268 return NULL;
4269 }
4271 static void
4272 LeaveTree(InterpState&, VMSideExit* lr);
4274 /*
4275 * Executes a tree.
4276 */
4277 static JS_REQUIRES_STACK VMSideExit*
4278 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
4279 VMSideExit** innermostNestedGuardp)
4280 {
4281 JS_ASSERT(f->root == f && f->code() && f->vmprivate);
4283 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4284 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4285 TreeInfo* ti = (TreeInfo*)f->vmprivate;
4286 unsigned ngslots = ti->globalSlots->length();
4287 uint16* gslots = ti->globalSlots->data();
4288 unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
4290 /* Make sure the global object is sane. */
4291 JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
4292 ((VMFragment*)f)->globalShape));
4293 /* Make sure our caller replenished the double pool. */
4294 JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
4296 /* Reserve objects and stack space now, to make leaving the tree infallible. */
4297 if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
4298 return NULL;
4300 #ifdef DEBUG
4301 uintN savedProhibitFlush = JS_TRACE_MONITOR(cx).prohibitFlush;
4302 #endif
4304 /* Setup the interpreter state block, which is followed by the native global frame. */
4305 InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
4306 state->cx = cx;
4307 state->inlineCallCountp = &inlineCallCount;
4308 state->innermostNestedGuardp = innermostNestedGuardp;
4309 state->outermostTree = ti;
4310 state->lastTreeExitGuard = NULL;
4311 state->lastTreeCallGuard = NULL;
4312 state->rpAtLastTreeCall = NULL;
4313 state->builtinStatus = 0;
4315 /* Setup the native global frame. */
4316 double* global = (double*)(state+1);
4318 /* Setup the native stack frame. */
4319 double stack_buffer[MAX_NATIVE_STACK_SLOTS];
4320 state->stackBase = stack_buffer;
4321 state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
4322 state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
4324 /* Setup the native call stack frame. */
4325 FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
4326 state->callstackBase = callstack_buffer;
4327 state->rp = callstack_buffer;
4328 state->eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
4330 void *reserve;
4331 state->stackMark = JS_ARENA_MARK(&cx->stackPool);
4332 JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
4333 if (!reserve)
4334 return NULL;
4336 #ifdef DEBUG
4337 memset(stack_buffer, 0xCD, sizeof(stack_buffer));
4338 memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
4339 JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
4340 #endif
4342 debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
4343 debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
4344 cx->fp->script->filename,
4345 js_FramePCToLineNumber(cx, cx->fp),
4346 FramePCOffset(cx->fp),
4347 ti->maxNativeStackSlots,
4348 f->code());)
4350 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4352 if (ngslots)
4353 BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
4354 BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
4356 union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
4357 u.code = f->code();
4359 #ifdef EXECUTE_TREE_TIMER
4360 state->startTime = rdtsc();
4361 #endif
4363 JS_ASSERT(!tm->tracecx);
4364 tm->tracecx = cx;
4365 state->prev = cx->interpState;
4366 cx->interpState = state;
4368 debug_only(fflush(NULL);)
4369 GuardRecord* rec;
4370 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
4371 SIMULATE_FASTCALL(rec, state, NULL, u.func);
4372 #else
4373 rec = u.func(state, NULL);
4374 #endif
4375 VMSideExit* lr = (VMSideExit*)rec->exit;
4377 AUDIT(traceTriggered);
4379 cx->interpState = state->prev;
4381 JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
4382 tm->tracecx = NULL;
4383 LeaveTree(*state, lr);
4384 JS_ASSERT(JS_TRACE_MONITOR(cx).prohibitFlush == savedProhibitFlush);
4385 return state->innermost;
4386 }
4388 static JS_FORCES_STACK void
4389 LeaveTree(InterpState& state, VMSideExit* lr)
4390 {
4391 VOUCH_DOES_NOT_REQUIRE_STACK();
4393 JSContext* cx = state.cx;
4394 FrameInfo** callstack = state.callstackBase;
4395 double* stack = state.stackBase;
4397 /* Except if we find that this is a nested bailout, the guard the call returned is the
4398 one we have to use to adjust pc and sp. */
4399 VMSideExit* innermost = lr;
4401 /* While executing a tree we do not update state.sp and state.rp even if they grow. Instead,
4402 guards tell us by how much sp and rp should be incremented in case of a side exit. When
4403 calling a nested tree, however, we actively adjust sp and rp. If we have such frames
4404 from outer trees on the stack, then rp will have been adjusted. Before we can process
4405 the stack of the frames of the tree we directly exited from, we have to first work our
4406 way through the outer frames and generate interpreter frames for them. Once the call
4407 stack (rp) is empty, we can process the final frames (which again are not directly
4408 visible and only the guard we exited on will tell us about). */
4409 FrameInfo** rp = (FrameInfo**)state.rp;
4410 if (lr->exitType == NESTED_EXIT) {
4411 VMSideExit* nested = state.lastTreeCallGuard;
4412 if (!nested) {
4413 /* If lastTreeCallGuard is not set in state, we only have a single level of
4414 nesting in this exit, so lr itself is the innermost and outermost nested
4415 guard, and hence we set nested to lr. The calldepth of the innermost guard
4416 is not added to state.rp, so we do it here manually. For a nesting depth
4417 greater than 1 the CallTree builtin already added the innermost guard's
4418 calldepth to state.rpAtLastTreeCall. */
4419 nested = lr;
4420 rp += lr->calldepth;
4421 } else {
4422 /* During unwinding state.rp gets overwritten at every step and we restore
4423 it here to its state at the innermost nested guard. The builtin already
4424 added the calldepth of that innermost guard to rpAtLastTreeCall. */
4425 rp = (FrameInfo**)state.rpAtLastTreeCall;
4426 }
4427 innermost = state.lastTreeExitGuard;
4428 if (state.innermostNestedGuardp)
4429 *state.innermostNestedGuardp = nested;
4430 JS_ASSERT(nested);
4431 JS_ASSERT(nested->exitType == NESTED_EXIT);
4432 JS_ASSERT(state.lastTreeExitGuard);
4433 JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
4434 }
4436 int32_t bs = state.builtinStatus;
4437 bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
4438 if (bailed) {
4439 /*
4440 * Deep-bail case.
4441 *
4442 * A _FAIL native already called LeaveTree. We already reconstructed
4443 * the interpreter stack, in pre-call state, with pc pointing to the
4444 * CALL/APPLY op, for correctness. Then we continued in native code.
4445 *
4446 * First, if we just returned from a slow native, pop its stack frame.
4447 */
4448 if (!cx->fp->script) {
4449 JSStackFrame *fp = cx->fp;
4450 JS_ASSERT(FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, fp->callee)));
4451 JS_ASSERT(fp->regs == NULL);
4452 JS_ASSERT(fp->down->regs != &((JSInlineFrame *) fp)->callerRegs);
4453 cx->fp = fp->down;
4454 JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame *) fp)->mark);
4455 }
4456 JS_ASSERT(cx->fp->script);
4458 if (!(bs & JSBUILTIN_ERROR)) {
4459 /*
4460 * The native succeeded (no exception or error). After it returned, the
4461 * trace stored the return value (at the top of the native stack) and
4462 * then immediately flunked the guard on state->builtinStatus.
4463 *
4464 * Now LeaveTree has been called again from the tail of
4465 * js_ExecuteTree. We are about to return to the interpreter. Adjust
4466 * the top stack frame to resume on the next op.
4467 */
4468 JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL ||
4469 *cx->fp->regs->pc == JSOP_APPLY ||
4470 *cx->fp->regs->pc == JSOP_NEW);
4471 uintN argc = GET_ARGC(cx->fp->regs->pc);
4472 cx->fp->regs->pc += JSOP_CALL_LENGTH;
4473 cx->fp->regs->sp -= argc + 1;
4474 JS_ASSERT_IF(!cx->fp->imacpc,
4475 cx->fp->slots + cx->fp->script->nfixed +
4476 js_ReconstructStackDepth(cx, cx->fp->script, cx->fp->regs->pc) ==
4477 cx->fp->regs->sp);
4478 }
4479 /*
4480 * The return value was not available when we reconstructed the stack,
4481 * but we have it now. Box it.
4482 */
4483 uint8* typeMap = getStackTypeMap(innermost);
4484 NativeToValue(cx,
4485 cx->fp->regs->sp[-1],
4486 typeMap[innermost->numStackSlots - 1],
4487 (jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
4489 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4490 if (tm->prohibitFlush && --tm->prohibitFlush == 0 && tm->needFlush)
4491 FlushJITCache(cx);
4492 return;
4493 }
4495 JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
4496 while (callstack < rp) {
4497 /* Synthesize a stack frame and write out the values in it using the type map pointer
4498 on the native call stack. */
4499 js_SynthesizeFrame(cx, **callstack);
4500 int slots = FlushNativeStackFrame(cx, 1/*callDepth*/, (uint8*)(*callstack+1), stack, cx->fp);
4501 #ifdef DEBUG
4502 JSStackFrame* fp = cx->fp;
4503 debug_only_v(printf("synthesized deep frame for %s:%u@%u, slots=%d\n",
4504 fp->script->filename,
4505 js_FramePCToLineNumber(cx, fp),
4506 FramePCOffset(fp),
4507 slots);)
4508 #endif
4509 /* Keep track of the additional frames we put on the interpreter stack and the native
4510 stack slots we consumed. */
4511 ++*state.inlineCallCountp;
4512 ++callstack;
4513 stack += slots;
4514 }
4516 /* We already synthesized the frames around the innermost guard. Here we just deal
4517 with additional frames inside the tree we are bailing out from. */
4518 JS_ASSERT(rp == callstack);
4519 unsigned calldepth = innermost->calldepth;
4520 unsigned calldepth_slots = 0;
4521 for (unsigned n = 0; n < calldepth; ++n) {
4522 calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
4523 ++*state.inlineCallCountp;
4524 #ifdef DEBUG
4525 JSStackFrame* fp = cx->fp;
4526 debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
4527 fp->script->filename, js_FramePCToLineNumber(cx, fp),
4528 FramePCOffset(fp));)
4529 #endif
4530 }
4532 /* Adjust sp and pc relative to the tree we exited from (not the tree we entered into).
4533 These are our final values for sp and pc since js_SynthesizeFrame has already taken
4534 care of all frames in between. But first we recover fp->blockChain, which comes from
4535 the side exit struct. */
4536 JSStackFrame* fp = cx->fp;
4538 fp->blockChain = innermost->block;
4540 /* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
4541 is whatever slots frames around us consume. */
4542 fp->regs->pc = innermost->pc;
4543 fp->imacpc = innermost->imacpc;
4544 fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
4545 JS_ASSERT_IF(!fp->imacpc,
4546 fp->slots + fp->script->nfixed +
4547 js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
4549 #ifdef EXECUTE_TREE_TIMER
4550 uint64 cycles = rdtsc() - state.startTime;
4551 #elif defined(JS_JIT_SPEW)
4552 uint64 cycles = 0;
4553 #endif
4555 debug_only_v(printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, "
4556 "calldepth=%d, cycles=%llu\n",
4557 fp->script->filename,
4558 js_FramePCToLineNumber(cx, fp),
4559 FramePCOffset(fp),
4560 js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc],
4561 (void*)lr,
4562 lr->exitType,
4563 fp->regs->sp - StackBase(fp),
4564 calldepth,
4565 cycles));
4567 /* If this trace is part of a tree, later branches might have added additional globals for
4568 which we don't have any type information available in the side exit. We merge in this
4569 information from the entry type-map. See also comment in the constructor of TraceRecorder
4570 why this is always safe to do. */
4571 TreeInfo* outermostTree = state.outermostTree;
4572 uint16* gslots = outermostTree->globalSlots->data();
4573 unsigned ngslots = outermostTree->globalSlots->length();
4574 JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
4575 uint8* globalTypeMap;
4577 /* Are there enough globals? This is the ideal fast path. */
4578 if (innermost->numGlobalSlots == ngslots) {
4579 globalTypeMap = getGlobalTypeMap(innermost);
4580 /* Otherwise, merge the typemap of the innermost entry and exit together. This should always
4581 work because it is invalid for nested trees or linked trees to have incompatible types.
4582 Thus, whenever a new global type is lazily added into a tree, all dependent and linked
4583 trees are immediately specialized (see bug 476653). */
4584 } else {
4585 TreeInfo* ti = (TreeInfo*)innermost->from->root->vmprivate;
4586 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4587 JS_ASSERT(ti->nGlobalTypes() > innermost->numGlobalSlots);
4588 globalTypeMap = (uint8*)alloca(ngslots * sizeof(uint8));
4589 memcpy(globalTypeMap, getGlobalTypeMap(innermost), innermost->numGlobalSlots);
4590 memcpy(globalTypeMap + innermost->numGlobalSlots,
4591 ti->globalTypeMap() + innermost->numGlobalSlots,
4592 ti->nGlobalTypes() - innermost->numGlobalSlots);
4593 }
4595 /* write back interned globals */
4596 double* global = (double*)(&state + 1);
4597 FlushNativeGlobalFrame(cx, ngslots, gslots, globalTypeMap, global);
4598 JS_ASSERT(*(uint64*)&global[STOBJ_NSLOTS(JS_GetGlobalForObject(cx, cx->fp->scopeChain))] ==
4599 0xdeadbeefdeadbeefLL);
4601 /* write back native stack frame */
4602 #ifdef DEBUG
4603 int slots =
4604 #endif
4605 FlushNativeStackFrame(cx, innermost->calldepth,
4606 getStackTypeMap(innermost),
4607 stack, NULL);
4608 JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
4610 if (innermost->nativeCalleeWord)
4611 SynthesizeSlowNativeFrame(cx, innermost);
4613 cx->nativeVp = NULL;
4615 #ifdef DEBUG
4616 // Verify that our state restoration worked.
4617 for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
4618 JS_ASSERT_IF(fp->callee, JSVAL_IS_OBJECT(fp->argv[-1]));
4619 }
4620 #endif
4621 #ifdef JS_JIT_SPEW
4622 if (innermost->exitType != TIMEOUT_EXIT)
4623 AUDIT(sideExitIntoInterpreter);
4624 else
4625 AUDIT(timeoutIntoInterpreter);
4626 #endif
4628 state.innermost = innermost;
4629 }
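/*
 * js_MonitorLoopEdge, below, is the interpreter's entry point into the JIT:
 * with a recorder active it tries to close or nest the loop; otherwise it
 * counts the hit, and once the loop is hot it either starts recording a new
 * tree or executes a type-compatible peer and grows it at whichever side
 * exit the execution takes.
 */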
4631 JS_REQUIRES_STACK bool
4632 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
4633 {
4634 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4636 /* Is the recorder currently active? */
4637 if (tm->recorder) {
4638 jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;
4640 if (js_RecordLoopEdge(cx, tm->recorder, inlineCallCount))
4641 return true;
4643 /*
4644 * js_RecordLoopEdge will invoke an inner tree if we have a matching one. If we
4645 * arrive here, that tree didn't run to completion and instead we mismatched
4646 * or the inner tree took a side exit other than the loop exit. We are thus
4647 * no longer guaranteed to be parked on the same loop header js_MonitorLoopEdge
4648 * was called for. In fact, this might not even be a loop header at all. Hence
4649 * if the program counter no longer hovers over the inner loop header, return to
4650 * the interpreter and do not attempt to trigger or record a new tree at this
4651 * location.
4652 */
4653 if (innerLoopHeaderPC != cx->fp->regs->pc)
4654 return false;
4655 }
4656 JS_ASSERT(!tm->recorder);
4658 /* Check the pool of reserved doubles (this might trigger a GC). */
4659 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
4660 !js_ReplenishReservedPool(cx, tm)) {
4661 return false; /* Out of memory, don't try to record now. */
4662 }
4664 /* Make sure the shape of the global object still matches (this might flush the JIT cache). */
4665 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4666 uint32 globalShape = -1;
4667 SlotList* globalSlots = NULL;
4669 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
4670 js_Backoff(cx, cx->fp->regs->pc);
4671 return false;
4672 }
4674 /* Do not enter the JIT code with a pending operation callback. */
4675 if (cx->operationCallbackFlag)
4676 return false;
4678 jsbytecode* pc = cx->fp->regs->pc;
4679 uint32 argc = cx->fp->argc;
4681 Fragment* f = getLoop(tm, pc, globalObj, globalShape, argc);
4682 if (!f)
4683 f = getAnchor(tm, pc, globalObj, globalShape, argc);
4685 if (!f) {
4686 FlushJITCache(cx);
4687 return false;
4688 }
4690 /* If we have no code in the anchor and no peers, we definitively won't be able to
4691 activate any trees, so start compiling. */
4692 if (!f->code() && !f->peer) {
4693 record:
4694 if (++f->hits() < HOTLOOP)
4695 return false;
4696 /* We can give RecordTree the root peer. If that peer is already taken, it will
4697 walk the peer list and find us a free slot or allocate a new tree if needed. */
4698 return js_RecordTree(cx, tm, f->first, NULL, 0, globalObj, globalShape,
4699 globalSlots, argc);
4700 }
4702 debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p)\n",
4703 js_FramePCToLineNumber(cx, cx->fp),
4704 FramePCOffset(cx->fp), (void*)f, f->ip);)
4706 uintN count;
4707 Fragment* match = js_FindVMCompatiblePeer(cx, f, count);
4708 if (!match) {
4709 if (count < MAXPEERS)
4710 goto record;
4711 /* If we hit the max peers ceiling, don't try to look up fragments all the time. That's
4712 expensive. This must be a rather type-unstable loop. */
4713 debug_only_v(printf("Blacklisted: too many peer trees.\n");)
4714 js_Blacklist((jsbytecode*) f->root->ip);
4715 return false;
4716 }
4718 VMSideExit* lr = NULL;
4719 VMSideExit* innermostNestedGuard = NULL;
4721 lr = js_ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
4722 if (!lr)
4723 return false;
4725 /* If we exit on a branch, or on a tree call guard, try to grow the inner tree (in case
4726 of a branch exit), or the tree nested around the tree we exited from (in case of the
4727 tree call guard). */
4728 switch (lr->exitType) {
4729 case UNSTABLE_LOOP_EXIT:
4730 return js_AttemptToStabilizeTree(cx, lr, NULL, NULL);
4731 case BRANCH_EXIT:
4732 case CASE_EXIT:
4733 return js_AttemptToExtendTree(cx, lr, NULL, NULL);
4734 case LOOP_EXIT:
4735 if (innermostNestedGuard)
4736 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL);
4737 return false;
4738 default:
4739 /* No, this was an unusual exit (i.e. out of memory/GC), so just resume interpretation. */
4740 return false;
4741 }
4742 }
4744 JS_REQUIRES_STACK JSRecordingStatus
4745 TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op)
4746 {
4747 /* Process needFlush and deepAbort() requests now. */
4748 if (JS_TRACE_MONITOR(cx).needFlush) {
4749 FlushJITCache(cx);
4750 return JSRS_STOP;
4751 }
4752 if (tr->wasDeepAborted()) {
4753 js_AbortRecording(cx, "deep abort requested");
4754 return JSRS_STOP;
4755 }
4756 JS_ASSERT(!tr->fragment->lastIns);
4758 /*
4759 * Clear one-shot state used to communicate between record_JSOP_CALL and post-
4760 * opcode-case-guts record hook (record_NativeCallComplete).
4761 */
4762 tr->pendingTraceableNative = NULL;
4763 tr->newobj_ins = NULL;
4765 debug_only_v(js_Disassemble1(cx, cx->fp->script, cx->fp->regs->pc,
4766 cx->fp->imacpc ? 0 : cx->fp->regs->pc - cx->fp->script->code,
4767 !cx->fp->imacpc, stdout);)
4769 /* If op is not a break or a return from a loop, continue recording and follow the
4770 trace. We check for imacro-calling bytecodes inside each switch case to resolve
4771 the if (JSOP_IS_IMACOP(x)) conditions at compile time. */
4773 JSRecordingStatus status;
4774 #ifdef DEBUG
4775 bool wasInImacro = (cx->fp->imacpc != NULL);
4776 #endif
4777 switch (op) {
4778 default:
4779 status = JSRS_ERROR;
4780 goto stop_recording;
4781 # define OPDEF(x,val,name,token,length,nuses,ndefs,prec,format) \
4782 case x: \
4783 status = tr->record_##x(); \
4784 if (JSOP_IS_IMACOP(x)) \
4785 goto imacro; \
4786 break;
4787 # include "jsopcode.tbl"
4788 # undef OPDEF
4789 }
4791 JS_ASSERT(status != JSRS_IMACRO);
4792 JS_ASSERT_IF(!wasInImacro, cx->fp->imacpc == NULL);
4794 /* Process deepAbort() requests now. */
4795 if (tr->wasDeepAborted()) {
4796 js_AbortRecording(cx, "deep abort requested");
4797 return JSRS_STOP;
4798 }
4800 if (JS_TRACE_MONITOR(cx).fragmento->assm()->error()) {
4801 js_AbortRecording(cx, "error during recording");
4802 return JSRS_STOP;
4803 }
4805 if (tr->lirbuf->outOMem() ||
4806 js_OverfullFragmento(&JS_TRACE_MONITOR(cx), JS_TRACE_MONITOR(cx).fragmento)) {
4807 js_AbortRecording(cx, "no more LIR memory");
4808 FlushJITCache(cx);
4809 return JSRS_STOP;
4810 }
4812 imacro:
4813 if (!STATUS_ABORTS_RECORDING(status))
4814 return status;
4816 stop_recording:
4817 /* If we recorded the end of the trace, destroy the recorder now. */
4818 if (tr->fragment->lastIns) {
4819 js_DeleteRecorder(cx);
4820 return status;
4821 }
4823 /* Looks like we encountered an error condition. Abort recording. */
4824 js_AbortRecording(cx, js_CodeName[op]);
4825 return status;
4826 }
4828 JS_REQUIRES_STACK void
4829 js_AbortRecording(JSContext* cx, const char* reason)
4830 {
4831 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4832 JS_ASSERT(tm->recorder != NULL);
4833 AUDIT(recorderAborted);
4835 /* Abort the trace and blacklist its starting point. */
4836 Fragment* f = tm->recorder->getFragment();
4838 /*
4839 * If the recorder already had its fragment disposed, or we actually finished
4840 * recording and this recorder merely is passing through the deep abort state
4841 * to the next recorder on the stack, just destroy the recorder. There is
4842 * nothing to abort.
4843 */
4844 if (!f || f->lastIns) {
4845 js_DeleteRecorder(cx);
4846 return;
4847 }
4849 JS_ASSERT(!f->vmprivate);
4850 #ifdef DEBUG
4851 TreeInfo* ti = tm->recorder->getTreeInfo();
4852 debug_only_a(printf("Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
4853 ti->treeFileName,
4854 ti->treeLineNumber,
4855 ti->treePCOffset,
4856 cx->fp->script->filename,
4857 js_FramePCToLineNumber(cx, cx->fp),
4858 FramePCOffset(cx->fp),
4859 reason);)
4860 #endif
4862 js_Backoff(cx, (jsbytecode*) f->root->ip, f->root);
4864 /*
4865 * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
4866 */
4867 if (!js_DeleteRecorder(cx))
4868 return;
4870 /*
4871 * If this is the primary trace and we didn't succeed compiling, trash the
4872 * TreeInfo object.
4873 */
4874 if (!f->code() && (f->root == f))
4875 js_TrashTree(cx, f);
4876 }
4878 #if defined NANOJIT_IA32
4879 static bool
4880 js_CheckForSSE2()
4881 {
4882 int features = 0;
4883 #if defined _MSC_VER
4884 __asm
4885 {
4886 pushad
4887 mov eax, 1
4888 cpuid
4889 mov features, edx
4890 popad
4891 }
4892 #elif defined __GNUC__
4893 asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
4894 "mov $0x01, %%eax\n"
4895 "cpuid\n"
4896 "mov %%edx, %0\n"
4897 "xchg %%esi, %%ebx\n"
4898 : "=m" (features)
4899 : /* We have no inputs */
4900 : "%eax", "%esi", "%ecx", "%edx"
4902 #elif defined __SUNPRO_C || defined __SUNPRO_CC
4903 asm("push %%ebx\n"
4904 "mov $0x01, %%eax\n"
4905 "cpuid\n"
4906 "pop %%ebx\n"
4907 : "=d" (features)
4908 : /* We have no inputs */
4909 : "%eax", "%ecx"
4911 #endif
4912 return (features & (1<<26)) != 0;
4913 }
4914 #endif
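// Background note: CPUID function 1 reports SSE2 support in bit 26 of EDX,
// which is the (1<<26) mask tested by js_CheckForSSE2 above.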
4916 #if defined(NANOJIT_ARM)
4918 #if defined(_MSC_VER) && defined(WINCE)
4920 // these come in from jswince.asm
4921 extern "C" int js_arm_try_thumb_op();
4922 extern "C" int js_arm_try_armv6t2_op();
4923 extern "C" int js_arm_try_armv5_op();
4924 extern "C" int js_arm_try_armv6_op();
4925 extern "C" int js_arm_try_armv7_op();
4926 extern "C" int js_arm_try_vfp_op();
4928 static bool
4929 js_arm_check_thumb() {
4930 bool ret = false;
4931 __try {
4932 js_arm_try_thumb_op();
4933 ret = true;
4934 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4935 ret = false;
4936 }
4937 return ret;
4938 }
4940 static bool
4941 js_arm_check_thumb2() {
4942 bool ret = false;
4943 __try {
4944 js_arm_try_armv6t2_op();
4945 ret = true;
4946 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4947 ret = false;
4948 }
4949 return ret;
4950 }
4952 static unsigned int
4953 js_arm_check_arch() {
4954 unsigned int arch = 4;
4955 __try {
4956 js_arm_try_armv5_op();
4957 arch = 5;
4958 js_arm_try_armv6_op();
4959 arch = 6;
4960 js_arm_try_armv7_op();
4961 arch = 7;
4962 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4963 }
4964 return arch;
4965 }
4967 static bool
4968 js_arm_check_vfp() {
4969 bool ret = false;
4970 __try {
4971 js_arm_try_vfp_op();
4972 ret = true;
4973 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4974 ret = false;
4975 }
4976 return ret;
4977 }
4979 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
4981 #include <stdlib.h>
4982 #include <unistd.h>
4983 #include <sys/types.h>
4984 #include <sys/stat.h>
4985 #include <sys/mman.h>
4986 #include <fcntl.h>
4987 #include <string.h>
4988 #include <elf.h>
4990 // Assume ARMv4 by default.
4991 static unsigned int arm_arch = 4;
4992 static bool arm_has_thumb = false;
4993 static bool arm_has_vfp = false;
4994 static bool arm_has_neon = false;
4995 static bool arm_has_iwmmxt = false;
4996 static bool arm_tests_initialized = false;
4998 static void
4999 arm_read_auxv() {
5000 int fd;
5001 Elf32_auxv_t aux;
5003 fd = open("/proc/self/auxv", O_RDONLY);
5004 if (fd > 0) {
5005 while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
5006 if (aux.a_type == AT_HWCAP) {
5007 uint32_t hwcap = aux.a_un.a_val;
5008 if (getenv("ARM_FORCE_HWCAP"))
5009 hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
5010 // hardcode these values to avoid depending on specific versions
5011 // of the hwcap header, e.g. HWCAP_NEON
5012 arm_has_thumb = (hwcap & 4) != 0;
5013 arm_has_vfp = (hwcap & 64) != 0;
5014 arm_has_iwmmxt = (hwcap & 512) != 0;
5015 // this flag is only present on kernel 2.6.29
5016 arm_has_neon = (hwcap & 4096) != 0;
5017 } else if (aux.a_type == AT_PLATFORM) {
5018 const char *plat = (const char*) aux.a_un.a_val;
5019 if (getenv("ARM_FORCE_PLATFORM"))
5020 plat = getenv("ARM_FORCE_PLATFORM");
5021 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
5022 // or big-endian variants and the digit indicates the version of the platform.
5023 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
5024 // processors. Architectures newer than ARMv7 are assumed to be
5025 // backwards-compatible with ARMv7.
5026 if ((plat[0] == 'v') &&
5027 (plat[1] >= '4') && (plat[1] <= '9') &&
5028 ((plat[2] == 'l') || (plat[2] == 'b')))
5029 {
5030 arm_arch = plat[1] - '0';
5031 }
5032 else
5033 {
5034 // For production code, ignore invalid (or unexpected) platform strings and
5035 // fall back to the default. For debug code, use an assertion to catch this.
5036 JS_ASSERT(false);
5037 }
5038 }
5039 }
5040 close (fd);
5041 }
5042 // if we don't have 2.6.29, we have to do this hack; set
5043 // the env var to trust HWCAP.
5044 if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
5045 arm_has_neon = true;
5048 arm_tests_initialized = true;
5049 }
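// Background note: the hardcoded masks above match the ARM Linux HWCAP bits
// HWCAP_THUMB (4), HWCAP_VFP (64), HWCAP_IWMMXT (512) and HWCAP_NEON (4096);
// they are inlined so no recent asm/hwcap.h is required, per the comment above.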
5051 static bool
5052 js_arm_check_thumb() {
5053 if (!arm_tests_initialized)
5054 arm_read_auxv();
5056 return arm_has_thumb;
5057 }
5059 static bool
5060 js_arm_check_thumb2() {
5061 if (!arm_tests_initialized)
5062 arm_read_auxv();
5064 // ARMv6T2 also supports Thumb2, but Linux doesn't provide an easy way to test for this as
5065 // there is no associated bit in auxv. ARMv7 always supports Thumb2, and future architectures
5066 // are assumed to be backwards-compatible.
5067 return (arm_arch >= 7);
5068 }
5070 static unsigned int
5071 js_arm_check_arch() {
5072 if (!arm_tests_initialized)
5073 arm_read_auxv();
5075 return arm_arch;
5076 }
5078 static bool
5079 js_arm_check_vfp() {
5080 if (!arm_tests_initialized)
5081 arm_read_auxv();
5083 return arm_has_vfp;
5084 }
5086 #else
5087 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
5088 static bool
5089 js_arm_check_thumb() { return false; }
5090 static bool
5091 js_arm_check_thumb2() { return false; }
5092 static unsigned int
5093 js_arm_check_arch() { return 4; }
5094 static bool
5095 js_arm_check_vfp() { return false; }
5096 #endif
5098 #endif /* NANOJIT_ARM */
5100 #define K *1024
5101 #define M K K
5102 #define G K M
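// Usage of the size macros above: a literal such as "128 K" expands to
// 128 *1024 and "16 M" to 16 *1024 *1024, so byte counts below can be
// written with unit suffixes.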
5104 void
5105 js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
5106 {
5107 JSTraceMonitor* tm = &JS_THREAD_DATA(cx)->traceMonitor;
5108 JS_ASSERT(tm->fragmento && tm->reFragmento);
5109 if (bytes > 1 G)
5110 bytes = 1 G;
5111 if (bytes < 128 K)
5112 bytes = 128 K;
5113 tm->maxCodeCacheBytes = bytes;
5114 }
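/*
 * Hypothetical embedding call (illustrative only): js_SetMaxCodeCacheBytes(cx, 512 M)
 * stores 512MB directly, while requests outside [128 K, 1 G] are clamped to
 * those bounds by the checks above.
 */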
5116 void
5117 js_InitJIT(JSTraceMonitor *tm)
5118 {
5119 if (!did_we_check_processor_features) {
5120 #if defined NANOJIT_IA32
5121 avmplus::AvmCore::config.use_cmov =
5122 avmplus::AvmCore::config.sse2 = js_CheckForSSE2();
5123 #endif
5124 #if defined NANOJIT_ARM
5125 bool arm_vfp = js_arm_check_vfp();
5126 bool arm_thumb = js_arm_check_thumb();
5127 bool arm_thumb2 = js_arm_check_thumb2();
5128 unsigned int arm_arch = js_arm_check_arch();
5130 avmplus::AvmCore::config.vfp = arm_vfp;
5131 avmplus::AvmCore::config.soft_float = !arm_vfp;
5132 avmplus::AvmCore::config.thumb = arm_thumb;
5133 avmplus::AvmCore::config.thumb2 = arm_thumb2;
5134 avmplus::AvmCore::config.arch = arm_arch;
5136 // Sanity-check the configuration detection.
5137 // * We don't understand architectures prior to ARMv4.
5138 JS_ASSERT(arm_arch >= 4);
5139 // * All architectures support Thumb with the possible exception of ARMv4.
5140 JS_ASSERT((arm_thumb) || (arm_arch == 4));
5141 // * Only ARMv6T2 and ARMv7(+) support Thumb2, but ARMv6 does not.
5142 JS_ASSERT((arm_thumb2) || (arm_arch <= 6));
5143 // * All architectures that support Thumb2 also support Thumb.
5144 JS_ASSERT((arm_thumb2 && arm_thumb) || (!arm_thumb2));
5145 #endif
5146 did_we_check_processor_features = true;
5147 }
5149 /*
5150 * Set the default size for the code cache to 16MB.
5151 */
5152 tm->maxCodeCacheBytes = 16 M;
5154 if (!tm->recordAttempts.ops) {
5155 JS_DHashTableInit(&tm->recordAttempts, JS_DHashGetStubOps(),
5156 NULL, sizeof(PCHashEntry),
5157 JS_DHASH_DEFAULT_CAPACITY(PC_HASH_COUNT));
5158 }
5160 if (!tm->fragmento) {
5161 JS_ASSERT(!tm->reservedDoublePool);
5162 Fragmento* fragmento = new (&gc) Fragmento(core, 32);
5163 verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
5164 tm->fragmento = fragmento;
5165 tm->lirbuf = new (&gc) LirBuffer(fragmento, NULL);
5166 #ifdef DEBUG
5167 tm->lirbuf->names = new (&gc) LirNameMap(&gc, NULL, tm->fragmento->labels);
5168 #endif
5169 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5170 tm->globalStates[i].globalShape = -1;
5171 JS_ASSERT(!tm->globalStates[i].globalSlots);
5172 tm->globalStates[i].globalSlots = new (&gc) SlotList();
5173 }
5174 tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS];
5175 memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
5176 }
5177 if (!tm->reFragmento) {
5178 Fragmento* fragmento = new (&gc) Fragmento(core, 32);
5179 verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
5180 tm->reFragmento = fragmento;
5181 tm->reLirBuf = new (&gc) LirBuffer(fragmento, NULL);
5182 }
5183 #if !defined XP_WIN
5184 debug_only(memset(&jitstats, 0, sizeof(jitstats)));
5185 #endif
5186 }
5188 void
5189 js_FinishJIT(JSTraceMonitor *tm)
5190 {
5191 #ifdef JS_JIT_SPEW
5192 if (js_verboseStats && jitstats.recorderStarted) {
5193 printf("recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
5194 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
5195 "breaks(%llu), returns(%llu), unstableInnerCalls(%llu)\n",
5196 jitstats.recorderStarted, jitstats.recorderAborted, jitstats.traceCompleted,
5197 jitstats.returnToDifferentLoopHeader, jitstats.treesTrashed, jitstats.slotPromoted,
5198 jitstats.unstableLoopVariable, jitstats.breakLoopExits, jitstats.returnLoopExits,
5199 jitstats.noCompatInnerTrees);
5200 printf("monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
5201 "global mismatch(%llu)\n", jitstats.traceTriggered, jitstats.sideExitIntoInterpreter,
5202 jitstats.typeMapMismatchAtEntry, jitstats.globalShapeMismatchAtEntry);
5203 }
5204 #endif
5205 if (tm->fragmento != NULL) {
5206 JS_ASSERT(tm->reservedDoublePool);
5207 verbose_only(delete tm->fragmento->labels;)
5208 #ifdef DEBUG
5209 delete tm->lirbuf->names;
5210 tm->lirbuf->names = NULL;
5211 #endif
5212 delete tm->lirbuf;
5213 tm->lirbuf = NULL;
5215 if (tm->recordAttempts.ops)
5216 JS_DHashTableFinish(&tm->recordAttempts);
5218 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
5219 VMFragment* f = tm->vmfragments[i];
5220 while(f) {
5221 VMFragment* next = f->next;
5222 tm->fragmento->clearFragment(f);
5223 f = next;
5224 }
5225 tm->vmfragments[i] = NULL;
5226 }
5227 delete tm->fragmento;
5228 tm->fragmento = NULL;
5229 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5230 JS_ASSERT(tm->globalStates[i].globalSlots);
5231 delete tm->globalStates[i].globalSlots;
5232 }
5233 delete[] tm->reservedDoublePool;
5234 tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL;
5235 }
5236 if (tm->reFragmento != NULL) {
5237 delete tm->reLirBuf;
5238 verbose_only(delete tm->reFragmento->labels;)
5239 delete tm->reFragmento;
5240 }
5241 }
5243 void
5244 TraceRecorder::pushAbortStack()
5245 {
5246 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5248 JS_ASSERT(tm->abortStack != this);
5250 nextRecorderToAbort = tm->abortStack;
5251 tm->abortStack = this;
5252 }
5254 void
5255 TraceRecorder::popAbortStack()
5256 {
5257 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5259 JS_ASSERT(tm->abortStack == this);
5261 tm->abortStack = nextRecorderToAbort;
5262 nextRecorderToAbort = NULL;
5263 }
5265 void
5266 js_PurgeJITOracle()
5267 {
5268 oracle.clear();
5269 }
5271 static JSDHashOperator
5272 js_PurgeScriptRecordingAttempts(JSDHashTable *table,
5273 JSDHashEntryHdr *hdr,
5274 uint32 number, void *arg)
5275 {
5276 PCHashEntry *e = (PCHashEntry *)hdr;
5277 JSScript *script = (JSScript *)arg;
5278 jsbytecode *pc = (jsbytecode *)e->key;
5280 if (JS_UPTRDIFF(pc, script->code) < script->length)
5281 return JS_DHASH_REMOVE;
5282 return JS_DHASH_NEXT;
5283 }
5285 JS_REQUIRES_STACK void
5286 js_PurgeScriptFragments(JSContext* cx, JSScript* script)
5287 {
5288 if (!TRACING_ENABLED(cx))
5289 return;
5290 debug_only_v(printf("Purging fragments for JSScript %p.\n", (void*)script);)
5291 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5292 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
5293 for (VMFragment **f = &(tm->vmfragments[i]); *f; ) {
5294 VMFragment* frag = *f;
5295 /* Disable future use of any script-associated VMFragment. */
5296 if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
5297 JS_ASSERT(frag->root == frag);
5298 debug_only_v(printf("Disconnecting VMFragment %p "
5299 "with ip %p, in range [%p,%p).\n",
5300 (void*)frag, frag->ip, script->code,
5301 script->code + script->length));
5302 VMFragment* next = frag->next;
5303 js_TrashTree(cx, frag);
5304 *f = next;
5305 } else {
5306 f = &((*f)->next);
5310 JS_DHashTableEnumerate(&(tm->recordAttempts),
5311 js_PurgeScriptRecordingAttempts, script);
5315 bool
5316 js_OverfullFragmento(JSTraceMonitor* tm, Fragmento *fragmento)
5319 * You might imagine the outOMem flag on the lirbuf is sufficient
5320 * to model the notion of "running out of memory", but there are actually
5321 * two separate issues involved:
5323 * 1. The process truly running out of memory: malloc() or mmap()
5324 * failed.
5326 * 2. The limit we put on the "intended size" of the tracemonkey code
5327 * cache, in pages, has been exceeded.
5329 * Condition 1 doesn't happen very often, but we're obliged to try to
5330 * safely shut down and signal the rest of spidermonkey when it
5331 * does. Condition 2 happens quite regularly.
5333 * Presently, the code in this file doesn't check the outOMem condition
5334 * often enough, and frequently misuses the unchecked results of
5335 * lirbuffer insertions on the assumption that it will notice the
5336 * outOMem flag "soon enough" when it returns to the monitorRecording
5337 * function. This turns out to be a false assumption if we use outOMem
5338 * to signal condition 2: we regularly provoke "passing our intended
5339 * size" and regularly fail to notice it in time to prevent writing
5340 * over the end of an artificially self-limited LIR buffer.
5342 * To mitigate, though not completely solve, this problem, we're
5343 * modeling the two forms of memory exhaustion *separately* for the
5344 * time being: condition 1 is handled by the outOMem flag inside
5345 * nanojit, and condition 2 is being handled independently *here*. So
5346 * we construct our fragmentos to use as much memory as they like,
5347 * and only report outOMem to us when there is literally no OS memory
5348 * left. Merely purging our cache when we hit our highwater mark is
5349 * handled by the (few) callers of this function.
5352 jsuint maxsz = tm->maxCodeCacheBytes;
5353 if (fragmento == tm->fragmento) {
5354 if (tm->prohibitFlush)
5355 return false;
5356 } else {
5358 * At the time of making the code cache size configurable, we were using
5359 * 16 MB for the main code cache and 1 MB for the regular expression code
5360 * cache. We will stick to this 16:1 ratio here until we unify the two
5361 * code caches.
5363 maxsz /= 16;
5365 return (fragmento->_stats.pages > (maxsz >> NJ_LOG2_PAGE_SIZE));
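/*
 * Illustration only, not part of the build: the check above reduces to
 * integer arithmetic on page counts. The constants here (a 16 MB budget and
 * 4 KB pages, i.e. NJ_LOG2_PAGE_SIZE == 12) are assumptions made for the
 * sake of the example, not values this file guarantees.
 */
#if 0
static bool
ExampleOverfull(size_t pages, size_t maxBytes, bool isRegExpCache)
{
    size_t maxsz = isRegExpCache ? maxBytes / 16 : maxBytes; /* same 16:1 split as above */
    return pages > (maxsz >> 12);  /* e.g. 16 MB / 4 KB = 4096 pages, 1 MB / 4 KB = 256 */
}
#endif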
5368 JS_FORCES_STACK JS_FRIEND_API(void)
5369 js_DeepBail(JSContext *cx)
5371 JS_ASSERT(JS_ON_TRACE(cx));
5374 * Exactly one context on the current thread is on trace. Find out which
5375 * one. (Most callers cannot guarantee that it's cx.)
5377 JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
5378 JSContext *tracecx = tm->tracecx;
5380 /* It's a bug if a non-FAIL_STATUS builtin gets here. */
5381 JS_ASSERT(tracecx->bailExit);
5383 tm->tracecx = NULL;
5384 tm->prohibitFlush++;
5385 debug_only_v(printf("Deep bail.\n");)
5386 LeaveTree(*tracecx->interpState, tracecx->bailExit);
5387 tracecx->bailExit = NULL;
5388 tracecx->interpState->builtinStatus |= JSBUILTIN_BAILED;
5391 JS_REQUIRES_STACK jsval&
5392 TraceRecorder::argval(unsigned n) const
5394 JS_ASSERT(n < cx->fp->fun->nargs);
5395 return cx->fp->argv[n];
5398 JS_REQUIRES_STACK jsval&
5399 TraceRecorder::varval(unsigned n) const
5401 JS_ASSERT(n < cx->fp->script->nslots);
5402 return cx->fp->slots[n];
5405 JS_REQUIRES_STACK jsval&
5406 TraceRecorder::stackval(int n) const
5408 jsval* sp = cx->fp->regs->sp;
5409 return sp[n];
5412 JS_REQUIRES_STACK LIns*
5413 TraceRecorder::scopeChain() const
5415 return lir->insLoad(LIR_ldp,
5416 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
5417 offsetof(JSStackFrame, scopeChain));
5420 static inline bool
5421 FrameInRange(JSStackFrame* fp, JSStackFrame *target, unsigned callDepth)
5423 while (fp != target) {
5424 if (callDepth-- == 0)
5425 return false;
5426 if (!(fp = fp->down))
5427 return false;
5429 return true;
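/*
 * A minimal sketch (hypothetical frames, not compiled) of what FrameInRange
 * accepts: the target frame must be reachable from fp via 'down' links in at
 * most callDepth hops, i.e. within the portion of the stack this trace covers.
 */
#if 0
static void
ExampleFrameInRange()
{
    JSStackFrame top, a, b;                  /* top -> a -> b via 'down' */
    top.down = &a; a.down = &b; b.down = NULL;
    JS_ASSERT(FrameInRange(&top, &b, 2));    /* two hops: reachable */
    JS_ASSERT(!FrameInRange(&top, &b, 1));   /* callDepth exhausted first */
}
#endif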
5432 JS_REQUIRES_STACK JSRecordingStatus
5433 TraceRecorder::activeCallOrGlobalSlot(JSObject* obj, jsval*& vp)
5435 // Lookup a name in the scope chain, arriving at a property either in the
5436 // global object or some call object's fp->slots, and import that property
5437 // into the trace's native stack frame. This could theoretically do *lookup*
5438 // through the property cache, but there is little performance to be gained
5439 // by doing so since at trace-execution time the underlying object (call
5440 // object or global object) will not be consulted at all: the jsval*
5441 // returned from this function will map (in the tracker) to a LIns* directly
5442 // defining a slot in the trace's native stack.
5444 JS_ASSERT(obj != globalObj);
5446 JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
5447 JSObject* obj2;
5448 JSProperty* prop;
5449 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
5450 ABORT_TRACE_ERROR("error in js_FindProperty");
5451 if (!prop)
5452 ABORT_TRACE("failed to find name in non-global scope chain");
5454 if (obj == globalObj) {
5455 JSScopeProperty* sprop = (JSScopeProperty*) prop;
5457 if (obj2 != obj) {
5458 OBJ_DROP_PROPERTY(cx, obj2, prop);
5459 ABORT_TRACE("prototype property");
5461 if (!isValidSlot(OBJ_SCOPE(obj), sprop)) {
5462 OBJ_DROP_PROPERTY(cx, obj2, prop);
5463 return JSRS_STOP;
5465 if (!lazilyImportGlobalSlot(sprop->slot)) {
5466 OBJ_DROP_PROPERTY(cx, obj2, prop);
5467 ABORT_TRACE("lazy import of global slot failed");
5469 vp = &STOBJ_GET_SLOT(obj, sprop->slot);
5470 OBJ_DROP_PROPERTY(cx, obj2, prop);
5471 return JSRS_CONTINUE;
5474 if (wasDeepAborted())
5475 ABORT_TRACE("deep abort from property lookup");
5477 if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass) {
5478 JSStackFrame* cfp = (JSStackFrame*) JS_GetPrivate(cx, obj);
5479 if (cfp && FrameInRange(cx->fp, cfp, callDepth)) {
5480 JSScopeProperty* sprop = (JSScopeProperty*) prop;
5481 uintN slot = sprop->shortid;
5483 vp = NULL;
5484 if (sprop->getter == js_GetCallArg) {
5485 JS_ASSERT(slot < cfp->fun->nargs);
5486 vp = &cfp->argv[slot];
5487 } else if (sprop->getter == js_GetCallVar) {
5488 JS_ASSERT(slot < cfp->script->nslots);
5489 vp = &cfp->slots[slot];
5491 OBJ_DROP_PROPERTY(cx, obj2, prop);
5492 if (!vp)
5493 ABORT_TRACE("dynamic property of Call object");
5494 return JSRS_CONTINUE;
5498 OBJ_DROP_PROPERTY(cx, obj2, prop);
5499 ABORT_TRACE("fp->scopeChain is not global or active call object");
5502 JS_REQUIRES_STACK LIns*
5503 TraceRecorder::arg(unsigned n)
5505 return get(&argval(n));
5508 JS_REQUIRES_STACK void
5509 TraceRecorder::arg(unsigned n, LIns* i)
5511 set(&argval(n), i);
5514 JS_REQUIRES_STACK LIns*
5515 TraceRecorder::var(unsigned n)
5517 return get(&varval(n));
5520 JS_REQUIRES_STACK void
5521 TraceRecorder::var(unsigned n, LIns* i)
5523 set(&varval(n), i);
5526 JS_REQUIRES_STACK LIns*
5527 TraceRecorder::stack(int n)
5529 return get(&stackval(n));
5532 JS_REQUIRES_STACK void
5533 TraceRecorder::stack(int n, LIns* i)
5535 set(&stackval(n), i, n >= 0);
5538 JS_REQUIRES_STACK LIns*
5539 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
5541 if (v == LIR_fadd || v == LIR_fsub) {
5542 jsdouble r;
5543 if (v == LIR_fadd)
5544 r = v0 + v1;
5545 else
5546 r = v0 - v1;
5548 * Calculate the result of the operation for the current values. If the
5549 * result is not within the integer range, don't even try to demote
5550 * here.
5552 if (!JSDOUBLE_IS_NEGZERO(r) && (jsint(r) == r) && isPromoteInt(s0) && isPromoteInt(s1)) {
5553 LIns* d0 = ::demote(lir, s0);
5554 LIns* d1 = ::demote(lir, s1);
5556 * If the inputs are constant, generate an integer constant for
5557 * this operation.
5559 if (d0->isconst() && d1->isconst())
5560 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
5562 * Speculatively generate code that performs the operation on the
5563 * integer inputs as an integer addition/subtraction, and exit
5564 * if that overflows.
5566 v = (LOpcode)((int)v & ~LIR64);
5567 LIns* result = lir->ins2(v, d0, d1);
5568 if (!result->isconst() && (!overflowSafe(d0) || !overflowSafe(d1))) {
5569 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
5570 lir->insGuard(LIR_xt, lir->ins1(LIR_ov, result), createGuardRecord(exit));
5572 return lir->ins1(LIR_i2f, result);
5575 * The result doesn't fit into the integer domain, so either generate
5576 * a floating point constant or a floating point operation.
5578 if (s0->isconst() && s1->isconst())
5579 return lir->insImmf(r);
5580 return lir->ins2(v, s0, s1);
5582 return lir->ins2(v, s0, s1);
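/*
 * A hedged sketch of what the demotion path above emits for x + y when both
 * operands are promotable but not constant (fragment only; the names s0, s1,
 * lir are illustrative, reusing this function's locals): an integer add
 * guarded by LIR_ov, exiting through OVERFLOW_EXIT, then a re-promotion to
 * double so the stack types stay consistent.
 */
#if 0
LIns* d0  = ::demote(lir, s0);
LIns* d1  = ::demote(lir, s1);
LIns* sum = lir->ins2(LIR_add, d0, d1);
lir->insGuard(LIR_xt, lir->ins1(LIR_ov, sum),
              createGuardRecord(snapshot(OVERFLOW_EXIT)));
LIns* result = lir->ins1(LIR_i2f, sum);
#endif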
5585 LIns*
5586 TraceRecorder::f2i(LIns* f)
5588 return lir->insCall(&js_DoubleToInt32_ci, &f);
5591 JS_REQUIRES_STACK LIns*
5592 TraceRecorder::makeNumberInt32(LIns* f)
5594 JS_ASSERT(f->isQuad());
5595 LIns* x;
5596 if (!isPromote(f)) {
5597 x = f2i(f);
5598 guard(true, lir->ins2(LIR_feq, f, lir->ins1(LIR_i2f, x)), MISMATCH_EXIT);
5599 } else {
5600 x = ::demote(lir, f);
5602 return x;
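/*
 * Worked example (run-time values assumed): for f == 3.0 the emitted guard
 * feq(f, i2f(f2i(f))) holds, since js_DoubleToInt32(3.0) == 3 round-trips;
 * for f == 3.5 the round-trip yields 3.0 != 3.5, so the guard fails and we
 * side-exit through MISMATCH_EXIT rather than silently truncating.
 */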
5605 JS_REQUIRES_STACK LIns*
5606 TraceRecorder::stringify(jsval& v)
5608 LIns* v_ins = get(&v);
5609 if (JSVAL_IS_STRING(v))
5610 return v_ins;
5612 LIns* args[] = { v_ins, cx_ins };
5613 const CallInfo* ci;
5614 if (JSVAL_IS_NUMBER(v)) {
5615 ci = &js_NumberToString_ci;
5616 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5617 ci = &js_BooleanOrUndefinedToString_ci;
5618 } else {
5620 * Callers must deal with non-primitive (non-null object) values by
5621 * calling an imacro. We don't try to guess about which imacro, with
5622 * what valueOf hint, here.
5624 JS_ASSERT(JSVAL_IS_NULL(v));
5625 return INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.nullAtom));
5628 v_ins = lir->insCall(ci, args);
5629 guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
5630 return v_ins;
5633 JS_REQUIRES_STACK JSRecordingStatus
5634 TraceRecorder::call_imacro(jsbytecode* imacro)
5636 JSStackFrame* fp = cx->fp;
5637 JSFrameRegs* regs = fp->regs;
5639 // We can't nest imacros.
5640 if (fp->imacpc)
5641 return JSRS_STOP;
5643 fp->imacpc = regs->pc;
5644 regs->pc = imacro;
5645 atoms = COMMON_ATOMS_START(&cx->runtime->atomState);
5646 return JSRS_IMACRO;
5649 JS_REQUIRES_STACK JSRecordingStatus
5650 TraceRecorder::ifop()
5652 jsval& v = stackval(-1);
5653 LIns* v_ins = get(&v);
5654 bool cond;
5655 LIns* x;
5657 if (JSVAL_IS_NULL(v)) {
5658 cond = false;
5659 x = lir->insImm(0);
5660 } else if (!JSVAL_IS_PRIMITIVE(v)) {
5661 cond = true;
5662 x = lir->insImm(1);
5663 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5664 /* Test whether the boolean is true; we negate later if testing for false. */
5665 cond = JSVAL_TO_PSEUDO_BOOLEAN(v) == JS_TRUE;
5666 x = lir->ins2i(LIR_eq, v_ins, 1);
5667 } else if (isNumber(v)) {
5668 jsdouble d = asNumber(v);
5669 cond = !JSDOUBLE_IS_NaN(d) && d;
5670 x = lir->ins2(LIR_and,
5671 lir->ins2(LIR_feq, v_ins, v_ins),
5672 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmq(0))));
5673 } else if (JSVAL_IS_STRING(v)) {
5674 cond = JSSTRING_LENGTH(JSVAL_TO_STRING(v)) != 0;
5675 x = lir->ins2(LIR_piand,
5676 lir->insLoad(LIR_ldp,
5677 v_ins,
5678 (int)offsetof(JSString, length)),
5679 INS_CONSTPTR(reinterpret_cast<void *>(JSSTRING_LENGTH_MASK)));
5680 } else {
5681 JS_NOT_REACHED("ifop");
5682 return JSRS_STOP;
5685 jsbytecode* pc = cx->fp->regs->pc;
5686 emitIf(pc, cond, x);
5687 return checkTraceEnd(pc);
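/*
 * A sketch (fragment; v_ins and lir are illustrative, reusing ifop's locals)
 * of the LIR the numeric branch above builds for "if (v)": v is truthy iff
 * it is neither NaN nor zero, and feq(v, v) is false exactly when v is NaN.
 */
#if 0
LIns* not_nan  = lir->ins2(LIR_feq, v_ins, v_ins);
LIns* non_zero = lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmq(0)));
LIns* truthy   = lir->ins2(LIR_and, not_nan, non_zero);
#endif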
5690 #ifdef NANOJIT_IA32
5691 /* Record LIR for a tableswitch or tableswitchx op. We record LIR only the
5692 "first" time we hit the op. Later, when we start traces after exiting that
5693 trace, we just patch. */
5694 JS_REQUIRES_STACK LIns*
5695 TraceRecorder::tableswitch()
5697 jsval& v = stackval(-1);
5698 if (!isNumber(v))
5699 return NULL;
5701 /* no need to guard if condition is constant */
5702 LIns* v_ins = f2i(get(&v));
5703 if (v_ins->isconst() || v_ins->isconstq())
5704 return NULL;
5706 jsbytecode* pc = cx->fp->regs->pc;
5707 /* Starting a new trace after exiting a trace via switch. */
5708 if (anchor &&
5709 (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
5710 fragment->ip == pc) {
5711 return NULL;
5714 /* Decode jsop. */
5715 jsint low, high;
5716 if (*pc == JSOP_TABLESWITCH) {
5717 pc += JUMP_OFFSET_LEN;
5718 low = GET_JUMP_OFFSET(pc);
5719 pc += JUMP_OFFSET_LEN;
5720 high = GET_JUMP_OFFSET(pc);
5721 } else {
5722 pc += JUMPX_OFFSET_LEN;
5723 low = GET_JUMPX_OFFSET(pc);
5724 pc += JUMPX_OFFSET_LEN;
5725 high = GET_JUMPX_OFFSET(pc);
5728 /* Really large tables won't fit in a page. This is a conservative check.
5729 If it matters in practice, we need to go off-page. */
5730 if ((high + 1 - low) * sizeof(intptr_t*) + 128 > (unsigned) LARGEST_UNDERRUN_PROT) {
5731 // This throws away the return value of switchop but it seems
5732 // ok because switchop always returns true.
5733 (void) switchop();
5734 return NULL;
5737 /* Generate switch LIR. */
5738 LIns* si_ins = lir_buf_writer->insSkip(sizeof(SwitchInfo));
5739 SwitchInfo* si = (SwitchInfo*) si_ins->payload();
5740 si->count = high + 1 - low;
5741 si->table = 0;
5742 si->index = (uint32) -1;
5743 LIns* diff = lir->ins2(LIR_sub, v_ins, lir->insImm(low));
5744 LIns* cmp = lir->ins2(LIR_ult, diff, lir->insImm(si->count));
5745 lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
5746 lir->insStorei(diff, lir->insImmPtr(&si->index), 0);
5747 VMSideExit* exit = snapshot(CASE_EXIT);
5748 exit->switchInfo = si;
5749 return lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
5751 #endif
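/*
 * Worked example for the IA32 path above (bytecode assumed): for a switch
 * whose cases cover 10..13, low == 10 and high == 13, so si->count == 4.
 * The recorder emits diff = v - 10, guards diff <u 4 (anything out of range
 * takes DEFAULT_EXIT), stores diff into si->index, and closes the trace with
 * an LIR_xtbl guard whose CASE_EXIT is patched as per-case traces attach.
 */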
5753 JS_REQUIRES_STACK JSRecordingStatus
5754 TraceRecorder::switchop()
5756 jsval& v = stackval(-1);
5757 LIns* v_ins = get(&v);
5758 /* no need to guard if condition is constant */
5759 if (v_ins->isconst() || v_ins->isconstq())
5760 return JSRS_CONTINUE;
5761 if (isNumber(v)) {
5762 jsdouble d = asNumber(v);
5763 guard(true,
5764 addName(lir->ins2(LIR_feq, v_ins, lir->insImmf(d)),
5765 "guard(switch on numeric)"),
5766 BRANCH_EXIT);
5767 } else if (JSVAL_IS_STRING(v)) {
5768 LIns* args[] = { v_ins, INS_CONSTPTR(JSVAL_TO_STRING(v)) };
5769 guard(true,
5770 addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(&js_EqualStrings_ci, args))),
5771 "guard(switch on string)"),
5772 BRANCH_EXIT);
5773 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5774 guard(true,
5775 addName(lir->ins2(LIR_eq, v_ins, lir->insImm(JSVAL_TO_PUBLIC_PSEUDO_BOOLEAN(v))),
5776 "guard(switch on boolean)"),
5777 BRANCH_EXIT);
5778 } else {
5779 ABORT_TRACE("switch on object or null");
5781 return JSRS_CONTINUE;
5784 JS_REQUIRES_STACK JSRecordingStatus
5785 TraceRecorder::inc(jsval& v, jsint incr, bool pre)
5787 LIns* v_ins = get(&v);
5788 CHECK_STATUS(inc(v, v_ins, incr, pre));
5789 set(&v, v_ins);
5790 return JSRS_CONTINUE;
5794 * On exit, v_ins is the incremented unboxed value, and the appropriate
5795 * value (pre- or post-increment as described by pre) is stacked.
5797 JS_REQUIRES_STACK JSRecordingStatus
5798 TraceRecorder::inc(jsval& v, LIns*& v_ins, jsint incr, bool pre)
5800 if (!isNumber(v))
5801 ABORT_TRACE("can only inc numbers");
5803 LIns* v_after = alu(LIR_fadd, asNumber(v), incr, v_ins, lir->insImmf(incr));
5805 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
5806 JS_ASSERT(cs.ndefs == 1);
5807 stack(-cs.nuses, pre ? v_after : v_ins);
5808 v_ins = v_after;
5809 return JSRS_CONTINUE;
5812 JS_REQUIRES_STACK JSRecordingStatus
5813 TraceRecorder::incProp(jsint incr, bool pre)
5815 jsval& l = stackval(-1);
5816 if (JSVAL_IS_PRIMITIVE(l))
5817 ABORT_TRACE("incProp on primitive");
5819 JSObject* obj = JSVAL_TO_OBJECT(l);
5820 LIns* obj_ins = get(&l);
5822 uint32 slot;
5823 LIns* v_ins;
5824 CHECK_STATUS(prop(obj, obj_ins, slot, v_ins));
5826 if (slot == SPROP_INVALID_SLOT)
5827 ABORT_TRACE("incProp on invalid slot");
5829 jsval& v = STOBJ_GET_SLOT(obj, slot);
5830 CHECK_STATUS(inc(v, v_ins, incr, pre));
5832 box_jsval(v, v_ins);
5834 LIns* dslots_ins = NULL;
5835 stobj_set_slot(obj_ins, slot, dslots_ins, v_ins);
5836 return JSRS_CONTINUE;
5839 JS_REQUIRES_STACK JSRecordingStatus
5840 TraceRecorder::incElem(jsint incr, bool pre)
5842 jsval& r = stackval(-1);
5843 jsval& l = stackval(-2);
5844 jsval* vp;
5845 LIns* v_ins;
5846 LIns* addr_ins;
5847 CHECK_STATUS(elem(l, r, vp, v_ins, addr_ins));
5848 if (!addr_ins) // if we read a hole, abort
5849 return JSRS_STOP;
5850 CHECK_STATUS(inc(*vp, v_ins, incr, pre));
5851 box_jsval(*vp, v_ins);
5852 lir->insStorei(v_ins, addr_ins, 0);
5853 return JSRS_CONTINUE;
5856 static bool
5857 evalCmp(LOpcode op, double result)
5859 bool cond;
5860 switch (op) {
5861 case LIR_feq:
5862 cond = (result == 0);
5863 break;
5864 case LIR_flt:
5865 cond = result < 0;
5866 break;
5867 case LIR_fgt:
5868 cond = result > 0;
5869 break;
5870 case LIR_fle:
5871 cond = result <= 0;
5872 break;
5873 case LIR_fge:
5874 cond = result >= 0;
5875 break;
5876 default:
5877 JS_NOT_REACHED("unexpected comparison op");
5878 return false;
5880 return cond;
5883 static bool
5884 evalCmp(LOpcode op, double l, double r)
5886 return evalCmp(op, l - r);
5889 static bool
5890 evalCmp(LOpcode op, JSString* l, JSString* r)
5892 if (op == LIR_feq)
5893 return js_EqualStrings(l, r);
5894 return evalCmp(op, js_CompareStrings(l, r));
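/*
 * Hedged examples of the record-time folding above (values assumed): the
 * double overload maps a comparison onto the sign of l - r, and the string
 * overload maps it onto js_CompareStrings.
 */
#if 0
bool a = evalCmp(LIR_flt, 1.0, 2.0);  /* true:  1.0 - 2.0 < 0  */
bool b = evalCmp(LIR_fge, 2.0, 2.0);  /* true:  2.0 - 2.0 >= 0 */
bool c = evalCmp(LIR_fgt, 1.0, 2.0);  /* false: 1.0 - 2.0 is not > 0 */
#endif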
5897 JS_REQUIRES_STACK void
5898 TraceRecorder::strictEquality(bool equal, bool cmpCase)
5900 jsval& r = stackval(-1);
5901 jsval& l = stackval(-2);
5902 LIns* l_ins = get(&l);
5903 LIns* r_ins = get(&r);
5904 LIns* x;
5905 bool cond;
5907 uint8 ltag = getPromotedType(l);
5908 if (ltag != getPromotedType(r)) {
5909 cond = !equal;
5910 x = lir->insImm(cond);
5911 } else if (ltag == JSVAL_STRING) {
5912 LIns* args[] = { r_ins, l_ins };
5913 x = lir->ins2i(LIR_eq, lir->insCall(&js_EqualStrings_ci, args), equal);
5914 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
5915 } else {
5916 LOpcode op = (ltag != JSVAL_DOUBLE) ? LIR_eq : LIR_feq;
5917 x = lir->ins2(op, l_ins, r_ins);
5918 if (!equal)
5919 x = lir->ins_eq0(x);
5920 cond = (ltag == JSVAL_DOUBLE)
5921 ? asNumber(l) == asNumber(r)
5922 : l == r;
5924 cond = (cond == equal);
5926 if (cmpCase) {
5927 /* Only guard if the same path may not always be taken. */
5928 if (!x->isconst())
5929 guard(cond, x, BRANCH_EXIT);
5930 return;
5933 set(&l, x);
5936 JS_REQUIRES_STACK JSRecordingStatus
5937 TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
5939 jsval& rval = stackval(-1);
5940 jsval& lval = stackval(-2);
5941 LIns* l_ins = get(&lval);
5942 LIns* r_ins = get(&rval);
5944 return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
5947 JS_REQUIRES_STACK JSRecordingStatus
5948 TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
5949 bool negate, bool tryBranchAfterCond,
5950 jsval& rval)
5952 bool fp = false;
5953 bool cond;
5954 LIns* args[] = { NULL, NULL };
5957 * The if chain below closely mirrors that found in 11.9.3, in general
5958 * deviating from that ordering of ifs only to account for SpiderMonkey's
5959 * conflation of booleans and undefined and for the possibility of
5960 * confusing objects and null. Note carefully the spec-mandated recursion
5961 * in the final else clause, which terminates because Number == T recurs
5962 * only if T is Object, but that must recur again to convert Object to
5963 * primitive, and ToPrimitive throws if the object cannot be converted to
5964 * a primitive value (which would terminate recursion).
5967 if (getPromotedType(l) == getPromotedType(r)) {
5968 if (JSVAL_TAG(l) == JSVAL_OBJECT || JSVAL_TAG(l) == JSVAL_BOOLEAN) {
5969 cond = (l == r);
5970 } else if (JSVAL_IS_STRING(l)) {
5971 args[0] = r_ins, args[1] = l_ins;
5972 l_ins = lir->insCall(&js_EqualStrings_ci, args);
5973 r_ins = lir->insImm(1);
5974 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
5975 } else {
5976 JS_ASSERT(isNumber(l) && isNumber(r));
5977 cond = (asNumber(l) == asNumber(r));
5978 fp = true;
5980 } else if (JSVAL_IS_NULL(l) && JSVAL_TAG(r) == JSVAL_BOOLEAN) {
5981 l_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
5982 cond = (r == JSVAL_VOID);
5983 } else if (JSVAL_TAG(l) == JSVAL_BOOLEAN && JSVAL_IS_NULL(r)) {
5984 r_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
5985 cond = (l == JSVAL_VOID);
5986 } else if (isNumber(l) && JSVAL_IS_STRING(r)) {
5987 args[0] = r_ins, args[1] = cx_ins;
5988 r_ins = lir->insCall(&js_StringToNumber_ci, args);
5989 cond = (asNumber(l) == js_StringToNumber(cx, JSVAL_TO_STRING(r)));
5990 fp = true;
5991 } else if (JSVAL_IS_STRING(l) && isNumber(r)) {
5992 args[0] = l_ins, args[1] = cx_ins;
5993 l_ins = lir->insCall(&js_StringToNumber_ci, args);
5994 cond = (js_StringToNumber(cx, JSVAL_TO_STRING(l)) == asNumber(r));
5995 fp = true;
5996 } else {
5997 if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
5998 bool isVoid = JSVAL_IS_VOID(l);
5999 guard(isVoid,
6000 lir->ins2(LIR_eq, l_ins, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID))),
6001 BRANCH_EXIT);
6002 if (!isVoid) {
6003 args[0] = l_ins, args[1] = cx_ins;
6004 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6005 l = (l == JSVAL_VOID)
6006 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
6007 : INT_TO_JSVAL(l == JSVAL_TRUE);
6008 return equalityHelper(l, r, l_ins, r_ins, negate,
6009 tryBranchAfterCond, rval);
6011 } else if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
6012 bool isVoid = JSVAL_IS_VOID(r);
6013 guard(isVoid,
6014 lir->ins2(LIR_eq, r_ins, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID))),
6015 BRANCH_EXIT);
6016 if (!isVoid) {
6017 args[0] = r_ins, args[1] = cx_ins;
6018 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6019 r = (r == JSVAL_VOID)
6020 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
6021 : INT_TO_JSVAL(r == JSVAL_TRUE);
6022 return equalityHelper(l, r, l_ins, r_ins, negate,
6023 tryBranchAfterCond, rval);
6025 } else {
6026 if ((JSVAL_IS_STRING(l) || isNumber(l)) && !JSVAL_IS_PRIMITIVE(r)) {
6027 ABORT_IF_XML(r);
6028 return call_imacro(equality_imacros.any_obj);
6030 if (!JSVAL_IS_PRIMITIVE(l) && (JSVAL_IS_STRING(r) || isNumber(r))) {
6031 ABORT_IF_XML(l);
6032 return call_imacro(equality_imacros.obj_any);
6036 l_ins = lir->insImm(0);
6037 r_ins = lir->insImm(1);
6038 cond = false;
6041 /* If the operands aren't numbers, compare them as integers. */
6042 LOpcode op = fp ? LIR_feq : LIR_eq;
6043 LIns* x = lir->ins2(op, l_ins, r_ins);
6044 if (negate) {
6045 x = lir->ins_eq0(x);
6046 cond = !cond;
6049 jsbytecode* pc = cx->fp->regs->pc;
6052 * Don't guard if the same path is always taken. If it isn't, we have to
6053 * fuse comparisons and the following branch, because the interpreter does
6054 * that.
6056 if (tryBranchAfterCond)
6057 fuseIf(pc + 1, cond, x);
6060 * There is no need to write out the result of this comparison if the trace
6061 * ends on this operation.
6063 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
6064 CHECK_STATUS(checkTraceEnd(pc + 1));
6067 * We update the stack after the guard. This is safe since the guard bails
6068 * out at the comparison and the interpreter will therefore re-execute the
6069 * comparison. This way the value of the condition doesn't have to be
6070 * calculated and saved on the stack in most cases.
6072 set(&rval, x);
6074 return JSRS_CONTINUE;
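/*
 * Worked example of the spec-mandated recursion above (operands assumed):
 * recording "1 == true" hits the JSVAL_BOOLEAN case for the rhs, guards that
 * it is not undefined, converts it via js_BooleanOrUndefinedToNumber,
 * rewrites r to the jsval 1, and recurses; the recursive call then takes the
 * number == number path with cond == true and fp == true, so a single
 * LIR_feq is emitted.
 */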
6077 JS_REQUIRES_STACK JSRecordingStatus
6078 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
6080 jsval& r = stackval(-1);
6081 jsval& l = stackval(-2);
6082 LIns* x = NULL;
6083 bool cond;
6084 LIns* l_ins = get(&l);
6085 LIns* r_ins = get(&r);
6086 bool fp = false;
6087 jsdouble lnum, rnum;
6090 * 11.8.5 if either argument is an object with a function-valued valueOf
6091 * property; if both arguments are objects with non-function-valued valueOf
6092 * properties, abort.
6094 if (!JSVAL_IS_PRIMITIVE(l)) {
6095 ABORT_IF_XML(l);
6096 if (!JSVAL_IS_PRIMITIVE(r)) {
6097 ABORT_IF_XML(r);
6098 return call_imacro(binary_imacros.obj_obj);
6100 return call_imacro(binary_imacros.obj_any);
6102 if (!JSVAL_IS_PRIMITIVE(r)) {
6103 ABORT_IF_XML(r);
6104 return call_imacro(binary_imacros.any_obj);
6107 /* 11.8.5 steps 3, 16-21. */
6108 if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
6109 LIns* args[] = { r_ins, l_ins };
6110 l_ins = lir->insCall(&js_CompareStrings_ci, args);
6111 r_ins = lir->insImm(0);
6112 cond = evalCmp(op, JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
6113 goto do_comparison;
6116 /* 11.8.5 steps 4-5. */
6117 if (!JSVAL_IS_NUMBER(l)) {
6118 LIns* args[] = { l_ins, cx_ins };
6119 switch (JSVAL_TAG(l)) {
6120 case JSVAL_BOOLEAN:
6121 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6122 break;
6123 case JSVAL_STRING:
6124 l_ins = lir->insCall(&js_StringToNumber_ci, args);
6125 break;
6126 case JSVAL_OBJECT:
6127 if (JSVAL_IS_NULL(l)) {
6128 l_ins = lir->insImmq(0);
6129 break;
6131 // FALL THROUGH
6132 case JSVAL_INT:
6133 case JSVAL_DOUBLE:
6134 default:
6135 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
6136 "have been handled at start of method");
6137 ABORT_TRACE("safety belt");
6140 if (!JSVAL_IS_NUMBER(r)) {
6141 LIns* args[] = { r_ins, cx_ins };
6142 switch (JSVAL_TAG(r)) {
6143 case JSVAL_BOOLEAN:
6144 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6145 break;
6146 case JSVAL_STRING:
6147 r_ins = lir->insCall(&js_StringToNumber_ci, args);
6148 break;
6149 case JSVAL_OBJECT:
6150 if (JSVAL_IS_NULL(r)) {
6151 r_ins = lir->insImmq(0);
6152 break;
6154 // FALL THROUGH
6155 case JSVAL_INT:
6156 case JSVAL_DOUBLE:
6157 default:
6158 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
6159 "have been handled at start of method");
6160 ABORT_TRACE("safety belt");
6164 jsval tmp = JSVAL_NULL;
6165 JSAutoTempValueRooter tvr(cx, 1, &tmp);
6167 tmp = l;
6168 lnum = js_ValueToNumber(cx, &tmp);
6169 tmp = r;
6170 rnum = js_ValueToNumber(cx, &tmp);
6172 cond = evalCmp(op, lnum, rnum);
6173 fp = true;
6175 /* 11.8.5 steps 6-15. */
6176 do_comparison:
6177 /* If we did not compare as doubles (the string path compares an integer result against 0), use the integer form of the comparison. */
6178 if (!fp) {
6179 JS_ASSERT(op >= LIR_feq && op <= LIR_fge);
6180 op = LOpcode(op + (LIR_eq - LIR_feq));
6182 x = lir->ins2(op, l_ins, r_ins);
6184 jsbytecode* pc = cx->fp->regs->pc;
6187 * Don't guard if the same path is always taken. If it isn't, we have to
6188 * fuse comparisons and the following branch, because the interpreter does
6189 * that.
6191 if (tryBranchAfterCond)
6192 fuseIf(pc + 1, cond, x);
6195 * There is no need to write out the result of this comparison if the trace
6196 * ends on this operation.
6198 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
6199 CHECK_STATUS(checkTraceEnd(pc + 1));
6202 * We update the stack after the guard. This is safe since the guard bails
6203 * out at the comparison and the interpreter will therefore re-execute the
6204 * comparison. This way the value of the condition doesn't have to be
6205 * calculated and saved on the stack in most cases.
6207 set(&l, x);
6209 return JSRS_CONTINUE;
6212 JS_REQUIRES_STACK JSRecordingStatus
6213 TraceRecorder::unary(LOpcode op)
6215 jsval& v = stackval(-1);
6216 bool intop = !(op & LIR64);
6217 if (isNumber(v)) {
6218 LIns* a = get(&v);
6219 if (intop)
6220 a = f2i(a);
6221 a = lir->ins1(op, a);
6222 if (intop)
6223 a = lir->ins1(LIR_i2f, a);
6224 set(&v, a);
6225 return JSRS_CONTINUE;
6227 return JSRS_STOP;
6230 JS_REQUIRES_STACK JSRecordingStatus
6231 TraceRecorder::binary(LOpcode op)
6233 jsval& r = stackval(-1);
6234 jsval& l = stackval(-2);
6236 if (!JSVAL_IS_PRIMITIVE(l)) {
6237 ABORT_IF_XML(l);
6238 if (!JSVAL_IS_PRIMITIVE(r)) {
6239 ABORT_IF_XML(r);
6240 return call_imacro(binary_imacros.obj_obj);
6242 return call_imacro(binary_imacros.obj_any);
6244 if (!JSVAL_IS_PRIMITIVE(r)) {
6245 ABORT_IF_XML(r);
6246 return call_imacro(binary_imacros.any_obj);
6249 bool intop = !(op & LIR64);
6250 LIns* a = get(&l);
6251 LIns* b = get(&r);
6253 bool leftIsNumber = isNumber(l);
6254 jsdouble lnum = leftIsNumber ? asNumber(l) : 0;
6256 bool rightIsNumber = isNumber(r);
6257 jsdouble rnum = rightIsNumber ? asNumber(r) : 0;
6259 if ((op >= LIR_sub && op <= LIR_ush) || // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
6260 (op >= LIR_fsub && op <= LIR_fdiv)) { // fsub, fmul, fdiv
6261 LIns* args[2];
6262 if (JSVAL_IS_STRING(l)) {
6263 args[0] = a;
6264 args[1] = cx_ins;
6265 a = lir->insCall(&js_StringToNumber_ci, args);
6266 lnum = js_StringToNumber(cx, JSVAL_TO_STRING(l));
6267 leftIsNumber = true;
6269 if (JSVAL_IS_STRING(r)) {
6270 args[0] = b;
6271 args[1] = cx_ins;
6272 b = lir->insCall(&js_StringToNumber_ci, args);
6273 rnum = js_StringToNumber(cx, JSVAL_TO_STRING(r));
6274 rightIsNumber = true;
6277 if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
6278 LIns* args[] = { a, cx_ins };
6279 a = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6280 lnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_PSEUDO_BOOLEAN(l));
6281 leftIsNumber = true;
6283 if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
6284 LIns* args[] = { b, cx_ins };
6285 b = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6286 rnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_PSEUDO_BOOLEAN(r));
6287 rightIsNumber = true;
6289 if (leftIsNumber && rightIsNumber) {
6290 if (intop) {
6291 LIns *args[] = { a };
6292 a = lir->insCall(op == LIR_ush ? &js_DoubleToUint32_ci : &js_DoubleToInt32_ci, args);
6293 b = f2i(b);
6295 a = alu(op, lnum, rnum, a, b);
6296 if (intop)
6297 a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
6298 set(&l, a);
6299 return JSRS_CONTINUE;
6301 return JSRS_STOP;
6304 JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);
6306 bool
6307 TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
6309 JS_ASSERT(op_offset < sizeof(JSObjectOps));
6310 JS_ASSERT(op_offset % sizeof(void *) == 0);
6312 #define OP(ops) (*(void **) ((uint8 *) (ops) + op_offset))
6313 void* ptr = OP(map->ops);
6314 if (ptr != OP(&js_ObjectOps))
6315 return false;
6316 #undef OP
6318 ops_ins = addName(lir->insLoad(LIR_ldp, map_ins, int(offsetof(JSObjectMap, ops))), "ops");
6319 LIns* n = lir->insLoad(LIR_ldp, ops_ins, op_offset);
6320 guard(true,
6321 addName(lir->ins2(LIR_eq, n, INS_CONSTPTR(ptr)), "guard(native-map)"),
6322 BRANCH_EXIT);
6324 return true;
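/*
 * The OP macro above is plain pointer arithmetic; this is the un-macro'd,
 * single-hook form of the record-time check, as a sketch (not compiled),
 * using getProperty as the example hook selected by op_offset:
 */
#if 0
void* recorded = (void*) map->ops->getProperty;
if (recorded != (void*) js_ObjectOps.getProperty)
    return false;   /* non-native hook: the caller aborts the trace */
/* otherwise guard at run time that the loaded hook still equals 'recorded' */
#endif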
6327 JS_REQUIRES_STACK JSRecordingStatus
6328 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
6330 jsbytecode* pc = cx->fp->regs->pc;
6331 JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_SETNAME && *pc != JSOP_SETPROP);
6333 // Mimic the interpreter's special case for dense arrays by skipping up one
6334 // hop along the proto chain when accessing a named (not indexed) property,
6335 // typically to find Array.prototype methods.
6336 JSObject* aobj = obj;
6337 if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
6338 guardDenseArray(obj, obj_ins, BRANCH_EXIT);
6339 aobj = OBJ_GET_PROTO(cx, obj);
6340 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6343 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
6344 LIns* ops_ins;
6346 // Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops,
6347 // which are required in order to use native objects (those whose maps are
6348 // scopes), or on even narrower conditions required because the cache-miss
6349 // case will call a particular object-op (js_GetProperty, js_SetProperty).
6351 // We parameterize using offsetof and guard on match against the hook at
6352 // the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
6353 // guards the js_SetProperty case.
6354 uint32 format = js_CodeSpec[*pc].format;
6355 uint32 mode = JOF_MODE(format);
6357 // No need to guard native-ness of global object.
6358 JS_ASSERT(OBJ_IS_NATIVE(globalObj));
6359 if (aobj != globalObj) {
6360 size_t op_offset = offsetof(JSObjectOps, objectMap);
6361 if (mode == JOF_PROP || mode == JOF_VARPROP) {
6362 JS_ASSERT(!(format & JOF_SET));
6363 op_offset = offsetof(JSObjectOps, getProperty);
6364 } else {
6365 JS_ASSERT(mode == JOF_NAME);
6368 if (!map_is_native(aobj->map, map_ins, ops_ins, op_offset))
6369 ABORT_TRACE("non-native map");
6372 JSAtom* atom;
6373 JSPropCacheEntry* entry;
6374 PROPERTY_CACHE_TEST(cx, pc, aobj, obj2, entry, atom);
6375 if (!atom) {
6376 // Null atom means that obj2 is locked and must now be unlocked.
6377 JS_UNLOCK_OBJ(cx, obj2);
6378 } else {
6379 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
6380 jsid id = ATOM_TO_JSID(atom);
6381 JSProperty* prop;
6382 if (JOF_OPMODE(*pc) == JOF_NAME) {
6383 JS_ASSERT(aobj == obj);
6384 entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);
6386 if (!entry)
6387 ABORT_TRACE_ERROR("error in js_FindPropertyHelper");
6388 if (entry == JS_NO_PROP_CACHE_FILL)
6389 ABORT_TRACE("cannot cache name");
6390 } else {
6391 int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
6392 cx->resolveFlags,
6393 &obj2, &prop);
6395 if (protoIndex < 0)
6396 ABORT_TRACE_ERROR("error in js_LookupPropertyWithFlags");
6398 if (prop) {
6399 if (!OBJ_IS_NATIVE(obj2)) {
6400 OBJ_DROP_PROPERTY(cx, obj2, prop);
6401 ABORT_TRACE("property found on non-native object");
6403 entry = js_FillPropertyCache(cx, aobj, 0, protoIndex, obj2,
6404 (JSScopeProperty*) prop, false);
6405 JS_ASSERT(entry);
6406 if (entry == JS_NO_PROP_CACHE_FILL)
6407 entry = NULL;
6411 if (!prop) {
6412 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
6413 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
6414 // the global it's assigning does not yet exist, create it.
6415 obj2 = obj;
6417 // Use PCVAL_NULL to return "no such property" to our caller.
6418 pcval = PCVAL_NULL;
6419 return JSRS_CONTINUE;
6422 OBJ_DROP_PROPERTY(cx, obj2, prop);
6423 if (!entry)
6424 ABORT_TRACE("failed to fill property cache");
6427 if (wasDeepAborted())
6428 ABORT_TRACE("deep abort from property lookup");
6430 #ifdef JS_THREADSAFE
6431 // There's a potential race in any JS_THREADSAFE embedding that's nuts
6432 // enough to share mutable objects on the scope or proto chain, but we
6433 // don't care about such insane embeddings. Anyway, the (scope, proto)
6434 // entry->vcap coordinates must reach obj2 from aobj at this point.
6435 JS_ASSERT(cx->requestDepth);
6436 #endif
6438 // Emit guard(s), common code for both hit and miss cases.
6439 // Check for first-level cache hit and guard on kshape if possible.
6440 // Otherwise guard on key object exact match.
6441 if (PCVCAP_TAG(entry->vcap) <= 1) {
6442 if (aobj != globalObj) {
6443 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6444 "shape");
6445 guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
6446 BRANCH_EXIT);
6448 } else {
6449 #ifdef DEBUG
6450 JSOp op = js_GetOpcode(cx, cx->fp->script, pc);
6451 JSAtom *pcatom;
6452 if (op == JSOP_LENGTH) {
6453 pcatom = cx->runtime->atomState.lengthAtom;
6454 } else {
6455 ptrdiff_t pcoff = (JOF_TYPE(js_CodeSpec[op].format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
6456 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, pcatom);
6458 JS_ASSERT(entry->kpc == (jsbytecode *) pcatom);
6459 JS_ASSERT(entry->kshape == jsuword(aobj));
6460 #endif
6461 if (aobj != globalObj && !obj_ins->isconstp()) {
6462 guard(true, addName(lir->ins2i(LIR_eq, obj_ins, entry->kshape), "guard(kobj)"),
6463 BRANCH_EXIT);
6467 // For any hit that goes up the scope and/or proto chains, we will need to
6468 // guard on the shape of the object containing the property.
6469 if (PCVCAP_TAG(entry->vcap) >= 1) {
6470 jsuword vcap = entry->vcap;
6471 uint32 vshape = PCVCAP_SHAPE(vcap);
6472 JS_ASSERT(OBJ_SHAPE(obj2) == vshape);
6474 LIns* obj2_ins;
6475 if (PCVCAP_TAG(entry->vcap) == 1) {
6476 // Duplicate the special case in PROPERTY_CACHE_TEST.
6477 obj2_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6478 guard(false, lir->ins_eq0(obj2_ins), BRANCH_EXIT);
6479 } else {
6480 obj2_ins = INS_CONSTPTR(obj2);
6482 map_ins = lir->insLoad(LIR_ldp, obj2_ins, (int)offsetof(JSObject, map));
6483 if (!map_is_native(obj2->map, map_ins, ops_ins))
6484 ABORT_TRACE("non-native map");
6486 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6487 "shape");
6488 guard(true,
6489 addName(lir->ins2i(LIR_eq, shape_ins, vshape), "guard(vshape)"),
6490 BRANCH_EXIT);
6493 pcval = entry->vword;
6494 return JSRS_CONTINUE;
6497 void
6498 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins,
6499 const char *name)
6501 if (!dslots_ins)
6502 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
6503 addName(lir->insStorei(v_ins, dslots_ins, slot * sizeof(jsval)), name);
6506 void
6507 TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
6509 if (slot < JS_INITIAL_NSLOTS) {
6510 addName(lir->insStorei(v_ins, obj_ins,
6511 offsetof(JSObject, fslots) + slot * sizeof(jsval)),
6512 "set_slot(fslots)");
6513 } else {
6514 stobj_set_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins, v_ins,
6515 "set_slot(dslots)");
6519 LIns*
6520 TraceRecorder::stobj_get_fslot(LIns* obj_ins, unsigned slot)
6522 JS_ASSERT(slot < JS_INITIAL_NSLOTS);
6523 return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
6526 LIns*
6527 TraceRecorder::stobj_get_dslot(LIns* obj_ins, unsigned index, LIns*& dslots_ins)
6529 if (!dslots_ins)
6530 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
6531 return lir->insLoad(LIR_ldp, dslots_ins, index * sizeof(jsval));
6534 LIns*
6535 TraceRecorder::stobj_get_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins)
6537 if (slot < JS_INITIAL_NSLOTS)
6538 return stobj_get_fslot(obj_ins, slot);
6539 return stobj_get_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins);
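/*
 * Worked example (slot numbers assumed, and JS_INITIAL_NSLOTS taken to be 5
 * purely for illustration): slot 3 reads straight out of obj->fslots[3],
 * while slot 7 first loads obj->dslots and then reads dslots[7 - 5].
 */
#if 0
LIns* dslots_ins = NULL;
LIns* v3 = stobj_get_slot(obj_ins, 3, dslots_ins);  /* fslots path */
LIns* v7 = stobj_get_slot(obj_ins, 7, dslots_ins);  /* dslots path */
#endif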
6542 JSRecordingStatus
6543 TraceRecorder::native_set(LIns* obj_ins, JSScopeProperty* sprop, LIns*& dslots_ins, LIns* v_ins)
6545 if (SPROP_HAS_STUB_SETTER(sprop) && sprop->slot != SPROP_INVALID_SLOT) {
6546 stobj_set_slot(obj_ins, sprop->slot, dslots_ins, v_ins);
6547 return JSRS_CONTINUE;
6549 ABORT_TRACE("unallocated or non-stub sprop");
6552 JSRecordingStatus
6553 TraceRecorder::native_get(LIns* obj_ins, LIns* pobj_ins, JSScopeProperty* sprop,
6554 LIns*& dslots_ins, LIns*& v_ins)
6556 if (!SPROP_HAS_STUB_GETTER(sprop))
6557 return JSRS_STOP;
6559 if (sprop->slot != SPROP_INVALID_SLOT)
6560 v_ins = stobj_get_slot(pobj_ins, sprop->slot, dslots_ins);
6561 else
6562 v_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6563 return JSRS_CONTINUE;
6566 JS_REQUIRES_STACK void
6567 TraceRecorder::box_jsval(jsval v, LIns*& v_ins)
6569 if (isNumber(v)) {
6570 LIns* args[] = { v_ins, cx_ins };
6571 v_ins = lir->insCall(&js_BoxDouble_ci, args);
6572 guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
6573 OOM_EXIT);
6574 return;
6576 switch (JSVAL_TAG(v)) {
6577 case JSVAL_BOOLEAN:
6578 v_ins = lir->ins2i(LIR_pior, lir->ins2i(LIR_pilsh, v_ins, JSVAL_TAGBITS), JSVAL_BOOLEAN);
6579 return;
6580 case JSVAL_OBJECT:
6581 return;
6582 default:
6583 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
6584 v_ins = lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
6585 return;
6589 JS_REQUIRES_STACK void
6590 TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins, VMSideExit* exit)
6592 if (isNumber(v)) {
6593 // JSVAL_IS_NUMBER(v)
6594 guard(false,
6595 lir->ins_eq0(lir->ins2(LIR_pior,
6596 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
6597 lir->ins2i(LIR_eq,
6598 lir->ins2(LIR_piand, v_ins,
6599 INS_CONST(JSVAL_TAGMASK)),
6600 JSVAL_DOUBLE))),
6601 exit);
6602 LIns* args[] = { v_ins };
6603 v_ins = lir->insCall(&js_UnboxDouble_ci, args);
6604 return;
6606 switch (JSVAL_TAG(v)) {
6607 case JSVAL_BOOLEAN:
6608 guard(true,
6609 lir->ins2i(LIR_eq,
6610 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
6611 JSVAL_BOOLEAN),
6612 exit);
6613 v_ins = lir->ins2i(LIR_ush, v_ins, JSVAL_TAGBITS);
6614 return;
6615 case JSVAL_OBJECT:
6616 if (JSVAL_IS_NULL(v)) {
6617 // JSVAL_NULL maps to type JSVAL_TNULL, so insist that v_ins == 0 here.
6618 guard(true, lir->ins_eq0(v_ins), exit);
6619 } else {
6620 guard(false, lir->ins_eq0(v_ins), exit);
6621 guard(true,
6622 lir->ins2i(LIR_eq,
6623 lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
6624 JSVAL_OBJECT),
6625 exit);
6626 guard(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)),
6627 lir->ins2(LIR_eq,
6628 lir->ins2(LIR_piand,
6629 lir->insLoad(LIR_ldp, v_ins, offsetof(JSObject, classword)),
6630 INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
6631 INS_CONSTPTR(&js_FunctionClass)),
6632 exit);
6634 return;
6635 default:
6636 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
6637 guard(true,
6638 lir->ins2i(LIR_eq,
6639 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
6640 JSVAL_STRING),
6641 exit);
6642 v_ins = lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
6643 return;
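/*
 * A sketch of the tag layout box_jsval/unbox_jsval rely on (a 32-bit jsval
 * is assumed here): the low JSVAL_TAGBITS bits carry the tag, the rest the
 * payload. Not compiled; for illustration only.
 */
#if 0
jsval  boxed = ((jsval) JS_TRUE << JSVAL_TAGBITS) | JSVAL_BOOLEAN; /* box true */
JSBool again = (JSBool) (boxed >> JSVAL_TAGBITS);                  /* unbox: JS_TRUE */

JSString* str    = NULL;                               /* placeholder payload */
jsval     tagged = (jsval) str | JSVAL_STRING;         /* strings: payload | tag */
JSString* s      = (JSString*) (tagged & ~JSVAL_TAGMASK);
#endif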
6647 JS_REQUIRES_STACK JSRecordingStatus
6648 TraceRecorder::getThis(LIns*& this_ins)
6651 * In global code, bake in the global object as 'this' object.
6653 if (!cx->fp->callee) {
6654 JS_ASSERT(callDepth == 0);
6655 JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
6656 if (!thisObj)
6657 ABORT_TRACE_ERROR("error in js_ComputeThisForFrame");
6658 this_ins = INS_CONSTPTR(thisObj);
6661 * We don't have argv[-1] in global code, so we don't update the tracker here.
6663 return JSRS_CONTINUE;
6666 jsval& thisv = cx->fp->argv[-1];
6669 * Traces type-specialize between null and objects, so if we currently see a null
6670 * value in argv[-1], this trace will only match if we see null at runtime as well.
6671 * Bake in the global object as 'this' object, updating the tracker as well. We
6672 * can only detect this condition prior to calling js_ComputeThisForFrame, since it
6673 * updates the interpreter's copy of argv[-1].
6675 if (JSVAL_IS_NULL(thisv)) {
6676 JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
6677 if (!thisObj)
6678 ABORT_TRACE_ERROR("js_ComputeThisForName failed");
6679 JS_ASSERT(!JSVAL_IS_PRIMITIVE(thisv));
6680 if (thisObj != globalObj)
6681 ABORT_TRACE("global object was wrapped while recording");
6682 this_ins = INS_CONSTPTR(thisObj);
6683 set(&thisv, this_ins);
6684 return JSRS_CONTINUE;
6686 this_ins = get(&thisv);
6689 * mrbkap says it's not necessary to ever call the thisObject hook if obj is not the global
6690 * object, because the only implicit way to obtain a reference to an object that must be
6691 * wrapped is via the global object. All other sources (API, explicit references) already
6692 * are wrapped as we obtain them through XPConnect. The only exceptions are With objects,
6693 * which have to call the thisObject hook. We don't trace those cases.
6696 if (guardClass(JSVAL_TO_OBJECT(thisv), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
6697 ABORT_TRACE("can't trace getThis on With object");
6699 return JSRS_CONTINUE;
6703 LIns*
6704 TraceRecorder::getStringLength(LIns* str_ins)
6706 LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, length));
6708 LIns* masked_len_ins = lir->ins2(LIR_piand,
6709 len_ins,
6710 INS_CONSTWORD(JSSTRING_LENGTH_MASK));
6712 return
6713 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
6714 len_ins,
6715 INS_CONSTWORD(JSSTRFLAG_DEPENDENT))),
6716 masked_len_ins,
6717 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
6718 len_ins,
6719 INS_CONSTWORD(JSSTRFLAG_PREFIX))),
6720 lir->ins2(LIR_piand,
6721 len_ins,
6722 INS_CONSTWORD(JSSTRDEP_LENGTH_MASK)),
6723 masked_len_ins));
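/*
 * An interpreter-side sketch of the same decoding (flag names are real, and
 * the control flow mirrors the nested ins_choose above): pick the length
 * mask based on the DEPENDENT/PREFIX bits of JSString::length.
 */
#if 0
static size_t
ExampleStringLength(size_t lengthWord)
{
    if (!(lengthWord & JSSTRFLAG_DEPENDENT))
        return lengthWord & JSSTRING_LENGTH_MASK;   /* flat string */
    if (!(lengthWord & JSSTRFLAG_PREFIX))
        return lengthWord & JSSTRDEP_LENGTH_MASK;   /* dependent slice */
    return lengthWord & JSSTRING_LENGTH_MASK;       /* dependent prefix */
}
#endif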
6726 JS_REQUIRES_STACK bool
6727 TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, VMSideExit* exit)
6729 bool cond = STOBJ_GET_CLASS(obj) == clasp;
6731 LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
6732 class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~JSSLOT_CLASS_MASK_BITS));
6734 char namebuf[32];
6735 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
6736 guard(cond, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
6737 return cond;
6740 JS_REQUIRES_STACK bool
6741 TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, ExitType exitType)
6743 return guardClass(obj, obj_ins, &js_ArrayClass, snapshot(exitType));
6746 JS_REQUIRES_STACK JSRecordingStatus
6747 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, ExitType exitType)
6750 * Guard that no object along the prototype chain has any indexed properties which
6751 * might become visible through holes in the array.
6753 VMSideExit* exit = snapshot(exitType);
6755 if (js_PrototypeHasIndexedProperties(cx, obj))
6756 return JSRS_STOP;
6758 while ((obj = JSVAL_TO_OBJECT(obj->fslots[JSSLOT_PROTO])) != NULL) {
6759 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6760 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
6761 LIns* ops_ins;
6762 if (!map_is_native(obj->map, map_ins, ops_ins))
6763 ABORT_TRACE("non-native object involved along prototype chain");
6765 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6766 "shape");
6767 guard(true,
6768 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
6769 exit);
6771 return JSRS_CONTINUE;
6774 JSRecordingStatus
6775 TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
6777 if (obj == globalObj)
6778 ABORT_TRACE("reference aliases global object");
6779 guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT);
6780 return JSRS_CONTINUE;
6783 JS_REQUIRES_STACK void
6784 TraceRecorder::clearFrameSlotsFromCache()
6786 /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
6787 VM stack might map to different locations on the native stack depending on the
6788 number of arguments of the next call, so we have to make sure we map
6789 those into the cache with the right offsets. */
6790 JSStackFrame* fp = cx->fp;
6791 jsval* vp;
6792 jsval* vpstop;
6793 if (fp->callee) {
6794 vp = &fp->argv[-2];
6795 vpstop = &fp->argv[argSlots(fp)];
6796 while (vp < vpstop)
6797 nativeFrameTracker.set(vp++, (LIns*)0);
6799 vp = &fp->slots[0];
6800 vpstop = &fp->slots[fp->script->nslots];
6801 while (vp < vpstop)
6802 nativeFrameTracker.set(vp++, (LIns*)0);
6805 JS_REQUIRES_STACK JSRecordingStatus
6806 TraceRecorder::record_EnterFrame()
6808 JSStackFrame* fp = cx->fp;
6810 if (++callDepth >= MAX_CALLDEPTH)
6811 ABORT_TRACE("exceeded maximum call depth");
6812 // FIXME: Allow and attempt to inline a single level of recursion until we compile
6813 // recursive calls as independent trees (459301).
6814 if (fp->script == fp->down->script && fp->down->down && fp->down->down->script == fp->script)
6815 ABORT_TRACE("recursive call");
6817 debug_only_v(printf("EnterFrame %s, callDepth=%d\n",
6818 js_AtomToPrintableString(cx, cx->fp->fun->atom),
6819 callDepth);)
6820 debug_only_v(
6821 js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
6822 printf("----\n");)
6823 LIns* void_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6825 jsval* vp = &fp->argv[fp->argc];
6826 jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
6827 while (vp < vpstop) {
6828 if (vp >= fp->down->regs->sp)
6829 nativeFrameTracker.set(vp, (LIns*)0);
6830 set(vp++, void_ins, true);
6833 vp = &fp->slots[0];
6834 vpstop = vp + fp->script->nfixed;
6835 while (vp < vpstop)
6836 set(vp++, void_ins, true);
6837 return JSRS_CONTINUE;
6840 JS_REQUIRES_STACK JSRecordingStatus
6841 TraceRecorder::record_LeaveFrame()
6843 debug_only_v(
6844 if (cx->fp->fun)
6845 printf("LeaveFrame (back to %s), callDepth=%d\n",
6846 js_AtomToPrintableString(cx, cx->fp->fun->atom),
6847 callDepth);
6849 if (callDepth-- <= 0)
6850 ABORT_TRACE("returned out of a loop we started tracing");
6852 // LeaveFrame gets called after the interpreter popped the frame and
6853 // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
6854 atoms = FrameAtomBase(cx, cx->fp);
6855 set(&stackval(-1), rval_ins, true);
6856 return JSRS_CONTINUE;
6859 JS_REQUIRES_STACK JSRecordingStatus
6860 TraceRecorder::record_JSOP_PUSH()
6862 stack(0, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)));
6863 return JSRS_CONTINUE;
6866 JS_REQUIRES_STACK JSRecordingStatus
6867 TraceRecorder::record_JSOP_POPV()
6869 jsval& rval = stackval(-1);
6870 LIns *rval_ins = get(&rval);
6871 box_jsval(rval, rval_ins);
6873 // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
6874 // frame because POPV appears only in global and eval code and we don't
6875 // trace JSOP_EVAL or leaving the frame where tracing started.
6876 LIns *fp_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp));
6877 lir->insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval));
6878 return JSRS_CONTINUE;
6881 JS_REQUIRES_STACK JSRecordingStatus
6882 TraceRecorder::record_JSOP_ENTERWITH()
6884 return JSRS_STOP;
6887 JS_REQUIRES_STACK JSRecordingStatus
6888 TraceRecorder::record_JSOP_LEAVEWITH()
6890 return JSRS_STOP;
6893 JS_REQUIRES_STACK JSRecordingStatus
6894 TraceRecorder::record_JSOP_RETURN()
6896 /* A return from callDepth 0 terminates the current loop. */
6897 if (callDepth == 0) {
6898 AUDIT(returnLoopExits);
6899 endLoop(traceMonitor);
6900 return JSRS_STOP;
6903 /* If we inlined this function call, make the return value available to the caller code. */
6904 jsval& rval = stackval(-1);
6905 JSStackFrame *fp = cx->fp;
6906 if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
6907 JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
6908 rval_ins = get(&fp->argv[-1]);
6909 } else {
6910 rval_ins = get(&rval);
6912 debug_only_v(printf("returning from %s\n", js_AtomToPrintableString(cx, cx->fp->fun->atom));)
6913 clearFrameSlotsFromCache();
6915 return JSRS_CONTINUE;
6918 JS_REQUIRES_STACK JSRecordingStatus
6919 TraceRecorder::record_JSOP_GOTO()
6922 * If we hit a break, end the loop and generate an always taken loop exit guard.
6923 * For other downward gotos (like if/else) continue recording.
6925 jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->fp->regs->pc);
6927 if (sn && SN_TYPE(sn) == SRC_BREAK) {
6928 AUDIT(breakLoopExits);
6929 endLoop(traceMonitor);
6930 return JSRS_STOP;
6932 return JSRS_CONTINUE;
6935 JS_REQUIRES_STACK JSRecordingStatus
6936 TraceRecorder::record_JSOP_IFEQ()
6938 trackCfgMerges(cx->fp->regs->pc);
6939 return ifop();
6942 JS_REQUIRES_STACK JSRecordingStatus
6943 TraceRecorder::record_JSOP_IFNE()
6945 return ifop();
6948 JS_REQUIRES_STACK JSRecordingStatus
6949 TraceRecorder::record_JSOP_ARGUMENTS()
6951 #if 1
6952 ABORT_TRACE("can't trace arguments yet");
6953 #else
6954 LIns* args[] = { cx_ins };
6955 LIns* a_ins = lir->insCall(&js_Arguments_ci, args);
6956 guard(false, lir->ins_eq0(a_ins), OOM_EXIT);
6957 stack(0, a_ins);
6958 return JSRS_CONTINUE;
6959 #endif
6962 JS_REQUIRES_STACK JSRecordingStatus
6963 TraceRecorder::record_JSOP_DUP()
6965 stack(0, get(&stackval(-1)));
6966 return JSRS_CONTINUE;
6969 JS_REQUIRES_STACK JSRecordingStatus
6970 TraceRecorder::record_JSOP_DUP2()
6972 stack(0, get(&stackval(-2)));
6973 stack(1, get(&stackval(-1)));
6974 return JSRS_CONTINUE;
6977 JS_REQUIRES_STACK JSRecordingStatus
6978 TraceRecorder::record_JSOP_SWAP()
6980 jsval& l = stackval(-2);
6981 jsval& r = stackval(-1);
6982 LIns* l_ins = get(&l);
6983 LIns* r_ins = get(&r);
6984 set(&r, l_ins);
6985 set(&l, r_ins);
6986 return JSRS_CONTINUE;
6989 JS_REQUIRES_STACK JSRecordingStatus
6990 TraceRecorder::record_JSOP_PICK()
6992 jsval* sp = cx->fp->regs->sp;
6993 jsint n = cx->fp->regs->pc[1];
6994 JS_ASSERT(sp - (n+1) >= StackBase(cx->fp));
6995 LIns* top = get(sp - (n+1));
6996 for (jsint i = 0; i < n; ++i)
6997 set(sp - (n+1) + i, get(sp - n + i));
6998 set(&sp[-1], top);
6999 return JSRS_CONTINUE;
7002 JS_REQUIRES_STACK JSRecordingStatus
7003 TraceRecorder::record_JSOP_SETCONST()
7005 return JSRS_STOP;
7008 JS_REQUIRES_STACK JSRecordingStatus
7009 TraceRecorder::record_JSOP_BITOR()
7011 return binary(LIR_or);
7014 JS_REQUIRES_STACK JSRecordingStatus
7015 TraceRecorder::record_JSOP_BITXOR()
7017 return binary(LIR_xor);
7020 JS_REQUIRES_STACK JSRecordingStatus
7021 TraceRecorder::record_JSOP_BITAND()
7023 return binary(LIR_and);
7026 JS_REQUIRES_STACK JSRecordingStatus
7027 TraceRecorder::record_JSOP_EQ()
7029 return equality(false, true);
7032 JS_REQUIRES_STACK JSRecordingStatus
7033 TraceRecorder::record_JSOP_NE()
7035 return equality(true, true);
7038 JS_REQUIRES_STACK JSRecordingStatus
7039 TraceRecorder::record_JSOP_LT()
7041 return relational(LIR_flt, true);
7044 JS_REQUIRES_STACK JSRecordingStatus
7045 TraceRecorder::record_JSOP_LE()
7047 return relational(LIR_fle, true);
7050 JS_REQUIRES_STACK JSRecordingStatus
7051 TraceRecorder::record_JSOP_GT()
7053 return relational(LIR_fgt, true);
7056 JS_REQUIRES_STACK JSRecordingStatus
7057 TraceRecorder::record_JSOP_GE()
7059 return relational(LIR_fge, true);
7062 JS_REQUIRES_STACK JSRecordingStatus
7063 TraceRecorder::record_JSOP_LSH()
7065 return binary(LIR_lsh);
7068 JS_REQUIRES_STACK JSRecordingStatus
7069 TraceRecorder::record_JSOP_RSH()
7071 return binary(LIR_rsh);
7074 JS_REQUIRES_STACK JSRecordingStatus
7075 TraceRecorder::record_JSOP_URSH()
7077 return binary(LIR_ush);
7080 JS_REQUIRES_STACK JSRecordingStatus
7081 TraceRecorder::record_JSOP_ADD()
7083 jsval& r = stackval(-1);
7084 jsval& l = stackval(-2);
7086 if (!JSVAL_IS_PRIMITIVE(l)) {
7087 ABORT_IF_XML(l);
7088 if (!JSVAL_IS_PRIMITIVE(r)) {
7089 ABORT_IF_XML(r);
7090 return call_imacro(add_imacros.obj_obj);
7092 return call_imacro(add_imacros.obj_any);
7094 if (!JSVAL_IS_PRIMITIVE(r)) {
7095 ABORT_IF_XML(r);
7096 return call_imacro(add_imacros.any_obj);
7099 if (JSVAL_IS_STRING(l) || JSVAL_IS_STRING(r)) {
7100 LIns* args[] = { stringify(r), stringify(l), cx_ins };
7101 LIns* concat = lir->insCall(&js_ConcatStrings_ci, args);
7102 guard(false, lir->ins_eq0(concat), OOM_EXIT);
7103 set(&l, concat);
7104 return JSRS_CONTINUE;
7107 return binary(LIR_fadd);
7110 JS_REQUIRES_STACK JSRecordingStatus
7111 TraceRecorder::record_JSOP_SUB()
7113 return binary(LIR_fsub);
7116 JS_REQUIRES_STACK JSRecordingStatus
7117 TraceRecorder::record_JSOP_MUL()
7119 return binary(LIR_fmul);
7122 JS_REQUIRES_STACK JSRecordingStatus
7123 TraceRecorder::record_JSOP_DIV()
7125 return binary(LIR_fdiv);
7128 JS_REQUIRES_STACK JSRecordingStatus
7129 TraceRecorder::record_JSOP_MOD()
7131 jsval& r = stackval(-1);
7132 jsval& l = stackval(-2);
7134 if (!JSVAL_IS_PRIMITIVE(l)) {
7135 ABORT_IF_XML(l);
7136 if (!JSVAL_IS_PRIMITIVE(r)) {
7137 ABORT_IF_XML(r);
7138 return call_imacro(binary_imacros.obj_obj);
7140 return call_imacro(binary_imacros.obj_any);
7142 if (!JSVAL_IS_PRIMITIVE(r)) {
7143 ABORT_IF_XML(r);
7144 return call_imacro(binary_imacros.any_obj);
7147 if (isNumber(l) && isNumber(r)) {
7148 LIns* l_ins = get(&l);
7149 LIns* r_ins = get(&r);
7150 LIns* x;
7151 /* We can't demote this in a filter since we need the actual values of l and r. */
7152 if (isPromote(l_ins) && isPromote(r_ins) && asNumber(l) >= 0 && asNumber(r) > 0) {
7153 LIns* args[] = { ::demote(lir, r_ins), ::demote(lir, l_ins) };
7154 x = lir->insCall(&js_imod_ci, args);
7155 guard(false, lir->ins2(LIR_eq, x, lir->insImm(-1)), BRANCH_EXIT);
7156 x = lir->ins1(LIR_i2f, x);
7157 } else {
7158 LIns* args[] = { r_ins, l_ins };
7159 x = lir->insCall(&js_dmod_ci, args);
7161 set(&l, x);
7162 return JSRS_CONTINUE;
7164 return JSRS_STOP;
7167 JS_REQUIRES_STACK JSRecordingStatus
7168 TraceRecorder::record_JSOP_NOT()
7170 jsval& v = stackval(-1);
7171 if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
7172 set(&v, lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)));
7173 return JSRS_CONTINUE;
7175 if (isNumber(v)) {
7176 LIns* v_ins = get(&v);
7177 set(&v, lir->ins2(LIR_or, lir->ins2(LIR_feq, v_ins, lir->insImmq(0)),
7178 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, v_ins))));
7179 return JSRS_CONTINUE;
7181 if (JSVAL_TAG(v) == JSVAL_OBJECT) {
7182 set(&v, lir->ins_eq0(get(&v)));
7183 return JSRS_CONTINUE;
7185 JS_ASSERT(JSVAL_IS_STRING(v));
7186 set(&v, lir->ins_eq0(lir->ins2(LIR_piand,
7187 lir->insLoad(LIR_ldp, get(&v), (int)offsetof(JSString, length)),
7188 INS_CONSTPTR(reinterpret_cast<void *>(JSSTRING_LENGTH_MASK)))));
7189 return JSRS_CONTINUE;
7192 JS_REQUIRES_STACK JSRecordingStatus
7193 TraceRecorder::record_JSOP_BITNOT()
7195 return unary(LIR_not);
7198 JS_REQUIRES_STACK JSRecordingStatus
7199 TraceRecorder::record_JSOP_NEG()
7201 jsval& v = stackval(-1);
7203 if (!JSVAL_IS_PRIMITIVE(v)) {
7204 ABORT_IF_XML(v);
7205 return call_imacro(unary_imacros.sign);
7208 if (isNumber(v)) {
7209 LIns* a = get(&v);
7211 /* If we're a promoted integer, we have to watch out for 0s since -0 is a double.
7212 Only follow this path if we're not an integer that's 0 and we're not a double
7213 that's negative zero.
7215 if (isPromoteInt(a) &&
7216 (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
7217 (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
7218 -asNumber(v) == (int)-asNumber(v)) {
7219 a = lir->ins1(LIR_neg, ::demote(lir, a));
7220 if (!a->isconst()) {
7221 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
7222 lir->insGuard(LIR_xt, lir->ins1(LIR_ov, a),
7223 createGuardRecord(exit));
7224 lir->insGuard(LIR_xt, lir->ins2(LIR_eq, a, lir->insImm(0)),
7225 createGuardRecord(exit));
7227 a = lir->ins1(LIR_i2f, a);
7228 } else {
7229 a = lir->ins1(LIR_fneg, a);
7232 set(&v, a);
7233 return JSRS_CONTINUE;
7236 if (JSVAL_IS_NULL(v)) {
7237 set(&v, lir->insImmf(-0.0));
7238 return JSRS_CONTINUE;
7241 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_TAG(v) == JSVAL_BOOLEAN);
7243 LIns* args[] = { get(&v), cx_ins };
7244 set(&v, lir->ins1(LIR_fneg,
7245 lir->insCall(JSVAL_IS_STRING(v)
7246 ? &js_StringToNumber_ci
7247 : &js_BooleanOrUndefinedToNumber_ci,
7248 args)));
7249 return JSRS_CONTINUE;
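/*
 * Why the integer path above excludes zero, shown standalone: int32 negation
 * cannot represent -0, while double negation must preserve the sign bit. The
 * LIR_ov guard additionally catches the one remaining int32 overflow, -INT_MIN.
 */
#include <cmath>
#include <cstdio>

static void neg_zero_sketch() {
    int i = 0;
    std::printf("%d\n", -i);                            // prints 0: sign lost
    double d = 0.0;
    std::printf("%d\n", std::signbit(-d) ? 1 : 0);      // prints 1: -0.0 kept
}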
7252 JS_REQUIRES_STACK JSRecordingStatus
7253 TraceRecorder::record_JSOP_POS()
7255 jsval& v = stackval(-1);
7257 if (!JSVAL_IS_PRIMITIVE(v)) {
7258 ABORT_IF_XML(v);
7259 return call_imacro(unary_imacros.sign);
7262 if (isNumber(v))
7263 return JSRS_CONTINUE;
7265 if (JSVAL_IS_NULL(v)) {
7266 set(&v, lir->insImmq(0));
7267 return JSRS_CONTINUE;
7270 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_TAG(v) == JSVAL_BOOLEAN);
7272 LIns* args[] = { get(&v), cx_ins };
7273 set(&v, lir->insCall(JSVAL_IS_STRING(v)
7274 ? &js_StringToNumber_ci
7275 : &js_BooleanOrUndefinedToNumber_ci,
7276 args));
7277 return JSRS_CONTINUE;
7280 JS_REQUIRES_STACK JSRecordingStatus
7281 TraceRecorder::record_JSOP_PRIMTOP()
7283 // Either this opcode does nothing or we couldn't have traced here, because
7284 // we'd have thrown an exception -- so do nothing if we actually hit this.
7285 return JSRS_CONTINUE;
7288 JS_REQUIRES_STACK JSRecordingStatus
7289 TraceRecorder::record_JSOP_OBJTOP()
7291 jsval& v = stackval(-1);
7292 ABORT_IF_XML(v);
7293 return JSRS_CONTINUE;
7296 JSBool
7297 js_Array(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval);
7299 JSBool
7300 js_Object(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
7302 JSBool
7303 js_Date(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
7305 JSRecordingStatus
7306 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
7308 jsval pval;
7310 if (!OBJ_GET_PROPERTY(cx, ctor,
7311 ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
7312 &pval)) {
7313 ABORT_TRACE_ERROR("error getting prototype from constructor");
7315 if (JSVAL_TAG(pval) != JSVAL_OBJECT)
7316 ABORT_TRACE("got primitive prototype from constructor");
7317 #ifdef DEBUG
7318 JSBool ok, found;
7319 uintN attrs;
7320 ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
7321 JS_ASSERT(ok);
7322 JS_ASSERT(found);
7323 JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
7324 #endif
7325 proto_ins = INS_CONSTPTR(JSVAL_TO_OBJECT(pval));
7326 return JSRS_CONTINUE;
7329 JSRecordingStatus
7330 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
7332 JSObject* proto;
7333 if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &proto))
7334 ABORT_TRACE_ERROR("error in js_GetClassPrototype");
7335 proto_ins = INS_CONSTPTR(proto);
7336 return JSRS_CONTINUE;
7339 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSTraceableNative*)1)
7341 JSRecordingStatus
7342 TraceRecorder::newString(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
7344 JS_ASSERT(argc == 1);
7346 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
7347 ABORT_IF_XML(argv[0]);
7348 return call_imacro(new_imacros.String);
7351 LIns* proto_ins;
7352 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
7354 LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
7355 LIns* obj_ins = lir->insCall(&js_String_tn_ci, args);
7356 guard(false, lir->ins_eq0(obj_ins), OOM_EXIT);
7358 set(rval, obj_ins);
7359 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7360 return JSRS_CONTINUE;
7363 JSRecordingStatus
7364 TraceRecorder::newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
7366 LIns *proto_ins;
7367 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
7369 LIns *arr_ins;
7370 if (argc == 0 || (argc == 1 && JSVAL_IS_NUMBER(argv[0]))) {
7371 // arr_ins = js_NewEmptyArray(cx, Array.prototype)
7372 LIns *args[] = { proto_ins, cx_ins };
7373 arr_ins = lir->insCall(&js_NewEmptyArray_ci, args);
7374 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
7375 if (argc == 1) {
7376 // arr_ins.fslots[JSSLOT_ARRAY_LENGTH] = length
7377 lir->insStorei(f2i(get(argv)), // FIXME: is this 64-bit safe?
7378 arr_ins,
7379 offsetof(JSObject, fslots) + JSSLOT_ARRAY_LENGTH * sizeof(jsval));
7381 } else {
7382 // arr_ins = js_NewUninitializedArray(cx, Array.prototype, argc)
7383 LIns *args[] = { INS_CONST(argc), proto_ins, cx_ins };
7384 arr_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
7385 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
7387 // arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
7388 LIns *dslots_ins = NULL;
7389 for (uint32 i = 0; i < argc && !lirbuf->outOMem(); i++) {
7390 LIns *elt_ins = get(argv + i);
7391 box_jsval(argv[i], elt_ins);
7392 stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins, "set_array_elt");
7396 set(rval, arr_ins);
7397 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7398 return JSRS_CONTINUE;
7401 JS_REQUIRES_STACK JSRecordingStatus
7402 TraceRecorder::emitNativeCall(JSTraceableNative* known, uintN argc, LIns* args[])
7404 bool constructing = known->flags & JSTN_CONSTRUCTOR;
7406 if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
7407 // This needs to capture the pre-call state of the stack. So do not set
7408 // pendingTraceableNative before taking this snapshot.
7409 JS_ASSERT(!pendingTraceableNative);
7411 // Take snapshot for deep LeaveTree and store it in cx->bailExit.
7412 // If we are calling a slow native, add information to the side exit
7413 // for SynthesizeSlowNativeFrame.
7414 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
7415 JSObject* funobj = JSVAL_TO_OBJECT(stackval(0 - (2 + argc)));
7416 if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, funobj)))
7417 exit->setNativeCallee(funobj, constructing);
7418 lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
7420 // Tell nanojit not to discard or defer stack writes before this call.
7421 LIns* guardRec = createGuardRecord(exit);
7422 lir->insGuard(LIR_xbarrier, guardRec, guardRec);
7425 LIns* res_ins = lir->insCall(known->builtin, args);
7426 rval_ins = res_ins;
7427 switch (JSTN_ERRTYPE(known)) {
7428 case FAIL_NULL:
7429 guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
7430 break;
7431 case FAIL_NEG:
7432 res_ins = lir->ins1(LIR_i2f, res_ins);
7433 guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmq(0)), OOM_EXIT);
7434 break;
7435 case FAIL_VOID:
7436 guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
7437 break;
7438 case FAIL_COOKIE:
7439 guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
7440 break;
7441 default:;
7444 set(&stackval(0 - (2 + argc)), res_ins);
7447 * The return value will be processed by NativeCallComplete since
7448 * we have to know the actual return value type for calls that return
7449 * jsval (like Array_p_pop).
7451 pendingTraceableNative = known;
7453 return JSRS_CONTINUE;
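/*
 * Schematic of the failure conventions guarded in the switch above
 * (hypothetical names, not the real JSTN_ERRTYPE machinery): each builtin
 * reports errors through a sentinel return value, and the recorder plants an
 * OOM_EXIT guard on exactly that sentinel.
 */
#include <cstdint>

enum ErrTypeSketch { FAIL_NULL_S, FAIL_NEG_S, FAIL_VOID_S, FAIL_COOKIE_S };

/* True when the builtin's raw result means "error: leave the trace". */
static bool builtin_failed(ErrTypeSketch et, intptr_t res, intptr_t sentinel)
{
    switch (et) {
      case FAIL_NULL_S:   return res == 0;        /* returned NULL            */
      case FAIL_NEG_S:    return res < 0;         /* returned a negative      */
      case FAIL_VOID_S:                           /* returned VOID pseudo-boolean */
      case FAIL_COOKIE_S: return res == sentinel; /* returned the error cookie */
    }
    return false;
}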
7457 * Check whether we have a specialized implementation for this native invocation.
7459 JS_REQUIRES_STACK JSRecordingStatus
7460 TraceRecorder::callTraceableNative(JSFunction* fun, uintN argc, bool constructing)
7462 JSTraceableNative* known = FUN_TRCINFO(fun);
7463 JS_ASSERT(known && (JSFastNative)fun->u.n.native == known->native);
7465 JSStackFrame* fp = cx->fp;
7466 jsbytecode *pc = fp->regs->pc;
7468 jsval& fval = stackval(0 - (2 + argc));
7469 jsval& tval = stackval(0 - (1 + argc));
7471 LIns* this_ins = get(&tval);
7473 LIns* args[nanojit::MAXARGS];
7474 do {
7475 if (((known->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
7476 continue;
7478 uintN knownargc = strlen(known->argtypes);
7479 if (argc != knownargc)
7480 continue;
7482 intN prefixc = strlen(known->prefix);
7483 JS_ASSERT(prefixc <= 3);
7484 LIns** argp = &args[argc + prefixc - 1];
7485 char argtype;
7487 #if defined _DEBUG
7488 memset(args, 0xCD, sizeof(args));
7489 #endif
7491 uintN i;
7492 for (i = prefixc; i--; ) {
7493 argtype = known->prefix[i];
7494 if (argtype == 'C') {
7495 *argp = cx_ins;
7496 } else if (argtype == 'T') { /* this, as an object */
7497 if (JSVAL_IS_PRIMITIVE(tval))
7498 goto next_specialization;
7499 *argp = this_ins;
7500 } else if (argtype == 'S') { /* this, as a string */
7501 if (!JSVAL_IS_STRING(tval))
7502 goto next_specialization;
7503 *argp = this_ins;
7504 } else if (argtype == 'f') {
7505 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval));
7506 } else if (argtype == 'p') {
7507 CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval), *argp));
7508 } else if (argtype == 'R') {
7509 *argp = INS_CONSTPTR(cx->runtime);
7510 } else if (argtype == 'P') {
7511 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
7512 // JSOP_GETELEM imacro (bug 476559).
7513 if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
7514 *argp = INS_CONSTPTR(fp->imacpc);
7515 else
7516 *argp = INS_CONSTPTR(pc);
7517 } else if (argtype == 'D') { /* this, as a number */
7518 if (!isNumber(tval))
7519 goto next_specialization;
7520 *argp = this_ins;
7521 } else {
7522 JS_NOT_REACHED("unknown prefix arg type");
7524 argp--;
7527 for (i = knownargc; i--; ) {
7528 jsval& arg = stackval(0 - (i + 1));
7529 *argp = get(&arg);
7531 argtype = known->argtypes[i];
7532 if (argtype == 'd' || argtype == 'i') {
7533 if (!isNumber(arg))
7534 goto next_specialization;
7535 if (argtype == 'i')
7536 *argp = f2i(*argp);
7537 } else if (argtype == 'o') {
7538 if (JSVAL_IS_PRIMITIVE(arg))
7539 goto next_specialization;
7540 } else if (argtype == 's') {
7541 if (!JSVAL_IS_STRING(arg))
7542 goto next_specialization;
7543 } else if (argtype == 'r') {
7544 if (!VALUE_IS_REGEXP(cx, arg))
7545 goto next_specialization;
7546 } else if (argtype == 'f') {
7547 if (!VALUE_IS_FUNCTION(cx, arg))
7548 goto next_specialization;
7549 } else if (argtype == 'v') {
7550 box_jsval(arg, *argp);
7551 } else {
7552 goto next_specialization;
7554 argp--;
7556 #if defined _DEBUG
7557 JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
7558 #endif
7559 return emitNativeCall(known, argc, args);
7561 next_specialization:;
7562 } while ((known++)->flags & JSTN_MORE);
7564 return JSRS_STOP;
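/*
 * Toy decoder (hypothetical helper, standalone C++) for the specialization
 * strings matched above: known->prefix selects implicit leading arguments
 * ('C' context, 'T'/'S'/'D' typed |this|, 'f' callee, 'p' class prototype,
 * 'R' runtime, 'P' pc) and known->argtypes describes each actual argument,
 * one character per slot.
 */
#include <cstdio>
#include <cstring>

static const char* argtype_sketch(char t) {
    switch (t) {
      case 'd': return "number (double)";
      case 'i': return "number, demoted to int32";
      case 'o': return "object";
      case 's': return "string";
      case 'r': return "RegExp object";
      case 'f': return "function object";
      case 'v': return "any value, boxed";
      default:  return "unknown -- reject this specialization";
    }
}

static void describe_native_sketch(const char* argtypes) {
    for (size_t i = 0; i < std::strlen(argtypes); i++)
        std::printf("arg %u: %s\n", unsigned(i), argtype_sketch(argtypes[i]));
}
/* e.g. describe_native_sketch("si") for a native taking (string, int32) */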
7567 JS_REQUIRES_STACK JSRecordingStatus
7568 TraceRecorder::callNative(uintN argc, JSOp mode)
7570 LIns* args[5];
7572 JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_APPLY);
7574 jsval* vp = &stackval(0 - (2 + argc));
7575 JSObject* funobj = JSVAL_TO_OBJECT(vp[0]);
7576 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, funobj);
7578 if (fun->flags & JSFUN_TRACEABLE) {
7579 JSRecordingStatus status;
7580 if ((status = callTraceableNative(fun, argc, mode == JSOP_NEW)) != JSRS_STOP)
7581 return status;
7584 JSFastNative native = (JSFastNative)fun->u.n.native;
7585 if (native == js_fun_apply || native == js_fun_call)
7586 ABORT_TRACE("trying to call native apply or call");
7588 // Allocate the vp vector and emit code to root it.
7589 uintN vplen = 2 + JS_MAX(argc, FUN_MINARGS(fun)) + fun->u.n.extra;
7590 if (!(fun->flags & JSFUN_FAST_NATIVE))
7591 vplen++; // slow native return value slot
7592 lir->insStorei(INS_CONST(vplen), cx_ins, offsetof(JSContext, nativeVpLen));
7593 LIns* invokevp_ins = lir->insAlloc(vplen * sizeof(jsval));
7594 lir->insStorei(invokevp_ins, cx_ins, offsetof(JSContext, nativeVp));
7596 // vp[0] is the callee.
7597 lir->insStorei(INS_CONSTWORD(OBJECT_TO_JSVAL(funobj)), invokevp_ins, 0);
7599 // Calculate |this|.
7600 LIns* this_ins;
7601 if (mode == JSOP_NEW) {
7602 JSClass* clasp = fun->u.n.clasp;
7603 JS_ASSERT(clasp != &js_SlowArrayClass);
7604 if (!clasp)
7605 clasp = &js_ObjectClass;
7606 JS_ASSERT(((jsuword) clasp & 3) == 0);
7608 // Abort on |new Function|. js_NewInstance would allocate a regular-
7609 // sized JSObject, not a Function-sized one. (The Function ctor would
7610 // deep-bail anyway but let's not go there.)
7611 if (clasp == &js_FunctionClass)
7612 ABORT_TRACE("new Function");
7614 if (clasp->getObjectOps)
7615 ABORT_TRACE("new with non-native ops");
7617 args[0] = INS_CONSTPTR(funobj);
7618 args[1] = INS_CONSTPTR(clasp);
7619 args[2] = cx_ins;
7620 newobj_ins = lir->insCall(&js_NewInstance_ci, args);
7621 guard(false, lir->ins_eq0(newobj_ins), OOM_EXIT);
7622 this_ins = newobj_ins; // boxing an object is a no-op
7623 } else if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
7624 this_ins = INS_CONSTWORD(OBJECT_TO_JSVAL(OBJ_GET_PARENT(cx, funobj)));
7625 } else {
7626 this_ins = get(&vp[1]);
7628 * For fast natives, 'null' or primitives are fine as a 'this' value.
7629 * For slow natives we have to ensure the object is substituted for the
7630 * appropriate global object or boxed object value. JSOP_NEW allocates its
7631 * own object so it's guaranteed to have a valid 'this' value.
7633 if (!(fun->flags & JSFUN_FAST_NATIVE)) {
7634 if (JSVAL_IS_NULL(vp[1])) {
7635 JSObject* thisObj = js_ComputeThis(cx, JS_FALSE, vp + 2);
7636 if (!thisObj)
7637 ABORT_TRACE_ERROR("error in js_ComputeThis");
7638 this_ins = INS_CONSTPTR(thisObj);
7639 } else if (!JSVAL_IS_OBJECT(vp[1])) {
7640 ABORT_TRACE("slow native(primitive, args)");
7641 } else {
7642 if (guardClass(JSVAL_TO_OBJECT(vp[1]), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
7643 ABORT_TRACE("can't trace slow native invocation on With object");
7645 this_ins = lir->ins_choose(lir->ins_eq0(stobj_get_fslot(this_ins, JSSLOT_PARENT)),
7646 INS_CONSTPTR(globalObj),
7647 this_ins);
7650 box_jsval(vp[1], this_ins);
7652 lir->insStorei(this_ins, invokevp_ins, 1 * sizeof(jsval));
7654 // Populate argv.
7655 for (uintN n = 2; n < 2 + argc; n++) {
7656 LIns* i = get(&vp[n]);
7657 box_jsval(vp[n], i);
7658 lir->insStorei(i, invokevp_ins, n * sizeof(jsval));
7660 // For a very long argument list we might run out of LIR space, so
7661 // check inside the loop.
7662 if (lirbuf->outOMem())
7663 ABORT_TRACE("out of memory in argument list");
7666 // Populate extra slots, including the return value slot for a slow native.
7667 if (2 + argc < vplen) {
7668 LIns* undef_ins = INS_CONSTWORD(JSVAL_VOID);
7669 for (uintN n = 2 + argc; n < vplen; n++) {
7670 lir->insStorei(undef_ins, invokevp_ins, n * sizeof(jsval));
7672 if (lirbuf->outOMem())
7673 ABORT_TRACE("out of memory in extra slots");
7677 // Set up arguments for the JSNative or JSFastNative.
7678 uint32 types;
7679 if (fun->flags & JSFUN_FAST_NATIVE) {
7680 if (mode == JSOP_NEW)
7681 ABORT_TRACE("untraceable fast native constructor");
7682 native_rval_ins = invokevp_ins;
7683 args[0] = invokevp_ins;
7684 args[1] = lir->insImm(argc);
7685 args[2] = cx_ins;
7686 types = ARGSIZE_LO | ARGSIZE_LO << 2 | ARGSIZE_LO << 4 | ARGSIZE_LO << 6;
7687 } else {
7688 native_rval_ins = lir->ins2i(LIR_piadd, invokevp_ins, int32_t((vplen - 1) * sizeof(jsval)));
7689 args[0] = native_rval_ins;
7690 args[1] = lir->ins2i(LIR_piadd, invokevp_ins, int32_t(2 * sizeof(jsval)));
7691 args[2] = lir->insImm(argc);
7692 args[3] = this_ins;
7693 args[4] = cx_ins;
7694 types = ARGSIZE_LO | ARGSIZE_LO << 2 | ARGSIZE_LO << 4 | ARGSIZE_LO << 6 |
7695 ARGSIZE_LO << 8 | ARGSIZE_LO << 10;
7698 // Generate CallInfo and a JSTraceableNative structure on the fly. Do not
7699 // use JSTN_UNBOX_AFTER for mode JSOP_NEW because record_NativeCallComplete
7700 // unboxes the result specially.
7702 CallInfo* ci = (CallInfo*) lir->insSkip(sizeof(struct CallInfo))->payload();
7703 ci->_address = uintptr_t(fun->u.n.native);
7704 ci->_cse = ci->_fold = 0;
7705 ci->_abi = ABI_CDECL;
7706 ci->_argtypes = types;
7707 #ifdef DEBUG
7708 ci->_name = JS_GetFunctionName(fun);
7709 #endif
7711 // Generate a JSTraceableNative structure on the fly.
7712 generatedTraceableNative->builtin = ci;
7713 generatedTraceableNative->native = (JSFastNative)fun->u.n.native;
7714 generatedTraceableNative->flags = FAIL_STATUS | ((mode == JSOP_NEW)
7715 ? JSTN_CONSTRUCTOR
7716 : JSTN_UNBOX_AFTER);
7718 generatedTraceableNative->prefix = generatedTraceableNative->argtypes = NULL;
7720 // argc is the original argc here. It is used to calculate where to place
7721 // the return value.
7722 JSRecordingStatus status;
7723 if ((status = emitNativeCall(generatedTraceableNative, argc, args)) != JSRS_CONTINUE)
7724 return status;
7726 // Unroot the vp.
7727 lir->insStorei(INS_CONSTPTR(NULL), cx_ins, offsetof(JSContext, nativeVp));
7728 return JSRS_CONTINUE;
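/*
 * Sketch of the invokevp vector built above, with fun->nargs standing in for
 * FUN_MINARGS(fun). Assumed layout:
 *
 *   vp[0]              callee function object
 *   vp[1]              boxed |this|
 *   vp[2 .. 2+argc-1]  boxed actual arguments
 *   vp[2+argc .. ]     JSVAL_VOID padding up to max(argc, nargs) + extra
 *   vp[vplen-1]        return-value slot (slow natives only)
 */
#include <algorithm>
#include <cstdint>

static uint32_t vplen_sketch(uint32_t argc, uint32_t nargs, uint32_t extra,
                             bool slow_native)
{
    uint32_t vplen = 2 + std::max(argc, nargs) + extra;
    if (slow_native)
        vplen++;        /* trailing slot for the slow native's return value */
    return vplen;
}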
7731 JS_REQUIRES_STACK JSRecordingStatus
7732 TraceRecorder::functionCall(uintN argc, JSOp mode)
7734 jsval& fval = stackval(0 - (2 + argc));
7735 JS_ASSERT(&fval >= StackBase(cx->fp));
7737 if (!VALUE_IS_FUNCTION(cx, fval))
7738 ABORT_TRACE("callee is not a function");
7740 jsval& tval = stackval(0 - (1 + argc));
7743 * If callee is not constant, it's a shapeless call and we have to guard
7744 * explicitly that we will get this callee again at runtime.
7746 if (!get(&fval)->isconst())
7747 CHECK_STATUS(guardCallee(fval));
7750 * Require that the callee be a function object, to avoid guarding on its
7751 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
7752 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
7753 * the property cache and guard on the object (this) in which the callee
7754 * was found. So it's sufficient to test here that the particular function
7755 * is interpreted, not guard on that condition.
7757 * Bytecode sequences that push shapeless callees must guard on the callee
7758 * class being Function and the function being interpreted.
7760 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));
7762 if (FUN_INTERPRETED(fun)) {
7763 if (mode == JSOP_NEW) {
7764 LIns* args[] = { get(&fval), INS_CONSTPTR(&js_ObjectClass), cx_ins };
7765 LIns* tv_ins = lir->insCall(&js_NewInstance_ci, args);
7766 guard(false, lir->ins_eq0(tv_ins), OOM_EXIT);
7767 set(&tval, tv_ins);
7769 return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
7772 if (FUN_SLOW_NATIVE(fun)) {
7773 JSNative native = fun->u.n.native;
7774 jsval* argv = &tval + 1;
7775 if (native == js_Array)
7776 return newArray(JSVAL_TO_OBJECT(fval), argc, argv, &fval);
7777 if (native == js_String && argc == 1) {
7778 if (mode == JSOP_NEW)
7779 return newString(JSVAL_TO_OBJECT(fval), 1, argv, &fval);
7780 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
7781 ABORT_IF_XML(argv[0]);
7782 return call_imacro(call_imacros.String);
7784 set(&fval, stringify(argv[0]));
7785 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7786 return JSRS_CONTINUE;
7790 return callNative(argc, mode);
7793 JS_REQUIRES_STACK JSRecordingStatus
7794 TraceRecorder::record_JSOP_NEW()
7796 uintN argc = GET_ARGC(cx->fp->regs->pc);
7797 cx->fp->assertValidStackDepth(argc + 2);
7798 return functionCall(argc, JSOP_NEW);
7801 JS_REQUIRES_STACK JSRecordingStatus
7802 TraceRecorder::record_JSOP_DELNAME()
7804 return JSRS_STOP;
7807 JS_REQUIRES_STACK JSRecordingStatus
7808 TraceRecorder::record_JSOP_DELPROP()
7810 return JSRS_STOP;
7813 JS_REQUIRES_STACK JSRecordingStatus
7814 TraceRecorder::record_JSOP_DELELEM()
7816 return JSRS_STOP;
7819 JS_REQUIRES_STACK JSRecordingStatus
7820 TraceRecorder::record_JSOP_TYPEOF()
7822 jsval& r = stackval(-1);
7823 LIns* type;
7824 if (JSVAL_IS_STRING(r)) {
7825 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]));
7826 } else if (isNumber(r)) {
7827 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]));
7828 } else if (VALUE_IS_FUNCTION(cx, r)) {
7829 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]));
7830 } else {
7831 LIns* args[] = { get(&r), cx_ins };
7832 if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
7833 // We specialize identically for boolean and undefined. We must not have a hole here.
7834 // Pass the unboxed value here, since TypeOfBoolean knows how to handle it.
7835 JS_ASSERT(r == JSVAL_TRUE || r == JSVAL_FALSE || r == JSVAL_VOID);
7836 type = lir->insCall(&js_TypeOfBoolean_ci, args);
7837 } else {
7838 JS_ASSERT(JSVAL_TAG(r) == JSVAL_OBJECT);
7839 type = lir->insCall(&js_TypeOfObject_ci, args);
7842 set(&r, type);
7843 return JSRS_CONTINUE;
7846 JS_REQUIRES_STACK JSRecordingStatus
7847 TraceRecorder::record_JSOP_VOID()
7849 stack(-1, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)));
7850 return JSRS_CONTINUE;
7853 JS_REQUIRES_STACK JSRecordingStatus
7854 TraceRecorder::record_JSOP_INCNAME()
7856 return incName(1);
7859 JS_REQUIRES_STACK JSRecordingStatus
7860 TraceRecorder::record_JSOP_INCPROP()
7862 return incProp(1);
7865 JS_REQUIRES_STACK JSRecordingStatus
7866 TraceRecorder::record_JSOP_INCELEM()
7868 return incElem(1);
7871 JS_REQUIRES_STACK JSRecordingStatus
7872 TraceRecorder::record_JSOP_DECNAME()
7874 return incName(-1);
7877 JS_REQUIRES_STACK JSRecordingStatus
7878 TraceRecorder::record_JSOP_DECPROP()
7880 return incProp(-1);
7883 JS_REQUIRES_STACK JSRecordingStatus
7884 TraceRecorder::record_JSOP_DECELEM()
7886 return incElem(-1);
7889 JS_REQUIRES_STACK JSRecordingStatus
7890 TraceRecorder::incName(jsint incr, bool pre)
7892 jsval* vp;
7893 CHECK_STATUS(name(vp));
7894 LIns* v_ins = get(vp);
7895 CHECK_STATUS(inc(*vp, v_ins, incr, pre));
7896 set(vp, v_ins);
7897 return JSRS_CONTINUE;
7900 JS_REQUIRES_STACK JSRecordingStatus
7901 TraceRecorder::record_JSOP_NAMEINC()
7903 return incName(1, false);
7906 JS_REQUIRES_STACK JSRecordingStatus
7907 TraceRecorder::record_JSOP_PROPINC()
7909 return incProp(1, false);
7912 // XXX consolidate with record_JSOP_GETELEM code...
7913 JS_REQUIRES_STACK JSRecordingStatus
7914 TraceRecorder::record_JSOP_ELEMINC()
7916 return incElem(1, false);
7919 JS_REQUIRES_STACK JSRecordingStatus
7920 TraceRecorder::record_JSOP_NAMEDEC()
7922 return incName(-1, false);
7925 JS_REQUIRES_STACK JSRecordingStatus
7926 TraceRecorder::record_JSOP_PROPDEC()
7928 return incProp(-1, false);
7931 JS_REQUIRES_STACK JSRecordingStatus
7932 TraceRecorder::record_JSOP_ELEMDEC()
7934 return incElem(-1, false);
7937 JS_REQUIRES_STACK JSRecordingStatus
7938 TraceRecorder::record_JSOP_GETPROP()
7940 return getProp(stackval(-1));
7943 JS_REQUIRES_STACK JSRecordingStatus
7944 TraceRecorder::record_JSOP_SETPROP()
7946 jsval& l = stackval(-2);
7947 if (JSVAL_IS_PRIMITIVE(l))
7948 ABORT_TRACE("primitive this for SETPROP");
7950 JSObject* obj = JSVAL_TO_OBJECT(l);
7951 if (obj->map->ops->setProperty != js_SetProperty)
7952 ABORT_TRACE("non-native JSObjectOps::setProperty");
7953 return JSRS_CONTINUE;
7956 JS_REQUIRES_STACK JSRecordingStatus
7957 TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop)
7959 if (entry == JS_NO_PROP_CACHE_FILL)
7960 ABORT_TRACE("can't trace uncacheable property set");
7961 if (PCVCAP_TAG(entry->vcap) >= 1)
7962 ABORT_TRACE("can't trace inherited property set");
7964 jsbytecode* pc = cx->fp->regs->pc;
7965 JS_ASSERT(entry->kpc == pc);
7967 jsval& r = stackval(-1);
7968 jsval& l = stackval(-2);
7970 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
7971 JSObject* obj = JSVAL_TO_OBJECT(l);
7972 LIns* obj_ins = get(&l);
7973 JSScope* scope = OBJ_SCOPE(obj);
7975 JS_ASSERT(scope->object == obj);
7976 JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
7978 if (!isValidSlot(scope, sprop))
7979 return JSRS_STOP;
7981 if (obj == globalObj) {
7982 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope));
7983 uint32 slot = sprop->slot;
7984 if (!lazilyImportGlobalSlot(slot))
7985 ABORT_TRACE("lazy import of global slot failed");
7987 LIns* r_ins = get(&r);
7990 * Writing a function into the global object might rebrand it; we don't
7991 * trace that case. There's no need to guard on that, though, because
7992 * separating functions into the trace-time type JSVAL_TFUN will save
7993 * the day!
7995 if (VALUE_IS_FUNCTION(cx, r))
7996 ABORT_TRACE("potential rebranding of the global object");
7997 set(&STOBJ_GET_SLOT(obj, slot), r_ins);
7999 JS_ASSERT(*pc != JSOP_INITPROP);
8000 if (pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
8001 set(&l, r_ins);
8002 return JSRS_CONTINUE;
8005 // The global object's shape is guarded at trace entry; all others need a guard here.
8006 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
8007 LIns* ops_ins;
8008 if (!map_is_native(obj->map, map_ins, ops_ins, offsetof(JSObjectOps, setProperty)))
8009 ABORT_TRACE("non-native map");
8011 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
8012 guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
8013 BRANCH_EXIT);
8015 uint32 vshape = PCVCAP_SHAPE(entry->vcap);
8016 if (entry->kshape != vshape) {
8017 LIns *vshape_ins = lir->insLoad(LIR_ld,
8018 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime)),
8019 offsetof(JSRuntime, protoHazardShape));
8020 guard(true, addName(lir->ins2i(LIR_eq, vshape_ins, vshape), "guard(vshape)"),
8021 MISMATCH_EXIT);
8023 LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
8024 LIns* ok_ins = lir->insCall(&js_AddProperty_ci, args);
8025 guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
8028 LIns* dslots_ins = NULL;
8029 LIns* v_ins = get(&r);
8030 LIns* boxed_ins = v_ins;
8031 box_jsval(r, boxed_ins);
8032 CHECK_STATUS(native_set(obj_ins, sprop, dslots_ins, boxed_ins));
8034 if (*pc != JSOP_INITPROP && pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
8035 set(&l, v_ins);
8036 return JSRS_CONTINUE;
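/*
 * The guards above boil the property-cache hit down to integer shape
 * compares. Schematically (hypothetical types): the receiver's scope shape
 * must equal the cache key's kshape, and when the hit adds a new property
 * (kshape != vshape) the runtime-wide protoHazardShape must equal vshape.
 */
#include <cstdint>

struct PCEntrySketch { uint32_t kshape, vshape; };

static bool setprop_guards_hold(uint32_t scope_shape,
                                uint32_t proto_hazard_shape,
                                const PCEntrySketch& e)
{
    if (scope_shape != e.kshape)
        return false;                         /* guard(kshape) exits trace */
    if (e.kshape != e.vshape && proto_hazard_shape != e.vshape)
        return false;                         /* guard(vshape) exits trace */
    return true;                              /* stay on trace             */
}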
8039 /* Functions used by JSOP_GETELEM. */
8041 static JSBool
8042 GetProperty(JSContext *cx, uintN argc, jsval *vp)
8044 jsval *argv;
8045 jsid id;
8047 JS_ASSERT_NOT_ON_TRACE(cx);
8048 JS_ASSERT(cx->fp->imacpc && argc == 1);
8049 argv = JS_ARGV(cx, vp);
8050 JS_ASSERT(JSVAL_IS_STRING(argv[0]));
8051 if (!js_ValueToStringId(cx, argv[0], &id))
8052 return JS_FALSE;
8053 argv[0] = ID_TO_VALUE(id);
8054 return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
8057 static jsval FASTCALL
8058 GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
8060 JSAutoTempIdRooter idr(cx);
8061 JSAutoTempValueRooter tvr(cx);
8063 if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), idr.addr()) ||
8064 !OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8065 js_SetBuiltinError(cx);
8066 *tvr.addr() = JSVAL_ERROR_COOKIE;
8068 return tvr.value();
8071 static JSBool
8072 GetElement(JSContext *cx, uintN argc, jsval *vp)
8074 jsval *argv;
8075 jsid id;
8077 JS_ASSERT_NOT_ON_TRACE(cx);
8078 JS_ASSERT(cx->fp->imacpc && argc == 1);
8079 argv = JS_ARGV(cx, vp);
8080 JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
8081 if (!JS_ValueToId(cx, argv[0], &id))
8082 return JS_FALSE;
8083 argv[0] = ID_TO_VALUE(id);
8084 return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
8087 static jsval FASTCALL
8088 GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
8090 JSAutoTempValueRooter tvr(cx);
8091 JSAutoTempIdRooter idr(cx);
8093 if (!js_Int32ToId(cx, index, idr.addr())) {
8094 js_SetBuiltinError(cx);
8095 return JSVAL_ERROR_COOKIE;
8097 if (!OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8098 js_SetBuiltinError(cx);
8099 *tvr.addr() = JSVAL_ERROR_COOKIE;
8101 return tvr.value();
8104 JS_DEFINE_TRCINFO_1(GetProperty,
8105 (4, (static, JSVAL_FAIL, GetProperty_tn, CONTEXT, PC, THIS, STRING, 0, 0)))
8106 JS_DEFINE_TRCINFO_1(GetElement,
8107 (4, (extern, JSVAL_FAIL, GetElement_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
8109 JS_REQUIRES_STACK JSRecordingStatus
8110 TraceRecorder::record_JSOP_GETELEM()
8112 bool call = *cx->fp->regs->pc == JSOP_CALLELEM;
8114 jsval& idx = stackval(-1);
8115 jsval& lval = stackval(-2);
8117 LIns* obj_ins = get(&lval);
8118 LIns* idx_ins = get(&idx);
8120 // Special case for array-like access of strings.
8121 if (JSVAL_IS_STRING(lval) && isInt32(idx)) {
8122 if (call)
8123 ABORT_TRACE("JSOP_CALLELEM on a string");
8124 int i = asInt32(idx);
8125 if (size_t(i) >= JSSTRING_LENGTH(JSVAL_TO_STRING(lval)))
8126 ABORT_TRACE("Invalid string index in JSOP_GETELEM");
8127 idx_ins = makeNumberInt32(idx_ins);
8128 LIns* args[] = { idx_ins, obj_ins, cx_ins };
8129 LIns* unitstr_ins = lir->insCall(&js_String_getelem_ci, args);
8130 guard(false, lir->ins_eq0(unitstr_ins), MISMATCH_EXIT);
8131 set(&lval, unitstr_ins);
8132 return JSRS_CONTINUE;
8135 if (JSVAL_IS_PRIMITIVE(lval))
8136 ABORT_TRACE("JSOP_GETLEM on a primitive");
8137 ABORT_IF_XML(lval);
8139 JSObject* obj = JSVAL_TO_OBJECT(lval);
8140 jsval id;
8141 LIns* v_ins;
8143 /* Property access using a string name or something we have to stringify. */
8144 if (!JSVAL_IS_INT(idx)) {
8145 if (!JSVAL_IS_PRIMITIVE(idx))
8146 ABORT_TRACE("non-primitive index");
8147 // If index is not a string, turn it into a string.
8148 if (!js_InternNonIntElementId(cx, obj, idx, &id))
8149 ABORT_TRACE_ERROR("failed to intern non-int element id");
8150 set(&idx, stringify(idx));
8152 // Store the interned string to the stack to save the interpreter from redoing this work.
8153 idx = ID_TO_VALUE(id);
8155 // The object is not guaranteed to be a dense array at this point, so it might be the
8156 // global object, which we have to guard against.
8157 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8159 return call_imacro(call ? callelem_imacros.callprop : getelem_imacros.getprop);
8162 // Invalid dense array index or not a dense array.
8163 if (JSVAL_TO_INT(idx) < 0 || !OBJ_IS_DENSE_ARRAY(cx, obj)) {
8164 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8166 return call_imacro(call ? callelem_imacros.callelem : getelem_imacros.getelem);
8169 // Fast path for dense arrays accessed with a non-negative integer index.
8170 jsval* vp;
8171 LIns* addr_ins;
8172 CHECK_STATUS(elem(lval, idx, vp, v_ins, addr_ins));
8173 set(&lval, v_ins);
8174 if (call)
8175 set(&idx, obj_ins);
8176 return JSRS_CONTINUE;
8179 /* Functions used by JSOP_SETELEM */
8181 static JSBool
8182 SetProperty(JSContext *cx, uintN argc, jsval *vp)
8184 jsval *argv;
8185 jsid id;
8187 JS_ASSERT(argc == 2);
8188 argv = JS_ARGV(cx, vp);
8189 JS_ASSERT(JSVAL_IS_STRING(argv[0]));
8190 if (!js_ValueToStringId(cx, argv[0], &id))
8191 return JS_FALSE;
8192 argv[0] = ID_TO_VALUE(id);
8193 if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
8194 return JS_FALSE;
8195 JS_SET_RVAL(cx, vp, JSVAL_VOID);
8196 return JS_TRUE;
8199 static JSBool FASTCALL
8200 SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
8202 JSAutoTempValueRooter tvr(cx, v);
8203 JSAutoTempIdRooter idr(cx);
8205 if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), idr.addr()) ||
8206 !OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8207 js_SetBuiltinError(cx);
8209 return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
8212 static JSBool
8213 SetElement(JSContext *cx, uintN argc, jsval *vp)
8215 jsval *argv;
8216 jsid id;
8218 JS_ASSERT(argc == 2);
8219 argv = JS_ARGV(cx, vp);
8220 JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
8221 if (!JS_ValueToId(cx, argv[0], &id))
8222 return JS_FALSE;
8223 argv[0] = ID_TO_VALUE(id);
8224 if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
8225 return JS_FALSE;
8226 JS_SET_RVAL(cx, vp, JSVAL_VOID);
8227 return JS_TRUE;
8230 static JSBool FASTCALL
8231 SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
8233 JSAutoTempIdRooter idr(cx);
8234 JSAutoTempValueRooter tvr(cx, v);
8236 if (!js_Int32ToId(cx, index, idr.addr()) ||
8237 !OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8238 js_SetBuiltinError(cx);
8240 return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
8243 JS_DEFINE_TRCINFO_1(SetProperty,
8244 (4, (extern, BOOL_FAIL, SetProperty_tn, CONTEXT, THIS, STRING, JSVAL, 0, 0)))
8245 JS_DEFINE_TRCINFO_1(SetElement,
8246 (4, (extern, BOOL_FAIL, SetElement_tn, CONTEXT, THIS, INT32, JSVAL, 0, 0)))
8248 JS_REQUIRES_STACK JSRecordingStatus
8249 TraceRecorder::record_JSOP_SETELEM()
8251 jsval& v = stackval(-1);
8252 jsval& idx = stackval(-2);
8253 jsval& lval = stackval(-3);
8255 /* no guards for type checks, trace specialized this already */
8256 if (JSVAL_IS_PRIMITIVE(lval))
8257 ABORT_TRACE("left JSOP_SETELEM operand is not an object");
8258 ABORT_IF_XML(lval);
8260 JSObject* obj = JSVAL_TO_OBJECT(lval);
8261 LIns* obj_ins = get(&lval);
8262 LIns* idx_ins = get(&idx);
8263 LIns* v_ins = get(&v);
8264 jsid id;
8266 if (!JSVAL_IS_INT(idx)) {
8267 if (!JSVAL_IS_PRIMITIVE(idx))
8268 ABORT_TRACE("non-primitive index");
8269 // If index is not a string, turn it into a string.
8270 if (!js_InternNonIntElementId(cx, obj, idx, &id))
8271 ABORT_TRACE_ERROR("failed to intern non-int element id");
8272 set(&idx, stringify(idx));
8274 // Store the interned string to the stack to save the interpreter from redoing this work.
8275 idx = ID_TO_VALUE(id);
8277 // The object is not guaranteed to be a dense array at this point, so it might be the
8278 // global object, which we have to guard against.
8279 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8281 return call_imacro((*cx->fp->regs->pc == JSOP_INITELEM)
8282 ? initelem_imacros.initprop
8283 : setelem_imacros.setprop);
8286 if (JSVAL_TO_INT(idx) < 0 || !OBJ_IS_DENSE_ARRAY(cx, obj)) {
8287 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8289 return call_imacro((*cx->fp->regs->pc == JSOP_INITELEM)
8290 ? initelem_imacros.initelem
8291 : setelem_imacros.setelem);
8294 // Make sure the array is actually dense.
8295 if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
8296 return JSRS_STOP;
8298 // Fast path for dense arrays accessed with a non-negative integer index. In case the trace
8299 // calculated the index using the FPU, force it to be an integer.
8300 idx_ins = makeNumberInt32(idx_ins);
8302 // Box the value so we can use one builtin instead of having to add one builtin for every
8303 // storage type.
8304 LIns* boxed_v_ins = v_ins;
8305 box_jsval(v, boxed_v_ins);
8307 LIns* args[] = { boxed_v_ins, idx_ins, obj_ins, cx_ins };
8308 LIns* res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
8309 guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
8311 jsbytecode* pc = cx->fp->regs->pc;
8312 if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
8313 set(&lval, v_ins);
8315 return JSRS_CONTINUE;
8318 JS_REQUIRES_STACK JSRecordingStatus
8319 TraceRecorder::record_JSOP_CALLNAME()
8321 JSObject* obj = cx->fp->scopeChain;
8322 if (obj != globalObj) {
8323 jsval* vp;
8324 CHECK_STATUS(activeCallOrGlobalSlot(obj, vp));
8325 stack(0, get(vp));
8326 stack(1, INS_CONSTPTR(globalObj));
8327 return JSRS_CONTINUE;
8330 LIns* obj_ins = scopeChain();
8331 JSObject* obj2;
8332 jsuword pcval;
8334 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8336 if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
8337 ABORT_TRACE("callee is not an object");
8339 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
8341 stack(0, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
8342 stack(1, obj_ins);
8343 return JSRS_CONTINUE;
8346 JS_DEFINE_CALLINFO_4(extern, UINT32, js_GetUpvarOnTrace, CONTEXT, UINT32, UINT32, DOUBLEPTR, 0, 0)
8348 JS_REQUIRES_STACK JSRecordingStatus
8349 TraceRecorder::record_JSOP_GETUPVAR()
8351 uintN index = GET_UINT16(cx->fp->regs->pc);
8352 JSScript *script = cx->fp->script;
8354 JSUpvarArray* uva = JS_SCRIPT_UPVARS(script);
8355 JS_ASSERT(index < uva->length);
8358 * Try to find the upvar in the current trace's tracker.
8360 jsval& v = js_GetUpvar(cx, script->staticLevel, uva->vector[index]);
8361 LIns* upvar_ins = get(&v);
8362 if (upvar_ins) {
8363 stack(0, upvar_ins);
8364 return JSRS_CONTINUE;
8368 * The upvar is not in the current trace, so get the upvar value
8369 * exactly as the interpreter does and unbox.
8371 LIns* outp = lir->insAlloc(sizeof(double));
8372 LIns* args[] = {
8373 outp,
8374 lir->insImm(uva->vector[index]),
8375 lir->insImm(script->staticLevel),
8376 cx_ins
8378 const CallInfo* ci = &js_GetUpvarOnTrace_ci;
8379 LIns* call_ins = lir->insCall(ci, args);
8380 uint8 type = getCoercedType(v);
8381 guard(true,
8382 addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
8383 "guard(type-stable upvar)"),
8384 BRANCH_EXIT);
8386 LOpcode loadOp;
8387 switch (type) {
8388 case JSVAL_DOUBLE:
8389 loadOp = LIR_ldq;
8390 break;
8391 case JSVAL_OBJECT:
8392 case JSVAL_STRING:
8393 case JSVAL_TFUN:
8394 case JSVAL_TNULL:
8395 loadOp = LIR_ldp;
8396 break;
8397 case JSVAL_INT:
8398 case JSVAL_BOOLEAN:
8399 loadOp = LIR_ld;
8400 break;
8401 case JSVAL_BOXED:
8402 default:
8403 JS_NOT_REACHED("found boxed type in an upvar type map entry");
8404 return JSRS_STOP;
8407 LIns* result = lir->insLoad(loadOp, outp, lir->insImm(0));
8408 if (type == JSVAL_INT)
8409 result = lir->ins1(LIR_i2f, result);
8410 stack(0, result);
8411 return JSRS_CONTINUE;
8414 JS_REQUIRES_STACK JSRecordingStatus
8415 TraceRecorder::record_JSOP_CALLUPVAR()
8417 CHECK_STATUS(record_JSOP_GETUPVAR());
8418 stack(1, INS_CONSTPTR(NULL));
8419 return JSRS_CONTINUE;
8422 JS_REQUIRES_STACK JSRecordingStatus
8423 TraceRecorder::record_JSOP_GETDSLOT()
8425 JSObject* callee = cx->fp->callee;
8426 LIns* callee_ins = (callDepth == 0) ? get(&cx->fp->argv[-2]) : INS_CONSTPTR(callee);
8428 unsigned index = GET_UINT16(cx->fp->regs->pc);
8429 LIns* dslots_ins = NULL;
8430 LIns* v_ins = stobj_get_dslot(callee_ins, index, dslots_ins);
8432 unbox_jsval(callee->dslots[index], v_ins, snapshot(BRANCH_EXIT));
8433 stack(0, v_ins);
8434 return JSRS_CONTINUE;
8437 JS_REQUIRES_STACK JSRecordingStatus
8438 TraceRecorder::record_JSOP_CALLDSLOT()
8440 CHECK_STATUS(record_JSOP_GETDSLOT());
8441 stack(1, INS_CONSTPTR(NULL));
8442 return JSRS_CONTINUE;
8445 JS_REQUIRES_STACK JSRecordingStatus
8446 TraceRecorder::guardCallee(jsval& callee)
8448 JS_ASSERT(VALUE_IS_FUNCTION(cx, callee));
8450 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
8451 JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
8452 LIns* callee_ins = get(&callee);
8454 guard(true,
8455 lir->ins2(LIR_eq,
8456 lir->ins2(LIR_piand,
8457 stobj_get_fslot(callee_ins, JSSLOT_PRIVATE),
8458 INS_CONSTWORD(~JSVAL_INT)),
8459 INS_CONSTPTR(OBJ_GET_PRIVATE(cx, callee_obj))),
8460 branchExit);
8461 guard(true,
8462 lir->ins2(LIR_eq,
8463 stobj_get_fslot(callee_ins, JSSLOT_PARENT),
8464 INS_CONSTPTR(OBJ_GET_PARENT(cx, callee_obj))),
8465 branchExit);
8466 return JSRS_CONTINUE;
8469 JS_REQUIRES_STACK JSRecordingStatus
8470 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
8472 if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
8473 ABORT_TRACE("JSOP_CALL or JSOP_NEW crosses global scopes");
8475 JSStackFrame* fp = cx->fp;
8477 // TODO: track the copying via the tracker...
8478 if (argc < fun->nargs &&
8479 jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
8480 ABORT_TRACE("can't trace calls with too few args requiring argv move");
8483 // Generate a type map for the outgoing frame and stash it in the LIR
8484 unsigned stackSlots = js_NativeStackSlots(cx, 0/*callDepth*/);
8485 if (sizeof(FrameInfo) + stackSlots * sizeof(uint8) > MAX_SKIP_BYTES)
8486 ABORT_TRACE("interpreted function call requires saving too much stack");
8487 LIns* data = lir->insSkip(sizeof(FrameInfo) + stackSlots * sizeof(uint8));
8488 FrameInfo* fi = (FrameInfo*)data->payload();
8489 uint8* typemap = (uint8 *)(fi + 1);
8490 uint8* m = typemap;
8491 /* Determine the type of a store by looking at the current type of the actual value the
8492 interpreter is using. For numbers we have to check what kind of store we used last
8493 (integer or double) to figure out what the side exit should reflect in its typemap. */
8494 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0/*callDepth*/,
8495 *m++ = determineSlotType(vp);
8498 if (argc >= 0x8000)
8499 ABORT_TRACE("too many arguments");
8501 fi->callee = JSVAL_TO_OBJECT(fval);
8502 fi->block = fp->blockChain;
8503 fi->pc = fp->regs->pc;
8504 fi->imacpc = fp->imacpc;
8505 fi->s.spdist = fp->regs->sp - fp->slots;
8506 fi->s.argc = argc | (constructing ? 0x8000 : 0);
8508 unsigned callDepth = getCallDepth();
8509 if (callDepth >= treeInfo->maxCallDepth)
8510 treeInfo->maxCallDepth = callDepth + 1;
8512 lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
8514 atoms = fun->u.i.script->atomMap.vector;
8515 return JSRS_CONTINUE;
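/*
 * Standalone restatement of the argc packing above, assuming a 16-bit
 * FrameInfo argc field: the constructing flag lives in bit 15, which is why
 * argc >= 0x8000 aborts recording.
 */
#include <cassert>
#include <cstdint>

static uint16_t pack_argc_sketch(uint16_t argc, bool constructing) {
    assert(argc < 0x8000);                     /* mirrors the abort above */
    return uint16_t(argc | (constructing ? 0x8000 : 0));
}

static uint16_t argc_of(uint16_t s)          { return uint16_t(s & 0x7FFF); }
static bool     constructing_of(uint16_t s)  { return (s & 0x8000) != 0; }
/* pack_argc_sketch(3, true) == 0x8003; argc_of == 3; constructing_of == true */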
8518 JS_REQUIRES_STACK JSRecordingStatus
8519 TraceRecorder::record_JSOP_CALL()
8521 uintN argc = GET_ARGC(cx->fp->regs->pc);
8522 cx->fp->assertValidStackDepth(argc + 2);
8523 return functionCall(argc,
8524 (cx->fp->imacpc && *cx->fp->imacpc == JSOP_APPLY)
8525 ? JSOP_APPLY
8526 : JSOP_CALL);
8529 static jsbytecode* apply_imacro_table[] = {
8530 apply_imacros.apply0,
8531 apply_imacros.apply1,
8532 apply_imacros.apply2,
8533 apply_imacros.apply3,
8534 apply_imacros.apply4,
8535 apply_imacros.apply5,
8536 apply_imacros.apply6,
8537 apply_imacros.apply7,
8538 apply_imacros.apply8
8541 static jsbytecode* call_imacro_table[] = {
8542 apply_imacros.call0,
8543 apply_imacros.call1,
8544 apply_imacros.call2,
8545 apply_imacros.call3,
8546 apply_imacros.call4,
8547 apply_imacros.call5,
8548 apply_imacros.call6,
8549 apply_imacros.call7,
8550 apply_imacros.call8
8553 JS_REQUIRES_STACK JSRecordingStatus
8554 TraceRecorder::record_JSOP_APPLY()
8556 JSStackFrame* fp = cx->fp;
8557 jsbytecode *pc = fp->regs->pc;
8558 uintN argc = GET_ARGC(pc);
8559 cx->fp->assertValidStackDepth(argc + 2);
8561 jsval* vp = fp->regs->sp - (argc + 2);
8562 jsuint length = 0;
8563 JSObject* aobj = NULL;
8564 LIns* aobj_ins = NULL;
8566 JS_ASSERT(!fp->imacpc);
8568 if (!VALUE_IS_FUNCTION(cx, vp[0]))
8569 return record_JSOP_CALL();
8570 ABORT_IF_XML(vp[0]);
8572 JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
8573 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
8574 if (FUN_INTERPRETED(fun))
8575 return record_JSOP_CALL();
8577 bool apply = (JSFastNative)fun->u.n.native == js_fun_apply;
8578 if (!apply && (JSFastNative)fun->u.n.native != js_fun_call)
8579 return record_JSOP_CALL();
8582 * We don't trace apply and call with a primitive 'this', which is the
8583 * first positional parameter.
8585 if (argc > 0 && JSVAL_IS_PRIMITIVE(vp[2]))
8586 return record_JSOP_CALL();
8589 * Guard on the identity of this, which is the function we are applying.
8591 if (!VALUE_IS_FUNCTION(cx, vp[1]))
8592 ABORT_TRACE("callee is not a function");
8593 CHECK_STATUS(guardCallee(vp[1]));
8595 if (apply && argc >= 2) {
8596 if (argc != 2)
8597 ABORT_TRACE("apply with excess arguments");
8598 if (JSVAL_IS_PRIMITIVE(vp[3]))
8599 ABORT_TRACE("arguments parameter of apply is primitive");
8600 aobj = JSVAL_TO_OBJECT(vp[3]);
8601 aobj_ins = get(&vp[3]);
8604 * We expect a dense array for the arguments (the other
8605 * frequent case is the arguments object, which we
8606 * don't trace at the moment).
8608 if (!guardDenseArray(aobj, aobj_ins))
8609 ABORT_TRACE("arguments parameter of apply is not a dense array");
8612 * We trace only apply calls with a certain number of arguments.
8614 length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
8615 if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
8616 ABORT_TRACE("too many arguments to apply");
8619 * Make sure the array has the same length at runtime.
8621 guard(true,
8622 lir->ins2i(LIR_eq,
8623 stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH),
8624 length),
8625 BRANCH_EXIT);
8627 return call_imacro(apply_imacro_table[length]);
8630 if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
8631 ABORT_TRACE("too many arguments to call");
8633 return call_imacro(call_imacro_table[argc]);
8636 static JSBool FASTCALL
8637 CatchStopIteration_tn(JSContext* cx, JSBool ok, jsval* vp)
8639 if (!ok && cx->throwing && js_ValueIsStopIteration(cx->exception)) {
8640 cx->throwing = JS_FALSE;
8641 cx->exception = JSVAL_VOID;
8642 *vp = JSVAL_HOLE;
8643 return JS_TRUE;
8645 return ok;
8648 JS_DEFINE_TRCINFO_1(CatchStopIteration_tn,
8649 (3, (static, BOOL, CatchStopIteration_tn, CONTEXT, BOOL, JSVALPTR, 0, 0)))
8651 JS_REQUIRES_STACK JSRecordingStatus
8652 TraceRecorder::record_NativeCallComplete()
8654 if (pendingTraceableNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
8655 return JSRS_CONTINUE;
8657 jsbytecode* pc = cx->fp->regs->pc;
8659 JS_ASSERT(pendingTraceableNative);
8660 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW);
8662 jsval& v = stackval(-1);
8663 LIns* v_ins = get(&v);
8665 /* At this point the generated code has already called the native function
8666 and we can no longer fail back to the original pc location (JSOP_CALL)
8667 because that would cause the interpreter to re-execute the native
8668 function, which might have side effects.
8670 Instead, the snapshot() call below sees that we are currently parked on
8671 a traceable native's JSOP_CALL instruction, and it will advance the pc
8672 to restore by the length of the current opcode. If the native's return
8673 type is jsval, snapshot() will also indicate in the type map that the
8674 element on top of the stack is a boxed value which doesn't need to be
8675 boxed if the type guard generated by unbox_jsval() fails. */
8677 if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
8678 // Keep cx->bailExit null when it's invalid.
8679 lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
8681 LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
8682 if (pendingTraceableNative == generatedTraceableNative) {
8683 LIns* ok_ins = v_ins;
8686 * Custom implementations of Iterator.next() throw a StopIteration exception.
8687 * Catch and clear it and set the return value to JSVAL_HOLE in this case.
8689 if (uintptr_t(pc - nextiter_imacros.custom_iter_next) <
8690 sizeof(nextiter_imacros.custom_iter_next)) {
8691 LIns* args[] = { native_rval_ins, ok_ins, cx_ins }; /* reverse order */
8692 ok_ins = lir->insCall(&CatchStopIteration_tn_ci, args);
8696 * If we run a generic traceable native, the return value is in the argument
8697 * vector for native function calls. The actual return value of the native is a JSBool
8698 * indicating the error status.
8700 v_ins = lir->insLoad(LIR_ld, native_rval_ins, 0);
8701 if (*pc == JSOP_NEW) {
8702 LIns* x = lir->ins_eq0(lir->ins2i(LIR_piand, v_ins, JSVAL_TAGMASK));
8703 x = lir->ins_choose(x, v_ins, INS_CONST(0));
8704 v_ins = lir->ins_choose(lir->ins_eq0(x), newobj_ins, x);
8706 set(&v, v_ins);
8709 * If this is a generic traceable native invocation, propagate the boolean return
8710 * value of the native into builtinStatus. If the return value (ok_ins)
8711 * is true, status' == status. Otherwise status' = status | JSBUILTIN_ERROR.
8712 * We calculate (ok&1)^1, which is 1 if ok is JS_FALSE (error), and then
8713 * shift that left by 1, which yields JSBUILTIN_ERROR.
8715 JS_STATIC_ASSERT((1 - JS_TRUE) << 1 == 0);
8716 JS_STATIC_ASSERT((1 - JS_FALSE) << 1 == JSBUILTIN_ERROR);
8717 status = lir->ins2(LIR_or,
8718 status,
8719 lir->ins2i(LIR_lsh,
8720 lir->ins2i(LIR_xor,
8721 lir->ins2i(LIR_and, ok_ins, 1),
8723 1));
8724 lir->insStorei(status, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
8726 guard(true,
8727 lir->ins_eq0(status),
8728 STATUS_EXIT);
8731 JSRecordingStatus ok = JSRS_CONTINUE;
8732 if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
8734 * If we side exit on the unboxing code due to a type change, make sure that the boxed
8735 * value is actually currently associated with that location, and that we are talking
8736 * about the top of the stack here, which is where we expected boxed values.
8738 JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins);
8739 unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT));
8740 set(&v, v_ins);
8741 } else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
8742 /* Already added i2f in functionCall. */
8743 JS_ASSERT(JSVAL_IS_NUMBER(v));
8744 } else {
8745 /* Convert the result to double if the builtin returns int32. */
8746 if (JSVAL_IS_NUMBER(v) &&
8747 (pendingTraceableNative->builtin->_argtypes & 3) == nanojit::ARGSIZE_LO) {
8748 set(&v, lir->ins1(LIR_i2f, v_ins));
8752 // We'll null pendingTraceableNative in monitorRecording, on the next op cycle.
8753 // There must be a next op since the stack is non-empty.
8754 return ok;
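/*
 * The status update above is branch-free. With JS_TRUE == 1, JS_FALSE == 0,
 * and JSBUILTIN_ERROR == 2 (the values implied by the static asserts),
 * ((ok & 1) ^ 1) << 1 is 0 on success and JSBUILTIN_ERROR on failure --
 * checkable in isolation:
 */
#include <cassert>

static void builtin_status_sketch() {
    const int JSBUILTIN_ERROR_S = 2;           /* per the static asserts */
    for (int ok = 0; ok <= 1; ok++) {          /* JS_FALSE, JS_TRUE      */
        int status = 0;
        status |= ((ok & 1) ^ 1) << 1;
        assert(status == (ok ? 0 : JSBUILTIN_ERROR_S));
    }
}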
8757 JS_REQUIRES_STACK JSRecordingStatus
8758 TraceRecorder::name(jsval*& vp)
8760 JSObject* obj = cx->fp->scopeChain;
8761 if (obj != globalObj)
8762 return activeCallOrGlobalSlot(obj, vp);
8764 /* Can't use prop here, because we don't want unboxing from global slots. */
8765 LIns* obj_ins = scopeChain();
8766 uint32 slot;
8768 JSObject* obj2;
8769 jsuword pcval;
8772 * Property cache ensures that we are dealing with an existing property,
8773 * and guards the shape for us.
8775 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8778 * Abort if property doesn't exist (interpreter will report an error).
8780 if (PCVAL_IS_NULL(pcval))
8781 ABORT_TRACE("named property not found");
8784 * Insist on obj being the directly addressed object.
8786 if (obj2 != obj)
8787 ABORT_TRACE("name() hit prototype chain");
8789 /* Don't trace getter or setter calls, our caller wants a direct slot. */
8790 if (PCVAL_IS_SPROP(pcval)) {
8791 JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
8792 if (!isValidSlot(OBJ_SCOPE(obj), sprop))
8793 ABORT_TRACE("name() not accessing a valid slot");
8794 slot = sprop->slot;
8795 } else {
8796 if (!PCVAL_IS_SLOT(pcval))
8797 ABORT_TRACE("PCE is not a slot");
8798 slot = PCVAL_TO_SLOT(pcval);
8801 if (!lazilyImportGlobalSlot(slot))
8802 ABORT_TRACE("lazy import of global slot failed");
8804 vp = &STOBJ_GET_SLOT(obj, slot);
8805 return JSRS_CONTINUE;
8808 JS_REQUIRES_STACK JSRecordingStatus
8809 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins)
8812 * Can't specialize to assert obj != global, must guard to avoid aliasing
8813 * stale homes of stacked global variables.
8815 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8818 * Property cache ensures that we are dealing with an existing property,
8819 * and guards the shape for us.
8821 JSObject* obj2;
8822 jsuword pcval;
8823 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8825 /* Check for non-existent property reference, which results in undefined. */
8826 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
8827 if (PCVAL_IS_NULL(pcval)) {
8829 * This trace will be valid as long as neither the object nor any object
8830 * on its prototype chain changes shape.
8832 VMSideExit* exit = snapshot(BRANCH_EXIT);
8833 for (;;) {
8834 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
8835 LIns* ops_ins;
8836 if (map_is_native(obj->map, map_ins, ops_ins)) {
8837 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
8838 "shape");
8839 guard(true,
8840 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
8841 exit);
8842 } else if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
8843 ABORT_TRACE("non-native object involved in undefined property access");
8845 obj = JSVAL_TO_OBJECT(obj->fslots[JSSLOT_PROTO]);
8846 if (!obj)
8847 break;
8848 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
8851 v_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
8852 slot = SPROP_INVALID_SLOT;
8853 return JSRS_CONTINUE;
8856 /* If setting, insist on obj being the directly addressed object. */
8857 uint32 setflags = (cs.format & (JOF_SET | JOF_INCDEC | JOF_FOR));
8858 LIns* dslots_ins = NULL;
8860 /* Don't trace getter or setter calls, our caller wants a direct slot. */
8861 if (PCVAL_IS_SPROP(pcval)) {
8862 JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
8864 if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
8865 ABORT_TRACE("non-stub setter");
8866 if (setflags && (sprop->attrs & JSPROP_READONLY))
8867 ABORT_TRACE("writing to a readonly property");
8868 if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop)) {
8869 // FIXME 450335: generalize this away from regexp built-in getters.
8870 if (setflags == 0 &&
8871 sprop->getter == js_RegExpClass.getProperty &&
8872 sprop->shortid < 0) {
8873 if (sprop->shortid == REGEXP_LAST_INDEX)
8874 ABORT_TRACE("can't trace RegExp.lastIndex yet");
8875 LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
8876 v_ins = lir->insCall(&js_CallGetter_ci, args);
8877 guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
8879 * BIG FAT WARNING: This snapshot cannot be a BRANCH_EXIT, since
8880 * the value on top of the stack is not the value we unbox.
8882 unbox_jsval((sprop->shortid == REGEXP_SOURCE) ? JSVAL_STRING : JSVAL_BOOLEAN,
8883 v_ins,
8884 snapshot(MISMATCH_EXIT));
8885 return JSRS_CONTINUE;
8887 if (setflags == 0 &&
8888 sprop->getter == js_StringClass.getProperty &&
8889 sprop->id == ATOM_KEY(cx->runtime->atomState.lengthAtom)) {
8890 if (!guardClass(obj, obj_ins, &js_StringClass, snapshot(MISMATCH_EXIT)))
8891 ABORT_TRACE("can't trace String.length on non-String objects");
8892 LIns* str_ins = stobj_get_fslot(obj_ins, JSSLOT_PRIVATE);
8893 str_ins = lir->ins2(LIR_piand, str_ins, INS_CONSTWORD(~JSVAL_TAGMASK));
8894 v_ins = lir->ins1(LIR_i2f, getStringLength(str_ins));
8895 return JSRS_CONTINUE;
8897 ABORT_TRACE("non-stub getter");
8899 if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)))
8900 ABORT_TRACE("no valid slot");
8901 slot = sprop->slot;
8902 } else {
8903 if (!PCVAL_IS_SLOT(pcval))
8904 ABORT_TRACE("PCE is not a slot");
8905 slot = PCVAL_TO_SLOT(pcval);
8908 if (obj2 != obj) {
8909 if (setflags)
8910 ABORT_TRACE("JOF_SET opcode hit prototype chain");
8913 * We're getting a proto-property. Walk up the prototype chain emitting
8914 * proto slot loads, updating obj as we go, leaving obj set to obj2 with
8915 * obj_ins the last proto-load.
8917 while (obj != obj2) {
8918 obj_ins = stobj_get_slot(obj_ins, JSSLOT_PROTO, dslots_ins);
8919 obj = STOBJ_GET_PROTO(obj);
8923 v_ins = stobj_get_slot(obj_ins, slot, dslots_ins);
8924 unbox_jsval(STOBJ_GET_SLOT(obj, slot), v_ins, snapshot(BRANCH_EXIT));
8926 return JSRS_CONTINUE;

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::elem(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ins, LIns*& addr_ins)
{
    /* No type-check guards are needed; trace specialization has already ensured these types. */
    if (JSVAL_IS_PRIMITIVE(oval) || !JSVAL_IS_INT(ival))
        return JSRS_STOP;

    JSObject* obj = JSVAL_TO_OBJECT(oval);
    LIns* obj_ins = get(&oval);
    jsint idx = JSVAL_TO_INT(ival);
    LIns* idx_ins = makeNumberInt32(get(&ival));

    /* Make sure the object is actually a dense array. */
    if (!guardDenseArray(obj, obj_ins))
        return JSRS_STOP;

    VMSideExit* exit = snapshot(BRANCH_EXIT);

    /* Check that the index is within bounds. */
    LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    jsuint capacity = js_DenseArrayCapacity(obj);
    bool within = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
    if (!within) {
        /* If idx < 0, stay on trace (and read value as undefined, since this is a dense array). */
        LIns* br1 = NULL;
        if (MAX_DSLOTS_LENGTH > JS_BITMASK(30) && !idx_ins->isconst()) {
            JS_ASSERT(sizeof(jsval) == 8); // Only 64-bit machines support large enough arrays for this.
            br1 = lir->insBranch(LIR_jt,
                                 lir->ins2i(LIR_lt, idx_ins, 0),
                                 NULL);
        }

        /* If not idx < length, stay on trace (and read value as undefined). */
        LIns* br2 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_ult,
                                             idx_ins,
                                             stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
                                   NULL);

        /* If dslots is NULL, stay on trace (and read value as undefined). */
        LIns* br3 = lir->insBranch(LIR_jt, lir->ins_eq0(dslots_ins), NULL);

        /* If not idx < capacity, stay on trace (and read value as undefined). */
        LIns* br4 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_ult,
                                             idx_ins,
                                             lir->insLoad(LIR_ldp,
                                                          dslots_ins,
                                                          -(int)sizeof(jsval))),
                                   NULL);
        lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
        LIns* label = lir->ins0(LIR_label);
        if (br1)
            br1->setTarget(label);
        br2->setTarget(label);
        br3->setTarget(label);
        br4->setTarget(label);

        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));

        // Return undefined and indicate that we didn't actually read this (addr_ins).
        v_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
        addr_ins = NULL;
        return JSRS_CONTINUE;
    }

    /* Guard against a negative index. */
    if (MAX_DSLOTS_LENGTH > JS_BITMASK(30) && !idx_ins->isconst()) {
        JS_ASSERT(sizeof(jsval) == 8); // Only 64-bit machines support large enough arrays for this.
        guard(false,
              lir->ins2i(LIR_lt, idx_ins, 0),
              exit);
    }

    /* Guard array length. */
    guard(true,
          lir->ins2(LIR_ult, idx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
          exit);

    /* dslots must not be NULL. */
    guard(false,
          lir->ins_eq0(dslots_ins),
          exit);

    /* Guard array capacity. */
    guard(true,
          lir->ins2(LIR_ult,
                    idx_ins,
                    lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
          exit);

    /* Load the value and guard on its type to unbox it. */
    vp = &obj->dslots[jsuint(idx)];
    addr_ins = lir->ins2(LIR_piadd, dslots_ins,
                         lir->ins2i(LIR_pilsh, idx_ins, (sizeof(jsval) == 4) ? 2 : 3));
    v_ins = lir->insLoad(LIR_ldp, addr_ins, 0);
    unbox_jsval(*vp, v_ins, exit);

    if (JSVAL_TAG(*vp) == JSVAL_BOOLEAN) {
        /*
         * If we read a hole from the array, convert it to undefined and guard
         * that there are no indexed properties along the prototype chain.
         */
        LIns* br = lir->insBranch(LIR_jf,
                                  lir->ins2i(LIR_eq, v_ins, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_HOLE)),
                                  NULL);
        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
        br->setTarget(lir->ins0(LIR_label));

        /*
         * Don't let the hole value escape. Turn it into an undefined.
         */
        v_ins = lir->ins2i(LIR_and, v_ins, ~(JSVAL_HOLE_FLAG >> JSVAL_TAGBITS));
    }
    return JSRS_CONTINUE;
}
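
/*
 * Example of the hole path above (illustrative, added for exposition): for
 *
 *   var a = [];
 *   a[2] = 1;
 *   a[0];          // reads a hole
 *
 * the slot load yields JSVAL_HOLE, so the recorder masks it to undefined and
 * guards that no prototype supplies an indexed property that a hole read
 * would otherwise have to find.
 */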

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
{
    uint32 slot;
    LIns* v_ins;
    CHECK_STATUS(prop(obj, obj_ins, slot, v_ins));

    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    JS_ASSERT(cs.ndefs == 1);
    stack(-cs.nuses, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::getProp(jsval& v)
{
    if (JSVAL_IS_PRIMITIVE(v))
        ABORT_TRACE("primitive lhs");

    return getProp(JSVAL_TO_OBJECT(v), get(&v));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NAME()
{
    jsval* vp;
    CHECK_STATUS(name(vp));
    stack(0, get(vp));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DOUBLE()
{
    jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
    stack(0, lir->insImmf(*JSVAL_TO_DOUBLE(v)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRING()
{
    JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    JS_ASSERT(ATOM_IS_STRING(atom));
    stack(0, INS_CONSTPTR(ATOM_TO_STRING(atom)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ZERO()
{
    stack(0, lir->insImmq(0));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ONE()
{
    stack(0, lir->insImmf(1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NULL()
{
    stack(0, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THIS()
{
    LIns* this_ins;
    CHECK_STATUS(getThis(this_ins));
    stack(0, this_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FALSE()
{
    stack(0, lir->insImm(0));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRUE()
{
    stack(0, lir->insImm(1));
    return JSRS_CONTINUE;
}
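
/*
 * Note on the immediates above (added for exposition; an inference from this
 * code, not stated by the original comments): on trace, numbers live unboxed
 * as LIR immediates (insImmq/insImmf) and booleans as raw 32-bit ints
 * (insImm 0/1), so JSOP_FALSE/JSOP_TRUE never touch boxed jsvals; the
 * box_jsval/unbox_jsval helpers used elsewhere in this file convert at the
 * boundaries where values enter or leave the trace.
 */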

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_OR()
{
    return ifop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_AND()
{
    return ifop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCH()
{
#ifdef NANOJIT_IA32
    /* Handle tableswitches specially -- prepare a jump table if needed. */
    LIns* guardIns = tableswitch();
    if (guardIns) {
        fragment->lastIns = guardIns;
        compile(&JS_TRACE_MONITOR(cx));
    }
    return JSRS_STOP;
#else
    return switchop();
#endif
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCH()
{
    return switchop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRICTEQ()
{
    strictEquality(true, false);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRICTNE()
{
    strictEquality(false, false);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_OBJECT()
{
    JSStackFrame* fp = cx->fp;
    JSScript* script = fp->script;
    unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);

    JSObject* obj;
    JS_GET_SCRIPT_OBJECT(script, index, obj);
    stack(0, INS_CONSTPTR(obj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_POP()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRAP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETARG()
{
    stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETARG()
{
    arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETLOCAL()
{
    stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETLOCAL()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_UINT16()
{
    stack(0, lir->insImmf(GET_UINT16(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEWINIT()
{
    JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
    LIns *proto_ins;
    CHECK_STATUS(getClassPrototype(key, proto_ins));

    LIns* args[] = { proto_ins, cx_ins };
    const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci : &js_Object_tn_ci;
    LIns* v_ins = lir->insCall(ci, args);
    guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDINIT()
{
#ifdef DEBUG
    jsval& v = stackval(-1);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
#endif
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INITPROP()
{
    // All the action is in record_SetPropHit.
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INITELEM()
{
    return record_JSOP_SETELEM();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFSHARP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_USESHARP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCARG()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCLOCAL()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECARG()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECLOCAL()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGINC()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOCALINC()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGDEC()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOCALDEC()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IMACOP()
{
    JS_ASSERT(cx->fp->imacpc);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ITER()
{
    jsval& v = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(v))
        ABORT_TRACE("for-in on a primitive value");
    ABORT_IF_XML(v);

    jsuint flags = cx->fp->regs->pc[1];

    if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
        if (flags == JSITER_ENUMERATE)
            return call_imacro(iter_imacros.for_in);
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return call_imacro(iter_imacros.for_each);
    } else {
        if (flags == JSITER_ENUMERATE)
            return call_imacro(iter_imacros.for_in_native);
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return call_imacro(iter_imacros.for_each_native);
    }
    ABORT_TRACE("unimplemented JSITER_* flags");
}
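
/*
 * Illustrative mapping (added for exposition, inferred from the flag tests
 * above):
 *
 *   for (var p in o)        // JSITER_ENUMERATE                 -> for_in
 *   for each (var v in o)   // JSITER_ENUMERATE | JSITER_FOREACH -> for_each
 *
 * with the *_native imacro variants selected when the object has no custom
 * iterator method.
 */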

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEXTITER()
{
    jsval& iterobj_val = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(iterobj_val))
        ABORT_TRACE("for-in on a primitive value");
    ABORT_IF_XML(iterobj_val);
    JSObject* iterobj = JSVAL_TO_OBJECT(iterobj_val);
    JSClass* clasp = STOBJ_GET_CLASS(iterobj);
    LIns* iterobj_ins = get(&iterobj_val);
    if (clasp == &js_IteratorClass || clasp == &js_GeneratorClass) {
        guardClass(iterobj, iterobj_ins, clasp, snapshot(BRANCH_EXIT));
        return call_imacro(nextiter_imacros.native_iter_next);
    }
    return call_imacro(nextiter_imacros.custom_iter_next);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDITER()
{
    LIns* args[] = { stack(-2), cx_ins };
    LIns* ok_ins = lir->insCall(&js_CloseIterator_ci, args);
    guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORNAME()
{
    jsval* vp;
    CHECK_STATUS(name(vp));
    set(vp, stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORPROP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORELEM()
{
    return record_JSOP_DUP();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORARG()
{
    return record_JSOP_SETARG();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORLOCAL()
{
    return record_JSOP_SETLOCAL();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_POPN()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BINDNAME()
{
    JSStackFrame *fp = cx->fp;
    JSObject *obj;

    if (fp->fun) {
        // We can't trace BINDNAME in functions that contain direct
        // calls to eval, as they might add bindings which
        // previously-traced references would have to see.
        if (JSFUN_HEAVYWEIGHT_TEST(fp->fun->flags))
            ABORT_TRACE("Can't trace JSOP_BINDNAME in heavyweight functions.");

        // In non-heavyweight functions, we can safely skip the call
        // object, if any.
        obj = OBJ_GET_PARENT(cx, fp->callee);
    } else {
        obj = fp->scopeChain;

        // In global code, fp->scopeChain can only contain blocks
        // whose values are still on the stack. We never use BINDNAME
        // to refer to these.
        while (OBJ_GET_CLASS(cx, obj) == &js_BlockClass) {
            // The block's values are still on the stack.
            JS_ASSERT(OBJ_GET_PRIVATE(cx, obj) == fp);

            obj = OBJ_GET_PARENT(cx, obj);

            // Blocks always have parents.
            JS_ASSERT(obj);
        }
    }

    if (obj != globalObj)
        ABORT_TRACE("JSOP_BINDNAME must return global object on trace");

    // The trace is specialized to this global object. Furthermore,
    // we know it is the sole 'global' object on the scope chain: we
    // set globalObj to the scope chain element with no parent, and we
    // reached it starting from the function closure or the current
    // scopeChain, so there is nothing inner to it. So this must be
    // the right base object.
    stack(0, INS_CONSTPTR(globalObj));
    return JSRS_CONTINUE;
}
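
/*
 * Illustrative example (added for exposition): in
 *
 *   var g = 0;
 *   function f() { g = 1; }   // BINDNAME "g"; ...; SETNAME "g"
 *
 * f is lightweight, so the recorder can resolve the binding to globalObj at
 * record time and push it as a constant.
 */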

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETNAME()
{
    jsval& l = stackval(-2);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));

    /*
     * Trace only the cases that are global code or are in lightweight
     * functions scoped by the global object only.
     */
    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj != cx->fp->scopeChain || obj != globalObj)
        ABORT_TRACE("JSOP_SETNAME left operand is not the global object");

    // The rest of the work is in record_SetPropHit.
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THROW()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IN()
{
    jsval& rval = stackval(-1);
    jsval& lval = stackval(-2);

    if (JSVAL_IS_PRIMITIVE(rval))
        ABORT_TRACE("JSOP_IN on non-object right operand");
    JSObject* obj = JSVAL_TO_OBJECT(rval);
    LIns* obj_ins = get(&rval);

    jsid id;
    LIns* x;
    if (JSVAL_IS_INT(lval)) {
        id = INT_JSVAL_TO_JSID(lval);
        LIns* args[] = { makeNumberInt32(get(&lval)), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
    } else if (JSVAL_IS_STRING(lval)) {
        if (!js_ValueToStringId(cx, lval, &id))
            ABORT_TRACE_ERROR("left operand of JSOP_IN didn't convert to a string-id");
        LIns* args[] = { get(&lval), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedProperty_ci, args);
    } else {
        ABORT_TRACE("string or integer expected");
    }

    guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
    x = lir->ins2i(LIR_eq, x, 1);

    JSObject* obj2;
    JSProperty* prop;
    if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop))
        ABORT_TRACE_ERROR("OBJ_LOOKUP_PROPERTY failed in JSOP_IN");
    bool cond = prop != NULL;
    if (prop)
        OBJ_DROP_PROPERTY(cx, obj2, prop);

    /*
     * The interpreter fuses comparisons and the following branch, so we have
     * to do that here as well.
     */
    fuseIf(cx->fp->regs->pc + 1, cond, x);

    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    set(&lval, x);
    return JSRS_CONTINUE;
}
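
/*
 * Illustrative example (added for exposition): for
 *
 *   if ("x" in o) ...
 *
 * the property lookup runs once at record time; the guard on the fused
 * comparison-and-branch returns to the interpreter if a later run disagrees
 * with the recorded outcome.
 */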

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INSTANCEOF()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEBUGGER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOSUB()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RETSUB()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_EXCEPTION()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LINENO()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CONDSWITCH()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CASE()
{
    strictEquality(true, true);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFAULT()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_EVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENUMELEM()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFFUN()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFFUN_FC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFCONST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFVAR()
{
    return JSRS_STOP;
}

jsatomid
TraceRecorder::getFullIndex(ptrdiff_t pcoff)
{
    jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff);
    index += atoms - cx->fp->script->atomMap.vector;
    return index;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LAMBDA()
{
    JSFunction* fun;
    JS_GET_SCRIPT_FUNCTION(cx->fp->script, getFullIndex(), fun);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns *proto_ins;
        CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTPTR(globalObj), proto_ins, INS_CONSTPTR(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        stack(0, x);
        return JSRS_CONTINUE;
    }
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LAMBDA_FC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLEE()
{
    stack(0, INS_CONSTPTR(cx->fp->callee));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFPRIMTOP()
{
    // Traces are type-specialized, including null vs. object, so we need do
    // nothing here. The upstream unbox_jsval called after valueOf or toString
    // from an imacro (e.g.) will fork the trace for us, allowing us to just
    // follow along mindlessly :-).
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETCALL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRY()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FINALLY()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NOP()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGSUB()
{
    JSStackFrame* fp = cx->fp;
    if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
        uintN slot = GET_ARGNO(fp->regs->pc);
        if (slot < fp->fun->nargs && slot < fp->argc && !fp->argsobj) {
            stack(0, get(&cx->fp->argv[slot]));
            return JSRS_CONTINUE;
        }
    }
    ABORT_TRACE("can't trace JSOP_ARGSUB hard case");
}
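
/*
 * Illustrative example (added for exposition): in a lightweight function
 *
 *   function f(x, y) { return arguments[0]; }   // JSOP_ARGSUB 0
 *
 * the constant index lets the recorder read fp->argv[0] directly, with no
 * arguments object ever being created.
 */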

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGCNT()
{
    if (!(cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
        stack(0, lir->insImmf(cx->fp->argc));
        return JSRS_CONTINUE;
    }
    ABORT_TRACE("can't trace heavyweight JSOP_ARGCNT");
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
{
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns *proto_ins;
        CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTPTR(globalObj), proto_ins, INS_CONSTPTR(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        var(slot, x);
        return JSRS_CONTINUE;
    }

    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOTOX()
{
    return record_JSOP_GOTO();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFEQX()
{
    return record_JSOP_IFEQ();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CASEX()
{
    strictEquality(true, true);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFAULTX()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return record_JSOP_TABLESWITCH();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return switchop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THROWING()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RETRVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return JSRS_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
    return JSRS_CONTINUE;
}
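
/*
 * Note on the JSOP_*GVAR family (added for exposition): the interpreter
 * caches a global's slot number in fp->slots[] as an int jsval; a null there
 * means the fast path is invalid, the interpreter will fall back to the
 * corresponding JSOP_*NAME opcode, and the recorder simply no-ops and waits
 * for that opcode.
 */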

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return JSRS_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_DECNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GVARINC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_NAMEINC from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GVARDEC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_NAMEDEC from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_REGEXP()
{
    return JSRS_STOP;
}

// begin JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ANYNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAMEPART()
{
    return record_JSOP_STRING();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAMECONST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FILTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDFILTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOXML()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLOBJECT()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLCDATA()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLPI()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETFUNNS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STARTXML()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return JSRS_STOP;
}

// end JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLPROP()
{
    jsval& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    LIns* this_ins;
    if (!JSVAL_IS_PRIMITIVE(l)) {
        obj = JSVAL_TO_OBJECT(l);
        obj_ins = get(&l);
        this_ins = obj_ins; // |this| for subsequent call
    } else {
        jsint i;
        debug_only(const char* protoname = NULL;)
        if (JSVAL_IS_STRING(l)) {
            i = JSProto_String;
            debug_only(protoname = "String.prototype";)
        } else if (JSVAL_IS_NUMBER(l)) {
            i = JSProto_Number;
            debug_only(protoname = "Number.prototype";)
        } else if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
            if (l == JSVAL_VOID)
                ABORT_TRACE("callprop on void");
            guard(false, lir->ins2i(LIR_eq, get(&l), JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), MISMATCH_EXIT);
            i = JSProto_Boolean;
            debug_only(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
            ABORT_TRACE("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
            ABORT_TRACE_ERROR("GetClassPrototype failed!");

        obj_ins = INS_CONSTPTR(obj);
        debug_only(obj_ins = addName(obj_ins, protoname);)
        this_ins = get(&l); // use primitive as |this|
    }

    JSObject* obj2;
    jsuword pcval;
    CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));

    if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
        ABORT_TRACE("callee is not an object");
    JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));

    if (JSVAL_IS_PRIMITIVE(l)) {
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, PCVAL_TO_OBJECT(pcval));
        if (!PRIMITIVE_THIS_TEST(fun, l))
            ABORT_TRACE("callee does not accept primitive |this|");
    }

    stack(0, this_ins);
    stack(-1, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
    return JSRS_CONTINUE;
}
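
/*
 * Illustrative example (added for exposition): for
 *
 *   "foo".charAt(0)   // JSOP_CALLPROP "charAt"
 *
 * the receiver is a primitive string, so the method is looked up on
 * String.prototype while the primitive itself is kept as |this|, provided
 * PRIMITIVE_THIS_TEST accepts it for the callee.
 */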

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DELDESC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_UINT24()
{
    stack(0, lir->insImmf(GET_UINT24(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->fp->regs->pc);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RESETBASE()
{
    atoms = cx->fp->script->atomMap.vector;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RESETBASE0()
{
    atoms = cx->fp->script->atomMap.vector;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLELEM()
{
    return record_JSOP_GETELEM();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STOP()
{
    JSStackFrame *fp = cx->fp;

    if (fp->imacpc) {
        // End of imacro, so return true to the interpreter immediately. The
        // interpreter's JSOP_STOP case will return from the imacro, back to
        // the pc after the calling op, still in the same JSStackFrame.
        atoms = fp->script->atomMap.vector;
        return JSRS_CONTINUE;
    }

    /*
     * We know falling off the end of a constructor returns the new object that
     * was passed in via fp->argv[-1], while falling off the end of a function
     * returns undefined.
     *
     * NB: we do not support script rval (eval, API users who want the result
     * of the last expression-statement, debugger API calls).
     */
    if (fp->flags & JSFRAME_CONSTRUCTING) {
        JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
        rval_ins = get(&fp->argv[-1]);
    } else {
        rval_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
    }
    clearFrameSlotsFromCache();
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETXPROP()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        ABORT_TRACE("primitive-this for GETXPROP?");

    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj != cx->fp->scopeChain || obj != globalObj)
        return JSRS_STOP;

    jsval* vp;
    CHECK_STATUS(name(vp));
    stack(-1, get(vp));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    JSObject* obj;
    JS_GET_SCRIPT_OBJECT(cx->fp->script, getFullIndex(0), obj);

    LIns* void_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
    for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
        stack(i, void_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    /* We mustn't exit the lexical block we began recording in. */
    if (cx->fp->blockChain != lexicalBlock)
        return JSRS_CONTINUE;
    else
        return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GENERATOR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_YIELD()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    uint32_t slot = GET_UINT16(cx->fp->regs->pc);
    JS_ASSERT(cx->fp->script->nfixed <= slot);
    JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
    jsval &arrayval = cx->fp->slots[slot];
    JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
    JS_ASSERT(OBJ_IS_DENSE_ARRAY(cx, JSVAL_TO_OBJECT(arrayval)));
    LIns *array_ins = get(&arrayval);
    jsval &elt = stackval(-1);
    LIns *elt_ins = get(&elt);
    box_jsval(elt, elt_ins);

    LIns *args[] = { elt_ins, array_ins, cx_ins };
    LIns *ok_ins = lir->insCall(&js_ArrayCompPush_ci, args);
    guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
    return JSRS_CONTINUE;
}
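
/*
 * Illustrative example (added for exposition): array comprehensions such as
 *
 *   [x * 2 for (x in o)]
 *
 * compile to JSOP_ARRAYPUSH against a fixed slot holding the result array;
 * the recorder funnels each element through js_ArrayCompPush on trace.
 */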

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    LIns* v_ins = stack(-1);
    int n = -1 - GET_UINT16(cx->fp->regs->pc);
    stack(n, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    CHECK_STATUS(getThis(this_ins));

    /*
     * It's safe to just use cx->fp->thisp here because getThis() returns
     * JSRS_STOP if thisp is not available.
     */
    CHECK_STATUS(getProp(cx->fp->thisp, this_ins));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE1()
{
    atoms += 1 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE2()
{
    atoms += 2 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE3()
{
    atoms += 3 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    jsval& v = STOBJ_GET_SLOT(globalObj, slot);
    stack(0, get(&v));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->fp->regs->pc);
    stack(0, var(slot));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->fp->regs->pc);
    stack(0, arg(slot));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

/* Functions for use with JSOP_CALLBUILTIN. */

static JSBool
ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
{
    jsval *argv = JS_ARGV(cx, vp);
    JS_ASSERT(JSVAL_IS_INT(argv[0]));
    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
}

static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
    jsval v = OBJECT_TO_JSVAL(obj);
    JSBool ok = js_ValueToIterator(cx, flags, &v);

    if (!ok) {
        js_SetBuiltinError(cx);
        return NULL;
    }
    return JSVAL_TO_OBJECT(v);
}

static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
}

static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
    JSAutoTempValueRooter tvr(cx);
    JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());

    if (!ok) {
        js_SetBuiltinError(cx);
        return JSVAL_ERROR_COOKIE;
    }
    return tvr.value();
}

JS_DEFINE_TRCINFO_1(ObjectToIterator,
    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
    (3, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, PC, THIS, 0, 0)))

static const struct BuiltinFunctionInfo {
    JSTraceableNative *tn;
    int nargs;
} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
    {ObjectToIterator_trcinfo, 1},
    {CallIteratorNext_trcinfo, 0},
    {GetProperty_trcinfo, 1},
    {GetElement_trcinfo, 1},
    {SetProperty_trcinfo, 2},
    {SetElement_trcinfo, 2}
};

JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index)
{
    JSRuntime *rt = cx->runtime;
    JSObject *funobj = rt->builtinFunctions[index];

    if (!funobj) {
        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
        JS_ASSERT(index < JS_ARRAY_LENGTH(builtinFunctionInfo));
        const BuiltinFunctionInfo *bfi = &builtinFunctionInfo[index];
        JSFunction *fun = js_NewFunction(cx,
                                         NULL,
                                         JS_DATA_TO_FUNC_PTR(JSNative, bfi->tn),
                                         bfi->nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRACEABLE,
                                         NULL,
                                         NULL);
        if (fun) {
            funobj = FUN_OBJECT(fun);
            STOBJ_CLEAR_PROTO(funobj);
            STOBJ_CLEAR_PARENT(funobj);

            JS_LOCK_GC(rt);
            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
                rt->builtinFunctions[index] = funobj;
            else
                funobj = rt->builtinFunctions[index];
            JS_UNLOCK_GC(rt);
        }
    }
    return funobj;
}
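
/*
 * Note (added for exposition): the lazily-built builtin above is published
 * with a double-checked pattern -- the function object is created without the
 * lock, then rt->builtinFunctions[index] is retested under the GC lock, so a
 * racing thread's object wins at most once and the loser is simply left for
 * the GC to collect.
 */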

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLBUILTIN()
{
    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
    if (!obj)
        ABORT_TRACE_ERROR("error in js_GetBuiltinFunction");

    stack(0, get(&stackval(-1)));
    stack(-1, INS_CONSTPTR(obj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, lir->insImmf(GET_INT8(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, lir->insImmf(GET_INT32(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            ABORT_TRACE("non-string primitive JSOP_LENGTH unsupported");
        set(&l, lir->ins1(LIR_i2f, getStringLength(get(&l))));
        return JSRS_CONTINUE;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);
    LIns* v_ins;
    if (OBJ_IS_ARRAY(cx, obj)) {
        if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
            if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
                JS_NOT_REACHED("OBJ_IS_DENSE_ARRAY but not?!?");
                return JSRS_STOP;
            }
        } else {
            if (!guardClass(obj, obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT)))
                ABORT_TRACE("can't trace length property access on non-array");
        }
        v_ins = lir->ins1(LIR_i2f, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH));
    } else {
        if (!OBJ_IS_NATIVE(obj))
            ABORT_TRACE("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return JSRS_CONTINUE;
}
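
/*
 * Illustrative example (added for exposition): "abc".length traces through
 * the string branch above via getStringLength, while [1, 2, 3].length is a
 * direct read of the JSSLOT_ARRAY_LENGTH fslot guarded by the array-class
 * check.
 */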

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEWARRAY()
{
    LIns *proto_ins;
    CHECK_STATUS(getClassPrototype(JSProto_Array, proto_ins));

    uint32 len = GET_UINT16(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(len);

    LIns* args[] = { lir->insImm(len), proto_ins, cx_ins };
    LIns* v_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
    guard(false, lir->ins_eq0(v_ins), OOM_EXIT);

    LIns* dslots_ins = NULL;
    uint32 count = 0;
    for (uint32 i = 0; i < len; i++) {
        jsval& v = stackval(int(i) - int(len));
        if (v != JSVAL_HOLE)
            count++;
        LIns* elt_ins = get(&v);
        box_jsval(v, elt_ins);
        stobj_set_dslot(v_ins, i, dslots_ins, elt_ins, "set_array_elt");
    }

    LIns* dummy = NULL;
    if (count > 0)
        stobj_set_slot(v_ins, JSSLOT_ARRAY_COUNT, dummy, INS_CONST(count));

    stack(-int(len), v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_HOLE)));
    return JSRS_CONTINUE;
}

JSRecordingStatus
TraceRecorder::record_JSOP_LOOP()
{
    return JSRS_CONTINUE;
}

#ifdef JS_JIT_SPEW
/* Prints information about entry typemaps and unstable exits for all peers at a PC. */
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
                     uint32 argc)
{
    Fragment* f;
    TreeInfo* ti;
    bool looped = false;
    unsigned length = 0;

    for (f = getLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
        if (!f->vmprivate)
            continue;
        printf("fragment %p:\nENTRY: ", (void*)f);
        ti = (TreeInfo*)f->vmprivate;
        if (looped)
            JS_ASSERT(ti->nStackTypes == length);
        for (unsigned i = 0; i < ti->nStackTypes; i++)
            printf("S%d ", ti->stackTypeMap()[i]);
        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
            printf("G%d ", ti->globalTypeMap()[i]);
        printf("\n");
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            printf("EXIT: ");
            uint8* m = getFullTypeMap(uexit->exit);
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                printf("S%d ", m[i]);
            for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++)
                printf("G%d ", m[uexit->exit->numStackSlots + i]);
            printf("\n");
            uexit = uexit->next;
        }
        length = ti->nStackTypes;
        looped = true;
    }
}
#endif

#define UNUSED(n)                                                             \
    JS_REQUIRES_STACK bool                                                    \
    TraceRecorder::record_JSOP_UNUSED##n() {                                  \
        JS_NOT_REACHED("JSOP_UNUSED" # n);                                    \
        return false;                                                         \