Bug 492904 - TM: Crash [@ TraceRecorder::test_property_cache] (r=gal).
js/src/jstracer.cpp

/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=4 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   Andreas Gal <gal@mozilla.com>
 *   Mike Shaver <shaver@mozilla.org>
 *   David Anderson <danderson@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#include "jsstdint.h"
#include "jsbit.h"              // low-level (NSPR-based) headers next
#include "jsprf.h"
#include <math.h>               // standard headers next

#if defined(_MSC_VER) || defined(__MINGW32__)
#include <malloc.h>
#ifdef _MSC_VER
#define alloca _alloca
#endif
#endif
#ifdef SOLARIS
#include <alloca.h>
#endif
#include <limits.h>

#include "nanojit/nanojit.h"
#include "jsapi.h"              // higher-level library and API headers
#include "jsarray.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsdbgapi.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jsobj.h"
#include "jsopcode.h"
#include "jsregexp.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsdate.h"
#include "jsstaticcheck.h"
#include "jstracer.h"
#include "jsxml.h"

#include "jsautooplen.h"        // generated headers last
#include "imacros.c.out"

#if JS_HAS_XML_SUPPORT
#define ABORT_IF_XML(v)                                                       \
    JS_BEGIN_MACRO                                                            \
    if (!JSVAL_IS_PRIMITIVE(v) && OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(v)))\
        ABORT_TRACE("xml detected");                                          \
    JS_END_MACRO
#else
#define ABORT_IF_XML(v) ((void) 0)
#endif

/* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
   the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then
   handle the undefined case properly (bug 457363). */
#undef JSVAL_IS_BOOLEAN
#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)

/* Use a fake tag to represent boxed values, borrowing from the integer tag
   range since we only use JSVAL_INT to indicate integers. */
#define JSVAL_BOXED 3

/* Another fake jsval tag, used to distinguish null from object values. */
#define JSVAL_TNULL 5

/* A last fake jsval tag distinguishing functions from non-function objects. */
#define JSVAL_TFUN 7

/* Map to translate a type tag into a printable representation. */
static const char typeChar[] = "OIDXSNBF";
static const char tagChar[]  = "OIDISIBI";

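/*
 * Illustration (added note, not in the original source): these strings are
 * indexed by type tag value. Assuming the standard jsval tags (JSVAL_OBJECT
 * == 0, JSVAL_INT == 1, JSVAL_DOUBLE == 2, ...) plus the fake tags defined
 * above, typeChar[JSVAL_DOUBLE] is 'D', typeChar[JSVAL_BOXED] is 'X', and
 * typeChar[JSVAL_TFUN] is 'F'.
 */
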
/* Blacklist parameters. */

/* Number of iterations of a loop where we start tracing. That is, we don't
   start tracing until the beginning of the HOTLOOP-th iteration. */
#define HOTLOOP 2

/* Attempt recording this many times before blacklisting permanently. */
#define BL_ATTEMPTS 2

/* Skip this many hits before attempting recording again, after an aborted attempt. */
#define BL_BACKOFF 32

/* Number of times we wait to exit on a side exit before we try to extend the tree. */
#define HOTEXIT 1

/* Number of times we try to extend the tree along a side exit. */
#define MAXEXIT 3

/* Maximum number of peer trees allowed. */
#define MAXPEERS 9

/* Max call depths for inlining. */
#define MAX_CALLDEPTH 10

/* Max native stack size. */
#define MAX_NATIVE_STACK_SLOTS 1024

/* Max call stack size. */
#define MAX_CALL_STACK_ENTRIES 64

/* Max global object size. */
#define MAX_GLOBAL_SLOTS 4096

/* Max memory you can allocate in a LIR buffer via a single insSkip() call. */
#define MAX_SKIP_BYTES (NJ_PAGE_SIZE - sizeof(LIns))

/* Max memory needed to rebuild the interpreter stack when falling off trace. */
#define MAX_INTERP_STACK_BYTES                                                \
    (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
     MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
     sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */

/* Max number of branches per tree. */
#define MAX_BRANCHES 32

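/*
 * Worked example (added note, not in the original source): with the values
 * above, a loop must be hit HOTLOOP (2) times before recording starts; an
 * aborted recording backs the loop's hit counter off by BL_BACKOFF (32), and
 * once the attempt counter passes BL_ATTEMPTS the loop header is blacklisted
 * by rewriting its JSOP_LOOP bytecode to JSOP_NOP (see js_Blacklist below).
 */
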
#define CHECK_STATUS(expr)                                                    \
    JS_BEGIN_MACRO                                                            \
        JSRecordingStatus _status = (expr);                                   \
        if (_status != JSRS_CONTINUE)                                         \
            return _status;                                                   \
    JS_END_MACRO

#ifdef JS_JIT_SPEW
#define debug_only_a(x) if (js_verboseAbort || js_verboseDebug) { x; }
#define ABORT_TRACE_RV(msg, value)                                            \
    JS_BEGIN_MACRO                                                            \
        debug_only_a(fprintf(stdout, "abort: %d: %s\n", __LINE__, (msg));)    \
        return (value);                                                       \
    JS_END_MACRO
#else
#define debug_only_a(x)
#define ABORT_TRACE_RV(msg, value)  return (value)
#endif

#define ABORT_TRACE(msg)        ABORT_TRACE_RV(msg, JSRS_STOP)
#define ABORT_TRACE_ERROR(msg)  ABORT_TRACE_RV(msg, JSRS_ERROR)

#ifdef JS_JIT_SPEW
struct __jitstats {
#define JITSTAT(x) uint64 x;
#include "jitstats.tbl"
#undef JITSTAT
} jitstats = { 0LL, };

JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);

enum jitstat_ids {
#define JITSTAT(x) STAT ## x ## ID,
#include "jitstats.tbl"
#undef JITSTAT
    STAT_IDS_TOTAL
};

static JSPropertySpec jitstats_props[] = {
#define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
#include "jitstats.tbl"
#undef JITSTAT
    { 0 }
};

static JSBool
jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
{
    int index = -1;

    if (JSVAL_IS_STRING(id)) {
        JSString* str = JSVAL_TO_STRING(id);
        if (strcmp(JS_GetStringBytes(str), "HOTLOOP") == 0) {
            *vp = INT_TO_JSVAL(HOTLOOP);
            return JS_TRUE;
        }
    }

    if (JSVAL_IS_INT(id))
        index = JSVAL_TO_INT(id);

    uint64 result = 0;
    switch (index) {
#define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
#include "jitstats.tbl"
#undef JITSTAT
      default:
        *vp = JSVAL_VOID;
        return JS_TRUE;
    }

    if (result < JSVAL_INT_MAX) {
        *vp = INT_TO_JSVAL(result);
        return JS_TRUE;
    }
    char retstr[64];
    JS_snprintf(retstr, sizeof retstr, "%llu", result);
    *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
    return JS_TRUE;
}

JSClass jitstats_class = {
    "jitstats",
    JSCLASS_HAS_PRIVATE,
    JS_PropertyStub,       JS_PropertyStub,
    jitstats_getProperty,  JS_PropertyStub,
    JS_EnumerateStub,      JS_ResolveStub,
    JS_ConvertStub,        JS_FinalizeStub,
    JSCLASS_NO_OPTIONAL_MEMBERS
};

void
js_InitJITStatsClass(JSContext *cx, JSObject *glob)
{
    JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
}

#define AUDIT(x) (jitstats.x++)
#else
#define AUDIT(x) ((void)0)
#endif /* JS_JIT_SPEW */

#define INS_CONST(c)        addName(lir->insImm(c), #c)
#define INS_CONSTPTR(p)     addName(lir->insImmPtr(p), #p)
#define INS_CONSTFUNPTR(p)  addName(lir->insImmPtr(JS_FUNC_TO_DATA_PTR(void*, p)), #p)
#define INS_CONSTWORD(v)    addName(lir->insImmPtr((void *) v), #v)

using namespace avmplus;
using namespace nanojit;

static GC gc = GC();
static avmplus::AvmCore s_core = avmplus::AvmCore();
static avmplus::AvmCore* core = &s_core;

#ifdef JS_JIT_SPEW
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
#endif

/* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
static bool did_we_check_processor_features = false;

#ifdef JS_JIT_SPEW
bool js_verboseDebug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
bool js_verboseStats = js_verboseDebug ||
    (getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "stats"));
bool js_verboseAbort = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "abort");
#endif

/* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
   case cause performance regressions. */
static Oracle oracle;

Tracker::Tracker()
{
    pagelist = 0;
}

Tracker::~Tracker()
{
    clear();
}

jsuword
Tracker::getPageBase(const void* v) const
{
    return jsuword(v) & ~jsuword(NJ_PAGE_SIZE-1);
}

struct Tracker::Page*
Tracker::findPage(const void* v) const
{
    jsuword base = getPageBase(v);
    struct Tracker::Page* p = pagelist;
    while (p) {
        if (p->base == base) {
            return p;
        }
        p = p->next;
    }
    return 0;
}

struct Tracker::Page*
Tracker::addPage(const void* v) {
    jsuword base = getPageBase(v);
    struct Tracker::Page* p = (struct Tracker::Page*)
        GC::Alloc(sizeof(*p) - sizeof(p->map) + (NJ_PAGE_SIZE >> 2) * sizeof(LIns*));
    p->base = base;
    p->next = pagelist;
    pagelist = p;
    return p;
}

void
Tracker::clear()
{
    while (pagelist) {
        Page* p = pagelist;
        pagelist = pagelist->next;
        GC::Free(p);
    }
}

bool
Tracker::has(const void *v) const
{
    return get(v) != NULL;
}

#if defined NANOJIT_64BIT
#define PAGEMASK 0x7ff
#else
#define PAGEMASK 0xfff
#endif

LIns*
Tracker::get(const void* v) const
{
    struct Tracker::Page* p = findPage(v);
    if (!p)
        return NULL;
    return p->map[(jsuword(v) & PAGEMASK) >> 2];
}

void
Tracker::set(const void* v, LIns* i)
{
    struct Tracker::Page* p = findPage(v);
    if (!p)
        p = addPage(v);
    p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
}

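/*
 * Usage sketch (added note, not in the original source): the Tracker maps the
 * address of an interpreter-visible jsval to the LIR instruction that
 * computes its value on trace:
 *
 *   LIns* ins = lir->insLoad(LIR_ldp, base, offset);
 *   tracker.set(vp, ins);              // remember where this slot lives
 *   JS_ASSERT(tracker.get(vp) == ins); // and find it again by address
 *
 * Pages are keyed by the page base of the jsval's address, so get/set walk
 * the page list once and then index the per-page map in constant time.
 */
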
static inline jsuint argSlots(JSStackFrame* fp)
{
    return JS_MAX(fp->argc, fp->fun->nargs);
}

static inline bool isNumber(jsval v)
{
    return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
}

static inline jsdouble asNumber(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_DOUBLE(v))
        return *JSVAL_TO_DOUBLE(v);
    return (jsdouble)JSVAL_TO_INT(v);
}

static inline bool isInt32(jsval v)
{
    if (!isNumber(v))
        return false;
    jsdouble d = asNumber(v);
    jsint i;
    return JSDOUBLE_IS_INT(d, i);
}

static inline jsint asInt32(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_INT(v))
        return JSVAL_TO_INT(v);
#ifdef DEBUG
    jsint i;
    JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i));
#endif
    return jsint(*JSVAL_TO_DOUBLE(v));
}

/* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */
static inline uint8 getPromotedType(jsval v)
{
    if (JSVAL_IS_INT(v))
        return JSVAL_DOUBLE;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return JSVAL_TNULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return JSVAL_TFUN;
        return JSVAL_OBJECT;
    }
    return uint8(JSVAL_TAG(v));
}

/* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */
static inline uint8 getCoercedType(jsval v)
{
    if (isInt32(v))
        return JSVAL_INT;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return JSVAL_TNULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return JSVAL_TFUN;
        return JSVAL_OBJECT;
    }
    return uint8(JSVAL_TAG(v));
}

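/*
 * Example (added note, not in the original source): for v = INT_TO_JSVAL(5),
 * getCoercedType(v) is JSVAL_INT since 5 fits into a signed 32-bit integer,
 * while getPromotedType(v) is JSVAL_DOUBLE because on trace all arithmetic
 * is performed on doubles.
 */
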
/*
 * Constant seed and accumulate step borrowed from the DJB hash.
 */
#define ORACLE_MASK (ORACLE_SIZE - 1)
#define FRAGMENT_TABLE_MASK (FRAGMENT_TABLE_SIZE - 1)
#define HASH_SEED 5381

static inline void
hash_accum(uintptr_t& h, uintptr_t i, uintptr_t mask)
{
    h = ((h << 5) + h + (mask & i)) & mask;
}

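/*
 * Sketch (added note, not in the original source): one DJB step computes
 * h = h * 33 + i, written above as (h << 5) + h + i; masking the input and
 * the accumulator keeps the hash a valid bitmap index, so e.g.
 * hash_accum(h, uintptr_t(pc), ORACLE_MASK) always stays below ORACLE_SIZE.
 */
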
JS_REQUIRES_STACK static inline int
stackSlotHash(JSContext* cx, unsigned slot)
{
    uintptr_t h = HASH_SEED;
    hash_accum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
    hash_accum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
    hash_accum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}

JS_REQUIRES_STACK static inline int
globalSlotHash(JSContext* cx, unsigned slot)
{
    uintptr_t h = HASH_SEED;
    JSStackFrame* fp = cx->fp;

    while (fp->down)
        fp = fp->down;

    hash_accum(h, uintptr_t(fp->script), ORACLE_MASK);
    hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))),
               ORACLE_MASK);
    hash_accum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}

Oracle::Oracle()
{
    /* Grow the oracle bitsets to their (fixed) size here, once. */
    _stackDontDemote.set(&gc, ORACLE_SIZE-1);
    _globalDontDemote.set(&gc, ORACLE_SIZE-1);
    clear();
}

/* Tell the oracle that a certain global variable should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
{
    _globalDontDemote.set(&gc, globalSlotHash(cx, slot));
}

/* Consult with the oracle whether we shouldn't demote a certain global variable. */
JS_REQUIRES_STACK bool
Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
{
    return _globalDontDemote.get(globalSlotHash(cx, slot));
}

/* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
{
    _stackDontDemote.set(&gc, stackSlotHash(cx, slot));
}

/* Consult with the oracle whether we shouldn't demote a certain slot. */
JS_REQUIRES_STACK bool
Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
{
    return _stackDontDemote.get(stackSlotHash(cx, slot));
}

void
Oracle::clearDemotability()
{
    _stackDontDemote.reset();
    _globalDontDemote.reset();
}

struct PCHashEntry : public JSDHashEntryStub {
    size_t          count;
};

#define PC_HASH_COUNT 1024

static void
js_Blacklist(jsbytecode* pc)
{
    JS_ASSERT(*pc == JSOP_LOOP || *pc == JSOP_NOP);
    *pc = JSOP_NOP;
}

static void
js_Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree=NULL)
{
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;

    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_ADD);

        if (entry) {
            if (!entry->key) {
                entry->key = pc;
                JS_ASSERT(entry->count == 0);
            }
            JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
            if (entry->count++ > (BL_ATTEMPTS * MAXPEERS)) {
                entry->count = 0;
                js_Blacklist(pc);
                return;
            }
        }
    }

    if (tree) {
        tree->hits() -= BL_BACKOFF;

        /*
         * In case there is no entry or no table (due to OOM) or some
         * serious imbalance in the recording-attempt distribution on a
         * multitree, give each tree another chance to blacklist here as
         * well.
         */
        if (++tree->recordAttempts > BL_ATTEMPTS)
            js_Blacklist(pc);
    }
}

static void
js_resetRecordingAttempts(JSContext *cx, jsbytecode* pc)
{
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_LOOKUP);

        if (JS_DHASH_ENTRY_IS_FREE(&(entry->hdr)))
            return;
        JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
        entry->count = 0;
    }
}

static inline size_t
fragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
{
    uintptr_t h = HASH_SEED;
    hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
    hash_accum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
    return size_t(h);
}

/*
 * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
 * pushed for the innermost JS frame. This is required as part of the fragment
 * key because the fragment will write those arguments back to the interpreter
 * stack when it exits, using its typemap, which implicitly incorporates a given
 * value of argc. Without this feature, a fragment could be called as an inner
 * tree with two different values of argc, and entry type checking or exit
 * frame synthesis could crash.
 */
struct VMFragment : public Fragment
{
    VMFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc) :
        Fragment(_ip),
        next(NULL),
        globalObj(_globalObj),
        globalShape(_globalShape),
        argc(_argc)
    {}
    VMFragment* next;
    JSObject* globalObj;
    uint32 globalShape;
    uint32 argc;
};

static VMFragment*
getVMFragment(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
              uint32 argc)
{
    size_t h = fragmentHash(ip, globalObj, globalShape, argc);
    VMFragment* vf = tm->vmfragments[h];
    while (vf &&
           ! (vf->globalObj == globalObj &&
              vf->globalShape == globalShape &&
              vf->ip == ip &&
              vf->argc == argc)) {
        vf = vf->next;
    }
    return vf;
}

static VMFragment*
getLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
        uint32 argc)
{
    return getVMFragment(tm, ip, globalObj, globalShape, argc);
}

static Fragment*
getAnchor(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
          uint32 argc)
{
    VMFragment *f = new (&gc) VMFragment(ip, globalObj, globalShape, argc);
    JS_ASSERT(f);

    Fragment *p = getVMFragment(tm, ip, globalObj, globalShape, argc);

    if (p) {
        f->first = p;
        /* append at the end of the peer list */
        Fragment* next;
        while ((next = p->peer) != NULL)
            p = next;
        p->peer = f;
    } else {
        /* this is the first fragment */
        f->first = f;
        size_t h = fragmentHash(ip, globalObj, globalShape, argc);
        f->next = tm->vmfragments[h];
        tm->vmfragments[h] = f;
    }
    f->anchor = f;
    f->root = f;
    f->kind = LoopTrace;
    return f;
}

static void
js_AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
                      uint32 argc)
{
    /*
     * If we already permanently blacklisted the location, undo that.
     */
    JS_ASSERT(*(jsbytecode*)pc == JSOP_NOP || *(jsbytecode*)pc == JSOP_LOOP);
    *(jsbytecode*)pc = JSOP_LOOP;
    js_resetRecordingAttempts(cx, pc);

    /*
     * Breathe new life into all peer fragments at the designated loop header.
     */
    Fragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj),
                                       argc);
    if (!f) {
        /*
         * If the global object's shape changed, we can't easily find the
         * corresponding loop header via a hash table lookup. In this case we
         * simply bail here and hope that the fragment has another outstanding
         * compilation attempt. This case is extremely rare.
         */
        return;
    }
    JS_ASSERT(f->root == f);
    f = f->first;
    while (f) {
        JS_ASSERT(f->root == f);
        --f->recordAttempts;
        f->hits() = HOTLOOP;
        f = f->peer;
    }
}

// Forward declarations.
JS_DEFINE_CALLINFO_1(static, DOUBLE, i2f, INT32, 1, 1)
JS_DEFINE_CALLINFO_1(static, DOUBLE, u2f, UINT32, 1, 1)

static bool isi2f(LInsp i)
{
    if (i->isop(LIR_i2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &i2f_ci)
            return true;
    }

    return false;
}

static bool isu2f(LInsp i)
{
    if (i->isop(LIR_u2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &u2f_ci)
            return true;
    }

    return false;
}

static LInsp iu2fArg(LInsp i)
{
    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin))
    {
        return i->oprnd1()->arg(0);
    }

    return i->oprnd1();
}

static LIns* demote(LirWriter *out, LInsp i)
{
    if (i->isCall())
        return callArgN(i, 0);
    if (isi2f(i) || isu2f(i))
        return iu2fArg(i);
    if (i->isconst())
        return i;
    AvmAssert(i->isconstq());
    double cf = i->imm64f();
    int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
    return out->insImm(ci);
}

static bool isPromoteInt(LIns* i)
{
    if (isi2f(i) || i->isconst())
        return true;
    if (!i->isconstq())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool isPromoteUint(LIns* i)
{
    if (isu2f(i) || i->isconst())
        return true;
    if (!i->isconstq())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsuint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool isPromote(LIns* i)
{
    return isPromoteInt(i) || isPromoteUint(i);
}

static bool isconst(LIns* i, int32_t c)
{
    return i->isconst() && i->imm32() == c;
}

static bool overflowSafe(LIns* i)
{
    LIns* c;
    return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() & 0xc0000000) == 0)) ||
           (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() > 0)));
}

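/*
 * Example (added note, not in the original source): overflowSafe accepts
 * operands whose two topmost bits are provably clear, e.g. (x & 0x3fffffff)
 * or (x >> c) with a positive constant c; adding two such values cannot
 * overflow a signed 32-bit integer.
 */
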
/* soft float support */

static jsdouble FASTCALL
fneg(jsdouble x)
{
    return -x;
}
JS_DEFINE_CALLINFO_1(static, DOUBLE, fneg, DOUBLE, 1, 1)

static jsdouble FASTCALL
i2f(int32 i)
{
    return i;
}

static jsdouble FASTCALL
u2f(jsuint u)
{
    return u;
}

static int32 FASTCALL
fcmpeq(jsdouble x, jsdouble y)
{
    return x==y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpeq, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmplt(jsdouble x, jsdouble y)
{
    return x < y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmplt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmple(jsdouble x, jsdouble y)
{
    return x <= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmple, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpgt(jsdouble x, jsdouble y)
{
    return x > y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpgt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpge(jsdouble x, jsdouble y)
{
    return x >= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpge, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fmul(jsdouble x, jsdouble y)
{
    return x * y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fmul, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fadd(jsdouble x, jsdouble y)
{
    return x + y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fadd, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fdiv(jsdouble x, jsdouble y)
{
    return x / y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fdiv, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fsub(jsdouble x, jsdouble y)
{
    return x - y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fsub, DOUBLE, DOUBLE, 1, 1)

class SoftFloatFilter: public LirWriter
{
public:
    SoftFloatFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LInsp quadCall(const CallInfo *ci, LInsp args[]) {
        LInsp qlo, qhi;

        qlo = out->insCall(ci, args);
        qhi = out->ins1(LIR_callh, qlo);
        return out->qjoin(qlo, qhi);
    }

    LInsp ins1(LOpcode v, LInsp s0)
    {
        if (v == LIR_fneg)
            return quadCall(&fneg_ci, &s0);

        if (v == LIR_i2f)
            return quadCall(&i2f_ci, &s0);

        if (v == LIR_u2f)
            return quadCall(&u2f_ci, &s0);

        return out->ins1(v, s0);
    }

    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        LInsp args[2];
        LInsp bv;

        // change the numeric value and order of these LIR opcodes and die
        if (LIR_fadd <= v && v <= LIR_fdiv) {
            static const CallInfo *fmap[] = { &fadd_ci, &fsub_ci, &fmul_ci, &fdiv_ci };

            args[0] = s1;
            args[1] = s0;

            return quadCall(fmap[v - LIR_fadd], args);
        }

        if (LIR_feq <= v && v <= LIR_fge) {
            static const CallInfo *fmap[] = { &fcmpeq_ci, &fcmplt_ci, &fcmpgt_ci, &fcmple_ci, &fcmpge_ci };

            args[0] = s1;
            args[1] = s0;

            bv = out->insCall(fmap[v - LIR_feq], args);
            return out->ins2(LIR_eq, bv, out->insImm(1));
        }

        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        // if the return type is ARGSIZE_F, we have
        // to do a quadCall ( qjoin(call,callh) )
        if ((ci->_argtypes & 3) == ARGSIZE_F)
            return quadCall(ci, args);

        return out->insCall(ci, args);
    }
};

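/*
 * Sketch (added note, not in the original source): on soft-float targets the
 * filter above replaces double-valued operations with calls to the helper
 * functions defined earlier, materializing each 64-bit result as
 * qjoin(call, callh). For example, LIR_fadd(a, b) becomes
 * qjoin(call fadd(a, b), callh), and a float compare becomes a helper call
 * whose result is checked against 1 with LIR_eq.
 */
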
class FuncFilter: public LirWriter
{
public:
    FuncFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        if (s0 == s1 && v == LIR_feq) {
            if (isPromote(s0)) {
                // double(int) and double(uint) cannot be nan
                return insImm(1);
            }
            if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    // add/sub/mul promoted ints can't be nan
                    return insImm(1);
                }
            }
        } else if (LIR_feq <= v && v <= LIR_fge) {
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fcmp to cmp
                v = LOpcode(v + (LIR_eq - LIR_feq));
                return out->ins2(v, demote(out, s0), demote(out, s1));
            } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
                // uint compare
                v = LOpcode(v + (LIR_eq - LIR_feq));
                if (v != LIR_eq)
                    v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
                return out->ins2(v, demote(out, s0), demote(out, s1));
            }
        } else if (v == LIR_or &&
                   s0->isop(LIR_lsh) && isconst(s0->oprnd2(), 16) &&
                   s1->isop(LIR_and) && isconst(s1->oprnd2(), 0xffff)) {
            LIns* msw = s0->oprnd1();
            LIns* lsw = s1->oprnd1();
            LIns* x;
            LIns* y;
            if (lsw->isop(LIR_add) &&
                lsw->oprnd1()->isop(LIR_and) &&
                lsw->oprnd2()->isop(LIR_and) &&
                isconst(lsw->oprnd1()->oprnd2(), 0xffff) &&
                isconst(lsw->oprnd2()->oprnd2(), 0xffff) &&
                msw->isop(LIR_add) &&
                msw->oprnd1()->isop(LIR_add) &&
                msw->oprnd2()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
                isconst(msw->oprnd2()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
                (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
                (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
                lsw == msw->oprnd2()->oprnd1()) {
                return out->ins2(LIR_add, x, y);
            }
        }
        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        if (ci == &js_DoubleToUint32_ci) {
            LInsp s0 = args[0];
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAUint32(s0->imm64f()));
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
        } else if (ci == &js_DoubleToInt32_ci) {
            LInsp s0 = args[0];
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAInt32(s0->imm64f()));
            if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    LOpcode op = LOpcode(s0->opcode() & ~LIR64);
                    return out->ins2(op, demote(out, lhs), demote(out, rhs));
                }
            }
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
            // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
            if (s0->isCall()) {
                const CallInfo* ci2 = s0->callInfo();
                if (ci2 == &js_UnboxDouble_ci) {
                    LIns* args2[] = { callArgN(s0, 0) };
                    return out->insCall(&js_UnboxInt32_ci, args2);
                } else if (ci2 == &js_StringToNumber_ci) {
                    // callArgN's ordering is that as seen by the builtin, not as stored in
                    // args here. True story!
                    LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };
                    return out->insCall(&js_StringToInt32_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt0_ci) {
                    // Use a fast path builtin for a charCodeAt that converts to an int right away.
                    LIns* args2[] = { callArgN(s0, 0) };
                    return out->insCall(&js_String_p_charCodeAt0_int_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt_ci) {
                    LIns* idx = callArgN(s0, 1);
                    // If the index is not already an integer, force it to be an integer.
                    idx = isPromote(idx)
                        ? demote(out, idx)
                        : out->insCall(&js_DoubleToInt32_ci, &idx);
                    LIns* args2[] = { idx, callArgN(s0, 0) };
                    return out->insCall(&js_String_p_charCodeAt_int_ci, args2);
                }
            }
        } else if (ci == &js_BoxDouble_ci) {
            LInsp s0 = args[0];
            JS_ASSERT(s0->isQuad());
            if (isi2f(s0)) {
                LIns* args2[] = { iu2fArg(s0), args[1] };
                return out->insCall(&js_BoxInt32_ci, args2);
            }
            if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
                return callArgN(s0, 0);
        }
        return out->insCall(ci, args);
    }
};

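/*
 * Example (added note, not in the original source): when both operands of a
 * double compare are promoted integers, the ins2 hook above demotes the
 * compare, so LIR_flt(i2f(a), i2f(b)) becomes LIR_lt(a, b); for unsigned
 * promotions the unsigned variant (e.g. LIR_ult) is emitted instead.
 */
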
/* In debug mode vpname contains a textual description of the type of the
   slot during the forall iteration over all slots. If JS_JIT_SPEW is not
   defined, vpnum is set to a very large integer to catch invalid uses of
   it. Non-debug code should never use vpnum. */
#ifdef JS_JIT_SPEW
#define DEF_VPNAME          const char* vpname; unsigned vpnum
#define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)
#define INC_VPNUM()         do { ++vpnum; } while(0)
#else
#define DEF_VPNAME          do {} while (0)
#define vpname ""
#define vpnum 0x40000000
#define SET_VPNAME(name)    ((void)0)
#define INC_VPNUM()         ((void)0)
#endif

/* Iterate over all interned global variables. */
#define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code)                        \
    JS_BEGIN_MACRO                                                            \
        DEF_VPNAME;                                                           \
        JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);  \
        unsigned n;                                                           \
        jsval* vp;                                                            \
        SET_VPNAME("global");                                                 \
        for (n = 0; n < ngslots; ++n) {                                       \
            vp = &STOBJ_GET_SLOT(globalObj, gslots[n]);                       \
            { code; }                                                         \
            INC_VPNUM();                                                      \
        }                                                                     \
    JS_END_MACRO

/* Iterate over all slots in the frame, consisting of args, vars, and stack
   (except for the top-level frame, which does not have args or vars). */
#define FORALL_FRAME_SLOTS(fp, depth, code)                                   \
    JS_BEGIN_MACRO                                                            \
        jsval* vp;                                                            \
        jsval* vpstop;                                                        \
        if (fp->callee) {                                                     \
            if (depth == 0) {                                                 \
                SET_VPNAME("callee");                                         \
                vp = &fp->argv[-2];                                           \
                { code; }                                                     \
                SET_VPNAME("this");                                           \
                vp = &fp->argv[-1];                                           \
                { code; }                                                     \
                SET_VPNAME("argv");                                           \
                vp = &fp->argv[0]; vpstop = &fp->argv[argSlots(fp)];          \
                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
            }                                                                 \
            SET_VPNAME("vars");                                               \
            vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed];          \
            while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                  \
        }                                                                     \
        SET_VPNAME("stack");                                                  \
        vp = StackBase(fp); vpstop = fp->regs->sp;                            \
        while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                      \
        if (fsp < fspstop - 1) {                                              \
            JSStackFrame* fp2 = fsp[1];                                       \
            int missing = fp2->fun->nargs - fp2->argc;                        \
            if (missing > 0) {                                                \
                SET_VPNAME("missing");                                        \
                vp = fp->regs->sp;                                            \
                vpstop = vp + missing;                                        \
                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
            }                                                                 \
        }                                                                     \
    JS_END_MACRO

/* Iterate over all slots in each pending frame. */
#define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code)                   \
    JS_BEGIN_MACRO                                                            \
        DEF_VPNAME;                                                           \
        unsigned n;                                                           \
        JSStackFrame* currentFrame = cx->fp;                                  \
        JSStackFrame* entryFrame;                                             \
        JSStackFrame* fp = currentFrame;                                      \
        for (n = 0; n < callDepth; ++n) { fp = fp->down; }                    \
        entryFrame = fp;                                                      \
        unsigned frames = callDepth+1;                                        \
        JSStackFrame** fstack =                                               \
            (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*));         \
        JSStackFrame** fspstop = &fstack[frames];                             \
        JSStackFrame** fsp = fspstop-1;                                       \
        fp = currentFrame;                                                    \
        for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }  \
        unsigned depth;                                                       \
        for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) {        \
            fp = *fsp;                                                        \
            FORALL_FRAME_SLOTS(fp, depth, code);                              \
        }                                                                     \
    JS_END_MACRO

#define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
    JS_BEGIN_MACRO                                                            \
        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
        FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
    JS_END_MACRO

/* Calculate the total number of native frame slots we need from this frame
   all the way back to the entry frame, including the current stack usage. */
JS_REQUIRES_STACK unsigned
js_NativeStackSlots(JSContext *cx, unsigned callDepth)
{
    JSStackFrame* fp = cx->fp;
    unsigned slots = 0;
#if defined _DEBUG
    unsigned int origCallDepth = callDepth;
#endif
    for (;;) {
        unsigned operands = fp->regs->sp - StackBase(fp);
        slots += operands;
        if (fp->callee)
            slots += fp->script->nfixed;
        if (callDepth-- == 0) {
            if (fp->callee)
                slots += 2/*callee,this*/ + argSlots(fp);
#if defined _DEBUG
            unsigned int m = 0;
            FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
            JS_ASSERT(m == slots);
#endif
            return slots;
        }
        JSStackFrame* fp2 = fp;
        fp = fp->down;
        int missing = fp2->fun->nargs - fp2->argc;
        if (missing > 0)
            slots += missing;
    }
    JS_NOT_REACHED("js_NativeStackSlots");
}

/*
 * Capture the type map for the selected slots of the global object and currently pending
 * stack frames.
 */
JS_REQUIRES_STACK void
TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth)
{
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(js_NativeStackSlots(cx, callDepth) + ngslots);
    uint8* map = data();
    uint8* m = map;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map)))
            type = JSVAL_DOUBLE;
        JS_ASSERT(type != JSVAL_BOXED);
        debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
        JS_ASSERT(uintptr_t(m - map) < length());
        *m++ = type;
    );
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
            type = JSVAL_DOUBLE;
        JS_ASSERT(type != JSVAL_BOXED);
        debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
        JS_ASSERT(uintptr_t(m - map) < length());
        *m++ = type;
    );
    JS_ASSERT(uintptr_t(m - map) == length());
}

JS_REQUIRES_STACK void
TypeMap::captureMissingGlobalTypes(JSContext* cx, SlotList& slots, unsigned stackSlots)
{
    unsigned oldSlots = length() - stackSlots;
    int diff = slots.length() - oldSlots;
    JS_ASSERT(diff >= 0);
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(length() + diff);
    uint8* map = data() + stackSlots;
    uint8* m = map;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        if (n >= oldSlots) {
            uint8 type = getCoercedType(*vp);
            if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
                type = JSVAL_DOUBLE;
            JS_ASSERT(type != JSVAL_BOXED);
            debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
            *m = type;
            JS_ASSERT((m > map + oldSlots) || (*m == type));
        }
        m++;
    );
}

/* Compare this type map to another one and see whether they match. */
bool
TypeMap::matches(TypeMap& other) const
{
    if (length() != other.length())
        return false;
    return !memcmp(data(), other.data(), length());
}

/* Use the provided storage area to create a new type map that contains the partial type map
   with the rest of it filled up from the complete type map. */
static void
mergeTypeMaps(uint8** partial, unsigned* plength, uint8* complete, unsigned clength, uint8* mem)
{
    unsigned l = *plength;
    JS_ASSERT(l < clength);
    memcpy(mem, *partial, l * sizeof(uint8));
    memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
    *partial = mem;
    *plength = clength;
}

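/*
 * Example (added note, not in the original source): merging a partial map
 * [I, D] into a complete map [I, D, O, S] copies the first two entries from
 * the partial map and the remaining two from the complete map, producing
 * [I, D, O, S] in mem and updating *plength to 4.
 */
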
/* Specializes a tree to any missing globals, including any dependent trees. */
static JS_REQUIRES_STACK void
specializeTreesToMissingGlobals(JSContext* cx, TreeInfo* root)
{
    TreeInfo* ti = root;

    ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
    JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);

    for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
        ti = (TreeInfo*)root->dependentTrees.data()[i]->vmprivate;
        /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            specializeTreesToMissingGlobals(cx, ti);
    }
    for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
        ti = (TreeInfo*)root->linkedTrees.data()[i]->vmprivate;
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            specializeTreesToMissingGlobals(cx, ti);
    }
}

static void
js_TrashTree(JSContext* cx, Fragment* f);

JS_REQUIRES_STACK
TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
        TreeInfo* ti, unsigned stackSlots, unsigned ngslots, uint8* typeMap,
        VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc)
{
    JS_ASSERT(!_fragment->vmprivate && ti && cx->fp->regs->pc == (jsbytecode*)_fragment->ip);

    /* Reset the fragment state we care about in case we got a recycled fragment. */
    _fragment->lastIns = NULL;

    this->cx = cx;
    this->traceMonitor = &JS_TRACE_MONITOR(cx);
    this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    this->lexicalBlock = cx->fp->blockChain;
    this->anchor = _anchor;
    this->fragment = _fragment;
    this->lirbuf = _fragment->lirbuf;
    this->treeInfo = ti;
    this->callDepth = _anchor ? _anchor->calldepth : 0;
    this->atoms = FrameAtomBase(cx, cx->fp);
    this->deepAborted = false;
    this->trashSelf = false;
    this->global_dslots = this->globalObj->dslots;
    this->loop = true; /* default assumption is we are compiling a loop */
    this->wasRootFragment = _fragment == _fragment->root;
    this->outer = outer;
    this->outerArgc = outerArgc;
    this->pendingTraceableNative = NULL;
    this->newobj_ins = NULL;
    this->generatedTraceableNative = new JSTraceableNative();
    JS_ASSERT(generatedTraceableNative);

    debug_only_v(printf("recording starting from %s:%u@%u\n",
                        ti->treeFileName, ti->treeLineNumber, ti->treePCOffset);)
    debug_only_v(printf("globalObj=%p, shape=%d\n", (void*)this->globalObj, OBJ_SHAPE(this->globalObj));)

    lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
    debug_only_v(lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);)
    if (nanojit::AvmCore::config.soft_float)
        lir = float_filter = new (&gc) SoftFloatFilter(lir);
    else
        float_filter = 0;
    lir = cse_filter = new (&gc) CseFilter(lir, &gc);
    lir = expr_filter = new (&gc) ExprFilter(lir);
    lir = func_filter = new (&gc) FuncFilter(lir);
    lir->ins0(LIR_start);

    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment)
        lirbuf->state = addName(lir->insParam(0, 0), "state");

    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
    lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
    cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
    eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
    eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");

    /* If we came from exit, we might not have enough global types. */
    if (ti->globalSlots->length() > ti->nGlobalTypes())
        specializeTreesToMissingGlobals(cx, ti);

    /* read into registers all values on the stack and all globals we know so far */
    import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);

    if (fragment == fragment->root) {
        /*
         * We poll the operation callback request flag. It is updated asynchronously whenever
         * the callback is to be invoked.
         */
        LIns* x = lir->insLoadi(cx_ins, offsetof(JSContext, operationCallbackFlag));
        guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
    }

    /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
       is what we expect it to be. */
    if (_anchor && _anchor->exitType == NESTED_EXIT) {
        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
                                                offsetof(InterpState, lastTreeExitGuard)),
                                                "lastTreeExitGuard");
        guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
    }
}

TreeInfo::~TreeInfo()
{
    UnstableExit* temp;

    while (unstableExits) {
        temp = unstableExits->next;
        delete unstableExits;
        unstableExits = temp;
    }
}

TraceRecorder::~TraceRecorder()
{
    JS_ASSERT(nextRecorderToAbort == NULL);
    JS_ASSERT(treeInfo && (fragment || wasDeepAborted()));
#ifdef DEBUG
    TraceRecorder* tr = JS_TRACE_MONITOR(cx).abortStack;
    while (tr != NULL)
    {
        JS_ASSERT(this != tr);
        tr = tr->nextRecorderToAbort;
    }
#endif
    if (fragment) {
        if (wasRootFragment && !fragment->root->code()) {
            JS_ASSERT(!fragment->root->vmprivate);
            delete treeInfo;
        }

        if (trashSelf)
            js_TrashTree(cx, fragment->root);

        for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
            js_TrashTree(cx, whichTreesToTrash.get(i));
    } else if (wasRootFragment) {
        delete treeInfo;
    }
#ifdef DEBUG
    delete verbose_filter;
#endif
    delete cse_filter;
    delete expr_filter;
    delete func_filter;
    delete float_filter;
    delete lir_buf_writer;
    delete generatedTraceableNative;
}

void TraceRecorder::removeFragmentoReferences()
{
    fragment = NULL;
}

void TraceRecorder::deepAbort()
{
    debug_only_v(printf("deep abort");)
    deepAborted = true;
}

/* Add debug information to a LIR instruction as we emit it. */
inline LIns*
TraceRecorder::addName(LIns* ins, const char* name)
{
#ifdef JS_JIT_SPEW
    if (js_verboseDebug)
        lirbuf->names->addName(ins, name);
#endif
    return ins;
}

/* Determine the current call depth (starting with the entry frame.) */
unsigned
TraceRecorder::getCallDepth() const
{
    return callDepth;
}

/* Determine the offset in the native global frame for a jsval we track */
ptrdiff_t
TraceRecorder::nativeGlobalOffset(jsval* p) const
{
    JS_ASSERT(isGlobal(p));
    if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
        return sizeof(InterpState) + size_t(p - globalObj->fslots) * sizeof(double);
    return sizeof(InterpState) + ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
}

/* Determine whether a value is a global stack slot */
bool
TraceRecorder::isGlobal(jsval* p) const
{
    return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
            (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
}

/* Determine the offset in the native stack for a jsval we track */
JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativeStackOffset(jsval* p) const
{
#ifdef DEBUG
    size_t slow_offset = 0;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        if (vp == p) goto done;
        slow_offset += sizeof(double)
    );

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
    slow_offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
  done:
#define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
#else
#define RETURN(offset) { return offset; }
#endif
    size_t offset = 0;
    JSStackFrame* currentFrame = cx->fp;
    JSStackFrame* entryFrame;
    JSStackFrame* fp = currentFrame;
    for (unsigned n = 0; n < callDepth; ++n) { fp = fp->down; }
    entryFrame = fp;
    unsigned frames = callDepth+1;
    JSStackFrame** fstack = (JSStackFrame **)alloca(frames * sizeof (JSStackFrame *));
    JSStackFrame** fspstop = &fstack[frames];
    JSStackFrame** fsp = fspstop-1;
    fp = currentFrame;
    for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }
    for (fsp = fstack; fsp < fspstop; ++fsp) {
        fp = *fsp;
        if (fp->callee) {
            if (fsp == fstack) {
                if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + argSlots(fp)))
                    RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
                offset += (2/*callee,this*/ + argSlots(fp)) * sizeof(double);
            }
            if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
                RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
            offset += fp->script->nfixed * sizeof(double);
        }
        jsval* spbase = StackBase(fp);
        if (size_t(p - spbase) < size_t(fp->regs->sp - spbase))
            RETURN(offset + size_t(p - spbase) * sizeof(double));
        offset += size_t(fp->regs->sp - spbase) * sizeof(double);
        if (fsp < fspstop - 1) {
            JSStackFrame* fp2 = fsp[1];
            int missing = fp2->fun->nargs - fp2->argc;
            if (missing > 0) {
                if (size_t(p - fp->regs->sp) < size_t(missing))
                    RETURN(offset + size_t(p - fp->regs->sp) * sizeof(double));
                offset += size_t(missing) * sizeof(double);
            }
        }
    }

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - currentFrame->slots) < currentFrame->script->nslots);
    offset += size_t(p - currentFrame->regs->sp) * sizeof(double);
    RETURN(offset);
#undef RETURN
}

/* Track the maximum number of native frame slots we need during
   execution. */
void
TraceRecorder::trackNativeStackUse(unsigned slots)
{
    if (slots > treeInfo->maxNativeStackSlots)
        treeInfo->maxNativeStackSlots = slots;
}

/* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of
   storing a pointer to them). We now assert instead of type checking, the caller must ensure the
   types are compatible. */
static void
ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
{
    unsigned tag = JSVAL_TAG(v);
    switch (type) {
      case JSVAL_OBJECT:
        JS_ASSERT(tag == JSVAL_OBJECT);
        JS_ASSERT(!JSVAL_IS_NULL(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)));
        *(JSObject**)slot = JSVAL_TO_OBJECT(v);
        debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        return;
      case JSVAL_INT:
        jsint i;
        if (JSVAL_IS_INT(v))
            *(jsint*)slot = JSVAL_TO_INT(v);
        else if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
            *(jsint*)slot = i;
        else
            JS_ASSERT(JSVAL_IS_INT(v));
        debug_only_v(printf("int<%d> ", *(jsint*)slot);)
        return;
      case JSVAL_DOUBLE:
        jsdouble d;
        if (JSVAL_IS_INT(v))
            d = JSVAL_TO_INT(v);
        else
            d = *JSVAL_TO_DOUBLE(v);
        JS_ASSERT(JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v));
        *(jsdouble*)slot = d;
        debug_only_v(printf("double<%g> ", d);)
        return;
      case JSVAL_BOXED:
        JS_NOT_REACHED("found boxed type in an entry type map");
        return;
      case JSVAL_STRING:
        JS_ASSERT(tag == JSVAL_STRING);
        *(JSString**)slot = JSVAL_TO_STRING(v);
        debug_only_v(printf("string<%p> ", (void*)(*(JSString**)slot));)
        return;
      case JSVAL_TNULL:
        JS_ASSERT(tag == JSVAL_OBJECT);
        *(JSObject**)slot = NULL;
        debug_only_v(printf("null ");)
        return;
      case JSVAL_BOOLEAN:
        /* Watch out for pseudo-booleans. */
        JS_ASSERT(tag == JSVAL_BOOLEAN);
        *(JSBool*)slot = JSVAL_TO_PSEUDO_BOOLEAN(v);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        return;
      case JSVAL_TFUN: {
        JS_ASSERT(tag == JSVAL_OBJECT);
        JSObject* obj = JSVAL_TO_OBJECT(v);
        *(JSObject**)slot = obj;
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
        debug_only_v(printf("function<%p:%s> ", (void*) obj,
                            fun->atom
                            ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                            : "unnamed");)
#endif
        return;
      }
    }

    JS_NOT_REACHED("unexpected type");
}

/* We maintain an emergency pool of doubles so we can recover safely if a trace runs
   out of memory (doubles or objects). */
static jsval
AllocateDoubleFromReservedPool(JSContext* cx)
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    JS_ASSERT(tm->reservedDoublePoolPtr > tm->reservedDoublePool);
    return *--tm->reservedDoublePoolPtr;
}

static bool
js_ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
{
    /* We should not be called with a full pool. */
    JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
              MAX_NATIVE_STACK_SLOTS);

    /*
     * When the GC runs in js_NewDoubleInRootedValue, it resets
     * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
     */
    JSRuntime* rt = cx->runtime;
    uintN gcNumber = rt->gcNumber;
    uintN lastgcNumber = gcNumber;
    jsval* ptr = tm->reservedDoublePoolPtr;
    while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
        if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
            goto oom;

        /* Check if the last call to js_NewDoubleInRootedValue GC'd. */
        if (rt->gcNumber != lastgcNumber) {
            lastgcNumber = rt->gcNumber;
            JS_ASSERT(tm->reservedDoublePoolPtr == tm->reservedDoublePool);
            ptr = tm->reservedDoublePool;

            /*
             * Have we GC'd more than once? We're probably running really
             * low on memory, bail now.
             */
            if (uintN(rt->gcNumber - gcNumber) > uintN(1))
                goto oom;
            continue;
        }
        ++ptr;
    }
    tm->reservedDoublePoolPtr = ptr;
    return true;

oom:
    /*
     * Already massive GC pressure, no need to hold doubles back.
     * We won't run any native code anyway.
     */
    tm->reservedDoublePoolPtr = tm->reservedDoublePool;
    return false;
}

/* Box a value from the native stack back into the jsval format. Integers
   that are too large to fit into a jsval are automatically boxed into
   heap-allocated doubles. */
static void
NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)
{
    jsint i;
    jsdouble d;
    switch (type) {
      case JSVAL_OBJECT:
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        break;
      case JSVAL_INT:
        i = *(jsint*)slot;
        debug_only_v(printf("int<%d> ", i);)
      store_int:
        if (INT_FITS_IN_JSVAL(i)) {
            v = INT_TO_JSVAL(i);
            break;
        }
        d = (jsdouble)i;
        goto store_double;
      case JSVAL_DOUBLE:
        d = *slot;
        debug_only_v(printf("double<%g> ", d);)
        if (JSDOUBLE_IS_INT(d, i))
            goto store_int;
      store_double: {
        /* It's not safe to trigger the GC here, so use an emergency heap if we are out of
           double boxes. */
        if (cx->doubleFreeList) {
#ifdef DEBUG
            JSBool ok =
#endif
                js_NewDoubleInRootedValue(cx, d, &v);
            JS_ASSERT(ok);
            return;
        }
        v = AllocateDoubleFromReservedPool(cx);
        JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
        *JSVAL_TO_DOUBLE(v) = d;
        return;
      }
      case JSVAL_BOXED:
        v = *(jsval*)slot;
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_v(printf("box<%p> ", (void*)v));
        break;
      case JSVAL_STRING:
        v = STRING_TO_JSVAL(*(JSString**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING); /* if this fails the pointer was not aligned */
        debug_only_v(printf("string<%p> ", (void*)(*(JSString**)slot));)
        break;
      case JSVAL_TNULL:
        JS_ASSERT(*(JSObject**)slot == NULL);
        v = JSVAL_NULL;
        debug_only_v(printf("null<%p> ", (void*)(*(JSObject**)slot)));
        break;
      case JSVAL_BOOLEAN:
        /* Watch out for pseudo-booleans. */
        v = PSEUDO_BOOLEAN_TO_JSVAL(*(JSBool*)slot);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        break;
      case JSVAL_TFUN: {
        JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject**)slot));
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v));
        debug_only_v(printf("function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                            fun->atom
                            ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                            : "unnamed");)
#endif
        break;
      }
    }
}

/* Attempt to unbox the given list of interned globals onto the native global frame. */
static JS_REQUIRES_STACK void
BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    debug_only_v(printf("global: ");)
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        ValueToNative(cx, *vp, *mp, np + gslots[n]);
        ++mp;
    );
    debug_only_v(printf("\n");)
}

/* Attempt to unbox the given JS frame onto a native frame. */
static JS_REQUIRES_STACK void
BuildNativeStackFrames(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
{
    debug_only_v(printf("stack: ");)
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        ValueToNative(cx, *vp, *mp, np);
        ++mp; ++np;
    );
    debug_only_v(printf("\n");)
}

/* Box the given native frame into a JS frame. This is infallible. */
static JS_REQUIRES_STACK int
FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    uint8* mp_base = mp;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        NativeToValue(cx, *vp, *mp, np + gslots[n]);
        ++mp;
    );
    debug_only_v(printf("\n");)
    return mp - mp_base;
}

/*
 * Builtin to get an upvar on trace. See js_GetUpvar for the meaning
 * of the first three arguments. The value of the upvar is stored in
 * *result as an unboxed native. The return value is the typemap type.
 */
uint32 JS_FASTCALL
js_GetUpvarOnTrace(JSContext *cx, uint32 level, uint32 cookie, double* result)
{
    uintN skip = UPVAR_FRAME_SKIP(cookie);
    InterpState* state = cx->interpState;
    uintN callDepth = state->rp - state->callstackBase;

    /*
     * If we are skipping past all frames that are part of active traces,
     * then we simply get the value from the interpreter state.
     */
    if (skip > callDepth) {
        jsval v = js_GetUpvar(cx, level, cookie);
        uint8 type = getCoercedType(v);
        ValueToNative(cx, v, type, result);
        return type;
    }

    /*
     * The value we need is logically in a stack frame that is part of
     * an active trace. We reconstruct the value we need from the tracer
     * stack records.
     */
    uintN frameIndex = callDepth - skip; // pos of target frame in rp stack
    uintN nativeStackFramePos = 0;       // pos of target stack frame in sp stack
    for (uintN i = 0; i < frameIndex; ++i)
        nativeStackFramePos += state->callstackBase[i]->s.spdist;
    FrameInfo* fi = state->callstackBase[frameIndex];
    uint8* typemap = (uint8*) (fi+1);

    uintN slot = UPVAR_FRAME_SLOT(cookie);
    slot = slot == CALLEE_UPVAR_SLOT ? 0 : slot + 2;
    *result = state->stackBase[nativeStackFramePos + slot];
    return typemap[slot];
}

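/*
 * Example (added note, not in the original source): the per-frame typemap
 * starts with the callee/this pair, so a cookie naming CALLEE_UPVAR_SLOT
 * maps to native slot 0, while ordinary slots are shifted by 2 to skip that
 * pair; upvar slot 3 therefore reads native slot 5.
 */
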
1883 * Box the given native stack frame into the virtual machine stack. This
1884 * is infallible.
1886 * @param callDepth the distance between the entry frame into our trace and
1887 * cx->fp when we make this call. If this is not called as a
1888 * result of a nested exit, callDepth is 0.
1889 * @param mp pointer to an array of type tags (JSVAL_INT, etc.) that indicate
1890 * what the types of the things on the stack are.
1891 * @param np pointer to the native stack. We want to copy values from here to
1892 * the JS stack as needed.
1893 * @param stopFrame if non-null, this frame and everything above it should not
1894 * be restored.
1895 * @return the number of things we popped off of np.
1897 static JS_REQUIRES_STACK int
1898 FlushNativeStackFrames(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
1899 JSStackFrame* stopFrame)
1901 jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
1902 uint8* mp_base = mp;
1903 /* Root all string and object references first (we don't need to call the GC for this). */
1904 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1905 if (vp == stopAt) goto skip;
1906 debug_only_v(printf("%s%u=", vpname, vpnum);)
1907 NativeToValue(cx, *vp, *mp, np);
1908 ++mp; ++np;
1910 skip:
1911 // Restore thisp from the now-restored argv[-1] in each pending frame.
1912 // Keep in mind that we didn't restore frames at stopFrame and above!
1913 // Scope to keep |fp| from leaking into the macros we're using.
1915 unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
1916 JSStackFrame* fp = cx->fp;
1917 if (stopFrame) {
1918 for (; fp != stopFrame; fp = fp->down) {
1919 JS_ASSERT(n != 0);
1920 --n;
1922 // Skip over stopFrame itself.
1923 JS_ASSERT(n != 0);
1924 --n;
1925 fp = fp->down;
1927 for (; n != 0; fp = fp->down) {
1928 --n;
1929 if (fp->callee) {
1931 * We might return from trace with a different function object, but it still
1932 * has to be the same function (FIXME: bug 471425, eliminate fp->callee).
1934 JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
1935 JS_ASSERT(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(fp->argv[-2])));
1936 JS_ASSERT(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fp->argv[-2])) ==
1937 GET_FUNCTION_PRIVATE(cx, fp->callee));
1938 fp->callee = JSVAL_TO_OBJECT(fp->argv[-2]);
1939 fp->scopeChain = OBJ_GET_PARENT(cx, fp->callee);
1940 fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
1941 if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
1942 fp->flags |= JSFRAME_COMPUTED_THIS;
1946 debug_only_v(printf("\n");)
1947 return mp - mp_base;
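/*
 * Illustrative sketch (hypothetical, kept out of the build) of the frame-count
 * bookkeeping above: starting from cx->fp we fix up callDepth+1 frames (the +1
 * covers the entry frame), minus stopFrame and everything above it when given.
 */
#if 0
struct ExampleFrame { ExampleFrame* down; };

static unsigned
ExampleCountRestoredFrames(ExampleFrame* fp, unsigned callDepth, ExampleFrame* stopFrame)
{
    unsigned n = callDepth + 1;          /* +1 to also restore the entry frame */
    if (stopFrame) {
        for (; fp != stopFrame; fp = fp->down)
            --n;                         /* frames above stopFrame are skipped */
        --n;                             /* ...as is stopFrame itself */
    }
    return n;                            /* frames whose callee/thisp get fixed up */
}
#endif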
1950 /* Emit load instructions onto the trace that read the initial stack state. */
1951 JS_REQUIRES_STACK void
1952 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8 t,
1953 const char *prefix, uintN index, JSStackFrame *fp)
1955 LIns* ins;
1956 if (t == JSVAL_INT) { /* demoted */
1957 JS_ASSERT(isInt32(*p));
1958 /* OK, we have a valid demotion attempt pending, so insert an integer
1959 read and promote it to double, since all arithmetic operations expect
1960 to see doubles on entry. The first op to use this slot will emit an
1961 f2i cast which will cancel out the i2f we insert here. */
1962 ins = lir->insLoadi(base, offset);
1963 ins = lir->ins1(LIR_i2f, ins);
1964 } else {
1965 JS_ASSERT_IF(t != JSVAL_BOXED, isNumber(*p) == (t == JSVAL_DOUBLE));
1966 if (t == JSVAL_DOUBLE) {
1967 ins = lir->insLoad(LIR_ldq, base, offset);
1968 } else if (t == JSVAL_BOOLEAN) {
1969 ins = lir->insLoad(LIR_ld, base, offset);
1970 } else {
1971 ins = lir->insLoad(LIR_ldp, base, offset);
1974 checkForGlobalObjectReallocation();
1975 tracker.set(p, ins);
1977 #ifdef DEBUG
1978 char name[64];
1979 JS_ASSERT(strlen(prefix) < 10);
1980 void* mark = NULL;
1981 jsuword* localNames = NULL;
1982 const char* funName = NULL;
1983 if (*prefix == 'a' || *prefix == 'v') {
1984 mark = JS_ARENA_MARK(&cx->tempPool);
1985 if (JS_GET_LOCAL_NAME_COUNT(fp->fun) != 0)
1986 localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
1987 funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
1989 if (!strcmp(prefix, "argv")) {
1990 if (index < fp->fun->nargs) {
1991 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
1992 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1993 } else {
1994 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
1996 } else if (!strcmp(prefix, "vars")) {
1997 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
1998 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1999 } else {
2000 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
2003 if (mark)
2004 JS_ARENA_RELEASE(&cx->tempPool, mark);
2005 addName(ins, name);
2007 static const char* typestr[] = {
2008 "object", "int", "double", "boxed", "string", "null", "boolean", "function"
2010 debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",
2011 (void*)p, name, typestr[t & 7], t >> 3);)
2012 #endif
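/*
 * Note on the int round trip above (a summary, not new behavior): a slot
 * imported as JSVAL_INT becomes i2f(insLoadi(...)) on trace; the first
 * arithmetic consumer emits an f2i, and the f2i(i2f(x)) pair folds away,
 * leaving the raw integer load.
 */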
2015 JS_REQUIRES_STACK void
2016 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
2017 unsigned callDepth, uint8* typeMap)
2019 /* If we get a partial list that doesn't have all the types (i.e. we are recording from a side
2020 exit whose guard was created before more global slots were added), merge the missing types
2021 from the entry type map. This is safe because at the loop edge we verify that we
2022 have compatible types for all globals (entry type and loop edge type match). While
2023 a different trace of the tree might have had a guard with a different type map for
2024 these slots we just filled in here (the guard we continue from didn't know about them),
2025 since we didn't take that particular guard the only way we could have ended up here
2026 is if that other trace had at its end a compatible type distribution with the entry
2027 map. Since thats exactly what we used to fill in the types our current side exit
2028 didn't provide, this is always safe to do. */
2030 uint8* globalTypeMap = typeMap + stackSlots;
2031 unsigned length = treeInfo->nGlobalTypes();
2034 * This is potentially the typemap of the side exit and thus shorter than the tree's
2035 * global type map.
2037 if (ngslots < length) {
2038 mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/,
2039 treeInfo->globalTypeMap(), length,
2040 (uint8*)alloca(sizeof(uint8) * length));
2042 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2045 * Check whether there are any values on the stack we have to unbox and do that first
2046 * before we waste any time fetching the state from the stack.
2048 ptrdiff_t offset = -treeInfo->nativeStackBase;
2049 uint8* m = typeMap;
2050 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2051 if (*m == JSVAL_BOXED) {
2052 import(sp, offset, vp, JSVAL_BOXED, "boxed", vpnum, cx->fp);
2053 LIns* vp_ins = get(vp);
2054 unbox_jsval(*vp, vp_ins, copy(anchor));
2055 set(vp, vp_ins);
2057 m++; offset += sizeof(double);
2061 * The first time we compile a tree, the global slot list will be empty, as we add entries lazily.
2063 uint16* gslots = treeInfo->globalSlots->data();
2064 m = globalTypeMap;
2065 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2066 JS_ASSERT(*m != JSVAL_BOXED);
2067 import(lirbuf->state, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
2068 m++;
2070 offset = -treeInfo->nativeStackBase;
2071 m = typeMap;
2072 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2073 if (*m != JSVAL_BOXED)
2074 import(sp, offset, vp, *m, vpname, vpnum, fp);
2075 m++; offset += sizeof(double);
2079 JS_REQUIRES_STACK bool
2080 TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
2082 uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
2084 if (setflags) {
2085 if (!SPROP_HAS_STUB_SETTER(sprop))
2086 ABORT_TRACE_RV("non-stub setter", false);
2087 if (sprop->attrs & JSPROP_READONLY)
2088 ABORT_TRACE_RV("writing to a read-only property", false);
2090 /* This check applies even when setflags == 0. */
2091 if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop))
2092 ABORT_TRACE_RV("non-stub getter", false);
2094 if (!SPROP_HAS_VALID_SLOT(sprop, scope))
2095 ABORT_TRACE_RV("slotless obj property", false);
2097 return true;
2100 /* Lazily import a global slot if we don't already have it in the tracker. */
2101 JS_REQUIRES_STACK bool
2102 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
2104 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
2105 return false;
2107 * If the global object grows too large, alloca in js_ExecuteTree might fail, so
2108 * abort tracing on global objects with unreasonably many slots.
2110 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
2111 return false;
2112 jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
2113 if (known(vp))
2114 return true; /* we already have it */
2115 unsigned index = treeInfo->globalSlots->length();
2116 /* Add the slot to the list of interned global slots. */
2117 JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
2118 treeInfo->globalSlots->add(slot);
2119 uint8 type = getCoercedType(*vp);
2120 if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot))
2121 type = JSVAL_DOUBLE;
2122 treeInfo->typeMap.add(type);
2123 import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
2124 vp, type, "global", index, NULL);
2125 specializeTreesToMissingGlobals(cx, treeInfo);
2126 return true;
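/*
 * Illustrative sketch (hypothetical, kept out of the build): the native-frame
 * offset used for an interned global slot above. Globals are stored as one
 * array of doubles directly behind the InterpState block.
 */
#if 0
static ptrdiff_t
ExampleNativeGlobalOffset(unsigned slot)
{
    return sizeof(struct InterpState) + slot * sizeof(double);
}
#endif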
2129 /* Write back a value onto the stack or global frames. */
2130 LIns*
2131 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
2133 /* Sink all type casts targeting the stack into the side exit by simply storing the original
2134 (uncasted) value. Each guard generates the side exit map based on the types of the
2135 last stores to every stack location, so it's safe not to perform them on-trace. */
2136 if (isPromoteInt(i))
2137 i = ::demote(lir, i);
2138 return lir->insStorei(i, base, offset);
2141 /* Update the tracker, then issue a write back store. */
2142 JS_REQUIRES_STACK void
2143 TraceRecorder::set(jsval* p, LIns* i, bool initializing)
2145 JS_ASSERT(i != NULL);
2146 JS_ASSERT(initializing || known(p));
2147 checkForGlobalObjectReallocation();
2148 tracker.set(p, i);
2149 /* If we are writing to this location for the first time, calculate the offset into the
2150 native frame manually, otherwise just look up the last load or store associated with
2151 the same source address (p) and use the same offset/base. */
2152 LIns* x = nativeFrameTracker.get(p);
2153 if (!x) {
2154 if (isGlobal(p))
2155 x = writeBack(i, lirbuf->state, nativeGlobalOffset(p));
2156 else
2157 x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
2158 nativeFrameTracker.set(p, x);
2159 } else {
2160 #define ASSERT_VALID_CACHE_HIT(base, offset) \
2161 JS_ASSERT(base == lirbuf->sp || base == lirbuf->state); \
2162 JS_ASSERT(offset == ((base == lirbuf->sp) \
2163 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
2164 : nativeGlobalOffset(p))); \
2166 JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
2167 ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->immdisp());
2168 writeBack(i, x->oprnd2(), x->immdisp());
2170 #undef ASSERT_VALID_CACHE_HIT
2173 JS_REQUIRES_STACK LIns*
2174 TraceRecorder::get(jsval* p)
2176 checkForGlobalObjectReallocation();
2177 return tracker.get(p);
2180 JS_REQUIRES_STACK bool
2181 TraceRecorder::known(jsval* p)
2183 checkForGlobalObjectReallocation();
2184 return tracker.has(p);
2188 * The dslots of the global object are sometimes reallocated by the interpreter.
2189 * This function checks for that condition and remaps the entries of the tracker
2190 * accordingly.
2192 JS_REQUIRES_STACK void
2193 TraceRecorder::checkForGlobalObjectReallocation()
2195 if (global_dslots != globalObj->dslots) {
2196 debug_only_v(printf("globalObj->dslots relocated, updating tracker\n");)
2197 jsval* src = global_dslots;
2198 jsval* dst = globalObj->dslots;
2199 jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
2200 LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
2201 for (jsuint n = 0; n < length; ++n) {
2202 map[n] = tracker.get(src);
2203 tracker.set(src++, NULL);
2205 for (jsuint n = 0; n < length; ++n)
2206 tracker.set(dst++, map[n]);
2207 global_dslots = globalObj->dslots;
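/*
 * Illustrative sketch (hypothetical, kept out of the build) of the two-pass
 * remap above, assuming the Tracker get/set signatures used in this file:
 * snapshot and clear all old keys first, then re-insert under the new
 * addresses. A single in-place pass could clobber live entries when the old
 * and new dslots ranges overlap.
 */
#if 0
static void
ExampleRemap(Tracker& tracker, jsval* src, jsval* dst, jsuint length, LIns** scratch)
{
    for (jsuint n = 0; n < length; ++n) {
        scratch[n] = tracker.get(src + n);   /* pass 1: snapshot... */
        tracker.set(src + n, NULL);          /* ...and clear the old keys */
    }
    for (jsuint n = 0; n < length; ++n)
        tracker.set(dst + n, scratch[n]);    /* pass 2: re-insert at new keys */
}
#endif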
2211 /* Determine whether the current branch is a loop edge (taken or not taken). */
2212 static JS_REQUIRES_STACK bool
2213 js_IsLoopEdge(jsbytecode* pc, jsbytecode* header)
2215 switch (*pc) {
2216 case JSOP_IFEQ:
2217 case JSOP_IFNE:
2218 return ((pc + GET_JUMP_OFFSET(pc)) == header);
2219 case JSOP_IFEQX:
2220 case JSOP_IFNEX:
2221 return ((pc + GET_JUMPX_OFFSET(pc)) == header);
2222 default:
2223 JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
2224 (*pc == JSOP_OR) || (*pc == JSOP_ORX));
2226 return false;
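/*
 * Example (for illustration): the back edge of a do-while loop is a JSOP_IFNE
 * whose jump offset is negative; pc + GET_JUMP_OFFSET(pc) then lands exactly
 * on the loop header, so the function above returns true for that branch.
 */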
2230 * Promote slots if necessary to match the called tree's type map. This function is
2231 * infallible and must only be called if we are certain that it is possible to
2232 * reconcile the types for each slot in the inner and outer trees.
2234 JS_REQUIRES_STACK void
2235 TraceRecorder::adjustCallerTypes(Fragment* f)
2237 uint16* gslots = treeInfo->globalSlots->data();
2238 unsigned ngslots = treeInfo->globalSlots->length();
2239 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2240 TreeInfo* ti = (TreeInfo*)f->vmprivate;
2241 uint8* map = ti->globalTypeMap();
2242 uint8* m = map;
2243 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2244 LIns* i = get(vp);
2245 bool isPromote = isPromoteInt(i);
2246 if (isPromote && *m == JSVAL_DOUBLE)
2247 lir->insStorei(get(vp), lirbuf->state, nativeGlobalOffset(vp));
2248 JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
2249 ++m;
2251 JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
2252 map = ti->stackTypeMap();
2253 m = map;
2254 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2255 LIns* i = get(vp);
2256 bool isPromote = isPromoteInt(i);
2257 if (isPromote && *m == JSVAL_DOUBLE) {
2258 lir->insStorei(get(vp), lirbuf->sp,
2259 -treeInfo->nativeStackBase + nativeStackOffset(vp));
2260 /* Aggressively undo speculation so the inner tree will compile if this fails. */
2261 oracle.markStackSlotUndemotable(cx, unsigned(m - map));
2263 JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
2264 ++m;
2266 JS_ASSERT(unsigned(m - map) == ti->nStackTypes);
2267 JS_ASSERT(f == f->root);
2270 JS_REQUIRES_STACK uint8
2271 TraceRecorder::determineSlotType(jsval* vp)
2273 uint8 m;
2274 LIns* i = get(vp);
2275 if (isNumber(*vp)) {
2276 m = isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE;
2277 } else if (JSVAL_IS_OBJECT(*vp)) {
2278 if (JSVAL_IS_NULL(*vp))
2279 m = JSVAL_TNULL;
2280 else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
2281 m = JSVAL_TFUN;
2282 else
2283 m = JSVAL_OBJECT;
2284 } else {
2285 m = JSVAL_TAG(*vp);
2287 JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
2288 return m;
2291 JS_REQUIRES_STACK VMSideExit*
2292 TraceRecorder::snapshot(ExitType exitType)
2294 JSStackFrame* fp = cx->fp;
2295 JSFrameRegs* regs = fp->regs;
2296 jsbytecode* pc = regs->pc;
2298 /* Check for a return-value opcode that needs to restart at the next instruction. */
2299 const JSCodeSpec& cs = js_CodeSpec[*pc];
2302 * When calling a _FAIL native, make the snapshot's pc point to the next
2303 * instruction after the CALL or APPLY. Even on failure, a _FAIL native must not
2304 * be called again from the interpreter.
2306 bool resumeAfter = (pendingTraceableNative &&
2307 JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
2308 if (resumeAfter) {
2309 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW);
2310 pc += cs.length;
2311 regs->pc = pc;
2312 MUST_FLOW_THROUGH("restore_pc");
2315 /* Generate the entry map for the (possibly advanced) pc and stash it in the trace. */
2316 unsigned stackSlots = js_NativeStackSlots(cx, callDepth);
2318 /* It's sufficient to track the native stack use here since all stores above the
2319 stack watermark defined by guards are killed. */
2320 trackNativeStackUse(stackSlots + 1);
2322 /* Capture the type map into a temporary location. */
2323 unsigned ngslots = treeInfo->globalSlots->length();
2324 unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);
2325 uint8* typemap = (uint8*)alloca(typemap_size);
2326 uint8* m = typemap;
2328 /* Determine the type of a store by looking at the current type of the actual value the
2329 interpreter is using. For numbers we have to check what kind of store we used last
2330 (integer or double) to figure out what the side exit should reflect in its typemap. */
2331 FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
2332 *m++ = determineSlotType(vp);
2334 JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
2337 * If we are currently executing a traceable native or we are attaching a second trace
2338 * to it, the value on top of the stack is boxed. Make a note of this in the typemap.
2340 if (pendingTraceableNative && (pendingTraceableNative->flags & JSTN_UNBOX_AFTER))
2341 typemap[stackSlots - 1] = JSVAL_BOXED;
2343 /* Now restore the original pc (after which early returns are OK). */
2344 if (resumeAfter) {
2345 MUST_FLOW_LABEL(restore_pc);
2346 regs->pc = pc - cs.length;
2347 } else {
2348 /* If we take a snapshot on a goto, advance to the target address. This avoids inner
2349 trees returning on a break goto, which the outer recorder would then confuse with
2350 a break in the outer tree. */
2351 if (*pc == JSOP_GOTO)
2352 pc += GET_JUMP_OFFSET(pc);
2353 else if (*pc == JSOP_GOTOX)
2354 pc += GET_JUMPX_OFFSET(pc);
2358 * Check if we already have a matching side exit; if so we can return that
2359 * side exit instead of creating a new one.
2361 VMSideExit** exits = treeInfo->sideExits.data();
2362 unsigned nexits = treeInfo->sideExits.length();
2363 if (exitType == LOOP_EXIT) {
2364 for (unsigned n = 0; n < nexits; ++n) {
2365 VMSideExit* e = exits[n];
2366 if (e->pc == pc && e->imacpc == fp->imacpc &&
2367 !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) {
2368 AUDIT(mergedLoopExits);
2369 return e;
2374 if (sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
2376 * ::snapshot() is infallible in the sense that callers don't
2377 * expect errors; but this is a trace-aborting error condition. So
2378 * mangle the request to consume zero slots, and mark the tree as
2379 * to-be-trashed. This should be safe as the trace will be aborted
2380 * before assembly or execution due to the call to
2381 * trackNativeStackUse above.
2383 stackSlots = 0;
2384 ngslots = 0;
2385 trashSelf = true;
2388 /* We couldn't find a matching side exit, so create a new one. */
2389 LIns* data = lir->insSkip(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8));
2390 VMSideExit* exit = (VMSideExit*) data->payload();
2392 /* Setup side exit structure. */
2393 memset(exit, 0, sizeof(VMSideExit));
2394 exit->from = fragment;
2395 exit->calldepth = callDepth;
2396 exit->numGlobalSlots = ngslots;
2397 exit->numStackSlots = stackSlots;
2398 exit->numStackSlotsBelowCurrentFrame = cx->fp->callee
2399 ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
2400 : 0;
2401 exit->exitType = exitType;
2402 exit->block = fp->blockChain;
2403 exit->pc = pc;
2404 exit->imacpc = fp->imacpc;
2405 exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
2406 exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
2407 exit->nativeCalleeWord = 0;
2408 memcpy(getFullTypeMap(exit), typemap, typemap_size);
2409 return exit;
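/*
 * Illustrative sketch (hypothetical, kept out of the build) of the side-exit
 * allocation above: the VMSideExit structure and its full typemap travel in a
 * single insSkip payload, with the stackSlots + ngslots type tags placed
 * directly behind the exit header.
 */
#if 0
struct ExampleExitLayout {
    VMSideExit exit;       /* fixed-size header, filled in by snapshot() */
    uint8      typemap[1]; /* stackSlots + ngslots tags follow in memory */
};
#endif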
2412 JS_REQUIRES_STACK LIns*
2413 TraceRecorder::createGuardRecord(VMSideExit* exit)
2415 LIns* guardRec = lir->insSkip(sizeof(GuardRecord));
2416 GuardRecord* gr = (GuardRecord*) guardRec->payload();
2418 memset(gr, 0, sizeof(GuardRecord));
2419 gr->exit = exit;
2420 exit->addGuard(gr);
2422 return guardRec;
2426 * Emit a guard for condition (cond), expecting it to evaluate to the boolean result
2427 * (expected), and use the supplied side exit if the condition doesn't hold.
2429 JS_REQUIRES_STACK void
2430 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
2432 LIns* guardRec = createGuardRecord(exit);
2435 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2436 * safe to keep references to the side exits here. If we ever start
2437 * rewinding those lirbufs, we have to make sure we purge the side exits
2438 * that then no longer will be in valid memory.
2440 if (exit->exitType == LOOP_EXIT)
2441 treeInfo->sideExits.add(exit);
2443 if (!cond->isCond()) {
2444 expected = !expected;
2445 cond = lir->ins_eq0(cond);
2448 LIns* guardIns =
2449 lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
2450 if (guardIns) {
2451 debug_only_v(printf(" SideExit=%p exitType=%d\n", (void*)exit, exit->exitType);)
2452 } else {
2453 debug_only_v(printf(" redundant guard, eliminated\n");)
2457 JS_REQUIRES_STACK VMSideExit*
2458 TraceRecorder::copy(VMSideExit* copy)
2460 size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
2461 LIns* data = lir->insSkip(sizeof(VMSideExit) + typemap_size * sizeof(uint8));
2462 VMSideExit* exit = (VMSideExit*) data->payload();
2464 /* Copy side exit structure. */
2465 memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(uint8));
2466 exit->guards = NULL;
2467 exit->from = fragment;
2468 exit->target = NULL;
2471 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2472 * safe to keep references to the side exits here. If we ever start
2473 * rewinding those lirbufs, we have to make sure we purge the side exits
2474 * that then no longer will be in valid memory.
2476 if (exit->exitType == LOOP_EXIT)
2477 treeInfo->sideExits.add(exit);
2478 return exit;
2481 /* Emit a guard for condition (cond), expecting it to evaluate to the boolean result (expected),
2482 and generate a side exit of type exitType to jump to if the condition does not hold. */
2483 JS_REQUIRES_STACK void
2484 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
2486 guard(expected, cond, snapshot(exitType));
2489 /* Try to match the type of a slot to type t. checkType is used to verify that the type of
2490 * values flowing into the loop edge is compatible with the type we expect in the loop header.
2492 * @param v Value.
2493 * @param t Typemap entry for value.
2494 * @param stage_val Outparam for set() address.
2495 * @param stage_ins Outparam for set() instruction.
2496 * @param stage_count Outparam for set() buffer count.
2497 * @return True if types are compatible, false otherwise.
2499 JS_REQUIRES_STACK bool
2500 TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins,
2501 unsigned& stage_count)
2503 if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
2504 debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n",
2506 isNumber(v),
2507 isPromoteInt(get(&v)),
2508 stage_count);)
2509 if (!isNumber(v))
2510 return false; /* not a number? type mismatch */
2511 LIns* i = get(&v);
2512 /* This is always a type mismatch; we can't close the loop with a double flowing into an int slot. */
2513 if (!isPromoteInt(i))
2514 return false;
2515 /* Looks good, slot is an int32, the last instruction should be promotable. */
2516 JS_ASSERT(isInt32(v) && isPromoteInt(i));
2517 /* Overwrite the value in this slot with the argument promoted back to an integer. */
2518 stage_val = &v;
2519 stage_ins = f2i(i);
2520 stage_count++;
2521 return true;
2523 if (t == JSVAL_DOUBLE) {
2524 debug_only_v(printf("checkType(tag=2, t=%d, isnum=%d, promote=%d) stage_count=%d\n",
2526 isNumber(v),
2527 isPromoteInt(get(&v)),
2528 stage_count);)
2529 if (!isNumber(v))
2530 return false; /* not a number? type mismatch */
2531 LIns* i = get(&v);
2532 /* We sink i2f conversions into the side exit, but at the loop edge we have to make
2533 sure we promote back to double if at loop entry we want a double. */
2534 if (isPromoteInt(i)) {
2535 stage_val = &v;
2536 stage_ins = lir->ins1(LIR_i2f, i);
2537 stage_count++;
2539 return true;
2541 if (t == JSVAL_TNULL)
2542 return JSVAL_IS_NULL(v);
2543 if (t == JSVAL_TFUN)
2544 return !JSVAL_IS_PRIMITIVE(v) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2545 if (t == JSVAL_OBJECT)
2546 return !JSVAL_IS_PRIMITIVE(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2548 /* for non-number types we expect a precise match of the type */
2549 uint8 vt = getCoercedType(v);
2550 #ifdef DEBUG
2551 if (vt != t) {
2552 debug_only_v(printf("Type mismatch: val %c, map %c ", typeChar[vt],
2553 typeChar[t]);)
2555 #endif
2556 debug_only_v(printf("checkType(vt=%d, t=%d) stage_count=%d\n",
2557 (int) vt, t, stage_count);)
2558 return vt == t;
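/*
 * Summary of the compatibility rules above (restated for reference, not new
 * behavior):
 *
 *   map entry     value accepted                      staged fixup
 *   ------------  ----------------------------------  --------------------------
 *   JSVAL_INT     number whose LIR value promotes     f2i(ins) written back
 *   JSVAL_DOUBLE  any number                          i2f(ins) if currently int
 *   JSVAL_TNULL   null                                none
 *   JSVAL_TFUN    function object                     none
 *   JSVAL_OBJECT  non-function, non-null object       none
 *   otherwise     exact coerced-type match required   none
 */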
2562 * Make sure that the current values in the given stack frame and all stack frames
2563 * up to and including entryFrame are type-compatible with the entry map.
2565 * @param root_peer First fragment in peer list.
2566 * @param stable_peer Outparam for first type stable peer.
2567 * @param demote True if stability was achieved through demotion.
2568 * @return True if type stable, false otherwise.
2570 JS_REQUIRES_STACK bool
2571 TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote)
2573 uint8* m;
2574 uint8* typemap;
2575 unsigned ngslots = treeInfo->globalSlots->length();
2576 uint16* gslots = treeInfo->globalSlots->data();
2577 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2579 if (stable_peer)
2580 *stable_peer = NULL;
2583 * Rather than calculating all of this twice, we cache it locally. The "stage" buffers
2584 * are for calls to set() that will change the exit types.
2586 bool success;
2587 unsigned stage_count;
2588 jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length()));
2589 LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length()));
2591 /* First run through and see if we can close ourselves - best case! */
2592 stage_count = 0;
2593 success = false;
2595 debug_only_v(printf("Checking type stability against self=%p\n", (void*)fragment);)
2597 m = typemap = treeInfo->globalTypeMap();
2598 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2599 debug_only_v(printf("%s%d ", vpname, vpnum);)
2600 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2601 /* If the failure was an int->double, tell the oracle. */
2602 if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2603 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2604 demote = true;
2605 } else {
2606 goto checktype_fail_1;
2609 ++m;
2611 m = typemap = treeInfo->stackTypeMap();
2612 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2613 debug_only_v(printf("%s%d ", vpname, vpnum);)
2614 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2615 if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2616 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2617 demote = true;
2618 } else {
2619 goto checktype_fail_1;
2622 ++m;
2625 success = true;
2627 checktype_fail_1:
2628 /* If we got a success and we don't need to recompile, we should just close here. */
2629 if (success && !demote) {
2630 for (unsigned i = 0; i < stage_count; i++)
2631 set(stage_vals[i], stage_ins[i]);
2632 return true;
2633 /* If we need to trash, don't bother checking peers. */
2634 } else if (trashSelf) {
2635 return false;
2638 demote = false;
2640 /* At this point the tree cannot be closed on itself, so let's see if we can connect to any
2641 * peer fragment that is type stable.
2643 Fragment* f;
2644 TreeInfo* ti;
2645 for (f = root_peer; f != NULL; f = f->peer) {
2646 debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", (void*)f, f->code());)
2647 if (!f->code())
2648 continue;
2649 ti = (TreeInfo*)f->vmprivate;
2650 /* Don't allow varying stack depths */
2651 if ((ti->nStackTypes != treeInfo->nStackTypes) ||
2652 (ti->typeMap.length() != treeInfo->typeMap.length()) ||
2653 (ti->globalSlots->length() != treeInfo->globalSlots->length()))
2654 continue;
2655 stage_count = 0;
2656 success = false;
2658 m = ti->globalTypeMap();
2659 FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2660 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2661 goto checktype_fail_2;
2662 ++m;
2665 m = ti->stackTypeMap();
2666 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2667 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2668 goto checktype_fail_2;
2669 ++m;
2672 success = true;
2674 checktype_fail_2:
2675 if (success) {
2677 * There was a successful match. We don't care about restoring the saved staging, but
2678 * we do need to clear the original undemote list.
2680 for (unsigned i = 0; i < stage_count; i++)
2681 set(stage_vals[i], stage_ins[i]);
2682 if (stable_peer)
2683 *stable_peer = f;
2684 demote = false;
2685 return false;
2690 * If this is a loop trace and it would be stable with demotions, build an undemote list
2691 * and return true. Our caller should sniff this and trash the tree, recording a new one
2692 * that will presumably stabilize.
2694 if (demote && fragment->kind == LoopTrace) {
2695 typemap = m = treeInfo->globalTypeMap();
2696 FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2697 if (*m == JSVAL_INT) {
2698 JS_ASSERT(isNumber(*vp));
2699 if (!isPromoteInt(get(vp)))
2700 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2701 } else if (*m == JSVAL_DOUBLE) {
2702 JS_ASSERT(isNumber(*vp));
2703 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2704 } else {
2705 JS_ASSERT(*m == JSVAL_TAG(*vp));
2707 m++;
2710 typemap = m = treeInfo->stackTypeMap();
2711 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2712 if (*m == JSVAL_INT) {
2713 JS_ASSERT(isNumber(*vp));
2714 if (!isPromoteInt(get(vp)))
2715 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2716 } else if (*m == JSVAL_DOUBLE) {
2717 JS_ASSERT(isNumber(*vp));
2718 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2719 } else {
2720 JS_ASSERT((*m == JSVAL_TNULL)
2721 ? JSVAL_IS_NULL(*vp)
2722 : *m == JSVAL_TFUN
2723 ? !JSVAL_IS_PRIMITIVE(*vp) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))
2724 : *m == JSVAL_TAG(*vp));
2726 m++;
2728 return true;
2729 } else {
2730 demote = false;
2733 return false;
2736 static JS_REQUIRES_STACK void
2737 FlushJITCache(JSContext* cx)
2739 if (!TRACING_ENABLED(cx))
2740 return;
2741 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2742 debug_only_v(printf("Flushing cache.\n");)
2743 if (tm->recorder)
2744 js_AbortRecording(cx, "flush cache");
2745 TraceRecorder* tr;
2746 while ((tr = tm->abortStack) != NULL) {
2747 tr->removeFragmentoReferences();
2748 tr->deepAbort();
2749 tr->popAbortStack();
2751 Fragmento* fragmento = tm->fragmento;
2752 if (fragmento) {
2753 if (tm->prohibitFlush) {
2754 debug_only_v(printf("Deferring fragmento flush due to deep bail.\n");)
2755 tm->needFlush = JS_TRUE;
2756 return;
2759 fragmento->clearFrags();
2760 #ifdef DEBUG
2761 JS_ASSERT(fragmento->labels);
2762 fragmento->labels->clear();
2763 #endif
2764 tm->lirbuf->rewind();
2765 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2766 VMFragment* f = tm->vmfragments[i];
2767 while (f) {
2768 VMFragment* next = f->next;
2769 fragmento->clearFragment(f);
2770 f = next;
2772 tm->vmfragments[i] = NULL;
2774 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2775 tm->globalStates[i].globalShape = -1;
2776 tm->globalStates[i].globalSlots->clear();
2779 tm->needFlush = JS_FALSE;
2782 /* Compile the current fragment. */
2783 JS_REQUIRES_STACK void
2784 TraceRecorder::compile(JSTraceMonitor* tm)
2786 if (tm->needFlush) {
2787 FlushJITCache(cx);
2788 return;
2790 Fragmento* fragmento = tm->fragmento;
2791 if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
2792 debug_only_v(printf("Blacklist: excessive stack use.\n"));
2793 js_Blacklist((jsbytecode*) fragment->root->ip);
2794 return;
2796 if (anchor && anchor->exitType != CASE_EXIT)
2797 ++treeInfo->branchCount;
2798 if (lirbuf->outOMem()) {
2799 fragmento->assm()->setError(nanojit::OutOMem);
2800 return;
2802 ::compile(fragmento->assm(), fragment);
2803 if (fragmento->assm()->error() == nanojit::OutOMem)
2804 return;
2805 if (fragmento->assm()->error() != nanojit::None) {
2806 debug_only_v(printf("Blacklisted: error during compilation\n");)
2807 js_Blacklist((jsbytecode*) fragment->root->ip);
2808 return;
2810 js_resetRecordingAttempts(cx, (jsbytecode*) fragment->ip);
2811 js_resetRecordingAttempts(cx, (jsbytecode*) fragment->root->ip);
2812 if (anchor) {
2813 #ifdef NANOJIT_IA32
2814 if (anchor->exitType == CASE_EXIT)
2815 fragmento->assm()->patch(anchor, anchor->switchInfo);
2816 else
2817 #endif
2818 fragmento->assm()->patch(anchor);
2820 JS_ASSERT(fragment->code());
2821 JS_ASSERT(!fragment->vmprivate);
2822 if (fragment == fragment->root)
2823 fragment->vmprivate = treeInfo;
2824 /* :TODO: windows support */
2825 #if defined DEBUG && !defined WIN32
2826 const char* filename = cx->fp->script->filename;
2827 char* label = (char*)malloc((filename ? strlen(filename) : 7) + 16);
2828 sprintf(label, "%s:%u", filename ? filename : "<stdin>",
2829 js_FramePCToLineNumber(cx, cx->fp));
2830 fragmento->labels->add(fragment, sizeof(Fragment), 0, label);
2831 free(label);
2832 #endif
2833 AUDIT(traceCompleted);
2836 static bool
2837 js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree,
2838 VMSideExit* exit)
2840 JS_ASSERT(exit->numStackSlots == stableTree->nStackTypes);
2842 /* Must have a matching type unstable exit. */
2843 if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
2844 memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
2845 return false;
2848 exit->target = stableFrag;
2849 frago->assm()->patch(exit);
2851 stableTree->dependentTrees.addUnique(exit->from->root);
2852 ((TreeInfo*)exit->from->root->vmprivate)->linkedTrees.addUnique(stableFrag);
2854 return true;
2857 /* Complete and compile a trace and link it to the existing tree if appropriate. */
2858 JS_REQUIRES_STACK void
2859 TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
2862 * We should have arrived back at the loop header, so we must not be in an imacro
2863 * here, and the opcode should be either JSOP_LOOP or, in case this loop was
2864 * blacklisted in the meantime, JSOP_NOP.
2866 JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
2868 bool stable;
2869 Fragment* peer;
2870 VMFragment* peer_root;
2871 Fragmento* fragmento = tm->fragmento;
2873 if (callDepth != 0) {
2874 debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
2875 js_Blacklist((jsbytecode*) fragment->root->ip);
2876 trashSelf = true;
2877 return;
2880 VMSideExit* exit = snapshot(UNSTABLE_LOOP_EXIT);
2881 JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
2883 VMFragment* root = (VMFragment*)fragment->root;
2884 peer_root = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape, root->argc);
2885 JS_ASSERT(peer_root != NULL);
2887 stable = deduceTypeStability(peer_root, &peer, demote);
2889 #ifdef DEBUG
2890 if (!stable)
2891 AUDIT(unstableLoopVariable);
2892 #endif
2894 if (trashSelf) {
2895 debug_only_v(printf("Trashing tree from type instability.\n");)
2896 return;
2899 if (stable && demote) {
2900 JS_ASSERT(fragment->kind == LoopTrace);
2901 return;
2904 if (!stable) {
2905 fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
2908 * If we didn't find a type stable peer, we compile the loop anyway and
2909 * hope it becomes stable later.
2911 if (!peer) {
2913 * If such a fragment does not exist, let's compile the loop ahead
2914 * of time anyway. Later, if the loop becomes type stable, we will
2915 * connect these two fragments together.
2917 debug_only_v(printf("Trace has unstable loop variable with no stable peer, "
2918 "compiling anyway.\n");)
2919 UnstableExit* uexit = new UnstableExit;
2920 uexit->fragment = fragment;
2921 uexit->exit = exit;
2922 uexit->next = treeInfo->unstableExits;
2923 treeInfo->unstableExits = uexit;
2924 } else {
2925 JS_ASSERT(peer->code());
2926 exit->target = peer;
2927 debug_only_v(printf("Joining type-unstable trace to target fragment %p.\n", (void*)peer);)
2928 stable = true;
2929 ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);
2930 treeInfo->linkedTrees.addUnique(peer);
2932 } else {
2933 exit->target = fragment->root;
2934 fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), createGuardRecord(exit));
2936 compile(tm);
2938 if (fragmento->assm()->error() != nanojit::None)
2939 return;
2941 joinEdgesToEntry(fragmento, peer_root);
2943 debug_only_v(printf("updating specializations on dependent and linked trees\n"))
2944 if (fragment->root->vmprivate)
2945 specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
2948 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
2949 * should try to compile the outer tree again.
2951 if (outer)
2952 js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
2954 debug_only_v(printf("recording completed at %s:%u@%u via closeLoop\n",
2955 cx->fp->script->filename,
2956 js_FramePCToLineNumber(cx, cx->fp),
2957 FramePCOffset(cx->fp));)
2960 JS_REQUIRES_STACK void
2961 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
2963 if (fragment->kind == LoopTrace) {
2964 TreeInfo* ti;
2965 Fragment* peer;
2966 uint8* t1, *t2;
2967 UnstableExit* uexit, **unext;
2968 uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nStackTypes);
2969 uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nGlobalTypes());
2971 for (peer = peer_root; peer != NULL; peer = peer->peer) {
2972 if (!peer->code())
2973 continue;
2974 ti = (TreeInfo*)peer->vmprivate;
2975 uexit = ti->unstableExits;
2976 unext = &ti->unstableExits;
2977 while (uexit != NULL) {
2978 bool remove = js_JoinPeersIfCompatible(fragmento, fragment, treeInfo, uexit->exit);
2979 JS_ASSERT(!remove || fragment != peer);
2980 debug_only_v(if (remove) {
2981 printf("Joining type-stable trace to target exit %p->%p.\n",
2982 (void*)uexit->fragment, (void*)uexit->exit); });
2983 if (!remove) {
2984 /* See if this exit contains mismatch demotions, which imply trashing a tree.
2985 This is actually faster than trashing the original tree as soon as the
2986 instability is detected, since we could have compiled a fairly stable
2987 tree that ran faster with integers. */
2988 unsigned stackCount = 0;
2989 unsigned globalCount = 0;
2990 t1 = treeInfo->stackTypeMap();
2991 t2 = getStackTypeMap(uexit->exit);
2992 for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {
2993 if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
2994 stackDemotes[stackCount++] = i;
2995 } else if (t2[i] != t1[i]) {
2996 stackCount = 0;
2997 break;
3000 t1 = treeInfo->globalTypeMap();
3001 t2 = getGlobalTypeMap(uexit->exit);
3002 for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
3003 if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
3004 globalDemotes[globalCount++] = i;
3005 } else if (t2[i] != t1[i]) {
3006 globalCount = 0;
3007 stackCount = 0;
3008 break;
3011 if (stackCount || globalCount) {
3012 for (unsigned i = 0; i < stackCount; i++)
3013 oracle.markStackSlotUndemotable(cx, stackDemotes[i]);
3014 for (unsigned i = 0; i < globalCount; i++)
3015 oracle.markGlobalSlotUndemotable(cx, ti->globalSlots->data()[globalDemotes[i]]);
3016 JS_ASSERT(peer == uexit->fragment->root);
3017 if (fragment == peer)
3018 trashSelf = true;
3019 else
3020 whichTreesToTrash.addUnique(uexit->fragment->root);
3021 break;
3024 if (remove) {
3025 *unext = uexit->next;
3026 delete uexit;
3027 uexit = *unext;
3028 } else {
3029 unext = &uexit->next;
3030 uexit = uexit->next;
3036 debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, peer_root->globalObj,
3037 peer_root->globalShape, peer_root->argc);)
3040 /* Emit an always-exit guard and compile the tree (used for break statements). */
3041 JS_REQUIRES_STACK void
3042 TraceRecorder::endLoop(JSTraceMonitor* tm)
3044 if (callDepth != 0) {
3045 debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
3046 js_Blacklist((jsbytecode*) fragment->root->ip);
3047 trashSelf = true;
3048 return;
3051 fragment->lastIns =
3052 lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(snapshot(LOOP_EXIT)));
3053 compile(tm);
3055 if (tm->fragmento->assm()->error() != nanojit::None)
3056 return;
3058 VMFragment* root = (VMFragment*)fragment->root;
3059 joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape, root->argc));
3061 /* Note: this must always be done, in case we added new globals on trace and haven't yet
3062 propagated those to linked and dependent trees. */
3063 debug_only_v(printf("updating specializations on dependent and linked trees\n"))
3064 if (fragment->root->vmprivate)
3065 specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
3068 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
3069 * should try to compile the outer tree again.
3071 if (outer)
3072 js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
3074 debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",
3075 cx->fp->script->filename,
3076 js_FramePCToLineNumber(cx, cx->fp),
3077 FramePCOffset(cx->fp));)
3080 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
3081 JS_REQUIRES_STACK void
3082 TraceRecorder::prepareTreeCall(Fragment* inner)
3084 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
3085 inner_sp_ins = lirbuf->sp;
3086 /* The inner tree expects to be called from the current frame. If the outer tree (this
3087 trace) is currently inside inlined function code (callDepth > 0), we have to advance
3088 the native stack pointer such that we match what the inner trace expects to see. We
3089 move it back when we come out of the inner tree call. */
3090 if (callDepth > 0) {
3091 /* Calculate the amount we have to lift the native stack pointer by to compensate for
3092 any outer frames that the inner tree doesn't expect but the outer tree has. */
3093 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
3094 /* Calculate the amount we have to lift the call stack by. */
3095 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
3096 /* Guard that we have enough stack space for the tree we are trying to call on top
3097 of the new value for sp. */
3098 debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
3099 sp_adj, treeInfo->nativeStackBase, ti->nativeStackBase));
3100 LIns* sp_top = lir->ins2i(LIR_piadd, lirbuf->sp,
3101 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
3102 + sp_adj /* adjust for stack in outer frame inner tree can't see */
3103 + ti->maxNativeStackSlots * sizeof(double)); /* plus the inner tree's stack */
3104 guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
3105 /* Guard that we have enough call stack space. */
3106 LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
3107 ti->maxCallDepth * sizeof(FrameInfo*));
3108 guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
3109 /* We have enough space, so adjust sp and rp to their new level. */
3110 lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
3111 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
3112 + sp_adj /* adjust for stack in outer frame inner tree can't see */
3113 + ti->nativeStackBase), /* plus the inner tree's stack base */
3114 lirbuf->state, offsetof(InterpState, sp));
3115 lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
3116 lirbuf->state, offsetof(InterpState, rp));
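/*
 * Worked example of the adjustment above (hypothetical numbers): with an outer
 * tree whose nativeStackBase covers 4 doubles, sp_adj covering 6 doubles of
 * outer frames the inner tree can't see, and an inner tree whose
 * nativeStackBase covers 2 doubles, the stored inner sp is
 * sp - 4*8 + 6*8 + 2*8 bytes, i.e. sp rebased to the inner tree's entry frame.
 */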
3120 /* Record a call to an inner tree. */
3121 JS_REQUIRES_STACK void
3122 TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
3124 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
3126 /* Invoke the inner tree. */
3127 LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
3128 LIns* ret = lir->insCall(&js_CallTree_ci, args);
3130 /* Read back all registers, in case the called tree changed any of them. */
3131 JS_ASSERT(!memchr(getGlobalTypeMap(exit), JSVAL_BOXED, exit->numGlobalSlots) &&
3132 !memchr(getStackTypeMap(exit), JSVAL_BOXED, exit->numStackSlots));
3133 import(ti, inner_sp_ins, exit->numStackSlots, exit->numGlobalSlots,
3134 exit->calldepth, getFullTypeMap(exit));
3136 /* Restore sp and rp to their original values (we still have them in a register). */
3137 if (callDepth > 0) {
3138 lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
3139 lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
3143 * Guard that we come out of the inner tree along the same side exit that we took when
3144 * we called the inner tree at recording time.
3146 guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), NESTED_EXIT);
3147 /* Register us as a dependent tree of the inner tree. */
3148 ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
3149 treeInfo->linkedTrees.addUnique(inner);
3152 /* Add an if/if-else control-flow merge point to the list of known merge points. */
3153 JS_REQUIRES_STACK void
3154 TraceRecorder::trackCfgMerges(jsbytecode* pc)
3156 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
3157 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
3158 jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
3159 if (sn != NULL) {
3160 if (SN_TYPE(sn) == SRC_IF) {
3161 cfgMerges.add((*pc == JSOP_IFEQ)
3162 ? pc + GET_JUMP_OFFSET(pc)
3163 : pc + GET_JUMPX_OFFSET(pc));
3164 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
3165 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
3169 /* Invert the direction of the guard if this is a loop edge that is not
3170 taken (thin loop). */
3171 JS_REQUIRES_STACK void
3172 TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
3174 ExitType exitType;
3175 if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3176 exitType = LOOP_EXIT;
3179 * If we are about to walk out of the loop, generate code for the inverse loop
3180 * condition, pretending we recorded the case that stays on trace.
3182 if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
3183 JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
3184 debug_only_v(printf("Walking out of the loop, terminating it anyway.\n");)
3185 cond = !cond;
3189 * Conditional guards do not have to be emitted if the condition is constant. We
3190 * make a note of whether the loop condition is true or false here, so we later know
3191 * whether to emit a loop edge or a loop end.
3193 if (x->isconst()) {
3194 loop = (x->imm32() == cond);
3195 return;
3197 } else {
3198 exitType = BRANCH_EXIT;
3200 if (!x->isconst())
3201 guard(cond, x, exitType);
3204 /* Emit code for a fused IFEQ/IFNE. */
3205 JS_REQUIRES_STACK void
3206 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
3208 if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
3209 emitIf(pc, cond, x);
3210 if (*pc == JSOP_IFEQ)
3211 trackCfgMerges(pc);
3215 /* Check whether we have reached the end of the trace. */
3216 JS_REQUIRES_STACK JSRecordingStatus
3217 TraceRecorder::checkTraceEnd(jsbytecode *pc)
3219 if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3221 * If we compile a loop, the trace should have a zero stack balance at the loop
3222 * edge. Currently we are parked on a comparison op or IFNE/IFEQ, so advance
3223 pc to the loop header, adjust the stack pointer, and pretend we have
3224 reached the loop header.
3226 if (loop) {
3227 JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
3228 bool fused = pc != cx->fp->regs->pc;
3229 JSFrameRegs orig = *cx->fp->regs;
3231 cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
3232 cx->fp->regs->sp -= fused ? 2 : 1;
3234 bool demote = false;
3235 closeLoop(traceMonitor, demote);
3237 *cx->fp->regs = orig;
3240 * If compiling this loop generated new oracle information which will likely
3241 * lead to a different compilation result, immediately trigger another
3242 * compiler run. This is guaranteed to converge since the oracle only
3243 * accumulates adverse information but never drops it (except when we
3244 flush it during garbage collection).
3246 if (demote)
3247 js_AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);
3248 } else {
3249 endLoop(traceMonitor);
3251 return JSRS_STOP;
3253 return JSRS_CONTINUE;
3256 bool
3257 TraceRecorder::hasMethod(JSObject* obj, jsid id)
3259 if (!obj)
3260 return false;
3262 JSObject* pobj;
3263 JSProperty* prop;
3264 int protoIndex = OBJ_LOOKUP_PROPERTY(cx, obj, id, &pobj, &prop);
3265 if (protoIndex < 0 || !prop)
3266 return false;
3268 bool found = false;
3269 if (OBJ_IS_NATIVE(pobj)) {
3270 JSScope* scope = OBJ_SCOPE(pobj);
3271 JSScopeProperty* sprop = (JSScopeProperty*) prop;
3273 if (SPROP_HAS_STUB_GETTER(sprop) &&
3274 SPROP_HAS_VALID_SLOT(sprop, scope)) {
3275 jsval v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
3276 if (VALUE_IS_FUNCTION(cx, v)) {
3277 found = true;
3278 if (!SCOPE_IS_BRANDED(scope)) {
3279 js_MakeScopeShapeUnique(cx, scope);
3280 SCOPE_SET_BRANDED(scope);
3286 OBJ_DROP_PROPERTY(cx, pobj, prop);
3287 return found;
3290 JS_REQUIRES_STACK bool
3291 TraceRecorder::hasIteratorMethod(JSObject* obj)
3293 JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);
3295 return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom));
3299 nanojit::StackFilter::getTop(LInsp guard)
3301 VMSideExit* e = (VMSideExit*)guard->record()->exit;
3302 if (sp == lirbuf->sp)
3303 return e->sp_adj;
3304 JS_ASSERT(sp == lirbuf->rp);
3305 return e->rp_adj;
3308 #if defined NJ_VERBOSE
3309 void
3310 nanojit::LirNameMap::formatGuard(LIns *i, char *out)
3312 VMSideExit *x;
3314 x = (VMSideExit *)i->record()->exit;
3315 sprintf(out,
3316 "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld",
3317 formatRef(i),
3318 lirNames[i->opcode()],
3319 i->oprnd1()->isCond() ? formatRef(i->oprnd1()) : "",
3320 (void *)x->pc,
3321 (void *)x->imacpc,
3322 (long int)x->sp_adj,
3323 (long int)x->rp_adj);
3325 #endif
3327 void
3328 nanojit::Fragment::onDestroy()
3330 delete (TreeInfo *)vmprivate;
3333 static JS_REQUIRES_STACK bool
3334 js_DeleteRecorder(JSContext* cx)
3336 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3338 /* Aborting and completing a trace end up here. */
3339 delete tm->recorder;
3340 tm->recorder = NULL;
3343 * If we ran out of memory, flush the code cache.
3345 if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem ||
3346 js_OverfullFragmento(tm, tm->fragmento)) {
3347 FlushJITCache(cx);
3348 return false;
3351 return true;
3355 * Checks whether the shape of the global object has changed, and returns the tracked
3356 * global state (shape and slot list) for it.
3357 static JS_REQUIRES_STACK bool
3358 CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
3359 uint32 *shape=NULL, SlotList** slots=NULL)
3361 if (tm->needFlush) {
3362 FlushJITCache(cx);
3363 return false;
3366 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
3367 return false;
3369 uint32 globalShape = OBJ_SHAPE(globalObj);
3371 if (tm->recorder) {
3372 VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
3373 TreeInfo* ti = tm->recorder->getTreeInfo();
3374 /* Check the global shape matches the recorder's treeinfo's shape. */
3375 if (globalObj != root->globalObj || globalShape != root->globalShape) {
3376 AUDIT(globalShapeMismatchAtEntry);
3377 debug_only_v(printf("Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
3378 (void*)globalObj, globalShape, (void*)root->globalObj,
3379 root->globalShape);)
3380 js_Backoff(cx, (jsbytecode*) root->ip);
3381 FlushJITCache(cx);
3382 return false;
3384 if (shape)
3385 *shape = globalShape;
3386 if (slots)
3387 *slots = ti->globalSlots;
3388 return true;
3391 /* No recorder, search for a tracked global-state (or allocate one). */
3392 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
3393 GlobalState &state = tm->globalStates[i];
3395 if (state.globalShape == uint32(-1)) {
3396 state.globalObj = globalObj;
3397 state.globalShape = globalShape;
3398 JS_ASSERT(state.globalSlots);
3399 JS_ASSERT(state.globalSlots->length() == 0);
3402 if (state.globalObj == globalObj && state.globalShape == globalShape) {
3403 if (shape)
3404 *shape = globalShape;
3405 if (slots)
3406 *slots = state.globalSlots;
3407 return true;
3411 /* No currently-tracked-global found and no room to allocate, abort. */
3412 AUDIT(globalShapeMismatchAtEntry);
3413 debug_only_v(printf("No global slotlist for global shape %u, flushing cache.\n",
3414 globalShape));
3415 FlushJITCache(cx);
3416 return false;
3419 static JS_REQUIRES_STACK bool
3420 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
3421 unsigned stackSlots, unsigned ngslots, uint8* typeMap,
3422 VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
3424 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3425 if (JS_TRACE_MONITOR(cx).needFlush) {
3426 FlushJITCache(cx);
3427 return false;
3430 JS_ASSERT(f->root != f || !cx->fp->imacpc);
3432 /* start recording if no exception during construction */
3433 tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
3434 stackSlots, ngslots, typeMap,
3435 expectedInnerExit, outer, outerArgc);
3437 if (cx->throwing) {
3438 js_AbortRecording(cx, "setting up recorder failed");
3439 return false;
3441 /* clear any leftover error state */
3442 tm->fragmento->assm()->setError(None);
3443 return true;
3446 static void
3447 js_TrashTree(JSContext* cx, Fragment* f)
3449 JS_ASSERT((!f->code()) == (!f->vmprivate));
3450 JS_ASSERT(f == f->root);
3451 if (!f->code())
3452 return;
3453 AUDIT(treesTrashed);
3454 debug_only_v(printf("Trashing tree info.\n");)
3455 Fragmento* fragmento = JS_TRACE_MONITOR(cx).fragmento;
3456 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3457 f->vmprivate = NULL;
3458 f->releaseCode(fragmento);
3459 Fragment** data = ti->dependentTrees.data();
3460 unsigned length = ti->dependentTrees.length();
3461 for (unsigned n = 0; n < length; ++n)
3462 js_TrashTree(cx, data[n]);
3463 delete ti;
3464 JS_ASSERT(!f->code() && !f->vmprivate);
3467 static int
3468 js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
3470 VOUCH_DOES_NOT_REQUIRE_STACK();
3472 JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));
3474 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);
3475 JS_ASSERT(FUN_INTERPRETED(fun));
3477 /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
3478 JSStackFrame* fp = cx->fp;
3479 JS_ASSERT_IF(!fi.imacpc,
3480 js_ReconstructStackDepth(cx, fp->script, fi.pc)
3481 == uintN(fi.s.spdist - fp->script->nfixed));
3483 uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
3484 JSScript* script = fun->u.i.script;
3485 size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);
3487 /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
3488 JSArena* a = cx->stackPool.current;
3489 void* newmark = (void*) a->avail;
3490 uintN argc = fi.s.argc & 0x7fff;
3491 jsval* vp = fp->slots + fi.s.spdist - (2 + argc);
3492 uintN missing = 0;
3493 jsval* newsp;
3495 if (fun->nargs > argc) {
3496 const JSFrameRegs& regs = *fp->regs;
3498 newsp = vp + 2 + fun->nargs;
3499 JS_ASSERT(newsp > regs.sp);
3500 if ((jsuword) newsp <= a->limit) {
3501 if ((jsuword) newsp > a->avail)
3502 a->avail = (jsuword) newsp;
3503 jsval* argsp = newsp;
3504 do {
3505 *--argsp = JSVAL_VOID;
3506 } while (argsp != regs.sp);
3507 missing = 0;
3508 } else {
3509 missing = fun->nargs - argc;
3510 nbytes += (2 + fun->nargs) * sizeof(jsval);
3514 /* Allocate the inline frame with its vars and operands. */
3515 if (a->avail + nbytes <= a->limit) {
3516 newsp = (jsval *) a->avail;
3517 a->avail += nbytes;
3518 JS_ASSERT(missing == 0);
3519 } else {
3520 /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3521 JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
3522 JS_ASSERT(newsp);
3525 * Move args if the missing ones overflow arena a, then push
3526 * undefined for the missing args.
3528 if (missing) {
3529 memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
3530 vp = newsp;
3531 newsp = vp + 2 + argc;
3532 do {
3533 *newsp++ = JSVAL_VOID;
3534 } while (--missing != 0);
3538 /* Claim space for the stack frame and initialize it. */
3539 JSInlineFrame* newifp = (JSInlineFrame *) newsp;
3540 newsp += nframeslots;
3542 newifp->frame.callobj = NULL;
3543 newifp->frame.argsobj = NULL;
3544 newifp->frame.varobj = NULL;
3545 newifp->frame.script = script;
3546 newifp->frame.callee = fi.callee; // Roll with a potentially wrong callee for now.
3547 newifp->frame.fun = fun;
3549 bool constructing = (fi.s.argc & 0x8000) != 0;
3550 newifp->frame.argc = argc;
3551 newifp->callerRegs.pc = fi.pc;
3552 newifp->callerRegs.sp = fp->slots + fi.s.spdist;
3553 fp->imacpc = fi.imacpc;
3555 #ifdef DEBUG
3556 if (fi.block != fp->blockChain) {
3557 for (JSObject* obj = fi.block; obj != fp->blockChain; obj = STOBJ_GET_PARENT(obj))
3558 JS_ASSERT(obj);
3560 #endif
3561 fp->blockChain = fi.block;
3563 newifp->frame.argv = newifp->callerRegs.sp - argc;
3564 JS_ASSERT(newifp->frame.argv);
3565 #ifdef DEBUG
3566 // Initialize argv[-1] to a known-bogus value so we'll catch it if
3567 // someone forgets to initialize it later.
3568 newifp->frame.argv[-1] = JSVAL_HOLE;
3569 #endif
3570 JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);
3572 newifp->frame.rval = JSVAL_VOID;
3573 newifp->frame.down = fp;
3574 newifp->frame.annotation = NULL;
3575 newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrames
3576 newifp->frame.sharpDepth = 0;
3577 newifp->frame.sharpArray = NULL;
3578 newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
3579 newifp->frame.dormantNext = NULL;
3580 newifp->frame.xmlNamespace = NULL;
3581 newifp->frame.blockChain = NULL;
3582 newifp->mark = newmark;
3583 newifp->frame.thisp = NULL; // will be updated in FlushNativeStackFrames
3585 newifp->frame.regs = fp->regs;
3586 newifp->frame.regs->pc = script->code;
3587 newifp->frame.regs->sp = newsp + script->nfixed;
3588 newifp->frame.imacpc = NULL;
3589 newifp->frame.slots = newsp;
3590 if (script->staticLevel < JS_DISPLAY_SIZE) {
3591 JSStackFrame **disp = &cx->display[script->staticLevel];
3592 newifp->frame.displaySave = *disp;
3593 *disp = &newifp->frame;
3594 }
3596 /*
3597 * Note that fp->script is still the caller's script; set the callee
3598 * inline frame's idea of caller version from its version.
3599 */
3600 newifp->callerVersion = (JSVersion) fp->script->version;
3602 // After this paragraph, fp and cx->fp point to the newly synthesized frame.
3603 fp->regs = &newifp->callerRegs;
3604 fp = cx->fp = &newifp->frame;
3606 if (fun->flags & JSFUN_HEAVYWEIGHT) {
3607 /*
3608 * Set hookData to null because the failure case for js_GetCallObject
3609 * involves it calling the debugger hook.
3610 *
3611 * Allocating the Call object must not fail, so use an object
3612 * previously reserved by js_ExecuteTree if needed.
3613 */
3614 newifp->hookData = NULL;
3615 JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
3616 JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
3617 #ifdef DEBUG
3618 JSObject *obj =
3619 #endif
3620 js_GetCallObject(cx, &newifp->frame);
3621 JS_ASSERT(obj);
3622 JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
3623 }
3625 /*
3626 * If there's a call hook, invoke it to compute the hookData used by
3627 * debuggers that cooperate with the interpreter.
3628 */
3629 JSInterpreterHook hook = cx->debugHooks->callHook;
3630 if (hook) {
3631 newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0,
3632 cx->debugHooks->callHookData);
3633 } else {
3634 newifp->hookData = NULL;
3635 }
3637 // FIXME? We must count stack slots from the caller's operand stack up to (but not
3638 // including) the callee's, including missing arguments. Could we shift everything down
3639 // to the caller's fp->slots (where vars start) and avoid some of the complexity?
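/* Annotation (added; not in the original source): the value returned below is
 * the number of native stack slots this synthesized frame consumes: the
 * caller's operand-stack depth (spdist minus the caller's nfixed), plus any
 * missing arguments that were filled with undefined above, plus the callee's
 * fixed slots. E.g. a depth of 3, two missing args, and nfixed == 5 yields
 * 3 + 2 + 5 == 10 slots.
 */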
3640 return (fi.s.spdist - fp->down->script->nfixed) +
3641 ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
3642 script->nfixed;
3643 }
3645 static void
3646 SynthesizeSlowNativeFrame(JSContext *cx, VMSideExit *exit)
3647 {
3648 VOUCH_DOES_NOT_REQUIRE_STACK();
3650 void *mark;
3651 JSInlineFrame *ifp;
3653 /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3654 mark = JS_ARENA_MARK(&cx->stackPool);
3655 JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
3656 JS_ASSERT(ifp);
3658 JSStackFrame *fp = &ifp->frame;
3659 fp->regs = NULL;
3660 fp->imacpc = NULL;
3661 fp->slots = NULL;
3662 fp->callobj = NULL;
3663 fp->argsobj = NULL;
3664 fp->varobj = cx->fp->varobj;
3665 fp->callee = exit->nativeCallee();
3666 fp->script = NULL;
3667 fp->fun = GET_FUNCTION_PRIVATE(cx, fp->callee);
3668 // fp->thisp is really a jsval, so reinterpret_cast here, not JSVAL_TO_OBJECT.
3669 fp->thisp = (JSObject *) cx->nativeVp[1];
3670 fp->argc = cx->nativeVpLen - 2;
3671 fp->argv = cx->nativeVp + 2;
3672 fp->rval = JSVAL_VOID;
3673 fp->down = cx->fp;
3674 fp->annotation = NULL;
3675 JS_ASSERT(cx->fp->scopeChain);
3676 fp->scopeChain = cx->fp->scopeChain;
3677 fp->blockChain = NULL;
3678 fp->sharpDepth = 0;
3679 fp->sharpArray = NULL;
3680 fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
3681 fp->dormantNext = NULL;
3682 fp->xmlNamespace = NULL;
3683 fp->displaySave = NULL;
3685 ifp->mark = mark;
3686 cx->fp = fp;
3687 }
3689 JS_REQUIRES_STACK bool
3690 js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
3691 uint32 outerArgc, JSObject* globalObj, uint32 globalShape,
3692 SlotList* globalSlots, uint32 argc)
3693 {
3694 JS_ASSERT(f->root == f);
3696 /* Make sure the global type map didn't change on us. */
3697 if (!CheckGlobalObjectShape(cx, tm, globalObj)) {
3698 js_Backoff(cx, (jsbytecode*) f->root->ip);
3699 return false;
3700 }
3702 AUDIT(recorderStarted);
3704 /* Try to find an unused peer fragment, or allocate a new one. */
3705 while (f->code() && f->peer)
3706 f = f->peer;
3707 if (f->code())
3708 f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalObj, globalShape, argc);
3710 if (!f) {
3711 FlushJITCache(cx);
3712 return false;
3713 }
3715 f->root = f;
3716 f->lirbuf = tm->lirbuf;
3718 if (f->lirbuf->outOMem() || js_OverfullFragmento(tm, tm->fragmento)) {
3719 js_Backoff(cx, (jsbytecode*) f->root->ip);
3720 FlushJITCache(cx);
3721 debug_only_v(printf("Out of memory recording new tree, flushing cache.\n");)
3722 return false;
3723 }
3725 JS_ASSERT(!f->code() && !f->vmprivate);
3727 /* Set up the VM-private treeInfo structure for this fragment. */
3728 TreeInfo* ti = new (&gc) TreeInfo(f, globalSlots);
3730 /* capture the coerced type of each active slot in the type map */
3731 ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
3732 ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
3734 #ifdef DEBUG
3735 /*
3736 * Check for duplicate entry type maps. This is always wrong and hints at
3737 * trace explosion since we are trying to stabilize something without
3738 * properly connecting peer edges.
3739 */
3740 TreeInfo* ti_other;
3741 for (Fragment* peer = getLoop(tm, f->root->ip, globalObj, globalShape, argc); peer != NULL;
3742 peer = peer->peer) {
3743 if (!peer->code() || peer == f)
3744 continue;
3745 ti_other = (TreeInfo*)peer->vmprivate;
3746 JS_ASSERT(ti_other);
3747 JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
3748 }
3749 ti->treeFileName = cx->fp->script->filename;
3750 ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
3751 ti->treePCOffset = FramePCOffset(cx->fp);
3752 #endif
3754 /* determine the native frame layout at the entry point */
3755 unsigned entryNativeStackSlots = ti->nStackTypes;
3756 JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/));
3757 ti->nativeStackBase = (entryNativeStackSlots -
3758 (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
3759 ti->maxNativeStackSlots = entryNativeStackSlots;
3760 ti->maxCallDepth = 0;
3761 ti->script = cx->fp->script;
3763 /* Record the primary trace. */
3764 if (!js_StartRecorder(cx, NULL, f, ti,
3765 ti->nStackTypes,
3766 ti->globalSlots->length(),
3767 ti->typeMap.data(), NULL, outer, outerArgc)) {
3768 return false;
3769 }
3771 return true;
3772 }
3774 JS_REQUIRES_STACK static inline bool
3775 isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
3776 {
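/* Annotation (added; not in the original source): a TreeInfo type map is laid
 * out as [nStackTypes stack slots][global slots], so indices below nStackTypes
 * consult the stack oracle and anything at or above it is translated through
 * gslots[] into a global-slot query, as the two branches below show. */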
3777 if (slot < ti->nStackTypes)
3778 return oracle.isStackSlotUndemotable(cx, slot);
3780 uint16* gslots = ti->globalSlots->data();
3781 return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
3782 }
3784 JS_REQUIRES_STACK static bool
3785 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer, uint32 outerArgc)
3786 {
3787 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3788 if (tm->needFlush) {
3789 FlushJITCache(cx);
3790 return false;
3791 }
3793 VMFragment* from = (VMFragment*)exit->from->root;
3794 TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
3796 JS_ASSERT(exit->from->root->code());
3798 /* Make sure any doubles are not accidentally undemoted */
3799 uint8* m = getStackTypeMap(exit);
3800 for (unsigned i = 0; i < exit->numStackSlots; i++) {
3801 if (m[i] == JSVAL_DOUBLE)
3802 oracle.markStackSlotUndemotable(cx, i);
3803 }
3804 m = getGlobalTypeMap(exit);
3805 for (unsigned i = 0; i < exit->numGlobalSlots; i++) {
3806 if (m[i] == JSVAL_DOUBLE)
3807 oracle.markGlobalSlotUndemotable(cx, from_ti->globalSlots->data()[i]);
3808 }
3810 /* If this exit does not have enough globals, there might be a peer with more globals
3811 * that we can join, but only if the parent's globals match.
3812 */
3813 m = getFullTypeMap(exit);
3814 if (exit->numGlobalSlots < from_ti->nGlobalTypes()) {
3815 uint32 partial = exit->numStackSlots + exit->numGlobalSlots;
3816 m = (uint8*)alloca(from_ti->typeMap.length());
3817 memcpy(m, getFullTypeMap(exit), partial);
3818 memcpy(m + partial, from_ti->globalTypeMap() + exit->numGlobalSlots,
3819 from_ti->nGlobalTypes() - exit->numGlobalSlots);
3820 }
3822 bool bound = false;
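/* Annotation (added; not in the original source): at this point m points at a
 * full type map laid out as [numStackSlots stack types][global types]; if the
 * exit recorded fewer globals than the tree now tracks, the tail was filled in
 * above from the parent tree's global type map. */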
3823 for (Fragment* f = from->first; f != NULL; f = f->peer) {
3824 if (!f->code())
3825 continue;
3826 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3827 JS_ASSERT(exit->numStackSlots == ti->nStackTypes);
3828 /* Check the minimum number of slots that need to be compared. */
3829 unsigned checkSlots = JS_MIN(from_ti->typeMap.length(), ti->typeMap.length());
3830 uint8* m2 = ti->typeMap.data();
3831 /* Analyze the exit typemap against the peer typemap.
3832 * Two conditions are important:
3833 * 1) Typemaps are identical: these peers can be attached.
3834 * 2) Typemaps do not match, but only contain I->D mismatches.
3835 * In this case, the original tree must be trashed because it
3836 * will never connect to any peer.
3837 */
3838 bool matched = true;
3839 bool undemote = false;
3840 for (uint32 i = 0; i < checkSlots; i++) {
3841 /* If the types are equal we're okay. */
3842 if (m[i] == m2[i])
3843 continue;
3844 matched = false;
3845 /* If there's an I->D that cannot be resolved, flag it.
3846 * Otherwise, break and go to the next peer.
3847 */
3848 if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) {
3849 undemote = true;
3850 } else {
3851 undemote = false;
3852 break;
3853 }
3854 }
3855 if (matched) {
3856 JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
3857 JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
3858 /* Capture missing globals on both trees and link the fragments together. */
3859 if (from != f) {
3860 ti->dependentTrees.addUnique(from);
3861 from_ti->linkedTrees.addUnique(f);
3862 }
3863 if (ti->nGlobalTypes() < ti->globalSlots->length())
3864 specializeTreesToMissingGlobals(cx, ti);
3865 exit->target = f;
3866 tm->fragmento->assm()->patch(exit);
3867 /* Now erase this exit from the unstable exit list. */
3868 UnstableExit** tail = &from_ti->unstableExits;
3869 for (UnstableExit* uexit = from_ti->unstableExits; uexit != NULL; uexit = uexit->next) {
3870 if (uexit->exit == exit) {
3871 *tail = uexit->next;
3872 delete uexit;
3873 bound = true;
3874 break;
3875 }
3876 tail = &uexit->next;
3877 }
3878 JS_ASSERT(bound);
3879 debug_only_v(js_DumpPeerStability(tm, f->ip, from->globalObj, from->globalShape, from->argc);)
3880 break;
3881 } else if (undemote) {
3882 /* The original tree is unconnectable, so trash it. */
3883 js_TrashTree(cx, f);
3884 /* We shouldn't attempt to record now, since we'll hit a duplicate. */
3885 return false;
3886 }
3887 }
3888 if (bound)
3889 return false;
3891 VMFragment* root = (VMFragment*)from->root;
3892 return js_RecordTree(cx, tm, from->first, outer, outerArgc, root->globalObj,
3893 root->globalShape, from_ti->globalSlots, cx->fp->argc);
3894 }
3896 static JS_REQUIRES_STACK bool
3897 js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer)
3898 {
3899 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3900 if (tm->needFlush) {
3901 FlushJITCache(cx);
3902 return false;
3903 }
3905 Fragment* f = anchor->from->root;
3906 JS_ASSERT(f->vmprivate);
3907 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3909 /* Don't grow trees above a certain size to avoid code explosion due to tail duplication. */
3910 if (ti->branchCount >= MAX_BRANCHES)
3911 return false;
3913 Fragment* c;
3914 if (!(c = anchor->target)) {
3915 c = JS_TRACE_MONITOR(cx).fragmento->createBranch(anchor, cx->fp->regs->pc);
3916 c->spawnedFrom = anchor;
3917 c->parent = f;
3918 anchor->target = c;
3919 c->root = f;
3920 }
3922 /*
3923 * If we are recycling a fragment, it might have a different ip, so reset it here. This
3924 * can happen when attaching a branch to a NESTED_EXIT, which might extend along separate
3925 * paths (i.e. after the loop edge, and after a return statement).
3926 */
3927 c->ip = cx->fp->regs->pc;
3929 debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());)
3931 int32_t& hits = c->hits();
3932 if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
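/* Annotation (added; not in the original source): a side exit must become hot
 * (HOTEXIT hits) before we try to attach a branch, and we stop trying after
 * HOTEXIT+MAXEXIT hits so a chronically unstable exit cannot trigger endless
 * compilation attempts; recording for an outer tree bypasses this filter. */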
3933 /* start tracing secondary trace from this point */
3934 c->lirbuf = f->lirbuf;
3935 unsigned stackSlots;
3936 unsigned ngslots;
3937 uint8* typeMap;
3938 TypeMap fullMap;
3939 if (exitedFrom == NULL) {
3940 /* If we are coming straight from a simple side exit, just use that exit's type map
3941 as starting point. */
3942 ngslots = anchor->numGlobalSlots;
3943 stackSlots = anchor->numStackSlots;
3944 typeMap = getFullTypeMap(anchor);
3945 } else {
3946 /* If we side-exited on a loop exit and continue on a nesting guard, the nesting
3947 guard (anchor) has the type information for everything below the current scope,
3948 and the actual guard we exited from has the types for everything in the current
3949 scope (and whatever it inlined). We have to merge those maps here. */
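/* Annotation (added; not in the original source): the merged map built below
 * has the shape
 *     fullMap = stack(e1, slots below the current frame)
 *               ++ stack(e2, all slots) ++ globals(e2)
 * and stackSlots is measured before the globals are appended. */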
3950 VMSideExit* e1 = anchor;
3951 VMSideExit* e2 = exitedFrom;
3952 fullMap.add(getStackTypeMap(e1), e1->numStackSlotsBelowCurrentFrame);
3953 fullMap.add(getStackTypeMap(e2), e2->numStackSlots);
3954 stackSlots = fullMap.length();
3955 fullMap.add(getGlobalTypeMap(e2), e2->numGlobalSlots);
3956 ngslots = e2->numGlobalSlots;
3957 typeMap = fullMap.data();
3958 }
3959 return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
3960 ngslots, typeMap, exitedFrom, outer, cx->fp->argc);
3961 }
3962 return false;
3963 }
3965 static JS_REQUIRES_STACK VMSideExit*
3966 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
3967 VMSideExit** innermostNestedGuardp);
3969 JS_REQUIRES_STACK bool
3970 js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
3971 {
3972 #ifdef JS_THREADSAFE
3973 if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
3974 js_AbortRecording(cx, "Global object not owned by this context");
3975 return false; /* we stay away from shared global objects */
3976 }
3977 #endif
3979 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3981 /* Process needFlush and deep abort requests. */
3982 if (tm->needFlush) {
3983 FlushJITCache(cx);
3984 return false;
3985 }
3986 if (r->wasDeepAborted()) {
3987 js_AbortRecording(cx, "deep abort requested");
3988 return false;
3989 }
3991 JS_ASSERT(r->getFragment() && !r->getFragment()->lastIns);
3992 VMFragment* root = (VMFragment*)r->getFragment()->root;
3994 /* Does this branch go to an inner loop? */
3995 Fragment* first = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc,
3996 root->globalObj, root->globalShape, cx->fp->argc);
3997 if (!first) {
3998 /* Not an inner loop we can call, abort trace. */
3999 AUDIT(returnToDifferentLoopHeader);
4000 JS_ASSERT(!cx->fp->imacpc);
4001 debug_only_v(printf("loop edge to %d, header %d\n",
4002 cx->fp->regs->pc - cx->fp->script->code,
4003 (jsbytecode*)r->getFragment()->root->ip - cx->fp->script->code));
4004 js_AbortRecording(cx, "Loop edge does not return to header");
4005 return false;
4006 }
4008 /* Make sure inner tree call will not run into an out-of-memory condition. */
4009 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
4010 !js_ReplenishReservedPool(cx, tm)) {
4011 js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
4012 return false;
4013 }
4015 /* Make sure the shape of the global object still matches (this might flush
4016 the JIT cache). */
4017 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4018 uint32 globalShape = -1;
4019 SlotList* globalSlots = NULL;
4020 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
4021 return false;
4023 debug_only_v(printf("Looking for type-compatible peer (%s:%d@%d)\n",
4024 cx->fp->script->filename,
4025 js_FramePCToLineNumber(cx, cx->fp),
4026 FramePCOffset(cx->fp));)
4028 // Find a matching inner tree. If none can be found, compile one.
4029 Fragment* f = r->findNestedCompatiblePeer(first);
4030 if (!f || !f->code()) {
4031 AUDIT(noCompatInnerTrees);
4033 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
4034 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
4035 uint32 outerArgc = outerFragment->argc;
4036 uint32 argc = cx->fp->argc;
4037 js_AbortRecording(cx, "No compatible inner tree");
4039 // Find an empty fragment we can recycle, or allocate a new one.
4040 for (f = first; f != NULL; f = f->peer) {
4041 if (!f->code())
4042 break;
4043 }
4044 if (!f || f->code()) {
4045 f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape, argc);
4046 if (!f) {
4047 FlushJITCache(cx);
4048 return false;
4049 }
4050 }
4051 return js_RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc);
4052 }
4054 r->adjustCallerTypes(f);
4055 r->prepareTreeCall(f);
4056 VMSideExit* innermostNestedGuard = NULL;
4057 VMSideExit* lr = js_ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
4058 if (!lr || r->wasDeepAborted()) {
4059 if (!lr)
4060 js_AbortRecording(cx, "Couldn't call inner tree");
4061 return false;
4062 }
4064 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
4065 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
4066 switch (lr->exitType) {
4067 case LOOP_EXIT:
4068 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
4069 if (innermostNestedGuard) {
4070 js_AbortRecording(cx, "Inner tree took different side exit, abort current "
4071 "recording and grow nesting tree");
4072 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, outer);
4073 }
4074 /* Emit a call to the inner tree and continue recording the outer tree trace. */
4075 r->emitTreeCall(f, lr);
4076 return true;
4077 case UNSTABLE_LOOP_EXIT:
4078 /* abort recording so the inner loop can become type stable. */
4079 js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
4080 return js_AttemptToStabilizeTree(cx, lr, outer, outerFragment->argc);
4081 case BRANCH_EXIT:
4082 /* abort recording the outer tree, extend the inner tree */
4083 js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
4084 return js_AttemptToExtendTree(cx, lr, NULL, outer);
4085 default:
4086 debug_only_v(printf("exit_type=%d\n", lr->exitType);)
4087 js_AbortRecording(cx, "Inner tree not suitable for calling");
4088 return false;
4089 }
4090 }
4092 static bool
4093 js_IsEntryTypeCompatible(jsval* vp, uint8* m)
4094 {
4095 unsigned tag = JSVAL_TAG(*vp);
4097 debug_only_v(printf("%c/%c ", tagChar[tag], typeChar[*m]);)
4099 switch (*m) {
4100 case JSVAL_OBJECT:
4101 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
4102 !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
4103 return true;
4104 }
4105 debug_only_v(printf("object != tag%u ", tag);)
4106 return false;
4107 case JSVAL_INT:
4108 jsint i;
4109 if (JSVAL_IS_INT(*vp))
4110 return true;
4111 if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(*vp), i))
4112 return true;
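/* Annotation (added; not in the original source): a heap double whose value is
 * integral and fits in a jsint (e.g. 5.0) is still compatible with a
 * JSVAL_INT slot, since it can be demoted without loss. */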
4113 debug_only_v(printf("int != tag%u(value=%lu) ", tag, (unsigned long)*vp);)
4114 return false;
4115 case JSVAL_DOUBLE:
4116 if (JSVAL_IS_INT(*vp) || tag == JSVAL_DOUBLE)
4117 return true;
4118 debug_only_v(printf("double != tag%u ", tag);)
4119 return false;
4120 case JSVAL_BOXED:
4121 JS_NOT_REACHED("shouldn't see boxed type in entry");
4122 return false;
4123 case JSVAL_STRING:
4124 if (tag == JSVAL_STRING)
4125 return true;
4126 debug_only_v(printf("string != tag%u ", tag);)
4127 return false;
4128 case JSVAL_TNULL:
4129 if (JSVAL_IS_NULL(*vp))
4130 return true;
4131 debug_only_v(printf("null != tag%u ", tag);)
4132 return false;
4133 case JSVAL_BOOLEAN:
4134 if (tag == JSVAL_BOOLEAN)
4135 return true;
4136 debug_only_v(printf("bool != tag%u ", tag);)
4137 return false;
4138 default:
4139 JS_ASSERT(*m == JSVAL_TFUN);
4140 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
4141 HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
4142 return true;
4143 }
4144 debug_only_v(printf("fun != tag%u ", tag);)
4145 return false;
4146 }
4147 }
4149 JS_REQUIRES_STACK Fragment*
4150 TraceRecorder::findNestedCompatiblePeer(Fragment* f)
4151 {
4152 JSTraceMonitor* tm;
4154 tm = &JS_TRACE_MONITOR(cx);
4155 unsigned int ngslots = treeInfo->globalSlots->length();
4156 uint16* gslots = treeInfo->globalSlots->data();
4158 TreeInfo* ti;
4159 for (; f != NULL; f = f->peer) {
4160 if (!f->code())
4161 continue;
4163 ti = (TreeInfo*)f->vmprivate;
4165 debug_only_v(printf("checking nested types %p: ", (void*)f);)
4167 if (ngslots > ti->nGlobalTypes())
4168 specializeTreesToMissingGlobals(cx, ti);
4170 uint8* typemap = ti->typeMap.data();
4172 /*
4173 * Determine whether the typemap of the inner tree matches the outer tree's
4174 * current state. If the inner tree expects an integer, but the outer tree
4175 * doesn't guarantee an integer for that slot, we mark the slot undemotable
4176 * and mismatch here. This will force a new tree to be compiled that accepts
4177 * a double for the slot. If the inner tree expects a double, but the outer
4178 * tree has an integer, we can proceed, but we mark the location undemotable.
4179 */
4180 bool ok = true;
4181 uint8* m = typemap;
4182 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
4183 debug_only_v(printf("%s%d=", vpname, vpnum);)
4184 if (!js_IsEntryTypeCompatible(vp, m)) {
4185 ok = false;
4186 } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
4187 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
4188 ok = false;
4189 } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
4190 oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
4192 m++;
4193 );
4194 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
4195 debug_only_v(printf("%s%d=", vpname, vpnum);)
4196 if (!js_IsEntryTypeCompatible(vp, m)) {
4197 ok = false;
4198 } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
4199 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
4200 ok = false;
4201 } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
4202 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
4204 m++;
4205 );
4206 JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
4208 debug_only_v(printf(" %s\n", ok ? "match" : "");)
4210 if (ok)
4211 return f;
4212 }
4214 return NULL;
4215 }
4217 /*
4218 * Check if types are usable for trace execution.
4219 *
4220 * @param cx Context.
4221 * @param ti Tree info of peer we're testing.
4222 * @return True if compatible (with or without demotions), false otherwise.
4223 */
4224 static JS_REQUIRES_STACK bool
4225 js_CheckEntryTypes(JSContext* cx, TreeInfo* ti)
4226 {
4227 unsigned int ngslots = ti->globalSlots->length();
4228 uint16* gslots = ti->globalSlots->data();
4230 JS_ASSERT(ti->nStackTypes == js_NativeStackSlots(cx, 0));
4232 if (ngslots > ti->nGlobalTypes())
4233 specializeTreesToMissingGlobals(cx, ti);
4235 uint8* m = ti->typeMap.data();
4237 JS_ASSERT(ti->typeMap.length() == js_NativeStackSlots(cx, 0) + ngslots);
4238 JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
4239 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4240 FORALL_SLOTS(cx, ngslots, gslots, 0,
4241 debug_only_v(printf("%s%d=", vpname, vpnum);)
4242 JS_ASSERT(*m != 0xCD);
4243 if (!js_IsEntryTypeCompatible(vp, m))
4244 goto check_fail;
4245 m++;
4246 );
4247 JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
4249 debug_only_v(printf("\n");)
4250 return true;
4252 check_fail:
4253 debug_only_v(printf("\n");)
4254 return false;
4255 }
4257 /*
4258 * Find an acceptable entry tree given a PC.
4259 *
4260 * @param cx Context.
4261 * @param f First peer fragment.
4263 * @out count Number of fragments consulted.
4264 */
4265 static JS_REQUIRES_STACK Fragment*
4266 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f, uintN& count)
4267 {
4268 count = 0;
4269 for (; f != NULL; f = f->peer) {
4270 if (f->vmprivate == NULL)
4271 continue;
4272 debug_only_v(printf("checking vm types %p (ip: %p): ", (void*)f, f->ip);)
4273 if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
4274 return f;
4275 ++count;
4276 }
4277 return NULL;
4278 }
4280 static void
4281 LeaveTree(InterpState&, VMSideExit* lr);
4283 /*
4284 * Executes a tree.
4285 */
4286 static JS_REQUIRES_STACK VMSideExit*
4287 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
4288 VMSideExit** innermostNestedGuardp)
4289 {
4290 JS_ASSERT(f->root == f && f->code() && f->vmprivate);
4292 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4293 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4294 TreeInfo* ti = (TreeInfo*)f->vmprivate;
4295 unsigned ngslots = ti->globalSlots->length();
4296 uint16* gslots = ti->globalSlots->data();
4297 unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
4299 /* Make sure the global object is sane. */
4300 JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
4301 ((VMFragment*)f)->globalShape));
4302 /* Make sure our caller replenished the double pool. */
4303 JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
4305 /* Reserve objects and stack space now, to make leaving the tree infallible. */
4306 if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
4307 return NULL;
4309 #ifdef DEBUG
4310 uintN savedProhibitFlush = JS_TRACE_MONITOR(cx).prohibitFlush;
4311 #endif
4313 /* Set up the interpreter state block, which is followed by the native global frame. */
4314 InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
4315 state->cx = cx;
4316 state->inlineCallCountp = &inlineCallCount;
4317 state->innermostNestedGuardp = innermostNestedGuardp;
4318 state->outermostTree = ti;
4319 state->lastTreeExitGuard = NULL;
4320 state->lastTreeCallGuard = NULL;
4321 state->rpAtLastTreeCall = NULL;
4322 state->builtinStatus = 0;
4324 /* Set up the native global frame. */
4325 double* global = (double*)(state+1);
4327 /* Set up the native stack frame. */
4328 double stack_buffer[MAX_NATIVE_STACK_SLOTS];
4329 state->stackBase = stack_buffer;
4330 state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
4331 state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
4333 /* Set up the native call stack frame. */
4334 FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
4335 state->callstackBase = callstack_buffer;
4336 state->rp = callstack_buffer;
4337 state->eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
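/* Annotation (added; not in the original source): the layout established above
 * is
 *     [InterpState][global doubles...][0xdeadbeef sentinel]   (alloca'd block)
 *     stack_buffer[MAX_NATIVE_STACK_SLOTS]                    (native operand stack)
 *     callstack_buffer[MAX_CALL_STACK_ENTRIES]                (FrameInfo* call stack)
 * with sp starting at nativeStackBase inside stack_buffer and eos/eor marking
 * the respective ends. */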
4339 void *reserve;
4340 state->stackMark = JS_ARENA_MARK(&cx->stackPool);
4341 JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
4342 if (!reserve)
4343 return NULL;
4345 #ifdef DEBUG
4346 memset(stack_buffer, 0xCD, sizeof(stack_buffer));
4347 memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
4348 JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
4349 #endif
4351 debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
4352 debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
4353 cx->fp->script->filename,
4354 js_FramePCToLineNumber(cx, cx->fp),
4355 FramePCOffset(cx->fp),
4356 ti->maxNativeStackSlots,
4357 f->code());)
4359 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4361 if (ngslots)
4362 BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
4363 BuildNativeStackFrames(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
4365 union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
4366 u.code = f->code();
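/* Annotation (added; not in the original source): the union reinterprets the
 * compiled trace's entry address as a callable function pointer without a
 * data-to-function cast that compilers warn about; the trace is then entered
 * via u.func(state, NULL) below. */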
4368 #ifdef EXECUTE_TREE_TIMER
4369 state->startTime = rdtsc();
4370 #endif
4372 JS_ASSERT(!tm->tracecx);
4373 tm->tracecx = cx;
4374 state->prev = cx->interpState;
4375 cx->interpState = state;
4377 debug_only(fflush(NULL);)
4378 GuardRecord* rec;
4379 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
4380 SIMULATE_FASTCALL(rec, state, NULL, u.func);
4381 #else
4382 rec = u.func(state, NULL);
4383 #endif
4384 VMSideExit* lr = (VMSideExit*)rec->exit;
4386 AUDIT(traceTriggered);
4388 cx->interpState = state->prev;
4390 JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
4391 tm->tracecx = NULL;
4392 LeaveTree(*state, lr);
4393 JS_ASSERT(JS_TRACE_MONITOR(cx).prohibitFlush == savedProhibitFlush);
4394 return state->innermost;
4395 }
4397 static JS_FORCES_STACK void
4398 LeaveTree(InterpState& state, VMSideExit* lr)
4399 {
4400 VOUCH_DOES_NOT_REQUIRE_STACK();
4402 JSContext* cx = state.cx;
4403 FrameInfo** callstack = state.callstackBase;
4404 double* stack = state.stackBase;
4406 /* Unless we find that this is a nested bailout, the guard the call returned is the
4407 one we have to use to adjust pc and sp. */
4408 VMSideExit* innermost = lr;
4410 /* While executing a tree we do not update state.sp and state.rp even if they grow. Instead,
4411 guards tell us by how much sp and rp should be incremented in case of a side exit. When
4412 calling a nested tree, however, we actively adjust sp and rp. If we have such frames
4413 from outer trees on the stack, then rp will have been adjusted. Before we can process
4414 the stack of the frames of the tree we directly exited from, we have to first work our
4415 way through the outer frames and generate interpreter frames for them. Once the call
4416 stack (rp) is empty, we can process the final frames (which again are not directly
4417 visible and which only the guard we exited on will tell us about). */
4418 FrameInfo** rp = (FrameInfo**)state.rp;
4419 if (lr->exitType == NESTED_EXIT) {
4420 VMSideExit* nested = state.lastTreeCallGuard;
4421 if (!nested) {
4422 /* If lastTreeCallGuard is not set in state, we only have a single level of
4423 nesting in this exit, so lr itself is the innermost and outermost nested
4424 guard, and hence we set nested to lr. The calldepth of the innermost guard
4425 is not added to state.rp, so we do it here manually. For a nesting depth
4426 greater than 1 the CallTree builtin already added the innermost guard's
4427 calldepth to state.rpAtLastTreeCall. */
4428 nested = lr;
4429 rp += lr->calldepth;
4430 } else {
4431 /* During unwinding state.rp gets overwritten at every step and we restore
4432 it here to its state at the innermost nested guard. The builtin already
4433 added the calldepth of that innermost guard to rpAtLastTreeCall. */
4434 rp = (FrameInfo**)state.rpAtLastTreeCall;
4436 innermost = state.lastTreeExitGuard;
4437 if (state.innermostNestedGuardp)
4438 *state.innermostNestedGuardp = nested;
4439 JS_ASSERT(nested);
4440 JS_ASSERT(nested->exitType == NESTED_EXIT);
4441 JS_ASSERT(state.lastTreeExitGuard);
4442 JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
4443 }
4445 int32_t bs = state.builtinStatus;
4446 bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
4447 if (bailed) {
4448 /*
4449 * Deep-bail case.
4451 * A _FAIL native already called LeaveTree. We already reconstructed
4452 * the interpreter stack, in pre-call state, with pc pointing to the
4453 * CALL/APPLY op, for correctness. Then we continued in native code.
4455 * First, if we just returned from a slow native, pop its stack frame.
4456 */
4457 if (!cx->fp->script) {
4458 JSStackFrame *fp = cx->fp;
4459 JS_ASSERT(FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, fp->callee)));
4460 JS_ASSERT(fp->regs == NULL);
4461 JS_ASSERT(fp->down->regs != &((JSInlineFrame *) fp)->callerRegs);
4462 cx->fp = fp->down;
4463 JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame *) fp)->mark);
4464 }
4465 JS_ASSERT(cx->fp->script);
4467 if (!(bs & JSBUILTIN_ERROR)) {
4468 /*
4469 * The native succeeded (no exception or error). After it returned, the
4470 * trace stored the return value (at the top of the native stack) and
4471 * then immediately flunked the guard on state->builtinStatus.
4473 * Now LeaveTree has been called again from the tail of
4474 * js_ExecuteTree. We are about to return to the interpreter. Adjust
4475 * the top stack frame to resume on the next op.
4476 */
4477 JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL ||
4478 *cx->fp->regs->pc == JSOP_APPLY ||
4479 *cx->fp->regs->pc == JSOP_NEW);
4480 uintN argc = GET_ARGC(cx->fp->regs->pc);
4481 cx->fp->regs->pc += JSOP_CALL_LENGTH;
4482 cx->fp->regs->sp -= argc + 1;
4483 JS_ASSERT_IF(!cx->fp->imacpc,
4484 cx->fp->slots + cx->fp->script->nfixed +
4485 js_ReconstructStackDepth(cx, cx->fp->script, cx->fp->regs->pc) ==
4486 cx->fp->regs->sp);
4488 /*
4489 * The return value was not available when we reconstructed the stack,
4490 * but we have it now. Box it.
4491 */
4492 uint8* typeMap = getStackTypeMap(innermost);
4493 NativeToValue(cx,
4494 cx->fp->regs->sp[-1],
4495 typeMap[innermost->numStackSlots - 1],
4496 (jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
4497 }
4498 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4499 if (tm->prohibitFlush && --tm->prohibitFlush == 0 && tm->needFlush)
4500 FlushJITCache(cx);
4501 return;
4502 }
4504 JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
4505 while (callstack < rp) {
4506 /* Synthesize a stack frame and write out the values in it using the type map pointer
4507 on the native call stack. */
4508 js_SynthesizeFrame(cx, **callstack);
4509 int slots = FlushNativeStackFrames(cx, 1/*callDepth*/, (uint8*)(*callstack+1), stack, cx->fp);
4510 #ifdef DEBUG
4511 JSStackFrame* fp = cx->fp;
4512 debug_only_v(printf("synthesized deep frame for %s:%u@%u, slots=%d\n",
4513 fp->script->filename,
4514 js_FramePCToLineNumber(cx, fp),
4515 FramePCOffset(fp),
4516 slots);)
4517 #endif
4518 /* Keep track of the additional frames we put on the interpreter stack and the native
4519 stack slots we consumed. */
4520 ++*state.inlineCallCountp;
4521 ++callstack;
4522 stack += slots;
4523 }
4525 /* We already synthesized the frames around the innermost guard. Here we just deal
4526 with additional frames inside the tree we are bailing out from. */
4527 JS_ASSERT(rp == callstack);
4528 unsigned calldepth = innermost->calldepth;
4529 unsigned calldepth_slots = 0;
4530 for (unsigned n = 0; n < calldepth; ++n) {
4531 calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
4532 ++*state.inlineCallCountp;
4533 #ifdef DEBUG
4534 JSStackFrame* fp = cx->fp;
4535 debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
4536 fp->script->filename, js_FramePCToLineNumber(cx, fp),
4537 FramePCOffset(fp));)
4538 #endif
4539 }
4541 /* Adjust sp and pc relative to the tree we exited from (not the tree we entered into).
4542 These are our final values for sp and pc since js_SynthesizeFrame has already taken
4543 care of all frames in between. But first we recover fp->blockChain, which comes from
4544 the side exit struct. */
4545 JSStackFrame* fp = cx->fp;
4547 fp->blockChain = innermost->block;
4549 /* If we are not exiting from an inlined frame, state->sp is spbase; otherwise spbase
4550 is whatever slots the frames around us consume. */
4551 fp->regs->pc = innermost->pc;
4552 fp->imacpc = innermost->imacpc;
4553 fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
4554 JS_ASSERT_IF(!fp->imacpc,
4555 fp->slots + fp->script->nfixed +
4556 js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
4558 #ifdef EXECUTE_TREE_TIMER
4559 uint64 cycles = rdtsc() - state.startTime;
4560 #elif defined(JS_JIT_SPEW)
4561 uint64 cycles = 0;
4562 #endif
4564 debug_only_v(printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, "
4565 "calldepth=%d, cycles=%llu\n",
4566 fp->script->filename,
4567 js_FramePCToLineNumber(cx, fp),
4568 FramePCOffset(fp),
4569 js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc],
4570 (void*)lr,
4571 lr->exitType,
4572 fp->regs->sp - StackBase(fp),
4573 calldepth,
4574 cycles));
4576 /* If this trace is part of a tree, later branches might have added additional globals for
4577 which we don't have any type information available in the side exit. We merge in this
4578 information from the entry type-map. See also comment in the constructor of TraceRecorder
4579 why this is always safe to do. */
4580 TreeInfo* outermostTree = state.outermostTree;
4581 uint16* gslots = outermostTree->globalSlots->data();
4582 unsigned ngslots = outermostTree->globalSlots->length();
4583 JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
4584 uint8* globalTypeMap;
4586 /* Are there enough globals? This is the ideal fast path. */
4587 if (innermost->numGlobalSlots == ngslots) {
4588 globalTypeMap = getGlobalTypeMap(innermost);
4589 /* Otherwise, merge the typemap of the innermost entry and exit together. This should always
4590 work because it is invalid for nested trees or linked trees to have incompatible types.
4591 Thus, whenever a new global type is lazily added into a tree, all dependent and linked
4592 trees are immediately specialized (see bug 476653). */
4593 } else {
4594 TreeInfo* ti = (TreeInfo*)innermost->from->root->vmprivate;
4595 JS_ASSERT(ti->nGlobalTypes() == ngslots);
4596 JS_ASSERT(ti->nGlobalTypes() > innermost->numGlobalSlots);
4597 globalTypeMap = (uint8*)alloca(ngslots * sizeof(uint8));
4598 memcpy(globalTypeMap, getGlobalTypeMap(innermost), innermost->numGlobalSlots);
4599 memcpy(globalTypeMap + innermost->numGlobalSlots,
4600 ti->globalTypeMap() + innermost->numGlobalSlots,
4601 ti->nGlobalTypes() - innermost->numGlobalSlots);
4602 }
4604 /* Write back the native stack frame. */
4605 #ifdef DEBUG
4606 int slots =
4607 #endif
4608 FlushNativeStackFrames(cx, innermost->calldepth,
4609 getStackTypeMap(innermost),
4610 stack, NULL);
4611 JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
4613 if (innermost->nativeCalleeWord)
4614 SynthesizeSlowNativeFrame(cx, innermost);
4616 /*
4617 * Write back interned globals. This must occur after we have restored and synthesized
4618 * stack frames, since we will use cx->fp->scopeChain to obtain a reference to the
4619 * global object.
4620 */
4621 double* global = (double*)(&state + 1);
4622 FlushNativeGlobalFrame(cx, ngslots, gslots, globalTypeMap, global);
4623 JS_ASSERT(*(uint64*)&global[STOBJ_NSLOTS(JS_GetGlobalForObject(cx, cx->fp->scopeChain))] ==
4624 0xdeadbeefdeadbeefLL);
4626 cx->nativeVp = NULL;
4628 #ifdef DEBUG
4629 // Verify that our state restoration worked.
4630 for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
4631 JS_ASSERT_IF(fp->callee, JSVAL_IS_OBJECT(fp->argv[-1]));
4632 }
4633 #endif
4634 #ifdef JS_JIT_SPEW
4635 if (innermost->exitType != TIMEOUT_EXIT)
4636 AUDIT(sideExitIntoInterpreter);
4637 else
4638 AUDIT(timeoutIntoInterpreter);
4639 #endif
4641 state.innermost = innermost;
4642 }
4644 JS_REQUIRES_STACK bool
4645 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
4646 {
4647 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4649 /* Is the recorder currently active? */
4650 if (tm->recorder) {
4651 jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;
4653 if (js_RecordLoopEdge(cx, tm->recorder, inlineCallCount))
4654 return true;
4656 /*
4657 * js_RecordLoopEdge will invoke an inner tree if we have a matching one. If we
4658 * arrive here, that tree didn't run to completion and instead we mis-matched
4659 * or the inner tree took a side exit other than the loop exit. We are thus
4660 * no longer guaranteed to be parked on the same loop header js_MonitorLoopEdge
4661 * was called for. In fact, this might not even be a loop header at all. Hence
4662 * if the program counter no longer hovers over the inner loop header, return to
4663 * the interpreter and do not attempt to trigger or record a new tree at this
4664 * location.
4665 */
4666 if (innerLoopHeaderPC != cx->fp->regs->pc)
4667 return false;
4668 }
4669 JS_ASSERT(!tm->recorder);
4671 /* Check the pool of reserved doubles (this might trigger a GC). */
4672 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
4673 !js_ReplenishReservedPool(cx, tm)) {
4674 return false; /* Out of memory, don't try to record now. */
4675 }
4677 /* Make sure the shape of the global object still matches (this might flush the JIT cache). */
4678 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
4679 uint32 globalShape = -1;
4680 SlotList* globalSlots = NULL;
4682 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
4683 js_Backoff(cx, cx->fp->regs->pc);
4684 return false;
4685 }
4687 /* Do not enter the JIT code with a pending operation callback. */
4688 if (cx->operationCallbackFlag)
4689 return false;
4691 jsbytecode* pc = cx->fp->regs->pc;
4692 uint32 argc = cx->fp->argc;
4694 Fragment* f = getLoop(tm, pc, globalObj, globalShape, argc);
4695 if (!f)
4696 f = getAnchor(tm, pc, globalObj, globalShape, argc);
4698 if (!f) {
4699 FlushJITCache(cx);
4700 return false;
4701 }
4703 /* If we have no code in the anchor and no peers, we definitely won't be able to
4704 activate any trees, so start compiling. */
4705 if (!f->code() && !f->peer) {
4706 record:
4707 if (++f->hits() < HOTLOOP)
4708 return false;
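/* Annotation (added; not in the original source): a loop edge must be crossed
 * HOTLOOP times before we invest in recording a tree; colder loops stay in
 * the interpreter. */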
4709 /* We can give RecordTree the root peer. If that peer is already taken, it will
4710 walk the peer list and find us a free slot or allocate a new tree if needed. */
4711 return js_RecordTree(cx, tm, f->first, NULL, 0, globalObj, globalShape,
4712 globalSlots, argc);
4713 }
4715 debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p)\n",
4716 js_FramePCToLineNumber(cx, cx->fp),
4717 FramePCOffset(cx->fp), (void*)f, f->ip);)
4719 uintN count;
4720 Fragment* match = js_FindVMCompatiblePeer(cx, f, count);
4721 if (!match) {
4722 if (count < MAXPEERS)
4723 goto record;
4724 /* If we hit the max peers ceiling, don't try to look up fragments all the time. That's
4725 expensive. This must be a rather type-unstable loop. */
4726 debug_only_v(printf("Blacklisted: too many peer trees.\n");)
4727 js_Blacklist((jsbytecode*) f->root->ip);
4728 return false;
4729 }
4731 VMSideExit* lr = NULL;
4732 VMSideExit* innermostNestedGuard = NULL;
4734 lr = js_ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
4735 if (!lr)
4736 return false;
4738 /* If we exit on a branch, or on a tree call guard, try to grow the inner tree (in case
4739 of a branch exit), or the tree nested around the tree we exited from (in case of the
4740 tree call guard). */
4741 switch (lr->exitType) {
4742 case UNSTABLE_LOOP_EXIT:
4743 return js_AttemptToStabilizeTree(cx, lr, NULL, NULL);
4744 case BRANCH_EXIT:
4745 case CASE_EXIT:
4746 return js_AttemptToExtendTree(cx, lr, NULL, NULL);
4747 case LOOP_EXIT:
4748 if (innermostNestedGuard)
4749 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL);
4750 return false;
4751 default:
4752 /* No, this was an unusual exit (i.e. out of memory/GC), so just resume interpretation. */
4753 return false;
4754 }
4755 }
4757 JS_REQUIRES_STACK JSRecordingStatus
4758 TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op)
4759 {
4760 /* Process needFlush and deepAbort() requests now. */
4761 if (JS_TRACE_MONITOR(cx).needFlush) {
4762 FlushJITCache(cx);
4763 return JSRS_STOP;
4764 }
4765 if (tr->wasDeepAborted()) {
4766 js_AbortRecording(cx, "deep abort requested");
4767 return JSRS_STOP;
4768 }
4769 JS_ASSERT(!tr->fragment->lastIns);
4771 /*
4772 * Clear one-shot state used to communicate between record_JSOP_CALL and post-
4773 * opcode-case-guts record hook (record_NativeCallComplete).
4774 */
4775 tr->pendingTraceableNative = NULL;
4776 tr->newobj_ins = NULL;
4778 debug_only_v(js_Disassemble1(cx, cx->fp->script, cx->fp->regs->pc,
4779 cx->fp->imacpc ? 0 : cx->fp->regs->pc - cx->fp->script->code,
4780 !cx->fp->imacpc, stdout);)
4782 /* If op is not a break or a return from a loop, continue recording and follow the
4783 trace. We check for imacro-calling bytecodes inside each switch case to resolve
4784 the if (JSOP_IS_IMACOP(x)) conditions at compile time. */
4786 JSRecordingStatus status;
4787 #ifdef DEBUG
4788 bool wasInImacro = (cx->fp->imacpc != NULL);
4789 #endif
4790 switch (op) {
4791 default:
4792 status = JSRS_ERROR;
4793 goto stop_recording;
4794 # define OPDEF(x,val,name,token,length,nuses,ndefs,prec,format) \
4795 case x: \
4796 status = tr->record_##x(); \
4797 if (JSOP_IS_IMACOP(x)) \
4798 goto imacro; \
4799 break;
4800 # include "jsopcode.tbl"
4801 # undef OPDEF
4802 }
4804 JS_ASSERT(status != JSRS_IMACRO);
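/* Annotation (added; not in the original source): the OPDEF expansion above
 * generates one switch case per bytecode, dispatching to the corresponding
 * record_JSOP_* hook; imacro-calling opcodes branch to the imacro: label
 * instead of falling through to the checks below. */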
4805 JS_ASSERT_IF(!wasInImacro, cx->fp->imacpc == NULL);
4807 /* Process deepAbort() requests now. */
4808 if (tr->wasDeepAborted()) {
4809 js_AbortRecording(cx, "deep abort requested");
4810 return JSRS_STOP;
4811 }
4813 if (JS_TRACE_MONITOR(cx).fragmento->assm()->error()) {
4814 js_AbortRecording(cx, "error during recording");
4815 return JSRS_STOP;
4816 }
4818 if (tr->lirbuf->outOMem() ||
4819 js_OverfullFragmento(&JS_TRACE_MONITOR(cx), JS_TRACE_MONITOR(cx).fragmento)) {
4820 js_AbortRecording(cx, "no more LIR memory");
4821 FlushJITCache(cx);
4822 return JSRS_STOP;
4823 }
4825 imacro:
4826 if (!STATUS_ABORTS_RECORDING(status))
4827 return status;
4829 stop_recording:
4830 /* If we recorded the end of the trace, destroy the recorder now. */
4831 if (tr->fragment->lastIns) {
4832 js_DeleteRecorder(cx);
4833 return status;
4834 }
4836 /* Looks like we encountered an error condition. Abort recording. */
4837 js_AbortRecording(cx, js_CodeName[op]);
4838 return status;
4839 }
4841 JS_REQUIRES_STACK void
4842 js_AbortRecording(JSContext* cx, const char* reason)
4843 {
4844 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4845 JS_ASSERT(tm->recorder != NULL);
4846 AUDIT(recorderAborted);
4848 /* Abort the trace and blacklist its starting point. */
4849 Fragment* f = tm->recorder->getFragment();
4851 /*
4852 * If the recorder already had its fragment disposed, or we actually finished
4853 * recording and this recorder merely is passing through the deep abort state
4854 * to the next recorder on the stack, just destroy the recorder. There is
4855 * nothing to abort.
4856 */
4857 if (!f || f->lastIns) {
4858 js_DeleteRecorder(cx);
4859 return;
4860 }
4862 JS_ASSERT(!f->vmprivate);
4863 #ifdef DEBUG
4864 TreeInfo* ti = tm->recorder->getTreeInfo();
4865 debug_only_a(printf("Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
4866 ti->treeFileName,
4867 ti->treeLineNumber,
4868 ti->treePCOffset,
4869 cx->fp->script->filename,
4870 js_FramePCToLineNumber(cx, cx->fp),
4871 FramePCOffset(cx->fp),
4872 reason);)
4873 #endif
4875 js_Backoff(cx, (jsbytecode*) f->root->ip, f->root);
4877 /*
4878 * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
4879 */
4880 if (!js_DeleteRecorder(cx))
4881 return;
4883 /*
4884 * If this is the primary trace and we didn't succeed compiling, trash the
4885 * TreeInfo object.
4886 */
4887 if (!f->code() && (f->root == f))
4888 js_TrashTree(cx, f);
4889 }
4891 #if defined NANOJIT_IA32
4892 static bool
4893 js_CheckForSSE2()
4894 {
4895 int features = 0;
4896 #if defined _MSC_VER
4897 __asm
4898 {
4899 pushad
4900 mov eax, 1
4901 cpuid
4902 mov features, edx
4903 popad
4904 }
4905 #elif defined __GNUC__
4906 asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
4907 "mov $0x01, %%eax\n"
4908 "cpuid\n"
4909 "mov %%edx, %0\n"
4910 "xchg %%esi, %%ebx\n"
4911 : "=m" (features)
4912 : /* We have no inputs */
4913 : "%eax", "%esi", "%ecx", "%edx"
4915 #elif defined __SUNPRO_C || defined __SUNPRO_CC
4916 asm("push %%ebx\n"
4917 "mov $0x01, %%eax\n"
4918 "cpuid\n"
4919 "pop %%ebx\n"
4920 : "=d" (features)
4921 : /* We have no inputs */
4922 : "%eax", "%ecx"
4924 #endif
4925 return (features & (1<<26)) != 0;
4927 #endif
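/* Annotation (added; not in the original source): CPUID function 1 reports
 * feature flags in EDX, and bit 26 is the SSE2 capability bit, so the check
 * above returns true only on SSE2-capable processors. */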
4929 #if defined(NANOJIT_ARM)
4931 #if defined(_MSC_VER) && defined(WINCE)
4933 // these come in from jswince.asm
4934 extern "C" int js_arm_try_thumb_op();
4935 extern "C" int js_arm_try_armv6t2_op();
4936 extern "C" int js_arm_try_armv5_op();
4937 extern "C" int js_arm_try_armv6_op();
4938 extern "C" int js_arm_try_armv7_op();
4939 extern "C" int js_arm_try_vfp_op();
4941 static bool
4942 js_arm_check_thumb() {
4943 bool ret = false;
4944 __try {
4945 js_arm_try_thumb_op();
4946 ret = true;
4947 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4948 ret = false;
4949 }
4950 return ret;
4951 }
4953 static bool
4954 js_arm_check_thumb2() {
4955 bool ret = false;
4956 __try {
4957 js_arm_try_armv6t2_op();
4958 ret = true;
4959 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4960 ret = false;
4961 }
4962 return ret;
4963 }
4965 static unsigned int
4966 js_arm_check_arch() {
4967 unsigned int arch = 4;
4968 __try {
4969 js_arm_try_armv5_op();
4970 arch = 5;
4971 js_arm_try_armv6_op();
4972 arch = 6;
4973 js_arm_try_armv7_op();
4974 arch = 7;
4975 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4976 }
4977 return arch;
4978 }
4980 static bool
4981 js_arm_check_vfp() {
4982 bool ret = false;
4983 __try {
4984 js_arm_try_vfp_op();
4985 ret = true;
4986 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
4987 ret = false;
4988 }
4989 return ret;
4990 }
4992 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
4994 #include <stdlib.h>
4995 #include <unistd.h>
4996 #include <sys/types.h>
4997 #include <sys/stat.h>
4998 #include <sys/mman.h>
4999 #include <fcntl.h>
5000 #include <string.h>
5001 #include <elf.h>
5003 // Assume ARMv4 by default.
5004 static unsigned int arm_arch = 4;
5005 static bool arm_has_thumb = false;
5006 static bool arm_has_vfp = false;
5007 static bool arm_has_neon = false;
5008 static bool arm_has_iwmmxt = false;
5009 static bool arm_tests_initialized = false;
5011 static void
5012 arm_read_auxv() {
5013 int fd;
5014 Elf32_auxv_t aux;
5016 fd = open("/proc/self/auxv", O_RDONLY);
5017 if (fd > 0) {
5018 while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
5019 if (aux.a_type == AT_HWCAP) {
5020 uint32_t hwcap = aux.a_un.a_val;
5021 if (getenv("ARM_FORCE_HWCAP"))
5022 hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
5023 // hardcode these values to avoid depending on specific versions
5024 // of the hwcap header, e.g. HWCAP_NEON
5025 arm_has_thumb = (hwcap & 4) != 0;
5026 arm_has_vfp = (hwcap & 64) != 0;
5027 arm_has_iwmmxt = (hwcap & 512) != 0;
5028 // this flag is only present on kernel 2.6.29
5029 arm_has_neon = (hwcap & 4096) != 0;
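// Annotation (added; not in the original source): these masks match the Linux
// ARM HWCAP bits as defined at the time: HWCAP_THUMB (1<<2), HWCAP_VFP (1<<6),
// HWCAP_IWMMXT (1<<9), and HWCAP_NEON (1<<12).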
5030 } else if (aux.a_type == AT_PLATFORM) {
5031 const char *plat = (const char*) aux.a_un.a_val;
5032 if (getenv("ARM_FORCE_PLATFORM"))
5033 plat = getenv("ARM_FORCE_PLATFORM");
5034 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
5035 // or big-endian variants and the digit indicates the version of the platform.
5036 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
5037 // processors. Architectures newer than ARMv7 are assumed to be
5038 // backwards-compatible with ARMv7.
5039 if ((plat[0] == 'v') &&
5040 (plat[1] >= '4') && (plat[1] <= '9') &&
5041 ((plat[2] == 'l') || (plat[2] == 'b')))
5042 {
5043 arm_arch = plat[1] - '0';
5044 }
5045 else
5046 {
5047 // For production code, ignore invalid (or unexpected) platform strings and
5048 // fall back to the default. For debug code, use an assertion to catch this.
5049 JS_ASSERT(false);
5050 }
5051 }
5052 }
5053 close(fd);
5054 }
5055 // If we don't have kernel 2.6.29, we have to use this hack: set
5056 // the env var to trust HWCAP.
5057 if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
5058 arm_has_neon = true;
5061 arm_tests_initialized = true;
5062 }
5064 static bool
5065 js_arm_check_thumb() {
5066 if (!arm_tests_initialized)
5067 arm_read_auxv();
5069 return arm_has_thumb;
5070 }
5072 static bool
5073 js_arm_check_thumb2() {
5074 if (!arm_tests_initialized)
5075 arm_read_auxv();
5077 // ARMv6T2 also supports Thumb2, but Linux doesn't provide an easy way to test for this as
5078 // there is no associated bit in auxv. ARMv7 always supports Thumb2, and future architectures
5079 // are assumed to be backwards-compatible.
5080 return (arm_arch >= 7);
5081 }
5083 static unsigned int
5084 js_arm_check_arch() {
5085 if (!arm_tests_initialized)
5086 arm_read_auxv();
5088 return arm_arch;
5089 }
5091 static bool
5092 js_arm_check_vfp() {
5093 if (!arm_tests_initialized)
5094 arm_read_auxv();
5096 return arm_has_vfp;
5097 }
5099 #else
5100 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
5101 static bool
5102 js_arm_check_thumb() { return false; }
5103 static bool
5104 js_arm_check_thumb2() { return false; }
5105 static unsigned int
5106 js_arm_check_arch() { return 4; }
5107 static bool
5108 js_arm_check_vfp() { return false; }
5109 #endif
5111 #endif /* NANOJIT_ARM */
5113 #define K *1024
5114 #define M K K
5115 #define G K M
5117 void
5118 js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
5119 {
5120 JSTraceMonitor* tm = &JS_THREAD_DATA(cx)->traceMonitor;
5121 JS_ASSERT(tm->fragmento && tm->reFragmento);
5122 if (bytes > 1 G)
5123 bytes = 1 G;
5124 if (bytes < 128 K)
5125 bytes = 128 K;
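/* Annotation (added; not in the original source): requests are clamped to the
 * [128 KB, 1 GB] range; e.g. asking for 64 KB yields 128 KB and asking for
 * 2 GB yields 1 GB. */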
5126 tm->maxCodeCacheBytes = bytes;
5127 }
5129 void
5130 js_InitJIT(JSTraceMonitor *tm)
5131 {
5132 if (!did_we_check_processor_features) {
5133 #if defined NANOJIT_IA32
5134 avmplus::AvmCore::config.use_cmov =
5135 avmplus::AvmCore::config.sse2 = js_CheckForSSE2();
5136 #endif
5137 #if defined NANOJIT_ARM
5138 bool arm_vfp = js_arm_check_vfp();
5139 bool arm_thumb = js_arm_check_thumb();
5140 bool arm_thumb2 = js_arm_check_thumb2();
5141 unsigned int arm_arch = js_arm_check_arch();
5143 avmplus::AvmCore::config.vfp = arm_vfp;
5144 avmplus::AvmCore::config.soft_float = !arm_vfp;
5145 avmplus::AvmCore::config.thumb = arm_thumb;
5146 avmplus::AvmCore::config.thumb2 = arm_thumb2;
5147 avmplus::AvmCore::config.arch = arm_arch;
5149 // Sanity-check the configuration detection.
5150 // * We don't understand architectures prior to ARMv4.
5151 JS_ASSERT(arm_arch >= 4);
5152 // * All architectures support Thumb with the possible exception of ARMv4.
5153 JS_ASSERT((arm_thumb) || (arm_arch == 4));
5154 // * Only ARMv6T2 and ARMv7(+) support Thumb2, but ARMv6 does not.
5155 JS_ASSERT((arm_thumb2) || (arm_arch <= 6));
5156 // * All architectures that support Thumb2 also support Thumb.
5157 JS_ASSERT((arm_thumb2 && arm_thumb) || (!arm_thumb2));
5158 #endif
5159 did_we_check_processor_features = true;
5160 }
5162 /*
5163 * Set the default size for the code cache to 16 MB.
5164 */
5165 tm->maxCodeCacheBytes = 16 M;
5167 if (!tm->recordAttempts.ops) {
5168 JS_DHashTableInit(&tm->recordAttempts, JS_DHashGetStubOps(),
5169 NULL, sizeof(PCHashEntry),
5170 JS_DHASH_DEFAULT_CAPACITY(PC_HASH_COUNT));
5171 }
5173 if (!tm->fragmento) {
5174 JS_ASSERT(!tm->reservedDoublePool);
5175 Fragmento* fragmento = new (&gc) Fragmento(core, 32);
5176 verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
5177 tm->fragmento = fragmento;
5178 tm->lirbuf = new (&gc) LirBuffer(fragmento, NULL);
5179 #ifdef DEBUG
5180 tm->lirbuf->names = new (&gc) LirNameMap(&gc, tm->fragmento->labels);
5181 #endif
5182 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5183 tm->globalStates[i].globalShape = -1;
5184 JS_ASSERT(!tm->globalStates[i].globalSlots);
5185 tm->globalStates[i].globalSlots = new (&gc) SlotList();
5186 }
5187 tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS];
5188 memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
5189 }
5190 if (!tm->reFragmento) {
5191 Fragmento* fragmento = new (&gc) Fragmento(core, 32);
5192 verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
5193 tm->reFragmento = fragmento;
5194 tm->reLirBuf = new (&gc) LirBuffer(fragmento, NULL);
5195 }
5196 #if !defined XP_WIN
5197 debug_only(memset(&jitstats, 0, sizeof(jitstats)));
5198 #endif
5199 }
5201 void
5202 js_FinishJIT(JSTraceMonitor *tm)
5203 {
5204 #ifdef JS_JIT_SPEW
5205 if (js_verboseStats && jitstats.recorderStarted) {
5206 printf("recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
5207 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
5208 "breaks(%llu), returns(%llu), unstableInnerCalls(%llu)\n",
5209 jitstats.recorderStarted, jitstats.recorderAborted, jitstats.traceCompleted,
5210 jitstats.returnToDifferentLoopHeader, jitstats.treesTrashed, jitstats.slotPromoted,
5211 jitstats.unstableLoopVariable, jitstats.breakLoopExits, jitstats.returnLoopExits,
5212 jitstats.noCompatInnerTrees);
5213 printf("monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
5214 "global mismatch(%llu)\n", jitstats.traceTriggered, jitstats.sideExitIntoInterpreter,
5215 jitstats.typeMapMismatchAtEntry, jitstats.globalShapeMismatchAtEntry);
5216 }
5217 #endif
5218 if (tm->fragmento != NULL) {
5219 JS_ASSERT(tm->reservedDoublePool);
5220 verbose_only(delete tm->fragmento->labels;)
5221 #ifdef DEBUG
5222 delete tm->lirbuf->names;
5223 tm->lirbuf->names = NULL;
5224 #endif
5225 delete tm->lirbuf;
5226 tm->lirbuf = NULL;
5228 if (tm->recordAttempts.ops)
5229 JS_DHashTableFinish(&tm->recordAttempts);
5231 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
5232 VMFragment* f = tm->vmfragments[i];
5233 while(f) {
5234 VMFragment* next = f->next;
5235 tm->fragmento->clearFragment(f);
5236 f = next;
5237 }
5238 tm->vmfragments[i] = NULL;
5239 }
5240 delete tm->fragmento;
5241 tm->fragmento = NULL;
5242 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5243 JS_ASSERT(tm->globalStates[i].globalSlots);
5244 delete tm->globalStates[i].globalSlots;
5245 }
5246 delete[] tm->reservedDoublePool;
5247 tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL;
5248 }
5249 if (tm->reFragmento != NULL) {
5250 delete tm->reLirBuf;
5251 verbose_only(delete tm->reFragmento->labels;)
5252 delete tm->reFragmento;
5253 }
5254 }
5256 void
5257 TraceRecorder::pushAbortStack()
5258 {
5259 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5261 JS_ASSERT(tm->abortStack != this);
5263 nextRecorderToAbort = tm->abortStack;
5264 tm->abortStack = this;
5265 }
5267 void
5268 TraceRecorder::popAbortStack()
5269 {
5270 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5272 JS_ASSERT(tm->abortStack == this);
5274 tm->abortStack = nextRecorderToAbort;
5275 nextRecorderToAbort = NULL;
5276 }
5278 void
5279 js_PurgeJITOracle()
5280 {
5281 oracle.clear();
5282 }
5284 static JSDHashOperator
5285 js_PurgeScriptRecordingAttempts(JSDHashTable *table,
5286 JSDHashEntryHdr *hdr,
5287 uint32 number, void *arg)
5288 {
5289 PCHashEntry *e = (PCHashEntry *)hdr;
5290 JSScript *script = (JSScript *)arg;
5291 jsbytecode *pc = (jsbytecode *)e->key;
5293 if (JS_UPTRDIFF(pc, script->code) < script->length)
5294 return JS_DHASH_REMOVE;
5295 return JS_DHASH_NEXT;
5298 JS_REQUIRES_STACK void
5299 js_PurgeScriptFragments(JSContext* cx, JSScript* script)
5301 if (!TRACING_ENABLED(cx))
5302 return;
5303 debug_only_v(printf("Purging fragments for JSScript %p.\n", (void*)script);)
5304 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5305 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
5306 for (VMFragment **f = &(tm->vmfragments[i]); *f; ) {
5307 VMFragment* frag = *f;
5308 /* Disable future use of any script-associated VMFragment.*/
5309 if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
5310 JS_ASSERT(frag->root == frag);
5311 debug_only_v(printf("Disconnecting VMFragment %p "
5312 "with ip %p, in range [%p,%p).\n",
5313 (void*)frag, frag->ip, script->code,
5314 script->code + script->length));
5315 VMFragment* next = frag->next;
5316 js_TrashTree(cx, frag);
5317 *f = next;
5318 } else {
5319 f = &((*f)->next);
5323 JS_DHashTableEnumerate(&(tm->recordAttempts),
5324 js_PurgeScriptRecordingAttempts, script);
5328 bool
5329 js_OverfullFragmento(JSTraceMonitor* tm, Fragmento *fragmento)
5330 {
5331 /*
5332 * You might imagine the outOMem flag on the lirbuf is sufficient
5333 * to model the notion of "running out of memory", but there are actually
5334 * two separate issues involved:
5335 *
5336 * 1. The process truly running out of memory: malloc() or mmap()
5337 * failed.
5338 *
5339 * 2. The limit we put on the "intended size" of the TraceMonkey code
5340 * cache, in pages, has been exceeded.
5341 *
5342 * Condition 1 doesn't happen very often, but we're obliged to try to
5343 * safely shut down and signal the rest of SpiderMonkey when it
5344 * does. Condition 2 happens quite regularly.
5345 *
5346 * Presently, the code in this file doesn't check the outOMem condition
5347 * often enough, and frequently misuses the unchecked results of
5348 * lirbuffer insertions on the assumption that it will notice the
5349 * outOMem flag "soon enough" when it returns to the monitorRecording
5350 * function. This turns out to be a false assumption if we use outOMem
5351 * to signal condition 2: we regularly provoke "passing our intended
5352 * size" and regularly fail to notice it in time to prevent writing
5353 * over the end of an artificially self-limited LIR buffer.
5354 *
5355 * To mitigate, though not completely solve, this problem, we're
5356 * modeling the two forms of memory exhaustion *separately* for the
5357 * time being: condition 1 is handled by the outOMem flag inside
5358 * nanojit, and condition 2 is being handled independently *here*. So
5359 * we construct our fragmentos to use all available memory they like,
5360 * and only report outOMem to us when there is literally no OS memory
5361 * left. Merely purging our cache when we hit our high-water mark is
5362 * handled by the (few) callers of this function.
5363 */
5365 jsuint maxsz = tm->maxCodeCacheBytes;
5366 if (fragmento == tm->fragmento) {
5367 if (tm->prohibitFlush)
5368 return false;
5369 } else {
5370 /*
5371 * At the time of making the code cache size configurable, we were using
5372 * 16 MB for the main code cache and 1 MB for the regular expression code
5373 * cache. We will stick to this 16:1 ratio here until we unify the two
5374 * code caches.
5375 */
5376 maxsz /= 16;
5378 return (fragmento->cacheUsed() > maxsz);
5379 }
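
/*
 * How the split is consumed in practice: a minimal sketch of a caller.
 * The flush and abort helpers exist elsewhere in this file, but the call
 * site shown here is illustrative only, and outOMem() is assumed to be
 * the LirBuffer accessor the comment above refers to.
 */
#if 0
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    if (js_OverfullFragmento(tm, tm->fragmento))   /* condition 2: over budget */
        js_FlushJITCache(cx);                      /* purge, keep tracing */
    if (tm->lirbuf->outOMem())                     /* condition 1: OS memory gone */
        js_AbortRecording(cx, "out of memory");    /* shut recording down */
#endif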
5381 JS_FORCES_STACK JS_FRIEND_API(void)
5382 js_DeepBail(JSContext *cx)
5383 {
5384 JS_ASSERT(JS_ON_TRACE(cx));
5386 /*
5387 * Exactly one context on the current thread is on trace. Find out which
5388 * one. (Most callers cannot guarantee that it's cx.)
5389 */
5390 JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
5391 JSContext *tracecx = tm->tracecx;
5393 /* It's a bug if a non-FAIL_STATUS builtin gets here. */
5394 JS_ASSERT(tracecx->bailExit);
5396 tm->tracecx = NULL;
5397 tm->prohibitFlush++;
5398 debug_only_v(printf("Deep bail.\n");)
5399 LeaveTree(*tracecx->interpState, tracecx->bailExit);
5400 tracecx->bailExit = NULL;
5401 tracecx->interpState->builtinStatus |= JSBUILTIN_BAILED;
5404 JS_REQUIRES_STACK jsval&
5405 TraceRecorder::argval(unsigned n) const
5407 JS_ASSERT(n < cx->fp->fun->nargs);
5408 return cx->fp->argv[n];
5411 JS_REQUIRES_STACK jsval&
5412 TraceRecorder::varval(unsigned n) const
5414 JS_ASSERT(n < cx->fp->script->nslots);
5415 return cx->fp->slots[n];
5418 JS_REQUIRES_STACK jsval&
5419 TraceRecorder::stackval(int n) const
5421 jsval* sp = cx->fp->regs->sp;
5422 return sp[n];
5425 JS_REQUIRES_STACK LIns*
5426 TraceRecorder::scopeChain() const
5428 return lir->insLoad(LIR_ldp,
5429 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
5430 offsetof(JSStackFrame, scopeChain));
5433 static inline bool
5434 FrameInRange(JSStackFrame* fp, JSStackFrame *target, unsigned callDepth)
5436 while (fp != target) {
5437 if (callDepth-- == 0)
5438 return false;
5439 if (!(fp = fp->down))
5440 return false;
5442 return true;
5443 }
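
/*
 * Example walk (a sketch): with frames g -> f -> h and h == cx->fp, a
 * callDepth of 2 lets the loop step h->down twice, so target g is "in
 * range" and its Call object slots can be imported into the trace; with
 * callDepth 1 the budget runs out at f and we return false.
 */
#if 0
    JSStackFrame* h = cx->fp;            /* innermost, traced frame */
    JSStackFrame* g = h->down->down;     /* two calls up */
    JS_ASSERT(FrameInRange(h, g, 2));    /* reachable: two hops allowed */
    JS_ASSERT(!FrameInRange(h, g, 1));   /* out of range at depth 1 */
#endif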
5445 JS_REQUIRES_STACK JSRecordingStatus
5446 TraceRecorder::activeCallOrGlobalSlot(JSObject* obj, jsval*& vp)
5447 {
5448 // Look up a name in the scope chain, arriving at a property either in the
5449 // global object or some call object's fp->slots, and import that property
5450 // into the trace's native stack frame. This could theoretically do *lookup*
5451 // through the property cache, but there is little performance to be gained
5452 // by doing so since at trace-execution time the underlying object (call
5453 // object or global object) will not be consulted at all: the jsval*
5454 // returned from this function will map (in the tracker) to a LIns* directly
5455 // defining a slot in the trace's native stack.
5457 JS_ASSERT(obj != globalObj);
5459 JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
5460 JSObject* obj2;
5461 JSProperty* prop;
5462 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
5463 ABORT_TRACE_ERROR("error in js_FindProperty");
5464 if (!prop)
5465 ABORT_TRACE("failed to find name in non-global scope chain");
5467 if (obj == globalObj) {
5468 JSScopeProperty* sprop = (JSScopeProperty*) prop;
5470 if (obj2 != obj) {
5471 OBJ_DROP_PROPERTY(cx, obj2, prop);
5472 ABORT_TRACE("prototype property");
5474 if (!isValidSlot(OBJ_SCOPE(obj), sprop)) {
5475 OBJ_DROP_PROPERTY(cx, obj2, prop);
5476 return JSRS_STOP;
5478 if (!lazilyImportGlobalSlot(sprop->slot)) {
5479 OBJ_DROP_PROPERTY(cx, obj2, prop);
5480 ABORT_TRACE("lazy import of global slot failed");
5482 vp = &STOBJ_GET_SLOT(obj, sprop->slot);
5483 OBJ_DROP_PROPERTY(cx, obj2, prop);
5484 return JSRS_CONTINUE;
5487 if (wasDeepAborted())
5488 ABORT_TRACE("deep abort from property lookup");
5490 if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass) {
5491 JSStackFrame* cfp = (JSStackFrame*) JS_GetPrivate(cx, obj);
5492 if (cfp && FrameInRange(cx->fp, cfp, callDepth)) {
5493 JSScopeProperty* sprop = (JSScopeProperty*) prop;
5494 uintN slot = sprop->shortid;
5496 vp = NULL;
5497 if (sprop->getter == js_GetCallArg) {
5498 JS_ASSERT(slot < cfp->fun->nargs);
5499 vp = &cfp->argv[slot];
5500 } else if (sprop->getter == js_GetCallVar) {
5501 JS_ASSERT(slot < cfp->script->nslots);
5502 vp = &cfp->slots[slot];
5504 OBJ_DROP_PROPERTY(cx, obj2, prop);
5505 if (!vp)
5506 ABORT_TRACE("dynamic property of Call object");
5507 return JSRS_CONTINUE;
5511 OBJ_DROP_PROPERTY(cx, obj2, prop);
5512 ABORT_TRACE("fp->scopeChain is not global or active call object");
5515 JS_REQUIRES_STACK LIns*
5516 TraceRecorder::arg(unsigned n)
5518 return get(&argval(n));
5521 JS_REQUIRES_STACK void
5522 TraceRecorder::arg(unsigned n, LIns* i)
5524 set(&argval(n), i);
5527 JS_REQUIRES_STACK LIns*
5528 TraceRecorder::var(unsigned n)
5530 return get(&varval(n));
5533 JS_REQUIRES_STACK void
5534 TraceRecorder::var(unsigned n, LIns* i)
5536 set(&varval(n), i);
5539 JS_REQUIRES_STACK LIns*
5540 TraceRecorder::stack(int n)
5542 return get(&stackval(n));
5545 JS_REQUIRES_STACK void
5546 TraceRecorder::stack(int n, LIns* i)
5548 set(&stackval(n), i, n >= 0);
5551 JS_REQUIRES_STACK LIns*
5552 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
5554 if (v == LIR_fadd || v == LIR_fsub) {
5555 jsdouble r;
5556 if (v == LIR_fadd)
5557 r = v0 + v1;
5558 else
5559 r = v0 - v1;
5560 /*
5561 * Calculate the result of the addition for the current values. If the
5562 * value is not within the integer range, don't even try to demote
5563 * here.
5564 */
5565 if (!JSDOUBLE_IS_NEGZERO(r) && (jsint(r) == r) && isPromoteInt(s0) && isPromoteInt(s1)) {
5566 LIns* d0 = ::demote(lir, s0);
5567 LIns* d1 = ::demote(lir, s1);
5568 /*
5569 * If the inputs are constant, generate an integer constant for
5570 * this operation.
5571 */
5572 if (d0->isconst() && d1->isconst())
5573 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
5574 /*
5575 * Speculatively generate code that will perform the addition over
5576 * the integer inputs as an integer addition/subtraction and exit
5577 * if that fails.
5578 */
5579 v = (LOpcode)((int)v & ~LIR64);
5580 LIns* result = lir->ins2(v, d0, d1);
5581 if (!result->isconst() && (!overflowSafe(d0) || !overflowSafe(d1))) {
5582 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
5583 lir->insGuard(LIR_xt, lir->ins1(LIR_ov, result), createGuardRecord(exit));
5584 }
5585 return lir->ins1(LIR_i2f, result);
5586 }
5587 /*
5588 * The result doesn't fit into the integer domain, so either generate
5589 * a floating point constant or a floating point operation.
5590 */
5591 if (s0->isconst() && s1->isconst())
5592 return lir->insImmf(r);
5593 return lir->ins2(v, s0, s1);
5595 return lir->ins2(v, s0, s1);
5596 }
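
/*
 * A worked instance of the speculative path above (a sketch over
 * hypothetical recorder state, not code from this file; constant pairs
 * are really folded to an immediate earlier): recording 3 + x, where
 * both operands were promoted ints, emits an integer LIR_add plus an
 * overflow guard, then widens the result back to a double.
 */
#if 0
    LIns* d0 = lir->insImm(3);                     /* demoted left operand */
    LIns* d1 = ::demote(lir, s1);                  /* demoted right operand */
    LIns* result = lir->ins2(LIR_add, d0, d1);     /* integer add */
    lir->insGuard(LIR_xt, lir->ins1(LIR_ov, result),
                  createGuardRecord(snapshot(OVERFLOW_EXIT)));
    LIns* sum = lir->ins1(LIR_i2f, result);        /* back to the FP domain */
#endif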
5598 LIns*
5599 TraceRecorder::f2i(LIns* f)
5601 return lir->insCall(&js_DoubleToInt32_ci, &f);
5604 JS_REQUIRES_STACK LIns*
5605 TraceRecorder::makeNumberInt32(LIns* f)
5607 JS_ASSERT(f->isQuad());
5608 LIns* x;
5609 if (!isPromote(f)) {
5610 x = f2i(f);
5611 guard(true, lir->ins2(LIR_feq, f, lir->ins1(LIR_i2f, x)), MISMATCH_EXIT);
5612 } else {
5613 x = ::demote(lir, f);
5615 return x;
5618 JS_REQUIRES_STACK LIns*
5619 TraceRecorder::stringify(jsval& v)
5621 LIns* v_ins = get(&v);
5622 if (JSVAL_IS_STRING(v))
5623 return v_ins;
5625 LIns* args[] = { v_ins, cx_ins };
5626 const CallInfo* ci;
5627 if (JSVAL_IS_NUMBER(v)) {
5628 ci = &js_NumberToString_ci;
5629 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5630 ci = &js_BooleanOrUndefinedToString_ci;
5631 } else {
5632 /*
5633 * Callers must deal with non-primitive (non-null object) values by
5634 * calling an imacro. We don't try to guess about which imacro, with
5635 * what valueOf hint, here.
5636 */
5637 JS_ASSERT(JSVAL_IS_NULL(v));
5638 return INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.nullAtom));
5641 v_ins = lir->insCall(ci, args);
5642 guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
5643 return v_ins;
5646 JS_REQUIRES_STACK JSRecordingStatus
5647 TraceRecorder::call_imacro(jsbytecode* imacro)
5649 JSStackFrame* fp = cx->fp;
5650 JSFrameRegs* regs = fp->regs;
5652 // We can't nest imacros.
5653 if (fp->imacpc)
5654 return JSRS_STOP;
5656 fp->imacpc = regs->pc;
5657 regs->pc = imacro;
5658 atoms = COMMON_ATOMS_START(&cx->runtime->atomState);
5659 return JSRS_IMACRO;
5662 JS_REQUIRES_STACK JSRecordingStatus
5663 TraceRecorder::ifop()
5665 jsval& v = stackval(-1);
5666 LIns* v_ins = get(&v);
5667 bool cond;
5668 LIns* x;
5670 if (JSVAL_IS_NULL(v)) {
5671 cond = false;
5672 x = lir->insImm(0);
5673 } else if (!JSVAL_IS_PRIMITIVE(v)) {
5674 cond = true;
5675 x = lir->insImm(1);
5676 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5677 /* Test whether the boolean is true; we negate later if we are testing for false. */
5678 cond = JSVAL_TO_PSEUDO_BOOLEAN(v) == JS_TRUE;
5679 x = lir->ins2i(LIR_eq, v_ins, 1);
5680 } else if (isNumber(v)) {
5681 jsdouble d = asNumber(v);
5682 cond = !JSDOUBLE_IS_NaN(d) && d;
5683 x = lir->ins2(LIR_and,
5684 lir->ins2(LIR_feq, v_ins, v_ins),
5685 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmq(0))));
5686 } else if (JSVAL_IS_STRING(v)) {
5687 cond = JSSTRING_LENGTH(JSVAL_TO_STRING(v)) != 0;
5688 x = lir->ins2(LIR_piand,
5689 lir->insLoad(LIR_ldp,
5690 v_ins,
5691 (int)offsetof(JSString, length)),
5692 INS_CONSTPTR(reinterpret_cast<void *>(JSSTRING_LENGTH_MASK)));
5693 } else {
5694 JS_NOT_REACHED("ifop");
5695 return JSRS_STOP;
5698 jsbytecode* pc = cx->fp->regs->pc;
5699 emitIf(pc, cond, x);
5700 return checkTraceEnd(pc);
5701 }
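
/*
 * The numeric arm above encodes ECMA ToBoolean for doubles as pure LIR:
 * "x is truthy" means "x is not NaN and x != 0". A sketch of the recorded
 * condition for "if (x)":
 *
 *   truthy(x) = feq(x, x) & eq0(feq(x, 0))
 *
 * NaN fails the first conjunct (NaN != NaN), and both +0 and -0 fail the
 * second, matching the interpreter's behavior exactly.
 */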
5703 #ifdef NANOJIT_IA32
5704 /* Record LIR for a tableswitch or tableswitchx op. We record LIR only the
5705 "first" time we hit the op. Later, when we start traces after exiting that
5706 trace, we just patch. */
5707 JS_REQUIRES_STACK LIns*
5708 TraceRecorder::tableswitch()
5710 jsval& v = stackval(-1);
5711 if (!isNumber(v))
5712 return NULL;
5714 /* no need to guard if condition is constant */
5715 LIns* v_ins = f2i(get(&v));
5716 if (v_ins->isconst() || v_ins->isconstq())
5717 return NULL;
5719 jsbytecode* pc = cx->fp->regs->pc;
5720 /* Starting a new trace after exiting a trace via switch. */
5721 if (anchor &&
5722 (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
5723 fragment->ip == pc) {
5724 return NULL;
5727 /* Decode jsop. */
5728 jsint low, high;
5729 if (*pc == JSOP_TABLESWITCH) {
5730 pc += JUMP_OFFSET_LEN;
5731 low = GET_JUMP_OFFSET(pc);
5732 pc += JUMP_OFFSET_LEN;
5733 high = GET_JUMP_OFFSET(pc);
5734 } else {
5735 pc += JUMPX_OFFSET_LEN;
5736 low = GET_JUMPX_OFFSET(pc);
5737 pc += JUMPX_OFFSET_LEN;
5738 high = GET_JUMPX_OFFSET(pc);
5741 /* Really large tables won't fit in a page. This is a conservative check.
5742 If it matters in practice we need to go off-page. */
5743 if ((high + 1 - low) * sizeof(intptr_t*) + 128 > (unsigned) LARGEST_UNDERRUN_PROT) {
5744 // This throws away the return value of switchop, but that seems OK
5745 // because for a numeric operand switchop always returns JSRS_CONTINUE.
5746 (void) switchop();
5747 return NULL;
5750 /* Generate switch LIR. */
5751 LIns* si_ins = lir_buf_writer->insSkip(sizeof(SwitchInfo));
5752 SwitchInfo* si = (SwitchInfo*) si_ins->payload();
5753 si->count = high + 1 - low;
5754 si->table = 0;
5755 si->index = (uint32) -1;
5756 LIns* diff = lir->ins2(LIR_sub, v_ins, lir->insImm(low));
5757 LIns* cmp = lir->ins2(LIR_ult, diff, lir->insImm(si->count));
5758 lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
5759 lir->insStorei(diff, lir->insImmPtr(&si->index), 0);
5760 VMSideExit* exit = snapshot(CASE_EXIT);
5761 exit->switchInfo = si;
5762 return lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
5764 #endif
5766 JS_REQUIRES_STACK JSRecordingStatus
5767 TraceRecorder::switchop()
5769 jsval& v = stackval(-1);
5770 LIns* v_ins = get(&v);
5771 /* no need to guard if condition is constant */
5772 if (v_ins->isconst() || v_ins->isconstq())
5773 return JSRS_CONTINUE;
5774 if (isNumber(v)) {
5775 jsdouble d = asNumber(v);
5776 guard(true,
5777 addName(lir->ins2(LIR_feq, v_ins, lir->insImmf(d)),
5778 "guard(switch on numeric)"),
5779 BRANCH_EXIT);
5780 } else if (JSVAL_IS_STRING(v)) {
5781 LIns* args[] = { v_ins, INS_CONSTPTR(JSVAL_TO_STRING(v)) };
5782 guard(true,
5783 addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(&js_EqualStrings_ci, args))),
5784 "guard(switch on string)"),
5785 BRANCH_EXIT);
5786 } else if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
5787 guard(true,
5788 addName(lir->ins2(LIR_eq, v_ins, lir->insImm(JSVAL_TO_PUBLIC_PSEUDO_BOOLEAN(v))),
5789 "guard(switch on boolean)"),
5790 BRANCH_EXIT);
5791 } else {
5792 ABORT_TRACE("switch on object or null");
5794 return JSRS_CONTINUE;
5797 JS_REQUIRES_STACK JSRecordingStatus
5798 TraceRecorder::inc(jsval& v, jsint incr, bool pre)
5800 LIns* v_ins = get(&v);
5801 CHECK_STATUS(inc(v, v_ins, incr, pre));
5802 set(&v, v_ins);
5803 return JSRS_CONTINUE;
5804 }
5806 /*
5807 * On exit, v_ins is the incremented unboxed value, and the appropriate
5808 * value (pre- or post-increment as described by pre) is stacked.
5809 */
5810 JS_REQUIRES_STACK JSRecordingStatus
5811 TraceRecorder::inc(jsval& v, LIns*& v_ins, jsint incr, bool pre)
5813 if (!isNumber(v))
5814 ABORT_TRACE("can only inc numbers");
5816 LIns* v_after = alu(LIR_fadd, asNumber(v), incr, v_ins, lir->insImmf(incr));
5818 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
5819 JS_ASSERT(cs.ndefs == 1);
5820 stack(-cs.nuses, pre ? v_after : v_ins);
5821 v_ins = v_after;
5822 return JSRS_CONTINUE;
5823 }
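
/*
 * Pre- versus post-increment in the helper above (a sketch): for "i++"
 * with i == 3, v_after is the LIR for 4 but the old 3 is what gets
 * stacked; for "++i" the 4 is stacked. Either way the variable's slot is
 * updated to v_after by the jsval& overload of inc().
 */
#if 0
    /* i++ : pre == false, so the old value stays on the stack. */
    stack(-cs.nuses, v_ins);      /* pushes the old value (3) */
    /* ++i : pre == true, so the incremented value is pushed. */
    stack(-cs.nuses, v_after);    /* pushes the new value (4) */
#endif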
5825 JS_REQUIRES_STACK JSRecordingStatus
5826 TraceRecorder::incProp(jsint incr, bool pre)
5828 jsval& l = stackval(-1);
5829 if (JSVAL_IS_PRIMITIVE(l))
5830 ABORT_TRACE("incProp on primitive");
5832 JSObject* obj = JSVAL_TO_OBJECT(l);
5833 LIns* obj_ins = get(&l);
5835 uint32 slot;
5836 LIns* v_ins;
5837 CHECK_STATUS(prop(obj, obj_ins, slot, v_ins));
5839 if (slot == SPROP_INVALID_SLOT)
5840 ABORT_TRACE("incProp on invalid slot");
5842 jsval& v = STOBJ_GET_SLOT(obj, slot);
5843 CHECK_STATUS(inc(v, v_ins, incr, pre));
5845 box_jsval(v, v_ins);
5847 LIns* dslots_ins = NULL;
5848 stobj_set_slot(obj_ins, slot, dslots_ins, v_ins);
5849 return JSRS_CONTINUE;
5852 JS_REQUIRES_STACK JSRecordingStatus
5853 TraceRecorder::incElem(jsint incr, bool pre)
5855 jsval& r = stackval(-1);
5856 jsval& l = stackval(-2);
5857 jsval* vp;
5858 LIns* v_ins;
5859 LIns* addr_ins;
5861 if (!JSVAL_IS_OBJECT(l) || !JSVAL_IS_INT(r) ||
5862 !guardDenseArray(JSVAL_TO_OBJECT(l), get(&l))) {
5863 return JSRS_STOP;
5866 CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins));
5867 if (!addr_ins) // if we read a hole, abort
5868 return JSRS_STOP;
5869 CHECK_STATUS(inc(*vp, v_ins, incr, pre));
5870 box_jsval(*vp, v_ins);
5871 lir->insStorei(v_ins, addr_ins, 0);
5872 return JSRS_CONTINUE;
5875 static bool
5876 evalCmp(LOpcode op, double result)
5878 bool cond;
5879 switch (op) {
5880 case LIR_feq:
5881 cond = (result == 0);
5882 break;
5883 case LIR_flt:
5884 cond = result < 0;
5885 break;
5886 case LIR_fgt:
5887 cond = result > 0;
5888 break;
5889 case LIR_fle:
5890 cond = result <= 0;
5891 break;
5892 case LIR_fge:
5893 cond = result >= 0;
5894 break;
5895 default:
5896 JS_NOT_REACHED("unexpected comparison op");
5897 return false;
5899 return cond;
5902 static bool
5903 evalCmp(LOpcode op, double l, double r)
5905 return evalCmp(op, l - r);
5908 static bool
5909 evalCmp(LOpcode op, JSString* l, JSString* r)
5911 if (op == LIR_feq)
5912 return js_EqualStrings(l, r);
5913 return evalCmp(op, js_CompareStrings(l, r));
5914 }
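
/*
 * These helpers fold comparisons over the values seen while recording, so
 * the recorder knows which way the branch went on this iteration. A quick
 * sketch of the folding (illustrative values only):
 *
 *   evalCmp(LIR_flt, 1.0, 2.0)  -> evalCmp(LIR_flt, -1.0)      -> true
 *   evalCmp(LIR_fge, 2.0, 2.0)  -> evalCmp(LIR_fge,  0.0)      -> true
 *   evalCmp(LIR_feq, "a", "b")  -> js_EqualStrings("a", "b")   -> false
 *
 * The subtraction form is safe here because only the sign of the
 * difference is consulted, never its exact value.
 */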
5916 JS_REQUIRES_STACK void
5917 TraceRecorder::strictEquality(bool equal, bool cmpCase)
5919 jsval& r = stackval(-1);
5920 jsval& l = stackval(-2);
5921 LIns* l_ins = get(&l);
5922 LIns* r_ins = get(&r);
5923 LIns* x;
5924 bool cond;
5926 uint8 ltag = getPromotedType(l);
5927 if (ltag != getPromotedType(r)) {
5928 cond = !equal;
5929 x = lir->insImm(cond);
5930 } else if (ltag == JSVAL_STRING) {
5931 LIns* args[] = { r_ins, l_ins };
5932 x = lir->ins2i(LIR_eq, lir->insCall(&js_EqualStrings_ci, args), equal);
5933 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
5934 } else {
5935 LOpcode op = (ltag != JSVAL_DOUBLE) ? LIR_eq : LIR_feq;
5936 x = lir->ins2(op, l_ins, r_ins);
5937 if (!equal)
5938 x = lir->ins_eq0(x);
5939 cond = (ltag == JSVAL_DOUBLE)
5940 ? asNumber(l) == asNumber(r)
5941 : l == r;
5943 cond = (cond == equal);
5945 if (cmpCase) {
5946 /* Only guard if the same path may not always be taken. */
5947 if (!x->isconst())
5948 guard(cond, x, BRANCH_EXIT);
5949 return;
5952 set(&l, x);
5955 JS_REQUIRES_STACK JSRecordingStatus
5956 TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
5958 jsval& rval = stackval(-1);
5959 jsval& lval = stackval(-2);
5960 LIns* l_ins = get(&lval);
5961 LIns* r_ins = get(&rval);
5963 return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
5966 JS_REQUIRES_STACK JSRecordingStatus
5967 TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
5968 bool negate, bool tryBranchAfterCond,
5969 jsval& rval)
5971 bool fp = false;
5972 bool cond;
5973 LIns* args[] = { NULL, NULL };
5975 /*
5976 * The if chain below closely mirrors that found in 11.9.3, in general
5977 * deviating from that ordering of ifs only to account for SpiderMonkey's
5978 * conflation of booleans and undefined and for the possibility of
5979 * confusing objects and null. Note carefully the spec-mandated recursion
5980 * in the final else clause, which terminates because Number == T recurs
5981 * only if T is Object, but that must recur again to convert Object to
5982 * primitive, and ToPrimitive throws if the object cannot be converted to
5983 * a primitive value (which would terminate recursion).
5984 */
5986 if (getPromotedType(l) == getPromotedType(r)) {
5987 if (JSVAL_TAG(l) == JSVAL_OBJECT || JSVAL_TAG(l) == JSVAL_BOOLEAN) {
5988 cond = (l == r);
5989 } else if (JSVAL_IS_STRING(l)) {
5990 args[0] = r_ins, args[1] = l_ins;
5991 l_ins = lir->insCall(&js_EqualStrings_ci, args);
5992 r_ins = lir->insImm(1);
5993 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
5994 } else {
5995 JS_ASSERT(isNumber(l) && isNumber(r));
5996 cond = (asNumber(l) == asNumber(r));
5997 fp = true;
5999 } else if (JSVAL_IS_NULL(l) && JSVAL_TAG(r) == JSVAL_BOOLEAN) {
6000 l_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6001 cond = (r == JSVAL_VOID);
6002 } else if (JSVAL_TAG(l) == JSVAL_BOOLEAN && JSVAL_IS_NULL(r)) {
6003 r_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6004 cond = (l == JSVAL_VOID);
6005 } else if (isNumber(l) && JSVAL_IS_STRING(r)) {
6006 args[0] = r_ins, args[1] = cx_ins;
6007 r_ins = lir->insCall(&js_StringToNumber_ci, args);
6008 cond = (asNumber(l) == js_StringToNumber(cx, JSVAL_TO_STRING(r)));
6009 fp = true;
6010 } else if (JSVAL_IS_STRING(l) && isNumber(r)) {
6011 args[0] = l_ins, args[1] = cx_ins;
6012 l_ins = lir->insCall(&js_StringToNumber_ci, args);
6013 cond = (js_StringToNumber(cx, JSVAL_TO_STRING(l)) == asNumber(r));
6014 fp = true;
6015 } else {
6016 if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
6017 bool isVoid = JSVAL_IS_VOID(l);
6018 guard(isVoid,
6019 lir->ins2(LIR_eq, l_ins, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID))),
6020 BRANCH_EXIT);
6021 if (!isVoid) {
6022 args[0] = l_ins, args[1] = cx_ins;
6023 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6024 l = (l == JSVAL_VOID)
6025 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
6026 : INT_TO_JSVAL(l == JSVAL_TRUE);
6027 return equalityHelper(l, r, l_ins, r_ins, negate,
6028 tryBranchAfterCond, rval);
6030 } else if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
6031 bool isVoid = JSVAL_IS_VOID(r);
6032 guard(isVoid,
6033 lir->ins2(LIR_eq, r_ins, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID))),
6034 BRANCH_EXIT);
6035 if (!isVoid) {
6036 args[0] = r_ins, args[1] = cx_ins;
6037 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6038 r = (r == JSVAL_VOID)
6039 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
6040 : INT_TO_JSVAL(r == JSVAL_TRUE);
6041 return equalityHelper(l, r, l_ins, r_ins, negate,
6042 tryBranchAfterCond, rval);
6044 } else {
6045 if ((JSVAL_IS_STRING(l) || isNumber(l)) && !JSVAL_IS_PRIMITIVE(r)) {
6046 ABORT_IF_XML(r);
6047 return call_imacro(equality_imacros.any_obj);
6049 if (!JSVAL_IS_PRIMITIVE(l) && (JSVAL_IS_STRING(r) || isNumber(r))) {
6050 ABORT_IF_XML(l);
6051 return call_imacro(equality_imacros.obj_any);
6055 l_ins = lir->insImm(0);
6056 r_ins = lir->insImm(1);
6057 cond = false;
6060 /* If the operands aren't numbers, compare them as integers. */
6061 LOpcode op = fp ? LIR_feq : LIR_eq;
6062 LIns* x = lir->ins2(op, l_ins, r_ins);
6063 if (negate) {
6064 x = lir->ins_eq0(x);
6065 cond = !cond;
6068 jsbytecode* pc = cx->fp->regs->pc;
6070 /*
6071 * Don't guard if the same path is always taken. If it isn't, we have to
6072 * fuse comparisons and the following branch, because the interpreter does
6073 * that.
6074 */
6075 if (tryBranchAfterCond)
6076 fuseIf(pc + 1, cond, x);
6078 /*
6079 * There is no need to write out the result of this comparison if the trace
6080 * ends on this operation.
6081 */
6082 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
6083 CHECK_STATUS(checkTraceEnd(pc + 1));
6085 /*
6086 * We update the stack after the guard. This is safe since the guard bails
6087 * out at the comparison and the interpreter will therefore re-execute the
6088 * comparison. This way the value of the condition doesn't have to be
6089 * calculated and saved on the stack in most cases.
6090 */
6091 set(&rval, x);
6093 return JSRS_CONTINUE;
6094 }
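
/*
 * A worked example of the machinery above (a sketch): recording 1 == "1"
 * takes the isNumber(l) && JSVAL_IS_STRING(r) arm, so the trace converts
 * the string operand with js_StringToNumber and compares as doubles. The
 * record-time cond (1 == 1, true) only steers fuseIf; the emitted LIR
 * redoes the comparison on every run.
 */
#if 0
    LIns* args[] = { r_ins, cx_ins };
    r_ins = lir->insCall(&js_StringToNumber_ci, args);  /* "1" -> 1.0 */
    LIns* x = lir->ins2(LIR_feq, l_ins, r_ins);         /* double compare */
#endif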
6096 JS_REQUIRES_STACK JSRecordingStatus
6097 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
6099 jsval& r = stackval(-1);
6100 jsval& l = stackval(-2);
6101 LIns* x = NULL;
6102 bool cond;
6103 LIns* l_ins = get(&l);
6104 LIns* r_ins = get(&r);
6105 bool fp = false;
6106 jsdouble lnum, rnum;
6108 /*
6109 * 11.8.5 if either argument is an object with a function-valued valueOf
6110 * property; if both arguments are objects with non-function-valued valueOf
6111 * properties, abort.
6112 */
6113 if (!JSVAL_IS_PRIMITIVE(l)) {
6114 ABORT_IF_XML(l);
6115 if (!JSVAL_IS_PRIMITIVE(r)) {
6116 ABORT_IF_XML(r);
6117 return call_imacro(binary_imacros.obj_obj);
6119 return call_imacro(binary_imacros.obj_any);
6121 if (!JSVAL_IS_PRIMITIVE(r)) {
6122 ABORT_IF_XML(r);
6123 return call_imacro(binary_imacros.any_obj);
6126 /* 11.8.5 steps 3, 16-21. */
6127 if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
6128 LIns* args[] = { r_ins, l_ins };
6129 l_ins = lir->insCall(&js_CompareStrings_ci, args);
6130 r_ins = lir->insImm(0);
6131 cond = evalCmp(op, JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
6132 goto do_comparison;
6135 /* 11.8.5 steps 4-5. */
6136 if (!JSVAL_IS_NUMBER(l)) {
6137 LIns* args[] = { l_ins, cx_ins };
6138 switch (JSVAL_TAG(l)) {
6139 case JSVAL_BOOLEAN:
6140 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6141 break;
6142 case JSVAL_STRING:
6143 l_ins = lir->insCall(&js_StringToNumber_ci, args);
6144 break;
6145 case JSVAL_OBJECT:
6146 if (JSVAL_IS_NULL(l)) {
6147 l_ins = lir->insImmq(0);
6148 break;
6150 // FALL THROUGH
6151 case JSVAL_INT:
6152 case JSVAL_DOUBLE:
6153 default:
6154 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
6155 "have been handled at start of method");
6156 ABORT_TRACE("safety belt");
6159 if (!JSVAL_IS_NUMBER(r)) {
6160 LIns* args[] = { r_ins, cx_ins };
6161 switch (JSVAL_TAG(r)) {
6162 case JSVAL_BOOLEAN:
6163 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6164 break;
6165 case JSVAL_STRING:
6166 r_ins = lir->insCall(&js_StringToNumber_ci, args);
6167 break;
6168 case JSVAL_OBJECT:
6169 if (JSVAL_IS_NULL(r)) {
6170 r_ins = lir->insImmq(0);
6171 break;
6173 // FALL THROUGH
6174 case JSVAL_INT:
6175 case JSVAL_DOUBLE:
6176 default:
6177 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
6178 "have been handled at start of method");
6179 ABORT_TRACE("safety belt");
6183 jsval tmp = JSVAL_NULL;
6184 JSAutoTempValueRooter tvr(cx, 1, &tmp);
6186 tmp = l;
6187 lnum = js_ValueToNumber(cx, &tmp);
6188 tmp = r;
6189 rnum = js_ValueToNumber(cx, &tmp);
6191 cond = evalCmp(op, lnum, rnum);
6192 fp = true;
6194 /* 11.8.5 steps 6-15. */
6195 do_comparison:
6196 /* If we are not comparing doubles, the values to compare are integers (e.g. a js_CompareStrings result), so use an integer compare. */
6197 if (!fp) {
6198 JS_ASSERT(op >= LIR_feq && op <= LIR_fge);
6199 op = LOpcode(op + (LIR_eq - LIR_feq));
6201 x = lir->ins2(op, l_ins, r_ins);
6203 jsbytecode* pc = cx->fp->regs->pc;
6205 /*
6206 * Don't guard if the same path is always taken. If it isn't, we have to
6207 * fuse comparisons and the following branch, because the interpreter does
6208 * that.
6209 */
6210 if (tryBranchAfterCond)
6211 fuseIf(pc + 1, cond, x);
6213 /*
6214 * There is no need to write out the result of this comparison if the trace
6215 * ends on this operation.
6216 */
6217 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
6218 CHECK_STATUS(checkTraceEnd(pc + 1));
6220 /*
6221 * We update the stack after the guard. This is safe since the guard bails
6222 * out at the comparison and the interpreter will therefore re-execute the
6223 * comparison. This way the value of the condition doesn't have to be
6224 * calculated and saved on the stack in most cases.
6225 */
6226 set(&l, x);
6228 return JSRS_CONTINUE;
6231 JS_REQUIRES_STACK JSRecordingStatus
6232 TraceRecorder::unary(LOpcode op)
6234 jsval& v = stackval(-1);
6235 bool intop = !(op & LIR64);
6236 if (isNumber(v)) {
6237 LIns* a = get(&v);
6238 if (intop)
6239 a = f2i(a);
6240 a = lir->ins1(op, a);
6241 if (intop)
6242 a = lir->ins1(LIR_i2f, a);
6243 set(&v, a);
6244 return JSRS_CONTINUE;
6246 return JSRS_STOP;
6249 JS_REQUIRES_STACK JSRecordingStatus
6250 TraceRecorder::binary(LOpcode op)
6252 jsval& r = stackval(-1);
6253 jsval& l = stackval(-2);
6255 if (!JSVAL_IS_PRIMITIVE(l)) {
6256 ABORT_IF_XML(l);
6257 if (!JSVAL_IS_PRIMITIVE(r)) {
6258 ABORT_IF_XML(r);
6259 return call_imacro(binary_imacros.obj_obj);
6261 return call_imacro(binary_imacros.obj_any);
6263 if (!JSVAL_IS_PRIMITIVE(r)) {
6264 ABORT_IF_XML(r);
6265 return call_imacro(binary_imacros.any_obj);
6268 bool intop = !(op & LIR64);
6269 LIns* a = get(&l);
6270 LIns* b = get(&r);
6272 bool leftIsNumber = isNumber(l);
6273 jsdouble lnum = leftIsNumber ? asNumber(l) : 0;
6275 bool rightIsNumber = isNumber(r);
6276 jsdouble rnum = rightIsNumber ? asNumber(r) : 0;
6278 if ((op >= LIR_sub && op <= LIR_ush) || // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
6279 (op >= LIR_fsub && op <= LIR_fdiv)) { // fsub, fmul, fdiv
6280 LIns* args[2];
6281 if (JSVAL_IS_STRING(l)) {
6282 args[0] = a;
6283 args[1] = cx_ins;
6284 a = lir->insCall(&js_StringToNumber_ci, args);
6285 lnum = js_StringToNumber(cx, JSVAL_TO_STRING(l));
6286 leftIsNumber = true;
6288 if (JSVAL_IS_STRING(r)) {
6289 args[0] = b;
6290 args[1] = cx_ins;
6291 b = lir->insCall(&js_StringToNumber_ci, args);
6292 rnum = js_StringToNumber(cx, JSVAL_TO_STRING(r));
6293 rightIsNumber = true;
6296 if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
6297 LIns* args[] = { a, cx_ins };
6298 a = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6299 lnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_PSEUDO_BOOLEAN(l));
6300 leftIsNumber = true;
6302 if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
6303 LIns* args[] = { b, cx_ins };
6304 b = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
6305 rnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_PSEUDO_BOOLEAN(r));
6306 rightIsNumber = true;
6308 if (leftIsNumber && rightIsNumber) {
6309 if (intop) {
6310 LIns *args[] = { a };
6311 a = lir->insCall(op == LIR_ush ? &js_DoubleToUint32_ci : &js_DoubleToInt32_ci, args);
6312 b = f2i(b);
6314 a = alu(op, lnum, rnum, a, b);
6315 if (intop)
6316 a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
6317 set(&l, a);
6318 return JSRS_CONTINUE;
6320 return JSRS_STOP;
6323 JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);
6325 bool
6326 TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
6328 JS_ASSERT(op_offset < sizeof(JSObjectOps));
6329 JS_ASSERT(op_offset % sizeof(void *) == 0);
6331 #define OP(ops) (*(void **) ((uint8 *) (ops) + op_offset))
6332 void* ptr = OP(map->ops);
6333 if (ptr != OP(&js_ObjectOps))
6334 return false;
6335 #undef OP
6337 ops_ins = addName(lir->insLoad(LIR_ldp, map_ins, int(offsetof(JSObjectMap, ops))), "ops");
6338 LIns* n = lir->insLoad(LIR_ldp, ops_ins, op_offset);
6339 guard(true,
6340 addName(lir->ins2(LIR_eq, n, INS_CONSTPTR(ptr)), "guard(native-map)"),
6341 BRANCH_EXIT);
6343 return true;
6346 JS_REQUIRES_STACK JSRecordingStatus
6347 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
6349 jsbytecode* pc = cx->fp->regs->pc;
6350 JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_SETNAME && *pc != JSOP_SETPROP);
6352 // Mimic the interpreter's special case for dense arrays by skipping up one
6353 // hop along the proto chain when accessing a named (not indexed) property,
6354 // typically to find Array.prototype methods.
6355 JSObject* aobj = obj;
6356 if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
6357 guardDenseArray(obj, obj_ins, BRANCH_EXIT);
6358 aobj = OBJ_GET_PROTO(cx, obj);
6359 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6362 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
6363 LIns* ops_ins;
6365 // Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops,
6366 // which is required in order to use native objects (those whose maps are scopes),
6367 // or on even narrower conditions required because the cache miss case
6368 // will call a particular object-op (js_GetProperty, js_SetProperty).
6370 // We parameterize using offsetof and guard on match against the hook at
6371 // the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
6372 // guards the js_SetProperty case.
6373 uint32 format = js_CodeSpec[*pc].format;
6374 uint32 mode = JOF_MODE(format);
6376 // No need to guard native-ness of global object.
6377 JS_ASSERT(OBJ_IS_NATIVE(globalObj));
6378 if (aobj != globalObj) {
6379 size_t op_offset = offsetof(JSObjectOps, objectMap);
6380 if (mode == JOF_PROP || mode == JOF_VARPROP) {
6381 JS_ASSERT(!(format & JOF_SET));
6382 op_offset = offsetof(JSObjectOps, getProperty);
6383 } else {
6384 JS_ASSERT(mode == JOF_NAME);
6387 if (!map_is_native(aobj->map, map_ins, ops_ins, op_offset))
6388 ABORT_TRACE("non-native map");
6391 JSAtom* atom;
6392 JSPropCacheEntry* entry;
6393 PROPERTY_CACHE_TEST(cx, pc, aobj, obj2, entry, atom);
6394 if (!atom) {
6395 // Null atom means that obj2 is locked and must now be unlocked.
6396 JS_UNLOCK_OBJ(cx, obj2);
6397 } else {
6398 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
6399 jsid id = ATOM_TO_JSID(atom);
6400 JSProperty* prop;
6401 if (JOF_OPMODE(*pc) == JOF_NAME) {
6402 JS_ASSERT(aobj == obj);
6403 entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);
6405 if (!entry)
6406 ABORT_TRACE_ERROR("error in js_FindPropertyHelper");
6407 if (entry == JS_NO_PROP_CACHE_FILL)
6408 ABORT_TRACE("cannot cache name");
6409 } else {
6410 int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
6411 cx->resolveFlags,
6412 &obj2, &prop);
6414 if (protoIndex < 0)
6415 ABORT_TRACE_ERROR("error in js_LookupPropertyWithFlags");
6417 if (prop) {
6418 if (!OBJ_IS_NATIVE(obj2)) {
6419 OBJ_DROP_PROPERTY(cx, obj2, prop);
6420 ABORT_TRACE("property found on non-native object");
6422 entry = js_FillPropertyCache(cx, aobj, 0, protoIndex, obj2,
6423 (JSScopeProperty*) prop, false);
6424 JS_ASSERT(entry);
6425 if (entry == JS_NO_PROP_CACHE_FILL)
6426 entry = NULL;
6430 if (!prop) {
6431 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
6432 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
6433 // the global it's assigning does not yet exist, create it.
6434 obj2 = obj;
6436 // Use PCVAL_NULL to return "no such property" to our caller.
6437 pcval = PCVAL_NULL;
6438 return JSRS_CONTINUE;
6441 OBJ_DROP_PROPERTY(cx, obj2, prop);
6442 if (!entry)
6443 ABORT_TRACE("failed to fill property cache");
6446 if (wasDeepAborted())
6447 ABORT_TRACE("deep abort from property lookup");
6449 #ifdef JS_THREADSAFE
6450 // There's a potential race in any JS_THREADSAFE embedding that's nuts
6451 // enough to share mutable objects on the scope or proto chain, but we
6452 // don't care about such insane embeddings. Anyway, the (scope, proto)
6453 // entry->vcap coordinates must reach obj2 from aobj at this point.
6454 JS_ASSERT(cx->requestDepth);
6455 #endif
6457 // Emit guard(s), common code for both hit and miss cases.
6458 // Check for first-level cache hit and guard on kshape if possible.
6459 // Otherwise guard on key object exact match.
6460 if (PCVCAP_TAG(entry->vcap) <= 1) {
6461 if (aobj != globalObj) {
6462 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6463 "shape");
6464 guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
6465 BRANCH_EXIT);
6467 } else {
6468 #ifdef DEBUG
6469 JSOp op = js_GetOpcode(cx, cx->fp->script, pc);
6470 JSAtom *pcatom;
6471 if (op == JSOP_LENGTH) {
6472 pcatom = cx->runtime->atomState.lengthAtom;
6473 } else {
6474 ptrdiff_t pcoff = (JOF_TYPE(js_CodeSpec[op].format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
6475 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, pcatom);
6477 JS_ASSERT(entry->kpc == (jsbytecode *) pcatom);
6478 JS_ASSERT(entry->kshape == jsuword(aobj));
6479 #endif
6480 if (aobj != globalObj && !obj_ins->isconstp()) {
6481 guard(true, addName(lir->ins2i(LIR_eq, obj_ins, entry->kshape), "guard(kobj)"),
6482 BRANCH_EXIT);
6486 // For any hit that goes up the scope and/or proto chains, we will need to
6487 // guard on the shape of the object containing the property.
6488 if (PCVCAP_TAG(entry->vcap) >= 1) {
6489 jsuword vcap = entry->vcap;
6490 uint32 vshape = PCVCAP_SHAPE(vcap);
6491 JS_ASSERT(OBJ_SHAPE(obj2) == vshape);
6493 LIns* obj2_ins;
6494 if (PCVCAP_TAG(entry->vcap) == 1) {
6495 // Duplicate the special case in PROPERTY_CACHE_TEST.
6496 obj2_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6497 guard(false, lir->ins_eq0(obj2_ins), BRANCH_EXIT);
6498 } else {
6499 obj2_ins = INS_CONSTPTR(obj2);
6501 map_ins = lir->insLoad(LIR_ldp, obj2_ins, (int)offsetof(JSObject, map));
6502 if (!map_is_native(obj2->map, map_ins, ops_ins))
6503 ABORT_TRACE("non-native map");
6505 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6506 "shape");
6507 guard(true,
6508 addName(lir->ins2i(LIR_eq, shape_ins, vshape), "guard(vshape)"),
6509 BRANCH_EXIT);
6512 pcval = entry->vword;
6513 return JSRS_CONTINUE;
6514 }
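
/*
 * What the guards above buy us (a sketch): suppose the cache hit was for
 * "obj.x" with obj's scope at shape S. The emitted LIR reloads the shape
 * through obj's map on every run and exits if it is no longer S, so a
 * shape-changing mutation (adding a property, swapping a getter) safely
 * kicks the trace back to the interpreter before pcval could go stale.
 */
#if 0
    LIns* shape_ins = lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape));
    guard(true, lir->ins2i(LIR_eq, shape_ins, entry->kshape), BRANCH_EXIT);
#endif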
6516 void
6517 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, LIns* v_ins, const char *name)
6519 addName(lir->insStorei(v_ins, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval)),
6520 name);
6523 void
6524 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins,
6525 const char *name)
6527 if (!dslots_ins)
6528 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
6529 addName(lir->insStorei(v_ins, dslots_ins, slot * sizeof(jsval)), name);
6532 void
6533 TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
6535 if (slot < JS_INITIAL_NSLOTS) {
6536 stobj_set_fslot(obj_ins, slot, v_ins, "set_slot(fslots)");
6537 } else {
6538 stobj_set_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins, v_ins,
6539 "set_slot(dslots)");
6543 LIns*
6544 TraceRecorder::stobj_get_fslot(LIns* obj_ins, unsigned slot)
6546 JS_ASSERT(slot < JS_INITIAL_NSLOTS);
6547 return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
6550 LIns*
6551 TraceRecorder::stobj_get_dslot(LIns* obj_ins, unsigned index, LIns*& dslots_ins)
6553 if (!dslots_ins)
6554 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
6555 return lir->insLoad(LIR_ldp, dslots_ins, index * sizeof(jsval));
6558 LIns*
6559 TraceRecorder::stobj_get_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins)
6561 if (slot < JS_INITIAL_NSLOTS)
6562 return stobj_get_fslot(obj_ins, slot);
6563 return stobj_get_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins);
6564 }
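
/*
 * Slot addressing in the helpers above (a sketch): a slot below
 * JS_INITIAL_NSLOTS is read straight out of the object's inline fslots
 * array; anything at or above it indexes the heap-allocated dslots
 * vector, rebased by JS_INITIAL_NSLOTS. Threading dslots_ins through by
 * reference lets several accesses share one load of the dslots pointer.
 */
#if 0
    LIns* dslots_ins = NULL;
    LIns* a = stobj_get_slot(obj_ins, 2, dslots_ins);                      /* fslots: no dslots load */
    LIns* b = stobj_get_slot(obj_ins, JS_INITIAL_NSLOTS, dslots_ins);      /* loads dslots once */
    LIns* c = stobj_get_slot(obj_ins, JS_INITIAL_NSLOTS + 1, dslots_ins);  /* reuses dslots_ins */
#endif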
6566 JSRecordingStatus
6567 TraceRecorder::native_set(LIns* obj_ins, JSScopeProperty* sprop, LIns*& dslots_ins, LIns* v_ins)
6569 if (SPROP_HAS_STUB_SETTER(sprop) && sprop->slot != SPROP_INVALID_SLOT) {
6570 stobj_set_slot(obj_ins, sprop->slot, dslots_ins, v_ins);
6571 return JSRS_CONTINUE;
6573 ABORT_TRACE("unallocated or non-stub sprop");
6576 JSRecordingStatus
6577 TraceRecorder::native_get(LIns* obj_ins, LIns* pobj_ins, JSScopeProperty* sprop,
6578 LIns*& dslots_ins, LIns*& v_ins)
6580 if (!SPROP_HAS_STUB_GETTER(sprop))
6581 return JSRS_STOP;
6583 if (sprop->slot != SPROP_INVALID_SLOT)
6584 v_ins = stobj_get_slot(pobj_ins, sprop->slot, dslots_ins);
6585 else
6586 v_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6587 return JSRS_CONTINUE;
6590 JS_REQUIRES_STACK void
6591 TraceRecorder::box_jsval(jsval v, LIns*& v_ins)
6593 if (isNumber(v)) {
6594 LIns* args[] = { v_ins, cx_ins };
6595 v_ins = lir->insCall(&js_BoxDouble_ci, args);
6596 guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
6597 OOM_EXIT);
6598 return;
6600 switch (JSVAL_TAG(v)) {
6601 case JSVAL_BOOLEAN:
6602 v_ins = lir->ins2i(LIR_pior, lir->ins2i(LIR_pilsh, v_ins, JSVAL_TAGBITS), JSVAL_BOOLEAN);
6603 return;
6604 case JSVAL_OBJECT:
6605 return;
6606 default:
6607 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
6608 v_ins = lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
6609 return;
6613 JS_REQUIRES_STACK void
6614 TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins, VMSideExit* exit)
6616 if (isNumber(v)) {
6617 // JSVAL_IS_NUMBER(v)
6618 guard(false,
6619 lir->ins_eq0(lir->ins2(LIR_pior,
6620 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
6621 lir->ins2i(LIR_eq,
6622 lir->ins2(LIR_piand, v_ins,
6623 INS_CONST(JSVAL_TAGMASK)),
6624 JSVAL_DOUBLE))),
6625 exit);
6626 LIns* args[] = { v_ins };
6627 v_ins = lir->insCall(&js_UnboxDouble_ci, args);
6628 return;
6630 switch (JSVAL_TAG(v)) {
6631 case JSVAL_BOOLEAN:
6632 guard(true,
6633 lir->ins2i(LIR_eq,
6634 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
6635 JSVAL_BOOLEAN),
6636 exit);
6637 v_ins = lir->ins2i(LIR_ush, v_ins, JSVAL_TAGBITS);
6638 return;
6639 case JSVAL_OBJECT:
6640 if (JSVAL_IS_NULL(v)) {
6641 // JSVAL_NULL maps to type JSVAL_TNULL, so insist that v_ins == 0 here.
6642 guard(true, lir->ins_eq0(v_ins), exit);
6643 } else {
6644 guard(false, lir->ins_eq0(v_ins), exit);
6645 guard(true,
6646 lir->ins2i(LIR_eq,
6647 lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
6648 JSVAL_OBJECT),
6649 exit);
6650 guard(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)),
6651 lir->ins2(LIR_eq,
6652 lir->ins2(LIR_piand,
6653 lir->insLoad(LIR_ldp, v_ins, offsetof(JSObject, classword)),
6654 INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
6655 INS_CONSTPTR(&js_FunctionClass)),
6656 exit);
6658 return;
6659 default:
6660 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
6661 guard(true,
6662 lir->ins2i(LIR_eq,
6663 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
6664 JSVAL_STRING),
6665 exit);
6666 v_ins = lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
6667 return;
6668 }
6669 }
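
/*
 * The boolean tagging round trip, concretely (a sketch; raw_ins and boxed
 * are illustrative names, and the untagged word is the pseudo-boolean
 * encoding that folds undefined in with true/false): box_jsval shifts the
 * word up JSVAL_TAGBITS and ors in the JSVAL_BOOLEAN tag; unbox_jsval
 * guards on the tag and shifts back down. No calls are needed, unlike
 * doubles, which box through js_BoxDouble/js_UnboxDouble.
 */
#if 0
    /* box:   boxed = (raw << JSVAL_TAGBITS) | JSVAL_BOOLEAN */
    LIns* boxed = lir->ins2i(LIR_pior,
                             lir->ins2i(LIR_pilsh, raw_ins, JSVAL_TAGBITS),
                             JSVAL_BOOLEAN);
    /* unbox: raw = boxed >> JSVAL_TAGBITS (after guarding the tag) */
    LIns* raw = lir->ins2i(LIR_ush, boxed, JSVAL_TAGBITS);
#endif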
6671 JS_REQUIRES_STACK JSRecordingStatus
6672 TraceRecorder::getThis(LIns*& this_ins)
6674 JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
6675 if (!thisObj)
6676 ABORT_TRACE_ERROR("js_ComputeThisForName failed");
6678 /*
6679 * In global code, bake in the global object as 'this' object.
6680 */
6681 if (!cx->fp->callee) {
6682 JS_ASSERT(callDepth == 0);
6683 this_ins = INS_CONSTPTR(thisObj);
6685 /*
6686 * We don't have argv[-1] in global code, so we don't update the tracker here.
6687 */
6688 return JSRS_CONTINUE;
6691 jsval& thisv = cx->fp->argv[-1];
6693 /*
6694 * Traces type-specialize between null and objects, so if we currently see a null
6695 * value in argv[-1], this trace will only match if we see null at runtime as well.
6696 * Bake in the global object as 'this' object, updating the tracker as well. We
6697 * can only detect this condition prior to calling js_ComputeThisForFrame, since it
6698 * updates the interpreter's copy of argv[-1].
6699 */
6700 if (JSVAL_IS_NULL(thisv)) {
6701 JS_ASSERT(!JSVAL_IS_PRIMITIVE(thisv));
6702 if (thisObj != globalObj)
6703 ABORT_TRACE("global object was wrapped while recording");
6704 this_ins = INS_CONSTPTR(thisObj);
6705 set(&thisv, this_ins);
6706 return JSRS_CONTINUE;
6708 this_ins = get(&thisv);
6710 /*
6711 * mrbkap says it's not necessary to ever call the thisObject hook if obj is not the global
6712 * object, because the only implicit way to obtain a reference to an object that must be
6713 * wrapped is via the global object. All other sources (API, explicit references) already
6714 * are wrapped as we obtain them through XPConnect. The only exceptions are With objects,
6715 * which have to call the thisObject hook. We don't trace those cases.
6716 */
6718 if (guardClass(JSVAL_TO_OBJECT(thisv), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
6719 ABORT_TRACE("can't trace getThis on With object");
6721 return JSRS_CONTINUE;
6725 LIns*
6726 TraceRecorder::getStringLength(LIns* str_ins)
6728 LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, length));
6730 LIns* masked_len_ins = lir->ins2(LIR_piand,
6731 len_ins,
6732 INS_CONSTWORD(JSSTRING_LENGTH_MASK));
6734 return
6735 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
6736 len_ins,
6737 INS_CONSTWORD(JSSTRFLAG_DEPENDENT))),
6738 masked_len_ins,
6739 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
6740 len_ins,
6741 INS_CONSTWORD(JSSTRFLAG_PREFIX))),
6742 lir->ins2(LIR_piand,
6743 len_ins,
6744 INS_CONSTWORD(JSSTRDEP_LENGTH_MASK)),
6745 masked_len_ins));
6746 }
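
/*
 * Why the nested selects (a sketch): JSString keeps flag bits in the high
 * bits of its length word. A plain string's length is length_word &
 * JSSTRING_LENGTH_MASK; a dependent string (a substring sharing its
 * base's chars) stores a shorter length under JSSTRDEP_LENGTH_MASK,
 * except when it is a prefix, which uses the full-length mask again.
 * The ins_choose chain evaluates exactly this decision tree at run time:
 *
 *   dependent? (prefix? len & JSSTRING_LENGTH_MASK
 *                     : len & JSSTRDEP_LENGTH_MASK)
 *            : len & JSSTRING_LENGTH_MASK
 */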
6748 JS_REQUIRES_STACK bool
6749 TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, VMSideExit* exit)
6751 bool cond = STOBJ_GET_CLASS(obj) == clasp;
6753 LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
6754 class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~JSSLOT_CLASS_MASK_BITS));
6756 char namebuf[32];
6757 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
6758 guard(cond, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
6759 return cond;
6762 JS_REQUIRES_STACK bool
6763 TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, ExitType exitType)
6765 return guardClass(obj, obj_ins, &js_ArrayClass, snapshot(exitType));
6768 JS_REQUIRES_STACK JSRecordingStatus
6769 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, ExitType exitType)
6770 {
6771 /*
6772 * Guard that no object along the prototype chain has any indexed properties which
6773 * might become visible through holes in the array.
6774 */
6775 VMSideExit* exit = snapshot(exitType);
6777 if (js_PrototypeHasIndexedProperties(cx, obj))
6778 return JSRS_STOP;
6780 while ((obj = JSVAL_TO_OBJECT(obj->fslots[JSSLOT_PROTO])) != NULL) {
6781 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
6782 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
6783 LIns* ops_ins;
6784 if (!map_is_native(obj->map, map_ins, ops_ins))
6785 ABORT_TRACE("non-native object involved along prototype chain");
6787 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
6788 "shape");
6789 guard(true,
6790 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
6791 exit);
6793 return JSRS_CONTINUE;
6796 JSRecordingStatus
6797 TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
6799 if (obj == globalObj)
6800 ABORT_TRACE("reference aliases global object");
6801 guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT);
6802 return JSRS_CONTINUE;
6805 JS_REQUIRES_STACK void
6806 TraceRecorder::clearFrameSlotsFromCache()
6808 /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
6809 VM stack might map to different locations on the native stack depending on the
6810 number of arguments of the next call, so we have to make sure we map
6811 them into the cache with the right offsets. */
6812 JSStackFrame* fp = cx->fp;
6813 jsval* vp;
6814 jsval* vpstop;
6815 if (fp->callee) {
6816 vp = &fp->argv[-2];
6817 vpstop = &fp->argv[argSlots(fp)];
6818 while (vp < vpstop)
6819 nativeFrameTracker.set(vp++, (LIns*)0);
6821 vp = &fp->slots[0];
6822 vpstop = &fp->slots[fp->script->nslots];
6823 while (vp < vpstop)
6824 nativeFrameTracker.set(vp++, (LIns*)0);
6827 JS_REQUIRES_STACK JSRecordingStatus
6828 TraceRecorder::record_EnterFrame()
6830 JSStackFrame* fp = cx->fp;
6832 if (++callDepth >= MAX_CALLDEPTH)
6833 ABORT_TRACE("exceeded maximum call depth");
6834 // FIXME: Allow and attempt to inline a single level of recursion until we compile
6835 // recursive calls as independent trees (459301).
6836 if (fp->script == fp->down->script && fp->down->down && fp->down->down->script == fp->script)
6837 ABORT_TRACE("recursive call");
6839 debug_only_v(printf("EnterFrame %s, callDepth=%d\n",
6840 js_AtomToPrintableString(cx, cx->fp->fun->atom),
6841 callDepth);)
6842 debug_only_v(
6843 js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
6844 printf("----\n");)
6845 LIns* void_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
6847 jsval* vp = &fp->argv[fp->argc];
6848 jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
6849 while (vp < vpstop) {
6850 if (vp >= fp->down->regs->sp)
6851 nativeFrameTracker.set(vp, (LIns*)0);
6852 set(vp++, void_ins, true);
6855 vp = &fp->slots[0];
6856 vpstop = vp + fp->script->nfixed;
6857 while (vp < vpstop)
6858 set(vp++, void_ins, true);
6859 return JSRS_CONTINUE;
6862 JS_REQUIRES_STACK JSRecordingStatus
6863 TraceRecorder::record_LeaveFrame()
6865 debug_only_v(
6866 if (cx->fp->fun)
6867 printf("LeaveFrame (back to %s), callDepth=%d\n",
6868 js_AtomToPrintableString(cx, cx->fp->fun->atom),
6869 callDepth);
6871 if (callDepth-- <= 0)
6872 ABORT_TRACE("returned out of a loop we started tracing");
6874 // LeaveFrame gets called after the interpreter popped the frame and
6875 // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
6876 atoms = FrameAtomBase(cx, cx->fp);
6877 set(&stackval(-1), rval_ins, true);
6878 return JSRS_CONTINUE;
6881 JS_REQUIRES_STACK JSRecordingStatus
6882 TraceRecorder::record_JSOP_PUSH()
6884 stack(0, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)));
6885 return JSRS_CONTINUE;
6888 JS_REQUIRES_STACK JSRecordingStatus
6889 TraceRecorder::record_JSOP_POPV()
6891 jsval& rval = stackval(-1);
6892 LIns *rval_ins = get(&rval);
6893 box_jsval(rval, rval_ins);
6895 // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
6896 // frame because POPV appears only in global and eval code and we don't
6897 // trace JSOP_EVAL or leaving the frame where tracing started.
6898 LIns *fp_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp));
6899 lir->insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval));
6900 return JSRS_CONTINUE;
6903 JS_REQUIRES_STACK JSRecordingStatus
6904 TraceRecorder::record_JSOP_ENTERWITH()
6906 return JSRS_STOP;
6909 JS_REQUIRES_STACK JSRecordingStatus
6910 TraceRecorder::record_JSOP_LEAVEWITH()
6912 return JSRS_STOP;
6915 JS_REQUIRES_STACK JSRecordingStatus
6916 TraceRecorder::record_JSOP_RETURN()
6918 /* A return from callDepth 0 terminates the current loop. */
6919 if (callDepth == 0) {
6920 AUDIT(returnLoopExits);
6921 endLoop(traceMonitor);
6922 return JSRS_STOP;
6925 /* If we inlined this function call, make the return value available to the caller code. */
6926 jsval& rval = stackval(-1);
6927 JSStackFrame *fp = cx->fp;
6928 if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
6929 JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
6930 rval_ins = get(&fp->argv[-1]);
6931 } else {
6932 rval_ins = get(&rval);
6934 debug_only_v(printf("returning from %s\n", js_AtomToPrintableString(cx, cx->fp->fun->atom));)
6935 clearFrameSlotsFromCache();
6937 return JSRS_CONTINUE;
6940 JS_REQUIRES_STACK JSRecordingStatus
6941 TraceRecorder::record_JSOP_GOTO()
6942 {
6943 /*
6944 * If we hit a break, end the loop and generate an always-taken loop exit guard.
6945 * For other downward gotos (like if/else) continue recording.
6946 */
6947 jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->fp->regs->pc);
6949 if (sn && SN_TYPE(sn) == SRC_BREAK) {
6950 AUDIT(breakLoopExits);
6951 endLoop(traceMonitor);
6952 return JSRS_STOP;
6954 return JSRS_CONTINUE;
6957 JS_REQUIRES_STACK JSRecordingStatus
6958 TraceRecorder::record_JSOP_IFEQ()
6960 trackCfgMerges(cx->fp->regs->pc);
6961 return ifop();
6964 JS_REQUIRES_STACK JSRecordingStatus
6965 TraceRecorder::record_JSOP_IFNE()
6967 return ifop();
6970 JS_REQUIRES_STACK JSRecordingStatus
6971 TraceRecorder::record_JSOP_ARGUMENTS()
6973 #if 1
6974 ABORT_TRACE("can't trace arguments yet");
6975 #else
6976 LIns* args[] = { cx_ins };
6977 LIns* a_ins = lir->insCall(&js_Arguments_ci, args);
6978 guard(false, lir->ins_eq0(a_ins), OOM_EXIT);
6979 stack(0, a_ins);
6980 return JSRS_CONTINUE;
6981 #endif
6984 JS_REQUIRES_STACK JSRecordingStatus
6985 TraceRecorder::record_JSOP_DUP()
6987 stack(0, get(&stackval(-1)));
6988 return JSRS_CONTINUE;
6991 JS_REQUIRES_STACK JSRecordingStatus
6992 TraceRecorder::record_JSOP_DUP2()
6994 stack(0, get(&stackval(-2)));
6995 stack(1, get(&stackval(-1)));
6996 return JSRS_CONTINUE;
6999 JS_REQUIRES_STACK JSRecordingStatus
7000 TraceRecorder::record_JSOP_SWAP()
7002 jsval& l = stackval(-2);
7003 jsval& r = stackval(-1);
7004 LIns* l_ins = get(&l);
7005 LIns* r_ins = get(&r);
7006 set(&r, l_ins);
7007 set(&l, r_ins);
7008 return JSRS_CONTINUE;
7011 JS_REQUIRES_STACK JSRecordingStatus
7012 TraceRecorder::record_JSOP_PICK()
7014 jsval* sp = cx->fp->regs->sp;
7015 jsint n = cx->fp->regs->pc[1];
7016 JS_ASSERT(sp - (n+1) >= StackBase(cx->fp));
7017 LIns* top = get(sp - (n+1));
7018 for (jsint i = 0; i < n; ++i)
7019 set(sp - (n+1) + i, get(sp - n + i));
7020 set(&sp[-1], top);
7021 return JSRS_CONTINUE;
7022 }
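
/*
 * JSOP_PICK in pictures (a sketch): with n == 2 and the recorded stack
 * [..., a, b, c] (c on top), the loop shifts b and c down one slot and
 * the saved a lands on top, giving [..., b, c, a] -- the same rotation
 * the interpreter performs on the jsvals, replayed here on their LIns*.
 */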
7024 JS_REQUIRES_STACK JSRecordingStatus
7025 TraceRecorder::record_JSOP_SETCONST()
7027 return JSRS_STOP;
7030 JS_REQUIRES_STACK JSRecordingStatus
7031 TraceRecorder::record_JSOP_BITOR()
7033 return binary(LIR_or);
7036 JS_REQUIRES_STACK JSRecordingStatus
7037 TraceRecorder::record_JSOP_BITXOR()
7039 return binary(LIR_xor);
7042 JS_REQUIRES_STACK JSRecordingStatus
7043 TraceRecorder::record_JSOP_BITAND()
7045 return binary(LIR_and);
7048 JS_REQUIRES_STACK JSRecordingStatus
7049 TraceRecorder::record_JSOP_EQ()
7051 return equality(false, true);
7054 JS_REQUIRES_STACK JSRecordingStatus
7055 TraceRecorder::record_JSOP_NE()
7057 return equality(true, true);
7060 JS_REQUIRES_STACK JSRecordingStatus
7061 TraceRecorder::record_JSOP_LT()
7063 return relational(LIR_flt, true);
7066 JS_REQUIRES_STACK JSRecordingStatus
7067 TraceRecorder::record_JSOP_LE()
7069 return relational(LIR_fle, true);
7072 JS_REQUIRES_STACK JSRecordingStatus
7073 TraceRecorder::record_JSOP_GT()
7075 return relational(LIR_fgt, true);
7078 JS_REQUIRES_STACK JSRecordingStatus
7079 TraceRecorder::record_JSOP_GE()
7081 return relational(LIR_fge, true);
7084 JS_REQUIRES_STACK JSRecordingStatus
7085 TraceRecorder::record_JSOP_LSH()
7087 return binary(LIR_lsh);
7090 JS_REQUIRES_STACK JSRecordingStatus
7091 TraceRecorder::record_JSOP_RSH()
7093 return binary(LIR_rsh);
7096 JS_REQUIRES_STACK JSRecordingStatus
7097 TraceRecorder::record_JSOP_URSH()
7099 return binary(LIR_ush);
7102 JS_REQUIRES_STACK JSRecordingStatus
7103 TraceRecorder::record_JSOP_ADD()
7105 jsval& r = stackval(-1);
7106 jsval& l = stackval(-2);
7108 if (!JSVAL_IS_PRIMITIVE(l)) {
7109 ABORT_IF_XML(l);
7110 if (!JSVAL_IS_PRIMITIVE(r)) {
7111 ABORT_IF_XML(r);
7112 return call_imacro(add_imacros.obj_obj);
7114 return call_imacro(add_imacros.obj_any);
7116 if (!JSVAL_IS_PRIMITIVE(r)) {
7117 ABORT_IF_XML(r);
7118 return call_imacro(add_imacros.any_obj);
7121 if (JSVAL_IS_STRING(l) || JSVAL_IS_STRING(r)) {
7122 LIns* args[] = { stringify(r), stringify(l), cx_ins };
7123 LIns* concat = lir->insCall(&js_ConcatStrings_ci, args);
7124 guard(false, lir->ins_eq0(concat), OOM_EXIT);
7125 set(&l, concat);
7126 return JSRS_CONTINUE;
7129 return binary(LIR_fadd);
7132 JS_REQUIRES_STACK JSRecordingStatus
7133 TraceRecorder::record_JSOP_SUB()
7135 return binary(LIR_fsub);
7138 JS_REQUIRES_STACK JSRecordingStatus
7139 TraceRecorder::record_JSOP_MUL()
7141 return binary(LIR_fmul);
7144 JS_REQUIRES_STACK JSRecordingStatus
7145 TraceRecorder::record_JSOP_DIV()
7147 return binary(LIR_fdiv);
7150 JS_REQUIRES_STACK JSRecordingStatus
7151 TraceRecorder::record_JSOP_MOD()
7153 jsval& r = stackval(-1);
7154 jsval& l = stackval(-2);
7156 if (!JSVAL_IS_PRIMITIVE(l)) {
7157 ABORT_IF_XML(l);
7158 if (!JSVAL_IS_PRIMITIVE(r)) {
7159 ABORT_IF_XML(r);
7160 return call_imacro(binary_imacros.obj_obj);
7162 return call_imacro(binary_imacros.obj_any);
7164 if (!JSVAL_IS_PRIMITIVE(r)) {
7165 ABORT_IF_XML(r);
7166 return call_imacro(binary_imacros.any_obj);
7169 if (isNumber(l) && isNumber(r)) {
7170 LIns* l_ins = get(&l);
7171 LIns* r_ins = get(&r);
7172 LIns* x;
7173 /* We can't demote this in a filter since we need the actual values of l and r. */
7174 if (isPromote(l_ins) && isPromote(r_ins) && asNumber(l) >= 0 && asNumber(r) > 0) {
7175 LIns* args[] = { ::demote(lir, r_ins), ::demote(lir, l_ins) };
7176 x = lir->insCall(&js_imod_ci, args);
7177 guard(false, lir->ins2(LIR_eq, x, lir->insImm(-1)), BRANCH_EXIT);
7178 x = lir->ins1(LIR_i2f, x);
7179 } else {
7180 LIns* args[] = { r_ins, l_ins };
7181 x = lir->insCall(&js_dmod_ci, args);
7183 set(&l, x);
7184 return JSRS_CONTINUE;
7186 return JSRS_STOP;
7187 }
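
/*
 * The two paths above, on concrete inputs (a sketch): 7 % 3 with both
 * sides demotable records a call to js_imod (result 1) plus a guard that
 * the result is not -1, which is assumed here to be the sentinel js_imod
 * returns for cases it cannot handle (such as a zero divisor); a genuine
 * result cannot be -1 on this path since l >= 0 and r > 0. 7.5 % 3, or a
 * negative left side, records js_dmod on doubles instead, with no guard.
 */
#if 0
    LIns* args[] = { ::demote(lir, r_ins), ::demote(lir, l_ins) };
    LIns* x = lir->insCall(&js_imod_ci, args);               /* 7 % 3 -> 1 */
    guard(false, lir->ins2(LIR_eq, x, lir->insImm(-1)), BRANCH_EXIT);
    x = lir->ins1(LIR_i2f, x);                               /* back to double */
#endif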
7189 JS_REQUIRES_STACK JSRecordingStatus
7190 TraceRecorder::record_JSOP_NOT()
7192 jsval& v = stackval(-1);
7193 if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
7194 set(&v, lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)));
7195 return JSRS_CONTINUE;
7197 if (isNumber(v)) {
7198 LIns* v_ins = get(&v);
7199 set(&v, lir->ins2(LIR_or, lir->ins2(LIR_feq, v_ins, lir->insImmq(0)),
7200 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, v_ins))));
7201 return JSRS_CONTINUE;
7203 if (JSVAL_TAG(v) == JSVAL_OBJECT) {
7204 set(&v, lir->ins_eq0(get(&v)));
7205 return JSRS_CONTINUE;
7207 JS_ASSERT(JSVAL_IS_STRING(v));
7208 set(&v, lir->ins_eq0(lir->ins2(LIR_piand,
7209 lir->insLoad(LIR_ldp, get(&v), (int)offsetof(JSString, length)),
7210 INS_CONSTPTR(reinterpret_cast<void *>(JSSTRING_LENGTH_MASK)))));
7211 return JSRS_CONTINUE;
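/*
 * For numbers, the code above computes !v as (v == 0) | !(v == v): the
 * self-comparison is false exactly for NaN, so the OR is true for +0, -0 and
 * NaN, matching ToBoolean. Worked example:
 *
 *     !NaN  ->  (NaN == 0) | !(NaN == NaN)  ->  0 | 1  ->  true
 *
 * The string case masks the length word with JSSTRING_LENGTH_MASK before the
 * eq0 test because JSString evidently keeps flag bits in the upper bits of
 * that word; only a length of zero makes the string falsy.
 */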
7214 JS_REQUIRES_STACK JSRecordingStatus
7215 TraceRecorder::record_JSOP_BITNOT()
7217 return unary(LIR_not);
7220 JS_REQUIRES_STACK JSRecordingStatus
7221 TraceRecorder::record_JSOP_NEG()
7223 jsval& v = stackval(-1);
7225 if (!JSVAL_IS_PRIMITIVE(v)) {
7226 ABORT_IF_XML(v);
7227 return call_imacro(unary_imacros.sign);
7230 if (isNumber(v)) {
7231 LIns* a = get(&v);
7233         /* If we're a promoted integer, we have to watch out for 0s since -0 is a double.
7234            Only follow this path if we're not an integer that's 0 and we're not a double
7235            that's zero.
7236          */
7237 if (isPromoteInt(a) &&
7238 (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
7239 (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
7240 -asNumber(v) == (int)-asNumber(v)) {
7241 a = lir->ins1(LIR_neg, ::demote(lir, a));
7242 if (!a->isconst()) {
7243 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
7244 lir->insGuard(LIR_xt, lir->ins1(LIR_ov, a),
7245 createGuardRecord(exit));
7246 lir->insGuard(LIR_xt, lir->ins2(LIR_eq, a, lir->insImm(0)),
7247 createGuardRecord(exit));
7249 a = lir->ins1(LIR_i2f, a);
7250 } else {
7251 a = lir->ins1(LIR_fneg, a);
7254 set(&v, a);
7255 return JSRS_CONTINUE;
7258 if (JSVAL_IS_NULL(v)) {
7259 set(&v, lir->insImmf(-0.0));
7260 return JSRS_CONTINUE;
7263 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_TAG(v) == JSVAL_BOOLEAN);
7265 LIns* args[] = { get(&v), cx_ins };
7266 set(&v, lir->ins1(LIR_fneg,
7267 lir->insCall(JSVAL_IS_STRING(v)
7268 ? &js_StringToNumber_ci
7269 : &js_BooleanOrUndefinedToNumber_ci,
7270 args)));
7271 return JSRS_CONTINUE;
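/*
 * The integer negation path above needs two runtime guards: LIR_ov catches
 * the INT_MIN case, whose negation overflows, and the eq-0 guard catches a
 * runtime operand of 0, whose negation is -0 and therefore a double. Both
 * guards are skipped when the negation folded to a constant, since the
 * record-time checks already rejected constant 0 and non-int results.
 * Illustrative: -x with x recorded as the int 5 emits LIR_neg plus guards
 * plus i2f, while -x with x a double simply emits LIR_fneg.
 */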
7274 JS_REQUIRES_STACK JSRecordingStatus
7275 TraceRecorder::record_JSOP_POS()
7277 jsval& v = stackval(-1);
7279 if (!JSVAL_IS_PRIMITIVE(v)) {
7280 ABORT_IF_XML(v);
7281 return call_imacro(unary_imacros.sign);
7284 if (isNumber(v))
7285 return JSRS_CONTINUE;
7287 if (JSVAL_IS_NULL(v)) {
7288 set(&v, lir->insImmq(0));
7289 return JSRS_CONTINUE;
7292 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_TAG(v) == JSVAL_BOOLEAN);
7294 LIns* args[] = { get(&v), cx_ins };
7295 set(&v, lir->insCall(JSVAL_IS_STRING(v)
7296 ? &js_StringToNumber_ci
7297 : &js_BooleanOrUndefinedToNumber_ci,
7298 args));
7299 return JSRS_CONTINUE;
7302 JS_REQUIRES_STACK JSRecordingStatus
7303 TraceRecorder::record_JSOP_PRIMTOP()
7305 // Either this opcode does nothing or we couldn't have traced here, because
7306 // we'd have thrown an exception -- so do nothing if we actually hit this.
7307 return JSRS_CONTINUE;
7310 JS_REQUIRES_STACK JSRecordingStatus
7311 TraceRecorder::record_JSOP_OBJTOP()
7313 jsval& v = stackval(-1);
7314 ABORT_IF_XML(v);
7315 return JSRS_CONTINUE;
7318 JSBool
7319 js_Array(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval);
7321 JSBool
7322 js_Object(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
7324 JSBool
7325 js_Date(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
7327 JSRecordingStatus
7328 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
7330 jsval pval;
7332 if (!OBJ_GET_PROPERTY(cx, ctor,
7333 ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
7334 &pval)) {
7335 ABORT_TRACE_ERROR("error getting prototype from constructor");
7337 if (JSVAL_TAG(pval) != JSVAL_OBJECT)
7338 ABORT_TRACE("got primitive prototype from constructor");
7339 #ifdef DEBUG
7340 JSBool ok, found;
7341 uintN attrs;
7342 ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
7343 JS_ASSERT(ok);
7344 JS_ASSERT(found);
7345 JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
7346 #endif
7347 proto_ins = INS_CONSTPTR(JSVAL_TO_OBJECT(pval));
7348 return JSRS_CONTINUE;
7351 JSRecordingStatus
7352 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
7354 JSObject* proto;
7355 if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &proto))
7356 ABORT_TRACE_ERROR("error in js_GetClassPrototype");
7357 proto_ins = INS_CONSTPTR(proto);
7358 return JSRS_CONTINUE;
7361 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSTraceableNative*)1)
7363 JSRecordingStatus
7364 TraceRecorder::newString(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
7366 JS_ASSERT(argc == 1);
7368 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
7369 ABORT_IF_XML(argv[0]);
7370 return call_imacro(new_imacros.String);
7373 LIns* proto_ins;
7374 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
7376 LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
7377 LIns* obj_ins = lir->insCall(&js_String_tn_ci, args);
7378 guard(false, lir->ins_eq0(obj_ins), OOM_EXIT);
7380 set(rval, obj_ins);
7381 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7382 return JSRS_CONTINUE;
7385 JSRecordingStatus
7386 TraceRecorder::newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
7388 LIns *proto_ins;
7389 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
7391 LIns *arr_ins;
7392 if (argc == 0 || (argc == 1 && JSVAL_IS_NUMBER(argv[0]))) {
7393 // arr_ins = js_NewEmptyArray(cx, Array.prototype)
7394 LIns *args[] = { proto_ins, cx_ins };
7395 arr_ins = lir->insCall(&js_NewEmptyArray_ci, args);
7396 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
7397 if (argc == 1) {
7398             // arr_ins.fslots[JSSLOT_ARRAY_LENGTH] = length
7399 lir->insStorei(f2i(get(argv)), // FIXME: is this 64-bit safe?
7400 arr_ins,
7401 offsetof(JSObject, fslots) + JSSLOT_ARRAY_LENGTH * sizeof(jsval));
7403 } else {
7404 // arr_ins = js_NewUninitializedArray(cx, Array.prototype, argc)
7405 LIns *args[] = { INS_CONST(argc), proto_ins, cx_ins };
7406 arr_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
7407 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
7409 // arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
7410 LIns *dslots_ins = NULL;
7411 for (uint32 i = 0; i < argc && !lirbuf->outOMem(); i++) {
7412 LIns *elt_ins = get(argv + i);
7413 box_jsval(argv[i], elt_ins);
7414 stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins, "set_array_elt");
7417 if (argc > 0)
7418 stobj_set_fslot(arr_ins, JSSLOT_ARRAY_COUNT, INS_CONST(argc), "set_array_count");
7421 set(rval, arr_ins);
7422 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7423 return JSRS_CONTINUE;
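/*
 * Layout reminder for newArray above: a dense array keeps its length in
 * fslots[JSSLOT_ARRAY_LENGTH] and its elements in the separately allocated
 * dslots vector, so the two constructor shapes differ only in which slots
 * get written. Roughly:
 *
 *     new Array(7)        ->  js_NewEmptyArray, then store 7 to the length fslot
 *     new Array(1, 2, 3)  ->  js_NewUninitializedArray(argc = 3), then one
 *                             boxed store per element plus the count fslot
 */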
7426 JS_REQUIRES_STACK JSRecordingStatus
7427 TraceRecorder::emitNativeCall(JSTraceableNative* known, uintN argc, LIns* args[])
7429 bool constructing = known->flags & JSTN_CONSTRUCTOR;
7431 if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
7432 // This needs to capture the pre-call state of the stack. So do not set
7433 // pendingTraceableNative before taking this snapshot.
7434 JS_ASSERT(!pendingTraceableNative);
7436 // Take snapshot for deep LeaveTree and store it in cx->bailExit.
7437 // If we are calling a slow native, add information to the side exit
7438 // for SynthesizeSlowNativeFrame.
7439 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
7440 JSObject* funobj = JSVAL_TO_OBJECT(stackval(0 - (2 + argc)));
7441 if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, funobj)))
7442 exit->setNativeCallee(funobj, constructing);
7443 lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
7445 // Tell nanojit not to discard or defer stack writes before this call.
7446 LIns* guardRec = createGuardRecord(exit);
7447 lir->insGuard(LIR_xbarrier, guardRec, guardRec);
7450 LIns* res_ins = lir->insCall(known->builtin, args);
7451 rval_ins = res_ins;
7452 switch (JSTN_ERRTYPE(known)) {
7453 case FAIL_NULL:
7454 guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
7455 break;
7456 case FAIL_NEG:
7457 res_ins = lir->ins1(LIR_i2f, res_ins);
7458 guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmq(0)), OOM_EXIT);
7459 break;
7460 case FAIL_VOID:
7461 guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
7462 break;
7463 case FAIL_COOKIE:
7464 guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
7465 break;
7466 default:;
7469 set(&stackval(0 - (2 + argc)), res_ins);
7471     /*
7472      * The return value will be processed by NativeCallComplete since
7473      * we have to know the actual return value type for calls that return
7474      * jsval (like Array_p_pop).
7475      */
7476 pendingTraceableNative = known;
7478 return JSRS_CONTINUE;
7481 /*
7482  * Check whether we have a specialized implementation for this native invocation.
7483  */
7484 JS_REQUIRES_STACK JSRecordingStatus
7485 TraceRecorder::callTraceableNative(JSFunction* fun, uintN argc, bool constructing)
7487 JSTraceableNative* known = FUN_TRCINFO(fun);
7488 JS_ASSERT(known && (JSFastNative)fun->u.n.native == known->native);
7490 JSStackFrame* fp = cx->fp;
7491 jsbytecode *pc = fp->regs->pc;
7493 jsval& fval = stackval(0 - (2 + argc));
7494 jsval& tval = stackval(0 - (1 + argc));
7496 LIns* this_ins = get(&tval);
7498 LIns* args[nanojit::MAXARGS];
7499 do {
7500 if (((known->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
7501 continue;
7503 uintN knownargc = strlen(known->argtypes);
7504 if (argc != knownargc)
7505 continue;
7507 intN prefixc = strlen(known->prefix);
7508 JS_ASSERT(prefixc <= 3);
7509 LIns** argp = &args[argc + prefixc - 1];
7510 char argtype;
7512 #if defined _DEBUG
7513 memset(args, 0xCD, sizeof(args));
7514 #endif
7516 uintN i;
7517 for (i = prefixc; i--; ) {
7518 argtype = known->prefix[i];
7519 if (argtype == 'C') {
7520 *argp = cx_ins;
7521 } else if (argtype == 'T') { /* this, as an object */
7522 if (JSVAL_IS_PRIMITIVE(tval))
7523 goto next_specialization;
7524 *argp = this_ins;
7525 } else if (argtype == 'S') { /* this, as a string */
7526 if (!JSVAL_IS_STRING(tval))
7527 goto next_specialization;
7528 *argp = this_ins;
7529 } else if (argtype == 'f') {
7530 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval));
7531 } else if (argtype == 'p') {
7532 CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval), *argp));
7533 } else if (argtype == 'R') {
7534 *argp = INS_CONSTPTR(cx->runtime);
7535 } else if (argtype == 'P') {
7536 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
7537 // JSOP_GETELEM imacro (bug 476559).
7538 if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
7539 *argp = INS_CONSTPTR(fp->imacpc);
7540 else
7541 *argp = INS_CONSTPTR(pc);
7542 } else if (argtype == 'D') { /* this, as a number */
7543 if (!isNumber(tval))
7544 goto next_specialization;
7545 *argp = this_ins;
7546 } else {
7547 JS_NOT_REACHED("unknown prefix arg type");
7549 argp--;
7552 for (i = knownargc; i--; ) {
7553 jsval& arg = stackval(0 - (i + 1));
7554 *argp = get(&arg);
7556 argtype = known->argtypes[i];
7557 if (argtype == 'd' || argtype == 'i') {
7558 if (!isNumber(arg))
7559 goto next_specialization;
7560 if (argtype == 'i')
7561 *argp = f2i(*argp);
7562 } else if (argtype == 'o') {
7563 if (JSVAL_IS_PRIMITIVE(arg))
7564 goto next_specialization;
7565 } else if (argtype == 's') {
7566 if (!JSVAL_IS_STRING(arg))
7567 goto next_specialization;
7568 } else if (argtype == 'r') {
7569 if (!VALUE_IS_REGEXP(cx, arg))
7570 goto next_specialization;
7571 } else if (argtype == 'f') {
7572 if (!VALUE_IS_FUNCTION(cx, arg))
7573 goto next_specialization;
7574 } else if (argtype == 'v') {
7575 box_jsval(arg, *argp);
7576 } else {
7577 goto next_specialization;
7579 argp--;
7581 #if defined _DEBUG
7582 JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
7583 #endif
7584 return emitNativeCall(known, argc, args);
7586 next_specialization:;
7587 } while ((known++)->flags & JSTN_MORE);
7589 return JSRS_STOP;
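/*
 * The specialization loop above is driven by two strings on each
 * JSTraceableNative. 'prefix' describes implicit leading arguments: 'C' is
 * cx, 'T'/'S'/'D' are this as object/string/number, 'f' the callee object,
 * 'p' the callee's .prototype, 'R' the runtime and 'P' the pc. 'argtypes'
 * describes the stack arguments: 'd'/'i' number, 'o' object, 's' string,
 * 'r' regexp, 'f' function, 'v' boxed jsval. As a hypothetical example, an
 * entry with prefix "TC" and argtypes "s" matches obj.method(str) and fills
 * args in LIR's reverse order as { str_ins, this_ins, cx_ins }.
 */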
7592 JS_REQUIRES_STACK JSRecordingStatus
7593 TraceRecorder::callNative(uintN argc, JSOp mode)
7595 LIns* args[5];
7597 JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_APPLY);
7599 jsval* vp = &stackval(0 - (2 + argc));
7600 JSObject* funobj = JSVAL_TO_OBJECT(vp[0]);
7601 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, funobj);
7603 if (fun->flags & JSFUN_TRACEABLE) {
7604 JSRecordingStatus status;
7605 if ((status = callTraceableNative(fun, argc, mode == JSOP_NEW)) != JSRS_STOP)
7606 return status;
7609 JSFastNative native = (JSFastNative)fun->u.n.native;
7610 if (native == js_fun_apply || native == js_fun_call)
7611 ABORT_TRACE("trying to call native apply or call");
7613 // Allocate the vp vector and emit code to root it.
7614 uintN vplen = 2 + JS_MAX(argc, FUN_MINARGS(fun)) + fun->u.n.extra;
7615 if (!(fun->flags & JSFUN_FAST_NATIVE))
7616 vplen++; // slow native return value slot
7617 lir->insStorei(INS_CONST(vplen), cx_ins, offsetof(JSContext, nativeVpLen));
7618 LIns* invokevp_ins = lir->insAlloc(vplen * sizeof(jsval));
7619 lir->insStorei(invokevp_ins, cx_ins, offsetof(JSContext, nativeVp));
7621 // vp[0] is the callee.
7622 lir->insStorei(INS_CONSTWORD(OBJECT_TO_JSVAL(funobj)), invokevp_ins, 0);
7624 // Calculate |this|.
7625 LIns* this_ins;
7626 if (mode == JSOP_NEW) {
7627 JSClass* clasp = fun->u.n.clasp;
7628 JS_ASSERT(clasp != &js_SlowArrayClass);
7629 if (!clasp)
7630 clasp = &js_ObjectClass;
7631 JS_ASSERT(((jsuword) clasp & 3) == 0);
7633 // Abort on |new Function|. js_NewInstance would allocate a regular-
7634 // sized JSObject, not a Function-sized one. (The Function ctor would
7635 // deep-bail anyway but let's not go there.)
7636 if (clasp == &js_FunctionClass)
7637 ABORT_TRACE("new Function");
7639 if (clasp->getObjectOps)
7640 ABORT_TRACE("new with non-native ops");
7642 args[0] = INS_CONSTPTR(funobj);
7643 args[1] = INS_CONSTPTR(clasp);
7644 args[2] = cx_ins;
7645 newobj_ins = lir->insCall(&js_NewInstance_ci, args);
7646 guard(false, lir->ins_eq0(newobj_ins), OOM_EXIT);
7647 this_ins = newobj_ins; // boxing an object is a no-op
7648 } else if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
7649 this_ins = INS_CONSTWORD(OBJECT_TO_JSVAL(OBJ_GET_PARENT(cx, funobj)));
7650 } else {
7651 this_ins = get(&vp[1]);
7652         /*
7653          * For fast natives, 'null' or primitives are fine as 'this' values.
7654          * For slow natives we have to ensure the object is substituted for the
7655          * appropriate global object or boxed object value. JSOP_NEW allocates its
7656          * own object, so it's guaranteed to have a valid 'this' value.
7657          */
7658 if (!(fun->flags & JSFUN_FAST_NATIVE)) {
7659 if (JSVAL_IS_NULL(vp[1])) {
7660 JSObject* thisObj = js_ComputeThis(cx, JS_FALSE, vp + 2);
7661 if (!thisObj)
7662                 ABORT_TRACE_ERROR("error in js_ComputeThis");
7663 this_ins = INS_CONSTPTR(thisObj);
7664 } else if (!JSVAL_IS_OBJECT(vp[1])) {
7665 ABORT_TRACE("slow native(primitive, args)");
7666 } else {
7667 if (guardClass(JSVAL_TO_OBJECT(vp[1]), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
7668 ABORT_TRACE("can't trace slow native invocation on With object");
7670 this_ins = lir->ins_choose(lir->ins_eq0(stobj_get_fslot(this_ins, JSSLOT_PARENT)),
7671 INS_CONSTPTR(globalObj),
7672 this_ins);
7675 box_jsval(vp[1], this_ins);
7677 lir->insStorei(this_ins, invokevp_ins, 1 * sizeof(jsval));
7679 // Populate argv.
7680 for (uintN n = 2; n < 2 + argc; n++) {
7681 LIns* i = get(&vp[n]);
7682 box_jsval(vp[n], i);
7683 lir->insStorei(i, invokevp_ins, n * sizeof(jsval));
7685 // For a very long argument list we might run out of LIR space, so
7686 // check inside the loop.
7687 if (lirbuf->outOMem())
7688 ABORT_TRACE("out of memory in argument list");
7691 // Populate extra slots, including the return value slot for a slow native.
7692 if (2 + argc < vplen) {
7693 LIns* undef_ins = INS_CONSTWORD(JSVAL_VOID);
7694 for (uintN n = 2 + argc; n < vplen; n++) {
7695 lir->insStorei(undef_ins, invokevp_ins, n * sizeof(jsval));
7697 if (lirbuf->outOMem())
7698 ABORT_TRACE("out of memory in extra slots");
7702 // Set up arguments for the JSNative or JSFastNative.
7703 uint32 types;
7704 if (fun->flags & JSFUN_FAST_NATIVE) {
7705 if (mode == JSOP_NEW)
7706 ABORT_TRACE("untraceable fast native constructor");
7707 native_rval_ins = invokevp_ins;
7708 args[0] = invokevp_ins;
7709 args[1] = lir->insImm(argc);
7710 args[2] = cx_ins;
7711 types = ARGSIZE_LO | ARGSIZE_LO << 2 | ARGSIZE_LO << 4 | ARGSIZE_LO << 6;
7712 } else {
7713 native_rval_ins = lir->ins2i(LIR_piadd, invokevp_ins, int32_t((vplen - 1) * sizeof(jsval)));
7714 args[0] = native_rval_ins;
7715 args[1] = lir->ins2i(LIR_piadd, invokevp_ins, int32_t(2 * sizeof(jsval)));
7716 args[2] = lir->insImm(argc);
7717 args[3] = this_ins;
7718 args[4] = cx_ins;
7719 types = ARGSIZE_LO | ARGSIZE_LO << 2 | ARGSIZE_LO << 4 | ARGSIZE_LO << 6 |
7720 ARGSIZE_LO << 8 | ARGSIZE_LO << 10;
7723 // Generate CallInfo and a JSTraceableNative structure on the fly. Do not
7724 // use JSTN_UNBOX_AFTER for mode JSOP_NEW because record_NativeCallComplete
7725 // unboxes the result specially.
7727 CallInfo* ci = (CallInfo*) lir->insSkip(sizeof(struct CallInfo))->payload();
7728 ci->_address = uintptr_t(fun->u.n.native);
7729 ci->_cse = ci->_fold = 0;
7730 ci->_abi = ABI_CDECL;
7731 ci->_argtypes = types;
7732 #ifdef DEBUG
7733 ci->_name = JS_GetFunctionName(fun);
7734 #endif
7736 // Generate a JSTraceableNative structure on the fly.
7737 generatedTraceableNative->builtin = ci;
7738 generatedTraceableNative->native = (JSFastNative)fun->u.n.native;
7739 generatedTraceableNative->flags = FAIL_STATUS | ((mode == JSOP_NEW)
7740 ? JSTN_CONSTRUCTOR
7741 : JSTN_UNBOX_AFTER);
7743 generatedTraceableNative->prefix = generatedTraceableNative->argtypes = NULL;
7745 // argc is the original argc here. It is used to calculate where to place
7746 // the return value.
7747 JSRecordingStatus status;
7748 if ((status = emitNativeCall(generatedTraceableNative, argc, args)) != JSRS_CONTINUE)
7749 return status;
7751 // Unroot the vp.
7752 lir->insStorei(INS_CONSTPTR(NULL), cx_ins, offsetof(JSContext, nativeVp));
7753 return JSRS_CONTINUE;
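/*
 * Shape of the invokevp vector built above (slots past 2 + argc are
 * pre-filled with JSVAL_VOID):
 *
 *     invokevp[0]                callee (funobj)
 *     invokevp[1]                boxed 'this'
 *     invokevp[2 .. 2+argc-1]    boxed actual arguments
 *     invokevp[vplen - 1]        return-value slot (slow natives only)
 *
 * cx->nativeVp and cx->nativeVpLen keep the vector rooted for the GC while
 * the native runs; the final store of NULL above unroots it again.
 */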
7756 JS_REQUIRES_STACK JSRecordingStatus
7757 TraceRecorder::functionCall(uintN argc, JSOp mode)
7759 jsval& fval = stackval(0 - (2 + argc));
7760 JS_ASSERT(&fval >= StackBase(cx->fp));
7762 if (!VALUE_IS_FUNCTION(cx, fval))
7763 ABORT_TRACE("callee is not a function");
7765 jsval& tval = stackval(0 - (1 + argc));
7767     /*
7768      * If callee is not constant, it's a shapeless call and we have to guard
7769      * explicitly that we will get this callee again at runtime.
7770      */
7771 if (!get(&fval)->isconst())
7772 CHECK_STATUS(guardCallee(fval));
7774     /*
7775      * Require that the callee be a function object, to avoid guarding on its
7776      * class here. If the callee and this were pushed by JSOP_CALLNAME or
7777      * JSOP_CALLPROP, we know the callee is a *particular* function, since these
7778      * hit the property cache and guard on the object (this) in which the callee
7779      * was found. So it's sufficient to test here that the particular function
7780      * is interpreted, rather than guard on that condition.
7781      *
7782      * Bytecode sequences that push shapeless callees must guard on the callee
7783      * class being Function and the function being interpreted.
7784      */
7785 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));
7787 if (FUN_INTERPRETED(fun)) {
7788 if (mode == JSOP_NEW) {
7789 LIns* args[] = { get(&fval), INS_CONSTPTR(&js_ObjectClass), cx_ins };
7790 LIns* tv_ins = lir->insCall(&js_NewInstance_ci, args);
7791 guard(false, lir->ins_eq0(tv_ins), OOM_EXIT);
7792 set(&tval, tv_ins);
7794 return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
7797 if (FUN_SLOW_NATIVE(fun)) {
7798 JSNative native = fun->u.n.native;
7799 jsval* argv = &tval + 1;
7800 if (native == js_Array)
7801 return newArray(JSVAL_TO_OBJECT(fval), argc, argv, &fval);
7802 if (native == js_String && argc == 1) {
7803 if (mode == JSOP_NEW)
7804 return newString(JSVAL_TO_OBJECT(fval), 1, argv, &fval);
7805 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
7806 ABORT_IF_XML(argv[0]);
7807 return call_imacro(call_imacros.String);
7809 set(&fval, stringify(argv[0]));
7810 pendingTraceableNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
7811 return JSRS_CONTINUE;
7815 return callNative(argc, mode);
7818 JS_REQUIRES_STACK JSRecordingStatus
7819 TraceRecorder::record_JSOP_NEW()
7821 uintN argc = GET_ARGC(cx->fp->regs->pc);
7822 cx->fp->assertValidStackDepth(argc + 2);
7823 return functionCall(argc, JSOP_NEW);
7826 JS_REQUIRES_STACK JSRecordingStatus
7827 TraceRecorder::record_JSOP_DELNAME()
7829 return JSRS_STOP;
7832 JS_REQUIRES_STACK JSRecordingStatus
7833 TraceRecorder::record_JSOP_DELPROP()
7835 return JSRS_STOP;
7838 JS_REQUIRES_STACK JSRecordingStatus
7839 TraceRecorder::record_JSOP_DELELEM()
7841 return JSRS_STOP;
7844 JS_REQUIRES_STACK JSRecordingStatus
7845 TraceRecorder::record_JSOP_TYPEOF()
7847 jsval& r = stackval(-1);
7848 LIns* type;
7849 if (JSVAL_IS_STRING(r)) {
7850 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]));
7851 } else if (isNumber(r)) {
7852 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]));
7853 } else if (VALUE_IS_FUNCTION(cx, r)) {
7854 type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]));
7855 } else {
7856 LIns* args[] = { get(&r), cx_ins };
7857 if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
7858 // We specialize identically for boolean and undefined. We must not have a hole here.
7859 // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
7860 JS_ASSERT(r == JSVAL_TRUE || r == JSVAL_FALSE || r == JSVAL_VOID);
7861 type = lir->insCall(&js_TypeOfBoolean_ci, args);
7862 } else {
7863 JS_ASSERT(JSVAL_TAG(r) == JSVAL_OBJECT);
7864 type = lir->insCall(&js_TypeOfObject_ci, args);
7867 set(&r, type);
7868 return JSRS_CONTINUE;
7871 JS_REQUIRES_STACK JSRecordingStatus
7872 TraceRecorder::record_JSOP_VOID()
7874 stack(-1, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)));
7875 return JSRS_CONTINUE;
7878 JS_REQUIRES_STACK JSRecordingStatus
7879 TraceRecorder::record_JSOP_INCNAME()
7881 return incName(1);
7884 JS_REQUIRES_STACK JSRecordingStatus
7885 TraceRecorder::record_JSOP_INCPROP()
7887 return incProp(1);
7890 JS_REQUIRES_STACK JSRecordingStatus
7891 TraceRecorder::record_JSOP_INCELEM()
7893 return incElem(1);
7896 JS_REQUIRES_STACK JSRecordingStatus
7897 TraceRecorder::record_JSOP_DECNAME()
7899 return incName(-1);
7902 JS_REQUIRES_STACK JSRecordingStatus
7903 TraceRecorder::record_JSOP_DECPROP()
7905 return incProp(-1);
7908 JS_REQUIRES_STACK JSRecordingStatus
7909 TraceRecorder::record_JSOP_DECELEM()
7911 return incElem(-1);
7914 JS_REQUIRES_STACK JSRecordingStatus
7915 TraceRecorder::incName(jsint incr, bool pre)
7917 jsval* vp;
7918 CHECK_STATUS(name(vp));
7919 LIns* v_ins = get(vp);
7920 CHECK_STATUS(inc(*vp, v_ins, incr, pre));
7921 set(vp, v_ins);
7922 return JSRS_CONTINUE;
7925 JS_REQUIRES_STACK JSRecordingStatus
7926 TraceRecorder::record_JSOP_NAMEINC()
7928 return incName(1, false);
7931 JS_REQUIRES_STACK JSRecordingStatus
7932 TraceRecorder::record_JSOP_PROPINC()
7934 return incProp(1, false);
7937 // XXX consolidate with record_JSOP_GETELEM code...
7938 JS_REQUIRES_STACK JSRecordingStatus
7939 TraceRecorder::record_JSOP_ELEMINC()
7941 return incElem(1, false);
7944 JS_REQUIRES_STACK JSRecordingStatus
7945 TraceRecorder::record_JSOP_NAMEDEC()
7947 return incName(-1, false);
7950 JS_REQUIRES_STACK JSRecordingStatus
7951 TraceRecorder::record_JSOP_PROPDEC()
7953 return incProp(-1, false);
7956 JS_REQUIRES_STACK JSRecordingStatus
7957 TraceRecorder::record_JSOP_ELEMDEC()
7959 return incElem(-1, false);
7962 JS_REQUIRES_STACK JSRecordingStatus
7963 TraceRecorder::record_JSOP_GETPROP()
7965 return getProp(stackval(-1));
7968 JS_REQUIRES_STACK JSRecordingStatus
7969 TraceRecorder::record_JSOP_SETPROP()
7971 jsval& l = stackval(-2);
7972 if (JSVAL_IS_PRIMITIVE(l))
7973 ABORT_TRACE("primitive this for SETPROP");
7975 JSObject* obj = JSVAL_TO_OBJECT(l);
7976 if (obj->map->ops->setProperty != js_SetProperty)
7977 ABORT_TRACE("non-native JSObjectOps::setProperty");
7978 return JSRS_CONTINUE;
7981 JS_REQUIRES_STACK JSRecordingStatus
7982 TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop)
7984 if (entry == JS_NO_PROP_CACHE_FILL)
7985 ABORT_TRACE("can't trace uncacheable property set");
7986 if (PCVCAP_TAG(entry->vcap) >= 1)
7987 ABORT_TRACE("can't trace inherited property set");
7989 jsbytecode* pc = cx->fp->regs->pc;
7990 JS_ASSERT(entry->kpc == pc);
7992 jsval& r = stackval(-1);
7993 jsval& l = stackval(-2);
7995 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
7996 JSObject* obj = JSVAL_TO_OBJECT(l);
7997 LIns* obj_ins = get(&l);
7998 JSScope* scope = OBJ_SCOPE(obj);
8000 JS_ASSERT(scope->object == obj);
8001 JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
8003 if (!isValidSlot(scope, sprop))
8004 return JSRS_STOP;
8006 if (obj == globalObj) {
8007 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope));
8008 uint32 slot = sprop->slot;
8009 if (!lazilyImportGlobalSlot(slot))
8010 ABORT_TRACE("lazy import of global slot failed");
8012 LIns* r_ins = get(&r);
8014         /*
8015          * Writing a function into the global object might rebrand it; we don't
8016          * trace that case. There's no need to guard on that, though, because
8017          * separating functions into the trace-time type JSVAL_TFUN will save
8018          * the day!
8019          */
8020 if (VALUE_IS_FUNCTION(cx, r))
8021 ABORT_TRACE("potential rebranding of the global object");
8022 set(&STOBJ_GET_SLOT(obj, slot), r_ins);
8024 JS_ASSERT(*pc != JSOP_INITPROP);
8025 if (pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
8026 set(&l, r_ins);
8027 return JSRS_CONTINUE;
8030     // The global object's shape is guarded at trace entry; all others need a guard here.
8031 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
8032 LIns* ops_ins;
8033 if (!map_is_native(obj->map, map_ins, ops_ins, offsetof(JSObjectOps, setProperty)))
8034 ABORT_TRACE("non-native map");
8036 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
8037 guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
8038 BRANCH_EXIT);
8040 uint32 vshape = PCVCAP_SHAPE(entry->vcap);
8041 if (entry->kshape != vshape) {
8042 LIns *vshape_ins = lir->insLoad(LIR_ld,
8043 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime)),
8044 offsetof(JSRuntime, protoHazardShape));
8045 guard(true, addName(lir->ins2i(LIR_eq, vshape_ins, vshape), "guard(vshape)"),
8046 MISMATCH_EXIT);
8048 LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
8049 LIns* ok_ins = lir->insCall(&js_AddProperty_ci, args);
8050 guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
8053 LIns* dslots_ins = NULL;
8054 LIns* v_ins = get(&r);
8055 LIns* boxed_ins = v_ins;
8056 box_jsval(r, boxed_ins);
8057 CHECK_STATUS(native_set(obj_ins, sprop, dslots_ins, boxed_ins));
8059 if (*pc != JSOP_INITPROP && pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
8060 set(&l, v_ins);
8061 return JSRS_CONTINUE;
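/*
 * The guards above mirror the property cache protocol: the kshape guard
 * proves the object still has the shape the cache entry was keyed on, and
 * when kshape != vshape the entry describes an added property, so we
 * additionally guard the runtime-wide protoHazardShape and emit a
 * js_AddProperty call before the slot write. A plain cached set, by
 * contrast, compiles to essentially the shape guard plus one boxed store
 * through native_set.
 */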
8064 /* Functions used by JSOP_GETELEM. */
8066 static JSBool
8067 GetProperty(JSContext *cx, uintN argc, jsval *vp)
8069 jsval *argv;
8070 jsid id;
8072 JS_ASSERT_NOT_ON_TRACE(cx);
8073 JS_ASSERT(cx->fp->imacpc && argc == 1);
8074 argv = JS_ARGV(cx, vp);
8075 JS_ASSERT(JSVAL_IS_STRING(argv[0]));
8076 if (!js_ValueToStringId(cx, argv[0], &id))
8077 return JS_FALSE;
8078 argv[0] = ID_TO_VALUE(id);
8079 return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
8082 static jsval FASTCALL
8083 GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
8085 JSAutoTempIdRooter idr(cx);
8086 JSAutoTempValueRooter tvr(cx);
8088 if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), idr.addr()) ||
8089 !OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8090 js_SetBuiltinError(cx);
8091 *tvr.addr() = JSVAL_ERROR_COOKIE;
8093 return tvr.value();
8096 static JSBool
8097 GetElement(JSContext *cx, uintN argc, jsval *vp)
8099 jsval *argv;
8100 jsid id;
8102 JS_ASSERT_NOT_ON_TRACE(cx);
8103 JS_ASSERT(cx->fp->imacpc && argc == 1);
8104 argv = JS_ARGV(cx, vp);
8105 JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
8106 if (!JS_ValueToId(cx, argv[0], &id))
8107 return JS_FALSE;
8108 argv[0] = ID_TO_VALUE(id);
8109 return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
8112 static jsval FASTCALL
8113 GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
8115 JSAutoTempValueRooter tvr(cx);
8116 JSAutoTempIdRooter idr(cx);
8118 if (!js_Int32ToId(cx, index, idr.addr())) {
8119 js_SetBuiltinError(cx);
8120 return JSVAL_ERROR_COOKIE;
8122 if (!OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8123 js_SetBuiltinError(cx);
8124 *tvr.addr() = JSVAL_ERROR_COOKIE;
8126 return tvr.value();
8129 JS_DEFINE_TRCINFO_1(GetProperty,
8130 (4, (static, JSVAL_FAIL, GetProperty_tn, CONTEXT, PC, THIS, STRING, 0, 0)))
8131 JS_DEFINE_TRCINFO_1(GetElement,
8132 (4, (extern, JSVAL_FAIL, GetElement_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
8134 JS_REQUIRES_STACK JSRecordingStatus
8135 TraceRecorder::record_JSOP_GETELEM()
8137 bool call = *cx->fp->regs->pc == JSOP_CALLELEM;
8139 jsval& idx = stackval(-1);
8140 jsval& lval = stackval(-2);
8142 LIns* obj_ins = get(&lval);
8143 LIns* idx_ins = get(&idx);
8145 // Special case for array-like access of strings.
8146 if (JSVAL_IS_STRING(lval) && isInt32(idx)) {
8147 if (call)
8148 ABORT_TRACE("JSOP_CALLELEM on a string");
8149 int i = asInt32(idx);
8150 if (size_t(i) >= JSSTRING_LENGTH(JSVAL_TO_STRING(lval)))
8151 ABORT_TRACE("Invalid string index in JSOP_GETELEM");
8152 idx_ins = makeNumberInt32(idx_ins);
8153 LIns* args[] = { idx_ins, obj_ins, cx_ins };
8154 LIns* unitstr_ins = lir->insCall(&js_String_getelem_ci, args);
8155 guard(false, lir->ins_eq0(unitstr_ins), MISMATCH_EXIT);
8156 set(&lval, unitstr_ins);
8157 return JSRS_CONTINUE;
8160 if (JSVAL_IS_PRIMITIVE(lval))
8161         ABORT_TRACE("JSOP_GETELEM on a primitive");
8162 ABORT_IF_XML(lval);
8164 JSObject* obj = JSVAL_TO_OBJECT(lval);
8165 jsval id;
8166 LIns* v_ins;
8168 /* Property access using a string name or something we have to stringify. */
8169 if (!JSVAL_IS_INT(idx)) {
8170 if (!JSVAL_IS_PRIMITIVE(idx))
8171 ABORT_TRACE("non-primitive index");
8172 // If index is not a string, turn it into a string.
8173 if (!js_InternNonIntElementId(cx, obj, idx, &id))
8174 ABORT_TRACE_ERROR("failed to intern non-int element id");
8175 set(&idx, stringify(idx));
8177 // Store the interned string to the stack to save the interpreter from redoing this work.
8178 idx = ID_TO_VALUE(id);
8180 // The object is not guaranteed to be a dense array at this point, so it might be the
8181 // global object, which we have to guard against.
8182 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8184 return call_imacro(call ? callelem_imacros.callprop : getelem_imacros.getprop);
8187 if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
8188 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8190 return call_imacro(call ? callelem_imacros.callelem : getelem_imacros.getelem);
8193     // Fast path for dense arrays accessed with an integer index.
8194 jsval* vp;
8195 LIns* addr_ins;
8196 CHECK_STATUS(denseArrayElement(lval, idx, vp, v_ins, addr_ins));
8197 set(&lval, v_ins);
8198 if (call)
8199 set(&idx, obj_ins);
8200 return JSRS_CONTINUE;
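/*
 * Dispatch summary for the JSOP_GETELEM recording above, roughly:
 *
 *     str[i] with i an int      ->  js_String_getelem fast path
 *     denseArray[intIdx]        ->  denseArrayElement() inline load
 *     obj[nonIntIdx]            ->  getprop/callprop imacro after interning
 *     other objects [intIdx]    ->  getelem/callelem imacro
 *
 * For JSOP_CALLELEM the dense-array fast path also pushes the object itself
 * as the 'this' value via set(&idx, obj_ins).
 */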
8203 /* Functions used by JSOP_SETELEM */
8205 static JSBool
8206 SetProperty(JSContext *cx, uintN argc, jsval *vp)
8208 jsval *argv;
8209 jsid id;
8211 JS_ASSERT(argc == 2);
8212 argv = JS_ARGV(cx, vp);
8213 JS_ASSERT(JSVAL_IS_STRING(argv[0]));
8214 if (!js_ValueToStringId(cx, argv[0], &id))
8215 return JS_FALSE;
8216 argv[0] = ID_TO_VALUE(id);
8217 if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
8218 return JS_FALSE;
8219 JS_SET_RVAL(cx, vp, JSVAL_VOID);
8220 return JS_TRUE;
8223 static JSBool FASTCALL
8224 SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
8226 JSAutoTempValueRooter tvr(cx, v);
8227 JSAutoTempIdRooter idr(cx);
8229 if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), idr.addr()) ||
8230 !OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8231 js_SetBuiltinError(cx);
8233 return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
8236 static JSBool
8237 SetElement(JSContext *cx, uintN argc, jsval *vp)
8239 jsval *argv;
8240 jsid id;
8242 JS_ASSERT(argc == 2);
8243 argv = JS_ARGV(cx, vp);
8244 JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
8245 if (!JS_ValueToId(cx, argv[0], &id))
8246 return JS_FALSE;
8247 argv[0] = ID_TO_VALUE(id);
8248 if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
8249 return JS_FALSE;
8250 JS_SET_RVAL(cx, vp, JSVAL_VOID);
8251 return JS_TRUE;
8254 static JSBool FASTCALL
8255 SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
8257 JSAutoTempIdRooter idr(cx);
8258 JSAutoTempValueRooter tvr(cx, v);
8260 if (!js_Int32ToId(cx, index, idr.addr()) ||
8261 !OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
8262 js_SetBuiltinError(cx);
8264 return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
8267 JS_DEFINE_TRCINFO_1(SetProperty,
8268 (4, (extern, BOOL_FAIL, SetProperty_tn, CONTEXT, THIS, STRING, JSVAL, 0, 0)))
8269 JS_DEFINE_TRCINFO_1(SetElement,
8270 (4, (extern, BOOL_FAIL, SetElement_tn, CONTEXT, THIS, INT32, JSVAL, 0, 0)))
8272 JS_REQUIRES_STACK JSRecordingStatus
8273 TraceRecorder::record_JSOP_SETELEM()
8275 jsval& v = stackval(-1);
8276 jsval& idx = stackval(-2);
8277 jsval& lval = stackval(-3);
8279     /* No guards needed for the type checks here; the trace is already specialized for these types. */
8280 if (JSVAL_IS_PRIMITIVE(lval))
8281 ABORT_TRACE("left JSOP_SETELEM operand is not an object");
8282 ABORT_IF_XML(lval);
8284 JSObject* obj = JSVAL_TO_OBJECT(lval);
8285 LIns* obj_ins = get(&lval);
8286 LIns* idx_ins = get(&idx);
8287 LIns* v_ins = get(&v);
8288 jsid id;
8290 if (!JSVAL_IS_INT(idx)) {
8291 if (!JSVAL_IS_PRIMITIVE(idx))
8292 ABORT_TRACE("non-primitive index");
8293 // If index is not a string, turn it into a string.
8294 if (!js_InternNonIntElementId(cx, obj, idx, &id))
8295 ABORT_TRACE_ERROR("failed to intern non-int element id");
8296 set(&idx, stringify(idx));
8298 // Store the interned string to the stack to save the interpreter from redoing this work.
8299 idx = ID_TO_VALUE(id);
8301 // The object is not guaranteed to be a dense array at this point, so it might be the
8302 // global object, which we have to guard against.
8303 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8305 return call_imacro((*cx->fp->regs->pc == JSOP_INITELEM)
8306 ? initelem_imacros.initprop
8307 : setelem_imacros.setprop);
8310 if (JSVAL_TO_INT(idx) < 0 || !OBJ_IS_DENSE_ARRAY(cx, obj)) {
8311 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8313 return call_imacro((*cx->fp->regs->pc == JSOP_INITELEM)
8314 ? initelem_imacros.initelem
8315 : setelem_imacros.setelem);
8318 // Make sure the array is actually dense.
8319 if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
8320 return JSRS_STOP;
8322 // Fast path for dense arrays accessed with a non-negative integer index. In case the trace
8323 // calculated the index using the FPU, force it to be an integer.
8324 idx_ins = makeNumberInt32(idx_ins);
8326 // Box the value so we can use one builtin instead of having to add one builtin for every
8327 // storage type.
8328 LIns* boxed_v_ins = v_ins;
8329 box_jsval(v, boxed_v_ins);
8331 LIns* args[] = { boxed_v_ins, idx_ins, obj_ins, cx_ins };
8332 LIns* res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
8333 guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
8335 jsbytecode* pc = cx->fp->regs->pc;
8336 if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
8337 set(&lval, v_ins);
8339 return JSRS_CONTINUE;
8342 JS_REQUIRES_STACK JSRecordingStatus
8343 TraceRecorder::record_JSOP_CALLNAME()
8345 JSObject* obj = cx->fp->scopeChain;
8346 if (obj != globalObj) {
8347 jsval* vp;
8348 CHECK_STATUS(activeCallOrGlobalSlot(obj, vp));
8349 stack(0, get(vp));
8350 stack(1, INS_CONSTPTR(globalObj));
8351 return JSRS_CONTINUE;
8354 LIns* obj_ins = scopeChain();
8355 JSObject* obj2;
8356 jsuword pcval;
8358 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8360 if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
8361 ABORT_TRACE("callee is not an object");
8363 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
8365 stack(0, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
8366 stack(1, obj_ins);
8367 return JSRS_CONTINUE;
8370 JS_DEFINE_CALLINFO_4(extern, UINT32, js_GetUpvarOnTrace, CONTEXT, UINT32, UINT32, DOUBLEPTR, 0, 0)
8372 JS_REQUIRES_STACK JSRecordingStatus
8373 TraceRecorder::record_JSOP_GETUPVAR()
8375 uintN index = GET_UINT16(cx->fp->regs->pc);
8376 JSScript *script = cx->fp->script;
8378 JSUpvarArray* uva = JS_SCRIPT_UPVARS(script);
8379 JS_ASSERT(index < uva->length);
8381     /*
8382      * Try to find the upvar in the current trace's tracker.
8383      */
8384 jsval& v = js_GetUpvar(cx, script->staticLevel, uva->vector[index]);
8385 LIns* upvar_ins = get(&v);
8386 if (upvar_ins) {
8387 stack(0, upvar_ins);
8388 return JSRS_CONTINUE;
8389     }
8391     /*
8392      * The upvar is not in the current trace, so get the upvar value
8393      * exactly as the interpreter does and unbox.
8394      */
8395 LIns* outp = lir->insAlloc(sizeof(double));
8396 LIns* args[] = {
8397 outp,
8398 lir->insImm(uva->vector[index]),
8399 lir->insImm(script->staticLevel),
8400 cx_ins
8402 const CallInfo* ci = &js_GetUpvarOnTrace_ci;
8403 LIns* call_ins = lir->insCall(ci, args);
8404 uint8 type = getCoercedType(v);
8405 guard(true,
8406 addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
8407 "guard(type-stable upvar)"),
8408 BRANCH_EXIT);
8410 LOpcode loadOp;
8411 switch (type) {
8412 case JSVAL_DOUBLE:
8413 loadOp = LIR_ldq;
8414 break;
8415 case JSVAL_OBJECT:
8416 case JSVAL_STRING:
8417 case JSVAL_TFUN:
8418 case JSVAL_TNULL:
8419 loadOp = LIR_ldp;
8420 break;
8421 case JSVAL_INT:
8422 case JSVAL_BOOLEAN:
8423 loadOp = LIR_ld;
8424 break;
8425 case JSVAL_BOXED:
8426 default:
8427 JS_NOT_REACHED("found boxed type in an upvar type map entry");
8428 return JSRS_STOP;
8431 LIns* result = lir->insLoad(loadOp, outp, lir->insImm(0));
8432 if (type == JSVAL_INT)
8433 result = lir->ins1(LIR_i2f, result);
8434 stack(0, result);
8435 return JSRS_CONTINUE;
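/*
 * How the type-stability guard above works: js_GetUpvarOnTrace returns the
 * runtime type tag of the upvar while writing the raw value through 'outp'.
 * Comparing that tag against the record-time getCoercedType(v) lets the code
 * below pick a fixed-width load (ldq for doubles, ldp for pointer types, ld
 * for int/boolean, with ints widened via i2f), so a later type change simply
 * side-exits instead of misreading the slot.
 */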
8438 JS_REQUIRES_STACK JSRecordingStatus
8439 TraceRecorder::record_JSOP_CALLUPVAR()
8441 CHECK_STATUS(record_JSOP_GETUPVAR());
8442 stack(1, INS_CONSTPTR(NULL));
8443 return JSRS_CONTINUE;
8446 JS_REQUIRES_STACK JSRecordingStatus
8447 TraceRecorder::record_JSOP_GETDSLOT()
8449 JSObject* callee = cx->fp->callee;
8450 LIns* callee_ins = get(&cx->fp->argv[-2]);
8452 unsigned index = GET_UINT16(cx->fp->regs->pc);
8453 LIns* dslots_ins = NULL;
8454 LIns* v_ins = stobj_get_dslot(callee_ins, index, dslots_ins);
8456 unbox_jsval(callee->dslots[index], v_ins, snapshot(BRANCH_EXIT));
8457 stack(0, v_ins);
8458 return JSRS_CONTINUE;
8461 JS_REQUIRES_STACK JSRecordingStatus
8462 TraceRecorder::record_JSOP_CALLDSLOT()
8464 CHECK_STATUS(record_JSOP_GETDSLOT());
8465 stack(1, INS_CONSTPTR(NULL));
8466 return JSRS_CONTINUE;
8469 JS_REQUIRES_STACK JSRecordingStatus
8470 TraceRecorder::guardCallee(jsval& callee)
8472 JS_ASSERT(VALUE_IS_FUNCTION(cx, callee));
8474 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
8475 JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
8476 LIns* callee_ins = get(&callee);
8478 guard(true,
8479 lir->ins2(LIR_eq,
8480 lir->ins2(LIR_piand,
8481 stobj_get_fslot(callee_ins, JSSLOT_PRIVATE),
8482 INS_CONSTWORD(~JSVAL_INT)),
8483 INS_CONSTPTR(OBJ_GET_PRIVATE(cx, callee_obj))),
8484 branchExit);
8485 guard(true,
8486 lir->ins2(LIR_eq,
8487 stobj_get_fslot(callee_ins, JSSLOT_PARENT),
8488 INS_CONSTPTR(OBJ_GET_PARENT(cx, callee_obj))),
8489 branchExit);
8490 return JSRS_CONTINUE;
8493 JS_REQUIRES_STACK JSRecordingStatus
8494 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
8496 if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
8497 ABORT_TRACE("JSOP_CALL or JSOP_NEW crosses global scopes");
8499 JSStackFrame* fp = cx->fp;
8501 // TODO: track the copying via the tracker...
8502 if (argc < fun->nargs &&
8503 jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
8504 ABORT_TRACE("can't trace calls with too few args requiring argv move");
8507 // Generate a type map for the outgoing frame and stash it in the LIR
8508 unsigned stackSlots = js_NativeStackSlots(cx, 0/*callDepth*/);
8509 if (sizeof(FrameInfo) + stackSlots * sizeof(uint8) > MAX_SKIP_BYTES)
8510 ABORT_TRACE("interpreted function call requires saving too much stack");
8511 LIns* data = lir->insSkip(sizeof(FrameInfo) + stackSlots * sizeof(uint8));
8512 FrameInfo* fi = (FrameInfo*)data->payload();
8513 uint8* typemap = (uint8 *)(fi + 1);
8514 uint8* m = typemap;
8515 /* Determine the type of a store by looking at the current type of the actual value the
8516 interpreter is using. For numbers we have to check what kind of store we used last
8517        (integer or double) to figure out what the side exit should reflect in its typemap. */
8518 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0/*callDepth*/,
8519 *m++ = determineSlotType(vp);
8522 if (argc >= 0x8000)
8523 ABORT_TRACE("too many arguments");
8525 fi->callee = JSVAL_TO_OBJECT(fval);
8526 fi->block = fp->blockChain;
8527 fi->pc = fp->regs->pc;
8528 fi->imacpc = fp->imacpc;
8529 fi->s.spdist = fp->regs->sp - fp->slots;
8530 fi->s.argc = argc | (constructing ? 0x8000 : 0);
8532 unsigned callDepth = getCallDepth();
8533 if (callDepth >= treeInfo->maxCallDepth)
8534 treeInfo->maxCallDepth = callDepth + 1;
8536 lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
8538 atoms = fun->u.i.script->atomMap.vector;
8539 return JSRS_CONTINUE;
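/*
 * FrameInfo packing used above: argc and the constructing flag share one
 * 16-bit field, which is why argc >= 0x8000 aborts recording. For example,
 * 'new f(a, b)' records fi->s.argc == (2 | 0x8000) == 0x8002. The typemap
 * written right after the struct holds one type byte per live stack slot of
 * the pending call, so that frames can be rebuilt with the same layout when
 * the trace later exits through this call.
 */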
8542 JS_REQUIRES_STACK JSRecordingStatus
8543 TraceRecorder::record_JSOP_CALL()
8545 uintN argc = GET_ARGC(cx->fp->regs->pc);
8546 cx->fp->assertValidStackDepth(argc + 2);
8547 return functionCall(argc,
8548 (cx->fp->imacpc && *cx->fp->imacpc == JSOP_APPLY)
8549 ? JSOP_APPLY
8550 : JSOP_CALL);
8553 static jsbytecode* apply_imacro_table[] = {
8554 apply_imacros.apply0,
8555 apply_imacros.apply1,
8556 apply_imacros.apply2,
8557 apply_imacros.apply3,
8558 apply_imacros.apply4,
8559 apply_imacros.apply5,
8560 apply_imacros.apply6,
8561 apply_imacros.apply7,
8562 apply_imacros.apply8
8565 static jsbytecode* call_imacro_table[] = {
8566 apply_imacros.call0,
8567 apply_imacros.call1,
8568 apply_imacros.call2,
8569 apply_imacros.call3,
8570 apply_imacros.call4,
8571 apply_imacros.call5,
8572 apply_imacros.call6,
8573 apply_imacros.call7,
8574 apply_imacros.call8
8577 JS_REQUIRES_STACK JSRecordingStatus
8578 TraceRecorder::record_JSOP_APPLY()
8580 JSStackFrame* fp = cx->fp;
8581 jsbytecode *pc = fp->regs->pc;
8582 uintN argc = GET_ARGC(pc);
8583 cx->fp->assertValidStackDepth(argc + 2);
8585 jsval* vp = fp->regs->sp - (argc + 2);
8586 jsuint length = 0;
8587 JSObject* aobj = NULL;
8588 LIns* aobj_ins = NULL;
8590 JS_ASSERT(!fp->imacpc);
8592 if (!VALUE_IS_FUNCTION(cx, vp[0]))
8593 return record_JSOP_CALL();
8594 ABORT_IF_XML(vp[0]);
8596 JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
8597 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
8598 if (FUN_INTERPRETED(fun))
8599 return record_JSOP_CALL();
8601 bool apply = (JSFastNative)fun->u.n.native == js_fun_apply;
8602 if (!apply && (JSFastNative)fun->u.n.native != js_fun_call)
8603 return record_JSOP_CALL();
8605     /*
8606      * We don't trace apply and call with a primitive 'this', which is the
8607      * first positional parameter.
8608      */
8609 if (argc > 0 && JSVAL_IS_PRIMITIVE(vp[2]))
8610 return record_JSOP_CALL();
8612     /*
8613      * Guard on the identity of this, which is the function we are applying.
8614      */
8615 if (!VALUE_IS_FUNCTION(cx, vp[1]))
8616 ABORT_TRACE("callee is not a function");
8617 CHECK_STATUS(guardCallee(vp[1]));
8619 if (apply && argc >= 2) {
8620 if (argc != 2)
8621 ABORT_TRACE("apply with excess arguments");
8622 if (JSVAL_IS_PRIMITIVE(vp[3]))
8623 ABORT_TRACE("arguments parameter of apply is primitive");
8624 aobj = JSVAL_TO_OBJECT(vp[3]);
8625 aobj_ins = get(&vp[3]);
8627         /*
8628          * We expect a dense array for the arguments (the other
8629          * frequent case is the arguments object, which we
8630          * don't trace at the moment).
8631          */
8632 if (!guardDenseArray(aobj, aobj_ins))
8633 ABORT_TRACE("arguments parameter of apply is not a dense array");
8635         /*
8636          * We trace only apply calls with a certain number of arguments.
8637          */
8638 length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
8639 if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
8640 ABORT_TRACE("too many arguments to apply");
8642         /*
8643          * Make sure the array has the same length at runtime.
8644          */
8645 guard(true,
8646 lir->ins2i(LIR_eq,
8647 stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH),
8648 length),
8649 BRANCH_EXIT);
8651 return call_imacro(apply_imacro_table[length]);
8654 if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
8655 ABORT_TRACE("too many arguments to call");
8657 return call_imacro(call_imacro_table[argc]);
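/*
 * The tables above flatten fun.call/fun.apply into one straight-line imacro
 * per arity. For example, f.apply(obj, [a, b]) with a dense two-element
 * array dispatches to apply_imacros.apply2. Because the array length was
 * only inspected at record time, the LIR_eq guard on JSSLOT_ARRAY_LENGTH
 * re-validates it on every later execution of the trace.
 */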
8660 static JSBool FASTCALL
8661 CatchStopIteration_tn(JSContext* cx, JSBool ok, jsval* vp)
8663 if (!ok && cx->throwing && js_ValueIsStopIteration(cx->exception)) {
8664 cx->throwing = JS_FALSE;
8665 cx->exception = JSVAL_VOID;
8666 *vp = JSVAL_HOLE;
8667 return JS_TRUE;
8669 return ok;
8672 JS_DEFINE_TRCINFO_1(CatchStopIteration_tn,
8673 (3, (static, BOOL, CatchStopIteration_tn, CONTEXT, BOOL, JSVALPTR, 0, 0)))
8675 JS_REQUIRES_STACK JSRecordingStatus
8676 TraceRecorder::record_NativeCallComplete()
8678 if (pendingTraceableNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
8679 return JSRS_CONTINUE;
8681 jsbytecode* pc = cx->fp->regs->pc;
8683 JS_ASSERT(pendingTraceableNative);
8684 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW);
8686 jsval& v = stackval(-1);
8687 LIns* v_ins = get(&v);
8689 /* At this point the generated code has already called the native function
8690 and we can no longer fail back to the original pc location (JSOP_CALL)
8691 because that would cause the interpreter to re-execute the native
8692 function, which might have side effects.
8694 Instead, the snapshot() call below sees that we are currently parked on
8695 a traceable native's JSOP_CALL instruction, and it will advance the pc
8696 to restore by the length of the current opcode. If the native's return
8697 type is jsval, snapshot() will also indicate in the type map that the
8698 element on top of the stack is a boxed value which doesn't need to be
8699 boxed if the type guard generated by unbox_jsval() fails. */
8701 if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
8702 // Keep cx->bailExit null when it's invalid.
8703 lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
8705 LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
8706 if (pendingTraceableNative == generatedTraceableNative) {
8707 LIns* ok_ins = v_ins;
8709             /*
8710              * Custom implementations of Iterator.next() throw a StopIteration exception.
8711              * Catch and clear it and set the return value to JSVAL_HOLE in this case.
8712              */
8713 if (uintptr_t(pc - nextiter_imacros.custom_iter_next) <
8714 sizeof(nextiter_imacros.custom_iter_next)) {
8715 LIns* args[] = { native_rval_ins, ok_ins, cx_ins }; /* reverse order */
8716 ok_ins = lir->insCall(&CatchStopIteration_tn_ci, args);
8717             }
8719             /*
8720              * If we run a generic traceable native, the return value is in the argument
8721              * vector for native function calls. The actual return value of the native is a JSBool
8722              * indicating the error status.
8723              */
8724 v_ins = lir->insLoad(LIR_ld, native_rval_ins, 0);
8725 if (*pc == JSOP_NEW) {
8726 LIns* x = lir->ins_eq0(lir->ins2i(LIR_piand, v_ins, JSVAL_TAGMASK));
8727 x = lir->ins_choose(x, v_ins, INS_CONST(0));
8728 v_ins = lir->ins_choose(lir->ins_eq0(x), newobj_ins, x);
8730 set(&v, v_ins);
8732             /*
8733              * If this is a generic traceable native invocation, propagate the boolean return
8734              * value of the native into builtinStatus. If the return value (ok_ins)
8735              * is true, status' == status. Otherwise status' = status | JSBUILTIN_ERROR.
8736              * We calculate (ok&1)^1, which is 1 if ok is JS_FALSE (error), and then
8737              * shift that left by 1, which yields JSBUILTIN_ERROR.
8738              */
8739 JS_STATIC_ASSERT((1 - JS_TRUE) << 1 == 0);
8740 JS_STATIC_ASSERT((1 - JS_FALSE) << 1 == JSBUILTIN_ERROR);
8741 status = lir->ins2(LIR_or,
8742 status,
8743 lir->ins2i(LIR_lsh,
8744 lir->ins2i(LIR_xor,
8745 lir->ins2i(LIR_and, ok_ins, 1),
8747 1));
8748 lir->insStorei(status, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
8750 guard(true,
8751 lir->ins_eq0(status),
8752 STATUS_EXIT);
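/*
 * Truth table for the status update above, given the static asserts that
 * JSBUILTIN_ERROR == (1 - JS_FALSE) << 1 == 2:
 *
 *     ok == JS_TRUE  (1)  ->  ((1 & 1) ^ 1) << 1 == 0, status unchanged
 *     ok == JS_FALSE (0)  ->  ((0 & 1) ^ 1) << 1 == 2, error bit set
 *
 * The eq0 guard then leaves the trace through STATUS_EXIT whenever any
 * builtin reported an error.
 */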
8755 JSRecordingStatus ok = JSRS_CONTINUE;
8756 if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
8757         /*
8758          * If we side exit on the unboxing code due to a type change, make sure that the boxed
8759          * value is actually currently associated with that location, and that we are talking
8760          * about the top of the stack here, which is where we expected boxed values.
8761          */
8762 JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins);
8763 unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT));
8764 set(&v, v_ins);
8765 } else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
8766 /* Already added i2f in functionCall. */
8767 JS_ASSERT(JSVAL_IS_NUMBER(v));
8768 } else {
8769 /* Convert the result to double if the builtin returns int32. */
8770 if (JSVAL_IS_NUMBER(v) &&
8771 (pendingTraceableNative->builtin->_argtypes & 3) == nanojit::ARGSIZE_LO) {
8772 set(&v, lir->ins1(LIR_i2f, v_ins));
8776 // We'll null pendingTraceableNative in monitorRecording, on the next op cycle.
8777 // There must be a next op since the stack is non-empty.
8778 return ok;
8781 JS_REQUIRES_STACK JSRecordingStatus
8782 TraceRecorder::name(jsval*& vp)
8784 JSObject* obj = cx->fp->scopeChain;
8785 if (obj != globalObj)
8786 return activeCallOrGlobalSlot(obj, vp);
8788 /* Can't use prop here, because we don't want unboxing from global slots. */
8789 LIns* obj_ins = scopeChain();
8790 uint32 slot;
8792 JSObject* obj2;
8793 jsuword pcval;
8795     /*
8796      * Property cache ensures that we are dealing with an existing property,
8797      * and guards the shape for us.
8798      */
8799 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8801     /*
8802      * Abort if the property doesn't exist (the interpreter will report an error).
8803      */
8804 if (PCVAL_IS_NULL(pcval))
8805 ABORT_TRACE("named property not found");
8807     /*
8808      * Insist on obj being the directly addressed object.
8809      */
8810 if (obj2 != obj)
8811 ABORT_TRACE("name() hit prototype chain");
8813     /* Don't trace getter or setter calls; our caller wants a direct slot. */
8814 if (PCVAL_IS_SPROP(pcval)) {
8815 JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
8816 if (!isValidSlot(OBJ_SCOPE(obj), sprop))
8817 ABORT_TRACE("name() not accessing a valid slot");
8818 slot = sprop->slot;
8819 } else {
8820 if (!PCVAL_IS_SLOT(pcval))
8821 ABORT_TRACE("PCE is not a slot");
8822 slot = PCVAL_TO_SLOT(pcval);
8825 if (!lazilyImportGlobalSlot(slot))
8826 ABORT_TRACE("lazy import of global slot failed");
8828 vp = &STOBJ_GET_SLOT(obj, slot);
8829 return JSRS_CONTINUE;
8832 JS_REQUIRES_STACK JSRecordingStatus
8833 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins)
8834 {
8835     /*
8836      * Can't specialize to assert obj != global, must guard to avoid aliasing
8837      * stale homes of stacked global variables.
8838      */
8839 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
8841     /*
8842      * Property cache ensures that we are dealing with an existing property,
8843      * and guards the shape for us.
8844      */
8845 JSObject* obj2;
8846 jsuword pcval;
8847 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
8849 /* Check for non-existent property reference, which results in undefined. */
8850 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
8851 if (PCVAL_IS_NULL(pcval)) {
8852         /*
8853          * This trace will be valid as long as neither the object nor any object
8854          * on its prototype chain changes shape.
8855          */
8856 VMSideExit* exit = snapshot(BRANCH_EXIT);
8857 for (;;) {
8858 LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
8859 LIns* ops_ins;
8860 if (map_is_native(obj->map, map_ins, ops_ins)) {
8861 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
8862 "shape");
8863 guard(true,
8864 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
8865 exit);
8866 } else if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
8867 ABORT_TRACE("non-native object involved in undefined property access");
8869 obj = JSVAL_TO_OBJECT(obj->fslots[JSSLOT_PROTO]);
8870 if (!obj)
8871 break;
8872 obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
8875 v_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
8876 slot = SPROP_INVALID_SLOT;
8877 return JSRS_CONTINUE;
8880     /* If setting, insist on obj being the directly addressed object. */
8881 uint32 setflags = (cs.format & (JOF_SET | JOF_INCDEC | JOF_FOR));
8882 LIns* dslots_ins = NULL;
8884     /* Don't trace getter or setter calls; our caller wants a direct slot. */
8885 if (PCVAL_IS_SPROP(pcval)) {
8886 JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
8888 if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
8889 ABORT_TRACE("non-stub setter");
8890 if (setflags && (sprop->attrs & JSPROP_READONLY))
8891 ABORT_TRACE("writing to a readonly property");
8892 if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop)) {
8893 // FIXME 450335: generalize this away from regexp built-in getters.
8894 if (setflags == 0 &&
8895 sprop->getter == js_RegExpClass.getProperty &&
8896 sprop->shortid < 0) {
8897 if (sprop->shortid == REGEXP_LAST_INDEX)
8898 ABORT_TRACE("can't trace RegExp.lastIndex yet");
8899 LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
8900 v_ins = lir->insCall(&js_CallGetter_ci, args);
8901 guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
8902                 /*
8903                  * BIG FAT WARNING: This snapshot cannot be a BRANCH_EXIT, since
8904                  * the value at the top of the stack is not the value we unbox.
8905                  */
8906 unbox_jsval((sprop->shortid == REGEXP_SOURCE) ? JSVAL_STRING : JSVAL_BOOLEAN,
8907 v_ins,
8908 snapshot(MISMATCH_EXIT));
8909 return JSRS_CONTINUE;
8911 if (setflags == 0 &&
8912 sprop->getter == js_StringClass.getProperty &&
8913 sprop->id == ATOM_KEY(cx->runtime->atomState.lengthAtom)) {
8914 if (!guardClass(obj, obj_ins, &js_StringClass, snapshot(MISMATCH_EXIT)))
8915 ABORT_TRACE("can't trace String.length on non-String objects");
8916 LIns* str_ins = stobj_get_fslot(obj_ins, JSSLOT_PRIVATE);
8917 str_ins = lir->ins2(LIR_piand, str_ins, INS_CONSTWORD(~JSVAL_TAGMASK));
8918 v_ins = lir->ins1(LIR_i2f, getStringLength(str_ins));
8919 return JSRS_CONTINUE;
8921 ABORT_TRACE("non-stub getter");
8923 if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)))
8924 ABORT_TRACE("no valid slot");
8925 slot = sprop->slot;
8926 } else {
8927 if (!PCVAL_IS_SLOT(pcval))
8928 ABORT_TRACE("PCE is not a slot");
8929 slot = PCVAL_TO_SLOT(pcval);
8932 if (obj2 != obj) {
8933 if (setflags)
8934 ABORT_TRACE("JOF_SET opcode hit prototype chain");
8936         /*
8937          * We're getting a proto-property. Walk up the prototype chain emitting
8938          * proto slot loads, updating obj as we go, leaving obj set to obj2 with
8939          * obj_ins the last proto-load.
8940          */
8941 while (obj != obj2) {
8942 obj_ins = stobj_get_slot(obj_ins, JSSLOT_PROTO, dslots_ins);
8943 obj = STOBJ_GET_PROTO(obj);
8947 v_ins = stobj_get_slot(obj_ins, slot, dslots_ins);
8948 unbox_jsval(STOBJ_GET_SLOT(obj, slot), v_ins, snapshot(BRANCH_EXIT));
8950 return JSRS_CONTINUE;

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::denseArrayElement(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ins,
                                 LIns*& addr_ins)
{
    JS_ASSERT(JSVAL_IS_OBJECT(oval) && JSVAL_IS_INT(ival));

    JSObject* obj = JSVAL_TO_OBJECT(oval);
    LIns* obj_ins = get(&oval);
    jsint idx = JSVAL_TO_INT(ival);
    LIns* idx_ins = makeNumberInt32(get(&ival));

    VMSideExit* exit = snapshot(BRANCH_EXIT);

    /* check that the index is within bounds */
    LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    jsuint capacity = js_DenseArrayCapacity(obj);
    bool within = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
    if (!within) {
        /* If idx < 0, stay on trace (and read value as undefined, since this is a dense array). */
        LIns* br1 = NULL;
        if (MAX_DSLOTS_LENGTH > JS_BITMASK(30) && !idx_ins->isconst()) {
            JS_ASSERT(sizeof(jsval) == 8); // Only 64-bit machines support large enough arrays for this.
            br1 = lir->insBranch(LIR_jt,
                                 lir->ins2i(LIR_lt, idx_ins, 0),
                                 NULL);
        }

        /* If not idx < length, stay on trace (and read value as undefined). */
        LIns* br2 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_ult,
                                             idx_ins,
                                             stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
                                   NULL);

        /* If dslots is NULL, stay on trace (and read value as undefined). */
        LIns* br3 = lir->insBranch(LIR_jt, lir->ins_eq0(dslots_ins), NULL);

        /* If not idx < capacity, stay on trace (and read value as undefined). */
        LIns* br4 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_ult,
                                             idx_ins,
                                             lir->insLoad(LIR_ldp,
                                                          dslots_ins,
                                                          -(int)sizeof(jsval))),
                                   NULL);
        lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
        LIns* label = lir->ins0(LIR_label);
        if (br1)
            br1->setTarget(label);
        br2->setTarget(label);
        br3->setTarget(label);
        br4->setTarget(label);

        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));

        // Return undefined and indicate that we didn't actually read this (addr_ins).
        v_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
        addr_ins = NULL;
        return JSRS_CONTINUE;
    }

    /* Guard against negative index */
    if (MAX_DSLOTS_LENGTH > JS_BITMASK(30) && !idx_ins->isconst()) {
        JS_ASSERT(sizeof(jsval) == 8); // Only 64-bit machines support large enough arrays for this.
        guard(false,
              lir->ins2i(LIR_lt, idx_ins, 0),
              exit);
    }

    /* Guard array length */
    guard(true,
          lir->ins2(LIR_ult, idx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
          exit);

    /* dslots must not be NULL */
    guard(false,
          lir->ins_eq0(dslots_ins),
          exit);

    /* Guard array capacity */
    guard(true,
          lir->ins2(LIR_ult,
                    idx_ins,
                    lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
          exit);

    /* Load the value and guard on its type to unbox it. */
    vp = &obj->dslots[jsuint(idx)];
    addr_ins = lir->ins2(LIR_piadd, dslots_ins,
                         lir->ins2i(LIR_pilsh, idx_ins, (sizeof(jsval) == 4) ? 2 : 3));
    v_ins = lir->insLoad(LIR_ldp, addr_ins, 0);
    unbox_jsval(*vp, v_ins, exit);

    if (JSVAL_TAG(*vp) == JSVAL_BOOLEAN) {
        /*
         * If we read a hole from the array, convert it to undefined and guard that there
         * are no indexed properties along the prototype chain.
         */
        LIns* br = lir->insBranch(LIR_jf,
                                  lir->ins2i(LIR_eq, v_ins, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_HOLE)),
                                  NULL);
        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
        br->setTarget(lir->ins0(LIR_label));

        /*
         * Don't let the hole value escape. Turn it into an undefined.
         */
        v_ins = lir->ins2i(LIR_and, v_ins, ~(JSVAL_HOLE_FLAG >> JSVAL_TAGBITS));
    }
    return JSRS_CONTINUE;
}
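
/*
 * Example of the hole path above: after |var a = [1, , 3];| a read of a[1]
 * loads JSVAL_HOLE from dslots.  Rewriting the hole as undefined is only
 * valid while no prototype of the array has indexed properties, hence the
 * guardPrototypeHasNoIndexedProperties() call on that path.
 */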

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
{
    uint32 slot;
    LIns* v_ins;
    CHECK_STATUS(prop(obj, obj_ins, slot, v_ins));

    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    JS_ASSERT(cs.ndefs == 1);
    stack(-cs.nuses, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::getProp(jsval& v)
{
    if (JSVAL_IS_PRIMITIVE(v))
        ABORT_TRACE("primitive lhs");

    return getProp(JSVAL_TO_OBJECT(v), get(&v));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NAME()
{
    jsval* vp;
    CHECK_STATUS(name(vp));
    stack(0, get(vp));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DOUBLE()
{
    jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
    stack(0, lir->insImmf(*JSVAL_TO_DOUBLE(v)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRING()
{
    JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    JS_ASSERT(ATOM_IS_STRING(atom));
    stack(0, INS_CONSTPTR(ATOM_TO_STRING(atom)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ZERO()
{
    stack(0, lir->insImmq(0));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ONE()
{
    stack(0, lir->insImmf(1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NULL()
{
    stack(0, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THIS()
{
    LIns* this_ins;
    CHECK_STATUS(getThis(this_ins));
    stack(0, this_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FALSE()
{
    stack(0, lir->insImm(0));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRUE()
{
    stack(0, lir->insImm(1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_OR()
{
    return ifop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_AND()
{
    return ifop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCH()
{
#ifdef NANOJIT_IA32
    /* Handle tableswitches specially -- prepare a jump table if needed. */
    LIns* guardIns = tableswitch();
    if (guardIns) {
        fragment->lastIns = guardIns;
        compile(&JS_TRACE_MONITOR(cx));
    }
    return JSRS_STOP;
#else
    return switchop();
#endif
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCH()
{
    return switchop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRICTEQ()
{
    strictEquality(true, false);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STRICTNE()
{
    strictEquality(false, false);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_OBJECT()
{
    JSStackFrame* fp = cx->fp;
    JSScript* script = fp->script;
    unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);

    JSObject* obj;
    JS_GET_SCRIPT_OBJECT(script, index, obj);
    stack(0, INS_CONSTPTR(obj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_POP()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRAP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETARG()
{
    stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETARG()
{
    arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETLOCAL()
{
    stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETLOCAL()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_UINT16()
{
    stack(0, lir->insImmf(GET_UINT16(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEWINIT()
{
    JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
    LIns *proto_ins;
    CHECK_STATUS(getClassPrototype(key, proto_ins));

    LIns* args[] = { proto_ins, cx_ins };
    const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci : &js_Object_tn_ci;
    LIns* v_ins = lir->insCall(ci, args);
    guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDINIT()
{
#ifdef DEBUG
    jsval& v = stackval(-1);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
#endif
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INITPROP()
{
    // All the action is in record_SetPropHit.
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INITELEM()
{
    return record_JSOP_SETELEM();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFSHARP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_USESHARP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCARG()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCLOCAL()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECARG()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECLOCAL()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGINC()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOCALINC()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGDEC()
{
    return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOCALDEC()
{
    return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IMACOP()
{
    JS_ASSERT(cx->fp->imacpc);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ITER()
{
    jsval& v = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(v))
        ABORT_TRACE("for-in on a primitive value");
    ABORT_IF_XML(v);

    jsuint flags = cx->fp->regs->pc[1];

    if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
        if (flags == JSITER_ENUMERATE)
            return call_imacro(iter_imacros.for_in);
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return call_imacro(iter_imacros.for_each);
    } else {
        if (flags == JSITER_ENUMERATE)
            return call_imacro(iter_imacros.for_in_native);
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return call_imacro(iter_imacros.for_each_native);
    }
    ABORT_TRACE("unimplemented JSITER_* flags");
}
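
/*
 * Illustration: |for (var p in o)| runs ITER with JSITER_ENUMERATE, while
 * |for each (var v in o)| adds JSITER_FOREACH.  Objects that supply their
 * own iterator method take the iter_imacros.for_in/for_each paths; plain
 * natives take the *_native variants.
 */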

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEXTITER()
{
    jsval& iterobj_val = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(iterobj_val))
        ABORT_TRACE("for-in on a primitive value");
    ABORT_IF_XML(iterobj_val);
    JSObject* iterobj = JSVAL_TO_OBJECT(iterobj_val);
    JSClass* clasp = STOBJ_GET_CLASS(iterobj);
    LIns* iterobj_ins = get(&iterobj_val);
    if (clasp == &js_IteratorClass || clasp == &js_GeneratorClass) {
        guardClass(iterobj, iterobj_ins, clasp, snapshot(BRANCH_EXIT));
        return call_imacro(nextiter_imacros.native_iter_next);
    }
    return call_imacro(nextiter_imacros.custom_iter_next);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDITER()
{
    LIns* args[] = { stack(-2), cx_ins };
    LIns* ok_ins = lir->insCall(&js_CloseIterator_ci, args);
    guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORNAME()
{
    jsval* vp;
    CHECK_STATUS(name(vp));
    set(vp, stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORPROP()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORELEM()
{
    return record_JSOP_DUP();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORARG()
{
    return record_JSOP_SETARG();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FORLOCAL()
{
    return record_JSOP_SETLOCAL();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_POPN()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BINDNAME()
{
    JSStackFrame *fp = cx->fp;
    JSObject *obj;

    if (fp->fun) {
        // We can't trace BINDNAME in functions that contain direct
        // calls to eval, as they might add bindings which
        // previously-traced references would have to see.
        if (JSFUN_HEAVYWEIGHT_TEST(fp->fun->flags))
            ABORT_TRACE("Can't trace JSOP_BINDNAME in heavyweight functions.");

        // In non-heavyweight functions, we can safely skip the call
        // object, if any.
        obj = OBJ_GET_PARENT(cx, fp->callee);
    } else {
        obj = fp->scopeChain;

        // In global code, fp->scopeChain can only contain blocks
        // whose values are still on the stack.  We never use BINDNAME
        // to refer to these.
        while (OBJ_GET_CLASS(cx, obj) == &js_BlockClass) {
            // The block's values are still on the stack.
            JS_ASSERT(OBJ_GET_PRIVATE(cx, obj) == fp);

            obj = OBJ_GET_PARENT(cx, obj);

            // Blocks always have parents.
            JS_ASSERT(obj);
        }
    }

    if (obj != globalObj)
        ABORT_TRACE("JSOP_BINDNAME must return global object on trace");

    // The trace is specialized to this global object.  Furthermore,
    // we know it is the sole 'global' object on the scope chain: we
    // set globalObj to the scope chain element with no parent, and we
    // reached it starting from the function closure or the current
    // scopeChain, so there is nothing inner to it.  So this must be
    // the right base object.
    stack(0, INS_CONSTPTR(globalObj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETNAME()
{
    jsval& l = stackval(-2);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));

    /*
     * Trace cases that are global code or in lightweight functions scoped by
     * the global object only.
     */
    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj != cx->fp->scopeChain || obj != globalObj)
        ABORT_TRACE("JSOP_SETNAME left operand is not the global object");

    // The rest of the work is in record_SetPropHit.
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THROW()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IN()
{
    jsval& rval = stackval(-1);
    jsval& lval = stackval(-2);

    if (JSVAL_IS_PRIMITIVE(rval))
        ABORT_TRACE("JSOP_IN on non-object right operand");
    JSObject* obj = JSVAL_TO_OBJECT(rval);
    LIns* obj_ins = get(&rval);

    jsid id;
    LIns* x;
    if (JSVAL_IS_INT(lval)) {
        id = INT_JSVAL_TO_JSID(lval);
        LIns* args[] = { makeNumberInt32(get(&lval)), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
    } else if (JSVAL_IS_STRING(lval)) {
        if (!js_ValueToStringId(cx, lval, &id))
            ABORT_TRACE_ERROR("left operand of JSOP_IN didn't convert to a string-id");
        LIns* args[] = { get(&lval), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedProperty_ci, args);
    } else {
        ABORT_TRACE("string or integer expected");
    }

    guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
    x = lir->ins2i(LIR_eq, x, 1);

    JSObject* obj2;
    JSProperty* prop;
    if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop))
        ABORT_TRACE_ERROR("OBJ_LOOKUP_PROPERTY failed in JSOP_IN");
    bool cond = prop != NULL;
    if (prop)
        OBJ_DROP_PROPERTY(cx, obj2, prop);

    /* The interpreter fuses comparisons and the following branch,
       so we have to do that here as well. */
    fuseIf(cx->fp->regs->pc + 1, cond, x);

    /* We update the stack after the guard. This is safe since
       the guard bails out at the comparison and the interpreter
       will therefore re-execute the comparison. This way the
       value of the condition doesn't have to be calculated and
       saved on the stack in most cases. */
    set(&lval, x);
    return JSRS_CONTINUE;
}
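
/*
 * Example of the fusing above: for |if ("p" in o) f();| the interpreter
 * combines the JSOP_IN result with the following branch, so instead of
 * materializing a boolean we guard on the branch direction observed now;
 * if a later iteration would take the other arm, the trace exits and the
 * interpreter re-executes the test.
 */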

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INSTANCEOF()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEBUGGER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOSUB()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RETSUB()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_EXCEPTION()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LINENO()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CONDSWITCH()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CASE()
{
    strictEquality(true, true);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFAULT()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_EVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENUMELEM()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFFUN()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFFUN_FC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFCONST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFVAR()
{
    return JSRS_STOP;
}

jsatomid
TraceRecorder::getFullIndex(ptrdiff_t pcoff)
{
    jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff);
    index += atoms - cx->fp->script->atomMap.vector;
    return index;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LAMBDA()
{
    JSFunction* fun;
    JS_GET_SCRIPT_FUNCTION(cx->fp->script, getFullIndex(), fun);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns *proto_ins;
        CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTPTR(globalObj), proto_ins, INS_CONSTPTR(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        stack(0, x);
        return JSRS_CONTINUE;
    }
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LAMBDA_FC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLEE()
{
    stack(0, get(&cx->fp->argv[-2]));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFPRIMTOP()
{
    // Traces are type-specialized, including null vs. object, so we need do
    // nothing here. The upstream unbox_jsval called after valueOf or toString
    // from an imacro (e.g.) will fork the trace for us, allowing us to just
    // follow along mindlessly :-).
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETCALL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TRY()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FINALLY()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NOP()
{
    return JSRS_CONTINUE;
}
9763 TraceRecorder::record_JSOP_ARGSUB()
9765 JSStackFrame* fp = cx->fp;
9766 if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
9767 uintN slot = GET_ARGNO(fp->regs->pc);
9768 if (slot < fp->fun->nargs && slot < fp->argc && !fp->argsobj) {
9769 stack(0, get(&cx->fp->argv[slot]));
9770 return JSRS_CONTINUE;
9773 ABORT_TRACE("can't trace JSOP_ARGSUB hard case");
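
/*
 * The fast path above covers, e.g., |function f(a, b) { return arguments[0]; }|
 * called as f(1, 2): a formal argument read by constant index with no
 * arguments object allocated.  Out-of-range indices or an escaping
 * |arguments| fall back to the interpreter.
 */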

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARGCNT()
{
    if (!(cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
        stack(0, lir->insImmf(cx->fp->argc));
        return JSRS_CONTINUE;
    }
    ABORT_TRACE("can't trace heavyweight JSOP_ARGCNT");
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
{
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns *proto_ins;
        CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTPTR(globalObj), proto_ins, INS_CONSTPTR(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        var(slot, x);
        return JSRS_CONTINUE;
    }

    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOTOX()
{
    return record_JSOP_GOTO();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFEQX()
{
    return record_JSOP_IFEQ();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CASEX()
{
    strictEquality(true, true);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFAULTX()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return record_JSOP_TABLESWITCH();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return switchop();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_THROWING()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RETRVAL()
{
    return JSRS_STOP;
}
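
/*
 * The *GVAR cases below share a pattern: fp->slots[] holds either the
 * global object's slot number for this variable or JSVAL_NULL when the
 * variable was not optimized to a fixed slot.  In the null case the
 * interpreter re-dispatches to the corresponding *NAME op, so the recorder
 * need do nothing here.
 */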

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return JSRS_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return JSRS_CONTINUE; // We will see JSOP_SETNAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INCGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), 1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DECGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_DECNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), -1);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GVARINC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_NAMEINC from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), 1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GVARDEC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_NAMEDEC from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(globalObj, slot), -1, false);
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_REGEXP()
{
    return JSRS_STOP;
}

// begin JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ANYNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAMEPART()
{
    return record_JSOP_STRING();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAMECONST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_QNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_FILTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENDFILTER()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOXML()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLOBJECT()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLCDATA()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_XMLPI()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETFUNNS()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STARTXML()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return JSRS_STOP;
}

// end JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLPROP()
{
    jsval& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    LIns* this_ins;
    if (!JSVAL_IS_PRIMITIVE(l)) {
        obj = JSVAL_TO_OBJECT(l);
        obj_ins = get(&l);
        this_ins = obj_ins; // |this| for subsequent call
    } else {
        jsint i;
        debug_only(const char* protoname = NULL;)
        if (JSVAL_IS_STRING(l)) {
            i = JSProto_String;
            debug_only(protoname = "String.prototype";)
        } else if (JSVAL_IS_NUMBER(l)) {
            i = JSProto_Number;
            debug_only(protoname = "Number.prototype";)
        } else if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
            if (l == JSVAL_VOID)
                ABORT_TRACE("callprop on void");
            guard(false, lir->ins2i(LIR_eq, get(&l), JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), MISMATCH_EXIT);
            i = JSProto_Boolean;
            debug_only(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
            ABORT_TRACE("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
            ABORT_TRACE_ERROR("GetClassPrototype failed!");

        obj_ins = INS_CONSTPTR(obj);
        debug_only(obj_ins = addName(obj_ins, protoname);)
        this_ins = get(&l); // use primitive as |this|
    }

    JSObject* obj2;
    jsuword pcval;
    CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));

    if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
        ABORT_TRACE("callee is not an object");
    JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));

    if (JSVAL_IS_PRIMITIVE(l)) {
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, PCVAL_TO_OBJECT(pcval));
        if (!PRIMITIVE_THIS_TEST(fun, l))
            ABORT_TRACE("callee does not accept primitive |this|");
    }

    stack(0, this_ins);
    stack(-1, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
    return JSRS_CONTINUE;
}
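
/*
 * Example of the primitive-|this| path above: recording |"abc".charAt(0)|
 * looks the callee up on String.prototype and keeps the string itself as
 * |this|, provided PRIMITIVE_THIS_TEST says the native accepts a primitive
 * this; otherwise we abort rather than wrap the primitive in an object.
 */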

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_DELDESC()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_UINT24()
{
    stack(0, lir->insImmf(GET_UINT24(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->fp->regs->pc);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RESETBASE()
{
    atoms = cx->fp->script->atomMap.vector;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_RESETBASE0()
{
    atoms = cx->fp->script->atomMap.vector;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLELEM()
{
    return record_JSOP_GETELEM();
}
10269 TraceRecorder::record_JSOP_STOP()
10271 JSStackFrame *fp = cx->fp;
10273 if (fp->imacpc) {
10274 // End of imacro, so return true to the interpreter immediately. The
10275 // interpreter's JSOP_STOP case will return from the imacro, back to
10276 // the pc after the calling op, still in the same JSStackFrame.
10277 atoms = fp->script->atomMap.vector;
10278 return JSRS_CONTINUE;
10282 * We know falling off the end of a constructor returns the new object that
10283 * was passed in via fp->argv[-1], while falling off the end of a function
10284 * returns undefined.
10286 * NB: we do not support script rval (eval, API users who want the result
10287 * of the last expression-statement, debugger API calls).
10289 if (fp->flags & JSFRAME_CONSTRUCTING) {
10290 JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
10291 rval_ins = get(&fp->argv[-1]);
10292 } else {
10293 rval_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
10295 clearFrameSlotsFromCache();
10296 return JSRS_CONTINUE;
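
/*
 * Example for the constructing case above: in
 * |function Point(x) { this.x = x; } var p = new Point(1);| the body falls
 * off its end, so the trace's return value is the object in argv[-1] (the
 * new |this|), not undefined.
 */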

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETXPROP()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        ABORT_TRACE("primitive-this for GETXPROP?");

    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj != cx->fp->scopeChain || obj != globalObj)
        return JSRS_STOP;

    jsval* vp;
    CHECK_STATUS(name(vp));
    stack(-1, get(vp));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    JSObject* obj;
    JS_GET_SCRIPT_OBJECT(cx->fp->script, getFullIndex(0), obj);

    LIns* void_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
    for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
        stack(i, void_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    /* We mustn't exit the lexical block we began recording in. */
    if (cx->fp->blockChain != lexicalBlock)
        return JSRS_CONTINUE;
    else
        return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GENERATOR()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_YIELD()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    uint32_t slot = GET_UINT16(cx->fp->regs->pc);
    JS_ASSERT(cx->fp->script->nfixed <= slot);
    JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
    jsval &arrayval = cx->fp->slots[slot];
    JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
    JS_ASSERT(OBJ_IS_DENSE_ARRAY(cx, JSVAL_TO_OBJECT(arrayval)));
    LIns *array_ins = get(&arrayval);
    jsval &elt = stackval(-1);
    LIns *elt_ins = get(&elt);
    box_jsval(elt, elt_ins);

    LIns *args[] = { elt_ins, array_ins, cx_ins };
    LIns *ok_ins = lir->insCall(&js_ArrayCompPush_ci, args);
    guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
    return JSRS_CONTINUE;
}
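
/*
 * JSOP_ARRAYPUSH is emitted for array comprehensions, e.g.
 * |[x * 2 for (x in o)]|: each element value is boxed and appended to the
 * comprehension's temporary dense array via js_ArrayCompPush.
 */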

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return JSRS_STOP;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    LIns* v_ins = stack(-1);
    int n = -1 - GET_UINT16(cx->fp->regs->pc);
    stack(n, v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    CHECK_STATUS(getThis(this_ins));
    /*
     * It's safe to just use cx->fp->thisp here because getThis() returns
     * JSRS_STOP if thisp is not available.
     */
    CHECK_STATUS(getProp(cx->fp->thisp, this_ins));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE1()
{
    atoms += 1 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE2()
{
    atoms += 2 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE3()
{
    atoms += 3 << 16;
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
        return JSRS_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    jsval& v = STOBJ_GET_SLOT(globalObj, slot);
    stack(0, get(&v));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->fp->regs->pc);
    stack(0, var(slot));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->fp->regs->pc);
    stack(0, arg(slot));
    stack(1, INS_CONSTPTR(NULL));
    return JSRS_CONTINUE;
}

/* Functions for use with JSOP_CALLBUILTIN. */

static JSBool
ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
{
    jsval *argv = JS_ARGV(cx, vp);
    JS_ASSERT(JSVAL_IS_INT(argv[0]));
    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
}

static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
    jsval v = OBJECT_TO_JSVAL(obj);
    JSBool ok = js_ValueToIterator(cx, flags, &v);

    if (!ok) {
        js_SetBuiltinError(cx);
        return NULL;
    }
    return JSVAL_TO_OBJECT(v);
}

static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
}

static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
    JSAutoTempValueRooter tvr(cx);
    JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());

    if (!ok) {
        js_SetBuiltinError(cx);
        return JSVAL_ERROR_COOKIE;
    }
    return tvr.value();
}
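
/*
 * Note the error convention in the _tn (traceable native) variants above:
 * on failure they call js_SetBuiltinError() and return a sentinel (NULL or
 * JSVAL_ERROR_COOKIE); the trace checks the builtin error state after the
 * call and side-exits so the interpreter can handle the failure, since
 * traces cannot unwind a JS throw themselves.
 */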

JS_DEFINE_TRCINFO_1(ObjectToIterator,
    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
    (3, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, PC, THIS, 0, 0)))

static const struct BuiltinFunctionInfo {
    JSTraceableNative *tn;
    int nargs;
} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
    {ObjectToIterator_trcinfo, 1},
    {CallIteratorNext_trcinfo, 0},
    {GetProperty_trcinfo, 1},
    {GetElement_trcinfo, 1},
    {SetProperty_trcinfo, 2},
    {SetElement_trcinfo, 2}
};

JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index)
{
    JSRuntime *rt = cx->runtime;
    JSObject *funobj = rt->builtinFunctions[index];

    if (!funobj) {
        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
        JS_ASSERT(index < JS_ARRAY_LENGTH(builtinFunctionInfo));
        const BuiltinFunctionInfo *bfi = &builtinFunctionInfo[index];
        JSFunction *fun = js_NewFunction(cx,
                                         NULL,
                                         JS_DATA_TO_FUNC_PTR(JSNative, bfi->tn),
                                         bfi->nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRACEABLE,
                                         NULL,
                                         NULL);
        if (fun) {
            funobj = FUN_OBJECT(fun);
            STOBJ_CLEAR_PROTO(funobj);
            STOBJ_CLEAR_PARENT(funobj);

            JS_LOCK_GC(rt);
            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
                rt->builtinFunctions[index] = funobj;
            else
                funobj = rt->builtinFunctions[index];
            JS_UNLOCK_GC(rt);
        }
    }
    return funobj;
}
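
/*
 * The locking above is a double-checked pattern: the unlocked read can race
 * with another thread creating the same builtin, so we retest under the GC
 * lock and, if we lost the race, adopt the winner's funobj; ours is left
 * for the GC to collect.
 */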

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLBUILTIN()
{
    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
    if (!obj)
        ABORT_TRACE_ERROR("error in js_GetBuiltinFunction");

    stack(0, get(&stackval(-1)));
    stack(-1, INS_CONSTPTR(obj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, lir->insImmf(GET_INT8(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, lir->insImmf(GET_INT32(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            ABORT_TRACE("non-string primitive JSOP_LENGTH unsupported");
        set(&l, lir->ins1(LIR_i2f, getStringLength(get(&l))));
        return JSRS_CONTINUE;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);
    LIns* v_ins;
    if (OBJ_IS_ARRAY(cx, obj)) {
        if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
            if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
                JS_NOT_REACHED("OBJ_IS_DENSE_ARRAY but not?!?");
                return JSRS_STOP;
            }
        } else {
            if (!guardClass(obj, obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT)))
                ABORT_TRACE("can't trace length property access on non-array");
        }
        v_ins = lir->ins1(LIR_i2f, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH));
    } else {
        if (!OBJ_IS_NATIVE(obj))
            ABORT_TRACE("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return JSRS_CONTINUE;
}
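
/*
 * Summary of the cases above: |a.length| on a (dense or slow) array reads
 * JSSLOT_ARRAY_LENGTH directly under a class guard, |s.length| on a string
 * primitive uses the inline string-length path at the top, and any other
 * native object funnels into the generic getProp() lookup.
 */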

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEWARRAY()
{
    LIns *proto_ins;
    CHECK_STATUS(getClassPrototype(JSProto_Array, proto_ins));

    uint32 len = GET_UINT16(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(len);

    LIns* args[] = { lir->insImm(len), proto_ins, cx_ins };
    LIns* v_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
    guard(false, lir->ins_eq0(v_ins), OOM_EXIT);

    LIns* dslots_ins = NULL;
    uint32 count = 0;
    for (uint32 i = 0; i < len; i++) {
        jsval& v = stackval(int(i) - int(len));
        if (v != JSVAL_HOLE)
            count++;
        LIns* elt_ins = get(&v);
        box_jsval(v, elt_ins);
        stobj_set_dslot(v_ins, i, dslots_ins, elt_ins, "set_array_elt");
    }

    if (count > 0)
        stobj_set_fslot(v_ins, JSSLOT_ARRAY_COUNT, INS_CONST(count), "set_array_count");

    stack(-int(len), v_ins);
    return JSRS_CONTINUE;
}
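
/*
 * Example: a literal such as |[1, , 3]| pushes its elements (JSOP_HOLE,
 * below, supplying the hole), then JSOP_NEWARRAY builds the array; holes
 * are excluded from JSSLOT_ARRAY_COUNT, which tracks the number of
 * non-hole elements.
 */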

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_HOLE)));
    return JSRS_CONTINUE;
}

JSRecordingStatus
TraceRecorder::record_JSOP_LOOP()
{
    return JSRS_CONTINUE;
}

#ifdef JS_JIT_SPEW
/* Prints information about entry typemaps and unstable exits for all peers at a PC */
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
                     uint32 argc)
{
    Fragment* f;
    TreeInfo* ti;
    bool looped = false;
    unsigned length = 0;

    for (f = getLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
        if (!f->vmprivate)
            continue;
        printf("fragment %p:\nENTRY: ", (void*)f);
        ti = (TreeInfo*)f->vmprivate;
        if (looped)
            JS_ASSERT(ti->nStackTypes == length);
        for (unsigned i = 0; i < ti->nStackTypes; i++)
            printf("S%d ", ti->stackTypeMap()[i]);
        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
            printf("G%d ", ti->globalTypeMap()[i]);
        printf("\n");
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            printf("EXIT: ");
            uint8* m = getFullTypeMap(uexit->exit);
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                printf("S%d ", m[i]);
            for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++)
                printf("G%d ", m[uexit->exit->numStackSlots + i]);
            printf("\n");
            uexit = uexit->next;
        }
        length = ti->nStackTypes;
        looped = true;
    }
}
#endif

#define UNUSED(n) \
    JS_REQUIRES_STACK bool \
    TraceRecorder::record_JSOP_UNUSED##n() { \
        JS_NOT_REACHED("JSOP_UNUSED" # n); \
        return false; \