1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "jsstdint.h"
43 #include "jsbit.h" // low-level (NSPR-based) headers next
44 #include "jsprf.h"
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
48 #include <malloc.h>
49 #ifdef _MSC_VER
50 #define alloca _alloca
51 #endif
52 #endif
53 #ifdef SOLARIS
54 #include <alloca.h>
55 #endif
56 #include <limits.h>
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
60 #include "jsarray.h"
61 #include "jsbool.h"
62 #include "jscntxt.h"
63 #include "jsdate.h"
64 #include "jsdbgapi.h"
65 #include "jsemit.h"
66 #include "jsfun.h"
67 #include "jsinterp.h"
68 #include "jsiter.h"
69 #include "jsmath.h"
70 #include "jsobj.h"
71 #include "jsopcode.h"
72 #include "jsregexp.h"
73 #include "jsscope.h"
74 #include "jsscript.h"
75 #include "jsscriptinlines.h"
76 #include "jsstaticcheck.h"
77 #include "jstracer.h"
78 #include "jsxml.h"
80 #include "jsatominlines.h"
81 #include "jsscriptinlines.h"
83 #include "jsautooplen.h" // generated headers last
84 #include "imacros.c.out"
86 using namespace avmplus;
87 using namespace nanojit;
89 #if JS_HAS_XML_SUPPORT
90 #define ABORT_IF_XML(v) \
91 JS_BEGIN_MACRO \
92 if (!JSVAL_IS_PRIMITIVE(v) && OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(v)))\
93 ABORT_TRACE("xml detected"); \
94 JS_END_MACRO
95 #else
96 #define ABORT_IF_XML(v) ((void) 0)
97 #endif
100 * Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
101 * the type. What you want to use is JSVAL_IS_SPECIAL(x) and then handle the
102 * undefined case properly (bug 457363).
104 #undef JSVAL_IS_BOOLEAN
105 #define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)
107 JS_STATIC_ASSERT(sizeof(JSTraceType) == 1);
109 /* Map to translate a type tag into a printable representation. */
110 static const char typeChar[] = "OIDXSNBF";
111 static const char tagChar[] = "OIDISIBI";
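/*
 * A brief note on these maps (not in the original source): each position
 * corresponds to a JSTraceType value, in enum order, so typeChar reads
 * O=object, I=int32, D=double, X=jsval, S=string, N=null, B=pseudo-boolean,
 * F=function; tagChar gives the letter of the jsval tag backing each type.
 */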
113 /* Blacklist parameters. */
116 * Number of iterations of a loop where we start tracing. That is, we don't
117 * start tracing until the beginning of the HOTLOOP-th iteration.
119 #define HOTLOOP 2
121 /* Attempt recording this many times before blacklisting permanently. */
122 #define BL_ATTEMPTS 2
124 /* Skip this many hits before attempting recording again, after an aborted attempt. */
125 #define BL_BACKOFF 32
127 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
128 #define HOTEXIT 1
130 /* Number of times we try to extend the tree along a side exit. */
131 #define MAXEXIT 3
133 /* Maximum number of peer trees allowed. */
134 #define MAXPEERS 9
136 /* Max call depths for inlining. */
137 #define MAX_CALLDEPTH 10
139 /* Max native stack size. */
140 #define MAX_NATIVE_STACK_SLOTS 1024
142 /* Max call stack size. */
143 #define MAX_CALL_STACK_ENTRIES 64
145 /* Max global object size. */
146 #define MAX_GLOBAL_SLOTS 4096
148 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
149 #define MAX_INTERP_STACK_BYTES \
150 (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) + \
151 MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \
152 sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
154 /* Max number of branches per tree. */
155 #define MAX_BRANCHES 32
157 #define CHECK_STATUS(expr) \
158 JS_BEGIN_MACRO \
159 JSRecordingStatus _status = (expr); \
160 if (_status != JSRS_CONTINUE) \
161 return _status; \
162 JS_END_MACRO
164 #ifdef JS_JIT_SPEW
165 #define ABORT_TRACE_RV(msg, value) \
166 JS_BEGIN_MACRO \
167 debug_only_printf(LC_TMAbort, "abort: %d: %s\n", __LINE__, (msg)); \
168 return (value); \
169 JS_END_MACRO
170 #else
171 #define ABORT_TRACE_RV(msg, value) return (value)
172 #endif
174 #define ABORT_TRACE(msg) ABORT_TRACE_RV(msg, JSRS_STOP)
175 #define ABORT_TRACE_ERROR(msg) ABORT_TRACE_RV(msg, JSRS_ERROR)
177 #ifdef JS_JIT_SPEW
178 struct __jitstats {
179 #define JITSTAT(x) uint64 x;
180 #include "jitstats.tbl"
181 #undef JITSTAT
182 } jitstats = { 0LL, };
184 JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);
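/*
 * jitstats.tbl is expanded three times through the JITSTAT x-macro: once for
 * the counter fields above, once for the matching STAT..ID enum below, and
 * once for the JSPropertySpec array that exposes each counter as a property
 * of the jitstats object.
 */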
186 enum jitstat_ids {
187 #define JITSTAT(x) STAT ## x ## ID,
188 #include "jitstats.tbl"
189 #undef JITSTAT
190 STAT_IDS_TOTAL
193 static JSPropertySpec jitstats_props[] = {
194 #define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
195 #include "jitstats.tbl"
196 #undef JITSTAT
197 { 0 }
200 static JSBool
201 jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
203 int index = -1;
205 if (JSVAL_IS_STRING(id)) {
206 JSString* str = JSVAL_TO_STRING(id);
207 if (strcmp(JS_GetStringBytes(str), "HOTLOOP") == 0) {
208 *vp = INT_TO_JSVAL(HOTLOOP);
209 return JS_TRUE;
213 if (JSVAL_IS_INT(id))
214 index = JSVAL_TO_INT(id);
216 uint64 result = 0;
217 switch (index) {
218 #define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
219 #include "jitstats.tbl"
220 #undef JITSTAT
221 default:
222 *vp = JSVAL_VOID;
223 return JS_TRUE;
226 if (result < JSVAL_INT_MAX) {
227 *vp = INT_TO_JSVAL(result);
228 return JS_TRUE;
230 char retstr[64];
231 JS_snprintf(retstr, sizeof retstr, "%llu", result);
232 *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
233 return JS_TRUE;
236 JSClass jitstats_class = {
237 "jitstats",
239 JS_PropertyStub, JS_PropertyStub,
240 jitstats_getProperty, JS_PropertyStub,
241 JS_EnumerateStub, JS_ResolveStub,
242 JS_ConvertStub, NULL,
243 JSCLASS_NO_OPTIONAL_MEMBERS
246 void
247 js_InitJITStatsClass(JSContext *cx, JSObject *glob)
249 JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
252 #define AUDIT(x) (jitstats.x++)
253 #else
254 #define AUDIT(x) ((void)0)
255 #endif /* JS_JIT_SPEW */
258 * INS_CONSTPTR can be used to embed arbitrary pointers into the native code. It should not
259 * be used directly to embed GC thing pointers. Instead, use the INS_CONSTOBJ/FUN/STR/SPROP
260 * variants which ensure that the embedded pointer will be kept alive across GCs.
263 #define INS_CONST(c) addName(lir->insImm(c), #c)
264 #define INS_CONSTPTR(p) addName(lir->insImmPtr(p), #p)
265 #define INS_CONSTWORD(v) addName(lir->insImmPtr((void *) v), #v)
266 #define INS_CONSTOBJ(obj) addName(insImmObj(obj), #obj)
267 #define INS_CONSTFUN(fun) addName(insImmFun(fun), #fun)
268 #define INS_CONSTSTR(str) addName(insImmStr(str), #str)
269 #define INS_CONSTSPROP(sprop) addName(insImmSprop(sprop), #sprop)
270 #define INS_ATOM(atom) INS_CONSTSTR(ATOM_TO_STRING(atom))
271 #define INS_NULL() INS_CONSTPTR(NULL)
272 #define INS_VOID() INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))
274 static GC gc = GC();
275 static avmplus::AvmCore s_core = avmplus::AvmCore();
276 static avmplus::AvmCore* core = &s_core;
278 /* Allocator SPI implementation. */
280 void*
281 nanojit::Allocator::allocChunk(size_t nbytes)
283 VMAllocator *vma = (VMAllocator*)this;
284 JS_ASSERT(!vma->outOfMemory());
285 void *p = malloc(nbytes);
286 if (!p) {
287 JS_ASSERT(nbytes < sizeof(vma->mReserve));
288 vma->mOutOfMemory = true;
289 p = (void*) &vma->mReserve[0];
291 vma->mSize += nbytes;
292 return p;
295 void
296 nanojit::Allocator::freeChunk(void *p) {
297 VMAllocator *vma = (VMAllocator*)this;
298 if (p != &vma->mReserve[0])
299 free(p);
302 void
303 nanojit::Allocator::postReset() {
304 VMAllocator *vma = (VMAllocator*)this;
305 vma->mOutOfMemory = false;
306 vma->mSize = 0;
310 #ifdef JS_JIT_SPEW
311 static void
312 DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
313 #endif
316 * We really need a better way to configure the JIT. Shaver, where is
317 * my fancy JIT object?
319 * NB: this is raced on, if jstracer.cpp should ever be running MT.
320 * I think it's harmless tho.
322 static bool did_we_check_processor_features = false;
324 /* ------ Debug logging control ------ */
327 * All the logging control stuff lives in here. It is shared between
328 * all threads, but I think that's OK.
330 LogControl js_LogController;
332 #ifdef JS_JIT_SPEW
335 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
336 * Also harmless.
338 static bool did_we_set_up_debug_logging = false;
340 static void
341 InitJITLogController()
343 char *tm, *tmf;
344 uint32_t bits;
346 js_LogController.lcbits = 0;
348 tm = getenv("TRACEMONKEY");
349 if (tm) {
350 fflush(NULL);
351 printf(
352 "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
353 "Try 'TMFLAGS=help js -j' for a list of options.\n"
355 exit(0);
358 tmf = getenv("TMFLAGS");
359 if (!tmf) return;
361 /* Using strstr() is really a cheap hack as far as flag decoding goes. */
362 if (strstr(tmf, "help")) {
363 fflush(NULL);
364 printf(
365 "usage: TMFLAGS=option,option,option,... where options can be:\n"
366 "\n"
367 " help show this message\n"
368 " ------ options for jstracer & jsregexp ------\n"
369 " minimal ultra-minimalist output; try this first\n"
370 " full everything except 'treevis' and 'nocodeaddrs'\n"
371 " tracer tracer lifetime (FIXME:better description)\n"
372 " recorder trace recording stuff (FIXME:better description)\n"
373 " abort show trace recording aborts\n"
374 " stats show trace recording stats\n"
375 " regexp show compilation & entry for regexps\n"
376 " treevis spew that tracevis/tree.py can parse\n"
377 " ------ options for Nanojit ------\n"
378 " liveness show LIR liveness at start of rdr pipeline\n"
379 " readlir show LIR as it enters the reader pipeline\n"
380 " aftersf show LIR after StackFilter\n"
381 " regalloc show regalloc details\n"
382 " assembly show final aggregated assembly code\n"
383 " nocodeaddrs don't show code addresses in assembly listings\n"
384 "\n"
386 exit(0);
387 /*NOTREACHED*/
390 bits = 0;
392 /* flags for jstracer.cpp */
393 if (strstr(tmf, "minimal") || strstr(tmf, "full")) bits |= LC_TMMinimal;
394 if (strstr(tmf, "tracer") || strstr(tmf, "full")) bits |= LC_TMTracer;
395 if (strstr(tmf, "recorder") || strstr(tmf, "full")) bits |= LC_TMRecorder;
396 if (strstr(tmf, "abort") || strstr(tmf, "full")) bits |= LC_TMAbort;
397 if (strstr(tmf, "stats") || strstr(tmf, "full")) bits |= LC_TMStats;
398 if (strstr(tmf, "regexp") || strstr(tmf, "full")) bits |= LC_TMRegexp;
399 if (strstr(tmf, "treevis")) bits |= LC_TMTreeVis;
401 /* flags for nanojit */
402 if (strstr(tmf, "liveness") || strstr(tmf, "full")) bits |= LC_Liveness;
403 if (strstr(tmf, "readlir") || strstr(tmf, "full")) bits |= LC_ReadLIR;
404 if (strstr(tmf, "aftersf") || strstr(tmf, "full")) bits |= LC_AfterSF;
405 if (strstr(tmf, "regalloc") || strstr(tmf, "full")) bits |= LC_RegAlloc;
406 if (strstr(tmf, "assembly") || strstr(tmf, "full")) bits |= LC_Assembly;
407 if (strstr(tmf, "nocodeaddrs")) bits |= LC_NoCodeAddrs;
409 js_LogController.lcbits = bits;
410 return;
413 #endif
415 #if defined DEBUG
416 static const char*
417 getExitName(ExitType type)
419 static const char* exitNames[] =
421 #define MAKE_EXIT_STRING(x) #x,
422 JS_TM_EXITCODES(MAKE_EXIT_STRING)
423 #undef MAKE_EXIT_STRING
424 NULL
427 JS_ASSERT(type < TOTAL_EXIT_TYPES);
429 return exitNames[type];
431 #endif
434 * The entire VM shares one oracle. Collisions and concurrent updates are
435 * tolerated and worst case cause performance regressions.
437 static Oracle oracle;
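/*
 * During recording, the Tracker maps the addresses of interpreter-visible
 * values (jsvals in stack frames and in the global object) to the LIR
 * instructions that currently represent them. It is a simple page-granular
 * map, torn down when recording ends.
 */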
439 Tracker::Tracker()
441 pagelist = 0;
444 Tracker::~Tracker()
446 clear();
449 jsuword
450 Tracker::getPageBase(const void* v) const
452 return jsuword(v) & ~jsuword(NJ_PAGE_SIZE-1);
455 struct Tracker::Page*
456 Tracker::findPage(const void* v) const
458 jsuword base = getPageBase(v);
459 struct Tracker::Page* p = pagelist;
460 while (p) {
461 if (p->base == base) {
462 return p;
464 p = p->next;
466 return 0;
469 struct Tracker::Page*
470 Tracker::addPage(const void* v) {
471 jsuword base = getPageBase(v);
472 struct Tracker::Page* p = (struct Tracker::Page*)
473 GC::Alloc(sizeof(*p) - sizeof(p->map) + (NJ_PAGE_SIZE >> 2) * sizeof(LIns*));
474 p->base = base;
475 p->next = pagelist;
476 pagelist = p;
477 return p;
480 void
481 Tracker::clear()
483 while (pagelist) {
484 Page* p = pagelist;
485 pagelist = pagelist->next;
486 GC::Free(p);
490 bool
491 Tracker::has(const void *v) const
493 return get(v) != NULL;
496 #if defined NANOJIT_64BIT
497 #define PAGEMASK 0x7ff
498 #else
499 #define PAGEMASK 0xfff
500 #endif
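/*
 * A tracked address is located by first finding its page (see findPage above)
 * and then indexing that page's map with the address's low bits; get() and
 * set() below both use the same (address & PAGEMASK) >> 2 computation.
 */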
502 LIns*
503 Tracker::get(const void* v) const
505 struct Tracker::Page* p = findPage(v);
506 if (!p)
507 return NULL;
508 return p->map[(jsuword(v) & PAGEMASK) >> 2];
511 void
512 Tracker::set(const void* v, LIns* i)
514 struct Tracker::Page* p = findPage(v);
515 if (!p)
516 p = addPage(v);
517 p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
520 static inline jsuint
521 argSlots(JSStackFrame* fp)
523 return JS_MAX(fp->argc, fp->fun->nargs);
526 static inline bool
527 isNumber(jsval v)
529 return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
532 static inline jsdouble
533 asNumber(jsval v)
535 JS_ASSERT(isNumber(v));
536 if (JSVAL_IS_DOUBLE(v))
537 return *JSVAL_TO_DOUBLE(v);
538 return (jsdouble)JSVAL_TO_INT(v);
541 static inline bool
542 isInt32(jsval v)
544 if (!isNumber(v))
545 return false;
546 jsdouble d = asNumber(v);
547 jsint i;
548 return JSDOUBLE_IS_INT(d, i);
551 static inline jsint
552 asInt32(jsval v)
554 JS_ASSERT(isNumber(v));
555 if (JSVAL_IS_INT(v))
556 return JSVAL_TO_INT(v);
557 #ifdef DEBUG
558 jsint i;
559 JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i));
560 #endif
561 return jsint(*JSVAL_TO_DOUBLE(v));
564 /* Return TT_DOUBLE for all numbers (int and double) and the tag otherwise. */
565 static inline JSTraceType
566 GetPromotedType(jsval v)
568 if (JSVAL_IS_INT(v))
569 return TT_DOUBLE;
570 if (JSVAL_IS_OBJECT(v)) {
571 if (JSVAL_IS_NULL(v))
572 return TT_NULL;
573 if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
574 return TT_FUNCTION;
575 return TT_OBJECT;
577 uint8_t tag = JSVAL_TAG(v);
578 JS_ASSERT(tag == JSVAL_DOUBLE || tag == JSVAL_STRING || tag == JSVAL_SPECIAL);
579 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_DOUBLE) == JSVAL_DOUBLE);
580 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
581 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
582 return JSTraceType(tag);
585 /* Return TT_INT32 for all whole numbers that fit into signed 32-bit and the tag otherwise. */
586 static inline JSTraceType
587 getCoercedType(jsval v)
589 if (isInt32(v))
590 return TT_INT32;
591 if (JSVAL_IS_OBJECT(v)) {
592 if (JSVAL_IS_NULL(v))
593 return TT_NULL;
594 if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
595 return TT_FUNCTION;
596 return TT_OBJECT;
598 uint8_t tag = JSVAL_TAG(v);
599 JS_ASSERT(tag == JSVAL_DOUBLE || tag == JSVAL_STRING || tag == JSVAL_SPECIAL);
600 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_DOUBLE) == JSVAL_DOUBLE);
601 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
602 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
603 return JSTraceType(tag);
606 /* Constant seed and accumulate step borrowed from the DJB hash. */
608 const uintptr_t ORACLE_MASK = ORACLE_SIZE - 1;
609 JS_STATIC_ASSERT((ORACLE_MASK & ORACLE_SIZE) == 0);
611 const uintptr_t FRAGMENT_TABLE_MASK = FRAGMENT_TABLE_SIZE - 1;
612 JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK & FRAGMENT_TABLE_SIZE) == 0);
614 const uintptr_t HASH_SEED = 5381;
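/*
 * One step of the DJB accumulate: h = h * 33 + i, truncated to the given
 * power-of-two mask. Used by the slot and fragment hashes below.
 */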
616 static inline void
617 HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
619 h = ((h << 5) + h + (mask & i)) & mask;
622 static JS_REQUIRES_STACK inline int
623 StackSlotHash(JSContext* cx, unsigned slot)
625 uintptr_t h = HASH_SEED;
626 HashAccum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
627 HashAccum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
628 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
629 return int(h);
632 static JS_REQUIRES_STACK inline int
633 GlobalSlotHash(JSContext* cx, unsigned slot)
635 uintptr_t h = HASH_SEED;
636 JSStackFrame* fp = cx->fp;
638 while (fp->down)
639 fp = fp->down;
641 HashAccum(h, uintptr_t(fp->script), ORACLE_MASK);
642 HashAccum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))), ORACLE_MASK);
643 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
644 return int(h);
647 static inline int
648 PCHash(jsbytecode* pc)
650 return int(uintptr_t(pc) & ORACLE_MASK);
653 Oracle::Oracle()
655 /* Grow the oracle bitsets to their (fixed) size here, once. */
656 _stackDontDemote.set(ORACLE_SIZE-1);
657 _globalDontDemote.set(ORACLE_SIZE-1);
658 clear();
661 /* Tell the oracle that a certain global variable should not be demoted. */
662 JS_REQUIRES_STACK void
663 Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
665 _globalDontDemote.set(GlobalSlotHash(cx, slot));
668 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
669 JS_REQUIRES_STACK bool
670 Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
672 return _globalDontDemote.get(GlobalSlotHash(cx, slot));
675 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
676 JS_REQUIRES_STACK void
677 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
679 _stackDontDemote.set(StackSlotHash(cx, slot));
682 /* Consult with the oracle whether we shouldn't demote a certain slot. */
683 JS_REQUIRES_STACK bool
684 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
686 return _stackDontDemote.get(StackSlotHash(cx, slot));
689 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
690 void
691 Oracle::markInstructionUndemotable(jsbytecode* pc)
693 _pcDontDemote.set(PCHash(pc));
696 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
697 bool
698 Oracle::isInstructionUndemotable(jsbytecode* pc) const
700 return _pcDontDemote.get(PCHash(pc));
703 void
704 Oracle::clearDemotability()
706 _stackDontDemote.reset();
707 _globalDontDemote.reset();
708 _pcDontDemote.reset();
711 JS_REQUIRES_STACK static JS_INLINE void
712 MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
714 if (slot < ti->nStackTypes) {
715 oracle.markStackSlotUndemotable(cx, slot);
716 return;
719 uint16* gslots = ti->globalSlots->data();
720 oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
723 static JS_REQUIRES_STACK inline bool
724 IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
726 if (slot < ti->nStackTypes)
727 return oracle.isStackSlotUndemotable(cx, slot);
729 uint16* gslots = ti->globalSlots->data();
730 return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
733 struct PCHashEntry : public JSDHashEntryStub {
734 size_t count;
737 #define PC_HASH_COUNT 1024
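/*
 * Blacklisting rewrites the bytecode at the loop header: a location that keeps
 * failing to record has its JSOP_LOOP overwritten with JSOP_NOP, so the loop
 * edge is no longer treated as a candidate for recording. AttemptCompilation
 * below restores the JSOP_LOOP to give the location another chance.
 */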
739 static void
740 Blacklist(jsbytecode* pc)
742 AUDIT(blacklisted);
743 JS_ASSERT(*pc == JSOP_LOOP || *pc == JSOP_NOP);
744 *pc = JSOP_NOP;
747 static void
748 Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree = NULL)
750 JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
752 if (table->ops) {
753 PCHashEntry *entry = (PCHashEntry *)
754 JS_DHashTableOperate(table, pc, JS_DHASH_ADD);
756 if (entry) {
757 if (!entry->key) {
758 entry->key = pc;
759 JS_ASSERT(entry->count == 0);
761 JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
762 if (entry->count++ > (BL_ATTEMPTS * MAXPEERS)) {
763 entry->count = 0;
764 Blacklist(pc);
765 return;
770 if (tree) {
771 tree->hits() -= BL_BACKOFF;
774 * In case there is no entry or no table (due to OOM) or some
775 * serious imbalance in the recording-attempt distribution on a
776 * multitree, give each tree another chance to blacklist here as
777 * well.
779 if (++tree->recordAttempts > BL_ATTEMPTS)
780 Blacklist(pc);
784 static void
785 ResetRecordingAttempts(JSContext *cx, jsbytecode* pc)
787 JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
788 if (table->ops) {
789 PCHashEntry *entry = (PCHashEntry *)
790 JS_DHashTableOperate(table, pc, JS_DHASH_LOOKUP);
792 if (JS_DHASH_ENTRY_IS_FREE(&(entry->hdr)))
793 return;
794 JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
795 entry->count = 0;
799 static inline size_t
800 FragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
802 uintptr_t h = HASH_SEED;
803 HashAccum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
804 HashAccum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
805 HashAccum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
806 HashAccum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
807 return size_t(h);
811 * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
812 * pushed for the innermost JS frame. This is required as part of the fragment
813 * key because the fragment will write those arguments back to the interpreter
814 * stack when it exits, using its typemap, which implicitly incorporates a
815 * given value of argc. Without this feature, a fragment could be called as an
816 * inner tree with two different values of argc, and entry type checking or
817 * exit frame synthesis could crash.
819 struct VMFragment : public Fragment
821 VMFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc) :
822 Fragment(_ip),
823 first(NULL),
824 next(NULL),
825 peer(NULL),
826 globalObj(_globalObj),
827 globalShape(_globalShape),
828 argc(_argc)
831 inline TreeInfo* getTreeInfo() {
832 return (TreeInfo*)vmprivate;
835 VMFragment* first;
836 VMFragment* next;
837 VMFragment* peer;
838 JSObject* globalObj;
839 uint32 globalShape;
840 uint32 argc;
843 static VMFragment*
844 getVMFragment(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
845 uint32 argc)
847 size_t h = FragmentHash(ip, globalObj, globalShape, argc);
848 VMFragment* vf = tm->vmfragments[h];
849 while (vf &&
850 ! (vf->globalObj == globalObj &&
851 vf->globalShape == globalShape &&
852 vf->ip == ip &&
853 vf->argc == argc)) {
854 vf = vf->next;
856 return vf;
859 static VMFragment*
860 getLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
862 return getVMFragment(tm, ip, globalObj, globalShape, argc);
865 static VMFragment*
866 getAnchor(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
868 VMFragment *f = new (*tm->allocator) VMFragment(ip, globalObj, globalShape, argc);
869 JS_ASSERT(f);
871 VMFragment *p = getVMFragment(tm, ip, globalObj, globalShape, argc);
873 if (p) {
874 f->first = p;
875 /* append at the end of the peer list */
876 VMFragment* next;
877 while ((next = p->peer) != NULL)
878 p = next;
879 p->peer = f;
880 } else {
881 /* this is the first fragment */
882 f->first = f;
883 size_t h = FragmentHash(ip, globalObj, globalShape, argc);
884 f->next = tm->vmfragments[h];
885 tm->vmfragments[h] = f;
887 f->root = f;
888 return f;
891 #ifdef DEBUG
892 static void
893 AssertTreeIsUnique(JSTraceMonitor* tm, VMFragment* f, TreeInfo* ti)
895 JS_ASSERT(f->root == f);
898 * Check for duplicate entry type maps. This is always wrong and hints at
899 * trace explosion since we are trying to stabilize something without
900 * properly connecting peer edges.
902 TreeInfo* ti_other;
903 for (VMFragment* peer = getLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc);
904 peer != NULL;
905 peer = peer->peer) {
906 if (!peer->code() || peer == f)
907 continue;
908 ti_other = (TreeInfo*)peer->vmprivate;
909 JS_ASSERT(ti_other);
910 JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
913 #endif
915 static void
916 AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
917 uint32 argc)
919 /* If we already permanently blacklisted the location, undo that. */
920 JS_ASSERT(*(jsbytecode*)pc == JSOP_NOP || *(jsbytecode*)pc == JSOP_LOOP);
921 *(jsbytecode*)pc = JSOP_LOOP;
922 ResetRecordingAttempts(cx, pc);
924 /* Breathe new life into all peer fragments at the designated loop header. */
925 VMFragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj), argc);
926 if (!f) {
928 * If the global object's shape changed, we can't easily find the
929 * corresponding loop header via a hash table lookup. In this case
930 * we simply bail here and hope that the fragment has another
931 * outstanding compilation attempt. This case is extremely rare.
933 return;
935 JS_ASSERT(f->root == f);
936 f = f->first;
937 while (f) {
938 JS_ASSERT(f->root == f);
939 --f->recordAttempts;
940 f->hits() = HOTLOOP;
941 f = f->peer;
945 // Forward declarations.
946 JS_DEFINE_CALLINFO_1(static, DOUBLE, i2f, INT32, 1, 1)
947 JS_DEFINE_CALLINFO_1(static, DOUBLE, u2f, UINT32, 1, 1)
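/*
 * On soft-float targets an int-to-double conversion is emitted as
 * qjoin(pcall(i2f), callh) rather than a single LIR_i2f, so the helpers below
 * must recognize both shapes when looking for promoted integers.
 */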
949 static bool
950 isi2f(LIns* i)
952 if (i->isop(LIR_i2f))
953 return true;
955 if (nanojit::AvmCore::config.soft_float &&
956 i->isop(LIR_qjoin) &&
957 i->oprnd1()->isop(LIR_pcall) &&
958 i->oprnd2()->isop(LIR_callh)) {
959 if (i->oprnd1()->callInfo() == &i2f_ci)
960 return true;
963 return false;
966 static bool
967 isu2f(LIns* i)
969 if (i->isop(LIR_u2f))
970 return true;
972 if (nanojit::AvmCore::config.soft_float &&
973 i->isop(LIR_qjoin) &&
974 i->oprnd1()->isop(LIR_pcall) &&
975 i->oprnd2()->isop(LIR_callh)) {
976 if (i->oprnd1()->callInfo() == &u2f_ci)
977 return true;
980 return false;
983 static LIns*
984 iu2fArg(LIns* i)
986 if (nanojit::AvmCore::config.soft_float &&
987 i->isop(LIR_qjoin)) {
988 return i->oprnd1()->arg(0);
991 return i->oprnd1();
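/*
 * Recover the 32-bit integer operand from a value known to be a promoted int
 * or uint (see isPromoteInt/isPromoteUint below): strip the conversion, or
 * re-emit a double constant as the equivalent integer immediate.
 */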
994 static LIns*
995 demote(LirWriter *out, LIns* i)
997 if (i->isCall())
998 return i->callArgN(0);
999 if (isi2f(i) || isu2f(i))
1000 return iu2fArg(i);
1001 if (i->isconst())
1002 return i;
1003 JS_ASSERT(i->isconstf());
1004 double cf = i->imm64f();
1005 int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
1006 return out->insImm(ci);
1009 static bool
1010 isPromoteInt(LIns* i)
1012 if (isi2f(i) || i->isconst())
1013 return true;
1014 if (!i->isconstf())
1015 return false;
1016 jsdouble d = i->imm64f();
1017 return d == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d);
1020 static bool
1021 isPromoteUint(LIns* i)
1023 if (isu2f(i) || i->isconst())
1024 return true;
1025 if (!i->isconstf())
1026 return false;
1027 jsdouble d = i->imm64f();
1028 return d == jsdouble(jsuint(d)) && !JSDOUBLE_IS_NEGZERO(d);
1031 static bool
1032 isPromote(LIns* i)
1034 return isPromoteInt(i) || isPromoteUint(i);
1037 static bool
1038 IsConst(LIns* i, int32_t c)
1040 return i->isconst() && i->imm32() == c;
1044 * Determine whether this operand is guaranteed to not overflow the specified
1045 * integer operation.
1047 static bool
1048 IsOverflowSafe(LOpcode op, LIns* i)
1050 LIns* c;
1051 switch (op) {
1052 case LIR_add:
1053 case LIR_sub:
1054 return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
1055 ((c->imm32() & 0xc0000000) == 0)) ||
1056 (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
1057 ((c->imm32() > 0)));
1058 default:
1059 JS_ASSERT(op == LIR_mul);
1061 return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
1062 ((c->imm32() & 0xffff0000) == 0)) ||
1063 (i->isop(LIR_ush) && ((c = i->oprnd2())->isconst()) &&
1064 ((c->imm32() >= 16)));
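/*
 * The checks above are conservative: for add/sub the operand must have its
 * top two bits masked off or come from a signed right shift, and for mul it
 * must be masked to the low 16 bits or come from an unsigned shift by at
 * least 16, i.e. small enough that the recorder treats the operation as
 * unable to overflow.
 */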
1067 /* soft float support */
1069 static jsdouble FASTCALL
1070 fneg(jsdouble x)
1072 return -x;
1074 JS_DEFINE_CALLINFO_1(static, DOUBLE, fneg, DOUBLE, 1, 1)
1076 static jsdouble FASTCALL
1077 i2f(int32 i)
1079 return i;
1082 static jsdouble FASTCALL
1083 u2f(jsuint u)
1085 return u;
1088 static int32 FASTCALL
1089 fcmpeq(jsdouble x, jsdouble y)
1091 return x == y;
1093 JS_DEFINE_CALLINFO_2(static, INT32, fcmpeq, DOUBLE, DOUBLE, 1, 1)
1095 static int32 FASTCALL
1096 fcmplt(jsdouble x, jsdouble y)
1098 return x < y;
1100 JS_DEFINE_CALLINFO_2(static, INT32, fcmplt, DOUBLE, DOUBLE, 1, 1)
1102 static int32 FASTCALL
1103 fcmple(jsdouble x, jsdouble y)
1105 return x <= y;
1107 JS_DEFINE_CALLINFO_2(static, INT32, fcmple, DOUBLE, DOUBLE, 1, 1)
1109 static int32 FASTCALL
1110 fcmpgt(jsdouble x, jsdouble y)
1112 return x > y;
1114 JS_DEFINE_CALLINFO_2(static, INT32, fcmpgt, DOUBLE, DOUBLE, 1, 1)
1116 static int32 FASTCALL
1117 fcmpge(jsdouble x, jsdouble y)
1119 return x >= y;
1121 JS_DEFINE_CALLINFO_2(static, INT32, fcmpge, DOUBLE, DOUBLE, 1, 1)
1123 static jsdouble FASTCALL
1124 fmul(jsdouble x, jsdouble y)
1126 return x * y;
1128 JS_DEFINE_CALLINFO_2(static, DOUBLE, fmul, DOUBLE, DOUBLE, 1, 1)
1130 static jsdouble FASTCALL
1131 fadd(jsdouble x, jsdouble y)
1133 return x + y;
1135 JS_DEFINE_CALLINFO_2(static, DOUBLE, fadd, DOUBLE, DOUBLE, 1, 1)
1137 static jsdouble FASTCALL
1138 fdiv(jsdouble x, jsdouble y)
1140 return x / y;
1142 JS_DEFINE_CALLINFO_2(static, DOUBLE, fdiv, DOUBLE, DOUBLE, 1, 1)
1144 static jsdouble FASTCALL
1145 fsub(jsdouble x, jsdouble y)
1147 return x - y;
1149 JS_DEFINE_CALLINFO_2(static, DOUBLE, fsub, DOUBLE, DOUBLE, 1, 1)
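/*
 * SoftFloatFilter rewrites floating-point LIR into calls to the helper
 * functions above on targets without hardware FP: unary and binary fp ops
 * become quad-returning calls (a qjoin of the call and its callh half), and
 * fp comparisons become integer helper calls compared against 1.
 */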
1151 class SoftFloatFilter: public LirWriter
1153 public:
1154 SoftFloatFilter(LirWriter* out):
1155 LirWriter(out)
1159 LIns* quadCall(const CallInfo *ci, LIns* args[]) {
1160 LInsp qlo, qhi;
1162 qlo = out->insCall(ci, args);
1163 qhi = out->ins1(LIR_callh, qlo);
1164 return out->qjoin(qlo, qhi);
1167 LIns* ins1(LOpcode v, LIns* s0)
1169 if (v == LIR_fneg)
1170 return quadCall(&fneg_ci, &s0);
1172 if (v == LIR_i2f)
1173 return quadCall(&i2f_ci, &s0);
1175 if (v == LIR_u2f)
1176 return quadCall(&u2f_ci, &s0);
1178 return out->ins1(v, s0);
1181 LIns* ins2(LOpcode v, LIns* s0, LIns* s1)
1183 LIns* args[2];
1184 LIns* bv;
1186 // change the numeric value and order of these LIR opcodes and die
1187 if (LIR_fadd <= v && v <= LIR_fdiv) {
1188 static const CallInfo *fmap[] = { &fadd_ci, &fsub_ci, &fmul_ci, &fdiv_ci };
1190 args[0] = s1;
1191 args[1] = s0;
1193 return quadCall(fmap[v - LIR_fadd], args);
1196 if (LIR_feq <= v && v <= LIR_fge) {
1197 static const CallInfo *fmap[] = { &fcmpeq_ci, &fcmplt_ci, &fcmpgt_ci, &fcmple_ci, &fcmpge_ci };
1199 args[0] = s1;
1200 args[1] = s0;
1202 bv = out->insCall(fmap[v - LIR_feq], args);
1203 return out->ins2(LIR_eq, bv, out->insImm(1));
1206 return out->ins2(v, s0, s1);
1209 LIns* insCall(const CallInfo *ci, LIns* args[])
1211 // if the return type is ARGSIZE_F, we have
1212 // to do a quadCall(qjoin(call,callh))
1213 if ((ci->_argtypes & ARGSIZE_MASK_ANY) == ARGSIZE_F)
1214 return quadCall(ci, args);
1216 return out->insCall(ci, args);
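/*
 * FuncFilter is a peephole pass over the recorded LIR: it folds comparisons
 * of promoted integers back to integer compares, collapses a 32-bit add built
 * out of 16-bit halves into a single LIR_add, and strips redundant
 * double<->int conversions around builtins such as js_DoubleToInt32 and
 * js_BoxDouble.
 */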
1220 class FuncFilter: public LirWriter
1222 public:
1223 FuncFilter(LirWriter* out):
1224 LirWriter(out)
1228 LIns* ins2(LOpcode v, LIns* s0, LIns* s1)
1230 if (s0 == s1 && v == LIR_feq) {
1231 if (isPromote(s0)) {
1232 // double(int) and double(uint) cannot be nan
1233 return insImm(1);
1235 if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
1236 LIns* lhs = s0->oprnd1();
1237 LIns* rhs = s0->oprnd2();
1238 if (isPromote(lhs) && isPromote(rhs)) {
1239 // add/sub/mul promoted ints can't be nan
1240 return insImm(1);
1243 } else if (LIR_feq <= v && v <= LIR_fge) {
1244 if (isPromoteInt(s0) && isPromoteInt(s1)) {
1245 // demote fcmp to cmp
1246 v = LOpcode(v + (LIR_eq - LIR_feq));
1247 return out->ins2(v, demote(out, s0), demote(out, s1));
1248 } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
1249 // uint compare
1250 v = LOpcode(v + (LIR_eq - LIR_feq));
1251 if (v != LIR_eq)
1252 v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
1253 return out->ins2(v, demote(out, s0), demote(out, s1));
1255 } else if (v == LIR_or &&
1256 s0->isop(LIR_lsh) && IsConst(s0->oprnd2(), 16) &&
1257 s1->isop(LIR_and) && IsConst(s1->oprnd2(), 0xffff)) {
1258 LIns* msw = s0->oprnd1();
1259 LIns* lsw = s1->oprnd1();
1260 LIns* x;
1261 LIns* y;
1262 if (lsw->isop(LIR_add) &&
1263 lsw->oprnd1()->isop(LIR_and) &&
1264 lsw->oprnd2()->isop(LIR_and) &&
1265 IsConst(lsw->oprnd1()->oprnd2(), 0xffff) &&
1266 IsConst(lsw->oprnd2()->oprnd2(), 0xffff) &&
1267 msw->isop(LIR_add) &&
1268 msw->oprnd1()->isop(LIR_add) &&
1269 msw->oprnd2()->isop(LIR_rsh) &&
1270 msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
1271 msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
1272 IsConst(msw->oprnd2()->oprnd2(), 16) &&
1273 IsConst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
1274 IsConst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
1275 (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
1276 (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
1277 lsw == msw->oprnd2()->oprnd1()) {
1278 return out->ins2(LIR_add, x, y);
1282 return out->ins2(v, s0, s1);
1285 LIns* insCall(const CallInfo *ci, LIns* args[])
1287 if (ci == &js_DoubleToUint32_ci) {
1288 LIns* s0 = args[0];
1289 if (s0->isconstf())
1290 return out->insImm(js_DoubleToECMAUint32(s0->imm64f()));
1291 if (isi2f(s0) || isu2f(s0))
1292 return iu2fArg(s0);
1293 } else if (ci == &js_DoubleToInt32_ci) {
1294 LIns* s0 = args[0];
1295 if (s0->isconstf())
1296 return out->insImm(js_DoubleToECMAInt32(s0->imm64f()));
1297 if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
1298 LIns* lhs = s0->oprnd1();
1299 LIns* rhs = s0->oprnd2();
1300 if (isPromote(lhs) && isPromote(rhs)) {
1301 LOpcode op = LOpcode(s0->opcode() & ~LIR64);
1302 return out->ins2(op, demote(out, lhs), demote(out, rhs));
1305 if (isi2f(s0) || isu2f(s0))
1306 return iu2fArg(s0);
1308 // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
1309 if (s0->isCall()) {
1310 const CallInfo* ci2 = s0->callInfo();
1311 if (ci2 == &js_UnboxDouble_ci) {
1312 LIns* args2[] = { s0->callArgN(0) };
1313 return out->insCall(&js_UnboxInt32_ci, args2);
1314 } else if (ci2 == &js_StringToNumber_ci) {
1315 // callArgN's ordering is that as seen by the builtin, not as stored in
1316 // args here. True story!
1317 LIns* args2[] = { s0->callArgN(1), s0->callArgN(0) };
1318 return out->insCall(&js_StringToInt32_ci, args2);
1319 } else if (ci2 == &js_String_p_charCodeAt0_ci) {
1320 // Use a fast path builtin for a charCodeAt that converts to an int right away.
1321 LIns* args2[] = { s0->callArgN(0) };
1322 return out->insCall(&js_String_p_charCodeAt0_int_ci, args2);
1323 } else if (ci2 == &js_String_p_charCodeAt_ci) {
1324 LIns* idx = s0->callArgN(1);
1325 // If the index is not already an integer, force it to be an integer.
1326 idx = isPromote(idx)
1327 ? demote(out, idx)
1328 : out->insCall(&js_DoubleToInt32_ci, &idx);
1329 LIns* args2[] = { idx, s0->callArgN(0) };
1330 return out->insCall(&js_String_p_charCodeAt_int_ci, args2);
1333 } else if (ci == &js_BoxDouble_ci) {
1334 LIns* s0 = args[0];
1335 JS_ASSERT(s0->isQuad());
1336 if (isPromoteInt(s0)) {
1337 LIns* args2[] = { demote(out, s0), args[1] };
1338 return out->insCall(&js_BoxInt32_ci, args2);
1340 if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
1341 return s0->callArgN(0);
1343 return out->insCall(ci, args);
1348 * Visit the values in the given JSStackFrame that the tracer cares about. This
1349 * visitor function is (implicitly) the primary definition of the native stack
1350 * area layout. There are a few other independent pieces of code that must be
1351 * maintained to assume the same layout. They are marked like this:
1353 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
1355 template <typename Visitor>
1356 static JS_REQUIRES_STACK bool
1357 VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp,
1358 JSStackFrame *up)
1360 if (depth > 0 && !VisitFrameSlots(visitor, depth-1, fp->down, fp))
1361 return false;
1363 if (fp->argv) {
1364 if (depth == 0) {
1365 visitor.setStackSlotKind("args");
1366 if (!visitor.visitStackSlots(&fp->argv[-2], argSlots(fp) + 2, fp))
1367 return false;
1369 visitor.setStackSlotKind("arguments");
1370 if (!visitor.visitStackSlots(&fp->argsobj, 1, fp))
1371 return false;
1372 visitor.setStackSlotKind("var");
1373 if (!visitor.visitStackSlots(fp->slots, fp->script->nfixed, fp))
1374 return false;
1376 visitor.setStackSlotKind("stack");
1377 JS_ASSERT(fp->regs->sp >= StackBase(fp));
1378 if (!visitor.visitStackSlots(StackBase(fp),
1379 size_t(fp->regs->sp - StackBase(fp)),
1380 fp)) {
1381 return false;
1383 if (up) {
1384 int missing = up->fun->nargs - up->argc;
1385 if (missing > 0) {
1386 visitor.setStackSlotKind("missing");
1387 if (!visitor.visitStackSlots(fp->regs->sp, size_t(missing), fp))
1388 return false;
1391 return true;
1394 template <typename Visitor>
1395 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1396 VisitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth)
1398 return VisitFrameSlots(visitor, callDepth, cx->fp, NULL);
1401 template <typename Visitor>
1402 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1403 VisitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1404 unsigned ngslots, uint16 *gslots)
1406 for (unsigned n = 0; n < ngslots; ++n) {
1407 unsigned slot = gslots[n];
1408 visitor.visitGlobalSlot(&STOBJ_GET_SLOT(globalObj, slot), n, slot);
1412 class AdjustCallerTypeVisitor;
1414 template <typename Visitor>
1415 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1416 VisitGlobalSlots(Visitor &visitor, JSContext *cx, SlotList &gslots)
1418 VisitGlobalSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
1419 gslots.length(), gslots.data());
1423 template <typename Visitor>
1424 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1425 VisitSlots(Visitor& visitor, JSContext* cx, JSObject* globalObj,
1426 unsigned callDepth, unsigned ngslots, uint16* gslots)
1428 if (VisitStackSlots(visitor, cx, callDepth))
1429 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
1432 template <typename Visitor>
1433 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1434 VisitSlots(Visitor& visitor, JSContext* cx, unsigned callDepth,
1435 unsigned ngslots, uint16* gslots)
1437 VisitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
1438 callDepth, ngslots, gslots);
1441 template <typename Visitor>
1442 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1443 VisitSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1444 unsigned callDepth, const SlotList& slots)
1446 VisitSlots(visitor, cx, globalObj, callDepth, slots.length(),
1447 slots.data());
1450 template <typename Visitor>
1451 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1452 VisitSlots(Visitor &visitor, JSContext *cx, unsigned callDepth,
1453 const SlotList& slots)
1455 VisitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
1456 callDepth, slots.length(), slots.data());
1460 class SlotVisitorBase {
1461 #ifdef JS_JIT_SPEW
1462 protected:
1463 char const *mStackSlotKind;
1464 public:
1465 SlotVisitorBase() : mStackSlotKind(NULL) {}
1466 JS_ALWAYS_INLINE const char *stackSlotKind() { return mStackSlotKind; }
1467 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
1468 mStackSlotKind = k;
1470 #else
1471 public:
1472 JS_ALWAYS_INLINE const char *stackSlotKind() { return NULL; }
1473 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {}
1474 #endif
1477 struct CountSlotsVisitor : public SlotVisitorBase
1479 unsigned mCount;
1480 bool mDone;
1481 jsval* mStop;
1482 public:
1483 JS_ALWAYS_INLINE CountSlotsVisitor(jsval* stop = NULL) :
1484 mCount(0),
1485 mDone(false),
1486 mStop(stop)
1489 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1490 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
1491 if (mDone)
1492 return false;
1493 if (mStop && size_t(mStop - vp) < count) {
1494 mCount += size_t(mStop - vp);
1495 mDone = true;
1496 return false;
1498 mCount += count;
1499 return true;
1502 JS_ALWAYS_INLINE unsigned count() {
1503 return mCount;
1506 JS_ALWAYS_INLINE bool stopped() {
1507 return mDone;
1512 * Calculate the total number of native frame slots we need from this frame all
1513 * the way back to the entry frame, including the current stack usage.
1515 JS_REQUIRES_STACK unsigned
1516 NativeStackSlots(JSContext *cx, unsigned callDepth)
1518 JSStackFrame* fp = cx->fp;
1519 unsigned slots = 0;
1520 unsigned depth = callDepth;
1521 for (;;) {
1523 * Duplicate native stack layout computation: see VisitFrameSlots
1524 * header comment.
1526 unsigned operands = fp->regs->sp - StackBase(fp);
1527 slots += operands;
1528 if (fp->argv)
1529 slots += fp->script->nfixed + 1 /*argsobj*/;
1530 if (depth-- == 0) {
1531 if (fp->argv)
1532 slots += 2/*callee,this*/ + argSlots(fp);
1533 #ifdef DEBUG
1534 CountSlotsVisitor visitor;
1535 VisitStackSlots(visitor, cx, callDepth);
1536 JS_ASSERT(visitor.count() == slots && !visitor.stopped());
1537 #endif
1538 return slots;
1540 JSStackFrame* fp2 = fp;
1541 fp = fp->down;
1542 int missing = fp2->fun->nargs - fp2->argc;
1543 if (missing > 0)
1544 slots += missing;
1546 JS_NOT_REACHED("NativeStackSlots");
1549 class CaptureTypesVisitor : public SlotVisitorBase
1551 JSContext* mCx;
1552 JSTraceType* mTypeMap;
1553 JSTraceType* mPtr;
1555 public:
1556 JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, JSTraceType* typeMap) :
1557 mCx(cx),
1558 mTypeMap(typeMap),
1559 mPtr(typeMap)
1562 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1563 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
1564 JSTraceType type = getCoercedType(*vp);
1565 if (type == TT_INT32 &&
1566 oracle.isGlobalSlotUndemotable(mCx, slot))
1567 type = TT_DOUBLE;
1568 JS_ASSERT(type != TT_JSVAL);
1569 debug_only_printf(LC_TMTracer,
1570 "capture type global%d: %d=%c\n",
1571 n, type, typeChar[type]);
1572 *mPtr++ = type;
1575 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1576 visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
1577 for (int i = 0; i < count; ++i) {
1578 JSTraceType type = getCoercedType(vp[i]);
1579 if (type == TT_INT32 &&
1580 oracle.isStackSlotUndemotable(mCx, length()))
1581 type = TT_DOUBLE;
1582 JS_ASSERT(type != TT_JSVAL);
1583 debug_only_printf(LC_TMTracer,
1584 "capture type %s%d: %d=%c\n",
1585 stackSlotKind(), i, type, typeChar[type]);
1586 *mPtr++ = type;
1588 return true;
1591 JS_ALWAYS_INLINE uintptr_t length() {
1592 return mPtr - mTypeMap;
1597 * Capture the type map for the selected slots of the global object and currently pending
1598 * stack frames.
1600 JS_REQUIRES_STACK void
1601 TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth)
1603 setLength(NativeStackSlots(cx, callDepth) + slots.length());
1604 CaptureTypesVisitor visitor(cx, data());
1605 VisitSlots(visitor, cx, globalObj, callDepth, slots);
1606 JS_ASSERT(visitor.length() == length());
1609 JS_REQUIRES_STACK void
1610 TypeMap::captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned stackSlots)
1612 unsigned oldSlots = length() - stackSlots;
1613 int diff = slots.length() - oldSlots;
1614 JS_ASSERT(diff >= 0);
1615 setLength(length() + diff);
1616 CaptureTypesVisitor visitor(cx, data() + stackSlots + oldSlots);
1617 VisitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
1620 /* Compare this type map to another one and see whether they match. */
1621 bool
1622 TypeMap::matches(TypeMap& other) const
1624 if (length() != other.length())
1625 return false;
1626 return !memcmp(data(), other.data(), length());
1629 void
1630 TypeMap::fromRaw(JSTraceType* other, unsigned numSlots)
1632 unsigned oldLength = length();
1633 setLength(length() + numSlots);
1634 for (unsigned i = 0; i < numSlots; i++)
1635 get(oldLength + i) = other[i];
1639 * Use the provided storage area to create a new type map that contains the
1640 * partial type map with the rest of it filled up from the complete type
1641 * map.
1643 static void
1644 MergeTypeMaps(JSTraceType** partial, unsigned* plength, JSTraceType* complete, unsigned clength, JSTraceType* mem)
1646 unsigned l = *plength;
1647 JS_ASSERT(l < clength);
1648 memcpy(mem, *partial, l * sizeof(JSTraceType));
1649 memcpy(mem + l, complete + l, (clength - l) * sizeof(JSTraceType));
1650 *partial = mem;
1651 *plength = clength;
1654 /* Specializes a tree to any missing globals, including any dependent trees. */
1655 static JS_REQUIRES_STACK void
1656 SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeInfo* root)
1658 TreeInfo* ti = root;
1660 ti->typeMap.captureMissingGlobalTypes(cx, globalObj, *ti->globalSlots, ti->nStackTypes);
1661 JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);
1663 for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
1664 ti = (TreeInfo*)root->dependentTrees[i]->vmprivate;
1666 /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
1667 if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
1668 SpecializeTreesToMissingGlobals(cx, globalObj, ti);
1670 for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
1671 ti = (TreeInfo*)root->linkedTrees[i]->vmprivate;
1672 if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
1673 SpecializeTreesToMissingGlobals(cx, globalObj, ti);
1677 static void
1678 TrashTree(JSContext* cx, Fragment* f);
1680 JS_REQUIRES_STACK
1681 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
1682 TreeInfo* ti, unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
1683 VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc)
1684 : whichTreesToTrash(JS_TRACE_MONITOR(cx).allocator),
1685 cfgMerges(JS_TRACE_MONITOR(cx).allocator)
1687 JS_ASSERT(!_fragment->vmprivate && ti && cx->fp->regs->pc == (jsbytecode*)_fragment->ip);
1689 /* Reset the fragment state we care about in case we got a recycled fragment. */
1690 _fragment->lastIns = NULL;
1692 this->cx = cx;
1693 this->traceMonitor = &JS_TRACE_MONITOR(cx);
1694 this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
1695 this->lexicalBlock = cx->fp->blockChain;
1696 this->anchor = _anchor;
1697 this->fragment = _fragment;
1698 this->lirbuf = _fragment->lirbuf;
1699 this->treeInfo = ti;
1700 this->callDepth = _anchor ? _anchor->calldepth : 0;
1701 this->atoms = FrameAtomBase(cx, cx->fp);
1702 this->deepAborted = false;
1703 this->trashSelf = false;
1704 this->global_dslots = this->globalObj->dslots;
1705 this->loop = true; /* default assumption is we are compiling a loop */
1706 this->wasRootFragment = _fragment == _fragment->root;
1707 this->outer = outer;
1708 this->outerArgc = outerArgc;
1709 this->pendingSpecializedNative = NULL;
1710 this->newobj_ins = NULL;
1711 this->loopLabel = NULL;
1713 #ifdef JS_JIT_SPEW
1714 debug_only_print0(LC_TMMinimal, "\n");
1715 debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u\n",
1716 ti->treeFileName, ti->treeLineNumber, ti->treePCOffset);
1718 debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
1719 (void*)this->globalObj, OBJ_SHAPE(this->globalObj));
1720 debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
1721 (void*)anchor);
1723 /* Set up jitstats so that trace-test.js can determine which architecture
1724 * we're running on. */
1725 jitstats.archIsIA32 = 0;
1726 jitstats.archIsAMD64 = 0;
1727 jitstats.archIs64BIT = 0;
1728 jitstats.archIsARM = 0;
1729 jitstats.archIsSPARC = 0;
1730 jitstats.archIsPPC = 0;
1731 #if defined NANOJIT_IA32
1732 jitstats.archIsIA32 = 1;
1733 #endif
1734 #if defined NANOJIT_AMD64
1735 jitstats.archIsAMD64 = 1;
1736 #endif
1737 #if defined NANOJIT_64BIT
1738 jitstats.archIs64BIT = 1;
1739 #endif
1740 #if defined NANOJIT_ARM
1741 jitstats.archIsARM = 1;
1742 #endif
1743 #if defined NANOJIT_SPARC
1744 jitstats.archIsSPARC = 1;
1745 #endif
1746 #if defined NANOJIT_PPC
1747 jitstats.archIsPPC = 1;
1748 #endif
1750 #endif
1752 lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
1753 debug_only_stmt(
1754 if (js_LogController.lcbits & LC_TMRecorder) {
1755 lir = verbose_filter
1756 = new (&gc) VerboseWriter(*traceMonitor->allocator, lir,
1757 lirbuf->names, &js_LogController);
1760 if (nanojit::AvmCore::config.soft_float)
1761 lir = float_filter = new (&gc) SoftFloatFilter(lir);
1762 else
1763 float_filter = 0;
1764 lir = cse_filter = new (&gc) CseFilter(lir, *traceMonitor->allocator);
1765 lir = expr_filter = new (&gc) ExprFilter(lir);
1766 lir = func_filter = new (&gc) FuncFilter(lir);
1767 lir->ins0(LIR_start);
1769 for (int i = 0; i < NumSavedRegs; ++i)
1770 lir->insParam(i, 1);
1771 #ifdef DEBUG
1772 for (int i = 0; i < NumSavedRegs; ++i)
1773 addName(lirbuf->savedRegs[i], regNames[Assembler::savedRegs[i]]);
1774 #endif
1776 lirbuf->state = addName(lir->insParam(0, 0), "state");
1778 if (fragment == fragment->root)
1779 loopLabel = lir->ins0(LIR_label);
1781 lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
1782 lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
1783 cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
1784 eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
1785 eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
1787 /* If we came from exit, we might not have enough global types. */
1788 if (ti->globalSlots->length() > ti->nGlobalTypes())
1789 SpecializeTreesToMissingGlobals(cx, globalObj, ti);
1791 /* read into registers all values on the stack and all globals we know so far */
1792 import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
1794 if (fragment == fragment->root) {
1796 * We poll the operation callback request flag. It is updated asynchronously whenever
1797 * the callback is to be invoked.
1799 LIns* x = lir->insLoad(LIR_ld, cx_ins, offsetof(JSContext, operationCallbackFlag));
1800 guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
1804 * If we are attached to a tree call guard, make sure the guard the inner
1805 * tree exited from is what we expect it to be.
1807 if (_anchor && _anchor->exitType == NESTED_EXIT) {
1808 LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
1809 offsetof(InterpState, lastTreeExitGuard)),
1810 "lastTreeExitGuard");
1811 guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
1815 TraceRecorder::~TraceRecorder()
1817 JS_ASSERT(nextRecorderToAbort == NULL);
1818 JS_ASSERT(treeInfo && (fragment || wasDeepAborted()));
1819 #ifdef DEBUG
1820 TraceRecorder* tr = JS_TRACE_MONITOR(cx).abortStack;
1821 while (tr != NULL)
1823 JS_ASSERT(this != tr);
1824 tr = tr->nextRecorderToAbort;
1826 #endif
1827 if (fragment) {
1829 if (trashSelf)
1830 TrashTree(cx, fragment->root);
1832 for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
1833 TrashTree(cx, whichTreesToTrash[i]);
1835 #ifdef DEBUG
1836 debug_only_stmt( delete verbose_filter; )
1837 #endif
1838 delete cse_filter;
1839 delete expr_filter;
1840 delete func_filter;
1841 delete float_filter;
1842 delete lir_buf_writer;
1845 void
1846 TraceRecorder::removeFragmentReferences()
1848 fragment = NULL;
1851 void
1852 TraceRecorder::deepAbort()
1854 debug_only_print0(LC_TMTracer|LC_TMAbort, "deep abort");
1855 deepAborted = true;
1858 /* Add debug information to a LIR instruction as we emit it. */
1859 inline LIns*
1860 TraceRecorder::addName(LIns* ins, const char* name)
1862 #ifdef JS_JIT_SPEW
1864 * We'll only ask for verbose Nanojit when .lcbits > 0, so there's no point
1865 * in adding names otherwise.
1867 if (js_LogController.lcbits > 0)
1868 lirbuf->names->addName(ins, name);
1869 #endif
1870 return ins;
1873 inline LIns*
1874 TraceRecorder::insImmObj(JSObject* obj)
1876 treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(obj));
1877 return lir->insImmPtr((void*)obj);
1880 inline LIns*
1881 TraceRecorder::insImmFun(JSFunction* fun)
1883 treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(FUN_OBJECT(fun)));
1884 return lir->insImmPtr((void*)fun);
1887 inline LIns*
1888 TraceRecorder::insImmStr(JSString* str)
1890 treeInfo->gcthings.addUnique(STRING_TO_JSVAL(str));
1891 return lir->insImmPtr((void*)str);
1894 inline LIns*
1895 TraceRecorder::insImmSprop(JSScopeProperty* sprop)
1897 treeInfo->sprops.addUnique(sprop);
1898 return lir->insImmPtr((void*)sprop);
1901 /* Determine the current call depth (starting with the entry frame.) */
1902 unsigned
1903 TraceRecorder::getCallDepth() const
1905 return callDepth;
1908 /* Determine the offset in the native global frame for a jsval we track. */
1909 ptrdiff_t
1910 TraceRecorder::nativeGlobalOffset(jsval* p) const
1912 JS_ASSERT(isGlobal(p));
1913 if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
1914 return sizeof(InterpState) + size_t(p - globalObj->fslots) * sizeof(double);
1915 return sizeof(InterpState) + ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
1918 /* Determine whether a value is a global stack slot. */
1919 bool
1920 TraceRecorder::isGlobal(jsval* p) const
1922 return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
1923 (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
1927 * Return the offset in the native stack for the given jsval. More formally,
1928 * |p| must be the address of a jsval that is represented in the native stack
1929 * area. The return value is the offset, from InterpState::stackBase, in bytes,
1930 * where the native representation of |*p| is stored. To get the offset
1931 * relative to InterpState::sp, subtract TreeInfo::nativeStackBase.
1933 JS_REQUIRES_STACK ptrdiff_t
1934 TraceRecorder::nativeStackOffset(jsval* p) const
1936 CountSlotsVisitor visitor(p);
1937 VisitStackSlots(visitor, cx, callDepth);
1938 size_t offset = visitor.count() * sizeof(double);
1941 * If it's not in a pending frame, it must be on the stack of the current
1942 * frame above sp but below fp->slots + script->nslots.
1944 if (!visitor.stopped()) {
1945 JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
1946 offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
1948 return offset;
1951 /* Track the maximum number of native frame slots we need during execution. */
1952 void
1953 TraceRecorder::trackNativeStackUse(unsigned slots)
1955 if (slots > treeInfo->maxNativeStackSlots)
1956 treeInfo->maxNativeStackSlots = slots;
1960 * Unbox a jsval into a slot. Slots are wide enough to hold double values
1961 * directly (instead of storing a pointer to them). We assert instead of
1962 * type checking. The caller must ensure the types are compatible.
1964 static void
1965 ValueToNative(JSContext* cx, jsval v, JSTraceType type, double* slot)
1967 uint8_t tag = JSVAL_TAG(v);
1968 switch (type) {
1969 case TT_OBJECT:
1970 JS_ASSERT(tag == JSVAL_OBJECT);
1971 JS_ASSERT(!JSVAL_IS_NULL(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)));
1972 *(JSObject**)slot = JSVAL_TO_OBJECT(v);
1973 debug_only_printf(LC_TMTracer,
1974 "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
1975 JSVAL_IS_NULL(v)
1976 ? "null"
1977 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);
1978 return;
1980 case TT_INT32:
1981 jsint i;
1982 if (JSVAL_IS_INT(v))
1983 *(jsint*)slot = JSVAL_TO_INT(v);
1984 else if (tag == JSVAL_DOUBLE && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
1985 *(jsint*)slot = i;
1986 else
1987 JS_ASSERT(JSVAL_IS_INT(v));
1988 debug_only_printf(LC_TMTracer, "int<%d> ", *(jsint*)slot);
1989 return;
1991 case TT_DOUBLE:
1992 jsdouble d;
1993 if (JSVAL_IS_INT(v))
1994 d = JSVAL_TO_INT(v);
1995 else
1996 d = *JSVAL_TO_DOUBLE(v);
1997 JS_ASSERT(JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v));
1998 *(jsdouble*)slot = d;
1999 debug_only_printf(LC_TMTracer, "double<%g> ", d);
2000 return;
2002 case TT_JSVAL:
2003 JS_NOT_REACHED("found jsval type in an entry type map");
2004 return;
2006 case TT_STRING:
2007 JS_ASSERT(tag == JSVAL_STRING);
2008 *(JSString**)slot = JSVAL_TO_STRING(v);
2009 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)(*(JSString**)slot));
2010 return;
2012 case TT_NULL:
2013 JS_ASSERT(tag == JSVAL_OBJECT);
2014 *(JSObject**)slot = NULL;
2015 debug_only_print0(LC_TMTracer, "null ");
2016 return;
2018 case TT_PSEUDOBOOLEAN:
2019 /* Watch out for pseudo-booleans. */
2020 JS_ASSERT(tag == JSVAL_SPECIAL);
2021 *(JSBool*)slot = JSVAL_TO_SPECIAL(v);
2022 debug_only_printf(LC_TMTracer, "pseudoboolean<%d> ", *(JSBool*)slot);
2023 return;
2025 case TT_FUNCTION: {
2026 JS_ASSERT(tag == JSVAL_OBJECT);
2027 JSObject* obj = JSVAL_TO_OBJECT(v);
2028 *(JSObject**)slot = obj;
2029 #ifdef DEBUG
2030 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
2031 debug_only_printf(LC_TMTracer,
2032 "function<%p:%s> ", (void*) obj,
2033 fun->atom
2034 ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
2035 : "unnamed");
2036 #endif
2037 return;
2041 JS_NOT_REACHED("unexpected type");
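/*
 * Illustrative sketch (not part of the original source): filling two native
 * slots from jsvals whose types are already known; |str| is a hypothetical
 * JSString*.
 */
#if 0
static void
ExampleUnboxIntoSlots(JSContext* cx, JSString* str)
{
    double slots[2];
    ValueToNative(cx, INT_TO_JSVAL(42), TT_INT32, &slots[0]);      /* writes the jsint 42 */
    ValueToNative(cx, STRING_TO_JSVAL(str), TT_STRING, &slots[1]); /* writes the JSString* */
}
#endif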
2045 * We maintain an emergency pool of doubles so we can recover safely if a trace
2046 * runs out of memory (doubles or objects).
2048 static jsval
2049 AllocateDoubleFromReservedPool(JSContext* cx)
2051 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2052 JS_ASSERT(tm->reservedDoublePoolPtr > tm->reservedDoublePool);
2053 return *--tm->reservedDoublePoolPtr;
2056 static bool
2057 ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
2059 /* We should not be called with a full pool. */
2060 JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
2061 MAX_NATIVE_STACK_SLOTS);
2064 * When the GC runs in js_NewDoubleInRootedValue, it resets
2065 * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
2067 JSRuntime* rt = cx->runtime;
2068 uintN gcNumber = rt->gcNumber;
2069 uintN lastgcNumber = gcNumber;
2070 jsval* ptr = tm->reservedDoublePoolPtr;
2071 while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
2072 if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
2073 goto oom;
2075 /* Check if the last call to js_NewDoubleInRootedValue GC'd. */
2076 if (rt->gcNumber != lastgcNumber) {
2077 lastgcNumber = rt->gcNumber;
2078 JS_ASSERT(tm->reservedDoublePoolPtr == tm->reservedDoublePool);
2079 ptr = tm->reservedDoublePool;
2082 * Have we GC'd more than once? If so, we're probably running very
2083 * low on memory; bail now.
2085 if (uintN(rt->gcNumber - gcNumber) > uintN(1))
2086 goto oom;
2087 continue;
2089 ++ptr;
2091 tm->reservedDoublePoolPtr = ptr;
2092 return true;
2094 oom:
2096 * We are already under massive GC pressure, so there is no need to hold
2097 * doubles back; we won't run any native code anyway.
2099 tm->reservedDoublePoolPtr = tm->reservedDoublePool;
2100 return false;
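/*
 * Illustrative sketch (not part of the original source): on a trace exit that
 * has run out of double boxes, a prefilled jsval is taken from the reserved
 * pool instead of allocating (compare ReserveDoubleOOMHandler further below).
 */
#if 0
jsval v = AllocateDoubleFromReservedPool(cx);
JS_ASSERT(JSVAL_IS_DOUBLE(v));
*JSVAL_TO_DOUBLE(v) = 3.14;
#endif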
2103 void
2104 JSTraceMonitor::flush()
2106 memset(&vmfragments[0], 0,
2107 FRAGMENT_TABLE_SIZE * sizeof(VMFragment*));
2109 allocator->reset();
2110 delete codeAlloc;
2111 codeAlloc = new CodeAlloc();
2113 Allocator& alloc = *allocator;
2115 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2116 globalStates[i].globalShape = -1;
2117 globalStates[i].globalSlots = new (alloc) SlotList(allocator);
2120 assembler = new (alloc) Assembler(*codeAlloc, alloc, core,
2121 &js_LogController);
2122 lirbuf = new (alloc) LirBuffer(alloc);
2124 #ifdef DEBUG
2125 JS_ASSERT(labels);
2126 labels = new (alloc) LabelMap(alloc, &js_LogController);
2127 lirbuf->names = new (alloc) LirNameMap(alloc, labels);
2128 #endif
2130 needFlush = JS_FALSE;
2133 void
2134 JSTraceMonitor::mark(JSTracer* trc)
2136 if (!trc->context->runtime->gcFlushCodeCaches) {
2137 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2138 VMFragment* f = vmfragments[i];
2139 while (f) {
2140 TreeInfo* ti = (TreeInfo*)f->vmprivate;
2141 if (ti) {
2142 jsval* vp = ti->gcthings.data();
2143 unsigned len = ti->gcthings.length();
2144 while (len--) {
2145 jsval v = *vp++;
2146 JS_SET_TRACING_NAME(trc, "jitgcthing");
2147 JS_CallTracer(trc, JSVAL_TO_TRACEABLE(v), JSVAL_TRACE_KIND(v));
2149 JSScopeProperty** spropp = ti->sprops.data();
2150 len = ti->sprops.length();
2151 while (len--) {
2152 JSScopeProperty* sprop = *spropp++;
2153 sprop->trace(trc);
2156 f = f->next;
2159 } else {
2160 flush();
2165 * Box a value from the native stack back into the jsval format. Integers that
2166 * are too large to fit into a jsval are automatically boxed into
2167 * heap-allocated doubles.
2169 template <typename E>
2170 static inline bool
2171 NativeToValueBase(JSContext* cx, jsval& v, JSTraceType type, double* slot)
2173 jsint i;
2174 jsdouble d;
2175 switch (type) {
2176 case TT_OBJECT:
2177 v = OBJECT_TO_JSVAL(*(JSObject**)slot);
2178 JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
2179 debug_only_printf(LC_TMTracer,
2180 "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
2181 JSVAL_IS_NULL(v)
2182 ? "null"
2183 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);
2184 break;
2186 case TT_INT32:
2187 i = *(jsint*)slot;
2188 debug_only_printf(LC_TMTracer, "int<%d> ", i);
2189 store_int:
2190 if (INT_FITS_IN_JSVAL(i)) {
2191 v = INT_TO_JSVAL(i);
2192 break;
2194 d = (jsdouble)i;
2195 goto store_double;
2196 case TT_DOUBLE:
2197 d = *slot;
2198 debug_only_printf(LC_TMTracer, "double<%g> ", d);
2199 if (JSDOUBLE_IS_INT(d, i))
2200 goto store_int;
2201 store_double: {
2203 * It's not safe to trigger the GC here, so use an emergency heap if we
2204 * are out of double boxes.
2206 if (cx->doubleFreeList) {
2207 #ifdef DEBUG
2208 JSBool ok =
2209 #endif
2210 js_NewDoubleInRootedValue(cx, d, &v);
2211 JS_ASSERT(ok);
2212 return true;
2214 return E::handleDoubleOOM(cx, d, v);
2217 case TT_JSVAL:
2218 v = *(jsval*)slot;
2219 JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
2220 debug_only_printf(LC_TMTracer, "box<%p> ", (void*)v);
2221 break;
2223 case TT_STRING:
2224 v = STRING_TO_JSVAL(*(JSString**)slot);
2225 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)(*(JSString**)slot));
2226 break;
2228 case TT_NULL:
2229 JS_ASSERT(*(JSObject**)slot == NULL);
2230 v = JSVAL_NULL;
2231 debug_only_printf(LC_TMTracer, "null<%p> ", (void*)(*(JSObject**)slot));
2232 break;
2234 case TT_PSEUDOBOOLEAN:
2235 /* Watch out for pseudo-booleans. */
2236 v = SPECIAL_TO_JSVAL(*(JSBool*)slot);
2237 debug_only_printf(LC_TMTracer, "boolean<%d> ", *(JSBool*)slot);
2238 break;
2240 case TT_FUNCTION: {
2241 JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject**)slot));
2242 v = OBJECT_TO_JSVAL(*(JSObject**)slot);
2243 #ifdef DEBUG
2244 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v));
2245 debug_only_printf(LC_TMTracer,
2246 "function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
2247 fun->atom
2248 ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
2249 : "unnamed");
2250 #endif
2251 break;
2254 return true;
2257 struct ReserveDoubleOOMHandler {
2258 static bool handleDoubleOOM(JSContext *cx, double d, jsval& v) {
2259 v = AllocateDoubleFromReservedPool(cx);
2260 JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
2261 *JSVAL_TO_DOUBLE(v) = d;
2262 return true;
2266 static void
2267 NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
2269 #ifdef DEBUG
2270 bool ok =
2271 #endif
2272 NativeToValueBase<ReserveDoubleOOMHandler>(cx, v, type, slot);
2273 JS_ASSERT(ok);
2276 struct FailDoubleOOMHandler {
2277 static bool handleDoubleOOM(JSContext *cx, double d, jsval& v) {
2278 return false;
2282 bool
2283 js_NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
2285 return NativeToValueBase<FailDoubleOOMHandler>(cx, v, type, slot);
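/*
 * Illustrative sketch (not part of the original source): the two entry points
 * above differ only in their double-OOM policy. NativeToValue falls back to
 * the reserved pool and cannot fail; js_NativeToValue reports failure instead.
 */
#if 0
double slot = 3.5;
jsval v;
NativeToValue(cx, v, TT_DOUBLE, &slot);             /* infallible */
if (!js_NativeToValue(cx, v, TT_DOUBLE, &slot)) {
    /* out of double boxes; the caller must recover */
}
#endif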
2288 class BuildNativeFrameVisitor : public SlotVisitorBase
2290 JSContext *mCx;
2291 JSTraceType *mTypeMap;
2292 double *mGlobal;
2293 double *mStack;
2294 public:
2295 BuildNativeFrameVisitor(JSContext *cx,
2296 JSTraceType *typemap,
2297 double *global,
2298 double *stack) :
2299 mCx(cx),
2300 mTypeMap(typemap),
2301 mGlobal(global),
2302 mStack(stack)
2305 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
2306 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
2307 debug_only_printf(LC_TMTracer, "global%d: ", n);
2308 ValueToNative(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
2311 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2312 visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
2313 for (int i = 0; i < count; ++i) {
2314 debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
2315 ValueToNative(mCx, *vp++, *mTypeMap++, mStack++);
2317 return true;
2321 static JS_REQUIRES_STACK void
2322 BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
2323 unsigned ngslots, uint16 *gslots,
2324 JSTraceType *typeMap, double *global, double *stack)
2326 BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
2327 VisitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
2328 debug_only_print0(LC_TMTracer, "\n");
2331 class FlushNativeGlobalFrameVisitor : public SlotVisitorBase
2333 JSContext *mCx;
2334 JSTraceType *mTypeMap;
2335 double *mGlobal;
2336 public:
2337 FlushNativeGlobalFrameVisitor(JSContext *cx,
2338 JSTraceType *typeMap,
2339 double *global) :
2340 mCx(cx),
2341 mTypeMap(typeMap),
2342 mGlobal(global)
2345 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
2346 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
2347 debug_only_printf(LC_TMTracer, "global%d=", n);
2348 NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
2352 class FlushNativeStackFrameVisitor : public SlotVisitorBase
2354 JSContext *mCx;
2355 JSTraceType *mTypeMap;
2356 double *mStack;
2357 jsval *mStop;
2358 public:
2359 FlushNativeStackFrameVisitor(JSContext *cx,
2360 JSTraceType *typeMap,
2361 double *stack,
2362 jsval *stop) :
2363 mCx(cx),
2364 mTypeMap(typeMap),
2365 mStack(stack),
2366 mStop(stop)
2369 JSTraceType* getTypeMap()
2371 return mTypeMap;
2374 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2375 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
2376 for (size_t i = 0; i < count; ++i) {
2377 if (vp == mStop)
2378 return false;
2379 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
2380 NativeToValue(mCx, *vp++, *mTypeMap++, mStack++);
2382 return true;
2386 /* Box the values of the native global frame back into the global object's slots. This is infallible. */
2387 static JS_REQUIRES_STACK void
2388 FlushNativeGlobalFrame(JSContext *cx, double *global, unsigned ngslots,
2389 uint16 *gslots, JSTraceType *typemap)
2391 FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
2392 JSObject *globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
2393 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
2394 debug_only_print0(LC_TMTracer, "\n");
2398 * Returns the number of values on the native stack, excluding the innermost
2399 * frame. This walks all FrameInfos on the native frame stack and sums the
2400 * slot usage of each frame.
2402 static int32
2403 StackDepthFromCallStack(InterpState* state, uint32 callDepth)
2405 int32 nativeStackFramePos = 0;
2407 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
2408 for (FrameInfo** fip = state->callstackBase; fip < state->rp + callDepth; fip++)
2409 nativeStackFramePos += (*fip)->callerHeight;
2410 return nativeStackFramePos;
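/*
 * Worked example (illustrative, not in the original file): with two pending
 * FrameInfos whose callerHeight values are 5 and 3, StackDepthFromCallStack()
 * returns 8, i.e. eight native-stack values belong to frames other than the
 * innermost one.
 */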
2414 * Generic function to read upvars on trace from slots of active frames.
2415 * T Traits type parameter. Must provide static functions:
2416 * interp_get(fp, slot) Read the value out of an interpreter frame.
2417 * native_slot(argc, slot) Return the position of the desired value in the on-trace
2418 * stack frame (with position 0 being callee).
2420 * upvarLevel Static level of the function containing the upvar definition
2421 * slot Identifies the value to get. The meaning is defined by the traits type.
2422 * callDepth Call depth of current point relative to trace entry
2424 template<typename T>
2425 inline JSTraceType
2426 GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
2428 InterpState* state = cx->interpState;
2429 FrameInfo** fip = state->rp + callDepth;
2432 * First search the FrameInfo call stack for an entry containing our
2433 * upvar, namely one with level == upvarLevel. The first FrameInfo is a
2434 * transition from the entry frame to some callee. However, it is not
2435 * known (from looking at the FrameInfo) whether the entry frame had a
2436 * callee. Rather than special-case this or insert more logic into the
2437 * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
2438 * |>= base|), and let the code after the loop handle it.
2440 int32 stackOffset = StackDepthFromCallStack(state, callDepth);
2441 while (--fip > state->callstackBase) {
2442 FrameInfo* fi = *fip;
2445 * The loop starts aligned to the top of the stack, so move down to the first meaningful
2446 * callee. Then read the callee directly from the frame.
2448 stackOffset -= fi->callerHeight;
2449 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
2450 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
2451 uintN calleeLevel = fun->u.i.script->staticLevel;
2452 if (calleeLevel == upvarLevel) {
2454 * Now find the upvar's value in the native stack. stackOffset is
2455 * the offset of the start of the activation record corresponding
2456 * to *fip in the native stack.
2458 uint32 native_slot = T::native_slot(fi->callerArgc, slot);
2459 *result = state->stackBase[stackOffset + native_slot];
2460 return fi->get_typemap()[native_slot];
2464 // Next search the trace entry frame, which is not in the FrameInfo stack.
2465 if (state->outermostTree->script->staticLevel == upvarLevel) {
2466 uint32 argc = ((VMFragment*) state->outermostTree->fragment)->argc;
2467 uint32 native_slot = T::native_slot(argc, slot);
2468 *result = state->stackBase[native_slot];
2469 return state->callstackBase[0]->get_typemap()[native_slot];
2473 * If we did not find the upvar in the frames for the active traces,
2474 * then we simply get the value from the interpreter state.
2476 JS_ASSERT(upvarLevel < JS_DISPLAY_SIZE);
2477 JSStackFrame* fp = cx->display[upvarLevel];
2478 jsval v = T::interp_get(fp, slot);
2479 JSTraceType type = getCoercedType(v);
2480 ValueToNative(cx, v, type, result);
2481 return type;
2484 // For this traits type, 'slot' is the argument index, which may be -2 for callee.
2485 struct UpvarArgTraits {
2486 static jsval interp_get(JSStackFrame* fp, int32 slot) {
2487 return fp->argv[slot];
2490 static uint32 native_slot(uint32 argc, int32 slot) {
2491 return 2 /*callee,this*/ + slot;
2495 uint32 JS_FASTCALL
2496 GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
2498 return GetUpvarOnTrace<UpvarArgTraits>(cx, upvarLevel, slot, callDepth, result);
2501 // For this traits type, 'slot' is an index into the local slots array.
2502 struct UpvarVarTraits {
2503 static jsval interp_get(JSStackFrame* fp, int32 slot) {
2504 return fp->slots[slot];
2507 static uint32 native_slot(uint32 argc, int32 slot) {
2508 return 3 /*callee,this,arguments*/ + argc + slot;
2512 uint32 JS_FASTCALL
2513 GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
2515 return GetUpvarOnTrace<UpvarVarTraits>(cx, upvarLevel, slot, callDepth, result);
2519 * For this traits type, 'slot' is an index into the stack area (within slots,
2520 * after nfixed) of a frame with no function. (On trace, the top-level frame is
2521 * the only one that can have no function.)
2523 struct UpvarStackTraits {
2524 static jsval interp_get(JSStackFrame* fp, int32 slot) {
2525 return fp->slots[slot + fp->script->nfixed];
2528 static uint32 native_slot(uint32 argc, int32 slot) {
2530 * Locals are not imported by the tracer when the frame has no
2531 * function, so we do not add fp->script->nfixed.
2533 JS_ASSERT(argc == 0);
2534 return slot;
2538 uint32 JS_FASTCALL
2539 GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth,
2540 double* result)
2542 return GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
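/*
 * Worked example (illustrative, not in the original file): for an activation
 * with argc == 2, the traits above map 'slot' to on-trace stack positions
 * (position 0 being the callee) as follows:
 *
 *   UpvarArgTraits::native_slot(2, 0)   -> 2   (2 for callee/this, then args)
 *   UpvarVarTraits::native_slot(2, 1)   -> 6   (3 + argc + slot)
 *   UpvarStackTraits::native_slot(0, 4) -> 4   (top-level frame, argc == 0)
 */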
2545 // Parameters needed to access a value from a closure on trace.
2546 struct ClosureVarInfo
2548 jsid id;
2549 uint32 slot;
2550 uint32 callDepth;
2551 uint32 resolveFlags;
2555 * Generic function to read upvars from Call objects of active heavyweight functions.
2556 * call Callee Function object in which the upvar is accessed.
2558 template<typename T>
2559 inline uint32
2560 GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result)
2562 JS_ASSERT(OBJ_GET_CLASS(cx, call) == &js_CallClass);
2564 InterpState* state = cx->interpState;
2566 #ifdef DEBUG
2567 int32 stackOffset = StackDepthFromCallStack(state, cv->callDepth);
2568 FrameInfo** fip = state->rp + cv->callDepth;
2569 while (--fip > state->callstackBase) {
2570 FrameInfo* fi = *fip;
2571 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
2572 if (callee == call) {
2573 // This is not reachable as long as JSOP_LAMBDA is not traced:
2574 // - The upvar is found at this point only if the upvar was defined on a frame that was
2575 // entered on this trace.
2576 // - The upvar definition must be (dynamically, and thus on trace) before the definition
2577 // of the function that uses the upvar.
2578 // - Therefore, if the upvar is found at this point, the function definition JSOP_LAMBDA
2579 // is on the trace.
2580 JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
2582 stackOffset -= fi->callerHeight;
2584 #endif
2587 * Here we specifically want to check the call object of the trace entry frame.
2589 uint32 slot = cv->slot;
2590 VOUCH_DOES_NOT_REQUIRE_STACK();
2591 if (cx->fp->callobj == call) {
2592 slot = T::adj_slot(cx->fp, slot);
2593 *result = state->stackBase[slot];
2594 return state->callstackBase[0]->get_typemap()[slot];
2597 JSStackFrame* fp = (JSStackFrame*) call->getPrivate();
2598 jsval v;
2599 if (fp) {
2600 v = T::slots(fp)[slot];
2601 } else {
2602 JS_ASSERT(cv->resolveFlags != JSRESOLVE_INFER);
2603 JSAutoResolveFlags rf(cx, cv->resolveFlags);
2604 #ifdef DEBUG
2605 JSBool rv =
2606 #endif
2607 js_GetPropertyHelper(cx, call, cv->id, JSGET_METHOD_BARRIER, &v);
2608 JS_ASSERT(rv);
2610 JSTraceType type = getCoercedType(v);
2611 ValueToNative(cx, v, type, result);
2612 return type;
2615 struct ArgClosureTraits
2617 static inline uint32 adj_slot(JSStackFrame* fp, uint32 slot) { return fp->argc + slot; }
2618 static inline jsval* slots(JSStackFrame* fp) { return fp->argv; }
2619 private:
2620 ArgClosureTraits();
2623 uint32 JS_FASTCALL
2624 GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
2626 return GetFromClosure<ArgClosureTraits>(cx, callee, cv, result);
2629 struct VarClosureTraits
2631 static inline uint32 adj_slot(JSStackFrame* fp, uint32 slot) { return slot; }
2632 static inline jsval* slots(JSStackFrame* fp) { return fp->slots; }
2633 private:
2634 VarClosureTraits();
2637 uint32 JS_FASTCALL
2638 GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
2640 return GetFromClosure<VarClosureTraits>(cx, callee, cv, result);
2644 * Box the given native stack frame into the virtual machine stack. This
2645 * is infallible.
2647 * @param callDepth the distance between the entry frame into our trace and
2648 * cx->fp when we make this call. If this is not called as a
2649 * result of a nested exit, callDepth is 0.
2650 * @param mp an array of JSTraceTypes that indicate what the types of the things
2651 * on the stack are.
2652 * @param np pointer to the native stack. We want to copy values from here to
2653 * the JS stack as needed.
2654 * @param stopFrame if non-null, this frame and everything above it should not
2655 * be restored.
2656 * @return the number of things we popped off of np.
2658 static JS_REQUIRES_STACK int
2659 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, JSTraceType* mp, double* np,
2660 JSStackFrame* stopFrame)
2662 jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
2664 /* Root all string and object references first (we don't need to call the GC for this). */
2665 FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt);
2666 VisitStackSlots(visitor, cx, callDepth);
2668 // Restore thisp from the now-restored argv[-1] in each pending frame.
2669 // Keep in mind that we didn't restore frames at stopFrame and above!
2670 // Scope to keep |fp| from leaking into the macros we're using.
2672 unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
2673 JSStackFrame* fp = cx->fp;
2674 if (stopFrame) {
2675 for (; fp != stopFrame; fp = fp->down) {
2676 JS_ASSERT(n != 0);
2677 --n;
2680 // Skip over stopFrame itself.
2681 JS_ASSERT(n != 0);
2682 --n;
2683 fp = fp->down;
2685 for (; n != 0; fp = fp->down) {
2686 --n;
2687 if (fp->argv) {
2688 if (fp->argsobj &&
2689 js_GetArgsPrivateNative(JSVAL_TO_OBJECT(fp->argsobj))) {
2690 JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(fp);
2694 * We might return from trace with a different callee object, but it still
2695 * has to be the same JSFunction (FIXME: bug 471425, eliminate fp->callee).
2697 JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
2698 JS_ASSERT(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(fp->argv[-2])));
2699 JS_ASSERT(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fp->argv[-2])) ==
2700 GET_FUNCTION_PRIVATE(cx, fp->callee()));
2701 JS_ASSERT(GET_FUNCTION_PRIVATE(cx, fp->callee()) == fp->fun);
2704 * SynthesizeFrame sets scopeChain to NULL, because we can't calculate the
2705 * correct scope chain until we have the final callee. Calculate the real
2706 * scope object here.
2708 if (!fp->scopeChain) {
2709 fp->scopeChain = OBJ_GET_PARENT(cx, JSVAL_TO_OBJECT(fp->argv[-2]));
2710 if (fp->fun->flags & JSFUN_HEAVYWEIGHT) {
2712 * Set hookData to null because the failure case for js_GetCallObject
2713 * involves it calling the debugger hook.
2715 * Allocating the Call object must not fail, so use an object
2716 * previously reserved by ExecuteTree if needed.
2718 void* hookData = ((JSInlineFrame*)fp)->hookData;
2719 ((JSInlineFrame*)fp)->hookData = NULL;
2720 JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
2721 JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
2722 #ifdef DEBUG
2723 JSObject *obj =
2724 #endif
2725 js_GetCallObject(cx, fp);
2726 JS_ASSERT(obj);
2727 JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
2728 ((JSInlineFrame*)fp)->hookData = hookData;
2731 fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
2732 if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
2733 fp->flags |= JSFRAME_COMPUTED_THIS;
2737 debug_only_print0(LC_TMTracer, "\n");
2738 return visitor.getTypeMap() - mp;
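/*
 * Layout note (illustrative, not in the original file): for each interpreter
 * frame the callee sits at fp->argv[-2] and |this| at fp->argv[-1], which is
 * why the loop above restores fp->thisp from argv[-1] and derives the scope
 * chain from the parent of the callee in argv[-2].
 */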
2741 /* Emit load instructions onto the trace that read the initial stack state. */
2742 JS_REQUIRES_STACK void
2743 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, JSTraceType t,
2744 const char *prefix, uintN index, JSStackFrame *fp)
2746 LIns* ins;
2747 if (t == TT_INT32) { /* demoted */
2748 JS_ASSERT(isInt32(*p));
2751 * Ok, we have a valid demotion attempt pending, so insert an integer
2752 * read and promote it to double since all arithmetic operations expect
2753 * to see doubles on entry. The first op to use this slot will emit an
2754 * f2i cast, which will cancel out the i2f we insert here.
2756 ins = lir->insLoad(LIR_ld, base, offset);
2757 ins = lir->ins1(LIR_i2f, ins);
2758 } else {
2759 JS_ASSERT_IF(t != TT_JSVAL, isNumber(*p) == (t == TT_DOUBLE));
2760 if (t == TT_DOUBLE) {
2761 ins = lir->insLoad(LIR_ldq, base, offset);
2762 } else if (t == TT_PSEUDOBOOLEAN) {
2763 ins = lir->insLoad(LIR_ld, base, offset);
2764 } else {
2765 ins = lir->insLoad(LIR_ldp, base, offset);
2768 checkForGlobalObjectReallocation();
2769 tracker.set(p, ins);
2771 #ifdef DEBUG
2772 char name[64];
2773 JS_ASSERT(strlen(prefix) < 10);
2774 void* mark = NULL;
2775 jsuword* localNames = NULL;
2776 const char* funName = NULL;
2777 if (*prefix == 'a' || *prefix == 'v') {
2778 mark = JS_ARENA_MARK(&cx->tempPool);
2779 if (fp->fun->hasLocalNames())
2780 localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
2781 funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
2783 if (!strcmp(prefix, "argv")) {
2784 if (index < fp->fun->nargs) {
2785 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
2786 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
2787 } else {
2788 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
2790 } else if (!strcmp(prefix, "vars")) {
2791 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
2792 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
2793 } else {
2794 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
2797 if (mark)
2798 JS_ARENA_RELEASE(&cx->tempPool, mark);
2799 addName(ins, name);
2801 static const char* typestr[] = {
2802 "object", "int", "double", "jsval", "string", "null", "boolean", "function"
2804 debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%s flags=%d\n",
2805 (void*)p, name, typestr[t & 7], t >> 3);
2806 #endif
2809 class ImportGlobalSlotVisitor : public SlotVisitorBase
2811 TraceRecorder &mRecorder;
2812 LIns *mBase;
2813 JSTraceType *mTypemap;
2814 public:
2815 ImportGlobalSlotVisitor(TraceRecorder &recorder,
2816 LIns *base,
2817 JSTraceType *typemap) :
2818 mRecorder(recorder),
2819 mBase(base),
2820 mTypemap(typemap)
2823 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
2824 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
2825 JS_ASSERT(*mTypemap != TT_JSVAL);
2826 mRecorder.import(mBase, mRecorder.nativeGlobalOffset(vp),
2827 vp, *mTypemap++, "global", n, NULL);
2831 class ImportBoxedStackSlotVisitor : public SlotVisitorBase
2833 TraceRecorder &mRecorder;
2834 LIns *mBase;
2835 ptrdiff_t mStackOffset;
2836 JSTraceType *mTypemap;
2837 JSStackFrame *mFp;
2838 public:
2839 ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
2840 LIns *base,
2841 ptrdiff_t stackOffset,
2842 JSTraceType *typemap) :
2843 mRecorder(recorder),
2844 mBase(base),
2845 mStackOffset(stackOffset),
2846 mTypemap(typemap)
2849 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2850 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
2851 for (size_t i = 0; i < count; ++i) {
2852 if (*mTypemap == TT_JSVAL) {
2853 mRecorder.import(mBase, mStackOffset, vp, TT_JSVAL,
2854 "jsval", i, fp);
2855 LIns *vp_ins = mRecorder.unbox_jsval(*vp, mRecorder.get(vp),
2856 mRecorder.copy(mRecorder.anchor));
2857 mRecorder.set(vp, vp_ins);
2859 vp++;
2860 mTypemap++;
2861 mStackOffset += sizeof(double);
2863 return true;
2867 class ImportUnboxedStackSlotVisitor : public SlotVisitorBase
2869 TraceRecorder &mRecorder;
2870 LIns *mBase;
2871 ptrdiff_t mStackOffset;
2872 JSTraceType *mTypemap;
2873 JSStackFrame *mFp;
2874 public:
2875 ImportUnboxedStackSlotVisitor(TraceRecorder &recorder,
2876 LIns *base,
2877 ptrdiff_t stackOffset,
2878 JSTraceType *typemap) :
2879 mRecorder(recorder),
2880 mBase(base),
2881 mStackOffset(stackOffset),
2882 mTypemap(typemap)
2885 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2886 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
2887 for (size_t i = 0; i < count; ++i) {
2888 if (*mTypemap != TT_JSVAL) {
2889 mRecorder.import(mBase, mStackOffset, vp++, *mTypemap,
2890 stackSlotKind(), i, fp);
2892 mTypemap++;
2893 mStackOffset += sizeof(double);
2895 return true;
2899 JS_REQUIRES_STACK void
2900 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
2901 unsigned callDepth, JSTraceType* typeMap)
2904 * If we get a partial list that doesn't have all the types (i.e. recording
2905 * from a side exit that was recorded but we added more global slots
2906 * later), merge the missing types from the entry type map. This is safe
2907 * because at the loop edge we verify that we have compatible types for all
2908 * globals (entry type and loop edge type match). While a different trace
2909 * of the tree might have had a guard with a different type map for these
2910 * slots we just filled in here (the guard we continue from didn't know
2911 * about them), since we didn't take that particular guard the only way we
2912 * could have ended up here is if that other trace had at its end a
2913 * compatible type distribution with the entry map. Since that's exactly
2914 * what we used to fill in the types our current side exit didn't provide,
2915 * this is always safe to do.
2918 JSTraceType* globalTypeMap = typeMap + stackSlots;
2919 unsigned length = treeInfo->nGlobalTypes();
2922 * This is potentially the typemap of the side exit and thus shorter than
2923 * the tree's global type map.
2925 if (ngslots < length) {
2926 MergeTypeMaps(&globalTypeMap /* out param */, &ngslots /* out param */,
2927 treeInfo->globalTypeMap(), length,
2928 (JSTraceType*)alloca(sizeof(JSTraceType) * length));
2930 JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2931 ptrdiff_t offset = -treeInfo->nativeStackBase;
2934 * Check whether there are any values on the stack we have to unbox and do
2935 * that first before we waste any time fetching the state from the stack.
2937 ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, offset, typeMap);
2938 VisitStackSlots(boxedStackVisitor, cx, callDepth);
2940 ImportGlobalSlotVisitor globalVisitor(*this, lirbuf->state, globalTypeMap);
2941 VisitGlobalSlots(globalVisitor, cx, globalObj, ngslots,
2942 treeInfo->globalSlots->data());
2944 ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset,
2945 typeMap);
2946 VisitStackSlots(unboxedStackVisitor, cx, callDepth);
2949 JS_REQUIRES_STACK bool
2950 TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
2952 uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
2954 if (setflags) {
2955 if (!SPROP_HAS_STUB_SETTER(sprop))
2956 ABORT_TRACE_RV("non-stub setter", false);
2957 if (sprop->attrs & JSPROP_READONLY)
2958 ABORT_TRACE_RV("writing to a read-only property", false);
2961 /* This check applies even when setflags == 0. */
2962 if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop)) {
2963 JS_ASSERT(!sprop->isMethod());
2964 ABORT_TRACE_RV("non-stub getter", false);
2967 if (!SPROP_HAS_VALID_SLOT(sprop, scope))
2968 ABORT_TRACE_RV("slotless obj property", false);
2970 return true;
2973 /* Lazily import a global slot if we don't already have it in the tracker. */
2974 JS_REQUIRES_STACK bool
2975 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
2977 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
2978 return false;
2981 * If the global object grows too large, alloca in ExecuteTree might fail,
2982 * so abort tracing on global objects with unreasonably many slots.
2984 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
2985 return false;
2986 jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
2987 if (known(vp))
2988 return true; /* we already have it */
2989 unsigned index = treeInfo->globalSlots->length();
2991 /* Add the slot to the list of interned global slots. */
2992 JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
2993 treeInfo->globalSlots->add(slot);
2994 JSTraceType type = getCoercedType(*vp);
2995 if (type == TT_INT32 && oracle.isGlobalSlotUndemotable(cx, slot))
2996 type = TT_DOUBLE;
2997 treeInfo->typeMap.add(type);
2998 import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
2999 vp, type, "global", index, NULL);
3000 SpecializeTreesToMissingGlobals(cx, globalObj, treeInfo);
3001 return true;
3004 /* Write back a value onto the stack or global frames. */
3005 LIns*
3006 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset, bool demote)
3009 * Sink all type casts targeting the stack into the side exit by simply storing the original
3010 * (uncasted) value. Each guard generates the side exit map based on the types of the
3011 * last stores to every stack location, so it's safe to not perform them on-trace.
3013 if (demote && isPromoteInt(i))
3014 i = ::demote(lir, i);
3015 return lir->insStorei(i, base, offset);
3018 /* Update the tracker, then issue a write back store. */
3019 JS_REQUIRES_STACK void
3020 TraceRecorder::set(jsval* p, LIns* i, bool initializing, bool demote)
3022 JS_ASSERT(i != NULL);
3023 JS_ASSERT(initializing || known(p));
3024 checkForGlobalObjectReallocation();
3025 tracker.set(p, i);
3028 * If we are writing to this location for the first time, calculate the
3029 * offset into the native frame manually. Otherwise just look up the last
3030 * load or store associated with the same source address (p) and use the
3031 * same offset/base.
3033 LIns* x = nativeFrameTracker.get(p);
3034 if (!x) {
3035 if (isGlobal(p))
3036 x = writeBack(i, lirbuf->state, nativeGlobalOffset(p), demote);
3037 else
3038 x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p), demote);
3039 nativeFrameTracker.set(p, x);
3040 } else {
3041 #define ASSERT_VALID_CACHE_HIT(base, offset) \
3042 JS_ASSERT(base == lirbuf->sp || base == lirbuf->state); \
3043 JS_ASSERT(offset == ((base == lirbuf->sp) \
3044 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
3045 : nativeGlobalOffset(p))); \
3047 JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
3048 ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->disp());
3049 writeBack(i, x->oprnd2(), x->disp(), demote);
3051 #undef ASSERT_VALID_CACHE_HIT
3054 JS_REQUIRES_STACK LIns*
3055 TraceRecorder::get(jsval* p)
3057 checkForGlobalObjectReallocation();
3058 return tracker.get(p);
3061 JS_REQUIRES_STACK LIns*
3062 TraceRecorder::addr(jsval* p)
3064 return isGlobal(p)
3065 ? lir->ins2i(LIR_piadd, lirbuf->state, nativeGlobalOffset(p))
3066 : lir->ins2i(LIR_piadd, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
3069 JS_REQUIRES_STACK bool
3070 TraceRecorder::known(jsval* p)
3072 checkForGlobalObjectReallocation();
3073 return tracker.has(p);
3077 * The dslots of the global object are sometimes reallocated by the interpreter.
3078 * This function checks for that condition and re-maps the entries of the tracker
3079 * accordingly.
3081 JS_REQUIRES_STACK void
3082 TraceRecorder::checkForGlobalObjectReallocation()
3084 if (global_dslots != globalObj->dslots) {
3085 debug_only_print0(LC_TMTracer,
3086 "globalObj->dslots relocated, updating tracker\n");
3087 jsval* src = global_dslots;
3088 jsval* dst = globalObj->dslots;
3089 jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
3090 LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
3091 for (jsuint n = 0; n < length; ++n) {
3092 map[n] = tracker.get(src);
3093 tracker.set(src++, NULL);
3095 for (jsuint n = 0; n < length; ++n)
3096 tracker.set(dst++, map[n]);
3097 global_dslots = globalObj->dslots;
3101 /* Determine whether the current branch is a loop edge (taken or not taken). */
3102 static JS_REQUIRES_STACK bool
3103 IsLoopEdge(jsbytecode* pc, jsbytecode* header)
3105 switch (*pc) {
3106 case JSOP_IFEQ:
3107 case JSOP_IFNE:
3108 return ((pc + GET_JUMP_OFFSET(pc)) == header);
3109 case JSOP_IFEQX:
3110 case JSOP_IFNEX:
3111 return ((pc + GET_JUMPX_OFFSET(pc)) == header);
3112 default:
3113 JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
3114 (*pc == JSOP_OR) || (*pc == JSOP_ORX));
3116 return false;
3119 class AdjustCallerGlobalTypesVisitor : public SlotVisitorBase
3121 TraceRecorder &mRecorder;
3122 JSContext *mCx;
3123 nanojit::LirBuffer *mLirbuf;
3124 nanojit::LirWriter *mLir;
3125 JSTraceType *mTypeMap;
3126 public:
3127 AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
3128 JSTraceType *typeMap) :
3129 mRecorder(recorder),
3130 mCx(mRecorder.cx),
3131 mLirbuf(mRecorder.lirbuf),
3132 mLir(mRecorder.lir),
3133 mTypeMap(typeMap)
3136 JSTraceType* getTypeMap()
3138 return mTypeMap;
3141 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3142 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
3143 LIns *ins = mRecorder.get(vp);
3144 bool isPromote = isPromoteInt(ins);
3145 if (isPromote && *mTypeMap == TT_DOUBLE) {
3146 mLir->insStorei(mRecorder.get(vp), mLirbuf->state,
3147 mRecorder.nativeGlobalOffset(vp));
3150 * Aggressively undo speculation so the inner tree will compile
3151 * if this fails.
3153 oracle.markGlobalSlotUndemotable(mCx, slot);
3155 JS_ASSERT(!(!isPromote && *mTypeMap == TT_INT32));
3156 ++mTypeMap;
3160 class AdjustCallerStackTypesVisitor : public SlotVisitorBase
3162 TraceRecorder &mRecorder;
3163 JSContext *mCx;
3164 nanojit::LirBuffer *mLirbuf;
3165 nanojit::LirWriter *mLir;
3166 unsigned mSlotnum;
3167 JSTraceType *mTypeMap;
3168 public:
3169 AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
3170 JSTraceType *typeMap) :
3171 mRecorder(recorder),
3172 mCx(mRecorder.cx),
3173 mLirbuf(mRecorder.lirbuf),
3174 mLir(mRecorder.lir),
3175 mSlotnum(0),
3176 mTypeMap(typeMap)
3179 JSTraceType* getTypeMap()
3181 return mTypeMap;
3184 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3185 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
3186 for (size_t i = 0; i < count; ++i) {
3187 LIns *ins = mRecorder.get(vp);
3188 bool isPromote = isPromoteInt(ins);
3189 if (isPromote && *mTypeMap == TT_DOUBLE) {
3190 mLir->insStorei(mRecorder.get(vp), mLirbuf->sp,
3191 -mRecorder.treeInfo->nativeStackBase +
3192 mRecorder.nativeStackOffset(vp));
3195 * Aggressively undo speculation so the inner tree will compile
3196 * if this fails.
3198 oracle.markStackSlotUndemotable(mCx, mSlotnum);
3200 JS_ASSERT(!(!isPromote && *mTypeMap == TT_INT32));
3201 ++vp;
3202 ++mTypeMap;
3203 ++mSlotnum;
3205 return true;
3210 * Promote slots if necessary to match the called tree's type map. This
3211 * function is infallible and must only be called if we are certain that it is
3212 * possible to reconcile the types for each slot in the inner and outer trees.
3214 JS_REQUIRES_STACK void
3215 TraceRecorder::adjustCallerTypes(Fragment* f)
3217 TreeInfo* ti = (TreeInfo*)f->vmprivate;
3219 AdjustCallerGlobalTypesVisitor globalVisitor(*this, ti->globalTypeMap());
3220 VisitGlobalSlots(globalVisitor, cx, *treeInfo->globalSlots);
3222 AdjustCallerStackTypesVisitor stackVisitor(*this, ti->stackTypeMap());
3223 VisitStackSlots(stackVisitor, cx, 0);
3225 JS_ASSERT(f == f->root);
3228 JS_REQUIRES_STACK JSTraceType
3229 TraceRecorder::determineSlotType(jsval* vp)
3231 JSTraceType m;
3232 LIns* i = get(vp);
3233 if (isNumber(*vp)) {
3234 m = isPromoteInt(i) ? TT_INT32 : TT_DOUBLE;
3235 } else if (JSVAL_IS_OBJECT(*vp)) {
3236 if (JSVAL_IS_NULL(*vp))
3237 m = TT_NULL;
3238 else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
3239 m = TT_FUNCTION;
3240 else
3241 m = TT_OBJECT;
3242 } else {
3243 JS_ASSERT(JSVAL_TAG(*vp) == JSVAL_STRING || JSVAL_IS_SPECIAL(*vp));
3244 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
3245 JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
3246 m = JSTraceType(JSVAL_TAG(*vp));
3248 JS_ASSERT(m != TT_INT32 || isInt32(*vp));
3249 return m;
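/*
 * Illustrative examples (not in the original source) of the mapping performed
 * by determineSlotType():
 *
 *   3 (last written as an integer LIR value) -> TT_INT32
 *   3 (last written as a double LIR value)   -> TT_DOUBLE
 *   null                                     -> TT_NULL
 *   a function object                        -> TT_FUNCTION
 *   any other non-null object                -> TT_OBJECT
 *   "abc"                                    -> TT_STRING
 *   true / false                             -> TT_PSEUDOBOOLEAN
 */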
3252 class DetermineTypesVisitor : public SlotVisitorBase
3254 TraceRecorder &mRecorder;
3255 JSTraceType *mTypeMap;
3256 public:
3257 DetermineTypesVisitor(TraceRecorder &recorder,
3258 JSTraceType *typeMap) :
3259 mRecorder(recorder),
3260 mTypeMap(typeMap)
3263 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3264 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
3265 *mTypeMap++ = mRecorder.determineSlotType(vp);
3268 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3269 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
3270 for (size_t i = 0; i < count; ++i)
3271 *mTypeMap++ = mRecorder.determineSlotType(vp++);
3272 return true;
3275 JSTraceType* getTypeMap()
3277 return mTypeMap;
3281 #if defined JS_JIT_SPEW
3282 JS_REQUIRES_STACK static void
3283 TreevisLogExit(JSContext* cx, VMSideExit* exit)
3285 debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
3286 " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
3287 (void*)exit->from, (void*)cx->fp->regs->pc, cx->fp->script->filename,
3288 js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp));
3289 debug_only_print0(LC_TMTreeVis, " STACK=\"");
3290 for (unsigned i = 0; i < exit->numStackSlots; i++)
3291 debug_only_printf(LC_TMTreeVis, "%c", typeChar[exit->stackTypeMap()[i]]);
3292 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
3293 for (unsigned i = 0; i < exit->numGlobalSlots; i++)
3294 debug_only_printf(LC_TMTreeVis, "%c", typeChar[exit->globalTypeMap()[i]]);
3295 debug_only_print0(LC_TMTreeVis, "\"\n");
3297 #endif
3299 JS_REQUIRES_STACK VMSideExit*
3300 TraceRecorder::snapshot(ExitType exitType)
3302 JSStackFrame* fp = cx->fp;
3303 JSFrameRegs* regs = fp->regs;
3304 jsbytecode* pc = regs->pc;
3307 * Check for a return-value opcode that needs to restart at the next
3308 * instruction.
3310 const JSCodeSpec& cs = js_CodeSpec[*pc];
3313 * When calling a _FAIL native, make the snapshot's pc point to the next
3314 * instruction after the CALL or APPLY. Even on failure, a _FAIL native
3315 * must not be called again from the interpreter.
3317 bool resumeAfter = (pendingSpecializedNative &&
3318 JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
3319 if (resumeAfter) {
3320 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW ||
3321 *pc == JSOP_SETPROP || *pc == JSOP_SETNAME || *pc == JSOP_SETMETHOD);
3322 pc += cs.length;
3323 regs->pc = pc;
3324 MUST_FLOW_THROUGH("restore_pc");
3328 * Generate the entry map for the (possibly advanced) pc and stash it in
3329 * the trace.
3331 unsigned stackSlots = NativeStackSlots(cx, callDepth);
3334 * It's sufficient to track the native stack use here since all stores
3335 * above the stack watermark defined by guards are killed.
3337 trackNativeStackUse(stackSlots + 1);
3339 /* Capture the type map into a temporary location. */
3340 unsigned ngslots = treeInfo->globalSlots->length();
3341 unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSTraceType);
3342 void *mark = JS_ARENA_MARK(&cx->tempPool);
3343 JSTraceType* typemap;
3344 JS_ARENA_ALLOCATE_CAST(typemap, JSTraceType*, &cx->tempPool, typemap_size);
3347 * Determine the type of a store by looking at the current type of the
3348 * actual value the interpreter is using. For numbers we have to check what
3349 * kind of store we used last (integer or double) to figure out what the
2350 * side exit should reflect in its typemap.
3352 DetermineTypesVisitor detVisitor(*this, typemap);
3353 VisitSlots(detVisitor, cx, callDepth, ngslots,
3354 treeInfo->globalSlots->data());
3355 JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) ==
3356 ngslots + stackSlots);
3359 * If this snapshot is for a side exit that leaves a boxed jsval result on
3360 * the stack, make a note of this in the typemap. Examples include the
3361 * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
3362 * GetPropertyByName; and the type guard in unbox_jsval after such a call
3363 * (also at the beginning of a trace branched from such a type guard).
3365 if (pendingUnboxSlot ||
3366 (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) {
3367 unsigned pos = stackSlots - 1;
3368 if (pendingUnboxSlot == cx->fp->regs->sp - 2)
3369 pos = stackSlots - 2;
3370 typemap[pos] = TT_JSVAL;
3373 /* Now restore the original pc (after which early returns are ok). */
3374 if (resumeAfter) {
3375 MUST_FLOW_LABEL(restore_pc);
3376 regs->pc = pc - cs.length;
3377 } else {
3379 * If we take a snapshot on a goto, advance to the target address. This
3380 * avoids inner trees returning on a break goto, which the outer
3381 * recorder then would confuse with a break in the outer tree.
3383 if (*pc == JSOP_GOTO)
3384 pc += GET_JUMP_OFFSET(pc);
3385 else if (*pc == JSOP_GOTOX)
3386 pc += GET_JUMPX_OFFSET(pc);
3390 * Check if we already have a matching side exit; if so we can return that
3391 * side exit instead of creating a new one.
3393 VMSideExit** exits = treeInfo->sideExits.data();
3394 unsigned nexits = treeInfo->sideExits.length();
3395 if (exitType == LOOP_EXIT) {
3396 for (unsigned n = 0; n < nexits; ++n) {
3397 VMSideExit* e = exits[n];
3398 if (e->pc == pc && e->imacpc == fp->imacpc &&
3399 ngslots == e->numGlobalSlots &&
3400 !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
3401 AUDIT(mergedLoopExits);
3402 #if defined JS_JIT_SPEW
3403 TreevisLogExit(cx, e);
3404 #endif
3405 JS_ARENA_RELEASE(&cx->tempPool, mark);
3406 return e;
3411 if (sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSTraceType) >
3412 LirBuffer::MAX_SKIP_PAYLOAD_SZB) {
3414 * ::snapshot() is infallible in the sense that callers don't
3415 * expect errors; but this is a trace-aborting error condition. So
3416 * mangle the request to consume zero slots, and mark the tree as
3417 * to-be-trashed. This should be safe as the trace will be aborted
3418 * before assembly or execution due to the call to
3419 * trackNativeStackUse above.
3421 stackSlots = 0;
3422 ngslots = 0;
3423 typemap_size = 0;
3424 trashSelf = true;
3427 /* We couldn't find a matching side exit, so create a new one. */
3428 LIns* data = lir->insSkip(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSTraceType));
3429 VMSideExit* exit = (VMSideExit*) data->payload();
3431 /* Setup side exit structure. */
3432 memset(exit, 0, sizeof(VMSideExit));
3433 exit->from = fragment;
3434 exit->calldepth = callDepth;
3435 exit->numGlobalSlots = ngslots;
3436 exit->numStackSlots = stackSlots;
3437 exit->numStackSlotsBelowCurrentFrame = cx->fp->argv ?
3438 nativeStackOffset(&cx->fp->argv[-2]) / sizeof(double) :
3440 exit->exitType = exitType;
3441 exit->block = fp->blockChain;
3442 if (fp->blockChain)
3443 treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
3444 exit->pc = pc;
3445 exit->imacpc = fp->imacpc;
3446 exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
3447 exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
3448 exit->nativeCalleeWord = 0;
3449 exit->lookupFlags = js_InferFlags(cx, 0);
3450 memcpy(exit->fullTypeMap(), typemap, typemap_size);
3452 #if defined JS_JIT_SPEW
3453 TreevisLogExit(cx, exit);
3454 #endif
3456 JS_ARENA_RELEASE(&cx->tempPool, mark);
3457 return exit;
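/*
 * Illustrative layout sketch (not in the original file): each side exit and
 * its type map share a single insSkip payload, with the stack types first and
 * the global types after them:
 *
 *   [ VMSideExit | stack types (numStackSlots) | global types (numGlobalSlots) ]
 *
 * fullTypeMap() names the combined array that the memcpy above fills in.
 */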
3460 JS_REQUIRES_STACK LIns*
3461 TraceRecorder::createGuardRecord(VMSideExit* exit)
3463 LIns* guardRec = lir->insSkip(sizeof(GuardRecord));
3464 GuardRecord* gr = (GuardRecord*) guardRec->payload();
3466 memset(gr, 0, sizeof(GuardRecord));
3467 gr->exit = exit;
3468 exit->addGuard(gr);
3470 return guardRec;
3474 * Emit a guard for condition (cond), expecting to evaluate to boolean result
3475 * (expected) and using the supplied side exit if the condition doesn't hold.
3477 JS_REQUIRES_STACK void
3478 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
3480 debug_only_printf(LC_TMRecorder,
3481 " About to try emitting guard code for "
3482 "SideExit=%p exitType=%s\n",
3483 (void*)exit, getExitName(exit->exitType));
3485 LIns* guardRec = createGuardRecord(exit);
3488 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
3489 * safe to keep references to the side exits here. If we ever start
3490 * clearing those lirbufs, we have to make sure we purge the side exits
3491 * that then no longer will be in valid memory.
3493 if (exit->exitType == LOOP_EXIT)
3494 treeInfo->sideExits.add(exit);
3496 if (!cond->isCond()) {
3497 expected = !expected;
3498 cond = lir->ins_eq0(cond);
3501 LIns* guardIns =
3502 lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
3503 if (!guardIns) {
3504 debug_only_print0(LC_TMRecorder,
3505 " redundant guard, eliminated, no codegen\n");
3509 JS_REQUIRES_STACK VMSideExit*
3510 TraceRecorder::copy(VMSideExit* copy)
3512 size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
3513 LIns* data = lir->insSkip(sizeof(VMSideExit) + typemap_size * sizeof(JSTraceType));
3514 VMSideExit* exit = (VMSideExit*) data->payload();
3516 /* Copy side exit structure. */
3517 memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(JSTraceType));
3518 exit->guards = NULL;
3519 exit->from = fragment;
3520 exit->target = NULL;
3523 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
3524 * safe to keep references to the side exits here. If we ever start
3525 * clearing those lirbufs, we have to make sure we purge the side exits
3526 * that then no longer will be in valid memory.
3528 if (exit->exitType == LOOP_EXIT)
3529 treeInfo->sideExits.add(exit);
3530 #if defined JS_JIT_SPEW
3531 TreevisLogExit(cx, exit);
3532 #endif
3533 return exit;
3537 * Emit a guard for condition (cond), expecting to evaluate to boolean result
3538 * (expected) and generate a side exit with type exitType to jump to if the
3539 * condition does not hold.
3541 JS_REQUIRES_STACK void
3542 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
3544 guard(expected, cond, snapshot(exitType));
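/*
 * Illustrative usage sketch (not part of the original source). Inside a
 * TraceRecorder method, obj_ins and cond_ins are hypothetical LIR values and
 * prevExit a previously captured side exit; the exit type is just an example.
 */
#if 0
guard(false, lir->ins_eq0(obj_ins), MISMATCH_EXIT);  /* leave the trace if obj_ins is null */
guard(true, cond_ins, prevExit);                     /* reuse an existing VMSideExit */
#endif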
3548 * Determine whether any context associated with the same thread as cx is
3549 * executing native code.
3551 static inline bool
3552 ProhibitFlush(JSContext* cx)
3554 if (cx->interpState) // early out if the given context is in native code
3555 return true;
3557 JSCList *cl;
3559 #ifdef JS_THREADSAFE
3560 JSThread* thread = cx->thread;
3561 for (cl = thread->contextList.next; cl != &thread->contextList; cl = cl->next)
3562 if (CX_FROM_THREAD_LINKS(cl)->interpState)
3563 return true;
3564 #else
3565 JSRuntime* rt = cx->runtime;
3566 for (cl = rt->contextList.next; cl != &rt->contextList; cl = cl->next)
3567 if (js_ContextFromLinkField(cl)->interpState)
3568 return true;
3569 #endif
3570 return false;
3573 static JS_REQUIRES_STACK void
3574 ResetJIT(JSContext* cx)
3576 if (!TRACING_ENABLED(cx))
3577 return;
3578 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3579 debug_only_print0(LC_TMTracer, "Flushing cache.\n");
3580 if (tm->recorder)
3581 js_AbortRecording(cx, "flush cache");
3582 TraceRecorder* tr;
3583 while ((tr = tm->abortStack) != NULL) {
3584 tr->removeFragmentReferences();
3585 tr->deepAbort();
3586 tr->popAbortStack();
3588 if (ProhibitFlush(cx)) {
3589 debug_only_print0(LC_TMTracer, "Deferring JIT flush due to deep bail.\n");
3590 tm->needFlush = JS_TRUE;
3591 return;
3593 tm->flush();
3596 /* Compile the current fragment. */
3597 JS_REQUIRES_STACK void
3598 TraceRecorder::compile(JSTraceMonitor* tm)
3600 #ifdef MOZ_TRACEVIS
3601 TraceVisStateObj tvso(cx, S_COMPILE);
3602 #endif
3604 if (tm->needFlush) {
3605 ResetJIT(cx);
3606 return;
3608 if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
3609 debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
3610 Blacklist((jsbytecode*) fragment->root->ip);
3611 return;
3613 if (anchor && anchor->exitType != CASE_EXIT)
3614 ++treeInfo->branchCount;
3615 if (tm->allocator->outOfMemory())
3616 return;
3618 Assembler *assm = tm->assembler;
3619 ::compile(assm, fragment, *tm->allocator verbose_only(, tm->labels));
3620 if (assm->error() == nanojit::OutOMem)
3621 return;
3623 if (assm->error() != nanojit::None) {
3624 debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
3625 Blacklist((jsbytecode*) fragment->root->ip);
3626 return;
3628 ResetRecordingAttempts(cx, (jsbytecode*) fragment->ip);
3629 ResetRecordingAttempts(cx, (jsbytecode*) fragment->root->ip);
3630 if (anchor) {
3631 #ifdef NANOJIT_IA32
3632 if (anchor->exitType == CASE_EXIT)
3633 assm->patch(anchor, anchor->switchInfo);
3634 else
3635 #endif
3636 assm->patch(anchor);
3638 JS_ASSERT(fragment->code());
3639 JS_ASSERT(!fragment->vmprivate);
3640 if (fragment == fragment->root)
3641 fragment->vmprivate = treeInfo;
3643 /* :TODO: windows support */
3644 #if defined DEBUG && !defined WIN32
3645 const char* filename = cx->fp->script->filename;
3646 char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
3647 sprintf(label, "%s:%u", filename ? filename : "<stdin>",
3648 js_FramePCToLineNumber(cx, cx->fp));
3649 tm->labels->add(fragment, sizeof(Fragment), 0, label);
3650 js_free(label);
3651 #endif
3652 AUDIT(traceCompleted);
3655 static void
3656 JoinPeers(Assembler* assm, VMSideExit* exit, VMFragment* target)
3658 exit->target = target;
3659 assm->patch(exit);
3661 debug_only_printf(LC_TMTreeVis, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit, (void*)target);
3663 if (exit->root() == target)
3664 return;
3666 target->getTreeInfo()->dependentTrees.addUnique(exit->root());
3667 exit->root()->getTreeInfo()->linkedTrees.addUnique(target);
3670 /* Results of trying to connect an arbitrary type A with arbitrary type B */
3671 enum TypeCheckResult
3673 TypeCheck_Okay, /* Okay: same type */
3674 TypeCheck_Promote, /* Okay: Type A needs f2i() */
3675 TypeCheck_Demote, /* Okay: Type A needs i2f() */
3676 TypeCheck_Undemote, /* Bad: Slot is undemotable */
3677 TypeCheck_Bad /* Bad: incompatible types */
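/*
 * Worked example (illustrative, not in the original file): if a slot is
 * currently written as an integer LIR value but the target type map says
 * TT_DOUBLE, the check yields TypeCheck_Demote and the loop edge keeps the
 * value stored as a double (its final i2f is not demoted away). If instead
 * the value is a genuine double but the map says TT_INT32, the result is
 * TypeCheck_Undemote: the slot must be marked undemotable and recording
 * retried.
 */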
3680 class SlotMap : public SlotVisitorBase
3682 public:
3683 struct SlotInfo
3685 SlotInfo()
3686 : v(0), promoteInt(false), lastCheck(TypeCheck_Bad)
3688 SlotInfo(jsval* v, bool promoteInt)
3689 : v(v), promoteInt(promoteInt), lastCheck(TypeCheck_Bad)
3691 jsval *v;
3692 bool promoteInt;
3693 TypeCheckResult lastCheck;
3696 SlotMap(TraceRecorder& rec, unsigned slotOffset)
3697 : mRecorder(rec),
3698 mCx(rec.cx),
3699 slots(NULL),
3700 slotOffset(slotOffset)
3704 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3705 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot)
3707 addSlot(vp);
3710 JS_ALWAYS_INLINE SlotMap::SlotInfo&
3711 operator [](unsigned i)
3713 return slots[i];
3716 JS_ALWAYS_INLINE SlotMap::SlotInfo&
3717 get(unsigned i)
3719 return slots[i];
3722 JS_ALWAYS_INLINE unsigned
3723 length()
3725 return slots.length();
3729 * Possible return states:
3731 * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
3732 * promote/demotes.
3733 * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
3734 * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
3735 * before recording began. Caller can go through slot list and mark
3736 * such slots as undemotable.
3738 JS_REQUIRES_STACK TypeConsensus
3739 checkTypes(TreeInfo* ti)
3741 if (ti->typeMap.length() < slotOffset || length() != ti->typeMap.length() - slotOffset)
3742 return TypeConsensus_Bad;
3744 bool has_undemotes = false;
3745 for (unsigned i = 0; i < length(); i++) {
3746 TypeCheckResult result = checkType(i, ti->typeMap[i + slotOffset]);
3747 if (result == TypeCheck_Bad)
3748 return TypeConsensus_Bad;
3749 if (result == TypeCheck_Undemote)
3750 has_undemotes = true;
3751 slots[i].lastCheck = result;
3753 if (has_undemotes)
3754 return TypeConsensus_Undemotes;
3755 return TypeConsensus_Okay;
3758 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3759 addSlot(jsval* vp)
3761 slots.add(SlotInfo(vp, isNumber(*vp) && isPromoteInt(mRecorder.get(vp))));
3764 JS_REQUIRES_STACK void
3765 markUndemotes()
3767 for (unsigned i = 0; i < length(); i++) {
3768 if (get(i).lastCheck == TypeCheck_Undemote)
3769 MarkSlotUndemotable(mRecorder.cx, mRecorder.treeInfo, slotOffset + i);
3773 JS_REQUIRES_STACK virtual void
3774 adjustTypes()
3776 for (unsigned i = 0; i < length(); i++) {
3777 SlotInfo& info = get(i);
3778 JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
3779 if (info.lastCheck == TypeCheck_Promote) {
3780 JS_ASSERT(isNumber(*info.v));
3781 mRecorder.set(info.v, mRecorder.f2i(mRecorder.get(info.v)));
3782 } else if (info.lastCheck == TypeCheck_Demote) {
3783 JS_ASSERT(isNumber(*info.v));
3784 JS_ASSERT(mRecorder.get(info.v)->isQuad());
3786 /* Never demote this final i2f. */
3787 mRecorder.set(info.v, mRecorder.get(info.v), false, false);
3791 private:
3792 TypeCheckResult
3793 checkType(unsigned i, JSTraceType t)
3795 debug_only_printf(LC_TMTracer,
3796 "checkType slot %d: interp=%c typemap=%c isNum=%d promoteInt=%d\n",
3798 typeChar[getCoercedType(*slots[i].v)],
3799 typeChar[t],
3800 isNumber(*slots[i].v),
3801 slots[i].promoteInt);
3802 switch (t) {
3803 case TT_INT32:
3804 if (!isNumber(*slots[i].v))
3805 return TypeCheck_Bad; /* Not a number? Type mismatch. */
3806 /* This is always a type mismatch; we can't close a double to an int. */
3807 if (!slots[i].promoteInt)
3808 return TypeCheck_Undemote;
3809 /* Looks good, slot is an int32, the last instruction should be promotable. */
3810 JS_ASSERT(isInt32(*slots[i].v) && slots[i].promoteInt);
3811 return TypeCheck_Promote;
3812 case TT_DOUBLE:
3813 if (!isNumber(*slots[i].v))
3814 return TypeCheck_Bad; /* Not a number? Type mismatch. */
3815 if (slots[i].promoteInt)
3816 return TypeCheck_Demote;
3817 return TypeCheck_Okay;
3818 case TT_NULL:
3819 return JSVAL_IS_NULL(*slots[i].v) ? TypeCheck_Okay : TypeCheck_Bad;
3820 case TT_FUNCTION:
3821 return !JSVAL_IS_PRIMITIVE(*slots[i].v) &&
3822 HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*slots[i].v)) ?
3823 TypeCheck_Okay : TypeCheck_Bad;
3824 case TT_OBJECT:
3825 return !JSVAL_IS_PRIMITIVE(*slots[i].v) &&
3826 !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*slots[i].v)) ?
3827 TypeCheck_Okay : TypeCheck_Bad;
3828 default:
3829 return getCoercedType(*slots[i].v) == t ? TypeCheck_Okay : TypeCheck_Bad;
3831 JS_NOT_REACHED("shouldn't fall through type check switch");
3833 protected:
3834 TraceRecorder& mRecorder;
3835 JSContext* mCx;
3836 Queue<SlotInfo> slots;
3837 unsigned slotOffset;
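/*
 * DefaultSlotMap: a SlotMap that adds every visited stack slot of every
 * frame; used by the no-argument closeLoop() below via VisitSlots.
 */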
3840 class DefaultSlotMap : public SlotMap
3842 public:
3843 DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr, 0)
3847 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3848 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp)
3850 for (size_t i = 0; i < count; i++)
3851 addSlot(&vp[i]);
3852 return true;
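/*
 * selfTypeStability: check the loop-edge types against this tree's own entry
 * typemap. If undemotions are the only obstacle, mark the affected slots now
 * so a later recording attempt keeps them as doubles.
 */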
3856 JS_REQUIRES_STACK TypeConsensus
3857 TraceRecorder::selfTypeStability(SlotMap& slotMap)
3859 debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment);
3860 TypeConsensus consensus = slotMap.checkTypes(treeInfo);
3862 /* Best case: loop jumps back to its own header */
3863 if (consensus == TypeConsensus_Okay)
3864 return TypeConsensus_Okay;
3866 /* If the only thing keeping this loop from being stable is undemotions, then mark relevant
3867 * slots as undemotable.
3869 if (consensus == TypeConsensus_Undemotes)
3870 slotMap.markUndemotes();
3872 return consensus;
3875 JS_REQUIRES_STACK TypeConsensus
3876 TraceRecorder::peerTypeStability(SlotMap& slotMap, VMFragment** pPeer)
3878 /* See if there are any peers that would make this stable */
3879 VMFragment* root = (VMFragment*)fragment->root;
3880 VMFragment* peer = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape,
3881 root->argc);
3882 JS_ASSERT(peer != NULL);
3883 bool onlyUndemotes = false;
3884 for (; peer != NULL; peer = (VMFragment*)peer->peer) {
3885 if (!peer->vmprivate || peer == fragment)
3886 continue;
3887 debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
3888 TypeConsensus consensus = slotMap.checkTypes((TreeInfo*)peer->vmprivate);
3889 if (consensus == TypeConsensus_Okay) {
3890 *pPeer = peer;
3891 /* Return Okay even though the trace itself is not stable; it can be linked to this peer.
3892 * The caller should inspect *pPeer to find the compatible peer.
3894 return TypeConsensus_Okay;
3896 if (consensus == TypeConsensus_Undemotes)
3897 onlyUndemotes = true;
3900 return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
3903 JS_REQUIRES_STACK bool
3904 TraceRecorder::closeLoop(TypeConsensus &consensus)
3906 DefaultSlotMap slotMap(*this);
3907 VisitSlots(slotMap, cx, 0, *treeInfo->globalSlots);
3908 return closeLoop(slotMap, snapshot(UNSTABLE_LOOP_EXIT), consensus);
3911 /* Complete and compile a trace and link it to the existing tree if appropriate.
3912 * Returns true if something was compiled. The consensus outparam is always set.
3914 JS_REQUIRES_STACK bool
3915 TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus& consensus)
3918 * We should have arrived back at the loop header, and hence we don't want
3919 * to be in an imacro here and the opcode should be either JSOP_LOOP or, in
3920 * case this loop was blacklisted in the meantime, JSOP_NOP.
3922 JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
3924 if (callDepth != 0) {
3925 debug_only_print0(LC_TMTracer,
3926 "Blacklisted: stack depth mismatch, possible recursion.\n");
3927 Blacklist((jsbytecode*) fragment->root->ip);
3928 trashSelf = true;
3929 consensus = TypeConsensus_Bad;
3930 return false;
3933 JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT);
3934 JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
3936 VMFragment* peer = NULL;
3937 VMFragment* root = (VMFragment*)fragment->root;
3939 consensus = selfTypeStability(slotMap);
3940 if (consensus != TypeConsensus_Okay) {
3941 TypeConsensus peerConsensus = peerTypeStability(slotMap, &peer);
3942 /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
3943 if (peerConsensus != TypeConsensus_Bad)
3944 consensus = peerConsensus;
3947 #if DEBUG
3948 if (consensus != TypeConsensus_Okay || peer)
3949 AUDIT(unstableLoopVariable);
3950 #endif
3952 JS_ASSERT(!trashSelf);
3954 /* This exit is indeed linkable to something now. Process any promote/demotes that
3955 * are pending in the slot map.
3957 if (consensus == TypeConsensus_Okay)
3958 slotMap.adjustTypes();
3960 if (consensus != TypeConsensus_Okay || peer) {
3961 fragment->lastIns = lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
3963 /* If there is a peer, there must have been an "Okay" consensus. */
3964 JS_ASSERT_IF(peer, consensus == TypeConsensus_Okay);
3966 /* Compile as a type-unstable loop, and hope for a connection later. */
3967 if (!peer) {
3969 * If such a fragment does not exist, let's compile the loop ahead
3970 * of time anyway. Later, if the loop becomes type stable, we will
3971 * connect these two fragments together.
3973 debug_only_print0(LC_TMTracer,
3974 "Trace has unstable loop variable with no stable peer, "
3975 "compiling anyway.\n");
3976 UnstableExit* uexit = new (*traceMonitor->allocator) UnstableExit;
3977 uexit->fragment = fragment;
3978 uexit->exit = exit;
3979 uexit->next = treeInfo->unstableExits;
3980 treeInfo->unstableExits = uexit;
3981 } else {
3982 JS_ASSERT(peer->code());
3983 exit->target = peer;
3984 debug_only_printf(LC_TMTracer,
3985 "Joining type-unstable trace to target fragment %p.\n",
3986 (void*)peer);
3987 ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);
3988 treeInfo->linkedTrees.addUnique(peer);
3990 } else {
3991 exit->exitType = LOOP_EXIT;
3992 debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
3993 getExitName(LOOP_EXIT));
3995 JS_ASSERT((fragment == fragment->root) == !!loopLabel);
3996 if (loopLabel) {
3997 lir->insBranch(LIR_j, NULL, loopLabel);
3998 lir->ins1(LIR_live, lirbuf->state);
4001 exit->target = fragment->root;
4002 fragment->lastIns = lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
4004 compile(traceMonitor);
4006 Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
4007 if (assm->error() != nanojit::None)
4008 return false;
4010 debug_only_printf(LC_TMTreeVis, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit, (void*)peer);
4012 peer = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape, root->argc);
4013 JS_ASSERT(peer);
4014 joinEdgesToEntry(peer);
4016 debug_only_stmt(DumpPeerStability(traceMonitor, peer->ip, peer->globalObj,
4017 peer->globalShape, peer->argc);)
4019 debug_only_print0(LC_TMTracer,
4020 "updating specializations on dependent and linked trees\n");
4021 if (fragment->root->vmprivate)
4022 SpecializeTreesToMissingGlobals(cx, globalObj, (TreeInfo*)fragment->root->vmprivate);
4025 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
4026 * should try to compile the outer tree again.
4028 if (outer)
4029 AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);
4030 #ifdef JS_JIT_SPEW
4031 debug_only_printf(LC_TMMinimal,
4032 "recording completed at %s:%u@%u via closeLoop\n",
4033 cx->fp->script->filename,
4034 js_FramePCToLineNumber(cx, cx->fp),
4035 FramePCOffset(cx->fp));
4036 debug_only_print0(LC_TMMinimal, "\n");
4037 #endif
4039 return true;
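/*
 * FullMapFromExit: rebuild the complete typemap (stack types followed by
 * global types) for a side exit, appending any globals that were specialized
 * at the root of the tree after the exit was created.
 */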
4042 static void
4043 FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
4045 typeMap.setLength(0);
4046 typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
4047 typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
4048 /* Include globals that were later specialized at the root of the tree. */
4049 if (exit->numGlobalSlots < exit->root()->getTreeInfo()->nGlobalTypes()) {
4050 typeMap.fromRaw(exit->root()->getTreeInfo()->globalTypeMap() + exit->numGlobalSlots,
4051 exit->root()->getTreeInfo()->nGlobalTypes() - exit->numGlobalSlots);
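/*
 * TypeMapLinkability: decide whether typeMap could be linked to peer. Equal
 * slots are fine; an int32 here against a double in the peer is tolerated
 * only if the slot is already known to be undemotable (giving Undemotes);
 * any other mismatch is Bad.
 */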
4055 static JS_REQUIRES_STACK TypeConsensus
4056 TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, VMFragment* peer)
4058 const TypeMap& peerMap = peer->getTreeInfo()->typeMap;
4059 unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
4060 TypeConsensus consensus = TypeConsensus_Okay;
4061 for (unsigned i = 0; i < minSlots; i++) {
4062 if (typeMap[i] == peerMap[i])
4063 continue;
4064 if (typeMap[i] == TT_INT32 && peerMap[i] == TT_DOUBLE &&
4065 IsSlotUndemotable(cx, peer->getTreeInfo(), i)) {
4066 consensus = TypeConsensus_Undemotes;
4067 } else {
4068 return TypeConsensus_Bad;
4071 return consensus;
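/*
 * FindUndemotesInTypemaps: collect the slots where typeMap holds an int32
 * but the tree expects a double, mark them undemotable, and return how many
 * were found; any other mismatch makes the maps incompatible and returns 0.
 */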
4074 static JS_REQUIRES_STACK unsigned
4075 FindUndemotesInTypemaps(JSContext* cx, const TypeMap& typeMap, TreeInfo* treeInfo,
4076 Queue<unsigned>& undemotes)
4078 undemotes.setLength(0);
4079 unsigned minSlots = JS_MIN(typeMap.length(), treeInfo->typeMap.length());
4080 for (unsigned i = 0; i < minSlots; i++) {
4081 if (typeMap[i] == TT_INT32 && treeInfo->typeMap[i] == TT_DOUBLE) {
4082 undemotes.add(i);
4083 } else if (typeMap[i] != treeInfo->typeMap[i]) {
4084 return 0;
4087 for (unsigned i = 0; i < undemotes.length(); i++)
4088 MarkSlotUndemotable(cx, treeInfo, undemotes[i]);
4089 return undemotes.length();
4092 JS_REQUIRES_STACK void
4093 TraceRecorder::joinEdgesToEntry(VMFragment* peer_root)
4095 if (fragment->root != fragment)
4096 return;
4098 TypeMap typeMap(NULL);
4099 Queue<unsigned> undemotes(NULL);
4101 for (VMFragment* peer = peer_root; peer; peer = (VMFragment*)peer->peer) {
4102 TreeInfo* ti = peer->getTreeInfo();
4103 if (!ti)
4104 continue;
4105 UnstableExit* uexit = ti->unstableExits;
4106 while (uexit != NULL) {
4107 /* Build the full typemap for this unstable exit */
4108 FullMapFromExit(typeMap, uexit->exit);
4109 /* Check its compatibility against this tree */
4110 TypeConsensus consensus = TypeMapLinkability(cx, typeMap, (VMFragment*)fragment->root);
4111 JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
4112 if (consensus == TypeConsensus_Okay) {
4113 debug_only_printf(LC_TMTracer,
4114 "Joining type-stable trace to target exit %p->%p.\n",
4115 (void*)uexit->fragment, (void*)uexit->exit);
4116 /* It's okay! Link together and remove the unstable exit. */
4117 JoinPeers(traceMonitor->assembler, uexit->exit, (VMFragment*)fragment);
4118 uexit = ti->removeUnstableExit(uexit->exit);
4119 } else {
4120 /* Check for int32->double slots that suggest trashing. */
4121 if (FindUndemotesInTypemaps(cx, typeMap, treeInfo, undemotes)) {
4122 JS_ASSERT(peer == uexit->fragment->root);
4123 if (fragment == peer)
4124 trashSelf = true;
4125 else
4126 whichTreesToTrash.addUnique(uexit->fragment->root);
4127 return;
4129 uexit = uexit->next;
4135 JS_REQUIRES_STACK void
4136 TraceRecorder::endLoop()
4138 endLoop(snapshot(LOOP_EXIT));
4141 /* Emit an always-exit guard and compile the tree (used for break statements). */
4142 JS_REQUIRES_STACK void
4143 TraceRecorder::endLoop(VMSideExit* exit)
4145 if (callDepth != 0) {
4146 debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
4147 Blacklist((jsbytecode*) fragment->root->ip);
4148 trashSelf = true;
4149 return;
4152 fragment->lastIns =
4153 lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
4154 compile(traceMonitor);
4156 Assembler *assm = traceMonitor->assembler;
4157 if (assm->error() != nanojit::None)
4158 return;
4160 debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);
4162 VMFragment* root = (VMFragment*)fragment->root;
4163 joinEdgesToEntry(getLoop(traceMonitor,
4164 root->ip,
4165 root->globalObj,
4166 root->globalShape,
4167 root->argc));
4168 debug_only_stmt(DumpPeerStability(traceMonitor, root->ip, root->globalObj,
4169 root->globalShape, root->argc);)
4172 * Note: this must always be done, in case we added new globals on trace
4173 * and haven't yet propagated those to linked and dependent trees.
4175 debug_only_print0(LC_TMTracer,
4176 "updating specializations on dependent and linked trees\n");
4177 if (fragment->root->vmprivate)
4178 SpecializeTreesToMissingGlobals(cx, globalObj, (TreeInfo*)fragment->root->vmprivate);
4181 * If this is a newly formed tree, and the outer tree has not been compiled
4182 * yet, we should try to compile the outer tree again.
4184 if (outer)
4185 AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);
4186 #ifdef JS_JIT_SPEW
4187 debug_only_printf(LC_TMMinimal,
4188 "Recording completed at %s:%u@%u via endLoop\n",
4189 cx->fp->script->filename,
4190 js_FramePCToLineNumber(cx, cx->fp),
4191 FramePCOffset(cx->fp));
4192 debug_only_print0(LC_TMTracer, "\n");
4193 #endif
4196 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
4197 JS_REQUIRES_STACK void
4198 TraceRecorder::prepareTreeCall(VMFragment* inner)
4200 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
4201 inner_sp_ins = lirbuf->sp;
4202 VMSideExit* exit = snapshot(OOM_EXIT);
4205 * The inner tree expects to be called from the current frame. If the outer
4206 * tree (this trace) is currently inside a function inlining code
4207 * (calldepth > 0), we have to advance the native stack pointer such that
4208 * we match what the inner trace expects to see. We move it back when we
4209 * come out of the inner tree call.
4211 if (callDepth > 0) {
4213 * Calculate the amount we have to lift the native stack pointer by to
4214 * compensate for any outer frames that the inner tree doesn't expect
4215 * but the outer tree has.
4217 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
4219 /* Calculate the amount we have to lift the call stack by. */
4220 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
4223 * Guard that we have enough stack space for the tree we are trying to
4224 * call on top of the new value for sp.
4226 debug_only_printf(LC_TMTracer,
4227 "sp_adj=%d outer=%d inner=%d\n",
4228 sp_adj, treeInfo->nativeStackBase, ti->nativeStackBase);
4229 LIns* sp_top = lir->ins2i(LIR_piadd, lirbuf->sp,
4230 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
4231 + sp_adj /* adjust for stack in outer frame inner tree can't see */
4232 + ti->maxNativeStackSlots * sizeof(double)); /* plus the inner tree's stack */
4233 guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), exit);
4235 /* Guard that we have enough call stack space. */
4236 LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
4237 ti->maxCallDepth * sizeof(FrameInfo*));
4238 guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), exit);
4240 /* We have enough space, so adjust sp and rp to their new level. */
4241 lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
4242 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
4243 + sp_adj /* adjust for stack in outer frame inner tree can't see */
4244 + ti->nativeStackBase), /* plus the inner tree's stack base */
4245 lirbuf->state, offsetof(InterpState, sp));
4246 lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
4247 lirbuf->state, offsetof(InterpState, rp));
4251 * The inner tree will probably access stack slots. So tell nanojit not to
4252 * discard or defer stack writes before calling js_CallTree.
4254 * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
4256 LIns* guardRec = createGuardRecord(exit);
4257 lir->insGuard(LIR_xbarrier, NULL, guardRec);
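/*
 * Append the global portion of an inner tree exit's typemap: the globals
 * recorded in the exit itself, plus any the inner tree specialized later.
 * Returns the number of global slots appended.
 */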
4260 static unsigned
4261 BuildGlobalTypeMapFromInnerTree(Queue<JSTraceType>& typeMap, VMSideExit* inner)
4263 #if defined DEBUG
4264 unsigned initialSlots = typeMap.length();
4265 #endif
4266 /* First, use the innermost exit's global typemap. */
4267 typeMap.add(inner->globalTypeMap(), inner->numGlobalSlots);
4269 /* Add missing global types from the innermost exit's tree. */
4270 TreeInfo* innerTree = inner->root()->getTreeInfo();
4271 unsigned slots = inner->numGlobalSlots;
4272 if (slots < innerTree->nGlobalTypes()) {
4273 typeMap.add(innerTree->globalTypeMap() + slots, innerTree->nGlobalTypes() - slots);
4274 slots = innerTree->nGlobalTypes();
4276 JS_ASSERT(typeMap.length() - initialSlots == slots);
4277 return slots;
4280 /* Record a call to an inner tree. */
4281 JS_REQUIRES_STACK void
4282 TraceRecorder::emitTreeCall(VMFragment* inner, VMSideExit* exit)
4284 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
4286 /* Invoke the inner tree. */
4287 LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
4288 LIns* ret = lir->insCall(&js_CallTree_ci, args);
4290 /* Read back all registers, in case the called tree changed any of them. */
4291 #ifdef DEBUG
4292 JSTraceType* map;
4293 size_t i;
4294 map = exit->globalTypeMap();
4295 for (i = 0; i < exit->numGlobalSlots; i++)
4296 JS_ASSERT(map[i] != TT_JSVAL);
4297 map = exit->stackTypeMap();
4298 for (i = 0; i < exit->numStackSlots; i++)
4299 JS_ASSERT(map[i] != TT_JSVAL);
4300 #endif
4302 * Bug 502604 - It is illegal to extend from the outer typemap without
4303 * first extending from the inner. Make a new typemap here.
4305 TypeMap fullMap(NULL);
4306 fullMap.add(exit->stackTypeMap(), exit->numStackSlots);
4307 BuildGlobalTypeMapFromInnerTree(fullMap, exit);
4308 import(ti, inner_sp_ins, exit->numStackSlots, fullMap.length() - exit->numStackSlots,
4309 exit->calldepth, fullMap.data());
4311 /* Restore sp and rp to their original values (we still have them in a register). */
4312 if (callDepth > 0) {
4313 lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
4314 lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
4318 * Guard that we come out of the inner tree along the same side exit we came out when
4319 * we called the inner tree at recording time.
4321 VMSideExit* nested = snapshot(NESTED_EXIT);
4322 guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), nested);
4323 debug_only_printf(LC_TMTreeVis, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner,
4324 (void*)nested, (void*)exit);
4326 /* Register us as a dependent tree of the inner tree. */
4327 ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
4328 treeInfo->linkedTrees.addUnique(inner);
4331 /* Add an if/if-else control-flow merge point to the list of known merge points. */
4332 JS_REQUIRES_STACK void
4333 TraceRecorder::trackCfgMerges(jsbytecode* pc)
4335 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
4336 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
4337 jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
4338 if (sn != NULL) {
4339 if (SN_TYPE(sn) == SRC_IF) {
4340 cfgMerges.add((*pc == JSOP_IFEQ)
4341 ? pc + GET_JUMP_OFFSET(pc)
4342 : pc + GET_JUMPX_OFFSET(pc));
4343 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
4344 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
4349 * Invert the direction of the guard if this is a loop edge that is not
4350 * taken (thin loop).
4352 JS_REQUIRES_STACK void
4353 TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
4355 ExitType exitType;
4356 if (IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
4357 exitType = LOOP_EXIT;
4360 * If we are about to walk out of the loop, generate code for the
4361 * inverse loop condition, pretending we recorded the case that stays
4362 * on trace.
4364 if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
4365 JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
4366 debug_only_print0(LC_TMTracer,
4367 "Walking out of the loop, terminating it anyway.\n");
4368 cond = !cond;
4372 * Conditional guards do not have to be emitted if the condition is
4373 * constant. We make a note whether the loop condition is true or false
4374 * here, so we later know whether to emit a loop edge or a loop end.
4376 if (x->isconst()) {
4377 loop = (x->imm32() == cond);
4378 return;
4380 } else {
4381 exitType = BRANCH_EXIT;
4383 if (!x->isconst())
4384 guard(cond, x, exitType);
4387 /* Emit code for a fused IFEQ/IFNE. */
4388 JS_REQUIRES_STACK void
4389 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
4391 if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
4392 emitIf(pc, cond, x);
4393 if (*pc == JSOP_IFEQ)
4394 trackCfgMerges(pc);
4398 /* Check whether we have reached the end of the trace. */
4399 JS_REQUIRES_STACK JSRecordingStatus
4400 TraceRecorder::checkTraceEnd(jsbytecode *pc)
4402 if (IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
4404 * If we compile a loop, the trace should have a zero stack balance at
4405 * the loop edge. Currently we are parked on a comparison op or
4406 * IFNE/IFEQ, so advance pc to the loop header and adjust the stack
4407 * pointer and pretend we have reached the loop header.
4409 if (loop) {
4410 JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
4411 bool fused = pc != cx->fp->regs->pc;
4412 JSFrameRegs orig = *cx->fp->regs;
4414 cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
4415 cx->fp->regs->sp -= fused ? 2 : 1;
4417 TypeConsensus consensus;
4418 closeLoop(consensus);
4420 *cx->fp->regs = orig;
4421 } else {
4422 endLoop();
4424 return JSRS_STOP;
4426 return JSRS_CONTINUE;
4429 bool
4430 TraceRecorder::hasMethod(JSObject* obj, jsid id)
4432 if (!obj)
4433 return false;
4435 JSObject* pobj;
4436 JSProperty* prop;
4437 int protoIndex = obj->lookupProperty(cx, id, &pobj, &prop);
4438 if (protoIndex < 0 || !prop)
4439 return false;
4441 bool found = false;
4442 if (OBJ_IS_NATIVE(pobj)) {
4443 JSScope* scope = OBJ_SCOPE(pobj);
4444 JSScopeProperty* sprop = (JSScopeProperty*) prop;
4446 if (SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop) &&
4447 SPROP_HAS_VALID_SLOT(sprop, scope)) {
4448 jsval v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
4449 if (VALUE_IS_FUNCTION(cx, v)) {
4450 found = true;
4451 if (!scope->branded()) {
4452 scope->brandingShapeChange(cx, sprop->slot, v);
4453 scope->setBranded();
4459 pobj->dropProperty(cx, prop);
4460 return found;
4463 JS_REQUIRES_STACK bool
4464 TraceRecorder::hasIteratorMethod(JSObject* obj)
4466 JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);
4468 return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom));
4471 void
4472 nanojit::StackFilter::getTops(LIns* guard, int& spTop, int& rpTop)
4474 VMSideExit* e = (VMSideExit*)guard->record()->exit;
4475 spTop = e->sp_adj;
4476 rpTop = e->rp_adj;
4479 #if defined NJ_VERBOSE
4480 void
4481 nanojit::LirNameMap::formatGuard(LIns *i, char *out)
4483 VMSideExit *x;
4485 x = (VMSideExit *)i->record()->exit;
4486 sprintf(out,
4487 "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld",
4488 formatRef(i),
4489 lirNames[i->opcode()],
4490 i->oprnd1() ? formatRef(i->oprnd1()) : "",
4491 (void *)x->pc,
4492 (void *)x->imacpc,
4493 (long int)x->sp_adj,
4494 (long int)x->rp_adj);
4496 #endif
4498 static JS_REQUIRES_STACK bool
4499 DeleteRecorder(JSContext* cx)
4501 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4503 /* Aborting and completing a trace end up here. */
4504 delete tm->recorder;
4505 tm->recorder = NULL;
4507 /* If we ran out of memory, flush the code cache. */
4508 Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
4509 if (assm->error() == OutOMem ||
4510 js_OverfullJITCache(tm, false)) {
4511 ResetJIT(cx);
4512 return false;
4515 return true;
4518 /* Check whether the shape of the global object has changed. */
4519 static JS_REQUIRES_STACK bool
4520 CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
4521 uint32 *shape = NULL, SlotList** slots = NULL)
4523 if (tm->needFlush) {
4524 ResetJIT(cx);
4525 return false;
4528 if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
4529 return false;
4531 uint32 globalShape = OBJ_SHAPE(globalObj);
4533 if (tm->recorder) {
4534 VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
4535 TreeInfo* ti = tm->recorder->getTreeInfo();
4537 /* Check the global shape matches the recorder's treeinfo's shape. */
4538 if (globalObj != root->globalObj || globalShape != root->globalShape) {
4539 AUDIT(globalShapeMismatchAtEntry);
4540 debug_only_printf(LC_TMTracer,
4541 "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
4542 (void*)globalObj, globalShape, (void*)root->globalObj,
4543 root->globalShape);
4544 Backoff(cx, (jsbytecode*) root->ip);
4545 ResetJIT(cx);
4546 return false;
4548 if (shape)
4549 *shape = globalShape;
4550 if (slots)
4551 *slots = ti->globalSlots;
4552 return true;
4555 /* No recorder; search for a tracked global state (or allocate one). */
4556 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
4557 GlobalState &state = tm->globalStates[i];
4559 if (state.globalShape == uint32(-1)) {
4560 state.globalObj = globalObj;
4561 state.globalShape = globalShape;
4562 JS_ASSERT(state.globalSlots);
4563 JS_ASSERT(state.globalSlots->length() == 0);
4566 if (state.globalObj == globalObj && state.globalShape == globalShape) {
4567 if (shape)
4568 *shape = globalShape;
4569 if (slots)
4570 *slots = state.globalSlots;
4571 return true;
4575 /* No currently tracked global found and no room to allocate one; abort. */
4576 AUDIT(globalShapeMismatchAtEntry);
4577 debug_only_printf(LC_TMTracer,
4578 "No global slotlist for global shape %u, flushing cache.\n",
4579 globalShape);
4580 ResetJIT(cx);
4581 return false;
4584 static JS_REQUIRES_STACK bool
4585 StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
4586 unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
4587 VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
4589 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4590 if (JS_TRACE_MONITOR(cx).needFlush) {
4591 ResetJIT(cx);
4592 return false;
4595 JS_ASSERT(f->root != f || !cx->fp->imacpc);
4597 /* Start recording if no exception during construction. */
4598 tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
4599 stackSlots, ngslots, typeMap,
4600 expectedInnerExit, outer, outerArgc);
4602 if (cx->throwing) {
4603 js_AbortRecording(cx, "setting up recorder failed");
4604 return false;
4607 /* Clear any leftover error state. */
4608 Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
4609 assm->setError(None);
4610 return true;
4613 static void
4614 TrashTree(JSContext* cx, Fragment* f)
4616 JS_ASSERT((!f->code()) == (!f->vmprivate));
4617 JS_ASSERT(f == f->root);
4618 debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);
4619 if (!f->code())
4620 return;
4621 AUDIT(treesTrashed);
4622 debug_only_print0(LC_TMTracer, "Trashing tree info.\n");
4623 TreeInfo* ti = (TreeInfo*)f->vmprivate;
4624 f->vmprivate = NULL;
4625 f->setCode(NULL);
4626 Fragment** data = ti->dependentTrees.data();
4627 unsigned length = ti->dependentTrees.length();
4628 for (unsigned n = 0; n < length; ++n)
4629 TrashTree(cx, data[n]);
4630 data = ti->linkedTrees.data();
4631 length = ti->linkedTrees.length();
4632 for (unsigned n = 0; n < length; ++n)
4633 TrashTree(cx, data[n]);
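/*
 * SynthesizeFrame: rebuild the interpreter frame for a call that was inlined
 * on trace, using the captured FrameInfo. The frame is carved out of
 * cx->stackPool much like js_Interpret's inline_call path; the return value
 * is the number of native stack slots the synthesized frame contributes
 * (see the FIXME at the end of the function).
 */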
4636 static int
4637 SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
4639 VOUCH_DOES_NOT_REQUIRE_STACK();
4641 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
4642 JS_ASSERT(FUN_INTERPRETED(fun));
4644 /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
4645 JSStackFrame* fp = cx->fp;
4646 JS_ASSERT_IF(!fi.imacpc,
4647 js_ReconstructStackDepth(cx, fp->script, fi.pc) ==
4648 uintN(fi.spdist - fp->script->nfixed));
4650 uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
4651 JSScript* script = fun->u.i.script;
4652 size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);
4654 /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
4655 JSArena* a = cx->stackPool.current;
4656 void* newmark = (void*) a->avail;
4657 uintN argc = fi.get_argc();
4658 jsval* vp = fp->slots + fi.spdist - (2 + argc);
4659 uintN missing = 0;
4660 jsval* newsp;
4662 if (fun->nargs > argc) {
4663 const JSFrameRegs& regs = *fp->regs;
4665 newsp = vp + 2 + fun->nargs;
4666 JS_ASSERT(newsp > regs.sp);
4667 if ((jsuword) newsp <= a->limit) {
4668 if ((jsuword) newsp > a->avail)
4669 a->avail = (jsuword) newsp;
4670 jsval* argsp = newsp;
4671 do {
4672 *--argsp = JSVAL_VOID;
4673 } while (argsp != regs.sp);
4674 missing = 0;
4675 } else {
4676 missing = fun->nargs - argc;
4677 nbytes += (2 + fun->nargs) * sizeof(jsval);
4681 /* Allocate the inline frame with its vars and operands. */
4682 if (a->avail + nbytes <= a->limit) {
4683 newsp = (jsval *) a->avail;
4684 a->avail += nbytes;
4685 JS_ASSERT(missing == 0);
4686 } else {
4688 * This allocation is infallible: ExecuteTree reserved enough stack.
4689 * (But see bug 491023.)
4691 JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
4692 JS_ASSERT(newsp);
4695 * Move args if the missing ones overflow arena a, then push
4696 * undefined for the missing args.
4698 if (missing) {
4699 memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
4700 vp = newsp;
4701 newsp = vp + 2 + argc;
4702 do {
4703 *newsp++ = JSVAL_VOID;
4704 } while (--missing != 0);
4708 /* Claim space for the stack frame and initialize it. */
4709 JSInlineFrame* newifp = (JSInlineFrame *) newsp;
4710 newsp += nframeslots;
4712 newifp->frame.callobj = NULL;
4713 newifp->frame.argsobj = NULL;
4714 newifp->frame.varobj = NULL;
4715 newifp->frame.script = script;
4716 newifp->frame.fun = fun;
4718 bool constructing = fi.is_constructing();
4719 newifp->frame.argc = argc;
4720 newifp->callerRegs.pc = fi.pc;
4721 newifp->callerRegs.sp = fp->slots + fi.spdist;
4722 fp->imacpc = fi.imacpc;
4724 #ifdef DEBUG
4725 if (fi.block != fp->blockChain) {
4726 for (JSObject* obj = fi.block; obj != fp->blockChain; obj = STOBJ_GET_PARENT(obj))
4727 JS_ASSERT(obj);
4729 #endif
4730 fp->blockChain = fi.block;
4732 newifp->frame.argv = newifp->callerRegs.sp - argc;
4733 JS_ASSERT(newifp->frame.argv);
4734 #ifdef DEBUG
4735 // Initialize argv[-1] to a known-bogus value so we'll catch it if
4736 // someone forgets to initialize it later.
4737 newifp->frame.argv[-1] = JSVAL_HOLE;
4738 #endif
4739 JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);
4741 newifp->frame.rval = JSVAL_VOID;
4742 newifp->frame.down = fp;
4743 newifp->frame.annotation = NULL;
4744 newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrame
4745 newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
4746 newifp->frame.dormantNext = NULL;
4747 newifp->frame.blockChain = NULL;
4748 newifp->mark = newmark;
4749 newifp->frame.thisp = NULL; // will be updated in FlushNativeStackFrame
4751 newifp->frame.regs = fp->regs;
4752 newifp->frame.regs->pc = script->code;
4753 newifp->frame.regs->sp = newsp + script->nfixed;
4754 newifp->frame.imacpc = NULL;
4755 newifp->frame.slots = newsp;
4756 if (script->staticLevel < JS_DISPLAY_SIZE) {
4757 JSStackFrame **disp = &cx->display[script->staticLevel];
4758 newifp->frame.displaySave = *disp;
4759 *disp = &newifp->frame;
4763 * Note that fp->script is still the caller's script; set the callee
4764 * inline frame's idea of caller version from its version.
4766 newifp->callerVersion = (JSVersion) fp->script->version;
4768 // After this paragraph, fp and cx->fp point to the newly synthesized frame.
4769 fp->regs = &newifp->callerRegs;
4770 fp = cx->fp = &newifp->frame;
4773 * If there's a call hook, invoke it to compute the hookData used by
4774 * debuggers that cooperate with the interpreter.
4776 JSInterpreterHook hook = cx->debugHooks->callHook;
4777 if (hook) {
4778 newifp->hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData);
4779 } else {
4780 newifp->hookData = NULL;
4784 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
4786 * FIXME - We must count stack slots from caller's operand stack up to (but
4787 * not including) callee's, including missing arguments. Could we shift
4788 * everything down to the caller's fp->slots (where vars start) and avoid
4789 * some of the complexity?
4791 return (fi.spdist - fp->down->script->nfixed) +
4792 ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
4793 script->nfixed + 1/*argsobj*/;
4796 static void
4797 SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
4799 VOUCH_DOES_NOT_REQUIRE_STACK();
4801 void *mark;
4802 JSInlineFrame *ifp;
4804 /* This allocation is infallible: ExecuteTree reserved enough stack. */
4805 mark = JS_ARENA_MARK(&cx->stackPool);
4806 JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
4807 JS_ASSERT(ifp);
4809 JSStackFrame *fp = &ifp->frame;
4810 fp->regs = NULL;
4811 fp->imacpc = NULL;
4812 fp->slots = NULL;
4813 fp->callobj = NULL;
4814 fp->argsobj = NULL;
4815 fp->varobj = cx->fp->varobj;
4816 fp->script = NULL;
4817 // fp->thisp is really a jsval, so reinterpret_cast here, not JSVAL_TO_OBJECT.
4818 fp->thisp = (JSObject *) state.nativeVp[1];
4819 fp->argc = state.nativeVpLen - 2;
4820 fp->argv = state.nativeVp + 2;
4821 fp->fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fp->argv[-2]));
4822 fp->rval = JSVAL_VOID;
4823 fp->down = cx->fp;
4824 fp->annotation = NULL;
4825 JS_ASSERT(cx->fp->scopeChain);
4826 fp->scopeChain = cx->fp->scopeChain;
4827 fp->blockChain = NULL;
4828 fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
4829 fp->dormantNext = NULL;
4830 fp->displaySave = NULL;
4832 ifp->mark = mark;
4833 cx->fp = fp;
4836 static JS_REQUIRES_STACK bool
4837 RecordTree(JSContext* cx, JSTraceMonitor* tm, VMFragment* f, jsbytecode* outer,
4838 uint32 outerArgc, JSObject* globalObj, uint32 globalShape,
4839 SlotList* globalSlots, uint32 argc)
4841 JS_ASSERT(f->root == f);
4843 /* Make sure the global type map didn't change on us. */
4844 if (!CheckGlobalObjectShape(cx, tm, globalObj)) {
4845 Backoff(cx, (jsbytecode*) f->root->ip);
4846 return false;
4849 AUDIT(recorderStarted);
4851 /* Try to find an unused peer fragment, or allocate a new one. */
4852 while (f->code() && f->peer)
4853 f = f->peer;
4854 if (f->code())
4855 f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalObj, globalShape, argc);
4857 if (!f) {
4858 ResetJIT(cx);
4859 return false;
4862 f->root = f;
4863 f->lirbuf = tm->lirbuf;
4865 if (tm->allocator->outOfMemory() || js_OverfullJITCache(tm, false)) {
4866 Backoff(cx, (jsbytecode*) f->root->ip);
4867 ResetJIT(cx);
4868 debug_only_print0(LC_TMTracer,
4869 "Out of memory recording new tree, flushing cache.\n");
4870 return false;
4873 JS_ASSERT(!f->code() && !f->vmprivate);
4875 /* Set up the VM-private treeInfo structure for this fragment. */
4876 TreeInfo* ti = new (*tm->allocator) TreeInfo(tm->allocator, f, globalSlots);
4878 /* Capture the coerced type of each active slot in the type map. */
4879 ti->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */);
4880 ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
4882 #ifdef DEBUG
4883 AssertTreeIsUnique(tm, (VMFragment*)f, ti);
4884 ti->treeFileName = cx->fp->script->filename;
4885 ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
4886 ti->treePCOffset = FramePCOffset(cx->fp);
4887 #endif
4888 #ifdef JS_JIT_SPEW
4889 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
4890 (void*)f, f->ip, ti->treeFileName, ti->treeLineNumber,
4891 FramePCOffset(cx->fp));
4892 debug_only_print0(LC_TMTreeVis, " STACK=\"");
4893 for (unsigned i = 0; i < ti->nStackTypes; i++)
4894 debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[i]]);
4895 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
4896 for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
4897 debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[ti->nStackTypes + i]]);
4898 debug_only_print0(LC_TMTreeVis, "\"\n");
4899 #endif
4901 /* Determine the native frame layout at the entry point. */
4902 unsigned entryNativeStackSlots = ti->nStackTypes;
4903 JS_ASSERT(entryNativeStackSlots == NativeStackSlots(cx, 0 /* callDepth */));
4904 ti->nativeStackBase = (entryNativeStackSlots -
4905 (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
4906 ti->maxNativeStackSlots = entryNativeStackSlots;
4907 ti->maxCallDepth = 0;
4908 ti->script = cx->fp->script;
4910 /* Recording primary trace. */
4911 if (!StartRecorder(cx, NULL, f, ti,
4912 ti->nStackTypes,
4913 ti->globalSlots->length(),
4914 ti->typeMap.data(), NULL, outer, outerArgc)) {
4915 return false;
4918 return true;
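/*
 * FindLoopEdgeTarget: for an unstable loop exit, build the exit's full
 * typemap, mark all of its double slots undemotable in the |from| tree, and
 * scan the peer list for a tree the exit could link to. The chosen peer is
 * returned through |peerp| along with the linkability consensus.
 */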
4921 static JS_REQUIRES_STACK TypeConsensus
4922 FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, VMFragment** peerp)
4924 VMFragment* from = exit->root();
4925 TreeInfo* from_ti = from->getTreeInfo();
4927 JS_ASSERT(from->code());
4929 TypeMap typeMap(NULL);
4930 FullMapFromExit(typeMap, exit);
4931 JS_ASSERT(typeMap.length() - exit->numStackSlots == from_ti->nGlobalTypes());
4933 /* Mark all double slots as undemotable */
4934 for (unsigned i = 0; i < typeMap.length(); i++) {
4935 if (typeMap[i] == TT_DOUBLE)
4936 MarkSlotUndemotable(cx, from_ti, i);
4939 VMFragment* firstPeer = ((VMFragment*)from)->first;
4940 for (VMFragment* peer = firstPeer; peer; peer = peer->peer) {
4941 TreeInfo* peer_ti = peer->getTreeInfo();
4942 if (!peer_ti)
4943 continue;
4944 JS_ASSERT(peer->argc == from->argc);
4945 JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
4946 TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer);
4947 if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
4948 *peerp = peer;
4949 return consensus;
4953 return TypeConsensus_Bad;
4956 UnstableExit*
4957 TreeInfo::removeUnstableExit(VMSideExit* exit)
4959 /* Now erase this exit from the unstable exit list. */
4960 UnstableExit** tail = &this->unstableExits;
4961 for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
4962 if (uexit->exit == exit) {
4963 *tail = uexit->next;
4964 return *tail;
4966 tail = &uexit->next;
4968 JS_NOT_REACHED("exit not in unstable exit list");
4969 return NULL;
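/*
 * AttemptToStabilizeTree: called when a trace left through an
 * UNSTABLE_LOOP_EXIT. Either patch the exit to a type-compatible peer, trash
 * a peer that is blocked only by undemotions, or start recording a fresh
 * tree from the types seen at the exit. Returns true only if a new recording
 * was started.
 */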
4972 static JS_REQUIRES_STACK bool
4973 AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsbytecode* outer,
4974 uint32 outerArgc)
4976 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
4977 if (tm->needFlush) {
4978 ResetJIT(cx);
4979 return false;
4982 VMFragment* from = exit->root();
4983 TreeInfo* from_ti = from->getTreeInfo();
4985 VMFragment* peer = NULL;
4986 TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
4987 if (consensus == TypeConsensus_Okay) {
4988 TreeInfo* peer_ti = peer->getTreeInfo();
4989 JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots);
4990 JS_ASSERT(from_ti->nStackTypes == peer_ti->nStackTypes);
4991 /* Patch this exit to its peer */
4992 JoinPeers(tm->assembler, exit, peer);
4994 * Update peer global types. The |from| fragment should already be updated because it is on
4995 * the execution path, and somehow connected to the entry trace.
4997 if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length())
4998 SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti);
4999 JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length());
5000 /* This exit is no longer unstable, so remove it. */
5001 from_ti->removeUnstableExit(exit);
5002 debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);)
5003 return false;
5004 } else if (consensus == TypeConsensus_Undemotes) {
5005 /* The original tree is unconnectable, so trash it. */
5006 TrashTree(cx, peer);
5007 return false;
5010 return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj,
5011 from->globalShape, from_ti->globalSlots, cx->fp->argc);
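/*
 * AttemptToExtendTree: try to attach a branch trace to an existing tree at
 * the given side exit. Recording starts once the exit is hot enough (or
 * immediately on behalf of an outer recording) and is capped by MAX_BRANCHES
 * to limit tail duplication. If we bailed out of a nested tree call, the
 * anchor's and the inner exit's typemaps are merged to seed the branch.
 */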
5014 static JS_REQUIRES_STACK bool
5015 AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer
5016 #ifdef MOZ_TRACEVIS
5017 , TraceVisStateObj* tvso = NULL
5018 #endif
5021 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5022 if (tm->needFlush) {
5023 ResetJIT(cx);
5024 #ifdef MOZ_TRACEVIS
5025 if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
5026 #endif
5027 return false;
5030 Fragment* f = anchor->root();
5031 JS_ASSERT(f->vmprivate);
5032 TreeInfo* ti = (TreeInfo*)f->vmprivate;
5035 * Don't grow trees above a certain size to avoid code explosion due to
5036 * tail duplication.
5038 if (ti->branchCount >= MAX_BRANCHES) {
5039 #ifdef MOZ_TRACEVIS
5040 if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
5041 #endif
5042 return false;
5045 Fragment* c;
5046 if (!(c = anchor->target)) {
5047 Allocator& alloc = *JS_TRACE_MONITOR(cx).allocator;
5048 c = new (alloc) Fragment(cx->fp->regs->pc);
5049 c->root = anchor->from->root;
5050 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
5051 " LINE=%d ANCHOR=%p OFFS=%d\n",
5052 (void*)f, (void*)c, (void*)cx->fp->regs->pc, cx->fp->script->filename,
5053 js_FramePCToLineNumber(cx, cx->fp), (void*)anchor,
5054 FramePCOffset(cx->fp));
5055 anchor->target = c;
5056 c->root = f;
5060 * If we are recycling a fragment, it might have a different ip so reset it
5061 * here. This can happen when attaching a branch to a NESTED_EXIT, which
5062 * might extend along separate paths (i.e. after the loop edge, and after a
5063 * return statement).
5065 c->ip = cx->fp->regs->pc;
5067 debug_only_printf(LC_TMTracer,
5068 "trying to attach another branch to the tree (hits = %d)\n", c->hits());
5070 int32_t& hits = c->hits();
5071 if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
5072 /* Start recording a secondary (branch) trace from this point. */
5073 c->lirbuf = f->lirbuf;
5074 unsigned stackSlots;
5075 unsigned ngslots;
5076 JSTraceType* typeMap;
5077 TypeMap fullMap(NULL);
5078 if (exitedFrom == NULL) {
5080 * If we are coming straight from a simple side exit, just use that
5081 * exit's type map as starting point.
5083 ngslots = anchor->numGlobalSlots;
5084 stackSlots = anchor->numStackSlots;
5085 typeMap = anchor->fullTypeMap();
5086 } else {
5088 * If we side-exited on a loop exit and continue on a nesting
5089 * guard, the nesting guard (anchor) has the type information for
5090 * everything below the current scope, and the actual guard we
5091 * exited from has the types for everything in the current scope
5092 * (and whatever it inlined). We have to merge those maps here.
5094 VMSideExit* e1 = anchor;
5095 VMSideExit* e2 = exitedFrom;
5096 fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
5097 fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
5098 stackSlots = fullMap.length();
5099 ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
5100 JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
5101 JS_ASSERT(ngslots == fullMap.length() - stackSlots);
5102 typeMap = fullMap.data();
5104 JS_ASSERT(ngslots >= anchor->numGlobalSlots);
5105 bool rv = StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
5106 ngslots, typeMap, exitedFrom, outer, cx->fp->argc);
5107 #ifdef MOZ_TRACEVIS
5108 if (!rv && tvso)
5109 tvso->r = R_FAIL_EXTEND_START;
5110 #endif
5111 return rv;
5113 #ifdef MOZ_TRACEVIS
5114 if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
5115 #endif
5116 return false;
5119 static JS_REQUIRES_STACK VMSideExit*
5120 ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
5121 VMSideExit** innermostNestedGuardp);
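/*
 * RecordLoopEdge: handle a loop edge hit while a recorder is active. If the
 * edge enters a type-compatible inner tree, execute it and emit a call to it
 * in the outer trace; otherwise abort the outer recording and record,
 * stabilize or extend the inner tree instead.
 */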
5123 static JS_REQUIRES_STACK bool
5124 RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
5126 #ifdef JS_THREADSAFE
5127 if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
5128 js_AbortRecording(cx, "Global object not owned by this context");
5129 return false; /* we stay away from shared global objects */
5131 #endif
5133 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5135 /* Process needFlush and deep abort requests. */
5136 if (tm->needFlush) {
5137 ResetJIT(cx);
5138 return false;
5140 if (r->wasDeepAborted()) {
5141 js_AbortRecording(cx, "deep abort requested");
5142 return false;
5145 JS_ASSERT(r->getFragment() && !r->getFragment()->lastIns);
5146 VMFragment* root = (VMFragment*)r->getFragment()->root;
5148 /* Does this branch go to an inner loop? */
5149 VMFragment* first = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc,
5150 root->globalObj, root->globalShape, cx->fp->argc);
5151 if (!first) {
5152 /* Not an inner loop we can call, abort trace. */
5153 AUDIT(returnToDifferentLoopHeader);
5154 JS_ASSERT(!cx->fp->imacpc);
5155 debug_only_printf(LC_TMTracer,
5156 "loop edge to %d, header %d\n",
5157 cx->fp->regs->pc - cx->fp->script->code,
5158 (jsbytecode*)r->getFragment()->root->ip - cx->fp->script->code);
5159 js_AbortRecording(cx, "Loop edge does not return to header");
5160 return false;
5163 /* Make sure inner tree call will not run into an out-of-memory condition. */
5164 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
5165 !ReplenishReservedPool(cx, tm)) {
5166 js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
5167 return false;
5171 * Make sure the shape of the global object still matches (this might flush
5172 * the JIT cache).
5174 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
5175 uint32 globalShape = -1;
5176 SlotList* globalSlots = NULL;
5177 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
5178 return false;
5180 debug_only_printf(LC_TMTracer,
5181 "Looking for type-compatible peer (%s:%d@%d)\n",
5182 cx->fp->script->filename,
5183 js_FramePCToLineNumber(cx, cx->fp),
5184 FramePCOffset(cx->fp));
5186 // Find a matching inner tree. If none can be found, compile one.
5187 VMFragment* f = r->findNestedCompatiblePeer(first);
5188 if (!f || !f->code()) {
5189 AUDIT(noCompatInnerTrees);
5191 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
5192 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
5193 uint32 outerArgc = outerFragment->argc;
5194 uint32 argc = cx->fp->argc;
5195 js_AbortRecording(cx, "No compatible inner tree");
5197 // Find an empty fragment we can recycle, or allocate a new one.
5198 for (f = first; f != NULL; f = f->peer) {
5199 if (!f->code())
5200 break;
5202 if (!f || f->code()) {
5203 f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape, argc);
5204 if (!f) {
5205 ResetJIT(cx);
5206 return false;
5209 return RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc);
5212 r->adjustCallerTypes(f);
5213 r->prepareTreeCall(f);
5214 VMSideExit* innermostNestedGuard = NULL;
5215 VMSideExit* lr = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
5216 if (!lr || r->wasDeepAborted()) {
5217 if (!lr)
5218 js_AbortRecording(cx, "Couldn't call inner tree");
5219 return false;
5222 VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
5223 jsbytecode* outer = (jsbytecode*) outerFragment->ip;
5224 switch (lr->exitType) {
5225 case LOOP_EXIT:
5226 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
5227 if (innermostNestedGuard) {
5228 js_AbortRecording(cx, "Inner tree took different side exit, abort current "
5229 "recording and grow nesting tree");
5230 return AttemptToExtendTree(cx, innermostNestedGuard, lr, outer);
5233 /* Emit a call to the inner tree and continue recording the outer tree trace. */
5234 r->emitTreeCall(f, lr);
5235 return true;
5237 case UNSTABLE_LOOP_EXIT:
5238 /* Abort recording so the inner loop can become type stable. */
5239 js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
5240 return AttemptToStabilizeTree(cx, globalObj, lr, outer, outerFragment->argc);
5242 case OVERFLOW_EXIT:
5243 oracle.markInstructionUndemotable(cx->fp->regs->pc);
5244 /* FALL THROUGH */
5245 case BRANCH_EXIT:
5246 case CASE_EXIT:
5247 /* Abort recording the outer tree, extend the inner tree. */
5248 js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
5249 return AttemptToExtendTree(cx, lr, NULL, outer);
5251 default:
5252 debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
5253 js_AbortRecording(cx, "Inner tree not suitable for calling");
5254 return false;
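/*
 * IsEntryTypeCompatible: can the interpreter value *vp be mapped onto a
 * native slot of type *m at trace entry? Ints are accepted where doubles are
 * expected (they are widened), but a double only satisfies an int32 slot if
 * it holds an integral value.
 */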
5258 static bool
5259 IsEntryTypeCompatible(jsval* vp, JSTraceType* m)
5261 unsigned tag = JSVAL_TAG(*vp);
5263 debug_only_printf(LC_TMTracer, "%c/%c ", tagChar[tag], typeChar[*m]);
5265 switch (*m) {
5266 case TT_OBJECT:
5267 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
5268 !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
5269 return true;
5271 debug_only_printf(LC_TMTracer, "object != tag%u ", tag);
5272 return false;
5273 case TT_INT32:
5274 jsint i;
5275 if (JSVAL_IS_INT(*vp))
5276 return true;
5277 if (tag == JSVAL_DOUBLE && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(*vp), i))
5278 return true;
5279 debug_only_printf(LC_TMTracer, "int != tag%u(value=%lu) ", tag, (unsigned long)*vp);
5280 return false;
5281 case TT_DOUBLE:
5282 if (JSVAL_IS_INT(*vp) || tag == JSVAL_DOUBLE)
5283 return true;
5284 debug_only_printf(LC_TMTracer, "double != tag%u ", tag);
5285 return false;
5286 case TT_JSVAL:
5287 JS_NOT_REACHED("shouldn't see jsval type in entry");
5288 return false;
5289 case TT_STRING:
5290 if (tag == JSVAL_STRING)
5291 return true;
5292 debug_only_printf(LC_TMTracer, "string != tag%u ", tag);
5293 return false;
5294 case TT_NULL:
5295 if (JSVAL_IS_NULL(*vp))
5296 return true;
5297 debug_only_printf(LC_TMTracer, "null != tag%u ", tag);
5298 return false;
5299 case TT_PSEUDOBOOLEAN:
5300 if (tag == JSVAL_SPECIAL)
5301 return true;
5302 debug_only_printf(LC_TMTracer, "bool != tag%u ", tag);
5303 return false;
5304 default:
5305 JS_ASSERT(*m == TT_FUNCTION);
5306 if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
5307 HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
5308 return true;
5310 debug_only_printf(LC_TMTracer, "fun != tag%u ", tag);
5311 return false;
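/*
 * TypeCompatibilityVisitor: walk the current global and stack slots and
 * check each one against a candidate peer's typemap. Where the only problem
 * is int/double disagreement, it also updates the oracle so future
 * recordings keep the slot as a double.
 */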
5315 class TypeCompatibilityVisitor : public SlotVisitorBase
5317 TraceRecorder &mRecorder;
5318 JSContext *mCx;
5319 JSTraceType *mTypeMap;
5320 unsigned mStackSlotNum;
5321 bool mOk;
5322 public:
5323 TypeCompatibilityVisitor (TraceRecorder &recorder,
5324 JSTraceType *typeMap) :
5325 mRecorder(recorder),
5326 mCx(mRecorder.cx),
5327 mTypeMap(typeMap),
5328 mStackSlotNum(0),
5329 mOk(true)
5332 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
5333 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
5334 debug_only_printf(LC_TMTracer, "global%d=", n);
5335 if (!IsEntryTypeCompatible(vp, mTypeMap)) {
5336 mOk = false;
5337 } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
5338 oracle.markGlobalSlotUndemotable(mCx, slot);
5339 mOk = false;
5340 } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
5341 oracle.markGlobalSlotUndemotable(mCx, slot);
5343 mTypeMap++;
5346 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
5347 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
5348 for (size_t i = 0; i < count; ++i) {
5349 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
5350 if (!IsEntryTypeCompatible(vp, mTypeMap)) {
5351 mOk = false;
5352 } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
5353 oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
5354 mOk = false;
5355 } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
5356 oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
5358 vp++;
5359 mTypeMap++;
5360 mStackSlotNum++;
5362 return true;
5365 bool isOk() {
5366 return mOk;
5370 JS_REQUIRES_STACK VMFragment*
5371 TraceRecorder::findNestedCompatiblePeer(VMFragment* f)
5373 JSTraceMonitor* tm;
5375 tm = &JS_TRACE_MONITOR(cx);
5376 unsigned int ngslots = treeInfo->globalSlots->length();
5378 TreeInfo* ti;
5379 for (; f != NULL; f = f->peer) {
5380 if (!f->code())
5381 continue;
5383 ti = (TreeInfo*)f->vmprivate;
5385 debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
5387 if (ngslots > ti->nGlobalTypes())
5388 SpecializeTreesToMissingGlobals(cx, globalObj, ti);
5391 * Determine whether the typemap of the inner tree matches the outer
5392 * tree's current state. If the inner tree expects an integer, but the
5393 * outer tree doesn't guarantee an integer for that slot, we mark the
5394 * slot undemotable and mismatch here. This will force a new tree to be
5395 * compiled that accepts a double for the slot. If the inner tree
5396 * expects a double, but the outer tree has an integer, we can proceed,
5397 * but we mark the location undemotable.
5399 TypeCompatibilityVisitor visitor(*this, ti->typeMap.data());
5400 VisitSlots(visitor, cx, 0, *treeInfo->globalSlots);
5402 debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
5403 if (visitor.isOk())
5404 return f;
5407 return NULL;
5410 class CheckEntryTypeVisitor : public SlotVisitorBase
5412 bool mOk;
5413 JSTraceType *mTypeMap;
5414 public:
5415 CheckEntryTypeVisitor(JSTraceType *typeMap) :
5416 mOk(true),
5417 mTypeMap(typeMap)
5420 JS_ALWAYS_INLINE void checkSlot(jsval *vp, char const *name, int i) {
5421 debug_only_printf(LC_TMTracer, "%s%d=", name, i);
5422 JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
5423 mOk = IsEntryTypeCompatible(vp, mTypeMap++);
5426 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
5427 visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
5428 if (mOk)
5429 checkSlot(vp, "global", n);
5432 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
5433 visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
5434 for (size_t i = 0; i < count; ++i) {
5435 if (!mOk)
5436 break;
5437 checkSlot(vp++, stackSlotKind(), i);
5439 return mOk;
5442 bool isOk() {
5443 return mOk;
5448 * Check if types are usable for trace execution.
5450 * @param cx Context.
5451 * @param ti Tree info of peer we're testing.
5452 * @return True if compatible (with or without demotions), false otherwise.
5454 static JS_REQUIRES_STACK bool
5455 CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeInfo* ti)
5457 unsigned int ngslots = ti->globalSlots->length();
5459 JS_ASSERT(ti->nStackTypes == NativeStackSlots(cx, 0));
5461 if (ngslots > ti->nGlobalTypes())
5462 SpecializeTreesToMissingGlobals(cx, globalObj, ti);
5464 JS_ASSERT(ti->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
5465 JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
5466 JS_ASSERT(ti->nGlobalTypes() == ngslots);
5468 CheckEntryTypeVisitor visitor(ti->typeMap.data());
5469 VisitSlots(visitor, cx, 0, *ti->globalSlots);
5471 debug_only_print0(LC_TMTracer, "\n");
5472 return visitor.isOk();
5476 * Find an acceptable entry tree given a PC.
5478 * @param cx Context.
5479 * @param globalObj Global object.
5480 * @param f First peer fragment.
5482 * @out count Number of fragments consulted.
5484 static JS_REQUIRES_STACK VMFragment*
5485 FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, VMFragment* f, uintN& count)
5487 count = 0;
5488 for (; f != NULL; f = f->peer) {
5489 if (f->vmprivate == NULL)
5490 continue;
5491 debug_only_printf(LC_TMTracer,
5492 "checking vm types %p (ip: %p): ", (void*)f, f->ip);
5493 if (CheckEntryTypes(cx, globalObj, (TreeInfo*)f->vmprivate))
5494 return f;
5495 ++count;
5497 return NULL;
5500 static void
5501 LeaveTree(InterpState&, VMSideExit* lr);
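/*
 * ExecuteTree: run a compiled tree. Sets up the native global and stack
 * frames, copies the interpreter state into them, calls the generated code,
 * and has LeaveTree flush the results back to the interpreter. Returns the
 * innermost side exit taken, or NULL if the tree could not be entered.
 */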
5503 static JS_REQUIRES_STACK VMSideExit*
5504 ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
5505 VMSideExit** innermostNestedGuardp)
5507 #ifdef MOZ_TRACEVIS
5508 TraceVisStateObj tvso(cx, S_EXECUTE);
5509 #endif
5511 JS_ASSERT(f->root == f && f->code() && f->vmprivate);
5514 * The JIT records and expects to execute with two scope-chain
5515 * assumptions baked-in:
5517 * 1. That the bottom of the scope chain is global, in the sense of
5518 * JSCLASS_IS_GLOBAL.
5520 * 2. That the scope chain between fp and the global is free of
5521 * "unusual" native objects such as HTML forms or other funny
5522 * things.
5524 * #2 is checked here while following the scope-chain links, via
5525 * js_IsCacheableNonGlobalScope, which consults a whitelist of known
5526 * class types; once a global is found, it's checked for #1. Failing
5527 * either check causes an early return from execution.
5529 JSObject* parent;
5530 JSObject* child = cx->fp->scopeChain;
5531 while ((parent = OBJ_GET_PARENT(cx, child)) != NULL) {
5532 if (!js_IsCacheableNonGlobalScope(child)) {
5533 debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
5534 Blacklist((jsbytecode*) f->root->ip);
5535 return NULL;
5537 child = parent;
5539 JSObject* globalObj = child;
5540 if (!(OBJ_GET_CLASS(cx, globalObj)->flags & JSCLASS_IS_GLOBAL)) {
5541 debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
5542 Blacklist((jsbytecode*) f->root->ip);
5543 return NULL;
5546 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5547 TreeInfo* ti = (TreeInfo*)f->vmprivate;
5548 unsigned ngslots = ti->globalSlots->length();
5549 uint16* gslots = ti->globalSlots->data();
5550 unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
5552 /* Make sure the global object is sane. */
5553 JS_ASSERT_IF(ngslots != 0,
5554 OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
5555 ((VMFragment*)f)->globalShape);
5557 /* Make sure our caller replenished the double pool. */
5558 JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
5560 /* Reserve objects and stack space now, to make leaving the tree infallible. */
5561 if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
5562 return NULL;
5564 /* Set up the interpreter state block, which is followed by the native global frame. */
5565 InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
5566 state->cx = cx;
5567 state->inlineCallCountp = &inlineCallCount;
5568 state->innermostNestedGuardp = innermostNestedGuardp;
5569 state->outermostTree = ti;
5570 state->lastTreeExitGuard = NULL;
5571 state->lastTreeCallGuard = NULL;
5572 state->rpAtLastTreeCall = NULL;
5573 state->nativeVp = NULL;
5574 state->builtinStatus = 0;
5576 /* Set up the native global frame. */
5577 double* global = (double*)(state+1);
5579 /* Set up the native stack frame. */
5580 double stack_buffer[MAX_NATIVE_STACK_SLOTS];
5581 state->stackBase = stack_buffer;
5582 state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
5583 state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
5585 /* Set up the native call stack frame. */
5586 FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
5587 state->callstackBase = callstack_buffer;
5588 state->rp = callstack_buffer;
5589 state->eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
5591 void *reserve;
5592 state->stackMark = JS_ARENA_MARK(&cx->stackPool);
5593 JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
5594 if (!reserve)
5595 return NULL;
5597 #ifdef DEBUG
5598 memset(stack_buffer, 0xCD, sizeof(stack_buffer));
5599 memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
5600 JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
5601 #endif
5603 debug_only_stmt(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
5604 debug_only_printf(LC_TMTracer,
5605 "entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
5606 cx->fp->script->filename,
5607 js_FramePCToLineNumber(cx, cx->fp),
5608 FramePCOffset(cx->fp),
5609 ti->maxNativeStackSlots,
5610 f->code());
5612 JS_ASSERT(ti->nGlobalTypes() == ngslots);
5613 BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
5614 ti->typeMap.data(), global, stack_buffer);
5616 union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
5617 u.code = f->code();
5619 #ifdef EXECUTE_TREE_TIMER
5620 state->startTime = rdtsc();
5621 #endif
5623 JS_ASSERT(!tm->tracecx);
5624 tm->tracecx = cx;
5625 state->prev = cx->interpState;
5626 cx->interpState = state;
5628 debug_only_stmt(fflush(NULL));
5629 GuardRecord* rec;
5631 // Note that the block scoping is crucial here for TraceVis; the
5632 // TraceVisStateObj constructors and destructors must run at the right times.
5634 #ifdef MOZ_TRACEVIS
5635 TraceVisStateObj tvso_n(cx, S_NATIVE);
5636 #endif
5637 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
5638 SIMULATE_FASTCALL(rec, state, NULL, u.func);
5639 #else
5640 rec = u.func(state, NULL);
5641 #endif
5644 JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
5645 JS_ASSERT(!state->nativeVp);
5647 VMSideExit* lr = (VMSideExit*)rec->exit;
5649 AUDIT(traceTriggered);
5651 cx->interpState = state->prev;
5653 JS_ASSERT(!cx->bailExit);
5654 JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
5655 tm->tracecx = NULL;
5656 LeaveTree(*state, lr);
5657 return state->innermost;
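/*
 * Illustrative sketch, not part of this file: the single alloca in ExecuteTree
 * above lays out the InterpState block immediately followed by the native
 * global frame plus one debug sentinel double, which is why the code derives
 * the frame as (double*)(state + 1). The helper name is hypothetical.
 */
#if 0
static size_t
ExampleExecuteTreeAllocaBytes(unsigned globalFrameSize)
{
    /* [ InterpState | global[0..globalFrameSize-1] | sentinel double ] */
    return sizeof(InterpState) + (globalFrameSize + 1) * sizeof(double);
}
#endif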
5660 static JS_FORCES_STACK void
5661 LeaveTree(InterpState& state, VMSideExit* lr)
5663 VOUCH_DOES_NOT_REQUIRE_STACK();
5665 JSContext* cx = state.cx;
5666 FrameInfo** callstack = state.callstackBase;
5667 double* stack = state.stackBase;
5670 * Unless this turns out to be a nested bailout, the guard returned by the
5671 * call is the one we have to use to adjust pc and sp.
5673 VMSideExit* innermost = lr;
5676 * While executing a tree we do not update state.sp and state.rp even if
5677 * they grow. Instead, guards tell us by how much sp and rp should be
5678 * incremented in case of a side exit. When calling a nested tree, however,
5679 * we actively adjust sp and rp. If we have such frames from outer trees on
5680 * the stack, then rp will have been adjusted. Before we can process the
5681 * stack of the frames of the tree we directly exited from, we have to
5682 * first work our way through the outer frames and generate interpreter
5683 * frames for them. Once the call stack (rp) is empty, we can process the
5684 * final frames (which again are not directly visible and only the guard we
5685 * exited on will tell us about).
5687 FrameInfo** rp = (FrameInfo**)state.rp;
5688 if (lr->exitType == NESTED_EXIT) {
5689 VMSideExit* nested = state.lastTreeCallGuard;
5690 if (!nested) {
5692 * If lastTreeCallGuard is not set in state, we only have a single
5693 * level of nesting in this exit, so lr itself is the innermost and
5694 * outermost nested guard, and hence we set nested to lr. The
5695 * calldepth of the innermost guard is not added to state.rp, so we
5696 * do it here manually. For a nesting depth greater than 1 the
5697 * CallTree builtin already added the innermost guard's calldepth
5698 * to state.rpAtLastTreeCall.
5700 nested = lr;
5701 rp += lr->calldepth;
5702 } else {
5704 * During unwinding state.rp gets overwritten at every step and we
5705 * restore it here to its state at the innermost nested guard. The
5706 * builtin already added the calldepth of that innermost guard to
5707 * rpAtLastTreeCall.
5709 rp = (FrameInfo**)state.rpAtLastTreeCall;
5711 innermost = state.lastTreeExitGuard;
5712 if (state.innermostNestedGuardp)
5713 *state.innermostNestedGuardp = nested;
5714 JS_ASSERT(nested);
5715 JS_ASSERT(nested->exitType == NESTED_EXIT);
5716 JS_ASSERT(state.lastTreeExitGuard);
5717 JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
5720 int32_t bs = state.builtinStatus;
5721 bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
5722 if (bailed) {
5724 * Deep-bail case.
5726 * A _FAIL native already called LeaveTree. We already reconstructed
5727 * the interpreter stack, in pre-call state, with pc pointing to the
5728 * CALL/APPLY op, for correctness. Then we continued in native code.
5730 * First, if we just returned from a slow native, pop its stack frame.
5732 if (!cx->fp->script) {
5733 JSStackFrame *fp = cx->fp;
5734 JS_ASSERT(FUN_SLOW_NATIVE(fp->fun));
5735 JS_ASSERT(fp->regs == NULL);
5736 JS_ASSERT(fp->down->regs != &((JSInlineFrame *) fp)->callerRegs);
5737 cx->fp = fp->down;
5738 JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame *) fp)->mark);
5740 JS_ASSERT(cx->fp->script);
5742 if (!(bs & JSBUILTIN_ERROR)) {
5744 * The builtin or native deep-bailed but finished successfully
5745 * (no exception or error).
5747 * After it returned, the JIT code stored the results of the
5748 * builtin or native at the top of the native stack and then
5749 * immediately flunked the guard on state->builtinStatus.
5751 * Now LeaveTree has been called again from the tail of
5752 * ExecuteTree. We are about to return to the interpreter. Adjust
5753 * the top stack frame to resume on the next op.
5755 JSFrameRegs* regs = cx->fp->regs;
5756 JSOp op = (JSOp) *regs->pc;
5757 JS_ASSERT(op == JSOP_CALL || op == JSOP_APPLY || op == JSOP_NEW ||
5758 op == JSOP_GETPROP || op == JSOP_GETTHISPROP || op == JSOP_GETARGPROP ||
5759 op == JSOP_GETLOCALPROP || op == JSOP_LENGTH ||
5760 op == JSOP_GETELEM || op == JSOP_CALLELEM ||
5761 op == JSOP_SETPROP || op == JSOP_SETNAME || op == JSOP_SETMETHOD ||
5762 op == JSOP_SETELEM || op == JSOP_INITELEM ||
5763 op == JSOP_INSTANCEOF);
5764 const JSCodeSpec& cs = js_CodeSpec[op];
5765 regs->sp -= (cs.format & JOF_INVOKE) ? GET_ARGC(regs->pc) + 2 : cs.nuses;
5766 regs->sp += cs.ndefs;
5767 regs->pc += cs.length;
5768 JS_ASSERT_IF(!cx->fp->imacpc,
5769 cx->fp->slots + cx->fp->script->nfixed +
5770 js_ReconstructStackDepth(cx, cx->fp->script, regs->pc) ==
5771 regs->sp);
5774 * If there's a tree call around the point that we deep exited at,
5775 * then state.sp and state.rp were restored to their original
5776 * values before the tree call and sp might be less than deepBailSp,
5777 * which we sampled when we were told to deep bail.
5779 JS_ASSERT(state.deepBailSp >= state.stackBase && state.sp <= state.deepBailSp);
5782 * As explained above, the JIT code stored a result value or values
5783 * on the native stack. Transfer them to the interpreter stack now.
5784 * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
5785 * loop.)
5787 JSTraceType* typeMap = innermost->stackTypeMap();
5788 for (int i = 1; i <= cs.ndefs; i++) {
5789 NativeToValue(cx,
5790 regs->sp[-i],
5791 typeMap[innermost->numStackSlots - i],
5792 (jsdouble *) state.deepBailSp
5793 + innermost->sp_adj / sizeof(jsdouble) - i);
5796 return;
5799 JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
5800 while (callstack < rp) {
5801 FrameInfo* fi = *callstack;
5802 /* Peek at the callee native slot in the not-yet-synthesized down frame. */
5803 JSObject* callee = *(JSObject**)&stack[fi->callerHeight];
5806 * Synthesize a stack frame and write out the values in it using the
5807 * type map pointer on the native call stack.
5809 SynthesizeFrame(cx, *fi, callee);
5810 int slots = FlushNativeStackFrame(cx, 1 /* callDepth */, (JSTraceType*)(fi + 1),
5811 stack, cx->fp);
5812 #ifdef DEBUG
5813 JSStackFrame* fp = cx->fp;
5814 debug_only_printf(LC_TMTracer,
5815 "synthesized deep frame for %s:%u@%u, slots=%d\n",
5816 fp->script->filename,
5817 js_FramePCToLineNumber(cx, fp),
5818 FramePCOffset(fp),
5819 slots);
5820 #endif
5822 * Keep track of the additional frames we put on the interpreter stack
5823 * and the native stack slots we consumed.
5825 ++*state.inlineCallCountp;
5826 ++callstack;
5827 stack += slots;
5831 * We already synthesized the frames around the innermost guard. Here we
5832 * just deal with additional frames inside the tree we are bailing out
5833 * from.
5835 JS_ASSERT(rp == callstack);
5836 unsigned calldepth = innermost->calldepth;
5837 unsigned calldepth_slots = 0;
5838 unsigned calleeOffset = 0;
5839 for (unsigned n = 0; n < calldepth; ++n) {
5840 /* Peek at the callee native slot in the not-yet-synthesized down frame. */
5841 calleeOffset += callstack[n]->callerHeight;
5842 JSObject* callee = *(JSObject**)&stack[calleeOffset];
5844 /* Reconstruct the frame. */
5845 calldepth_slots += SynthesizeFrame(cx, *callstack[n], callee);
5846 ++*state.inlineCallCountp;
5847 #ifdef DEBUG
5848 JSStackFrame* fp = cx->fp;
5849 debug_only_printf(LC_TMTracer,
5850 "synthesized shallow frame for %s:%u@%u\n",
5851 fp->script->filename, js_FramePCToLineNumber(cx, fp),
5852 FramePCOffset(fp));
5853 #endif
5857 * Adjust sp and pc relative to the tree we exited from (not the tree we
5858 * entered into). These are our final values for sp and pc since
5859 * SynthesizeFrame has already taken care of all frames in between. But
5860 * first we recover fp->blockChain, which comes from the side exit
5861 * struct.
5863 JSStackFrame* fp = cx->fp;
5865 fp->blockChain = innermost->block;
5868 * If we are not exiting from an inlined frame, state->sp is spbase;
5869 * otherwise spbase is offset by whatever slots the surrounding frames consume.
5871 fp->regs->pc = innermost->pc;
5872 fp->imacpc = innermost->imacpc;
5873 fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
5874 JS_ASSERT_IF(!fp->imacpc,
5875 fp->slots + fp->script->nfixed +
5876 js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
5878 #ifdef EXECUTE_TREE_TIMER
5879 uint64 cycles = rdtsc() - state.startTime;
5880 #elif defined(JS_JIT_SPEW)
5881 uint64 cycles = 0;
5882 #endif
5884 debug_only_printf(LC_TMTracer,
5885 "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%d, "
5886 "calldepth=%d, cycles=%llu\n",
5887 fp->script->filename,
5888 js_FramePCToLineNumber(cx, fp),
5889 FramePCOffset(fp),
5890 js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc],
5891 (void*)lr,
5892 getExitName(lr->exitType),
5893 fp->regs->sp - StackBase(fp),
5894 calldepth,
5895 cycles);
5898 * If this trace is part of a tree, later branches might have added
5899 * additional globals for which we don't have any type information
5900 * available in the side exit. We merge in this information from the entry
5901 * type-map. See also the comment in the constructor of TraceRecorder
5902 * regarding why this is always safe to do.
5904 TreeInfo* outermostTree = state.outermostTree;
5905 uint16* gslots = outermostTree->globalSlots->data();
5906 unsigned ngslots = outermostTree->globalSlots->length();
5907 JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
5908 JSTraceType* globalTypeMap;
5910 /* Are there enough globals? */
5911 Queue<JSTraceType> typeMap(0);
5912 if (innermost->numGlobalSlots == ngslots) {
5913 /* Yes. This is the ideal fast path. */
5914 globalTypeMap = innermost->globalTypeMap();
5915 } else {
5917 * No. Merge the typemap of the innermost entry and exit together. This
5918 * should always work because it is invalid for nested trees or linked
5919 * trees to have incompatible types. Thus, whenever a new global type
5920 * is lazily added into a tree, all dependent and linked trees are
5921 * immediately specialized (see bug 476653).
5923 JS_ASSERT(innermost->root()->getTreeInfo()->nGlobalTypes() == ngslots);
5924 JS_ASSERT(innermost->root()->getTreeInfo()->nGlobalTypes() > innermost->numGlobalSlots);
5925 typeMap.ensure(ngslots);
5926 #ifdef DEBUG
5927 unsigned check_ngslots =
5928 #endif
5929 BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
5930 JS_ASSERT(check_ngslots == ngslots);
5931 globalTypeMap = typeMap.data();
5934 /* Write back the topmost native stack frame. */
5935 #ifdef DEBUG
5936 int slots =
5937 #endif
5938 FlushNativeStackFrame(cx, innermost->calldepth,
5939 innermost->stackTypeMap(),
5940 stack, NULL);
5941 JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
5943 if (innermost->nativeCalleeWord)
5944 SynthesizeSlowNativeFrame(state, cx, innermost);
5946 /* Write back interned globals. */
5947 double* global = (double*)(&state + 1);
5948 FlushNativeGlobalFrame(cx, global,
5949 ngslots, gslots, globalTypeMap);
5950 #ifdef DEBUG
5951 /* Verify that our state restoration worked. */
5952 for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
5953 JS_ASSERT_IF(fp->argv, JSVAL_IS_OBJECT(fp->argv[-1]));
5955 #endif
5956 #ifdef JS_JIT_SPEW
5957 if (innermost->exitType != TIMEOUT_EXIT)
5958 AUDIT(sideExitIntoInterpreter);
5959 else
5960 AUDIT(timeoutIntoInterpreter);
5961 #endif
5963 state.innermost = innermost;
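/*
 * Illustrative sketch, not part of this file: the global type-map selection
 * made near the end of LeaveTree, pulled out as a hypothetical helper. It
 * uses the exit's own map when it covers every interned global and otherwise
 * merges a full map from the tree (see the code above for the real version).
 */
#if 0
static JSTraceType*
ExampleSelectGlobalTypeMap(VMSideExit* innermost, unsigned ngslots,
                           Queue<JSTraceType>& scratch)
{
    if (innermost->numGlobalSlots == ngslots)
        return innermost->globalTypeMap();        /* fast path */
    scratch.ensure(ngslots);                      /* merge entry + exit maps */
    BuildGlobalTypeMapFromInnerTree(scratch, innermost);
    return scratch.data();
}
#endif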
5966 JS_REQUIRES_STACK bool
5967 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
5969 #ifdef MOZ_TRACEVIS
5970 TraceVisStateObj tvso(cx, S_MONITOR);
5971 #endif
5973 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
5975 /* Is the recorder currently active? */
5976 if (tm->recorder) {
5977 jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;
5979 if (RecordLoopEdge(cx, tm->recorder, inlineCallCount))
5980 return true;
5983 * RecordLoopEdge will invoke an inner tree if we have a matching
5984 * one. If we arrive here, that tree didn't run to completion and
5985 * instead we mismatched or the inner tree took a side exit other than
5986 * the loop exit. We are thus no longer guaranteed to be parked on the
5987 * same loop header js_MonitorLoopEdge was called for. In fact, this
5988 * might not even be a loop header at all. Hence if the program counter
5989 * no longer hovers over the inner loop header, return to the
5990 * interpreter and do not attempt to trigger or record a new tree at
5991 * this location.
5993 if (innerLoopHeaderPC != cx->fp->regs->pc) {
5994 #ifdef MOZ_TRACEVIS
5995 tvso.r = R_INNER_SIDE_EXIT;
5996 #endif
5997 return false;
6000 JS_ASSERT(!tm->recorder);
6002 /* Check the pool of reserved doubles (this might trigger a GC). */
6003 if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
6004 !ReplenishReservedPool(cx, tm)) {
6005 #ifdef MOZ_TRACEVIS
6006 tvso.r = R_DOUBLES;
6007 #endif
6008 return false; /* Out of memory, don't try to record now. */
6012 * Make sure the shape of the global object still matches (this might flush
6013 * the JIT cache).
6015 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
6016 uint32 globalShape = -1;
6017 SlotList* globalSlots = NULL;
6019 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
6020 Backoff(cx, cx->fp->regs->pc);
6021 return false;
6024 /* Do not enter the JIT code with a pending operation callback. */
6025 if (cx->operationCallbackFlag) {
6026 #ifdef MOZ_TRACEVIS
6027 tvso.r = R_CALLBACK_PENDING;
6028 #endif
6029 return false;
6032 jsbytecode* pc = cx->fp->regs->pc;
6033 uint32 argc = cx->fp->argc;
6035 VMFragment* f = getLoop(tm, pc, globalObj, globalShape, argc);
6036 if (!f)
6037 f = getAnchor(tm, pc, globalObj, globalShape, argc);
6039 if (!f) {
6040 ResetJIT(cx);
6041 #ifdef MOZ_TRACEVIS
6042 tvso.r = R_OOM_GETANCHOR;
6043 #endif
6044 return false;
6048 * If we have no code in the anchor and no peers, we definitively won't be
6049 * able to activate any trees, so start compiling.
6051 if (!f->code() && !f->peer) {
6052 record:
6053 if (++f->hits() < HOTLOOP) {
6054 #ifdef MOZ_TRACEVIS
6055 tvso.r = f->hits() < 1 ? R_BACKED_OFF : R_COLD;
6056 #endif
6057 return false;
6061 * We can give RecordTree the root peer. If that peer is already taken,
6062 * it will walk the peer list and find us a free slot or allocate a new
6063 * tree if needed.
6065 bool rv = RecordTree(cx, tm, f->first, NULL, 0, globalObj, globalShape,
6066 globalSlots, argc);
6067 #ifdef MOZ_TRACEVIS
6068 if (!rv)
6069 tvso.r = R_FAIL_RECORD_TREE;
6070 #endif
6071 return rv;
6074 debug_only_printf(LC_TMTracer,
6075 "Looking for compat peer %d@%d, from %p (ip: %p)\n",
6076 js_FramePCToLineNumber(cx, cx->fp),
6077 FramePCOffset(cx->fp), (void*)f, f->ip);
6079 uintN count;
6080 Fragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
6081 if (!match) {
6082 if (count < MAXPEERS)
6083 goto record;
6086 * If we hit the max peers ceiling, don't try to look up fragments all
6087 * the time. That's expensive. This must be a rather type-unstable loop.
6089 debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
6090 Blacklist((jsbytecode*) f->root->ip);
6091 #ifdef MOZ_TRACEVIS
6092 tvso.r = R_MAX_PEERS;
6093 #endif
6094 return false;
6097 VMSideExit* lr = NULL;
6098 VMSideExit* innermostNestedGuard = NULL;
6100 lr = ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
6101 if (!lr) {
6102 #ifdef MOZ_TRACEVIS
6103 tvso.r = R_FAIL_EXECUTE_TREE;
6104 #endif
6105 return false;
6109 * If we exit on a branch, or on a tree call guard, try to grow the inner
6110 * tree (in case of a branch exit), or the tree nested around the tree we
6111 * exited from (in case of the tree call guard).
6113 bool rv;
6114 switch (lr->exitType) {
6115 case UNSTABLE_LOOP_EXIT:
6116 rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL);
6117 #ifdef MOZ_TRACEVIS
6118 if (!rv)
6119 tvso.r = R_FAIL_STABILIZE;
6120 #endif
6121 return rv;
6123 case OVERFLOW_EXIT:
6124 oracle.markInstructionUndemotable(cx->fp->regs->pc);
6125 /* FALL THROUGH */
6126 case BRANCH_EXIT:
6127 case CASE_EXIT:
6128 return AttemptToExtendTree(cx, lr, NULL, NULL
6129 #ifdef MOZ_TRACEVIS
6130 , &tvso
6131 #endif
6134 case LOOP_EXIT:
6135 if (innermostNestedGuard)
6136 return AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL
6137 #ifdef MOZ_TRACEVIS
6138 , &tvso
6139 #endif
6141 #ifdef MOZ_TRACEVIS
6142 tvso.r = R_NO_EXTEND_OUTER;
6143 #endif
6144 return false;
6146 #ifdef MOZ_TRACEVIS
6147 case MISMATCH_EXIT: tvso.r = R_MISMATCH_EXIT; return false;
6148 case OOM_EXIT: tvso.r = R_OOM_EXIT; return false;
6149 case TIMEOUT_EXIT: tvso.r = R_TIMEOUT_EXIT; return false;
6150 case DEEP_BAIL_EXIT: tvso.r = R_DEEP_BAIL_EXIT; return false;
6151 case STATUS_EXIT: tvso.r = R_STATUS_EXIT; return false;
6152 #endif
6154 default:
6156 * No, this was an unusual exit (i.e. out of memory/GC), so just resume
6157 * interpretation.
6159 #ifdef MOZ_TRACEVIS
6160 tvso.r = R_OTHER_EXIT;
6161 #endif
6162 return false;
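/*
 * Illustrative sketch, not part of this file: the recording trigger used by
 * js_MonitorLoopEdge above is a plain hit counter, i.e. an anchor must be hit
 * HOTLOOP times before RecordTree is attempted. Hypothetical stand-alone form
 * (the real code uses f->hits() on the fragment).
 */
#if 0
static bool
ExampleLoopIsHot(int32& hits)
{
    return ++hits >= HOTLOOP;    /* only then do we try to record a tree */
}
#endif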
6166 JS_REQUIRES_STACK JSRecordingStatus
6167 TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op)
6169 Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
6171 /* Process needFlush and deepAbort() requests now. */
6172 if (JS_TRACE_MONITOR(cx).needFlush) {
6173 ResetJIT(cx);
6174 return JSRS_STOP;
6176 if (tr->wasDeepAborted()) {
6177 js_AbortRecording(cx, "deep abort requested");
6178 return JSRS_STOP;
6180 JS_ASSERT(!tr->fragment->lastIns);
6183 * Clear one-shot state used to communicate between record_JSOP_CALL and post-
6184 * opcode-case-guts record hook (record_NativeCallComplete).
6186 tr->pendingSpecializedNative = NULL;
6187 tr->newobj_ins = NULL;
6189 /* Handle one-shot request from finishGetProp to snapshot post-op state and guard. */
6190 if (tr->pendingGuardCondition) {
6191 tr->guard(true, tr->pendingGuardCondition, STATUS_EXIT);
6192 tr->pendingGuardCondition = NULL;
6195 /* Handle one-shot request to unbox the result of a property get. */
6196 if (tr->pendingUnboxSlot) {
6197 LIns* val_ins = tr->get(tr->pendingUnboxSlot);
6198 val_ins = tr->unbox_jsval(*tr->pendingUnboxSlot, val_ins, tr->snapshot(BRANCH_EXIT));
6199 tr->set(tr->pendingUnboxSlot, val_ins);
6200 tr->pendingUnboxSlot = 0;
6203 debug_only_stmt(
6204 if (js_LogController.lcbits & LC_TMRecorder) {
6205 js_Disassemble1(cx, cx->fp->script, cx->fp->regs->pc,
6206 cx->fp->imacpc
6207 ? 0 : cx->fp->regs->pc - cx->fp->script->code,
6208 !cx->fp->imacpc, stdout);
6213 * If op is not a break or a return from a loop, continue recording and
6214 * follow the trace. We check for imacro-calling bytecodes inside each
6215 * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
6216 * time.
6219 JSRecordingStatus status;
6220 #ifdef DEBUG
6221 bool wasInImacro = (cx->fp->imacpc != NULL);
6222 #endif
6223 switch (op) {
6224 default:
6225 status = JSRS_ERROR;
6226 goto stop_recording;
6227 # define OPDEF(x,val,name,token,length,nuses,ndefs,prec,format) \
6228 case x: \
6229 status = tr->record_##x(); \
6230 if (JSOP_IS_IMACOP(x)) \
6231 goto imacro; \
6232 break;
6233 # include "jsopcode.tbl"
6234 # undef OPDEF
6237 JS_ASSERT(status != JSRS_IMACRO);
6238 JS_ASSERT_IF(!wasInImacro, cx->fp->imacpc == NULL);
6240 /* Process deepAbort() requests now. */
6241 if (tr->wasDeepAborted()) {
6242 js_AbortRecording(cx, "deep abort requested");
6243 return JSRS_STOP;
6246 if (assm->error()) {
6247 js_AbortRecording(cx, "error during recording");
6248 return JSRS_STOP;
6251 if (tr->traceMonitor->allocator->outOfMemory() ||
6252 js_OverfullJITCache(&JS_TRACE_MONITOR(cx), false)) {
6253 js_AbortRecording(cx, "no more memory");
6254 ResetJIT(cx);
6255 return JSRS_STOP;
6258 imacro:
6259 if (!STATUS_ABORTS_RECORDING(status))
6260 return status;
6262 stop_recording:
6263 /* If we recorded the end of the trace, destroy the recorder now. */
6264 if (tr->fragment->lastIns) {
6265 DeleteRecorder(cx);
6266 return status;
6269 /* Looks like we encountered an error condition. Abort recording. */
6270 js_AbortRecording(cx, js_CodeName[op]);
6271 return status;
6274 JS_REQUIRES_STACK void
6275 js_AbortRecording(JSContext* cx, const char* reason)
6277 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
6278 JS_ASSERT(tm->recorder != NULL);
6279 AUDIT(recorderAborted);
6281 /* Abort the trace and blacklist its starting point. */
6282 Fragment* f = tm->recorder->getFragment();
6285 * If the recorder already had its fragment disposed, or we actually
6286 * finished recording and this recorder is merely passing through the deep
6287 * abort state to the next recorder on the stack, just destroy the
6288 * recorder. There is nothing to abort.
6290 if (!f || f->lastIns) {
6291 DeleteRecorder(cx);
6292 return;
6295 JS_ASSERT(!f->vmprivate);
6296 #ifdef DEBUG
6297 TreeInfo* ti = tm->recorder->getTreeInfo();
6298 debug_only_printf(LC_TMAbort,
6299 "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
6300 ti->treeFileName,
6301 ti->treeLineNumber,
6302 ti->treePCOffset,
6303 cx->fp->script->filename,
6304 js_FramePCToLineNumber(cx, cx->fp),
6305 FramePCOffset(cx->fp),
6306 reason);
6307 #endif
6309 Backoff(cx, (jsbytecode*) f->root->ip, f->root);
6311 /* If DeleteRecorder flushed the code cache, we can't rely on f any more. */
6312 if (!DeleteRecorder(cx))
6313 return;
6316 * If this is the primary trace and we didn't succeed compiling, trash the
6317 * TreeInfo object.
6319 if (!f->code() && (f->root == f))
6320 TrashTree(cx, f);
6323 #if defined NANOJIT_IA32
6324 static bool
6325 CheckForSSE2()
6327 char *c = getenv("X86_FORCE_SSE2");
6328 if (c)
6329 return (!strcmp(c, "true") ||
6330 !strcmp(c, "1") ||
6331 !strcmp(c, "yes"));
6333 int features = 0;
6334 #if defined _MSC_VER
6335 __asm
6337 pushad
6338 mov eax, 1
6339 cpuid
6340 mov features, edx
6341 popad
6343 #elif defined __GNUC__
6344 asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
6345 "mov $0x01, %%eax\n"
6346 "cpuid\n"
6347 "mov %%edx, %0\n"
6348 "xchg %%esi, %%ebx\n"
6349 : "=m" (features)
6350 : /* We have no inputs */
6351 : "%eax", "%esi", "%ecx", "%edx"
6353 #elif defined __SUNPRO_C || defined __SUNPRO_CC
6354 asm("push %%ebx\n"
6355 "mov $0x01, %%eax\n"
6356 "cpuid\n"
6357 "pop %%ebx\n"
6358 : "=d" (features)
6359 : /* We have no inputs */
6360 : "%eax", "%ecx"
6362 #endif
6363 return (features & (1<<26)) != 0;
6365 #endif
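/*
 * Illustrative sketch, not part of this file: on toolchains that provide the
 * __builtin_cpu_supports intrinsic (an assumption; e.g. newer GCC/Clang), the
 * CPUID feature test above (EDX bit 26 = SSE2) could be written without
 * inline assembly.
 */
#if 0
static bool
ExampleCheckForSSE2()
{
    return __builtin_cpu_supports("sse2") != 0;
}
#endif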
6367 #if defined(NANOJIT_ARM)
6369 #if defined(_MSC_VER) && defined(WINCE)
6371 // these come in from jswince.asm
6372 extern "C" int js_arm_try_thumb_op();
6373 extern "C" int js_arm_try_armv6t2_op();
6374 extern "C" int js_arm_try_armv5_op();
6375 extern "C" int js_arm_try_armv6_op();
6376 extern "C" int js_arm_try_armv7_op();
6377 extern "C" int js_arm_try_vfp_op();
6379 static bool
6380 js_arm_check_thumb() {
6381 bool ret = false;
6382 __try {
6383 js_arm_try_thumb_op();
6384 ret = true;
6385 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
6386 ret = false;
6388 return ret;
6391 static bool
6392 js_arm_check_thumb2() {
6393 bool ret = false;
6394 __try {
6395 js_arm_try_armv6t2_op();
6396 ret = true;
6397 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
6398 ret = false;
6400 return ret;
6403 static unsigned int
6404 js_arm_check_arch() {
6405 unsigned int arch = 4;
6406 __try {
6407 js_arm_try_armv5_op();
6408 arch = 5;
6409 js_arm_try_armv6_op();
6410 arch = 6;
6411 js_arm_try_armv7_op();
6412 arch = 7;
6413 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
6415 return arch;
6418 static bool
6419 js_arm_check_vfp() {
6420 bool ret = false;
6421 __try {
6422 js_arm_try_vfp_op();
6423 ret = true;
6424 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
6425 ret = false;
6427 return ret;
6430 #define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1
6432 /* See "Suppressing Exception Notifications while Debugging", at
6433 * http://msdn.microsoft.com/en-us/library/ms924252.aspx
6435 static void
6436 js_disable_debugger_exceptions() {
6437 // 2 == TLSSLOT_KERNEL
6438 DWORD kctrl = (DWORD) TlsGetValue(2);
6439 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
6440 kctrl |= 0x12;
6441 TlsSetValue(2, (LPVOID) kctrl);
6444 static void
6445 js_enable_debugger_exceptions() {
6446 // 2 == TLSSLOT_KERNEL
6447 DWORD kctrl = (DWORD) TlsGetValue(2);
6448 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
6449 kctrl &= ~0x12;
6450 TlsSetValue(2, (LPVOID) kctrl);
6453 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
6455 #include <stdlib.h>
6456 #include <unistd.h>
6457 #include <sys/types.h>
6458 #include <sys/stat.h>
6459 #include <sys/mman.h>
6460 #include <fcntl.h>
6461 #include <string.h>
6462 #include <elf.h>
6464 // Assume ARMv4 by default.
6465 static unsigned int arm_arch = 4;
6466 static bool arm_has_thumb = false;
6467 static bool arm_has_vfp = false;
6468 static bool arm_has_neon = false;
6469 static bool arm_has_iwmmxt = false;
6470 static bool arm_tests_initialized = false;
6472 static void
6473 arm_read_auxv() {
6474 int fd;
6475 Elf32_auxv_t aux;
6477 fd = open("/proc/self/auxv", O_RDONLY);
6478 if (fd > 0) {
6479 while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
6480 if (aux.a_type == AT_HWCAP) {
6481 uint32_t hwcap = aux.a_un.a_val;
6482 if (getenv("ARM_FORCE_HWCAP"))
6483 hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
6484 // hardcode these values to avoid depending on specific versions
6485 // of the hwcap header, e.g. HWCAP_NEON
6486 arm_has_thumb = (hwcap & 4) != 0;
6487 arm_has_vfp = (hwcap & 64) != 0;
6488 arm_has_iwmmxt = (hwcap & 512) != 0;
6490 // this flag is only present on kernel 2.6.29 and later
6490 arm_has_neon = (hwcap & 4096) != 0;
6491 } else if (aux.a_type == AT_PLATFORM) {
6492 const char *plat = (const char*) aux.a_un.a_val;
6493 if (getenv("ARM_FORCE_PLATFORM"))
6494 plat = getenv("ARM_FORCE_PLATFORM");
6495 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
6496 // or big-endian variants and the digit indicates the version of the platform.
6497 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
6498 // processors. Architectures newer than ARMv7 are assumed to be
6499 // backwards-compatible with ARMv7.
6500 if ((plat[0] == 'v') &&
6501 (plat[1] >= '4') && (plat[1] <= '9') &&
6502 ((plat[2] == 'l') || (plat[2] == 'b')))
6504 arm_arch = plat[1] - '0';
6506 else
6508 // For production code, ignore invalid (or unexpected) platform strings and
6509 // fall back to the default. For debug code, use an assertion to catch this.
6510 JS_ASSERT(false);
6514 close (fd);
6516 // Kernels older than 2.6.29 do not report the NEON bit in HWCAP, so
6517 // assume NEON on ARMv7+ unless ARM_TRUST_HWCAP says HWCAP is reliable.
6518 if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
6519 arm_has_neon = true;
6522 arm_tests_initialized = true;
6525 static bool
6526 js_arm_check_thumb() {
6527 if (!arm_tests_initialized)
6528 arm_read_auxv();
6530 return arm_has_thumb;
6533 static bool
6534 js_arm_check_thumb2() {
6535 if (!arm_tests_initialized)
6536 arm_read_auxv();
6538 // ARMv6T2 also supports Thumb2, but Linux doesn't provide an easy way to test for this as
6539 // there is no associated bit in auxv. ARMv7 always supports Thumb2, and future architectures
6540 // are assumed to be backwards-compatible.
6541 return (arm_arch >= 7);
6544 static unsigned int
6545 js_arm_check_arch() {
6546 if (!arm_tests_initialized)
6547 arm_read_auxv();
6549 return arm_arch;
6552 static bool
6553 js_arm_check_vfp() {
6554 if (!arm_tests_initialized)
6555 arm_read_auxv();
6557 return arm_has_vfp;
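/*
 * Illustrative sketch, not part of this file: on C libraries that provide
 * getauxval (an assumption; e.g. glibc with <sys/auxv.h>), the HWCAP word
 * that arm_read_auxv parses out of /proc/self/auxv above can be obtained
 * directly. The same bits (Thumb, VFP, iWMMXt, NEON) would then be tested.
 */
#if 0
#include <sys/auxv.h>
static uint32_t
ExampleReadArmHwcap()
{
    return (uint32_t) getauxval(AT_HWCAP);
}
#endif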
6560 #else
6561 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
6562 static bool
6563 js_arm_check_thumb() { return false; }
6564 static bool
6565 js_arm_check_thumb2() { return false; }
6566 static unsigned int
6567 js_arm_check_arch() { return 4; }
6568 static bool
6569 js_arm_check_vfp() { return false; }
6570 #endif
6572 #ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
6573 static void
6574 js_enable_debugger_exceptions() { }
6575 static void
6576 js_disable_debugger_exceptions() { }
6577 #endif
6579 #endif /* NANOJIT_ARM */
6581 #define K *1024
6582 #define M K K
6583 #define G K M
6585 void
6586 js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
6588 JSTraceMonitor* tm = &JS_THREAD_DATA(cx)->traceMonitor;
6589 JS_ASSERT(tm->codeAlloc && tm->reCodeAlloc &&
6590 tm->allocator && tm->reAllocator);
6591 if (bytes > 1 G)
6592 bytes = 1 G;
6593 if (bytes < 128 K)
6594 bytes = 128 K;
6595 tm->maxCodeCacheBytes = bytes;
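/*
 * Illustrative usage sketch, not part of this file: requests are clamped to
 * the [128 K, 1 G] range enforced above, using the K/M/G macros defined just
 * before this function.
 */
#if 0
    js_SetMaxCodeCacheBytes(cx, 64 K);    /* clamped up to 128 K */
    js_SetMaxCodeCacheBytes(cx, 32 M);    /* within range, taken as-is */
#endif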
6598 void
6599 js_InitJIT(JSTraceMonitor *tm)
6601 #if defined JS_JIT_SPEW
6602 /* Set up debug logging. */
6603 if (!did_we_set_up_debug_logging) {
6604 InitJITLogController();
6605 did_we_set_up_debug_logging = true;
6607 #else
6608 memset(&js_LogController, 0, sizeof(js_LogController));
6609 #endif
6611 if (!did_we_check_processor_features) {
6612 #if defined NANOJIT_IA32
6613 avmplus::AvmCore::config.use_cmov =
6614 avmplus::AvmCore::config.sse2 = CheckForSSE2();
6615 #endif
6616 #if defined NANOJIT_ARM
6618 js_disable_debugger_exceptions();
6620 bool arm_vfp = js_arm_check_vfp();
6621 bool arm_thumb = js_arm_check_thumb();
6622 bool arm_thumb2 = js_arm_check_thumb2();
6623 unsigned int arm_arch = js_arm_check_arch();
6625 js_enable_debugger_exceptions();
6627 avmplus::AvmCore::config.vfp = arm_vfp;
6628 avmplus::AvmCore::config.soft_float = !arm_vfp;
6629 avmplus::AvmCore::config.thumb = arm_thumb;
6630 avmplus::AvmCore::config.thumb2 = arm_thumb2;
6631 avmplus::AvmCore::config.arch = arm_arch;
6633 // Sanity-check the configuration detection.
6634 // * We don't understand architectures prior to ARMv4.
6635 JS_ASSERT(arm_arch >= 4);
6636 // * All architectures support Thumb with the possible exception of ARMv4.
6637 JS_ASSERT((arm_thumb) || (arm_arch == 4));
6638 // * Only ARMv6T2 and ARMv7(+) support Thumb2; plain ARMv6 does not.
6639 JS_ASSERT((arm_thumb2) || (arm_arch <= 6));
6640 // * All architectures that support Thumb2 also support Thumb.
6641 JS_ASSERT((arm_thumb2 && arm_thumb) || (!arm_thumb2));
6642 #endif
6643 did_we_check_processor_features = true;
6646 /* Set the default size for the code cache to 16MB. */
6647 tm->maxCodeCacheBytes = 16 M;
6649 if (!tm->recordAttempts.ops) {
6650 JS_DHashTableInit(&tm->recordAttempts, JS_DHashGetStubOps(),
6651 NULL, sizeof(PCHashEntry),
6652 JS_DHASH_DEFAULT_CAPACITY(PC_HASH_COUNT));
6655 if (!tm->allocator)
6656 tm->allocator = new VMAllocator();
6658 Allocator& alloc = *tm->allocator;
6660 if (!tm->codeAlloc)
6661 tm->codeAlloc = new CodeAlloc();
6663 if (!tm->assembler) {
6664 tm->assembler = new (alloc) Assembler(*tm->codeAlloc, alloc, core,
6665 &js_LogController);
6668 JS_ASSERT(!tm->reservedDoublePool);
6669 tm->lirbuf = new (alloc) LirBuffer(alloc);
6670 #ifdef DEBUG
6671 tm->labels = new (alloc) LabelMap(alloc, &js_LogController);
6672 tm->lirbuf->names = new (alloc) LirNameMap(alloc, tm->labels);
6673 #endif
6674 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
6675 tm->globalStates[i].globalShape = -1;
6676 JS_ASSERT(!tm->globalStates[i].globalSlots);
6677 tm->globalStates[i].globalSlots = new (alloc) SlotList(tm->allocator);
6679 tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS];
6680 memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
6683 if (!tm->reAllocator)
6684 tm->reAllocator = new VMAllocator();
6686 Allocator& reAlloc = *tm->reAllocator;
6688 if (!tm->reCodeAlloc)
6689 tm->reCodeAlloc = new CodeAlloc();
6691 if (!tm->reAssembler) {
6692 tm->reAssembler = new (reAlloc) Assembler(*tm->reCodeAlloc, reAlloc, core,
6693 &js_LogController);
6695 tm->reFragments = new (reAlloc) REHashMap(reAlloc);
6696 tm->reLirBuf = new (reAlloc) LirBuffer(reAlloc);
6697 #ifdef DEBUG
6698 tm->reLabels = new (reAlloc) LabelMap(reAlloc, &js_LogController);
6699 tm->reLirBuf->names = new (reAlloc) LirNameMap(reAlloc, tm->reLabels);
6700 #endif
6702 #if !defined XP_WIN
6703 debug_only(memset(&jitstats, 0, sizeof(jitstats)));
6704 #endif
6707 void
6708 js_FinishJIT(JSTraceMonitor *tm)
6710 #ifdef JS_JIT_SPEW
6711 if (jitstats.recorderStarted) {
6712 debug_only_printf(LC_TMStats,
6713 "recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
6714 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
6715 "breaks(%llu), returns(%llu), unstableInnerCalls(%llu), blacklisted(%llu)\n",
6716 jitstats.recorderStarted, jitstats.recorderAborted, jitstats.traceCompleted,
6717 jitstats.returnToDifferentLoopHeader, jitstats.treesTrashed, jitstats.slotPromoted,
6718 jitstats.unstableLoopVariable, jitstats.breakLoopExits, jitstats.returnLoopExits,
6719 jitstats.noCompatInnerTrees, jitstats.blacklisted);
6720 debug_only_printf(LC_TMStats,
6721 "monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
6722 "global mismatch(%llu)\n", jitstats.traceTriggered, jitstats.sideExitIntoInterpreter,
6723 jitstats.typeMapMismatchAtEntry, jitstats.globalShapeMismatchAtEntry);
6725 #endif
6726 if (tm->assembler != NULL) {
6727 JS_ASSERT(tm->reservedDoublePool);
6729 tm->lirbuf = NULL;
6731 if (tm->recordAttempts.ops)
6732 JS_DHashTableFinish(&tm->recordAttempts);
6734 memset(&tm->vmfragments[0], 0,
6735 FRAGMENT_TABLE_SIZE * sizeof(VMFragment*));
6737 delete[] tm->reservedDoublePool;
6738 tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL;
6740 if (tm->reAssembler != NULL) {
6741 delete tm->reAllocator;
6742 delete tm->reCodeAlloc;
6744 if (tm->codeAlloc)
6745 delete tm->codeAlloc;
6746 if (tm->allocator)
6747 delete tm->allocator;
6750 void
6751 TraceRecorder::pushAbortStack()
6753 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
6755 JS_ASSERT(tm->abortStack != this);
6757 nextRecorderToAbort = tm->abortStack;
6758 tm->abortStack = this;
6761 void
6762 TraceRecorder::popAbortStack()
6764 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
6766 JS_ASSERT(tm->abortStack == this);
6768 tm->abortStack = nextRecorderToAbort;
6769 nextRecorderToAbort = NULL;
6772 void
6773 js_PurgeJITOracle()
6775 oracle.clear();
6778 static JSDHashOperator
6779 PurgeScriptRecordingAttempts(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg)
6781 PCHashEntry *e = (PCHashEntry *)hdr;
6782 JSScript *script = (JSScript *)arg;
6783 jsbytecode *pc = (jsbytecode *)e->key;
6785 if (JS_UPTRDIFF(pc, script->code) < script->length)
6786 return JS_DHASH_REMOVE;
6787 return JS_DHASH_NEXT;
6791 JS_REQUIRES_STACK void
6792 js_PurgeScriptFragments(JSContext* cx, JSScript* script)
6794 if (!TRACING_ENABLED(cx))
6795 return;
6796 debug_only_printf(LC_TMTracer,
6797 "Purging fragments for JSScript %p.\n", (void*)script);
6799 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
6800 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
6801 for (VMFragment **f = &(tm->vmfragments[i]); *f; ) {
6802 VMFragment* frag = *f;
6803 if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
6804 /* This fragment is associated with the script. */
6805 debug_only_printf(LC_TMTracer,
6806 "Disconnecting VMFragment %p "
6807 "with ip %p, in range [%p,%p).\n",
6808 (void*)frag, frag->ip, script->code,
6809 script->code + script->length);
6811 JS_ASSERT(frag->root == frag);
6812 VMFragment* next = frag->next;
6813 for (VMFragment *p = frag; p; p = p->peer)
6814 TrashTree(cx, p);
6815 *f = next;
6816 } else {
6817 f = &((*f)->next);
6822 JS_DHashTableEnumerate(&(tm->recordAttempts), PurgeScriptRecordingAttempts, script);
6825 bool
6826 js_OverfullJITCache(JSTraceMonitor* tm, bool reCache)
6829 * You might imagine the outOfMemory flag on the allocator is sufficient
6830 * to model the notion of "running out of memory", but there are actually
6831 * two separate issues involved:
6833 * 1. The process truly running out of memory: malloc() or mmap()
6834 * failed.
6836 * 2. The limit we put on the "intended size" of the tracemonkey code
6837 * cache, in pages, has been exceeded.
6839 * Condition 1 doesn't happen very often, but we're obliged to try to
6840 * safely shut down and signal the rest of spidermonkey when it
6841 * does. Condition 2 happens quite regularly.
6843 * Presently, the code in this file doesn't check the outOfMemory condition
6844 * often enough, and frequently misuses the unchecked results of
6845 * lirbuffer insertions on the assumption that it will notice the
6846 * outOfMemory flag "soon enough" when it returns to the monitorRecording
6847 * function. This turns out to be a false assumption if we use outOfMemory
6848 * to signal condition 2: we regularly provoke "passing our intended
6849 * size" and regularly fail to notice it in time to prevent writing
6850 * over the end of an artificially self-limited LIR buffer.
6852 * To mitigate, though not completely solve, this problem, we're
6853 * modeling the two forms of memory exhaustion *separately* for the
6854 * time being: condition 1 is handled by the outOfMemory flag inside
6855 * nanojit, and condition 2 is being handled independently *here*. So
6856 * we construct our allocators to use all available memory they like,
6857 * and only report outOfMemory to us when there is literally no OS memory
6858 * left. Merely purging our cache when we hit our highwater mark is
6859 * handled by the (few) callers of this function.
6862 jsuint maxsz = tm->maxCodeCacheBytes;
6863 VMAllocator *allocator = tm->allocator;
6864 CodeAlloc *codeAlloc = tm->codeAlloc;
6865 if (reCache) {
6867 * At the time of making the code cache size configurable, we were using
6868 * 16 MB for the main code cache and 1 MB for the regular expression code
6869 * cache. We will stick to this 16:1 ratio here until we unify the two
6870 * code caches.
6872 maxsz /= 16;
6873 allocator = tm->reAllocator;
6874 codeAlloc = tm->reCodeAlloc;
6876 return (codeAlloc->size() + allocator->size() > maxsz);
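/*
 * Illustrative caller sketch, not part of this file: condition 2 above is
 * polled during recording and answered by purging the cache; monitorRecording
 * earlier in this file performs the real check in essentially this form.
 */
#if 0
    if (tm->allocator->outOfMemory() || js_OverfullJITCache(tm, false))
        ResetJIT(cx);    /* flush the code cache on either form of exhaustion */
#endif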
6879 JS_FORCES_STACK JS_FRIEND_API(void)
6880 js_DeepBail(JSContext *cx)
6882 JS_ASSERT(JS_ON_TRACE(cx));
6885 * Exactly one context on the current thread is on trace. Find out which
6886 * one. (Most callers cannot guarantee that it's cx.)
6888 JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
6889 JSContext *tracecx = tm->tracecx;
6891 /* It's a bug if a non-FAIL_STATUS builtin gets here. */
6892 JS_ASSERT(tracecx->bailExit);
6894 tm->tracecx = NULL;
6895 debug_only_print0(LC_TMTracer, "Deep bail.\n");
6896 LeaveTree(*tracecx->interpState, tracecx->bailExit);
6897 tracecx->bailExit = NULL;
6899 InterpState* state = tracecx->interpState;
6900 state->builtinStatus |= JSBUILTIN_BAILED;
6901 state->deepBailSp = state->sp;
6904 JS_REQUIRES_STACK jsval&
6905 TraceRecorder::argval(unsigned n) const
6907 JS_ASSERT(n < cx->fp->fun->nargs);
6908 return cx->fp->argv[n];
6911 JS_REQUIRES_STACK jsval&
6912 TraceRecorder::varval(unsigned n) const
6914 JS_ASSERT(n < cx->fp->script->nslots);
6915 return cx->fp->slots[n];
6918 JS_REQUIRES_STACK jsval&
6919 TraceRecorder::stackval(int n) const
6921 jsval* sp = cx->fp->regs->sp;
6922 return sp[n];
6925 JS_REQUIRES_STACK LIns*
6926 TraceRecorder::scopeChain() const
6928 return lir->insLoad(LIR_ldp,
6929 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
6930 offsetof(JSStackFrame, scopeChain));
6934 * Return the frame of a call object if that frame is part of the current
6935 * trace. |depthp| is an optional outparam: if it is non-null, it will be
6936 * filled in with the depth of the call object's frame relevant to cx->fp.
6938 JS_REQUIRES_STACK JSStackFrame*
6939 TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const
6941 JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate();
6942 JSStackFrame* fp = cx->fp;
6943 for (unsigned depth = 0; depth <= callDepth; ++depth) {
6944 if (fp == ofp) {
6945 if (depthp)
6946 *depthp = depth;
6947 return ofp;
6949 if (!(fp = fp->down))
6950 break;
6952 return NULL;
6955 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureVar, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR, 0, 0)
6956 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureArg, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR, 0, 0)
6959 * Search the scope chain for a property lookup operation at the current PC and
6960 * generate LIR to access the given property. Return JSRS_CONTINUE on success,
6961 * otherwise abort and return JSRS_STOP. There are 3 outparams:
6963 * vp the address of the current property value
6964 * ins LIR instruction representing the property value on trace
6965 * nr NameResult describing how to look up the name; see the comment for NameResult in jstracer.h
6967 JS_REQUIRES_STACK JSRecordingStatus
6968 TraceRecorder::scopeChainProp(JSObject* obj, jsval*& vp, LIns*& ins, NameResult& nr)
6970 JS_ASSERT(obj != globalObj);
6972 JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
6973 JSObject* obj2;
6974 JSProperty* prop;
6975 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
6976 ABORT_TRACE_ERROR("error in js_FindProperty");
6977 if (!prop)
6978 ABORT_TRACE("failed to find name in non-global scope chain");
6980 if (obj == globalObj) {
6981 // Even if the property is on the global object, we must guard against
6982 // the creation of properties that shadow the property in the middle
6983 // of the scope chain if we are in a function.
6984 if (cx->fp->argv) {
6985 LIns* obj_ins;
6986 JSObject* parent = STOBJ_GET_PARENT(JSVAL_TO_OBJECT(cx->fp->argv[-2]));
6987 LIns* parent_ins = stobj_get_parent(get(&cx->fp->argv[-2]));
6988 CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins));
6991 JSScopeProperty* sprop = (JSScopeProperty*) prop;
6993 if (obj2 != obj) {
6994 obj2->dropProperty(cx, prop);
6995 ABORT_TRACE("prototype property");
6997 if (!isValidSlot(OBJ_SCOPE(obj), sprop)) {
6998 obj2->dropProperty(cx, prop);
6999 return JSRS_STOP;
7001 if (!lazilyImportGlobalSlot(sprop->slot)) {
7002 obj2->dropProperty(cx, prop);
7003 ABORT_TRACE("lazy import of global slot failed");
7005 vp = &STOBJ_GET_SLOT(obj, sprop->slot);
7006 ins = get(vp);
7007 obj2->dropProperty(cx, prop);
7008 nr.tracked = true;
7009 return JSRS_CONTINUE;
7012 if (wasDeepAborted())
7013 ABORT_TRACE("deep abort from property lookup");
7015 if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass)
7016 return callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr);
7018 obj2->dropProperty(cx, prop);
7019 ABORT_TRACE("fp->scopeChain is not global or active call object");
7023 * Generate LIR to access a property of a Call object.
7025 JS_REQUIRES_STACK JSRecordingStatus
7026 TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, jsval*& vp,
7027 LIns*& ins, NameResult& nr)
7029 JSScopeProperty *sprop = (JSScopeProperty*) prop;
7031 JSOp op = JSOp(*cx->fp->regs->pc);
7032 uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
7033 if (setflags && (sprop->attrs & JSPROP_READONLY))
7034 ABORT_TRACE("writing to a read-only property");
7036 uintN slot = sprop->shortid;
7038 vp = NULL;
7039 uintN upvar_slot = SPROP_INVALID_SLOT;
7040 JSStackFrame* cfp = (JSStackFrame*) obj->getPrivate();
7041 if (cfp) {
7042 if (sprop->getter == js_GetCallArg) {
7043 JS_ASSERT(slot < cfp->fun->nargs);
7044 vp = &cfp->argv[slot];
7045 upvar_slot = slot;
7046 nr.v = *vp;
7047 } else if (sprop->getter == js_GetCallVar) {
7048 JS_ASSERT(slot < cfp->script->nslots);
7049 vp = &cfp->slots[slot];
7050 upvar_slot = cx->fp->fun->nargs + slot;
7051 nr.v = *vp;
7052 } else {
7053 ABORT_TRACE("dynamic property of Call object");
7055 obj->dropProperty(cx, prop);
7057 if (frameIfInRange(obj)) {
7058 // At this point we are guaranteed to be looking at an active call object
7059 // whose properties are stored in the corresponding JSStackFrame.
7060 ins = get(vp);
7061 nr.tracked = true;
7062 return JSRS_CONTINUE;
7064 } else {
7065 // Call objects do not yet have sprop->isMethod() properties, but they
7066 // should. See bug 514046, for which this code is future-proof. Remove
7067 // this comment when that bug is fixed (so, FIXME: 514046).
7068 #ifdef DEBUG
7069 JSBool rv =
7070 #endif
7071 js_GetPropertyHelper(cx, obj, sprop->id,
7072 (op == JSOP_CALLNAME)
7073 ? JSGET_NO_METHOD_BARRIER
7074 : JSGET_METHOD_BARRIER,
7075 &nr.v);
7076 JS_ASSERT(rv);
7077 obj->dropProperty(cx, prop);
7080 LIns* obj_ins;
7081 JSObject* parent = STOBJ_GET_PARENT(JSVAL_TO_OBJECT(cx->fp->argv[-2]));
7082 LIns* parent_ins = stobj_get_parent(get(&cx->fp->argv[-2]));
7083 CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins));
7085 LIns* cv_ins = lir_buf_writer->insSkip(sizeof(ClosureVarInfo));
7086 ClosureVarInfo* cv = (ClosureVarInfo*) cv_ins->payload();
7087 cv->id = id;
7088 cv->slot = slot;
7089 cv->callDepth = callDepth;
7090 cv->resolveFlags = cx->resolveFlags == JSRESOLVE_INFER
7091 ? js_InferFlags(cx, 0)
7092 : cx->resolveFlags;
7094 LIns* outp = lir->insAlloc(sizeof(double));
7095 LIns* args[] = {
7096 outp,
7097 INS_CONSTPTR(cv),
7098 obj_ins,
7099 cx_ins
7101 const CallInfo* ci;
7102 if (sprop->getter == js_GetCallArg)
7103 ci = &GetClosureArg_ci;
7104 else
7105 ci = &GetClosureVar_ci;
7107 LIns* call_ins = lir->insCall(ci, args);
7108 JSTraceType type = getCoercedType(nr.v);
7109 guard(true,
7110 addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
7111 "guard(type-stable name access)"),
7112 BRANCH_EXIT);
7113 ins = stackLoad(outp, type);
7114 nr.tracked = false;
7115 nr.obj = obj;
7116 nr.obj_ins = obj_ins;
7117 nr.sprop = sprop;
7118 return JSRS_CONTINUE;
7121 JS_REQUIRES_STACK LIns*
7122 TraceRecorder::arg(unsigned n)
7124 return get(&argval(n));
7127 JS_REQUIRES_STACK void
7128 TraceRecorder::arg(unsigned n, LIns* i)
7130 set(&argval(n), i);
7133 JS_REQUIRES_STACK LIns*
7134 TraceRecorder::var(unsigned n)
7136 return get(&varval(n));
7139 JS_REQUIRES_STACK void
7140 TraceRecorder::var(unsigned n, LIns* i)
7142 set(&varval(n), i);
7145 JS_REQUIRES_STACK LIns*
7146 TraceRecorder::stack(int n)
7148 return get(&stackval(n));
7151 JS_REQUIRES_STACK void
7152 TraceRecorder::stack(int n, LIns* i)
7154 set(&stackval(n), i, n >= 0);
7157 JS_REQUIRES_STACK LIns*
7158 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
7161 * To even consider this operation for demotion, both operands have to be
7162 * integers and the oracle must not give us a negative hint for the
7163 * instruction.
7165 if (oracle.isInstructionUndemotable(cx->fp->regs->pc) || !isPromoteInt(s0) || !isPromoteInt(s1)) {
7166 out:
7167 if (v == LIR_fmod) {
7168 LIns* args[] = { s1, s0 };
7169 return lir->insCall(&js_dmod_ci, args);
7171 LIns* result = lir->ins2(v, s0, s1);
7172 JS_ASSERT_IF(s0->isconstf() && s1->isconstf(), result->isconstf());
7173 return result;
7176 jsdouble r;
7177 switch (v) {
7178 case LIR_fadd:
7179 r = v0 + v1;
7180 break;
7181 case LIR_fsub:
7182 r = v0 - v1;
7183 break;
7184 case LIR_fmul:
7185 r = v0 * v1;
7186 if (r == 0.0)
7187 goto out;
7188 break;
7189 #ifdef NANOJIT_IA32
7190 case LIR_fdiv:
7191 if (v1 == 0)
7192 goto out;
7193 r = v0 / v1;
7194 break;
7195 case LIR_fmod:
7196 if (v0 < 0 || v1 == 0 || (s1->isconstf() && v1 < 0))
7197 goto out;
7198 r = js_dmod(v0, v1);
7199 break;
7200 #endif
7201 default:
7202 goto out;
7206 * The result must be an integer at record time, otherwise there is no
7207 * point in trying to demote it.
7209 if (jsint(r) != r || JSDOUBLE_IS_NEGZERO(r))
7210 goto out;
7212 LIns* d0 = ::demote(lir, s0);
7213 LIns* d1 = ::demote(lir, s1);
7216 * Speculatively emit an integer operation, betting that at runtime we
7217 * will get integer results again.
7219 VMSideExit* exit;
7220 LIns* result;
7221 switch (v) {
7222 #ifdef NANOJIT_IA32
7223 case LIR_fdiv:
7224 if (d0->isconst() && d1->isconst())
7225 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
7227 exit = snapshot(OVERFLOW_EXIT);
7230 * If the divisor is greater than zero it's always safe to execute
7231 * the division. If not, we have to make sure we are not running
7232 * into -2147483648 / -1, because it can raise an overflow exception.
7234 if (!d1->isconst()) {
7235 LIns* gt = lir->insBranch(LIR_jt, lir->ins2i(LIR_gt, d1, 0), NULL);
7236 guard(false, lir->ins_eq0(d1), exit);
7237 guard(false, lir->ins2(LIR_and,
7238 lir->ins2i(LIR_eq, d0, 0x80000000),
7239 lir->ins2i(LIR_eq, d1, -1)), exit);
7240 gt->setTarget(lir->ins0(LIR_label));
7241 } else {
7242 if (d1->imm32() == -1)
7243 guard(false, lir->ins2i(LIR_eq, d0, 0x80000000), exit);
7245 result = lir->ins2(v = LIR_div, d0, d1);
7247 /* As long as the modulus is zero, the result is an integer. */
7248 guard(true, lir->ins_eq0(lir->ins1(LIR_mod, result)), exit);
7250 /* Don't lose a -0. */
7251 guard(false, lir->ins_eq0(result), exit);
7252 break;
7254 case LIR_fmod: {
7255 if (d0->isconst() && d1->isconst())
7256 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
7258 exit = snapshot(OVERFLOW_EXIT);
7260 /* Make sure we don't trigger division by zero at runtime. */
7261 if (!d1->isconst())
7262 guard(false, lir->ins_eq0(d1), exit);
7263 result = lir->ins1(v = LIR_mod, lir->ins2(LIR_div, d0, d1));
7265 /* If the result is not 0, it is always within the integer domain. */
7266 LIns* branch = lir->insBranch(LIR_jf, lir->ins_eq0(result), NULL);
7269 * If the result is zero, we must exit if the lhs is negative since
7270 * the result is -0 in this case, which is not in the integer domain.
7272 guard(false, lir->ins2i(LIR_lt, d1, 0), exit);
7273 branch->setTarget(lir->ins0(LIR_label));
7274 break;
7276 #endif
7278 default:
7279 v = (LOpcode)((int)v & ~LIR64);
7280 result = lir->ins2(v, d0, d1);
7283 * If the operands guarantee that the result will be an integer (e.g.
7284 * z = x * y with 0 <= x, y <= 0xffff guarantees z <= 0xfffe0001), we
7285 * don't have to guard against an overflow. Otherwise we emit a guard
7286 * that will inform the oracle and cause a non-demoted trace to be
7287 * attached that uses floating-point math for this operation.
7289 if (!result->isconst() && (!IsOverflowSafe(v, d0) || !IsOverflowSafe(v, d1))) {
7290 exit = snapshot(OVERFLOW_EXIT);
7291 guard(false, lir->ins1(LIR_ov, result), exit);
7292 if (v == LIR_mul) // make sure we don't lose a -0
7293 guard(false, lir->ins_eq0(result), exit);
7295 break;
7297 JS_ASSERT_IF(d0->isconst() && d1->isconst(),
7298 result->isconst() && result->imm32() == jsint(r));
7299 return lir->ins1(LIR_i2f, result);
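/*
 * Illustrative sketch, not part of this file: the record-time test used by
 * alu() above before speculating on integer arithmetic, pulled out as a
 * hypothetical stand-alone predicate.
 */
#if 0
static bool
ExampleResultIsDemotable(jsdouble r)
{
    /* Must be exactly representable as int32 and must not be -0. */
    return jsint(r) == r && !JSDOUBLE_IS_NEGZERO(r);
}
#endif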
7302 LIns*
7303 TraceRecorder::f2i(LIns* f)
7305 return lir->insCall(&js_DoubleToInt32_ci, &f);
7308 JS_REQUIRES_STACK LIns*
7309 TraceRecorder::makeNumberInt32(LIns* f)
7311 JS_ASSERT(f->isQuad());
7312 LIns* x;
7313 if (!isPromote(f)) {
7314 x = f2i(f);
7315 guard(true, lir->ins2(LIR_feq, f, lir->ins1(LIR_i2f, x)), MISMATCH_EXIT);
7316 } else {
7317 x = ::demote(lir, f);
7319 return x;
7322 JS_REQUIRES_STACK LIns*
7323 TraceRecorder::stringify(jsval& v)
7325 LIns* v_ins = get(&v);
7326 if (JSVAL_IS_STRING(v))
7327 return v_ins;
7329 LIns* args[] = { v_ins, cx_ins };
7330 const CallInfo* ci;
7331 if (JSVAL_IS_NUMBER(v)) {
7332 ci = &js_NumberToString_ci;
7333 } else if (JSVAL_IS_SPECIAL(v)) {
7334 ci = &js_BooleanOrUndefinedToString_ci;
7335 } else {
7337 * Callers must deal with non-primitive (non-null object) values by
7338 * calling an imacro. We don't try to guess about which imacro, with
7339 * what valueOf hint, here.
7341 JS_ASSERT(JSVAL_IS_NULL(v));
7342 return INS_ATOM(cx->runtime->atomState.nullAtom);
7345 v_ins = lir->insCall(ci, args);
7346 guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
7347 return v_ins;
7350 JS_REQUIRES_STACK JSRecordingStatus
7351 TraceRecorder::call_imacro(jsbytecode* imacro)
7353 JSStackFrame* fp = cx->fp;
7354 JSFrameRegs* regs = fp->regs;
7356 /* We cannot nest imacros, only tail-call. */
7357 if (fp->imacpc) {
7358 /* Dereference is safe since imacros are JSOP_STOP-terminated. */
7359 if (regs->pc[js_CodeSpec[*regs->pc].length] != JSOP_STOP)
7360 return JSRS_STOP;
7361 regs->pc = imacro;
7362 return JSRS_IMACRO;
7365 fp->imacpc = regs->pc;
7366 regs->pc = imacro;
7367 atoms = COMMON_ATOMS_START(&cx->runtime->atomState);
7368 return JSRS_IMACRO;
7371 JS_REQUIRES_STACK JSRecordingStatus
7372 TraceRecorder::ifop()
7374 jsval& v = stackval(-1);
7375 LIns* v_ins = get(&v);
7376 bool cond;
7377 LIns* x;
7379 if (JSVAL_IS_NULL(v)) {
7380 cond = false;
7381 x = lir->insImm(0);
7382 } else if (!JSVAL_IS_PRIMITIVE(v)) {
7383 cond = true;
7384 x = lir->insImm(1);
7385 } else if (JSVAL_IS_SPECIAL(v)) {
7386 /* Test whether the boolean is true; negate later if we are testing for false. */
7387 cond = JSVAL_TO_SPECIAL(v) == JS_TRUE;
7388 x = lir->ins2i(LIR_eq, v_ins, 1);
7389 } else if (isNumber(v)) {
7390 jsdouble d = asNumber(v);
7391 cond = !JSDOUBLE_IS_NaN(d) && d;
7392 x = lir->ins2(LIR_and,
7393 lir->ins2(LIR_feq, v_ins, v_ins),
7394 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmf(0))));
7395 } else if (JSVAL_IS_STRING(v)) {
7396 cond = JSVAL_TO_STRING(v)->length() != 0;
7397 x = lir->ins2(LIR_piand,
7398 lir->insLoad(LIR_ldp,
7399 v_ins,
7400 (int)offsetof(JSString, mLength)),
7401 INS_CONSTWORD(JSString::LENGTH_MASK));
7402 } else {
7403 JS_NOT_REACHED("ifop");
7404 return JSRS_STOP;
7407 jsbytecode* pc = cx->fp->regs->pc;
7408 emitIf(pc, cond, x);
7409 return checkTraceEnd(pc);
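/*
 * Illustrative sketch, not part of this file: the numeric branch of ifop()
 * above encodes the usual ECMAScript ToBoolean rule for doubles, shown here
 * as a hypothetical predicate.
 */
#if 0
static bool
ExampleNumberIsTruthy(jsdouble d)
{
    return !JSDOUBLE_IS_NaN(d) && d != 0;    /* NaN and +-0 are falsy */
}
#endif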
7412 #ifdef NANOJIT_IA32
7414 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
7415 * "first" time we hit the op. Later, when we start traces after exiting that
7416 * trace, we just patch.
7418 JS_REQUIRES_STACK JSRecordingStatus
7419 TraceRecorder::tableswitch()
7421 jsval& v = stackval(-1);
7423 /* No need to guard if the condition can't match any of the cases. */
7424 if (!isNumber(v))
7425 return JSRS_CONTINUE;
7427 /* No need to guard if the condition is constant. */
7428 LIns* v_ins = f2i(get(&v));
7429 if (v_ins->isconst() || v_ins->isconstq())
7430 return JSRS_CONTINUE;
7432 jsbytecode* pc = cx->fp->regs->pc;
7433 /* Starting a new trace after exiting a trace via switch. */
7434 if (anchor &&
7435 (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
7436 fragment->ip == pc) {
7437 return JSRS_CONTINUE;
7440 /* Decode jsop. */
7441 jsint low, high;
7442 if (*pc == JSOP_TABLESWITCH) {
7443 pc += JUMP_OFFSET_LEN;
7444 low = GET_JUMP_OFFSET(pc);
7445 pc += JUMP_OFFSET_LEN;
7446 high = GET_JUMP_OFFSET(pc);
7447 } else {
7448 pc += JUMPX_OFFSET_LEN;
7449 low = GET_JUMPX_OFFSET(pc);
7450 pc += JUMPX_OFFSET_LEN;
7451 high = GET_JUMPX_OFFSET(pc);
7454 /*
7455 * Really large tables won't fit in a page. This is a conservative check.
7456 * If it matters in practice we need to go off-page.
7457 */
7458 if ((high + 1 - low) * sizeof(intptr_t*) + 128 > (unsigned) LARGEST_UNDERRUN_PROT)
7459 return switchop();
7461 /* Generate switch LIR. */
7462 LIns* si_ins = lir_buf_writer->insSkip(sizeof(SwitchInfo));
7463 SwitchInfo* si = (SwitchInfo*) si_ins->payload();
7464 si->count = high + 1 - low;
7465 si->table = 0;
7466 si->index = (uint32) -1;
7467 LIns* diff = lir->ins2(LIR_sub, v_ins, lir->insImm(low));
7468 LIns* cmp = lir->ins2(LIR_ult, diff, lir->insImm(si->count));
7469 lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
7470 lir->insStorei(diff, lir->insImmPtr(&si->index), 0);
7471 VMSideExit* exit = snapshot(CASE_EXIT);
7472 exit->switchInfo = si;
7473 LIns* guardIns = lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
7474 fragment->lastIns = guardIns;
7475 compile(&JS_TRACE_MONITOR(cx));
7476 return JSRS_STOP;
7478 #endif
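/*
 * A minimal sketch, kept out of the build, of the range check that
 * tableswitch() guards on above: the single unsigned compare of (v - low)
 * against the case count folds both signed bounds checks (v >= low and
 * v <= high) into one; failing it side-exits to the default case. The
 * helper name is illustrative only.
 */
#if 0
static bool
TableswitchIndexInRangeSketch(int32_t v, int32_t low, int32_t high)
{
    uint32_t diff = uint32_t(v - low);           /* LIR_sub */
    uint32_t count = uint32_t(high + 1 - low);   /* si->count */
    return diff < count;                         /* LIR_ult; false => DEFAULT_EXIT */
}
#endif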
7480 static JS_ALWAYS_INLINE int32_t
7481 UnboxBooleanOrUndefined(jsval v)
7483 /* Although this says 'special', we really only expect 3 special values: */
7484 JS_ASSERT(v == JSVAL_TRUE || v == JSVAL_FALSE || v == JSVAL_VOID);
7485 return JSVAL_TO_SPECIAL(v);
7488 JS_REQUIRES_STACK JSRecordingStatus
7489 TraceRecorder::switchop()
7491 jsval& v = stackval(-1);
7492 LIns* v_ins = get(&v);
7494 /* No need to guard if the condition is constant. */
7495 if (v_ins->isconst() || v_ins->isconstq())
7496 return JSRS_CONTINUE;
7497 if (isNumber(v)) {
7498 jsdouble d = asNumber(v);
7499 guard(true,
7500 addName(lir->ins2(LIR_feq, v_ins, lir->insImmf(d)),
7501 "guard(switch on numeric)"),
7502 BRANCH_EXIT);
7503 } else if (JSVAL_IS_STRING(v)) {
7504 LIns* args[] = { v_ins, INS_CONSTSTR(JSVAL_TO_STRING(v)) };
7505 guard(true,
7506 addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(&js_EqualStrings_ci, args))),
7507 "guard(switch on string)"),
7508 BRANCH_EXIT);
7509 } else if (JSVAL_IS_SPECIAL(v)) {
7510 guard(true,
7511 addName(lir->ins2(LIR_eq, v_ins, lir->insImm(UnboxBooleanOrUndefined(v))),
7512 "guard(switch on boolean)"),
7513 BRANCH_EXIT);
7514 } else {
7515 ABORT_TRACE("switch on object or null");
7517 return JSRS_CONTINUE;
7520 JS_REQUIRES_STACK JSRecordingStatus
7521 TraceRecorder::inc(jsval& v, jsint incr, bool pre)
7523 LIns* v_ins = get(&v);
7524 CHECK_STATUS(inc(v, v_ins, incr, pre));
7525 set(&v, v_ins);
7526 return JSRS_CONTINUE;
7529 /*
7530 * On exit, v_ins is the incremented unboxed value, and the appropriate value
7531 * (pre- or post-increment as described by pre) is stacked.
7532 */
7533 JS_REQUIRES_STACK JSRecordingStatus
7534 TraceRecorder::inc(jsval v, LIns*& v_ins, jsint incr, bool pre)
7536 LIns* v_after;
7537 CHECK_STATUS(incHelper(v, v_ins, v_after, incr));
7539 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
7540 JS_ASSERT(cs.ndefs == 1);
7541 stack(-cs.nuses, pre ? v_after : v_ins);
7542 v_ins = v_after;
7543 return JSRS_CONTINUE;
7546 /*
7547 * Do an increment operation without storing anything to the stack.
7548 */
7549 JS_REQUIRES_STACK JSRecordingStatus
7550 TraceRecorder::incHelper(jsval v, LIns* v_ins, LIns*& v_after, jsint incr)
7552 if (!isNumber(v))
7553 ABORT_TRACE("can only inc numbers");
7554 v_after = alu(LIR_fadd, asNumber(v), incr, v_ins, lir->insImmf(incr));
7555 return JSRS_CONTINUE;
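/*
 * A minimal sketch, kept out of the build, of the pre/post distinction that
 * inc() implements above: the incremented value always becomes the new slot
 * value, but only a pre-increment pushes it as the expression result
 * (stack(-cs.nuses, pre ? v_after : v_ins)). The helper name is illustrative
 * only.
 */
#if 0
static double
IncSketch(double& slot, int incr, bool pre)
{
    double before = slot;
    double after = before + incr;
    slot = after;                   /* set(&v, v_ins) after inc() returns */
    return pre ? after : before;    /* ++i yields after, i++ yields before */
}
#endif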
7558 JS_REQUIRES_STACK JSRecordingStatus
7559 TraceRecorder::incProp(jsint incr, bool pre)
7561 jsval& l = stackval(-1);
7562 if (JSVAL_IS_PRIMITIVE(l))
7563 ABORT_TRACE("incProp on primitive");
7565 JSObject* obj = JSVAL_TO_OBJECT(l);
7566 LIns* obj_ins = get(&l);
7568 uint32 slot;
7569 LIns* v_ins;
7570 CHECK_STATUS(prop(obj, obj_ins, &slot, &v_ins, NULL));
7572 if (slot == SPROP_INVALID_SLOT)
7573 ABORT_TRACE("incProp on invalid slot");
7575 jsval& v = STOBJ_GET_SLOT(obj, slot);
7576 CHECK_STATUS(inc(v, v_ins, incr, pre));
7578 LIns* dslots_ins = NULL;
7579 stobj_set_slot(obj_ins, slot, dslots_ins, box_jsval(v, v_ins));
7580 return JSRS_CONTINUE;
7583 JS_REQUIRES_STACK JSRecordingStatus
7584 TraceRecorder::incElem(jsint incr, bool pre)
7586 jsval& r = stackval(-1);
7587 jsval& l = stackval(-2);
7588 jsval* vp;
7589 LIns* v_ins;
7590 LIns* addr_ins;
7592 if (!JSVAL_IS_OBJECT(l) || !JSVAL_IS_INT(r) ||
7593 !guardDenseArray(JSVAL_TO_OBJECT(l), get(&l))) {
7594 return JSRS_STOP;
7597 CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins));
7598 if (!addr_ins) // if we read a hole, abort
7599 return JSRS_STOP;
7600 CHECK_STATUS(inc(*vp, v_ins, incr, pre));
7601 lir->insStorei(box_jsval(*vp, v_ins), addr_ins, 0);
7602 return JSRS_CONTINUE;
7605 static bool
7606 EvalCmp(LOpcode op, double l, double r)
7608 bool cond;
7609 switch (op) {
7610 case LIR_feq:
7611 cond = (l == r);
7612 break;
7613 case LIR_flt:
7614 cond = l < r;
7615 break;
7616 case LIR_fgt:
7617 cond = l > r;
7618 break;
7619 case LIR_fle:
7620 cond = l <= r;
7621 break;
7622 case LIR_fge:
7623 cond = l >= r;
7624 break;
7625 default:
7626 JS_NOT_REACHED("unexpected comparison op");
7627 return false;
7629 return cond;
7632 static bool
7633 EvalCmp(LOpcode op, JSString* l, JSString* r)
7635 if (op == LIR_feq)
7636 return js_EqualStrings(l, r);
7637 return EvalCmp(op, js_CompareStrings(l, r), 0);
7640 JS_REQUIRES_STACK void
7641 TraceRecorder::strictEquality(bool equal, bool cmpCase)
7643 jsval& r = stackval(-1);
7644 jsval& l = stackval(-2);
7645 LIns* l_ins = get(&l);
7646 LIns* r_ins = get(&r);
7647 LIns* x;
7648 bool cond;
7650 JSTraceType ltag = GetPromotedType(l);
7651 if (ltag != GetPromotedType(r)) {
7652 cond = !equal;
7653 x = lir->insImm(cond);
7654 } else if (ltag == TT_STRING) {
7655 LIns* args[] = { r_ins, l_ins };
7656 x = lir->ins2i(LIR_eq, lir->insCall(&js_EqualStrings_ci, args), equal);
7657 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
7658 } else {
7659 LOpcode op = (ltag != TT_DOUBLE) ? LIR_eq : LIR_feq;
7660 x = lir->ins2(op, l_ins, r_ins);
7661 if (!equal)
7662 x = lir->ins_eq0(x);
7663 cond = (ltag == TT_DOUBLE)
7664 ? asNumber(l) == asNumber(r)
7665 : l == r;
7667 cond = (cond == equal);
7669 if (cmpCase) {
7670 /* Only guard if the same path may not always be taken. */
7671 if (!x->isconst())
7672 guard(cond, x, BRANCH_EXIT);
7673 return;
7676 set(&l, x);
7679 JS_REQUIRES_STACK JSRecordingStatus
7680 TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
7682 jsval& rval = stackval(-1);
7683 jsval& lval = stackval(-2);
7684 LIns* l_ins = get(&lval);
7685 LIns* r_ins = get(&rval);
7687 return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
7690 JS_REQUIRES_STACK JSRecordingStatus
7691 TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
7692 bool negate, bool tryBranchAfterCond,
7693 jsval& rval)
7695 bool fp = false;
7696 bool cond;
7697 LIns* args[] = { NULL, NULL };
7699 /*
7700 * The if chain below closely mirrors that found in 11.9.3, in general
7701 * deviating from that ordering of ifs only to account for SpiderMonkey's
7702 * conflation of booleans and undefined and for the possibility of
7703 * confusing objects and null. Note carefully the spec-mandated recursion
7704 * in the final else clause, which terminates because Number == T recurs
7705 * only if T is Object, but that must recur again to convert Object to
7706 * primitive, and ToPrimitive throws if the object cannot be converted to
7707 * a primitive value (which would terminate recursion).
7708 */
7710 if (GetPromotedType(l) == GetPromotedType(r)) {
7711 if (JSVAL_TAG(l) == JSVAL_OBJECT || JSVAL_IS_SPECIAL(l)) {
7712 cond = (l == r);
7713 } else if (JSVAL_IS_STRING(l)) {
7714 args[0] = r_ins, args[1] = l_ins;
7715 l_ins = lir->insCall(&js_EqualStrings_ci, args);
7716 r_ins = lir->insImm(1);
7717 cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
7718 } else {
7719 JS_ASSERT(isNumber(l) && isNumber(r));
7720 cond = (asNumber(l) == asNumber(r));
7721 fp = true;
7723 } else if (JSVAL_IS_NULL(l) && JSVAL_IS_SPECIAL(r)) {
7724 l_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
7725 cond = (r == JSVAL_VOID);
7726 } else if (JSVAL_IS_SPECIAL(l) && JSVAL_IS_NULL(r)) {
7727 r_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
7728 cond = (l == JSVAL_VOID);
7729 } else if (isNumber(l) && JSVAL_IS_STRING(r)) {
7730 args[0] = r_ins, args[1] = cx_ins;
7731 r_ins = lir->insCall(&js_StringToNumber_ci, args);
7732 cond = (asNumber(l) == js_StringToNumber(cx, JSVAL_TO_STRING(r)));
7733 fp = true;
7734 } else if (JSVAL_IS_STRING(l) && isNumber(r)) {
7735 args[0] = l_ins, args[1] = cx_ins;
7736 l_ins = lir->insCall(&js_StringToNumber_ci, args);
7737 cond = (js_StringToNumber(cx, JSVAL_TO_STRING(l)) == asNumber(r));
7738 fp = true;
7739 } else {
7740 if (JSVAL_IS_SPECIAL(l)) {
7741 bool isVoid = JSVAL_IS_VOID(l);
7742 guard(isVoid,
7743 lir->ins2(LIR_eq, l_ins, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))),
7744 BRANCH_EXIT);
7745 if (!isVoid) {
7746 args[0] = l_ins, args[1] = cx_ins;
7747 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
7748 l = (l == JSVAL_VOID)
7749 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
7750 : INT_TO_JSVAL(l == JSVAL_TRUE);
7751 return equalityHelper(l, r, l_ins, r_ins, negate,
7752 tryBranchAfterCond, rval);
7754 } else if (JSVAL_IS_SPECIAL(r)) {
7755 bool isVoid = JSVAL_IS_VOID(r);
7756 guard(isVoid,
7757 lir->ins2(LIR_eq, r_ins, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))),
7758 BRANCH_EXIT);
7759 if (!isVoid) {
7760 args[0] = r_ins, args[1] = cx_ins;
7761 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
7762 r = (r == JSVAL_VOID)
7763 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
7764 : INT_TO_JSVAL(r == JSVAL_TRUE);
7765 return equalityHelper(l, r, l_ins, r_ins, negate,
7766 tryBranchAfterCond, rval);
7768 } else {
7769 if ((JSVAL_IS_STRING(l) || isNumber(l)) && !JSVAL_IS_PRIMITIVE(r)) {
7770 ABORT_IF_XML(r);
7771 return call_imacro(equality_imacros.any_obj);
7773 if (!JSVAL_IS_PRIMITIVE(l) && (JSVAL_IS_STRING(r) || isNumber(r))) {
7774 ABORT_IF_XML(l);
7775 return call_imacro(equality_imacros.obj_any);
7779 l_ins = lir->insImm(0);
7780 r_ins = lir->insImm(1);
7781 cond = false;
7784 /* If the operands aren't numbers, compare them as integers. */
7785 LOpcode op = fp ? LIR_feq : LIR_eq;
7786 LIns* x = lir->ins2(op, l_ins, r_ins);
7787 if (negate) {
7788 x = lir->ins_eq0(x);
7789 cond = !cond;
7792 jsbytecode* pc = cx->fp->regs->pc;
7794 /*
7795 * Don't guard if the same path is always taken. If it isn't, we have to
7796 * fuse comparisons and the following branch, because the interpreter does
7797 * that.
7798 */
7799 if (tryBranchAfterCond)
7800 fuseIf(pc + 1, cond, x);
7802 /*
7803 * There is no need to write out the result of this comparison if the trace
7804 * ends on this operation.
7805 */
7806 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
7807 CHECK_STATUS(checkTraceEnd(pc + 1));
7809 /*
7810 * We update the stack after the guard. This is safe since the guard bails
7811 * out at the comparison and the interpreter will therefore re-execute the
7812 * comparison. This way the value of the condition doesn't have to be
7813 * calculated and saved on the stack in most cases.
7814 */
7815 set(&rval, x);
7817 return JSRS_CONTINUE;
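/*
 * A minimal sketch, kept out of the build, of how equalityHelper() folds a
 * boolean/undefined operand before re-entering itself as a number compare
 * above: undefined becomes NaN (so it only compares equal to null/undefined,
 * which the earlier branches already handled) and booleans become 0 or 1.
 * The helper name is illustrative only; the NaN constant assumes <limits>.
 */
#if 0
#include <limits>

static double
SpecialAsNumberSketch(bool isVoid, bool boolValue)
{
    if (isVoid)
        return std::numeric_limits<double>::quiet_NaN();
    return boolValue ? 1.0 : 0.0;
}
#endif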
7820 JS_REQUIRES_STACK JSRecordingStatus
7821 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
7823 jsval& r = stackval(-1);
7824 jsval& l = stackval(-2);
7825 LIns* x = NULL;
7826 bool cond;
7827 LIns* l_ins = get(&l);
7828 LIns* r_ins = get(&r);
7829 bool fp = false;
7830 jsdouble lnum, rnum;
7832 /*
7833 * 11.8.5 if either argument is an object with a function-valued valueOf
7834 * property; if both arguments are objects with non-function-valued valueOf
7835 * properties, abort.
7836 */
7837 if (!JSVAL_IS_PRIMITIVE(l)) {
7838 ABORT_IF_XML(l);
7839 if (!JSVAL_IS_PRIMITIVE(r)) {
7840 ABORT_IF_XML(r);
7841 return call_imacro(binary_imacros.obj_obj);
7843 return call_imacro(binary_imacros.obj_any);
7845 if (!JSVAL_IS_PRIMITIVE(r)) {
7846 ABORT_IF_XML(r);
7847 return call_imacro(binary_imacros.any_obj);
7850 /* 11.8.5 steps 3, 16-21. */
7851 if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
7852 LIns* args[] = { r_ins, l_ins };
7853 l_ins = lir->insCall(&js_CompareStrings_ci, args);
7854 r_ins = lir->insImm(0);
7855 cond = EvalCmp(op, JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
7856 goto do_comparison;
7859 /* 11.8.5 steps 4-5. */
7860 if (!JSVAL_IS_NUMBER(l)) {
7861 LIns* args[] = { l_ins, cx_ins };
7862 switch (JSVAL_TAG(l)) {
7863 case JSVAL_SPECIAL:
7864 l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
7865 break;
7866 case JSVAL_STRING:
7867 l_ins = lir->insCall(&js_StringToNumber_ci, args);
7868 break;
7869 case JSVAL_OBJECT:
7870 if (JSVAL_IS_NULL(l)) {
7871 l_ins = lir->insImmf(0.0);
7872 break;
7874 // FALL THROUGH
7875 case JSVAL_INT:
7876 case JSVAL_DOUBLE:
7877 default:
7878 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
7879 "have been handled at start of method");
7880 ABORT_TRACE("safety belt");
7883 if (!JSVAL_IS_NUMBER(r)) {
7884 LIns* args[] = { r_ins, cx_ins };
7885 switch (JSVAL_TAG(r)) {
7886 case JSVAL_SPECIAL:
7887 r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
7888 break;
7889 case JSVAL_STRING:
7890 r_ins = lir->insCall(&js_StringToNumber_ci, args);
7891 break;
7892 case JSVAL_OBJECT:
7893 if (JSVAL_IS_NULL(r)) {
7894 r_ins = lir->insImmf(0.0);
7895 break;
7897 // FALL THROUGH
7898 case JSVAL_INT:
7899 case JSVAL_DOUBLE:
7900 default:
7901 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
7902 "have been handled at start of method");
7903 ABORT_TRACE("safety belt");
7907 jsval tmp = JSVAL_NULL;
7908 JSAutoTempValueRooter tvr(cx, 1, &tmp);
7910 tmp = l;
7911 lnum = js_ValueToNumber(cx, &tmp);
7912 tmp = r;
7913 rnum = js_ValueToNumber(cx, &tmp);
7915 cond = EvalCmp(op, lnum, rnum);
7916 fp = true;
7918 /* 11.8.5 steps 6-15. */
7919 do_comparison:
7920 /*
7921 * If the operands are not being compared as doubles (fp is false, i.e. we
7922 * are comparing the integer result of js_CompareStrings against 0), switch
7923 * to the integer variant of the comparison opcode.
7924 */
7924 if (!fp) {
7925 JS_ASSERT(op >= LIR_feq && op <= LIR_fge);
7926 op = LOpcode(op + (LIR_eq - LIR_feq));
7928 x = lir->ins2(op, l_ins, r_ins);
7930 jsbytecode* pc = cx->fp->regs->pc;
7932 /*
7933 * Don't guard if the same path is always taken. If it isn't, we have to
7934 * fuse comparisons and the following branch, because the interpreter does
7935 * that.
7936 */
7937 if (tryBranchAfterCond)
7938 fuseIf(pc + 1, cond, x);
7940 /*
7941 * There is no need to write out the result of this comparison if the trace
7942 * ends on this operation.
7943 */
7944 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
7945 CHECK_STATUS(checkTraceEnd(pc + 1));
7947 /*
7948 * We update the stack after the guard. This is safe since the guard bails
7949 * out at the comparison and the interpreter will therefore re-execute the
7950 * comparison. This way the value of the condition doesn't have to be
7951 * calculated and saved on the stack in most cases.
7952 */
7953 set(&l, x);
7955 return JSRS_CONTINUE;
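/*
 * A minimal sketch, kept out of the build, of the opcode adjustment at the
 * do_comparison label above: the string-compare path leaves fp == false, so
 * the floating-point comparison opcode is shifted to its integer
 * counterpart. This relies on LIR_eq..LIR_ge being laid out in the same
 * order as LIR_feq..LIR_fge. The helper name is illustrative only.
 */
#if 0
static LOpcode
ToIntegerCmpSketch(LOpcode fpCmp)
{
    /* LIR_feq -> LIR_eq, LIR_flt -> LIR_lt, LIR_fgt -> LIR_gt, ... */
    return LOpcode(fpCmp + (LIR_eq - LIR_feq));
}
#endif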
7958 JS_REQUIRES_STACK JSRecordingStatus
7959 TraceRecorder::unary(LOpcode op)
7961 jsval& v = stackval(-1);
7962 bool intop = !(op & LIR64);
7963 if (isNumber(v)) {
7964 LIns* a = get(&v);
7965 if (intop)
7966 a = f2i(a);
7967 a = lir->ins1(op, a);
7968 if (intop)
7969 a = lir->ins1(LIR_i2f, a);
7970 set(&v, a);
7971 return JSRS_CONTINUE;
7973 return JSRS_STOP;
7976 JS_REQUIRES_STACK JSRecordingStatus
7977 TraceRecorder::binary(LOpcode op)
7979 jsval& r = stackval(-1);
7980 jsval& l = stackval(-2);
7982 if (!JSVAL_IS_PRIMITIVE(l)) {
7983 ABORT_IF_XML(l);
7984 if (!JSVAL_IS_PRIMITIVE(r)) {
7985 ABORT_IF_XML(r);
7986 return call_imacro(binary_imacros.obj_obj);
7988 return call_imacro(binary_imacros.obj_any);
7990 if (!JSVAL_IS_PRIMITIVE(r)) {
7991 ABORT_IF_XML(r);
7992 return call_imacro(binary_imacros.any_obj);
7995 bool intop = !(op & LIR64);
7996 LIns* a = get(&l);
7997 LIns* b = get(&r);
7999 bool leftIsNumber = isNumber(l);
8000 jsdouble lnum = leftIsNumber ? asNumber(l) : 0;
8002 bool rightIsNumber = isNumber(r);
8003 jsdouble rnum = rightIsNumber ? asNumber(r) : 0;
8005 if ((op >= LIR_sub && op <= LIR_ush) || // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
8006 (op >= LIR_fsub && op <= LIR_fmod)) { // fsub, fmul, fdiv, fmod
8007 LIns* args[2];
8008 if (JSVAL_IS_STRING(l)) {
8009 args[0] = a;
8010 args[1] = cx_ins;
8011 a = lir->insCall(&js_StringToNumber_ci, args);
8012 lnum = js_StringToNumber(cx, JSVAL_TO_STRING(l));
8013 leftIsNumber = true;
8015 if (JSVAL_IS_STRING(r)) {
8016 args[0] = b;
8017 args[1] = cx_ins;
8018 b = lir->insCall(&js_StringToNumber_ci, args);
8019 rnum = js_StringToNumber(cx, JSVAL_TO_STRING(r));
8020 rightIsNumber = true;
8023 if (JSVAL_IS_SPECIAL(l)) {
8024 LIns* args[] = { a, cx_ins };
8025 a = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
8026 lnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_SPECIAL(l));
8027 leftIsNumber = true;
8029 if (JSVAL_IS_SPECIAL(r)) {
8030 LIns* args[] = { b, cx_ins };
8031 b = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
8032 rnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_SPECIAL(r));
8033 rightIsNumber = true;
8035 if (leftIsNumber && rightIsNumber) {
8036 if (intop) {
8037 LIns *args[] = { a };
8038 a = lir->insCall(op == LIR_ush ? &js_DoubleToUint32_ci : &js_DoubleToInt32_ci, args);
8039 b = f2i(b);
8041 a = alu(op, lnum, rnum, a, b);
8042 if (intop)
8043 a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
8044 set(&l, a);
8045 return JSRS_CONTINUE;
8047 return JSRS_STOP;
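/*
 * A minimal sketch, kept out of the build, of the integer fast path in
 * binary() above: for bitwise and shift ops the operands are demoted to
 * 32-bit integers (the CallInfos above name js_DoubleToUint32 for >>> and
 * js_DoubleToInt32 otherwise), the 32-bit op runs, and the result is widened
 * back to the trace's double representation via LIR_u2f or LIR_i2f. The
 * helper name is illustrative only, and the plain casts below are mere
 * stand-ins for those builtins (correct only for in-range inputs).
 */
#if 0
static double
UrshSketch(double l, double r)
{
    uint32_t a = uint32_t(int64_t(l));   /* stand-in for js_DoubleToUint32 */
    int32_t b = int32_t(int64_t(r));     /* stand-in for js_DoubleToInt32 (f2i) */
    return double(a >> (b & 31));        /* LIR_ush, then LIR_u2f */
}
#endif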
8050 JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);
8052 inline LIns*
8053 TraceRecorder::map(LIns *obj_ins)
8055 return addName(lir->insLoad(LIR_ldp, obj_ins, (int) offsetof(JSObject, map)), "map");
8058 bool
8059 TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
8061 JS_ASSERT(op_offset < sizeof(JSObjectOps));
8062 JS_ASSERT(op_offset % sizeof(void *) == 0);
8064 #define OP(ops) (*(void **) ((uint8 *) (ops) + op_offset))
8065 void* ptr = OP(map->ops);
8066 if (ptr != OP(&js_ObjectOps))
8067 return false;
8068 #undef OP
8070 ops_ins = addName(lir->insLoad(LIR_ldp, map_ins, int(offsetof(JSObjectMap, ops))), "ops");
8071 LIns* n = lir->insLoad(LIR_ldp, ops_ins, op_offset);
8072 guard(true,
8073 addName(lir->ins2(LIR_eq, n, INS_CONSTPTR(ptr)), "guard(native-map)"),
8074 BRANCH_EXIT);
8076 return true;
8079 JS_REQUIRES_STACK JSRecordingStatus
8080 TraceRecorder::guardNativePropertyOp(JSObject* aobj, LIns* map_ins)
8082 /*
8083 * Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops
8084 * which is required to use native objects (those whose maps are scopes),
8085 * or even more narrow conditions required because the cache miss case
8086 * will call a particular object-op (js_GetProperty, js_SetProperty).
8087 *
8088 * We parameterize using offsetof and guard on match against the hook at
8089 * the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
8090 * guards the js_SetProperty case.
8091 */
8092 uint32 format = js_CodeSpec[*cx->fp->regs->pc].format;
8093 uint32 mode = JOF_MODE(format);
8095 // No need to guard native-ness of global object.
8096 JS_ASSERT(OBJ_IS_NATIVE(globalObj));
8097 if (aobj != globalObj) {
8098 size_t op_offset = offsetof(JSObjectOps, objectMap);
8099 if (mode == JOF_PROP || mode == JOF_VARPROP) {
8100 op_offset = (format & JOF_SET)
8101 ? offsetof(JSObjectOps, setProperty)
8102 : offsetof(JSObjectOps, getProperty);
8103 } else {
8104 JS_ASSERT(mode == JOF_NAME);
8107 LIns* ops_ins;
8108 if (!map_is_native(aobj->map, map_ins, ops_ins, op_offset))
8109 ABORT_TRACE("non-native map");
8111 return JSRS_CONTINUE;
8114 JS_REQUIRES_STACK JSRecordingStatus
8115 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
8117 jsbytecode* pc = cx->fp->regs->pc;
8118 JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD &&
8119 *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD);
8121 // Mimic the interpreter's special case for dense arrays by skipping up one
8122 // hop along the proto chain when accessing a named (not indexed) property,
8123 // typically to find Array.prototype methods.
8124 JSObject* aobj = obj;
8125 if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
8126 guardDenseArray(obj, obj_ins, BRANCH_EXIT);
8127 aobj = OBJ_GET_PROTO(cx, obj);
8128 obj_ins = stobj_get_proto(obj_ins);
8131 LIns* map_ins = map(obj_ins);
8133 CHECK_STATUS(guardNativePropertyOp(aobj, map_ins));
8135 JSAtom* atom;
8136 JSPropCacheEntry* entry;
8137 PROPERTY_CACHE_TEST(cx, pc, aobj, obj2, entry, atom);
8138 if (!atom) {
8139 // Null atom means that obj2 is locked and must now be unlocked.
8140 JS_UNLOCK_OBJ(cx, obj2);
8141 } else {
8142 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
8143 jsid id = ATOM_TO_JSID(atom);
8144 JSProperty* prop;
8145 if (JOF_OPMODE(*pc) == JOF_NAME) {
8146 JS_ASSERT(aobj == obj);
8147 entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);
8149 if (!entry)
8150 ABORT_TRACE_ERROR("error in js_FindPropertyHelper");
8151 if (entry == JS_NO_PROP_CACHE_FILL)
8152 ABORT_TRACE("cannot cache name");
8153 } else {
8154 int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
8155 cx->resolveFlags,
8156 &obj2, &prop);
8158 if (protoIndex < 0)
8159 ABORT_TRACE_ERROR("error in js_LookupPropertyWithFlags");
8161 if (prop) {
8162 if (!OBJ_IS_NATIVE(obj2)) {
8163 obj2->dropProperty(cx, prop);
8164 ABORT_TRACE("property found on non-native object");
8166 entry = js_FillPropertyCache(cx, aobj, 0, protoIndex, obj2,
8167 (JSScopeProperty*) prop, false);
8168 JS_ASSERT(entry);
8169 if (entry == JS_NO_PROP_CACHE_FILL)
8170 entry = NULL;
8174 if (!prop) {
8175 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
8176 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
8177 // the global it's assigning does not yet exist, create it.
8178 obj2 = obj;
8180 // Use PCVAL_NULL to return "no such property" to our caller.
8181 pcval = PCVAL_NULL;
8182 return JSRS_CONTINUE;
8185 obj2->dropProperty(cx, prop);
8186 if (!entry)
8187 ABORT_TRACE("failed to fill property cache");
8190 if (wasDeepAborted())
8191 ABORT_TRACE("deep abort from property lookup");
8193 #ifdef JS_THREADSAFE
8194 // There's a potential race in any JS_THREADSAFE embedding that's nuts
8195 // enough to share mutable objects on the scope or proto chain, but we
8196 // don't care about such insane embeddings. Anyway, the (scope, proto)
8197 // entry->vcap coordinates must reach obj2 from aobj at this point.
8198 JS_ASSERT(cx->requestDepth);
8199 #endif
8201 return guardPropertyCacheHit(obj_ins, map_ins, aobj, obj2, entry, pcval);
8204 JS_REQUIRES_STACK JSRecordingStatus
8205 TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
8206 LIns* map_ins,
8207 JSObject* aobj,
8208 JSObject* obj2,
8209 JSPropCacheEntry* entry,
8210 jsuword& pcval)
8212 uint32 vshape = PCVCAP_SHAPE(entry->vcap);
8214 // Check for first-level cache hit and guard on kshape if possible.
8215 // Otherwise guard on key object exact match.
8216 if (PCVCAP_TAG(entry->vcap) <= 1) {
8217 if (aobj != globalObj) {
8218 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
8219 "shape");
8220 guard(true,
8221 addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard_kshape"),
8222 BRANCH_EXIT);
8225 if (entry->adding()) {
8226 if (aobj == globalObj)
8227 ABORT_TRACE("adding a property to the global object");
8229 LIns *vshape_ins = addName(
8230 lir->insLoad(LIR_ld,
8231 addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime)),
8232 "runtime"),
8233 offsetof(JSRuntime, protoHazardShape)),
8234 "protoHazardShape");
8235 guard(true,
8236 addName(lir->ins2i(LIR_eq, vshape_ins, vshape), "guard_protoHazardShape"),
8237 MISMATCH_EXIT);
8239 } else {
8240 #ifdef DEBUG
8241 JSOp op = js_GetOpcode(cx, cx->fp->script, cx->fp->regs->pc);
8242 JSAtom *pcatom;
8243 if (op == JSOP_LENGTH) {
8244 pcatom = cx->runtime->atomState.lengthAtom;
8245 } else {
8246 ptrdiff_t pcoff = (JOF_TYPE(js_CodeSpec[op].format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
8247 GET_ATOM_FROM_BYTECODE(cx->fp->script, cx->fp->regs->pc, pcoff, pcatom);
8249 JS_ASSERT(entry->kpc == (jsbytecode *) pcatom);
8250 JS_ASSERT(entry->kshape == jsuword(aobj));
8251 #endif
8252 if (aobj != globalObj && !obj_ins->isconstp()) {
8253 guard(true,
8254 addName(lir->ins2i(LIR_eq, obj_ins, entry->kshape), "guard_kobj"),
8255 BRANCH_EXIT);
8259 // For any hit that goes up the scope and/or proto chains, we will need to
8260 // guard on the shape of the object containing the property.
8261 if (PCVCAP_TAG(entry->vcap) >= 1) {
8262 JS_ASSERT(OBJ_SHAPE(obj2) == vshape);
8264 LIns* obj2_ins;
8265 if (PCVCAP_TAG(entry->vcap) == 1) {
8266 // Duplicate the special case in PROPERTY_CACHE_TEST.
8267 obj2_ins = addName(stobj_get_proto(obj_ins), "proto");
8268 guard(false, lir->ins_eq0(obj2_ins), BRANCH_EXIT);
8269 } else {
8270 obj2_ins = INS_CONSTOBJ(obj2);
8272 map_ins = map(obj2_ins);
8273 LIns* ops_ins;
8274 if (!map_is_native(obj2->map, map_ins, ops_ins))
8275 ABORT_TRACE("non-native map");
8277 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
8278 "obj2_shape");
8279 guard(true,
8280 addName(lir->ins2i(LIR_eq, shape_ins, vshape), "guard_vshape"),
8281 BRANCH_EXIT);
8284 pcval = entry->vword;
8285 return JSRS_CONTINUE;
8288 void
8289 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, LIns* v_ins)
8291 lir->insStorei(v_ins, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
8294 void
8295 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
8297 if (!dslots_ins)
8298 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
8299 lir->insStorei(v_ins, dslots_ins, slot * sizeof(jsval));
8302 void
8303 TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
8305 if (slot < JS_INITIAL_NSLOTS) {
8306 stobj_set_fslot(obj_ins, slot, v_ins);
8307 } else {
8308 stobj_set_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins, v_ins);
8312 LIns*
8313 TraceRecorder::stobj_get_fslot(LIns* obj_ins, unsigned slot)
8315 JS_ASSERT(slot < JS_INITIAL_NSLOTS);
8316 return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
8319 LIns*
8320 TraceRecorder::stobj_get_dslot(LIns* obj_ins, unsigned index, LIns*& dslots_ins)
8322 if (!dslots_ins)
8323 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
8324 return lir->insLoad(LIR_ldp, dslots_ins, index * sizeof(jsval));
8327 LIns*
8328 TraceRecorder::stobj_get_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins)
8330 if (slot < JS_INITIAL_NSLOTS)
8331 return stobj_get_fslot(obj_ins, slot);
8332 return stobj_get_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins);
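/*
 * A minimal sketch, kept out of the build, of the slot addressing that the
 * stobj_* helpers above emit LIR for: the first JS_INITIAL_NSLOTS slots live
 * inline in JSObject::fslots, everything above that lives in the dslots
 * array with the index rebased. The helper name is illustrative only.
 */
#if 0
static jsval
GetSlotSketch(JSObject* obj, unsigned slot)
{
    if (slot < JS_INITIAL_NSLOTS)
        return obj->fslots[slot];
    return obj->dslots[slot - JS_INITIAL_NSLOTS];
}
#endif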
8335 JS_REQUIRES_STACK LIns*
8336 TraceRecorder::box_jsval(jsval v, LIns* v_ins)
8338 if (isNumber(v)) {
8339 LIns* args[] = { v_ins, cx_ins };
8340 v_ins = lir->insCall(&js_BoxDouble_ci, args);
8341 guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
8342 OOM_EXIT);
8343 return v_ins;
8345 switch (JSVAL_TAG(v)) {
8346 case JSVAL_SPECIAL:
8347 return lir->ins2i(LIR_pior, lir->ins2i(LIR_pilsh, v_ins, JSVAL_TAGBITS), JSVAL_SPECIAL);
8348 case JSVAL_OBJECT:
8349 return v_ins;
8350 default:
8351 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
8352 return lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
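/*
 * A minimal sketch, kept out of the build, of the tagging that box_jsval()
 * emits above for the non-double cases: specials are shifted up past the tag
 * bits and tagged JSVAL_SPECIAL, string pointers (sufficiently aligned, so
 * the low bits are free) get JSVAL_STRING or'ed in, and objects are already
 * valid jsvals. Doubles are not shown; they go through the js_BoxDouble
 * builtin and its error-cookie guard. The helper names are illustrative only.
 */
#if 0
static jsval
BoxStringSketch(JSString* str)
{
    return jsval(reinterpret_cast<jsuword>(str)) | JSVAL_STRING;
}

static jsval
BoxSpecialSketch(int32 special)
{
    /* true, false and undefined are small integers shifted past the tag. */
    return (jsval(special) << JSVAL_TAGBITS) | JSVAL_SPECIAL;
}
#endif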
8356 JS_REQUIRES_STACK LIns*
8357 TraceRecorder::unbox_jsval(jsval v, LIns* v_ins, VMSideExit* exit)
8359 if (isNumber(v)) {
8360 // JSVAL_IS_NUMBER(v)
8361 guard(false,
8362 lir->ins_eq0(lir->ins2(LIR_pior,
8363 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
8364 lir->ins2i(LIR_eq,
8365 lir->ins2(LIR_piand, v_ins,
8366 INS_CONST(JSVAL_TAGMASK)),
8367 JSVAL_DOUBLE))),
8368 exit);
8369 LIns* args[] = { v_ins };
8370 return lir->insCall(&js_UnboxDouble_ci, args);
8372 switch (JSVAL_TAG(v)) {
8373 case JSVAL_SPECIAL:
8374 guard(true,
8375 lir->ins2i(LIR_eq,
8376 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
8377 JSVAL_SPECIAL),
8378 exit);
8379 return lir->ins2i(LIR_ush, v_ins, JSVAL_TAGBITS);
8381 case JSVAL_OBJECT:
8382 if (JSVAL_IS_NULL(v)) {
8383 // JSVAL_NULL maps to type TT_NULL, so insist that v_ins == 0 here.
8384 guard(true, lir->ins_eq0(v_ins), exit);
8385 } else {
8386 guard(false, lir->ins_eq0(v_ins), exit);
8387 guard(true,
8388 lir->ins2i(LIR_eq,
8389 lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
8390 JSVAL_OBJECT),
8391 exit);
8392 guard(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)),
8393 lir->ins2(LIR_eq,
8394 lir->ins2(LIR_piand,
8395 lir->insLoad(LIR_ldp, v_ins, offsetof(JSObject, classword)),
8396 INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
8397 INS_CONSTPTR(&js_FunctionClass)),
8398 exit);
8400 return v_ins;
8402 default:
8403 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
8404 guard(true,
8405 lir->ins2i(LIR_eq,
8406 lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
8407 JSVAL_STRING),
8408 exit);
8409 return lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
8413 JS_REQUIRES_STACK JSRecordingStatus
8414 TraceRecorder::getThis(LIns*& this_ins)
8416 /*
8417 * js_ComputeThisForFrame updates cx->fp->argv[-1], so sample it into 'original' first.
8418 */
8419 jsval original = JSVAL_NULL;
8420 if (cx->fp->argv) {
8421 original = cx->fp->argv[-1];
8422 if (!JSVAL_IS_PRIMITIVE(original) &&
8423 guardClass(JSVAL_TO_OBJECT(original), get(&cx->fp->argv[-1]), &js_WithClass, snapshot(MISMATCH_EXIT))) {
8424 ABORT_TRACE("can't trace getThis on With object");
8428 JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
8429 if (!thisObj)
8430 ABORT_TRACE_ERROR("js_ComputeThisForFrame failed");
8432 /* In global code, bake in the global object as 'this' object. */
8433 if (!cx->fp->callee()) {
8434 JS_ASSERT(callDepth == 0);
8435 this_ins = INS_CONSTOBJ(thisObj);
8437 /*
8438 * We don't have argv[-1] in global code, so we don't update the
8439 * tracker here.
8440 */
8441 return JSRS_CONTINUE;
8444 jsval& thisv = cx->fp->argv[-1];
8445 JS_ASSERT(JSVAL_IS_OBJECT(thisv));
8447 /*
8448 * Traces type-specialize between null and objects, so if we currently see
8449 * a null value in argv[-1], this trace will only match if we see null at
8450 * runtime as well. Bake in the global object as 'this' object, updating
8451 * the tracker as well. We can only detect this condition prior to calling
8452 * js_ComputeThisForFrame, since it updates the interpreter's copy of
8453 * argv[-1].
8454 */
8455 JSClass* clasp = NULL;
8456 if (JSVAL_IS_NULL(original) ||
8457 (((clasp = STOBJ_GET_CLASS(JSVAL_TO_OBJECT(original))) == &js_CallClass) ||
8458 (clasp == &js_BlockClass))) {
8459 if (clasp)
8460 guardClass(JSVAL_TO_OBJECT(original), get(&thisv), clasp, snapshot(BRANCH_EXIT));
8461 JS_ASSERT(!JSVAL_IS_PRIMITIVE(thisv));
8462 if (thisObj != globalObj)
8463 ABORT_TRACE("global object was wrapped while recording");
8464 this_ins = INS_CONSTOBJ(thisObj);
8465 set(&thisv, this_ins);
8466 return JSRS_CONTINUE;
8468 this_ins = get(&thisv);
8470 /*
8471 * The only unwrapped object that needs to be wrapped that we can get here
8472 * is the global object obtained through the scope chain.
8473 */
8474 JSObject* obj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(thisv));
8475 JSObject* inner = obj;
8476 OBJ_TO_INNER_OBJECT(cx, inner);
8477 if (!obj)
8478 return JSRS_ERROR;
8480 JS_ASSERT(original == thisv ||
8481 original == OBJECT_TO_JSVAL(inner) ||
8482 original == OBJECT_TO_JSVAL(obj));
8484 /*
8485 * If the returned this object is the unwrapped inner or outer object,
8486 * then we need to use the wrapped outer object.
8487 */
8488 LIns* is_inner = lir->ins2(LIR_eq, this_ins, INS_CONSTOBJ(inner));
8489 LIns* is_outer = lir->ins2(LIR_eq, this_ins, INS_CONSTOBJ(obj));
8490 LIns* wrapper = INS_CONSTOBJ(JSVAL_TO_OBJECT(thisv));
8492 this_ins = lir->ins_choose(is_inner,
8493 wrapper,
8494 lir->ins_choose(is_outer,
8495 wrapper,
8496 this_ins));
8498 return JSRS_CONTINUE;
8502 LIns*
8503 TraceRecorder::getStringLength(LIns* str_ins)
8505 LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, mLength));
8507 LIns* masked_len_ins = lir->ins2(LIR_piand,
8508 len_ins,
8509 INS_CONSTWORD(JSString::LENGTH_MASK));
8511 return
8512 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
8513 len_ins,
8514 INS_CONSTWORD(JSString::DEPENDENT))),
8515 masked_len_ins,
8516 lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
8517 len_ins,
8518 INS_CONSTWORD(JSString::PREFIX))),
8519 lir->ins2(LIR_piand,
8520 len_ins,
8521 INS_CONSTWORD(JSString::DEPENDENT_LENGTH_MASK)),
8522 masked_len_ins));
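/*
 * A minimal sketch, kept out of the build, of the length decoding that
 * getStringLength() selects above with nested ins_choose: plain strings and
 * dependent prefix strings keep their length under LENGTH_MASK, while other
 * dependent strings store it under the narrower DEPENDENT_LENGTH_MASK. The
 * helper name is illustrative only.
 */
#if 0
static size_t
StringLengthSketch(size_t mLength)
{
    if (!(mLength & JSString::DEPENDENT))
        return mLength & JSString::LENGTH_MASK;
    if (!(mLength & JSString::PREFIX))
        return mLength & JSString::DEPENDENT_LENGTH_MASK;
    return mLength & JSString::LENGTH_MASK;
}
#endif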
8525 JS_REQUIRES_STACK bool
8526 TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, VMSideExit* exit)
8528 bool cond = STOBJ_GET_CLASS(obj) == clasp;
8530 LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
8531 class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~JSSLOT_CLASS_MASK_BITS));
8533 char namebuf[32];
8534 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
8535 guard(cond, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
8536 return cond;
8539 JS_REQUIRES_STACK bool
8540 TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, ExitType exitType)
8542 return guardClass(obj, obj_ins, &js_ArrayClass, snapshot(exitType));
8545 JS_REQUIRES_STACK bool
8546 TraceRecorder::guardHasPrototype(JSObject* obj, LIns* obj_ins,
8547 JSObject** pobj, LIns** pobj_ins,
8548 VMSideExit* exit)
8550 *pobj = obj->getProto();
8551 *pobj_ins = stobj_get_proto(obj_ins);
8553 bool cond = *pobj == NULL;
8554 guard(cond, addName(lir->ins_eq0(*pobj_ins), "guard(proto-not-null)"), exit);
8555 return !cond;
8558 JS_REQUIRES_STACK JSRecordingStatus
8559 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, ExitType exitType)
8561 /*
8562 * Guard that no object along the prototype chain has any indexed
8563 * properties which might become visible through holes in the array.
8564 */
8565 VMSideExit* exit = snapshot(exitType);
8567 if (js_PrototypeHasIndexedProperties(cx, obj))
8568 return JSRS_STOP;
8570 while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit)) {
8571 LIns* map_ins = map(obj_ins);
8572 LIns* ops_ins;
8573 if (!map_is_native(obj->map, map_ins, ops_ins))
8574 ABORT_TRACE("non-native object involved along prototype chain");
8576 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
8577 "shape");
8578 guard(true,
8579 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
8580 exit);
8582 return JSRS_CONTINUE;
8585 JSRecordingStatus
8586 TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
8588 if (obj == globalObj)
8589 ABORT_TRACE("reference aliases global object");
8590 guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTOBJ(globalObj)), MISMATCH_EXIT);
8591 return JSRS_CONTINUE;
8594 JS_REQUIRES_STACK void
8595 TraceRecorder::clearFrameSlotsFromCache()
8597 /*
8598 * Clear out all slots of this frame in the nativeFrameTracker. Different
8599 * locations on the VM stack might map to different locations on the native
8600 * stack depending on the number of arguments of the next call, so we
8601 * have to make sure we map them into the cache with the right
8602 * offsets.
8603 */
8604 JSStackFrame* fp = cx->fp;
8605 jsval* vp;
8606 jsval* vpstop;
8608 /*
8609 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
8610 * This doesn't do layout arithmetic, but it must clear out all the slots defined as
8611 * imported by VisitFrameSlots.
8612 */
8613 if (fp->argv) {
8614 vp = &fp->argv[-2];
8615 vpstop = &fp->argv[argSlots(fp)];
8616 while (vp < vpstop)
8617 nativeFrameTracker.set(vp++, (LIns*)0);
8618 nativeFrameTracker.set(&fp->argsobj, (LIns*)0);
8620 vp = &fp->slots[0];
8621 vpstop = &fp->slots[fp->script->nslots];
8622 while (vp < vpstop)
8623 nativeFrameTracker.set(vp++, (LIns*)0);
8626 /*
8627 * If we have created an |arguments| object for the frame, we must copy the
8628 * argument values into the object as properties in case it is used after
8629 * this frame returns.
8630 */
8631 JS_REQUIRES_STACK void
8632 TraceRecorder::putArguments()
8634 if (cx->fp->argsobj && cx->fp->argc) {
8635 LIns* argsobj_ins = get(&cx->fp->argsobj);
8636 LIns* args_ins = lir->insAlloc(sizeof(jsval) * cx->fp->argc);
8637 for (uintN i = 0; i < cx->fp->argc; ++i) {
8638 LIns* arg_ins = box_jsval(cx->fp->argv[i], get(&cx->fp->argv[i]));
8639 lir->insStorei(arg_ins, args_ins, i * sizeof(jsval));
8641 LIns* args[] = { args_ins, argsobj_ins, cx_ins };
8642 lir->insCall(&js_PutArguments_ci, args);
8646 JS_REQUIRES_STACK JSRecordingStatus
8647 TraceRecorder::record_EnterFrame()
8649 JSStackFrame* fp = cx->fp;
8651 if (++callDepth >= MAX_CALLDEPTH)
8652 ABORT_TRACE("exceeded maximum call depth");
8654 // FIXME: Allow and attempt to inline a single level of recursion until we compile
8655 // recursive calls as independent trees (459301).
8656 if (fp->script == fp->down->script && fp->down->down && fp->down->down->script == fp->script)
8657 ABORT_TRACE("recursive call");
8659 debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
8660 js_AtomToPrintableString(cx, cx->fp->fun->atom),
8661 callDepth);
8662 debug_only_stmt(
8663 if (js_LogController.lcbits & LC_TMRecorder) {
8664 js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
8665 debug_only_print0(LC_TMTracer, "----\n");
8668 LIns* void_ins = INS_VOID();
8670 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
8671 // This doesn't do layout arithmetic, but it must initialize in the tracker all the
8672 // slots defined as imported by VisitFrameSlots.
8673 jsval* vp = &fp->argv[fp->argc];
8674 jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
8675 while (vp < vpstop) {
8676 if (vp >= fp->down->regs->sp)
8677 nativeFrameTracker.set(vp, (LIns*)0);
8678 set(vp++, void_ins, true);
8681 vp = &fp->slots[0];
8682 vpstop = vp + fp->script->nfixed;
8683 while (vp < vpstop)
8684 set(vp++, void_ins, true);
8685 set(&fp->argsobj, INS_NULL(), true);
8686 return JSRS_CONTINUE;
8689 JS_REQUIRES_STACK JSRecordingStatus
8690 TraceRecorder::record_LeaveFrame()
8692 debug_only_stmt(
8693 if (cx->fp->fun)
8694 debug_only_printf(LC_TMTracer,
8695 "LeaveFrame (back to %s), callDepth=%d\n",
8696 js_AtomToPrintableString(cx, cx->fp->fun->atom),
8697 callDepth);
8699 if (callDepth-- <= 0)
8700 ABORT_TRACE("returned out of a loop we started tracing");
8702 // LeaveFrame gets called after the interpreter popped the frame and
8703 // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
8704 atoms = FrameAtomBase(cx, cx->fp);
8705 set(&stackval(-1), rval_ins, true);
8706 return JSRS_CONTINUE;
8709 JS_REQUIRES_STACK JSRecordingStatus
8710 TraceRecorder::record_JSOP_PUSH()
8712 stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)));
8713 return JSRS_CONTINUE;
8716 JS_REQUIRES_STACK JSRecordingStatus
8717 TraceRecorder::record_JSOP_POPV()
8719 jsval& rval = stackval(-1);
8720 LIns *rval_ins = box_jsval(rval, get(&rval));
8722 // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
8723 // frame because POPV appears only in global and eval code and we don't
8724 // trace JSOP_EVAL or leaving the frame where tracing started.
8725 LIns *fp_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp));
8726 lir->insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval));
8727 return JSRS_CONTINUE;
8730 JS_REQUIRES_STACK JSRecordingStatus
8731 TraceRecorder::record_JSOP_ENTERWITH()
8733 return JSRS_STOP;
8736 JS_REQUIRES_STACK JSRecordingStatus
8737 TraceRecorder::record_JSOP_LEAVEWITH()
8739 return JSRS_STOP;
8742 JS_REQUIRES_STACK JSRecordingStatus
8743 TraceRecorder::record_JSOP_RETURN()
8745 /* A return from callDepth 0 terminates the current loop. */
8746 if (callDepth == 0) {
8747 AUDIT(returnLoopExits);
8748 endLoop();
8749 return JSRS_STOP;
8752 putArguments();
8754 /* If we inlined this function call, make the return value available to the caller code. */
8755 jsval& rval = stackval(-1);
8756 JSStackFrame *fp = cx->fp;
8757 if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
8758 JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
8759 rval_ins = get(&fp->argv[-1]);
8760 } else {
8761 rval_ins = get(&rval);
8763 debug_only_printf(LC_TMTracer,
8764 "returning from %s\n",
8765 js_AtomToPrintableString(cx, cx->fp->fun->atom));
8766 clearFrameSlotsFromCache();
8768 return JSRS_CONTINUE;
8771 JS_REQUIRES_STACK JSRecordingStatus
8772 TraceRecorder::record_JSOP_GOTO()
8774 /*
8775 * If we hit a break or a continue to an outer loop, end the loop and
8776 * generate an always-taken loop exit guard. For other downward gotos
8777 * (like if/else) continue recording.
8778 */
8779 jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->fp->regs->pc);
8781 if (sn && (SN_TYPE(sn) == SRC_BREAK || SN_TYPE(sn) == SRC_CONT2LABEL)) {
8782 AUDIT(breakLoopExits);
8783 endLoop();
8784 return JSRS_STOP;
8786 return JSRS_CONTINUE;
8789 JS_REQUIRES_STACK JSRecordingStatus
8790 TraceRecorder::record_JSOP_IFEQ()
8792 trackCfgMerges(cx->fp->regs->pc);
8793 return ifop();
8796 JS_REQUIRES_STACK JSRecordingStatus
8797 TraceRecorder::record_JSOP_IFNE()
8799 return ifop();
8802 LIns*
8803 TraceRecorder::newArguments()
8805 LIns* global_ins = INS_CONSTOBJ(globalObj);
8806 LIns* argc_ins = INS_CONST(cx->fp->argc);
8807 LIns* callee_ins = get(&cx->fp->argv[-2]);
8808 LIns* argv_ins = cx->fp->argc
8809 ? lir->ins2(LIR_piadd, lirbuf->sp,
8810 INS_CONST(-treeInfo->nativeStackBase + nativeStackOffset(&cx->fp->argv[0])))
8811 : INS_CONSTPTR((void *) 2);
8812 js_ArgsPrivateNative *apn = js_ArgsPrivateNative::create(*traceMonitor->allocator,
8813 cx->fp->argc);
8814 for (uintN i = 0; i < cx->fp->argc; ++i) {
8815 apn->typemap()[i] = determineSlotType(&cx->fp->argv[i]);
8818 LIns* args[] = { INS_CONSTPTR(apn), argv_ins, callee_ins, argc_ins, global_ins, cx_ins };
8819 LIns* call_ins = lir->insCall(&js_Arguments_ci, args);
8820 guard(false, lir->ins_eq0(call_ins), OOM_EXIT);
8821 return call_ins;
8824 JS_REQUIRES_STACK JSRecordingStatus
8825 TraceRecorder::record_JSOP_ARGUMENTS()
8827 if (cx->fp->flags & JSFRAME_OVERRIDE_ARGS)
8828 ABORT_TRACE("Can't trace |arguments| if |arguments| is assigned to");
8830 LIns* a_ins = get(&cx->fp->argsobj);
8831 LIns* args_ins;
8832 if (a_ins->opcode() == LIR_int) {
8833 // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
8834 args_ins = newArguments();
8835 } else {
8836 // Generate LIR to create arguments only if it has not already been created.
8838 LIns* mem_ins = lir->insAlloc(sizeof(jsval));
8840 LIns* br1 = lir->insBranch(LIR_jt, lir->ins_eq0(a_ins), NULL);
8841 lir->insStorei(a_ins, mem_ins, 0);
8842 LIns* br2 = lir->insBranch(LIR_j, NULL, NULL);
8844 LIns* label1 = lir->ins0(LIR_label);
8845 br1->setTarget(label1);
8847 LIns* call_ins = newArguments();
8848 lir->insStorei(call_ins, mem_ins, 0);
8850 LIns* label2 = lir->ins0(LIR_label);
8851 br2->setTarget(label2);
8853 args_ins = lir->insLoad(LIR_ld, mem_ins, 0);
8856 stack(0, args_ins);
8857 set(&cx->fp->argsobj, args_ins);
8858 return JSRS_CONTINUE;
8861 JS_REQUIRES_STACK JSRecordingStatus
8862 TraceRecorder::record_JSOP_DUP()
8864 stack(0, get(&stackval(-1)));
8865 return JSRS_CONTINUE;
8868 JS_REQUIRES_STACK JSRecordingStatus
8869 TraceRecorder::record_JSOP_DUP2()
8871 stack(0, get(&stackval(-2)));
8872 stack(1, get(&stackval(-1)));
8873 return JSRS_CONTINUE;
8876 JS_REQUIRES_STACK JSRecordingStatus
8877 TraceRecorder::record_JSOP_SWAP()
8879 jsval& l = stackval(-2);
8880 jsval& r = stackval(-1);
8881 LIns* l_ins = get(&l);
8882 LIns* r_ins = get(&r);
8883 set(&r, l_ins);
8884 set(&l, r_ins);
8885 return JSRS_CONTINUE;
8888 JS_REQUIRES_STACK JSRecordingStatus
8889 TraceRecorder::record_JSOP_PICK()
8891 jsval* sp = cx->fp->regs->sp;
8892 jsint n = cx->fp->regs->pc[1];
8893 JS_ASSERT(sp - (n+1) >= StackBase(cx->fp));
8894 LIns* top = get(sp - (n+1));
8895 for (jsint i = 0; i < n; ++i)
8896 set(sp - (n+1) + i, get(sp - n + i));
8897 set(&sp[-1], top);
8898 return JSRS_CONTINUE;
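/*
 * A minimal sketch, kept out of the build, of the rotation record_JSOP_PICK
 * performs above on the tracked LIR values. For n == 2 the stack ... a b c
 * (c on top) becomes ... b c a. The helper name is illustrative only.
 */
#if 0
static void
PickSketch(LIns** sp, jsint n)
{
    LIns* top = sp[-(n + 1)];            /* the value being lifted to the top */
    for (jsint i = 0; i < n; ++i)
        sp[-(n + 1) + i] = sp[-n + i];   /* shift the n values above it down */
    sp[-1] = top;
}
#endif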
8901 JS_REQUIRES_STACK JSRecordingStatus
8902 TraceRecorder::record_JSOP_SETCONST()
8904 return JSRS_STOP;
8907 JS_REQUIRES_STACK JSRecordingStatus
8908 TraceRecorder::record_JSOP_BITOR()
8910 return binary(LIR_or);
8913 JS_REQUIRES_STACK JSRecordingStatus
8914 TraceRecorder::record_JSOP_BITXOR()
8916 return binary(LIR_xor);
8919 JS_REQUIRES_STACK JSRecordingStatus
8920 TraceRecorder::record_JSOP_BITAND()
8922 return binary(LIR_and);
8925 JS_REQUIRES_STACK JSRecordingStatus
8926 TraceRecorder::record_JSOP_EQ()
8928 return equality(false, true);
8931 JS_REQUIRES_STACK JSRecordingStatus
8932 TraceRecorder::record_JSOP_NE()
8934 return equality(true, true);
8937 JS_REQUIRES_STACK JSRecordingStatus
8938 TraceRecorder::record_JSOP_LT()
8940 return relational(LIR_flt, true);
8943 JS_REQUIRES_STACK JSRecordingStatus
8944 TraceRecorder::record_JSOP_LE()
8946 return relational(LIR_fle, true);
8949 JS_REQUIRES_STACK JSRecordingStatus
8950 TraceRecorder::record_JSOP_GT()
8952 return relational(LIR_fgt, true);
8955 JS_REQUIRES_STACK JSRecordingStatus
8956 TraceRecorder::record_JSOP_GE()
8958 return relational(LIR_fge, true);
8961 JS_REQUIRES_STACK JSRecordingStatus
8962 TraceRecorder::record_JSOP_LSH()
8964 return binary(LIR_lsh);
8967 JS_REQUIRES_STACK JSRecordingStatus
8968 TraceRecorder::record_JSOP_RSH()
8970 return binary(LIR_rsh);
8973 JS_REQUIRES_STACK JSRecordingStatus
8974 TraceRecorder::record_JSOP_URSH()
8976 return binary(LIR_ush);
8979 JS_REQUIRES_STACK JSRecordingStatus
8980 TraceRecorder::record_JSOP_ADD()
8982 jsval& r = stackval(-1);
8983 jsval& l = stackval(-2);
8985 if (!JSVAL_IS_PRIMITIVE(l)) {
8986 ABORT_IF_XML(l);
8987 if (!JSVAL_IS_PRIMITIVE(r)) {
8988 ABORT_IF_XML(r);
8989 return call_imacro(add_imacros.obj_obj);
8991 return call_imacro(add_imacros.obj_any);
8993 if (!JSVAL_IS_PRIMITIVE(r)) {
8994 ABORT_IF_XML(r);
8995 return call_imacro(add_imacros.any_obj);
8998 if (JSVAL_IS_STRING(l) || JSVAL_IS_STRING(r)) {
8999 LIns* args[] = { stringify(r), stringify(l), cx_ins };
9000 LIns* concat = lir->insCall(&js_ConcatStrings_ci, args);
9001 guard(false, lir->ins_eq0(concat), OOM_EXIT);
9002 set(&l, concat);
9003 return JSRS_CONTINUE;
9006 return binary(LIR_fadd);
9009 JS_REQUIRES_STACK JSRecordingStatus
9010 TraceRecorder::record_JSOP_SUB()
9012 return binary(LIR_fsub);
9015 JS_REQUIRES_STACK JSRecordingStatus
9016 TraceRecorder::record_JSOP_MUL()
9018 return binary(LIR_fmul);
9021 JS_REQUIRES_STACK JSRecordingStatus
9022 TraceRecorder::record_JSOP_DIV()
9024 return binary(LIR_fdiv);
9027 JS_REQUIRES_STACK JSRecordingStatus
9028 TraceRecorder::record_JSOP_MOD()
9030 return binary(LIR_fmod);
9033 JS_REQUIRES_STACK JSRecordingStatus
9034 TraceRecorder::record_JSOP_NOT()
9036 jsval& v = stackval(-1);
9037 if (JSVAL_IS_SPECIAL(v)) {
9038 set(&v, lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)));
9039 return JSRS_CONTINUE;
9041 if (isNumber(v)) {
9042 LIns* v_ins = get(&v);
9043 set(&v, lir->ins2(LIR_or, lir->ins2(LIR_feq, v_ins, lir->insImmf(0)),
9044 lir->ins_eq0(lir->ins2(LIR_feq, v_ins, v_ins))));
9045 return JSRS_CONTINUE;
9047 if (JSVAL_TAG(v) == JSVAL_OBJECT) {
9048 set(&v, lir->ins_eq0(get(&v)));
9049 return JSRS_CONTINUE;
9051 JS_ASSERT(JSVAL_IS_STRING(v));
9052 set(&v, lir->ins_eq0(lir->ins2(LIR_piand,
9053 lir->insLoad(LIR_ldp, get(&v), (int)offsetof(JSString, mLength)),
9054 INS_CONSTWORD(JSString::LENGTH_MASK))));
9055 return JSRS_CONTINUE;
9058 JS_REQUIRES_STACK JSRecordingStatus
9059 TraceRecorder::record_JSOP_BITNOT()
9061 return unary(LIR_not);
9064 JS_REQUIRES_STACK JSRecordingStatus
9065 TraceRecorder::record_JSOP_NEG()
9067 jsval& v = stackval(-1);
9069 if (!JSVAL_IS_PRIMITIVE(v)) {
9070 ABORT_IF_XML(v);
9071 return call_imacro(unary_imacros.sign);
9074 if (isNumber(v)) {
9075 LIns* a = get(&v);
9077 /*
9078 * If we're a promoted integer, we have to watch out for 0s since -0 is
9079 * a double. Only follow this path if we're not an integer that's 0 and
9080 * we're not a double that's zero.
9081 */
9082 if (!oracle.isInstructionUndemotable(cx->fp->regs->pc) &&
9083 isPromoteInt(a) &&
9084 (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
9085 (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
9086 -asNumber(v) == (int)-asNumber(v)) {
9087 a = lir->ins1(LIR_neg, ::demote(lir, a));
9088 if (!a->isconst()) {
9089 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
9090 guard(false, lir->ins1(LIR_ov, a), exit);
9091 guard(false, lir->ins2i(LIR_eq, a, 0), exit);
9093 a = lir->ins1(LIR_i2f, a);
9094 } else {
9095 a = lir->ins1(LIR_fneg, a);
9098 set(&v, a);
9099 return JSRS_CONTINUE;
9102 if (JSVAL_IS_NULL(v)) {
9103 set(&v, lir->insImmf(-0.0));
9104 return JSRS_CONTINUE;
9107 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_IS_SPECIAL(v));
9109 LIns* args[] = { get(&v), cx_ins };
9110 set(&v, lir->ins1(LIR_fneg,
9111 lir->insCall(JSVAL_IS_STRING(v)
9112 ? &js_StringToNumber_ci
9113 : &js_BooleanOrUndefinedToNumber_ci,
9114 args)));
9115 return JSRS_CONTINUE;
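/*
 * A minimal sketch, kept out of the build, of why the demoted-integer path
 * in record_JSOP_NEG above needs both guards: negating 0 would have to
 * produce the double -0, and negating INT32_MIN overflows int32 (LIR_ov).
 * Either case side-exits so the interpreter redoes the negation on doubles.
 * The helper name is illustrative only.
 */
#if 0
static bool
IntNegStaysIntSketch(int32_t i)
{
    if (i == 0)
        return false;        /* -0 is a double value */
    if (i == INT32_MIN)
        return false;        /* -INT32_MIN does not fit in int32 */
    return true;
}
#endif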
9118 JS_REQUIRES_STACK JSRecordingStatus
9119 TraceRecorder::record_JSOP_POS()
9121 jsval& v = stackval(-1);
9123 if (!JSVAL_IS_PRIMITIVE(v)) {
9124 ABORT_IF_XML(v);
9125 return call_imacro(unary_imacros.sign);
9128 if (isNumber(v))
9129 return JSRS_CONTINUE;
9131 if (JSVAL_IS_NULL(v)) {
9132 set(&v, lir->insImmf(0));
9133 return JSRS_CONTINUE;
9136 JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_IS_SPECIAL(v));
9138 LIns* args[] = { get(&v), cx_ins };
9139 set(&v, lir->insCall(JSVAL_IS_STRING(v)
9140 ? &js_StringToNumber_ci
9141 : &js_BooleanOrUndefinedToNumber_ci,
9142 args));
9143 return JSRS_CONTINUE;
9146 JS_REQUIRES_STACK JSRecordingStatus
9147 TraceRecorder::record_JSOP_PRIMTOP()
9149 // Either this opcode does nothing or we couldn't have traced here, because
9150 // we'd have thrown an exception -- so do nothing if we actually hit this.
9151 return JSRS_CONTINUE;
9154 JS_REQUIRES_STACK JSRecordingStatus
9155 TraceRecorder::record_JSOP_OBJTOP()
9157 jsval& v = stackval(-1);
9158 ABORT_IF_XML(v);
9159 return JSRS_CONTINUE;
9162 JSRecordingStatus
9163 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
9165 jsval pval;
9167 if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
9168 ABORT_TRACE_ERROR("error getting prototype from constructor");
9169 if (JSVAL_TAG(pval) != JSVAL_OBJECT)
9170 ABORT_TRACE("got primitive prototype from constructor");
9171 #ifdef DEBUG
9172 JSBool ok, found;
9173 uintN attrs;
9174 ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
9175 JS_ASSERT(ok);
9176 JS_ASSERT(found);
9177 JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
9178 #endif
9179 proto_ins = INS_CONSTOBJ(JSVAL_TO_OBJECT(pval));
9180 return JSRS_CONTINUE;
9183 JSRecordingStatus
9184 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
9186 JSObject* proto;
9187 if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &proto))
9188 ABORT_TRACE_ERROR("error in js_GetClassPrototype");
9189 proto_ins = INS_CONSTOBJ(proto);
9190 return JSRS_CONTINUE;
9193 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)
9195 JSRecordingStatus
9196 TraceRecorder::newString(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
9198 JS_ASSERT(argc == 1);
9200 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
9201 ABORT_IF_XML(argv[0]);
9202 return call_imacro(new_imacros.String);
9205 LIns* proto_ins;
9206 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
9208 LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
9209 LIns* obj_ins = lir->insCall(&js_String_tn_ci, args);
9210 guard(false, lir->ins_eq0(obj_ins), OOM_EXIT);
9212 set(rval, obj_ins);
9213 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
9214 return JSRS_CONTINUE;
9217 JSRecordingStatus
9218 TraceRecorder::newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
9220 LIns *proto_ins;
9221 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
9223 LIns *arr_ins;
9224 if (argc == 0 || (argc == 1 && JSVAL_IS_NUMBER(argv[0]))) {
9225 // arr_ins = js_NewEmptyArray(cx, Array.prototype)
9226 LIns *args[] = { proto_ins, cx_ins };
9227 arr_ins = lir->insCall(&js_NewEmptyArray_ci, args);
9228 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
9229 if (argc == 1) {
9230 // array_ins.fslots[JSSLOT_ARRAY_LENGTH] = length
9231 lir->insStorei(f2i(get(argv)), // FIXME: is this 64-bit safe?
9232 arr_ins,
9233 offsetof(JSObject, fslots) + JSSLOT_ARRAY_LENGTH * sizeof(jsval));
9235 } else {
9236 // arr_ins = js_NewUninitializedArray(cx, Array.prototype, argc)
9237 LIns *args[] = { INS_CONST(argc), proto_ins, cx_ins };
9238 arr_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
9239 guard(false, lir->ins_eq0(arr_ins), OOM_EXIT);
9241 // arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
9242 LIns *dslots_ins = NULL;
9243 VMAllocator *alloc = traceMonitor->allocator;
9244 for (uint32 i = 0; i < argc && !alloc->outOfMemory(); i++) {
9245 LIns *elt_ins = box_jsval(argv[i], get(&argv[i]));
9246 stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins);
9249 if (argc > 0)
9250 stobj_set_fslot(arr_ins, JSSLOT_ARRAY_COUNT, INS_CONST(argc));
9253 set(rval, arr_ins);
9254 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
9255 return JSRS_CONTINUE;
9258 JS_REQUIRES_STACK void
9259 TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins)
9261 /*
9262 * Check the boolean return value (ok_ins) of a native JSNative,
9263 * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
9264 * JSBUILTIN_ERROR bit of cx->builtinStatus.
9265 *
9266 * If the return value (ok_ins) is true, status' == status. Otherwise
9267 * status' = status | JSBUILTIN_ERROR. We calculate (rval&1)^1, which is 1
9268 * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
9269 * of JSBUILTIN_ERROR.
9270 */
9271 JS_STATIC_ASSERT(((JS_TRUE & 1) ^ 1) << 1 == 0);
9272 JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == JSBUILTIN_ERROR);
9273 status_ins = lir->ins2(LIR_or,
9274 status_ins,
9275 lir->ins2i(LIR_lsh,
9276 lir->ins2i(LIR_xor,
9277 lir->ins2i(LIR_and, ok_ins, 1),
9279 1));
9280 lir->insStorei(status_ins, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
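/*
 * A minimal worked example, kept out of the build, of the branch-free error
 * propagation above: ((ok & 1) ^ 1) << 1 is 0 when ok is JS_TRUE (1) and
 * JSBUILTIN_ERROR when ok is JS_FALSE (0), exactly as the two
 * JS_STATIC_ASSERTs pin down. The helper name is illustrative only.
 */
#if 0
static int32
BuiltinStatusBitSketch(JSBool ok)
{
    return ((ok & 1) ^ 1) << 1;   /* 1 -> 0, 0 -> JSBUILTIN_ERROR */
}
#endif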
9283 JS_REQUIRES_STACK void
9284 TraceRecorder::emitNativePropertyOp(JSScope* scope, JSScopeProperty* sprop, LIns* obj_ins,
9285 bool setflag, LIns* boxed_ins)
9287 JS_ASSERT(!(sprop->attrs & (setflag ? JSPROP_SETTER : JSPROP_GETTER)));
9288 JS_ASSERT(setflag ? !SPROP_HAS_STUB_SETTER(sprop) : !SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop));
9290 enterDeepBailCall();
9292 // It is unsafe to pass the address of an object slot as the out parameter,
9293 // because the getter or setter could end up resizing the object's dslots.
9294 // Instead, use a word of stack and root it in nativeVp.
9295 LIns* vp_ins = lir->insAlloc(sizeof(jsval));
9296 lir->insStorei(vp_ins, lirbuf->state, offsetof(InterpState, nativeVp));
9297 lir->insStorei(INS_CONST(1), lirbuf->state, offsetof(InterpState, nativeVpLen));
9298 if (setflag)
9299 lir->insStorei(boxed_ins, vp_ins, 0);
9301 CallInfo* ci = (CallInfo*) lir->insSkip(sizeof(struct CallInfo))->payload();
9302 ci->_address = uintptr_t(setflag ? sprop->setter : sprop->getter);
9303 ci->_argtypes = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
9304 ARGSIZE_P << (1*ARGSIZE_SHIFT) |
9305 ARGSIZE_P << (2*ARGSIZE_SHIFT) |
9306 ARGSIZE_P << (3*ARGSIZE_SHIFT) |
9307 ARGSIZE_P << (4*ARGSIZE_SHIFT);
9308 ci->_cse = ci->_fold = 0;
9309 ci->_abi = ABI_CDECL;
9310 #ifdef DEBUG
9311 ci->_name = "JSPropertyOp";
9312 #endif
9313 LIns* args[] = { vp_ins, INS_CONSTWORD(SPROP_USERID(sprop)), obj_ins, cx_ins };
9314 LIns* ok_ins = lir->insCall(ci, args);
9316 // Cleanup. Immediately clear nativeVp before we might deep bail.
9317 lir->insStorei(INS_NULL(), lirbuf->state, offsetof(InterpState, nativeVp));
9318 leaveDeepBailCall();
9320 // Guard that the call succeeded and builtinStatus is still 0.
9321 // If the native op succeeds but we deep-bail here, the result value is
9322 // lost! Therefore this can only be used for setters of shared properties.
9323 // In that case we ignore the result value anyway.
9324 LIns* status_ins = lir->insLoad(LIR_ld,
9325 lirbuf->state,
9326 (int) offsetof(InterpState, builtinStatus));
9327 propagateFailureToBuiltinStatus(ok_ins, status_ins);
9328 guard(true, lir->ins_eq0(status_ins), STATUS_EXIT);
9330 // Re-load the value--but this is currently unused, so commented out.
9331 //boxed_ins = lir->insLoad(LIR_ldp, vp_ins, 0);
9334 JS_REQUIRES_STACK JSRecordingStatus
9335 TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
9337 bool constructing = sn->flags & JSTN_CONSTRUCTOR;
9339 if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
9340 // This needs to capture the pre-call state of the stack. So do not set
9341 // pendingSpecializedNative before taking this snapshot.
9342 JS_ASSERT(!pendingSpecializedNative);
9344 // Take snapshot for js_DeepBail and store it in cx->bailExit.
9345 // If we are calling a slow native, add information to the side exit
9346 // for SynthesizeSlowNativeFrame.
9347 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
9348 JSObject* funobj = JSVAL_TO_OBJECT(stackval(0 - (2 + argc)));
9349 if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, funobj))) {
9350 exit->setNativeCallee(funobj, constructing);
9351 treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(funobj));
9353 lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
9355 // Tell nanojit not to discard or defer stack writes before this call.
9356 LIns* guardRec = createGuardRecord(exit);
9357 lir->insGuard(LIR_xbarrier, NULL, guardRec);
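// Roughly, the code emitted for a FAIL_STATUS native is (illustrative
// sketch, not actual LIR output):
//   sti exit -> cx->bailExit    ; arm deep bail with the snapshot
//   xbarrier                    ; don't discard/defer stack writes past here
//   res = call builtin(args)    ; the insCall just below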
9360 LIns* res_ins = lir->insCall(sn->builtin, args);
9362 // Immediately unroot the vp as soon as we return, since we might deep-bail next.
9363 if (rooted)
9364 lir->insStorei(INS_NULL(), lirbuf->state, offsetof(InterpState, nativeVp));
9366 rval_ins = res_ins;
9367 switch (JSTN_ERRTYPE(sn)) {
9368 case FAIL_NULL:
9369 guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
9370 break;
9371 case FAIL_NEG:
9372 res_ins = lir->ins1(LIR_i2f, res_ins);
9373 guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmf(0)), OOM_EXIT);
9374 break;
9375 case FAIL_VOID:
9376 guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_SPECIAL(JSVAL_VOID)), OOM_EXIT);
9377 break;
9378 case FAIL_COOKIE:
9379 guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
9380 break;
9381 default:;
9384 set(&stackval(0 - (2 + argc)), res_ins);
9387 * The return value will be processed by NativeCallComplete since
9388 * we have to know the actual return value type for calls that return
9389 * jsval (like Array_p_pop).
9391 pendingSpecializedNative = sn;
9393 return JSRS_CONTINUE;
9397 * Check whether we have a specialized implementation for this native
9398 * invocation.
9400 JS_REQUIRES_STACK JSRecordingStatus
9401 TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc,
9402 bool constructing)
9404 JSStackFrame* fp = cx->fp;
9405 jsbytecode *pc = fp->regs->pc;
9407 jsval& fval = stackval(0 - (2 + argc));
9408 jsval& tval = stackval(0 - (1 + argc));
9410 LIns* this_ins = get(&tval);
9412 LIns* args[nanojit::MAXARGS];
9413 JSSpecializedNative *sn = trcinfo->specializations;
9414 JS_ASSERT(sn);
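// Each JSSpecializedNative describes one traceable signature; the list is
// terminated by the absence of JSTN_MORE. Entries whose constructor flag or
// argument count doesn't match are skipped. args[] is populated so that the
// prefix operands (cx, this, callee, pc, ... per sn->prefix) occupy the
// highest indices and the JS-level arguments (type-checked against
// sn->argtypes) the lowest, i.e. in reverse order relative to the builtin's
// C signature, as at other insCall sites in this file. For a hypothetical
// specialization with prefix "TC", argtypes "i" and argc == 1, args[] would
// end up as { f2i(arg0), this_ins, cx_ins }, i.e. builtin(cx, thisObj, intArg).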
9415 do {
9416 if (((sn->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
9417 continue;
9419 uintN knownargc = strlen(sn->argtypes);
9420 if (argc != knownargc)
9421 continue;
9423 intN prefixc = strlen(sn->prefix);
9424 JS_ASSERT(prefixc <= 3);
9425 LIns** argp = &args[argc + prefixc - 1];
9426 char argtype;
9428 #if defined DEBUG
9429 memset(args, 0xCD, sizeof(args));
9430 #endif
9432 uintN i;
9433 for (i = prefixc; i--; ) {
9434 argtype = sn->prefix[i];
9435 if (argtype == 'C') {
9436 *argp = cx_ins;
9437 } else if (argtype == 'T') { /* this, as an object */
9438 if (JSVAL_IS_PRIMITIVE(tval))
9439 goto next_specialization;
9440 *argp = this_ins;
9441 } else if (argtype == 'S') { /* this, as a string */
9442 if (!JSVAL_IS_STRING(tval))
9443 goto next_specialization;
9444 *argp = this_ins;
9445 } else if (argtype == 'f') {
9446 *argp = INS_CONSTOBJ(JSVAL_TO_OBJECT(fval));
9447 } else if (argtype == 'p') {
9448 CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval), *argp));
9449 } else if (argtype == 'R') {
9450 *argp = INS_CONSTPTR(cx->runtime);
9451 } else if (argtype == 'P') {
9452 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
9453 // JSOP_GETELEM imacro (bug 476559).
9454 if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
9455 *argp = INS_CONSTPTR(fp->imacpc);
9456 else
9457 *argp = INS_CONSTPTR(pc);
9458 } else if (argtype == 'D') { /* this, as a number */
9459 if (!isNumber(tval))
9460 goto next_specialization;
9461 *argp = this_ins;
9462 } else {
9463 JS_NOT_REACHED("unknown prefix arg type");
9465 argp--;
9468 for (i = knownargc; i--; ) {
9469 jsval& arg = stackval(0 - (i + 1));
9470 *argp = get(&arg);
9472 argtype = sn->argtypes[i];
9473 if (argtype == 'd' || argtype == 'i') {
9474 if (!isNumber(arg))
9475 goto next_specialization;
9476 if (argtype == 'i')
9477 *argp = f2i(*argp);
9478 } else if (argtype == 'o') {
9479 if (JSVAL_IS_PRIMITIVE(arg))
9480 goto next_specialization;
9481 } else if (argtype == 's') {
9482 if (!JSVAL_IS_STRING(arg))
9483 goto next_specialization;
9484 } else if (argtype == 'r') {
9485 if (!VALUE_IS_REGEXP(cx, arg))
9486 goto next_specialization;
9487 } else if (argtype == 'f') {
9488 if (!VALUE_IS_FUNCTION(cx, arg))
9489 goto next_specialization;
9490 } else if (argtype == 'v') {
9491 *argp = box_jsval(arg, *argp);
9492 } else {
9493 goto next_specialization;
9495 argp--;
9497 #if defined DEBUG
9498 JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
9499 #endif
9500 return emitNativeCall(sn, argc, args, false);
9502 next_specialization:;
9503 } while ((sn++)->flags & JSTN_MORE);
9505 return JSRS_STOP;
9508 JS_REQUIRES_STACK JSRecordingStatus
9509 TraceRecorder::callNative(uintN argc, JSOp mode)
9511 LIns* args[5];
9513 JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_APPLY);
9515 jsval* vp = &stackval(0 - (2 + argc));
9516 JSObject* funobj = JSVAL_TO_OBJECT(vp[0]);
9517 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, funobj);
9518 JSFastNative native = (JSFastNative)fun->u.n.native;
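// A few Math natives are special-cased below so that, when their operands
// are demotable integers, no builtin call is emitted at all: ceil/floor/round
// of an already-integral value is the value itself, and min/max become a
// compare-and-select (illustrative summary of the switch that follows).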
9520 switch (argc) {
9521 case 1:
9522 if (isNumber(vp[2]) &&
9523 (native == js_math_ceil || native == js_math_floor || native == js_math_round)) {
9524 LIns* a = get(&vp[2]);
9525 if (isPromote(a)) {
9526 set(&vp[0], a);
9527 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
9528 return JSRS_CONTINUE;
9531 break;
9533 case 2:
9534 if (isNumber(vp[2]) && isNumber(vp[3]) &&
9535 (native == js_math_min || native == js_math_max)) {
9536 LIns* a = get(&vp[2]);
9537 LIns* b = get(&vp[3]);
9538 if (isPromote(a) && isPromote(b)) {
9539 a = ::demote(lir, a);
9540 b = ::demote(lir, b);
9541 set(&vp[0],
9542 lir->ins1(LIR_i2f,
9543 lir->ins_choose(lir->ins2((native == js_math_min)
9544 ? LIR_lt
9545 : LIR_gt, a, b),
9546 a, b)));
9547 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
9548 return JSRS_CONTINUE;
9551 break;
9554 if (fun->flags & JSFUN_TRCINFO) {
9555 JSNativeTraceInfo *trcinfo = FUN_TRCINFO(fun);
9556 JS_ASSERT(trcinfo && (JSFastNative)fun->u.n.native == trcinfo->native);
9558 /* Try to call a type specialized version of the native. */
9559 if (trcinfo->specializations) {
9560 JSRecordingStatus status = callSpecializedNative(trcinfo, argc, mode == JSOP_NEW);
9561 if (status != JSRS_STOP)
9562 return status;
9566 if (native == js_fun_apply || native == js_fun_call)
9567 ABORT_TRACE("trying to call native apply or call");
9569 // Allocate the vp vector and emit code to root it.
9570 uintN vplen = 2 + JS_MAX(argc, FUN_MINARGS(fun)) + fun->u.n.extra;
9571 if (!(fun->flags & JSFUN_FAST_NATIVE))
9572 vplen++; // slow native return value slot
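// Illustrative sizing: a native called with argc == 3, FUN_MINARGS(fun) == 2
// and extra == 0 gets vplen == 2 + max(3, 2) + 0 == 5 (callee, this, three
// args); a slow native additionally gets one slot for its return value.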
9573 LIns* invokevp_ins = lir->insAlloc(vplen * sizeof(jsval));
9575 // vp[0] is the callee.
9576 lir->insStorei(INS_CONSTWORD(OBJECT_TO_JSVAL(funobj)), invokevp_ins, 0);
9578 // Calculate |this|.
9579 LIns* this_ins;
9580 if (mode == JSOP_NEW) {
9581 JSClass* clasp = fun->u.n.clasp;
9582 JS_ASSERT(clasp != &js_SlowArrayClass);
9583 if (!clasp)
9584 clasp = &js_ObjectClass;
9585 JS_ASSERT(((jsuword) clasp & 3) == 0);
9587 // Abort on |new Function|. js_NewInstance would allocate a regular-
9588 // sized JSObject, not a Function-sized one. (The Function ctor would
9589 // deep-bail anyway but let's not go there.)
9590 if (clasp == &js_FunctionClass)
9591 ABORT_TRACE("new Function");
9593 if (clasp->getObjectOps)
9594 ABORT_TRACE("new with non-native ops");
9596 args[0] = INS_CONSTOBJ(funobj);
9597 args[1] = INS_CONSTPTR(clasp);
9598 args[2] = cx_ins;
9599 newobj_ins = lir->insCall(&js_NewInstance_ci, args);
9600 guard(false, lir->ins_eq0(newobj_ins), OOM_EXIT);
9601 this_ins = newobj_ins; /* boxing an object is a no-op */
9602 } else if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
9603 this_ins = INS_CONSTWORD(OBJECT_TO_JSVAL(OBJ_GET_PARENT(cx, funobj)));
9604 } else {
9605 this_ins = get(&vp[1]);
9608 * For fast natives, 'null' or primitives are fine as the 'this' value.
9609 * For slow natives we have to ensure the object is substituted for the
9610 * appropriate global object or boxed object value. JSOP_NEW allocates its
9611 * own object so it's guaranteed to have a valid 'this' value.
9613 if (!(fun->flags & JSFUN_FAST_NATIVE)) {
9614 if (JSVAL_IS_NULL(vp[1])) {
9615 JSObject* thisObj = js_ComputeThis(cx, JS_FALSE, vp + 2);
9616 if (!thisObj)
9617 ABORT_TRACE_ERROR("error in js_ComputeThis");
9618 this_ins = INS_CONSTOBJ(thisObj);
9619 } else if (!JSVAL_IS_OBJECT(vp[1])) {
9620 ABORT_TRACE("slow native(primitive, args)");
9621 } else {
9622 if (guardClass(JSVAL_TO_OBJECT(vp[1]), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
9623 ABORT_TRACE("can't trace slow native invocation on With object");
9625 this_ins = lir->ins_choose(lir->ins_eq0(stobj_get_fslot(this_ins, JSSLOT_PARENT)),
9626 INS_CONSTOBJ(globalObj),
9627 this_ins);
9630 this_ins = box_jsval(vp[1], this_ins);
9632 lir->insStorei(this_ins, invokevp_ins, 1 * sizeof(jsval));
9634 VMAllocator *alloc = traceMonitor->allocator;
9635 // Populate argv.
9636 for (uintN n = 2; n < 2 + argc; n++) {
9637 LIns* i = box_jsval(vp[n], get(&vp[n]));
9638 lir->insStorei(i, invokevp_ins, n * sizeof(jsval));
9640 // For a very long argument list we might run out of LIR space, so
9641 // check inside the loop.
9642 if (alloc->outOfMemory())
9643 ABORT_TRACE("out of memory in argument list");
9646 // Populate extra slots, including the return value slot for a slow native.
9647 if (2 + argc < vplen) {
9648 LIns* undef_ins = INS_CONSTWORD(JSVAL_VOID);
9649 for (uintN n = 2 + argc; n < vplen; n++) {
9650 lir->insStorei(undef_ins, invokevp_ins, n * sizeof(jsval));
9652 if (alloc->outOfMemory())
9653 ABORT_TRACE("out of memory in extra slots");
9657 // Set up arguments for the JSNative or JSFastNative.
9658 uint32 types;
9659 if (fun->flags & JSFUN_FAST_NATIVE) {
9660 if (mode == JSOP_NEW)
9661 ABORT_TRACE("untraceable fast native constructor");
9662 native_rval_ins = invokevp_ins;
9663 args[0] = invokevp_ins;
9664 args[1] = lir->insImm(argc);
9665 args[2] = cx_ins;
9666 types = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
9667 ARGSIZE_P << (1*ARGSIZE_SHIFT) |
9668 ARGSIZE_I << (2*ARGSIZE_SHIFT) |
9669 ARGSIZE_P << (3*ARGSIZE_SHIFT);
9670 } else {
9671 native_rval_ins = lir->ins2i(LIR_piadd, invokevp_ins, int32_t((vplen - 1) * sizeof(jsval)));
9672 args[0] = native_rval_ins;
9673 args[1] = lir->ins2i(LIR_piadd, invokevp_ins, int32_t(2 * sizeof(jsval)));
9674 args[2] = lir->insImm(argc);
9675 args[3] = this_ins;
9676 args[4] = cx_ins;
9677 types = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
9678 ARGSIZE_P << (1*ARGSIZE_SHIFT) |
9679 ARGSIZE_P << (2*ARGSIZE_SHIFT) |
9680 ARGSIZE_I << (3*ARGSIZE_SHIFT) |
9681 ARGSIZE_P << (4*ARGSIZE_SHIFT) |
9682 ARGSIZE_P << (5*ARGSIZE_SHIFT);
9685 // Generate CallInfo and a JSSpecializedNative structure on the fly.
9686 // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
9687 // record_NativeCallComplete unboxes the result specially.
9689 CallInfo* ci = (CallInfo*) lir->insSkip(sizeof(struct CallInfo))->payload();
9690 ci->_address = uintptr_t(fun->u.n.native);
9691 ci->_cse = ci->_fold = 0;
9692 ci->_abi = ABI_CDECL;
9693 ci->_argtypes = types;
9694 #ifdef DEBUG
9695 ci->_name = JS_GetFunctionName(fun);
9696 #endif
9698 // Generate a JSSpecializedNative structure on the fly.
9699 generatedSpecializedNative.builtin = ci;
9700 generatedSpecializedNative.flags = FAIL_STATUS | ((mode == JSOP_NEW)
9701 ? JSTN_CONSTRUCTOR
9702 : JSTN_UNBOX_AFTER);
9703 generatedSpecializedNative.prefix = NULL;
9704 generatedSpecializedNative.argtypes = NULL;
9706 // We only have to ensure that the values we wrote into the stack buffer
9707 // are rooted if we actually make it to the call, so only set nativeVp and
9708 // nativeVpLen immediately before emitting the call code. This way we avoid
9709 // leaving trace with a bogus nativeVp in case we fall off trace while unboxing
9710 // values into the stack buffer.
9711 lir->insStorei(INS_CONST(vplen), lirbuf->state, offsetof(InterpState, nativeVpLen));
9712 lir->insStorei(invokevp_ins, lirbuf->state, offsetof(InterpState, nativeVp));
9714 // argc is the original argc here. It is used to calculate where to place
9715 // the return value.
9716 return emitNativeCall(&generatedSpecializedNative, argc, args, true);
9719 JS_REQUIRES_STACK JSRecordingStatus
9720 TraceRecorder::functionCall(uintN argc, JSOp mode)
9722 jsval& fval = stackval(0 - (2 + argc));
9723 JS_ASSERT(&fval >= StackBase(cx->fp));
9725 if (!VALUE_IS_FUNCTION(cx, fval))
9726 ABORT_TRACE("callee is not a function");
9728 jsval& tval = stackval(0 - (1 + argc));
9731 * If callee is not constant, it's a shapeless call and we have to guard
9732 * explicitly that we will get this callee again at runtime.
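 * For example (illustrative): in a sequence like 'var g = a[i]; g(x)', the
 * callee value is not a recording-time constant, so guardCallee() below pins
 * the particular function by guarding on the callee object's private (the
 * JSFunction) and its parent.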
9734 if (!get(&fval)->isconst())
9735 CHECK_STATUS(guardCallee(fval));
9738 * Require that the callee be a function object, to avoid guarding on its
9739 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
9740 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
9741 * the property cache and guard on the object (this) in which the callee
9742 * was found. So it's sufficient to test here that the particular function
9743 * is interpreted, not guard on that condition.
9745 * Bytecode sequences that push shapeless callees must guard on the callee
9746 * class being Function and the function being interpreted.
9748 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));
9750 if (FUN_INTERPRETED(fun)) {
9751 if (mode == JSOP_NEW) {
9752 LIns* args[] = { get(&fval), INS_CONSTPTR(&js_ObjectClass), cx_ins };
9753 LIns* tv_ins = lir->insCall(&js_NewInstance_ci, args);
9754 guard(false, lir->ins_eq0(tv_ins), OOM_EXIT);
9755 set(&tval, tv_ins);
9757 return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
9760 if (FUN_SLOW_NATIVE(fun)) {
9761 JSNative native = fun->u.n.native;
9762 jsval* argv = &tval + 1;
9763 if (native == js_Array)
9764 return newArray(JSVAL_TO_OBJECT(fval), argc, argv, &fval);
9765 if (native == js_String && argc == 1) {
9766 if (mode == JSOP_NEW)
9767 return newString(JSVAL_TO_OBJECT(fval), 1, argv, &fval);
9768 if (!JSVAL_IS_PRIMITIVE(argv[0])) {
9769 ABORT_IF_XML(argv[0]);
9770 return call_imacro(call_imacros.String);
9772 set(&fval, stringify(argv[0]));
9773 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
9774 return JSRS_CONTINUE;
9778 return callNative(argc, mode);
9781 JS_REQUIRES_STACK JSRecordingStatus
9782 TraceRecorder::record_JSOP_NEW()
9784 uintN argc = GET_ARGC(cx->fp->regs->pc);
9785 cx->fp->assertValidStackDepth(argc + 2);
9786 return functionCall(argc, JSOP_NEW);
9789 JS_REQUIRES_STACK JSRecordingStatus
9790 TraceRecorder::record_JSOP_DELNAME()
9792 return JSRS_STOP;
9795 JS_REQUIRES_STACK JSRecordingStatus
9796 TraceRecorder::record_JSOP_DELPROP()
9798 return JSRS_STOP;
9801 JS_REQUIRES_STACK JSRecordingStatus
9802 TraceRecorder::record_JSOP_DELELEM()
9804 return JSRS_STOP;
9807 JS_REQUIRES_STACK JSRecordingStatus
9808 TraceRecorder::record_JSOP_TYPEOF()
9810 jsval& r = stackval(-1);
9811 LIns* type;
9812 if (JSVAL_IS_STRING(r)) {
9813 type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]);
9814 } else if (isNumber(r)) {
9815 type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]);
9816 } else if (VALUE_IS_FUNCTION(cx, r)) {
9817 type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]);
9818 } else {
9819 LIns* args[] = { get(&r), cx_ins };
9820 if (JSVAL_IS_SPECIAL(r)) {
9821 // We specialize identically for boolean and undefined. We must not have a hole here.
9822 // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
9823 JS_ASSERT(r == JSVAL_TRUE || r == JSVAL_FALSE || r == JSVAL_VOID);
9824 type = lir->insCall(&js_TypeOfBoolean_ci, args);
9825 } else {
9826 JS_ASSERT(JSVAL_TAG(r) == JSVAL_OBJECT);
9827 type = lir->insCall(&js_TypeOfObject_ci, args);
9830 set(&r, type);
9831 return JSRS_CONTINUE;
9834 JS_REQUIRES_STACK JSRecordingStatus
9835 TraceRecorder::record_JSOP_VOID()
9837 stack(-1, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)));
9838 return JSRS_CONTINUE;
9841 JS_REQUIRES_STACK JSRecordingStatus
9842 TraceRecorder::record_JSOP_INCNAME()
9844 return incName(1);
9847 JS_REQUIRES_STACK JSRecordingStatus
9848 TraceRecorder::record_JSOP_INCPROP()
9850 return incProp(1);
9853 JS_REQUIRES_STACK JSRecordingStatus
9854 TraceRecorder::record_JSOP_INCELEM()
9856 return incElem(1);
9859 JS_REQUIRES_STACK JSRecordingStatus
9860 TraceRecorder::record_JSOP_DECNAME()
9862 return incName(-1);
9865 JS_REQUIRES_STACK JSRecordingStatus
9866 TraceRecorder::record_JSOP_DECPROP()
9868 return incProp(-1);
9871 JS_REQUIRES_STACK JSRecordingStatus
9872 TraceRecorder::record_JSOP_DECELEM()
9874 return incElem(-1);
9877 JS_REQUIRES_STACK JSRecordingStatus
9878 TraceRecorder::incName(jsint incr, bool pre)
9880 jsval* vp;
9881 LIns* v_ins;
9882 LIns* v_after;
9883 NameResult nr;
9885 CHECK_STATUS(name(vp, v_ins, nr));
9886 jsval v = nr.tracked ? *vp : nr.v;
9887 CHECK_STATUS(incHelper(v, v_ins, v_after, incr));
9888 LIns* v_result = pre ? v_after : v_ins;
9889 if (nr.tracked) {
9890 set(vp, v_after);
9891 stack(0, v_result);
9892 return JSRS_CONTINUE;
9895 if (OBJ_GET_CLASS(cx, nr.obj) != &js_CallClass)
9896 ABORT_TRACE("incName on unsupported object class");
9898 CHECK_STATUS(setCallProp(nr.obj, nr.obj_ins, nr.sprop, v_after, v));
9899 stack(0, v_result);
9900 return JSRS_CONTINUE;
9903 JS_REQUIRES_STACK JSRecordingStatus
9904 TraceRecorder::record_JSOP_NAMEINC()
9906 return incName(1, false);
9909 JS_REQUIRES_STACK JSRecordingStatus
9910 TraceRecorder::record_JSOP_PROPINC()
9912 return incProp(1, false);
9915 // XXX consolidate with record_JSOP_GETELEM code...
9916 JS_REQUIRES_STACK JSRecordingStatus
9917 TraceRecorder::record_JSOP_ELEMINC()
9919 return incElem(1, false);
9922 JS_REQUIRES_STACK JSRecordingStatus
9923 TraceRecorder::record_JSOP_NAMEDEC()
9925 return incName(-1, false);
9928 JS_REQUIRES_STACK JSRecordingStatus
9929 TraceRecorder::record_JSOP_PROPDEC()
9931 return incProp(-1, false);
9934 JS_REQUIRES_STACK JSRecordingStatus
9935 TraceRecorder::record_JSOP_ELEMDEC()
9937 return incElem(-1, false);
9940 JS_REQUIRES_STACK JSRecordingStatus
9941 TraceRecorder::record_JSOP_GETPROP()
9943 return getProp(stackval(-1));
9946 JS_REQUIRES_STACK JSRecordingStatus
9947 TraceRecorder::record_JSOP_SETPROP()
9949 jsval& l = stackval(-2);
9950 if (JSVAL_IS_PRIMITIVE(l))
9951 ABORT_TRACE("primitive this for SETPROP");
9953 JSObject* obj = JSVAL_TO_OBJECT(l);
9954 if (obj->map->ops->setProperty != js_SetProperty)
9955 ABORT_TRACE("non-native JSObjectOps::setProperty");
9956 return JSRS_CONTINUE;
9959 /* Emit a specialized, inlined copy of js_NativeSet. */
9960 JS_REQUIRES_STACK JSRecordingStatus
9961 TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop,
9962 jsval v, LIns* v_ins)
9964 JSScope* scope = OBJ_SCOPE(obj);
9965 uint32 slot = sprop->slot;
9968 * We do not trace assignment to properties that have both a nonstub setter
9969 * and a slot, for several reasons.
9971 * First, that would require sampling rt->propertyRemovals before and after
9972 * (see js_NativeSet), and even more code to handle the case where the two
9973 * samples differ. A mere guard is not enough, because you can't just bail
9974 * off trace in the middle of a property assignment without storing the
9975 * value and making the stack right.
9977 * If obj is the global object, there are two additional problems. We would
9978 * have to emit still more code to store the result in the object (not the
9979 * native global frame) if the setter returned successfully after
9980 * deep-bailing. And we would have to cope if the run-time type of the
9981 * setter's return value differed from the record-time type of v, in which
9982 * case unboxing would fail and, having called a native setter, we could
9983 * not just retry the instruction in the interpreter.
9985 JS_ASSERT(SPROP_HAS_STUB_SETTER(sprop) || slot == SPROP_INVALID_SLOT);
9987 // Box the value to be stored, if necessary.
9988 LIns* boxed_ins = NULL;
9989 if (!SPROP_HAS_STUB_SETTER(sprop) || (slot != SPROP_INVALID_SLOT && obj != globalObj))
9990 boxed_ins = box_jsval(v, v_ins);
9992 // Call the setter, if any.
9993 if (!SPROP_HAS_STUB_SETTER(sprop))
9994 emitNativePropertyOp(scope, sprop, obj_ins, true, boxed_ins);
9996 // Store the value, if this property has a slot.
9997 if (slot != SPROP_INVALID_SLOT) {
9998 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope));
9999 JS_ASSERT(!(sprop->attrs & JSPROP_SHARED));
10000 if (obj == globalObj) {
10001 if (!lazilyImportGlobalSlot(slot))
10002 ABORT_TRACE("lazy import of global slot failed");
10003 set(&STOBJ_GET_SLOT(obj, slot), v_ins);
10004 } else {
10005 LIns* dslots_ins = NULL;
10006 stobj_set_slot(obj_ins, slot, dslots_ins, boxed_ins);
10010 return JSRS_CONTINUE;
10013 static JSBool FASTCALL
10014 MethodWriteBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
10016 JSAutoTempValueRooter tvr(cx, funobj);
10018 return OBJ_SCOPE(obj)->methodWriteBarrier(cx, sprop, tvr.value());
10020 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT,
10021 0, 0)
10023 JS_REQUIRES_STACK JSRecordingStatus
10024 TraceRecorder::setProp(jsval &l, JSPropCacheEntry* entry, JSScopeProperty* sprop,
10025 jsval &v, LIns*& v_ins)
10027 if (entry == JS_NO_PROP_CACHE_FILL)
10028 ABORT_TRACE("can't trace uncacheable property set");
10029 JS_ASSERT_IF(PCVCAP_TAG(entry->vcap) >= 1, sprop->attrs & JSPROP_SHARED);
10030 if (!SPROP_HAS_STUB_SETTER(sprop) && sprop->slot != SPROP_INVALID_SLOT)
10031 ABORT_TRACE("can't trace set of property with setter and slot");
10032 if (sprop->attrs & JSPROP_SETTER)
10033 ABORT_TRACE("can't trace JavaScript function setter");
10035 // These two cases are errors and can't be traced.
10036 if (sprop->attrs & JSPROP_GETTER)
10037 ABORT_TRACE("can't assign to property with script getter but no setter");
10038 if (sprop->attrs & JSPROP_READONLY)
10039 ABORT_TRACE("can't assign to readonly property");
10041 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
10042 JSObject* obj = JSVAL_TO_OBJECT(l);
10043 LIns* obj_ins = get(&l);
10044 JSScope* scope = OBJ_SCOPE(obj);
10046 JS_ASSERT_IF(entry->vcap == PCVCAP_MAKE(entry->kshape, 0, 0), scope->has(sprop));
10048 // Fast path for CallClass. This is about 20% faster than the general case.
10049 v_ins = get(&v);
10050 if (OBJ_GET_CLASS(cx, obj) == &js_CallClass)
10051 return setCallProp(obj, obj_ins, sprop, v_ins, v);
10054 * Setting a function-valued property might need to rebrand the object, so
10055 * we emit a call to the method write barrier. There's no need to guard on
10056 * this, because functions have distinct trace-type from other values and
10057 * branded-ness is implied by the shape, which we've already guarded on.
10059 if (scope->branded() && VALUE_IS_FUNCTION(cx, v) && entry->directHit()) {
10060 if (obj == globalObj)
10061 ABORT_TRACE("can't trace function-valued property set in branded global scope");
10063 LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
10064 LIns* ok_ins = lir->insCall(&MethodWriteBarrier_ci, args);
10065 guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
10068 // Find obj2. If entry->adding(), the TAG bits are all 0.
10069 JSObject* obj2 = obj;
10070 for (jsuword i = PCVCAP_TAG(entry->vcap) >> PCVCAP_PROTOBITS; i; i--)
10071 obj2 = OBJ_GET_PARENT(cx, obj2);
10072 for (jsuword j = PCVCAP_TAG(entry->vcap) & PCVCAP_PROTOMASK; j; j--)
10073 obj2 = OBJ_GET_PROTO(cx, obj2);
10074 scope = OBJ_SCOPE(obj2);
10075 JS_ASSERT_IF(entry->adding(), obj2 == obj);
10077 // Guard before anything else.
10078 LIns* map_ins = map(obj_ins);
10079 CHECK_STATUS(guardNativePropertyOp(obj, map_ins));
10080 jsuword pcval;
10081 CHECK_STATUS(guardPropertyCacheHit(obj_ins, map_ins, obj, obj2, entry, pcval));
10082 JS_ASSERT(scope->object == obj2);
10083 JS_ASSERT(scope->has(sprop));
10084 JS_ASSERT_IF(obj2 != obj, sprop->attrs & JSPROP_SHARED);
10086 // Add a property to the object if necessary.
10087 if (entry->adding()) {
10088 JS_ASSERT(!(sprop->attrs & JSPROP_SHARED));
10089 if (obj == globalObj)
10090 ABORT_TRACE("adding a property to the global object");
10092 LIns* args[] = { INS_CONSTSPROP(sprop), obj_ins, cx_ins };
10093 LIns* ok_ins = lir->insCall(&js_AddProperty_ci, args);
10094 guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
10097 return nativeSet(obj, obj_ins, sprop, v, v_ins);
10100 JS_REQUIRES_STACK JSRecordingStatus
10101 TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty *sprop,
10102 LIns *v_ins, jsval v)
10104 // Set variables in on-trace-stack call objects by updating the tracker.
10105 JSStackFrame *fp = frameIfInRange(callobj);
10106 if (fp) {
10107 jsint slot = JSVAL_TO_INT(SPROP_USERID(sprop));
10108 if (sprop->setter == SetCallArg) {
10109 jsval *vp2 = &fp->argv[slot];
10110 set(vp2, v_ins);
10111 return JSRS_CONTINUE;
10113 if (sprop->setter == SetCallVar) {
10114 jsval *vp2 = &fp->slots[slot];
10115 set(vp2, v_ins);
10116 return JSRS_CONTINUE;
10118 ABORT_TRACE("can't trace special CallClass setter");
10121 // Set variables in off-trace-stack call objects by calling standard builtins.
10122 const CallInfo* ci = NULL;
10123 if (sprop->setter == SetCallArg)
10124 ci = &js_SetCallArg_ci;
10125 else if (sprop->setter == SetCallVar)
10126 ci = &js_SetCallVar_ci;
10127 else
10128 ABORT_TRACE("can't trace special CallClass setter");
10130 LIns* args[] = {
10131 box_jsval(v, v_ins),
10132 INS_CONST(SPROP_USERID(sprop)),
10133 callobj_ins,
10134 cx_ins
10136 LIns* call_ins = lir->insCall(ci, args);
10137 guard(false, addName(lir->ins_eq0(call_ins), "guard(set upvar)"), STATUS_EXIT);
10138 return JSRS_CONTINUE;
10141 JS_REQUIRES_STACK JSRecordingStatus
10142 TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop)
10144 jsval& r = stackval(-1);
10145 jsval& l = stackval(-2);
10146 LIns* v_ins;
10147 CHECK_STATUS(setProp(l, entry, sprop, r, v_ins));
10149 jsbytecode* pc = cx->fp->regs->pc;
10150 switch (*pc) {
10151 case JSOP_SETPROP:
10152 case JSOP_SETNAME:
10153 case JSOP_SETMETHOD:
10154 if (pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
10155 set(&l, v_ins);
10156 break;
10158 default:;
10161 return JSRS_CONTINUE;
10164 JS_REQUIRES_STACK void
10165 TraceRecorder::enterDeepBailCall()
10167 // Take snapshot for js_DeepBail and store it in cx->bailExit.
10168 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
10169 lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
10171 // Tell nanojit not to discard or defer stack writes before this call.
10172 LIns* guardRec = createGuardRecord(exit);
10173 lir->insGuard(LIR_xbarrier, NULL, guardRec);
10176 JS_REQUIRES_STACK void
10177 TraceRecorder::leaveDeepBailCall()
10179 // Keep cx->bailExit null when it's invalid.
10180 lir->insStorei(INS_NULL(), cx_ins, offsetof(JSContext, bailExit));
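/*
 * Common tail for the deep-bailing property-get builtins below
 * (GetPropertyByName/ByIndex/ById/WithNativeGetter): store the boxed result
 * before the guard, then defer the success guard and the unboxing to
 * monitorRecording via pendingGuardCondition and pendingUnboxSlot.
 */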
10183 JS_REQUIRES_STACK void
10184 TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, jsval* outp)
10186 // Store the boxed result (and this-object, if JOF_CALLOP) before the
10187 // guard. The deep-bail case requires this. If the property get fails,
10188 // these slots will be ignored anyway.
10189 LIns* result_ins = lir->insLoad(LIR_ldp, vp_ins, 0);
10190 set(outp, result_ins, true);
10191 if (js_CodeSpec[*cx->fp->regs->pc].format & JOF_CALLOP)
10192 set(outp + 1, obj_ins, true);
10194 // We need to guard on ok_ins, but this requires a snapshot of the state
10195 // after this op. monitorRecording will do it for us.
10196 pendingGuardCondition = ok_ins;
10198 // Note there is a boxed result sitting on the stack. The caller must leave
10199 // it there for the time being, since the return type is not yet
10200 // known. monitorRecording will emit the code to unbox it.
10201 pendingUnboxSlot = outp;
10204 static inline bool
10205 RootedStringToId(JSContext* cx, JSString** namep, jsid* idp)
10207 JSString* name = *namep;
10208 if (name->isAtomized()) {
10209 *idp = ATOM_TO_JSID((JSAtom*) STRING_TO_JSVAL(name));
10210 return true;
10213 JSAtom* atom = js_AtomizeString(cx, name, 0);
10214 if (!atom)
10215 return false;
10216 *namep = ATOM_TO_STRING(atom); /* write back to GC root */
10217 *idp = ATOM_TO_JSID(atom);
10218 return true;
10221 static JSBool FASTCALL
10222 GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval* vp)
10224 js_LeaveTraceIfGlobalObject(cx, obj);
10226 jsid id;
10227 if (!RootedStringToId(cx, namep, &id) || !obj->getProperty(cx, id, vp)) {
10228 js_SetBuiltinError(cx);
10229 return false;
10231 return cx->interpState->builtinStatus == 0;
10233 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVALPTR,
10234 0, 0)
10236 // Convert the value in a slot to a string and store the resulting string back
10237 // in the slot (typically in order to root it).
10238 JS_REQUIRES_STACK JSRecordingStatus
10239 TraceRecorder::primitiveToStringInPlace(jsval* vp)
10241 jsval v = *vp;
10242 JS_ASSERT(JSVAL_IS_PRIMITIVE(v));
10244 if (!JSVAL_IS_STRING(v)) {
10245 // v is not a string. Turn it into one. js_ValueToString is safe
10246 // because v is not an object.
10247 JSString *str = js_ValueToString(cx, v);
10248 if (!str)
10249 ABORT_TRACE_ERROR("failed to stringify element id");
10250 v = STRING_TO_JSVAL(str);
10251 set(vp, stringify(*vp));
10253 // Write the string back to the stack to save the interpreter some work
10254 // and to ensure snapshots get the correct type for this slot.
10255 *vp = v;
10257 return JSRS_CONTINUE;
10260 JS_REQUIRES_STACK JSRecordingStatus
10261 TraceRecorder::getPropertyByName(LIns* obj_ins, jsval* idvalp, jsval* outp)
10263 CHECK_STATUS(primitiveToStringInPlace(idvalp));
10264 enterDeepBailCall();
10266 // Call GetPropertyByName. The vp parameter points to stack because this is
10267 // what the interpreter currently does. obj and id are rooted on the
10268 // interpreter stack, but the slot at vp is not a root.
10269 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10270 LIns* idvalp_ins = addName(addr(idvalp), "idvalp");
10271 LIns* args[] = {vp_ins, idvalp_ins, obj_ins, cx_ins};
10272 LIns* ok_ins = lir->insCall(&GetPropertyByName_ci, args);
10274 // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
10275 // entry for that address. Correct it. (If the value in the address is
10276 // never used again, the usual case, Nanojit will kill this load.)
10277 tracker.set(idvalp, lir->insLoad(LIR_ldp, idvalp_ins, 0));
10279 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
10280 leaveDeepBailCall();
10281 return JSRS_CONTINUE;
10284 static JSBool FASTCALL
10285 GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval* vp)
10287 js_LeaveTraceIfGlobalObject(cx, obj);
10289 JSAutoTempIdRooter idr(cx);
10290 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
10291 js_SetBuiltinError(cx);
10292 return JS_FALSE;
10294 return cx->interpState->builtinStatus == 0;
10296 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, JSVALPTR, 0, 0)
10298 JS_REQUIRES_STACK JSRecordingStatus
10299 TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, jsval* outp)
10301 index_ins = makeNumberInt32(index_ins);
10303 // See note in getPropertyByName about vp.
10304 enterDeepBailCall();
10305 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10306 LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
10307 LIns* ok_ins = lir->insCall(&GetPropertyByIndex_ci, args);
10308 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
10309 leaveDeepBailCall();
10310 return JSRS_CONTINUE;
10313 static JSBool FASTCALL
10314 GetPropertyById(JSContext* cx, JSObject* obj, jsid id, jsval* vp)
10316 js_LeaveTraceIfGlobalObject(cx, obj);
10317 if (!obj->getProperty(cx, id, vp)) {
10318 js_SetBuiltinError(cx);
10319 return JS_FALSE;
10321 return cx->interpState->builtinStatus == 0;
10323 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById,
10324 CONTEXT, OBJECT, JSVAL, JSVALPTR, 0, 0)
10326 JS_REQUIRES_STACK JSRecordingStatus
10327 TraceRecorder::getPropertyById(LIns* obj_ins, jsval* outp)
10329 // Find the atom.
10330 JSAtom* atom;
10331 jsbytecode* pc = cx->fp->regs->pc;
10332 const JSCodeSpec& cs = js_CodeSpec[*pc];
10333 if (*pc == JSOP_LENGTH) {
10334 atom = cx->runtime->atomState.lengthAtom;
10335 } else if (JOF_TYPE(cs.format) == JOF_ATOM) {
10336 atom = atoms[GET_INDEX(pc)];
10337 } else {
10338 JS_ASSERT(JOF_TYPE(cs.format) == JOF_SLOTATOM);
10339 atom = atoms[GET_INDEX(pc + SLOTNO_LEN)];
10342 // Call GetPropertyById. See note in getPropertyByName about vp.
10343 enterDeepBailCall();
10344 jsid id = ATOM_TO_JSID(atom);
10345 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10346 LIns* args[] = {vp_ins, INS_CONSTWORD(id), obj_ins, cx_ins};
10347 LIns* ok_ins = lir->insCall(&GetPropertyById_ci, args);
10348 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
10349 leaveDeepBailCall();
10350 return JSRS_CONTINUE;
10353 /* Manually inlined, specialized copy of js_NativeGet. */
10354 static JSBool FASTCALL
10355 GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, jsval* vp)
10357 js_LeaveTraceIfGlobalObject(cx, obj);
10359 #ifdef DEBUG
10360 JSProperty* prop;
10361 JSObject* pobj;
10362 JS_ASSERT(obj->lookupProperty(cx, sprop->id, &pobj, &prop));
10363 JS_ASSERT(prop == (JSProperty*) sprop);
10364 obj->dropProperty(cx, prop);
10365 #endif
10367 // JSScopeProperty::get contains a special case for With objects. We can
10368 // elide it here because With objects are, we claim, never on the operand
10369 // stack while recording.
10370 JS_ASSERT(STOBJ_GET_CLASS(obj) != &js_WithClass);
10372 *vp = JSVAL_VOID;
10373 if (!sprop->getter(cx, obj, SPROP_USERID(sprop), vp)) {
10374 js_SetBuiltinError(cx);
10375 return JS_FALSE;
10377 return cx->interpState->builtinStatus == 0;
10379 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
10380 CONTEXT, OBJECT, SCOPEPROP, JSVALPTR, 0, 0)
10382 JS_REQUIRES_STACK JSRecordingStatus
10383 TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, JSScopeProperty* sprop, jsval* outp)
10385 JS_ASSERT(!(sprop->attrs & JSPROP_GETTER));
10386 JS_ASSERT(sprop->slot == SPROP_INVALID_SLOT);
10387 JS_ASSERT(!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop));
10389 // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
10390 // FIXME - We should call the getter directly. Using a builtin function for
10391 // now because it buys some extra asserts. See bug 508310.
10392 enterDeepBailCall();
10393 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10394 LIns* args[] = {vp_ins, INS_CONSTPTR(sprop), obj_ins, cx_ins};
10395 LIns* ok_ins = lir->insCall(&GetPropertyWithNativeGetter_ci, args);
10396 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
10397 leaveDeepBailCall();
10398 return JSRS_CONTINUE;
10401 JS_REQUIRES_STACK JSRecordingStatus
10402 TraceRecorder::record_JSOP_GETELEM()
10404 bool call = *cx->fp->regs->pc == JSOP_CALLELEM;
10406 jsval& idx = stackval(-1);
10407 jsval& lval = stackval(-2);
10409 LIns* obj_ins = get(&lval);
10410 LIns* idx_ins = get(&idx);
10412 // Special case for array-like access of strings.
10413 if (JSVAL_IS_STRING(lval) && isInt32(idx)) {
10414 if (call)
10415 ABORT_TRACE("JSOP_CALLELEM on a string");
10416 int i = asInt32(idx);
10417 if (size_t(i) >= JSVAL_TO_STRING(lval)->length())
10418 ABORT_TRACE("Invalid string index in JSOP_GETELEM");
10419 idx_ins = makeNumberInt32(idx_ins);
10420 LIns* args[] = { idx_ins, obj_ins, cx_ins };
10421 LIns* unitstr_ins = lir->insCall(&js_String_getelem_ci, args);
10422 guard(false, lir->ins_eq0(unitstr_ins), MISMATCH_EXIT);
10423 set(&lval, unitstr_ins);
10424 return JSRS_CONTINUE;
10427 if (JSVAL_IS_PRIMITIVE(lval))
10428 ABORT_TRACE("JSOP_GETLEM on a primitive");
10429 ABORT_IF_XML(lval);
10431 JSObject* obj = JSVAL_TO_OBJECT(lval);
10432 if (obj == globalObj)
10433 ABORT_TRACE("JSOP_GETELEM on global");
10434 LIns* v_ins;
10436 /* Property access using a string name or something we have to stringify. */
10437 if (!JSVAL_IS_INT(idx)) {
10438 if (!JSVAL_IS_PRIMITIVE(idx))
10439 ABORT_TRACE("object used as index");
10441 return getPropertyByName(obj_ins, &idx, &lval);
10444 if (STOBJ_GET_CLASS(obj) == &js_ArgumentsClass) {
10445 unsigned depth;
10446 JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
10447 if (afp) {
10448 uintN int_idx = JSVAL_TO_INT(idx);
10449 jsval* vp = &afp->argv[int_idx];
10450 if (idx_ins->isconstf()) {
10451 if (int_idx >= 0 && int_idx < afp->argc)
10452 v_ins = get(vp);
10453 else
10454 v_ins = INS_VOID();
10455 } else {
10456 // If the index is not a constant expression, we generate LIR to load the value from
10457 // the native stack area. The guard on js_ArgumentsClass above ensures the up-to-date
10458 // value has been written back to the native stack area.
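// Concretely (see the code below): guard that the recorded JSTraceType for
// argv[idx] still matches, guard idx < argc, then compute
// sp + stackOffset + idx * sizeof(double) and load the value via stackLoad().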
10459 idx_ins = makeNumberInt32(idx_ins);
10460 if (int_idx >= 0 && int_idx < afp->argc) {
10461 JSTraceType type = getCoercedType(*vp);
10463 // Guard that the argument has the same type on trace as during recording.
10464 LIns* typemap_ins;
10465 if (callDepth == depth) {
10466 // In this case, we are in the same frame where the arguments object was created.
10467 // The entry type map is not necessarily up-to-date, so we capture a new type map
10468 // for this point in the code.
10469 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
10470 if (stackSlots * sizeof(JSTraceType) > LirBuffer::MAX_SKIP_PAYLOAD_SZB)
10471 ABORT_TRACE("|arguments| requires saving too much stack");
10472 JSTraceType* typemap = (JSTraceType*) lir->insSkip(stackSlots * sizeof(JSTraceType))->payload();
10473 DetermineTypesVisitor detVisitor(*this, typemap);
10474 VisitStackSlots(detVisitor, cx, 0);
10475 typemap_ins = INS_CONSTPTR(typemap + 2 /* callee, this */);
10476 } else {
10477 // In this case, we are in a deeper frame from where the arguments object was
10478 // created. The type map at the point of the call out from the creation frame
10479 // is accurate.
10480 // Note: this relies on the assumption that we abort on setting an element of
10481 // an arguments object in any deeper frame.
10482 LIns* fip_ins = lir->insLoad(LIR_ldp, lirbuf->rp, (callDepth-depth)*sizeof(FrameInfo*));
10483 typemap_ins = lir->ins2(LIR_add, fip_ins, INS_CONST(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSTraceType)));
10486 LIns* typep_ins = lir->ins2(LIR_add, typemap_ins,
10487 lir->ins2(LIR_mul, idx_ins, INS_CONST(sizeof(JSTraceType))));
10488 LIns* type_ins = lir->insLoad(LIR_ldcb, typep_ins, 0);
10489 guard(true,
10490 addName(lir->ins2(LIR_eq, type_ins, lir->insImm(type)),
10491 "guard(type-stable upvar)"),
10492 BRANCH_EXIT);
10494 // Read the value out of the native stack area.
10495 guard(true, lir->ins2(LIR_ult, idx_ins, INS_CONST(afp->argc)),
10496 snapshot(BRANCH_EXIT));
10497 size_t stackOffset = -treeInfo->nativeStackBase + nativeStackOffset(&afp->argv[0]);
10498 LIns* args_addr_ins = lir->ins2(LIR_add, lirbuf->sp, INS_CONST(stackOffset));
10499 LIns* argi_addr_ins = lir->ins2(LIR_add, args_addr_ins,
10500 lir->ins2(LIR_mul, idx_ins, INS_CONST(sizeof(double))));
10501 v_ins = stackLoad(argi_addr_ins, type);
10502 } else {
10503 guard(false, lir->ins2(LIR_ult, idx_ins, INS_CONST(afp->argc)),
10504 snapshot(BRANCH_EXIT));
10505 v_ins = INS_VOID();
10508 JS_ASSERT(v_ins);
10509 set(&lval, v_ins);
10510 return JSRS_CONTINUE;
10512 ABORT_TRACE("can't reach arguments object's frame");
10514 if (js_IsDenseArray(obj)) {
10515 // Fast path for dense arrays accessed with an integer index.
10516 jsval* vp;
10517 LIns* addr_ins;
10519 guardDenseArray(obj, obj_ins, BRANCH_EXIT);
10520 CHECK_STATUS(denseArrayElement(lval, idx, vp, v_ins, addr_ins));
10521 set(&lval, v_ins);
10522 if (call)
10523 set(&idx, obj_ins);
10524 return JSRS_CONTINUE;
10527 return getPropertyByIndex(obj_ins, idx_ins, &lval);
10530 /* Functions used by JSOP_SETELEM */
10532 static JSBool FASTCALL
10533 SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval* vp)
10535 js_LeaveTraceIfGlobalObject(cx, obj);
10537 jsid id;
10538 if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp)) {
10539 js_SetBuiltinError(cx);
10540 return JS_FALSE;
10542 return cx->interpState->builtinStatus == 0;
10544 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, SetPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVALPTR,
10545 0, 0)
10547 static JSBool FASTCALL
10548 InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval val)
10550 js_LeaveTraceIfGlobalObject(cx, obj);
10552 jsid id;
10553 if (!RootedStringToId(cx, namep, &id) ||
10554 !obj->defineProperty(cx, id, val, NULL, NULL, JSPROP_ENUMERATE)) {
10555 js_SetBuiltinError(cx);
10556 return JS_FALSE;
10558 return cx->interpState->builtinStatus == 0;
10560 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVAL,
10561 0, 0)
10563 JS_REQUIRES_STACK JSRecordingStatus
10564 TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, jsval* idvalp, jsval* rvalp, bool init)
10566 CHECK_STATUS(primitiveToStringInPlace(idvalp));
10568 LIns* rval_ins = box_jsval(*rvalp, get(rvalp));
10570 enterDeepBailCall();
10572 LIns* ok_ins;
10573 LIns* idvalp_ins = addName(addr(idvalp), "idvalp");
10574 if (init) {
10575 LIns* args[] = {rval_ins, idvalp_ins, obj_ins, cx_ins};
10576 ok_ins = lir->insCall(&InitPropertyByName_ci, args);
10577 } else {
10578 // See note in getPropertyByName about vp.
10579 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10580 lir->insStorei(rval_ins, vp_ins, 0);
10581 LIns* args[] = {vp_ins, idvalp_ins, obj_ins, cx_ins};
10582 ok_ins = lir->insCall(&SetPropertyByName_ci, args);
10584 guard(true, ok_ins, STATUS_EXIT);
10586 leaveDeepBailCall();
10587 return JSRS_CONTINUE;
10590 static JSBool FASTCALL
10591 SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval* vp)
10593 js_LeaveTraceIfGlobalObject(cx, obj);
10595 JSAutoTempIdRooter idr(cx);
10596 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp)) {
10597 js_SetBuiltinError(cx);
10598 return JS_FALSE;
10600 return cx->interpState->builtinStatus == 0;
10602 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, JSVALPTR, 0, 0)
10604 static JSBool FASTCALL
10605 InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval val)
10607 js_LeaveTraceIfGlobalObject(cx, obj);
10609 JSAutoTempIdRooter idr(cx);
10610 if (!js_Int32ToId(cx, index, idr.addr()) ||
10611 !obj->defineProperty(cx, idr.id(), val, NULL, NULL, JSPROP_ENUMERATE)) {
10612 js_SetBuiltinError(cx);
10613 return JS_FALSE;
10615 return cx->interpState->builtinStatus == 0;
10617 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, JSVAL, 0, 0)
10619 JS_REQUIRES_STACK JSRecordingStatus
10620 TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, jsval* rvalp, bool init)
10622 index_ins = makeNumberInt32(index_ins);
10624 LIns* rval_ins = box_jsval(*rvalp, get(rvalp));
10626 enterDeepBailCall();
10628 LIns* ok_ins;
10629 if (init) {
10630 LIns* args[] = {rval_ins, index_ins, obj_ins, cx_ins};
10631 ok_ins = lir->insCall(&InitPropertyByIndex_ci, args);
10632 } else {
10633 // See note in getPropertyByName about vp.
10634 LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
10635 lir->insStorei(rval_ins, vp_ins, 0);
10636 LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
10637 ok_ins = lir->insCall(&SetPropertyByIndex_ci, args);
10639 guard(true, ok_ins, STATUS_EXIT);
10641 leaveDeepBailCall();
10642 return JSRS_CONTINUE;
10645 JS_REQUIRES_STACK JSRecordingStatus
10646 TraceRecorder::record_JSOP_SETELEM()
10648 jsval& v = stackval(-1);
10649 jsval& idx = stackval(-2);
10650 jsval& lval = stackval(-3);
10652 if (JSVAL_IS_PRIMITIVE(lval))
10653 ABORT_TRACE("left JSOP_SETELEM operand is not an object");
10654 ABORT_IF_XML(lval);
10656 JSObject* obj = JSVAL_TO_OBJECT(lval);
10657 LIns* obj_ins = get(&lval);
10658 LIns* idx_ins = get(&idx);
10659 LIns* v_ins = get(&v);
10661 if (!JSVAL_IS_INT(idx)) {
10662 if (!JSVAL_IS_PRIMITIVE(idx))
10663 ABORT_TRACE("non-primitive index");
10664 CHECK_STATUS(initOrSetPropertyByName(obj_ins, &idx, &v,
10665 *cx->fp->regs->pc == JSOP_INITELEM));
10666 } else if (JSVAL_TO_INT(idx) < 0 || !OBJ_IS_DENSE_ARRAY(cx, obj)) {
10667 CHECK_STATUS(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
10668 *cx->fp->regs->pc == JSOP_INITELEM));
10669 } else {
10670 // Fast path: assigning to element of dense array.
10672 // Make sure the array is actually dense.
10673 if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
10674 return JSRS_STOP;
10676 // The index was on the stack and is therefore a LIR float. Force it to
10677 // be an integer.
10678 idx_ins = makeNumberInt32(idx_ins);
10680 // Box the value so we can use one builtin instead of having to add one
10681 // builtin for every storage type. Special case for integers though,
10682 // since they are so common.
10683 LIns* res_ins;
10684 LIns* args[] = { NULL, idx_ins, obj_ins, cx_ins };
10685 if (isNumber(v)) {
10686 if (isPromoteInt(v_ins)) {
10687 args[0] = ::demote(lir, v_ins);
10688 res_ins = lir->insCall(&js_Array_dense_setelem_int_ci, args);
10689 } else {
10690 args[0] = v_ins;
10691 res_ins = lir->insCall(&js_Array_dense_setelem_double_ci, args);
10693 } else {
10694 LIns* args[] = { box_jsval(v, v_ins), idx_ins, obj_ins, cx_ins };
10695 res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
10697 guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
10700 jsbytecode* pc = cx->fp->regs->pc;
10701 if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
10702 set(&lval, v_ins);
10704 return JSRS_CONTINUE;
10707 JS_REQUIRES_STACK JSRecordingStatus
10708 TraceRecorder::record_JSOP_CALLNAME()
10710 JSObject* obj = cx->fp->scopeChain;
10711 if (obj != globalObj) {
10712 jsval* vp;
10713 LIns* ins;
10714 NameResult nr;
10715 CHECK_STATUS(scopeChainProp(obj, vp, ins, nr));
10716 stack(0, ins);
10717 stack(1, INS_CONSTOBJ(globalObj));
10718 return JSRS_CONTINUE;
10721 LIns* obj_ins = scopeChain();
10722 JSObject* obj2;
10723 jsuword pcval;
10725 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
10727 if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
10728 ABORT_TRACE("callee is not an object");
10730 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
10732 stack(0, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval)));
10733 stack(1, obj_ins);
10734 return JSRS_CONTINUE;
10737 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarArgOnTrace, CONTEXT, UINT32, INT32, UINT32,
10738 DOUBLEPTR, 0, 0)
10739 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarVarOnTrace, CONTEXT, UINT32, INT32, UINT32,
10740 DOUBLEPTR, 0, 0)
10741 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarStackOnTrace, CONTEXT, UINT32, INT32, UINT32,
10742 DOUBLEPTR, 0, 0)
10745 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
10746 * value. NULL is returned only on a can't-happen condition with an invalid
10747 * typemap. The value of the upvar is returned as v.
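 * The upvar cookie encodes a frame skip count (UPVAR_FRAME_SKIP) and a slot
 * in that frame (UPVAR_FRAME_SLOT). Depending on whether the target frame
 * has a function and whether the slot names an argument, a local variable,
 * or the callee, one of the GetUpvar{Arg,Var,Stack}OnTrace builtins declared
 * above is called; it writes the unboxed value into the 8-byte out slot
 * allocated below and returns its type, which we guard on before loading the
 * value with stackLoad().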
10749 JS_REQUIRES_STACK LIns*
10750 TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, jsval& v)
10753 * Try to find the upvar in the current trace's tracker. For &vr to be
10754 * the address of the jsval found in js_GetUpvar, we must initialize
10755 * vr directly with the result, so it is a reference to the same location.
10756 * It does not work to assign the result to v, because v is an already
10757 * existing reference that points to something else.
10759 uint32 cookie = uva->vector[index];
10760 jsval& vr = js_GetUpvar(cx, script->staticLevel, cookie);
10761 v = vr;
10762 LIns* upvar_ins = get(&vr);
10763 if (upvar_ins) {
10764 return upvar_ins;
10768 * The upvar is not in the current trace, so get the upvar value exactly as
10769 * the interpreter does and unbox.
10771 uint32 level = script->staticLevel - UPVAR_FRAME_SKIP(cookie);
10772 uint32 cookieSlot = UPVAR_FRAME_SLOT(cookie);
10773 JSStackFrame* fp = cx->display[level];
10774 const CallInfo* ci;
10775 int32 slot;
10776 if (!fp->fun) {
10777 ci = &GetUpvarStackOnTrace_ci;
10778 slot = cookieSlot;
10779 } else if (cookieSlot < fp->fun->nargs) {
10780 ci = &GetUpvarArgOnTrace_ci;
10781 slot = cookieSlot;
10782 } else if (cookieSlot == CALLEE_UPVAR_SLOT) {
10783 ci = &GetUpvarArgOnTrace_ci;
10784 slot = -2;
10785 } else {
10786 ci = &GetUpvarVarOnTrace_ci;
10787 slot = cookieSlot - fp->fun->nargs;
10790 LIns* outp = lir->insAlloc(sizeof(double));
10791 LIns* args[] = {
10792 outp,
10793 INS_CONST(callDepth),
10794 INS_CONST(slot),
10795 INS_CONST(level),
10796 cx_ins
10798 LIns* call_ins = lir->insCall(ci, args);
10799 JSTraceType type = getCoercedType(v);
10800 guard(true,
10801 addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
10802 "guard(type-stable upvar)"),
10803 BRANCH_EXIT);
10804 return stackLoad(outp, type);
10808 * Generate LIR to load a value from the native stack. This method ensures that
10809 * the correct LIR load operator is used.
10811 LIns* TraceRecorder::stackLoad(LIns* base, uint8 type)
10813 LOpcode loadOp;
10814 switch (type) {
10815 case TT_DOUBLE:
10816 loadOp = LIR_ldq;
10817 break;
10818 case TT_OBJECT:
10819 case TT_STRING:
10820 case TT_FUNCTION:
10821 case TT_NULL:
10822 loadOp = LIR_ldp;
10823 break;
10824 case TT_INT32:
10825 case TT_PSEUDOBOOLEAN:
10826 loadOp = LIR_ld;
10827 break;
10828 case TT_JSVAL:
10829 default:
10830 JS_NOT_REACHED("found jsval type in an upvar type map entry");
10831 return NULL;
10834 LIns* result = lir->insLoad(loadOp, base, 0);
10835 if (type == TT_INT32)
10836 result = lir->ins1(LIR_i2f, result);
10837 return result;
10840 JS_REQUIRES_STACK JSRecordingStatus
10841 TraceRecorder::record_JSOP_GETUPVAR()
10843 uintN index = GET_UINT16(cx->fp->regs->pc);
10844 JSScript *script = cx->fp->script;
10845 JSUpvarArray* uva = script->upvars();
10846 JS_ASSERT(index < uva->length);
10848 jsval v;
10849 LIns* upvar_ins = upvar(script, uva, index, v);
10850 if (!upvar_ins)
10851 return JSRS_STOP;
10852 stack(0, upvar_ins);
10853 return JSRS_CONTINUE;
10856 JS_REQUIRES_STACK JSRecordingStatus
10857 TraceRecorder::record_JSOP_CALLUPVAR()
10859 CHECK_STATUS(record_JSOP_GETUPVAR());
10860 stack(1, INS_NULL());
10861 return JSRS_CONTINUE;
10864 JS_REQUIRES_STACK JSRecordingStatus
10865 TraceRecorder::record_JSOP_GETDSLOT()
10867 JSObject* callee = JSVAL_TO_OBJECT(cx->fp->argv[-2]);
10868 LIns* callee_ins = get(&cx->fp->argv[-2]);
10870 unsigned index = GET_UINT16(cx->fp->regs->pc);
10871 LIns* dslots_ins = NULL;
10872 LIns* v_ins = stobj_get_dslot(callee_ins, index, dslots_ins);
10874 stack(0, unbox_jsval(callee->dslots[index], v_ins, snapshot(BRANCH_EXIT)));
10875 return JSRS_CONTINUE;
10878 JS_REQUIRES_STACK JSRecordingStatus
10879 TraceRecorder::record_JSOP_CALLDSLOT()
10881 CHECK_STATUS(record_JSOP_GETDSLOT());
10882 stack(1, INS_NULL());
10883 return JSRS_CONTINUE;
10886 JS_REQUIRES_STACK JSRecordingStatus
10887 TraceRecorder::guardCallee(jsval& callee)
10889 JS_ASSERT(VALUE_IS_FUNCTION(cx, callee));
10891 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
10892 JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
10893 LIns* callee_ins = get(&callee);
10895 treeInfo->gcthings.addUnique(callee);
10896 guard(true,
10897 lir->ins2(LIR_eq,
10898 stobj_get_private(callee_ins),
10899 INS_CONSTPTR(callee_obj->getPrivate())),
10900 branchExit);
10901 guard(true,
10902 lir->ins2(LIR_eq,
10903 stobj_get_parent(callee_ins),
10904 INS_CONSTOBJ(OBJ_GET_PARENT(cx, callee_obj))),
10905 branchExit);
10906 return JSRS_CONTINUE;
10910 * Prepare the given |arguments| object to be accessed on trace. If the return
10911 * value is non-NULL, then the given |arguments| object refers to a frame on
10912 * the current trace and is guaranteed to refer to the same frame on trace for
10913 * all later executions.
10915 JS_REQUIRES_STACK JSStackFrame *
10916 TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp)
10918 JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_ArgumentsClass);
10920 JSStackFrame *afp = frameIfInRange(obj, depthp);
10921 if (!afp)
10922 return NULL;
10924 VMSideExit *exit = snapshot(MISMATCH_EXIT);
10925 guardClass(obj, obj_ins, &js_ArgumentsClass, exit);
10927 LIns* args_ins = get(&afp->argsobj);
10928 LIns* cmp = lir->ins2(LIR_eq, args_ins, obj_ins);
10929 lir->insGuard(LIR_xf, cmp, createGuardRecord(exit));
10930 return afp;
10933 JS_REQUIRES_STACK JSRecordingStatus
10934 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
10936 if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
10937 ABORT_TRACE("JSOP_CALL or JSOP_NEW crosses global scopes");
10939 JSStackFrame* fp = cx->fp;
10941 // TODO: track the copying via the tracker...
10942 if (argc < fun->nargs &&
10943 jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
10944 ABORT_TRACE("can't trace calls with too few args requiring argv move");
10947 // Generate a type map for the outgoing frame and stash it in the LIR
10948 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
10949 if (sizeof(FrameInfo) + stackSlots * sizeof(JSTraceType) > LirBuffer::MAX_SKIP_PAYLOAD_SZB)
10950 ABORT_TRACE("interpreted function call requires saving too much stack");
10951 LIns* data = lir->insSkip(sizeof(FrameInfo) + stackSlots * sizeof(JSTraceType));
10952 FrameInfo* fi = (FrameInfo*)data->payload();
10953 JSTraceType* typemap = reinterpret_cast<JSTraceType *>(fi + 1);
10955 DetermineTypesVisitor detVisitor(*this, typemap);
10956 VisitStackSlots(detVisitor, cx, 0);
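// Note: the FrameInfo in the skip payload is immediately followed by a type
// map of the caller's stack (callee and this included), captured by
// DetermineTypesVisitor above; the arguments-object path in
// record_JSOP_GETELEM reads this map back through lirbuf->rp.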
10958 JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);
10960 treeInfo->gcthings.addUnique(fval);
10961 fi->block = fp->blockChain;
10962 if (fp->blockChain)
10963 treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
10964 fi->pc = fp->regs->pc;
10965 fi->imacpc = fp->imacpc;
10966 fi->spdist = fp->regs->sp - fp->slots;
10967 fi->set_argc(argc, constructing);
10968 fi->callerHeight = NativeStackSlots(cx, 0) - (2 + argc);
10969 fi->callerArgc = fp->argc;
10971 unsigned callDepth = getCallDepth();
10972 if (callDepth >= treeInfo->maxCallDepth)
10973 treeInfo->maxCallDepth = callDepth + 1;
10975 lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
10977 atoms = fun->u.i.script->atomMap.vector;
10978 return JSRS_CONTINUE;
10981 JS_REQUIRES_STACK JSRecordingStatus
10982 TraceRecorder::record_JSOP_CALL()
10984 uintN argc = GET_ARGC(cx->fp->regs->pc);
10985 cx->fp->assertValidStackDepth(argc + 2);
10986 return functionCall(argc,
10987 (cx->fp->imacpc && *cx->fp->imacpc == JSOP_APPLY)
10988 ? JSOP_APPLY
10989 : JSOP_CALL);
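/*
 * Calls to Function.prototype.apply/call are rewritten into imacros
 * specialized for the argument count. The tables below map a count (0..8)
 * to the corresponding imacro; record_JSOP_APPLY aborts if the count
 * exceeds the table size.
 */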
10992 static jsbytecode* apply_imacro_table[] = {
10993 apply_imacros.apply0,
10994 apply_imacros.apply1,
10995 apply_imacros.apply2,
10996 apply_imacros.apply3,
10997 apply_imacros.apply4,
10998 apply_imacros.apply5,
10999 apply_imacros.apply6,
11000 apply_imacros.apply7,
11001 apply_imacros.apply8
11004 static jsbytecode* call_imacro_table[] = {
11005 apply_imacros.call0,
11006 apply_imacros.call1,
11007 apply_imacros.call2,
11008 apply_imacros.call3,
11009 apply_imacros.call4,
11010 apply_imacros.call5,
11011 apply_imacros.call6,
11012 apply_imacros.call7,
11013 apply_imacros.call8
11016 JS_REQUIRES_STACK JSRecordingStatus
11017 TraceRecorder::record_JSOP_APPLY()
11019 JSStackFrame* fp = cx->fp;
11020 jsbytecode *pc = fp->regs->pc;
11021 uintN argc = GET_ARGC(pc);
11022 cx->fp->assertValidStackDepth(argc + 2);
11024 jsval* vp = fp->regs->sp - (argc + 2);
11025 jsuint length = 0;
11026 JSObject* aobj = NULL;
11027 LIns* aobj_ins = NULL;
11029 JS_ASSERT(!fp->imacpc);
11031 if (!VALUE_IS_FUNCTION(cx, vp[0]))
11032 return record_JSOP_CALL();
11033 ABORT_IF_XML(vp[0]);
11035 JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
11036 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
11037 if (FUN_INTERPRETED(fun))
11038 return record_JSOP_CALL();
11040 bool apply = (JSFastNative)fun->u.n.native == js_fun_apply;
11041 if (!apply && (JSFastNative)fun->u.n.native != js_fun_call)
11042 return record_JSOP_CALL();
11045 * We don't trace apply and call with a primitive 'this', which is the
11046 * first positional parameter.
11048 if (argc > 0 && JSVAL_IS_PRIMITIVE(vp[2]))
11049 return record_JSOP_CALL();
11052 * Guard on the identity of this, which is the function we are applying.
11054 if (!VALUE_IS_FUNCTION(cx, vp[1]))
11055 ABORT_TRACE("callee is not a function");
11056 CHECK_STATUS(guardCallee(vp[1]));
11058 if (apply && argc >= 2) {
11059 if (argc != 2)
11060 ABORT_TRACE("apply with excess arguments");
11061 if (JSVAL_IS_PRIMITIVE(vp[3]))
11062 ABORT_TRACE("arguments parameter of apply is primitive");
11063 aobj = JSVAL_TO_OBJECT(vp[3]);
11064 aobj_ins = get(&vp[3]);
11067 * We trace dense arrays and arguments objects. The code we generate
11068 * for apply uses imacros to handle a specific number of arguments.
11070 if (OBJ_IS_DENSE_ARRAY(cx, aobj)) {
11071 guardDenseArray(aobj, aobj_ins);
11072 length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
11073 guard(true,
11074 lir->ins2i(LIR_eq,
11075 stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH),
11076 length),
11077 BRANCH_EXIT);
11078 } else if (OBJ_GET_CLASS(cx, aobj) == &js_ArgumentsClass) {
11079 unsigned depth;
11080 JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
11081 if (!afp)
11082 ABORT_TRACE("can't reach arguments object's frame");
11083 length = afp->argc;
11084 } else {
11085 ABORT_TRACE("arguments parameter of apply is not a dense array or argments object");
11088 if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
11089 ABORT_TRACE("too many arguments to apply");
11091 return call_imacro(apply_imacro_table[length]);
11094 if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
11095 ABORT_TRACE("too many arguments to call");
11097 return call_imacro(call_imacro_table[argc]);
11100 static JSBool FASTCALL
11101 CatchStopIteration_tn(JSContext* cx, JSBool ok, jsval* vp)
11103 if (!ok && cx->throwing && js_ValueIsStopIteration(cx->exception)) {
11104 cx->throwing = JS_FALSE;
11105 cx->exception = JSVAL_VOID;
11106 *vp = JSVAL_HOLE;
11107 return JS_TRUE;
11109 return ok;
11112 JS_DEFINE_TRCINFO_1(CatchStopIteration_tn,
11113 (3, (static, BOOL, CatchStopIteration_tn, CONTEXT, BOOL, JSVALPTR, 0, 0)))
11115 JS_REQUIRES_STACK JSRecordingStatus
11116 TraceRecorder::record_NativeCallComplete()
11118 if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
11119 return JSRS_CONTINUE;
11121 jsbytecode* pc = cx->fp->regs->pc;
11123 JS_ASSERT(pendingSpecializedNative);
11124 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW || *pc == JSOP_SETPROP);
11126 jsval& v = stackval(-1);
11127 LIns* v_ins = get(&v);
11130 * At this point the generated code has already called the native function
11131 * and we can no longer fail back to the original pc location (JSOP_CALL)
11132 * because that would cause the interpreter to re-execute the native
11133 * function, which might have side effects.
11135 * Instead, the snapshot() call below sees that we are currently parked on
11136 * a traceable native's JSOP_CALL instruction, and it will advance the pc
11137 * to restore by the length of the current opcode. If the native's return
11138 * type is jsval, snapshot() will also indicate in the type map that the
11139 * element on top of the stack is a boxed value which doesn't need to be
11140 * boxed if the type guard generated by unbox_jsval() fails.
11143 if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
11144 /* Keep cx->bailExit null when it's invalid. */
11145 lir->insStorei(INS_NULL(), cx_ins, (int) offsetof(JSContext, bailExit));
11147 LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
11148 if (pendingSpecializedNative == &generatedSpecializedNative) {
11149 LIns* ok_ins = v_ins;
11152 * Custom implementations of Iterator.next() throw a StopIteration exception.
11153 * Catch and clear it and set the return value to JSVAL_HOLE in this case.
11155 if (uintptr_t(pc - nextiter_imacros.custom_iter_next) <
11156 sizeof(nextiter_imacros.custom_iter_next)) {
11157 LIns* args[] = { native_rval_ins, ok_ins, cx_ins }; /* reverse order */
11158 ok_ins = lir->insCall(&CatchStopIteration_tn_ci, args);
11162 * If we run a generic traceable native, the return value is in the argument
11163 * vector for native function calls. The actual return value of the native is a JSBool
11164 * indicating the error status.
11166 v_ins = lir->insLoad(LIR_ld, native_rval_ins, 0);
11167 if (*pc == JSOP_NEW) {
11168 LIns* x = lir->ins_eq0(lir->ins2i(LIR_piand, v_ins, JSVAL_TAGMASK));
11169 x = lir->ins_choose(x, v_ins, INS_CONST(0));
11170 v_ins = lir->ins_choose(lir->ins_eq0(x), newobj_ins, x);
11172 set(&v, v_ins);
11174 propagateFailureToBuiltinStatus(ok_ins, status);
11176 guard(true, lir->ins_eq0(status), STATUS_EXIT);
11179 JSRecordingStatus ok = JSRS_CONTINUE;
11180 if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
11182 * If we side exit on the unboxing code due to a type change, make sure that the boxed
11183 * value is actually currently associated with that location, and that we are talking
11184 * about the top of the stack here, which is where we expected boxed values.
11186 JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins);
11187 set(&v, unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT)));
11188 } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
11189 /* Already added i2f in functionCall. */
11190 JS_ASSERT(JSVAL_IS_NUMBER(v));
11191 } else {
11192 /* Convert the result to double if the builtin returns int32. */
11193 if (JSVAL_IS_NUMBER(v) &&
11194 (pendingSpecializedNative->builtin->_argtypes & ARGSIZE_MASK_ANY) == ARGSIZE_I) {
11195 set(&v, lir->ins1(LIR_i2f, v_ins));
11199 // We'll null pendingSpecializedNative in monitorRecording, on the next op
11200 // cycle. There must be a next op since the stack is non-empty.
11201 return ok;
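/*
 * Recap of the FAIL_STATUS path above (illustrative): after a builtin that
 * can deep-bail we clear cx->bailExit and reload InterpState::builtinStatus.
 * For a generic traceable native (pendingSpecializedNative ==
 * &generatedSpecializedNative) the boolean result and the jsval written into
 * the native argument vector are folded into that status, with the custom
 * Iterator.next() imacro first converting a pending StopIteration into
 * JSVAL_HOLE via CatchStopIteration_tn. For JSOP_NEW the jsval result is
 * kept only when it is a non-null object-tagged value; otherwise the
 * previously allocated object (newobj_ins) is the result, matching
 * constructor-return semantics. In all cases we then guard that
 * builtinStatus is still zero, side-exiting at STATUS_EXIT on failure.
 */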
11204 JS_REQUIRES_STACK JSRecordingStatus
11205 TraceRecorder::name(jsval*& vp, LIns*& ins, NameResult& nr)
11207 JSObject* obj = cx->fp->scopeChain;
11208 if (obj != globalObj)
11209 return scopeChainProp(obj, vp, ins, nr);
11211 /* Can't use prop here, because we don't want unboxing from global slots. */
11212 LIns* obj_ins = scopeChain();
11213 uint32 slot;
11215 JSObject* obj2;
11216 jsuword pcval;
11219 * Property cache ensures that we are dealing with an existing property,
11220 * and guards the shape for us.
11222 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
11224 /* Abort if property doesn't exist (interpreter will report an error). */
11225 if (PCVAL_IS_NULL(pcval))
11226 ABORT_TRACE("named property not found");
11228 /* Insist on obj being the directly addressed object. */
11229 if (obj2 != obj)
11230 ABORT_TRACE("name() hit prototype chain");
11232 /* Don't trace getter or setter calls, our caller wants a direct slot. */
11233 if (PCVAL_IS_SPROP(pcval)) {
11234 JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
11235 if (!isValidSlot(OBJ_SCOPE(obj), sprop))
11236 ABORT_TRACE("name() not accessing a valid slot");
11237 slot = sprop->slot;
11238 } else {
11239 if (!PCVAL_IS_SLOT(pcval))
11240 ABORT_TRACE("PCE is not a slot");
11241 slot = PCVAL_TO_SLOT(pcval);
11244 if (!lazilyImportGlobalSlot(slot))
11245 ABORT_TRACE("lazy import of global slot failed");
11247 vp = &STOBJ_GET_SLOT(obj, slot);
11248 ins = get(vp);
11249 nr.tracked = true;
11250 return JSRS_CONTINUE;
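/*
 * Illustrative summary: a name that resolves on the global object itself is
 * read straight out of a (lazily imported) global slot, so no unboxing is
 * emitted here; nr.tracked tells the caller that vp points at a tracked
 * location it may write through (cf. record_JSOP_FORNAME). Names found on
 * any other scope object are handled by scopeChainProp above.
 */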
11253 static JSObject* FASTCALL
11254 MethodReadBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
11256 JSAutoTempValueRooter tvr(cx, funobj);
11258 if (!OBJ_SCOPE(obj)->methodReadBarrier(cx, sprop, tvr.addr()))
11259 return NULL;
11260 JS_ASSERT(VALUE_IS_FUNCTION(cx, tvr.value()));
11261 return JSVAL_TO_OBJECT(tvr.value());
11263 JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT,
11264 0, 0)
11267 * Get a property. The current opcode has JOF_ATOM.
11269 * There are two modes. The caller must pass nonnull pointers for either outp
11270 * or both slotp and v_insp. In the latter case, we require a plain old
11271 * property with a slot; if the property turns out to be anything else, abort
11272 * tracing (rather than emit a call to a native getter or GetAnyProperty).
11274 JS_REQUIRES_STACK JSRecordingStatus
11275 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, jsval *outp)
11277 JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));
11280 * Can't specialize to assert obj != global, must guard to avoid aliasing
11281 * stale homes of stacked global variables.
11283 CHECK_STATUS(guardNotGlobalObject(obj, obj_ins));
11286 * Property cache ensures that we are dealing with an existing property,
11287 * and guards the shape for us.
11289 JSObject* obj2;
11290 jsuword pcval;
11291 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
11293 /* Check for non-existent property reference, which results in undefined. */
11294 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
11295 if (PCVAL_IS_NULL(pcval)) {
11296 if (slotp)
11297 ABORT_TRACE("property not found");
11300 * We could specialize to guard on just JSClass.getProperty, but a mere
11301 * class guard is simpler and slightly faster.
11303 if (OBJ_GET_CLASS(cx, obj)->getProperty != JS_PropertyStub) {
11304 ABORT_TRACE("can't trace through access to undefined property if "
11305 "JSClass.getProperty hook isn't stubbed");
11307 guardClass(obj, obj_ins, OBJ_GET_CLASS(cx, obj), snapshot(MISMATCH_EXIT));
11310 * This trace will be valid as long as neither the object nor any object
11311 * on its prototype chain changes shape.
11313 * FIXME: This loop can become a single shape guard once bug 497789 has
11314 * been fixed.
11316 VMSideExit* exit = snapshot(BRANCH_EXIT);
11317 do {
11318 LIns* map_ins = map(obj_ins);
11319 LIns* ops_ins;
11320 if (map_is_native(obj->map, map_ins, ops_ins)) {
11321 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
11322 "shape");
11323 guard(true,
11324 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
11325 exit);
11326 } else if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
11327 ABORT_TRACE("non-native object involved in undefined property access");
11328 } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));
11330 set(outp, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)), true);
11331 return JSRS_CONTINUE;
11334 uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
11335 JS_ASSERT(!(cs.format & JOF_SET));
11337 JSScopeProperty* sprop;
11338 uint32 slot;
11339 bool isMethod;
11341 if (PCVAL_IS_SPROP(pcval)) {
11342 sprop = PCVAL_TO_SPROP(pcval);
11343 JS_ASSERT(OBJ_SCOPE(obj2)->has(sprop));
11345 if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
11346 ABORT_TRACE("non-stub setter");
11347 if (setflags && (sprop->attrs & JSPROP_READONLY))
11348 ABORT_TRACE("writing to a readonly property");
11349 if (!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop)) {
11350 if (slotp)
11351 ABORT_TRACE("can't trace non-stub getter for this opcode");
11352 if (sprop->attrs & JSPROP_GETTER)
11353 ABORT_TRACE("script getter");
11354 if (sprop->slot == SPROP_INVALID_SLOT)
11355 return getPropertyWithNativeGetter(obj_ins, sprop, outp);
11356 return getPropertyById(obj_ins, outp);
11358 if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj2)))
11359 ABORT_TRACE("no valid slot");
11360 slot = sprop->slot;
11361 isMethod = sprop->isMethod();
11362 JS_ASSERT_IF(isMethod, OBJ_SCOPE(obj2)->hasMethodBarrier());
11363 } else {
11364 if (!PCVAL_IS_SLOT(pcval))
11365 ABORT_TRACE("PCE is not a slot");
11366 slot = PCVAL_TO_SLOT(pcval);
11367 sprop = NULL;
11368 isMethod = false;
11371 /* We have a slot. Check whether it is direct or in a prototype. */
11372 if (obj2 != obj) {
11373 if (setflags)
11374 ABORT_TRACE("JOF_INCDEC|JOF_FOR opcode hit prototype chain");
11377 * We're getting a proto-property. Walk up the prototype chain emitting
11378 * proto slot loads, updating obj as we go, leaving obj set to obj2 with
11379 * obj_ins the last proto-load.
11381 do {
11382 obj_ins = stobj_get_proto(obj_ins);
11383 obj = STOBJ_GET_PROTO(obj);
11384 } while (obj != obj2);
11387 LIns* dslots_ins = NULL;
11388 LIns* v_ins = unbox_jsval(STOBJ_GET_SLOT(obj, slot),
11389 stobj_get_slot(obj_ins, slot, dslots_ins),
11390 snapshot(BRANCH_EXIT));
11393 * Joined function object stored as a method must be cloned when extracted
11394 * as a property value other than a callee. Note that shapes cover method
11395 * value as well as other property attributes and order, so this condition
11396 * is trace-invariant.
11398 * We do not impose the method read barrier if in an imacro, assuming any
11399 * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
11400 * leaked to the calling script.
11402 if (isMethod && !cx->fp->imacpc) {
11403 LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
11404 v_ins = lir->insCall(&MethodReadBarrier_ci, args);
11407 if (slotp) {
11408 *slotp = slot;
11409 *v_insp = v_ins;
11411 if (outp)
11412 set(outp, v_ins, true);
11413 return JSRS_CONTINUE;
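/*
 * Illustrative example for the proto-property path above: for
 *
 *     function C() {}
 *     C.prototype.x = 1;
 *     var o = new C;
 *     o.x;                       // property found on the prototype
 *
 * the property cache names C.prototype as obj2, so the loop emits one
 * stobj_get_proto() load per hop from o up to C.prototype, and the final
 * unbox_jsval() guards the type of the slot value, side-exiting if the
 * stored type ever changes.
 */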
11416 JS_REQUIRES_STACK JSRecordingStatus
11417 TraceRecorder::denseArrayElement(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ins,
11418 LIns*& addr_ins)
11420 JS_ASSERT(JSVAL_IS_OBJECT(oval) && JSVAL_IS_INT(ival));
11422 JSObject* obj = JSVAL_TO_OBJECT(oval);
11423 LIns* obj_ins = get(&oval);
11424 jsint idx = JSVAL_TO_INT(ival);
11425 LIns* idx_ins = makeNumberInt32(get(&ival));
11427 VMSideExit* exit = snapshot(BRANCH_EXIT);
11429 /* check that the index is within bounds */
11430 LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
11431 jsuint capacity = js_DenseArrayCapacity(obj);
11432 bool within = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
11433 if (!within) {
11434 /* If idx < 0, stay on trace (and read value as undefined, since this is a dense array). */
11435 LIns* br1 = NULL;
11436 if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
11437 /* Only 64-bit machines support large enough arrays for this. */
11438 JS_ASSERT(sizeof(jsval) == 8);
11439 br1 = lir->insBranch(LIR_jt,
11440 lir->ins2i(LIR_lt, idx_ins, 0),
11441 NULL);
11444 /* If not idx < length, stay on trace (and read value as undefined). */
11445 LIns* br2 = lir->insBranch(LIR_jf,
11446 lir->ins2(LIR_ult,
11447 idx_ins,
11448 stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
11449 NULL);
11451 /* If dslots is NULL, stay on trace (and read value as undefined). */
11452 LIns* br3 = lir->insBranch(LIR_jt, lir->ins_eq0(dslots_ins), NULL);
11454 /* If not idx < capacity, stay on trace (and read value as undefined). */
11455 LIns* br4 = lir->insBranch(LIR_jf,
11456 lir->ins2(LIR_ult,
11457 idx_ins,
11458 lir->insLoad(LIR_ldp,
11459 dslots_ins,
11460 -(int)sizeof(jsval))),
11461 NULL);
11462 lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
11463 LIns* label = lir->ins0(LIR_label);
11464 if (br1)
11465 br1->setTarget(label);
11466 br2->setTarget(label);
11467 br3->setTarget(label);
11468 br4->setTarget(label);
11470 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
11472 // Return undefined and indicate that we didn't actually read this (addr_ins).
11473 v_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
11474 addr_ins = NULL;
11475 return JSRS_CONTINUE;
11478 /* Guard against negative index */
11479 if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
11480 /* Only 64-bit machines support large enough arrays for this. */
11481 JS_ASSERT(sizeof(jsval) == 8);
11482 guard(false,
11483 lir->ins2i(LIR_lt, idx_ins, 0),
11484 exit);
11487 /* Guard array length */
11488 guard(true,
11489 lir->ins2(LIR_ult, idx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
11490 exit);
11492 /* dslots must not be NULL */
11493 guard(false,
11494 lir->ins_eq0(dslots_ins),
11495 exit);
11497 /* Guard array capacity */
11498 guard(true,
11499 lir->ins2(LIR_ult,
11500 idx_ins,
11501 lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
11502 exit);
11504 /* Load the value and guard on its type to unbox it. */
11505 vp = &obj->dslots[jsuint(idx)];
11506 addr_ins = lir->ins2(LIR_piadd, dslots_ins,
11507 lir->ins2i(LIR_pilsh, idx_ins, (sizeof(jsval) == 4) ? 2 : 3));
11508 v_ins = unbox_jsval(*vp, lir->insLoad(LIR_ldp, addr_ins, 0), exit);
11510 if (JSVAL_IS_SPECIAL(*vp)) {
11512 * If we read a hole from the array, convert it to undefined and guard
11513 * that there are no indexed properties along the prototype chain.
11515 LIns* br = lir->insBranch(LIR_jf,
11516 lir->ins2i(LIR_eq, v_ins, JSVAL_TO_SPECIAL(JSVAL_HOLE)),
11517 NULL);
11518 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
11519 br->setTarget(lir->ins0(LIR_label));
11521 /* Don't let the hole value escape. Turn it into an undefined. */
11522 v_ins = lir->ins2i(LIR_and, v_ins, ~(JSVAL_HOLE_FLAG >> JSVAL_TAGBITS));
11524 return JSRS_CONTINUE;
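/*
 * Summary of the two paths above (illustrative): when the index is out of
 * bounds at record time, each "read as undefined" condition (negative index
 * on 64-bit, index >= length, null dslots, index >= capacity) branches over
 * an unconditional side exit, so an access that turns out to be in bounds at
 * run time leaves the trace; we then require that no prototype has indexed
 * properties and produce undefined. When the index is in bounds, the same
 * four conditions become guards, the slot is loaded and unboxed, and a hole
 * is turned into undefined (its hole flag masked off) behind another
 * no-indexed-properties check so the hole value never escapes the trace.
 */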
11527 JS_REQUIRES_STACK JSRecordingStatus
11528 TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
11530 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
11531 JS_ASSERT(cs.ndefs == 1);
11532 return prop(obj, obj_ins, NULL, NULL, &stackval(-cs.nuses));
11535 JS_REQUIRES_STACK JSRecordingStatus
11536 TraceRecorder::getProp(jsval& v)
11538 if (JSVAL_IS_PRIMITIVE(v))
11539 ABORT_TRACE("primitive lhs");
11541 return getProp(JSVAL_TO_OBJECT(v), get(&v));
11544 JS_REQUIRES_STACK JSRecordingStatus
11545 TraceRecorder::record_JSOP_NAME()
11547 jsval* vp;
11548 LIns* v_ins;
11549 NameResult nr;
11550 CHECK_STATUS(name(vp, v_ins, nr));
11551 stack(0, v_ins);
11552 return JSRS_CONTINUE;
11555 JS_REQUIRES_STACK JSRecordingStatus
11556 TraceRecorder::record_JSOP_DOUBLE()
11558 jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
11559 stack(0, lir->insImmf(*JSVAL_TO_DOUBLE(v)));
11560 return JSRS_CONTINUE;
11563 JS_REQUIRES_STACK JSRecordingStatus
11564 TraceRecorder::record_JSOP_STRING()
11566 JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
11567 JS_ASSERT(ATOM_IS_STRING(atom));
11568 stack(0, INS_ATOM(atom));
11569 return JSRS_CONTINUE;
11572 JS_REQUIRES_STACK JSRecordingStatus
11573 TraceRecorder::record_JSOP_ZERO()
11575 stack(0, lir->insImmf(0));
11576 return JSRS_CONTINUE;
11579 JS_REQUIRES_STACK JSRecordingStatus
11580 TraceRecorder::record_JSOP_ONE()
11582 stack(0, lir->insImmf(1));
11583 return JSRS_CONTINUE;
11586 JS_REQUIRES_STACK JSRecordingStatus
11587 TraceRecorder::record_JSOP_NULL()
11589 stack(0, INS_NULL());
11590 return JSRS_CONTINUE;
11593 JS_REQUIRES_STACK JSRecordingStatus
11594 TraceRecorder::record_JSOP_THIS()
11596 LIns* this_ins;
11597 CHECK_STATUS(getThis(this_ins));
11598 stack(0, this_ins);
11599 return JSRS_CONTINUE;
11602 JS_REQUIRES_STACK JSRecordingStatus
11603 TraceRecorder::record_JSOP_FALSE()
11605 stack(0, lir->insImm(0));
11606 return JSRS_CONTINUE;
11609 JS_REQUIRES_STACK JSRecordingStatus
11610 TraceRecorder::record_JSOP_TRUE()
11612 stack(0, lir->insImm(1));
11613 return JSRS_CONTINUE;
11616 JS_REQUIRES_STACK JSRecordingStatus
11617 TraceRecorder::record_JSOP_OR()
11619 return ifop();
11622 JS_REQUIRES_STACK JSRecordingStatus
11623 TraceRecorder::record_JSOP_AND()
11625 return ifop();
11628 JS_REQUIRES_STACK JSRecordingStatus
11629 TraceRecorder::record_JSOP_TABLESWITCH()
11631 #ifdef NANOJIT_IA32
11632 /* Handle tableswitches specially -- prepare a jump table if needed. */
11633 return tableswitch();
11634 #else
11635 return switchop();
11636 #endif
11639 JS_REQUIRES_STACK JSRecordingStatus
11640 TraceRecorder::record_JSOP_LOOKUPSWITCH()
11642 return switchop();
11645 JS_REQUIRES_STACK JSRecordingStatus
11646 TraceRecorder::record_JSOP_STRICTEQ()
11648 strictEquality(true, false);
11649 return JSRS_CONTINUE;
11652 JS_REQUIRES_STACK JSRecordingStatus
11653 TraceRecorder::record_JSOP_STRICTNE()
11655 strictEquality(false, false);
11656 return JSRS_CONTINUE;
11659 JS_REQUIRES_STACK JSRecordingStatus
11660 TraceRecorder::record_JSOP_OBJECT()
11662 JSStackFrame* fp = cx->fp;
11663 JSScript* script = fp->script;
11664 unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);
11666 JSObject* obj;
11667 obj = script->getObject(index);
11668 stack(0, INS_CONSTOBJ(obj));
11669 return JSRS_CONTINUE;
11672 JS_REQUIRES_STACK JSRecordingStatus
11673 TraceRecorder::record_JSOP_POP()
11675 return JSRS_CONTINUE;
11678 JS_REQUIRES_STACK JSRecordingStatus
11679 TraceRecorder::record_JSOP_TRAP()
11681 return JSRS_STOP;
11684 JS_REQUIRES_STACK JSRecordingStatus
11685 TraceRecorder::record_JSOP_GETARG()
11687 stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
11688 return JSRS_CONTINUE;
11691 JS_REQUIRES_STACK JSRecordingStatus
11692 TraceRecorder::record_JSOP_SETARG()
11694 arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
11695 return JSRS_CONTINUE;
11698 JS_REQUIRES_STACK JSRecordingStatus
11699 TraceRecorder::record_JSOP_GETLOCAL()
11701 stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
11702 return JSRS_CONTINUE;
11705 JS_REQUIRES_STACK JSRecordingStatus
11706 TraceRecorder::record_JSOP_SETLOCAL()
11708 var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
11709 return JSRS_CONTINUE;
11712 JS_REQUIRES_STACK JSRecordingStatus
11713 TraceRecorder::record_JSOP_UINT16()
11715 stack(0, lir->insImmf(GET_UINT16(cx->fp->regs->pc)));
11716 return JSRS_CONTINUE;
11719 JS_REQUIRES_STACK JSRecordingStatus
11720 TraceRecorder::record_JSOP_NEWINIT()
11722 JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
11723 LIns* proto_ins;
11724 CHECK_STATUS(getClassPrototype(key, proto_ins));
11726 LIns* args[] = { proto_ins, cx_ins };
11727 const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci : &js_Object_tn_ci;
11728 LIns* v_ins = lir->insCall(ci, args);
11729 guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
11730 stack(0, v_ins);
11731 return JSRS_CONTINUE;
11734 JS_REQUIRES_STACK JSRecordingStatus
11735 TraceRecorder::record_JSOP_ENDINIT()
11737 #ifdef DEBUG
11738 jsval& v = stackval(-1);
11739 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
11740 #endif
11741 return JSRS_CONTINUE;
11744 JS_REQUIRES_STACK JSRecordingStatus
11745 TraceRecorder::record_JSOP_INITPROP()
11747 // All the action is in record_SetPropHit.
11748 return JSRS_CONTINUE;
11751 JS_REQUIRES_STACK JSRecordingStatus
11752 TraceRecorder::record_JSOP_INITELEM()
11754 return record_JSOP_SETELEM();
11757 JS_REQUIRES_STACK JSRecordingStatus
11758 TraceRecorder::record_JSOP_DEFSHARP()
11760 return JSRS_STOP;
11763 JS_REQUIRES_STACK JSRecordingStatus
11764 TraceRecorder::record_JSOP_USESHARP()
11766 return JSRS_STOP;
11769 JS_REQUIRES_STACK JSRecordingStatus
11770 TraceRecorder::record_JSOP_INCARG()
11772 return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1);
11775 JS_REQUIRES_STACK JSRecordingStatus
11776 TraceRecorder::record_JSOP_INCLOCAL()
11778 return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1);
11781 JS_REQUIRES_STACK JSRecordingStatus
11782 TraceRecorder::record_JSOP_DECARG()
11784 return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1);
11787 JS_REQUIRES_STACK JSRecordingStatus
11788 TraceRecorder::record_JSOP_DECLOCAL()
11790 return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1);
11793 JS_REQUIRES_STACK JSRecordingStatus
11794 TraceRecorder::record_JSOP_ARGINC()
11796 return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false);
11799 JS_REQUIRES_STACK JSRecordingStatus
11800 TraceRecorder::record_JSOP_LOCALINC()
11802 return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false);
11805 JS_REQUIRES_STACK JSRecordingStatus
11806 TraceRecorder::record_JSOP_ARGDEC()
11808 return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false);
11811 JS_REQUIRES_STACK JSRecordingStatus
11812 TraceRecorder::record_JSOP_LOCALDEC()
11814 return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
11817 JS_REQUIRES_STACK JSRecordingStatus
11818 TraceRecorder::record_JSOP_IMACOP()
11820 JS_ASSERT(cx->fp->imacpc);
11821 return JSRS_CONTINUE;
11824 JS_REQUIRES_STACK JSRecordingStatus
11825 TraceRecorder::record_JSOP_ITER()
11827 jsval& v = stackval(-1);
11828 if (JSVAL_IS_PRIMITIVE(v))
11829 ABORT_TRACE("for-in on a primitive value");
11830 ABORT_IF_XML(v);
11832 jsuint flags = cx->fp->regs->pc[1];
11834 if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
11835 if (flags == JSITER_ENUMERATE)
11836 return call_imacro(iter_imacros.for_in);
11837 if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
11838 return call_imacro(iter_imacros.for_each);
11839 } else {
11840 if (flags == JSITER_ENUMERATE)
11841 return call_imacro(iter_imacros.for_in_native);
11842 if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
11843 return call_imacro(iter_imacros.for_each_native);
11845 ABORT_TRACE("unimplemented JSITER_* flags");
11848 JS_REQUIRES_STACK JSRecordingStatus
11849 TraceRecorder::record_JSOP_NEXTITER()
11851 jsval& iterobj_val = stackval(-2);
11852 if (JSVAL_IS_PRIMITIVE(iterobj_val))
11853 ABORT_TRACE("for-in on a primitive value");
11854 ABORT_IF_XML(iterobj_val);
11855 JSObject* iterobj = JSVAL_TO_OBJECT(iterobj_val);
11856 JSClass* clasp = STOBJ_GET_CLASS(iterobj);
11857 LIns* iterobj_ins = get(&iterobj_val);
11858 if (clasp == &js_IteratorClass || clasp == &js_GeneratorClass) {
11859 guardClass(iterobj, iterobj_ins, clasp, snapshot(BRANCH_EXIT));
11860 return call_imacro(nextiter_imacros.native_iter_next);
11862 return call_imacro(nextiter_imacros.custom_iter_next);
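/*
 * Note (illustrative): iterators made by the native for-in machinery
 * (js_IteratorClass) and generators take the native_iter_next imacro after a
 * class guard; everything else takes custom_iter_next, which is the case
 * record_NativeCallComplete above special-cases with CatchStopIteration_tn
 * so a thrown StopIteration becomes JSVAL_HOLE on trace.
 */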
11865 JS_REQUIRES_STACK JSRecordingStatus
11866 TraceRecorder::record_JSOP_ENDITER()
11868 LIns* args[] = { stack(-2), cx_ins };
11869 LIns* ok_ins = lir->insCall(&js_CloseIterator_ci, args);
11870 guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
11871 return JSRS_CONTINUE;
11874 JS_REQUIRES_STACK JSRecordingStatus
11875 TraceRecorder::record_JSOP_FORNAME()
11877 jsval* vp;
11878 LIns* x_ins;
11879 NameResult nr;
11880 CHECK_STATUS(name(vp, x_ins, nr));
11881 if (!nr.tracked)
11882 ABORT_TRACE("forname on non-tracked value not supported");
11883 set(vp, stack(-1));
11884 return JSRS_CONTINUE;
11887 JS_REQUIRES_STACK JSRecordingStatus
11888 TraceRecorder::record_JSOP_FORPROP()
11890 return JSRS_STOP;
11893 JS_REQUIRES_STACK JSRecordingStatus
11894 TraceRecorder::record_JSOP_FORELEM()
11896 return record_JSOP_DUP();
11899 JS_REQUIRES_STACK JSRecordingStatus
11900 TraceRecorder::record_JSOP_FORARG()
11902 return record_JSOP_SETARG();
11905 JS_REQUIRES_STACK JSRecordingStatus
11906 TraceRecorder::record_JSOP_FORLOCAL()
11908 return record_JSOP_SETLOCAL();
11911 JS_REQUIRES_STACK JSRecordingStatus
11912 TraceRecorder::record_JSOP_POPN()
11914 return JSRS_CONTINUE;
11918 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The generated code
11919 * also ensures that any call objects found have not changed shape.
11921 * obj starting object
11922 * obj_ins LIR instruction representing obj
11923 * obj2 end object for traversal
11924 * obj2_ins [out] LIR instruction representing obj2
11926 JS_REQUIRES_STACK JSRecordingStatus
11927 TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *obj2, LIns *&obj2_ins)
11929 for (;;) {
11930 if (obj != globalObj) {
11931 if (!js_IsCacheableNonGlobalScope(obj))
11932 ABORT_TRACE("scope chain lookup crosses non-cacheable object");
11934 // We must guard on the shape of all call objects for heavyweight functions
11935 // that we traverse on the scope chain: if the shape changes, a variable with
11936 // the same name may have been inserted in the scope chain.
11937 if (STOBJ_GET_CLASS(obj) == &js_CallClass &&
11938 JSFUN_HEAVYWEIGHT_TEST(js_GetCallObjectFunction(obj)->flags)) {
11939 LIns* map_ins = map(obj_ins);
11940 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
11941 "obj_shape");
11942 guard(true,
11943 addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard_shape"),
11944 BRANCH_EXIT);
11948 if (obj == obj2)
11949 break;
11951 obj = STOBJ_GET_PARENT(obj);
11952 if (!obj)
11953 ABORT_TRACE("target object not reached on scope chain");
11954 obj_ins = stobj_get_parent(obj_ins);
11957 obj2_ins = obj_ins;
11958 return JSRS_CONTINUE;
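/*
 * Illustrative example: for
 *
 *     function outer() { var v = 0; function inner() { v = 1; } ... }
 *
 * a BINDNAME recorded inside inner() uses this helper to walk parent links
 * from inner's scope up to outer's Call object; because outer needs a Call
 * object (it is heavyweight), that Call object's shape is guarded, so a
 * binding added to it later forces a side exit rather than silently
 * resolving to the wrong slot.
 */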
11961 JS_REQUIRES_STACK JSRecordingStatus
11962 TraceRecorder::record_JSOP_BINDNAME()
11964 JSStackFrame *fp = cx->fp;
11965 JSObject *obj;
11967 if (!fp->fun) {
11968 obj = fp->scopeChain;
11970 // In global code, fp->scopeChain can only contain blocks whose values
11971 // are still on the stack. We never use BINDNAME to refer to these.
11972 while (OBJ_GET_CLASS(cx, obj) == &js_BlockClass) {
11973 // The block's values are still on the stack.
11974 JS_ASSERT(obj->getPrivate() == fp);
11975 obj = OBJ_GET_PARENT(cx, obj);
11976 // Blocks always have parents.
11977 JS_ASSERT(obj);
11980 if (obj != globalObj)
11981 ABORT_TRACE("BINDNAME in global code resolved to non-global object");
11984 * The trace is specialized to this global object. Furthermore, we know it
11985 * is the sole 'global' object on the scope chain: we set globalObj to the
11986 * scope chain element with no parent, and we reached it starting from the
11987 * function closure or the current scopeChain, so there is nothing inner to
11988 * it. Therefore this must be the right base object.
11990 stack(0, INS_CONSTOBJ(obj));
11991 return JSRS_CONTINUE;
11994 // We can't trace BINDNAME in functions that contain direct calls to eval,
11995 // as they might add bindings which previously-traced references would have
11996 // to see.
11997 if (JSFUN_HEAVYWEIGHT_TEST(fp->fun->flags))
11998 ABORT_TRACE("BINDNAME in heavyweight function.");
12000 // We don't have the scope chain on trace, so instead we get a start object
12001 // that is on the scope chain and doesn't skip the target object (the one
12002 // that contains the property).
12003 jsval *callee = &cx->fp->argv[-2];
12004 obj = STOBJ_GET_PARENT(JSVAL_TO_OBJECT(*callee));
12005 if (obj == globalObj) {
12006 stack(0, INS_CONSTOBJ(obj));
12007 return JSRS_CONTINUE;
12009 LIns *obj_ins = stobj_get_parent(get(callee));
12011 // Find the target object.
12012 JSAtom *atom = atoms[GET_INDEX(cx->fp->regs->pc)];
12013 jsid id = ATOM_TO_JSID(atom);
12014 JSObject *obj2 = js_FindIdentifierBase(cx, fp->scopeChain, id);
12015 if (obj2 != globalObj && STOBJ_GET_CLASS(obj2) != &js_CallClass)
12016 ABORT_TRACE("BINDNAME on non-global, non-call object");
12018 // Generate LIR to get to the target object from the start object.
12019 LIns *obj2_ins;
12020 CHECK_STATUS(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));
12022 // If |obj2| is the global object, we can refer to it directly instead of walking up
12023 // the scope chain. There may still be guards on intervening call objects.
12024 stack(0, obj2 == globalObj ? INS_CONSTOBJ(obj2) : obj2_ins);
12025 return JSRS_CONTINUE;
12028 JS_REQUIRES_STACK JSRecordingStatus
12029 TraceRecorder::record_JSOP_SETNAME()
12031 jsval& l = stackval(-2);
12032 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
12035 * Trace only cases that are global code, in lightweight functions
12036 * scoped by the global object only, or in call objects.
12038 JSObject* obj = JSVAL_TO_OBJECT(l);
12039 if (OBJ_GET_CLASS(cx, obj) == &js_CallClass)
12040 return JSRS_CONTINUE;
12041 if (obj != cx->fp->scopeChain || obj != globalObj)
12042 ABORT_TRACE("JSOP_SETNAME left operand is not the global object");
12044 // The rest of the work is in record_SetPropHit.
12045 return JSRS_CONTINUE;
12048 JS_REQUIRES_STACK JSRecordingStatus
12049 TraceRecorder::record_JSOP_THROW()
12051 return JSRS_STOP;
12054 JS_REQUIRES_STACK JSRecordingStatus
12055 TraceRecorder::record_JSOP_IN()
12057 jsval& rval = stackval(-1);
12058 jsval& lval = stackval(-2);
12060 if (JSVAL_IS_PRIMITIVE(rval))
12061 ABORT_TRACE("JSOP_IN on non-object right operand");
12062 JSObject* obj = JSVAL_TO_OBJECT(rval);
12063 LIns* obj_ins = get(&rval);
12065 jsid id;
12066 LIns* x;
12067 if (JSVAL_IS_INT(lval)) {
12068 id = INT_JSVAL_TO_JSID(lval);
12069 LIns* args[] = { makeNumberInt32(get(&lval)), obj_ins, cx_ins };
12070 x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
12071 } else if (JSVAL_IS_STRING(lval)) {
12072 if (!js_ValueToStringId(cx, lval, &id))
12073 ABORT_TRACE_ERROR("left operand of JSOP_IN didn't convert to a string-id");
12074 LIns* args[] = { get(&lval), obj_ins, cx_ins };
12075 x = lir->insCall(&js_HasNamedProperty_ci, args);
12076 } else {
12077 ABORT_TRACE("string or integer expected");
12080 guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_SPECIAL(JSVAL_VOID)), OOM_EXIT);
12081 x = lir->ins2i(LIR_eq, x, 1);
12083 JSObject* obj2;
12084 JSProperty* prop;
12085 if (!obj->lookupProperty(cx, id, &obj2, &prop))
12086 ABORT_TRACE_ERROR("obj->lookupProperty failed in JSOP_IN");
12087 bool cond = prop != NULL;
12088 if (prop)
12089 obj2->dropProperty(cx, prop);
12090 if (wasDeepAborted())
12091 ABORT_TRACE("deep abort from property lookup");
12094 * The interpreter fuses comparisons and the following branch, so we have
12095 * to do that here as well.
12097 fuseIf(cx->fp->regs->pc + 1, cond, x);
12100 * We update the stack after the guard. This is safe since the guard bails
12101 * out at the comparison and the interpreter will therefore re-execute the
12102 * comparison. This way the value of the condition doesn't have to be
12103 * calculated and saved on the stack in most cases.
12105 set(&lval, x);
12106 return JSRS_CONTINUE;
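/*
 * Note (illustrative): the property lookup effectively runs twice for `in`.
 * On trace, the js_HasNamedProperty helper computes the result (the guard
 * above side-exits if it returns the JSVAL_VOID sentinel, signalling
 * failure); at record time, obj->lookupProperty computes the concrete
 * outcome so fuseIf can fuse the comparison with the following branch in
 * the direction the interpreter will take, as the comment above describes.
 */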
12109 static JSBool FASTCALL
12110 HasInstance(JSContext* cx, JSObject* ctor, jsval val)
12112 JSBool result = JS_FALSE;
12113 if (!ctor->map->ops->hasInstance(cx, ctor, val, &result))
12114 js_SetBuiltinError(cx);
12115 return result;
12117 JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstance, CONTEXT, OBJECT, JSVAL, 0, 0)
12119 JS_REQUIRES_STACK JSRecordingStatus
12120 TraceRecorder::record_JSOP_INSTANCEOF()
12122 // If the rhs isn't an object, we are headed for a TypeError.
12123 jsval& ctor = stackval(-1);
12124 if (JSVAL_IS_PRIMITIVE(ctor))
12125 ABORT_TRACE("non-object on rhs of instanceof");
12127 jsval& val = stackval(-2);
12128 LIns* val_ins = box_jsval(val, get(&val));
12130 enterDeepBailCall();
12131 LIns* args[] = {val_ins, get(&ctor), cx_ins};
12132 stack(-2, lir->insCall(&HasInstance_ci, args));
12133 LIns* status_ins = lir->insLoad(LIR_ld,
12134 lirbuf->state,
12135 (int) offsetof(InterpState, builtinStatus));
12136 guard(true, lir->ins_eq0(status_ins), STATUS_EXIT);
12137 leaveDeepBailCall();
12139 return JSRS_CONTINUE;
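/*
 * Note (illustrative): instanceof is traced as a call to the HasInstance
 * helper above; enterDeepBailCall/leaveDeepBailCall bracket the call so it
 * can deep-bail safely, and the builtinStatus guard side-exits at
 * STATUS_EXIT if hasInstance reported an error via js_SetBuiltinError.
 */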
12142 JS_REQUIRES_STACK JSRecordingStatus
12143 TraceRecorder::record_JSOP_DEBUGGER()
12145 return JSRS_STOP;
12148 JS_REQUIRES_STACK JSRecordingStatus
12149 TraceRecorder::record_JSOP_GOSUB()
12151 return JSRS_STOP;
12154 JS_REQUIRES_STACK JSRecordingStatus
12155 TraceRecorder::record_JSOP_RETSUB()
12157 return JSRS_STOP;
12160 JS_REQUIRES_STACK JSRecordingStatus
12161 TraceRecorder::record_JSOP_EXCEPTION()
12163 return JSRS_STOP;
12166 JS_REQUIRES_STACK JSRecordingStatus
12167 TraceRecorder::record_JSOP_LINENO()
12169 return JSRS_CONTINUE;
12172 JS_REQUIRES_STACK JSRecordingStatus
12173 TraceRecorder::record_JSOP_CONDSWITCH()
12175 return JSRS_CONTINUE;
12178 JS_REQUIRES_STACK JSRecordingStatus
12179 TraceRecorder::record_JSOP_CASE()
12181 strictEquality(true, true);
12182 return JSRS_CONTINUE;
12185 JS_REQUIRES_STACK JSRecordingStatus
12186 TraceRecorder::record_JSOP_DEFAULT()
12188 return JSRS_CONTINUE;
12191 JS_REQUIRES_STACK JSRecordingStatus
12192 TraceRecorder::record_JSOP_EVAL()
12194 return JSRS_STOP;
12197 JS_REQUIRES_STACK JSRecordingStatus
12198 TraceRecorder::record_JSOP_ENUMELEM()
12200 return JSRS_STOP;
12203 JS_REQUIRES_STACK JSRecordingStatus
12204 TraceRecorder::record_JSOP_GETTER()
12206 return JSRS_STOP;
12209 JS_REQUIRES_STACK JSRecordingStatus
12210 TraceRecorder::record_JSOP_SETTER()
12212 return JSRS_STOP;
12215 JS_REQUIRES_STACK JSRecordingStatus
12216 TraceRecorder::record_JSOP_DEFFUN()
12218 return JSRS_STOP;
12221 JS_REQUIRES_STACK JSRecordingStatus
12222 TraceRecorder::record_JSOP_DEFFUN_FC()
12224 return JSRS_STOP;
12227 JS_REQUIRES_STACK JSRecordingStatus
12228 TraceRecorder::record_JSOP_DEFCONST()
12230 return JSRS_STOP;
12233 JS_REQUIRES_STACK JSRecordingStatus
12234 TraceRecorder::record_JSOP_DEFVAR()
12236 return JSRS_STOP;
12239 jsatomid
12240 TraceRecorder::getFullIndex(ptrdiff_t pcoff)
12242 jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff);
12243 index += atoms - cx->fp->script->atomMap.vector;
12244 return index;
12247 JS_REQUIRES_STACK JSRecordingStatus
12248 TraceRecorder::record_JSOP_LAMBDA()
12250 JSFunction* fun;
12251 fun = cx->fp->script->getFunction(getFullIndex());
12253 if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
12254 LIns *proto_ins;
12255 CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));
12257 LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
12258 LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
12259 stack(0, x);
12260 return JSRS_CONTINUE;
12262 return JSRS_STOP;
12265 JS_REQUIRES_STACK JSRecordingStatus
12266 TraceRecorder::record_JSOP_LAMBDA_FC()
12268 JSFunction* fun;
12269 fun = cx->fp->script->getFunction(getFullIndex());
12271 LIns* scopeChain_ins = get(&cx->fp->argv[-2]);
12272 JS_ASSERT(scopeChain_ins);
12274 LIns* args[] = {
12275 scopeChain_ins,
12276 INS_CONSTFUN(fun),
12277 cx_ins
12279 LIns* call_ins = lir->insCall(&js_AllocFlatClosure_ci, args);
12280 guard(false,
12281 addName(lir->ins2(LIR_eq, call_ins, INS_NULL()),
12282 "guard(js_AllocFlatClosure)"),
12283 OOM_EXIT);
12284 stack(0, call_ins);
12286 if (fun->u.i.nupvars) {
12287 JSUpvarArray *uva = fun->u.i.script->upvars();
12288 for (uint32 i = 0, n = uva->length; i < n; i++) {
12289 jsval v;
12290 LIns* upvar_ins = upvar(fun->u.i.script, uva, i, v);
12291 if (!upvar_ins)
12292 return JSRS_STOP;
12293 LIns* dslots_ins = NULL;
12294 stobj_set_dslot(call_ins, i, dslots_ins, box_jsval(v, upvar_ins));
12298 return JSRS_CONTINUE;
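/*
 * Note (illustrative): a flat closure carries copies of its upvars in the
 * closure object's dslots. For each upvar we resolve the current traced
 * value with upvar() (stopping recording with JSRS_STOP if it cannot be
 * reached), box it, and store it into dslot i of the object that
 * js_AllocFlatClosure just returned.
 */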
12301 JS_REQUIRES_STACK JSRecordingStatus
12302 TraceRecorder::record_JSOP_CALLEE()
12304 stack(0, get(&cx->fp->argv[-2]));
12305 return JSRS_CONTINUE;
12308 JS_REQUIRES_STACK JSRecordingStatus
12309 TraceRecorder::record_JSOP_SETLOCALPOP()
12311 var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
12312 return JSRS_CONTINUE;
12315 JS_REQUIRES_STACK JSRecordingStatus
12316 TraceRecorder::record_JSOP_IFPRIMTOP()
12318 // Traces are type-specialized, including null vs. object, so we need do
12319 // nothing here. The upstream unbox_jsval called after valueOf or toString
12320 // from an imacro (e.g.) will fork the trace for us, allowing us to just
12321 // follow along mindlessly :-).
12322 return JSRS_CONTINUE;
12325 JS_REQUIRES_STACK JSRecordingStatus
12326 TraceRecorder::record_JSOP_SETCALL()
12328 return JSRS_STOP;
12331 JS_REQUIRES_STACK JSRecordingStatus
12332 TraceRecorder::record_JSOP_TRY()
12334 return JSRS_CONTINUE;
12337 JS_REQUIRES_STACK JSRecordingStatus
12338 TraceRecorder::record_JSOP_FINALLY()
12340 return JSRS_CONTINUE;
12343 JS_REQUIRES_STACK JSRecordingStatus
12344 TraceRecorder::record_JSOP_NOP()
12346 return JSRS_CONTINUE;
12349 JS_REQUIRES_STACK JSRecordingStatus
12350 TraceRecorder::record_JSOP_ARGSUB()
12352 JSStackFrame* fp = cx->fp;
12353 if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
12354 uintN slot = GET_ARGNO(fp->regs->pc);
12355 if (slot < fp->argc)
12356 stack(0, get(&cx->fp->argv[slot]));
12357 else
12358 stack(0, INS_VOID());
12359 return JSRS_CONTINUE;
12361 ABORT_TRACE("can't trace JSOP_ARGSUB hard case");
12364 JS_REQUIRES_STACK JSRecordingStatus
12365 TraceRecorder::record_JSOP_ARGCNT()
12367 if (!(cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
12368 stack(0, lir->insImmf(cx->fp->argc));
12369 return JSRS_CONTINUE;
12371 ABORT_TRACE("can't trace heavyweight JSOP_ARGCNT");
12374 JS_REQUIRES_STACK JSRecordingStatus
12375 TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
12377 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
12379 if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
12380 LIns *proto_ins;
12381 CHECK_STATUS(getClassPrototype(JSProto_Function, proto_ins));
12383 LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
12384 LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
12385 var(slot, x);
12386 return JSRS_CONTINUE;
12389 return JSRS_STOP;
12392 JS_REQUIRES_STACK JSRecordingStatus
12393 TraceRecorder::record_JSOP_DEFLOCALFUN()
12395 return JSRS_CONTINUE;
12398 JS_REQUIRES_STACK JSRecordingStatus
12399 TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
12401 return JSRS_CONTINUE;
12404 JS_REQUIRES_STACK JSRecordingStatus
12405 TraceRecorder::record_JSOP_GOTOX()
12407 return record_JSOP_GOTO();
12410 JS_REQUIRES_STACK JSRecordingStatus
12411 TraceRecorder::record_JSOP_IFEQX()
12413 return record_JSOP_IFEQ();
12416 JS_REQUIRES_STACK JSRecordingStatus
12417 TraceRecorder::record_JSOP_IFNEX()
12419 return record_JSOP_IFNE();
12422 JS_REQUIRES_STACK JSRecordingStatus
12423 TraceRecorder::record_JSOP_ORX()
12425 return record_JSOP_OR();
12428 JS_REQUIRES_STACK JSRecordingStatus
12429 TraceRecorder::record_JSOP_ANDX()
12431 return record_JSOP_AND();
12434 JS_REQUIRES_STACK JSRecordingStatus
12435 TraceRecorder::record_JSOP_GOSUBX()
12437 return record_JSOP_GOSUB();
12440 JS_REQUIRES_STACK JSRecordingStatus
12441 TraceRecorder::record_JSOP_CASEX()
12443 strictEquality(true, true);
12444 return JSRS_CONTINUE;
12447 JS_REQUIRES_STACK JSRecordingStatus
12448 TraceRecorder::record_JSOP_DEFAULTX()
12450 return JSRS_CONTINUE;
12453 JS_REQUIRES_STACK JSRecordingStatus
12454 TraceRecorder::record_JSOP_TABLESWITCHX()
12456 return record_JSOP_TABLESWITCH();
12459 JS_REQUIRES_STACK JSRecordingStatus
12460 TraceRecorder::record_JSOP_LOOKUPSWITCHX()
12462 return switchop();
12465 JS_REQUIRES_STACK JSRecordingStatus
12466 TraceRecorder::record_JSOP_BACKPATCH()
12468 return JSRS_CONTINUE;
12471 JS_REQUIRES_STACK JSRecordingStatus
12472 TraceRecorder::record_JSOP_BACKPATCH_POP()
12474 return JSRS_CONTINUE;
12477 JS_REQUIRES_STACK JSRecordingStatus
12478 TraceRecorder::record_JSOP_THROWING()
12480 return JSRS_STOP;
12483 JS_REQUIRES_STACK JSRecordingStatus
12484 TraceRecorder::record_JSOP_SETRVAL()
12486 // If we implement this, we need to update JSOP_STOP.
12487 return JSRS_STOP;
12490 JS_REQUIRES_STACK JSRecordingStatus
12491 TraceRecorder::record_JSOP_RETRVAL()
12493 return JSRS_STOP;
12496 JS_REQUIRES_STACK JSRecordingStatus
12497 TraceRecorder::record_JSOP_GETGVAR()
12499 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12500 if (JSVAL_IS_NULL(slotval))
12501 return JSRS_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
12503 uint32 slot = JSVAL_TO_INT(slotval);
12505 if (!lazilyImportGlobalSlot(slot))
12506 ABORT_TRACE("lazy import of global slot failed");
12508 stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
12509 return JSRS_CONTINUE;
12512 JS_REQUIRES_STACK JSRecordingStatus
12513 TraceRecorder::record_JSOP_SETGVAR()
12515 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12516 if (JSVAL_IS_NULL(slotval))
12517 return JSRS_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
12519 uint32 slot = JSVAL_TO_INT(slotval);
12521 if (!lazilyImportGlobalSlot(slot))
12522 ABORT_TRACE("lazy import of global slot failed");
12524 set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
12525 return JSRS_CONTINUE;
12528 JS_REQUIRES_STACK JSRecordingStatus
12529 TraceRecorder::record_JSOP_INCGVAR()
12531 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12532 if (JSVAL_IS_NULL(slotval))
12533 // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
12534 return JSRS_CONTINUE;
12536 uint32 slot = JSVAL_TO_INT(slotval);
12538 if (!lazilyImportGlobalSlot(slot))
12539 ABORT_TRACE("lazy import of global slot failed");
12541 return inc(STOBJ_GET_SLOT(globalObj, slot), 1);
12544 JS_REQUIRES_STACK JSRecordingStatus
12545 TraceRecorder::record_JSOP_DECGVAR()
12547 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12548 if (JSVAL_IS_NULL(slotval))
12549 // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
12550 return JSRS_CONTINUE;
12552 uint32 slot = JSVAL_TO_INT(slotval);
12554 if (!lazilyImportGlobalSlot(slot))
12555 ABORT_TRACE("lazy import of global slot failed");
12557 return inc(STOBJ_GET_SLOT(globalObj, slot), -1);
12560 JS_REQUIRES_STACK JSRecordingStatus
12561 TraceRecorder::record_JSOP_GVARINC()
12563 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12564 if (JSVAL_IS_NULL(slotval))
12565 // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
12566 return JSRS_CONTINUE;
12568 uint32 slot = JSVAL_TO_INT(slotval);
12570 if (!lazilyImportGlobalSlot(slot))
12571 ABORT_TRACE("lazy import of global slot failed");
12573 return inc(STOBJ_GET_SLOT(globalObj, slot), 1, false);
12576 JS_REQUIRES_STACK JSRecordingStatus
12577 TraceRecorder::record_JSOP_GVARDEC()
12579 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
12580 if (JSVAL_IS_NULL(slotval))
12581 // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
12582 return JSRS_CONTINUE;
12584 uint32 slot = JSVAL_TO_INT(slotval);
12586 if (!lazilyImportGlobalSlot(slot))
12587 ABORT_TRACE("lazy import of global slot failed");
12589 return inc(STOBJ_GET_SLOT(globalObj, slot), -1, false);
12592 JS_REQUIRES_STACK JSRecordingStatus
12593 TraceRecorder::record_JSOP_REGEXP()
12595 return JSRS_STOP;
12598 // begin JS_HAS_XML_SUPPORT
12600 JS_REQUIRES_STACK JSRecordingStatus
12601 TraceRecorder::record_JSOP_DEFXMLNS()
12603 return JSRS_STOP;
12606 JS_REQUIRES_STACK JSRecordingStatus
12607 TraceRecorder::record_JSOP_ANYNAME()
12609 return JSRS_STOP;
12612 JS_REQUIRES_STACK JSRecordingStatus
12613 TraceRecorder::record_JSOP_QNAMEPART()
12615 return record_JSOP_STRING();
12618 JS_REQUIRES_STACK JSRecordingStatus
12619 TraceRecorder::record_JSOP_QNAMECONST()
12621 return JSRS_STOP;
12624 JS_REQUIRES_STACK JSRecordingStatus
12625 TraceRecorder::record_JSOP_QNAME()
12627 return JSRS_STOP;
12630 JS_REQUIRES_STACK JSRecordingStatus
12631 TraceRecorder::record_JSOP_TOATTRNAME()
12633 return JSRS_STOP;
12636 JS_REQUIRES_STACK JSRecordingStatus
12637 TraceRecorder::record_JSOP_TOATTRVAL()
12639 return JSRS_STOP;
12642 JS_REQUIRES_STACK JSRecordingStatus
12643 TraceRecorder::record_JSOP_ADDATTRNAME()
12645 return JSRS_STOP;
12648 JS_REQUIRES_STACK JSRecordingStatus
12649 TraceRecorder::record_JSOP_ADDATTRVAL()
12651 return JSRS_STOP;
12654 JS_REQUIRES_STACK JSRecordingStatus
12655 TraceRecorder::record_JSOP_BINDXMLNAME()
12657 return JSRS_STOP;
12660 JS_REQUIRES_STACK JSRecordingStatus
12661 TraceRecorder::record_JSOP_SETXMLNAME()
12663 return JSRS_STOP;
12666 JS_REQUIRES_STACK JSRecordingStatus
12667 TraceRecorder::record_JSOP_XMLNAME()
12669 return JSRS_STOP;
12672 JS_REQUIRES_STACK JSRecordingStatus
12673 TraceRecorder::record_JSOP_DESCENDANTS()
12675 return JSRS_STOP;
12678 JS_REQUIRES_STACK JSRecordingStatus
12679 TraceRecorder::record_JSOP_FILTER()
12681 return JSRS_STOP;
12684 JS_REQUIRES_STACK JSRecordingStatus
12685 TraceRecorder::record_JSOP_ENDFILTER()
12687 return JSRS_STOP;
12690 JS_REQUIRES_STACK JSRecordingStatus
12691 TraceRecorder::record_JSOP_TOXML()
12693 return JSRS_STOP;
12696 JS_REQUIRES_STACK JSRecordingStatus
12697 TraceRecorder::record_JSOP_TOXMLLIST()
12699 return JSRS_STOP;
12702 JS_REQUIRES_STACK JSRecordingStatus
12703 TraceRecorder::record_JSOP_XMLTAGEXPR()
12705 return JSRS_STOP;
12708 JS_REQUIRES_STACK JSRecordingStatus
12709 TraceRecorder::record_JSOP_XMLELTEXPR()
12711 return JSRS_STOP;
12714 JS_REQUIRES_STACK JSRecordingStatus
12715 TraceRecorder::record_JSOP_XMLOBJECT()
12717 return JSRS_STOP;
12720 JS_REQUIRES_STACK JSRecordingStatus
12721 TraceRecorder::record_JSOP_XMLCDATA()
12723 return JSRS_STOP;
12726 JS_REQUIRES_STACK JSRecordingStatus
12727 TraceRecorder::record_JSOP_XMLCOMMENT()
12729 return JSRS_STOP;
12732 JS_REQUIRES_STACK JSRecordingStatus
12733 TraceRecorder::record_JSOP_XMLPI()
12735 return JSRS_STOP;
12738 JS_REQUIRES_STACK JSRecordingStatus
12739 TraceRecorder::record_JSOP_GETFUNNS()
12741 return JSRS_STOP;
12744 JS_REQUIRES_STACK JSRecordingStatus
12745 TraceRecorder::record_JSOP_STARTXML()
12747 return JSRS_STOP;
12750 JS_REQUIRES_STACK JSRecordingStatus
12751 TraceRecorder::record_JSOP_STARTXMLEXPR()
12753 return JSRS_STOP;
12756 // end JS_HAS_XML_SUPPORT
12758 JS_REQUIRES_STACK JSRecordingStatus
12759 TraceRecorder::record_JSOP_CALLPROP()
12761 jsval& l = stackval(-1);
12762 JSObject* obj;
12763 LIns* obj_ins;
12764 LIns* this_ins;
12765 if (!JSVAL_IS_PRIMITIVE(l)) {
12766 obj = JSVAL_TO_OBJECT(l);
12767 obj_ins = get(&l);
12768 this_ins = obj_ins; // |this| for subsequent call
12769 } else {
12770 jsint i;
12771 debug_only_stmt(const char* protoname = NULL;)
12772 if (JSVAL_IS_STRING(l)) {
12773 i = JSProto_String;
12774 debug_only_stmt(protoname = "String.prototype";)
12775 } else if (JSVAL_IS_NUMBER(l)) {
12776 i = JSProto_Number;
12777 debug_only_stmt(protoname = "Number.prototype";)
12778 } else if (JSVAL_IS_SPECIAL(l)) {
12779 if (l == JSVAL_VOID)
12780 ABORT_TRACE("callprop on void");
12781 guard(false, lir->ins2i(LIR_eq, get(&l), JSVAL_TO_SPECIAL(JSVAL_VOID)), MISMATCH_EXIT);
12782 i = JSProto_Boolean;
12783 debug_only_stmt(protoname = "Boolean.prototype";)
12784 } else {
12785 JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
12786 ABORT_TRACE("callprop on null or void");
12789 if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
12790 ABORT_TRACE_ERROR("GetClassPrototype failed!");
12792 obj_ins = INS_CONSTOBJ(obj);
12793 debug_only_stmt(obj_ins = addName(obj_ins, protoname);)
12794 this_ins = get(&l); // use primitive as |this|
12797 JSObject* obj2;
12798 jsuword pcval;
12799 CHECK_STATUS(test_property_cache(obj, obj_ins, obj2, pcval));
12801 if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
12802 ABORT_TRACE("callee is not an object");
12803 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
12805 if (JSVAL_IS_PRIMITIVE(l)) {
12806 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, PCVAL_TO_OBJECT(pcval));
12807 if (!PRIMITIVE_THIS_TEST(fun, l))
12808 ABORT_TRACE("callee does not accept primitive |this|");
12811 stack(0, this_ins);
12812 stack(-1, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval)));
12813 return JSRS_CONTINUE;
12816 JS_REQUIRES_STACK JSRecordingStatus
12817 TraceRecorder::record_JSOP_DELDESC()
12819 return JSRS_STOP;
12822 JS_REQUIRES_STACK JSRecordingStatus
12823 TraceRecorder::record_JSOP_UINT24()
12825 stack(0, lir->insImmf(GET_UINT24(cx->fp->regs->pc)));
12826 return JSRS_CONTINUE;
12829 JS_REQUIRES_STACK JSRecordingStatus
12830 TraceRecorder::record_JSOP_INDEXBASE()
12832 atoms += GET_INDEXBASE(cx->fp->regs->pc);
12833 return JSRS_CONTINUE;
12836 JS_REQUIRES_STACK JSRecordingStatus
12837 TraceRecorder::record_JSOP_RESETBASE()
12839 atoms = cx->fp->script->atomMap.vector;
12840 return JSRS_CONTINUE;
12843 JS_REQUIRES_STACK JSRecordingStatus
12844 TraceRecorder::record_JSOP_RESETBASE0()
12846 atoms = cx->fp->script->atomMap.vector;
12847 return JSRS_CONTINUE;
12850 JS_REQUIRES_STACK JSRecordingStatus
12851 TraceRecorder::record_JSOP_CALLELEM()
12853 return record_JSOP_GETELEM();
12856 JS_REQUIRES_STACK JSRecordingStatus
12857 TraceRecorder::record_JSOP_STOP()
12859 JSStackFrame *fp = cx->fp;
12861 if (fp->imacpc) {
12863 * End of imacro, so return true to the interpreter immediately. The
12864 * interpreter's JSOP_STOP case will return from the imacro, back to
12865 * the pc after the calling op, still in the same JSStackFrame.
12867 atoms = fp->script->atomMap.vector;
12868 return JSRS_CONTINUE;
12871 putArguments();
12874 * We know falling off the end of a constructor returns the new object that
12875 * was passed in via fp->argv[-1], while falling off the end of a function
12876 * returns undefined.
12878 * NB: we do not support script rval (eval, API users who want the result
12879 * of the last expression-statement, debugger API calls).
12881 if (fp->flags & JSFRAME_CONSTRUCTING) {
12882 JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
12883 rval_ins = get(&fp->argv[-1]);
12884 } else {
12885 rval_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
12887 clearFrameSlotsFromCache();
12888 return JSRS_CONTINUE;
12891 JS_REQUIRES_STACK JSRecordingStatus
12892 TraceRecorder::record_JSOP_GETXPROP()
12894 jsval& l = stackval(-1);
12895 if (JSVAL_IS_PRIMITIVE(l))
12896 ABORT_TRACE("primitive-this for GETXPROP?");
12898 jsval* vp;
12899 LIns* v_ins;
12900 NameResult nr;
12901 CHECK_STATUS(name(vp, v_ins, nr));
12902 stack(-1, v_ins);
12903 return JSRS_CONTINUE;
12906 JS_REQUIRES_STACK JSRecordingStatus
12907 TraceRecorder::record_JSOP_CALLXMLNAME()
12909 return JSRS_STOP;
12912 JS_REQUIRES_STACK JSRecordingStatus
12913 TraceRecorder::record_JSOP_TYPEOFEXPR()
12915 return record_JSOP_TYPEOF();
12918 JS_REQUIRES_STACK JSRecordingStatus
12919 TraceRecorder::record_JSOP_ENTERBLOCK()
12921 JSObject* obj;
12922 obj = cx->fp->script->getObject(getFullIndex(0));
12924 LIns* void_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
12925 for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
12926 stack(i, void_ins);
12927 return JSRS_CONTINUE;
12930 JS_REQUIRES_STACK JSRecordingStatus
12931 TraceRecorder::record_JSOP_LEAVEBLOCK()
12933 /* We mustn't exit the lexical block we began recording in. */
12934 if (cx->fp->blockChain != lexicalBlock)
12935 return JSRS_CONTINUE;
12936 else
12937 return JSRS_STOP;
12940 JS_REQUIRES_STACK JSRecordingStatus
12941 TraceRecorder::record_JSOP_GENERATOR()
12943 return JSRS_STOP;
12946 JS_REQUIRES_STACK JSRecordingStatus
12947 TraceRecorder::record_JSOP_YIELD()
12949 return JSRS_STOP;
12952 JS_REQUIRES_STACK JSRecordingStatus
12953 TraceRecorder::record_JSOP_ARRAYPUSH()
12955 uint32_t slot = GET_UINT16(cx->fp->regs->pc);
12956 JS_ASSERT(cx->fp->script->nfixed <= slot);
12957 JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
12958 jsval &arrayval = cx->fp->slots[slot];
12959 JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
12960 JS_ASSERT(OBJ_IS_DENSE_ARRAY(cx, JSVAL_TO_OBJECT(arrayval)));
12961 LIns *array_ins = get(&arrayval);
12962 jsval &elt = stackval(-1);
12963 LIns *elt_ins = box_jsval(elt, get(&elt));
12965 LIns *args[] = { elt_ins, array_ins, cx_ins };
12966 LIns *ok_ins = lir->insCall(&js_ArrayCompPush_ci, args);
12967 guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
12968 return JSRS_CONTINUE;
12971 JS_REQUIRES_STACK JSRecordingStatus
12972 TraceRecorder::record_JSOP_ENUMCONSTELEM()
12974 return JSRS_STOP;
12977 JS_REQUIRES_STACK JSRecordingStatus
12978 TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
12980 LIns* v_ins = stack(-1);
12981 int n = -1 - GET_UINT16(cx->fp->regs->pc);
12982 stack(n, v_ins);
12983 return JSRS_CONTINUE;
12986 JS_REQUIRES_STACK JSRecordingStatus
12987 TraceRecorder::record_JSOP_GETTHISPROP()
12989 LIns* this_ins;
12991 CHECK_STATUS(getThis(this_ins));
12994 * It's safe to just use cx->fp->thisp here because getThis() returns
12995 * JSRS_STOP if thisp is not available.
12997 CHECK_STATUS(getProp(cx->fp->thisp, this_ins));
12998 return JSRS_CONTINUE;
13001 JS_REQUIRES_STACK JSRecordingStatus
13002 TraceRecorder::record_JSOP_GETARGPROP()
13004 return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
13007 JS_REQUIRES_STACK JSRecordingStatus
13008 TraceRecorder::record_JSOP_GETLOCALPROP()
13010 return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
13013 JS_REQUIRES_STACK JSRecordingStatus
13014 TraceRecorder::record_JSOP_INDEXBASE1()
13016 atoms += 1 << 16;
13017 return JSRS_CONTINUE;
13020 JS_REQUIRES_STACK JSRecordingStatus
13021 TraceRecorder::record_JSOP_INDEXBASE2()
13023 atoms += 2 << 16;
13024 return JSRS_CONTINUE;
13027 JS_REQUIRES_STACK JSRecordingStatus
13028 TraceRecorder::record_JSOP_INDEXBASE3()
13030 atoms += 3 << 16;
13031 return JSRS_CONTINUE;
13034 JS_REQUIRES_STACK JSRecordingStatus
13035 TraceRecorder::record_JSOP_CALLGVAR()
13037 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
13038 if (JSVAL_IS_NULL(slotval))
13039 // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
13040 return JSRS_CONTINUE;
13042 uint32 slot = JSVAL_TO_INT(slotval);
13044 if (!lazilyImportGlobalSlot(slot))
13045 ABORT_TRACE("lazy import of global slot failed");
13047 jsval& v = STOBJ_GET_SLOT(globalObj, slot);
13048 stack(0, get(&v));
13049 stack(1, INS_NULL());
13050 return JSRS_CONTINUE;
13053 JS_REQUIRES_STACK JSRecordingStatus
13054 TraceRecorder::record_JSOP_CALLLOCAL()
13056 uintN slot = GET_SLOTNO(cx->fp->regs->pc);
13057 stack(0, var(slot));
13058 stack(1, INS_NULL());
13059 return JSRS_CONTINUE;
13062 JS_REQUIRES_STACK JSRecordingStatus
13063 TraceRecorder::record_JSOP_CALLARG()
13065 uintN slot = GET_ARGNO(cx->fp->regs->pc);
13066 stack(0, arg(slot));
13067 stack(1, INS_NULL());
13068 return JSRS_CONTINUE;

/* Functions for use with JSOP_CALLBUILTIN. */

static JSBool
ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
{
    jsval *argv = JS_ARGV(cx, vp);
    JS_ASSERT(JSVAL_IS_INT(argv[0]));
    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
}

static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
    jsval v = OBJECT_TO_JSVAL(obj);
    JSBool ok = js_ValueToIterator(cx, flags, &v);

    if (!ok) {
        js_SetBuiltinError(cx);
        return NULL;
    }
    return JSVAL_TO_OBJECT(v);
}

static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
}

static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
    JSAutoTempValueRooter tvr(cx);
    JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());

    if (!ok) {
        js_SetBuiltinError(cx);
        return JSVAL_ERROR_COOKIE;
    }
    return tvr.value();
}

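/*
 * Each JS_DEFINE_TRCINFO_1 entry below pairs a slow native with its traced
 * counterpart (*_tn). On trace the *_tn variant is called directly; on
 * failure it flags the error via js_SetBuiltinError() and returns a sentinel
 * (NULL or JSVAL_ERROR_COOKIE) so execution can leave the trace and fall
 * back to the interpreter.
 */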
JS_DEFINE_TRCINFO_1(ObjectToIterator,
    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
    (3, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, PC, THIS, 0, 0)))

static const struct BuiltinFunctionInfo {
    JSNativeTraceInfo *ti;
    int nargs;
} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
    {&ObjectToIterator_trcinfo, 1},
    {&CallIteratorNext_trcinfo, 0},
};

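/*
 * Return the builtin function object for |index|, creating it lazily on
 * first use. The new funobj is published in rt->builtinFunctions under the
 * GC lock, with a re-test so that racing threads agree on a single object.
 */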
JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index)
{
    JSRuntime *rt = cx->runtime;
    JSObject *funobj = rt->builtinFunctions[index];

    if (!funobj) {
        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
        JS_ASSERT(index < JS_ARRAY_LENGTH(builtinFunctionInfo));
        const BuiltinFunctionInfo *bfi = &builtinFunctionInfo[index];
        JSFunction *fun = js_NewFunction(cx,
                                         NULL,
                                         JS_DATA_TO_FUNC_PTR(JSNative, bfi->ti),
                                         bfi->nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRCINFO,
                                         NULL,
                                         NULL);
        if (fun) {
            funobj = FUN_OBJECT(fun);
            STOBJ_CLEAR_PROTO(funobj);
            STOBJ_CLEAR_PARENT(funobj);

            JS_LOCK_GC(rt);
            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
                rt->builtinFunctions[index] = funobj;
            else
                funobj = rt->builtinFunctions[index];
            JS_UNLOCK_GC(rt);
        }
    }
    return funobj;
}

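/*
 * JSOP_CALLBUILTIN (emitted only by imacros) pushes a builtin function and
 * its |this|: duplicate the current top of stack into the |this| slot one
 * above it, then overwrite the original slot with the builtin funobj
 * constant, so the funobj becomes the callee.
 */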
JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CALLBUILTIN()
{
    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
    if (!obj)
        ABORT_TRACE_ERROR("error in js_GetBuiltinFunction");

    stack(0, get(&stackval(-1)));
    stack(-1, INS_CONSTOBJ(obj));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, lir->insImmf(GET_INT8(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, lir->insImmf(GET_INT32(cx->fp->regs->pc)));
    return JSRS_CONTINUE;
}

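/*
 * JSOP_LENGTH specializes .length access on the operand's type: string
 * length, the arguments object's argc, the length slot of dense and slow
 * arrays, and a generic property get for other native objects.
 */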
JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            ABORT_TRACE("non-string primitive JSOP_LENGTH unsupported");
        set(&l, lir->ins1(LIR_i2f, getStringLength(get(&l))));
        return JSRS_CONTINUE;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);

    if (STOBJ_GET_CLASS(obj) == &js_ArgumentsClass) {
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (!afp)
            ABORT_TRACE("can't reach arguments object's frame");

        LIns* v_ins = lir->ins1(LIR_i2f, INS_CONST(afp->argc));
        set(&l, v_ins);
        return JSRS_CONTINUE;
    }

    LIns* v_ins;
    if (OBJ_IS_ARRAY(cx, obj)) {
        if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
            if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
                JS_NOT_REACHED("OBJ_IS_DENSE_ARRAY but not?!?");
                return JSRS_STOP;
            }
        } else {
            if (!guardClass(obj, obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT)))
                ABORT_TRACE("can't trace length property access on non-array");
        }
        v_ins = lir->ins1(LIR_i2f, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH));
    } else {
        if (!OBJ_IS_NATIVE(obj))
            ABORT_TRACE("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return JSRS_CONTINUE;
}

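/*
 * JSOP_NEWARRAY allocates an array of known length, boxes and stores each
 * element from the stack into the new array's dslots, and records the
 * number of non-hole elements in JSSLOT_ARRAY_COUNT.
 */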
JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_NEWARRAY()
{
    LIns *proto_ins;
    CHECK_STATUS(getClassPrototype(JSProto_Array, proto_ins));

    uint32 len = GET_UINT16(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(len);

    LIns* args[] = { lir->insImm(len), proto_ins, cx_ins };
    LIns* v_ins = lir->insCall(&js_NewUninitializedArray_ci, args);
    guard(false, lir->ins_eq0(v_ins), OOM_EXIT);

    LIns* dslots_ins = NULL;
    uint32 count = 0;
    for (uint32 i = 0; i < len; i++) {
        jsval& v = stackval(int(i) - int(len));
        if (v != JSVAL_HOLE)
            count++;
        LIns* elt_ins = box_jsval(v, get(&v));
        stobj_set_dslot(v_ins, i, dslots_ins, elt_ins);
    }

    if (count > 0)
        stobj_set_fslot(v_ins, JSSLOT_ARRAY_COUNT, INS_CONST(count));

    stack(-int(len), v_ins);
    return JSRS_CONTINUE;
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_HOLE)));
    return JSRS_CONTINUE;
}

JSRecordingStatus
TraceRecorder::record_JSOP_LOOP()
{
    return JSRS_CONTINUE;
}

static const uint32 sMaxConcatNSize = 32;

/*
 * Copy the result of defvalue.string back into concatn's arguments, clean the
 * stack, and return a pointer to the argument that was just overwritten.
 */
JS_REQUIRES_STACK jsval *
js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
                                TraceRecorder *recorder)
{
    JS_ASSERT(*regs.pc == JSOP_IMACOP);

    /* Pop the argument offset and imacro return value. */
    jsint offset = JSVAL_TO_INT(*--regs.sp);
    jsval *imacroResult = --regs.sp;

    /* Replace non-primitive argument with new primitive argument. */
    jsval *vp = regs.sp - offset;
    JS_ASSERT(regs.sp - argc <= vp && vp < regs.sp);
    if (recorder)
        recorder->set(vp, recorder->get(imacroResult));
    *vp = *imacroResult;

    return vp;
}

/*
 * Initially, concatn takes N arguments on the stack, where N is the immediate
 * operand. To convert these arguments to primitives, we must repeatedly call
 * the defvalue.string imacro. To achieve this iteration, defvalue.string ends
 * with imacop. Hence, this function is called multiple times, each time with
 * one fewer non-primitive. To keep track of where we are in the loop, we must
 * push an additional index value on the stack. Hence, on all subsequent
 * entries, the stack is organized as follows (bottom to top):
 *
 *   prim[1]
 *   ...
 *   prim[i-1]
 *   nonprim[i]     argument to imacro
 *   arg[i+1]
 *   ...
 *   arg[N]
 *   primarg[i]     nonprim[i] converted to primitive
 *   i
 *
 * Hence, the stack setup on entry to this function (and JSOP_CONCATN in the
 * interpreter, on trace abort) is dependent on whether an imacro is in
 * progress. When all of concatn's arguments are primitive, it emits a builtin
 * call and allows the actual JSOP_CONCATN to be executed by the interpreter.
 */
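
/*
 * Illustrative walk-through (not from the original source): suppose the
 * emitter has flattened a chain such as |o + "-" + s| into JSOP_CONCATN
 * with N = 3 and |o| is a non-primitive. The first visit to this recorder
 * finds |o| and tail-calls the defvalue.string imacro on it; when the
 * imacro's trailing imacop re-enters the recorder, the cleanup above copies
 * the primitive result over |o|'s slot and the scan resumes. Once every
 * argument is primitive, the js_ConcatN builtin call is emitted.
 */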
JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_CONCATN()
{
    JSStackFrame *fp = cx->fp;
    JSFrameRegs &regs = *fp->regs;

    /*
     * If we are in an imacro, we must have just finished a call to
     * defvalue.string. Continue where we left off last time.
     */
    uint32 argc;
    jsval *loopStart;
    if (fp->imacpc) {
        JS_ASSERT(*fp->imacpc == JSOP_CONCATN);
        argc = GET_ARGC(fp->imacpc);
        loopStart = js_ConcatPostImacroStackCleanup(argc, regs, this) + 1;
    } else {
        argc = GET_ARGC(regs.pc);
        JS_ASSERT(argc > 0);
        loopStart = regs.sp - argc;

        /* Prevent code/alloca explosion. */
        if (argc > sMaxConcatNSize)
            return JSRS_STOP;
    }

    /* Convert non-primitives to primitives using defvalue.string. */
    for (jsval *vp = loopStart; vp != regs.sp; ++vp) {
        if (!JSVAL_IS_PRIMITIVE(*vp)) {
            /*
             * In addition to the jsval we want the imacro to convert to
             * primitive, pass through the offset of the argument on the stack.
             */
            jsint offset = regs.sp - vp;

            /* Push the non-primitive to convert. */
            set(regs.sp, get(vp), true);
            *regs.sp++ = *vp;

            /* Push the argument index. */
            set(regs.sp, lir->insImm(offset), true);
            *regs.sp++ = INT_TO_JSVAL(offset);

            /* Nested imacro call OK because this is a tail call. */
            return call_imacro(defvalue_imacros.string);
        }
    }

    /* Build an array of the stringified primitives. */
    int32_t bufSize = argc * sizeof(JSString *);
    LIns *buf_ins = lir->insAlloc(bufSize);
    int32_t d = 0;
    for (jsval *vp = regs.sp - argc; vp != regs.sp; ++vp, d += sizeof(void *))
        lir->insStorei(stringify(*vp), buf_ins, d);

    /* Perform concatenation using a builtin. */
    LIns *args[] = { lir->insImm(argc), buf_ins, cx_ins };
    LIns *concat = lir->insCall(&js_ConcatN_ci, args);
    guard(false, lir->ins_eq0(concat), OOM_EXIT);

    /* Update tracker with result. */
    jsval *afterPop = regs.sp - (argc - 1);
    set(afterPop - 1, concat);

    return JSRS_CONTINUE;
}

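/*
 * JSOP_SETMETHOD and JSOP_INITMETHOD (variants of SETPROP/INITPROP emitted
 * for function-valued property assignments) record exactly like SETPROP and
 * INITPROP on trace.
 */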
JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SETMETHOD()
{
    return record_JSOP_SETPROP();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_INITMETHOD()
{
    return record_JSOP_INITPROP();
}

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::record_JSOP_SHARPINIT()
{
    return JSRS_STOP;
}

#define DBG_STUB(OP)                                                          \
    JS_REQUIRES_STACK JSRecordingStatus                                       \
    TraceRecorder::record_##OP()                                              \
    {                                                                         \
        ABORT_TRACE("can't trace " #OP);                                      \
    }

DBG_STUB(JSOP_GETUPVAR_DBG)
DBG_STUB(JSOP_CALLUPVAR_DBG)
DBG_STUB(JSOP_DEFFUN_DBGFC)
DBG_STUB(JSOP_DEFLOCALFUN_DBGFC)
DBG_STUB(JSOP_LAMBDA_DBGFC)

#ifdef JS_JIT_SPEW
/*
 * Print information about entry typemaps and unstable exits for all peers
 * at a PC.
 */
void
DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
                  uint32 argc)
{
    VMFragment* f;
    TreeInfo* ti;
    bool looped = false;
    unsigned length = 0;

    for (f = getLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
        if (!f->vmprivate)
            continue;
        debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
        ti = (TreeInfo*)f->vmprivate;
        if (looped)
            JS_ASSERT(ti->nStackTypes == length);
        for (unsigned i = 0; i < ti->nStackTypes; i++)
            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->stackTypeMap()[i]]);
        debug_only_print0(LC_TMRecorder, " GLOBALS=");
        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->globalTypeMap()[i]]);
        debug_only_print0(LC_TMRecorder, "\n");
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            debug_only_print0(LC_TMRecorder, "EXIT ");
            JSTraceType* m = uexit->exit->fullTypeMap();
            debug_only_print0(LC_TMRecorder, "STACK=");
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                debug_only_printf(LC_TMRecorder, "%c", typeChar[m[i]]);
            debug_only_print0(LC_TMRecorder, " GLOBALS=");
            for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
                debug_only_printf(LC_TMRecorder, "%c",
                                  typeChar[m[uexit->exit->numStackSlots + i]]);
            }
            debug_only_print0(LC_TMRecorder, "\n");
            uexit = uexit->next;
        }
        length = ti->nStackTypes;
        looped = true;
    }
}
#endif

#ifdef MOZ_TRACEVIS

FILE* traceVisLogFile = NULL;
JSHashTable *traceVisScriptTable = NULL;

JS_FRIEND_API(bool)
JS_StartTraceVis(const char* filename = "tracevis.dat")
{
    if (traceVisLogFile) {
        // If we're currently recording, first we must stop.
        JS_StopTraceVis();
    }

    traceVisLogFile = fopen(filename, "wb");
    if (!traceVisLogFile)
        return false;

    return true;
}

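/*
 * Script-callable wrapper for JS_StartTraceVis: an optional string argument
 * names the output file (deflated to a C string first); otherwise the
 * default "tracevis.dat" is used.
 */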
JS_FRIEND_API(JSBool)
js_StartTraceVis(JSContext *cx, JSObject *obj,
                 uintN argc, jsval *argv, jsval *rval)
{
    JSBool ok;

    if (argc > 0 && JSVAL_IS_STRING(argv[0])) {
        JSString *str = JSVAL_TO_STRING(argv[0]);
        char *filename = js_DeflateString(cx, str->chars(), str->length());
        if (!filename)
            goto error;
        ok = JS_StartTraceVis(filename);
        cx->free(filename);
    } else {
        ok = JS_StartTraceVis();
    }

    if (ok) {
        fprintf(stderr, "started TraceVis recording\n");
        return JS_TRUE;
    }

  error:
    JS_ReportError(cx, "failed to start TraceVis recording");
    return JS_FALSE;
}

JS_FRIEND_API(bool)
JS_StopTraceVis()
{
    if (!traceVisLogFile)
        return false;

    fclose(traceVisLogFile); // not worth checking the result
    traceVisLogFile = NULL;

    return true;
}

JS_FRIEND_API(JSBool)
js_StopTraceVis(JSContext *cx, JSObject *obj,
                uintN argc, jsval *argv, jsval *rval)
{
    JSBool ok = JS_StopTraceVis();

    if (ok)
        fprintf(stderr, "stopped TraceVis recording\n");
    else
        JS_ReportError(cx, "TraceVis isn't running");

    return ok;
}

#endif /* MOZ_TRACEVIS */

#define UNUSED(n)                                                             \
    JS_REQUIRES_STACK JSRecordingStatus                                       \
    TraceRecorder::record_JSOP_UNUSED##n() {                                  \
        JS_NOT_REACHED("JSOP_UNUSED" #n);                                     \
        return JSRS_STOP;                                                     \