[mozilla-central.git] / js / src / jstracer.cpp
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "jsstdint.h"
43 #include "jsbit.h" // low-level (NSPR-based) headers next
44 #include "jsprf.h"
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
48 #include <malloc.h>
49 #ifdef _MSC_VER
50 #define alloca _alloca
51 #endif
52 #endif
53 #ifdef SOLARIS
54 #include <alloca.h>
55 #endif
56 #include <limits.h>
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
60 #include "jsarray.h"
61 #include "jsbool.h"
62 #include "jscntxt.h"
63 #include "jscompartment.h"
64 #include "jsdate.h"
65 #include "jsdbgapi.h"
66 #include "jsemit.h"
67 #include "jsfun.h"
68 #include "jsinterp.h"
69 #include "jsiter.h"
70 #include "jsmath.h"
71 #include "jsobj.h"
72 #include "jsopcode.h"
73 #include "jsregexp.h"
74 #include "jsscope.h"
75 #include "jsscript.h"
76 #include "jsstaticcheck.h"
77 #include "jstl.h"
78 #include "jstracer.h"
79 #include "jsxml.h"
80 #include "jstypedarray.h"
82 #include "jsatominlines.h"
83 #include "jscntxtinlines.h"
84 #include "jsfuninlines.h"
85 #include "jsinterpinlines.h"
86 #include "jspropertycacheinlines.h"
87 #include "jsobjinlines.h"
88 #include "jsscopeinlines.h"
89 #include "jsscriptinlines.h"
90 #include "jscntxtinlines.h"
91 #include "jsopcodeinlines.h"
93 #ifdef JS_METHODJIT
94 #include "methodjit/MethodJIT.h"
95 #endif
97 #include "jsautooplen.h" // generated headers last
98 #include "imacros.c.out"
100 #if defined(NANOJIT_ARM) && defined(__GNUC__) && defined(AVMPLUS_LINUX)
101 #include <stdlib.h>
102 #include <unistd.h>
103 #include <sys/types.h>
104 #include <sys/stat.h>
105 #include <sys/mman.h>
106 #include <fcntl.h>
107 #include <string.h>
108 #include <elf.h>
109 #endif
111 #ifdef DEBUG
112 namespace js {
113 static const char*
114 getExitName(ExitType type)
116 static const char* exitNames[] =
118 #define MAKE_EXIT_STRING(x) #x,
119 JS_TM_EXITCODES(MAKE_EXIT_STRING)
120 #undef MAKE_EXIT_STRING
121 NULL
124 JS_ASSERT(type < TOTAL_EXIT_TYPES);
126 return exitNames[type];
129 #endif /* DEBUG */
131 namespace nanojit {
132 using namespace js;
133 using namespace js::gc;
134 using namespace js::tjit;
137 * This macro is just like JS_NOT_REACHED but it exists in non-debug builds
138 * too. Its presence indicates shortcomings in jstracer's handling of some
139 * OOM situations:
140 * - OOM failures in constructors, which lack a return value to pass back a
141 * failure code (though it can and should be done indirectly).
142 * - OOM failures in the "infallible" allocators used for Nanojit.
144 * FIXME: bug 624590 is open to fix these problems.
146 #define OUT_OF_MEMORY_ABORT(msg) JS_Assert(msg, __FILE__, __LINE__);
148 /* Implement embedder-specific nanojit members. */
151 * Nanojit requires infallible allocations most of the time. We satisfy this
152 * by reserving some space in each allocator which is used as a fallback if
153 * js_calloc() fails. Ideally this reserve space should be big enough to
154 * allow for all infallible requests made to the allocator until the next OOM
155 * check occurs, but it turns out that's impossible to guarantee (though it
156 * should be unlikely). So we abort if the reserve runs out; this is better
157 * than allowing memory errors to occur.
159 * The space calculations are as follows... between OOM checks, each
160 * VMAllocator can do (i.e. has been seen to do) the following maximum
161 * allocations on a 64-bit build:
163 * - dataAlloc: 31 minimum-sized chunks (MIN_CHUNK_SZB) in assm->compile()
164 * (though arbitrarily more could occur due to LabelStateMap additions done
165 * when handling labels): 62,248 bytes. This one is the most likely to
166 * overflow.
168 * - traceAlloc: 1 minimum-sized chunk: 2,008 bytes.
170 * - tempAlloc: 1 LIR code chunk (CHUNK_SZB) and 5 minimum-sized chunks for
171 * sundry small allocations: 18,048 bytes.
173 * The reserve sizes are chosen to exceed these maxima by a reasonable amount.
174 * Reserves for 32-bit builds are slightly more than half, because most of the
175 * allocated space is used to hold pointers.
177 * FIXME: Bug 624590 is open to get rid of all this.
179 static const size_t DataReserveSize = 12500 * sizeof(uintptr_t);
180 static const size_t TraceReserveSize = 5000 * sizeof(uintptr_t);
181 static const size_t TempReserveSize = 1000 * sizeof(uintptr_t);
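/*
 * Editor's note, an illustrative bit of arithmetic (not in the original file)
 * tying the constants above to the worst cases described in the comment: on a
 * 64-bit build sizeof(uintptr_t) == 8, so DataReserveSize works out to
 * 12500 * 8 = 100,000 bytes, comfortably above the ~62,248-byte dataAlloc
 * worst case noted above, which is the allocator most likely to overflow.
 */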
183 void*
184 nanojit::Allocator::allocChunk(size_t nbytes, bool fallible)
186 VMAllocator *vma = (VMAllocator*)this;
188 * Nb: it's conceivable that request 1 might fail (in which case
189 * mOutOfMemory will be set) and then request 2 succeeds. The subsequent
190 * OOM check will still fail, which is what we want, and the success of
191 * request 2 makes it less likely that the reserve space will overflow.
193 void *p = js_calloc(nbytes);
194 if (p) {
195 vma->mSize += nbytes;
196 } else {
197 vma->mOutOfMemory = true;
198 if (!fallible) {
199 p = (void *)vma->mReserveCurr;
200 vma->mReserveCurr += nbytes;
201 if (vma->mReserveCurr > vma->mReserveLimit)
202 OUT_OF_MEMORY_ABORT("nanojit::Allocator::allocChunk: out of memory");
203 memset(p, 0, nbytes);
204 vma->mSize += nbytes;
207 return p;
210 void
211 nanojit::Allocator::freeChunk(void *p) {
212 VMAllocator *vma = (VMAllocator*)this;
213 if (p < vma->mReserve || uintptr_t(p) >= vma->mReserveLimit)
214 js_free(p);
217 void
218 nanojit::Allocator::postReset() {
219 VMAllocator *vma = (VMAllocator*)this;
220 vma->mOutOfMemory = false;
221 vma->mSize = 0;
222 vma->mReserveCurr = uintptr_t(vma->mReserve);
226 StackFilter::getTop(LIns* guard)
228 VMSideExit* e = (VMSideExit*)guard->record()->exit;
229 return e->sp_adj;
232 #if defined NJ_VERBOSE
233 static void
234 formatGuardExit(InsBuf *buf, LIns *ins)
236 VMSideExit *x = (VMSideExit *)ins->record()->exit;
237 RefBuf b1;
238 if (LogController.lcbits & LC_FragProfile)
239 VMPI_snprintf(b1.buf, b1.len, " (GuardID=%03d)", ins->record()->profGuardID);
240 else
241 b1.buf[0] = '\0';
242 VMPI_snprintf(buf->buf, buf->len,
243 " -> exit=%p pc=%p imacpc=%p sp%+ld rp%+ld %s%s",
244 (void *)x,
245 (void *)x->pc,
246 (void *)x->imacpc,
247 (long int)x->sp_adj,
248 (long int)x->rp_adj,
249 getExitName(x->exitType),
250 b1.buf);
253 void
254 LInsPrinter::formatGuard(InsBuf *buf, LIns *ins)
256 RefBuf b1, b2;
257 InsBuf b3;
258 formatGuardExit(&b3, ins);
259 VMPI_snprintf(buf->buf, buf->len,
260 "%s: %s %s%s",
261 formatRef(&b1, ins),
262 lirNames[ins->opcode()],
263 ins->oprnd1() ? formatRef(&b2, ins->oprnd1()) : "",
264 b3.buf);
267 void
268 LInsPrinter::formatGuardXov(InsBuf *buf, LIns *ins)
270 RefBuf b1, b2, b3;
271 InsBuf b4;
272 formatGuardExit(&b4, ins);
273 VMPI_snprintf(buf->buf, buf->len,
274 "%s = %s %s, %s%s",
275 formatRef(&b1, ins),
276 lirNames[ins->opcode()],
277 formatRef(&b2, ins->oprnd1()),
278 formatRef(&b3, ins->oprnd2()),
279 b4.buf);
282 const char*
283 nanojit::LInsPrinter::accNames[] = {
284 "state", // (1 << 0) == ACCSET_STATE
285 "sp", // (1 << 1) == ACCSET_STACK
286 "rp", // (1 << 2) == ACCSET_RSTACK
287 "cx", // (1 << 3) == ACCSET_CX
288 "tm", // (1 << 4) == ACCSET_TM
289 "eos", // (1 << 5) == ACCSET_EOS
290 "alloc", // (1 << 6) == ACCSET_ALLOC
291 "regs", // (1 << 7) == ACCSET_FRAMEREGS
292 "sf", // (1 << 8) == ACCSET_STACKFRAME
293 "rt", // (1 << 9) == ACCSET_RUNTIME
295 "objclasp", // (1 << 10) == ACCSET_OBJ_CLASP
296 "objflags", // (1 << 11) == ACCSET_OBJ_FLAGS
297 "objshape", // (1 << 12) == ACCSET_OBJ_SHAPE
298 "objproto", // (1 << 13) == ACCSET_OBJ_PROTO
299 "objparent", // (1 << 14) == ACCSET_OBJ_PARENT
300 "objprivate", // (1 << 15) == ACCSET_OBJ_PRIVATE
301 "objcapacity", // (1 << 16) == ACCSET_OBJ_CAPACITY
302 "objslots", // (1 << 17) == ACCSET_OBJ_SLOTS
304 "slots", // (1 << 18) == ACCSET_SLOTS
305 "tarray", // (1 << 19) == ACCSET_TARRAY
306 "tdata", // (1 << 20) == ACCSET_TARRAY_DATA
307 "iter", // (1 << 21) == ACCSET_ITER
308 "iterprops", // (1 << 22) == ACCSET_ITER_PROPS
309 "str", // (1 << 23) == ACCSET_STRING
310 "strmchars", // (1 << 24) == ACCSET_STRING_MCHARS
311 "typemap", // (1 << 25) == ACCSET_TYPEMAP
312 "fcslots", // (1 << 26) == ACCSET_FCSLOTS
313 "argsdata", // (1 << 27) == ACCSET_ARGS_DATA
315 "?!" // this entry should never be used, have it just in case
318 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(nanojit::LInsPrinter::accNames) == TM_NUM_USED_ACCS + 1);
319 #endif
321 } /* namespace nanojit */
323 JS_DEFINE_CALLINFO_2(extern, STRING, js_IntToString, CONTEXT, INT32, 1, nanojit::ACCSET_NONE)
325 namespace js {
327 using namespace nanojit;
329 #if JS_HAS_XML_SUPPORT
330 #define RETURN_VALUE_IF_XML(val, ret) \
331 JS_BEGIN_MACRO \
332 if (!val.isPrimitive() && val.toObject().isXML()) \
333 RETURN_VALUE("xml detected", ret); \
334 JS_END_MACRO
335 #else
336 #define RETURN_VALUE_IF_XML(val, ret) ((void) 0)
337 #endif
339 #define RETURN_IF_XML_A(val) RETURN_VALUE_IF_XML(val, ARECORD_STOP)
340 #define RETURN_IF_XML(val) RETURN_VALUE_IF_XML(val, RECORD_STOP)
342 JS_STATIC_ASSERT(sizeof(JSValueType) == 1);
343 JS_STATIC_ASSERT(offsetof(TraceNativeStorage, stack_global_buf) % 16 == 0);
345 /* Map to translate a type tag into a printable representation. */
346 #ifdef DEBUG
347 static char
348 TypeToChar(JSValueType type)
350 switch (type) {
351 case JSVAL_TYPE_DOUBLE: return 'D';
352 case JSVAL_TYPE_INT32: return 'I';
353 case JSVAL_TYPE_STRING: return 'S';
354 case JSVAL_TYPE_OBJECT: return '!';
355 case JSVAL_TYPE_BOOLEAN: return 'B';
356 case JSVAL_TYPE_NULL: return 'N';
357 case JSVAL_TYPE_UNDEFINED: return 'U';
358 case JSVAL_TYPE_MAGIC: return 'M';
359 case JSVAL_TYPE_FUNOBJ: return 'F';
360 case JSVAL_TYPE_NONFUNOBJ: return 'O';
361 case JSVAL_TYPE_BOXED: return '#';
362 case JSVAL_TYPE_STRORNULL: return 's';
363 case JSVAL_TYPE_OBJORNULL: return 'o';
365 return '?';
368 static char
369 ValueToTypeChar(const Value &v)
371 if (v.isInt32()) return 'I';
372 if (v.isDouble()) return 'D';
373 if (v.isString()) return 'S';
374 if (v.isObject()) return v.toObject().isFunction() ? 'F' : 'O';
375 if (v.isBoolean()) return 'B';
376 if (v.isNull()) return 'N';
377 if (v.isUndefined()) return 'U';
378 if (v.isMagic()) return 'M';
379 return '?';
381 #endif
384 /* Blacklist parameters. */
387 * Number of iterations of a loop where we start tracing. That is, we don't
388 * start tracing until the beginning of the HOTLOOP-th iteration.
390 #define HOTLOOP 8
392 /* Attempt recording this many times before blacklisting permanently. */
393 #define BL_ATTEMPTS 2
395 /* Skip this many hits before attempting recording again, after an aborted attempt. */
396 #define BL_BACKOFF 32
399 * If, after running a trace LOOP_CHECK_ITERS times, it hasn't done MIN_LOOP_ITERS
400 * iterations, we blacklist it.
402 #define MIN_LOOP_ITERS 200
403 #define LOOP_CHECK_ITERS 10
405 #ifdef DEBUG
406 #define LOOP_COUNT_MAX 100000000
407 #else
408 #define LOOP_COUNT_MAX MIN_LOOP_ITERS
409 #endif
411 /* Number of times a side exit must be taken before we try to extend the tree from it. */
412 #define HOTEXIT 1
414 /* Number of times we try to extend the tree along a side exit. */
415 #define MAXEXIT 3
417 /* Maximum number of peer trees allowed. */
418 #define MAXPEERS 9
420 /* Max call depths for inlining. */
421 #define MAX_CALLDEPTH 10
423 /* Max number of slots in a table-switch. */
424 #define MAX_TABLE_SWITCH 256
426 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
427 #define MAX_INTERP_STACK_BYTES \
428 (MAX_NATIVE_STACK_SLOTS * sizeof(Value) + \
429 MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \
430 sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
432 /* Max number of branches per tree. */
433 #define MAX_BRANCHES 32
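/*
 * Editor's note, a worked example of how the blacklisting constants above
 * combine (see Blacklist() and Backoff() further down in this file): each
 * aborted recording attempt at a given PC bumps a per-PC counter and pushes
 * the tree's hit count back by BL_BACKOFF (32) hits, so the loop must get hot
 * again before recording is retried. Once the per-PC counter exceeds
 * BL_ATTEMPTS * MAXPEERS (2 * 9 = 18), or a single tree accumulates more than
 * BL_ATTEMPTS recording attempts, the PC is rewritten to JSOP_NOTRACE and is
 * not traced again unless AttemptCompilation() unblacklists it.
 */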
435 #define CHECK_STATUS(expr) \
436 JS_BEGIN_MACRO \
437 RecordingStatus _status = (expr); \
438 if (_status != RECORD_CONTINUE) \
439 return _status; \
440 JS_END_MACRO
442 #define CHECK_STATUS_A(expr) \
443 JS_BEGIN_MACRO \
444 AbortableRecordingStatus _status = InjectStatus((expr)); \
445 if (_status != ARECORD_CONTINUE) \
446 return _status; \
447 JS_END_MACRO
449 #ifdef JS_JIT_SPEW
450 #define RETURN_VALUE(msg, value) \
451 JS_BEGIN_MACRO \
452 debug_only_printf(LC_TMAbort, "trace stopped: %d: %s\n", __LINE__, (msg)); \
453 return (value); \
454 JS_END_MACRO
455 #else
456 #define RETURN_VALUE(msg, value) return (value)
457 #endif
459 #define RETURN_STOP(msg) RETURN_VALUE(msg, RECORD_STOP)
460 #define RETURN_STOP_A(msg) RETURN_VALUE(msg, ARECORD_STOP)
461 #define RETURN_ERROR(msg) RETURN_VALUE(msg, RECORD_ERROR)
462 #define RETURN_ERROR_A(msg) RETURN_VALUE(msg, ARECORD_ERROR)
464 #ifdef JS_JIT_SPEW
465 struct __jitstats {
466 #define JITSTAT(x) uint64 x;
467 #include "jitstats.tbl"
468 #undef JITSTAT
469 } jitstats = { 0LL, };
471 JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);
473 enum jitstat_ids {
474 #define JITSTAT(x) STAT ## x ## ID,
475 #include "jitstats.tbl"
476 #undef JITSTAT
477 STAT_IDS_TOTAL
480 static JSBool
481 jitstats_getOnTrace(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
483 *vp = BOOLEAN_TO_JSVAL(JS_ON_TRACE(cx));
484 return true;
487 static JSPropertySpec jitstats_props[] = {
488 #define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
489 #include "jitstats.tbl"
490 #undef JITSTAT
491 { "onTrace", 0, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT, jitstats_getOnTrace, NULL },
492 { 0 }
495 static JSBool
496 jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
498 int index = -1;
500 if (JSID_IS_STRING(id)) {
501 JSAtom* str = JSID_TO_ATOM(id);
502 if (StringEqualsAscii(str, "HOTLOOP")) {
503 *vp = INT_TO_JSVAL(HOTLOOP);
504 return JS_TRUE;
507 #ifdef JS_METHODJIT
508 if (StringEqualsAscii(str, "profiler")) {
509 *vp = BOOLEAN_TO_JSVAL(cx->profilingEnabled);
510 return JS_TRUE;
512 #endif
515 if (JSID_IS_INT(id))
516 index = JSID_TO_INT(id);
518 uint64 result = 0;
519 switch (index) {
520 #define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
521 #include "jitstats.tbl"
522 #undef JITSTAT
523 default:
524 *vp = JSVAL_VOID;
525 return JS_TRUE;
528 if (result < JSVAL_INT_MAX) {
529 *vp = INT_TO_JSVAL(jsint(result));
530 return JS_TRUE;
532 char retstr[64];
533 JS_snprintf(retstr, sizeof retstr, "%llu", result);
534 *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
535 return JS_TRUE;
538 JSClass jitstats_class = {
539 "jitstats",
541 JS_PropertyStub, JS_PropertyStub,
542 jitstats_getProperty, JS_StrictPropertyStub,
543 JS_EnumerateStub, JS_ResolveStub,
544 JS_ConvertStub, NULL,
545 JSCLASS_NO_OPTIONAL_MEMBERS
548 void
549 InitJITStatsClass(JSContext *cx, JSObject *glob)
551 JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
554 #define AUDIT(x) (jitstats.x++)
555 #else
556 #define AUDIT(x) ((void)0)
557 #endif /* JS_JIT_SPEW */
559 static avmplus::AvmCore s_core = avmplus::AvmCore();
560 static avmplus::AvmCore* core = &s_core;
562 #ifdef JS_JIT_SPEW
563 static void
564 DumpPeerStability(TraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
565 #endif
568 * We really need a better way to configure the JIT. Shaver, where is
569 * my fancy JIT object?
571 * NB: this is raced on, if jstracer.cpp should ever be running MT.
572 * I think it's harmless though.
574 static bool did_we_check_processor_features = false;
576 /* ------ Debug logging control ------ */
579 * All the logging control stuff lives in here. It is shared between
580 * all threads, but I think that's OK.
582 LogControl LogController;
584 #ifdef JS_JIT_SPEW
587 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
588 * Also harmless.
590 static bool did_we_set_up_debug_logging = false;
592 static void
593 InitJITLogController()
595 char *tm, *tmf;
596 uint32_t bits;
598 LogController.lcbits = 0;
600 tm = getenv("TRACEMONKEY");
601 if (tm) {
602 fflush(NULL);
603 printf(
604 "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
605 "Try 'TMFLAGS=help js -j' for a list of options.\n"
607 exit(0);
610 tmf = getenv("TMFLAGS");
611 if (!tmf) return;
613 /* Using strstr() is really a cheap hack as far as flag decoding goes. */
614 if (strstr(tmf, "help")) {
615 fflush(NULL);
616 printf(
617 "usage: TMFLAGS=option,option,option,... where options can be:\n"
618 "\n"
619 " help show this message\n"
620 " ------ options for jstracer & jsregexp ------\n"
621 " minimal ultra-minimalist output; try this first\n"
622 " full everything except 'treevis' and 'fragprofile'\n"
623 " tracer tracer lifetime (FIXME:better description)\n"
624 " recorder trace recording stuff (FIXME:better description)\n"
625 " abort show trace recording aborts\n"
626 " stats show trace recording stats\n"
627 " regexp show compilation & entry for regexps\n"
628 " profiler show loop profiles as they are profiled\n"
629 " treevis spew that tracevis/tree.py can parse\n"
630 " ------ options for Nanojit ------\n"
631 " fragprofile count entries and exits for each fragment\n"
632 " liveness show LIR liveness at start of reader pipeline\n"
633 " readlir show LIR as it enters the reader pipeline\n"
634 " aftersf show LIR after StackFilter\n"
635 " afterdce show LIR after dead code elimination\n"
636 " native show native code (interleaved with 'afterdce')\n"
637 " nativebytes show native code bytes in 'native' output\n"
638 " regalloc show regalloc state in 'native' output\n"
639 " activation show activation state in 'native' output\n"
640 "\n"
642 exit(0);
643 /*NOTREACHED*/
646 bits = 0;
648 /* flags for jstracer.cpp */
649 if (strstr(tmf, "minimal") || strstr(tmf, "full")) bits |= LC_TMMinimal;
650 if (strstr(tmf, "tracer") || strstr(tmf, "full")) bits |= LC_TMTracer;
651 if (strstr(tmf, "recorder") || strstr(tmf, "full")) bits |= LC_TMRecorder;
652 if (strstr(tmf, "abort") || strstr(tmf, "full")) bits |= LC_TMAbort;
653 if (strstr(tmf, "stats") || strstr(tmf, "full")) bits |= LC_TMStats;
654 if (strstr(tmf, "profiler") || strstr(tmf, "full")) bits |= LC_TMProfiler;
655 if (strstr(tmf, "treevis")) bits |= LC_TMTreeVis;
657 /* flags for nanojit */
658 if (strstr(tmf, "fragprofile")) bits |= LC_FragProfile;
659 if (strstr(tmf, "liveness") || strstr(tmf, "full")) bits |= LC_Liveness;
660 if (strstr(tmf, "readlir") || strstr(tmf, "full")) bits |= LC_ReadLIR;
661 if (strstr(tmf, "aftersf") || strstr(tmf, "full")) bits |= LC_AfterSF;
662 if (strstr(tmf, "afterdce") || strstr(tmf, "full")) bits |= LC_AfterDCE;
663 if (strstr(tmf, "native") || strstr(tmf, "full")) bits |= LC_Native;
664 if (strstr(tmf, "nativebytes")|| strstr(tmf, "full")) bits |= LC_Bytes;
665 if (strstr(tmf, "regalloc") || strstr(tmf, "full")) bits |= LC_RegAlloc;
666 if (strstr(tmf, "activation") || strstr(tmf, "full")) bits |= LC_Activation;
668 LogController.lcbits = bits;
669 return;
672 #endif
674 /* ------------------ Frag-level profiling support ------------------ */
676 #ifdef JS_JIT_SPEW
679 * All the allocations done by this profile data-collection and
680 * display machinery are done in TraceMonitor::profAlloc. That is
681 * emptied out at the end of FinishJIT. It has a lifetime from
682 * InitJIT to FinishJIT, which exactly matches the span
683 * js_FragProfiling_init to js_FragProfiling_showResults.
685 template<class T>
686 static
687 Seq<T>* reverseInPlace(Seq<T>* seq)
689 Seq<T>* prev = NULL;
690 Seq<T>* curr = seq;
691 while (curr) {
692 Seq<T>* next = curr->tail;
693 curr->tail = prev;
694 prev = curr;
695 curr = next;
697 return prev;
700 // The number of top blocks to show in the profile
701 #define N_TOP_BLOCKS 50
703 // Contains profile info for a single guard
704 struct GuardPI {
705 uint32_t guardID; // identifying number
706 uint32_t count; // number of times this guard's exit was taken
709 struct FragPI {
710 uint32_t count; // entry count for this Fragment
711 uint32_t nStaticExits; // statically: the number of exits
712 size_t nCodeBytes; // statically: the number of insn bytes in the main fragment
713 size_t nExitBytes; // statically: the number of insn bytes in the exit paths
714 Seq<GuardPI>* guards; // guards, each with its own count
715 uint32_t largestGuardID; // that exists in .guards
718 void
719 FragProfiling_FragFinalizer(Fragment* f, TraceMonitor* tm)
721 // Recover profiling data from 'f', which is logically at the end
722 // of its useful lifetime.
723 if (!(LogController.lcbits & LC_FragProfile))
724 return;
726 NanoAssert(f);
727 // Valid profFragIDs start at 1
728 NanoAssert(f->profFragID >= 1);
729 // Should be called exactly once per Fragment. This will assert if
730 // you issue the same FragID to more than one Fragment.
731 NanoAssert(!tm->profTab->containsKey(f->profFragID));
733 FragPI pi = { f->profCount,
734 f->nStaticExits,
735 f->nCodeBytes,
736 f->nExitBytes,
737 NULL, 0 };
739 // Begin sanity check on the guards
740 SeqBuilder<GuardPI> guardsBuilder(*tm->profAlloc);
741 GuardRecord* gr;
742 uint32_t nGs = 0;
743 uint32_t sumOfDynExits = 0;
744 for (gr = f->guardsForFrag; gr; gr = gr->nextInFrag) {
745 nGs++;
746 // Also copy the data into our auxiliary structure.
747 // f->guardsForFrag is in reverse order, and so this
748 // copy preserves that ordering (->add adds at end).
749 // Valid profGuardIDs start at 1.
750 NanoAssert(gr->profGuardID > 0);
751 sumOfDynExits += gr->profCount;
752 GuardPI gpi = { gr->profGuardID, gr->profCount };
753 guardsBuilder.add(gpi);
754 if (gr->profGuardID > pi.largestGuardID)
755 pi.largestGuardID = gr->profGuardID;
757 pi.guards = guardsBuilder.get();
758 // And put the guard list in forwards order
759 pi.guards = reverseInPlace(pi.guards);
761 // Why is this so? Because nGs is the number of guards
762 // at the time the LIR was generated, whereas f->nStaticExits
763 // is the number of them observed by the time it makes it
764 // through to the assembler. It can be the case that LIR
765 // optimisation removes redundant guards; hence we expect
766 // nGs to always be the same or higher.
767 NanoAssert(nGs >= f->nStaticExits);
769 // Also we can assert that the sum of the exit counts
770 // can't exceed the entry count. It'd be nice to assert that
771 // they are exactly equal, but we can't because we don't know
772 // how many times we got to the end of the trace.
773 NanoAssert(f->profCount >= sumOfDynExits);
775 // End sanity check on guards
777 tm->profTab->put(f->profFragID, pi);
780 static void
781 FragProfiling_showResults(TraceMonitor* tm)
783 uint32_t topFragID[N_TOP_BLOCKS];
784 FragPI topPI[N_TOP_BLOCKS];
785 uint64_t totCount = 0, cumulCount;
786 uint32_t totSE = 0;
787 size_t totCodeB = 0, totExitB = 0;
788 PodArrayZero(topFragID);
789 PodArrayZero(topPI);
790 FragStatsMap::Iter iter(*tm->profTab);
791 while (iter.next()) {
792 uint32_t fragID = iter.key();
793 FragPI pi = iter.value();
794 uint32_t count = pi.count;
795 totCount += (uint64_t)count;
796 /* Find the rank for this entry, in tops */
797 int r = N_TOP_BLOCKS-1;
798 while (true) {
799 if (r == -1)
800 break;
801 if (topFragID[r] == 0) {
802 r--;
803 continue;
805 if (count > topPI[r].count) {
806 r--;
807 continue;
809 break;
811 r++;
812 NanoAssert(r >= 0 && r <= N_TOP_BLOCKS);
813 /* This entry should be placed at topPI[r], and entries
814 at higher numbered slots moved up one. */
815 if (r < N_TOP_BLOCKS) {
816 for (int s = N_TOP_BLOCKS-1; s > r; s--) {
817 topFragID[s] = topFragID[s-1];
818 topPI[s] = topPI[s-1];
820 topFragID[r] = fragID;
821 topPI[r] = pi;
825 LogController.printf(
826 "\n----------------- Per-fragment execution counts ------------------\n");
827 LogController.printf(
828 "\nTotal count = %llu\n\n", (unsigned long long int)totCount);
830 LogController.printf(
831 " Entry counts Entry counts ----- Static -----\n");
832 LogController.printf(
833 " ------Self------ ----Cumulative--- Exits Cbytes Xbytes FragID\n");
834 LogController.printf("\n");
836 if (totCount == 0)
837 totCount = 1; /* avoid division by zero */
838 cumulCount = 0;
839 int r;
840 for (r = 0; r < N_TOP_BLOCKS; r++) {
841 if (topFragID[r] == 0)
842 break;
843 cumulCount += (uint64_t)topPI[r].count;
844 LogController.printf("%3d: %5.2f%% %9u %6.2f%% %9llu"
845 " %3d %5u %5u %06u\n",
847 (double)topPI[r].count * 100.0 / (double)totCount,
848 topPI[r].count,
849 (double)cumulCount * 100.0 / (double)totCount,
850 (unsigned long long int)cumulCount,
851 topPI[r].nStaticExits,
852 (unsigned int)topPI[r].nCodeBytes,
853 (unsigned int)topPI[r].nExitBytes,
854 topFragID[r]);
855 totSE += (uint32_t)topPI[r].nStaticExits;
856 totCodeB += topPI[r].nCodeBytes;
857 totExitB += topPI[r].nExitBytes;
859 LogController.printf("\nTotal displayed code bytes = %u, "
860 "exit bytes = %u\n"
861 "Total displayed static exits = %d\n\n",
862 (unsigned int)totCodeB, (unsigned int)totExitB, totSE);
864 LogController.printf("Analysis by exit counts\n\n");
866 for (r = 0; r < N_TOP_BLOCKS; r++) {
867 if (topFragID[r] == 0)
868 break;
869 LogController.printf("FragID=%06u, total count %u:\n", topFragID[r],
870 topPI[r].count);
871 uint32_t madeItToEnd = topPI[r].count;
872 uint32_t totThisFrag = topPI[r].count;
873 if (totThisFrag == 0)
874 totThisFrag = 1;
875 GuardPI gpi;
876 // visit the guards, in forward order
877 for (Seq<GuardPI>* guards = topPI[r].guards; guards; guards = guards->tail) {
878 gpi = (*guards).head;
879 if (gpi.count == 0)
880 continue;
881 madeItToEnd -= gpi.count;
882 LogController.printf(" GuardID=%03u %7u (%5.2f%%)\n",
883 gpi.guardID, gpi.count,
884 100.0 * (double)gpi.count / (double)totThisFrag);
886 LogController.printf(" Looped (%03u) %7u (%5.2f%%)\n",
887 topPI[r].largestGuardID+1,
888 madeItToEnd,
889 100.0 * (double)madeItToEnd / (double)totThisFrag);
890 NanoAssert(madeItToEnd <= topPI[r].count); // else unsigned underflow
891 LogController.printf("\n");
894 tm->profTab = NULL;
897 #endif
899 /* ----------------------------------------------------------------- */
901 #ifdef DEBUG
902 static JSBool FASTCALL
903 PrintOnTrace(char* format, uint32 argc, double *argv)
905 union {
906 struct {
907 uint32 lo;
908 uint32 hi;
909 } i;
910 double d;
911 char *cstr;
912 JSObject *o;
913 JSString *s;
914 } u;
916 #define GET_ARG() JS_BEGIN_MACRO \
917 if (argi >= argc) { \
918 fprintf(out, "[too few args for format]"); \
919 break; \
921 u.d = argv[argi++]; \
922 JS_END_MACRO
924 FILE *out = stderr;
926 uint32 argi = 0;
927 for (char *p = format; *p; ++p) {
928 if (*p != '%') {
929 putc(*p, out);
930 continue;
932 char ch = *++p;
933 if (!ch) {
934 fprintf(out, "[trailing %%]");
935 continue;
938 switch (ch) {
939 case 'a':
940 GET_ARG();
941 fprintf(out, "[%u:%u 0x%x:0x%x %f]", u.i.lo, u.i.hi, u.i.lo, u.i.hi, u.d);
942 break;
943 case 'd':
944 GET_ARG();
945 fprintf(out, "%d", u.i.lo);
946 break;
947 case 'u':
948 GET_ARG();
949 fprintf(out, "%u", u.i.lo);
950 break;
951 case 'x':
952 GET_ARG();
953 fprintf(out, "%x", u.i.lo);
954 break;
955 case 'f':
956 GET_ARG();
957 fprintf(out, "%f", u.d);
958 break;
959 case 'o':
960 GET_ARG();
961 js_DumpObject(u.o);
962 break;
963 case 's':
964 GET_ARG();
966 size_t length = u.s->length();
967 // protect against massive spew if u.s is a bad pointer.
968 if (length > 1 << 16)
969 length = 1 << 16;
970 if (u.s->isRope()) {
971 fprintf(out, "<rope>");
972 break;
974 const jschar *chars = u.s->nonRopeChars();
975 for (unsigned i = 0; i < length; ++i) {
976 jschar co = chars[i];
977 if (co < 128)
978 putc(co, out);
979 else if (co < 256)
980 fprintf(out, "\\u%02x", co);
981 else
982 fprintf(out, "\\u%04x", co);
985 break;
986 case 'S':
987 GET_ARG();
988 fprintf(out, "%s", u.cstr);
989 break;
990 case 'v': {
991 GET_ARG();
992 Value *v = (Value *) u.i.lo;
993 js_DumpValue(*v);
994 break;
996 default:
997 fprintf(out, "[invalid %%%c]", *p);
1001 #undef GET_ARG
1003 return JS_TRUE;
1006 JS_DEFINE_CALLINFO_3(extern, BOOL, PrintOnTrace, CHARPTR, UINT32, DOUBLEPTR, 0, ACCSET_STORE_ANY)
1008 // This version is not intended to be called directly: usually it is easier to
1009 // use one of the other overloads.
1010 void
1011 TraceRecorder::tprint(const char *format, int count, nanojit::LIns *insa[])
1013 size_t size = strlen(format) + 1;
1014 char* data = (char*) traceMonitor->traceAlloc->alloc(size);
1015 memcpy(data, format, size);
1017 double *args = (double*) traceMonitor->traceAlloc->alloc(count * sizeof(double));
1018 LIns* argsp_ins = w.nameImmpNonGC(args);
1019 for (int i = 0; i < count; ++i)
1020 w.stTprintArg(insa, argsp_ins, i);
1022 LIns* args_ins[] = { w.nameImmpNonGC(args), w.nameImmi(count), w.nameImmpNonGC(data) };
1023 LIns* call_ins = w.call(&PrintOnTrace_ci, args_ins);
1024 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
1027 // Generate a 'printf'-type call from trace for debugging.
1028 void
1029 TraceRecorder::tprint(const char *format)
1031 LIns* insa[] = { NULL };
1032 tprint(format, 0, insa);
1035 void
1036 TraceRecorder::tprint(const char *format, LIns *ins)
1038 LIns* insa[] = { ins };
1039 tprint(format, 1, insa);
1042 void
1043 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2)
1045 LIns* insa[] = { ins1, ins2 };
1046 tprint(format, 2, insa);
1049 void
1050 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3)
1052 LIns* insa[] = { ins1, ins2, ins3 };
1053 tprint(format, 3, insa);
1056 void
1057 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4)
1059 LIns* insa[] = { ins1, ins2, ins3, ins4 };
1060 tprint(format, 4, insa);
1063 void
1064 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
1065 LIns *ins5)
1067 LIns* insa[] = { ins1, ins2, ins3, ins4, ins5 };
1068 tprint(format, 5, insa);
1071 void
1072 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
1073 LIns *ins5, LIns *ins6)
1075 LIns* insa[] = { ins1, ins2, ins3, ins4, ins5, ins6 };
1076 tprint(format, 6, insa);
1078 #endif
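/*
 * Editor's note, an illustrative (hypothetical) use of the DEBUG-only
 * tprint() helpers above from inside a recorder method; the format
 * directives are those PrintOnTrace() understands (%a, %d, %u, %x, %f, %o,
 * %s, %S, %v):
 *
 *     // print the current value of two LIR instructions each time the
 *     // running trace reaches this point
 *     tprint("i=%d d=%f\n", i_ins, d_ins);
 *
 * 'i_ins' and 'd_ins' are placeholder names for LIns* values the recorder
 * already holds; only the tprint() overloads themselves come from this file.
 */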
1080 Tracker::Tracker()
1082 pagelist = NULL;
1085 Tracker::~Tracker()
1087 clear();
1090 inline jsuword
1091 Tracker::getTrackerPageBase(const void* v) const
1093 return jsuword(v) & ~TRACKER_PAGE_MASK;
1096 inline jsuword
1097 Tracker::getTrackerPageOffset(const void* v) const
1099 return (jsuword(v) & TRACKER_PAGE_MASK) >> 2;
1102 struct Tracker::TrackerPage*
1103 Tracker::findTrackerPage(const void* v) const
1105 jsuword base = getTrackerPageBase(v);
1106 struct Tracker::TrackerPage* p = pagelist;
1107 while (p) {
1108 if (p->base == base)
1109 return p;
1110 p = p->next;
1112 return NULL;
1115 struct Tracker::TrackerPage*
1116 Tracker::addTrackerPage(const void* v)
1118 jsuword base = getTrackerPageBase(v);
1119 struct TrackerPage* p = (struct TrackerPage*) js_calloc(sizeof(*p));
1120 p->base = base;
1121 p->next = pagelist;
1122 pagelist = p;
1123 return p;
1126 void
1127 Tracker::clear()
1129 while (pagelist) {
1130 TrackerPage* p = pagelist;
1131 pagelist = pagelist->next;
1132 js_free(p);
1136 bool
1137 Tracker::has(const void *v) const
1139 return get(v) != NULL;
1142 LIns*
1143 Tracker::get(const void* v) const
1145 struct Tracker::TrackerPage* p = findTrackerPage(v);
1146 if (!p)
1147 return NULL;
1148 return p->map[getTrackerPageOffset(v)];
1151 void
1152 Tracker::set(const void* v, LIns* i)
1154 struct Tracker::TrackerPage* p = findTrackerPage(v);
1155 if (!p)
1156 p = addTrackerPage(v);
1157 p->map[getTrackerPageOffset(v)] = i;
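/*
 * Editor's note, an illustrative sketch (not part of the original file) of
 * how the Tracker above is meant to be used: it maps the address of an
 * interpreter-visible slot to the LIR instruction that currently computes
 * the value stored there, so a recorder can reuse instructions instead of
 * re-emitting loads. Roughly:
 *
 *     Value *slotp = &cx->fp()->slots()[n];   // some tracked address
 *     if (LIns *known = tracker.get(slotp))   // already have LIR for it?
 *         return known;
 *     LIns *ins = emitLoadForSlot(slotp);     // hypothetical helper
 *     tracker.set(slotp, ins);                // remember it for next time
 *
 * Only Tracker::get()/set()/has() are taken from this file; the surrounding
 * names are placeholders.
 */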
1160 static inline bool
1161 hasInt32Repr(const Value &v)
1163 if (!v.isNumber())
1164 return false;
1165 if (v.isInt32())
1166 return true;
1167 int32_t _;
1168 return JSDOUBLE_IS_INT32(v.toDouble(), &_);
1171 static inline jsint
1172 asInt32(const Value &v)
1174 JS_ASSERT(v.isNumber());
1175 if (v.isInt32())
1176 return v.toInt32();
1177 #ifdef DEBUG
1178 int32_t _;
1179 JS_ASSERT(JSDOUBLE_IS_INT32(v.toDouble(), &_));
1180 #endif
1181 return jsint(v.toDouble());
1185 * Return JSVAL_TYPE_DOUBLE for all numbers (int and double). Split
1186 * JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1187 * Otherwise, just return the value's type.
1189 static inline JSValueType
1190 getPromotedType(const Value &v)
1192 if (v.isNumber())
1193 return JSVAL_TYPE_DOUBLE;
1194 if (v.isObject())
1195 return v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
1196 return v.extractNonDoubleObjectTraceType();
1200 * Return JSVAL_TYPE_INT32 for all whole numbers that fit into signed 32-bit.
1201 * Split JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1202 * Otherwise, just return the value's type.
1204 static inline JSValueType
1205 getCoercedType(const Value &v)
1207 if (v.isNumber()) {
1208 int32_t _;
1209 return (v.isInt32() || JSDOUBLE_IS_INT32(v.toDouble(), &_))
1210 ? JSVAL_TYPE_INT32
1211 : JSVAL_TYPE_DOUBLE;
1213 if (v.isObject())
1214 return v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
1215 return v.extractNonDoubleObjectTraceType();
1218 static inline JSValueType
1219 getFrameObjPtrTraceType(void *p, JSStackFrame *fp)
1221 if (p == fp->addressOfScopeChain()) {
1222 JS_ASSERT(*(JSObject **)p != NULL);
1223 return JSVAL_TYPE_NONFUNOBJ;
1225 JS_ASSERT(p == fp->addressOfArgs());
1226 return fp->hasArgsObj() ? JSVAL_TYPE_NONFUNOBJ : JSVAL_TYPE_NULL;
1229 static inline bool
1230 isFrameObjPtrTraceType(JSValueType t)
1232 return t == JSVAL_TYPE_NULL || t == JSVAL_TYPE_NONFUNOBJ;
1235 /* Constant seed and accumulate step borrowed from the DJB hash. */
1237 const uintptr_t ORACLE_MASK = ORACLE_SIZE - 1;
1238 JS_STATIC_ASSERT((ORACLE_MASK & ORACLE_SIZE) == 0);
1240 const uintptr_t FRAGMENT_TABLE_MASK = FRAGMENT_TABLE_SIZE - 1;
1241 JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK & FRAGMENT_TABLE_SIZE) == 0);
1243 const uintptr_t HASH_SEED = 5381;
1245 static inline void
1246 HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
1248 h = ((h << 5) + h + (mask & i)) & mask;
1251 static JS_REQUIRES_STACK inline int
1252 StackSlotHash(JSContext* cx, unsigned slot, const void* pc)
1254 uintptr_t h = HASH_SEED;
1255 HashAccum(h, uintptr_t(cx->fp()->script()), ORACLE_MASK);
1256 HashAccum(h, uintptr_t(pc), ORACLE_MASK);
1257 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
1258 return int(h);
1261 static JS_REQUIRES_STACK inline int
1262 GlobalSlotHash(JSContext* cx, unsigned slot)
1264 uintptr_t h = HASH_SEED;
1265 JSStackFrame* fp = cx->fp();
1267 while (fp->prev())
1268 fp = fp->prev();
1270 HashAccum(h, uintptr_t(fp->maybeScript()), ORACLE_MASK);
1271 HashAccum(h, uintptr_t(fp->scopeChain().getGlobal()->shape()), ORACLE_MASK);
1272 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
1273 return int(h);
1276 static inline int
1277 PCHash(jsbytecode* pc)
1279 return int(uintptr_t(pc) & ORACLE_MASK);
1282 Oracle::Oracle()
1284 /* Grow the oracle bitsets to their (fixed) size here, once. */
1285 _stackDontDemote.set(ORACLE_SIZE-1);
1286 _globalDontDemote.set(ORACLE_SIZE-1);
1287 clear();
1290 /* Tell the oracle that a certain global variable should not be demoted. */
1291 JS_REQUIRES_STACK void
1292 Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
1294 _globalDontDemote.set(GlobalSlotHash(cx, slot));
1297 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
1298 JS_REQUIRES_STACK bool
1299 Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
1301 return _globalDontDemote.get(GlobalSlotHash(cx, slot));
1304 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
1305 JS_REQUIRES_STACK void
1306 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc)
1308 _stackDontDemote.set(StackSlotHash(cx, slot, pc));
1311 JS_REQUIRES_STACK void
1312 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
1314 markStackSlotUndemotable(cx, slot, cx->regs->pc);
1317 /* Consult with the oracle whether we shouldn't demote a certain slot. */
1318 JS_REQUIRES_STACK bool
1319 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const
1321 return _stackDontDemote.get(StackSlotHash(cx, slot, pc));
1324 JS_REQUIRES_STACK bool
1325 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
1327 return isStackSlotUndemotable(cx, slot, cx->regs->pc);
1330 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
1331 void
1332 Oracle::markInstructionUndemotable(jsbytecode* pc)
1334 _pcDontDemote.set(PCHash(pc));
1337 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
1338 bool
1339 Oracle::isInstructionUndemotable(jsbytecode* pc) const
1341 return _pcDontDemote.get(PCHash(pc));
1344 /* Tell the oracle that the instruction at bytecode location should use a stronger (slower) test for -0. */
1345 void
1346 Oracle::markInstructionSlowZeroTest(jsbytecode* pc)
1348 _pcSlowZeroTest.set(PCHash(pc));
1351 /* Consult with the oracle whether we should use a stronger (slower) test for -0. */
1352 bool
1353 Oracle::isInstructionSlowZeroTest(jsbytecode* pc) const
1355 return _pcSlowZeroTest.get(PCHash(pc));
1358 void
1359 Oracle::clearDemotability()
1361 _stackDontDemote.reset();
1362 _globalDontDemote.reset();
1363 _pcDontDemote.reset();
1364 _pcSlowZeroTest.reset();
1367 JS_REQUIRES_STACK void
1368 TraceRecorder::markSlotUndemotable(LinkableFragment* f, unsigned slot)
1370 if (slot < f->nStackTypes) {
1371 traceMonitor->oracle->markStackSlotUndemotable(cx, slot);
1372 return;
1375 uint16* gslots = f->globalSlots->data();
1376 traceMonitor->oracle->markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
1379 JS_REQUIRES_STACK void
1380 TraceRecorder::markSlotUndemotable(LinkableFragment* f, unsigned slot, const void* pc)
1382 if (slot < f->nStackTypes) {
1383 traceMonitor->oracle->markStackSlotUndemotable(cx, slot, pc);
1384 return;
1387 uint16* gslots = f->globalSlots->data();
1388 traceMonitor->oracle->markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
1391 static JS_REQUIRES_STACK bool
1392 IsSlotUndemotable(Oracle* oracle, JSContext* cx, LinkableFragment* f, unsigned slot, const void* ip)
1394 if (slot < f->nStackTypes)
1395 return !oracle || oracle->isStackSlotUndemotable(cx, slot, ip);
1397 uint16* gslots = f->globalSlots->data();
1398 return !oracle || oracle->isGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
1401 class FrameInfoCache
1403 struct HashPolicy
1405 typedef FrameInfo *Lookup;
1406 static HashNumber hash(const FrameInfo* fi) {
1407 size_t len = sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType);
1408 HashNumber h = 0;
1409 const unsigned char *s = (const unsigned char*)fi;
1410 for (size_t i = 0; i < len; i++, s++)
1411 h = JS_ROTATE_LEFT32(h, 4) ^ *s;
1412 return h;
1415 static bool match(const FrameInfo* fi1, const FrameInfo* fi2) {
1416 if (memcmp(fi1, fi2, sizeof(FrameInfo)) != 0)
1417 return false;
1418 return memcmp(fi1->get_typemap(), fi2->get_typemap(),
1419 fi1->callerHeight * sizeof(JSValueType)) == 0;
1423 typedef HashSet<FrameInfo *, HashPolicy, SystemAllocPolicy> FrameSet;
1425 FrameSet set;
1426 VMAllocator *allocator;
1428 public:
1430 FrameInfoCache(VMAllocator *allocator);
1432 void reset() {
1433 set.clear();
1436 FrameInfo *memoize(FrameInfo *fi) {
1437 FrameSet::AddPtr p = set.lookupForAdd(fi);
1438 if (!p) {
1439 FrameInfo* n = (FrameInfo*)
1440 allocator->alloc(sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType));
1441 memcpy(n, fi, sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType));
1442 if (!set.add(p, n))
1443 return NULL;
1446 return *p;
1450 FrameInfoCache::FrameInfoCache(VMAllocator *allocator)
1451 : allocator(allocator)
1453 if (!set.init())
1454 OUT_OF_MEMORY_ABORT("FrameInfoCache::FrameInfoCache(): out of memory");
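/*
 * Editor's note, a sketch of the hash-consing pattern FrameInfoCache::memoize()
 * implements: a caller builds a FrameInfo (plus its trailing type map) in
 * temporary storage and exchanges it for a canonical, allocator-owned copy so
 * that identical call frames share a single FrameInfo pointer. Roughly:
 *
 *     FrameInfo *fi = ...;                        // locally built FrameInfo
 *     FrameInfo *canonical = frameCache->memoize(fi);
 *     if (!canonical)
 *         return ARECORD_STOP;                    // OOM while interning
 *     // 'canonical' can now be compared by pointer with other frames
 *
 * The error-handling value shown is a placeholder; only memoize()'s signature
 * and its NULL-on-failure behaviour come from the code above.
 */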
1457 #define PC_HASH_COUNT 1024
1459 static void
1460 Blacklist(jsbytecode* pc)
1462 AUDIT(blacklisted);
1463 JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
1464 *pc = JSOP_NOTRACE;
1467 static void
1468 Unblacklist(JSScript *script, jsbytecode *pc)
1470 JS_ASSERT(*pc == JSOP_NOTRACE || *pc == JSOP_TRACE);
1471 if (*pc == JSOP_NOTRACE) {
1472 *pc = JSOP_TRACE;
1474 #ifdef JS_METHODJIT
1475 /* This code takes care of unblacklisting in the method JIT. */
1476 js::mjit::ResetTraceHint(script, pc, GET_UINT16(pc), false);
1477 #endif
1481 static bool
1482 IsBlacklisted(jsbytecode* pc)
1484 if (*pc == JSOP_NOTRACE)
1485 return true;
1486 if (*pc == JSOP_CALL)
1487 return *(pc + JSOP_CALL_LENGTH) == JSOP_NOTRACE;
1488 return false;
1491 static void
1492 Backoff(TraceMonitor *tm, jsbytecode* pc, Fragment* tree = NULL)
1494 /* N.B. This code path cannot assume the recorder is/is not alive. */
1495 RecordAttemptMap &table = *tm->recordAttempts;
1496 if (RecordAttemptMap::AddPtr p = table.lookupForAdd(pc)) {
1497 if (p->value++ > (BL_ATTEMPTS * MAXPEERS)) {
1498 p->value = 0;
1499 Blacklist(pc);
1500 return;
1502 } else {
1503 table.add(p, pc, 0);
1506 if (tree) {
1507 tree->hits() -= BL_BACKOFF;
1510 * In case there is no entry or no table (due to OOM) or some
1511 * serious imbalance in the recording-attempt distribution on a
1512 * multitree, give each tree another chance to blacklist here as
1513 * well.
1515 if (++tree->recordAttempts > BL_ATTEMPTS)
1516 Blacklist(pc);
1520 static void
1521 ResetRecordingAttempts(TraceMonitor *tm, jsbytecode* pc)
1523 RecordAttemptMap &table = *tm->recordAttempts;
1524 if (RecordAttemptMap::Ptr p = table.lookup(pc))
1525 p->value = 0;
1528 static inline size_t
1529 FragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
1531 uintptr_t h = HASH_SEED;
1532 HashAccum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
1533 HashAccum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
1534 HashAccum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
1535 HashAccum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
1536 return size_t(h);
1539 static void
1540 RawLookupFirstPeer(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1541 uint32 globalShape, uint32 argc,
1542 TreeFragment*& firstInBucket, TreeFragment**& prevTreeNextp)
1544 size_t h = FragmentHash(ip, globalObj, globalShape, argc);
1545 TreeFragment** ppf = &tm->vmfragments[h];
1546 firstInBucket = *ppf;
1547 for (; TreeFragment* pf = *ppf; ppf = &pf->next) {
1548 if (pf->globalObj == globalObj &&
1549 pf->globalShape == globalShape &&
1550 pf->ip == ip &&
1551 pf->argc == argc) {
1552 prevTreeNextp = ppf;
1553 return;
1556 prevTreeNextp = ppf;
1557 return;
1560 static TreeFragment*
1561 LookupLoop(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1562 uint32 globalShape, uint32 argc)
1564 TreeFragment *_, **prevTreeNextp;
1565 RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, _, prevTreeNextp);
1566 return *prevTreeNextp;
1569 static TreeFragment*
1570 LookupOrAddLoop(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1571 uint32 globalShape, uint32 argc)
1573 TreeFragment *firstInBucket, **prevTreeNextp;
1574 RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, firstInBucket, prevTreeNextp);
1575 if (TreeFragment *f = *prevTreeNextp)
1576 return f;
1578 verbose_only(
1579 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
1580 ? (++(tm->lastFragID)) : 0;
1582 TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, tm->dataAlloc, tm->oracle,
1583 globalObj, globalShape,
1584 argc verbose_only(, profFragID));
1585 f->root = f; /* f is the root of a new tree */
1586 *prevTreeNextp = f; /* insert f at the end of the vmfragments bucket-list */
1587 f->next = NULL;
1588 f->first = f; /* initialize peer-list at f */
1589 f->peer = NULL;
1590 return f;
1593 static TreeFragment*
1594 AddNewPeerToPeerList(TraceMonitor* tm, TreeFragment* peer)
1596 JS_ASSERT(peer);
1597 verbose_only(
1598 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
1599 ? (++(tm->lastFragID)) : 0;
1601 TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, tm->dataAlloc, tm->oracle,
1602 peer->globalObj, peer->globalShape,
1603 peer->argc verbose_only(, profFragID));
1604 f->root = f; /* f is the root of a new tree */
1605 f->first = peer->first; /* add f to peer list */
1606 f->peer = peer->peer;
1607 peer->peer = f;
1608 /* only the |first| Fragment of a peer list needs a valid |next| field */
1609 debug_only(f->next = (TreeFragment*)0xcdcdcdcd);
1610 return f;
1613 JS_REQUIRES_STACK void
1614 TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate)
1616 this->dependentTrees.clear();
1617 this->linkedTrees.clear();
1618 this->globalSlots = globalSlots;
1620 /* Capture the coerced type of each active slot in the type map. */
1621 this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */, speculate);
1622 this->nStackTypes = this->typeMap.length() - globalSlots->length();
1623 this->spOffsetAtEntry = cx->regs->sp - cx->fp()->base();
1625 #ifdef DEBUG
1626 this->treeFileName = cx->fp()->script()->filename;
1627 this->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp());
1628 this->treePCOffset = FramePCOffset(cx, cx->fp());
1629 #endif
1630 this->script = cx->fp()->script();
1631 this->gcthings.clear();
1632 this->shapes.clear();
1633 this->unstableExits = NULL;
1634 this->sideExits.clear();
1636 /* Determine the native frame layout at the entry point. */
1637 this->nativeStackBase = (nStackTypes - (cx->regs->sp - cx->fp()->base())) *
1638 sizeof(double);
1639 this->maxNativeStackSlots = nStackTypes;
1640 this->maxCallDepth = 0;
1641 this->execs = 0;
1642 this->iters = 0;
1645 UnstableExit*
1646 TreeFragment::removeUnstableExit(VMSideExit* exit)
1648 /* Now erase this exit from the unstable exit list. */
1649 UnstableExit** tail = &this->unstableExits;
1650 for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
1651 if (uexit->exit == exit) {
1652 *tail = uexit->next;
1653 return *tail;
1655 tail = &uexit->next;
1657 JS_NOT_REACHED("exit not in unstable exit list");
1658 return NULL;
1661 #ifdef DEBUG
1662 static void
1663 AssertTreeIsUnique(TraceMonitor* tm, TreeFragment* f)
1665 JS_ASSERT(f->root == f);
1668 * Check for duplicate entry type maps. This is always wrong and hints at
1669 * trace explosion since we are trying to stabilize something without
1670 * properly connecting peer edges.
1672 for (TreeFragment* peer = LookupLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc);
1673 peer != NULL;
1674 peer = peer->peer) {
1675 if (!peer->code() || peer == f)
1676 continue;
1677 JS_ASSERT(!f->typeMap.matches(peer->typeMap));
1680 #endif
1682 static void
1683 AttemptCompilation(TraceMonitor *tm, JSObject* globalObj,
1684 JSScript* script, jsbytecode* pc, uint32 argc)
1686 /* If we already permanently blacklisted the location, undo that. */
1687 Unblacklist(script, pc);
1688 ResetRecordingAttempts(tm, pc);
1690 /* Breathe new life into all peer fragments at the designated loop header. */
1691 TreeFragment* f = LookupLoop(tm, pc, globalObj, globalObj->shape(), argc);
1692 if (!f) {
1694 * If the global object's shape changed, we can't easily find the
1695 * corresponding loop header via a hash table lookup. In this case
1696 * we simply bail here and hope that the fragment has another
1697 * outstanding compilation attempt. This case is extremely rare.
1699 return;
1701 JS_ASSERT(f->root == f);
1702 f = f->first;
1703 while (f) {
1704 JS_ASSERT(f->root == f);
1705 --f->recordAttempts;
1706 f->hits() = HOTLOOP;
1707 f = f->peer;
1711 static const CallInfo *
1712 fcallinfo(LIns *ins)
1714 return ins->isop(LIR_calld) ? ins->callInfo() : NULL;
1718 * Determine whether the given integer operation on these operands needs an
1719 * overflow check and/or a negative-zero check.
1721 static void
1722 ChecksRequired(LOpcode op, LIns* op1, LIns* op2,
1723 bool* needsOverflowCheck, bool* needsNegZeroCheck)
1725 Interval x = Interval::of(op1, 3);
1726 Interval y = Interval::of(op2, 3);
1727 Interval z(0, 0);
1729 switch (op) {
1730 case LIR_addi:
1731 z = Interval::add(x, y);
1732 *needsNegZeroCheck = false;
1733 break;
1735 case LIR_subi:
1736 z = Interval::sub(x, y);
1737 *needsNegZeroCheck = false;
1738 break;
1740 case LIR_muli: {
1741 z = Interval::mul(x, y);
1742 // A would-be negative zero result can only occur if we have
1743 // mul(0, -n) or mul(-n, 0), where n != 0. In particular, a multiply
1744 // where one operand is a positive immediate cannot result in negative
1745 // zero.
1747 // This assumes that -0 cannot be an operand; if one had occurred we
1748 // would have already exited the trace in order to promote the
1749 // computation back to doubles.
1750 *needsNegZeroCheck = (x.canBeZero() && y.canBeNegative()) ||
1751 (y.canBeZero() && x.canBeNegative());
1752 break;
1755 default:
1756 JS_NOT_REACHED("needsOverflowCheck");
1759 *needsOverflowCheck = z.hasOverflowed;
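/*
 * Editor's note, a worked example of the interval reasoning above (assuming
 * Interval::mul() returns the exact product range of its operand ranges):
 * for LIR_muli with op1 known to lie in [0, 10] and op2 in [-5, -1],
 * x.canBeZero() && y.canBeNegative() holds, so *needsNegZeroCheck is set;
 * the result range [-50, 0] fits easily in an int32, so z.hasOverflowed is
 * false and no overflow guard is required.
 */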
1763 * JSStackFrame::numActualArgs is only defined for function frames. Since the
1764 * actual arguments of the entry frame are kept on trace, argc is included in
1765 * the tuple identifying a fragment so that two fragments for the same loop but
1766 * recorded with different number of actual arguments are treated as two
1767 * completely separate trees. For this particular use, we define the number of
1768 * actuals for global and eval frames to be 0.
1770 static inline uintN
1771 entryFrameArgc(JSContext *cx)
1773 JSStackFrame *fp = cx->fp();
1774 return fp->isGlobalFrame() || fp->isEvalFrame() ? 0 : fp->numActualArgs();
1777 template <typename Visitor>
1778 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1779 VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value *stack)
1781 if (JS_LIKELY(!next->hasOverflowArgs()))
1782 return visitor.visitStackSlots(stack, next->formalArgsEnd() - stack, fp);
1785 * In the case of nactual > nformal, the formals are copied by the VM onto
1786 * the top of the stack. We only want to mark the formals once, so we
1787 * carefully mark only the canonical actual arguments (as defined by
1788 * JSStackFrame::canonicalActualArg).
1790 uintN nactual = next->numActualArgs();
1791 Value *actuals = next->actualArgs();
1792 size_t nstack = (actuals - 2 /* callee,this */) - stack;
1793 if (!visitor.visitStackSlots(stack, nstack, fp))
1794 return false;
1795 uintN nformal = next->numFormalArgs();
1796 Value *formals = next->formalArgs();
1797 if (!visitor.visitStackSlots(formals - 2, 2 + nformal, fp))
1798 return false;
1799 return visitor.visitStackSlots(actuals + nformal, nactual - nformal, fp);
1803 * Visit the values in the given JSStackFrame that the tracer cares about. This
1804 * visitor function is (implicitly) the primary definition of the native stack
1805 * area layout. There are a few other independent pieces of code that must be
1806 * maintained to assume the same layout. They are marked like this:
1808 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
1810 template <typename Visitor>
1811 static JS_REQUIRES_STACK bool
1812 VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *fp,
1813 JSStackFrame *next)
1815 JS_ASSERT_IF(!next, cx->fp() == fp);
1817 if (depth > 0 && !VisitFrameSlots(visitor, cx, depth-1, fp->prev(), fp))
1818 return false;
1820 if (depth == 0) {
1821 if (fp->isGlobalFrame()) {
1822 visitor.setStackSlotKind("global");
1823 Value *base = fp->slots() + fp->globalScript()->nfixed;
1824 if (next)
1825 return VisitStackAndArgs(visitor, fp, next, base);
1826 return visitor.visitStackSlots(base, cx->regs->sp - base, fp);
1829 if (JS_UNLIKELY(fp->isEvalFrame())) {
1830 visitor.setStackSlotKind("eval");
1831 if (!visitor.visitStackSlots(&fp->calleeValue(), 2, fp))
1832 return false;
1833 } else {
1835 * Only the bottom function frame must visit its arguments; for all
1836 * other frames, arguments are visited by the prev-frame.
1838 visitor.setStackSlotKind("args");
1839 uintN nformal = fp->numFormalArgs();
1840 if (!visitor.visitStackSlots(fp->formalArgs() - 2, 2 + nformal, fp))
1841 return false;
1842 if (JS_UNLIKELY(fp->hasOverflowArgs())) {
1843 if (!visitor.visitStackSlots(fp->actualArgs() + nformal,
1844 fp->numActualArgs() - nformal, fp))
1845 return false;
1850 JS_ASSERT(fp->isFunctionFrame());
1853 * We keep two members of JSStackFrame on trace: the args obj pointer and
1854 * the scope chain pointer. The visitor must take care not to treat these
1855 * as js::Value-typed variables, since they are unboxed pointers.
1856 * Moreover, JSStackFrame compresses the args obj pointer with nactual, so
1857 * fp->addressOfArgs() is not really a JSObject**: the visitor must treat
1858 * !fp->hasArgsObj() as a null args obj pointer. Hence, visitFrameObjPtr
1859 * is only passed a void *.
1861 visitor.setStackSlotKind("arguments");
1862 if (!visitor.visitFrameObjPtr(fp->addressOfArgs(), fp))
1863 return false;
1864 visitor.setStackSlotKind("scopeChain");
1865 if (!visitor.visitFrameObjPtr(fp->addressOfScopeChain(), fp))
1866 return false;
1868 visitor.setStackSlotKind("slots");
1869 if (next)
1870 return VisitStackAndArgs(visitor, fp, next, fp->slots());
1871 return visitor.visitStackSlots(fp->slots(), cx->regs->sp - fp->slots(), fp);
1874 // Number of native frame slots used for 'special' values between args and vars.
1875 // Currently the two values are |arguments| (args object) and |scopeChain|.
1876 const int SPECIAL_FRAME_SLOTS = 2;
1878 template <typename Visitor>
1879 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1880 VisitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth)
1882 return VisitFrameSlots(visitor, cx, callDepth, cx->fp(), NULL);
1885 template <typename Visitor>
1886 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1887 VisitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1888 unsigned ngslots, uint16 *gslots)
1890 for (unsigned n = 0; n < ngslots; ++n) {
1891 unsigned slot = gslots[n];
1892 visitor.visitGlobalSlot(&globalObj->getSlotRef(slot), n, slot);
1896 class AdjustCallerTypeVisitor;
1898 template <typename Visitor>
1899 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1900 VisitGlobalSlots(Visitor &visitor, JSContext *cx, SlotList &gslots)
1902 VisitGlobalSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1903 gslots.length(), gslots.data());
1907 template <typename Visitor>
1908 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1909 VisitSlots(Visitor& visitor, JSContext* cx, JSObject* globalObj,
1910 unsigned callDepth, unsigned ngslots, uint16* gslots)
1912 if (VisitStackSlots(visitor, cx, callDepth))
1913 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
1916 template <typename Visitor>
1917 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1918 VisitSlots(Visitor& visitor, JSContext* cx, unsigned callDepth,
1919 unsigned ngslots, uint16* gslots)
1921 VisitSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1922 callDepth, ngslots, gslots);
1925 template <typename Visitor>
1926 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1927 VisitSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1928 unsigned callDepth, const SlotList& slots)
1930 VisitSlots(visitor, cx, globalObj, callDepth, slots.length(),
1931 slots.data());
1934 template <typename Visitor>
1935 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1936 VisitSlots(Visitor &visitor, JSContext *cx, unsigned callDepth,
1937 const SlotList& slots)
1939 VisitSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1940 callDepth, slots.length(), slots.data());
1944 class SlotVisitorBase {
1945 #if defined JS_JIT_SPEW
1946 protected:
1947 char const *mStackSlotKind;
1948 public:
1949 SlotVisitorBase() : mStackSlotKind(NULL) {}
1950 JS_ALWAYS_INLINE const char *stackSlotKind() { return mStackSlotKind; }
1951 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
1952 mStackSlotKind = k;
1954 #else
1955 public:
1956 JS_ALWAYS_INLINE const char *stackSlotKind() { return NULL; }
1957 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {}
1958 #endif
1961 struct CountSlotsVisitor : public SlotVisitorBase
1963 unsigned mCount;
1964 bool mDone;
1965 const void* mStop;
1966 public:
1967 JS_ALWAYS_INLINE CountSlotsVisitor(const void* stop = NULL) :
1968 mCount(0),
1969 mDone(false),
1970 mStop(stop)
1973 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1974 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
1975 if (mDone)
1976 return false;
1977 if (mStop && size_t(((const Value *)mStop) - vp) < count) {
1978 mCount += size_t(((const Value *)mStop) - vp);
1979 mDone = true;
1980 return false;
1982 mCount += count;
1983 return true;
1986 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1987 visitFrameObjPtr(void* p, JSStackFrame* fp) {
1988 if (mDone)
1989 return false;
1990 if (mStop && mStop == p) {
1991 mDone = true;
1992 return false;
1994 mCount++;
1995 return true;
1998 JS_ALWAYS_INLINE unsigned count() {
1999 return mCount;
2002 JS_ALWAYS_INLINE bool stopped() {
2003 return mDone;
2007 static JS_REQUIRES_STACK JS_ALWAYS_INLINE unsigned
2008 CountStackAndArgs(JSStackFrame *next, Value *stack)
2010 if (JS_LIKELY(!next->hasOverflowArgs()))
2011 return (Value *)next - stack;
2012 size_t nvals = (next->formalArgs() - 2 /* callee, this */) - stack;
2013 JS_ASSERT(nvals == unsigned((next->actualArgs() - 2) - stack) + (2 + next->numActualArgs()));
2014 return nvals;
2017 static JS_ALWAYS_INLINE uintN
2018 NumSlotsBeforeFixed(JSStackFrame *fp)
2020 uintN numArgs = fp->isEvalFrame() ? 0 : Max(fp->numActualArgs(), fp->numFormalArgs());
2021 return 2 + numArgs + SPECIAL_FRAME_SLOTS;
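// For example, a non-eval function frame with 3 formals invoked with 5 actuals
// contributes 2 (callee, this) + 5 + SPECIAL_FRAME_SLOTS = 9 native slots
// before its fixed slots.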
2024 /*
2025 * Calculate the total number of native frame slots we need from this frame all
2026 * the way back to the entry frame, including the current stack usage.
2027 *
2028 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
2029 */
2030 JS_REQUIRES_STACK unsigned
2031 NativeStackSlots(JSContext *cx, unsigned callDepth)
2033 JSStackFrame *fp = cx->fp();
2034 JSStackFrame *next = NULL;
2035 unsigned slots = 0;
2036 unsigned depth = callDepth;
2038 for (; depth > 0; --depth, next = fp, fp = fp->prev()) {
2039 JS_ASSERT(fp->isFunctionFrame() && !fp->isEvalFrame());
2040 slots += SPECIAL_FRAME_SLOTS;
2041 if (next)
2042 slots += CountStackAndArgs(next, fp->slots());
2043 else
2044 slots += cx->regs->sp - fp->slots();
2047 Value *start;
2048 if (fp->isGlobalFrame()) {
2049 start = fp->slots() + fp->globalScript()->nfixed;
2050 } else {
2051 start = fp->slots();
2052 slots += NumSlotsBeforeFixed(fp);
2054 if (next)
2055 slots += CountStackAndArgs(next, start);
2056 else
2057 slots += cx->regs->sp - start;
2059 #ifdef DEBUG
2060 CountSlotsVisitor visitor;
2061 VisitStackSlots(visitor, cx, callDepth);
2062 JS_ASSERT(visitor.count() == slots && !visitor.stopped());
2063 #endif
2064 return slots;
2067 class CaptureTypesVisitor : public SlotVisitorBase
2069 JSContext* mCx;
2070 JSValueType* mTypeMap;
2071 JSValueType* mPtr;
2072 Oracle * mOracle;
2074 public:
2075 JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, Oracle *oracle,
2076 JSValueType* typeMap, bool speculate)
2077 : mCx(cx),
2078 mTypeMap(typeMap),
2079 mPtr(typeMap),
2080 mOracle(speculate ? oracle : NULL)
2083 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
2084 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
2085 JSValueType type = getCoercedType(*vp);
2086 if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isGlobalSlotUndemotable(mCx, slot)))
2087 type = JSVAL_TYPE_DOUBLE;
2088 JS_ASSERT(type != JSVAL_TYPE_BOXED);
2089 debug_only_printf(LC_TMTracer,
2090 "capture type global%d: %c\n",
2091 n, TypeToChar(type));
2092 *mPtr++ = type;
2095 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2096 visitStackSlots(Value *vp, int count, JSStackFrame* fp) {
2097 for (int i = 0; i < count; ++i) {
2098 JSValueType type = getCoercedType(vp[i]);
2099 if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isStackSlotUndemotable(mCx, length())))
2100 type = JSVAL_TYPE_DOUBLE;
2101 JS_ASSERT(type != JSVAL_TYPE_BOXED);
2102 debug_only_printf(LC_TMTracer,
2103 "capture type %s%d: %c\n",
2104 stackSlotKind(), i, TypeToChar(type));
2105 *mPtr++ = type;
2107 return true;
2110 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2111 visitFrameObjPtr(void* p, JSStackFrame* fp) {
2112 JSValueType type = getFrameObjPtrTraceType(p, fp);
2113 debug_only_printf(LC_TMTracer,
2114 "capture type %s%d: %c\n",
2115 stackSlotKind(), 0, TypeToChar(type));
2116 *mPtr++ = type;
2117 return true;
2120 JS_ALWAYS_INLINE uintptr_t length() {
2121 return mPtr - mTypeMap;
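// length() is also the index of the next slot to be written, which is why
// visitStackSlots above can pass it to isStackSlotUndemotable as the slot id.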
2125 void
2126 TypeMap::set(unsigned stackSlots, unsigned ngslots,
2127 const JSValueType* stackTypeMap, const JSValueType* globalTypeMap)
2129 setLength(ngslots + stackSlots);
2130 memcpy(data(), stackTypeMap, stackSlots * sizeof(JSValueType));
2131 memcpy(data() + stackSlots, globalTypeMap, ngslots * sizeof(JSValueType));
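// Layout after set(): stack slot types first, then global slot types, matching
// the order in which captureTypes() below fills the map.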
2134 /*
2135 * Capture the type map for the selected slots of the global object and currently pending
2136 * stack frames.
2137 */
2138 JS_REQUIRES_STACK void
2139 TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth,
2140 bool speculate)
2142 setLength(NativeStackSlots(cx, callDepth) + slots.length());
2143 CaptureTypesVisitor visitor(cx, oracle, data(), speculate);
2144 VisitSlots(visitor, cx, globalObj, callDepth, slots);
2145 JS_ASSERT(visitor.length() == length());
2148 JS_REQUIRES_STACK void
2149 TypeMap::captureMissingGlobalTypes(JSContext* cx,
2150 JSObject* globalObj, SlotList& slots, unsigned stackSlots,
2151 bool speculate)
2153 unsigned oldSlots = length() - stackSlots;
2154 int diff = slots.length() - oldSlots;
2155 JS_ASSERT(diff >= 0);
2156 setLength(length() + diff);
2157 CaptureTypesVisitor visitor(cx, oracle, data() + stackSlots + oldSlots, speculate);
2158 VisitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
2161 /* Compare this type map to another one and see whether they match. */
2162 bool
2163 TypeMap::matches(TypeMap& other) const
2165 if (length() != other.length())
2166 return false;
2167 return !memcmp(data(), other.data(), length());
2170 void
2171 TypeMap::fromRaw(JSValueType* other, unsigned numSlots)
2173 unsigned oldLength = length();
2174 setLength(length() + numSlots);
2175 for (unsigned i = 0; i < numSlots; i++)
2176 get(oldLength + i) = other[i];
2179 /*
2180 * Use the provided storage area to create a new type map that contains the
2181 * partial type map with the rest of it filled up from the complete type
2182 * map.
2183 */
2184 static void
2185 MergeTypeMaps(JSValueType** partial, unsigned* plength, JSValueType* complete, unsigned clength, JSValueType* mem)
2187 unsigned l = *plength;
2188 JS_ASSERT(l < clength);
2189 memcpy(mem, *partial, l * sizeof(JSValueType));
2190 memcpy(mem + l, complete + l, (clength - l) * sizeof(JSValueType));
2191 *partial = mem;
2192 *plength = clength;
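// Illustrative use (assumed sizes): a side-exit map with 3 entries merged
// against a complete map of 5 keeps the 3 known types, copies the remaining
// 2 from the complete map into |mem|, and reports a length of 5.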
2195 /*
2196 * Specializes a tree to any specifically missing globals, including any
2197 * dependent trees.
2198 */
2199 static JS_REQUIRES_STACK void
2200 SpecializeTreesToLateGlobals(JSContext* cx, TreeFragment* root, JSValueType* globalTypeMap,
2201 unsigned numGlobalSlots)
2203 for (unsigned i = root->nGlobalTypes(); i < numGlobalSlots; i++)
2204 root->typeMap.add(globalTypeMap[i]);
2206 JS_ASSERT(root->nGlobalTypes() == numGlobalSlots);
2208 for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
2209 TreeFragment* tree = root->dependentTrees[i];
2210 if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
2211 SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
2213 for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
2214 TreeFragment* tree = root->linkedTrees[i];
2215 if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
2216 SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
2220 /* Specializes a tree to any missing globals, including any dependent trees. */
2221 static JS_REQUIRES_STACK void
2222 SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeFragment* root)
2224 /* If we already have a bunch of peer trees, try to be as generic as possible. */
2225 size_t count = 0;
2226 for (TreeFragment *f = root->first; f; f = f->peer, ++count);
2227 bool speculate = count < MAXPEERS-1;
2229 root->typeMap.captureMissingGlobalTypes(cx, globalObj, *root->globalSlots, root->nStackTypes,
2230 speculate);
2231 JS_ASSERT(root->globalSlots->length() == root->typeMap.length() - root->nStackTypes);
2233 SpecializeTreesToLateGlobals(cx, root, root->globalTypeMap(), root->nGlobalTypes());
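// Note: once a tree has accumulated MAXPEERS-1 peers, captureMissingGlobalTypes
// runs without the oracle (speculate == false), so int slots are recorded as
// doubles, the most generic typing.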
2236 static void
2237 ResetJITImpl(JSContext* cx, TraceMonitor *tm);
2239 #ifdef MOZ_TRACEVIS
2240 static JS_INLINE void
2241 ResetJIT(JSContext* cx, TraceMonitor *tm, TraceVisFlushReason r)
2243 LogTraceVisEvent(cx, S_RESET, r);
2244 ResetJITImpl(cx, tm);
2246 #else
2247 # define ResetJIT(cx, tm, reason) ResetJITImpl(cx, tm)
2248 #endif
2250 void
2251 FlushJITCache(JSContext *cx, TraceMonitor *tm)
2253 ResetJIT(cx, tm, FR_OOM);
2256 static void
2257 TrashTree(TreeFragment* f);
2259 JS_REQUIRES_STACK
2260 TraceRecorder::TraceRecorder(JSContext* cx, TraceMonitor *tm,
2261 VMSideExit* anchor, VMFragment* fragment,
2262 unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
2263 VMSideExit* innermost, JSScript* outerScript, jsbytecode* outerPC,
2264 uint32 outerArgc, bool speculate)
2265 : cx(cx),
2266 traceMonitor(tm),
2267 oracle(speculate ? tm->oracle : NULL),
2268 fragment(fragment),
2269 tree(fragment->root),
2270 globalObj(tree->globalObj),
2271 outerScript(outerScript),
2272 outerPC(outerPC),
2273 outerArgc(outerArgc),
2274 anchor(anchor),
2275 cx_ins(NULL),
2276 eos_ins(NULL),
2277 eor_ins(NULL),
2278 loopLabel(NULL),
2279 importTypeMap(&tempAlloc(), tm->oracle),
2280 lirbuf(new (tempAlloc()) LirBuffer(tempAlloc())),
2281 mark(*traceMonitor->traceAlloc),
2282 numSideExitsBefore(tree->sideExits.length()),
2283 tracker(),
2284 nativeFrameTracker(),
2285 global_slots(NULL),
2286 callDepth(anchor ? anchor->calldepth : 0),
2287 atoms(FrameAtomBase(cx, cx->fp())),
2288 consts(JSScript::isValidOffset(cx->fp()->script()->constOffset)
2289 ? cx->fp()->script()->consts()->vector
2290 : NULL),
2291 strictModeCode_ins(NULL),
2292 cfgMerges(&tempAlloc()),
2293 trashSelf(false),
2294 whichTreesToTrash(&tempAlloc()),
2295 guardedShapeTable(cx),
2296 initDepth(0),
2297 hadNewInit(false),
2298 #ifdef DEBUG
2299 addPropShapeBefore(NULL),
2300 #endif
2301 rval_ins(NULL),
2302 native_rval_ins(NULL),
2303 newobj_ins(NULL),
2304 pendingSpecializedNative(NULL),
2305 pendingUnboxSlot(NULL),
2306 pendingGuardCondition(NULL),
2307 pendingGlobalSlotsToSet(cx),
2308 pendingLoop(true),
2309 generatedSpecializedNative(),
2310 tempTypeMap(cx),
2311 w(&tempAlloc(), lirbuf)
2313 JS_ASSERT(globalObj == cx->fp()->scopeChain().getGlobal());
2314 JS_ASSERT(globalObj->hasOwnShape());
2315 JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip);
2317 #ifdef JS_METHODJIT
2318 if (TRACE_PROFILER(cx))
2319 AbortProfiling(cx);
2320 #endif
2322 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == NULL);
2323 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
2324 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
2325 JS_THREAD_DATA(cx)->recordingCompartment = cx->compartment;
2327 #ifdef DEBUG
2328 lirbuf->printer = new (tempAlloc()) LInsPrinter(tempAlloc(), TM_NUM_USED_ACCS);
2329 #endif
2331 /*
2332 * Reset the fragment state we care about in case we got a recycled
2333 * fragment. This includes resetting any profiling data we might have
2334 * accumulated.
2335 */
2336 fragment->lastIns = NULL;
2337 fragment->setCode(NULL);
2338 fragment->lirbuf = lirbuf;
2339 verbose_only( fragment->profCount = 0; )
2340 verbose_only( fragment->nStaticExits = 0; )
2341 verbose_only( fragment->nCodeBytes = 0; )
2342 verbose_only( fragment->nExitBytes = 0; )
2343 verbose_only( fragment->guardNumberer = 1; )
2344 verbose_only( fragment->guardsForFrag = NULL; )
2345 verbose_only( fragment->loopLabel = NULL; )
2347 /*
2348 * Don't change fragment->profFragID, though. Once the identity of the
2349 * Fragment is set up (for profiling purposes), we can't change it.
2350 */
2352 if (!guardedShapeTable.init())
2353 OUT_OF_MEMORY_ABORT("TraceRecorder::TraceRecorder: out of memory");
2355 #ifdef JS_JIT_SPEW
2356 debug_only_print0(LC_TMMinimal, "\n");
2357 debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
2358 tree->treeFileName, tree->treeLineNumber, tree->treePCOffset,
2359 fragment->profFragID);
2361 debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
2362 (void*)this->globalObj, this->globalObj->shape());
2363 debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
2364 (void*)anchor);
2365 #endif
2367 /* This creates the LIR writer pipeline. */
2368 w.init(&LogController);
2370 w.start();
2372 for (int i = 0; i < NumSavedRegs; ++i)
2373 w.paramp(i, 1);
2374 #ifdef DEBUG
2375 for (int i = 0; i < NumSavedRegs; ++i)
2376 w.name(lirbuf->savedRegs[i], regNames[REGNUM(Assembler::savedRegs[i])]);
2377 #endif
2379 lirbuf->state = w.name(w.paramp(0, 0), "state");
2381 if (fragment == fragment->root) {
2382 w.comment("begin-loop");
2383 InitConst(loopLabel) = w.label();
2385 w.comment("begin-setup");
2387 // if profiling, drop a label, so the assembler knows to put a
2388 // frag-entry-counter increment at this point. If there's a
2389 // loopLabel, use that; else we'll have to make a dummy label
2390 // especially for this purpose.
2391 verbose_only( if (LogController.lcbits & LC_FragProfile) {
2392 LIns* entryLabel = NULL;
2393 if (fragment == fragment->root) {
2394 entryLabel = loopLabel;
2395 } else {
2396 entryLabel = w.label();
2398 NanoAssert(entryLabel);
2399 NanoAssert(!fragment->loopLabel);
2400 fragment->loopLabel = entryLabel;
2403 lirbuf->sp = w.name(w.ldpStateField(sp), "sp");
2404 lirbuf->rp = w.name(w.ldpStateField(rp), "rp");
2405 InitConst(cx_ins) = w.name(w.ldpStateField(cx), "cx");
2406 InitConst(eos_ins) = w.name(w.ldpStateField(eos), "eos");
2407 InitConst(eor_ins) = w.name(w.ldpStateField(eor), "eor");
2409 strictModeCode_ins = w.name(w.immi(cx->fp()->script()->strictModeCode), "strict");
2411 /* If we came from exit, we might not have enough global types. */
2412 if (tree->globalSlots->length() > tree->nGlobalTypes())
2413 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
2415 /* read into registers all values on the stack and all globals we know so far */
2416 import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
2418 if (fragment == fragment->root) {
2419 /*
2420 * We poll the operation callback request flag. It is updated asynchronously whenever
2421 * the callback is to be invoked. We can use w.nameImmpNonGC here as JIT-ed code is per
2422 * thread and cannot outlive the corresponding JSThreadData.
2423 */
2424 w.comment("begin-interruptFlags-check");
2425 /* FIXME: See bug 621140 for moving interruptCounter to the compartment. */
2426 #ifdef JS_THREADSAFE
2427 void *interrupt = (void*) &cx->runtime->interruptCounter;
2428 #else
2429 void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
2430 #endif
2431 LIns* flagptr = w.nameImmpNonGC(interrupt);
2432 LIns* x = w.ldiVolatile(flagptr);
2433 guard(true, w.eqi0(x), TIMEOUT_EXIT);
2434 w.comment("end-interruptFlags-check");
2436 /*
2437 * Count the number of iterations run by a trace, so that we can blacklist if
2438 * the trace runs too few iterations to be worthwhile. Do this only if the methodjit
2439 * is on--otherwise we must try to trace as much as possible.
2440 */
2441 #ifdef JS_METHODJIT
2442 if (cx->methodJitEnabled) {
2443 w.comment("begin-count-loop-iterations");
2444 LIns* counterPtr = w.nameImmpNonGC((void *) &traceMonitor->iterationCounter);
2445 LIns* counterValue = w.ldiVolatile(counterPtr);
2446 LIns* test = w.ltiN(counterValue, LOOP_COUNT_MAX);
2447 LIns *branch = w.jfUnoptimizable(test);
2448 /*
2449 * stiVolatile() uses ACCSET_STORE_ANY; If LICM is implemented
2450 * (bug 545406) this counter will need its own region.
2451 */
2452 w.stiVolatile(w.addi(counterValue, w.immi(1)), counterPtr);
2453 w.label(branch);
2454 w.comment("end-count-loop-iterations");
2456 #endif
2459 /*
2460 * If we are attached to a tree call guard, make sure the guard the inner
2461 * tree exited from is what we expect it to be.
2462 */
2463 if (anchor && anchor->exitType == NESTED_EXIT) {
2464 LIns* nested_ins = w.ldpStateField(outermostTreeExitGuard);
2465 guard(true, w.eqp(nested_ins, w.nameImmpNonGC(innermost)), NESTED_EXIT);
2468 w.comment("end-setup");
2471 TraceRecorder::~TraceRecorder()
2473 /* Should already have been adjusted by callers before calling delete. */
2474 JS_ASSERT(traceMonitor->recorder != this);
2476 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
2477 JS_ASSERT(&JS_THREAD_DATA(cx)->recordingCompartment->traceMonitor == traceMonitor);
2478 JS_THREAD_DATA(cx)->recordingCompartment = NULL;
2480 if (trashSelf)
2481 TrashTree(fragment->root);
2483 for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
2484 TrashTree(whichTreesToTrash[i]);
2486 /* Purge the tempAlloc used during recording. */
2487 tempAlloc().reset();
2489 forgetGuardedShapes();
2492 inline bool
2493 TraceMonitor::outOfMemory() const
2495 return dataAlloc->outOfMemory() ||
2496 tempAlloc->outOfMemory() ||
2497 traceAlloc->outOfMemory();
2500 /*
2501 * This function destroys the recorder after a successful recording, possibly
2502 * starting a suspended outer recorder.
2503 */
2504 AbortableRecordingStatus
2505 TraceRecorder::finishSuccessfully()
2507 JS_ASSERT(!traceMonitor->profile);
2508 JS_ASSERT(traceMonitor->recorder == this);
2509 JS_ASSERT(fragment->lastIns && fragment->code());
2511 AUDIT(traceCompleted);
2512 mark.commit();
2514 /* Grab local copies of members needed after destruction of |this|. */
2515 JSContext* localcx = cx;
2516 TraceMonitor* localtm = traceMonitor;
2518 localtm->recorder = NULL;
2519 /* We can't (easily) use js_delete() here because the constructor is private. */
2520 this->~TraceRecorder();
2521 js_free(this);
2523 /* Catch OOM that occurred during recording. */
2524 if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
2525 ResetJIT(localcx, localtm, FR_OOM);
2526 return ARECORD_ABORTED;
2528 return ARECORD_COMPLETED;
2531 /* This function aborts a recorder and any pending outer recorders. */
2532 JS_REQUIRES_STACK TraceRecorder::AbortResult
2533 TraceRecorder::finishAbort(const char* reason)
2535 JS_ASSERT(!traceMonitor->profile);
2536 JS_ASSERT(traceMonitor->recorder == this);
2538 AUDIT(recorderAborted);
2539 #ifdef DEBUG
2540 debug_only_printf(LC_TMMinimal | LC_TMAbort,
2541 "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
2542 tree->treeFileName,
2543 tree->treeLineNumber,
2544 tree->treePCOffset,
2545 cx->fp()->script()->filename,
2546 js_FramePCToLineNumber(cx, cx->fp()),
2547 FramePCOffset(cx, cx->fp()),
2548 reason);
2549 #endif
2550 Backoff(traceMonitor, (jsbytecode*) fragment->root->ip, fragment->root);
2552 /*
2553 * If this is the primary trace and we didn't succeed compiling, trash the
2554 * tree. Otherwise, remove the VMSideExits we added while recording, which
2555 * are about to be invalid.
2556 *
2557 * BIG FAT WARNING: resetting the length is only a valid strategy as long as
2558 * there may be only one recorder active for a single TreeInfo at a time.
2559 * Otherwise, we may be throwing away another recorder's valid side exits.
2560 */
2561 if (fragment->root == fragment) {
2562 TrashTree(fragment->toTreeFragment());
2563 } else {
2564 JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length());
2565 fragment->root->sideExits.setLength(numSideExitsBefore);
2568 /* Grab local copies of members needed after destruction of |this|. */
2569 JSContext* localcx = cx;
2570 TraceMonitor* localtm = traceMonitor;
2572 localtm->recorder = NULL;
2573 /* We can't (easily) use js_delete() here because the constructor is private. */
2574 this->~TraceRecorder();
2575 js_free(this);
2577 /* Catch OOM that occurred during recording. */
2578 if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
2579 ResetJIT(localcx, localtm, FR_OOM);
2580 return JIT_RESET;
2582 return NORMAL_ABORT;
2585 inline LIns*
2586 TraceRecorder::w_immpObjGC(JSObject* obj)
2588 JS_ASSERT(obj);
2589 tree->gcthings.addUnique(ObjectValue(*obj));
2590 return w.immpNonGC((void*)obj);
2593 inline LIns*
2594 TraceRecorder::w_immpFunGC(JSFunction* fun)
2596 JS_ASSERT(fun);
2597 tree->gcthings.addUnique(ObjectValue(*fun));
2598 return w.immpNonGC((void*)fun);
2601 inline LIns*
2602 TraceRecorder::w_immpStrGC(JSString* str)
2604 JS_ASSERT(str);
2605 tree->gcthings.addUnique(StringValue(str));
2606 return w.immpNonGC((void*)str);
2609 inline LIns*
2610 TraceRecorder::w_immpShapeGC(const Shape* shape)
2612 JS_ASSERT(shape);
2613 tree->shapes.addUnique(shape);
2614 return w.immpNonGC((void*)shape);
2617 inline LIns*
2618 TraceRecorder::w_immpIdGC(jsid id)
2620 if (JSID_IS_GCTHING(id))
2621 tree->gcthings.addUnique(IdToValue(id));
2622 return w.immpNonGC((void*)JSID_BITS(id));
2625 ptrdiff_t
2626 TraceRecorder::nativeGlobalSlot(const Value* p) const
2628 JS_ASSERT(isGlobal(p));
2629 return ptrdiff_t(p - globalObj->slots);
2632 /* Determine the offset in the native global frame for a jsval we track. */
2633 ptrdiff_t
2634 TraceRecorder::nativeGlobalOffset(const Value* p) const
2636 return nativeGlobalSlot(p) * sizeof(double);
2639 /* Determine whether a value is a global stack slot. */
2640 bool
2641 TraceRecorder::isGlobal(const Value* p) const
2643 return (size_t(p - globalObj->slots) < globalObj->numSlots());
2646 bool
2647 TraceRecorder::isVoidPtrGlobal(const void* p) const
2649 return isGlobal((const Value *)p);
2652 /*
2653 * Return the offset in the native stack for the given jsval. More formally,
2654 * |p| must be the address of a jsval that is represented in the native stack
2655 * area. The return value is the offset, from TracerState::stackBase, in bytes,
2656 * where the native representation of |*p| is stored. To get the offset
2657 * relative to TracerState::sp, subtract TreeFragment::nativeStackBase.
2658 */
2659 JS_REQUIRES_STACK ptrdiff_t
2660 TraceRecorder::nativeStackOffsetImpl(const void* p) const
2662 CountSlotsVisitor visitor(p);
2663 VisitStackSlots(visitor, cx, callDepth);
2664 size_t offset = visitor.count() * sizeof(double);
2666 /*
2667 * If it's not in a pending frame, it must be on the stack of the current
2668 * frame above sp but below fp->slots() + script->nslots.
2669 */
2670 if (!visitor.stopped()) {
2671 const Value *vp = (const Value *)p;
2672 JS_ASSERT(size_t(vp - cx->fp()->slots()) < cx->fp()->numSlots());
2673 offset += size_t(vp - cx->regs->sp) * sizeof(double);
2675 return offset;
2678 JS_REQUIRES_STACK inline ptrdiff_t
2679 TraceRecorder::nativeStackOffset(const Value* p) const
2681 return nativeStackOffsetImpl(p);
2684 JS_REQUIRES_STACK inline ptrdiff_t
2685 TraceRecorder::nativeStackSlotImpl(const void* p) const
2687 return nativeStackOffsetImpl(p) / sizeof(double);
2690 JS_REQUIRES_STACK inline ptrdiff_t
2691 TraceRecorder::nativeStackSlot(const Value* p) const
2693 return nativeStackSlotImpl(p);
2696 /*
2697 * Return the offset, from TracerState::sp, for the given jsval. Shorthand for:
2698 * -TreeFragment::nativeStackBase + nativeStackOffset(p).
2699 */
2700 inline JS_REQUIRES_STACK ptrdiff_t
2701 TraceRecorder::nativespOffsetImpl(const void* p) const
2703 return -tree->nativeStackBase + nativeStackOffsetImpl(p);
2706 inline JS_REQUIRES_STACK ptrdiff_t
2707 TraceRecorder::nativespOffset(const Value* p) const
2709 return nativespOffsetImpl(p);
2712 /* Track the maximum number of native frame slots we need during execution. */
2713 inline void
2714 TraceRecorder::trackNativeStackUse(unsigned slots)
2716 if (slots > tree->maxNativeStackSlots)
2717 tree->maxNativeStackSlots = slots;
2720 /*
2721 * Unbox a jsval into a slot. Slots are wide enough to hold double values
2722 * directly (instead of storing a pointer to them). We assert instead of
2723 * type checking. The caller must ensure the types are compatible.
2724 */
2725 static inline void
2726 ValueToNative(const Value &v, JSValueType type, double* slot)
2728 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
2729 if (type > JSVAL_UPPER_INCL_TYPE_OF_NUMBER_SET)
2730 v.unboxNonDoubleTo((uint64 *)slot);
2731 else if (type == JSVAL_TYPE_INT32)
2732 *(int32_t *)slot = v.isInt32() ? v.toInt32() : (int32_t)v.toDouble();
2733 else
2734 *(double *)slot = v.toNumber();
2736 #ifdef DEBUG
2737 int32_t _;
2738 switch (type) {
2739 case JSVAL_TYPE_NONFUNOBJ: {
2740 JS_ASSERT(!IsFunctionObject(v));
2741 debug_only_printf(LC_TMTracer,
2742 "object<%p:%s> ", (void*)*(JSObject **)slot,
2743 v.toObject().getClass()->name);
2744 return;
2747 case JSVAL_TYPE_INT32:
2748 JS_ASSERT(v.isInt32() || (v.isDouble() && JSDOUBLE_IS_INT32(v.toDouble(), &_)));
2749 debug_only_printf(LC_TMTracer, "int<%d> ", *(jsint *)slot);
2750 return;
2752 case JSVAL_TYPE_DOUBLE:
2753 JS_ASSERT(v.isNumber());
2754 debug_only_printf(LC_TMTracer, "double<%g> ", *(jsdouble *)slot);
2755 return;
2757 case JSVAL_TYPE_BOXED:
2758 JS_NOT_REACHED("found jsval type in an entry type map");
2759 return;
2761 case JSVAL_TYPE_STRING:
2762 JS_ASSERT(v.isString());
2763 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)*(JSString**)slot);
2764 return;
2766 case JSVAL_TYPE_NULL:
2767 JS_ASSERT(v.isNull());
2768 debug_only_print0(LC_TMTracer, "null ");
2769 return;
2771 case JSVAL_TYPE_BOOLEAN:
2772 JS_ASSERT(v.isBoolean());
2773 debug_only_printf(LC_TMTracer, "special<%d> ", *(JSBool*)slot);
2774 return;
2776 case JSVAL_TYPE_UNDEFINED:
2777 JS_ASSERT(v.isUndefined());
2778 debug_only_print0(LC_TMTracer, "undefined ");
2779 return;
2781 case JSVAL_TYPE_MAGIC:
2782 JS_ASSERT(v.isMagic());
2783 debug_only_print0(LC_TMTracer, "hole ");
2784 return;
2786 case JSVAL_TYPE_FUNOBJ: {
2787 JS_ASSERT(IsFunctionObject(v));
2788 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
2789 #if defined JS_JIT_SPEW
2790 if (LogController.lcbits & LC_TMTracer) {
2791 char funName[40];
2792 if (fun->atom)
2793 JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
2794 else
2795 strcpy(funName, "unnamed");
2796 LogController.printf("function<%p:%s> ", (void*)*(JSObject **)slot, funName);
2798 #endif
2799 return;
2801 default:
2802 JS_NOT_REACHED("unexpected type");
2803 break;
2805 #endif
2808 void
2809 TraceMonitor::flush()
2811 /* flush should only be called after all recorders have been aborted. */
2812 JS_ASSERT(!recorder);
2813 JS_ASSERT(!profile);
2814 AUDIT(cacheFlushed);
2816 // recover profiling data from expiring Fragments
2817 verbose_only(
2818 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2819 for (TreeFragment *f = vmfragments[i]; f; f = f->next) {
2820 JS_ASSERT(f->root == f);
2821 for (TreeFragment *p = f; p; p = p->peer)
2822 FragProfiling_FragFinalizer(p, this);
2827 verbose_only(
2828 for (Seq<Fragment*>* f = branches; f; f = f->tail)
2829 FragProfiling_FragFinalizer(f->head, this);
2832 flushEpoch++;
2834 #ifdef JS_METHODJIT
2835 if (loopProfiles) {
2836 for (LoopProfileMap::Enum e(*loopProfiles); !e.empty(); e.popFront()) {
2837 jsbytecode *pc = e.front().key;
2838 LoopProfile *prof = e.front().value;
2839 /* This code takes care of resetting all methodjit state. */
2840 js::mjit::ResetTraceHint(prof->entryScript, pc, GET_UINT16(pc), true);
2843 #endif
2845 frameCache->reset();
2846 dataAlloc->reset();
2847 traceAlloc->reset();
2848 codeAlloc->reset();
2849 tempAlloc->reset();
2850 oracle->clear();
2851 loopProfiles->clear();
2853 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2854 globalStates[i].globalShape = -1;
2855 globalStates[i].globalSlots = new (*dataAlloc) SlotList(dataAlloc);
2858 assembler = new (*dataAlloc) Assembler(*codeAlloc, *dataAlloc, *dataAlloc, core,
2859 &LogController, avmplus::AvmCore::config);
2860 verbose_only( branches = NULL; )
2862 PodArrayZero(vmfragments);
2863 tracedScripts.clear();
2865 needFlush = JS_FALSE;
2868 inline bool
2869 IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape)
2871 JSRuntime *rt = cx->runtime;
2872 if (rt->gcCurrentCompartment != NULL)
2873 return false;
2875 return !shape->marked();
2878 inline bool
2879 HasUnreachableGCThings(JSContext *cx, TreeFragment *f)
2881 /*
2882 * We do not check here for dead scripts as JSScript is not a GC thing.
2883 * Instead PurgeScriptFragments is used to remove dead script fragments.
2884 * See bug 584860.
2885 */
2886 if (IsAboutToBeFinalized(cx, f->globalObj))
2887 return true;
2888 Value* vp = f->gcthings.data();
2889 for (unsigned len = f->gcthings.length(); len; --len) {
2890 Value &v = *vp++;
2891 JS_ASSERT(v.isMarkable());
2892 if (IsAboutToBeFinalized(cx, v.toGCThing()))
2893 return true;
2895 const Shape** shapep = f->shapes.data();
2896 for (unsigned len = f->shapes.length(); len; --len) {
2897 const Shape* shape = *shapep++;
2898 if (IsShapeAboutToBeFinalized(cx, shape))
2899 return true;
2901 return false;
2904 void
2905 TraceMonitor::sweep(JSContext *cx)
2907 JS_ASSERT(!ontrace());
2908 debug_only_print0(LC_TMTracer, "Purging fragments with dead things");
2910 bool shouldAbortRecording = false;
2911 TreeFragment *recorderTree = NULL;
2912 if (recorder) {
2913 recorderTree = recorder->getTree();
2914 shouldAbortRecording = HasUnreachableGCThings(cx, recorderTree);
2917 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2918 TreeFragment** fragp = &vmfragments[i];
2919 while (TreeFragment* frag = *fragp) {
2920 TreeFragment* peer = frag;
2921 do {
2922 if (HasUnreachableGCThings(cx, peer))
2923 break;
2924 peer = peer->peer;
2925 } while (peer);
2926 if (peer) {
2927 debug_only_printf(LC_TMTracer,
2928 "TreeFragment peer %p has dead gc thing."
2929 "Disconnecting tree %p with ip %p\n",
2930 (void *) peer, (void *) frag, frag->ip);
2931 JS_ASSERT(frag->root == frag);
2932 *fragp = frag->next;
2933 do {
2934 verbose_only( FragProfiling_FragFinalizer(frag, this); );
2935 if (recorderTree == frag)
2936 shouldAbortRecording = true;
2937 TrashTree(frag);
2938 frag = frag->peer;
2939 } while (frag);
2940 } else {
2941 fragp = &frag->next;
2946 if (shouldAbortRecording)
2947 recorder->finishAbort("dead GC things");
2950 void
2951 TraceMonitor::mark(JSTracer *trc)
2953 TracerState* state = tracerState;
2954 while (state) {
2955 if (state->nativeVp)
2956 MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
2957 state = state->prev;
2961 /*
2962 * Box a value from the native stack back into the Value format.
2963 */
2964 static inline void
2965 NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
2967 if (type == JSVAL_TYPE_DOUBLE) {
2968 v.setNumber(*slot);
2969 } else if (JS_LIKELY(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET)) {
2970 v.boxNonDoubleFrom(type, (uint64 *)slot);
2971 } else if (type == JSVAL_TYPE_STRORNULL) {
2972 JSString *str = *(JSString **)slot;
2973 v = str ? StringValue(str) : NullValue();
2974 } else if (type == JSVAL_TYPE_OBJORNULL) {
2975 JSObject *obj = *(JSObject **)slot;
2976 v = obj ? ObjectValue(*obj) : NullValue();
2977 } else {
2978 JS_ASSERT(type == JSVAL_TYPE_BOXED);
2979 JS_STATIC_ASSERT(sizeof(Value) == sizeof(double));
2980 v = *(Value *)slot;
2983 #ifdef DEBUG
2984 switch (type) {
2985 case JSVAL_TYPE_NONFUNOBJ:
2986 JS_ASSERT(!IsFunctionObject(v));
2987 debug_only_printf(LC_TMTracer,
2988 "object<%p:%s> ",
2989 (void*) &v.toObject(),
2990 v.toObject().getClass()->name);
2991 break;
2992 case JSVAL_TYPE_INT32:
2993 debug_only_printf(LC_TMTracer, "int<%d> ", v.toInt32());
2994 break;
2995 case JSVAL_TYPE_DOUBLE:
2996 debug_only_printf(LC_TMTracer, "double<%g> ", v.toNumber());
2997 break;
2998 case JSVAL_TYPE_STRING:
2999 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)v.toString());
3000 break;
3001 case JSVAL_TYPE_NULL:
3002 JS_ASSERT(v.isNull());
3003 debug_only_print0(LC_TMTracer, "null ");
3004 break;
3005 case JSVAL_TYPE_BOOLEAN:
3006 debug_only_printf(LC_TMTracer, "bool<%d> ", v.toBoolean());
3007 break;
3008 case JSVAL_TYPE_UNDEFINED:
3009 JS_ASSERT(v.isUndefined());
3010 debug_only_print0(LC_TMTracer, "undefined ");
3011 break;
3012 case JSVAL_TYPE_MAGIC:
3013 debug_only_printf(LC_TMTracer, "magic<%d> ", v.whyMagic());
3014 break;
3015 case JSVAL_TYPE_FUNOBJ:
3016 JS_ASSERT(IsFunctionObject(v));
3017 #if defined JS_JIT_SPEW
3018 if (LogController.lcbits & LC_TMTracer) {
3019 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
3020 char funName[40];
3021 if (fun->atom)
3022 JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
3023 else
3024 strcpy(funName, "unnamed");
3025 LogController.printf("function<%p:%s> ", (void*) &v.toObject(), funName);
3027 #endif
3028 break;
3029 case JSVAL_TYPE_STRORNULL:
3030 debug_only_printf(LC_TMTracer, "nullablestr<%p> ", v.isNull() ? NULL : (void *)v.toString());
3031 break;
3032 case JSVAL_TYPE_OBJORNULL:
3033 debug_only_printf(LC_TMTracer, "nullableobj<%p> ", v.isNull() ? NULL : (void *)&v.toObject());
3034 break;
3035 case JSVAL_TYPE_BOXED:
3036 debug_only_printf(LC_TMTracer, "box<%llx> ", (long long unsigned int)v.asRawBits());
3037 break;
3038 default:
3039 JS_NOT_REACHED("unexpected type");
3040 break;
3042 #endif
3045 void
3046 ExternNativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
3048 return NativeToValue(cx, v, type, slot);
3051 class BuildNativeFrameVisitor : public SlotVisitorBase
3053 JSContext *mCx;
3054 JSValueType *mTypeMap;
3055 double *mGlobal;
3056 double *mStack;
3057 public:
3058 BuildNativeFrameVisitor(JSContext *cx,
3059 JSValueType *typemap,
3060 double *global,
3061 double *stack) :
3062 mCx(cx),
3063 mTypeMap(typemap),
3064 mGlobal(global),
3065 mStack(stack)
3068 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3069 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
3070 debug_only_printf(LC_TMTracer, "global%d: ", n);
3071 ValueToNative(*vp, *mTypeMap++, &mGlobal[slot]);
3074 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3075 visitStackSlots(Value *vp, int count, JSStackFrame* fp) {
3076 for (int i = 0; i < count; ++i) {
3077 debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
3078 ValueToNative(*vp++, *mTypeMap++, mStack++);
3080 return true;
3083 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3084 visitFrameObjPtr(void* p, JSStackFrame* fp) {
3085 debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), 0);
3086 if (p == fp->addressOfScopeChain())
3087 *(JSObject **)mStack = &fp->scopeChain();
3088 else
3089 *(JSObject **)mStack = fp->hasArgsObj() ? &fp->argsObj() : NULL;
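// A null store here means the frame has no args object yet; the type map
// records such a slot as JSVAL_TYPE_NULL (checked in the DEBUG block below),
// and the flush visitor on the way off trace expects the same encoding.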
3090 #ifdef DEBUG
3091 if (*mTypeMap == JSVAL_TYPE_NULL) {
3092 JS_ASSERT(*(JSObject **)mStack == NULL);
3093 debug_only_print0(LC_TMTracer, "null ");
3094 } else {
3095 JS_ASSERT(*mTypeMap == JSVAL_TYPE_NONFUNOBJ);
3096 JS_ASSERT(!(*(JSObject **)p)->isFunction());
3097 debug_only_printf(LC_TMTracer,
3098 "object<%p:%s> ", *(void **)p,
3099 (*(JSObject **)p)->getClass()->name);
3101 #endif
3102 mTypeMap++;
3103 mStack++;
3104 return true;
3108 static JS_REQUIRES_STACK void
3109 BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
3110 unsigned ngslots, uint16 *gslots,
3111 JSValueType *typeMap, double *global, double *stack)
3113 BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
3114 VisitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
3115 debug_only_print0(LC_TMTracer, "\n");
3118 class FlushNativeGlobalFrameVisitor : public SlotVisitorBase
3120 JSContext *mCx;
3121 JSValueType *mTypeMap;
3122 double *mGlobal;
3123 public:
3124 FlushNativeGlobalFrameVisitor(JSContext *cx,
3125 JSValueType *typeMap,
3126 double *global) :
3127 mCx(cx),
3128 mTypeMap(typeMap),
3129 mGlobal(global)
3132 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3133 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
3134 debug_only_printf(LC_TMTracer, "global%d=", n);
3135 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3136 NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
3140 class FlushNativeStackFrameVisitor : public SlotVisitorBase
3142 JSContext *mCx;
3143 const JSValueType *mInitTypeMap;
3144 const JSValueType *mTypeMap;
3145 double *mStack;
3146 public:
3147 FlushNativeStackFrameVisitor(JSContext *cx,
3148 const JSValueType *typeMap,
3149 double *stack) :
3150 mCx(cx),
3151 mInitTypeMap(typeMap),
3152 mTypeMap(typeMap),
3153 mStack(stack)
3156 const JSValueType* getTypeMap()
3158 return mTypeMap;
3161 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3162 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
3163 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3164 for (size_t i = 0; i < count; ++i) {
3165 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
3166 NativeToValue(mCx, *vp, *mTypeMap, mStack);
3167 vp++;
3168 mTypeMap++;
3169 mStack++;
3171 return true;
3174 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3175 visitFrameObjPtr(void* p, JSStackFrame* fp) {
3176 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3177 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
3178 JSObject *frameobj = *(JSObject **)mStack;
3179 JS_ASSERT((frameobj == NULL) == (*mTypeMap == JSVAL_TYPE_NULL));
3180 if (p == fp->addressOfArgs()) {
3181 if (frameobj) {
3182 JS_ASSERT_IF(fp->hasArgsObj(), frameobj == &fp->argsObj());
3183 fp->setArgsObj(*frameobj);
3184 JS_ASSERT(frameobj->isArguments());
3185 if (frameobj->isNormalArguments())
3186 frameobj->setPrivate(fp);
3187 else
3188 JS_ASSERT(!frameobj->getPrivate());
3189 debug_only_printf(LC_TMTracer,
3190 "argsobj<%p> ",
3191 (void *)frameobj);
3192 } else {
3193 JS_ASSERT(!fp->hasArgsObj());
3194 debug_only_print0(LC_TMTracer,
3195 "argsobj<null> ");
3197 /* else, SynthesizeFrame has initialized fp->args.nactual */
3198 } else {
3199 JS_ASSERT(p == fp->addressOfScopeChain());
3200 if (frameobj->isCall() &&
3201 !frameobj->getPrivate() &&
3202 fp->maybeCallee() == frameobj->getCallObjCallee())
3204 JS_ASSERT(&fp->scopeChain() == JSStackFrame::sInvalidScopeChain);
3205 frameobj->setPrivate(fp);
3206 fp->setScopeChainAndCallObj(*frameobj);
3207 } else {
3208 fp->setScopeChainNoCallObj(*frameobj);
3210 debug_only_printf(LC_TMTracer,
3211 "scopechain<%p> ",
3212 (void *)frameobj);
3214 #ifdef DEBUG
3215 JSValueType type = *mTypeMap;
3216 if (type == JSVAL_TYPE_NULL) {
3217 debug_only_print0(LC_TMTracer, "null ");
3218 } else {
3219 JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ);
3220 JS_ASSERT(!frameobj->isFunction());
3221 debug_only_printf(LC_TMTracer,
3222 "object<%p:%s> ",
3223 *(void **)p,
3224 frameobj->getClass()->name);
3226 #endif
3227 mTypeMap++;
3228 mStack++;
3229 return true;
3233 /* Write back the native global frame's values into the global object's slots. This is infallible. */
3234 static JS_REQUIRES_STACK void
3235 FlushNativeGlobalFrame(JSContext *cx, JSObject *globalObj, double *global, unsigned ngslots,
3236 uint16 *gslots, JSValueType *typemap)
3238 FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
3239 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
3240 debug_only_print0(LC_TMTracer, "\n");
3243 /*
3244 * Returns the number of values on the native stack, excluding the innermost
3245 * frame. This walks all FrameInfos on the native frame stack and sums the
3246 * slot usage of each frame.
3247 */
3248 static int32
3249 StackDepthFromCallStack(TracerState* state, uint32 callDepth)
3251 int32 nativeStackFramePos = 0;
3253 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
3254 for (FrameInfo** fip = state->callstackBase; fip < state->rp + callDepth; fip++)
3255 nativeStackFramePos += (*fip)->callerHeight;
3256 return nativeStackFramePos;
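// GetUpvarOnTrace and GetFromClosure start from this total and walk back down
// by subtracting each FrameInfo's callerHeight to locate a frame's callee slot.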
3259 /*
3260 * Generic function to read upvars on trace from slots of active frames.
3261 * T Traits type parameter. Must provide static functions:
3262 * interp_get(fp, slot) Read the value out of an interpreter frame.
3263 * native_slot(argc, slot) Return the position of the desired value in the on-trace
3264 * stack frame (with position 0 being callee).
3265 *
3266 * upvarLevel Static level of the function containing the upvar definition
3267 * slot Identifies the value to get. The meaning is defined by the traits type.
3268 * callDepth Call depth of current point relative to trace entry
3269 */
3270 template<typename T>
3271 inline JSValueType
3272 GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3274 TracerState* state = JS_TRACE_MONITOR_ON_TRACE(cx)->tracerState;
3275 FrameInfo** fip = state->rp + callDepth;
3277 /*
3278 * First search the FrameInfo call stack for an entry containing our
3279 * upvar, namely one with level == upvarLevel. The first FrameInfo is a
3280 * transition from the entry frame to some callee. However, it is not
3281 * known (from looking at the FrameInfo) whether the entry frame had a
3282 * callee. Rather than special-case this or insert more logic into the
3283 * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
3284 * |>= base|), and let the code after the loop handle it.
3285 */
3286 int32 stackOffset = StackDepthFromCallStack(state, callDepth);
3287 while (--fip > state->callstackBase) {
3288 FrameInfo* fi = *fip;
3290 /*
3291 * The loop starts aligned to the top of the stack, so move down to the first meaningful
3292 * callee. Then read the callee directly from the frame.
3293 */
3294 stackOffset -= fi->callerHeight;
3295 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
3296 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
3297 uintN calleeLevel = fun->u.i.script->staticLevel;
3298 if (calleeLevel == upvarLevel) {
3299 /*
3300 * Now find the upvar's value in the native stack. stackOffset is
3301 * the offset of the start of the activation record corresponding
3302 * to *fip in the native stack.
3303 */
3304 uint32 native_slot = T::native_slot(fi->callerArgc, slot);
3305 *result = state->stackBase[stackOffset + native_slot];
3306 return fi->get_typemap()[native_slot];
3310 // Next search the trace entry frame, which is not in the FrameInfo stack.
3311 if (state->outermostTree->script->staticLevel == upvarLevel) {
3312 uint32 argc = state->outermostTree->argc;
3313 uint32 native_slot = T::native_slot(argc, slot);
3314 *result = state->stackBase[native_slot];
3315 return state->callstackBase[0]->get_typemap()[native_slot];
3318 /*
3319 * If we did not find the upvar in the frames for the active traces,
3320 * then we simply get the value from the interpreter state.
3321 */
3322 JS_ASSERT(upvarLevel < UpvarCookie::UPVAR_LEVEL_LIMIT);
3323 JSStackFrame* fp = cx->findFrameAtLevel(upvarLevel);
3324 Value v = T::interp_get(fp, slot);
3325 JSValueType type = getCoercedType(v);
3326 ValueToNative(v, type, result);
3327 return type;
3330 // For this traits type, 'slot' is the argument index, which may be -2 for callee.
3331 struct UpvarArgTraits {
3332 static Value interp_get(JSStackFrame* fp, int32 slot) {
3333 return fp->formalArg(slot);
3336 static uint32 native_slot(uint32 argc, int32 slot) {
3337 return 2 /*callee,this*/ + slot;
3341 uint32 JS_FASTCALL
3342 GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3344 return GetUpvarOnTrace<UpvarArgTraits>(cx, upvarLevel, slot, callDepth, result);
3347 // For this traits type, 'slot' is an index into the local slots array.
3348 struct UpvarVarTraits {
3349 static Value interp_get(JSStackFrame* fp, int32 slot) {
3350 return fp->slots()[slot];
3353 static uint32 native_slot(uint32 argc, int32 slot) {
3354 return 4 /*callee,this,arguments,scopeChain*/ + argc + slot;
3358 uint32 JS_FASTCALL
3359 GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3361 return GetUpvarOnTrace<UpvarVarTraits>(cx, upvarLevel, slot, callDepth, result);
3364 /*
3365 * For this traits type, 'slot' is an index into the stack area (within slots,
3366 * after nfixed) of a frame with no function. (On trace, the top-level frame is
3367 * the only one that can have no function.)
3368 */
3369 struct UpvarStackTraits {
3370 static Value interp_get(JSStackFrame* fp, int32 slot) {
3371 return fp->slots()[slot + fp->numFixed()];
3374 static uint32 native_slot(uint32 argc, int32 slot) {
3375 /*
3376 * Locals are not imported by the tracer when the frame has no
3377 * function, so we do not add fp->getFixedCount().
3378 */
3379 JS_ASSERT(argc == 0);
3380 return slot;
3384 uint32 JS_FASTCALL
3385 GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth,
3386 double* result)
3388 return GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
3391 // Parameters needed to access a value from a closure on trace.
3392 struct ClosureVarInfo
3394 uint32 slot;
3395 #ifdef DEBUG
3396 uint32 callDepth;
3397 #endif
3400 /*
3401 * Generic function to read upvars from Call objects of active heavyweight functions.
3402 * call Callee Function object in which the upvar is accessed.
3403 */
3404 template<typename T>
3405 inline uint32
3406 GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result)
3408 JS_ASSERT(call->isCall());
3410 #ifdef DEBUG
3411 TracerState* state = JS_TRACE_MONITOR_ON_TRACE(cx)->tracerState;
3412 FrameInfo** fip = state->rp + cv->callDepth;
3413 int32 stackOffset = StackDepthFromCallStack(state, cv->callDepth);
3414 while (--fip > state->callstackBase) {
3415 FrameInfo* fi = *fip;
3417 /*
3418 * The loop starts aligned to the top of the stack, so move down to the first meaningful
3419 * callee. Then read the callee directly from the frame.
3420 */
3421 stackOffset -= fi->callerHeight;
3422 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
3423 if (callee == call) {
3424 // This is not reachable as long as the tracer guards on the identity of the callee's
3425 // parent when making a call:
3427 // - We can only reach this point if we execute JSOP_LAMBDA on trace, then call the
3428 // function created by the lambda, and then execute a JSOP_NAME on trace.
3429 // - Each time we execute JSOP_LAMBDA we get a function with a different parent.
3430 // - When we execute the call to the new function, we exit trace because the parent
3431 // is different.
3432 JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
3435 #endif
3437 // We already guarded on trace that we aren't touching an outer tree's entry frame
3438 VOUCH_DOES_NOT_REQUIRE_STACK();
3439 JSStackFrame* fp = (JSStackFrame*) call->getPrivate();
3440 JS_ASSERT(fp != cx->fp());
3442 Value v;
3443 if (fp) {
3444 v = T::get_slot(fp, cv->slot);
3445 } else {
3446 /*
3447 * Get the value from the object. We know we have a Call object, and
3448 * that our slot index is fine, so don't monkey around with calling the
3449 * property getter (which just looks in the slot) or calling
3450 * js_GetReservedSlot. Just get the slot directly. Note the static
3451 * asserts in jsfun.cpp which make sure Call objects use slots.
3452 */
3453 JS_ASSERT(cv->slot < T::slot_count(call));
3454 v = T::get_slot(call, cv->slot);
3456 JSValueType type = getCoercedType(v);
3457 ValueToNative(v, type, result);
3458 return type;
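// As with GetUpvarOnTrace, the unboxed value is written through |result| and
// the slot's trace type is returned to the caller on trace.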
3461 struct ArgClosureTraits
3463 // Get the right frame slots to use our slot index with.
3464 // See also UpvarArgTraits.
3465 static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
3466 JS_ASSERT(slot < fp->numFormalArgs());
3467 return fp->formalArg(slot);
3470 // Get the right object slots to use our slot index with.
3471 static inline Value get_slot(JSObject* obj, unsigned slot) {
3472 return obj->getSlot(slot_offset(obj) + slot);
3475 // Get the offset of our object slots from the object's slots pointer.
3476 static inline uint32 slot_offset(JSObject* obj) {
3477 return JSObject::CALL_RESERVED_SLOTS;
3480 // Get the maximum slot index of this type that should be allowed
3481 static inline uint16 slot_count(JSObject* obj) {
3482 return obj->getCallObjCalleeFunction()->nargs;
3485 private:
3486 ArgClosureTraits();
3489 uint32 JS_FASTCALL
3490 GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
3492 return GetFromClosure<ArgClosureTraits>(cx, callee, cv, result);
3495 struct VarClosureTraits
3497 // See also UpvarVarTraits.
3498 static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
3499 JS_ASSERT(slot < fp->fun()->script()->bindings.countVars());
3500 return fp->slots()[slot];
3503 static inline Value get_slot(JSObject* obj, unsigned slot) {
3504 return obj->getSlot(slot_offset(obj) + slot);
3507 static inline uint32 slot_offset(JSObject* obj) {
3508 return JSObject::CALL_RESERVED_SLOTS +
3509 obj->getCallObjCalleeFunction()->nargs;
3512 static inline uint16 slot_count(JSObject* obj) {
3513 return obj->getCallObjCalleeFunction()->script()->bindings.countVars();
3516 private:
3517 VarClosureTraits();
3520 uint32 JS_FASTCALL
3521 GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
3523 return GetFromClosure<VarClosureTraits>(cx, callee, cv, result);
3526 /*
3527 * Box the given native stack frame into the virtual machine stack. This
3528 * is infallible.
3529 *
3530 * @param callDepth the distance between the entry frame into our trace and
3531 * cx->fp() when we make this call. If this is not called as a
3532 * result of a nested exit, callDepth is 0.
3533 * @param mp an array of JSValueType that indicate what the types of the things
3534 * on the stack are.
3535 * @param np pointer to the native stack. We want to copy values from here to
3536 * the JS stack as needed.
3537 * @return the number of things we popped off of np.
3538 */
3539 static JS_REQUIRES_STACK int
3540 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, double* np)
3542 /* Root all string and object references first (we don't need to call the GC for this). */
3543 FlushNativeStackFrameVisitor visitor(cx, mp, np);
3544 VisitStackSlots(visitor, cx, callDepth);
3546 debug_only_print0(LC_TMTracer, "\n");
3547 return visitor.getTypeMap() - mp;
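// The visitor advances its type-map cursor once per flushed slot, so this
// difference is exactly the number of native doubles consumed from |np|.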
3550 /* Emit load instructions onto the trace that read the initial stack state. */
3551 JS_REQUIRES_STACK void
3552 TraceRecorder::importImpl(Address addr, const void* p, JSValueType t,
3553 const char *prefix, uintN index, JSStackFrame *fp)
3555 LIns* ins;
3556 if (t == JSVAL_TYPE_INT32) { /* demoted */
3557 JS_ASSERT(hasInt32Repr(*(const Value *)p));
3559 /*
3560 * Ok, we have a valid demotion attempt pending, so insert an integer
3561 * read and promote it to double since all arithmetic operations expect
3562 * to see doubles on entry. The first op to use this slot will emit a
3563 * d2i cast which will cancel out the i2d we insert here.
3564 */
3565 ins = w.ldi(addr);
3566 ins = w.i2d(ins);
3567 } else {
3568 JS_ASSERT_IF(t != JSVAL_TYPE_BOXED && !isFrameObjPtrTraceType(t),
3569 ((const Value *)p)->isNumber() == (t == JSVAL_TYPE_DOUBLE));
3570 if (t == JSVAL_TYPE_DOUBLE) {
3571 ins = w.ldd(addr);
3572 } else if (t == JSVAL_TYPE_BOOLEAN) {
3573 ins = w.ldi(addr);
3574 } else if (t == JSVAL_TYPE_UNDEFINED) {
3575 ins = w.immiUndefined();
3576 } else if (t == JSVAL_TYPE_MAGIC) {
3577 ins = w.ldi(addr);
3578 } else {
3579 ins = w.ldp(addr);
3582 checkForGlobalObjectReallocation();
3583 tracker.set(p, ins);
3585 #ifdef DEBUG
3586 char name[64];
3587 JS_ASSERT(strlen(prefix) < 11);
3588 void* mark = NULL;
3589 jsuword* localNames = NULL;
3590 const char* funName = NULL;
3591 JSAutoByteString funNameBytes;
3592 if (*prefix == 'a' || *prefix == 'v') {
3593 mark = JS_ARENA_MARK(&cx->tempPool);
3594 JSFunction *fun = fp->fun();
3595 Bindings &bindings = fun->script()->bindings;
3596 if (bindings.hasLocalNames())
3597 localNames = bindings.getLocalNameArray(cx, &cx->tempPool);
3598 funName = fun->atom
3599 ? js_AtomToPrintableString(cx, fun->atom, &funNameBytes)
3600 : "<anonymous>";
3602 if (!strcmp(prefix, "argv")) {
3603 if (index < fp->numFormalArgs()) {
3604 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
3605 JSAutoByteString atomBytes;
3606 JS_snprintf(name, sizeof name, "$%s.%s", funName,
3607 js_AtomToPrintableString(cx, atom, &atomBytes));
3608 } else {
3609 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
3611 } else if (!strcmp(prefix, "vars")) {
3612 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->numFormalArgs() + index]);
3613 JSAutoByteString atomBytes;
3614 JS_snprintf(name, sizeof name, "$%s.%s", funName,
3615 js_AtomToPrintableString(cx, atom, &atomBytes));
3616 } else {
3617 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
3620 if (mark)
3621 JS_ARENA_RELEASE(&cx->tempPool, mark);
3622 w.name(ins, name);
3624 debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%c\n",
3625 p, name, TypeToChar(t));
3626 #endif
3629 JS_REQUIRES_STACK void
3630 TraceRecorder::import(Address addr, const Value* p, JSValueType t,
3631 const char *prefix, uintN index, JSStackFrame *fp)
3633 return importImpl(addr, p, t, prefix, index, fp);
3636 class ImportBoxedStackSlotVisitor : public SlotVisitorBase
3638 TraceRecorder &mRecorder;
3639 LIns *mBase;
3640 ptrdiff_t mStackOffset;
3641 JSValueType *mTypemap;
3642 JSStackFrame *mFp;
3643 public:
3644 ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
3645 LIns *base,
3646 ptrdiff_t stackOffset,
3647 JSValueType *typemap) :
3648 mRecorder(recorder),
3649 mBase(base),
3650 mStackOffset(stackOffset),
3651 mTypemap(typemap)
3654 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3655 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
3656 for (size_t i = 0; i < count; ++i) {
3657 if (*mTypemap == JSVAL_TYPE_BOXED) {
3658 mRecorder.import(StackAddress(mBase, mStackOffset), vp, JSVAL_TYPE_BOXED,
3659 "jsval", i, fp);
3660 LIns *vp_ins = mRecorder.unbox_value(*vp,
3661 StackAddress(mBase, mStackOffset),
3662 mRecorder.copy(mRecorder.anchor));
3663 mRecorder.set(vp, vp_ins);
3665 vp++;
3666 mTypemap++;
3667 mStackOffset += sizeof(double);
3669 return true;
3672 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3673 visitFrameObjPtr(void* p, JSStackFrame *fp) {
3674 JS_ASSERT(*mTypemap != JSVAL_TYPE_BOXED);
3675 mTypemap++;
3676 mStackOffset += sizeof(double);
3677 return true;
3681 JS_REQUIRES_STACK void
3682 TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigned ngslots,
3683 unsigned callDepth, JSValueType* typeMap)
3685 /*
3686 * If we get a partial list that doesn't have all the types (i.e. recording
3687 * from a side exit that was recorded but we added more global slots
3688 * later), merge the missing types from the entry type map. This is safe
3689 * because at the loop edge we verify that we have compatible types for all
3690 * globals (entry type and loop edge type match). While a different trace
3691 * of the tree might have had a guard with a different type map for these
3692 * slots we just filled in here (the guard we continue from didn't know
3693 * about them), since we didn't take that particular guard the only way we
3694 * could have ended up here is if that other trace had at its end a
3695 * compatible type distribution with the entry map. Since that's exactly
3696 * what we used to fill in the types our current side exit didn't provide,
3697 * this is always safe to do.
3698 */
3700 JSValueType* globalTypeMap = typeMap + stackSlots;
3701 unsigned length = tree->nGlobalTypes();
3703 /*
3704 * This is potentially the typemap of the side exit and thus shorter than
3705 * the tree's global type map.
3706 */
3707 if (ngslots < length) {
3708 MergeTypeMaps(&globalTypeMap /* out param */, &ngslots /* out param */,
3709 tree->globalTypeMap(), length,
3710 (JSValueType*)alloca(sizeof(JSValueType) * length));
3712 JS_ASSERT(ngslots == tree->nGlobalTypes());
3714 /*
3715 * Check whether there are any values on the stack we have to unbox and do
3716 * that first before we waste any time fetching the state from the stack.
3717 */
3718 ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, -tree->nativeStackBase, typeMap);
3719 VisitStackSlots(boxedStackVisitor, cx, callDepth);
3721 /*
3722 * Remember the import type map so we can lazily import later whatever
3723 * we need.
3724 */
3725 importTypeMap.set(importStackSlots = stackSlots,
3726 importGlobalSlots = ngslots,
3727 typeMap, globalTypeMap);
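// Apart from the boxed stack slots unboxed above, individual slots are then
// materialized lazily (e.g. importGlobalSlot below) the first time the
// recorder actually needs them, rather than being loaded eagerly here.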
3730 JS_REQUIRES_STACK bool
3731 TraceRecorder::isValidSlot(JSObject *obj, const Shape* shape)
3733 uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
3735 if (setflags) {
3736 if (!shape->hasDefaultSetter())
3737 RETURN_VALUE("non-stub setter", false);
3738 if (!shape->writable())
3739 RETURN_VALUE("writing to a read-only property", false);
3742 /* This check applies even when setflags == 0. */
3743 if (setflags != JOF_SET && !shape->hasDefaultGetter()) {
3744 JS_ASSERT(!shape->isMethod());
3745 RETURN_VALUE("non-stub getter", false);
3748 if (!obj->containsSlot(shape->slot))
3749 RETURN_VALUE("invalid-slot obj property", false);
3751 return true;
3754 /* Lazily import a global slot if we don't already have it in the tracker. */
3755 JS_REQUIRES_STACK void
3756 TraceRecorder::importGlobalSlot(unsigned slot)
3758 JS_ASSERT(slot == uint16(slot));
3759 JS_ASSERT(globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
3761 Value* vp = &globalObj->getSlotRef(slot);
3762 JS_ASSERT(!known(vp));
3764 /* Add the slot to the list of interned global slots. */
3765 JSValueType type;
3766 int index = tree->globalSlots->offsetOf(uint16(slot));
3767 if (index == -1) {
3768 type = getCoercedType(*vp);
3769 if (type == JSVAL_TYPE_INT32 && (!oracle || oracle->isGlobalSlotUndemotable(cx, slot)))
3770 type = JSVAL_TYPE_DOUBLE;
3771 index = (int)tree->globalSlots->length();
3772 tree->globalSlots->add(uint16(slot));
3773 tree->typeMap.add(type);
3774 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
3775 JS_ASSERT(tree->nGlobalTypes() == tree->globalSlots->length());
3776 } else {
3777 type = importTypeMap[importStackSlots + index];
3779 import(EosAddress(eos_ins, slot * sizeof(double)), vp, type, "global", index, NULL);
3782 /* Lazily import a global slot if we don't already have it in the tracker. */
3783 JS_REQUIRES_STACK bool
3784 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
3786 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
3787 return false;
3789 * If the global object grows too large, alloca in ExecuteTree might fail,
3790 * so abort tracing on global objects with unreasonably many slots.
3792 if (globalObj->numSlots() > MAX_GLOBAL_SLOTS)
3793 return false;
3794 Value* vp = &globalObj->getSlotRef(slot);
3795 if (known(vp))
3796 return true; /* we already have it */
3797 importGlobalSlot(slot);
3798 return true;
3801 /* Write back a value onto the stack or global frames. */
3802 LIns*
3803 TraceRecorder::writeBack(LIns* ins, LIns* base, ptrdiff_t offset, bool shouldDemoteToInt32)
3806 * Sink all type casts targeting the stack into the side exit by simply storing the original
3807 * (uncasted) value. Each guard generates the side exit map based on the types of the
3808 * last stores to every stack location, so it's safe to not perform them on-trace.
3810 JS_ASSERT(base == lirbuf->sp || base == eos_ins);
3811 if (shouldDemoteToInt32 && IsPromotedInt32(ins))
3812 ins = w.demoteToInt32(ins);
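/* Stack locations are addressed relative to sp; global slots go through eos_ins, and for globals we also record which slot was written. */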
3814 Address addr;
3815 if (base == lirbuf->sp) {
3816 addr = StackAddress(base, offset);
3817 } else {
3818 addr = EosAddress(base, offset);
3819 unsigned slot = unsigned(offset / sizeof(double));
3820 (void)pendingGlobalSlotsToSet.append(slot); /* OOM is safe. */
3822 return w.st(ins, addr);
3825 /* Update the tracker, then issue a write back store. */
3826 JS_REQUIRES_STACK void
3827 TraceRecorder::setImpl(void* p, LIns* i, bool shouldDemoteToInt32)
3829 JS_ASSERT(i != NULL);
3830 checkForGlobalObjectReallocation();
3831 tracker.set(p, i);
3834 * If we are writing to this location for the first time, calculate the
3835 * offset into the native frame manually. Otherwise just look up the last
3836 * load or store associated with the same source address (p) and use the
3837 * same offset/base.
3839 LIns* x = nativeFrameTracker.get(p);
3840 if (!x) {
3841 if (isVoidPtrGlobal(p))
3842 x = writeBack(i, eos_ins, nativeGlobalOffset((Value *)p), shouldDemoteToInt32);
3843 else
3844 x = writeBack(i, lirbuf->sp, nativespOffsetImpl(p), shouldDemoteToInt32);
3845 nativeFrameTracker.set(p, x);
3846 } else {
3847 #if defined NANOJIT_64BIT
3848 JS_ASSERT( x->isop(LIR_stq) || x->isop(LIR_sti) || x->isop(LIR_std));
3849 #else
3850 JS_ASSERT( x->isop(LIR_sti) || x->isop(LIR_std));
3851 #endif
3853 ptrdiff_t disp;
3854 LIns *base = x->oprnd2();
3855 if (base->isop(LIR_addp) && base->oprnd2()->isImmP()) {
3856 disp = ptrdiff_t(base->oprnd2()->immP());
3857 base = base->oprnd1();
3858 } else {
3859 disp = x->disp();
3862 JS_ASSERT(base == lirbuf->sp || base == eos_ins);
3863 JS_ASSERT(disp == ((base == lirbuf->sp)
3864 ? nativespOffsetImpl(p)
3865 : nativeGlobalOffset((Value *)p)));
3867 writeBack(i, base, disp, shouldDemoteToInt32);
3871 JS_REQUIRES_STACK inline void
3872 TraceRecorder::set(Value* p, LIns* i, bool shouldDemoteToInt32)
3874 return setImpl(p, i, shouldDemoteToInt32);
3877 JS_REQUIRES_STACK void
3878 TraceRecorder::setFrameObjPtr(void* p, LIns* i, bool shouldDemoteToInt32)
3880 JS_ASSERT(isValidFrameObjPtr(p));
3881 return setImpl(p, i, shouldDemoteToInt32);
3884 JS_REQUIRES_STACK LIns*
3885 TraceRecorder::attemptImport(const Value* p)
3887 if (LIns* i = getFromTracker(p))
3888 return i;
3890 /* If the variable was not known, it could require a lazy import. */
3891 CountSlotsVisitor countVisitor(p);
3892 VisitStackSlots(countVisitor, cx, callDepth);
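/* The value can be imported only if the stack-slot walk reached it or it lies within the current frame's slot array. */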
3894 if (countVisitor.stopped() || size_t(p - cx->fp()->slots()) < cx->fp()->numSlots())
3895 return get(p);
3897 return NULL;
3900 inline nanojit::LIns*
3901 TraceRecorder::getFromTrackerImpl(const void* p)
3903 checkForGlobalObjectReallocation();
3904 return tracker.get(p);
3907 inline nanojit::LIns*
3908 TraceRecorder::getFromTracker(const Value* p)
3910 return getFromTrackerImpl(p);
3913 JS_REQUIRES_STACK LIns*
3914 TraceRecorder::getImpl(const void *p)
3916 LIns* x = getFromTrackerImpl(p);
3917 if (x)
3918 return x;
3919 if (isVoidPtrGlobal(p)) {
3920 unsigned slot = nativeGlobalSlot((const Value *)p);
3921 JS_ASSERT(tree->globalSlots->offsetOf(uint16(slot)) != -1);
3922 importGlobalSlot(slot);
3923 } else {
3924 unsigned slot = nativeStackSlotImpl(p);
3925 JSValueType type = importTypeMap[slot];
3926 importImpl(StackAddress(lirbuf->sp, -tree->nativeStackBase + slot * sizeof(jsdouble)),
3927 p, type, "stack", slot, cx->fp());
3929 JS_ASSERT(knownImpl(p));
3930 return tracker.get(p);
3933 JS_REQUIRES_STACK LIns*
3934 TraceRecorder::get(const Value *p)
3936 return getImpl(p);
3939 #ifdef DEBUG
3940 bool
3941 TraceRecorder::isValidFrameObjPtr(void *p)
3943 JSStackFrame *fp = cx->fp();
3944 for (; fp; fp = fp->prev()) {
3945 if (fp->addressOfScopeChain() == p || fp->addressOfArgs() == p)
3946 return true;
3948 return false;
3950 #endif
3952 JS_REQUIRES_STACK LIns*
3953 TraceRecorder::getFrameObjPtr(void *p)
3955 JS_ASSERT(isValidFrameObjPtr(p));
3956 return getImpl(p);
3959 JS_REQUIRES_STACK LIns*
3960 TraceRecorder::addr(Value* p)
3962 return isGlobal(p)
3963 ? w.addp(eos_ins, w.nameImmw(nativeGlobalOffset(p)))
3964 : w.addp(lirbuf->sp, w.nameImmw(nativespOffset(p)));
3967 JS_REQUIRES_STACK inline bool
3968 TraceRecorder::knownImpl(const void* p)
3970 checkForGlobalObjectReallocation();
3971 return tracker.has(p);
3974 JS_REQUIRES_STACK inline bool
3975 TraceRecorder::known(const Value* vp)
3977 return knownImpl(vp);
3980 JS_REQUIRES_STACK inline bool
3981 TraceRecorder::known(JSObject** p)
3983 return knownImpl(p);
3987 * The slots of the global object are sometimes reallocated by the interpreter.
3988 * This function checks for that condition and re-maps the entries of the tracker
3989 * accordingly.
3991 JS_REQUIRES_STACK void
3992 TraceRecorder::checkForGlobalObjectReallocationHelper()
3994 debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
3995 Value* src = global_slots;
3996 Value* dst = globalObj->getSlots();
3997 jsuint length = globalObj->capacity;
3998 LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
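/* Re-key every tracked entry from its old slot address to the corresponding new address. */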
3999 for (jsuint n = 0; n < length; ++n) {
4000 map[n] = tracker.get(src);
4001 tracker.set(src++, NULL);
4003 for (jsuint n = 0; n < length; ++n)
4004 tracker.set(dst++, map[n]);
4005 global_slots = globalObj->getSlots();
4008 /* Determine whether the current branch is a loop edge (taken or not taken). */
4009 static JS_REQUIRES_STACK bool
4010 IsLoopEdge(jsbytecode* pc, jsbytecode* header)
4012 switch (*pc) {
4013 case JSOP_IFEQ:
4014 case JSOP_IFNE:
4015 return ((pc + GET_JUMP_OFFSET(pc)) == header);
4016 case JSOP_IFEQX:
4017 case JSOP_IFNEX:
4018 return ((pc + GET_JUMPX_OFFSET(pc)) == header);
4019 default:
4020 JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
4021 (*pc == JSOP_OR) || (*pc == JSOP_ORX));
4023 return false;
4026 class AdjustCallerGlobalTypesVisitor : public SlotVisitorBase
4028 TraceRecorder &mRecorder;
4029 JSContext *mCx;
4030 nanojit::LirBuffer *mLirbuf;
4031 JSValueType *mTypeMap;
4032 public:
4033 AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
4034 JSValueType *typeMap) :
4035 mRecorder(recorder),
4036 mCx(mRecorder.cx),
4037 mLirbuf(mRecorder.lirbuf),
4038 mTypeMap(typeMap)
4041 JSValueType* getTypeMap()
4043 return mTypeMap;
4046 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4047 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
4048 LIns *ins = mRecorder.get(vp);
4049 bool isPromote = IsPromotedInt32(ins);
4050 if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
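/* The inner tree expects a double in this slot, so write the promoted value back as a double before calling it. */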
4051 mRecorder.w.st(mRecorder.get(vp),
4052 EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
4054 * Aggressively undo speculation so the inner tree will compile
4055 * if this fails.
4057 mRecorder.traceMonitor->oracle->markGlobalSlotUndemotable(mCx, slot);
4059 JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
4060 ++mTypeMap;
4064 class AdjustCallerStackTypesVisitor : public SlotVisitorBase
4066 TraceRecorder &mRecorder;
4067 JSContext *mCx;
4068 nanojit::LirBuffer *mLirbuf;
4069 unsigned mSlotnum;
4070 JSValueType *mTypeMap;
4071 public:
4072 AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
4073 JSValueType *typeMap) :
4074 mRecorder(recorder),
4075 mCx(mRecorder.cx),
4076 mLirbuf(mRecorder.lirbuf),
4077 mSlotnum(0),
4078 mTypeMap(typeMap)
4081 JSValueType* getTypeMap()
4083 return mTypeMap;
4086 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4087 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
4088 /* N.B. vp may actually point to a JSObject*. */
4089 for (size_t i = 0; i < count; ++i) {
4090 LIns *ins = mRecorder.get(vp);
4091 bool isPromote = IsPromotedInt32(ins);
4092 if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
4093 mRecorder.w.st(ins, StackAddress(mLirbuf->sp, mRecorder.nativespOffset(vp)));
4095 * Aggressively undo speculation so the inner tree will compile
4096 * if this fails.
4098 mRecorder.traceMonitor->oracle->markStackSlotUndemotable(mCx, mSlotnum);
4100 JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
4101 ++vp;
4102 ++mTypeMap;
4103 ++mSlotnum;
4105 return true;
4108 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4109 visitFrameObjPtr(void* p, JSStackFrame* fp) {
4110 JS_ASSERT(*mTypeMap != JSVAL_TYPE_BOXED);
4111 ++mTypeMap;
4112 ++mSlotnum;
4113 return true;
4118 * Promote slots if necessary to match the called tree's type map. This
4119 * function is infallible and must only be called if we are certain that it is
4120 * possible to reconcile the types for each slot in the inner and outer trees.
4122 JS_REQUIRES_STACK void
4123 TraceRecorder::adjustCallerTypes(TreeFragment* f)
4125 AdjustCallerGlobalTypesVisitor globalVisitor(*this, f->globalTypeMap());
4126 VisitGlobalSlots(globalVisitor, cx, *tree->globalSlots);
4128 AdjustCallerStackTypesVisitor stackVisitor(*this, f->stackTypeMap());
4129 VisitStackSlots(stackVisitor, cx, 0);
4131 JS_ASSERT(f == f->root);
4134 JS_REQUIRES_STACK inline JSValueType
4135 TraceRecorder::determineSlotType(Value* vp)
4137 if (vp->isNumber()) {
4138 LIns *i = getFromTracker(vp);
4139 JSValueType t;
4140 if (i) {
4141 t = IsPromotedInt32(i) ? JSVAL_TYPE_INT32 : JSVAL_TYPE_DOUBLE;
4142 } else if (isGlobal(vp)) {
4143 int offset = tree->globalSlots->offsetOf(uint16(nativeGlobalSlot(vp)));
4144 JS_ASSERT(offset != -1);
4145 t = importTypeMap[importStackSlots + offset];
4146 } else {
4147 t = importTypeMap[nativeStackSlot(vp)];
4149 JS_ASSERT_IF(t == JSVAL_TYPE_INT32, hasInt32Repr(*vp));
4150 return t;
4153 if (vp->isObject())
4154 return vp->toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
4155 return vp->extractNonDoubleObjectTraceType();
4158 class DetermineTypesVisitor : public SlotVisitorBase
4160 TraceRecorder &mRecorder;
4161 JSValueType *mTypeMap;
4162 public:
4163 DetermineTypesVisitor(TraceRecorder &recorder,
4164 JSValueType *typeMap) :
4165 mRecorder(recorder),
4166 mTypeMap(typeMap)
4169 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4170 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
4171 *mTypeMap++ = mRecorder.determineSlotType(vp);
4174 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4175 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
4176 for (size_t i = 0; i < count; ++i)
4177 *mTypeMap++ = mRecorder.determineSlotType(vp++);
4178 return true;
4181 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4182 visitFrameObjPtr(void* p, JSStackFrame* fp) {
4183 *mTypeMap++ = getFrameObjPtrTraceType(p, fp);
4184 return true;
4187 JSValueType* getTypeMap()
4189 return mTypeMap;
4193 #if defined JS_JIT_SPEW
4194 JS_REQUIRES_STACK static void
4195 TreevisLogExit(JSContext* cx, VMSideExit* exit)
4197 debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
4198 " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
4199 (void*)exit->from, (void*)cx->regs->pc, cx->fp()->script()->filename,
4200 js_FramePCToLineNumber(cx, cx->fp()), FramePCOffset(cx, cx->fp()));
4201 debug_only_print0(LC_TMTreeVis, " STACK=\"");
4202 for (unsigned i = 0; i < exit->numStackSlots; i++)
4203 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->stackTypeMap()[i]));
4204 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
4205 for (unsigned i = 0; i < exit->numGlobalSlots; i++)
4206 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->globalTypeMap()[i]));
4207 debug_only_print0(LC_TMTreeVis, "\"\n");
4209 #endif
4211 JS_REQUIRES_STACK VMSideExit*
4212 TraceRecorder::snapshot(ExitType exitType)
4214 JSStackFrame* const fp = cx->fp();
4215 JSFrameRegs* const regs = cx->regs;
4216 jsbytecode* pc = regs->pc;
4219 * Check for a return-value opcode that needs to restart at the next
4220 * instruction.
4222 const JSCodeSpec& cs = js_CodeSpec[*pc];
4225 * When calling a _FAIL native, make the snapshot's pc point to the next
4226 * instruction after the CALL or APPLY. Even on failure, a _FAIL native
4227 * must not be called again from the interpreter.
4229 bool resumeAfter = (pendingSpecializedNative &&
4230 JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
4231 if (resumeAfter) {
4232 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNAPPLY || *pc == JSOP_FUNCALL ||
4233 *pc == JSOP_NEW || *pc == JSOP_SETPROP || *pc == JSOP_SETNAME);
4234 pc += cs.length;
4235 regs->pc = pc;
4236 MUST_FLOW_THROUGH("restore_pc");
4240 * Generate the entry map for the (possibly advanced) pc and stash it in
4241 * the trace.
4243 unsigned stackSlots = NativeStackSlots(cx, callDepth);
4246 * It's sufficient to track the native stack use here since all stores
4247 * above the stack watermark defined by guards are killed.
4249 trackNativeStackUse(stackSlots + 1);
4251 /* Capture the type map into a temporary location. */
4252 unsigned ngslots = tree->globalSlots->length();
4253 unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSValueType);
4255 /* Use the recorder-local temporary type map. */
4256 JSValueType* typemap = NULL;
4257 if (tempTypeMap.resize(typemap_size))
4258 typemap = tempTypeMap.begin(); /* crash if resize() fails. */
4261 * Determine the type of a store by looking at the current type of the
4262 * actual value the interpreter is using. For numbers we have to check what
4263 * kind of store we used last (integer or double) to figure out what the
4264 * side exit should reflect in its typemap.
4266 DetermineTypesVisitor detVisitor(*this, typemap);
4267 VisitSlots(detVisitor, cx, callDepth, ngslots,
4268 tree->globalSlots->data());
4269 JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) ==
4270 ngslots + stackSlots);
4273 * If this snapshot is for a side exit that leaves a boxed Value result on
4274 * the stack, make a note of this in the typemap. Examples include the
4275 * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
4276 * GetPropertyByName; and the type guard in unbox_value after such a call
4277 * (also at the beginning of a trace branched from such a type guard).
4279 if (pendingUnboxSlot ||
4280 (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) {
4281 unsigned pos = stackSlots - 1;
4282 if (pendingUnboxSlot == cx->regs->sp - 2)
4283 pos = stackSlots - 2;
4284 typemap[pos] = JSVAL_TYPE_BOXED;
4285 } else if (pendingSpecializedNative &&
4286 (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_STR)) {
4287 typemap[stackSlots - 1] = JSVAL_TYPE_STRORNULL;
4288 } else if (pendingSpecializedNative &&
4289 (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_OBJ)) {
4290 typemap[stackSlots - 1] = JSVAL_TYPE_OBJORNULL;
4293 /* Now restore the original pc (after which early returns are ok). */
4294 if (resumeAfter) {
4295 MUST_FLOW_LABEL(restore_pc);
4296 regs->pc = pc - cs.length;
4297 } else {
4299 * If we take a snapshot on a goto, advance to the target address. This
4300 * avoids inner trees returning on a break goto, which the outer
4301 * recorder then would confuse with a break in the outer tree.
4303 if (*pc == JSOP_GOTO)
4304 pc += GET_JUMP_OFFSET(pc);
4305 else if (*pc == JSOP_GOTOX)
4306 pc += GET_JUMPX_OFFSET(pc);
4310 * Check if we already have a matching side exit; if so we can return that
4311 * side exit instead of creating a new one.
4313 VMSideExit** exits = tree->sideExits.data();
4314 unsigned nexits = tree->sideExits.length();
4315 if (exitType == LOOP_EXIT) {
4316 for (unsigned n = 0; n < nexits; ++n) {
4317 VMSideExit* e = exits[n];
4318 if (e->pc == pc && (e->imacpc == fp->maybeImacropc()) &&
4319 ngslots == e->numGlobalSlots &&
4320 !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
4321 AUDIT(mergedLoopExits);
4322 #if defined JS_JIT_SPEW
4323 TreevisLogExit(cx, e);
4324 #endif
4325 return e;
4330 /* We couldn't find a matching side exit, so create a new one. */
4331 VMSideExit* exit = (VMSideExit*)
4332 traceAlloc().alloc(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSValueType));
4334 /* Setup side exit structure. */
4335 exit->from = fragment;
4336 exit->calldepth = callDepth;
4337 exit->numGlobalSlots = ngslots;
4338 exit->numStackSlots = stackSlots;
4339 exit->numStackSlotsBelowCurrentFrame = cx->fp()->isFunctionFrame() ?
4340 nativeStackOffset(&cx->fp()->calleeValue()) / sizeof(double) :
4342 exit->exitType = exitType;
4343 exit->pc = pc;
4344 exit->imacpc = fp->maybeImacropc();
4345 exit->sp_adj = (stackSlots * sizeof(double)) - tree->nativeStackBase;
4346 exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
4347 exit->lookupFlags = js_InferFlags(cx, 0);
4348 memcpy(exit->fullTypeMap(), typemap, typemap_size);
4350 #if defined JS_JIT_SPEW
4351 TreevisLogExit(cx, exit);
4352 #endif
4353 return exit;
4356 JS_REQUIRES_STACK GuardRecord*
4357 TraceRecorder::createGuardRecord(VMSideExit* exit)
4359 #ifdef JS_JIT_SPEW
4360 // For debug builds, place the guard records in a longer lasting
4361 // pool. This is because the fragment profiler will look at them
4362 // relatively late in the day, after they would have been freed,
4363 // in some cases, had they been allocated in traceAlloc().
4364 GuardRecord* gr = new (dataAlloc()) GuardRecord();
4365 #else
4366 // The standard place (for production builds).
4367 GuardRecord* gr = new (traceAlloc()) GuardRecord();
4368 #endif
4370 gr->exit = exit;
4371 exit->addGuard(gr);
4373 // gr->profCount is calloc'd to zero
4374 verbose_only(
4375 gr->profGuardID = fragment->guardNumberer++;
4376 gr->nextInFrag = fragment->guardsForFrag;
4377 fragment->guardsForFrag = gr;
4380 return gr;
4383 /* Test if 'ins' is in a form that can be used as a guard/branch condition. */
4384 static bool
4385 isCond(LIns* ins)
4387 return ins->isCmp() || ins->isImmI(0) || ins->isImmI(1);
4390 /* Ensure 'ins' is in a form suitable for a guard/branch condition. */
4391 void
4392 TraceRecorder::ensureCond(LIns** ins, bool* cond)
4394 if (!isCond(*ins)) {
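/* Compare against zero and flip the expected sense: a guard on v becomes a guard on (v == 0) with the opposite expectation. */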
4395 *cond = !*cond;
4396 *ins = (*ins)->isI() ? w.eqi0(*ins) : w.eqp0(*ins);
4401 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4402 * (expected) and using the supplied side exit if the condition doesn't hold.
4404 * Callers shouldn't generate guards that always exit (which can occur due to
4405 * optimization of the guard condition) because it's bad for both compile-time
4406 * speed (all the code generated after the guard is dead) and run-time speed
4407 * (fragments that always exit are slow). This function has two modes for
4408 * handling an always-exit guard; which mode is used depends on the value of
4409 * abortIfAlwaysExits:
4411 * - abortIfAlwaysExits == false: This is the default mode. If the guard
4412 * will always exit, we assert (in debug builds) as a signal that we are
4413 * generating bad traces. (In optimized builds that lack assertions the
4414 * guard will be generated correctly, so the code will be slow but safe.) In
4415 * this mode, the caller is responsible for not generating an always-exit
4416 * guard. The return value will always be RECORD_CONTINUE, so the caller
4417 * need not check it.
4419 * - abortIfAlwaysExits == true: If the guard will always exit, we abort
4420 * recording and return RECORD_STOP; otherwise we generate the guard
4421 * normally and return RECORD_CONTINUE. This mode can be used when the
4422 * caller doesn't know ahead of time whether the guard will always exit. In
4423 * this mode, the caller must check the return value.
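 *
 * For example (an illustrative sketch), a caller opting into the second mode
 * might write:
 *
 *   CHECK_STATUS(guard(true, cond, BRANCH_EXIT, true));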
4425 JS_REQUIRES_STACK RecordingStatus
4426 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit,
4427 bool abortIfAlwaysExits/* = false */)
4429 if (exit->exitType == LOOP_EXIT)
4430 tree->sideExits.add(exit);
4432 JS_ASSERT(isCond(cond));
4434 if ((cond->isImmI(0) && expected) || (cond->isImmI(1) && !expected)) {
4435 if (abortIfAlwaysExits) {
4436 /* The guard always exits; the caller must check for an abort. */
4437 RETURN_STOP("Constantly false guard detected");
4440 * If you hit this assertion, first decide if you want recording to
4441 * abort in the case where the guard always exits. If not, find a way
4442 * to detect that case and avoid calling guard(). Otherwise, change
4443 * the invocation of guard() so it passes in abortIfAlwaysExits=true,
4444 * and have the caller check the return value, e.g. using
4445 * CHECK_STATUS(). (In optimized builds, we'll fall through to the
4446 * insGuard() below and an always-exits guard will be inserted, which
4447 * is correct but sub-optimal.)
4449 JS_NOT_REACHED("unexpected constantly false guard detected");
4453 * Nb: if the guard is never taken, no instruction will be created and
4454 * insGuard() will return NULL. This is a good thing.
4456 GuardRecord* guardRec = createGuardRecord(exit);
4457 expected ? w.xf(cond, guardRec) : w.xt(cond, guardRec);
4458 return RECORD_CONTINUE;
4462 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4463 * (expected) and generate a side exit with type exitType to jump to if the
4464 * condition does not hold.
4466 JS_REQUIRES_STACK RecordingStatus
4467 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType,
4468 bool abortIfAlwaysExits/* = false */)
4470 return guard(expected, cond, snapshot(exitType), abortIfAlwaysExits);
4474 * Emit a guard for a 32-bit integer arithmetic operation op(d0, d1), using
4475 * the supplied side exit if it overflows.
4477 JS_REQUIRES_STACK LIns*
4478 TraceRecorder::guard_xov(LOpcode op, LIns* d0, LIns* d1, VMSideExit* exit)
4480 JS_ASSERT(exit->exitType == OVERFLOW_EXIT);
4482 GuardRecord* guardRec = createGuardRecord(exit);
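/*
 * The *xovi forms fuse the arithmetic operation with a guard that exits
 * through guardRec if the operation overflows.
 */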
4483 switch (op) {
4484 case LIR_addi:
4485 return w.addxovi(d0, d1, guardRec);
4486 case LIR_subi:
4487 return w.subxovi(d0, d1, guardRec);
4488 case LIR_muli:
4489 return w.mulxovi(d0, d1, guardRec);
4490 default:
4491 break;
4493 JS_NOT_REACHED("unexpected opcode");
4494 return NULL;
4497 JS_REQUIRES_STACK VMSideExit*
4498 TraceRecorder::copy(VMSideExit* copy)
4500 size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
4501 VMSideExit* exit = (VMSideExit*)
4502 traceAlloc().alloc(sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));
4504 /* Copy side exit structure. */
4505 memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));
4506 exit->guards = NULL;
4507 exit->from = fragment;
4508 exit->target = NULL;
4510 if (exit->exitType == LOOP_EXIT)
4511 tree->sideExits.add(exit);
4512 #if defined JS_JIT_SPEW
4513 TreevisLogExit(cx, exit);
4514 #endif
4515 return exit;
4519 * Determine whether any context associated with the same thread as cx is
4520 * executing native code.
4522 static inline bool
4523 ProhibitFlush(TraceMonitor *tm)
4525 return !!tm->tracerState; // don't flush if we're running a trace
4528 static void
4529 ResetJITImpl(JSContext* cx, TraceMonitor* tm)
4531 if (!cx->traceJitEnabled)
4532 return;
4533 debug_only_print0(LC_TMTracer, "Flushing cache.\n");
4534 if (tm->recorder) {
4535 JS_ASSERT_NOT_ON_TRACE(cx);
4536 AbortRecording(cx, "flush cache");
4538 #if JS_METHODJIT
4539 if (tm->profile)
4540 AbortProfiling(cx);
4541 #endif
4542 if (ProhibitFlush(tm)) {
4543 debug_only_print0(LC_TMTracer, "Deferring JIT flush due to deep bail.\n");
4544 tm->needFlush = JS_TRUE;
4545 return;
4547 tm->flush();
4550 /* Compile the current fragment. */
4551 JS_REQUIRES_STACK AbortableRecordingStatus
4552 TraceRecorder::compile()
4554 #ifdef MOZ_TRACEVIS
4555 TraceVisStateObj tvso(cx, S_COMPILE);
4556 #endif
4558 if (traceMonitor->needFlush) {
4559 ResetJIT(cx, traceMonitor, FR_DEEP_BAIL);
4560 return ARECORD_ABORTED;
4562 if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
4563 debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
4564 Blacklist((jsbytecode*)tree->ip);
4565 return ARECORD_STOP;
4567 if (anchor && anchor->exitType != CASE_EXIT)
4568 ++tree->branchCount;
4569 if (outOfMemory())
4570 return ARECORD_STOP;
4572 /* :TODO: windows support */
4573 #if defined DEBUG && !defined WIN32
4574 /* Associate a filename and line number with the fragment. */
4575 const char* filename = cx->fp()->script()->filename;
4576 char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
4577 if (label) {
4578 sprintf(label, "%s:%u", filename ? filename : "<stdin>",
4579 js_FramePCToLineNumber(cx, cx->fp()));
4580 lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
4581 js_free(label);
4583 #endif
4585 Assembler *assm = traceMonitor->assembler;
4586 JS_ASSERT(!assm->error());
4587 assm->compile(fragment, tempAlloc(), /*optimize*/true verbose_only(, lirbuf->printer));
4589 if (assm->error()) {
4590 assm->setError(nanojit::None);
4591 debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
4592 Blacklist((jsbytecode*)tree->ip);
4593 return ARECORD_STOP;
4596 if (outOfMemory())
4597 return ARECORD_STOP;
4598 ResetRecordingAttempts(traceMonitor, (jsbytecode*)fragment->ip);
4599 ResetRecordingAttempts(traceMonitor, (jsbytecode*)tree->ip);
4600 if (anchor) {
4601 #ifdef NANOJIT_IA32
4602 if (anchor->exitType == CASE_EXIT)
4603 assm->patch(anchor, anchor->switchInfo);
4604 else
4605 #endif
4606 assm->patch(anchor);
4608 JS_ASSERT(fragment->code());
4609 JS_ASSERT_IF(fragment == fragment->root, fragment->root == tree);
4611 return ARECORD_CONTINUE;
4614 static void
4615 JoinPeers(Assembler* assm, VMSideExit* exit, TreeFragment* target)
4617 exit->target = target;
4618 assm->patch(exit);
4620 debug_only_printf(LC_TMTreeVis, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit, (void*)target);
4622 if (exit->root() == target)
4623 return;
4625 target->dependentTrees.addUnique(exit->root());
4626 exit->root()->linkedTrees.addUnique(target);
4629 /* Results of trying to connect an arbitrary type A with arbitrary type B */
4630 enum TypeCheckResult
4632 TypeCheck_Okay, /* Okay: same type */
4633 TypeCheck_Promote, /* Okay: Type A needs d2i() */
4634 TypeCheck_Demote, /* Okay: Type A needs i2d() */
4635 TypeCheck_Undemote, /* Bad: Slot is undemotable */
4636 TypeCheck_Bad /* Bad: incompatible types */
4639 class SlotMap : public SlotVisitorBase
4641 public:
4642 struct SlotInfo
4644 SlotInfo()
4645 : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad)
4647 SlotInfo(Value* vp, bool isPromotedInt32)
4648 : vp(vp), isPromotedInt32(isPromotedInt32), lastCheck(TypeCheck_Bad),
4649 type(getCoercedType(*vp))
4651 SlotInfo(JSValueType t)
4652 : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad), type(t)
4654 SlotInfo(Value* vp, JSValueType t)
4655 : vp(vp), isPromotedInt32(t == JSVAL_TYPE_INT32), lastCheck(TypeCheck_Bad), type(t)
4657 void *vp;
4658 bool isPromotedInt32;
4659 TypeCheckResult lastCheck;
4660 JSValueType type;
4663 SlotMap(TraceRecorder& rec)
4664 : mRecorder(rec),
4665 mCx(rec.cx),
4666 slots(NULL)
4670 virtual ~SlotMap()
4674 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4675 visitGlobalSlot(Value *vp, unsigned n, unsigned slot)
4677 addSlot(vp);
4680 JS_ALWAYS_INLINE SlotMap::SlotInfo&
4681 operator [](unsigned i)
4683 return slots[i];
4686 JS_ALWAYS_INLINE SlotMap::SlotInfo&
4687 get(unsigned i)
4689 return slots[i];
4692 JS_ALWAYS_INLINE unsigned
4693 length()
4695 return slots.length();
4699 * Possible return states:
4701 * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
4702 * promote/demotes.
4703 * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
4704 * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
4705 * before recording began. Caller can go through slot list and mark
4706 * such slots as undemotable.
4708 JS_REQUIRES_STACK TypeConsensus
4709 checkTypes(LinkableFragment* f)
4711 if (length() != f->typeMap.length())
4712 return TypeConsensus_Bad;
4714 bool has_undemotes = false;
4715 for (unsigned i = 0; i < length(); i++) {
4716 TypeCheckResult result = checkType(i, f->typeMap[i]);
4717 if (result == TypeCheck_Bad)
4718 return TypeConsensus_Bad;
4719 if (result == TypeCheck_Undemote)
4720 has_undemotes = true;
4721 slots[i].lastCheck = result;
4723 if (has_undemotes)
4724 return TypeConsensus_Undemotes;
4725 return TypeConsensus_Okay;
4728 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4729 addSlot(Value* vp)
4731 bool isPromotedInt32 = false;
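/* A slot counts as a promoted int32 if its last LIR value is a promoted int or, when untracked, if its imported type was int32. */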
4732 if (vp->isNumber()) {
4733 if (LIns* i = mRecorder.getFromTracker(vp)) {
4734 isPromotedInt32 = IsPromotedInt32(i);
4735 } else if (mRecorder.isGlobal(vp)) {
4736 int offset = mRecorder.tree->globalSlots->offsetOf(uint16(mRecorder.nativeGlobalSlot(vp)));
4737 JS_ASSERT(offset != -1);
4738 isPromotedInt32 = mRecorder.importTypeMap[mRecorder.importStackSlots + offset] ==
4739 JSVAL_TYPE_INT32;
4740 } else {
4741 isPromotedInt32 = mRecorder.importTypeMap[mRecorder.nativeStackSlot(vp)] ==
4742 JSVAL_TYPE_INT32;
4745 slots.add(SlotInfo(vp, isPromotedInt32));
4748 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4749 addSlot(JSValueType t)
4751 slots.add(SlotInfo(NULL, t));
4754 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4755 addSlot(Value *vp, JSValueType t)
4757 slots.add(SlotInfo(vp, t));
4760 JS_REQUIRES_STACK void
4761 markUndemotes()
4763 for (unsigned i = 0; i < length(); i++) {
4764 if (get(i).lastCheck == TypeCheck_Undemote)
4765 mRecorder.markSlotUndemotable(mRecorder.tree, i);
4769 JS_REQUIRES_STACK virtual void
4770 adjustTypes()
4772 for (unsigned i = 0; i < length(); i++)
4773 adjustType(get(i));
4776 protected:
4777 JS_REQUIRES_STACK virtual void
4778 adjustType(SlotInfo& info) {
4779 JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
4780 #ifdef DEBUG
4781 if (info.lastCheck == TypeCheck_Promote) {
4782 JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);
4784 * This should only happen if the slot has a trivial conversion, i.e.
4785 * IsPromotedInt32() is true. We check this.
4787 * Note that getFromTracker() will return NULL if the slot was
4788 * never used, in which case we don't do the check. We could
4789 * instead call mRecorder.get(info.vp) and always check, but
4790 * get() has side-effects, which is not good in an assertion.
4791 * Not checking unused slots isn't so bad.
4793 LIns* ins = mRecorder.getFromTrackerImpl(info.vp);
4794 JS_ASSERT_IF(ins, IsPromotedInt32(ins));
4795 } else
4796 #endif
4797 if (info.lastCheck == TypeCheck_Demote) {
4798 JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);
4799 JS_ASSERT(mRecorder.getImpl(info.vp)->isD());
4801 /* Never demote this final i2d. */
4802 mRecorder.setImpl(info.vp, mRecorder.getImpl(info.vp), false);
4806 private:
4807 TypeCheckResult
4808 checkType(unsigned i, JSValueType t)
4810 debug_only_printf(LC_TMTracer,
4811 "checkType slot %d: interp=%c typemap=%c isNum=%d isPromotedInt32=%d\n",
4813 TypeToChar(slots[i].type),
4814 TypeToChar(t),
4815 slots[i].type == JSVAL_TYPE_INT32 || slots[i].type == JSVAL_TYPE_DOUBLE,
4816 slots[i].isPromotedInt32);
4817 switch (t) {
4818 case JSVAL_TYPE_INT32:
4819 if (slots[i].type != JSVAL_TYPE_INT32 && slots[i].type != JSVAL_TYPE_DOUBLE)
4820 return TypeCheck_Bad; /* Not a number? Type mismatch. */
4821 /* This is always a type mismatch, we can't close a double to an int. */
4822 if (!slots[i].isPromotedInt32)
4823 return TypeCheck_Undemote;
4824 /* Looks good, slot is an int32, the last instruction should be promotable. */
4825 JS_ASSERT_IF(slots[i].vp,
4826 hasInt32Repr(*(const Value *)slots[i].vp) && slots[i].isPromotedInt32);
4827 return slots[i].vp ? TypeCheck_Promote : TypeCheck_Okay;
4828 case JSVAL_TYPE_DOUBLE:
4829 if (slots[i].type != JSVAL_TYPE_INT32 && slots[i].type != JSVAL_TYPE_DOUBLE)
4830 return TypeCheck_Bad; /* Not a number? Type mismatch. */
4831 if (slots[i].isPromotedInt32)
4832 return slots[i].vp ? TypeCheck_Demote : TypeCheck_Bad;
4833 return TypeCheck_Okay;
4834 default:
4835 return slots[i].type == t ? TypeCheck_Okay : TypeCheck_Bad;
4837 JS_NOT_REACHED("shouldn't fall through type check switch");
4839 protected:
4840 TraceRecorder& mRecorder;
4841 JSContext* mCx;
4842 Queue<SlotInfo> slots;
4845 class DefaultSlotMap : public SlotMap
4847 public:
4848 DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
4852 virtual ~DefaultSlotMap()
4856 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4857 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp)
4859 for (size_t i = 0; i < count; i++)
4860 addSlot(&vp[i]);
4861 return true;
4864 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4865 visitFrameObjPtr(void* p, JSStackFrame* fp)
4867 addSlot(getFrameObjPtrTraceType(p, fp));
4868 return true;
4872 JS_REQUIRES_STACK TypeConsensus
4873 TraceRecorder::selfTypeStability(SlotMap& slotMap)
4875 debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment);
4876 TypeConsensus consensus = slotMap.checkTypes(tree);
4878 /* Best case: loop jumps back to its own header */
4879 if (consensus == TypeConsensus_Okay)
4880 return TypeConsensus_Okay;
4883 * If the only thing keeping this loop from being stable is undemotions, then mark relevant
4884 * slots as undemotable.
4886 if (consensus == TypeConsensus_Undemotes)
4887 slotMap.markUndemotes();
4889 return consensus;
4892 JS_REQUIRES_STACK TypeConsensus
4893 TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, TreeFragment** pPeer)
4895 JS_ASSERT(tree->first == LookupLoop(traceMonitor, ip, tree->globalObj, tree->globalShape, tree->argc));
4897 /* See if there are any peers that would make this stable */
4898 bool onlyUndemotes = false;
4899 for (TreeFragment *peer = tree->first; peer != NULL; peer = peer->peer) {
4900 if (!peer->code() || peer == fragment)
4901 continue;
4902 debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
4903 TypeConsensus consensus = slotMap.checkTypes(peer);
4904 if (consensus == TypeConsensus_Okay) {
4905 *pPeer = peer;
4907 * Return this even though there will be linkage; the trace itself is not stable.
4908 * Caller should inspect *pPeer to check for a compatible peer.
4910 return TypeConsensus_Okay;
4912 if (consensus == TypeConsensus_Undemotes)
4913 onlyUndemotes = true;
4916 return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
4920 * Complete and compile a trace and link it to the existing tree if
4921 * appropriate. Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
4922 * the recorder was deleted. Outparam is always set.
4924 JS_REQUIRES_STACK AbortableRecordingStatus
4925 TraceRecorder::closeLoop()
4927 VMSideExit *exit = snapshot(UNSTABLE_LOOP_EXIT);
4929 DefaultSlotMap slotMap(*this);
4930 VisitSlots(slotMap, cx, 0, *tree->globalSlots);
4933 * We should have arrived back at the loop header, and hence we don't want
4934 * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
4935 * case this loop was blacklisted in the meantime, JSOP_NOTRACE.
4937 JS_ASSERT(*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOTRACE);
4938 JS_ASSERT(!cx->fp()->hasImacropc());
4940 if (callDepth != 0) {
4941 debug_only_print0(LC_TMTracer,
4942 "Blacklisted: stack depth mismatch, possible recursion.\n");
4943 Blacklist((jsbytecode*)tree->ip);
4944 trashSelf = true;
4945 return ARECORD_STOP;
4948 JS_ASSERT(exit->numStackSlots == tree->nStackTypes);
4949 JS_ASSERT(fragment->root == tree);
4950 JS_ASSERT(!trashSelf);
4952 TreeFragment* peer = NULL;
4954 TypeConsensus consensus = selfTypeStability(slotMap);
4955 if (consensus != TypeConsensus_Okay) {
4956 TypeConsensus peerConsensus = peerTypeStability(slotMap, tree->ip, &peer);
4957 /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
4958 if (peerConsensus != TypeConsensus_Bad)
4959 consensus = peerConsensus;
4962 #if DEBUG
4963 if (consensus != TypeConsensus_Okay || peer)
4964 AUDIT(unstableLoopVariable);
4965 #endif
4968 * This exit is indeed linkable to something now. Process any promote or
4969 * demotes that are pending in the slot map.
4971 if (consensus == TypeConsensus_Okay)
4972 slotMap.adjustTypes();
4974 if (consensus != TypeConsensus_Okay || peer) {
4975 fragment->lastIns = w.x(createGuardRecord(exit));
4977 /* If there is a peer, there must have been an "Okay" consensus. */
4978 JS_ASSERT_IF(peer, consensus == TypeConsensus_Okay);
4980 /* Compile as a type-unstable loop, and hope for a connection later. */
4981 if (!peer) {
4983 * If such a fragment does not exist, let's compile the loop ahead
4984 * of time anyway. Later, if the loop becomes type stable, we will
4985 * connect these two fragments together.
4987 debug_only_print0(LC_TMTracer,
4988 "Trace has unstable loop variable with no stable peer, "
4989 "compiling anyway.\n");
4990 UnstableExit* uexit = new (traceAlloc()) UnstableExit;
4991 uexit->fragment = fragment;
4992 uexit->exit = exit;
4993 uexit->next = tree->unstableExits;
4994 tree->unstableExits = uexit;
4995 } else {
4996 JS_ASSERT(peer->code());
4997 exit->target = peer;
4998 debug_only_printf(LC_TMTracer,
4999 "Joining type-unstable trace to target fragment %p.\n",
5000 (void*)peer);
5001 peer->dependentTrees.addUnique(tree);
5002 tree->linkedTrees.addUnique(peer);
5004 } else {
5005 exit->exitType = LOOP_EXIT;
5006 debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
5007 getExitName(LOOP_EXIT));
5009 JS_ASSERT((fragment == fragment->root) == !!loopLabel);
5010 if (loopLabel) {
5011 w.j(loopLabel);
5012 w.comment("end-loop");
5013 w.livep(lirbuf->state);
5016 exit->target = tree;
5018 * This guard is dead code. However, it must be present because it
5019 * can keep alive values on the stack. Without it, StackFilter can
5020 * remove some stack stores that it shouldn't. See bug 582766 comment
5021 * 19.
5023 fragment->lastIns = w.x(createGuardRecord(exit));
5026 CHECK_STATUS_A(compile());
5028 debug_only_printf(LC_TMTreeVis, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit, (void*)peer);
5030 JS_ASSERT(LookupLoop(traceMonitor, tree->ip, tree->globalObj, tree->globalShape, tree->argc) ==
5031 tree->first);
5032 JS_ASSERT(tree->first);
5034 peer = tree->first;
5035 joinEdgesToEntry(peer);
5037 debug_only_stmt(DumpPeerStability(traceMonitor, peer->ip, peer->globalObj,
5038 peer->globalShape, peer->argc);)
5040 debug_only_print0(LC_TMTracer,
5041 "updating specializations on dependent and linked trees\n");
5042 if (tree->code())
5043 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
5046 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
5047 * should try to compile the outer tree again.
5049 if (outerPC)
5050 AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
5051 #ifdef JS_JIT_SPEW
5052 debug_only_printf(LC_TMMinimal,
5053 "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n",
5054 cx->fp()->script()->filename,
5055 js_FramePCToLineNumber(cx, cx->fp()),
5056 FramePCOffset(cx, cx->fp()),
5057 fragment->profFragID);
5058 debug_only_print0(LC_TMMinimal, "\n");
5059 #endif
5061 return finishSuccessfully();
5064 static void
5065 FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
5067 typeMap.setLength(0);
5068 typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
5069 typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
5070 /* Include globals that were later specialized at the root of the tree. */
5071 if (exit->numGlobalSlots < exit->root()->nGlobalTypes()) {
5072 typeMap.fromRaw(exit->root()->globalTypeMap() + exit->numGlobalSlots,
5073 exit->root()->nGlobalTypes() - exit->numGlobalSlots);
5077 static JS_REQUIRES_STACK TypeConsensus
5078 TypeMapLinkability(JSContext* cx, TraceMonitor *tm, const TypeMap& typeMap, TreeFragment* peer)
5080 const TypeMap& peerMap = peer->typeMap;
5081 unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
5082 TypeConsensus consensus = TypeConsensus_Okay;
5083 for (unsigned i = 0; i < minSlots; i++) {
5084 if (typeMap[i] == peerMap[i])
5085 continue;
5086 if (typeMap[i] == JSVAL_TYPE_INT32 && peerMap[i] == JSVAL_TYPE_DOUBLE &&
5087 IsSlotUndemotable(tm->oracle, cx, peer, i, peer->ip)) {
5088 consensus = TypeConsensus_Undemotes;
5089 } else {
5090 return TypeConsensus_Bad;
5093 return consensus;
5096 JS_REQUIRES_STACK unsigned
5097 TraceRecorder::findUndemotesInTypemaps(const TypeMap& typeMap, LinkableFragment* f,
5098 Queue<unsigned>& undemotes)
5100 undemotes.setLength(0);
5101 unsigned minSlots = JS_MIN(typeMap.length(), f->typeMap.length());
5102 for (unsigned i = 0; i < minSlots; i++) {
5103 if (typeMap[i] == JSVAL_TYPE_INT32 && f->typeMap[i] == JSVAL_TYPE_DOUBLE) {
5104 undemotes.add(i);
5105 } else if (typeMap[i] != f->typeMap[i]) {
5106 return 0;
5109 for (unsigned i = 0; i < undemotes.length(); i++)
5110 markSlotUndemotable(f, undemotes[i]);
5111 return undemotes.length();
5114 JS_REQUIRES_STACK void
5115 TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root)
5117 if (fragment->root != fragment)
5118 return;
5120 TypeMap typeMap(NULL, traceMonitor->oracle);
5121 Queue<unsigned> undemotes(NULL);
5123 for (TreeFragment* peer = peer_root; peer; peer = peer->peer) {
5124 if (!peer->code())
5125 continue;
5126 UnstableExit* uexit = peer->unstableExits;
5127 while (uexit != NULL) {
5128 /* Build the full typemap for this unstable exit */
5129 FullMapFromExit(typeMap, uexit->exit);
5130 /* Check its compatibility against this tree */
5131 TypeConsensus consensus = TypeMapLinkability(cx, traceMonitor, typeMap, tree);
5132 JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
5133 if (consensus == TypeConsensus_Okay) {
5134 debug_only_printf(LC_TMTracer,
5135 "Joining type-stable trace to target exit %p->%p.\n",
5136 (void*)uexit->fragment, (void*)uexit->exit);
5139 * See bug 531513. Before linking these trees, make sure the
5140 * peer's dependency graph is up to date.
5142 TreeFragment* from = uexit->exit->root();
5143 if (from->nGlobalTypes() < tree->nGlobalTypes()) {
5144 SpecializeTreesToLateGlobals(cx, from, tree->globalTypeMap(),
5145 tree->nGlobalTypes());
5148 /* It's okay! Link together and remove the unstable exit. */
5149 JS_ASSERT(tree == fragment);
5150 JoinPeers(traceMonitor->assembler, uexit->exit, tree);
5151 uexit = peer->removeUnstableExit(uexit->exit);
5152 } else {
5153 /* Check for int32->double slots that suggest trashing. */
5154 if (findUndemotesInTypemaps(typeMap, tree, undemotes)) {
5155 JS_ASSERT(peer == uexit->fragment->root);
5156 if (fragment == peer)
5157 trashSelf = true;
5158 else
5159 whichTreesToTrash.addUnique(uexit->fragment->root);
5160 break;
5162 uexit = uexit->next;
5168 JS_REQUIRES_STACK AbortableRecordingStatus
5169 TraceRecorder::endLoop()
5171 return endLoop(snapshot(LOOP_EXIT));
5174 /* Emit an always-exit guard and compile the tree (used for break statements). */
5175 JS_REQUIRES_STACK AbortableRecordingStatus
5176 TraceRecorder::endLoop(VMSideExit* exit)
5178 JS_ASSERT(fragment->root == tree);
5180 if (callDepth != 0) {
5181 debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
5182 Blacklist((jsbytecode*)tree->ip);
5183 trashSelf = true;
5184 return ARECORD_STOP;
5187 fragment->lastIns = w.x(createGuardRecord(exit));
5189 CHECK_STATUS_A(compile());
5191 debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);
5193 JS_ASSERT(LookupLoop(traceMonitor, tree->ip, tree->globalObj, tree->globalShape, tree->argc) ==
5194 tree->first);
5196 joinEdgesToEntry(tree->first);
5198 debug_only_stmt(DumpPeerStability(traceMonitor, tree->ip, tree->globalObj,
5199 tree->globalShape, tree->argc);)
5202 * Note: this must always be done, in case we added new globals on trace
5203 * and haven't yet propagated those to linked and dependent trees.
5205 debug_only_print0(LC_TMTracer,
5206 "updating specializations on dependent and linked trees\n");
5207 if (tree->code())
5208 SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root);
5211 * If this is a newly formed tree, and the outer tree has not been compiled
5212 * yet, we should try to compile the outer tree again.
5214 if (outerPC)
5215 AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
5216 #ifdef JS_JIT_SPEW
5217 debug_only_printf(LC_TMMinimal,
5218 "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n",
5219 cx->fp()->script()->filename,
5220 js_FramePCToLineNumber(cx, cx->fp()),
5221 FramePCOffset(cx, cx->fp()),
5222 fragment->profFragID);
5223 debug_only_print0(LC_TMTracer, "\n");
5224 #endif
5226 return finishSuccessfully();
5229 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
5230 JS_REQUIRES_STACK void
5231 TraceRecorder::prepareTreeCall(TreeFragment* inner)
5233 VMSideExit* exit = snapshot(OOM_EXIT);
5236 * The inner tree expects to be called from the current frame. If the outer
5237 * tree (this trace) is currently inside a function inlining code
5238 * (calldepth > 0), we have to advance the native stack pointer such that
5239 * we match what the inner trace expects to see. We move it back when we
5240 * come out of the inner tree call.
5242 if (callDepth > 0) {
5244 * Calculate the amount we have to lift the native stack pointer by to
5245 * compensate for any outer frames that the inner tree doesn't expect
5246 * but the outer tree has.
5248 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp()->calleeValue());
5250 /* Calculate the amount we have to lift the call stack by. */
5251 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
5254 * Guard that we have enough stack space for the tree we are trying to
5255 * call on top of the new value for sp.
5257 debug_only_printf(LC_TMTracer,
5258 "sp_adj=%lld outer=%lld inner=%lld\n",
5259 (long long int)sp_adj,
5260 (long long int)tree->nativeStackBase,
5261 (long long int)inner->nativeStackBase);
5262 ptrdiff_t sp_offset =
5263 - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
5264 + sp_adj /* adjust for stack in outer frame inner tree can't see */
5265 + inner->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */
5266 LIns* sp_top = w.addp(lirbuf->sp, w.nameImmw(sp_offset));
5267 guard(true, w.ltp(sp_top, eos_ins), exit);
5269 /* Guard that we have enough call stack space. */
5270 ptrdiff_t rp_offset = rp_adj + inner->maxCallDepth * sizeof(FrameInfo*);
5271 LIns* rp_top = w.addp(lirbuf->rp, w.nameImmw(rp_offset));
5272 guard(true, w.ltp(rp_top, eor_ins), exit);
5274 sp_offset =
5275 - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
5276 + sp_adj /* adjust for stack in outer frame inner tree can't see */
5277 + inner->nativeStackBase; /* plus the inner tree's stack base */
5278 /* We have enough space, so adjust sp and rp to their new level. */
5279 w.stStateField(w.addp(lirbuf->sp, w.nameImmw(sp_offset)), sp);
5280 w.stStateField(w.addp(lirbuf->rp, w.nameImmw(rp_adj)), rp);
5284 * The inner tree will probably access stack slots. So tell nanojit not to
5285 * discard or defer stack writes before emitting the call tree code.
5287 * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
5289 w.xbarrier(createGuardRecord(exit));
5292 class ClearSlotsVisitor : public SlotVisitorBase
5294 Tracker &tracker;
5295 public:
5296 ClearSlotsVisitor(Tracker &tracker)
5297 : tracker(tracker)
5300 JS_ALWAYS_INLINE bool
5301 visitStackSlots(Value *vp, size_t count, JSStackFrame *) {
5302 for (Value *vpend = vp + count; vp != vpend; ++vp)
5303 tracker.set(vp, NULL);
5304 return true;
5307 JS_ALWAYS_INLINE bool
5308 visitFrameObjPtr(void *p, JSStackFrame *) {
5309 tracker.set(p, NULL);
5310 return true;
5314 static unsigned
5315 BuildGlobalTypeMapFromInnerTree(Queue<JSValueType>& typeMap, VMSideExit* inner)
5317 #if defined DEBUG
5318 unsigned initialSlots = typeMap.length();
5319 #endif
5320 /* First, use the innermost exit's global typemap. */
5321 typeMap.add(inner->globalTypeMap(), inner->numGlobalSlots);
5323 /* Add missing global types from the innermost exit's tree. */
5324 TreeFragment* innerFrag = inner->root();
5325 unsigned slots = inner->numGlobalSlots;
5326 if (slots < innerFrag->nGlobalTypes()) {
5327 typeMap.add(innerFrag->globalTypeMap() + slots, innerFrag->nGlobalTypes() - slots);
5328 slots = innerFrag->nGlobalTypes();
5330 JS_ASSERT(typeMap.length() - initialSlots == slots);
5331 return slots;
5334 /* Record a call to an inner tree. */
5335 JS_REQUIRES_STACK void
5336 TraceRecorder::emitTreeCall(TreeFragment* inner, VMSideExit* exit)
5338 /* Invoke the inner tree. */
5339 LIns* args[] = { lirbuf->state }; /* reverse order */
5340 /* Construct a call info structure for the target tree. */
5341 CallInfo* ci = new (traceAlloc()) CallInfo();
5342 ci->_address = uintptr_t(inner->code());
5343 JS_ASSERT(ci->_address);
5344 ci->_typesig = CallInfo::typeSig1(ARGTYPE_P, ARGTYPE_P);
5345 ci->_isPure = 0;
5346 ci->_storeAccSet = ACCSET_STORE_ANY;
5347 ci->_abi = ABI_FASTCALL;
5348 #ifdef DEBUG
5349 ci->_name = "fragment";
5350 #endif
5351 LIns* rec = w.call(ci, args);
5352 LIns* lr = w.ldpGuardRecordExit(rec);
5353 LIns* nested = w.jtUnoptimizable(w.eqiN(w.ldiVMSideExitField(lr, exitType), NESTED_EXIT));
5356 * If the tree exits on a regular (non-nested) guard, keep updating lastTreeExitGuard
5357 * with that guard. If we mismatch on a tree call guard, this will contain the last
5358 * non-nested guard we encountered, which is the innermost loop or branch guard.
5360 w.stStateField(lr, lastTreeExitGuard);
5361 LIns* done1 = w.j(NULL);
5364 * The tree exited on a nested guard. This only occurs once a tree call guard mismatches
5365 * and we unwind the tree call stack. We store the first (innermost) tree call guard in state
5366 * and we will try to grow the outer tree the failing call was in starting at that guard.
5368 w.label(nested);
5369 LIns* done2 = w.jfUnoptimizable(w.eqp0(w.ldpStateField(lastTreeCallGuard)));
5370 w.stStateField(lr, lastTreeCallGuard);
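/* Scale calldepth to bytes by shifting by log2(sizeof(FrameInfo*)): 2 on 32-bit, 3 on 64-bit. */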
5371 w.stStateField(w.addp(w.ldpStateField(rp),
5372 w.i2p(w.lshiN(w.ldiVMSideExitField(lr, calldepth),
5373 sizeof(void*) == 4 ? 2 : 3))),
5374 rpAtLastTreeCall);
5375 w.label(done1, done2);
5378 * Keep updating outermostTreeExit so that TracerState always contains the most recent
5379 * side exit.
5381 w.stStateField(lr, outermostTreeExitGuard);
5383 /* Read back all registers, in case the called tree changed any of them. */
5384 #ifdef DEBUG
5385 JSValueType* map;
5386 size_t i;
5387 map = exit->globalTypeMap();
5388 for (i = 0; i < exit->numGlobalSlots; i++)
5389 JS_ASSERT(map[i] != JSVAL_TYPE_BOXED);
5390 map = exit->stackTypeMap();
5391 for (i = 0; i < exit->numStackSlots; i++)
5392 JS_ASSERT(map[i] != JSVAL_TYPE_BOXED);
5393 #endif
5395 /* The inner tree may modify currently-tracked upvars, so flush everything. */
5396 ClearSlotsVisitor visitor(tracker);
5397 VisitStackSlots(visitor, cx, callDepth);
5398 SlotList& gslots = *tree->globalSlots;
5399 for (unsigned i = 0; i < gslots.length(); i++) {
5400 unsigned slot = gslots[i];
5401 Value* vp = &globalObj->getSlotRef(slot);
5402 tracker.set(vp, NULL);
5405 /* Set stack slots from the innermost frame. */
5406 importTypeMap.setLength(NativeStackSlots(cx, callDepth));
5407 unsigned startOfInnerFrame = importTypeMap.length() - exit->numStackSlots;
5408 for (unsigned i = 0; i < exit->numStackSlots; i++)
5409 importTypeMap[startOfInnerFrame + i] = exit->stackTypeMap()[i];
5410 importStackSlots = importTypeMap.length();
5411 JS_ASSERT(importStackSlots == NativeStackSlots(cx, callDepth));
5414 * Bug 502604 - It is illegal to extend from the outer typemap without
5415 * first extending from the inner. Make a new typemap here.
5417 BuildGlobalTypeMapFromInnerTree(importTypeMap, exit);
5419 importGlobalSlots = importTypeMap.length() - importStackSlots;
5420 JS_ASSERT(importGlobalSlots == tree->globalSlots->length());
5422 /* Restore sp and rp to their original values (we still have them in a register). */
5423 if (callDepth > 0) {
5424 w.stStateField(lirbuf->sp, sp);
5425 w.stStateField(lirbuf->rp, rp);
5429 * Guard that we come out of the inner tree along the same side exit we came out when
5430 * we called the inner tree at recording time.
5432 VMSideExit* nestedExit = snapshot(NESTED_EXIT);
5433 JS_ASSERT(exit->exitType == LOOP_EXIT);
5434 guard(true, w.eqp(lr, w.nameImmpNonGC(exit)), nestedExit);
5435 debug_only_printf(LC_TMTreeVis, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner,
5436 (void*)nestedExit, (void*)exit);
5438 /* Register us as a dependent tree of the inner tree. */
5439 inner->dependentTrees.addUnique(fragment->root);
5440 tree->linkedTrees.addUnique(inner);
5443 /* Add an if/if-else control-flow merge point to the list of known merge points. */
5444 JS_REQUIRES_STACK void
5445 TraceRecorder::trackCfgMerges(jsbytecode* pc)
5447 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
5448 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
5449 jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), pc);
5450 if (sn != NULL) {
5451 if (SN_TYPE(sn) == SRC_IF) {
5452 cfgMerges.add((*pc == JSOP_IFEQ)
5453 ? pc + GET_JUMP_OFFSET(pc)
5454 : pc + GET_JUMPX_OFFSET(pc));
5455 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
5456 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
5461 * Invert the direction of the guard if this is a loop edge that is not
5462 * taken (thin loop).
5464 JS_REQUIRES_STACK void
5465 TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
5467 ExitType exitType;
5468 JS_ASSERT(isCond(x));
5469 if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
5470 exitType = LOOP_EXIT;
5473 * If we are about to walk out of the loop, generate code for the
5474 * inverse loop condition, pretending we recorded the case that stays
5475 * on trace.
5477 if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
5478 JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
5479 debug_only_print0(LC_TMTracer,
5480 "Walking out of the loop, terminating it anyway.\n");
5481 cond = !cond;
5485 * Conditional guards do not have to be emitted if the condition is
5486 * constant. We make a note whether the loop condition is true or false
5487 * here, so we later know whether to emit a loop edge or a loop end.
5489 if (x->isImmI()) {
5490 pendingLoop = (x->immI() == int32(cond));
5491 return;
5493 } else {
5494 exitType = BRANCH_EXIT;
5496 if (!x->isImmI())
5497 guard(cond, x, exitType);
5500 /* Emit code for a fused IFEQ/IFNE. */
5501 JS_REQUIRES_STACK void
5502 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
5504 if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
5505 emitIf(pc, cond, x);
5506 if (*pc == JSOP_IFEQ)
5507 trackCfgMerges(pc);
5511 /* Check whether we have reached the end of the trace. */
5512 JS_REQUIRES_STACK AbortableRecordingStatus
5513 TraceRecorder::checkTraceEnd(jsbytecode *pc)
5515 if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
5517 * If we compile a loop, the trace should have a zero stack balance at
5518 * the loop edge. Currently we are parked on a comparison op or
5519 * IFNE/IFEQ, so advance pc to the loop header, adjust the stack
5520 * pointer, and pretend we have reached the loop header.
5522 if (pendingLoop) {
5523 JS_ASSERT(!cx->fp()->hasImacropc() && (pc == cx->regs->pc || pc == cx->regs->pc + 1));
5524 JSFrameRegs orig = *cx->regs;
5526 cx->regs->pc = (jsbytecode*)tree->ip;
5527 cx->regs->sp = cx->fp()->base() + tree->spOffsetAtEntry;
5529 JSContext* localcx = cx;
5530 AbortableRecordingStatus ars = closeLoop();
5531 *localcx->regs = orig;
5532 return ars;
5535 return endLoop();
5537 return ARECORD_CONTINUE;
5541 * Check whether the shape of the global object has changed. The return value
5542 * indicates whether the recorder is still active. If 'false', any active
5543 * recording has been aborted and the JIT may have been reset.
5545 static JS_REQUIRES_STACK bool
5546 CheckGlobalObjectShape(JSContext* cx, TraceMonitor* tm, JSObject* globalObj,
5547 uint32 *shape = NULL, SlotList** slots = NULL)
5549 if (tm->needFlush) {
5550 ResetJIT(cx, tm, FR_DEEP_BAIL);
5551 return false;
5554 if (globalObj->numSlots() > MAX_GLOBAL_SLOTS) {
5555 if (tm->recorder)
5556 AbortRecording(cx, "too many slots in global object");
5557 return false;
5561 * The global object must have a unique shape. That way, if an operand
5562 * isn't the global at record time, a shape guard suffices to ensure
5563 * that it isn't the global at run time.
5565 if (!globalObj->hasOwnShape()) {
5566 if (!globalObj->globalObjectOwnShapeChange(cx)) {
5567 debug_only_print0(LC_TMTracer,
5568 "Can't record: failed to give globalObj a unique shape.\n");
5569 return false;
5573 uint32 globalShape = globalObj->shape();
5575 if (tm->recorder) {
5576 TreeFragment* root = tm->recorder->getFragment()->root;
5578 /* Check the global shape matches the recorder's treeinfo's shape. */
5579 if (globalObj != root->globalObj || globalShape != root->globalShape) {
5580 AUDIT(globalShapeMismatchAtEntry);
5581 debug_only_printf(LC_TMTracer,
5582 "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
5583 (void*)globalObj, globalShape, (void*)root->globalObj,
5584 root->globalShape);
5585 Backoff(tm, (jsbytecode*) root->ip);
5586 ResetJIT(cx, tm, FR_GLOBAL_SHAPE_MISMATCH);
5587 return false;
5589 if (shape)
5590 *shape = globalShape;
5591 if (slots)
5592 *slots = root->globalSlots;
5593 return true;
5596 /* No recorder, search for a tracked global-state (or allocate one). */
5597 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5598 GlobalState &state = tm->globalStates[i];
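/*
 * A free entry (globalShape == uint32(-1)) is claimed for this global here;
 * the match test below then succeeds and returns its (still empty) slot list.
 */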
5600 if (state.globalShape == uint32(-1)) {
5601 state.globalObj = globalObj;
5602 state.globalShape = globalShape;
5603 JS_ASSERT(state.globalSlots);
5604 JS_ASSERT(state.globalSlots->length() == 0);
5607 if (state.globalObj == globalObj && state.globalShape == globalShape) {
5608 if (shape)
5609 *shape = globalShape;
5610 if (slots)
5611 *slots = state.globalSlots;
5612 return true;
5616 /* No currently-tracked-global found and no room to allocate, abort. */
5617 AUDIT(globalShapeMismatchAtEntry);
5618 debug_only_printf(LC_TMTracer,
5619 "No global slotlist for global shape %u, flushing cache.\n",
5620 globalShape);
5621 ResetJIT(cx, tm, FR_GLOBALS_FULL);
5622 return false;
5626 * Return whether or not the recorder could be started. If 'false', the JIT has
5627 * been reset in response to an OOM.
5629 bool JS_REQUIRES_STACK
5630 TraceRecorder::startRecorder(JSContext* cx, TraceMonitor *tm, VMSideExit* anchor, VMFragment* f,
5631 unsigned stackSlots, unsigned ngslots,
5632 JSValueType* typeMap, VMSideExit* expectedInnerExit,
5633 JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc,
5634 bool speculate)
5636 JS_ASSERT(!tm->needFlush);
5637 JS_ASSERT_IF(cx->fp()->hasImacropc(), f->root != f);
5639 /* We can't (easily) use js_new() here because the constructor is private. */
5640 void *memory = js_malloc(sizeof(TraceRecorder));
5641 tm->recorder = memory
5642 ? new(memory) TraceRecorder(cx, tm, anchor, f, stackSlots, ngslots, typeMap,
5643 expectedInnerExit, outerScript, outerPC, outerArgc,
5644 speculate)
5645 : NULL;
5647 if (!tm->recorder || tm->outOfMemory() || OverfullJITCache(cx, tm)) {
5648 ResetJIT(cx, tm, FR_OOM);
5649 return false;
5652 return true;
5655 static void
5656 TrashTree(TreeFragment* f)
5658 JS_ASSERT(f == f->root);
5659 debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);
5661 if (!f->code())
5662 return;
5663 AUDIT(treesTrashed);
5664 debug_only_print0(LC_TMTracer, "Trashing tree info.\n");
5665 f->setCode(NULL);
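/*
 * Trashing cascades: every tree registered as dependent on this one, and
 * every tree this one links to, is trashed recursively as well.
 */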
5666 TreeFragment** data = f->dependentTrees.data();
5667 unsigned length = f->dependentTrees.length();
5668 for (unsigned n = 0; n < length; ++n)
5669 TrashTree(data[n]);
5670 data = f->linkedTrees.data();
5671 length = f->linkedTrees.length();
5672 for (unsigned n = 0; n < length; ++n)
5673 TrashTree(data[n]);
5676 static void
5677 SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
5679 VOUCH_DOES_NOT_REQUIRE_STACK();
5681 /* Assert that we have a correct sp distance from cx->fp()->slots in fi. */
5682 JSStackFrame* const fp = cx->fp();
5683 JS_ASSERT_IF(!fi.imacpc,
5684 js_ReconstructStackDepth(cx, fp->script(), fi.pc) ==
5685 uintN(fi.spdist - fp->numFixed()));
5687 /* Use the just-flushed prev-frame to get the callee function. */
5688 JSFunction* newfun = callee->getFunctionPrivate();
5689 JSScript* newscript = newfun->script();
5691 /* Fill in the prev-frame's sp. */
5692 JSFrameRegs *regs = cx->regs;
5693 regs->sp = fp->slots() + fi.spdist;
5694 regs->pc = fi.pc;
5695 if (fi.imacpc)
5696 fp->setImacropc(fi.imacpc);
5698 /* Set argc/flags then mimic JSOP_CALL. */
5699 uintN argc = fi.get_argc();
5700 uint32 flags = fi.is_constructing()
5701 ? JSFRAME_CONSTRUCTING
5702 : 0;
5704 /* Get pointer to new/frame/slots, prepare arguments. */
5705 StackSpace &stack = cx->stack();
5706 JSStackFrame *newfp = stack.getInlineFrame(cx, regs->sp, argc, newfun,
5707 newscript, &flags);
5709 /* Initialize frame; do not need to initialize locals. */
5710 newfp->initCallFrame(cx, *callee, newfun, argc, flags);
5712 #ifdef DEBUG
5713 /* The stack is conservatively marked, so we can leave non-canonical args uninitialized. */
5714 if (newfp->hasOverflowArgs()) {
5715 Value *beg = newfp->actualArgs() - 2;
5716 Value *end = newfp->actualArgs() + newfp->numFormalArgs();
5717 for (Value *p = beg; p != end; ++p)
5718 p->setMagic(JS_ARG_POISON);
5721 /* These should be initialized by FlushNativeStackFrame. */
5722 newfp->thisValue().setMagic(JS_THIS_POISON);
5723 newfp->setScopeChainNoCallObj(*JSStackFrame::sInvalidScopeChain);
5724 #endif
5726 /* Officially push the frame. */
5727 stack.pushInlineFrame(cx, newscript, newfp, cx->regs);
5729 /* Call object will be set by FlushNativeStackFrame. */
5731 /* Call the debugger hook if present. */
5732 JSInterpreterHook hook = cx->debugHooks->callHook;
5733 if (hook) {
5734 newfp->setHookData(hook(cx, newfp, JS_TRUE, 0,
5735 cx->debugHooks->callHookData));
5739 static JS_REQUIRES_STACK bool
5740 RecordTree(JSContext* cx, TraceMonitor* tm, TreeFragment* first,
5741 JSScript* outerScript, jsbytecode* outerPC,
5742 uint32 outerArgc, SlotList* globalSlots)
5744 /* Try to find an unused peer fragment, or allocate a new one. */
5745 JS_ASSERT(first->first == first);
5746 TreeFragment* f = NULL;
5747 size_t count = 0;
5748 for (TreeFragment* peer = first; peer; peer = peer->peer, ++count) {
5749 if (!peer->code())
5750 f = peer;
5752 if (!f)
5753 f = AddNewPeerToPeerList(tm, first);
5754 JS_ASSERT(f->root == f);
5756 /* Disable speculation if we are starting to accumulate a lot of trees. */
5757 bool speculate = count < MAXPEERS-1;
5759 /* save a local copy for use after JIT flush */
5760 const void* localRootIP = f->root->ip;
5762 /* Make sure the global type map didn't change on us. */
5763 if (!CheckGlobalObjectShape(cx, tm, f->globalObj)) {
5764 Backoff(tm, (jsbytecode*) localRootIP);
5765 return false;
5768 AUDIT(recorderStarted);
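/*
 * Refuse to record if the allocator is exhausted, the code cache is overfull,
 * or the script cannot be tracked. Only a genuine allocation failure (not a
 * merely full cache) is reported to the caller as out-of-memory.
 */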
5770 if (tm->outOfMemory() ||
5771 OverfullJITCache(cx, tm) ||
5772 !tm->tracedScripts.put(cx->fp()->script()))
5774 if (!OverfullJITCache(cx, tm))
5775 js_ReportOutOfMemory(cx);
5776 Backoff(tm, (jsbytecode*) f->root->ip);
5777 ResetJIT(cx, tm, FR_OOM);
5778 debug_only_print0(LC_TMTracer,
5779 "Out of memory recording new tree, flushing cache.\n");
5780 return false;
5783 JS_ASSERT(!f->code());
5785 f->initialize(cx, globalSlots, speculate);
5787 #ifdef DEBUG
5788 AssertTreeIsUnique(tm, f);
5789 #endif
5790 #ifdef JS_JIT_SPEW
5791 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
5792 (void*)f, f->ip, f->treeFileName, f->treeLineNumber,
5793 FramePCOffset(cx, cx->fp()));
5794 debug_only_print0(LC_TMTreeVis, " STACK=\"");
5795 for (unsigned i = 0; i < f->nStackTypes; i++)
5796 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[i]));
5797 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
5798 for (unsigned i = 0; i < f->nGlobalTypes(); i++)
5799 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[f->nStackTypes + i]));
5800 debug_only_print0(LC_TMTreeVis, "\"\n");
5801 #endif
5803 /* Recording primary trace. */
5804 return TraceRecorder::startRecorder(cx, tm, NULL, f, f->nStackTypes,
5805 f->globalSlots->length(),
5806 f->typeMap.data(), NULL,
5807 outerScript, outerPC, outerArgc, speculate);
5810 static JS_REQUIRES_STACK TypeConsensus
5811 FindLoopEdgeTarget(JSContext* cx, TraceMonitor* tm, VMSideExit* exit, TreeFragment** peerp)
5813 TreeFragment* from = exit->root();
5815 JS_ASSERT(from->code());
5816 Oracle* oracle = tm->oracle;
5818 TypeMap typeMap(NULL, oracle);
5819 FullMapFromExit(typeMap, exit);
5820 JS_ASSERT(typeMap.length() - exit->numStackSlots == from->nGlobalTypes());
5822 /* Mark all double slots as undemotable */
5823 uint16* gslots = from->globalSlots->data();
5824 for (unsigned i = 0; i < typeMap.length(); i++) {
5825 if (typeMap[i] == JSVAL_TYPE_DOUBLE) {
5826 if (i < from->nStackTypes)
5827 oracle->markStackSlotUndemotable(cx, i, from->ip);
5828 else if (i >= exit->numStackSlots)
5829 oracle->markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
5833 JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT);
5835 TreeFragment* firstPeer = from->first;
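/*
 * Scan the peer list for a compiled tree with matching argc whose entry types
 * this exit can link to, either as-is (TypeConsensus_Okay) or after undemoting
 * some slots (TypeConsensus_Undemotes).
 */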
5837 for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) {
5838 if (!peer->code())
5839 continue;
5840 JS_ASSERT(peer->argc == from->argc);
5841 JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
5842 TypeConsensus consensus = TypeMapLinkability(cx, tm, typeMap, peer);
5843 if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
5844 *peerp = peer;
5845 return consensus;
5849 return TypeConsensus_Bad;
5852 static JS_REQUIRES_STACK bool
5853 AttemptToStabilizeTree(JSContext* cx, TraceMonitor* tm, JSObject* globalObj, VMSideExit* exit,
5854 JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc)
5856 if (tm->needFlush) {
5857 ResetJIT(cx, tm, FR_DEEP_BAIL);
5858 return false;
5861 TreeFragment* from = exit->root();
5863 TreeFragment* peer = NULL;
5864 TypeConsensus consensus = FindLoopEdgeTarget(cx, tm, exit, &peer);
5865 if (consensus == TypeConsensus_Okay) {
5866 JS_ASSERT(from->globalSlots == peer->globalSlots);
5867 JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
5868 from->nStackTypes == peer->nStackTypes);
5869 JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
5870 /* Patch this exit to its peer */
5871 JoinPeers(tm->assembler, exit, peer);
5873 * Update peer global types. The |from| fragment should already be updated because it is on
5874 * the execution path, and somehow connected to the entry trace.
5876 if (peer->nGlobalTypes() < peer->globalSlots->length())
5877 SpecializeTreesToMissingGlobals(cx, globalObj, peer);
5878 JS_ASSERT(from->nGlobalTypes() == from->globalSlots->length());
5879 /* This exit is no longer unstable, so remove it. */
5880 if (exit->exitType == UNSTABLE_LOOP_EXIT)
5881 from->removeUnstableExit(exit);
5882 debug_only_stmt(DumpPeerStability(tm, peer->ip, globalObj, from->globalShape, from->argc);)
5883 return false;
5884 } else if (consensus == TypeConsensus_Undemotes) {
5885 /* The original tree is unconnectable, so trash it. */
5886 TrashTree(peer);
5887 return false;
5890 SlotList *globalSlots = from->globalSlots;
5892 JS_ASSERT(from == from->root);
5894 /* If this tree has been blacklisted, don't try to record a new one. */
5895 if (*(jsbytecode*)from->ip == JSOP_NOTRACE)
5896 return false;
5898 return RecordTree(cx, tm, from->first, outerScript, outerPC, outerArgc, globalSlots);
5901 static JS_REQUIRES_STACK VMFragment*
5902 CreateBranchFragment(JSContext* cx, TraceMonitor* tm, TreeFragment* root, VMSideExit* anchor)
5904 verbose_only(
5905 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
5906 ? (++(tm->lastFragID)) : 0;
5909 VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID));
5911 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
5912 " LINE=%d ANCHOR=%p OFFS=%d\n",
5913 (void*)root, (void*)f, (void*)cx->regs->pc, cx->fp()->script()->filename,
5914 js_FramePCToLineNumber(cx, cx->fp()), (void*)anchor,
5915 FramePCOffset(cx, cx->fp()));
5916 verbose_only( tm->branches = new (*tm->dataAlloc) Seq<Fragment*>(f, tm->branches); )
5918 f->root = root;
5919 if (anchor)
5920 anchor->target = f;
5921 return f;
5924 static JS_REQUIRES_STACK bool
5925 AttemptToExtendTree(JSContext* cx, TraceMonitor* tm, VMSideExit* anchor, VMSideExit* exitedFrom,
5926 JSScript *outerScript, jsbytecode* outerPC
5927 #ifdef MOZ_TRACEVIS
5928 , TraceVisStateObj* tvso = NULL
5929 #endif
5932 JS_ASSERT(!tm->recorder);
5934 if (tm->needFlush) {
5935 ResetJIT(cx, tm, FR_DEEP_BAIL);
5936 #ifdef MOZ_TRACEVIS
5937 if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
5938 #endif
5939 return false;
5942 TreeFragment* f = anchor->root();
5943 JS_ASSERT(f->code());
5946 * Don't grow trees above a certain size to avoid code explosion due to
5947 * tail duplication.
5949 if (f->branchCount >= MAX_BRANCHES) {
5950 #ifdef JS_METHODJIT
5951 if (cx->methodJitEnabled && cx->profilingEnabled)
5952 Blacklist((jsbytecode *)f->ip);
5953 #endif
5954 #ifdef MOZ_TRACEVIS
5955 if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
5956 #endif
5957 return false;
5960 VMFragment* c = (VMFragment*)anchor->target;
5961 if (!c) {
5962 c = CreateBranchFragment(cx, tm, f, anchor);
5963 } else {
5965 * If we are recycling a fragment, it might have a different ip so reset it
5966 * here. This can happen when attaching a branch to a NESTED_EXIT, which
5967 * might extend along separate paths (i.e. after the loop edge, and after a
5968 * return statement).
5970 c->ip = cx->regs->pc;
5971 JS_ASSERT(c->root == f);
5974 debug_only_printf(LC_TMTracer,
5975 "trying to attach another branch to the tree (hits = %d)\n", c->hits());
5977 int32_t& hits = c->hits();
5978 int32_t maxHits = HOTEXIT + MAXEXIT;
5979 if (anchor->exitType == CASE_EXIT)
5980 maxHits *= anchor->switchInfo->count;
5981 if (outerPC || (hits++ >= HOTEXIT && hits <= maxHits)) {
5982 /* start tracing secondary trace from this point */
5983 unsigned stackSlots;
5984 unsigned ngslots;
5985 JSValueType* typeMap;
5986 TypeMap fullMap(NULL, tm->oracle);
5987 if (!exitedFrom) {
5989 * If we are coming straight from a simple side exit, just use that
5990 * exit's type map as starting point.
5992 ngslots = anchor->numGlobalSlots;
5993 stackSlots = anchor->numStackSlots;
5994 typeMap = anchor->fullTypeMap();
5995 } else {
5997 * If we side-exited on a loop exit and continue on a nesting
5998 * guard, the nesting guard (anchor) has the type information for
5999 * everything below the current scope, and the actual guard we
6000 * exited from has the types for everything in the current scope
6001 * (and whatever it inlined). We have to merge those maps here.
6003 VMSideExit* e1 = anchor;
6004 VMSideExit* e2 = exitedFrom;
6005 fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
6006 fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
6007 stackSlots = fullMap.length();
6008 ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
6009 JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
6010 JS_ASSERT(ngslots == fullMap.length() - stackSlots);
6011 typeMap = fullMap.data();
6013 JS_ASSERT(ngslots >= anchor->numGlobalSlots);
6014 bool rv = TraceRecorder::startRecorder(cx, tm, anchor, c, stackSlots, ngslots, typeMap,
6015 exitedFrom, outerScript, outerPC, f->argc,
6016 hits < maxHits);
6017 #ifdef MOZ_TRACEVIS
6018 if (!rv && tvso)
6019 tvso->r = R_FAIL_EXTEND_START;
6020 #endif
6021 return rv;
6023 #ifdef MOZ_TRACEVIS
6024 if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
6025 #endif
6026 return false;
6029 static JS_REQUIRES_STACK bool
6030 ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
6031 VMSideExit** innermostNestedGuardp, VMSideExit** lrp);
6033 static inline MonitorResult
6034 RecordingIfTrue(bool b)
6036 return b ? MONITOR_RECORDING : MONITOR_NOT_RECORDING;
6040 * A postcondition of recordLoopEdge is that if recordLoopEdge does not return
6041 * MONITOR_RECORDING, the recording has been aborted.
6043 JS_REQUIRES_STACK MonitorResult
6044 TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
6046 TraceMonitor* tm = r->traceMonitor;
6048 /* Process needFlush and deep abort requests. */
6049 if (tm->needFlush) {
6050 ResetJIT(cx, tm, FR_DEEP_BAIL);
6051 return MONITOR_NOT_RECORDING;
6054 JS_ASSERT(r->fragment && !r->fragment->lastIns);
6055 TreeFragment* root = r->fragment->root;
6056 TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj,
6057 root->globalShape, entryFrameArgc(cx));
6060 * Make sure the shape of the global object still matches (this might flush
6061 * the JIT cache).
6063 JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
6064 uint32 globalShape = -1;
6065 SlotList* globalSlots = NULL;
6066 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
6067 JS_ASSERT(!tm->recorder);
6068 return MONITOR_NOT_RECORDING;
6071 debug_only_printf(LC_TMTracer,
6072 "Looking for type-compatible peer (%s:%d@%d)\n",
6073 cx->fp()->script()->filename,
6074 js_FramePCToLineNumber(cx, cx->fp()),
6075 FramePCOffset(cx, cx->fp()));
6077 // Find a matching inner tree. If none can be found, compile one.
6078 TreeFragment* f = r->findNestedCompatiblePeer(first);
6079 if (!f || !f->code()) {
6080 AUDIT(noCompatInnerTrees);
6082 TreeFragment* outerFragment = root;
6083 JSScript* outerScript = outerFragment->script;
6084 jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
6085 uint32 outerArgc = outerFragment->argc;
6086 JS_ASSERT(entryFrameArgc(cx) == first->argc);
6088 if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
6089 return MONITOR_NOT_RECORDING;
6091 return RecordingIfTrue(RecordTree(cx, tm, first,
6092 outerScript, outerPC, outerArgc, globalSlots));
6095 AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
6096 if (status == ARECORD_CONTINUE)
6097 return MONITOR_RECORDING;
6098 if (status == ARECORD_ERROR) {
6099 if (tm->recorder)
6100 AbortRecording(cx, "Error returned while recording loop edge");
6101 return MONITOR_ERROR;
6103 JS_ASSERT(status == ARECORD_ABORTED && !tm->recorder);
6104 return MONITOR_NOT_RECORDING;
6107 JS_REQUIRES_STACK AbortableRecordingStatus
6108 TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
6110 adjustCallerTypes(f);
6111 prepareTreeCall(f);
6113 #ifdef DEBUG
6114 uintN oldInlineCallCount = inlineCallCount;
6115 #endif
6117 JSContext *localCx = cx;
6118 TraceMonitor *localtm = traceMonitor;
6120 // Refresh the import type map so the tracker can reimport values after the
6121 // call with their correct types. The inner tree must not change the type of
6122 // any variable in a frame above the current one (i.e., upvars).
6124 // Note that DetermineTypesVisitor may call determineSlotType, which may
6125 // read from the (current, stale) import type map, but this is safe here.
6126 // The reason is that determineSlotType will read the import type map only
6127 // if there is not a tracker instruction for that value, which means that
6128 // value has not been written yet, so that type map entry is up to date.
6129 importTypeMap.setLength(NativeStackSlots(cx, callDepth));
6130 DetermineTypesVisitor visitor(*this, importTypeMap.data());
6131 VisitStackSlots(visitor, cx, callDepth);
6133 VMSideExit* innermostNestedGuard = NULL;
6134 VMSideExit* lr;
6135 bool ok = ExecuteTree(cx, traceMonitor, f, inlineCallCount, &innermostNestedGuard, &lr);
6138 * If ExecuteTree reentered the interpreter, it may have killed |this|
6139 * and/or caused an error, which must be propagated.
6141 JS_ASSERT_IF(localtm->recorder, localtm->recorder == this);
6142 if (!ok)
6143 return ARECORD_ERROR;
6144 if (!localtm->recorder)
6145 return ARECORD_ABORTED;
6147 if (!lr) {
6148 AbortRecording(cx, "Couldn't call inner tree");
6149 return ARECORD_ABORTED;
6152 TreeFragment* outerFragment = tree;
6153 JSScript* outerScript = outerFragment->script;
6154 jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
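/*
 * Dispatch on the exit the inner tree took: a LOOP_EXIT with no pending
 * nested guard lets us emit the tree call and keep recording; every other
 * case aborts the outer recording and, where possible, tries to grow or
 * stabilize the inner tree instead.
 */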
6155 switch (lr->exitType) {
6156 case LOOP_EXIT:
6157 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
6158 if (innermostNestedGuard) {
6159 if (AbortRecording(cx, "Inner tree took different side exit, abort current "
6160 "recording and grow nesting tree") == JIT_RESET) {
6161 return ARECORD_ABORTED;
6163 return AttemptToExtendTree(localCx, localtm,
6164 innermostNestedGuard, lr, outerScript, outerPC)
6165 ? ARECORD_CONTINUE
6166 : ARECORD_ABORTED;
6169 JS_ASSERT(oldInlineCallCount == inlineCallCount);
6171 /* Emit a call to the inner tree and continue recording the outer tree trace. */
6172 emitTreeCall(f, lr);
6173 return ARECORD_CONTINUE;
6175 case UNSTABLE_LOOP_EXIT:
6177 /* Abort recording so the inner loop can become type stable. */
6178 JSObject* _globalObj = globalObj;
6179 if (AbortRecording(cx, "Inner tree is trying to stabilize, "
6180 "abort outer recording") == JIT_RESET) {
6181 return ARECORD_ABORTED;
6183 return AttemptToStabilizeTree(localCx, localtm, _globalObj, lr, outerScript, outerPC,
6184 outerFragment->argc)
6185 ? ARECORD_CONTINUE
6186 : ARECORD_ABORTED;
6189 case MUL_ZERO_EXIT:
6190 case OVERFLOW_EXIT:
6191 if (lr->exitType == MUL_ZERO_EXIT)
6192 traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs->pc);
6193 else
6194 traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
6195 /* FALL THROUGH */
6196 case BRANCH_EXIT:
6197 case CASE_EXIT:
6198 /* Abort recording the outer tree, extend the inner tree. */
6199 if (AbortRecording(cx, "Inner tree is trying to grow, "
6200 "abort outer recording") == JIT_RESET) {
6201 return ARECORD_ABORTED;
6203 return AttemptToExtendTree(localCx, localtm, lr, NULL, outerScript, outerPC)
6204 ? ARECORD_CONTINUE
6205 : ARECORD_ABORTED;
6207 case NESTED_EXIT:
6208 JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
6209 default:
6210 debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
6211 AbortRecording(cx, "Inner tree not suitable for calling");
6212 return ARECORD_ABORTED;
6216 static inline bool
6217 IsEntryTypeCompatible(const Value &v, JSValueType type)
6219 bool ok;
6221 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
6222 JS_ASSERT(type != JSVAL_TYPE_OBJECT); /* JSVAL_TYPE_OBJECT does not belong in a type map */
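/*
 * An int32 value may enter an int32 or double slot; a double may enter a
 * double slot, or an int32 slot if it holds an exact int32; objects must
 * agree on function vs. non-function; all other values must match the
 * recorded type exactly.
 */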
6224 if (v.isInt32()) {
6225 ok = (type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_DOUBLE);
6227 } else if (v.isDouble()) {
6228 int32_t _;
6229 ok = (type == JSVAL_TYPE_DOUBLE) ||
6230 (type == JSVAL_TYPE_INT32 && JSDOUBLE_IS_INT32(v.toDouble(), &_));
6232 } else if (v.isObject()) {
6233 ok = v.toObject().isFunction()
6234 ? type == JSVAL_TYPE_FUNOBJ
6235 : type == JSVAL_TYPE_NONFUNOBJ;
6237 } else {
6238 ok = v.extractNonDoubleObjectTraceType() == type;
6240 #ifdef DEBUG
6241 char ttag = TypeToChar(type);
6242 char vtag = ValueToTypeChar(v);
6243 debug_only_printf(LC_TMTracer, "%c/%c ", vtag, ttag);
6244 if (!ok)
6245 debug_only_printf(LC_TMTracer, "%s", "(incompatible types)");
6246 #endif
6247 return ok;
6250 static inline bool
6251 IsFrameObjPtrTypeCompatible(void *p, JSStackFrame *fp, JSValueType type)
6253 debug_only_printf(LC_TMTracer, "%c/%c ", TypeToChar(type),
6254 (p == fp->addressOfScopeChain() || fp->hasArgsObj())
6255 ? TypeToChar(JSVAL_TYPE_NONFUNOBJ)
6256 : TypeToChar(JSVAL_TYPE_NULL));
6257 if (p == fp->addressOfScopeChain())
6258 return type == JSVAL_TYPE_NONFUNOBJ;
6259 JS_ASSERT(p == fp->addressOfArgs());
6260 JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ || type == JSVAL_TYPE_NULL);
6261 return fp->hasArgsObj() == (type == JSVAL_TYPE_NONFUNOBJ);
6264 class TypeCompatibilityVisitor : public SlotVisitorBase
6266 TraceRecorder &mRecorder;
6267 JSContext *mCx;
6268 Oracle *mOracle;
6269 JSValueType *mTypeMap;
6270 unsigned mStackSlotNum;
6271 bool mOk;
6272 public:
6273 TypeCompatibilityVisitor (TraceRecorder &recorder,
6274 JSValueType *typeMap) :
6275 mRecorder(recorder),
6276 mCx(mRecorder.cx),
6277 mOracle(recorder.traceMonitor->oracle),
6278 mTypeMap(typeMap),
6279 mStackSlotNum(0),
6280 mOk(true)
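/*
 * A global slot is compatible if its current value matches the recorded entry
 * type. If a slot recorded as int32 is no longer backed by a promoted int on
 * trace, mark it undemotable and fail; if an int32 value flows into a slot
 * recorded as double, the slot is still compatible, but mark it undemotable
 * so later trees stop speculating int32 there.
 */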
6283 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
6284 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
6285 debug_only_printf(LC_TMTracer, "global%d=", n);
6286 if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
6287 mOk = false;
6288 } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
6289 mOracle->markGlobalSlotUndemotable(mCx, slot);
6290 mOk = false;
6291 } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
6292 mOracle->markGlobalSlotUndemotable(mCx, slot);
6294 mTypeMap++;
6298 * For the below two methods, one may be inclined to 'return false' early
6299 * when mOk is set to 'false'. Don't do that. It is very important to run
6300 * through the whole list to let all mis-matching slots get marked
6301 * undemotable in the oracle.
6304 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6305 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
6306 for (size_t i = 0; i < count; ++i) {
6307 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
6308 if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
6309 mOk = false;
6310 } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
6311 mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
6312 mOk = false;
6313 } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
6314 mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
6316 vp++;
6317 mTypeMap++;
6318 mStackSlotNum++;
6320 return true;
6323 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6324 visitFrameObjPtr(void* p, JSStackFrame* fp) {
6325 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
6326 if (!IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap))
6327 mOk = false;
6328 mTypeMap++;
6329 mStackSlotNum++;
6330 return true;
6333 bool isOk() {
6334 return mOk;
6338 JS_REQUIRES_STACK TreeFragment*
6339 TraceRecorder::findNestedCompatiblePeer(TreeFragment* f)
6341 unsigned int ngslots = tree->globalSlots->length();
6343 for (; f != NULL; f = f->peer) {
6344 if (!f->code())
6345 continue;
6347 debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
6349 if (ngslots > f->nGlobalTypes())
6350 SpecializeTreesToMissingGlobals(cx, globalObj, f);
6353 * Determine whether the typemap of the inner tree matches the outer
6354 * tree's current state. If the inner tree expects an integer, but the
6355 * outer tree doesn't guarantee an integer for that slot, we mark the
6356 * slot undemotable and mismatch here. This will force a new tree to be
6357 * compiled that accepts a double for the slot. If the inner tree
6358 * expects a double, but the outer tree has an integer, we can proceed,
6359 * but we mark the location undemotable.
6361 TypeCompatibilityVisitor visitor(*this, f->typeMap.data());
6362 VisitSlots(visitor, cx, 0, *tree->globalSlots);
6364 debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
6365 if (visitor.isOk())
6366 return f;
6369 return NULL;
6372 class CheckEntryTypeVisitor : public SlotVisitorBase
6374 bool mOk;
6375 JSValueType *mTypeMap;
6376 public:
6377 CheckEntryTypeVisitor(JSValueType *typeMap) :
6378 mOk(true),
6379 mTypeMap(typeMap)
6382 JS_ALWAYS_INLINE void checkSlot(const Value &v, char const *name, int i) {
6383 debug_only_printf(LC_TMTracer, "%s%d=", name, i);
6384 JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
6385 mOk = IsEntryTypeCompatible(v, *mTypeMap++);
6388 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
6389 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
6390 if (mOk)
6391 checkSlot(*vp, "global", n);
6394 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6395 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
6396 for (size_t i = 0; i < count; ++i) {
6397 if (!mOk)
6398 break;
6399 checkSlot(*vp++, stackSlotKind(), i);
6401 return mOk;
6404 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6405 visitFrameObjPtr(void* p, JSStackFrame *fp) {
6406 debug_only_printf(LC_TMTracer, "%s%d=", stackSlotKind(), 0);
6407 JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
6408 return mOk = IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap++);
6411 bool isOk() {
6412 return mOk;
6417 * Check if types are usable for trace execution.
6419 * @param cx Context.
6420 * @param f Tree of peer we're testing.
6421 * @return True if compatible (with or without demotions), false otherwise.
6423 static JS_REQUIRES_STACK bool
6424 CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeFragment* f)
6426 unsigned int ngslots = f->globalSlots->length();
6428 JS_ASSERT(f->nStackTypes == NativeStackSlots(cx, 0));
6430 if (ngslots > f->nGlobalTypes())
6431 SpecializeTreesToMissingGlobals(cx, globalObj, f);
6433 JS_ASSERT(f->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
6434 JS_ASSERT(f->typeMap.length() == f->nStackTypes + ngslots);
6435 JS_ASSERT(f->nGlobalTypes() == ngslots);
6437 CheckEntryTypeVisitor visitor(f->typeMap.data());
6438 VisitSlots(visitor, cx, 0, *f->globalSlots);
6440 debug_only_print0(LC_TMTracer, "\n");
6441 return visitor.isOk();
6445 * Find an acceptable entry tree given a PC.
6447 * @param cx Context.
6448 * @param globalObj Global object.
6449 * @param f First peer fragment.
6451 * @out count Number of fragments consulted.
6453 static JS_REQUIRES_STACK TreeFragment*
6454 FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN& count)
6456 count = 0;
6457 for (; f != NULL; f = f->peer) {
6458 if (!f->code())
6459 continue;
6460 debug_only_printf(LC_TMTracer,
6461 "checking vm types %p (ip: %p): ", (void*)f, f->ip);
6462 if (CheckEntryTypes(cx, globalObj, f))
6463 return f;
6464 ++count;
6466 return NULL;
6470 * For the native stacks and global frame, reuse the storage in |tm->storage|.
6471 * This reuse depends on the invariant that only one trace uses |tm->storage|
6472 * at a time. This is subtly correct in case of deep bail; see the comment
6473 * about "clobbering deep bails" in DeepBail.
6475 JS_ALWAYS_INLINE
6476 TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
6477 uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
6478 : cx(cx),
6479 traceMonitor(tm),
6480 stackBase(tm->storage->stack()),
6481 sp(stackBase + f->nativeStackBase / sizeof(double)),
6482 eos(tm->storage->global()),
6483 callstackBase(tm->storage->callstack()),
6484 sor(callstackBase),
6485 rp(callstackBase),
6486 eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES,
6487 JS_MAX_INLINE_CALL_COUNT - inlineCallCount)),
6488 lastTreeExitGuard(NULL),
6489 lastTreeCallGuard(NULL),
6490 rpAtLastTreeCall(NULL),
6491 outermostTree(f),
6492 inlineCallCountp(&inlineCallCount),
6493 innermostNestedGuardp(innermostNestedGuardp),
6494 #ifdef EXECUTE_TREE_TIMER
6495 startTime(rdtsc()),
6496 #endif
6497 builtinStatus(0),
6498 nativeVp(NULL)
6500 JS_ASSERT(!tm->tracecx);
6501 tm->tracecx = cx;
6502 prev = tm->tracerState;
6503 tm->tracerState = this;
6505 #ifdef JS_METHODJIT
6506 if (TRACE_PROFILER(cx))
6507 AbortProfiling(cx);
6508 #endif
6510 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == NULL);
6511 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
6512 JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
6513 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
6514 JS_THREAD_DATA(cx)->onTraceCompartment = cx->compartment;
6516 JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
6517 JS_ASSERT(sp < eos);
6520 * inlineCallCount has already been incremented, if being invoked from
6521 * EnterFrame. It is okay to have a 0-frame restriction since the JIT
6522 * might not need any frames.
6524 JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT);
6526 #ifdef DEBUG
6528 * Cannot 0xCD-fill global frame since it may overwrite a bailed outer
6529 * ExecuteTree's 0xdeadbeefdeadbeef marker.
6531 memset(tm->storage->stack(), 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double));
6532 memset(tm->storage->callstack(), 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
6533 #endif
6536 JS_ALWAYS_INLINE
6537 TracerState::~TracerState()
6539 JS_ASSERT(!nativeVp);
6541 if (traceMonitor->tracecx) {
6542 /* If we didn't already deep-bail... */
6543 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
6544 JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
6545 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
6546 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == cx->compartment);
6547 JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
6550 traceMonitor->tracerState = prev;
6551 traceMonitor->tracecx = NULL;
6554 /* Call |f|, return the exit taken. */
6555 static JS_ALWAYS_INLINE VMSideExit*
6556 ExecuteTrace(JSContext* cx, TraceMonitor* tm, Fragment* f, TracerState& state)
6558 JS_ASSERT(!tm->bailExit);
6559 #ifdef JS_METHODJIT
6560 JS_ASSERT(!TRACE_PROFILER(cx));
6561 #endif
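/*
 * Compiled trace code is entered through a single FASTCALL entry point that
 * takes the TracerState and returns the GuardRecord of the side exit taken;
 * the union reinterprets the fragment's code pointer as that function type.
 */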
6562 union { NIns *code; GuardRecord* (FASTCALL *func)(TracerState*); } u;
6563 u.code = f->code();
6564 GuardRecord* rec;
6565 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
6566 SIMULATE_FASTCALL(rec, state, NULL, u.func);
6567 #else
6568 rec = u.func(&state);
6569 #endif
6570 JS_ASSERT(!tm->bailExit);
6571 return (VMSideExit*)rec->exit;
6574 /* Check whether our assumptions about the incoming scope-chain are upheld. */
6575 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6576 ScopeChainCheck(JSContext* cx, TreeFragment* f)
6578 JS_ASSERT(f->globalObj == cx->fp()->scopeChain().getGlobal());
6581 * The JIT records and expects to execute with two scope-chain
6582 * assumptions baked-in:
6584 * 1. That the bottom of the scope chain is global, in the sense of
6585 * JSCLASS_IS_GLOBAL.
6587 * 2. That the scope chain between fp and the global is free of
6588 * "unusual" native objects such as HTML forms or other funny
6589 * things.
6591 * #2 is checked here while following the scope-chain links, via
6592 * js_IsCacheableNonGlobalScope, which consults a whitelist of known
6593 * class types; once a global is found, it's checked for #1. Failing
6594 * either check causes an early return from execution.
6596 JSObject* child = &cx->fp()->scopeChain();
6597 while (JSObject* parent = child->getParent()) {
6598 if (!js_IsCacheableNonGlobalScope(child)) {
6599 debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
6600 Blacklist((jsbytecode*) f->root->ip);
6601 return false;
6603 child = parent;
6605 JS_ASSERT(child == f->globalObj);
6607 if (!f->globalObj->isGlobal()) {
6608 debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
6609 Blacklist((jsbytecode*) f->root->ip);
6610 return false;
6613 return true;
6616 enum LEAVE_TREE_STATUS {
6617 NO_DEEP_BAIL = 0,
6618 DEEP_BAILED = 1
6621 static LEAVE_TREE_STATUS
6622 LeaveTree(TraceMonitor *tm, TracerState&, VMSideExit *lr);
6624 /* Return false if the interpreter should goto error. */
6625 static JS_REQUIRES_STACK bool
6626 ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
6627 VMSideExit** innermostNestedGuardp, VMSideExit **lrp)
6629 #ifdef MOZ_TRACEVIS
6630 TraceVisStateObj tvso(cx, S_EXECUTE);
6631 #endif
6632 JS_ASSERT(f->root == f && f->code());
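/*
 * Bail out without running the trace if the scope chain is unsuitable, the
 * interpreter stack cannot grow enough, or inlining this tree's deepest call
 * path would overflow JS_MAX_INLINE_CALL_COUNT; *lrp == NULL tells the caller
 * that no trace executed.
 */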
6634 if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace() ||
6635 inlineCallCount + f->maxCallDepth > JS_MAX_INLINE_CALL_COUNT) {
6636 *lrp = NULL;
6637 return true;
6640 /* Make sure the global object is sane. */
6641 JS_ASSERT(f->globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
6642 JS_ASSERT(f->nGlobalTypes() == f->globalSlots->length());
6643 JS_ASSERT_IF(f->globalSlots->length() != 0,
6644 f->globalObj->shape() == f->globalShape);
6646 /* Initialize trace state. */
6647 TracerState state(cx, tm, f, inlineCallCount, innermostNestedGuardp);
6648 double* stack = tm->storage->stack();
6649 double* global = tm->storage->global();
6650 JSObject* globalObj = f->globalObj;
6651 unsigned ngslots = f->globalSlots->length();
6652 uint16* gslots = f->globalSlots->data();
6654 BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
6655 f->typeMap.data(), global, stack);
6657 AUDIT(traceTriggered);
6658 debug_only_printf(LC_TMTracer, "entering trace at %s:%u@%u, execs: %u code: %p\n",
6659 cx->fp()->script()->filename,
6660 js_FramePCToLineNumber(cx, cx->fp()),
6661 FramePCOffset(cx, cx->fp()),
6662 f->execs,
6663 f->code());
6665 debug_only_stmt(uint32 globalSlots = globalObj->numSlots();)
6666 debug_only_stmt(*(uint64*)&tm->storage->global()[globalSlots] = 0xdeadbeefdeadbeefLL;)
6668 /* Execute trace. */
6669 tm->iterationCounter = 0;
6670 debug_only(int64 t0 = PRMJ_Now();)
6671 #ifdef MOZ_TRACEVIS
6672 VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, tm, f, state));
6673 #else
6674 VMSideExit* lr = ExecuteTrace(cx, tm, f, state);
6675 #endif
6676 debug_only(int64 t1 = PRMJ_Now();)
6678 JS_ASSERT_IF(lr->exitType == LOOP_EXIT, !lr->calldepth);
6680 /* Restore interpreter state. */
6681 #ifdef DEBUG
6682 LEAVE_TREE_STATUS lts =
6683 #endif
6684 LeaveTree(tm, state, lr);
6685 #ifdef DEBUG
6686 JS_ASSERT_IF(lts == NO_DEEP_BAIL,
6687 *(uint64*)&tm->storage->global()[globalSlots] == 0xdeadbeefdeadbeefLL);
6688 #endif
6690 *lrp = state.innermost;
6691 bool ok = !(state.builtinStatus & BUILTIN_ERROR);
6692 JS_ASSERT_IF(cx->isExceptionPending(), !ok);
6694 size_t iters = tm->iterationCounter;
6696 f->execs++;
6697 f->iters += iters;
6699 #ifdef DEBUG
6700 JSStackFrame *fp = cx->fp();
6701 const char *prefix = "";
6702 if (iters == LOOP_COUNT_MAX)
6703 prefix = ">";
6704 debug_only_printf(LC_TMMinimal, " [%.3f ms] Tree at line %u executed for %s%u iterations;"
6705 " executed %u times; leave for %s at %s:%u (%s)\n",
6706 double(t1-t0) / PRMJ_USEC_PER_MSEC,
6707 f->treeLineNumber, prefix, (uintN)iters, f->execs,
6708 getExitName(lr->exitType),
6709 fp->script()->filename,
6710 js_FramePCToLineNumber(cx, fp),
6711 js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc]);
6712 #endif
6714 #ifdef JS_METHODJIT
6715 if (cx->methodJitEnabled) {
6716 if (lr->exitType == LOOP_EXIT && f->iters < MIN_LOOP_ITERS
6717 && f->execs >= LOOP_CHECK_ITERS)
6719 debug_only_printf(LC_TMMinimal, " Blacklisting at line %u (executed only %d iters)\n",
6720 f->treeLineNumber, f->iters);
6721 Blacklist((jsbytecode *)f->ip);
6724 #endif
6725 return ok;
6728 class Guardian {
6729 bool *flagp;
6730 public:
6731 Guardian(bool *flagp) {
6732 this->flagp = flagp;
6733 JS_ASSERT(!*flagp);
6734 *flagp = true;
6737 ~Guardian() {
6738 JS_ASSERT(*flagp);
6739 *flagp = false;
6743 static JS_FORCES_STACK LEAVE_TREE_STATUS
6744 LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
6746 VOUCH_DOES_NOT_REQUIRE_STACK();
6748 JSContext* cx = state.cx;
6750 /* Temporarily waive the soft GC quota to make sure LeaveTree() doesn't fail. */
6751 Guardian waiver(&JS_THREAD_DATA(cx)->waiveGCQuota);
6753 FrameInfo** callstack = state.callstackBase;
6754 double* stack = state.stackBase;
6757 * Unless we find that this is a nested bailout, the guard the call
6758 * returned is the one we have to use to adjust pc and sp.
6760 VMSideExit* innermost = lr;
6763 * While executing a tree we do not update state.sp and state.rp even if
6764 * they grow. Instead, guards tell us by how much sp and rp should be
6765 * incremented in case of a side exit. When calling a nested tree, however,
6766 * we actively adjust sp and rp. If we have such frames from outer trees on
6767 * the stack, then rp will have been adjusted. Before we can process the
6768 * stack of the frames of the tree we directly exited from, we have to
6769 * first work our way through the outer frames and generate interpreter
6770 * frames for them. Once the call stack (rp) is empty, we can process the
6771 * final frames (which again are not directly visible and which only the guard we
6772 * exited on tells us about).
6774 FrameInfo** rp = (FrameInfo**)state.rp;
6775 if (lr->exitType == NESTED_EXIT) {
6776 VMSideExit* nested = state.lastTreeCallGuard;
6777 if (!nested) {
6779 * If lastTreeCallGuard is not set in state, we only have a single
6780 * level of nesting in this exit, so lr itself is the innermost and
6781 * outermost nested guard, and hence we set nested to lr. The
6782 * calldepth of the innermost guard is not added to state.rp, so we
6783 * do it here manually. For a nesting depth greater than 1 the
6784 * call tree code already added the innermost guard's calldepth
6785 * to state.rpAtLastTreeCall.
6787 nested = lr;
6788 rp += lr->calldepth;
6789 } else {
6791 * During unwinding state.rp gets overwritten at every step and we
6792 * restore it here to its state at the innermost nested guard. The
6793 * builtin already added the calldepth of that innermost guard to
6794 * rpAtLastTreeCall.
6796 rp = (FrameInfo**)state.rpAtLastTreeCall;
6798 innermost = state.lastTreeExitGuard;
6799 if (state.innermostNestedGuardp)
6800 *state.innermostNestedGuardp = nested;
6801 JS_ASSERT(nested);
6802 JS_ASSERT(nested->exitType == NESTED_EXIT);
6803 JS_ASSERT(state.lastTreeExitGuard);
6804 JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
6807 int32_t bs = state.builtinStatus;
6808 bool bailed = innermost->exitType == STATUS_EXIT && (bs & BUILTIN_BAILED);
6809 if (bailed) {
6811 * Deep-bail case.
6813 * A _FAIL native already called LeaveTree once. At that time we
6814 * reconstructed the interpreter stack, in pre-call state, with pc
6815 * pointing to the op that triggered the call. Then we continued in
6816 * native code.
6818 if (!(bs & BUILTIN_ERROR)) {
6820 * The builtin or native deep-bailed but finished successfully
6821 * (no exception or error).
6823 * After it returned, the JIT code stored the results of the
6824 * builtin or native at the top of the native stack and then
6825 * immediately flunked the guard on state->builtinStatus.
6827 * Now LeaveTree has been called again from the tail of
6828 * ExecuteTree. We are about to return to the interpreter. Adjust
6829 * the top stack frame to resume on the next op.
6831 JSFrameRegs* regs = cx->regs;
6832 JSOp op = (JSOp) *regs->pc;
6835 * JSOP_SETELEM can be coalesced with a JSOP_POP in the interpreter.
6836 * Since this doesn't re-enter the recorder, the post-state snapshot
6837 * is invalid. Fix it up here.
6839 if (op == JSOP_SETELEM && JSOp(regs->pc[JSOP_SETELEM_LENGTH]) == JSOP_POP) {
6840 regs->sp -= js_CodeSpec[JSOP_SETELEM].nuses;
6841 regs->sp += js_CodeSpec[JSOP_SETELEM].ndefs;
6842 regs->pc += JSOP_SETELEM_LENGTH;
6843 op = JSOP_POP;
6846 const JSCodeSpec& cs = js_CodeSpec[op];
6847 regs->sp -= (cs.format & JOF_INVOKE) ? GET_ARGC(regs->pc) + 2 : cs.nuses;
6848 regs->sp += cs.ndefs;
6849 regs->pc += cs.length;
6850 JS_ASSERT_IF(!cx->fp()->hasImacropc(),
6851 cx->fp()->slots() + cx->fp()->numFixed() +
6852 js_ReconstructStackDepth(cx, cx->fp()->script(), regs->pc) ==
6853 regs->sp);
6856 * If there's a tree call around the point that we deep exited at,
6857 * then state.sp and state.rp were restored to their original
6858 * values before the tree call and sp might be less than deepBailSp,
6859 * which we sampled when we were told to deep bail.
6861 JS_ASSERT(state.deepBailSp >= state.stackBase && state.sp <= state.deepBailSp);
6864 * As explained above, the JIT code stored a result value or values
6865 * on the native stack. Transfer them to the interpreter stack now.
6866 * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
6867 * loop.)
6869 JSValueType* typeMap = innermost->stackTypeMap();
6870 for (int i = 1; i <= cs.ndefs; i++) {
6871 NativeToValue(cx,
6872 regs->sp[-i],
6873 typeMap[innermost->numStackSlots - i],
6874 (jsdouble *) state.deepBailSp
6875 + innermost->sp_adj / sizeof(jsdouble) - i);
6878 return DEEP_BAILED;
6881 while (callstack < rp) {
6882 FrameInfo* fi = *callstack;
6883 /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
6884 JSObject* callee = *(JSObject**)&stack[fi->callerHeight];
6887 * Flush the slots for cx->fp() (which will become cx->fp()->prev after
6888 * SynthesizeFrame). Since a frame's arguments (including callee
6889 * and thisv) are part of the frame, we only want to flush up to the
6890 * next frame's arguments, so set cx->regs->sp to not include said
6891 * arguments. The upcoming call to SynthesizeFrame will reset regs->sp
6892 * to its correct value.
6894 cx->regs->sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc()));
6895 int slots = FlushNativeStackFrame(cx, 0 /* callDepth */, fi->get_typemap(), stack);
6897 /* Finish initializing cx->fp() and push a new cx->fp(). */
6898 SynthesizeFrame(cx, *fi, callee);
6899 #ifdef DEBUG
6900 JSStackFrame* fp = cx->fp();
6901 debug_only_printf(LC_TMTracer,
6902 "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
6903 fp->script()->filename,
6904 js_FramePCToLineNumber(cx, fp),
6905 FramePCOffset(cx, fp),
6906 slots,
6907 (void*)*callstack);
6908 #endif
6910 * Keep track of the additional frames we put on the interpreter stack
6911 * and the native stack slots we consumed.
6913 ++*state.inlineCallCountp;
6914 ++callstack;
6915 stack += slots;
6919 * We already synthesized the frames around the innermost guard. Here we
6920 * just deal with additional frames inside the tree we are bailing out
6921 * from.
6923 JS_ASSERT(rp == callstack);
6924 unsigned calldepth = innermost->calldepth;
6925 unsigned calleeOffset = 0;
6926 for (unsigned n = 0; n < calldepth; ++n) {
6927 /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
6928 calleeOffset += callstack[n]->callerHeight;
6929 JSObject* callee = *(JSObject**)&stack[calleeOffset];
6931 /* Reconstruct the frame. */
6932 SynthesizeFrame(cx, *callstack[n], callee);
6933 ++*state.inlineCallCountp;
6934 #ifdef DEBUG
6935 JSStackFrame* fp = cx->fp();
6936 debug_only_printf(LC_TMTracer,
6937 "synthesized shallow frame for %s:%u@%u\n",
6938 fp->script()->filename, js_FramePCToLineNumber(cx, fp),
6939 FramePCOffset(cx, fp));
6940 #endif
6944 * Adjust sp and pc relative to the tree we exited from (not the tree we
6945 * entered into). These are our final values for sp and pc since
6946 * SynthesizeFrame has already taken care of all frames in between.
6948 JSStackFrame* const fp = cx->fp();
6951 * If we are not exiting from an inlined frame, the state->sp is spbase.
6952 * Otherwise spbase is whatever slots frames around us consume.
6954 cx->regs->pc = innermost->pc;
6955 if (innermost->imacpc)
6956 fp->setImacropc(innermost->imacpc);
6957 else
6958 fp->clearImacropc();
6961 * Set cx->regs->sp for the top frame. Since the top frame does not have a
6962 * FrameInfo (a FrameInfo is only pushed for calls), we basically need to
6963 * compute the offset from fp->slots() to the top of the stack based on the
6964 * number of native slots allocated for this function.
6966 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
6968 uintN slotOffset = innermost->numStackSlots - innermost->numStackSlotsBelowCurrentFrame;
6969 if (fp->isGlobalFrame()) {
6970 /* Global nfixed slots are not kept on the native stack, so add them back. */
6971 slotOffset += fp->globalScript()->nfixed;
6972 } else {
6973 /* A frame's native slots includes args and frame ptrs, so strip them off. */
6974 slotOffset -= NumSlotsBeforeFixed(fp);
6976 cx->regs->sp = fp->slots() + slotOffset;
6978 /* Assert that we computed sp correctly. */
6979 JS_ASSERT_IF(!fp->hasImacropc(),
6980 fp->slots() + fp->numFixed() +
6981 js_ReconstructStackDepth(cx, fp->script(), cx->regs->pc) == cx->regs->sp);
6983 #ifdef EXECUTE_TREE_TIMER
6984 uint64 cycles = rdtsc() - state.startTime;
6985 #elif defined(JS_JIT_SPEW)
6986 uint64 cycles = 0;
6987 #endif
6988 debug_only_printf(LC_TMTracer,
6989 "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
6990 "calldepth=%d, cycles=%llu\n",
6991 fp->script()->filename,
6992 js_FramePCToLineNumber(cx, fp),
6993 FramePCOffset(cx, fp),
6994 js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc],
6995 (void*)lr,
6996 getExitName(lr->exitType),
6997 (long long int)(cx->regs->sp - fp->base()),
6998 calldepth,
6999 (unsigned long long int)cycles);
7001 #ifdef DEBUG
7002 int slots =
7003 #endif
7004 FlushNativeStackFrame(cx, innermost->calldepth, innermost->stackTypeMap(), stack);
7005 JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
7008 * If this trace is part of a tree, later branches might have added
7009 * additional globals for which we don't have any type information
7010 * available in the side exit. We merge in this information from the entry
7011 * type-map. See also the comment in the constructor of TraceRecorder
7012 * regarding why this is always safe to do.
7014 TreeFragment* outermostTree = state.outermostTree;
7015 uint16* gslots = outermostTree->globalSlots->data();
7016 unsigned ngslots = outermostTree->globalSlots->length();
7017 JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
7018 JSValueType* globalTypeMap;
7020 /* Are there enough globals? */
7021 TypeMap& typeMap = *tm->cachedTempTypeMap;
7022 typeMap.clear();
7023 if (innermost->numGlobalSlots == ngslots) {
7024 /* Yes. This is the ideal fast path. */
7025 globalTypeMap = innermost->globalTypeMap();
7026 } else {
7028 * No. Merge the typemap of the innermost entry and exit together. This
7029 * should always work because it is invalid for nested trees or linked
7030 * trees to have incompatible types. Thus, whenever a new global type
7031 * is lazily added into a tree, all dependent and linked trees are
7032 * immediately specialized (see bug 476653).
7034 JS_ASSERT(innermost->root()->nGlobalTypes() == ngslots);
7035 JS_ASSERT(innermost->root()->nGlobalTypes() > innermost->numGlobalSlots);
7036 typeMap.ensure(ngslots);
7037 #ifdef DEBUG
7038 unsigned check_ngslots =
7039 #endif
7040 BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
7041 JS_ASSERT(check_ngslots == ngslots);
7042 globalTypeMap = typeMap.data();
7045 /* Write back interned globals. */
7046 JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
7047 JSObject* globalObj = outermostTree->globalObj;
7048 FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap);
7050 #ifdef JS_JIT_SPEW
7051 if (innermost->exitType != TIMEOUT_EXIT)
7052 AUDIT(sideExitIntoInterpreter);
7053 else
7054 AUDIT(timeoutIntoInterpreter);
7055 #endif
7057 state.innermost = innermost;
7058 return NO_DEEP_BAIL;
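/*
 * Find the bytecode at the bottom of the loop headed by a JSOP_TRACE or
 * JSOP_NOTRACE op, using the source note attached to the header; a header
 * without a note has no recoverable loop bottom.
 */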
7061 static jsbytecode *
7062 GetLoopBottom(JSContext *cx, jsbytecode *pc)
7064 JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
7065 JSScript *script = cx->fp()->script();
7066 jssrcnote *sn = js_GetSrcNote(script, pc);
7067 if (!sn)
7068 return NULL;
7069 return pc + js_GetSrcNoteOffset(sn, 0);
7072 JS_ALWAYS_INLINE void
7073 TraceRecorder::assertInsideLoop()
7075 #ifdef DEBUG
7076 /* Asserts at callDepth == 0 will catch problems at the call op. */
7077 if (callDepth > 0)
7078 return;
7080 jsbytecode *pc = cx->regs->fp->hasImacropc() ? cx->regs->fp->imacropc() : cx->regs->pc;
7081 jsbytecode *beg = (jsbytecode *)tree->ip;
7082 jsbytecode *end = GetLoopBottom(cx, beg);
7085 * In some cases (continue in a while loop), we jump to the goto
7086 * immediately preceding a loop (the one that jumps to the loop
7087 * condition).
7089 JS_ASSERT(pc >= beg - JSOP_GOTO_LENGTH && pc <= end);
7090 #endif
7093 JS_REQUIRES_STACK MonitorResult
7094 RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount)
7096 #ifdef MOZ_TRACEVIS
7097 TraceVisStateObj tvso(cx, S_MONITOR);
7098 #endif
7100 /* Is the recorder currently active? */
7101 if (tm->recorder) {
7102 tm->recorder->assertInsideLoop();
7103 jsbytecode* pc = cx->regs->pc;
7104 if (pc == tm->recorder->tree->ip) {
7105 tm->recorder->closeLoop();
7106 } else {
7107 MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount);
7108 JS_ASSERT((r == MONITOR_RECORDING) == (tm->recorder != NULL));
7109 if (r == MONITOR_RECORDING || r == MONITOR_ERROR)
7110 return r;
7113 * recordLoopEdge will invoke an inner tree if we have a matching
7114 * one. If we arrive here, that tree didn't run to completion and
7115 * instead we mis-matched or the inner tree took a side exit other than
7116 * the loop exit. We are thus no longer guaranteed to be parked on the
7117 * same loop header RecordLoopEdge was called for. In fact, this
7118 * might not even be a loop header at all. Hence if the program counter
7119 * no longer hovers over the inner loop header, return to the
7120 * interpreter and do not attempt to trigger or record a new tree at
7121 * this location.
7123 if (pc != cx->regs->pc) {
7124 #ifdef MOZ_TRACEVIS
7125 tvso.r = R_INNER_SIDE_EXIT;
7126 #endif
7127 return MONITOR_NOT_RECORDING;
7131 JS_ASSERT(!tm->recorder);
7134 * Make sure the shape of the global object still matches (this might flush
7135 * the JIT cache).
7137 JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
7138 uint32 globalShape = -1;
7139 SlotList* globalSlots = NULL;
7141 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
7142 Backoff(tm, cx->regs->pc);
7143 return MONITOR_NOT_RECORDING;
7146 /* Do not enter the JIT code with a pending operation callback. */
7147 if (JS_THREAD_DATA(cx)->interruptFlags) {
7148 #ifdef MOZ_TRACEVIS
7149 tvso.r = R_CALLBACK_PENDING;
7150 #endif
7151 return MONITOR_NOT_RECORDING;
7154 jsbytecode* pc = cx->regs->pc;
7155 uint32 argc = entryFrameArgc(cx);
7157 TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
7160 * If we have no code in the anchor and no peers, we definitively won't be
7161 * able to activate any trees, so start compiling.
7163 if (!f->code() && !f->peer) {
7164 record:
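/*
 * Only spend time compiling once this loop header has been hit HOTLOOP times;
 * a hit count still below 1 here means the loop was backed off after earlier
 * recording failures rather than simply being cold.
 */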
7165 if (++f->hits() < HOTLOOP) {
7166 #ifdef MOZ_TRACEVIS
7167 tvso.r = f->hits() < 1 ? R_BACKED_OFF : R_COLD;
7168 #endif
7169 return MONITOR_NOT_RECORDING;
7172 if (!ScopeChainCheck(cx, f)) {
7173 #ifdef MOZ_TRACEVIS
7174 tvso.r = R_FAIL_SCOPE_CHAIN_CHECK;
7175 #endif
7176 return MONITOR_NOT_RECORDING;
7180 * We can give RecordTree the root peer. If that peer is already taken,
7181 * it will walk the peer list and find us a free slot or allocate a new
7182 * tree if needed.
7184 bool rv = RecordTree(cx, tm, f->first, NULL, NULL, 0, globalSlots);
7185 #ifdef MOZ_TRACEVIS
7186 if (!rv)
7187 tvso.r = R_FAIL_RECORD_TREE;
7188 #endif
7189 return RecordingIfTrue(rv);
7192 debug_only_printf(LC_TMTracer,
7193 "Looking for compat peer %d@%d, from %p (ip: %p)\n",
7194 js_FramePCToLineNumber(cx, cx->fp()),
7195 FramePCOffset(cx, cx->fp()), (void*)f, f->ip);
7197 uintN count;
7198 TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
7199 if (!match) {
7200 if (count < MAXPEERS)
7201 goto record;
7204 * If we hit the max peers ceiling, don't try to look up fragments all
7205 * the time. That's expensive. This must be a rather type-unstable loop.
7207 debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
7208 Blacklist((jsbytecode*) f->root->ip);
7209 #ifdef MOZ_TRACEVIS
7210 tvso.r = R_MAX_PEERS;
7211 #endif
7212 return MONITOR_NOT_RECORDING;
7215 VMSideExit* lr = NULL;
7216 VMSideExit* innermostNestedGuard = NULL;
7218 if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
7219 return MONITOR_ERROR;
7221 if (!lr) {
7222 #ifdef MOZ_TRACEVIS
7223 tvso.r = R_FAIL_EXECUTE_TREE;
7224 #endif
7225 return MONITOR_NOT_RECORDING;
7229 * If we exit on a branch, or on a tree call guard, try to grow the inner
7230 * tree (in case of a branch exit), or the tree nested around the tree we
7231 * exited from (in case of the tree call guard).
7233 bool rv;
7234 switch (lr->exitType) {
7235 case UNSTABLE_LOOP_EXIT:
7236 rv = AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0);
7237 #ifdef MOZ_TRACEVIS
7238 if (!rv)
7239 tvso.r = R_FAIL_STABILIZE;
7240 #endif
7241 return RecordingIfTrue(rv);
7243 case MUL_ZERO_EXIT:
7244 case OVERFLOW_EXIT:
7245 if (lr->exitType == MUL_ZERO_EXIT)
7246 tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
7247 else
7248 tm->oracle->markInstructionUndemotable(cx->regs->pc);
7249 /* FALL THROUGH */
7250 case BRANCH_EXIT:
7251 case CASE_EXIT:
7252 rv = AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL
7253 #ifdef MOZ_TRACEVIS
7254 , &tvso
7255 #endif
7257 return RecordingIfTrue(rv);
7259 case LOOP_EXIT:
7260 if (innermostNestedGuard) {
7261 rv = AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL
7262 #ifdef MOZ_TRACEVIS
7263 , &tvso
7264 #endif
7266 return RecordingIfTrue(rv);
7268 #ifdef MOZ_TRACEVIS
7269 tvso.r = R_NO_EXTEND_OUTER;
7270 #endif
7271 return MONITOR_NOT_RECORDING;
7273 #ifdef MOZ_TRACEVIS
7274 case MISMATCH_EXIT:
7275 tvso.r = R_MISMATCH_EXIT;
7276 return MONITOR_NOT_RECORDING;
7277 case OOM_EXIT:
7278 tvso.r = R_OOM_EXIT;
7279 return MONITOR_NOT_RECORDING;
7280 case TIMEOUT_EXIT:
7281 tvso.r = R_TIMEOUT_EXIT;
7282 return MONITOR_NOT_RECORDING;
7283 case DEEP_BAIL_EXIT:
7284 tvso.r = R_DEEP_BAIL_EXIT;
7285 return MONITOR_NOT_RECORDING;
7286 case STATUS_EXIT:
7287 tvso.r = R_STATUS_EXIT;
7288 return MONITOR_NOT_RECORDING;
7289 #endif
7291 default:
7293 * No, this was an unusual exit (i.e. out of memory/GC), so just resume
7294 * interpretation.
7296 #ifdef MOZ_TRACEVIS
7297 tvso.r = R_OTHER_EXIT;
7298 #endif
7299 return MONITOR_NOT_RECORDING;
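/*
 * monitorRecording is the per-opcode hook invoked while a recorder is live.
 * It services pending flush requests, emits any one-shot guard or unbox
 * requested by the previous opcode, then dispatches to the record_JSOP_*
 * handler generated from jsopcode.tbl. The returned status tells the caller
 * whether to keep recording, whether an imacro was entered, or whether the
 * recorder aborted or completed.
 */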
7303 JS_REQUIRES_STACK AbortableRecordingStatus
7304 TraceRecorder::monitorRecording(JSOp op)
7306 JS_ASSERT(!addPropShapeBefore);
7308 JS_ASSERT(traceMonitor == &cx->compartment->traceMonitor);
7310 TraceMonitor &localtm = *traceMonitor;
7311 debug_only_stmt( JSContext *localcx = cx; )
7312 assertInsideLoop();
7313 JS_ASSERT(!localtm.profile);
7315 /* Process needFlush requests now. */
7316 if (localtm.needFlush) {
7317 ResetJIT(cx, &localtm, FR_DEEP_BAIL);
7318 return ARECORD_ABORTED;
7320 JS_ASSERT(!fragment->lastIns);
7323 * Clear one-shot state used to communicate between record_JSOP_CALL and post-
7324 * opcode-case-guts record hook (record_NativeCallComplete).
7326 pendingSpecializedNative = NULL;
7327 newobj_ins = NULL;
7328 pendingGlobalSlotsToSet.clear();
7330 /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
7331 if (pendingGuardCondition) {
7332 LIns* cond = pendingGuardCondition;
7333 bool expected = true;
7335 /* Put 'cond' in a form suitable for a guard/branch condition if it's not already. */
7336 ensureCond(&cond, &expected);
7337 guard(expected, cond, STATUS_EXIT);
7338 pendingGuardCondition = NULL;
7341 /* Handle one-shot request to unbox the result of a property get or ObjectToIterator. */
7342 if (pendingUnboxSlot) {
7343 LIns* val_ins = get(pendingUnboxSlot);
7345 * We need to know from where to unbox the value. Since pendingUnboxSlot
7346 * is only set in finishGetProp, we can depend on LIns* tracked for
7347 * pendingUnboxSlot to have this information.
7349 LIns* unboxed_ins = unbox_value(*pendingUnboxSlot,
7350 AnyAddress(val_ins->oprnd1(), val_ins->disp()),
7351 snapshot(BRANCH_EXIT));
7352 set(pendingUnboxSlot, unboxed_ins);
7353 pendingUnboxSlot = 0;
7356 debug_only_stmt(
7357 if (LogController.lcbits & LC_TMRecorder) {
7358 debug_only_print0(LC_TMRecorder, "\n");
7359 js_Disassemble1(cx, cx->fp()->script(), cx->regs->pc,
7360 cx->fp()->hasImacropc()
7361 ? 0 : cx->regs->pc - cx->fp()->script()->code,
7362 !cx->fp()->hasImacropc(), stdout);
7367 * If op is not a break or a return from a loop, continue recording and
7368 * follow the trace. We check for imacro-calling bytecodes inside each
7369 * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
7370 * time.
7373 AbortableRecordingStatus status;
7374 #ifdef DEBUG
7375 bool wasInImacro = (cx->fp()->hasImacropc());
7376 #endif
7377 switch (op) {
7378 default:
7379 AbortRecording(cx, "unsupported opcode");
7380 status = ARECORD_ERROR;
7381 break;
7382 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
7383 case op: \
7384 w.comment(#op); \
7385 status = this->record_##op(); \
7386 break;
7387 # include "jsopcode.tbl"
7388 # undef OPDEF
7391 /* N.B. |this| may have been deleted. */
7393 if (!JSOP_IS_IMACOP(op)) {
7394 JS_ASSERT(status != ARECORD_IMACRO);
7395 JS_ASSERT_IF(!wasInImacro, !localcx->fp()->hasImacropc());
7398 if (localtm.recorder) {
7399 JS_ASSERT(status != ARECORD_ABORTED);
7400 JS_ASSERT(localtm.recorder == this);
7402 /* |this| recorder completed, but a new one started; keep recording. */
7403 if (status == ARECORD_COMPLETED)
7404 return ARECORD_CONTINUE;
7406 /* Handle lazy aborts; propagate the 'error' status. */
7407 if (StatusAbortsRecorderIfActive(status)) {
7408 AbortRecording(cx, js_CodeName[op]);
7409 return status == ARECORD_ERROR ? ARECORD_ERROR : ARECORD_ABORTED;
7412 if (outOfMemory() || OverfullJITCache(cx, &localtm)) {
7413 ResetJIT(cx, &localtm, FR_OOM);
7416 * If the status returned was ARECORD_IMACRO, then we just
7417 * changed cx->regs, we need to tell the interpreter to sync
7418 * its local variables.
7420 return status == ARECORD_IMACRO ? ARECORD_IMACRO_ABORTED : ARECORD_ABORTED;
7422 } else {
7423 JS_ASSERT(status == ARECORD_COMPLETED ||
7424 status == ARECORD_ABORTED ||
7425 status == ARECORD_ERROR);
7427 return status;
7430 JS_REQUIRES_STACK TraceRecorder::AbortResult
7431 AbortRecording(JSContext* cx, const char* reason)
7433 #ifdef DEBUG
7434 JS_ASSERT(TRACE_RECORDER(cx));
7435 return TRACE_RECORDER(cx)->finishAbort(reason);
7436 #else
7437 return TRACE_RECORDER(cx)->finishAbort("[no reason]");
7438 #endif
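/*
 * CheckForSSE2 probes CPUID function 1 and tests bit 26 of EDX, the SSE2
 * feature flag. The X86_FORCE_SSE2 environment variable overrides the probe,
 * and each supported compiler (MSVC, GCC, Sun Studio) gets its own inline-asm
 * variant; the GCC version preserves EBX because it is the PIC register.
 */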
7441 #if defined NANOJIT_IA32
7442 static bool
7443 CheckForSSE2()
7445 char *c = getenv("X86_FORCE_SSE2");
7446 if (c)
7447 return (!strcmp(c, "true") ||
7448 !strcmp(c, "1") ||
7449 !strcmp(c, "yes"));
7451 int features = 0;
7452 #if defined _MSC_VER
7453 __asm
7455 pushad
7456 mov eax, 1
7457 cpuid
7458 mov features, edx
7459 popad
7461 #elif defined __GNUC__
7462 asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
7463 "mov $0x01, %%eax\n"
7464 "cpuid\n"
7465 "mov %%edx, %0\n"
7466 "xchg %%esi, %%ebx\n"
7467 : "=m" (features)
7468 : /* We have no inputs */
7469 : "%eax", "%esi", "%ecx", "%edx"
7471 #elif defined __SUNPRO_C || defined __SUNPRO_CC
7472 asm("push %%ebx\n"
7473 "mov $0x01, %%eax\n"
7474 "cpuid\n"
7475 "pop %%ebx\n"
7476 : "=d" (features)
7477 : /* We have no inputs */
7478 : "%eax", "%ecx"
7480 #endif
7481 return (features & (1<<26)) != 0;
7483 #endif
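/*
 * ARM capability detection. On WinCE we execute probe instructions from
 * jswince.asm under __try/__except and treat an illegal-instruction fault as
 * "feature absent"; debugger exception notifications are suppressed around
 * the probes. On Linux and Android we parse /proc/self/auxv (AT_HWCAP and
 * AT_PLATFORM) or /proc/cpuinfo instead. Unknown platforms fall back to
 * assuming ARMv4 with no VFP.
 */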
7485 #if defined(NANOJIT_ARM)
7487 #if defined(_MSC_VER) && defined(WINCE)
7489 // these come in from jswince.asm
7490 extern "C" int js_arm_try_armv5_op();
7491 extern "C" int js_arm_try_armv6_op();
7492 extern "C" int js_arm_try_armv7_op();
7493 extern "C" int js_arm_try_vfp_op();
7495 static unsigned int
7496 arm_check_arch()
7498 unsigned int arch = 4;
7499 __try {
7500 js_arm_try_armv5_op();
7501 arch = 5;
7502 js_arm_try_armv6_op();
7503 arch = 6;
7504 js_arm_try_armv7_op();
7505 arch = 7;
7506 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
7508 return arch;
7511 static bool
7512 arm_check_vfp()
7514 #ifdef WINCE_WINDOWS_MOBILE
7515 return false;
7516 #else
7517 bool ret = false;
7518 __try {
7519 js_arm_try_vfp_op();
7520 ret = true;
7521 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
7522 ret = false;
7524 return ret;
7525 #endif
7528 #define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1
7530 /* See "Suppressing Exception Notifications while Debugging", at
7531 * http://msdn.microsoft.com/en-us/library/ms924252.aspx */
7533 static void
7534 disable_debugger_exceptions()
7536 // 2 == TLSSLOT_KERNEL
7537 DWORD kctrl = (DWORD) TlsGetValue(2);
7538 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7539 kctrl |= 0x12;
7540 TlsSetValue(2, (LPVOID) kctrl);
7543 static void
7544 enable_debugger_exceptions()
7546 // 2 == TLSSLOT_KERNEL
7547 DWORD kctrl = (DWORD) TlsGetValue(2);
7548 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7549 kctrl &= ~0x12;
7550 TlsSetValue(2, (LPVOID) kctrl);
7553 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
7555 // Assume ARMv4 by default.
7556 static unsigned int arm_arch = 4;
7557 static bool arm_has_vfp = false;
7558 static bool arm_has_neon = false;
7559 static bool arm_has_iwmmxt = false;
7560 static bool arm_tests_initialized = false;
7562 #ifdef ANDROID
7563 // we're actually reading /proc/cpuinfo, but oh well
7564 static void
7565 arm_read_auxv()
7567 char buf[1024];
7568 char* pos;
7569 const char* ver_token = "CPU architecture: ";
7570 FILE* f = fopen("/proc/cpuinfo", "r");
7571 fread(buf, sizeof(char), 1024, f);
7572 fclose(f);
7573 pos = strstr(buf, ver_token);
7574 if (pos) {
7575 int ver = *(pos + strlen(ver_token)) - '0';
7576 arm_arch = ver;
7578 arm_has_neon = strstr(buf, "neon") != NULL;
7579 arm_has_vfp = strstr(buf, "vfp") != NULL;
7580 arm_has_iwmmxt = strstr(buf, "iwmmxt") != NULL;
7581 arm_tests_initialized = true;
7584 #else
7586 static void
7587 arm_read_auxv()
7589 int fd;
7590 Elf32_auxv_t aux;
7592 fd = open("/proc/self/auxv", O_RDONLY);
7593 if (fd > 0) {
7594 while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
7595 if (aux.a_type == AT_HWCAP) {
7596 uint32_t hwcap = aux.a_un.a_val;
7597 if (getenv("ARM_FORCE_HWCAP"))
7598 hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
7599 else if (getenv("_SBOX_DIR"))
7600 continue; // Ignore the rest, if we're running in scratchbox
7601 // hardcode these values to avoid depending on specific versions
7602 // of the hwcap header, e.g. HWCAP_NEON
7603 arm_has_vfp = (hwcap & 64) != 0;
7604 arm_has_iwmmxt = (hwcap & 512) != 0;
7605 // this flag is only present on kernel 2.6.29 and later
7606 arm_has_neon = (hwcap & 4096) != 0;
7607 } else if (aux.a_type == AT_PLATFORM) {
7608 const char *plat = (const char*) aux.a_un.a_val;
7609 if (getenv("ARM_FORCE_PLATFORM"))
7610 plat = getenv("ARM_FORCE_PLATFORM");
7611 else if (getenv("_SBOX_DIR"))
7612 continue; // Ignore the rest, if we're running in scratchbox
7613 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
7614 // or big-endian variants and the digit indicates the version of the platform.
7615 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
7616 // processors. Architectures newer than ARMv7 are assumed to be
7617 // backwards-compatible with ARMv7.
7618 if ((plat[0] == 'v') &&
7619 (plat[1] >= '4') && (plat[1] <= '9') &&
7620 ((plat[2] == 'l') || (plat[2] == 'b')))
7622 arm_arch = plat[1] - '0';
7626 close (fd);
7628 // Kernels older than 2.6.29 don't report the NEON hwcap bit, so unless
7629 // the ARM_TRUST_HWCAP env var says to trust HWCAP, assume NEON on >= ARMv7.
7630 if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
7631 arm_has_neon = true;
7634 arm_tests_initialized = true;
7637 #endif
7639 static unsigned int
7640 arm_check_arch()
7642 if (!arm_tests_initialized)
7643 arm_read_auxv();
7645 return arm_arch;
7648 static bool
7649 arm_check_vfp()
7651 if (!arm_tests_initialized)
7652 arm_read_auxv();
7654 return arm_has_vfp;
7657 #else
7658 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
7659 static unsigned int
7660 arm_check_arch() { return 4; }
7661 static bool
7662 arm_check_vfp() { return false; }
7663 #endif
7665 #ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
7666 static void
7667 enable_debugger_exceptions() { }
7668 static void
7669 disable_debugger_exceptions() { }
7670 #endif
7672 #endif /* NANOJIT_ARM */
7674 #define K *1024
7675 #define M K K
7676 #define G K M
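/*
 * These macros spell byte sizes as repeated multiplications: "128 K" expands
 * to "128 *1024" and "1 G" to "1 *1024 *1024 *1024", so SetMaxCodeCacheBytes
 * below clamps the code cache limit to the range [128 KB, 1 GB].
 */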
7678 void
7679 SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
7681 if (bytes > 1 G)
7682 bytes = 1 G;
7683 if (bytes < 128 K)
7684 bytes = 128 K;
7685 JS_THREAD_DATA(cx)->maxCodeCacheBytes = bytes;
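/*
 * InitJIT sets up a TraceMonitor: one-time debug logging and CPU feature
 * detection, then the oracle, the record-attempt and loop-profile hash maps,
 * the reserve-backed data/trace/temp allocators, the code allocator, the
 * frame-info cache, and the trace-native storage. Any allocation failure
 * jumps to the error label, which tears everything down again via FinishJIT.
 */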
7688 bool
7689 InitJIT(TraceMonitor *tm)
7691 #if defined JS_JIT_SPEW
7692 tm->profAlloc = NULL;
7693 /* Set up debug logging. */
7694 if (!did_we_set_up_debug_logging) {
7695 InitJITLogController();
7696 did_we_set_up_debug_logging = true;
7698 /* Set up fragprofiling, if required. */
7699 if (LogController.lcbits & LC_FragProfile) {
7700 tm->profAlloc = js_new<VMAllocator>((char*)NULL, 0); /* no reserve needed in debug builds */
7701 if (!tm->profAlloc)
7702 goto error;
7703 tm->profTab = new (*tm->profAlloc) FragStatsMap(*tm->profAlloc);
7705 tm->lastFragID = 0;
7706 #else
7707 PodZero(&LogController);
7708 #endif
7710 if (!did_we_check_processor_features) {
7711 #if defined NANOJIT_IA32
7712 avmplus::AvmCore::config.i386_use_cmov =
7713 avmplus::AvmCore::config.i386_sse2 = CheckForSSE2();
7714 avmplus::AvmCore::config.i386_fixed_esp = true;
7715 #endif
7716 #if defined NANOJIT_ARM
7718 disable_debugger_exceptions();
7720 bool arm_vfp = arm_check_vfp();
7721 unsigned int arm_arch = arm_check_arch();
7723 enable_debugger_exceptions();
7725 avmplus::AvmCore::config.arm_vfp = arm_vfp;
7726 avmplus::AvmCore::config.soft_float = !arm_vfp;
7727 avmplus::AvmCore::config.arm_arch = arm_arch;
7729 // Sanity-check the configuration detection.
7730 // * We don't understand architectures prior to ARMv4.
7731 JS_ASSERT(arm_arch >= 4);
7732 #endif
7733 did_we_check_processor_features = true;
7736 #define CHECK_ALLOC(lhs, rhs) \
7737 do { lhs = (rhs); if (!lhs) goto error; } while (0)
7739 CHECK_ALLOC(tm->oracle, js_new<Oracle>());
7741 tm->profile = NULL;
7743 CHECK_ALLOC(tm->recordAttempts, js_new<RecordAttemptMap>());
7744 if (!tm->recordAttempts->init(PC_HASH_COUNT))
7745 goto error;
7747 CHECK_ALLOC(tm->loopProfiles, js_new<LoopProfileMap>());
7748 if (!tm->loopProfiles->init(PC_HASH_COUNT))
7749 goto error;
7751 tm->flushEpoch = 0;
7753 char *dataReserve, *traceReserve, *tempReserve;
7754 CHECK_ALLOC(dataReserve, (char *)js_malloc(DataReserveSize));
7755 CHECK_ALLOC(traceReserve, (char *)js_malloc(TraceReserveSize));
7756 CHECK_ALLOC(tempReserve, (char *)js_malloc(TempReserveSize));
7757 CHECK_ALLOC(tm->dataAlloc, js_new<VMAllocator>(dataReserve, DataReserveSize));
7758 CHECK_ALLOC(tm->traceAlloc, js_new<VMAllocator>(traceReserve, TraceReserveSize));
7759 CHECK_ALLOC(tm->tempAlloc, js_new<VMAllocator>(tempReserve, TempReserveSize));
7760 CHECK_ALLOC(tm->codeAlloc, js_new<CodeAlloc>());
7761 CHECK_ALLOC(tm->frameCache, js_new<FrameInfoCache>(tm->dataAlloc));
7762 CHECK_ALLOC(tm->storage, js_new<TraceNativeStorage>());
7763 CHECK_ALLOC(tm->cachedTempTypeMap, js_new<TypeMap>((Allocator*)NULL, tm->oracle));
7764 tm->flush();
7765 verbose_only( tm->branches = NULL; )
7767 #if !defined XP_WIN
7768 debug_only(PodZero(&jitstats));
7769 #endif
7771 #ifdef JS_JIT_SPEW
7772 /* Architecture properties used by test cases. */
7773 jitstats.archIsIA32 = 0;
7774 jitstats.archIs64BIT = 0;
7775 jitstats.archIsARM = 0;
7776 jitstats.archIsSPARC = 0;
7777 jitstats.archIsPPC = 0;
7778 #if defined NANOJIT_IA32
7779 jitstats.archIsIA32 = 1;
7780 #endif
7781 #if defined NANOJIT_64BIT
7782 jitstats.archIs64BIT = 1;
7783 #endif
7784 #if defined NANOJIT_ARM
7785 jitstats.archIsARM = 1;
7786 #endif
7787 #if defined NANOJIT_SPARC
7788 jitstats.archIsSPARC = 1;
7789 #endif
7790 #if defined NANOJIT_PPC
7791 jitstats.archIsPPC = 1;
7792 #endif
7793 #if defined NANOJIT_X64
7794 jitstats.archIsAMD64 = 1;
7795 #endif
7796 #endif
7798 if (!tm->tracedScripts.init())
7799 goto error;
7800 return true;
7802 error:
7803 /* On error, don't rely on the compartment destructor being called. */
7804 FinishJIT(tm);
7805 return false;
7809 * NB: FinishJIT needs to work even when InitJIT fails. Each pointer must be
7810 * checked before it's dereferenced, as it may not have been allocated.
7812 void
7813 FinishJIT(TraceMonitor *tm)
7815 JS_ASSERT(!tm->recorder);
7816 JS_ASSERT(!tm->profile);
7818 #ifdef JS_JIT_SPEW
7819 if (jitstats.recorderStarted) {
7820 char sep = ':';
7821 debug_only_print0(LC_TMStats, "recorder");
7822 #define RECORDER_JITSTAT(_ident, _name) \
7823 debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep, \
7824 (unsigned long long int)jitstats._ident); \
7825 sep = ',';
7826 #define JITSTAT(x) /* nothing */
7827 #include "jitstats.tbl"
7828 #undef JITSTAT
7829 #undef RECORDER_JITSTAT
7830 debug_only_print0(LC_TMStats, "\n");
7832 sep = ':';
7833 debug_only_print0(LC_TMStats, "monitor");
7834 #define MONITOR_JITSTAT(_ident, _name) \
7835 debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep, \
7836 (unsigned long long int)jitstats._ident); \
7837 sep = ',';
7838 #define JITSTAT(x) /* nothing */
7839 #include "jitstats.tbl"
7840 #undef JITSTAT
7841 #undef MONITOR_JITSTAT
7842 debug_only_print0(LC_TMStats, "\n");
7844 #endif
7846 js_delete(tm->recordAttempts);
7847 js_delete(tm->loopProfiles);
7848 js_delete(tm->oracle);
7850 #ifdef DEBUG
7851 // Recover profiling data from expiring Fragments, and display
7852 // final results.
7853 if (LogController.lcbits & LC_FragProfile) {
7855 for (Seq<Fragment*>* f = tm->branches; f; f = f->tail)
7856 FragProfiling_FragFinalizer(f->head, tm);
7858 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
7859 for (TreeFragment *f = tm->vmfragments[i]; f; f = f->next) {
7860 JS_ASSERT(f->root == f);
7861 for (TreeFragment *p = f; p; p = p->peer)
7862 FragProfiling_FragFinalizer(p, tm);
7866 if (tm->profTab)
7867 FragProfiling_showResults(tm);
7868 js_delete(tm->profAlloc);
7870 } else {
7871 NanoAssert(!tm->profTab);
7872 NanoAssert(!tm->profAlloc);
7874 #endif
7876 PodArrayZero(tm->vmfragments);
7878 js_delete(tm->frameCache);
7879 tm->frameCache = NULL;
7881 js_delete(tm->codeAlloc);
7882 tm->codeAlloc = NULL;
7884 js_delete(tm->dataAlloc);
7885 tm->dataAlloc = NULL;
7887 js_delete(tm->traceAlloc);
7888 tm->traceAlloc = NULL;
7890 js_delete(tm->tempAlloc);
7891 tm->tempAlloc = NULL;
7893 js_delete(tm->storage);
7894 tm->storage = NULL;
7896 js_delete(tm->cachedTempTypeMap);
7897 tm->cachedTempTypeMap = NULL;
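/*
 * PurgeScriptFragments is called when a script goes away: it drops the
 * script's loop profiles and record attempts and trashes every TreeFragment
 * whose ip falls inside [script->code, script->code + script->length).
 */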
7900 JS_REQUIRES_STACK void
7901 PurgeScriptFragments(TraceMonitor* tm, JSScript* script)
7903 debug_only_printf(LC_TMTracer,
7904 "Purging fragments for JSScript %p.\n", (void*)script);
7906 /* A recorder script is being evaluated and cannot be destroyed or GC-ed. */
7907 JS_ASSERT_IF(tm->recorder,
7908 JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length);
7910 for (LoopProfileMap::Enum e(*tm->loopProfiles); !e.empty(); e.popFront()) {
7911 if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
7912 e.removeFront();
7915 TracedScriptSet::Ptr found = tm->tracedScripts.lookup(script);
7916 if (!found)
7917 return;
7918 tm->tracedScripts.remove(found);
7920 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
7921 TreeFragment** fragp = &tm->vmfragments[i];
7922 while (TreeFragment* frag = *fragp) {
7923 if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
7924 /* This fragment is associated with the script. */
7925 debug_only_printf(LC_TMTracer,
7926 "Disconnecting TreeFragment %p "
7927 "with ip %p, in range [%p,%p).\n",
7928 (void*)frag, frag->ip, script->code,
7929 script->code + script->length);
7931 JS_ASSERT(frag->root == frag);
7932 *fragp = frag->next;
7933 do {
7934 verbose_only( FragProfiling_FragFinalizer(frag, tm); )
7935 TrashTree(frag);
7936 } while ((frag = frag->peer) != NULL);
7937 continue;
7939 fragp = &frag->next;
7943 RecordAttemptMap &table = *tm->recordAttempts;
7944 for (RecordAttemptMap::Enum e(table); !e.empty(); e.popFront()) {
7945 if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
7946 e.removeFront();
7950 bool
7951 OverfullJITCache(JSContext *cx, TraceMonitor* tm)
7954 * You might imagine the outOfMemory flag on the allocator is sufficient
7955 * to model the notion of "running out of memory", but there are actually
7956 * two separate issues involved:
7958 * 1. The process truly running out of memory: malloc() or mmap()
7959 * failed.
7961 * 2. The limit we put on the "intended size" of the tracemonkey code
7962 * cache, in pages, has been exceeded.
7964 * Condition 1 doesn't happen very often, but we're obliged to try to
7965 * safely shut down and signal the rest of spidermonkey when it
7966 * does. Condition 2 happens quite regularly.
7968 * Presently, the code in this file doesn't check the outOfMemory condition
7969 * often enough, and frequently misuses the unchecked results of
7970 * lirbuffer insertions on the assumption that it will notice the
7971 * outOfMemory flag "soon enough" when it returns to the monitorRecording
7972 * function. This turns out to be a false assumption if we use outOfMemory
7973 * to signal condition 2: we regularly provoke "passing our intended
7974 * size" and regularly fail to notice it in time to prevent writing
7975 * over the end of an artificially self-limited LIR buffer.
7977 * To mitigate, though not completely solve, this problem, we're
7978 * modeling the two forms of memory exhaustion *separately* for the
7979 * time being: condition 1 is handled by the outOfMemory flag inside
7980 * nanojit, and condition 2 is being handled independently *here*. So
7981 * we construct our allocators to use all available memory they like,
7982 * and only report outOfMemory to us when there is literally no OS memory
7983 * left. Merely purging our cache when we hit our highwater mark is
7984 * handled by the (few) callers of this function.
7987 jsuint maxsz = JS_THREAD_DATA(cx)->maxCodeCacheBytes;
7988 return (tm->codeAlloc->size() + tm->dataAlloc->size() + tm->traceAlloc->size() > maxsz);
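/*
 * DeepBail is called when a builtin invoked from trace needs to fall back to
 * the interpreter mid-operation. It finds the tracing context on this thread,
 * leaves the tree via LeaveTree, and flags BUILTIN_BAILED so the trace exits
 * through a STATUS_EXIT guard once the builtin returns.
 */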
7991 JS_FORCES_STACK JS_FRIEND_API(void)
7992 DeepBail(JSContext *cx)
7994 JS_ASSERT(JS_ON_TRACE(cx));
7997 * Exactly one context on the current thread is on trace. Find out which
7998 * one. (Most callers cannot guarantee that it's cx.)
8000 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
8002 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
8003 JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
8005 /* It's a bug if a non-FAIL_STATUS builtin gets here. */
8006 JS_ASSERT(tm->bailExit);
8008 tm->tracecx = NULL;
8009 debug_only_print0(LC_TMTracer, "Deep bail.\n");
8010 LeaveTree(tm, *tm->tracerState, tm->bailExit);
8011 tm->bailExit = NULL;
8013 TracerState* state = tm->tracerState;
8014 state->builtinStatus |= BUILTIN_BAILED;
8017 * Between now and the LeaveTree in ExecuteTree, |tm->storage| may be
8018 * reused if another trace executes before the currently executing native
8019 * returns. If this happens, at least some of the native stack will be
8020 * clobbered, potentially all of it. This is called a clobbering deep bail.
8022 * The nested trace will complete before we return to the deep-bailed one,
8023 * hence the invariant is maintained that only one trace uses |tm->storage|
8024 * at a time.
8026 * When we return to the deep-bailed trace, it will very soon reach a
8027 * STATUS_EXIT guard and bail out. Most of the native stack will just be
8028 * thrown away. However, LeaveTree will copy a few slots from the top of
8029 * the native stack to the interpreter stack--only those slots written by
8030 * the current bytecode instruction. To make sure LeaveTree has correct
8031 * data to copy from the native stack to the operand stack, we have this
8032 * rule: every caller of enterDeepBailCall must ensure that between the
8033 * deep bail call and the STATUS_EXIT guard, all those slots are written.
8035 * The rule is a bit subtle. For example, JSOP_MOREITER uses a slot which
8036 * it never writes to; in order to satisfy the above rule,
8037 * record_JSOP_MOREITER emits code to write the value back to the slot
8038 * anyway.
8040 state->deepBailSp = state->sp;
8043 JS_REQUIRES_STACK Value&
8044 TraceRecorder::argval(unsigned n) const
8046 JS_ASSERT(n < cx->fp()->numFormalArgs());
8047 return cx->fp()->formalArg(n);
8050 JS_REQUIRES_STACK Value&
8051 TraceRecorder::varval(unsigned n) const
8053 JS_ASSERT(n < cx->fp()->numSlots());
8054 return cx->fp()->slots()[n];
8057 JS_REQUIRES_STACK Value&
8058 TraceRecorder::stackval(int n) const
8060 return cx->regs->sp[n];
8063 JS_REQUIRES_STACK void
8064 TraceRecorder::updateAtoms()
8066 JSScript *script = cx->fp()->script();
8067 atoms = FrameAtomBase(cx, cx->fp());
8068 consts = (cx->fp()->hasImacropc() || !JSScript::isValidOffset(script->constOffset))
8070 : script->consts()->vector;
8071 strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
8074 JS_REQUIRES_STACK void
8075 TraceRecorder::updateAtoms(JSScript *script)
8077 atoms = script->atomMap.vector;
8078 consts = JSScript::isValidOffset(script->constOffset) ? script->consts()->vector : 0;
8079 strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
8083 * Generate LIR to compute the scope chain.
8085 JS_REQUIRES_STACK LIns*
8086 TraceRecorder::scopeChain()
8088 return cx->fp()->isFunctionFrame()
8089 ? getFrameObjPtr(cx->fp()->addressOfScopeChain())
8090 : entryScopeChain();
8094 * Generate LIR to compute the scope chain on entry to the trace. This is
8095 * generally useful only for getting to the global object, because only
8096 * the global object is guaranteed to be present.
8098 JS_REQUIRES_STACK LIns*
8099 TraceRecorder::entryScopeChain() const
8101 return w.ldpStackFrameScopeChain(entryFrameIns());
8105 * Generate LIR to compute the stack frame on entry to the trace.
8107 JS_REQUIRES_STACK LIns*
8108 TraceRecorder::entryFrameIns() const
8110 return w.ldpFrameFp(w.ldpContextField(regs));
8114 * Return the frame of a call object if that frame is part of the current
8115 * trace. |depthp| is an optional outparam: if it is non-null, it will be
8116 * filled in with the depth of the call object's frame relevant to cx->fp().
8118 JS_REQUIRES_STACK JSStackFrame*
8119 TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const
8121 JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate();
8122 JSStackFrame* fp = cx->fp();
8123 for (unsigned depth = 0; depth <= callDepth; ++depth) {
8124 if (fp == ofp) {
8125 if (depthp)
8126 *depthp = depth;
8127 return ofp;
8129 if (!(fp = fp->prev()))
8130 break;
8132 return NULL;
8135 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureVar, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
8136 0, ACCSET_STORE_ANY)
8137 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureArg, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
8138 0, ACCSET_STORE_ANY)
8141 * Search the scope chain for a property lookup operation at the current PC and
8142 * generate LIR to access the given property. Return RECORD_CONTINUE on success,
8143 * otherwise abort and return RECORD_STOP. There are 3 outparams:
8145 * vp the address of the current property value
8146 * ins LIR instruction representing the property value on trace
8147 * NameResult describes how to look up name; see comment for NameResult in jstracer.h
8149 JS_REQUIRES_STACK AbortableRecordingStatus
8150 TraceRecorder::scopeChainProp(JSObject* chainHead, Value*& vp, LIns*& ins, NameResult& nr)
8152 JS_ASSERT(chainHead == &cx->fp()->scopeChain());
8153 JS_ASSERT(chainHead != globalObj);
8155 TraceMonitor &localtm = *traceMonitor;
8157 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
8158 JSObject* obj2;
8159 JSProperty* prop;
8160 JSObject *obj = chainHead;
8161 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
8162 RETURN_ERROR_A("error in js_FindProperty");
8164 /* js_FindProperty can reenter the interpreter and kill |this|. */
8165 if (!localtm.recorder)
8166 return ARECORD_ABORTED;
8168 if (!prop)
8169 RETURN_STOP_A("failed to find name in non-global scope chain");
8171 if (obj == globalObj) {
8172 // Even if the property is on the global object, we must guard against
8173 // the creation of properties that shadow the property in the middle
8174 // of the scope chain.
8175 LIns *head_ins;
8176 if (cx->fp()->isFunctionFrame()) {
8177 // Skip any Call object when inside a function. The compiler resolves
8178 // any reference to a Call name statically, so we do not need to match
8179 // the shapes of the Call objects.
8180 chainHead = cx->fp()->callee().getParent();
8181 head_ins = w.ldpObjParent(get(&cx->fp()->calleeValue()));
8182 } else {
8183 head_ins = scopeChain();
8185 LIns *obj_ins;
8186 CHECK_STATUS_A(traverseScopeChain(chainHead, head_ins, obj, obj_ins));
8188 if (obj2 != obj)
8189 RETURN_STOP_A("prototype property");
8191 Shape* shape = (Shape*) prop;
8192 if (!isValidSlot(obj, shape))
8193 return ARECORD_STOP;
8194 if (!lazilyImportGlobalSlot(shape->slot))
8195 RETURN_STOP_A("lazy import of global slot failed");
8196 vp = &obj->getSlotRef(shape->slot);
8197 ins = get(vp);
8198 nr.tracked = true;
8199 return ARECORD_CONTINUE;
8202 if (obj == obj2 && obj->isCall()) {
8203 AbortableRecordingStatus status =
8204 InjectStatus(callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr));
8205 return status;
8208 RETURN_STOP_A("fp->scopeChain is not global or active call object");
8212 * Generate LIR to access a property of a Call object.
8214 JS_REQUIRES_STACK RecordingStatus
8215 TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp,
8216 LIns*& ins, NameResult& nr)
8218 Shape *shape = (Shape*) prop;
8220 JSOp op = JSOp(*cx->regs->pc);
8221 uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
8222 if (setflags && !shape->writable())
8223 RETURN_STOP("writing to a read-only property");
8225 uintN slot = uint16(shape->shortid);
8227 vp = NULL;
8228 JSStackFrame* cfp = (JSStackFrame*) obj->getPrivate();
8229 if (cfp) {
8230 if (shape->getterOp() == GetCallArg) {
8231 JS_ASSERT(slot < cfp->numFormalArgs());
8232 vp = &cfp->formalArg(slot);
8233 nr.v = *vp;
8234 } else if (shape->getterOp() == GetCallVar ||
8235 shape->getterOp() == GetCallVarChecked) {
8236 JS_ASSERT(slot < cfp->numSlots());
8237 vp = &cfp->slots()[slot];
8238 nr.v = *vp;
8239 } else {
8240 RETURN_STOP("dynamic property of Call object");
8243 // Now assert that our use of shape->shortid was in fact kosher.
8244 JS_ASSERT(shape->hasShortID());
8246 if (frameIfInRange(obj)) {
8247 // At this point we are guaranteed to be looking at an active call object
8248 // whose properties are stored in the corresponding JSStackFrame.
8249 ins = get(vp);
8250 nr.tracked = true;
8251 return RECORD_CONTINUE;
8253 } else {
8254 // Call objects do not yet have shape->isMethod() properties, but they
8255 // should. See bug 514046, for which this code is future-proof. Remove
8256 // this comment when that bug is fixed (so, FIXME: 514046).
8257 #ifdef DEBUG
8258 JSBool rv =
8259 #endif
8260 js_GetPropertyHelper(cx, obj, shape->id,
8261 (op == JSOP_CALLNAME)
8262 ? JSGET_NO_METHOD_BARRIER
8263 : JSGET_METHOD_BARRIER,
8264 &nr.v);
8265 JS_ASSERT(rv);
8268 LIns* obj_ins;
8269 JSObject* parent = cx->fp()->callee().getParent();
8270 LIns* parent_ins = w.ldpObjParent(get(&cx->fp()->calleeValue()));
8271 CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins));
8273 if (!cfp) {
8274 // Because the parent guard in guardCallee ensures this Call object
8275 // will be the same object now and on trace, and because once a Call
8276 // object loses its frame it never regains one, on trace we will also
8277 // have a null private in the Call object. So all we need to do is
8278 // write the value to the Call object's slot.
8279 if (shape->getterOp() == GetCallArg) {
8280 JS_ASSERT(slot < ArgClosureTraits::slot_count(obj));
8281 slot += ArgClosureTraits::slot_offset(obj);
8282 } else if (shape->getterOp() == GetCallVar ||
8283 shape->getterOp() == GetCallVarChecked) {
8284 JS_ASSERT(slot < VarClosureTraits::slot_count(obj));
8285 slot += VarClosureTraits::slot_offset(obj);
8286 } else {
8287 RETURN_STOP("dynamic property of Call object");
8290 // Now assert that our use of shape->shortid was in fact kosher.
8291 JS_ASSERT(shape->hasShortID());
8293 ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT));
8294 } else {
8295 ClosureVarInfo* cv = new (traceAlloc()) ClosureVarInfo();
8296 cv->slot = slot;
8297 #ifdef DEBUG
8298 cv->callDepth = callDepth;
8299 #endif
8301 // Even though the frame is out of range, later we might be called as an
8302 // inner trace such that the target variable is defined in the outer trace
8303 // entry frame. For simplicity, we just fall off trace.
8304 guard(false,
8305 w.eqp(entryFrameIns(), w.ldpObjPrivate(obj_ins)),
8306 MISMATCH_EXIT);
8308 LIns* outp = w.allocp(sizeof(double));
8309 LIns* args[] = {
8310 outp,
8311 w.nameImmpNonGC(cv),
8312 obj_ins,
8313 cx_ins
8315 const CallInfo* ci;
8316 if (shape->getterOp() == GetCallArg) {
8317 ci = &GetClosureArg_ci;
8318 } else if (shape->getterOp() == GetCallVar ||
8319 shape->getterOp() == GetCallVarChecked) {
8320 ci = &GetClosureVar_ci;
8321 } else {
8322 RETURN_STOP("dynamic property of Call object");
8325 // Now assert that our use of shape->shortid was in fact kosher.
8326 JS_ASSERT(shape->hasShortID());
8328 LIns* call_ins = w.call(ci, args);
8330 JSValueType type = getCoercedType(nr.v);
8331 guard(true,
8332 w.name(w.eqi(call_ins, w.immi(type)), "guard(type-stable name access)"),
8333 BRANCH_EXIT);
8334 ins = stackLoad(AllocSlotsAddress(outp), type);
8336 nr.tracked = false;
8337 nr.obj = obj;
8338 nr.obj_ins = obj_ins;
8339 nr.shape = shape;
8340 return RECORD_CONTINUE;
8343 JS_REQUIRES_STACK LIns*
8344 TraceRecorder::arg(unsigned n)
8346 return get(&argval(n));
8349 JS_REQUIRES_STACK void
8350 TraceRecorder::arg(unsigned n, LIns* i)
8352 set(&argval(n), i);
8355 JS_REQUIRES_STACK LIns*
8356 TraceRecorder::var(unsigned n)
8358 return get(&varval(n));
8361 JS_REQUIRES_STACK void
8362 TraceRecorder::var(unsigned n, LIns* i)
8364 set(&varval(n), i);
8367 JS_REQUIRES_STACK LIns*
8368 TraceRecorder::stack(int n)
8370 return get(&stackval(n));
8373 JS_REQUIRES_STACK void
8374 TraceRecorder::stack(int n, LIns* i)
8376 set(&stackval(n), i);
8379 /* Leave trace iff one operand is negative and the other is non-negative. */
8380 JS_REQUIRES_STACK void
8381 TraceRecorder::guardNonNeg(LIns* d0, LIns* d1, VMSideExit* exit)
8383 if (d0->isImmI())
8384 JS_ASSERT(d0->immI() >= 0);
8385 else
8386 guard(false, w.ltiN(d0, 0), exit);
8388 if (d1->isImmI())
8389 JS_ASSERT(d1->immI() >= 0);
8390 else
8391 guard(false, w.ltiN(d1, 0), exit);
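/*
 * alu() tries to demote a double arithmetic op to int32: if both operands are
 * promoted int32s, the oracle has no objection, and the record-time result r
 * is itself an int32 (and not -0), it emits integer LIR guarded against
 * overflow, division by zero, and a lost -0; otherwise it falls back to the
 * double-precision form (or a call to js_dmod for LIR_modd).
 */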
8394 JS_REQUIRES_STACK LIns*
8395 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
8398 * To even consider this operation for demotion, both operands have to be
8399 * integers and the oracle must not give us a negative hint for the
8400 * instruction.
8402 if (!oracle || oracle->isInstructionUndemotable(cx->regs->pc) ||
8403 !IsPromotedInt32(s0) || !IsPromotedInt32(s1)) {
8404 out:
8405 if (v == LIR_modd) {
8406 LIns* args[] = { s1, s0 };
8407 return w.call(&js_dmod_ci, args);
8409 LIns* result = w.ins2(v, s0, s1);
8410 JS_ASSERT_IF(s0->isImmD() && s1->isImmD(), result->isImmD());
8411 return result;
8414 jsdouble r;
8415 switch (v) {
8416 case LIR_addd:
8417 r = v0 + v1;
8418 break;
8419 case LIR_subd:
8420 r = v0 - v1;
8421 break;
8422 case LIR_muld:
8423 r = v0 * v1;
8424 if (r == 0.0 && (v0 < 0.0 || v1 < 0.0))
8425 goto out;
8426 break;
8427 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8428 case LIR_divd:
8429 if (v1 == 0)
8430 goto out;
8431 r = v0 / v1;
8432 break;
8433 case LIR_modd:
8434 if (v0 < 0 || v1 == 0 || (s1->isImmD() && v1 < 0))
8435 goto out;
8436 r = js_dmod(v0, v1);
8437 break;
8438 #endif
8439 default:
8440 goto out;
8444 * The result must be an integer at record time, otherwise there is no
8445 * point in trying to demote it.
8447 if (jsint(r) != r || JSDOUBLE_IS_NEGZERO(r))
8448 goto out;
8450 LIns* d0 = w.demoteToInt32(s0);
8451 LIns* d1 = w.demoteToInt32(s1);
8454 * Speculatively emit an integer operation, betting that at runtime we
8455 * will get integer results again.
8457 VMSideExit* exit = NULL;
8458 LIns* result;
8459 switch (v) {
8460 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8461 case LIR_divd:
8462 if (d0->isImmI() && d1->isImmI())
8463 return w.i2d(w.immi(jsint(r)));
8465 exit = snapshot(OVERFLOW_EXIT);
8468 * If the divisor is greater than zero it's always safe to execute
8469 * the division. If not, we have to make sure we are not running
8470 * into -2147483648 / -1, because it can raise an overflow exception.
8472 if (!d1->isImmI()) {
8473 if (MaybeBranch mbr = w.jt(w.gtiN(d1, 0))) {
8474 guard(false, w.eqi0(d1), exit);
8475 guard(true, w.eqi0(w.andi(w.eqiN(d0, 0x80000000),
8476 w.eqiN(d1, -1))), exit);
8477 w.label(mbr);
8479 } else {
8480 if (d1->immI() == -1)
8481 guard(false, w.eqiN(d0, 0x80000000), exit);
8483 v = LIR_divi;
8484 result = w.divi(d0, d1);
8486 /* The division result is an integer only if the remainder is zero. */
8487 guard(true, w.eqi0(w.modi(result)), exit);
8489 /* Don't lose a -0. */
8490 guard(false, w.eqi0(result), exit);
8491 break;
8493 case LIR_modd: {
8494 if (d0->isImmI() && d1->isImmI())
8495 return w.i2d(w.immi(jsint(r)));
8497 exit = snapshot(OVERFLOW_EXIT);
8499 /* Make sure we don't trigger division by zero at runtime. */
8500 if (!d1->isImmI())
8501 guard(false, w.eqi0(d1), exit);
8502 v = LIR_modi;
8503 result = w.modi(w.divi(d0, d1));
8505 /* If the result is not 0, it is always within the integer domain. */
8506 if (MaybeBranch mbr = w.jf(w.eqi0(result))) {
8508 * If the result is zero, we must exit if the lhs is negative since
8509 * the result is -0 in this case, which is not in the integer domain.
8511 guard(false, w.ltiN(d0, 0), exit);
8512 w.label(mbr);
8514 break;
8516 #endif
8518 default:
8519 v = arithOpcodeD2I(v);
8520 JS_ASSERT(v == LIR_addi || v == LIR_muli || v == LIR_subi);
8523 * If the operands guarantee that the result will be an integer (e.g.
8524 * z = x * y with 0 <= (x|y) <= 0xffff guarantees z <= fffe0001), we
8525 * don't have to guard against an overflow. Otherwise we emit a guard
8526 * that will inform the oracle and cause a non-demoted trace to be
8527 * attached that uses floating-point math for this operation.
8529 bool needsOverflowCheck = true, needsNegZeroCheck = true;
8530 ChecksRequired(v, d0, d1, &needsOverflowCheck, &needsNegZeroCheck);
8531 if (needsOverflowCheck) {
8532 exit = snapshot(OVERFLOW_EXIT);
8533 result = guard_xov(v, d0, d1, exit);
8534 } else {
8535 result = w.ins2(v, d0, d1);
8537 if (needsNegZeroCheck) {
8538 JS_ASSERT(v == LIR_muli);
8540 * Make sure we don't lose a -0. We exit if the result is zero and if
8541 * either operand is negative. We start out using a weaker guard, checking
8542 * if either argument is negative. If this ever fails, we recompile with
8543 * a stronger, but slower, guard.
8545 if (v0 < 0.0 || v1 < 0.0
8546 || !oracle || oracle->isInstructionSlowZeroTest(cx->regs->pc))
8548 if (!exit)
8549 exit = snapshot(OVERFLOW_EXIT);
8551 guard(true,
8552 w.eqi0(w.andi(w.eqi0(result),
8553 w.ori(w.ltiN(d0, 0),
8554 w.ltiN(d1, 0)))),
8555 exit);
8556 } else {
8557 guardNonNeg(d0, d1, snapshot(MUL_ZERO_EXIT));
8560 break;
8562 JS_ASSERT_IF(d0->isImmI() && d1->isImmI(), result->isImmI(jsint(r)));
8563 return w.i2d(result);
8566 LIns*
8567 TraceRecorder::d2i(LIns* d, bool resultCanBeImpreciseIfFractional)
8569 if (d->isImmD())
8570 return w.immi(js_DoubleToECMAInt32(d->immD()));
8571 if (d->isop(LIR_i2d) || d->isop(LIR_ui2d)) {
8572 // The d2i(i2d(i)) case is obviously a no-op. (Unlike i2d(d2i(d))!)
8573 // The d2i(ui2d(ui)) case is less obvious, but it is also a no-op.
8574 // For example, 4294967295U has the bit pattern 0xffffffff, and
8575 // d2i(ui2d(4294967295U)) is -1, which also has the bit pattern
8576 // 0xffffffff. Another way to think about it: d2i(ui2d(ui)) is
8577 // equivalent to ui2i(ui); ui2i doesn't exist, but it would be a
8578 // no-op if it did.
8579 // (Note that the above reasoning depends on the fact that d2i()
8580 // always succeeds, i.e. it never aborts).
8581 return d->oprnd1();
8583 if (d->isop(LIR_addd) || d->isop(LIR_subd)) {
8584 // If 'i32ad' and 'i32bd' are integral doubles that fit in int32s, and
8585 // 'i32ai' and 'i32bi' are int32s with the equivalent values, then
8586 // this is true:
8588 // d2i(addd(i32ad, i32bd)) == addi(i32ai, i32bi)
8590 // If the RHS doesn't overflow, this is obvious. If it does overflow,
8591 // the result will truncate. And the LHS will truncate in exactly the
8592 // same way. So they're always equal.
8593 LIns* lhs = d->oprnd1();
8594 LIns* rhs = d->oprnd2();
8595 if (IsPromotedInt32(lhs) && IsPromotedInt32(rhs))
8596 return w.ins2(arithOpcodeD2I(d->opcode()), w.demoteToInt32(lhs), w.demoteToInt32(rhs));
8598 if (d->isCall()) {
8599 const CallInfo* ci = d->callInfo();
8600 if (ci == &js_UnboxDouble_ci) {
8601 #if JS_BITS_PER_WORD == 32
8602 LIns *tag_ins = d->callArgN(0);
8603 LIns *payload_ins = d->callArgN(1);
8604 LIns* args[] = { payload_ins, tag_ins };
8605 return w.call(&js_UnboxInt32_ci, args);
8606 #else
8607 LIns* val_ins = d->callArgN(0);
8608 LIns* args[] = { val_ins };
8609 return w.call(&js_UnboxInt32_ci, args);
8610 #endif
8612 if (ci == &js_StringToNumber_ci) {
8613 LIns* ok_ins = w.allocp(sizeof(JSBool));
8614 LIns* args[] = { ok_ins, d->callArgN(1), d->callArgN(0) };
8615 LIns* ret_ins = w.call(&js_StringToInt32_ci, args);
8616 guard(false,
8617 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
8618 OOM_EXIT);
8619 return ret_ins;
8622 return resultCanBeImpreciseIfFractional
8623 ? w.rawD2i(d)
8624 : w.call(&js_DoubleToInt32_ci, &d);
8627 LIns*
8628 TraceRecorder::d2u(LIns* d)
8630 if (d->isImmD())
8631 return w.immi(js_DoubleToECMAUint32(d->immD()));
8632 if (d->isop(LIR_i2d) || d->isop(LIR_ui2d))
8633 return d->oprnd1();
8634 return w.call(&js_DoubleToUint32_ci, &d);
8637 JS_REQUIRES_STACK RecordingStatus
8638 TraceRecorder::makeNumberInt32(LIns* d, LIns** out)
8640 JS_ASSERT(d->isD());
8641 if (IsPromotedInt32(d)) {
8642 *out = w.demoteToInt32(d);
8643 return RECORD_CONTINUE;
8646 // This means "convert double to int if it's integral, otherwise
8647 // exit". We first convert the double to an int, then convert it back
8648 // and exit if the two doubles don't match. If 'f' is a non-integral
8649 // immediate we'll end up aborting.
8650 *out = d2i(d, /* resultCanBeImpreciseIfFractional = */true);
8651 return guard(true, w.eqd(d, w.i2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
8654 JS_REQUIRES_STACK RecordingStatus
8655 TraceRecorder::makeNumberUint32(LIns* d, LIns** out)
8657 JS_ASSERT(d->isD());
8658 if (IsPromotedUint32(d)) {
8659 *out = w.demoteToUint32(d);
8660 return RECORD_CONTINUE;
8663 // This means "convert double to uint if it's integral, otherwise
8664 // exit". We first convert the double to an unsigned int, then
8665 // convert it back and exit if the two doubles don't match. If
8666 // 'd' is a non-integral immediate we'll end up aborting.
8667 *out = d2u(d);
8668 return guard(true, w.eqd(d, w.ui2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
8671 JS_REQUIRES_STACK LIns*
8672 TraceRecorder::stringify(const Value& v)
8674 LIns* v_ins = get(&v);
8675 if (v.isString())
8676 return v_ins;
8678 LIns* args[] = { v_ins, cx_ins };
8679 const CallInfo* ci;
8680 if (v.isNumber()) {
8681 ci = &js_NumberToString_ci;
8682 } else if (v.isUndefined()) {
8683 return w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
8684 } else if (v.isBoolean()) {
8685 ci = &js_BooleanIntToString_ci;
8686 } else {
8688 * Callers must deal with non-primitive (non-null object) values by
8689 * calling an imacro. We don't try to guess about which imacro, with
8690 * what valueOf hint, here.
8692 JS_ASSERT(v.isNull());
8693 return w.immpAtomGC(cx->runtime->atomState.nullAtom);
8696 v_ins = w.call(ci, args);
8697 guard(false, w.eqp0(v_ins), OOM_EXIT);
8698 return v_ins;
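/*
 * An imacro is a canned bytecode sequence that the recorder runs in place of
 * the current opcode. callImacroInfallibly saves the real pc in fp->imacropc
 * and points cx->regs->pc at the imacro; imacros do not nest, which is what
 * canCallImacro checks before callImacro commits to one.
 */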
8701 JS_REQUIRES_STACK bool
8702 TraceRecorder::canCallImacro() const
8704 /* We cannot nest imacros. */
8705 return !cx->fp()->hasImacropc();
8708 JS_REQUIRES_STACK RecordingStatus
8709 TraceRecorder::callImacro(jsbytecode* imacro)
8711 return canCallImacro() ? callImacroInfallibly(imacro) : RECORD_STOP;
8714 JS_REQUIRES_STACK RecordingStatus
8715 TraceRecorder::callImacroInfallibly(jsbytecode* imacro)
8717 JSStackFrame* fp = cx->fp();
8718 JS_ASSERT(!fp->hasImacropc());
8719 JSFrameRegs* regs = cx->regs;
8720 fp->setImacropc(regs->pc);
8721 regs->pc = imacro;
8722 updateAtoms();
8723 return RECORD_IMACRO;
8726 JS_REQUIRES_STACK AbortableRecordingStatus
8727 TraceRecorder::ifop()
8729 Value& v = stackval(-1);
8730 LIns* v_ins = get(&v);
8731 bool cond;
8732 LIns* x;
8734 if (v.isNull() || v.isUndefined()) {
8735 cond = false;
8736 x = w.immi(0);
8737 } else if (!v.isPrimitive()) {
8738 cond = true;
8739 x = w.immi(1);
8740 } else if (v.isBoolean()) {
8741 /* Test for boolean is true, negate later if we are testing for false. */
8742 cond = v.isTrue();
8743 x = w.eqiN(v_ins, 1);
8744 } else if (v.isNumber()) {
8745 jsdouble d = v.toNumber();
8746 cond = !JSDOUBLE_IS_NaN(d) && d;
8747 x = w.eqi0(w.eqi0(w.andi(w.eqd(v_ins, v_ins), w.eqi0(w.eqd0(v_ins)))));
8748 } else if (v.isString()) {
8749 cond = v.toString()->length() != 0;
8750 x = w.eqi0(w.eqp0(w.getStringLength(v_ins)));
8751 } else {
8752 JS_NOT_REACHED("ifop");
8753 return ARECORD_STOP;
8756 jsbytecode* pc = cx->regs->pc;
8757 emitIf(pc, cond, x);
8758 return checkTraceEnd(pc);
8761 #ifdef NANOJIT_IA32
8763 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
8764 * "first" time we hit the op. Later, when we start traces after exiting that
8765 * trace, we just patch.
8767 JS_REQUIRES_STACK AbortableRecordingStatus
8768 TraceRecorder::tableswitch()
8770 Value& v = stackval(-1);
8772 /* No need to guard if the condition can't match any of the cases. */
8773 if (!v.isNumber())
8774 return ARECORD_CONTINUE;
8776 /* No need to guard if the condition is constant. */
8777 LIns* v_ins = d2i(get(&v));
8778 if (v_ins->isImmI())
8779 return ARECORD_CONTINUE;
8781 jsbytecode* pc = cx->regs->pc;
8782 /* Starting a new trace after exiting a trace via switch. */
8783 if (anchor &&
8784 (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
8785 fragment->ip == pc) {
8786 return ARECORD_CONTINUE;
8789 /* Decode jsop. */
8790 jsint low, high;
8791 if (*pc == JSOP_TABLESWITCH) {
8792 pc += JUMP_OFFSET_LEN;
8793 low = GET_JUMP_OFFSET(pc);
8794 pc += JUMP_OFFSET_LEN;
8795 high = GET_JUMP_OFFSET(pc);
8796 } else {
8797 pc += JUMPX_OFFSET_LEN;
8798 low = GET_JUMP_OFFSET(pc);
8799 pc += JUMP_OFFSET_LEN;
8800 high = GET_JUMP_OFFSET(pc);
8804 * If there are no cases, this is a no-op. The default case immediately
8805 * follows in the bytecode and is always taken, so we need no special
8806 * action to handle it.
8808 int count = high + 1 - low;
8809 JS_ASSERT(count >= 0);
8810 if (count == 0)
8811 return ARECORD_CONTINUE;
8813 /* Cap maximum table-switch size for modesty. */
8814 if (count > MAX_TABLE_SWITCH)
8815 return InjectStatus(switchop());
8817 /* Generate switch LIR. */
8818 SwitchInfo* si = new (traceAlloc()) SwitchInfo();
8819 si->count = count;
8820 si->table = 0;
8821 si->index = (uint32) -1;
8822 LIns* diff = w.subi(v_ins, w.immi(low));
8823 LIns* cmp = w.ltui(diff, w.immi(si->count));
8824 guard(true, cmp, DEFAULT_EXIT);
8825 // We use AnyAddress; it's imprecise but this case is rare and not worth its
8826 // own access region.
8827 w.st(diff, AnyAddress(w.immpNonGC(&si->index)));
8828 VMSideExit* exit = snapshot(CASE_EXIT);
8829 exit->switchInfo = si;
8830 LIns* guardIns = w.xtbl(diff, createGuardRecord(exit));
8831 fragment->lastIns = guardIns;
8832 CHECK_STATUS_A(compile());
8833 return finishSuccessfully();
8835 #endif
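/*
 * switchop() handles the generic (non-table) switch case: it guards that the
 * switch operand has the same numeric, string, or boolean value on trace as
 * it had at record time, so the trace always takes the same case; constants
 * and undefined need no guard, and objects or null abort recording.
 */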
8837 JS_REQUIRES_STACK RecordingStatus
8838 TraceRecorder::switchop()
8840 Value& v = stackval(-1);
8841 LIns* v_ins = get(&v);
8843 /* No need to guard if the condition is constant. */
8844 if (v_ins->isImmAny())
8845 return RECORD_CONTINUE;
8846 if (v.isNumber()) {
8847 jsdouble d = v.toNumber();
8848 guard(true,
8849 w.name(w.eqd(v_ins, w.immd(d)), "guard(switch on numeric)"),
8850 BRANCH_EXIT,
8851 /* abortIfAlwaysExits = */true);
8852 } else if (v.isString()) {
8853 LIns* args[] = { w.immpStrGC(v.toString()), v_ins, cx_ins };
8854 LIns* equal_rval = w.call(&js_EqualStringsOnTrace_ci, args);
8855 guard(false,
8856 w.name(w.eqiN(equal_rval, JS_NEITHER), "guard(oom)"),
8857 OOM_EXIT);
8858 guard(false,
8859 w.name(w.eqi0(equal_rval), "guard(switch on string)"),
8860 BRANCH_EXIT);
8861 } else if (v.isBoolean()) {
8862 guard(true,
8863 w.name(w.eqi(v_ins, w.immi(v.isTrue())), "guard(switch on boolean)"),
8864 BRANCH_EXIT);
8865 } else if (v.isUndefined()) {
8866 // This is a unit type, so no guard is needed.
8867 } else {
8868 RETURN_STOP("switch on object or null");
8870 return RECORD_CONTINUE;
8873 JS_REQUIRES_STACK RecordingStatus
8874 TraceRecorder::inc(Value& v, jsint incr, bool pre)
8876 LIns* v_ins = get(&v);
8877 Value dummy;
8878 CHECK_STATUS(inc(v, v_ins, dummy, incr, pre));
8879 set(&v, v_ins);
8880 return RECORD_CONTINUE;
8884 * On exit, v_ins is the incremented unboxed value, and the appropriate value
8885 * (pre- or post-increment as described by pre) is stacked. v_out is set to
8886 * the value corresponding to v_ins.
8888 JS_REQUIRES_STACK RecordingStatus
8889 TraceRecorder::inc(const Value &v, LIns*& v_ins, Value &v_out, jsint incr, bool pre)
8891 LIns* v_after;
8892 CHECK_STATUS(incHelper(v, v_ins, v_out, v_after, incr));
8894 const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc];
8895 JS_ASSERT(cs.ndefs == 1);
8896 stack(-cs.nuses, pre ? v_after : v_ins);
8897 v_ins = v_after;
8898 return RECORD_CONTINUE;
8902 * Do an increment operation without storing anything to the stack.
8904 * v_after is an out param whose value corresponds to the instruction the
8905 * v_ins_after out param gets set to.
8907 JS_REQUIRES_STACK RecordingStatus
8908 TraceRecorder::incHelper(const Value &v, LIns*& v_ins, Value &v_after,
8909 LIns*& v_ins_after, jsint incr)
8911 // FIXME: Bug 606071 on making this work for objects.
8912 if (!v.isPrimitive())
8913 RETURN_STOP("can inc primitives only");
8915 // We need to modify |v_ins| the same way relational() modifies
8916 // its RHS and LHS.
8917 if (v.isUndefined()) {
8918 v_ins_after = w.immd(js_NaN);
8919 v_after.setDouble(js_NaN);
8920 v_ins = w.immd(js_NaN);
8921 } else if (v.isNull()) {
8922 v_ins_after = w.immd(incr);
8923 v_after.setDouble(incr);
8924 v_ins = w.immd(0.0);
8925 } else {
8926 if (v.isBoolean()) {
8927 v_ins = w.i2d(v_ins);
8928 } else if (v.isString()) {
8929 LIns* ok_ins = w.allocp(sizeof(JSBool));
8930 LIns* args[] = { ok_ins, v_ins, cx_ins };
8931 v_ins = w.call(&js_StringToNumber_ci, args);
8932 guard(false,
8933 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
8934 OOM_EXIT);
8935 } else {
8936 JS_ASSERT(v.isNumber());
8939 jsdouble num;
8940 AutoValueRooter tvr(cx);
8941 *tvr.addr() = v;
8942 ValueToNumber(cx, tvr.value(), &num);
8943 v_ins_after = alu(LIR_addd, num, incr, v_ins, w.immd(incr));
8944 v_after.setDouble(num + incr);
8947 return RECORD_CONTINUE;
8950 JS_REQUIRES_STACK AbortableRecordingStatus
8951 TraceRecorder::incProp(jsint incr, bool pre)
8953 Value& l = stackval(-1);
8954 if (l.isPrimitive())
8955 RETURN_STOP_A("incProp on primitive");
8957 JSObject* obj = &l.toObject();
8958 LIns* obj_ins = get(&l);
8960 uint32 slot;
8961 LIns* v_ins;
8962 CHECK_STATUS_A(prop(obj, obj_ins, &slot, &v_ins, NULL));
8964 if (slot == SHAPE_INVALID_SLOT)
8965 RETURN_STOP_A("incProp on invalid slot");
8967 Value& v = obj->getSlotRef(slot);
8968 Value v_after;
8969 CHECK_STATUS_A(inc(v, v_ins, v_after, incr, pre));
8971 LIns* slots_ins = NULL;
8972 stobj_set_slot(obj, obj_ins, slot, slots_ins, v_after, v_ins);
8973 return ARECORD_CONTINUE;
8976 JS_REQUIRES_STACK RecordingStatus
8977 TraceRecorder::incElem(jsint incr, bool pre)
8979 Value& r = stackval(-1);
8980 Value& l = stackval(-2);
8981 Value* vp;
8982 LIns* v_ins;
8983 LIns* addr_ins;
8985 if (!l.isPrimitive() && l.toObject().isDenseArray() && r.isInt32()) {
8986 guardDenseArray(get(&l), MISMATCH_EXIT);
8987 CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins, snapshot(BRANCH_EXIT)));
8988 if (!addr_ins) // if we read a hole, abort
8989 return RECORD_STOP;
8990 Value v_after;
8991 CHECK_STATUS(inc(*vp, v_ins, v_after, incr, pre));
8992 box_value_into(v_after, v_ins, DSlotsAddress(addr_ins));
8993 return RECORD_CONTINUE;
8996 return callImacro((incr == 1)
8997 ? pre ? incelem_imacros.incelem : incelem_imacros.eleminc
8998 : pre ? decelem_imacros.decelem : decelem_imacros.elemdec);
9001 static bool
9002 EvalCmp(LOpcode op, double l, double r)
9004 bool cond;
9005 switch (op) {
9006 case LIR_eqd:
9007 cond = (l == r);
9008 break;
9009 case LIR_ltd:
9010 cond = l < r;
9011 break;
9012 case LIR_gtd:
9013 cond = l > r;
9014 break;
9015 case LIR_led:
9016 cond = l <= r;
9017 break;
9018 case LIR_ged:
9019 cond = l >= r;
9020 break;
9021 default:
9022 JS_NOT_REACHED("unexpected comparison op");
9023 return false;
9025 return cond;
9028 static bool
9029 EvalCmp(JSContext *cx, LOpcode op, JSString* l, JSString* r, JSBool *ret)
9031 if (op == LIR_eqd)
9032 return EqualStrings(cx, l, r, ret);
9033 JSBool cmp;
9034 if (!CompareStrings(cx, l, r, &cmp))
9035 return false;
9036 *ret = EvalCmp(op, cmp, 0);
9037 return true;
9040 JS_REQUIRES_STACK RecordingStatus
9041 TraceRecorder::strictEquality(bool equal, bool cmpCase)
9043 Value& r = stackval(-1);
9044 Value& l = stackval(-2);
9045 LIns* l_ins = get(&l);
9046 LIns* r_ins = get(&r);
9047 LIns* x;
9048 JSBool cond;
9050 JSValueType ltag = getPromotedType(l);
9051 if (ltag != getPromotedType(r)) {
9052 cond = !equal;
9053 x = w.immi(cond);
9054 } else if (ltag == JSVAL_TYPE_STRING) {
9055 LIns* args[] = { r_ins, l_ins, cx_ins };
9056 LIns* equal_ins = w.call(&js_EqualStringsOnTrace_ci, args);
9057 guard(false,
9058 w.name(w.eqiN(equal_ins, JS_NEITHER), "guard(oom)"),
9059 OOM_EXIT);
9060 x = w.eqiN(equal_ins, equal);
9061 if (!EqualStrings(cx, l.toString(), r.toString(), &cond))
9062 RETURN_ERROR("oom");
9063 } else {
9064 if (ltag == JSVAL_TYPE_DOUBLE)
9065 x = w.eqd(l_ins, r_ins);
9066 else if (ltag == JSVAL_TYPE_NULL || ltag == JSVAL_TYPE_NONFUNOBJ || ltag == JSVAL_TYPE_FUNOBJ)
9067 x = w.eqp(l_ins, r_ins);
9068 else
9069 x = w.eqi(l_ins, r_ins);
9070 if (!equal)
9071 x = w.eqi0(x);
9072 cond = (ltag == JSVAL_TYPE_DOUBLE)
9073 ? l.toNumber() == r.toNumber()
9074 : l == r;
9076 cond = (!!cond == equal);
9078 if (cmpCase) {
9079 /* Only guard if the same path may not always be taken. */
9080 if (!x->isImmI())
9081 guard(cond, x, BRANCH_EXIT);
9082 return RECORD_CONTINUE;
9085 set(&l, x);
9086 return RECORD_CONTINUE;
9089 JS_REQUIRES_STACK AbortableRecordingStatus
9090 TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
9092 Value& rval = stackval(-1);
9093 Value& lval = stackval(-2);
9094 LIns* l_ins = get(&lval);
9095 LIns* r_ins = get(&rval);
9097 return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
9100 JS_REQUIRES_STACK AbortableRecordingStatus
9101 TraceRecorder::equalityHelper(Value& l, Value& r, LIns* l_ins, LIns* r_ins,
9102 bool negate, bool tryBranchAfterCond,
9103 Value& rval)
9105 LOpcode op = LIR_eqi;
9106 JSBool cond;
9107 LIns* args[] = { NULL, NULL, NULL };
9110 * The if chain below closely mirrors that found in 11.9.3, in general
9111 * deviating from that ordering of ifs only to account for SpiderMonkey's
9112 * conflation of booleans and undefined and for the possibility of
9113 * confusing objects and null. Note carefully the spec-mandated recursion
9114 * in the final else clause, which terminates because Number == T recurs
9115 * only if T is Object, but that must recur again to convert Object to
9116 * primitive, and ToPrimitive throws if the object cannot be converted to
9117 * a primitive value (which would terminate recursion).
9120 if (getPromotedType(l) == getPromotedType(r)) {
9121 if (l.isUndefined() || l.isNull()) {
9122 cond = true;
9123 if (l.isNull())
9124 op = LIR_eqp;
9125 } else if (l.isObject()) {
9126 if (l.toObject().getClass()->ext.equality)
9127 RETURN_STOP_A("Can't trace extended class equality operator");
9128 LIns* flags_ins = w.ldiObjFlags(l_ins);
9129 LIns* flag_ins = w.andi(flags_ins, w.nameImmui(JSObject::HAS_EQUALITY));
9130 guard(true, w.eqi0(flag_ins), BRANCH_EXIT);
9132 op = LIR_eqp;
9133 cond = (l == r);
9134 } else if (l.isBoolean()) {
9135 JS_ASSERT(r.isBoolean());
9136 cond = (l == r);
9137 } else if (l.isString()) {
9138 JSString *l_str = l.toString();
9139 JSString *r_str = r.toString();
9140 if (!l_str->isRope() && !r_str->isRope() && l_str->length() == 1 && r_str->length() == 1) {
9141 VMSideExit *exit = snapshot(BRANCH_EXIT);
9142 LIns *c = w.immw(1);
9143 guard(true, w.eqp(w.getStringLength(l_ins), c), exit);
9144 guard(true, w.eqp(w.getStringLength(r_ins), c), exit);
9145 l_ins = w.getStringChar(l_ins, w.immpNonGC(0));
9146 r_ins = w.getStringChar(r_ins, w.immpNonGC(0));
9147 } else {
9148 args[0] = r_ins, args[1] = l_ins, args[2] = cx_ins;
9149 LIns *equal_ins = w.call(&js_EqualStringsOnTrace_ci, args);
9150 guard(false,
9151 w.name(w.eqiN(equal_ins, JS_NEITHER), "guard(oom)"),
9152 OOM_EXIT);
9153 l_ins = equal_ins;
9154 r_ins = w.immi(1);
9156 if (!EqualStrings(cx, l.toString(), r.toString(), &cond))
9157 RETURN_ERROR_A("oom");
9158 } else {
9159 JS_ASSERT(l.isNumber() && r.isNumber());
9160 cond = (l.toNumber() == r.toNumber());
9161 op = LIR_eqd;
9163 } else if (l.isNull() && r.isUndefined()) {
9164 l_ins = w.immiUndefined();
9165 cond = true;
9166 } else if (l.isUndefined() && r.isNull()) {
9167 r_ins = w.immiUndefined();
9168 cond = true;
9169 } else if (l.isNumber() && r.isString()) {
9170 LIns* ok_ins = w.allocp(sizeof(JSBool));
9171 args[0] = ok_ins, args[1] = r_ins, args[2] = cx_ins;
9172 r_ins = w.call(&js_StringToNumber_ci, args);
9173 guard(false,
9174 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9175 OOM_EXIT);
9176 JSBool ok;
9177 double d = js_StringToNumber(cx, r.toString(), &ok);
9178 if (!ok)
9179 RETURN_ERROR_A("oom");
9180 cond = (l.toNumber() == d);
9181 op = LIR_eqd;
9182 } else if (l.isString() && r.isNumber()) {
9183 LIns* ok_ins = w.allocp(sizeof(JSBool));
9184 args[0] = ok_ins, args[1] = l_ins, args[2] = cx_ins;
9185 l_ins = w.call(&js_StringToNumber_ci, args);
9186 guard(false,
9187 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9188 OOM_EXIT);
9189 JSBool ok;
9190 double d = js_StringToNumber(cx, l.toString(), &ok);
9191 if (!ok)
9192 RETURN_ERROR_A("oom");
9193 cond = (d == r.toNumber());
9194 op = LIR_eqd;
9195 } else {
9196 // Below we may assign to l or r, which modifies the interpreter state.
9197 // This is fine as long as we also update the tracker.
9198 if (l.isBoolean()) {
9199 l_ins = w.i2d(l_ins);
9200 set(&l, l_ins);
9201 l.setInt32(l.isTrue());
9202 return equalityHelper(l, r, l_ins, r_ins, negate,
9203 tryBranchAfterCond, rval);
9205 if (r.isBoolean()) {
9206 r_ins = w.i2d(r_ins);
9207 set(&r, r_ins);
9208 r.setInt32(r.isTrue());
9209 return equalityHelper(l, r, l_ins, r_ins, negate,
9210 tryBranchAfterCond, rval);
9212 if ((l.isString() || l.isNumber()) && !r.isPrimitive()) {
9213 CHECK_STATUS_A(guardNativeConversion(r));
9214 return InjectStatus(callImacro(equality_imacros.any_obj));
9216 if (!l.isPrimitive() && (r.isString() || r.isNumber())) {
9217 CHECK_STATUS_A(guardNativeConversion(l));
9218 return InjectStatus(callImacro(equality_imacros.obj_any));
9221 l_ins = w.immi(0);
9222 r_ins = w.immi(1);
9223 cond = false;
9226 /* If the operands aren't numbers, compare them as integers. */
9227 LIns* x = w.ins2(op, l_ins, r_ins);
9228 if (negate) {
9229 x = w.eqi0(x);
9230 cond = !cond;
9233 jsbytecode* pc = cx->regs->pc;
9235 /*
9236 * Don't guard if the same path is always taken. If it isn't, we have to
9237 * fuse comparisons and the following branch, because the interpreter does
9238 * that.
9239 */
9240 if (tryBranchAfterCond)
9241 fuseIf(pc + 1, cond, x);
9243 /*
9244 * There is no need to write out the result of this comparison if the trace
9245 * ends on this operation.
9246 */
9247 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
9248 CHECK_STATUS_A(checkTraceEnd(pc + 1));
9250 /*
9251 * We update the stack after the guard. This is safe since the guard bails
9252 * out at the comparison and the interpreter will therefore re-execute the
9253 * comparison. This way the value of the condition doesn't have to be
9254 * calculated and saved on the stack in most cases.
9255 */
9256 set(&rval, x);
9258 return ARECORD_CONTINUE;
9261 JS_REQUIRES_STACK AbortableRecordingStatus
9262 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
9264 Value& r = stackval(-1);
9265 Value& l = stackval(-2);
9266 LIns* x = NULL;
9267 JSBool cond;
9268 LIns* l_ins = get(&l);
9269 LIns* r_ins = get(&r);
9270 bool fp = false;
9271 jsdouble lnum, rnum;
9273 /*
9274 * 11.8.5 if either argument is an object with a function-valued valueOf
9275 * property; if both arguments are objects with non-function-valued valueOf
9276 * properties, abort.
9277 */
9278 if (!l.isPrimitive()) {
9279 CHECK_STATUS_A(guardNativeConversion(l));
9280 if (!r.isPrimitive()) {
9281 CHECK_STATUS_A(guardNativeConversion(r));
9282 return InjectStatus(callImacro(binary_imacros.obj_obj));
9284 return InjectStatus(callImacro(binary_imacros.obj_any));
9286 if (!r.isPrimitive()) {
9287 CHECK_STATUS_A(guardNativeConversion(r));
9288 return InjectStatus(callImacro(binary_imacros.any_obj));
9291 /* 11.8.5 steps 3, 16-21. */
9292 if (l.isString() && r.isString()) {
9293 LIns* args[] = { r_ins, l_ins, cx_ins };
9294 LIns* result_ins = w.call(&js_CompareStringsOnTrace_ci, args);
9295 guard(false,
9296 w.name(w.eqiN(result_ins, INT32_MIN), "guard(oom)"),
9297 OOM_EXIT);
9298 l_ins = result_ins;
9299 r_ins = w.immi(0);
9300 if (!EvalCmp(cx, op, l.toString(), r.toString(), &cond))
9301 RETURN_ERROR_A("oom");
9302 goto do_comparison;
9305 /* 11.8.5 steps 4-5. */
9306 if (!l.isNumber()) {
9307 if (l.isBoolean()) {
9308 l_ins = w.i2d(l_ins);
9309 } else if (l.isUndefined()) {
9310 l_ins = w.immd(js_NaN);
9311 } else if (l.isString()) {
9312 LIns* ok_ins = w.allocp(sizeof(JSBool));
9313 LIns* args[] = { ok_ins, l_ins, cx_ins };
9314 l_ins = w.call(&js_StringToNumber_ci, args);
9315 guard(false,
9316 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9317 OOM_EXIT);
9318 } else if (l.isNull()) {
9319 l_ins = w.immd(0.0);
9320 } else {
9321 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9322 "have been handled at start of method");
9323 RETURN_STOP_A("safety belt");
9326 if (!r.isNumber()) {
9327 if (r.isBoolean()) {
9328 r_ins = w.i2d(r_ins);
9329 } else if (r.isUndefined()) {
9330 r_ins = w.immd(js_NaN);
9331 } else if (r.isString()) {
9332 LIns* ok_ins = w.allocp(sizeof(JSBool));
9333 LIns* args[] = { ok_ins, r_ins, cx_ins };
9334 r_ins = w.call(&js_StringToNumber_ci, args);
9335 guard(false,
9336 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9337 OOM_EXIT);
9338 } else if (r.isNull()) {
9339 r_ins = w.immd(0.0);
9340 } else {
9341 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9342 "have been handled at start of method");
9343 RETURN_STOP_A("safety belt");
9347 AutoValueRooter tvr(cx);
9348 *tvr.addr() = l;
9349 ValueToNumber(cx, tvr.value(), &lnum);
9350 *tvr.addr() = r;
9351 ValueToNumber(cx, tvr.value(), &rnum);
9353 cond = EvalCmp(op, lnum, rnum);
9354 fp = true;
9356 /* 11.8.5 steps 6-15. */
9357 do_comparison:
9358 /*
9359 * If the result is not a number or it's not a quad, we must use an integer
9360 * compare.
9361 */
9362 if (!fp) {
9363 JS_ASSERT(isCmpDOpcode(op));
9364 op = cmpOpcodeD2I(op);
9366 x = w.ins2(op, l_ins, r_ins);
9368 jsbytecode* pc = cx->regs->pc;
9370 /*
9371 * Don't guard if the same path is always taken. If it isn't, we have to
9372 * fuse comparisons and the following branch, because the interpreter does
9373 * that.
9374 */
9375 if (tryBranchAfterCond)
9376 fuseIf(pc + 1, cond, x);
9378 /*
9379 * There is no need to write out the result of this comparison if the trace
9380 * ends on this operation.
9381 */
9382 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
9383 CHECK_STATUS_A(checkTraceEnd(pc + 1));
9385 /*
9386 * We update the stack after the guard. This is safe since the guard bails
9387 * out at the comparison and the interpreter will therefore re-execute the
9388 * comparison. This way the value of the condition doesn't have to be
9389 * calculated and saved on the stack in most cases.
9390 */
9391 set(&l, x);
9393 return ARECORD_CONTINUE;
9396 JS_REQUIRES_STACK RecordingStatus
9397 TraceRecorder::unary(LOpcode op)
9399 Value& v = stackval(-1);
9400 bool intop = retTypes[op] == LTy_I;
9401 if (v.isNumber()) {
9402 LIns* a = get(&v);
9403 if (intop)
9404 a = d2i(a);
9405 a = w.ins1(op, a);
9406 if (intop)
9407 a = w.i2d(a);
9408 set(&v, a);
9409 return RECORD_CONTINUE;
9411 return RECORD_STOP;
9414 JS_REQUIRES_STACK RecordingStatus
9415 TraceRecorder::binary(LOpcode op)
9417 Value& r = stackval(-1);
9418 Value& l = stackval(-2);
9420 if (!l.isPrimitive()) {
9421 CHECK_STATUS(guardNativeConversion(l));
9422 if (!r.isPrimitive()) {
9423 CHECK_STATUS(guardNativeConversion(r));
9424 return callImacro(binary_imacros.obj_obj);
9426 return callImacro(binary_imacros.obj_any);
9428 if (!r.isPrimitive()) {
9429 CHECK_STATUS(guardNativeConversion(r));
9430 return callImacro(binary_imacros.any_obj);
9433 bool intop = retTypes[op] == LTy_I;
9434 LIns* a = get(&l);
9435 LIns* b = get(&r);
9437 bool leftIsNumber = l.isNumber();
9438 jsdouble lnum = leftIsNumber ? l.toNumber() : 0;
9440 bool rightIsNumber = r.isNumber();
9441 jsdouble rnum = rightIsNumber ? r.toNumber() : 0;
9443 if (l.isString()) {
9444 NanoAssert(op != LIR_addd); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9445 LIns* ok_ins = w.allocp(sizeof(JSBool));
9446 LIns* args[] = { ok_ins, a, cx_ins };
9447 a = w.call(&js_StringToNumber_ci, args);
9448 guard(false,
9449 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9450 OOM_EXIT);
9451 JSBool ok;
9452 lnum = js_StringToNumber(cx, l.toString(), &ok);
9453 if (!ok)
9454 RETURN_ERROR("oom");
9455 leftIsNumber = true;
9457 if (r.isString()) {
9458 NanoAssert(op != LIR_addd); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9459 LIns* ok_ins = w.allocp(sizeof(JSBool));
9460 LIns* args[] = { ok_ins, b, cx_ins };
9461 b = w.call(&js_StringToNumber_ci, args);
9462 guard(false,
9463 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9464 OOM_EXIT);
9465 JSBool ok;
9466 rnum = js_StringToNumber(cx, r.toString(), &ok);
9467 if (!ok)
9468 RETURN_ERROR("oom");
9469 rightIsNumber = true;
9471 if (l.isBoolean()) {
9472 a = w.i2d(a);
9473 lnum = l.toBoolean();
9474 leftIsNumber = true;
9475 } else if (l.isUndefined()) {
9476 a = w.immd(js_NaN);
9477 lnum = js_NaN;
9478 leftIsNumber = true;
9480 if (r.isBoolean()) {
9481 b = w.i2d(b);
9482 rnum = r.toBoolean();
9483 rightIsNumber = true;
9484 } else if (r.isUndefined()) {
9485 b = w.immd(js_NaN);
9486 rnum = js_NaN;
9487 rightIsNumber = true;
9489 if (leftIsNumber && rightIsNumber) {
9490 if (intop) {
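// Demote the double operands for the integer ops: JSOP_URSH (LIR_rshui)
// treats its left operand as unsigned, everything else as signed int32.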
9491 a = (op == LIR_rshui) ? d2u(a) : d2i(a);
9492 b = d2i(b);
9494 a = alu(op, lnum, rnum, a, b);
9495 if (intop)
9496 a = (op == LIR_rshui) ? w.ui2d(a) : w.i2d(a);
9497 set(&l, a);
9498 return RECORD_CONTINUE;
9500 return RECORD_STOP;
9503 #if defined DEBUG_notme && defined XP_UNIX
9504 #include <stdio.h>
9506 static FILE* shapefp = NULL;
9508 static void
9509 DumpShape(JSObject* obj, const char* prefix)
9511 if (!shapefp) {
9512 shapefp = fopen("/tmp/shapes.dump", "w");
9513 if (!shapefp)
9514 return;
9517 fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, obj->shape(), obj->flags);
9518 for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
9519 const Shape &shape = r.front();
9521 if (JSID_IS_ATOM(shape.id)) {
9522 putc(' ', shapefp);
9523 JS_PutString(JSID_TO_STRING(shape.id), shapefp);
9524 } else {
9525 JS_ASSERT(!JSID_IS_OBJECT(shape.id));
9526 fprintf(shapefp, " %d", JSID_TO_INT(shape.id));
9528 fprintf(shapefp, " %u %p %p %x %x %d\n",
9529 shape.slot, shape.getter, shape.setter, shape.attrs, shape.flags, shape.shortid);
9531 fflush(shapefp);
9534 void
9535 TraceRecorder::dumpGuardedShapes(const char* prefix)
9537 for (GuardedShapeTable::Range r = guardedShapeTable.all(); !r.empty(); r.popFront())
9538 DumpShape(r.front().value, prefix);
9540 #endif /* DEBUG_notme && XP_UNIX */
9542 JS_REQUIRES_STACK RecordingStatus
9543 TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
9544 VMSideExit* exit)
9546 // Test (with add if missing) for a remembered guard for (obj_ins, obj).
9547 GuardedShapeTable::AddPtr p = guardedShapeTable.lookupForAdd(obj_ins);
9548 if (p) {
9549 JS_ASSERT(p->value == obj);
9550 return RECORD_CONTINUE;
9552 if (!guardedShapeTable.add(p, obj_ins, obj))
9553 return RECORD_ERROR;
9555 if (obj == globalObj) {
9556 // In this case checking object identity is equivalent and faster.
9557 guard(true,
9558 w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"),
9559 exit);
9560 return RECORD_CONTINUE;
9563 #if defined DEBUG_notme && defined XP_UNIX
9564 DumpShape(obj, "guard");
9565 fprintf(shapefp, "for obj_ins %p\n", obj_ins);
9566 #endif
9568 // Finally, emit the shape guard.
9569 guard(true, w.name(w.eqiN(w.ldiObjShape(obj_ins), shape), guardName), exit);
9570 return RECORD_CONTINUE;
9573 void
9574 TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
9576 for (GuardedShapeTable::Enum e(guardedShapeTable); !e.empty(); e.popFront()) {
9577 if (e.front().value == obj) {
9578 #if defined DEBUG_notme && defined XP_UNIX
9579 DumpShape(obj, "forget");
9580 #endif
9581 e.removeFront();
9586 void
9587 TraceRecorder::forgetGuardedShapes()
9589 #if defined DEBUG_notme && defined XP_UNIX
9590 dumpGuardedShapes("forget-all");
9591 #endif
9592 guardedShapeTable.clear();
9595 JS_REQUIRES_STACK AbortableRecordingStatus
9596 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, PCVal& pcval)
9598 jsbytecode* pc = cx->regs->pc;
9599 JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD &&
9600 *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD);
9602 // Mimic the interpreter's special case for dense arrays by skipping up one
9603 // hop along the proto chain when accessing a named (not indexed) property,
9604 // typically to find Array.prototype methods.
9605 JSObject* aobj = obj;
9606 if (obj->isDenseArray()) {
9607 guardDenseArray(obj_ins, BRANCH_EXIT);
9608 aobj = obj->getProto();
9609 obj_ins = w.ldpObjProto(obj_ins);
9612 if (!aobj->isNative())
9613 RETURN_STOP_A("non-native object");
9615 JSAtom* atom;
9616 PropertyCacheEntry* entry;
9617 JS_PROPERTY_CACHE(cx).test(cx, pc, aobj, obj2, entry, atom);
9618 if (atom) {
9619 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
9620 // FIXME: bug 458271.
9621 jsid id = ATOM_TO_JSID(atom);
9623 // The lookup below may change object shapes.
9624 forgetGuardedShapes();
9626 JSProperty* prop;
9627 if (JOF_OPMODE(*pc) == JOF_NAME) {
9628 JS_ASSERT(aobj == obj);
9630 TraceMonitor &localtm = *traceMonitor;
9631 entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);
9632 if (!entry)
9633 RETURN_ERROR_A("error in js_FindPropertyHelper");
9635 /* js_FindPropertyHelper can reenter the interpreter and kill |this|. */
9636 if (!localtm.recorder)
9637 return ARECORD_ABORTED;
9639 if (entry == JS_NO_PROP_CACHE_FILL)
9640 RETURN_STOP_A("cannot cache name");
9641 } else {
9642 TraceMonitor &localtm = *traceMonitor;
9643 int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
9644 cx->resolveFlags,
9645 &obj2, &prop);
9647 if (protoIndex < 0)
9648 RETURN_ERROR_A("error in js_LookupPropertyWithFlags");
9650 /* js_LookupPropertyWithFlags can reenter the interpreter and kill |this|. */
9651 if (!localtm.recorder)
9652 return ARECORD_ABORTED;
9654 if (prop) {
9655 if (!obj2->isNative())
9656 RETURN_STOP_A("property found on non-native object");
9657 entry = JS_PROPERTY_CACHE(cx).fill(cx, aobj, 0, protoIndex, obj2,
9658 (Shape*) prop);
9659 JS_ASSERT(entry);
9660 if (entry == JS_NO_PROP_CACHE_FILL)
9661 entry = NULL;
9666 if (!prop) {
9667 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
9668 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
9669 // the global it's assigning to does not yet exist, create it.
9670 obj2 = obj;
9672 // Use a null pcval to return "no such property" to our caller.
9673 pcval.setNull();
9674 return ARECORD_CONTINUE;
9677 if (!entry)
9678 RETURN_STOP_A("failed to fill property cache");
9681 #ifdef JS_THREADSAFE
9682 // There's a potential race in any JS_THREADSAFE embedding that's nuts
9683 // enough to share mutable objects on the scope or proto chain, but we
9684 // don't care about such insane embeddings. Anyway, the (scope, proto)
9685 // entry->vcap coordinates must reach obj2 from aobj at this point.
9686 JS_ASSERT(cx->thread->data.requestDepth);
9687 #endif
9689 return InjectStatus(guardPropertyCacheHit(obj_ins, aobj, obj2, entry, pcval));
9692 JS_REQUIRES_STACK RecordingStatus
9693 TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
9694 JSObject* aobj,
9695 JSObject* obj2,
9696 PropertyCacheEntry* entry,
9697 PCVal& pcval)
9699 VMSideExit* exit = snapshot(BRANCH_EXIT);
9701 uint32 vshape = entry->vshape();
9703 // Special case for the global object, which may be aliased to get a property value.
9704 // To catch cross-global property accesses we must check against globalObj identity.
9705 // But a JOF_NAME mode opcode needs no guard, as we ensure the global object's shape
9706 // never changes, and name ops can't reach across a global object ('with' aborts).
9707 if (aobj == globalObj) {
9708 if (entry->adding())
9709 RETURN_STOP("adding a property to the global object");
9711 JSOp op = js_GetOpcode(cx, cx->fp()->script(), cx->regs->pc);
9712 if (JOF_OPMODE(op) != JOF_NAME) {
9713 guard(true,
9714 w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"),
9715 exit);
9717 } else {
9718 CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", exit));
9721 if (entry->adding()) {
9722 LIns *vshape_ins =
9723 w.ldiRuntimeProtoHazardShape(w.ldpConstContextField(runtime));
9725 guard(true,
9726 w.name(w.eqiN(vshape_ins, vshape), "guard_protoHazardShape"),
9727 BRANCH_EXIT);
9730 // For any hit that goes up the scope and/or proto chains, we will need to
9731 // guard on the shape of the object containing the property.
9732 if (entry->vcapTag() >= 1) {
9733 JS_ASSERT(obj2->shape() == vshape);
9734 if (obj2 == globalObj)
9735 RETURN_STOP("hitting the global object via a prototype chain");
9737 LIns* obj2_ins;
9738 if (entry->vcapTag() == 1) {
9739 // Duplicate the special case in PropertyCache::test.
9740 obj2_ins = w.ldpObjProto(obj_ins);
9741 guard(false, w.eqp0(obj2_ins), exit);
9742 } else {
9743 obj2_ins = w.immpObjGC(obj2);
9745 CHECK_STATUS(guardShape(obj2_ins, obj2, vshape, "guard_vshape", exit));
9748 pcval = entry->vword;
9749 return RECORD_CONTINUE;
9752 void
9753 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, const Value &v, LIns* v_ins)
9755 box_value_into(v, v_ins, FSlotsAddress(obj_ins, slot));
9758 void
9759 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& slots_ins,
9760 const Value &v, LIns* v_ins)
9762 if (!slots_ins)
9763 slots_ins = w.ldpObjSlots(obj_ins);
9764 box_value_into(v, v_ins, DSlotsAddress(slots_ins, slot));
9767 void
9768 TraceRecorder::stobj_set_slot(JSObject *obj, LIns* obj_ins, unsigned slot, LIns*& slots_ins,
9769 const Value &v, LIns* v_ins)
9771 /*
9772 * A shape guard must have already been generated for obj, which will
9773 * ensure that future objects have the same number of fixed slots.
9774 */
9775 if (!obj->hasSlotsArray()) {
9776 JS_ASSERT(slot < obj->numSlots());
9777 stobj_set_fslot(obj_ins, slot, v, v_ins);
9778 } else {
9779 stobj_set_dslot(obj_ins, slot, slots_ins, v, v_ins);
9783 LIns*
9784 TraceRecorder::unbox_slot(JSObject *obj, LIns *obj_ins, uint32 slot, VMSideExit *exit)
9786 /* Same guarantee about fixed slots as stobj_set_slot. */
9787 Address addr = (!obj->hasSlotsArray())
9788 ? (Address)FSlotsAddress(obj_ins, slot)
9789 : (Address)DSlotsAddress(w.ldpObjSlots(obj_ins), slot);
9791 return unbox_value(obj->getSlot(slot), addr, exit);
9794 #if JS_BITS_PER_WORD == 32
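// On 32-bit builds a boxed Value is a 32-bit type tag word plus a 32-bit
// payload word, so the boxing and unboxing helpers below read and write the
// two halves separately; the 64-bit variants further down instead pack the
// tag into the upper bits of a single word.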
9796 void
9797 TraceRecorder::box_undefined_into(Address addr)
9799 w.stiValueTag(w.nameImmui(JSVAL_TAG_UNDEFINED), addr);
9800 w.stiValuePayload(w.immi(0), addr);
9803 void
9804 TraceRecorder::box_null_into(Address addr)
9806 w.stiValueTag(w.nameImmui(JSVAL_TAG_NULL), addr);
9807 w.stiValuePayload(w.immi(0), addr);
9810 inline LIns*
9811 TraceRecorder::unbox_number_as_double(Address addr, LIns *tag_ins, VMSideExit *exit)
9813 guard(true, w.leui(tag_ins, w.nameImmui(JSVAL_UPPER_INCL_TAG_OF_NUMBER_SET)), exit);
9814 LIns *val_ins = w.ldiValuePayload(addr);
9815 LIns* args[] = { val_ins, tag_ins };
9816 return w.call(&js_UnboxDouble_ci, args);
9819 inline LIns*
9820 TraceRecorder::unbox_non_double_object(Address addr, LIns* tag_ins,
9821 JSValueType type, VMSideExit* exit)
9823 LIns *val_ins;
9824 if (type == JSVAL_TYPE_UNDEFINED) {
9825 val_ins = w.immiUndefined();
9826 } else if (type == JSVAL_TYPE_NULL) {
9827 val_ins = w.immpNull();
9828 } else {
9829 JS_ASSERT(type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_OBJECT ||
9830 type == JSVAL_TYPE_STRING || type == JSVAL_TYPE_BOOLEAN ||
9831 type == JSVAL_TYPE_MAGIC);
9832 val_ins = w.ldiValuePayload(addr);
9835 guard(true, w.eqi(tag_ins, w.nameImmui(JSVAL_TYPE_TO_TAG(type))), exit);
9836 return val_ins;
9839 LIns*
9840 TraceRecorder::unbox_object(Address addr, LIns* tag_ins, JSValueType type, VMSideExit* exit)
9842 JS_ASSERT(type == JSVAL_TYPE_FUNOBJ || type == JSVAL_TYPE_NONFUNOBJ);
9843 guard(true, w.name(w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_OBJECT)), "isObj"), exit);
9844 LIns *payload_ins = w.ldiValuePayload(addr);
9845 if (type == JSVAL_TYPE_FUNOBJ)
9846 guardClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
9847 else
9848 guardNotClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
9849 return payload_ins;
9852 LIns*
9853 TraceRecorder::unbox_value(const Value &v, Address addr, VMSideExit *exit, bool force_double)
9855 LIns *tag_ins = w.ldiValueTag(addr);
9857 if (v.isNumber() && force_double)
9858 return unbox_number_as_double(addr, tag_ins, exit);
9860 if (v.isInt32()) {
9861 guard(true, w.name(w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_INT32)), "isInt"), exit);
9862 return w.i2d(w.ldiValuePayload(addr));
9865 if (v.isDouble()) {
9866 guard(true, w.name(w.ltui(tag_ins, w.nameImmui(JSVAL_TAG_CLEAR)), "isDouble"), exit);
9867 return w.ldd(addr);
9870 if (v.isObject()) {
9871 JSValueType type = v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
9872 return unbox_object(addr, tag_ins, type, exit);
9875 JSValueType type = v.extractNonDoubleObjectTraceType();
9876 return unbox_non_double_object(addr, tag_ins, type, exit);
9879 void
9880 TraceRecorder::unbox_any_object(Address addr, LIns **obj_ins, LIns **is_obj_ins)
9882 LIns *tag_ins = w.ldiValueTag(addr);
9883 *is_obj_ins = w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_OBJECT));
9884 *obj_ins = w.ldiValuePayload(addr);
9887 LIns*
9888 TraceRecorder::is_boxed_true(Address addr)
9890 LIns *tag_ins = w.ldiValueTag(addr);
9891 LIns *bool_ins = w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_BOOLEAN));
9892 LIns *payload_ins = w.ldiValuePayload(addr);
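// Nonzero only when the tag is JSVAL_TAG_BOOLEAN and the 0/1 payload is 1,
// i.e. the boxed value is exactly |true|.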
9893 return w.gtiN(w.andi(bool_ins, payload_ins), 0);
9896 LIns*
9897 TraceRecorder::is_boxed_magic(Address addr, JSWhyMagic why)
9899 LIns *tag_ins = w.ldiValueTag(addr);
9900 return w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_MAGIC));
9903 void
9904 TraceRecorder::box_value_into(const Value &v, LIns *v_ins, Address addr)
9906 if (v.isNumber()) {
9907 JS_ASSERT(v_ins->isD());
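// Three cases for storing a number: if v_ins is the result of js_UnboxDouble,
// reuse its original tag and payload call arguments; if it is a promoted
// int32, store an int32-tagged value; otherwise store the double in place.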
9908 if (fcallinfo(v_ins) == &js_UnboxDouble_ci) {
9909 w.stiValueTag(v_ins->callArgN(0), addr);
9910 w.stiValuePayload(v_ins->callArgN(1), addr);
9911 } else if (IsPromotedInt32(v_ins)) {
9912 LIns *int_ins = w.demoteToInt32(v_ins);
9913 w.stiValueTag(w.nameImmui(JSVAL_TAG_INT32), addr);
9914 w.stiValuePayload(int_ins, addr);
9915 } else {
9916 w.std(v_ins, addr);
9918 return;
9921 if (v.isUndefined()) {
9922 box_undefined_into(addr);
9923 } else if (v.isNull()) {
9924 box_null_into(addr);
9925 } else {
9926 JSValueTag tag = v.isObject() ? JSVAL_TAG_OBJECT : v.extractNonDoubleObjectTraceTag();
9927 w.stiValueTag(w.nameImmui(tag), addr);
9928 w.stiValuePayload(v_ins, addr);
9932 LIns*
9933 TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins)
9935 return box_value_into_alloc(v, v_ins);
9938 #elif JS_BITS_PER_WORD == 64
9940 void
9941 TraceRecorder::box_undefined_into(Address addr)
9943 w.stq(w.nameImmq(JSVAL_BITS(JSVAL_VOID)), addr);
9946 inline LIns *
9947 TraceRecorder::non_double_object_value_has_type(LIns *v_ins, JSValueType type)
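// The 64-bit tag lives in the bits above JSVAL_TAG_SHIFT: shift it down and
// compare against the tag corresponding to |type|.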
9949 return w.eqi(w.q2i(w.rshuqN(v_ins, JSVAL_TAG_SHIFT)),
9950 w.nameImmui(JSVAL_TYPE_TO_TAG(type)));
9953 inline LIns *
9954 TraceRecorder::unpack_ptr(LIns *v_ins)
9956 return w.andq(v_ins, w.nameImmq(JSVAL_PAYLOAD_MASK));
9959 inline LIns *
9960 TraceRecorder::unbox_number_as_double(LIns *v_ins, VMSideExit *exit)
9962 guard(true,
9963 w.ltuq(v_ins, w.nameImmq(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_NUMBER_SET)),
9964 exit);
9965 LIns* args[] = { v_ins };
9966 return w.call(&js_UnboxDouble_ci, args);
9969 inline nanojit::LIns*
9970 TraceRecorder::unbox_non_double_object(LIns* v_ins, JSValueType type, VMSideExit* exit)
9972 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
9973 LIns *unboxed_ins;
9974 if (type == JSVAL_TYPE_UNDEFINED) {
9975 unboxed_ins = w.immiUndefined();
9976 } else if (type == JSVAL_TYPE_NULL) {
9977 unboxed_ins = w.immpNull();
9978 } else if (type >= JSVAL_LOWER_INCL_TYPE_OF_PTR_PAYLOAD_SET) {
9979 unboxed_ins = unpack_ptr(v_ins);
9980 } else {
9981 JS_ASSERT(type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_BOOLEAN || type == JSVAL_TYPE_MAGIC);
9982 unboxed_ins = w.q2i(v_ins);
9985 guard(true, non_double_object_value_has_type(v_ins, type), exit);
9986 return unboxed_ins;
9989 LIns*
9990 TraceRecorder::unbox_object(LIns* v_ins, JSValueType type, VMSideExit* exit)
9992 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT == JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
9993 JS_ASSERT(type == JSVAL_TYPE_FUNOBJ || type == JSVAL_TYPE_NONFUNOBJ);
9994 guard(true,
9995 w.geuq(v_ins, w.nameImmq(JSVAL_SHIFTED_TAG_OBJECT)),
9996 exit);
9997 v_ins = unpack_ptr(v_ins);
9998 if (type == JSVAL_TYPE_FUNOBJ)
9999 guardClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
10000 else
10001 guardNotClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
10002 return v_ins;
10005 LIns*
10006 TraceRecorder::unbox_value(const Value &v, Address addr, VMSideExit *exit, bool force_double)
10008 LIns *v_ins = w.ldq(addr);
10010 if (v.isNumber() && force_double)
10011 return unbox_number_as_double(v_ins, exit);
10013 if (v.isInt32()) {
10014 guard(true, non_double_object_value_has_type(v_ins, JSVAL_TYPE_INT32), exit);
10015 return w.i2d(w.q2i(v_ins));
10018 if (v.isDouble()) {
10019 guard(true, w.leuq(v_ins, w.nameImmq(JSVAL_SHIFTED_TAG_MAX_DOUBLE)), exit);
10020 return w.qasd(v_ins);
10023 if (v.isObject()) {
10024 JSValueType type = v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
10025 return unbox_object(v_ins, type, exit);
10028 JSValueType type = v.extractNonDoubleObjectTraceType();
10029 return unbox_non_double_object(v_ins, type, exit);
10032 void
10033 TraceRecorder::unbox_any_object(Address addr, LIns **obj_ins, LIns **is_obj_ins)
10035 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT == JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
10036 LIns *v_ins = w.ldq(addr);
10037 *is_obj_ins = w.geuq(v_ins, w.nameImmq(JSVAL_TYPE_OBJECT));
10038 *obj_ins = unpack_ptr(v_ins);
10041 LIns*
10042 TraceRecorder::is_boxed_true(Address addr)
10044 LIns *v_ins = w.ldq(addr);
10045 return w.eqq(v_ins, w.immq(JSVAL_BITS(JSVAL_TRUE)));
10048 LIns*
10049 TraceRecorder::is_boxed_magic(Address addr, JSWhyMagic why)
10051 LIns *v_ins = w.ldq(addr);
10052 return w.eqq(v_ins, w.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC, why)));
10055 LIns*
10056 TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins)
10058 if (v.isNumber()) {
10059 JS_ASSERT(v_ins->isD());
10060 if (fcallinfo(v_ins) == &js_UnboxDouble_ci)
10061 return v_ins->callArgN(0);
10062 if (IsPromotedInt32(v_ins)) {
10063 return w.orq(w.ui2uq(w.demoteToInt32(v_ins)),
10064 w.nameImmq(JSVAL_SHIFTED_TAG_INT32));
10066 return w.dasq(v_ins);
10069 if (v.isNull())
10070 return w.nameImmq(JSVAL_BITS(JSVAL_NULL));
10071 if (v.isUndefined())
10072 return w.nameImmq(JSVAL_BITS(JSVAL_VOID));
10074 JSValueTag tag = v.isObject() ? JSVAL_TAG_OBJECT : v.extractNonDoubleObjectTraceTag();
10075 uint64 shiftedTag = ((uint64)tag) << JSVAL_TAG_SHIFT;
10076 LIns *shiftedTag_ins = w.nameImmq(shiftedTag);
10078 if (v.hasPtrPayload())
10079 return w.orq(v_ins, shiftedTag_ins);
10080 return w.orq(w.ui2uq(v_ins), shiftedTag_ins);
10083 void
10084 TraceRecorder::box_value_into(const Value &v, LIns *v_ins, Address addr)
10086 LIns *boxed_ins = box_value_for_native_call(v, v_ins);
10087 w.st(boxed_ins, addr);
10090 #endif /* JS_BITS_PER_WORD */
10092 LIns*
10093 TraceRecorder::box_value_into_alloc(const Value &v, LIns *v_ins)
10095 LIns *alloc_ins = w.allocp(sizeof(Value));
10096 box_value_into(v, v_ins, AllocSlotsAddress(alloc_ins));
10097 return alloc_ins;
10100 LIns*
10101 TraceRecorder::is_string_id(LIns *id_ins)
10103 return w.eqp0(w.andp(id_ins, w.nameImmw(JSID_TYPE_MASK)));
10106 LIns *
10107 TraceRecorder::unbox_string_id(LIns *id_ins)
10109 JS_STATIC_ASSERT(JSID_TYPE_STRING == 0);
10110 return id_ins;
10113 LIns *
10114 TraceRecorder::unbox_int_id(LIns *id_ins)
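// Integer jsids carry the int32 value shifted left one bit above the low
// type-tag bit, so an arithmetic right shift by one recovers the value.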
10116 return w.rshiN(w.p2i(id_ins), 1);
10119 JS_REQUIRES_STACK RecordingStatus
10120 TraceRecorder::getThis(LIns*& this_ins)
10122 JSStackFrame *fp = cx->fp();
10124 if (fp->isGlobalFrame()) {
10125 // Top-level code. It is an invariant of the interpreter that fp->thisv
10126 // is non-null. Furthermore, we would not be recording if globalObj
10127 // were not at the end of the scope chain, so `this` can only be one
10128 // object, which we can burn into the trace.
10129 JS_ASSERT(!fp->thisValue().isPrimitive());
10131 #ifdef DEBUG
10132 JSObject *obj = globalObj->thisObject(cx);
10133 if (!obj)
10134 RETURN_ERROR("thisObject hook failed");
10135 JS_ASSERT(&fp->thisValue().toObject() == obj);
10136 #endif
10138 this_ins = w.immpObjGC(&fp->thisValue().toObject());
10139 return RECORD_CONTINUE;
10142 JS_ASSERT(fp->callee().getGlobal() == globalObj);
10143 Value& thisv = fp->thisValue();
10145 if (thisv.isObject() || fp->fun()->inStrictMode()) {
10146 /*
10147 * fp->thisValue() has already been computed. Since the
10148 * type-specialization of traces distinguishes between computed and
10149 * uncomputed |this|, the same will be true at run time (or we
10150 * won't get this far).
10151 */
10152 this_ins = get(&fp->thisValue());
10153 return RECORD_CONTINUE;
10156 /* Don't bother tracing calls on wrapped primitive |this| values. */
10157 if (!thisv.isNullOrUndefined())
10158 RETURN_STOP("wrapping primitive |this|");
10160 /*
10161 * Compute 'this' now. The result is globalObj->thisObject(), which is
10162 * trace-constant. getThisObject writes back to fp->thisValue(), so do
10163 * the same on trace.
10164 */
10165 if (!fp->computeThis(cx))
10166 RETURN_ERROR("computeThis failed");
10168 /* thisv is a reference, so it'll see the newly computed |this|. */
10169 this_ins = w.immpObjGC(globalObj);
10170 set(&thisv, this_ins);
10171 return RECORD_CONTINUE;
10174 JS_REQUIRES_STACK void
10175 TraceRecorder::guardClassHelper(bool cond, LIns* obj_ins, Class* clasp, VMSideExit* exit,
10176 LoadQual loadQual)
10178 LIns* class_ins = w.ldpObjClasp(obj_ins, loadQual);
10180 #ifdef JS_JIT_SPEW
10181 char namebuf[32];
10182 JS_snprintf(namebuf, sizeof namebuf, "%s_clasp", clasp->name);
10183 LIns* clasp_ins = w.name(w.immpNonGC(clasp), namebuf);
10184 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
10185 LIns* cmp_ins = w.name(w.eqp(class_ins, clasp_ins), namebuf);
10186 #else
10187 LIns* clasp_ins = w.immpNonGC(clasp);
10188 LIns* cmp_ins = w.eqp(class_ins, clasp_ins);
10189 #endif
10190 guard(cond, cmp_ins, exit);
10193 JS_REQUIRES_STACK void
10194 TraceRecorder::guardClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
10196 guardClassHelper(true, obj_ins, clasp, exit, loadQual);
10199 JS_REQUIRES_STACK void
10200 TraceRecorder::guardNotClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
10202 guardClassHelper(false, obj_ins, clasp, exit, loadQual);
10205 JS_REQUIRES_STACK void
10206 TraceRecorder::guardDenseArray(LIns* obj_ins, ExitType exitType)
10208 guardClass(obj_ins, &js_ArrayClass, snapshot(exitType), LOAD_NORMAL);
10211 JS_REQUIRES_STACK void
10212 TraceRecorder::guardDenseArray(LIns* obj_ins, VMSideExit* exit)
10214 guardClass(obj_ins, &js_ArrayClass, exit, LOAD_NORMAL);
10217 JS_REQUIRES_STACK bool
10218 TraceRecorder::guardHasPrototype(JSObject* obj, LIns* obj_ins,
10219 JSObject** pobj, LIns** pobj_ins,
10220 VMSideExit* exit)
10222 *pobj = obj->getProto();
10223 *pobj_ins = w.ldpObjProto(obj_ins);
10225 bool cond = *pobj == NULL;
10226 guard(cond, w.name(w.eqp0(*pobj_ins), "guard(proto-not-null)"), exit);
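// The guard pins down whichever case (null or non-null proto) held at record
// time; return true iff the object has a prototype.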
10227 return !cond;
10230 JS_REQUIRES_STACK RecordingStatus
10231 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, VMSideExit *exit)
10233 /*
10234 * Guard that no object along the prototype chain has any indexed
10235 * properties which might become visible through holes in the array.
10236 */
10237 if (js_PrototypeHasIndexedProperties(cx, obj))
10238 return RECORD_STOP;
10240 JS_ASSERT(obj->isDenseArray());
10242 /*
10243 * Changing __proto__ on a dense array makes it slow, so we can just bake in
10244 * the current prototype as the first prototype to test. This avoids an
10245 * extra load when running the trace.
10246 */
10247 obj = obj->getProto();
10248 JS_ASSERT(obj);
10250 obj_ins = w.immpObjGC(obj);
10252 /*
10253 * Changing __proto__ on a native object changes its shape, and adding
10254 * indexed properties changes shapes too. And non-native objects never pass
10255 * shape guards. So it's enough to just guard on shapes up the proto chain;
10256 * any change to the proto chain length will make us fail a guard before we
10257 * run off the end of the proto chain.
10258 */
10259 do {
10260 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
10261 obj = obj->getProto();
10262 obj_ins = w.ldpObjProto(obj_ins);
10263 } while (obj);
10265 return RECORD_CONTINUE;
10268 /*
10269 * Guard that the object stored in v has the ECMA standard [[DefaultValue]]
10270 * method. Several imacros require this.
10271 */
10272 JS_REQUIRES_STACK RecordingStatus
10273 TraceRecorder::guardNativeConversion(Value& v)
10275 JSObject* obj = &v.toObject();
10276 LIns* obj_ins = get(&v);
10278 ConvertOp convert = obj->getClass()->convert;
10279 if (convert != Valueify(JS_ConvertStub) && convert != js_TryValueOf)
10280 RETURN_STOP("operand has convert hook");
10282 VMSideExit* exit = snapshot(BRANCH_EXIT);
10283 if (obj->isNative()) {
10284 // The common case. Guard on shape rather than class because it'll
10285 // often be free: we're about to do a shape guard anyway to get the
10286 // .valueOf property of this object, and this guard will be cached.
10287 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(),
10288 "guardNativeConversion", exit));
10289 } else {
10290 // We could specialize to guard on just JSClass.convert, but a mere
10291 // class guard is simpler and slightly faster.
10292 guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);
10294 return RECORD_CONTINUE;
10297 JS_REQUIRES_STACK void
10298 TraceRecorder::clearReturningFrameFromNativeTracker()
10300 /*
10301 * Clear all tracker entries associated with the frame for the same reason
10302 * described in record_EnterFrame. Reuse the generic visitor to avoid
10303 * duplicating logic. The generic visitor stops at 'sp', whereas we need to
10304 * clear up to script->nslots, so finish the job manually.
10305 */
10306 ClearSlotsVisitor visitor(nativeFrameTracker);
10307 VisitStackSlots(visitor, cx, 0);
10308 Value *vp = cx->regs->sp;
10309 Value *vpend = cx->fp()->slots() + cx->fp()->script()->nslots;
10310 for (; vp < vpend; ++vp)
10311 nativeFrameTracker.set(vp, NULL);
10314 class BoxArg
10316 public:
10317 BoxArg(TraceRecorder *tr, Address addr)
10318 : tr(tr), addr(addr) {}
10319 TraceRecorder *tr;
10320 Address addr;
10321 void operator()(uintN argi, Value *src) {
10322 tr->box_value_into(*src, tr->get(src), OffsetAddress(addr, argi * sizeof(Value)));
10326 /*
10327 * If we have created an |arguments| object for the frame, we must copy the
10328 * argument values into the object as properties in case it is used after
10329 * this frame returns.
10330 */
10331 JS_REQUIRES_STACK void
10332 TraceRecorder::putActivationObjects()
10334 JSStackFrame *const fp = cx->fp();
10335 bool have_args = fp->hasArgsObj() && !fp->argsObj().isStrictArguments();
10336 bool have_call = fp->isFunctionFrame() && fp->fun()->isHeavyweight();
10338 if (!have_args && !have_call)
10339 return;
10341 uintN nformal = fp->numFormalArgs();
10342 uintN nactual = fp->numActualArgs();
10343 uintN nargs = have_args && have_call ? Max(nformal, nactual)
10344 : have_args ? nactual : nformal;
10346 LIns *args_ins;
10347 if (nargs > 0) {
10348 args_ins = w.allocp(nargs * sizeof(Value));
10349 /* Don't copy all the actuals if we are only boxing for the callobj. */
10350 Address addr = AllocSlotsAddress(args_ins);
10351 if (nargs == nactual)
10352 fp->forEachCanonicalActualArg(BoxArg(this, addr));
10353 else
10354 fp->forEachFormalArg(BoxArg(this, addr));
10355 } else {
10356 args_ins = w.immpNonGC(0);
10359 if (have_args) {
10360 LIns* argsobj_ins = getFrameObjPtr(fp->addressOfArgs());
10361 LIns* args[] = { args_ins, argsobj_ins, cx_ins };
10362 w.call(&js_PutArgumentsOnTrace_ci, args);
10365 if (have_call) {
10366 int nslots = fp->fun()->script()->bindings.countVars();
10367 LIns* slots_ins;
10368 if (nslots) {
10369 slots_ins = w.allocp(sizeof(Value) * nslots);
10370 for (int i = 0; i < nslots; ++i) {
10371 box_value_into(fp->slots()[i], get(&fp->slots()[i]),
10372 AllocSlotsAddress(slots_ins, i));
10374 } else {
10375 slots_ins = w.immpNonGC(0);
10378 LIns* scopeChain_ins = getFrameObjPtr(fp->addressOfScopeChain());
10379 LIns* args[] = { slots_ins, w.nameImmi(nslots), args_ins,
10380 w.nameImmi(fp->numFormalArgs()), scopeChain_ins, cx_ins };
10381 w.call(&js_PutCallObjectOnTrace_ci, args);
10385 JS_REQUIRES_STACK AbortableRecordingStatus
10386 TraceRecorder::record_EnterFrame()
10388 JSStackFrame* const fp = cx->fp();
10390 if (++callDepth >= MAX_CALLDEPTH)
10391 RETURN_STOP_A("exceeded maximum call depth");
10393 debug_only_stmt(JSAutoByteString funBytes);
10394 debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
10395 cx->fp()->fun()->atom ?
10396 js_AtomToPrintableString(cx, cx->fp()->fun()->atom, &funBytes) :
10397 "<anonymous>",
10398 callDepth);
10399 debug_only_stmt(
10400 if (LogController.lcbits & LC_TMRecorder) {
10401 js_Disassemble(cx, cx->fp()->script(), JS_TRUE, stdout);
10402 debug_only_print0(LC_TMTracer, "----\n");
10405 LIns* void_ins = w.immiUndefined();
10407 // Before we enter this frame, we need to clear out any dangling insns left
10408 // in the tracer. While we also clear when returning from a function, it is
10409 // possible to have the following sequence of stack usage:
10411 // [fp1]***************** push
10412 // [fp1]***** pop
10413 // [fp1]*****[fp2] call
10414 // [fp1]*****[fp2]*** push
10416 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
10418 // args: carefully copy stack layout
10419 uintN nactual = fp->numActualArgs();
10420 uintN nformal = fp->numFormalArgs();
10421 if (nactual < nformal) {
10422 // Fill in missing with void.
10423 JS_ASSERT(fp->actualArgs() == fp->formalArgs());
10424 Value *beg = fp->formalArgs() + nactual;
10425 Value *end = fp->formalArgsEnd();
10426 for (Value *vp = beg; vp != end; ++vp) {
10427 nativeFrameTracker.set(vp, NULL);
10428 set(vp, void_ins);
10430 } else if (nactual > nformal) {
10431 // Although the VM clones the formal args to the top of the stack, due
10432 // to the fact that we only track the canonical arguments (in argument
10433 // order), the native stack offset of the arguments doesn't change. The
10434 // only thing that changes is which js::Value* in the tracker maps to
10435 // that slot. Thus, all we need to do here is fixup the trackers, not
10436 // emit any actual copying on trace.
10437 JS_ASSERT(fp->actualArgs() != fp->formalArgs());
10438 JS_ASSERT(fp->hasOverflowArgs());
10439 Value *srcbeg = fp->actualArgs() - 2;
10440 Value *srcend = fp->actualArgs() + nformal;
10441 Value *dstbeg = fp->formalArgs() - 2;
10442 for (Value *src = srcbeg, *dst = dstbeg; src != srcend; ++src, ++dst) {
10443 nativeFrameTracker.set(dst, NULL);
10444 tracker.set(dst, tracker.get(src));
10445 nativeFrameTracker.set(src, NULL);
10446 tracker.set(src, NULL);
10450 // argsObj: clear and set to null
10451 nativeFrameTracker.set(fp->addressOfArgs(), NULL);
10452 setFrameObjPtr(fp->addressOfArgs(), w.immpNull());
10454 // scopeChain: clear, initialize before snapshot, set below
10455 nativeFrameTracker.set(fp->addressOfScopeChain(), NULL);
10456 setFrameObjPtr(fp->addressOfScopeChain(), w.immpNull());
10458 // nfixed: clear and set to undefined
10459 Value *vp = fp->slots();
10460 Value *vpstop = vp + fp->numFixed();
10461 for (; vp < vpstop; ++vp) {
10462 nativeFrameTracker.set(vp, NULL);
10463 set(vp, void_ins);
10466 // nfixed to nslots: clear
10467 vp = fp->base();
10468 vpstop = fp->slots() + fp->numSlots();
10469 for (; vp < vpstop; ++vp)
10470 nativeFrameTracker.set(vp, NULL);
10472 LIns* callee_ins = get(&cx->fp()->calleeValue());
10473 LIns* scopeChain_ins = w.ldpObjParent(callee_ins);
10475 // set scopeChain for real
10476 if (cx->fp()->fun()->isHeavyweight()) {
10477 if (js_IsNamedLambda(cx->fp()->fun()))
10478 RETURN_STOP_A("can't call named lambda heavyweight on trace");
10480 LIns* fun_ins = w.nameImmpNonGC(cx->fp()->fun());
10482 LIns* args[] = { scopeChain_ins, callee_ins, fun_ins, cx_ins };
10483 LIns* call_ins = w.call(&js_CreateCallObjectOnTrace_ci, args);
10484 guard(false, w.eqp0(call_ins), OOM_EXIT);
10486 setFrameObjPtr(fp->addressOfScopeChain(), call_ins);
10487 } else {
10488 setFrameObjPtr(fp->addressOfScopeChain(), scopeChain_ins);
10491 /* Try inlining one level in case this recursion doesn't go too deep. */
10492 if (fp->script() == fp->prev()->script() &&
10493 fp->prev()->prev() && fp->prev()->prev()->script() == fp->script()) {
10494 RETURN_STOP_A("recursion started inlining");
10497 return ARECORD_CONTINUE;
10500 JS_REQUIRES_STACK AbortableRecordingStatus
10501 TraceRecorder::record_LeaveFrame()
10503 debug_only_stmt(JSStackFrame *fp = cx->fp();)
10505 JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, fp->script(),
10506 cx->regs->pc)].length == JSOP_CALL_LENGTH);
10508 if (callDepth-- <= 0)
10509 RETURN_STOP_A("returned out of a loop we started tracing");
10511 // LeaveFrame gets called after the interpreter popped the frame and
10512 // stored rval, so cx->fp() not cx->fp()->prev, and -1 not 0.
10513 updateAtoms();
10514 set(&stackval(-1), rval_ins);
10515 return ARECORD_CONTINUE;
10518 JS_REQUIRES_STACK AbortableRecordingStatus
10519 TraceRecorder::record_JSOP_PUSH()
10521 stack(0, w.immiUndefined());
10522 return ARECORD_CONTINUE;
10525 JS_REQUIRES_STACK AbortableRecordingStatus
10526 TraceRecorder::record_JSOP_POPV()
10528 Value& rval = stackval(-1);
10530 // Store it in cx->fp()->rval. NB: Tricky dependencies. cx->fp() is the right
10531 // frame because POPV appears only in global and eval code and we don't
10532 // trace JSOP_EVAL or leaving the frame where tracing started.
10533 LIns *fp_ins = entryFrameIns();
10534 box_value_into(rval, get(&rval), StackFrameAddress(fp_ins,
10535 JSStackFrame::offsetOfReturnValue()));
10536 return ARECORD_CONTINUE;
10539 JS_REQUIRES_STACK AbortableRecordingStatus
10540 TraceRecorder::record_JSOP_ENTERWITH()
10542 return ARECORD_STOP;
10545 JS_REQUIRES_STACK AbortableRecordingStatus
10546 TraceRecorder::record_JSOP_LEAVEWITH()
10548 return ARECORD_STOP;
10551 static JSBool JS_FASTCALL
10552 functionProbe(JSContext *cx, JSFunction *fun, int enter)
10554 #ifdef MOZ_TRACE_JSCALLS
10555 JSScript *script = fun ? FUN_SCRIPT(fun) : NULL;
10556 if (enter > 0)
10557 Probes::enterJSFun(cx, fun, script, enter);
10558 else
10559 Probes::exitJSFun(cx, fun, script, enter);
10560 #endif
10561 return true;
10564 JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, INT32, 0, ACCSET_ALL)
10566 JS_REQUIRES_STACK AbortableRecordingStatus
10567 TraceRecorder::record_JSOP_RETURN()
10569 /* A return from callDepth 0 terminates the current loop, except for recursion. */
10570 if (callDepth == 0) {
10571 AUDIT(returnLoopExits);
10572 return endLoop();
10575 putActivationObjects();
10577 if (Probes::callTrackingActive(cx)) {
10578 LIns* args[] = { w.immi(0), w.nameImmpNonGC(cx->fp()->fun()), cx_ins };
10579 LIns* call_ins = w.call(&functionProbe_ci, args);
10580 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
10583 /* If we inlined this function call, make the return value available to the caller code. */
10584 Value& rval = stackval(-1);
10585 JSStackFrame *fp = cx->fp();
10586 if (fp->isConstructing() && rval.isPrimitive()) {
10587 rval_ins = get(&fp->thisValue());
10588 } else {
10589 rval_ins = get(&rval);
10591 debug_only_stmt(JSAutoByteString funBytes);
10592 debug_only_printf(LC_TMTracer,
10593 "returning from %s\n",
10594 fp->fun()->atom ?
10595 js_AtomToPrintableString(cx, fp->fun()->atom, &funBytes) :
10596 "<anonymous>");
10597 clearReturningFrameFromNativeTracker();
10599 return ARECORD_CONTINUE;
10602 JS_REQUIRES_STACK AbortableRecordingStatus
10603 TraceRecorder::record_JSOP_GOTO()
10605 /*
10606 * If we hit a break or a continue to an outer loop, end the loop and
10607 * generate an always-taken loop exit guard. For other downward gotos
10608 * (like if/else) continue recording.
10609 */
10610 jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), cx->regs->pc);
10612 if (sn) {
10613 if (SN_TYPE(sn) == SRC_BREAK) {
10614 AUDIT(breakLoopExits);
10615 return endLoop();
10618 /*
10619 * Tracing labeled break isn't impossible, but does require potentially
10620 * fixing up the block chain. See bug 616119.
10621 */
10622 if (SN_TYPE(sn) == SRC_BREAK2LABEL || SN_TYPE(sn) == SRC_CONT2LABEL)
10623 RETURN_STOP_A("labeled break");
10625 return ARECORD_CONTINUE;
10628 JS_REQUIRES_STACK AbortableRecordingStatus
10629 TraceRecorder::record_JSOP_IFEQ()
10631 trackCfgMerges(cx->regs->pc);
10632 return ifop();
10635 JS_REQUIRES_STACK AbortableRecordingStatus
10636 TraceRecorder::record_JSOP_IFNE()
10638 return ifop();
10641 LIns*
10642 TraceRecorder::newArguments(LIns* callee_ins, bool strict)
10644 LIns* global_ins = w.immpObjGC(globalObj);
10645 LIns* argc_ins = w.nameImmi(cx->fp()->numActualArgs());
10647 LIns* args[] = { callee_ins, argc_ins, global_ins, cx_ins };
10648 LIns* argsobj_ins = w.call(&js_NewArgumentsOnTrace_ci, args);
10649 guard(false, w.eqp0(argsobj_ins), OOM_EXIT);
10651 if (strict) {
10652 LIns* argsData_ins = w.getObjPrivatizedSlot(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
10653 ptrdiff_t slotsOffset = offsetof(ArgumentsData, slots);
10654 cx->fp()->forEachCanonicalActualArg(BoxArg(this, ArgsSlotOffsetAddress(argsData_ins,
10655 slotsOffset)));
10658 return argsobj_ins;
10661 JS_REQUIRES_STACK AbortableRecordingStatus
10662 TraceRecorder::record_JSOP_ARGUMENTS()
10664 JSStackFrame* const fp = cx->fp();
10666 /* In an eval, 'arguments' will be a BINDNAME, which we don't trace. */
10667 JS_ASSERT(!fp->isEvalFrame());
10669 if (fp->hasOverriddenArgs())
10670 RETURN_STOP_A("Can't trace |arguments| if |arguments| is assigned to");
10672 LIns* a_ins = getFrameObjPtr(fp->addressOfArgs());
10673 LIns* args_ins;
10674 LIns* callee_ins = get(&fp->calleeValue());
10675 bool strict = fp->fun()->inStrictMode();
10676 if (a_ins->isImmP()) {
10677 // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
10678 args_ins = newArguments(callee_ins, strict);
10679 } else {
10680 // Generate LIR to create arguments only if it has not already been created.
10682 LIns* mem_ins = w.allocp(sizeof(JSObject *));
10684 LIns* isZero_ins = w.eqp0(a_ins);
10685 if (isZero_ins->isImmI(0)) {
10686 w.stAlloc(a_ins, mem_ins);
10687 } else if (isZero_ins->isImmI(1)) {
10688 LIns* call_ins = newArguments(callee_ins, strict);
10689 w.stAlloc(call_ins, mem_ins);
10690 } else {
10691 LIns* br1 = w.jtUnoptimizable(isZero_ins);
10692 w.stAlloc(a_ins, mem_ins);
10693 LIns* br2 = w.j(NULL);
10694 w.label(br1);
10696 LIns* call_ins = newArguments(callee_ins, strict);
10697 w.stAlloc(call_ins, mem_ins);
10698 w.label(br2);
10700 args_ins = w.ldpAlloc(mem_ins);
10703 stack(0, args_ins);
10704 setFrameObjPtr(fp->addressOfArgs(), args_ins);
10705 return ARECORD_CONTINUE;
10708 JS_REQUIRES_STACK AbortableRecordingStatus
10709 TraceRecorder::record_JSOP_DUP()
10711 stack(0, get(&stackval(-1)));
10712 return ARECORD_CONTINUE;
10715 JS_REQUIRES_STACK AbortableRecordingStatus
10716 TraceRecorder::record_JSOP_DUP2()
10718 stack(0, get(&stackval(-2)));
10719 stack(1, get(&stackval(-1)));
10720 return ARECORD_CONTINUE;
10723 JS_REQUIRES_STACK AbortableRecordingStatus
10724 TraceRecorder::record_JSOP_SWAP()
10726 Value& l = stackval(-2);
10727 Value& r = stackval(-1);
10728 LIns* l_ins = get(&l);
10729 LIns* r_ins = get(&r);
10730 set(&r, l_ins);
10731 set(&l, r_ins);
10732 return ARECORD_CONTINUE;
10735 JS_REQUIRES_STACK AbortableRecordingStatus
10736 TraceRecorder::record_JSOP_PICK()
10738 Value* sp = cx->regs->sp;
10739 jsint n = cx->regs->pc[1];
10740 JS_ASSERT(sp - (n+1) >= cx->fp()->base());
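// JSOP_PICK rotates the top n+1 stack slots: the value n slots below the top
// moves to the top and the values above it each shift down one slot, e.g. for
// n == 2, [a b c] (c on top) becomes [b c a] (a on top).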
10741 LIns* top = get(sp - (n+1));
10742 for (jsint i = 0; i < n; ++i)
10743 set(sp - (n+1) + i, get(sp - n + i));
10744 set(&sp[-1], top);
10745 return ARECORD_CONTINUE;
10748 JS_REQUIRES_STACK AbortableRecordingStatus
10749 TraceRecorder::record_JSOP_SETCONST()
10751 return ARECORD_STOP;
10754 JS_REQUIRES_STACK AbortableRecordingStatus
10755 TraceRecorder::record_JSOP_BITOR()
10757 return InjectStatus(binary(LIR_ori));
10760 JS_REQUIRES_STACK AbortableRecordingStatus
10761 TraceRecorder::record_JSOP_BITXOR()
10763 return InjectStatus(binary(LIR_xori));
10766 JS_REQUIRES_STACK AbortableRecordingStatus
10767 TraceRecorder::record_JSOP_BITAND()
10769 return InjectStatus(binary(LIR_andi));
10772 JS_REQUIRES_STACK AbortableRecordingStatus
10773 TraceRecorder::record_JSOP_EQ()
10775 return equality(false, true);
10778 JS_REQUIRES_STACK AbortableRecordingStatus
10779 TraceRecorder::record_JSOP_NE()
10781 return equality(true, true);
10784 JS_REQUIRES_STACK AbortableRecordingStatus
10785 TraceRecorder::record_JSOP_LT()
10787 return relational(LIR_ltd, true);
10790 JS_REQUIRES_STACK AbortableRecordingStatus
10791 TraceRecorder::record_JSOP_LE()
10793 return relational(LIR_led, true);
10796 JS_REQUIRES_STACK AbortableRecordingStatus
10797 TraceRecorder::record_JSOP_GT()
10799 return relational(LIR_gtd, true);
10802 JS_REQUIRES_STACK AbortableRecordingStatus
10803 TraceRecorder::record_JSOP_GE()
10805 return relational(LIR_ged, true);
10808 JS_REQUIRES_STACK AbortableRecordingStatus
10809 TraceRecorder::record_JSOP_LSH()
10811 return InjectStatus(binary(LIR_lshi));
10814 JS_REQUIRES_STACK AbortableRecordingStatus
10815 TraceRecorder::record_JSOP_RSH()
10817 return InjectStatus(binary(LIR_rshi));
10820 JS_REQUIRES_STACK AbortableRecordingStatus
10821 TraceRecorder::record_JSOP_URSH()
10823 return InjectStatus(binary(LIR_rshui));
10826 JS_REQUIRES_STACK AbortableRecordingStatus
10827 TraceRecorder::record_JSOP_ADD()
10829 Value& r = stackval(-1);
10830 Value& l = stackval(-2);
10832 if (!l.isPrimitive()) {
10833 CHECK_STATUS_A(guardNativeConversion(l));
10834 if (!r.isPrimitive()) {
10835 CHECK_STATUS_A(guardNativeConversion(r));
10836 return InjectStatus(callImacro(add_imacros.obj_obj));
10838 return InjectStatus(callImacro(add_imacros.obj_any));
10840 if (!r.isPrimitive()) {
10841 CHECK_STATUS_A(guardNativeConversion(r));
10842 return InjectStatus(callImacro(add_imacros.any_obj));
10845 if (l.isString() || r.isString()) {
10846 LIns* args[] = { stringify(r), stringify(l), cx_ins };
10847 LIns* concat = w.call(&js_ConcatStrings_ci, args);
10848 guard(false, w.eqp0(concat), OOM_EXIT);
10849 set(&l, concat);
10850 return ARECORD_CONTINUE;
10853 return InjectStatus(binary(LIR_addd));
10856 JS_REQUIRES_STACK AbortableRecordingStatus
10857 TraceRecorder::record_JSOP_SUB()
10859 return InjectStatus(binary(LIR_subd));
10862 JS_REQUIRES_STACK AbortableRecordingStatus
10863 TraceRecorder::record_JSOP_MUL()
10865 return InjectStatus(binary(LIR_muld));
10868 JS_REQUIRES_STACK AbortableRecordingStatus
10869 TraceRecorder::record_JSOP_DIV()
10871 return InjectStatus(binary(LIR_divd));
10874 JS_REQUIRES_STACK AbortableRecordingStatus
10875 TraceRecorder::record_JSOP_MOD()
10877 return InjectStatus(binary(LIR_modd));
10880 JS_REQUIRES_STACK AbortableRecordingStatus
10881 TraceRecorder::record_JSOP_NOT()
10883 Value& v = stackval(-1);
10884 if (v.isBoolean() || v.isUndefined()) {
10885 set(&v, w.eqi0(w.eqiN(get(&v), 1)));
10886 return ARECORD_CONTINUE;
10888 if (v.isNumber()) {
10889 LIns* v_ins = get(&v);
10890 set(&v, w.ori(w.eqd0(v_ins), w.eqi0(w.eqd(v_ins, v_ins))));
10891 return ARECORD_CONTINUE;
10893 if (v.isObjectOrNull()) {
10894 set(&v, w.eqp0(get(&v)));
10895 return ARECORD_CONTINUE;
10897 JS_ASSERT(v.isString());
10898 set(&v, w.eqp0(w.getStringLength(get(&v))));
10899 return ARECORD_CONTINUE;
10902 JS_REQUIRES_STACK AbortableRecordingStatus
10903 TraceRecorder::record_JSOP_BITNOT()
10905 return InjectStatus(unary(LIR_noti));
10908 JS_REQUIRES_STACK AbortableRecordingStatus
10909 TraceRecorder::record_JSOP_NEG()
10911 Value& v = stackval(-1);
10913 if (!v.isPrimitive()) {
10914 CHECK_STATUS_A(guardNativeConversion(v));
10915 return InjectStatus(callImacro(unary_imacros.sign));
10918 if (v.isNumber()) {
10919 LIns* a = get(&v);
10921 /*
10922 * If we're a promoted integer, we have to watch out for 0s since -0 is
10923 * a double. Only follow this path if we're not an integer that's 0 and
10924 * we're not a double that's zero.
10925 */
10926 if (oracle &&
10927 !oracle->isInstructionUndemotable(cx->regs->pc) &&
10928 IsPromotedInt32(a) &&
10929 (!v.isInt32() || v.toInt32() != 0) &&
10930 (!v.isDouble() || v.toDouble() != 0) &&
10931 -v.toNumber() == (int)-v.toNumber())
10933 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
10934 a = guard_xov(LIR_subi, w.immi(0), w.demoteToInt32(a), exit);
10935 if (!a->isImmI() && a->isop(LIR_subxovi)) {
10936 guard(false, w.eqiN(a, 0), exit); // make sure we don't lose a -0
10938 a = w.i2d(a);
10939 } else {
10940 a = w.negd(a);
10943 set(&v, a);
10944 return ARECORD_CONTINUE;
10947 if (v.isNull()) {
10948 set(&v, w.immd(-0.0));
10949 return ARECORD_CONTINUE;
10952 if (v.isUndefined()) {
10953 set(&v, w.immd(js_NaN));
10954 return ARECORD_CONTINUE;
10957 if (v.isString()) {
10958 LIns* ok_ins = w.allocp(sizeof(JSBool));
10959 LIns* args[] = { ok_ins, get(&v), cx_ins };
10960 LIns* num_ins = w.call(&js_StringToNumber_ci, args);
10961 guard(false,
10962 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
10963 OOM_EXIT);
10964 set(&v, w.negd(num_ins));
10965 return ARECORD_CONTINUE;
10968 JS_ASSERT(v.isBoolean());
10969 set(&v, w.negd(w.i2d(get(&v))));
10970 return ARECORD_CONTINUE;
10973 JS_REQUIRES_STACK AbortableRecordingStatus
10974 TraceRecorder::record_JSOP_POS()
10976 Value& v = stackval(-1);
10978 if (!v.isPrimitive()) {
10979 CHECK_STATUS_A(guardNativeConversion(v));
10980 return InjectStatus(callImacro(unary_imacros.sign));
10983 if (v.isNumber())
10984 return ARECORD_CONTINUE;
10986 if (v.isNull()) {
10987 set(&v, w.immd(0));
10988 return ARECORD_CONTINUE;
10990 if (v.isUndefined()) {
10991 set(&v, w.immd(js_NaN));
10992 return ARECORD_CONTINUE;
10995 if (v.isString()) {
10996 LIns* ok_ins = w.allocp(sizeof(JSBool));
10997 LIns* args[] = { ok_ins, get(&v), cx_ins };
10998 LIns* num_ins = w.call(&js_StringToNumber_ci, args);
10999 guard(false,
11000 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
11001 OOM_EXIT);
11002 set(&v, num_ins);
11003 return ARECORD_CONTINUE;
11006 JS_ASSERT(v.isBoolean());
11007 set(&v, w.i2d(get(&v)));
11008 return ARECORD_CONTINUE;
11011 JS_REQUIRES_STACK AbortableRecordingStatus
11012 TraceRecorder::record_JSOP_PRIMTOP()
11014 // Either this opcode does nothing or we couldn't have traced here, because
11015 // we'd have thrown an exception -- so do nothing if we actually hit this.
11016 return ARECORD_CONTINUE;
11019 JS_REQUIRES_STACK AbortableRecordingStatus
11020 TraceRecorder::record_JSOP_OBJTOP()
11022 Value& v = stackval(-1);
11023 RETURN_IF_XML_A(v);
11024 return ARECORD_CONTINUE;
11027 RecordingStatus
11028 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
11030 // ctor must be a function created via js_InitClass.
11031 #ifdef DEBUG
11032 Class *clasp = FUN_CLASP(GET_FUNCTION_PRIVATE(cx, ctor));
11033 JS_ASSERT(clasp);
11035 TraceMonitor &localtm = *traceMonitor;
11036 #endif
11038 Value pval;
11039 if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
11040 RETURN_ERROR("error getting prototype from constructor");
11042 // ctor.prototype is a permanent data property, so this lookup cannot have
11043 // deep-aborted.
11044 JS_ASSERT(localtm.recorder);
11046 #ifdef DEBUG
11047 JSBool ok, found;
11048 uintN attrs;
11049 ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
11050 JS_ASSERT(ok);
11051 JS_ASSERT(found);
11052 JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
11053 #endif
11055 // Since ctor was built by js_InitClass, we can assert (rather than check)
11056 // that pval is usable.
11057 JS_ASSERT(!pval.isPrimitive());
11058 JSObject *proto = &pval.toObject();
11059 JS_ASSERT_IF(clasp != &js_ArrayClass, proto->emptyShapes[0]->getClass() == clasp);
11061 proto_ins = w.immpObjGC(proto);
11062 return RECORD_CONTINUE;
11065 RecordingStatus
11066 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
11068 #ifdef DEBUG
11069 TraceMonitor &localtm = *traceMonitor;
11070 #endif
11072 JSObject* proto;
11073 if (!js_GetClassPrototype(cx, globalObj, key, &proto))
11074 RETURN_ERROR("error in js_GetClassPrototype");
11076 // This should not have reentered.
11077 JS_ASSERT(localtm.recorder);
11079 #ifdef DEBUG
11080 /* Double-check that a native proto has a matching emptyShape. */
11081 if (key != JSProto_Array) {
11082 JS_ASSERT(proto->isNative());
11083 JS_ASSERT(proto->emptyShapes);
11084 EmptyShape *empty = proto->emptyShapes[0];
11085 JS_ASSERT(empty);
11086 JS_ASSERT(JSCLASS_CACHED_PROTO_KEY(empty->getClass()) == key);
11088 #endif
11090 proto_ins = w.immpObjGC(proto);
11091 return RECORD_CONTINUE;
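// Sentinel for pendingSpecializedNative: the recorder has already produced the
// call's result itself, so NativeCallComplete should not post-process (unbox) it.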
11094 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)
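// Record |new String(v)|: stringify the single argument, fetch the prototype from
// the constructor, and call js_String_tn to build the resulting String object.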
11096 RecordingStatus
11097 TraceRecorder::newString(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
11099 JS_ASSERT(argc == 1);
11101 if (!argv[0].isPrimitive()) {
11102 CHECK_STATUS(guardNativeConversion(argv[0]));
11103 return callImacro(new_imacros.String);
11106 LIns* proto_ins;
11107 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
11109 LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
11110 LIns* obj_ins = w.call(&js_String_tn_ci, args);
11111 guard(false, w.eqp0(obj_ins), OOM_EXIT);
11113 set(rval, obj_ins);
11114 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11115 return RECORD_CONTINUE;
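// Record Array construction: choose the NewDense* builtin matching the arguments
// (none, a single numeric length, or an explicit element list) and, in the last
// case, box each argument into the new array's slots.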
11118 RecordingStatus
11119 TraceRecorder::newArray(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
11121 LIns *proto_ins;
11122 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
11124 LIns *arr_ins;
11125 if (argc == 0) {
11126 LIns *args[] = { proto_ins, cx_ins };
11127 arr_ins = w.call(&js::NewDenseEmptyArray_ci, args);
11128 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11130 } else if (argc == 1 && argv[0].isNumber()) {
11131 /* Abort on RangeError if the double doesn't fit in a uint. */
11132 LIns *len_ins;
11133 CHECK_STATUS(makeNumberUint32(get(argv), &len_ins));
11134 LIns *args[] = { proto_ins, len_ins, cx_ins };
11135 arr_ins = w.call(&js::NewDenseUnallocatedArray_ci, args);
11136 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11138 } else {
11139 LIns *args[] = { proto_ins, w.nameImmi(argc), cx_ins };
11140 arr_ins = w.call(&js::NewDenseAllocatedArray_ci, args);
11141 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11143 // arr->slots[i] = box_jsval(vp[i]); for i in 0..argc
11144 LIns *slots_ins = NULL;
11145 for (uint32 i = 0; i < argc && !outOfMemory(); i++) {
11146 stobj_set_dslot(arr_ins, i, slots_ins, argv[i], get(&argv[i]));
11150 set(rval, arr_ins);
11151 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11152 return RECORD_CONTINUE;
11155 JS_REQUIRES_STACK void
11156 TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins)
11159 * Check the boolean return value (ok_ins) of a native JSNative,
11160 * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
11161 * BUILTIN_ERROR bit of cx->builtinStatus.
11163 * If the return value (ok_ins) is true, status' == status. Otherwise
11164 * status' = status | BUILTIN_ERROR. We calculate (rval&1)^1, which is 1
11165 * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
11166 * of BUILTIN_ERROR.
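 * For example, ok_ins == JS_FALSE (0) yields ((0 & 1) ^ 1) << 1 == BUILTIN_ERROR,
 * while ok_ins == JS_TRUE (1) yields ((1 & 1) ^ 1) << 1 == 0, leaving status unchanged.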
11168 JS_STATIC_ASSERT(((JS_TRUE & 1) ^ 1) << 1 == 0);
11169 JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == BUILTIN_ERROR);
11170 status_ins = w.ori(status_ins, w.lshiN(w.xoriN(w.andiN(ok_ins, 1), 1), 1));
11171 w.stStateField(status_ins, builtinStatus);
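// Emit a deep-bailing call to a native JSPropertyOp getter or setter. The boxed
// value travels through the alloca'd slot addr_boxed_val_ins, and nativeVp is
// published so that value stays rooted across the call.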
11174 JS_REQUIRES_STACK void
11175 TraceRecorder::emitNativePropertyOp(const Shape* shape, LIns* obj_ins,
11176 bool setflag, LIns* addr_boxed_val_ins)
11178 JS_ASSERT(addr_boxed_val_ins->isop(LIR_allocp));
11179 JS_ASSERT(setflag ? !shape->hasSetterValue() : !shape->hasGetterValue());
11180 JS_ASSERT(setflag ? !shape->hasDefaultSetter() : !shape->hasDefaultGetterOrIsMethod());
11182 enterDeepBailCall();
11184 w.stStateField(addr_boxed_val_ins, nativeVp);
11185 w.stStateField(w.immi(1), nativeVpLen);
11187 CallInfo* ci = new (traceAlloc()) CallInfo();
11188 /* Setters and getters have their initial arguments in common. */
11189 LIns* possibleArgs[] = { NULL, NULL, w.immpIdGC(SHAPE_USERID(shape)), obj_ins, cx_ins };
11190 LIns** args;
11191 if (setflag) {
11192 ci->_address = uintptr_t(shape->setterOp());
11193 ci->_typesig = CallInfo::typeSig5(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_B,
11194 ARGTYPE_P);
11195 possibleArgs[0] = addr_boxed_val_ins;
11196 possibleArgs[1] = strictModeCode_ins;
11197 args = possibleArgs;
11198 } else {
11199 ci->_address = uintptr_t(shape->getterOp());
11200 ci->_typesig = CallInfo::typeSig4(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P);
11201 possibleArgs[1] = addr_boxed_val_ins;
11202 args = possibleArgs + 1;
11204 ci->_isPure = 0;
11205 ci->_storeAccSet = ACCSET_STORE_ANY;
11206 ci->_abi = ABI_CDECL;
11207 #ifdef DEBUG
11208 ci->_name = "JSPropertyOp";
11209 #endif
11210 LIns* ok_ins = w.call(ci, args);
11212 // Cleanup. Immediately clear nativeVp before we might deep bail.
11213 w.stStateField(w.immpNull(), nativeVp);
11214 leaveDeepBailCall();
11216 // Guard that the call succeeded and builtinStatus is still 0.
11217 // If the native op succeeds but we deep-bail here, the result value is
11218 // lost! Therefore this can only be used for setters of shared properties.
11219 // In that case we ignore the result value anyway.
11220 LIns* status_ins = w.ldiStateField(builtinStatus);
11221 propagateFailureToBuiltinStatus(ok_ins, status_ins);
11222 guard(true, w.eqi0(status_ins), STATUS_EXIT);
11225 JS_REQUIRES_STACK RecordingStatus
11226 TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
11228 if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
11229 // This needs to capture the pre-call state of the stack. So do not set
11230 // pendingSpecializedNative before taking this snapshot.
11231 JS_ASSERT(!pendingSpecializedNative);
11233 // Take snapshot for DeepBail and store it in tm->bailExit.
11234 enterDeepBailCall();
11237 LIns* res_ins = w.call(sn->builtin, args);
11239 // Immediately unroot the vp as soon as we return since we might deep bail next.
11240 if (rooted)
11241 w.stStateField(w.immpNull(), nativeVp);
11243 rval_ins = res_ins;
11244 switch (JSTN_ERRTYPE(sn)) {
11245 case FAIL_NULL:
11246 guard(false, w.eqp0(res_ins), OOM_EXIT);
11247 break;
11248 case FAIL_NEG:
11249 res_ins = w.i2d(res_ins);
11250 guard(false, w.ltdN(res_ins, 0), OOM_EXIT);
11251 break;
11252 case FAIL_NEITHER:
11253 guard(false, w.eqiN(res_ins, JS_NEITHER), OOM_EXIT);
11254 break;
11255 default:;
11258 set(&stackval(0 - (2 + argc)), res_ins);
11261 * The return value will be processed by NativeCallComplete since
11262 * we have to know the actual return value type for calls that return
11263 * jsval.
11265 pendingSpecializedNative = sn;
11267 return RECORD_CONTINUE;
11271 * Check whether we have a specialized implementation for this native
11272 * invocation.
11274 JS_REQUIRES_STACK RecordingStatus
11275 TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc,
11276 bool constructing)
11278 JSStackFrame* const fp = cx->fp();
11279 jsbytecode *pc = cx->regs->pc;
11281 Value& fval = stackval(0 - (2 + argc));
11282 Value& tval = stackval(0 - (1 + argc));
11284 LIns* this_ins = get(&tval);
11286 LIns* args[nanojit::MAXARGS];
11287 JSSpecializedNative *sn = trcinfo->specializations;
11288 JS_ASSERT(sn);
11289 do {
11290 if (((sn->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
11291 continue;
11293 uintN knownargc = strlen(sn->argtypes);
11294 if (argc != knownargc)
11295 continue;
11297 intN prefixc = strlen(sn->prefix);
11298 JS_ASSERT(prefixc <= 3);
11299 LIns** argp = &args[argc + prefixc - 1];
11300 char argtype;
11302 #if defined DEBUG
11303 memset(args, 0xCD, sizeof(args));
11304 #endif
11306 uintN i;
11307 for (i = prefixc; i--; ) {
11308 argtype = sn->prefix[i];
11309 if (argtype == 'C') {
11310 *argp = cx_ins;
11311 } else if (argtype == 'T') { /* this, as an object */
11312 if (tval.isPrimitive())
11313 goto next_specialization;
11314 *argp = this_ins;
11315 } else if (argtype == 'S') { /* this, as a string */
11316 if (!tval.isString())
11317 goto next_specialization;
11318 *argp = this_ins;
11319 } else if (argtype == 'f') {
11320 *argp = w.immpObjGC(&fval.toObject());
11321 } else if (argtype == 'p') {
11322 CHECK_STATUS(getClassPrototype(&fval.toObject(), *argp));
11323 } else if (argtype == 'R') {
11324 *argp = w.nameImmpNonGC(cx->runtime);
11325 } else if (argtype == 'P') {
11326 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
11327 // JSOP_GETELEM imacro (bug 476559).
11328 if ((*pc == JSOP_CALL) &&
11329 fp->hasImacropc() && *fp->imacropc() == JSOP_GETELEM)
11330 *argp = w.nameImmpNonGC(fp->imacropc());
11331 else
11332 *argp = w.nameImmpNonGC(pc);
11333 } else if (argtype == 'D') { /* this, as a number */
11334 if (!tval.isNumber())
11335 goto next_specialization;
11336 *argp = this_ins;
11337 } else if (argtype == 'M') {
11338 MathCache *mathCache = GetMathCache(cx);
11339 if (!mathCache)
11340 return RECORD_ERROR;
11341 *argp = w.nameImmpNonGC(mathCache);
11342 } else {
11343 JS_NOT_REACHED("unknown prefix arg type");
11345 argp--;
11348 for (i = knownargc; i--; ) {
11349 Value& arg = stackval(0 - (i + 1));
11350 *argp = get(&arg);
11352 argtype = sn->argtypes[i];
11353 if (argtype == 'd' || argtype == 'i') {
11354 if (!arg.isNumber())
11355 goto next_specialization;
11356 if (argtype == 'i')
11357 *argp = d2i(*argp);
11358 } else if (argtype == 'o') {
11359 if (arg.isPrimitive())
11360 goto next_specialization;
11361 } else if (argtype == 's') {
11362 if (!arg.isString())
11363 goto next_specialization;
11364 } else if (argtype == 'r') {
11365 if (!VALUE_IS_REGEXP(cx, arg))
11366 goto next_specialization;
11367 } else if (argtype == 'f') {
11368 if (!IsFunctionObject(arg))
11369 goto next_specialization;
11370 } else if (argtype == 'v') {
11371 *argp = box_value_for_native_call(arg, *argp);
11372 } else {
11373 goto next_specialization;
11375 argp--;
11377 #if defined DEBUG
11378 JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
11379 #endif
11380 return emitNativeCall(sn, argc, args, false);
11382 next_specialization:;
11383 } while ((sn++)->flags & JSTN_MORE);
11385 return RECORD_STOP;
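// Helpers for Math.ceil/floor/round called from trace: compute the result and
// report whether it is exactly representable as an int32 (written to *out).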
11388 static JSBool FASTCALL
11389 ceilReturningInt(jsdouble x, int32 *out)
11391 jsdouble r = js_math_ceil_impl(x);
11392 return JSDOUBLE_IS_INT32(r, out);
11395 static JSBool FASTCALL
11396 floorReturningInt(jsdouble x, int32 *out)
11398 jsdouble r = js_math_floor_impl(x);
11399 return JSDOUBLE_IS_INT32(r, out);
11402 static JSBool FASTCALL
11403 roundReturningInt(jsdouble x, int32 *out)
11405 jsdouble r = js_math_round_impl(x);
11406 return JSDOUBLE_IS_INT32(r, out);
11410 * These functions store into their second argument, so they need to
11411 * be annotated accordingly. To be future-proof, we use ACCSET_STORE_ANY
11412 * so that new callers don't have to remember to update the annotation.
11414 JS_DEFINE_CALLINFO_2(static, BOOL, ceilReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
11415 JS_DEFINE_CALLINFO_2(static, BOOL, floorReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
11416 JS_DEFINE_CALLINFO_2(static, BOOL, roundReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
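// Emit a call to one of the *ReturningInt helpers above; exit the trace via
// OVERFLOW_EXIT if the result does not fit in an int32, otherwise push the
// int32 result (as a double) in place of the call.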
11418 JS_REQUIRES_STACK RecordingStatus
11419 TraceRecorder::callFloatReturningInt(uintN argc, const nanojit::CallInfo *ci)
11421 Value& arg = stackval(-1);
11422 LIns* resptr_ins = w.allocp(sizeof(int32));
11423 LIns* args[] = { resptr_ins, get(&arg) };
11424 LIns* fits_ins = w.call(ci, args);
11426 guard(false, w.eqi0(fits_ins), OVERFLOW_EXIT);
11428 LIns* res_ins = w.ldiAlloc(resptr_ins);
11430 set(&stackval(0 - (2 + argc)), w.i2d(res_ins));
11432 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11434 return RECORD_CONTINUE;
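// Record a call to a native function: try the argc-specific fast paths first
// (Math.ceil/floor/round/abs, String.prototype.charAt/charCodeAt, the
// RegExp exec-as-test rewrite, Math.min/max), then any JSTN specializations,
// and finally fall back to a generic boxed call through a fresh CallInfo.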
11437 JS_REQUIRES_STACK RecordingStatus
11438 TraceRecorder::callNative(uintN argc, JSOp mode)
11440 LIns* args[5];
11442 JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_FUNAPPLY ||
11443 mode == JSOP_FUNCALL);
11445 Value* vp = &stackval(0 - (2 + argc));
11446 JSObject* funobj = &vp[0].toObject();
11447 JSFunction* fun = funobj->getFunctionPrivate();
11448 JS_ASSERT(fun->isNative());
11449 Native native = fun->u.n.native;
11451 switch (argc) {
11452 case 1:
11453 if (vp[2].isNumber() && mode == JSOP_CALL) {
11454 if (native == js_math_ceil || native == js_math_floor || native == js_math_round) {
11455 LIns* a = get(&vp[2]);
11456 int32 result;
11457 if (IsPromotedInt32OrUint32(a)) {
11458 set(&vp[0], a);
11459 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11460 return RECORD_CONTINUE;
11462 if (native == js_math_floor) {
11463 if (floorReturningInt(vp[2].toNumber(), &result))
11464 return callFloatReturningInt(argc, &floorReturningInt_ci);
11465 } else if (native == js_math_ceil) {
11466 if (ceilReturningInt(vp[2].toNumber(), &result))
11467 return callFloatReturningInt(argc, &ceilReturningInt_ci);
11468 } else if (native == js_math_round) {
11469 if (roundReturningInt(vp[2].toNumber(), &result))
11470 return callFloatReturningInt(argc, &roundReturningInt_ci);
11472 } else if (native == js_math_abs) {
11473 LIns* a = get(&vp[2]);
11474 if (IsPromotedInt32(a) && vp[2].toNumber() != INT_MIN) {
11475 a = w.demoteToInt32(a);
11476 /* abs(INT_MIN) can't be done using integers; exit if we see it. */
11477 LIns* intMin_ins = w.name(w.immi(0x80000000), "INT_MIN");
11478 LIns* isIntMin_ins = w.name(w.eqi(a, intMin_ins), "isIntMin");
11479 guard(false, isIntMin_ins, MISMATCH_EXIT);
11480 LIns* neg_ins = w.negi(a);
11481 LIns* isNeg_ins = w.name(w.ltiN(a, 0), "isNeg");
11482 LIns* abs_ins = w.name(w.cmovi(isNeg_ins, neg_ins, a), "abs");
11483 set(&vp[0], w.i2d(abs_ins));
11484 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11485 return RECORD_CONTINUE;
11488 if (vp[1].isString()) {
11489 JSString *str = vp[1].toString();
11490 if (native == js_str_charAt) {
11491 jsdouble i = vp[2].toNumber();
11492 if (JSDOUBLE_IS_NaN(i))
11493 i = 0;
11494 if (i < 0 || i >= str->length())
11495 RETURN_STOP("charAt out of bounds");
11496 LIns* str_ins = get(&vp[1]);
11497 LIns* idx_ins = get(&vp[2]);
11498 LIns* char_ins;
11499 CHECK_STATUS(getCharAt(str, str_ins, idx_ins, mode, &char_ins));
11500 set(&vp[0], char_ins);
11501 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11502 return RECORD_CONTINUE;
11503 } else if (native == js_str_charCodeAt) {
11504 jsdouble i = vp[2].toNumber();
11505 if (JSDOUBLE_IS_NaN(i))
11506 i = 0;
11507 if (i < 0 || i >= str->length())
11508 RETURN_STOP("charCodeAt out of bounds");
11509 LIns* str_ins = get(&vp[1]);
11510 LIns* idx_ins = get(&vp[2]);
11511 LIns* charCode_ins;
11512 CHECK_STATUS(getCharCodeAt(str, str_ins, idx_ins, &charCode_ins));
11513 set(&vp[0], charCode_ins);
11514 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11515 return RECORD_CONTINUE;
11518 } else if (vp[2].isString() && mode == JSOP_CALL) {
11519 if (native == js_regexp_exec) {
11520 jsbytecode *pc = cx->regs->pc;
11522 * If we see any of these sequences, the result is unused:
11523 * - call / pop
11524 * - call / trace / pop
11526 * If we see any of these sequences, the result is only tested for nullness:
11527 * - call / ifeq
11528 * - call / trace / ifeq
11529 * - call / not / ifeq
11530 * - call / trace / not / ifeq
11532 * In either case, we replace the call to RegExp.exec() on the
11533 * stack with a call to RegExp.test() because "r.exec(s) !=
11534 * null" is equivalent to "r.test(s)". This avoids building
11535 * the result array, which can be expensive. This requires
11536 * that RegExp.prototype.test() hasn't been changed; we check this.
11538 if (pc[0] == JSOP_CALL) {
11539 if ((pc[JSOP_CALL_LENGTH] == JSOP_POP) ||
11540 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11541 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_POP) ||
11542 (pc[JSOP_CALL_LENGTH] == JSOP_IFEQ) ||
11543 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11544 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_IFEQ) ||
11545 (pc[JSOP_CALL_LENGTH] == JSOP_NOT &&
11546 pc[JSOP_CALL_LENGTH + JSOP_NOT_LENGTH] == JSOP_IFEQ) ||
11547 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11548 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_NOT &&
11549 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH + JSOP_NOT_LENGTH] == JSOP_IFEQ))
11551 JSObject* proto;
11552 jsid id = ATOM_TO_JSID(cx->runtime->atomState.testAtom);
11553 /* Get RegExp.prototype.test() and check it hasn't been changed. */
11554 if (js_GetClassPrototype(cx, NULL, JSProto_RegExp, &proto)) {
11555 if (JSObject *tmp = HasNativeMethod(proto, id, js_regexp_test)) {
11556 vp[0] = ObjectValue(*tmp);
11557 funobj = tmp;
11558 fun = tmp->getFunctionPrivate();
11559 native = js_regexp_test;
11566 break;
11568 case 2:
11569 if (vp[2].isNumber() && vp[3].isNumber() && mode == JSOP_CALL &&
11570 (native == js_math_min || native == js_math_max)) {
11571 LIns* a = get(&vp[2]);
11572 LIns* b = get(&vp[3]);
11573 if (IsPromotedInt32(a) && IsPromotedInt32(b)) {
11574 a = w.demoteToInt32(a);
11575 b = w.demoteToInt32(b);
11576 LIns* cmp = (native == js_math_min) ? w.lti(a, b) : w.gti(a, b);
11577 set(&vp[0], w.i2d(w.cmovi(cmp, a, b)));
11578 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11579 return RECORD_CONTINUE;
11581 if (IsPromotedUint32(a) && IsPromotedUint32(b)) {
11582 a = w.demoteToUint32(a);
11583 b = w.demoteToUint32(b);
11584 LIns* cmp = (native == js_math_min) ? w.ltui(a, b) : w.gtui(a, b);
11585 set(&vp[0], w.ui2d(w.cmovi(cmp, a, b)));
11586 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11587 return RECORD_CONTINUE;
11590 break;
11593 if (fun->flags & JSFUN_TRCINFO) {
11594 JSNativeTraceInfo *trcinfo = FUN_TRCINFO(fun);
11595 JS_ASSERT(trcinfo && fun->u.n.native == trcinfo->native);
11597 /* Try to call a type specialized version of the native. */
11598 if (trcinfo->specializations) {
11599 RecordingStatus status = callSpecializedNative(trcinfo, argc, mode == JSOP_NEW);
11600 if (status != RECORD_STOP)
11601 return status;
11605 if (native == js_fun_apply || native == js_fun_call)
11606 RETURN_STOP("trying to call native apply or call");
11608 // Allocate the vp vector and emit code to root it.
11609 uintN vplen = 2 + argc;
11610 LIns* invokevp_ins = w.allocp(vplen * sizeof(Value));
11612 // vp[0] is the callee.
11613 box_value_into(vp[0], w.immpObjGC(funobj), AllocSlotsAddress(invokevp_ins));
11615 // Calculate |this|.
11616 LIns* this_ins;
11617 if (mode == JSOP_NEW) {
11618 Class* clasp = fun->u.n.clasp;
11619 JS_ASSERT(clasp != &js_SlowArrayClass);
11620 if (!clasp)
11621 clasp = &js_ObjectClass;
11622 JS_ASSERT(((jsuword) clasp & 3) == 0);
11624 // Abort on |new Function|. (FIXME: This restriction might be
11625 // unnecessary now that the constructor creates the new function object
11626 // itself.)
11627 if (clasp == &js_FunctionClass)
11628 RETURN_STOP("new Function");
11630 if (!clasp->isNative())
11631 RETURN_STOP("new with non-native ops");
11633 // Don't trace |new Math.sin(0)|.
11634 if (!fun->isConstructor())
11635 RETURN_STOP("new with non-constructor native function");
11637 vp[1].setMagicWithObjectOrNullPayload(NULL);
11638 newobj_ins = w.immpMagicNull();
11640 /* Treat this as a regular call, the constructor will behave correctly. */
11641 mode = JSOP_CALL;
11642 this_ins = newobj_ins;
11643 } else {
11644 this_ins = get(&vp[1]);
11646 set(&vp[1], this_ins);
11647 box_value_into(vp[1], this_ins, AllocSlotsAddress(invokevp_ins, 1));
11649 // Populate argv.
11650 for (uintN n = 2; n < 2 + argc; n++) {
11651 box_value_into(vp[n], get(&vp[n]), AllocSlotsAddress(invokevp_ins, n));
11652 // For a very long argument list we might run out of LIR space, so
11653 // check inside the loop.
11654 if (outOfMemory())
11655 RETURN_STOP("out of memory in argument list");
11658 // Populate extra slots, including the return value slot for a slow native.
11659 if (2 + argc < vplen) {
11660 for (uintN n = 2 + argc; n < vplen; n++) {
11661 box_undefined_into(AllocSlotsAddress(invokevp_ins, n));
11662 if (outOfMemory())
11663 RETURN_STOP("out of memory in extra slots");
11667 // Set up arguments for the JSNative or JSFastNative.
11668 if (mode == JSOP_NEW)
11669 RETURN_STOP("untraceable fast native constructor");
11670 native_rval_ins = invokevp_ins;
11671 args[0] = invokevp_ins;
11672 args[1] = w.immi(argc);
11673 args[2] = cx_ins;
11674 uint32 typesig = CallInfo::typeSig3(ARGTYPE_I, ARGTYPE_P, ARGTYPE_I, ARGTYPE_P);
11676 // Generate CallInfo and a JSSpecializedNative structure on the fly.
11677 // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
11678 // record_NativeCallComplete unboxes the result specially.
11680 CallInfo* ci = new (traceAlloc()) CallInfo();
11681 ci->_address = uintptr_t(fun->u.n.native);
11682 ci->_isPure = 0;
11683 ci->_storeAccSet = ACCSET_STORE_ANY;
11684 ci->_abi = ABI_CDECL;
11685 ci->_typesig = typesig;
11686 #ifdef DEBUG
11687 ci->_name = js_anonymous_str;
11688 if (fun->atom) {
11689 JSAutoByteString bytes(cx, ATOM_TO_STRING(fun->atom));
11690 if (!!bytes) {
11691 size_t n = strlen(bytes.ptr()) + 1;
11692 char *buffer = new (traceAlloc()) char[n];
11693 memcpy(buffer, bytes.ptr(), n);
11694 ci->_name = buffer;
11697 #endif
11699 // Generate a JSSpecializedNative structure on the fly.
11700 generatedSpecializedNative.builtin = ci;
11701 generatedSpecializedNative.flags = FAIL_STATUS | ((mode == JSOP_NEW)
11702 ? JSTN_CONSTRUCTOR
11703 : JSTN_UNBOX_AFTER);
11704 generatedSpecializedNative.prefix = NULL;
11705 generatedSpecializedNative.argtypes = NULL;
11707 // We only have to ensure that the values we wrote into the stack buffer
11708 // are rooted if we actually make it to the call, so only set nativeVp and
11709 // nativeVpLen immediately before emitting the call code. This way we avoid
11710 // leaving trace with a bogus nativeVp because we fall off trace while unboxing
11711 // values into the stack buffer.
11712 w.stStateField(w.nameImmi(vplen), nativeVpLen);
11713 w.stStateField(invokevp_ins, nativeVp);
11715 // argc is the original argc here. It is used to calculate where to place
11716 // the return value.
11717 return emitNativeCall(&generatedSpecializedNative, argc, args, true);
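// Record a call or |new| of an arbitrary callee: guard the callee if it is not
// constant, then dispatch to the interpreted or native call path, special-casing
// the Array and String constructors.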
11720 JS_REQUIRES_STACK RecordingStatus
11721 TraceRecorder::functionCall(uintN argc, JSOp mode)
11723 Value& fval = stackval(0 - (2 + argc));
11724 JS_ASSERT(&fval >= cx->fp()->base());
11726 if (!IsFunctionObject(fval))
11727 RETURN_STOP("callee is not a function");
11729 Value& tval = stackval(0 - (1 + argc));
11732 * If callee is not constant, it's a shapeless call and we have to guard
11733 * explicitly that we will get this callee again at runtime.
11735 if (!get(&fval)->isImmP())
11736 CHECK_STATUS(guardCallee(fval));
11739 * Require that the callee be a function object, to avoid guarding on its
11740 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
11741 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
11742 * the property cache and guard on the object (this) in which the callee
11743 * was found. So it's sufficient to test here that the particular function
11744 * is interpreted, not guard on that condition.
11746 * Bytecode sequences that push shapeless callees must guard on the callee
11747 * class being Function and the function being interpreted.
11749 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &fval.toObject());
11751 if (Probes::callTrackingActive(cx)) {
11752 JSScript *script = FUN_SCRIPT(fun);
11753 if (!script || !script->isEmpty()) {
11754 LIns* args[] = { w.immi(1), w.nameImmpNonGC(fun), cx_ins };
11755 LIns* call_ins = w.call(&functionProbe_ci, args);
11756 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
11760 if (FUN_INTERPRETED(fun))
11761 return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
11763 Native native = fun->maybeNative();
11764 Value* argv = &tval + 1;
11765 if (native == js_Array)
11766 return newArray(&fval.toObject(), argc, argv, &fval);
11767 if (native == js_String && argc == 1) {
11768 if (mode == JSOP_NEW)
11769 return newString(&fval.toObject(), 1, argv, &fval);
11770 if (!argv[0].isPrimitive()) {
11771 CHECK_STATUS(guardNativeConversion(argv[0]));
11772 return callImacro(call_imacros.String);
11774 set(&fval, stringify(argv[0]));
11775 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11776 return RECORD_CONTINUE;
11779 RecordingStatus rs = callNative(argc, mode);
11780 if (Probes::callTrackingActive(cx)) {
11781 LIns* args[] = { w.immi(0), w.nameImmpNonGC(fun), cx_ins };
11782 LIns* call_ins = w.call(&functionProbe_ci, args);
11783 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
11785 return rs;
11788 JS_REQUIRES_STACK AbortableRecordingStatus
11789 TraceRecorder::record_JSOP_NEW()
11791 uintN argc = GET_ARGC(cx->regs->pc);
11792 cx->assertValidStackDepth(argc + 2);
11793 return InjectStatus(functionCall(argc, JSOP_NEW));
11796 JS_REQUIRES_STACK AbortableRecordingStatus
11797 TraceRecorder::record_JSOP_DELNAME()
11799 return ARECORD_STOP;
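// Builtins called from trace by JSOP_DELELEM/JSOP_DELPROP to delete an element or
// property; failures are reported through SetBuiltinError so the pending guard can
// bail off trace.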
11802 static JSBool JS_FASTCALL
11803 DeleteIntKey(JSContext* cx, JSObject* obj, int32 i, JSBool strict)
11805 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
11807 LeaveTraceIfGlobalObject(cx, obj);
11808 LeaveTraceIfArgumentsObject(cx, obj);
11809 Value v = BooleanValue(false);
11810 jsid id;
11811 if (INT_FITS_IN_JSID(i)) {
11812 id = INT_TO_JSID(i);
11813 } else {
11814 if (!js_ValueToStringId(cx, Int32Value(i), &id)) {
11815 SetBuiltinError(tm);
11816 return false;
11820 if (!obj->deleteProperty(cx, id, &v, strict))
11821 SetBuiltinError(tm);
11822 return v.toBoolean();
11824 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteIntKey, CONTEXT, OBJECT, INT32, BOOL,
11825 0, ACCSET_STORE_ANY)
11827 static JSBool JS_FASTCALL
11828 DeleteStrKey(JSContext* cx, JSObject* obj, JSString* str, JSBool strict)
11830 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
11832 LeaveTraceIfGlobalObject(cx, obj);
11833 LeaveTraceIfArgumentsObject(cx, obj);
11834 Value v = BooleanValue(false);
11835 jsid id;
11838 * NB: JSOP_DELPROP does not need js_ValueToStringId to atomize, but (see
11839 * jsatominlines.h) that helper early-returns if the computed property name
11840 * string is already atomized, and we are *not* on a perf-critical path!
11842 if (!js_ValueToStringId(cx, StringValue(str), &id) || !obj->deleteProperty(cx, id, &v, strict))
11843 SetBuiltinError(tm);
11844 return v.toBoolean();
11846 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteStrKey, CONTEXT, OBJECT, STRING, BOOL,
11847 0, ACCSET_STORE_ANY)
11849 JS_REQUIRES_STACK AbortableRecordingStatus
11850 TraceRecorder::record_JSOP_DELPROP()
11852 Value& lval = stackval(-1);
11853 if (lval.isPrimitive())
11854 RETURN_STOP_A("JSOP_DELPROP on primitive base expression");
11855 if (&lval.toObject() == globalObj)
11856 RETURN_STOP_A("JSOP_DELPROP on global property");
11858 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
11860 enterDeepBailCall();
11861 LIns* args[] = { strictModeCode_ins, w.immpAtomGC(atom), get(&lval), cx_ins };
11862 LIns* rval_ins = w.call(&DeleteStrKey_ci, args);
11864 LIns* status_ins = w.ldiStateField(builtinStatus);
11865 pendingGuardCondition = w.eqi0(status_ins);
11866 leaveDeepBailCall();
11868 set(&lval, rval_ins);
11869 return ARECORD_CONTINUE;
11872 JS_REQUIRES_STACK AbortableRecordingStatus
11873 TraceRecorder::record_JSOP_DELELEM()
11875 Value& lval = stackval(-2);
11876 if (lval.isPrimitive())
11877 RETURN_STOP_A("JSOP_DELELEM on primitive base expression");
11878 if (&lval.toObject() == globalObj)
11879 RETURN_STOP_A("JSOP_DELELEM on global property");
11880 if (lval.toObject().isArguments())
11881 RETURN_STOP_A("JSOP_DELELEM on the |arguments| object");
11883 Value& idx = stackval(-1);
11884 LIns* rval_ins;
11886 enterDeepBailCall();
11887 if (hasInt32Repr(idx)) {
11888 LIns* num_ins;
11889 CHECK_STATUS_A(makeNumberInt32(get(&idx), &num_ins));
11890 LIns* args[] = { strictModeCode_ins, num_ins, get(&lval), cx_ins };
11891 rval_ins = w.call(&DeleteIntKey_ci, args);
11892 } else if (idx.isString()) {
11893 LIns* args[] = { strictModeCode_ins, get(&idx), get(&lval), cx_ins };
11894 rval_ins = w.call(&DeleteStrKey_ci, args);
11895 } else {
11896 RETURN_STOP_A("JSOP_DELELEM on non-int, non-string index");
11899 LIns* status_ins = w.ldiStateField(builtinStatus);
11900 pendingGuardCondition = w.eqi0(status_ins);
11901 leaveDeepBailCall();
11903 set(&lval, rval_ins);
11904 return ARECORD_CONTINUE;
11907 JS_REQUIRES_STACK AbortableRecordingStatus
11908 TraceRecorder::record_JSOP_TYPEOF()
11910 Value& r = stackval(-1);
11911 LIns* type;
11912 if (r.isString()) {
11913 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]);
11914 } else if (r.isNumber()) {
11915 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]);
11916 } else if (r.isUndefined()) {
11917 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
11918 } else if (r.isBoolean()) {
11919 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_BOOLEAN]);
11920 } else if (r.isNull()) {
11921 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_OBJECT]);
11922 } else {
11923 if (r.toObject().isFunction()) {
11924 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]);
11925 } else {
11926 LIns* args[] = { get(&r), cx_ins };
11927 type = w.call(&js_TypeOfObject_ci, args);
11930 set(&r, type);
11931 return ARECORD_CONTINUE;
11934 JS_REQUIRES_STACK AbortableRecordingStatus
11935 TraceRecorder::record_JSOP_VOID()
11937 stack(-1, w.immiUndefined());
11938 return ARECORD_CONTINUE;
11941 JS_REQUIRES_STACK AbortableRecordingStatus
11942 TraceRecorder::record_JSOP_INCNAME()
11944 return incName(1);
11947 JS_REQUIRES_STACK AbortableRecordingStatus
11948 TraceRecorder::record_JSOP_INCPROP()
11950 return incProp(1);
11953 JS_REQUIRES_STACK AbortableRecordingStatus
11954 TraceRecorder::record_JSOP_INCELEM()
11956 return InjectStatus(incElem(1));
11959 JS_REQUIRES_STACK AbortableRecordingStatus
11960 TraceRecorder::record_JSOP_DECNAME()
11962 return incName(-1);
11965 JS_REQUIRES_STACK AbortableRecordingStatus
11966 TraceRecorder::record_JSOP_DECPROP()
11968 return incProp(-1);
11971 JS_REQUIRES_STACK AbortableRecordingStatus
11972 TraceRecorder::record_JSOP_DECELEM()
11974 return InjectStatus(incElem(-1));
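// Record a name increment or decrement. |incr| is +1 or -1; |pre| selects whether
// the updated (prefix) or original (postfix) value is left on the stack.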
11977 JS_REQUIRES_STACK AbortableRecordingStatus
11978 TraceRecorder::incName(jsint incr, bool pre)
11980 Value* vp;
11981 LIns* v_ins;
11982 LIns* v_ins_after;
11983 NameResult nr;
11985 CHECK_STATUS_A(name(vp, v_ins, nr));
11986 Value v = nr.tracked ? *vp : nr.v;
11987 Value v_after;
11988 CHECK_STATUS_A(incHelper(v, v_ins, v_after, v_ins_after, incr));
11989 LIns* v_ins_result = pre ? v_ins_after : v_ins;
11990 if (nr.tracked) {
11991 set(vp, v_ins_after);
11992 stack(0, v_ins_result);
11993 return ARECORD_CONTINUE;
11996 if (!nr.obj->isCall())
11997 RETURN_STOP_A("incName on unsupported object class");
11999 CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.shape, v_ins_after, v_after));
12000 stack(0, v_ins_result);
12001 return ARECORD_CONTINUE;
12004 JS_REQUIRES_STACK AbortableRecordingStatus
12005 TraceRecorder::record_JSOP_NAMEINC()
12007 return incName(1, false);
12010 JS_REQUIRES_STACK AbortableRecordingStatus
12011 TraceRecorder::record_JSOP_PROPINC()
12013 return incProp(1, false);
12016 // XXX consolidate with record_JSOP_GETELEM code...
12017 JS_REQUIRES_STACK AbortableRecordingStatus
12018 TraceRecorder::record_JSOP_ELEMINC()
12020 return InjectStatus(incElem(1, false));
12023 JS_REQUIRES_STACK AbortableRecordingStatus
12024 TraceRecorder::record_JSOP_NAMEDEC()
12026 return incName(-1, false);
12029 JS_REQUIRES_STACK AbortableRecordingStatus
12030 TraceRecorder::record_JSOP_PROPDEC()
12032 return incProp(-1, false);
12035 JS_REQUIRES_STACK AbortableRecordingStatus
12036 TraceRecorder::record_JSOP_ELEMDEC()
12038 return InjectStatus(incElem(-1, false));
12041 JS_REQUIRES_STACK AbortableRecordingStatus
12042 TraceRecorder::record_JSOP_GETPROP()
12044 return getProp(stackval(-1));
12048 * If possible, lookup obj[id] without calling any resolve hooks or touching
12049 * any non-native objects, store the results in *pobjp and *shapep (NULL if no
12050 * such property exists), and return true.
12052 * If a safe lookup is not possible, return false; *pobjp and *shapep are
12053 * undefined.
12055 static bool
12056 SafeLookup(JSContext *cx, JSObject* obj, jsid id, JSObject** pobjp, const Shape** shapep)
12058 do {
12059 // Avoid non-native lookupProperty hooks.
12060 if (obj->getOps()->lookupProperty)
12061 return false;
12063 if (const Shape *shape = obj->nativeLookup(id)) {
12064 *pobjp = obj;
12065 *shapep = shape;
12066 return true;
12069 // Avoid resolve hooks.
12070 if (obj->getClass()->resolve != JS_ResolveStub)
12071 return false;
12072 } while ((obj = obj->getProto()) != NULL);
12073 *pobjp = NULL;
12074 *shapep = NULL;
12075 return true;
12079 * Lookup the property for the SETPROP/SETNAME/SETMETHOD instruction at pc.
12080 * Emit guards to ensure that the result at run time is the same.
12082 JS_REQUIRES_STACK RecordingStatus
12083 TraceRecorder::lookupForSetPropertyOp(JSObject* obj, LIns* obj_ins, jsid id,
12084 bool* safep, JSObject** pobjp, const Shape** shapep)
12086 // We could consult the property cache here, but the contract for
12087 // PropertyCache::testForSet is intricate enough that it's a lot less code
12088 // to do a SafeLookup.
12089 *safep = SafeLookup(cx, obj, id, pobjp, shapep);
12090 if (!*safep)
12091 return RECORD_CONTINUE;
12093 VMSideExit *exit = snapshot(BRANCH_EXIT);
12094 if (*shapep) {
12095 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_kshape", exit));
12096 if (obj != *pobjp && *pobjp != globalObj) {
12097 CHECK_STATUS(guardShape(w.immpObjGC(*pobjp), *pobjp, (*pobjp)->shape(),
12098 "guard_vshape", exit));
12100 } else {
12101 for (;;) {
12102 if (obj != globalObj)
12103 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_proto_chain", exit));
12104 obj = obj->getProto();
12105 if (!obj)
12106 break;
12107 obj_ins = w.immpObjGC(obj);
12110 return RECORD_CONTINUE;
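// Builtin called from trace before overwriting a slot of a branded or
// method-barriered object: it runs JSObject::methodWriteBarrier for that slot.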
12113 static JSBool FASTCALL
12114 MethodWriteBarrier(JSContext* cx, JSObject* obj, uint32 slot, const Value* v)
12116 #ifdef DEBUG
12117 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12118 #endif
12120 bool ok = obj->methodWriteBarrier(cx, slot, *v);
12121 JS_ASSERT(WasBuiltinSuccessful(tm));
12122 return ok;
12124 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, UINT32, CVALUEPTR,
12125 0, ACCSET_STORE_ANY)
12127 /* Emit a specialized, inlined copy of js_NativeSet. */
12128 JS_REQUIRES_STACK RecordingStatus
12129 TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape,
12130 const Value &v, LIns* v_ins)
12132 uint32 slot = shape->slot;
12133 JS_ASSERT((slot != SHAPE_INVALID_SLOT) == shape->hasSlot());
12134 JS_ASSERT_IF(shape->hasSlot(), obj->nativeContains(*shape));
12137 * We do not trace assignment to properties that have both a non-default
12138 * setter and a slot, for several reasons.
12140 * First, that would require sampling rt->propertyRemovals before and after
12141 * (see js_NativeSet), and even more code to handle the case where the two
12142 * samples differ. A mere guard is not enough, because you can't just bail
12143 * off trace in the middle of a property assignment without storing the
12144 * value and making the stack right.
12146 * If obj is the global object, there are two additional problems. We would
12147 * have to emit still more code to store the result in the object (not the
12148 * native global frame) if the setter returned successfully after
12149 * deep-bailing. And we would have to cope if the run-time type of the
12150 * setter's return value differed from the record-time type of v, in which
12151 * case unboxing would fail and, having called a native setter, we could
12152 * not just retry the instruction in the interpreter.
12154 * If obj is branded, we would have a similar problem recovering from a
12155 * failed call to MethodWriteBarrier.
12157 if (!shape->hasDefaultSetter() && slot != SHAPE_INVALID_SLOT)
12158 RETURN_STOP("can't trace set of property with setter and slot");
12160 // These two cases are strict-mode errors and can't be traced.
12161 if (shape->hasGetterValue() && shape->hasDefaultSetter())
12162 RETURN_STOP("can't set a property that has only a getter");
12163 if (shape->isDataDescriptor() && !shape->writable())
12164 RETURN_STOP("can't assign to readonly property");
12166 // Call the setter, if any.
12167 if (!shape->hasDefaultSetter()) {
12168 if (shape->hasSetterValue())
12169 RETURN_STOP("can't trace JavaScript function setter yet");
12170 emitNativePropertyOp(shape, obj_ins, true, box_value_into_alloc(v, v_ins));
12173 if (slot != SHAPE_INVALID_SLOT) {
12174 if (obj->brandedOrHasMethodBarrier()) {
12175 if (obj == globalObj) {
12176 // Because the trace is type-specialized to the global object's
12177 // slots, no run-time check is needed. Avoid recording a global
12178 // shape change, though.
12179 JS_ASSERT(obj->nativeContains(*shape));
12180 if (IsFunctionObject(obj->getSlot(slot)))
12181 RETURN_STOP("can't trace set of function-valued global property");
12182 } else {
12183 // Setting a function-valued property might need to rebrand the
12184 // object. Call the method write barrier. Note that even if the
12185 // property is not function-valued now, it might be on trace.
12186 enterDeepBailCall();
12187 LIns* args[] = {box_value_into_alloc(v, v_ins), w.immi(slot), obj_ins, cx_ins};
12188 LIns* ok_ins = w.call(&MethodWriteBarrier_ci, args);
12189 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12190 leaveDeepBailCall();
12194 // Store the value.
12195 if (obj == globalObj) {
12196 if (!lazilyImportGlobalSlot(slot))
12197 RETURN_STOP("lazy import of global slot failed");
12198 set(&obj->getSlotRef(slot), v_ins);
12199 } else {
12200 LIns* slots_ins = NULL;
12201 stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
12205 return RECORD_CONTINUE;
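// Check that a new data property may be added to obj on trace: obj must be
// extensible, must not be the global, and must have no addProperty or setProperty
// class hooks. The add itself is completed later, in record_AddProperty.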
12208 JS_REQUIRES_STACK RecordingStatus
12209 TraceRecorder::addDataProperty(JSObject* obj)
12211 if (!obj->isExtensible())
12212 RETURN_STOP("assignment adds property to non-extensible object");
12214 // If obj is the global, the global shape is about to change. Note also
12215 // that since we do not record this case, SETNAME and SETPROP are identical
12216 // as far as the tracer is concerned. (js_CheckUndeclaredVarAssignment
12217 // distinguishes the two, in the interpreter.)
12218 if (obj == globalObj)
12219 RETURN_STOP("set new property of global object"); // global shape change
12221 // js_AddProperty does not call the addProperty hook.
12222 Class* clasp = obj->getClass();
12223 if (clasp->addProperty != Valueify(JS_PropertyStub))
12224 RETURN_STOP("set new property of object with addProperty hook");
12226 // See comment in TR::nativeSet about why we do not support setting a
12227 // property that has both a setter and a slot.
12228 if (clasp->setProperty != Valueify(JS_StrictPropertyStub))
12229 RETURN_STOP("set new property with setter and slot");
12231 #ifdef DEBUG
12232 addPropShapeBefore = obj->lastProperty();
12233 #endif
12234 return RECORD_CONTINUE;
12237 JS_REQUIRES_STACK AbortableRecordingStatus
12238 TraceRecorder::record_AddProperty(JSObject *obj)
12240 Value& objv = stackval(-2);
12241 JS_ASSERT(&objv.toObject() == obj);
12242 LIns* obj_ins = get(&objv);
12243 Value& v = stackval(-1);
12244 LIns* v_ins = get(&v);
12245 const Shape* shape = obj->lastProperty();
12247 if (!shape->hasDefaultSetter()) {
12248 JS_ASSERT(IsWatchedProperty(cx, shape));
12249 RETURN_STOP_A("assignment adds property with watchpoint");
12252 #ifdef DEBUG
12253 JS_ASSERT(addPropShapeBefore);
12254 if (obj->inDictionaryMode())
12255 JS_ASSERT(shape->previous()->matches(addPropShapeBefore));
12256 else
12257 JS_ASSERT(shape->previous() == addPropShapeBefore);
12258 JS_ASSERT(shape->isDataDescriptor());
12259 JS_ASSERT(shape->hasDefaultSetter());
12260 addPropShapeBefore = NULL;
12261 #endif
12263 if (obj->inDictionaryMode())
12264 RETURN_STOP_A("assignment adds property to dictionary"); // FIXME: bug 625900
12266 // On trace, call js_Add{,Atom}Property to do the dirty work.
12267 LIns* args[] = { w.immpShapeGC(shape), obj_ins, cx_ins };
12268 jsbytecode op = *cx->regs->pc;
12269 bool isDefinitelyAtom = (op == JSOP_SETPROP);
12270 const CallInfo *ci = isDefinitelyAtom ? &js_AddAtomProperty_ci : &js_AddProperty_ci;
12271 LIns* ok_ins = w.call(ci, args);
12272 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12274 // Box the value and store it in the new slot.
12275 CHECK_STATUS_A(InjectStatus(nativeSet(obj, obj_ins, shape, v, v_ins)));
12277 // Finish off a SET instruction by moving sp[-1] to sp[-2].
12278 if (op == JSOP_SETPROP || op == JSOP_SETNAME || op == JSOP_SETMETHOD)
12279 set(&objv, v_ins);
12280 return ARECORD_CONTINUE;
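// Store v_ins into a stack slot owned by a frame that is still on trace. The store
// is refused unless the slot's type is unchanged, or a promoted int32 is being
// written into a double-typed slot.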
12283 JS_REQUIRES_STACK RecordingStatus
12284 TraceRecorder::setUpwardTrackedVar(Value* stackVp, const Value &v, LIns* v_ins)
12286 JSValueType stackT = determineSlotType(stackVp);
12287 JSValueType otherT = getCoercedType(v);
12289 bool promote = true;
12291 if (stackT != otherT) {
12292 if (stackT == JSVAL_TYPE_DOUBLE && otherT == JSVAL_TYPE_INT32 && IsPromotedInt32(v_ins))
12293 promote = false;
12294 else
12295 RETURN_STOP("can't trace this upvar mutation");
12298 set(stackVp, v_ins, promote);
12300 return RECORD_CONTINUE;
12303 JS_REQUIRES_STACK RecordingStatus
12304 TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *shape,
12305 LIns *v_ins, const Value &v)
12307 // Set variables in on-trace-stack call objects by updating the tracker.
12308 JSStackFrame *fp = frameIfInRange(callobj);
12309 if (fp) {
12310 if (shape->setterOp() == SetCallArg) {
12311 JS_ASSERT(shape->hasShortID());
12312 uintN slot = uint16(shape->shortid);
12313 Value *vp2 = &fp->formalArg(slot);
12314 CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
12315 return RECORD_CONTINUE;
12317 if (shape->setterOp() == SetCallVar) {
12318 JS_ASSERT(shape->hasShortID());
12319 uintN slot = uint16(shape->shortid);
12320 Value *vp2 = &fp->slots()[slot];
12321 CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
12322 return RECORD_CONTINUE;
12324 RETURN_STOP("can't trace special CallClass setter");
12327 if (!callobj->getPrivate()) {
12328 // Because the parent guard in guardCallee ensures this Call object
12329 // will be the same object now and on trace, and because once a Call
12330 // object loses its frame it never regains one, on trace we will also
12331 // have a null private in the Call object. So all we need to do is
12332 // write the value to the Call object's slot.
12333 intN slot = uint16(shape->shortid);
12334 if (shape->setterOp() == SetCallArg) {
12335 JS_ASSERT(slot < ArgClosureTraits::slot_count(callobj));
12336 slot += ArgClosureTraits::slot_offset(callobj);
12337 } else if (shape->setterOp() == SetCallVar) {
12338 JS_ASSERT(slot < VarClosureTraits::slot_count(callobj));
12339 slot += VarClosureTraits::slot_offset(callobj);
12340 } else {
12341 RETURN_STOP("can't trace special CallClass setter");
12344 // Now assert that the shortid get we did above was ok. Have to do it
12345 // after the RETURN_STOP above, since in that case we may in fact not
12346 // have a valid shortid; but we don't use it in that case anyway.
12347 JS_ASSERT(shape->hasShortID());
12349 LIns* slots_ins = NULL;
12350 stobj_set_dslot(callobj_ins, slot, slots_ins, v, v_ins);
12351 return RECORD_CONTINUE;
12354 // This is the hard case: we have a JSStackFrame private, but it's not in
12355 // range. During trace execution we may or may not have a JSStackFrame
12356 // anymore. Call the standard builtins, which handle that situation.
12358 // Set variables in off-trace-stack call objects by calling standard builtins.
12359 const CallInfo* ci = NULL;
12360 if (shape->setterOp() == SetCallArg)
12361 ci = &js_SetCallArg_ci;
12362 else if (shape->setterOp() == SetCallVar)
12363 ci = &js_SetCallVar_ci;
12364 else
12365 RETURN_STOP("can't trace special CallClass setter");
12367 // Even though the frame is out of range, later we might be called as an
12368 // inner trace such that the target variable is defined in the outer trace
12369 // entry frame. For simplicity, we just fall off trace.
12370 guard(false,
12371 w.eqp(entryFrameIns(), w.ldpObjPrivate(callobj_ins)),
12372 MISMATCH_EXIT);
12374 LIns* args[] = {
12375 box_value_for_native_call(v, v_ins),
12376 w.nameImmw(JSID_BITS(SHAPE_USERID(shape))),
12377 callobj_ins,
12378 cx_ins
12380 LIns* call_ins = w.call(ci, args);
12381 guard(false, w.name(w.eqi0(call_ins), "guard(set upvar)"), STATUS_EXIT);
12383 return RECORD_CONTINUE;
12387 * Emit a specialized, inlined copy of js_SetPropertyHelper for the current
12388 * instruction. On success, *deferredp is true if a call to record_AddProperty
12389 * is expected.
12391 JS_REQUIRES_STACK RecordingStatus
12392 TraceRecorder::setProperty(JSObject* obj, LIns* obj_ins, const Value &v, LIns* v_ins,
12393 bool* deferredp)
12395 *deferredp = false;
12397 JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
12398 jsid id = ATOM_TO_JSID(atom);
12400 if (obj->getOps()->setProperty)
12401 RETURN_STOP("non-native object"); // FIXME: bug 625900
12403 bool safe;
12404 JSObject* pobj;
12405 const Shape* shape;
12406 CHECK_STATUS(lookupForSetPropertyOp(obj, obj_ins, id, &safe, &pobj, &shape));
12407 if (!safe)
12408 RETURN_STOP("setprop: lookup fail"); // FIXME: bug 625900
12410 // Handle Call objects specially. The Call objects we create on trace are
12411 // not fully populated until we leave trace. Calling the setter on such an
12412 // object wouldn't work.
12413 if (obj->isCall())
12414 return setCallProp(obj, obj_ins, shape, v_ins, v);
12416 // Handle setting a property that is not found on obj or anywhere on
12417 // its prototype chain.
12418 if (!shape) {
12419 *deferredp = true;
12420 return addDataProperty(obj);
12423 // Check whether we can assign to/over the existing property.
12424 if (shape->isAccessorDescriptor()) {
12425 if (shape->hasDefaultSetter())
12426 RETURN_STOP("setting accessor property with no setter");
12427 } else if (!shape->writable()) {
12428 RETURN_STOP("setting readonly data property");
12431 // Handle setting an existing own property.
12432 if (pobj == obj) {
12433 if (*cx->regs->pc == JSOP_SETMETHOD) {
12434 if (shape->isMethod() && &shape->methodObject() == &v.toObject())
12435 return RECORD_CONTINUE;
12436 RETURN_STOP("setmethod: property exists");
12438 return nativeSet(obj, obj_ins, shape, v, v_ins);
12441 // If shape is an inherited non-SHARED property, we will add a new,
12442 // shadowing data property.
12443 if (shape->hasSlot()) {
12444 // Avoid being tripped up by the legacy special case for shortids, where
12445 // the new shadowing data property inherits the setter.
12446 if (shape->hasShortID() && !shape->hasDefaultSetter())
12447 RETURN_STOP("shadowing assignment with shortid");
12448 *deferredp = true;
12449 return addDataProperty(obj);
12452 // Handle setting an inherited SHARED property.
12453 // If it has the default setter, the assignment is a no-op.
12454 if (shape->hasDefaultSetter() && !shape->hasGetterValue())
12455 return RECORD_CONTINUE;
12456 return nativeSet(obj, obj_ins, shape, v, v_ins);
12459 /* Record a JSOP_SET{PROP,NAME,METHOD} instruction. */
12460 JS_REQUIRES_STACK RecordingStatus
12461 TraceRecorder::recordSetPropertyOp()
12463 Value& l = stackval(-2);
12464 if (!l.isObject())
12465 RETURN_STOP("set property of primitive");
12466 JSObject* obj = &l.toObject();
12467 LIns* obj_ins = get(&l);
12469 Value& r = stackval(-1);
12470 LIns* r_ins = get(&r);
12472 bool deferred;
12473 CHECK_STATUS(setProperty(obj, obj_ins, r, r_ins, &deferred));
12475 // Finish off a SET instruction by moving sp[-1] to sp[-2]. But if
12476 // record_AddProperty is going be called, we're not done with sp[-2] yet,
12477 // so delay this move until the end of record_AddProperty.
12478 if (!deferred)
12479 set(&l, r_ins);
12480 return RECORD_CONTINUE;
12483 JS_REQUIRES_STACK AbortableRecordingStatus
12484 TraceRecorder::record_JSOP_SETPROP()
12486 return InjectStatus(recordSetPropertyOp());
12489 JS_REQUIRES_STACK AbortableRecordingStatus
12490 TraceRecorder::record_JSOP_SETMETHOD()
12492 return InjectStatus(recordSetPropertyOp());
12495 JS_REQUIRES_STACK AbortableRecordingStatus
12496 TraceRecorder::record_JSOP_SETNAME()
12498 return InjectStatus(recordSetPropertyOp());
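/* Record a JSOP_INIT{PROP,METHOD} instruction. */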
12501 JS_REQUIRES_STACK RecordingStatus
12502 TraceRecorder::recordInitPropertyOp(jsbytecode op)
12504 Value& l = stackval(-2);
12505 JSObject* obj = &l.toObject();
12506 LIns* obj_ins = get(&l);
12507 JS_ASSERT(obj->getClass() == &js_ObjectClass);
12509 Value& v = stackval(-1);
12510 LIns* v_ins = get(&v);
12512 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
12513 jsid id = js_CheckForStringIndex(ATOM_TO_JSID(atom));
12515 // If obj already has this property (because JSOP_NEWOBJECT already set its
12516 // shape or because the id appears more than once in the initializer), just
12517 // set it. The existing property can't be an accessor property: we wouldn't
12518 // get here, as JSOP_SETTER can't be recorded.
12519 if (const Shape* shape = obj->nativeLookup(id)) {
12520 // Don't assign a bare (non-cloned) function to an ordinary or method
12521 // property. The opposite case, assigning some other value to a method,
12522 // is OK. nativeSet emits code that trips the write barrier.
12523 if (op == JSOP_INITMETHOD)
12524 RETURN_STOP("initmethod: property exists");
12525 JS_ASSERT(shape->isDataDescriptor());
12526 JS_ASSERT(shape->hasSlot());
12527 JS_ASSERT(shape->hasDefaultSetter());
12528 return nativeSet(obj, obj_ins, shape, v, v_ins);
12531 // Duplicate the interpreter's special treatment of __proto__. Unlike the
12532 // SET opcodes, JSOP_INIT{PROP,METHOD} do not write to the stack.
12533 if (atom == cx->runtime->atomState.protoAtom) {
12534 bool deferred;
12535 return setProperty(obj, obj_ins, v, v_ins, &deferred);
12538 // Define a new property.
12539 return addDataProperty(obj);
12542 JS_REQUIRES_STACK AbortableRecordingStatus
12543 TraceRecorder::record_JSOP_INITPROP()
12545 return InjectStatus(recordInitPropertyOp(JSOP_INITPROP));
12548 JS_REQUIRES_STACK AbortableRecordingStatus
12549 TraceRecorder::record_JSOP_INITMETHOD()
12551 return InjectStatus(recordInitPropertyOp(JSOP_INITMETHOD));
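// Calls that may deep-bail must be bracketed by enterDeepBailCall and
// leaveDeepBailCall, which publish and clear tm->bailExit around the emitted call.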
12554 JS_REQUIRES_STACK VMSideExit*
12555 TraceRecorder::enterDeepBailCall()
12557 // Take snapshot for DeepBail and store it in tm->bailExit.
12558 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
12559 w.stTraceMonitorField(w.nameImmpNonGC(exit), bailExit);
12561 // Tell nanojit not to discard or defer stack writes before this call.
12562 w.xbarrier(createGuardRecord(exit));
12564 // Forget about guarded shapes, since deep bailers can reshape the world.
12565 forgetGuardedShapes();
12566 return exit;
12569 JS_REQUIRES_STACK void
12570 TraceRecorder::leaveDeepBailCall()
12572 // Keep tm->bailExit null when it's invalid.
12573 w.stTraceMonitorField(w.immpNull(), bailExit);
12576 JS_REQUIRES_STACK void
12577 TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, Value* outp)
12579 // Store the boxed result (and this-object, if JOF_CALLOP) before the
12580 // guard. The deep-bail case requires this. If the property get fails,
12581 // these slots will be ignored anyway.
12582 // N.B. monitorRecording expects get(outp)->isLoad()
12583 JS_ASSERT(vp_ins->isop(LIR_allocp));
12584 LIns* result_ins = w.lddAlloc(vp_ins);
12585 set(outp, result_ins);
12586 if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP)
12587 set(outp + 1, obj_ins);
12589 // We need to guard on ok_ins, but this requires a snapshot of the state
12590 // after this op. monitorRecording will do it for us.
12591 pendingGuardCondition = ok_ins;
12593 // Note there is a boxed result sitting on the stack. The caller must leave
12594 // it there for the time being, since the return type is not yet
12595 // known. monitorRecording will emit the code to unbox it.
12596 pendingUnboxSlot = outp;
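// Convert a rooted string to a jsid, atomizing it if necessary and writing the
// atom back through *namep so the result stays rooted.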
12599 static inline bool
12600 RootedStringToId(JSContext* cx, JSString** namep, jsid* idp)
12602 JSString* name = *namep;
12603 if (name->isAtomized()) {
12604 *idp = INTERNED_STRING_TO_JSID(name);
12605 return true;
12608 JSAtom* atom = js_AtomizeString(cx, name, 0);
12609 if (!atom)
12610 return false;
12611 *namep = ATOM_TO_STRING(atom); /* write back to GC root */
12612 *idp = ATOM_TO_JSID(atom);
12613 return true;
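// A small per-call-site cache mapping (shape, id) pairs to slot numbers;
// GetPropertyByName consults it to skip the full property lookup on repeat hits.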
12616 static const size_t PIC_TABLE_ENTRY_COUNT = 32;
12618 struct PICTableEntry
12620 jsid id;
12621 uint32 shape;
12622 uint32 slot;
12625 struct PICTable
12627 PICTable() : entryCount(0) {}
12629 PICTableEntry entries[PIC_TABLE_ENTRY_COUNT];
12630 uint32 entryCount;
12632 bool scan(uint32 shape, jsid id, uint32 *slotOut) {
12633 for (size_t i = 0; i < entryCount; ++i) {
12634 PICTableEntry &entry = entries[i];
12635 if (entry.shape == shape && entry.id == id) {
12636 *slotOut = entry.slot;
12637 return true;
12640 return false;
12643 void update(uint32 shape, jsid id, uint32 slot) {
12644 if (entryCount >= PIC_TABLE_ENTRY_COUNT)
12645 return;
12646 PICTableEntry &newEntry = entries[entryCount++];
12647 newEntry.shape = shape;
12648 newEntry.id = id;
12649 newEntry.slot = slot;
12653 static JSBool FASTCALL
12654 GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, PICTable *picTable)
12656 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12658 LeaveTraceIfGlobalObject(cx, obj);
12660 jsid id;
12661 if (!RootedStringToId(cx, namep, &id)) {
12662 SetBuiltinError(tm);
12663 return false;
12666 /* Delegate to the op, if present. */
12667 PropertyIdOp op = obj->getOps()->getProperty;
12668 if (op) {
12669 bool result = op(cx, obj, obj, id, vp);
12670 if (!result)
12671 SetBuiltinError(tm);
12672 return WasBuiltinSuccessful(tm);
12675 /* Try to hit in the cache. */
12676 uint32 slot;
12677 if (picTable->scan(obj->shape(), id, &slot)) {
12678 *vp = obj->getSlot(slot);
12679 return WasBuiltinSuccessful(tm);
12682 const Shape *shape;
12683 JSObject *holder;
12684 if (!js_GetPropertyHelperWithShape(cx, obj, obj, id, JSGET_METHOD_BARRIER, vp, &shape,
12685 &holder)) {
12686 SetBuiltinError(tm);
12687 return false;
12690 /* Only update the table when the object is the holder of the property. */
12691 if (obj == holder && shape->hasSlot() && shape->hasDefaultGetter()) {
12693 * Note: we insert the non-normalized id into the table so you don't need to
12694 * normalize it before hitting in the table (faster lookup).
12696 picTable->update(obj->shape(), id, shape->slot);
12699 return WasBuiltinSuccessful(tm);
12701 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUEPTR,
12702 PICTABLE,
12703 0, ACCSET_STORE_ANY)
12705 // Convert the value in a slot to a string and store the resulting string back
12706 // in the slot (typically in order to root it).
12707 JS_REQUIRES_STACK RecordingStatus
12708 TraceRecorder::primitiveToStringInPlace(Value* vp)
12710 Value v = *vp;
12711 JS_ASSERT(v.isPrimitive());
12713 if (!v.isString()) {
12714 // v is not a string. Turn it into one. js_ValueToString is safe
12715 // because v is not an object.
12716 #ifdef DEBUG
12717 TraceMonitor *localtm = traceMonitor;
12718 #endif
12719 JSString *str = js_ValueToString(cx, v);
12720 JS_ASSERT(localtm->recorder == this);
12721 if (!str)
12722 RETURN_ERROR("failed to stringify element id");
12723 v.setString(str);
12724 set(vp, stringify(*vp));
12726 // Write the string back to the stack to save the interpreter some work
12727 // and to ensure snapshots get the correct type for this slot.
12728 *vp = v;
12730 return RECORD_CONTINUE;
12733 JS_REQUIRES_STACK RecordingStatus
12734 TraceRecorder::getPropertyByName(LIns* obj_ins, Value* idvalp, Value* outp)
12736 CHECK_STATUS(primitiveToStringInPlace(idvalp));
12737 enterDeepBailCall();
12739 // Call GetPropertyByName. The vp parameter points to stack because this is
12740 // what the interpreter currently does. obj and id are rooted on the
12741 // interpreter stack, but the slot at vp is not a root.
12742 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12743 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
12744 PICTable *picTable = new (traceAlloc()) PICTable();
12745 LIns* pic_ins = w.nameImmpNonGC(picTable);
12746 LIns* args[] = {pic_ins, vp_ins, idvalp_ins, obj_ins, cx_ins};
12747 LIns* ok_ins = w.call(&GetPropertyByName_ci, args);
12749 // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
12750 // entry for that address. Correct it. (If the value in the address is
12751 // never used again, the usual case, Nanojit will kill this load.)
12752 // The Address could be made more precise with some effort (idvalp_ins may
12753 // be a stack location), but it's not worth it because this case is rare.
12754 tracker.set(idvalp, w.ldp(AnyAddress(idvalp_ins)));
12756 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12757 leaveDeepBailCall();
12758 return RECORD_CONTINUE;
12761 static JSBool FASTCALL
12762 GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp)
12764 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12766 LeaveTraceIfGlobalObject(cx, obj);
12768 AutoIdRooter idr(cx);
12769 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
12770 SetBuiltinError(tm);
12771 return JS_FALSE;
12773 return WasBuiltinSuccessful(tm);
12775 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, 0,
12776 ACCSET_STORE_ANY)
12778 JS_REQUIRES_STACK RecordingStatus
12779 TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* outp)
12781 CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
12783 // See note in getPropertyByName about vp.
12784 enterDeepBailCall();
12785 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12786 LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
12787 LIns* ok_ins = w.call(&GetPropertyByIndex_ci, args);
12788 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12789 leaveDeepBailCall();
12790 return RECORD_CONTINUE;
12793 static JSBool FASTCALL
12794 GetPropertyById(JSContext* cx, JSObject* obj, jsid id, Value* vp)
12796 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12798 LeaveTraceIfGlobalObject(cx, obj);
12799 if (!obj->getProperty(cx, id, vp)) {
12800 SetBuiltinError(tm);
12801 return JS_FALSE;
12803 return WasBuiltinSuccessful(tm);
12805 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById, CONTEXT, OBJECT, JSID, VALUEPTR,
12806 0, ACCSET_STORE_ANY)
12808 JS_REQUIRES_STACK RecordingStatus
12809 TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp)
12811 // Find the atom.
12812 JSAtom* atom;
12813 jsbytecode* pc = cx->regs->pc;
12814 const JSCodeSpec& cs = js_CodeSpec[*pc];
12815 if (*pc == JSOP_LENGTH) {
12816 atom = cx->runtime->atomState.lengthAtom;
12817 } else if (JOF_TYPE(cs.format) == JOF_ATOM) {
12818 atom = atoms[GET_INDEX(pc)];
12819 } else {
12820 JS_ASSERT(JOF_TYPE(cs.format) == JOF_SLOTATOM);
12821 atom = atoms[GET_INDEX(pc + SLOTNO_LEN)];
12824 JS_STATIC_ASSERT(sizeof(jsid) == sizeof(void *));
12825 jsid id = ATOM_TO_JSID(atom);
12827 // Call GetPropertyById. See note in getPropertyByName about vp.
12828 enterDeepBailCall();
12829 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12830 LIns* args[] = {vp_ins, w.nameImmw(JSID_BITS(id)), obj_ins, cx_ins};
12831 LIns* ok_ins = w.call(&GetPropertyById_ci, args);
12832 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12833 leaveDeepBailCall();
12834 return RECORD_CONTINUE;
12837 /* Manually inlined, specialized copy of js_NativeGet. */
12838 static JSBool FASTCALL
12839 GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, Shape* shape, Value* vp)
12841 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12843 LeaveTraceIfGlobalObject(cx, obj);
12845 #ifdef DEBUG
12846 JSProperty* prop;
12847 JSObject* pobj;
12848 JS_ASSERT(obj->lookupProperty(cx, shape->id, &pobj, &prop));
12849 JS_ASSERT(prop == (JSProperty*) shape);
12850 #endif
12852 // Shape::get contains a special case for With objects. We can elide it
12853 // here because With objects are, we claim, never on the operand stack
12854 // while recording.
12855 JS_ASSERT(obj->getClass() != &js_WithClass);
12857 vp->setUndefined();
12858 if (!shape->getterOp()(cx, obj, SHAPE_USERID(shape), vp)) {
12859 SetBuiltinError(tm);
12860 return JS_FALSE;
12862 return WasBuiltinSuccessful(tm);
12864 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
12865 CONTEXT, OBJECT, SHAPE, VALUEPTR, 0, ACCSET_STORE_ANY)
12867 JS_REQUIRES_STACK RecordingStatus
12868 TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, const Shape* shape, Value* outp)
12870 JS_ASSERT(!shape->hasGetterValue());
12871 JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
12872 JS_ASSERT(!shape->hasDefaultGetterOrIsMethod());
12874 // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
12875 // FIXME - We should call the getter directly. Using a builtin function for
12876 // now because it buys some extra asserts. See bug 508310.
12877 enterDeepBailCall();
12878 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12879 LIns* args[] = {vp_ins, w.nameImmpNonGC(shape), obj_ins, cx_ins};
12880 LIns* ok_ins = w.call(&GetPropertyWithNativeGetter_ci, args);
12881 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12882 leaveDeepBailCall();
12883 return RECORD_CONTINUE;
12886 JS_REQUIRES_STACK RecordingStatus
12887 TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, const Shape* shape)
12889 if (!canCallImacro())
12890 RETURN_STOP("cannot trace script getter, already in imacro");
12892 // Rearrange the stack in preparation for the imacro, taking care to adjust
12893 // the interpreter state and the tracker in the same way. This adjustment
12894 // is noted in imacros.jsasm with .fixup tags.
12895 Value getter = shape->getterValue();
12896 Value*& sp = cx->regs->sp;
12897 switch (*cx->regs->pc) {
12898 case JSOP_GETPROP:
12899 sp++;
12900 sp[-1] = sp[-2];
12901 set(&sp[-1], get(&sp[-2]));
12902 sp[-2] = getter;
12903 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12904 return callImacroInfallibly(getprop_imacros.scriptgetter);
12906 case JSOP_CALLPROP:
12907 sp += 2;
12908 sp[-2] = getter;
12909 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12910 sp[-1] = sp[-3];
12911 set(&sp[-1], get(&sp[-3]));
12912 return callImacroInfallibly(callprop_imacros.scriptgetter);
12914 case JSOP_GETTHISPROP:
12915 case JSOP_GETARGPROP:
12916 case JSOP_GETLOCALPROP:
12917 sp += 2;
12918 sp[-2] = getter;
12919 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12920 sp[-1] = ObjectValue(*obj);
12921 set(&sp[-1], obj_ins);
12922 return callImacroInfallibly(getthisprop_imacros.scriptgetter);
12924 default:
12925 RETURN_STOP("cannot trace script getter for this opcode");
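// The stack rewriting above (for JSOP_GETPROP, |... obj| becomes |... getter obj|)
// lets the scriptgetter imacros run the getter as an ordinary call; the
// interpreter stack and the tracker are adjusted in lockstep via set(), matching
// the .fixup annotations in imacros.jsasm.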
12929 JS_REQUIRES_STACK RecordingStatus
12930 TraceRecorder::getCharCodeAt(JSString *str, LIns* str_ins, LIns* idx_ins, LIns** out)
12932 CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
12933 idx_ins = w.ui2p(idx_ins);
12934 LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
12935 if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins, w.nameImmw(JSString::ROPE_BIT)))))
12937 LIns *args[] = { str_ins, cx_ins };
12938 LIns *ok_ins = w.call(&js_Flatten_ci, args);
12939 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12940 w.label(mbr);
12943 guard(true,
12944 w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT)),
12945 snapshot(MISMATCH_EXIT));
12946 *out = w.i2d(w.getStringChar(str_ins, idx_ins));
12947 return RECORD_CONTINUE;
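// getCharCodeAt stays on trace for character-code reads (e.g. |str.charCodeAt(i)|):
// rope strings are flattened in place via js_Flatten, and the index is guarded
// against the length packed into the string's lengthAndFlags word, so only an
// out-of-range index (or a failed flatten) leaves the trace.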
12950 JS_STATIC_ASSERT(sizeof(JSString) == 16 || sizeof(JSString) == 32);
12953 JS_REQUIRES_STACK LIns*
12954 TraceRecorder::getUnitString(LIns* str_ins, LIns* idx_ins)
12956 LIns *ch_ins = w.getStringChar(str_ins, idx_ins);
12957 guard(true, w.ltuiN(ch_ins, UNIT_STRING_LIMIT), MISMATCH_EXIT);
12958 return w.addp(w.nameImmpNonGC(JSString::unitStringTable),
12959 w.lshpN(w.ui2p(ch_ins), (sizeof(JSString) == 16) ? 4 : 5));
12962 JS_REQUIRES_STACK RecordingStatus
12963 TraceRecorder::getCharAt(JSString *str, LIns* str_ins, LIns* idx_ins, JSOp mode, LIns** out)
12965 CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
12966 idx_ins = w.ui2p(idx_ins);
12967 LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
12968 if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins,
12969 w.nameImmw(JSString::ROPE_BIT)))))
12971 LIns *args[] = { str_ins, cx_ins };
12972 LIns *ok_ins = w.call(&js_Flatten_ci, args);
12973 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12974 w.label(mbr);
12977 LIns* inRange = w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT));
12979 if (mode == JSOP_GETELEM) {
12980 guard(true, inRange, MISMATCH_EXIT);
12982 *out = getUnitString(str_ins, idx_ins);
12983 } else {
12984 LIns *phi_ins = w.allocp(sizeof(JSString *));
12985 w.stAlloc(w.nameImmpNonGC(cx->runtime->emptyString), phi_ins);
12987 if (MaybeBranch mbr = w.jf(inRange)) {
12988 LIns *unitstr_ins = getUnitString(str_ins, idx_ins);
12989 w.stAlloc(unitstr_ins, phi_ins);
12990 w.label(mbr);
12992 *out = w.ldpAlloc(phi_ins);
12994 return RECORD_CONTINUE;
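// getCharAt handles string indexing such as |str[i]|. In JSOP_GETELEM mode an
// out-of-range index exits the trace; otherwise the result is routed through a
// stack-allocated "phi" slot preloaded with the empty string, so an out-of-range
// read yields "" without leaving the trace.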
12997 // Typed array tracing depends on EXPANDED_LOADSTORE and F2I
12998 #if NJ_EXPANDED_LOADSTORE_SUPPORTED && NJ_F2I_SUPPORTED
12999 static bool OkToTraceTypedArrays = true;
13000 #else
13001 static bool OkToTraceTypedArrays = false;
13002 #endif
13004 JS_REQUIRES_STACK void
13005 TraceRecorder::guardNotHole(LIns *argsobj_ins, LIns *idx_ins)
13007 // vp = &argsobj->slots[JSSLOT_ARGS_DATA].slots[idx]
13008 LIns* argsData_ins = w.getObjPrivatizedSlot(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
13009 LIns* slotOffset_ins = w.addp(w.nameImmw(offsetof(ArgumentsData, slots)),
13010 w.ui2p(w.muliN(idx_ins, sizeof(Value))));
13011 LIns* vp_ins = w.addp(argsData_ins, slotOffset_ins);
13013 guard(false,
13014 w.name(is_boxed_magic(ArgsSlotOffsetAddress(vp_ins), JS_ARGS_HOLE),
13015 "guard(not deleted arg)"),
13016 MISMATCH_EXIT);
13019 JS_REQUIRES_STACK AbortableRecordingStatus
13020 TraceRecorder::record_JSOP_GETELEM()
13022 bool call = *cx->regs->pc == JSOP_CALLELEM;
13024 Value& idx = stackval(-1);
13025 Value& lval = stackval(-2);
13027 LIns* obj_ins = get(&lval);
13028 LIns* idx_ins = get(&idx);
13030 // Special case for array-like access of strings.
13031 if (lval.isString() && hasInt32Repr(idx)) {
13032 if (call)
13033 RETURN_STOP_A("JSOP_CALLELEM on a string");
13034 int i = asInt32(idx);
13035 if (size_t(i) >= lval.toString()->length())
13036 RETURN_STOP_A("Invalid string index in JSOP_GETELEM");
13037 LIns* char_ins;
13038 CHECK_STATUS_A(getCharAt(lval.toString(), obj_ins, idx_ins, JSOP_GETELEM, &char_ins));
13039 set(&lval, char_ins);
13040 return ARECORD_CONTINUE;
13043 if (lval.isPrimitive())
13044 RETURN_STOP_A("JSOP_GETELEM on a primitive");
13045 RETURN_IF_XML_A(lval);
13047 JSObject* obj = &lval.toObject();
13048 if (obj == globalObj)
13049 RETURN_STOP_A("JSOP_GETELEM on global");
13050 LIns* v_ins;
13052 /* Property access using a string name or something we have to stringify. */
13053 if (!idx.isInt32()) {
13054 if (!idx.isPrimitive())
13055 RETURN_STOP_A("object used as index");
13057 return InjectStatus(getPropertyByName(obj_ins, &idx, &lval));
13060 if (obj->isArguments()) {
13061 // Don't even try to record if out of range or reading a deleted arg
13062 int32 int_idx = idx.toInt32();
13063 if (int_idx < 0 || int_idx >= (int32)obj->getArgsInitialLength())
13064 RETURN_STOP_A("cannot trace arguments with out of range index");
13065 if (obj->getArgsElement(int_idx).isMagic(JS_ARGS_HOLE))
13066 RETURN_STOP_A("reading deleted args element");
13068 // Only trace reading arguments out of an active, tracked frame
13069 unsigned depth;
13070 JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
13071 if (afp) {
13072 Value* vp = &afp->canonicalActualArg(int_idx);
13073 if (idx_ins->isImmD()) {
13074 JS_ASSERT(int_idx == (int32)idx_ins->immD());
13075 guardNotHole(obj_ins, w.nameImmi(int_idx));
13076 v_ins = get(vp);
13077 } else {
13078 // If the index is not a constant expression, we generate LIR to load the value from
13079 // the native stack area. The guard on js_ArgumentClass above ensures the up-to-date
13080 // value has been written back to the native stack area.
13081 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13084 * For small nactual,
13085 * 0 <= int_idx < nactual iff unsigned(int_idx) < unsigned(nactual).
13087 guard(true,
13088 w.name(w.ltui(idx_ins, w.nameImmui(afp->numActualArgs())),
13089 "guard(upvar index in range)"),
13090 MISMATCH_EXIT);
13092 guardNotHole(obj_ins, idx_ins);
13094 JSValueType type = getCoercedType(*vp);
13096 // Guard that the argument has the same type on trace as during recording.
13097 LIns* typemap_ins;
13098 if (depth == 0) {
13099 // In this case, we are in the same frame where the arguments object was created.
13100 // The entry type map is not necessarily up-to-date, so we capture a new type map
13101 // for this point in the code.
13102 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
13103 JSValueType* typemap = new (traceAlloc()) JSValueType[stackSlots];
13104 DetermineTypesVisitor detVisitor(*this, typemap);
13105 VisitStackSlots(detVisitor, cx, 0);
13106 typemap_ins = w.nameImmpNonGC(typemap + 2 /* callee, this */);
13107 } else {
13108 // In this case, we are in a deeper frame from where the arguments object was
13109 // created. The type map at the point of the call out from the creation frame
13110 // is accurate.
13111 // Note: this relies on the assumption that we abort on setting an element of
13112 // an arguments object in any deeper frame.
13113 LIns* fip_ins = w.ldpRstack(lirbuf->rp, (callDepth-depth)*sizeof(FrameInfo*));
13114 typemap_ins = w.addp(fip_ins, w.nameImmw(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSValueType)));
13117 LIns* type_ins = w.lduc2uiConstTypeMapEntry(typemap_ins, idx_ins);
13118 guard(true,
13119 w.name(w.eqi(type_ins, w.immi(type)), "guard(type-stable upvar)"),
13120 BRANCH_EXIT);
13122 // Read the value out of the native stack area.
13123 size_t stackOffset = nativespOffset(&afp->canonicalActualArg(0));
13124 LIns* args_addr_ins = w.addp(lirbuf->sp, w.nameImmw(stackOffset));
13125 LIns* argi_addr_ins = w.addp(args_addr_ins,
13126 w.ui2p(w.muli(idx_ins, w.nameImmi(sizeof(double)))));
13128 // The Address could be more precise, but ValidateWriter
13129 // doesn't recognise the complex expression involving 'sp' as
13130 // a stack access, and it's not worth the effort to be
13131 // more precise because this case is rare.
13132 v_ins = stackLoad(AnyAddress(argi_addr_ins), type);
13134 JS_ASSERT(v_ins);
13135 set(&lval, v_ins);
13136 if (call)
13137 set(&idx, obj_ins);
13138 return ARECORD_CONTINUE;
13140 RETURN_STOP_A("can't reach arguments object's frame");
13143 if (obj->isDenseArray()) {
13144 // Fast path for dense arrays accessed with an integer index.
13145 Value* vp;
13146 LIns* addr_ins;
13148 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13149 guardDenseArray(obj_ins, branchExit);
13150 CHECK_STATUS_A(denseArrayElement(lval, idx, vp, v_ins, addr_ins, branchExit));
13151 set(&lval, v_ins);
13152 if (call)
13153 set(&idx, obj_ins);
13154 return ARECORD_CONTINUE;
13157 if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
13158 // Fast path for typed arrays accessed with an integer index.
13159 Value* vp;
13160 guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_CONST);
13161 CHECK_STATUS_A(typedArrayElement(lval, idx, vp, v_ins));
13162 set(&lval, v_ins);
13163 if (call)
13164 set(&idx, obj_ins);
13165 return ARECORD_CONTINUE;
13168 return InjectStatus(getPropertyByIndex(obj_ins, idx_ins, &lval));
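// Dispatch summary for record_JSOP_GETELEM above:
//   |"abc"[i]|                 -> getCharAt fast path
//   |obj[key]|, key not int32  -> getPropertyByName (PIC-backed builtin)
//   |arguments[i]|             -> load from the traced frame's native stack area
//   dense array element        -> denseArrayElement
//   typed array element        -> typedArrayElement
//   everything else            -> getPropertyByIndex builtin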
13171 /* Functions used by JSOP_SETELEM */
13173 static JSBool FASTCALL
13174 SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, JSBool strict)
13176 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13178 LeaveTraceIfGlobalObject(cx, obj);
13180 jsid id;
13181 if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp, strict)) {
13182 SetBuiltinError(tm);
13183 return false;
13185 return WasBuiltinSuccessful(tm);
13187 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByName,
13188 CONTEXT, OBJECT, STRINGPTR, VALUEPTR, BOOL,
13189 0, ACCSET_STORE_ANY)
13191 static JSBool FASTCALL
13192 InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, ValueArgType arg)
13194 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13196 LeaveTraceIfGlobalObject(cx, obj);
13198 jsid id;
13199 if (!RootedStringToId(cx, namep, &id) ||
13200 !obj->defineProperty(cx, id, ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
13201 SetBuiltinError(tm);
13202 return JS_FALSE;
13204 return WasBuiltinSuccessful(tm);
13206 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUE,
13207 0, ACCSET_STORE_ANY)
13209 JS_REQUIRES_STACK RecordingStatus
13210 TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, Value* idvalp, Value* rvalp, bool init)
13212 CHECK_STATUS(primitiveToStringInPlace(idvalp));
13214 if (init) {
13215 LIns* v_ins = box_value_for_native_call(*rvalp, get(rvalp));
13216 enterDeepBailCall();
13217 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
13218 LIns* args[] = {v_ins, idvalp_ins, obj_ins, cx_ins};
13219 pendingGuardCondition = w.call(&InitPropertyByName_ci, args);
13220 } else {
13221 // See note in getPropertyByName about vp.
13222 LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
13223 enterDeepBailCall();
13224 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
13225 LIns* args[] = { strictModeCode_ins, vp_ins, idvalp_ins, obj_ins, cx_ins };
13226 pendingGuardCondition = w.call(&SetPropertyByName_ci, args);
13229 leaveDeepBailCall();
13230 return RECORD_CONTINUE;
13233 static JSBool FASTCALL
13234 SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp, JSBool strict)
13236 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13238 LeaveTraceIfGlobalObject(cx, obj);
13240 AutoIdRooter idr(cx);
13241 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp, strict)) {
13242 SetBuiltinError(tm);
13243 return false;
13245 return WasBuiltinSuccessful(tm);
13247 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, BOOL,
13248 0, ACCSET_STORE_ANY)
13250 static JSBool FASTCALL
13251 InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, ValueArgType arg)
13253 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13255 LeaveTraceIfGlobalObject(cx, obj);
13257 AutoIdRooter idr(cx);
13258 if (!js_Int32ToId(cx, index, idr.addr()) ||
13259 !obj->defineProperty(cx, idr.id(), ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
13260 SetBuiltinError(tm);
13261 return JS_FALSE;
13263 return WasBuiltinSuccessful(tm);
13265 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, VALUE,
13266 0, ACCSET_STORE_ANY)
13268 JS_REQUIRES_STACK RecordingStatus
13269 TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* rvalp, bool init)
13271 CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
13273 if (init) {
13274 LIns* rval_ins = box_value_for_native_call(*rvalp, get(rvalp));
13275 enterDeepBailCall();
13276 LIns* args[] = {rval_ins, index_ins, obj_ins, cx_ins};
13277 pendingGuardCondition = w.call(&InitPropertyByIndex_ci, args);
13278 } else {
13279 // See note in getPropertyByName about vp.
13280 LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
13281 enterDeepBailCall();
13282 LIns* args[] = {strictModeCode_ins, vp_ins, index_ins, obj_ins, cx_ins};
13283 pendingGuardCondition = w.call(&SetPropertyByIndex_ci, args);
13286 leaveDeepBailCall();
13287 return RECORD_CONTINUE;
13290 JS_REQUIRES_STACK AbortableRecordingStatus
13291 TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
13293 Value& v = stackval(v_spindex);
13294 Value& idx = stackval(idx_spindex);
13295 Value& lval = stackval(lval_spindex);
13297 if (lval.isPrimitive())
13298 RETURN_STOP_A("left JSOP_SETELEM operand is not an object");
13299 RETURN_IF_XML_A(lval);
13301 JSObject* obj = &lval.toObject();
13302 LIns* obj_ins = get(&lval);
13303 LIns* idx_ins = get(&idx);
13304 LIns* v_ins = get(&v);
13306 if (obj->isArguments())
13307 RETURN_STOP_A("can't trace setting elements of the |arguments| object");
13309 if (obj == globalObj)
13310 RETURN_STOP_A("can't trace setting elements on the global object");
13312 if (!idx.isInt32()) {
13313 if (!idx.isPrimitive())
13314 RETURN_STOP_A("non-primitive index");
13315 CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
13316 *cx->regs->pc == JSOP_INITELEM));
13317 } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
13318 // Fast path: assigning to element of typed array.
13319 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13321 // Ensure array is a typed array and is the same type as what was written
13322 guardClass(obj_ins, obj->getClass(), branchExit, LOAD_CONST);
13324 js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);
13326 LIns* priv_ins = w.ldpObjPrivate(obj_ins);
13328 // The index was on the stack and is therefore a LIR float; force it to
13329 // be an integer.
13330 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13332 // Ensure idx >= 0 && idx < length (by using uint32)
13333 CHECK_STATUS_A(guard(true,
13334 w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
13335 "inRange"),
13336 OVERFLOW_EXIT, /* abortIfAlwaysExits = */true));
13338 // We're now ready to store
13339 LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);
13340 LIns* pidx_ins = w.ui2p(idx_ins);
13341 LIns* typed_v_ins = v_ins;
13343 // If it's not a number, convert objects and undefined to NaN,
13344 // null to 0, strings via a call to js_StringToNumber, and booleans
13345 // via i2d.
13346 if (!v.isNumber()) {
13347 if (v.isNull()) {
13348 typed_v_ins = w.immd(0);
13349 } else if (v.isUndefined()) {
13350 typed_v_ins = w.immd(js_NaN);
13351 } else if (v.isString()) {
13352 LIns* ok_ins = w.allocp(sizeof(JSBool));
13353 LIns* args[] = { ok_ins, typed_v_ins, cx_ins };
13354 typed_v_ins = w.call(&js_StringToNumber_ci, args);
13355 guard(false,
13356 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
13357 OOM_EXIT);
13358 } else if (v.isBoolean()) {
13359 JS_ASSERT(v.isBoolean());
13360 typed_v_ins = w.i2d(typed_v_ins);
13361 } else {
13362 typed_v_ins = w.immd(js_NaN);
13366 switch (tarray->type) {
13367 case js::TypedArray::TYPE_INT8:
13368 case js::TypedArray::TYPE_INT16:
13369 case js::TypedArray::TYPE_INT32:
13370 typed_v_ins = d2i(typed_v_ins);
13371 break;
13372 case js::TypedArray::TYPE_UINT8:
13373 case js::TypedArray::TYPE_UINT16:
13374 case js::TypedArray::TYPE_UINT32:
13375 typed_v_ins = d2u(typed_v_ins);
13376 break;
13377 case js::TypedArray::TYPE_UINT8_CLAMPED:
13378 if (IsPromotedInt32(typed_v_ins)) {
13379 typed_v_ins = w.demoteToInt32(typed_v_ins);
13380 typed_v_ins = w.cmovi(w.ltiN(typed_v_ins, 0),
13381 w.immi(0),
13382 w.cmovi(w.gtiN(typed_v_ins, 0xff),
13383 w.immi(0xff),
13384 typed_v_ins));
13385 } else {
13386 typed_v_ins = w.call(&js_TypedArray_uint8_clamp_double_ci, &typed_v_ins);
13388 break;
13389 case js::TypedArray::TYPE_FLOAT32:
13390 case js::TypedArray::TYPE_FLOAT64:
13391 // Do nothing, this is already a float
13392 break;
13393 default:
13394 JS_NOT_REACHED("Unknown typed array type in tracer");
13397 switch (tarray->type) {
13398 case js::TypedArray::TYPE_INT8:
13399 case js::TypedArray::TYPE_UINT8_CLAMPED:
13400 case js::TypedArray::TYPE_UINT8:
13401 w.sti2cTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13402 break;
13403 case js::TypedArray::TYPE_INT16:
13404 case js::TypedArray::TYPE_UINT16:
13405 w.sti2sTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13406 break;
13407 case js::TypedArray::TYPE_INT32:
13408 case js::TypedArray::TYPE_UINT32:
13409 w.stiTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13410 break;
13411 case js::TypedArray::TYPE_FLOAT32:
13412 w.std2fTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13413 break;
13414 case js::TypedArray::TYPE_FLOAT64:
13415 w.stdTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13416 break;
13417 default:
13418 JS_NOT_REACHED("Unknown typed array type in tracer");
13420 } else if (idx.toInt32() < 0 || !obj->isDenseArray()) {
13421 CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
13422 *cx->regs->pc == JSOP_INITELEM));
13423 } else {
13424 // Fast path: assigning to element of dense array.
13425 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13426 VMSideExit* mismatchExit = snapshot(MISMATCH_EXIT);
13428 // Make sure the array is actually dense.
13429 if (!obj->isDenseArray())
13430 return ARECORD_STOP;
13431 guardDenseArray(obj_ins, branchExit);
13433 // The index was on the stack and is therefore a LIR float. Force it to
13434 // be an integer.
13435 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13437 if (!js_EnsureDenseArrayCapacity(cx, obj, idx.toInt32()))
13438 RETURN_STOP_A("couldn't ensure dense array capacity for setelem");
13440 // Grow the array if the index exceeds the capacity. This happens
13441 // rarely, eg. less than 1% of the time in SunSpider.
13442 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
13444 * It's important that CSE works across this control-flow diamond
13445 * because it really helps series of interleaved GETELEM and SETELEM
13446 * operations. Likewise with the diamond below.
13448 w.pauseAddingCSEValues();
13449 if (MaybeBranch mbr = w.jt(w.name(w.ltui(idx_ins, capacity_ins), "inRange"))) {
13450 LIns* args[] = { idx_ins, obj_ins, cx_ins };
13451 LIns* res_ins = w.call(&js_EnsureDenseArrayCapacity_ci, args);
13452 guard(false, w.eqi0(res_ins), mismatchExit);
13453 w.label(mbr);
13455 w.resumeAddingCSEValues();
13457 // Get the address of the element.
13458 LIns *elemp_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");
13460 // If we are overwriting a hole:
13461 // - Guard that we don't have any indexed properties along the prototype chain.
13462 // - Check if the length has changed; if so, update it to index+1.
13463 // This happens moderately often, eg. close to 10% of the time in
13464 // SunSpider, and for some benchmarks it's close to 100%.
13465 Address dslotAddr = DSlotsAddress(elemp_ins);
13466 LIns* isHole_ins = w.name(is_boxed_magic(dslotAddr, JS_ARRAY_HOLE),
13467 "isHole");
13468 w.pauseAddingCSEValues();
13469 if (MaybeBranch mbr1 = w.jf(isHole_ins)) {
13471 * It's important that this use branchExit, not mismatchExit, since
13472 * changes to shapes should just mean we compile a new branch, not
13473 * throw the whole trace away.
13475 CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins, branchExit));
13476 LIns* length_ins = w.lduiObjPrivate(obj_ins);
13477 if (MaybeBranch mbr2 = w.jt(w.ltui(idx_ins, length_ins))) {
13478 LIns* newLength_ins = w.name(w.addiN(idx_ins, 1), "newLength");
13479 w.stuiObjPrivate(obj_ins, newLength_ins);
13480 w.label(mbr2);
13482 w.label(mbr1);
13484 w.resumeAddingCSEValues();
13486 // Right, actually set the element.
13487 box_value_into(v, v_ins, dslotAddr);
13490 jsbytecode* pc = cx->regs->pc;
13491 if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
13492 set(&lval, v_ins);
13494 return ARECORD_CONTINUE;
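// setElem mirrors the GETELEM dispatch for stores: non-int32 keys go through the
// Set/InitPropertyByName builtins, typed arrays convert/clamp the value and use a
// width-specific store, dense arrays guard capacity (growing on trace via
// js_EnsureDenseArrayCapacity when needed) and patch the length when a hole at or
// beyond the current length is overwritten, and anything else falls back to the
// Set/InitPropertyByIndex builtins.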
13497 JS_REQUIRES_STACK AbortableRecordingStatus
13498 TraceRecorder::record_JSOP_SETELEM()
13500 return setElem(-3, -2, -1);
13503 JS_REQUIRES_STACK AbortableRecordingStatus
13504 TraceRecorder::record_JSOP_CALLNAME()
13506 JSObject* obj = &cx->fp()->scopeChain();
13507 if (obj != globalObj) {
13508 Value* vp;
13509 LIns* ins;
13510 NameResult nr;
13511 CHECK_STATUS_A(scopeChainProp(obj, vp, ins, nr));
13512 stack(0, ins);
13513 stack(1, w.immiUndefined());
13514 return ARECORD_CONTINUE;
13517 LIns* obj_ins = w.immpObjGC(globalObj);
13518 JSObject* obj2;
13519 PCVal pcval;
13521 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
13523 if (pcval.isNull() || !pcval.isFunObj())
13524 RETURN_STOP_A("callee is not an object");
13526 stack(0, w.immpObjGC(&pcval.toFunObj()));
13527 stack(1, w.immiUndefined());
13528 return ARECORD_CONTINUE;
13531 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarArgOnTrace, CONTEXT, UINT32, INT32, UINT32,
13532 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13533 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarVarOnTrace, CONTEXT, UINT32, INT32, UINT32,
13534 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13535 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarStackOnTrace, CONTEXT, UINT32, INT32, UINT32,
13536 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13539 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
13540 * value. NULL is returned only on a can't-happen condition with an invalid
13541 * typemap. The value of the upvar is returned in v.
13543 JS_REQUIRES_STACK LIns*
13544 TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, Value& v)
13547 * Try to find the upvar in the current trace's tracker. For &vr to be
13548 * the address of the jsval found in js::GetUpvar, we must initialize
13549 * vr directly with the result, so it is a reference to the same location.
13550 * It does not work to assign the result to v, because v is an already
13551 * existing reference that points to something else.
13553 UpvarCookie cookie = uva->vector[index];
13554 const Value& vr = GetUpvar(cx, script->staticLevel, cookie);
13555 v = vr;
13557 if (LIns* ins = attemptImport(&vr))
13558 return ins;
13561 * The upvar is not in the current trace, so get the upvar value exactly as
13562 * the interpreter does and unbox.
13564 uint32 level = script->staticLevel - cookie.level();
13565 uint32 cookieSlot = cookie.slot();
13566 JSStackFrame* fp = cx->findFrameAtLevel(level);
13567 const CallInfo* ci;
13568 int32 slot;
13569 if (!fp->isFunctionFrame() || fp->isEvalFrame()) {
13570 ci = &GetUpvarStackOnTrace_ci;
13571 slot = cookieSlot;
13572 } else if (cookieSlot < fp->numFormalArgs()) {
13573 ci = &GetUpvarArgOnTrace_ci;
13574 slot = cookieSlot;
13575 } else if (cookieSlot == UpvarCookie::CALLEE_SLOT) {
13576 ci = &GetUpvarArgOnTrace_ci;
13577 slot = -2;
13578 } else {
13579 ci = &GetUpvarVarOnTrace_ci;
13580 slot = cookieSlot - fp->numFormalArgs();
13583 LIns* outp = w.allocp(sizeof(double));
13584 LIns* args[] = {
13585 outp,
13586 w.nameImmi(callDepth),
13587 w.nameImmi(slot),
13588 w.nameImmi(level),
13589 cx_ins
13591 LIns* call_ins = w.call(ci, args);
13592 JSValueType type = getCoercedType(v);
13593 guard(true,
13594 w.name(w.eqi(call_ins, w.immi(type)), "guard(type-stable upvar)"),
13595 BRANCH_EXIT);
13596 return stackLoad(AllocSlotsAddress(outp), type);
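// The GetUpvar*OnTrace builtins return the upvar's JSValueType and spill its
// payload into the |outp| alloc; the eqi guard keeps the trace specialized to
// the type observed while recording, so a later type change just side-exits.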
13600 * Generate LIR to load a value from the native stack. This method ensures that
13601 * the correct LIR load operator is used.
13603 LIns*
13604 TraceRecorder::stackLoad(Address addr, uint8 type)
13606 switch (type) {
13607 case JSVAL_TYPE_DOUBLE:
13608 return w.ldd(addr);
13609 case JSVAL_TYPE_NONFUNOBJ:
13610 case JSVAL_TYPE_STRING:
13611 case JSVAL_TYPE_FUNOBJ:
13612 case JSVAL_TYPE_NULL:
13613 return w.ldp(addr);
13614 case JSVAL_TYPE_INT32:
13615 return w.i2d(w.ldi(addr));
13616 case JSVAL_TYPE_BOOLEAN:
13617 case JSVAL_TYPE_UNDEFINED:
13618 case JSVAL_TYPE_MAGIC:
13619 return w.ldi(addr);
13620 case JSVAL_TYPE_BOXED:
13621 default:
13622 JS_NOT_REACHED("found jsval type in an upvar type map entry");
13623 return NULL;
13627 JS_REQUIRES_STACK AbortableRecordingStatus
13628 TraceRecorder::record_JSOP_GETFCSLOT()
13630 JSObject& callee = cx->fp()->callee();
13631 LIns* callee_ins = get(&cx->fp()->calleeValue());
13633 LIns* upvars_ins = w.getObjPrivatizedSlot(callee_ins, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
13635 unsigned index = GET_UINT16(cx->regs->pc);
13636 LIns *v_ins = unbox_value(callee.getFlatClosureUpvar(index),
13637 FCSlotsAddress(upvars_ins, index),
13638 snapshot(BRANCH_EXIT));
13639 stack(0, v_ins);
13640 return ARECORD_CONTINUE;
13643 JS_REQUIRES_STACK AbortableRecordingStatus
13644 TraceRecorder::record_JSOP_CALLFCSLOT()
13646 CHECK_STATUS_A(record_JSOP_GETFCSLOT());
13647 stack(1, w.immiUndefined());
13648 return ARECORD_CONTINUE;
13651 JS_REQUIRES_STACK RecordingStatus
13652 TraceRecorder::guardCallee(Value& callee)
13654 JSObject& callee_obj = callee.toObject();
13655 JS_ASSERT(callee_obj.isFunction());
13656 JSFunction* callee_fun = (JSFunction*) callee_obj.getPrivate();
13659 * First, guard on the callee's function (JSFunction*) identity. This is
13660 * necessary since tracing always inlines function calls. But note that
13661 * TR::functionCall avoids calling TR::guardCallee for constant methods
13662 * (those hit in the property cache from JSOP_CALLPROP).
13664 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13665 LIns* callee_ins = get(&callee);
13666 tree->gcthings.addUnique(callee);
13668 guard(true,
13669 w.eqp(w.ldpObjPrivate(callee_ins), w.nameImmpNonGC(callee_fun)),
13670 branchExit);
13673 * Second, consider guarding on the parent scope of the callee.
13675 * As long as we guard on parent scope, we are guaranteed when recording
13676 * variable accesses for a Call object having no private data that we can
13677 * emit code that avoids checking for an active JSStackFrame for the Call
13678 * object (which would hold fresh variable values -- the Call object's
13679 * slots would be stale until the stack frame is popped). This is because
13680 * Call objects can't pick up a new stack frame in their private slot once
13681 * they have none. TR::callProp and TR::setCallProp depend on this fact and
13682 * document where; if this guard is removed make sure to fix those methods.
13683 * Search for the "parent guard" comments in them.
13685 * In general, a loop in an escaping function scoped by Call objects could
13686 * be traced before the function has returned, and the trace then triggered
13687 * after, or vice versa. The function must escape, i.e., be a "funarg", or
13688 * else there's no need to guard callee parent at all. So once we know (by
13689 * static analysis) that a function may escape, we cannot avoid guarding on
13690 * either the private data of the Call object or the Call object itself, if
13691 * we wish to optimize for the particular deactivated stack frame (null
13692 * private data) case as noted above.
13694 if (callee_fun->isInterpreted() &&
13695 (!FUN_NULL_CLOSURE(callee_fun) || callee_fun->script()->bindings.hasUpvars())) {
13696 JSObject* parent = callee_obj.getParent();
13698 if (parent != globalObj) {
13699 if (!parent->isCall())
13700 RETURN_STOP("closure scoped by neither the global object nor a Call object");
13702 guard(true,
13703 w.eqp(w.ldpObjParent(callee_ins), w.immpObjGC(parent)),
13704 branchExit);
13707 return RECORD_CONTINUE;
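// So guardCallee pins two things: the callee's JSFunction identity, and, for
// interpreted closures that may escape (e.g. an inner function returned from its
// enclosing function and called later), the identity of the scoping Call object
// or global, per the "parent guard" discussion above.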
13711 * Prepare the given |arguments| object to be accessed on trace. If the return
13712 * value is non-NULL, then the given |arguments| object refers to a frame on
13713 * the current trace and is guaranteed to refer to the same frame on trace for
13714 * all later executions.
13716 JS_REQUIRES_STACK JSStackFrame *
13717 TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp)
13719 JS_ASSERT(obj->isArguments());
13721 JSStackFrame *afp = frameIfInRange(obj, depthp);
13722 if (!afp)
13723 return NULL;
13725 VMSideExit *exit = snapshot(MISMATCH_EXIT);
13726 guardClass(obj_ins, obj->getClass(), exit, LOAD_CONST);
13728 LIns* args_ins = getFrameObjPtr(afp->addressOfArgs());
13729 LIns* cmp = w.eqp(args_ins, obj_ins);
13730 guard(true, cmp, exit);
13731 return afp;
13734 JS_REQUIRES_STACK RecordingStatus
13735 TraceRecorder::createThis(JSObject& ctor, LIns* ctor_ins, LIns** thisobj_insp)
13737 JS_ASSERT(ctor.getFunctionPrivate()->isInterpreted());
13738 if (ctor.getFunctionPrivate()->isFunctionPrototype())
13739 RETURN_STOP("new Function.prototype");
13740 if (ctor.isBoundFunction())
13741 RETURN_STOP("new applied to bound function");
13743 // Given the above conditions, ctor.prototype is a non-configurable data
13744 // property with a slot.
13745 const Shape *shape = LookupInterpretedFunctionPrototype(cx, &ctor);
13746 if (!shape)
13747 RETURN_ERROR("new f: error resolving f.prototype");
13749 // At run time ctor might be a different instance of the same function. Its
13750 // .prototype property might not be resolved yet. Guard on the function
13751 // object's shape to make sure .prototype is there.
13753 // However, if ctor_ins is constant, which is usual, we don't need to
13754 // guard: .prototype is non-configurable, and an object's non-configurable
13755 // data properties always stay in the same slot for the life of the object.
13756 if (!ctor_ins->isImmP())
13757 guardShape(ctor_ins, &ctor, ctor.shape(), "ctor_shape", snapshot(MISMATCH_EXIT));
13759 // Pass the slot of ctor.prototype to js_CreateThisFromTrace. We can only
13760 // bake the slot into the trace, not the value, since .prototype is
13761 // writable.
13762 uintN protoSlot = shape->slot;
13763 LIns* args[] = { w.nameImmw(protoSlot), ctor_ins, cx_ins };
13764 *thisobj_insp = w.call(&js_CreateThisFromTrace_ci, args);
13765 guard(false, w.eqp0(*thisobj_insp), OOM_EXIT);
13766 return RECORD_CONTINUE;
13769 JS_REQUIRES_STACK RecordingStatus
13770 TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, bool constructing)
13773 * The function's identity (JSFunction and therefore JSScript) is guarded,
13774 * so we can optimize away the function call if the corresponding script is
13775 * empty. No need to worry about crossing globals or relocating argv, even,
13776 * in this case!
13778 if (fun->script()->isEmpty()) {
13779 LIns* rval_ins;
13780 if (constructing)
13781 CHECK_STATUS(createThis(fval.toObject(), get(&fval), &rval_ins));
13782 else
13783 rval_ins = w.immiUndefined();
13784 stack(-2 - argc, rval_ins);
13785 return RECORD_CONTINUE;
13788 if (fval.toObject().getGlobal() != globalObj)
13789 RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes");
13791 JSStackFrame* const fp = cx->fp();
13793 if (constructing) {
13794 LIns* thisobj_ins;
13795 CHECK_STATUS(createThis(fval.toObject(), get(&fval), &thisobj_ins));
13796 stack(-argc - 1, thisobj_ins);
13799 // Generate a type map for the outgoing frame and stash it in the LIR
13800 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
13801 FrameInfo* fi = (FrameInfo*)
13802 tempAlloc().alloc(sizeof(FrameInfo) + stackSlots * sizeof(JSValueType));
13803 JSValueType* typemap = (JSValueType*)(fi + 1);
13805 DetermineTypesVisitor detVisitor(*this, typemap);
13806 VisitStackSlots(detVisitor, cx, 0);
13808 JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);
13810 tree->gcthings.addUnique(fval);
13811 fi->pc = cx->regs->pc;
13812 fi->imacpc = fp->maybeImacropc();
13813 fi->spdist = cx->regs->sp - fp->slots();
13814 fi->set_argc(uint16(argc), constructing);
13815 fi->callerHeight = stackSlots - (2 + argc);
13816 fi->callerArgc = fp->isGlobalFrame() || fp->isEvalFrame() ? 0 : fp->numActualArgs();
13818 if (callDepth >= tree->maxCallDepth)
13819 tree->maxCallDepth = callDepth + 1;
13821 fi = traceMonitor->frameCache->memoize(fi);
13822 if (!fi)
13823 RETURN_STOP("out of memory");
13824 w.stRstack(w.nameImmpNonGC(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
13826 #if defined JS_JIT_SPEW
13827 debug_only_printf(LC_TMTracer, "iFC frameinfo=%p, stack=%d, map=", (void*)fi,
13828 fi->callerHeight);
13829 for (unsigned i = 0; i < fi->callerHeight; i++)
13830 debug_only_printf(LC_TMTracer, "%c", TypeToChar(fi->get_typemap()[i]));
13831 debug_only_print0(LC_TMTracer, "\n");
13832 #endif
13834 updateAtoms(fun->u.i.script);
13835 return RECORD_CONTINUE;
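// Each inlined call records a memoized FrameInfo (pc, imacro pc, argc, sp
// distance and the captured type map) on the trace's return-address stack; a
// deep side exit inside the inlined callee reads it back to reconstruct the
// interpreter frames that were never actually pushed while on trace.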
13839 * We implement JSOP_FUNAPPLY/JSOP_FUNCALL using imacros
13841 static inline JSOp
13842 GetCallMode(JSStackFrame *fp)
13844 if (fp->hasImacropc()) {
13845 JSOp op = (JSOp) *fp->imacropc();
13846 if (op == JSOP_FUNAPPLY || op == JSOP_FUNCALL)
13847 return op;
13849 return JSOP_CALL;
13852 JS_REQUIRES_STACK AbortableRecordingStatus
13853 TraceRecorder::record_JSOP_CALL()
13855 uintN argc = GET_ARGC(cx->regs->pc);
13856 cx->assertValidStackDepth(argc + 2);
13857 return InjectStatus(functionCall(argc, GetCallMode(cx->fp())));
13860 static jsbytecode* funapply_imacro_table[] = {
13861 funapply_imacros.apply0,
13862 funapply_imacros.apply1,
13863 funapply_imacros.apply2,
13864 funapply_imacros.apply3,
13865 funapply_imacros.apply4,
13866 funapply_imacros.apply5,
13867 funapply_imacros.apply6,
13868 funapply_imacros.apply7,
13869 funapply_imacros.apply8
13872 static jsbytecode* funcall_imacro_table[] = {
13873 funcall_imacros.call0,
13874 funcall_imacros.call1,
13875 funcall_imacros.call2,
13876 funcall_imacros.call3,
13877 funcall_imacros.call4,
13878 funcall_imacros.call5,
13879 funcall_imacros.call6,
13880 funcall_imacros.call7,
13881 funcall_imacros.call8
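// fun.call/fun.apply are traced by expanding one of the fixed-arity imacros
// above, so at most 8 forwarded arguments stay on trace; larger arity aborts
// recording in record_JSOP_FUNAPPLY below.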
13884 JS_REQUIRES_STACK AbortableRecordingStatus
13885 TraceRecorder::record_JSOP_FUNCALL()
13887 return record_JSOP_FUNAPPLY();
13890 JS_REQUIRES_STACK AbortableRecordingStatus
13891 TraceRecorder::record_JSOP_FUNAPPLY()
13893 jsbytecode *pc = cx->regs->pc;
13894 uintN argc = GET_ARGC(pc);
13895 cx->assertValidStackDepth(argc + 2);
13897 Value* vp = cx->regs->sp - (argc + 2);
13898 jsuint length = 0;
13899 JSObject* aobj = NULL;
13900 LIns* aobj_ins = NULL;
13902 JS_ASSERT(!cx->fp()->hasImacropc());
13904 if (!IsFunctionObject(vp[0]))
13905 return record_JSOP_CALL();
13906 RETURN_IF_XML_A(vp[0]);
13908 JSObject* obj = &vp[0].toObject();
13909 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
13910 if (FUN_INTERPRETED(fun))
13911 return record_JSOP_CALL();
13913 bool apply = fun->u.n.native == js_fun_apply;
13914 if (!apply && fun->u.n.native != js_fun_call)
13915 return record_JSOP_CALL();
13918 * We don't trace apply and call with a primitive 'this', which is the
13919 * first positional parameter, unless 'this' is null. That's ok.
13921 if (argc > 0 && !vp[2].isObjectOrNull())
13922 return record_JSOP_CALL();
13925 * Guard on the identity of this, which is the function we are applying.
13927 if (!IsFunctionObject(vp[1]))
13928 RETURN_STOP_A("callee is not a function");
13929 CHECK_STATUS_A(guardCallee(vp[1]));
13931 if (apply && argc >= 2) {
13932 if (argc != 2)
13933 RETURN_STOP_A("apply with excess arguments");
13934 if (vp[3].isPrimitive())
13935 RETURN_STOP_A("arguments parameter of apply is primitive");
13936 aobj = &vp[3].toObject();
13937 aobj_ins = get(&vp[3]);
13940 * We trace dense arrays and arguments objects. The code we generate
13941 * for apply uses imacros to handle a specific number of arguments.
13943 if (aobj->isDenseArray()) {
13944 guardDenseArray(aobj_ins, MISMATCH_EXIT);
13945 length = aobj->getArrayLength();
13946 guard(true,
13947 w.eqiN(w.lduiObjPrivate(aobj_ins), length),
13948 BRANCH_EXIT);
13949 } else if (aobj->isArguments()) {
13950 unsigned depth;
13951 JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
13952 if (!afp)
13953 RETURN_STOP_A("can't reach arguments object's frame");
13954 if (aobj->isArgsLengthOverridden())
13955 RETURN_STOP_A("can't trace arguments with overridden length");
13956 guardArgsLengthNotAssigned(aobj_ins);
13957 length = afp->numActualArgs();
13958 } else {
13959 RETURN_STOP_A("arguments parameter of apply is not a dense array or arguments object");
13962 if (length >= JS_ARRAY_LENGTH(funapply_imacro_table))
13963 RETURN_STOP_A("too many arguments to apply");
13965 return InjectStatus(callImacro(funapply_imacro_table[length]));
13968 if (argc >= JS_ARRAY_LENGTH(funcall_imacro_table))
13969 RETURN_STOP_A("too many arguments to call");
13971 return InjectStatus(callImacro(funcall_imacro_table[argc]));
13974 JS_REQUIRES_STACK AbortableRecordingStatus
13975 TraceRecorder::record_NativeCallComplete()
13977 if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
13978 return ARECORD_CONTINUE;
13980 #ifdef DEBUG
13981 JS_ASSERT(pendingSpecializedNative);
13982 jsbytecode* pc = cx->regs->pc;
13983 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNCALL || *pc == JSOP_FUNAPPLY ||
13984 *pc == JSOP_NEW || *pc == JSOP_SETPROP);
13985 #endif
13987 Value& v = stackval(-1);
13988 LIns* v_ins = get(&v);
13991 * At this point the generated code has already called the native function
13992 * and we can no longer fail back to the original pc location (JSOP_CALL)
13993 * because that would cause the interpreter to re-execute the native
13994 * function, which might have side effects.
13996 * Instead, the snapshot() call below sees that we are currently parked on
13997 * a traceable native's JSOP_CALL instruction, and it will advance the pc
13998 * to restore by the length of the current opcode. If the native's return
13999 * type is jsval, snapshot() will also indicate in the type map that the
14000 * element on top of the stack is a boxed value which doesn't need to be
14001 * boxed if the type guard generated by unbox_value() fails.
14004 if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
14005 leaveDeepBailCall();
14007 LIns* status = w.ldiStateField(builtinStatus);
14008 if (pendingSpecializedNative == &generatedSpecializedNative) {
14009 LIns* ok_ins = v_ins;
14012 * If we run a generic traceable native, the return value is in the argument
14013 * vector for native function calls. The actual return value of the native is a JSBool
14014 * indicating the error status.
14017 Address nativeRvalAddr = AllocSlotsAddress(native_rval_ins);
14018 if (pendingSpecializedNative->flags & JSTN_CONSTRUCTOR) {
14019 LIns *cond_ins;
14020 LIns *x;
14022 // v_ins := the object payload from native_rval_ins
14023 // cond_ins := true if native_rval_ins contains a JSObject*
14024 unbox_any_object(nativeRvalAddr, &v_ins, &cond_ins);
14025 // x := v_ins if native_rval_ins contains a JSObject*, NULL otherwise
14026 x = w.cmovp(cond_ins, v_ins, w.immw(0));
14027 // v_ins := newobj_ins if native_rval_ins doesn't contain a JSObject*,
14028 // the object payload from native_rval_ins otherwise
14029 v_ins = w.cmovp(w.eqp0(x), newobj_ins, x);
14030 } else {
14031 v_ins = w.ldd(nativeRvalAddr);
14033 set(&v, v_ins);
14035 propagateFailureToBuiltinStatus(ok_ins, status);
14037 guard(true, w.eqi0(status), STATUS_EXIT);
14040 if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
14042 * If we side exit on the unboxing code due to a type change, make sure that the boxed
14043 * value is actually currently associated with that location, and that we are talking
14044 * about the top of the stack here, which is where we expected boxed values.
14046 JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins);
14047 set(&v, unbox_value(v, AllocSlotsAddress(native_rval_ins), snapshot(BRANCH_EXIT)));
14048 } else if (pendingSpecializedNative->flags &
14049 (JSTN_RETURN_NULLABLE_STR | JSTN_RETURN_NULLABLE_OBJ)) {
14050 guard(v.isNull(),
14051 w.name(w.eqp0(v_ins), "guard(nullness)"),
14052 BRANCH_EXIT);
14053 } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
14054 /* Already added i2d in functionCall. */
14055 JS_ASSERT(v.isNumber());
14056 } else {
14057 /* Convert the result to double if the builtin returns int32. */
14058 if (v.isNumber() &&
14059 pendingSpecializedNative->builtin->returnType() == ARGTYPE_I) {
14060 set(&v, w.i2d(v_ins));
14064 // We'll null pendingSpecializedNative in monitorRecording, on the next op
14065 // cycle. There must be a next op since the stack is non-empty.
14066 return ARECORD_CONTINUE;
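// In short, record_NativeCallComplete patches up the just-called native's result:
// FAIL_STATUS natives have their builtin status checked (exiting at STATUS_EXIT
// on error), JSTN_UNBOX_AFTER results are unboxed under a type guard, nullable
// string/object returns guard on the nullness seen while recording, and builtins
// that return int32 are widened back to double.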
14069 JS_REQUIRES_STACK AbortableRecordingStatus
14070 TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr)
14072 JSObject* obj = &cx->fp()->scopeChain();
14073 JSOp op = JSOp(*cx->regs->pc);
14074 if (js_CodeSpec[op].format & JOF_GNAME)
14075 obj = obj->getGlobal();
14076 if (obj != globalObj)
14077 return scopeChainProp(obj, vp, ins, nr);
14079 /* Can't use prop here, because we don't want unboxing from global slots. */
14080 LIns* obj_ins = w.immpObjGC(globalObj);
14081 uint32 slot;
14083 JSObject* obj2;
14084 PCVal pcval;
14087 * Property cache ensures that we are dealing with an existing property,
14088 * and guards the shape for us.
14090 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
14092 /* Abort if property doesn't exist (interpreter will report an error.) */
14093 if (pcval.isNull())
14094 RETURN_STOP_A("named property not found");
14096 /* Insist on obj being the directly addressed object. */
14097 if (obj2 != obj)
14098 RETURN_STOP_A("name() hit prototype chain");
14100 /* Don't trace getter or setter calls, our caller wants a direct slot. */
14101 if (pcval.isShape()) {
14102 const Shape* shape = pcval.toShape();
14103 if (!isValidSlot(obj, shape))
14104 RETURN_STOP_A("name() not accessing a valid slot");
14105 slot = shape->slot;
14106 } else {
14107 if (!pcval.isSlot())
14108 RETURN_STOP_A("PCE is not a slot");
14109 slot = pcval.toSlot();
14112 if (!lazilyImportGlobalSlot(slot))
14113 RETURN_STOP_A("lazy import of global slot failed");
14115 vp = &obj->getSlotRef(slot);
14116 ins = get(vp);
14117 nr.tracked = true;
14118 return ARECORD_CONTINUE;
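// name() thus resolves either through scopeChainProp() for non-global scopes, or
// through the property cache plus lazy global-slot import when the name lives
// directly on the global object; prototype hits and getter/setter properties
// abort recording.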
14121 static JSObject* FASTCALL
14122 MethodReadBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj)
14124 Value v = ObjectValue(*funobj);
14125 AutoValueRooter tvr(cx, v);
14127 if (!obj->methodReadBarrier(cx, *shape, tvr.addr()))
14128 return NULL;
14129 return &tvr.value().toObject();
14131 JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SHAPE, OBJECT,
14132 0, ACCSET_STORE_ANY)
14135 * Get a property. The current opcode has JOF_ATOM.
14137 * There are two modes. The caller must pass nonnull pointers for either outp
14138 * or both slotp and v_insp. In the latter case, we require a plain old
14139 * property with a slot; if the property turns out to be anything else, abort
14140 * tracing (rather than emit a call to a native getter or GetAnyProperty).
14142 JS_REQUIRES_STACK AbortableRecordingStatus
14143 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, Value *outp)
14146 * Insist that obj have js_SetProperty as its set object-op. This suffices
14147 * to prevent a rogue obj from being used on-trace (loaded via obj_ins),
14148 * because we will guard on shape (or else global object identity) and any
14149 * object not having the same op must have a different class, and therefore
14150 * must differ in its shape (or not be the global object).
14152 if (!obj->isDenseArray() && obj->getOps()->getProperty)
14153 RETURN_STOP_A("non-dense-array, non-native js::ObjectOps::getProperty");
14155 JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));
14158 * Property cache ensures that we are dealing with an existing property,
14159 * and guards the shape for us.
14161 JSObject* obj2;
14162 PCVal pcval;
14163 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
14165 /* Check for nonexistent property reference, which results in undefined. */
14166 if (pcval.isNull()) {
14167 if (slotp)
14168 RETURN_STOP_A("property not found");
14171 * We could specialize to guard on just JSClass.getProperty, but a mere
14172 * class guard is simpler and slightly faster.
14174 if (obj->getClass()->getProperty != Valueify(JS_PropertyStub)) {
14175 RETURN_STOP_A("can't trace through access to undefined property if "
14176 "JSClass.getProperty hook isn't stubbed");
14178 guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);
14181 * This trace will be valid as long as neither the object nor any object
14182 * on its prototype chain changes shape.
14184 * FIXME: This loop can become a single shape guard once bug 497789 has
14185 * been fixed.
14187 VMSideExit* exit = snapshot(BRANCH_EXIT);
14188 do {
14189 if (obj->isNative()) {
14190 CHECK_STATUS_A(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
14191 } else if (obj->isDenseArray()) {
14192 guardDenseArray(obj_ins, exit);
14193 } else {
14194 RETURN_STOP_A("non-native object involved in undefined property access");
14196 } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));
14198 set(outp, w.immiUndefined());
14199 return ARECORD_CONTINUE;
14202 return InjectStatus(propTail(obj, obj_ins, obj2, pcval, slotp, v_insp, outp));
14205 JS_REQUIRES_STACK RecordingStatus
14206 TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcval,
14207 uint32 *slotp, LIns** v_insp, Value *outp)
14209 const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc];
14210 uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
14211 JS_ASSERT(!(cs.format & JOF_SET));
14213 const Shape* shape;
14214 uint32 slot;
14215 bool isMethod;
14217 if (pcval.isShape()) {
14218 shape = pcval.toShape();
14219 JS_ASSERT(obj2->nativeContains(*shape));
14221 if (setflags && !shape->hasDefaultSetter())
14222 RETURN_STOP("non-stub setter");
14223 if (setflags && !shape->writable())
14224 RETURN_STOP("writing to a readonly property");
14225 if (!shape->hasDefaultGetterOrIsMethod()) {
14226 if (slotp)
14227 RETURN_STOP("can't trace non-stub getter for this opcode");
14228 if (shape->hasGetterValue())
14229 return getPropertyWithScriptGetter(obj, obj_ins, shape);
14230 if (shape->slot == SHAPE_INVALID_SLOT)
14231 return getPropertyWithNativeGetter(obj_ins, shape, outp);
14232 return getPropertyById(obj_ins, outp);
14234 if (!obj2->containsSlot(shape->slot))
14235 RETURN_STOP("no valid slot");
14236 slot = shape->slot;
14237 isMethod = shape->isMethod();
14238 JS_ASSERT_IF(isMethod, obj2->hasMethodBarrier());
14239 } else {
14240 if (!pcval.isSlot())
14241 RETURN_STOP("PCE is not a slot");
14242 slot = pcval.toSlot();
14243 shape = NULL;
14244 isMethod = false;
14247 /* We have a slot. Check whether it is direct or in a prototype. */
14248 if (obj2 != obj) {
14249 if (setflags)
14250 RETURN_STOP("JOF_INCDEC|JOF_FOR opcode hit prototype chain");
14253 * We're getting a prototype property. Two cases:
14255 * 1. If obj2 is obj's immediate prototype we must walk up from obj,
14256 * since direct and immediate-prototype cache hits key on obj's shape,
14257 * not its identity.
14259 * 2. Otherwise obj2 is higher up the prototype chain and we've keyed
14260 * on obj's identity, and since setting __proto__ reshapes all objects
14261 * along the old prototype chain, then provided we shape-guard obj2,
14262 * we can "teleport" directly to obj2 by embedding it as a constant
14263 * (this constant object instruction will be CSE'ed with the constant
14264 * emitted by test_property_cache, whose shape is guarded).
14266 obj_ins = (obj2 == obj->getProto()) ? w.ldpObjProto(obj_ins) : w.immpObjGC(obj2);
14267 obj = obj2;
14270 LIns* v_ins;
14271 if (obj2 == globalObj) {
14272 if (isMethod)
14273 RETURN_STOP("get global method");
14274 if (!lazilyImportGlobalSlot(slot))
14275 RETURN_STOP("lazy import of global slot failed");
14276 v_ins = get(&globalObj->getSlotRef(slot));
14277 } else {
14278 v_ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT));
14282 * Joined function object stored as a method must be cloned when extracted
14283 * as a property value other than a callee. Note that shapes cover method
14284 * value as well as other property attributes and order, so this condition
14285 * is trace-invariant.
14287 * We do not impose the method read barrier if in an imacro, assuming any
14288 * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
14289 * leaked to the calling script.
14291 if (isMethod && !cx->fp()->hasImacropc()) {
14292 enterDeepBailCall();
14293 LIns* args[] = { v_ins, w.immpShapeGC(shape), obj_ins, cx_ins };
14294 v_ins = w.call(&MethodReadBarrier_ci, args);
14295 leaveDeepBailCall();
14298 if (slotp) {
14299 *slotp = slot;
14300 *v_insp = v_ins;
14302 if (outp)
14303 set(outp, v_ins);
14304 return RECORD_CONTINUE;
14308 * When we end up with a hole, read it as undefined, and make sure to set
14309 * addr_ins to null.
14311 JS_REQUIRES_STACK RecordingStatus
14312 TraceRecorder::denseArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins,
14313 LIns*& addr_ins, VMSideExit* branchExit)
14315 JS_ASSERT(oval.isObject() && ival.isInt32());
14317 JSObject* obj = &oval.toObject();
14318 LIns* obj_ins = get(&oval);
14319 jsint idx = ival.toInt32();
14320 LIns* idx_ins;
14321 CHECK_STATUS(makeNumberInt32(get(&ival), &idx_ins));
14324 * Arrays have both a length and a capacity, but we only need to check
14325 * |index < capacity|; in the case where |length < index < capacity|
14326 * the entries [length..capacity-1] will have already been marked as
14327 * holes by resizeDenseArrayElements() so we can read them and get
14328 * the correct value.
14330 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
14331 jsuint capacity = obj->getDenseArrayCapacity();
14332 bool within = (jsuint(idx) < capacity);
14333 if (!within) {
14334 /* If not idx < capacity, stay on trace (and read value as undefined). */
14335 guard(true, w.geui(idx_ins, capacity_ins), branchExit);
14337 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
14339 v_ins = w.immiUndefined();
14340 addr_ins = NULL;
14341 return RECORD_CONTINUE;
14344 /* Guard that index is within capacity. */
14345 guard(true, w.name(w.ltui(idx_ins, capacity_ins), "inRange"), branchExit);
14347 /* Load the value and guard on its type to unbox it. */
14348 vp = &obj->slots[jsuint(idx)];
14349 JS_ASSERT(sizeof(Value) == 8); // The |3| in the following statement requires this.
14350 addr_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");
14351 v_ins = unbox_value(*vp, DSlotsAddress(addr_ins), branchExit);
14353 /* Don't let the hole value escape. Turn it into an undefined. */
14354 if (vp->isMagic()) {
14355 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
14356 v_ins = w.immiUndefined();
14357 addr_ins = NULL;
14359 return RECORD_CONTINUE;
14362 /* See comments in TypedArrayTemplate<double>::copyIndexToValue. */
14363 LIns *
14364 TraceRecorder::canonicalizeNaNs(LIns *dval_ins)
14366 /* NaNs are the only floating point values that do not == themselves. */
14367 LIns *isnonnan_ins = w.eqd(dval_ins, dval_ins);
14368 return w.cmovd(isnonnan_ins, dval_ins, w.immd(js_NaN));
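    /*
     * Editor's sketch (not part of the original source): the LIR emitted above
     * is the trace-time analogue of this plain C++ helper, using the same
     * js_NaN constant referenced in the cmov:
     *
     *   static inline double
     *   CanonicalizeNaN(double d)
     *   {
     *       return (d == d) ? d : js_NaN;  // only NaN compares unequal to itself
     *   }
     */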
14371 JS_REQUIRES_STACK AbortableRecordingStatus
14372 TraceRecorder::typedArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins)
14374 JS_ASSERT(oval.isObject() && ival.isInt32());
14376 JSObject* obj = &oval.toObject();
14377 LIns* obj_ins = get(&oval);
14378 jsint idx = ival.toInt32();
14379 LIns* idx_ins;
14380 CHECK_STATUS_A(makeNumberInt32(get(&ival), &idx_ins));
14381 LIns* pidx_ins = w.ui2p(idx_ins);
14383 js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);
14384 JS_ASSERT(tarray);
14386 /* priv_ins will load the TypedArray* */
14387 LIns* priv_ins = w.ldpObjPrivate(obj_ins);
14389 /* for out-of-range, do the same thing that the interpreter does, which is return undefined */
14390 if ((jsuint) idx >= tarray->length) {
14391 CHECK_STATUS_A(guard(false,
14392 w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
14393 BRANCH_EXIT,
14394 /* abortIfAlwaysExits = */true));
14395 v_ins = w.immiUndefined();
14396 return ARECORD_CONTINUE;
14400 * Ensure idx < length
14402 * NOTE! mLength is uint32, but it's guaranteed to fit in a Value
14403 * int, so we can treat it as either signed or unsigned.
14404 * If the index happens to be negative, when it's treated as
14405 * unsigned it'll be a very large int, and thus won't be less than
14406 * length.
14408 guard(true,
14409 w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)), "inRange"),
14410 BRANCH_EXIT);
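    /*
     * Editor's note (worked example, not from the original source): the
     * unsigned compare above rejects negative indices as well as too-large
     * ones, because a negative int reinterprets as a huge uint32:
     *
     *   JS_ASSERT(uint32_t(int32_t(-1)) == 0xFFFFFFFFu);  // negative wraps to max
     *   // hence uint32_t(idx) < length is false for every negative idx
     */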
14412 /* We are now ready to load. Do a different type of load
14413 * depending on what type of thing we're loading. */
14414 LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);
14416 switch (tarray->type) {
14417 case js::TypedArray::TYPE_INT8:
14418 v_ins = w.i2d(w.ldc2iTypedArrayElement(data_ins, pidx_ins));
14419 break;
14420 case js::TypedArray::TYPE_UINT8:
14421 case js::TypedArray::TYPE_UINT8_CLAMPED:
14422 // i2d on purpose here: it's safe, because an 8-bit uint is guaranteed
14423 // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
14424 v_ins = w.i2d(w.lduc2uiTypedArrayElement(data_ins, pidx_ins));
14425 break;
14426 case js::TypedArray::TYPE_INT16:
14427 v_ins = w.i2d(w.lds2iTypedArrayElement(data_ins, pidx_ins));
14428 break;
14429 case js::TypedArray::TYPE_UINT16:
14430 // i2d on purpose here: it's safe, because a 16-bit uint is guaranteed
14431 // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
14432 v_ins = w.i2d(w.ldus2uiTypedArrayElement(data_ins, pidx_ins));
14433 break;
14434 case js::TypedArray::TYPE_INT32:
14435 v_ins = w.i2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
14436 break;
14437 case js::TypedArray::TYPE_UINT32:
14438 v_ins = w.ui2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
14439 break;
14440 case js::TypedArray::TYPE_FLOAT32:
14441 v_ins = canonicalizeNaNs(w.ldf2dTypedArrayElement(data_ins, pidx_ins));
14442 break;
14443 case js::TypedArray::TYPE_FLOAT64:
14444 v_ins = canonicalizeNaNs(w.lddTypedArrayElement(data_ins, pidx_ins));
14445 break;
14446 default:
14447 JS_NOT_REACHED("Unknown typed array type in tracer");
14450 return ARECORD_CONTINUE;
14453 JS_REQUIRES_STACK AbortableRecordingStatus
14454 TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
14456 JSOp op = JSOp(*cx->regs->pc);
14457 const JSCodeSpec& cs = js_CodeSpec[op];
14459 JS_ASSERT(cs.ndefs == 1);
14460 return prop(obj, obj_ins, NULL, NULL, &stackval(-cs.nuses));
14463 JS_REQUIRES_STACK AbortableRecordingStatus
14464 TraceRecorder::getProp(Value& v)
14466 if (v.isPrimitive())
14467 RETURN_STOP_A("primitive lhs");
14469 return getProp(&v.toObject(), get(&v));
14472 JS_REQUIRES_STACK AbortableRecordingStatus
14473 TraceRecorder::record_JSOP_NAME()
14475 Value* vp;
14476 LIns* v_ins;
14477 NameResult nr;
14478 CHECK_STATUS_A(name(vp, v_ins, nr));
14479 stack(0, v_ins);
14480 return ARECORD_CONTINUE;
14483 JS_REQUIRES_STACK AbortableRecordingStatus
14484 TraceRecorder::record_JSOP_DOUBLE()
14486 double d = consts[GET_INDEX(cx->regs->pc)].toDouble();
14487 stack(0, w.immd(d));
14488 return ARECORD_CONTINUE;
14491 JS_REQUIRES_STACK AbortableRecordingStatus
14492 TraceRecorder::record_JSOP_STRING()
14494 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
14495 stack(0, w.immpAtomGC(atom));
14496 return ARECORD_CONTINUE;
14499 JS_REQUIRES_STACK AbortableRecordingStatus
14500 TraceRecorder::record_JSOP_ZERO()
14502 stack(0, w.immd(0));
14503 return ARECORD_CONTINUE;
14506 JS_REQUIRES_STACK AbortableRecordingStatus
14507 TraceRecorder::record_JSOP_ONE()
14509 stack(0, w.immd(1));
14510 return ARECORD_CONTINUE;
14513 JS_REQUIRES_STACK AbortableRecordingStatus
14514 TraceRecorder::record_JSOP_NULL()
14516 stack(0, w.immpNull());
14517 return ARECORD_CONTINUE;
14520 JS_REQUIRES_STACK AbortableRecordingStatus
14521 TraceRecorder::record_JSOP_THIS()
14523 LIns* this_ins;
14524 CHECK_STATUS_A(getThis(this_ins));
14525 stack(0, this_ins);
14526 return ARECORD_CONTINUE;
14529 JS_REQUIRES_STACK AbortableRecordingStatus
14530 TraceRecorder::record_JSOP_FALSE()
14532 stack(0, w.immi(0));
14533 return ARECORD_CONTINUE;
14536 JS_REQUIRES_STACK AbortableRecordingStatus
14537 TraceRecorder::record_JSOP_TRUE()
14539 stack(0, w.immi(1));
14540 return ARECORD_CONTINUE;
14543 JS_REQUIRES_STACK AbortableRecordingStatus
14544 TraceRecorder::record_JSOP_OR()
14546 return ifop();
14549 JS_REQUIRES_STACK AbortableRecordingStatus
14550 TraceRecorder::record_JSOP_AND()
14552 return ifop();
14555 JS_REQUIRES_STACK AbortableRecordingStatus
14556 TraceRecorder::record_JSOP_TABLESWITCH()
14558 #ifdef NANOJIT_IA32
14559 /* Handle tableswitches specially -- prepare a jump table if needed. */
14560 return tableswitch();
14561 #else
14562 return InjectStatus(switchop());
14563 #endif
14566 JS_REQUIRES_STACK AbortableRecordingStatus
14567 TraceRecorder::record_JSOP_LOOKUPSWITCH()
14569 return InjectStatus(switchop());
14572 JS_REQUIRES_STACK AbortableRecordingStatus
14573 TraceRecorder::record_JSOP_STRICTEQ()
14575 CHECK_STATUS_A(strictEquality(true, false));
14576 return ARECORD_CONTINUE;
14579 JS_REQUIRES_STACK AbortableRecordingStatus
14580 TraceRecorder::record_JSOP_STRICTNE()
14582 CHECK_STATUS_A(strictEquality(false, false));
14583 return ARECORD_CONTINUE;
14586 JS_REQUIRES_STACK AbortableRecordingStatus
14587 TraceRecorder::record_JSOP_OBJECT()
14589 JSStackFrame* const fp = cx->fp();
14590 JSScript* script = fp->script();
14591 unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc);
14593 JSObject* obj;
14594 obj = script->getObject(index);
14595 stack(0, w.immpObjGC(obj));
14596 return ARECORD_CONTINUE;
14599 JS_REQUIRES_STACK AbortableRecordingStatus
14600 TraceRecorder::record_JSOP_POP()
14602 return ARECORD_CONTINUE;
14605 JS_REQUIRES_STACK AbortableRecordingStatus
14606 TraceRecorder::record_JSOP_TRAP()
14608 return ARECORD_STOP;
14611 JS_REQUIRES_STACK AbortableRecordingStatus
14612 TraceRecorder::record_JSOP_GETARG()
14614 stack(0, arg(GET_ARGNO(cx->regs->pc)));
14615 return ARECORD_CONTINUE;
14618 JS_REQUIRES_STACK AbortableRecordingStatus
14619 TraceRecorder::record_JSOP_SETARG()
14621 arg(GET_ARGNO(cx->regs->pc), stack(-1));
14622 return ARECORD_CONTINUE;
14625 JS_REQUIRES_STACK AbortableRecordingStatus
14626 TraceRecorder::record_JSOP_GETLOCAL()
14628 stack(0, var(GET_SLOTNO(cx->regs->pc)));
14629 return ARECORD_CONTINUE;
14632 JS_REQUIRES_STACK AbortableRecordingStatus
14633 TraceRecorder::record_JSOP_SETLOCAL()
14635 var(GET_SLOTNO(cx->regs->pc), stack(-1));
14636 return ARECORD_CONTINUE;
14639 JS_REQUIRES_STACK AbortableRecordingStatus
14640 TraceRecorder::record_JSOP_UINT16()
14642 stack(0, w.immd(GET_UINT16(cx->regs->pc)));
14643 return ARECORD_CONTINUE;
14646 JS_REQUIRES_STACK AbortableRecordingStatus
14647 TraceRecorder::record_JSOP_NEWINIT()
14649 initDepth++;
14650 hadNewInit = true;
14652 JSProtoKey key = JSProtoKey(cx->regs->pc[1]);
14654 LIns* proto_ins;
14655 CHECK_STATUS_A(getClassPrototype(key, proto_ins));
14657 LIns *v_ins;
14658 if (key == JSProto_Array) {
14659 LIns *args[] = { proto_ins, cx_ins };
14660 v_ins = w.call(&NewDenseEmptyArray_ci, args);
14661 } else {
14662 LIns *args[] = { w.immpNull(), proto_ins, cx_ins };
14663 v_ins = w.call(&js_InitializerObject_ci, args);
14665 guard(false, w.eqp0(v_ins), OOM_EXIT);
14666 stack(0, v_ins);
14667 return ARECORD_CONTINUE;
14670 JS_REQUIRES_STACK AbortableRecordingStatus
14671 TraceRecorder::record_JSOP_NEWARRAY()
14673 initDepth++;
14675 LIns* proto_ins;
14676 CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins));
14678 unsigned count = GET_UINT24(cx->regs->pc);
14679 LIns *args[] = { proto_ins, w.immi(count), cx_ins };
14680 LIns *v_ins = w.call(&NewDenseAllocatedArray_ci, args);
14682 guard(false, w.eqp0(v_ins), OOM_EXIT);
14683 stack(0, v_ins);
14684 return ARECORD_CONTINUE;
14687 JS_REQUIRES_STACK AbortableRecordingStatus
14688 TraceRecorder::record_JSOP_NEWOBJECT()
14690 initDepth++;
14692 LIns* proto_ins;
14693 CHECK_STATUS_A(getClassPrototype(JSProto_Object, proto_ins));
14695 JSObject* baseobj = cx->fp()->script()->getObject(getFullIndex(0));
14697 LIns *args[] = { w.immpObjGC(baseobj), proto_ins, cx_ins };
14698 LIns *v_ins = w.call(&js_InitializerObject_ci, args);
14700 guard(false, w.eqp0(v_ins), OOM_EXIT);
14701 stack(0, v_ins);
14702 return ARECORD_CONTINUE;
14705 JS_REQUIRES_STACK AbortableRecordingStatus
14706 TraceRecorder::record_JSOP_ENDINIT()
14708 initDepth--;
14709 if (initDepth == 0)
14710 hadNewInit = false;
14712 #ifdef DEBUG
14713 Value& v = stackval(-1);
14714 JS_ASSERT(!v.isPrimitive());
14715 #endif
14716 return ARECORD_CONTINUE;
14719 JS_REQUIRES_STACK AbortableRecordingStatus
14720 TraceRecorder::record_JSOP_INITELEM()
14722 Value& v = stackval(-1);
14723 Value& idx = stackval(-2);
14724 Value& lval = stackval(-3);
14726 // The object is either a dense Array or an Object. Only handle the dense case here.
14727 // Also skip array initializers which might be unoptimized NEWINIT initializers.
14728 if (!lval.toObject().isDenseArray() || hadNewInit)
14729 return setElem(-3, -2, -1);
14731 // The index is always the same constant integer.
14732 JS_ASSERT(idx.isInt32());
14734 // Nothing to do for holes, the array's length has already been set.
14735 if (v.isMagic(JS_ARRAY_HOLE))
14736 return ARECORD_CONTINUE;
14738 LIns* obj_ins = get(&lval);
14739 LIns* v_ins = get(&v);
14741 // Set the element.
14742 LIns *slots_ins = w.ldpObjSlots(obj_ins);
14743 box_value_into(v, v_ins, DSlotsAddress(slots_ins, idx.toInt32()));
14745 return ARECORD_CONTINUE;
14748 JS_REQUIRES_STACK AbortableRecordingStatus
14749 TraceRecorder::record_JSOP_DEFSHARP()
14751 return ARECORD_STOP;
14754 JS_REQUIRES_STACK AbortableRecordingStatus
14755 TraceRecorder::record_JSOP_USESHARP()
14757 return ARECORD_STOP;
14760 JS_REQUIRES_STACK AbortableRecordingStatus
14761 TraceRecorder::record_JSOP_INCARG()
14763 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1));
14766 JS_REQUIRES_STACK AbortableRecordingStatus
14767 TraceRecorder::record_JSOP_INCLOCAL()
14769 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1));
14772 JS_REQUIRES_STACK AbortableRecordingStatus
14773 TraceRecorder::record_JSOP_DECARG()
14775 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1));
14778 JS_REQUIRES_STACK AbortableRecordingStatus
14779 TraceRecorder::record_JSOP_DECLOCAL()
14781 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1));
14784 JS_REQUIRES_STACK AbortableRecordingStatus
14785 TraceRecorder::record_JSOP_ARGINC()
14787 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1, false));
14790 JS_REQUIRES_STACK AbortableRecordingStatus
14791 TraceRecorder::record_JSOP_LOCALINC()
14793 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1, false));
14796 JS_REQUIRES_STACK AbortableRecordingStatus
14797 TraceRecorder::record_JSOP_ARGDEC()
14799 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1, false));
14802 JS_REQUIRES_STACK AbortableRecordingStatus
14803 TraceRecorder::record_JSOP_LOCALDEC()
14805 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1, false));
14808 JS_REQUIRES_STACK AbortableRecordingStatus
14809 TraceRecorder::record_JSOP_IMACOP()
14811 JS_ASSERT(cx->fp()->hasImacropc());
14812 return ARECORD_CONTINUE;
14815 static JSBool FASTCALL
14816 ObjectToIterator(JSContext* cx, JSObject *obj, int32 flags, Value* vp)
14818 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14820 vp->setObject(*obj);
14821 bool ok = js_ValueToIterator(cx, flags, vp);
14822 if (!ok) {
14823 SetBuiltinError(tm);
14824 return false;
14826 return WasBuiltinSuccessful(tm);
14828 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, ObjectToIterator, CONTEXT, OBJECT, INT32, VALUEPTR,
14829 0, ACCSET_STORE_ANY)
14831 JS_REQUIRES_STACK AbortableRecordingStatus
14832 TraceRecorder::record_JSOP_ITER()
14834 Value& v = stackval(-1);
14835 if (v.isPrimitive())
14836 RETURN_STOP_A("for-in on a primitive value");
14838 RETURN_IF_XML_A(v);
14840 LIns *obj_ins = get(&v);
14841 jsuint flags = cx->regs->pc[1];
14843 enterDeepBailCall();
14845 LIns* vp_ins = w.allocp(sizeof(Value));
14846 LIns* args[] = { vp_ins, w.immi(flags), obj_ins, cx_ins };
14847 LIns* ok_ins = w.call(&ObjectToIterator_ci, args);
14849 // We need to guard on ok_ins, but this requires a snapshot of the state
14850 // after this op. monitorRecording will do it for us.
14851 pendingGuardCondition = ok_ins;
14853 // ObjectToIterator can deep-bail without throwing, leaving a value of
14854 // unknown type in *vp (it can be either a function or a non-function
14855 // object). Use the same mechanism as finishGetProp to arrange for
14856 // LeaveTree to deal with this value.
14857 pendingUnboxSlot = cx->regs->sp - 1;
14858 set(pendingUnboxSlot, w.name(w.lddAlloc(vp_ins), "iterval"));
14860 leaveDeepBailCall();
14862 return ARECORD_CONTINUE;
14865 static JSBool FASTCALL
14866 IteratorMore(JSContext *cx, JSObject *iterobj, Value *vp)
14868 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14870 if (!js_IteratorMore(cx, iterobj, vp)) {
14871 SetBuiltinError(tm);
14872 return false;
14874 return WasBuiltinSuccessful(tm);
14876 JS_DEFINE_CALLINFO_3(extern, BOOL_FAIL, IteratorMore, CONTEXT, OBJECT, VALUEPTR,
14877 0, ACCSET_STORE_ANY)
14879 JS_REQUIRES_STACK AbortableRecordingStatus
14880 TraceRecorder::record_JSOP_MOREITER()
14882 Value& iterobj_val = stackval(-1);
14883 if (iterobj_val.isPrimitive())
14884 RETURN_STOP_A("for-in on a primitive value");
14886 RETURN_IF_XML_A(iterobj_val);
14888 JSObject* iterobj = &iterobj_val.toObject();
14889 LIns* iterobj_ins = get(&iterobj_val);
14890 LIns* cond_ins;
14893 * JSOP_FOR* already guards on this, but in certain rare cases we might
14894      * record malformed loop traces. Note that it's not necessary to guard on
14895 * ni->flags (nor do we in unboxNextValue), because the different
14896 * iteration type will guarantee a different entry typemap.
14898 if (iterobj->hasClass(&js_IteratorClass)) {
14899 guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
14901 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
14902 if (ni->isKeyIter()) {
14903 LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
14904 LIns *cursor_ins = w.ldpIterCursor(ni_ins);
14905 LIns *end_ins = w.ldpIterEnd(ni_ins);
14907 cond_ins = w.ltp(cursor_ins, end_ins);
14908 stack(0, cond_ins);
14909 return ARECORD_CONTINUE;
14911 } else {
14912 guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
14915 enterDeepBailCall();
14917 LIns* vp_ins = w.allocp(sizeof(Value));
14918 LIns* args[] = { vp_ins, iterobj_ins, cx_ins };
14919 pendingGuardCondition = w.call(&IteratorMore_ci, args);
14921 leaveDeepBailCall();
14923 cond_ins = is_boxed_true(AllocSlotsAddress(vp_ins));
14924 stack(0, cond_ins);
14926 // Write this value back even though we haven't changed it.
14927 // See the comment in DeepBail about "clobbering deep bails".
14928 stack(-1, iterobj_ins);
14930 return ARECORD_CONTINUE;
14933 static JSBool FASTCALL
14934 CloseIterator(JSContext *cx, JSObject *iterobj)
14936 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14938 if (!js_CloseIterator(cx, iterobj)) {
14939 SetBuiltinError(tm);
14940 return false;
14942 return WasBuiltinSuccessful(tm);
14944 JS_DEFINE_CALLINFO_2(extern, BOOL_FAIL, CloseIterator, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
14946 JS_REQUIRES_STACK AbortableRecordingStatus
14947 TraceRecorder::record_JSOP_ENDITER()
14949 JS_ASSERT(!stackval(-1).isPrimitive());
14951 enterDeepBailCall();
14953 LIns* args[] = { stack(-1), cx_ins };
14954 LIns* ok_ins = w.call(&CloseIterator_ci, args);
14956 // We need to guard on ok_ins, but this requires a snapshot of the state
14957 // after this op. monitorRecording will do it for us.
14958 pendingGuardCondition = ok_ins;
14960 leaveDeepBailCall();
14962 return ARECORD_CONTINUE;
14965 #if JS_BITS_PER_WORD == 32
14966 JS_REQUIRES_STACK void
14967 TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
14969 w.stiValuePayload(w.immpMagicWhy(why), addr);
14970 w.stiValueTag(w.immpMagicWhy(JSVAL_TAG_MAGIC), addr);
14972 #elif JS_BITS_PER_WORD == 64
14973 JS_REQUIRES_STACK void
14974 TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
14976 LIns *magic = w.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC, why));
14977 w.stq(magic, addr);
14979 #endif
14981 JS_REQUIRES_STACK AbortableRecordingStatus
14982 TraceRecorder::unboxNextValue(LIns* &v_ins)
14984 Value &iterobj_val = stackval(-1);
14985 JSObject *iterobj = &iterobj_val.toObject();
14986 LIns* iterobj_ins = get(&iterobj_val);
14988 if (iterobj->hasClass(&js_IteratorClass)) {
14989 guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
14990 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
14992 LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
14993 LIns *cursor_ins = w.ldpIterCursor(ni_ins);
14995 /* Emit code to stringify the id if necessary. */
14996 Address cursorAddr = IterPropsAddress(cursor_ins);
14997 if (ni->isKeyIter()) {
14998 /* Read the next id from the iterator. */
14999 jsid id = *ni->current();
15000 LIns *id_ins = w.name(w.ldp(cursorAddr), "id");
15003 * Most iterations over object properties never have to actually deal with
15004 * any numeric properties, so we guard here instead of branching.
15006 guard(JSID_IS_STRING(id), is_string_id(id_ins), BRANCH_EXIT);
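            // Editor's note (hypothetical example, not from the original
            // source): in a loop like |for (var k in {a: 1, 0: 2})| the
            // numeric id 0 eventually shows up; the guard above then
            // side-exits, and a trace recorded from that exit takes the
            // js_IntToString path below instead.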
15008 if (JSID_IS_STRING(id)) {
15009 v_ins = unbox_string_id(id_ins);
15010 } else if (JSID_IS_INT(id)) {
15011 /* id is an integer, convert to a string. */
15012 LIns *id_to_int_ins = unbox_int_id(id_ins);
15013 LIns* args[] = { id_to_int_ins, cx_ins };
15014 v_ins = w.call(&js_IntToString_ci, args);
15015 guard(false, w.eqp0(v_ins), OOM_EXIT);
15016 } else {
15017 #if JS_HAS_XML_SUPPORT
15018 JS_ASSERT(JSID_IS_OBJECT(id));
15019 JS_ASSERT(JSID_TO_OBJECT(id)->isXMLId());
15020 RETURN_STOP_A("iterated over a property with an XML id");
15021 #else
15022 JS_NEVER_REACHED("unboxNextValue");
15023 #endif
15026 /* Increment the cursor by one jsid and store it back. */
15027 cursor_ins = w.addp(cursor_ins, w.nameImmw(sizeof(jsid)));
15028 w.stpIterCursor(cursor_ins, ni_ins);
15029 return ARECORD_CONTINUE;
15031 } else {
15032 guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
15036 Address iterValueAddr = CxAddress(iterValue);
15037 v_ins = unbox_value(cx->iterValue, iterValueAddr, snapshot(BRANCH_EXIT));
15038 storeMagic(JS_NO_ITER_VALUE, iterValueAddr);
15040 return ARECORD_CONTINUE;
15043 JS_REQUIRES_STACK AbortableRecordingStatus
15044 TraceRecorder::record_JSOP_FORNAME()
15046 Value* vp;
15047 LIns* x_ins;
15048 NameResult nr;
15049 CHECK_STATUS_A(name(vp, x_ins, nr));
15050 if (!nr.tracked)
15051 RETURN_STOP_A("forname on non-tracked value not supported");
15052 LIns* v_ins;
15053 CHECK_STATUS_A(unboxNextValue(v_ins));
15054 set(vp, v_ins);
15055 return ARECORD_CONTINUE;
15058 JS_REQUIRES_STACK AbortableRecordingStatus
15059 TraceRecorder::record_JSOP_FORGNAME()
15061 return record_JSOP_FORNAME();
15064 JS_REQUIRES_STACK AbortableRecordingStatus
15065 TraceRecorder::record_JSOP_FORPROP()
15067 return ARECORD_STOP;
15070 JS_REQUIRES_STACK AbortableRecordingStatus
15071 TraceRecorder::record_JSOP_FORELEM()
15073 LIns* v_ins;
15074 CHECK_STATUS_A(unboxNextValue(v_ins));
15075 stack(0, v_ins);
15076 return ARECORD_CONTINUE;
15079 JS_REQUIRES_STACK AbortableRecordingStatus
15080 TraceRecorder::record_JSOP_FORARG()
15082 LIns* v_ins;
15083 CHECK_STATUS_A(unboxNextValue(v_ins));
15084 arg(GET_ARGNO(cx->regs->pc), v_ins);
15085 return ARECORD_CONTINUE;
15088 JS_REQUIRES_STACK AbortableRecordingStatus
15089 TraceRecorder::record_JSOP_FORLOCAL()
15091 LIns* v_ins;
15092 CHECK_STATUS_A(unboxNextValue(v_ins));
15093 var(GET_SLOTNO(cx->regs->pc), v_ins);
15094 return ARECORD_CONTINUE;
15097 JS_REQUIRES_STACK AbortableRecordingStatus
15098 TraceRecorder::record_JSOP_POPN()
15100 return ARECORD_CONTINUE;
15103 static inline bool
15104 IsFindableCallObj(JSObject *obj)
15106 return obj->isCall() &&
15107 (obj->callIsForEval() || obj->getCallObjCalleeFunction()->isHeavyweight());
15111 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The
15112 * generated code also ensures that any call objects found have not changed shape.
15114 * obj starting object
15115 * obj_ins LIR instruction representing obj
15116 * targetObj end object for traversal
15117 * targetIns [out] LIR instruction representing obj2
15119 JS_REQUIRES_STACK RecordingStatus
15120 TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *targetObj,
15121 LIns *&targetIns)
15123 VMSideExit* exit = NULL;
15126 * Scope chains are often left "incomplete", and reified lazily when
15127 * necessary, since doing so is expensive. When creating null and flat
15128 * closures on trace (the only kinds supported), the global object is
15129 * hardcoded as the parent, since reifying the scope chain on trace
15130 * would be extremely difficult. This is because block objects need frame
15131 * pointers, which do not exist on trace, and thus would require magic
15132 * similar to arguments objects or reification of stack frames. Luckily,
15133 * for null and flat closures, these blocks are unnecessary.
15135 * The problem, as exposed by bug 523793, is that this means creating a
15136 * fixed traversal on trace can be inconsistent with the shorter scope
15137 * chain used when executing a trace. To address this, perform an initial
15138 * sweep of the scope chain to make sure that if there is a heavyweight
15139 * function with a call object, and there is also a block object, the
15140 * trace is safely aborted.
15142 * If there is no call object, we must have arrived at the global object,
15143 * and can bypass the scope chain traversal completely.
15145 bool foundCallObj = false;
15146 bool foundBlockObj = false;
15147 JSObject* searchObj = obj;
15149 for (;;) {
15150 if (searchObj != globalObj) {
15151 if (searchObj->isBlock())
15152 foundBlockObj = true;
15153 else if (IsFindableCallObj(searchObj))
15154 foundCallObj = true;
15157 if (searchObj == targetObj)
15158 break;
15160 searchObj = searchObj->getParent();
15161 if (!searchObj)
15162 RETURN_STOP("cannot traverse this scope chain on trace");
15165 if (!foundCallObj) {
15166 JS_ASSERT(targetObj == globalObj);
15167 targetIns = w.nameImmpNonGC(globalObj);
15168 return RECORD_CONTINUE;
15171 if (foundBlockObj)
15172 RETURN_STOP("cannot traverse this scope chain on trace");
15174 /* There was a call object, or should be a call object now. */
15175 for (;;) {
15176 if (obj != globalObj) {
15177 if (!js_IsCacheableNonGlobalScope(obj))
15178 RETURN_STOP("scope chain lookup crosses non-cacheable object");
15180 // We must guard on the shape of all call objects for heavyweight functions
15181 // that we traverse on the scope chain: if the shape changes, a variable with
15182 // the same name may have been inserted in the scope chain.
15183 if (IsFindableCallObj(obj)) {
15184 if (!exit)
15185 exit = snapshot(BRANCH_EXIT);
15186 guard(true,
15187 w.name(w.eqiN(w.ldiObjShape(obj_ins), obj->shape()), "guard_shape"),
15188 exit);
15192 JS_ASSERT(!obj->isBlock());
15194 if (obj == targetObj)
15195 break;
15197 obj = obj->getParent();
15198 obj_ins = w.ldpObjParent(obj_ins);
15201 targetIns = obj_ins;
15202 return RECORD_CONTINUE;
15205 JS_REQUIRES_STACK AbortableRecordingStatus
15206 TraceRecorder::record_JSOP_BINDNAME()
15208 TraceMonitor *localtm = traceMonitor;
15209 JSStackFrame* const fp = cx->fp();
15210 JSObject *obj;
15212 if (!fp->isFunctionFrame()) {
15213 obj = &fp->scopeChain();
15215 #ifdef DEBUG
15216 JSStackFrame *fp2 = fp;
15217 #endif
15219 // In global code, fp->scopeChain can only contain blocks whose values
15220 // are still on the stack. We never use BINDNAME to refer to these.
15221 while (obj->isBlock()) {
15222 // The block's values are still on the stack.
15223 #ifdef DEBUG
15224 // NB: fp2 can't be a generator frame, because !fp->hasFunction.
15225 while (obj->getPrivate() != fp2) {
15226 JS_ASSERT(fp2->isEvalOrDebuggerFrame());
15227 fp2 = fp2->prev();
15228 if (!fp2)
15229 JS_NOT_REACHED("bad stack frame");
15231 #endif
15232 obj = obj->getParent();
15233 // Blocks always have parents.
15234 JS_ASSERT(obj);
15237 // If anything other than Block, Call, DeclEnv, and the global object
15238 // is on the scope chain, we shouldn't be recording. Of those, only
15239 // Block and global can be present in global code.
15240 JS_ASSERT(obj == globalObj);
15243 * The trace is specialized to this global object. Furthermore, we know it
15244 * is the sole 'global' object on the scope chain: we set globalObj to the
15245 * scope chain element with no parent, and we reached it starting from the
15246 * function closure or the current scopeChain, so there is nothing inner to
15247 * it. Therefore this must be the right base object.
15249 stack(0, w.immpObjGC(obj));
15250 return ARECORD_CONTINUE;
15253 // We can't trace BINDNAME in functions that contain direct calls to eval,
15254 // as they might add bindings which previously-traced references would have
15255 // to see.
15256 if (JSFUN_HEAVYWEIGHT_TEST(fp->fun()->flags))
15257 RETURN_STOP_A("BINDNAME in heavyweight function.");
15259 // We don't have the scope chain on trace, so instead we get a start object
15260 // that is on the scope chain and doesn't skip the target object (the one
15261 // that contains the property).
15262 Value *callee = &cx->fp()->calleeValue();
15263 obj = callee->toObject().getParent();
15264 if (obj == globalObj) {
15265 stack(0, w.immpObjGC(obj));
15266 return ARECORD_CONTINUE;
15268 LIns *obj_ins = w.ldpObjParent(get(callee));
15270 // Find the target object.
15271 JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
15272 jsid id = ATOM_TO_JSID(atom);
15273 JSObject *obj2 = js_FindIdentifierBase(cx, &fp->scopeChain(), id);
15274 if (!obj2)
15275 RETURN_ERROR_A("error in js_FindIdentifierBase");
15276 if (!localtm->recorder)
15277 return ARECORD_ABORTED;
15278 if (obj2 != globalObj && !obj2->isCall())
15279 RETURN_STOP_A("BINDNAME on non-global, non-call object");
15281 // Generate LIR to get to the target object from the start object.
15282 LIns *obj2_ins;
15283 CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));
15285 // If |obj2| is the global object, we can refer to it directly instead of walking up
15286 // the scope chain. There may still be guards on intervening call objects.
15287 stack(0, obj2 == globalObj ? w.immpObjGC(obj2) : obj2_ins);
15288 return ARECORD_CONTINUE;
15291 JS_REQUIRES_STACK AbortableRecordingStatus
15292 TraceRecorder::record_JSOP_THROW()
15294 return ARECORD_STOP;
15297 JS_REQUIRES_STACK AbortableRecordingStatus
15298 TraceRecorder::record_JSOP_IN()
15300 Value& rval = stackval(-1);
15301 Value& lval = stackval(-2);
15303 if (rval.isPrimitive())
15304 RETURN_STOP_A("JSOP_IN on non-object right operand");
15305 JSObject* obj = &rval.toObject();
15306 LIns* obj_ins = get(&rval);
15308 jsid id;
15309 LIns* x;
15310 if (lval.isInt32()) {
15311 if (!js_Int32ToId(cx, lval.toInt32(), &id))
15312 RETURN_ERROR_A("OOM converting left operand of JSOP_IN to string");
15314 if (obj->isDenseArray()) {
15315 // Fast path for dense arrays
15316 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
15317 guardDenseArray(obj_ins, branchExit);
15319 // If our proto has indexed props, all bets are off on our
15320 // "false" values and out-of-bounds access. Just guard on
15321 // that.
15322 CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins,
15323 snapshot(MISMATCH_EXIT)));
15325 LIns* idx_ins;
15326 CHECK_STATUS_A(makeNumberInt32(get(&lval), &idx_ins));
15327 idx_ins = w.name(idx_ins, "index");
15328 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
15329 LIns* inRange = w.ltui(idx_ins, capacity_ins);
15331 if (jsuint(lval.toInt32()) < obj->getDenseArrayCapacity()) {
15332 guard(true, inRange, branchExit);
15334 LIns *elem_ins = w.getDslotAddress(obj_ins, idx_ins);
15335 // Need to make sure we don't have a hole
15336 LIns *is_hole_ins =
15337 is_boxed_magic(DSlotsAddress(elem_ins), JS_ARRAY_HOLE);
15339 // Set x to true (index in our array) if is_hole_ins == 0
15340 x = w.eqi0(is_hole_ins);
15341 } else {
15342 guard(false, inRange, branchExit);
15343 x = w.nameImmi(0);
15345 } else {
15346 LIns* num_ins;
15347 CHECK_STATUS_A(makeNumberInt32(get(&lval), &num_ins));
15348 LIns* args[] = { num_ins, obj_ins, cx_ins };
15349 x = w.call(&js_HasNamedPropertyInt32_ci, args);
15351 } else if (lval.isString()) {
15352 if (!js_ValueToStringId(cx, lval, &id))
15353 RETURN_ERROR_A("left operand of JSOP_IN didn't convert to a string-id");
15354 LIns* args[] = { get(&lval), obj_ins, cx_ins };
15355 x = w.call(&js_HasNamedProperty_ci, args);
15356 } else {
15357 RETURN_STOP_A("string or integer expected");
15360 guard(false, w.eqiN(x, JS_NEITHER), OOM_EXIT);
15361 x = w.eqiN(x, 1);
15363 TraceMonitor &localtm = *traceMonitor;
15365 JSObject* obj2;
15366 JSProperty* prop;
15367 JSBool ok = obj->lookupProperty(cx, id, &obj2, &prop);
15369 if (!ok)
15370 RETURN_ERROR_A("obj->lookupProperty failed in JSOP_IN");
15372 /* lookupProperty can reenter the interpreter and kill |this|. */
15373 if (!localtm.recorder)
15374 return ARECORD_ABORTED;
15376 bool cond = prop != NULL;
15379 * The interpreter fuses comparisons and the following branch, so we have
15380 * to do that here as well.
15382 jsbytecode *pc = cx->regs->pc;
15383 fuseIf(pc + 1, cond, x);
15385 /* If the branch was to a loop header, we may need to close it. */
15386 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
15387 CHECK_STATUS_A(checkTraceEnd(pc + 1));
15390 * We update the stack after the guard. This is safe since the guard bails
15391 * out at the comparison and the interpreter will therefore re-execute the
15392 * comparison. This way the value of the condition doesn't have to be
15393 * calculated and saved on the stack in most cases.
15395 set(&lval, x);
15396 return ARECORD_CONTINUE;
15399 static JSBool FASTCALL
15400 HasInstanceOnTrace(JSContext* cx, JSObject* ctor, ValueArgType arg)
15402 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
15404 const Value &argref = ValueArgToConstRef(arg);
15405 JSBool result = JS_FALSE;
15406 if (!HasInstance(cx, ctor, &argref, &result))
15407 SetBuiltinError(tm);
15408 return result;
15410 JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstanceOnTrace, CONTEXT, OBJECT, VALUE,
15411 0, ACCSET_STORE_ANY)
15413 JS_REQUIRES_STACK AbortableRecordingStatus
15414 TraceRecorder::record_JSOP_INSTANCEOF()
15416 // If the rhs isn't an object, we are headed for a TypeError.
15417 Value& ctor = stackval(-1);
15418 if (ctor.isPrimitive())
15419 RETURN_STOP_A("non-object on rhs of instanceof");
15421 Value& val = stackval(-2);
15422 LIns* val_ins = box_value_for_native_call(val, get(&val));
15424 enterDeepBailCall();
15425 LIns* args[] = {val_ins, get(&ctor), cx_ins};
15426 stack(-2, w.call(&HasInstanceOnTrace_ci, args));
15427 LIns* status_ins = w.ldiStateField(builtinStatus);
15428 pendingGuardCondition = w.eqi0(status_ins);
15429 leaveDeepBailCall();
15431 return ARECORD_CONTINUE;
15434 JS_REQUIRES_STACK AbortableRecordingStatus
15435 TraceRecorder::record_JSOP_DEBUGGER()
15437 return ARECORD_STOP;
15440 JS_REQUIRES_STACK AbortableRecordingStatus
15441 TraceRecorder::record_JSOP_GOSUB()
15443 return ARECORD_STOP;
15446 JS_REQUIRES_STACK AbortableRecordingStatus
15447 TraceRecorder::record_JSOP_RETSUB()
15449 return ARECORD_STOP;
15452 JS_REQUIRES_STACK AbortableRecordingStatus
15453 TraceRecorder::record_JSOP_EXCEPTION()
15455 return ARECORD_STOP;
15458 JS_REQUIRES_STACK AbortableRecordingStatus
15459 TraceRecorder::record_JSOP_LINENO()
15461 return ARECORD_CONTINUE;
15464 JS_REQUIRES_STACK AbortableRecordingStatus
15465 TraceRecorder::record_JSOP_BLOCKCHAIN()
15467 return ARECORD_CONTINUE;
15470 JS_REQUIRES_STACK AbortableRecordingStatus
15471 TraceRecorder::record_JSOP_NULLBLOCKCHAIN()
15473 return ARECORD_CONTINUE;
15476 JS_REQUIRES_STACK AbortableRecordingStatus
15477 TraceRecorder::record_JSOP_CONDSWITCH()
15479 return ARECORD_CONTINUE;
15482 JS_REQUIRES_STACK AbortableRecordingStatus
15483 TraceRecorder::record_JSOP_CASE()
15485 CHECK_STATUS_A(strictEquality(true, true));
15486 return ARECORD_CONTINUE;
15489 JS_REQUIRES_STACK AbortableRecordingStatus
15490 TraceRecorder::record_JSOP_DEFAULT()
15492 return ARECORD_CONTINUE;
15495 JS_REQUIRES_STACK AbortableRecordingStatus
15496 TraceRecorder::record_JSOP_EVAL()
15498 return ARECORD_STOP;
15501 JS_REQUIRES_STACK AbortableRecordingStatus
15502 TraceRecorder::record_JSOP_ENUMELEM()
15505 * To quote from jsinterp.cpp's JSOP_ENUMELEM case:
15506 * Funky: the value to set is under the [obj, id] pair.
15508 return setElem(-2, -1, -3);
15511 JS_REQUIRES_STACK AbortableRecordingStatus
15512 TraceRecorder::record_JSOP_GETTER()
15514 return ARECORD_STOP;
15517 JS_REQUIRES_STACK AbortableRecordingStatus
15518 TraceRecorder::record_JSOP_SETTER()
15520 return ARECORD_STOP;
15523 JS_REQUIRES_STACK AbortableRecordingStatus
15524 TraceRecorder::record_JSOP_DEFFUN()
15526 return ARECORD_STOP;
15529 JS_REQUIRES_STACK AbortableRecordingStatus
15530 TraceRecorder::record_JSOP_DEFFUN_FC()
15532 return ARECORD_STOP;
15535 JS_REQUIRES_STACK AbortableRecordingStatus
15536 TraceRecorder::record_JSOP_DEFCONST()
15538 return ARECORD_STOP;
15541 JS_REQUIRES_STACK AbortableRecordingStatus
15542 TraceRecorder::record_JSOP_DEFVAR()
15544 return ARECORD_STOP;
15547 jsatomid
15548 TraceRecorder::getFullIndex(ptrdiff_t pcoff)
15550 jsatomid index = GET_INDEX(cx->regs->pc + pcoff);
15551 index += atoms - cx->fp()->script()->atomMap.vector;
15552 return index;
15555 JS_REQUIRES_STACK AbortableRecordingStatus
15556 TraceRecorder::record_JSOP_LAMBDA()
15558 JSFunction* fun;
15559 fun = cx->fp()->script()->getFunction(getFullIndex());
15561 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() != globalObj)
15562 RETURN_STOP_A("Null closure function object parent must be global object");
15565 * Emit code to clone a null closure parented by this recorder's global
15566 * object, in order to preserve function object evaluation rules observable
15567 * via identity and mutation. But don't clone if our result is consumed by
15568 * JSOP_SETMETHOD or JSOP_INITMETHOD, since we optimize away the clone for
15569 * these combinations and clone only if the "method value" escapes.
15571 * See jsinterp.cpp, the JSOP_LAMBDA null closure case. The JSOP_SETMETHOD and
15572 * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
15573 * must agree with the corresponding break-from-do-while(0) logic there.
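     *
     * (Editor's illustrative example, not from the original source: in
     * |obj.m = function () { ... };| the lambda is consumed by JSOP_SETMETHOD,
     * so the clone is skipped below; binding the same lambda to an ordinary
     * variable instead reaches the js_NewNullClosure call further down.)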
15575 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == &cx->fp()->scopeChain()) {
15576 jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs->pc + JSOP_LAMBDA_LENGTH);
15577 JSOp op2 = JSOp(*pc2);
15579 if (op2 == JSOP_INITMETHOD) {
15580 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15581 return ARECORD_CONTINUE;
15584 if (op2 == JSOP_SETMETHOD) {
15585 Value lval = stackval(-1);
15587 if (!lval.isPrimitive() && lval.toObject().canHaveMethodBarrier()) {
15588 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15589 return ARECORD_CONTINUE;
15591 } else if (fun->joinable()) {
15592 if (op2 == JSOP_CALL) {
15594 * Array.prototype.sort and String.prototype.replace are
15595 * optimized as if they are special form. We know that they
15596 * won't leak the joined function object in obj, therefore
15597                  * we don't need to clone that compiler-created function
15598 * object for identity/mutation reasons.
15600 int iargc = GET_ARGC(pc2);
15603 * Note that we have not yet pushed obj as the final argument,
15604 * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
15605 * is the callee for this JSOP_CALL.
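                 *
                 * (Editor's worked example, not from the original source: for
                 * a call like |s.replace(pat, f)| we have iargc == 2, so the
                 * callee is read from regs.sp[1 - (2 + 2)] == regs.sp[-3].)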
15607 const Value &cref = cx->regs->sp[1 - (iargc + 2)];
15608 JSObject *callee;
15610 if (IsFunctionObject(cref, &callee)) {
15611 JSFunction *calleeFun = callee->getFunctionPrivate();
15612 Native native = calleeFun->maybeNative();
15614 if ((iargc == 1 && native == array_sort) ||
15615 (iargc == 2 && native == str_replace)) {
15616 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15617 return ARECORD_CONTINUE;
15620 } else if (op2 == JSOP_NULL) {
15621 pc2 += JSOP_NULL_LENGTH;
15622 op2 = JSOp(*pc2);
15624 if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0) {
15625 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15626 return ARECORD_CONTINUE;
15631 LIns *proto_ins;
15632 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15634 LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
15635 LIns* x = w.call(&js_NewNullClosure_ci, args);
15636 stack(0, x);
15637 return ARECORD_CONTINUE;
15640 if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA, JSOP_LAMBDA_LENGTH))
15641 RETURN_STOP_A("Unable to trace creating lambda in let");
15643 LIns *proto_ins;
15644 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15645 LIns* scopeChain_ins = scopeChain();
15646 JS_ASSERT(scopeChain_ins);
15647 LIns* args[] = { proto_ins, scopeChain_ins, w.nameImmpNonGC(fun), cx_ins };
15648 LIns* call_ins = w.call(&js_CloneFunctionObject_ci, args);
15649 guard(false,
15650 w.name(w.eqp0(call_ins), "guard(js_CloneFunctionObject)"),
15651 OOM_EXIT);
15652 stack(0, call_ins);
15654 return ARECORD_CONTINUE;
15657 JS_REQUIRES_STACK AbortableRecordingStatus
15658 TraceRecorder::record_JSOP_LAMBDA_FC()
15660 JSFunction* fun;
15661 fun = cx->fp()->script()->getFunction(getFullIndex());
15663 if (FUN_OBJECT(fun)->getParent() != globalObj)
15664 return ARECORD_STOP;
15666 if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA_FC, JSOP_LAMBDA_FC_LENGTH))
15667 RETURN_STOP_A("Unable to trace creating lambda in let");
15669 LIns* args[] = { scopeChain(), w.immpFunGC(fun), cx_ins };
15670 LIns* closure_ins = w.call(&js_AllocFlatClosure_ci, args);
15671 guard(false,
15672 w.name(w.eqp(closure_ins, w.immpNull()), "guard(js_AllocFlatClosure)"),
15673 OOM_EXIT);
15675 JSScript *script = fun->script();
15676 if (script->bindings.hasUpvars()) {
15677 JSUpvarArray *uva = script->upvars();
15678 LIns* upvars_ins = w.getObjPrivatizedSlot(closure_ins,
15679 JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
15681 for (uint32 i = 0, n = uva->length; i < n; i++) {
15682 Value v;
15683 LIns* v_ins = upvar(script, uva, i, v);
15684 if (!v_ins)
15685 return ARECORD_STOP;
15687 box_value_into(v, v_ins, FCSlotsAddress(upvars_ins, i));
15691 stack(0, closure_ins);
15692 return ARECORD_CONTINUE;
15695 JS_REQUIRES_STACK AbortableRecordingStatus
15696 TraceRecorder::record_JSOP_CALLEE()
15698 stack(0, get(&cx->fp()->calleeValue()));
15699 return ARECORD_CONTINUE;
15702 JS_REQUIRES_STACK AbortableRecordingStatus
15703 TraceRecorder::record_JSOP_SETLOCALPOP()
15705 var(GET_SLOTNO(cx->regs->pc), stack(-1));
15706 return ARECORD_CONTINUE;
15709 JS_REQUIRES_STACK AbortableRecordingStatus
15710 TraceRecorder::record_JSOP_IFPRIMTOP()
15712 // Traces are type-specialized, including null vs. object, so we need do
15713 // nothing here. The upstream unbox_value called after valueOf or toString
15714 // from an imacro (e.g.) will fork the trace for us, allowing us to just
15715 // follow along mindlessly :-).
15716 return ARECORD_CONTINUE;
15719 JS_REQUIRES_STACK AbortableRecordingStatus
15720 TraceRecorder::record_JSOP_SETCALL()
15722 return ARECORD_STOP;
15725 JS_REQUIRES_STACK AbortableRecordingStatus
15726 TraceRecorder::record_JSOP_TRY()
15728 return ARECORD_CONTINUE;
15731 JS_REQUIRES_STACK AbortableRecordingStatus
15732 TraceRecorder::record_JSOP_FINALLY()
15734 return ARECORD_CONTINUE;
15737 JS_REQUIRES_STACK AbortableRecordingStatus
15738 TraceRecorder::record_JSOP_NOP()
15740 return ARECORD_CONTINUE;
15743 JS_REQUIRES_STACK AbortableRecordingStatus
15744 TraceRecorder::record_JSOP_ARGSUB()
15746 JSStackFrame* const fp = cx->fp();
15749 * The arguments object or its absence in the frame is part of the typemap,
15750 * so a record-time check suffices here. We don't bother tracing ARGSUB in
15751      * the case of an arguments object existing, because ARGSUB and to a lesser
15752 * extent ARGCNT are emitted to avoid arguments object creation.
15754 if (!fp->hasArgsObj() && !fp->fun()->isHeavyweight()) {
15755 uintN slot = GET_ARGNO(cx->regs->pc);
15756 if (slot >= fp->numActualArgs())
15757 RETURN_STOP_A("can't trace out-of-range arguments");
15759 stack(0, get(&cx->fp()->canonicalActualArg(slot)));
15760 return ARECORD_CONTINUE;
15762 RETURN_STOP_A("can't trace JSOP_ARGSUB hard case");
15765 JS_REQUIRES_STACK LIns*
15766 TraceRecorder::guardArgsLengthNotAssigned(LIns* argsobj_ins)
15768 // The following implements JSObject::isArgsLengthOverridden on trace.
15769 // ARGS_LENGTH_OVERRIDDEN_BIT is set if length was overridden.
15770 LIns *len_ins = w.getArgsLength(argsobj_ins);
15771 LIns *ovr_ins = w.andi(len_ins, w.nameImmi(JSObject::ARGS_LENGTH_OVERRIDDEN_BIT));
15772 guard(true, w.eqi0(ovr_ins), MISMATCH_EXIT);
15773 return len_ins;
15776 JS_REQUIRES_STACK AbortableRecordingStatus
15777 TraceRecorder::record_JSOP_ARGCNT()
15779 JSStackFrame * const fp = cx->fp();
15781 if (fp->fun()->flags & JSFUN_HEAVYWEIGHT)
15782 RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT");
15784 // argc is fixed on trace, so ideally we would simply generate LIR for
15785 // constant argc. But the user can mutate arguments.length in the
15786 // interpreter, so we have to check for that in the trace entry frame.
15787 // We also have to check that arguments.length has not been mutated
15788 // at record time, because if so we will generate incorrect constant
15789 // LIR, which will assert in alu().
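    // Editor's sketch (hypothetical example, not from the original source):
    // a function such as
    //     function f() { arguments.length = 0; return arguments.length; }
    // overrides the length at run time, so a constant-argc result would be
    // wrong; the record-time check and the guard emitted below both protect
    // against that case.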
15790 if (fp->hasArgsObj() && fp->argsObj().isArgsLengthOverridden())
15791 RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
15792 LIns *a_ins = getFrameObjPtr(fp->addressOfArgs());
15793 if (callDepth == 0) {
15794 if (MaybeBranch mbr = w.jt(w.eqp0(a_ins))) {
15795 guardArgsLengthNotAssigned(a_ins);
15796 w.label(mbr);
15799 stack(0, w.immd(fp->numActualArgs()));
15800 return ARECORD_CONTINUE;
15803 JS_REQUIRES_STACK AbortableRecordingStatus
15804 TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
15806 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
15808 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == globalObj) {
15809 LIns *proto_ins;
15810 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15812 LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
15813 LIns* x = w.call(&js_NewNullClosure_ci, args);
15814 var(slot, x);
15815 return ARECORD_CONTINUE;
15818 return ARECORD_STOP;
15821 JS_REQUIRES_STACK AbortableRecordingStatus
15822 TraceRecorder::record_JSOP_DEFLOCALFUN()
15824 return ARECORD_CONTINUE;
15827 JS_REQUIRES_STACK AbortableRecordingStatus
15828 TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
15830 return ARECORD_CONTINUE;
15833 JS_REQUIRES_STACK AbortableRecordingStatus
15834 TraceRecorder::record_JSOP_GOTOX()
15836 return record_JSOP_GOTO();
15839 JS_REQUIRES_STACK AbortableRecordingStatus
15840 TraceRecorder::record_JSOP_IFEQX()
15842 return record_JSOP_IFEQ();
15845 JS_REQUIRES_STACK AbortableRecordingStatus
15846 TraceRecorder::record_JSOP_IFNEX()
15848 return record_JSOP_IFNE();
15851 JS_REQUIRES_STACK AbortableRecordingStatus
15852 TraceRecorder::record_JSOP_ORX()
15854 return record_JSOP_OR();
15857 JS_REQUIRES_STACK AbortableRecordingStatus
15858 TraceRecorder::record_JSOP_ANDX()
15860 return record_JSOP_AND();
15863 JS_REQUIRES_STACK AbortableRecordingStatus
15864 TraceRecorder::record_JSOP_GOSUBX()
15866 return record_JSOP_GOSUB();
15869 JS_REQUIRES_STACK AbortableRecordingStatus
15870 TraceRecorder::record_JSOP_CASEX()
15872 CHECK_STATUS_A(strictEquality(true, true));
15873 return ARECORD_CONTINUE;
15876 JS_REQUIRES_STACK AbortableRecordingStatus
15877 TraceRecorder::record_JSOP_DEFAULTX()
15879 return ARECORD_CONTINUE;
15882 JS_REQUIRES_STACK AbortableRecordingStatus
15883 TraceRecorder::record_JSOP_TABLESWITCHX()
15885 return record_JSOP_TABLESWITCH();
15888 JS_REQUIRES_STACK AbortableRecordingStatus
15889 TraceRecorder::record_JSOP_LOOKUPSWITCHX()
15891 return InjectStatus(switchop());
15894 JS_REQUIRES_STACK AbortableRecordingStatus
15895 TraceRecorder::record_JSOP_BACKPATCH()
15897 return ARECORD_CONTINUE;
15900 JS_REQUIRES_STACK AbortableRecordingStatus
15901 TraceRecorder::record_JSOP_BACKPATCH_POP()
15903 return ARECORD_CONTINUE;
15906 JS_REQUIRES_STACK AbortableRecordingStatus
15907 TraceRecorder::record_JSOP_THROWING()
15909 return ARECORD_STOP;
15912 JS_REQUIRES_STACK AbortableRecordingStatus
15913 TraceRecorder::record_JSOP_SETRVAL()
15915 // If we implement this, we need to update JSOP_STOP.
15916 return ARECORD_STOP;
15919 JS_REQUIRES_STACK AbortableRecordingStatus
15920 TraceRecorder::record_JSOP_RETRVAL()
15922 return ARECORD_STOP;
15925 JS_REQUIRES_STACK AbortableRecordingStatus
15926 TraceRecorder::record_JSOP_REGEXP()
15928 JSStackFrame* const fp = cx->fp();
15929 JSScript* script = fp->script();
15930 unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc);
15932 LIns* proto_ins;
15933 CHECK_STATUS_A(getClassPrototype(JSProto_RegExp, proto_ins));
15935 LIns* args[] = {
15936 proto_ins,
15937 w.immpObjGC(script->getRegExp(index)),
15938 cx_ins
15940 LIns* regex_ins = w.call(&js_CloneRegExpObject_ci, args);
15941 guard(false, w.eqp0(regex_ins), OOM_EXIT);
15943 stack(0, regex_ins);
15944 return ARECORD_CONTINUE;
15947 // begin JS_HAS_XML_SUPPORT
15949 JS_REQUIRES_STACK AbortableRecordingStatus
15950 TraceRecorder::record_JSOP_DEFXMLNS()
15952 return ARECORD_STOP;
15955 JS_REQUIRES_STACK AbortableRecordingStatus
15956 TraceRecorder::record_JSOP_ANYNAME()
15958 return ARECORD_STOP;
15961 JS_REQUIRES_STACK AbortableRecordingStatus
15962 TraceRecorder::record_JSOP_QNAMEPART()
15964 return record_JSOP_STRING();
15967 JS_REQUIRES_STACK AbortableRecordingStatus
15968 TraceRecorder::record_JSOP_QNAMECONST()
15970 return ARECORD_STOP;
15973 JS_REQUIRES_STACK AbortableRecordingStatus
15974 TraceRecorder::record_JSOP_QNAME()
15976 return ARECORD_STOP;
15979 JS_REQUIRES_STACK AbortableRecordingStatus
15980 TraceRecorder::record_JSOP_TOATTRNAME()
15982 return ARECORD_STOP;
15985 JS_REQUIRES_STACK AbortableRecordingStatus
15986 TraceRecorder::record_JSOP_TOATTRVAL()
15988 return ARECORD_STOP;
15991 JS_REQUIRES_STACK AbortableRecordingStatus
15992 TraceRecorder::record_JSOP_ADDATTRNAME()
15994 return ARECORD_STOP;
15997 JS_REQUIRES_STACK AbortableRecordingStatus
15998 TraceRecorder::record_JSOP_ADDATTRVAL()
16000 return ARECORD_STOP;
16003 JS_REQUIRES_STACK AbortableRecordingStatus
16004 TraceRecorder::record_JSOP_BINDXMLNAME()
16006 return ARECORD_STOP;
16009 JS_REQUIRES_STACK AbortableRecordingStatus
16010 TraceRecorder::record_JSOP_SETXMLNAME()
16012 return ARECORD_STOP;
16015 JS_REQUIRES_STACK AbortableRecordingStatus
16016 TraceRecorder::record_JSOP_XMLNAME()
16018 return ARECORD_STOP;
16021 JS_REQUIRES_STACK AbortableRecordingStatus
16022 TraceRecorder::record_JSOP_DESCENDANTS()
16024 return ARECORD_STOP;
16027 JS_REQUIRES_STACK AbortableRecordingStatus
16028 TraceRecorder::record_JSOP_FILTER()
16030 return ARECORD_STOP;
16033 JS_REQUIRES_STACK AbortableRecordingStatus
16034 TraceRecorder::record_JSOP_ENDFILTER()
16036 return ARECORD_STOP;
16039 JS_REQUIRES_STACK AbortableRecordingStatus
16040 TraceRecorder::record_JSOP_TOXML()
16042 return ARECORD_STOP;
16045 JS_REQUIRES_STACK AbortableRecordingStatus
16046 TraceRecorder::record_JSOP_TOXMLLIST()
16048 return ARECORD_STOP;
16051 JS_REQUIRES_STACK AbortableRecordingStatus
16052 TraceRecorder::record_JSOP_XMLTAGEXPR()
16054 return ARECORD_STOP;
16057 JS_REQUIRES_STACK AbortableRecordingStatus
16058 TraceRecorder::record_JSOP_XMLELTEXPR()
16060 return ARECORD_STOP;
16063 JS_REQUIRES_STACK AbortableRecordingStatus
16064 TraceRecorder::record_JSOP_XMLCDATA()
16066 return ARECORD_STOP;
16069 JS_REQUIRES_STACK AbortableRecordingStatus
16070 TraceRecorder::record_JSOP_XMLCOMMENT()
16072 return ARECORD_STOP;
16075 JS_REQUIRES_STACK AbortableRecordingStatus
16076 TraceRecorder::record_JSOP_XMLPI()
16078 return ARECORD_STOP;
16081 JS_REQUIRES_STACK AbortableRecordingStatus
16082 TraceRecorder::record_JSOP_GETFUNNS()
16084 return ARECORD_STOP;
16087 JS_REQUIRES_STACK AbortableRecordingStatus
16088 TraceRecorder::record_JSOP_STARTXML()
16090 return ARECORD_STOP;
16093 JS_REQUIRES_STACK AbortableRecordingStatus
16094 TraceRecorder::record_JSOP_STARTXMLEXPR()
16096 return ARECORD_STOP;
16099 // end JS_HAS_XML_SUPPORT
16101 JS_REQUIRES_STACK AbortableRecordingStatus
16102 TraceRecorder::record_JSOP_CALLPROP()
16104 Value& l = stackval(-1);
16105 JSObject* obj;
16106 LIns* obj_ins;
16107 LIns* this_ins;
16108 if (!l.isPrimitive()) {
16109 obj = &l.toObject();
16110 obj_ins = get(&l);
16111 this_ins = obj_ins; // |this| for subsequent call
16112 } else {
16113 JSProtoKey protoKey;
16114 debug_only_stmt(const char* protoname = NULL;)
16115 if (l.isString()) {
16116 protoKey = JSProto_String;
16117 debug_only_stmt(protoname = "String.prototype";)
16118 } else if (l.isNumber()) {
16119 protoKey = JSProto_Number;
16120 debug_only_stmt(protoname = "Number.prototype";)
16121 } else if (l.isBoolean()) {
16122 protoKey = JSProto_Boolean;
16123 debug_only_stmt(protoname = "Boolean.prototype";)
16124 } else {
16125 JS_ASSERT(l.isNull() || l.isUndefined());
16126 RETURN_STOP_A("callprop on null or void");
16129 if (!js_GetClassPrototype(cx, NULL, protoKey, &obj))
16130 RETURN_ERROR_A("GetClassPrototype failed!");
16132 obj_ins = w.immpObjGC(obj);
16133 debug_only_stmt(obj_ins = w.name(obj_ins, protoname);)
16134 this_ins = get(&l); // use primitive as |this|
16137 JSObject* obj2;
16138 PCVal pcval;
16139 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
16141 if (pcval.isNull())
16142 RETURN_STOP_A("callprop of missing method");
16144 if (pcval.isFunObj()) {
16145 if (l.isPrimitive()) {
16146 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &pcval.toFunObj());
16147 if (fun->isInterpreted() && !fun->inStrictMode())
16148 RETURN_STOP_A("callee does not accept primitive |this|");
16150 set(&l, w.immpObjGC(&pcval.toFunObj()));
16151 } else {
16152 if (l.isPrimitive())
16153 RETURN_STOP_A("callprop of primitive method");
16154 JS_ASSERT_IF(pcval.isShape(), !pcval.toShape()->isMethod());
16155 CHECK_STATUS_A(propTail(obj, obj_ins, obj2, pcval, NULL, NULL, &l));
16157 stack(0, this_ins);
16158 return ARECORD_CONTINUE;
16161 JS_REQUIRES_STACK AbortableRecordingStatus
16162 TraceRecorder::record_JSOP_DELDESC()
16164 return ARECORD_STOP;
16167 JS_REQUIRES_STACK AbortableRecordingStatus
16168 TraceRecorder::record_JSOP_UINT24()
16170 stack(0, w.immd(GET_UINT24(cx->regs->pc)));
16171 return ARECORD_CONTINUE;
16174 JS_REQUIRES_STACK AbortableRecordingStatus
16175 TraceRecorder::record_JSOP_INDEXBASE()
16177 atoms += GET_INDEXBASE(cx->regs->pc);
16178 return ARECORD_CONTINUE;
16181 JS_REQUIRES_STACK AbortableRecordingStatus
16182 TraceRecorder::record_JSOP_RESETBASE()
16184 updateAtoms();
16185 return ARECORD_CONTINUE;
16188 JS_REQUIRES_STACK AbortableRecordingStatus
16189 TraceRecorder::record_JSOP_RESETBASE0()
16191 updateAtoms();
16192 return ARECORD_CONTINUE;
16195 JS_REQUIRES_STACK AbortableRecordingStatus
16196 TraceRecorder::record_JSOP_CALLELEM()
16198 return record_JSOP_GETELEM();
16201 JS_REQUIRES_STACK AbortableRecordingStatus
16202 TraceRecorder::record_JSOP_STOP()
16204 JSStackFrame *fp = cx->fp();
16206 /* A return from callDepth 0 terminates the current loop, except for recursion. */
16207 if (callDepth == 0 && !fp->hasImacropc()) {
16208 AUDIT(returnLoopExits);
16209 return endLoop();
16212 if (fp->hasImacropc()) {
16214 * End of imacro, so return true to the interpreter immediately. The
16215 * interpreter's JSOP_STOP case will return from the imacro, back to
16216 * the pc after the calling op, still in the same JSStackFrame.
16218 updateAtoms(fp->script());
16219 return ARECORD_CONTINUE;
16222 putActivationObjects();
16224 if (Probes::callTrackingActive(cx)) {
16225 LIns* args[] = { w.immi(0), w.nameImmpNonGC(cx->fp()->fun()), cx_ins };
16226 LIns* call_ins = w.call(&functionProbe_ci, args);
16227 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
16231 * We know falling off the end of a constructor returns the new object that
16232 * was passed in via fp->argv[-1], while falling off the end of a function
16233 * returns undefined.
16235 * NB: we do not support script rval (eval, API users who want the result
16236 * of the last expression-statement, debugger API calls).
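     *
     * (Editor's illustrative note, not from the original source: for
     * |new function () { this.x = 1; }| falling off the end yields the newly
     * constructed object, while an ordinary call of the same function yields
     * undefined.)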
16238 if (fp->isConstructing()) {
16239 rval_ins = get(&fp->thisValue());
16240 } else {
16241 rval_ins = w.immiUndefined();
16243 clearReturningFrameFromNativeTracker();
16244 return ARECORD_CONTINUE;
16247 JS_REQUIRES_STACK AbortableRecordingStatus
16248 TraceRecorder::record_JSOP_GETXPROP()
16250 Value& l = stackval(-1);
16251 if (l.isPrimitive())
16252 RETURN_STOP_A("primitive-this for GETXPROP?");
16254 Value* vp;
16255 LIns* v_ins;
16256 NameResult nr;
16257 CHECK_STATUS_A(name(vp, v_ins, nr));
16258 stack(-1, v_ins);
16259 return ARECORD_CONTINUE;
16262 JS_REQUIRES_STACK AbortableRecordingStatus
16263 TraceRecorder::record_JSOP_CALLXMLNAME()
16265 return ARECORD_STOP;
16268 JS_REQUIRES_STACK AbortableRecordingStatus
16269 TraceRecorder::record_JSOP_TYPEOFEXPR()
16271 return record_JSOP_TYPEOF();
16274 JS_REQUIRES_STACK AbortableRecordingStatus
16275 TraceRecorder::record_JSOP_ENTERBLOCK()
16277 JSObject* obj;
16278 obj = cx->fp()->script()->getObject(getFullIndex(0));
16280 LIns* void_ins = w.immiUndefined();
16281 for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
16282 stack(i, void_ins);
16283 return ARECORD_CONTINUE;
16286 JS_REQUIRES_STACK AbortableRecordingStatus
16287 TraceRecorder::record_JSOP_LEAVEBLOCK()
16289 return ARECORD_CONTINUE;
16292 JS_REQUIRES_STACK AbortableRecordingStatus
16293 TraceRecorder::record_JSOP_GENERATOR()
16295 return ARECORD_STOP;
16298 JS_REQUIRES_STACK AbortableRecordingStatus
16299 TraceRecorder::record_JSOP_YIELD()
16301 return ARECORD_STOP;
16304 JS_REQUIRES_STACK AbortableRecordingStatus
16305 TraceRecorder::record_JSOP_ARRAYPUSH()
16307 uint32_t slot = GET_UINT16(cx->regs->pc);
16308 JS_ASSERT(cx->fp()->numFixed() <= slot);
16309 JS_ASSERT(cx->fp()->slots() + slot < cx->regs->sp - 1);
16310 Value &arrayval = cx->fp()->slots()[slot];
16311 JS_ASSERT(arrayval.isObject());
16312 JS_ASSERT(arrayval.toObject().isDenseArray());
16313 LIns *array_ins = get(&arrayval);
16314 Value &elt = stackval(-1);
16315 LIns *elt_ins = box_value_for_native_call(elt, get(&elt));
16317 enterDeepBailCall();
16319 LIns *args[] = { elt_ins, array_ins, cx_ins };
16320 pendingGuardCondition = w.call(&js_ArrayCompPush_tn_ci, args);
16322 leaveDeepBailCall();
16323 return ARECORD_CONTINUE;
16326 JS_REQUIRES_STACK AbortableRecordingStatus
16327 TraceRecorder::record_JSOP_ENUMCONSTELEM()
16329 return ARECORD_STOP;
16332 JS_REQUIRES_STACK AbortableRecordingStatus
16333 TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
16335 LIns* v_ins = stack(-1);
16336 int n = -1 - GET_UINT16(cx->regs->pc);
16337 stack(n, v_ins);
16338 return ARECORD_CONTINUE;
16341 JS_REQUIRES_STACK AbortableRecordingStatus
16342 TraceRecorder::record_JSOP_GETTHISPROP()
16344 LIns* this_ins;
16346 CHECK_STATUS_A(getThis(this_ins));
16349 * It's safe to just use cx->fp()->thisValue() here because getThis() returns
16350 * ARECORD_STOP or ARECORD_ERROR if thisv is not available.
16352 const Value &thisv = cx->fp()->thisValue();
16353 if (!thisv.isObject())
16354 RETURN_STOP_A("primitive this for GETTHISPROP");
16356 CHECK_STATUS_A(getProp(&thisv.toObject(), this_ins));
16357 return ARECORD_CONTINUE;
16360 JS_REQUIRES_STACK AbortableRecordingStatus
16361 TraceRecorder::record_JSOP_GETARGPROP()
16363 return getProp(argval(GET_ARGNO(cx->regs->pc)));
16366 JS_REQUIRES_STACK AbortableRecordingStatus
16367 TraceRecorder::record_JSOP_GETLOCALPROP()
16369 return getProp(varval(GET_SLOTNO(cx->regs->pc)));
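// The next three recorders mirror the interpreter's JSOP_INDEXBASE ops: each
// advances the |atoms| base pointer by a multiple of 2^16 entries so that the
// 16-bit atom indexes of the following ops address the next segment of the
// script's atom map. (Sketch of the intent only; jsinterp.cpp has the
// authoritative handling.)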
16372 JS_REQUIRES_STACK AbortableRecordingStatus
16373 TraceRecorder::record_JSOP_INDEXBASE1()
16375 atoms += 1 << 16;
16376 return ARECORD_CONTINUE;
16379 JS_REQUIRES_STACK AbortableRecordingStatus
16380 TraceRecorder::record_JSOP_INDEXBASE2()
16382 atoms += 2 << 16;
16383 return ARECORD_CONTINUE;
16386 JS_REQUIRES_STACK AbortableRecordingStatus
16387 TraceRecorder::record_JSOP_INDEXBASE3()
16389 atoms += 3 << 16;
16390 return ARECORD_CONTINUE;
16393 JS_REQUIRES_STACK AbortableRecordingStatus
16394 TraceRecorder::record_JSOP_CALLLOCAL()
16396 uintN slot = GET_SLOTNO(cx->regs->pc);
16397 stack(0, var(slot));
16398 stack(1, w.immiUndefined());
16399 return ARECORD_CONTINUE;
16402 JS_REQUIRES_STACK AbortableRecordingStatus
16403 TraceRecorder::record_JSOP_CALLARG()
16405 uintN slot = GET_ARGNO(cx->regs->pc);
16406 stack(0, arg(slot));
16407 stack(1, w.immiUndefined());
16408 return ARECORD_CONTINUE;
16411 JS_REQUIRES_STACK AbortableRecordingStatus
16412 TraceRecorder::record_JSOP_BINDGNAME()
16414 stack(0, w.immpObjGC(globalObj));
16415 return ARECORD_CONTINUE;
16418 JS_REQUIRES_STACK AbortableRecordingStatus
16419 TraceRecorder::record_JSOP_INT8()
16421 stack(0, w.immd(GET_INT8(cx->regs->pc)));
16422 return ARECORD_CONTINUE;
16425 JS_REQUIRES_STACK AbortableRecordingStatus
16426 TraceRecorder::record_JSOP_INT32()
16428 stack(0, w.immd(GET_INT32(cx->regs->pc)));
16429 return ARECORD_CONTINUE;
16432 JS_REQUIRES_STACK AbortableRecordingStatus
16433 TraceRecorder::record_JSOP_LENGTH()
16435 Value& l = stackval(-1);
16436 if (l.isPrimitive()) {
16437 if (!l.isString())
16438 RETURN_STOP_A("non-string primitive JSOP_LENGTH unsupported");
16439 set(&l, w.i2d(w.p2i(w.getStringLength(get(&l)))));
16440 return ARECORD_CONTINUE;
16443 JSObject* obj = &l.toObject();
16444 LIns* obj_ins = get(&l);
16446 if (obj->isArguments()) {
16447 unsigned depth;
16448 JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
16449 if (!afp)
16450 RETURN_STOP_A("can't reach arguments object's frame");
16452 // We must both check at record time and guard at run time that
16453 // arguments.length has not been reassigned, redefined or deleted.
16454 if (obj->isArgsLengthOverridden())
16455 RETURN_STOP_A("can't trace JSOP_LENGTH if arguments.length has been modified");
16456 LIns* slot_ins = guardArgsLengthNotAssigned(obj_ins);
16458 // slot_ins is the value from the slot; right-shift to get the length
16459 // (see JSObject::getArgsInitialLength in jsfun.cpp).
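    // Rough sketch of the layout assumed here: the slot holds the initial
    // argument count shifted left by ARGS_PACKED_BITS_COUNT, with flag bits
    // (such as "length overridden") packed into the low bits, so e.g. a
    // 3-argument call stores 3 << ARGS_PACKED_BITS_COUNT and the shift below
    // recovers 3.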
16460 LIns* v_ins = w.i2d(w.rshiN(slot_ins, JSObject::ARGS_PACKED_BITS_COUNT));
16461 set(&l, v_ins);
16462 return ARECORD_CONTINUE;
16465 LIns* v_ins;
16466 if (obj->isArray()) {
16467 if (obj->isDenseArray()) {
16468 guardDenseArray(obj_ins, BRANCH_EXIT);
16469 } else {
16470 JS_ASSERT(obj->isSlowArray());
16471 guardClass(obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
16473 v_ins = w.lduiObjPrivate(obj_ins);
16474 if (obj->getArrayLength() <= JSVAL_INT_MAX) {
16475 guard(true, w.leui(v_ins, w.immi(JSVAL_INT_MAX)), BRANCH_EXIT);
16476 v_ins = w.i2d(v_ins);
16477 } else {
16478 v_ins = w.ui2d(v_ins);
16480 } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
16481 // Ensure the object is still a typed array of the same class we saw at record time
16482 guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_NORMAL);
16483 v_ins = w.i2d(w.ldiConstTypedArrayLength(w.ldpObjPrivate(obj_ins)));
16484 } else {
16485 if (!obj->isNative())
16486 RETURN_STOP_A("can't trace length property access on non-array, non-native object");
16487 return getProp(obj, obj_ins);
16489 set(&l, v_ins);
16490 return ARECORD_CONTINUE;
16493 JS_REQUIRES_STACK AbortableRecordingStatus
16494 TraceRecorder::record_JSOP_HOLE()
16496 stack(0, w.immpMagicWhy(JS_ARRAY_HOLE));
16497 return ARECORD_CONTINUE;
16500 AbortableRecordingStatus
16501 TraceRecorder::record_JSOP_TRACE()
16503 return ARECORD_CONTINUE;
16506 AbortableRecordingStatus
16507 TraceRecorder::record_JSOP_NOTRACE()
16509 return ARECORD_CONTINUE;
16512 JSBool FASTCALL
16513 js_Unbrand(JSContext *cx, JSObject *obj)
16515 return obj->unbrand(cx);
16518 JS_DEFINE_CALLINFO_2(extern, BOOL, js_Unbrand, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
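// The CALLINFO definition above is what lets the recorders below call
// js_Unbrand from trace: it produces js_Unbrand_ci, the nanojit CallInfo
// descriptor (argument types, return type, access-set info) that w.call()
// consumes.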
16520 JS_REQUIRES_STACK AbortableRecordingStatus
16521 TraceRecorder::record_JSOP_UNBRAND()
16523 LIns* args_ins[] = { stack(-1), cx_ins };
16524 LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
16525 guard(false, w.eqi0(call_ins), OOM_EXIT);
16526 return ARECORD_CONTINUE;
16529 JS_REQUIRES_STACK AbortableRecordingStatus
16530 TraceRecorder::record_JSOP_UNBRANDTHIS()
16532 /* In strict mode |this| may be a primitive; in that case do nothing. */
16533 JSStackFrame *fp = cx->fp();
16534 if (fp->fun()->inStrictMode() && !fp->thisValue().isObject())
16535 return ARECORD_CONTINUE;
16537 LIns* this_ins;
16538 RecordingStatus status = getThis(this_ins);
16539 if (status != RECORD_CONTINUE)
16540 return InjectStatus(status);
16542 LIns* args_ins[] = { this_ins, cx_ins };
16543 LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
16544 guard(false, w.eqi0(call_ins), OOM_EXIT);
16545 return ARECORD_CONTINUE;
16548 JS_REQUIRES_STACK AbortableRecordingStatus
16549 TraceRecorder::record_JSOP_SHARPINIT()
16551 return ARECORD_STOP;
16554 JS_REQUIRES_STACK AbortableRecordingStatus
16555 TraceRecorder::record_JSOP_GETGLOBAL()
16557 uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
16558 if (!lazilyImportGlobalSlot(slot))
16559 RETURN_STOP_A("lazy import of global slot failed");
16561 stack(0, get(&globalObj->getSlotRef(slot)));
16562 return ARECORD_CONTINUE;
16565 JS_REQUIRES_STACK AbortableRecordingStatus
16566 TraceRecorder::record_JSOP_CALLGLOBAL()
16568 uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
16569 if (!lazilyImportGlobalSlot(slot))
16570 RETURN_STOP_A("lazy import of global slot failed");
16572 Value &v = globalObj->getSlotRef(slot);
16573 stack(0, get(&v));
16574 stack(1, w.immiUndefined());
16575 return ARECORD_CONTINUE;
16578 JS_REQUIRES_STACK AbortableRecordingStatus
16579 TraceRecorder::record_JSOP_GETGNAME()
16581 return record_JSOP_NAME();
16584 JS_REQUIRES_STACK AbortableRecordingStatus
16585 TraceRecorder::record_JSOP_SETGNAME()
16587 return record_JSOP_SETNAME();
16590 JS_REQUIRES_STACK AbortableRecordingStatus
16591 TraceRecorder::record_JSOP_GNAMEDEC()
16593 return record_JSOP_NAMEDEC();
16596 JS_REQUIRES_STACK AbortableRecordingStatus
16597 TraceRecorder::record_JSOP_GNAMEINC()
16599 return record_JSOP_NAMEINC();
16602 JS_REQUIRES_STACK AbortableRecordingStatus
16603 TraceRecorder::record_JSOP_DECGNAME()
16605 return record_JSOP_DECNAME();
16608 JS_REQUIRES_STACK AbortableRecordingStatus
16609 TraceRecorder::record_JSOP_INCGNAME()
16611 return record_JSOP_INCNAME();
16614 JS_REQUIRES_STACK AbortableRecordingStatus
16615 TraceRecorder::record_JSOP_CALLGNAME()
16617 return record_JSOP_CALLNAME();
16620 #define DBG_STUB(OP) \
16621 JS_REQUIRES_STACK AbortableRecordingStatus \
16622 TraceRecorder::record_##OP() \
16624 RETURN_STOP_A("can't trace " #OP); \
16627 DBG_STUB(JSOP_GETUPVAR_DBG)
16628 DBG_STUB(JSOP_CALLUPVAR_DBG)
16629 DBG_STUB(JSOP_DEFFUN_DBGFC)
16630 DBG_STUB(JSOP_DEFLOCALFUN_DBGFC)
16631 DBG_STUB(JSOP_LAMBDA_DBGFC)
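// For example, DBG_STUB(JSOP_GETUPVAR_DBG) expands (roughly) to a recorder
// that simply refuses to trace:
//
//   JS_REQUIRES_STACK AbortableRecordingStatus
//   TraceRecorder::record_JSOP_GETUPVAR_DBG()
//   {
//       RETURN_STOP_A("can't trace JSOP_GETUPVAR_DBG");
//   }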
16633 #ifdef JS_JIT_SPEW
16635 * Print information about entry typemaps and unstable exits for all peers
16636 * at a PC.
16638 void
16639 DumpPeerStability(TraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
16640 uint32 argc)
16642 TreeFragment* f;
16643 bool looped = false;
16644 unsigned length = 0;
16646 for (f = LookupLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
16647 if (!f->code())
16648 continue;
16649 debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
16650 if (looped)
16651 JS_ASSERT(f->nStackTypes == length);
16652 for (unsigned i = 0; i < f->nStackTypes; i++)
16653 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->stackTypeMap()[i]));
16654 debug_only_print0(LC_TMRecorder, " GLOBALS=");
16655 for (unsigned i = 0; i < f->nGlobalTypes(); i++)
16656 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->globalTypeMap()[i]));
16657 debug_only_print0(LC_TMRecorder, "\n");
16658 UnstableExit* uexit = f->unstableExits;
16659 while (uexit != NULL) {
16660 debug_only_print0(LC_TMRecorder, "EXIT ");
16661 JSValueType* m = uexit->exit->fullTypeMap();
16662 debug_only_print0(LC_TMRecorder, "STACK=");
16663 for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
16664 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(m[i]));
16665 debug_only_print0(LC_TMRecorder, " GLOBALS=");
16666 for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
16667 debug_only_printf(LC_TMRecorder, "%c",
16668 TypeToChar(m[uexit->exit->numStackSlots + i]));
16670 debug_only_print0(LC_TMRecorder, "\n");
16671 uexit = uexit->next;
16673 length = f->nStackTypes;
16674 looped = true;
16677 #endif
16679 #ifdef MOZ_TRACEVIS
16681 FILE* traceVisLogFile = NULL;
16682 JSHashTable *traceVisScriptTable = NULL;
16684 JS_FRIEND_API(bool)
16685 StartTraceVis(const char* filename = "tracevis.dat")
16687 if (traceVisLogFile) {
16688 // If we're currently recording, first we must stop.
16689 StopTraceVis();
16692 traceVisLogFile = fopen(filename, "wb");
16693 if (!traceVisLogFile)
16694 return false;
16696 return true;
16699 JS_FRIEND_API(JSBool)
16700 StartTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
16702 JSBool ok;
16704 if (argc > 0 && JSVAL_IS_STRING(JS_ARGV(cx, vp)[0])) {
16705 JSString *str = JSVAL_TO_STRING(JS_ARGV(cx, vp)[0]);
16706 char *filename = js_DeflateString(cx, str->chars(), str->length());
16707 if (!filename)
16708 goto error;
16709 ok = StartTraceVis(filename);
16710 cx->free(filename);
16711 } else {
16712 ok = StartTraceVis();
16715 if (ok) {
16716 fprintf(stderr, "started TraceVis recording\n");
16717 JS_SET_RVAL(cx, vp, JSVAL_VOID);
16718 return true;
16721 error:
16722 JS_ReportError(cx, "failed to start TraceVis recording");
16723 return false;
16726 JS_FRIEND_API(bool)
16727 StopTraceVis()
16729 if (!traceVisLogFile)
16730 return false;
16732 fclose(traceVisLogFile); // not worth checking the result
16733 traceVisLogFile = NULL;
16735 return true;
16738 JS_FRIEND_API(JSBool)
16739 StopTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
16741 JSBool ok = StopTraceVis();
16743 if (ok) {
16744 fprintf(stderr, "stopped TraceVis recording\n");
16745 JS_SET_RVAL(cx, vp, JSVAL_VOID);
16746 } else {
16747 JS_ReportError(cx, "TraceVis isn't running");
16750 return ok;
16753 #endif /* MOZ_TRACEVIS */
16755 JS_REQUIRES_STACK void
16756 TraceRecorder::captureStackTypes(unsigned callDepth, JSValueType* typeMap)
16758 CaptureTypesVisitor capVisitor(cx, traceMonitor->oracle, typeMap, !!oracle);
16759 VisitStackSlots(capVisitor, cx, callDepth);
16762 JS_REQUIRES_STACK void
16763 TraceRecorder::determineGlobalTypes(JSValueType* typeMap)
16765 DetermineTypesVisitor detVisitor(*this, typeMap);
16766 VisitGlobalSlots(detVisitor, cx, *tree->globalSlots);
16769 #ifdef JS_METHODJIT
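// RAII helper: its destructor refreshes the caller's |blacklist| out-param
// from the current blacklist state of |pc|, so every return path out of the
// trace-point functions below reports whether the site ended up blacklisted.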
16771 class AutoRetBlacklist
16773 jsbytecode* pc;
16774 bool* blacklist;
16776 public:
16777 AutoRetBlacklist(jsbytecode* pc, bool* blacklist)
16778 : pc(pc), blacklist(blacklist)
16781 ~AutoRetBlacklist()
16783 *blacklist = IsBlacklisted(pc);
16787 JS_REQUIRES_STACK TracePointAction
16788 RecordTracePoint(JSContext* cx, TraceMonitor* tm,
16789 uintN& inlineCallCount, bool* blacklist, bool execAllowed)
16791 JSStackFrame* fp = cx->fp();
16792 jsbytecode* pc = cx->regs->pc;
16794 JS_ASSERT(!tm->recorder);
16795 JS_ASSERT(!tm->profile);
16797 JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
16798 uint32 globalShape = -1;
16799 SlotList* globalSlots = NULL;
16801 AutoRetBlacklist autoRetBlacklist(pc, blacklist);
16803 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
16804 Backoff(tm, pc);
16805 return TPA_Nothing;
16808 uint32 argc = entryFrameArgc(cx);
16809 TreeFragment* tree = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
16811 debug_only_printf(LC_TMTracer,
16812 "Looking for compat peer %d@%d, from %p (ip: %p)\n",
16813 js_FramePCToLineNumber(cx, cx->fp()),
16814 FramePCOffset(cx, cx->fp()), (void*)tree, tree->ip);
16816 if (tree->code() || tree->peer) {
16817 uintN count;
16818 TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, tree, count);
16819 if (match) {
16820 VMSideExit* lr = NULL;
16821 VMSideExit* innermostNestedGuard = NULL;
16823 if (!execAllowed) {
16824 /* We've already compiled a trace for it, but we don't want to use that trace. */
16825 Blacklist((jsbytecode*)tree->root->ip);
16826 return TPA_Nothing;
16829 /* Best case - just go and execute. */
16830 if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
16831 return TPA_Error;
16833 if (!lr)
16834 return TPA_Nothing;
16836 switch (lr->exitType) {
16837 case UNSTABLE_LOOP_EXIT:
16838 if (!AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0))
16839 return TPA_RanStuff;
16840 break;
16842 case MUL_ZERO_EXIT:
16843 case OVERFLOW_EXIT:
16844 if (lr->exitType == MUL_ZERO_EXIT)
16845 tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
16846 else
16847 tm->oracle->markInstructionUndemotable(cx->regs->pc);
16848 /* FALL THROUGH */
16849 case BRANCH_EXIT:
16850 case CASE_EXIT:
16851 if (!AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL))
16852 return TPA_RanStuff;
16853 break;
16855 case LOOP_EXIT:
16856 if (!innermostNestedGuard)
16857 return TPA_RanStuff;
16858 if (!AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL))
16859 return TPA_RanStuff;
16860 break;
16862 default:
16863 return TPA_RanStuff;
16866 JS_ASSERT(tm->recorder);
16868 goto interpret;
16871 if (count >= MAXPEERS) {
16872 debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
16873 Blacklist((jsbytecode*)tree->root->ip);
16874 return TPA_Nothing;
16878 if (++tree->hits() < HOTLOOP)
16879 return TPA_Nothing;
16880 if (!ScopeChainCheck(cx, tree))
16881 return TPA_Nothing;
16882 if (!RecordTree(cx, tm, tree->first, NULL, NULL, 0, globalSlots))
16883 return TPA_Nothing;
16885 interpret:
16886 JS_ASSERT(tm->recorder);
16888 /* Locked and loaded with a recorder. Ask the interpreter to go run some code. */
16889 if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
16890 return TPA_Error;
16892 JS_ASSERT(!cx->isExceptionPending());
16894 return TPA_RanStuff;
16897 LoopProfile::LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp,
16898 jsbytecode *top, jsbytecode *bottom)
16899 : traceMonitor(tm),
16900 entryScript(entryfp->script()),
16901 entryfp(entryfp),
16902 top(top),
16903 bottom(bottom),
16904 hits(0),
16905 undecided(false),
16906 unprofitable(false)
16908 reset();
16911 void
16912 LoopProfile::reset()
16914 profiled = false;
16915 traceOK = false;
16916 numAllOps = 0;
16917 numSelfOps = 0;
16918 numSelfOpsMult = 0;
16919 branchMultiplier = 1;
16920 shortLoop = false;
16921 maybeShortLoop = false;
16922 numInnerLoops = 0;
16923 loopStackDepth = 0;
16924 sp = 0;
16926 PodArrayZero(allOps);
16927 PodArrayZero(selfOps);
16930 MonitorResult
16931 LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount)
16933 if (cx->regs->pc == top) {
16934 debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n");
16935 decide(cx);
16936 } else {
16937 /* Record an inner loop invocation. */
16938 JSStackFrame *fp = cx->fp();
16939 jsbytecode *pc = cx->regs->pc;
16940 bool found = false;
16942 /* Search from the most deeply nested loop outward, since the innermost one is hit most often. */
16943 for (int i = int(numInnerLoops)-1; i >= 0; i--) {
16944 if (innerLoops[i].entryfp == fp && innerLoops[i].top == pc) {
16945 innerLoops[i].iters++;
16946 found = true;
16947 break;
16951 if (!found && numInnerLoops < PROFILE_MAX_INNER_LOOPS)
16952 innerLoops[numInnerLoops++] = InnerLoop(fp, pc, NULL);
16955 return MONITOR_NOT_RECORDING;
16959 static const uintN PROFILE_HOTLOOP = 61;
16960 static const uintN MAX_PROFILE_OPS = 4096;
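// Roughly: a trace point must be hit PROFILE_HOTLOOP times before we start
// profiling its loop, and a profile is cut off after MAX_PROFILE_OPS
// interpreted ops (see profileOperation below).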
16962 static jsbytecode *
16963 GetLoopBottom(JSContext *cx)
16965 return GetLoopBottom(cx, cx->regs->pc);
16968 static LoopProfile *
16969 LookupOrAddProfile(JSContext *cx, TraceMonitor *tm, void** traceData, uintN *traceEpoch)
16971 LoopProfile *prof;
16974 * We try to keep a pointer to the loop profile inside the TRACE IC.
16975 * We also keep a pointer inside a hashtable for when we need to
16976 * look up nested loops (or when ICs are disabled).
16978 * Memory for the profile is allocated in the dataAlloc for the
16979 * trace monitor. Since this thing can get flushed periodically,
16980 * we use epochs to decide if the profile in the MIC is valid, as
16981 * follows. Every time the trace monitor is flushed,
16982 * |tm->flushEpoch| is incremented. When storing the profile in
16983 * the IC, we store the current |tm->flushEpoch| along with it.
16984 * Before pulling a profile out of the IC, we check that its
16985 * stored epoch is still up-to-date with |tm->flushEpoch|.
16986 * This ensures that no flush has happened in between.
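 *
 * In other words, the IC fast path below is roughly:
 *
 *   if (*traceData && *traceEpoch == tm->flushEpoch)
 *       reuse the cached LoopProfile;            // no flush since it was stored
 *   else
 *       allocate a fresh one and re-stamp *traceEpoch = tm->flushEpoch;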
16989 #if JS_MONOIC
16990 if (*traceData && *traceEpoch == tm->flushEpoch) {
16991 prof = (LoopProfile *)*traceData;
16992 } else {
16993 jsbytecode* pc = cx->regs->pc;
16994 jsbytecode* bottom = GetLoopBottom(cx);
16995 if (!bottom)
16996 return NULL;
16997 prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
16998 *traceData = prof;
16999 *traceEpoch = tm->flushEpoch;
17000 tm->loopProfiles->put(pc, prof);
17002 #else
17003 LoopProfileMap &table = *tm->loopProfiles;
17004 jsbytecode* pc = cx->regs->pc;
17005 if (LoopProfileMap::AddPtr p = table.lookupForAdd(pc)) {
17006 prof = p->value;
17007 } else {
17008 jsbytecode* bottom = GetLoopBottom(cx);
17009 if (!bottom)
17010 return NULL;
17011 prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
17012 table.add(p, pc, prof);
17014 #endif
17016 return prof;
17019 static LoopProfile *
17020 LookupLoopProfile(TraceMonitor *tm, jsbytecode *pc)
17022 LoopProfileMap &table = *tm->loopProfiles;
17023 if (LoopProfileMap::Ptr p = table.lookup(pc)) {
17024 JS_ASSERT(p->value->top == pc);
17025 return p->value;
17026 } else
17027 return NULL;
17030 void
17031 LoopProfile::stopProfiling(JSContext *cx)
17033 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
17034 JS_THREAD_DATA(cx)->profilingCompartment = NULL;
17036 traceMonitor->profile = NULL;
17039 JS_REQUIRES_STACK TracePointAction
17040 MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist,
17041 void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits)
17043 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17045 if (!cx->profilingEnabled)
17046 return RecordTracePoint(cx, tm, inlineCallCount, blacklist, true);
17048 *blacklist = false;
17051 * This is the only place where we check for re-entering the profiler.
17052 * The assumption is that MonitorTracePoint is the only place where we
17053 * start profiling. When we do so, we enter an interpreter frame with
17054 * JSINTERP_PROFILE mode. All other entry points to the profiler check
17055 * that the interpreter mode is JSINTERP_PROFILE. If it isn't, they
17056 * don't profile.
17058 if (TRACE_PROFILER(cx))
17059 return TPA_Nothing;
17061 jsbytecode* pc = cx->regs->pc;
17062 LoopProfile *prof = LookupOrAddProfile(cx, tm, traceData, traceEpoch);
17063 if (!prof) {
17064 *blacklist = true;
17065 return TPA_Nothing;
17068 prof->hits += hits;
17069 if (prof->hits < PROFILE_HOTLOOP)
17070 return TPA_Nothing;
17072 AutoRetBlacklist autoRetBlacklist(cx->regs->pc, blacklist);
17074 if (prof->profiled) {
17075 if (prof->traceOK) {
17076 return RecordTracePoint(cx, tm, inlineCallCount, blacklist, prof->execOK);
17077 } else {
17078 return TPA_Nothing;
17082 debug_only_printf(LC_TMProfiler, "Profiling at line %d\n",
17083 js_FramePCToLineNumber(cx, cx->fp()));
17085 tm->profile = prof;
17087 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
17088 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
17089 JS_THREAD_DATA(cx)->profilingCompartment = cx->compartment;
17091 if (!Interpret(cx, cx->fp(), inlineCallCount, JSINTERP_PROFILE))
17092 return TPA_Error;
17094 JS_ASSERT(!cx->isExceptionPending());
17096 /* Look it up again since a reset may have happened during Interpret. */
17097 prof = LookupLoopProfile(tm, pc);
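    // A rough reading of the block below: when the profiler came back
    // undecided (a "maybe short" loop), re-arm the caller's trace-point
    // counter so the loop runs a few thousand more iterations before we
    // profile it again, and reset the profile for that next attempt.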
17098 if (prof && prof->undecided) {
17099 *loopCounter = 3000;
17100 prof->reset();
17103 return TPA_RanStuff;
17107 * Returns true if pc is within the given loop.
17108 * If we're in a different script, then we must have come from
17109 * a call instruction within the loop (since we check if we're within
17110 * the loop before each instruction), so we're still in the loop.
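 *
 * The frame comparison below assumes the usual upward-growing frame layout,
 * in which a callee frame sits at a higher address than its caller; under
 * that assumption, fp > loop.entryfp means "called, directly or indirectly,
 * from somewhere inside the loop".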
17112 template<class T>
17113 static inline bool
17114 PCWithinLoop(JSStackFrame *fp, jsbytecode *pc, T& loop)
17116 return fp > loop.entryfp || (fp == loop.entryfp && pc >= loop.top && pc <= loop.bottom);
17119 LoopProfile::ProfileAction
17120 LoopProfile::profileOperation(JSContext* cx, JSOp op)
17122 TraceMonitor* tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17124 JS_ASSERT(tm == traceMonitor);
17125 JS_ASSERT(&entryScript->compartment->traceMonitor == tm);
17127 if (profiled) {
17128 stopProfiling(cx);
17129 return ProfComplete;
17132 jsbytecode *pc = cx->regs->pc;
17133 JSStackFrame *fp = cx->fp();
17134 JSScript *script = fp->script();
17136 if (!PCWithinLoop(fp, pc, *this)) {
17137 debug_only_printf(LC_TMProfiler, "Profiling complete (loop exit) at line %u\n",
17138 js_FramePCToLineNumber(cx, cx->fp()));
17139 tm->profile->decide(cx);
17140 stopProfiling(cx);
17141 return ProfComplete;
17144 while (loopStackDepth > 0 && !PCWithinLoop(fp, pc, loopStack[loopStackDepth-1])) {
17145 debug_only_print0(LC_TMProfiler, "Profiler: Exiting inner loop\n");
17146 loopStackDepth--;
17149 if (op == JSOP_TRACE || op == JSOP_NOTRACE) {
17150 if (pc != top && (loopStackDepth == 0 || pc != loopStack[loopStackDepth-1].top)) {
17151 if (loopStackDepth == PROFILE_MAX_INNER_LOOPS) {
17152 debug_only_print0(LC_TMProfiler, "Profiling complete (maxnest)\n");
17153 tm->profile->decide(cx);
17154 stopProfiling(cx);
17155 return ProfComplete;
17158 debug_only_printf(LC_TMProfiler, "Profiler: Entering inner loop at line %d\n",
17159 js_FramePCToLineNumber(cx, cx->fp()));
17160 loopStack[loopStackDepth++] = InnerLoop(fp, pc, GetLoopBottom(cx));
17164 numAllOps++;
17165 if (loopStackDepth == 0) {
17166 numSelfOps++;
17167 numSelfOpsMult += branchMultiplier;
17170 if (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_MUL || op == JSOP_DIV) {
17171 Value& v1 = cx->regs->sp[-1];
17172 Value& v2 = cx->regs->sp[-2];
17174 /* If either operand is a double, treat it as a floating-point op. */
17175 if (v1.isDouble() || v2.isDouble())
17176 increment(OP_FLOAT);
17177 else if (v1.isInt32() || v2.isInt32())
17178 increment(OP_INT);
17181 if (op == JSOP_EQ || op == JSOP_NE)
17182 increment(OP_EQ);
17184 if (op == JSOP_BITOR || op == JSOP_BITXOR || op == JSOP_BITAND
17185 || op == JSOP_LSH || op == JSOP_RSH || op == JSOP_URSH || op == JSOP_BITNOT)
17187 increment(OP_BIT);
17190 if (op == JSOP_EVAL)
17191 increment(OP_EVAL);
17193 if (op == JSOP_NEW)
17194 increment(OP_NEW);
17196 if (op == JSOP_GETELEM || op == JSOP_SETELEM) {
17197 Value& lval = cx->regs->sp[op == JSOP_GETELEM ? -2 : -3];
17198 if (lval.isObject() && js_IsTypedArray(&lval.toObject()))
17199 increment(OP_TYPED_ARRAY);
17200 else if (lval.isObject() && lval.toObject().isDenseArray() && op == JSOP_GETELEM)
17201 increment(OP_ARRAY_READ);
17204 if (op == JSOP_CALL) {
17205 increment(OP_CALL);
17207 uintN argc = GET_ARGC(cx->regs->pc);
17208 Value &v = cx->regs->sp[-((int)argc + 2)];
17209 JSObject *callee;
17210 if (IsFunctionObject(v, &callee)) {
17211 JSFunction *fun = callee->getFunctionPrivate();
17212 if (fun->isInterpreted()) {
17213 if (cx->fp()->isFunctionFrame() && fun == cx->fp()->fun())
17214 increment(OP_RECURSIVE);
17215 } else {
17216 js::Native native = fun->u.n.native;
17217 if (js_IsMathFunction(JS_JSVALIFY_NATIVE(native)))
17218 increment(OP_FLOAT);
17223 if (op == JSOP_CALLPROP && loopStackDepth == 0)
17224 branchMultiplier *= mjit::GetCallTargetCount(script, pc);
17226 if (op == JSOP_TABLESWITCH) {
17227 jsint low = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN);
17228 jsint high = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN*2);
17229 branchMultiplier *= high - low + 1;
17232 if (op == JSOP_LOOKUPSWITCH)
17233 branchMultiplier *= GET_UINT16(pc + JUMP_OFFSET_LEN);
17235 if (numAllOps >= MAX_PROFILE_OPS) {
17236 debug_only_print0(LC_TMProfiler, "Profiling complete (maxops)\n");
17237 tm->profile->decide(cx);
17238 stopProfiling(cx);
17239 return ProfComplete;
17242 /* These are the places where the interpreter skips over branches. */
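    // For example, a loop guarded by |i < n| typically compiles to JSOP_LT
    // immediately followed by JSOP_IFNE; the interpreter evaluates the compare
    // and takes the branch in one step, never dispatching the IFNE separately,
    // so we peek one op ahead here to find the jump that really controls the
    // loop. (Illustrative; jsinterp.cpp has the fused-opcode handling.)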
17243 jsbytecode *testPC = cx->regs->pc;
17244 if (op == JSOP_EQ || op == JSOP_NE || op == JSOP_LT || op == JSOP_GT
17245 || op == JSOP_LE || op == JSOP_GE || op == JSOP_IN || op == JSOP_MOREITER)
17247 const JSCodeSpec *cs = &js_CodeSpec[op];
17248 ptrdiff_t oplen = cs->length;
17249 JS_ASSERT(oplen != -1);
17251 if (cx->regs->pc - script->code + oplen < ptrdiff_t(script->length))
17252 if (cx->regs->pc[oplen] == JSOP_IFEQ || cx->regs->pc[oplen] == JSOP_IFNE)
17253 testPC = cx->regs->pc + oplen;
17256 /* Check if we're exiting the loop being profiled. */
17257 JSOp testOp = js_GetOpcode(cx, script, testPC);
17258 if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE || testOp == JSOP_GOTO
17259 || testOp == JSOP_AND || testOp == JSOP_OR)
17261 ptrdiff_t len = GET_JUMP_OFFSET(testPC);
17262 if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)) {
17263 StackValue v = stackAt(-1);
17264 if (v.hasValue && v.value < 8)
17265 shortLoop = true;
17268 if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)
17269 && cx->regs->sp[-2].isInt32() && cx->regs->sp[-2].toInt32() < 16)
17271 maybeShortLoop = true;
17274 if (testOp != JSOP_GOTO && len > 0) {
17275 bool isConst;
17276 if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE)
17277 isConst = stackAt(-1).isConst && stackAt(-2).isConst;
17278 else
17279 isConst = stackAt(-1).isConst;
17281 increment(OP_FWDJUMP);
17282 if (loopStackDepth == 0 && !isConst)
17283 branchMultiplier *= 2;
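    // branchMultiplier roughly estimates how many distinct paths one pass over
    // the loop body can take: every unpredictable forward branch doubles it,
    // and switches multiply it by their case count. Since each op adds the
    // current multiplier to numSelfOpsMult, about seventeen such branches
    // (2^17 > 100000) are enough to trip the "too branchy" test in
    // isCompilationExpensive() below.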
17287 if (op == JSOP_INT8) {
17288 stackPush(StackValue(true, GET_INT8(cx->regs->pc)));
17289 } else if (op == JSOP_STRING) {
17290 stackPush(StackValue(true));
17291 } else if (op == JSOP_TYPEOF || op == JSOP_TYPEOFEXPR) {
17292 stackPush(StackValue(true));
17293 } else if (op == JSOP_EQ || op == JSOP_NE) {
17294 StackValue v1 = stackAt(-1);
17295 StackValue v2 = stackAt(-2);
17296 stackPush(StackValue(v1.isConst && v2.isConst));
17297 } else if (op == JSOP_AND) {
17298 bool b = !!js_ValueToBoolean(cx->regs->sp[-1]);
17299 StackValue v = stackAt(-1);
17300 if (b)
17301 stackPop();
17302 } else {
17303 stackClear();
17306 return ProfContinue;
17310 * Returns true if the loop would probably take a long time to
17311 * compile.
17313 bool
17314 LoopProfile::isCompilationExpensive(JSContext *cx, uintN depth)
17316 if (depth == 0)
17317 return true;
17319 if (!profiled)
17320 return false;
17322 /* Too many ops to compile? */
17323 if (numSelfOps == MAX_PROFILE_OPS)
17324 return true;
17326 /* Is the code too branchy? */
17327 if (numSelfOpsMult > numSelfOps*100000)
17328 return true;
17330 /* Ensure that inner loops aren't too expensive. */
17331 for (uintN i=0; i<numInnerLoops; i++) {
17332 LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
17333 if (!prof || prof->isCompilationExpensive(cx, depth-1))
17334 return true;
17337 return false;
17341 * This function recognizes loops that are short and that contain
17342 * jumps. The tracer does badly with these loops because it
17343 * needs to do a lot of side exits, which are somewhat
17344 * expensive.
17346 bool
17347 LoopProfile::isCompilationUnprofitable(JSContext *cx, uintN goodOps)
17349 if (!profiled)
17350 return false;
17352 if (goodOps <= 22 && allOps[OP_FWDJUMP])
17353 return true;
17355 /* Ensure that the inner loops aren't themselves unprofitable. */
17356 for (uintN i=0; i<numInnerLoops; i++) {
17357 LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
17358 if (!prof || prof->unprofitable)
17359 return true;
17362 return false;
17365 /* After profiling is done, this method decides whether to trace the loop. */
17366 void
17367 LoopProfile::decide(JSContext *cx)
17369 bool wasUndecided = undecided;
17370 bool wasTraceOK = traceOK;
17372 profiled = true;
17373 traceOK = false;
17374 undecided = false;
17376 #ifdef DEBUG
17377 uintN line = js_PCToLineNumber(cx, entryScript, top);
17379 debug_only_printf(LC_TMProfiler, "LOOP %s:%d\n", entryScript->filename, line);
17381 for (uintN i=0; i<numInnerLoops; i++) {
17382 InnerLoop &loop = innerLoops[i];
17383 if (LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top)) {
17384 uintN line = js_PCToLineNumber(cx, prof->entryScript, prof->top);
17385 debug_only_printf(LC_TMProfiler, "NESTED %s:%d (%d iters)\n",
17386 prof->entryScript->filename, line, loop.iters);
17389 debug_only_printf(LC_TMProfiler, "FEATURE float %d\n", allOps[OP_FLOAT]);
17390 debug_only_printf(LC_TMProfiler, "FEATURE int %d\n", allOps[OP_INT]);
17391 debug_only_printf(LC_TMProfiler, "FEATURE bit %d\n", allOps[OP_BIT]);
17392 debug_only_printf(LC_TMProfiler, "FEATURE equality %d\n", allOps[OP_EQ]);
17393 debug_only_printf(LC_TMProfiler, "FEATURE eval %d\n", allOps[OP_EVAL]);
17394 debug_only_printf(LC_TMProfiler, "FEATURE new %d\n", allOps[OP_NEW]);
17395 debug_only_printf(LC_TMProfiler, "FEATURE call %d\n", allOps[OP_CALL]);
17396 debug_only_printf(LC_TMProfiler, "FEATURE arrayread %d\n", allOps[OP_ARRAY_READ]);
17397 debug_only_printf(LC_TMProfiler, "FEATURE typedarray %d\n", allOps[OP_TYPED_ARRAY]);
17398 debug_only_printf(LC_TMProfiler, "FEATURE fwdjump %d\n", allOps[OP_FWDJUMP]);
17399 debug_only_printf(LC_TMProfiler, "FEATURE recursive %d\n", allOps[OP_RECURSIVE]);
17400 debug_only_printf(LC_TMProfiler, "FEATURE shortLoop %d\n", shortLoop);
17401 debug_only_printf(LC_TMProfiler, "FEATURE maybeShortLoop %d\n", maybeShortLoop);
17402 debug_only_printf(LC_TMProfiler, "FEATURE numAllOps %d\n", numAllOps);
17403 debug_only_printf(LC_TMProfiler, "FEATURE selfOps %d\n", numSelfOps);
17404 debug_only_printf(LC_TMProfiler, "FEATURE selfOpsMult %g\n", numSelfOpsMult);
17405 #endif
17407 if (count(OP_RECURSIVE)) {
17408 debug_only_print0(LC_TMProfiler, "NOTRACE: recursive\n");
17409 } else if (count(OP_EVAL)) {
17410 debug_only_print0(LC_TMProfiler, "NOTRACE: eval\n");
17411 } else if (numInnerLoops > 7) {
17412 debug_only_print0(LC_TMProfiler, "NOTRACE: >7 inner loops\n");
17413 } else if (shortLoop) {
17414 debug_only_print0(LC_TMProfiler, "NOTRACE: short\n");
17415 } else if (isCompilationExpensive(cx, 4)) {
17416 debug_only_print0(LC_TMProfiler, "NOTRACE: expensive\n");
17417 } else if (maybeShortLoop && numInnerLoops < 2) {
17418 if (wasUndecided) {
17419 debug_only_print0(LC_TMProfiler, "NOTRACE: maybe short\n");
17420 } else {
17421 debug_only_print0(LC_TMProfiler, "UNDECIDED: maybe short\n");
17422 undecided = true; /* Profile the loop again to see if it's still short. */
17424 } else {
17425 uintN goodOps = 0;
17427 /* The tracer handles these ops well because of type specialization. */
17428 goodOps += count(OP_FLOAT)*10 + count(OP_BIT)*11 + count(OP_INT)*5 + count(OP_EQ)*15;
17430 /* The tracer handles these ops well because of inlining. */
17431 goodOps += (count(OP_CALL) + count(OP_NEW))*20;
17433 /* The tracer specializes typed array access. */
17434 goodOps += count(OP_TYPED_ARRAY)*10;
17436 /* The methodjit is faster at array writes, but the tracer is faster for reads. */
17437 goodOps += count(OP_ARRAY_READ)*15;
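    // Worked example with the weights above (purely illustrative): a 40-op
    // loop body containing 2 float ops, 1 bit op, 4 int ops and 1 call scores
    // 2*10 + 1*11 + 4*5 + 1*20 = 71 goodOps; 71 >= 40, so it would be traced
    // (unless judged unprofitable), whereas the same mix spread over an 80-op
    // body would stay off trace.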
17439 debug_only_printf(LC_TMProfiler, "FEATURE goodOps %u\n", goodOps);
17441 unprofitable = isCompilationUnprofitable(cx, goodOps);
17442 if (unprofitable)
17443 debug_only_print0(LC_TMProfiler, "NOTRACE: unprofitable\n");
17444 else if (goodOps >= numAllOps)
17445 traceOK = true;
17448 debug_only_printf(LC_TMProfiler, "TRACE %s:%d = %d\n", entryScript->filename, line, traceOK);
17450 if (traceOK) {
17451 /* Unblacklist the inner loops. */
17452 for (uintN i=0; i<numInnerLoops; i++) {
17453 InnerLoop &loop = innerLoops[i];
17454 LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top);
17455 if (prof) {
17457 * Note that execOK for the inner loop is left unchanged. So even
17458 * if we trace the inner loop, we will never call that trace
17459 * on its own. We'll only call it from this trace.
17461 prof->traceOK = true;
17462 if (IsBlacklisted(loop.top)) {
17463 debug_only_printf(LC_TMProfiler, "Unblacklisting at %d\n",
17464 js_PCToLineNumber(cx, prof->entryScript, loop.top));
17465 Unblacklist(prof->entryScript, loop.top);
17471 execOK = traceOK;
17472 traceOK = wasTraceOK || traceOK;
17474 if (!traceOK && !undecided) {
17475 debug_only_printf(LC_TMProfiler, "Blacklisting at %d\n", line);
17476 Blacklist(top);
17479 debug_only_print0(LC_TMProfiler, "\n");
17482 JS_REQUIRES_STACK MonitorResult
17483 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode)
17485 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17486 if (interpMode == JSINTERP_PROFILE && tm->profile)
17487 return tm->profile->profileLoopEdge(cx, inlineCallCount);
17488 else
17489 return RecordLoopEdge(cx, tm, inlineCallCount);
17492 void
17493 AbortProfiling(JSContext *cx)
17495 JS_ASSERT(TRACE_PROFILER(cx));
17496 LoopProfile *prof = TRACE_PROFILER(cx);
17498 debug_only_print0(LC_TMProfiler, "Profiling complete (aborted)\n");
17499 prof->profiled = true;
17500 prof->traceOK = false;
17501 prof->execOK = false;
17502 prof->stopProfiling(cx);
17505 #else /* JS_METHODJIT */
17507 JS_REQUIRES_STACK MonitorResult
17508 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode)
17510 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17511 return RecordLoopEdge(cx, tm, inlineCallCount);
17514 #endif /* JS_METHODJIT */
17516 uint32
17517 GetHotloop(JSContext *cx)
17519 #ifdef JS_METHODJIT
17520 if (cx->profilingEnabled)
17521 return PROFILE_HOTLOOP;
17522 else
17523 #endif
17524 return 1;
17527 } /* namespace js */