1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
43 #include "jsbit.h" // low-level (NSPR-based) headers next
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
50 #define alloca _alloca
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
63 #include "jscompartment.h"
76 #include "jsstaticcheck.h"
80 #include "jstypedarray.h"
82 #include "jsatominlines.h"
83 #include "jscntxtinlines.h"
84 #include "jsfuninlines.h"
85 #include "jsinterpinlines.h"
86 #include "jspropertycacheinlines.h"
87 #include "jsobjinlines.h"
88 #include "jsscopeinlines.h"
89 #include "jsscriptinlines.h"
90 #include "jscntxtinlines.h"
91 #include "jsopcodeinlines.h"
94 #include "methodjit/MethodJIT.h"
97 #include "jsautooplen.h" // generated headers last
98 #include "imacros.c.out"
100 #if defined(NANOJIT_ARM) && defined(__GNUC__) && defined(AVMPLUS_LINUX)
103 #include <sys/types.h>
104 #include <sys/stat.h>
105 #include <sys/mman.h>
114 getExitName(ExitType type
)
116 static const char* exitNames
[] =
118 #define MAKE_EXIT_STRING(x) #x,
119 JS_TM_EXITCODES(MAKE_EXIT_STRING
)
120 #undef MAKE_EXIT_STRING
124 JS_ASSERT(type
< TOTAL_EXIT_TYPES
);
126 return exitNames
[type
];
133 using namespace js::gc
;
134 using namespace js::tjit
;
137 * This macro is just like JS_NOT_REACHED but it exists in non-debug builds
138 * too. Its presence indicates shortcomings in jstracer's handling of some
140 * - OOM failures in constructors, which lack a return value to pass back a
141 * failure code (though it can and should be done indirectly).
142 * - OOM failures in the "infallible" allocators used for Nanojit.
144 * FIXME: bug 624590 is open to fix these problems.
146 #define OUT_OF_MEMORY_ABORT(msg) JS_Assert(msg, __FILE__, __LINE__);
148 /* Implement embedder-specific nanojit members. */
151 * Nanojit requires infallible allocations most of the time. We satisfy this
152 * by reserving some space in each allocator which is used as a fallback if
153 * js_calloc() fails. Ideallly this reserve space should be big enough to
154 * allow for all infallible requests made to the allocator until the next OOM
155 * check occurs, but it turns out that's impossible to guarantee (though it
156 * should be unlikely). So we abort if the reserve runs out; this is better
157 * than allowing memory errors to occur.
159 * The space calculations are as follows... between OOM checks, each
160 * VMAllocator can do (ie. has been seen to do) the following maximum
161 * allocations on 64-bits:
163 * - dataAlloc: 31 minimum-sized chunks (MIN_CHUNK_SZB) in assm->compile()
164 * (though arbitrarily more could occur due to LabelStateMap additions done
165 * when handling labels): 62,248 bytes. This one is the most likely to
168 * - traceAlloc: 1 minimum-sized chunk: 2,008 bytes.
170 * - tempAlloc: 1 LIR code chunk (CHUNK_SZB) and 5 minimum-sized chunks for
171 * sundry small allocations: 18,048 bytes.
173 * The reserve sizes are chosen by exceeding this by a reasonable amount.
174 * Reserves for 32-bits are slightly more than half, because most of the
175 * allocated space is used to hold pointers.
177 * FIXME: Bug 624590 is open to get rid of all this.
179 static const size_t DataReserveSize
= 12500 * sizeof(uintptr_t);
180 static const size_t TraceReserveSize
= 5000 * sizeof(uintptr_t);
181 static const size_t TempReserveSize
= 1000 * sizeof(uintptr_t);
184 nanojit::Allocator::allocChunk(size_t nbytes
, bool fallible
)
186 VMAllocator
*vma
= (VMAllocator
*)this;
188 * Nb: it's conceivable that request 1 might fail (in which case
189 * mOutOfMemory will be set) and then request 2 succeeds. The subsequent
190 * OOM check will still fail, which is what we want, and the success of
191 * request 2 makes it less likely that the reserve space will overflow.
193 void *p
= js_calloc(nbytes
);
195 vma
->mSize
+= nbytes
;
197 vma
->mOutOfMemory
= true;
199 p
= (void *)vma
->mReserveCurr
;
200 vma
->mReserveCurr
+= nbytes
;
201 if (vma
->mReserveCurr
> vma
->mReserveLimit
)
202 OUT_OF_MEMORY_ABORT("nanojit::Allocator::allocChunk: out of memory");
203 memset(p
, 0, nbytes
);
204 vma
->mSize
+= nbytes
;
211 nanojit::Allocator::freeChunk(void *p
) {
212 VMAllocator
*vma
= (VMAllocator
*)this;
213 if (p
< vma
->mReserve
|| uintptr_t(p
) >= vma
->mReserveLimit
)
218 nanojit::Allocator::postReset() {
219 VMAllocator
*vma
= (VMAllocator
*)this;
220 vma
->mOutOfMemory
= false;
222 vma
->mReserveCurr
= uintptr_t(vma
->mReserve
);
226 StackFilter::getTop(LIns
* guard
)
228 VMSideExit
* e
= (VMSideExit
*)guard
->record()->exit
;
232 #if defined NJ_VERBOSE
234 formatGuardExit(InsBuf
*buf
, LIns
*ins
)
236 VMSideExit
*x
= (VMSideExit
*)ins
->record()->exit
;
238 if (LogController
.lcbits
& LC_FragProfile
)
239 VMPI_snprintf(b1
.buf
, b1
.len
, " (GuardID=%03d)", ins
->record()->profGuardID
);
242 VMPI_snprintf(buf
->buf
, buf
->len
,
243 " -> exit=%p pc=%p imacpc=%p sp%+ld rp%+ld %s%s",
249 getExitName(x
->exitType
),
254 LInsPrinter::formatGuard(InsBuf
*buf
, LIns
*ins
)
258 formatGuardExit(&b3
, ins
);
259 VMPI_snprintf(buf
->buf
, buf
->len
,
262 lirNames
[ins
->opcode()],
263 ins
->oprnd1() ? formatRef(&b2
, ins
->oprnd1()) : "",
268 LInsPrinter::formatGuardXov(InsBuf
*buf
, LIns
*ins
)
272 formatGuardExit(&b4
, ins
);
273 VMPI_snprintf(buf
->buf
, buf
->len
,
276 lirNames
[ins
->opcode()],
277 formatRef(&b2
, ins
->oprnd1()),
278 formatRef(&b3
, ins
->oprnd2()),
283 nanojit::LInsPrinter::accNames
[] = {
284 "state", // (1 << 0) == ACCSET_STATE
285 "sp", // (1 << 1) == ACCSET_STACK
286 "rp", // (1 << 2) == ACCSET_RSTACK
287 "cx", // (1 << 3) == ACCSET_CX
288 "tm", // (1 << 4) == ACCSET_TM
289 "eos", // (1 << 5) == ACCSET_EOS
290 "alloc", // (1 << 6) == ACCSET_ALLOC
291 "regs", // (1 << 7) == ACCSET_FRAMEREGS
292 "sf", // (1 << 8) == ACCSET_STACKFRAME
293 "rt", // (1 << 9) == ACCSET_RUNTIME
295 "objclasp", // (1 << 10) == ACCSET_OBJ_CLASP
296 "objflags", // (1 << 11) == ACCSET_OBJ_FLAGS
297 "objshape", // (1 << 12) == ACCSET_OBJ_SHAPE
298 "objproto", // (1 << 13) == ACCSET_OBJ_PROTO
299 "objparent", // (1 << 14) == ACCSET_OBJ_PARENT
300 "objprivate", // (1 << 15) == ACCSET_OBJ_PRIVATE
301 "objcapacity", // (1 << 16) == ACCSET_OBJ_CAPACITY
302 "objslots", // (1 << 17) == ACCSET_OBJ_SLOTS
304 "slots", // (1 << 18) == ACCSET_SLOTS
305 "tarray", // (1 << 19) == ACCSET_TARRAY
306 "tdata", // (1 << 20) == ACCSET_TARRAY_DATA
307 "iter", // (1 << 21) == ACCSET_ITER
308 "iterprops", // (1 << 22) == ACCSET_ITER_PROPS
309 "str", // (1 << 23) == ACCSET_STRING
310 "strmchars", // (1 << 24) == ACCSET_STRING_MCHARS
311 "typemap", // (1 << 25) == ACCSET_TYPEMAP
312 "fcslots", // (1 << 26) == ACCSET_FCSLOTS
313 "argsdata", // (1 << 27) == ACCSET_ARGS_DATA
315 "?!" // this entry should never be used, have it just in case
318 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(nanojit::LInsPrinter::accNames
) == TM_NUM_USED_ACCS
+ 1);
321 } /* namespace nanojit */
323 JS_DEFINE_CALLINFO_2(extern, STRING
, js_IntToString
, CONTEXT
, INT32
, 1, nanojit::ACCSET_NONE
)
327 using namespace nanojit
;
329 #if JS_HAS_XML_SUPPORT
330 #define RETURN_VALUE_IF_XML(val, ret) \
332 if (!val.isPrimitive() && val.toObject().isXML()) \
333 RETURN_VALUE("xml detected", ret); \
336 #define RETURN_IF_XML(val, ret) ((void) 0)
339 #define RETURN_IF_XML_A(val) RETURN_VALUE_IF_XML(val, ARECORD_STOP)
340 #define RETURN_IF_XML(val) RETURN_VALUE_IF_XML(val, RECORD_STOP)
342 JS_STATIC_ASSERT(sizeof(JSValueType
) == 1);
343 JS_STATIC_ASSERT(offsetof(TraceNativeStorage
, stack_global_buf
) % 16 == 0);
345 /* Map to translate a type tag into a printable representation. */
348 TypeToChar(JSValueType type
)
351 case JSVAL_TYPE_DOUBLE
: return 'D';
352 case JSVAL_TYPE_INT32
: return 'I';
353 case JSVAL_TYPE_STRING
: return 'S';
354 case JSVAL_TYPE_OBJECT
: return '!';
355 case JSVAL_TYPE_BOOLEAN
: return 'B';
356 case JSVAL_TYPE_NULL
: return 'N';
357 case JSVAL_TYPE_UNDEFINED
: return 'U';
358 case JSVAL_TYPE_MAGIC
: return 'M';
359 case JSVAL_TYPE_FUNOBJ
: return 'F';
360 case JSVAL_TYPE_NONFUNOBJ
: return 'O';
361 case JSVAL_TYPE_BOXED
: return '#';
362 case JSVAL_TYPE_STRORNULL
: return 's';
363 case JSVAL_TYPE_OBJORNULL
: return 'o';
369 ValueToTypeChar(const Value
&v
)
371 if (v
.isInt32()) return 'I';
372 if (v
.isDouble()) return 'D';
373 if (v
.isString()) return 'S';
374 if (v
.isObject()) return v
.toObject().isFunction() ? 'F' : 'O';
375 if (v
.isBoolean()) return 'B';
376 if (v
.isNull()) return 'N';
377 if (v
.isUndefined()) return 'U';
378 if (v
.isMagic()) return 'M';
384 /* Blacklist parameters. */
387 * Number of iterations of a loop where we start tracing. That is, we don't
388 * start tracing until the beginning of the HOTLOOP-th iteration.
392 /* Attempt recording this many times before blacklisting permanently. */
393 #define BL_ATTEMPTS 2
395 /* Skip this many hits before attempting recording again, after an aborted attempt. */
396 #define BL_BACKOFF 32
399 * If, after running a trace CHECK_LOOP_ITERS times, it hasn't done MIN_LOOP_ITERS
400 * iterations, we blacklist it.
402 #define MIN_LOOP_ITERS 200
403 #define LOOP_CHECK_ITERS 10
406 #define LOOP_COUNT_MAX 100000000
408 #define LOOP_COUNT_MAX MIN_LOOP_ITERS
411 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
414 /* Number of times we try to extend the tree along a side exit. */
417 /* Maximum number of peer trees allowed. */
420 /* Max call depths for inlining. */
421 #define MAX_CALLDEPTH 10
423 /* Max number of slots in a table-switch. */
424 #define MAX_TABLE_SWITCH 256
426 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
427 #define MAX_INTERP_STACK_BYTES \
428 (MAX_NATIVE_STACK_SLOTS * sizeof(Value) + \
429 MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \
430 sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
432 /* Max number of branches per tree. */
433 #define MAX_BRANCHES 32
435 #define CHECK_STATUS(expr) \
437 RecordingStatus _status = (expr); \
438 if (_status != RECORD_CONTINUE) \
442 #define CHECK_STATUS_A(expr) \
444 AbortableRecordingStatus _status = InjectStatus((expr)); \
445 if (_status != ARECORD_CONTINUE) \
450 #define RETURN_VALUE(msg, value) \
452 debug_only_printf(LC_TMAbort, "trace stopped: %d: %s\n", __LINE__, (msg)); \
456 #define RETURN_VALUE(msg, value) return (value)
459 #define RETURN_STOP(msg) RETURN_VALUE(msg, RECORD_STOP)
460 #define RETURN_STOP_A(msg) RETURN_VALUE(msg, ARECORD_STOP)
461 #define RETURN_ERROR(msg) RETURN_VALUE(msg, RECORD_ERROR)
462 #define RETURN_ERROR_A(msg) RETURN_VALUE(msg, ARECORD_ERROR)
466 #define JITSTAT(x) uint64 x;
467 #include "jitstats.tbl"
469 } jitstats
= { 0LL, };
471 JS_STATIC_ASSERT(sizeof(jitstats
) % sizeof(uint64
) == 0);
474 #define JITSTAT(x) STAT ## x ## ID,
475 #include "jitstats.tbl"
481 jitstats_getOnTrace(JSContext
*cx
, JSObject
*obj
, jsid id
, jsval
*vp
)
483 *vp
= BOOLEAN_TO_JSVAL(JS_ON_TRACE(cx
));
487 static JSPropertySpec jitstats_props
[] = {
488 #define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
489 #include "jitstats.tbl"
491 { "onTrace", 0, JSPROP_ENUMERATE
| JSPROP_READONLY
| JSPROP_PERMANENT
, jitstats_getOnTrace
, NULL
},
496 jitstats_getProperty(JSContext
*cx
, JSObject
*obj
, jsid id
, jsval
*vp
)
500 if (JSID_IS_STRING(id
)) {
501 JSAtom
* str
= JSID_TO_ATOM(id
);
502 if (StringEqualsAscii(str
, "HOTLOOP")) {
503 *vp
= INT_TO_JSVAL(HOTLOOP
);
508 if (StringEqualsAscii(str
, "profiler")) {
509 *vp
= BOOLEAN_TO_JSVAL(cx
->profilingEnabled
);
516 index
= JSID_TO_INT(id
);
520 #define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
521 #include "jitstats.tbl"
528 if (result
< JSVAL_INT_MAX
) {
529 *vp
= INT_TO_JSVAL(jsint(result
));
533 JS_snprintf(retstr
, sizeof retstr
, "%llu", result
);
534 *vp
= STRING_TO_JSVAL(JS_NewStringCopyZ(cx
, retstr
));
538 JSClass jitstats_class
= {
541 JS_PropertyStub
, JS_PropertyStub
,
542 jitstats_getProperty
, JS_PropertyStub
,
543 JS_EnumerateStub
, JS_ResolveStub
,
544 JS_ConvertStub
, NULL
,
545 JSCLASS_NO_OPTIONAL_MEMBERS
549 InitJITStatsClass(JSContext
*cx
, JSObject
*glob
)
551 JS_InitClass(cx
, glob
, NULL
, &jitstats_class
, NULL
, 0, jitstats_props
, NULL
, NULL
, NULL
);
554 #define AUDIT(x) (jitstats.x++)
556 #define AUDIT(x) ((void)0)
557 #endif /* JS_JIT_SPEW */
559 static avmplus::AvmCore s_core
= avmplus::AvmCore();
560 static avmplus::AvmCore
* core
= &s_core
;
564 DumpPeerStability(TraceMonitor
* tm
, const void* ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
);
568 * We really need a better way to configure the JIT. Shaver, where is
569 * my fancy JIT object?
571 * NB: this is raced on, if jstracer.cpp should ever be running MT.
572 * I think it's harmless tho.
574 static bool did_we_check_processor_features
= false;
576 /* ------ Debug logging control ------ */
579 * All the logging control stuff lives in here. It is shared between
580 * all threads, but I think that's OK.
582 LogControl LogController
;
587 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
590 static bool did_we_set_up_debug_logging
= false;
593 InitJITLogController()
598 LogController
.lcbits
= 0;
600 tm
= getenv("TRACEMONKEY");
604 "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
605 "Try 'TMFLAGS=help js -j' for a list of options.\n"
610 tmf
= getenv("TMFLAGS");
613 /* Using strstr() is really a cheap hack as far as flag decoding goes. */
614 if (strstr(tmf
, "help")) {
617 "usage: TMFLAGS=option,option,option,... where options can be:\n"
619 " help show this message\n"
620 " ------ options for jstracer & jsregexp ------\n"
621 " minimal ultra-minimalist output; try this first\n"
622 " full everything except 'treevis' and 'fragprofile'\n"
623 " tracer tracer lifetime (FIXME:better description)\n"
624 " recorder trace recording stuff (FIXME:better description)\n"
625 " abort show trace recording aborts\n"
626 " stats show trace recording stats\n"
627 " regexp show compilation & entry for regexps\n"
628 " profiler show loop profiles as they are profiled\n"
629 " treevis spew that tracevis/tree.py can parse\n"
630 " ------ options for Nanojit ------\n"
631 " fragprofile count entries and exits for each fragment\n"
632 " liveness show LIR liveness at start of reader pipeline\n"
633 " readlir show LIR as it enters the reader pipeline\n"
634 " aftersf show LIR after StackFilter\n"
635 " afterdce show LIR after dead code elimination\n"
636 " native show native code (interleaved with 'afterdce')\n"
637 " nativebytes show native code bytes in 'native' output\n"
638 " regalloc show regalloc state in 'native' output\n"
639 " activation show activation state in 'native' output\n"
648 /* flags for jstracer.cpp */
649 if (strstr(tmf
, "minimal") || strstr(tmf
, "full")) bits
|= LC_TMMinimal
;
650 if (strstr(tmf
, "tracer") || strstr(tmf
, "full")) bits
|= LC_TMTracer
;
651 if (strstr(tmf
, "recorder") || strstr(tmf
, "full")) bits
|= LC_TMRecorder
;
652 if (strstr(tmf
, "abort") || strstr(tmf
, "full")) bits
|= LC_TMAbort
;
653 if (strstr(tmf
, "stats") || strstr(tmf
, "full")) bits
|= LC_TMStats
;
654 if (strstr(tmf
, "profiler") || strstr(tmf
, "full")) bits
|= LC_TMProfiler
;
655 if (strstr(tmf
, "treevis")) bits
|= LC_TMTreeVis
;
657 /* flags for nanojit */
658 if (strstr(tmf
, "fragprofile")) bits
|= LC_FragProfile
;
659 if (strstr(tmf
, "liveness") || strstr(tmf
, "full")) bits
|= LC_Liveness
;
660 if (strstr(tmf
, "readlir") || strstr(tmf
, "full")) bits
|= LC_ReadLIR
;
661 if (strstr(tmf
, "aftersf") || strstr(tmf
, "full")) bits
|= LC_AfterSF
;
662 if (strstr(tmf
, "afterdce") || strstr(tmf
, "full")) bits
|= LC_AfterDCE
;
663 if (strstr(tmf
, "native") || strstr(tmf
, "full")) bits
|= LC_Native
;
664 if (strstr(tmf
, "nativebytes")|| strstr(tmf
, "full")) bits
|= LC_Bytes
;
665 if (strstr(tmf
, "regalloc") || strstr(tmf
, "full")) bits
|= LC_RegAlloc
;
666 if (strstr(tmf
, "activation") || strstr(tmf
, "full")) bits
|= LC_Activation
;
668 LogController
.lcbits
= bits
;
674 /* ------------------ Frag-level profiling support ------------------ */
679 * All the allocations done by this profile data-collection and
680 * display machinery, are done in TraceMonitor::profAlloc. That is
681 * emptied out at the end of FinishJIT. It has a lifetime from
682 * InitJIT to FinishJIT, which exactly matches the span
683 * js_FragProfiling_init to js_FragProfiling_showResults.
687 Seq
<T
>* reverseInPlace(Seq
<T
>* seq
)
692 Seq
<T
>* next
= curr
->tail
;
700 // The number of top blocks to show in the profile
701 #define N_TOP_BLOCKS 50
703 // Contains profile info for a single guard
705 uint32_t guardID
; // identifying number
706 uint32_t count
; // count.
710 uint32_t count
; // entry count for this Fragment
711 uint32_t nStaticExits
; // statically: the number of exits
712 size_t nCodeBytes
; // statically: the number of insn bytes in the main fragment
713 size_t nExitBytes
; // statically: the number of insn bytes in the exit paths
714 Seq
<GuardPI
>* guards
; // guards, each with its own count
715 uint32_t largestGuardID
; // that exists in .guards
719 FragProfiling_FragFinalizer(Fragment
* f
, TraceMonitor
* tm
)
721 // Recover profiling data from 'f', which is logically at the end
722 // of its useful lifetime.
723 if (!(LogController
.lcbits
& LC_FragProfile
))
727 // Valid profFragIDs start at 1
728 NanoAssert(f
->profFragID
>= 1);
729 // Should be called exactly once per Fragment. This will assert if
730 // you issue the same FragID to more than one Fragment.
731 NanoAssert(!tm
->profTab
->containsKey(f
->profFragID
));
733 FragPI pi
= { f
->profCount
,
739 // Begin sanity check on the guards
740 SeqBuilder
<GuardPI
> guardsBuilder(*tm
->profAlloc
);
743 uint32_t sumOfDynExits
= 0;
744 for (gr
= f
->guardsForFrag
; gr
; gr
= gr
->nextInFrag
) {
746 // Also copy the data into our auxiliary structure.
747 // f->guardsForFrag is in reverse order, and so this
748 // copy preserves that ordering (->add adds at end).
749 // Valid profGuardIDs start at 1.
750 NanoAssert(gr
->profGuardID
> 0);
751 sumOfDynExits
+= gr
->profCount
;
752 GuardPI gpi
= { gr
->profGuardID
, gr
->profCount
};
753 guardsBuilder
.add(gpi
);
754 if (gr
->profGuardID
> pi
.largestGuardID
)
755 pi
.largestGuardID
= gr
->profGuardID
;
757 pi
.guards
= guardsBuilder
.get();
758 // And put the guard list in forwards order
759 pi
.guards
= reverseInPlace(pi
.guards
);
761 // Why is this so? Because nGs is the number of guards
762 // at the time the LIR was generated, whereas f->nStaticExits
763 // is the number of them observed by the time it makes it
764 // through to the assembler. It can be the case that LIR
765 // optimisation removes redundant guards; hence we expect
766 // nGs to always be the same or higher.
767 NanoAssert(nGs
>= f
->nStaticExits
);
769 // Also we can assert that the sum of the exit counts
770 // can't exceed the entry count. It'd be nice to assert that
771 // they are exactly equal, but we can't because we don't know
772 // how many times we got to the end of the trace.
773 NanoAssert(f
->profCount
>= sumOfDynExits
);
775 // End sanity check on guards
777 tm
->profTab
->put(f
->profFragID
, pi
);
781 FragProfiling_showResults(TraceMonitor
* tm
)
783 uint32_t topFragID
[N_TOP_BLOCKS
];
784 FragPI topPI
[N_TOP_BLOCKS
];
785 uint64_t totCount
= 0, cumulCount
;
787 size_t totCodeB
= 0, totExitB
= 0;
788 PodArrayZero(topFragID
);
790 FragStatsMap::Iter
iter(*tm
->profTab
);
791 while (iter
.next()) {
792 uint32_t fragID
= iter
.key();
793 FragPI pi
= iter
.value();
794 uint32_t count
= pi
.count
;
795 totCount
+= (uint64_t)count
;
796 /* Find the rank for this entry, in tops */
797 int r
= N_TOP_BLOCKS
-1;
801 if (topFragID
[r
] == 0) {
805 if (count
> topPI
[r
].count
) {
812 NanoAssert(r
>= 0 && r
<= N_TOP_BLOCKS
);
813 /* This entry should be placed at topPI[r], and entries
814 at higher numbered slots moved up one. */
815 if (r
< N_TOP_BLOCKS
) {
816 for (int s
= N_TOP_BLOCKS
-1; s
> r
; s
--) {
817 topFragID
[s
] = topFragID
[s
-1];
818 topPI
[s
] = topPI
[s
-1];
820 topFragID
[r
] = fragID
;
825 LogController
.printf(
826 "\n----------------- Per-fragment execution counts ------------------\n");
827 LogController
.printf(
828 "\nTotal count = %llu\n\n", (unsigned long long int)totCount
);
830 LogController
.printf(
831 " Entry counts Entry counts ----- Static -----\n");
832 LogController
.printf(
833 " ------Self------ ----Cumulative--- Exits Cbytes Xbytes FragID\n");
834 LogController
.printf("\n");
837 totCount
= 1; /* avoid division by zero */
840 for (r
= 0; r
< N_TOP_BLOCKS
; r
++) {
841 if (topFragID
[r
] == 0)
843 cumulCount
+= (uint64_t)topPI
[r
].count
;
844 LogController
.printf("%3d: %5.2f%% %9u %6.2f%% %9llu"
845 " %3d %5u %5u %06u\n",
847 (double)topPI
[r
].count
* 100.0 / (double)totCount
,
849 (double)cumulCount
* 100.0 / (double)totCount
,
850 (unsigned long long int)cumulCount
,
851 topPI
[r
].nStaticExits
,
852 (unsigned int)topPI
[r
].nCodeBytes
,
853 (unsigned int)topPI
[r
].nExitBytes
,
855 totSE
+= (uint32_t)topPI
[r
].nStaticExits
;
856 totCodeB
+= topPI
[r
].nCodeBytes
;
857 totExitB
+= topPI
[r
].nExitBytes
;
859 LogController
.printf("\nTotal displayed code bytes = %u, "
861 "Total displayed static exits = %d\n\n",
862 (unsigned int)totCodeB
, (unsigned int)totExitB
, totSE
);
864 LogController
.printf("Analysis by exit counts\n\n");
866 for (r
= 0; r
< N_TOP_BLOCKS
; r
++) {
867 if (topFragID
[r
] == 0)
869 LogController
.printf("FragID=%06u, total count %u:\n", topFragID
[r
],
871 uint32_t madeItToEnd
= topPI
[r
].count
;
872 uint32_t totThisFrag
= topPI
[r
].count
;
873 if (totThisFrag
== 0)
876 // visit the guards, in forward order
877 for (Seq
<GuardPI
>* guards
= topPI
[r
].guards
; guards
; guards
= guards
->tail
) {
878 gpi
= (*guards
).head
;
881 madeItToEnd
-= gpi
.count
;
882 LogController
.printf(" GuardID=%03u %7u (%5.2f%%)\n",
883 gpi
.guardID
, gpi
.count
,
884 100.0 * (double)gpi
.count
/ (double)totThisFrag
);
886 LogController
.printf(" Looped (%03u) %7u (%5.2f%%)\n",
887 topPI
[r
].largestGuardID
+1,
889 100.0 * (double)madeItToEnd
/ (double)totThisFrag
);
890 NanoAssert(madeItToEnd
<= topPI
[r
].count
); // else unsigned underflow
891 LogController
.printf("\n");
899 /* ----------------------------------------------------------------- */
902 static JSBool FASTCALL
903 PrintOnTrace(char* format
, uint32 argc
, double *argv
)
916 #define GET_ARG() JS_BEGIN_MACRO \
917 if (argi >= argc) { \
918 fprintf(out, "[too few args for format]"); \
921 u.d = argv[argi++]; \
927 for (char *p
= format
; *p
; ++p
) {
934 fprintf(out
, "[trailing %%]");
941 fprintf(out
, "[%u:%u 0x%x:0x%x %f]", u
.i
.lo
, u
.i
.hi
, u
.i
.lo
, u
.i
.hi
, u
.d
);
945 fprintf(out
, "%d", u
.i
.lo
);
949 fprintf(out
, "%u", u
.i
.lo
);
953 fprintf(out
, "%x", u
.i
.lo
);
957 fprintf(out
, "%f", u
.d
);
966 size_t length
= u
.s
->length();
967 // protect against massive spew if u.s is a bad pointer.
968 if (length
> 1 << 16)
971 fprintf(out
, "<rope>");
974 const jschar
*chars
= u
.s
->nonRopeChars();
975 for (unsigned i
= 0; i
< length
; ++i
) {
976 jschar co
= chars
[i
];
980 fprintf(out
, "\\u%02x", co
);
982 fprintf(out
, "\\u%04x", co
);
988 fprintf(out
, "%s", u
.cstr
);
992 Value
*v
= (Value
*) u
.i
.lo
;
997 fprintf(out
, "[invalid %%%c]", *p
);
1006 JS_DEFINE_CALLINFO_3(extern, BOOL
, PrintOnTrace
, CHARPTR
, UINT32
, DOUBLEPTR
, 0, ACCSET_STORE_ANY
)
1008 // This version is not intended to be called directly: usually it is easier to
1009 // use one of the other overloads.
1011 TraceRecorder::tprint(const char *format
, int count
, nanojit::LIns
*insa
[])
1013 size_t size
= strlen(format
) + 1;
1014 char* data
= (char*) traceMonitor
->traceAlloc
->alloc(size
);
1015 memcpy(data
, format
, size
);
1017 double *args
= (double*) traceMonitor
->traceAlloc
->alloc(count
* sizeof(double));
1018 LIns
* argsp_ins
= w
.nameImmpNonGC(args
);
1019 for (int i
= 0; i
< count
; ++i
)
1020 w
.stTprintArg(insa
, argsp_ins
, i
);
1022 LIns
* args_ins
[] = { w
.nameImmpNonGC(args
), w
.nameImmi(count
), w
.nameImmpNonGC(data
) };
1023 LIns
* call_ins
= w
.call(&PrintOnTrace_ci
, args_ins
);
1024 guard(false, w
.eqi0(call_ins
), MISMATCH_EXIT
);
1027 // Generate a 'printf'-type call from trace for debugging.
1029 TraceRecorder::tprint(const char *format
)
1031 LIns
* insa
[] = { NULL
};
1032 tprint(format
, 0, insa
);
1036 TraceRecorder::tprint(const char *format
, LIns
*ins
)
1038 LIns
* insa
[] = { ins
};
1039 tprint(format
, 1, insa
);
1043 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
)
1045 LIns
* insa
[] = { ins1
, ins2
};
1046 tprint(format
, 2, insa
);
1050 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
)
1052 LIns
* insa
[] = { ins1
, ins2
, ins3
};
1053 tprint(format
, 3, insa
);
1057 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
)
1059 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
};
1060 tprint(format
, 4, insa
);
1064 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
,
1067 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
, ins5
};
1068 tprint(format
, 5, insa
);
1072 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
,
1073 LIns
*ins5
, LIns
*ins6
)
1075 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
, ins5
, ins6
};
1076 tprint(format
, 6, insa
);
1091 Tracker::getTrackerPageBase(const void* v
) const
1093 return jsuword(v
) & ~TRACKER_PAGE_MASK
;
1097 Tracker::getTrackerPageOffset(const void* v
) const
1099 return (jsuword(v
) & TRACKER_PAGE_MASK
) >> 2;
1102 struct Tracker::TrackerPage
*
1103 Tracker::findTrackerPage(const void* v
) const
1105 jsuword base
= getTrackerPageBase(v
);
1106 struct Tracker::TrackerPage
* p
= pagelist
;
1108 if (p
->base
== base
)
1115 struct Tracker::TrackerPage
*
1116 Tracker::addTrackerPage(const void* v
)
1118 jsuword base
= getTrackerPageBase(v
);
1119 struct TrackerPage
* p
= (struct TrackerPage
*) js_calloc(sizeof(*p
));
1130 TrackerPage
* p
= pagelist
;
1131 pagelist
= pagelist
->next
;
1137 Tracker::has(const void *v
) const
1139 return get(v
) != NULL
;
1143 Tracker::get(const void* v
) const
1145 struct Tracker::TrackerPage
* p
= findTrackerPage(v
);
1148 return p
->map
[getTrackerPageOffset(v
)];
1152 Tracker::set(const void* v
, LIns
* i
)
1154 struct Tracker::TrackerPage
* p
= findTrackerPage(v
);
1156 p
= addTrackerPage(v
);
1157 p
->map
[getTrackerPageOffset(v
)] = i
;
1161 hasInt32Repr(const Value
&v
)
1168 return JSDOUBLE_IS_INT32(v
.toDouble(), &_
);
1172 asInt32(const Value
&v
)
1174 JS_ASSERT(v
.isNumber());
1179 JS_ASSERT(JSDOUBLE_IS_INT32(v
.toDouble(), &_
));
1181 return jsint(v
.toDouble());
1185 * Return JSVAL_TYPE_DOUBLE for all numbers (int and double). Split
1186 * JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1187 * Otherwise, just return the value's type.
1189 static inline JSValueType
1190 getPromotedType(const Value
&v
)
1193 return JSVAL_TYPE_DOUBLE
;
1195 return v
.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ
: JSVAL_TYPE_NONFUNOBJ
;
1196 return v
.extractNonDoubleObjectTraceType();
1200 * Return JSVAL_TYPE_INT32 for all whole numbers that fit into signed 32-bit.
1201 * Split JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1202 * Otherwise, just return the value's type.
1204 static inline JSValueType
1205 getCoercedType(const Value
&v
)
1209 return (v
.isInt32() || JSDOUBLE_IS_INT32(v
.toDouble(), &_
))
1211 : JSVAL_TYPE_DOUBLE
;
1214 return v
.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ
: JSVAL_TYPE_NONFUNOBJ
;
1215 return v
.extractNonDoubleObjectTraceType();
1218 static inline JSValueType
1219 getFrameObjPtrTraceType(void *p
, JSStackFrame
*fp
)
1221 if (p
== fp
->addressOfScopeChain()) {
1222 JS_ASSERT(*(JSObject
**)p
!= NULL
);
1223 return JSVAL_TYPE_NONFUNOBJ
;
1225 JS_ASSERT(p
== fp
->addressOfArgs());
1226 return fp
->hasArgsObj() ? JSVAL_TYPE_NONFUNOBJ
: JSVAL_TYPE_NULL
;
1230 isFrameObjPtrTraceType(JSValueType t
)
1232 return t
== JSVAL_TYPE_NULL
|| t
== JSVAL_TYPE_NONFUNOBJ
;
1235 /* Constant seed and accumulate step borrowed from the DJB hash. */
1237 const uintptr_t ORACLE_MASK
= ORACLE_SIZE
- 1;
1238 JS_STATIC_ASSERT((ORACLE_MASK
& ORACLE_SIZE
) == 0);
1240 const uintptr_t FRAGMENT_TABLE_MASK
= FRAGMENT_TABLE_SIZE
- 1;
1241 JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK
& FRAGMENT_TABLE_SIZE
) == 0);
1243 const uintptr_t HASH_SEED
= 5381;
1246 HashAccum(uintptr_t& h
, uintptr_t i
, uintptr_t mask
)
1248 h
= ((h
<< 5) + h
+ (mask
& i
)) & mask
;
1251 static JS_REQUIRES_STACK
inline int
1252 StackSlotHash(JSContext
* cx
, unsigned slot
, const void* pc
)
1254 uintptr_t h
= HASH_SEED
;
1255 HashAccum(h
, uintptr_t(cx
->fp()->script()), ORACLE_MASK
);
1256 HashAccum(h
, uintptr_t(pc
), ORACLE_MASK
);
1257 HashAccum(h
, uintptr_t(slot
), ORACLE_MASK
);
1261 static JS_REQUIRES_STACK
inline int
1262 GlobalSlotHash(JSContext
* cx
, unsigned slot
)
1264 uintptr_t h
= HASH_SEED
;
1265 JSStackFrame
* fp
= cx
->fp();
1270 HashAccum(h
, uintptr_t(fp
->maybeScript()), ORACLE_MASK
);
1271 HashAccum(h
, uintptr_t(fp
->scopeChain().getGlobal()->shape()), ORACLE_MASK
);
1272 HashAccum(h
, uintptr_t(slot
), ORACLE_MASK
);
1277 PCHash(jsbytecode
* pc
)
1279 return int(uintptr_t(pc
) & ORACLE_MASK
);
1284 /* Grow the oracle bitsets to their (fixed) size here, once. */
1285 _stackDontDemote
.set(ORACLE_SIZE
-1);
1286 _globalDontDemote
.set(ORACLE_SIZE
-1);
1290 /* Tell the oracle that a certain global variable should not be demoted. */
1291 JS_REQUIRES_STACK
void
1292 Oracle::markGlobalSlotUndemotable(JSContext
* cx
, unsigned slot
)
1294 _globalDontDemote
.set(GlobalSlotHash(cx
, slot
));
1297 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
1298 JS_REQUIRES_STACK
bool
1299 Oracle::isGlobalSlotUndemotable(JSContext
* cx
, unsigned slot
) const
1301 return _globalDontDemote
.get(GlobalSlotHash(cx
, slot
));
1304 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
1305 JS_REQUIRES_STACK
void
1306 Oracle::markStackSlotUndemotable(JSContext
* cx
, unsigned slot
, const void* pc
)
1308 _stackDontDemote
.set(StackSlotHash(cx
, slot
, pc
));
1311 JS_REQUIRES_STACK
void
1312 Oracle::markStackSlotUndemotable(JSContext
* cx
, unsigned slot
)
1314 markStackSlotUndemotable(cx
, slot
, cx
->regs
->pc
);
1317 /* Consult with the oracle whether we shouldn't demote a certain slot. */
1318 JS_REQUIRES_STACK
bool
1319 Oracle::isStackSlotUndemotable(JSContext
* cx
, unsigned slot
, const void* pc
) const
1321 return _stackDontDemote
.get(StackSlotHash(cx
, slot
, pc
));
1324 JS_REQUIRES_STACK
bool
1325 Oracle::isStackSlotUndemotable(JSContext
* cx
, unsigned slot
) const
1327 return isStackSlotUndemotable(cx
, slot
, cx
->regs
->pc
);
1330 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
1332 Oracle::markInstructionUndemotable(jsbytecode
* pc
)
1334 _pcDontDemote
.set(PCHash(pc
));
1337 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
1339 Oracle::isInstructionUndemotable(jsbytecode
* pc
) const
1341 return _pcDontDemote
.get(PCHash(pc
));
1344 /* Tell the oracle that the instruction at bytecode location should use a stronger (slower) test for -0. */
1346 Oracle::markInstructionSlowZeroTest(jsbytecode
* pc
)
1348 _pcSlowZeroTest
.set(PCHash(pc
));
1351 /* Consult with the oracle whether we should use a stronger (slower) test for -0. */
1353 Oracle::isInstructionSlowZeroTest(jsbytecode
* pc
) const
1355 return _pcSlowZeroTest
.get(PCHash(pc
));
1359 Oracle::clearDemotability()
1361 _stackDontDemote
.reset();
1362 _globalDontDemote
.reset();
1363 _pcDontDemote
.reset();
1364 _pcSlowZeroTest
.reset();
1367 JS_REQUIRES_STACK
void
1368 TraceRecorder::markSlotUndemotable(LinkableFragment
* f
, unsigned slot
)
1370 if (slot
< f
->nStackTypes
) {
1371 traceMonitor
->oracle
->markStackSlotUndemotable(cx
, slot
);
1375 uint16
* gslots
= f
->globalSlots
->data();
1376 traceMonitor
->oracle
->markGlobalSlotUndemotable(cx
, gslots
[slot
- f
->nStackTypes
]);
1379 JS_REQUIRES_STACK
void
1380 TraceRecorder::markSlotUndemotable(LinkableFragment
* f
, unsigned slot
, const void* pc
)
1382 if (slot
< f
->nStackTypes
) {
1383 traceMonitor
->oracle
->markStackSlotUndemotable(cx
, slot
, pc
);
1387 uint16
* gslots
= f
->globalSlots
->data();
1388 traceMonitor
->oracle
->markGlobalSlotUndemotable(cx
, gslots
[slot
- f
->nStackTypes
]);
1391 static JS_REQUIRES_STACK
bool
1392 IsSlotUndemotable(Oracle
* oracle
, JSContext
* cx
, LinkableFragment
* f
, unsigned slot
, const void* ip
)
1394 if (slot
< f
->nStackTypes
)
1395 return !oracle
|| oracle
->isStackSlotUndemotable(cx
, slot
, ip
);
1397 uint16
* gslots
= f
->globalSlots
->data();
1398 return !oracle
|| oracle
->isGlobalSlotUndemotable(cx
, gslots
[slot
- f
->nStackTypes
]);
1401 class FrameInfoCache
1405 typedef FrameInfo
*Lookup
;
1406 static HashNumber
hash(const FrameInfo
* fi
) {
1407 size_t len
= sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSValueType
);
1409 const unsigned char *s
= (const unsigned char*)fi
;
1410 for (size_t i
= 0; i
< len
; i
++, s
++)
1411 h
= JS_ROTATE_LEFT32(h
, 4) ^ *s
;
1415 static bool match(const FrameInfo
* fi1
, const FrameInfo
* fi2
) {
1416 if (memcmp(fi1
, fi2
, sizeof(FrameInfo
)) != 0)
1418 return memcmp(fi1
->get_typemap(), fi2
->get_typemap(),
1419 fi1
->callerHeight
* sizeof(JSValueType
)) == 0;
1423 typedef HashSet
<FrameInfo
*, HashPolicy
, SystemAllocPolicy
> FrameSet
;
1426 VMAllocator
*allocator
;
1430 FrameInfoCache(VMAllocator
*allocator
);
1436 FrameInfo
*memoize(FrameInfo
*fi
) {
1437 FrameSet::AddPtr p
= set
.lookupForAdd(fi
);
1439 FrameInfo
* n
= (FrameInfo
*)
1440 allocator
->alloc(sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSValueType
));
1441 memcpy(n
, fi
, sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSValueType
));
1450 FrameInfoCache::FrameInfoCache(VMAllocator
*allocator
)
1451 : allocator(allocator
)
1454 OUT_OF_MEMORY_ABORT("FrameInfoCache::FrameInfoCache(): out of memory");
1457 #define PC_HASH_COUNT 1024
1460 Blacklist(jsbytecode
* pc
)
1463 JS_ASSERT(*pc
== JSOP_TRACE
|| *pc
== JSOP_NOTRACE
);
1468 Unblacklist(JSScript
*script
, jsbytecode
*pc
)
1470 JS_ASSERT(*pc
== JSOP_NOTRACE
|| *pc
== JSOP_TRACE
);
1471 if (*pc
== JSOP_NOTRACE
) {
1475 /* This code takes care of unblacklisting in the method JIT. */
1476 js::mjit::ResetTraceHint(script
, pc
, GET_UINT16(pc
), false);
1482 IsBlacklisted(jsbytecode
* pc
)
1484 if (*pc
== JSOP_NOTRACE
)
1486 if (*pc
== JSOP_CALL
)
1487 return *(pc
+ JSOP_CALL_LENGTH
) == JSOP_NOTRACE
;
1492 Backoff(JSContext
*cx
, jsbytecode
* pc
, Fragment
* tree
= NULL
)
1494 /* N.B. This code path cannot assume the recorder is/is not alive. */
1495 RecordAttemptMap
&table
= *JS_TRACE_MONITOR(cx
).recordAttempts
;
1496 if (RecordAttemptMap::AddPtr p
= table
.lookupForAdd(pc
)) {
1497 if (p
->value
++ > (BL_ATTEMPTS
* MAXPEERS
)) {
1503 table
.add(p
, pc
, 0);
1507 tree
->hits() -= BL_BACKOFF
;
1510 * In case there is no entry or no table (due to OOM) or some
1511 * serious imbalance in the recording-attempt distribution on a
1512 * multitree, give each tree another chance to blacklist here as
1515 if (++tree
->recordAttempts
> BL_ATTEMPTS
)
1521 ResetRecordingAttempts(JSContext
*cx
, jsbytecode
* pc
)
1523 RecordAttemptMap
&table
= *JS_TRACE_MONITOR(cx
).recordAttempts
;
1524 if (RecordAttemptMap::Ptr p
= table
.lookup(pc
))
1528 static inline size_t
1529 FragmentHash(const void *ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
)
1531 uintptr_t h
= HASH_SEED
;
1532 HashAccum(h
, uintptr_t(ip
), FRAGMENT_TABLE_MASK
);
1533 HashAccum(h
, uintptr_t(globalObj
), FRAGMENT_TABLE_MASK
);
1534 HashAccum(h
, uintptr_t(globalShape
), FRAGMENT_TABLE_MASK
);
1535 HashAccum(h
, uintptr_t(argc
), FRAGMENT_TABLE_MASK
);
1540 RawLookupFirstPeer(TraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
,
1541 uint32 globalShape
, uint32 argc
,
1542 TreeFragment
*& firstInBucket
, TreeFragment
**& prevTreeNextp
)
1544 size_t h
= FragmentHash(ip
, globalObj
, globalShape
, argc
);
1545 TreeFragment
** ppf
= &tm
->vmfragments
[h
];
1546 firstInBucket
= *ppf
;
1547 for (; TreeFragment
* pf
= *ppf
; ppf
= &pf
->next
) {
1548 if (pf
->globalObj
== globalObj
&&
1549 pf
->globalShape
== globalShape
&&
1552 prevTreeNextp
= ppf
;
1556 prevTreeNextp
= ppf
;
1560 static TreeFragment
*
1561 LookupLoop(TraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
,
1562 uint32 globalShape
, uint32 argc
)
1564 TreeFragment
*_
, **prevTreeNextp
;
1565 RawLookupFirstPeer(tm
, ip
, globalObj
, globalShape
, argc
, _
, prevTreeNextp
);
1566 return *prevTreeNextp
;
1569 static TreeFragment
*
1570 LookupOrAddLoop(TraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
,
1571 uint32 globalShape
, uint32 argc
)
1573 TreeFragment
*firstInBucket
, **prevTreeNextp
;
1574 RawLookupFirstPeer(tm
, ip
, globalObj
, globalShape
, argc
, firstInBucket
, prevTreeNextp
);
1575 if (TreeFragment
*f
= *prevTreeNextp
)
1579 uint32_t profFragID
= (LogController
.lcbits
& LC_FragProfile
)
1580 ? (++(tm
->lastFragID
)) : 0;
1582 TreeFragment
* f
= new (*tm
->dataAlloc
) TreeFragment(ip
, tm
->dataAlloc
, globalObj
, globalShape
,
1583 argc
verbose_only(, profFragID
));
1584 f
->root
= f
; /* f is the root of a new tree */
1585 *prevTreeNextp
= f
; /* insert f at the end of the vmfragments bucket-list */
1587 f
->first
= f
; /* initialize peer-list at f */
1592 static TreeFragment
*
1593 AddNewPeerToPeerList(TraceMonitor
* tm
, TreeFragment
* peer
)
1597 uint32_t profFragID
= (LogController
.lcbits
& LC_FragProfile
)
1598 ? (++(tm
->lastFragID
)) : 0;
1600 TreeFragment
* f
= new (*tm
->dataAlloc
) TreeFragment(peer
->ip
, tm
->dataAlloc
, peer
->globalObj
,
1601 peer
->globalShape
, peer
->argc
1602 verbose_only(, profFragID
));
1603 f
->root
= f
; /* f is the root of a new tree */
1604 f
->first
= peer
->first
; /* add f to peer list */
1605 f
->peer
= peer
->peer
;
1607 /* only the |first| Fragment of a peer list needs a valid |next| field */
1608 debug_only(f
->next
= (TreeFragment
*)0xcdcdcdcd);
1612 JS_REQUIRES_STACK
void
1613 TreeFragment::initialize(JSContext
* cx
, SlotList
*globalSlots
, bool speculate
)
1615 this->dependentTrees
.clear();
1616 this->linkedTrees
.clear();
1617 this->globalSlots
= globalSlots
;
1619 /* Capture the coerced type of each active slot in the type map. */
1620 this->typeMap
.captureTypes(cx
, globalObj
, *globalSlots
, 0 /* callDepth */, speculate
);
1621 this->nStackTypes
= this->typeMap
.length() - globalSlots
->length();
1622 this->spOffsetAtEntry
= cx
->regs
->sp
- cx
->fp()->base();
1625 this->treeFileName
= cx
->fp()->script()->filename
;
1626 this->treeLineNumber
= js_FramePCToLineNumber(cx
, cx
->fp());
1627 this->treePCOffset
= FramePCOffset(cx
, cx
->fp());
1629 this->script
= cx
->fp()->script();
1630 this->gcthings
.clear();
1631 this->shapes
.clear();
1632 this->unstableExits
= NULL
;
1633 this->sideExits
.clear();
1635 /* Determine the native frame layout at the entry point. */
1636 this->nativeStackBase
= (nStackTypes
- (cx
->regs
->sp
- cx
->fp()->base())) *
1638 this->maxNativeStackSlots
= nStackTypes
;
1639 this->maxCallDepth
= 0;
1645 TreeFragment::removeUnstableExit(VMSideExit
* exit
)
1647 /* Now erase this exit from the unstable exit list. */
1648 UnstableExit
** tail
= &this->unstableExits
;
1649 for (UnstableExit
* uexit
= this->unstableExits
; uexit
!= NULL
; uexit
= uexit
->next
) {
1650 if (uexit
->exit
== exit
) {
1651 *tail
= uexit
->next
;
1654 tail
= &uexit
->next
;
1656 JS_NOT_REACHED("exit not in unstable exit list");
1662 AssertTreeIsUnique(TraceMonitor
* tm
, TreeFragment
* f
)
1664 JS_ASSERT(f
->root
== f
);
1667 * Check for duplicate entry type maps. This is always wrong and hints at
1668 * trace explosion since we are trying to stabilize something without
1669 * properly connecting peer edges.
1671 for (TreeFragment
* peer
= LookupLoop(tm
, f
->ip
, f
->globalObj
, f
->globalShape
, f
->argc
);
1673 peer
= peer
->peer
) {
1674 if (!peer
->code() || peer
== f
)
1676 JS_ASSERT(!f
->typeMap
.matches(peer
->typeMap
));
1682 AttemptCompilation(JSContext
*cx
, JSObject
* globalObj
,
1683 JSScript
* script
, jsbytecode
* pc
, uint32 argc
)
1685 TraceMonitor
*tm
= &JS_TRACE_MONITOR(cx
);
1687 /* If we already permanently blacklisted the location, undo that. */
1688 Unblacklist(script
, pc
);
1689 ResetRecordingAttempts(cx
, pc
);
1691 /* Breathe new life into all peer fragments at the designated loop header. */
1692 TreeFragment
* f
= LookupLoop(tm
, pc
, globalObj
, globalObj
->shape(), argc
);
1695 * If the global object's shape changed, we can't easily find the
1696 * corresponding loop header via a hash table lookup. In this
1697 * we simply bail here and hope that the fragment has another
1698 * outstanding compilation attempt. This case is extremely rare.
1702 JS_ASSERT(f
->root
== f
);
1705 JS_ASSERT(f
->root
== f
);
1706 --f
->recordAttempts
;
1707 f
->hits() = HOTLOOP
;
1712 static const CallInfo
*
1713 fcallinfo(LIns
*ins
)
1715 return ins
->isop(LIR_calld
) ? ins
->callInfo() : NULL
;
1719 * Determine whether this operand is guaranteed to not overflow the specified
1720 * integer operation.
1723 ChecksRequired(LOpcode op
, LIns
* op1
, LIns
* op2
,
1724 bool* needsOverflowCheck
, bool* needsNegZeroCheck
)
1726 Interval x
= Interval::of(op1
, 3);
1727 Interval y
= Interval::of(op2
, 3);
1732 z
= Interval::add(x
, y
);
1733 *needsNegZeroCheck
= false;
1737 z
= Interval::sub(x
, y
);
1738 *needsNegZeroCheck
= false;
1742 z
= Interval::mul(x
, y
);
1743 // A would-be negative zero result can only occur if we have
1744 // mul(0, -n) or mul(-n, 0), where n != 0. In particular, a multiply
1745 // where one operand is a positive immediate cannot result in negative
1748 // This assumes that -0 cannot be an operand; if one had occurred we
1749 // would have already exited the trace in order to promote the
1750 // computation back to doubles.
1751 *needsNegZeroCheck
= (x
.canBeZero() && y
.canBeNegative()) ||
1752 (y
.canBeZero() && x
.canBeNegative());
1757 JS_NOT_REACHED("needsOverflowCheck");
1760 *needsOverflowCheck
= z
.hasOverflowed
;
1764 * JSStackFrame::numActualArgs is only defined for function frames. Since the
1765 * actual arguments of the entry frame are kept on trace, argc is included in
1766 * the tuple identifying a fragment so that two fragments for the same loop but
1767 * recorded with different number of actual arguments are treated as two
1768 * completely separate trees. For this particular use, we define the number of
1769 * actuals for global and eval frames to be 0.
1772 entryFrameArgc(JSContext
*cx
)
1774 JSStackFrame
*fp
= cx
->fp();
1775 return fp
->isGlobalFrame() || fp
->isEvalFrame() ? 0 : fp
->numActualArgs();
1778 template <typename Visitor
>
1779 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
1780 VisitStackAndArgs(Visitor
&visitor
, JSStackFrame
*fp
, JSStackFrame
*next
, Value
*stack
)
1782 if (JS_LIKELY(!next
->hasOverflowArgs()))
1783 return visitor
.visitStackSlots(stack
, next
->formalArgsEnd() - stack
, fp
);
1786 * In the case of nactual > nformal, the formals are copied by the VM onto
1787 * the top of the stack. We only want to mark the formals once, so we
1788 * carefully mark only the canonical actual arguments (as defined by
1789 * JSStackFrame::canonicalActualArg).
1791 uintN nactual
= next
->numActualArgs();
1792 Value
*actuals
= next
->actualArgs();
1793 size_t nstack
= (actuals
- 2 /* callee,this */) - stack
;
1794 if (!visitor
.visitStackSlots(stack
, nstack
, fp
))
1796 uintN nformal
= next
->numFormalArgs();
1797 Value
*formals
= next
->formalArgs();
1798 if (!visitor
.visitStackSlots(formals
- 2, 2 + nformal
, fp
))
1800 return visitor
.visitStackSlots(actuals
+ nformal
, nactual
- nformal
, fp
);
1804 * Visit the values in the given JSStackFrame that the tracer cares about. This
1805 * visitor function is (implicitly) the primary definition of the native stack
1806 * area layout. There are a few other independent pieces of code that must be
1807 * maintained to assume the same layout. They are marked like this:
1809 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
1811 template <typename Visitor
>
1812 static JS_REQUIRES_STACK
bool
1813 VisitFrameSlots(Visitor
&visitor
, JSContext
*cx
, unsigned depth
, JSStackFrame
*fp
,
1816 JS_ASSERT_IF(!next
, cx
->fp() == fp
);
1818 if (depth
> 0 && !VisitFrameSlots(visitor
, cx
, depth
-1, fp
->prev(), fp
))
1822 if (fp
->isGlobalFrame()) {
1823 visitor
.setStackSlotKind("global");
1824 Value
*base
= fp
->slots() + fp
->globalScript()->nfixed
;
1826 return VisitStackAndArgs(visitor
, fp
, next
, base
);
1827 return visitor
.visitStackSlots(base
, cx
->regs
->sp
- base
, fp
);
1830 if (JS_UNLIKELY(fp
->isEvalFrame())) {
1831 visitor
.setStackSlotKind("eval");
1832 if (!visitor
.visitStackSlots(&fp
->calleeValue(), 2, fp
))
1836 * Only the bottom function frame must visit its arguments; for all
1837 * other frames, arguments are visited by the prev-frame.
1839 visitor
.setStackSlotKind("args");
1840 uintN nformal
= fp
->numFormalArgs();
1841 if (!visitor
.visitStackSlots(fp
->formalArgs() - 2, 2 + nformal
, fp
))
1843 if (JS_UNLIKELY(fp
->hasOverflowArgs())) {
1844 if (!visitor
.visitStackSlots(fp
->actualArgs() + nformal
,
1845 fp
->numActualArgs() - nformal
, fp
))
1851 JS_ASSERT(fp
->isFunctionFrame());
1854 * We keep two members of JSStackFrame on trace: the args obj pointer and
1855 * the scope chain pointer. The visitor must take care not to treat these
1856 * as js::Value-typed variables, since they are unboxed pointers.
1857 * Moreover, JSStackFrame compresses the args obj pointer with nactual, so
1858 * fp->addressOfArgs() is not really a JSObject**: the visitor must treat
1859 * !fp->hasArgsObj() as a null args obj pointer. Hence, visitFrameObjPtr
1860 * is only passed a void *.
1862 visitor
.setStackSlotKind("arguments");
1863 if (!visitor
.visitFrameObjPtr(fp
->addressOfArgs(), fp
))
1865 visitor
.setStackSlotKind("scopeChain");
1866 if (!visitor
.visitFrameObjPtr(fp
->addressOfScopeChain(), fp
))
1869 visitor
.setStackSlotKind("slots");
1871 return VisitStackAndArgs(visitor
, fp
, next
, fp
->slots());
1872 return visitor
.visitStackSlots(fp
->slots(), cx
->regs
->sp
- fp
->slots(), fp
);
1875 // Number of native frame slots used for 'special' values between args and vars.
1876 // Currently the two values are |arguments| (args object) and |scopeChain|.
1877 const int SPECIAL_FRAME_SLOTS
= 2;
1879 template <typename Visitor
>
1880 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
1881 VisitStackSlots(Visitor
&visitor
, JSContext
*cx
, unsigned callDepth
)
1883 return VisitFrameSlots(visitor
, cx
, callDepth
, cx
->fp(), NULL
);
1886 template <typename Visitor
>
1887 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1888 VisitGlobalSlots(Visitor
&visitor
, JSContext
*cx
, JSObject
*globalObj
,
1889 unsigned ngslots
, uint16
*gslots
)
1891 for (unsigned n
= 0; n
< ngslots
; ++n
) {
1892 unsigned slot
= gslots
[n
];
1893 visitor
.visitGlobalSlot(&globalObj
->getSlotRef(slot
), n
, slot
);
1897 class AdjustCallerTypeVisitor
;
1899 template <typename Visitor
>
1900 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1901 VisitGlobalSlots(Visitor
&visitor
, JSContext
*cx
, SlotList
&gslots
)
1903 VisitGlobalSlots(visitor
, cx
, cx
->fp()->scopeChain().getGlobal(),
1904 gslots
.length(), gslots
.data());
1908 template <typename Visitor
>
1909 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1910 VisitSlots(Visitor
& visitor
, JSContext
* cx
, JSObject
* globalObj
,
1911 unsigned callDepth
, unsigned ngslots
, uint16
* gslots
)
1913 if (VisitStackSlots(visitor
, cx
, callDepth
))
1914 VisitGlobalSlots(visitor
, cx
, globalObj
, ngslots
, gslots
);
1917 template <typename Visitor
>
1918 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1919 VisitSlots(Visitor
& visitor
, JSContext
* cx
, unsigned callDepth
,
1920 unsigned ngslots
, uint16
* gslots
)
1922 VisitSlots(visitor
, cx
, cx
->fp()->scopeChain().getGlobal(),
1923 callDepth
, ngslots
, gslots
);
1926 template <typename Visitor
>
1927 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1928 VisitSlots(Visitor
&visitor
, JSContext
*cx
, JSObject
*globalObj
,
1929 unsigned callDepth
, const SlotList
& slots
)
1931 VisitSlots(visitor
, cx
, globalObj
, callDepth
, slots
.length(),
1935 template <typename Visitor
>
1936 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
1937 VisitSlots(Visitor
&visitor
, JSContext
*cx
, unsigned callDepth
,
1938 const SlotList
& slots
)
1940 VisitSlots(visitor
, cx
, cx
->fp()->scopeChain().getGlobal(),
1941 callDepth
, slots
.length(), slots
.data());
1945 class SlotVisitorBase
{
1946 #if defined JS_JIT_SPEW
1948 char const *mStackSlotKind
;
1950 SlotVisitorBase() : mStackSlotKind(NULL
) {}
1951 JS_ALWAYS_INLINE
const char *stackSlotKind() { return mStackSlotKind
; }
1952 JS_ALWAYS_INLINE
void setStackSlotKind(char const *k
) {
1957 JS_ALWAYS_INLINE
const char *stackSlotKind() { return NULL
; }
1958 JS_ALWAYS_INLINE
void setStackSlotKind(char const *k
) {}
1962 struct CountSlotsVisitor
: public SlotVisitorBase
1968 JS_ALWAYS_INLINE
CountSlotsVisitor(const void* stop
= NULL
) :
1974 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
1975 visitStackSlots(Value
*vp
, size_t count
, JSStackFrame
* fp
) {
1978 if (mStop
&& size_t(((const Value
*)mStop
) - vp
) < count
) {
1979 mCount
+= size_t(((const Value
*)mStop
) - vp
);
1987 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
1988 visitFrameObjPtr(void* p
, JSStackFrame
* fp
) {
1991 if (mStop
&& mStop
== p
) {
1999 JS_ALWAYS_INLINE
unsigned count() {
2003 JS_ALWAYS_INLINE
bool stopped() {
2008 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
unsigned
2009 CountStackAndArgs(JSStackFrame
*next
, Value
*stack
)
2011 if (JS_LIKELY(!next
->hasOverflowArgs()))
2012 return (Value
*)next
- stack
;
2013 size_t nvals
= (next
->formalArgs() - 2 /* callee, this */) - stack
;
2014 JS_ASSERT(nvals
== unsigned((next
->actualArgs() - 2) - stack
) + (2 + next
->numActualArgs()));
2018 static JS_ALWAYS_INLINE uintN
2019 NumSlotsBeforeFixed(JSStackFrame
*fp
)
2021 uintN numArgs
= fp
->isEvalFrame() ? 0 : Max(fp
->numActualArgs(), fp
->numFormalArgs());
2022 return 2 + numArgs
+ SPECIAL_FRAME_SLOTS
;
2026 * Calculate the total number of native frame slots we need from this frame all
2027 * the way back to the entry frame, including the current stack usage.
2029 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
2031 JS_REQUIRES_STACK
unsigned
2032 NativeStackSlots(JSContext
*cx
, unsigned callDepth
)
2034 JSStackFrame
*fp
= cx
->fp();
2035 JSStackFrame
*next
= NULL
;
2037 unsigned depth
= callDepth
;
2039 for (; depth
> 0; --depth
, next
= fp
, fp
= fp
->prev()) {
2040 JS_ASSERT(fp
->isFunctionFrame() && !fp
->isEvalFrame());
2041 slots
+= SPECIAL_FRAME_SLOTS
;
2043 slots
+= CountStackAndArgs(next
, fp
->slots());
2045 slots
+= cx
->regs
->sp
- fp
->slots();
2049 if (fp
->isGlobalFrame()) {
2050 start
= fp
->slots() + fp
->globalScript()->nfixed
;
2052 start
= fp
->slots();
2053 slots
+= NumSlotsBeforeFixed(fp
);
2056 slots
+= CountStackAndArgs(next
, start
);
2058 slots
+= cx
->regs
->sp
- start
;
2061 CountSlotsVisitor visitor
;
2062 VisitStackSlots(visitor
, cx
, callDepth
);
2063 JS_ASSERT(visitor
.count() == slots
&& !visitor
.stopped());
2068 class CaptureTypesVisitor
: public SlotVisitorBase
2071 JSValueType
* mTypeMap
;
2076 JS_ALWAYS_INLINE
CaptureTypesVisitor(JSContext
* cx
, JSValueType
* typeMap
, bool speculate
) :
2080 mOracle(speculate
? JS_TRACE_MONITOR(cx
).oracle
: NULL
) {}
2082 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2083 visitGlobalSlot(Value
*vp
, unsigned n
, unsigned slot
) {
2084 JSValueType type
= getCoercedType(*vp
);
2085 if (type
== JSVAL_TYPE_INT32
&& (!mOracle
|| mOracle
->isGlobalSlotUndemotable(mCx
, slot
)))
2086 type
= JSVAL_TYPE_DOUBLE
;
2087 JS_ASSERT(type
!= JSVAL_TYPE_BOXED
);
2088 debug_only_printf(LC_TMTracer
,
2089 "capture type global%d: %c\n",
2090 n
, TypeToChar(type
));
2094 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2095 visitStackSlots(Value
*vp
, int count
, JSStackFrame
* fp
) {
2096 for (int i
= 0; i
< count
; ++i
) {
2097 JSValueType type
= getCoercedType(vp
[i
]);
2098 if (type
== JSVAL_TYPE_INT32
&& (!mOracle
|| mOracle
->isStackSlotUndemotable(mCx
, length())))
2099 type
= JSVAL_TYPE_DOUBLE
;
2100 JS_ASSERT(type
!= JSVAL_TYPE_BOXED
);
2101 debug_only_printf(LC_TMTracer
,
2102 "capture type %s%d: %c\n",
2103 stackSlotKind(), i
, TypeToChar(type
));
2109 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2110 visitFrameObjPtr(void* p
, JSStackFrame
* fp
) {
2111 JSValueType type
= getFrameObjPtrTraceType(p
, fp
);
2112 debug_only_printf(LC_TMTracer
,
2113 "capture type %s%d: %c\n",
2114 stackSlotKind(), 0, TypeToChar(type
));
2119 JS_ALWAYS_INLINE
uintptr_t length() {
2120 return mPtr
- mTypeMap
;
2125 TypeMap::set(unsigned stackSlots
, unsigned ngslots
,
2126 const JSValueType
* stackTypeMap
, const JSValueType
* globalTypeMap
)
2128 setLength(ngslots
+ stackSlots
);
2129 memcpy(data(), stackTypeMap
, stackSlots
* sizeof(JSValueType
));
2130 memcpy(data() + stackSlots
, globalTypeMap
, ngslots
* sizeof(JSValueType
));
2134 * Capture the type map for the selected slots of the global object and currently pending
2137 JS_REQUIRES_STACK
void
2138 TypeMap::captureTypes(JSContext
* cx
, JSObject
* globalObj
, SlotList
& slots
, unsigned callDepth
,
2141 setLength(NativeStackSlots(cx
, callDepth
) + slots
.length());
2142 CaptureTypesVisitor
visitor(cx
, data(), speculate
);
2143 VisitSlots(visitor
, cx
, globalObj
, callDepth
, slots
);
2144 JS_ASSERT(visitor
.length() == length());
2147 JS_REQUIRES_STACK
void
2148 TypeMap::captureMissingGlobalTypes(JSContext
* cx
, JSObject
* globalObj
, SlotList
& slots
, unsigned stackSlots
,
2151 unsigned oldSlots
= length() - stackSlots
;
2152 int diff
= slots
.length() - oldSlots
;
2153 JS_ASSERT(diff
>= 0);
2154 setLength(length() + diff
);
2155 CaptureTypesVisitor
visitor(cx
, data() + stackSlots
+ oldSlots
, speculate
);
2156 VisitGlobalSlots(visitor
, cx
, globalObj
, diff
, slots
.data() + oldSlots
);
2159 /* Compare this type map to another one and see whether they match. */
2161 TypeMap::matches(TypeMap
& other
) const
2163 if (length() != other
.length())
2165 return !memcmp(data(), other
.data(), length());
2169 TypeMap::fromRaw(JSValueType
* other
, unsigned numSlots
)
2171 unsigned oldLength
= length();
2172 setLength(length() + numSlots
);
2173 for (unsigned i
= 0; i
< numSlots
; i
++)
2174 get(oldLength
+ i
) = other
[i
];
/*
 * Use the provided storage area to create a new type map that contains the
 * partial type map with the rest of it filled up from the complete type
 * map.
 */
static void
MergeTypeMaps(JSValueType** partial, unsigned* plength, JSValueType* complete, unsigned clength, JSValueType* mem)
{
    unsigned l = *plength;
    JS_ASSERT(l < clength);
    memcpy(mem, *partial, l * sizeof(JSValueType));
    memcpy(mem + l, complete + l, (clength - l) * sizeof(JSValueType));
    *partial = mem;
    *plength = clength;
}
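/*
 * Illustrative sketch (not part of the build; hypothetical values): with a
 * partial map of length 2 and a complete map of length 4, MergeTypeMaps copies
 * the two known entries into |mem|, fills the remaining two from the complete
 * map, and redirects the out params at the merged storage.
 */
#if 0
static void
ExampleMergeTypeMaps()
{
    JSValueType complete[] = { JSVAL_TYPE_DOUBLE, JSVAL_TYPE_INT32,
                               JSVAL_TYPE_INT32, JSVAL_TYPE_DOUBLE };
    JSValueType partialBuf[] = { JSVAL_TYPE_DOUBLE, JSVAL_TYPE_INT32 };
    JSValueType* partial = partialBuf;
    unsigned plength = 2;
    JSValueType mem[4];
    MergeTypeMaps(&partial, &plength, complete, 4, mem);
    JS_ASSERT(partial == mem && plength == 4);
    JS_ASSERT(partial[3] == JSVAL_TYPE_DOUBLE);
}
#endif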
/*
 * Specializes a tree to any specifically missing globals, including any
 * dependent trees.
 */
static JS_REQUIRES_STACK void
SpecializeTreesToLateGlobals(JSContext* cx, TreeFragment* root, JSValueType* globalTypeMap,
                             unsigned numGlobalSlots)
{
    for (unsigned i = root->nGlobalTypes(); i < numGlobalSlots; i++)
        root->typeMap.add(globalTypeMap[i]);

    JS_ASSERT(root->nGlobalTypes() == numGlobalSlots);

    for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
        TreeFragment* tree = root->dependentTrees[i];
        if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
            SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
    }
    for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
        TreeFragment* tree = root->linkedTrees[i];
        if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
            SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
    }
}
/* Specializes a tree to any missing globals, including any dependent trees. */
static JS_REQUIRES_STACK void
SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeFragment* root)
{
    /* If we already have a bunch of peer trees, try to be as generic as possible. */
    unsigned count = 0;
    for (TreeFragment *f = root->first; f; f = f->peer, ++count);
    bool speculate = count < MAXPEERS-1;

    root->typeMap.captureMissingGlobalTypes(cx, globalObj, *root->globalSlots, root->nStackTypes,
                                            speculate);
    JS_ASSERT(root->globalSlots->length() == root->typeMap.length() - root->nStackTypes);

    SpecializeTreesToLateGlobals(cx, root, root->globalTypeMap(), root->nGlobalTypes());
}
static void
ResetJITImpl(JSContext* cx);

#ifdef MOZ_TRACEVIS
static JS_INLINE void
ResetJIT(JSContext* cx, TraceVisFlushReason r)
{
    LogTraceVisEvent(cx, S_RESET, r);
    ResetJITImpl(cx);
}
#else
# define ResetJIT(cx, reason) ResetJITImpl(cx)
#endif

void
FlushJITCache(JSContext *cx)
{
    ResetJIT(cx, FR_OOM);
}

static void
TrashTree(TreeFragment* f);
TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment,
                             unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                             VMSideExit* innermost, JSScript* outerScript, jsbytecode* outerPC,
                             uint32 outerArgc, bool speculate)
  : traceMonitor(&JS_TRACE_MONITOR(cx)),
    oracle(speculate ? JS_TRACE_MONITOR(cx).oracle : NULL),
    tree(fragment->root),
    globalObj(tree->globalObj),
    outerScript(outerScript),
    outerArgc(outerArgc),
    importTypeMap(&tempAlloc()),
    lirbuf(new (tempAlloc()) LirBuffer(tempAlloc())),
    mark(*traceMonitor->traceAlloc),
    numSideExitsBefore(tree->sideExits.length()),
    nativeFrameTracker(),
    callDepth(anchor ? anchor->calldepth : 0),
    atoms(FrameAtomBase(cx, cx->fp())),
    consts(JSScript::isValidOffset(cx->fp()->script()->constOffset)
           ? cx->fp()->script()->consts()->vector
           : NULL),
    strictModeCode_ins(NULL),
    cfgMerges(&tempAlloc()),
    whichTreesToTrash(&tempAlloc()),
    guardedShapeTable(cx),
    addPropShapeBefore(NULL),
    native_rval_ins(NULL),
    pendingSpecializedNative(NULL),
    pendingUnboxSlot(NULL),
    pendingGuardCondition(NULL),
    pendingGlobalSlotsToSet(cx),
    generatedSpecializedNative(),
    w(&tempAlloc(), lirbuf)
{
    JS_ASSERT(globalObj == cx->fp()->scopeChain().getGlobal());
    JS_ASSERT(globalObj->hasOwnShape());
    JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip);

#ifdef DEBUG
    lirbuf->printer = new (tempAlloc()) LInsPrinter(tempAlloc(), TM_NUM_USED_ACCS);
#endif

    /*
     * Reset the fragment state we care about in case we got a recycled
     * fragment. This includes resetting any profiling data we might have
     * accumulated.
     */
    fragment->lastIns = NULL;
    fragment->setCode(NULL);
    fragment->lirbuf = lirbuf;
    verbose_only( fragment->profCount = 0; )
    verbose_only( fragment->nStaticExits = 0; )
    verbose_only( fragment->nCodeBytes = 0; )
    verbose_only( fragment->nExitBytes = 0; )
    verbose_only( fragment->guardNumberer = 1; )
    verbose_only( fragment->guardsForFrag = NULL; )
    verbose_only( fragment->loopLabel = NULL; )

    /*
     * Don't change fragment->profFragID, though. Once the identity of the
     * Fragment is set up (for profiling purposes), we can't change it.
     */

    if (!guardedShapeTable.init())
        OUT_OF_MEMORY_ABORT("TraceRecorder::TraceRecorder: out of memory");

    debug_only_print0(LC_TMMinimal, "\n");
    debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
                      tree->treeFileName, tree->treeLineNumber, tree->treePCOffset,
                      fragment->profFragID);

    debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
                      (void*)this->globalObj, this->globalObj->shape());
    debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
                      (void*)anchor);

    /* This creates the LIR writer pipeline. */
    w.init(&LogController);

    for (int i = 0; i < NumSavedRegs; ++i)
        w.paramp(i, 1);
#ifdef DEBUG
    for (int i = 0; i < NumSavedRegs; ++i)
        w.name(lirbuf->savedRegs[i], regNames[REGNUM(Assembler::savedRegs[i])]);
#endif

    lirbuf->state = w.name(w.paramp(0, 0), "state");

    if (fragment == fragment->root) {
        w.comment("begin-loop");
        InitConst(loopLabel) = w.label();
    }
    w.comment("begin-setup");

    // if profiling, drop a label, so the assembler knows to put a
    // frag-entry-counter increment at this point.  If there's a
    // loopLabel, use that; else we'll have to make a dummy label
    // especially for this purpose.
    verbose_only( if (LogController.lcbits & LC_FragProfile) {
        LIns* entryLabel = NULL;
        if (fragment == fragment->root) {
            entryLabel = loopLabel;
        } else {
            entryLabel = w.label();
        }
        NanoAssert(entryLabel);
        NanoAssert(!fragment->loopLabel);
        fragment->loopLabel = entryLabel;
    })

    lirbuf->sp = w.name(w.ldpStateField(sp), "sp");
    lirbuf->rp = w.name(w.ldpStateField(rp), "rp");
    InitConst(cx_ins) = w.name(w.ldpStateField(cx), "cx");
    InitConst(eos_ins) = w.name(w.ldpStateField(eos), "eos");
    InitConst(eor_ins) = w.name(w.ldpStateField(eor), "eor");

    strictModeCode_ins = w.name(w.immi(cx->fp()->script()->strictModeCode), "strict");

    /* If we came from exit, we might not have enough global types. */
    if (tree->globalSlots->length() > tree->nGlobalTypes())
        SpecializeTreesToMissingGlobals(cx, globalObj, tree);

    /* read into registers all values on the stack and all globals we know so far */
    import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);

    if (fragment == fragment->root) {
        /*
         * We poll the operation callback request flag. It is updated asynchronously whenever
         * the callback is to be invoked. We can use w.nameImmpNonGC here as JIT-ed code is per
         * thread and cannot outlive the corresponding JSThreadData.
         */
        w.comment("begin-interruptFlags-check");
        /* FIXME: See bug 621140 for moving interruptCounter to the compartment. */
#ifdef JS_THREADSAFE
        void *interrupt = (void*) &cx->runtime->interruptCounter;
#else
        void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
#endif
        LIns* flagptr = w.nameImmpNonGC(interrupt);
        LIns* x = w.ldiVolatile(flagptr);
        guard(true, w.eqi0(x), TIMEOUT_EXIT);
        w.comment("end-interruptFlags-check");

        /*
         * Count the number of iterations run by a trace, so that we can blacklist if
         * the trace runs too few iterations to be worthwhile. Do this only if the methodjit
         * is on--otherwise we must try to trace as much as possible.
         */
#ifdef JS_METHODJIT
        if (cx->methodJitEnabled) {
            w.comment("begin-count-loop-iterations");
            LIns* counterPtr = w.nameImmpNonGC((void *) &traceMonitor->iterationCounter);
            LIns* counterValue = w.ldiVolatile(counterPtr);
            LIns* test = w.ltiN(counterValue, LOOP_COUNT_MAX);
            LIns *branch = w.jfUnoptimizable(test);
            /*
             * stiVolatile() uses ACCSET_STORE_ANY;  If LICM is implemented
             * (bug 545406) this counter will need its own region.
             */
            w.stiVolatile(w.addi(counterValue, w.immi(1)), counterPtr);
            w.label(branch);
            w.comment("end-count-loop-iterations");
        }
#endif
    }

    /*
     * If we are attached to a tree call guard, make sure the guard the inner
     * tree exited from is what we expect it to be.
     */
    if (anchor && anchor->exitType == NESTED_EXIT) {
        LIns* nested_ins = w.ldpStateField(outermostTreeExitGuard);
        guard(true, w.eqp(nested_ins, w.nameImmpNonGC(innermost)), NESTED_EXIT);
    }

    w.comment("end-setup");
}
TraceRecorder::~TraceRecorder()
{
    /* Should already have been adjusted by callers before calling delete. */
    JS_ASSERT(traceMonitor->recorder != this);

    if (trashSelf)
        TrashTree(fragment->root);

    for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
        TrashTree(whichTreesToTrash[i]);

    /* Purge the tempAlloc used during recording. */
    tempAlloc().reset();

    forgetGuardedShapes();
}
bool
TraceMonitor::outOfMemory() const
{
    return dataAlloc->outOfMemory() ||
           tempAlloc->outOfMemory() ||
           traceAlloc->outOfMemory();
}
/*
 * This function destroys the recorder after a successful recording, possibly
 * starting a suspended outer recorder.
 */
AbortableRecordingStatus
TraceRecorder::finishSuccessfully()
{
    JS_ASSERT(!traceMonitor->profile);
    JS_ASSERT(traceMonitor->recorder == this);
    JS_ASSERT(fragment->lastIns && fragment->code());

    AUDIT(traceCompleted);

    /* Grab local copies of members needed after destruction of |this|. */
    JSContext* localcx = cx;
    TraceMonitor* localtm = traceMonitor;

    localtm->recorder = NULL;
    /* We can't (easily) use js_delete() here because the constructor is private. */
    this->~TraceRecorder();
    js_free(this);

    /* Catch OOM that occurred during recording. */
    if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
        ResetJIT(localcx, FR_OOM);
        return ARECORD_ABORTED;
    }
    return ARECORD_COMPLETED;
}
/* This function aborts a recorder and any pending outer recorders. */
JS_REQUIRES_STACK TraceRecorder::AbortResult
TraceRecorder::finishAbort(const char* reason)
{
    JS_ASSERT(!traceMonitor->profile);
    JS_ASSERT(traceMonitor->recorder == this);

    AUDIT(recorderAborted);

    debug_only_printf(LC_TMMinimal | LC_TMAbort,
                      "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
                      tree->treeFileName,
                      tree->treeLineNumber,
                      tree->treePCOffset,
                      cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()),
                      reason);

    Backoff(cx, (jsbytecode*) fragment->root->ip, fragment->root);

    /*
     * If this is the primary trace and we didn't succeed compiling, trash the
     * tree. Otherwise, remove the VMSideExits we added while recording, which
     * are about to be invalid.
     *
     * BIG FAT WARNING: resetting the length is only a valid strategy as long as
     * there may be only one recorder active for a single TreeInfo at a time.
     * Otherwise, we may be throwing away another recorder's valid side exits.
     */
    if (fragment->root == fragment) {
        TrashTree(fragment->toTreeFragment());
    } else {
        JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length());
        fragment->root->sideExits.setLength(numSideExitsBefore);
    }

    /* Grab local copies of members needed after destruction of |this|. */
    JSContext* localcx = cx;
    TraceMonitor* localtm = traceMonitor;

    localtm->recorder = NULL;
    /* We can't (easily) use js_delete() here because the constructor is private. */
    this->~TraceRecorder();
    js_free(this);

    /* Catch OOM that occurred during recording. */
    if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
        ResetJIT(localcx, FR_OOM);
        return JIT_RESET;
    }
    return NORMAL_ABORT;
}
inline LIns*
TraceRecorder::w_immpObjGC(JSObject* obj)
{
    tree->gcthings.addUnique(ObjectValue(*obj));
    return w.immpNonGC((void*)obj);
}

inline LIns*
TraceRecorder::w_immpFunGC(JSFunction* fun)
{
    tree->gcthings.addUnique(ObjectValue(*fun));
    return w.immpNonGC((void*)fun);
}

inline LIns*
TraceRecorder::w_immpStrGC(JSString* str)
{
    tree->gcthings.addUnique(StringValue(str));
    return w.immpNonGC((void*)str);
}

inline LIns*
TraceRecorder::w_immpShapeGC(const Shape* shape)
{
    tree->shapes.addUnique(shape);
    return w.immpNonGC((void*)shape);
}

inline LIns*
TraceRecorder::w_immpIdGC(jsid id)
{
    if (JSID_IS_GCTHING(id))
        tree->gcthings.addUnique(IdToValue(id));
    return w.immpNonGC((void*)JSID_BITS(id));
}
ptrdiff_t
TraceRecorder::nativeGlobalSlot(const Value* p) const
{
    JS_ASSERT(isGlobal(p));
    return ptrdiff_t(p - globalObj->slots);
}

/* Determine the offset in the native global frame for a jsval we track. */
ptrdiff_t
TraceRecorder::nativeGlobalOffset(const Value* p) const
{
    return nativeGlobalSlot(p) * sizeof(double);
}

/* Determine whether a value is a global stack slot. */
bool
TraceRecorder::isGlobal(const Value* p) const
{
    return (size_t(p - globalObj->slots) < globalObj->numSlots());
}

bool
TraceRecorder::isVoidPtrGlobal(const void* p) const
{
    return isGlobal((const Value *)p);
}
/*
 * Return the offset in the native stack for the given jsval. More formally,
 * |p| must be the address of a jsval that is represented in the native stack
 * area. The return value is the offset, from TracerState::stackBase, in bytes,
 * where the native representation of |*p| is stored. To get the offset
 * relative to TracerState::sp, subtract TreeFragment::nativeStackBase.
 */
JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativeStackOffsetImpl(const void* p) const
{
    CountSlotsVisitor visitor(p);
    VisitStackSlots(visitor, cx, callDepth);
    size_t offset = visitor.count() * sizeof(double);

    /*
     * If it's not in a pending frame, it must be on the stack of the current
     * frame above sp but below fp->slots() + script->nslots.
     */
    if (!visitor.stopped()) {
        const Value *vp = (const Value *)p;
        JS_ASSERT(size_t(vp - cx->fp()->slots()) < cx->fp()->numSlots());
        offset += size_t(vp - cx->regs->sp) * sizeof(double);
    }
    return offset;
}

JS_REQUIRES_STACK inline ptrdiff_t
TraceRecorder::nativeStackOffset(const Value* p) const
{
    return nativeStackOffsetImpl(p);
}

JS_REQUIRES_STACK inline ptrdiff_t
TraceRecorder::nativeStackSlotImpl(const void* p) const
{
    return nativeStackOffsetImpl(p) / sizeof(double);
}

JS_REQUIRES_STACK inline ptrdiff_t
TraceRecorder::nativeStackSlot(const Value* p) const
{
    return nativeStackSlotImpl(p);
}

/*
 * Return the offset, from TracerState:sp, for the given jsval. Shorthand for:
 *  -TreeFragment::nativeStackBase + nativeStackOffset(p).
 */
inline JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativespOffsetImpl(const void* p) const
{
    return -tree->nativeStackBase + nativeStackOffsetImpl(p);
}

inline JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativespOffset(const Value* p) const
{
    return nativespOffsetImpl(p);
}
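/*
 * Illustrative sketch (not part of the build): the three views of a stack slot
 * are related as follows -- nativeStackSlot(p) counts doubles from
 * TracerState::stackBase, nativeStackOffset(p) is the same quantity in bytes,
 * and nativespOffset(p) rebases that byte offset against TracerState::sp by
 * subtracting TreeFragment::nativeStackBase.
 */
#if 0
static void
ExampleStackOffsets(TraceRecorder *tr, const Value *p)
{
    ptrdiff_t slot   = tr->nativeStackSlot(p);
    ptrdiff_t offset = tr->nativeStackOffset(p);
    JS_ASSERT(offset == slot * ptrdiff_t(sizeof(double)));
    JS_ASSERT(tr->nativespOffset(p) == offset - tr->tree->nativeStackBase);
}
#endif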
/* Track the maximum number of native frame slots we need during execution. */
inline void
TraceRecorder::trackNativeStackUse(unsigned slots)
{
    if (slots > tree->maxNativeStackSlots)
        tree->maxNativeStackSlots = slots;
}
/*
 * Unbox a jsval into a slot. Slots are wide enough to hold double values
 * directly (instead of storing a pointer to them). We assert instead of
 * type checking. The caller must ensure the types are compatible.
 */
static inline void
ValueToNative(const Value &v, JSValueType type, double* slot)
{
    JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
    if (type > JSVAL_UPPER_INCL_TYPE_OF_NUMBER_SET)
        v.unboxNonDoubleTo((uint64 *)slot);
    else if (type == JSVAL_TYPE_INT32)
        *(int32_t *)slot = v.isInt32() ? v.toInt32() : (int32_t)v.toDouble();
    else
        *(double *)slot = v.toNumber();

#ifdef DEBUG
    int32_t _;
    switch (type) {
      case JSVAL_TYPE_NONFUNOBJ: {
        JS_ASSERT(!IsFunctionObject(v));
        debug_only_printf(LC_TMTracer,
                          "object<%p:%s> ", (void*)*(JSObject **)slot,
                          v.toObject().getClass()->name);
        return;
      }

      case JSVAL_TYPE_INT32:
        JS_ASSERT(v.isInt32() || (v.isDouble() && JSDOUBLE_IS_INT32(v.toDouble(), &_)));
        debug_only_printf(LC_TMTracer, "int<%d> ", *(jsint *)slot);
        return;

      case JSVAL_TYPE_DOUBLE:
        JS_ASSERT(v.isNumber());
        debug_only_printf(LC_TMTracer, "double<%g> ", *(jsdouble *)slot);
        return;

      case JSVAL_TYPE_BOXED:
        JS_NOT_REACHED("found jsval type in an entry type map");
        return;

      case JSVAL_TYPE_STRING:
        JS_ASSERT(v.isString());
        debug_only_printf(LC_TMTracer, "string<%p> ", (void*)*(JSString **)slot);
        return;

      case JSVAL_TYPE_NULL:
        JS_ASSERT(v.isNull());
        debug_only_print0(LC_TMTracer, "null ");
        return;

      case JSVAL_TYPE_BOOLEAN:
        JS_ASSERT(v.isBoolean());
        debug_only_printf(LC_TMTracer, "special<%d> ", *(JSBool *)slot);
        return;

      case JSVAL_TYPE_UNDEFINED:
        JS_ASSERT(v.isUndefined());
        debug_only_print0(LC_TMTracer, "undefined ");
        return;

      case JSVAL_TYPE_MAGIC:
        JS_ASSERT(v.isMagic());
        debug_only_print0(LC_TMTracer, "hole ");
        return;

      case JSVAL_TYPE_FUNOBJ: {
        JS_ASSERT(IsFunctionObject(v));
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
#if defined JS_JIT_SPEW
        if (LogController.lcbits & LC_TMTracer) {
            char funName[40];
            if (fun->atom)
                JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
            else
                strcpy(funName, "unnamed");
            LogController.printf("function<%p:%s> ", (void*)*(JSObject **)slot, funName);
        }
#endif
        return;
      }
      default:
        JS_NOT_REACHED("unexpected type");
        return;
    }
#endif
}
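/*
 * Illustrative sketch (not part of the build; hypothetical values): unboxing an
 * int32 Value under a JSVAL_TYPE_INT32 entry writes the raw 32-bit integer into
 * the 8-byte native slot, while a JSVAL_TYPE_DOUBLE entry stores the full double.
 */
#if 0
static void
ExampleValueToNative()
{
    double slot;
    ValueToNative(Int32Value(42), JSVAL_TYPE_INT32, &slot);
    JS_ASSERT(*(int32_t *)&slot == 42);
    ValueToNative(DoubleValue(3.5), JSVAL_TYPE_DOUBLE, &slot);
    JS_ASSERT(slot == 3.5);
}
#endif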
void
TraceMonitor::flush()
{
    /* flush should only be called after all recorders have been aborted. */
    JS_ASSERT(!recorder);
    JS_ASSERT(!profile);
    AUDIT(cacheFlushed);

    // recover profiling data from expiring Fragments
    verbose_only(
        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
            for (TreeFragment *f = vmfragments[i]; f; f = f->next) {
                JS_ASSERT(f->root == f);
                for (TreeFragment *p = f; p; p = p->peer)
                    FragProfiling_FragFinalizer(p, this);
            }
        }
    )
    verbose_only(
        for (Seq<Fragment*>* f = branches; f; f = f->tail)
            FragProfiling_FragFinalizer(f->head, this);
    )

#ifdef JS_METHODJIT
    for (LoopProfileMap::Enum e(*loopProfiles); !e.empty(); e.popFront()) {
        jsbytecode *pc = e.front().key;
        LoopProfile *prof = e.front().value;
        /* This code takes care of resetting all methodjit state. */
        js::mjit::ResetTraceHint(prof->entryScript, pc, GET_UINT16(pc), true);
    }
#endif

    frameCache->reset();
    traceAlloc->reset();
    loopProfiles->clear();

    for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
        globalStates[i].globalShape = -1;
        globalStates[i].globalSlots = new (*dataAlloc) SlotList(dataAlloc);
    }

    assembler = new (*dataAlloc) Assembler(*codeAlloc, *dataAlloc, *dataAlloc, core,
                                           &LogController, avmplus::AvmCore::config);
    verbose_only( branches = NULL; )

    PodArrayZero(vmfragments);
    tracedScripts.clear();

    needFlush = JS_FALSE;
}
static bool
IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape)
{
    JSRuntime *rt = cx->runtime;
    if (rt->gcCurrentCompartment != NULL)
        return false;

    return !shape->marked();
}

static bool
HasUnreachableGCThings(JSContext *cx, TreeFragment *f)
{
    /*
     * We do not check here for dead scripts as JSScript is not a GC thing.
     * Instead PurgeScriptFragments is used to remove dead script fragments.
     */
    if (IsAboutToBeFinalized(cx, f->globalObj))
        return true;
    Value* vp = f->gcthings.data();
    for (unsigned len = f->gcthings.length(); len; --len) {
        Value &v = *vp++;
        JS_ASSERT(v.isMarkable());
        if (IsAboutToBeFinalized(cx, v.toGCThing()))
            return true;
    }
    const Shape** shapep = f->shapes.data();
    for (unsigned len = f->shapes.length(); len; --len) {
        const Shape* shape = *shapep++;
        if (IsShapeAboutToBeFinalized(cx, shape))
            return true;
    }
    return false;
}
void
TraceMonitor::sweep(JSContext *cx)
{
    JS_ASSERT(!ontrace());
    debug_only_print0(LC_TMTracer, "Purging fragments with dead things");

    bool shouldAbortRecording = false;
    TreeFragment *recorderTree = NULL;
    if (recorder) {
        recorderTree = recorder->getTree();
        shouldAbortRecording = HasUnreachableGCThings(cx, recorderTree);
    }

    for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
        TreeFragment** fragp = &vmfragments[i];
        while (TreeFragment* frag = *fragp) {
            TreeFragment* peer = frag;
            do {
                if (HasUnreachableGCThings(cx, peer))
                    break;
                peer = peer->peer;
            } while (peer);
            if (peer) {
                debug_only_printf(LC_TMTracer,
                                  "TreeFragment peer %p has dead gc thing."
                                  "Disconnecting tree %p with ip %p\n",
                                  (void *) peer, (void *) frag, frag->ip);
                JS_ASSERT(frag->root == frag);
                *fragp = frag->next;
                do {
                    verbose_only( FragProfiling_FragFinalizer(frag, this); );
                    if (recorderTree == frag)
                        shouldAbortRecording = true;
                    TrashTree(frag);
                    frag = frag->peer;
                } while (frag);
            } else {
                fragp = &frag->next;
            }
        }
    }

    if (shouldAbortRecording)
        recorder->finishAbort("dead GC things");
}
void
TraceMonitor::mark(JSTracer *trc)
{
    TracerState* state = tracerState;
    while (state) {
        if (state->nativeVp)
            MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
        state = state->prev;
    }
}
/*
 * Box a value from the native stack back into the Value format.
 */
static inline void
NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
{
    if (type == JSVAL_TYPE_DOUBLE) {
        v.setNumber(*slot);
    } else if (JS_LIKELY(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET)) {
        v.boxNonDoubleFrom(type, (uint64 *)slot);
    } else if (type == JSVAL_TYPE_STRORNULL) {
        JSString *str = *(JSString **)slot;
        v = str ? StringValue(str) : NullValue();
    } else if (type == JSVAL_TYPE_OBJORNULL) {
        JSObject *obj = *(JSObject **)slot;
        v = obj ? ObjectValue(*obj) : NullValue();
    } else {
        JS_ASSERT(type == JSVAL_TYPE_BOXED);
        JS_STATIC_ASSERT(sizeof(Value) == sizeof(double));
        v = *(Value *)slot;
    }

#ifdef DEBUG
    switch (type) {
      case JSVAL_TYPE_NONFUNOBJ:
        JS_ASSERT(!IsFunctionObject(v));
        debug_only_printf(LC_TMTracer,
                          "object<%p:%s> ",
                          (void*) &v.toObject(),
                          v.toObject().getClass()->name);
        break;
      case JSVAL_TYPE_INT32:
        debug_only_printf(LC_TMTracer, "int<%d> ", v.toInt32());
        break;
      case JSVAL_TYPE_DOUBLE:
        debug_only_printf(LC_TMTracer, "double<%g> ", v.toNumber());
        break;
      case JSVAL_TYPE_STRING:
        debug_only_printf(LC_TMTracer, "string<%p> ", (void*)v.toString());
        break;
      case JSVAL_TYPE_NULL:
        JS_ASSERT(v.isNull());
        debug_only_print0(LC_TMTracer, "null ");
        break;
      case JSVAL_TYPE_BOOLEAN:
        debug_only_printf(LC_TMTracer, "bool<%d> ", v.toBoolean());
        break;
      case JSVAL_TYPE_UNDEFINED:
        JS_ASSERT(v.isUndefined());
        debug_only_print0(LC_TMTracer, "undefined ");
        break;
      case JSVAL_TYPE_MAGIC:
        debug_only_printf(LC_TMTracer, "magic<%d> ", v.whyMagic());
        break;
      case JSVAL_TYPE_FUNOBJ:
        JS_ASSERT(IsFunctionObject(v));
#if defined JS_JIT_SPEW
        if (LogController.lcbits & LC_TMTracer) {
            JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
            char funName[40];
            if (fun->atom)
                JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
            else
                strcpy(funName, "unnamed");
            LogController.printf("function<%p:%s> ", (void*) &v.toObject(), funName);
        }
#endif
        break;
      case JSVAL_TYPE_STRORNULL:
        debug_only_printf(LC_TMTracer, "nullablestr<%p> ", v.isNull() ? NULL : (void *)v.toString());
        break;
      case JSVAL_TYPE_OBJORNULL:
        debug_only_printf(LC_TMTracer, "nullablestr<%p> ", v.isNull() ? NULL : (void *)&v.toObject());
        break;
      case JSVAL_TYPE_BOXED:
        debug_only_printf(LC_TMTracer, "box<%llx> ", (long long unsigned int)v.asRawBits());
        break;
      default:
        JS_NOT_REACHED("unexpected type");
        break;
    }
#endif
}
void
ExternNativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
{
    return NativeToValue(cx, v, type, slot);
}
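/*
 * Illustrative sketch (not part of the build; hypothetical values): NativeToValue
 * is the inverse of ValueToNative for the boxable types, so boxing a slot that
 * was filled from an int32 recovers the original value.
 */
#if 0
static void
ExampleNativeRoundTrip(JSContext *cx)
{
    double slot;
    ValueToNative(Int32Value(7), JSVAL_TYPE_INT32, &slot);
    Value v;
    NativeToValue(cx, v, JSVAL_TYPE_INT32, &slot);
    JS_ASSERT(v.isInt32() && v.toInt32() == 7);
}
#endif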
class BuildNativeFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    JSValueType *mTypeMap;
    double *mGlobal;
    double *mStack;

  public:
    BuildNativeFrameVisitor(JSContext *cx,
                            JSValueType *typemap,
                            double *global,
                            double *stack) :
        mCx(cx),
        mTypeMap(typemap),
        mGlobal(global),
        mStack(stack)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d: ", n);
        ValueToNative(*vp, *mTypeMap++, &mGlobal[slot]);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, int count, JSStackFrame* fp) {
        for (int i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
            ValueToNative(*vp++, *mTypeMap++, mStack++);
        }
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp) {
        debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), 0);
        if (p == fp->addressOfScopeChain())
            *(JSObject **)mStack = &fp->scopeChain();
        else
            *(JSObject **)mStack = fp->hasArgsObj() ? &fp->argsObj() : NULL;
        if (*mTypeMap == JSVAL_TYPE_NULL) {
            JS_ASSERT(*(JSObject **)mStack == NULL);
            debug_only_print0(LC_TMTracer, "null ");
        } else {
            JS_ASSERT(*mTypeMap == JSVAL_TYPE_NONFUNOBJ);
            JS_ASSERT(!(*(JSObject **)p)->isFunction());
            debug_only_printf(LC_TMTracer,
                              "object<%p:%s> ", *(void **)p,
                              (*(JSObject **)p)->getClass()->name);
        }
        mTypeMap++;
        mStack++;
        return true;
    }
};

static JS_REQUIRES_STACK void
BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
                 unsigned ngslots, uint16 *gslots,
                 JSValueType *typeMap, double *global, double *stack)
{
    BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
    VisitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
    debug_only_print0(LC_TMTracer, "\n");
}
class FlushNativeGlobalFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    JSValueType *mTypeMap;
    double *mGlobal;

  public:
    FlushNativeGlobalFrameVisitor(JSContext *cx,
                                  JSValueType *typeMap,
                                  double *global) :
        mCx(cx),
        mTypeMap(typeMap),
        mGlobal(global)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d=", n);
        JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
        NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
    }
};
class FlushNativeStackFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    const JSValueType *mInitTypeMap;
    const JSValueType *mTypeMap;
    double *mStack;

  public:
    FlushNativeStackFrameVisitor(JSContext *cx,
                                 const JSValueType *typeMap,
                                 double *stack) :
        mCx(cx),
        mInitTypeMap(typeMap),
        mTypeMap(typeMap),
        mStack(stack)
    {}

    const JSValueType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
        for (size_t i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
            NativeToValue(mCx, *vp, *mTypeMap, mStack);
            vp++;
            mTypeMap++;
            mStack++;
        }
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp) {
        JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
        debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
        JSObject *frameobj = *(JSObject **)mStack;
        JS_ASSERT((frameobj == NULL) == (*mTypeMap == JSVAL_TYPE_NULL));
        if (p == fp->addressOfArgs()) {
            if (frameobj) {
                JS_ASSERT_IF(fp->hasArgsObj(), frameobj == &fp->argsObj());
                fp->setArgsObj(*frameobj);
                JS_ASSERT(frameobj->isArguments());
                if (frameobj->isNormalArguments())
                    frameobj->setPrivate(fp);
                else
                    JS_ASSERT(!frameobj->getPrivate());
                debug_only_printf(LC_TMTracer,
                                  "argsobj<%p> ", (void *)frameobj);
            } else {
                JS_ASSERT(!fp->hasArgsObj());
                debug_only_print0(LC_TMTracer,
                                  "argsobj<null> ");
            }
            /* else, SynthesizeFrame has initialized fp->args.nactual */
        } else {
            JS_ASSERT(p == fp->addressOfScopeChain());
            if (frameobj->isCall() &&
                !frameobj->getPrivate() &&
                fp->maybeCallee() == frameobj->getCallObjCallee())
            {
                JS_ASSERT(&fp->scopeChain() == JSStackFrame::sInvalidScopeChain);
                frameobj->setPrivate(fp);
                fp->setScopeChainAndCallObj(*frameobj);
            } else {
                fp->setScopeChainNoCallObj(*frameobj);
            }
            debug_only_printf(LC_TMTracer,
                              "scopechain<%p> ", (void *)frameobj);
        }
#ifdef DEBUG
        JSValueType type = *mTypeMap;
        if (type == JSVAL_TYPE_NULL) {
            debug_only_print0(LC_TMTracer, "null ");
        } else {
            JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ);
            JS_ASSERT(!frameobj->isFunction());
            debug_only_printf(LC_TMTracer,
                              "object<%p:%s> ", *(void **)p,
                              frameobj->getClass()->name);
        }
#endif
        mTypeMap++;
        mStack++;
        return true;
    }
};
/* Box the given native frame into a JS frame. This is infallible. */
static JS_REQUIRES_STACK void
FlushNativeGlobalFrame(JSContext *cx, JSObject *globalObj, double *global, unsigned ngslots,
                       uint16 *gslots, JSValueType *typemap)
{
    FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
    VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
    debug_only_print0(LC_TMTracer, "\n");
}
/*
 * Returns the number of values on the native stack, excluding the innermost
 * frame. This walks all FrameInfos on the native frame stack and sums the
 * slot usage of each frame.
 */
static int32
StackDepthFromCallStack(TracerState* state, uint32 callDepth)
{
    int32 nativeStackFramePos = 0;

    // Duplicate native stack layout computation: see VisitFrameSlots header comment.
    for (FrameInfo** fip = state->callstackBase; fip < state->rp + callDepth; fip++)
        nativeStackFramePos += (*fip)->callerHeight;
    return nativeStackFramePos;
}
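/*
 * Illustrative sketch (not part of the build; hypothetical heights): with two
 * pending FrameInfos whose callerHeight values are 5 and 3, the native stack
 * holds 5 + 3 = 8 doubles before the innermost frame, which is what
 * StackDepthFromCallStack(state, 2) returns.
 */
#if 0
static void
ExampleStackDepth(TracerState *state)
{
    /* Hypothetical state with callerHeights 5 and 3 at callDepth 2. */
    JS_ASSERT(StackDepthFromCallStack(state, 2) == 5 + 3);
}
#endif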
/*
 * Generic function to read upvars on trace from slots of active frames.
 *     T                   Traits type parameter. Must provide static functions:
 *                             interp_get(fp, slot)     Read the value out of an interpreter frame.
 *                             native_slot(argc, slot)  Return the position of the desired value in the on-trace
 *                                                      stack frame (with position 0 being callee).
 *
 *     upvarLevel          Static level of the function containing the upvar definition
 *     slot                Identifies the value to get. The meaning is defined by the traits type.
 *     callDepth           Call depth of current point relative to trace entry
 */
template<typename T>
static inline uint32
GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    TracerState* state = JS_TRACE_MONITOR(cx).tracerState;
    FrameInfo** fip = state->rp + callDepth;

    /*
     * First search the FrameInfo call stack for an entry containing our
     * upvar, namely one with level == upvarLevel. The first FrameInfo is a
     * transition from the entry frame to some callee. However, it is not
     * known (from looking at the FrameInfo) whether the entry frame had a
     * callee. Rather than special-case this or insert more logic into the
     * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
     * |>= base|), and let the code after the loop handle it.
     */
    int32 stackOffset = StackDepthFromCallStack(state, callDepth);
    while (--fip > state->callstackBase) {
        FrameInfo* fi = *fip;

        /*
         * The loop starts aligned to the top of the stack, so move down to the first meaningful
         * callee. Then read the callee directly from the frame.
         */
        stackOffset -= fi->callerHeight;
        JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
        uintN calleeLevel = fun->u.i.script->staticLevel;
        if (calleeLevel == upvarLevel) {
            /*
             * Now find the upvar's value in the native stack. stackOffset is
             * the offset of the start of the activation record corresponding
             * to *fip in the native stack.
             */
            uint32 native_slot = T::native_slot(fi->callerArgc, slot);
            *result = state->stackBase[stackOffset + native_slot];
            return fi->get_typemap()[native_slot];
        }
    }

    // Next search the trace entry frame, which is not in the FrameInfo stack.
    if (state->outermostTree->script->staticLevel == upvarLevel) {
        uint32 argc = state->outermostTree->argc;
        uint32 native_slot = T::native_slot(argc, slot);
        *result = state->stackBase[native_slot];
        return state->callstackBase[0]->get_typemap()[native_slot];
    }

    /*
     * If we did not find the upvar in the frames for the active traces,
     * then we simply get the value from the interpreter state.
     */
    JS_ASSERT(upvarLevel < UpvarCookie::UPVAR_LEVEL_LIMIT);
    JSStackFrame* fp = cx->findFrameAtLevel(upvarLevel);
    Value v = T::interp_get(fp, slot);
    JSValueType type = getCoercedType(v);
    ValueToNative(v, type, result);
    return type;
}
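/*
 * Illustrative sketch (not part of the build; hypothetical traits): any traits
 * type passed to GetUpvarOnTrace only has to supply the two static functions
 * named in the comment above. UpvarArgTraits and UpvarVarTraits below are the
 * real instances; the offsets used here are made up for illustration.
 */
#if 0
struct ExampleUpvarTraits {
    /* Read the value out of an interpreter frame. */
    static Value interp_get(JSStackFrame* fp, int32 slot) { return fp->slots()[slot]; }
    /* Position of the value in the on-trace frame (0 is the callee); hypothetical layout. */
    static uint32 native_slot(uint32 argc, int32 slot) { return 2 + argc + slot; }
};
#endif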
// For this traits type, 'slot' is the argument index, which may be -2 for callee.
struct UpvarArgTraits {
    static Value interp_get(JSStackFrame* fp, int32 slot) {
        return fp->formalArg(slot);
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        return 2 /*callee,this*/ + slot;
    }
};

uint32 JS_FASTCALL
GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    return GetUpvarOnTrace<UpvarArgTraits>(cx, upvarLevel, slot, callDepth, result);
}
// For this traits type, 'slot' is an index into the local slots array.
struct UpvarVarTraits {
    static Value interp_get(JSStackFrame* fp, int32 slot) {
        return fp->slots()[slot];
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        return 4 /*callee,this,arguments,scopeChain*/ + argc + slot;
    }
};

uint32 JS_FASTCALL
GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    return GetUpvarOnTrace<UpvarVarTraits>(cx, upvarLevel, slot, callDepth, result);
}
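/*
 * Illustrative sketch (not part of the build): the on-trace activation record
 * is laid out as [callee, this, args..., argsObj, scopeChain, locals...],
 * which is why arguments live at 2 + argIndex and locals at 4 + argc +
 * localIndex. For a function with argc == 3, local slot 1 is native slot 8.
 */
#if 0
/* Hypothetical: argc == 3, local slot 1. */
JS_STATIC_ASSERT(4 + 3 + 1 == 8);
#endif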
/*
 * For this traits type, 'slot' is an index into the stack area (within slots,
 * after nfixed) of a frame with no function. (On trace, the top-level frame is
 * the only one that can have no function.)
 */
struct UpvarStackTraits {
    static Value interp_get(JSStackFrame* fp, int32 slot) {
        return fp->slots()[slot + fp->numFixed()];
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        /*
         * Locals are not imported by the tracer when the frame has no
         * function, so we do not add fp->getFixedCount().
         */
        JS_ASSERT(argc == 0);
        return slot;
    }
};

uint32 JS_FASTCALL
GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth,
                     double* result)
{
    return GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
}
// Parameters needed to access a value from a closure on trace.
struct ClosureVarInfo
{
    uint32 slot;
    uint32 callDepth;
};

/*
 * Generic function to read upvars from Call objects of active heavyweight functions.
 *     call       Callee Function object in which the upvar is accessed.
 */
template<typename T>
static inline uint32
GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result)
{
    JS_ASSERT(call->isCall());

#ifdef DEBUG
    TracerState* state = JS_TRACE_MONITOR(cx).tracerState;
    FrameInfo** fip = state->rp + cv->callDepth;
    int32 stackOffset = StackDepthFromCallStack(state, cv->callDepth);
    while (--fip > state->callstackBase) {
        FrameInfo* fi = *fip;

        /*
         * The loop starts aligned to the top of the stack, so move down to the first meaningful
         * callee. Then read the callee directly from the frame.
         */
        stackOffset -= fi->callerHeight;
        JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
        if (callee == call) {
            // This is not reachable as long as the tracer guards on the identity of the callee's
            // parent when making a call:
            //
            // - We can only reach this point if we execute JSOP_LAMBDA on trace, then call the
            //   function created by the lambda, and then execute a JSOP_NAME on trace.
            // - Each time we execute JSOP_LAMBDA we get a function with a different parent.
            // - When we execute the call to the new function, we exit trace because the parent
            //   differs.
            JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
        }
    }
#endif

    // We already guarded on trace that we aren't touching an outer tree's entry frame
    VOUCH_DOES_NOT_REQUIRE_STACK();
    JSStackFrame* fp = (JSStackFrame*) call->getPrivate();
    JS_ASSERT(fp != cx->fp());

    Value v;
    if (fp) {
        v = T::get_slot(fp, cv->slot);
    } else {
        /*
         * Get the value from the object. We know we have a Call object, and
         * that our slot index is fine, so don't monkey around with calling the
         * property getter (which just looks in the slot) or calling
         * js_GetReservedSlot. Just get the slot directly. Note the static
         * asserts in jsfun.cpp which make sure Call objects use slots.
         */
        JS_ASSERT(cv->slot < T::slot_count(call));
        v = T::get_slot(call, cv->slot);
    }
    JSValueType type = getCoercedType(v);
    ValueToNative(v, type, result);
    return type;
}
struct ArgClosureTraits
{
    // Get the right frame slots to use our slot index with.
    // See also UpvarArgTraits.
    static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
        JS_ASSERT(slot < fp->numFormalArgs());
        return fp->formalArg(slot);
    }

    // Get the right object slots to use our slot index with.
    static inline Value get_slot(JSObject* obj, unsigned slot) {
        return obj->getSlot(slot_offset(obj) + slot);
    }

    // Get the offset of our object slots from the object's slots pointer.
    static inline uint32 slot_offset(JSObject* obj) {
        return JSObject::CALL_RESERVED_SLOTS;
    }

    // Get the maximum slot index of this type that should be allowed
    static inline uint16 slot_count(JSObject* obj) {
        return obj->getCallObjCalleeFunction()->nargs;
    }
};

uint32 JS_FASTCALL
GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
{
    return GetFromClosure<ArgClosureTraits>(cx, callee, cv, result);
}
struct VarClosureTraits
{
    // See also UpvarVarTraits.
    static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
        JS_ASSERT(slot < fp->fun()->script()->bindings.countVars());
        return fp->slots()[slot];
    }

    static inline Value get_slot(JSObject* obj, unsigned slot) {
        return obj->getSlot(slot_offset(obj) + slot);
    }

    static inline uint32 slot_offset(JSObject* obj) {
        return JSObject::CALL_RESERVED_SLOTS +
               obj->getCallObjCalleeFunction()->nargs;
    }

    static inline uint16 slot_count(JSObject* obj) {
        return obj->getCallObjCalleeFunction()->script()->bindings.countVars();
    }
};

uint32 JS_FASTCALL
GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
{
    return GetFromClosure<VarClosureTraits>(cx, callee, cv, result);
}
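/*
 * Illustrative sketch (not part of the build): a Call object stores its
 * reserved slots first, then the callee's formal args, then its vars, which is
 * exactly what the two slot_offset() functions above encode. For a callee with
 * nargs == 2, arg 1 lives at CALL_RESERVED_SLOTS + 1 and var 0 at
 * CALL_RESERVED_SLOTS + 2.
 */
#if 0
static Value
ExampleGetClosureSlots(JSObject *callObj)
{
    /* Hypothetical Call object whose callee has nargs == 2. */
    Value arg1 = ArgClosureTraits::get_slot(callObj, 1);
    Value var0 = VarClosureTraits::get_slot(callObj, 0);
    (void) var0;
    return arg1;
}
#endif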
/*
 * Box the given native stack frame into the virtual machine stack. This is
 * infallible.
 *
 * @param callDepth the distance between the entry frame into our trace and
 *                  cx->fp() when we make this call. If this is not called as a
 *                  result of a nested exit, callDepth is 0.
 * @param mp an array of JSValueType that indicate what the types of the things
 *           on the stack are.
 * @param np pointer to the native stack. We want to copy values from here to
 *           the JS stack as needed.
 * @return the number of things we popped off of np.
 */
static JS_REQUIRES_STACK int
FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, double* np)
{
    /* Root all string and object references first (we don't need to call the GC for this). */
    FlushNativeStackFrameVisitor visitor(cx, mp, np);
    VisitStackSlots(visitor, cx, callDepth);

    debug_only_print0(LC_TMTracer, "\n");
    return visitor.getTypeMap() - mp;
}
/* Emit load instructions onto the trace that read the initial stack state. */
JS_REQUIRES_STACK void
TraceRecorder::importImpl(Address addr, const void* p, JSValueType t,
                          const char *prefix, uintN index, JSStackFrame *fp)
{
    LIns* ins;
    if (t == JSVAL_TYPE_INT32) { /* demoted */
        JS_ASSERT(hasInt32Repr(*(const Value *)p));

        /*
         * Ok, we have a valid demotion attempt pending, so insert an integer
         * read and promote it to double since all arithmetic operations expect
         * to see doubles on entry. The first op to use this slot will emit a
         * d2i cast which will cancel out the i2d we insert here.
         */
        ins = w.ldi(addr);
        ins = w.i2d(ins);
    } else {
        JS_ASSERT_IF(t != JSVAL_TYPE_BOXED && !isFrameObjPtrTraceType(t),
                     ((const Value *)p)->isNumber() == (t == JSVAL_TYPE_DOUBLE));
        if (t == JSVAL_TYPE_DOUBLE) {
            ins = w.ldd(addr);
        } else if (t == JSVAL_TYPE_BOOLEAN) {
            ins = w.ldi(addr);
        } else if (t == JSVAL_TYPE_UNDEFINED) {
            ins = w.immiUndefined();
        } else if (t == JSVAL_TYPE_MAGIC) {
            ins = w.ldi(addr);
        } else {
            ins = w.ldp(addr);
        }
    }
    checkForGlobalObjectReallocation();
    tracker.set(p, ins);

#ifdef DEBUG
    char name[64];
    JS_ASSERT(strlen(prefix) < 11);
    void* mark = NULL;
    jsuword* localNames = NULL;
    const char* funName = NULL;
    JSAutoByteString funNameBytes;
    if (*prefix == 'a' || *prefix == 'v') {
        mark = JS_ARENA_MARK(&cx->tempPool);
        JSFunction *fun = fp->fun();
        Bindings &bindings = fun->script()->bindings;
        if (bindings.hasLocalNames())
            localNames = bindings.getLocalNameArray(cx, &cx->tempPool);
        funName = fun->atom
                  ? js_AtomToPrintableString(cx, fun->atom, &funNameBytes)
                  : "<anonymous>";
    }
    if (!strcmp(prefix, "argv")) {
        if (index < fp->numFormalArgs()) {
            JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
            JSAutoByteString atomBytes;
            JS_snprintf(name, sizeof name, "$%s.%s", funName,
                        js_AtomToPrintableString(cx, atom, &atomBytes));
        } else {
            JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
        }
    } else if (!strcmp(prefix, "vars")) {
        JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->numFormalArgs() + index]);
        JSAutoByteString atomBytes;
        JS_snprintf(name, sizeof name, "$%s.%s", funName,
                    js_AtomToPrintableString(cx, atom, &atomBytes));
    } else {
        JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
    }

    if (mark)
        JS_ARENA_RELEASE(&cx->tempPool, mark);
    w.name(ins, name);

    debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%c\n",
                      p, name, TypeToChar(t));
#endif
}
JS_REQUIRES_STACK void
TraceRecorder::import(Address addr, const Value* p, JSValueType t,
                      const char *prefix, uintN index, JSStackFrame *fp)
{
    return importImpl(addr, p, t, prefix, index, fp);
}
class ImportBoxedStackSlotVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    LIns *mBase;
    ptrdiff_t mStackOffset;
    JSValueType *mTypemap;

  public:
    ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
                                LIns *base,
                                ptrdiff_t stackOffset,
                                JSValueType *typemap) :
        mRecorder(recorder),
        mBase(base),
        mStackOffset(stackOffset),
        mTypemap(typemap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            if (*mTypemap == JSVAL_TYPE_BOXED) {
                mRecorder.import(StackAddress(mBase, mStackOffset), vp, JSVAL_TYPE_BOXED,
                                 "boxed", i, fp);
                LIns *vp_ins = mRecorder.unbox_value(*vp,
                                                     StackAddress(mBase, mStackOffset),
                                                     mRecorder.copy(mRecorder.anchor));
                mRecorder.set(vp, vp_ins);
            }
            vp++;
            mTypemap++;
            mStackOffset += sizeof(double);
        }
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame *fp) {
        JS_ASSERT(*mTypemap != JSVAL_TYPE_BOXED);
        mTypemap++;
        mStackOffset += sizeof(double);
        return true;
    }
};
JS_REQUIRES_STACK void
TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigned ngslots,
                      unsigned callDepth, JSValueType* typeMap)
{
    /*
     * If we get a partial list that doesn't have all the types (i.e. recording
     * from a side exit that was recorded but we added more global slots
     * later), merge the missing types from the entry type map. This is safe
     * because at the loop edge we verify that we have compatible types for all
     * globals (entry type and loop edge type match). While a different trace
     * of the tree might have had a guard with a different type map for these
     * slots we just filled in here (the guard we continue from didn't know
     * about them), since we didn't take that particular guard the only way we
     * could have ended up here is if that other trace had at its end a
     * compatible type distribution with the entry map. Since that's exactly
     * what we used to fill in the types our current side exit didn't provide,
     * this is always safe to do.
     */
    JSValueType* globalTypeMap = typeMap + stackSlots;
    unsigned length = tree->nGlobalTypes();

    /*
     * This is potentially the typemap of the side exit and thus shorter than
     * the tree's global type map.
     */
    if (ngslots < length) {
        MergeTypeMaps(&globalTypeMap /* out param */, &ngslots /* out param */,
                      tree->globalTypeMap(), length,
                      (JSValueType*)alloca(sizeof(JSValueType) * length));
    }
    JS_ASSERT(ngslots == tree->nGlobalTypes());

    /*
     * Check whether there are any values on the stack we have to unbox and do
     * that first before we waste any time fetching the state from the stack.
     */
    ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, -tree->nativeStackBase, typeMap);
    VisitStackSlots(boxedStackVisitor, cx, callDepth);

    /*
     * Remember the import type map so we can lazily import later whatever
     * we need.
     */
    importTypeMap.set(importStackSlots = stackSlots,
                      importGlobalSlots = ngslots,
                      typeMap, globalTypeMap);
}
JS_REQUIRES_STACK bool
TraceRecorder::isValidSlot(JSObject *obj, const Shape* shape)
{
    uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));

    if (setflags) {
        if (!shape->hasDefaultSetter())
            RETURN_VALUE("non-stub setter", false);
        if (!shape->writable())
            RETURN_VALUE("writing to a read-only property", false);
    }

    /* This check applies even when setflags == 0. */
    if (setflags != JOF_SET && !shape->hasDefaultGetter()) {
        JS_ASSERT(!shape->isMethod());
        RETURN_VALUE("non-stub getter", false);
    }

    if (!obj->containsSlot(shape->slot))
        RETURN_VALUE("invalid-slot obj property", false);

    return true;
}
/* Lazily import a global slot if we don't already have it in the tracker. */
JS_REQUIRES_STACK void
TraceRecorder::importGlobalSlot(unsigned slot)
{
    JS_ASSERT(slot == uint16(slot));
    JS_ASSERT(globalObj->numSlots() <= MAX_GLOBAL_SLOTS);

    Value* vp = &globalObj->getSlotRef(slot);
    JS_ASSERT(!known(vp));

    /* Add the slot to the list of interned global slots. */
    JSValueType type;
    int index = tree->globalSlots->offsetOf(uint16(slot));
    if (index == -1) {
        type = getCoercedType(*vp);
        if (type == JSVAL_TYPE_INT32 && (!oracle || oracle->isGlobalSlotUndemotable(cx, slot)))
            type = JSVAL_TYPE_DOUBLE;
        index = (int)tree->globalSlots->length();
        tree->globalSlots->add(uint16(slot));
        tree->typeMap.add(type);
        SpecializeTreesToMissingGlobals(cx, globalObj, tree);
        JS_ASSERT(tree->nGlobalTypes() == tree->globalSlots->length());
    } else {
        type = importTypeMap[importStackSlots + index];
    }
    import(EosAddress(eos_ins, slot * sizeof(double)), vp, type, "global", index, NULL);
}
/* Lazily import a global slot if we don't already have it in the tracker. */
JS_REQUIRES_STACK bool
TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
{
    if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
        return false;
    /*
     * If the global object grows too large, alloca in ExecuteTree might fail,
     * so abort tracing on global objects with unreasonably many slots.
     */
    if (globalObj->numSlots() > MAX_GLOBAL_SLOTS)
        return false;
    Value* vp = &globalObj->getSlotRef(slot);
    if (known(vp))
        return true; /* we already have it */
    importGlobalSlot(slot);
    return true;
}
/* Write back a value onto the stack or global frames. */
LIns*
TraceRecorder::writeBack(LIns* ins, LIns* base, ptrdiff_t offset, bool shouldDemoteToInt32)
{
    /*
     * Sink all type casts targeting the stack into the side exit by simply storing the original
     * (uncasted) value. Each guard generates the side exit map based on the types of the
     * last stores to every stack location, so it's safe to not perform them on-trace.
     */
    JS_ASSERT(base == lirbuf->sp || base == eos_ins);
    if (shouldDemoteToInt32 && IsPromotedInt32(ins))
        ins = w.demoteToInt32(ins);

    Address addr;
    if (base == lirbuf->sp) {
        addr = StackAddress(base, offset);
    } else {
        addr = EosAddress(base, offset);
        unsigned slot = unsigned(offset / sizeof(double));
        (void)pendingGlobalSlotsToSet.append(slot);  /* OOM is safe. */
    }
    return w.st(ins, addr);
}
/* Update the tracker, then issue a write back store. */
JS_REQUIRES_STACK void
TraceRecorder::setImpl(void* p, LIns* i, bool shouldDemoteToInt32)
{
    JS_ASSERT(i != NULL);
    checkForGlobalObjectReallocation();
    tracker.set(p, i);

    /*
     * If we are writing to this location for the first time, calculate the
     * offset into the native frame manually. Otherwise just look up the last
     * load or store associated with the same source address (p) and use the
     * same offset/base.
     */
    LIns* x = nativeFrameTracker.get(p);
    if (!x) {
        if (isVoidPtrGlobal(p))
            x = writeBack(i, eos_ins, nativeGlobalOffset((Value *)p), shouldDemoteToInt32);
        else
            x = writeBack(i, lirbuf->sp, nativespOffsetImpl(p), shouldDemoteToInt32);
        nativeFrameTracker.set(p, x);
    } else {
#if defined NANOJIT_64BIT
        JS_ASSERT( x->isop(LIR_stq) || x->isop(LIR_sti) || x->isop(LIR_std));
#else
        JS_ASSERT( x->isop(LIR_sti) || x->isop(LIR_std));
#endif

        ptrdiff_t disp;
        LIns *base = x->oprnd2();
        if (base->isop(LIR_addp) && base->oprnd2()->isImmP()) {
            disp = ptrdiff_t(base->oprnd2()->immP());
            base = base->oprnd1();
        } else {
            disp = x->disp();
        }

        JS_ASSERT(base == lirbuf->sp || base == eos_ins);
        JS_ASSERT(disp == ((base == lirbuf->sp)
                            ? nativespOffsetImpl(p)
                            : nativeGlobalOffset((Value *)p)));

        writeBack(i, base, disp, shouldDemoteToInt32);
    }
}
JS_REQUIRES_STACK inline void
TraceRecorder::set(Value* p, LIns* i, bool shouldDemoteToInt32)
{
    return setImpl(p, i, shouldDemoteToInt32);
}

JS_REQUIRES_STACK void
TraceRecorder::setFrameObjPtr(void* p, LIns* i, bool shouldDemoteToInt32)
{
    JS_ASSERT(isValidFrameObjPtr(p));
    return setImpl(p, i, shouldDemoteToInt32);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::attemptImport(const Value* p)
{
    if (LIns* i = getFromTracker(p))
        return i;

    /* If the variable was not known, it could require a lazy import. */
    CountSlotsVisitor countVisitor(p);
    VisitStackSlots(countVisitor, cx, callDepth);

    if (countVisitor.stopped() || size_t(p - cx->fp()->slots()) < cx->fp()->numSlots())
        return get(p);

    return NULL;
}
inline nanojit::LIns*
TraceRecorder::getFromTrackerImpl(const void* p)
{
    checkForGlobalObjectReallocation();
    return tracker.get(p);
}

inline nanojit::LIns*
TraceRecorder::getFromTracker(const Value* p)
{
    return getFromTrackerImpl(p);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::getImpl(const void *p)
{
    LIns* x = getFromTrackerImpl(p);
    if (x)
        return x;
    if (isVoidPtrGlobal(p)) {
        unsigned slot = nativeGlobalSlot((const Value *)p);
        JS_ASSERT(tree->globalSlots->offsetOf(uint16(slot)) != -1);
        importGlobalSlot(slot);
    } else {
        unsigned slot = nativeStackSlotImpl(p);
        JSValueType type = importTypeMap[slot];
        importImpl(StackAddress(lirbuf->sp, -tree->nativeStackBase + slot * sizeof(jsdouble)),
                   p, type, "stack", slot, cx->fp());
    }
    JS_ASSERT(knownImpl(p));
    return tracker.get(p);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::get(const Value *p)
{
    return getImpl(p);
}
bool
TraceRecorder::isValidFrameObjPtr(void *p)
{
    JSStackFrame *fp = cx->fp();
    for (; fp; fp = fp->prev()) {
        if (fp->addressOfScopeChain() == p || fp->addressOfArgs() == p)
            return true;
    }
    return false;
}

JS_REQUIRES_STACK LIns*
TraceRecorder::getFrameObjPtr(void *p)
{
    JS_ASSERT(isValidFrameObjPtr(p));
    return getImpl(p);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::addr(Value* p)
{
    return isGlobal(p)
        ? w.addp(eos_ins, w.nameImmw(nativeGlobalOffset(p)))
        : w.addp(lirbuf->sp, w.nameImmw(nativespOffset(p)));
}
JS_REQUIRES_STACK inline bool
TraceRecorder::knownImpl(const void* p)
{
    checkForGlobalObjectReallocation();
    return tracker.has(p);
}

JS_REQUIRES_STACK inline bool
TraceRecorder::known(const Value* vp)
{
    return knownImpl(vp);
}

JS_REQUIRES_STACK inline bool
TraceRecorder::known(JSObject** p)
{
    return knownImpl(p);
}
/*
 * The slots of the global object are sometimes reallocated by the interpreter.
 * This function checks for that condition and re-maps the entries of the tracker
 * accordingly.
 */
JS_REQUIRES_STACK void
TraceRecorder::checkForGlobalObjectReallocationHelper()
{
    debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
    Value* src = global_slots;
    Value* dst = globalObj->getSlots();
    jsuint length = globalObj->capacity;
    LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
    for (jsuint n = 0; n < length; ++n) {
        map[n] = tracker.get(src);
        tracker.set(src++, NULL);
    }
    for (jsuint n = 0; n < length; ++n)
        tracker.set(dst++, map[n]);
    global_slots = globalObj->getSlots();
}
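/*
 * Illustrative sketch (not part of the build): the tracker is keyed by the
 * address of each global Value, so after the interpreter reallocates
 * globalObj's slot array every entry has to be re-registered under its new
 * address, which is what the two loops above do via the temporary |map| array.
 */
#if 0
static void
ExampleRemapOneSlot(Tracker &tracker, Value *oldSlot, Value *newSlot)
{
    LIns *ins = tracker.get(oldSlot);   /* instruction recorded for the old address */
    tracker.set(oldSlot, NULL);
    tracker.set(newSlot, ins);          /* same instruction, new key */
}
#endif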
/* Determine whether the current branch is a loop edge (taken or not taken). */
static JS_REQUIRES_STACK bool
IsLoopEdge(jsbytecode* pc, jsbytecode* header)
{
    switch (*pc) {
      case JSOP_IFEQ:
      case JSOP_IFNE:
        return ((pc + GET_JUMP_OFFSET(pc)) == header);
      case JSOP_IFEQX:
      case JSOP_IFNEX:
        return ((pc + GET_JUMPX_OFFSET(pc)) == header);
      default:
        JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
                  (*pc == JSOP_OR) || (*pc == JSOP_ORX));
    }
    return false;
}
class AdjustCallerGlobalTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    nanojit::LirBuffer *mLirbuf;
    JSValueType *mTypeMap;

  public:
    AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
                                   JSValueType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mLirbuf(mRecorder.lirbuf),
        mTypeMap(typeMap)
    {}

    JSValueType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        LIns *ins = mRecorder.get(vp);
        bool isPromote = IsPromotedInt32(ins);
        if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
            mRecorder.w.st(mRecorder.get(vp),
                           EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
            /*
             * Aggressively undo speculation so the inner tree will compile
             * if possible.
             */
            JS_TRACE_MONITOR(mCx).oracle->markGlobalSlotUndemotable(mCx, slot);
        }
        JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
        ++mTypeMap;
    }
};
class AdjustCallerStackTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    nanojit::LirBuffer *mLirbuf;
    unsigned mSlotnum;
    JSValueType *mTypeMap;

  public:
    AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
                                  JSValueType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mLirbuf(mRecorder.lirbuf),
        mSlotnum(0),
        mTypeMap(typeMap)
    {}

    JSValueType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        /* N.B. vp may actually point to a JSObject*. */
        for (size_t i = 0; i < count; ++i) {
            LIns *ins = mRecorder.get(vp);
            bool isPromote = IsPromotedInt32(ins);
            if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
                mRecorder.w.st(ins, StackAddress(mLirbuf->sp, mRecorder.nativespOffset(vp)));
                /*
                 * Aggressively undo speculation so the inner tree will compile
                 * if possible.
                 */
                JS_TRACE_MONITOR(mCx).oracle->markStackSlotUndemotable(mCx, mSlotnum);
            }
            JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
            ++vp;
            ++mTypeMap;
            ++mSlotnum;
        }
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp) {
        JS_ASSERT(*mTypeMap != JSVAL_TYPE_BOXED);
        ++mTypeMap;
        ++mSlotnum;
        return true;
    }
};
/*
 * Promote slots if necessary to match the called tree's type map. This
 * function is infallible and must only be called if we are certain that it is
 * possible to reconcile the types for each slot in the inner and outer trees.
 */
JS_REQUIRES_STACK void
TraceRecorder::adjustCallerTypes(TreeFragment* f)
{
    AdjustCallerGlobalTypesVisitor globalVisitor(*this, f->globalTypeMap());
    VisitGlobalSlots(globalVisitor, cx, *tree->globalSlots);

    AdjustCallerStackTypesVisitor stackVisitor(*this, f->stackTypeMap());
    VisitStackSlots(stackVisitor, cx, 0);

    JS_ASSERT(f == f->root);
}
JS_REQUIRES_STACK inline JSValueType
TraceRecorder::determineSlotType(Value* vp)
{
    if (vp->isNumber()) {
        LIns *i = getFromTracker(vp);
        JSValueType t;
        if (i) {
            t = IsPromotedInt32(i) ? JSVAL_TYPE_INT32 : JSVAL_TYPE_DOUBLE;
        } else if (isGlobal(vp)) {
            int offset = tree->globalSlots->offsetOf(uint16(nativeGlobalSlot(vp)));
            JS_ASSERT(offset != -1);
            t = importTypeMap[importStackSlots + offset];
        } else {
            t = importTypeMap[nativeStackSlot(vp)];
        }
        JS_ASSERT_IF(t == JSVAL_TYPE_INT32, hasInt32Repr(*vp));
        return t;
    }

    if (vp->isObject())
        return vp->toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
    return vp->extractNonDoubleObjectTraceType();
}
class DetermineTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSValueType *mTypeMap;

  public:
    DetermineTypesVisitor(TraceRecorder &recorder,
                          JSValueType *typeMap) :
        mRecorder(recorder),
        mTypeMap(typeMap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        *mTypeMap++ = mRecorder.determineSlotType(vp);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i)
            *mTypeMap++ = mRecorder.determineSlotType(vp++);
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp) {
        *mTypeMap++ = getFrameObjPtrTraceType(p, fp);
        return true;
    }

    JSValueType* getTypeMap()
    {
        return mTypeMap;
    }
};
#if defined JS_JIT_SPEW
JS_REQUIRES_STACK static void
TreevisLogExit(JSContext* cx, VMSideExit* exit)
{
    debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
                      " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
                      (void*)exit->from, (void*)cx->regs->pc, cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()), FramePCOffset(cx, cx->fp()));
    debug_only_print0(LC_TMTreeVis, " STACK=\"");
    for (unsigned i = 0; i < exit->numStackSlots; i++)
        debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->stackTypeMap()[i]));
    debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
    for (unsigned i = 0; i < exit->numGlobalSlots; i++)
        debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->globalTypeMap()[i]));
    debug_only_print0(LC_TMTreeVis, "\"\n");
}
#endif
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::snapshot(ExitType exitType)
{
    JSStackFrame* const fp = cx->fp();
    JSFrameRegs* const regs = cx->regs;
    jsbytecode* pc = regs->pc;

    /*
     * Check for a return-value opcode that needs to restart at the next
     * instruction.
     */
    const JSCodeSpec& cs = js_CodeSpec[*pc];

    /*
     * When calling a _FAIL native, make the snapshot's pc point to the next
     * instruction after the CALL or APPLY. Even on failure, a _FAIL native
     * must not be called again from the interpreter.
     */
    bool resumeAfter = (pendingSpecializedNative &&
                        JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
    if (resumeAfter) {
        JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNAPPLY || *pc == JSOP_FUNCALL ||
                  *pc == JSOP_NEW || *pc == JSOP_SETPROP || *pc == JSOP_SETNAME);
        pc += cs.length;
        regs->pc = pc;
        MUST_FLOW_THROUGH("restore_pc");
    }

    /*
     * Generate the entry map for the (possibly advanced) pc and stash it in
     * the trace.
     */
    unsigned stackSlots = NativeStackSlots(cx, callDepth);

    /*
     * It's sufficient to track the native stack use here since all stores
     * above the stack watermark defined by guards are killed.
     */
    trackNativeStackUse(stackSlots + 1);

    /* Capture the type map into a temporary location. */
    unsigned ngslots = tree->globalSlots->length();
    unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSValueType);

    /* Use the recorder-local temporary type map. */
    JSValueType* typemap = NULL;
    if (tempTypeMap.resize(typemap_size))
        typemap = tempTypeMap.begin(); /* crash if resize() fails. */

    /*
     * Determine the type of a store by looking at the current type of the
     * actual value the interpreter is using. For numbers we have to check what
     * kind of store we used last (integer or double) to figure out what the
     * side exit should reflect in its typemap.
     */
    DetermineTypesVisitor detVisitor(*this, typemap);
    VisitSlots(detVisitor, cx, callDepth, ngslots,
               tree->globalSlots->data());
    JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) ==
              ngslots + stackSlots);

    /*
     * If this snapshot is for a side exit that leaves a boxed Value result on
     * the stack, make a note of this in the typemap. Examples include the
     * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
     * GetPropertyByName; and the type guard in unbox_value after such a call
     * (also at the beginning of a trace branched from such a type guard).
     */
    if (pendingUnboxSlot ||
        (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) {
        unsigned pos = stackSlots - 1;
        if (pendingUnboxSlot == cx->regs->sp - 2)
            pos = stackSlots - 2;
        typemap[pos] = JSVAL_TYPE_BOXED;
    } else if (pendingSpecializedNative &&
               (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_STR)) {
        typemap[stackSlots - 1] = JSVAL_TYPE_STRORNULL;
    } else if (pendingSpecializedNative &&
               (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_OBJ)) {
        typemap[stackSlots - 1] = JSVAL_TYPE_OBJORNULL;
    }

    /* Now restore the original pc (after which early returns are ok). */
    if (resumeAfter) {
        MUST_FLOW_LABEL(restore_pc);
        regs->pc = pc - cs.length;
    } else {
        /*
         * If we take a snapshot on a goto, advance to the target address. This
         * avoids inner trees returning on a break goto, which the outer
         * recorder then would confuse with a break in the outer tree.
         */
        if (*pc == JSOP_GOTO)
            pc += GET_JUMP_OFFSET(pc);
        else if (*pc == JSOP_GOTOX)
            pc += GET_JUMPX_OFFSET(pc);
    }

    /*
     * Check if we already have a matching side exit; if so we can return that
     * side exit instead of creating a new one.
     */
    VMSideExit** exits = tree->sideExits.data();
    unsigned nexits = tree->sideExits.length();
    if (exitType == LOOP_EXIT) {
        for (unsigned n = 0; n < nexits; ++n) {
            VMSideExit* e = exits[n];
            if (e->pc == pc && (e->imacpc == fp->maybeImacropc()) &&
                ngslots == e->numGlobalSlots &&
                !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
                AUDIT(mergedLoopExits);
#if defined JS_JIT_SPEW
                TreevisLogExit(cx, e);
#endif
                return e;
            }
        }
    }

    /* We couldn't find a matching side exit, so create a new one. */
    VMSideExit* exit = (VMSideExit*)
        traceAlloc().alloc(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSValueType));

    /* Setup side exit structure. */
    exit->from = fragment;
    exit->calldepth = callDepth;
    exit->numGlobalSlots = ngslots;
    exit->numStackSlots = stackSlots;
    exit->numStackSlotsBelowCurrentFrame = cx->fp()->isFunctionFrame() ?
                                           nativeStackOffset(&cx->fp()->calleeValue()) / sizeof(double) :
                                           0;
    exit->exitType = exitType;
    exit->pc = pc;
    exit->imacpc = fp->maybeImacropc();
    exit->sp_adj = (stackSlots * sizeof(double)) - tree->nativeStackBase;
    exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
    exit->lookupFlags = js_InferFlags(cx, 0);
    memcpy(exit->fullTypeMap(), typemap, typemap_size);

#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif
    return exit;
}
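
//
// Illustrative sketch (not part of the build): the typemap captured by
// snapshot() is laid out as all stack slots followed by all global slots.
// For a hypothetical exit with 3 stack slots and 2 global slots:
//
//     JSValueType* map     = exit->fullTypeMap();
//     JSValueType* stack   = map;                        // map[0..2]
//     JSValueType* globals = map + exit->numStackSlots;  // map[3..4]
//
// which matches the assertion above that DetermineTypesVisitor wrote exactly
// ngslots + stackSlots entries.
//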
JS_REQUIRES_STACK GuardRecord*
TraceRecorder::createGuardRecord(VMSideExit* exit)
{
#ifdef DEBUG
    // For debug builds, place the guard records in a longer lasting
    // pool.  This is because the fragment profiler will look at them
    // relatively late in the day, after they would have been freed,
    // in some cases, had they been allocated in traceAlloc().
    GuardRecord* gr = new (dataAlloc()) GuardRecord();
#else
    // The standard place (for production builds).
    GuardRecord* gr = new (traceAlloc()) GuardRecord();
#endif

    gr->exit = exit;
    exit->addGuard(gr);

    // gr->profCount is calloc'd to zero
    verbose_only(
        gr->profGuardID = fragment->guardNumberer++;
        gr->nextInFrag = fragment->guardsForFrag;
        fragment->guardsForFrag = gr;
    )

    return gr;
}
/* Test if 'ins' is in a form that can be used as a guard/branch condition. */
static bool
isCond(LIns* ins)
{
    return ins->isCmp() || ins->isImmI(0) || ins->isImmI(1);
}

/* Ensure 'ins' is in a form suitable for a guard/branch condition. */
void
TraceRecorder::ensureCond(LIns** ins, bool* cond)
{
    if (!isCond(*ins)) {
        *cond = !*cond;
        *ins = (*ins)->isI() ? w.eqi0(*ins) : w.eqp0(*ins);
    }
}
/*
 * Emit a guard for condition (cond), expecting to evaluate to boolean result
 * (expected) and using the supplied side exit if the condition doesn't hold.
 *
 * Callers shouldn't generate guards that always exit (which can occur due to
 * optimization of the guard condition) because it's bad for both compile-time
 * speed (all the code generated after the guard is dead) and run-time speed
 * (fragments that always exit are slow). This function has two modes for
 * handling an always-exit guard; which mode is used depends on the value of
 * abortIfAlwaysExits:
 *
 * - abortIfAlwaysExits == false: This is the default mode. If the guard
 *   will always exit, we assert (in debug builds) as a signal that we are
 *   generating bad traces. (In optimized builds that lack assertions the
 *   guard will be generated correctly, so the code will be slow but safe.) In
 *   this mode, the caller is responsible for not generating an always-exit
 *   guard. The return value will always be RECORD_CONTINUE, so the caller
 *   need not check it.
 *
 * - abortIfAlwaysExits == true: If the guard will always exit, we abort
 *   recording and return RECORD_STOP; otherwise we generate the guard
 *   normally and return RECORD_CONTINUE. This mode can be used when the
 *   caller doesn't know ahead of time whether the guard will always exit. In
 *   this mode, the caller must check the return value.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit,
                     bool abortIfAlwaysExits /* = false */)
{
    if (exit->exitType == LOOP_EXIT)
        tree->sideExits.add(exit);

    JS_ASSERT(isCond(cond));

    if ((cond->isImmI(0) && expected) || (cond->isImmI(1) && !expected)) {
        if (abortIfAlwaysExits) {
            /* The guard always exits, the caller must check for an abort. */
            RETURN_STOP("Constantly false guard detected");
        }
        /*
         * If you hit this assertion, first decide if you want recording to
         * abort in the case where the guard always exits. If not, find a way
         * to detect that case and avoid calling guard(). Otherwise, change
         * the invocation of guard() so it passes in abortIfAlwaysExits=true,
         * and have the caller check the return value, eg. using
         * CHECK_STATUS(). (In optimized builds, we'll fall through to the
         * insGuard() below and an always-exits guard will be inserted, which
         * is correct but sub-optimal.)
         */
        JS_NOT_REACHED("unexpected constantly false guard detected");
    }

    /*
     * Nb: if the guard is never taken, no instruction will be created and
     * insGuard() will return NULL. This is a good thing.
     */
    GuardRecord* guardRec = createGuardRecord(exit);
    expected ? w.xf(cond, guardRec) : w.xt(cond, guardRec);
    return RECORD_CONTINUE;
}
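
//
// Illustrative sketch (not part of the build): a caller that cannot rule out a
// constantly-false condition is expected to opt into the abort mode and check
// the result, for example:
//
//     // 'condIns' is a hypothetical condition instruction.
//     CHECK_STATUS(guard(true, condIns, MISMATCH_EXIT,
//                        /* abortIfAlwaysExits = */ true));
//
// With the default mode the return value can be ignored, since it is always
// RECORD_CONTINUE.
//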
/*
 * Emit a guard for condition (cond), expecting to evaluate to boolean result
 * (expected) and generate a side exit with type exitType to jump to if the
 * condition does not hold.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType,
                     bool abortIfAlwaysExits /* = false */)
{
    return guard(expected, cond, snapshot(exitType), abortIfAlwaysExits);
}

/*
 * Emit a guard for a 32-bit integer arithmetic operation op(d0, d1),
 * using the supplied side exit if it overflows.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::guard_xov(LOpcode op, LIns* d0, LIns* d1, VMSideExit* exit)
{
    JS_ASSERT(exit->exitType == OVERFLOW_EXIT);

    GuardRecord* guardRec = createGuardRecord(exit);
    switch (op) {
      case LIR_addi:
        return w.addxovi(d0, d1, guardRec);
      case LIR_subi:
        return w.subxovi(d0, d1, guardRec);
      case LIR_muli:
        return w.mulxovi(d0, d1, guardRec);
      default:
        break;
    }
    JS_NOT_REACHED("unexpected opcode");
    return NULL;
}
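
//
// Illustrative sketch (not part of the build): recording an int32 addition
// that must fall off trace on overflow. 'd0' and 'd1' are hypothetical operand
// instructions already known to be int32, and LIR_addi names the 32-bit add
// opcode (assumed here).
//
//     VMSideExit* exit = snapshot(OVERFLOW_EXIT);
//     LIns* sum = guard_xov(LIR_addi, d0, d1, exit);
//
// The returned instruction is the checked result; if the addition overflows at
// run time, execution leaves the trace through 'exit'.
//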
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::copy(VMSideExit* copy)
{
    size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
    VMSideExit* exit = (VMSideExit*)
        traceAlloc().alloc(sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));

    /* Copy side exit structure. */
    memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));
    exit->guards = NULL;
    exit->from = fragment;
    exit->target = NULL;

    if (exit->exitType == LOOP_EXIT)
        tree->sideExits.add(exit);
#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif
    return exit;
}
/*
 * Determine whether any context associated with the same thread as cx is
 * executing native code.
 */
static inline bool
ProhibitFlush(TraceMonitor *tm)
{
    return !!tm->tracerState; // don't flush if we're running a trace
}

static void
ResetJITImpl(JSContext* cx)
{
    if (!cx->traceJitEnabled)
        return;
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    debug_only_print0(LC_TMTracer, "Flushing cache.\n");
    if (tm->recorder) {
        JS_ASSERT_NOT_ON_TRACE(cx);
        AbortRecording(cx, "flush cache");
    }
    if (ProhibitFlush(tm)) {
        debug_only_print0(LC_TMTracer, "Deferring JIT flush due to deep bail.\n");
        tm->needFlush = JS_TRUE;
        return;
    }
    tm->flush();
}
/* Compile the current fragment. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::compile()
{
#ifdef MOZ_TRACEVIS
    TraceVisStateObj tvso(cx, S_COMPILE);
#endif

    if (traceMonitor->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return ARECORD_ABORTED;
    }
    if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
        debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
        Blacklist((jsbytecode*)tree->ip);
        return ARECORD_STOP;
    }
    if (anchor && anchor->exitType != CASE_EXIT)
        ++tree->branchCount;
    if (outOfMemory())
        return ARECORD_STOP;

    /* :TODO: windows support */
#if defined DEBUG && !defined WIN32
    /* Associate a filename and line number with the fragment. */
    const char* filename = cx->fp()->script()->filename;
    char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
    if (label) {
        sprintf(label, "%s:%u", filename ? filename : "<stdin>",
                js_FramePCToLineNumber(cx, cx->fp()));
        lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
        js_free(label);
    }
#endif

    Assembler *assm = traceMonitor->assembler;
    JS_ASSERT(!assm->error());
    assm->compile(fragment, tempAlloc(), /*optimize*/true verbose_only(, lirbuf->printer));

    if (assm->error()) {
        assm->setError(nanojit::None);
        debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
        Blacklist((jsbytecode*)tree->ip);
        return ARECORD_STOP;
    }

    if (outOfMemory())
        return ARECORD_STOP;
    ResetRecordingAttempts(cx, (jsbytecode*)fragment->ip);
    ResetRecordingAttempts(cx, (jsbytecode*)tree->ip);
    if (anchor) {
#ifdef NANOJIT_IA32
        if (anchor->exitType == CASE_EXIT)
            assm->patch(anchor, anchor->switchInfo);
        else
#endif
            assm->patch(anchor);
    }
    JS_ASSERT(fragment->code());
    JS_ASSERT_IF(fragment == fragment->root, fragment->root == tree);

    return ARECORD_CONTINUE;
}
static void
JoinPeers(Assembler* assm, VMSideExit* exit, TreeFragment* target)
{
    exit->target = target;
    assm->patch(exit);

    debug_only_printf(LC_TMTreeVis, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit, (void*)target);

    if (exit->root() == target)
        return;

    target->dependentTrees.addUnique(exit->root());
    exit->root()->linkedTrees.addUnique(target);
}

/* Results of trying to connect an arbitrary type A with arbitrary type B */
enum TypeCheckResult
{
    TypeCheck_Okay,         /* Okay: same type */
    TypeCheck_Promote,      /* Okay: Type A needs d2i() */
    TypeCheck_Demote,       /* Okay: Type A needs i2d() */
    TypeCheck_Undemote,     /* Bad: Slot is undemotable */
    TypeCheck_Bad           /* Bad: incompatible types */
};
4623 class SlotMap
: public SlotVisitorBase
4629 : vp(NULL
), isPromotedInt32(false), lastCheck(TypeCheck_Bad
)
4631 SlotInfo(Value
* vp
, bool isPromotedInt32
)
4632 : vp(vp
), isPromotedInt32(isPromotedInt32
), lastCheck(TypeCheck_Bad
),
4633 type(getCoercedType(*vp
))
4635 SlotInfo(JSValueType t
)
4636 : vp(NULL
), isPromotedInt32(false), lastCheck(TypeCheck_Bad
), type(t
)
4638 SlotInfo(Value
* vp
, JSValueType t
)
4639 : vp(vp
), isPromotedInt32(t
== JSVAL_TYPE_INT32
), lastCheck(TypeCheck_Bad
), type(t
)
4642 bool isPromotedInt32
;
4643 TypeCheckResult lastCheck
;
4647 SlotMap(TraceRecorder
& rec
)
4658 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4659 visitGlobalSlot(Value
*vp
, unsigned n
, unsigned slot
)
4664 JS_ALWAYS_INLINE
SlotMap::SlotInfo
&
4665 operator [](unsigned i
)
4670 JS_ALWAYS_INLINE
SlotMap::SlotInfo
&
4676 JS_ALWAYS_INLINE
unsigned
4679 return slots
.length();
4683 * Possible return states:
4685 * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
4687 * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
4688 * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
4689 * before recording began. Caller can go through slot list and mark
4690 * such slots as undemotable.
4692 JS_REQUIRES_STACK TypeConsensus
4693 checkTypes(LinkableFragment
* f
)
4695 if (length() != f
->typeMap
.length())
4696 return TypeConsensus_Bad
;
4698 bool has_undemotes
= false;
4699 for (unsigned i
= 0; i
< length(); i
++) {
4700 TypeCheckResult result
= checkType(i
, f
->typeMap
[i
]);
4701 if (result
== TypeCheck_Bad
)
4702 return TypeConsensus_Bad
;
4703 if (result
== TypeCheck_Undemote
)
4704 has_undemotes
= true;
4705 slots
[i
].lastCheck
= result
;
4708 return TypeConsensus_Undemotes
;
4709 return TypeConsensus_Okay
;
4712 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4715 bool isPromotedInt32
= false;
4716 if (vp
->isNumber()) {
4717 if (LIns
* i
= mRecorder
.getFromTracker(vp
)) {
4718 isPromotedInt32
= IsPromotedInt32(i
);
4719 } else if (mRecorder
.isGlobal(vp
)) {
4720 int offset
= mRecorder
.tree
->globalSlots
->offsetOf(uint16(mRecorder
.nativeGlobalSlot(vp
)));
4721 JS_ASSERT(offset
!= -1);
4722 isPromotedInt32
= mRecorder
.importTypeMap
[mRecorder
.importStackSlots
+ offset
] ==
4725 isPromotedInt32
= mRecorder
.importTypeMap
[mRecorder
.nativeStackSlot(vp
)] ==
4729 slots
.add(SlotInfo(vp
, isPromotedInt32
));
4732 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4733 addSlot(JSValueType t
)
4735 slots
.add(SlotInfo(NULL
, t
));
4738 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4739 addSlot(Value
*vp
, JSValueType t
)
4741 slots
.add(SlotInfo(vp
, t
));
4744 JS_REQUIRES_STACK
void
4747 for (unsigned i
= 0; i
< length(); i
++) {
4748 if (get(i
).lastCheck
== TypeCheck_Undemote
)
4749 mRecorder
.markSlotUndemotable(mRecorder
.tree
, i
);
4753 JS_REQUIRES_STACK
virtual void
4756 for (unsigned i
= 0; i
< length(); i
++)
4761 JS_REQUIRES_STACK
virtual void
4762 adjustType(SlotInfo
& info
) {
4763 JS_ASSERT(info
.lastCheck
!= TypeCheck_Undemote
&& info
.lastCheck
!= TypeCheck_Bad
);
4765 if (info
.lastCheck
== TypeCheck_Promote
) {
4766 JS_ASSERT(info
.type
== JSVAL_TYPE_INT32
|| info
.type
== JSVAL_TYPE_DOUBLE
);
4768 * This should only happen if the slot has a trivial conversion, i.e.
4769 * IsPromotedInt32() is true. We check this.
4771 * Note that getFromTracker() will return NULL if the slot was
4772 * never used, in which case we don't do the check. We could
4773 * instead called mRecorder.get(info.vp) and always check, but
4774 * get() has side-effects, which is not good in an assertion.
4775 * Not checking unused slots isn't so bad.
4777 LIns
* ins
= mRecorder
.getFromTrackerImpl(info
.vp
);
4778 JS_ASSERT_IF(ins
, IsPromotedInt32(ins
));
4781 if (info
.lastCheck
== TypeCheck_Demote
) {
4782 JS_ASSERT(info
.type
== JSVAL_TYPE_INT32
|| info
.type
== JSVAL_TYPE_DOUBLE
);
4783 JS_ASSERT(mRecorder
.getImpl(info
.vp
)->isD());
4785 /* Never demote this final i2d. */
4786 mRecorder
.setImpl(info
.vp
, mRecorder
.getImpl(info
.vp
), false);
4792 checkType(unsigned i
, JSValueType t
)
4794 debug_only_printf(LC_TMTracer
,
4795 "checkType slot %d: interp=%c typemap=%c isNum=%d isPromotedInt32=%d\n",
4797 TypeToChar(slots
[i
].type
),
4799 slots
[i
].type
== JSVAL_TYPE_INT32
|| slots
[i
].type
== JSVAL_TYPE_DOUBLE
,
4800 slots
[i
].isPromotedInt32
);
4802 case JSVAL_TYPE_INT32
:
4803 if (slots
[i
].type
!= JSVAL_TYPE_INT32
&& slots
[i
].type
!= JSVAL_TYPE_DOUBLE
)
4804 return TypeCheck_Bad
; /* Not a number? Type mismatch. */
4805 /* This is always a type mismatch, we can't close a double to an int. */
4806 if (!slots
[i
].isPromotedInt32
)
4807 return TypeCheck_Undemote
;
4808 /* Looks good, slot is an int32, the last instruction should be promotable. */
4809 JS_ASSERT_IF(slots
[i
].vp
,
4810 hasInt32Repr(*(const Value
*)slots
[i
].vp
) && slots
[i
].isPromotedInt32
);
4811 return slots
[i
].vp
? TypeCheck_Promote
: TypeCheck_Okay
;
4812 case JSVAL_TYPE_DOUBLE
:
4813 if (slots
[i
].type
!= JSVAL_TYPE_INT32
&& slots
[i
].type
!= JSVAL_TYPE_DOUBLE
)
4814 return TypeCheck_Bad
; /* Not a number? Type mismatch. */
4815 if (slots
[i
].isPromotedInt32
)
4816 return slots
[i
].vp
? TypeCheck_Demote
: TypeCheck_Bad
;
4817 return TypeCheck_Okay
;
4819 return slots
[i
].type
== t
? TypeCheck_Okay
: TypeCheck_Bad
;
4821 JS_NOT_REACHED("shouldn't fall through type check switch");
4824 TraceRecorder
& mRecorder
;
4826 Queue
<SlotInfo
> slots
;
class DefaultSlotMap : public SlotMap
{
  public:
    DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
    {
    }

    virtual ~DefaultSlotMap()
    {
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp)
    {
        for (size_t i = 0; i < count; i++)
            addSlot(&vp[i]);
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp)
    {
        addSlot(getFrameObjPtrTraceType(p, fp));
        return true;
    }
};
JS_REQUIRES_STACK TypeConsensus
TraceRecorder::selfTypeStability(SlotMap& slotMap)
{
    debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment);
    TypeConsensus consensus = slotMap.checkTypes(tree);

    /* Best case: loop jumps back to its own header */
    if (consensus == TypeConsensus_Okay)
        return TypeConsensus_Okay;

    /*
     * If the only thing keeping this loop from being stable is undemotions, then mark relevant
     * slots as undemotable.
     */
    if (consensus == TypeConsensus_Undemotes)
        slotMap.markUndemotes();

    return consensus;
}

JS_REQUIRES_STACK TypeConsensus
TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, TreeFragment** pPeer)
{
    JS_ASSERT(tree->first == LookupLoop(traceMonitor, ip, tree->globalObj, tree->globalShape, tree->argc));

    /* See if there are any peers that would make this stable */
    bool onlyUndemotes = false;
    for (TreeFragment *peer = tree->first; peer != NULL; peer = peer->peer) {
        if (!peer->code() || peer == fragment)
            continue;
        debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
        TypeConsensus consensus = slotMap.checkTypes(peer);
        if (consensus == TypeConsensus_Okay) {
            *pPeer = peer;
            /*
             * Return this even though there will be linkage; the trace itself is not stable.
             * Caller should inspect ppeer to check for a compatible peer.
             */
            return TypeConsensus_Okay;
        }
        if (consensus == TypeConsensus_Undemotes)
            onlyUndemotes = true;
    }

    return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
}
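
//
// Illustrative sketch (not part of the build): how the consensus returned by
// SlotMap::checkTypes() is consumed. 'frag' stands for a hypothetical
// LinkableFragment being compared against.
//
//     DefaultSlotMap slotMap(*this);
//     VisitSlots(slotMap, cx, 0, *tree->globalSlots);
//     switch (slotMap.checkTypes(frag)) {
//       case TypeConsensus_Okay:      slotMap.adjustTypes();   break;  // link now
//       case TypeConsensus_Undemotes: slotMap.markUndemotes(); break;  // retry later
//       case TypeConsensus_Bad:       /* cannot connect */     break;
//     }
//
// selfTypeStability()/peerTypeStability() above and closeLoop() below follow
// exactly this pattern, first against the tree itself and then its peers.
//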
4904 * Complete and compile a trace and link it to the existing tree if
4905 * appropriate. Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
4906 * the recorder was deleted. Outparam is always set.
4908 JS_REQUIRES_STACK AbortableRecordingStatus
4909 TraceRecorder::closeLoop()
4911 VMSideExit
*exit
= snapshot(UNSTABLE_LOOP_EXIT
);
4913 DefaultSlotMap
slotMap(*this);
4914 VisitSlots(slotMap
, cx
, 0, *tree
->globalSlots
);
4917 * We should have arrived back at the loop header, and hence we don't want
4918 * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
4919 * case this loop was blacklisted in the meantime, JSOP_NOTRACE.
4921 JS_ASSERT(*cx
->regs
->pc
== JSOP_TRACE
|| *cx
->regs
->pc
== JSOP_NOTRACE
);
4922 JS_ASSERT(!cx
->fp()->hasImacropc());
4924 if (callDepth
!= 0) {
4925 debug_only_print0(LC_TMTracer
,
4926 "Blacklisted: stack depth mismatch, possible recursion.\n");
4927 Blacklist((jsbytecode
*)tree
->ip
);
4929 return ARECORD_STOP
;
4932 JS_ASSERT(exit
->numStackSlots
== tree
->nStackTypes
);
4933 JS_ASSERT(fragment
->root
== tree
);
4934 JS_ASSERT(!trashSelf
);
4936 TreeFragment
* peer
= NULL
;
4938 TypeConsensus consensus
= selfTypeStability(slotMap
);
4939 if (consensus
!= TypeConsensus_Okay
) {
4940 TypeConsensus peerConsensus
= peerTypeStability(slotMap
, tree
->ip
, &peer
);
4941 /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
4942 if (peerConsensus
!= TypeConsensus_Bad
)
4943 consensus
= peerConsensus
;
4947 if (consensus
!= TypeConsensus_Okay
|| peer
)
4948 AUDIT(unstableLoopVariable
);
4952 * This exit is indeed linkable to something now. Process any promote or
4953 * demotes that are pending in the slot map.
4955 if (consensus
== TypeConsensus_Okay
)
4956 slotMap
.adjustTypes();
4958 if (consensus
!= TypeConsensus_Okay
|| peer
) {
4959 fragment
->lastIns
= w
.x(createGuardRecord(exit
));
4961 /* If there is a peer, there must have been an "Okay" consensus. */
4962 JS_ASSERT_IF(peer
, consensus
== TypeConsensus_Okay
);
4964 /* Compile as a type-unstable loop, and hope for a connection later. */
4967 * If such a fragment does not exist, let's compile the loop ahead
4968 * of time anyway. Later, if the loop becomes type stable, we will
4969 * connect these two fragments together.
4971 debug_only_print0(LC_TMTracer
,
4972 "Trace has unstable loop variable with no stable peer, "
4973 "compiling anyway.\n");
4974 UnstableExit
* uexit
= new (traceAlloc()) UnstableExit
;
4975 uexit
->fragment
= fragment
;
4977 uexit
->next
= tree
->unstableExits
;
4978 tree
->unstableExits
= uexit
;
4980 JS_ASSERT(peer
->code());
4981 exit
->target
= peer
;
4982 debug_only_printf(LC_TMTracer
,
4983 "Joining type-unstable trace to target fragment %p.\n",
4985 peer
->dependentTrees
.addUnique(tree
);
4986 tree
->linkedTrees
.addUnique(peer
);
4989 exit
->exitType
= LOOP_EXIT
;
4990 debug_only_printf(LC_TMTreeVis
, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit
,
4991 getExitName(LOOP_EXIT
));
4993 JS_ASSERT((fragment
== fragment
->root
) == !!loopLabel
);
4996 w
.comment("end-loop");
4997 w
.livep(lirbuf
->state
);
5000 exit
->target
= tree
;
5002 * This guard is dead code. However, it must be present because it
5003 * can keep alive values on the stack. Without it, StackFilter can
5004 * remove some stack stores that it shouldn't. See bug 582766 comment
5007 fragment
->lastIns
= w
.x(createGuardRecord(exit
));
5010 CHECK_STATUS_A(compile());
5012 debug_only_printf(LC_TMTreeVis
, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit
, (void*)peer
);
5014 JS_ASSERT(LookupLoop(traceMonitor
, tree
->ip
, tree
->globalObj
, tree
->globalShape
, tree
->argc
) ==
5016 JS_ASSERT(tree
->first
);
5019 joinEdgesToEntry(peer
);
5021 debug_only_stmt(DumpPeerStability(traceMonitor
, peer
->ip
, peer
->globalObj
,
5022 peer
->globalShape
, peer
->argc
);)
5024 debug_only_print0(LC_TMTracer
,
5025 "updating specializations on dependent and linked trees\n");
5027 SpecializeTreesToMissingGlobals(cx
, globalObj
, tree
);
5030 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
5031 * should try to compile the outer tree again.
5034 AttemptCompilation(cx
, globalObj
, outerScript
, outerPC
, outerArgc
);
5036 debug_only_printf(LC_TMMinimal
,
5037 "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n",
5038 cx
->fp()->script()->filename
,
5039 js_FramePCToLineNumber(cx
, cx
->fp()),
5040 FramePCOffset(cx
, cx
->fp()),
5041 fragment
->profFragID
);
5042 debug_only_print0(LC_TMMinimal
, "\n");
5045 return finishSuccessfully();
static void
FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
{
    typeMap.setLength(0);
    typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
    typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
    /* Include globals that were later specialized at the root of the tree. */
    if (exit->numGlobalSlots < exit->root()->nGlobalTypes()) {
        typeMap.fromRaw(exit->root()->globalTypeMap() + exit->numGlobalSlots,
                        exit->root()->nGlobalTypes() - exit->numGlobalSlots);
    }
}

static JS_REQUIRES_STACK TypeConsensus
TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, TreeFragment* peer)
{
    const TypeMap& peerMap = peer->typeMap;
    unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
    TypeConsensus consensus = TypeConsensus_Okay;
    for (unsigned i = 0; i < minSlots; i++) {
        if (typeMap[i] == peerMap[i])
            continue;
        if (typeMap[i] == JSVAL_TYPE_INT32 && peerMap[i] == JSVAL_TYPE_DOUBLE &&
            IsSlotUndemotable(JS_TRACE_MONITOR(cx).oracle, cx, peer, i, peer->ip)) {
            consensus = TypeConsensus_Undemotes;
        } else {
            return TypeConsensus_Bad;
        }
    }
    return consensus;
}
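
//
// Illustrative sketch (not part of the build): TypeMapLinkability() above
// classifies an exit typemap against a peer. With hypothetical maps:
//
//     exit:  [ INT32,  DOUBLE, STRING ]
//     peer:  [ INT32,  DOUBLE, STRING ]  -> TypeConsensus_Okay
//     peer:  [ DOUBLE, DOUBLE, STRING ]  -> TypeConsensus_Undemotes when the
//                                           oracle reports slot 0 undemotable,
//                                           otherwise TypeConsensus_Bad
//     peer:  [ INT32,  STRING, STRING ]  -> TypeConsensus_Bad
//
// Only the int32 -> double direction can ever be reconciled, by undemoting.
//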
5080 JS_REQUIRES_STACK
unsigned
5081 TraceRecorder::findUndemotesInTypemaps(const TypeMap
& typeMap
, LinkableFragment
* f
,
5082 Queue
<unsigned>& undemotes
)
5084 undemotes
.setLength(0);
5085 unsigned minSlots
= JS_MIN(typeMap
.length(), f
->typeMap
.length());
5086 for (unsigned i
= 0; i
< minSlots
; i
++) {
5087 if (typeMap
[i
] == JSVAL_TYPE_INT32
&& f
->typeMap
[i
] == JSVAL_TYPE_DOUBLE
) {
5089 } else if (typeMap
[i
] != f
->typeMap
[i
]) {
5093 for (unsigned i
= 0; i
< undemotes
.length(); i
++)
5094 markSlotUndemotable(f
, undemotes
[i
]);
5095 return undemotes
.length();
5098 JS_REQUIRES_STACK
void
5099 TraceRecorder::joinEdgesToEntry(TreeFragment
* peer_root
)
5101 if (fragment
->root
!= fragment
)
5104 TypeMap
typeMap(NULL
);
5105 Queue
<unsigned> undemotes(NULL
);
5107 for (TreeFragment
* peer
= peer_root
; peer
; peer
= peer
->peer
) {
5110 UnstableExit
* uexit
= peer
->unstableExits
;
5111 while (uexit
!= NULL
) {
5112 /* Build the full typemap for this unstable exit */
5113 FullMapFromExit(typeMap
, uexit
->exit
);
5114 /* Check its compatibility against this tree */
5115 TypeConsensus consensus
= TypeMapLinkability(cx
, typeMap
, tree
);
5116 JS_ASSERT_IF(consensus
== TypeConsensus_Okay
, peer
!= fragment
);
5117 if (consensus
== TypeConsensus_Okay
) {
5118 debug_only_printf(LC_TMTracer
,
5119 "Joining type-stable trace to target exit %p->%p.\n",
5120 (void*)uexit
->fragment
, (void*)uexit
->exit
);
5123 * See bug 531513. Before linking these trees, make sure the
5124 * peer's dependency graph is up to date.
5126 TreeFragment
* from
= uexit
->exit
->root();
5127 if (from
->nGlobalTypes() < tree
->nGlobalTypes()) {
5128 SpecializeTreesToLateGlobals(cx
, from
, tree
->globalTypeMap(),
5129 tree
->nGlobalTypes());
5132 /* It's okay! Link together and remove the unstable exit. */
5133 JS_ASSERT(tree
== fragment
);
5134 JoinPeers(traceMonitor
->assembler
, uexit
->exit
, tree
);
5135 uexit
= peer
->removeUnstableExit(uexit
->exit
);
5137 /* Check for int32->double slots that suggest trashing. */
5138 if (findUndemotesInTypemaps(typeMap
, tree
, undemotes
)) {
5139 JS_ASSERT(peer
== uexit
->fragment
->root
);
5140 if (fragment
== peer
)
5143 whichTreesToTrash
.addUnique(uexit
->fragment
->root
);
5146 uexit
= uexit
->next
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::endLoop()
{
    return endLoop(snapshot(LOOP_EXIT));
}

/* Emit an always-exit guard and compile the tree (used for break statements). */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::endLoop(VMSideExit* exit)
{
    JS_ASSERT(fragment->root == tree);

    if (callDepth != 0) {
        debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
        Blacklist((jsbytecode*)tree->ip);
        trashSelf = true;
        return ARECORD_STOP;
    }

    fragment->lastIns = w.x(createGuardRecord(exit));

    CHECK_STATUS_A(compile());

    debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);

    JS_ASSERT(LookupLoop(traceMonitor, tree->ip, tree->globalObj, tree->globalShape, tree->argc) ==
              tree->first);

    joinEdgesToEntry(tree->first);

    debug_only_stmt(DumpPeerStability(traceMonitor, tree->ip, tree->globalObj,
                                      tree->globalShape, tree->argc);)

    /*
     * Note: this must always be done, in case we added new globals on trace
     * and haven't yet propagated those to linked and dependent trees.
     */
    debug_only_print0(LC_TMTracer,
                      "updating specializations on dependent and linked trees\n");
    if (fragment->root->code())
        SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root);

    /*
     * If this is a newly formed tree, and the outer tree has not been compiled
     * yet, we should try to compile the outer tree again.
     */
    if (outerPC)
        AttemptCompilation(cx, globalObj, outerScript, outerPC, outerArgc);

    debug_only_printf(LC_TMMinimal,
                      "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n",
                      cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()),
                      fragment->profFragID);
    debug_only_print0(LC_TMTracer, "\n");

    return finishSuccessfully();
}
/* Emit code to adjust the stack to match the inner tree's stack expectations. */
JS_REQUIRES_STACK void
TraceRecorder::prepareTreeCall(TreeFragment* inner)
{
    VMSideExit* exit = snapshot(OOM_EXIT);

    /*
     * The inner tree expects to be called from the current frame. If the outer
     * tree (this trace) is currently inside a function inlining code
     * (calldepth > 0), we have to advance the native stack pointer such that
     * we match what the inner trace expects to see. We move it back when we
     * come out of the inner tree call.
     */
    if (callDepth > 0) {
        /*
         * Calculate the amount we have to lift the native stack pointer by to
         * compensate for any outer frames that the inner tree doesn't expect
         * but the outer tree has.
         */
        ptrdiff_t sp_adj = nativeStackOffset(&cx->fp()->calleeValue());

        /* Calculate the amount we have to lift the call stack by. */
        ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);

        /*
         * Guard that we have enough stack space for the tree we are trying to
         * call on top of the new value for sp.
         */
        debug_only_printf(LC_TMTracer,
                          "sp_adj=%lld outer=%lld inner=%lld\n",
                          (long long int)sp_adj,
                          (long long int)tree->nativeStackBase,
                          (long long int)inner->nativeStackBase);
        ptrdiff_t sp_offset =
            - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
            + sp_adj /* adjust for stack in outer frame inner tree can't see */
            + inner->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */
        LIns* sp_top = w.addp(lirbuf->sp, w.nameImmw(sp_offset));
        guard(true, w.ltp(sp_top, eos_ins), exit);

        /* Guard that we have enough call stack space. */
        ptrdiff_t rp_offset = rp_adj + inner->maxCallDepth * sizeof(FrameInfo*);
        LIns* rp_top = w.addp(lirbuf->rp, w.nameImmw(rp_offset));
        guard(true, w.ltp(rp_top, eor_ins), exit);

        sp_offset =
            - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
            + sp_adj /* adjust for stack in outer frame inner tree can't see */
            + inner->nativeStackBase; /* plus the inner tree's stack base */
        /* We have enough space, so adjust sp and rp to their new level. */
        w.stStateField(w.addp(lirbuf->sp, w.nameImmw(sp_offset)), sp);
        w.stStateField(w.addp(lirbuf->rp, w.nameImmw(rp_adj)), rp);
    }

    /*
     * The inner tree will probably access stack slots. So tell nanojit not to
     * discard or defer stack writes before emitting the call tree code.
     *
     * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
     */
    w.xbarrier(createGuardRecord(exit));
}
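
//
// Illustrative sketch (not part of the build) of the sp adjustment above, with
// made-up numbers: if the outer tree's nativeStackBase is 64 bytes, the
// callee-relative offset sp_adj is 128 bytes, and the inner tree may use up to
// 16 double-sized slots, then the space guard checks
//
//     sp + (-64 + 128 + 16 * sizeof(double)) < eos
//
// while the value actually stored into the state's sp field rebases by the
// inner tree's nativeStackBase instead of its maximum stack use.
//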
5276 class ClearSlotsVisitor
: public SlotVisitorBase
5280 ClearSlotsVisitor(Tracker
&tracker
)
5284 JS_ALWAYS_INLINE
bool
5285 visitStackSlots(Value
*vp
, size_t count
, JSStackFrame
*) {
5286 for (Value
*vpend
= vp
+ count
; vp
!= vpend
; ++vp
)
5287 tracker
.set(vp
, NULL
);
5291 JS_ALWAYS_INLINE
bool
5292 visitFrameObjPtr(void *p
, JSStackFrame
*) {
5293 tracker
.set(p
, NULL
);
5299 BuildGlobalTypeMapFromInnerTree(Queue
<JSValueType
>& typeMap
, VMSideExit
* inner
)
5302 unsigned initialSlots
= typeMap
.length();
5304 /* First, use the innermost exit's global typemap. */
5305 typeMap
.add(inner
->globalTypeMap(), inner
->numGlobalSlots
);
5307 /* Add missing global types from the innermost exit's tree. */
5308 TreeFragment
* innerFrag
= inner
->root();
5309 unsigned slots
= inner
->numGlobalSlots
;
5310 if (slots
< innerFrag
->nGlobalTypes()) {
5311 typeMap
.add(innerFrag
->globalTypeMap() + slots
, innerFrag
->nGlobalTypes() - slots
);
5312 slots
= innerFrag
->nGlobalTypes();
5314 JS_ASSERT(typeMap
.length() - initialSlots
== slots
);
5318 /* Record a call to an inner tree. */
5319 JS_REQUIRES_STACK
void
5320 TraceRecorder::emitTreeCall(TreeFragment
* inner
, VMSideExit
* exit
)
5322 /* Invoke the inner tree. */
5323 LIns
* args
[] = { lirbuf
->state
}; /* reverse order */
5324 /* Construct a call info structure for the target tree. */
5325 CallInfo
* ci
= new (traceAlloc()) CallInfo();
5326 ci
->_address
= uintptr_t(inner
->code());
5327 JS_ASSERT(ci
->_address
);
5328 ci
->_typesig
= CallInfo::typeSig1(ARGTYPE_P
, ARGTYPE_P
);
5330 ci
->_storeAccSet
= ACCSET_STORE_ANY
;
5331 ci
->_abi
= ABI_FASTCALL
;
5333 ci
->_name
= "fragment";
5335 LIns
* rec
= w
.call(ci
, args
);
5336 LIns
* lr
= w
.ldpGuardRecordExit(rec
);
5337 LIns
* nested
= w
.jtUnoptimizable(w
.eqiN(w
.ldiVMSideExitField(lr
, exitType
), NESTED_EXIT
));
5340 * If the tree exits on a regular (non-nested) guard, keep updating lastTreeExitGuard
5341 * with that guard. If we mismatch on a tree call guard, this will contain the last
5342 * non-nested guard we encountered, which is the innermost loop or branch guard.
5344 w
.stStateField(lr
, lastTreeExitGuard
);
5345 LIns
* done1
= w
.j(NULL
);
5348 * The tree exited on a nested guard. This only occurs once a tree call guard mismatches
5349 * and we unwind the tree call stack. We store the first (innermost) tree call guard in state
5350 * and we will try to grow the outer tree the failing call was in starting at that guard.
5353 LIns
* done2
= w
.jfUnoptimizable(w
.eqp0(w
.ldpStateField(lastTreeCallGuard
)));
5354 w
.stStateField(lr
, lastTreeCallGuard
);
5355 w
.stStateField(w
.addp(w
.ldpStateField(rp
),
5356 w
.i2p(w
.lshiN(w
.ldiVMSideExitField(lr
, calldepth
),
5357 sizeof(void*) == 4 ? 2 : 3))),
5359 w
.label(done1
, done2
);
5362 * Keep updating outermostTreeExit so that TracerState always contains the most recent
5365 w
.stStateField(lr
, outermostTreeExitGuard
);
5367 /* Read back all registers, in case the called tree changed any of them. */
5371 map
= exit
->globalTypeMap();
5372 for (i
= 0; i
< exit
->numGlobalSlots
; i
++)
5373 JS_ASSERT(map
[i
] != JSVAL_TYPE_BOXED
);
5374 map
= exit
->stackTypeMap();
5375 for (i
= 0; i
< exit
->numStackSlots
; i
++)
5376 JS_ASSERT(map
[i
] != JSVAL_TYPE_BOXED
);
5379 /* The inner tree may modify currently-tracked upvars, so flush everything. */
5380 ClearSlotsVisitor
visitor(tracker
);
5381 VisitStackSlots(visitor
, cx
, callDepth
);
5382 SlotList
& gslots
= *tree
->globalSlots
;
5383 for (unsigned i
= 0; i
< gslots
.length(); i
++) {
5384 unsigned slot
= gslots
[i
];
5385 Value
* vp
= &globalObj
->getSlotRef(slot
);
5386 tracker
.set(vp
, NULL
);
5389 /* Set stack slots from the innermost frame. */
5390 importTypeMap
.setLength(NativeStackSlots(cx
, callDepth
));
5391 unsigned startOfInnerFrame
= importTypeMap
.length() - exit
->numStackSlots
;
5392 for (unsigned i
= 0; i
< exit
->numStackSlots
; i
++)
5393 importTypeMap
[startOfInnerFrame
+ i
] = exit
->stackTypeMap()[i
];
5394 importStackSlots
= importTypeMap
.length();
5395 JS_ASSERT(importStackSlots
== NativeStackSlots(cx
, callDepth
));
5398 * Bug 502604 - It is illegal to extend from the outer typemap without
5399 * first extending from the inner. Make a new typemap here.
5401 BuildGlobalTypeMapFromInnerTree(importTypeMap
, exit
);
5403 importGlobalSlots
= importTypeMap
.length() - importStackSlots
;
5404 JS_ASSERT(importGlobalSlots
== tree
->globalSlots
->length());
5406 /* Restore sp and rp to their original values (we still have them in a register). */
5407 if (callDepth
> 0) {
5408 w
.stStateField(lirbuf
->sp
, sp
);
5409 w
.stStateField(lirbuf
->rp
, rp
);
5413 * Guard that we come out of the inner tree along the same side exit we came out when
5414 * we called the inner tree at recording time.
5416 VMSideExit
* nestedExit
= snapshot(NESTED_EXIT
);
5417 JS_ASSERT(exit
->exitType
== LOOP_EXIT
);
5418 guard(true, w
.eqp(lr
, w
.nameImmpNonGC(exit
)), nestedExit
);
5419 debug_only_printf(LC_TMTreeVis
, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner
,
5420 (void*)nestedExit
, (void*)exit
);
5422 /* Register us as a dependent tree of the inner tree. */
5423 inner
->dependentTrees
.addUnique(fragment
->root
);
5424 tree
->linkedTrees
.addUnique(inner
);
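
//
// Illustrative sketch (not part of the build) of the control flow recorded by
// emitTreeCall() above, written as pseudo-C over the guard record 'lr'
// returned by the called fragment (the tree-call rp bookkeeping field written
// in the nested branch is paraphrased, not quoted, here):
//
//     lr = fragment(state);
//     if (lr->exitType != NESTED_EXIT) {
//         state->lastTreeExitGuard = lr;              // regular exit
//     } else if (state->lastTreeCallGuard == NULL) {  // first nested unwind
//         state->lastTreeCallGuard = lr;
//         /* also record rp advanced by lr->calldepth FrameInfo* entries */
//     }
//     state->outermostTreeExitGuard = lr;
//
// followed by a guard that the exit actually taken matches the exit observed
// while recording.
//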
/* Add an if/if-else control-flow merge point to the list of known merge points. */
JS_REQUIRES_STACK void
TraceRecorder::trackCfgMerges(jsbytecode* pc)
{
    /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
    JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
    jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), pc);
    if (sn != NULL) {
        if (SN_TYPE(sn) == SRC_IF) {
            cfgMerges.add((*pc == JSOP_IFEQ)
                          ? pc + GET_JUMP_OFFSET(pc)
                          : pc + GET_JUMPX_OFFSET(pc));
        } else if (SN_TYPE(sn) == SRC_IF_ELSE)
            cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
    }
}
/*
 * Invert the direction of the guard if this is a loop edge that is not
 * taken (thin loop).
 */
JS_REQUIRES_STACK void
TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
{
    ExitType exitType;
    JS_ASSERT(isCond(x));
    if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
        exitType = LOOP_EXIT;

        /*
         * If we are about to walk out of the loop, generate code for the
         * inverse loop condition, pretending we recorded the case that stays
         * on trace.
         */
        if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
            JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
            debug_only_print0(LC_TMTracer,
                              "Walking out of the loop, terminating it anyway.\n");
            cond = !cond;
        }

        /*
         * Conditional guards do not have to be emitted if the condition is
         * constant. We make a note whether the loop condition is true or false
         * here, so we later know whether to emit a loop edge or a loop end.
         */
        if (x->isImmI()) {
            pendingLoop = (x->immI() == int32(cond));
            return;
        }
    } else {
        exitType = BRANCH_EXIT;
    }
    if (!x->isImmI())
        guard(cond, x, exitType);
}

/* Emit code for a fused IFEQ/IFNE. */
JS_REQUIRES_STACK void
TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
{
    if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
        emitIf(pc, cond, x);
        if (*pc == JSOP_IFEQ)
            trackCfgMerges(pc);
    }
}
/* Check whether we have reached the end of the trace. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::checkTraceEnd(jsbytecode *pc)
{
    if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
        /*
         * If we compile a loop, the trace should have a zero stack balance at
         * the loop edge. Currently we are parked on a comparison op or
         * IFNE/IFEQ, so advance pc to the loop header and adjust the stack
         * pointer and pretend we have reached the loop header.
         */
        if (pendingLoop) {
            JS_ASSERT(!cx->fp()->hasImacropc() && (pc == cx->regs->pc || pc == cx->regs->pc + 1));
            JSFrameRegs orig = *cx->regs;

            cx->regs->pc = (jsbytecode*)tree->ip;
            cx->regs->sp = cx->fp()->base() + tree->spOffsetAtEntry;

            JSContext* localcx = cx;
            AbortableRecordingStatus ars = closeLoop();
            *localcx->regs = orig;
            return ars;
        } else {
            return endLoop();
        }
    }
    return ARECORD_CONTINUE;
}
5525 * Check whether the shape of the global object has changed. The return value
5526 * indicates whether the recorder is still active. If 'false', any active
5527 * recording has been aborted and the JIT may have been reset.
5529 static JS_REQUIRES_STACK
bool
5530 CheckGlobalObjectShape(JSContext
* cx
, TraceMonitor
* tm
, JSObject
* globalObj
,
5531 uint32
*shape
= NULL
, SlotList
** slots
= NULL
)
5533 if (tm
->needFlush
) {
5534 ResetJIT(cx
, FR_DEEP_BAIL
);
5538 if (globalObj
->numSlots() > MAX_GLOBAL_SLOTS
) {
5540 AbortRecording(cx
, "too many slots in global object");
5545 * The global object must have a unique shape. That way, if an operand
5546 * isn't the global at record time, a shape guard suffices to ensure
5547 * that it isn't the global at run time.
5549 if (!globalObj
->hasOwnShape()) {
5550 if (!globalObj
->globalObjectOwnShapeChange(cx
)) {
5551 debug_only_print0(LC_TMTracer
,
5552 "Can't record: failed to give globalObj a unique shape.\n");
5557 uint32 globalShape
= globalObj
->shape();
5560 TreeFragment
* root
= tm
->recorder
->getFragment()->root
;
5562 /* Check the global shape matches the recorder's treeinfo's shape. */
5563 if (globalObj
!= root
->globalObj
|| globalShape
!= root
->globalShape
) {
5564 AUDIT(globalShapeMismatchAtEntry
);
5565 debug_only_printf(LC_TMTracer
,
5566 "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
5567 (void*)globalObj
, globalShape
, (void*)root
->globalObj
,
5569 Backoff(cx
, (jsbytecode
*) root
->ip
);
5570 ResetJIT(cx
, FR_GLOBAL_SHAPE_MISMATCH
);
5574 *shape
= globalShape
;
5576 *slots
= root
->globalSlots
;
5580 /* No recorder, search for a tracked global-state (or allocate one). */
5581 for (size_t i
= 0; i
< MONITOR_N_GLOBAL_STATES
; ++i
) {
5582 GlobalState
&state
= tm
->globalStates
[i
];
5584 if (state
.globalShape
== uint32(-1)) {
5585 state
.globalObj
= globalObj
;
5586 state
.globalShape
= globalShape
;
5587 JS_ASSERT(state
.globalSlots
);
5588 JS_ASSERT(state
.globalSlots
->length() == 0);
5591 if (state
.globalObj
== globalObj
&& state
.globalShape
== globalShape
) {
5593 *shape
= globalShape
;
5595 *slots
= state
.globalSlots
;
5600 /* No currently-tracked-global found and no room to allocate, abort. */
5601 AUDIT(globalShapeMismatchAtEntry
);
5602 debug_only_printf(LC_TMTracer
,
5603 "No global slotlist for global shape %u, flushing cache.\n",
5605 ResetJIT(cx
, FR_GLOBALS_FULL
);
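
//
// Illustrative sketch (not part of the build): the two ways callers use
// CheckGlobalObjectShape(). RecordTree() only validates the shape, while
// recordLoopEdge() also asks for the tracked shape and slot list:
//
//     uint32 shape = -1;
//     SlotList* slots = NULL;
//     if (!CheckGlobalObjectShape(cx, tm, globalObj, &shape, &slots))
//         return MONITOR_NOT_RECORDING;   // recorder aborted and/or JIT reset
//
// Both out-parameters are optional, which is why they default to NULL in the
// declaration above.
//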
5610 * Return whether or not the recorder could be started. If 'false', the JIT has
5611 * been reset in response to an OOM.
5613 bool JS_REQUIRES_STACK
5614 TraceRecorder::startRecorder(JSContext
* cx
, VMSideExit
* anchor
, VMFragment
* f
,
5615 unsigned stackSlots
, unsigned ngslots
,
5616 JSValueType
* typeMap
, VMSideExit
* expectedInnerExit
,
5617 JSScript
* outerScript
, jsbytecode
* outerPC
, uint32 outerArgc
,
5620 TraceMonitor
*tm
= &JS_TRACE_MONITOR(cx
);
5621 JS_ASSERT(!tm
->profile
);
5622 JS_ASSERT(!tm
->needFlush
);
5623 JS_ASSERT_IF(cx
->fp()->hasImacropc(), f
->root
!= f
);
5625 /* We can't (easily) use js_new() here because the constructor is private. */
5626 void *memory
= js_malloc(sizeof(TraceRecorder
));
5627 tm
->recorder
= memory
5628 ? new(memory
) TraceRecorder(cx
, anchor
, f
, stackSlots
, ngslots
, typeMap
,
5629 expectedInnerExit
, outerScript
, outerPC
, outerArgc
,
5633 if (!tm
->recorder
|| tm
->outOfMemory() || OverfullJITCache(cx
, tm
)) {
5634 ResetJIT(cx
, FR_OOM
);
static void
TrashTree(TreeFragment* f)
{
    JS_ASSERT(f == f->root);
    debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);

    if (!f->code())
        return;
    AUDIT(treesTrashed);
    debug_only_print0(LC_TMTracer, "Trashing tree info.\n");
    f->setCode(NULL);
    TreeFragment** data = f->dependentTrees.data();
    unsigned length = f->dependentTrees.length();
    for (unsigned n = 0; n < length; ++n)
        TrashTree(data[n]);
    data = f->linkedTrees.data();
    length = f->linkedTrees.length();
    for (unsigned n = 0; n < length; ++n)
        TrashTree(data[n]);
}
5663 SynthesizeFrame(JSContext
* cx
, const FrameInfo
& fi
, JSObject
* callee
)
5665 VOUCH_DOES_NOT_REQUIRE_STACK();
5667 /* Assert that we have a correct sp distance from cx->fp()->slots in fi. */
5668 JSStackFrame
* const fp
= cx
->fp();
5669 JS_ASSERT_IF(!fi
.imacpc
,
5670 js_ReconstructStackDepth(cx
, fp
->script(), fi
.pc
) ==
5671 uintN(fi
.spdist
- fp
->numFixed()));
5673 /* Use the just-flushed prev-frame to get the callee function. */
5674 JSFunction
* newfun
= callee
->getFunctionPrivate();
5675 JSScript
* newscript
= newfun
->script();
5677 /* Fill in the prev-frame's sp. */
5678 JSFrameRegs
*regs
= cx
->regs
;
5679 regs
->sp
= fp
->slots() + fi
.spdist
;
5682 fp
->setImacropc(fi
.imacpc
);
5684 /* Set argc/flags then mimic JSOP_CALL. */
5685 uintN argc
= fi
.get_argc();
5686 uint32 flags
= fi
.is_constructing ()
5687 ? JSFRAME_CONSTRUCTING
| JSFRAME_CONSTRUCTING
5690 /* Get pointer to new/frame/slots, prepare arguments. */
5691 StackSpace
&stack
= cx
->stack();
5692 JSStackFrame
*newfp
= stack
.getInlineFrame(cx
, regs
->sp
, argc
, newfun
,
5695 /* Initialize frame; do not need to initialize locals. */
5696 newfp
->initCallFrame(cx
, *callee
, newfun
, argc
, flags
);
5699 /* The stack is conservatively marked, so we can leave non-canonical args uninitialized. */
5700 if (newfp
->hasOverflowArgs()) {
5701 Value
*beg
= newfp
->actualArgs() - 2;
5702 Value
*end
= newfp
->actualArgs() + newfp
->numFormalArgs();
5703 for (Value
*p
= beg
; p
!= end
; ++p
)
5704 p
->setMagic(JS_ARG_POISON
);
5707 /* These should be initialized by FlushNativeStackFrame. */
5708 newfp
->thisValue().setMagic(JS_THIS_POISON
);
5709 newfp
->setScopeChainNoCallObj(*JSStackFrame::sInvalidScopeChain
);
5712 /* Officially push the frame. */
5713 stack
.pushInlineFrame(cx
, newscript
, newfp
, cx
->regs
);
5715 /* Call object will be set by FlushNativeStackFrame. */
5717 /* Call the debugger hook if present. */
5718 JSInterpreterHook hook
= cx
->debugHooks
->callHook
;
5720 newfp
->setHookData(hook(cx
, newfp
, JS_TRUE
, 0,
5721 cx
->debugHooks
->callHookData
));
5725 static JS_REQUIRES_STACK
bool
5726 RecordTree(JSContext
* cx
, TreeFragment
* first
, JSScript
* outerScript
, jsbytecode
* outerPC
,
5727 uint32 outerArgc
, SlotList
* globalSlots
)
5729 TraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
5731 /* Try to find an unused peer fragment, or allocate a new one. */
5732 JS_ASSERT(first
->first
== first
);
5733 TreeFragment
* f
= NULL
;
5735 for (TreeFragment
* peer
= first
; peer
; peer
= peer
->peer
, ++count
) {
5740 f
= AddNewPeerToPeerList(tm
, first
);
5741 JS_ASSERT(f
->root
== f
);
5743 /* Disable speculation if we are starting to accumulate a lot of trees. */
5744 bool speculate
= count
< MAXPEERS
-1;
5746 /* save a local copy for use after JIT flush */
5747 const void* localRootIP
= f
->root
->ip
;
5749 /* Make sure the global type map didn't change on us. */
5750 if (!CheckGlobalObjectShape(cx
, tm
, f
->globalObj
)) {
5751 Backoff(cx
, (jsbytecode
*) localRootIP
);
5755 AUDIT(recorderStarted
);
5757 if (tm
->outOfMemory() ||
5758 OverfullJITCache(cx
, tm
) ||
5759 !tm
->tracedScripts
.put(cx
->fp()->script()))
5761 if (!OverfullJITCache(cx
, tm
))
5762 js_ReportOutOfMemory(cx
);
5763 Backoff(cx
, (jsbytecode
*) f
->root
->ip
);
5764 ResetJIT(cx
, FR_OOM
);
5765 debug_only_print0(LC_TMTracer
,
5766 "Out of memory recording new tree, flushing cache.\n");
5770 JS_ASSERT(!f
->code());
5772 f
->initialize(cx
, globalSlots
, speculate
);
5775 AssertTreeIsUnique(tm
, f
);
5778 debug_only_printf(LC_TMTreeVis
, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
5779 (void*)f
, f
->ip
, f
->treeFileName
, f
->treeLineNumber
,
5780 FramePCOffset(cx
, cx
->fp()));
5781 debug_only_print0(LC_TMTreeVis
, " STACK=\"");
5782 for (unsigned i
= 0; i
< f
->nStackTypes
; i
++)
5783 debug_only_printf(LC_TMTreeVis
, "%c", TypeToChar(f
->typeMap
[i
]));
5784 debug_only_print0(LC_TMTreeVis
, "\" GLOBALS=\"");
5785 for (unsigned i
= 0; i
< f
->nGlobalTypes(); i
++)
5786 debug_only_printf(LC_TMTreeVis
, "%c", TypeToChar(f
->typeMap
[f
->nStackTypes
+ i
]));
5787 debug_only_print0(LC_TMTreeVis
, "\"\n");
5790 /* Recording primary trace. */
5791 return TraceRecorder::startRecorder(cx
, NULL
, f
, f
->nStackTypes
,
5792 f
->globalSlots
->length(),
5793 f
->typeMap
.data(), NULL
,
5794 outerScript
, outerPC
, outerArgc
, speculate
);
5797 static JS_REQUIRES_STACK TypeConsensus
5798 FindLoopEdgeTarget(JSContext
* cx
, VMSideExit
* exit
, TreeFragment
** peerp
)
5800 TreeFragment
* from
= exit
->root();
5802 JS_ASSERT(from
->code());
5803 Oracle
* oracle
= JS_TRACE_MONITOR(cx
).oracle
;
5805 TypeMap
typeMap(NULL
);
5806 FullMapFromExit(typeMap
, exit
);
5807 JS_ASSERT(typeMap
.length() - exit
->numStackSlots
== from
->nGlobalTypes());
5809 /* Mark all double slots as undemotable */
5810 uint16
* gslots
= from
->globalSlots
->data();
5811 for (unsigned i
= 0; i
< typeMap
.length(); i
++) {
5812 if (typeMap
[i
] == JSVAL_TYPE_DOUBLE
) {
5813 if (i
< from
->nStackTypes
)
5814 oracle
->markStackSlotUndemotable(cx
, i
, from
->ip
);
5815 else if (i
>= exit
->numStackSlots
)
5816 oracle
->markGlobalSlotUndemotable(cx
, gslots
[i
- exit
->numStackSlots
]);
5820 JS_ASSERT(exit
->exitType
== UNSTABLE_LOOP_EXIT
);
5822 TreeFragment
* firstPeer
= from
->first
;
5824 for (TreeFragment
* peer
= firstPeer
; peer
; peer
= peer
->peer
) {
5827 JS_ASSERT(peer
->argc
== from
->argc
);
5828 JS_ASSERT(exit
->numStackSlots
== peer
->nStackTypes
);
5829 TypeConsensus consensus
= TypeMapLinkability(cx
, typeMap
, peer
);
5830 if (consensus
== TypeConsensus_Okay
|| consensus
== TypeConsensus_Undemotes
) {
5836 return TypeConsensus_Bad
;
5839 static JS_REQUIRES_STACK
bool
5840 AttemptToStabilizeTree(JSContext
* cx
, JSObject
* globalObj
, VMSideExit
* exit
,
5841 JSScript
* outerScript
, jsbytecode
* outerPC
, uint32 outerArgc
)
5843 TraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
5844 if (tm
->needFlush
) {
5845 ResetJIT(cx
, FR_DEEP_BAIL
);
5849 TreeFragment
* from
= exit
->root();
5851 TreeFragment
* peer
= NULL
;
5852 TypeConsensus consensus
= FindLoopEdgeTarget(cx
, exit
, &peer
);
5853 if (consensus
== TypeConsensus_Okay
) {
5854 JS_ASSERT(from
->globalSlots
== peer
->globalSlots
);
5855 JS_ASSERT_IF(exit
->exitType
== UNSTABLE_LOOP_EXIT
,
5856 from
->nStackTypes
== peer
->nStackTypes
);
5857 JS_ASSERT(exit
->numStackSlots
== peer
->nStackTypes
);
5858 /* Patch this exit to its peer */
5859 JoinPeers(tm
->assembler
, exit
, peer
);
5861 * Update peer global types. The |from| fragment should already be updated because it on
5862 * the execution path, and somehow connected to the entry trace.
5864 if (peer
->nGlobalTypes() < peer
->globalSlots
->length())
5865 SpecializeTreesToMissingGlobals(cx
, globalObj
, peer
);
5866 JS_ASSERT(from
->nGlobalTypes() == from
->globalSlots
->length());
5867 /* This exit is no longer unstable, so remove it. */
5868 if (exit
->exitType
== UNSTABLE_LOOP_EXIT
)
5869 from
->removeUnstableExit(exit
);
5870 debug_only_stmt(DumpPeerStability(tm
, peer
->ip
, globalObj
, from
->globalShape
, from
->argc
);)
5872 } else if (consensus
== TypeConsensus_Undemotes
) {
5873 /* The original tree is unconnectable, so trash it. */
5878 SlotList
*globalSlots
= from
->globalSlots
;
5880 JS_ASSERT(from
== from
->root
);
5882 /* If this tree has been blacklisted, don't try to record a new one. */
5883 if (*(jsbytecode
*)from
->ip
== JSOP_NOTRACE
)
5886 return RecordTree(cx
, from
->first
, outerScript
, outerPC
, outerArgc
, globalSlots
);
static JS_REQUIRES_STACK VMFragment*
CreateBranchFragment(JSContext* cx, TreeFragment* root, VMSideExit* anchor)
{
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    verbose_only(
    uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
                          ? (++(tm->lastFragID)) : 0;
    )

    VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID));

    debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
                      " LINE=%d ANCHOR=%p OFFS=%d\n",
                      (void*)root, (void*)f, (void*)cx->regs->pc, cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()), (void*)anchor,
                      FramePCOffset(cx, cx->fp()));
    verbose_only( tm->branches = new (*tm->dataAlloc) Seq<Fragment*>(f, tm->branches); )

    f->root = root;
    return f;
}
static JS_REQUIRES_STACK bool
AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom,
                    JSScript* outerScript, jsbytecode* outerPC,
                    TraceVisStateObj* tvso = NULL)
{
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    JS_ASSERT(!tm->recorder);

    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
        return false;
    }

    TreeFragment* f = anchor->root();
    JS_ASSERT(f->code());

    /*
     * Don't grow trees above a certain size to avoid code explosion due to
     * tail duplication.
     */
    if (f->branchCount >= MAX_BRANCHES) {
        if (cx->methodJitEnabled && cx->profilingEnabled)
            Blacklist((jsbytecode *)f->ip);
        if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
        return false;
    }

    VMFragment* c = (VMFragment*)anchor->target;
    if (!c) {
        c = CreateBranchFragment(cx, f, anchor);
    } else {
        /*
         * If we are recycling a fragment, it might have a different ip so reset it
         * here. This can happen when attaching a branch to a NESTED_EXIT, which
         * might extend along separate paths (i.e. after the loop edge, and after a
         * return statement).
         */
        c->ip = cx->regs->pc;
        JS_ASSERT(c->root == f);
    }

    debug_only_printf(LC_TMTracer,
                      "trying to attach another branch to the tree (hits = %d)\n", c->hits());

    int32_t& hits = c->hits();
    int32_t maxHits = HOTEXIT + MAXEXIT;
    if (anchor->exitType == CASE_EXIT)
        maxHits *= anchor->switchInfo->count;
    if (outerPC || (hits++ >= HOTEXIT && hits <= maxHits)) {
        /* start tracing secondary trace from this point */
        unsigned stackSlots;
        unsigned ngslots;
        JSValueType* typeMap;
        TypeMap fullMap(NULL);
        if (!exitedFrom) {
            /*
             * If we are coming straight from a simple side exit, just use that
             * exit's type map as starting point.
             */
            ngslots = anchor->numGlobalSlots;
            stackSlots = anchor->numStackSlots;
            typeMap = anchor->fullTypeMap();
        } else {
            /*
             * If we side-exited on a loop exit and continue on a nesting
             * guard, the nesting guard (anchor) has the type information for
             * everything below the current scope, and the actual guard we
             * exited from has the types for everything in the current scope
             * (and whatever it inlined). We have to merge those maps here.
             */
            VMSideExit* e1 = anchor;
            VMSideExit* e2 = exitedFrom;
            fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
            fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
            stackSlots = fullMap.length();
            ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
            JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
            JS_ASSERT(ngslots == fullMap.length() - stackSlots);
            typeMap = fullMap.data();
        }
        JS_ASSERT(ngslots >= anchor->numGlobalSlots);
        bool rv = TraceRecorder::startRecorder(cx, anchor, c, stackSlots, ngslots, typeMap,
                                               exitedFrom, outerScript, outerPC, f->argc,
                                               Record_Branch);
        if (!rv && tvso)
            tvso->r = R_FAIL_EXTEND_START;
        return rv;
    }
    if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
    return false;
}
static JS_REQUIRES_STACK bool
ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
            VMSideExit** innermostNestedGuardp, VMSideExit** lrp);

static inline MonitorResult
RecordingIfTrue(bool b)
{
    return b ? MONITOR_RECORDING : MONITOR_NOT_RECORDING;
}
/*
 * A postcondition of recordLoopEdge is that if recordLoopEdge does not return
 * MONITOR_RECORDING, the recording has been aborted.
 */
JS_REQUIRES_STACK MonitorResult
TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
{
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    /* Process needFlush and deep abort requests. */
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return MONITOR_NOT_RECORDING;
    }

    JS_ASSERT(r->fragment && !r->fragment->lastIns);
    TreeFragment* root = r->fragment->root;
    TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj,
                                          root->globalShape, entryFrameArgc(cx));

    /*
     * Make sure the shape of the global object still matches (this might flush
     * the JIT cache).
     */
    JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;
    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
        JS_ASSERT(!tm->recorder);
        return MONITOR_NOT_RECORDING;
    }

    debug_only_printf(LC_TMTracer,
                      "Looking for type-compatible peer (%s:%d@%d)\n",
                      cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()));

    // Find a matching inner tree. If none can be found, compile one.
    TreeFragment* f = r->findNestedCompatiblePeer(first);
    if (!f || !f->code()) {
        AUDIT(noCompatInnerTrees);

        TreeFragment* outerFragment = root;
        JSScript* outerScript = outerFragment->script;
        jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
        uint32 outerArgc = outerFragment->argc;
        JS_ASSERT(entryFrameArgc(cx) == first->argc);

        if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
            return MONITOR_NOT_RECORDING;

        return RecordingIfTrue(RecordTree(cx, first, outerScript, outerPC, outerArgc, globalSlots));
    }

    AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
    if (status == ARECORD_CONTINUE)
        return MONITOR_RECORDING;
    if (status == ARECORD_ERROR) {
        if (TRACE_RECORDER(cx))
            AbortRecording(cx, "Error returned while recording loop edge");
        return MONITOR_ERROR;
    }
    JS_ASSERT(status == ARECORD_ABORTED && !tm->recorder);
    return MONITOR_NOT_RECORDING;
}
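
/*
 * Execute the compatible inner tree |f| from within the current recording and
 * decide, based on the exit the inner tree took, whether to emit a tree call,
 * grow one of the trees involved, or abort the outer recording.
 */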
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
{
    adjustCallerTypes(f);

    uintN oldInlineCallCount = inlineCallCount;

    JSContext *localCx = cx;

    // Refresh the import type map so the tracker can reimport values after the
    // call with their correct types. The inner tree must not change the type of
    // any variable in a frame above the current one (i.e., upvars).
    //
    // Note that DetermineTypesVisitor may call determineSlotType, which may
    // read from the (current, stale) import type map, but this is safe here.
    // The reason is that determineSlotType will read the import type map only
    // if there is not a tracker instruction for that value, which means that
    // value has not been written yet, so that type map entry is up to date.
    importTypeMap.setLength(NativeStackSlots(cx, callDepth));
    DetermineTypesVisitor visitor(*this, importTypeMap.data());
    VisitStackSlots(visitor, cx, callDepth);

    VMSideExit* innermostNestedGuard = NULL;
    VMSideExit* lr = NULL;
    bool ok = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard, &lr);

    /*
     * If ExecuteTree reentered the interpreter, it may have killed |this|
     * and/or caused an error, which must be propagated.
     */
    JS_ASSERT_IF(TRACE_RECORDER(localCx), TRACE_RECORDER(localCx) == this);
    if (!ok)
        return ARECORD_ERROR;
    if (!TRACE_RECORDER(localCx))
        return ARECORD_ABORTED;

    if (!lr) {
        AbortRecording(cx, "Couldn't call inner tree");
        return ARECORD_ABORTED;
    }

    TreeFragment* outerFragment = tree;
    JSScript* outerScript = outerFragment->script;
    jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
    switch (lr->exitType) {
      case LOOP_EXIT:
        /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
        if (innermostNestedGuard) {
            if (AbortRecording(cx, "Inner tree took different side exit, abort current "
                                   "recording and grow nesting tree") == JIT_RESET) {
                return ARECORD_ABORTED;
            }
            return AttemptToExtendTree(localCx, innermostNestedGuard, lr, outerScript, outerPC)
                   ? ARECORD_CONTINUE
                   : ARECORD_ABORTED;
        }

        JS_ASSERT(oldInlineCallCount == inlineCallCount);

        /* Emit a call to the inner tree and continue recording the outer tree trace. */
        emitTreeCall(f, lr);
        return ARECORD_CONTINUE;

      case UNSTABLE_LOOP_EXIT:
      {
        /* Abort recording so the inner loop can become type stable. */
        JSObject* _globalObj = globalObj;
        if (AbortRecording(cx, "Inner tree is trying to stabilize, "
                               "abort outer recording") == JIT_RESET) {
            return ARECORD_ABORTED;
        }
        return AttemptToStabilizeTree(localCx, _globalObj, lr, outerScript, outerPC,
                                      outerFragment->argc)
               ? ARECORD_CONTINUE
               : ARECORD_ABORTED;
      }

      case MUL_ZERO_EXIT:
      case OVERFLOW_EXIT:
        if (lr->exitType == MUL_ZERO_EXIT)
            traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs->pc);
        else
            traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
        /* FALL THROUGH */
      case BRANCH_EXIT:
      case CASE_EXIT:
        /* Abort recording the outer tree, extend the inner tree. */
        if (AbortRecording(cx, "Inner tree is trying to grow, "
                               "abort outer recording") == JIT_RESET) {
            return ARECORD_ABORTED;
        }
        return AttemptToExtendTree(localCx, lr, NULL, outerScript, outerPC)
               ? ARECORD_CONTINUE
               : ARECORD_ABORTED;

      case NESTED_EXIT:
        JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
      default:
        debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
        AbortRecording(cx, "Inner tree not suitable for calling");
        return ARECORD_ABORTED;
    }
}
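
/*
 * Check whether the interpreter value |v| can enter a trace slot of the given
 * type-map entry |type| (for instance, an int32 value may flow into a double slot).
 */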
static JS_ALWAYS_INLINE bool
IsEntryTypeCompatible(const Value &v, JSValueType type)
{
    bool ok;

    JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
    JS_ASSERT(type != JSVAL_TYPE_OBJECT);   /* JSVAL_TYPE_OBJECT does not belong in a type map */

    if (v.isInt32()) {
        ok = (type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_DOUBLE);

    } else if (v.isDouble()) {
        int32_t _;
        ok = (type == JSVAL_TYPE_DOUBLE) ||
             (type == JSVAL_TYPE_INT32 && JSDOUBLE_IS_INT32(v.toDouble(), &_));

    } else if (v.isObject()) {
        ok = v.toObject().isFunction()
             ? type == JSVAL_TYPE_FUNOBJ
             : type == JSVAL_TYPE_NONFUNOBJ;

    } else {
        ok = v.extractNonDoubleObjectTraceType() == type;
    }

    char ttag = TypeToChar(type);
    char vtag = ValueToTypeChar(v);
    debug_only_printf(LC_TMTracer, "%c/%c ", vtag, ttag);
    if (!ok)
        debug_only_printf(LC_TMTracer, "%s", "(incompatible types)");
    return ok;
}

static JS_ALWAYS_INLINE bool
IsFrameObjPtrTypeCompatible(void *p, JSStackFrame *fp, JSValueType type)
{
    if (p == fp->addressOfScopeChain())
        return type == JSVAL_TYPE_NONFUNOBJ;
    JS_ASSERT(p == fp->addressOfArgs());
    JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ || type == JSVAL_TYPE_NULL);
    return fp->hasArgsObj() == (type == JSVAL_TYPE_NONFUNOBJ);
}
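
/*
 * Slot visitor that compares the current global and stack values against a
 * peer tree's entry type map, marking mismatching slots as undemotable in the
 * oracle so a more general tree can be compiled later.
 */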
class TypeCompatibilityVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    Oracle *mOracle;
    JSValueType *mTypeMap;
    unsigned mStackSlotNum;
    bool mOk;
public:
    TypeCompatibilityVisitor (TraceRecorder &recorder,
                              JSValueType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mOracle(JS_TRACE_MONITOR(mCx).oracle),
        mTypeMap(typeMap),
        mStackSlotNum(0),
        mOk(true)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d=", n);
        if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
            mOk = false;
        } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
            mOracle->markGlobalSlotUndemotable(mCx, slot);
            mOk = false;
        } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
            mOracle->markGlobalSlotUndemotable(mCx, slot);
        }
        mTypeMap++;
    }

    /*
     * For the below two methods, one may be inclined to 'return false' early
     * when mOk is set to 'false'. Don't do that. It is very important to run
     * through the whole list to let all mis-matching slots get marked
     * undemotable in the oracle.
     */

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
            if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
                mOk = false;
            } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
                mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
                mOk = false;
            } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
                mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
            }
            vp++;
            mTypeMap++;
            mStackSlotNum++;
        }
        return true;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame* fp) {
        debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
        if (!IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap))
            mOk = false;
        mTypeMap++;
        mStackSlotNum++;
        return true;
    }

    bool isOk() {
        return mOk;
    }
};
JS_REQUIRES_STACK TreeFragment*
TraceRecorder::findNestedCompatiblePeer(TreeFragment* f)
{
    TraceMonitor* tm;

    tm = &JS_TRACE_MONITOR(cx);
    unsigned int ngslots = tree->globalSlots->length();

    for (; f != NULL; f = f->peer) {
        if (!f->code())
            continue;

        debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);

        if (ngslots > f->nGlobalTypes())
            SpecializeTreesToMissingGlobals(cx, globalObj, f);

        /*
         * Determine whether the typemap of the inner tree matches the outer
         * tree's current state. If the inner tree expects an integer, but the
         * outer tree doesn't guarantee an integer for that slot, we mark the
         * slot undemotable and mismatch here. This will force a new tree to be
         * compiled that accepts a double for the slot. If the inner tree
         * expects a double, but the outer tree has an integer, we can proceed,
         * but we mark the location undemotable.
         */
        TypeCompatibilityVisitor visitor(*this, f->typeMap.data());
        VisitSlots(visitor, cx, 0, *tree->globalSlots);

        debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
        if (visitor.isOk())
            return f;
    }

    return NULL;
}
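
/*
 * Slot visitor used when entering a tree from the interpreter: it checks each
 * slot against the tree's entry type map without consulting the oracle.
 */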
class CheckEntryTypeVisitor : public SlotVisitorBase
{
    bool mOk;
    JSValueType *mTypeMap;
public:
    CheckEntryTypeVisitor(JSValueType *typeMap) :
        mOk(true),
        mTypeMap(typeMap)
    {}

    JS_ALWAYS_INLINE void checkSlot(const Value &v, char const *name, int i) {
        debug_only_printf(LC_TMTracer, "%s%d=", name, i);
        JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
        mOk = IsEntryTypeCompatible(v, *mTypeMap++);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
        if (mOk)
            checkSlot(*vp, "global", n);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            if (!mOk)
                break;
            checkSlot(*vp++, stackSlotKind(), i);
        }
        return mOk;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitFrameObjPtr(void* p, JSStackFrame *fp) {
        debug_only_printf(LC_TMTracer, "%s%d=", stackSlotKind(), 0);
        JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
        return mOk = IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap++);
    }

    bool isOk() {
        return mOk;
    }
};
/*
 * Check if types are usable for trace execution.
 *
 * @param cx Context.
 * @param f Tree of peer we're testing.
 * @return True if compatible (with or without demotions), false otherwise.
 */
static JS_REQUIRES_STACK bool
CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeFragment* f)
{
    unsigned int ngslots = f->globalSlots->length();

    JS_ASSERT(f->nStackTypes == NativeStackSlots(cx, 0));

    if (ngslots > f->nGlobalTypes())
        SpecializeTreesToMissingGlobals(cx, globalObj, f);

    JS_ASSERT(f->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
    JS_ASSERT(f->typeMap.length() == f->nStackTypes + ngslots);
    JS_ASSERT(f->nGlobalTypes() == ngslots);

    CheckEntryTypeVisitor visitor(f->typeMap.data());
    VisitSlots(visitor, cx, 0, *f->globalSlots);

    debug_only_print0(LC_TMTracer, "\n");
    return visitor.isOk();
}
/*
 * Find an acceptable entry tree given a PC.
 *
 * @param cx Context.
 * @param globalObj Global object.
 * @param f First peer fragment.
 * @param nodemote If true, will try to find a peer that does not require demotion.
 * @out count Number of fragments consulted.
 */
static JS_REQUIRES_STACK TreeFragment*
FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN& count)
{
    count = 0;
    for (; f != NULL; f = f->peer) {
        if (!f->code())
            continue;
        debug_only_printf(LC_TMTracer,
                          "checking vm types %p (ip: %p): ", (void*)f, f->ip);
        if (CheckEntryTypes(cx, globalObj, f))
            return f;
        ++count;
    }
    return NULL;
}
/*
 * For the native stacks and global frame, reuse the storage in |tm->storage|.
 * This reuse depends on the invariant that only one trace uses |tm->storage| at
 * a time. This is subtly correct in lieu of deep bail; see comment for
 * |deepBailSp| in DeepBail.
 */
TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
                         uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
  : cx(cx),
    stackBase(tm->storage->stack()),
    sp(stackBase + f->nativeStackBase / sizeof(double)),
    eos(tm->storage->global()),
    callstackBase(tm->storage->callstack()),
    sor(callstackBase),
    rp(callstackBase),
    eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES,
                               JS_MAX_INLINE_CALL_COUNT - inlineCallCount)),
    lastTreeExitGuard(NULL),
    lastTreeCallGuard(NULL),
    rpAtLastTreeCall(NULL),
    outermostTree(f),
    inlineCallCountp(&inlineCallCount),
    innermostNestedGuardp(innermostNestedGuardp),
#ifdef EXECUTE_TREE_TIMER
    startTime(rdtsc()),
#endif
    builtinStatus(0),
    nativeVp(NULL)
{
    JS_ASSERT(tm == &JS_TRACE_MONITOR(cx));
    JS_ASSERT(!tm->tracecx);
    tm->tracecx = cx;
    prev = tm->tracerState;
    tm->tracerState = this;

    JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
    JS_ASSERT(sp < eos);

    /*
     * inlineCallCount has already been incremented, if being invoked from
     * EnterFrame. It is okay to have a 0-frame restriction since the JIT
     * might not need any frames.
     */
    JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT);

    /*
     * Cannot 0xCD-fill global frame since it may overwrite a bailed outer
     * ExecuteTree's 0xdeadbeefdeadbeef marker.
     */
    memset(tm->storage->stack(), 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double));
    memset(tm->storage->callstack(), 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
}

TracerState::~TracerState()
{
    JS_ASSERT(!nativeVp);

    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
    tm->tracerState = prev;
    tm->tracecx = NULL;
}
/* Call |f|, return the exit taken. */
static JS_ALWAYS_INLINE VMSideExit*
ExecuteTrace(JSContext* cx, Fragment* f, TracerState& state)
{
    JS_ASSERT(!JS_TRACE_MONITOR(cx).bailExit);
    JS_ASSERT(!TRACE_PROFILER(cx));
    union { NIns *code; GuardRecord* (FASTCALL *func)(TracerState*); } u;
    u.code = f->code();
    GuardRecord* rec;
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
    SIMULATE_FASTCALL(rec, state, NULL, u.func);
#else
    rec = u.func(&state);
#endif
    JS_ASSERT(!JS_TRACE_MONITOR(cx).bailExit);
    return (VMSideExit*)rec->exit;
}
/* Check whether our assumptions about the incoming scope-chain are upheld. */
static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
ScopeChainCheck(JSContext* cx, TreeFragment* f)
{
    JS_ASSERT(f->globalObj == cx->fp()->scopeChain().getGlobal());

    /*
     * The JIT records and expects to execute with two scope-chain
     * assumptions baked-in:
     *
     *   1. That the bottom of the scope chain is global, in the sense of
     *      JSCLASS_IS_GLOBAL.
     *
     *   2. That the scope chain between fp and the global is free of
     *      "unusual" native objects such as HTML forms or other funny
     *      things.
     *
     * #2 is checked here while following the scope-chain links, via
     * js_IsCacheableNonGlobalScope, which consults a whitelist of known
     * class types; once a global is found, it's checked for #1. Failing
     * either check causes an early return from execution.
     */
    JSObject* child = &cx->fp()->scopeChain();
    while (JSObject* parent = child->getParent()) {
        if (!js_IsCacheableNonGlobalScope(child)) {
            debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
            Blacklist((jsbytecode*) f->root->ip);
            return false;
        }
        child = parent;
    }
    JS_ASSERT(child == f->globalObj);

    if (!f->globalObj->isGlobal()) {
        debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
        Blacklist((jsbytecode*) f->root->ip);
        return false;
    }

    return true;
}
enum LEAVE_TREE_STATUS {
  NO_DEEP_BAIL = 0,
  DEEP_BAILED = 1
};

static LEAVE_TREE_STATUS
LeaveTree(TraceMonitor *tm, TracerState&, VMSideExit* lr);
/* Return false if the interpreter should goto error. */
static JS_REQUIRES_STACK bool
ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
            VMSideExit** innermostNestedGuardp, VMSideExit** lrp)
{
#ifdef MOZ_TRACEVIS
    TraceVisStateObj tvso(cx, S_EXECUTE);
#endif
    JS_ASSERT(f->root == f && f->code());
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    JS_ASSERT(!tm->profile);

    if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace() ||
        inlineCallCount + f->maxCallDepth > JS_MAX_INLINE_CALL_COUNT) {
        *lrp = NULL;
        return true;
    }

    /* Make sure the global object is sane. */
    JS_ASSERT(f->globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
    JS_ASSERT(f->nGlobalTypes() == f->globalSlots->length());
    JS_ASSERT_IF(f->globalSlots->length() != 0,
                 f->globalObj->shape() == f->globalShape);

    /* Initialize trace state. */
    TracerState state(cx, tm, f, inlineCallCount, innermostNestedGuardp);
    double* stack = tm->storage->stack();
    double* global = tm->storage->global();
    JSObject* globalObj = f->globalObj;
    unsigned ngslots = f->globalSlots->length();
    uint16* gslots = f->globalSlots->data();

    BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
                     f->typeMap.data(), global, stack);

    AUDIT(traceTriggered);
    debug_only_printf(LC_TMTracer, "entering trace at %s:%u@%u, execs: %u code: %p\n",
                      cx->fp()->script()->filename,
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()),
                      f->execs,
                      f->code());

    debug_only_stmt(uint32 globalSlots = globalObj->numSlots();)
    debug_only_stmt(*(uint64*)&tm->storage->global()[globalSlots] = 0xdeadbeefdeadbeefLL;)

    /* Execute trace. */
    tm->iterationCounter = 0;
    debug_only(int64 t0 = PRMJ_Now();)
#ifdef MOZ_TRACEVIS
    VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, f, state));
#else
    VMSideExit* lr = ExecuteTrace(cx, f, state);
#endif
    debug_only(int64 t1 = PRMJ_Now();)

    JS_ASSERT_IF(lr->exitType == LOOP_EXIT, !lr->calldepth);

    /* Restore interpreter state. */
#ifdef DEBUG
    LEAVE_TREE_STATUS lts =
#endif
        LeaveTree(tm, state, lr);
#ifdef DEBUG
    JS_ASSERT_IF(lts == NO_DEEP_BAIL,
                 *(uint64*)&tm->storage->global()[globalSlots] == 0xdeadbeefdeadbeefLL);
#endif

    *lrp = state.innermost;
    bool ok = !(state.builtinStatus & BUILTIN_ERROR);
    JS_ASSERT_IF(cx->isExceptionPending(), !ok);

    size_t iters = tm->iterationCounter;

    f->execs++;
    f->iters += iters;

#ifdef DEBUG
    JSStackFrame *fp = cx->fp();
    const char *prefix = "";
    if (iters == LOOP_COUNT_MAX)
        prefix = ">";
    debug_only_printf(LC_TMMinimal, "  [%.3f ms] Tree at line %u executed for %s%u iterations;"
                      " executed %u times; leave for %s at %s:%u (%s)\n",
                      double(t1-t0) / PRMJ_USEC_PER_MSEC,
                      f->treeLineNumber, prefix, (uintN)iters, f->execs,
                      getExitName(lr->exitType),
                      fp->script()->filename,
                      js_FramePCToLineNumber(cx, fp),
                      js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc]);
#endif

#ifdef JS_METHODJIT
    if (cx->methodJitEnabled) {
        if (lr->exitType == LOOP_EXIT && f->iters < MIN_LOOP_ITERS
            && f->execs >= LOOP_CHECK_ITERS)
        {
            debug_only_printf(LC_TMMinimal, "  Blacklisting at line %u (executed only %d iters)\n",
                              f->treeLineNumber, f->iters);
            Blacklist((jsbytecode *)f->ip);
        }
    }
#endif

    return ok;
}
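
/*
 * Small RAII helper: sets the given flag for its lifetime and clears it on
 * destruction (used below to waive the soft GC quota while LeaveTree runs).
 */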
class Guardian {
    bool *flagp;
  public:
    Guardian(bool *flagp) {
        this->flagp = flagp;
        JS_ASSERT(!*flagp);
        *flagp = true;
    }

    ~Guardian() {
        JS_ASSERT(*flagp);
        *flagp = false;
    }
};
static JS_FORCES_STACK LEAVE_TREE_STATUS
LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
{
    VOUCH_DOES_NOT_REQUIRE_STACK();

    JSContext* cx = state.cx;

    /* Temporarily waive the soft GC quota to make sure LeaveTree() doesn't fail. */
    Guardian waiver(&JS_THREAD_DATA(cx)->waiveGCQuota);

    FrameInfo** callstack = state.callstackBase;
    double* stack = state.stackBase;

    /*
     * Except if we find that this is a nested bailout, the guard the call
     * returned is the one we have to use to adjust pc and sp.
     */
    VMSideExit* innermost = lr;

    /*
     * While executing a tree we do not update state.sp and state.rp even if
     * they grow. Instead, guards tell us by how much sp and rp should be
     * incremented in case of a side exit. When calling a nested tree, however,
     * we actively adjust sp and rp. If we have such frames from outer trees on
     * the stack, then rp will have been adjusted. Before we can process the
     * stack of the frames of the tree we directly exited from, we have to
     * first work our way through the outer frames and generate interpreter
     * frames for them. Once the call stack (rp) is empty, we can process the
     * final frames (which again are not directly visible and only the guard we
     * exited on will tell us about).
     */
    FrameInfo** rp = (FrameInfo**)state.rp;
    if (lr->exitType == NESTED_EXIT) {
        VMSideExit* nested = state.lastTreeCallGuard;
        if (!nested) {
            /*
             * If lastTreeCallGuard is not set in state, we only have a single
             * level of nesting in this exit, so lr itself is the innermost and
             * outermost nested guard, and hence we set nested to lr. The
             * calldepth of the innermost guard is not added to state.rp, so we
             * do it here manually. For a nesting depth greater than 1 the
             * call tree code already added the innermost guard's calldepth
             * to state.rpAtLastTreeCall.
             */
            nested = lr;
            rp += lr->calldepth;
        } else {
            /*
             * During unwinding state.rp gets overwritten at every step and we
             * restore it here to its state at the innermost nested guard. The
             * builtin already added the calldepth of that innermost guard to
             * state.rpAtLastTreeCall.
             */
            rp = (FrameInfo**)state.rpAtLastTreeCall;
        }
        innermost = state.lastTreeExitGuard;
        if (state.innermostNestedGuardp)
            *state.innermostNestedGuardp = nested;
        JS_ASSERT(nested->exitType == NESTED_EXIT);
        JS_ASSERT(state.lastTreeExitGuard);
        JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
    }

    int32_t bs = state.builtinStatus;
    bool bailed = innermost->exitType == STATUS_EXIT && (bs & BUILTIN_BAILED);
    if (bailed) {
        /*
         * A _FAIL native already called LeaveTree. We already reconstructed
         * the interpreter stack, in pre-call state, with pc pointing to the
         * CALL/APPLY op, for correctness. Then we continued in native code.
         */
        if (!(bs & BUILTIN_ERROR)) {
            /*
             * The builtin or native deep-bailed but finished successfully
             * (no exception or error).
             *
             * After it returned, the JIT code stored the results of the
             * builtin or native at the top of the native stack and then
             * immediately flunked the guard on state->builtinStatus.
             *
             * Now LeaveTree has been called again from the tail of
             * ExecuteTree. We are about to return to the interpreter. Adjust
             * the top stack frame to resume on the next op.
             */
            JSFrameRegs* regs = cx->regs;
            JSOp op = (JSOp) *regs->pc;

            /*
             * JSOP_SETELEM can be coalesced with a JSOP_POP in the interpreter.
             * Since this doesn't re-enter the recorder, the post-state snapshot
             * is invalid. Fix it up here.
             */
            if (op == JSOP_SETELEM && JSOp(regs->pc[JSOP_SETELEM_LENGTH]) == JSOP_POP) {
                regs->sp -= js_CodeSpec[JSOP_SETELEM].nuses;
                regs->sp += js_CodeSpec[JSOP_SETELEM].ndefs;
                regs->pc += JSOP_SETELEM_LENGTH;
                op = JSOP_POP;
            }

            const JSCodeSpec& cs = js_CodeSpec[op];
            regs->sp -= (cs.format & JOF_INVOKE) ? GET_ARGC(regs->pc) + 2 : cs.nuses;
            regs->sp += cs.ndefs;
            regs->pc += cs.length;
            JS_ASSERT_IF(!cx->fp()->hasImacropc(),
                         cx->fp()->slots() + cx->fp()->numFixed() +
                         js_ReconstructStackDepth(cx, cx->fp()->script(), regs->pc) ==
                         regs->sp);

            /*
             * If there's a tree call around the point that we deep exited at,
             * then state.sp and state.rp were restored to their original
             * values before the tree call and sp might be less than deepBailSp,
             * which we sampled when we were told to deep bail.
             */
            JS_ASSERT(state.deepBailSp >= state.stackBase && state.sp <= state.deepBailSp);

            /*
             * As explained above, the JIT code stored a result value or values
             * on the native stack. Transfer them to the interpreter stack now.
             * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
             * loop.)
             */
            JSValueType* typeMap = innermost->stackTypeMap();
            for (int i = 1; i <= cs.ndefs; i++) {
                NativeToValue(cx,
                              regs->sp[-i],
                              typeMap[innermost->numStackSlots - i],
                              (jsdouble *) state.deepBailSp
                              + innermost->sp_adj / sizeof(jsdouble) - i);
            }
        }
        return DEEP_BAILED;
    }

    while (callstack < rp) {
        FrameInfo* fi = *callstack;
        /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
        JSObject* callee = *(JSObject**)&stack[fi->callerHeight];

        /*
         * Flush the slots for cx->fp() (which will become cx->fp()->prev after
         * SynthesizeFrame). Since a frame's arguments (including callee
         * and thisv) are part of the frame, we only want to flush up to the
         * next frame's arguments, so set cx->regs->sp to not include said
         * arguments. The upcoming call to SynthesizeFrame will reset regs->sp
         * to its correct value.
         */
        cx->regs->sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc()));
        int slots = FlushNativeStackFrame(cx, 0 /* callDepth */, fi->get_typemap(), stack);

        /* Finish initializing cx->fp() and push a new cx->fp(). */
        SynthesizeFrame(cx, *fi, callee);
#ifdef DEBUG
        JSStackFrame* fp = cx->fp();
        debug_only_printf(LC_TMTracer,
                          "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
                          fp->script()->filename,
                          js_FramePCToLineNumber(cx, fp),
                          FramePCOffset(cx, fp),
                          slots,
                          (void*)*callstack);
#endif
        /*
         * Keep track of the additional frames we put on the interpreter stack
         * and the native stack slots we consumed.
         */
        ++*state.inlineCallCountp;
        ++callstack;
        stack += slots;
    }

    /*
     * We already synthesized the frames around the innermost guard. Here we
     * just deal with additional frames inside the tree we are bailing out
     * from.
     */
    JS_ASSERT(rp == callstack);
    unsigned calldepth = innermost->calldepth;
    unsigned calleeOffset = 0;
    for (unsigned n = 0; n < calldepth; ++n) {
        /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
        calleeOffset += callstack[n]->callerHeight;
        JSObject* callee = *(JSObject**)&stack[calleeOffset];

        /* Reconstruct the frame. */
        SynthesizeFrame(cx, *callstack[n], callee);
        ++*state.inlineCallCountp;
#ifdef DEBUG
        JSStackFrame* fp = cx->fp();
        debug_only_printf(LC_TMTracer,
                          "synthesized shallow frame for %s:%u@%u\n",
                          fp->script()->filename, js_FramePCToLineNumber(cx, fp),
                          FramePCOffset(cx, fp));
#endif
    }

    /*
     * Adjust sp and pc relative to the tree we exited from (not the tree we
     * entered into). These are our final values for sp and pc since
     * SynthesizeFrame has already taken care of all frames in between.
     */
    JSStackFrame* const fp = cx->fp();

    /*
     * If we are not exiting from an inlined frame, the state->sp is spbase.
     * Otherwise spbase is whatever slots frames around us consume.
     */
    cx->regs->pc = innermost->pc;
    if (innermost->imacpc)
        fp->setImacropc(innermost->imacpc);
    else
        fp->clearImacropc();

    /*
     * Set cx->regs->sp for the top frame. Since the top frame does not have a
     * FrameInfo (a FrameInfo is only pushed for calls), we basically need to
     * compute the offset from fp->slots() to the top of the stack based on the
     * number of native slots allocated for this function.
     *
     * Duplicate native stack layout computation: see VisitFrameSlots header comment.
     */
    uintN slotOffset = innermost->numStackSlots - innermost->numStackSlotsBelowCurrentFrame;
    if (fp->isGlobalFrame()) {
        /* Global nfixed slots are not kept on the native stack, so add them back. */
        slotOffset += fp->globalScript()->nfixed;
    } else {
        /* A frame's native slots includes args and frame ptrs, so strip them off. */
        slotOffset -= NumSlotsBeforeFixed(fp);
    }
    cx->regs->sp = fp->slots() + slotOffset;

    /* Assert that we computed sp correctly. */
    JS_ASSERT_IF(!fp->hasImacropc(),
                 fp->slots() + fp->numFixed() +
                 js_ReconstructStackDepth(cx, fp->script(), cx->regs->pc) == cx->regs->sp);

#ifdef EXECUTE_TREE_TIMER
    uint64 cycles = rdtsc() - state.startTime;
#elif defined(JS_JIT_SPEW)
    uint64 cycles = 0;
#endif
    debug_only_printf(LC_TMTracer,
                      "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
                      "calldepth=%d, cycles=%llu\n",
                      fp->script()->filename,
                      js_FramePCToLineNumber(cx, fp),
                      FramePCOffset(cx, fp),
                      js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc],
                      (void*)lr,
                      getExitName(lr->exitType),
                      (long long int)(cx->regs->sp - fp->base()),
                      calldepth,
                      (unsigned long long int)cycles);

#ifdef DEBUG
    int slots =
#endif
        FlushNativeStackFrame(cx, innermost->calldepth, innermost->stackTypeMap(), stack);
    JS_ASSERT(unsigned(slots) == innermost->numStackSlots);

    /*
     * If this trace is part of a tree, later branches might have added
     * additional globals for which we don't have any type information
     * available in the side exit. We merge in this information from the entry
     * type-map. See also the comment in the constructor of TraceRecorder
     * regarding why this is always safe to do.
     */
    TreeFragment* outermostTree = state.outermostTree;
    uint16* gslots = outermostTree->globalSlots->data();
    unsigned ngslots = outermostTree->globalSlots->length();
    JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
    JSValueType* globalTypeMap;

    /* Are there enough globals? */
    TypeMap& typeMap = *tm->cachedTempTypeMap;
    if (innermost->numGlobalSlots == ngslots) {
        /* Yes. This is the ideal fast path. */
        globalTypeMap = innermost->globalTypeMap();
    } else {
        /*
         * No. Merge the typemap of the innermost entry and exit together. This
         * should always work because it is invalid for nested trees or linked
         * trees to have incompatible types. Thus, whenever a new global type
         * is lazily added into a tree, all dependent and linked trees are
         * immediately specialized (see bug 476653).
         */
        JS_ASSERT(innermost->root()->nGlobalTypes() == ngslots);
        JS_ASSERT(innermost->root()->nGlobalTypes() > innermost->numGlobalSlots);
        typeMap.ensure(ngslots);
#ifdef DEBUG
        unsigned check_ngslots =
#endif
        BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
        JS_ASSERT(check_ngslots == ngslots);
        globalTypeMap = typeMap.data();
    }

    /* Write back interned globals. */
    JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
    JSObject* globalObj = outermostTree->globalObj;
    FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap);

    if (innermost->exitType != TIMEOUT_EXIT)
        AUDIT(sideExitIntoInterpreter);
    else
        AUDIT(timeoutIntoInterpreter);

    state.innermost = innermost;
    return NO_DEEP_BAIL;
}
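
/*
 * Given a loop header bytecode (JSOP_TRACE or JSOP_NOTRACE), return the
 * bytecode just past the loop body, as recorded in the header's source note.
 */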
static jsbytecode*
GetLoopBottom(JSContext *cx, jsbytecode *pc)
{
    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
    JSScript *script = cx->fp()->script();
    jssrcnote *sn = js_GetSrcNote(script, pc);
    if (!sn)
        return NULL;
    return pc + js_GetSrcNoteOffset(sn, 0);
}
JS_ALWAYS_INLINE void
TraceRecorder::assertInsideLoop()
{
#ifdef DEBUG
    /* Asserts at callDepth == 0 will catch problems at the call op. */
    if (callDepth > 0)
        return;

    jsbytecode *pc = cx->regs->fp->hasImacropc() ? cx->regs->fp->imacropc() : cx->regs->pc;
    jsbytecode *beg = (jsbytecode *)tree->ip;
    jsbytecode *end = GetLoopBottom(cx, beg);

    /*
     * In some cases (continue in a while loop), we jump to the goto
     * immediately preceding a loop (the one that jumps to the loop
     * condition).
     */
    JS_ASSERT(pc >= beg - JSOP_GOTO_LENGTH && pc <= end);
#endif
}
JS_REQUIRES_STACK MonitorResult
RecordLoopEdge(JSContext* cx, uintN& inlineCallCount)
{
    TraceVisStateObj tvso(cx, S_MONITOR);

    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    JS_ASSERT(!tm->profile);

    /* Is the recorder currently active? */
    if (tm->recorder) {
        tm->recorder->assertInsideLoop();
        jsbytecode* pc = cx->regs->pc;
        if (pc == tm->recorder->tree->ip) {
            tm->recorder->closeLoop();
        } else {
            MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount);
            JS_ASSERT((r == MONITOR_RECORDING) == (TRACE_RECORDER(cx) != NULL));
            if (r == MONITOR_RECORDING || r == MONITOR_ERROR)
                return r;

            /*
             * recordLoopEdge will invoke an inner tree if we have a matching
             * one. If we arrive here, that tree didn't run to completion and
             * instead we mis-matched or the inner tree took a side exit other than
             * the loop exit. We are thus no longer guaranteed to be parked on the
             * same loop header RecordLoopEdge was called for. In fact, this
             * might not even be a loop header at all. Hence if the program counter
             * no longer hovers over the inner loop header, return to the
             * interpreter and do not attempt to trigger or record a new tree at
             * this location.
             */
            if (pc != cx->regs->pc) {
                tvso.r = R_INNER_SIDE_EXIT;
                return MONITOR_NOT_RECORDING;
            }
        }
    }
    JS_ASSERT(!tm->recorder);

    /*
     * Make sure the shape of the global object still matches (this might flush
     * the JIT cache).
     */
    JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;

    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
        Backoff(cx, cx->regs->pc);
        return MONITOR_NOT_RECORDING;
    }

    /* Do not enter the JIT code with a pending operation callback. */
    if (JS_THREAD_DATA(cx)->interruptFlags) {
        tvso.r = R_CALLBACK_PENDING;
        return MONITOR_NOT_RECORDING;
    }

    jsbytecode* pc = cx->regs->pc;
    uint32 argc = entryFrameArgc(cx);

    TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);

    /*
     * If we have no code in the anchor and no peers, we definitively won't be
     * able to activate any trees, so start compiling.
     */
    if (!f->code() && !f->peer) {
    record:
        if (++f->hits() < HOTLOOP) {
            tvso.r = f->hits() < 1 ? R_BACKED_OFF : R_COLD;
            return MONITOR_NOT_RECORDING;
        }

        if (!ScopeChainCheck(cx, f)) {
            tvso.r = R_FAIL_SCOPE_CHAIN_CHECK;
            return MONITOR_NOT_RECORDING;
        }

        /*
         * We can give RecordTree the root peer. If that peer is already taken,
         * it will walk the peer list and find us a free slot or allocate a new
         * tree if needed.
         */
        bool rv = RecordTree(cx, f->first, NULL, NULL, 0, globalSlots);
        if (!rv)
            tvso.r = R_FAIL_RECORD_TREE;
        return RecordingIfTrue(rv);
    }

    debug_only_printf(LC_TMTracer,
                      "Looking for compat peer %d@%d, from %p (ip: %p)\n",
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()), (void*)f, f->ip);

    uintN count;
    TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
    if (!match) {
        if (count < MAXPEERS)
            goto record;

        /*
         * If we hit the max peers ceiling, don't try to lookup fragments all
         * the time. That's expensive. This must be a rather type-unstable loop.
         */
        debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
        Blacklist((jsbytecode*) f->root->ip);
        tvso.r = R_MAX_PEERS;
        return MONITOR_NOT_RECORDING;
    }

    VMSideExit* lr = NULL;
    VMSideExit* innermostNestedGuard = NULL;

    if (!ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard, &lr))
        return MONITOR_ERROR;

    if (!lr) {
        tvso.r = R_FAIL_EXECUTE_TREE;
        return MONITOR_NOT_RECORDING;
    }

    /*
     * If we exit on a branch, or on a tree call guard, try to grow the inner
     * tree (in case of a branch exit), or the tree nested around the tree we
     * exited from (in case of the tree call guard).
     */
    bool rv;
    switch (lr->exitType) {
      case UNSTABLE_LOOP_EXIT:
        rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL, 0);
        if (!rv)
            tvso.r = R_FAIL_STABILIZE;
        return RecordingIfTrue(rv);

      case MUL_ZERO_EXIT:
      case OVERFLOW_EXIT:
        if (lr->exitType == MUL_ZERO_EXIT)
            tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
        else
            tm->oracle->markInstructionUndemotable(cx->regs->pc);
        /* FALL THROUGH */
      case BRANCH_EXIT:
      case CASE_EXIT:
        rv = AttemptToExtendTree(cx, lr, NULL, NULL, NULL);
        return RecordingIfTrue(rv);

      case LOOP_EXIT:
        if (innermostNestedGuard) {
            rv = AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL);
            return RecordingIfTrue(rv);
        }
        tvso.r = R_NO_EXTEND_OUTER;
        return MONITOR_NOT_RECORDING;

      case MISMATCH_EXIT:
        tvso.r = R_MISMATCH_EXIT;
        return MONITOR_NOT_RECORDING;
      case OOM_EXIT:
        tvso.r = R_OOM_EXIT;
        return MONITOR_NOT_RECORDING;
      case TIMEOUT_EXIT:
        tvso.r = R_TIMEOUT_EXIT;
        return MONITOR_NOT_RECORDING;
      case DEEP_BAIL_EXIT:
        tvso.r = R_DEEP_BAIL_EXIT;
        return MONITOR_NOT_RECORDING;
      case STATUS_EXIT:
        tvso.r = R_STATUS_EXIT;
        return MONITOR_NOT_RECORDING;

      default:
        /*
         * No, this was an unusual exit (i.e. out of memory/GC), so just resume
         * interpretation.
         */
        tvso.r = R_OTHER_EXIT;
        return MONITOR_NOT_RECORDING;
    }
}
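
/*
 * Called once per interpreted opcode while a recorder is active: dispatches to
 * the per-opcode record_* handler and translates its status into a decision to
 * keep recording, abort, or flush the JIT cache.
 */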
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::monitorRecording(JSOp op)
{
    JS_ASSERT(!addPropShapeBefore);

    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
    debug_only_stmt( JSContext *localcx = cx; )

    JS_ASSERT(!localtm.profile);

    /* Process needFlush requests now. */
    if (localtm.needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return ARECORD_ABORTED;
    }
    JS_ASSERT(!fragment->lastIns);

    /*
     * Clear one-shot state used to communicate between record_JSOP_CALL and post-
     * opcode-case-guts record hook (record_NativeCallComplete).
     */
    pendingSpecializedNative = NULL;
    pendingGlobalSlotsToSet.clear();

    /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
    if (pendingGuardCondition) {
        LIns* cond = pendingGuardCondition;
        bool expected = true;
        /* Put 'cond' in a form suitable for a guard/branch condition if it's not already. */
        ensureCond(&cond, &expected);
        guard(expected, cond, STATUS_EXIT);
        pendingGuardCondition = NULL;
    }

    /* Handle one-shot request to unbox the result of a property get or ObjectToIterator. */
    if (pendingUnboxSlot) {
        LIns* val_ins = get(pendingUnboxSlot);
        /*
         * We need to know from where to unbox the value. Since pendingUnboxSlot
         * is only set in finishGetProp, we can depend on LIns* tracked for
         * pendingUnboxSlot to have this information.
         */
        LIns* unboxed_ins = unbox_value(*pendingUnboxSlot,
                                        AnyAddress(val_ins->oprnd1(), val_ins->disp()),
                                        snapshot(BRANCH_EXIT));
        set(pendingUnboxSlot, unboxed_ins);
        pendingUnboxSlot = 0;
    }

    if (LogController.lcbits & LC_TMRecorder) {
        debug_only_print0(LC_TMRecorder, "\n");
        js_Disassemble1(cx, cx->fp()->script(), cx->regs->pc,
                        cx->fp()->hasImacropc()
                        ? 0 : cx->regs->pc - cx->fp()->script()->code,
                        !cx->fp()->hasImacropc(), stdout);
    }

    /*
     * If op is not a break or a return from a loop, continue recording and
     * follow the trace. We check for imacro-calling bytecodes inside each
     * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
     * time.
     */

    AbortableRecordingStatus status;
    bool wasInImacro = (cx->fp()->hasImacropc());
    switch (op) {
      default:
          AbortRecording(cx, "unsupported opcode");
          status = ARECORD_ERROR;
          break;
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)              \
      case op:                                                                 \
          status = this->record_##op();                                        \
          break;
# include "jsopcode.tbl"
# undef OPDEF
    }

    /* N.B. |this| may have been deleted. */

    if (!JSOP_IS_IMACOP(op)) {
        JS_ASSERT(status != ARECORD_IMACRO);
        JS_ASSERT_IF(!wasInImacro, !localcx->fp()->hasImacropc());
    }

    if (localtm.recorder) {
        JS_ASSERT(status != ARECORD_ABORTED);
        JS_ASSERT(localtm.recorder == this);

        /* |this| recorder completed, but a new one started; keep recording. */
        if (status == ARECORD_COMPLETED)
            return ARECORD_CONTINUE;

        /* Handle lazy aborts; propagate the 'error' status. */
        if (StatusAbortsRecorderIfActive(status)) {
            AbortRecording(cx, js_CodeName[op]);
            return status == ARECORD_ERROR ? ARECORD_ERROR : ARECORD_ABORTED;
        }

        if (outOfMemory() || OverfullJITCache(cx, &localtm)) {
            ResetJIT(cx, FR_OOM);

            /*
             * If the status returned was ARECORD_IMACRO, then we just
             * changed cx->regs, we need to tell the interpreter to sync
             * its local variables.
             */
            return status == ARECORD_IMACRO ? ARECORD_IMACRO_ABORTED : ARECORD_ABORTED;
        }
    } else {
        JS_ASSERT(status == ARECORD_COMPLETED ||
                  status == ARECORD_ABORTED ||
                  status == ARECORD_ERROR);
    }
    return status;
}
JS_REQUIRES_STACK TraceRecorder::AbortResult
AbortRecording(JSContext* cx, const char* reason)
{
#ifdef DEBUG
    JS_ASSERT(TRACE_RECORDER(cx));
    return TRACE_RECORDER(cx)->finishAbort(reason);
#else
    return TRACE_RECORDER(cx)->finishAbort("[no reason]");
#endif
}
#if defined NANOJIT_IA32
static bool
CheckForSSE2()
{
    char *c = getenv("X86_FORCE_SSE2");
    if (c)
        return (!strcmp(c, "true") ||
                !strcmp(c, "1") ||
                !strcmp(c, "yes"));

    int features = 0;
#if defined _MSC_VER
    __asm
    {
        pushad
        mov eax, 1
        cpuid
        mov features, edx
        popad
    }
#elif defined __GNUC__
    asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
        "mov $0x01, %%eax\n"
        "cpuid\n"
        "mov %%edx, %0\n"
        "xchg %%esi, %%ebx\n"
        : "=m" (features)
        : /* We have no inputs */
        : "%eax", "%esi", "%ecx", "%edx"
       );
#elif defined __SUNPRO_C || defined __SUNPRO_CC
    asm("push %%ebx\n"
        "mov $0x01, %%eax\n"
        "cpuid\n"
        "pop %%ebx\n"
        : "=d" (features)
        : /* We have no inputs */
        : "%eax", "%ecx"
       );
#endif
    return (features & (1<<26)) != 0;
}
#endif
#if defined(NANOJIT_ARM)

#if defined(_MSC_VER) && defined(WINCE)

// these come in from jswince.asm
extern "C" int js_arm_try_armv5_op();
extern "C" int js_arm_try_armv6_op();
extern "C" int js_arm_try_armv7_op();
extern "C" int js_arm_try_vfp_op();

static unsigned int
arm_check_arch()
{
    unsigned int arch = 4;
    __try {
        js_arm_try_armv5_op();
        arch = 5;
        js_arm_try_armv6_op();
        arch = 6;
        js_arm_try_armv7_op();
        arch = 7;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
    }
    return arch;
}

static bool
arm_check_vfp()
{
#ifdef WINCE_WINDOWS_MOBILE
    return false;
#else
    bool ret = false;
    __try {
        js_arm_try_vfp_op();
        ret = true;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
        ret = false;
    }
    return ret;
#endif
}

#define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1

/* See "Suppressing Exception Notifications while Debugging", at
 * http://msdn.microsoft.com/en-us/library/ms924252.aspx
 */
static void
disable_debugger_exceptions()
{
    // 2 == TLSSLOT_KERNEL
    DWORD kctrl = (DWORD) TlsGetValue(2);
    // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
    kctrl |= 0x12;
    TlsSetValue(2, (LPVOID) kctrl);
}

static void
enable_debugger_exceptions()
{
    // 2 == TLSSLOT_KERNEL
    DWORD kctrl = (DWORD) TlsGetValue(2);
    // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
    kctrl &= ~0x12;
    TlsSetValue(2, (LPVOID) kctrl);
}

#elif defined(__GNUC__) && defined(AVMPLUS_LINUX)

// Assume ARMv4 by default.
static unsigned int arm_arch = 4;
static bool arm_has_vfp = false;
static bool arm_has_neon = false;
static bool arm_has_iwmmxt = false;
static bool arm_tests_initialized = false;

#ifdef ANDROID
// we're actually reading /proc/cpuinfo, but oh well
static void
arm_read_auxv()
{
    char buf[1024];
    char* pos;
    const char* ver_token = "CPU architecture: ";
    FILE* f = fopen("/proc/cpuinfo", "r");
    fread(buf, sizeof(char), 1024, f);
    fclose(f);
    pos = strstr(buf, ver_token);
    if (pos) {
        int ver = *(pos + strlen(ver_token)) - '0';
        arm_arch = ver;
    }
    arm_has_neon = strstr(buf, "neon") != NULL;
    arm_has_vfp = strstr(buf, "vfp") != NULL;
    arm_has_iwmmxt = strstr(buf, "iwmmxt") != NULL;
    arm_tests_initialized = true;
}
#else
static void
arm_read_auxv()
{
    int fd;
    Elf32_auxv_t aux;

    fd = open("/proc/self/auxv", O_RDONLY);
    if (fd > 0) {
        while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
            if (aux.a_type == AT_HWCAP) {
                uint32_t hwcap = aux.a_un.a_val;
                if (getenv("ARM_FORCE_HWCAP"))
                    hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
                else if (getenv("_SBOX_DIR"))
                    continue;  // Ignore the rest, if we're running in scratchbox
                // hardcode these values to avoid depending on specific versions
                // of the hwcap header, e.g. HWCAP_NEON
                arm_has_vfp = (hwcap & 64) != 0;
                arm_has_iwmmxt = (hwcap & 512) != 0;
                // this flag is only present on kernel 2.6.29
                arm_has_neon = (hwcap & 4096) != 0;
            } else if (aux.a_type == AT_PLATFORM) {
                const char *plat = (const char*) aux.a_un.a_val;
                if (getenv("ARM_FORCE_PLATFORM"))
                    plat = getenv("ARM_FORCE_PLATFORM");
                else if (getenv("_SBOX_DIR"))
                    continue;  // Ignore the rest, if we're running in scratchbox
                // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
                // or big-endian variants and the digit indicates the version of the platform.
                // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
                // processors. Architectures newer than ARMv7 are assumed to be
                // backwards-compatible with ARMv7.
                if ((plat[0] == 'v') &&
                    (plat[1] >= '4') && (plat[1] <= '9') &&
                    ((plat[2] == 'l') || (plat[2] == 'b')))
                {
                    arm_arch = plat[1] - '0';
                }
            }
        }
        close(fd);

        // if we don't have 2.6.29, we have to do this hack; set
        // the env var to trust HWCAP.
        if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
            arm_has_neon = true;
    }

    arm_tests_initialized = true;
}
#endif

static unsigned int
arm_check_arch()
{
    if (!arm_tests_initialized)
        arm_read_auxv();

    return arm_arch;
}

static bool
arm_check_vfp()
{
    if (!arm_tests_initialized)
        arm_read_auxv();

    return arm_has_vfp;
}

#else
#warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
static unsigned int
arm_check_arch() { return 4; }
static bool
arm_check_vfp() { return false; }
#endif

#ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
static void
enable_debugger_exceptions() { }
static void
disable_debugger_exceptions() { }
#endif

#endif /* NANOJIT_ARM */
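
/* Set the per-thread upper bound on the size of the trace code cache. */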
void
SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
{
    JS_THREAD_DATA(cx)->maxCodeCacheBytes = bytes;
}
bool
InitJIT(TraceMonitor *tm)
{
    // InitJIT expects this area to be zero'd
    memset(tm, 0, sizeof(*tm));

#if defined JS_JIT_SPEW
    tm->profAlloc = NULL;
    /* Set up debug logging. */
    if (!did_we_set_up_debug_logging) {
        InitJITLogController();
        did_we_set_up_debug_logging = true;
    }
    /* Set up fragprofiling, if required. */
    if (LogController.lcbits & LC_FragProfile) {
        tm->profAlloc = js_new<VMAllocator>((char*)NULL, 0); /* no reserve needed in debug builds */
        JS_ASSERT(tm->profAlloc);
        tm->profTab = new (*tm->profAlloc) FragStatsMap(*tm->profAlloc);
    }
#else
    PodZero(&LogController);
#endif

    if (!did_we_check_processor_features) {
#if defined NANOJIT_IA32
        avmplus::AvmCore::config.i386_use_cmov =
            avmplus::AvmCore::config.i386_sse2 = CheckForSSE2();
        avmplus::AvmCore::config.i386_fixed_esp = true;
#endif
#if defined NANOJIT_ARM
        disable_debugger_exceptions();

        bool arm_vfp = arm_check_vfp();
        unsigned int arm_arch = arm_check_arch();

        enable_debugger_exceptions();

        avmplus::AvmCore::config.arm_vfp = arm_vfp;
        avmplus::AvmCore::config.soft_float = !arm_vfp;
        avmplus::AvmCore::config.arm_arch = arm_arch;

        // Sanity-check the configuration detection.
        //  * We don't understand architectures prior to ARMv4.
        JS_ASSERT(arm_arch >= 4);
#endif
        did_we_check_processor_features = true;
    }

#define CHECK_ALLOC(lhs, rhs) \
    do { lhs = (rhs); if (!lhs) return false; } while (0)

    CHECK_ALLOC(tm->oracle, js_new<Oracle>());

    CHECK_ALLOC(tm->recordAttempts, js_new<RecordAttemptMap>());
    if (!tm->recordAttempts->init(PC_HASH_COUNT))
        return false;

    CHECK_ALLOC(tm->loopProfiles, js_new<LoopProfileMap>());
    if (!tm->loopProfiles->init(PC_HASH_COUNT))
        return false;

    char *dataReserve, *traceReserve, *tempReserve;
    CHECK_ALLOC(dataReserve, (char *)js_malloc(DataReserveSize));
    CHECK_ALLOC(traceReserve, (char *)js_malloc(TraceReserveSize));
    CHECK_ALLOC(tempReserve, (char *)js_malloc(TempReserveSize));
    CHECK_ALLOC(tm->dataAlloc, js_new<VMAllocator>(dataReserve, DataReserveSize));
    CHECK_ALLOC(tm->traceAlloc, js_new<VMAllocator>(traceReserve, TraceReserveSize));
    CHECK_ALLOC(tm->tempAlloc, js_new<VMAllocator>(tempReserve, TempReserveSize));
    CHECK_ALLOC(tm->codeAlloc, js_new<CodeAlloc>());
    CHECK_ALLOC(tm->frameCache, js_new<FrameInfoCache>(tm->dataAlloc));
    CHECK_ALLOC(tm->storage, js_new<TraceNativeStorage>());
    CHECK_ALLOC(tm->cachedTempTypeMap, js_new<TypeMap>((Allocator*)NULL));

    verbose_only( tm->branches = NULL; )

    debug_only(PodZero(&jitstats));

    /* Architecture properties used by test cases. */
    jitstats.archIsIA32 = 0;
    jitstats.archIs64BIT = 0;
    jitstats.archIsARM = 0;
    jitstats.archIsSPARC = 0;
    jitstats.archIsPPC = 0;
#if defined NANOJIT_IA32
    jitstats.archIsIA32 = 1;
#endif
#if defined NANOJIT_64BIT
    jitstats.archIs64BIT = 1;
#endif
#if defined NANOJIT_ARM
    jitstats.archIsARM = 1;
#endif
#if defined NANOJIT_SPARC
    jitstats.archIsSPARC = 1;
#endif
#if defined NANOJIT_PPC
    jitstats.archIsPPC = 1;
#endif
#if defined NANOJIT_X64
    jitstats.archIsAMD64 = 1;
#endif

    if (!tm->tracedScripts.init())
        return false;
    return true;
}
void
FinishJIT(TraceMonitor *tm)
{
    JS_ASSERT(!tm->recorder);
    JS_ASSERT(!tm->profile);

#ifdef JS_JIT_SPEW
    if (jitstats.recorderStarted) {
        char sep = ':';
        debug_only_print0(LC_TMStats, "recorder");
#define RECORDER_JITSTAT(_ident, _name)                             \
        debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
                          (unsigned long long int)jitstats._ident); \
        sep = ',';
#define JITSTAT(x) /* nothing */
#include "jitstats.tbl"
#undef JITSTAT
#undef RECORDER_JITSTAT
        debug_only_print0(LC_TMStats, "\n");

        sep = ':';
        debug_only_print0(LC_TMStats, "monitor");
#define MONITOR_JITSTAT(_ident, _name)                              \
        debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
                          (unsigned long long int)jitstats._ident); \
        sep = ',';
#define JITSTAT(x) /* nothing */
#include "jitstats.tbl"
#undef JITSTAT
#undef MONITOR_JITSTAT
        debug_only_print0(LC_TMStats, "\n");
    }
#endif

    js_delete(tm->recordAttempts);
    js_delete(tm->loopProfiles);
    js_delete(tm->oracle);

    // Recover profiling data from expiring Fragments, and display
    // the results.
    if (LogController.lcbits & LC_FragProfile) {
        for (Seq<Fragment*>* f = tm->branches; f; f = f->tail)
            FragProfiling_FragFinalizer(f->head, tm);

        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
            for (TreeFragment *f = tm->vmfragments[i]; f; f = f->next) {
                JS_ASSERT(f->root == f);
                for (TreeFragment *p = f; p; p = p->peer)
                    FragProfiling_FragFinalizer(p, tm);
            }
        }

        FragProfiling_showResults(tm);
        js_delete(tm->profAlloc);
    } else {
        NanoAssert(!tm->profTab);
        NanoAssert(!tm->profAlloc);
    }

    PodArrayZero(tm->vmfragments);

    js_delete(tm->frameCache);
    tm->frameCache = NULL;

    js_delete(tm->codeAlloc);
    tm->codeAlloc = NULL;

    js_delete(tm->dataAlloc);
    tm->dataAlloc = NULL;

    js_delete(tm->traceAlloc);
    tm->traceAlloc = NULL;

    js_delete(tm->tempAlloc);
    tm->tempAlloc = NULL;

    js_delete(tm->storage);
    tm->storage = NULL;

    js_delete(tm->cachedTempTypeMap);
    tm->cachedTempTypeMap = NULL;
}
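
/*
 * Remove every fragment, loop profile and record-attempt entry whose ip falls
 * inside |script|, which is about to be destroyed.
 */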
JS_REQUIRES_STACK void
PurgeScriptFragments(TraceMonitor* tm, JSScript* script)
{
    debug_only_printf(LC_TMTracer,
                      "Purging fragments for JSScript %p.\n", (void*)script);

    /* A recorder script is being evaluated and can not be destroyed or GC-ed. */
    JS_ASSERT_IF(tm->recorder,
                 JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length);

    for (LoopProfileMap::Enum e(*tm->loopProfiles); !e.empty(); e.popFront()) {
        if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
            e.removeFront();
    }

    TracedScriptSet::Ptr found = tm->tracedScripts.lookup(script);
    if (found)
        tm->tracedScripts.remove(found);

    for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
        TreeFragment** fragp = &tm->vmfragments[i];
        while (TreeFragment* frag = *fragp) {
            if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
                /* This fragment is associated with the script. */
                debug_only_printf(LC_TMTracer,
                                  "Disconnecting TreeFragment %p "
                                  "with ip %p, in range [%p,%p).\n",
                                  (void*)frag, frag->ip, script->code,
                                  script->code + script->length);

                JS_ASSERT(frag->root == frag);
                *fragp = frag->next;
                do {
                    verbose_only( FragProfiling_FragFinalizer(frag, tm); )
                    TrashTree(frag);
                } while ((frag = frag->peer) != NULL);
                continue;
            }
            fragp = &frag->next;
        }
    }

    RecordAttemptMap &table = *tm->recordAttempts;
    for (RecordAttemptMap::Enum e(table); !e.empty(); e.popFront()) {
        if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
            e.removeFront();
    }
}
)
7915 * You might imagine the outOfMemory flag on the allocator is sufficient
7916 * to model the notion of "running out of memory", but there are actually
7917 * two separate issues involved:
7919 * 1. The process truly running out of memory: malloc() or mmap()
7922 * 2. The limit we put on the "intended size" of the tracemonkey code
7923 * cache, in pages, has been exceeded.
7925 * Condition 1 doesn't happen very often, but we're obliged to try to
7926 * safely shut down and signal the rest of spidermonkey when it
7927 * does. Condition 2 happens quite regularly.
7929 * Presently, the code in this file doesn't check the outOfMemory condition
7930 * often enough, and frequently misuses the unchecked results of
7931 * lirbuffer insertions on the assumption that it will notice the
7932 * outOfMemory flag "soon enough" when it returns to the monitorRecording
7933 * function. This turns out to be a false assumption if we use outOfMemory
7934 * to signal condition 2: we regularly provoke "passing our intended
7935 * size" and regularly fail to notice it in time to prevent writing
7936 * over the end of an artificially self-limited LIR buffer.
7938 * To mitigate, though not completely solve, this problem, we're
7939 * modeling the two forms of memory exhaustion *separately* for the
7940 * time being: condition 1 is handled by the outOfMemory flag inside
7941 * nanojit, and condition 2 is being handled independently *here*. So
7942 * we construct our allocators to use all available memory they like,
7943 * and only report outOfMemory to us when there is literally no OS memory
7944 * left. Merely purging our cache when we hit our highwater mark is
7945 * handled by the (few) callers of this function.
7948 jsuint maxsz
= JS_THREAD_DATA(cx
)->maxCodeCacheBytes
;
7949 return (tm
->codeAlloc
->size() + tm
->dataAlloc
->size() + tm
->traceAlloc
->size() > maxsz
);
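
/*
 * Called when a _FAIL builtin must return to the interpreter mid-trace: flush
 * trace state back to the interpreter via LeaveTree and flag the builtin
 * status so the trace exits once the native returns.
 */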
JS_FORCES_STACK JS_FRIEND_API(void)
DeepBail(JSContext *cx)
{
    JS_ASSERT(JS_ON_TRACE(cx));

    /*
     * Exactly one context on the current thread is on trace. Find out which
     * one. (Most callers cannot guarantee that it's cx.)
     */
    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);

    /* It's a bug if a non-FAIL_STATUS builtin gets here. */
    JS_ASSERT(tm->bailExit);

    debug_only_print0(LC_TMTracer, "Deep bail.\n");
    LeaveTree(tm, *tm->tracerState, tm->bailExit);
    tm->bailExit = NULL;

    TracerState* state = tm->tracerState;
    state->builtinStatus |= BUILTIN_BAILED;

    /*
     * Between now and the LeaveTree in ExecuteTree, |tm->storage| may be reused
     * if another trace executes before the currently executing native returns.
     * However, all such traces will complete by the time the currently
     * executing native returns and the return value is written to the native
     * stack. After that point, no traces may execute until the LeaveTree in
     * ExecuteTree, hence the invariant is maintained that only one trace uses
     * |tm->storage| at a time.
     */
    state->deepBailSp = state->sp;
}
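/*
 * Note (explanation added for clarity, not original text): BUILTIN_BAILED set
 * here is what the trace observes once the native returns; traces that call
 * FAIL_STATUS builtins test the builtin status word afterwards and side-exit
 * when it is nonzero, which is how control returns to the interpreter after a
 * deep bail.
 */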
JS_REQUIRES_STACK Value&
TraceRecorder::argval(unsigned n) const
{
    JS_ASSERT(n < cx->fp()->numFormalArgs());
    return cx->fp()->formalArg(n);
}

JS_REQUIRES_STACK Value&
TraceRecorder::varval(unsigned n) const
{
    JS_ASSERT(n < cx->fp()->numSlots());
    return cx->fp()->slots()[n];
}

JS_REQUIRES_STACK Value&
TraceRecorder::stackval(int n) const
{
    return cx->regs->sp[n];
}
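/*
 * Illustration (added; follows from the accessor above): stack offsets are
 * relative to the current operand-stack top, so a binary op reads its
 * operands as
 *
 *     Value& r = stackval(-1);   // topmost value
 *     Value& l = stackval(-2);   // value beneath it
 *
 * which matches how the recorder methods below index their operands.
 */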
JS_REQUIRES_STACK void
TraceRecorder::updateAtoms()
{
    JSScript *script = cx->fp()->script();
    atoms = FrameAtomBase(cx, cx->fp());
    consts = (cx->fp()->hasImacropc() || !JSScript::isValidOffset(script->constOffset))
             ? 0
             : script->consts()->vector;
    strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
}

JS_REQUIRES_STACK void
TraceRecorder::updateAtoms(JSScript *script)
{
    atoms = script->atomMap.vector;
    consts = JSScript::isValidOffset(script->constOffset) ? script->consts()->vector : 0;
    strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
}
/*
 * Generate LIR to compute the scope chain.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::scopeChain()
{
    return cx->fp()->isFunctionFrame()
           ? getFrameObjPtr(cx->fp()->addressOfScopeChain())
           : entryScopeChain();
}

/*
 * Generate LIR to compute the scope chain on entry to the trace. This is
 * generally useful only for getting to the global object, because only
 * the global object is guaranteed to be present.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::entryScopeChain() const
{
    return w.ldpStackFrameScopeChain(entryFrameIns());
}

/*
 * Generate LIR to compute the stack frame on entry to the trace.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::entryFrameIns() const
{
    return w.ldpFrameFp(w.ldpContextField(regs));
}
/*
 * Return the frame of a call object if that frame is part of the current
 * trace. |depthp| is an optional outparam: if it is non-null, it will be
 * filled in with the depth of the call object's frame relevant to cx->fp().
 */
JS_REQUIRES_STACK JSStackFrame*
TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const
{
    JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate();
    JSStackFrame* fp = cx->fp();
    for (unsigned depth = 0; depth <= callDepth; ++depth) {
        if (fp == ofp) {
            if (depthp)
                *depthp = depth;
            return ofp;
        }
        if (!(fp = fp->prev()))
            break;
    }
    return NULL;
}
JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureVar, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
                     0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureArg, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
                     0, ACCSET_STORE_ANY)
/*
 * Search the scope chain for a property lookup operation at the current PC and
 * generate LIR to access the given property. Return RECORD_CONTINUE on success,
 * otherwise abort and return RECORD_STOP. There are 3 outparams:
 *
 *     vp           the address of the current property value
 *     ins          LIR instruction representing the property value on trace
 *     NameResult   describes how to look up name; see comment for NameResult in jstracer.h
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::scopeChainProp(JSObject* chainHead, Value*& vp, LIns*& ins, NameResult& nr)
{
    JS_ASSERT(chainHead == &cx->fp()->scopeChain());
    JS_ASSERT(chainHead != globalObj);

    TraceMonitor &localtm = *traceMonitor;

    JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
    JSObject* obj2;
    JSProperty* prop;
    JSObject *obj = chainHead;
    if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
        RETURN_ERROR_A("error in js_FindProperty");

    /* js_FindProperty can reenter the interpreter and kill |this|. */
    if (!localtm.recorder)
        return ARECORD_ABORTED;

    if (!prop)
        RETURN_STOP_A("failed to find name in non-global scope chain");

    if (obj == globalObj) {
        // Even if the property is on the global object, we must guard against
        // the creation of properties that shadow the property in the middle
        // of the scope chain.
        LIns* head_ins;
        if (cx->fp()->isFunctionFrame()) {
            // Skip any Call object when inside a function. Any reference to a
            // Call name the compiler resolves statically and we do not need
            // to match shapes of the Call objects.
            chainHead = cx->fp()->callee().getParent();
            head_ins = w.ldpObjParent(get(&cx->fp()->calleeValue()));
        } else {
            head_ins = scopeChain();
        }
        LIns* obj_ins;
        CHECK_STATUS_A(traverseScopeChain(chainHead, head_ins, obj, obj_ins));

        if (obj2 != obj)
            RETURN_STOP_A("prototype property");

        Shape* shape = (Shape*) prop;
        if (!isValidSlot(obj, shape))
            return ARECORD_STOP;
        if (!lazilyImportGlobalSlot(shape->slot))
            RETURN_STOP_A("lazy import of global slot failed");
        vp = &obj->getSlotRef(shape->slot);
        ins = get(vp);
        return ARECORD_CONTINUE;
    }

    if (obj == obj2 && obj->isCall()) {
        AbortableRecordingStatus status =
            InjectStatus(callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr));
        return status;
    }

    RETURN_STOP_A("fp->scopeChain is not global or active call object");
}
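/*
 * Illustrative summary (added, not original text): on the global-object path
 * the name is resolved to a global slot and imported lazily into the trace;
 * on the Call-object path the three outparams are filled in by callProp()
 * below; any other shape of scope chain aborts recording.
 */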
8155 * Generate LIR to access a property of a Call object.
8157 JS_REQUIRES_STACK RecordingStatus
8158 TraceRecorder::callProp(JSObject
* obj
, JSProperty
* prop
, jsid id
, Value
*& vp
,
8159 LIns
*& ins
, NameResult
& nr
)
8161 Shape
*shape
= (Shape
*) prop
;
8163 JSOp op
= JSOp(*cx
->regs
->pc
);
8164 uint32 setflags
= (js_CodeSpec
[op
].format
& (JOF_SET
| JOF_INCDEC
| JOF_FOR
));
8165 if (setflags
&& !shape
->writable())
8166 RETURN_STOP("writing to a read-only property");
8168 uintN slot
= uint16(shape
->shortid
);
8171 JSStackFrame
* cfp
= (JSStackFrame
*) obj
->getPrivate();
8173 if (shape
->getterOp() == GetCallArg
) {
8174 JS_ASSERT(slot
< cfp
->numFormalArgs());
8175 vp
= &cfp
->formalArg(slot
);
8177 } else if (shape
->getterOp() == GetCallVar
||
8178 shape
->getterOp() == GetCallVarChecked
) {
8179 JS_ASSERT(slot
< cfp
->numSlots());
8180 vp
= &cfp
->slots()[slot
];
8183 RETURN_STOP("dynamic property of Call object");
8186 // Now assert that our use of shape->shortid was in fact kosher.
8187 JS_ASSERT(shape
->hasShortID());
8189 if (frameIfInRange(obj
)) {
8190 // At this point we are guaranteed to be looking at an active call oject
8191 // whose properties are stored in the corresponding JSStackFrame.
8194 return RECORD_CONTINUE
;
8197 // Call objects do not yet have shape->isMethod() properties, but they
8198 // should. See bug 514046, for which this code is future-proof. Remove
8199 // this comment when that bug is fixed (so, FIXME: 514046).
8203 js_GetPropertyHelper(cx
, obj
, shape
->id
,
8204 (op
== JSOP_CALLNAME
)
8205 ? JSGET_NO_METHOD_BARRIER
8206 : JSGET_METHOD_BARRIER
,
8212 JSObject
* parent
= cx
->fp()->callee().getParent();
8213 LIns
* parent_ins
= w
.ldpObjParent(get(&cx
->fp()->calleeValue()));
8214 CHECK_STATUS(traverseScopeChain(parent
, parent_ins
, obj
, obj_ins
));
8217 // Because the parent guard in guardCallee ensures this Call object
8218 // will be the same object now and on trace, and because once a Call
8219 // object loses its frame it never regains one, on trace we will also
8220 // have a null private in the Call object. So all we need to do is
8221 // write the value to the Call object's slot.
8222 if (shape
->getterOp() == GetCallArg
) {
8223 JS_ASSERT(slot
< ArgClosureTraits::slot_count(obj
));
8224 slot
+= ArgClosureTraits::slot_offset(obj
);
8225 } else if (shape
->getterOp() == GetCallVar
||
8226 shape
->getterOp() == GetCallVarChecked
) {
8227 JS_ASSERT(slot
< VarClosureTraits::slot_count(obj
));
8228 slot
+= VarClosureTraits::slot_offset(obj
);
8230 RETURN_STOP("dynamic property of Call object");
8233 // Now assert that our use of shape->shortid was in fact kosher.
8234 JS_ASSERT(shape
->hasShortID());
8236 ins
= unbox_slot(obj
, obj_ins
, slot
, snapshot(BRANCH_EXIT
));
8238 ClosureVarInfo
* cv
= new (traceAlloc()) ClosureVarInfo();
8241 cv
->callDepth
= callDepth
;
8244 // Even though the frame is out of range, later we might be called as an
8245 // inner trace such that the target variable is defined in the outer trace
8246 // entry frame. For simplicity, we just fall off trace.
8248 w
.eqp(entryFrameIns(), w
.ldpObjPrivate(obj_ins
)),
8251 LIns
* outp
= w
.allocp(sizeof(double));
8254 w
.nameImmpNonGC(cv
),
8259 if (shape
->getterOp() == GetCallArg
) {
8260 ci
= &GetClosureArg_ci
;
8261 } else if (shape
->getterOp() == GetCallVar
||
8262 shape
->getterOp() == GetCallVarChecked
) {
8263 ci
= &GetClosureVar_ci
;
8265 RETURN_STOP("dynamic property of Call object");
8268 // Now assert that our use of shape->shortid was in fact kosher.
8269 JS_ASSERT(shape
->hasShortID());
8271 LIns
* call_ins
= w
.call(ci
, args
);
8273 JSValueType type
= getCoercedType(nr
.v
);
8275 w
.name(w
.eqi(call_ins
, w
.immi(type
)), "guard(type-stable name access)"),
8277 ins
= stackLoad(AllocSlotsAddress(outp
), type
);
8281 nr
.obj_ins
= obj_ins
;
8283 return RECORD_CONTINUE
;
JS_REQUIRES_STACK LIns*
TraceRecorder::arg(unsigned n)
{
    return get(&argval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::arg(unsigned n, LIns* i)
{
    set(&argval(n), i);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::var(unsigned n)
{
    return get(&varval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::var(unsigned n, LIns* i)
{
    set(&varval(n), i);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::stack(int n)
{
    return get(&stackval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::stack(int n, LIns* i)
{
    set(&stackval(n), i);
}
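/*
 * Illustration (added): these thin wrappers keep an interpreter slot and its
 * LIR image in sync through the tracker, e.g.
 *
 *     LIns* a = arg(0);   // LIR value recorded for the first formal argument
 *     stack(-1, a);       // make the operand-stack top alias that LIR value
 *
 * so later bytecodes that read the same slot reuse the already-recorded
 * instruction instead of re-importing it.
 */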
/* Leave trace iff one operand is negative and the other is non-negative. */
JS_REQUIRES_STACK void
TraceRecorder::guardNonNeg(LIns* d0, LIns* d1, VMSideExit* exit)
{
    if (d0->isImmI())
        JS_ASSERT(d0->immI() >= 0);
    else
        guard(false, w.ltiN(d0, 0), exit);

    if (d1->isImmI())
        JS_ASSERT(d1->immI() >= 0);
    else
        guard(false, w.ltiN(d1, 0), exit);
}
8337 JS_REQUIRES_STACK LIns
*
8338 TraceRecorder::alu(LOpcode v
, jsdouble v0
, jsdouble v1
, LIns
* s0
, LIns
* s1
)
8341 * To even consider this operation for demotion, both operands have to be
8342 * integers and the oracle must not give us a negative hint for the
8345 if (!oracle
|| oracle
->isInstructionUndemotable(cx
->regs
->pc
) ||
8346 !IsPromotedInt32(s0
) || !IsPromotedInt32(s1
)) {
8348 if (v
== LIR_modd
) {
8349 LIns
* args
[] = { s1
, s0
};
8350 return w
.call(&js_dmod_ci
, args
);
8352 LIns
* result
= w
.ins2(v
, s0
, s1
);
8353 JS_ASSERT_IF(s0
->isImmD() && s1
->isImmD(), result
->isImmD());
8367 if (r
== 0.0 && (v0
< 0.0 || v1
< 0.0))
8370 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8377 if (v0
< 0 || v1
== 0 || (s1
->isImmD() && v1
< 0))
8379 r
= js_dmod(v0
, v1
);
8387 * The result must be an integer at record time, otherwise there is no
8388 * point in trying to demote it.
8390 if (jsint(r
) != r
|| JSDOUBLE_IS_NEGZERO(r
))
8393 LIns
* d0
= w
.demoteToInt32(s0
);
8394 LIns
* d1
= w
.demoteToInt32(s1
);
8397 * Speculatively emit an integer operation, betting that at runtime we
8398 * will get integer results again.
8400 VMSideExit
* exit
= NULL
;
8403 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8405 if (d0
->isImmI() && d1
->isImmI())
8406 return w
.i2d(w
.immi(jsint(r
)));
8408 exit
= snapshot(OVERFLOW_EXIT
);
8411 * If the divisor is greater than zero its always safe to execute
8412 * the division. If not, we have to make sure we are not running
8413 * into -2147483648 / -1, because it can raise an overflow exception.
8415 if (!d1
->isImmI()) {
8416 if (MaybeBranch mbr
= w
.jt(w
.gtiN(d1
, 0))) {
8417 guard(false, w
.eqi0(d1
), exit
);
8418 guard(true, w
.eqi0(w
.andi(w
.eqiN(d0
, 0x80000000),
8419 w
.eqiN(d1
, -1))), exit
);
8423 if (d1
->immI() == -1)
8424 guard(false, w
.eqiN(d0
, 0x80000000), exit
);
8427 result
= w
.divi(d0
, d1
);
8429 /* As long as the modulus is zero, the result is an integer. */
8430 guard(true, w
.eqi0(w
.modi(result
)), exit
);
8432 /* Don't lose a -0. */
8433 guard(false, w
.eqi0(result
), exit
);
8437 if (d0
->isImmI() && d1
->isImmI())
8438 return w
.i2d(w
.immi(jsint(r
)));
8440 exit
= snapshot(OVERFLOW_EXIT
);
8442 /* Make sure we don't trigger division by zero at runtime. */
8444 guard(false, w
.eqi0(d1
), exit
);
8446 result
= w
.modi(w
.divi(d0
, d1
));
8448 /* If the result is not 0, it is always within the integer domain. */
8449 if (MaybeBranch mbr
= w
.jf(w
.eqi0(result
))) {
8451 * If the result is zero, we must exit if the lhs is negative since
8452 * the result is -0 in this case, which is not in the integer domain.
8454 guard(false, w
.ltiN(d0
, 0), exit
);
8462 v
= arithOpcodeD2I(v
);
8463 JS_ASSERT(v
== LIR_addi
|| v
== LIR_muli
|| v
== LIR_subi
);
8466 * If the operands guarantee that the result will be an integer (e.g.
8467 * z = x * y with 0 <= (x|y) <= 0xffff guarantees z <= fffe0001), we
8468 * don't have to guard against an overflow. Otherwise we emit a guard
8469 * that will inform the oracle and cause a non-demoted trace to be
8470 * attached that uses floating-point math for this operation.
8472 bool needsOverflowCheck
= true, needsNegZeroCheck
= true;
8473 ChecksRequired(v
, d0
, d1
, &needsOverflowCheck
, &needsNegZeroCheck
);
8474 if (needsOverflowCheck
) {
8475 exit
= snapshot(OVERFLOW_EXIT
);
8476 result
= guard_xov(v
, d0
, d1
, exit
);
8478 result
= w
.ins2(v
, d0
, d1
);
8480 if (needsNegZeroCheck
) {
8481 JS_ASSERT(v
== LIR_muli
);
8483 * Make sure we don't lose a -0. We exit if the result is zero and if
8484 * either operand is negative. We start out using a weaker guard, checking
8485 * if either argument is negative. If this ever fails, we recompile with
8486 * a stronger, but slower, guard.
8488 if (v0
< 0.0 || v1
< 0.0
8489 || !oracle
|| oracle
->isInstructionSlowZeroTest(cx
->regs
->pc
))
8492 exit
= snapshot(OVERFLOW_EXIT
);
8495 w
.eqi0(w
.andi(w
.eqi0(result
),
8496 w
.ori(w
.ltiN(d0
, 0),
8500 guardNonNeg(d0
, d1
, snapshot(MUL_ZERO_EXIT
));
8505 JS_ASSERT_IF(d0
->isImmI() && d1
->isImmI(), result
->isImmI(jsint(r
)));
8506 return w
.i2d(result
);
8510 TraceRecorder::d2i(LIns
* d
, bool resultCanBeImpreciseIfFractional
)
8513 return w
.immi(js_DoubleToECMAInt32(d
->immD()));
8514 if (d
->isop(LIR_i2d
) || d
->isop(LIR_ui2d
)) {
8515 // The d2i(i2d(i)) case is obviously a no-op. (Unlike i2d(d2i(d))!)
8516 // The d2i(ui2d(ui)) case is less obvious, but it is also a no-op.
8517 // For example, 4294967295U has the bit pattern 0xffffffff, and
8518 // d2i(ui2d(4294967295U)) is -1, which also has the bit pattern
8519 // 0xffffffff. Another way to think about it: d2i(ui2d(ui)) is
8520 // equivalent to ui2i(ui); ui2i doesn't exist, but it would be a
8522 // (Note that the above reasoning depends on the fact that d2i()
8523 // always succeeds, ie. it never aborts).
8526 if (d
->isop(LIR_addd
) || d
->isop(LIR_subd
)) {
8527 // If 'i32ad' and 'i32bd' are integral doubles that fit in int32s, and
8528 // 'i32ai' and 'i32bi' are int32s with the equivalent values, then
8531 // d2i(addd(i32ad, i32bd)) == addi(i32ai, i32bi)
8533 // If the RHS doesn't overflow, this is obvious. If it does overflow,
8534 // the result will truncate. And the LHS will truncate in exactly the
8535 // same way. So they're always equal.
8536 LIns
* lhs
= d
->oprnd1();
8537 LIns
* rhs
= d
->oprnd2();
8538 if (IsPromotedInt32(lhs
) && IsPromotedInt32(rhs
))
8539 return w
.ins2(arithOpcodeD2I(d
->opcode()), w
.demoteToInt32(lhs
), w
.demoteToInt32(rhs
));
8542 const CallInfo
* ci
= d
->callInfo();
8543 if (ci
== &js_UnboxDouble_ci
) {
8544 #if JS_BITS_PER_WORD == 32
8545 LIns
*tag_ins
= d
->callArgN(0);
8546 LIns
*payload_ins
= d
->callArgN(1);
8547 LIns
* args
[] = { payload_ins
, tag_ins
};
8548 return w
.call(&js_UnboxInt32_ci
, args
);
8550 LIns
* val_ins
= d
->callArgN(0);
8551 LIns
* args
[] = { val_ins
};
8552 return w
.call(&js_UnboxInt32_ci
, args
);
8555 if (ci
== &js_StringToNumber_ci
) {
8556 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
8557 LIns
* args
[] = { ok_ins
, d
->callArgN(1), d
->callArgN(0) };
8558 LIns
* ret_ins
= w
.call(&js_StringToInt32_ci
, args
);
8560 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
8565 return resultCanBeImpreciseIfFractional
8567 : w
.call(&js_DoubleToInt32_ci
, &d
);
LIns*
TraceRecorder::d2u(LIns* d)
{
    if (d->isImmD())
        return w.immi(js_DoubleToECMAUint32(d->immD()));
    if (d->isop(LIR_i2d) || d->isop(LIR_ui2d))
        return d->oprnd1();
    return w.call(&js_DoubleToUint32_ci, &d);
}
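/*
 * Note (reasoning added, not original text): stripping an i2d/ui2d conversion
 * here is sound because ToUint32 works modulo 2^32, so the original 32-bit
 * payload already is the answer regardless of whether it was produced as a
 * signed or an unsigned integer.
 */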
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::makeNumberInt32(LIns* d, LIns** out)
{
    JS_ASSERT(d->isD());
    if (IsPromotedInt32(d)) {
        *out = w.demoteToInt32(d);
        return RECORD_CONTINUE;
    }

    // This means "convert double to int if it's integral, otherwise
    // exit". We first convert the double to an int, then convert it back
    // and exit if the two doubles don't match. If 'f' is a non-integral
    // immediate we'll end up aborting.
    *out = d2i(d, /* resultCanBeImpreciseIfFractional = */true);
    return guard(true, w.eqd(d, w.i2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::makeNumberUint32(LIns* d, LIns** out)
{
    JS_ASSERT(d->isD());
    if (IsPromotedUint32(d)) {
        *out = w.demoteToUint32(d);
        return RECORD_CONTINUE;
    }

    // This means "convert double to uint if it's integral, otherwise
    // exit". We first convert the double to an unsigned int, then
    // convert it back and exit if the two doubles don't match. If
    // 'f' is a non-integral immediate we'll end up aborting.
    *out = d2u(d);
    return guard(true, w.eqd(d, w.ui2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
}
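/*
 * Worked example (added for illustration): for d = 3.0 the round trip gives
 * i2d(d2i(3.0)) == 3.0, the guard holds, and recording continues with the
 * demoted integer; for d = 3.5 the round trip yields 3.0 != 3.5, so the guard
 * fails and the trace side-exits (or recording aborts outright when the
 * operand is a non-integral immediate, as the comments above note).
 */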
JS_REQUIRES_STACK LIns*
TraceRecorder::stringify(const Value& v)
{
    LIns* v_ins = get(&v);
    if (v.isString())
        return v_ins;

    LIns* args[] = { v_ins, cx_ins };
    const CallInfo* ci;
    if (v.isNumber()) {
        ci = &js_NumberToString_ci;
    } else if (v.isUndefined()) {
        return w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
    } else if (v.isBoolean()) {
        ci = &js_BooleanIntToString_ci;
    } else {
        /*
         * Callers must deal with non-primitive (non-null object) values by
         * calling an imacro. We don't try to guess about which imacro, with
         * what valueOf hint, here.
         */
        JS_ASSERT(v.isNull());
        return w.immpAtomGC(cx->runtime->atomState.nullAtom);
    }

    v_ins = w.call(ci, args);
    guard(false, w.eqp0(v_ins), OOM_EXIT);
    return v_ins;
}
JS_REQUIRES_STACK bool
TraceRecorder::canCallImacro() const
{
    /* We cannot nest imacros. */
    return !cx->fp()->hasImacropc();
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callImacro(jsbytecode* imacro)
{
    return canCallImacro() ? callImacroInfallibly(imacro) : RECORD_STOP;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callImacroInfallibly(jsbytecode* imacro)
{
    JSStackFrame* fp = cx->fp();
    JS_ASSERT(!fp->hasImacropc());
    JSFrameRegs* regs = cx->regs;
    fp->setImacropc(regs->pc);
    regs->pc = imacro;
    return RECORD_IMACRO;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::ifop()
{
    Value& v = stackval(-1);
    LIns* v_ins = get(&v);
    bool cond;
    LIns* x;

    if (v.isNull() || v.isUndefined()) {
        cond = false;
        x = w.immi(0);
    } else if (!v.isPrimitive()) {
        cond = true;
        x = w.immi(1);
    } else if (v.isBoolean()) {
        /* Test for boolean is true, negate later if we are testing for false. */
        cond = v.isTrue();
        x = w.eqiN(v_ins, 1);
    } else if (v.isNumber()) {
        jsdouble d = v.toNumber();
        cond = !JSDOUBLE_IS_NaN(d) && d;
        x = w.eqi0(w.eqi0(w.andi(w.eqd(v_ins, v_ins), w.eqi0(w.eqd0(v_ins)))));
    } else if (v.isString()) {
        cond = v.toString()->length() != 0;
        x = w.eqi0(w.eqp0(w.getStringLength(v_ins)));
    } else {
        JS_NOT_REACHED("ifop");
        return ARECORD_STOP;
    }

    jsbytecode* pc = cx->regs->pc;
    emitIf(pc, cond, x);
    return checkTraceEnd(pc);
}
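/*
 * Note (explanation added): the number case builds the ECMA ToBoolean test
 * "d is not NaN and d != 0" entirely out of equality ops: eqd(v, v) is false
 * only for NaN, eqi0(eqd0(v)) is "v != 0", and the outer eqi0(eqi0(...))
 * double negation normalizes the and-ed result back into a 0/1 condition
 * suitable for emitIf().
 */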
8706 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
8707 * "first" time we hit the op. Later, when we start traces after exiting that
8708 * trace, we just patch.
8710 JS_REQUIRES_STACK AbortableRecordingStatus
8711 TraceRecorder::tableswitch()
8713 Value
& v
= stackval(-1);
8715 /* No need to guard if the condition can't match any of the cases. */
8717 return ARECORD_CONTINUE
;
8719 /* No need to guard if the condition is constant. */
8720 LIns
* v_ins
= d2i(get(&v
));
8721 if (v_ins
->isImmI())
8722 return ARECORD_CONTINUE
;
8724 jsbytecode
* pc
= cx
->regs
->pc
;
8725 /* Starting a new trace after exiting a trace via switch. */
8727 (anchor
->exitType
== CASE_EXIT
|| anchor
->exitType
== DEFAULT_EXIT
) &&
8728 fragment
->ip
== pc
) {
8729 return ARECORD_CONTINUE
;
8734 if (*pc
== JSOP_TABLESWITCH
) {
8735 pc
+= JUMP_OFFSET_LEN
;
8736 low
= GET_JUMP_OFFSET(pc
);
8737 pc
+= JUMP_OFFSET_LEN
;
8738 high
= GET_JUMP_OFFSET(pc
);
8740 pc
+= JUMPX_OFFSET_LEN
;
8741 low
= GET_JUMP_OFFSET(pc
);
8742 pc
+= JUMP_OFFSET_LEN
;
8743 high
= GET_JUMP_OFFSET(pc
);
8747 * If there are no cases, this is a no-op. The default case immediately
8748 * follows in the bytecode and is always taken, so we need no special
8749 * action to handle it.
8751 int count
= high
+ 1 - low
;
8752 JS_ASSERT(count
>= 0);
8754 return ARECORD_CONTINUE
;
8756 /* Cap maximum table-switch size for modesty. */
8757 if (count
> MAX_TABLE_SWITCH
)
8758 return InjectStatus(switchop());
8760 /* Generate switch LIR. */
8761 SwitchInfo
* si
= new (traceAlloc()) SwitchInfo();
8764 si
->index
= (uint32
) -1;
8765 LIns
* diff
= w
.subi(v_ins
, w
.immi(low
));
8766 LIns
* cmp
= w
.ltui(diff
, w
.immi(si
->count
));
8767 guard(true, cmp
, DEFAULT_EXIT
);
8768 // We use AnyAddress; it's imprecise but this case is rare and not worth its
8769 // own access region.
8770 w
.st(diff
, AnyAddress(w
.immpNonGC(&si
->index
)));
8771 VMSideExit
* exit
= snapshot(CASE_EXIT
);
8772 exit
->switchInfo
= si
;
8773 LIns
* guardIns
= w
.xtbl(diff
, createGuardRecord(exit
));
8774 fragment
->lastIns
= guardIns
;
8775 CHECK_STATUS_A(compile());
8776 return finishSuccessfully();
8780 JS_REQUIRES_STACK RecordingStatus
8781 TraceRecorder::switchop()
8783 Value
& v
= stackval(-1);
8784 LIns
* v_ins
= get(&v
);
8786 /* No need to guard if the condition is constant. */
8787 if (v_ins
->isImmAny())
8788 return RECORD_CONTINUE
;
8790 jsdouble d
= v
.toNumber();
8792 w
.name(w
.eqd(v_ins
, w
.immd(d
)), "guard(switch on numeric)"),
8794 } else if (v
.isString()) {
8795 LIns
* args
[] = { w
.immpStrGC(v
.toString()), v_ins
, cx_ins
};
8796 LIns
* equal_rval
= w
.call(&js_EqualStringsOnTrace_ci
, args
);
8798 w
.name(w
.eqiN(equal_rval
, JS_NEITHER
), "guard(oom)"),
8801 w
.name(w
.eqi0(equal_rval
), "guard(switch on string)"),
8803 } else if (v
.isBoolean()) {
8805 w
.name(w
.eqi(v_ins
, w
.immi(v
.isTrue())), "guard(switch on boolean)"),
8807 } else if (v
.isUndefined()) {
8808 // This is a unit type, so no guard is needed.
8810 RETURN_STOP("switch on object or null");
8812 return RECORD_CONTINUE
;
8815 JS_REQUIRES_STACK RecordingStatus
8816 TraceRecorder::inc(Value
& v
, jsint incr
, bool pre
)
8818 LIns
* v_ins
= get(&v
);
8820 CHECK_STATUS(inc(v
, v_ins
, dummy
, incr
, pre
));
8822 return RECORD_CONTINUE
;
8826 * On exit, v_ins is the incremented unboxed value, and the appropriate value
8827 * (pre- or post-increment as described by pre) is stacked. v_out is set to
8828 * the value corresponding to v_ins.
8830 JS_REQUIRES_STACK RecordingStatus
8831 TraceRecorder::inc(const Value
&v
, LIns
*& v_ins
, Value
&v_out
, jsint incr
, bool pre
)
8834 CHECK_STATUS(incHelper(v
, v_ins
, v_out
, v_after
, incr
));
8836 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->regs
->pc
];
8837 JS_ASSERT(cs
.ndefs
== 1);
8838 stack(-cs
.nuses
, pre
? v_after
: v_ins
);
8840 return RECORD_CONTINUE
;
8844 * Do an increment operation without storing anything to the stack.
8846 * v_after is an out param whose value corresponds to the instruction the
8847 * v_ins_after out param gets set to.
8849 JS_REQUIRES_STACK RecordingStatus
8850 TraceRecorder::incHelper(const Value
&v
, LIns
*& v_ins
, Value
&v_after
,
8851 LIns
*& v_ins_after
, jsint incr
)
8853 // FIXME: Bug 606071 on making this work for objects.
8854 if (!v
.isPrimitive())
8855 RETURN_STOP("can inc primitives only");
8857 // We need to modify |v_ins| the same way relational() modifies
8859 if (v
.isUndefined()) {
8860 v_ins_after
= w
.immd(js_NaN
);
8861 v_after
.setDouble(js_NaN
);
8862 v_ins
= w
.immd(js_NaN
);
8863 } else if (v
.isNull()) {
8864 v_ins_after
= w
.immd(incr
);
8865 v_after
.setDouble(incr
);
8866 v_ins
= w
.immd(0.0);
8868 if (v
.isBoolean()) {
8869 v_ins
= w
.i2d(v_ins
);
8870 } else if (v
.isString()) {
8871 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
8872 LIns
* args
[] = { ok_ins
, v_ins
, cx_ins
};
8873 v_ins
= w
.call(&js_StringToNumber_ci
, args
);
8875 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
8878 JS_ASSERT(v
.isNumber());
8882 AutoValueRooter
tvr(cx
);
8884 ValueToNumber(cx
, tvr
.value(), &num
);
8885 v_ins_after
= alu(LIR_addd
, num
, incr
, v_ins
, w
.immd(incr
));
8886 v_after
.setDouble(num
+ incr
);
8889 return RECORD_CONTINUE
;
8892 JS_REQUIRES_STACK AbortableRecordingStatus
8893 TraceRecorder::incProp(jsint incr
, bool pre
)
8895 Value
& l
= stackval(-1);
8896 if (l
.isPrimitive())
8897 RETURN_STOP_A("incProp on primitive");
8899 JSObject
* obj
= &l
.toObject();
8900 LIns
* obj_ins
= get(&l
);
8904 CHECK_STATUS_A(prop(obj
, obj_ins
, &slot
, &v_ins
, NULL
));
8906 if (slot
== SHAPE_INVALID_SLOT
)
8907 RETURN_STOP_A("incProp on invalid slot");
8909 Value
& v
= obj
->getSlotRef(slot
);
8911 CHECK_STATUS_A(inc(v
, v_ins
, v_after
, incr
, pre
));
8913 LIns
* slots_ins
= NULL
;
8914 stobj_set_slot(obj
, obj_ins
, slot
, slots_ins
, v_after
, v_ins
);
8915 return ARECORD_CONTINUE
;
8918 JS_REQUIRES_STACK RecordingStatus
8919 TraceRecorder::incElem(jsint incr
, bool pre
)
8921 Value
& r
= stackval(-1);
8922 Value
& l
= stackval(-2);
8927 if (!l
.isPrimitive() && l
.toObject().isDenseArray() && r
.isInt32()) {
8928 guardDenseArray(get(&l
), MISMATCH_EXIT
);
8929 CHECK_STATUS(denseArrayElement(l
, r
, vp
, v_ins
, addr_ins
, snapshot(BRANCH_EXIT
)));
8930 if (!addr_ins
) // if we read a hole, abort
8933 CHECK_STATUS(inc(*vp
, v_ins
, v_after
, incr
, pre
));
8934 box_value_into(v_after
, v_ins
, DSlotsAddress(addr_ins
));
8935 return RECORD_CONTINUE
;
8938 return callImacro((incr
== 1)
8939 ? pre
? incelem_imacros
.incelem
: incelem_imacros
.eleminc
8940 : pre
? decelem_imacros
.decelem
: decelem_imacros
.elemdec
);
8944 EvalCmp(LOpcode op
, double l
, double r
)
8964 JS_NOT_REACHED("unexpected comparison op");
8971 EvalCmp(JSContext
*cx
, LOpcode op
, JSString
* l
, JSString
* r
, JSBool
*ret
)
8974 return EqualStrings(cx
, l
, r
, ret
);
8976 if (!CompareStrings(cx
, l
, r
, &cmp
))
8978 *ret
= EvalCmp(op
, cmp
, 0);
8982 JS_REQUIRES_STACK RecordingStatus
8983 TraceRecorder::strictEquality(bool equal
, bool cmpCase
)
8985 Value
& r
= stackval(-1);
8986 Value
& l
= stackval(-2);
8987 LIns
* l_ins
= get(&l
);
8988 LIns
* r_ins
= get(&r
);
8992 JSValueType ltag
= getPromotedType(l
);
8993 if (ltag
!= getPromotedType(r
)) {
8996 } else if (ltag
== JSVAL_TYPE_STRING
) {
8997 LIns
* args
[] = { r_ins
, l_ins
, cx_ins
};
8998 LIns
* equal_ins
= w
.call(&js_EqualStringsOnTrace_ci
, args
);
9000 w
.name(w
.eqiN(equal_ins
, JS_NEITHER
), "guard(oom)"),
9002 x
= w
.eqiN(equal_ins
, equal
);
9003 if (!EqualStrings(cx
, l
.toString(), r
.toString(), &cond
))
9004 RETURN_ERROR("oom");
9006 if (ltag
== JSVAL_TYPE_DOUBLE
)
9007 x
= w
.eqd(l_ins
, r_ins
);
9008 else if (ltag
== JSVAL_TYPE_NULL
|| ltag
== JSVAL_TYPE_NONFUNOBJ
|| ltag
== JSVAL_TYPE_FUNOBJ
)
9009 x
= w
.eqp(l_ins
, r_ins
);
9011 x
= w
.eqi(l_ins
, r_ins
);
9014 cond
= (ltag
== JSVAL_TYPE_DOUBLE
)
9015 ? l
.toNumber() == r
.toNumber()
9018 cond
= (!!cond
== equal
);
9021 /* Only guard if the same path may not always be taken. */
9023 guard(cond
, x
, BRANCH_EXIT
);
9024 return RECORD_CONTINUE
;
9028 return RECORD_CONTINUE
;
9031 JS_REQUIRES_STACK AbortableRecordingStatus
9032 TraceRecorder::equality(bool negate
, bool tryBranchAfterCond
)
9034 Value
& rval
= stackval(-1);
9035 Value
& lval
= stackval(-2);
9036 LIns
* l_ins
= get(&lval
);
9037 LIns
* r_ins
= get(&rval
);
9039 return equalityHelper(lval
, rval
, l_ins
, r_ins
, negate
, tryBranchAfterCond
, lval
);
9042 JS_REQUIRES_STACK AbortableRecordingStatus
9043 TraceRecorder::equalityHelper(Value
& l
, Value
& r
, LIns
* l_ins
, LIns
* r_ins
,
9044 bool negate
, bool tryBranchAfterCond
,
9047 LOpcode op
= LIR_eqi
;
9049 LIns
* args
[] = { NULL
, NULL
, NULL
};
9052 * The if chain below closely mirrors that found in 11.9.3, in general
9053 * deviating from that ordering of ifs only to account for SpiderMonkey's
9054 * conflation of booleans and undefined and for the possibility of
9055 * confusing objects and null. Note carefully the spec-mandated recursion
9056 * in the final else clause, which terminates because Number == T recurs
9057 * only if T is Object, but that must recur again to convert Object to
9058 * primitive, and ToPrimitive throws if the object cannot be converted to
9059 * a primitive value (which would terminate recursion).
9062 if (getPromotedType(l
) == getPromotedType(r
)) {
9063 if (l
.isUndefined() || l
.isNull()) {
9067 } else if (l
.isObject()) {
9068 if (l
.toObject().getClass()->ext
.equality
)
9069 RETURN_STOP_A("Can't trace extended class equality operator");
9070 LIns
* flags_ins
= w
.ldiObjFlags(l_ins
);
9071 LIns
* flag_ins
= w
.andi(flags_ins
, w
.nameImmui(JSObject::HAS_EQUALITY
));
9072 guard(true, w
.eqi0(flag_ins
), BRANCH_EXIT
);
9076 } else if (l
.isBoolean()) {
9077 JS_ASSERT(r
.isBoolean());
9079 } else if (l
.isString()) {
9080 JSString
*l_str
= l
.toString();
9081 JSString
*r_str
= r
.toString();
9082 if (!l_str
->isRope() && !r_str
->isRope() && l_str
->length() == 1 && r_str
->length() == 1) {
9083 VMSideExit
*exit
= snapshot(BRANCH_EXIT
);
9084 LIns
*c
= w
.immw(1);
9085 guard(true, w
.eqp(w
.getStringLength(l_ins
), c
), exit
);
9086 guard(true, w
.eqp(w
.getStringLength(r_ins
), c
), exit
);
9087 l_ins
= w
.getStringChar(l_ins
, w
.immpNonGC(0));
9088 r_ins
= w
.getStringChar(r_ins
, w
.immpNonGC(0));
9090 args
[0] = r_ins
, args
[1] = l_ins
, args
[2] = cx_ins
;
9091 LIns
*equal_ins
= w
.call(&js_EqualStringsOnTrace_ci
, args
);
9093 w
.name(w
.eqiN(equal_ins
, JS_NEITHER
), "guard(oom)"),
9098 if (!EqualStrings(cx
, l
.toString(), r
.toString(), &cond
))
9099 RETURN_ERROR_A("oom");
9101 JS_ASSERT(l
.isNumber() && r
.isNumber());
9102 cond
= (l
.toNumber() == r
.toNumber());
9105 } else if (l
.isNull() && r
.isUndefined()) {
9106 l_ins
= w
.immiUndefined();
9108 } else if (l
.isUndefined() && r
.isNull()) {
9109 r_ins
= w
.immiUndefined();
9111 } else if (l
.isNumber() && r
.isString()) {
9112 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9113 args
[0] = ok_ins
, args
[1] = r_ins
, args
[2] = cx_ins
;
9114 r_ins
= w
.call(&js_StringToNumber_ci
, args
);
9116 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9119 double d
= js_StringToNumber(cx
, r
.toString(), &ok
);
9121 RETURN_ERROR_A("oom");
9122 cond
= (l
.toNumber() == d
);
9124 } else if (l
.isString() && r
.isNumber()) {
9125 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9126 args
[0] = ok_ins
, args
[1] = l_ins
, args
[2] = cx_ins
;
9127 l_ins
= w
.call(&js_StringToNumber_ci
, args
);
9129 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9132 double d
= js_StringToNumber(cx
, l
.toString(), &ok
);
9134 RETURN_ERROR_A("oom");
9135 cond
= (d
== r
.toNumber());
9138 // Below we may assign to l or r, which modifies the interpreter state.
9139 // This is fine as long as we also update the tracker.
9140 if (l
.isBoolean()) {
9141 l_ins
= w
.i2d(l_ins
);
9143 l
.setInt32(l
.isTrue());
9144 return equalityHelper(l
, r
, l_ins
, r_ins
, negate
,
9145 tryBranchAfterCond
, rval
);
9147 if (r
.isBoolean()) {
9148 r_ins
= w
.i2d(r_ins
);
9150 r
.setInt32(r
.isTrue());
9151 return equalityHelper(l
, r
, l_ins
, r_ins
, negate
,
9152 tryBranchAfterCond
, rval
);
9154 if ((l
.isString() || l
.isNumber()) && !r
.isPrimitive()) {
9155 CHECK_STATUS_A(guardNativeConversion(r
));
9156 return InjectStatus(callImacro(equality_imacros
.any_obj
));
9158 if (!l
.isPrimitive() && (r
.isString() || r
.isNumber())) {
9159 CHECK_STATUS_A(guardNativeConversion(l
));
9160 return InjectStatus(callImacro(equality_imacros
.obj_any
));
9168 /* If the operands aren't numbers, compare them as integers. */
9169 LIns
* x
= w
.ins2(op
, l_ins
, r_ins
);
9175 jsbytecode
* pc
= cx
->regs
->pc
;
9178 * Don't guard if the same path is always taken. If it isn't, we have to
9179 * fuse comparisons and the following branch, because the interpreter does
9182 if (tryBranchAfterCond
)
9183 fuseIf(pc
+ 1, cond
, x
);
9186 * There is no need to write out the result of this comparison if the trace
9187 * ends on this operation.
9189 if (pc
[1] == JSOP_IFNE
|| pc
[1] == JSOP_IFEQ
)
9190 CHECK_STATUS_A(checkTraceEnd(pc
+ 1));
9193 * We update the stack after the guard. This is safe since the guard bails
9194 * out at the comparison and the interpreter will therefore re-execute the
9195 * comparison. This way the value of the condition doesn't have to be
9196 * calculated and saved on the stack in most cases.
9200 return ARECORD_CONTINUE
;
9203 JS_REQUIRES_STACK AbortableRecordingStatus
9204 TraceRecorder::relational(LOpcode op
, bool tryBranchAfterCond
)
9206 Value
& r
= stackval(-1);
9207 Value
& l
= stackval(-2);
9210 LIns
* l_ins
= get(&l
);
9211 LIns
* r_ins
= get(&r
);
9213 jsdouble lnum
, rnum
;
9216 * 11.8.5 if either argument is an object with a function-valued valueOf
9217 * property; if both arguments are objects with non-function-valued valueOf
9218 * properties, abort.
9220 if (!l
.isPrimitive()) {
9221 CHECK_STATUS_A(guardNativeConversion(l
));
9222 if (!r
.isPrimitive()) {
9223 CHECK_STATUS_A(guardNativeConversion(r
));
9224 return InjectStatus(callImacro(binary_imacros
.obj_obj
));
9226 return InjectStatus(callImacro(binary_imacros
.obj_any
));
9228 if (!r
.isPrimitive()) {
9229 CHECK_STATUS_A(guardNativeConversion(r
));
9230 return InjectStatus(callImacro(binary_imacros
.any_obj
));
9233 /* 11.8.5 steps 3, 16-21. */
9234 if (l
.isString() && r
.isString()) {
9235 LIns
* args
[] = { r_ins
, l_ins
, cx_ins
};
9236 LIns
* result_ins
= w
.call(&js_CompareStringsOnTrace_ci
, args
);
9238 w
.name(w
.eqiN(result_ins
, INT32_MIN
), "guard(oom)"),
9242 if (!EvalCmp(cx
, op
, l
.toString(), r
.toString(), &cond
))
9243 RETURN_ERROR_A("oom");
9247 /* 11.8.5 steps 4-5. */
9248 if (!l
.isNumber()) {
9249 if (l
.isBoolean()) {
9250 l_ins
= w
.i2d(l_ins
);
9251 } else if (l
.isUndefined()) {
9252 l_ins
= w
.immd(js_NaN
);
9253 } else if (l
.isString()) {
9254 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9255 LIns
* args
[] = { ok_ins
, l_ins
, cx_ins
};
9256 l_ins
= w
.call(&js_StringToNumber_ci
, args
);
9258 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9260 } else if (l
.isNull()) {
9261 l_ins
= w
.immd(0.0);
9263 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9264 "have been handled at start of method");
9265 RETURN_STOP_A("safety belt");
9268 if (!r
.isNumber()) {
9269 if (r
.isBoolean()) {
9270 r_ins
= w
.i2d(r_ins
);
9271 } else if (r
.isUndefined()) {
9272 r_ins
= w
.immd(js_NaN
);
9273 } else if (r
.isString()) {
9274 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9275 LIns
* args
[] = { ok_ins
, r_ins
, cx_ins
};
9276 r_ins
= w
.call(&js_StringToNumber_ci
, args
);
9278 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9280 } else if (r
.isNull()) {
9281 r_ins
= w
.immd(0.0);
9283 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9284 "have been handled at start of method");
9285 RETURN_STOP_A("safety belt");
9289 AutoValueRooter
tvr(cx
);
9291 ValueToNumber(cx
, tvr
.value(), &lnum
);
9293 ValueToNumber(cx
, tvr
.value(), &rnum
);
9295 cond
= EvalCmp(op
, lnum
, rnum
);
9298 /* 11.8.5 steps 6-15. */
9301 * If the result is not a number or it's not a quad, we must use an integer
9305 JS_ASSERT(isCmpDOpcode(op
));
9306 op
= cmpOpcodeD2I(op
);
9308 x
= w
.ins2(op
, l_ins
, r_ins
);
9310 jsbytecode
* pc
= cx
->regs
->pc
;
9313 * Don't guard if the same path is always taken. If it isn't, we have to
9314 * fuse comparisons and the following branch, because the interpreter does
9317 if (tryBranchAfterCond
)
9318 fuseIf(pc
+ 1, cond
, x
);
9321 * There is no need to write out the result of this comparison if the trace
9322 * ends on this operation.
9324 if (pc
[1] == JSOP_IFNE
|| pc
[1] == JSOP_IFEQ
)
9325 CHECK_STATUS_A(checkTraceEnd(pc
+ 1));
9328 * We update the stack after the guard. This is safe since the guard bails
9329 * out at the comparison and the interpreter will therefore re-execute the
9330 * comparison. This way the value of the condition doesn't have to be
9331 * calculated and saved on the stack in most cases.
9335 return ARECORD_CONTINUE
;
9338 JS_REQUIRES_STACK RecordingStatus
9339 TraceRecorder::unary(LOpcode op
)
9341 Value
& v
= stackval(-1);
9342 bool intop
= retTypes
[op
] == LTy_I
;
9351 return RECORD_CONTINUE
;
9356 JS_REQUIRES_STACK RecordingStatus
9357 TraceRecorder::binary(LOpcode op
)
9359 Value
& r
= stackval(-1);
9360 Value
& l
= stackval(-2);
9362 if (!l
.isPrimitive()) {
9363 CHECK_STATUS(guardNativeConversion(l
));
9364 if (!r
.isPrimitive()) {
9365 CHECK_STATUS(guardNativeConversion(r
));
9366 return callImacro(binary_imacros
.obj_obj
);
9368 return callImacro(binary_imacros
.obj_any
);
9370 if (!r
.isPrimitive()) {
9371 CHECK_STATUS(guardNativeConversion(r
));
9372 return callImacro(binary_imacros
.any_obj
);
9375 bool intop
= retTypes
[op
] == LTy_I
;
9379 bool leftIsNumber
= l
.isNumber();
9380 jsdouble lnum
= leftIsNumber
? l
.toNumber() : 0;
9382 bool rightIsNumber
= r
.isNumber();
9383 jsdouble rnum
= rightIsNumber
? r
.toNumber() : 0;
9386 NanoAssert(op
!= LIR_addd
); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9387 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9388 LIns
* args
[] = { ok_ins
, a
, cx_ins
};
9389 a
= w
.call(&js_StringToNumber_ci
, args
);
9391 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9394 lnum
= js_StringToNumber(cx
, l
.toString(), &ok
);
9396 RETURN_ERROR("oom");
9397 leftIsNumber
= true;
9400 NanoAssert(op
!= LIR_addd
); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9401 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
9402 LIns
* args
[] = { ok_ins
, b
, cx_ins
};
9403 b
= w
.call(&js_StringToNumber_ci
, args
);
9405 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
9408 rnum
= js_StringToNumber(cx
, r
.toString(), &ok
);
9410 RETURN_ERROR("oom");
9411 rightIsNumber
= true;
9413 if (l
.isBoolean()) {
9415 lnum
= l
.toBoolean();
9416 leftIsNumber
= true;
9417 } else if (l
.isUndefined()) {
9420 leftIsNumber
= true;
9422 if (r
.isBoolean()) {
9424 rnum
= r
.toBoolean();
9425 rightIsNumber
= true;
9426 } else if (r
.isUndefined()) {
9429 rightIsNumber
= true;
9431 if (leftIsNumber
&& rightIsNumber
) {
9433 a
= (op
== LIR_rshui
) ? d2u(a
) : d2i(a
);
9436 a
= alu(op
, lnum
, rnum
, a
, b
);
9438 a
= (op
== LIR_rshui
) ? w
.ui2d(a
) : w
.i2d(a
);
9440 return RECORD_CONTINUE
;
9445 #if defined DEBUG_notme && defined XP_UNIX
9448 static FILE* shapefp
= NULL
;
9451 DumpShape(JSObject
* obj
, const char* prefix
)
9454 shapefp
= fopen("/tmp/shapes.dump", "w");
9459 fprintf(shapefp
, "\n%s: shape %u flags %x\n", prefix
, obj
->shape(), obj
->flags
);
9460 for (Shape::Range r
= obj
->lastProperty()->all(); !r
.empty(); r
.popFront()) {
9461 const Shape
&shape
= r
.front();
9463 if (JSID_IS_ATOM(shape
.id
)) {
9465 JS_PutString(JSID_TO_STRING(shape
.id
), shapefp
);
9467 JS_ASSERT(!JSID_IS_OBJECT(shape
.id
));
9468 fprintf(shapefp
, " %d", JSID_TO_INT(shape
.id
));
9470 fprintf(shapefp
, " %u %p %p %x %x %d\n",
9471 shape
.slot
, shape
.getter
, shape
.setter
, shape
.attrs
, shape
.flags
, shape
.shortid
);
9477 TraceRecorder::dumpGuardedShapes(const char* prefix
)
9479 for (GuardedShapeTable::Range r
= guardedShapeTable
.all(); !r
.empty(); r
.popFront())
9480 DumpShape(r
.front().value
, prefix
);
9482 #endif /* DEBUG_notme && XP_UNIX */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
                          VMSideExit* exit)
{
    // Test (with add if missing) for a remembered guard for (obj_ins, obj).
    GuardedShapeTable::AddPtr p = guardedShapeTable.lookupForAdd(obj_ins);
    if (p) {
        JS_ASSERT(p->value == obj);
        return RECORD_CONTINUE;
    }
    if (!guardedShapeTable.add(p, obj_ins, obj))
        return RECORD_ERROR;

    if (obj == globalObj) {
        // In this case checking object identity is equivalent and faster.
        guard(true,
              w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"),
              exit);
        return RECORD_CONTINUE;
    }

#if defined DEBUG_notme && defined XP_UNIX
    DumpShape(obj, "guard");
    fprintf(shapefp, "for obj_ins %p\n", obj_ins);
#endif

    // Finally, emit the shape guard.
    guard(true, w.name(w.eqiN(w.ldiObjShape(obj_ins), shape), guardName), exit);
    return RECORD_CONTINUE;
}
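/*
 * Note (explanation added): the guardedShapeTable memoization means each
 * (obj_ins, obj) pair pays for at most one shape guard per trace; repeated
 * property accesses on the same object hit the early-return path above
 * instead of emitting redundant guards.
 */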
void
TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
{
    for (GuardedShapeTable::Enum e(guardedShapeTable); !e.empty(); e.popFront()) {
        if (e.front().value == obj) {
#if defined DEBUG_notme && defined XP_UNIX
            DumpShape(entry->obj, "forget");
#endif
            e.removeFront();
        }
    }
}

void
TraceRecorder::forgetGuardedShapes()
{
#if defined DEBUG_notme && defined XP_UNIX
    dumpGuardedShapes("forget-all");
#endif
    guardedShapeTable.clear();
}
9537 JS_REQUIRES_STACK AbortableRecordingStatus
9538 TraceRecorder::test_property_cache(JSObject
* obj
, LIns
* obj_ins
, JSObject
*& obj2
, PCVal
& pcval
)
9540 jsbytecode
* pc
= cx
->regs
->pc
;
9541 JS_ASSERT(*pc
!= JSOP_INITPROP
&& *pc
!= JSOP_INITMETHOD
&&
9542 *pc
!= JSOP_SETNAME
&& *pc
!= JSOP_SETPROP
&& *pc
!= JSOP_SETMETHOD
);
9544 // Mimic the interpreter's special case for dense arrays by skipping up one
9545 // hop along the proto chain when accessing a named (not indexed) property,
9546 // typically to find Array.prototype methods.
9547 JSObject
* aobj
= obj
;
9548 if (obj
->isDenseArray()) {
9549 guardDenseArray(obj_ins
, BRANCH_EXIT
);
9550 aobj
= obj
->getProto();
9551 obj_ins
= w
.ldpObjProto(obj_ins
);
9554 if (!aobj
->isNative())
9555 RETURN_STOP_A("non-native object");
9558 PropertyCacheEntry
* entry
;
9559 JS_PROPERTY_CACHE(cx
).test(cx
, pc
, aobj
, obj2
, entry
, atom
);
9561 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
9562 // FIXME: bug 458271.
9563 jsid id
= ATOM_TO_JSID(atom
);
9565 // The lookup below may change object shapes.
9566 forgetGuardedShapes();
9569 if (JOF_OPMODE(*pc
) == JOF_NAME
) {
9570 JS_ASSERT(aobj
== obj
);
9572 TraceMonitor
&localtm
= *traceMonitor
;
9573 entry
= js_FindPropertyHelper(cx
, id
, true, &obj
, &obj2
, &prop
);
9575 RETURN_ERROR_A("error in js_FindPropertyHelper");
9577 /* js_FindPropertyHelper can reenter the interpreter and kill |this|. */
9578 if (!localtm
.recorder
)
9579 return ARECORD_ABORTED
;
9581 if (entry
== JS_NO_PROP_CACHE_FILL
)
9582 RETURN_STOP_A("cannot cache name");
9584 TraceMonitor
&localtm
= *traceMonitor
;
9585 int protoIndex
= js_LookupPropertyWithFlags(cx
, aobj
, id
,
9590 RETURN_ERROR_A("error in js_LookupPropertyWithFlags");
9592 /* js_LookupPropertyWithFlags can reenter the interpreter and kill |this|. */
9593 if (!localtm
.recorder
)
9594 return ARECORD_ABORTED
;
9597 if (!obj2
->isNative())
9598 RETURN_STOP_A("property found on non-native object");
9599 entry
= JS_PROPERTY_CACHE(cx
).fill(cx
, aobj
, 0, protoIndex
, obj2
,
9602 if (entry
== JS_NO_PROP_CACHE_FILL
)
9609 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
9610 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
9611 // the global it's assigning does not yet exist, create it.
9614 // Use a null pcval to return "no such property" to our caller.
9616 return ARECORD_CONTINUE
;
9620 RETURN_STOP_A("failed to fill property cache");
9623 #ifdef JS_THREADSAFE
9624 // There's a potential race in any JS_THREADSAFE embedding that's nuts
9625 // enough to share mutable objects on the scope or proto chain, but we
9626 // don't care about such insane embeddings. Anyway, the (scope, proto)
9627 // entry->vcap coordinates must reach obj2 from aobj at this point.
9628 JS_ASSERT(cx
->thread
->data
.requestDepth
);
9631 return InjectStatus(guardPropertyCacheHit(obj_ins
, aobj
, obj2
, entry
, pcval
));
9634 JS_REQUIRES_STACK RecordingStatus
9635 TraceRecorder::guardPropertyCacheHit(LIns
* obj_ins
,
9638 PropertyCacheEntry
* entry
,
9641 VMSideExit
* exit
= snapshot(BRANCH_EXIT
);
9643 uint32 vshape
= entry
->vshape();
9645 // Special case for the global object, which may be aliased to get a property value.
9646 // To catch cross-global property accesses we must check against globalObj identity.
9647 // But a JOF_NAME mode opcode needs no guard, as we ensure the global object's shape
9648 // never changes, and name ops can't reach across a global object ('with' aborts).
9649 if (aobj
== globalObj
) {
9650 if (entry
->adding())
9651 RETURN_STOP("adding a property to the global object");
9653 JSOp op
= js_GetOpcode(cx
, cx
->fp()->script(), cx
->regs
->pc
);
9654 if (JOF_OPMODE(op
) != JOF_NAME
) {
9656 w
.name(w
.eqp(obj_ins
, w
.immpObjGC(globalObj
)), "guard_global"),
9660 CHECK_STATUS(guardShape(obj_ins
, aobj
, entry
->kshape
, "guard_kshape", exit
));
9663 if (entry
->adding()) {
9665 w
.ldiRuntimeProtoHazardShape(w
.ldpConstContextField(runtime
));
9668 w
.name(w
.eqiN(vshape_ins
, vshape
), "guard_protoHazardShape"),
9672 // For any hit that goes up the scope and/or proto chains, we will need to
9673 // guard on the shape of the object containing the property.
9674 if (entry
->vcapTag() >= 1) {
9675 JS_ASSERT(obj2
->shape() == vshape
);
9676 if (obj2
== globalObj
)
9677 RETURN_STOP("hitting the global object via a prototype chain");
9680 if (entry
->vcapTag() == 1) {
9681 // Duplicate the special case in PropertyCache::test.
9682 obj2_ins
= w
.ldpObjProto(obj_ins
);
9683 guard(false, w
.eqp0(obj2_ins
), exit
);
9685 obj2_ins
= w
.immpObjGC(obj2
);
9687 CHECK_STATUS(guardShape(obj2_ins
, obj2
, vshape
, "guard_vshape", exit
));
9690 pcval
= entry
->vword
;
9691 return RECORD_CONTINUE
;
9695 TraceRecorder::stobj_set_fslot(LIns
*obj_ins
, unsigned slot
, const Value
&v
, LIns
* v_ins
)
9697 box_value_into(v
, v_ins
, FSlotsAddress(obj_ins
, slot
));
9701 TraceRecorder::stobj_set_dslot(LIns
*obj_ins
, unsigned slot
, LIns
*& slots_ins
,
9702 const Value
&v
, LIns
* v_ins
)
9705 slots_ins
= w
.ldpObjSlots(obj_ins
);
9706 box_value_into(v
, v_ins
, DSlotsAddress(slots_ins
, slot
));
9710 TraceRecorder::stobj_set_slot(JSObject
*obj
, LIns
* obj_ins
, unsigned slot
, LIns
*& slots_ins
,
9711 const Value
&v
, LIns
* v_ins
)
9714 * A shape guard must have already been generated for obj, which will
9715 * ensure that future objects have the same number of fixed slots.
9717 if (!obj
->hasSlotsArray()) {
9718 JS_ASSERT(slot
< obj
->numSlots());
9719 stobj_set_fslot(obj_ins
, slot
, v
, v_ins
);
9721 stobj_set_dslot(obj_ins
, slot
, slots_ins
, v
, v_ins
);
9726 TraceRecorder::unbox_slot(JSObject
*obj
, LIns
*obj_ins
, uint32 slot
, VMSideExit
*exit
)
9728 /* Same guarantee about fixed slots as stobj_set_slot. */
9729 Address addr
= (!obj
->hasSlotsArray())
9730 ? (Address
)FSlotsAddress(obj_ins
, slot
)
9731 : (Address
)DSlotsAddress(w
.ldpObjSlots(obj_ins
), slot
);
9733 return unbox_value(obj
->getSlot(slot
), addr
, exit
);
9736 #if JS_BITS_PER_WORD == 32
9739 TraceRecorder::box_undefined_into(Address addr
)
9741 w
.stiValueTag(w
.nameImmui(JSVAL_TAG_UNDEFINED
), addr
);
9742 w
.stiValuePayload(w
.immi(0), addr
);
9746 TraceRecorder::box_null_into(Address addr
)
9748 w
.stiValueTag(w
.nameImmui(JSVAL_TAG_NULL
), addr
);
9749 w
.stiValuePayload(w
.immi(0), addr
);
9753 TraceRecorder::unbox_number_as_double(Address addr
, LIns
*tag_ins
, VMSideExit
*exit
)
9755 guard(true, w
.leui(tag_ins
, w
.nameImmui(JSVAL_UPPER_INCL_TAG_OF_NUMBER_SET
)), exit
);
9756 LIns
*val_ins
= w
.ldiValuePayload(addr
);
9757 LIns
* args
[] = { val_ins
, tag_ins
};
9758 return w
.call(&js_UnboxDouble_ci
, args
);
9762 TraceRecorder::unbox_non_double_object(Address addr
, LIns
* tag_ins
,
9763 JSValueType type
, VMSideExit
* exit
)
9766 if (type
== JSVAL_TYPE_UNDEFINED
) {
9767 val_ins
= w
.immiUndefined();
9768 } else if (type
== JSVAL_TYPE_NULL
) {
9769 val_ins
= w
.immpNull();
9771 JS_ASSERT(type
== JSVAL_TYPE_INT32
|| type
== JSVAL_TYPE_OBJECT
||
9772 type
== JSVAL_TYPE_STRING
|| type
== JSVAL_TYPE_BOOLEAN
||
9773 type
== JSVAL_TYPE_MAGIC
);
9774 val_ins
= w
.ldiValuePayload(addr
);
9777 guard(true, w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TYPE_TO_TAG(type
))), exit
);
9782 TraceRecorder::unbox_object(Address addr
, LIns
* tag_ins
, JSValueType type
, VMSideExit
* exit
)
9784 JS_ASSERT(type
== JSVAL_TYPE_FUNOBJ
|| type
== JSVAL_TYPE_NONFUNOBJ
);
9785 guard(true, w
.name(w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TAG_OBJECT
)), "isObj"), exit
);
9786 LIns
*payload_ins
= w
.ldiValuePayload(addr
);
9787 if (type
== JSVAL_TYPE_FUNOBJ
)
9788 guardClass(payload_ins
, &js_FunctionClass
, exit
, LOAD_NORMAL
);
9790 guardNotClass(payload_ins
, &js_FunctionClass
, exit
, LOAD_NORMAL
);
9795 TraceRecorder::unbox_value(const Value
&v
, Address addr
, VMSideExit
*exit
, bool force_double
)
9797 LIns
*tag_ins
= w
.ldiValueTag(addr
);
9799 if (v
.isNumber() && force_double
)
9800 return unbox_number_as_double(addr
, tag_ins
, exit
);
9803 guard(true, w
.name(w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TAG_INT32
)), "isInt"), exit
);
9804 return w
.i2d(w
.ldiValuePayload(addr
));
9808 guard(true, w
.name(w
.ltui(tag_ins
, w
.nameImmui(JSVAL_TAG_CLEAR
)), "isDouble"), exit
);
9813 JSValueType type
= v
.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ
: JSVAL_TYPE_NONFUNOBJ
;
9814 return unbox_object(addr
, tag_ins
, type
, exit
);
9817 JSValueType type
= v
.extractNonDoubleObjectTraceType();
9818 return unbox_non_double_object(addr
, tag_ins
, type
, exit
);
9822 TraceRecorder::unbox_any_object(Address addr
, LIns
**obj_ins
, LIns
**is_obj_ins
)
9824 LIns
*tag_ins
= w
.ldiValueTag(addr
);
9825 *is_obj_ins
= w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TAG_OBJECT
));
9826 *obj_ins
= w
.ldiValuePayload(addr
);
9830 TraceRecorder::is_boxed_true(Address addr
)
9832 LIns
*tag_ins
= w
.ldiValueTag(addr
);
9833 LIns
*bool_ins
= w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TAG_BOOLEAN
));
9834 LIns
*payload_ins
= w
.ldiValuePayload(addr
);
9835 return w
.gtiN(w
.andi(bool_ins
, payload_ins
), 0);
9839 TraceRecorder::is_boxed_magic(Address addr
, JSWhyMagic why
)
9841 LIns
*tag_ins
= w
.ldiValueTag(addr
);
9842 return w
.eqi(tag_ins
, w
.nameImmui(JSVAL_TAG_MAGIC
));
9846 TraceRecorder::box_value_into(const Value
&v
, LIns
*v_ins
, Address addr
)
9849 JS_ASSERT(v_ins
->isD());
9850 if (fcallinfo(v_ins
) == &js_UnboxDouble_ci
) {
9851 w
.stiValueTag(v_ins
->callArgN(0), addr
);
9852 w
.stiValuePayload(v_ins
->callArgN(1), addr
);
9853 } else if (IsPromotedInt32(v_ins
)) {
9854 LIns
*int_ins
= w
.demoteToInt32(v_ins
);
9855 w
.stiValueTag(w
.nameImmui(JSVAL_TAG_INT32
), addr
);
9856 w
.stiValuePayload(int_ins
, addr
);
9863 if (v
.isUndefined()) {
9864 box_undefined_into(addr
);
9865 } else if (v
.isNull()) {
9866 box_null_into(addr
);
9868 JSValueTag tag
= v
.isObject() ? JSVAL_TAG_OBJECT
: v
.extractNonDoubleObjectTraceTag();
9869 w
.stiValueTag(w
.nameImmui(tag
), addr
);
9870 w
.stiValuePayload(v_ins
, addr
);
9875 TraceRecorder::box_value_for_native_call(const Value
&v
, LIns
*v_ins
)
9877 return box_value_into_alloc(v
, v_ins
);
9880 #elif JS_BITS_PER_WORD == 64
9883 TraceRecorder::box_undefined_into(Address addr
)
9885 w
.stq(w
.nameImmq(JSVAL_BITS(JSVAL_VOID
)), addr
);
9889 TraceRecorder::non_double_object_value_has_type(LIns
*v_ins
, JSValueType type
)
9891 return w
.eqi(w
.q2i(w
.rshuqN(v_ins
, JSVAL_TAG_SHIFT
)),
9892 w
.nameImmui(JSVAL_TYPE_TO_TAG(type
)));
9896 TraceRecorder::unpack_ptr(LIns
*v_ins
)
9898 return w
.andq(v_ins
, w
.nameImmq(JSVAL_PAYLOAD_MASK
));
9902 TraceRecorder::unbox_number_as_double(LIns
*v_ins
, VMSideExit
*exit
)
9905 w
.ltuq(v_ins
, w
.nameImmq(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_NUMBER_SET
)),
9907 LIns
* args
[] = { v_ins
};
9908 return w
.call(&js_UnboxDouble_ci
, args
);
9911 inline nanojit::LIns
*
9912 TraceRecorder::unbox_non_double_object(LIns
* v_ins
, JSValueType type
, VMSideExit
* exit
)
9914 JS_ASSERT(type
<= JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET
);
9916 if (type
== JSVAL_TYPE_UNDEFINED
) {
9917 unboxed_ins
= w
.immiUndefined();
9918 } else if (type
== JSVAL_TYPE_NULL
) {
9919 unboxed_ins
= w
.immpNull();
9920 } else if (type
>= JSVAL_LOWER_INCL_TYPE_OF_PTR_PAYLOAD_SET
) {
9921 unboxed_ins
= unpack_ptr(v_ins
);
9923 JS_ASSERT(type
== JSVAL_TYPE_INT32
|| type
== JSVAL_TYPE_BOOLEAN
|| type
== JSVAL_TYPE_MAGIC
);
9924 unboxed_ins
= w
.q2i(v_ins
);
9927 guard(true, non_double_object_value_has_type(v_ins
, type
), exit
);
9932 TraceRecorder::unbox_object(LIns
* v_ins
, JSValueType type
, VMSideExit
* exit
)
9934 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT
== JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET
);
9935 JS_ASSERT(type
== JSVAL_TYPE_FUNOBJ
|| type
== JSVAL_TYPE_NONFUNOBJ
);
9937 w
.geuq(v_ins
, w
.nameImmq(JSVAL_SHIFTED_TAG_OBJECT
)),
9939 v_ins
= unpack_ptr(v_ins
);
9940 if (type
== JSVAL_TYPE_FUNOBJ
)
9941 guardClass(v_ins
, &js_FunctionClass
, exit
, LOAD_NORMAL
);
9943 guardNotClass(v_ins
, &js_FunctionClass
, exit
, LOAD_NORMAL
);
9948 TraceRecorder::unbox_value(const Value
&v
, Address addr
, VMSideExit
*exit
, bool force_double
)
9950 LIns
*v_ins
= w
.ldq(addr
);
9952 if (v
.isNumber() && force_double
)
9953 return unbox_number_as_double(v_ins
, exit
);
9956 guard(true, non_double_object_value_has_type(v_ins
, JSVAL_TYPE_INT32
), exit
);
9957 return w
.i2d(w
.q2i(v_ins
));
9961 guard(true, w
.leuq(v_ins
, w
.nameImmq(JSVAL_SHIFTED_TAG_MAX_DOUBLE
)), exit
);
9962 return w
.qasd(v_ins
);
9966 JSValueType type
= v
.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ
: JSVAL_TYPE_NONFUNOBJ
;
9967 return unbox_object(v_ins
, type
, exit
);
9970 JSValueType type
= v
.extractNonDoubleObjectTraceType();
9971 return unbox_non_double_object(v_ins
, type
, exit
);
9975 TraceRecorder::unbox_any_object(Address addr
, LIns
**obj_ins
, LIns
**is_obj_ins
)
9977 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT
== JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET
);
9978 LIns
*v_ins
= w
.ldq(addr
);
9979 *is_obj_ins
= w
.geuq(v_ins
, w
.nameImmq(JSVAL_TYPE_OBJECT
));
9980 *obj_ins
= unpack_ptr(v_ins
);
9984 TraceRecorder::is_boxed_true(Address addr
)
9986 LIns
*v_ins
= w
.ldq(addr
);
9987 return w
.eqq(v_ins
, w
.immq(JSVAL_BITS(JSVAL_TRUE
)));
9991 TraceRecorder::is_boxed_magic(Address addr
, JSWhyMagic why
)
9993 LIns
*v_ins
= w
.ldq(addr
);
9994 return w
.eqq(v_ins
, w
.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC
, why
)));
9998 TraceRecorder::box_value_for_native_call(const Value
&v
, LIns
*v_ins
)
10000 if (v
.isNumber()) {
10001 JS_ASSERT(v_ins
->isD());
10002 if (fcallinfo(v_ins
) == &js_UnboxDouble_ci
)
10003 return v_ins
->callArgN(0);
10004 if (IsPromotedInt32(v_ins
)) {
10005 return w
.orq(w
.ui2uq(w
.demoteToInt32(v_ins
)),
10006 w
.nameImmq(JSVAL_SHIFTED_TAG_INT32
));
10008 return w
.dasq(v_ins
);
10012 return w
.nameImmq(JSVAL_BITS(JSVAL_NULL
));
10013 if (v
.isUndefined())
10014 return w
.nameImmq(JSVAL_BITS(JSVAL_VOID
));
10016 JSValueTag tag
= v
.isObject() ? JSVAL_TAG_OBJECT
: v
.extractNonDoubleObjectTraceTag();
10017 uint64 shiftedTag
= ((uint64
)tag
) << JSVAL_TAG_SHIFT
;
10018 LIns
*shiftedTag_ins
= w
.nameImmq(shiftedTag
);
10020 if (v
.hasPtrPayload())
10021 return w
.orq(v_ins
, shiftedTag_ins
);
10022 return w
.orq(w
.ui2uq(v_ins
), shiftedTag_ins
);
10026 TraceRecorder::box_value_into(const Value
&v
, LIns
*v_ins
, Address addr
)
10028 LIns
*boxed_ins
= box_value_for_native_call(v
, v_ins
);
10029 w
.st(boxed_ins
, addr
);
10032 #endif /* JS_BITS_PER_WORD */
10035 TraceRecorder::box_value_into_alloc(const Value
&v
, LIns
*v_ins
)
10037 LIns
*alloc_ins
= w
.allocp(sizeof(Value
));
10038 box_value_into(v
, v_ins
, AllocSlotsAddress(alloc_ins
));
LIns*
TraceRecorder::is_string_id(LIns *id_ins)
{
    return w.eqp0(w.andp(id_ins, w.nameImmw(JSID_TYPE_MASK)));
}

LIns*
TraceRecorder::unbox_string_id(LIns *id_ins)
{
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0);
    return id_ins;
}

LIns*
TraceRecorder::unbox_int_id(LIns *id_ins)
{
    return w.rshiN(w.p2i(id_ins), 1);
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getThis(LIns*& this_ins)
{
    JSStackFrame *fp = cx->fp();

    if (fp->isGlobalFrame()) {
        // Top-level code. It is an invariant of the interpreter that fp->thisv
        // is non-null. Furthermore, we would not be recording if globalObj
        // were not at the end of the scope chain, so `this` can only be one
        // object, which we can burn into the trace.
        JS_ASSERT(!fp->thisValue().isPrimitive());

        JSObject *obj = globalObj->thisObject(cx);
        if (!obj)
            RETURN_ERROR("thisObject hook failed");
        JS_ASSERT(&fp->thisValue().toObject() == obj);

        this_ins = w.immpObjGC(&fp->thisValue().toObject());
        return RECORD_CONTINUE;
    }

    JS_ASSERT(fp->callee().getGlobal() == globalObj);
    Value& thisv = fp->thisValue();

    if (thisv.isObject() || fp->fun()->inStrictMode()) {
        /*
         * fp->thisValue() has already been computed. Since the
         * type-specialization of traces distinguishes between computed and
         * uncomputed |this|, the same will be true at run time (or we
         * won't get this far).
         */
        this_ins = get(&fp->thisValue());
        return RECORD_CONTINUE;
    }

    /* Don't bother tracing calls on wrapped primitive |this| values. */
    if (!thisv.isNullOrUndefined())
        RETURN_STOP("wrapping primitive |this|");

    /*
     * Compute 'this' now. The result is globalObj->thisObject(), which is
     * trace-constant. getThisObject writes back to fp->thisValue(), so do
     * the same on trace.
     */
    if (!fp->computeThis(cx))
        RETURN_ERROR("computeThis failed");

    /* thisv is a reference, so it'll see the newly computed |this|. */
    this_ins = w.immpObjGC(globalObj);
    set(&thisv, this_ins);
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK void
TraceRecorder::guardClassHelper(bool cond, LIns* obj_ins, Class* clasp, VMSideExit* exit,
                                LoadQual loadQual)
{
    LIns* class_ins = w.ldpObjClasp(obj_ins, loadQual);

#ifdef JS_JIT_SPEW
    char namebuf[32];
    JS_snprintf(namebuf, sizeof namebuf, "%s_clasp", clasp->name);
    LIns* clasp_ins = w.name(w.immpNonGC(clasp), namebuf);
    JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
    LIns* cmp_ins = w.name(w.eqp(class_ins, clasp_ins), namebuf);
#else
    LIns* clasp_ins = w.immpNonGC(clasp);
    LIns* cmp_ins = w.eqp(class_ins, clasp_ins);
#endif

    guard(cond, cmp_ins, exit);
}
JS_REQUIRES_STACK void
TraceRecorder::guardClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
{
    guardClassHelper(true, obj_ins, clasp, exit, loadQual);
}

JS_REQUIRES_STACK void
TraceRecorder::guardNotClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
{
    guardClassHelper(false, obj_ins, clasp, exit, loadQual);
}

JS_REQUIRES_STACK void
TraceRecorder::guardDenseArray(LIns* obj_ins, ExitType exitType)
{
    guardClass(obj_ins, &js_ArrayClass, snapshot(exitType), LOAD_NORMAL);
}

JS_REQUIRES_STACK void
TraceRecorder::guardDenseArray(LIns* obj_ins, VMSideExit* exit)
{
    guardClass(obj_ins, &js_ArrayClass, exit, LOAD_NORMAL);
}
JS_REQUIRES_STACK bool
TraceRecorder::guardHasPrototype(JSObject* obj, LIns* obj_ins,
                                 JSObject** pobj, LIns** pobj_ins,
                                 VMSideExit* exit)
{
    *pobj = obj->getProto();
    *pobj_ins = w.ldpObjProto(obj_ins);

    bool cond = *pobj == NULL;
    guard(cond, w.name(w.eqp0(*pobj_ins), "guard(proto-not-null)"), exit);
    return !cond;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, VMSideExit *exit)
{
    /*
     * Guard that no object along the prototype chain has any indexed
     * properties which might become visible through holes in the array.
     */
    if (js_PrototypeHasIndexedProperties(cx, obj))
        return RECORD_STOP;

    JS_ASSERT(obj->isDenseArray());

    /*
     * Changing __proto__ on a dense array makes it slow, so we can just bake in
     * the current prototype as the first prototype to test. This avoids an
     * extra load when running the trace.
     */
    obj = obj->getProto();
    obj_ins = w.immpObjGC(obj);

    /*
     * Changing __proto__ on a native object changes its shape, and adding
     * indexed properties changes shapes too. And non-native objects never pass
     * shape guards. So it's enough to just guard on shapes up the proto chain;
     * any change to the proto chain length will make us fail a guard before we
     * run off the end of the proto chain.
     */
    while (obj) {
        CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
        obj = obj->getProto();
        obj_ins = w.ldpObjProto(obj_ins);
    }

    return RECORD_CONTINUE;
}
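
/*
 * Example of why the shape guards above are needed: reading a hole from a
 * dense array, e.g.
 *
 *   var a = [1, , 3];   // a[1] is a hole
 *   var x = a[1];       // falls back to Array.prototype / Object.prototype
 *
 * would start seeing values on trace if someone later did
 * Object.prototype[1] = 42. Guarding the shape of every object on the proto
 * chain catches that kind of mutation and exits the trace.
 */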
/*
 * Guard that the object stored in v has the ECMA standard [[DefaultValue]]
 * method. Several imacros require this.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardNativeConversion(Value& v)
{
    JSObject* obj = &v.toObject();
    LIns* obj_ins = get(&v);

    ConvertOp convert = obj->getClass()->convert;
    if (convert != Valueify(JS_ConvertStub) && convert != js_TryValueOf)
        RETURN_STOP("operand has convert hook");

    VMSideExit* exit = snapshot(BRANCH_EXIT);
    if (obj->isNative()) {
        // The common case. Guard on shape rather than class because it'll
        // often be free: we're about to do a shape guard anyway to get the
        // .valueOf property of this object, and this guard will be cached.
        CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(),
                                "guardNativeConversion", exit));
    } else {
        // We could specialize to guard on just JSClass.convert, but a mere
        // class guard is simpler and slightly faster.
        guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);
    }
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK void
TraceRecorder::clearReturningFrameFromNativeTracker()
{
    /*
     * Clear all tracker entries associated with the frame for the same reason
     * described in record_EnterFrame. Reuse the generic visitor to avoid
     * duplicating logic. The generic visitor stops at 'sp', whereas we need to
     * clear up to script->nslots, so finish the job manually.
     */
    ClearSlotsVisitor visitor(nativeFrameTracker);
    VisitStackSlots(visitor, cx, 0);
    Value *vp = cx->regs->sp;
    Value *vpend = cx->fp()->slots() + cx->fp()->script()->nslots;
    for (; vp < vpend; ++vp)
        nativeFrameTracker.set(vp, NULL);
}
struct BoxArg
{
    BoxArg(TraceRecorder *tr, Address addr)
        : tr(tr), addr(addr) {}
    TraceRecorder *tr;
    Address addr;
    void operator()(uintN argi, Value *src) {
        tr->box_value_into(*src, tr->get(src),
                           OffsetAddress(addr, argi * sizeof(Value)));
    }
};
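
/*
 * BoxArg is used as a functor with the frame's argument iterators, e.g.
 * fp->forEachCanonicalActualArg(BoxArg(this, addr)) below: for each argument
 * it boxes the traced value into addr + argi * sizeof(Value).
 */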
/*
 * If we have created an |arguments| object for the frame, we must copy the
 * argument values into the object as properties in case it is used after
 * this frame returns.
 */
JS_REQUIRES_STACK void
TraceRecorder::putActivationObjects()
{
    JSStackFrame *const fp = cx->fp();
    bool have_args = fp->hasArgsObj() && !fp->argsObj().isStrictArguments();
    bool have_call = fp->isFunctionFrame() && fp->fun()->isHeavyweight();

    if (!have_args && !have_call)
        return;

    uintN nformal = fp->numFormalArgs();
    uintN nactual = fp->numActualArgs();
    uintN nargs = have_args && have_call ? Max(nformal, nactual)
                                         : have_args ? nactual : nformal;

    LIns *args_ins;
    if (nargs > 0) {
        args_ins = w.allocp(nargs * sizeof(Value));
        /* Don't copy all the actuals if we are only boxing for the callobj. */
        Address addr = AllocSlotsAddress(args_ins);
        if (nargs == nactual)
            fp->forEachCanonicalActualArg(BoxArg(this, addr));
        else
            fp->forEachFormalArg(BoxArg(this, addr));
    } else {
        args_ins = w.immpNonGC(0);
    }

    if (have_args) {
        LIns* argsobj_ins = getFrameObjPtr(fp->addressOfArgs());
        LIns* args[] = { args_ins, argsobj_ins, cx_ins };
        w.call(&js_PutArgumentsOnTrace_ci, args);
    }

    if (have_call) {
        int nslots = fp->fun()->script()->bindings.countVars();

        LIns* slots_ins;
        if (nslots) {
            slots_ins = w.allocp(sizeof(Value) * nslots);
            for (int i = 0; i < nslots; ++i) {
                box_value_into(fp->slots()[i], get(&fp->slots()[i]),
                               AllocSlotsAddress(slots_ins, i));
            }
        } else {
            slots_ins = w.immpNonGC(0);
        }

        LIns* scopeChain_ins = getFrameObjPtr(fp->addressOfScopeChain());
        LIns* args[] = { slots_ins, w.nameImmi(nslots), args_ins,
                         w.nameImmi(fp->numFormalArgs()), scopeChain_ins, cx_ins };
        w.call(&js_PutCallObjectOnTrace_ci, args);
    }
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_EnterFrame()
{
    JSStackFrame* const fp = cx->fp();

    if (++callDepth >= MAX_CALLDEPTH)
        RETURN_STOP_A("exceeded maximum call depth");

    debug_only_stmt(JSAutoByteString funBytes);
    debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
                      cx->fp()->fun()->atom ?
                      js_AtomToPrintableString(cx, cx->fp()->fun()->atom, &funBytes) :
                      "<anonymous>",
                      callDepth);

    if (LogController.lcbits & LC_TMRecorder) {
        js_Disassemble(cx, cx->fp()->script(), JS_TRUE, stdout);
        debug_only_print0(LC_TMTracer, "----\n");
    }

    LIns* void_ins = w.immiUndefined();

    // Before we enter this frame, we need to clear out any dangling insns left
    // in the tracer. While we also clear when returning from a function, it is
    // possible to have the following sequence of stack usage:
    //
    //  [fp1]*****************      push
    //  [fp1]*****[fp2]             call
    //  [fp1]*****[fp2]***          push
    //
    // Duplicate native stack layout computation: see VisitFrameSlots header comment.

    // args: carefully copy stack layout
    uintN nactual = fp->numActualArgs();
    uintN nformal = fp->numFormalArgs();
    if (nactual < nformal) {
        // Fill in missing with void.
        JS_ASSERT(fp->actualArgs() == fp->formalArgs());
        Value *beg = fp->formalArgs() + nactual;
        Value *end = fp->formalArgsEnd();
        for (Value *vp = beg; vp != end; ++vp) {
            nativeFrameTracker.set(vp, NULL);
            set(vp, void_ins);
        }
    } else if (nactual > nformal) {
        // Although the VM clones the formal args to the top of the stack, due
        // to the fact that we only track the canonical arguments (in argument
        // order), the native stack offset of the arguments doesn't change. The
        // only thing that changes is which js::Value* in the tracker maps to
        // that slot. Thus, all we need to do here is fixup the trackers, not
        // emit any actual copying on trace.
        JS_ASSERT(fp->actualArgs() != fp->formalArgs());
        JS_ASSERT(fp->hasOverflowArgs());
        Value *srcbeg = fp->actualArgs() - 2;
        Value *srcend = fp->actualArgs() + nformal;
        Value *dstbeg = fp->formalArgs() - 2;
        for (Value *src = srcbeg, *dst = dstbeg; src != srcend; ++src, ++dst) {
            nativeFrameTracker.set(dst, NULL);
            tracker.set(dst, tracker.get(src));
            nativeFrameTracker.set(src, NULL);
            tracker.set(src, NULL);
        }
    }

    // argsObj: clear and set to null
    nativeFrameTracker.set(fp->addressOfArgs(), NULL);
    setFrameObjPtr(fp->addressOfArgs(), w.immpNull());

    // scopeChain: clear, initialize before snapshot, set below
    nativeFrameTracker.set(fp->addressOfScopeChain(), NULL);
    setFrameObjPtr(fp->addressOfScopeChain(), w.immpNull());

    // nfixed: clear and set to undefined
    Value *vp = fp->slots();
    Value *vpstop = vp + fp->numFixed();
    for (; vp < vpstop; ++vp) {
        nativeFrameTracker.set(vp, NULL);
        set(vp, void_ins);
    }

    // nfixed to nslots: clear
    vpstop = fp->slots() + fp->numSlots();
    for (; vp < vpstop; ++vp)
        nativeFrameTracker.set(vp, NULL);

    LIns* callee_ins = get(&cx->fp()->calleeValue());
    LIns* scopeChain_ins = w.ldpObjParent(callee_ins);

    // set scopeChain for real
    if (cx->fp()->fun()->isHeavyweight()) {
        if (js_IsNamedLambda(cx->fp()->fun()))
            RETURN_STOP_A("can't call named lambda heavyweight on trace");

        LIns* fun_ins = w.nameImmpNonGC(cx->fp()->fun());

        LIns* args[] = { scopeChain_ins, callee_ins, fun_ins, cx_ins };
        LIns* call_ins = w.call(&js_CreateCallObjectOnTrace_ci, args);
        guard(false, w.eqp0(call_ins), OOM_EXIT);

        setFrameObjPtr(fp->addressOfScopeChain(), call_ins);
    } else {
        setFrameObjPtr(fp->addressOfScopeChain(), scopeChain_ins);
    }

    /* Try inlining one level in case this recursion doesn't go too deep. */
    if (fp->script() == fp->prev()->script() &&
        fp->prev()->prev() && fp->prev()->prev()->script() == fp->script()) {
        RETURN_STOP_A("recursion started inlining");
    }

    if (fp->isConstructing()) {
        LIns* args[] = { callee_ins, w.nameImmpNonGC(&js_ObjectClass), cx_ins };
        LIns* tv_ins = w.call(&js_CreateThisFromTrace_ci, args);
        guard(false, w.eqp0(tv_ins), OOM_EXIT);
        set(&fp->thisValue(), tv_ins);
    }

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_LeaveFrame()
{
    debug_only_stmt(JSStackFrame *fp = cx->fp();)

    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, fp->script(),
              cx->regs->pc)].length == JSOP_CALL_LENGTH);

    if (callDepth-- <= 0)
        RETURN_STOP_A("returned out of a loop we started tracing");

    // LeaveFrame gets called after the interpreter popped the frame and
    // stored rval, so cx->fp() not cx->fp()->prev, and -1 not 0.
    set(&stackval(-1), rval_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PUSH()
{
    stack(0, w.immiUndefined());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POPV()
{
    Value& rval = stackval(-1);

    // Store it in cx->fp()->rval. NB: Tricky dependencies. cx->fp() is the right
    // frame because POPV appears only in global and eval code and we don't
    // trace JSOP_EVAL or leaving the frame where tracing started.
    LIns *fp_ins = entryFrameIns();
    box_value_into(rval, get(&rval), StackFrameAddress(fp_ins,
                                                       JSStackFrame::offsetOfReturnValue()));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENTERWITH()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEWITH()
{
    return ARECORD_STOP;
}
static JSBool JS_FASTCALL
functionProbe(JSContext *cx, JSFunction *fun, int enter)
{
#ifdef MOZ_TRACE_JSCALLS
    JSScript *script = fun ? FUN_SCRIPT(fun) : NULL;
    if (enter > 0)
        Probes::enterJSFun(cx, fun, script, enter);
    else
        Probes::exitJSFun(cx, fun, script, enter);
#endif
    return true;
}
JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, INT32, 0, ACCSET_ALL)
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETURN()
{
    /* A return from callDepth 0 terminates the current loop, except for recursion. */
    if (callDepth == 0) {
        AUDIT(returnLoopExits);
        return endLoop();
    }

    putActivationObjects();

    if (Probes::callTrackingActive(cx)) {
        LIns* args[] = { w.immi(0), w.nameImmpNonGC(cx->fp()->fun()), cx_ins };
        LIns* call_ins = w.call(&functionProbe_ci, args);
        guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
    }

    /* If we inlined this function call, make the return value available to the caller code. */
    Value& rval = stackval(-1);
    JSStackFrame *fp = cx->fp();
    if (fp->isConstructing() && rval.isPrimitive()) {
        rval_ins = get(&fp->thisValue());
    } else {
        rval_ins = get(&rval);
    }
    debug_only_stmt(JSAutoByteString funBytes);
    debug_only_printf(LC_TMTracer,
                      "returning from %s\n",
                      fp->fun()->atom ?
                      js_AtomToPrintableString(cx, fp->fun()->atom, &funBytes) :
                      "<anonymous>");
    clearReturningFrameFromNativeTracker();

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOTO()
{
    /*
     * If we hit a break or a continue to an outer loop, end the loop and
     * generate an always-taken loop exit guard. For other downward gotos
     * (like if/else) continue recording.
     */
    jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), cx->regs->pc);

    if (SN_TYPE(sn) == SRC_BREAK) {
        AUDIT(breakLoopExits);
        return endLoop();
    }

    /*
     * Tracing labeled break isn't impossible, but does require potentially
     * fixing up the block chain. See bug 616119.
     */
    if (SN_TYPE(sn) == SRC_BREAK2LABEL || SN_TYPE(sn) == SRC_CONT2LABEL)
        RETURN_STOP_A("labeled break");

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFEQ()
{
    trackCfgMerges(cx->regs->pc);
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFNE()
{
    return ifop();
}
JS_REQUIRES_STACK LIns*
TraceRecorder::newArguments(LIns* callee_ins, bool strict)
{
    LIns* global_ins = w.immpObjGC(globalObj);
    LIns* argc_ins = w.nameImmi(cx->fp()->numActualArgs());

    LIns* args[] = { callee_ins, argc_ins, global_ins, cx_ins };
    LIns* argsobj_ins = w.call(&js_NewArgumentsOnTrace_ci, args);
    guard(false, w.eqp0(argsobj_ins), OOM_EXIT);

    if (strict) {
        LIns* argsData_ins = w.getObjPrivatizedSlot(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
        ptrdiff_t slotsOffset = offsetof(ArgumentsData, slots);
        cx->fp()->forEachCanonicalActualArg(BoxArg(this, ArgsSlotOffsetAddress(argsData_ins,
                                                                               slotsOffset)));
    }

    return argsobj_ins;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGUMENTS()
{
    JSStackFrame* const fp = cx->fp();

    /* In an eval, 'arguments' will be a BINDNAME, which we don't trace. */
    JS_ASSERT(!fp->isEvalFrame());

    if (fp->hasOverriddenArgs())
        RETURN_STOP_A("Can't trace |arguments| if |arguments| is assigned to");

    LIns* a_ins = getFrameObjPtr(fp->addressOfArgs());
    LIns* args_ins;
    LIns* callee_ins = get(&fp->calleeValue());
    bool strict = fp->fun()->inStrictMode();
    if (a_ins->isImmP()) {
        // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
        args_ins = newArguments(callee_ins, strict);
    } else {
        // Generate LIR to create arguments only if it has not already been created.

        LIns* mem_ins = w.allocp(sizeof(JSObject *));

        LIns* isZero_ins = w.eqp0(a_ins);
        if (isZero_ins->isImmI(0)) {
            w.stAlloc(a_ins, mem_ins);
        } else if (isZero_ins->isImmI(1)) {
            LIns* call_ins = newArguments(callee_ins, strict);
            w.stAlloc(call_ins, mem_ins);
        } else {
            LIns* br1 = w.jtUnoptimizable(isZero_ins);
            w.stAlloc(a_ins, mem_ins);
            LIns* br2 = w.j(NULL);
            w.label(br1);

            LIns* call_ins = newArguments(callee_ins, strict);
            w.stAlloc(call_ins, mem_ins);
            w.label(br2);
        }

        args_ins = w.ldpAlloc(mem_ins);
    }

    stack(0, args_ins);
    setFrameObjPtr(fp->addressOfArgs(), args_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DUP()
{
    stack(0, get(&stackval(-1)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DUP2()
{
    stack(0, get(&stackval(-2)));
    stack(1, get(&stackval(-1)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SWAP()
{
    Value& l = stackval(-2);
    Value& r = stackval(-1);
    LIns* l_ins = get(&l);
    LIns* r_ins = get(&r);
    set(&r, l_ins);
    set(&l, r_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PICK()
{
    Value* sp = cx->regs->sp;
    jsint n = cx->regs->pc[1];
    JS_ASSERT(sp - (n+1) >= cx->fp()->base());
    LIns* top = get(sp - (n+1));
    for (jsint i = 0; i < n; ++i)
        set(sp - (n+1) + i, get(sp - n + i));
    set(sp - 1, top);
    return ARECORD_CONTINUE;
}
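
/*
 * Worked example for the shuffle above with n == 2 and stack [... a, b, c]:
 * top = a; then a <- b, b <- c, and finally the saved top lands in the last
 * slot, giving [... b, c, a], which matches the interpreter's JSOP_PICK.
 */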
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETCONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITOR()
{
    return InjectStatus(binary(LIR_ori));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITXOR()
{
    return InjectStatus(binary(LIR_xori));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITAND()
{
    return InjectStatus(binary(LIR_andi));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EQ()
{
    return equality(false, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NE()
{
    return equality(true, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LT()
{
    return relational(LIR_ltd, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LE()
{
    return relational(LIR_led, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GT()
{
    return relational(LIR_gtd, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GE()
{
    return relational(LIR_ged, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LSH()
{
    return InjectStatus(binary(LIR_lshi));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RSH()
{
    return InjectStatus(binary(LIR_rshi));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_URSH()
{
    return InjectStatus(binary(LIR_rshui));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADD()
{
    Value& r = stackval(-1);
    Value& l = stackval(-2);

    if (!l.isPrimitive()) {
        CHECK_STATUS_A(guardNativeConversion(l));
        if (!r.isPrimitive()) {
            CHECK_STATUS_A(guardNativeConversion(r));
            return InjectStatus(callImacro(add_imacros.obj_obj));
        }
        return InjectStatus(callImacro(add_imacros.obj_any));
    }
    if (!r.isPrimitive()) {
        CHECK_STATUS_A(guardNativeConversion(r));
        return InjectStatus(callImacro(add_imacros.any_obj));
    }

    if (l.isString() || r.isString()) {
        LIns* args[] = { stringify(r), stringify(l), cx_ins };
        LIns* concat = w.call(&js_ConcatStrings_ci, args);
        guard(false, w.eqp0(concat), OOM_EXIT);
        set(&l, concat);
        return ARECORD_CONTINUE;
    }

    return InjectStatus(binary(LIR_addd));
}
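
/*
 * The dispatch above mirrors the interpreter's JSOP_ADD: objects are sent
 * through the add_imacros so their valueOf/toString hooks run, and if either
 * operand is (or becomes) a string the result is a string concatenation,
 * e.g. 1 + "2" yields "12"; only the all-numeric case compiles to LIR_addd.
 */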
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SUB()
{
    return InjectStatus(binary(LIR_subd));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_MUL()
{
    return InjectStatus(binary(LIR_muld));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DIV()
{
    return InjectStatus(binary(LIR_divd));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_MOD()
{
    return InjectStatus(binary(LIR_modd));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NOT()
{
    Value& v = stackval(-1);
    if (v.isBoolean() || v.isUndefined()) {
        set(&v, w.eqi0(w.eqiN(get(&v), 1)));
        return ARECORD_CONTINUE;
    }
    if (v.isNumber()) {
        LIns* v_ins = get(&v);
        set(&v, w.ori(w.eqd0(v_ins), w.eqi0(w.eqd(v_ins, v_ins))));
        return ARECORD_CONTINUE;
    }
    if (v.isObjectOrNull()) {
        set(&v, w.eqp0(get(&v)));
        return ARECORD_CONTINUE;
    }
    JS_ASSERT(v.isString());
    set(&v, w.eqp0(w.getStringLength(get(&v))));
    return ARECORD_CONTINUE;
}
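
/*
 * For the numeric case above, !v is true exactly when v is 0 or NaN, which is
 * why the emitted LIR computes (v == 0) | !(v == v): the self-inequality test
 * is the standard NaN check.
 */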
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITNOT()
{
    return InjectStatus(unary(LIR_noti));
}
10858 TraceRecorder::record_JSOP_NEG()
10860 Value
& v
= stackval(-1);
10862 if (!v
.isPrimitive()) {
10863 CHECK_STATUS_A(guardNativeConversion(v
));
10864 return InjectStatus(callImacro(unary_imacros
.sign
));
10867 if (v
.isNumber()) {
10871 * If we're a promoted integer, we have to watch out for 0s since -0 is
10872 * a double. Only follow this path if we're not an integer that's 0 and
10873 * we're not a double that's zero.
10876 !oracle
->isInstructionUndemotable(cx
->regs
->pc
) &&
10877 IsPromotedInt32(a
) &&
10878 (!v
.isInt32() || v
.toInt32() != 0) &&
10879 (!v
.isDouble() || v
.toDouble() != 0) &&
10880 -v
.toNumber() == (int)-v
.toNumber())
10882 VMSideExit
* exit
= snapshot(OVERFLOW_EXIT
);
10883 a
= guard_xov(LIR_subi
, w
.immi(0), w
.demoteToInt32(a
), exit
);
10884 if (!a
->isImmI() && a
->isop(LIR_subxovi
)) {
10885 guard(false, w
.eqiN(a
, 0), exit
); // make sure we don't lose a -0
10893 return ARECORD_CONTINUE
;
10897 set(&v
, w
.immd(-0.0));
10898 return ARECORD_CONTINUE
;
10901 if (v
.isUndefined()) {
10902 set(&v
, w
.immd(js_NaN
));
10903 return ARECORD_CONTINUE
;
10906 if (v
.isString()) {
10907 LIns
* ok_ins
= w
.allocp(sizeof(JSBool
));
10908 LIns
* args
[] = { ok_ins
, get(&v
), cx_ins
};
10909 LIns
* num_ins
= w
.call(&js_StringToNumber_ci
, args
);
10911 w
.name(w
.eqi0(w
.ldiAlloc(ok_ins
)), "guard(oom)"),
10913 set(&v
, w
.negd(num_ins
));
10914 return ARECORD_CONTINUE
;
10917 JS_ASSERT(v
.isBoolean());
10918 set(&v
, w
.negd(w
.i2d(get(&v
))));
10919 return ARECORD_CONTINUE
;
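
/*
 * Example of the -0 hazard handled above: if v is the integer 0, then -v is
 * -0, which has no int32 representation. The extra guard on (a == 0) after the
 * checked integer negation makes the trace side-exit in that case so the
 * interpreter can produce a double -0.
 */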
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POS()
{
    Value& v = stackval(-1);

    if (!v.isPrimitive()) {
        CHECK_STATUS_A(guardNativeConversion(v));
        return InjectStatus(callImacro(unary_imacros.sign));
    }

    if (v.isNumber())
        return ARECORD_CONTINUE;

    if (v.isNull()) {
        set(&v, w.immd(0));
        return ARECORD_CONTINUE;
    }
    if (v.isUndefined()) {
        set(&v, w.immd(js_NaN));
        return ARECORD_CONTINUE;
    }

    if (v.isString()) {
        LIns* ok_ins = w.allocp(sizeof(JSBool));
        LIns* args[] = { ok_ins, get(&v), cx_ins };
        LIns* num_ins = w.call(&js_StringToNumber_ci, args);
        guard(false,
              w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
              OOM_EXIT);
        set(&v, num_ins);
        return ARECORD_CONTINUE;
    }

    JS_ASSERT(v.isBoolean());
    set(&v, w.i2d(get(&v)));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PRIMTOP()
{
    // Either this opcode does nothing or we couldn't have traced here, because
    // we'd have thrown an exception -- so do nothing if we actually hit this.
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OBJTOP()
{
    Value& v = stackval(-1);
    RETURN_IF_XML_A(v);
    return ARECORD_CONTINUE;
}
RecordingStatus
TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
{
    // ctor must be a function created via js_InitClass.
#ifdef DEBUG
    Class *clasp = FUN_CLASP(GET_FUNCTION_PRIVATE(cx, ctor));
    JS_ASSERT(clasp);

    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
#endif

    Value pval;
    if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
        RETURN_ERROR("error getting prototype from constructor");

    // ctor.prototype is a permanent data property, so this lookup cannot have
    // deep-aborted.
    JS_ASSERT(localtm.recorder);

#ifdef DEBUG
    JSBool ok, found;
    uintN attrs;
    ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
    JS_ASSERT(ok);
    JS_ASSERT(found);
    JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
#endif

    // Since ctor was built by js_InitClass, we can assert (rather than check)
    // that pval is usable.
    JS_ASSERT(!pval.isPrimitive());
    JSObject *proto = &pval.toObject();
    JS_ASSERT_IF(clasp != &js_ArrayClass, proto->emptyShapes[0]->getClass() == clasp);

    proto_ins = w.immpObjGC(proto);
    return RECORD_CONTINUE;
}
RecordingStatus
TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
{
#ifdef DEBUG
    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
#endif

    JSObject* proto;
    if (!js_GetClassPrototype(cx, globalObj, key, &proto))
        RETURN_ERROR("error in js_GetClassPrototype");

    // This should not have reentered.
    JS_ASSERT(localtm.recorder);

#ifdef DEBUG
    /* Double-check that a native proto has a matching emptyShape. */
    if (key != JSProto_Array) {
        JS_ASSERT(proto->isNative());
        JS_ASSERT(proto->emptyShapes);
        EmptyShape *empty = proto->emptyShapes[0];
        JS_ASSERT(empty);
        JS_ASSERT(JSCLASS_CACHED_PROTO_KEY(empty->getClass()) == key);
    }
#endif

    proto_ins = w.immpObjGC(proto);
    return RECORD_CONTINUE;
}
#define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)

RecordingStatus
TraceRecorder::newString(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
{
    JS_ASSERT(argc == 1);

    if (!argv[0].isPrimitive()) {
        CHECK_STATUS(guardNativeConversion(argv[0]));
        return callImacro(new_imacros.String);
    }

    LIns* proto_ins;
    CHECK_STATUS(getClassPrototype(ctor, proto_ins));

    LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
    LIns* obj_ins = w.call(&js_String_tn_ci, args);
    guard(false, w.eqp0(obj_ins), OOM_EXIT);

    set(rval, obj_ins);
    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
    return RECORD_CONTINUE;
}
RecordingStatus
TraceRecorder::newArray(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
{
    LIns* proto_ins;
    CHECK_STATUS(getClassPrototype(ctor, proto_ins));

    LIns* arr_ins;
    if (argc == 0) {
        LIns *args[] = { proto_ins, cx_ins };
        arr_ins = w.call(&js::NewDenseEmptyArray_ci, args);
        guard(false, w.eqp0(arr_ins), OOM_EXIT);

    } else if (argc == 1 && argv[0].isNumber()) {
        /* Abort on RangeError if the double doesn't fit in a uint. */
        LIns *len_ins;
        CHECK_STATUS(makeNumberUint32(get(argv), &len_ins));
        LIns *args[] = { proto_ins, len_ins, cx_ins };
        arr_ins = w.call(&js::NewDenseUnallocatedArray_ci, args);
        guard(false, w.eqp0(arr_ins), OOM_EXIT);

    } else {
        LIns *args[] = { proto_ins, w.nameImmi(argc), cx_ins };
        arr_ins = w.call(&js::NewDenseAllocatedArray_ci, args);
        guard(false, w.eqp0(arr_ins), OOM_EXIT);

        // arr->slots[i] = box_jsval(vp[i]); for i in 0..argc
        LIns *slots_ins = NULL;
        for (uint32 i = 0; i < argc && !outOfMemory(); i++) {
            stobj_set_dslot(arr_ins, i, slots_ins, argv[i], get(&argv[i]));
        }
    }

    set(rval, arr_ins);
    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
    return RECORD_CONTINUE;
}
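
/*
 * The three branches above correspond to the usual Array constructor forms:
 * new Array() makes an empty dense array, new Array(len) with a numeric
 * argument makes an array of that length (after the uint32 range check), and
 * new Array(a, b, ...) allocates argc slots and fills them with the boxed
 * arguments.
 */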
JS_REQUIRES_STACK void
TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins)
{
    /*
     * Check the boolean return value (ok_ins) of a native JSNative,
     * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
     * BUILTIN_ERROR bit of cx->builtinStatus.
     *
     * If the return value (ok_ins) is true, status' == status. Otherwise
     * status' = status | BUILTIN_ERROR. We calculate (rval&1)^1, which is 1
     * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
     * of BUILTIN_ERROR.
     */
    JS_STATIC_ASSERT(((JS_TRUE & 1) ^ 1) << 1 == 0);
    JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == BUILTIN_ERROR);
    status_ins = w.ori(status_ins, w.lshiN(w.xoriN(w.andiN(ok_ins, 1), 1), 1));
    w.stStateField(status_ins, builtinStatus);
}
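
/*
 * Worked example of the bit trick above: with JS_FALSE == 0,
 * ((0 & 1) ^ 1) << 1 == 2 == BUILTIN_ERROR, so a failing hook sets the error
 * bit; with JS_TRUE == 1, ((1 & 1) ^ 1) << 1 == 0 and the status is unchanged.
 * The two JS_STATIC_ASSERTs pin down exactly these identities.
 */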
JS_REQUIRES_STACK void
TraceRecorder::emitNativePropertyOp(const Shape* shape, LIns* obj_ins,
                                    bool setflag, LIns* addr_boxed_val_ins)
{
    JS_ASSERT(addr_boxed_val_ins->isop(LIR_allocp));
    JS_ASSERT(setflag ? !shape->hasSetterValue() : !shape->hasGetterValue());
    JS_ASSERT(setflag ? !shape->hasDefaultSetter() : !shape->hasDefaultGetterOrIsMethod());

    enterDeepBailCall();

    w.stStateField(addr_boxed_val_ins, nativeVp);
    w.stStateField(w.immi(1), nativeVpLen);

    CallInfo* ci = new (traceAlloc()) CallInfo();
    ci->_address = uintptr_t(setflag ? shape->setterOp() : shape->getterOp());
    ci->_typesig = CallInfo::typeSig4(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P);
    ci->_storeAccSet = ACCSET_STORE_ANY;
    ci->_abi = ABI_CDECL;
#ifdef DEBUG
    ci->_name = "JSPropertyOp";
#endif

    LIns* args[] = { addr_boxed_val_ins, w.immpIdGC(SHAPE_USERID(shape)), obj_ins, cx_ins };
    LIns* ok_ins = w.call(ci, args);

    // Cleanup. Immediately clear nativeVp before we might deep bail.
    w.stStateField(w.immpNull(), nativeVp);
    leaveDeepBailCall();

    // Guard that the call succeeded and builtinStatus is still 0.
    // If the native op succeeds but we deep-bail here, the result value is
    // lost! Therefore this can only be used for setters of shared properties.
    // In that case we ignore the result value anyway.
    LIns* status_ins = w.ldiStateField(builtinStatus);
    propagateFailureToBuiltinStatus(ok_ins, status_ins);
    guard(true, w.eqi0(status_ins), STATUS_EXIT);
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
{
    if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
        // This needs to capture the pre-call state of the stack. So do not set
        // pendingSpecializedNative before taking this snapshot.
        JS_ASSERT(!pendingSpecializedNative);

        // Take snapshot for DeepBail and store it in tm->bailExit.
        enterDeepBailCall();
    }

    LIns* res_ins = w.call(sn->builtin, args);

    // Immediately unroot the vp as soon we return since we might deep bail next.
    if (rooted)
        w.stStateField(w.immpNull(), nativeVp);

    rval_ins = res_ins;
    switch (JSTN_ERRTYPE(sn)) {
      case FAIL_NULL:
        guard(false, w.eqp0(res_ins), OOM_EXIT);
        break;
      case FAIL_NEG:
        res_ins = w.i2d(res_ins);
        guard(false, w.ltdN(res_ins, 0), OOM_EXIT);
        break;
      case FAIL_NEITHER:
        guard(false, w.eqiN(res_ins, JS_NEITHER), OOM_EXIT);
        break;
      default:;
    }

    set(&stackval(0 - (2 + argc)), res_ins);

    /*
     * The return value will be processed by NativeCallComplete since
     * we have to know the actual return value type for calls that return
     * boxed values.
     */
    pendingSpecializedNative = sn;

    return RECORD_CONTINUE;
}
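
/*
 * The JSTN_ERRTYPE handling above encodes how a traceable native reports
 * failure: FAIL_STATUS deep-bails through cx->builtinStatus, while the other
 * variants signal the error in the return value itself (a null pointer, a
 * negative number, or JS_NEITHER), each of which is guarded so the trace
 * exits when the native fails.
 */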
11207 * Check whether we have a specialized implementation for this native
11210 JS_REQUIRES_STACK RecordingStatus
11211 TraceRecorder::callSpecializedNative(JSNativeTraceInfo
*trcinfo
, uintN argc
,
11214 JSStackFrame
* const fp
= cx
->fp();
11215 jsbytecode
*pc
= cx
->regs
->pc
;
11217 Value
& fval
= stackval(0 - (2 + argc
));
11218 Value
& tval
= stackval(0 - (1 + argc
));
11220 LIns
* this_ins
= get(&tval
);
11222 LIns
* args
[nanojit::MAXARGS
];
11223 JSSpecializedNative
*sn
= trcinfo
->specializations
;
11226 if (((sn
->flags
& JSTN_CONSTRUCTOR
) != 0) != constructing
)
11229 uintN knownargc
= strlen(sn
->argtypes
);
11230 if (argc
!= knownargc
)
11233 intN prefixc
= strlen(sn
->prefix
);
11234 JS_ASSERT(prefixc
<= 3);
11235 LIns
** argp
= &args
[argc
+ prefixc
- 1];
11239 memset(args
, 0xCD, sizeof(args
));
11243 for (i
= prefixc
; i
--; ) {
11244 argtype
= sn
->prefix
[i
];
11245 if (argtype
== 'C') {
11247 } else if (argtype
== 'T') { /* this, as an object */
11248 if (tval
.isPrimitive())
11249 goto next_specialization
;
11251 } else if (argtype
== 'S') { /* this, as a string */
11252 if (!tval
.isString())
11253 goto next_specialization
;
11255 } else if (argtype
== 'f') {
11256 *argp
= w
.immpObjGC(&fval
.toObject());
11257 } else if (argtype
== 'p') {
11258 CHECK_STATUS(getClassPrototype(&fval
.toObject(), *argp
));
11259 } else if (argtype
== 'R') {
11260 *argp
= w
.nameImmpNonGC(cx
->runtime
);
11261 } else if (argtype
== 'P') {
11262 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
11263 // JSOP_GETELEM imacro (bug 476559).
11264 if ((*pc
== JSOP_CALL
) &&
11265 fp
->hasImacropc() && *fp
->imacropc() == JSOP_GETELEM
)
11266 *argp
= w
.nameImmpNonGC(fp
->imacropc());
11268 *argp
= w
.nameImmpNonGC(pc
);
11269 } else if (argtype
== 'D') { /* this, as a number */
11270 if (!tval
.isNumber())
11271 goto next_specialization
;
11273 } else if (argtype
== 'M') {
11274 MathCache
*mathCache
= GetMathCache(cx
);
11276 return RECORD_ERROR
;
11277 *argp
= w
.nameImmpNonGC(mathCache
);
11279 JS_NOT_REACHED("unknown prefix arg type");
11284 for (i
= knownargc
; i
--; ) {
11285 Value
& arg
= stackval(0 - (i
+ 1));
11288 argtype
= sn
->argtypes
[i
];
11289 if (argtype
== 'd' || argtype
== 'i') {
11290 if (!arg
.isNumber())
11291 goto next_specialization
;
11292 if (argtype
== 'i')
11293 *argp
= d2i(*argp
);
11294 } else if (argtype
== 'o') {
11295 if (arg
.isPrimitive())
11296 goto next_specialization
;
11297 } else if (argtype
== 's') {
11298 if (!arg
.isString())
11299 goto next_specialization
;
11300 } else if (argtype
== 'r') {
11301 if (!VALUE_IS_REGEXP(cx
, arg
))
11302 goto next_specialization
;
11303 } else if (argtype
== 'f') {
11304 if (!IsFunctionObject(arg
))
11305 goto next_specialization
;
11306 } else if (argtype
== 'v') {
11307 *argp
= box_value_for_native_call(arg
, *argp
);
11309 goto next_specialization
;
11314 JS_ASSERT(args
[0] != (LIns
*)0xcdcdcdcd);
11316 return emitNativeCall(sn
, argc
, args
, false);
11318 next_specialization
:;
11319 } while ((sn
++)->flags
& JSTN_MORE
);
11321 return RECORD_STOP
;
static JSBool FASTCALL
ceilReturningInt(jsdouble x, int32 *out)
{
    jsdouble r = js_math_ceil_impl(x);
    return JSDOUBLE_IS_INT32(r, out);
}

static JSBool FASTCALL
floorReturningInt(jsdouble x, int32 *out)
{
    jsdouble r = js_math_floor_impl(x);
    return JSDOUBLE_IS_INT32(r, out);
}

static JSBool FASTCALL
roundReturningInt(jsdouble x, int32 *out)
{
    jsdouble r = js_math_round_impl(x);
    return JSDOUBLE_IS_INT32(r, out);
}

/*
 * These functions store into their second argument, so they need to
 * be annotated accordingly. To be future-proof, we use ACCSET_STORE_ANY
 * so that new callers don't have to remember to update the annotation.
 */
JS_DEFINE_CALLINFO_2(static, BOOL, ceilReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_2(static, BOOL, floorReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_2(static, BOOL, roundReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callFloatReturningInt(uintN argc, const nanojit::CallInfo *ci)
{
    Value& arg = stackval(-1);
    LIns* resptr_ins = w.allocp(sizeof(int32));
    LIns* args[] = { resptr_ins, get(&arg) };
    LIns* fits_ins = w.call(ci, args);

    guard(false, w.eqi0(fits_ins), OVERFLOW_EXIT);

    LIns* res_ins = w.ldiAlloc(resptr_ins);

    set(&stackval(0 - (2 + argc)), w.i2d(res_ins));

    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;

    return RECORD_CONTINUE;
}
11373 JS_REQUIRES_STACK RecordingStatus
11374 TraceRecorder::callNative(uintN argc
, JSOp mode
)
11378 JS_ASSERT(mode
== JSOP_CALL
|| mode
== JSOP_NEW
|| mode
== JSOP_FUNAPPLY
||
11379 mode
== JSOP_FUNCALL
);
11381 Value
* vp
= &stackval(0 - (2 + argc
));
11382 JSObject
* funobj
= &vp
[0].toObject();
11383 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, funobj
);
11384 Native native
= fun
->u
.n
.native
;
11388 if (vp
[2].isNumber() && mode
== JSOP_CALL
) {
11389 if (native
== js_math_ceil
|| native
== js_math_floor
|| native
== js_math_round
) {
11390 LIns
* a
= get(&vp
[2]);
11392 if (IsPromotedInt32OrUint32(a
)) {
11394 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11395 return RECORD_CONTINUE
;
11397 if (native
== js_math_floor
) {
11398 if (floorReturningInt(vp
[2].toNumber(), &result
))
11399 return callFloatReturningInt(argc
, &floorReturningInt_ci
);
11400 } else if (native
== js_math_ceil
) {
11401 if (ceilReturningInt(vp
[2].toNumber(), &result
))
11402 return callFloatReturningInt(argc
, &ceilReturningInt_ci
);
11403 } else if (native
== js_math_round
) {
11404 if (roundReturningInt(vp
[2].toNumber(), &result
))
11405 return callFloatReturningInt(argc
, &roundReturningInt_ci
);
11407 } else if (native
== js_math_abs
) {
11408 LIns
* a
= get(&vp
[2]);
11409 if (IsPromotedInt32(a
) && vp
[2].toNumber() != INT_MIN
) {
11410 a
= w
.demoteToInt32(a
);
11411 /* abs(INT_MIN) can't be done using integers; exit if we see it. */
11412 LIns
* intMin_ins
= w
.name(w
.immi(0x80000000), "INT_MIN");
11413 LIns
* isIntMin_ins
= w
.name(w
.eqi(a
, intMin_ins
), "isIntMin");
11414 guard(false, isIntMin_ins
, MISMATCH_EXIT
);
11415 LIns
* neg_ins
= w
.negi(a
);
11416 LIns
* isNeg_ins
= w
.name(w
.ltiN(a
, 0), "isNeg");
11417 LIns
* abs_ins
= w
.name(w
.cmovi(isNeg_ins
, neg_ins
, a
), "abs");
11418 set(&vp
[0], w
.i2d(abs_ins
));
11419 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11420 return RECORD_CONTINUE
;
11423 if (vp
[1].isString()) {
11424 JSString
*str
= vp
[1].toString();
11425 if (native
== js_str_charAt
) {
11426 jsdouble i
= vp
[2].toNumber();
11427 if (JSDOUBLE_IS_NaN(i
))
11429 if (i
< 0 || i
>= str
->length())
11430 RETURN_STOP("charAt out of bounds");
11431 LIns
* str_ins
= get(&vp
[1]);
11432 LIns
* idx_ins
= get(&vp
[2]);
11434 CHECK_STATUS(getCharAt(str
, str_ins
, idx_ins
, mode
, &char_ins
));
11435 set(&vp
[0], char_ins
);
11436 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11437 return RECORD_CONTINUE
;
11438 } else if (native
== js_str_charCodeAt
) {
11439 jsdouble i
= vp
[2].toNumber();
11440 if (JSDOUBLE_IS_NaN(i
))
11442 if (i
< 0 || i
>= str
->length())
11443 RETURN_STOP("charCodeAt out of bounds");
11444 LIns
* str_ins
= get(&vp
[1]);
11445 LIns
* idx_ins
= get(&vp
[2]);
11446 LIns
* charCode_ins
;
11447 CHECK_STATUS(getCharCodeAt(str
, str_ins
, idx_ins
, &charCode_ins
));
11448 set(&vp
[0], charCode_ins
);
11449 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11450 return RECORD_CONTINUE
;
11453 } else if (vp
[2].isString() && mode
== JSOP_CALL
) {
11454 if (native
== js_regexp_exec
) {
11455 jsbytecode
*pc
= cx
->regs
->pc
;
11457 * If we see any of these sequences, the result is unused:
11459 * - call / trace / pop
11461 * If we see any of these sequences, the result is only tested for nullness:
11463 * - call / trace / ifeq
11464 * - call / not / ifeq
11465 * - call / trace / not / ifeq
11467 * In either case, we replace the call to RegExp.exec() on the
11468 * stack with a call to RegExp.test() because "r.exec(s) !=
11469 * null" is equivalent to "r.test(s)". This avoids building
11470 * the result array, which can be expensive. This requires
11471 * that RegExp.prototype.test() hasn't been changed; we check this.
11473 if (pc
[0] == JSOP_CALL
) {
11474 if ((pc
[JSOP_CALL_LENGTH
] == JSOP_POP
) ||
11475 (pc
[JSOP_CALL_LENGTH
] == JSOP_TRACE
&&
11476 pc
[JSOP_CALL_LENGTH
+ JSOP_TRACE_LENGTH
] == JSOP_POP
) ||
11477 (pc
[JSOP_CALL_LENGTH
] == JSOP_IFEQ
) ||
11478 (pc
[JSOP_CALL_LENGTH
] == JSOP_TRACE
&&
11479 pc
[JSOP_CALL_LENGTH
+ JSOP_TRACE_LENGTH
] == JSOP_IFEQ
) ||
11480 (pc
[JSOP_CALL_LENGTH
] == JSOP_NOT
&&
11481 pc
[JSOP_CALL_LENGTH
+ JSOP_NOT_LENGTH
] == JSOP_IFEQ
) ||
11482 (pc
[JSOP_CALL_LENGTH
] == JSOP_TRACE
&&
11483 pc
[JSOP_CALL_LENGTH
+ JSOP_TRACE_LENGTH
] == JSOP_NOT
&&
11484 pc
[JSOP_CALL_LENGTH
+ JSOP_TRACE_LENGTH
+ JSOP_NOT_LENGTH
] == JSOP_IFEQ
))
11487 jsid id
= ATOM_TO_JSID(cx
->runtime
->atomState
.testAtom
);
11488 /* Get RegExp.prototype.test() and check it hasn't been changed. */
11489 if (js_GetClassPrototype(cx
, NULL
, JSProto_RegExp
, &proto
)) {
11490 if (JSObject
*tmp
= HasNativeMethod(proto
, id
, js_regexp_test
)) {
11491 vp
[0] = ObjectValue(*tmp
);
11493 fun
= tmp
->getFunctionPrivate();
11494 native
= js_regexp_test
;
11504 if (vp
[2].isNumber() && vp
[3].isNumber() && mode
== JSOP_CALL
&&
11505 (native
== js_math_min
|| native
== js_math_max
)) {
11506 LIns
* a
= get(&vp
[2]);
11507 LIns
* b
= get(&vp
[3]);
11508 if (IsPromotedInt32(a
) && IsPromotedInt32(b
)) {
11509 a
= w
.demoteToInt32(a
);
11510 b
= w
.demoteToInt32(b
);
11511 LIns
* cmp
= (native
== js_math_min
) ? w
.lti(a
, b
) : w
.gti(a
, b
);
11512 set(&vp
[0], w
.i2d(w
.cmovi(cmp
, a
, b
)));
11513 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11514 return RECORD_CONTINUE
;
11516 if (IsPromotedUint32(a
) && IsPromotedUint32(b
)) {
11517 a
= w
.demoteToUint32(a
);
11518 b
= w
.demoteToUint32(b
);
11519 LIns
* cmp
= (native
== js_math_min
) ? w
.ltui(a
, b
) : w
.gtui(a
, b
);
11520 set(&vp
[0], w
.ui2d(w
.cmovi(cmp
, a
, b
)));
11521 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11522 return RECORD_CONTINUE
;
11528 if (fun
->flags
& JSFUN_TRCINFO
) {
11529 JSNativeTraceInfo
*trcinfo
= FUN_TRCINFO(fun
);
11530 JS_ASSERT(trcinfo
&& fun
->u
.n
.native
== trcinfo
->native
);
11532 /* Try to call a type specialized version of the native. */
11533 if (trcinfo
->specializations
) {
11534 RecordingStatus status
= callSpecializedNative(trcinfo
, argc
, mode
== JSOP_NEW
);
11535 if (status
!= RECORD_STOP
)
11540 if (native
== js_fun_apply
|| native
== js_fun_call
)
11541 RETURN_STOP("trying to call native apply or call");
11543 // Allocate the vp vector and emit code to root it.
11544 uintN vplen
= 2 + argc
;
11545 LIns
* invokevp_ins
= w
.allocp(vplen
* sizeof(Value
));
11547 // vp[0] is the callee.
11548 box_value_into(vp
[0], w
.immpObjGC(funobj
), AllocSlotsAddress(invokevp_ins
));
11550 // Calculate |this|.
11552 if (mode
== JSOP_NEW
) {
11553 Class
* clasp
= fun
->u
.n
.clasp
;
11554 JS_ASSERT(clasp
!= &js_SlowArrayClass
);
11556 clasp
= &js_ObjectClass
;
11557 JS_ASSERT(((jsuword
) clasp
& 3) == 0);
11559 // Abort on |new Function|. js_CreateThis would allocate a regular-
11560 // sized JSObject, not a Function-sized one. (The Function ctor would
11561 // deep-bail anyway but let's not go there.)
11562 if (clasp
== &js_FunctionClass
)
11563 RETURN_STOP("new Function");
11565 if (!clasp
->isNative())
11566 RETURN_STOP("new with non-native ops");
11568 if (fun
->isConstructor()) {
11569 vp
[1].setMagicWithObjectOrNullPayload(NULL
);
11570 newobj_ins
= w
.immpMagicNull();
11572 /* Treat this as a regular call, the constructor will behave correctly. */
11575 args
[0] = w
.immpObjGC(funobj
);
11576 args
[1] = w
.immpNonGC(clasp
);
11578 newobj_ins
= w
.call(&js_CreateThisFromTrace_ci
, args
);
11579 guard(false, w
.eqp0(newobj_ins
), OOM_EXIT
);
11582 * emitNativeCall may take a snapshot below. To avoid having a type
11583 * mismatch (e.g., where get(&vp[1]) is an object and vp[1] is
11584 * null), we make sure vp[1] is some object. The actual object
11585 * doesn't matter; JSOP_NEW and InvokeConstructor both overwrite
11586 * vp[1] without observing its value.
11588 * N.B. tracing specializes for functions, so pick a non-function.
11590 vp
[1].setObject(*globalObj
);
11592 this_ins
= newobj_ins
;
11594 this_ins
= get(&vp
[1]);
11596 set(&vp
[1], this_ins
);
11597 box_value_into(vp
[1], this_ins
, AllocSlotsAddress(invokevp_ins
, 1));
11600 for (uintN n
= 2; n
< 2 + argc
; n
++) {
11601 box_value_into(vp
[n
], get(&vp
[n
]), AllocSlotsAddress(invokevp_ins
, n
));
11602 // For a very long argument list we might run out of LIR space, so
11603 // check inside the loop.
11605 RETURN_STOP("out of memory in argument list");
11608 // Populate extra slots, including the return value slot for a slow native.
11609 if (2 + argc
< vplen
) {
11610 for (uintN n
= 2 + argc
; n
< vplen
; n
++) {
11611 box_undefined_into(AllocSlotsAddress(invokevp_ins
, n
));
11613 RETURN_STOP("out of memory in extra slots");
11617 // Set up arguments for the JSNative or JSFastNative.
11618 if (mode
== JSOP_NEW
)
11619 RETURN_STOP("untraceable fast native constructor");
11620 native_rval_ins
= invokevp_ins
;
11621 args
[0] = invokevp_ins
;
11622 args
[1] = w
.immi(argc
);
11624 uint32 typesig
= CallInfo::typeSig3(ARGTYPE_I
, ARGTYPE_P
, ARGTYPE_I
, ARGTYPE_P
);
11626 // Generate CallInfo and a JSSpecializedNative structure on the fly.
11627 // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
11628 // record_NativeCallComplete unboxes the result specially.
11630 CallInfo
* ci
= new (traceAlloc()) CallInfo();
11631 ci
->_address
= uintptr_t(fun
->u
.n
.native
);
11633 ci
->_storeAccSet
= ACCSET_STORE_ANY
;
11634 ci
->_abi
= ABI_CDECL
;
11635 ci
->_typesig
= typesig
;
11637 ci
->_name
= js_anonymous_str
;
11639 JSAutoByteString
bytes(cx
, ATOM_TO_STRING(fun
->atom
));
11641 size_t n
= strlen(bytes
.ptr()) + 1;
11642 char *buffer
= new (traceAlloc()) char[n
];
11643 memcpy(buffer
, bytes
.ptr(), n
);
11644 ci
->_name
= buffer
;
11649 // Generate a JSSpecializedNative structure on the fly.
11650 generatedSpecializedNative
.builtin
= ci
;
11651 generatedSpecializedNative
.flags
= FAIL_STATUS
| ((mode
== JSOP_NEW
)
11653 : JSTN_UNBOX_AFTER
);
11654 generatedSpecializedNative
.prefix
= NULL
;
11655 generatedSpecializedNative
.argtypes
= NULL
;
11657 // We only have to ensure that the values we wrote into the stack buffer
11658 // are rooted if we actually make it to the call, so only set nativeVp and
11659 // nativeVpLen immediately before emitting the call code. This way we avoid
11660 // leaving trace with a bogus nativeVp because we fall off trace while unboxing
11661 // values into the stack buffer.
11662 w
.stStateField(w
.nameImmi(vplen
), nativeVpLen
);
11663 w
.stStateField(invokevp_ins
, nativeVp
);
11665 // argc is the original argc here. It is used to calculate where to place
11666 // the return value.
11667 return emitNativeCall(&generatedSpecializedNative
, argc
, args
, true);
11670 JS_REQUIRES_STACK RecordingStatus
11671 TraceRecorder::functionCall(uintN argc
, JSOp mode
)
11673 Value
& fval
= stackval(0 - (2 + argc
));
11674 JS_ASSERT(&fval
>= cx
->fp()->base());
11676 if (!IsFunctionObject(fval
))
11677 RETURN_STOP("callee is not a function");
11679 Value
& tval
= stackval(0 - (1 + argc
));
11682 * If callee is not constant, it's a shapeless call and we have to guard
11683 * explicitly that we will get this callee again at runtime.
11685 if (!get(&fval
)->isImmP())
11686 CHECK_STATUS(guardCallee(fval
));
11689 * Require that the callee be a function object, to avoid guarding on its
11690 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
11691 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
11692 * the property cache and guard on the object (this) in which the callee
11693 * was found. So it's sufficient to test here that the particular function
11694 * is interpreted, not guard on that condition.
11696 * Bytecode sequences that push shapeless callees must guard on the callee
11697 * class being Function and the function being interpreted.
11699 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, &fval
.toObject());
11701 if (Probes::callTrackingActive(cx
)) {
11702 JSScript
*script
= FUN_SCRIPT(fun
);
11703 if (!script
|| !script
->isEmpty()) {
11704 LIns
* args
[] = { w
.immi(1), w
.nameImmpNonGC(fun
), cx_ins
};
11705 LIns
* call_ins
= w
.call(&functionProbe_ci
, args
);
11706 guard(false, w
.eqi0(call_ins
), MISMATCH_EXIT
);
11710 if (FUN_INTERPRETED(fun
))
11711 return interpretedFunctionCall(fval
, fun
, argc
, mode
== JSOP_NEW
);
11713 Native native
= fun
->maybeNative();
11714 Value
* argv
= &tval
+ 1;
11715 if (native
== js_Array
)
11716 return newArray(&fval
.toObject(), argc
, argv
, &fval
);
11717 if (native
== js_String
&& argc
== 1) {
11718 if (mode
== JSOP_NEW
)
11719 return newString(&fval
.toObject(), 1, argv
, &fval
);
11720 if (!argv
[0].isPrimitive()) {
11721 CHECK_STATUS(guardNativeConversion(argv
[0]));
11722 return callImacro(call_imacros
.String
);
11724 set(&fval
, stringify(argv
[0]));
11725 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
11726 return RECORD_CONTINUE
;
11729 RecordingStatus rs
= callNative(argc
, mode
);
11730 if (Probes::callTrackingActive(cx
)) {
11731 LIns
* args
[] = { w
.immi(0), w
.nameImmpNonGC(fun
), cx_ins
};
11732 LIns
* call_ins
= w
.call(&functionProbe_ci
, args
);
11733 guard(false, w
.eqi0(call_ins
), MISMATCH_EXIT
);
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEW()
{
    uintN argc = GET_ARGC(cx->regs->pc);
    cx->assertValidStackDepth(argc + 2);
    return InjectStatus(functionCall(argc, JSOP_NEW));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELNAME()
{
    return ARECORD_STOP;
}
static JSBool JS_FASTCALL
DeleteIntKey(JSContext* cx, JSObject* obj, int32 i, JSBool strict)
{
    LeaveTraceIfGlobalObject(cx, obj);
    LeaveTraceIfArgumentsObject(cx, obj);
    Value v = BooleanValue(false);
    jsid id;
    if (INT_FITS_IN_JSID(i)) {
        id = INT_TO_JSID(i);
    } else {
        if (!js_ValueToStringId(cx, Int32Value(i), &id)) {
            SetBuiltinError(cx);
            return false;
        }
    }

    if (!obj->deleteProperty(cx, id, &v, strict))
        SetBuiltinError(cx);
    return v.toBoolean();
}
JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteIntKey, CONTEXT, OBJECT, INT32, BOOL,
                     0, ACCSET_STORE_ANY)

static JSBool JS_FASTCALL
DeleteStrKey(JSContext* cx, JSObject* obj, JSString* str, JSBool strict)
{
    LeaveTraceIfGlobalObject(cx, obj);
    LeaveTraceIfArgumentsObject(cx, obj);
    Value v = BooleanValue(false);
    jsid id;

    /*
     * NB: JSOP_DELPROP does not need js_ValueToStringId to atomize, but (see
     * jsatominlines.h) that helper early-returns if the computed property name
     * string is already atomized, and we are *not* on a perf-critical path!
     */
    if (!js_ValueToStringId(cx, StringValue(str), &id) || !obj->deleteProperty(cx, id, &v, strict))
        SetBuiltinError(cx);
    return v.toBoolean();
}
JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteStrKey, CONTEXT, OBJECT, STRING, BOOL,
                     0, ACCSET_STORE_ANY)
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELPROP()
{
    Value& lval = stackval(-1);
    if (lval.isPrimitive())
        RETURN_STOP_A("JSOP_DELPROP on primitive base expression");
    if (&lval.toObject() == globalObj)
        RETURN_STOP_A("JSOP_DELPROP on global property");

    JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];

    enterDeepBailCall();
    LIns* args[] = { strictModeCode_ins, w.immpAtomGC(atom), get(&lval), cx_ins };
    LIns* rval_ins = w.call(&DeleteStrKey_ci, args);

    LIns* status_ins = w.ldiStateField(builtinStatus);
    pendingGuardCondition = w.eqi0(status_ins);
    leaveDeepBailCall();

    set(&lval, rval_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELELEM()
{
    Value& lval = stackval(-2);
    if (lval.isPrimitive())
        RETURN_STOP_A("JSOP_DELELEM on primitive base expression");
    if (&lval.toObject() == globalObj)
        RETURN_STOP_A("JSOP_DELELEM on global property");
    if (lval.toObject().isArguments())
        RETURN_STOP_A("JSOP_DELELEM on the |arguments| object");

    Value& idx = stackval(-1);
    LIns* rval_ins;

    enterDeepBailCall();
    if (hasInt32Repr(idx)) {
        LIns* num_ins;
        CHECK_STATUS_A(makeNumberInt32(get(&idx), &num_ins));
        LIns* args[] = { strictModeCode_ins, num_ins, get(&lval), cx_ins };
        rval_ins = w.call(&DeleteIntKey_ci, args);
    } else if (idx.isString()) {
        LIns* args[] = { strictModeCode_ins, get(&idx), get(&lval), cx_ins };
        rval_ins = w.call(&DeleteStrKey_ci, args);
    } else {
        RETURN_STOP_A("JSOP_DELELEM on non-int, non-string index");
    }

    LIns* status_ins = w.ldiStateField(builtinStatus);
    pendingGuardCondition = w.eqi0(status_ins);
    leaveDeepBailCall();

    set(&lval, rval_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TYPEOF()
{
    Value& r = stackval(-1);
    LIns* type;
    if (r.isString()) {
        type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]);
    } else if (r.isNumber()) {
        type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]);
    } else if (r.isUndefined()) {
        type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
    } else if (r.isBoolean()) {
        type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_BOOLEAN]);
    } else if (r.isNull()) {
        type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_OBJECT]);
    } else {
        if (r.toObject().isFunction()) {
            type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]);
        } else {
            LIns* args[] = { get(&r), cx_ins };
            type = w.call(&js_TypeOfObject_ci, args);
        }
    }
    set(&r, type);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_VOID()
{
    stack(-1, w.immiUndefined());
    return ARECORD_CONTINUE;
}
11887 JS_REQUIRES_STACK AbortableRecordingStatus
11888 TraceRecorder::record_JSOP_INCNAME()
11893 JS_REQUIRES_STACK AbortableRecordingStatus
11894 TraceRecorder::record_JSOP_INCPROP()
11899 JS_REQUIRES_STACK AbortableRecordingStatus
11900 TraceRecorder::record_JSOP_INCELEM()
11902 return InjectStatus(incElem(1));
11905 JS_REQUIRES_STACK AbortableRecordingStatus
11906 TraceRecorder::record_JSOP_DECNAME()
11908 return incName(-1);
11911 JS_REQUIRES_STACK AbortableRecordingStatus
11912 TraceRecorder::record_JSOP_DECPROP()
11914 return incProp(-1);
11917 JS_REQUIRES_STACK AbortableRecordingStatus
11918 TraceRecorder::record_JSOP_DECELEM()
11920 return InjectStatus(incElem(-1));
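
/*
 * Common recorder for the name increment/decrement opcodes: incr is +1 or -1
 * and pre selects pre- versus post-increment semantics.
 */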
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::incName(jsint incr, bool pre)
{
    const Value* vp;
    LIns* v_ins;
    LIns* v_ins_after;
    NameResult nr;

    CHECK_STATUS_A(name(vp, v_ins, nr));
    Value v = nr.tracked ? *vp : nr.v;
    Value v_after;
    CHECK_STATUS_A(incHelper(v, v_ins, v_after, v_ins_after, incr));
    LIns* v_ins_result = pre ? v_ins_after : v_ins;
    if (nr.tracked) {
        set(vp, v_ins_after);
        stack(0, v_ins_result);
        return ARECORD_CONTINUE;
    }

    if (!nr.obj->isCall())
        RETURN_STOP_A("incName on unsupported object class");

    CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.shape, v_ins_after, v_after));
    stack(0, v_ins_result);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEINC()
{
    return incName(1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPINC()
{
    return incProp(1, false);
}

// XXX consolidate with record_JSOP_GETELEM code...
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMINC()
{
    return InjectStatus(incElem(1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEDEC()
{
    return incName(-1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPDEC()
{
    return incProp(-1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMDEC()
{
    return InjectStatus(incElem(-1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETPROP()
{
    return getProp(stackval(-1));
}

/*
 * If possible, lookup obj[id] without calling any resolve hooks or touching
 * any non-native objects, store the results in *pobjp and *shapep (NULL if no
 * such property exists), and return true.
 *
 * If a safe lookup is not possible, return false; *pobjp and *shapep are
 * undefined.
 */
static bool
SafeLookup(JSContext *cx, JSObject* obj, jsid id, JSObject** pobjp, const Shape** shapep)
{
    do {
        // Avoid non-native lookupProperty hooks.
        if (obj->getOps()->lookupProperty)
            return false;

        if (const Shape *shape = obj->nativeLookup(id)) {
            *pobjp = obj;
            *shapep = shape;
            return true;
        }

        // Avoid resolve hooks.
        if (obj->getClass()->resolve != JS_ResolveStub)
            return false;
    } while ((obj = obj->getProto()) != NULL);
    *pobjp = NULL;
    *shapep = NULL;
    return true;
}

/*
 * Lookup the property for the SETPROP/SETNAME/SETMETHOD instruction at pc.
 * Emit guards to ensure that the result at run time is the same.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::lookupForSetPropertyOp(JSObject* obj, LIns* obj_ins, jsid id,
                                      bool* safep, JSObject** pobjp, const Shape** shapep)
{
    // We could consult the property cache here, but the contract for
    // PropertyCache::testForSet is intricate enough that it's a lot less code
    // to do a SafeLookup.
    *safep = SafeLookup(cx, obj, id, pobjp, shapep);
    if (!*safep)
        return RECORD_CONTINUE;

    VMSideExit *exit = snapshot(BRANCH_EXIT);
    if (*shapep) {
        CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_kshape", exit));
        if (obj != *pobjp && *pobjp != globalObj) {
            CHECK_STATUS(guardShape(w.immpObjGC(*pobjp), *pobjp, (*pobjp)->shape(),
                                    "guard_vshape", exit));
        }
    } else {
        for (;;) {
            if (obj != globalObj)
                CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_proto_chain", exit));
            obj = obj->getProto();
            if (!obj)
                break;
            obj_ins = w.immpObjGC(obj);
        }
    }
    return RECORD_CONTINUE;
}
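
/*
 * Builtin wrapper around JSObject::methodWriteBarrier, called from trace when
 * overwriting a slot of a branded or method-barriered object.
 */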
static JSBool FASTCALL
MethodWriteBarrier(JSContext* cx, JSObject* obj, uint32 slot, const Value* v)
{
    bool ok = obj->methodWriteBarrier(cx, slot, *v);
    JS_ASSERT(WasBuiltinSuccessful(cx));
    return ok;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, UINT32, CVALUEPTR,
                     0, ACCSET_STORE_ANY)

/* Emit a specialized, inlined copy of js_NativeSet. */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape,
                         const Value &v, LIns* v_ins)
{
    uint32 slot = shape->slot;
    JS_ASSERT((slot != SHAPE_INVALID_SLOT) == shape->hasSlot());
    JS_ASSERT_IF(shape->hasSlot(), obj->nativeContains(*shape));

    /*
     * We do not trace assignment to properties that have both a non-default
     * setter and a slot, for several reasons.
     *
     * First, that would require sampling rt->propertyRemovals before and after
     * (see js_NativeSet), and even more code to handle the case where the two
     * samples differ. A mere guard is not enough, because you can't just bail
     * off trace in the middle of a property assignment without storing the
     * value and making the stack right.
     *
     * If obj is the global object, there are two additional problems. We would
     * have to emit still more code to store the result in the object (not the
     * native global frame) if the setter returned successfully after
     * deep-bailing. And we would have to cope if the run-time type of the
     * setter's return value differed from the record-time type of v, in which
     * case unboxing would fail and, having called a native setter, we could
     * not just retry the instruction in the interpreter.
     *
     * If obj is branded, we would have a similar problem recovering from a
     * failed call to MethodWriteBarrier.
     */
    if (!shape->hasDefaultSetter() && slot != SHAPE_INVALID_SLOT)
        RETURN_STOP("can't trace set of property with setter and slot");

    // These two cases are strict-mode errors and can't be traced.
    if (shape->hasGetterValue() && shape->hasDefaultSetter())
        RETURN_STOP("can't set a property that has only a getter");
    if (shape->isDataDescriptor() && !shape->writable())
        RETURN_STOP("can't assign to readonly property");

    // Call the setter, if any.
    if (!shape->hasDefaultSetter()) {
        if (shape->hasSetterValue())
            RETURN_STOP("can't trace JavaScript function setter yet");
        emitNativePropertyOp(shape, obj_ins, true, box_value_into_alloc(v, v_ins));
    }

    if (slot != SHAPE_INVALID_SLOT) {
        if (obj->brandedOrHasMethodBarrier()) {
            if (obj == globalObj) {
                // Because the trace is type-specialized to the global object's
                // slots, no run-time check is needed. Avoid recording a global
                // shape change, though.
                JS_ASSERT(obj->nativeContains(*shape));
                if (IsFunctionObject(obj->getSlot(slot)))
                    RETURN_STOP("can't trace set of function-valued global property");
            } else {
                // Setting a function-valued property might need to rebrand the
                // object. Call the method write barrier. Note that even if the
                // property is not function-valued now, it might be on trace.
                enterDeepBailCall();
                LIns* args[] = {box_value_into_alloc(v, v_ins), w.immi(slot), obj_ins, cx_ins};
                LIns* ok_ins = w.call(&MethodWriteBarrier_ci, args);
                guard(false, w.eqi0(ok_ins), OOM_EXIT);
                leaveDeepBailCall();
            }
        }

        // Store the value.
        if (obj == globalObj) {
            if (!lazilyImportGlobalSlot(slot))
                RETURN_STOP("lazy import of global slot failed");
            set(&obj->getSlotRef(slot), v_ins);
        } else {
            LIns* slots_ins = NULL;
            stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
        }
    }

    return RECORD_CONTINUE;
}
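
/*
 * Check that a new data property can be added to obj without leaving trace and
 * remember the object's last-property shape for record_AddProperty to verify.
 */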
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::addDataProperty(JSObject* obj)
{
    if (!obj->isExtensible())
        RETURN_STOP("assignment adds property to non-extensible object");

    // If obj is the global, the global shape is about to change. Note also
    // that since we do not record this case, SETNAME and SETPROP are identical
    // as far as the tracer is concerned. (js_CheckUndeclaredVarAssignment
    // distinguishes the two, in the interpreter.)
    if (obj == globalObj)
        RETURN_STOP("set new property of global object"); // global shape change

    // js_AddProperty does not call the addProperty hook.
    Class* clasp = obj->getClass();
    if (clasp->addProperty != Valueify(JS_PropertyStub))
        RETURN_STOP("set new property of object with addProperty hook");

    // See comment in TR::nativeSet about why we do not support setting a
    // property that has both a setter and a slot.
    if (clasp->setProperty != Valueify(JS_PropertyStub))
        RETURN_STOP("set new property with setter and slot");

#ifdef DEBUG
    addPropShapeBefore = obj->lastProperty();
#endif
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_AddProperty(JSObject *obj)
{
    Value& objv = stackval(-2);
    JS_ASSERT(&objv.toObject() == obj);
    LIns* obj_ins = get(&objv);
    Value& v = stackval(-1);
    LIns* v_ins = get(&v);
    const Shape* shape = obj->lastProperty();

#ifdef DEBUG
    JS_ASSERT(addPropShapeBefore);
    if (obj->inDictionaryMode())
        JS_ASSERT(shape->previous()->matches(addPropShapeBefore));
    else
        JS_ASSERT(shape->previous() == addPropShapeBefore);
    JS_ASSERT(shape->isDataDescriptor());
    JS_ASSERT(shape->hasDefaultSetter());
    addPropShapeBefore = NULL;
#endif

    if (obj->inDictionaryMode())
        RETURN_STOP_A("assignment adds property to dictionary"); // FIXME: bug 625900

    // On trace, call js_Add{,Atom}Property to do the dirty work.
    LIns* args[] = { w.immpShapeGC(shape), obj_ins, cx_ins };
    jsbytecode op = *cx->regs->pc;
    bool isDefinitelyAtom = (op == JSOP_SETPROP);
    const CallInfo *ci = isDefinitelyAtom ? &js_AddAtomProperty_ci : &js_AddProperty_ci;
    LIns* ok_ins = w.call(ci, args);
    guard(false, w.eqi0(ok_ins), OOM_EXIT);

    // Box the value and store it in the new slot.
    CHECK_STATUS_A(InjectStatus(nativeSet(obj, obj_ins, shape, v, v_ins)));

    // Finish off a SET instruction by moving sp[-1] to sp[-2].
    if (op == JSOP_SETPROP || op == JSOP_SETNAME || op == JSOP_SETMETHOD)
        set(&objv, v_ins);
    return ARECORD_CONTINUE;
}
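
/*
 * Write v/v_ins into a stack slot that is still tracked on this trace, allowing
 * a double-to-int32 demotion when the incoming value is a promoted int32.
 */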
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::setUpwardTrackedVar(Value* stackVp, const Value &v, LIns* v_ins)
{
    JSValueType stackT = determineSlotType(stackVp);
    JSValueType otherT = getCoercedType(v);

    bool promote = true;

    if (stackT != otherT) {
        if (stackT == JSVAL_TYPE_DOUBLE && otherT == JSVAL_TYPE_INT32 && IsPromotedInt32(v_ins))
            promote = false;
        else
            RETURN_STOP("can't trace this upvar mutation");
    }

    set(stackVp, v_ins, promote);

    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *shape,
                           LIns *v_ins, const Value &v)
{
    // Set variables in on-trace-stack call objects by updating the tracker.
    JSStackFrame *fp = frameIfInRange(callobj);
    if (fp) {
        if (shape->setterOp() == SetCallArg) {
            JS_ASSERT(shape->hasShortID());
            uintN slot = uint16(shape->shortid);
            Value *vp2 = &fp->formalArg(slot);
            CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
            return RECORD_CONTINUE;
        }
        if (shape->setterOp() == SetCallVar) {
            JS_ASSERT(shape->hasShortID());
            uintN slot = uint16(shape->shortid);
            Value *vp2 = &fp->slots()[slot];
            CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
            return RECORD_CONTINUE;
        }
        RETURN_STOP("can't trace special CallClass setter");
    }

    if (!callobj->getPrivate()) {
        // Because the parent guard in guardCallee ensures this Call object
        // will be the same object now and on trace, and because once a Call
        // object loses its frame it never regains one, on trace we will also
        // have a null private in the Call object. So all we need to do is
        // write the value to the Call object's slot.
        intN slot = uint16(shape->shortid);
        if (shape->setterOp() == SetCallArg) {
            JS_ASSERT(slot < ArgClosureTraits::slot_count(callobj));
            slot += ArgClosureTraits::slot_offset(callobj);
        } else if (shape->setterOp() == SetCallVar) {
            JS_ASSERT(slot < VarClosureTraits::slot_count(callobj));
            slot += VarClosureTraits::slot_offset(callobj);
        } else {
            RETURN_STOP("can't trace special CallClass setter");
        }

        // Now assert that the shortid get we did above was ok. Have to do it
        // after the RETURN_STOP above, since in that case we may in fact not
        // have a valid shortid; but we don't use it in that case anyway.
        JS_ASSERT(shape->hasShortID());

        LIns* slots_ins = NULL;
        stobj_set_dslot(callobj_ins, slot, slots_ins, v, v_ins);
        return RECORD_CONTINUE;
    }

    // This is the hard case: we have a JSStackFrame private, but it's not in
    // range. During trace execution we may or may not have a JSStackFrame
    // anymore. Call the standard builtins, which handle that situation.

    // Set variables in off-trace-stack call objects by calling standard builtins.
    const CallInfo* ci = NULL;
    if (shape->setterOp() == SetCallArg)
        ci = &js_SetCallArg_ci;
    else if (shape->setterOp() == SetCallVar)
        ci = &js_SetCallVar_ci;
    else
        RETURN_STOP("can't trace special CallClass setter");

    // Even though the frame is out of range, later we might be called as an
    // inner trace such that the target variable is defined in the outer trace
    // entry frame. For simplicity, we just fall off trace.
    guard(false,
          w.eqp(entryFrameIns(), w.ldpObjPrivate(callobj_ins)),
          MISMATCH_EXIT);

    LIns* args[] = {
        box_value_for_native_call(v, v_ins),
        w.nameImmw(JSID_BITS(SHAPE_USERID(shape))),
        callobj_ins,
        cx_ins
    };
    LIns* call_ins = w.call(ci, args);
    guard(false, w.name(w.eqi0(call_ins), "guard(set upvar)"), STATUS_EXIT);

    return RECORD_CONTINUE;
}

/*
 * Emit a specialized, inlined copy of js_SetPropertyHelper for the current
 * instruction. On success, *deferredp is true if a call to record_AddProperty
 * is expected.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::setProperty(JSObject* obj, LIns* obj_ins, const Value &v, LIns* v_ins,
                           bool* deferredp)
{
    *deferredp = false;

    JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
    jsid id = ATOM_TO_JSID(atom);

    if (obj->getOps()->setProperty)
        RETURN_STOP("non-native object"); // FIXME: bug 625900

    bool safe;
    JSObject* pobj;
    const Shape* shape;
    CHECK_STATUS(lookupForSetPropertyOp(obj, obj_ins, id, &safe, &pobj, &shape));
    if (!safe)
        RETURN_STOP("setprop: lookup fail"); // FIXME: bug 625900

    // Handle Call objects specially. The Call objects we create on trace are
    // not fully populated until we leave trace. Calling the setter on such an
    // object wouldn't work.
    if (obj->isCall())
        return setCallProp(obj, obj_ins, shape, v_ins, v);

    // Handle setting a property that is not found on obj or anywhere on its
    // prototype chain.
    if (!shape) {
        *deferredp = true;
        return addDataProperty(obj);
    }

    // Check whether we can assign to/over the existing property.
    if (shape->isAccessorDescriptor()) {
        if (shape->hasDefaultSetter())
            RETURN_STOP("setting accessor property with no setter");
    } else if (!shape->writable()) {
        RETURN_STOP("setting readonly data property");
    }

    // Handle setting an existing own property.
    if (pobj == obj) {
        if (*cx->regs->pc == JSOP_SETMETHOD) {
            if (shape->isMethod() && &shape->methodObject() == &v.toObject())
                return RECORD_CONTINUE;
            RETURN_STOP("setmethod: property exists");
        }
        return nativeSet(obj, obj_ins, shape, v, v_ins);
    }

    // If shape is an inherited non-SHARED property, we will add a new,
    // shadowing data property.
    if (shape->hasSlot()) {
        // Avoid being tripped up by legacy special case for shortids, where
        // the new shadowing data property inherits the setter.
        if (shape->hasShortID() && !shape->hasDefaultSetter())
            RETURN_STOP("shadowing assignment with shortid");
        *deferredp = true;
        return addDataProperty(obj);
    }

    // Handle setting an inherited SHARED property.
    // If it has the default setter, the assignment is a no-op.
    if (shape->hasDefaultSetter() && !shape->hasGetterValue())
        return RECORD_CONTINUE;
    return nativeSet(obj, obj_ins, shape, v, v_ins);
}

/* Record a JSOP_SET{PROP,NAME,METHOD} instruction. */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::recordSetPropertyOp()
{
    Value& l = stackval(-2);
    if (!l.isObject())
        RETURN_STOP("set property of primitive");
    JSObject* obj = &l.toObject();
    LIns* obj_ins = get(&l);

    Value& r = stackval(-1);
    LIns* r_ins = get(&r);

    bool deferred;
    CHECK_STATUS(setProperty(obj, obj_ins, r, r_ins, &deferred));

    // Finish off a SET instruction by moving sp[-1] to sp[-2]. But if
    // record_AddProperty is going to be called, we're not done with sp[-2] yet,
    // so delay this move until the end of record_AddProperty.
    if (!deferred)
        set(&l, r_ins);
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETPROP()
{
    return InjectStatus(recordSetPropertyOp());
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETMETHOD()
{
    return InjectStatus(recordSetPropertyOp());
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETNAME()
{
    return InjectStatus(recordSetPropertyOp());
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::recordInitPropertyOp(jsbytecode op)
{
    Value& l = stackval(-2);
    JSObject* obj = &l.toObject();
    LIns* obj_ins = get(&l);
    JS_ASSERT(obj->getClass() == &js_ObjectClass);

    Value& v = stackval(-1);
    LIns* v_ins = get(&v);

    JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
    jsid id = js_CheckForStringIndex(ATOM_TO_JSID(atom));

    // If obj already has this property (because JSOP_NEWOBJECT already set its
    // shape or because the id appears more than once in the initializer), just
    // set it. The existing property can't be an accessor property: we wouldn't
    // get here, as JSOP_SETTER can't be recorded.
    if (const Shape* shape = obj->nativeLookup(id)) {
        // Don't assign a bare (non-cloned) function to an ordinary or method
        // property. The opposite case, assigning some other value to a method,
        // is OK. nativeSet emits code that trips the write barrier.
        if (op == JSOP_INITMETHOD)
            RETURN_STOP("initmethod: property exists");
        JS_ASSERT(shape->isDataDescriptor());
        JS_ASSERT(shape->hasSlot());
        JS_ASSERT(shape->hasDefaultSetter());
        return nativeSet(obj, obj_ins, shape, v, v_ins);
    }

    // Duplicate the interpreter's special treatment of __proto__. Unlike the
    // SET opcodes, JSOP_INIT{PROP,METHOD} do not write to the stack.
    if (atom == cx->runtime->atomState.protoAtom) {
        bool deferred;
        return setProperty(obj, obj_ins, v, v_ins, &deferred);
    }

    // Define a new property.
    return addDataProperty(obj);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITPROP()
{
    return InjectStatus(recordInitPropertyOp(JSOP_INITPROP));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITMETHOD()
{
    return InjectStatus(recordInitPropertyOp(JSOP_INITMETHOD));
}

JS_REQUIRES_STACK VMSideExit*
TraceRecorder::enterDeepBailCall()
{
    // Take snapshot for DeepBail and store it in tm->bailExit.
    VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
    w.stTraceMonitorField(w.nameImmpNonGC(exit), bailExit);

    // Tell nanojit not to discard or defer stack writes before this call.
    w.xbarrier(createGuardRecord(exit));

    // Forget about guarded shapes, since deep bailers can reshape the world.
    forgetGuardedShapes();
    return exit;
}

JS_REQUIRES_STACK void
TraceRecorder::leaveDeepBailCall()
{
    // Keep tm->bailExit null when it's invalid.
    w.stTraceMonitorField(w.immpNull(), bailExit);
}

JS_REQUIRES_STACK void
TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, Value* outp)
{
    // Store the boxed result (and this-object, if JOF_CALLOP) before the
    // guard. The deep-bail case requires this. If the property get fails,
    // these slots will be ignored anyway.
    // N.B. monitorRecording expects get(outp)->isLoad()
    JS_ASSERT(vp_ins->isop(LIR_allocp));
    LIns* result_ins = w.lddAlloc(vp_ins);
    set(outp, result_ins);
    if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP)
        set(outp + 1, obj_ins);

    // We need to guard on ok_ins, but this requires a snapshot of the state
    // after this op. monitorRecording will do it for us.
    pendingGuardCondition = ok_ins;

    // Note there is a boxed result sitting on the stack. The caller must leave
    // it there for the time being, since the return type is not yet
    // known. monitorRecording will emit the code to unbox it.
    pendingUnboxSlot = outp;
}
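
/*
 * Atomize *namep if needed so it can be used as a jsid; the atomized string is
 * written back through namep so the caller's GC root keeps it alive.
 */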
static inline bool
RootedStringToId(JSContext* cx, JSString** namep, jsid* idp)
{
    JSString* name = *namep;
    if (name->isAtomized()) {
        *idp = INTERNED_STRING_TO_JSID(name);
        return true;
    }

    JSAtom* atom = js_AtomizeString(cx, name, 0);
    if (!atom)
        return false;
    *namep = ATOM_TO_STRING(atom); /* write back to GC root */
    *idp = ATOM_TO_JSID(atom);
    return true;
}
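
/*
 * A small trace-local polymorphic inline cache mapping (shape, id) pairs to
 * slot numbers, so repeated GetPropertyByName calls can skip the full lookup.
 */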
static const size_t PIC_TABLE_ENTRY_COUNT = 32;

struct PICTableEntry
{
    jsid    id;
    uint32  shape;
    uint32  slot;
};

struct PICTable
{
    PICTable() : entryCount(0) {}

    PICTableEntry   entries[PIC_TABLE_ENTRY_COUNT];
    uint32          entryCount;

    bool scan(uint32 shape, jsid id, uint32 *slotOut) {
        for (size_t i = 0; i < entryCount; ++i) {
            PICTableEntry &entry = entries[i];
            if (entry.shape == shape && entry.id == id) {
                *slotOut = entry.slot;
                return true;
            }
        }
        return false;
    }

    void update(uint32 shape, jsid id, uint32 slot) {
        if (entryCount >= PIC_TABLE_ENTRY_COUNT)
            return;
        PICTableEntry &newEntry = entries[entryCount++];
        newEntry.shape = shape;
        newEntry.id = id;
        newEntry.slot = slot;
    }
};

static JSBool FASTCALL
GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, PICTable *picTable)
{
    LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id)) {
        SetBuiltinError(cx);
        return false;
    }

    /* Delegate to the op, if present. */
    PropertyIdOp op = obj->getOps()->getProperty;
    if (op) {
        bool result = op(cx, obj, obj, id, vp);
        if (!result)
            SetBuiltinError(cx);
        return WasBuiltinSuccessful(cx);
    }

    /* Try to hit in the cache. */
    uint32 slot;
    if (picTable->scan(obj->shape(), id, &slot)) {
        *vp = obj->getSlot(slot);
        return WasBuiltinSuccessful(cx);
    }

    const Shape *shape;
    JSObject *holder;
    if (!js_GetPropertyHelperWithShape(cx, obj, obj, id, JSGET_METHOD_BARRIER, vp, &shape,
                                       &holder)) {
        SetBuiltinError(cx);
        return false;
    }

    /* Only update the table when the object is the holder of the property. */
    if (obj == holder && shape->hasSlot()) {
        /*
         * Note: we insert the non-normalized id into the table so you don't need to
         * normalize it before hitting in the table (faster lookup).
         */
        picTable->update(obj->shape(), id, shape->slot);
    }

    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUEPTR,
                     PICTABLE,
                     0, ACCSET_STORE_ANY)

// Convert the value in a slot to a string and store the resulting string back
// in the slot (typically in order to root it).
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::primitiveToStringInPlace(Value* vp)
{
    Value v = *vp;
    JS_ASSERT(v.isPrimitive());

    if (!v.isString()) {
        // v is not a string. Turn it into one. js_ValueToString is safe
        // because v is not an object.
        JSString *str = js_ValueToString(cx, v);
        JS_ASSERT(TRACE_RECORDER(cx) == this);
        if (!str)
            RETURN_ERROR("failed to stringify element id");
        v.setString(str);
        set(vp, stringify(*vp));

        // Write the string back to the stack to save the interpreter some work
        // and to ensure snapshots get the correct type for this slot.
        vp->setString(str);
    }
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyByName(LIns* obj_ins, Value* idvalp, Value* outp)
{
    CHECK_STATUS(primitiveToStringInPlace(idvalp));
    enterDeepBailCall();

    // Call GetPropertyByName. The vp parameter points to stack because this is
    // what the interpreter currently does. obj and id are rooted on the
    // interpreter stack, but the slot at vp is not a root.
    LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
    LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
    PICTable *picTable = new (traceAlloc()) PICTable();
    LIns* pic_ins = w.nameImmpNonGC(picTable);
    LIns* args[] = {pic_ins, vp_ins, idvalp_ins, obj_ins, cx_ins};
    LIns* ok_ins = w.call(&GetPropertyByName_ci, args);

    // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
    // entry for that address. Correct it. (If the value in the address is
    // never used again, the usual case, Nanojit will kill this load.)
    // The Address could be made more precise with some effort (idvalp_ins may
    // be a stack location), but it's not worth it because this case is rare.
    tracker.set(idvalp, w.ldp(AnyAddress(idvalp_ins)));

    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}

static JSBool FASTCALL
GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp)
{
    LeaveTraceIfGlobalObject(cx, obj);

    AutoIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, 0,
                     ACCSET_STORE_ANY)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* outp)
{
    CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));

    // See note in getPropertyByName about vp.
    enterDeepBailCall();
    LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
    LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
    LIns* ok_ins = w.call(&GetPropertyByIndex_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}

static JSBool FASTCALL
GetPropertyById(JSContext* cx, JSObject* obj, jsid id, Value* vp)
{
    LeaveTraceIfGlobalObject(cx, obj);
    if (!obj->getProperty(cx, id, vp)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById, CONTEXT, OBJECT, JSID, VALUEPTR,
                     0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp)
{
    // Find the atom for the property being accessed.
    JSAtom* atom;
    jsbytecode* pc = cx->regs->pc;
    const JSCodeSpec& cs = js_CodeSpec[*pc];
    if (*pc == JSOP_LENGTH) {
        atom = cx->runtime->atomState.lengthAtom;
    } else if (JOF_TYPE(cs.format) == JOF_ATOM) {
        atom = atoms[GET_INDEX(pc)];
    } else {
        JS_ASSERT(JOF_TYPE(cs.format) == JOF_SLOTATOM);
        atom = atoms[GET_INDEX(pc + SLOTNO_LEN)];
    }

    JS_STATIC_ASSERT(sizeof(jsid) == sizeof(void *));
    jsid id = ATOM_TO_JSID(atom);

    // Call GetPropertyById. See note in getPropertyByName about vp.
    enterDeepBailCall();
    LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
    LIns* args[] = {vp_ins, w.nameImmw(JSID_BITS(id)), obj_ins, cx_ins};
    LIns* ok_ins = w.call(&GetPropertyById_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}

/* Manually inlined, specialized copy of js_NativeGet. */
static JSBool FASTCALL
GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, Shape* shape, Value* vp)
{
    LeaveTraceIfGlobalObject(cx, obj);

#ifdef DEBUG
    JSObject* pobj;
    JSProperty* prop;
    JS_ASSERT(obj->lookupProperty(cx, shape->id, &pobj, &prop));
    JS_ASSERT(prop == (JSProperty*) shape);
#endif

    // Shape::get contains a special case for With objects. We can elide it
    // here because With objects are, we claim, never on the operand stack
    // while recording.
    JS_ASSERT(obj->getClass() != &js_WithClass);

    vp->setUndefined();
    if (!shape->getterOp()(cx, obj, SHAPE_USERID(shape), vp)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
                     CONTEXT, OBJECT, SHAPE, VALUEPTR, 0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, const Shape* shape, Value* outp)
{
    JS_ASSERT(!shape->hasGetterValue());
    JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
    JS_ASSERT(!shape->hasDefaultGetterOrIsMethod());

    // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
    // FIXME - We should call the getter directly. Using a builtin function for
    // now because it buys some extra asserts. See bug 508310.
    enterDeepBailCall();
    LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
    LIns* args[] = {vp_ins, w.nameImmpNonGC(shape), obj_ins, cx_ins};
    LIns* ok_ins = w.call(&GetPropertyWithNativeGetter_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, const Shape* shape)
{
    if (!canCallImacro())
        RETURN_STOP("cannot trace script getter, already in imacro");

    // Rearrange the stack in preparation for the imacro, taking care to adjust
    // the interpreter state and the tracker in the same way. This adjustment
    // is noted in imacros.jsasm with .fixup tags.
    Value getter = shape->getterValue();
    Value*& sp = cx->regs->sp;
    switch (*cx->regs->pc) {
      case JSOP_GETPROP:
        sp++;
        sp[-1] = sp[-2];
        set(&sp[-1], get(&sp[-2]));
        sp[-2] = getter;
        set(&sp[-2], w.immpObjGC(&getter.toObject()));
        return callImacroInfallibly(getprop_imacros.scriptgetter);

      case JSOP_CALLPROP:
        sp += 2;
        sp[-2] = getter;
        set(&sp[-2], w.immpObjGC(&getter.toObject()));
        sp[-1] = sp[-3];
        set(&sp[-1], get(&sp[-3]));
        return callImacroInfallibly(callprop_imacros.scriptgetter);

      case JSOP_GETTHISPROP:
      case JSOP_GETARGPROP:
      case JSOP_GETLOCALPROP:
        sp += 2;
        sp[-2] = getter;
        set(&sp[-2], w.immpObjGC(&getter.toObject()));
        sp[-1] = ObjectValue(*obj);
        set(&sp[-1], obj_ins);
        return callImacroInfallibly(getthisprop_imacros.scriptgetter);

      default:
        RETURN_STOP("cannot trace script getter for this opcode");
    }
}
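
/*
 * Emit LIR for reading a character code out of a string: flatten ropes if
 * necessary, guard that the index is in range, and load the char as a double.
 */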
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getCharCodeAt(JSString *str, LIns* str_ins, LIns* idx_ins, LIns** out)
{
    CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
    idx_ins = w.ui2p(idx_ins);
    LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
    if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins, w.nameImmw(JSString::ROPE_BIT)))))
    {
        LIns *args[] = { str_ins, cx_ins };
        LIns *ok_ins = w.call(&js_Flatten_ci, args);
        guard(false, w.eqi0(ok_ins), OOM_EXIT);
        w.label(mbr);
    }

    guard(true,
          w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT)),
          snapshot(MISMATCH_EXIT));
    *out = w.i2d(w.getStringChar(str_ins, idx_ins));
    return RECORD_CONTINUE;
}

JS_STATIC_ASSERT(sizeof(JSString) == 16 || sizeof(JSString) == 32);

JS_REQUIRES_STACK LIns*
TraceRecorder::getUnitString(LIns* str_ins, LIns* idx_ins)
{
    LIns *ch_ins = w.getStringChar(str_ins, idx_ins);
    guard(true, w.ltuiN(ch_ins, UNIT_STRING_LIMIT), MISMATCH_EXIT);
    return w.addp(w.nameImmpNonGC(JSString::unitStringTable),
                  w.lshpN(w.ui2p(ch_ins), (sizeof(JSString) == 16) ? 4 : 5));
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getCharAt(JSString *str, LIns* str_ins, LIns* idx_ins, JSOp mode, LIns** out)
{
    CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
    idx_ins = w.ui2p(idx_ins);
    LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
    if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins,
                                             w.nameImmw(JSString::ROPE_BIT)))))
    {
        LIns *args[] = { str_ins, cx_ins };
        LIns *ok_ins = w.call(&js_Flatten_ci, args);
        guard(false, w.eqi0(ok_ins), OOM_EXIT);
        w.label(mbr);
    }

    LIns* inRange = w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT));

    if (mode == JSOP_GETELEM) {
        guard(true, inRange, MISMATCH_EXIT);

        *out = getUnitString(str_ins, idx_ins);
    } else {
        LIns *phi_ins = w.allocp(sizeof(JSString *));
        w.stAlloc(w.nameImmpNonGC(cx->runtime->emptyString), phi_ins);

        if (MaybeBranch mbr = w.jf(inRange)) {
            LIns *unitstr_ins = getUnitString(str_ins, idx_ins);
            w.stAlloc(unitstr_ins, phi_ins);
            w.label(mbr);
        }
        *out = w.ldpAlloc(phi_ins);
    }
    return RECORD_CONTINUE;
}

// Typed array tracing depends on EXPANDED_LOADSTORE and F2I
#if NJ_EXPANDED_LOADSTORE_SUPPORTED && NJ_F2I_SUPPORTED
static bool OkToTraceTypedArrays = true;
#else
static bool OkToTraceTypedArrays = false;
#endif
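
/* Guard that the arguments element at idx has not been deleted (is not JS_ARGS_HOLE). */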
JS_REQUIRES_STACK void
TraceRecorder::guardNotHole(LIns *argsobj_ins, LIns *idx_ins)
{
    // vp = &argsobj->slots[JSSLOT_ARGS_DATA].slots[idx]
    LIns* argsData_ins = w.getObjPrivatizedSlot(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
    LIns* slotOffset_ins = w.addp(w.nameImmw(offsetof(ArgumentsData, slots)),
                                  w.ui2p(w.muliN(idx_ins, sizeof(Value))));
    LIns* vp_ins = w.addp(argsData_ins, slotOffset_ins);

    guard(false,
          w.name(is_boxed_magic(ArgsSlotOffsetAddress(vp_ins), JS_ARGS_HOLE),
                 "guard(not deleted arg)"),
          MISMATCH_EXIT);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETELEM()
{
    bool call = *cx->regs->pc == JSOP_CALLELEM;

    Value& idx = stackval(-1);
    Value& lval = stackval(-2);

    LIns* obj_ins = get(&lval);
    LIns* idx_ins = get(&idx);

    // Special case for array-like access of strings.
    if (lval.isString() && hasInt32Repr(idx)) {
        if (call)
            RETURN_STOP_A("JSOP_CALLELEM on a string");
        int i = asInt32(idx);
        if (size_t(i) >= lval.toString()->length())
            RETURN_STOP_A("Invalid string index in JSOP_GETELEM");
        LIns* char_ins;
        CHECK_STATUS_A(getCharAt(lval.toString(), obj_ins, idx_ins, JSOP_GETELEM, &char_ins));
        set(&lval, char_ins);
        return ARECORD_CONTINUE;
    }

    if (lval.isPrimitive())
        RETURN_STOP_A("JSOP_GETELEM on a primitive");
    RETURN_IF_XML_A(lval);

    JSObject* obj = &lval.toObject();
    if (obj == globalObj)
        RETURN_STOP_A("JSOP_GETELEM on global");
    LIns* v_ins;

    /* Property access using a string name or something we have to stringify. */
    if (!idx.isInt32()) {
        if (!idx.isPrimitive())
            RETURN_STOP_A("object used as index");

        return InjectStatus(getPropertyByName(obj_ins, &idx, &lval));
    }

    if (obj->isArguments()) {
        // Don't even try to record if out of range or reading a deleted arg
        int32 int_idx = idx.toInt32();
        if (int_idx < 0 || int_idx >= (int32)obj->getArgsInitialLength())
            RETURN_STOP_A("cannot trace arguments with out of range index");
        if (obj->getArgsElement(int_idx).isMagic(JS_ARGS_HOLE))
            RETURN_STOP_A("reading deleted args element");

        // Only trace reading arguments out of active, tracked frame
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (afp) {
            Value* vp = &afp->canonicalActualArg(int_idx);
            if (idx_ins->isImmD()) {
                JS_ASSERT(int_idx == (int32)idx_ins->immD());
                guardNotHole(obj_ins, w.nameImmi(int_idx));
                v_ins = get(vp);
            } else {
                // If the index is not a constant expression, we generate LIR to load the value from
                // the native stack area. The guard on js_ArgumentClass above ensures the up-to-date
                // value has been written back to the native stack area.
                CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));

                /*
                 * For small nactual,
                 * 0 <= int_idx < nactual iff unsigned(int_idx) < unsigned(nactual).
                 */
                guard(true,
                      w.name(w.ltui(idx_ins, w.nameImmui(afp->numActualArgs())),
                             "guard(upvar index in range)"),
                      MISMATCH_EXIT);

                guardNotHole(obj_ins, idx_ins);

                JSValueType type = getCoercedType(*vp);

                // Guard that the argument has the same type on trace as during recording.
                LIns* typemap_ins;
                if (depth == 0) {
                    // In this case, we are in the same frame where the arguments object was created.
                    // The entry type map is not necessarily up-to-date, so we capture a new type map
                    // for this point in the code.
                    unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
                    JSValueType* typemap = new (traceAlloc()) JSValueType[stackSlots];
                    DetermineTypesVisitor detVisitor(*this, typemap);
                    VisitStackSlots(detVisitor, cx, 0);
                    typemap_ins = w.nameImmpNonGC(typemap + 2 /* callee, this */);
                } else {
                    // In this case, we are in a deeper frame from where the arguments object was
                    // created. The type map at the point of the call out from the creation frame
                    // is accurate.
                    // Note: this relies on the assumption that we abort on setting an element of
                    // an arguments object in any deeper frame.
                    LIns* fip_ins = w.ldpRstack(lirbuf->rp, (callDepth-depth)*sizeof(FrameInfo*));
                    typemap_ins = w.addp(fip_ins, w.nameImmw(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSValueType)));
                }

                LIns* type_ins = w.lduc2uiConstTypeMapEntry(typemap_ins, idx_ins);
                guard(true,
                      w.name(w.eqi(type_ins, w.immi(type)), "guard(type-stable upvar)"),
                      BRANCH_EXIT);

                // Read the value out of the native stack area.
                size_t stackOffset = nativespOffset(&afp->canonicalActualArg(0));
                LIns* args_addr_ins = w.addp(lirbuf->sp, w.nameImmw(stackOffset));
                LIns* argi_addr_ins = w.addp(args_addr_ins,
                                             w.ui2p(w.muli(idx_ins, w.nameImmi(sizeof(double)))));

                // The Address could be more precise, but ValidateWriter
                // doesn't recognise the complex expression involving 'sp' as
                // a stack access, and it's not worth the effort to be
                // more precise because this case is rare.
                v_ins = stackLoad(AnyAddress(argi_addr_ins), type);
            }
            set(&lval, v_ins);
            if (call)
                set(&idx, obj_ins);
            return ARECORD_CONTINUE;
        }
        RETURN_STOP_A("can't reach arguments object's frame");
    }

    if (obj->isDenseArray()) {
        // Fast path for dense arrays accessed with an integer index.
        Value* vp;
        LIns* addr_ins;

        VMSideExit* branchExit = snapshot(BRANCH_EXIT);
        guardDenseArray(obj_ins, branchExit);
        CHECK_STATUS_A(denseArrayElement(lval, idx, vp, v_ins, addr_ins, branchExit));
        set(&lval, v_ins);
        if (call)
            set(&idx, obj_ins);
        return ARECORD_CONTINUE;
    }

    if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
        // Fast path for typed arrays accessed with an integer index.
        Value* vp;
        guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_CONST);
        CHECK_STATUS_A(typedArrayElement(lval, idx, vp, v_ins));
        set(&lval, v_ins);
        if (call)
            set(&idx, obj_ins);
        return ARECORD_CONTINUE;
    }

    return InjectStatus(getPropertyByIndex(obj_ins, idx_ins, &lval));
}

/* Functions used by JSOP_SETELEM */

static JSBool FASTCALL
SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, JSBool strict)
{
    LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp, strict)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByName,
                     CONTEXT, OBJECT, STRINGPTR, VALUEPTR, BOOL,
                     0, ACCSET_STORE_ANY)

static JSBool FASTCALL
InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, ValueArgType arg)
{
    LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id) ||
        !obj->defineProperty(cx, id, ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUE,
                     0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, Value* idvalp, Value* rvalp, bool init)
{
    CHECK_STATUS(primitiveToStringInPlace(idvalp));

    if (init) {
        LIns* v_ins = box_value_for_native_call(*rvalp, get(rvalp));
        enterDeepBailCall();
        LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
        LIns* args[] = {v_ins, idvalp_ins, obj_ins, cx_ins};
        pendingGuardCondition = w.call(&InitPropertyByName_ci, args);
    } else {
        // See note in getPropertyByName about vp.
        LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
        enterDeepBailCall();
        LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
        LIns* args[] = { strictModeCode_ins, vp_ins, idvalp_ins, obj_ins, cx_ins };
        pendingGuardCondition = w.call(&SetPropertyByName_ci, args);
    }

    leaveDeepBailCall();
    return RECORD_CONTINUE;
}

static JSBool FASTCALL
SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp, JSBool strict)
{
    LeaveTraceIfGlobalObject(cx, obj);

    AutoIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp, strict)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, BOOL,
                     0, ACCSET_STORE_ANY)

static JSBool FASTCALL
InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, ValueArgType arg)
{
    LeaveTraceIfGlobalObject(cx, obj);

    AutoIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) ||
        !obj->defineProperty(cx, idr.id(), ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
        SetBuiltinError(cx);
        return JS_FALSE;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, VALUE,
                     0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* rvalp, bool init)
{
    CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));

    if (init) {
        LIns* rval_ins = box_value_for_native_call(*rvalp, get(rvalp));
        enterDeepBailCall();
        LIns* args[] = {rval_ins, index_ins, obj_ins, cx_ins};
        pendingGuardCondition = w.call(&InitPropertyByIndex_ci, args);
    } else {
        // See note in getPropertyByName about vp.
        LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
        enterDeepBailCall();
        LIns* args[] = {strictModeCode_ins, vp_ins, index_ins, obj_ins, cx_ins};
        pendingGuardCondition = w.call(&SetPropertyByIndex_ci, args);
    }

    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
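
/*
 * Common recorder for element assignments (JSOP_SETELEM and friends): dispatch
 * on the index and object kind to the by-name/by-index builtins or to the
 * typed-array and dense-array fast paths.
 */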
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
{
    Value& v = stackval(v_spindex);
    Value& idx = stackval(idx_spindex);
    Value& lval = stackval(lval_spindex);

    if (lval.isPrimitive())
        RETURN_STOP_A("left JSOP_SETELEM operand is not an object");
    RETURN_IF_XML_A(lval);

    JSObject* obj = &lval.toObject();
    LIns* obj_ins = get(&lval);
    LIns* idx_ins = get(&idx);
    LIns* v_ins = get(&v);

    if (obj->isArguments())
        RETURN_STOP_A("can't trace setting elements of the |arguments| object");

    if (obj == globalObj)
        RETURN_STOP_A("can't trace setting elements on the global object");

    if (!idx.isInt32()) {
        if (!idx.isPrimitive())
            RETURN_STOP_A("non-primitive index");
        CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
                                               *cx->regs->pc == JSOP_INITELEM));
    } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
        // Fast path: assigning to element of typed array.
        VMSideExit* branchExit = snapshot(BRANCH_EXIT);

        // Ensure array is a typed array and is the same type as what was written
        guardClass(obj_ins, obj->getClass(), branchExit, LOAD_CONST);

        js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);

        LIns* priv_ins = w.ldpObjPrivate(obj_ins);

        // The index was on the stack and is therefore a LIR float; force it to
        // be an integer.
        CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));

        // Ensure idx >= 0 && idx < length (by using uint32)
        CHECK_STATUS_A(guard(true,
                             w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
                                    "inRange"),
                             OVERFLOW_EXIT, /* abortIfAlwaysExits = */true));

        // We're now ready to store
        LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);
        LIns* pidx_ins = w.ui2p(idx_ins);
        LIns* typed_v_ins = v_ins;

        // If it's not a number, convert objects to NaN,
        // null to 0, and call StringToNumber or BooleanOrUndefinedToNumber
        // for the remaining primitives.
        if (!v.isNumber()) {
            if (v.isNull()) {
                typed_v_ins = w.immd(0);
            } else if (v.isUndefined()) {
                typed_v_ins = w.immd(js_NaN);
            } else if (v.isString()) {
                LIns* ok_ins = w.allocp(sizeof(JSBool));
                LIns* args[] = { ok_ins, typed_v_ins, cx_ins };
                typed_v_ins = w.call(&js_StringToNumber_ci, args);
                guard(false,
                      w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
                      OOM_EXIT);
            } else if (v.isBoolean()) {
                JS_ASSERT(v.isBoolean());
                typed_v_ins = w.i2d(typed_v_ins);
            } else {
                typed_v_ins = w.immd(js_NaN);
            }
        }

        switch (tarray->type) {
          case js::TypedArray::TYPE_INT8:
          case js::TypedArray::TYPE_INT16:
          case js::TypedArray::TYPE_INT32:
            typed_v_ins = d2i(typed_v_ins);
            break;
          case js::TypedArray::TYPE_UINT8:
          case js::TypedArray::TYPE_UINT16:
          case js::TypedArray::TYPE_UINT32:
            typed_v_ins = d2u(typed_v_ins);
            break;
          case js::TypedArray::TYPE_UINT8_CLAMPED:
            if (IsPromotedInt32(typed_v_ins)) {
                typed_v_ins = w.demoteToInt32(typed_v_ins);
                typed_v_ins = w.cmovi(w.ltiN(typed_v_ins, 0),
                                      w.immi(0),
                                      w.cmovi(w.gtiN(typed_v_ins, 0xff),
                                              w.immi(0xff),
                                              typed_v_ins));
            } else {
                typed_v_ins = w.call(&js_TypedArray_uint8_clamp_double_ci, &typed_v_ins);
            }
            break;
          case js::TypedArray::TYPE_FLOAT32:
          case js::TypedArray::TYPE_FLOAT64:
            // Do nothing, this is already a float
            break;
          default:
            JS_NOT_REACHED("Unknown typed array type in tracer");
        }

        switch (tarray->type) {
          case js::TypedArray::TYPE_INT8:
          case js::TypedArray::TYPE_UINT8_CLAMPED:
          case js::TypedArray::TYPE_UINT8:
            w.sti2cTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
            break;
          case js::TypedArray::TYPE_INT16:
          case js::TypedArray::TYPE_UINT16:
            w.sti2sTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
            break;
          case js::TypedArray::TYPE_INT32:
          case js::TypedArray::TYPE_UINT32:
            w.stiTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
            break;
          case js::TypedArray::TYPE_FLOAT32:
            w.std2fTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
            break;
          case js::TypedArray::TYPE_FLOAT64:
            w.stdTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
            break;
          default:
            JS_NOT_REACHED("Unknown typed array type in tracer");
        }
    } else if (idx.toInt32() < 0 || !obj->isDenseArray()) {
        CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
                                                *cx->regs->pc == JSOP_INITELEM));
    } else {
        // Fast path: assigning to element of dense array.
        VMSideExit* branchExit = snapshot(BRANCH_EXIT);
        VMSideExit* mismatchExit = snapshot(MISMATCH_EXIT);

        // Make sure the array is actually dense.
        if (!obj->isDenseArray())
            return ARECORD_STOP;
        guardDenseArray(obj_ins, branchExit);

        // The index was on the stack and is therefore a LIR float. Force it to
        // be an integer.
        CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));

        if (!js_EnsureDenseArrayCapacity(cx, obj, idx.toInt32()))
            RETURN_STOP_A("couldn't ensure dense array capacity for setelem");

        // Grow the array if the index exceeds the capacity. This happens
        // rarely, eg. less than 1% of the time in SunSpider.
        LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
        /*
         * It's important that CSE works across this control-flow diamond
         * because it really helps series of interleaved GETELEM and SETELEM
         * operations. Likewise with the diamond below.
         */
        w.pauseAddingCSEValues();
        if (MaybeBranch mbr = w.jt(w.name(w.ltui(idx_ins, capacity_ins), "inRange"))) {
            LIns* args[] = { idx_ins, obj_ins, cx_ins };
            LIns* res_ins = w.call(&js_EnsureDenseArrayCapacity_ci, args);
            guard(false, w.eqi0(res_ins), mismatchExit);
            w.label(mbr);
        }
        w.resumeAddingCSEValues();

        // Get the address of the element.
        LIns *elemp_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");

        // If we are overwriting a hole:
        // - Guard that we don't have any indexed properties along the prototype chain.
        // - Check if the length has changed;  if so, update it to index+1.
        // This happens moderately often, eg. close to 10% of the time in
        // SunSpider, and for some benchmarks it's close to 100%.
        Address dslotAddr = DSlotsAddress(elemp_ins);
        LIns* isHole_ins = w.name(is_boxed_magic(dslotAddr, JS_ARRAY_HOLE),
                                  "isHole");
        w.pauseAddingCSEValues();
        if (MaybeBranch mbr1 = w.jf(isHole_ins)) {
            /*
             * It's important that this use branchExit, not mismatchExit, since
             * changes to shapes should just mean we compile a new branch, not
             * throw the whole trace away.
             */
            CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins, branchExit));
            LIns* length_ins = w.lduiObjPrivate(obj_ins);
            if (MaybeBranch mbr2 = w.jt(w.ltui(idx_ins, length_ins))) {
                LIns* newLength_ins = w.name(w.addiN(idx_ins, 1), "newLength");
                w.stuiObjPrivate(obj_ins, newLength_ins);
                w.label(mbr2);
            }
            w.label(mbr1);
        }
        w.resumeAddingCSEValues();

        // Right, actually set the element.
        box_value_into(v, v_ins, dslotAddr);
    }

    jsbytecode* pc = cx->regs->pc;
    if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
        set(&lval, v_ins);

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETELEM()
{
    return setElem(-3, -2, -1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLNAME()
{
    JSObject* obj = &cx->fp()->scopeChain();
    if (obj != globalObj) {
        Value* vp;
        LIns* ins;
        NameResult nr;
        CHECK_STATUS_A(scopeChainProp(obj, vp, ins, nr));
        stack(0, ins);
        stack(1, w.immiUndefined());
        return ARECORD_CONTINUE;
    }

    LIns* obj_ins = w.immpObjGC(globalObj);
    JSObject* obj2;
    PCVal pcval;

    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    if (pcval.isNull() || !pcval.isFunObj())
        RETURN_STOP_A("callee is not an object");

    stack(0, w.immpObjGC(&pcval.toFunObj()));
    stack(1, w.immiUndefined());
    return ARECORD_CONTINUE;
}

JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarArgOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarVarOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarStackOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, ACCSET_STORE_ANY)

/*
 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
 * value. NULL is returned only on a can't-happen condition with an invalid
 * typemap. The value of the upvar is returned as v.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, Value& v)
{
    /*
     * Try to find the upvar in the current trace's tracker. For &vr to be
     * the address of the jsval found in js::GetUpvar, we must initialize
     * vr directly with the result, so it is a reference to the same location.
     * It does not work to assign the result to v, because v is an already
     * existing reference that points to something else.
     */
    UpvarCookie cookie = uva->vector[index];
    const Value& vr = GetUpvar(cx, script->staticLevel, cookie);
    v = vr;

    if (LIns* ins = attemptImport(&vr))
        return ins;

    /*
     * The upvar is not in the current trace, so get the upvar value exactly as
     * the interpreter does and unbox.
     */
    uint32 level = script->staticLevel - cookie.level();
    uint32 cookieSlot = cookie.slot();
    JSStackFrame* fp = cx->findFrameAtLevel(level);
    const CallInfo* ci;
    int32 slot;
    if (!fp->isFunctionFrame() || fp->isEvalFrame()) {
        ci = &GetUpvarStackOnTrace_ci;
        slot = cookieSlot;
    } else if (cookieSlot < fp->numFormalArgs()) {
        ci = &GetUpvarArgOnTrace_ci;
        slot = cookieSlot;
    } else if (cookieSlot == UpvarCookie::CALLEE_SLOT) {
        ci = &GetUpvarArgOnTrace_ci;
        slot = -2;
    } else {
        ci = &GetUpvarVarOnTrace_ci;
        slot = cookieSlot - fp->numFormalArgs();
    }

    LIns* outp = w.allocp(sizeof(double));
    LIns* args[] = {
        outp,
        w.nameImmi(callDepth),
        w.nameImmi(slot),
        w.nameImmi(level),
        cx_ins
    };
    LIns* call_ins = w.call(ci, args);
    JSValueType type = getCoercedType(v);
    guard(true,
          w.name(w.eqi(call_ins, w.immi(type)), "guard(type-stable upvar)"),
          BRANCH_EXIT);
    return stackLoad(AllocSlotsAddress(outp), type);
}

/*
 * Generate LIR to load a value from the native stack. This method ensures that
 * the correct LIR load operator is used.
 */
LIns*
TraceRecorder::stackLoad(Address addr, uint8 type)
{
    switch (type) {
      case JSVAL_TYPE_DOUBLE:
        return w.ldd(addr);
      case JSVAL_TYPE_NONFUNOBJ:
      case JSVAL_TYPE_STRING:
      case JSVAL_TYPE_FUNOBJ:
      case JSVAL_TYPE_NULL:
        return w.ldp(addr);
      case JSVAL_TYPE_INT32:
        return w.i2d(w.ldi(addr));
      case JSVAL_TYPE_BOOLEAN:
      case JSVAL_TYPE_UNDEFINED:
      case JSVAL_TYPE_MAGIC:
        return w.ldi(addr);
      case JSVAL_TYPE_BOXED:
      default:
        JS_NOT_REACHED("found jsval type in an upvar type map entry");
        return NULL;
    }
}
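
/* Read an upvar out of a flat closure's upvar array, unboxing it with a type guard. */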
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETFCSLOT()
{
    JSObject& callee = cx->fp()->callee();
    LIns* callee_ins = get(&cx->fp()->calleeValue());

    LIns* upvars_ins = w.getObjPrivatizedSlot(callee_ins, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);

    unsigned index = GET_UINT16(cx->regs->pc);
    LIns *v_ins = unbox_value(callee.getFlatClosureUpvar(index),
                              FCSlotsAddress(upvars_ins, index),
                              snapshot(BRANCH_EXIT));
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLFCSLOT()
{
    CHECK_STATUS_A(record_JSOP_GETFCSLOT());
    stack(1, w.immiUndefined());
    return ARECORD_CONTINUE;
}
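
/*
 * Guard that the callee recorded here is the same function at run time and,
 * for closures that need it, that it has the same parent scope.
 */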
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardCallee(Value& callee)
{
    JSObject& callee_obj = callee.toObject();
    JS_ASSERT(callee_obj.isFunction());
    JSFunction* callee_fun = (JSFunction*) callee_obj.getPrivate();

    /*
     * First, guard on the callee's function (JSFunction*) identity. This is
     * necessary since tracing always inlines function calls. But note that
     * TR::functionCall avoids calling TR::guardCallee for constant methods
     * (those hit in the property cache from JSOP_CALLPROP).
     */
    VMSideExit* branchExit = snapshot(BRANCH_EXIT);
    LIns* callee_ins = get(&callee);
    tree->gcthings.addUnique(callee);

    guard(true,
          w.eqp(w.ldpObjPrivate(callee_ins), w.nameImmpNonGC(callee_fun)),
          branchExit);

    /*
     * Second, consider guarding on the parent scope of the callee.
     *
     * As long as we guard on parent scope, we are guaranteed when recording
     * variable accesses for a Call object having no private data that we can
     * emit code that avoids checking for an active JSStackFrame for the Call
     * object (which would hold fresh variable values -- the Call object's
     * slots would be stale until the stack frame is popped). This is because
     * Call objects can't pick up a new stack frame in their private slot once
     * they have none. TR::callProp and TR::setCallProp depend on this fact and
     * document where; if this guard is removed make sure to fix those methods.
     * Search for the "parent guard" comments in them.
     *
     * In general, a loop in an escaping function scoped by Call objects could
     * be traced before the function has returned, and the trace then triggered
     * after, or vice versa. The function must escape, i.e., be a "funarg", or
     * else there's no need to guard callee parent at all. So once we know (by
     * static analysis) that a function may escape, we cannot avoid guarding on
     * either the private data of the Call object or the Call object itself, if
     * we wish to optimize for the particular deactivated stack frame (null
     * private data) case as noted above.
     */
    if (callee_fun->isInterpreted() &&
        (!FUN_NULL_CLOSURE(callee_fun) || callee_fun->script()->bindings.hasUpvars())) {
        JSObject* parent = callee_obj.getParent();

        if (parent != globalObj) {
            if (!parent->isCall())
                RETURN_STOP("closure scoped by neither the global object nor a Call object");

            guard(true,
                  w.eqp(w.ldpObjParent(callee_ins), w.immpObjGC(parent)),
                  branchExit);
        }
    }
    return RECORD_CONTINUE;
}
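/*
 * Illustrative case: in
 *
 *   function outer() {
 *     var f = function inner() { while (...) ... };
 *     return f;   // inner escapes, so it is a "funarg"
 *   }
 *
 * a loop inside |inner| can be recorded while outer's frame is still live and
 * then triggered again after outer has returned (or vice versa), which is why
 * the parent-scope guard above is needed for escaping closures.
 */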
/*
 * Prepare the given |arguments| object to be accessed on trace. If the return
 * value is non-NULL, then the given |arguments| object refers to a frame on
 * the current trace and is guaranteed to refer to the same frame on trace for
 * all later executions.
 */
JS_REQUIRES_STACK JSStackFrame*
TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp)
{
    JS_ASSERT(obj->isArguments());

    JSStackFrame *afp = frameIfInRange(obj, depthp);
    if (!afp)
        return NULL;

    VMSideExit *exit = snapshot(MISMATCH_EXIT);
    guardClass(obj_ins, obj->getClass(), exit, LOAD_CONST);

    LIns* args_ins = getFrameObjPtr(afp->addressOfArgs());
    LIns* cmp = w.eqp(args_ins, obj_ins);
    guard(true, cmp, exit);
    return afp;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, bool constructing)
{
    /*
     * The function's identity (JSFunction and therefore JSScript) is guarded,
     * so we can optimize away the function call if the corresponding script is
     * empty. No need to worry about crossing globals or relocating argv, even,
     * in this case.
     */
    if (fun->script()->isEmpty()) {
        LIns* rval_ins;
        if (constructing) {
            LIns* args[] = { get(&fval), w.nameImmpNonGC(&js_ObjectClass), cx_ins };
            LIns* tv_ins = w.call(&js_CreateThisFromTrace_ci, args);
            guard(false, w.eqp0(tv_ins), OOM_EXIT);
            rval_ins = tv_ins;
        } else {
            rval_ins = w.immiUndefined();
        }
        stack(-2 - argc, rval_ins);
        return RECORD_CONTINUE;
    }

    if (fval.toObject().getGlobal() != globalObj)
        RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes");

    JSStackFrame* const fp = cx->fp();

    // Generate a type map for the outgoing frame and stash it in the LIR
    unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
    FrameInfo* fi = (FrameInfo*)
        tempAlloc().alloc(sizeof(FrameInfo) + stackSlots * sizeof(JSValueType));
    JSValueType* typemap = (JSValueType*)(fi + 1);

    DetermineTypesVisitor detVisitor(*this, typemap);
    VisitStackSlots(detVisitor, cx, 0);

    JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);

    tree->gcthings.addUnique(fval);
    fi->pc = cx->regs->pc;
    fi->imacpc = fp->maybeImacropc();
    fi->spdist = cx->regs->sp - fp->slots();
    fi->set_argc(uint16(argc), constructing);
    fi->callerHeight = stackSlots - (2 + argc);
    fi->callerArgc = fp->isGlobalFrame() || fp->isEvalFrame() ? 0 : fp->numActualArgs();

    if (callDepth >= tree->maxCallDepth)
        tree->maxCallDepth = callDepth + 1;

    fi = traceMonitor->frameCache->memoize(fi);
    if (!fi)
        RETURN_STOP("out of memory");
    w.stRstack(w.nameImmpNonGC(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));

#if defined JS_JIT_SPEW
    debug_only_printf(LC_TMTracer, "iFC frameinfo=%p, stack=%d, map=", (void*)fi,
                      fi->callerHeight);
    for (unsigned i = 0; i < fi->callerHeight; i++)
        debug_only_printf(LC_TMTracer, "%c", TypeToChar(fi->get_typemap()[i]));
    debug_only_print0(LC_TMTracer, "\n");
#endif

    updateAtoms(fun->u.i.script);
    return RECORD_CONTINUE;
}
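/*
 * Worked example of the callerHeight arithmetic above: for a call |f(x, y)|
 * the native stack holds the callee and |this| (2 slots) plus argc = 2
 * argument slots on top of the caller's own slots, so
 * fi->callerHeight = stackSlots - (2 + argc) counts exactly the caller-owned
 * slots below the outgoing call.
 */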
/*
 * We implement JSOP_FUNAPPLY/JSOP_FUNCALL using imacros
 */
static inline JSOp
GetCallMode(JSStackFrame *fp)
{
    if (fp->hasImacropc()) {
        JSOp op = (JSOp) *fp->imacropc();
        if (op == JSOP_FUNAPPLY || op == JSOP_FUNCALL)
            return op;
    }
    return JSOP_CALL;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALL()
{
    uintN argc = GET_ARGC(cx->regs->pc);
    cx->assertValidStackDepth(argc + 2);
    return InjectStatus(functionCall(argc, GetCallMode(cx->fp())));
}
static jsbytecode* funapply_imacro_table[] = {
    funapply_imacros.apply0,
    funapply_imacros.apply1,
    funapply_imacros.apply2,
    funapply_imacros.apply3,
    funapply_imacros.apply4,
    funapply_imacros.apply5,
    funapply_imacros.apply6,
    funapply_imacros.apply7,
    funapply_imacros.apply8
};

static jsbytecode* funcall_imacro_table[] = {
    funcall_imacros.call0,
    funcall_imacros.call1,
    funcall_imacros.call2,
    funcall_imacros.call3,
    funcall_imacros.call4,
    funcall_imacros.call5,
    funcall_imacros.call6,
    funcall_imacros.call7,
    funcall_imacros.call8
};
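/*
 * funapply_imacro_table is indexed by the length of the dense array or
 * arguments object passed to apply, so |f.apply(obj, [a, b, c])| expands to
 * the apply3 imacro once that length has been guarded below.
 * funcall_imacro_table is indexed by the argc of the JSOP_FUNCALL site itself.
 */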
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FUNCALL()
{
    return record_JSOP_FUNAPPLY();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FUNAPPLY()
{
    jsbytecode *pc = cx->regs->pc;
    uintN argc = GET_ARGC(pc);
    cx->assertValidStackDepth(argc + 2);

    Value* vp = cx->regs->sp - (argc + 2);
    jsuint length = 0;
    JSObject* aobj = NULL;
    LIns* aobj_ins = NULL;

    JS_ASSERT(!cx->fp()->hasImacropc());

    if (!IsFunctionObject(vp[0]))
        return record_JSOP_CALL();
    RETURN_IF_XML_A(vp[0]);

    JSObject* obj = &vp[0].toObject();
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
    if (FUN_INTERPRETED(fun))
        return record_JSOP_CALL();

    bool apply = fun->u.n.native == js_fun_apply;
    if (!apply && fun->u.n.native != js_fun_call)
        return record_JSOP_CALL();

    /*
     * We don't trace apply and call with a primitive 'this', which is the
     * first positional parameter, unless 'this' is null. That's ok.
     */
    if (argc > 0 && !vp[2].isObjectOrNull())
        return record_JSOP_CALL();

    /*
     * Guard on the identity of this, which is the function we are applying.
     */
    if (!IsFunctionObject(vp[1]))
        RETURN_STOP_A("callee is not a function");
    CHECK_STATUS_A(guardCallee(vp[1]));

    if (apply && argc >= 2) {
        if (argc != 2)
            RETURN_STOP_A("apply with excess arguments");
        if (vp[3].isPrimitive())
            RETURN_STOP_A("arguments parameter of apply is primitive");
        aobj = &vp[3].toObject();
        aobj_ins = get(&vp[3]);

        /*
         * We trace dense arrays and arguments objects. The code we generate
         * for apply uses imacros to handle a specific number of arguments.
         */
        if (aobj->isDenseArray()) {
            guardDenseArray(aobj_ins, MISMATCH_EXIT);
            length = aobj->getArrayLength();
            guard(true,
                  w.eqiN(w.lduiObjPrivate(aobj_ins), length),
                  BRANCH_EXIT);
        } else if (aobj->isArguments()) {
            unsigned depth;
            JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
            if (!afp)
                RETURN_STOP_A("can't reach arguments object's frame");
            if (aobj->isArgsLengthOverridden())
                RETURN_STOP_A("can't trace arguments with overridden length");
            guardArgsLengthNotAssigned(aobj_ins);
            length = afp->numActualArgs();
        } else {
            RETURN_STOP_A("arguments parameter of apply is not a dense array or arguments object");
        }

        if (length >= JS_ARRAY_LENGTH(funapply_imacro_table))
            RETURN_STOP_A("too many arguments to apply");

        return InjectStatus(callImacro(funapply_imacro_table[length]));
    }

    if (argc >= JS_ARRAY_LENGTH(funcall_imacro_table))
        RETURN_STOP_A("too many arguments to call");

    return InjectStatus(callImacro(funcall_imacro_table[argc]));
}
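/*
 * Note that the restrictions above mean that, e.g., |f.call(5)| (a primitive,
 * non-null |this|) falls back to the generic JSOP_CALL path, while
 * |f.apply(obj, args)| where |args| is neither a dense array nor an arguments
 * object aborts recording.
 */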
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_NativeCallComplete()
{
    if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
        return ARECORD_CONTINUE;

    JS_ASSERT(pendingSpecializedNative);
    jsbytecode* pc = cx->regs->pc;
    JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNCALL || *pc == JSOP_FUNAPPLY ||
              *pc == JSOP_NEW || *pc == JSOP_SETPROP);

    Value& v = stackval(-1);
    LIns* v_ins = get(&v);

    /*
     * At this point the generated code has already called the native function
     * and we can no longer fail back to the original pc location (JSOP_CALL)
     * because that would cause the interpreter to re-execute the native
     * function, which might have side effects.
     *
     * Instead, the snapshot() call below sees that we are currently parked on
     * a traceable native's JSOP_CALL instruction, and it will advance the pc
     * to restore by the length of the current opcode. If the native's return
     * type is jsval, snapshot() will also indicate in the type map that the
     * element on top of the stack is a boxed value which doesn't need to be
     * boxed if the type guard generated by unbox_value() fails.
     */

    if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
        leaveDeepBailCall();

        LIns* status = w.ldiStateField(builtinStatus);
        if (pendingSpecializedNative == &generatedSpecializedNative) {
            LIns* ok_ins = v_ins;

            /*
             * If we run a generic traceable native, the return value is in the argument
             * vector for native function calls. The actual return value of the native is a JSBool
             * indicating the error status.
             */

            Address nativeRvalAddr = AllocSlotsAddress(native_rval_ins);
            if (pendingSpecializedNative->flags & JSTN_CONSTRUCTOR) {
                LIns *cond_ins;
                LIns *x;

                // v_ins := the object payload from native_rval_ins
                // cond_ins := true if native_rval_ins contains a JSObject*
                unbox_any_object(nativeRvalAddr, &v_ins, &cond_ins);
                // x := v_ins if native_rval_ins contains a JSObject*, NULL otherwise
                x = w.cmovp(cond_ins, v_ins, w.immw(0));
                // v_ins := newobj_ins if native_rval_ins doesn't contain a JSObject*,
                //          the object payload from native_rval_ins otherwise
                v_ins = w.cmovp(w.eqp0(x), newobj_ins, x);
            } else {
                v_ins = w.ldd(nativeRvalAddr);
            }
            set(&v, v_ins);

            propagateFailureToBuiltinStatus(ok_ins, status);
        }
        guard(true, w.eqi0(status), STATUS_EXIT);
    }

    if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
        /*
         * If we side exit on the unboxing code due to a type change, make sure that the boxed
         * value is actually currently associated with that location, and that we are talking
         * about the top of the stack here, which is where we expected boxed values.
         */
        JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins);
        set(&v, unbox_value(v, AllocSlotsAddress(native_rval_ins), snapshot(BRANCH_EXIT)));
    } else if (pendingSpecializedNative->flags &
               (JSTN_RETURN_NULLABLE_STR | JSTN_RETURN_NULLABLE_OBJ)) {
        guard(v.isNull(),
              w.name(w.eqp0(v_ins), "guard(nullness)"),
              BRANCH_EXIT);
    } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
        /* Already added i2d in functionCall. */
        JS_ASSERT(v.isNumber());
    } else {
        /* Convert the result to double if the builtin returns int32. */
        if (v.isNumber() &&
            pendingSpecializedNative->builtin->returnType() == ARGTYPE_I) {
            set(&v, w.i2d(v_ins));
        }
    }

    // We'll null pendingSpecializedNative in monitorRecording, on the next op
    // cycle. There must be a next op since the stack is non-empty.
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr)
{
    JSObject* obj = &cx->fp()->scopeChain();
    JSOp op = JSOp(*cx->regs->pc);
    if (js_CodeSpec[op].format & JOF_GNAME)
        obj = obj->getGlobal();
    if (obj != globalObj)
        return scopeChainProp(obj, vp, ins, nr);

    /* Can't use prop here, because we don't want unboxing from global slots. */
    LIns* obj_ins = w.immpObjGC(globalObj);
    uint32 slot;

    JSObject* obj2;
    PCVal pcval;

    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    /* Abort if property doesn't exist (interpreter will report an error.) */
    if (pcval.isNull())
        RETURN_STOP_A("named property not found");

    /* Insist on obj being the directly addressed object. */
    if (obj2 != obj)
        RETURN_STOP_A("name() hit prototype chain");

    /* Don't trace getter or setter calls, our caller wants a direct slot. */
    if (pcval.isShape()) {
        const Shape* shape = pcval.toShape();
        if (!isValidSlot(obj, shape))
            RETURN_STOP_A("name() not accessing a valid slot");
        slot = shape->slot;
    } else {
        if (!pcval.isSlot())
            RETURN_STOP_A("PCE is not a slot");
        slot = pcval.toSlot();
    }

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    vp = &obj->getSlotRef(slot);
    ins = get(vp);
    nr.tracked = true;
    return ARECORD_CONTINUE;
}
static JSObject* FASTCALL
MethodReadBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj)
{
    Value v = ObjectValue(*funobj);
    AutoValueRooter tvr(cx, v);

    if (!obj->methodReadBarrier(cx, *shape, tvr.addr()))
        return NULL;
    return &tvr.value().toObject();
}
JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SHAPE, OBJECT,
                     0, ACCSET_STORE_ANY)
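/*
 * The read barrier above is what keeps "joined" function objects correct when
 * a method value escapes: e.g. after |var m = o.m;| the extracted value must
 * behave like a distinct function object, so the barrier clones it on such a
 * read (see the call site in propTail below).
 */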
/*
 * Get a property. The current opcode has JOF_ATOM.
 *
 * There are two modes. The caller must pass nonnull pointers for either outp
 * or both slotp and v_insp. In the latter case, we require a plain old
 * property with a slot; if the property turns out to be anything else, abort
 * tracing (rather than emit a call to a native getter or GetAnyProperty).
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, Value *outp)
{
    /*
     * Insist that obj have js_SetProperty as its set object-op. This suffices
     * to prevent a rogue obj from being used on-trace (loaded via obj_ins),
     * because we will guard on shape (or else global object identity) and any
     * object not having the same op must have a different class, and therefore
     * must differ in its shape (or not be the global object).
     */
    if (!obj->isDenseArray() && obj->getOps()->getProperty)
        RETURN_STOP_A("non-dense-array, non-native js::ObjectOps::getProperty");

    JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));

    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    JSObject* obj2;
    PCVal pcval;
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    /* Check for nonexistent property reference, which results in undefined. */
    if (pcval.isNull()) {
        if (slotp)
            RETURN_STOP_A("property not found");

        /*
         * We could specialize to guard on just JSClass.getProperty, but a mere
         * class guard is simpler and slightly faster.
         */
        if (obj->getClass()->getProperty != Valueify(JS_PropertyStub)) {
            RETURN_STOP_A("can't trace through access to undefined property if "
                          "JSClass.getProperty hook isn't stubbed");
        }
        guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);

        /*
         * This trace will be valid as long as neither the object nor any object
         * on its prototype chain changes shape.
         *
         * FIXME: This loop can become a single shape guard once bug 497789 has
         * been fixed.
         */
        VMSideExit* exit = snapshot(BRANCH_EXIT);
        do {
            if (obj->isNative()) {
                CHECK_STATUS_A(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
            } else if (obj->isDenseArray()) {
                guardDenseArray(obj_ins, exit);
            } else {
                RETURN_STOP_A("non-native object involved in undefined property access");
            }
        } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));

        set(outp, w.immiUndefined());
        return ARECORD_CONTINUE;
    }

    return InjectStatus(propTail(obj, obj_ins, obj2, pcval, slotp, v_insp, outp));
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcval,
                        uint32 *slotp, LIns** v_insp, Value *outp)
{
    const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc];
    uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
    JS_ASSERT(!(cs.format & JOF_SET));

    const Shape* shape;
    uint32 slot;
    bool isMethod;

    if (pcval.isShape()) {
        shape = pcval.toShape();
        JS_ASSERT(obj2->nativeContains(*shape));

        if (setflags && !shape->hasDefaultSetter())
            RETURN_STOP("non-stub setter");
        if (setflags && !shape->writable())
            RETURN_STOP("writing to a readonly property");
        if (!shape->hasDefaultGetterOrIsMethod()) {
            if (slotp)
                RETURN_STOP("can't trace non-stub getter for this opcode");
            if (shape->hasGetterValue())
                return getPropertyWithScriptGetter(obj, obj_ins, shape);
            if (shape->slot == SHAPE_INVALID_SLOT)
                return getPropertyWithNativeGetter(obj_ins, shape, outp);
            return getPropertyById(obj_ins, outp);
        }
        if (!obj2->containsSlot(shape->slot))
            RETURN_STOP("no valid slot");
        slot = shape->slot;
        isMethod = shape->isMethod();
        JS_ASSERT_IF(isMethod, obj2->hasMethodBarrier());
    } else {
        if (!pcval.isSlot())
            RETURN_STOP("PCE is not a slot");
        slot = pcval.toSlot();
        shape = NULL;
        isMethod = false;
    }

    /* We have a slot. Check whether it is direct or in a prototype. */
    if (obj2 != obj) {
        if (setflags)
            RETURN_STOP("JOF_INCDEC|JOF_FOR opcode hit prototype chain");

        /*
         * We're getting a prototype property. Two cases:
         *
         * 1. If obj2 is obj's immediate prototype we must walk up from obj,
         * since direct and immediate-prototype cache hits key on obj's shape,
         * not its identity.
         *
         * 2. Otherwise obj2 is higher up the prototype chain and we've keyed
         * on obj's identity, and since setting __proto__ reshapes all objects
         * along the old prototype chain, then provided we shape-guard obj2,
         * we can "teleport" directly to obj2 by embedding it as a constant
         * (this constant object instruction will be CSE'ed with the constant
         * emitted by test_property_cache, whose shape is guarded).
         */
        obj_ins = (obj2 == obj->getProto()) ? w.ldpObjProto(obj_ins) : w.immpObjGC(obj2);
        obj = obj2;
    }

    LIns* v_ins;
    if (obj2 == globalObj) {
        if (isMethod)
            RETURN_STOP("get global method");
        if (!lazilyImportGlobalSlot(slot))
            RETURN_STOP("lazy import of global slot failed");
        v_ins = get(&globalObj->getSlotRef(slot));
    } else {
        v_ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT));
    }

    /*
     * Joined function object stored as a method must be cloned when extracted
     * as a property value other than a callee. Note that shapes cover method
     * value as well as other property attributes and order, so this condition
     * is trace-invariant.
     *
     * We do not impose the method read barrier if in an imacro, assuming any
     * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
     * leaked to the calling script.
     */
    if (isMethod && !cx->fp()->hasImacropc()) {
        enterDeepBailCall();
        LIns* args[] = { v_ins, w.immpShapeGC(shape), obj_ins, cx_ins };
        v_ins = w.call(&MethodReadBarrier_ci, args);
        leaveDeepBailCall();
    }

    if (slotp) {
        *slotp = slot;
        *v_insp = v_ins;
    }
    if (outp)
        set(outp, v_ins);
    return RECORD_CONTINUE;
}
/*
 * When we end up with a hole, read it as undefined, and make sure to set
 * addr_ins to null.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::denseArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins,
                                 LIns*& addr_ins, VMSideExit* branchExit)
{
    JS_ASSERT(oval.isObject() && ival.isInt32());

    JSObject* obj = &oval.toObject();
    LIns* obj_ins = get(&oval);
    jsint idx = ival.toInt32();
    LIns* idx_ins;
    CHECK_STATUS(makeNumberInt32(get(&ival), &idx_ins));

    /*
     * Arrays have both a length and a capacity, but we only need to check
     * |index < capacity|; in the case where |length < index < capacity|
     * the entries [length..capacity-1] will have already been marked as
     * holes by resizeDenseArrayElements() so we can read them and get
     * the correct value.
     */
    LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
    jsuint capacity = obj->getDenseArrayCapacity();
    bool within = (jsuint(idx) < capacity);
    if (!within) {
        /* If not idx < capacity, stay on trace (and read value as undefined). */
        guard(true, w.geui(idx_ins, capacity_ins), branchExit);

        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));

        v_ins = w.immiUndefined();
        addr_ins = NULL;
        return RECORD_CONTINUE;
    }

    /* Guard that index is within capacity. */
    guard(true, w.name(w.ltui(idx_ins, capacity_ins), "inRange"), branchExit);

    /* Load the value and guard on its type to unbox it. */
    vp = &obj->slots[jsuint(idx)];
    JS_ASSERT(sizeof(Value) == 8); // The |3| in the following statement requires this.
    addr_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");
    v_ins = unbox_value(*vp, DSlotsAddress(addr_ins), branchExit);

    /* Don't let the hole value escape. Turn it into an undefined. */
    if (vp->isMagic()) {
        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
        v_ins = w.immiUndefined();
        addr_ins = NULL;
    }
    return RECORD_CONTINUE;
}
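/*
 * For example, reading |a[1]| from |var a = [0, , 2]| hits a hole: the slot
 * holds the magic hole value, so (provided no prototype has indexed
 * properties) the read above is turned into undefined rather than letting the
 * hole value escape onto the trace.
 */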
/* See comments in TypedArrayTemplate<double>::copyIndexToValue. */
LIns*
TraceRecorder::canonicalizeNaNs(LIns *dval_ins)
{
    /* NaNs are the only floating point values that do not == themselves. */
    LIns *isnonnan_ins = w.eqd(dval_ins, dval_ins);
    return w.cmovd(isnonnan_ins, dval_ins, w.immd(js_NaN));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::typedArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins)
{
    JS_ASSERT(oval.isObject() && ival.isInt32());

    JSObject* obj = &oval.toObject();
    LIns* obj_ins = get(&oval);
    jsint idx = ival.toInt32();
    LIns* idx_ins;
    CHECK_STATUS_A(makeNumberInt32(get(&ival), &idx_ins));
    LIns* pidx_ins = w.ui2p(idx_ins);

    js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);
    JS_ASSERT(tarray);

    /* priv_ins will load the TypedArray* */
    LIns* priv_ins = w.ldpObjPrivate(obj_ins);

    /* for out-of-range, do the same thing that the interpreter does, which is return undefined */
    if ((jsuint) idx >= tarray->length) {
        CHECK_STATUS_A(guard(false,
                             w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
                             BRANCH_EXIT,
                             /* abortIfAlwaysExits = */true));
        v_ins = w.immiUndefined();
        return ARECORD_CONTINUE;
    }

    /*
     * Ensure idx < length
     *
     * NOTE! mLength is uint32, but it's guaranteed to fit in a Value
     * int, so we can treat it as either signed or unsigned.
     * If the index happens to be negative, when it's treated as
     * unsigned it'll be a very large int, and thus won't be less than
     * length.
     */
    guard(true,
          w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)), "inRange"),
          BRANCH_EXIT);

    /* We are now ready to load. Do a different type of load
     * depending on what type of thing we're loading. */
    LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);

    switch (tarray->type) {
      case js::TypedArray::TYPE_INT8:
        v_ins = w.i2d(w.ldc2iTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_UINT8:
      case js::TypedArray::TYPE_UINT8_CLAMPED:
        // i2d on purpose here: it's safe, because an 8-bit uint is guaranteed
        // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
        v_ins = w.i2d(w.lduc2uiTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_INT16:
        v_ins = w.i2d(w.lds2iTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_UINT16:
        // i2d on purpose here: it's safe, because a 16-bit uint is guaranteed
        // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
        v_ins = w.i2d(w.ldus2uiTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_INT32:
        v_ins = w.i2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_UINT32:
        v_ins = w.ui2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_FLOAT32:
        v_ins = canonicalizeNaNs(w.ldf2dTypedArrayElement(data_ins, pidx_ins));
        break;
      case js::TypedArray::TYPE_FLOAT64:
        v_ins = canonicalizeNaNs(w.lddTypedArrayElement(data_ins, pidx_ins));
        break;
      default:
        JS_NOT_REACHED("Unknown typed array type in tracer");
    }

    return ARECORD_CONTINUE;
}
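/*
 * The UINT32 case above is the one element type that cannot use i2d: a uint32
 * value such as 4294967295 does not fit in a signed 32-bit int, so the load
 * must be widened with ui2d to produce the correct double.
 */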
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
{
    JSOp op = JSOp(*cx->regs->pc);
    const JSCodeSpec& cs = js_CodeSpec[op];

    JS_ASSERT(cs.ndefs == 1);
    return prop(obj, obj_ins, NULL, NULL, &stackval(-cs.nuses));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::getProp(Value& v)
{
    if (v.isPrimitive())
        RETURN_STOP_A("primitive lhs");

    return getProp(&v.toObject(), get(&v));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAME()
{
    Value* vp;
    LIns* v_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, v_ins, nr));
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DOUBLE()
{
    double d = consts[GET_INDEX(cx->regs->pc)].toDouble();
    stack(0, w.immd(d));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STRING()
{
    JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
    stack(0, w.immpAtomGC(atom));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ZERO()
{
    stack(0, w.immd(0));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ONE()
{
    stack(0, w.immd(1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NULL()
{
    stack(0, w.immpNull());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THIS()
{
    LIns* this_ins;
    CHECK_STATUS_A(getThis(this_ins));
    stack(0, this_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FALSE()
{
    stack(0, w.immi(0));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRUE()
{
    stack(0, w.immi(1));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OR()
{
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_AND()
{
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCH()
{
#ifdef NANOJIT_IA32
    /* Handle tableswitches specially -- prepare a jump table if needed. */
    return tableswitch();
#else
    return InjectStatus(switchop());
#endif
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCH()
{
    return InjectStatus(switchop());
}
14454 TraceRecorder::record_JSOP_STRICTEQ()
14456 CHECK_STATUS_A(strictEquality(true, false));
14457 return ARECORD_CONTINUE
;
14460 JS_REQUIRES_STACK AbortableRecordingStatus
14461 TraceRecorder::record_JSOP_STRICTNE()
14463 CHECK_STATUS_A(strictEquality(false, false));
14464 return ARECORD_CONTINUE
;
14467 JS_REQUIRES_STACK AbortableRecordingStatus
14468 TraceRecorder::record_JSOP_OBJECT()
14470 JSStackFrame
* const fp
= cx
->fp();
14471 JSScript
* script
= fp
->script();
14472 unsigned index
= atoms
- script
->atomMap
.vector
+ GET_INDEX(cx
->regs
->pc
);
14475 obj
= script
->getObject(index
);
14476 stack(0, w
.immpObjGC(obj
));
14477 return ARECORD_CONTINUE
;
14480 JS_REQUIRES_STACK AbortableRecordingStatus
14481 TraceRecorder::record_JSOP_POP()
14483 return ARECORD_CONTINUE
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRAP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETARG()
{
    stack(0, arg(GET_ARGNO(cx->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETARG()
{
    arg(GET_ARGNO(cx->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETLOCAL()
{
    stack(0, var(GET_SLOTNO(cx->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETLOCAL()
{
    var(GET_SLOTNO(cx->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UINT16()
{
    stack(0, w.immd(GET_UINT16(cx->regs->pc)));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWINIT()
{
    initDepth++;
    hadNewInit = true;

    JSProtoKey key = JSProtoKey(cx->regs->pc[1]);

    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(key, proto_ins));

    LIns* v_ins;
    if (key == JSProto_Array) {
        LIns *args[] = { proto_ins, cx_ins };
        v_ins = w.call(&NewDenseEmptyArray_ci, args);
    } else {
        LIns *args[] = { w.immpNull(), proto_ins, cx_ins };
        v_ins = w.call(&js_InitializerObject_ci, args);
    }
    guard(false, w.eqp0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWARRAY()
{
    initDepth++;

    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins));

    unsigned count = GET_UINT24(cx->regs->pc);
    LIns *args[] = { proto_ins, w.immi(count), cx_ins };
    LIns *v_ins = w.call(&NewDenseAllocatedArray_ci, args);

    guard(false, w.eqp0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWOBJECT()
{
    initDepth++;

    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(JSProto_Object, proto_ins));

    JSObject* baseobj = cx->fp()->script()->getObject(getFullIndex(0));

    LIns *args[] = { w.immpObjGC(baseobj), proto_ins, cx_ins };
    LIns *v_ins = w.call(&js_InitializerObject_ci, args);

    guard(false, w.eqp0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDINIT()
{
    initDepth--;
    if (initDepth == 0)
        hadNewInit = false;

#ifdef DEBUG
    Value& v = stackval(-1);
    JS_ASSERT(!v.isPrimitive());
#endif
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITELEM()
{
    Value& v = stackval(-1);
    Value& idx = stackval(-2);
    Value& lval = stackval(-3);

    // The object is either a dense Array or an Object. Only handle the dense case here.
    // Also skip array initializers which might be unoptimized NEWINIT initializers.
    if (!lval.toObject().isDenseArray() || hadNewInit)
        return setElem(-3, -2, -1);

    // The index is always the same constant integer.
    JS_ASSERT(idx.isInt32());

    // Nothing to do for holes, the array's length has already been set.
    if (v.isMagic(JS_ARRAY_HOLE))
        return ARECORD_CONTINUE;

    LIns* obj_ins = get(&lval);
    LIns* v_ins = get(&v);

    // Set the element.
    LIns *slots_ins = w.ldpObjSlots(obj_ins);
    box_value_into(v, v_ins, DSlotsAddress(slots_ins, idx.toInt32()));

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFSHARP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_USESHARP()
{
    return ARECORD_STOP;
}
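/*
 * For example, recording the literal |[1, , f()]| emits a NEWARRAY of length 3
 * followed by INITELEM for each index; the INITELEM whose value is the
 * array-hole magic (index 1) stores nothing above, since the allocated array
 * already has its length set.
 */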
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCARG()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCLOCAL()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECARG()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECLOCAL()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGINC()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOCALINC()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGDEC()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOCALDEC()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IMACOP()
{
    JS_ASSERT(cx->fp()->hasImacropc());
    return ARECORD_CONTINUE;
}
static JSBool FASTCALL
ObjectToIterator(JSContext* cx, JSObject *obj, int32 flags, Value* vp)
{
    vp->setObject(*obj);
    bool ok = js_ValueToIterator(cx, flags, vp);
    if (!ok)
        SetBuiltinError(cx);
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, ObjectToIterator, CONTEXT, OBJECT, INT32, VALUEPTR,
                     0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ITER()
{
    Value& v = stackval(-1);
    if (v.isPrimitive())
        RETURN_STOP_A("for-in on a primitive value");

    RETURN_IF_XML_A(v);

    LIns *obj_ins = get(&v);
    jsuint flags = cx->regs->pc[1];

    enterDeepBailCall();

    LIns* vp_ins = w.allocp(sizeof(Value));
    LIns* args[] = { vp_ins, w.immi(flags), obj_ins, cx_ins };
    LIns* ok_ins = w.call(&ObjectToIterator_ci, args);

    // We need to guard on ok_ins, but this requires a snapshot of the state
    // after this op. monitorRecording will do it for us.
    pendingGuardCondition = ok_ins;

    // ObjectToIterator can deep-bail without throwing, leaving a value of
    // unknown type in *vp (it can be either a function or a non-function
    // object). Use the same mechanism as finishGetProp to arrange for
    // LeaveTree to deal with this value.
    pendingUnboxSlot = cx->regs->sp - 1;
    set(pendingUnboxSlot, w.name(w.lddAlloc(vp_ins), "iterval"));

    leaveDeepBailCall();

    return ARECORD_CONTINUE;
}
static JSBool FASTCALL
IteratorMore(JSContext *cx, JSObject *iterobj, Value *vp)
{
    if (!js_IteratorMore(cx, iterobj, vp)) {
        SetBuiltinError(cx);
        return false;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_3(extern, BOOL_FAIL, IteratorMore, CONTEXT, OBJECT, VALUEPTR,
                     0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_MOREITER()
{
    Value& iterobj_val = stackval(-1);
    if (iterobj_val.isPrimitive())
        RETURN_STOP_A("for-in on a primitive value");

    RETURN_IF_XML_A(iterobj_val);

    JSObject* iterobj = &iterobj_val.toObject();
    LIns* iterobj_ins = get(&iterobj_val);
    LIns* cond_ins;

    /*
     * JSOP_FOR* already guards on this, but in certain rare cases we might
     * record misformed loop traces. Note that it's not necessary to guard on
     * ni->flags (nor do we in unboxNextValue), because the different
     * iteration type will guarantee a different entry typemap.
     */
    if (iterobj->hasClass(&js_IteratorClass)) {
        guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);

        NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
        if (ni->isKeyIter()) {
            LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
            LIns *cursor_ins = w.ldpIterCursor(ni_ins);
            LIns *end_ins = w.ldpIterEnd(ni_ins);

            cond_ins = w.ltp(cursor_ins, end_ins);
            stack(0, cond_ins);
            return ARECORD_CONTINUE;
        }
    } else {
        guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
    }

    enterDeepBailCall();

    LIns* vp_ins = w.allocp(sizeof(Value));
    LIns* args[] = { vp_ins, iterobj_ins, cx_ins };
    pendingGuardCondition = w.call(&IteratorMore_ci, args);

    leaveDeepBailCall();

    cond_ins = is_boxed_true(AllocSlotsAddress(vp_ins));
    stack(0, cond_ins);

    return ARECORD_CONTINUE;
}
static JSBool FASTCALL
CloseIterator(JSContext *cx, JSObject *iterobj)
{
    if (!js_CloseIterator(cx, iterobj)) {
        SetBuiltinError(cx);
        return false;
    }
    return WasBuiltinSuccessful(cx);
}
JS_DEFINE_CALLINFO_2(extern, BOOL_FAIL, CloseIterator, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDITER()
{
    JS_ASSERT(!stackval(-1).isPrimitive());

    enterDeepBailCall();

    LIns* args[] = { stack(-1), cx_ins };
    LIns* ok_ins = w.call(&CloseIterator_ci, args);

    // We need to guard on ok_ins, but this requires a snapshot of the state
    // after this op. monitorRecording will do it for us.
    pendingGuardCondition = ok_ins;

    leaveDeepBailCall();

    return ARECORD_CONTINUE;
}
#if JS_BITS_PER_WORD == 32
JS_REQUIRES_STACK void
TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
{
    w.stiValuePayload(w.immpMagicWhy(why), addr);
    w.stiValueTag(w.immpMagicWhy(JSVAL_TAG_MAGIC), addr);
}
#elif JS_BITS_PER_WORD == 64
JS_REQUIRES_STACK void
TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
{
    LIns *magic = w.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC, why));
    w.stq(magic, addr);
}
#endif
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::unboxNextValue(LIns* &v_ins)
{
    Value &iterobj_val = stackval(-1);
    JSObject *iterobj = &iterobj_val.toObject();
    LIns* iterobj_ins = get(&iterobj_val);

    if (iterobj->hasClass(&js_IteratorClass)) {
        guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
        NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();

        LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
        LIns *cursor_ins = w.ldpIterCursor(ni_ins);

        /* Emit code to stringify the id if necessary. */
        Address cursorAddr = IterPropsAddress(cursor_ins);
        if (ni->isKeyIter()) {
            /* Read the next id from the iterator. */
            jsid id = *ni->current();
            LIns *id_ins = w.name(w.ldp(cursorAddr), "id");

            /*
             * Most iterations over object properties never have to actually deal with
             * any numeric properties, so we guard here instead of branching.
             */
            guard(JSID_IS_STRING(id), is_string_id(id_ins), BRANCH_EXIT);

            if (JSID_IS_STRING(id)) {
                v_ins = unbox_string_id(id_ins);
            } else if (JSID_IS_INT(id)) {
                /* id is an integer, convert to a string. */
                LIns *id_to_int_ins = unbox_int_id(id_ins);
                LIns* args[] = { id_to_int_ins, cx_ins };
                v_ins = w.call(&js_IntToString_ci, args);
                guard(false, w.eqp0(v_ins), OOM_EXIT);
            } else {
#if JS_HAS_XML_SUPPORT
                JS_ASSERT(JSID_IS_OBJECT(id));
                JS_ASSERT(JSID_TO_OBJECT(id)->isXMLId());
                RETURN_STOP_A("iterated over a property with an XML id");
#else
                JS_NEVER_REACHED("unboxNextValue");
#endif
            }

            /* Increment the cursor by one jsid and store it back. */
            cursor_ins = w.addp(cursor_ins, w.nameImmw(sizeof(jsid)));
            w.stpIterCursor(cursor_ins, ni_ins);
            return ARECORD_CONTINUE;
        }
    } else {
        guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
    }

    Address iterValueAddr = CxAddress(iterValue);
    v_ins = unbox_value(cx->iterValue, iterValueAddr, snapshot(BRANCH_EXIT));
    storeMagic(JS_NO_ITER_VALUE, iterValueAddr);

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORNAME()
{
    Value* vp;
    LIns* x_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, x_ins, nr));
    if (!nr.tracked)
        RETURN_STOP_A("forname on non-tracked value not supported");
    LIns* v_ins;
    CHECK_STATUS_A(unboxNextValue(v_ins));
    set(vp, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORGNAME()
{
    return record_JSOP_FORNAME();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORPROP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORELEM()
{
    LIns* v_ins;
    CHECK_STATUS_A(unboxNextValue(v_ins));
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORARG()
{
    LIns* v_ins;
    CHECK_STATUS_A(unboxNextValue(v_ins));
    arg(GET_ARGNO(cx->regs->pc), v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORLOCAL()
{
    LIns* v_ins;
    CHECK_STATUS_A(unboxNextValue(v_ins));
    var(GET_SLOTNO(cx->regs->pc), v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POPN()
{
    return ARECORD_CONTINUE;
}
static inline bool
IsFindableCallObj(JSObject *obj)
{
    return obj->isCall() &&
           (obj->callIsForEval() || obj->getCallObjCalleeFunction()->isHeavyweight());
}

/*
 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The
 * generated code also ensures that any call objects found have not changed shape.
 *
 *      obj               starting object
 *      obj_ins           LIR instruction representing obj
 *      targetObj         end object for traversal
 *      targetIns         [out] LIR instruction representing obj2
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *targetObj,
                                  LIns *&targetIns)
{
    VMSideExit* exit = NULL;

    /*
     * Scope chains are often left "incomplete", and reified lazily when
     * necessary, since doing so is expensive. When creating null and flat
     * closures on trace (the only kinds supported), the global object is
     * hardcoded as the parent, since reifying the scope chain on trace
     * would be extremely difficult. This is because block objects need frame
     * pointers, which do not exist on trace, and thus would require magic
     * similar to arguments objects or reification of stack frames. Luckily,
     * for null and flat closures, these blocks are unnecessary.
     *
     * The problem, as exposed by bug 523793, is that this means creating a
     * fixed traversal on trace can be inconsistent with the shorter scope
     * chain used when executing a trace. To address this, perform an initial
     * sweep of the scope chain to make sure that if there is a heavyweight
     * function with a call object, and there is also a block object, the
     * trace is safely aborted.
     *
     * If there is no call object, we must have arrived at the global object,
     * and can bypass the scope chain traversal completely.
     */
    bool foundCallObj = false;
    bool foundBlockObj = false;
    JSObject* searchObj = obj;

    for (;;) {
        if (searchObj != globalObj) {
            if (searchObj->isBlock())
                foundBlockObj = true;
            else if (IsFindableCallObj(searchObj))
                foundCallObj = true;
        }

        if (searchObj == targetObj)
            break;

        searchObj = searchObj->getParent();
        if (!searchObj)
            RETURN_STOP("cannot traverse this scope chain on trace");
    }

    if (!foundCallObj) {
        JS_ASSERT(targetObj == globalObj);
        targetIns = w.nameImmpNonGC(globalObj);
        return RECORD_CONTINUE;
    }

    if (foundBlockObj)
        RETURN_STOP("cannot traverse this scope chain on trace");

    /* There was a call object, or should be a call object now. */
    for (;;) {
        if (obj != globalObj) {
            if (!js_IsCacheableNonGlobalScope(obj))
                RETURN_STOP("scope chain lookup crosses non-cacheable object");

            // We must guard on the shape of all call objects for heavyweight functions
            // that we traverse on the scope chain: if the shape changes, a variable with
            // the same name may have been inserted in the scope chain.
            if (IsFindableCallObj(obj)) {
                if (!exit)
                    exit = snapshot(BRANCH_EXIT);
                guard(true,
                      w.name(w.eqiN(w.ldiObjShape(obj_ins), obj->shape()), "guard_shape"),
                      exit);
            }
        }

        JS_ASSERT(!obj->isBlock());

        if (obj == targetObj)
            break;

        obj = obj->getParent();
        obj_ins = w.ldpObjParent(obj_ins);
    }

    targetIns = obj_ins;
    return RECORD_CONTINUE;
}
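/*
 * Illustrative case: when a name used inside a nested function must be looked
 * up in an enclosing heavyweight function's Call object, the loop above walks
 * parent links from the starting scope object to that Call object, emitting a
 * shape guard for each findable Call object it crosses.
 */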
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDNAME()
{
    JSStackFrame* const fp = cx->fp();
    JSObject* obj;

    if (!fp->isFunctionFrame()) {
        obj = &fp->scopeChain();

#ifdef DEBUG
        JSStackFrame *fp2 = fp;
#endif

        // In global code, fp->scopeChain can only contain blocks whose values
        // are still on the stack. We never use BINDNAME to refer to these.
        while (obj->isBlock()) {
            // The block's values are still on the stack.
#ifdef DEBUG
            // NB: fp2 can't be a generator frame, because !fp->hasFunction.
            while (obj->getPrivate() != fp2) {
                JS_ASSERT(fp2->isEvalOrDebuggerFrame());
                fp2 = fp2->prev();
                if (!fp2)
                    JS_NOT_REACHED("bad stack frame");
            }
#endif
            obj = obj->getParent();
            // Blocks always have parents.
            JS_ASSERT(obj);
        }

        // If anything other than Block, Call, DeclEnv, and the global object
        // is on the scope chain, we shouldn't be recording. Of those, only
        // Block and global can be present in global code.
        JS_ASSERT(obj == globalObj);

        /*
         * The trace is specialized to this global object. Furthermore, we know it
         * is the sole 'global' object on the scope chain: we set globalObj to the
         * scope chain element with no parent, and we reached it starting from the
         * function closure or the current scopeChain, so there is nothing inner to
         * it. Therefore this must be the right base object.
         */
        stack(0, w.immpObjGC(obj));
        return ARECORD_CONTINUE;
    }

    // We can't trace BINDNAME in functions that contain direct calls to eval,
    // as they might add bindings which previously-traced references would have
    // to see.
    if (JSFUN_HEAVYWEIGHT_TEST(fp->fun()->flags))
        RETURN_STOP_A("BINDNAME in heavyweight function.");

    // We don't have the scope chain on trace, so instead we get a start object
    // that is on the scope chain and doesn't skip the target object (the one
    // that contains the property).
    Value *callee = &cx->fp()->calleeValue();
    obj = callee->toObject().getParent();
    if (obj == globalObj) {
        stack(0, w.immpObjGC(obj));
        return ARECORD_CONTINUE;
    }
    LIns *obj_ins = w.ldpObjParent(get(callee));

    // Find the target object.
    JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
    jsid id = ATOM_TO_JSID(atom);
    JSContext *localCx = cx;
    JSObject *obj2 = js_FindIdentifierBase(cx, &fp->scopeChain(), id);
    if (!obj2)
        RETURN_ERROR_A("error in js_FindIdentifierBase");
    if (!TRACE_RECORDER(localCx))
        return ARECORD_ABORTED;
    if (obj2 != globalObj && !obj2->isCall())
        RETURN_STOP_A("BINDNAME on non-global, non-call object");

    // Generate LIR to get to the target object from the start object.
    LIns *obj2_ins;
    CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));

    // If |obj2| is the global object, we can refer to it directly instead of walking up
    // the scope chain. There may still be guards on intervening call objects.
    stack(0, obj2 == globalObj ? w.immpObjGC(obj2) : obj2_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THROW()
{
    return ARECORD_STOP;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IN()
{
    Value& rval = stackval(-1);
    Value& lval = stackval(-2);

    if (rval.isPrimitive())
        RETURN_STOP_A("JSOP_IN on non-object right operand");
    JSObject* obj = &rval.toObject();
    LIns* obj_ins = get(&rval);

    jsid id;
    LIns* x;
    if (lval.isInt32()) {
        if (!js_Int32ToId(cx, lval.toInt32(), &id))
            RETURN_ERROR_A("OOM converting left operand of JSOP_IN to string");

        if (obj->isDenseArray()) {
            // Fast path for dense arrays
            VMSideExit* branchExit = snapshot(BRANCH_EXIT);
            guardDenseArray(obj_ins, branchExit);

            // If our proto has indexed props, all bets are off on our
            // "false" values and out-of-bounds access. Just guard on
            // that.
            CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins,
                                                                snapshot(MISMATCH_EXIT)));

            LIns* idx_ins;
            CHECK_STATUS_A(makeNumberInt32(get(&lval), &idx_ins));
            idx_ins = w.name(idx_ins, "index");
            LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
            LIns* inRange = w.ltui(idx_ins, capacity_ins);

            if (jsuint(lval.toInt32()) < obj->getDenseArrayCapacity()) {
                guard(true, inRange, branchExit);

                LIns *elem_ins = w.getDslotAddress(obj_ins, idx_ins);
                // Need to make sure we don't have a hole
                LIns *is_hole_ins =
                    is_boxed_magic(DSlotsAddress(elem_ins), JS_ARRAY_HOLE);

                // Set x to true (index in our array) if is_hole_ins == 0
                x = w.eqi0(is_hole_ins);
            } else {
                guard(false, inRange, branchExit);
                x = w.immi(0);
            }
        } else {
            LIns* num_ins;
            CHECK_STATUS_A(makeNumberInt32(get(&lval), &num_ins));
            LIns* args[] = { num_ins, obj_ins, cx_ins };
            x = w.call(&js_HasNamedPropertyInt32_ci, args);
        }
    } else if (lval.isString()) {
        if (!js_ValueToStringId(cx, lval, &id))
            RETURN_ERROR_A("left operand of JSOP_IN didn't convert to a string-id");
        LIns* args[] = { get(&lval), obj_ins, cx_ins };
        x = w.call(&js_HasNamedProperty_ci, args);
    } else {
        RETURN_STOP_A("string or integer expected");
    }

    guard(false, w.eqiN(x, JS_NEITHER), OOM_EXIT);

    TraceMonitor &localtm = *traceMonitor;

    JSObject* obj2;
    JSProperty* prop;
    JSBool ok = obj->lookupProperty(cx, id, &obj2, &prop);

    if (!ok)
        RETURN_ERROR_A("obj->lookupProperty failed in JSOP_IN");

    /* lookupProperty can reenter the interpreter and kill |this|. */
    if (!localtm.recorder)
        return ARECORD_ABORTED;

    bool cond = prop != NULL;

    /*
     * The interpreter fuses comparisons and the following branch, so we have
     * to do that here as well.
     */
    jsbytecode *pc = cx->regs->pc;
    fuseIf(pc + 1, cond, x);

    /* If the branch was to a loop header, we may need to close it. */
    if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
        CHECK_STATUS_A(checkTraceEnd(pc + 1));

    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    stack(-2, x);
    return ARECORD_CONTINUE;
}
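/*
 * Example: |1 in [10, , 30]| takes the dense-array fast path above and yields
 * false because slot 1 holds the array-hole magic, while |"x" in obj| goes
 * through the js_HasNamedProperty builtin.
 */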
static JSBool FASTCALL
HasInstanceOnTrace(JSContext* cx, JSObject* ctor, ValueArgType arg)
{
    const Value &argref = ValueArgToConstRef(arg);
    JSBool result = JS_FALSE;
    if (!HasInstance(cx, ctor, &argref, &result))
        SetBuiltinError(cx);
    return result;
}
JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstanceOnTrace, CONTEXT, OBJECT, VALUE,
                     0, ACCSET_STORE_ANY)
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INSTANCEOF()
{
    // If the rhs isn't an object, we are headed for a TypeError.
    Value& ctor = stackval(-1);
    if (ctor.isPrimitive())
        RETURN_STOP_A("non-object on rhs of instanceof");

    Value& val = stackval(-2);
    LIns* val_ins = box_value_for_native_call(val, get(&val));

    enterDeepBailCall();
    LIns* args[] = { val_ins, get(&ctor), cx_ins };
    stack(-2, w.call(&HasInstanceOnTrace_ci, args));
    LIns* status_ins = w.ldiStateField(builtinStatus);
    pendingGuardCondition = w.eqi0(status_ins);
    leaveDeepBailCall();

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEBUGGER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EXCEPTION()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LINENO()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BLOCKCHAIN()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NULLBLOCKCHAIN()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CONDSWITCH()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASE()
{
    CHECK_STATUS_A(strictEquality(true, true));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULT()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMELEM()
{
    /*
     * To quote from jsinterp.cpp's JSOP_ENUMELEM case:
     * Funky: the value to set is under the [obj, id] pair.
     */
    return setElem(-2, -1, -3);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN_FC()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFCONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFVAR()
{
    return ARECORD_STOP;
}
jsatomid
TraceRecorder::getFullIndex(ptrdiff_t pcoff)
{
    jsatomid index = GET_INDEX(cx->regs->pc + pcoff);
    index += atoms - cx->fp()->script()->atomMap.vector;
    return index;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA()
{
    JSFunction* fun;
    fun = cx->fp()->script()->getFunction(getFullIndex());

    if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() != globalObj)
        RETURN_STOP_A("Null closure function object parent must be global object");

    /*
     * Emit code to clone a null closure parented by this recorder's global
     * object, in order to preserve function object evaluation rules observable
     * via identity and mutation. But don't clone if our result is consumed by
     * JSOP_SETMETHOD or JSOP_INITMETHOD, since we optimize away the clone for
     * these combinations and clone only if the "method value" escapes.
     *
     * See jsinterp.cpp, the JSOP_LAMBDA null closure case. The JSOP_SETMETHOD and
     * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
     * must agree with the corresponding break-from-do-while(0) logic there.
     */
    if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == &cx->fp()->scopeChain()) {
        jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs->pc + JSOP_LAMBDA_LENGTH);
        JSOp op2 = JSOp(*pc2);

        if (op2 == JSOP_INITMETHOD) {
            stack(0, w.immpObjGC(FUN_OBJECT(fun)));
            return ARECORD_CONTINUE;
        }

        if (op2 == JSOP_SETMETHOD) {
            Value lval = stackval(-1);

            if (!lval.isPrimitive() && lval.toObject().canHaveMethodBarrier()) {
                stack(0, w.immpObjGC(FUN_OBJECT(fun)));
                return ARECORD_CONTINUE;
            }
        } else if (fun->joinable()) {
            if (op2 == JSOP_CALL) {
                /*
                 * Array.prototype.sort and String.prototype.replace are
                 * optimized as if they are special form. We know that they
                 * won't leak the joined function object in obj, therefore
                 * we don't need to clone that compiler-created function
                 * object for identity/mutation reasons.
                 */
                int iargc = GET_ARGC(pc2);

                /*
                 * Note that we have not yet pushed obj as the final argument,
                 * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
                 * is the callee for this JSOP_CALL.
                 */
                const Value &cref = cx->regs->sp[1 - (iargc + 2)];
                JSObject *callee;

                if (IsFunctionObject(cref, &callee)) {
                    JSFunction *calleeFun = callee->getFunctionPrivate();
                    Native native = calleeFun->maybeNative();

                    if ((iargc == 1 && native == array_sort) ||
                        (iargc == 2 && native == str_replace)) {
                        stack(0, w.immpObjGC(FUN_OBJECT(fun)));
                        return ARECORD_CONTINUE;
                    }
                }
            } else if (op2 == JSOP_NULL) {
                pc2 += JSOP_NULL_LENGTH;
                op2 = JSOp(*pc2);

                if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0) {
                    stack(0, w.immpObjGC(FUN_OBJECT(fun)));
                    return ARECORD_CONTINUE;
                }
            }
        }

        LIns* proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
        LIns* x = w.call(&js_NewNullClosure_ci, args);
        stack(0, x);
        return ARECORD_CONTINUE;
    }

    if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA, JSOP_LAMBDA_LENGTH))
        RETURN_STOP_A("Unable to trace creating lambda in let");

    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
    LIns* scopeChain_ins = scopeChain();
    JS_ASSERT(scopeChain_ins);
    LIns* args[] = { proto_ins, scopeChain_ins, w.nameImmpNonGC(fun), cx_ins };
    LIns* call_ins = w.call(&js_CloneFunctionObject_ci, args);
    guard(false,
          w.name(w.eqp0(call_ins), "guard(js_CloneFunctionObject)"),
          OOM_EXIT);
    stack(0, call_ins);

    return ARECORD_CONTINUE;
}
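/*
 * Illustrative note (not from the original source): the SETMETHOD/INITMETHOD
 * fast paths above correspond to source like
 *
 *     obj.m = function () { ... };          // JSOP_LAMBDA; JSOP_SETMETHOD
 *     var o = { m: function () { ... } };   // JSOP_LAMBDA; JSOP_INITMETHOD
 *
 * where the compiler-created function object is pushed uncloned; the clone
 * is deferred until the "method value" escapes.
 */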
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA_FC()
{
    JSFunction* fun;
    fun = cx->fp()->script()->getFunction(getFullIndex());

    if (FUN_OBJECT(fun)->getParent() != globalObj)
        return ARECORD_STOP;

    if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA_FC, JSOP_LAMBDA_FC_LENGTH))
        RETURN_STOP_A("Unable to trace creating lambda in let");

    LIns* args[] = { scopeChain(), w.immpFunGC(fun), cx_ins };
    LIns* closure_ins = w.call(&js_AllocFlatClosure_ci, args);
    guard(false,
          w.name(w.eqp(closure_ins, w.immpNull()), "guard(js_AllocFlatClosure)"),
          OOM_EXIT);

    JSScript *script = fun->script();
    if (script->bindings.hasUpvars()) {
        JSUpvarArray *uva = script->upvars();
        LIns* upvars_ins = w.getObjPrivatizedSlot(closure_ins,
                                                  JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);

        for (uint32 i = 0, n = uva->length; i < n; i++) {
            Value v;
            LIns* v_ins = upvar(script, uva, i, v);
            if (!v_ins)
                return ARECORD_STOP;

            box_value_into(v, v_ins, FCSlotsAddress(upvars_ins, i));
        }
    }

    stack(0, closure_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLEE()
{
    stack(0, get(&cx->fp()->calleeValue()));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    var(GET_SLOTNO(cx->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFPRIMTOP()
{
    // Traces are type-specialized, including null vs. object, so we need do
    // nothing here. The upstream unbox_value called after valueOf or toString
    // from an imacro (e.g.) will fork the trace for us, allowing us to just
    // follow along mindlessly :-).
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETCALL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FINALLY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NOP()
{
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGSUB()
{
    JSStackFrame* const fp = cx->fp();

    /*
     * The arguments object or its absence in the frame is part of the typemap,
     * so a record-time check suffices here. We don't bother tracing ARGSUB in
     * the case of an arguments object existing, because ARGSUB and to a lesser
     * extent ARGCNT are emitted to avoid arguments object creation.
     */
    if (!fp->hasArgsObj() && !fp->fun()->isHeavyweight()) {
        uintN slot = GET_ARGNO(cx->regs->pc);
        if (slot >= fp->numActualArgs())
            RETURN_STOP_A("can't trace out-of-range arguments");

        stack(0, get(&cx->fp()->canonicalActualArg(slot)));
        return ARECORD_CONTINUE;
    }

    RETURN_STOP_A("can't trace JSOP_ARGSUB hard case");
}
JS_REQUIRES_STACK LIns*
TraceRecorder::guardArgsLengthNotAssigned(LIns* argsobj_ins)
{
    // The following implements JSObject::isArgsLengthOverridden on trace.
    // ARGS_LENGTH_OVERRIDDEN_BIT is set if length was overridden.
    LIns *len_ins = w.getArgsLength(argsobj_ins);
    LIns *ovr_ins = w.andi(len_ins, w.nameImmi(JSObject::ARGS_LENGTH_OVERRIDDEN_BIT));
    guard(true, w.eqi0(ovr_ins), MISMATCH_EXIT);
    return len_ins;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGCNT()
{
    JSStackFrame* const fp = cx->fp();

    if (fp->fun()->flags & JSFUN_HEAVYWEIGHT)
        RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT");

    // argc is fixed on trace, so ideally we would simply generate LIR for
    // constant argc. But the user can mutate arguments.length in the
    // interpreter, so we have to check for that in the trace entry frame.
    // We also have to check that arguments.length has not been mutated
    // at record time, because if so we will generate incorrect constant
    // LIR, which will assert in alu().
    if (fp->hasArgsObj() && fp->argsObj().isArgsLengthOverridden())
        RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
    LIns *a_ins = getFrameObjPtr(fp->addressOfArgs());
    if (callDepth == 0) {
        if (MaybeBranch mbr = w.jt(w.eqp0(a_ins))) {
            guardArgsLengthNotAssigned(a_ins);
            w.label(mbr);
        }
    }
    stack(0, w.immd(fp->numActualArgs()));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
{
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);

    if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == globalObj) {
        LIns* proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
        LIns* x = w.call(&js_NewNullClosure_ci, args);
        var(slot, x);
        return ARECORD_CONTINUE;
    }

    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
{
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOTOX()
{
    return record_JSOP_GOTO();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFEQX()
{
    return record_JSOP_IFEQ();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASEX()
{
    CHECK_STATUS_A(strictEquality(true, true));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULTX()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return record_JSOP_TABLESWITCH();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return InjectStatus(switchop());
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THROWING()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETRVAL()
{
    return ARECORD_STOP;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_REGEXP()
{
    JSStackFrame* const fp = cx->fp();
    JSScript* script = fp->script();
    unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc);

    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(JSProto_RegExp, proto_ins));

    LIns* args[] = {
        proto_ins,
        w.immpObjGC(script->getRegExp(index)),
        cx_ins
    };
    LIns* regex_ins = w.call(&js_CloneRegExpObject_ci, args);
    guard(false, w.eqp0(regex_ins), OOM_EXIT);

    stack(0, regex_ins);
    return ARECORD_CONTINUE;
}
// begin JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANYNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMEPART()
{
    return record_JSOP_STRING();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMECONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDFILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCDATA()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLPI()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETFUNNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return ARECORD_STOP;
}

// end JS_HAS_XML_SUPPORT
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLPROP()
{
    Value& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    LIns* this_ins;
    if (!l.isPrimitive()) {
        obj = &l.toObject();
        obj_ins = get(&l);
        this_ins = obj_ins; // |this| for subsequent call
    } else {
        JSProtoKey protoKey;
        debug_only_stmt(const char* protoname = NULL;)
        if (l.isString()) {
            protoKey = JSProto_String;
            debug_only_stmt(protoname = "String.prototype";)
        } else if (l.isNumber()) {
            protoKey = JSProto_Number;
            debug_only_stmt(protoname = "Number.prototype";)
        } else if (l.isBoolean()) {
            protoKey = JSProto_Boolean;
            debug_only_stmt(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(l.isNull() || l.isUndefined());
            RETURN_STOP_A("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, protoKey, &obj))
            RETURN_ERROR_A("GetClassPrototype failed!");

        obj_ins = w.immpObjGC(obj);
        debug_only_stmt(obj_ins = w.name(obj_ins, protoname);)
        this_ins = get(&l); // use primitive as |this|
    }

    JSObject* obj2;
    PCVal pcval;
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    if (pcval.isNull())
        RETURN_STOP_A("callprop of missing method");

    if (pcval.isFunObj()) {
        if (l.isPrimitive()) {
            JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &pcval.toFunObj());
            if (fun->isInterpreted() && !fun->inStrictMode())
                RETURN_STOP_A("callee does not accept primitive |this|");
        }
        set(&l, w.immpObjGC(&pcval.toFunObj()));
    } else {
        if (l.isPrimitive())
            RETURN_STOP_A("callprop of primitive method");
        JS_ASSERT_IF(pcval.isShape(), !pcval.toShape()->isMethod());
        CHECK_STATUS_A(propTail(obj, obj_ins, obj2, pcval, NULL, NULL, &l));
    }
    stack(0, this_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELDESC()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UINT24()
{
    stack(0, w.immd(GET_UINT24(cx->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->regs->pc);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE()
{
    updateAtoms();
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE0()
{
    updateAtoms();
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLELEM()
{
    return record_JSOP_GETELEM();
}
16071 TraceRecorder::record_JSOP_STOP()
16073 JSStackFrame
*fp
= cx
->fp();
16075 /* A return from callDepth 0 terminates the current loop, except for recursion. */
16076 if (callDepth
== 0 && !fp
->hasImacropc()) {
16077 AUDIT(returnLoopExits
);
16081 if (fp
->hasImacropc()) {
16083 * End of imacro, so return true to the interpreter immediately. The
16084 * interpreter's JSOP_STOP case will return from the imacro, back to
16085 * the pc after the calling op, still in the same JSStackFrame.
16087 updateAtoms(fp
->script());
16088 return ARECORD_CONTINUE
;
16091 putActivationObjects();
16093 if (Probes::callTrackingActive(cx
)) {
16094 LIns
* args
[] = { w
.immi(0), w
.nameImmpNonGC(cx
->fp()->fun()), cx_ins
};
16095 LIns
* call_ins
= w
.call(&functionProbe_ci
, args
);
16096 guard(false, w
.eqi0(call_ins
), MISMATCH_EXIT
);
16100 * We know falling off the end of a constructor returns the new object that
16101 * was passed in via fp->argv[-1], while falling off the end of a function
16102 * returns undefined.
16104 * NB: we do not support script rval (eval, API users who want the result
16105 * of the last expression-statement, debugger API calls).
16107 if (fp
->isConstructing()) {
16108 rval_ins
= get(&fp
->thisValue());
16110 rval_ins
= w
.immiUndefined();
16112 clearReturningFrameFromNativeTracker();
16113 return ARECORD_CONTINUE
;
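/*
 * Illustrative note (not from the original source): for
 *
 *     function Point(x) { this.x = x; }
 *     var p = new Point(1);    // falling off the end yields the new object
 *     var u = Point(1);        // plain call: u is undefined
 *
 * which is why rval_ins above is the |this| value when fp->isConstructing()
 * and the undefined immediate otherwise.
 */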
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETXPROP()
{
    Value& l = stackval(-1);
    if (l.isPrimitive())
        RETURN_STOP_A("primitive-this for GETXPROP?");

    Value* vp;
    LIns* v_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, v_ins, nr));
    stack(-1, v_ins);

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    JSObject* obj;
    obj = cx->fp()->script()->getObject(getFullIndex(0));

    LIns* void_ins = w.immiUndefined();
    for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
        stack(i, void_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GENERATOR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_YIELD()
{
    return ARECORD_STOP;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    uint32_t slot = GET_UINT16(cx->regs->pc);
    JS_ASSERT(cx->fp()->numFixed() <= slot);
    JS_ASSERT(cx->fp()->slots() + slot < cx->regs->sp - 1);
    Value &arrayval = cx->fp()->slots()[slot];
    JS_ASSERT(arrayval.isObject());
    JS_ASSERT(arrayval.toObject().isDenseArray());
    LIns *array_ins = get(&arrayval);
    Value &elt = stackval(-1);
    LIns *elt_ins = box_value_for_native_call(elt, get(&elt));

    enterDeepBailCall();

    LIns *args[] = { elt_ins, array_ins, cx_ins };
    pendingGuardCondition = w.call(&js_ArrayCompPush_tn_ci, args);

    leaveDeepBailCall();
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    LIns* v_ins = stack(-1);
    int n = -1 - GET_UINT16(cx->regs->pc);
    stack(n, v_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    CHECK_STATUS_A(getThis(this_ins));

    /*
     * It's safe to just use cx->fp->thisValue() here because getThis() returns
     * ARECORD_STOP or ARECORD_ERROR if thisv is not available.
     */
    const Value &thisv = cx->fp()->thisValue();
    if (!thisv.isObject())
        RETURN_STOP_A("primitive this for GETTHISPROP");

    CHECK_STATUS_A(getProp(&thisv.toObject(), this_ins));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->regs->pc)));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->regs->pc)));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE1()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE2()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE3()
{
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->regs->pc);
    stack(0, var(slot));
    stack(1, w.immiUndefined());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->regs->pc);
    stack(0, arg(slot));
    stack(1, w.immiUndefined());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDGNAME()
{
    stack(0, w.immpObjGC(globalObj));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, w.immd(GET_INT8(cx->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, w.immd(GET_INT32(cx->regs->pc)));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    Value& l = stackval(-1);
    if (l.isPrimitive()) {
        if (!l.isString())
            RETURN_STOP_A("non-string primitive JSOP_LENGTH unsupported");
        set(&l, w.i2d(w.p2i(w.getStringLength(get(&l)))));
        return ARECORD_CONTINUE;
    }

    JSObject* obj = &l.toObject();
    LIns* obj_ins = get(&l);

    if (obj->isArguments()) {
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (!afp)
            RETURN_STOP_A("can't reach arguments object's frame");

        // We must both check at record time and guard at run time that
        // arguments.length has not been reassigned, redefined or deleted.
        if (obj->isArgsLengthOverridden())
            RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
        LIns* slot_ins = guardArgsLengthNotAssigned(obj_ins);

        // slot_ins is the value from the slot; right-shift to get the length
        // (see JSObject::getArgsInitialLength in jsfun.cpp).
        LIns* v_ins = w.i2d(w.rshiN(slot_ins, JSObject::ARGS_PACKED_BITS_COUNT));
        set(&l, v_ins);
        return ARECORD_CONTINUE;
    }

    LIns* v_ins;
    if (obj->isArray()) {
        if (obj->isDenseArray()) {
            guardDenseArray(obj_ins, BRANCH_EXIT);
        } else {
            JS_ASSERT(obj->isSlowArray());
            guardClass(obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
        }
        v_ins = w.lduiObjPrivate(obj_ins);
        if (obj->getArrayLength() <= JSVAL_INT_MAX) {
            guard(true, w.leui(v_ins, w.immi(JSVAL_INT_MAX)), BRANCH_EXIT);
            v_ins = w.i2d(v_ins);
        } else {
            v_ins = w.ui2d(v_ins);
        }
    } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
        // Ensure array is a typed array and is the same type as what was written
        guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_NORMAL);
        v_ins = w.i2d(w.ldiConstTypedArrayLength(w.ldpObjPrivate(obj_ins)));
    } else {
        if (!obj->isNative())
            RETURN_STOP_A("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return ARECORD_CONTINUE;
}
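/*
 * Note (added for clarity, not in the original source): in the arguments
 * case above, the length slot packs the initial argument count above
 * ARGS_PACKED_BITS_COUNT low flag bits (one of them being
 * ARGS_LENGTH_OVERRIDDEN_BIT), so the recorder guards that the override bit
 * is clear and then right-shifts the slot value to recover the count.
 */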
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, w.immpMagicWhy(JS_ARRAY_HOLE));
    return ARECORD_CONTINUE;
}

AbortableRecordingStatus
TraceRecorder::record_JSOP_TRACE()
{
    return ARECORD_CONTINUE;
}

AbortableRecordingStatus
TraceRecorder::record_JSOP_NOTRACE()
{
    return ARECORD_CONTINUE;
}
JSBool FASTCALL
js_Unbrand(JSContext *cx, JSObject *obj)
{
    return obj->unbrand(cx);
}

JS_DEFINE_CALLINFO_2(extern, BOOL, js_Unbrand, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UNBRAND()
{
    LIns* args_ins[] = { stack(-1), cx_ins };
    LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
    guard(false, w.eqi0(call_ins), OOM_EXIT);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UNBRANDTHIS()
{
    /* In case of primitive this, do nothing. */
    JSStackFrame *fp = cx->fp();
    if (fp->fun()->inStrictMode() && !fp->thisValue().isObject())
        return ARECORD_CONTINUE;

    LIns* this_ins;
    RecordingStatus status = getThis(this_ins);
    if (status != RECORD_CONTINUE)
        return InjectStatus(status);

    LIns* args_ins[] = { this_ins, cx_ins };
    LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
    guard(false, w.eqi0(call_ins), OOM_EXIT);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SHARPINIT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGLOBAL()
{
    uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    stack(0, get(&globalObj->getSlotRef(slot)));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGLOBAL()
{
    uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    Value &v = globalObj->getSlotRef(slot);
    stack(0, get(&v));
    stack(1, w.immiUndefined());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGNAME()
{
    return record_JSOP_NAME();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETGNAME()
{
    return record_JSOP_SETNAME();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GNAMEDEC()
{
    return record_JSOP_NAMEDEC();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GNAMEINC()
{
    return record_JSOP_NAMEINC();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECGNAME()
{
    return record_JSOP_DECNAME();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCGNAME()
{
    return record_JSOP_INCNAME();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGNAME()
{
    return record_JSOP_CALLNAME();
}
#define DBG_STUB(OP)                                                          \
    JS_REQUIRES_STACK AbortableRecordingStatus                                \
    TraceRecorder::record_##OP()                                              \
    {                                                                         \
        RETURN_STOP_A("can't trace " #OP);                                    \
    }

DBG_STUB(JSOP_GETUPVAR_DBG)
DBG_STUB(JSOP_CALLUPVAR_DBG)
DBG_STUB(JSOP_DEFFUN_DBGFC)
DBG_STUB(JSOP_DEFLOCALFUN_DBGFC)
DBG_STUB(JSOP_LAMBDA_DBGFC)
#ifdef DEBUG
/*
 * Print information about entry typemaps and unstable exits for all peers
 * at a given PC.
 */
static void
DumpPeerStability(TraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
                  uint32 argc)
{
    TreeFragment* f;
    bool looped = false;
    unsigned length = 0;

    for (f = LookupLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
        if (!f->code())
            continue;
        debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
        if (looped)
            JS_ASSERT(f->nStackTypes == length);
        for (unsigned i = 0; i < f->nStackTypes; i++)
            debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->stackTypeMap()[i]));
        debug_only_print0(LC_TMRecorder, " GLOBALS=");
        for (unsigned i = 0; i < f->nGlobalTypes(); i++)
            debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->globalTypeMap()[i]));
        debug_only_print0(LC_TMRecorder, "\n");
        UnstableExit* uexit = f->unstableExits;
        while (uexit != NULL) {
            debug_only_print0(LC_TMRecorder, "EXIT ");
            JSValueType* m = uexit->exit->fullTypeMap();
            debug_only_print0(LC_TMRecorder, "STACK=");
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                debug_only_printf(LC_TMRecorder, "%c", TypeToChar(m[i]));
            debug_only_print0(LC_TMRecorder, " GLOBALS=");
            for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
                debug_only_printf(LC_TMRecorder, "%c",
                                  TypeToChar(m[uexit->exit->numStackSlots + i]));
            }
            debug_only_print0(LC_TMRecorder, "\n");
            uexit = uexit->next;
        }
        length = f->nStackTypes;
        looped = true;
    }
}
#endif
#ifdef MOZ_TRACEVIS

FILE* traceVisLogFile = NULL;
JSHashTable *traceVisScriptTable = NULL;

JS_FRIEND_API(bool)
StartTraceVis(const char* filename = "tracevis.dat")
{
    if (traceVisLogFile) {
        // If we're currently recording, first we must stop.
        StopTraceVis();
    }

    traceVisLogFile = fopen(filename, "wb");
    if (!traceVisLogFile)
        return false;

    return true;
}

JS_FRIEND_API(JSBool)
StartTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
{
    JSBool ok;

    if (argc > 0 && JSVAL_IS_STRING(JS_ARGV(cx, vp)[0])) {
        JSString *str = JSVAL_TO_STRING(JS_ARGV(cx, vp)[0]);
        char *filename = js_DeflateString(cx, str->chars(), str->length());
        if (!filename)
            goto error;
        ok = StartTraceVis(filename);
        cx->free(filename);
    } else {
        ok = StartTraceVis();
    }

    if (ok) {
        fprintf(stderr, "started TraceVis recording\n");
        JS_SET_RVAL(cx, vp, JSVAL_VOID);
        return true;
    }

  error:
    JS_ReportError(cx, "failed to start TraceVis recording");
    return false;
}

JS_FRIEND_API(bool)
StopTraceVis()
{
    if (!traceVisLogFile)
        return false;

    fclose(traceVisLogFile); // not worth checking the result
    traceVisLogFile = NULL;

    return true;
}

JS_FRIEND_API(JSBool)
StopTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
{
    JSBool ok = StopTraceVis();

    if (ok) {
        fprintf(stderr, "stopped TraceVis recording\n");
        JS_SET_RVAL(cx, vp, JSVAL_VOID);
    } else {
        JS_ReportError(cx, "TraceVis isn't running");
    }

    return ok;
}

#endif /* MOZ_TRACEVIS */
JS_REQUIRES_STACK void
TraceRecorder::captureStackTypes(unsigned callDepth, JSValueType* typeMap)
{
    CaptureTypesVisitor capVisitor(cx, typeMap, !!oracle);
    VisitStackSlots(capVisitor, cx, callDepth);
}

JS_REQUIRES_STACK void
TraceRecorder::determineGlobalTypes(JSValueType* typeMap)
{
    DetermineTypesVisitor detVisitor(*this, typeMap);
    VisitGlobalSlots(detVisitor, cx, *tree->globalSlots);
}
#ifdef JS_METHODJIT

class AutoRetBlacklist
{
    jsbytecode* pc;
    bool* blacklist;

  public:
    AutoRetBlacklist(jsbytecode* pc, bool* blacklist)
      : pc(pc), blacklist(blacklist)
    { }

    ~AutoRetBlacklist()
    {
        *blacklist = IsBlacklisted(pc);
    }
};
JS_REQUIRES_STACK TracePointAction
RecordTracePoint(JSContext* cx, uintN& inlineCallCount, bool* blacklist, bool execAllowed)
{
    JSStackFrame* fp = cx->fp();
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    jsbytecode* pc = cx->regs->pc;

    JS_ASSERT(!TRACE_RECORDER(cx));
    JS_ASSERT(!tm->profile);

    JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;

    AutoRetBlacklist autoRetBlacklist(pc, blacklist);

    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
        Backoff(cx, pc);
        return TPA_Nothing;
    }

    uint32 argc = entryFrameArgc(cx);
    TreeFragment* tree = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);

    debug_only_printf(LC_TMTracer,
                      "Looking for compat peer %d@%d, from %p (ip: %p)\n",
                      js_FramePCToLineNumber(cx, cx->fp()),
                      FramePCOffset(cx, cx->fp()), (void*)tree, tree->ip);

    if (tree->code() || tree->peer) {
        uintN count;
        TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, tree, count);
        if (match) {
            VMSideExit* lr = NULL;
            VMSideExit* innermostNestedGuard = NULL;

            if (!execAllowed) {
                /* We've already compiled a trace for it, but we don't want to use that trace. */
                Blacklist((jsbytecode*)tree->root->ip);
                return TPA_Nothing;
            }

            /* Best case - just go and execute. */
            if (!ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard, &lr))
                return TPA_Error;

            if (!lr)
                return TPA_Nothing;

            switch (lr->exitType) {
              case UNSTABLE_LOOP_EXIT:
                if (!AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL, 0))
                    return TPA_RanStuff;
                break;

              case MUL_ZERO_EXIT:
              case OVERFLOW_EXIT:
                if (lr->exitType == MUL_ZERO_EXIT)
                    tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
                else
                    tm->oracle->markInstructionUndemotable(cx->regs->pc);
                /* FALL THROUGH */
              case BRANCH_EXIT:
              case CASE_EXIT:
                if (!AttemptToExtendTree(cx, lr, NULL, NULL, NULL))
                    return TPA_RanStuff;
                break;

              case LOOP_EXIT:
                if (!innermostNestedGuard)
                    return TPA_RanStuff;
                if (!AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL))
                    return TPA_RanStuff;
                break;

              default:
                return TPA_RanStuff;
            }

            JS_ASSERT(TRACE_RECORDER(cx));
            goto interpret;
        }

        if (count >= MAXPEERS) {
            debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
            Blacklist((jsbytecode*)tree->root->ip);
            return TPA_Nothing;
        }
    }

    if (++tree->hits() < HOTLOOP)
        return TPA_Nothing;
    if (!ScopeChainCheck(cx, tree))
        return TPA_Nothing;
    if (!RecordTree(cx, tree->first, NULL, NULL, 0, globalSlots))
        return TPA_Nothing;

  interpret:
    JS_ASSERT(TRACE_RECORDER(cx));

    /* Locked and loaded with a recorder. Ask the interpreter to go run some code. */
    if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
        return TPA_Error;

    JS_ASSERT(!cx->isExceptionPending());

    return TPA_RanStuff;
}
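/*
 * Note (added for clarity, not in the original source): the three outcomes
 * above are TPA_Nothing (nothing hot or compatible happened, the caller just
 * keeps running), TPA_RanStuff (a compiled tree was executed and/or a
 * recorder ran under JSINTERP_RECORD), and TPA_Error (ExecuteTree or
 * Interpret failed).  ~AutoRetBlacklist re-checks the blacklist on every
 * path out of the function.
 */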
LoopProfile::LoopProfile(JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom)
  : entryScript(entryfp->script()),
    entryfp(entryfp),
    top(top),
    bottom(bottom),
    hits(0),
    undecided(false),
    unprofitable(false)
{
    reset();
}

void
LoopProfile::reset()
{
    profiled = false;
    traceOK = false;
    execOK = false;
    numAllOps = 0;
    numSelfOps = 0;
    numSelfOpsMult = 0;
    branchMultiplier = 1;
    shortLoop = false;
    maybeShortLoop = false;
    numInnerLoops = 0;
    loopStackDepth = 0;

    PodArrayZero(allOps);
    PodArrayZero(selfOps);
}
MonitorResult
LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount)
{
    if (cx->regs->pc == top) {
        debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n");
        decide(cx);
    } else {
        /* Record an inner loop invocation. */
        JSStackFrame *fp = cx->fp();
        jsbytecode *pc = cx->regs->pc;
        bool found = false;

        /* We start with the most deeply nested one first, since it gets hit most often. */
        for (int i = int(numInnerLoops)-1; i >= 0; i--) {
            if (innerLoops[i].entryfp == fp && innerLoops[i].top == pc) {
                innerLoops[i].iters++;
                found = true;
                break;
            }
        }

        if (!found && numInnerLoops < PROFILE_MAX_INNER_LOOPS)
            innerLoops[numInnerLoops++] = InnerLoop(fp, pc, NULL);
    }

    return MONITOR_NOT_RECORDING;
}

static const uintN PROFILE_HOTLOOP = 61;
static const uintN MAX_PROFILE_OPS = 4096;

static jsbytecode *
GetLoopBottom(JSContext *cx)
{
    return GetLoopBottom(cx, cx->regs->pc);
}
static LoopProfile *
LookupOrAddProfile(JSContext *cx, TraceMonitor *tm, void** traceData, uintN *traceEpoch)
{
    LoopProfile *prof;

    /*
     * We try to keep a pointer to the loop profile inside the TRACE IC.
     * We also keep a pointer inside a hashtable for when we need to
     * look up nested loops (or when ICs are disabled).
     *
     * Memory for the profile is allocated in the dataAlloc for the
     * trace monitor. Since this thing can get flushed periodically,
     * we use epochs to decide if the profile in the MIC is valid, as
     * follows. Every time the trace monitor is flushed,
     * |tm->flushEpoch| is incremented. When storing the profile in
     * the IC, we store the current |tm->flushEpoch| along with it.
     * Before pulling a profile out of the IC, we check that its
     * stored epoch is still up-to-date with |tm->flushEpoch|.
     * This ensures that no flush has happened in between.
     */

#if JS_MONOIC
    if (*traceData && *traceEpoch == tm->flushEpoch) {
        prof = (LoopProfile *)*traceData;
    } else {
        jsbytecode* pc = cx->regs->pc;
        jsbytecode* bottom = GetLoopBottom(cx);
        if (!bottom)
            return NULL;
        prof = new (*tm->dataAlloc) LoopProfile(cx->fp(), pc, bottom);
        *traceData = prof;
        *traceEpoch = tm->flushEpoch;
        tm->loopProfiles->put(pc, prof);
    }
#else
    LoopProfileMap &table = *tm->loopProfiles;
    jsbytecode* pc = cx->regs->pc;
    if (LoopProfileMap::AddPtr p = table.lookupForAdd(pc)) {
        prof = p->value;
    } else {
        jsbytecode* bottom = GetLoopBottom(cx);
        if (!bottom)
            return NULL;
        prof = new (*tm->dataAlloc) LoopProfile(cx->fp(), pc, bottom);
        table.add(p, pc, prof);
    }
#endif

    return prof;
}
static LoopProfile *
LookupLoopProfile(JSContext *cx, jsbytecode *pc)
{
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    LoopProfileMap &table = *tm->loopProfiles;
    if (LoopProfileMap::Ptr p = table.lookup(pc)) {
        JS_ASSERT(p->value->top == pc);
        return p->value;
    } else {
        return NULL;
    }
}
JS_REQUIRES_STACK TracePointAction
MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist,
                  void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits)
{
    if (!cx->profilingEnabled)
        return RecordTracePoint(cx, inlineCallCount, blacklist, true);

    *blacklist = false;

    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);

    /*
     * We may have re-entered Interpret while profiling. We don't profile
     * the nested invocation.
     */
    if (tm->profile)
        return TPA_Nothing;

    jsbytecode* pc = cx->regs->pc;
    LoopProfile *prof = LookupOrAddProfile(cx, tm, traceData, traceEpoch);
    if (!prof)
        return TPA_Nothing;

    prof->hits += hits;
    if (prof->hits < PROFILE_HOTLOOP)
        return TPA_Nothing;

    AutoRetBlacklist autoRetBlacklist(cx->regs->pc, blacklist);

    if (prof->profiled) {
        if (prof->traceOK) {
            return RecordTracePoint(cx, inlineCallCount, blacklist, prof->execOK);
        } else {
            return TPA_Nothing;
        }
    }

    debug_only_printf(LC_TMProfiler, "Profiling at line %d\n",
                      js_FramePCToLineNumber(cx, cx->fp()));

    tm->profile = prof;

    if (!Interpret(cx, cx->fp(), inlineCallCount, JSINTERP_PROFILE))
        return TPA_Error;

    JS_ASSERT(!cx->isExceptionPending());

    /* Look it up again since a reset may have happened during Interpret. */
    prof = LookupLoopProfile(cx, pc);
    if (prof && prof->undecided) {
        *loopCounter = 3000;
        prof->reset();
    }

    return TPA_RanStuff;
}
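/*
 * Note (added for clarity, not in the original source): a loop is profiled
 * once its accumulated |hits| reach PROFILE_HOTLOOP (61).  One pass is then
 * run under JSINTERP_PROFILE with tm->profile set; decide() records the
 * verdict, after which subsequent hits either hand off to RecordTracePoint()
 * (traceOK) or report TPA_Nothing.
 */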
/*
 * Returns true if pc is within the given loop.
 * If we're in a different script, then we must have come from
 * a call instruction within the loop (since we check if we're within
 * the loop before each instruction) so we're still in the loop.
 */
template<class T>
static inline bool
PCWithinLoop(JSStackFrame *fp, jsbytecode *pc, T& loop)
{
    return fp > loop.entryfp || (fp == loop.entryfp && pc >= loop.top && pc <= loop.bottom);
}
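/*
 * Note (added for clarity; the ordering assumption is not spelled out in the
 * original source): comparing frame pointers with > relies on frames pushed
 * later being allocated at higher addresses, so a callee frame reached from
 * inside the loop still counts as "within" it even though its pc is in a
 * different script.
 */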
LoopProfile::ProfileAction
LoopProfile::profileOperation(JSContext* cx, JSOp op)
{
    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    if (profiled) {
        tm->profile = NULL;
        return ProfComplete;
    }

    jsbytecode *pc = cx->regs->pc;
    JSStackFrame *fp = cx->fp();
    JSScript *script = fp->script();

    if (!PCWithinLoop(fp, pc, *this)) {
        debug_only_printf(LC_TMProfiler, "Profiling complete (loop exit) at line %u\n",
                          js_FramePCToLineNumber(cx, cx->fp()));
        tm->profile->decide(cx);
        tm->profile = NULL;
        return ProfComplete;
    }

    while (loopStackDepth > 0 && !PCWithinLoop(fp, pc, loopStack[loopStackDepth-1])) {
        debug_only_print0(LC_TMProfiler, "Profiler: Exiting inner loop\n");
        loopStackDepth--;
    }

    if (op == JSOP_TRACE || op == JSOP_NOTRACE) {
        if (pc != top && (loopStackDepth == 0 || pc != loopStack[loopStackDepth-1].top)) {
            if (loopStackDepth == PROFILE_MAX_INNER_LOOPS) {
                debug_only_print0(LC_TMProfiler, "Profiling complete (maxnest)\n");
                tm->profile->decide(cx);
                tm->profile = NULL;
                return ProfComplete;
            }

            debug_only_printf(LC_TMProfiler, "Profiler: Entering inner loop at line %d\n",
                              js_FramePCToLineNumber(cx, cx->fp()));
            loopStack[loopStackDepth++] = InnerLoop(fp, pc, GetLoopBottom(cx));
        }
    }

    numAllOps++;
    if (loopStackDepth == 0) {
        numSelfOps++;
        numSelfOpsMult += branchMultiplier;
    }

    if (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_MUL || op == JSOP_DIV) {
        Value& v1 = cx->regs->sp[-1];
        Value& v2 = cx->regs->sp[-2];

        /* If either operand is a double, treat it as a floating-point op. */
        if (v1.isDouble() || v2.isDouble())
            increment(OP_FLOAT);
        else if (v1.isInt32() || v2.isInt32())
            increment(OP_INT);
    }

    if (op == JSOP_EQ || op == JSOP_NE)
        increment(OP_EQ);

    if (op == JSOP_BITOR || op == JSOP_BITXOR || op == JSOP_BITAND
        || op == JSOP_LSH || op == JSOP_RSH || op == JSOP_URSH || op == JSOP_BITNOT)
    {
        increment(OP_BIT);
    }

    if (op == JSOP_EVAL)
        increment(OP_EVAL);

    if (op == JSOP_NEW)
        increment(OP_NEW);

    if (op == JSOP_GETELEM || op == JSOP_SETELEM) {
        Value& lval = cx->regs->sp[op == JSOP_GETELEM ? -2 : -3];
        if (lval.isObject() && js_IsTypedArray(&lval.toObject()))
            increment(OP_TYPED_ARRAY);
        else if (lval.isObject() && lval.toObject().isDenseArray() && op == JSOP_GETELEM)
            increment(OP_ARRAY_READ);
    }

    if (op == JSOP_CALL) {
        increment(OP_CALL);

        uintN argc = GET_ARGC(cx->regs->pc);
        Value &v = cx->regs->sp[-((int)argc + 2)];
        JSObject *callee;
        if (IsFunctionObject(v, &callee)) {
            JSFunction *fun = callee->getFunctionPrivate();
            if (fun->isInterpreted()) {
                if (cx->fp()->isFunctionFrame() && fun == cx->fp()->fun())
                    increment(OP_RECURSIVE);
            } else {
                js::Native native = fun->u.n.native;
                if (js_IsMathFunction(JS_JSVALIFY_NATIVE(native)))
                    increment(OP_FLOAT);
            }
        }
    }

    if (op == JSOP_CALLPROP && loopStackDepth == 0)
        branchMultiplier *= mjit::GetCallTargetCount(script, pc);

    if (op == JSOP_TABLESWITCH) {
        jsint low = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN);
        jsint high = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN*2);
        branchMultiplier *= high - low + 1;
    }

    if (op == JSOP_LOOKUPSWITCH)
        branchMultiplier *= GET_UINT16(pc + JUMP_OFFSET_LEN);

    if (numAllOps >= MAX_PROFILE_OPS) {
        debug_only_print0(LC_TMProfiler, "Profiling complete (maxops)\n");
        tm->profile->decide(cx);
        tm->profile = NULL;
        return ProfComplete;
    }

    /* These are the places where the interpreter skips over branches. */
    jsbytecode *testPC = cx->regs->pc;
    if (op == JSOP_EQ || op == JSOP_NE || op == JSOP_LT || op == JSOP_GT
        || op == JSOP_LE || op == JSOP_GE || op == JSOP_IN || op == JSOP_MOREITER)
    {
        const JSCodeSpec *cs = &js_CodeSpec[op];
        ptrdiff_t oplen = cs->length;
        JS_ASSERT(oplen != -1);

        if (cx->regs->pc - script->code + oplen < ptrdiff_t(script->length))
            if (cx->regs->pc[oplen] == JSOP_IFEQ || cx->regs->pc[oplen] == JSOP_IFNE)
                testPC = cx->regs->pc + oplen;
    }

    /* Check if we're exiting the loop being profiled. */
    JSOp testOp = js_GetOpcode(cx, script, testPC);
    if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE || testOp == JSOP_GOTO
        || testOp == JSOP_AND || testOp == JSOP_OR)
    {
        ptrdiff_t len = GET_JUMP_OFFSET(testPC);
        if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)) {
            StackValue v = stackAt(-1);
            if (v.hasValue && v.value < 8)
                shortLoop = true;
        }

        if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)
            && cx->regs->sp[-2].isInt32() && cx->regs->sp[-2].toInt32() < 16)
        {
            maybeShortLoop = true;
        }

        if (testOp != JSOP_GOTO && len > 0) {
            bool isConst;
            if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE)
                isConst = stackAt(-1).isConst && stackAt(-2).isConst;
            else
                isConst = stackAt(-1).isConst;

            increment(OP_FWDJUMP);
            if (loopStackDepth == 0 && !isConst)
                branchMultiplier *= 2;
        }
    }

    if (op == JSOP_INT8) {
        stackPush(StackValue(true, GET_INT8(cx->regs->pc)));
    } else if (op == JSOP_STRING) {
        stackPush(StackValue(true));
    } else if (op == JSOP_TYPEOF || op == JSOP_TYPEOFEXPR) {
        stackPush(StackValue(true));
    } else if (op == JSOP_EQ || op == JSOP_NE) {
        StackValue v1 = stackAt(-1);
        StackValue v2 = stackAt(-2);
        stackPush(StackValue(v1.isConst && v2.isConst));
    } else if (op == JSOP_AND) {
        bool b = !!js_ValueToBoolean(cx->regs->sp[-1]);
        StackValue v = stackAt(-1);
        if (b)
            stackPop();
    } else {
        stackClear();
    }

    return ProfContinue;
}
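/*
 * Worked example (illustrative, not from the original source): every
 * non-constant forward branch seen at loop depth 0 doubles branchMultiplier,
 * and each subsequent op adds the current multiplier to numSelfOpsMult.  So
 * after roughly 17 such branches the multiplier exceeds 2^17 = 131072, and
 * ops weighted at that level quickly push numSelfOpsMult past the
 * numSelfOps*100000 "too branchy" cutoff tested by isCompilationExpensive()
 * below.
 */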
/*
 * Returns true if the loop would probably take a long time to
 * compile.
 */
bool
LoopProfile::isCompilationExpensive(JSContext *cx, uintN depth)
{
    if (depth == 0)
        return true;

    /* Too many ops to compile? */
    if (numSelfOps == MAX_PROFILE_OPS)
        return true;

    /* Is the code too branchy? */
    if (numSelfOpsMult > numSelfOps*100000)
        return true;

    /* Ensure that inner loops aren't too expensive. */
    for (uintN i = 0; i < numInnerLoops; i++) {
        LoopProfile *prof = LookupLoopProfile(cx, innerLoops[i].top);
        if (!prof || prof->isCompilationExpensive(cx, depth-1))
            return true;
    }

    return false;
}

/*
 * This function recognizes loops that are short and that contain
 * jumps. The tracer does badly with these loops because it
 * needs to do a lot of side exits, which are somewhat
 * expensive.
 */
bool
LoopProfile::isCompilationUnprofitable(JSContext *cx, uintN goodOps)
{
    if (goodOps <= 22 && allOps[OP_FWDJUMP])
        return true;

    /* Ensure that inner loops aren't fleeting. */
    for (uintN i = 0; i < numInnerLoops; i++) {
        LoopProfile *prof = LookupLoopProfile(cx, innerLoops[i].top);
        if (!prof || prof->unprofitable)
            return true;
    }

    return false;
}
/* After profiling is done, this method decides whether to trace the loop. */
void
LoopProfile::decide(JSContext *cx)
{
    bool wasUndecided = undecided;
    bool wasTraceOK = traceOK;

    profiled = true;
    traceOK = false;
    undecided = false;

#ifdef DEBUG
    uintN line = js_PCToLineNumber(cx, entryScript, top);

    debug_only_printf(LC_TMProfiler, "LOOP %s:%d\n", entryScript->filename, line);

    for (uintN i = 0; i < numInnerLoops; i++) {
        InnerLoop &loop = innerLoops[i];
        if (LoopProfile *prof = LookupLoopProfile(cx, loop.top)) {
            uintN line = js_PCToLineNumber(cx, prof->entryScript, prof->top);
            debug_only_printf(LC_TMProfiler, "NESTED %s:%d (%d iters)\n",
                              prof->entryScript->filename, line, loop.iters);
        }
    }
    debug_only_printf(LC_TMProfiler, "FEATURE float %d\n", allOps[OP_FLOAT]);
    debug_only_printf(LC_TMProfiler, "FEATURE int %d\n", allOps[OP_INT]);
    debug_only_printf(LC_TMProfiler, "FEATURE bit %d\n", allOps[OP_BIT]);
    debug_only_printf(LC_TMProfiler, "FEATURE equality %d\n", allOps[OP_EQ]);
    debug_only_printf(LC_TMProfiler, "FEATURE eval %d\n", allOps[OP_EVAL]);
    debug_only_printf(LC_TMProfiler, "FEATURE new %d\n", allOps[OP_NEW]);
    debug_only_printf(LC_TMProfiler, "FEATURE call %d\n", allOps[OP_CALL]);
    debug_only_printf(LC_TMProfiler, "FEATURE arrayread %d\n", allOps[OP_ARRAY_READ]);
    debug_only_printf(LC_TMProfiler, "FEATURE typedarray %d\n", allOps[OP_TYPED_ARRAY]);
    debug_only_printf(LC_TMProfiler, "FEATURE fwdjump %d\n", allOps[OP_FWDJUMP]);
    debug_only_printf(LC_TMProfiler, "FEATURE recursive %d\n", allOps[OP_RECURSIVE]);
    debug_only_printf(LC_TMProfiler, "FEATURE shortLoop %d\n", shortLoop);
    debug_only_printf(LC_TMProfiler, "FEATURE maybeShortLoop %d\n", maybeShortLoop);
    debug_only_printf(LC_TMProfiler, "FEATURE numAllOps %d\n", numAllOps);
    debug_only_printf(LC_TMProfiler, "FEATURE selfOps %d\n", numSelfOps);
    debug_only_printf(LC_TMProfiler, "FEATURE selfOpsMult %g\n", numSelfOpsMult);
#endif

    if (count(OP_RECURSIVE)) {
        debug_only_print0(LC_TMProfiler, "NOTRACE: recursive\n");
    } else if (count(OP_EVAL)) {
        debug_only_print0(LC_TMProfiler, "NOTRACE: eval\n");
    } else if (numInnerLoops > 7) {
        debug_only_print0(LC_TMProfiler, "NOTRACE: >3 inner loops\n");
    } else if (shortLoop) {
        debug_only_print0(LC_TMProfiler, "NOTRACE: short\n");
    } else if (isCompilationExpensive(cx, 4)) {
        debug_only_print0(LC_TMProfiler, "NOTRACE: expensive\n");
    } else if (maybeShortLoop && numInnerLoops < 2) {
        if (wasUndecided) {
            debug_only_print0(LC_TMProfiler, "NOTRACE: maybe short\n");
        } else {
            debug_only_print0(LC_TMProfiler, "UNDECIDED: maybe short\n");
            undecided = true; /* Profile the loop again to see if it's still short. */
        }
    } else {
        uintN goodOps = 0;

        /* The tracer handles these ops well because of type specialization. */
        goodOps += count(OP_FLOAT)*10 + count(OP_BIT)*11 + count(OP_INT)*5 + count(OP_EQ)*15;

        /* The tracer handles these ops well because of inlining. */
        goodOps += (count(OP_CALL) + count(OP_NEW))*20;

        /* The tracer specializes typed array access. */
        goodOps += count(OP_TYPED_ARRAY)*10;

        /* The methodjit is faster at array writes, but the tracer is faster for reads. */
        goodOps += count(OP_ARRAY_READ)*15;

        debug_only_printf(LC_TMProfiler, "FEATURE goodOps %u\n", goodOps);

        unprofitable = isCompilationUnprofitable(cx, goodOps);
        if (unprofitable)
            debug_only_print0(LC_TMProfiler, "NOTRACE: unprofitable\n");
        else if (goodOps >= numAllOps)
            traceOK = true;
    }

    debug_only_printf(LC_TMProfiler, "TRACE %s:%d = %d\n", entryScript->filename, line, traceOK);

    if (traceOK) {
        /* Unblacklist the inner loops. */
        for (uintN i = 0; i < numInnerLoops; i++) {
            InnerLoop &loop = innerLoops[i];
            LoopProfile *prof = LookupLoopProfile(cx, loop.top);
            if (prof) {
                /*
                 * Note that execOK for the inner loop is left unchanged. So even
                 * if we trace the inner loop, we will never call that trace
                 * on its own. We'll only call it from this trace.
                 */
                prof->traceOK = true;
                if (IsBlacklisted(loop.top)) {
                    debug_only_printf(LC_TMProfiler, "Unblacklisting at %d\n",
                                      js_PCToLineNumber(cx, prof->entryScript, loop.top));
                    Unblacklist(prof->entryScript, loop.top);
                }
            }
        }
    }

    execOK = traceOK;
    traceOK = wasTraceOK || traceOK;

    if (!traceOK && !undecided) {
        debug_only_printf(LC_TMProfiler, "Blacklisting at %d\n", line);
        Blacklist(top);
    }

    debug_only_print0(LC_TMProfiler, "\n");
}
JS_REQUIRES_STACK MonitorResult
MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
{
    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
    if (tm->profile)
        return tm->profile->profileLoopEdge(cx, inlineCallCount);
    else
        return RecordLoopEdge(cx, inlineCallCount);
}

void
AbortProfiling(JSContext *cx)
{
    debug_only_print0(LC_TMProfiler, "Profiling complete (aborted)\n");
    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
    tm->profile->profiled = true;
    tm->profile->traceOK = false;
    tm->profile->execOK = false;
    tm->profile = NULL;
}
#else /* JS_METHODJIT */

JS_REQUIRES_STACK MonitorResult
MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
{
    return RecordLoopEdge(cx, inlineCallCount);
}

#endif /* JS_METHODJIT */
uintN
GetHotloop(JSContext *cx)
{
#ifdef JS_METHODJIT
    if (cx->profilingEnabled)
        return PROFILE_HOTLOOP;
    else
#endif
        return 1;
}

} /* namespace js */