/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=4 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   Andreas Gal <gal@mozilla.com>
 *   Mike Shaver <shaver@mozilla.org>
 *   David Anderson <danderson@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
43 #include "jsbit.h" // low-level (NSPR-based) headers next
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
50 #define alloca _alloca
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
75 #include "jsstaticcheck.h"
79 #include "jsatominlines.h"
80 #include "jsscopeinlines.h"
81 #include "jsscriptinlines.h"
83 #include "jsautooplen.h" // generated headers last
84 #include "imacros.c.out"
86 using namespace nanojit
;
#if JS_HAS_XML_SUPPORT
#define RETURN_VALUE_IF_XML(val, ret)                                         \
    JS_BEGIN_MACRO                                                            \
        if (!JSVAL_IS_PRIMITIVE(val) &&                                       \
            OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(val))) {                  \
            RETURN_VALUE("xml detected", ret);                                \
        }                                                                     \
    JS_END_MACRO
#else
#define RETURN_IF_XML(val, ret) ((void) 0)
#endif

#define RETURN_IF_XML_A(val) RETURN_VALUE_IF_XML(val, ARECORD_STOP)
#define RETURN_IF_XML(val)   RETURN_VALUE_IF_XML(val, RECORD_STOP)
/*
 * Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
 * the type. What you want to use is JSVAL_IS_SPECIAL(x) and then handle the
 * undefined case properly (bug 457363).
 */
#undef JSVAL_IS_BOOLEAN
#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)

JS_STATIC_ASSERT(sizeof(JSTraceType) == 1);

/* Map to translate a type tag into a printable representation. */
static const char typeChar[] = "OIDXSNBF";
static const char tagChar[]  = "OIDISIBI";
/* Blacklist parameters. */

/*
 * Number of iterations of a loop where we start tracing. That is, we don't
 * start tracing until the beginning of the HOTLOOP-th iteration.
 */
#define HOTLOOP 2

/* Attempt recording this many times before blacklisting permanently. */
#define BL_ATTEMPTS 2

/* Skip this many hits before attempting recording again, after an aborted attempt. */
#define BL_BACKOFF 32

/* Number of times we wait to exit on a side exit before we try to extend the tree. */
#define HOTEXIT 1

/* Number of times we try to extend the tree along a side exit. */
#define MAXEXIT 3

/* Maximum number of peer trees allowed. */
#define MAXPEERS 9

/* Max number of hits to a RECURSIVE_UNLINKED exit before we trash the tree. */
#define MAX_RECURSIVE_UNLINK_HITS 64

/* Max call depths for inlining. */
#define MAX_CALLDEPTH 10

/* Max number of slots in a table-switch. */
#define MAX_TABLE_SWITCH 256

/* Max memory needed to rebuild the interpreter stack when falling off trace. */
#define MAX_INTERP_STACK_BYTES                                                \
    (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
     MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
     sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */

/* Max number of branches per tree. */
#define MAX_BRANCHES 32
#define CHECK_STATUS(expr)                                                    \
    JS_BEGIN_MACRO                                                            \
        RecordingStatus _status = (expr);                                     \
        if (_status != RECORD_CONTINUE)                                       \
            return _status;                                                   \
    JS_END_MACRO

#define CHECK_STATUS_A(expr)                                                  \
    JS_BEGIN_MACRO                                                            \
        AbortableRecordingStatus _status = InjectStatus((expr));              \
        if (_status != ARECORD_CONTINUE)                                      \
            return _status;                                                   \
    JS_END_MACRO

#ifdef JS_JIT_SPEW
#define RETURN_VALUE(msg, value)                                              \
    JS_BEGIN_MACRO                                                            \
        debug_only_printf(LC_TMAbort, "trace stopped: %d: %s\n", __LINE__, (msg)); \
        return (value);                                                       \
    JS_END_MACRO
#else
#define RETURN_VALUE(msg, value) return (value)
#endif

#define RETURN_STOP(msg)     RETURN_VALUE(msg, RECORD_STOP)
#define RETURN_STOP_A(msg)   RETURN_VALUE(msg, ARECORD_STOP)
#define RETURN_ERROR(msg)    RETURN_VALUE(msg, RECORD_ERROR)
#define RETURN_ERROR_A(msg)  RETURN_VALUE(msg, ARECORD_ERROR)
#ifdef JS_JIT_SPEW
struct __jitstats {
#define JITSTAT(x) uint64 x;
#include "jitstats.tbl"
#undef JITSTAT
} jitstats = { 0LL, };

JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);

enum jitstat_ids {
#define JITSTAT(x) STAT ## x ## ID,
#include "jitstats.tbl"
#undef JITSTAT
    STAT_IDS_TOTAL
};

static JSPropertySpec jitstats_props[] = {
#define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
#include "jitstats.tbl"
#undef JITSTAT
    { 0 }
};
static JSBool
jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
{
    int index = -1;

    if (JSVAL_IS_STRING(id)) {
        JSString* str = JSVAL_TO_STRING(id);
        if (strcmp(JS_GetStringBytes(str), "HOTLOOP") == 0) {
            *vp = INT_TO_JSVAL(HOTLOOP);
            return JS_TRUE;
        }
    }

    if (JSVAL_IS_INT(id))
        index = JSVAL_TO_INT(id);

    uint64 result = 0;
    switch (index) {
#define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
#include "jitstats.tbl"
#undef JITSTAT
      default:
        *vp = JSVAL_VOID;
        return JS_TRUE;
    }

    if (result < JSVAL_INT_MAX) {
        *vp = INT_TO_JSVAL(jsint(result));
        return JS_TRUE;
    }
    char retstr[64];
    JS_snprintf(retstr, sizeof retstr, "%llu", result);
    *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
    return JS_TRUE;
}
JSClass jitstats_class = {
    "jitstats",
    0,
    JS_PropertyStub,       JS_PropertyStub,
    jitstats_getProperty,  JS_PropertyStub,
    JS_EnumerateStub,      JS_ResolveStub,
    JS_ConvertStub,        NULL,
    JSCLASS_NO_OPTIONAL_MEMBERS
};

void
js_InitJITStatsClass(JSContext *cx, JSObject *glob)
{
    JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
}

#define AUDIT(x) (jitstats.x++)
#else
#define AUDIT(x) ((void)0)
#endif /* JS_JIT_SPEW */
/*
 * INS_CONSTPTR can be used to embed arbitrary pointers into the native code. It should not
 * be used directly to embed GC thing pointers. Instead, use the INS_CONSTOBJ/FUN/STR/SPROP
 * variants which ensure that the embedded pointer will be kept alive across GCs.
 */

#define INS_CONST(c)          addName(lir->insImm(c), #c)
#define INS_CONSTPTR(p)       addName(lir->insImmPtr(p), #p)
#define INS_CONSTWORD(v)      addName(lir->insImmPtr((void *) (v)), #v)
#define INS_CONSTVAL(v)       addName(insImmVal(v), #v)
#define INS_CONSTOBJ(obj)     addName(insImmObj(obj), #obj)
#define INS_CONSTFUN(fun)     addName(insImmFun(fun), #fun)
#define INS_CONSTSTR(str)     addName(insImmStr(str), #str)
#define INS_CONSTSPROP(sprop) addName(insImmSprop(sprop), #sprop)
#define INS_ATOM(atom)        INS_CONSTSTR(ATOM_TO_STRING(atom))
#define INS_NULL()            INS_CONSTPTR(NULL)
#define INS_VOID()            INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))

static avmplus::AvmCore s_core = avmplus::AvmCore();
static avmplus::AvmCore* core = &s_core;
/* Allocator SPI implementation. */

void*
nanojit::Allocator::allocChunk(size_t nbytes)
{
    VMAllocator *vma = (VMAllocator*)this;
    JS_ASSERT(!vma->outOfMemory());
    void *p = calloc(1, nbytes);
    if (!p) {
        JS_ASSERT(nbytes < sizeof(vma->mReserve));
        vma->mOutOfMemory = true;
        p = (void*) &vma->mReserve[0];
    }
    vma->mSize += nbytes;
    return p;
}

void
nanojit::Allocator::freeChunk(void *p) {
    VMAllocator *vma = (VMAllocator*)this;
    if (p != &vma->mReserve[0])
        free(p);
}

void
nanojit::Allocator::postReset() {
    VMAllocator *vma = (VMAllocator*)this;
    vma->mOutOfMemory = false;
    vma->mSize = 0;
}

static void OutOfMemoryAbort()
{
    JS_NOT_REACHED("out of memory");
    abort();
}
static void
DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj,
                  uint32 globalShape, uint32 argc);

/*
 * We really need a better way to configure the JIT. Shaver, where is
 * my fancy JIT object?
 *
 * NB: this is raced on, if jstracer.cpp should ever be running MT.
 * I think it's harmless tho.
 */
static bool did_we_check_processor_features = false;
/* ------ Debug logging control ------ */

/*
 * All the logging control stuff lives in here. It is shared between
 * all threads, but I think that's OK.
 */
LogControl js_LogController;

#ifdef JS_JIT_SPEW

/*
 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
 */
static bool did_we_set_up_debug_logging = false;

static void
InitJITLogController()
{
    char *tm, *tmf;
    uint32_t bits;

    js_LogController.lcbits = 0;

    tm = getenv("TRACEMONKEY");
    if (tm) {
        fflush(NULL);
        printf(
            "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
            "Try 'TMFLAGS=help js -j' for a list of options.\n"
        );
        exit(0);
    }

    tmf = getenv("TMFLAGS");
    if (!tmf) return;
    /* Using strstr() is really a cheap hack as far as flag decoding goes. */
    if (strstr(tmf, "help")) {
        fflush(NULL);
        printf(
            "usage: TMFLAGS=option,option,option,... where options can be:\n"
            "\n"
            "  help         show this message\n"
            "  ------ options for jstracer & jsregexp ------\n"
            "  minimal      ultra-minimalist output; try this first\n"
            "  full         everything except 'treevis' and 'nocodeaddrs'\n"
            "  tracer       tracer lifetime (FIXME:better description)\n"
            "  recorder     trace recording stuff (FIXME:better description)\n"
            "  abort        show trace recording aborts\n"
            "  stats        show trace recording stats\n"
            "  regexp       show compilation & entry for regexps\n"
            "  treevis      spew that tracevis/tree.py can parse\n"
            "  ------ options for Nanojit ------\n"
            "  fragprofile  count entries and exits for each fragment\n"
            "  liveness     show LIR liveness at start of rdr pipeline\n"
            "  readlir      show LIR as it enters the reader pipeline\n"
            "  aftersf      show LIR after StackFilter\n"
            "  assembly     show final aggregated assembly code\n"
            "  regalloc     show regalloc state in 'assembly' output\n"
            "  activation   show activation state in 'assembly' output\n"
            "  nocodeaddrs  omit code addresses in 'assembly' output\n"
            "\n"
        );
        exit(0);
    }
    uint32_t bits2 = 0;
    bits = 0;

    /* flags for jstracer.cpp */
    if (strstr(tmf, "minimal")  || strstr(tmf, "full")) bits |= LC_TMMinimal;
    if (strstr(tmf, "tracer")   || strstr(tmf, "full")) bits |= LC_TMTracer;
    if (strstr(tmf, "recorder") || strstr(tmf, "full")) bits |= LC_TMRecorder;
    if (strstr(tmf, "abort")    || strstr(tmf, "full")) bits |= LC_TMAbort;
    if (strstr(tmf, "stats")    || strstr(tmf, "full")) bits |= LC_TMStats;
    if (strstr(tmf, "regexp")   || strstr(tmf, "full")) bits |= LC_TMRegexp;
    if (strstr(tmf, "treevis"))                         bits |= LC_TMTreeVis;

    /* flags for nanojit */
    if (strstr(tmf, "fragprofile"))                       bits |= LC_FragProfile;
    if (strstr(tmf, "liveness")   || strstr(tmf, "full")) bits |= LC_Liveness;
    if (strstr(tmf, "activation") || strstr(tmf, "full")) bits |= LC_Activation;
    if (strstr(tmf, "readlir")    || strstr(tmf, "full")) bits |= LC_ReadLIR;
    if (strstr(tmf, "aftersf")    || strstr(tmf, "full")) bits |= LC_AfterSF;
    if (strstr(tmf, "regalloc")   || strstr(tmf, "full")) bits |= LC_RegAlloc;
    if (strstr(tmf, "assembly")   || strstr(tmf, "full")) bits |= LC_Assembly;
    if (strstr(tmf, "nocodeaddrs"))                       bits |= LC_NoCodeAddrs;

    js_LogController.lcbits = bits;
    (void) bits2;
    return;
}

#endif /* JS_JIT_SPEW */
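/*
 * Illustrative usage (not part of the original source): running the shell as
 *
 *     TMFLAGS=minimal,abort,stats js -j file.js
 *
 * sets bits = LC_TMMinimal | LC_TMAbort | LC_TMStats above, since the
 * decoder simply substring-matches each option name in $TMFLAGS.
 */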
/* ------------------ Frag-level profiling support ------------------ */

#ifdef JS_JIT_SPEW

/*
 * All the allocations done by this profile data-collection and
 * display machinery, are done in JSTraceMonitor::profAlloc. That is
 * emptied out at the end of js_FinishJIT. It has a lifetime from
 * js_InitJIT to js_FinishJIT, which exactly matches the span
 * js_FragProfiling_init to js_FragProfiling_showResults.
 */
template<class T>
static Seq<T>* reverseInPlace(Seq<T>* seq)
{
    Seq<T>* prev = NULL;
    Seq<T>* curr = seq;
    while (curr) {
        Seq<T>* next = curr->tail;
        curr->tail = prev;
        prev = curr;
        curr = next;
    }
    return prev;
}
// The number of top blocks to show in the profile
#define N_TOP_BLOCKS 50

// Contains profile info for a single guard
struct GuardPI {
    uint32_t guardID; // identifying number
    uint32_t count;   // count.
};

struct FragPI {
    uint32_t count;          // entry count for this Fragment
    uint32_t nStaticExits;   // statically: the number of exits
    size_t nCodeBytes;       // statically: the number of insn bytes in the main fragment
    size_t nExitBytes;       // statically: the number of insn bytes in the exit paths
    Seq<GuardPI>* guards;    // guards, each with its own count
    uint32_t largestGuardID; // that exists in .guards
};

/* A mapping of Fragment.profFragID to FragPI */
typedef HashMap<uint32,FragPI> FragStatsMap;
void
js_FragProfiling_FragFinalizer(Fragment* f, JSTraceMonitor* tm)
{
    // Recover profiling data from 'f', which is logically at the end
    // of its useful lifetime.
    if (!(js_LogController.lcbits & LC_FragProfile))
        return;

    // Valid profFragIDs start at 1
    NanoAssert(f->profFragID >= 1);
    // Should be called exactly once per Fragment. This will assert if
    // you issue the same FragID to more than one Fragment.
    NanoAssert(!tm->profTab->containsKey(f->profFragID));

    FragPI pi = { f->profCount,
                  f->nStaticExits,
                  f->nCodeBytes,
                  f->nExitBytes,
                  NULL,      /* guards */
                  0          /* largestGuardID */ };

    // Begin sanity check on the guards
    SeqBuilder<GuardPI> guardsBuilder(*tm->profAlloc);
    GuardRecord* gr;
    uint32_t nGs = 0;
    uint32_t sumOfDynExits = 0;
    for (gr = f->guardsForFrag; gr; gr = gr->nextInFrag) {
        nGs++;
        // Also copy the data into our auxiliary structure.
        // f->guardsForFrag is in reverse order, and so this
        // copy preserves that ordering (->add adds at end).
        // Valid profGuardIDs start at 1.
        NanoAssert(gr->profGuardID > 0);
        sumOfDynExits += gr->profCount;
        GuardPI gpi = { gr->profGuardID, gr->profCount };
        guardsBuilder.add(gpi);
        if (gr->profGuardID > pi.largestGuardID)
            pi.largestGuardID = gr->profGuardID;
    }
    pi.guards = guardsBuilder.get();
    // And put the guard list in forwards order
    pi.guards = reverseInPlace(pi.guards);

    // Why is this so? Because nGs is the number of guards
    // at the time the LIR was generated, whereas f->nStaticExits
    // is the number of them observed by the time it makes it
    // through to the assembler. It can be the case that LIR
    // optimisation removes redundant guards; hence we expect
    // nGs to always be the same or higher.
    NanoAssert(nGs >= f->nStaticExits);

    // Also we can assert that the sum of the exit counts
    // can't exceed the entry count. It'd be nice to assert that
    // they are exactly equal, but we can't because we don't know
    // how many times we got to the end of the trace.
    NanoAssert(f->profCount >= sumOfDynExits);

    // End sanity check on guards

    tm->profTab->put(f->profFragID, pi);
}
void
js_FragProfiling_showResults(JSTraceMonitor* tm)
{
    uint32_t topFragID[N_TOP_BLOCKS];
    FragPI   topPI[N_TOP_BLOCKS];
    uint64_t totCount = 0, cumulCount;
    uint32_t totSE = 0;
    size_t   totCodeB = 0, totExitB = 0;
    memset(topFragID, 0, sizeof(topFragID));
    memset(topPI,     0, sizeof(topPI));
    FragStatsMap::Iter iter(*tm->profTab);
    while (iter.next()) {
        uint32_t fragID  = iter.key();
        FragPI   pi      = iter.value();
        uint32_t count   = pi.count;
        totCount += (uint64_t)count;
        /* Find the rank for this entry, in tops */
        int r = N_TOP_BLOCKS-1;
        while (r >= 0) {
            if (topFragID[r] == 0) {
                r--;
                continue;
            }
            if (count > topPI[r].count) {
                r--;
                continue;
            }
            break;
        }
        r++;
        AvmAssert(r >= 0 && r <= N_TOP_BLOCKS);
        /* This entry should be placed at topPI[r], and entries
           at higher numbered slots moved up one. */
        if (r < N_TOP_BLOCKS) {
            for (int s = N_TOP_BLOCKS-1; s > r; s--) {
                topFragID[s] = topFragID[s-1];
                topPI[s]     = topPI[s-1];
            }
            topFragID[r] = fragID;
            topPI[r]     = pi;
        }
    }

    js_LogController.printf(
        "\n----------------- Per-fragment execution counts ------------------\n");
    js_LogController.printf(
        "\nTotal count = %llu\n\n", (unsigned long long int)totCount);

    js_LogController.printf(
        "           Entry counts         Entry counts       ----- Static -----\n");
    js_LogController.printf(
        "         ------Self------     ----Cumulative---   Exits  Cbytes  Xbytes   FragID\n");
    js_LogController.printf("\n");

    if (totCount == 0)
        totCount = 1; /* avoid division by zero */
    cumulCount = 0;

    int r;
    for (r = 0; r < N_TOP_BLOCKS; r++) {
        if (topFragID[r] == 0)
            break;
        cumulCount += (uint64_t)topPI[r].count;
        js_LogController.printf("%3d:     %5.2f%% %9u     %6.2f%% %9llu"
                                "     %3d   %5u   %5u   %06u\n",
                                r,
                                (double)topPI[r].count * 100.0 / (double)totCount,
                                topPI[r].count,
                                (double)cumulCount * 100.0 / (double)totCount,
                                (unsigned long long int)cumulCount,
                                topPI[r].nStaticExits,
                                (unsigned int)topPI[r].nCodeBytes,
                                (unsigned int)topPI[r].nExitBytes,
                                topFragID[r]);
        totSE += (uint32_t)topPI[r].nStaticExits;
        totCodeB += topPI[r].nCodeBytes;
        totExitB += topPI[r].nExitBytes;
    }

    js_LogController.printf("\nTotal displayed code bytes = %u, "
                            "exit bytes = %u\n"
                            "Total displayed static exits = %d\n\n",
                            (unsigned int)totCodeB, (unsigned int)totExitB, totSE);

    js_LogController.printf("Analysis by exit counts\n\n");

    for (r = 0; r < N_TOP_BLOCKS; r++) {
        if (topFragID[r] == 0)
            break;
        js_LogController.printf("FragID=%06u, total count %u:\n", topFragID[r],
                                topPI[r].count);
        uint32_t madeItToEnd = topPI[r].count;
        uint32_t totThisFrag = topPI[r].count;
        if (totThisFrag == 0)
            totThisFrag = 1;
        GuardPI gpi;
        // visit the guards, in forward order
        for (Seq<GuardPI>* guards = topPI[r].guards; guards; guards = guards->tail) {
            gpi = (*guards).head;
            if (gpi.count == 0)
                continue;
            madeItToEnd -= gpi.count;
            js_LogController.printf("   GuardID=%03u    %7u (%5.2f%%)\n",
                                    gpi.guardID, gpi.count,
                                    100.0 * (double)gpi.count / (double)totThisFrag);
        }
        js_LogController.printf("   Looped (%03u)   %7u (%5.2f%%)\n",
                                topPI[r].largestGuardID+1,
                                madeItToEnd,
                                100.0 * (double)madeItToEnd / (double)totThisFrag);
        NanoAssert(madeItToEnd <= topPI[r].count); // else unsigned underflow
        js_LogController.printf("\n");
    }
}

#endif /* JS_JIT_SPEW */
/* ----------------------------------------------------------------- */

static const char*
getExitName(ExitType type)
{
    static const char* exitNames[] =
    {
    #define MAKE_EXIT_STRING(x) #x,
    JS_TM_EXITCODES(MAKE_EXIT_STRING)
    #undef MAKE_EXIT_STRING
    NULL
    };

    JS_ASSERT(type < TOTAL_EXIT_TYPES);

    return exitNames[type];
}
static JSBool FASTCALL
PrintOnTrace(char* format, uint32 argc, double *argv)
{
    union {
        struct {
            uint32 lo;
            uint32 hi;
        } i;
        double   d;
        char     *cstr;
        JSString *s;
    } u;

    /* NB: the output stream choice and the conversion-character dispatch
       below are reconstructions; the extraction lost the original lines. */
    FILE *out = stdout;

    uint32 argi = 0;
#define GET_ARG() JS_BEGIN_MACRO                                              \
        if (argi >= argc) {                                                   \
            fprintf(out, "[too few args for format]");                        \
            return JS_FALSE;                                                  \
        }                                                                     \
        u.d = argv[argi++];                                                   \
    JS_END_MACRO

    for (char *p = format; *p; ++p) {
        if (*p == '%') {
            char ch = *++p;
            if (!ch) {
                fprintf(out, "[trailing %%]");
                break;
            }
            switch (ch) {
              case 'a':
                GET_ARG();
                fprintf(out, "[%u:%u 0x%x:0x%x %f]", u.i.lo, u.i.hi, u.i.lo, u.i.hi, u.d);
                break;
              case 'd':
                GET_ARG();
                fprintf(out, "%d", u.i.lo);
                break;
              case 'u':
                GET_ARG();
                fprintf(out, "%u", u.i.lo);
                break;
              case 'x':
                GET_ARG();
                fprintf(out, "%x", u.i.lo);
                break;
              case 'f':
                GET_ARG();
                fprintf(out, "%f", u.d);
                break;
              case 's': {
                GET_ARG();
                size_t length = u.s->length();
                // protect against massive spew if u.s is a bad pointer.
                if (length > 1 << 16)
                    length = 1 << 16;
                jschar *chars = u.s->chars();
                for (unsigned i = 0; i < length; ++i) {
                    jschar co = chars[i];
                    if (co < 128)
                        putc(co, out);
                    else if (co < 256)
                        fprintf(out, "\\u%02x", co);
                    else
                        fprintf(out, "\\u%04x", co);
                }
                break;
              }
              case 'S':
                GET_ARG();
                fprintf(out, "%s", u.cstr);
                break;
              default:
                fprintf(out, "[invalid %%%c]", *p);
                break;
            }
        } else {
            putc(*p, out);
        }
    }
#undef GET_ARG
    return JS_TRUE;
}
JS_DEFINE_CALLINFO_3(extern, BOOL, PrintOnTrace, CHARPTR, UINT32, DOUBLEPTR, 0, 0)

// This version is not intended to be called directly: usually it is easier to
// use one of the other overloads.
void
TraceRecorder::tprint(const char *format, int count, nanojit::LIns *insa[])
{
    size_t size = strlen(format) + 1;
    char* data = (char*) traceMonitor->traceAlloc->alloc(size);
    memcpy(data, format, size);

    double *args = (double*) traceMonitor->traceAlloc->alloc(count * sizeof(double));
    for (int i = 0; i < count; ++i) {
        lir->insStorei(insa[i], INS_CONSTPTR(args), sizeof(double) * i);
    }

    LIns* args_ins[] = { INS_CONSTPTR(args), INS_CONST(count), INS_CONSTPTR(data) };
    LIns* call_ins = lir->insCall(&PrintOnTrace_ci, args_ins);
    guard(false, lir->ins_eq0(call_ins), MISMATCH_EXIT);
}
// Generate a 'printf'-type call from trace for debugging.
void
TraceRecorder::tprint(const char *format)
{
    LIns* insa[] = { NULL };
    tprint(format, 0, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins)
{
    LIns* insa[] = { ins };
    tprint(format, 1, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2)
{
    LIns* insa[] = { ins1, ins2 };
    tprint(format, 2, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3)
{
    LIns* insa[] = { ins1, ins2, ins3 };
    tprint(format, 3, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4)
{
    LIns* insa[] = { ins1, ins2, ins3, ins4 };
    tprint(format, 4, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
                      LIns *ins5)
{
    LIns* insa[] = { ins1, ins2, ins3, ins4, ins5 };
    tprint(format, 5, insa);
}

void
TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
                      LIns *ins5, LIns *ins6)
{
    LIns* insa[] = { ins1, ins2, ins3, ins4, ins5, ins6 };
    tprint(format, 6, insa);
}
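/*
 * Illustrative usage (not part of the original source): from inside a
 * recording method, a debug print of two LIR values might look like
 *
 *     tprint("sp=%x fp=%x\n", sp_ins, fp_ins);
 *
 * where sp_ins/fp_ins are hypothetical names. The overloads above copy the
 * operands into a scratch double array at trace-execution time and call
 * PrintOnTrace to decode them against the format string.
 */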
/*
 * The entire VM shares one oracle. Collisions and concurrent updates are
 * tolerated and worst case cause performance regressions.
 */
static Oracle oracle;
jsuword
Tracker::getTrackerPageBase(const void* v) const
{
    return jsuword(v) & ~TRACKER_PAGE_MASK;
}

jsuword
Tracker::getTrackerPageOffset(const void* v) const
{
    return (jsuword(v) & TRACKER_PAGE_MASK) >> 2;
}

struct Tracker::TrackerPage*
Tracker::findTrackerPage(const void* v) const
{
    jsuword base = getTrackerPageBase(v);
    struct Tracker::TrackerPage* p = pagelist;
    while (p) {
        if (p->base == base)
            return p;
        p = p->next;
    }
    return NULL;
}

struct Tracker::TrackerPage*
Tracker::addTrackerPage(const void* v)
{
    jsuword base = getTrackerPageBase(v);
    struct TrackerPage* p = (struct TrackerPage*) calloc(1, sizeof(*p));
    p->base = base;
    p->next = pagelist;
    pagelist = p;
    return p;
}

void
Tracker::clear()
{
    while (pagelist) {
        TrackerPage* p = pagelist;
        pagelist = pagelist->next;
        free(p);
    }
}

bool
Tracker::has(const void *v) const
{
    return get(v) != NULL;
}

LIns*
Tracker::get(const void* v) const
{
    struct Tracker::TrackerPage* p = findTrackerPage(v);
    if (!p)
        return NULL;
    return p->map[getTrackerPageOffset(v)];
}

void
Tracker::set(const void* v, LIns* i)
{
    struct Tracker::TrackerPage* p = findTrackerPage(v);
    if (!p)
        p = addTrackerPage(v);
    p->map[getTrackerPageOffset(v)] = i;
}
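/*
 * Illustrative example (not in the original source): assuming
 * TRACKER_PAGE_MASK == 0xfff, an address like 0x12345678 lands on the
 * TrackerPage with base 0x12345000 at map slot 0x678 >> 2 == 0x19e;
 * the >> 2 in getTrackerPageOffset is valid because tracked addresses
 * are word-aligned.
 */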
static unsigned
argSlots(JSStackFrame* fp)
{
    return JS_MAX(fp->argc, fp->fun->nargs);
}

static inline bool
isNumber(jsval v)
{
    return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
}

static inline jsdouble
asNumber(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_DOUBLE(v))
        return *JSVAL_TO_DOUBLE(v);
    return (jsdouble)JSVAL_TO_INT(v);
}

static inline bool
isInt32(jsval v)
{
    if (!isNumber(v))
        return false;
    jsdouble d = asNumber(v);
    jsint i;
    return !!JSDOUBLE_IS_INT(d, i);
}

static inline jsint
asInt32(jsval v)
{
    JS_ASSERT(isNumber(v));
    if (JSVAL_IS_INT(v))
        return JSVAL_TO_INT(v);
#ifdef DEBUG
    jsint i;
    JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i));
#endif
    return jsint(*JSVAL_TO_DOUBLE(v));
}
/* Return TT_DOUBLE for all numbers (int and double) and the tag otherwise. */
static inline JSTraceType
GetPromotedType(jsval v)
{
    if (JSVAL_IS_INT(v))
        return TT_DOUBLE;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return TT_NULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return TT_FUNCTION;
        return TT_OBJECT;
    }
    uint8_t tag = JSVAL_TAG(v);
    JS_ASSERT(tag == JSVAL_DOUBLE || tag == JSVAL_STRING || tag == JSVAL_SPECIAL);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_DOUBLE) == JSVAL_DOUBLE);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
    return JSTraceType(tag);
}

/* Return TT_INT32 for all whole numbers that fit into signed 32-bit and the tag otherwise. */
static inline JSTraceType
getCoercedType(jsval v)
{
    if (isInt32(v))
        return TT_INT32;
    if (JSVAL_IS_OBJECT(v)) {
        if (JSVAL_IS_NULL(v))
            return TT_NULL;
        if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
            return TT_FUNCTION;
        return TT_OBJECT;
    }
    uint8_t tag = JSVAL_TAG(v);
    JS_ASSERT(tag == JSVAL_DOUBLE || tag == JSVAL_STRING || tag == JSVAL_SPECIAL);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_DOUBLE) == JSVAL_DOUBLE);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
    JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
    return JSTraceType(tag);
}
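/*
 * Illustrative example (not in the original source): for v = INT_TO_JSVAL(42),
 * GetPromotedType(v) is TT_DOUBLE (on trace all numbers are widened to
 * doubles), while getCoercedType(v) is TT_INT32 (42 is a whole 32-bit
 * number); a jsval holding 3.5 yields TT_DOUBLE from both.
 */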
/* Constant seed and accumulate step borrowed from the DJB hash. */

const uintptr_t ORACLE_MASK = ORACLE_SIZE - 1;
JS_STATIC_ASSERT((ORACLE_MASK & ORACLE_SIZE) == 0);

const uintptr_t FRAGMENT_TABLE_MASK = FRAGMENT_TABLE_SIZE - 1;
JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK & FRAGMENT_TABLE_SIZE) == 0);

const uintptr_t HASH_SEED = 5381;

static inline void
HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
{
    h = ((h << 5) + h + (mask & i)) & mask;
}
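/*
 * Worked example (not in the original source; assumes a mask of 4095, i.e.
 * a 4096-entry table): starting from h = HASH_SEED = 5381 and i = 42,
 * h becomes ((5381 << 5) + 5381 + 42) & 4095 = 177615 & 4095 = 1487.
 * The (h << 5) + h step is the h * 33 multiply of the DJB string hash.
 */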
static JS_REQUIRES_STACK inline int
StackSlotHash(JSContext* cx, unsigned slot, const void* pc)
{
    uintptr_t h = HASH_SEED;
    HashAccum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
    HashAccum(h, uintptr_t(pc), ORACLE_MASK);
    HashAccum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}

static JS_REQUIRES_STACK inline int
GlobalSlotHash(JSContext* cx, unsigned slot)
{
    uintptr_t h = HASH_SEED;
    JSStackFrame* fp = cx->fp;
    while (fp->down)
        fp = fp->down;
    HashAccum(h, uintptr_t(fp->script), ORACLE_MASK);
    HashAccum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))), ORACLE_MASK);
    HashAccum(h, uintptr_t(slot), ORACLE_MASK);
    return int(h);
}

static inline int
PCHash(jsbytecode* pc)
{
    return int(uintptr_t(pc) & ORACLE_MASK);
}
Oracle::Oracle()
{
    /* Grow the oracle bitsets to their (fixed) size here, once. */
    _stackDontDemote.set(ORACLE_SIZE-1);
    _globalDontDemote.set(ORACLE_SIZE-1);
    clear();
}
/* Tell the oracle that a certain global variable should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
{
#ifdef DEBUG_dvander
    printf("MGSU: %d [%08x]: %d\n", slot, GlobalSlotHash(cx, slot),
           _globalDontDemote.get(GlobalSlotHash(cx, slot)));
#endif
    _globalDontDemote.set(GlobalSlotHash(cx, slot));
}

/* Consult with the oracle whether we shouldn't demote a certain global variable. */
JS_REQUIRES_STACK bool
Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
{
#ifdef DEBUG_dvander
    printf("IGSU: %d [%08x]: %d\n", slot, GlobalSlotHash(cx, slot),
           _globalDontDemote.get(GlobalSlotHash(cx, slot)));
#endif
    return _globalDontDemote.get(GlobalSlotHash(cx, slot));
}

/* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
JS_REQUIRES_STACK void
Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc)
{
#ifdef DEBUG_dvander
    printf("MSSU: %p:%d [%08x]: %d\n", pc, slot, StackSlotHash(cx, slot, pc),
           _stackDontDemote.get(StackSlotHash(cx, slot, pc)));
#endif
    _stackDontDemote.set(StackSlotHash(cx, slot, pc));
}

JS_REQUIRES_STACK void
Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
{
    markStackSlotUndemotable(cx, slot, cx->fp->regs->pc);
}

/* Consult with the oracle whether we shouldn't demote a certain slot. */
JS_REQUIRES_STACK bool
Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const
{
#ifdef DEBUG_dvander
    printf("ISSU: %p:%d [%08x]: %d\n", pc, slot, StackSlotHash(cx, slot, pc),
           _stackDontDemote.get(StackSlotHash(cx, slot, pc)));
#endif
    return _stackDontDemote.get(StackSlotHash(cx, slot, pc));
}

JS_REQUIRES_STACK bool
Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
{
    return isStackSlotUndemotable(cx, slot, cx->fp->regs->pc);
}

/* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
void
Oracle::markInstructionUndemotable(jsbytecode* pc)
{
    _pcDontDemote.set(PCHash(pc));
}

/* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
bool
Oracle::isInstructionUndemotable(jsbytecode* pc) const
{
    return _pcDontDemote.get(PCHash(pc));
}

void
Oracle::clearDemotability()
{
    _stackDontDemote.reset();
    _globalDontDemote.reset();
    _pcDontDemote.reset();
}
JS_REQUIRES_STACK static JS_INLINE void
MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
{
    if (slot < ti->nStackTypes) {
        oracle.markStackSlotUndemotable(cx, slot);
        return;
    }

    uint16* gslots = ti->globalSlots->data();
    oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
}

JS_REQUIRES_STACK static JS_INLINE void
MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* pc)
{
    if (slot < ti->nStackTypes) {
        oracle.markStackSlotUndemotable(cx, slot, pc);
        return;
    }

    uint16* gslots = ti->globalSlots->data();
    oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
}
static JS_REQUIRES_STACK inline bool
IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* ip)
{
    if (slot < ti->nStackTypes)
        return oracle.isStackSlotUndemotable(cx, slot, ip);

    uint16* gslots = ti->globalSlots->data();
    return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
}

static JS_REQUIRES_STACK inline bool
IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
{
    return IsSlotUndemotable(cx, ti, slot, cx->fp->regs->pc);
}
class FrameInfoCache
{
    struct Entry : public JSDHashEntryHdr
    {
        FrameInfo *fi;
    };

    static JSBool
    MatchFrameInfo(JSDHashTable *table, const JSDHashEntryHdr *entry, const void *key) {
        const FrameInfo* fi1 = ((const Entry*)entry)->fi;
        const FrameInfo* fi2 = (const FrameInfo*)key;
        if (memcmp(fi1, fi2, sizeof(FrameInfo)) != 0)
            return JS_FALSE;
        return memcmp(fi1->get_typemap(), fi2->get_typemap(),
                      fi1->callerHeight * sizeof(JSTraceType)) == 0;
    }

    static JSDHashNumber
    HashFrameInfo(JSDHashTable *table, const void *key) {
        FrameInfo* fi = (FrameInfo*)key;
        size_t len = sizeof(FrameInfo) + fi->callerHeight * sizeof(JSTraceType);

        JSDHashNumber h = 0;
        const unsigned char *s = (const unsigned char*)fi;
        for (size_t i = 0; i < len; i++, s++)
            h = JS_ROTATE_LEFT32(h, 4) ^ *s;
        return h;
    }

    static const JSDHashTableOps FrameCacheOps;

    JSDHashTable *table;
    VMAllocator *allocator;

  public:
    FrameInfoCache(VMAllocator *allocator) : allocator(allocator) {
        init();
    }

    ~FrameInfoCache() {
        if (table)
            JS_DHashTableDestroy(table);
    }

    bool init() {
        table = JS_NewDHashTable(&FrameCacheOps, NULL, sizeof(Entry),
                                 JS_DHASH_DEFAULT_CAPACITY(32));
        return table != NULL;
    }

    FrameInfo *memoize(const FrameInfo *fi) {
        Entry *entry = (Entry*)JS_DHashTableOperate(table, fi, JS_DHASH_ADD);
        if (!entry)
            return NULL;
        if (!entry->fi) {
            FrameInfo* n = (FrameInfo*)
                allocator->alloc(sizeof(FrameInfo) + fi->callerHeight * sizeof(JSTraceType));
            memcpy(n, fi, sizeof(FrameInfo) + fi->callerHeight * sizeof(JSTraceType));
            entry->fi = n;
        }
        return entry->fi;
    }
};

const JSDHashTableOps FrameInfoCache::FrameCacheOps =
{
    JS_DHashAllocTable,
    JS_DHashFreeTable,
    FrameInfoCache::HashFrameInfo,
    FrameInfoCache::MatchFrameInfo,
    JS_DHashMoveEntryStub,
    JS_DHashClearEntryStub,
    JS_DHashFinalizeStub,
    NULL
};
struct PCHashEntry : public JSDHashEntryStub {
    size_t          count;
};

#define PC_HASH_COUNT 1024
static void
Blacklist(jsbytecode* pc)
{
    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP || *pc == JSOP_CALL);
    if (*pc == JSOP_CALL) {
        JS_ASSERT(*(pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
                  *(pc + JSOP_CALL_LENGTH) == JSOP_NOP);
        *(pc + JSOP_CALL_LENGTH) = JSOP_NOP;
    } else if (*pc == JSOP_TRACE) {
        *pc = JSOP_NOP;
    }
}

static bool
IsBlacklisted(jsbytecode* pc)
{
    if (*pc == JSOP_NOP)
        return true;
    if (*pc == JSOP_CALL)
        return *(pc + JSOP_CALL_LENGTH) == JSOP_NOP;
    return false;
}
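/*
 * Illustrative note (not in the original source): blacklisting rewrites the
 * bytecode itself -- a loop header compiled as JSOP_TRACE is overwritten
 * with JSOP_NOP, so the interpreter stops considering that location for
 * recording, and IsBlacklisted only has to re-read the byte.
 */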
static void
Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree = NULL)
{
    /* N.B. This code path cannot assume the recorder is/is not alive. */
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_ADD);
        if (entry) {
            if (!entry->key) {
                entry->key = pc;
                JS_ASSERT(entry->count == 0);
            }
            JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
            if (entry->count++ > (BL_ATTEMPTS * MAXPEERS)) {
                entry->count = 0;
                Blacklist(pc);
                return;
            }
        }
    }

    if (tree) {
        tree->hits() -= BL_BACKOFF;

        /*
         * In case there is no entry or no table (due to OOM) or some
         * serious imbalance in the recording-attempt distribution on a
         * multitree, give each tree another chance to blacklist here as
         * well.
         */
        if (++tree->recordAttempts > BL_ATTEMPTS)
            Blacklist(pc);
    }
}
static void
ResetRecordingAttempts(JSContext *cx, jsbytecode* pc)
{
    JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
    if (table->ops) {
        PCHashEntry *entry = (PCHashEntry *)
            JS_DHashTableOperate(table, pc, JS_DHASH_LOOKUP);

        if (JS_DHASH_ENTRY_IS_FREE(&(entry->hdr)))
            return;
        JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
        entry->count = 0;
    }
}
static inline size_t
FragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
{
    uintptr_t h = HASH_SEED;
    HashAccum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
    HashAccum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
    HashAccum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
    HashAccum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
    return size_t(h);
}
static void
RawLookupFirstPeer(JSTraceMonitor* tm, const void *ip, JSObject* globalObj,
                   uint32 globalShape, uint32 argc,
                   TreeFragment*& firstInBucket, TreeFragment**& prevTreeNextp)
{
    size_t h = FragmentHash(ip, globalObj, globalShape, argc);
    TreeFragment** ppf = &tm->vmfragments[h];
    firstInBucket = *ppf;
    for (; TreeFragment* pf = *ppf; ppf = &pf->next) {
        if (pf->globalObj == globalObj &&
            pf->globalShape == globalShape &&
            pf->ip == ip && pf->argc == argc) {
            prevTreeNextp = ppf;
            return;
        }
    }
    prevTreeNextp = ppf;
    return;
}
static TreeFragment*
LookupLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj,
           uint32 globalShape, uint32 argc)
{
    TreeFragment *_, **prevTreeNextp;
    RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, _, prevTreeNextp);
    return *prevTreeNextp;
}
static TreeFragment*
LookupOrAddLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj,
                uint32 globalShape, uint32 argc)
{
    TreeFragment *firstInBucket, **prevTreeNextp;
    RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, firstInBucket, prevTreeNextp);
    if (TreeFragment *f = *prevTreeNextp)
        return f;

    verbose_only(
    uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile)
                          ? (++(tm->lastFragID)) : 0;
    )
    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, globalObj, globalShape, argc
                                                        verbose_only(, profFragID));
    f->root = f;                /* f is the root of a new tree */
    *prevTreeNextp = f;         /* insert f at the end of the vmfragments bucket-list */
    f->next = NULL;
    f->first = f;               /* initialize peer-list at f */
    f->peer = NULL;
    return f;
}
static TreeFragment*
AddNewPeerToPeerList(JSTraceMonitor* tm, TreeFragment* peer)
{
    verbose_only(
    uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile)
                          ? (++(tm->lastFragID)) : 0;
    )
    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, peer->globalObj,
                                                        peer->globalShape, peer->argc
                                                        verbose_only(, profFragID));
    f->root = f;                /* f is the root of a new tree */
    f->first = peer->first;     /* add f to peer list */
    f->peer = peer->peer;
    peer->peer = f;
    /* only the |first| Fragment of a peer list needs a valid |next| field */
    debug_only(f->next = (TreeFragment*)0xcdcdcdcd);
    return f;
}
#ifdef DEBUG
static void
AssertTreeIsUnique(JSTraceMonitor* tm, TreeFragment* f, TreeInfo* ti)
{
    JS_ASSERT(f->root == f);

    /*
     * Check for duplicate entry type maps. This is always wrong and hints at
     * trace explosion since we are trying to stabilize something without
     * properly connecting peer edges.
     */
    TreeInfo* ti_other;
    for (TreeFragment* peer = LookupLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc);
         peer != NULL;
         peer = peer->peer) {
        if (!peer->code() || peer == f)
            continue;
        ti_other = peer->treeInfo;
        JS_ASSERT(ti_other);
        JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
    }
}
#endif
static void
AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
                   uint32 argc)
{
    /* If we already permanently blacklisted the location, undo that. */
    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE || *pc == JSOP_CALL);
    if (*pc == JSOP_NOP)
        *pc = JSOP_TRACE;
    ResetRecordingAttempts(cx, pc);

    /* Breathe new life into all peer fragments at the designated loop header. */
    TreeFragment* f = LookupLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj), argc);
    if (!f) {
        /*
         * If the global object's shape changed, we can't easily find the
         * corresponding loop header via a hash table lookup. In this case
         * we simply bail here and hope that the fragment has another
         * outstanding compilation attempt. This case is extremely rare.
         */
        return;
    }
    JS_ASSERT(f->root == f);
    f = f->first;
    while (f) {
        JS_ASSERT(f->root == f);
        --f->recordAttempts;
        f->hits() = HOTLOOP;
        f = f->peer;
    }
}
// Forward declarations.
JS_DEFINE_CALLINFO_1(static, DOUBLE, i2f, INT32, 1, 1)
JS_DEFINE_CALLINFO_1(static, DOUBLE, u2f, UINT32, 1, 1)

static bool
isi2f(LIns* i)
{
    if (i->isop(LIR_i2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_pcall) &&
        i->oprnd2()->isop(LIR_callh)) {
        if (i->oprnd1()->callInfo() == &i2f_ci)
            return true;
    }

    return false;
}

static bool
isu2f(LIns* i)
{
    if (i->isop(LIR_u2f))
        return true;

    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_pcall) &&
        i->oprnd2()->isop(LIR_callh)) {
        if (i->oprnd1()->callInfo() == &u2f_ci)
            return true;
    }

    return false;
}

static LIns*
iu2fArg(LIns* i)
{
    if (nanojit::AvmCore::config.soft_float &&
        i->isop(LIR_qjoin)) {
        return i->oprnd1()->arg(0);
    }

    return i->oprnd1();
}
static LIns*
demote(LirWriter *out, LIns* i)
{
    if (i->isCall())
        return i->callArgN(0);
    if (isi2f(i) || isu2f(i))
        return iu2fArg(i);
    if (i->isconst())
        return i;
    JS_ASSERT(i->isconstf());
    double cf = i->imm64f();
    int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
    return out->insImm(ci);
}

static bool
isPromoteInt(LIns* i)
{
    if (isi2f(i) || i->isconst())
        return true;
    if (!i->isconstf())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool
isPromoteUint(LIns* i)
{
    if (isu2f(i) || i->isconst())
        return true;
    if (!i->isconstf())
        return false;
    jsdouble d = i->imm64f();
    return d == jsdouble(jsuint(d)) && !JSDOUBLE_IS_NEGZERO(d);
}

static bool
isPromote(LIns* i)
{
    return isPromoteInt(i) || isPromoteUint(i);
}
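/*
 * Illustrative example (not in the original source): for i = i2f(x),
 * isPromoteInt(i) holds and demote(out, i) recovers the original integer
 * operand x; for the constant LIR double 3.0, demote emits insImm(3).
 */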
static bool
IsConst(LIns* i, int32_t c)
{
    return i->isconst() && i->imm32() == c;
}
/*
 * Determine whether this operand is guaranteed to not overflow the specified
 * integer operation.
 */
static bool
IsOverflowSafe(LOpcode op, LIns* i)
{
    LIns* c;
    switch (op) {
      case LIR_add:
      case LIR_sub:
        return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
                ((c->imm32() & 0xc0000000) == 0)) ||
               (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
                ((c->imm32() > 0)));
      default:
        JS_ASSERT(op == LIR_mul);
    }
    return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() & 0xffff0000) == 0)) ||
           (i->isop(LIR_ush) && ((c = i->oprnd2())->isconst()) &&
            ((c->imm32() >= 16)));
}
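/*
 * Illustrative example (not in the original source): an operand computed as
 * (y & 0x3fffffff) has bits 30 and 31 clear, so adding two such values
 * cannot wrap int32 and the LIR_add needs no overflow guard; similarly
 * (y >>> 16) is below 2^16, so a 32-bit multiply of two such values fits.
 */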
/* soft float support */

static jsdouble FASTCALL
fneg(jsdouble x)
{
    return -x;
}
JS_DEFINE_CALLINFO_1(static, DOUBLE, fneg, DOUBLE, 1, 1)

static jsdouble FASTCALL
i2f(int32 i)
{
    return i;
}

static jsdouble FASTCALL
u2f(jsuint u)
{
    return u;
}

static int32 FASTCALL
fcmpeq(jsdouble x, jsdouble y)
{
    return x == y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpeq, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmplt(jsdouble x, jsdouble y)
{
    return x < y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmplt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmple(jsdouble x, jsdouble y)
{
    return x <= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmple, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpgt(jsdouble x, jsdouble y)
{
    return x > y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpgt, DOUBLE, DOUBLE, 1, 1)

static int32 FASTCALL
fcmpge(jsdouble x, jsdouble y)
{
    return x >= y;
}
JS_DEFINE_CALLINFO_2(static, INT32, fcmpge, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fmul(jsdouble x, jsdouble y)
{
    return x * y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fmul, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fadd(jsdouble x, jsdouble y)
{
    return x + y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fadd, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fdiv(jsdouble x, jsdouble y)
{
    return x / y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fdiv, DOUBLE, DOUBLE, 1, 1)

static jsdouble FASTCALL
fsub(jsdouble x, jsdouble y)
{
    return x - y;
}
JS_DEFINE_CALLINFO_2(static, DOUBLE, fsub, DOUBLE, DOUBLE, 1, 1)
// replace fpu ops with function calls
class SoftFloatFilter : public LirWriter
{
public:
    SoftFloatFilter(LirWriter *out) : LirWriter(out)
    {}

    LIns *hi(LIns *q) {
        return ins1(LIR_qhi, q);
    }
    LIns *lo(LIns *q) {
        return ins1(LIR_qlo, q);
    }

    LIns *split(LIns *a) {
        if (a->isQuad() && !a->isop(LIR_qjoin)) {
            // all quad-sized args must be qjoin's for soft-float
            a = ins2(LIR_qjoin, lo(a), hi(a));
        }
        return a;
    }

    LIns *split(const CallInfo *call, LInsp args[]) {
        LIns *lo = out->insCall(call, args);
        LIns *hi = out->ins1(LIR_callh, lo);
        return out->ins2(LIR_qjoin, lo, hi);
    }

    LIns *fcall1(const CallInfo *call, LIns *a) {
        LIns *args[] = { split(a) };
        return split(call, args);
    }

    LIns *fcall2(const CallInfo *call, LIns *a, LIns *b) {
        LIns *args[] = { split(b), split(a) };
        return split(call, args);
    }

    LIns *fcmp(const CallInfo *call, LIns *a, LIns *b) {
        LIns *args[] = { split(b), split(a) };
        return out->ins2(LIR_eq, out->insCall(call, args), out->insImm(1));
    }

    LIns *ins1(LOpcode op, LIns *a) {
        if (op == LIR_i2f)
            return fcall1(&i2f_ci, a);
        if (op == LIR_u2f)
            return fcall1(&u2f_ci, a);
        if (op == LIR_fneg)
            return fcall1(&fneg_ci, a);
        if (op == LIR_fret)
            return out->ins1(op, split(a));
        return out->ins1(op, a);
    }

    LIns *ins2(LOpcode op, LIns *a, LIns *b) {
        if (op == LIR_fadd)
            return fcall2(&fadd_ci, a, b);
        if (op == LIR_fsub)
            return fcall2(&fsub_ci, a, b);
        if (op == LIR_fmul)
            return fcall2(&fmul_ci, a, b);
        if (op == LIR_fdiv)
            return fcall2(&fdiv_ci, a, b);
        if (op == LIR_feq)
            return fcmp(&fcmpeq_ci, a, b);
        if (op == LIR_flt)
            return fcmp(&fcmplt_ci, a, b);
        if (op == LIR_fgt)
            return fcmp(&fcmpgt_ci, a, b);
        if (op == LIR_fle)
            return fcmp(&fcmple_ci, a, b);
        if (op == LIR_fge)
            return fcmp(&fcmpge_ci, a, b);
        return out->ins2(op, a, b);
    }

    LIns *insCall(const CallInfo *ci, LInsp args[]) {
        uint32_t argt = ci->_argtypes;

        for (uint32_t i = 0, argsizes = argt >> ARGSIZE_SHIFT; argsizes != 0; i++, argsizes >>= ARGSIZE_SHIFT)
            args[i] = split(args[i]);

        if ((argt & ARGSIZE_MASK_ANY) == ARGSIZE_F) {
            // this function returns a double as two 32bit values, so replace
            // call with qjoin(qhi(call), call)
            return split(ci, args);
        }
        return out->insCall(ci, args);
    }
};
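/*
 * Illustrative example (not in the original source): on a soft-float target,
 * ins2(LIR_fadd, a, b) above becomes qjoin(lo, callh(lo)) where lo is the
 * call to the fadd helper, and ins2(LIR_feq, a, b) becomes the integer test
 * (fcmpeq(a, b) == 1).
 */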
class FuncFilter : public LirWriter
{
public:
    FuncFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LIns* ins2(LOpcode v, LIns* s0, LIns* s1)
    {
        if (s0 == s1 && v == LIR_feq) {
            if (isPromote(s0)) {
                // double(int) and double(uint) cannot be nan
                return insImm(1);
            }
            if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                LIns* lhs = s0->oprnd1();
                LIns* rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    // add/sub/mul promoted ints can't be nan
                    return insImm(1);
                }
            }
        } else if (LIR_feq <= v && v <= LIR_fge) {
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fcmp to cmp
                v = LOpcode(v + (LIR_eq - LIR_feq));
                return out->ins2(v, demote(out, s0), demote(out, s1));
            } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
                // uint compare
                v = LOpcode(v + (LIR_eq - LIR_feq));
                if (v != LIR_eq)
                    v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
                return out->ins2(v, demote(out, s0), demote(out, s1));
            }
        } else if (v == LIR_or &&
                   s0->isop(LIR_lsh) && IsConst(s0->oprnd2(), 16) &&
                   s1->isop(LIR_and) && IsConst(s1->oprnd2(), 0xffff)) {
            LIns* msw = s0->oprnd1();
            LIns* lsw = s1->oprnd1();
            LIns* x;
            LIns* y;
            if (lsw->isop(LIR_add) &&
                lsw->oprnd1()->isop(LIR_and) &&
                lsw->oprnd2()->isop(LIR_and) &&
                IsConst(lsw->oprnd1()->oprnd2(), 0xffff) &&
                IsConst(lsw->oprnd2()->oprnd2(), 0xffff) &&
                msw->isop(LIR_add) &&
                msw->oprnd1()->isop(LIR_add) &&
                msw->oprnd2()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
                IsConst(msw->oprnd2()->oprnd2(), 16) &&
                IsConst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
                IsConst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
                (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
                (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
                lsw == msw->oprnd2()->oprnd1()) {
                return out->ins2(LIR_add, x, y);
            }
        }
        return out->ins2(v, s0, s1);
    }

    LIns* insCall(const CallInfo *ci, LIns* args[])
    {
        if (ci == &js_DoubleToUint32_ci) {
            LIns* s0 = args[0];
            if (s0->isconstf())
                return out->insImm(js_DoubleToECMAUint32(s0->imm64f()));
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
        } else if (ci == &js_DoubleToInt32_ci) {
            LIns* s0 = args[0];
            if (s0->isconstf())
                return out->insImm(js_DoubleToECMAInt32(s0->imm64f()));
            if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
                LIns* lhs = s0->oprnd1();
                LIns* rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    LOpcode op = LOpcode(s0->opcode() & ~LIR64);
                    return out->ins2(op, demote(out, lhs), demote(out, rhs));
                }
            }
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);

            // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
            if (s0->isCall()) {
                const CallInfo* ci2 = s0->callInfo();
                if (ci2 == &js_UnboxDouble_ci) {
                    LIns* args2[] = { s0->callArgN(0) };
                    return out->insCall(&js_UnboxInt32_ci, args2);
                } else if (ci2 == &js_StringToNumber_ci) {
                    // callArgN's ordering is that as seen by the builtin, not as stored in
                    // args here. True story!
                    LIns* args2[] = { s0->callArgN(1), s0->callArgN(0) };
                    return out->insCall(&js_StringToInt32_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt0_ci) {
                    // Use a fast path builtin for a charCodeAt that converts to an int right away.
                    LIns* args2[] = { s0->callArgN(0) };
                    return out->insCall(&js_String_p_charCodeAt0_int_ci, args2);
                } else if (ci2 == &js_String_p_charCodeAt_ci) {
                    LIns* idx = s0->callArgN(1);
                    // If the index is not already an integer, force it to be an integer.
                    idx = isPromote(idx)
                          ? demote(out, idx)
                          : out->insCall(&js_DoubleToInt32_ci, &idx);
                    LIns* args2[] = { idx, s0->callArgN(0) };
                    return out->insCall(&js_String_p_charCodeAt_int_ci, args2);
                }
            }
        } else if (ci == &js_BoxDouble_ci) {
            LIns* s0 = args[0];
            JS_ASSERT(s0->isQuad());
            if (isPromoteInt(s0)) {
                LIns* args2[] = { demote(out, s0), args[1] };
                return out->insCall(&js_BoxInt32_ci, args2);
            }
            if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
                return s0->callArgN(0);
        }
        return out->insCall(ci, args);
    }
};
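/*
 * Illustrative example (not in the original source): a comparison of two
 * demotable doubles, e.g. the LIR for (x|0) < (y|0), reaches ins2 as
 * LIR_flt of two promoted ints and is rewritten to the integer LIR_lt on
 * the demoted operands; likewise js_DoubleToInt32(js_StringToNumber(...))
 * collapses into a single js_StringToInt32 call.
 */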
/*
 * Visit the values in the given JSStackFrame that the tracer cares about. This
 * visitor function is (implicitly) the primary definition of the native stack
 * area layout. There are a few other independent pieces of code that must be
 * maintained to assume the same layout. They are marked like this:
 *
 *   Duplicate native stack layout computation: see VisitFrameSlots header comment.
 */
template <typename Visitor>
static JS_REQUIRES_STACK bool
VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp,
                JSStackFrame *up)
{
    if (depth > 0 && !VisitFrameSlots(visitor, depth-1, fp->down, fp))
        return false;

    if (fp->argv) {
        visitor.setStackSlotKind("args");
        if (!visitor.visitStackSlots(&fp->argv[-2], argSlots(fp) + 2, fp))
            return false;
        visitor.setStackSlotKind("arguments");
        if (!visitor.visitStackSlots(&fp->argsobj, 1, fp))
            return false;
        visitor.setStackSlotKind("var");
        if (!visitor.visitStackSlots(fp->slots, fp->script->nfixed, fp))
            return false;
    }
    visitor.setStackSlotKind("stack");
    JS_ASSERT(fp->regs->sp >= StackBase(fp));
    if (!visitor.visitStackSlots(StackBase(fp),
                                 size_t(fp->regs->sp - StackBase(fp)),
                                 fp))
        return false;
    if (up) {
        int missing = up->fun->nargs - up->argc;
        if (missing > 0) {
            visitor.setStackSlotKind("missing");
            if (!visitor.visitStackSlots(fp->regs->sp, size_t(missing), fp))
                return false;
        }
    }
    return true;
}
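/*
 * Illustrative layout (not in the original source): for a single frame the
 * visit order above is
 *
 *     callee, this | argv[0..n] | argsobj | nfixed vars | operand stack
 *
 * so NativeStackSlots (below) must count 2 + argSlots(fp), + 1,
 * + fp->script->nfixed, + (fp->regs->sp - StackBase(fp)) in the same order.
 */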
template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
VisitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth)
{
    return VisitFrameSlots(visitor, callDepth, cx->fp, NULL);
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
                 unsigned ngslots, uint16 *gslots)
{
    for (unsigned n = 0; n < ngslots; ++n) {
        unsigned slot = gslots[n];
        visitor.visitGlobalSlot(&STOBJ_GET_SLOT(globalObj, slot), n, slot);
    }
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitGlobalSlots(Visitor &visitor, JSContext *cx, TreeInfo *ti)
{
    JSObject* globalObj = ti->globalObj();
    SlotList& gslots = *ti->globalSlots;
    VisitGlobalSlots(visitor, cx, globalObj, gslots.length(), gslots.data());
}

class AdjustCallerTypeVisitor;

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitGlobalSlots(Visitor &visitor, JSContext *cx, SlotList &gslots)
{
    VisitGlobalSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
                     gslots.length(), gslots.data());
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitSlots(Visitor& visitor, JSContext* cx, JSObject* globalObj,
           unsigned callDepth, unsigned ngslots, uint16* gslots)
{
    if (VisitStackSlots(visitor, cx, callDepth))
        VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitSlots(Visitor& visitor, JSContext* cx, unsigned callDepth,
           unsigned ngslots, uint16* gslots)
{
    VisitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
               callDepth, ngslots, gslots);
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
           unsigned callDepth, const SlotList& slots)
{
    VisitSlots(visitor, cx, globalObj, callDepth, slots.length(),
               slots.data());
}

template <typename Visitor>
static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
VisitSlots(Visitor &visitor, JSContext *cx, unsigned callDepth,
           const SlotList& slots)
{
    VisitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
               callDepth, slots.length(), slots.data());
}
class SlotVisitorBase {
#if defined JS_JIT_SPEW
protected:
    char const *mStackSlotKind;
public:
    SlotVisitorBase() : mStackSlotKind(NULL) {}
    JS_ALWAYS_INLINE const char *stackSlotKind() { return mStackSlotKind; }
    JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
        mStackSlotKind = k;
    }
#else
public:
    JS_ALWAYS_INLINE const char *stackSlotKind() { return NULL; }
    JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {}
#endif
};
struct CountSlotsVisitor : public SlotVisitorBase
{
    unsigned mCount;
    bool mDone;
    jsval* mStop;

public:
    JS_ALWAYS_INLINE CountSlotsVisitor(jsval* stop = NULL) :
        mCount(0),
        mDone(false),
        mStop(stop)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        if (mDone)
            return false;
        if (mStop && size_t(mStop - vp) < count) {
            mCount += size_t(mStop - vp);
            mDone = true;
            return false;
        }
        mCount += count;
        return true;
    }

    JS_ALWAYS_INLINE unsigned count() {
        return mCount;
    }

    JS_ALWAYS_INLINE bool stopped() {
        return mDone;
    }
};
/*
 * Calculate the total number of native frame slots we need from this frame all
 * the way back to the entry frame, including the current stack usage.
 */
JS_REQUIRES_STACK unsigned
NativeStackSlots(JSContext *cx, unsigned callDepth)
{
    JSStackFrame* fp = cx->fp;
    unsigned slots = 0;
    unsigned depth = callDepth;
    for (;;) {
        /*
         * Duplicate native stack layout computation: see VisitFrameSlots
         * header comment.
         */
        unsigned operands = fp->regs->sp - StackBase(fp);
        slots += operands;
        if (fp->argv)
            slots += fp->script->nfixed + 1 /*argsobj*/;
        if (depth-- == 0) {
            if (fp->argv)
                slots += 2 /*callee,this*/ + argSlots(fp);
#ifdef DEBUG
            CountSlotsVisitor visitor;
            VisitStackSlots(visitor, cx, callDepth);
            JS_ASSERT(visitor.count() == slots && !visitor.stopped());
#endif
            return slots;
        }
        JSStackFrame* fp2 = fp;
        fp = fp->down;
        int missing = fp2->fun->nargs - fp2->argc;
        if (missing > 0)
            slots += missing;
    }
    JS_NOT_REACHED("NativeStackSlots");
    return 0;
}
class CaptureTypesVisitor : public SlotVisitorBase
{
    JSContext* mCx;
    JSTraceType* mTypeMap;
    JSTraceType* mPtr;

public:
    JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, JSTraceType* typeMap) :
        mCx(cx),
        mTypeMap(typeMap),
        mPtr(typeMap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        JSTraceType type = getCoercedType(*vp);
        if (type == TT_INT32 &&
            oracle.isGlobalSlotUndemotable(mCx, slot))
            type = TT_DOUBLE;
        JS_ASSERT(type != TT_JSVAL);
        debug_only_printf(LC_TMTracer,
                          "capture type global%d: %d=%c\n",
                          n, type, typeChar[type]);
        *mPtr++ = type;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
        for (int i = 0; i < count; ++i) {
            JSTraceType type = getCoercedType(vp[i]);
            if (type == TT_INT32 &&
                oracle.isStackSlotUndemotable(mCx, length()))
                type = TT_DOUBLE;
            JS_ASSERT(type != TT_JSVAL);
            debug_only_printf(LC_TMTracer,
                              "capture type %s%d: %d=%c\n",
                              stackSlotKind(), i, type, typeChar[type]);
            *mPtr++ = type;
        }
        return true;
    }

    JS_ALWAYS_INLINE uintptr_t length() {
        return mPtr - mTypeMap;
    }
};
/*
 * Capture the type map for the selected slots of the global object and currently pending
 * stack frames.
 */
JS_REQUIRES_STACK void
TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth)
{
    setLength(NativeStackSlots(cx, callDepth) + slots.length());
    CaptureTypesVisitor visitor(cx, data());
    VisitSlots(visitor, cx, globalObj, callDepth, slots);
    JS_ASSERT(visitor.length() == length());
}

JS_REQUIRES_STACK void
TypeMap::captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned stackSlots)
{
    unsigned oldSlots = length() - stackSlots;
    int diff = slots.length() - oldSlots;
    JS_ASSERT(diff >= 0);
    setLength(length() + diff);
    CaptureTypesVisitor visitor(cx, data() + stackSlots + oldSlots);
    VisitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
}
/* Compare this type map to another one and see whether they match. */
bool
TypeMap::matches(TypeMap& other) const
{
    if (length() != other.length())
        return false;
    return !memcmp(data(), other.data(), length());
}

void
TypeMap::fromRaw(JSTraceType* other, unsigned numSlots)
{
    unsigned oldLength = length();
    setLength(length() + numSlots);
    for (unsigned i = 0; i < numSlots; i++)
        get(oldLength + i) = other[i];
}
/*
 * Use the provided storage area to create a new type map that contains the
 * partial type map with the rest of it filled up from the complete type
 * map.
 */
static void
MergeTypeMaps(JSTraceType** partial, unsigned* plength, JSTraceType* complete, unsigned clength, JSTraceType* mem)
{
    unsigned l = *plength;
    JS_ASSERT(l < clength);
    memcpy(mem, *partial, l * sizeof(JSTraceType));
    memcpy(mem + l, complete + l, (clength - l) * sizeof(JSTraceType));
    *partial = mem;
    *plength = clength;
}
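/*
 * Illustrative example (not in the original source): given a partial map
 * [I, D] (*plength == 2) and a complete map [I, D, S, O] (clength == 4),
 * mem receives [I, D, S, O] and *partial / *plength are redirected to it.
 */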
/* Specializes a tree to any missing globals, including any dependent trees. */
static JS_REQUIRES_STACK void
SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeInfo* root)
{
    TreeInfo* ti = root;

    ti->typeMap.captureMissingGlobalTypes(cx, globalObj, *ti->globalSlots, ti->nStackTypes);
    JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);

    for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
        ti = root->dependentTrees[i]->treeInfo;

        /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            SpecializeTreesToMissingGlobals(cx, globalObj, ti);
    }
    for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
        ti = root->linkedTrees[i]->treeInfo;
        if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
            SpecializeTreesToMissingGlobals(cx, globalObj, ti);
    }
}
static JS_REQUIRES_STACK void
ResetJITImpl(JSContext* cx);

#ifdef MOZ_TRACEVIS
static JS_INLINE JS_REQUIRES_STACK void
ResetJIT(JSContext* cx, TraceVisFlushReason r)
{
    js_LogTraceVisEvent(cx, S_RESET, r);
    ResetJITImpl(cx);
}
#else
#define ResetJIT(cx, r) ResetJITImpl(cx)
#endif

static void
TrashTree(JSContext* cx, TreeFragment* f);

template <class T>
static T&
InitConst(const T &t)
{
    return const_cast<T&>(t);
}
TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment,
                             TreeInfo* ti, unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
                             VMSideExit* innermost, jsbytecode* outer, uint32 outerArgc,
                             RecordReason recordReason)
  : traceMonitor(&JS_TRACE_MONITOR(cx)),
    recordReason(recordReason),
    globalObj(ti->globalObj()),
    outerArgc(outerArgc),
    lexicalBlock(cx->fp->blockChain),
    lirbuf(traceMonitor->lirbuf),
    mark(*traceMonitor->traceAlloc),
    numSideExitsBefore(treeInfo->sideExits.length()),
    nativeFrameTracker(),
    global_dslots(NULL),
    callDepth(anchor ? anchor->calldepth : 0),
    atoms(FrameAtomBase(cx, cx->fp)),
    cfgMerges(&tempAlloc()),
    whichTreesToTrash(&tempAlloc()),
    native_rval_ins(NULL),
    pendingSpecializedNative(NULL),
    pendingUnboxSlot(NULL),
    pendingGuardCondition(NULL),
    generatedSpecializedNative()
{
    JS_ASSERT(globalObj == JS_GetGlobalForObject(cx, cx->fp->scopeChain));
    JS_ASSERT(cx->fp->regs->pc == (jsbytecode*)fragment->ip);
    JS_ASSERT(fragment->root == treeInfo->rootFragment);
    JS_ASSERT_IF(fragment->root == fragment, !fragment->root->treeInfo);

    /*
     * Reset the fragment state we care about in case we got a recycled
     * fragment. This includes resetting any profiling data we might have
     * accumulated.
     */
    fragment->lastIns = NULL;
    fragment->setCode(NULL);
    fragment->lirbuf = lirbuf;
    verbose_only( fragment->profCount = 0; )
    verbose_only( fragment->nStaticExits = 0; )
    verbose_only( fragment->nCodeBytes = 0; )
    verbose_only( fragment->nExitBytes = 0; )
    verbose_only( fragment->guardNumberer = 1; )
    verbose_only( fragment->guardsForFrag = NULL; )
    verbose_only( fragment->loopLabel = NULL; )

    /*
     * Don't change fragment->profFragID, though. Once the identity of the
     * Fragment is set up (for profiling purposes), we can't change it.
     */

    guardedShapeTable.ops = NULL;

#ifdef JS_JIT_SPEW
    debug_only_print0(LC_TMMinimal, "\n");
    debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
                      ti->treeFileName, ti->treeLineNumber, ti->treePCOffset,
                      fragment->profFragID);

    debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
                      (void*)this->globalObj, OBJ_SHAPE(this->globalObj));
    debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
                      (void*)anchor);
#endif

    nanojit::LirWriter*& lir = InitConst(this->lir);
    lir = new (tempAlloc()) LirBufWriter(lirbuf);
    lir = new (tempAlloc()) SanityFilter(lir);
    if (js_LogController.lcbits & LC_TMRecorder) {
        lir = new (tempAlloc()) VerboseWriter(tempAlloc(), lir, lirbuf->names,
                                              &js_LogController);
    }
    // CseFilter must be downstream of SoftFloatFilter (see bug 527754 for why).
    lir = new (tempAlloc()) CseFilter(lir, tempAlloc());
    if (nanojit::AvmCore::config.soft_float)
        lir = new (tempAlloc()) SoftFloatFilter(lir);
    lir = new (tempAlloc()) ExprFilter(lir);
    lir = new (tempAlloc()) FuncFilter(lir);
    lir = new (tempAlloc()) SanityFilter(lir);
    lir->ins0(LIR_start);
2405 for (int i
= 0; i
< NumSavedRegs
; ++i
)
2406 lir
->insParam(i
, 1);
2408 for (int i
= 0; i
< NumSavedRegs
; ++i
)
2409 addName(lirbuf
->savedRegs
[i
], regNames
[Assembler::savedRegs
[i
]]);
2412 lirbuf
->state
= addName(lir
->insParam(0, 0), "state");
2414 if (fragment
== fragment
->root
)
2415 InitConst(loopLabel
) = lir
->ins0(LIR_label
);
2417 // if profiling, drop a label, so the assembler knows to put a
2418 // frag-entry-counter increment at this point. If there's a
2419 // loopLabel, use that; else we'll have to make a dummy label
2420 // especially for this purpose.
2421 verbose_only( if (js_LogController
.lcbits
& LC_FragProfile
) {
2422 LIns
* entryLabel
= NULL
;
2423 if (fragment
== fragment
->root
) {
2424 entryLabel
= loopLabel
;
2426 entryLabel
= lir
->ins0(LIR_label
);
2428 NanoAssert(entryLabel
);
2429 NanoAssert(!fragment
->loopLabel
);
2430 fragment
->loopLabel
= entryLabel
;
2433 lirbuf
->sp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, sp
)), "sp");
2434 lirbuf
->rp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, rp
)), "rp");
2435 InitConst(cx_ins
) = addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, cx
)), "cx");
2436 InitConst(eos_ins
) = addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eos
)), "eos");
2437 InitConst(eor_ins
) = addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eor
)), "eor");
2439 /* If we came from exit, we might not have enough global types. */
2440 if (ti
->globalSlots
->length() > ti
->nGlobalTypes())
2441 SpecializeTreesToMissingGlobals(cx
, globalObj
, ti
);
2443 /* read into registers all values on the stack and all globals we know so far */
2444 import(treeInfo
, lirbuf
->sp
, stackSlots
, ngslots
, callDepth
, typeMap
);
2446 /* Finish handling RECURSIVE_SLURP_FAIL_EXIT in startRecorder. */
2447 if (anchor
&& anchor
->exitType
== RECURSIVE_SLURP_FAIL_EXIT
)
2450 if (fragment
== fragment
->root
) {
2452 * We poll the operation callback request flag. It is updated asynchronously whenever
2453 * the callback is to be invoked.
2455 LIns
* x
= lir
->insLoad(LIR_ld
, cx_ins
, offsetof(JSContext
, operationCallbackFlag
));
2456 guard(true, lir
->ins_eq0(x
), snapshot(TIMEOUT_EXIT
));
2460 * If we are attached to a tree call guard, make sure the guard the inner
2461 * tree exited from is what we expect it to be.
2463 if (anchor
&& anchor
->exitType
== NESTED_EXIT
) {
2464 LIns
* nested_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
,
2465 offsetof(InterpState
, outermostTreeExitGuard
)),
2466 "outermostTreeExitGuard");
2467 guard(true, lir
->ins2(LIR_peq
, nested_ins
, INS_CONSTPTR(innermost
)), NESTED_EXIT
);
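
/*
 * For reference, the writer pipeline assembled in the constructor hands each
 * emitted instruction downstream roughly as
 *
 *   recorder -> [SanityFilter] -> FuncFilter -> ExprFilter
 *            -> [SoftFloatFilter] -> CseFilter -> [VerboseWriter]
 *            -> [SanityFilter] -> LirBufWriter -> lirbuf
 *
 * (bracketed stages are conditional). Each filter may rewrite or eliminate
 * instructions before they reach the LirBuffer, which is why CseFilter must
 * sit downstream of SoftFloatFilter as noted above.
 */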
TraceRecorder::~TraceRecorder()
{
    /* Should already have been adjusted by callers before calling delete. */
    JS_ASSERT(traceMonitor->recorder != this);
    JS_ASSERT(fragment->root == treeInfo->rootFragment);

    if (trashSelf)
        TrashTree(cx, fragment->root);

    for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
        TrashTree(cx, whichTreesToTrash[i]);

    /* Purge the tempAlloc used during recording. */
    tempAlloc().reset();
    traceMonitor->lirbuf->clear();

    forgetGuardedShapes();
}
bool
JSTraceMonitor::outOfMemory() const
{
    return dataAlloc->outOfMemory() ||
           tempAlloc->outOfMemory() ||
           traceAlloc->outOfMemory();
}
/*
 * This function destroys the recorder after a successful recording, possibly
 * starting a suspended outer recorder.
 */
AbortableRecordingStatus
TraceRecorder::finishSuccessfully()
{
    JS_ASSERT(traceMonitor->recorder == this);
    JS_ASSERT(fragment->lastIns && fragment->code());
    JS_ASSERT_IF(fragment == fragment->root, fragment->toTreeFragment()->treeInfo);

    AUDIT(traceCompleted);

    /* Grab local copies of members needed after |delete this|. */
    JSContext* localcx = cx;
    JSTraceMonitor* localtm = traceMonitor;

    localtm->recorder = NULL;
    delete this;

    /* Catch OOM that occurred during recording. */
    if (localtm->outOfMemory() || js_OverfullJITCache(localtm)) {
        ResetJIT(localcx, FR_OOM);
        return ARECORD_ABORTED;
    }
    return ARECORD_COMPLETED;
}
/* This function aborts a recorder and any pending outer recorders. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::finishAbort(const char* reason)
{
    JS_ASSERT(traceMonitor->recorder == this);
    JS_ASSERT(!fragment->code());
    JS_ASSERT_IF(fragment == fragment->root, !fragment->toTreeFragment()->treeInfo);

    AUDIT(recorderAborted);

    debug_only_printf(LC_TMAbort,
                      "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
                      treeInfo->treeFileName,
                      treeInfo->treeLineNumber,
                      treeInfo->treePCOffset,
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp),
                      reason);

    Backoff(cx, (jsbytecode*) fragment->root->ip, fragment->root);

    /*
     * If this is the primary trace and we didn't succeed compiling, trash the
     * TreeInfo object. Otherwise, remove the VMSideExits we added while
     * recording, which are about to be invalid.
     *
     * BIG FAT WARNING: resetting the length is only a valid strategy as long as
     * there may be only one recorder active for a single TreeInfo at a time.
     * Otherwise, we may be throwing away another recorder's valid side exits.
     */
    if (fragment->root == fragment) {
        TrashTree(cx, fragment->toTreeFragment());
    } else {
        JS_ASSERT(numSideExitsBefore <= fragment->root->treeInfo->sideExits.length());
        fragment->root->treeInfo->sideExits.setLength(numSideExitsBefore);
    }

    /* Grab local copies of members needed after |delete this|. */
    JSContext* localcx = cx;
    JSTraceMonitor* localtm = traceMonitor;

    localtm->recorder = NULL;
    delete this;

    if (localtm->outOfMemory() || js_OverfullJITCache(localtm))
        ResetJIT(localcx, FR_OOM);
    return ARECORD_ABORTED;
}
/* Add debug information to a LIR instruction as we emit it. */
inline LIns*
TraceRecorder::addName(LIns* ins, const char* name)
{
#ifdef JS_JIT_SPEW
    /*
     * We'll only ask for verbose Nanojit when .lcbits > 0, so there's no point
     * in adding names otherwise.
     */
    if (js_LogController.lcbits > 0)
        lirbuf->names->addName(ins, name);
#endif
    return ins;
}
inline LIns*
TraceRecorder::insImmVal(jsval val)
{
    if (JSVAL_IS_TRACEABLE(val))
        treeInfo->gcthings.addUnique(val);
    return lir->insImmWord(val);
}
inline LIns*
TraceRecorder::insImmObj(JSObject* obj)
{
    treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(obj));
    return lir->insImmPtr((void*)obj);
}
inline LIns*
TraceRecorder::insImmFun(JSFunction* fun)
{
    treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(FUN_OBJECT(fun)));
    return lir->insImmPtr((void*)fun);
}
inline LIns*
TraceRecorder::insImmStr(JSString* str)
{
    treeInfo->gcthings.addUnique(STRING_TO_JSVAL(str));
    return lir->insImmPtr((void*)str);
}
inline LIns*
TraceRecorder::insImmSprop(JSScopeProperty* sprop)
{
    treeInfo->sprops.addUnique(sprop);
    return lir->insImmPtr((void*)sprop);
}
inline LIns*
TraceRecorder::p2i(nanojit::LIns* ins)
{
#ifdef NANOJIT_64BIT
    return lir->ins1(LIR_qlo, ins);
#else
    return ins;
#endif
}
/* Determine the offset in the native global frame for a jsval we track. */
ptrdiff_t
TraceRecorder::nativeGlobalOffset(jsval* p) const
{
    JS_ASSERT(isGlobal(p));
    if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
        return size_t(p - globalObj->fslots) * sizeof(double);
    return ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
}
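
/*
 * Example: a global in fslots at slot 3 maps to byte offset
 * 3 * sizeof(double) == 24 in the native global frame, while dslots[0] lands
 * just past the fixed slots at JS_INITIAL_NSLOTS * sizeof(double). Every
 * slot is widened to a double-sized cell regardless of its type.
 */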
/* Determine whether a value is a global stack slot. */
bool
TraceRecorder::isGlobal(jsval* p) const
{
    return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
            (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
}
/*
 * Return the offset in the native stack for the given jsval. More formally,
 * |p| must be the address of a jsval that is represented in the native stack
 * area. The return value is the offset, from InterpState::stackBase, in bytes,
 * where the native representation of |*p| is stored. To get the offset
 * relative to InterpState::sp, subtract TreeInfo::nativeStackBase.
 */
JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativeStackOffset(jsval* p) const
{
    CountSlotsVisitor visitor(p);
    VisitStackSlots(visitor, cx, callDepth);
    size_t offset = visitor.count() * sizeof(double);

    /*
     * If it's not in a pending frame, it must be on the stack of the current
     * frame above sp but below fp->slots + script->nslots.
     */
    if (!visitor.stopped()) {
        JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
        offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
    }
    return offset;
}
/*
 * Return the offset, from InterpState::sp, for the given jsval. Shorthand for:
 * -TreeInfo::nativeStackBase + nativeStackOffset(p).
 */
inline JS_REQUIRES_STACK ptrdiff_t
TraceRecorder::nativespOffset(jsval* p) const
{
    return -treeInfo->nativeStackBase + nativeStackOffset(p);
}
/* Track the maximum number of native frame slots we need during execution. */
void
TraceRecorder::trackNativeStackUse(unsigned slots)
{
    if (slots > treeInfo->maxNativeStackSlots)
        treeInfo->maxNativeStackSlots = slots;
}
/*
 * Unbox a jsval into a slot. Slots are wide enough to hold double values
 * directly (instead of storing a pointer to them). We assert instead of
 * type checking. The caller must ensure the types are compatible.
 */
static void
ValueToNative(JSContext* cx, jsval v, JSTraceType type, double* slot)
{
    uint8_t tag = JSVAL_TAG(v);

    switch (type) {
      case TT_OBJECT:
        JS_ASSERT(tag == JSVAL_OBJECT);
        JS_ASSERT(!JSVAL_IS_NULL(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)));
        *(JSObject**)slot = JSVAL_TO_OBJECT(v);
        debug_only_printf(LC_TMTracer,
                          "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                          JSVAL_IS_NULL(v)
                          ? "null"
                          : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);
        return;

      case TT_INT32:
        jsint i;
        if (JSVAL_IS_INT(v))
            *(jsint*)slot = JSVAL_TO_INT(v);
        else if (tag == JSVAL_DOUBLE && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
            *(jsint*)slot = i;
        else
            JS_ASSERT(JSVAL_IS_INT(v));
        debug_only_printf(LC_TMTracer, "int<%d> ", *(jsint*)slot);
        return;

      case TT_DOUBLE: {
        jsdouble d;
        if (JSVAL_IS_INT(v))
            d = JSVAL_TO_INT(v);
        else
            d = *JSVAL_TO_DOUBLE(v);
        JS_ASSERT(JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v));
        *(jsdouble*)slot = d;
        debug_only_printf(LC_TMTracer, "double<%g> ", d);
        return;
      }

      case TT_JSVAL:
        JS_NOT_REACHED("found jsval type in an entry type map");
        return;

      case TT_STRING:
        JS_ASSERT(tag == JSVAL_STRING);
        *(JSString**)slot = JSVAL_TO_STRING(v);
        debug_only_printf(LC_TMTracer, "string<%p> ", (void*)(*(JSString**)slot));
        return;

      case TT_NULL:
        JS_ASSERT(tag == JSVAL_OBJECT);
        *(JSObject**)slot = NULL;
        debug_only_print0(LC_TMTracer, "null ");
        return;

      case TT_PSEUDOBOOLEAN:
        /* Watch out for pseudo-booleans. */
        JS_ASSERT(tag == JSVAL_SPECIAL);
        *(JSBool*)slot = JSVAL_TO_SPECIAL(v);
        debug_only_printf(LC_TMTracer, "pseudoboolean<%d> ", *(JSBool*)slot);
        return;

      case TT_FUNCTION: {
        JS_ASSERT(tag == JSVAL_OBJECT);
        JSObject* obj = JSVAL_TO_OBJECT(v);
        *(JSObject**)slot = obj;
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
        debug_only_printf(LC_TMTracer,
                          "function<%p:%s> ", (void*) obj,
                          fun->atom
                          ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                          : "unnamed");
#endif
        return;
      }
    }

    JS_NOT_REACHED("unexpected type");
}
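
/*
 * Note that every native slot is a full double-sized (8-byte) cell even when
 * the payload is an int32, object pointer, or JSBool; the stores above write
 * into the low bytes and the trace reads them back with a matching width.
 * A minimal sketch, with hypothetical values:
 *
 *   double slot;
 *   ValueToNative(cx, INT_TO_JSVAL(42), TT_INT32, &slot);
 *   // now *(jsint*)&slot == 42
 */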
void
JSTraceMonitor::flush()
{
    /* flush should only be called after all recorders have been aborted. */
    JS_ASSERT(!recorder);
    AUDIT(cacheFlushed);

    // recover profiling data from expiring Fragments
    verbose_only(
        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
            for (TreeFragment *f = vmfragments[i]; f; f = f->next) {
                JS_ASSERT(f->root == f);
                for (TreeFragment *p = f; p; p = p->peer)
                    js_FragProfiling_FragFinalizer(p, this);
            }
        }
    )

    verbose_only(
        for (Seq<Fragment*>* f = branches; f; f = f->tail)
            js_FragProfiling_FragFinalizer(f->head, this);
    )

    frameCache->reset();
    dataAlloc->reset();
    traceAlloc->reset();
    codeAlloc->reset();
    tempAlloc->reset();
    reTempAlloc->reset();

    Allocator& alloc = *dataAlloc;

    for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
        globalStates[i].globalShape = -1;
        globalStates[i].globalSlots = new (alloc) SlotList(&alloc);
    }

    assembler = new (alloc) Assembler(*codeAlloc, alloc, alloc, core, &js_LogController);
    lirbuf = new (alloc) LirBuffer(*tempAlloc);
    reLirBuf = new (alloc) LirBuffer(*reTempAlloc);
    verbose_only( branches = NULL; )

#ifdef DEBUG
    labels = new (alloc) LabelMap(alloc, &js_LogController);
    lirbuf->names = new (alloc) LirNameMap(alloc, labels);
#endif

    memset(&vmfragments[0], 0, FRAGMENT_TABLE_SIZE * sizeof(TreeFragment*));
    reFragments = new (alloc) REHashMap(alloc);

    needFlush = JS_FALSE;
}
static void
MarkTreeInfo(JSTracer* trc, TreeInfo *ti)
{
    jsval* vp = ti->gcthings.data();
    unsigned len = ti->gcthings.length();
    while (len--) {
        jsval v = *vp++;
        JS_SET_TRACING_NAME(trc, "jitgcthing");
        JS_CallTracer(trc, JSVAL_TO_TRACEABLE(v), JSVAL_TRACE_KIND(v));
    }
    JSScopeProperty** spropp = ti->sprops.data();
    len = ti->sprops.length();
    while (len--) {
        JSScopeProperty* sprop = *spropp++;
        sprop->trace(trc);
    }
}
void
JSTraceMonitor::mark(JSTracer* trc)
{
    if (!trc->context->runtime->gcFlushCodeCaches) {
        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
            TreeFragment* f = vmfragments[i];
            while (f) {
                if (TreeInfo* ti = f->treeInfo)
                    MarkTreeInfo(trc, ti);
                TreeFragment* peer = f->peer;
                while (peer) {
                    if (TreeInfo* ti = peer->treeInfo)
                        MarkTreeInfo(trc, ti);
                    peer = peer->peer;
                }
                f = f->next;
            }
        }
        if (recorder)
            MarkTreeInfo(trc, recorder->getTreeInfo());
    }
}
/*
 * Box a value from the native stack back into the jsval format. Integers that
 * are too large to fit into a jsval are automatically boxed into
 * heap-allocated doubles.
 */
static JSBool
js_NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
{
    JSBool ok;
    jsint i;
    jsdouble d;
    switch (type) {
      case TT_OBJECT:
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_printf(LC_TMTracer,
                          "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                          JSVAL_IS_NULL(v)
                          ? "null"
                          : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);
        break;

      case TT_INT32:
        i = *(jsint*)slot;
        debug_only_printf(LC_TMTracer, "int<%d> ", i);
      store_int:
        if (INT_FITS_IN_JSVAL(i)) {
            v = INT_TO_JSVAL(i);
            break;
        }
        d = (jsdouble)i;
        goto store_double;

      case TT_DOUBLE:
        d = *slot;
        debug_only_printf(LC_TMTracer, "double<%g> ", d);
        if (JSDOUBLE_IS_INT(d, i))
            goto store_int;
      store_double:
        ok = js_NewDoubleInRootedValue(cx, d, &v);
        if (!ok) {
            js_ReportOutOfMemory(cx);
            return JS_FALSE;
        }
        return JS_TRUE;

      case TT_JSVAL:
        v = *(jsval*)slot;
        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
        debug_only_printf(LC_TMTracer, "box<%p> ", (void*)v);
        break;

      case TT_STRING:
        v = STRING_TO_JSVAL(*(JSString**)slot);
        debug_only_printf(LC_TMTracer, "string<%p> ", (void*)(*(JSString**)slot));
        break;

      case TT_NULL:
        JS_ASSERT(*(JSObject**)slot == NULL);
        v = JSVAL_NULL;
        debug_only_printf(LC_TMTracer, "null<%p> ", (void*)(*(JSObject**)slot));
        break;

      case TT_PSEUDOBOOLEAN:
        /* Watch out for pseudo-booleans. */
        v = SPECIAL_TO_JSVAL(*(JSBool*)slot);
        debug_only_printf(LC_TMTracer, "boolean<%d> ", *(JSBool*)slot);
        break;

      case TT_FUNCTION: {
        JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject**)slot));
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
#ifdef DEBUG
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v));
        debug_only_printf(LC_TMTracer,
                          "function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
                          fun->atom
                          ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                          : "unnamed");
#endif
        break;
      }
    }
    return JS_TRUE;
}
class BuildNativeFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    JSTraceType *mTypeMap;
    double *mGlobal;
    double *mStack;
public:
    BuildNativeFrameVisitor(JSContext *cx,
                            JSTraceType *typemap,
                            double *global,
                            double *stack) :
        mCx(cx),
        mTypeMap(typemap),
        mGlobal(global),
        mStack(stack)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d: ", n);
        ValueToNative(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
        for (int i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
            ValueToNative(mCx, *vp++, *mTypeMap++, mStack++);
        }
        return true;
    }
};
static JS_REQUIRES_STACK void
BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
                 unsigned ngslots, uint16 *gslots,
                 JSTraceType *typeMap, double *global, double *stack)
{
    BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
    VisitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
    debug_only_print0(LC_TMTracer, "\n");
}
class FlushNativeGlobalFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    JSTraceType *mTypeMap;
    double *mGlobal;
public:
    FlushNativeGlobalFrameVisitor(JSContext *cx,
                                  JSTraceType *typeMap,
                                  double *global) :
        mCx(cx),
        mTypeMap(typeMap),
        mGlobal(global)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d=", n);
        JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
        if (!js_NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]))
            OutOfMemoryAbort();
    }
};
class FlushNativeStackFrameVisitor : public SlotVisitorBase
{
    JSContext *mCx;
    const JSTraceType *mInitTypeMap;
    const JSTraceType *mTypeMap;
    double *mStack;
    jsval *mStop;
    unsigned mIgnoreSlots;
public:
    FlushNativeStackFrameVisitor(JSContext *cx,
                                 const JSTraceType *typeMap,
                                 double *stack,
                                 jsval *stop,
                                 unsigned ignoreSlots) :
        mCx(cx),
        mInitTypeMap(typeMap),
        mTypeMap(typeMap),
        mStack(stack),
        mStop(stop),
        mIgnoreSlots(ignoreSlots)
    {}

    const JSTraceType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
        for (size_t i = 0; i < count; ++i) {
            if (vp == mStop)
                return false;
            debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
            if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots) {
                if (!js_NativeToValue(mCx, *vp, *mTypeMap, mStack))
                    OutOfMemoryAbort();
            }
            vp++;
            mTypeMap++;
            mStack++;
        }
        return true;
    }
};
/* Box the given native frame into a JS frame. This is infallible. */
static JS_REQUIRES_STACK void
FlushNativeGlobalFrame(JSContext *cx, JSObject *globalObj, double *global, unsigned ngslots,
                       uint16 *gslots, JSTraceType *typemap)
{
    FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
    VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
    debug_only_print0(LC_TMTracer, "\n");
}
/*
 * Returns the number of values on the native stack, excluding the innermost
 * frame. This walks all FrameInfos on the native frame stack and sums the
 * slot usage of each frame.
 */
static int32
StackDepthFromCallStack(InterpState* state, uint32 callDepth)
{
    int32 nativeStackFramePos = 0;

    // Duplicate native stack layout computation: see VisitFrameSlots header comment.
    for (FrameInfo** fip = state->callstackBase; fip < state->rp + callDepth; fip++)
        nativeStackFramePos += (*fip)->callerHeight;
    return nativeStackFramePos;
}
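
/*
 * E.g., with two FrameInfos in range whose callerHeight values are 5 and 7,
 * the loop yields 12; the innermost frame's own slots are deliberately not
 * counted, matching the "excluding the innermost frame" contract above.
 */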
/*
 * Generic function to read upvars on trace from slots of active frames.
 *     T            Traits type parameter. Must provide static functions:
 *                      interp_get(fp, slot)     Read the value out of an interpreter frame.
 *                      native_slot(argc, slot)  Return the position of the desired value in the on-trace
 *                                               stack frame (with position 0 being callee).
 *
 *     upvarLevel   Static level of the function containing the upvar definition
 *     slot         Identifies the value to get. The meaning is defined by the traits type.
 *     callDepth    Call depth of current point relative to trace entry
 */
template<typename T>
inline uint32
GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    InterpState* state = cx->interpState;
    FrameInfo** fip = state->rp + callDepth;

    /*
     * First search the FrameInfo call stack for an entry containing our
     * upvar, namely one with level == upvarLevel. The first FrameInfo is a
     * transition from the entry frame to some callee. However, it is not
     * known (from looking at the FrameInfo) whether the entry frame had a
     * callee. Rather than special-case this or insert more logic into the
     * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
     * |>= base|), and let the code after the loop handle it.
     */
    int32 stackOffset = StackDepthFromCallStack(state, callDepth);
    while (--fip > state->callstackBase) {
        FrameInfo* fi = *fip;

        /*
         * The loop starts aligned to the top of the stack, so move down to the first meaningful
         * callee. Then read the callee directly from the frame.
         */
        stackOffset -= fi->callerHeight;
        JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
        uintN calleeLevel = fun->u.i.script->staticLevel;
        if (calleeLevel == upvarLevel) {
            /*
             * Now find the upvar's value in the native stack. stackOffset is
             * the offset of the start of the activation record corresponding
             * to *fip in the native stack.
             */
            uint32 native_slot = T::native_slot(fi->callerArgc, slot);
            *result = state->stackBase[stackOffset + native_slot];
            return fi->get_typemap()[native_slot];
        }
    }

    // Next search the trace entry frame, which is not in the FrameInfo stack.
    if (state->outermostTree->script->staticLevel == upvarLevel) {
        uint32 argc = state->outermostTree->rootFragment->argc;
        uint32 native_slot = T::native_slot(argc, slot);
        *result = state->stackBase[native_slot];
        return state->callstackBase[0]->get_typemap()[native_slot];
    }

    /*
     * If we did not find the upvar in the frames for the active traces,
     * then we simply get the value from the interpreter state.
     */
    JS_ASSERT(upvarLevel < JS_DISPLAY_SIZE);
    JSStackFrame* fp = cx->display[upvarLevel];
    jsval v = T::interp_get(fp, slot);
    JSTraceType type = getCoercedType(v);
    ValueToNative(cx, v, type, result);
    return type;
}
// For this traits type, 'slot' is the argument index, which may be -2 for callee.
struct UpvarArgTraits {
    static jsval interp_get(JSStackFrame* fp, int32 slot) {
        return fp->argv[slot];
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        return 2 /*callee,this*/ + slot;
    }
};

uint32 JS_FASTCALL
GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    return GetUpvarOnTrace<UpvarArgTraits>(cx, upvarLevel, slot, callDepth, result);
}
// For this traits type, 'slot' is an index into the local slots array.
struct UpvarVarTraits {
    static jsval interp_get(JSStackFrame* fp, int32 slot) {
        return fp->slots[slot];
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        return 3 /*callee,this,arguments*/ + argc + slot;
    }
};

uint32 JS_FASTCALL
GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
{
    return GetUpvarOnTrace<UpvarVarTraits>(cx, upvarLevel, slot, callDepth, result);
}
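
/*
 * Taken together, the two traits above imply the on-trace layout of an
 * activation record (position 0 being the callee):
 *
 *   [ callee, this, arg0 .. argN-1, arguments, local0 .. ]
 *      0       1     2 ..  2+N-1     2+N        3+N ..
 *
 * which is why args live at 2 + slot and vars at 3 + argc + slot.
 */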
/*
 * For this traits type, 'slot' is an index into the stack area (within slots,
 * after nfixed) of a frame with no function. (On trace, the top-level frame is
 * the only one that can have no function.)
 */
struct UpvarStackTraits {
    static jsval interp_get(JSStackFrame* fp, int32 slot) {
        return fp->slots[slot + fp->script->nfixed];
    }

    static uint32 native_slot(uint32 argc, int32 slot) {
        /*
         * Locals are not imported by the tracer when the frame has no
         * function, so we do not add fp->script->nfixed.
         */
        JS_ASSERT(argc == 0);
        return slot;
    }
};

uint32 JS_FASTCALL
GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth,
                     double* result)
{
    return GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
}
// Parameters needed to access a value from a closure on trace.
struct ClosureVarInfo
{
    jsid   id;
    uint32 slot;
    uint32 callDepth;
    uint32 resolveFlags;
};
/*
 * Generic function to read upvars from Call objects of active heavyweight functions.
 *     call         Callee Function object in which the upvar is accessed.
 */
template<typename T>
inline uint32
GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result)
{
    JS_ASSERT(OBJ_GET_CLASS(cx, call) == &js_CallClass);

    InterpState* state = cx->interpState;

#ifdef DEBUG
    int32 stackOffset = StackDepthFromCallStack(state, cv->callDepth);
    FrameInfo** fip = state->rp + cv->callDepth;
    while (--fip > state->callstackBase) {
        FrameInfo* fi = *fip;
        JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
        if (callee == call) {
            // This is not reachable as long as JSOP_LAMBDA is not traced:
            // - The upvar is found at this point only if the upvar was defined on a frame that was
            //   entered on this trace.
            // - The upvar definition must be (dynamically, and thus on trace) before the definition
            //   of the function that uses the upvar.
            // - Therefore, if the upvar is found at this point, the function definition JSOP_LAMBDA
            //   is on the trace.
            JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
        }
        stackOffset -= fi->callerHeight;
    }
#endif

    /*
     * Here we specifically want to check the call object of the trace entry frame.
     */
    uint32 slot = cv->slot;
    VOUCH_DOES_NOT_REQUIRE_STACK();
    if (cx->fp->callobj == call) {
        slot = T::adj_slot(cx->fp, slot);
        *result = state->stackBase[slot];
        return state->callstackBase[0]->get_typemap()[slot];
    }

    JSStackFrame* fp = (JSStackFrame*) call->getPrivate();
    jsval v;
    if (fp) {
        v = T::slots(fp)[slot];
    } else {
        JS_ASSERT(cv->resolveFlags != JSRESOLVE_INFER);
        JSAutoResolveFlags rf(cx, cv->resolveFlags);
        js_GetPropertyHelper(cx, call, cv->id, JSGET_METHOD_BARRIER, &v);
    }
    JSTraceType type = getCoercedType(v);
    ValueToNative(cx, v, type, result);
    return type;
}
struct ArgClosureTraits
{
    // See also UpvarArgTraits.
    static inline uint32 adj_slot(JSStackFrame* fp, uint32 slot) { return 2 + slot; }

    // See also UpvarArgTraits.
    static inline jsval* slots(JSStackFrame* fp) { return fp->argv; }
};

uint32 JS_FASTCALL
GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
{
    return GetFromClosure<ArgClosureTraits>(cx, callee, cv, result);
}
struct VarClosureTraits
{
    // See also UpvarVarTraits.
    static inline uint32 adj_slot(JSStackFrame* fp, uint32 slot) { return 3 + fp->argc + slot; }

    // See also UpvarVarTraits.
    static inline jsval* slots(JSStackFrame* fp) { return fp->slots; }
};

uint32 JS_FASTCALL
GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
{
    return GetFromClosure<VarClosureTraits>(cx, callee, cv, result);
}
/*
 * Box the given native stack frame into the virtual machine stack. This
 * is infallible.
 *
 * @param callDepth the distance between the entry frame into our trace and
 *                  cx->fp when we make this call. If this is not called as a
 *                  result of a nested exit, callDepth is 0.
 * @param mp an array of JSTraceTypes that indicate what the types of the things
 *           on the stack are.
 * @param np pointer to the native stack. We want to copy values from here to
 *           the JS stack as needed.
 * @param stopFrame if non-null, this frame and everything above it should not
 *                  be restored.
 * @return the number of things we popped off of np.
 */
static JS_REQUIRES_STACK int
FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSTraceType* mp, double* np,
                      JSStackFrame* stopFrame, unsigned ignoreSlots)
{
    jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;

    /* Root all string and object references first (we don't need to call the GC for this). */
    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt, ignoreSlots);
    VisitStackSlots(visitor, cx, callDepth);

    // Restore thisv from the now-restored argv[-1] in each pending frame.
    // Keep in mind that we didn't restore frames at stopFrame and above!
    // Scope to keep |fp| from leaking into the macros we're using.
    {
        unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
        JSStackFrame* fp = cx->fp;
        if (stopFrame) {
            for (; fp != stopFrame; fp = fp->down) {
                JS_ASSERT(n != 0);
                --n;
            }

            // Skip over stopFrame itself.
            JS_ASSERT(n != 0);
            --n;
            fp = fp->down;
        }
        for (; n != 0; fp = fp->down) {
            --n;
            if (fp->argv) {
                if (fp->argsobj &&
                    js_GetArgsPrivateNative(JSVAL_TO_OBJECT(fp->argsobj))) {
                    JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(fp);
                }

                JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
                JS_ASSERT(HAS_FUNCTION_CLASS(fp->calleeObject()));
                JS_ASSERT(GET_FUNCTION_PRIVATE(cx, fp->callee()) == fp->fun);

                /*
                 * SynthesizeFrame sets scopeChain to NULL, because we can't calculate the
                 * correct scope chain until we have the final callee. Calculate the real
                 * scope object here.
                 */
                if (!fp->scopeChain) {
                    fp->scopeChain = OBJ_GET_PARENT(cx, fp->calleeObject());
                    if (fp->fun->flags & JSFUN_HEAVYWEIGHT) {
                        /*
                         * Set hookData to null because the failure case for js_GetCallObject
                         * involves it calling the debugger hook.
                         *
                         * Allocating the Call object must not fail, so use an object
                         * previously reserved by ExecuteTree if needed.
                         */
                        void* hookData = ((JSInlineFrame*)fp)->hookData;
                        ((JSInlineFrame*)fp)->hookData = NULL;
                        JS_ASSERT(JS_THREAD_DATA(cx)->waiveGCQuota);
                        js_GetCallObject(cx, fp);
                        ((JSInlineFrame*)fp)->hookData = hookData;
                    }
                }
                fp->thisv = fp->argv[-1];
                if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
                    fp->flags |= JSFRAME_COMPUTED_THIS;
            }
        }
    }
    debug_only_print0(LC_TMTracer, "\n");
    return visitor.getTypeMap() - mp;
}
/* Emit load instructions onto the trace that read the initial stack state. */
JS_REQUIRES_STACK void
TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, JSTraceType t,
                      const char *prefix, uintN index, JSStackFrame *fp)
{
    LIns* ins;
    if (t == TT_INT32) { /* demoted */
        JS_ASSERT(isInt32(*p));

        /*
         * Ok, we have a valid demotion attempt pending, so insert an integer
         * read and promote it to double since all arithmetic operations expect
         * to see doubles on entry. The first op to use this slot will emit a
         * f2i cast which will cancel out the i2f we insert here.
         */
        ins = lir->insLoad(LIR_ld, base, offset);
        ins = lir->ins1(LIR_i2f, ins);
    } else {
        JS_ASSERT_IF(t != TT_JSVAL, isNumber(*p) == (t == TT_DOUBLE));
        if (t == TT_DOUBLE) {
            ins = lir->insLoad(LIR_ldq, base, offset);
        } else if (t == TT_PSEUDOBOOLEAN) {
            ins = lir->insLoad(LIR_ld, base, offset);
        } else {
            ins = lir->insLoad(LIR_ldp, base, offset);
        }
    }
    checkForGlobalObjectReallocation();
    tracker.set(p, ins);

#ifdef DEBUG
    char name[64];
    JS_ASSERT(strlen(prefix) < 10);
    void* mark = NULL;
    jsuword* localNames = NULL;
    const char* funName = NULL;
    if (*prefix == 'a' || *prefix == 'v') {
        mark = JS_ARENA_MARK(&cx->tempPool);
        if (fp->fun->hasLocalNames())
            localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
        funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
    }
    if (!strcmp(prefix, "argv")) {
        if (index < fp->fun->nargs) {
            JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
            JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
        } else {
            JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
        }
    } else if (!strcmp(prefix, "vars")) {
        JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
        JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
    } else {
        JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
    }

    if (mark)
        JS_ARENA_RELEASE(&cx->tempPool, mark);
    addName(ins, name);

    static const char* typestr[] = {
        "object", "int", "double", "jsval", "string", "null", "boolean", "function"
    };
    debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%s flags=%d\n",
                      (void*)p, name, typestr[t & 7], t >> 3);
#endif
}
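
/*
 * A sketch of the LIR emitted above for a demoted (TT_INT32) slot, assuming
 * |base| is the native stack pointer and |offset| the slot's byte offset:
 *
 *   ld   tmp = [sp + offset]   ; 32-bit integer read
 *   i2f  ins = tmp             ; promote to double for on-trace arithmetic
 *
 * Per the comment in the function, the first op consuming the slot emits the
 * cancelling f2i, so no conversion survives into the compiled loop body.
 */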
class ImportGlobalSlotVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    LIns *mBase;
    JSTraceType *mTypemap;
public:
    ImportGlobalSlotVisitor(TraceRecorder &recorder,
                            LIns *base,
                            JSTraceType *typemap) :
        mRecorder(recorder),
        mBase(base),
        mTypemap(typemap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        JS_ASSERT(*mTypemap != TT_JSVAL);
        mRecorder.import(mBase, mRecorder.nativeGlobalOffset(vp),
                         vp, *mTypemap++, "global", n, NULL);
    }
};
class ImportBoxedStackSlotVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    LIns *mBase;
    ptrdiff_t mStackOffset;
    JSTraceType *mTypemap;
public:
    ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
                                LIns *base,
                                ptrdiff_t stackOffset,
                                JSTraceType *typemap) :
        mRecorder(recorder),
        mBase(base),
        mStackOffset(stackOffset),
        mTypemap(typemap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            if (*mTypemap == TT_JSVAL) {
                mRecorder.import(mBase, mStackOffset, vp, TT_JSVAL,
                                 "jsval", i, fp);
                LIns *vp_ins = mRecorder.unbox_jsval(*vp, mRecorder.get(vp),
                                                     mRecorder.copy(mRecorder.anchor));
                mRecorder.set(vp, vp_ins);
            }
            vp++;
            mTypemap++;
            mStackOffset += sizeof(double);
        }
        return true;
    }
};
class ImportUnboxedStackSlotVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    LIns *mBase;
    ptrdiff_t mStackOffset;
    JSTraceType *mTypemap;
public:
    ImportUnboxedStackSlotVisitor(TraceRecorder &recorder,
                                  LIns *base,
                                  ptrdiff_t stackOffset,
                                  JSTraceType *typemap) :
        mRecorder(recorder),
        mBase(base),
        mStackOffset(stackOffset),
        mTypemap(typemap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            if (*mTypemap != TT_JSVAL) {
                mRecorder.import(mBase, mStackOffset, vp++, *mTypemap,
                                 stackSlotKind(), i, fp);
            }
            mTypemap++;
            mStackOffset += sizeof(double);
        }
        return true;
    }
};
void
3585 TraceRecorder::import(TreeInfo
* treeInfo
, LIns
* sp
, unsigned stackSlots
, unsigned ngslots
,
3586 unsigned callDepth
, JSTraceType
* typeMap
)
3589 * If we get a partial list that doesn't have all the types (i.e. recording
3590 * from a side exit that was recorded but we added more global slots
3591 * later), merge the missing types from the entry type map. This is safe
3592 * because at the loop edge we verify that we have compatible types for all
3593 * globals (entry type and loop edge type match). While a different trace
3594 * of the tree might have had a guard with a different type map for these
3595 * slots we just filled in here (the guard we continue from didn't know
3596 * about them), since we didn't take that particular guard the only way we
3597 * could have ended up here is if that other trace had at its end a
3598 * compatible type distribution with the entry map. Since that's exactly
3599 * what we used to fill in the types our current side exit didn't provide,
3600 * this is always safe to do.
3603 JSTraceType
* globalTypeMap
= typeMap
+ stackSlots
;
3604 unsigned length
= treeInfo
->nGlobalTypes();
3607 * This is potentially the typemap of the side exit and thus shorter than
3608 * the tree's global type map.
3610 if (ngslots
< length
) {
3611 MergeTypeMaps(&globalTypeMap
/* out param */, &ngslots
/* out param */,
3612 treeInfo
->globalTypeMap(), length
,
3613 (JSTraceType
*)alloca(sizeof(JSTraceType
) * length
));
3615 JS_ASSERT(ngslots
== treeInfo
->nGlobalTypes());
3616 ptrdiff_t offset
= -treeInfo
->nativeStackBase
;
3619 * Check whether there are any values on the stack we have to unbox and do
3620 * that first before we waste any time fetching the state from the stack.
3622 if (!anchor
|| anchor
->exitType
!= RECURSIVE_SLURP_FAIL_EXIT
) {
3623 ImportBoxedStackSlotVisitor
boxedStackVisitor(*this, sp
, offset
, typeMap
);
3624 VisitStackSlots(boxedStackVisitor
, cx
, callDepth
);
3627 ImportGlobalSlotVisitor
globalVisitor(*this, eos_ins
, globalTypeMap
);
3628 VisitGlobalSlots(globalVisitor
, cx
, globalObj
, ngslots
,
3629 treeInfo
->globalSlots
->data());
3631 if (!anchor
|| anchor
->exitType
!= RECURSIVE_SLURP_FAIL_EXIT
) {
3632 ImportUnboxedStackSlotVisitor
unboxedStackVisitor(*this, sp
, offset
,
3634 VisitStackSlots(unboxedStackVisitor
, cx
, callDepth
);
JS_REQUIRES_STACK bool
TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
{
    uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));

    if (setflags) {
        if (!SPROP_HAS_STUB_SETTER(sprop))
            RETURN_VALUE("non-stub setter", false);
        if (sprop->attrs & JSPROP_READONLY)
            RETURN_VALUE("writing to a read-only property", false);
    }

    /* This check applies even when setflags == 0. */
    if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop)) {
        JS_ASSERT(!sprop->isMethod());
        RETURN_VALUE("non-stub getter", false);
    }

    if (!SPROP_HAS_VALID_SLOT(sprop, scope))
        RETURN_VALUE("slotless obj property", false);

    return true;
}
/* Lazily import a global slot if we don't already have it in the tracker. */
JS_REQUIRES_STACK bool
TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
{
    if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
        return false;

    /*
     * If the global object grows too large, alloca in ExecuteTree might fail,
     * so abort tracing on global objects with unreasonably many slots.
     */
    if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
        return false;
    jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
    if (known(vp))
        return true; /* we already have it */
    unsigned index = treeInfo->globalSlots->length();

    /* Add the slot to the list of interned global slots. */
    JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
    treeInfo->globalSlots->add(slot);
    JSTraceType type = getCoercedType(*vp);
    if (type == TT_INT32 && oracle.isGlobalSlotUndemotable(cx, slot))
        type = TT_DOUBLE;
    treeInfo->typeMap.add(type);
    import(eos_ins, slot*sizeof(double), vp, type, "global", index, NULL);
    SpecializeTreesToMissingGlobals(cx, globalObj, treeInfo);
    return true;
}
/* Write back a value onto the stack or global frames. */
LIns*
TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset, bool demote)
{
    /*
     * Sink all type casts targeting the stack into the side exit by simply storing the original
     * (uncasted) value. Each guard generates the side exit map based on the types of the
     * last stores to every stack location, so it's safe to not perform them on-trace.
     */
    if (demote && isPromoteInt(i))
        i = ::demote(lir, i);
    return lir->insStorei(i, base, offset);
}
/* Update the tracker, then issue a write back store. */
JS_REQUIRES_STACK void
TraceRecorder::set(jsval* p, LIns* i, bool initializing, bool demote)
{
    JS_ASSERT(i != NULL);
    JS_ASSERT(initializing || known(p));
    checkForGlobalObjectReallocation();
    tracker.set(p, i);

    /*
     * If we are writing to this location for the first time, calculate the
     * offset into the native frame manually. Otherwise just look up the last
     * load or store associated with the same source address (p) and use the
     * same offset/base.
     */
    LIns* x = nativeFrameTracker.get(p);
    if (!x) {
        if (isGlobal(p))
            x = writeBack(i, eos_ins, nativeGlobalOffset(p), demote);
        else
            x = writeBack(i, lirbuf->sp, nativespOffset(p), demote);
        nativeFrameTracker.set(p, x);
    } else {
        JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));

        int disp;
        LIns *base = x->oprnd2();
        if (base->isop(LIR_piadd)) {
            disp = base->oprnd2()->imm32();
            base = base->oprnd1();
        } else {
            disp = x->disp();
        }

        JS_ASSERT(base == lirbuf->sp || base == eos_ins);
        JS_ASSERT(disp == ((base == lirbuf->sp)
                            ? nativespOffset(p)
                            : nativeGlobalOffset(p)));

        writeBack(i, base, disp, demote);
    }
}
JS_REQUIRES_STACK LIns*
TraceRecorder::get(jsval* p)
{
    JS_ASSERT(known(p));
    checkForGlobalObjectReallocation();
    return tracker.get(p);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::addr(jsval* p)
{
    return isGlobal(p)
           ? lir->ins2(LIR_piadd, eos_ins, INS_CONSTWORD(nativeGlobalOffset(p)))
           : lir->ins2(LIR_piadd, lirbuf->sp,
                       INS_CONSTWORD(nativespOffset(p)));
}
JS_REQUIRES_STACK bool
TraceRecorder::known(jsval* p)
{
    checkForGlobalObjectReallocation();
    return tracker.has(p);
}
/*
 * The dslots of the global object are sometimes reallocated by the interpreter.
 * This function checks for that condition and re-maps the entries of the tracker
 * accordingly.
 */
JS_REQUIRES_STACK void
TraceRecorder::checkForGlobalObjectReallocation()
{
    if (global_dslots != globalObj->dslots) {
        debug_only_print0(LC_TMTracer,
                          "globalObj->dslots relocated, updating tracker\n");
        jsval* src = global_dslots;
        jsval* dst = globalObj->dslots;
        jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
        LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
        for (jsuint n = 0; n < length; ++n) {
            map[n] = tracker.get(src);
            tracker.set(src++, NULL);
        }
        for (jsuint n = 0; n < length; ++n)
            tracker.set(dst++, map[n]);
        global_dslots = globalObj->dslots;
    }
}
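
/*
 * The remapping above is needed because the tracker is keyed by the address
 * of the jsval, not by slot number: once the interpreter reallocates
 * globalObj->dslots, every previously imported global lives at a new address,
 * so each LIns* is moved from its old key (src) to its new one (dst).
 */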
/* Determine whether the current branch is a loop edge (taken or not taken). */
static JS_REQUIRES_STACK bool
IsLoopEdge(jsbytecode* pc, jsbytecode* header)
{
    switch (*pc) {
      case JSOP_IFEQ:
      case JSOP_IFNE:
        return ((pc + GET_JUMP_OFFSET(pc)) == header);
      case JSOP_IFEQX:
      case JSOP_IFNEX:
        return ((pc + GET_JUMPX_OFFSET(pc)) == header);
      default:
        JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
                  (*pc == JSOP_OR) || (*pc == JSOP_ORX));
    }
    return false;
}
class AdjustCallerGlobalTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    nanojit::LirBuffer *mLirbuf;
    nanojit::LirWriter *mLir;
    JSTraceType *mTypeMap;
public:
    AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
                                   JSTraceType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mLirbuf(mRecorder.lirbuf),
        mLir(mRecorder.lir),
        mTypeMap(typeMap)
    {}

    JSTraceType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        LIns *ins = mRecorder.get(vp);
        bool isPromote = isPromoteInt(ins);
        if (isPromote && *mTypeMap == TT_DOUBLE) {
            mLir->insStorei(mRecorder.get(vp), mRecorder.eos_ins,
                            mRecorder.nativeGlobalOffset(vp));

            /*
             * Aggressively undo speculation so the inner tree will compile
             * if this fails.
             */
            oracle.markGlobalSlotUndemotable(mCx, slot);
        }
        JS_ASSERT(!(!isPromote && *mTypeMap == TT_INT32));
        mTypeMap++;
    }
};
class AdjustCallerStackTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    nanojit::LirBuffer *mLirbuf;
    nanojit::LirWriter *mLir;
    unsigned mSlotnum;
    JSTraceType *mTypeMap;
public:
    AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
                                  JSTraceType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mLirbuf(mRecorder.lirbuf),
        mLir(mRecorder.lir),
        mSlotnum(0),
        mTypeMap(typeMap)
    {}

    JSTraceType* getTypeMap()
    {
        return mTypeMap;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            LIns *ins = mRecorder.get(vp);
            bool isPromote = isPromoteInt(ins);
            if (isPromote && *mTypeMap == TT_DOUBLE) {
                mLir->insStorei(mRecorder.get(vp), mLirbuf->sp,
                                mRecorder.nativespOffset(vp));

                /*
                 * Aggressively undo speculation so the inner tree will compile
                 * if this fails.
                 */
                oracle.markStackSlotUndemotable(mCx, mSlotnum);
            }
            JS_ASSERT(!(!isPromote && *mTypeMap == TT_INT32));
            vp++;
            mTypeMap++;
            mSlotnum++;
        }
        return true;
    }
};
/*
 * Promote slots if necessary to match the called tree's type map. This
 * function is infallible and must only be called if we are certain that it is
 * possible to reconcile the types for each slot in the inner and outer trees.
 */
JS_REQUIRES_STACK void
TraceRecorder::adjustCallerTypes(TreeFragment* f)
{
    TreeInfo* ti = f->treeInfo;

    AdjustCallerGlobalTypesVisitor globalVisitor(*this, ti->globalTypeMap());
    VisitGlobalSlots(globalVisitor, cx, *treeInfo->globalSlots);

    AdjustCallerStackTypesVisitor stackVisitor(*this, ti->stackTypeMap());
    VisitStackSlots(stackVisitor, cx, 0);

    JS_ASSERT(f == f->root);
}
JS_REQUIRES_STACK JSTraceType
TraceRecorder::determineSlotType(jsval* vp)
{
    JSTraceType m;
    LIns* i = get(vp);
    if (isNumber(*vp)) {
        m = isPromoteInt(i) ? TT_INT32 : TT_DOUBLE;
    } else if (JSVAL_IS_OBJECT(*vp)) {
        if (JSVAL_IS_NULL(*vp))
            m = TT_NULL;
        else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
            m = TT_FUNCTION;
        else
            m = TT_OBJECT;
    } else {
        JS_ASSERT(JSVAL_TAG(*vp) == JSVAL_STRING || JSVAL_IS_SPECIAL(*vp));
        JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_STRING) == JSVAL_STRING);
        JS_STATIC_ASSERT(static_cast<jsvaltag>(TT_PSEUDOBOOLEAN) == JSVAL_SPECIAL);
        m = JSTraceType(JSVAL_TAG(*vp));
    }
    JS_ASSERT(m != TT_INT32 || isInt32(*vp));
    return m;
}
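
/*
 * The static asserts above let the final case reuse the jsval tag bits
 * directly: because TT_STRING and TT_PSEUDOBOOLEAN coincide numerically with
 * JSVAL_STRING and JSVAL_SPECIAL, JSTraceType(JSVAL_TAG(*vp)) is already the
 * correct trace type for strings and pseudo-booleans.
 */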
class DetermineTypesVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSTraceType *mTypeMap;
public:
    DetermineTypesVisitor(TraceRecorder &recorder,
                          JSTraceType *typeMap) :
        mRecorder(recorder),
        mTypeMap(typeMap)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        *mTypeMap++ = mRecorder.determineSlotType(vp);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i)
            *mTypeMap++ = mRecorder.determineSlotType(vp++);
        return true;
    }

    JSTraceType* getTypeMap()
    {
        return mTypeMap;
    }
};
3979 JS_REQUIRES_STACK
static void
3980 TreevisLogExit(JSContext
* cx
, VMSideExit
* exit
)
3982 debug_only_printf(LC_TMTreeVis
, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
3983 " LINE=%d OFFS=%d", (void*)exit
, getExitName(exit
->exitType
),
3984 (void*)exit
->from
, (void*)cx
->fp
->regs
->pc
, cx
->fp
->script
->filename
,
3985 js_FramePCToLineNumber(cx
, cx
->fp
), FramePCOffset(cx
->fp
));
3986 debug_only_print0(LC_TMTreeVis
, " STACK=\"");
3987 for (unsigned i
= 0; i
< exit
->numStackSlots
; i
++)
3988 debug_only_printf(LC_TMTreeVis
, "%c", typeChar
[exit
->stackTypeMap()[i
]]);
3989 debug_only_print0(LC_TMTreeVis
, "\" GLOBALS=\"");
3990 for (unsigned i
= 0; i
< exit
->numGlobalSlots
; i
++)
3991 debug_only_printf(LC_TMTreeVis
, "%c", typeChar
[exit
->globalTypeMap()[i
]]);
3992 debug_only_print0(LC_TMTreeVis
, "\"\n");
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::snapshot(ExitType exitType)
{
    JSStackFrame* fp = cx->fp;
    JSFrameRegs* regs = fp->regs;
    jsbytecode* pc = regs->pc;

    /*
     * Check for a return-value opcode that needs to restart at the next
     * instruction.
     */
    const JSCodeSpec& cs = js_CodeSpec[*pc];

    /*
     * When calling a _FAIL native, make the snapshot's pc point to the next
     * instruction after the CALL or APPLY. Even on failure, a _FAIL native
     * must not be called again from the interpreter.
     */
    bool resumeAfter = (pendingSpecializedNative &&
                        JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
    if (resumeAfter) {
        JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW ||
                  *pc == JSOP_SETPROP || *pc == JSOP_SETNAME);
        pc += cs.length;
        regs->pc = pc;
        MUST_FLOW_THROUGH("restore_pc");
    }

    /*
     * Generate the entry map for the (possibly advanced) pc and stash it in
     * the trace.
     */
    unsigned stackSlots = NativeStackSlots(cx, callDepth);

    /*
     * It's sufficient to track the native stack use here since all stores
     * above the stack watermark defined by guards are killed.
     */
    trackNativeStackUse(stackSlots + 1);

    /* Capture the type map into a temporary location. */
    unsigned ngslots = treeInfo->globalSlots->length();
    unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSTraceType);

    /* Use the recorder-local temporary type map. */
    JSTraceType* typemap = NULL;
    if (tempTypeMap.resize(typemap_size))
        typemap = tempTypeMap.begin(); /* crash if resize() fails. */

    /*
     * Determine the type of a store by looking at the current type of the
     * actual value the interpreter is using. For numbers we have to check what
     * kind of store we used last (integer or double) to figure out what the
     * side exit should reflect in its typemap.
     */
    DetermineTypesVisitor detVisitor(*this, typemap);
    VisitSlots(detVisitor, cx, callDepth, ngslots,
               treeInfo->globalSlots->data());
    JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) ==
              ngslots + stackSlots);

    /*
     * If this snapshot is for a side exit that leaves a boxed jsval result on
     * the stack, make a note of this in the typemap. Examples include the
     * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
     * GetPropertyByName; and the type guard in unbox_jsval after such a call
     * (also at the beginning of a trace branched from such a type guard).
     */
    if (pendingUnboxSlot ||
        (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) {
        unsigned pos = stackSlots - 1;
        if (pendingUnboxSlot == cx->fp->regs->sp - 2)
            pos = stackSlots - 2;
        typemap[pos] = TT_JSVAL;
    }

    /* Now restore the original pc (after which early returns are ok). */
    if (resumeAfter) {
        MUST_FLOW_LABEL(restore_pc);
        regs->pc = pc - cs.length;
    } else {
        /*
         * If we take a snapshot on a goto, advance to the target address. This
         * avoids inner trees returning on a break goto, which the outer
         * recorder then would confuse with a break in the outer tree.
         */
        if (*pc == JSOP_GOTO)
            pc += GET_JUMP_OFFSET(pc);
        else if (*pc == JSOP_GOTOX)
            pc += GET_JUMPX_OFFSET(pc);
    }

    /*
     * Check if we already have a matching side exit; if so we can return that
     * side exit instead of creating a new one.
     */
    VMSideExit** exits = treeInfo->sideExits.data();
    unsigned nexits = treeInfo->sideExits.length();
    if (exitType == LOOP_EXIT) {
        for (unsigned n = 0; n < nexits; ++n) {
            VMSideExit* e = exits[n];
            if (e->pc == pc && e->imacpc == fp->imacpc &&
                ngslots == e->numGlobalSlots &&
                !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
                AUDIT(mergedLoopExits);
#if defined JS_JIT_SPEW
                TreevisLogExit(cx, e);
#endif
                return e;
            }
        }
    }

    /* We couldn't find a matching side exit, so create a new one. */
    VMSideExit* exit = (VMSideExit*)
        traceAlloc().alloc(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSTraceType));

    /* Setup side exit structure. */
    exit->from = fragment;
    exit->calldepth = callDepth;
    exit->numGlobalSlots = ngslots;
    exit->numStackSlots = stackSlots;
    exit->numStackSlotsBelowCurrentFrame = cx->fp->argv ?
        nativeStackOffset(&cx->fp->argv[-2]) / sizeof(double) :
        0;
    exit->exitType = exitType;
    exit->block = fp->blockChain;
    if (fp->blockChain)
        treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
    exit->pc = pc;
    exit->imacpc = fp->imacpc;
    exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
    exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
    exit->nativeCalleeWord = 0;
    exit->lookupFlags = js_InferFlags(cx, 0);
    memcpy(exit->fullTypeMap(), typemap, typemap_size);

#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif
    return exit;
}
JS_REQUIRES_STACK GuardRecord*
TraceRecorder::createGuardRecord(VMSideExit* exit)
{
    GuardRecord* gr = new (traceAlloc()) GuardRecord();
    gr->exit = exit;
    exit->addGuard(gr);

    // gr->profCount is calloc'd to zero
    verbose_only(
        gr->profGuardID = fragment->guardNumberer++;
        gr->nextInFrag = fragment->guardsForFrag;
        fragment->guardsForFrag = gr;
    )

    return gr;
}
/*
 * Emit a guard for condition (cond), expecting to evaluate to boolean result
 * (expected) and using the supplied side exit if the condition doesn't hold.
 */
JS_REQUIRES_STACK void
TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
{
    debug_only_printf(LC_TMRecorder,
                      "    About to try emitting guard code for "
                      "SideExit=%p exitType=%s\n",
                      (void*)exit, getExitName(exit->exitType));

    GuardRecord* guardRec = createGuardRecord(exit);

    if (exit->exitType == LOOP_EXIT)
        treeInfo->sideExits.add(exit);

    if (!cond->isCond()) {
        expected = !expected;
        cond = cond->isQuad() ? lir->ins_peq0(cond) : lir->ins_eq0(cond);
    }

    LIns* guardIns =
        lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
    if (!guardIns) {
        debug_only_print0(LC_TMRecorder,
                          "    redundant guard, eliminated, no codegen\n");
    }
}
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::copy(VMSideExit* copy)
{
    size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
    VMSideExit* exit = (VMSideExit*)
        traceAlloc().alloc(sizeof(VMSideExit) + typemap_size * sizeof(JSTraceType));

    /* Copy side exit structure. */
    memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(JSTraceType));
    exit->guards = NULL;
    exit->from = fragment;
    exit->target = NULL;

    if (exit->exitType == LOOP_EXIT)
        treeInfo->sideExits.add(exit);
#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif
    return exit;
}
/*
 * Emit a guard for condition (cond), expecting to evaluate to boolean result
 * (expected) and generate a side exit with type exitType to jump to if the
 * condition does not hold.
 */
JS_REQUIRES_STACK void
TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
{
    guard(expected, cond, snapshot(exitType));
}
/*
 * Determine whether any context associated with the same thread as cx is
 * executing native code.
 */
static bool
ProhibitFlush(JSContext* cx)
{
    if (cx->interpState) // early out if the given context is in native code
        return true;

    JSCList *cl;

#ifdef JS_THREADSAFE
    JSThread* thread = cx->thread;
    for (cl = thread->contextList.next; cl != &thread->contextList; cl = cl->next)
        if (CX_FROM_THREAD_LINKS(cl)->interpState)
            return true;
#else
    JSRuntime* rt = cx->runtime;
    for (cl = rt->contextList.next; cl != &rt->contextList; cl = cl->next)
        if (js_ContextFromLinkField(cl)->interpState)
            return true;
#endif
    return false;
}
static JS_REQUIRES_STACK void
ResetJITImpl(JSContext* cx)
{
    if (!TRACING_ENABLED(cx))
        return;
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    debug_only_print0(LC_TMTracer, "Flushing cache.\n");
    if (tm->recorder)
        js_AbortRecording(cx, "flush cache");
    if (ProhibitFlush(cx)) {
        debug_only_print0(LC_TMTracer, "Deferring JIT flush due to deep bail.\n");
        tm->needFlush = JS_TRUE;
        return;
    }
    tm->flush();
}

JS_REQUIRES_STACK void
js_ResetJIT(JSContext* cx)
{
    ResetJIT(cx, FR_OOM);
}
/* Compile the current fragment. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::compile()
{
#ifdef MOZ_TRACEVIS
    TraceVisStateObj tvso(cx, S_COMPILE);
#endif

    if (traceMonitor->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return ARECORD_ABORTED;
    }
    if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
        debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
        Blacklist((jsbytecode*) fragment->root->ip);
        return ARECORD_STOP;
    }
    if (anchor && anchor->exitType != CASE_EXIT)
        ++fragment->root->branchCount;
    if (traceMonitor->outOfMemory())
        return ARECORD_STOP;

    /* :TODO: windows support */
#if defined DEBUG && !defined WIN32
    /* Associate a filename and line number with the fragment. */
    const char* filename = cx->fp->script->filename;
    char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
    sprintf(label, "%s:%u", filename ? filename : "<stdin>",
            js_FramePCToLineNumber(cx, cx->fp));
    traceMonitor->labels->add(fragment, sizeof(Fragment), 0, label);
#endif

    Assembler *assm = traceMonitor->assembler;
    JS_ASSERT(assm->error() == nanojit::None);
    nanojit::compile(assm, fragment, tempAlloc() verbose_only(, traceMonitor->labels));

    if (assm->error() != nanojit::None) {
        assm->setError(nanojit::None);
        debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
        Blacklist((jsbytecode*) fragment->root->ip);
        return ARECORD_STOP;
    }

    if (traceMonitor->outOfMemory())
        return ARECORD_STOP;
    ResetRecordingAttempts(cx, (jsbytecode*) fragment->ip);
    ResetRecordingAttempts(cx, (jsbytecode*) fragment->root->ip);
    if (anchor) {
#ifdef NANOJIT_IA32
        if (anchor->exitType == CASE_EXIT)
            assm->patch(anchor, anchor->switchInfo);
        else
#endif
            assm->patch(anchor);
    }
    JS_ASSERT(fragment->code());
    JS_ASSERT_IF(fragment == fragment->root, !fragment->root->treeInfo);
    if (fragment == fragment->root)
        fragment->root->treeInfo = treeInfo;

    return ARECORD_CONTINUE;
}
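
/*
 * compile() is the single funnel through which closeLoop() and endLoop()
 * hand a finished recording to nanojit; any assembler error blacklists the
 * root fragment so we do not keep retrying a loop that cannot compile.
 */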
static void
JoinPeers(Assembler* assm, VMSideExit* exit, TreeFragment* target)
{
    exit->target = target;
    assm->patch(exit);

    debug_only_printf(LC_TMTreeVis, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit, (void*)target);

    if (exit->root() == target)
        return;

    target->treeInfo->dependentTrees.addUnique(exit->root());
    exit->root()->treeInfo->linkedTrees.addUnique(target);
}
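
/*
 * JoinPeers() records the new edge in both directions (dependentTrees and
 * linkedTrees) so that trashing either tree later also invalidates code
 * that was patched to jump into it.
 */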
/* Results of trying to connect an arbitrary type A with arbitrary type B */
enum TypeCheckResult
{
    TypeCheck_Okay,     /* Okay: same type */
    TypeCheck_Promote,  /* Okay: Type A needs f2i() */
    TypeCheck_Demote,   /* Okay: Type A needs i2f() */
    TypeCheck_Undemote, /* Bad: Slot is undemotable */
    TypeCheck_Bad       /* Bad: incompatible types */
};

class SlotMap : public SlotVisitorBase
{
  public:
    struct SlotInfo
    {
        SlotInfo()
          : vp(NULL), promoteInt(false), lastCheck(TypeCheck_Bad)
        {}
        SlotInfo(jsval* vp, bool promoteInt)
          : vp(vp), promoteInt(promoteInt), lastCheck(TypeCheck_Bad), type(getCoercedType(*vp))
        {}
        SlotInfo(jsval* vp, JSTraceType t)
          : vp(vp), promoteInt(t == TT_INT32), lastCheck(TypeCheck_Bad), type(t)
        {}

        jsval           *vp;
        bool            promoteInt;
        TypeCheckResult lastCheck;
        JSTraceType     type;
    };

    SlotMap(TraceRecorder& rec)
      : mRecorder(rec),
        mCx(rec.cx),
        slots(NULL)
    {
    }
    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot)
    {
        addSlot(vp);
    }

    JS_ALWAYS_INLINE SlotMap::SlotInfo&
    operator [](unsigned i)
    {
        return slots[i];
    }

    JS_ALWAYS_INLINE SlotMap::SlotInfo&
    get(unsigned i)
    {
        return slots[i];
    }

    JS_ALWAYS_INLINE unsigned
    length()
    {
        return slots.length();
    }
    /*
     * Possible return states:
     *
     * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
     *                     promotes/demotes.
     * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
     * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
     *                          before recording began. Caller can go through slot list and mark
     *                          such slots as undemotable.
     */
    JS_REQUIRES_STACK TypeConsensus
    checkTypes(TreeInfo* ti)
    {
        if (length() != ti->typeMap.length())
            return TypeConsensus_Bad;

        bool has_undemotes = false;
        for (unsigned i = 0; i < length(); i++) {
            TypeCheckResult result = checkType(i, ti->typeMap[i]);
            if (result == TypeCheck_Bad)
                return TypeConsensus_Bad;
            if (result == TypeCheck_Undemote)
                has_undemotes = true;
            slots[i].lastCheck = result;
        }
        if (has_undemotes)
            return TypeConsensus_Undemotes;
        return TypeConsensus_Okay;
    }
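
    /*
     * checkTypes() only records per-slot verdicts; callers decide whether to
     * run adjustTypes() (on TypeConsensus_Okay) or markUndemotes() (on
     * TypeConsensus_Undemotes) before checking again.
     */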
    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    addSlot(jsval* vp)
    {
        slots.add(SlotInfo(vp, isPromoteInt(mRecorder.get(vp))));
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    addSlot(JSTraceType t)
    {
        slots.add(SlotInfo(NULL, t));
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    addSlot(jsval *vp, JSTraceType t)
    {
        slots.add(SlotInfo(vp, t));
    }
    JS_REQUIRES_STACK void
    markUndemotes()
    {
        for (unsigned i = 0; i < length(); i++) {
            if (get(i).lastCheck == TypeCheck_Undemote)
                MarkSlotUndemotable(mRecorder.cx, mRecorder.treeInfo, i);
        }
    }

    JS_REQUIRES_STACK virtual void
    adjustTail(TypeConsensus consensus)
    {
    }

    JS_REQUIRES_STACK virtual void
    adjustTypes()
    {
        for (unsigned i = 0; i < length(); i++)
            adjustType(get(i));
    }

  protected:
    JS_REQUIRES_STACK virtual void
    adjustType(SlotInfo& info) {
        JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
        if (info.lastCheck == TypeCheck_Promote) {
            JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
            mRecorder.set(info.vp, mRecorder.f2i(mRecorder.get(info.vp)));
        } else if (info.lastCheck == TypeCheck_Demote) {
            JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
            JS_ASSERT(mRecorder.get(info.vp)->isQuad());

            /* Never demote this final i2f. */
            mRecorder.set(info.vp, mRecorder.get(info.vp), false, false);
        }
    }
  private:
    TypeCheckResult
    checkType(unsigned i, JSTraceType t)
    {
        debug_only_printf(LC_TMTracer,
                          "checkType slot %d: interp=%c typemap=%c isNum=%d promoteInt=%d\n",
                          i,
                          typeChar[slots[i].type],
                          typeChar[t],
                          slots[i].type == TT_INT32 || slots[i].type == TT_DOUBLE,
                          slots[i].promoteInt);
        switch (t) {
          case TT_INT32:
            if (slots[i].type != TT_INT32 && slots[i].type != TT_DOUBLE)
                return TypeCheck_Bad; /* Not a number? Type mismatch. */
            /* This is always a type mismatch, we can't close a double to an int. */
            if (!slots[i].promoteInt)
                return TypeCheck_Undemote;
            /* Looks good, slot is an int32, the last instruction should be promotable. */
            JS_ASSERT_IF(slots[i].vp, isInt32(*slots[i].vp) && slots[i].promoteInt);
            return slots[i].vp ? TypeCheck_Promote : TypeCheck_Okay;
          case TT_DOUBLE:
            if (slots[i].type != TT_INT32 && slots[i].type != TT_DOUBLE)
                return TypeCheck_Bad; /* Not a number? Type mismatch. */
            if (slots[i].promoteInt)
                return slots[i].vp ? TypeCheck_Demote : TypeCheck_Bad;
            return TypeCheck_Okay;
          default:
            return slots[i].type == t ? TypeCheck_Okay : TypeCheck_Bad;
        }
        JS_NOT_REACHED("shouldn't fall through type check switch");
        return TypeCheck_Bad;
    }

  protected:
    TraceRecorder& mRecorder;
    JSContext* mCx;
    Queue<SlotInfo> slots;
};
class DefaultSlotMap : public SlotMap
{
  public:
    DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
    {
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp)
    {
        for (size_t i = 0; i < count; i++)
            addSlot(&vp[i]);
        return true;
    }
};
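
/*
 * A DefaultSlotMap simply captures every stack and global slot in visit
 * order; the type-stability checks below all operate on such a map taken
 * at the loop edge.
 */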
JS_REQUIRES_STACK TypeConsensus
TraceRecorder::selfTypeStability(SlotMap& slotMap)
{
    debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment);
    TypeConsensus consensus = slotMap.checkTypes(treeInfo);

    /* Best case: loop jumps back to its own header */
    if (consensus == TypeConsensus_Okay)
        return TypeConsensus_Okay;

    /*
     * If the only thing keeping this loop from being stable is undemotions, then mark relevant
     * slots as undemotable.
     */
    if (consensus == TypeConsensus_Undemotes)
        slotMap.markUndemotes();

    return consensus;
}
JS_REQUIRES_STACK TypeConsensus
TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, TreeFragment** pPeer)
{
    /* See if there are any peers that would make this stable */
    TreeFragment* root = fragment->root;
    TreeFragment* peer = LookupLoop(traceMonitor, ip, root->globalObj, root->globalShape, root->argc);

    /* This condition is possible with recursion */
    JS_ASSERT_IF(!peer, fragment->root->ip != ip);
    if (!peer)
        return TypeConsensus_Bad;

    bool onlyUndemotes = false;
    for (; peer != NULL; peer = peer->peer) {
        if (!peer->treeInfo || peer == fragment)
            continue;
        debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
        TypeConsensus consensus = slotMap.checkTypes(peer->treeInfo);
        if (consensus == TypeConsensus_Okay) {
            *pPeer = peer;
            /*
             * Return this even though there will be linkage; the trace itself is not stable.
             * Caller should inspect ppeer to check for a compatible peer.
             */
            return TypeConsensus_Okay;
        }
        if (consensus == TypeConsensus_Undemotes)
            onlyUndemotes = true;
    }

    return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
}
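
/*
 * selfTypeStability() and peerTypeStability() implement the lookup order
 * used by closeLoop(): first try to close the loop back to its own header,
 * then fall back to any type-compatible peer tree at the same ip.
 */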
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::closeLoop()
{
    return closeLoop(snapshot(UNSTABLE_LOOP_EXIT));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::closeLoop(VMSideExit* exit)
{
    DefaultSlotMap slotMap(*this);
    VisitSlots(slotMap, cx, 0, *treeInfo->globalSlots);
    return closeLoop(slotMap, exit);
}
/*
 * Complete and compile a trace and link it to the existing tree if
 * appropriate. Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
 * the recorder was deleted. Outparam is always set.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
{
    /*
     * We should have arrived back at the loop header, and hence we don't want
     * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
     * case this loop was blacklisted in the meantime, JSOP_NOP.
     */
    JS_ASSERT((*cx->fp->regs->pc == JSOP_TRACE || *cx->fp->regs->pc == JSOP_NOP ||
               *cx->fp->regs->pc == JSOP_RETURN) && !cx->fp->imacpc);

    if (callDepth != 0) {
        debug_only_print0(LC_TMTracer,
                          "Blacklisted: stack depth mismatch, possible recursion.\n");
        Blacklist((jsbytecode*) fragment->root->ip);
        trashSelf = true;
        return ARECORD_STOP;
    }

    JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
                 exit->numStackSlots == treeInfo->nStackTypes);
    JS_ASSERT_IF(exit->exitType != UNSTABLE_LOOP_EXIT, exit->exitType == RECURSIVE_UNLINKED_EXIT);
    JS_ASSERT_IF(exit->exitType == RECURSIVE_UNLINKED_EXIT,
                 exit->recursive_pc != fragment->root->ip);

    TreeFragment* peer = NULL;
    TreeFragment* root = fragment->root;

    TypeConsensus consensus = TypeConsensus_Bad;

    if (exit->exitType == UNSTABLE_LOOP_EXIT)
        consensus = selfTypeStability(slotMap);
    if (consensus != TypeConsensus_Okay) {
        const void* ip = exit->exitType == RECURSIVE_UNLINKED_EXIT ?
                         exit->recursive_pc : fragment->root->ip;
        TypeConsensus peerConsensus = peerTypeStability(slotMap, ip, &peer);
        /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
        if (peerConsensus != TypeConsensus_Bad)
            consensus = peerConsensus;
    }

    if (consensus != TypeConsensus_Okay || peer)
        AUDIT(unstableLoopVariable);

    JS_ASSERT(!trashSelf);

    /*
     * This exit is indeed linkable to something now. Process any promote or
     * demotes that are pending in the slot map.
     */
    if (consensus == TypeConsensus_Okay)
        slotMap.adjustTypes();

    /* Give up-recursion a chance to pop the stack frame. */
    slotMap.adjustTail(consensus);

    if (consensus != TypeConsensus_Okay || peer) {
        fragment->lastIns = lir->insGuard(LIR_x, NULL, createGuardRecord(exit));

        /* If there is a peer, there must have been an "Okay" consensus. */
        JS_ASSERT_IF(peer, consensus == TypeConsensus_Okay);

        /* Compile as a type-unstable loop, and hope for a connection later. */
        if (!peer) {
            /*
             * If such a fragment does not exist, let's compile the loop ahead
             * of time anyway. Later, if the loop becomes type stable, we will
             * connect these two fragments together.
             */
            debug_only_print0(LC_TMTracer,
                              "Trace has unstable loop variable with no stable peer, "
                              "compiling anyway.\n");
            UnstableExit* uexit = new (traceAlloc()) UnstableExit;
            uexit->fragment = fragment;
            uexit->exit = exit;
            uexit->next = treeInfo->unstableExits;
            treeInfo->unstableExits = uexit;
        } else {
            JS_ASSERT(peer->code());
            exit->target = peer;
            debug_only_printf(LC_TMTracer,
                              "Joining type-unstable trace to target fragment %p.\n",
                              (void*)peer);
            peer->treeInfo->dependentTrees.addUnique(fragment->root);
            treeInfo->linkedTrees.addUnique(peer);
        }
    } else {
        exit->exitType = LOOP_EXIT;
        debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
                          getExitName(LOOP_EXIT));

        JS_ASSERT((fragment == fragment->root) == !!loopLabel);
        if (loopLabel) {
            lir->insBranch(LIR_j, NULL, loopLabel);
            lir->ins1(LIR_live, lirbuf->state);
        }

        exit->target = fragment->root;
        fragment->lastIns = lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
    }

    CHECK_STATUS_A(compile());

    debug_only_printf(LC_TMTreeVis, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit, (void*)peer);

    peer = LookupLoop(traceMonitor, root->ip, root->globalObj, root->globalShape, root->argc);
    JS_ASSERT(peer);
    joinEdgesToEntry(peer);

    debug_only_stmt(DumpPeerStability(traceMonitor, peer->ip, peer->globalObj,
                                      peer->globalShape, peer->argc);)

    debug_only_print0(LC_TMTracer,
                      "updating specializations on dependent and linked trees\n");
    if (fragment->root->treeInfo)
        SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root->treeInfo);

    /*
     * If this is a newly formed tree, and the outer tree has not been compiled yet, we
     * should try to compile the outer tree again.
     */
    if (outer)
        AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);

    debug_only_printf(LC_TMMinimal,
                      "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp),
                      fragment->profFragID);
    debug_only_print0(LC_TMMinimal, "\n");

    return finishSuccessfully();
}
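
/*
 * A loop that cannot be closed type-stably is still compiled, but ends in
 * an unconditional exit guard; its UnstableExit entry gives
 * joinEdgesToEntry() a later chance to patch the trace into a stable peer.
 */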
static void
FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
{
    typeMap.setLength(0);
    typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
    typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
    /* Include globals that were later specialized at the root of the tree. */
    if (exit->numGlobalSlots < exit->root()->treeInfo->nGlobalTypes()) {
        typeMap.fromRaw(exit->root()->treeInfo->globalTypeMap() + exit->numGlobalSlots,
                        exit->root()->treeInfo->nGlobalTypes() - exit->numGlobalSlots);
    }
}
static JS_REQUIRES_STACK TypeConsensus
TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, TreeFragment* peer)
{
    const TypeMap& peerMap = peer->treeInfo->typeMap;
    unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
    TypeConsensus consensus = TypeConsensus_Okay;
    for (unsigned i = 0; i < minSlots; i++) {
        if (typeMap[i] == peerMap[i])
            continue;
        if (typeMap[i] == TT_INT32 && peerMap[i] == TT_DOUBLE &&
            IsSlotUndemotable(cx, peer->treeInfo, i, peer->ip)) {
            consensus = TypeConsensus_Undemotes;
        } else {
            return TypeConsensus_Bad;
        }
    }
    return consensus;
}
static JS_REQUIRES_STACK unsigned
FindUndemotesInTypemaps(JSContext* cx, const TypeMap& typeMap, TreeInfo* treeInfo,
                        Queue<unsigned>& undemotes)
{
    undemotes.setLength(0);
    unsigned minSlots = JS_MIN(typeMap.length(), treeInfo->typeMap.length());
    for (unsigned i = 0; i < minSlots; i++) {
        if (typeMap[i] == TT_INT32 && treeInfo->typeMap[i] == TT_DOUBLE) {
            undemotes.add(i);
        } else if (typeMap[i] != treeInfo->typeMap[i]) {
            return 0;
        }
    }
    for (unsigned i = 0; i < undemotes.length(); i++)
        MarkSlotUndemotable(cx, treeInfo, undemotes[i]);
    return undemotes.length();
}
JS_REQUIRES_STACK void
TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root)
{
    if (fragment->root != fragment)
        return;

    TypeMap typeMap(NULL);
    Queue<unsigned> undemotes(NULL);

    for (TreeFragment* peer = peer_root; peer; peer = peer->peer) {
        TreeInfo* ti = peer->treeInfo;
        if (!ti)
            continue;
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            /* :TODO: these exits go somewhere else. */
            if (uexit->exit->exitType == RECURSIVE_UNLINKED_EXIT) {
                uexit = uexit->next;
                continue;
            }
            /* Build the full typemap for this unstable exit */
            FullMapFromExit(typeMap, uexit->exit);
            /* Check its compatibility against this tree */
            TypeConsensus consensus = TypeMapLinkability(cx, typeMap, fragment->root);
            JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
            if (consensus == TypeConsensus_Okay) {
                debug_only_printf(LC_TMTracer,
                                  "Joining type-stable trace to target exit %p->%p.\n",
                                  (void*)uexit->fragment, (void*)uexit->exit);
                /* It's okay! Link together and remove the unstable exit. */
                JoinPeers(traceMonitor->assembler, uexit->exit, (TreeFragment*)fragment);
                uexit = ti->removeUnstableExit(uexit->exit);
            } else {
                /* Check for int32->double slots that suggest trashing. */
                if (FindUndemotesInTypemaps(cx, typeMap, treeInfo, undemotes)) {
                    JS_ASSERT(peer == uexit->fragment->root);
                    if (fragment == peer)
                        trashSelf = true;
                    else
                        whichTreesToTrash.addUnique(uexit->fragment->root);
                    return;
                }
                uexit = uexit->next;
            }
        }
    }
}
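
/*
 * A nonzero FindUndemotesInTypemaps() result above means a peer still
 * holds an int32 slot that this tree proved must stay a double, so the
 * stale tree is queued for trashing instead of being linked.
 */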
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::endLoop()
{
    return endLoop(snapshot(LOOP_EXIT));
}

/* Emit an always-exit guard and compile the tree (used for break statements). */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::endLoop(VMSideExit* exit)
{
    if (callDepth != 0) {
        debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
        Blacklist((jsbytecode*) fragment->root->ip);
        trashSelf = true;
        return ARECORD_STOP;
    }

    if (recordReason != Record_Branch)
        RETURN_STOP_A("control flow should have been recursive");

    fragment->lastIns =
        lir->insGuard(LIR_x, NULL, createGuardRecord(exit));

    CHECK_STATUS_A(compile());

    debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);

    TreeFragment* root = fragment->root;
    joinEdgesToEntry(LookupLoop(traceMonitor, root->ip, root->globalObj,
                                root->globalShape, root->argc));
    debug_only_stmt(DumpPeerStability(traceMonitor, root->ip, root->globalObj,
                                      root->globalShape, root->argc);)

    /*
     * Note: this must always be done, in case we added new globals on trace
     * and haven't yet propagated those to linked and dependent trees.
     */
    debug_only_print0(LC_TMTracer,
                      "updating specializations on dependent and linked trees\n");
    if (fragment->root->treeInfo)
        SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root->treeInfo);

    /*
     * If this is a newly formed tree, and the outer tree has not been compiled
     * yet, we should try to compile the outer tree again.
     */
    if (outer)
        AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);

    debug_only_printf(LC_TMMinimal,
                      "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp),
                      fragment->profFragID);
    debug_only_print0(LC_TMTracer, "\n");

    return finishSuccessfully();
}
/* Emit code to adjust the stack to match the inner tree's stack expectations. */
JS_REQUIRES_STACK void
TraceRecorder::prepareTreeCall(TreeFragment* inner, LIns*& inner_sp_ins)
{
    TreeInfo* ti = inner->treeInfo;
    inner_sp_ins = lirbuf->sp;
    VMSideExit* exit = snapshot(OOM_EXIT);

    /*
     * The inner tree expects to be called from the current frame. If the outer
     * tree (this trace) is currently inside a function inlining code
     * (calldepth > 0), we have to advance the native stack pointer such that
     * we match what the inner trace expects to see. We move it back when we
     * come out of the inner tree call.
     */
    if (callDepth > 0) {
        /*
         * Calculate the amount we have to lift the native stack pointer by to
         * compensate for any outer frames that the inner tree doesn't expect
         * but the outer tree has.
         */
        ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);

        /* Calculate the amount we have to lift the call stack by. */
        ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);

        /*
         * Guard that we have enough stack space for the tree we are trying to
         * call on top of the new value for sp.
         */
        debug_only_printf(LC_TMTracer,
                          "sp_adj=%lld outer=%lld inner=%lld\n",
                          (long long int)sp_adj,
                          (long long int)treeInfo->nativeStackBase,
                          (long long int)ti->nativeStackBase);
        ptrdiff_t sp_offset =
                - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
                + sp_adj /* adjust for stack in outer frame inner tree can't see */
                + ti->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */
        LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, INS_CONSTWORD(sp_offset));
        guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), exit);

        /* Guard that we have enough call stack space. */
        ptrdiff_t rp_offset = rp_adj + ti->maxCallDepth * sizeof(FrameInfo*);
        LIns* rp_top = lir->ins2(LIR_piadd, lirbuf->rp, INS_CONSTWORD(rp_offset));
        guard(true, lir->ins2(LIR_plt, rp_top, eor_ins), exit);

        sp_offset =
                - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
                + sp_adj /* adjust for stack in outer frame inner tree can't see */
                + ti->nativeStackBase; /* plus the inner tree's stack base */
        /* We have enough space, so adjust sp and rp to their new level. */
        lir->insStorei(inner_sp_ins = lir->ins2(LIR_piadd, lirbuf->sp, INS_CONSTWORD(sp_offset)),
                       lirbuf->state, offsetof(InterpState, sp));
        lir->insStorei(lir->ins2(LIR_piadd, lirbuf->rp, INS_CONSTWORD(rp_adj)),
                       lirbuf->state, offsetof(InterpState, rp));
    }

    /*
     * The inner tree will probably access stack slots. So tell nanojit not to
     * discard or defer stack writes before emitting the call tree code.
     *
     * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
     */
    GuardRecord* guardRec = createGuardRecord(exit);
    lir->insGuard(LIR_xbarrier, NULL, guardRec);
}
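
/*
 * The sp/rp adjustment above happens once at recording time; the emitted
 * stores update InterpState before the call, and emitTreeCall() restores
 * the original values after the inner tree returns.
 */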
static unsigned
BuildGlobalTypeMapFromInnerTree(Queue<JSTraceType>& typeMap, VMSideExit* inner)
{
#if defined DEBUG
    unsigned initialSlots = typeMap.length();
#endif
    /* First, use the innermost exit's global typemap. */
    typeMap.add(inner->globalTypeMap(), inner->numGlobalSlots);

    /* Add missing global types from the innermost exit's tree. */
    TreeInfo* innerTree = inner->root()->treeInfo;
    unsigned slots = inner->numGlobalSlots;
    if (slots < innerTree->nGlobalTypes()) {
        typeMap.add(innerTree->globalTypeMap() + slots, innerTree->nGlobalTypes() - slots);
        slots = innerTree->nGlobalTypes();
    }
    JS_ASSERT(typeMap.length() - initialSlots == slots);
    return slots;
}
/* Record a call to an inner tree. */
JS_REQUIRES_STACK void
TraceRecorder::emitTreeCall(TreeFragment* inner, VMSideExit* exit, LIns* inner_sp_ins)
{
    /* Invoke the inner tree. */
    LIns* args[] = { lirbuf->state }; /* reverse order */
    /* Construct a call info structure for the target tree. */
    CallInfo* ci = new (traceAlloc()) CallInfo();
    ci->_address = uintptr_t(inner->code());
    JS_ASSERT(ci->_address);
    ci->_argtypes = ARGSIZE_P | ARGSIZE_P << ARGSIZE_SHIFT;
    ci->_cse = ci->_fold = 0;
    ci->_abi = ABI_FASTCALL;
#ifdef DEBUG
    ci->_name = "fragment";
#endif
    LIns* rec = lir->insCall(ci, args);
    LIns* lr = lir->insLoad(LIR_ldp, rec, offsetof(GuardRecord, exit));
    LIns* nested = lir->insBranch(LIR_jt,
                                  lir->ins2i(LIR_eq,
                                             lir->insLoad(LIR_ld, lr, offsetof(VMSideExit, exitType)),
                                             NESTED_EXIT),
                                  NULL);

    /*
     * If the tree exits on a regular (non-nested) guard, keep updating lastTreeExitGuard
     * with that guard. If we mismatch on a tree call guard, this will contain the last
     * non-nested guard we encountered, which is the innermost loop or branch guard.
     */
    lir->insStorei(lr, lirbuf->state, offsetof(InterpState, lastTreeExitGuard));
    LIns* done1 = lir->insBranch(LIR_j, NULL, NULL);

    /*
     * The tree exited on a nested guard. This only occurs once a tree call guard mismatches
     * and we unwind the tree call stack. We store the first (innermost) tree call guard in state
     * and we will try to grow the outer tree the failing call was in starting at that guard.
     */
    nested->setTarget(lir->ins0(LIR_label));
    LIns* done2 = lir->insBranch(LIR_jf,
                                 lir->ins_peq0(lir->insLoad(LIR_ldp,
                                                            lirbuf->state,
                                                            offsetof(InterpState, lastTreeCallGuard))),
                                 NULL);
    lir->insStorei(lr, lirbuf->state, offsetof(InterpState, lastTreeCallGuard));
    lir->insStorei(lir->ins2(LIR_piadd,
                             lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)),
                             lir->ins_i2p(lir->ins2i(LIR_lsh,
                                                     lir->insLoad(LIR_ld, lr, offsetof(VMSideExit, calldepth)),
                                                     sizeof(void*) == 4 ? 2 : 3))),
                   lirbuf->state,
                   offsetof(InterpState, rpAtLastTreeCall));
    LIns* label = lir->ins0(LIR_label);
    done1->setTarget(label);
    done2->setTarget(label);

    /*
     * Keep updating outermostTreeExit so that InterpState always contains the most recent
     * side exit.
     */
    lir->insStorei(lr, lirbuf->state, offsetof(InterpState, outermostTreeExitGuard));

    /* Read back all registers, in case the called tree changed any of them. */
#ifdef DEBUG
    JSTraceType* map;
    size_t i;
    map = exit->globalTypeMap();
    for (i = 0; i < exit->numGlobalSlots; i++)
        JS_ASSERT(map[i] != TT_JSVAL);
    map = exit->stackTypeMap();
    for (i = 0; i < exit->numStackSlots; i++)
        JS_ASSERT(map[i] != TT_JSVAL);
#endif
    /*
     * Bug 502604 - It is illegal to extend from the outer typemap without
     * first extending from the inner. Make a new typemap here.
     */
    TypeMap fullMap(NULL);
    fullMap.add(exit->stackTypeMap(), exit->numStackSlots);
    BuildGlobalTypeMapFromInnerTree(fullMap, exit);

    TreeInfo* ti = inner->treeInfo;
    import(ti, inner_sp_ins, exit->numStackSlots, fullMap.length() - exit->numStackSlots,
           exit->calldepth, fullMap.data());

    /* Restore sp and rp to their original values (we still have them in a register). */
    if (callDepth > 0) {
        lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
        lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
    }

    /*
     * Guard that we come out of the inner tree along the same side exit we came out when
     * we called the inner tree at recording time.
     */
    VMSideExit* nestedExit = snapshot(NESTED_EXIT);
    JS_ASSERT(exit->exitType == LOOP_EXIT);
    guard(true, lir->ins2(LIR_peq, lr, INS_CONSTPTR(exit)), nestedExit);
    debug_only_printf(LC_TMTreeVis, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner,
                      (void*)nestedExit, (void*)exit);

    /* Register us as a dependent tree of the inner tree. */
    inner->treeInfo->dependentTrees.addUnique(fragment->root);
    treeInfo->linkedTrees.addUnique(inner);
}
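
/*
 * After emitTreeCall() the outer trace treats the entire inner tree as one
 * guarded instruction: either it returns through the recorded side exit
 * and execution stays on trace, or the nested guard exits to the
 * interpreter.
 */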
/* Add an if/if-else control-flow merge point to the list of known merge points. */
JS_REQUIRES_STACK void
TraceRecorder::trackCfgMerges(jsbytecode* pc)
{
    /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
    JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
    jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
    if (sn != NULL) {
        if (SN_TYPE(sn) == SRC_IF) {
            cfgMerges.add((*pc == JSOP_IFEQ)
                          ? pc + GET_JUMP_OFFSET(pc)
                          : pc + GET_JUMPX_OFFSET(pc));
        } else if (SN_TYPE(sn) == SRC_IF_ELSE)
            cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
    }
}
/*
 * Invert the direction of the guard if this is a loop edge that is not
 * taken (thin loop).
 */
JS_REQUIRES_STACK void
TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
{
    ExitType exitType;
    if (IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
        exitType = LOOP_EXIT;

        /*
         * If we are about to walk out of the loop, generate code for the
         * inverse loop condition, pretending we recorded the case that stays
         * on trace.
         */
        if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
            JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
            debug_only_print0(LC_TMTracer,
                              "Walking out of the loop, terminating it anyway.\n");
            cond = !cond;
        }

        /*
         * Conditional guards do not have to be emitted if the condition is
         * constant. We make a note whether the loop condition is true or false
         * here, so we later know whether to emit a loop edge or a loop end.
         */
        if (x->isconst()) {
            pendingLoop = (x->imm32() == int32(cond));
            return;
        }
    } else {
        exitType = BRANCH_EXIT;
    }
    if (!x->isconst())
        guard(cond, x, exitType);
}
/* Emit code for a fused IFEQ/IFNE. */
JS_REQUIRES_STACK void
TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
{
    if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
        emitIf(pc, cond, x);
        if (*pc == JSOP_IFEQ)
            trackCfgMerges(pc);
    }
}
/* Check whether we have reached the end of the trace. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::checkTraceEnd(jsbytecode *pc)
{
    if (IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
        /*
         * If we compile a loop, the trace should have a zero stack balance at
         * the loop edge. Currently we are parked on a comparison op or
         * IFNE/IFEQ, so advance pc to the loop header and adjust the stack
         * pointer and pretend we have reached the loop header.
         */
        if (pendingLoop) {
            JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
            bool fused = pc != cx->fp->regs->pc;
            JSFrameRegs orig = *cx->fp->regs;

            cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
            cx->fp->regs->sp -= fused ? 2 : 1;

            JSContext* localcx = cx;
            AbortableRecordingStatus ars = closeLoop();
            *localcx->fp->regs = orig;
            return ars;
        } else {
            return endLoop();
        }
    }
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK bool
TraceRecorder::hasMethod(JSObject* obj, jsid id)
{
    if (!obj)
        return false;

    JSObject* pobj;
    JSProperty* prop;
    int protoIndex = obj->lookupProperty(cx, id, &pobj, &prop);
    if (protoIndex < 0 || !prop)
        return false;

    bool found = false;
    if (OBJ_IS_NATIVE(pobj)) {
        JSScope* scope = OBJ_SCOPE(pobj);
        JSScopeProperty* sprop = (JSScopeProperty*) prop;

        if (SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop) &&
            SPROP_HAS_VALID_SLOT(sprop, scope)) {
            jsval v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
            if (VALUE_IS_FUNCTION(cx, v)) {
                found = true;
                if (!scope->branded()) {
                    scope->brandingShapeChange(cx, sprop->slot, v);
                    scope->setBranded();
                }
            }
        }
    }

    pobj->dropProperty(cx, prop);
    return found;
}
JS_REQUIRES_STACK bool
TraceRecorder::hasIteratorMethod(JSObject* obj)
{
    JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);

    return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom));
}
void
nanojit::StackFilter::getTops(LIns* guard, int& spTop, int& rpTop)
{
    VMSideExit* e = (VMSideExit*)guard->record()->exit;
    spTop = e->sp_adj;
    rpTop = e->rp_adj;
}
#if defined NJ_VERBOSE
void
nanojit::LirNameMap::formatGuard(LIns *i, char *out)
{
    VMSideExit* x;

    x = (VMSideExit*)i->record()->exit;
    sprintf(out,
            "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld (GuardID=%03d)",
            formatRef(i),
            lirNames[i->opcode()],
            i->oprnd1() ? formatRef(i->oprnd1()) : "",
            (void *)x->pc,
            (void *)x->imacpc,
            (long int)x->sp_adj,
            (long int)x->rp_adj,
            i->record()->profGuardID);
}
#endif
/*
 * Check whether the shape of the global object has changed. The return value
 * indicates whether the recorder is still active. If 'false', any active
 * recording has been aborted and the JIT may have been reset.
 */
static JS_REQUIRES_STACK bool
CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
                       uint32 *shape = NULL, SlotList** slots = NULL)
{
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return false;
    }

    if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS) {
        if (tm->recorder)
            js_AbortRecording(cx, "too many slots in global object");
        return false;
    }

    uint32 globalShape = OBJ_SHAPE(globalObj);

    if (tm->recorder) {
        TreeFragment* root = tm->recorder->getFragment()->root;
        TreeInfo* ti = tm->recorder->getTreeInfo();

        /* Check the global shape matches the recorder's treeinfo's shape. */
        if (globalObj != root->globalObj || globalShape != root->globalShape) {
            AUDIT(globalShapeMismatchAtEntry);
            debug_only_printf(LC_TMTracer,
                              "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
                              (void*)globalObj, globalShape, (void*)root->globalObj,
                              root->globalShape);
            Backoff(cx, (jsbytecode*) root->ip);
            ResetJIT(cx, FR_GLOBAL_SHAPE_MISMATCH);
            return false;
        }
        if (shape)
            *shape = globalShape;
        if (slots)
            *slots = ti->globalSlots;
        return true;
    }

    /* No recorder, search for a tracked global-state (or allocate one). */
    for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
        GlobalState &state = tm->globalStates[i];

        if (state.globalShape == uint32(-1)) {
            state.globalObj = globalObj;
            state.globalShape = globalShape;
            JS_ASSERT(state.globalSlots);
            JS_ASSERT(state.globalSlots->length() == 0);
        }

        if (state.globalObj == globalObj && state.globalShape == globalShape) {
            if (shape)
                *shape = globalShape;
            if (slots)
                *slots = state.globalSlots;
            return true;
        }
    }

    /* No currently-tracked-global found and no room to allocate, abort. */
    AUDIT(globalShapeMismatchAtEntry);
    debug_only_printf(LC_TMTracer,
                      "No global slotlist for global shape %u, flushing cache.\n",
                      globalShape);
    ResetJIT(cx, FR_GLOBALS_FULL);
    return false;
}
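
/*
 * Callers may pass null 'shape'/'slots' outparams when they only need the
 * boolean answer; RecordTree() does exactly that before starting a new
 * recording.
 */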
/*
 * Return whether or not the recorder could be started. If 'false', the JIT has
 * been reset in response to an OOM.
 */
bool JS_REQUIRES_STACK
TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f,
                             TreeInfo* ti, unsigned stackSlots, unsigned ngslots,
                             JSTraceType* typeMap, VMSideExit* expectedInnerExit,
                             jsbytecode* outer, uint32 outerArgc, RecordReason recordReason)
{
    JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
    JS_ASSERT(!tm->needFlush);
    JS_ASSERT_IF(cx->fp->imacpc, f->root != f);

    tm->recorder = new TraceRecorder(cx, anchor, f, ti, stackSlots,
                                     ngslots, typeMap, expectedInnerExit,
                                     outer, outerArgc, recordReason);

    if (!tm->recorder || tm->outOfMemory() || js_OverfullJITCache(tm)) {
        ResetJIT(cx, FR_OOM);
        return false;
    }

    /*
     * If slurping failed, there's no reason to start recording again. Emit LIR
     * to capture the rest of the slots, then immediately compile and finish.
     */
    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) {
        tm->recorder->slurpDownFrames((jsbytecode*)anchor->recursive_pc - JSOP_CALL_LENGTH);
        if (tm->recorder)
            tm->recorder->finishAbort("Failed to slurp down frames");
        return false;
    }

    return true;
}
static void
TrashTree(JSContext* cx, TreeFragment* f)
{
    JS_ASSERT((!f->code()) == (!f->treeInfo));
    JS_ASSERT(f == f->root);
    debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);

    if (!f->code())
        return;
    AUDIT(treesTrashed);
    debug_only_print0(LC_TMTracer, "Trashing tree info.\n");
    TreeInfo* ti = f->treeInfo;
    f->treeInfo = NULL;
    f->setCode(NULL);
    TreeFragment** data = ti->dependentTrees.data();
    unsigned length = ti->dependentTrees.length();
    for (unsigned n = 0; n < length; ++n)
        TrashTree(cx, data[n]);
    data = ti->linkedTrees.data();
    length = ti->linkedTrees.length();
    for (unsigned n = 0; n < length; ++n)
        TrashTree(cx, data[n]);
}
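
/*
 * Trashing is transitive in both directions: a tree is only valid if every
 * tree it calls (linkedTrees) and every tree that calls it (dependentTrees)
 * still agrees with its type maps, so all of them are discarded too.
 */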
static unsigned
SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
{
    VOUCH_DOES_NOT_REQUIRE_STACK();

    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
    JS_ASSERT(FUN_INTERPRETED(fun));

    /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
    JSStackFrame* fp = cx->fp;
    JS_ASSERT_IF(!fi.imacpc,
                 js_ReconstructStackDepth(cx, fp->script, fi.pc) ==
                 uintN(fi.spdist - fp->script->nfixed));

    uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
    JSScript* script = fun->u.i.script;
    size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);

    /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
    JSArena* a = cx->stackPool.current;
    void* newmark = (void*) a->avail;
    uintN argc = fi.get_argc();
    jsval* vp = fp->slots + fi.spdist - (2 + argc);
    uintN missing = 0;
    jsval* newsp;

    if (fun->nargs > argc) {
        const JSFrameRegs& regs = *fp->regs;

        newsp = vp + 2 + fun->nargs;
        JS_ASSERT(newsp > regs.sp);
        if ((jsuword) newsp <= a->limit) {
            if ((jsuword) newsp > a->avail)
                a->avail = (jsuword) newsp;
            jsval* argsp = newsp;
            do {
                *--argsp = JSVAL_VOID;
            } while (argsp != regs.sp);
        } else {
            missing = fun->nargs - argc;
            nbytes += (2 + fun->nargs) * sizeof(jsval);
        }
    }

    /* Allocate the inline frame with its vars and operands. */
    if (a->avail + nbytes <= a->limit) {
        newsp = (jsval*) a->avail;
        a->avail += nbytes;
        JS_ASSERT(missing == 0);
    } else {
        JS_ARENA_ALLOCATE_CAST(newsp, jsval*, &cx->stackPool, nbytes);

        /*
         * Move args if the missing ones overflow arena a, then push
         * undefined for the missing args.
         */
        if (missing) {
            memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
            vp = newsp;
            newsp = vp + 2 + argc;
            do {
                *newsp++ = JSVAL_VOID;
            } while (--missing != 0);
        }
    }

    /* Claim space for the stack frame and initialize it. */
    JSInlineFrame* newifp = (JSInlineFrame*) newsp;
    newsp += nframeslots;

    newifp->frame.callobj = NULL;
    newifp->frame.argsobj = NULL;
    newifp->frame.varobj = NULL;
    newifp->frame.script = script;
    newifp->frame.fun = fun;

    bool constructing = fi.is_constructing();
    newifp->frame.argc = argc;
    newifp->callerRegs.pc = fi.pc;
    newifp->callerRegs.sp = fp->slots + fi.spdist;
    fp->imacpc = fi.imacpc;

#ifdef DEBUG
    if (fi.block != fp->blockChain) {
        for (JSObject* obj = fi.block; obj != fp->blockChain; obj = STOBJ_GET_PARENT(obj))
            JS_ASSERT(obj);
    }
#endif
    fp->blockChain = fi.block;

    newifp->frame.argv = newifp->callerRegs.sp - argc;
    JS_ASSERT(newifp->frame.argv);

    // Initialize argv[-1] to a known-bogus value so we'll catch it if
    // someone forgets to initialize it later.
    newifp->frame.argv[-1] = JSVAL_HOLE;

    JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);

    newifp->frame.rval = JSVAL_VOID;
    newifp->frame.down = fp;
    newifp->frame.annotation = NULL;
    newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrame
    newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
    newifp->frame.dormantNext = NULL;
    newifp->frame.blockChain = NULL;
    newifp->mark = newmark;
    newifp->frame.thisv = JSVAL_NULL; // will be updated in FlushNativeStackFrame

    newifp->frame.regs = fp->regs;
    newifp->frame.regs->pc = script->code;
    newifp->frame.regs->sp = newsp + script->nfixed;
    newifp->frame.imacpc = NULL;
    newifp->frame.slots = newsp;
    if (script->staticLevel < JS_DISPLAY_SIZE) {
        JSStackFrame **disp = &cx->display[script->staticLevel];
        newifp->frame.displaySave = *disp;
        *disp = &newifp->frame;
    }

    /*
     * Note that fp->script is still the caller's script; set the callee
     * inline frame's idea of caller version from its version.
     */
    newifp->callerVersion = (JSVersion) fp->script->version;

    // After this paragraph, fp and cx->fp point to the newly synthesized frame.
    fp->regs = &newifp->callerRegs;
    fp = cx->fp = &newifp->frame;

    /*
     * If there's a call hook, invoke it to compute the hookData used by
     * debuggers that cooperate with the interpreter.
     */
    JSInterpreterHook hook = cx->debugHooks->callHook;
    if (hook) {
        newifp->hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData);
    } else {
        newifp->hookData = NULL;
    }

    /*
     * Duplicate native stack layout computation: see VisitFrameSlots header comment.
     *
     * FIXME - We must count stack slots from caller's operand stack up to (but
     * not including) callee's, including missing arguments. Could we shift
     * everything down to the caller's fp->slots (where vars start) and avoid
     * some of the complexity?
     */
    return (fi.spdist - fp->down->script->nfixed) +
           ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
           script->nfixed + 1/*argsobj*/;
}
static void
SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
{
    VOUCH_DOES_NOT_REQUIRE_STACK();

    void *mark;
    JSInlineFrame *ifp;

    /* This allocation is infallible: ExecuteTree reserved enough stack. */
    mark = JS_ARENA_MARK(&cx->stackPool);
    JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
    JS_ASSERT(ifp);

    JSStackFrame *fp = &ifp->frame;
    fp->regs = NULL;
    fp->imacpc = NULL;
    fp->slots = NULL;
    fp->callobj = NULL;
    fp->argsobj = NULL;
    fp->varobj = cx->fp->varobj;
    fp->script = NULL;
    fp->thisv = state.nativeVp[1];
    fp->argc = state.nativeVpLen - 2;
    fp->argv = state.nativeVp + 2;
    fp->fun = GET_FUNCTION_PRIVATE(cx, fp->calleeObject());
    fp->rval = JSVAL_VOID;
    fp->down = cx->fp;
    fp->annotation = NULL;
    JS_ASSERT(cx->fp->scopeChain);
    fp->scopeChain = cx->fp->scopeChain;
    fp->blockChain = NULL;
    fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
    fp->dormantNext = NULL;
    fp->displaySave = NULL;

    ifp->mark = mark;
    cx->fp = fp;
}
/*
 * Create a TreeInfo in preparation for starting a recorder. If one cannot be
 * allocated, reset the JIT and return NULL.
 */
static JS_REQUIRES_STACK TreeInfo*
CreateTreeInfo(JSContext* cx, TreeFragment* f, JSObject* globalObj, SlotList* globalSlots)
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    /* Set up the VM-private treeInfo structure for this fragment. */
    TreeInfo* ti = new (*tm->traceAlloc) TreeInfo(tm->dataAlloc, f, globalSlots);

    /* Capture the coerced type of each active slot in the type map. */
    ti->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */);
    ti->nStackTypes = ti->typeMap.length() - globalSlots->length();

#ifdef DEBUG
    AssertTreeIsUnique(tm, f, ti);
    ti->treeFileName = cx->fp->script->filename;
    ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
    ti->treePCOffset = FramePCOffset(cx->fp);
#endif
#ifdef JS_JIT_SPEW
    debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
                      (void*)f, f->ip, ti->treeFileName, ti->treeLineNumber,
                      FramePCOffset(cx->fp));
    debug_only_print0(LC_TMTreeVis, " STACK=\"");
    for (unsigned i = 0; i < ti->nStackTypes; i++)
        debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[i]]);
    debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
    for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
        debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[ti->nStackTypes + i]]);
    debug_only_print0(LC_TMTreeVis, "\"\n");
#endif

    /* Determine the native frame layout at the entry point. */
    unsigned entryNativeStackSlots = ti->nStackTypes;
    JS_ASSERT(entryNativeStackSlots == NativeStackSlots(cx, 0 /* callDepth */));
    ti->nativeStackBase = (entryNativeStackSlots -
                           (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
    ti->maxNativeStackSlots = entryNativeStackSlots;
    ti->maxCallDepth = 0;
    ti->script = cx->fp->script;

    return ti;
}
bool
5653 RecordTree(JSContext
* cx
, JSTraceMonitor
* tm
, TreeFragment
* peer
, jsbytecode
* outer
,
5654 uint32 outerArgc
, JSObject
* globalObj
, uint32 globalShape
,
5655 SlotList
* globalSlots
, uint32 argc
, RecordReason reason
)
5657 /* Try to find an unused peer fragment, or allocate a new one. */
5658 TreeFragment
* f
= peer
;
5659 while (f
->code() && f
->peer
)
5662 f
= AddNewPeerToPeerList(tm
, f
);
5663 JS_ASSERT(f
->root
== f
);
5665 /* save a local copy for use after JIT flush */
5666 const void* localRootIP
= f
->root
->ip
;
5668 /* Make sure the global type map didn't change on us. */
5669 if (!CheckGlobalObjectShape(cx
, tm
, globalObj
)) {
5670 Backoff(cx
, (jsbytecode
*) localRootIP
);
5674 AUDIT(recorderStarted
);
5676 if (tm
->outOfMemory() || js_OverfullJITCache(tm
)) {
5677 Backoff(cx
, (jsbytecode
*) f
->root
->ip
);
5678 ResetJIT(cx
, FR_OOM
);
5679 debug_only_print0(LC_TMTracer
,
5680 "Out of memory recording new tree, flushing cache.\n");
5684 JS_ASSERT(!f
->code() && !f
->treeInfo
);
5686 TreeInfo
* ti
= CreateTreeInfo(cx
, f
, globalObj
, globalSlots
);
5690 /* Recording primary trace. */
5691 return TraceRecorder::startRecorder(cx
, NULL
, f
, ti
,
5693 ti
->globalSlots
->length(),
5694 ti
->typeMap
.data(), NULL
,
5695 outer
, outerArgc
, reason
);
static JS_REQUIRES_STACK TypeConsensus
FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp)
{
    TreeFragment* from = exit->root();
    TreeInfo* from_ti = from->treeInfo;

    JS_ASSERT(from->code());

    TypeMap typeMap(NULL);
    FullMapFromExit(typeMap, exit);
    JS_ASSERT(typeMap.length() - exit->numStackSlots == from_ti->nGlobalTypes());

    /* Mark all double slots as undemotable */
    uint16* gslots = from_ti->globalSlots->data();
    for (unsigned i = 0; i < typeMap.length(); i++) {
        if (typeMap[i] == TT_DOUBLE) {
            if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
                if (i < exit->numStackSlots)
                    oracle.markStackSlotUndemotable(cx, i, exit->recursive_pc);
                else
                    oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
            }
            if (i < from_ti->nStackTypes)
                oracle.markStackSlotUndemotable(cx, i, from->ip);
            else if (i >= exit->numStackSlots)
                oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
        }
    }

    JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT ||
              (exit->exitType == RECURSIVE_UNLINKED_EXIT && exit->recursive_pc));

    TreeFragment* firstPeer = NULL;
    if (exit->exitType == UNSTABLE_LOOP_EXIT || exit->recursive_pc == from->ip) {
        firstPeer = from->first;
    } else {
        firstPeer = LookupLoop(&JS_TRACE_MONITOR(cx), exit->recursive_pc, from->globalObj,
                               from->globalShape, from->argc);
    }

    for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) {
        TreeInfo* peer_ti = peer->treeInfo;
        if (!peer_ti)
            continue;
        JS_ASSERT(peer->argc == from->argc);
        JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
        TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer);
        if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
            *peerp = peer;
            return consensus;
        }
    }

    return TypeConsensus_Bad;
}
UnstableExit*
TreeInfo::removeUnstableExit(VMSideExit* exit)
{
    /* Now erase this exit from the unstable exit list. */
    UnstableExit** tail = &this->unstableExits;
    for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
        if (uexit->exit == exit) {
            *tail = uexit->next;
            return *tail;
        }
        tail = &uexit->next;
    }
    JS_NOT_REACHED("exit not in unstable exit list");
    return NULL;
}
static JS_REQUIRES_STACK bool
AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsbytecode* outer,
                       uint32 outerArgc)
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return false;
    }

    TreeFragment* from = exit->root();
    TreeInfo* from_ti = from->treeInfo;

    TreeFragment* peer = NULL;
    TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
    if (consensus == TypeConsensus_Okay) {
        TreeInfo* peer_ti = peer->treeInfo;
        JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots);
        JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
                     from_ti->nStackTypes == peer_ti->nStackTypes);
        JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
        /* Patch this exit to its peer */
        JoinPeers(tm->assembler, exit, peer);
        /*
         * Update peer global types. The |from| fragment should already be updated because it is
         * on the execution path, and somehow connected to the entry trace.
         */
        if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length())
            SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti);
        JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length());
        /* This exit is no longer unstable, so remove it. */
        if (exit->exitType == UNSTABLE_LOOP_EXIT)
            from_ti->removeUnstableExit(exit);
        debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);)
        return false;
    } else if (consensus == TypeConsensus_Undemotes) {
        /* The original tree is unconnectable, so trash it. */
        TrashTree(cx, peer);
        return false;
    }

    /* Don't bother recording if the exit doesn't expect this PC */
    if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
        if (++exit->hitcount >= MAX_RECURSIVE_UNLINK_HITS) {
            Blacklist((jsbytecode*)from->ip);
            TrashTree(cx, from);
            return false;
        }
        if (exit->recursive_pc != cx->fp->regs->pc)
            return false;
        from = LookupLoop(tm, exit->recursive_pc, from->globalObj, from->globalShape, cx->fp->argc);
        if (!from)
            return false;
        /* use stale TI for RecordTree - since from might not have one anymore. */
    }

    JS_ASSERT(from == from->root);

    /* If this tree has been blacklisted, don't try to record a new one. */
    if (*(jsbytecode*)from->ip == JSOP_NOP)
        return false;

    return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj,
                      from->globalShape, from_ti->globalSlots, cx->fp->argc,
                      Record_Branch);
}
static JS_REQUIRES_STACK VMFragment*
CreateBranchFragment(JSContext* cx, TreeFragment* root, VMSideExit* anchor)
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    verbose_only(
    uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile)
                          ? (++(tm->lastFragID)) : 0;
    )

    VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->fp->regs->pc verbose_only(, profFragID));

    debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
                      " LINE=%d ANCHOR=%p OFFS=%d\n",
                      (void*)root, (void*)f, (void*)cx->fp->regs->pc, cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp), (void*)anchor,
                      FramePCOffset(cx->fp));
    verbose_only( tm->branches = new (*tm->dataAlloc) Seq<Fragment*>(f, tm->branches); )

    f->root = root;
    return f;
}
static JS_REQUIRES_STACK bool
AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer
#ifdef MOZ_TRACEVIS
                    , TraceVisStateObj* tvso = NULL
#endif
                    )
{
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    JS_ASSERT(!tm->recorder);

    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
#ifdef MOZ_TRACEVIS
        if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
#endif
        return false;
    }

    TreeFragment* f = anchor->root();
    JS_ASSERT(f->treeInfo);

    /*
     * Don't grow trees above a certain size to avoid code explosion due to
     * tail duplication.
     */
    if (f->branchCount >= MAX_BRANCHES) {
#ifdef MOZ_TRACEVIS
        if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
#endif
        return false;
    }

    VMFragment* c = (VMFragment*)anchor->target;
    if (!c) {
        c = CreateBranchFragment(cx, f, anchor);
        anchor->target = c;
    }

    /*
     * If we are recycling a fragment, it might have a different ip so reset it
     * here. This can happen when attaching a branch to a NESTED_EXIT, which
     * might extend along separate paths (i.e. after the loop edge, and after a
     * return statement).
     */
    c->ip = cx->fp->regs->pc;
    JS_ASSERT(c->root == f);

    debug_only_printf(LC_TMTracer,
                      "trying to attach another branch to the tree (hits = %d)\n", c->hits());

    int32_t& hits = c->hits();
    if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
        /* start tracing secondary trace from this point */
        unsigned stackSlots;
        unsigned ngslots;
        JSTraceType* typeMap;
        TypeMap fullMap(NULL);
        if (!exitedFrom) {
            /*
             * If we are coming straight from a simple side exit, just use that
             * exit's type map as starting point.
             */
            ngslots = anchor->numGlobalSlots;
            stackSlots = anchor->numStackSlots;
            typeMap = anchor->fullTypeMap();
        } else {
            /*
             * If we side-exited on a loop exit and continue on a nesting
             * guard, the nesting guard (anchor) has the type information for
             * everything below the current scope, and the actual guard we
             * exited from has the types for everything in the current scope
             * (and whatever it inlined). We have to merge those maps here.
             */
            VMSideExit* e1 = anchor;
            VMSideExit* e2 = exitedFrom;
            fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
            fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
            stackSlots = fullMap.length();
            ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
            JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
            JS_ASSERT(ngslots == fullMap.length() - stackSlots);
            typeMap = fullMap.data();
        }
        JS_ASSERT(ngslots >= anchor->numGlobalSlots);
        bool rv = TraceRecorder::startRecorder(cx, anchor, c, f->treeInfo,
                                               stackSlots, ngslots, typeMap, exitedFrom,
                                               outer, cx->fp->argc, Record_Branch);
#ifdef MOZ_TRACEVIS
        if (!rv && tvso)
            tvso->r = R_FAIL_EXTEND_START;
#endif
        return rv;
    }
#ifdef MOZ_TRACEVIS
    if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
#endif
    return false;
}
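
/*
 * HOTEXIT and MAXEXIT bound how often a side exit may attempt to grow a
 * branch: exits that are still cold are skipped, and exits that keep
 * failing to record eventually stop being considered.
 */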
static JS_REQUIRES_STACK VMSideExit*
ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
            VMSideExit** innermostNestedGuardp);
JS_REQUIRES_STACK bool
TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
{
#ifdef JS_THREADSAFE
    if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
        js_AbortRecording(cx, "Global object not owned by this context");
        return false; /* we stay away from shared global objects */
    }
#endif

    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    /* Process needFlush and deep abort requests. */
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return false;
    }

    JS_ASSERT(r->fragment && !r->fragment->lastIns);
    TreeFragment* root = r->fragment->root;
    TreeFragment* first = LookupOrAddLoop(tm, cx->fp->regs->pc, root->globalObj,
                                          root->globalShape, cx->fp->argc);

    /*
     * Make sure the shape of the global object still matches (this might flush
     * the JIT cache).
     */
    JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;
    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
        JS_ASSERT(!tm->recorder);
        return false;
    }

    debug_only_printf(LC_TMTracer,
                      "Looking for type-compatible peer (%s:%d@%d)\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp));

    // Find a matching inner tree. If none can be found, compile one.
    TreeFragment* f = r->findNestedCompatiblePeer(first);
    if (!f || !f->code()) {
        AUDIT(noCompatInnerTrees);

        TreeFragment* outerFragment = root;
        jsbytecode* outer = (jsbytecode*) outerFragment->ip;
        uint32 outerArgc = outerFragment->argc;
        uint32 argc = cx->fp->argc;
        js_AbortRecording(cx, "No compatible inner tree");

        return RecordTree(cx, tm, first, outer, outerArgc, globalObj, globalShape,
                          globalSlots, argc, Record_Branch);
    }

    return r->attemptTreeCall(f, inlineCallCount) == ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
{
    /*
     * It is absolutely forbidden to have recursive loops tree call themselves
     * because it could accidentally pop frames owned by the parent call, and
     * there is no way to deal with this yet. We could have to set a "start of
     * poppable rp stack" variable, and if that unequals "real start of rp stack",
     * it would be illegal to pop frames.
     *
     * In the interim, just do tree calls knowing that they won't go into
     * recursive trees that can pop parent frames.
     */
    if (f->treeInfo->script == cx->fp->script) {
        if (f->treeInfo->recursion >= Recursion_Unwinds) {
            Blacklist(cx->fp->script->code);
            js_AbortRecording(cx, "Inner tree is an unsupported type of recursion");
            return ARECORD_ABORTED;
        } else {
            f->treeInfo->recursion = Recursion_Disallowed;
        }
    }

    adjustCallerTypes(f);

    LIns* inner_sp_ins;
    prepareTreeCall(f, inner_sp_ins);

#ifdef DEBUG
    unsigned oldInlineCallCount = inlineCallCount;
#endif

    JSContext *localCx = cx;

    VMSideExit* innermostNestedGuard = NULL;
    VMSideExit* lr = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);

    /* ExecuteTree can reenter the interpreter and kill |this|. */
    if (!TRACE_RECORDER(localCx))
        return ARECORD_ABORTED;

    if (!lr) {
        js_AbortRecording(cx, "Couldn't call inner tree");
        return ARECORD_ABORTED;
    }

    TreeFragment* outerFragment = fragment->root;
    jsbytecode* outer = (jsbytecode*) outerFragment->ip;
    switch (lr->exitType) {
      case RECURSIVE_LOOP_EXIT:
      case LOOP_EXIT:
        /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
        if (innermostNestedGuard) {
            js_AbortRecording(cx, "Inner tree took different side exit, abort current "
                                  "recording and grow nesting tree");
            return AttemptToExtendTree(localCx, innermostNestedGuard, lr, outer) ?
                   ARECORD_CONTINUE : ARECORD_ABORTED;
        }

        JS_ASSERT(oldInlineCallCount == inlineCallCount);

        /* Emit a call to the inner tree and continue recording the outer tree trace. */
        emitTreeCall(f, lr, inner_sp_ins);
        return ARECORD_CONTINUE;

      case UNSTABLE_LOOP_EXIT:
      {
        /* Abort recording so the inner loop can become type stable. */
        JSObject* _globalObj = globalObj;
        js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
        return AttemptToStabilizeTree(localCx, _globalObj, lr, outer, outerFragment->argc) ?
               ARECORD_CONTINUE : ARECORD_ABORTED;
      }

      case OVERFLOW_EXIT:
        oracle.markInstructionUndemotable(cx->fp->regs->pc);
        /* FALL THROUGH */
      case RECURSIVE_SLURP_FAIL_EXIT:
      case RECURSIVE_SLURP_MISMATCH_EXIT:
      case RECURSIVE_MISMATCH_EXIT:
      case RECURSIVE_EMPTY_RP_EXIT:
      case BRANCH_EXIT:
      case CASE_EXIT:
        /* Abort recording the outer tree, extend the inner tree. */
        js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
        return AttemptToExtendTree(localCx, lr, NULL, outer) ? ARECORD_CONTINUE : ARECORD_ABORTED;

      case NESTED_EXIT:
        JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
      default:
        debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
        js_AbortRecording(cx, "Inner tree not suitable for calling");
        return ARECORD_ABORTED;
    }
}
static bool
IsEntryTypeCompatible(jsval* vp, JSTraceType* m)
{
    unsigned tag = JSVAL_TAG(*vp);

    debug_only_printf(LC_TMTracer, "%c/%c ", tagChar[tag], typeChar[*m]);

    switch (*m) {
      case TT_OBJECT:
        if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
            !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
            return true;
        }
        debug_only_printf(LC_TMTracer, "object != tag%u ", tag);
        return false;
      case TT_INT32:
        jsint i;
        if (JSVAL_IS_INT(*vp))
            return true;
        if (tag == JSVAL_DOUBLE && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(*vp), i))
            return true;
        debug_only_printf(LC_TMTracer, "int != tag%u(value=%lu) ", tag, (unsigned long)*vp);
        return false;
      case TT_DOUBLE:
        if (JSVAL_IS_INT(*vp) || tag == JSVAL_DOUBLE)
            return true;
        debug_only_printf(LC_TMTracer, "double != tag%u ", tag);
        return false;
      case TT_JSVAL:
        JS_NOT_REACHED("shouldn't see jsval type in entry");
        return false;
      case TT_STRING:
        if (tag == JSVAL_STRING)
            return true;
        debug_only_printf(LC_TMTracer, "string != tag%u ", tag);
        return false;
      case TT_NULL:
        if (JSVAL_IS_NULL(*vp))
            return true;
        debug_only_printf(LC_TMTracer, "null != tag%u ", tag);
        return false;
      case TT_PSEUDOBOOLEAN:
        if (tag == JSVAL_SPECIAL)
            return true;
        debug_only_printf(LC_TMTracer, "bool != tag%u ", tag);
        return false;
      default:
        JS_ASSERT(*m == TT_FUNCTION);
        if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
            HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
            return true;
        }
        debug_only_printf(LC_TMTracer, "fun != tag%u ", tag);
        return false;
    }
}
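
/*
 * The visitor below walks every global and stack slot, pairing each jsval
 * with the type the candidate tree expects. Besides checking raw
 * compatibility it also feeds the oracle: a slot that cannot stay an int
 * where the tree wants TT_INT32 is marked undemotable, so a later
 * compilation will use a double for that slot instead.
 */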
class TypeCompatibilityVisitor : public SlotVisitorBase
{
    TraceRecorder &mRecorder;
    JSContext *mCx;
    JSTraceType *mTypeMap;
    unsigned mStackSlotNum;
    bool mOk;
public:
    TypeCompatibilityVisitor (TraceRecorder &recorder,
                              JSTraceType *typeMap) :
        mRecorder(recorder),
        mCx(mRecorder.cx),
        mTypeMap(typeMap),
        mStackSlotNum(0),
        mOk(true)
    {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d=", n);
        if (!IsEntryTypeCompatible(vp, mTypeMap)) {
            mOk = false;
        } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
            oracle.markGlobalSlotUndemotable(mCx, slot);
            mOk = false;
        } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
            oracle.markGlobalSlotUndemotable(mCx, slot);
        }
        mTypeMap++;
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
            if (!IsEntryTypeCompatible(vp, mTypeMap)) {
                mOk = false;
            } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
                mOk = false;
            } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
            }
            vp++;
            mTypeMap++;
            mStackSlotNum++;
        }
        return true;
    }

    bool isOk() {
        return mOk;
    }
};
JS_REQUIRES_STACK TreeFragment*
TraceRecorder::findNestedCompatiblePeer(TreeFragment* f)
{
    JSTraceMonitor* tm;

    tm = &JS_TRACE_MONITOR(cx);
    unsigned int ngslots = treeInfo->globalSlots->length();

    for (; f != NULL; f = f->peer) {
        if (!f->code())
            continue;

        TreeInfo* ti = f->treeInfo;

        debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);

        if (ngslots > ti->nGlobalTypes())
            SpecializeTreesToMissingGlobals(cx, globalObj, ti);

        /*
         * Determine whether the typemap of the inner tree matches the outer
         * tree's current state. If the inner tree expects an integer, but the
         * outer tree doesn't guarantee an integer for that slot, we mark the
         * slot undemotable and mismatch here. This will force a new tree to be
         * compiled that accepts a double for the slot. If the inner tree
         * expects a double, but the outer tree has an integer, we can proceed,
         * but we mark the location undemotable.
         */
        TypeCompatibilityVisitor visitor(*this, ti->typeMap.data());
        VisitSlots(visitor, cx, 0, *treeInfo->globalSlots);

        debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
        if (visitor.isOk())
            return f;
    }

    return NULL;
}
class CheckEntryTypeVisitor : public SlotVisitorBase
{
    bool mOk;
    JSTraceType *mTypeMap;
public:
    CheckEntryTypeVisitor(JSTraceType *typeMap) :
        mOk(true),
        mTypeMap(typeMap)
    {}

    JS_ALWAYS_INLINE void checkSlot(jsval *vp, char const *name, int i) {
        debug_only_printf(LC_TMTracer, "%s%d=", name, i);
        JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
        mOk = IsEntryTypeCompatible(vp, mTypeMap++);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        if (mOk)
            checkSlot(vp, "global", n);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            if (!mOk)
                break;
            checkSlot(vp++, stackSlotKind(), i);
        }
        return mOk;
    }

    bool isOk() {
        return mOk;
    }
};
/*
 * Check if types are usable for trace execution.
 *
 * @param cx Context.
 * @param ti Tree info of peer we're testing.
 * @return True if compatible (with or without demotions), false otherwise.
 */
static JS_REQUIRES_STACK bool
CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeInfo* ti)
{
    unsigned int ngslots = ti->globalSlots->length();

    JS_ASSERT(ti->nStackTypes == NativeStackSlots(cx, 0));

    if (ngslots > ti->nGlobalTypes())
        SpecializeTreesToMissingGlobals(cx, globalObj, ti);

    JS_ASSERT(ti->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
    JS_ASSERT(ti->nGlobalTypes() == ngslots);

    CheckEntryTypeVisitor visitor(ti->typeMap.data());
    VisitSlots(visitor, cx, 0, *ti->globalSlots);

    debug_only_print0(LC_TMTracer, "\n");
    return visitor.isOk();
}
/*
 * Find an acceptable entry tree given a PC.
 *
 * @param cx Context.
 * @param globalObj Global object.
 * @param f First peer fragment.
 * @out count Number of fragments consulted.
 */
static JS_REQUIRES_STACK TreeFragment*
FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN& count)
{
    count = 0;
    for (; f != NULL; f = f->peer) {
        if (!f->treeInfo)
            continue;
        debug_only_printf(LC_TMTracer,
                          "checking vm types %p (ip: %p): ", (void*)f, f->ip);
        if (CheckEntryTypes(cx, globalObj, f->treeInfo))
            return f;
        ++count;
    }
    return NULL;
}
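
/*
 * The peer count reported through |count| above is what drives the MAXPEERS
 * blacklisting decision in js_MonitorLoopEdge further below.
 */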
/*
 * For the native stacks and global frame, reuse the storage in |tm->storage|.
 * This reuse depends on the invariant that only one trace uses |tm->storage| at
 * a time. This is subtly correct in lieu of deep bail; see comment for
 * |deepBailSp| in js_DeepBail.
 */
InterpState::InterpState(JSContext* cx, JSTraceMonitor* tm, TreeInfo* ti,
                         uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
  : cx(cx),
    stackBase(tm->storage.stack()),
    sp(stackBase + ti->nativeStackBase / sizeof(double)),
    eos(tm->storage.global()),
    callstackBase(tm->storage.callstack()),
    sor(callstackBase),
    rp(callstackBase),
    eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES,
                               JS_MAX_INLINE_CALL_COUNT - inlineCallCount)),
    lastTreeExitGuard(NULL),
    lastTreeCallGuard(NULL),
    rpAtLastTreeCall(NULL),
    outermostTree(ti),
    inlineCallCountp(&inlineCallCount),
    innermostNestedGuardp(innermostNestedGuardp),
#ifdef EXECUTE_TREE_TIMER
    startTime(rdtsc()),
#endif
    builtinStatus(0),
    nativeVp(NULL)
{
    JS_ASSERT(!tm->tracecx);
    tm->tracecx = cx;
    prev = cx->interpState;
    cx->interpState = this;

    JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
    JS_ASSERT(sp < eos);

    /*
     * inlineCallCount has already been incremented, if being invoked from
     * EnterFrame. It is okay to have a 0-frame restriction since the JIT
     * might not need any frames.
     */
    JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT);

#ifdef DEBUG
    /*
     * Cannot 0xCD-fill global frame since it may overwrite a bailed outer
     * ExecuteTree's 0xdeadbeefdeadbeef marker.
     */
    memset(tm->storage.stack(), 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double));
    memset(tm->storage.callstack(), 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
#endif
}

InterpState::~InterpState()
{
    JS_ASSERT(!nativeVp);

    cx->interpState = prev;
    JS_TRACE_MONITOR(cx).tracecx = NULL;
}
/* Call |f|, return the exit taken. */
static JS_ALWAYS_INLINE VMSideExit*
ExecuteTrace(JSContext* cx, Fragment* f, InterpState& state)
{
    JS_ASSERT(!cx->bailExit);
    union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*); } u;
    u.code = f->code();
    GuardRecord* rec;
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
    SIMULATE_FASTCALL(rec, state, NULL, u.func);
#else
    rec = u.func(&state);
#endif
    JS_ASSERT(!cx->bailExit);
    return (VMSideExit*)rec->exit;
}
/* Check whether our assumptions about the incoming scope-chain are upheld. */
static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
ScopeChainCheck(JSContext* cx, TreeInfo* ti, TreeFragment* f)
{
    JS_ASSERT(ti->globalObj() == f->globalObj);
    JS_ASSERT(ti->globalObj() == JS_GetGlobalForObject(cx, cx->fp->scopeChain));

    /*
     * The JIT records and expects to execute with two scope-chain
     * assumptions baked-in:
     *
     *   1. That the bottom of the scope chain is global, in the sense of
     *      JSCLASS_IS_GLOBAL.
     *
     *   2. That the scope chain between fp and the global is free of
     *      "unusual" native objects such as HTML forms or other funny
     *      things.
     *
     * #2 is checked here while following the scope-chain links, via
     * js_IsCacheableNonGlobalScope, which consults a whitelist of known
     * class types; once a global is found, it's checked for #1. Failing
     * either check causes an early return from execution.
     */
    JSObject* child = cx->fp->scopeChain;
    while (JSObject* parent = OBJ_GET_PARENT(cx, child)) {
        if (!js_IsCacheableNonGlobalScope(child)) {
            debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
            Blacklist((jsbytecode*) f->root->ip);
            return false;
        }
        child = parent;
    }
    JS_ASSERT(child == f->globalObj);

    if (!(OBJ_GET_CLASS(cx, f->globalObj)->flags & JSCLASS_IS_GLOBAL)) {
        debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
        Blacklist((jsbytecode*) f->root->ip);
        return false;
    }

    /* Make sure the global object is sane. */
    JS_ASSERT(STOBJ_NSLOTS(f->globalObj) <= MAX_GLOBAL_SLOTS);
    JS_ASSERT(ti->nGlobalTypes() == ti->globalSlots->length());
    JS_ASSERT_IF(ti->globalSlots->length() != 0,
                 OBJ_SHAPE(f->globalObj) == f->globalShape);
    return true;
}
static JS_FORCES_STACK void
LeaveTree(InterpState&, VMSideExit* lr);

static JS_REQUIRES_STACK VMSideExit*
ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
            VMSideExit** innermostNestedGuardp)
{
#ifdef MOZ_TRACEVIS
    TraceVisStateObj tvso(cx, S_EXECUTE);
#endif

    JS_ASSERT(f->root == f && f->code() && f->treeInfo);
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    TreeInfo* ti = f->treeInfo;

    if (!ScopeChainCheck(cx, ti, f))
        return NULL;

    /* Initialize trace state. */
    InterpState state(cx, tm, ti, inlineCallCount, innermostNestedGuardp);
    double* stack = tm->storage.stack();
    double* global = tm->storage.global();
    JSObject* globalObj = f->globalObj;
    unsigned ngslots = ti->globalSlots->length();
    uint16* gslots = ti->globalSlots->data();

    BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
                     ti->typeMap.data(), global, stack);

    AUDIT(traceTriggered);
    debug_only_printf(LC_TMTracer,
                      "entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp),
                      ti->maxNativeStackSlots,
                      f->code());

    debug_only_stmt(uint32 globalSlots = STOBJ_NSLOTS(globalObj);)
    debug_only_stmt(*(uint64*)&tm->storage.global()[globalSlots] = 0xdeadbeefdeadbeefLL;)

    /* Execute trace. */
#ifdef MOZ_TRACEVIS
    VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, f, state));
#else
    VMSideExit* lr = ExecuteTrace(cx, f, state);
#endif

    JS_ASSERT(*(uint64*)&tm->storage.global()[globalSlots] == 0xdeadbeefdeadbeefLL);
    JS_ASSERT_IF(lr->exitType == LOOP_EXIT, !lr->calldepth);

    /* Restore interpreter state. */
    LeaveTree(state, lr);
    return state.innermost;
}
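
/*
 * At the point ExecuteTree returns, everything the trace wrote still lives in
 * the native stack and global areas of |tm->storage|; LeaveTree (below) is
 * what copies those native values back into jsvals and rebuilds the
 * interpreter frames.
 */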
struct Guardian {
    bool *flagp;
    Guardian(bool *flagp) {
        this->flagp = flagp;
        JS_ASSERT(!*flagp);
        *flagp = true;
    }
    ~Guardian() {
        JS_ASSERT(*flagp);
        *flagp = false;
    }
};
static JS_FORCES_STACK void
LeaveTree(InterpState& state, VMSideExit* lr)
{
    VOUCH_DOES_NOT_REQUIRE_STACK();

    JSContext* cx = state.cx;

    /* Temporarily waive the soft GC quota to make sure LeaveTree() doesn't fail. */
    Guardian waiver(&JS_THREAD_DATA(cx)->waiveGCQuota);

    FrameInfo** callstack = state.callstackBase;
    double* stack = state.stackBase;

    /*
     * Except if we find that this is a nested bailout, the guard the call
     * returned is the one we have to use to adjust pc and sp.
     */
    VMSideExit* innermost = lr;

    /*
     * While executing a tree we do not update state.sp and state.rp even if
     * they grow. Instead, guards tell us by how much sp and rp should be
     * incremented in case of a side exit. When calling a nested tree, however,
     * we actively adjust sp and rp. If we have such frames from outer trees on
     * the stack, then rp will have been adjusted. Before we can process the
     * stack of the frames of the tree we directly exited from, we have to
     * first work our way through the outer frames and generate interpreter
     * frames for them. Once the call stack (rp) is empty, we can process the
     * final frames (which again are not directly visible and only the guard we
     * exited on will tell us about).
     */
    FrameInfo** rp = (FrameInfo**)state.rp;
    if (lr->exitType == NESTED_EXIT) {
        VMSideExit* nested = state.lastTreeCallGuard;
        if (!nested) {
            /*
             * If lastTreeCallGuard is not set in state, we only have a single
             * level of nesting in this exit, so lr itself is the innermost and
             * outermost nested guard, and hence we set nested to lr. The
             * calldepth of the innermost guard is not added to state.rp, so we
             * do it here manually. For a nesting depth greater than 1 the
             * call tree code already added the innermost guard's calldepth
             * to state.rpAtLastTreeCall.
             */
            nested = lr;
            rp += lr->calldepth;
        } else {
            /*
             * During unwinding state.rp gets overwritten at every step and we
             * restore it here to its state at the innermost nested guard. The
             * builtin already added the calldepth of that innermost guard to
             * state.rpAtLastTreeCall.
             */
            rp = (FrameInfo**)state.rpAtLastTreeCall;
        }
        innermost = state.lastTreeExitGuard;
        if (state.innermostNestedGuardp)
            *state.innermostNestedGuardp = nested;
        JS_ASSERT(nested);
        JS_ASSERT(nested->exitType == NESTED_EXIT);
        JS_ASSERT(state.lastTreeExitGuard);
        JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
    }

    int32_t bs = state.builtinStatus;
    bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
    if (bailed) {
        /*
         * A _FAIL native already called LeaveTree. We already reconstructed
         * the interpreter stack, in pre-call state, with pc pointing to the
         * CALL/APPLY op, for correctness. Then we continued in native code.
         *
         * First, if we just returned from a slow native, pop its stack frame.
         */
        if (!cx->fp->script) {
            JSStackFrame *fp = cx->fp;
            JS_ASSERT(FUN_SLOW_NATIVE(fp->fun));
            JS_ASSERT(!fp->regs);
            JS_ASSERT(fp->down->regs != &((JSInlineFrame*) fp)->callerRegs);
            cx->fp = fp->down;
            JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame*) fp)->mark);
        }
        JS_ASSERT(cx->fp->script);

        if (!(bs & JSBUILTIN_ERROR)) {
            /*
             * The builtin or native deep-bailed but finished successfully
             * (no exception or error).
             *
             * After it returned, the JIT code stored the results of the
             * builtin or native at the top of the native stack and then
             * immediately flunked the guard on state->builtinStatus.
             *
             * Now LeaveTree has been called again from the tail of
             * ExecuteTree. We are about to return to the interpreter. Adjust
             * the top stack frame to resume on the next op.
             */
            JSFrameRegs* regs = cx->fp->regs;
            JSOp op = (JSOp) *regs->pc;
            JS_ASSERT(op == JSOP_CALL || op == JSOP_APPLY || op == JSOP_NEW ||
                      op == JSOP_GETPROP || op == JSOP_GETTHISPROP || op == JSOP_GETARGPROP ||
                      op == JSOP_GETLOCALPROP || op == JSOP_LENGTH ||
                      op == JSOP_GETELEM || op == JSOP_CALLELEM ||
                      op == JSOP_SETPROP || op == JSOP_SETNAME || op == JSOP_SETMETHOD ||
                      op == JSOP_SETELEM || op == JSOP_INITELEM || op == JSOP_ENUMELEM ||
                      op == JSOP_INSTANCEOF);

            /*
             * JSOP_SETELEM can be coalesced with a JSOP_POP in the interpreter.
             * Since this doesn't re-enter the recorder, the post-state snapshot
             * is invalid. Fix it up here.
             */
            if (op == JSOP_SETELEM && JSOp(regs->pc[JSOP_SETELEM_LENGTH]) == JSOP_POP) {
                regs->sp -= js_CodeSpec[JSOP_SETELEM].nuses;
                regs->sp += js_CodeSpec[JSOP_SETELEM].ndefs;
                regs->pc += JSOP_SETELEM_LENGTH;
                op = JSOP_POP;
            }

            const JSCodeSpec& cs = js_CodeSpec[op];
            regs->sp -= (cs.format & JOF_INVOKE) ? GET_ARGC(regs->pc) + 2 : cs.nuses;
            regs->sp += cs.ndefs;
            regs->pc += cs.length;
            JS_ASSERT_IF(!cx->fp->imacpc,
                         cx->fp->slots + cx->fp->script->nfixed +
                         js_ReconstructStackDepth(cx, cx->fp->script, regs->pc) ==
                         regs->sp);

            /*
             * If there's a tree call around the point that we deep exited at,
             * then state.sp and state.rp were restored to their original
             * values before the tree call and sp might be less than deepBailSp,
             * which we sampled when we were told to deep bail.
             */
            JS_ASSERT(state.deepBailSp >= state.stackBase && state.sp <= state.deepBailSp);

            /*
             * As explained above, the JIT code stored a result value or values
             * on the native stack. Transfer them to the interpreter stack now.
             * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
             * loop.)
             */
            JSTraceType* typeMap = innermost->stackTypeMap();
            for (int i = 1; i <= cs.ndefs; i++) {
                if (!js_NativeToValue(cx,
                                      regs->sp[-i],
                                      typeMap[innermost->numStackSlots - i],
                                      (jsdouble *) state.deepBailSp
                                      + innermost->sp_adj / sizeof(jsdouble) - i)) {
                }
            }
        }
        return;
    }

    /* Save the innermost FrameInfo for guardUpRecursion */
    if (innermost->exitType == RECURSIVE_MISMATCH_EXIT) {
        /* There should never be a static calldepth for a recursive mismatch. */
        JS_ASSERT(innermost->calldepth == 0);
        /* There must be at least one item on the rp stack. */
        JS_ASSERT(callstack < rp);
        /* :TODO: don't be all squirrelin' this in here */
        innermost->recursive_down = *(rp - 1);
    }

    /* Slurp failure should have no frames */
    JS_ASSERT_IF(innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT,
                 innermost->calldepth == 0 && callstack == rp);

    while (callstack < rp) {
        FrameInfo* fi = *callstack;
        /* Peek at the callee native slot in the not-yet-synthesized down frame. */
        JSObject* callee = *(JSObject**)&stack[fi->callerHeight];

        /*
         * Synthesize a stack frame and write out the values in it using the
         * type map pointer on the native call stack.
         */
        SynthesizeFrame(cx, *fi, callee);
        int slots = FlushNativeStackFrame(cx, 1 /* callDepth */, (*callstack)->get_typemap(),
                                          stack, cx->fp, 0);

        JSStackFrame* fp = cx->fp;
        debug_only_printf(LC_TMTracer,
                          "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
                          fp->script->filename,
                          js_FramePCToLineNumber(cx, fp),
                          FramePCOffset(fp), slots, (void*)fi);

        /*
         * Keep track of the additional frames we put on the interpreter stack
         * and the native stack slots we consumed.
         */
        ++*state.inlineCallCountp;
        ++callstack;
        stack += slots;
    }

    /*
     * We already synthesized the frames around the innermost guard. Here we
     * just deal with additional frames inside the tree we are bailing out
     * from.
     */
    JS_ASSERT(rp == callstack);
    unsigned calldepth = innermost->calldepth;
    unsigned calldepth_slots = 0;
    unsigned calleeOffset = 0;
    for (unsigned n = 0; n < calldepth; ++n) {
        /* Peek at the callee native slot in the not-yet-synthesized down frame. */
        calleeOffset += callstack[n]->callerHeight;
        JSObject* callee = *(JSObject**)&stack[calleeOffset];

        /* Reconstruct the frame. */
        calldepth_slots += SynthesizeFrame(cx, *callstack[n], callee);
        ++*state.inlineCallCountp;

        JSStackFrame* fp = cx->fp;
        debug_only_printf(LC_TMTracer,
                          "synthesized shallow frame for %s:%u@%u\n",
                          fp->script->filename, js_FramePCToLineNumber(cx, fp),
                          FramePCOffset(fp));
    }

    /*
     * Adjust sp and pc relative to the tree we exited from (not the tree we
     * entered into). These are our final values for sp and pc since
     * SynthesizeFrame has already taken care of all frames in between. But
     * first we recover fp->blockChain, which comes from the side exit
     * struct.
     */
    JSStackFrame* fp = cx->fp;

    fp->blockChain = innermost->block;

    /*
     * If we are not exiting from an inlined frame, the state->sp is spbase.
     * Otherwise spbase is whatever slots frames around us consume.
     */
    fp->regs->pc = innermost->pc;
    fp->imacpc = innermost->imacpc;
    fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
    JS_ASSERT_IF(!fp->imacpc,
                 fp->slots + fp->script->nfixed +
                 js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);

#ifdef EXECUTE_TREE_TIMER
    uint64 cycles = rdtsc() - state.startTime;
#elif defined(JS_JIT_SPEW)
    uint64 cycles = 0;
#endif
    debug_only_printf(LC_TMTracer,
                      "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
                      "calldepth=%d, cycles=%llu\n",
                      fp->script->filename,
                      js_FramePCToLineNumber(cx, fp),
                      FramePCOffset(fp),
                      js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc],
                      (void*)lr,
                      getExitName(lr->exitType),
                      (long long int)(fp->regs->sp - StackBase(fp)),
                      calldepth,
                      (unsigned long long int)cycles);

    /*
     * If this trace is part of a tree, later branches might have added
     * additional globals for which we don't have any type information
     * available in the side exit. We merge in this information from the entry
     * type-map. See also the comment in the constructor of TraceRecorder
     * regarding why this is always safe to do.
     */
    TreeInfo* outermostTree = state.outermostTree;
    uint16* gslots = outermostTree->globalSlots->data();
    unsigned ngslots = outermostTree->globalSlots->length();
    JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
    JSTraceType* globalTypeMap;

    /* Are there enough globals? */
    Queue<JSTraceType> typeMap(0);
    if (innermost->numGlobalSlots == ngslots) {
        /* Yes. This is the ideal fast path. */
        globalTypeMap = innermost->globalTypeMap();
    } else {
        /*
         * No. Merge the typemap of the innermost entry and exit together. This
         * should always work because it is invalid for nested trees or linked
         * trees to have incompatible types. Thus, whenever a new global type
         * is lazily added into a tree, all dependent and linked trees are
         * immediately specialized (see bug 476653).
         */
        JS_ASSERT(innermost->root()->treeInfo->nGlobalTypes() == ngslots);
        JS_ASSERT(innermost->root()->treeInfo->nGlobalTypes() > innermost->numGlobalSlots);
        typeMap.ensure(ngslots);
#ifdef DEBUG
        unsigned check_ngslots =
#endif
        BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
        JS_ASSERT(check_ngslots == ngslots);
        globalTypeMap = typeMap.data();
    }

    /* Write back the topmost native stack frame. */
    unsigned ignoreSlots = innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT ?
                           innermost->numStackSlots - 1 : 0;
#ifdef DEBUG
    int slots =
#endif
        FlushNativeStackFrame(cx, innermost->calldepth,
                              innermost->stackTypeMap(),
                              stack, NULL, ignoreSlots);
    JS_ASSERT(unsigned(slots) == innermost->numStackSlots);

    if (innermost->nativeCalleeWord)
        SynthesizeSlowNativeFrame(state, cx, innermost);

    /* Write back interned globals. */
    JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
    JSObject* globalObj = outermostTree->globalObj();
    FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap);

#ifdef DEBUG
    /* Verify that our state restoration worked. */
    for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
        JS_ASSERT_IF(fp->argv, JSVAL_IS_OBJECT(fp->argv[-1]));
    }
#endif

    if (innermost->exitType != TIMEOUT_EXIT)
        AUDIT(sideExitIntoInterpreter);
    else
        AUDIT(timeoutIntoInterpreter);

    state.innermost = innermost;
}
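
/*
 * js_MonitorLoopEdge (below) is the interpreter's entry point into the JIT:
 * invoked at loop edges, it either forwards to an active recorder, executes
 * an existing compatible tree, or starts recording once the loop is hot
 * (++f->hits() reaching HOTLOOP below).
 */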
JS_REQUIRES_STACK bool
js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
{
#ifdef MOZ_TRACEVIS
    TraceVisStateObj tvso(cx, S_MONITOR);
#endif

    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    /* Is the recorder currently active? */
    if (tm->recorder) {
        jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;

        if (TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount))
            return true;

        /*
         * recordLoopEdge will invoke an inner tree if we have a matching
         * one. If we arrive here, that tree didn't run to completion and
         * instead we mis-matched or the inner tree took a side exit other than
         * the loop exit. We are thus no longer guaranteed to be parked on the
         * same loop header js_MonitorLoopEdge was called for. In fact, this
         * might not even be a loop header at all. Hence if the program counter
         * no longer hovers over the inner loop header, return to the
         * interpreter and do not attempt to trigger or record a new tree at
         * this location.
         */
        if (innerLoopHeaderPC != cx->fp->regs->pc) {
#ifdef MOZ_TRACEVIS
            tvso.r = R_INNER_SIDE_EXIT;
#endif
            return false;
        }
    }
    JS_ASSERT(!tm->recorder);

    /*
     * Make sure the shape of the global object still matches (this might flush
     * the JIT cache).
     */
    JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;

    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
        Backoff(cx, cx->fp->regs->pc);
        return false;
    }

    /* Do not enter the JIT code with a pending operation callback. */
    if (cx->operationCallbackFlag) {
#ifdef MOZ_TRACEVIS
        tvso.r = R_CALLBACK_PENDING;
#endif
        return false;
    }

    jsbytecode* pc = cx->fp->regs->pc;
    uint32 argc = cx->fp->argc;

    TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);

    /*
     * If we have no code in the anchor and no peers, we definitively won't be
     * able to activate any trees, so start compiling.
     */
    if (!f->code() && !f->peer) {
    record:
        if (++f->hits() < HOTLOOP) {
#ifdef MOZ_TRACEVIS
            tvso.r = f->hits() < 1 ? R_BACKED_OFF : R_COLD;
#endif
            return false;
        }

        /*
         * We can give RecordTree the root peer. If that peer is already taken,
         * it will walk the peer list and find us a free slot or allocate a new
         * tree if needed.
         */
        bool rv = RecordTree(cx, tm, f->first, NULL, 0, globalObj, globalShape,
                             globalSlots, argc, reason);
#ifdef MOZ_TRACEVIS
        if (!rv)
            tvso.r = R_FAIL_RECORD_TREE;
#endif
        return rv;
    }

    debug_only_printf(LC_TMTracer,
                      "Looking for compat peer %d@%d, from %p (ip: %p)\n",
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp), (void*)f, f->ip);

    uintN count;
    TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
    if (!match) {
        if (count < MAXPEERS)
            goto record;

        /*
         * If we hit the max peers ceiling, don't try to lookup fragments all
         * the time. That's expensive. This must be a rather type-unstable loop.
         */
        debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
        Blacklist((jsbytecode*) f->root->ip);
#ifdef MOZ_TRACEVIS
        tvso.r = R_MAX_PEERS;
#endif
        return false;
    }

    /*
     * Trees that only unwind recursive frames usually won't do much work, and
     * most time will be spent entering and exiting ExecuteTree(). There's no
     * benefit to doing this until the down-recursive side completes.
     */
    if (match->treeInfo->recursion == Recursion_Unwinds)
        return false;

    VMSideExit* lr = NULL;
    VMSideExit* innermostNestedGuard = NULL;

    lr = ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
    if (!lr) {
#ifdef MOZ_TRACEVIS
        tvso.r = R_FAIL_EXECUTE_TREE;
#endif
        return false;
    }

    /*
     * If we exit on a branch, or on a tree call guard, try to grow the inner
     * tree (in case of a branch exit), or the tree nested around the tree we
     * exited from (in case of the tree call guard).
     */
    bool rv;
    switch (lr->exitType) {
      case RECURSIVE_UNLINKED_EXIT:
      case UNSTABLE_LOOP_EXIT:
        rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0);
#ifdef MOZ_TRACEVIS
        if (!rv)
            tvso.r = R_FAIL_STABILIZE;
#endif
        return rv;

      case OVERFLOW_EXIT:
        oracle.markInstructionUndemotable(cx->fp->regs->pc);
        /* FALL THROUGH */
      case RECURSIVE_SLURP_FAIL_EXIT:
      case RECURSIVE_SLURP_MISMATCH_EXIT:
      case RECURSIVE_EMPTY_RP_EXIT:
      case RECURSIVE_MISMATCH_EXIT:
        return AttemptToExtendTree(cx, lr, NULL, NULL
#ifdef MOZ_TRACEVIS
                                   , &tvso
#endif
                                   );

      case RECURSIVE_LOOP_EXIT:
      case LOOP_EXIT:
        if (innermostNestedGuard)
            return AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL
#ifdef MOZ_TRACEVIS
                                       , &tvso
#endif
                                       );
#ifdef MOZ_TRACEVIS
        tvso.r = R_NO_EXTEND_OUTER;
#endif
        return false;

#ifdef MOZ_TRACEVIS
      case MISMATCH_EXIT:  tvso.r = R_MISMATCH_EXIT;  return false;
      case OOM_EXIT:       tvso.r = R_OOM_EXIT;       return false;
      case TIMEOUT_EXIT:   tvso.r = R_TIMEOUT_EXIT;   return false;
      case DEEP_BAIL_EXIT: tvso.r = R_DEEP_BAIL_EXIT; return false;
      case STATUS_EXIT:    tvso.r = R_STATUS_EXIT;    return false;
#endif

      default:
        /*
         * No, this was an unusual exit (i.e. out of memory/GC), so just resume
         * interpretation.
         */
#ifdef MOZ_TRACEVIS
        tvso.r = R_OTHER_EXIT;
#endif
        return false;
    }
}
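
/*
 * monitorRecording (below) is called for every bytecode while a recorder is
 * live. It dispatches to the per-opcode record_JSOP_* handler via the OPDEF
 * table include and centralizes the abort and OOM bookkeeping afterwards.
 */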
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::monitorRecording(JSOp op)
{
    JSTraceMonitor &localtm = JS_TRACE_MONITOR(cx);
    debug_only_stmt( JSContext *localcx = cx; )

    /* Process needFlush requests now. */
    if (localtm.needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        return ARECORD_ABORTED;
    }
    JS_ASSERT(!fragment->lastIns);

    /*
     * Clear one-shot state used to communicate between record_JSOP_CALL and post-
     * opcode-case-guts record hook (record_NativeCallComplete).
     */
    pendingSpecializedNative = NULL;

    /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
    if (pendingGuardCondition) {
        guard(true, pendingGuardCondition, STATUS_EXIT);
        pendingGuardCondition = NULL;
    }

    /* Handle one-shot request to unbox the result of a property get. */
    if (pendingUnboxSlot) {
        LIns* val_ins = get(pendingUnboxSlot);
        val_ins = unbox_jsval(*pendingUnboxSlot, val_ins, snapshot(BRANCH_EXIT));
        set(pendingUnboxSlot, val_ins);
        pendingUnboxSlot = 0;
    }

#ifdef DEBUG
    if (js_LogController.lcbits & LC_TMRecorder) {
        js_Disassemble1(cx, cx->fp->script, cx->fp->regs->pc,
                        cx->fp->imacpc
                        ? 0 : cx->fp->regs->pc - cx->fp->script->code,
                        !cx->fp->imacpc, stdout);
    }
#endif

    /*
     * If op is not a break or a return from a loop, continue recording and
     * follow the trace. We check for imacro-calling bytecodes inside each
     * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
     * time.
     */

    AbortableRecordingStatus status;
    bool wasInImacro = (cx->fp->imacpc != NULL);
    switch (op) {
      default:
        status = ARECORD_ERROR;
        goto stop_recording;
# define OPDEF(x,val,name,token,length,nuses,ndefs,prec,format)               \
      case x:                                                                 \
        status = this->record_##x();                                          \
        if (JSOP_IS_IMACOP(x))                                                \
            goto imacro;                                                      \
        break;
# include "jsopcode.tbl"
# undef OPDEF
    }

    /* Careful, |this| may have been deleted. */
    JS_ASSERT(status != ARECORD_IMACRO);
    JS_ASSERT_IF(!wasInImacro, localcx->fp->imacpc == NULL);

  imacro:
    /* |this| may be killed while recording. */
    if (status == ARECORD_COMPLETED) {
        JS_ASSERT(localtm.recorder != this);
        return localtm.recorder ? ARECORD_CONTINUE : ARECORD_COMPLETED;
    }
    if (status == ARECORD_ABORTED) {
        JS_ASSERT(!localtm.recorder);
        return ARECORD_ABORTED;
    }

  stop_recording:
    /* Handle lazy abort / OOM. */
    if (outOfMemory() || js_OverfullJITCache(&localtm)) {
        ResetJIT(cx, FR_OOM);
        return ARECORD_ABORTED;
    }
    if (StatusAbortsRecording(status)) {
        js_AbortRecording(cx, js_CodeName[op]);
        return ARECORD_ABORTED;
    }

    return status;
}
JS_REQUIRES_STACK void
js_AbortRecording(JSContext* cx, const char* reason)
{
#ifdef DEBUG
    JS_ASSERT(TRACE_RECORDER(cx));
    TRACE_RECORDER(cx)->finishAbort(reason);
#else
    TRACE_RECORDER(cx)->finishAbort("[no reason]");
#endif
}
#if defined NANOJIT_IA32
static bool
CheckForSSE2()
{
    char *c = getenv("X86_FORCE_SSE2");
    if (c)
        return (!strcmp(c, "true") ||
                !strcmp(c, "1") ||
                !strcmp(c, "yes"));

    int features = 0;
#if defined _MSC_VER
    __asm
    {
        mov eax, 1
        cpuid
        mov features, edx
    }
#elif defined __GNUC__
    asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
        "mov $0x01, %%eax\n"
        "cpuid\n"
        "mov %%edx, %0\n"
        "xchg %%esi, %%ebx\n"
        : "=m" (features)
        : /* We have no inputs */
        : "%eax", "%esi", "%ecx", "%edx"
       );
#elif defined __SUNPRO_C || defined __SUNPRO_CC
    asm("push %%ebx\n"
        "mov $0x01, %%eax\n"
        "cpuid\n"
        "pop %%ebx\n"
        : "=d" (features)
        : /* We have no inputs */
        : "%eax", "%ecx"
       );
#endif
    return (features & (1<<26)) != 0;
}
#endif
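
/*
 * Note: CheckForSSE2 keys off CPUID function 1, which reports SSE2 support
 * in bit 26 of EDX; hence the (1<<26) test above.
 */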
#if defined(NANOJIT_ARM)

#if defined(_MSC_VER) && defined(WINCE)

// these come in from jswince.asm
extern "C" int js_arm_try_thumb_op();
extern "C" int js_arm_try_armv6t2_op();
extern "C" int js_arm_try_armv5_op();
extern "C" int js_arm_try_armv6_op();
extern "C" int js_arm_try_armv7_op();
extern "C" int js_arm_try_vfp_op();

static bool
js_arm_check_thumb() {
    bool ret = false;
    __try {
        js_arm_try_thumb_op();
        ret = true;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
        ret = false;
    }
    return ret;
}

static bool
js_arm_check_thumb2() {
    bool ret = false;
    __try {
        js_arm_try_armv6t2_op();
        ret = true;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
        ret = false;
    }
    return ret;
}

static unsigned int
js_arm_check_arch() {
    unsigned int arch = 4;
    __try {
        js_arm_try_armv5_op();
        arch = 5;
        js_arm_try_armv6_op();
        arch = 6;
        js_arm_try_armv7_op();
        arch = 7;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
    }
    return arch;
}

static bool
js_arm_check_vfp() {
#ifdef WINCE_WINDOWS_MOBILE
    return false;
#else
    bool ret = false;
    __try {
        js_arm_try_vfp_op();
        ret = true;
    } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
        ret = false;
    }
    return ret;
#endif
}

#define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1

/* See "Suppressing Exception Notifications while Debugging", at
 * http://msdn.microsoft.com/en-us/library/ms924252.aspx
 */
static void
js_disable_debugger_exceptions()
{
    // 2 == TLSSLOT_KERNEL
    DWORD kctrl = (DWORD) TlsGetValue(2);
    // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
    kctrl |= 0x12;
    TlsSetValue(2, (LPVOID) kctrl);
}

static void
js_enable_debugger_exceptions()
{
    // 2 == TLSSLOT_KERNEL
    DWORD kctrl = (DWORD) TlsGetValue(2);
    // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
    kctrl &= ~0x12;
    TlsSetValue(2, (LPVOID) kctrl);
}

#elif defined(__GNUC__) && defined(AVMPLUS_LINUX)

#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <string.h>
#include <elf.h>

// Assume ARMv4 by default.
static unsigned int arm_arch = 4;
static bool arm_has_thumb = false;
static bool arm_has_vfp = false;
static bool arm_has_neon = false;
static bool arm_has_iwmmxt = false;
static bool arm_tests_initialized = false;

static void
arm_read_auxv() {
    int fd;
    Elf32_auxv_t aux;

    fd = open("/proc/self/auxv", O_RDONLY);
    if (fd > 0) {
        while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
            if (aux.a_type == AT_HWCAP) {
                uint32_t hwcap = aux.a_un.a_val;
                if (getenv("ARM_FORCE_HWCAP"))
                    hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
                // hardcode these values to avoid depending on specific versions
                // of the hwcap header, e.g. HWCAP_NEON
                arm_has_thumb = (hwcap & 4) != 0;
                arm_has_vfp = (hwcap & 64) != 0;
                arm_has_iwmmxt = (hwcap & 512) != 0;
                // this flag is only present on kernel 2.6.29
                arm_has_neon = (hwcap & 4096) != 0;
            } else if (aux.a_type == AT_PLATFORM) {
                const char *plat = (const char*) aux.a_un.a_val;
                if (getenv("ARM_FORCE_PLATFORM"))
                    plat = getenv("ARM_FORCE_PLATFORM");
                // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
                // or big-endian variants and the digit indicates the version of the platform.
                // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
                // processors. Architectures newer than ARMv7 are assumed to be
                // backwards-compatible with ARMv7.
                if ((plat[0] == 'v') &&
                    (plat[1] >= '4') && (plat[1] <= '9') &&
                    ((plat[2] == 'l') || (plat[2] == 'b')))
                {
                    arm_arch = plat[1] - '0';
                } else {
                    // For production code, ignore invalid (or unexpected) platform strings and
                    // fall back to the default. For debug code, use an assertion to catch this
                    // when not running in scratchbox.
                    if (getenv("_SBOX_DIR") == NULL)
                        JS_ASSERT(false);
                }
            }
        }
        close(fd);

        // if we don't have 2.6.29, we have to do this hack; set
        // the env var to trust HWCAP.
        if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
            arm_has_neon = true;
    }

    arm_tests_initialized = true;
}

static bool
js_arm_check_thumb() {
    if (!arm_tests_initialized)
        arm_read_auxv();

    return arm_has_thumb;
}

static bool
js_arm_check_thumb2() {
    if (!arm_tests_initialized)
        arm_read_auxv();

    // ARMv6T2 also supports Thumb2, but Linux doesn't provide an easy way to test for this as
    // there is no associated bit in auxv. ARMv7 always supports Thumb2, and future architectures
    // are assumed to be backwards-compatible.
    return (arm_arch >= 7);
}

static unsigned int
js_arm_check_arch() {
    if (!arm_tests_initialized)
        arm_read_auxv();

    return arm_arch;
}

static bool
js_arm_check_vfp() {
    if (!arm_tests_initialized)
        arm_read_auxv();

    return arm_has_vfp;
}

#else
#warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
static bool
js_arm_check_thumb() { return false; }
static bool
js_arm_check_thumb2() { return false; }
static unsigned int
js_arm_check_arch() { return 4; }
static bool
js_arm_check_vfp() { return false; }
#endif

#ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
static void
js_enable_debugger_exceptions() { }
static void
js_disable_debugger_exceptions() { }
#endif

#endif /* NANOJIT_ARM */
void
js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
{
    JSTraceMonitor* tm = &JS_THREAD_DATA(cx)->traceMonitor;
    JS_ASSERT(tm->codeAlloc && tm->dataAlloc && tm->traceAlloc);
    tm->maxCodeCacheBytes = bytes;
}
void
js_InitJIT(JSTraceMonitor *tm)
{
#if defined JS_JIT_SPEW
    tm->profAlloc = NULL;
    /* Set up debug logging. */
    if (!did_we_set_up_debug_logging) {
        InitJITLogController();
        did_we_set_up_debug_logging = true;
    }
    /* Set up fragprofiling, if required. */
    if (js_LogController.lcbits & LC_FragProfile) {
        tm->profAlloc = new VMAllocator();
        tm->profTab = new (*tm->profAlloc) FragStatsMap(*tm->profAlloc);
    }
#else
    memset(&js_LogController, 0, sizeof(js_LogController));
#endif

    if (!did_we_check_processor_features) {
#if defined NANOJIT_IA32
        avmplus::AvmCore::config.use_cmov =
            avmplus::AvmCore::config.sse2 = CheckForSSE2();
        avmplus::AvmCore::config.fixed_esp = true;
#endif
#if defined NANOJIT_ARM

        js_disable_debugger_exceptions();

        bool            arm_vfp     = js_arm_check_vfp();
        bool            arm_thumb   = js_arm_check_thumb();
        bool            arm_thumb2  = js_arm_check_thumb2();
        unsigned int    arm_arch    = js_arm_check_arch();

        js_enable_debugger_exceptions();

        avmplus::AvmCore::config.vfp        = arm_vfp;
        avmplus::AvmCore::config.soft_float = !arm_vfp;
        avmplus::AvmCore::config.thumb      = arm_thumb;
        avmplus::AvmCore::config.thumb2     = arm_thumb2;
        avmplus::AvmCore::config.arch       = arm_arch;

        // Sanity-check the configuration detection.
        //  * We don't understand architectures prior to ARMv4.
        JS_ASSERT(arm_arch >= 4);
        //  * All architectures support Thumb with the possible exception of ARMv4.
        JS_ASSERT((arm_thumb) || (arm_arch == 4));
        //  * Only ARMv6T2 and ARMv7(+) support Thumb2, but ARMv6 does not.
        JS_ASSERT((arm_thumb2) || (arm_arch <= 6));
        //  * All architectures that support Thumb2 also support Thumb.
        JS_ASSERT((arm_thumb2 && arm_thumb) || (!arm_thumb2));
#endif
        did_we_check_processor_features = true;
    }

    /* Set the default size for the code cache to 16MB. */
    tm->maxCodeCacheBytes = 16 M;

    if (!tm->recordAttempts.ops) {
        JS_DHashTableInit(&tm->recordAttempts, JS_DHashGetStubOps(),
                          NULL, sizeof(PCHashEntry),
                          JS_DHASH_DEFAULT_CAPACITY(PC_HASH_COUNT));
    }

    JS_ASSERT(!tm->dataAlloc && !tm->traceAlloc && !tm->codeAlloc);
    tm->dataAlloc = new VMAllocator();
    tm->traceAlloc = new VMAllocator();
    tm->tempAlloc = new VMAllocator();
    tm->reTempAlloc = new VMAllocator();
    tm->codeAlloc = new CodeAlloc();
    tm->frameCache = new FrameInfoCache(tm->dataAlloc);
    verbose_only( tm->branches = NULL; )

    debug_only(memset(&jitstats, 0, sizeof(jitstats)));

    /* Architecture properties used by test cases. */
    jitstats.archIsIA32 = 0;
    jitstats.archIs64BIT = 0;
    jitstats.archIsARM = 0;
    jitstats.archIsSPARC = 0;
    jitstats.archIsPPC = 0;
#if defined NANOJIT_IA32
    jitstats.archIsIA32 = 1;
#endif
#if defined NANOJIT_64BIT
    jitstats.archIs64BIT = 1;
#endif
#if defined NANOJIT_ARM
    jitstats.archIsARM = 1;
#endif
#if defined NANOJIT_SPARC
    jitstats.archIsSPARC = 1;
#endif
#if defined NANOJIT_PPC
    jitstats.archIsPPC = 1;
#endif
#if defined NANOJIT_X64
    jitstats.archIsAMD64 = 1;
#endif
}
void
js_FinishJIT(JSTraceMonitor *tm)
{
    JS_ASSERT(!tm->recorder);

#ifdef JS_JIT_SPEW
    if (jitstats.recorderStarted) {
        char sep = ':';
        debug_only_print0(LC_TMStats, "recorder");
#define RECORDER_JITSTAT(_ident, _name)                             \
        debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
                          (unsigned long long int)jitstats._ident); \
        sep = ',';
#define JITSTAT(x) /* nothing */
#include "jitstats.tbl"
#undef JITSTAT
#undef RECORDER_JITSTAT
        debug_only_print0(LC_TMStats, "\n");

        sep = ':';
        debug_only_print0(LC_TMStats, "monitor");
#define MONITOR_JITSTAT(_ident, _name)                              \
        debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
                          (unsigned long long int)jitstats._ident); \
        sep = ',';
#define JITSTAT(x) /* nothing */
#include "jitstats.tbl"
#undef JITSTAT
#undef MONITOR_JITSTAT
        debug_only_print0(LC_TMStats, "\n");
    }
#endif

    if (tm->recordAttempts.ops)
        JS_DHashTableFinish(&tm->recordAttempts);

#ifdef JS_JIT_SPEW
    // Recover profiling data from expiring Fragments, and display
    // final results.
    if (js_LogController.lcbits & LC_FragProfile) {
        for (Seq<Fragment*>* f = tm->branches; f; f = f->tail) {
            js_FragProfiling_FragFinalizer(f->head, tm);
        }
        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
            for (TreeFragment *f = tm->vmfragments[i]; f; f = f->next) {
                JS_ASSERT(f->root == f);
                for (TreeFragment *p = f; p; p = p->peer)
                    js_FragProfiling_FragFinalizer(p, tm);
            }
        }
        REHashMap::Iter iter(*(tm->reFragments));
        while (iter.next()) {
            VMFragment* frag = (VMFragment*)iter.value();
            js_FragProfiling_FragFinalizer(frag, tm);
        }

        js_FragProfiling_showResults(tm);
        delete tm->profAlloc;
    } else {
        NanoAssert(!tm->profTab);
        NanoAssert(!tm->profAlloc);
    }
#endif

    memset(&tm->vmfragments[0], 0, FRAGMENT_TABLE_SIZE * sizeof(TreeFragment*));

    if (tm->frameCache) {
        delete tm->frameCache;
        tm->frameCache = NULL;
    }

    if (tm->codeAlloc) {
        delete tm->codeAlloc;
        tm->codeAlloc = NULL;
    }

    if (tm->dataAlloc) {
        delete tm->dataAlloc;
        tm->dataAlloc = NULL;
    }

    if (tm->traceAlloc) {
        delete tm->traceAlloc;
        tm->traceAlloc = NULL;
    }

    if (tm->tempAlloc) {
        delete tm->tempAlloc;
        tm->tempAlloc = NULL;
    }

    if (tm->reTempAlloc) {
        delete tm->reTempAlloc;
        tm->reTempAlloc = NULL;
    }
}
static JSDHashOperator
PurgeScriptRecordingAttempts(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg)
{
    PCHashEntry *e = (PCHashEntry *)hdr;
    JSScript *script = (JSScript *)arg;
    jsbytecode *pc = (jsbytecode *)e->key;

    if (JS_UPTRDIFF(pc, script->code) < script->length)
        return JS_DHASH_REMOVE;
    return JS_DHASH_NEXT;
}
JS_REQUIRES_STACK void
js_PurgeScriptFragments(JSContext* cx, JSScript* script)
{
    if (!TRACING_ENABLED(cx))
        return;
    debug_only_printf(LC_TMTracer,
                      "Purging fragments for JSScript %p.\n", (void*)script);

    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
        TreeFragment** fragp = &tm->vmfragments[i];
        while (TreeFragment* frag = *fragp) {
            if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
                /* This fragment is associated with the script. */
                debug_only_printf(LC_TMTracer,
                                  "Disconnecting TreeFragment %p "
                                  "with ip %p, in range [%p,%p).\n",
                                  (void*)frag, frag->ip, script->code,
                                  script->code + script->length);

                JS_ASSERT(frag->root == frag);
                *fragp = frag->next;
                do {
                    verbose_only( js_FragProfiling_FragFinalizer(frag, tm); )
                    TrashTree(cx, frag);
                } while ((frag = frag->peer) != NULL);
            } else {
                fragp = &frag->next;
            }
        }
    }

    JS_DHashTableEnumerate(&tm->recordAttempts, PurgeScriptRecordingAttempts, script);
}
bool
js_OverfullJITCache(JSTraceMonitor* tm)
{
    /*
     * You might imagine the outOfMemory flag on the allocator is sufficient
     * to model the notion of "running out of memory", but there are actually
     * two separate issues involved:
     *
     *  1. The process truly running out of memory: malloc() or mmap()
     *     failed.
     *
     *  2. The limit we put on the "intended size" of the tracemonkey code
     *     cache, in pages, has been exceeded.
     *
     * Condition 1 doesn't happen very often, but we're obliged to try to
     * safely shut down and signal the rest of spidermonkey when it
     * does. Condition 2 happens quite regularly.
     *
     * Presently, the code in this file doesn't check the outOfMemory condition
     * often enough, and frequently misuses the unchecked results of
     * lirbuffer insertions on the assumption that it will notice the
     * outOfMemory flag "soon enough" when it returns to the monitorRecording
     * function. This turns out to be a false assumption if we use outOfMemory
     * to signal condition 2: we regularly provoke "passing our intended
     * size" and regularly fail to notice it in time to prevent writing
     * over the end of an artificially self-limited LIR buffer.
     *
     * To mitigate, though not completely solve, this problem, we're
     * modeling the two forms of memory exhaustion *separately* for the
     * time being: condition 1 is handled by the outOfMemory flag inside
     * nanojit, and condition 2 is being handled independently *here*. So
     * we construct our allocators to use all available memory they like,
     * and only report outOfMemory to us when there is literally no OS memory
     * left. Merely purging our cache when we hit our highwater mark is
     * handled by the (few) callers of this function.
     */
    jsuint maxsz = tm->maxCodeCacheBytes;
    VMAllocator *dataAlloc = tm->dataAlloc;
    VMAllocator *traceAlloc = tm->traceAlloc;
    CodeAlloc *codeAlloc = tm->codeAlloc;

    return (codeAlloc->size() + dataAlloc->size() + traceAlloc->size() > maxsz);
}
JS_FORCES_STACK JS_FRIEND_API(void)
js_DeepBail(JSContext *cx)
{
    JS_ASSERT(JS_ON_TRACE(cx));

    /*
     * Exactly one context on the current thread is on trace. Find out which
     * one. (Most callers cannot guarantee that it's cx.)
     */
    JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
    JSContext *tracecx = tm->tracecx;

    /* It's a bug if a non-FAIL_STATUS builtin gets here. */
    JS_ASSERT(tracecx->bailExit);

    tm->tracecx = NULL;
    debug_only_print0(LC_TMTracer, "Deep bail.\n");
    LeaveTree(*tracecx->interpState, tracecx->bailExit);
    tracecx->bailExit = NULL;

    InterpState* state = tracecx->interpState;
    state->builtinStatus |= JSBUILTIN_BAILED;

    /*
     * Between now and the LeaveTree in ExecuteTree, |tm->storage| may be reused
     * if another trace executes before the currently executing native returns.
     * However, all such traces will complete by the time the currently
     * executing native returns and the return value is written to the native
     * stack. After that point, no traces may execute until the LeaveTree in
     * ExecuteTree, hence the invariant is maintained that only one trace uses
     * |tm->storage| at a time.
     */
    state->deepBailSp = state->sp;
}
JS_REQUIRES_STACK jsval&
TraceRecorder::argval(unsigned n) const
{
    JS_ASSERT(n < cx->fp->fun->nargs);
    return cx->fp->argv[n];
}

JS_REQUIRES_STACK jsval&
TraceRecorder::varval(unsigned n) const
{
    JS_ASSERT(n < cx->fp->script->nslots);
    return cx->fp->slots[n];
}

JS_REQUIRES_STACK jsval&
TraceRecorder::stackval(int n) const
{
    jsval* sp = cx->fp->regs->sp;
    return sp[n];
}
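
/*
 * Note that stackval takes a signed index: callers may pass a negative n to
 * address operands just below regs->sp.
 */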
JS_REQUIRES_STACK LIns*
TraceRecorder::scopeChain() const
{
    return lir->insLoad(LIR_ldp,
                        lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
                        offsetof(JSStackFrame, scopeChain));
}
/*
 * Return the frame of a call object if that frame is part of the current
 * trace. |depthp| is an optional outparam: if it is non-null, it will be
 * filled in with the depth of the call object's frame relative to cx->fp.
 */
JS_REQUIRES_STACK JSStackFrame*
TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const
{
    JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate();
    JSStackFrame* fp = cx->fp;
    for (unsigned depth = 0; depth <= callDepth; ++depth) {
        if (fp == ofp) {
            if (depthp)
                *depthp = depth;
            return ofp;
        }
        if (!(fp = fp->down))
            break;
    }
    return NULL;
}

JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureVar, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR, 0, 0)
JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureArg, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR, 0, 0)
/*
 * Search the scope chain for a property lookup operation at the current PC and
 * generate LIR to access the given property. Return RECORD_CONTINUE on success,
 * otherwise abort and return RECORD_STOP. There are 3 outparams:
 *
 *     vp           the address of the current property value
 *     ins          LIR instruction representing the property value on trace
 *     NameResult   describes how to look up name; see comment for NameResult in jstracer.h
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::scopeChainProp(JSObject* chainHead, jsval*& vp, LIns*& ins, NameResult& nr)
{
    JS_ASSERT(chainHead == cx->fp->scopeChain);
    JS_ASSERT(chainHead != globalObj);

    JSTraceMonitor &localtm = *traceMonitor;

    JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    JSObject* obj2;
    JSProperty* prop;
    JSObject *obj = chainHead;
    bool ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop);

    /* js_FindProperty can reenter the interpreter and kill |this|. */
    if (!localtm.recorder)
        return ARECORD_ABORTED;

    if (!ok)
        RETURN_ERROR_A("error in js_FindProperty");

    if (!prop)
        RETURN_STOP_A("failed to find name in non-global scope chain");

    if (obj == globalObj) {
        // Even if the property is on the global object, we must guard against
        // the creation of properties that shadow the property in the middle
        // of the scope chain.
        LIns *head_ins;
        if (cx->fp->argv) {
            // Skip any Call object when inside a function. Any reference to a
            // Call name the compiler resolves statically and we do not need
            // to match shapes of the Call objects.
            chainHead = cx->fp->calleeObject()->getParent();
            head_ins = stobj_get_parent(get(&cx->fp->argv[-2]));
        } else {
            head_ins = scopeChain();
        }
        LIns *obj_ins;
        CHECK_STATUS_A(traverseScopeChain(chainHead, head_ins, obj, obj_ins));

        JSScopeProperty* sprop = (JSScopeProperty*) prop;

        if (obj2 != obj) {
            obj2->dropProperty(cx, prop);
            RETURN_STOP_A("prototype property");
        }
        if (!isValidSlot(OBJ_SCOPE(obj), sprop)) {
            obj2->dropProperty(cx, prop);
            return ARECORD_STOP;
        }
        if (!lazilyImportGlobalSlot(sprop->slot)) {
            obj2->dropProperty(cx, prop);
            RETURN_STOP_A("lazy import of global slot failed");
        }
        vp = &STOBJ_GET_SLOT(obj, sprop->slot);
        ins = get(vp);
        obj2->dropProperty(cx, prop);
        nr.tracked = true;
        return ARECORD_CONTINUE;
    }

    if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass)
        return InjectStatus(callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr));

    obj2->dropProperty(cx, prop);
    RETURN_STOP_A("fp->scopeChain is not global or active call object");
}
/*
 * Generate LIR to access a property of a Call object.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, jsval*& vp,
                        LIns*& ins, NameResult& nr)
{
    JSScopeProperty *sprop = (JSScopeProperty*) prop;

    JSOp op = JSOp(*cx->fp->regs->pc);
    uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
    if (setflags && (sprop->attrs & JSPROP_READONLY))
        RETURN_STOP("writing to a read-only property");

    uintN slot = sprop->shortid;

    vp = NULL;
    uintN upvar_slot = SPROP_INVALID_SLOT;
    JSStackFrame* cfp = (JSStackFrame*) obj->getPrivate();
    if (cfp) {
        if (sprop->getter == js_GetCallArg) {
            JS_ASSERT(slot < cfp->fun->nargs);
            vp = &cfp->argv[slot];
            upvar_slot = slot;
            nr.v = *vp;
        } else if (sprop->getter == js_GetCallVar) {
            JS_ASSERT(slot < cfp->script->nslots);
            vp = &cfp->slots[slot];
            upvar_slot = cx->fp->fun->nargs + slot;
            nr.v = *vp;
        } else {
            RETURN_STOP("dynamic property of Call object");
        }
        obj->dropProperty(cx, prop);

        if (frameIfInRange(obj)) {
            // At this point we are guaranteed to be looking at an active call object
            // whose properties are stored in the corresponding JSStackFrame.
            ins = get(vp);
            nr.tracked = true;
            return RECORD_CONTINUE;
        }
    } else {
        // Call objects do not yet have sprop->isMethod() properties, but they
        // should. See bug 514046, for which this code is future-proof. Remove
        // this comment when that bug is fixed (so, FIXME: 514046).
#ifdef DEBUG
        JSBool rv =
#endif
            js_GetPropertyHelper(cx, obj, sprop->id,
                                 (op == JSOP_CALLNAME)
                                 ? JSGET_NO_METHOD_BARRIER
                                 : JSGET_METHOD_BARRIER,
                                 &nr.v);
        JS_ASSERT(rv);
        obj->dropProperty(cx, prop);
    }

    LIns* obj_ins;
    JSObject* parent = STOBJ_GET_PARENT(cx->fp->calleeObject());
    LIns* parent_ins = stobj_get_parent(get(&cx->fp->argv[-2]));
    CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins));

    ClosureVarInfo* cv = new (traceAlloc()) ClosureVarInfo();
    cv->id = id;
    cv->slot = slot;
    cv->callDepth = callDepth;
    cv->resolveFlags = cx->resolveFlags == JSRESOLVE_INFER
                       ? js_InferFlags(cx, 0)
                       : cx->resolveFlags;

    LIns* outp = lir->insAlloc(sizeof(double));
    LIns* args[] = {
        outp,
        INS_CONSTPTR(cv),
        obj_ins,
        cx_ins
    };
    const CallInfo* ci;
    if (sprop->getter == js_GetCallArg)
        ci = &GetClosureArg_ci;
    else
        ci = &GetClosureVar_ci;
    LIns* call_ins = lir->insCall(ci, args);
    JSTraceType type = getCoercedType(nr.v);
    guard(true,
          addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
                  "guard(type-stable name access)"),
          BRANCH_EXIT);
    ins = stackLoad(outp, type);
    nr.tracked = false;
    nr.obj = obj;
    nr.obj_ins = obj_ins;
    nr.scopeProp = sprop;

    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::arg(unsigned n)
{
    return get(&argval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::arg(unsigned n, LIns* i)
{
    set(&argval(n), i);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::var(unsigned n)
{
    return get(&varval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::var(unsigned n, LIns* i)
{
    set(&varval(n), i);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::stack(int n)
{
    return get(&stackval(n));
}

JS_REQUIRES_STACK void
TraceRecorder::stack(int n, LIns* i)
{
    set(&stackval(n), i, n >= 0);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
{
    /*
     * To even consider this operation for demotion, both operands have to be
     * integers and the oracle must not give us a negative hint for the
     * instruction.
     */
    if (oracle.isInstructionUndemotable(cx->fp->regs->pc) || !isPromoteInt(s0) || !isPromoteInt(s1)) {
    out:
        if (v == LIR_fmod) {
            LIns* args[] = { s1, s0 };
            return lir->insCall(&js_dmod_ci, args);
        }
        LIns* result = lir->ins2(v, s0, s1);
        JS_ASSERT_IF(s0->isconstf() && s1->isconstf(), result->isconstf());
        return result;
    }

    jsdouble r;
    switch (v) {
      case LIR_fadd:
        r = v0 + v1;
        break;
      case LIR_fsub:
        r = v0 - v1;
        break;
      case LIR_fmul:
        r = v0 * v1;
        if (r == 0.0)
            goto out;
        break;
#if !defined NANOJIT_ARM
      case LIR_fdiv:
        if (v1 == 0)
            goto out;
        r = v0 / v1;
        break;
#endif
#if defined NANOJIT_IA32 || defined NANOJIT_X64
      case LIR_fmod:
        if (v0 < 0 || v1 == 0 || (s1->isconstf() && v1 < 0))
            goto out;
        r = js_dmod(v0, v1);
        break;
#endif
      default:
        goto out;
    }

    /*
     * The result must be an integer at record time, otherwise there is no
     * point in trying to demote it.
     */
    if (jsint(r) != r || JSDOUBLE_IS_NEGZERO(r))
        goto out;

    LIns* d0 = ::demote(lir, s0);
    LIns* d1 = ::demote(lir, s1);

    /*
     * Speculatively emit an integer operation, betting that at runtime we
     * will get integer results again.
     */
    VMSideExit* exit;
    LIns* result;
    switch (v) {
#if defined NANOJIT_IA32 || defined NANOJIT_X64
      case LIR_fdiv:
        if (d0->isconst() && d1->isconst())
            return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));

        exit = snapshot(OVERFLOW_EXIT);

        /*
         * If the divisor is greater than zero it's always safe to execute
         * the division. If not, we have to make sure we are not running
         * into -2147483648 / -1, because it can raise an overflow exception.
         */
        if (!d1->isconst()) {
            LIns* gt = lir->insBranch(LIR_jt, lir->ins2i(LIR_gt, d1, 0), NULL);
            guard(false, lir->ins_eq0(d1), exit);
            guard(false, lir->ins2(LIR_and,
                                   lir->ins2i(LIR_eq, d0, 0x80000000),
                                   lir->ins2i(LIR_eq, d1, -1)), exit);
            gt->setTarget(lir->ins0(LIR_label));
        } else {
            if (d1->imm32() == -1)
                guard(false, lir->ins2i(LIR_eq, d0, 0x80000000), exit);
        }
        result = lir->ins2(v = LIR_div, d0, d1);

        /* As long as the modulus is zero, the result is an integer. */
        guard(true, lir->ins_eq0(lir->ins1(LIR_mod, result)), exit);

        /* Don't lose a -0. */
        guard(false, lir->ins_eq0(result), exit);
        break;

      case LIR_fmod: {
        if (d0->isconst() && d1->isconst())
            return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));

        exit = snapshot(OVERFLOW_EXIT);

        /* Make sure we don't trigger division by zero at runtime. */
        if (!d1->isconst())
            guard(false, lir->ins_eq0(d1), exit);
        result = lir->ins1(v = LIR_mod, lir->ins2(LIR_div, d0, d1));

        /* If the result is not 0, it is always within the integer domain. */
        LIns* branch = lir->insBranch(LIR_jf, lir->ins_eq0(result), NULL);

        /*
         * If the result is zero, we must exit if the lhs is negative since
         * the result is -0 in this case, which is not in the integer domain.
         */
        guard(false, lir->ins2i(LIR_lt, d0, 0), exit);
        branch->setTarget(lir->ins0(LIR_label));
        break;
      }
#endif

      default:
        v = (LOpcode)((int)v & ~LIR64);
        result = lir->ins2(v, d0, d1);

        /*
         * If the operands guarantee that the result will be an integer (i.e.
         * z = x + y with 0 <= (x|y) <= 0xffff guarantees z <= fffe0001), we
         * don't have to guard against an overflow. Otherwise we emit a guard
         * that will inform the oracle and cause a non-demoted trace to be
         * attached that uses floating-point math for this operation.
         */
        if (!result->isconst() && (!IsOverflowSafe(v, d0) || !IsOverflowSafe(v, d1))) {
            exit = snapshot(OVERFLOW_EXIT);
            guard(false, lir->ins1(LIR_ov, result), exit);
            if (v == LIR_mul) // make sure we don't lose a -0
                guard(false, lir->ins_eq0(result), exit);
        }
        break;
    }
    JS_ASSERT_IF(d0->isconst() && d1->isconst(),
                 result->isconst() && result->imm32() == jsint(r));
    return lir->ins1(LIR_i2f, result);
}

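/*
 * Example: when recording "k = (k + 7) % 5" with k a promotable int, the
 * path above emits LIR_add and LIR_div/LIR_mod guarded by OVERFLOW_EXIT
 * instead of calling js_dmod on doubles. A failing guard informs the
 * oracle, so the next recording of this pc keeps the floating-point path.
 */
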
LIns*
TraceRecorder::f2i(LIns* f)
{
    return lir->insCall(&js_DoubleToInt32_ci, &f);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::makeNumberInt32(LIns* f)
{
    JS_ASSERT(f->isQuad());
    LIns* x;
    if (!isPromote(f)) {
        x = f2i(f);
        guard(true, lir->ins2(LIR_feq, f, lir->ins1(LIR_i2f, x)), MISMATCH_EXIT);
    } else {
        x = ::demote(lir, f);
    }
    return x;
}

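/*
 * Example: if f holds 3.5 at run time, the LIR_feq guard above fails
 * (3.5 != (double)(int32)3.5) and we leave the trace via MISMATCH_EXIT;
 * for 3.0 the truncation is loss-free and the guard holds.
 */
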
JS_REQUIRES_STACK LIns*
TraceRecorder::stringify(jsval& v)
{
    LIns* v_ins = get(&v);
    if (JSVAL_IS_STRING(v))
        return v_ins;

    LIns* args[] = { v_ins, cx_ins };
    const CallInfo* ci;
    if (JSVAL_IS_NUMBER(v)) {
        ci = &js_NumberToString_ci;
    } else if (JSVAL_IS_SPECIAL(v)) {
        ci = &js_BooleanOrUndefinedToString_ci;
    } else {
        /*
         * Callers must deal with non-primitive (non-null object) values by
         * calling an imacro. We don't try to guess about which imacro, with
         * what valueOf hint, here.
         */
        JS_ASSERT(JSVAL_IS_NULL(v));
        return INS_ATOM(cx->runtime->atomState.nullAtom);
    }

    v_ins = lir->insCall(ci, args);
    guard(false, lir->ins_peq0(v_ins), OOM_EXIT);
    return v_ins;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::call_imacro(jsbytecode* imacro)
{
    JSStackFrame* fp = cx->fp;
    JSFrameRegs* regs = fp->regs;

    /* We cannot nest imacros, only tail-call. */
    if (fp->imacpc) {
        /* Dereference is safe since imacros are JSOP_STOP-terminated. */
        if (regs->pc[js_CodeSpec[*regs->pc].length] != JSOP_STOP)
            return RECORD_STOP;
        regs->pc = imacro;
        return RECORD_IMACRO;
    }

    fp->imacpc = regs->pc;
    regs->pc = imacro;
    atoms = COMMON_ATOMS_START(&cx->runtime->atomState);
    return RECORD_IMACRO;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::ifop()
{
    jsval& v = stackval(-1);
    LIns* v_ins = get(&v);
    bool cond;
    LIns* x;

    if (JSVAL_IS_NULL(v)) {
        cond = false;
        x = lir->insImm(0);
    } else if (!JSVAL_IS_PRIMITIVE(v)) {
        cond = true;
        x = lir->insImm(1);
    } else if (JSVAL_IS_SPECIAL(v)) {
        /* Test for boolean is true, negate later if we are testing for false. */
        cond = JSVAL_TO_SPECIAL(v) == JS_TRUE;
        x = lir->ins2i(LIR_eq, v_ins, 1);
    } else if (isNumber(v)) {
        jsdouble d = asNumber(v);
        cond = !JSDOUBLE_IS_NaN(d) && d;
        x = lir->ins2(LIR_and,
                      lir->ins2(LIR_feq, v_ins, v_ins),
                      lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmf(0))));
    } else if (JSVAL_IS_STRING(v)) {
        cond = JSVAL_TO_STRING(v)->length() != 0;
        x = lir->insLoad(LIR_ldp, v_ins, offsetof(JSString, mLength));
    } else {
        JS_NOT_REACHED("ifop");
        return ARECORD_STOP;
    }

    jsbytecode* pc = cx->fp->regs->pc;
    emitIf(pc, cond, x);
    return checkTraceEnd(pc);
}

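/*
 * Example: for "if (x)" with x currently a double, the LIR above computes
 * (x == x) & !(x == 0), which is true exactly when x is neither NaN nor
 * 0/-0 -- the same truthiness the interpreter computes.
 */
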
/*
 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
 * "first" time we hit the op. Later, when we start traces after exiting that
 * trace, we just patch.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::tableswitch()
{
    jsval& v = stackval(-1);

    /* No need to guard if the condition can't match any of the cases. */
    if (!isNumber(v))
        return ARECORD_CONTINUE;

    /* No need to guard if the condition is constant. */
    LIns* v_ins = f2i(get(&v));
    if (v_ins->isconst() || v_ins->isconstq())
        return ARECORD_CONTINUE;

    jsbytecode* pc = cx->fp->regs->pc;
    /* Starting a new trace after exiting a trace via switch. */
    if (anchor &&
        (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
        fragment->ip == pc) {
        return ARECORD_CONTINUE;
    }

    /* Decode the jsop. */
    jsint low, high;
    if (*pc == JSOP_TABLESWITCH) {
        pc += JUMP_OFFSET_LEN;
        low = GET_JUMP_OFFSET(pc);
        pc += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc);
    } else {
        pc += JUMPX_OFFSET_LEN;
        low = GET_JUMPX_OFFSET(pc);
        pc += JUMPX_OFFSET_LEN;
        high = GET_JUMPX_OFFSET(pc);
    }

    /* Cap maximum table-switch size for modesty. */
    if ((high + 1 - low) > MAX_TABLE_SWITCH)
        return InjectStatus(switchop());

    /* Generate switch LIR. */
    SwitchInfo* si = new (traceAlloc()) SwitchInfo();
    si->count = high + 1 - low;
    si->table = 0;
    si->index = (uint32) -1;
    LIns* diff = lir->ins2(LIR_sub, v_ins, lir->insImm(low));
    LIns* cmp = lir->ins2(LIR_ult, diff, lir->insImm(si->count));
    lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
    lir->insStorei(diff, lir->insImmPtr(&si->index), 0);
    VMSideExit* exit = snapshot(CASE_EXIT);
    exit->switchInfo = si;
    LIns* guardIns = lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
    fragment->lastIns = guardIns;
    CHECK_STATUS_A(compile());
    return finishSuccessfully();
}

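/*
 * Example: a dense "switch (i) { case 0: ... case 9: ... }" records a single
 * LIR_xtbl guard on (i - low) here; when a trace later starts from a
 * CASE_EXIT or DEFAULT_EXIT of this op, the early-return above patches the
 * existing side exit rather than recording the switch again.
 */
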
static JS_ALWAYS_INLINE int32_t
UnboxBooleanOrUndefined(jsval v)
{
    /* Although this says 'special', we really only expect 3 special values: */
    JS_ASSERT(v == JSVAL_TRUE || v == JSVAL_FALSE || v == JSVAL_VOID);
    return JSVAL_TO_SPECIAL(v);
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::switchop()
{
    jsval& v = stackval(-1);
    LIns* v_ins = get(&v);

    /* No need to guard if the condition is constant. */
    if (v_ins->isconst() || v_ins->isconstq())
        return RECORD_CONTINUE;
    if (isNumber(v)) {
        jsdouble d = asNumber(v);
        guard(true,
              addName(lir->ins2(LIR_feq, v_ins, lir->insImmf(d)),
                      "guard(switch on numeric)"),
              BRANCH_EXIT);
    } else if (JSVAL_IS_STRING(v)) {
        LIns* args[] = { INS_CONSTSTR(JSVAL_TO_STRING(v)), v_ins };
        guard(true,
              addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(&js_EqualStrings_ci, args))),
                      "guard(switch on string)"),
              BRANCH_EXIT);
    } else if (JSVAL_IS_SPECIAL(v)) {
        guard(true,
              addName(lir->ins2(LIR_eq, v_ins, lir->insImm(UnboxBooleanOrUndefined(v))),
                      "guard(switch on boolean)"),
              BRANCH_EXIT);
    } else {
        RETURN_STOP("switch on object or null");
    }
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::inc(jsval& v, jsint incr, bool pre)
{
    LIns* v_ins = get(&v);
    CHECK_STATUS(inc(v, v_ins, incr, pre));
    set(&v, v_ins);
    return RECORD_CONTINUE;
}

/*
 * On exit, v_ins is the incremented unboxed value, and the appropriate value
 * (pre- or post-increment as described by pre) is stacked.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::inc(jsval v, LIns*& v_ins, jsint incr, bool pre)
{
    LIns* v_after;
    CHECK_STATUS(incHelper(v, v_ins, v_after, incr));

    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    JS_ASSERT(cs.ndefs == 1);
    stack(-cs.nuses, pre ? v_after : v_ins);
    v_ins = v_after;
    return RECORD_CONTINUE;
}

/*
 * Do an increment operation without storing anything to the stack.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::incHelper(jsval v, LIns* v_ins, LIns*& v_after, jsint incr)
{
    if (!isNumber(v))
        RETURN_STOP("can only inc numbers");
    v_after = alu(LIR_fadd, asNumber(v), incr, v_ins, lir->insImmf(incr));
    return RECORD_CONTINUE;
}

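/*
 * Example: for "a = i++" the two-argument inc() stacks the old value of i
 * (pre == false) while v_after, computed here as i + incr, is written back
 * to i's slot; for "a = ++i" the incremented value is stacked instead.
 */
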
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::incProp(jsint incr, bool pre)
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        RETURN_STOP_A("incProp on primitive");

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);

    uint32 slot;
    LIns* v_ins;
    CHECK_STATUS_A(prop(obj, obj_ins, &slot, &v_ins, NULL));

    if (slot == SPROP_INVALID_SLOT)
        RETURN_STOP_A("incProp on invalid slot");

    jsval& v = STOBJ_GET_SLOT(obj, slot);
    CHECK_STATUS_A(inc(v, v_ins, incr, pre));

    LIns* dslots_ins = NULL;
    stobj_set_slot(obj_ins, slot, dslots_ins, box_jsval(v, v_ins));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::incElem(jsint incr, bool pre)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    jsval* vp;
    LIns* v_ins;
    LIns* addr_ins;

    if (JSVAL_IS_PRIMITIVE(l) || !JSVAL_IS_INT(r) ||
        !guardDenseArray(JSVAL_TO_OBJECT(l), get(&l))) {
        return RECORD_STOP;
    }

    CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins));
    if (!addr_ins) // if we read a hole, abort
        return RECORD_STOP;
    CHECK_STATUS(inc(*vp, v_ins, incr, pre));
    lir->insStorei(box_jsval(*vp, v_ins), addr_ins, 0);
    return RECORD_CONTINUE;
}

static bool
EvalCmp(LOpcode op, double l, double r)
{
    bool cond;
    switch (op) {
      case LIR_feq:
        cond = (l == r);
        break;
      case LIR_flt:
        cond = l < r;
        break;
      case LIR_fgt:
        cond = l > r;
        break;
      case LIR_fle:
        cond = l <= r;
        break;
      case LIR_fge:
        cond = l >= r;
        break;
      default:
        JS_NOT_REACHED("unexpected comparison op");
        return false;
    }
    return cond;
}

static bool
EvalCmp(LOpcode op, JSString* l, JSString* r)
{
    if (op == LIR_feq)
        return !!js_EqualStrings(l, r);
    return EvalCmp(op, js_CompareStrings(l, r), 0);
}

JS_REQUIRES_STACK void
TraceRecorder::strictEquality(bool equal, bool cmpCase)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    LIns* l_ins = get(&l);
    LIns* r_ins = get(&r);

    LIns* x;
    bool cond;

    JSTraceType ltag = GetPromotedType(l);
    if (ltag != GetPromotedType(r)) {
        cond = !equal;
        x = lir->insImm(cond);
    } else if (ltag == TT_STRING) {
        LIns* args[] = { r_ins, l_ins };
        x = lir->ins2i(LIR_eq, lir->insCall(&js_EqualStrings_ci, args), equal);
        cond = !!js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
    } else {
        LOpcode op;
        if (ltag == TT_DOUBLE)
            op = LIR_feq;
        else if (ltag == TT_NULL || ltag == TT_OBJECT || ltag == TT_FUNCTION)
            op = LIR_peq;
        else
            op = LIR_eq;
        x = lir->ins2(op, l_ins, r_ins);
        if (!equal)
            x = lir->ins_eq0(x);
        cond = (ltag == TT_DOUBLE)
               ? asNumber(l) == asNumber(r)
               : l == r;
    }
    cond = (cond == equal);

    if (cmpCase) {
        /* Only guard if the same path may not always be taken. */
        if (!x->isconst())
            guard(cond, x, BRANCH_EXIT);
        return;
    }

    set(&l, x);
}

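/*
 * Example: "3 === 'x'" has differing promoted types, so the result is baked
 * in as a constant and no guard is needed; "3 === 4.0" compares with LIR_feq
 * and, in the cmpCase path, guards only when the comparison could go either
 * way on a later iteration.
 */
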
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
{
    jsval& rval = stackval(-1);
    jsval& lval = stackval(-2);
    LIns* l_ins = get(&lval);
    LIns* r_ins = get(&rval);

    return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
                              bool negate, bool tryBranchAfterCond,
                              jsval& rval)
{
    LOpcode op = LIR_eq;
    bool cond;
    LIns* args[] = { NULL, NULL };

    /*
     * The if chain below closely mirrors that found in 11.9.3, in general
     * deviating from that ordering of ifs only to account for SpiderMonkey's
     * conflation of booleans and undefined and for the possibility of
     * confusing objects and null. Note carefully the spec-mandated recursion
     * in the final else clause, which terminates because Number == T recurs
     * only if T is Object, but that must recur again to convert Object to
     * primitive, and ToPrimitive throws if the object cannot be converted to
     * a primitive value (which would terminate recursion).
     */

    if (GetPromotedType(l) == GetPromotedType(r)) {
        if (JSVAL_TAG(l) == JSVAL_OBJECT || JSVAL_IS_SPECIAL(l)) {
            if (JSVAL_TAG(l) == JSVAL_OBJECT && l) {
                JSClass *clasp = OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(l));
                if ((clasp->flags & JSCLASS_IS_EXTENDED) && ((JSExtendedClass*) clasp)->equality)
                    RETURN_STOP_A("Can't trace extended class equality operator");
            }
            if (JSVAL_TAG(l) == JSVAL_OBJECT)
                op = LIR_peq;
            cond = (l == r);
        } else if (JSVAL_IS_STRING(l)) {
            args[0] = r_ins, args[1] = l_ins;
            l_ins = lir->insCall(&js_EqualStrings_ci, args);
            r_ins = lir->insImm(1);
            cond = !!js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
        } else {
            JS_ASSERT(isNumber(l) && isNumber(r));
            cond = (asNumber(l) == asNumber(r));
            op = LIR_feq;
        }
    } else if (JSVAL_IS_NULL(l) && JSVAL_IS_SPECIAL(r)) {
        l_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
        cond = (r == JSVAL_VOID);
    } else if (JSVAL_IS_SPECIAL(l) && JSVAL_IS_NULL(r)) {
        r_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
        cond = (l == JSVAL_VOID);
    } else if (isNumber(l) && JSVAL_IS_STRING(r)) {
        args[0] = r_ins, args[1] = cx_ins;
        r_ins = lir->insCall(&js_StringToNumber_ci, args);
        cond = (asNumber(l) == js_StringToNumber(cx, JSVAL_TO_STRING(r)));
        op = LIR_feq;
    } else if (JSVAL_IS_STRING(l) && isNumber(r)) {
        args[0] = l_ins, args[1] = cx_ins;
        l_ins = lir->insCall(&js_StringToNumber_ci, args);
        cond = (js_StringToNumber(cx, JSVAL_TO_STRING(l)) == asNumber(r));
        op = LIR_feq;
    } else {
        if (JSVAL_IS_SPECIAL(l)) {
            bool isVoid = !!JSVAL_IS_VOID(l);
            guard(isVoid,
                  lir->ins2(LIR_eq, l_ins, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))),
                  BRANCH_EXIT);
            if (!isVoid) {
                args[0] = l_ins, args[1] = cx_ins;
                l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
                l = (l == JSVAL_VOID)
                    ? cx->runtime->NaNValue
                    : INT_TO_JSVAL(l == JSVAL_TRUE);
                return equalityHelper(l, r, l_ins, r_ins, negate,
                                      tryBranchAfterCond, rval);
            }
        } else if (JSVAL_IS_SPECIAL(r)) {
            bool isVoid = !!JSVAL_IS_VOID(r);
            guard(isVoid,
                  lir->ins2(LIR_eq, r_ins, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))),
                  BRANCH_EXIT);
            if (!isVoid) {
                args[0] = r_ins, args[1] = cx_ins;
                r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
                r = (r == JSVAL_VOID)
                    ? cx->runtime->NaNValue
                    : INT_TO_JSVAL(r == JSVAL_TRUE);
                return equalityHelper(l, r, l_ins, r_ins, negate,
                                      tryBranchAfterCond, rval);
            }
        } else {
            if ((JSVAL_IS_STRING(l) || isNumber(l)) && !JSVAL_IS_PRIMITIVE(r)) {
                RETURN_IF_XML_A(r);
                return InjectStatus(call_imacro(equality_imacros.any_obj));
            }
            if (!JSVAL_IS_PRIMITIVE(l) && (JSVAL_IS_STRING(r) || isNumber(r))) {
                RETURN_IF_XML_A(l);
                return InjectStatus(call_imacro(equality_imacros.obj_any));
            }
        }

        l_ins = lir->insImm(0);
        r_ins = lir->insImm(1);
        cond = false;
    }

    /* If the operands aren't numbers, compare them as integers. */
    LIns* x = lir->ins2(op, l_ins, r_ins);
    if (negate) {
        x = lir->ins_eq0(x);
        cond = !cond;
    }

    jsbytecode* pc = cx->fp->regs->pc;

    /*
     * Don't guard if the same path is always taken. If it isn't, we have to
     * fuse comparisons and the following branch, because the interpreter does
     * that.
     */
    if (tryBranchAfterCond)
        fuseIf(pc + 1, cond, x);

    /*
     * There is no need to write out the result of this comparison if the trace
     * ends on this operation.
     */
    if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
        CHECK_STATUS_A(checkTraceEnd(pc + 1));

    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    set(&rval, x);

    return ARECORD_CONTINUE;
}

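/*
 * Example: "true == 1" takes the JSVAL_IS_SPECIAL(l) arm above: after a
 * BRANCH_EXIT guard that l stays non-void, l is converted with
 * js_BooleanOrUndefinedToNumber and equalityHelper recurs as the
 * Number == Number case, mirroring the 11.9.3 recursion described above.
 */
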
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    LIns* x = NULL;
    bool cond;
    LIns* l_ins = get(&l);
    LIns* r_ins = get(&r);
    bool fp = false;
    jsdouble lnum, rnum;

    /*
     * 11.8.5 if either argument is an object with a function-valued valueOf
     * property; if both arguments are objects with non-function-valued valueOf
     * properties, abort.
     */
    if (!JSVAL_IS_PRIMITIVE(l)) {
        RETURN_IF_XML_A(l);
        if (!JSVAL_IS_PRIMITIVE(r)) {
            RETURN_IF_XML_A(r);
            return InjectStatus(call_imacro(binary_imacros.obj_obj));
        }
        return InjectStatus(call_imacro(binary_imacros.obj_any));
    }
    if (!JSVAL_IS_PRIMITIVE(r)) {
        RETURN_IF_XML_A(r);
        return InjectStatus(call_imacro(binary_imacros.any_obj));
    }

    /* 11.8.5 steps 3, 16-21. */
    if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
        LIns* args[] = { r_ins, l_ins };
        l_ins = lir->insCall(&js_CompareStrings_ci, args);
        r_ins = lir->insImm(0);
        cond = EvalCmp(op, JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
        goto do_comparison;
    }

    /* 11.8.5 steps 4-5. */
    if (!JSVAL_IS_NUMBER(l)) {
        LIns* args[] = { l_ins, cx_ins };
        switch (JSVAL_TAG(l)) {
          case JSVAL_SPECIAL:
            l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
            break;
          case JSVAL_STRING:
            l_ins = lir->insCall(&js_StringToNumber_ci, args);
            break;
          case JSVAL_OBJECT:
            if (JSVAL_IS_NULL(l)) {
                l_ins = lir->insImmf(0.0);
                break;
            }
            // FALL THROUGH
          case JSVAL_INT:
          case JSVAL_DOUBLE:
          default:
            JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
                           "have been handled at start of method");
            RETURN_STOP_A("safety belt");
        }
    }
    if (!JSVAL_IS_NUMBER(r)) {
        LIns* args[] = { r_ins, cx_ins };
        switch (JSVAL_TAG(r)) {
          case JSVAL_SPECIAL:
            r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
            break;
          case JSVAL_STRING:
            r_ins = lir->insCall(&js_StringToNumber_ci, args);
            break;
          case JSVAL_OBJECT:
            if (JSVAL_IS_NULL(r)) {
                r_ins = lir->insImmf(0.0);
                break;
            }
            // FALL THROUGH
          case JSVAL_INT:
          case JSVAL_DOUBLE:
          default:
            JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
                           "have been handled at start of method");
            RETURN_STOP_A("safety belt");
        }
    }
    {
        jsval tmp = JSVAL_NULL;
        JSAutoTempValueRooter tvr(cx, 1, &tmp);

        tmp = l;
        lnum = js_ValueToNumber(cx, &tmp);
        tmp = r;
        rnum = js_ValueToNumber(cx, &tmp);
    }
    cond = EvalCmp(op, lnum, rnum);
    fp = true;

    /* 11.8.5 steps 6-15. */
  do_comparison:
    /*
     * If the result is not a number or it's not a quad, we must use an integer
     * compare.
     */
    if (!fp) {
        JS_ASSERT(op >= LIR_feq && op <= LIR_fge);
        op = LOpcode(op + (LIR_eq - LIR_feq));
    }
    x = lir->ins2(op, l_ins, r_ins);

    jsbytecode* pc = cx->fp->regs->pc;

    /*
     * Don't guard if the same path is always taken. If it isn't, we have to
     * fuse comparisons and the following branch, because the interpreter does
     * that.
     */
    if (tryBranchAfterCond)
        fuseIf(pc + 1, cond, x);

    /*
     * There is no need to write out the result of this comparison if the trace
     * ends on this operation.
     */
    if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
        CHECK_STATUS_A(checkTraceEnd(pc + 1));

    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    set(&l, x);

    return ARECORD_CONTINUE;
}

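/*
 * Example: "'5' < 10" converts the lhs with js_StringToNumber and compares
 * with the floating-point opcode, while "'a' < 'b'" reduces to
 * js_CompareStrings(l, r) < 0 via the string arm above.
 */
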
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::unary(LOpcode op)
{
    jsval& v = stackval(-1);
    bool intop = !(op & LIR64);
    if (isNumber(v)) {
        LIns* a = get(&v);
        if (intop)
            a = f2i(a);
        a = lir->ins1(op, a);
        if (intop)
            a = lir->ins1(LIR_i2f, a);
        set(&v, a);
        return RECORD_CONTINUE;
    }
    return RECORD_STOP;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::binary(LOpcode op)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);

    if (!JSVAL_IS_PRIMITIVE(l)) {
        RETURN_IF_XML(l);
        if (!JSVAL_IS_PRIMITIVE(r)) {
            RETURN_IF_XML(r);
            return call_imacro(binary_imacros.obj_obj);
        }
        return call_imacro(binary_imacros.obj_any);
    }
    if (!JSVAL_IS_PRIMITIVE(r)) {
        RETURN_IF_XML(r);
        return call_imacro(binary_imacros.any_obj);
    }

    bool intop = !(op & LIR64);
    LIns* a = get(&l);
    LIns* b = get(&r);

    bool leftIsNumber = isNumber(l);
    jsdouble lnum = leftIsNumber ? asNumber(l) : 0;

    bool rightIsNumber = isNumber(r);
    jsdouble rnum = rightIsNumber ? asNumber(r) : 0;

    if ((op >= LIR_sub && op <= LIR_ush) ||   // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
        (op >= LIR_fsub && op <= LIR_fmod)) { // fsub, fmul, fdiv, fmod
        LIns* args[2];
        if (JSVAL_IS_STRING(l)) {
            args[0] = a;
            args[1] = cx_ins;
            a = lir->insCall(&js_StringToNumber_ci, args);
            lnum = js_StringToNumber(cx, JSVAL_TO_STRING(l));
            leftIsNumber = true;
        }
        if (JSVAL_IS_STRING(r)) {
            args[0] = b;
            args[1] = cx_ins;
            b = lir->insCall(&js_StringToNumber_ci, args);
            rnum = js_StringToNumber(cx, JSVAL_TO_STRING(r));
            rightIsNumber = true;
        }
    }
    if (JSVAL_IS_SPECIAL(l)) {
        LIns* args[] = { a, cx_ins };
        a = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
        lnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_SPECIAL(l));
        leftIsNumber = true;
    }
    if (JSVAL_IS_SPECIAL(r)) {
        LIns* args[] = { b, cx_ins };
        b = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
        rnum = js_BooleanOrUndefinedToNumber(cx, JSVAL_TO_SPECIAL(r));
        rightIsNumber = true;
    }
    if (leftIsNumber && rightIsNumber) {
        if (intop) {
            LIns *args[] = { a };
            a = lir->insCall(op == LIR_ush ? &js_DoubleToUint32_ci : &js_DoubleToInt32_ci, args);
            b = f2i(b);
        }
        a = alu(op, lnum, rnum, a, b);
        if (intop)
            a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
        set(&l, a);
        return RECORD_CONTINUE;
    }
    return RECORD_STOP;
}

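/*
 * Example: "'3' * '4'" converts both operands with js_StringToNumber before
 * reaching alu(), so the trace can still multiply demoted ints; "true + 1"
 * flows through js_BooleanOrUndefinedToNumber the same way. Only arithmetic
 * and bitwise ops do this -- '+' on strings concatenates and never gets here.
 */
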
struct GuardedShapeEntry : public JSDHashEntryStub
{
    JSObject* obj;
};

#if defined DEBUG_notme && defined XP_UNIX

static FILE* shapefp = NULL;

static void
DumpShape(JSObject* obj, const char* prefix)
{
    JSScope* scope = OBJ_SCOPE(obj);

    if (!shapefp) {
        shapefp = fopen("/tmp/shapes.dump", "w");
        if (!shapefp)
            return;
    }

    fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, scope->shape, scope->flags);
    for (JSScopeProperty* sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
        if (JSID_IS_ATOM(sprop->id)) {
            fprintf(shapefp, " %s", JS_GetStringBytes(JSVAL_TO_STRING(ID_TO_VALUE(sprop->id))));
        } else {
            JS_ASSERT(!JSID_IS_OBJECT(sprop->id));
            fprintf(shapefp, " %d", JSID_TO_INT(sprop->id));
        }
        fprintf(shapefp, " %u %p %p %x %x %d\n",
                sprop->slot, sprop->getter, sprop->setter, sprop->attrs, sprop->flags,
                sprop->shortid);
    }
    fflush(shapefp);
}

static JSDHashOperator
DumpShapeEnumerator(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
{
    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
    const char* prefix = (const char*) arg;

    DumpShape(entry->obj, prefix);
    return JS_DHASH_NEXT;
}

void
TraceRecorder::dumpGuardedShapes(const char* prefix)
{
    if (guardedShapeTable.ops)
        JS_DHashTableEnumerate(&guardedShapeTable, DumpShapeEnumerator, (void*) prefix);
}
#endif /* DEBUG_notme && XP_UNIX */

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
                          LIns* map_ins, VMSideExit* exit)
{
    if (!guardedShapeTable.ops) {
        JS_DHashTableInit(&guardedShapeTable, JS_DHashGetStubOps(), NULL,
                          sizeof(GuardedShapeEntry), JS_DHASH_MIN_SIZE);
    }

    // Test (with add if missing) for a remembered guard for (obj_ins, obj).
    GuardedShapeEntry* entry = (GuardedShapeEntry*)
        JS_DHashTableOperate(&guardedShapeTable, obj_ins, JS_DHASH_ADD);
    if (!entry) {
        JS_ReportOutOfMemory(cx);
        return RECORD_ERROR;
    }

    // If already guarded, check that the shape matches.
    if (entry->obj) {
        JS_ASSERT(entry->key == obj_ins);
        JS_ASSERT(entry->obj == obj);
        return RECORD_CONTINUE;
    }

    // Not yet guarded. Remember obj_ins along with obj (for invalidation).
    entry->key = obj_ins;
    entry->obj = obj;

#if defined DEBUG_notme && defined XP_UNIX
    DumpShape(obj, "guard");
    fprintf(shapefp, "for obj_ins %p\n", obj_ins);
#endif

    // Finally, emit the shape guard.
    LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
    guard(true,
          addName(lir->ins2i(LIR_eq, shape_ins, shape), guardName),
          exit);
    return RECORD_CONTINUE;
}

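/*
 * Example: adding or deleting a property on obj changes OBJ_SHAPE(obj), so
 * the single LIR_eq guard emitted above protects every cached property
 * access on this trace against such mutations; the dhash entry only keeps
 * us from emitting the same guard twice for one (obj_ins, obj) pair.
 */
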
static JSDHashOperator
ForgetGuardedShapesForObject(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
{
    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
    if (entry->obj == arg) {
#if defined DEBUG_notme && defined XP_UNIX
        DumpShape(entry->obj, "forget");
#endif
        return JS_DHASH_REMOVE;
    }
    return JS_DHASH_NEXT;
}

void
TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
{
    if (guardedShapeTable.ops)
        JS_DHashTableEnumerate(&guardedShapeTable, ForgetGuardedShapesForObject, obj);
}

void
TraceRecorder::forgetGuardedShapes()
{
    if (guardedShapeTable.ops) {
#if defined DEBUG_notme && defined XP_UNIX
        dumpGuardedShapes("forget-all");
#endif
        JS_DHashTableFinish(&guardedShapeTable);
        guardedShapeTable.ops = NULL;
    }
}

JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);

inline LIns*
TraceRecorder::map(LIns* obj_ins)
{
    return addName(lir->insLoad(LIR_ldp, obj_ins, (int) offsetof(JSObject, map)), "map");
}

bool
TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
{
    JS_ASSERT(op_offset < sizeof(JSObjectOps));
    JS_ASSERT(op_offset % sizeof(void *) == 0);

#define OP(ops) (*(void **) ((uint8 *) (ops) + op_offset))
    void* ptr = OP(map->ops);
    if (ptr != OP(&js_ObjectOps))
        return false;
#undef OP

    ops_ins = addName(lir->insLoad(LIR_ldcp, map_ins, int(offsetof(JSObjectMap, ops))), "ops");
    LIns* n = lir->insLoad(LIR_ldcp, ops_ins, op_offset);
    guard(true,
          addName(lir->ins2(LIR_peq, n, INS_CONSTPTR(ptr)), "guard(native-map)"),
          BRANCH_EXIT);

    return true;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardNativePropertyOp(JSObject* aobj, LIns* map_ins)
{
    /*
     * Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops
     * which is required to use native objects (those whose maps are scopes),
     * or even more narrow conditions required because the cache miss case
     * will call a particular object-op (js_GetProperty, js_SetProperty).
     *
     * We parameterize using offsetof and guard on match against the hook at
     * the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
     * guards the js_SetProperty case.
     */
    uint32 format = js_CodeSpec[*cx->fp->regs->pc].format;
    uint32 mode = JOF_MODE(format);

    // No need to guard native-ness of global object.
    JS_ASSERT(OBJ_IS_NATIVE(globalObj));
    if (aobj != globalObj) {
        size_t op_offset = offsetof(JSObjectOps, objectMap);
        if (mode == JOF_PROP || mode == JOF_VARPROP) {
            op_offset = (format & JOF_SET)
                        ? offsetof(JSObjectOps, setProperty)
                        : offsetof(JSObjectOps, getProperty);
        } else {
            JS_ASSERT(mode == JOF_NAME);
        }

        LIns* ops_ins;
        if (!map_is_native(aobj->map, map_ins, ops_ins, op_offset))
            RETURN_STOP("non-native map");
    }
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
{
    jsbytecode* pc = cx->fp->regs->pc;
    JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD &&
              *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD);

    // Mimic the interpreter's special case for dense arrays by skipping up one
    // hop along the proto chain when accessing a named (not indexed) property,
    // typically to find Array.prototype methods.
    JSObject* aobj = obj;
    if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
        guardDenseArray(obj, obj_ins, BRANCH_EXIT);
        aobj = OBJ_GET_PROTO(cx, obj);
        obj_ins = stobj_get_proto(obj_ins);
    }

    LIns* map_ins = map(obj_ins);

    CHECK_STATUS_A(guardNativePropertyOp(aobj, map_ins));

    JSAtom* atom;
    JSPropCacheEntry* entry;
    PROPERTY_CACHE_TEST(cx, pc, aobj, obj2, entry, atom);
    if (!atom) {
        // Null atom means that obj2 is locked and must now be unlocked.
        JS_UNLOCK_OBJ(cx, obj2);
    } else {
        // Miss: pre-fill the cache for the interpreter, as well as for our needs.
        jsid id = ATOM_TO_JSID(atom);

        JSProperty* prop;
        if (JOF_OPMODE(*pc) == JOF_NAME) {
            JS_ASSERT(aobj == obj);

            JSTraceMonitor &localtm = *traceMonitor;
            entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);

            /* js_FindPropertyHelper can reenter the interpreter and kill |this|. */
            if (!localtm.recorder)
                return ARECORD_ABORTED;

            if (!entry)
                RETURN_ERROR_A("error in js_FindPropertyHelper");
            if (entry == JS_NO_PROP_CACHE_FILL)
                RETURN_STOP_A("cannot cache name");
        } else {
            JSTraceMonitor &localtm = *traceMonitor;
            JSContext *localcx = cx;
            int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
                                                        cx->resolveFlags,
                                                        &obj2, &prop);

            /* js_LookupPropertyWithFlags can reenter the interpreter and kill |this|. */
            if (!localtm.recorder) {
                if (prop)
                    obj2->dropProperty(localcx, prop);
                return ARECORD_ABORTED;
            }

            if (protoIndex < 0)
                RETURN_ERROR_A("error in js_LookupPropertyWithFlags");

            if (prop) {
                if (!OBJ_IS_NATIVE(obj2)) {
                    obj2->dropProperty(cx, prop);
                    RETURN_STOP_A("property found on non-native object");
                }
                entry = js_FillPropertyCache(cx, aobj, 0, protoIndex, obj2,
                                             (JSScopeProperty*) prop, false);
                JS_ASSERT(entry);
                if (entry == JS_NO_PROP_CACHE_FILL)
                    entry = NULL;
            }
        }

        if (!prop) {
            // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
            // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
            // the global it's assigning does not yet exist, create it.
            obj2 = obj;

            // Use PCVAL_NULL to return "no such property" to our caller.
            pcval = PCVAL_NULL;
            return ARECORD_CONTINUE;
        }

        obj2->dropProperty(cx, prop);
        if (!entry)
            RETURN_STOP_A("failed to fill property cache");
    }

#ifdef JS_THREADSAFE
    // There's a potential race in any JS_THREADSAFE embedding that's nuts
    // enough to share mutable objects on the scope or proto chain, but we
    // don't care about such insane embeddings. Anyway, the (scope, proto)
    // entry->vcap coordinates must reach obj2 from aobj at this point.
    JS_ASSERT(cx->requestDepth);
#endif

    return InjectStatus(guardPropertyCacheHit(obj_ins, map_ins, aobj, obj2, entry, pcval));
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
                                     LIns* map_ins,
                                     JSObject* aobj,
                                     JSObject* obj2,
                                     JSPropCacheEntry* entry,
                                     jsuword& pcval)
{
    VMSideExit* exit = snapshot(BRANCH_EXIT);

    uint32 vshape = PCVCAP_SHAPE(entry->vcap);

    // Check for first-level cache hit and guard on kshape if possible.
    // Otherwise guard on key object exact match.
    if (PCVCAP_TAG(entry->vcap) <= 1) {
        if (aobj != globalObj)
            CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", map_ins, exit));

        if (entry->adding()) {
            if (aobj == globalObj)
                RETURN_STOP("adding a property to the global object");

            LIns *vshape_ins = addName(
                lir->insLoad(LIR_ld,
                             addName(lir->insLoad(LIR_ldcp, cx_ins, offsetof(JSContext, runtime)),
                                     "runtime"),
                             offsetof(JSRuntime, protoHazardShape)),
                "protoHazardShape");
            guard(true,
                  addName(lir->ins2i(LIR_eq, vshape_ins, vshape), "guard_protoHazardShape"),
                  MISMATCH_EXIT);
        }
    } else {
#ifdef DEBUG
        JSOp op = js_GetOpcode(cx, cx->fp->script, cx->fp->regs->pc);
        JSAtom *pcatom;
        if (op == JSOP_LENGTH) {
            pcatom = cx->runtime->atomState.lengthAtom;
        } else {
            ptrdiff_t pcoff = (JOF_TYPE(js_CodeSpec[op].format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
            GET_ATOM_FROM_BYTECODE(cx->fp->script, cx->fp->regs->pc, pcoff, pcatom);
        }
        JS_ASSERT(entry->kpc == (jsbytecode *) pcatom);
        JS_ASSERT(entry->kshape == jsuword(aobj));
#endif
        if (aobj != globalObj && !obj_ins->isconstp()) {
            guard(true,
                  addName(lir->ins2(LIR_peq, obj_ins, INS_CONSTOBJ(aobj)), "guard_kobj"),
                  exit);
        }
    }

    // For any hit that goes up the scope and/or proto chains, we will need to
    // guard on the shape of the object containing the property.
    if (PCVCAP_TAG(entry->vcap) >= 1) {
        JS_ASSERT(OBJ_SHAPE(obj2) == vshape);

        LIns* obj2_ins;
        if (PCVCAP_TAG(entry->vcap) == 1) {
            // Duplicate the special case in PROPERTY_CACHE_TEST.
            obj2_ins = addName(stobj_get_proto(obj_ins), "proto");
            guard(false, lir->ins_peq0(obj2_ins), exit);
        } else {
            obj2_ins = INS_CONSTOBJ(obj2);
        }
        CHECK_STATUS(guardShape(obj2_ins, obj2, vshape, "guard_vshape", map(obj2_ins), exit));
    }

    pcval = entry->vword;
    return RECORD_CONTINUE;
}

void
TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, LIns* v_ins)
{
    lir->insStorei(v_ins, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
}

void
TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
{
    if (!dslots_ins)
        dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    lir->insStorei(v_ins, dslots_ins, slot * sizeof(jsval));
}

void
TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
{
    if (slot < JS_INITIAL_NSLOTS) {
        stobj_set_fslot(obj_ins, slot, v_ins);
    } else {
        stobj_set_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins, v_ins);
    }
}

LIns*
TraceRecorder::stobj_get_fslot(LIns* obj_ins, unsigned slot)
{
    JS_ASSERT(slot < JS_INITIAL_NSLOTS);
    return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
}

LIns*
TraceRecorder::stobj_get_dslot(LIns* obj_ins, unsigned index, LIns*& dslots_ins)
{
    if (!dslots_ins)
        dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    return lir->insLoad(LIR_ldp, dslots_ins, index * sizeof(jsval));
}

LIns*
TraceRecorder::stobj_get_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins)
{
    if (slot < JS_INITIAL_NSLOTS)
        return stobj_get_fslot(obj_ins, slot);
    return stobj_get_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins);
}

JS_REQUIRES_STACK LIns*
TraceRecorder::box_jsval(jsval v, LIns* v_ins)
{
    if (isNumber(v)) {
        LIns* args[] = { v_ins, cx_ins };
        v_ins = lir->insCall(&js_BoxDouble_ci, args);
        guard(false, lir->ins2(LIR_peq, v_ins, INS_CONSTWORD(JSVAL_ERROR_COOKIE)),
              OOM_EXIT);
        return v_ins;
    }
    switch (JSVAL_TAG(v)) {
      case JSVAL_SPECIAL:
        return lir->ins2(LIR_pior, lir->ins2i(LIR_pilsh, lir->ins_u2p(v_ins), JSVAL_TAGBITS),
                         INS_CONSTWORD(JSVAL_SPECIAL));
      case JSVAL_OBJECT:
        return v_ins;
      default:
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
        return lir->ins2(LIR_pior, v_ins, INS_CONSTWORD(JSVAL_STRING));
    }
}

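/*
 * Sketch of the word-level tagging this relies on: doubles are boxed through
 * js_BoxDouble (which can fail, hence the JSVAL_ERROR_COOKIE guard), specials
 * are shifted left by JSVAL_TAGBITS with the JSVAL_SPECIAL tag or'ed in, and
 * strings merely get JSVAL_STRING or'ed into their low tag bits; objects are
 * already valid jsvals.
 */
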
JS_REQUIRES_STACK LIns*
TraceRecorder::unbox_jsval(jsval v, LIns* v_ins, VMSideExit* exit)
{
    if (isNumber(v)) {
        // JSVAL_IS_NUMBER(v)
        guard(false,
              lir->ins_eq0(lir->ins2(LIR_or,
                                     p2i(lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_INT))),
                                     lir->ins2(LIR_peq,
                                               lir->ins2(LIR_piand, v_ins,
                                                         INS_CONSTWORD(JSVAL_TAGMASK)),
                                               INS_CONSTWORD(JSVAL_DOUBLE)))),
              exit);
        LIns* args[] = { v_ins };
        return lir->insCall(&js_UnboxDouble_ci, args);
    }
    switch (JSVAL_TAG(v)) {
      case JSVAL_SPECIAL:
        guard(true,
              lir->ins2(LIR_peq,
                        lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
                        INS_CONSTWORD(JSVAL_SPECIAL)),
              exit);
        return p2i(lir->ins2i(LIR_pursh, v_ins, JSVAL_TAGBITS));

      case JSVAL_OBJECT:
        if (JSVAL_IS_NULL(v)) {
            // JSVAL_NULL maps to type TT_NULL, so insist that v_ins == 0 here.
            guard(true, lir->ins_peq0(v_ins), exit);
        } else {
            guard(false, lir->ins_peq0(v_ins), exit);
            guard(true,
                  lir->ins2(LIR_peq,
                            lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
                            INS_CONSTWORD(JSVAL_OBJECT)),
                  exit);

            /*
             * LIR_ldcp is ok to use here even though Array classword can
             * change, because no object's classword can ever change from
             * &js_ArrayClass to &js_FunctionClass.
             */
            guard(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)),
                  lir->ins2(LIR_peq,
                            lir->ins2(LIR_piand,
                                      lir->insLoad(LIR_ldcp, v_ins, offsetof(JSObject, classword)),
                                      INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
                            INS_CONSTPTR(&js_FunctionClass)),
                  exit);
        }
        return v_ins;

      default:
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
        guard(true,
              lir->ins2(LIR_peq,
                        lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
                        INS_CONSTWORD(JSVAL_STRING)),
              exit);
        return lir->ins2(LIR_piand, v_ins, addName(lir->insImmWord(~JSVAL_TAGMASK),
                                                   "~JSVAL_TAGMASK"));
    }
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getThis(LIns*& this_ins)
{
    /*
     * js_ComputeThisForFrame updates cx->fp->argv[-1], so sample it into 'original' first.
     */
    jsval original = JSVAL_NULL;
    if (cx->fp->argv) {
        original = cx->fp->argv[-1];
        if (!JSVAL_IS_PRIMITIVE(original) &&
            guardClass(JSVAL_TO_OBJECT(original), get(&cx->fp->argv[-1]), &js_WithClass, snapshot(MISMATCH_EXIT))) {
            RETURN_STOP("can't trace getThis on With object");
        }
    }

    JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
    if (!thisObj)
        RETURN_ERROR("js_ComputeThisForName failed");

    /* In global code, bake in the global object as 'this' object. */
    if (!cx->fp->callee()) {
        JS_ASSERT(callDepth == 0);
        this_ins = INS_CONSTOBJ(thisObj);

        /*
         * We don't have argv[-1] in global code, so we don't update the
         * tracker here.
         */
        return RECORD_CONTINUE;
    }

    jsval& thisv = cx->fp->argv[-1];
    JS_ASSERT(JSVAL_IS_OBJECT(thisv));

    /*
     * Traces type-specialize between null and objects, so if we currently see
     * a null value in argv[-1], this trace will only match if we see null at
     * runtime as well. Bake in the global object as 'this' object, updating
     * the tracker as well. We can only detect this condition prior to calling
     * js_ComputeThisForFrame, since it updates the interpreter's copy of
     * argv[-1].
     */
    JSClass* clasp = NULL;
    if (JSVAL_IS_NULL(original) ||
        (((clasp = STOBJ_GET_CLASS(JSVAL_TO_OBJECT(original))) == &js_CallClass) ||
         (clasp == &js_BlockClass))) {
        if (clasp)
            guardClass(JSVAL_TO_OBJECT(original), get(&thisv), clasp, snapshot(BRANCH_EXIT));
        JS_ASSERT(!JSVAL_IS_PRIMITIVE(thisv));
        if (thisObj != globalObj)
            RETURN_STOP("global object was wrapped while recording");
        this_ins = INS_CONSTOBJ(thisObj);
        set(&thisv, this_ins);
        return RECORD_CONTINUE;
    }

    this_ins = get(&thisv);

    JSObject* wrappedGlobal = globalObj->thisObject(cx);
    if (!wrappedGlobal)
        RETURN_ERROR("globalObj->thisObject hook threw in getThis");

    /*
     * The only unwrapped object that needs to be wrapped that we can get here
     * is the global object obtained through the scope chain.
     */
    this_ins = lir->ins_choose(lir->ins_peq0(stobj_get_parent(this_ins)),
                               INS_CONSTOBJ(wrappedGlobal),
                               this_ins, avmplus::AvmCore::use_cmov());
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK bool
TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, VMSideExit* exit)
{
    bool cond = STOBJ_GET_CLASS(obj) == clasp;

    LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
    class_ins = lir->ins2(LIR_piand, class_ins, INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS));

    char namebuf[32];
    JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
    guard(cond, addName(lir->ins2(LIR_peq, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
    return cond;
}

JS_REQUIRES_STACK bool
TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, ExitType exitType)
{
    return guardClass(obj, obj_ins, &js_ArrayClass, snapshot(exitType));
}

JS_REQUIRES_STACK bool
TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, VMSideExit* exit)
{
    return guardClass(obj, obj_ins, &js_ArrayClass, exit);
}

JS_REQUIRES_STACK bool
TraceRecorder::guardHasPrototype(JSObject* obj, LIns* obj_ins,
                                 JSObject** pobj, LIns** pobj_ins,
                                 VMSideExit* exit)
{
    *pobj = obj->getProto();
    *pobj_ins = stobj_get_proto(obj_ins);

    bool cond = *pobj == NULL;
    guard(cond, addName(lir->ins_peq0(*pobj_ins), "guard(proto-not-null)"), exit);
    return !cond;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, ExitType exitType)
{
    /*
     * Guard that no object along the prototype chain has any indexed
     * properties which might become visible through holes in the array.
     */
    VMSideExit* exit = snapshot(exitType);

    if (js_PrototypeHasIndexedProperties(cx, obj))
        return RECORD_STOP;

    while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit))
        CHECK_STATUS(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map(obj_ins), exit));
    return RECORD_CONTINUE;
}

RecordingStatus
TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
{
    if (obj == globalObj)
        RETURN_STOP("reference aliases global object");
    guard(false, lir->ins2(LIR_peq, obj_ins, INS_CONSTOBJ(globalObj)), MISMATCH_EXIT);
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK void
TraceRecorder::clearFrameSlotsFromCache()
{
    /*
     * Clear out all slots of this frame in the nativeFrameTracker. Different
     * locations on the VM stack might map to different locations on the native
     * stack depending on the number of arguments (i.e., the arity) of the next
     * call, so we have to make sure we map those into the cache with the right
     * offsets.
     */
    JSStackFrame* fp = cx->fp;
    jsval* vp;
    jsval* vpstop;

    /*
     * Duplicate native stack layout computation: see VisitFrameSlots header comment.
     * This doesn't do layout arithmetic, but it must clear out all the slots defined as
     * imported by VisitFrameSlots.
     */
    if (fp->argv) {
        vp = &fp->argv[-2];
        vpstop = &fp->argv[argSlots(fp)];
        while (vp < vpstop)
            nativeFrameTracker.set(vp++, (LIns*)0);
        nativeFrameTracker.set(&fp->argsobj, (LIns*)0);
    }
    vp = fp->slots;
    vpstop = &fp->slots[fp->script->nslots];
    while (vp < vpstop)
        nativeFrameTracker.set(vp++, (LIns*)0);
}

/*
 * If we have created an |arguments| object for the frame, we must copy the
 * argument values into the object as properties in case it is used after
 * this frame returns.
 */
JS_REQUIRES_STACK void
TraceRecorder::putArguments()
{
    if (cx->fp->argsobj && cx->fp->argc) {
        LIns* argsobj_ins = get(&cx->fp->argsobj);
        LIns* args_ins = lir->insAlloc(sizeof(jsval) * cx->fp->argc);
        for (uintN i = 0; i < cx->fp->argc; ++i) {
            LIns* arg_ins = box_jsval(cx->fp->argv[i], get(&cx->fp->argv[i]));
            lir->insStorei(arg_ins, args_ins, i * sizeof(jsval));
        }
        LIns* args[] = { args_ins, argsobj_ins, cx_ins };
        lir->insCall(&js_PutArguments_ci, args);
    }
}

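/*
 * Example: in "function f(x) { var a = arguments; return a; }" the object a
 * outlives f's native stack frame, so each boxed argv[i] must be flushed
 * into it via js_PutArguments before the frame is popped.
 */
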
static JS_REQUIRES_STACK inline bool
IsTraceableRecursion(JSContext *cx)
{
    JSStackFrame *fp = cx->fp;
    JSStackFrame *down = cx->fp->down;
    if (!down)
        return false;
    if (down->script != fp->script)
        return false;
    if (down->argc != fp->argc)
        return false;
    if (fp->argc != fp->fun->nargs)
        return false;
    if (fp->imacpc || down->imacpc)
        return false;
    if ((fp->flags & JSFRAME_CONSTRUCTING) || (down->flags & JSFRAME_CONSTRUCTING))
        return false;
    if (*fp->script->code != JSOP_TRACE)
        return false;
    return true;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
{
    JSStackFrame* fp = cx->fp;

    if (++callDepth >= MAX_CALLDEPTH)
        RETURN_STOP_A("exceeded maximum call depth");

    debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
                      js_AtomToPrintableString(cx, cx->fp->fun->atom),
                      callDepth);
    debug_only_stmt(
        if (js_LogController.lcbits & LC_TMRecorder) {
            js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
            debug_only_print0(LC_TMTracer, "----\n");
        }
    )
    LIns* void_ins = INS_VOID();

    // Duplicate native stack layout computation: see VisitFrameSlots header comment.
    // This doesn't do layout arithmetic, but it must initialize in the tracker all the
    // slots defined as imported by VisitFrameSlots.
    jsval* vp = &fp->argv[fp->argc];
    jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
    while (vp < vpstop) {
        if (vp >= fp->down->regs->sp)
            nativeFrameTracker.set(vp, (LIns*)0);
        set(vp++, void_ins, true);
    }

    vp = &fp->slots[0];
    vpstop = vp + fp->script->nfixed;
    while (vp < vpstop)
        set(vp++, void_ins, true);
    set(&fp->argsobj, INS_NULL(), true);

    /*
     * Check for recursion. This is a special check for recursive cases that can be
     * a trace-tree, just like a loop. If recursion acts weird, for example
     * differing argc or existence of an imacpc, it's not something this code is
     * concerned about. That should pass through below to not regress pre-recursion
     * functionality.
     */
    if (IsTraceableRecursion(cx) && treeInfo->script == cx->fp->script) {
        if (treeInfo->recursion == Recursion_Disallowed)
            RETURN_STOP_A("recursion not allowed in this tree");
        if (treeInfo->script != cx->fp->script)
            RETURN_STOP_A("recursion does not match original tree");
        return InjectStatus(downRecursion());
    }

    /* Try inlining one level in case this recursion doesn't go too deep. */
    if (fp->script == fp->down->script &&
        fp->down->down && fp->down->down->script == fp->script) {
        RETURN_STOP_A("recursion started inlining");
    }

    TreeFragment* root = fragment->root;
    TreeFragment* first = LookupLoop(&JS_TRACE_MONITOR(cx), fp->regs->pc, root->globalObj,
                                     root->globalShape, fp->argc);
    if (!first)
        return ARECORD_CONTINUE;
    TreeFragment* f = findNestedCompatiblePeer(first);
    if (!f) {
        /*
         * If there were no compatible peers, but there were peers at all, then it is probable that
         * an inner recursive function is type mismatching. Start a new recorder that must be
         * recursive.
         */
        for (f = first; f; f = f->peer) {
            if (f->treeInfo && f->treeInfo->recursion == Recursion_Detected) {
                /* Since this recorder is about to die, save its values. */
                if (++first->hits() <= HOTLOOP)
                    return ARECORD_STOP;
                if (IsBlacklisted((jsbytecode*)f->ip))
                    RETURN_STOP_A("inner recursive tree is blacklisted");
                JSContext* _cx = cx;
                SlotList* globalSlots = treeInfo->globalSlots;
                JSTraceMonitor* tm = traceMonitor;
                js_AbortRecording(cx, "trying to compile inner recursive tree");
                if (RecordTree(_cx, tm, first, NULL, 0, first->globalObj, first->globalShape,
                               globalSlots, _cx->fp->argc, Record_EnterFrame)) {
                    JS_ASSERT(tm->recorder);
                }
                break;
            }
        }
        return ARECORD_CONTINUE;
    }

    /*
     * Make sure the shape of the global object still matches (this might
     * flush the JIT cache).
     */
    JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;
    if (!CheckGlobalObjectShape(cx, traceMonitor, globalObj, &globalShape, &globalSlots))
        return ARECORD_ABORTED;
    return attemptTreeCall(f, inlineCallCount);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_LeaveFrame()
{
    debug_only_stmt(
        if (cx->fp->fun)
            debug_only_printf(LC_TMTracer,
                              "LeaveFrame (back to %s), callDepth=%d\n",
                              js_AtomToPrintableString(cx, cx->fp->fun->atom),
                              callDepth);
        );

    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->script,
              cx->fp->regs->pc)].length == JSOP_CALL_LENGTH);

    if (callDepth-- <= 0)
        RETURN_STOP_A("returned out of a loop we started tracing");

    // LeaveFrame gets called after the interpreter popped the frame and
    // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
    atoms = FrameAtomBase(cx, cx->fp);
    set(&stackval(-1), rval_ins, true);
    return ARECORD_CONTINUE;
}

9830 TraceRecorder::record_JSOP_PUSH()
9832 stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID
)));
9833 return ARECORD_CONTINUE
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POPV()
{
    jsval& rval = stackval(-1);
    LIns *rval_ins = box_jsval(rval, get(&rval));

    // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
    // frame because POPV appears only in global and eval code and we don't
    // trace JSOP_EVAL or leaving the frame where tracing started.
    LIns *fp_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp));
    lir->insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENTERWITH()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEWITH()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETURN()
{
    /* A return from callDepth 0 terminates the current loop, except for recursion. */
    if (callDepth == 0) {
        if (IsTraceableRecursion(cx) && treeInfo->recursion != Recursion_Disallowed &&
            treeInfo->script == cx->fp->script) {
            return InjectStatus(upRecursion());
        } else {
            AUDIT(returnLoopExits);
            return endLoop();
        }
    }

    putArguments();

    /* If we inlined this function call, make the return value available to the caller code. */
    jsval& rval = stackval(-1);
    JSStackFrame *fp = cx->fp;
    if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
        JS_ASSERT(fp->thisv == fp->argv[-1]);
        rval_ins = get(&fp->argv[-1]);
    } else {
        rval_ins = get(&rval);
    }
    debug_only_printf(LC_TMTracer,
                      "returning from %s\n",
                      js_AtomToPrintableString(cx, cx->fp->fun->atom));
    clearFrameSlotsFromCache();

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOTO()
{
    /*
     * If we hit a break or a continue to an outer loop, end the loop and
     * generate an always-taken loop exit guard. For other downward gotos
     * (like if/else) continue recording.
     */
    jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->fp->regs->pc);

    if (sn && (SN_TYPE(sn) == SRC_BREAK || SN_TYPE(sn) == SRC_CONT2LABEL)) {
        AUDIT(breakLoopExits);
        return endLoop();
    }
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFEQ()
{
    trackCfgMerges(cx->fp->regs->pc);
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFNE()
{
    return ifop();
}

LIns*
TraceRecorder::newArguments()
{
    LIns* global_ins = INS_CONSTOBJ(globalObj);
    LIns* argc_ins = INS_CONST(cx->fp->argc);
    LIns* callee_ins = get(&cx->fp->argv[-2]);
    LIns* argv_ins = cx->fp->argc
                     ? lir->ins2(LIR_piadd, lirbuf->sp,
                                 lir->insImmWord(nativespOffset(&cx->fp->argv[0])))
                     : INS_CONSTPTR((void *) 2);
    js_ArgsPrivateNative *apn = js_ArgsPrivateNative::create(traceAlloc(), cx->fp->argc);
    for (uintN i = 0; i < cx->fp->argc; ++i) {
        apn->typemap()[i] = determineSlotType(&cx->fp->argv[i]);
    }

    LIns* args[] = { INS_CONSTPTR(apn), argv_ins, callee_ins, argc_ins, global_ins, cx_ins };
    LIns* call_ins = lir->insCall(&js_Arguments_ci, args);
    guard(false, lir->ins_peq0(call_ins), OOM_EXIT);
    return call_ins;
}

9947 TraceRecorder::record_JSOP_ARGUMENTS()
9949 if (cx
->fp
->flags
& JSFRAME_OVERRIDE_ARGS
)
9950 RETURN_STOP_A("Can't trace |arguments| if |arguments| is assigned to");
9952 LIns
* a_ins
= get(&cx
->fp
->argsobj
);
9954 if (a_ins
->opcode() == LIR_int
) {
9955 // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
9956 args_ins
= newArguments();
9958 // Generate LIR to create arguments only if it has not already been created.
9960 LIns
* mem_ins
= lir
->insAlloc(sizeof(jsval
));
9962 LIns
* br1
= lir
->insBranch(LIR_jt
, lir
->ins_peq0(a_ins
), NULL
);
9963 lir
->insStorei(a_ins
, mem_ins
, 0);
9964 LIns
* br2
= lir
->insBranch(LIR_j
, NULL
, NULL
);
9966 LIns
* label1
= lir
->ins0(LIR_label
);
9967 br1
->setTarget(label1
);
9969 LIns
* call_ins
= newArguments();
9970 lir
->insStorei(call_ins
, mem_ins
, 0);
9972 LIns
* label2
= lir
->ins0(LIR_label
);
9973 br2
->setTarget(label2
);
9975 args_ins
= lir
->insLoad(LIR_ldp
, mem_ins
, 0);
9979 set(&cx
->fp
->argsobj
, args_ins
);
9980 return ARECORD_CONTINUE
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DUP()
{
    stack(0, get(&stackval(-1)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DUP2()
{
    stack(0, get(&stackval(-2)));
    stack(1, get(&stackval(-1)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SWAP()
{
    jsval& l = stackval(-2);
    jsval& r = stackval(-1);
    LIns* l_ins = get(&l);
    LIns* r_ins = get(&r);
    set(&r, l_ins);
    set(&l, r_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PICK()
{
    jsval* sp = cx->fp->regs->sp;
    jsint n = cx->fp->regs->pc[1];
    JS_ASSERT(sp - (n+1) >= StackBase(cx->fp));
    LIns* top = get(sp - (n+1));
    for (jsint i = 0; i < n; ++i)
        set(sp - (n+1) + i, get(sp - n + i));
    set(sp - 1, top);
    return ARECORD_CONTINUE;
}

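/*
 * Example: with n == 2, PICK turns the stack [... a b c] into [... b c a]:
 * the loop shifts b and c down one slot and the saved top (a) is stored at
 * the former top-of-stack position.
 */
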
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETCONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITOR()
{
    return InjectStatus(binary(LIR_or));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITXOR()
{
    return InjectStatus(binary(LIR_xor));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITAND()
{
    return InjectStatus(binary(LIR_and));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EQ()
{
    return equality(false, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NE()
{
    return equality(true, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LT()
{
    return relational(LIR_flt, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LE()
{
    return relational(LIR_fle, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GT()
{
    return relational(LIR_fgt, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GE()
{
    return relational(LIR_fge, true);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LSH()
{
    return InjectStatus(binary(LIR_lsh));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RSH()
{
    return InjectStatus(binary(LIR_rsh));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_URSH()
{
    return InjectStatus(binary(LIR_ush));
}

10102 TraceRecorder::record_JSOP_ADD()
10104 jsval
& r
= stackval(-1);
10105 jsval
& l
= stackval(-2);
10107 if (!JSVAL_IS_PRIMITIVE(l
)) {
10108 RETURN_IF_XML_A(l
);
10109 if (!JSVAL_IS_PRIMITIVE(r
)) {
10110 RETURN_IF_XML_A(r
);
10111 return InjectStatus(call_imacro(add_imacros
.obj_obj
));
10113 return InjectStatus(call_imacro(add_imacros
.obj_any
));
10115 if (!JSVAL_IS_PRIMITIVE(r
)) {
10116 RETURN_IF_XML_A(r
);
10117 return InjectStatus(call_imacro(add_imacros
.any_obj
));
10120 if (JSVAL_IS_STRING(l
) || JSVAL_IS_STRING(r
)) {
10121 LIns
* args
[] = { stringify(r
), stringify(l
), cx_ins
};
10122 LIns
* concat
= lir
->insCall(&js_ConcatStrings_ci
, args
);
10123 guard(false, lir
->ins_peq0(concat
), OOM_EXIT
);
10125 return ARECORD_CONTINUE
;
10128 return InjectStatus(binary(LIR_fadd
));
10131 JS_REQUIRES_STACK AbortableRecordingStatus
10132 TraceRecorder::record_JSOP_SUB()
10134 return InjectStatus(binary(LIR_fsub
));
10137 JS_REQUIRES_STACK AbortableRecordingStatus
10138 TraceRecorder::record_JSOP_MUL()
10140 return InjectStatus(binary(LIR_fmul
));
10143 JS_REQUIRES_STACK AbortableRecordingStatus
10144 TraceRecorder::record_JSOP_DIV()
10146 return InjectStatus(binary(LIR_fdiv
));
10149 JS_REQUIRES_STACK AbortableRecordingStatus
10150 TraceRecorder::record_JSOP_MOD()
10152 return InjectStatus(binary(LIR_fmod
));
10155 JS_REQUIRES_STACK AbortableRecordingStatus
10156 TraceRecorder::record_JSOP_NOT()
10158 jsval
& v
= stackval(-1);
10159 if (JSVAL_IS_SPECIAL(v
)) {
10160 set(&v
, lir
->ins_eq0(lir
->ins2i(LIR_eq
, get(&v
), 1)));
10161 return ARECORD_CONTINUE
;
10164 LIns
* v_ins
= get(&v
);
10165 set(&v
, lir
->ins2(LIR_or
, lir
->ins2(LIR_feq
, v_ins
, lir
->insImmf(0)),
10166 lir
->ins_eq0(lir
->ins2(LIR_feq
, v_ins
, v_ins
))));
10167 return ARECORD_CONTINUE
;
10169 if (JSVAL_TAG(v
) == JSVAL_OBJECT
) {
10170 set(&v
, lir
->ins_peq0(get(&v
)));
10171 return ARECORD_CONTINUE
;
10173 JS_ASSERT(JSVAL_IS_STRING(v
));
10174 set(&v
, lir
->ins_peq0(lir
->insLoad(LIR_ldp
, get(&v
),
10175 offsetof(JSString
, mLength
))));
10176 return ARECORD_CONTINUE
;
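/*
 * A note on the number case above: |!v| must be true exactly when v is 0 or
 * NaN. The emitted LIR computes (v == 0) | !(v == v); the second term is the
 * standard NaN test, since NaN is the only value for which v == v is false.
 * E.g. v = 0.0 gives 1|0 = true, v = NaN gives 0|1 = true, and v = 2.0 gives
 * 0|0 = false.
 */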
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITNOT()
{
    return InjectStatus(unary(LIR_not));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEG()
{
    jsval& v = stackval(-1);

    if (!JSVAL_IS_PRIMITIVE(v)) {
        RETURN_IF_XML_A(v);
        return InjectStatus(call_imacro(unary_imacros.sign));
    }

    if (isNumber(v)) {
        LIns* a = get(&v);

        /*
         * If we're a promoted integer, we have to watch out for 0s since -0 is
         * a double. Only follow this path if we're not an integer that's 0 and
         * we're not a double that's zero.
         */
        if (!oracle.isInstructionUndemotable(cx->fp->regs->pc) &&
            isPromoteInt(a) &&
            (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
            (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
            -asNumber(v) == (int)-asNumber(v)) {
            a = lir->ins1(LIR_neg, ::demote(lir, a));
            if (!a->isconst()) {
                VMSideExit* exit = snapshot(OVERFLOW_EXIT);
                guard(false, lir->ins1(LIR_ov, a), exit);
                guard(false, lir->ins2i(LIR_eq, a, 0), exit);
            }
            a = lir->ins1(LIR_i2f, a);
        } else {
            a = lir->ins1(LIR_fneg, a);
        }

        set(&v, a);
        return ARECORD_CONTINUE;
    }

    if (JSVAL_IS_NULL(v)) {
        set(&v, lir->insImmf(-0.0));
        return ARECORD_CONTINUE;
    }

    JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_IS_SPECIAL(v));

    LIns* args[] = { get(&v), cx_ins };
    set(&v, lir->ins1(LIR_fneg,
                      lir->insCall(JSVAL_IS_STRING(v)
                                   ? &js_StringToNumber_ci
                                   : &js_BooleanOrUndefinedToNumber_ci,
                                   args)));
    return ARECORD_CONTINUE;
}
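/*
 * Why the 0 checks above matter: negating the integer 0 yields -0, which
 * IEEE-754 doubles can represent but 32-bit integers cannot, so the demoted
 * integer path would silently produce +0. E.g. -x for x == 0 must stay on
 * the LIR_fneg double path, while -x for x == 5 can demote to an integer
 * LIR_neg guarded against overflow.
 */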
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POS()
{
    jsval& v = stackval(-1);

    if (!JSVAL_IS_PRIMITIVE(v)) {
        RETURN_IF_XML_A(v);
        return InjectStatus(call_imacro(unary_imacros.sign));
    }

    if (isNumber(v))
        return ARECORD_CONTINUE;

    if (JSVAL_IS_NULL(v)) {
        set(&v, lir->insImmf(0));
        return ARECORD_CONTINUE;
    }

    JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING || JSVAL_IS_SPECIAL(v));

    LIns* args[] = { get(&v), cx_ins };
    set(&v, lir->insCall(JSVAL_IS_STRING(v)
                         ? &js_StringToNumber_ci
                         : &js_BooleanOrUndefinedToNumber_ci,
                         args));
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PRIMTOP()
{
    // Either this opcode does nothing or we couldn't have traced here, because
    // we'd have thrown an exception -- so do nothing if we actually hit this.
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OBJTOP()
{
    jsval& v = stackval(-1);
    RETURN_IF_XML_A(v);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
{
    // ctor must be a function created via js_InitClass.
    JSClass* clasp = FUN_CLASP(GET_FUNCTION_PRIVATE(cx, ctor));

    JSTraceMonitor& localtm = JS_TRACE_MONITOR(cx);

    jsval pval;
    if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
        RETURN_ERROR("error getting prototype from constructor");

    // ctor.prototype is a permanent data property, so this lookup cannot have
    // deep-bailed.
    JS_ASSERT(localtm.recorder);

#ifdef DEBUG
    JSBool ok, found;
    uintN attrs;
    ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
    JS_ASSERT(ok);
    JS_ASSERT(found);
    JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
#endif

    // Since ctor was built by js_InitClass, we can assert (rather than check)
    // that pval is usable.
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(pval));
    JSObject* proto = JSVAL_TO_OBJECT(pval);
    JS_ASSERT_IF(clasp != &js_ArrayClass, OBJ_SCOPE(proto)->emptyScope->clasp == clasp);

    proto_ins = INS_CONSTOBJ(proto);
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
{
    JSTraceMonitor& localtm = JS_TRACE_MONITOR(cx);

    JSObject* proto;
    if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &proto))
        RETURN_ERROR("error in js_GetClassPrototype");

    // This should not have reentered.
    JS_ASSERT(localtm.recorder);

    // If we might end up passing the proto to JSObject::initSharingEmptyScope,
    // we must check here that proto has a matching emptyScope. We skip the
    // check for Array.prototype because new arrays, being non-native, are
    // never initialized using initSharingEmptyScope.
    if (key != JSProto_Array) {
        if (!OBJ_IS_NATIVE(proto))
            RETURN_STOP("non-native class prototype");
        JSEmptyScope* emptyScope = OBJ_SCOPE(proto)->emptyScope;
        if (!emptyScope || JSCLASS_CACHED_PROTO_KEY(emptyScope->clasp) != key)
            RETURN_STOP("class prototype is not the standard one");
    }

    proto_ins = INS_CONSTOBJ(proto);
    return RECORD_CONTINUE;
}

#define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::newString(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
{
    JS_ASSERT(argc == 1);

    if (!JSVAL_IS_PRIMITIVE(argv[0])) {
        RETURN_IF_XML(argv[0]);
        return call_imacro(new_imacros.String);
    }

    LIns* proto_ins;
    CHECK_STATUS(getClassPrototype(ctor, proto_ins));

    LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
    LIns* obj_ins = lir->insCall(&js_String_tn_ci, args);
    guard(false, lir->ins_peq0(obj_ins), OOM_EXIT);

    set(rval, obj_ins);
    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* rval)
{
    LIns* proto_ins;
    CHECK_STATUS(getClassPrototype(ctor, proto_ins));

    LIns* arr_ins;
    if (argc == 0) {
        // arr_ins = js_NewEmptyArray(cx, Array.prototype)
        LIns* args[] = { proto_ins, cx_ins };
        arr_ins = lir->insCall(&js_NewEmptyArray_ci, args);
        guard(false, lir->ins_peq0(arr_ins), OOM_EXIT);
    } else if (argc == 1 && JSVAL_IS_NUMBER(argv[0])) {
        // arr_ins = js_NewEmptyArray(cx, Array.prototype, length)
        LIns* args[] = { f2i(get(argv)), proto_ins, cx_ins }; // FIXME: is this 64-bit safe?
        arr_ins = lir->insCall(&js_NewEmptyArrayWithLength_ci, args);
        guard(false, lir->ins_peq0(arr_ins), OOM_EXIT);
    } else {
        // arr_ins = js_NewArrayWithSlots(cx, Array.prototype, argc)
        LIns* args[] = { INS_CONST(argc), proto_ins, cx_ins };
        arr_ins = lir->insCall(&js_NewArrayWithSlots_ci, args);
        guard(false, lir->ins_peq0(arr_ins), OOM_EXIT);

        // arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
        LIns* dslots_ins = NULL;
        for (uint32 i = 0; i < argc && !outOfMemory(); i++) {
            LIns* elt_ins = box_jsval(argv[i], get(&argv[i]));
            stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins);
        }

        stobj_set_fslot(arr_ins, JSSLOT_ARRAY_COUNT, INS_CONST(argc));
    }

    set(rval, arr_ins);
    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK void
TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins)
{
    /*
     * Check the boolean return value (ok_ins) of a native JSNative,
     * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
     * JSBUILTIN_ERROR bit of cx->builtinStatus.
     *
     * If the return value (ok_ins) is true, status' == status. Otherwise
     * status' = status | JSBUILTIN_ERROR. We calculate (rval&1)^1, which is 1
     * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
     * of JSBUILTIN_ERROR.
     */
    JS_STATIC_ASSERT(((JS_TRUE & 1) ^ 1) << 1 == 0);
    JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == JSBUILTIN_ERROR);
    status_ins = lir->ins2(LIR_or,
                           status_ins,
                           lir->ins2i(LIR_lsh,
                                      lir->ins2i(LIR_xor,
                                                 lir->ins2i(LIR_and, ok_ins, 1),
                                                 1),
                                      1));
    lir->insStorei(status_ins, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
}
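/*
 * Worked instance of the bit arithmetic above: for ok_ins == JS_TRUE (1),
 * ((1 & 1) ^ 1) << 1 == 0, so status_ins is ORed with 0 and left unchanged;
 * for ok_ins == JS_FALSE (0), ((0 & 1) ^ 1) << 1 == 2 == JSBUILTIN_ERROR, so
 * the error bit is set. This is exactly what the two static asserts pin down.
 */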
JS_REQUIRES_STACK void
TraceRecorder::emitNativePropertyOp(JSScope* scope, JSScopeProperty* sprop, LIns* obj_ins,
                                    bool setflag, LIns* boxed_ins)
{
    JS_ASSERT(!(sprop->attrs & (setflag ? JSPROP_SETTER : JSPROP_GETTER)));
    JS_ASSERT(setflag ? !SPROP_HAS_STUB_SETTER(sprop) : !SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop));

    enterDeepBailCall();

    // It is unsafe to pass the address of an object slot as the out parameter,
    // because the getter or setter could end up resizing the object's dslots.
    // Instead, use a word of stack and root it in nativeVp.
    LIns* vp_ins = lir->insAlloc(sizeof(jsval));
    lir->insStorei(vp_ins, lirbuf->state, offsetof(InterpState, nativeVp));
    lir->insStorei(INS_CONST(1), lirbuf->state, offsetof(InterpState, nativeVpLen));
    if (setflag)
        lir->insStorei(boxed_ins, vp_ins, 0);

    CallInfo* ci = new (traceAlloc()) CallInfo();
    ci->_address = uintptr_t(setflag ? sprop->setter : sprop->getter);
    ci->_argtypes = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
                    ARGSIZE_P << (1*ARGSIZE_SHIFT) |
                    ARGSIZE_P << (2*ARGSIZE_SHIFT) |
                    ARGSIZE_P << (3*ARGSIZE_SHIFT) |
                    ARGSIZE_P << (4*ARGSIZE_SHIFT);
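    /*
     * A sketch of how the packed word above decodes, assuming nanojit's
     * convention that the field at shift 0 is the return type and each
     * following ARGSIZE_SHIFT-wide field is one argument: the call returns a
     * word-sized JSBool (ARGSIZE_I) and takes four pointer-sized arguments
     * (ARGSIZE_P), matching a JSPropertyOp(JSContext*, JSObject*, jsval id,
     * jsval* vp) invocation.
     */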
    ci->_cse = ci->_fold = 0;
    ci->_abi = ABI_CDECL;
#ifdef DEBUG
    ci->_name = "JSPropertyOp";
#endif

    LIns* args[] = { vp_ins, INS_CONSTVAL(SPROP_USERID(sprop)), obj_ins, cx_ins };
    LIns* ok_ins = lir->insCall(ci, args);

    // Cleanup. Immediately clear nativeVp before we might deep bail.
    lir->insStorei(INS_NULL(), lirbuf->state, offsetof(InterpState, nativeVp));
    leaveDeepBailCall();

    // Guard that the call succeeded and builtinStatus is still 0.
    // If the native op succeeds but we deep-bail here, the result value is
    // lost! Therefore this can only be used for setters of shared properties.
    // In that case we ignore the result value anyway.
    LIns* status_ins = lir->insLoad(LIR_ld,
                                    lirbuf->state,
                                    (int) offsetof(InterpState, builtinStatus));
    propagateFailureToBuiltinStatus(ok_ins, status_ins);
    guard(true, lir->ins_eq0(status_ins), STATUS_EXIT);

    // Re-load the value--but this is currently unused, so commented out.
    //boxed_ins = lir->insLoad(LIR_ldp, vp_ins, 0);
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
{
    bool constructing = !!(sn->flags & JSTN_CONSTRUCTOR);

    if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
        // This needs to capture the pre-call state of the stack. So do not set
        // pendingSpecializedNative before taking this snapshot.
        JS_ASSERT(!pendingSpecializedNative);

        // Take snapshot for js_DeepBail and store it in cx->bailExit.
        // If we are calling a slow native, add information to the side exit
        // for SynthesizeSlowNativeFrame.
        VMSideExit* exit = enterDeepBailCall();
        JSObject* funobj = JSVAL_TO_OBJECT(stackval(0 - (2 + argc)));
        if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, funobj))) {
            exit->setNativeCallee(funobj, constructing);
            treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(funobj));
        }
    }

    LIns* res_ins = lir->insCall(sn->builtin, args);

    // Immediately unroot the vp as soon as we return since we might deep bail next.
    if (rooted)
        lir->insStorei(INS_NULL(), lirbuf->state, offsetof(InterpState, nativeVp));

    rval_ins = res_ins;
    switch (JSTN_ERRTYPE(sn)) {
      case FAIL_NULL:
        guard(false, lir->ins_peq0(res_ins), OOM_EXIT);
        break;
      case FAIL_NEG:
        res_ins = lir->ins1(LIR_i2f, res_ins);
        guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmf(0)), OOM_EXIT);
        break;
      case FAIL_VOID:
        guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_SPECIAL(JSVAL_VOID)), OOM_EXIT);
        break;
      case FAIL_COOKIE:
        guard(false, lir->ins2(LIR_peq, res_ins, INS_CONSTWORD(JSVAL_ERROR_COOKIE)), OOM_EXIT);
        break;
      default:;
    }

    set(&stackval(0 - (2 + argc)), res_ins);

    /*
     * The return value will be processed by NativeCallComplete since
     * we have to know the actual return value type for calls that return
     * jsval (like Array_p_pop).
     */
    pendingSpecializedNative = sn;

    return RECORD_CONTINUE;
}
/*
 * Check whether we have a specialized implementation for this native
 * invocation.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc,
                                     bool constructing)
{
    JSStackFrame* fp = cx->fp;
    jsbytecode *pc = fp->regs->pc;

    jsval& fval = stackval(0 - (2 + argc));
    jsval& tval = stackval(0 - (1 + argc));

    LIns* this_ins = get(&tval);

    LIns* args[nanojit::MAXARGS];
    JSSpecializedNative *sn = trcinfo->specializations;
    do {
        if (((sn->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
            goto next_specialization;

        uintN knownargc = strlen(sn->argtypes);
        if (argc != knownargc)
            goto next_specialization;

        intN prefixc = strlen(sn->prefix);
        JS_ASSERT(prefixc <= 3);
        LIns** argp = &args[argc + prefixc - 1];
        char argtype;

#if defined DEBUG
        memset(args, 0xCD, sizeof(args));
#endif

        uintN i;
        for (i = prefixc; i--; ) {
            argtype = sn->prefix[i];
            if (argtype == 'C') {
                *argp = cx_ins;
            } else if (argtype == 'T') { /* this, as an object */
                if (JSVAL_IS_PRIMITIVE(tval))
                    goto next_specialization;
                *argp = this_ins;
            } else if (argtype == 'S') { /* this, as a string */
                if (!JSVAL_IS_STRING(tval))
                    goto next_specialization;
                *argp = this_ins;
            } else if (argtype == 'f') {
                *argp = INS_CONSTOBJ(JSVAL_TO_OBJECT(fval));
            } else if (argtype == 'p') {
                CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval), *argp));
            } else if (argtype == 'R') {
                *argp = INS_CONSTPTR(cx->runtime);
            } else if (argtype == 'P') {
                // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
                //        JSOP_GETELEM imacro (bug 476559).
                if ((*pc == JSOP_CALL) &&
                    fp->imacpc && *fp->imacpc == JSOP_GETELEM)
                    *argp = INS_CONSTPTR(fp->imacpc);
                else
                    *argp = INS_CONSTPTR(pc);
            } else if (argtype == 'D') { /* this, as a number */
                if (!isNumber(tval))
                    goto next_specialization;
                *argp = this_ins;
            } else {
                JS_NOT_REACHED("unknown prefix arg type");
            }
            argp--;
        }

        for (i = knownargc; i--; ) {
            jsval& arg = stackval(0 - (i + 1));
            *argp = get(&arg);

            argtype = sn->argtypes[i];
            if (argtype == 'd' || argtype == 'i') {
                if (!isNumber(arg))
                    goto next_specialization;
                if (argtype == 'i')
                    *argp = f2i(*argp);
            } else if (argtype == 'o') {
                if (JSVAL_IS_PRIMITIVE(arg))
                    goto next_specialization;
            } else if (argtype == 's') {
                if (!JSVAL_IS_STRING(arg))
                    goto next_specialization;
            } else if (argtype == 'r') {
                if (!VALUE_IS_REGEXP(cx, arg))
                    goto next_specialization;
            } else if (argtype == 'f') {
                if (!VALUE_IS_FUNCTION(cx, arg))
                    goto next_specialization;
            } else if (argtype == 'v') {
                *argp = box_jsval(arg, *argp);
            } else {
                goto next_specialization;
            }
            argp--;
        }
#if defined DEBUG
        JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
#endif
        return emitNativeCall(sn, argc, args, false);

next_specialization:;
    } while ((sn++)->flags & JSTN_MORE);

    return RECORD_STOP;
}
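/*
 * Illustrative walk-through of the filling loops above, for a hypothetical
 * specialization (not one taken from jsbuiltins): with argc == 1, prefix "C"
 * and argtypes "s", argp starts at &args[1]; the prefix loop stores cx_ins
 * into args[1], then the argument loop stores the string-checked stack
 * operand into args[0]. Since the args arrays in this file are laid out in
 * reverse of the C signature (see emitNativePropertyOp above), that calls a
 * builtin whose C signature is roughly ret fn(JSContext*, JSString*).
 */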
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::callNative(uintN argc, JSOp mode)
{
    LIns* args[5];

    JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_APPLY);

    jsval* vp = &stackval(0 - (2 + argc));
    JSObject* funobj = JSVAL_TO_OBJECT(vp[0]);
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, funobj);
    JSFastNative native = (JSFastNative)fun->u.n.native;

    switch (argc) {
      case 1:
        if (isNumber(vp[2]) &&
            (native == js_math_ceil || native == js_math_floor || native == js_math_round)) {
            LIns* a = get(&vp[2]);
            if (isPromote(a)) {
                set(&vp[0], a);
                pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
                return RECORD_CONTINUE;
            }
        }
        break;

      case 2:
        if (isNumber(vp[2]) && isNumber(vp[3]) &&
            (native == js_math_min || native == js_math_max)) {
            LIns* a = get(&vp[2]);
            LIns* b = get(&vp[3]);
            if (isPromote(a) && isPromote(b)) {
                a = ::demote(lir, a);
                b = ::demote(lir, b);
                set(&vp[0],
                    lir->ins1(LIR_i2f,
                              lir->ins_choose(lir->ins2((native == js_math_min)
                                                        ? LIR_lt
                                                        : LIR_gt,
                                                        a, b),
                                              a, b, avmplus::AvmCore::use_cmov())));
                pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
                return RECORD_CONTINUE;
            }
        }
        break;
    }
    if (fun->flags & JSFUN_TRCINFO) {
        JSNativeTraceInfo *trcinfo = FUN_TRCINFO(fun);
        JS_ASSERT(trcinfo && (JSFastNative)fun->u.n.native == trcinfo->native);

        /* Try to call a type specialized version of the native. */
        if (trcinfo->specializations) {
            RecordingStatus status = callSpecializedNative(trcinfo, argc, mode == JSOP_NEW);
            if (status != RECORD_STOP)
                return status;
        }
    }

    if (native == js_fun_apply || native == js_fun_call)
        RETURN_STOP("trying to call native apply or call");

    // Allocate the vp vector and emit code to root it.
    uintN vplen = 2 + JS_MAX(argc, unsigned(FUN_MINARGS(fun))) + fun->u.n.extra;
    if (!(fun->flags & JSFUN_FAST_NATIVE))
        vplen++; // slow native return value slot
    LIns* invokevp_ins = lir->insAlloc(vplen * sizeof(jsval));
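    /*
     * Example of the sizing arithmetic above: calling a slow native with
     * argc == 1 whose FUN_MINARGS is 2 and u.n.extra is 0 gives
     * vplen = 2 + max(1, 2) + 0 = 4, plus 1 for the slow-native return slot,
     * so 5 jsvals are allocated: callee, this, two argument slots (the second
     * padded with undefined below), and the return slot (also pre-filled with
     * undefined).
     */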
    // vp[0] is the callee.
    lir->insStorei(INS_CONSTVAL(OBJECT_TO_JSVAL(funobj)), invokevp_ins, 0);

    // Calculate |this|.
    LIns* this_ins;
    if (mode == JSOP_NEW) {
        JSClass* clasp = fun->u.n.clasp;
        JS_ASSERT(clasp != &js_SlowArrayClass);
        if (!clasp)
            clasp = &js_ObjectClass;
        JS_ASSERT(((jsuword) clasp & 3) == 0);

        // Abort on |new Function|. js_NewInstance would allocate a regular-
        // sized JSObject, not a Function-sized one. (The Function ctor would
        // deep-bail anyway but let's not go there.)
        if (clasp == &js_FunctionClass)
            RETURN_STOP("new Function");

        if (clasp->getObjectOps)
            RETURN_STOP("new with non-native ops");

        args[0] = INS_CONSTOBJ(funobj);
        args[1] = INS_CONSTPTR(clasp);
        args[2] = cx_ins;
        LIns* newobj_ins = lir->insCall(&js_NewInstance_ci, args);
        guard(false, lir->ins_peq0(newobj_ins), OOM_EXIT);
        this_ins = newobj_ins; /* boxing an object is a no-op */
    } else if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
        /* |funobj| was rooted above already. */
        this_ins = INS_CONSTWORD(OBJECT_TO_JSVAL(OBJ_GET_PARENT(cx, funobj)));
    } else {
        this_ins = get(&vp[1]);

        /*
         * For fast natives, 'null' or primitives are fine as the 'this' value.
         * For slow natives we have to ensure the object is substituted for the
         * appropriate global object or boxed object value. JSOP_NEW allocates its
         * own object so it's guaranteed to have a valid 'this' value.
         */
        if (!(fun->flags & JSFUN_FAST_NATIVE)) {
            if (JSVAL_IS_NULL(vp[1])) {
                JSObject* thisObj = js_ComputeThis(cx, JS_FALSE, vp + 2);
                if (!thisObj)
                    RETURN_ERROR("error in js_ComputeGlobalThis");
                this_ins = INS_CONSTOBJ(thisObj);
            } else if (!JSVAL_IS_OBJECT(vp[1])) {
                RETURN_STOP("slow native(primitive, args)");
            } else {
                if (guardClass(JSVAL_TO_OBJECT(vp[1]), this_ins, &js_WithClass, snapshot(MISMATCH_EXIT)))
                    RETURN_STOP("can't trace slow native invocation on With object");

                this_ins = lir->ins_choose(lir->ins_peq0(stobj_get_parent(this_ins)),
                                           INS_CONSTOBJ(globalObj),
                                           this_ins, avmplus::AvmCore::use_cmov());
            }
        }
        this_ins = box_jsval(vp[1], this_ins);
    }
    lir->insStorei(this_ins, invokevp_ins, 1 * sizeof(jsval));
    // Populate argv.
    for (uintN n = 2; n < 2 + argc; n++) {
        LIns* i = box_jsval(vp[n], get(&vp[n]));
        lir->insStorei(i, invokevp_ins, n * sizeof(jsval));

        // For a very long argument list we might run out of LIR space, so
        // check inside the loop.
        if (outOfMemory())
            RETURN_STOP("out of memory in argument list");
    }

    // Populate extra slots, including the return value slot for a slow native.
    if (2 + argc < vplen) {
        LIns* undef_ins = INS_CONSTWORD(JSVAL_VOID);
        for (uintN n = 2 + argc; n < vplen; n++) {
            lir->insStorei(undef_ins, invokevp_ins, n * sizeof(jsval));

            if (outOfMemory())
                RETURN_STOP("out of memory in extra slots");
        }
    }

    // Set up arguments for the JSNative or JSFastNative.
    uint32 types;
    if (fun->flags & JSFUN_FAST_NATIVE) {
        if (mode == JSOP_NEW)
            RETURN_STOP("untraceable fast native constructor");
        native_rval_ins = invokevp_ins;
        args[0] = invokevp_ins;
        args[1] = lir->insImm(argc);
        args[2] = cx_ins;
        types = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
                ARGSIZE_P << (1*ARGSIZE_SHIFT) |
                ARGSIZE_I << (2*ARGSIZE_SHIFT) |
                ARGSIZE_P << (3*ARGSIZE_SHIFT);
    } else {
        int32_t offset = (vplen - 1) * sizeof(jsval);
        native_rval_ins = lir->ins2(LIR_piadd, invokevp_ins, INS_CONSTWORD(offset));
        args[0] = native_rval_ins;
        args[1] = lir->ins2(LIR_piadd, invokevp_ins, INS_CONSTWORD(2 * sizeof(jsval)));
        args[2] = lir->insImm(argc);
        args[3] = this_ins;
        args[4] = cx_ins;
        types = ARGSIZE_I << (0*ARGSIZE_SHIFT) |
                ARGSIZE_P << (1*ARGSIZE_SHIFT) |
                ARGSIZE_P << (2*ARGSIZE_SHIFT) |
                ARGSIZE_I << (3*ARGSIZE_SHIFT) |
                ARGSIZE_P << (4*ARGSIZE_SHIFT) |
                ARGSIZE_P << (5*ARGSIZE_SHIFT);
    }
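    /*
     * The two packed type words above correspond, reading fields from shift 0
     * upward (return type first, then arguments in reverse C order, as in
     * emitNativePropertyOp), to the two native calling conventions: a JSBool
     * JSFastNative(JSContext*, uintN argc, jsval* vp) for the fast branch,
     * and a JSBool JSNative(JSContext*, JSObject* this, uintN argc,
     * jsval* argv, jsval* rval) for this slow branch.
     */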
    // Generate CallInfo and a JSSpecializedNative structure on the fly.
    // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
    // record_NativeCallComplete unboxes the result specially.

    CallInfo* ci = new (traceAlloc()) CallInfo();
    ci->_address = uintptr_t(fun->u.n.native);
    ci->_cse = ci->_fold = 0;
    ci->_abi = ABI_CDECL;
    ci->_argtypes = types;
#ifdef DEBUG
    ci->_name = JS_GetFunctionName(fun);
#endif

    // Generate a JSSpecializedNative structure on the fly.
    generatedSpecializedNative.builtin = ci;
    generatedSpecializedNative.flags = FAIL_STATUS | ((mode == JSOP_NEW)
                                                      ? JSTN_CONSTRUCTOR
                                                      : JSTN_UNBOX_AFTER);
    generatedSpecializedNative.prefix = NULL;
    generatedSpecializedNative.argtypes = NULL;

    // We only have to ensure that the values we wrote into the stack buffer
    // are rooted if we actually make it to the call, so only set nativeVp and
    // nativeVpLen immediately before emitting the call code. This way we avoid
    // leaving trace with a bogus nativeVp because we fall off trace while unboxing
    // values into the stack buffer.
    lir->insStorei(INS_CONST(vplen), lirbuf->state, offsetof(InterpState, nativeVpLen));
    lir->insStorei(invokevp_ins, lirbuf->state, offsetof(InterpState, nativeVp));

    // argc is the original argc here. It is used to calculate where to place
    // the return value.
    return emitNativeCall(&generatedSpecializedNative, argc, args, true);
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::functionCall(uintN argc, JSOp mode)
{
    jsval& fval = stackval(0 - (2 + argc));
    JS_ASSERT(&fval >= StackBase(cx->fp));

    if (!VALUE_IS_FUNCTION(cx, fval))
        RETURN_STOP("callee is not a function");

    jsval& tval = stackval(0 - (1 + argc));

    /*
     * If callee is not constant, it's a shapeless call and we have to guard
     * explicitly that we will get this callee again at runtime.
     */
    if (!get(&fval)->isconstp())
        CHECK_STATUS(guardCallee(fval));

    /*
     * Require that the callee be a function object, to avoid guarding on its
     * class here. We know if the callee and this were pushed by JSOP_CALLNAME
     * or JSOP_CALLPROP that callee is a *particular* function, since these hit
     * the property cache and guard on the object (this) in which the callee
     * was found. So it's sufficient to test here that the particular function
     * is interpreted, not guard on that condition.
     *
     * Bytecode sequences that push shapeless callees must guard on the callee
     * class being Function and the function being interpreted.
     */
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));

    if (FUN_INTERPRETED(fun)) {
        if (mode == JSOP_NEW) {
            LIns* args[] = { get(&fval), INS_CONSTPTR(&js_ObjectClass), cx_ins };
            LIns* tv_ins = lir->insCall(&js_NewInstance_ci, args);
            guard(false, lir->ins_peq0(tv_ins), OOM_EXIT);
            set(&tval, tv_ins);
        }
        return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
    }

    if (FUN_SLOW_NATIVE(fun)) {
        JSNative native = fun->u.n.native;
        jsval* argv = &tval + 1;
        if (native == js_Array)
            return newArray(JSVAL_TO_OBJECT(fval), argc, argv, &fval);
        if (native == js_String && argc == 1) {
            if (mode == JSOP_NEW)
                return newString(JSVAL_TO_OBJECT(fval), 1, argv, &fval);
            if (!JSVAL_IS_PRIMITIVE(argv[0])) {
                RETURN_IF_XML(argv[0]);
                return call_imacro(call_imacros.String);
            }
            set(&fval, stringify(argv[0]));
            pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
            return RECORD_CONTINUE;
        }
    }

    return callNative(argc, mode);
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEW()
{
    uintN argc = GET_ARGC(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(argc + 2);
    return InjectStatus(functionCall(argc, JSOP_NEW));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELPROP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELELEM()
{
    return ARECORD_STOP;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TYPEOF()
{
    jsval& r = stackval(-1);
    LIns* type;
    if (JSVAL_IS_STRING(r)) {
        type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]);
    } else if (isNumber(r)) {
        type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]);
    } else if (VALUE_IS_FUNCTION(cx, r)) {
        type = INS_ATOM(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]);
    } else {
        LIns* args[] = { get(&r), cx_ins };
        if (JSVAL_IS_SPECIAL(r)) {
            // We specialize identically for boolean and undefined. We must not have a hole here.
            // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
            JS_ASSERT(r == JSVAL_TRUE || r == JSVAL_FALSE || r == JSVAL_VOID);
            type = lir->insCall(&js_TypeOfBoolean_ci, args);
        } else {
            JS_ASSERT(JSVAL_TAG(r) == JSVAL_OBJECT);
            type = lir->insCall(&js_TypeOfObject_ci, args);
        }
    }
    set(&r, type);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_VOID()
{
    stack(-1, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCNAME()
{
    return incName(1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCPROP()
{
    return incProp(1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCELEM()
{
    return InjectStatus(incElem(1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECNAME()
{
    return incName(-1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECPROP()
{
    return incProp(-1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECELEM()
{
    return InjectStatus(incElem(-1));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::incName(jsint incr, bool pre)
{
    jsval* vp;
    LIns* v_ins;
    LIns* v_after;
    NameResult nr;

    CHECK_STATUS_A(name(vp, v_ins, nr));
    jsval v = nr.tracked ? *vp : nr.v;
    CHECK_STATUS_A(incHelper(v, v_ins, v_after, incr));
    LIns* v_result = pre ? v_after : v_ins;
    if (nr.tracked) {
        set(vp, v_after);
        stack(0, v_result);
        return ARECORD_CONTINUE;
    }

    if (OBJ_GET_CLASS(cx, nr.obj) != &js_CallClass)
        RETURN_STOP_A("incName on unsupported object class");

    CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.sprop, v_after, v));
    stack(0, v_result);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEINC()
{
    return incName(1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPINC()
{
    return incProp(1, false);
}

// XXX consolidate with record_JSOP_GETELEM code...
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMINC()
{
    return InjectStatus(incElem(1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEDEC()
{
    return incName(-1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPDEC()
{
    return incProp(-1, false);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMDEC()
{
    return InjectStatus(incElem(-1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETPROP()
{
    return getProp(stackval(-1));
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETPROP()
{
    jsval& l = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(l))
        RETURN_STOP_A("primitive this for SETPROP");

    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj->map->ops->setProperty != js_SetProperty)
        RETURN_STOP_A("non-native JSObjectOps::setProperty");
    return ARECORD_CONTINUE;
}
/* Emit a specialized, inlined copy of js_NativeSet. */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop,
                         jsval v, LIns* v_ins)
{
    JSScope* scope = OBJ_SCOPE(obj);
    uint32 slot = sprop->slot;

    /*
     * We do not trace assignment to properties that have both a nonstub setter
     * and a slot, for several reasons.
     *
     * First, that would require sampling rt->propertyRemovals before and after
     * (see js_NativeSet), and even more code to handle the case where the two
     * samples differ. A mere guard is not enough, because you can't just bail
     * off trace in the middle of a property assignment without storing the
     * value and making the stack right.
     *
     * If obj is the global object, there are two additional problems. We would
     * have to emit still more code to store the result in the object (not the
     * native global frame) if the setter returned successfully after
     * deep-bailing. And we would have to cope if the run-time type of the
     * setter's return value differed from the record-time type of v, in which
     * case unboxing would fail and, having called a native setter, we could
     * not just retry the instruction in the interpreter.
     */
    JS_ASSERT(SPROP_HAS_STUB_SETTER(sprop) || slot == SPROP_INVALID_SLOT);

    // Box the value to be stored, if necessary.
    LIns* boxed_ins = NULL;
    if (!SPROP_HAS_STUB_SETTER(sprop) || (slot != SPROP_INVALID_SLOT && obj != globalObj))
        boxed_ins = box_jsval(v, v_ins);

    // Call the setter, if any.
    if (!SPROP_HAS_STUB_SETTER(sprop))
        emitNativePropertyOp(scope, sprop, obj_ins, true, boxed_ins);

    // Store the value, if this property has a slot.
    if (slot != SPROP_INVALID_SLOT) {
        JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope));
        JS_ASSERT(!(sprop->attrs & JSPROP_SHARED));
        if (obj == globalObj) {
            if (!lazilyImportGlobalSlot(slot))
                RETURN_STOP("lazy import of global slot failed");
            set(&STOBJ_GET_SLOT(obj, slot), v_ins);
        } else {
            LIns* dslots_ins = NULL;
            stobj_set_slot(obj_ins, slot, dslots_ins, boxed_ins);
        }
    }

    return RECORD_CONTINUE;
}
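/*
 * Decision table for the conditionals above: a stub setter writing to a slot
 * of a non-global object boxes v only for the dslot store; a stub setter on
 * the global object stores the unboxed value straight into the native global
 * frame (no boxing at all); and a nonstub setter with no slot (the
 * shared-property case) boxes v solely to pass it to emitNativePropertyOp.
 * The nonstub-setter-with-slot combination is excluded by the assert.
 */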
static JSBool FASTCALL
MethodWriteBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
{
    JSAutoTempValueRooter tvr(cx, funobj);

    return OBJ_SCOPE(obj)->methodWriteBarrier(cx, sprop, tvr.value());
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT,
                     0, 0)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::setProp(jsval &l, JSPropCacheEntry* entry, JSScopeProperty* sprop,
                       jsval &v, LIns*& v_ins)
{
    if (entry == JS_NO_PROP_CACHE_FILL)
        RETURN_STOP("can't trace uncacheable property set");
    JS_ASSERT_IF(PCVCAP_TAG(entry->vcap) >= 1, sprop->attrs & JSPROP_SHARED);
    if (!SPROP_HAS_STUB_SETTER(sprop) && sprop->slot != SPROP_INVALID_SLOT)
        RETURN_STOP("can't trace set of property with setter and slot");
    if (sprop->attrs & JSPROP_SETTER)
        RETURN_STOP("can't trace JavaScript function setter");

    // These two cases are errors and can't be traced.
    if (sprop->attrs & JSPROP_GETTER)
        RETURN_STOP("can't assign to property with script getter but no setter");
    if (sprop->attrs & JSPROP_READONLY)
        RETURN_STOP("can't assign to readonly property");

    JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);
    JSScope* scope = OBJ_SCOPE(obj);

    JS_ASSERT_IF(entry->vcap == PCVCAP_MAKE(entry->kshape, 0, 0), scope->hasProperty(sprop));

    // Fast path for CallClass. This is about 20% faster than the general case.
    v_ins = get(&v);
    if (OBJ_GET_CLASS(cx, obj) == &js_CallClass)
        return setCallProp(obj, obj_ins, sprop, v_ins, v);

    // Find obj2. If entry->adding(), the TAG bits are all 0.
    JSObject* obj2 = obj;
    for (jsuword i = PCVCAP_TAG(entry->vcap) >> PCVCAP_PROTOBITS; i; i--)
        obj2 = OBJ_GET_PARENT(cx, obj2);
    for (jsuword j = PCVCAP_TAG(entry->vcap) & PCVCAP_PROTOMASK; j; j--)
        obj2 = OBJ_GET_PROTO(cx, obj2);
    scope = OBJ_SCOPE(obj2);
    JS_ASSERT_IF(entry->adding(), obj2 == obj);

    // Guard before anything else.
    LIns* map_ins = map(obj_ins);
    CHECK_STATUS(guardNativePropertyOp(obj, map_ins));
    jsuword pcval;
    CHECK_STATUS(guardPropertyCacheHit(obj_ins, map_ins, obj, obj2, entry, pcval));
    JS_ASSERT(scope->object == obj2);
    JS_ASSERT(scope->hasProperty(sprop));
    JS_ASSERT_IF(obj2 != obj, sprop->attrs & JSPROP_SHARED);

    /*
     * Setting a function-valued property might need to rebrand the object, so
     * we emit a call to the method write barrier. There's no need to guard on
     * this, because functions have distinct trace-type from other values and
     * branded-ness is implied by the shape, which we've already guarded on.
     */
    if (scope->brandedOrHasMethodBarrier() && VALUE_IS_FUNCTION(cx, v) && entry->directHit()) {
        if (obj == globalObj)
            RETURN_STOP("can't trace function-valued property set in branded global scope");

        enterDeepBailCall();
        LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        LIns* ok_ins = lir->insCall(&MethodWriteBarrier_ci, args);
        guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
        leaveDeepBailCall();
    }

    // Add a property to the object if necessary.
    if (entry->adding()) {
        JS_ASSERT(!(sprop->attrs & JSPROP_SHARED));
        if (obj == globalObj)
            RETURN_STOP("adding a property to the global object");

        LIns* args[] = { INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        LIns* ok_ins = lir->insCall(&js_AddProperty_ci, args);
        guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
    }

    return nativeSet(obj, obj_ins, sprop, v, v_ins);
}
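/*
 * Sketch of the entry->vcap walk above, assuming the property cache packs two
 * hop counts into the tag bits: the portion above PCVCAP_PROTOBITS counts
 * scope-chain (parent) hops and the low PCVCAP_PROTOMASK portion counts
 * prototype hops. A tag of ((1 << PCVCAP_PROTOBITS) | 1) would therefore walk
 * one OBJ_GET_PARENT link and then one OBJ_GET_PROTO link to reach obj2, the
 * object that actually owns sprop.
 */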
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty *sprop,
                           LIns *v_ins, jsval v)
{
    // Set variables in on-trace-stack call objects by updating the tracker.
    JSStackFrame *fp = frameIfInRange(callobj);
    if (fp) {
        if (sprop->setter == SetCallArg) {
            jsint slot = JSVAL_TO_INT(SPROP_USERID(sprop));
            jsval *vp2 = &fp->argv[slot];
            set(vp2, v_ins);
            return RECORD_CONTINUE;
        }
        if (sprop->setter == SetCallVar) {
            jsint slot = JSVAL_TO_INT(SPROP_USERID(sprop));
            jsval *vp2 = &fp->slots[slot];
            set(vp2, v_ins);
            return RECORD_CONTINUE;
        }
        RETURN_STOP("can't trace special CallClass setter");
    }

    // Set variables in off-trace-stack call objects by calling standard builtins.
    const CallInfo* ci = NULL;
    if (sprop->setter == SetCallArg)
        ci = &js_SetCallArg_ci;
    else if (sprop->setter == SetCallVar)
        ci = &js_SetCallVar_ci;
    else
        RETURN_STOP("can't trace special CallClass setter");

    LIns* args[] = {
        box_jsval(v, v_ins),
        INS_CONST(SPROP_USERID(sprop)),
        callobj_ins,
        cx_ins
    };
    LIns* call_ins = lir->insCall(ci, args);
    guard(false, addName(lir->ins_eq0(call_ins), "guard(set upvar)"), STATUS_EXIT);
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    LIns* v_ins;
    CHECK_STATUS_A(setProp(l, entry, sprop, r, v_ins));

    jsbytecode* pc = cx->fp->regs->pc;
    switch (*pc) {
      case JSOP_SETPROP:
      case JSOP_SETNAME:
      case JSOP_SETMETHOD:
        if (pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
            set(&l, v_ins);
        break;

      default:;
    }

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::enterDeepBailCall()
{
    // Take snapshot for js_DeepBail and store it in cx->bailExit.
    VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
    lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));

    // Tell nanojit not to discard or defer stack writes before this call.
    GuardRecord* guardRec = createGuardRecord(exit);
    lir->insGuard(LIR_xbarrier, NULL, guardRec);

    // Forget about guarded shapes, since deep bailers can reshape the world.
    forgetGuardedShapes();
    return exit;
}

JS_REQUIRES_STACK void
TraceRecorder::leaveDeepBailCall()
{
    // Keep cx->bailExit null when it's invalid.
    lir->insStorei(INS_NULL(), cx_ins, offsetof(JSContext, bailExit));
}
JS_REQUIRES_STACK void
TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, jsval* outp)
{
    // Store the boxed result (and this-object, if JOF_CALLOP) before the
    // guard. The deep-bail case requires this. If the property get fails,
    // these slots will be ignored anyway.
    LIns* result_ins = lir->insLoad(LIR_ldp, vp_ins, 0);
    set(outp, result_ins, true);
    if (js_CodeSpec[*cx->fp->regs->pc].format & JOF_CALLOP)
        set(outp + 1, obj_ins, true);

    // We need to guard on ok_ins, but this requires a snapshot of the state
    // after this op. monitorRecording will do it for us.
    pendingGuardCondition = ok_ins;

    // Note there is a boxed result sitting on the stack. The caller must leave
    // it there for the time being, since the return type is not yet
    // known. monitorRecording will emit the code to unbox it.
    pendingUnboxSlot = outp;
}
static JSBool FASTCALL
RootedStringToId(JSContext* cx, JSString** namep, jsid* idp)
{
    JSString* name = *namep;
    if (name->isAtomized()) {
        *idp = ATOM_TO_JSID((JSAtom*) STRING_TO_JSVAL(name));
        return JS_TRUE;
    }

    JSAtom* atom = js_AtomizeString(cx, name, 0);
    if (!atom)
        return JS_FALSE;
    *namep = ATOM_TO_STRING(atom); /* write back to GC root */
    *idp = ATOM_TO_JSID(atom);
    return JS_TRUE;
}
static JSBool FASTCALL
GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id) || !obj->getProperty(cx, id, vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVALPTR,
                     0, 0)
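/*
 * GetPropertyByName is typical of the BOOL_FAIL builtins in this file: it
 * leaves trace if obj is the global object, reports failure through
 * js_SetBuiltinError (which flags cx->interpState->builtinStatus), and
 * returns false either on direct failure or when some nested call already
 * flagged builtinStatus. The recorder then guards on the returned ok_ins via
 * pendingGuardCondition, so the trace exits whenever the builtin failed.
 */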
// Convert the value in a slot to a string and store the resulting string back
// in the slot (typically in order to root it).
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::primitiveToStringInPlace(jsval* vp)
{
    jsval v = *vp;
    JS_ASSERT(JSVAL_IS_PRIMITIVE(v));

    if (!JSVAL_IS_STRING(v)) {
        // v is not a string. Turn it into one. js_ValueToString is safe
        // because v is not an object.
        JSString *str = js_ValueToString(cx, v);
        if (!str)
            RETURN_ERROR("failed to stringify element id");
        v = STRING_TO_JSVAL(str);
        set(vp, stringify(*vp));

        // Write the string back to the stack to save the interpreter some work
        // and to ensure snapshots get the correct type for this slot.
        *vp = v;
    }
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyByName(LIns* obj_ins, jsval* idvalp, jsval* outp)
{
    CHECK_STATUS(primitiveToStringInPlace(idvalp));
    enterDeepBailCall();

    // Call GetPropertyByName. The vp parameter points to stack because this is
    // what the interpreter currently does. obj and id are rooted on the
    // interpreter stack, but the slot at vp is not a root.
    LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
    LIns* idvalp_ins = addName(addr(idvalp), "idvalp");
    LIns* args[] = {vp_ins, idvalp_ins, obj_ins, cx_ins};
    LIns* ok_ins = lir->insCall(&GetPropertyByName_ci, args);

    // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
    // entry for that address. Correct it. (If the value in the address is
    // never used again, the usual case, Nanojit will kill this load.)
    tracker.set(idvalp, lir->insLoad(LIR_ldp, idvalp_ins, 0));

    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
static JSBool FASTCALL
GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    JSAutoTempIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, JSVALPTR, 0, 0)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, jsval* outp)
{
    index_ins = makeNumberInt32(index_ins);

    // See note in getPropertyByName about vp.
    enterDeepBailCall();
    LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
    LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
    LIns* ok_ins = lir->insCall(&GetPropertyByIndex_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
static JSBool FASTCALL
GetPropertyById(JSContext* cx, JSObject* obj, jsid id, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);
    if (!obj->getProperty(cx, id, vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById,
                     CONTEXT, OBJECT, JSVAL, JSVALPTR, 0, 0)

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyById(LIns* obj_ins, jsval* outp)
{
    // Find the atom.
    JSAtom* atom;
    jsbytecode* pc = cx->fp->regs->pc;
    const JSCodeSpec& cs = js_CodeSpec[*pc];
    if (*pc == JSOP_LENGTH) {
        atom = cx->runtime->atomState.lengthAtom;
    } else if (JOF_TYPE(cs.format) == JOF_ATOM) {
        atom = atoms[GET_INDEX(pc)];
    } else {
        JS_ASSERT(JOF_TYPE(cs.format) == JOF_SLOTATOM);
        atom = atoms[GET_INDEX(pc + SLOTNO_LEN)];
    }

    // Call GetPropertyById. See note in getPropertyByName about vp.
    enterDeepBailCall();
    jsid id = ATOM_TO_JSID(atom);
    LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
    LIns* args[] = {vp_ins, INS_CONSTWORD(id), obj_ins, cx_ins};
    LIns* ok_ins = lir->insCall(&GetPropertyById_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
/* Manually inlined, specialized copy of js_NativeGet. */
static JSBool FASTCALL
GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

#ifdef DEBUG
    JSProperty* prop;
    JSObject* pobj;
    JS_ASSERT(obj->lookupProperty(cx, sprop->id, &pobj, &prop));
    JS_ASSERT(prop == (JSProperty*) sprop);
    obj->dropProperty(cx, prop);
#endif

    // JSScopeProperty::get contains a special case for With objects. We can
    // elide it here because With objects are, we claim, never on the operand
    // stack while recording.
    JS_ASSERT(STOBJ_GET_CLASS(obj) != &js_WithClass);

    if (!sprop->getter(cx, obj, SPROP_USERID(sprop), vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
                     CONTEXT, OBJECT, SCOPEPROP, JSVALPTR, 0, 0)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, JSScopeProperty* sprop, jsval* outp)
{
    JS_ASSERT(!(sprop->attrs & JSPROP_GETTER));
    JS_ASSERT(sprop->slot == SPROP_INVALID_SLOT);
    JS_ASSERT(!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop));

    // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
    // FIXME - We should call the getter directly. Using a builtin function for
    // now because it buys some extra asserts. See bug 508310.
    enterDeepBailCall();
    LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
    LIns* args[] = {vp_ins, INS_CONSTPTR(sprop), obj_ins, cx_ins};
    LIns* ok_ins = lir->insCall(&GetPropertyWithNativeGetter_ci, args);
    finishGetProp(obj_ins, vp_ins, ok_ins, outp);
    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETELEM()
{
    bool call = *cx->fp->regs->pc == JSOP_CALLELEM;

    jsval& idx = stackval(-1);
    jsval& lval = stackval(-2);

    LIns* obj_ins = get(&lval);
    LIns* idx_ins = get(&idx);

    // Special case for array-like access of strings.
    if (JSVAL_IS_STRING(lval) && isInt32(idx)) {
        if (call)
            RETURN_STOP_A("JSOP_CALLELEM on a string");
        int i = asInt32(idx);
        if (size_t(i) >= JSVAL_TO_STRING(lval)->length())
            RETURN_STOP_A("Invalid string index in JSOP_GETELEM");
        idx_ins = makeNumberInt32(idx_ins);
        LIns* args[] = { idx_ins, obj_ins, cx_ins };
        LIns* unitstr_ins = lir->insCall(&js_String_getelem_ci, args);
        guard(false, lir->ins_peq0(unitstr_ins), MISMATCH_EXIT);
        set(&lval, unitstr_ins);
        return ARECORD_CONTINUE;
    }

    if (JSVAL_IS_PRIMITIVE(lval))
        RETURN_STOP_A("JSOP_GETELEM on a primitive");
    RETURN_IF_XML_A(lval);

    JSObject* obj = JSVAL_TO_OBJECT(lval);
    if (obj == globalObj)
        RETURN_STOP_A("JSOP_GETELEM on global");
    LIns* v_ins;

    /* Property access using a string name or something we have to stringify. */
    if (!JSVAL_IS_INT(idx)) {
        if (!JSVAL_IS_PRIMITIVE(idx))
            RETURN_STOP_A("object used as index");

        return InjectStatus(getPropertyByName(obj_ins, &idx, &lval));
    }

    if (STOBJ_GET_CLASS(obj) == &js_ArgumentsClass) {
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (afp) {
            uintN int_idx = JSVAL_TO_INT(idx);
            jsval* vp = &afp->argv[int_idx];
            if (idx_ins->isconstf()) {
                if (int_idx >= 0 && int_idx < afp->argc)
                    v_ins = get(vp);
                else
                    v_ins = INS_VOID();
            } else {
                // If the index is not a constant expression, we generate LIR to load the value from
                // the native stack area. The guard on js_ArgumentsClass above ensures the up-to-date
                // value has been written back to the native stack area.
                idx_ins = makeNumberInt32(idx_ins);
                if (int_idx >= 0 && int_idx < afp->argc) {
                    JSTraceType type = getCoercedType(*vp);

                    // Guard that the argument has the same type on trace as during recording.
                    LIns* typemap_ins;
                    if (callDepth == depth) {
                        // In this case, we are in the same frame where the arguments object was created.
                        // The entry type map is not necessarily up-to-date, so we capture a new type map
                        // for this point in the code.
                        unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
                        JSTraceType* typemap = new (traceAlloc()) JSTraceType[stackSlots];
                        DetermineTypesVisitor detVisitor(*this, typemap);
                        VisitStackSlots(detVisitor, cx, 0);
                        typemap_ins = INS_CONSTPTR(typemap + 2 /* callee, this */);
                    } else {
                        // In this case, we are in a deeper frame from where the arguments object was
                        // created. The type map at the point of the call out from the creation frame
                        // is the one we want to use.
                        // Note: this relies on the assumption that we abort on setting an element of
                        // an arguments object in any deeper frame.
                        LIns* fip_ins = lir->insLoad(LIR_ldp, lirbuf->rp, (callDepth-depth)*sizeof(FrameInfo*));
                        typemap_ins = lir->ins2(LIR_add, fip_ins, INS_CONST(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSTraceType)));
                    }

                    LIns* typep_ins = lir->ins2(LIR_piadd, typemap_ins,
                                                lir->ins_u2p(lir->ins2(LIR_mul,
                                                                       idx_ins,
                                                                       INS_CONST(sizeof(JSTraceType)))));
                    LIns* type_ins = lir->insLoad(LIR_ldcb, typep_ins, 0);
                    guard(true,
                          addName(lir->ins2(LIR_eq, type_ins, lir->insImm(type)),
                                  "guard(type-stable upvar)"),
                          BRANCH_EXIT);

                    // Read the value out of the native stack area.
                    guard(true, lir->ins2(LIR_ult, idx_ins, INS_CONST(afp->argc)),
                          snapshot(BRANCH_EXIT));
                    size_t stackOffset = nativespOffset(&afp->argv[0]);
                    LIns* args_addr_ins = lir->ins2(LIR_piadd, lirbuf->sp, INS_CONSTWORD(stackOffset));
                    LIns* argi_addr_ins = lir->ins2(LIR_piadd,
                                                    args_addr_ins,
                                                    lir->ins_u2p(lir->ins2(LIR_mul,
                                                                           idx_ins,
                                                                           INS_CONST(sizeof(double)))));
                    v_ins = stackLoad(argi_addr_ins, type);
                } else {
                    guard(false, lir->ins2(LIR_ult, idx_ins, INS_CONST(afp->argc)),
                          snapshot(BRANCH_EXIT));
                    v_ins = INS_VOID();
                }
            }
            set(&lval, v_ins);
            return ARECORD_CONTINUE;
        }
        RETURN_STOP_A("can't reach arguments object's frame");
    }

    if (js_IsDenseArray(obj)) {
        // Fast path for dense arrays accessed with an integer index.
        jsval* vp;
        LIns* addr_ins;

        guardDenseArray(obj, obj_ins, BRANCH_EXIT);
        CHECK_STATUS_A(denseArrayElement(lval, idx, vp, v_ins, addr_ins));
        set(&lval, v_ins);
        if (call)
            set(&idx, obj_ins);
        return ARECORD_CONTINUE;
    }

    return InjectStatus(getPropertyByIndex(obj_ins, idx_ins, &lval));
}
/* Functions used by JSOP_SETELEM */

static JSBool FASTCALL
SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, SetPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVALPTR,
                     0, 0)

static JSBool FASTCALL
InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, jsval val)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    jsid id;
    if (!RootedStringToId(cx, namep, &id) ||
        !obj->defineProperty(cx, id, val, NULL, NULL, JSPROP_ENUMERATE)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, JSVAL,
                     0, 0)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, jsval* idvalp, jsval* rvalp, bool init)
{
    CHECK_STATUS(primitiveToStringInPlace(idvalp));

    LIns* rval_ins = box_jsval(*rvalp, get(rvalp));

    enterDeepBailCall();

    LIns* ok_ins;
    LIns* idvalp_ins = addName(addr(idvalp), "idvalp");
    if (init) {
        LIns* args[] = {rval_ins, idvalp_ins, obj_ins, cx_ins};
        ok_ins = lir->insCall(&InitPropertyByName_ci, args);
    } else {
        // See note in getPropertyByName about vp.
        LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
        lir->insStorei(rval_ins, vp_ins, 0);
        LIns* args[] = {vp_ins, idvalp_ins, obj_ins, cx_ins};
        ok_ins = lir->insCall(&SetPropertyByName_ci, args);
    }
    pendingGuardCondition = ok_ins;

    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
static JSBool FASTCALL
SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval* vp)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    JSAutoTempIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, JSVALPTR, 0, 0)

static JSBool FASTCALL
InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, jsval val)
{
    js_LeaveTraceIfGlobalObject(cx, obj);

    JSAutoTempIdRooter idr(cx);
    if (!js_Int32ToId(cx, index, idr.addr()) ||
        !obj->defineProperty(cx, idr.id(), val, NULL, NULL, JSPROP_ENUMERATE)) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return cx->interpState->builtinStatus == 0;
}
JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, JSVAL, 0, 0)
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, jsval* rvalp, bool init)
{
    index_ins = makeNumberInt32(index_ins);

    LIns* rval_ins = box_jsval(*rvalp, get(rvalp));

    enterDeepBailCall();

    LIns* ok_ins;
    if (init) {
        LIns* args[] = {rval_ins, index_ins, obj_ins, cx_ins};
        ok_ins = lir->insCall(&InitPropertyByIndex_ci, args);
    } else {
        // See note in getPropertyByName about vp.
        LIns* vp_ins = addName(lir->insAlloc(sizeof(jsval)), "vp");
        lir->insStorei(rval_ins, vp_ins, 0);
        LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
        ok_ins = lir->insCall(&SetPropertyByIndex_ci, args);
    }
    pendingGuardCondition = ok_ins;

    leaveDeepBailCall();
    return RECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
{
    jsval& v = stackval(v_spindex);
    jsval& idx = stackval(idx_spindex);
    jsval& lval = stackval(lval_spindex);

    if (JSVAL_IS_PRIMITIVE(lval))
        RETURN_STOP_A("left JSOP_SETELEM operand is not an object");
    RETURN_IF_XML_A(lval);

    JSObject* obj = JSVAL_TO_OBJECT(lval);
    LIns* obj_ins = get(&lval);
    LIns* idx_ins = get(&idx);
    LIns* v_ins = get(&v);

    if (JS_InstanceOf(cx, obj, &js_ArgumentsClass, NULL))
        RETURN_STOP_A("can't trace setting elements of the |arguments| object");

    if (!JSVAL_IS_INT(idx)) {
        if (!JSVAL_IS_PRIMITIVE(idx))
            RETURN_STOP_A("non-primitive index");
        CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
                                               *cx->fp->regs->pc == JSOP_INITELEM));
    } else if (JSVAL_TO_INT(idx) < 0 || !OBJ_IS_DENSE_ARRAY(cx, obj)) {
        CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
                                                *cx->fp->regs->pc == JSOP_INITELEM));
    } else {
        // Fast path: assigning to element of dense array.

        // Make sure the array is actually dense.
        if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT))
            return ARECORD_STOP;

        // The index was on the stack and is therefore a LIR float. Force it to
        // be an integer.
        idx_ins = makeNumberInt32(idx_ins);

        // Box the value so we can use one builtin instead of having to add one
        // builtin for every storage type. Special case for integers though,
        // since they are so common.
        LIns* res_ins;
        LIns* args[] = { NULL, idx_ins, obj_ins, cx_ins };
        if (isPromoteInt(v_ins)) {
            args[0] = ::demote(lir, v_ins);
            res_ins = lir->insCall(&js_Array_dense_setelem_int_ci, args);
        } else if (isNumber(v)) {
            args[0] = v_ins;
            res_ins = lir->insCall(&js_Array_dense_setelem_double_ci, args);
        } else {
            LIns* args[] = { box_jsval(v, v_ins), idx_ins, obj_ins, cx_ins };
            res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
        }
        guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
    }

    jsbytecode* pc = cx->fp->regs->pc;
    if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
        set(&lval, v_ins);

    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETELEM()
{
    return setElem(-3, -2, -1);
}
11880 TraceRecorder::record_JSOP_CALLNAME()
11882 JSObject
* obj
= cx
->fp
->scopeChain
;
11883 if (obj
!= globalObj
) {
11887 CHECK_STATUS_A(scopeChainProp(obj
, vp
, ins
, nr
));
11889 stack(1, INS_CONSTOBJ(globalObj
));
11890 return ARECORD_CONTINUE
;
11893 LIns
* obj_ins
= scopeChain();
11897 CHECK_STATUS_A(test_property_cache(obj
, obj_ins
, obj2
, pcval
));
11899 if (PCVAL_IS_NULL(pcval
) || !PCVAL_IS_OBJECT(pcval
))
11900 RETURN_STOP_A("callee is not an object");
11902 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval
)));
11904 stack(0, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval
)));
11906 return ARECORD_CONTINUE
;

JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarArgOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, 0)
JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarVarOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, 0)
JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarStackOnTrace, CONTEXT, UINT32, INT32, UINT32,
                     DOUBLEPTR, 0, 0)

/*
 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
 * value. NULL is returned only on a can't-happen condition with an invalid
 * typemap. The value of the upvar is returned as v.
 */
JS_REQUIRES_STACK LIns*
TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, jsval& v)
{
    /*
     * Try to find the upvar in the current trace's tracker. For &vr to be
     * the address of the jsval found in js_GetUpvar, we must initialize
     * vr directly with the result, so it is a reference to the same location.
     * It does not work to assign the result to v, because v is an already
     * existing reference that points to something else.
     */
    uint32 cookie = uva->vector[index];
    jsval& vr = js_GetUpvar(cx, script->staticLevel, cookie);
    v = vr;

    if (known(&vr))
        return get(&vr);

    /*
     * The upvar is not in the current trace, so get the upvar value exactly as
     * the interpreter does and unbox.
     */
    uint32 level = script->staticLevel - UPVAR_FRAME_SKIP(cookie);
    uint32 cookieSlot = UPVAR_FRAME_SLOT(cookie);
    JSStackFrame* fp = cx->display[level];
    const CallInfo* ci;
    int32 slot;
    if (!fp->fun) {
        ci = &GetUpvarStackOnTrace_ci;
        slot = cookieSlot;
    } else if (cookieSlot < fp->fun->nargs) {
        ci = &GetUpvarArgOnTrace_ci;
        slot = cookieSlot;
    } else if (cookieSlot == CALLEE_UPVAR_SLOT) {
        ci = &GetUpvarArgOnTrace_ci;
        slot = -2;
    } else {
        ci = &GetUpvarVarOnTrace_ci;
        slot = cookieSlot - fp->fun->nargs;
    }

    LIns* outp = lir->insAlloc(sizeof(double));
    LIns* args[] = { outp,
                     INS_CONST(callDepth),
                     INS_CONST(slot),
                     INS_CONST(level),
                     cx_ins };
    LIns* call_ins = lir->insCall(ci, args);
    JSTraceType type = getCoercedType(v);
    guard(true,
          addName(lir->ins2(LIR_eq, call_ins, lir->insImm(type)),
                  "guard(type-stable upvar)"),
          BRANCH_EXIT);
    return stackLoad(outp, type);
}
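
/*
 * Example: in
 *
 *   function outer() {
 *       var x = 1;
 *       function inner() { return x; }   // x is an upvar of inner
 *   }
 *
 * a trace through inner reads x either from the tracker (when outer's frame
 * is part of the same trace) or via one of the GetUpvar*OnTrace builtins
 * above, which replay the interpreter's display-based lookup.
 */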

/*
 * Generate LIR to load a value from the native stack. This method ensures that
 * the correct LIR load operator is used.
 */
LIns* TraceRecorder::stackLoad(LIns* base, uint8 type)
{
    LOpcode loadOp;
    switch (type) {
      case TT_DOUBLE:
        loadOp = LIR_ldq;
        break;
      case TT_OBJECT:
      case TT_STRING:
      case TT_FUNCTION:
      case TT_NULL:
        loadOp = LIR_ldp;
        break;
      case TT_INT32:
      case TT_PSEUDOBOOLEAN:
        loadOp = LIR_ld;
        break;
      case TT_JSVAL:
      default:
        JS_NOT_REACHED("found jsval type in an upvar type map entry");
        return NULL;
    }

    LIns* result = lir->insLoad(loadOp, base, 0);
    if (type == TT_INT32)
        result = lir->ins1(LIR_i2f, result);
    return result;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETUPVAR()
{
    uintN index = GET_UINT16(cx->fp->regs->pc);
    JSScript *script = cx->fp->script;
    JSUpvarArray* uva = script->upvars();
    JS_ASSERT(index < uva->length);

    jsval v;
    LIns* upvar_ins = upvar(script, uva, index, v);
    if (!upvar_ins)
        return ARECORD_STOP;
    stack(0, upvar_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLUPVAR()
{
    CHECK_STATUS_A(record_JSOP_GETUPVAR());
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}
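
/*
 * Note: the CALL* variants (CALLUPVAR here, CALLDSLOT below) record through
 * the corresponding GET* recorder for the callee and then push a null |this|
 * in the next slot, matching the interpreter's callee/this stack pair.
 */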

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETDSLOT()
{
    JSObject* callee = cx->fp->calleeObject();
    LIns* callee_ins = get(&cx->fp->argv[-2]);

    unsigned index = GET_UINT16(cx->fp->regs->pc);
    LIns* dslots_ins = NULL;
    LIns* v_ins = stobj_get_dslot(callee_ins, index, dslots_ins);

    stack(0, unbox_jsval(callee->dslots[index], v_ins, snapshot(BRANCH_EXIT)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLDSLOT()
{
    CHECK_STATUS_A(record_JSOP_GETDSLOT());
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::guardCallee(jsval& callee)
{
    JS_ASSERT(VALUE_IS_FUNCTION(cx, callee));

    VMSideExit* branchExit = snapshot(BRANCH_EXIT);
    JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
    LIns* callee_ins = get(&callee);

    treeInfo->gcthings.addUnique(callee);
    guard(true,
          lir->ins2(LIR_peq,
                    stobj_get_private(callee_ins),
                    INS_CONSTPTR(callee_obj->getPrivate())),
          branchExit);
    guard(true,
          lir->ins2(LIR_peq,
                    stobj_get_parent(callee_ins),
                    INS_CONSTOBJ(OBJ_GET_PARENT(cx, callee_obj))),
          branchExit);
    return RECORD_CONTINUE;
}
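
/*
 * Note: the two guards above pin down the function object's private slot
 * (the JSFunction, hence the script) and its parent (the scope). Any callee
 * satisfying both behaves identically on trace, which is what makes inlining
 * the call safe without guarding on object identity itself.
 */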

/*
 * Prepare the given |arguments| object to be accessed on trace. If the return
 * value is non-NULL, then the given |arguments| object refers to a frame on
 * the current trace and is guaranteed to refer to the same frame on trace for
 * all later executions.
 */
JS_REQUIRES_STACK JSStackFrame*
TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp)
{
    JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_ArgumentsClass);

    JSStackFrame *afp = frameIfInRange(obj, depthp);
    if (!afp)
        return NULL;

    VMSideExit *exit = snapshot(MISMATCH_EXIT);
    guardClass(obj, obj_ins, &js_ArgumentsClass, exit);

    LIns* args_ins = get(&afp->argsobj);
    LIns* cmp = lir->ins2(LIR_peq, args_ins, obj_ins);
    lir->insGuard(LIR_xf, cmp, createGuardRecord(exit));
    return afp;
}

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
{
    /*
     * The function's identity (JSFunction and therefore JSScript) is guarded,
     * so we can optimize for the empty script singleton right away. No need to
     * worry about crossing globals or relocating argv, even, in this case!
     *
     * Note that the interpreter shortcuts empty-script call and construct too,
     * and does not call any TR::record_*CallComplete hook.
     */
    if (fun->u.i.script->isEmpty()) {
        LIns* rval_ins = constructing ? stack(-1 - argc) : INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
        stack(-2 - argc, rval_ins);
        return RECORD_CONTINUE;
    }

    if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
        RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes");

    JSStackFrame* fp = cx->fp;

    // TODO: track the copying via the tracker...
    if (argc < fun->nargs &&
        jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
        RETURN_STOP("can't trace calls with too few args requiring argv move");
    }

    // Generate a type map for the outgoing frame and stash it in the LIR
    unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
    FrameInfo* fi = (FrameInfo*)
        tempAlloc().alloc(sizeof(FrameInfo) + stackSlots * sizeof(JSTraceType));
    JSTraceType* typemap = (JSTraceType*)(fi + 1);

    DetermineTypesVisitor detVisitor(*this, typemap);
    VisitStackSlots(detVisitor, cx, 0);

    JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);

    treeInfo->gcthings.addUnique(fval);
    fi->block = fp->blockChain;
    if (fp->blockChain)
        treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
    fi->pc = fp->regs->pc;
    fi->imacpc = fp->imacpc;
    fi->spdist = fp->regs->sp - fp->slots;
    fi->set_argc(argc, constructing);
    fi->callerHeight = stackSlots - (2 + argc);
    fi->callerArgc = fp->argc;

    if (callDepth >= treeInfo->maxCallDepth)
        treeInfo->maxCallDepth = callDepth + 1;

    fi = traceMonitor->frameCache->memoize(fi);
    if (!fi)
        RETURN_STOP("out of memory");
    lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));

#if defined JS_JIT_SPEW
    debug_only_printf(LC_TMTracer, "iFC frameinfo=%p, stack=%d, map=", (void*)fi,
                      fi->callerHeight);
    for (unsigned i = 0; i < fi->callerHeight; i++)
        debug_only_printf(LC_TMTracer, "%c", typeChar[fi->get_typemap()[i]]);
    debug_only_print0(LC_TMTracer, "\n");
#endif

    atoms = fun->u.i.script->atomMap.vector;
    return RECORD_CONTINUE;
}
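
/*
 * Note: the FrameInfo built above is the complete recipe for synthesizing the
 * callee's interpreter frame should the trace exit inside the call; memoizing
 * it in the frame cache lets many call sites share one immutable copy, and
 * storing its pointer on the rp stack keys frame reconstruction by call depth.
 */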

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALL()
{
    uintN argc = GET_ARGC(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(argc + 2);
    return InjectStatus(functionCall(argc,
                                     (cx->fp->imacpc && *cx->fp->imacpc == JSOP_APPLY)
                                     ? JSOP_APPLY
                                     : JSOp(*cx->fp->regs->pc)));
}

static jsbytecode* apply_imacro_table[] = {
    apply_imacros.apply0,
    apply_imacros.apply1,
    apply_imacros.apply2,
    apply_imacros.apply3,
    apply_imacros.apply4,
    apply_imacros.apply5,
    apply_imacros.apply6,
    apply_imacros.apply7,
    apply_imacros.apply8
};

static jsbytecode* call_imacro_table[] = {
    apply_imacros.call0,
    apply_imacros.call1,
    apply_imacros.call2,
    apply_imacros.call3,
    apply_imacros.call4,
    apply_imacros.call5,
    apply_imacros.call6,
    apply_imacros.call7,
    apply_imacros.call8
};
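
/*
 * Note: each table entry is an imacro specialized to an exact argument count.
 * record_JSOP_APPLY below indexes apply_imacro_table by the unpacked array's
 * length and call_imacro_table by the call's argument count, and falls off
 * trace for counts past eight.
 */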

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_APPLY()
{
    JSStackFrame* fp = cx->fp;
    jsbytecode *pc = fp->regs->pc;
    uintN argc = GET_ARGC(pc);
    cx->fp->assertValidStackDepth(argc + 2);

    jsval* vp = fp->regs->sp - (argc + 2);
    jsuint length = 0;
    JSObject* aobj = NULL;
    LIns* aobj_ins = NULL;

    JS_ASSERT(!fp->imacpc);

    if (!VALUE_IS_FUNCTION(cx, vp[0]))
        return record_JSOP_CALL();
    RETURN_IF_XML_A(vp[0]);

    JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
    if (FUN_INTERPRETED(fun))
        return record_JSOP_CALL();

    bool apply = (JSFastNative)fun->u.n.native == js_fun_apply;
    if (!apply && (JSFastNative)fun->u.n.native != js_fun_call)
        return record_JSOP_CALL();

    /*
     * We don't trace apply and call with a primitive 'this', which is the
     * first positional parameter.
     */
    if (argc > 0 && !JSVAL_IS_OBJECT(vp[2]))
        return record_JSOP_CALL();

    /*
     * Guard on the identity of this, which is the function we are applying.
     */
    if (!VALUE_IS_FUNCTION(cx, vp[1]))
        RETURN_STOP_A("callee is not a function");
    CHECK_STATUS_A(guardCallee(vp[1]));

    if (apply && argc >= 2) {
        if (argc != 2)
            RETURN_STOP_A("apply with excess arguments");
        if (JSVAL_IS_PRIMITIVE(vp[3]))
            RETURN_STOP_A("arguments parameter of apply is primitive");
        aobj = JSVAL_TO_OBJECT(vp[3]);
        aobj_ins = get(&vp[3]);

        /*
         * We trace dense arrays and arguments objects. The code we generate
         * for apply uses imacros to handle a specific number of arguments.
         */
        if (OBJ_IS_DENSE_ARRAY(cx, aobj)) {
            guardDenseArray(aobj, aobj_ins);
            length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
            guard(true,
                  lir->ins2i(LIR_eq,
                             p2i(stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH)),
                             length),
                  BRANCH_EXIT);
        } else if (OBJ_GET_CLASS(cx, aobj) == &js_ArgumentsClass) {
            unsigned depth;
            JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
            if (!afp)
                RETURN_STOP_A("can't reach arguments object's frame");
            length = afp->argc;
        } else {
            RETURN_STOP_A("arguments parameter of apply is not a dense array or arguments object");
        }

        if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
            RETURN_STOP_A("too many arguments to apply");

        return InjectStatus(call_imacro(apply_imacro_table[length]));
    }

    if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
        RETURN_STOP_A("too many arguments to call");

    return InjectStatus(call_imacro(call_imacro_table[argc]));
}

static JSBool FASTCALL
CatchStopIteration_tn(JSContext* cx, JSBool ok, jsval* vp)
{
    if (!ok && cx->throwing && js_ValueIsStopIteration(cx->exception)) {
        cx->throwing = JS_FALSE;
        cx->exception = JSVAL_VOID;
        *vp = JSVAL_HOLE;
        return JS_TRUE;
    }
    return ok;
}

JS_DEFINE_TRCINFO_1(CatchStopIteration_tn,
    (3, (static, BOOL, CatchStopIteration_tn, CONTEXT, BOOL, JSVALPTR, 0, 0)))

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_NativeCallComplete()
{
    if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
        return ARECORD_CONTINUE;

    jsbytecode* pc = cx->fp->regs->pc;

    JS_ASSERT(pendingSpecializedNative);
    JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW || *pc == JSOP_SETPROP);

    jsval& v = stackval(-1);
    LIns* v_ins = get(&v);

    /*
     * At this point the generated code has already called the native function
     * and we can no longer fail back to the original pc location (JSOP_CALL)
     * because that would cause the interpreter to re-execute the native
     * function, which might have side effects.
     *
     * Instead, the snapshot() call below sees that we are currently parked on
     * a traceable native's JSOP_CALL instruction, and it will advance the pc
     * to restore by the length of the current opcode. If the native's return
     * type is jsval, snapshot() will also indicate in the type map that the
     * element on top of the stack is a boxed value which doesn't need to be
     * boxed if the type guard generated by unbox_jsval() fails.
     */

    if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
        /* Keep cx->bailExit null when it's invalid. */
        lir->insStorei(INS_NULL(), cx_ins, (int) offsetof(JSContext, bailExit));

        LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
        if (pendingSpecializedNative == &generatedSpecializedNative) {
            LIns* ok_ins = v_ins;

            /*
             * Custom implementations of Iterator.next() throw a StopIteration exception.
             * Catch and clear it and set the return value to JSVAL_HOLE in this case.
             */
            if (uintptr_t(pc - nextiter_imacros.custom_iter_next) <
                sizeof(nextiter_imacros.custom_iter_next)) {
                LIns* args[] = { native_rval_ins, ok_ins, cx_ins }; /* reverse order */
                ok_ins = lir->insCall(&CatchStopIteration_tn_ci, args);
            }

            /*
             * If we run a generic traceable native, the return value is in the argument
             * vector for native function calls. The actual return value of the native is a JSBool
             * indicating the error status.
             */
            v_ins = lir->insLoad(LIR_ldp, native_rval_ins, 0);
            if (*pc == JSOP_NEW) {
                LIns* x = lir->ins_peq0(lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)));
                x = lir->ins_choose(x, v_ins, INS_CONSTWORD(0), avmplus::AvmCore::use_cmov());
                v_ins = lir->ins_choose(lir->ins_peq0(x), newobj_ins, x, avmplus::AvmCore::use_cmov());
            }
            set(&v, v_ins);

            propagateFailureToBuiltinStatus(ok_ins, status);
        }
        guard(true, lir->ins_eq0(status), STATUS_EXIT);
    }

    if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
        /*
         * If we side exit on the unboxing code due to a type change, make sure that the boxed
         * value is actually currently associated with that location, and that we are talking
         * about the top of the stack here, which is where we expected boxed values.
         */
        JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins);
        set(&v, unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT)));
    } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
        /* Already added i2f in functionCall. */
        JS_ASSERT(JSVAL_IS_NUMBER(v));
    } else {
        /* Convert the result to double if the builtin returns int32. */
        if (JSVAL_IS_NUMBER(v) &&
            (pendingSpecializedNative->builtin->_argtypes & ARGSIZE_MASK_ANY) == ARGSIZE_I) {
            set(&v, lir->ins1(LIR_i2f, v_ins));
        }
    }

    // We'll null pendingSpecializedNative in monitorRecording, on the next op
    // cycle. There must be a next op since the stack is non-empty.
    return ARECORD_CONTINUE;
}
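
/*
 * Note: FAIL_STATUS natives report errors only through
 * InterpState::builtinStatus, so the code above folds the native's JSBool
 * result into that word (propagateFailureToBuiltinStatus) and then guards
 * that the whole status is zero, taking a STATUS_EXIT otherwise.
 */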

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::name(jsval*& vp, LIns*& ins, NameResult& nr)
{
    JSObject* obj = cx->fp->scopeChain;
    if (obj != globalObj)
        return scopeChainProp(obj, vp, ins, nr);

    /* Can't use prop here, because we don't want unboxing from global slots. */
    LIns* obj_ins = scopeChain();
    uint32 slot;

    JSObject* obj2;
    jsuword pcval;

    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    /* Abort if property doesn't exist (interpreter will report an error.) */
    if (PCVAL_IS_NULL(pcval))
        RETURN_STOP_A("named property not found");

    /* Insist on obj being the directly addressed object. */
    if (obj2 != obj)
        RETURN_STOP_A("name() hit prototype chain");

    /* Don't trace getter or setter calls, our caller wants a direct slot. */
    if (PCVAL_IS_SPROP(pcval)) {
        JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
        if (!isValidSlot(OBJ_SCOPE(obj), sprop))
            RETURN_STOP_A("name() not accessing a valid slot");
        slot = sprop->slot;
    } else {
        if (!PCVAL_IS_SLOT(pcval))
            RETURN_STOP_A("PCE is not a slot");
        slot = PCVAL_TO_SLOT(pcval);
    }

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    vp = &STOBJ_GET_SLOT(obj, slot);
    ins = get(vp);
    nr.tracked = true;
    return ARECORD_CONTINUE;
}

static JSObject* FASTCALL
MethodReadBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
{
    JSAutoTempValueRooter tvr(cx, funobj);

    if (!OBJ_SCOPE(obj)->methodReadBarrier(cx, sprop, tvr.addr()))
        return NULL;
    JS_ASSERT(VALUE_IS_FUNCTION(cx, tvr.value()));
    return JSVAL_TO_OBJECT(tvr.value());
}
JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT,
                     0, 0)

/*
 * Get a property. The current opcode has JOF_ATOM.
 *
 * There are two modes. The caller must pass nonnull pointers for either outp
 * or both slotp and v_insp. In the latter case, we require a plain old
 * property with a slot; if the property turns out to be anything else, abort
 * tracing (rather than emit a call to a native getter or GetAnyProperty).
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, jsval *outp)
{
    JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));

    /*
     * Can't specialize to assert obj != global, must guard to avoid aliasing
     * stale homes of stacked global variables.
     */
    CHECK_STATUS_A(guardNotGlobalObject(obj, obj_ins));

    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    JSObject* obj2;
    jsuword pcval;
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    /* Check for non-existent property reference, which results in undefined. */
    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    if (PCVAL_IS_NULL(pcval)) {
        if (slotp)
            RETURN_STOP_A("property not found");

        /*
         * We could specialize to guard on just JSClass.getProperty, but a mere
         * class guard is simpler and slightly faster.
         */
        if (OBJ_GET_CLASS(cx, obj)->getProperty != JS_PropertyStub) {
            RETURN_STOP_A("can't trace through access to undefined property if "
                          "JSClass.getProperty hook isn't stubbed");
        }
        guardClass(obj, obj_ins, OBJ_GET_CLASS(cx, obj), snapshot(MISMATCH_EXIT));

        /*
         * This trace will be valid as long as neither the object nor any object
         * on its prototype chain changes shape.
         *
         * FIXME: This loop can become a single shape guard once bug 497789 has
         * been fixed.
         */
        VMSideExit* exit = snapshot(BRANCH_EXIT);
        do {
            LIns* map_ins = map(obj_ins);
            LIns* ops_ins;
            if (map_is_native(obj->map, map_ins, ops_ins)) {
                CHECK_STATUS_A(InjectStatus(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)",
                                                       map_ins, exit)));
            } else if (!guardDenseArray(obj, obj_ins, exit)) {
                RETURN_STOP_A("non-native object involved in undefined property access");
            }
        } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));

        set(outp, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)), true);
        return ARECORD_CONTINUE;
    }

    uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
    JS_ASSERT(!(cs.format & JOF_SET));

    JSScopeProperty* sprop;
    uint32 slot;
    bool isMethod;

    if (PCVAL_IS_SPROP(pcval)) {
        sprop = PCVAL_TO_SPROP(pcval);
        JS_ASSERT(OBJ_SCOPE(obj2)->hasProperty(sprop));

        if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
            RETURN_STOP_A("non-stub setter");
        if (setflags && (sprop->attrs & JSPROP_READONLY))
            RETURN_STOP_A("writing to a readonly property");
        if (!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop)) {
            if (slotp)
                RETURN_STOP_A("can't trace non-stub getter for this opcode");
            if (sprop->attrs & JSPROP_GETTER)
                RETURN_STOP_A("script getter");
            if (sprop->slot == SPROP_INVALID_SLOT)
                return InjectStatus(getPropertyWithNativeGetter(obj_ins, sprop, outp));
            return InjectStatus(getPropertyById(obj_ins, outp));
        }
        if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj2)))
            RETURN_STOP_A("no valid slot");
        slot = sprop->slot;
        isMethod = sprop->isMethod();
        JS_ASSERT_IF(isMethod, OBJ_SCOPE(obj2)->hasMethodBarrier());
    } else {
        if (!PCVAL_IS_SLOT(pcval))
            RETURN_STOP_A("PCE is not a slot");
        slot = PCVAL_TO_SLOT(pcval);
        sprop = NULL;
        isMethod = false;
    }

    /* We have a slot. Check whether it is direct or in a prototype. */
    if (obj2 != obj) {
        if (setflags)
            RETURN_STOP_A("JOF_INCDEC|JOF_FOR opcode hit prototype chain");

        /*
         * We're getting a proto-property. Walk up the prototype chain emitting
         * proto slot loads, updating obj as we go, leaving obj set to obj2 with
         * obj_ins the last proto-load.
         */
        do {
            obj_ins = stobj_get_proto(obj_ins);
            obj = STOBJ_GET_PROTO(obj);
        } while (obj != obj2);
    }

    LIns* dslots_ins = NULL;
    LIns* v_ins = unbox_jsval(STOBJ_GET_SLOT(obj, slot),
                              stobj_get_slot(obj_ins, slot, dslots_ins),
                              snapshot(BRANCH_EXIT));

    /*
     * Joined function object stored as a method must be cloned when extracted
     * as a property value other than a callee. Note that shapes cover method
     * value as well as other property attributes and order, so this condition
     * is trace-invariant.
     *
     * We do not impose the method read barrier if in an imacro, assuming any
     * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
     * leaked to the calling script.
     */
    if (isMethod && !cx->fp->imacpc) {
        enterDeepBailCall();
        LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        v_ins = lir->insCall(&MethodReadBarrier_ci, args);
        leaveDeepBailCall();
    }

    if (slotp) {
        *slotp = slot;
        *v_insp = v_ins;
    }
    if (outp)
        set(outp, v_ins, true);
    return ARECORD_CONTINUE;
}
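
/*
 * Example of the method read barrier above: after
 *
 *   var obj = { m: function () { return 0; } };
 *   var f = obj.m;            // extracts the method as a plain value
 *
 * the get of obj.m outside a callee position must clone the joined function
 * object; on trace the MethodReadBarrier builtin performs that clone for us.
 */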

JS_REQUIRES_STACK RecordingStatus
TraceRecorder::denseArrayElement(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ins,
                                 LIns*& addr_ins)
{
    JS_ASSERT(JSVAL_IS_OBJECT(oval) && JSVAL_IS_INT(ival));

    JSObject* obj = JSVAL_TO_OBJECT(oval);
    LIns* obj_ins = get(&oval);
    jsint idx = JSVAL_TO_INT(ival);
    LIns* idx_ins = makeNumberInt32(get(&ival));
    LIns* pidx_ins = lir->ins_u2p(idx_ins);

    VMSideExit* exit = snapshot(BRANCH_EXIT);

    /* check that the index is within bounds */
    LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    jsuint capacity = js_DenseArrayCapacity(obj);
    bool within = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
    if (!within) {
        /* If idx < 0, stay on trace (and read value as undefined, since this is a dense array). */
        LIns* br1 = NULL;
        if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
            /* Only 64-bit machines support large enough arrays for this. */
            JS_ASSERT(sizeof(jsval) == 8);
            br1 = lir->insBranch(LIR_jt,
                                 lir->ins2i(LIR_lt, idx_ins, 0),
                                 NULL);
        }

        /* If not idx < length, stay on trace (and read value as undefined). */
        LIns* br2 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_pult,
                                             pidx_ins,
                                             stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
                                   NULL);

        /* If dslots is NULL, stay on trace (and read value as undefined). */
        LIns* br3 = lir->insBranch(LIR_jt, lir->ins_peq0(dslots_ins), NULL);

        /* If not idx < capacity, stay on trace (and read value as undefined). */
        LIns* br4 = lir->insBranch(LIR_jf,
                                   lir->ins2(LIR_pult,
                                             pidx_ins,
                                             lir->insLoad(LIR_ldp,
                                                          dslots_ins,
                                                          -(int)sizeof(jsval))),
                                   NULL);
        lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
        LIns* label = lir->ins0(LIR_label);
        if (br1)
            br1->setTarget(label);
        br2->setTarget(label);
        br3->setTarget(label);
        br4->setTarget(label);

        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));

        // Return undefined and indicate that we didn't actually read this (addr_ins).
        v_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
        addr_ins = NULL;
        return RECORD_CONTINUE;
    }

    /* Guard against negative index */
    if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
        /* Only 64-bit machines support large enough arrays for this. */
        JS_ASSERT(sizeof(jsval) == 8);
        guard(false,
              lir->ins2i(LIR_lt, idx_ins, 0),
              exit);
    }

    /* Guard array length */
    guard(true,
          lir->ins2(LIR_pult, pidx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
          exit);

    /* dslots must not be NULL */
    guard(false,
          lir->ins_peq0(dslots_ins),
          exit);

    /* Guard array capacity */
    guard(true,
          lir->ins2(LIR_pult,
                    pidx_ins,
                    lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
          exit);

    /* Load the value and guard on its type to unbox it. */
    vp = &obj->dslots[jsuint(idx)];
    addr_ins = lir->ins2(LIR_piadd, dslots_ins,
                         lir->ins2i(LIR_pilsh, pidx_ins, (sizeof(jsval) == 4) ? 2 : 3));
    v_ins = unbox_jsval(*vp, lir->insLoad(LIR_ldp, addr_ins, 0), exit);

    if (JSVAL_IS_SPECIAL(*vp)) {
        /*
         * If we read a hole from the array, convert it to undefined and guard
         * that there are no indexed properties along the prototype chain.
         */
        LIns* br = lir->insBranch(LIR_jf,
                                  lir->ins2i(LIR_eq, v_ins, JSVAL_TO_SPECIAL(JSVAL_HOLE)),
                                  NULL);
        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
        br->setTarget(lir->ins0(LIR_label));

        /* Don't let the hole value escape. Turn it into an undefined. */
        v_ins = lir->ins2i(LIR_and, v_ins, ~(JSVAL_HOLE_FLAG >> JSVAL_TAGBITS));
    }
    return RECORD_CONTINUE;
}
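
/*
 * Note: reading a hole, as in
 *
 *   var a = [1, , 3];
 *   var x = a[1];             // hole: must come out as undefined
 *
 * stays on trace only because the guard above proves that no object on the
 * prototype chain supplies indexed properties a hole would otherwise expose.
 */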

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
{
    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    JS_ASSERT(cs.ndefs == 1);
    return prop(obj, obj_ins, NULL, NULL, &stackval(-cs.nuses));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::getProp(jsval& v)
{
    if (JSVAL_IS_PRIMITIVE(v))
        RETURN_STOP_A("primitive lhs");

    return getProp(JSVAL_TO_OBJECT(v), get(&v));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAME()
{
    jsval* vp;
    LIns* v_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, v_ins, nr));
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DOUBLE()
{
    jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
    stack(0, lir->insImmf(*JSVAL_TO_DOUBLE(v)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STRING()
{
    JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    JS_ASSERT(ATOM_IS_STRING(atom));
    stack(0, INS_ATOM(atom));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ZERO()
{
    stack(0, lir->insImmf(0));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ONE()
{
    stack(0, lir->insImmf(1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NULL()
{
    stack(0, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THIS()
{
    LIns* this_ins;
    CHECK_STATUS_A(getThis(this_ins));
    stack(0, this_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FALSE()
{
    stack(0, lir->insImm(0));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRUE()
{
    stack(0, lir->insImm(1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OR()
{
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_AND()
{
    return ifop();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCH()
{
#ifdef NANOJIT_IA32
    /* Handle tableswitches specially -- prepare a jump table if needed. */
    return tableswitch();
#else
    return InjectStatus(switchop());
#endif
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCH()
{
    return InjectStatus(switchop());
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STRICTEQ()
{
    strictEquality(true, false);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STRICTNE()
{
    strictEquality(false, false);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OBJECT()
{
    JSStackFrame* fp = cx->fp;
    JSScript* script = fp->script;
    unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);

    JSObject* obj;
    obj = script->getObject(index);
    stack(0, INS_CONSTOBJ(obj));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POP()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRAP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETARG()
{
    stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETARG()
{
    arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETLOCAL()
{
    stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETLOCAL()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UINT16()
{
    stack(0, lir->insImmf(GET_UINT16(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWINIT()
{
    JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
    LIns* proto_ins;
    CHECK_STATUS_A(getClassPrototype(key, proto_ins));

    LIns* args[] = { proto_ins, cx_ins };
    const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci : &js_Object_tn_ci;
    LIns* v_ins = lir->insCall(ci, args);
    guard(false, lir->ins_peq0(v_ins), OOM_EXIT);
    stack(0, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDINIT()
{
#ifdef DEBUG
    jsval& v = stackval(-1);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
#endif
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITPROP()
{
    // All the action is in record_SetPropHit.
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITELEM()
{
    return setElem(-3, -2, -1);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFSHARP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_USESHARP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCARG()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCLOCAL()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECARG()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECLOCAL()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGINC()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOCALINC()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGDEC()
{
    return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOCALDEC()
{
    return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IMACOP()
{
    JS_ASSERT(cx->fp->imacpc);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ITER()
{
    jsval& v = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(v))
        RETURN_STOP_A("for-in on a primitive value");
    RETURN_IF_XML_A(v);

    jsuint flags = cx->fp->regs->pc[1];

    if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
        if (flags == JSITER_ENUMERATE)
            return InjectStatus(call_imacro(iter_imacros.for_in));
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return InjectStatus(call_imacro(iter_imacros.for_each));
    } else {
        if (flags == JSITER_ENUMERATE)
            return InjectStatus(call_imacro(iter_imacros.for_in_native));
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return InjectStatus(call_imacro(iter_imacros.for_each_native));
    }
    RETURN_STOP_A("unimplemented JSITER_* flags");
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEXTITER()
{
    jsval& iterobj_val = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(iterobj_val))
        RETURN_STOP_A("for-in on a primitive value");
    RETURN_IF_XML_A(iterobj_val);
    JSObject* iterobj = JSVAL_TO_OBJECT(iterobj_val);
    JSClass* clasp = STOBJ_GET_CLASS(iterobj);
    LIns* iterobj_ins = get(&iterobj_val);
    guardClass(iterobj, iterobj_ins, clasp, snapshot(BRANCH_EXIT));
    if (clasp == &js_IteratorClass || clasp == &js_GeneratorClass)
        return InjectStatus(call_imacro(nextiter_imacros.native_iter_next));
    return InjectStatus(call_imacro(nextiter_imacros.custom_iter_next));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDITER()
{
    LIns* args[] = { stack(-2), cx_ins };
    LIns* ok_ins = lir->insCall(&js_CloseIterator_ci, args);
    guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
    return ARECORD_CONTINUE;
}
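
/*
 * Note: a loop such as
 *
 *   for (var p in o) { ... }
 *
 * records as JSOP_ITER (choosing a for_in imacro above), one JSOP_NEXTITER
 * per iteration (native vs. custom iterator imacro), and JSOP_ENDITER, which
 * calls js_CloseIterator and guards that the close succeeded.
 */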

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORNAME()
{
    jsval* vp;
    LIns* x_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, x_ins, nr));
    if (!nr.tracked)
        RETURN_STOP_A("forname on non-tracked value not supported");
    set(vp, stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORPROP()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORELEM()
{
    return record_JSOP_DUP();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORARG()
{
    return record_JSOP_SETARG();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORLOCAL()
{
    return record_JSOP_SETLOCAL();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_POPN()
{
    return ARECORD_CONTINUE;
}

/*
 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The generated code
 * also ensures that any call objects found have not changed shape.
 *
 *      obj               starting object
 *      obj_ins           LIR instruction representing obj
 *      obj2              end object for traversal
 *      obj2_ins   [out]  LIR instruction representing obj2
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *obj2, LIns *&obj2_ins)
{
    VMSideExit* exit = NULL;
    for (;;) {
        if (obj != globalObj) {
            if (!js_IsCacheableNonGlobalScope(obj))
                RETURN_STOP("scope chain lookup crosses non-cacheable object");

            // We must guard on the shape of all call objects for heavyweight functions
            // that we traverse on the scope chain: if the shape changes, a variable with
            // the same name may have been inserted in the scope chain.
            if (STOBJ_GET_CLASS(obj) == &js_CallClass &&
                JSFUN_HEAVYWEIGHT_TEST(js_GetCallObjectFunction(obj)->flags)) {
                LIns* map_ins = map(obj_ins);
                LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
                                          "obj_shape");
                if (!exit)
                    exit = snapshot(BRANCH_EXIT);
                guard(true,
                      addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard_shape"),
                      exit);
            }
        }

        if (obj == obj2)
            break;

        obj = STOBJ_GET_PARENT(obj);
        if (!obj)
            RETURN_STOP("target object not reached on scope chain");
        obj_ins = stobj_get_parent(obj_ins);
    }

    obj2_ins = obj_ins;
    return RECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDNAME()
{
    JSStackFrame *fp = cx->fp;
    JSObject *obj;

    if (!fp->fun) {
        obj = fp->scopeChain;

        // In global code, fp->scopeChain can only contain blocks whose values
        // are still on the stack. We never use BINDNAME to refer to these.
        while (OBJ_GET_CLASS(cx, obj) == &js_BlockClass) {
            // The block's values are still on the stack.
            JS_ASSERT(obj->getPrivate() == fp);
            obj = OBJ_GET_PARENT(cx, obj);
            // Blocks always have parents.
            JS_ASSERT(obj);
        }

        if (obj != globalObj)
            RETURN_STOP_A("BINDNAME in global code resolved to non-global object");

        /*
         * The trace is specialized to this global object. Furthermore, we know it
         * is the sole 'global' object on the scope chain: we set globalObj to the
         * scope chain element with no parent, and we reached it starting from the
         * function closure or the current scopeChain, so there is nothing inner to
         * it. Therefore this must be the right base object.
         */
        stack(0, INS_CONSTOBJ(obj));
        return ARECORD_CONTINUE;
    }

    // We can't trace BINDNAME in functions that contain direct calls to eval,
    // as they might add bindings which previously-traced references would have
    // to see.
    if (JSFUN_HEAVYWEIGHT_TEST(fp->fun->flags))
        RETURN_STOP_A("BINDNAME in heavyweight function.");

    // We don't have the scope chain on trace, so instead we get a start object
    // that is on the scope chain and doesn't skip the target object (the one
    // that contains the property).
    jsval *callee = &cx->fp->argv[-2];
    obj = STOBJ_GET_PARENT(JSVAL_TO_OBJECT(*callee));
    if (obj == globalObj) {
        stack(0, INS_CONSTOBJ(obj));
        return ARECORD_CONTINUE;
    }
    LIns *obj_ins = stobj_get_parent(get(callee));

    // Find the target object.
    JSAtom *atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    jsid id = ATOM_TO_JSID(atom);
    JSObject *obj2 = js_FindIdentifierBase(cx, fp->scopeChain, id);
    if (obj2 != globalObj && STOBJ_GET_CLASS(obj2) != &js_CallClass)
        RETURN_STOP_A("BINDNAME on non-global, non-call object");

    // Generate LIR to get to the target object from the start object.
    LIns *obj2_ins;
    CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));

    // If |obj2| is the global object, we can refer to it directly instead of walking up
    // the scope chain. There may still be guards on intervening call objects.
    stack(0, obj2 == globalObj ? INS_CONSTOBJ(obj2) : obj2_ins);
    return ARECORD_CONTINUE;
}
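
/*
 * Example: in
 *
 *   var g = 1;
 *   function f() { g = 2; }   // JSOP_BINDNAME 'g', then JSOP_SETNAME
 *
 * f is lightweight, so the recorder above starts at the callee's parent and
 * walks (and shape-guards) the scope chain to the object that owns 'g';
 * here that is the global, which the trace can reference directly.
 */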

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETNAME()
{
    jsval& l = stackval(-2);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));

    /*
     * Trace only cases that are global code, in lightweight functions
     * scoped by the global object only, or in call objects.
     */
    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (OBJ_GET_CLASS(cx, obj) == &js_CallClass)
        return ARECORD_CONTINUE;
    if (obj != cx->fp->scopeChain || obj != globalObj)
        RETURN_STOP_A("JSOP_SETNAME left operand is not the global object");

    // The rest of the work is in record_SetPropHit.
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THROW()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IN()
{
    jsval& rval = stackval(-1);
    jsval& lval = stackval(-2);

    if (JSVAL_IS_PRIMITIVE(rval))
        RETURN_STOP_A("JSOP_IN on non-object right operand");
    JSObject* obj = JSVAL_TO_OBJECT(rval);
    LIns* obj_ins = get(&rval);

    jsid id;
    LIns* x;
    if (JSVAL_IS_INT(lval)) {
        id = INT_JSVAL_TO_JSID(lval);
        LIns* args[] = { makeNumberInt32(get(&lval)), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
    } else if (JSVAL_IS_STRING(lval)) {
        if (!js_ValueToStringId(cx, lval, &id))
            RETURN_ERROR_A("left operand of JSOP_IN didn't convert to a string-id");
        LIns* args[] = { get(&lval), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedProperty_ci, args);
    } else {
        RETURN_STOP_A("string or integer expected");
    }

    guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_SPECIAL(JSVAL_VOID)), OOM_EXIT);
    x = lir->ins2i(LIR_eq, x, 1);

    JSTraceMonitor &localtm = *traceMonitor;
    JSContext *localcx = cx;

    JSObject* obj2;
    JSProperty* prop;
    bool ok = obj->lookupProperty(cx, id, &obj2, &prop);

    /* lookupProperty can reenter the interpreter and kill |this|. */
    if (!localtm.recorder) {
        if (prop)
            obj2->dropProperty(localcx, prop);
        return ARECORD_STOP;
    }

    if (!ok)
        RETURN_ERROR_A("obj->lookupProperty failed in JSOP_IN");
    bool cond = prop != NULL;
    if (prop)
        obj2->dropProperty(cx, prop);

    /*
     * The interpreter fuses comparisons and the following branch, so we have
     * to do that here as well.
     */
    fuseIf(cx->fp->regs->pc + 1, cond, x);

    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    set(&lval, x);
    return ARECORD_CONTINUE;
}

static JSBool FASTCALL
HasInstance(JSContext* cx, JSObject* ctor, jsval val)
{
    JSBool result = JS_FALSE;
    if (!ctor->map->ops->hasInstance(cx, ctor, val, &result))
        js_SetBuiltinError(cx);
    return result;
}
JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstance, CONTEXT, OBJECT, JSVAL, 0, 0)

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INSTANCEOF()
{
    // If the rhs isn't an object, we are headed for a TypeError.
    jsval& ctor = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(ctor))
        RETURN_STOP_A("non-object on rhs of instanceof");

    jsval& val = stackval(-2);
    LIns* val_ins = box_jsval(val, get(&val));

    enterDeepBailCall();
    LIns* args[] = {val_ins, get(&ctor), cx_ins};
    stack(-2, lir->insCall(&HasInstance_ci, args));
    LIns* status_ins = lir->insLoad(LIR_ld,
                                    lirbuf->state,
                                    offsetof(InterpState, builtinStatus));
    pendingGuardCondition = lir->ins_eq0(status_ins);
    leaveDeepBailCall();

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEBUGGER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EXCEPTION()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LINENO()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CONDSWITCH()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASE()
{
    strictEquality(true, true);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULT()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMELEM()
{
    /*
     * To quote from jsops.cpp's JSOP_ENUMELEM case:
     * Funky: the value to set is under the [obj, id] pair.
     */
    return setElem(-2, -1, -3);
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN_FC()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFCONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFVAR()
{
    return ARECORD_STOP;
}

jsatomid
TraceRecorder::getFullIndex(ptrdiff_t pcoff)
{
    jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff);
    index += atoms - cx->fp->script->atomMap.vector;
    return index;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA()
{
    JSFunction* fun;
    fun = cx->fp->script->getFunction(getFullIndex());

    /*
     * Emit code to clone a null closure parented by this recorder's global
     * object, in order to preserve function object evaluation rules observable
     * via identity and mutation. But don't clone if our result is consumed by
     * JSOP_SETMETHOD or JSOP_INITMETHOD, since we optimize away the clone for
     * these combinations and clone only if the "method value" escapes.
     *
     * See jsops.cpp, the JSOP_LAMBDA null closure case. The JSOP_SETMETHOD and
     * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
     * must agree with the corresponding break-from-do-while(0) logic there.
     */
    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        JSOp op2 = JSOp(cx->fp->regs->pc[JSOP_LAMBDA_LENGTH]);

        if (op2 == JSOP_SETMETHOD) {
            jsval lval = stackval(-1);

            if (!JSVAL_IS_PRIMITIVE(lval) &&
                OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(lval)) == &js_ObjectClass) {
                stack(0, INS_CONSTOBJ(FUN_OBJECT(fun)));
                return ARECORD_CONTINUE;
            }
        } else if (op2 == JSOP_INITMETHOD) {
            stack(0, INS_CONSTOBJ(FUN_OBJECT(fun)));
            return ARECORD_CONTINUE;
        }

        LIns* proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        stack(0, x);
        return ARECORD_CONTINUE;
    }
    return ARECORD_STOP;
}
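
/*
 * Note: the null-closure case above lets a global-parented
 *
 *   var f = function () { return 0; };
 *
 * record as a single js_NewNullClosure call, preserving per-evaluation
 * function object identity without materializing a scope chain.
 */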

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA_FC()
{
    JSFunction* fun;
    fun = cx->fp->script->getFunction(getFullIndex());

    if (OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) != globalObj)
        return ARECORD_STOP;

    LIns* args[] = {
        INS_CONSTOBJ(globalObj),
        INS_CONSTFUN(fun),
        cx_ins
    };
    LIns* call_ins = lir->insCall(&js_AllocFlatClosure_ci, args);
    guard(false,
          addName(lir->ins2(LIR_peq, call_ins, INS_NULL()),
                  "guard(js_AllocFlatClosure)"),
          OOM_EXIT);

    if (fun->u.i.nupvars) {
        JSUpvarArray *uva = fun->u.i.script->upvars();
        for (uint32 i = 0, n = uva->length; i < n; i++) {
            jsval v;
            LIns* upvar_ins = upvar(fun->u.i.script, uva, i, v);
            if (!upvar_ins)
                return ARECORD_STOP;
            LIns* dslots_ins = NULL;
            stobj_set_dslot(call_ins, i, dslots_ins, box_jsval(v, upvar_ins));
        }
    }

    stack(0, call_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLEE()
{
    stack(0, get(&cx->fp->argv[-2]));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFPRIMTOP()
{
    // Traces are type-specialized, including null vs. object, so we need do
    // nothing here. The upstream unbox_jsval called after valueOf or toString
    // from an imacro (e.g.) will fork the trace for us, allowing us to just
    // follow along mindlessly :-).
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETCALL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FINALLY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NOP()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGSUB()
{
    JSStackFrame* fp = cx->fp;
    if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
        uintN slot = GET_ARGNO(fp->regs->pc);
        if (slot < fp->argc)
            stack(0, get(&cx->fp->argv[slot]));
        else
            stack(0, INS_VOID());
        return ARECORD_CONTINUE;
    }
    RETURN_STOP_A("can't trace JSOP_ARGSUB hard case");
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGCNT()
{
    if (cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)
        RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT");

    // argc is fixed on trace, so ideally we would simply generate LIR for
    // constant argc. But the user can mutate arguments.length in the
    // interpreter, so we have to check for that in the trace entry frame.
    // We also have to check that arguments.length has not been mutated
    // at record time, because if so we will generate incorrect constant
    // LIR, which will assert in alu().
    if (cx->fp->argsobj && js_IsOverriddenArgsLength(JSVAL_TO_OBJECT(cx->fp->argsobj)))
        RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
    LIns *a_ins = get(&cx->fp->argsobj);
    if (callDepth == 0) {
        LIns *br = lir->insBranch(LIR_jt, lir->ins_peq0(a_ins), NULL);

        // The following implements js_IsOverriddenArgsLength on trace.
        // The '2' bit is set if length was overridden.
        LIns *len_ins = stobj_get_fslot(a_ins, JSSLOT_ARGS_LENGTH);
        LIns *ovr_ins = lir->ins2(LIR_piand, len_ins, INS_CONSTWORD(2));

        guard(true, lir->ins_peq0(ovr_ins), snapshot(BRANCH_EXIT));
        LIns *label = lir->ins0(LIR_label);
        br->setTarget(label);
    }
    stack(0, lir->insImmf(cx->fp->argc));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
{
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns* proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        var(slot, x);
        return ARECORD_CONTINUE;
    }

    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOTOX()
{
    return record_JSOP_GOTO();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFEQX()
{
    return record_JSOP_IFEQ();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASEX()
{
    strictEquality(true, true);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULTX()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return record_JSOP_TABLESWITCH();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return InjectStatus(switchop());
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THROWING()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARINC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARDEC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), -1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_REGEXP()
{
    return ARECORD_STOP;
}
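
/*
 * The E4X (ECMA-357) opcodes below are not traced; each one aborts recording
 * with ARECORD_STOP, so scripts that touch XML values fall back to the
 * interpreter. The only exception is JSOP_QNAMEPART, which behaves exactly
 * like JSOP_STRING.
 */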

// begin JS_HAS_XML_SUPPORT

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANYNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMEPART()
{
    return record_JSOP_STRING();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMECONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDFILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLOBJECT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCDATA()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLPI()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETFUNNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return ARECORD_STOP;
}

// end JS_HAS_XML_SUPPORT
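
/*
 * JSOP_CALLPROP fuses a property get with setting up |this| for the ensuing
 * call. For an object receiver we use the object itself as |this|; for a
 * primitive receiver we look the method up on the class prototype but keep
 * the primitive as |this|, provided the callee can cope with that
 * (PRIMITIVE_THIS_TEST below).
 */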

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLPROP()
{
    jsval& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    LIns* this_ins;
    if (!JSVAL_IS_PRIMITIVE(l)) {
        obj = JSVAL_TO_OBJECT(l);
        obj_ins = get(&l);
        this_ins = obj_ins; // |this| for subsequent call
    } else {
        jsint i;
        debug_only_stmt(const char* protoname = NULL;)
        if (JSVAL_IS_STRING(l)) {
            i = JSProto_String;
            debug_only_stmt(protoname = "String.prototype";)
        } else if (JSVAL_IS_NUMBER(l)) {
            i = JSProto_Number;
            debug_only_stmt(protoname = "Number.prototype";)
        } else if (JSVAL_IS_SPECIAL(l)) {
            if (l == JSVAL_VOID)
                RETURN_STOP_A("callprop on void");
            guard(false, lir->ins2i(LIR_eq, get(&l), JSVAL_TO_SPECIAL(JSVAL_VOID)), MISMATCH_EXIT);
            i = JSProto_Boolean;
            debug_only_stmt(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
            RETURN_STOP_A("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
            RETURN_ERROR_A("GetClassPrototype failed!");

        obj_ins = INS_CONSTOBJ(obj);
        debug_only_stmt(obj_ins = addName(obj_ins, protoname);)
        this_ins = get(&l); // use primitive as |this|
    }

    JSObject* obj2;
    jsuword pcval;
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
        RETURN_STOP_A("callee is not an object");
    JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));

    if (JSVAL_IS_PRIMITIVE(l)) {
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, PCVAL_TO_OBJECT(pcval));
        if (!PRIMITIVE_THIS_TEST(fun, l))
            RETURN_STOP_A("callee does not accept primitive |this|");
    }

    stack(0, this_ins);
    stack(-1, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELDESC()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UINT24()
{
    stack(0, lir->insImmf(GET_UINT24(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->fp->regs->pc);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE()
{
    atoms = cx->fp->script->atomMap.vector;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE0()
{
    atoms = cx->fp->script->atomMap.vector;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLELEM()
{
    return record_JSOP_GETELEM();
}
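
/*
 * For example, given |function F() { this.x = 1; } var o = new F();|, F's
 * frame is flagged JSFRAME_CONSTRUCTING, so falling off its end must yield
 * the new object passed in argv[-1]; a plain call falling off the end yields
 * undefined instead.
 */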

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STOP()
{
    JSStackFrame *fp = cx->fp;

    if (fp->imacpc) {
        /*
         * End of imacro, so return true to the interpreter immediately. The
         * interpreter's JSOP_STOP case will return from the imacro, back to
         * the pc after the calling op, still in the same JSStackFrame.
         */
        atoms = fp->script->atomMap.vector;
        return ARECORD_CONTINUE;
    }

    /*
     * We know falling off the end of a constructor returns the new object that
     * was passed in via fp->argv[-1], while falling off the end of a function
     * returns undefined.
     *
     * NB: we do not support script rval (eval, API users who want the result
     * of the last expression-statement, debugger API calls).
     */
    if (fp->flags & JSFRAME_CONSTRUCTING) {
        JS_ASSERT(fp->thisv == fp->argv[-1]);
        rval_ins = get(&fp->argv[-1]);
    } else {
        rval_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
    }
    clearFrameSlotsFromCache();
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETXPROP()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        RETURN_STOP_A("primitive-this for GETXPROP?");

    jsval* vp;
    LIns* v_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, v_ins, nr));
    stack(-1, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    JSObject* obj;
    obj = cx->fp->script->getObject(getFullIndex(0));

    LIns* void_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
    for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
        stack(i, void_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    /* We mustn't exit the lexical block we began recording in. */
    if (cx->fp->blockChain != lexicalBlock)
        return ARECORD_CONTINUE;
    else
        return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GENERATOR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_YIELD()
{
    return ARECORD_STOP;
}
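
/*
 * JSOP_ARRAYPUSH is emitted only for array comprehensions and generator
 * expressions; it appends the value on top of the stack to the dense array
 * held in a fixed local slot, calling js_ArrayCompPush on trace and guarding
 * on its failure (OOM) return.
 */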

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    uint32_t slot = GET_UINT16(cx->fp->regs->pc);
    JS_ASSERT(cx->fp->script->nfixed <= slot);
    JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
    jsval &arrayval = cx->fp->slots[slot];
    JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
    JS_ASSERT(OBJ_IS_DENSE_ARRAY(cx, JSVAL_TO_OBJECT(arrayval)));
    LIns *array_ins = get(&arrayval);
    jsval &elt = stackval(-1);
    LIns *elt_ins = box_jsval(elt, get(&elt));

    LIns *args[] = { elt_ins, array_ins, cx_ins };
    LIns *ok_ins = lir->insCall(&js_ArrayCompPush_ci, args);
    guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    LIns* v_ins = stack(-1);
    int n = -1 - GET_UINT16(cx->fp->regs->pc);
    stack(n, v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    CHECK_STATUS_A(getThis(this_ins));

    /*
     * It's safe to just use cx->fp->thisv here because getThis() returns
     * ARECORD_STOP if thisv is not available.
     */
    JS_ASSERT(cx->fp->flags & JSFRAME_COMPUTED_THIS);
    CHECK_STATUS_A(getProp(JSVAL_TO_OBJECT(cx->fp->thisv), this_ins));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE1()
{
    atoms += 1 << 16;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE2()
{
    atoms += 2 << 16;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE3()
{
    atoms += 3 << 16;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    jsval& v = STOBJ_GET_SLOT(globalObj, slot);
    stack(0, get(&v));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->fp->regs->pc);
    stack(0, var(slot));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->fp->regs->pc);
    stack(0, arg(slot));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}
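
/*
 * Each JSOP_CALLBUILTIN builtin comes in two forms: a plain native used when
 * the op runs in the interpreter, and a _tn variant that can be called
 * directly from trace. The _tn form cannot return a JSBool failure code, so
 * it flags errors via js_SetBuiltinError and returns a sentinel value the
 * guard code recognizes.
 */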

/* Functions for use with JSOP_CALLBUILTIN. */

static JSBool
ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
{
    jsval *argv = JS_ARGV(cx, vp);
    JS_ASSERT(JSVAL_IS_INT(argv[0]));
    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
}

static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
    jsval v = OBJECT_TO_JSVAL(obj);
    JSBool ok = js_ValueToIterator(cx, flags, &v);

    if (!ok) {
        js_SetBuiltinError(cx);
        return NULL;
    }
    return JSVAL_TO_OBJECT(v);
}

static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
}

static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
    JSAutoTempValueRooter tvr(cx);
    JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());

    if (!ok) {
        js_SetBuiltinError(cx);
        return JSVAL_ERROR_COOKIE;
    }
    return tvr.value();
}

JS_DEFINE_TRCINFO_1(ObjectToIterator,
    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
    (3, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, PC, THIS, 0, 0)))

static const struct BuiltinFunctionInfo {
    JSNativeTraceInfo *ti;
    int nargs;
} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
    {&ObjectToIterator_trcinfo, 1},
    {&CallIteratorNext_trcinfo, 0},
};
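
/*
 * Builtin function objects are created lazily, at most once per runtime.
 * Creation races are resolved with a classic double-checked pattern: build
 * the function outside the GC lock, then retest the cache slot under the
 * lock before publishing it.
 */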

JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index)
{
    JSRuntime *rt = cx->runtime;
    JSObject *funobj = rt->builtinFunctions[index];

    if (!funobj) {
        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
        JS_ASSERT(index < JS_ARRAY_LENGTH(builtinFunctionInfo));
        const BuiltinFunctionInfo *bfi = &builtinFunctionInfo[index];
        JSFunction *fun = js_NewFunction(cx,
                                         NULL,
                                         JS_DATA_TO_FUNC_PTR(JSNative, bfi->ti),
                                         bfi->nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRCINFO,
                                         NULL,
                                         NULL);
        if (fun) {
            funobj = FUN_OBJECT(fun);
            STOBJ_CLEAR_PROTO(funobj);
            STOBJ_CLEAR_PARENT(funobj);

            JS_LOCK_GC(rt);
            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
                rt->builtinFunctions[index] = funobj;
            else
                funobj = rt->builtinFunctions[index];
            JS_UNLOCK_GC(rt);
        }
    }
    return funobj;
}
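
/*
 * JSOP_CALLBUILTIN's stack effect: the value on top of the stack becomes
 * |this| for the call, and the builtin function object is written in beneath
 * it as the callee.
 */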

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLBUILTIN()
{
    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
    if (!obj)
        RETURN_ERROR_A("error in js_GetBuiltinFunction");

    stack(0, get(&stackval(-1)));
    stack(-1, INS_CONSTOBJ(obj));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, lir->insImmf(GET_INT8(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, lir->insImmf(GET_INT32(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}
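
/*
 * JSOP_LENGTH specializes .length for the receivers we can trace: strings
 * (loaded from JSString::mLength), arguments objects (the recorded frame's
 * argc), and dense or slow arrays (JSSLOT_ARRAY_LENGTH). Any other native
 * object falls through to the generic getProp path; non-native objects abort.
 */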

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            RETURN_STOP_A("non-string primitive JSOP_LENGTH unsupported");
        set(&l, lir->ins1(LIR_i2f,
                          p2i(lir->insLoad(LIR_ldp, get(&l),
                                           offsetof(JSString, mLength)))));
        return ARECORD_CONTINUE;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);

    if (STOBJ_GET_CLASS(obj) == &js_ArgumentsClass) {
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (!afp)
            RETURN_STOP_A("can't reach arguments object's frame");

        LIns* v_ins = lir->ins1(LIR_i2f, INS_CONST(afp->argc));
        set(&l, v_ins);
        return ARECORD_CONTINUE;
    }

    LIns* v_ins;
    if (OBJ_IS_ARRAY(cx, obj)) {
        if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
            if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
                JS_NOT_REACHED("OBJ_IS_DENSE_ARRAY but not?!?");
                return ARECORD_STOP;
            }
        } else {
            if (!guardClass(obj, obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT)))
                RETURN_STOP_A("can't trace length property access on non-array");
        }
        v_ins = lir->ins1(LIR_i2f, p2i(stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)));
    } else {
        if (!OBJ_IS_NATIVE(obj))
            RETURN_STOP_A("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return ARECORD_CONTINUE;
}
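
/*
 * JSOP_NEWARRAY allocates the array with js_NewArrayWithSlots and then stores
 * each stack element into its dslot. JSSLOT_ARRAY_COUNT is set to the number
 * of non-hole elements, matching what the interpreter would produce for an
 * array literal containing elisions, e.g. |[1, , 3]|.
 */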

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWARRAY()
{
    LIns *proto_ins;
    CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins));

    uint32 len = GET_UINT16(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(len);

    LIns* args[] = { lir->insImm(len), proto_ins, cx_ins };
    LIns* v_ins = lir->insCall(&js_NewArrayWithSlots_ci, args);
    guard(false, lir->ins_peq0(v_ins), OOM_EXIT);

    LIns* dslots_ins = NULL;
    uint32 count = 0;
    for (uint32 i = 0; i < len; i++) {
        jsval& v = stackval(int(i) - int(len));
        if (v != JSVAL_HOLE)
            count++;
        LIns* elt_ins = box_jsval(v, get(&v));
        stobj_set_dslot(v_ins, i, dslots_ins, elt_ins);
    }

    if (count > 0)
        stobj_set_fslot(v_ins, JSSLOT_ARRAY_COUNT, INS_CONST(count));

    stack(-int(len), v_ins);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_HOLE)));
    return ARECORD_CONTINUE;
}

AbortableRecordingStatus
TraceRecorder::record_JSOP_TRACE()
{
    return ARECORD_CONTINUE;
}

static const uint32 sMaxConcatNSize = 32;

/*
 * Copy the result of defvalue.string back into concatn's arguments, clean the
 * stack, and return a pointer to the argument that was just overwritten.
 */
JS_REQUIRES_STACK jsval *
js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
                                TraceRecorder *recorder)
{
    JS_ASSERT(*regs.pc == JSOP_IMACOP);

    /* Pop the argument offset and imacro return value. */
    jsint offset = JSVAL_TO_INT(*--regs.sp);
    jsval *imacroResult = --regs.sp;

    /* Replace non-primitive argument with new primitive argument. */
    jsval *vp = regs.sp - offset;
    JS_ASSERT(regs.sp - argc <= vp && vp < regs.sp);
    if (recorder)
        recorder->set(vp, recorder->get(imacroResult));
    *vp = *imacroResult;
    return vp;
}
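
/*
 * defvalue.string is the imacro form of the ECMA [[DefaultValue]] hook with
 * hint String: it invokes the operand's toString (falling back to valueOf) to
 * obtain a primitive, which the cleanup function above then writes back over
 * the original argument.
 */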

/*
 * Initially, concatn takes N arguments on the stack, where N is the immediate
 * operand. To convert these arguments to primitives, we must repeatedly call
 * the defvalue.string imacro. To achieve this iteration, defvalue.string ends
 * with imacop. Hence, this function is called multiple times, each time with
 * one less non-primitive. To keep track of where we are in the loop, we must
 * push an additional index value on the stack. Hence, on all subsequent
 * entries, the stack is organized as follows (bottom to top):
 *
 *   prim[1]
 *   ...
 *   prim[i-1]
 *   nonprim[i]     argument to imacro
 *   arg[i+1]
 *   ...
 *   arg[N]
 *   primarg[i]     nonprim[i] converted to primitive
 *   i
 *
 * Hence, the stack setup on entry to this function (and JSOP_CONCATN in the
 * interpreter, on trace abort) is dependent on whether an imacro is in
 * progress. When all of concatn's arguments are primitive, it emits a builtin
 * call and allows the actual JSOP_CONCATN to be executed by the interpreter.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CONCATN()
{
    JSStackFrame *fp = cx->fp;
    JSFrameRegs &regs = *fp->regs;

    /*
     * If we are in an imacro, we must have just finished a call to
     * defvalue.string. Continue where we left off last time.
     */
    uint32 argc;
    jsval *loopStart;
    if (fp->imacpc) {
        JS_ASSERT(*fp->imacpc == JSOP_CONCATN);
        argc = GET_ARGC(fp->imacpc);
        loopStart = js_ConcatPostImacroStackCleanup(argc, regs, this) + 1;
    } else {
        argc = GET_ARGC(regs.pc);
        JS_ASSERT(argc > 0);
        loopStart = regs.sp - argc;

        /* Prevent code/alloca explosion. */
        if (argc > sMaxConcatNSize)
            return ARECORD_STOP;
    }

    /* Convert non-primitives to primitives using defvalue.string. */
    for (jsval *vp = loopStart; vp != regs.sp; ++vp) {
        if (!JSVAL_IS_PRIMITIVE(*vp)) {
            /*
             * In addition to the jsval we want the imacro to convert to
             * primitive, pass through the offset of the argument on the stack.
             */
            jsint offset = regs.sp - vp;

            /* Push the non-primitive to convert. */
            set(regs.sp, get(vp), true);
            *regs.sp++ = *vp;

            /* Push the argument index. */
            set(regs.sp, lir->insImm(offset), true);
            *regs.sp++ = INT_TO_JSVAL(offset);

            /* Nested imacro call OK because this is a tail call. */
            return InjectStatus(call_imacro(defvalue_imacros.string));
        }
    }

    /* Build an array of the stringified primitives. */
    int32_t bufSize = argc * sizeof(JSString *);
    LIns *buf_ins = lir->insAlloc(bufSize);
    int32_t d = 0;
    for (jsval *vp = regs.sp - argc; vp != regs.sp; ++vp, d += sizeof(void *))
        lir->insStorei(stringify(*vp), buf_ins, d);

    /* Perform concatenation using a builtin. */
    LIns *args[] = { lir->insImm(argc), buf_ins, cx_ins };
    LIns *concat = lir->insCall(&js_ConcatN_ci, args);
    guard(false, lir->ins_peq0(concat), OOM_EXIT);

    /* Update tracker with result. */
    jsval *afterPop = regs.sp - (argc - 1);
    set(afterPop - 1, concat);

    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETMETHOD()
{
    return record_JSOP_SETPROP();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INITMETHOD()
{
    return record_JSOP_INITPROP();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SHARPINIT()
{
    return ARECORD_STOP;
}

#define DBG_STUB(OP)                                                          \
    JS_REQUIRES_STACK AbortableRecordingStatus                                \
    TraceRecorder::record_##OP()                                              \
    {                                                                         \
        RETURN_STOP_A("can't trace " #OP);                                    \
    }

DBG_STUB(JSOP_GETUPVAR_DBG)
DBG_STUB(JSOP_CALLUPVAR_DBG)
DBG_STUB(JSOP_DEFFUN_DBGFC)
DBG_STUB(JSOP_DEFLOCALFUN_DBGFC)
DBG_STUB(JSOP_LAMBDA_DBGFC)
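
/*
 * The dump below prints one character per slot using typeChar, stack slots
 * first and then global slots, so a tree's entry type map can be compared by
 * eye against the type maps of its unstable exits.
 */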

#ifdef DEBUG
/*
 * Print information about entry typemaps and unstable exits for all peers
 * at a PC.
 */
static void
DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
                  uint32 argc)
{
    Fragment* f;
    TreeInfo* ti;
    bool looped = false;
    unsigned length = 0;

    for (f = LookupLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
        if (!f->vmprivate)
            continue;
        debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
        ti = (TreeInfo*)f->vmprivate;
        if (looped)
            JS_ASSERT(ti->nStackTypes == length);
        for (unsigned i = 0; i < ti->nStackTypes; i++)
            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->stackTypeMap()[i]]);
        debug_only_print0(LC_TMRecorder, " GLOBALS=");
        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->globalTypeMap()[i]]);
        debug_only_print0(LC_TMRecorder, "\n");
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            debug_only_print0(LC_TMRecorder, "EXIT ");
            JSTraceType* m = uexit->exit->fullTypeMap();
            debug_only_print0(LC_TMRecorder, "STACK=");
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                debug_only_printf(LC_TMRecorder, "%c", typeChar[m[i]]);
            debug_only_print0(LC_TMRecorder, " GLOBALS=");
            for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
                debug_only_printf(LC_TMRecorder, "%c",
                                  typeChar[m[uexit->exit->numStackSlots + i]]);
            }
            debug_only_print0(LC_TMRecorder, "\n");
            uexit = uexit->next;
        }
        length = ti->nStackTypes;
        looped = true;
    }
}
#endif
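
/*
 * The TraceVis entry points below only manage the binary log file. Each is
 * exposed twice: as a JS_FRIEND_API for embedders (JS_StartTraceVis and
 * JS_StopTraceVis) and as a native callable from scripts (js_StartTraceVis
 * and js_StopTraceVis) that reports success on stderr or failure via
 * JS_ReportError.
 */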

#ifdef MOZ_TRACEVIS

FILE* traceVisLogFile = NULL;
JSHashTable *traceVisScriptTable = NULL;

JS_FRIEND_API(bool)
JS_StartTraceVis(const char* filename = "tracevis.dat")
{
    if (traceVisLogFile) {
        // If we're currently recording, first we must stop.
        JS_StopTraceVis();
    }

    traceVisLogFile = fopen(filename, "wb");
    if (!traceVisLogFile)
        return false;

    return true;
}

JS_FRIEND_API(JSBool)
js_StartTraceVis(JSContext *cx, JSObject *obj,
                 uintN argc, jsval *argv, jsval *rval)
{
    JSBool ok;

    if (argc > 0 && JSVAL_IS_STRING(argv[0])) {
        JSString *str = JSVAL_TO_STRING(argv[0]);
        char *filename = js_DeflateString(cx, str->chars(), str->length());
        if (!filename)
            goto error;
        ok = JS_StartTraceVis(filename);
        cx->free(filename);
    } else {
        ok = JS_StartTraceVis();
    }

    if (ok) {
        fprintf(stderr, "started TraceVis recording\n");
        return JS_TRUE;
    }

  error:
    JS_ReportError(cx, "failed to start TraceVis recording");
    return JS_FALSE;
}

JS_FRIEND_API(bool)
JS_StopTraceVis()
{
    if (!traceVisLogFile)
        return false;

    fclose(traceVisLogFile); // not worth checking the result
    traceVisLogFile = NULL;

    return true;
}

JS_FRIEND_API(JSBool)
js_StopTraceVis(JSContext *cx, JSObject *obj,
                uintN argc, jsval *argv, jsval *rval)
{
    JSBool ok = JS_StopTraceVis();

    if (ok)
        fprintf(stderr, "stopped TraceVis recording\n");
    else
        JS_ReportError(cx, "TraceVis isn't running");

    return ok;
}

#endif /* MOZ_TRACEVIS */

JS_REQUIRES_STACK void
js_CaptureStackTypes(JSContext* cx, unsigned callDepth, JSTraceType* typeMap)
{
    CaptureTypesVisitor capVisitor(cx, typeMap);
    VisitStackSlots(capVisitor, cx, callDepth);
}

JS_REQUIRES_STACK void
TraceRecorder::determineGlobalTypes(JSTraceType* typeMap)
{
    DetermineTypesVisitor detVisitor(*this, typeMap);
    VisitGlobalSlots(detVisitor, cx, *treeInfo->globalSlots);
}

#include "jsrecursion.cpp"