1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
43 #include "jsbit.h" // low-level (NSPR-based) headers next
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
50 #define alloca _alloca
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
75 #include "jsstaticcheck.h"
79 #include "jsatominlines.h"
80 #include "jsscriptinlines.h"
82 #include "jsautooplen.h" // generated headers last
83 #include "imacros.c.out"
85 using namespace nanojit
;
87 #if JS_HAS_XML_SUPPORT
88 #define RETURN_VALUE_IF_XML(val, ret) \
90 if (!JSVAL_IS_PRIMITIVE(val) && \
91 OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(val))) { \
92 RETURN_VALUE("xml detected", ret); \
96 #define RETURN_IF_XML(val, ret) ((void) 0)
99 #define RETURN_IF_XML_A(val) RETURN_VALUE_IF_XML(val, ARECORD_STOP)
100 #define RETURN_IF_XML(val) RETURN_VALUE_IF_XML(val, RECORD_STOP)
103 * Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
104 * the type. What you want to use is JSVAL_IS_SPECIAL(x) and then handle the
105 * undefined case properly (bug 457363).
107 #undef JSVAL_IS_BOOLEAN
108 #define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)
110 JS_STATIC_ASSERT(sizeof(JSTraceType
) == 1);
/* Map to translate a type tag into a printable representation. */
static const char typeChar[] = "OIDXSNBF";
/* Same, but for the jsval tag space (shares letters with typeChar). */
static const char tagChar[]  = "OIDISIBI";
116 /* Blacklist parameters. */
119 * Number of iterations of a loop where we start tracing. That is, we don't
120 * start tracing until the beginning of the HOTLOOP-th iteration.
124 /* Attempt recording this many times before blacklisting permanently. */
125 #define BL_ATTEMPTS 2
127 /* Skip this many hits before attempting recording again, after an aborted attempt. */
128 #define BL_BACKOFF 32
130 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
133 /* Number of times we try to extend the tree along a side exit. */
136 /* Maximum number of peer trees allowed. */
139 /* Max number of hits to a RECURSIVE_UNLINKED exit before we trash the tree. */
140 #define MAX_RECURSIVE_UNLINK_HITS 64
142 /* Max call depths for inlining. */
143 #define MAX_CALLDEPTH 10
145 /* Max native stack size. */
146 #define MAX_NATIVE_STACK_SLOTS 4096
148 /* Max call stack size. */
149 #define MAX_CALL_STACK_ENTRIES 500
151 /* Max global object size. */
152 #define MAX_GLOBAL_SLOTS 4096
154 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
155 #define MAX_INTERP_STACK_BYTES \
156 (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) + \
157 MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \
158 sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
160 /* Max number of branches per tree. */
161 #define MAX_BRANCHES 32
163 #define CHECK_STATUS(expr) \
165 RecordingStatus _status = (expr); \
166 if (_status != RECORD_CONTINUE) \
170 #define CHECK_STATUS_A(expr) \
172 AbortableRecordingStatus _status = InjectStatus((expr)); \
173 if (_status != ARECORD_CONTINUE) \
178 #define RETURN_VALUE(msg, value) \
180 debug_only_printf(LC_TMAbort, "trace stopped: %d: %s\n", __LINE__, (msg)); \
184 #define RETURN_VALUE(msg, value) return (value)
187 #define RETURN_STOP(msg) RETURN_VALUE(msg, RECORD_STOP)
188 #define RETURN_STOP_A(msg) RETURN_VALUE(msg, ARECORD_STOP)
189 #define RETURN_ERROR(msg) RETURN_VALUE(msg, RECORD_ERROR)
190 #define RETURN_ERROR_A(msg) RETURN_VALUE(msg, ARECORD_ERROR)
194 #define JITSTAT(x) uint64 x;
195 #include "jitstats.tbl"
197 } jitstats
= { 0LL, };
199 JS_STATIC_ASSERT(sizeof(jitstats
) % sizeof(uint64
) == 0);
202 #define JITSTAT(x) STAT ## x ## ID,
203 #include "jitstats.tbl"
208 static JSPropertySpec jitstats_props
[] = {
209 #define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
210 #include "jitstats.tbl"
216 jitstats_getProperty(JSContext
*cx
, JSObject
*obj
, jsid id
, jsval
*vp
)
220 if (JSVAL_IS_STRING(id
)) {
221 JSString
* str
= JSVAL_TO_STRING(id
);
222 if (strcmp(JS_GetStringBytes(str
), "HOTLOOP") == 0) {
223 *vp
= INT_TO_JSVAL(HOTLOOP
);
228 if (JSVAL_IS_INT(id
))
229 index
= JSVAL_TO_INT(id
);
233 #define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
234 #include "jitstats.tbl"
241 if (result
< JSVAL_INT_MAX
) {
242 *vp
= INT_TO_JSVAL(jsint(result
));
246 JS_snprintf(retstr
, sizeof retstr
, "%llu", result
);
247 *vp
= STRING_TO_JSVAL(JS_NewStringCopyZ(cx
, retstr
));
251 JSClass jitstats_class
= {
254 JS_PropertyStub
, JS_PropertyStub
,
255 jitstats_getProperty
, JS_PropertyStub
,
256 JS_EnumerateStub
, JS_ResolveStub
,
257 JS_ConvertStub
, NULL
,
258 JSCLASS_NO_OPTIONAL_MEMBERS
262 js_InitJITStatsClass(JSContext
*cx
, JSObject
*glob
)
264 JS_InitClass(cx
, glob
, NULL
, &jitstats_class
, NULL
, 0, jitstats_props
, NULL
, NULL
, NULL
);
267 #define AUDIT(x) (jitstats.x++)
269 #define AUDIT(x) ((void)0)
270 #endif /* JS_JIT_SPEW */
273 * INS_CONSTPTR can be used to embed arbitrary pointers into the native code. It should not
274 * be used directly to embed GC thing pointers. Instead, use the INS_CONSTOBJ/FUN/STR/SPROP
275 * variants which ensure that the embedded pointer will be kept alive across GCs.
278 #define INS_CONST(c) addName(lir->insImm(c), #c)
279 #define INS_CONSTPTR(p) addName(lir->insImmPtr(p), #p)
280 #define INS_CONSTWORD(v) addName(lir->insImmPtr((void *) (v)), #v)
281 #define INS_CONSTOBJ(obj) addName(insImmObj(obj), #obj)
282 #define INS_CONSTFUN(fun) addName(insImmFun(fun), #fun)
283 #define INS_CONSTSTR(str) addName(insImmStr(str), #str)
284 #define INS_CONSTSPROP(sprop) addName(insImmSprop(sprop), #sprop)
285 #define INS_ATOM(atom) INS_CONSTSTR(ATOM_TO_STRING(atom))
286 #define INS_NULL() INS_CONSTPTR(NULL)
287 #define INS_VOID() INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID))
289 static avmplus::AvmCore s_core
= avmplus::AvmCore();
290 static avmplus::AvmCore
* core
= &s_core
;
292 /* Allocator SPI implementation. */
295 nanojit::Allocator::allocChunk(size_t nbytes
)
297 VMAllocator
*vma
= (VMAllocator
*)this;
298 JS_ASSERT(!vma
->outOfMemory());
299 void *p
= calloc(1, nbytes
);
301 JS_ASSERT(nbytes
< sizeof(vma
->mReserve
));
302 vma
->mOutOfMemory
= true;
303 p
= (void*) &vma
->mReserve
[0];
305 vma
->mSize
+= nbytes
;
310 nanojit::Allocator::freeChunk(void *p
) {
311 VMAllocator
*vma
= (VMAllocator
*)this;
312 if (p
!= &vma
->mReserve
[0])
317 nanojit::Allocator::postReset() {
318 VMAllocator
*vma
= (VMAllocator
*)this;
319 vma
->mOutOfMemory
= false;
326 DumpPeerStability(JSTraceMonitor
* tm
, const void* ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
);
330 * We really need a better way to configure the JIT. Shaver, where is
331 * my fancy JIT object?
333 * NB: this is raced on, if jstracer.cpp should ever be running MT.
334 * I think it's harmless tho.
336 static bool did_we_check_processor_features
= false;
338 /* ------ Debug logging control ------ */
341 * All the logging control stuff lives in here. It is shared between
342 * all threads, but I think that's OK.
344 LogControl js_LogController
;
349 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
352 static bool did_we_set_up_debug_logging
= false;
355 InitJITLogController()
360 js_LogController
.lcbits
= 0;
362 tm
= getenv("TRACEMONKEY");
366 "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
367 "Try 'TMFLAGS=help js -j' for a list of options.\n"
372 tmf
= getenv("TMFLAGS");
375 /* Using strstr() is really a cheap hack as far as flag decoding goes. */
376 if (strstr(tmf
, "help")) {
379 "usage: TMFLAGS=option,option,option,... where options can be:\n"
381 " help show this message\n"
382 " ------ options for jstracer & jsregexp ------\n"
383 " minimal ultra-minimalist output; try this first\n"
384 " full everything except 'treevis' and 'nocodeaddrs'\n"
385 " tracer tracer lifetime (FIXME:better description)\n"
386 " recorder trace recording stuff (FIXME:better description)\n"
387 " abort show trace recording aborts\n"
388 " stats show trace recording stats\n"
389 " regexp show compilation & entry for regexps\n"
390 " treevis spew that tracevis/tree.py can parse\n"
391 " ------ options for Nanojit ------\n"
392 " fragprofile count entries and exits for each fragment\n"
393 " activation show activation info\n"
394 " liveness show LIR liveness at start of rdr pipeline\n"
395 " readlir show LIR as it enters the reader pipeline\n"
396 " aftersf show LIR after StackFilter\n"
397 " regalloc show regalloc details\n"
398 " assembly show final aggregated assembly code\n"
399 " nocodeaddrs don't show code addresses in assembly listings\n"
408 /* flags for jstracer.cpp */
409 if (strstr(tmf
, "minimal") || strstr(tmf
, "full")) bits
|= LC_TMMinimal
;
410 if (strstr(tmf
, "tracer") || strstr(tmf
, "full")) bits
|= LC_TMTracer
;
411 if (strstr(tmf
, "recorder") || strstr(tmf
, "full")) bits
|= LC_TMRecorder
;
412 if (strstr(tmf
, "abort") || strstr(tmf
, "full")) bits
|= LC_TMAbort
;
413 if (strstr(tmf
, "stats") || strstr(tmf
, "full")) bits
|= LC_TMStats
;
414 if (strstr(tmf
, "regexp") || strstr(tmf
, "full")) bits
|= LC_TMRegexp
;
415 if (strstr(tmf
, "treevis")) bits
|= LC_TMTreeVis
;
417 /* flags for nanojit */
418 if (strstr(tmf
, "fragprofile")) bits
|= LC_FragProfile
;
419 if (strstr(tmf
, "liveness") || strstr(tmf
, "full")) bits
|= LC_Liveness
;
420 if (strstr(tmf
, "activation") || strstr(tmf
, "full")) bits
|= LC_Activation
;
421 if (strstr(tmf
, "readlir") || strstr(tmf
, "full")) bits
|= LC_ReadLIR
;
422 if (strstr(tmf
, "aftersf") || strstr(tmf
, "full")) bits
|= LC_AfterSF
;
423 if (strstr(tmf
, "regalloc") || strstr(tmf
, "full")) bits
|= LC_RegAlloc
;
424 if (strstr(tmf
, "assembly") || strstr(tmf
, "full")) bits
|= LC_Assembly
;
425 if (strstr(tmf
, "nocodeaddrs")) bits
|= LC_NoCodeAddrs
;
427 js_LogController
.lcbits
= bits
;
433 /* ------------------ Frag-level profiling support ------------------ */
438 * All the allocations done by this profile data-collection and
439 * display machinery, are done in JSTraceMonitor::profAlloc. That is
440 * emptied out at the end of js_FinishJIT. It has a lifetime from
441 * js_InitJIT to js_FinishJIT, which exactly matches the span
442 * js_FragProfiling_init to js_FragProfiling_showResults.
446 Seq
<T
>* reverseInPlace(Seq
<T
>* seq
)
451 Seq
<T
>* next
= curr
->tail
;
459 // The number of top blocks to show in the profile
460 #define N_TOP_BLOCKS 50
462 // Contains profile info for a single guard
464 uint32_t guardID
; // identifying number
465 uint32_t count
; // count.
469 uint32_t count
; // entry count for this Fragment
470 uint32_t nStaticExits
; // statically: the number of exits
471 size_t nCodeBytes
; // statically: the number of insn bytes in the main fragment
472 size_t nExitBytes
; // statically: the number of insn bytes in the exit paths
473 Seq
<GuardPI
>* guards
; // guards, each with its own count
474 uint32_t largestGuardID
; // that exists in .guards
477 /* A mapping of Fragment.profFragID to FragPI */
478 typedef HashMap
<uint32
,FragPI
> FragStatsMap
;
481 js_FragProfiling_FragFinalizer(Fragment
* f
, JSTraceMonitor
* tm
)
483 // Recover profiling data from 'f', which is logically at the end
484 // of its useful lifetime.
485 if (!(js_LogController
.lcbits
& LC_FragProfile
))
489 // Valid profFragIDs start at 1
490 NanoAssert(f
->profFragID
>= 1);
491 // Should be called exactly once per Fragment. This will assert if
492 // you issue the same FragID to more than one Fragment.
493 NanoAssert(!tm
->profTab
->containsKey(f
->profFragID
));
495 FragPI pi
= { f
->profCount
,
501 // Begin sanity check on the guards
502 SeqBuilder
<GuardPI
> guardsBuilder(*tm
->profAlloc
);
505 uint32_t sumOfDynExits
= 0;
506 for (gr
= f
->guardsForFrag
; gr
; gr
= gr
->nextInFrag
) {
508 // Also copy the data into our auxiliary structure.
509 // f->guardsForFrag is in reverse order, and so this
510 // copy preserves that ordering (->add adds at end).
511 // Valid profGuardIDs start at 1.
512 NanoAssert(gr
->profGuardID
> 0);
513 sumOfDynExits
+= gr
->profCount
;
514 GuardPI gpi
= { gr
->profGuardID
, gr
->profCount
};
515 guardsBuilder
.add(gpi
);
516 if (gr
->profGuardID
> pi
.largestGuardID
)
517 pi
.largestGuardID
= gr
->profGuardID
;
519 pi
.guards
= guardsBuilder
.get();
520 // And put the guard list in forwards order
521 pi
.guards
= reverseInPlace(pi
.guards
);
523 // Why is this so? Because nGs is the number of guards
524 // at the time the LIR was generated, whereas f->nStaticExits
525 // is the number of them observed by the time it makes it
526 // through to the assembler. It can be the case that LIR
527 // optimisation removes redundant guards; hence we expect
528 // nGs to always be the same or higher.
529 NanoAssert(nGs
>= f
->nStaticExits
);
531 // Also we can assert that the sum of the exit counts
532 // can't exceed the entry count. It'd be nice to assert that
533 // they are exactly equal, but we can't because we don't know
534 // how many times we got to the end of the trace.
535 NanoAssert(f
->profCount
>= sumOfDynExits
);
537 // End sanity check on guards
539 tm
->profTab
->put(f
->profFragID
, pi
);
543 js_FragProfiling_showResults(JSTraceMonitor
* tm
)
545 uint32_t topFragID
[N_TOP_BLOCKS
];
546 FragPI topPI
[N_TOP_BLOCKS
];
547 uint64_t totCount
= 0, cumulCount
;
549 size_t totCodeB
= 0, totExitB
= 0;
550 memset(topFragID
, 0, sizeof(topFragID
));
551 memset(topPI
, 0, sizeof(topPI
));
552 FragStatsMap::Iter
iter(*tm
->profTab
);
553 while (iter
.next()) {
554 uint32_t fragID
= iter
.key();
555 FragPI pi
= iter
.value();
556 uint32_t count
= pi
.count
;
557 totCount
+= (uint64_t)count
;
558 /* Find the rank for this entry, in tops */
559 int r
= N_TOP_BLOCKS
-1;
563 if (topFragID
[r
] == 0) {
567 if (count
> topPI
[r
].count
) {
574 AvmAssert(r
>= 0 && r
<= N_TOP_BLOCKS
);
575 /* This entry should be placed at topPI[r], and entries
576 at higher numbered slots moved up one. */
577 if (r
< N_TOP_BLOCKS
) {
578 for (int s
= N_TOP_BLOCKS
-1; s
> r
; s
--) {
579 topFragID
[s
] = topFragID
[s
-1];
580 topPI
[s
] = topPI
[s
-1];
582 topFragID
[r
] = fragID
;
587 js_LogController
.printf(
588 "\n----------------- Per-fragment execution counts ------------------\n");
589 js_LogController
.printf(
590 "\nTotal count = %llu\n\n", (unsigned long long int)totCount
);
592 js_LogController
.printf(
593 " Entry counts Entry counts ----- Static -----\n");
594 js_LogController
.printf(
595 " ------Self------ ----Cumulative--- Exits Cbytes Xbytes FragID\n");
596 js_LogController
.printf("\n");
599 totCount
= 1; /* avoid division by zero */
602 for (r
= 0; r
< N_TOP_BLOCKS
; r
++) {
603 if (topFragID
[r
] == 0)
605 cumulCount
+= (uint64_t)topPI
[r
].count
;
606 js_LogController
.printf("%3d: %5.2f%% %9u %6.2f%% %9llu"
607 " %3d %5u %5u %06u\n",
609 (double)topPI
[r
].count
* 100.0 / (double)totCount
,
611 (double)cumulCount
* 100.0 / (double)totCount
,
612 (unsigned long long int)cumulCount
,
613 topPI
[r
].nStaticExits
,
614 (unsigned int)topPI
[r
].nCodeBytes
,
615 (unsigned int)topPI
[r
].nExitBytes
,
617 totSE
+= (uint32_t)topPI
[r
].nStaticExits
;
618 totCodeB
+= topPI
[r
].nCodeBytes
;
619 totExitB
+= topPI
[r
].nExitBytes
;
621 js_LogController
.printf("\nTotal displayed code bytes = %u, "
623 "Total displayed static exits = %d\n\n",
624 (unsigned int)totCodeB
, (unsigned int)totExitB
, totSE
);
626 js_LogController
.printf("Analysis by exit counts\n\n");
628 for (r
= 0; r
< N_TOP_BLOCKS
; r
++) {
629 if (topFragID
[r
] == 0)
631 js_LogController
.printf("FragID=%06u, total count %u:\n", topFragID
[r
],
633 uint32_t madeItToEnd
= topPI
[r
].count
;
634 uint32_t totThisFrag
= topPI
[r
].count
;
635 if (totThisFrag
== 0)
638 // visit the guards, in forward order
639 for (Seq
<GuardPI
>* guards
= topPI
[r
].guards
; guards
; guards
= guards
->tail
) {
640 gpi
= (*guards
).head
;
643 madeItToEnd
-= gpi
.count
;
644 js_LogController
.printf(" GuardID=%03u %7u (%5.2f%%)\n",
645 gpi
.guardID
, gpi
.count
,
646 100.0 * (double)gpi
.count
/ (double)totThisFrag
);
648 js_LogController
.printf(" Looped (%03u) %7u (%5.2f%%)\n",
649 topPI
[r
].largestGuardID
+1,
651 100.0 * (double)madeItToEnd
/ (double)totThisFrag
);
652 NanoAssert(madeItToEnd
<= topPI
[r
].count
); // else unsigned underflow
653 js_LogController
.printf("\n");
661 /* ----------------------------------------------------------------- */
665 getExitName(ExitType type
)
667 static const char* exitNames
[] =
669 #define MAKE_EXIT_STRING(x) #x,
670 JS_TM_EXITCODES(MAKE_EXIT_STRING
)
671 #undef MAKE_EXIT_STRING
675 JS_ASSERT(type
< TOTAL_EXIT_TYPES
);
677 return exitNames
[type
];
680 static JSBool FASTCALL
681 PrintOnTrace(char* format
, uint32 argc
, double *argv
)
694 #define GET_ARG() JS_BEGIN_MACRO \
695 if (argi >= argc) { \
696 fprintf(out, "[too few args for format]"); \
699 u.d = argv[argi++]; \
705 for (char *p
= format
; *p
; ++p
) {
712 fprintf(out
, "[trailing %%]");
719 fprintf(out
, "[%u:%u 0x%x:0x%x %f]", u
.i
.lo
, u
.i
.hi
, u
.i
.lo
, u
.i
.hi
, u
.d
);
723 fprintf(out
, "%d", u
.i
.lo
);
727 fprintf(out
, "%u", u
.i
.lo
);
731 fprintf(out
, "%x", u
.i
.lo
);
735 fprintf(out
, "%f", u
.d
);
744 size_t length
= u
.s
->length();
745 // protect against massive spew if u.s is a bad pointer.
746 if (length
> 1 << 16)
748 jschar
*chars
= u
.s
->chars();
749 for (unsigned i
= 0; i
< length
; ++i
) {
750 jschar co
= chars
[i
];
754 fprintf(out
, "\\u%02x", co
);
756 fprintf(out
, "\\u%04x", co
);
762 fprintf(out
, "%s", u
.cstr
);
765 fprintf(out
, "[invalid %%%c]", *p
);
774 JS_DEFINE_CALLINFO_3(extern, BOOL
, PrintOnTrace
, CHARPTR
, UINT32
, DOUBLEPTR
, 0, 0)
776 // This version is not intended to be called directly: usually it is easier to
777 // use one of the other overloads.
779 TraceRecorder::tprint(const char *format
, int count
, nanojit::LIns
*insa
[])
781 size_t size
= strlen(format
) + 1;
782 char *data
= (char*) lir
->insSkip(size
)->payload();
783 memcpy(data
, format
, size
);
785 double *args
= (double*) lir
->insSkip(count
* sizeof(double))->payload();
786 for (int i
= 0; i
< count
; ++i
) {
788 lir
->insStorei(insa
[i
], INS_CONSTPTR(args
), sizeof(double) * i
);
791 LIns
* args_ins
[] = { INS_CONSTPTR(args
), INS_CONST(count
), INS_CONSTPTR(data
) };
792 LIns
* call_ins
= lir
->insCall(&PrintOnTrace_ci
, args_ins
);
793 guard(false, lir
->ins_eq0(call_ins
), MISMATCH_EXIT
);
796 // Generate a 'printf'-type call from trace for debugging.
798 TraceRecorder::tprint(const char *format
)
800 LIns
* insa
[] = { NULL
};
801 tprint(format
, 0, insa
);
805 TraceRecorder::tprint(const char *format
, LIns
*ins
)
807 LIns
* insa
[] = { ins
};
808 tprint(format
, 1, insa
);
812 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
)
814 LIns
* insa
[] = { ins1
, ins2
};
815 tprint(format
, 2, insa
);
819 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
)
821 LIns
* insa
[] = { ins1
, ins2
, ins3
};
822 tprint(format
, 3, insa
);
826 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
)
828 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
};
829 tprint(format
, 4, insa
);
833 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
,
836 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
, ins5
};
837 tprint(format
, 5, insa
);
841 TraceRecorder::tprint(const char *format
, LIns
*ins1
, LIns
*ins2
, LIns
*ins3
, LIns
*ins4
,
842 LIns
*ins5
, LIns
*ins6
)
844 LIns
* insa
[] = { ins1
, ins2
, ins3
, ins4
, ins5
, ins6
};
845 tprint(format
, 6, insa
);
850 * The entire VM shares one oracle. Collisions and concurrent updates are
851 * tolerated and worst case cause performance regressions.
853 static Oracle oracle
;
866 Tracker::getTrackerPageBase(const void* v
) const
868 return jsuword(v
) & ~TRACKER_PAGE_MASK
;
872 Tracker::getTrackerPageOffset(const void* v
) const
874 return (jsuword(v
) & TRACKER_PAGE_MASK
) >> 2;
877 struct Tracker::TrackerPage
*
878 Tracker::findTrackerPage(const void* v
) const
880 jsuword base
= getTrackerPageBase(v
);
881 struct Tracker::TrackerPage
* p
= pagelist
;
890 struct Tracker::TrackerPage
*
891 Tracker::addTrackerPage(const void* v
)
893 jsuword base
= getTrackerPageBase(v
);
894 struct TrackerPage
* p
= (struct TrackerPage
*) calloc(1, sizeof(*p
));
905 TrackerPage
* p
= pagelist
;
906 pagelist
= pagelist
->next
;
912 Tracker::has(const void *v
) const
914 return get(v
) != NULL
;
918 Tracker::get(const void* v
) const
920 struct Tracker::TrackerPage
* p
= findTrackerPage(v
);
923 return p
->map
[getTrackerPageOffset(v
)];
927 Tracker::set(const void* v
, LIns
* i
)
929 struct Tracker::TrackerPage
* p
= findTrackerPage(v
);
931 p
= addTrackerPage(v
);
932 p
->map
[getTrackerPageOffset(v
)] = i
;
936 argSlots(JSStackFrame
* fp
)
938 return JS_MAX(fp
->argc
, fp
->fun
->nargs
);
944 return JSVAL_IS_INT(v
) || JSVAL_IS_DOUBLE(v
);
947 static inline jsdouble
950 JS_ASSERT(isNumber(v
));
951 if (JSVAL_IS_DOUBLE(v
))
952 return *JSVAL_TO_DOUBLE(v
);
953 return (jsdouble
)JSVAL_TO_INT(v
);
961 jsdouble d
= asNumber(v
);
963 return !!JSDOUBLE_IS_INT(d
, i
);
969 JS_ASSERT(isNumber(v
));
971 return JSVAL_TO_INT(v
);
974 JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v
), i
));
976 return jsint(*JSVAL_TO_DOUBLE(v
));
979 /* Return TT_DOUBLE for all numbers (int and double) and the tag otherwise. */
980 static inline JSTraceType
981 GetPromotedType(jsval v
)
985 if (JSVAL_IS_OBJECT(v
)) {
986 if (JSVAL_IS_NULL(v
))
988 if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v
)))
992 uint8_t tag
= JSVAL_TAG(v
);
993 JS_ASSERT(tag
== JSVAL_DOUBLE
|| tag
== JSVAL_STRING
|| tag
== JSVAL_SPECIAL
);
994 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_DOUBLE
) == JSVAL_DOUBLE
);
995 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_STRING
) == JSVAL_STRING
);
996 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_PSEUDOBOOLEAN
) == JSVAL_SPECIAL
);
997 return JSTraceType(tag
);
1000 /* Return TT_INT32 for all whole numbers that fit into signed 32-bit and the tag otherwise. */
1001 static inline JSTraceType
1002 getCoercedType(jsval v
)
1006 if (JSVAL_IS_OBJECT(v
)) {
1007 if (JSVAL_IS_NULL(v
))
1009 if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v
)))
1013 uint8_t tag
= JSVAL_TAG(v
);
1014 JS_ASSERT(tag
== JSVAL_DOUBLE
|| tag
== JSVAL_STRING
|| tag
== JSVAL_SPECIAL
);
1015 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_DOUBLE
) == JSVAL_DOUBLE
);
1016 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_STRING
) == JSVAL_STRING
);
1017 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_PSEUDOBOOLEAN
) == JSVAL_SPECIAL
);
1018 return JSTraceType(tag
);
1021 /* Constant seed and accumulate step borrowed from the DJB hash. */
1023 const uintptr_t ORACLE_MASK
= ORACLE_SIZE
- 1;
1024 JS_STATIC_ASSERT((ORACLE_MASK
& ORACLE_SIZE
) == 0);
1026 const uintptr_t FRAGMENT_TABLE_MASK
= FRAGMENT_TABLE_SIZE
- 1;
1027 JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK
& FRAGMENT_TABLE_SIZE
) == 0);
1029 const uintptr_t HASH_SEED
= 5381;
/*
 * Fold value i into accumulator h, DJB-style (h*33 + i), keeping the
 * result within 'mask' (mask must be 2^k - 1).
 */
static inline void
HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
{
    h = ((h << 5) + h + (mask & i)) & mask;
}
1037 static JS_REQUIRES_STACK
inline int
1038 StackSlotHash(JSContext
* cx
, unsigned slot
, const void* pc
)
1040 uintptr_t h
= HASH_SEED
;
1041 HashAccum(h
, uintptr_t(cx
->fp
->script
), ORACLE_MASK
);
1042 HashAccum(h
, uintptr_t(pc
), ORACLE_MASK
);
1043 HashAccum(h
, uintptr_t(slot
), ORACLE_MASK
);
1047 static JS_REQUIRES_STACK
inline int
1048 GlobalSlotHash(JSContext
* cx
, unsigned slot
)
1050 uintptr_t h
= HASH_SEED
;
1051 JSStackFrame
* fp
= cx
->fp
;
1056 HashAccum(h
, uintptr_t(fp
->script
), ORACLE_MASK
);
1057 HashAccum(h
, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx
, fp
->scopeChain
))), ORACLE_MASK
);
1058 HashAccum(h
, uintptr_t(slot
), ORACLE_MASK
);
1063 PCHash(jsbytecode
* pc
)
1065 return int(uintptr_t(pc
) & ORACLE_MASK
);
1070 /* Grow the oracle bitsets to their (fixed) size here, once. */
1071 _stackDontDemote
.set(ORACLE_SIZE
-1);
1072 _globalDontDemote
.set(ORACLE_SIZE
-1);
1076 /* Tell the oracle that a certain global variable should not be demoted. */
1077 JS_REQUIRES_STACK
void
1078 Oracle::markGlobalSlotUndemotable(JSContext
* cx
, unsigned slot
)
1080 #ifdef DEBUG_dvander
1081 printf("MGSU: %d [%08x]: %d\n", slot
, GlobalSlotHash(cx
, slot
),
1082 _globalDontDemote
.get(GlobalSlotHash(cx
, slot
)));
1084 _globalDontDemote
.set(GlobalSlotHash(cx
, slot
));
1087 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
1088 JS_REQUIRES_STACK
bool
1089 Oracle::isGlobalSlotUndemotable(JSContext
* cx
, unsigned slot
) const
1091 #ifdef DEBUG_dvander
1092 printf("IGSU: %d [%08x]: %d\n", slot
, GlobalSlotHash(cx
, slot
),
1093 _globalDontDemote
.get(GlobalSlotHash(cx
, slot
)));
1095 return _globalDontDemote
.get(GlobalSlotHash(cx
, slot
));
1098 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
1099 JS_REQUIRES_STACK
void
1100 Oracle::markStackSlotUndemotable(JSContext
* cx
, unsigned slot
, const void* pc
)
1102 #ifdef DEBUG_dvander
1103 printf("MSSU: %p:%d [%08x]: %d\n", pc
, slot
, StackSlotHash(cx
, slot
, pc
),
1104 _stackDontDemote
.get(StackSlotHash(cx
, slot
, pc
)));
1106 _stackDontDemote
.set(StackSlotHash(cx
, slot
, pc
));
1109 JS_REQUIRES_STACK
void
1110 Oracle::markStackSlotUndemotable(JSContext
* cx
, unsigned slot
)
1112 markStackSlotUndemotable(cx
, slot
, cx
->fp
->regs
->pc
);
1115 /* Consult with the oracle whether we shouldn't demote a certain slot. */
1116 JS_REQUIRES_STACK
bool
1117 Oracle::isStackSlotUndemotable(JSContext
* cx
, unsigned slot
, const void* pc
) const
1119 #ifdef DEBUG_dvander
1120 printf("ISSU: %p:%d [%08x]: %d\n", pc
, slot
, StackSlotHash(cx
, slot
, pc
),
1121 _stackDontDemote
.get(StackSlotHash(cx
, slot
, pc
)));
1123 return _stackDontDemote
.get(StackSlotHash(cx
, slot
, pc
));
1126 JS_REQUIRES_STACK
bool
1127 Oracle::isStackSlotUndemotable(JSContext
* cx
, unsigned slot
) const
1129 return isStackSlotUndemotable(cx
, slot
, cx
->fp
->regs
->pc
);
1132 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
1134 Oracle::markInstructionUndemotable(jsbytecode
* pc
)
1136 _pcDontDemote
.set(PCHash(pc
));
1139 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
1141 Oracle::isInstructionUndemotable(jsbytecode
* pc
) const
1143 return _pcDontDemote
.get(PCHash(pc
));
1147 Oracle::clearDemotability()
1149 _stackDontDemote
.reset();
1150 _globalDontDemote
.reset();
1151 _pcDontDemote
.reset();
1154 JS_REQUIRES_STACK
static JS_INLINE
void
1155 MarkSlotUndemotable(JSContext
* cx
, TreeInfo
* ti
, unsigned slot
)
1157 if (slot
< ti
->nStackTypes
) {
1158 oracle
.markStackSlotUndemotable(cx
, slot
);
1162 uint16
* gslots
= ti
->globalSlots
->data();
1163 oracle
.markGlobalSlotUndemotable(cx
, gslots
[slot
- ti
->nStackTypes
]);
1166 JS_REQUIRES_STACK
static JS_INLINE
void
1167 MarkSlotUndemotable(JSContext
* cx
, TreeInfo
* ti
, unsigned slot
, const void* pc
)
1169 if (slot
< ti
->nStackTypes
) {
1170 oracle
.markStackSlotUndemotable(cx
, slot
, pc
);
1174 uint16
* gslots
= ti
->globalSlots
->data();
1175 oracle
.markGlobalSlotUndemotable(cx
, gslots
[slot
- ti
->nStackTypes
]);
1178 static JS_REQUIRES_STACK
inline bool
1179 IsSlotUndemotable(JSContext
* cx
, TreeInfo
* ti
, unsigned slot
, const void* ip
)
1181 if (slot
< ti
->nStackTypes
)
1182 return oracle
.isStackSlotUndemotable(cx
, slot
, ip
);
1184 uint16
* gslots
= ti
->globalSlots
->data();
1185 return oracle
.isGlobalSlotUndemotable(cx
, gslots
[slot
- ti
->nStackTypes
]);
1188 static JS_REQUIRES_STACK
inline bool
1189 IsSlotUndemotable(JSContext
* cx
, TreeInfo
* ti
, unsigned slot
)
1191 return IsSlotUndemotable(cx
, ti
, slot
, cx
->fp
->regs
->pc
);
1194 class FrameInfoCache
1196 struct Entry
: public JSDHashEntryHdr
1202 MatchFrameInfo(JSDHashTable
*table
, const JSDHashEntryHdr
*entry
, const void *key
) {
1203 const FrameInfo
* fi1
= ((const Entry
*)entry
)->fi
;
1204 const FrameInfo
* fi2
= (const FrameInfo
*)key
;
1205 if (memcmp(fi1
, fi2
, sizeof(FrameInfo
)) != 0)
1207 return memcmp(fi1
->get_typemap(), fi2
->get_typemap(),
1208 fi1
->callerHeight
* sizeof(JSTraceType
)) == 0;
1211 static JSDHashNumber
1212 HashFrameInfo(JSDHashTable
*table
, const void *key
) {
1213 FrameInfo
* fi
= (FrameInfo
*)key
;
1214 size_t len
= sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSTraceType
);
1216 JSDHashNumber h
= 0;
1217 const unsigned char *s
= (const unsigned char*)fi
;
1218 for (size_t i
= 0; i
< len
; i
++, s
++)
1219 h
= JS_ROTATE_LEFT32(h
, 4) ^ *s
;
1223 static const JSDHashTableOps FrameCacheOps
;
1225 JSDHashTable
*table
;
1226 VMAllocator
*allocator
;
1229 FrameInfoCache(VMAllocator
*allocator
) : allocator(allocator
) {
1239 JS_DHashTableDestroy(table
);
1250 table
= JS_NewDHashTable(&FrameCacheOps
, NULL
, sizeof(Entry
),
1251 JS_DHASH_DEFAULT_CAPACITY(32));
1252 return table
!= NULL
;
1255 FrameInfo
*memoize(const FrameInfo
*fi
) {
1256 Entry
*entry
= (Entry
*)JS_DHashTableOperate(table
, fi
, JS_DHASH_ADD
);
1260 FrameInfo
* n
= (FrameInfo
*)
1261 allocator
->alloc(sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSTraceType
));
1262 memcpy(n
, fi
, sizeof(FrameInfo
) + fi
->callerHeight
* sizeof(JSTraceType
));
1269 const JSDHashTableOps
FrameInfoCache::FrameCacheOps
=
1273 FrameInfoCache::HashFrameInfo
,
1274 FrameInfoCache::MatchFrameInfo
,
1275 JS_DHashMoveEntryStub
,
1276 JS_DHashClearEntryStub
,
1277 JS_DHashFinalizeStub
,
1282 struct PCHashEntry
: public JSDHashEntryStub
{
1286 #define PC_HASH_COUNT 1024
1289 Blacklist(jsbytecode
* pc
)
1292 JS_ASSERT(*pc
== JSOP_TRACE
|| *pc
== JSOP_NOP
|| *pc
== JSOP_CALL
);
1293 if (*pc
== JSOP_CALL
) {
1294 JS_ASSERT(*(pc
+ JSOP_CALL_LENGTH
) == JSOP_TRACE
||
1295 *(pc
+ JSOP_CALL_LENGTH
) == JSOP_NOP
);
1296 *(pc
+ JSOP_CALL_LENGTH
) = JSOP_NOP
;
1297 } else if (*pc
== JSOP_TRACE
) {
1303 IsBlacklisted(jsbytecode
* pc
)
1305 if (*pc
== JSOP_NOP
)
1307 if (*pc
== JSOP_CALL
)
1308 return *(pc
+ JSOP_CALL_LENGTH
) == JSOP_NOP
;
1313 Backoff(JSContext
*cx
, jsbytecode
* pc
, Fragment
* tree
= NULL
)
1315 /* N.B. This code path cannot assume the recorder is/is not alive. */
1316 JSDHashTable
*table
= &JS_TRACE_MONITOR(cx
).recordAttempts
;
1319 PCHashEntry
*entry
= (PCHashEntry
*)
1320 JS_DHashTableOperate(table
, pc
, JS_DHASH_ADD
);
1325 JS_ASSERT(entry
->count
== 0);
1327 JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry
->hdr
)));
1328 if (entry
->count
++ > (BL_ATTEMPTS
* MAXPEERS
)) {
1337 tree
->hits() -= BL_BACKOFF
;
1340 * In case there is no entry or no table (due to OOM) or some
1341 * serious imbalance in the recording-attempt distribution on a
1342 * multitree, give each tree another chance to blacklist here as
1345 if (++tree
->recordAttempts
> BL_ATTEMPTS
)
1351 ResetRecordingAttempts(JSContext
*cx
, jsbytecode
* pc
)
1353 JSDHashTable
*table
= &JS_TRACE_MONITOR(cx
).recordAttempts
;
1355 PCHashEntry
*entry
= (PCHashEntry
*)
1356 JS_DHashTableOperate(table
, pc
, JS_DHASH_LOOKUP
);
1358 if (JS_DHASH_ENTRY_IS_FREE(&(entry
->hdr
)))
1360 JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry
->hdr
)));
1365 static inline size_t
1366 FragmentHash(const void *ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
)
1368 uintptr_t h
= HASH_SEED
;
1369 HashAccum(h
, uintptr_t(ip
), FRAGMENT_TABLE_MASK
);
1370 HashAccum(h
, uintptr_t(globalObj
), FRAGMENT_TABLE_MASK
);
1371 HashAccum(h
, uintptr_t(globalShape
), FRAGMENT_TABLE_MASK
);
1372 HashAccum(h
, uintptr_t(argc
), FRAGMENT_TABLE_MASK
);
1377 * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
1378 * pushed for the innermost JS frame. This is required as part of the fragment
1379 * key because the fragment will write those arguments back to the interpreter
1380 * stack when it exits, using its typemap, which implicitly incorporates a
1381 * given value of argc. Without this feature, a fragment could be called as an
1382 * inner tree with two different values of argc, and entry type checking or
1383 * exit frame synthesis could crash.
1385 struct VMFragment
: public Fragment
1387 VMFragment(const void* _ip
, JSObject
* _globalObj
, uint32 _globalShape
, uint32 _argc
1388 verbose_only(, uint32_t profFragID
)) :
1389 Fragment(_ip
verbose_only(, profFragID
)),
1393 globalObj(_globalObj
),
1394 globalShape(_globalShape
),
1398 inline TreeInfo
* getTreeInfo() {
1399 return (TreeInfo
*)vmprivate
;
1405 JSObject
* globalObj
;
1411 getVMFragment(JSTraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
, uint32 globalShape
,
1414 size_t h
= FragmentHash(ip
, globalObj
, globalShape
, argc
);
1415 VMFragment
* vf
= tm
->vmfragments
[h
];
1417 ! (vf
->globalObj
== globalObj
&&
1418 vf
->globalShape
== globalShape
&&
1420 vf
->argc
== argc
)) {
1427 getLoop(JSTraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
)
1429 return getVMFragment(tm
, ip
, globalObj
, globalShape
, argc
);
1433 getAnchor(JSTraceMonitor
* tm
, const void *ip
, JSObject
* globalObj
, uint32 globalShape
, uint32 argc
)
1436 uint32_t profFragID
= (js_LogController
.lcbits
& LC_FragProfile
)
1437 ? (++(tm
->lastFragID
)) : 0;
1439 VMFragment
*f
= new (*tm
->dataAlloc
) VMFragment(ip
, globalObj
, globalShape
, argc
1440 verbose_only(, profFragID
));
1443 VMFragment
*p
= getVMFragment(tm
, ip
, globalObj
, globalShape
, argc
);
1447 /* append at the end of the peer list */
1449 while ((next
= p
->peer
) != NULL
)
1453 /* this is the first fragment */
1455 size_t h
= FragmentHash(ip
, globalObj
, globalShape
, argc
);
1456 f
->next
= tm
->vmfragments
[h
];
1457 tm
->vmfragments
[h
] = f
;
1465 AssertTreeIsUnique(JSTraceMonitor
* tm
, VMFragment
* f
, TreeInfo
* ti
)
1467 JS_ASSERT(f
->root
== f
);
1470 * Check for duplicate entry type maps. This is always wrong and hints at
1471 * trace explosion since we are trying to stabilize something without
1472 * properly connecting peer edges.
1475 for (VMFragment
* peer
= getLoop(tm
, f
->ip
, f
->globalObj
, f
->globalShape
, f
->argc
);
1477 peer
= peer
->peer
) {
1478 if (!peer
->code() || peer
== f
)
1480 ti_other
= (TreeInfo
*)peer
->vmprivate
;
1481 JS_ASSERT(ti_other
);
1482 JS_ASSERT(!ti
->typeMap
.matches(ti_other
->typeMap
));
1488 AttemptCompilation(JSContext
*cx
, JSTraceMonitor
* tm
, JSObject
* globalObj
, jsbytecode
* pc
,
1491 /* If we already permanently blacklisted the location, undo that. */
1492 JS_ASSERT(*pc
== JSOP_NOP
|| *pc
== JSOP_TRACE
|| *pc
== JSOP_CALL
);
1493 if (*pc
== JSOP_NOP
)
1495 ResetRecordingAttempts(cx
, pc
);
1497 /* Breathe new life into all peer fragments at the designated loop header. */
1498 VMFragment
* f
= (VMFragment
*)getLoop(tm
, pc
, globalObj
, OBJ_SHAPE(globalObj
), argc
);
1501 * If the global object's shape changed, we can't easily find the
1502 * corresponding loop header via a hash table lookup. In this
1503 * we simply bail here and hope that the fragment has another
1504 * outstanding compilation attempt. This case is extremely rare.
1508 JS_ASSERT(f
->root
== f
);
1511 JS_ASSERT(f
->root
== f
);
1512 --f
->recordAttempts
;
1513 f
->hits() = HOTLOOP
;
1518 // Forward declarations.
1519 JS_DEFINE_CALLINFO_1(static, DOUBLE
, i2f
, INT32
, 1, 1)
1520 JS_DEFINE_CALLINFO_1(static, DOUBLE
, u2f
, UINT32
, 1, 1)
1525 if (i
->isop(LIR_i2f
))
1528 if (nanojit::AvmCore::config
.soft_float
&&
1529 i
->isop(LIR_qjoin
) &&
1530 i
->oprnd1()->isop(LIR_pcall
) &&
1531 i
->oprnd2()->isop(LIR_callh
)) {
1532 if (i
->oprnd1()->callInfo() == &i2f_ci
)
1542 if (i
->isop(LIR_u2f
))
1545 if (nanojit::AvmCore::config
.soft_float
&&
1546 i
->isop(LIR_qjoin
) &&
1547 i
->oprnd1()->isop(LIR_pcall
) &&
1548 i
->oprnd2()->isop(LIR_callh
)) {
1549 if (i
->oprnd1()->callInfo() == &u2f_ci
)
1559 if (nanojit::AvmCore::config
.soft_float
&&
1560 i
->isop(LIR_qjoin
)) {
1561 return i
->oprnd1()->arg(0);
1568 demote(LirWriter
*out
, LIns
* i
)
1571 return i
->callArgN(0);
1572 if (isi2f(i
) || isu2f(i
))
1576 JS_ASSERT(i
->isconstf());
1577 double cf
= i
->imm64f();
1578 int32_t ci
= cf
> 0x7fffffff ? uint32_t(cf
) : int32_t(cf
);
1579 return out
->insImm(ci
);
1583 isPromoteInt(LIns
* i
)
1585 if (isi2f(i
) || i
->isconst())
1589 jsdouble d
= i
->imm64f();
1590 return d
== jsdouble(jsint(d
)) && !JSDOUBLE_IS_NEGZERO(d
);
1594 isPromoteUint(LIns
* i
)
1596 if (isu2f(i
) || i
->isconst())
1600 jsdouble d
= i
->imm64f();
1601 return d
== jsdouble(jsuint(d
)) && !JSDOUBLE_IS_NEGZERO(d
);
1607 return isPromoteInt(i
) || isPromoteUint(i
);
1611 IsConst(LIns
* i
, int32_t c
)
1613 return i
->isconst() && i
->imm32() == c
;
1617 * Determine whether this operand is guaranteed to not overflow the specified
1618 * integer operation.
1621 IsOverflowSafe(LOpcode op
, LIns
* i
)
1627 return (i
->isop(LIR_and
) && ((c
= i
->oprnd2())->isconst()) &&
1628 ((c
->imm32() & 0xc0000000) == 0)) ||
1629 (i
->isop(LIR_rsh
) && ((c
= i
->oprnd2())->isconst()) &&
1630 ((c
->imm32() > 0)));
1632 JS_ASSERT(op
== LIR_mul
);
1634 return (i
->isop(LIR_and
) && ((c
= i
->oprnd2())->isconst()) &&
1635 ((c
->imm32() & 0xffff0000) == 0)) ||
1636 (i
->isop(LIR_ush
) && ((c
= i
->oprnd2())->isconst()) &&
1637 ((c
->imm32() >= 16)));
1640 /* soft float support */
1642 static jsdouble FASTCALL
1647 JS_DEFINE_CALLINFO_1(static, DOUBLE
, fneg
, DOUBLE
, 1, 1)
1649 static jsdouble FASTCALL
1655 static jsdouble FASTCALL
1661 static int32 FASTCALL
1662 fcmpeq(jsdouble x
, jsdouble y
)
1666 JS_DEFINE_CALLINFO_2(static, INT32
, fcmpeq
, DOUBLE
, DOUBLE
, 1, 1)
1668 static int32 FASTCALL
1669 fcmplt(jsdouble x
, jsdouble y
)
1673 JS_DEFINE_CALLINFO_2(static, INT32
, fcmplt
, DOUBLE
, DOUBLE
, 1, 1)
1675 static int32 FASTCALL
1676 fcmple(jsdouble x
, jsdouble y
)
1680 JS_DEFINE_CALLINFO_2(static, INT32
, fcmple
, DOUBLE
, DOUBLE
, 1, 1)
1682 static int32 FASTCALL
1683 fcmpgt(jsdouble x
, jsdouble y
)
1687 JS_DEFINE_CALLINFO_2(static, INT32
, fcmpgt
, DOUBLE
, DOUBLE
, 1, 1)
1689 static int32 FASTCALL
1690 fcmpge(jsdouble x
, jsdouble y
)
1694 JS_DEFINE_CALLINFO_2(static, INT32
, fcmpge
, DOUBLE
, DOUBLE
, 1, 1)
1696 static jsdouble FASTCALL
1697 fmul(jsdouble x
, jsdouble y
)
1701 JS_DEFINE_CALLINFO_2(static, DOUBLE
, fmul
, DOUBLE
, DOUBLE
, 1, 1)
1703 static jsdouble FASTCALL
1704 fadd(jsdouble x
, jsdouble y
)
1708 JS_DEFINE_CALLINFO_2(static, DOUBLE
, fadd
, DOUBLE
, DOUBLE
, 1, 1)
1710 static jsdouble FASTCALL
1711 fdiv(jsdouble x
, jsdouble y
)
1715 JS_DEFINE_CALLINFO_2(static, DOUBLE
, fdiv
, DOUBLE
, DOUBLE
, 1, 1)
1717 static jsdouble FASTCALL
1718 fsub(jsdouble x
, jsdouble y
)
1722 JS_DEFINE_CALLINFO_2(static, DOUBLE
, fsub
, DOUBLE
, DOUBLE
, 1, 1)
1724 // replace fpu ops with function calls
1725 class SoftFloatFilter
: public LirWriter
1728 SoftFloatFilter(LirWriter
*out
) : LirWriter(out
)
1732 return ins1(LIR_qhi
, q
);
1735 return ins1(LIR_qlo
, q
);
1738 LIns
*split(LIns
*a
) {
1739 if (a
->isQuad() && !a
->isop(LIR_qjoin
)) {
1740 // all quad-sized args must be qjoin's for soft-float
1741 a
= ins2(LIR_qjoin
, lo(a
), hi(a
));
1746 LIns
*split(const CallInfo
*call
, LInsp args
[]) {
1747 LIns
*lo
= out
->insCall(call
, args
);
1748 LIns
*hi
= out
->ins1(LIR_callh
, lo
);
1749 return out
->ins2(LIR_qjoin
, lo
, hi
);
1752 LIns
*fcall1(const CallInfo
*call
, LIns
*a
) {
1753 LIns
*args
[] = { split(a
) };
1754 return split(call
, args
);
1757 LIns
*fcall2(const CallInfo
*call
, LIns
*a
, LIns
*b
) {
1758 LIns
*args
[] = { split(b
), split(a
) };
1759 return split(call
, args
);
1762 LIns
*fcmp(const CallInfo
*call
, LIns
*a
, LIns
*b
) {
1763 LIns
*args
[] = { split(b
), split(a
) };
1764 return out
->ins2(LIR_eq
, out
->insCall(call
, args
), out
->insImm(1));
1767 LIns
*ins1(LOpcode op
, LIns
*a
) {
1770 return fcall1(&i2f_ci
, a
);
1772 return fcall1(&u2f_ci
, a
);
1774 return fcall1(&fneg_ci
, a
);
1776 return out
->ins1(op
, split(a
));
1778 return out
->ins1(op
, a
);
1782 LIns
*ins2(LOpcode op
, LIns
*a
, LIns
*b
) {
1785 return fcall2(&fadd_ci
, a
, b
);
1787 return fcall2(&fsub_ci
, a
, b
);
1789 return fcall2(&fmul_ci
, a
, b
);
1791 return fcall2(&fdiv_ci
, a
, b
);
1793 return fcmp(&fcmpeq_ci
, a
, b
);
1795 return fcmp(&fcmplt_ci
, a
, b
);
1797 return fcmp(&fcmpgt_ci
, a
, b
);
1799 return fcmp(&fcmple_ci
, a
, b
);
1801 return fcmp(&fcmpge_ci
, a
, b
);
1805 return out
->ins2(op
, a
, b
);
1808 LIns
*insCall(const CallInfo
*ci
, LInsp args
[]) {
1809 uint32_t argt
= ci
->_argtypes
;
1811 for (uint32_t i
= 0, argsizes
= argt
>> ARGSIZE_SHIFT
; argsizes
!= 0; i
++, argsizes
>>= ARGSIZE_SHIFT
)
1812 args
[i
] = split(args
[i
]);
1814 if ((argt
& ARGSIZE_MASK_ANY
) == ARGSIZE_F
) {
1815 // this function returns a double as two 32bit values, so replace
1816 // call with qjoin(qhi(call), call)
1817 return split(ci
, args
);
1819 return out
->insCall(ci
, args
);
1824 class FuncFilter
: public LirWriter
1827 FuncFilter(LirWriter
* out
):
1832 LIns
* ins2(LOpcode v
, LIns
* s0
, LIns
* s1
)
1834 if (s0
== s1
&& v
== LIR_feq
) {
1835 if (isPromote(s0
)) {
1836 // double(int) and double(uint) cannot be nan
1839 if (s0
->isop(LIR_fmul
) || s0
->isop(LIR_fsub
) || s0
->isop(LIR_fadd
)) {
1840 LIns
* lhs
= s0
->oprnd1();
1841 LIns
* rhs
= s0
->oprnd2();
1842 if (isPromote(lhs
) && isPromote(rhs
)) {
1843 // add/sub/mul promoted ints can't be nan
1847 } else if (LIR_feq
<= v
&& v
<= LIR_fge
) {
1848 if (isPromoteInt(s0
) && isPromoteInt(s1
)) {
1849 // demote fcmp to cmp
1850 v
= LOpcode(v
+ (LIR_eq
- LIR_feq
));
1851 return out
->ins2(v
, demote(out
, s0
), demote(out
, s1
));
1852 } else if (isPromoteUint(s0
) && isPromoteUint(s1
)) {
1854 v
= LOpcode(v
+ (LIR_eq
- LIR_feq
));
1856 v
= LOpcode(v
+ (LIR_ult
- LIR_lt
)); // cmp -> ucmp
1857 return out
->ins2(v
, demote(out
, s0
), demote(out
, s1
));
1859 } else if (v
== LIR_or
&&
1860 s0
->isop(LIR_lsh
) && IsConst(s0
->oprnd2(), 16) &&
1861 s1
->isop(LIR_and
) && IsConst(s1
->oprnd2(), 0xffff)) {
1862 LIns
* msw
= s0
->oprnd1();
1863 LIns
* lsw
= s1
->oprnd1();
1866 if (lsw
->isop(LIR_add
) &&
1867 lsw
->oprnd1()->isop(LIR_and
) &&
1868 lsw
->oprnd2()->isop(LIR_and
) &&
1869 IsConst(lsw
->oprnd1()->oprnd2(), 0xffff) &&
1870 IsConst(lsw
->oprnd2()->oprnd2(), 0xffff) &&
1871 msw
->isop(LIR_add
) &&
1872 msw
->oprnd1()->isop(LIR_add
) &&
1873 msw
->oprnd2()->isop(LIR_rsh
) &&
1874 msw
->oprnd1()->oprnd1()->isop(LIR_rsh
) &&
1875 msw
->oprnd1()->oprnd2()->isop(LIR_rsh
) &&
1876 IsConst(msw
->oprnd2()->oprnd2(), 16) &&
1877 IsConst(msw
->oprnd1()->oprnd1()->oprnd2(), 16) &&
1878 IsConst(msw
->oprnd1()->oprnd2()->oprnd2(), 16) &&
1879 (x
= lsw
->oprnd1()->oprnd1()) == msw
->oprnd1()->oprnd1()->oprnd1() &&
1880 (y
= lsw
->oprnd2()->oprnd1()) == msw
->oprnd1()->oprnd2()->oprnd1() &&
1881 lsw
== msw
->oprnd2()->oprnd1()) {
1882 return out
->ins2(LIR_add
, x
, y
);
1886 return out
->ins2(v
, s0
, s1
);
1889 LIns
* insCall(const CallInfo
*ci
, LIns
* args
[])
1891 if (ci
== &js_DoubleToUint32_ci
) {
1894 return out
->insImm(js_DoubleToECMAUint32(s0
->imm64f()));
1895 if (isi2f(s0
) || isu2f(s0
))
1897 } else if (ci
== &js_DoubleToInt32_ci
) {
1900 return out
->insImm(js_DoubleToECMAInt32(s0
->imm64f()));
1901 if (s0
->isop(LIR_fadd
) || s0
->isop(LIR_fsub
)) {
1902 LIns
* lhs
= s0
->oprnd1();
1903 LIns
* rhs
= s0
->oprnd2();
1904 if (isPromote(lhs
) && isPromote(rhs
)) {
1905 LOpcode op
= LOpcode(s0
->opcode() & ~LIR64
);
1906 return out
->ins2(op
, demote(out
, lhs
), demote(out
, rhs
));
1909 if (isi2f(s0
) || isu2f(s0
))
1912 // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
1914 const CallInfo
* ci2
= s0
->callInfo();
1915 if (ci2
== &js_UnboxDouble_ci
) {
1916 LIns
* args2
[] = { s0
->callArgN(0) };
1917 return out
->insCall(&js_UnboxInt32_ci
, args2
);
1918 } else if (ci2
== &js_StringToNumber_ci
) {
1919 // callArgN's ordering is that as seen by the builtin, not as stored in
1920 // args here. True story!
1921 LIns
* args2
[] = { s0
->callArgN(1), s0
->callArgN(0) };
1922 return out
->insCall(&js_StringToInt32_ci
, args2
);
1923 } else if (ci2
== &js_String_p_charCodeAt0_ci
) {
1924 // Use a fast path builtin for a charCodeAt that converts to an int right away.
1925 LIns
* args2
[] = { s0
->callArgN(0) };
1926 return out
->insCall(&js_String_p_charCodeAt0_int_ci
, args2
);
1927 } else if (ci2
== &js_String_p_charCodeAt_ci
) {
1928 LIns
* idx
= s0
->callArgN(1);
1929 // If the index is not already an integer, force it to be an integer.
1930 idx
= isPromote(idx
)
1932 : out
->insCall(&js_DoubleToInt32_ci
, &idx
);
1933 LIns
* args2
[] = { idx
, s0
->callArgN(0) };
1934 return out
->insCall(&js_String_p_charCodeAt_int_ci
, args2
);
1937 } else if (ci
== &js_BoxDouble_ci
) {
1939 JS_ASSERT(s0
->isQuad());
1940 if (isPromoteInt(s0
)) {
1941 LIns
* args2
[] = { demote(out
, s0
), args
[1] };
1942 return out
->insCall(&js_BoxInt32_ci
, args2
);
1944 if (s0
->isCall() && s0
->callInfo() == &js_UnboxDouble_ci
)
1945 return s0
->callArgN(0);
1947 return out
->insCall(ci
, args
);
1952 * Visit the values in the given JSStackFrame that the tracer cares about. This
1953 * visitor function is (implicitly) the primary definition of the native stack
1954 * area layout. There are a few other independent pieces of code that must be
1955 * maintained to assume the same layout. They are marked like this:
1957 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
1959 template <typename Visitor
>
1960 static JS_REQUIRES_STACK
bool
1961 VisitFrameSlots(Visitor
&visitor
, unsigned depth
, JSStackFrame
*fp
,
1964 if (depth
> 0 && !VisitFrameSlots(visitor
, depth
-1, fp
->down
, fp
))
1969 visitor
.setStackSlotKind("args");
1970 if (!visitor
.visitStackSlots(&fp
->argv
[-2], argSlots(fp
) + 2, fp
))
1973 visitor
.setStackSlotKind("arguments");
1974 if (!visitor
.visitStackSlots(&fp
->argsobj
, 1, fp
))
1976 visitor
.setStackSlotKind("var");
1977 if (!visitor
.visitStackSlots(fp
->slots
, fp
->script
->nfixed
, fp
))
1980 visitor
.setStackSlotKind("stack");
1981 JS_ASSERT(fp
->regs
->sp
>= StackBase(fp
));
1982 if (!visitor
.visitStackSlots(StackBase(fp
),
1983 size_t(fp
->regs
->sp
- StackBase(fp
)),
1988 int missing
= up
->fun
->nargs
- up
->argc
;
1990 visitor
.setStackSlotKind("missing");
1991 if (!visitor
.visitStackSlots(fp
->regs
->sp
, size_t(missing
), fp
))
1998 template <typename Visitor
>
1999 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2000 VisitStackSlots(Visitor
&visitor
, JSContext
*cx
, unsigned callDepth
)
2002 return VisitFrameSlots(visitor
, callDepth
, cx
->fp
, NULL
);
2005 template <typename Visitor
>
2006 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2007 VisitGlobalSlots(Visitor
&visitor
, JSContext
*cx
, JSObject
*globalObj
,
2008 unsigned ngslots
, uint16
*gslots
)
2010 for (unsigned n
= 0; n
< ngslots
; ++n
) {
2011 unsigned slot
= gslots
[n
];
2012 visitor
.visitGlobalSlot(&STOBJ_GET_SLOT(globalObj
, slot
), n
, slot
);
2016 class AdjustCallerTypeVisitor
;
2018 template <typename Visitor
>
2019 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2020 VisitGlobalSlots(Visitor
&visitor
, JSContext
*cx
, SlotList
&gslots
)
2022 VisitGlobalSlots(visitor
, cx
, JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
),
2023 gslots
.length(), gslots
.data());
2027 template <typename Visitor
>
2028 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2029 VisitSlots(Visitor
& visitor
, JSContext
* cx
, JSObject
* globalObj
,
2030 unsigned callDepth
, unsigned ngslots
, uint16
* gslots
)
2032 if (VisitStackSlots(visitor
, cx
, callDepth
))
2033 VisitGlobalSlots(visitor
, cx
, globalObj
, ngslots
, gslots
);
2036 template <typename Visitor
>
2037 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2038 VisitSlots(Visitor
& visitor
, JSContext
* cx
, unsigned callDepth
,
2039 unsigned ngslots
, uint16
* gslots
)
2041 VisitSlots(visitor
, cx
, JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
),
2042 callDepth
, ngslots
, gslots
);
2045 template <typename Visitor
>
2046 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2047 VisitSlots(Visitor
&visitor
, JSContext
*cx
, JSObject
*globalObj
,
2048 unsigned callDepth
, const SlotList
& slots
)
2050 VisitSlots(visitor
, cx
, globalObj
, callDepth
, slots
.length(),
2054 template <typename Visitor
>
2055 static JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2056 VisitSlots(Visitor
&visitor
, JSContext
*cx
, unsigned callDepth
,
2057 const SlotList
& slots
)
2059 VisitSlots(visitor
, cx
, JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
),
2060 callDepth
, slots
.length(), slots
.data());
2064 class SlotVisitorBase
{
2065 #if defined JS_JIT_SPEW
2067 char const *mStackSlotKind
;
2069 SlotVisitorBase() : mStackSlotKind(NULL
) {}
2070 JS_ALWAYS_INLINE
const char *stackSlotKind() { return mStackSlotKind
; }
2071 JS_ALWAYS_INLINE
void setStackSlotKind(char const *k
) {
2076 JS_ALWAYS_INLINE
const char *stackSlotKind() { return NULL
; }
2077 JS_ALWAYS_INLINE
void setStackSlotKind(char const *k
) {}
2081 struct CountSlotsVisitor
: public SlotVisitorBase
2087 JS_ALWAYS_INLINE
CountSlotsVisitor(jsval
* stop
= NULL
) :
2093 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2094 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
2097 if (mStop
&& size_t(mStop
- vp
) < count
) {
2098 mCount
+= size_t(mStop
- vp
);
2106 JS_ALWAYS_INLINE
unsigned count() {
2110 JS_ALWAYS_INLINE
bool stopped() {
2116 * Calculate the total number of native frame slots we need from this frame all
2117 * the way back to the entry frame, including the current stack usage.
2119 JS_REQUIRES_STACK
unsigned
2120 NativeStackSlots(JSContext
*cx
, unsigned callDepth
)
2122 JSStackFrame
* fp
= cx
->fp
;
2124 unsigned depth
= callDepth
;
2127 * Duplicate native stack layout computation: see VisitFrameSlots
2130 unsigned operands
= fp
->regs
->sp
- StackBase(fp
);
2133 slots
+= fp
->script
->nfixed
+ 1 /*argsobj*/;
2136 slots
+= 2/*callee,this*/ + argSlots(fp
);
2138 CountSlotsVisitor visitor
;
2139 VisitStackSlots(visitor
, cx
, callDepth
);
2140 JS_ASSERT(visitor
.count() == slots
&& !visitor
.stopped());
2144 JSStackFrame
* fp2
= fp
;
2146 int missing
= fp2
->fun
->nargs
- fp2
->argc
;
2150 JS_NOT_REACHED("NativeStackSlots");
2153 class CaptureTypesVisitor
: public SlotVisitorBase
2156 JSTraceType
* mTypeMap
;
2160 JS_ALWAYS_INLINE
CaptureTypesVisitor(JSContext
* cx
, JSTraceType
* typeMap
) :
2166 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2167 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
2168 JSTraceType type
= getCoercedType(*vp
);
2169 if (type
== TT_INT32
&&
2170 oracle
.isGlobalSlotUndemotable(mCx
, slot
))
2172 JS_ASSERT(type
!= TT_JSVAL
);
2173 debug_only_printf(LC_TMTracer
,
2174 "capture type global%d: %d=%c\n",
2175 n
, type
, typeChar
[type
]);
2179 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2180 visitStackSlots(jsval
*vp
, int count
, JSStackFrame
* fp
) {
2181 for (int i
= 0; i
< count
; ++i
) {
2182 JSTraceType type
= getCoercedType(vp
[i
]);
2183 if (type
== TT_INT32
&&
2184 oracle
.isStackSlotUndemotable(mCx
, length()))
2186 JS_ASSERT(type
!= TT_JSVAL
);
2187 debug_only_printf(LC_TMTracer
,
2188 "capture type %s%d: %d=%c\n",
2189 stackSlotKind(), i
, type
, typeChar
[type
]);
2195 JS_ALWAYS_INLINE
uintptr_t length() {
2196 return mPtr
- mTypeMap
;
2201 * Capture the type map for the selected slots of the global object and currently pending
2204 JS_REQUIRES_STACK
void
2205 TypeMap::captureTypes(JSContext
* cx
, JSObject
* globalObj
, SlotList
& slots
, unsigned callDepth
)
2207 setLength(NativeStackSlots(cx
, callDepth
) + slots
.length());
2208 CaptureTypesVisitor
visitor(cx
, data());
2209 VisitSlots(visitor
, cx
, globalObj
, callDepth
, slots
);
2210 JS_ASSERT(visitor
.length() == length());
2213 JS_REQUIRES_STACK
void
2214 TypeMap::captureMissingGlobalTypes(JSContext
* cx
, JSObject
* globalObj
, SlotList
& slots
, unsigned stackSlots
)
2216 unsigned oldSlots
= length() - stackSlots
;
2217 int diff
= slots
.length() - oldSlots
;
2218 JS_ASSERT(diff
>= 0);
2219 setLength(length() + diff
);
2220 CaptureTypesVisitor
visitor(cx
, data() + stackSlots
+ oldSlots
);
2221 VisitGlobalSlots(visitor
, cx
, globalObj
, diff
, slots
.data() + oldSlots
);
2224 /* Compare this type map to another one and see whether they match. */
2226 TypeMap::matches(TypeMap
& other
) const
2228 if (length() != other
.length())
2230 return !memcmp(data(), other
.data(), length());
2234 TypeMap::fromRaw(JSTraceType
* other
, unsigned numSlots
)
2236 unsigned oldLength
= length();
2237 setLength(length() + numSlots
);
2238 for (unsigned i
= 0; i
< numSlots
; i
++)
2239 get(oldLength
+ i
) = other
[i
];
2243 * Use the provided storage area to create a new type map that contains the
2244 * partial type map with the rest of it filled up from the complete type
2248 MergeTypeMaps(JSTraceType
** partial
, unsigned* plength
, JSTraceType
* complete
, unsigned clength
, JSTraceType
* mem
)
2250 unsigned l
= *plength
;
2251 JS_ASSERT(l
< clength
);
2252 memcpy(mem
, *partial
, l
* sizeof(JSTraceType
));
2253 memcpy(mem
+ l
, complete
+ l
, (clength
- l
) * sizeof(JSTraceType
));
2258 /* Specializes a tree to any missing globals, including any dependent trees. */
2259 static JS_REQUIRES_STACK
void
2260 SpecializeTreesToMissingGlobals(JSContext
* cx
, JSObject
* globalObj
, TreeInfo
* root
)
2262 TreeInfo
* ti
= root
;
2264 ti
->typeMap
.captureMissingGlobalTypes(cx
, globalObj
, *ti
->globalSlots
, ti
->nStackTypes
);
2265 JS_ASSERT(ti
->globalSlots
->length() == ti
->typeMap
.length() - ti
->nStackTypes
);
2267 for (unsigned i
= 0; i
< root
->dependentTrees
.length(); i
++) {
2268 ti
= (TreeInfo
*)root
->dependentTrees
[i
]->vmprivate
;
2270 /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
2271 if (ti
&& ti
->nGlobalTypes() < ti
->globalSlots
->length())
2272 SpecializeTreesToMissingGlobals(cx
, globalObj
, ti
);
2274 for (unsigned i
= 0; i
< root
->linkedTrees
.length(); i
++) {
2275 ti
= (TreeInfo
*)root
->linkedTrees
[i
]->vmprivate
;
2276 if (ti
&& ti
->nGlobalTypes() < ti
->globalSlots
->length())
2277 SpecializeTreesToMissingGlobals(cx
, globalObj
, ti
);
2282 TrashTree(JSContext
* cx
, Fragment
* f
);
2285 TraceRecorder::TraceRecorder(JSContext
* cx
, VMSideExit
* _anchor
, Fragment
* _fragment
,
2286 TreeInfo
* ti
, unsigned stackSlots
, unsigned ngslots
, JSTraceType
* typeMap
,
2287 VMSideExit
* innermostNestedGuard
, jsbytecode
* outer
, uint32 outerArgc
,
2288 MonitorReason reason
)
2289 : tempAlloc(*JS_TRACE_MONITOR(cx
).tempAlloc
),
2290 mark(*JS_TRACE_MONITOR(cx
).traceAlloc
),
2291 whichTreesToTrash(&tempAlloc
),
2292 cfgMerges(&tempAlloc
),
2293 monitorReason(reason
)
2295 JS_ASSERT(!_fragment
->vmprivate
&& ti
&& cx
->fp
->regs
->pc
== (jsbytecode
*)_fragment
->ip
);
2296 /* Reset the fragment state we care about in case we got a recycled fragment.
2297 This includes resetting any profiling data we might have accumulated. */
2298 _fragment
->lastIns
= NULL
;
2299 verbose_only( _fragment
->profCount
= 0; )
2300 verbose_only( _fragment
->nStaticExits
= 0; )
2301 verbose_only( _fragment
->nCodeBytes
= 0; )
2302 verbose_only( _fragment
->nExitBytes
= 0; )
2303 verbose_only( _fragment
->guardNumberer
= 1; )
2304 verbose_only( _fragment
->guardsForFrag
= NULL
; )
2305 verbose_only( _fragment
->loopLabel
= NULL
; )
2306 // don't change _fragment->profFragID, though. Once the identity of
2307 // the Fragment is set up (for profiling purposes), we can't change it.
2309 this->traceMonitor
= &JS_TRACE_MONITOR(cx
);
2310 this->globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
2311 this->lexicalBlock
= cx
->fp
->blockChain
;
2312 this->anchor
= _anchor
;
2313 this->fragment
= _fragment
;
2314 this->lirbuf
= _fragment
->lirbuf
;
2315 this->treeInfo
= ti
;
2316 this->callDepth
= _anchor
? _anchor
->calldepth
: 0;
2317 this->atoms
= FrameAtomBase(cx
, cx
->fp
);
2318 this->trashSelf
= false;
2319 this->global_dslots
= this->globalObj
->dslots
;
2320 this->loop
= true; /* default assumption is we are compiling a loop */
2321 this->outer
= outer
;
2322 this->outerArgc
= outerArgc
;
2323 this->pendingSpecializedNative
= NULL
;
2324 this->newobj_ins
= NULL
;
2325 this->loopLabel
= NULL
;
2327 guardedShapeTable
.ops
= NULL
;
2330 debug_only_print0(LC_TMMinimal
, "\n");
2331 debug_only_printf(LC_TMMinimal
, "Recording starting from %s:%u@%u (FragID=%06u)\n",
2332 ti
->treeFileName
, ti
->treeLineNumber
, ti
->treePCOffset
,
2333 _fragment
->profFragID
);
2335 debug_only_printf(LC_TMTracer
, "globalObj=%p, shape=%d\n",
2336 (void*)this->globalObj
, OBJ_SHAPE(this->globalObj
));
2337 debug_only_printf(LC_TMTreeVis
, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment
,
2341 lir
= lir_buf_writer
= new (tempAlloc
) LirBufWriter(lirbuf
);
2343 lir
= sanity_filter_1
= new (tempAlloc
) SanityFilter(lir
);
2346 if (js_LogController
.lcbits
& LC_TMRecorder
) {
2347 lir
= verbose_filter
2348 = new (tempAlloc
) VerboseWriter (tempAlloc
, lir
, lirbuf
->names
,
2352 if (nanojit::AvmCore::config
.soft_float
)
2353 lir
= float_filter
= new (tempAlloc
) SoftFloatFilter(lir
);
2354 lir
= cse_filter
= new (tempAlloc
) CseFilter(lir
, tempAlloc
);
2355 lir
= expr_filter
= new (tempAlloc
) ExprFilter(lir
);
2356 lir
= func_filter
= new (tempAlloc
) FuncFilter(lir
);
2358 lir
= sanity_filter_2
= new (tempAlloc
) SanityFilter(lir
);
2360 lir
->ins0(LIR_start
);
2362 for (int i
= 0; i
< NumSavedRegs
; ++i
)
2363 lir
->insParam(i
, 1);
2365 for (int i
= 0; i
< NumSavedRegs
; ++i
)
2366 addName(lirbuf
->savedRegs
[i
], regNames
[Assembler::savedRegs
[i
]]);
2369 lirbuf
->state
= addName(lir
->insParam(0, 0), "state");
2371 if (fragment
== fragment
->root
)
2372 loopLabel
= lir
->ins0(LIR_label
);
2374 // if profiling, drop a label, so the assembler knows to put a
2375 // frag-entry-counter increment at this point. If there's a
2376 // loopLabel, use that; else we'll have to make a dummy label
2377 // especially for this purpose.
2378 verbose_only( if (js_LogController
.lcbits
& LC_FragProfile
) {
2379 LIns
* entryLabel
= NULL
;
2380 if (fragment
== fragment
->root
) {
2381 entryLabel
= loopLabel
;
2383 entryLabel
= lir
->ins0(LIR_label
);
2385 NanoAssert(entryLabel
);
2386 NanoAssert(!fragment
->loopLabel
);
2387 fragment
->loopLabel
= entryLabel
;
2390 lirbuf
->sp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, sp
)), "sp");
2391 lirbuf
->rp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, rp
)), "rp");
2392 cx_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, cx
)), "cx");
2393 eos_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eos
)), "eos");
2394 eor_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eor
)), "eor");
2396 /* If we came from exit, we might not have enough global types. */
2397 if (ti
->globalSlots
->length() > ti
->nGlobalTypes())
2398 SpecializeTreesToMissingGlobals(cx
, globalObj
, ti
);
2400 /* read into registers all values on the stack and all globals we know so far */
2401 import(treeInfo
, lirbuf
->sp
, stackSlots
, ngslots
, callDepth
, typeMap
);
2404 * If slurping failed, there's no reason to start recording again. Emit LIR
2405 * to capture the rest of the slots, then immediately compile.
2407 if (anchor
&& anchor
->exitType
== RECURSIVE_SLURP_FAIL_EXIT
) {
2408 slurpDownFrames((jsbytecode
*)anchor
->recursive_pc
- JSOP_CALL_LENGTH
);
2412 if (fragment
== fragment
->root
) {
2414 * We poll the operation callback request flag. It is updated asynchronously whenever
2415 * the callback is to be invoked.
2417 LIns
* x
= lir
->insLoad(LIR_ld
, cx_ins
, offsetof(JSContext
, operationCallbackFlag
));
2418 guard(true, lir
->ins_eq0(x
), snapshot(TIMEOUT_EXIT
));
2422 * If we are attached to a tree call guard, make sure the guard the inner
2423 * tree exited from is what we expect it to be.
2425 if (_anchor
&& _anchor
->exitType
== NESTED_EXIT
) {
2426 LIns
* nested_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
,
2427 offsetof(InterpState
, lastTreeExitGuard
)),
2428 "lastTreeExitGuard");
2429 guard(true, lir
->ins2(LIR_peq
, nested_ins
, INS_CONSTPTR(innermostNestedGuard
)), NESTED_EXIT
);
// Destructor: tears down the recording session. Trashes the root tree and
// any trees queued in whichTreesToTrash, clears the monitor's shared LIR
// buffer, and drops guarded-shape bookkeeping.
// NOTE(review): lines 2434/2436-2437 were lost in extraction; in the full
// file the TrashTree(cx, fragment->root) call below is likely guarded by a
// condition on that missing line -- confirm against the original source.
2433 TraceRecorder::~TraceRecorder()
// Invariant: a live recorder always has a tree-info and a fragment.
2435 JS_ASSERT(treeInfo
&& fragment
);
2438 TrashTree(cx
, fragment
->root
);
// Also discard every tree that recording marked as stale.
2440 for (unsigned int i
= 0; i
< whichTreesToTrash
.length(); i
++)
2441 TrashTree(cx
, whichTreesToTrash
[i
]);
2443 /* Purge the tempAlloc used during recording. */
2445 traceMonitor
->lirbuf
->clear();
// Forget shape guards emitted during this recording; they are no longer valid.
2447 forgetGuardedShapes();
// Reports whether any allocator backing this recording has exhausted its
// budget, so recording can be aborted early rather than failing later.
// NOTE(review): return-type line and braces are outside this wrapped view.
2451 TraceRecorder::outOfMemory()
// True if the monitor's long-lived data allocator, its trace allocator,
// or this recorder's temporary allocator is out of memory.
2453 return traceMonitor
->dataAlloc
->outOfMemory() ||
2454 traceMonitor
->traceAlloc
->outOfMemory() ||
2455 tempAlloc
.outOfMemory();
2458 /* Add debug information to a LIR instruction as we emit it. */
// Associates a human-readable |name| with |ins| in the LIR name map so
// verbose Nanojit output can print it. Presumably returns |ins| for
// chaining (callers use the result) -- the return line is outside this
// wrapped view; confirm against the full file.
2460 TraceRecorder::addName(LIns
* ins
, const char* name
)
2464 * We'll only ask for verbose Nanojit when .lcbits > 0, so there's no point
2465 * in adding names otherwise.
// Only pay the naming cost when some log channel is enabled.
2467 if (js_LogController
.lcbits
> 0)
2468 lirbuf
->names
->addName(ins
, name
);
// Emits an immediate pointer constant for |obj|, first recording the object
// (boxed via OBJECT_TO_JSVAL) in the tree's gcthings list with addUnique so
// the GC keeps it alive for the lifetime of the compiled trace.
2474 TraceRecorder::insImmObj(JSObject
* obj
)
2476 treeInfo
->gcthings
.addUnique(OBJECT_TO_JSVAL(obj
));
2477 return lir
->insImmPtr((void*)obj
);
// Emits an immediate pointer constant for |fun|, rooting the function's
// underlying object (FUN_OBJECT) in the tree's gcthings list so it survives
// GC while the trace is alive.
2481 TraceRecorder::insImmFun(JSFunction
* fun
)
2483 treeInfo
->gcthings
.addUnique(OBJECT_TO_JSVAL(FUN_OBJECT(fun
)));
2484 return lir
->insImmPtr((void*)fun
);
// Emits an immediate pointer constant for |str|, rooting the string (boxed
// via STRING_TO_JSVAL) in the tree's gcthings list so the GC does not
// collect it out from under the compiled trace.
2488 TraceRecorder::insImmStr(JSString
* str
)
2490 treeInfo
->gcthings
.addUnique(STRING_TO_JSVAL(str
));
2491 return lir
->insImmPtr((void*)str
);
// Emits an immediate pointer constant for a scope property, recording it in
// the tree's sprops list (addUnique) so its refcount/lifetime bookkeeping
// covers the compiled trace.
2495 TraceRecorder::insImmSprop(JSScopeProperty
* sprop
)
2497 treeInfo
->sprops
.addUnique(sprop
);
2498 return lir
->insImmPtr((void*)sprop
);
// Narrows a pointer-sized LIR value to a 32-bit integer. On 64-bit targets
// this emits LIR_qlo (low 32 bits of the quad). NOTE(review): the
// non-NANOJIT_64BIT branch (#else/#endif) is not visible in this
// extraction -- presumably it returns |ins| unchanged; confirm against the
// full file.
2502 TraceRecorder::p2i(nanojit::LIns
* ins
)
2504 #ifdef NANOJIT_64BIT
2505 return lir
->ins1(LIR_qlo
, ins
);
2511 /* Determine the current call depth (starting with the entry frame.) */
2513 TraceRecorder::getCallDepth() const
2518 /* Determine the offset in the native global frame for a jsval we track. */
// |p| must be a tracked global slot (asserted). Slots inside the global
// object's fixed fslots array map to the first JS_INITIAL_NSLOTS
// double-sized cells after the InterpState header; slots in the dynamic
// dslots array map after those. The size_t cast makes a |p| below fslots
// wrap to a huge value and fail the range test.
2520 TraceRecorder::nativeGlobalOffset(jsval
* p
) const
2522 JS_ASSERT(isGlobal(p
));
2523 if (size_t(p
- globalObj
->fslots
) < JS_INITIAL_NSLOTS
)
2524 return sizeof(InterpState
) + size_t(p
- globalObj
->fslots
) * sizeof(double);
// Dynamic slot: bias by JS_INITIAL_NSLOTS so fixed and dynamic slots share
// one contiguous double-per-slot layout.
2525 return sizeof(InterpState
) + ((p
- globalObj
->dslots
) + JS_INITIAL_NSLOTS
) * sizeof(double);
2528 /* Determine whether a value is a global stack slot. */
// True when |p| points into the global object's fixed slots (fslots) or
// into its dynamic slots (dslots) below the object's total slot count.
// The unsigned size_t casts turn pointers before the arrays into huge
// differences, so both bounds checks are single comparisons.
2530 TraceRecorder::isGlobal(jsval
* p
) const
2532 return ((size_t(p
- globalObj
->fslots
) < JS_INITIAL_NSLOTS
) ||
2533 (size_t(p
- globalObj
->dslots
) < (STOBJ_NSLOTS(globalObj
) - JS_INITIAL_NSLOTS
)));
2537 * Return the offset in the native stack for the given jsval. More formally,
2538 * |p| must be the address of a jsval that is represented in the native stack
2539 * area. The return value is the offset, from InterpState::stackBase, in bytes,
2540 * where the native representation of |*p| is stored. To get the offset
2541 * relative to InterpState::sp, subtract TreeInfo::nativeStackBase.
// NOTE(review): the final return statement is outside this wrapped view;
// the visible code only computes |offset|.
2543 JS_REQUIRES_STACK
ptrdiff_t
2544 TraceRecorder::nativeStackOffset(jsval
* p
) const
// Count double-sized native slots preceding |p| by walking the tracked
// stack slots with CountSlotsVisitor.
2546 CountSlotsVisitor
visitor(p
);
2547 VisitStackSlots(visitor
, cx
, callDepth
);
2548 size_t offset
= visitor
.count() * sizeof(double);
2551 * If it's not in a pending frame, it must be on the stack of the current
2552 * frame above sp but below fp->slots + script->nslots.
// visitor.stopped() is false when the walk never reached |p|; then |p|
// lives in the current frame's operand area above regs->sp.
2554 if (!visitor
.stopped()) {
2555 JS_ASSERT(size_t(p
- cx
->fp
->slots
) < cx
->fp
->script
->nslots
);
2556 offset
+= size_t(p
- cx
->fp
->regs
->sp
) * sizeof(double);
2561 /* Track the maximum number of native frame slots we need during execution. */
// Monotonically raises treeInfo->maxNativeStackSlots to |slots|; never
// shrinks it. Used to size the native stack area for this tree.
2563 TraceRecorder::trackNativeStackUse(unsigned slots
)
2565 if (slots
> treeInfo
->maxNativeStackSlots
)
2566 treeInfo
->maxNativeStackSlots
= slots
;
2570 * Unbox a jsval into a slot. Slots are wide enough to hold double values
2571 * directly (instead of storing a pointer to them). We assert instead of
2572 * type checking. The caller must ensure the types are compatible.
2575 ValueToNative(JSContext
* cx
, jsval v
, JSTraceType type
, double* slot
)
2577 uint8_t tag
= JSVAL_TAG(v
);
2580 JS_ASSERT(tag
== JSVAL_OBJECT
);
2581 JS_ASSERT(!JSVAL_IS_NULL(v
) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v
)));
2582 *(JSObject
**)slot
= JSVAL_TO_OBJECT(v
);
2583 debug_only_printf(LC_TMTracer
,
2584 "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v
),
2587 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v
))->name
);
2592 if (JSVAL_IS_INT(v
))
2593 *(jsint
*)slot
= JSVAL_TO_INT(v
);
2594 else if (tag
== JSVAL_DOUBLE
&& JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v
), i
))
2597 JS_ASSERT(JSVAL_IS_INT(v
));
2598 debug_only_printf(LC_TMTracer
, "int<%d> ", *(jsint
*)slot
);
2603 if (JSVAL_IS_INT(v
))
2604 d
= JSVAL_TO_INT(v
);
2606 d
= *JSVAL_TO_DOUBLE(v
);
2607 JS_ASSERT(JSVAL_IS_INT(v
) || JSVAL_IS_DOUBLE(v
));
2608 *(jsdouble
*)slot
= d
;
2609 debug_only_printf(LC_TMTracer
, "double<%g> ", d
);
2613 JS_NOT_REACHED("found jsval type in an entry type map");
2617 JS_ASSERT(tag
== JSVAL_STRING
);
2618 *(JSString
**)slot
= JSVAL_TO_STRING(v
);
2619 debug_only_printf(LC_TMTracer
, "string<%p> ", (void*)(*(JSString
**)slot
));
2623 JS_ASSERT(tag
== JSVAL_OBJECT
);
2624 *(JSObject
**)slot
= NULL
;
2625 debug_only_print0(LC_TMTracer
, "null ");
2628 case TT_PSEUDOBOOLEAN
:
2629 /* Watch out for pseudo-booleans. */
2630 JS_ASSERT(tag
== JSVAL_SPECIAL
);
2631 *(JSBool
*)slot
= JSVAL_TO_SPECIAL(v
);
2632 debug_only_printf(LC_TMTracer
, "pseudoboolean<%d> ", *(JSBool
*)slot
);
2636 JS_ASSERT(tag
== JSVAL_OBJECT
);
2637 JSObject
* obj
= JSVAL_TO_OBJECT(v
);
2638 *(JSObject
**)slot
= obj
;
2640 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, obj
);
2641 debug_only_printf(LC_TMTracer
,
2642 "function<%p:%s> ", (void*) obj
,
2644 ? JS_GetStringBytes(ATOM_TO_STRING(fun
->atom
))
2651 JS_NOT_REACHED("unexpected type");
2655 * We maintain an emergency pool of doubles so we can recover safely if a trace
2656 * runs out of memory (doubles or objects).
// Pops one preallocated double jsval off the monitor's reserved pool.
// Asserts the pool is non-empty; callers are responsible for refilling it
// (see ReplenishReservedPool) before it drains.
2659 AllocateDoubleFromReservedPool(JSContext
* cx
)
2661 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
2662 JS_ASSERT(tm
->reservedDoublePoolPtr
> tm
->reservedDoublePool
);
// Pre-decrement: pointer moves down, then the slot is read and returned.
2663 return *--tm
->reservedDoublePoolPtr
;
2667 ReplenishReservedPool(JSContext
* cx
, JSTraceMonitor
* tm
)
2669 /* We should not be called with a full pool. */
2670 JS_ASSERT((size_t) (tm
->reservedDoublePoolPtr
- tm
->reservedDoublePool
) <
2671 MAX_NATIVE_STACK_SLOTS
);
2674 * When the GC runs in js_NewDoubleInRootedValue, it resets
2675 * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
2677 JSRuntime
* rt
= cx
->runtime
;
2678 uintN gcNumber
= rt
->gcNumber
;
2679 uintN lastgcNumber
= gcNumber
;
2680 jsval
* ptr
= tm
->reservedDoublePoolPtr
;
2681 while (ptr
< tm
->reservedDoublePool
+ MAX_NATIVE_STACK_SLOTS
) {
2682 if (!js_NewDoubleInRootedValue(cx
, 0.0, ptr
))
2685 /* Check if the last call to js_NewDoubleInRootedValue GC'd. */
2686 if (rt
->gcNumber
!= lastgcNumber
) {
2687 lastgcNumber
= rt
->gcNumber
;
2688 JS_ASSERT(tm
->reservedDoublePoolPtr
== tm
->reservedDoublePool
);
2689 ptr
= tm
->reservedDoublePool
;
2692 * Have we GC'd more than once? We're probably running really
2693 * low on memory, bail now.
2695 if (uintN(rt
->gcNumber
- gcNumber
) > uintN(1))
2701 tm
->reservedDoublePoolPtr
= ptr
;
2706 * Already massive GC pressure, no need to hold doubles back.
2707 * We won't run any native code anyway.
2709 tm
->reservedDoublePoolPtr
= tm
->reservedDoublePool
;
2714 JSTraceMonitor::flush()
2716 AUDIT(cacheFlushed
);
2718 // recover profiling data from expiring Fragments
2720 for (size_t i
= 0; i
< FRAGMENT_TABLE_SIZE
; ++i
) {
2721 for (VMFragment
*f
= vmfragments
[i
]; f
; f
= f
->next
) {
2722 JS_ASSERT(f
->root
== f
);
2723 for (VMFragment
*p
= f
; p
; p
= p
->peer
)
2724 js_FragProfiling_FragFinalizer(p
, this);
2730 for (Seq
<Fragment
*>* f
= branches
; f
; f
= f
->tail
)
2731 js_FragProfiling_FragFinalizer(f
->head
, this);
2734 frameCache
->reset();
2736 traceAlloc
->reset();
2739 Allocator
& alloc
= *dataAlloc
;
2741 for (size_t i
= 0; i
< MONITOR_N_GLOBAL_STATES
; ++i
) {
2742 globalStates
[i
].globalShape
= -1;
2743 globalStates
[i
].globalSlots
= new (alloc
) SlotList(&alloc
);
2746 assembler
= new (alloc
) Assembler(*codeAlloc
, alloc
, core
, &js_LogController
);
2747 lirbuf
= new (alloc
) LirBuffer(*tempAlloc
);
2748 reLirBuf
= new (alloc
) LirBuffer(*reTempAlloc
);
2749 verbose_only( branches
= NULL
; )
2752 labels
= new (alloc
) LabelMap(alloc
, &js_LogController
);
2754 lirbuf
->names
= new (alloc
) LirNameMap(alloc
, labels
);
2757 memset(&vmfragments
[0], 0, FRAGMENT_TABLE_SIZE
* sizeof(VMFragment
*));
2758 reFragments
= new (alloc
) REHashMap(alloc
);
2760 needFlush
= JS_FALSE
;
2764 JSTraceMonitor::mark(JSTracer
* trc
)
2766 if (!trc
->context
->runtime
->gcFlushCodeCaches
) {
2767 for (size_t i
= 0; i
< FRAGMENT_TABLE_SIZE
; ++i
) {
2768 VMFragment
* f
= vmfragments
[i
];
2770 TreeInfo
* ti
= (TreeInfo
*)f
->vmprivate
;
2772 jsval
* vp
= ti
->gcthings
.data();
2773 unsigned len
= ti
->gcthings
.length();
2776 JS_SET_TRACING_NAME(trc
, "jitgcthing");
2777 JS_CallTracer(trc
, JSVAL_TO_TRACEABLE(v
), JSVAL_TRACE_KIND(v
));
2779 JSScopeProperty
** spropp
= ti
->sprops
.data();
2780 len
= ti
->sprops
.length();
2782 JSScopeProperty
* sprop
= *spropp
++;
2796 * Box a value from the native stack back into the jsval format. Integers that
2797 * are too large to fit into a jsval are automatically boxed into
2798 * heap-allocated doubles.
2800 template <typename E
>
2802 NativeToValueBase(JSContext
* cx
, jsval
& v
, JSTraceType type
, double* slot
)
2808 v
= OBJECT_TO_JSVAL(*(JSObject
**)slot
);
2809 JS_ASSERT(v
!= JSVAL_ERROR_COOKIE
); /* don't leak JSVAL_ERROR_COOKIE */
2810 debug_only_printf(LC_TMTracer
,
2811 "object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v
),
2814 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v
))->name
);
2819 debug_only_printf(LC_TMTracer
, "int<%d> ", i
);
2821 if (INT_FITS_IN_JSVAL(i
)) {
2822 v
= INT_TO_JSVAL(i
);
2829 debug_only_printf(LC_TMTracer
, "double<%g> ", d
);
2830 if (JSDOUBLE_IS_INT(d
, i
))
2834 * It's not safe to trigger the GC here, so use an emergency heap if we
2835 * are out of double boxes.
2837 if (cx
->doubleFreeList
) {
2841 js_NewDoubleInRootedValue(cx
, d
, &v
);
2845 return E::handleDoubleOOM(cx
, d
, v
);
2850 JS_ASSERT(v
!= JSVAL_ERROR_COOKIE
); /* don't leak JSVAL_ERROR_COOKIE */
2851 debug_only_printf(LC_TMTracer
, "box<%p> ", (void*)v
);
2855 v
= STRING_TO_JSVAL(*(JSString
**)slot
);
2856 debug_only_printf(LC_TMTracer
, "string<%p> ", (void*)(*(JSString
**)slot
));
2860 JS_ASSERT(*(JSObject
**)slot
== NULL
);
2862 debug_only_printf(LC_TMTracer
, "null<%p> ", (void*)(*(JSObject
**)slot
));
2865 case TT_PSEUDOBOOLEAN
:
2866 /* Watch out for pseudo-booleans. */
2867 v
= SPECIAL_TO_JSVAL(*(JSBool
*)slot
);
2868 debug_only_printf(LC_TMTracer
, "boolean<%d> ", *(JSBool
*)slot
);
2872 JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject
**)slot
));
2873 v
= OBJECT_TO_JSVAL(*(JSObject
**)slot
);
2875 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, JSVAL_TO_OBJECT(v
));
2876 debug_only_printf(LC_TMTracer
,
2877 "function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v
),
2879 ? JS_GetStringBytes(ATOM_TO_STRING(fun
->atom
))
2888 struct ReserveDoubleOOMHandler
{
2889 static bool handleDoubleOOM(JSContext
*cx
, double d
, jsval
& v
) {
2890 v
= AllocateDoubleFromReservedPool(cx
);
2891 JS_ASSERT(JSVAL_IS_DOUBLE(v
) && *JSVAL_TO_DOUBLE(v
) == 0.0);
2892 *JSVAL_TO_DOUBLE(v
) = d
;
2898 NativeToValue(JSContext
* cx
, jsval
& v
, JSTraceType type
, double* slot
)
2903 NativeToValueBase
<ReserveDoubleOOMHandler
>(cx
, v
, type
, slot
);
2907 struct FailDoubleOOMHandler
{
2908 static bool handleDoubleOOM(JSContext
*cx
, double d
, jsval
& v
) {
2914 js_NativeToValue(JSContext
* cx
, jsval
& v
, JSTraceType type
, double* slot
)
2916 return NativeToValueBase
<FailDoubleOOMHandler
>(cx
, v
, type
, slot
);
2919 class BuildNativeFrameVisitor
: public SlotVisitorBase
2922 JSTraceType
*mTypeMap
;
2926 BuildNativeFrameVisitor(JSContext
*cx
,
2927 JSTraceType
*typemap
,
2936 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2937 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
2938 debug_only_printf(LC_TMTracer
, "global%d: ", n
);
2939 ValueToNative(mCx
, *vp
, *mTypeMap
++, &mGlobal
[slot
]);
2942 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
2943 visitStackSlots(jsval
*vp
, int count
, JSStackFrame
* fp
) {
2944 for (int i
= 0; i
< count
; ++i
) {
2945 debug_only_printf(LC_TMTracer
, "%s%d: ", stackSlotKind(), i
);
2946 ValueToNative(mCx
, *vp
++, *mTypeMap
++, mStack
++);
2952 static JS_REQUIRES_STACK
void
2953 BuildNativeFrame(JSContext
*cx
, JSObject
*globalObj
, unsigned callDepth
,
2954 unsigned ngslots
, uint16
*gslots
,
2955 JSTraceType
*typeMap
, double *global
, double *stack
)
2957 BuildNativeFrameVisitor
visitor(cx
, typeMap
, global
, stack
);
2958 VisitSlots(visitor
, cx
, globalObj
, callDepth
, ngslots
, gslots
);
2959 debug_only_print0(LC_TMTracer
, "\n");
2962 class FlushNativeGlobalFrameVisitor
: public SlotVisitorBase
2965 JSTraceType
*mTypeMap
;
2968 FlushNativeGlobalFrameVisitor(JSContext
*cx
,
2969 JSTraceType
*typeMap
,
2976 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
2977 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
2978 debug_only_printf(LC_TMTracer
, "global%d=", n
);
2979 NativeToValue(mCx
, *vp
, *mTypeMap
++, &mGlobal
[slot
]);
2983 class FlushNativeStackFrameVisitor
: public SlotVisitorBase
2986 const JSTraceType
*mInitTypeMap
;
2987 const JSTraceType
*mTypeMap
;
2990 unsigned mIgnoreSlots
;
2992 FlushNativeStackFrameVisitor(JSContext
*cx
,
2993 const JSTraceType
*typeMap
,
2996 unsigned ignoreSlots
) :
2998 mInitTypeMap(typeMap
),
3002 mIgnoreSlots(ignoreSlots
)
3005 const JSTraceType
* getTypeMap()
3010 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
3011 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
3012 for (size_t i
= 0; i
< count
; ++i
) {
3015 debug_only_printf(LC_TMTracer
, "%s%u=", stackSlotKind(), unsigned(i
));
3016 if (unsigned(mTypeMap
- mInitTypeMap
) >= mIgnoreSlots
)
3017 NativeToValue(mCx
, *vp
, *mTypeMap
, mStack
);
3026 /* Box the given native frame into a JS frame. This is infallible. */
3027 static JS_REQUIRES_STACK
void
3028 FlushNativeGlobalFrame(JSContext
*cx
, double *global
, unsigned ngslots
,
3029 uint16
*gslots
, JSTraceType
*typemap
)
3031 FlushNativeGlobalFrameVisitor
visitor(cx
, typemap
, global
);
3032 JSObject
*globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
3033 VisitGlobalSlots(visitor
, cx
, globalObj
, ngslots
, gslots
);
3034 debug_only_print0(LC_TMTracer
, "\n");
3038 * Returns the number of values on the native stack, excluding the innermost
3039 * frame. This walks all FrameInfos on the native frame stack and sums the
3040 * slot usage of each frame.
// Sums callerHeight over every FrameInfo from callstackBase up to (but not
// including) rp + callDepth, yielding the native-stack slot position of the
// innermost frame.
3043 StackDepthFromCallStack(InterpState
* state
, uint32 callDepth
)
3045 int32 nativeStackFramePos
= 0;
3047 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
3048 for (FrameInfo
** fip
= state
->callstackBase
; fip
< state
->rp
+ callDepth
; fip
++)
3049 nativeStackFramePos
+= (*fip
)->callerHeight
;
3050 return nativeStackFramePos
;
3054 * Generic function to read upvars on trace from slots of active frames.
3055 * T Traits type parameter. Must provide static functions:
3056 * interp_get(fp, slot) Read the value out of an interpreter frame.
3057 * native_slot(argc, slot) Return the position of the desired value in the on-trace
3058 * stack frame (with position 0 being callee).
3060 * upvarLevel Static level of the function containing the upvar definition
3061 * slot Identifies the value to get. The meaning is defined by the traits type.
3062 * callDepth Call depth of current point relative to trace entry
3064 template<typename T
>
3066 GetUpvarOnTrace(JSContext
* cx
, uint32 upvarLevel
, int32 slot
, uint32 callDepth
, double* result
)
3068 InterpState
* state
= cx
->interpState
;
3069 FrameInfo
** fip
= state
->rp
+ callDepth
;
3072 * First search the FrameInfo call stack for an entry containing our
3073 * upvar, namely one with level == upvarLevel. The first FrameInfo is a
3074 * transition from the entry frame to some callee. However, it is not
3075 * known (from looking at the FrameInfo) whether the entry frame had a
3076 * callee. Rather than special-case this or insert more logic into the
3077 * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
3078 * |>= base|), and let the code after the loop handle it.
3080 int32 stackOffset
= StackDepthFromCallStack(state
, callDepth
);
3081 while (--fip
> state
->callstackBase
) {
3082 FrameInfo
* fi
= *fip
;
3085 * The loop starts aligned to the top of the stack, so move down to the first meaningful
3086 * callee. Then read the callee directly from the frame.
3088 stackOffset
-= fi
->callerHeight
;
3089 JSObject
* callee
= *(JSObject
**)(&state
->stackBase
[stackOffset
]);
3090 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, callee
);
3091 uintN calleeLevel
= fun
->u
.i
.script
->staticLevel
;
3092 if (calleeLevel
== upvarLevel
) {
3094 * Now find the upvar's value in the native stack. stackOffset is
3095 * the offset of the start of the activation record corresponding
3096 * to *fip in the native stack.
3098 uint32 native_slot
= T::native_slot(fi
->callerArgc
, slot
);
3099 *result
= state
->stackBase
[stackOffset
+ native_slot
];
3100 return fi
->get_typemap()[native_slot
];
3104 // Next search the trace entry frame, which is not in the FrameInfo stack.
3105 if (state
->outermostTree
->script
->staticLevel
== upvarLevel
) {
3106 uint32 argc
= ((VMFragment
*) state
->outermostTree
->fragment
)->argc
;
3107 uint32 native_slot
= T::native_slot(argc
, slot
);
3108 *result
= state
->stackBase
[native_slot
];
3109 return state
->callstackBase
[0]->get_typemap()[native_slot
];
3113 * If we did not find the upvar in the frames for the active traces,
3114 * then we simply get the value from the interpreter state.
3116 JS_ASSERT(upvarLevel
< JS_DISPLAY_SIZE
);
3117 JSStackFrame
* fp
= cx
->display
[upvarLevel
];
3118 jsval v
= T::interp_get(fp
, slot
);
3119 JSTraceType type
= getCoercedType(v
);
3120 ValueToNative(cx
, v
, type
, result
);
3124 // For this traits type, 'slot' is the argument index, which may be -2 for callee.
// Traits for GetUpvarOnTrace<T> when the upvar is a function argument.
3125 struct UpvarArgTraits
{
// Interpreter fallback: read the argument straight out of fp->argv.
3126 static jsval
interp_get(JSStackFrame
* fp
, int32 slot
) {
3127 return fp
->argv
[slot
];
// On-trace frame layout puts callee and |this| first, so the native
// position of argument |slot| is biased by 2.
3130 static uint32
native_slot(uint32 argc
, int32 slot
) {
3131 return 2 /*callee,this*/ + slot
;
// Trace-callable entry point: fetch an upvar that is a function argument.
// Thin wrapper delegating to GetUpvarOnTrace specialized with
// UpvarArgTraits; writes the value into *result and returns its type tag.
3136 GetUpvarArgOnTrace(JSContext
* cx
, uint32 upvarLevel
, int32 slot
, uint32 callDepth
, double* result
)
3138 return GetUpvarOnTrace
<UpvarArgTraits
>(cx
, upvarLevel
, slot
, callDepth
, result
);
3141 // For this traits type, 'slot' is an index into the local slots array.
// Traits for GetUpvarOnTrace<T> when the upvar is a local variable.
3142 struct UpvarVarTraits
{
// Interpreter fallback: read the local out of fp->slots.
3143 static jsval
interp_get(JSStackFrame
* fp
, int32 slot
) {
3144 return fp
->slots
[slot
];
// On-trace layout: callee, |this|, and the arguments object occupy the
// first 3 positions, followed by |argc| arguments, then the locals.
3147 static uint32
native_slot(uint32 argc
, int32 slot
) {
3148 return 3 /*callee,this,arguments*/ + argc
+ slot
;
// Trace-callable entry point: fetch an upvar that is a local variable.
// Thin wrapper delegating to GetUpvarOnTrace specialized with
// UpvarVarTraits; writes the value into *result and returns its type tag.
3153 GetUpvarVarOnTrace(JSContext
* cx
, uint32 upvarLevel
, int32 slot
, uint32 callDepth
, double* result
)
3155 return GetUpvarOnTrace
<UpvarVarTraits
>(cx
, upvarLevel
, slot
, callDepth
, result
);
3159 * For this traits type, 'slot' is an index into the stack area (within slots,
3160 * after nfixed) of a frame with no function. (On trace, the top-level frame is
3161 * the only one that can have no function.)
3163 struct UpvarStackTraits
{
3164 static jsval
interp_get(JSStackFrame
* fp
, int32 slot
) {
3165 return fp
->slots
[slot
+ fp
->script
->nfixed
];
3168 static uint32
native_slot(uint32 argc
, int32 slot
) {
3170 * Locals are not imported by the tracer when the frame has no
3171 * function, so we do not add fp->script->nfixed.
3173 JS_ASSERT(argc
== 0);
3179 GetUpvarStackOnTrace(JSContext
* cx
, uint32 upvarLevel
, int32 slot
, uint32 callDepth
,
3182 return GetUpvarOnTrace
<UpvarStackTraits
>(cx
, upvarLevel
, slot
, callDepth
, result
);
3185 // Parameters needed to access a value from a closure on trace.
3186 struct ClosureVarInfo
3191 uint32 resolveFlags
;
3195 * Generic function to read upvars from Call objects of active heavyweight functions.
3196 * call Callee Function object in which the upvar is accessed.
3198 template<typename T
>
3200 GetFromClosure(JSContext
* cx
, JSObject
* call
, const ClosureVarInfo
* cv
, double* result
)
3202 JS_ASSERT(OBJ_GET_CLASS(cx
, call
) == &js_CallClass
);
3204 InterpState
* state
= cx
->interpState
;
3207 int32 stackOffset
= StackDepthFromCallStack(state
, cv
->callDepth
);
3208 FrameInfo
** fip
= state
->rp
+ cv
->callDepth
;
3209 while (--fip
> state
->callstackBase
) {
3210 FrameInfo
* fi
= *fip
;
3211 JSObject
* callee
= *(JSObject
**)(&state
->stackBase
[stackOffset
]);
3212 if (callee
== call
) {
3213 // This is not reachable as long as JSOP_LAMBDA is not traced:
3214 // - The upvar is found at this point only if the upvar was defined on a frame that was
3215 // entered on this trace.
3216 // - The upvar definition must be (dynamically, and thus on trace) before the definition
3217 // of the function that uses the upvar.
3218 // - Therefore, if the upvar is found at this point, the function definition JSOP_LAMBDA
3220 JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
3222 stackOffset
-= fi
->callerHeight
;
3227 * Here we specifically want to check the call object of the trace entry frame.
3229 uint32 slot
= cv
->slot
;
3230 VOUCH_DOES_NOT_REQUIRE_STACK();
3231 if (cx
->fp
->callobj
== call
) {
3232 slot
= T::adj_slot(cx
->fp
, slot
);
3233 *result
= state
->stackBase
[slot
];
3234 return state
->callstackBase
[0]->get_typemap()[slot
];
3237 JSStackFrame
* fp
= (JSStackFrame
*) call
->getPrivate();
3240 v
= T::slots(fp
)[slot
];
3242 JS_ASSERT(cv
->resolveFlags
!= JSRESOLVE_INFER
);
3243 JSAutoResolveFlags
rf(cx
, cv
->resolveFlags
);
3247 js_GetPropertyHelper(cx
, call
, cv
->id
, JSGET_METHOD_BARRIER
, &v
);
3250 JSTraceType type
= getCoercedType(v
);
3251 ValueToNative(cx
, v
, type
, result
);
// Traits for GetFromClosure<T> when the closed-over value is an argument.
3255 struct ArgClosureTraits
// Map a Call-object slot to its native-stack position: the visible code
// offsets by the frame's argc. Interpreter values live in fp->argv.
3257 static inline uint32
adj_slot(JSStackFrame
* fp
, uint32 slot
) { return fp
->argc
+ slot
; }
3258 static inline jsval
* slots(JSStackFrame
* fp
) { return fp
->argv
; }
// Trace-callable entry point: read a closed-over argument from a Call
// object. Delegates to GetFromClosure specialized with ArgClosureTraits;
// writes the value into *result and returns its type tag.
3264 GetClosureArg(JSContext
* cx
, JSObject
* callee
, const ClosureVarInfo
* cv
, double* result
)
3266 return GetFromClosure
<ArgClosureTraits
>(cx
, callee
, cv
, result
);
// Traits for GetFromClosure<T> when the closed-over value is a local var.
3269 struct VarClosureTraits
// Locals need no argc bias here; interpreter values live in fp->slots.
3271 static inline uint32
adj_slot(JSStackFrame
* fp
, uint32 slot
) { return slot
; }
3272 static inline jsval
* slots(JSStackFrame
* fp
) { return fp
->slots
; }
// Trace-callable entry point: read a closed-over local variable from a
// Call object. Delegates to GetFromClosure specialized with
// VarClosureTraits; writes the value into *result and returns its type tag.
3278 GetClosureVar(JSContext
* cx
, JSObject
* callee
, const ClosureVarInfo
* cv
, double* result
)
3280 return GetFromClosure
<VarClosureTraits
>(cx
, callee
, cv
, result
);
3284 * Box the given native stack frame into the virtual machine stack. This
3287 * @param callDepth the distance between the entry frame into our trace and
3288 * cx->fp when we make this call. If this is not called as a
3289 * result of a nested exit, callDepth is 0.
3290 * @param mp an array of JSTraceTypes that indicate what the types of the things
3292 * @param np pointer to the native stack. We want to copy values from here to
3293 * the JS stack as needed.
3294 * @param stopFrame if non-null, this frame and everything above it should not
3296 * @return the number of things we popped off of np.
3298 static JS_REQUIRES_STACK
int
3299 FlushNativeStackFrame(JSContext
* cx
, unsigned callDepth
, const JSTraceType
* mp
, double* np
,
3300 JSStackFrame
* stopFrame
, unsigned ignoreSlots
)
3302 jsval
* stopAt
= stopFrame
? &stopFrame
->argv
[-2] : NULL
;
3304 /* Root all string and object references first (we don't need to call the GC for this). */
3305 FlushNativeStackFrameVisitor
visitor(cx
, mp
, np
, stopAt
, ignoreSlots
);
3306 VisitStackSlots(visitor
, cx
, callDepth
);
3308 // Restore thisv from the now-restored argv[-1] in each pending frame.
3309 // Keep in mind that we didn't restore frames at stopFrame and above!
3310 // Scope to keep |fp| from leaking into the macros we're using.
3312 unsigned n
= callDepth
+1; // +1 to make sure we restore the entry frame
3313 JSStackFrame
* fp
= cx
->fp
;
3315 for (; fp
!= stopFrame
; fp
= fp
->down
) {
3320 // Skip over stopFrame itself.
3325 for (; n
!= 0; fp
= fp
->down
) {
3329 js_GetArgsPrivateNative(JSVAL_TO_OBJECT(fp
->argsobj
))) {
3330 JSVAL_TO_OBJECT(fp
->argsobj
)->setPrivate(fp
);
3334 * We might return from trace with a different callee object, but it still
3335 * has to be the same JSFunction (FIXME: bug 471425, eliminate fp->callee).
3337 JS_ASSERT(JSVAL_IS_OBJECT(fp
->argv
[-1]));
3338 JS_ASSERT(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(fp
->argv
[-2])));
3339 JS_ASSERT(GET_FUNCTION_PRIVATE(cx
, JSVAL_TO_OBJECT(fp
->argv
[-2])) ==
3340 GET_FUNCTION_PRIVATE(cx
, fp
->callee()));
3341 JS_ASSERT(GET_FUNCTION_PRIVATE(cx
, fp
->callee()) == fp
->fun
);
3344 * SynthesizeFrame sets scopeChain to NULL, because we can't calculate the
3345 * correct scope chain until we have the final callee. Calculate the real
3346 * scope object here.
3348 if (!fp
->scopeChain
) {
3349 fp
->scopeChain
= OBJ_GET_PARENT(cx
, JSVAL_TO_OBJECT(fp
->argv
[-2]));
3350 if (fp
->fun
->flags
& JSFUN_HEAVYWEIGHT
) {
3352 * Set hookData to null because the failure case for js_GetCallObject
3353 * involves it calling the debugger hook.
3355 * Allocating the Call object must not fail, so use an object
3356 * previously reserved by ExecuteTree if needed.
3358 void* hookData
= ((JSInlineFrame
*)fp
)->hookData
;
3359 ((JSInlineFrame
*)fp
)->hookData
= NULL
;
3360 JS_ASSERT(!JS_TRACE_MONITOR(cx
).useReservedObjects
);
3361 JS_TRACE_MONITOR(cx
).useReservedObjects
= JS_TRUE
;
3365 js_GetCallObject(cx
, fp
);
3367 JS_TRACE_MONITOR(cx
).useReservedObjects
= JS_FALSE
;
3368 ((JSInlineFrame
*)fp
)->hookData
= hookData
;
3371 fp
->thisv
= fp
->argv
[-1];
3372 if (fp
->flags
& JSFRAME_CONSTRUCTING
) // constructors always compute 'this'
3373 fp
->flags
|= JSFRAME_COMPUTED_THIS
;
3377 debug_only_print0(LC_TMTracer
, "\n");
3378 return visitor
.getTypeMap() - mp
;
3381 /* Emit load instructions onto the trace that read the initial stack state. */
3382 JS_REQUIRES_STACK
void
3383 TraceRecorder::import(LIns
* base
, ptrdiff_t offset
, jsval
* p
, JSTraceType t
,
3384 const char *prefix
, uintN index
, JSStackFrame
*fp
)
3387 if (t
== TT_INT32
) { /* demoted */
3388 JS_ASSERT(isInt32(*p
));
3391 * Ok, we have a valid demotion attempt pending, so insert an integer
3392 * read and promote it to double since all arithmetic operations expect
3393 * to see doubles on entry. The first op to use this slot will emit a
3394 * f2i cast which will cancel out the i2f we insert here.
3396 ins
= lir
->insLoad(LIR_ld
, base
, offset
);
3397 ins
= lir
->ins1(LIR_i2f
, ins
);
3399 JS_ASSERT_IF(t
!= TT_JSVAL
, isNumber(*p
) == (t
== TT_DOUBLE
));
3400 if (t
== TT_DOUBLE
) {
3401 ins
= lir
->insLoad(LIR_ldq
, base
, offset
);
3402 } else if (t
== TT_PSEUDOBOOLEAN
) {
3403 ins
= lir
->insLoad(LIR_ld
, base
, offset
);
3405 ins
= lir
->insLoad(LIR_ldp
, base
, offset
);
3408 checkForGlobalObjectReallocation();
3409 tracker
.set(p
, ins
);
3413 JS_ASSERT(strlen(prefix
) < 10);
3415 jsuword
* localNames
= NULL
;
3416 const char* funName
= NULL
;
3417 if (*prefix
== 'a' || *prefix
== 'v') {
3418 mark
= JS_ARENA_MARK(&cx
->tempPool
);
3419 if (fp
->fun
->hasLocalNames())
3420 localNames
= js_GetLocalNameArray(cx
, fp
->fun
, &cx
->tempPool
);
3421 funName
= fp
->fun
->atom
? js_AtomToPrintableString(cx
, fp
->fun
->atom
) : "<anonymous>";
3423 if (!strcmp(prefix
, "argv")) {
3424 if (index
< fp
->fun
->nargs
) {
3425 JSAtom
*atom
= JS_LOCAL_NAME_TO_ATOM(localNames
[index
]);
3426 JS_snprintf(name
, sizeof name
, "$%s.%s", funName
, js_AtomToPrintableString(cx
, atom
));
3428 JS_snprintf(name
, sizeof name
, "$%s.<arg%d>", funName
, index
);
3430 } else if (!strcmp(prefix
, "vars")) {
3431 JSAtom
*atom
= JS_LOCAL_NAME_TO_ATOM(localNames
[fp
->fun
->nargs
+ index
]);
3432 JS_snprintf(name
, sizeof name
, "$%s.%s", funName
, js_AtomToPrintableString(cx
, atom
));
3434 JS_snprintf(name
, sizeof name
, "$%s%d", prefix
, index
);
3438 JS_ARENA_RELEASE(&cx
->tempPool
, mark
);
3441 static const char* typestr
[] = {
3442 "object", "int", "double", "jsval", "string", "null", "boolean", "function"
3444 debug_only_printf(LC_TMTracer
, "import vp=%p name=%s type=%s flags=%d\n",
3445 (void*)p
, name
, typestr
[t
& 7], t
>> 3);
3449 class ImportGlobalSlotVisitor
: public SlotVisitorBase
3451 TraceRecorder
&mRecorder
;
3453 JSTraceType
*mTypemap
;
3455 ImportGlobalSlotVisitor(TraceRecorder
&recorder
,
3457 JSTraceType
*typemap
) :
3458 mRecorder(recorder
),
3463 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
3464 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
3465 JS_ASSERT(*mTypemap
!= TT_JSVAL
);
3466 mRecorder
.import(mBase
, mRecorder
.nativeGlobalOffset(vp
),
3467 vp
, *mTypemap
++, "global", n
, NULL
);
3471 class ImportBoxedStackSlotVisitor
: public SlotVisitorBase
3473 TraceRecorder
&mRecorder
;
3475 ptrdiff_t mStackOffset
;
3476 JSTraceType
*mTypemap
;
3479 ImportBoxedStackSlotVisitor(TraceRecorder
&recorder
,
3481 ptrdiff_t stackOffset
,
3482 JSTraceType
*typemap
) :
3483 mRecorder(recorder
),
3485 mStackOffset(stackOffset
),
3489 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
3490 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
3491 for (size_t i
= 0; i
< count
; ++i
) {
3492 if (*mTypemap
== TT_JSVAL
) {
3493 mRecorder
.import(mBase
, mStackOffset
, vp
, TT_JSVAL
,
3495 LIns
*vp_ins
= mRecorder
.unbox_jsval(*vp
, mRecorder
.get(vp
),
3496 mRecorder
.copy(mRecorder
.anchor
));
3497 mRecorder
.set(vp
, vp_ins
);
3501 mStackOffset
+= sizeof(double);
3507 class ImportUnboxedStackSlotVisitor
: public SlotVisitorBase
3509 TraceRecorder
&mRecorder
;
3511 ptrdiff_t mStackOffset
;
3512 JSTraceType
*mTypemap
;
3515 ImportUnboxedStackSlotVisitor(TraceRecorder
&recorder
,
3517 ptrdiff_t stackOffset
,
3518 JSTraceType
*typemap
) :
3519 mRecorder(recorder
),
3521 mStackOffset(stackOffset
),
3525 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
3526 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
3527 for (size_t i
= 0; i
< count
; ++i
) {
3528 if (*mTypemap
!= TT_JSVAL
) {
3529 mRecorder
.import(mBase
, mStackOffset
, vp
++, *mTypemap
,
3530 stackSlotKind(), i
, fp
);
3533 mStackOffset
+= sizeof(double);
3539 JS_REQUIRES_STACK
void
3540 TraceRecorder::import(TreeInfo
* treeInfo
, LIns
* sp
, unsigned stackSlots
, unsigned ngslots
,
3541 unsigned callDepth
, JSTraceType
* typeMap
)
3544 * If we get a partial list that doesn't have all the types (i.e. recording
3545 * from a side exit that was recorded but we added more global slots
3546 * later), merge the missing types from the entry type map. This is safe
3547 * because at the loop edge we verify that we have compatible types for all
3548 * globals (entry type and loop edge type match). While a different trace
3549 * of the tree might have had a guard with a different type map for these
3550 * slots we just filled in here (the guard we continue from didn't know
3551 * about them), since we didn't take that particular guard the only way we
3552 * could have ended up here is if that other trace had at its end a
3553 * compatible type distribution with the entry map. Since that's exactly
3554 * what we used to fill in the types our current side exit didn't provide,
3555 * this is always safe to do.
3558 JSTraceType
* globalTypeMap
= typeMap
+ stackSlots
;
3559 unsigned length
= treeInfo
->nGlobalTypes();
3562 * This is potentially the typemap of the side exit and thus shorter than
3563 * the tree's global type map.
3565 if (ngslots
< length
) {
3566 MergeTypeMaps(&globalTypeMap
/* out param */, &ngslots
/* out param */,
3567 treeInfo
->globalTypeMap(), length
,
3568 (JSTraceType
*)alloca(sizeof(JSTraceType
) * length
));
3570 JS_ASSERT(ngslots
== treeInfo
->nGlobalTypes());
3571 ptrdiff_t offset
= -treeInfo
->nativeStackBase
;
3574 * Check whether there are any values on the stack we have to unbox and do
3575 * that first before we waste any time fetching the state from the stack.
3577 if (!anchor
|| anchor
->exitType
!= RECURSIVE_SLURP_FAIL_EXIT
) {
3578 ImportBoxedStackSlotVisitor
boxedStackVisitor(*this, sp
, offset
, typeMap
);
3579 VisitStackSlots(boxedStackVisitor
, cx
, callDepth
);
3582 ImportGlobalSlotVisitor
globalVisitor(*this, lirbuf
->state
, globalTypeMap
);
3583 VisitGlobalSlots(globalVisitor
, cx
, globalObj
, ngslots
,
3584 treeInfo
->globalSlots
->data());
3586 if (!anchor
|| anchor
->exitType
!= RECURSIVE_SLURP_FAIL_EXIT
) {
3587 ImportUnboxedStackSlotVisitor
unboxedStackVisitor(*this, sp
, offset
,
3589 VisitStackSlots(unboxedStackVisitor
, cx
, callDepth
);
3591 import(sp
, nativeStackOffset(&stackval(-1)), &stackval(-1),
3592 typeMap
[treeInfo
->nStackTypes
- 1], "retval", 0, cx
->fp
);
3596 JS_REQUIRES_STACK
bool
3597 TraceRecorder::isValidSlot(JSScope
* scope
, JSScopeProperty
* sprop
)
3599 uint32 setflags
= (js_CodeSpec
[*cx
->fp
->regs
->pc
].format
& (JOF_SET
| JOF_INCDEC
| JOF_FOR
));
3602 if (!SPROP_HAS_STUB_SETTER(sprop
))
3603 RETURN_VALUE("non-stub setter", false);
3604 if (sprop
->attrs
& JSPROP_READONLY
)
3605 RETURN_VALUE("writing to a read-only property", false);
3608 /* This check applies even when setflags == 0. */
3609 if (setflags
!= JOF_SET
&& !SPROP_HAS_STUB_GETTER(sprop
)) {
3610 JS_ASSERT(!sprop
->isMethod());
3611 RETURN_VALUE("non-stub getter", false);
3614 if (!SPROP_HAS_VALID_SLOT(sprop
, scope
))
3615 RETURN_VALUE("slotless obj property", false);
3620 /* Lazily import a global slot if we don't already have it in the tracker. */
3621 JS_REQUIRES_STACK
bool
3622 TraceRecorder::lazilyImportGlobalSlot(unsigned slot
)
3624 if (slot
!= uint16(slot
)) /* we use a table of 16-bit ints, bail out if that's not enough */
3628 * If the global object grows too large, alloca in ExecuteTree might fail,
3629 * so abort tracing on global objects with unreasonably many slots.
3631 if (STOBJ_NSLOTS(globalObj
) > MAX_GLOBAL_SLOTS
)
3633 jsval
* vp
= &STOBJ_GET_SLOT(globalObj
, slot
);
3635 return true; /* we already have it */
3636 unsigned index
= treeInfo
->globalSlots
->length();
3638 /* Add the slot to the list of interned global slots. */
3639 JS_ASSERT(treeInfo
->nGlobalTypes() == treeInfo
->globalSlots
->length());
3640 treeInfo
->globalSlots
->add(slot
);
3641 JSTraceType type
= getCoercedType(*vp
);
3642 if (type
== TT_INT32
&& oracle
.isGlobalSlotUndemotable(cx
, slot
))
3644 treeInfo
->typeMap
.add(type
);
3645 import(lirbuf
->state
, sizeof(struct InterpState
) + slot
*sizeof(double),
3646 vp
, type
, "global", index
, NULL
);
3647 SpecializeTreesToMissingGlobals(cx
, globalObj
, treeInfo
);
3651 /* Write back a value onto the stack or global frames. */
3653 TraceRecorder::writeBack(LIns
* i
, LIns
* base
, ptrdiff_t offset
, bool demote
)
3656 * Sink all type casts targeting the stack into the side exit by simply storing the original
3657 * (uncasted) value. Each guard generates the side exit map based on the types of the
3658 * last stores to every stack location, so it's safe to not perform them on-trace.
3660 if (demote
&& isPromoteInt(i
))
3661 i
= ::demote(lir
, i
);
3662 return lir
->insStorei(i
, base
, offset
);
3665 /* Update the tracker, then issue a write back store. */
3666 JS_REQUIRES_STACK
void
3667 TraceRecorder::set(jsval
* p
, LIns
* i
, bool initializing
, bool demote
)
3669 JS_ASSERT(i
!= NULL
);
3670 JS_ASSERT(initializing
|| known(p
));
3671 checkForGlobalObjectReallocation();
3675 * If we are writing to this location for the first time, calculate the
3676 * offset into the native frame manually. Otherwise just look up the last
3677 * load or store associated with the same source address (p) and use the
3680 LIns
* x
= nativeFrameTracker
.get(p
);
3683 x
= writeBack(i
, lirbuf
->state
, nativeGlobalOffset(p
), demote
);
3685 x
= writeBack(i
, lirbuf
->sp
, -treeInfo
->nativeStackBase
+ nativeStackOffset(p
), demote
);
3686 nativeFrameTracker
.set(p
, x
);
3688 #define ASSERT_VALID_CACHE_HIT(base, offset) \
3689 JS_ASSERT(base == lirbuf->sp || base == lirbuf->state); \
3690 JS_ASSERT(offset == ((base == lirbuf->sp) \
3691 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
3692 : nativeGlobalOffset(p))); \
3694 JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
3695 ASSERT_VALID_CACHE_HIT(x
->oprnd2(), x
->disp());
3696 writeBack(i
, x
->oprnd2(), x
->disp(), demote
);
3698 #undef ASSERT_VALID_CACHE_HIT
3701 JS_REQUIRES_STACK LIns
*
3702 TraceRecorder::get(jsval
* p
)
3704 JS_ASSERT(known(p
));
3705 checkForGlobalObjectReallocation();
3706 return tracker
.get(p
);
3709 JS_REQUIRES_STACK LIns
*
3710 TraceRecorder::addr(jsval
* p
)
3713 ? lir
->ins2(LIR_piadd
, lirbuf
->state
, INS_CONSTWORD(nativeGlobalOffset(p
)))
3714 : lir
->ins2(LIR_piadd
, lirbuf
->sp
,
3715 INS_CONSTWORD(-treeInfo
->nativeStackBase
+ nativeStackOffset(p
)));
3718 JS_REQUIRES_STACK
bool
3719 TraceRecorder::known(jsval
* p
)
3721 checkForGlobalObjectReallocation();
3722 return tracker
.has(p
);
3726 * The dslots of the global object are sometimes reallocated by the interpreter.
3727 * This function check for that condition and re-maps the entries of the tracker
3730 JS_REQUIRES_STACK
void
3731 TraceRecorder::checkForGlobalObjectReallocation()
3733 if (global_dslots
!= globalObj
->dslots
) {
3734 debug_only_print0(LC_TMTracer
,
3735 "globalObj->dslots relocated, updating tracker\n");
3736 jsval
* src
= global_dslots
;
3737 jsval
* dst
= globalObj
->dslots
;
3738 jsuint length
= globalObj
->dslots
[-1] - JS_INITIAL_NSLOTS
;
3739 LIns
** map
= (LIns
**)alloca(sizeof(LIns
*) * length
);
3740 for (jsuint n
= 0; n
< length
; ++n
) {
3741 map
[n
] = tracker
.get(src
);
3742 tracker
.set(src
++, NULL
);
3744 for (jsuint n
= 0; n
< length
; ++n
)
3745 tracker
.set(dst
++, map
[n
]);
3746 global_dslots
= globalObj
->dslots
;
3750 /* Determine whether the current branch is a loop edge (taken or not taken). */
3751 static JS_REQUIRES_STACK
bool
3752 IsLoopEdge(jsbytecode
* pc
, jsbytecode
* header
)
3757 return ((pc
+ GET_JUMP_OFFSET(pc
)) == header
);
3760 return ((pc
+ GET_JUMPX_OFFSET(pc
)) == header
);
3762 JS_ASSERT((*pc
== JSOP_AND
) || (*pc
== JSOP_ANDX
) ||
3763 (*pc
== JSOP_OR
) || (*pc
== JSOP_ORX
));
3768 class AdjustCallerGlobalTypesVisitor
: public SlotVisitorBase
3770 TraceRecorder
&mRecorder
;
3772 nanojit::LirBuffer
*mLirbuf
;
3773 nanojit::LirWriter
*mLir
;
3774 JSTraceType
*mTypeMap
;
3776 AdjustCallerGlobalTypesVisitor(TraceRecorder
&recorder
,
3777 JSTraceType
*typeMap
) :
3778 mRecorder(recorder
),
3780 mLirbuf(mRecorder
.lirbuf
),
3781 mLir(mRecorder
.lir
),
3785 JSTraceType
* getTypeMap()
3790 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
3791 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
3792 LIns
*ins
= mRecorder
.get(vp
);
3793 bool isPromote
= isPromoteInt(ins
);
3794 if (isPromote
&& *mTypeMap
== TT_DOUBLE
) {
3795 mLir
->insStorei(mRecorder
.get(vp
), mLirbuf
->state
,
3796 mRecorder
.nativeGlobalOffset(vp
));
3799 * Aggressively undo speculation so the inner tree will compile
3802 oracle
.markGlobalSlotUndemotable(mCx
, slot
);
3804 JS_ASSERT(!(!isPromote
&& *mTypeMap
== TT_INT32
));
3809 class AdjustCallerStackTypesVisitor
: public SlotVisitorBase
3811 TraceRecorder
&mRecorder
;
3813 nanojit::LirBuffer
*mLirbuf
;
3814 nanojit::LirWriter
*mLir
;
3816 JSTraceType
*mTypeMap
;
3818 AdjustCallerStackTypesVisitor(TraceRecorder
&recorder
,
3819 JSTraceType
*typeMap
) :
3820 mRecorder(recorder
),
3822 mLirbuf(mRecorder
.lirbuf
),
3823 mLir(mRecorder
.lir
),
3828 JSTraceType
* getTypeMap()
3833 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
3834 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
3835 for (size_t i
= 0; i
< count
; ++i
) {
3836 LIns
*ins
= mRecorder
.get(vp
);
3837 bool isPromote
= isPromoteInt(ins
);
3838 if (isPromote
&& *mTypeMap
== TT_DOUBLE
) {
3839 mLir
->insStorei(mRecorder
.get(vp
), mLirbuf
->sp
,
3840 -mRecorder
.treeInfo
->nativeStackBase
+
3841 mRecorder
.nativeStackOffset(vp
));
3844 * Aggressively undo speculation so the inner tree will compile
3847 oracle
.markStackSlotUndemotable(mCx
, mSlotnum
);
3849 JS_ASSERT(!(!isPromote
&& *mTypeMap
== TT_INT32
));
3859 * Promote slots if necessary to match the called tree's type map. This
3860 * function is infallible and must only be called if we are certain that it is
3861 * possible to reconcile the types for each slot in the inner and outer trees.
3863 JS_REQUIRES_STACK
void
3864 TraceRecorder::adjustCallerTypes(Fragment
* f
)
3866 TreeInfo
* ti
= (TreeInfo
*)f
->vmprivate
;
3868 AdjustCallerGlobalTypesVisitor
globalVisitor(*this, ti
->globalTypeMap());
3869 VisitGlobalSlots(globalVisitor
, cx
, *treeInfo
->globalSlots
);
3871 AdjustCallerStackTypesVisitor
stackVisitor(*this, ti
->stackTypeMap());
3872 VisitStackSlots(stackVisitor
, cx
, 0);
3874 JS_ASSERT(f
== f
->root
);
3877 JS_REQUIRES_STACK JSTraceType
3878 TraceRecorder::determineSlotType(jsval
* vp
)
3882 if (isNumber(*vp
)) {
3883 m
= isPromoteInt(i
) ? TT_INT32
: TT_DOUBLE
;
3884 } else if (JSVAL_IS_OBJECT(*vp
)) {
3885 if (JSVAL_IS_NULL(*vp
))
3887 else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp
)))
3892 JS_ASSERT(JSVAL_TAG(*vp
) == JSVAL_STRING
|| JSVAL_IS_SPECIAL(*vp
));
3893 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_STRING
) == JSVAL_STRING
);
3894 JS_STATIC_ASSERT(static_cast<jsvaltag
>(TT_PSEUDOBOOLEAN
) == JSVAL_SPECIAL
);
3895 m
= JSTraceType(JSVAL_TAG(*vp
));
3897 JS_ASSERT(m
!= TT_INT32
|| isInt32(*vp
));
3901 class DetermineTypesVisitor
: public SlotVisitorBase
3903 TraceRecorder
&mRecorder
;
3904 JSTraceType
*mTypeMap
;
3906 DetermineTypesVisitor(TraceRecorder
&recorder
,
3907 JSTraceType
*typeMap
) :
3908 mRecorder(recorder
),
3912 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
3913 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
) {
3914 *mTypeMap
++ = mRecorder
.determineSlotType(vp
);
3917 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
3918 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
) {
3919 for (size_t i
= 0; i
< count
; ++i
)
3920 *mTypeMap
++ = mRecorder
.determineSlotType(vp
++);
3924 JSTraceType
* getTypeMap()
#if defined JS_JIT_SPEW
/* Log a newly added side exit (location plus stack/global typemaps) for TreeVis. */
JS_REQUIRES_STACK static void
TreevisLogExit(JSContext* cx, VMSideExit* exit)
{
    debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
                      " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
                      (void*)exit->from, (void*)cx->fp->regs->pc, cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp));
    debug_only_print0(LC_TMTreeVis, " STACK=\"");
    for (unsigned i = 0; i < exit->numStackSlots; i++)
        debug_only_printf(LC_TMTreeVis, "%c", typeChar[exit->stackTypeMap()[i]]);
    debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
    for (unsigned i = 0; i < exit->numGlobalSlots; i++)
        debug_only_printf(LC_TMTreeVis, "%c", typeChar[exit->globalTypeMap()[i]]);
    debug_only_print0(LC_TMTreeVis, "\"\n");
}
#endif
3948 JS_REQUIRES_STACK VMSideExit
*
3949 TraceRecorder::snapshot(ExitType exitType
)
3951 JSStackFrame
* fp
= cx
->fp
;
3952 JSFrameRegs
* regs
= fp
->regs
;
3953 jsbytecode
* pc
= regs
->pc
;
3956 * Check for a return-value opcode that needs to restart at the next
3959 const JSCodeSpec
& cs
= js_CodeSpec
[*pc
];
3962 * When calling a _FAIL native, make the snapshot's pc point to the next
3963 * instruction after the CALL or APPLY. Even on failure, a _FAIL native
3964 * must not be called again from the interpreter.
3966 bool resumeAfter
= (pendingSpecializedNative
&&
3967 JSTN_ERRTYPE(pendingSpecializedNative
) == FAIL_STATUS
);
3969 JS_ASSERT(*pc
== JSOP_CALL
|| *pc
== JSOP_APPLY
|| *pc
== JSOP_NEW
||
3970 *pc
== JSOP_SETPROP
|| *pc
== JSOP_SETNAME
);
3973 MUST_FLOW_THROUGH("restore_pc");
3977 * Generate the entry map for the (possibly advanced) pc and stash it in
3980 unsigned stackSlots
= NativeStackSlots(cx
, callDepth
);
3983 * It's sufficient to track the native stack use here since all stores
3984 * above the stack watermark defined by guards are killed.
3986 trackNativeStackUse(stackSlots
+ 1);
3988 /* Capture the type map into a temporary location. */
3989 unsigned ngslots
= treeInfo
->globalSlots
->length();
3990 unsigned typemap_size
= (stackSlots
+ ngslots
) * sizeof(JSTraceType
);
3991 void *mark
= JS_ARENA_MARK(&cx
->tempPool
);
3992 JSTraceType
* typemap
;
3993 JS_ARENA_ALLOCATE_CAST(typemap
, JSTraceType
*, &cx
->tempPool
, typemap_size
);
3996 * Determine the type of a store by looking at the current type of the
3997 * actual value the interpreter is using. For numbers we have to check what
3998 * kind of store we used last (integer or double) to figure out what the
3999 * side exit show reflect in its typemap.
4001 DetermineTypesVisitor
detVisitor(*this, typemap
);
4002 VisitSlots(detVisitor
, cx
, callDepth
, ngslots
,
4003 treeInfo
->globalSlots
->data());
4004 JS_ASSERT(unsigned(detVisitor
.getTypeMap() - typemap
) ==
4005 ngslots
+ stackSlots
);
4008 * If this snapshot is for a side exit that leaves a boxed jsval result on
4009 * the stack, make a note of this in the typemap. Examples include the
4010 * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
4011 * GetPropertyByName; and the type guard in unbox_jsval after such a call
4012 * (also at the beginning of a trace branched from such a type guard).
4014 if (pendingUnboxSlot
||
4015 (pendingSpecializedNative
&& (pendingSpecializedNative
->flags
& JSTN_UNBOX_AFTER
))) {
4016 unsigned pos
= stackSlots
- 1;
4017 if (pendingUnboxSlot
== cx
->fp
->regs
->sp
- 2)
4018 pos
= stackSlots
- 2;
4019 typemap
[pos
] = TT_JSVAL
;
4022 /* Now restore the the original pc (after which early returns are ok). */
4024 MUST_FLOW_LABEL(restore_pc
);
4025 regs
->pc
= pc
- cs
.length
;
4028 * If we take a snapshot on a goto, advance to the target address. This
4029 * avoids inner trees returning on a break goto, which the outer
4030 * recorder then would confuse with a break in the outer tree.
4032 if (*pc
== JSOP_GOTO
)
4033 pc
+= GET_JUMP_OFFSET(pc
);
4034 else if (*pc
== JSOP_GOTOX
)
4035 pc
+= GET_JUMPX_OFFSET(pc
);
4039 * Check if we already have a matching side exit; if so we can return that
4040 * side exit instead of creating a new one.
4042 VMSideExit
** exits
= treeInfo
->sideExits
.data();
4043 unsigned nexits
= treeInfo
->sideExits
.length();
4044 if (exitType
== LOOP_EXIT
) {
4045 for (unsigned n
= 0; n
< nexits
; ++n
) {
4046 VMSideExit
* e
= exits
[n
];
4047 if (e
->pc
== pc
&& e
->imacpc
== fp
->imacpc
&&
4048 ngslots
== e
->numGlobalSlots
&&
4049 !memcmp(exits
[n
]->fullTypeMap(), typemap
, typemap_size
)) {
4050 AUDIT(mergedLoopExits
);
4051 #if defined JS_JIT_SPEW
4052 TreevisLogExit(cx
, e
);
4054 JS_ARENA_RELEASE(&cx
->tempPool
, mark
);
4060 /* We couldn't find a matching side exit, so create a new one. */
4061 VMSideExit
* exit
= (VMSideExit
*)
4062 traceMonitor
->traceAlloc
->alloc(sizeof(VMSideExit
) +
4063 (stackSlots
+ ngslots
) * sizeof(JSTraceType
));
4065 /* Setup side exit structure. */
4066 exit
->from
= fragment
;
4067 exit
->calldepth
= callDepth
;
4068 exit
->numGlobalSlots
= ngslots
;
4069 exit
->numStackSlots
= stackSlots
;
4070 exit
->numStackSlotsBelowCurrentFrame
= cx
->fp
->argv
?
4071 nativeStackOffset(&cx
->fp
->argv
[-2]) / sizeof(double) :
4073 exit
->exitType
= exitType
;
4074 exit
->block
= fp
->blockChain
;
4076 treeInfo
->gcthings
.addUnique(OBJECT_TO_JSVAL(fp
->blockChain
));
4078 exit
->imacpc
= fp
->imacpc
;
4079 exit
->sp_adj
= (stackSlots
* sizeof(double)) - treeInfo
->nativeStackBase
;
4080 exit
->rp_adj
= exit
->calldepth
* sizeof(FrameInfo
*);
4081 exit
->nativeCalleeWord
= 0;
4082 exit
->lookupFlags
= js_InferFlags(cx
, 0);
4083 memcpy(exit
->fullTypeMap(), typemap
, typemap_size
);
4085 #if defined JS_JIT_SPEW
4086 TreevisLogExit(cx
, exit
);
4089 JS_ARENA_RELEASE(&cx
->tempPool
, mark
);
4093 JS_REQUIRES_STACK GuardRecord
*
4094 TraceRecorder::createGuardRecord(VMSideExit
* exit
)
4096 GuardRecord
* gr
= new (*traceMonitor
->traceAlloc
) GuardRecord();
4101 // gr->profCount is calloc'd to zero
4103 gr
->profGuardID
= fragment
->guardNumberer
++;
4104 gr
->nextInFrag
= fragment
->guardsForFrag
;
4105 fragment
->guardsForFrag
= gr
;
4112 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4113 * (expected) and using the supplied side exit if the conditon doesn't hold.
4115 JS_REQUIRES_STACK
void
4116 TraceRecorder::guard(bool expected
, LIns
* cond
, VMSideExit
* exit
)
4118 debug_only_printf(LC_TMRecorder
,
4119 " About to try emitting guard code for "
4120 "SideExit=%p exitType=%s\n",
4121 (void*)exit
, getExitName(exit
->exitType
));
4123 GuardRecord
* guardRec
= createGuardRecord(exit
);
4126 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
4127 * safe to keep references to the side exits here. If we ever start
4128 * clearing those lirbufs, we have to make sure we purge the side exits
4129 * that then no longer will be in valid memory.
4131 if (exit
->exitType
== LOOP_EXIT
)
4132 treeInfo
->sideExits
.add(exit
);
4134 if (!cond
->isCond()) {
4135 expected
= !expected
;
4136 cond
= cond
->isQuad() ? lir
->ins_peq0(cond
) : lir
->ins_eq0(cond
);
4140 lir
->insGuard(expected
? LIR_xf
: LIR_xt
, cond
, guardRec
);
4142 debug_only_print0(LC_TMRecorder
,
4143 " redundant guard, eliminated, no codegen\n");
4147 JS_REQUIRES_STACK VMSideExit
*
4148 TraceRecorder::copy(VMSideExit
* copy
)
4150 size_t typemap_size
= copy
->numGlobalSlots
+ copy
->numStackSlots
;
4151 VMSideExit
* exit
= (VMSideExit
*)
4152 traceMonitor
->traceAlloc
->alloc(sizeof(VMSideExit
) +
4153 typemap_size
* sizeof(JSTraceType
));
4155 /* Copy side exit structure. */
4156 memcpy(exit
, copy
, sizeof(VMSideExit
) + typemap_size
* sizeof(JSTraceType
));
4157 exit
->guards
= NULL
;
4158 exit
->from
= fragment
;
4159 exit
->target
= NULL
;
4162 * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
4163 * safe to keep references to the side exits here. If we ever start
4164 * clearing those lirbufs, we have to make sure we purge the side exits
4165 * that then no longer will be in valid memory.
4167 if (exit
->exitType
== LOOP_EXIT
)
4168 treeInfo
->sideExits
.add(exit
);
4169 #if defined JS_JIT_SPEW
4170 TreevisLogExit(cx
, exit
);
4176 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4177 * (expected) and generate a side exit with type exitType to jump to if the
4178 * condition does not hold.
4180 JS_REQUIRES_STACK
void
4181 TraceRecorder::guard(bool expected
, LIns
* cond
, ExitType exitType
)
4183 guard(expected
, cond
, snapshot(exitType
));
4187 * Determine whether any context associated with the same thread as cx is
4188 * executing native code.
4191 ProhibitFlush(JSContext
* cx
)
4193 if (cx
->interpState
) // early out if the given is in native code
4198 #ifdef JS_THREADSAFE
4199 JSThread
* thread
= cx
->thread
;
4200 for (cl
= thread
->contextList
.next
; cl
!= &thread
->contextList
; cl
= cl
->next
)
4201 if (CX_FROM_THREAD_LINKS(cl
)->interpState
)
4204 JSRuntime
* rt
= cx
->runtime
;
4205 for (cl
= rt
->contextList
.next
; cl
!= &rt
->contextList
; cl
= cl
->next
)
4206 if (js_ContextFromLinkField(cl
)->interpState
)
4212 static JS_REQUIRES_STACK
void
4213 ResetJITImpl(JSContext
* cx
)
4215 if (!TRACING_ENABLED(cx
))
4217 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
4218 debug_only_print0(LC_TMTracer
, "Flushing cache.\n");
4220 js_AbortRecording(cx
, "flush cache");
4221 if (ProhibitFlush(cx
)) {
4222 debug_only_print0(LC_TMTracer
, "Deferring JIT flush due to deep bail.\n");
4223 tm
->needFlush
= JS_TRUE
;
/*
 * ResetJIT: log a TraceVis flush event, then reset. Falls back to a plain
 * macro when TraceVis is compiled out.
 * NOTE(review): the MOZ_TRACEVIS conditional structure is inferred from the
 * surviving #define fallback — confirm against upstream.
 */
#ifdef MOZ_TRACEVIS
static JS_INLINE JS_REQUIRES_STACK void
ResetJIT(JSContext* cx, TraceVisFlushReason r)
{
    js_LogTraceVisEvent(cx, S_RESET, r);
    ResetJITImpl(cx);
}
#else
#define ResetJIT(cx, r) ResetJITImpl(cx)
#endif
4240 JS_REQUIRES_STACK
void
4241 js_ResetJIT(JSContext
* cx
)
4243 ResetJIT(cx
, FR_OOM
);
4246 /* Compile the current fragment. */
4247 JS_REQUIRES_STACK AbortableRecordingStatus
4248 TraceRecorder::compile(JSTraceMonitor
* tm
)
4251 TraceVisStateObj
tvso(cx
, S_COMPILE
);
4254 if (tm
->needFlush
) {
4255 ResetJIT(cx
, FR_DEEP_BAIL
);
4256 return ARECORD_ABORTED
;
4258 if (treeInfo
->maxNativeStackSlots
>= MAX_NATIVE_STACK_SLOTS
) {
4259 debug_only_print0(LC_TMTracer
, "Blacklist: excessive stack use.\n");
4260 Blacklist((jsbytecode
*) fragment
->root
->ip
);
4261 return ARECORD_STOP
;
4263 if (anchor
&& anchor
->exitType
!= CASE_EXIT
)
4264 ++treeInfo
->branchCount
;
4266 return ARECORD_STOP
;
4268 Assembler
*assm
= tm
->assembler
;
4269 nanojit::compile(assm
, fragment
verbose_only(, tempAlloc
, tm
->labels
));
4271 return ARECORD_STOP
;
4273 if (assm
->error() != nanojit::None
) {
4274 debug_only_print0(LC_TMTracer
, "Blacklisted: error during compilation\n");
4275 Blacklist((jsbytecode
*) fragment
->root
->ip
);
4276 return ARECORD_STOP
;
4278 ResetRecordingAttempts(cx
, (jsbytecode
*) fragment
->ip
);
4279 ResetRecordingAttempts(cx
, (jsbytecode
*) fragment
->root
->ip
);
4282 if (anchor
->exitType
== CASE_EXIT
)
4283 assm
->patch(anchor
, anchor
->switchInfo
);
4286 assm
->patch(anchor
);
4288 JS_ASSERT(fragment
->code());
4289 JS_ASSERT(!fragment
->vmprivate
);
4290 if (fragment
== fragment
->root
)
4291 fragment
->vmprivate
= treeInfo
;
4293 /* :TODO: windows support */
4294 #if defined DEBUG && !defined WIN32
4295 const char* filename
= cx
->fp
->script
->filename
;
4296 char* label
= (char*)js_malloc((filename
? strlen(filename
) : 7) + 16);
4297 sprintf(label
, "%s:%u", filename
? filename
: "<stdin>",
4298 js_FramePCToLineNumber(cx
, cx
->fp
));
4299 tm
->labels
->add(fragment
, sizeof(Fragment
), 0, label
);
4302 AUDIT(traceCompleted
);
4304 return ARECORD_CONTINUE
;
4308 JoinPeers(Assembler
* assm
, VMSideExit
* exit
, VMFragment
* target
)
4310 exit
->target
= target
;
4313 debug_only_printf(LC_TMTreeVis
, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit
, (void*)target
);
4315 if (exit
->root() == target
)
4318 target
->getTreeInfo()->dependentTrees
.addUnique(exit
->root());
4319 exit
->root()->getTreeInfo()->linkedTrees
.addUnique(target
);
/* Results of trying to connect an arbitrary type A with arbitrary type B */
enum TypeCheckResult
{
    TypeCheck_Okay,         /* Okay: same type */
    TypeCheck_Promote,      /* Okay: Type A needs f2i() */
    TypeCheck_Demote,       /* Okay: Type A needs i2f() */
    TypeCheck_Undemote,     /* Bad: Slot is undemotable */
    TypeCheck_Bad           /* Bad: incompatible types */
};
4332 class SlotMap
: public SlotVisitorBase
4338 : v(NULL
), promoteInt(false), lastCheck(TypeCheck_Bad
)
4340 SlotInfo(jsval
* v
, bool promoteInt
)
4341 : v(v
), promoteInt(promoteInt
), lastCheck(TypeCheck_Bad
), type(getCoercedType(*v
))
4343 SlotInfo(JSTraceType t
)
4344 : v(NULL
), promoteInt(t
== TT_INT32
), lastCheck(TypeCheck_Bad
), type(t
)
4348 TypeCheckResult lastCheck
;
4352 SlotMap(TraceRecorder
& rec
)
4359 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4360 visitGlobalSlot(jsval
*vp
, unsigned n
, unsigned slot
)
4365 JS_ALWAYS_INLINE
SlotMap::SlotInfo
&
4366 operator [](unsigned i
)
4371 JS_ALWAYS_INLINE
SlotMap::SlotInfo
&
4377 JS_ALWAYS_INLINE
unsigned
4380 return slots
.length();
4384 * Possible return states:
4386 * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
4388 * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
4389 * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
4390 * before recording began. Caller can go through slot list and mark
4391 * such slots as undemotable.
4393 JS_REQUIRES_STACK TypeConsensus
4394 checkTypes(TreeInfo
* ti
)
4396 if (length() != ti
->typeMap
.length())
4397 return TypeConsensus_Bad
;
4399 bool has_undemotes
= false;
4400 for (unsigned i
= 0; i
< length(); i
++) {
4401 TypeCheckResult result
= checkType(i
, ti
->typeMap
[i
]);
4402 if (result
== TypeCheck_Bad
)
4403 return TypeConsensus_Bad
;
4404 if (result
== TypeCheck_Undemote
)
4405 has_undemotes
= true;
4406 slots
[i
].lastCheck
= result
;
4409 return TypeConsensus_Undemotes
;
4410 return TypeConsensus_Okay
;
4413 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4416 slots
.add(SlotInfo(vp
, isPromoteInt(mRecorder
.get(vp
))));
4419 JS_REQUIRES_STACK JS_ALWAYS_INLINE
void
4420 addSlot(JSTraceType t
)
4422 slots
.add(SlotInfo(t
));
4425 JS_REQUIRES_STACK
void
4428 for (unsigned i
= 0; i
< length(); i
++) {
4429 if (get(i
).lastCheck
== TypeCheck_Undemote
)
4430 MarkSlotUndemotable(mRecorder
.cx
, mRecorder
.treeInfo
, i
);
4434 JS_REQUIRES_STACK
virtual void
4437 for (unsigned i
= 0; i
< length(); i
++) {
4438 SlotInfo
& info
= get(i
);
4439 JS_ASSERT(info
.lastCheck
!= TypeCheck_Undemote
&& info
.lastCheck
!= TypeCheck_Bad
);
4440 if (info
.lastCheck
== TypeCheck_Promote
) {
4441 JS_ASSERT(info
.type
== TT_INT32
|| info
.type
== TT_DOUBLE
);
4442 mRecorder
.set(info
.v
, mRecorder
.f2i(mRecorder
.get(info
.v
)));
4443 } else if (info
.lastCheck
== TypeCheck_Demote
) {
4444 JS_ASSERT(info
.type
== TT_INT32
|| info
.type
== TT_DOUBLE
);
4445 JS_ASSERT(mRecorder
.get(info
.v
)->isQuad());
4447 /* Never demote this final i2f. */
4448 mRecorder
.set(info
.v
, mRecorder
.get(info
.v
), false, false);
4454 checkType(unsigned i
, JSTraceType t
)
4456 debug_only_printf(LC_TMTracer
,
4457 "checkType slot %d: interp=%c typemap=%c isNum=%d promoteInt=%d\n",
4459 typeChar
[slots
[i
].type
],
4461 slots
[i
].type
== TT_INT32
|| slots
[i
].type
== TT_DOUBLE
,
4462 slots
[i
].promoteInt
);
4465 if (slots
[i
].type
!= TT_INT32
&& slots
[i
].type
!= TT_DOUBLE
)
4466 return TypeCheck_Bad
; /* Not a number? Type mismatch. */
4467 /* This is always a type mismatch, we can't close a double to an int. */
4468 if (!slots
[i
].promoteInt
)
4469 return TypeCheck_Undemote
;
4470 /* Looks good, slot is an int32, the last instruction should be promotable. */
4471 JS_ASSERT_IF(slots
[i
].v
, isInt32(*slots
[i
].v
) && slots
[i
].promoteInt
);
4472 return slots
[i
].v
? TypeCheck_Promote
: TypeCheck_Okay
;
4474 if (slots
[i
].type
!= TT_INT32
&& slots
[i
].type
!= TT_DOUBLE
)
4475 return TypeCheck_Bad
; /* Not a number? Type mismatch. */
4476 if (slots
[i
].promoteInt
)
4477 return slots
[i
].v
? TypeCheck_Demote
: TypeCheck_Bad
;
4478 return TypeCheck_Okay
;
4480 return slots
[i
].type
== t
? TypeCheck_Okay
: TypeCheck_Bad
;
4482 JS_NOT_REACHED("shouldn't fall through type check switch");
4485 TraceRecorder
& mRecorder
;
4487 Queue
<SlotInfo
> slots
;
4490 class DefaultSlotMap
: public SlotMap
4493 DefaultSlotMap(TraceRecorder
& tr
) : SlotMap(tr
)
4497 JS_REQUIRES_STACK JS_ALWAYS_INLINE
bool
4498 visitStackSlots(jsval
*vp
, size_t count
, JSStackFrame
* fp
)
4500 for (size_t i
= 0; i
< count
; i
++)
4506 JS_REQUIRES_STACK TypeConsensus
4507 TraceRecorder::selfTypeStability(SlotMap
& slotMap
)
4509 debug_only_printf(LC_TMTracer
, "Checking type stability against self=%p\n", (void*)fragment
);
4510 TypeConsensus consensus
= slotMap
.checkTypes(treeInfo
);
4512 /* Best case: loop jumps back to its own header */
4513 if (consensus
== TypeConsensus_Okay
)
4514 return TypeConsensus_Okay
;
4516 /* If the only thing keeping this loop from being stable is undemotions, then mark relevant
4517 * slots as undemotable.
4519 if (consensus
== TypeConsensus_Undemotes
)
4520 slotMap
.markUndemotes();
4525 JS_REQUIRES_STACK TypeConsensus
4526 TraceRecorder::peerTypeStability(SlotMap
& slotMap
, const void* ip
, VMFragment
** pPeer
)
4528 /* See if there are any peers that would make this stable */
4529 VMFragment
* root
= (VMFragment
*)fragment
->root
;
4530 VMFragment
* peer
= getLoop(traceMonitor
, ip
, root
->globalObj
, root
->globalShape
,
4533 /* This condition is possible with recursion */
4534 JS_ASSERT_IF(!peer
, fragment
->root
->ip
!= ip
);
4536 return TypeConsensus_Bad
;
4537 bool onlyUndemotes
= false;
4538 for (; peer
!= NULL
; peer
= (VMFragment
*)peer
->peer
) {
4539 if (!peer
->vmprivate
|| peer
== fragment
)
4541 debug_only_printf(LC_TMTracer
, "Checking type stability against peer=%p\n", (void*)peer
);
4542 TypeConsensus consensus
= slotMap
.checkTypes((TreeInfo
*)peer
->vmprivate
);
4543 if (consensus
== TypeConsensus_Okay
) {
4545 /* Return this even though there will be linkage; the trace itself is not stable.
4546 * Caller should inspect ppeer to check for a compatible peer.
4548 return TypeConsensus_Okay
;
4550 if (consensus
== TypeConsensus_Undemotes
)
4551 onlyUndemotes
= true;
4554 return onlyUndemotes
? TypeConsensus_Undemotes
: TypeConsensus_Bad
;
4557 JS_REQUIRES_STACK AbortableRecordingStatus
4558 TraceRecorder::closeLoop()
4560 return closeLoop(snapshot(UNSTABLE_LOOP_EXIT
));
4563 JS_REQUIRES_STACK AbortableRecordingStatus
4564 TraceRecorder::closeLoop(VMSideExit
* exit
)
4566 DefaultSlotMap
slotMap(*this);
4567 VisitSlots(slotMap
, cx
, 0, *treeInfo
->globalSlots
);
4568 return closeLoop(slotMap
, exit
);
//
// TraceRecorder::closeLoop — finish recording a loop trace: assert we are
// back at the loop header, check type stability of the live slots (against
// this tree via selfTypeStability, or against a peer fragment via
// peerTypeStability), compile the fragment, join edges, respecialize
// dependent/linked trees, and stop recording (ARECORD_STOP on the paths
// visible below).
//
// NOTE(review): this extraction is line-mangled and the embedded original
// line numbers skip (e.g. 4591, 4648, 4651), so statements, braces and
// else-branches are missing here; verify any change against the upstream
// jstracer.cpp before editing.
//
4572 * Complete and compile a trace and link it to the existing tree if
4573 * appropriate. Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
4574 * the recorder was deleted. Outparam is always set.
4576 JS_REQUIRES_STACK AbortableRecordingStatus
4577 TraceRecorder::closeLoop(SlotMap
& slotMap
, VMSideExit
* exit
)
4580 * We should have arrived back at the loop header, and hence we don't want
4581 * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
4582 * case this loop was blacklisted in the meantime, JSOP_NOP.
4584 JS_ASSERT((*cx
->fp
->regs
->pc
== JSOP_TRACE
|| *cx
->fp
->regs
->pc
== JSOP_NOP
||
4585 *cx
->fp
->regs
->pc
== JSOP_RETURN
) && !cx
->fp
->imacpc
);
// A non-zero callDepth at the loop edge suggests recursion; blacklist the
// loop head so we stop trying to trace it.
4587 if (callDepth
!= 0) {
4588 debug_only_print0(LC_TMTracer
,
4589 "Blacklisted: stack depth mismatch, possible recursion.\n");
4590 Blacklist((jsbytecode
*) fragment
->root
->ip
);
4592 return ARECORD_STOP
;
4595 JS_ASSERT_IF(exit
->exitType
== UNSTABLE_LOOP_EXIT
,
4596 exit
->numStackSlots
== treeInfo
->nStackTypes
);
4597 JS_ASSERT_IF(exit
->exitType
!= UNSTABLE_LOOP_EXIT
, exit
->exitType
== RECURSIVE_UNLINKED_EXIT
);
4598 JS_ASSERT_IF(exit
->exitType
== RECURSIVE_UNLINKED_EXIT
,
4599 exit
->recursive_pc
!= fragment
->root
->ip
);
4601 VMFragment
* peer
= NULL
;
4602 VMFragment
* root
= (VMFragment
*)fragment
->root
;
4604 TypeConsensus consensus
= TypeConsensus_Bad
;
// First try to stabilize against ourselves; otherwise look for a peer
// fragment (keyed by recursive_pc for recursive-unlinked exits) whose
// typemap is compatible.
4606 if (exit
->exitType
== UNSTABLE_LOOP_EXIT
)
4607 consensus
= selfTypeStability(slotMap
);
4608 if (consensus
!= TypeConsensus_Okay
) {
4609 const void* ip
= exit
->exitType
== RECURSIVE_UNLINKED_EXIT
?
4610 exit
->recursive_pc
: fragment
->root
->ip
;
4611 TypeConsensus peerConsensus
= peerTypeStability(slotMap
, ip
, &peer
);
4612 /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
4613 if (peerConsensus
!= TypeConsensus_Bad
)
4614 consensus
= peerConsensus
;
4618 if (consensus
!= TypeConsensus_Okay
|| peer
)
4619 AUDIT(unstableLoopVariable
);
4622 JS_ASSERT(!trashSelf
);
4624 /* This exit is indeed linkable to something now. Process any promote/demotes that
4625 * are pending in the slot map.
4627 if (consensus
== TypeConsensus_Okay
)
4628 slotMap
.adjustTypes();
// Type-unstable path: end the trace with an unconditional guard and either
// record an UnstableExit (no peer) or link directly to the stable peer.
// NOTE(review): original lines between 4647 and 4649 (presumably
// "uexit->exit = exit;") and the else-branch braces are missing from this
// extraction.
4630 if (consensus
!= TypeConsensus_Okay
|| peer
) {
4631 fragment
->lastIns
= lir
->insGuard(LIR_x
, NULL
, createGuardRecord(exit
));
4633 /* If there is a peer, there must have been an "Okay" consensus. */
4634 JS_ASSERT_IF(peer
, consensus
== TypeConsensus_Okay
);
4636 /* Compile as a type-unstable loop, and hope for a connection later. */
4639 * If such a fragment does not exist, let's compile the loop ahead
4640 * of time anyway. Later, if the loop becomes type stable, we will
4641 * connect these two fragments together.
4643 debug_only_print0(LC_TMTracer
,
4644 "Trace has unstable loop variable with no stable peer, "
4645 "compiling anyway.\n");
4646 UnstableExit
* uexit
= new (*traceMonitor
->traceAlloc
) UnstableExit
;
4647 uexit
->fragment
= fragment
;
4649 uexit
->next
= treeInfo
->unstableExits
;
4650 treeInfo
->unstableExits
= uexit
;
4652 JS_ASSERT(peer
->code());
4653 exit
->target
= peer
;
4654 debug_only_printf(LC_TMTracer
,
4655 "Joining type-unstable trace to target fragment %p.\n",
4657 ((TreeInfo
*)peer
->vmprivate
)->dependentTrees
.addUnique(fragment
->root
);
4658 treeInfo
->linkedTrees
.addUnique(peer
);
// Type-stable path: convert the exit to a LOOP_EXIT, emit the backward
// jump to loopLabel (root fragment only) and close with a guard.
4661 exit
->exitType
= LOOP_EXIT
;
4662 debug_only_printf(LC_TMTreeVis
, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit
,
4663 getExitName(LOOP_EXIT
));
4665 JS_ASSERT((fragment
== fragment
->root
) == !!loopLabel
);
4667 lir
->insBranch(LIR_j
, NULL
, loopLabel
);
4668 lir
->ins1(LIR_live
, lirbuf
->state
);
4671 exit
->target
= fragment
->root
;
4672 fragment
->lastIns
= lir
->insGuard(LIR_x
, NULL
, createGuardRecord(exit
));
4675 CHECK_STATUS_A(compile(traceMonitor
));
4677 debug_only_printf(LC_TMTreeVis
, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit
, (void*)peer
);
4679 peer
= getLoop(traceMonitor
, root
->ip
, root
->globalObj
, root
->globalShape
, root
->argc
);
4681 joinEdgesToEntry(peer
);
4683 debug_only_stmt(DumpPeerStability(traceMonitor
, peer
->ip
, peer
->globalObj
,
4684 peer
->globalShape
, peer
->argc
);)
4686 debug_only_print0(LC_TMTracer
,
4687 "updating specializations on dependent and linked trees\n");
4688 if (fragment
->root
->vmprivate
)
4689 SpecializeTreesToMissingGlobals(cx
, globalObj
, (TreeInfo
*)fragment
->root
->vmprivate
);
4692 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
4693 * should try to compile the outer tree again.
4696 AttemptCompilation(cx
, traceMonitor
, globalObj
, outer
, outerArgc
);
4698 debug_only_printf(LC_TMMinimal
,
4699 "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n",
4700 cx
->fp
->script
->filename
,
4701 js_FramePCToLineNumber(cx
, cx
->fp
),
4702 FramePCOffset(cx
->fp
),
4703 fragment
->profFragID
);
4704 debug_only_print0(LC_TMMinimal
, "\n");
4707 /* recording complete. */
4708 return ARECORD_STOP
;
//
// FullMapFromExit — rebuild the complete typemap captured at |exit|:
// the exit's stack types, then its global types, then any globals that
// were specialized at the root tree after the exit was taken (so the map
// always covers the root tree's full nGlobalTypes()).
//
// NOTE(review): the return-type line preceding 4712 and the braces are
// missing from this mangled extraction; confirm the signature against
// upstream before editing.
//
4712 FullMapFromExit(TypeMap
& typeMap
, VMSideExit
* exit
)
4714 typeMap
.setLength(0);
4715 typeMap
.fromRaw(exit
->stackTypeMap(), exit
->numStackSlots
);
4716 typeMap
.fromRaw(exit
->globalTypeMap(), exit
->numGlobalSlots
);
4717 /* Include globals that were later specialized at the root of the tree. */
4718 if (exit
->numGlobalSlots
< exit
->root()->getTreeInfo()->nGlobalTypes()) {
4719 typeMap
.fromRaw(exit
->root()->getTreeInfo()->globalTypeMap() + exit
->numGlobalSlots
,
4720 exit
->root()->getTreeInfo()->nGlobalTypes() - exit
->numGlobalSlots
);
//
// TypeMapLinkability — compare |typeMap| against |peer|'s tree typemap,
// slot by slot, over the common prefix. Equal slots are compatible;
// an int32 slot here vs. a double slot in the peer is tolerable if the
// oracle says the slot is undemotable (result: TypeConsensus_Undemotes);
// any other mismatch makes the pair unlinkable (TypeConsensus_Bad).
//
// NOTE(review): lines 4726/4732/4736/4738-4741 (braces, the loop-body
// continue, the else arm and the final return of |consensus|) are missing
// from this extraction; verify against upstream before editing.
//
4724 static JS_REQUIRES_STACK TypeConsensus
4725 TypeMapLinkability(JSContext
* cx
, const TypeMap
& typeMap
, VMFragment
* peer
)
4727 const TypeMap
& peerMap
= peer
->getTreeInfo()->typeMap
;
4728 unsigned minSlots
= JS_MIN(typeMap
.length(), peerMap
.length());
4729 TypeConsensus consensus
= TypeConsensus_Okay
;
4730 for (unsigned i
= 0; i
< minSlots
; i
++) {
4731 if (typeMap
[i
] == peerMap
[i
])
4733 if (typeMap
[i
] == TT_INT32
&& peerMap
[i
] == TT_DOUBLE
&&
4734 IsSlotUndemotable(cx
, peer
->getTreeInfo(), i
, peer
->ip
)) {
4735 consensus
= TypeConsensus_Undemotes
;
4737 return TypeConsensus_Bad
;
//
// FindUndemotesInTypemaps — collect into |undemotes| the slot indices
// where |typeMap| has TT_INT32 but |treeInfo|'s map has TT_DOUBLE, mark
// each such slot undemotable in the oracle, and return the number found.
// A mismatch of any other kind apparently resets the queue (that branch's
// body at original lines 4753-4755 is missing from this extraction).
//
// NOTE(review): the statement adding |i| to |undemotes| (original line
// 4751) and several braces were dropped by the extraction; verify against
// upstream before editing.
//
4743 static JS_REQUIRES_STACK
unsigned
4744 FindUndemotesInTypemaps(JSContext
* cx
, const TypeMap
& typeMap
, TreeInfo
* treeInfo
,
4745 Queue
<unsigned>& undemotes
)
4747 undemotes
.setLength(0);
4748 unsigned minSlots
= JS_MIN(typeMap
.length(), treeInfo
->typeMap
.length());
4749 for (unsigned i
= 0; i
< minSlots
; i
++) {
4750 if (typeMap
[i
] == TT_INT32
&& treeInfo
->typeMap
[i
] == TT_DOUBLE
) {
4752 } else if (typeMap
[i
] != treeInfo
->typeMap
[i
]) {
4756 for (unsigned i
= 0; i
< undemotes
.length(); i
++)
4757 MarkSlotUndemotable(cx
, treeInfo
, undemotes
[i
]);
4758 return undemotes
.length();
//
// TraceRecorder::joinEdgesToEntry — for the root fragment only, walk every
// peer fragment's list of unstable exits and try to connect each one to
// this tree: rebuild the exit's full typemap, test linkability, and on an
// Okay consensus patch the exit to this fragment (JoinPeers) and drop it
// from the peer's unstable list. Exits whose typemaps show int32->double
// undemotions mark the owning tree for trashing.
//
// NOTE(review): extraction dropped lines here (e.g. the early "return;"
// after the root check, "continue;" statements, and the trash-self branch
// around original lines 4798-4801); verify against upstream before editing.
//
4761 JS_REQUIRES_STACK
void
4762 TraceRecorder::joinEdgesToEntry(VMFragment
* peer_root
)
4764 if (fragment
->root
!= fragment
)
4767 TypeMap
typeMap(NULL
);
4768 Queue
<unsigned> undemotes(NULL
);
4770 for (VMFragment
* peer
= peer_root
; peer
; peer
= (VMFragment
*)peer
->peer
) {
4771 TreeInfo
* ti
= peer
->getTreeInfo();
4774 UnstableExit
* uexit
= ti
->unstableExits
;
4775 while (uexit
!= NULL
) {
4776 /* :TODO: these exits go somewhere else. */
4777 if (uexit
->exit
->exitType
== RECURSIVE_UNLINKED_EXIT
) {
4778 uexit
= uexit
->next
;
4781 /* Build the full typemap for this unstable exit */
4782 FullMapFromExit(typeMap
, uexit
->exit
);
4783 /* Check its compatibility against this tree */
4784 TypeConsensus consensus
= TypeMapLinkability(cx
, typeMap
, (VMFragment
*)fragment
->root
);
4785 JS_ASSERT_IF(consensus
== TypeConsensus_Okay
, peer
!= fragment
);
4786 if (consensus
== TypeConsensus_Okay
) {
4787 debug_only_printf(LC_TMTracer
,
4788 "Joining type-stable trace to target exit %p->%p.\n",
4789 (void*)uexit
->fragment
, (void*)uexit
->exit
);
4790 /* It's okay! Link together and remove the unstable exit. */
4791 JoinPeers(traceMonitor
->assembler
, uexit
->exit
, (VMFragment
*)fragment
);
4792 uexit
= ti
->removeUnstableExit(uexit
->exit
);
4794 /* Check for int32->double slots that suggest trashing. */
4795 if (FindUndemotesInTypemaps(cx
, typeMap
, treeInfo
, undemotes
)) {
4796 JS_ASSERT(peer
== uexit
->fragment
->root
);
4797 if (fragment
== peer
)
4800 whichTreesToTrash
.addUnique(uexit
->fragment
->root
);
4803 uexit
= uexit
->next
;
4809 JS_REQUIRES_STACK AbortableRecordingStatus
4810 TraceRecorder::endLoop()
4812 return endLoop(snapshot(LOOP_EXIT
));
//
// TraceRecorder::endLoop(exit) — terminate the trace at |exit| (used for
// break statements and similar non-looping exits): blacklist on suspected
// recursion, emit an unconditional LIR_x guard, compile the fragment, join
// edges to the entry peers, respecialize dependent/linked trees and stop
// recording.
//
// NOTE(review): original lines 4818/4822 and 4838-4841 (including the
// argument list of the joinEdgesToEntry(getLoop(...)) call) are missing
// from this extraction; verify against upstream before editing.
//
4815 /* Emit an always-exit guard and compile the tree (used for break statements. */
4816 JS_REQUIRES_STACK AbortableRecordingStatus
4817 TraceRecorder::endLoop(VMSideExit
* exit
)
4819 if (callDepth
!= 0) {
4820 debug_only_print0(LC_TMTracer
, "Blacklisted: stack depth mismatch, possible recursion.\n");
4821 Blacklist((jsbytecode
*) fragment
->root
->ip
);
4823 return ARECORD_STOP
;
4826 if (monitorReason
!= Monitor_Branch
)
4827 RETURN_STOP_A("control flow should have been recursive");
4830 lir
->insGuard(LIR_x
, NULL
, createGuardRecord(exit
));
4832 CHECK_STATUS_A(compile(traceMonitor
));
4834 debug_only_printf(LC_TMTreeVis
, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit
);
4836 VMFragment
* root
= (VMFragment
*)fragment
->root
;
4837 joinEdgesToEntry(getLoop(traceMonitor
,
4842 debug_only_stmt(DumpPeerStability(traceMonitor
, root
->ip
, root
->globalObj
,
4843 root
->globalShape
, root
->argc
);)
4846 * Note: this must always be done, in case we added new globals on trace
4847 * and haven't yet propagated those to linked and dependent trees.
4849 debug_only_print0(LC_TMTracer
,
4850 "updating specializations on dependent and linked trees\n");
4851 if (fragment
->root
->vmprivate
)
4852 SpecializeTreesToMissingGlobals(cx
, globalObj
, (TreeInfo
*)fragment
->root
->vmprivate
);
4855 * If this is a newly formed tree, and the outer tree has not been compiled
4856 * yet, we should try to compile the outer tree again.
4859 AttemptCompilation(cx
, traceMonitor
, globalObj
, outer
, outerArgc
);
4861 debug_only_printf(LC_TMMinimal
,
4862 "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n",
4863 cx
->fp
->script
->filename
,
4864 js_FramePCToLineNumber(cx
, cx
->fp
),
4865 FramePCOffset(cx
->fp
),
4866 fragment
->profFragID
);
4867 debug_only_print0(LC_TMTracer
, "\n");
4870 /* recording complete */
4871 return ARECORD_STOP
;
//
// TraceRecorder::prepareTreeCall — before calling an inner tree, emit LIR
// that (when callDepth > 0) lifts the native stack pointer and call stack
// pointer so the inner tree sees the frame layout it expects, guards that
// enough sp/rp headroom exists (against eos_ins/eor_ins), stores the
// adjusted sp/rp into InterpState, and ends with an LIR_xbarrier so
// nanojit does not defer stack writes across the js_CallTree call.
//
// NOTE(review): this extraction dropped lines, notably the else-branch
// around original lines 4920-4924 where a second |sp_offset| computation
// begins; verify against upstream before editing.
//
4874 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
4875 JS_REQUIRES_STACK
void
4876 TraceRecorder::prepareTreeCall(VMFragment
* inner
)
4878 TreeInfo
* ti
= (TreeInfo
*)inner
->vmprivate
;
4879 inner_sp_ins
= lirbuf
->sp
;
4880 VMSideExit
* exit
= snapshot(OOM_EXIT
);
4883 * The inner tree expects to be called from the current frame. If the outer
4884 * tree (this trace) is currently inside a function inlining code
4885 * (calldepth > 0), we have to advance the native stack pointer such that
4886 * we match what the inner trace expects to see. We move it back when we
4887 * come out of the inner tree call.
4889 if (callDepth
> 0) {
4891 * Calculate the amount we have to lift the native stack pointer by to
4892 * compensate for any outer frames that the inner tree doesn't expect
4893 * but the outer tree has.
4895 ptrdiff_t sp_adj
= nativeStackOffset(&cx
->fp
->argv
[-2]);
4897 /* Calculate the amount we have to lift the call stack by. */
4898 ptrdiff_t rp_adj
= callDepth
* sizeof(FrameInfo
*);
4901 * Guard that we have enough stack space for the tree we are trying to
4902 * call on top of the new value for sp.
4904 debug_only_printf(LC_TMTracer
,
4905 "sp_adj=%lld outer=%lld inner=%lld\n",
4906 (long long int)sp_adj
,
4907 (long long int)treeInfo
->nativeStackBase
,
4908 (long long int)ti
->nativeStackBase
);
4909 ptrdiff_t sp_offset
=
4910 - treeInfo
->nativeStackBase
/* rebase sp to beginning of outer tree's stack */
4911 + sp_adj
/* adjust for stack in outer frame inner tree can't see */
4912 + ti
->maxNativeStackSlots
* sizeof(double); /* plus the inner tree's stack */
4913 LIns
* sp_top
= lir
->ins2(LIR_piadd
, lirbuf
->sp
, INS_CONSTWORD(sp_offset
));
4914 guard(true, lir
->ins2(LIR_plt
, sp_top
, eos_ins
), exit
);
4916 /* Guard that we have enough call stack space. */
4917 ptrdiff_t rp_offset
= rp_adj
+ ti
->maxCallDepth
* sizeof(FrameInfo
*);
4918 LIns
* rp_top
= lir
->ins2(LIR_piadd
, lirbuf
->rp
, INS_CONSTWORD(rp_offset
));
4919 guard(true, lir
->ins2(LIR_plt
, rp_top
, eor_ins
), exit
);
// Second sp_offset computation (declaration line dropped by extraction):
// rebased to the inner tree's nativeStackBase rather than its max slots.
4922 - treeInfo
->nativeStackBase
/* rebase sp to beginning of outer tree's stack */
4923 + sp_adj
/* adjust for stack in outer frame inner tree can't see */
4924 + ti
->nativeStackBase
; /* plus the inner tree's stack base */
4925 /* We have enough space, so adjust sp and rp to their new level. */
4926 lir
->insStorei(inner_sp_ins
= lir
->ins2(LIR_piadd
, lirbuf
->sp
, INS_CONSTWORD(sp_offset
)),
4927 lirbuf
->state
, offsetof(InterpState
, sp
));
4928 lir
->insStorei(lir
->ins2(LIR_piadd
, lirbuf
->rp
, INS_CONSTWORD(rp_adj
)),
4929 lirbuf
->state
, offsetof(InterpState
, rp
));
4933 * The inner tree will probably access stack slots. So tell nanojit not to
4934 * discard or defer stack writes before calling js_CallTree.
4936 * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
4938 GuardRecord
* guardRec
= createGuardRecord(exit
);
4939 lir
->insGuard(LIR_xbarrier
, NULL
, guardRec
);
//
// BuildGlobalTypeMapFromInnerTree — append to |typeMap| the global types
// seen at the innermost exit, then fill in any remaining globals from the
// inner exit's root tree so the appended region covers the inner tree's
// full nGlobalTypes(). Asserts exactly |slots| entries were appended.
//
// NOTE(review): the signature's return-type line (before 4943), the
// opening brace and the trailing return are missing from this extraction;
// verify against upstream before editing.
//
4943 BuildGlobalTypeMapFromInnerTree(Queue
<JSTraceType
>& typeMap
, VMSideExit
* inner
)
4946 unsigned initialSlots
= typeMap
.length();
4948 /* First, use the innermost exit's global typemap. */
4949 typeMap
.add(inner
->globalTypeMap(), inner
->numGlobalSlots
);
4951 /* Add missing global types from the innermost exit's tree. */
4952 TreeInfo
* innerTree
= inner
->root()->getTreeInfo();
4953 unsigned slots
= inner
->numGlobalSlots
;
4954 if (slots
< innerTree
->nGlobalTypes()) {
4955 typeMap
.add(innerTree
->globalTypeMap() + slots
, innerTree
->nGlobalTypes() - slots
);
4956 slots
= innerTree
->nGlobalTypes();
4958 JS_ASSERT(typeMap
.length() - initialSlots
== slots
);
//
// TraceRecorder::emitTreeCall — emit the call into an inner tree
// (js_CallTree), re-import all slots using a freshly built full typemap
// (stack types from |exit| plus globals from the inner tree, per bug
// 502604), restore sp/rp in InterpState when inlined, guard that execution
// left the inner tree through the expected side exit, and register the
// dependency links between the two trees.
//
// NOTE(review): extraction dropped lines here, including the declarations
// of |map| and |i| (used below) and the DEBUG-only conditional structure
// around the typemap assertions; verify against upstream before editing.
//
4962 /* Record a call to an inner tree. */
4963 JS_REQUIRES_STACK
void
4964 TraceRecorder::emitTreeCall(VMFragment
* inner
, VMSideExit
* exit
)
4966 TreeInfo
* ti
= (TreeInfo
*)inner
->vmprivate
;
4968 /* Invoke the inner tree. */
4969 LIns
* args
[] = { INS_CONSTPTR(inner
), lirbuf
->state
}; /* reverse order */
4970 LIns
* ret
= lir
->insCall(&js_CallTree_ci
, args
);
4972 /* Read back all registers, in case the called tree changed any of them. */
4976 map
= exit
->globalTypeMap();
4977 for (i
= 0; i
< exit
->numGlobalSlots
; i
++)
4978 JS_ASSERT(map
[i
] != TT_JSVAL
);
4979 map
= exit
->stackTypeMap();
4980 for (i
= 0; i
< exit
->numStackSlots
; i
++)
4981 JS_ASSERT(map
[i
] != TT_JSVAL
);
4984 * Bug 502604 - It is illegal to extend from the outer typemap without
4985 * first extending from the inner. Make a new typemap here.
4987 TypeMap
fullMap(NULL
);
4988 fullMap
.add(exit
->stackTypeMap(), exit
->numStackSlots
);
4989 BuildGlobalTypeMapFromInnerTree(fullMap
, exit
);
4990 import(ti
, inner_sp_ins
, exit
->numStackSlots
, fullMap
.length() - exit
->numStackSlots
,
4991 exit
->calldepth
, fullMap
.data());
4993 /* Restore sp and rp to their original values (we still have them in a register). */
4994 if (callDepth
> 0) {
4995 lir
->insStorei(lirbuf
->sp
, lirbuf
->state
, offsetof(InterpState
, sp
));
4996 lir
->insStorei(lirbuf
->rp
, lirbuf
->state
, offsetof(InterpState
, rp
));
5000 * Guard that we come out of the inner tree along the same side exit we came out when
5001 * we called the inner tree at recording time.
5003 VMSideExit
* nested
= snapshot(NESTED_EXIT
);
5004 guard(true, lir
->ins2(LIR_peq
, ret
, INS_CONSTPTR(exit
)), nested
);
5005 debug_only_printf(LC_TMTreeVis
, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner
,
5006 (void*)nested
, (void*)exit
);
5008 /* Register us as a dependent tree of the inner tree. */
5009 ((TreeInfo
*)inner
->vmprivate
)->dependentTrees
.addUnique(fragment
->root
);
5010 treeInfo
->linkedTrees
.addUnique(inner
);
//
// TraceRecorder::trackCfgMerges — when recording hits an IFEQ/IFEQX,
// look up the source note for the branch and record the pc of the merge
// point after the if (SRC_IF: the jump target; SRC_IF_ELSE: the note's
// first offset) in cfgMerges.
//
// NOTE(review): the null-check on |sn| at original line 5020 and the
// closing braces were dropped by this extraction; verify against upstream
// before editing.
//
5013 /* Add a if/if-else control-flow merge point to the list of known merge points. */
5014 JS_REQUIRES_STACK
void
5015 TraceRecorder::trackCfgMerges(jsbytecode
* pc
)
5017 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
5018 JS_ASSERT((*pc
== JSOP_IFEQ
) || (*pc
== JSOP_IFEQX
));
5019 jssrcnote
* sn
= js_GetSrcNote(cx
->fp
->script
, pc
);
5021 if (SN_TYPE(sn
) == SRC_IF
) {
5022 cfgMerges
.add((*pc
== JSOP_IFEQ
)
5023 ? pc
+ GET_JUMP_OFFSET(pc
)
5024 : pc
+ GET_JUMPX_OFFSET(pc
));
5025 } else if (SN_TYPE(sn
) == SRC_IF_ELSE
)
5026 cfgMerges
.add(pc
+ js_GetSrcNoteOffset(sn
, 0));
//
// TraceRecorder::emitIf — emit a guard for a conditional branch. On a
// loop edge the guard becomes a LOOP_EXIT (with the condition inverted
// when the branch walks out of the loop); otherwise it is a BRANCH_EXIT.
// Constant conditions skip the guard and instead record (in |loop|)
// whether the trace should later close with a loop edge or a loop end.
//
// NOTE(review): extraction dropped lines here — the declaration of
// |exitType|, the else-arms around 5060-5065, and the condition-inversion
// statement — so the visible control flow is incomplete; verify against
// upstream before editing.
//
5031 * Invert the direction of the guard if this is a loop edge that is not
5032 * taken (thin loop).
5034 JS_REQUIRES_STACK
void
5035 TraceRecorder::emitIf(jsbytecode
* pc
, bool cond
, LIns
* x
)
5038 if (IsLoopEdge(pc
, (jsbytecode
*)fragment
->root
->ip
)) {
5039 exitType
= LOOP_EXIT
;
5042 * If we are about to walk out of the loop, generate code for the
5043 * inverse loop condition, pretending we recorded the case that stays
5046 if ((*pc
== JSOP_IFEQ
|| *pc
== JSOP_IFEQX
) == cond
) {
5047 JS_ASSERT(*pc
== JSOP_IFNE
|| *pc
== JSOP_IFNEX
|| *pc
== JSOP_IFEQ
|| *pc
== JSOP_IFEQX
);
5048 debug_only_print0(LC_TMTracer
,
5049 "Walking out of the loop, terminating it anyway.\n");
5054 * Conditional guards do not have to be emitted if the condition is
5055 * constant. We make a note whether the loop condition is true or false
5056 * here, so we later know whether to emit a loop edge or a loop end.
5059 loop
= (x
->imm32() == int32(cond
));
5063 exitType
= BRANCH_EXIT
;
5066 guard(cond
, x
, exitType
);
//
// TraceRecorder::fuseIf — handle a comparison fused with a following
// IFEQ/IFNE: emit the branch guard via emitIf, and for IFEQ additionally
// track the control-flow merge point (the call at original line 5076 —
// presumably trackCfgMerges(pc) — was dropped by this extraction; verify
// against upstream before editing).
//
5069 /* Emit code for a fused IFEQ/IFNE. */
5070 JS_REQUIRES_STACK
void
5071 TraceRecorder::fuseIf(jsbytecode
* pc
, bool cond
, LIns
* x
)
5073 if (*pc
== JSOP_IFEQ
|| *pc
== JSOP_IFNE
) {
5074 emitIf(pc
, cond
, x
);
5075 if (*pc
== JSOP_IFEQ
)
//
// TraceRecorder::checkTraceEnd — if |pc| is a loop edge back to the root,
// temporarily rewrite the frame's regs so they appear parked at the loop
// header (popping the one or two operands a fused comparison left on the
// stack), attempt closeLoop(), then restore the original regs. Returns
// ARECORD_CONTINUE when not at a loop edge.
//
// NOTE(review): the handling of closeLoop's result between original lines
// 5100 and 5107 was dropped by this extraction; verify against upstream
// before editing.
//
5080 /* Check whether we have reached the end of the trace. */
5081 JS_REQUIRES_STACK AbortableRecordingStatus
5082 TraceRecorder::checkTraceEnd(jsbytecode
*pc
)
5084 if (IsLoopEdge(pc
, (jsbytecode
*)fragment
->root
->ip
)) {
5086 * If we compile a loop, the trace should have a zero stack balance at
5087 * the loop edge. Currently we are parked on a comparison op or
5088 * IFNE/IFEQ, so advance pc to the loop header and adjust the stack
5089 * pointer and pretend we have reached the loop header.
5092 JS_ASSERT(!cx
->fp
->imacpc
&& (pc
== cx
->fp
->regs
->pc
|| pc
== cx
->fp
->regs
->pc
+ 1));
5093 bool fused
= pc
!= cx
->fp
->regs
->pc
;
5094 JSFrameRegs orig
= *cx
->fp
->regs
;
5096 cx
->fp
->regs
->pc
= (jsbytecode
*)fragment
->root
->ip
;
5097 cx
->fp
->regs
->sp
-= fused
? 2 : 1;
5099 AbortableRecordingStatus ars
= closeLoop();
5101 *cx
->fp
->regs
= orig
;
5107 return ARECORD_CONTINUE
;
//
// TraceRecorder::hasMethod — look up |id| on |obj|'s prototype chain and
// determine whether it resolves to a function-valued, stub-getter (or
// method) property in a native object's scope; as a side effect, brand the
// scope the first time a function-valued slot is observed. The property
// reference obtained from lookupProperty is dropped before returning.
//
// NOTE(review): the declarations of |pobj|/|prop| and the found-flag, plus
// the early returns, were dropped by this mangled extraction; verify
// against upstream before editing.
//
5111 TraceRecorder::hasMethod(JSObject
* obj
, jsid id
)
5118 int protoIndex
= obj
->lookupProperty(cx
, id
, &pobj
, &prop
);
5119 if (protoIndex
< 0 || !prop
)
5123 if (OBJ_IS_NATIVE(pobj
)) {
5124 JSScope
* scope
= OBJ_SCOPE(pobj
);
5125 JSScopeProperty
* sprop
= (JSScopeProperty
*) prop
;
5127 if (SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop
) &&
5128 SPROP_HAS_VALID_SLOT(sprop
, scope
)) {
5129 jsval v
= LOCKED_OBJ_GET_SLOT(pobj
, sprop
->slot
);
5130 if (VALUE_IS_FUNCTION(cx
, v
)) {
5132 if (!scope
->branded()) {
5133 scope
->brandingShapeChange(cx
, sprop
->slot
, v
);
5134 scope
->setBranded();
5140 pobj
->dropProperty(cx
, prop
);
5144 JS_REQUIRES_STACK
bool
5145 TraceRecorder::hasIteratorMethod(JSObject
* obj
)
5147 JS_ASSERT(cx
->fp
->regs
->sp
+ 2 <= cx
->fp
->slots
+ cx
->fp
->script
->nslots
);
5149 return hasMethod(obj
, ATOM_TO_JSID(cx
->runtime
->atomState
.iteratorAtom
));
//
// nanojit::StackFilter::getTops — fetch the VMSideExit attached to a
// guard's record; presumably the dropped lines (original 5156-5158) set
// |spTop|/|rpTop| from the exit's stack/call-stack adjustments — TODO
// confirm against upstream. The return-type line before 5153 is also
// missing from this extraction.
//
5153 nanojit::StackFilter::getTops(LIns
* guard
, int& spTop
, int& rpTop
)
5155 VMSideExit
* e
= (VMSideExit
*)guard
->record()->exit
;
//
// nanojit::LirNameMap::formatGuard (NJ_VERBOSE builds only) — format a
// guard instruction for LIR dumps: opcode name, operand, and the attached
// VMSideExit's pc/imacpc/sp_adj/rp_adj plus the profiling guard ID.
//
// NOTE(review): the buffer-formatting call and several argument lines
// (original 5163-5165, 5167, 5169, 5172-5173) were dropped by this
// extraction; verify against upstream before editing.
//
5160 #if defined NJ_VERBOSE
5162 nanojit::LirNameMap::formatGuard(LIns
*i
, char *out
)
5166 x
= (VMSideExit
*)i
->record()->exit
;
5168 "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld (GuardID=%03d)",
5170 lirNames
[i
->opcode()],
5171 i
->oprnd1() ? formatRef(i
->oprnd1()) : "",
5174 (long int)x
->sp_adj
,
5175 (long int)x
->rp_adj
,
5176 i
->record()->profGuardID
);
//
// DeleteRecorder — tear down the trace monitor's active recorder (both
// trace-abort and trace-complete funnel through here) and, if either
// allocator reports out-of-memory or the code cache is overfull, flush
// the JIT cache via ResetJIT(FR_OOM).
//
// NOTE(review): the return statements (original lines ~5194-5197) were
// dropped by this extraction; verify against upstream before editing.
//
5180 static JS_REQUIRES_STACK
bool
5181 DeleteRecorder(JSContext
* cx
)
5183 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
5185 /* Aborting and completing a trace end up here. */
5186 delete tm
->recorder
;
5187 tm
->recorder
= NULL
;
5189 /* If we ran out of memory, flush the code cache. */
5190 if (tm
->dataAlloc
->outOfMemory() ||
5191 tm
->traceAlloc
->outOfMemory() ||
5192 js_OverfullJITCache(tm
)) {
5193 ResetJIT(cx
, FR_OOM
);
//
// CheckGlobalObjectShape — validate that tracing can proceed against
// |globalObj|: flush on a pending needFlush, reject globals with too many
// slots, and then either (a) with an active recorder, require the global
// object and shape to match the recorder's root fragment (otherwise back
// off and flush), returning the shape/slots through the optional
// outparams, or (b) with no recorder, find or allocate one of the
// MONITOR_N_GLOBAL_STATES tracked global states; if none is free, flush
// with FR_GLOBALS_FULL.
//
// NOTE(review): return statements, the "if (tm->recorder)" wrapper and
// several braces were dropped by this extraction; verify against upstream
// before editing.
//
5200 /* Check whether the shape of the global object has changed. */
5201 static JS_REQUIRES_STACK
bool
5202 CheckGlobalObjectShape(JSContext
* cx
, JSTraceMonitor
* tm
, JSObject
* globalObj
,
5203 uint32
*shape
= NULL
, SlotList
** slots
= NULL
)
5205 if (tm
->needFlush
) {
5206 ResetJIT(cx
, FR_DEEP_BAIL
);
5210 if (STOBJ_NSLOTS(globalObj
) > MAX_GLOBAL_SLOTS
)
5213 uint32 globalShape
= OBJ_SHAPE(globalObj
);
5216 VMFragment
* root
= (VMFragment
*)tm
->recorder
->getFragment()->root
;
5217 TreeInfo
* ti
= tm
->recorder
->getTreeInfo();
5219 /* Check the global shape matches the recorder's treeinfo's shape. */
5220 if (globalObj
!= root
->globalObj
|| globalShape
!= root
->globalShape
) {
5221 AUDIT(globalShapeMismatchAtEntry
);
5222 debug_only_printf(LC_TMTracer
,
5223 "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
5224 (void*)globalObj
, globalShape
, (void*)root
->globalObj
,
5226 Backoff(cx
, (jsbytecode
*) root
->ip
);
5227 ResetJIT(cx
, FR_GLOBAL_SHAPE_MISMATCH
);
5231 *shape
= globalShape
;
5233 *slots
= ti
->globalSlots
;
5237 /* No recorder, search for a tracked global-state (or allocate one). */
5238 for (size_t i
= 0; i
< MONITOR_N_GLOBAL_STATES
; ++i
) {
5239 GlobalState
&state
= tm
->globalStates
[i
];
5241 if (state
.globalShape
== uint32(-1)) {
5242 state
.globalObj
= globalObj
;
5243 state
.globalShape
= globalShape
;
5244 JS_ASSERT(state
.globalSlots
);
5245 JS_ASSERT(state
.globalSlots
->length() == 0);
5248 if (state
.globalObj
== globalObj
&& state
.globalShape
== globalShape
) {
5250 *shape
= globalShape
;
5252 *slots
= state
.globalSlots
;
5257 /* No currently-tracked-global found and no room to allocate, abort. */
5258 AUDIT(globalShapeMismatchAtEntry
);
5259 debug_only_printf(LC_TMTracer
,
5260 "No global slotlist for global shape %u, flushing cache.\n",
5262 ResetJIT(cx
, FR_GLOBALS_FULL
);
//
// StartRecorder — create a new TraceRecorder on the monitor for fragment
// |f| (with the given typemap, anchor exit, expected inner exit and outer
// context), after flushing on a pending needFlush. If recorder setup
// fails, recording is aborted; otherwise any leftover assembler error
// state is cleared before returning.
//
// NOTE(review): the failure-check wrapper around the abort call
// (original 5284-5285) and the return statements were dropped by this
// extraction; verify against upstream before editing.
//
5266 static JS_REQUIRES_STACK
bool
5267 StartRecorder(JSContext
* cx
, VMSideExit
* anchor
, Fragment
* f
, TreeInfo
* ti
,
5268 unsigned stackSlots
, unsigned ngslots
, JSTraceType
* typeMap
,
5269 VMSideExit
* expectedInnerExit
, jsbytecode
* outer
, uint32 outerArgc
,
5270 MonitorReason reason
)
5272 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
5273 if (JS_TRACE_MONITOR(cx
).needFlush
) {
5274 ResetJIT(cx
, FR_DEEP_BAIL
);
5278 JS_ASSERT(f
->root
!= f
|| !cx
->fp
->imacpc
);
5280 /* Start recording if no exception during construction. */
5281 tm
->recorder
= new TraceRecorder(cx
, anchor
, f
, ti
,
5282 stackSlots
, ngslots
, typeMap
,
5283 expectedInnerExit
, outer
, outerArgc
, reason
);
5286 js_AbortRecording(cx
, "setting up recorder failed");
5290 /* Clear any leftover error state. */
5291 Assembler
*assm
= JS_TRACE_MONITOR(cx
).assembler
;
5292 assm
->setError(None
);
//
// TrashTree — discard a compiled root fragment's tree: clear its
// vmprivate TreeInfo and recursively trash every dependent and linked
// tree so no stale cross-tree references survive.
//
// NOTE(review): the return-type line before 5297 and the early-out for
// uncompiled fragments (original 5302-5304) were dropped by this
// extraction; verify against upstream before editing.
//
5297 TrashTree(JSContext
* cx
, Fragment
* f
)
5299 JS_ASSERT((!f
->code()) == (!f
->vmprivate
));
5300 JS_ASSERT(f
== f
->root
);
5301 debug_only_printf(LC_TMTreeVis
, "TREEVIS TRASH FRAG=%p\n", (void*)f
);
5305 AUDIT(treesTrashed
);
5306 debug_only_print0(LC_TMTracer
, "Trashing tree info.\n");
5307 TreeInfo
* ti
= (TreeInfo
*)f
->vmprivate
;
5308 f
->vmprivate
= NULL
;
5310 Fragment
** data
= ti
->dependentTrees
.data();
5311 unsigned length
= ti
->dependentTrees
.length();
5312 for (unsigned n
= 0; n
< length
; ++n
)
5313 TrashTree(cx
, data
[n
]);
5314 data
= ti
->linkedTrees
.data();
5315 length
= ti
->linkedTrees
.length();
5316 for (unsigned n
= 0; n
< length
; ++n
)
5317 TrashTree(cx
, data
[n
]);
//
// SynthesizeFrame — rebuild an interpreter JSInlineFrame for an inlined
// call described by |fi| when execution leaves trace: allocate frame and
// slot storage from cx->stackPool (padding missing arguments with
// JSVAL_VOID), initialize the frame's fields, splice it in as cx->fp,
// maintain the display, and invoke the debugger call hook if installed.
// The return value mirrors the native-stack-slot count for the new frame
// (see the VisitFrameSlots comment referenced below).
//
// NOTE(review): this extraction dropped many lines (declarations of
// |newsp|/|missing|, else-branches, block-chain handling bodies, braces),
// so the visible control flow is incomplete; verify any change against
// upstream jstracer.cpp before editing.
//
5321 SynthesizeFrame(JSContext
* cx
, const FrameInfo
& fi
, JSObject
* callee
)
5323 VOUCH_DOES_NOT_REQUIRE_STACK();
5325 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, callee
);
5326 JS_ASSERT(FUN_INTERPRETED(fun
));
5328 /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
5329 JSStackFrame
* fp
= cx
->fp
;
5330 JS_ASSERT_IF(!fi
.imacpc
,
5331 js_ReconstructStackDepth(cx
, fp
->script
, fi
.pc
) ==
5332 uintN(fi
.spdist
- fp
->script
->nfixed
));
5334 uintN nframeslots
= JS_HOWMANY(sizeof(JSInlineFrame
), sizeof(jsval
));
5335 JSScript
* script
= fun
->u
.i
.script
;
5336 size_t nbytes
= (nframeslots
+ script
->nslots
) * sizeof(jsval
);
5338 /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
5339 JSArena
* a
= cx
->stackPool
.current
;
5340 void* newmark
= (void*) a
->avail
;
5341 uintN argc
= fi
.get_argc();
5342 jsval
* vp
= fp
->slots
+ fi
.spdist
- (2 + argc
);
// Fewer actual args than formals: try to extend in place within the
// current arena, filling the gap with JSVAL_VOID.
5346 if (fun
->nargs
> argc
) {
5347 const JSFrameRegs
& regs
= *fp
->regs
;
5349 newsp
= vp
+ 2 + fun
->nargs
;
5350 JS_ASSERT(newsp
> regs
.sp
);
5351 if ((jsuword
) newsp
<= a
->limit
) {
5352 if ((jsuword
) newsp
> a
->avail
)
5353 a
->avail
= (jsuword
) newsp
;
5354 jsval
* argsp
= newsp
;
5356 *--argsp
= JSVAL_VOID
;
5357 } while (argsp
!= regs
.sp
);
5360 missing
= fun
->nargs
- argc
;
5361 nbytes
+= (2 + fun
->nargs
) * sizeof(jsval
);
5365 /* Allocate the inline frame with its vars and operands. */
5366 if (a
->avail
+ nbytes
<= a
->limit
) {
5367 newsp
= (jsval
*) a
->avail
;
5369 JS_ASSERT(missing
== 0);
5372 * This allocation is infallible: ExecuteTree reserved enough stack.
5373 * (But see bug 491023.)
5375 JS_ARENA_ALLOCATE_CAST(newsp
, jsval
*, &cx
->stackPool
, nbytes
);
5379 * Move args if the missing ones overflow arena a, then push
5380 * undefined for the missing args.
5383 memcpy(newsp
, vp
, (2 + argc
) * sizeof(jsval
));
5385 newsp
= vp
+ 2 + argc
;
5387 *newsp
++ = JSVAL_VOID
;
5388 } while (--missing
!= 0);
5392 /* Claim space for the stack frame and initialize it. */
5393 JSInlineFrame
* newifp
= (JSInlineFrame
*) newsp
;
5394 newsp
+= nframeslots
;
5396 newifp
->frame
.callobj
= NULL
;
5397 newifp
->frame
.argsobj
= NULL
;
5398 newifp
->frame
.varobj
= NULL
;
5399 newifp
->frame
.script
= script
;
5400 newifp
->frame
.fun
= fun
;
5402 bool constructing
= fi
.is_constructing();
5403 newifp
->frame
.argc
= argc
;
5404 newifp
->callerRegs
.pc
= fi
.pc
;
5405 newifp
->callerRegs
.sp
= fp
->slots
+ fi
.spdist
;
5406 fp
->imacpc
= fi
.imacpc
;
5409 if (fi
.block
!= fp
->blockChain
) {
5410 for (JSObject
* obj
= fi
.block
; obj
!= fp
->blockChain
; obj
= STOBJ_GET_PARENT(obj
))
5414 fp
->blockChain
= fi
.block
;
5416 newifp
->frame
.argv
= newifp
->callerRegs
.sp
- argc
;
5417 JS_ASSERT(newifp
->frame
.argv
);
5419 // Initialize argv[-1] to a known-bogus value so we'll catch it if
5420 // someone forgets to initialize it later.
5421 newifp
->frame
.argv
[-1] = JSVAL_HOLE
;
5423 JS_ASSERT(newifp
->frame
.argv
>= StackBase(fp
) + 2);
5425 newifp
->frame
.rval
= JSVAL_VOID
;
5426 newifp
->frame
.down
= fp
;
5427 newifp
->frame
.annotation
= NULL
;
5428 newifp
->frame
.scopeChain
= NULL
; // will be updated in FlushNativeStackFrame
5429 newifp
->frame
.flags
= constructing
? JSFRAME_CONSTRUCTING
: 0;
5430 newifp
->frame
.dormantNext
= NULL
;
5431 newifp
->frame
.blockChain
= NULL
;
5432 newifp
->mark
= newmark
;
5433 newifp
->frame
.thisv
= JSVAL_NULL
; // will be updated in FlushNativeStackFrame
5435 newifp
->frame
.regs
= fp
->regs
;
5436 newifp
->frame
.regs
->pc
= script
->code
;
5437 newifp
->frame
.regs
->sp
= newsp
+ script
->nfixed
;
5438 newifp
->frame
.imacpc
= NULL
;
5439 newifp
->frame
.slots
= newsp
;
5440 if (script
->staticLevel
< JS_DISPLAY_SIZE
) {
5441 JSStackFrame
**disp
= &cx
->display
[script
->staticLevel
];
5442 newifp
->frame
.displaySave
= *disp
;
5443 *disp
= &newifp
->frame
;
5447 * Note that fp->script is still the caller's script; set the callee
5448 * inline frame's idea of caller version from its version.
5450 newifp
->callerVersion
= (JSVersion
) fp
->script
->version
;
5452 // After this paragraph, fp and cx->fp point to the newly synthesized frame.
5453 fp
->regs
= &newifp
->callerRegs
;
5454 fp
= cx
->fp
= &newifp
->frame
;
5457 * If there's a call hook, invoke it to compute the hookData used by
5458 * debuggers that cooperate with the interpreter.
5460 JSInterpreterHook hook
= cx
->debugHooks
->callHook
;
5462 newifp
->hookData
= hook(cx
, fp
, JS_TRUE
, 0, cx
->debugHooks
->callHookData
);
5464 newifp
->hookData
= NULL
;
5468 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
5470 * FIXME - We must count stack slots from caller's operand stack up to (but
5471 * not including) callee's, including missing arguments. Could we shift
5472 * everything down to the caller's fp->slots (where vars start) and avoid
5473 * some of the complexity?
5475 return (fi
.spdist
- fp
->down
->script
->nfixed
) +
5476 ((fun
->nargs
> fp
->argc
) ? fun
->nargs
- fp
->argc
: 0) +
5477 script
->nfixed
+ 1/*argsobj*/;
//
// SynthesizeSlowNativeFrame — build an interpreter frame for a slow
// native call that was active when a trace exited: allocate a
// JSInlineFrame from cx->stackPool, and populate it from the interpreter
// state's nativeVp vector (thisv, argc, argv, fun) and from the current
// frame (varobj, scopeChain), with JSFRAME_CONSTRUCTING derived from the
// exit.
//
// NOTE(review): declarations (|mark|, |ifp|), several field assignments
// and the frame-linking lines were dropped by this extraction; verify
// against upstream before editing.
//
5481 SynthesizeSlowNativeFrame(InterpState
& state
, JSContext
*cx
, VMSideExit
*exit
)
5483 VOUCH_DOES_NOT_REQUIRE_STACK();
5488 /* This allocation is infallible: ExecuteTree reserved enough stack. */
5489 mark
= JS_ARENA_MARK(&cx
->stackPool
);
5490 JS_ARENA_ALLOCATE_CAST(ifp
, JSInlineFrame
*, &cx
->stackPool
, sizeof(JSInlineFrame
));
5493 JSStackFrame
*fp
= &ifp
->frame
;
5499 fp
->varobj
= cx
->fp
->varobj
;
5501 fp
->thisv
= state
.nativeVp
[1];
5502 fp
->argc
= state
.nativeVpLen
- 2;
5503 fp
->argv
= state
.nativeVp
+ 2;
5504 fp
->fun
= GET_FUNCTION_PRIVATE(cx
, JSVAL_TO_OBJECT(fp
->argv
[-2]));
5505 fp
->rval
= JSVAL_VOID
;
5507 fp
->annotation
= NULL
;
5508 JS_ASSERT(cx
->fp
->scopeChain
);
5509 fp
->scopeChain
= cx
->fp
->scopeChain
;
5510 fp
->blockChain
= NULL
;
5511 fp
->flags
= exit
->constructing() ? JSFRAME_CONSTRUCTING
: 0;
5512 fp
->dormantNext
= NULL
;
5513 fp
->displaySave
= NULL
;
//
// RecordTree — begin recording a brand-new tree at root fragment |f|:
// re-validate the global shape, find an unused peer fragment (or allocate
// a fresh anchor), bail to ResetJIT on allocator pressure, create the
// fragment's TreeInfo, capture the entry typemap of all active slots,
// compute the native stack layout at the entry point, and hand off to
// StartRecorder.
//
// NOTE(review): failure-path return statements and several braces were
// dropped by this extraction (e.g. after Backoff/ResetJIT calls); verify
// against upstream before editing.
//
5519 static JS_REQUIRES_STACK
bool
5520 RecordTree(JSContext
* cx
, JSTraceMonitor
* tm
, VMFragment
* f
, jsbytecode
* outer
,
5521 uint32 outerArgc
, JSObject
* globalObj
, uint32 globalShape
,
5522 SlotList
* globalSlots
, uint32 argc
, MonitorReason reason
)
5524 JS_ASSERT(f
->root
== f
);
5526 /* Make sure the global type map didn't change on us. */
5527 if (!CheckGlobalObjectShape(cx
, tm
, globalObj
)) {
5528 Backoff(cx
, (jsbytecode
*) f
->root
->ip
);
5532 AUDIT(recorderStarted
);
5534 /* Try to find an unused peer fragment, or allocate a new one. */
5535 while (f
->code() && f
->peer
)
5538 f
= getAnchor(&JS_TRACE_MONITOR(cx
), f
->root
->ip
, globalObj
, globalShape
, argc
);
5541 ResetJIT(cx
, FR_OOM
);
5546 f
->lirbuf
= tm
->lirbuf
;
5548 if (tm
->dataAlloc
->outOfMemory() ||
5549 tm
->traceAlloc
->outOfMemory() ||
5550 js_OverfullJITCache(tm
)) {
5551 Backoff(cx
, (jsbytecode
*) f
->root
->ip
);
5552 ResetJIT(cx
, FR_OOM
);
5553 debug_only_print0(LC_TMTracer
,
5554 "Out of memory recording new tree, flushing cache.\n");
5558 JS_ASSERT(!f
->code() && !f
->vmprivate
);
5560 /* Set up the VM-private treeInfo structure for this fragment. */
5561 TreeInfo
* ti
= new (*tm
->traceAlloc
) TreeInfo(tm
->dataAlloc
, f
, globalSlots
);
5563 /* Capture the coerced type of each active slot in the type map. */
5564 ti
->typeMap
.captureTypes(cx
, globalObj
, *globalSlots
, 0 /* callDepth */);
5565 ti
->nStackTypes
= ti
->typeMap
.length() - globalSlots
->length();
5568 AssertTreeIsUnique(tm
, (VMFragment
*)f
, ti
);
5569 ti
->treeFileName
= cx
->fp
->script
->filename
;
5570 ti
->treeLineNumber
= js_FramePCToLineNumber(cx
, cx
->fp
);
5571 ti
->treePCOffset
= FramePCOffset(cx
->fp
);
5574 debug_only_printf(LC_TMTreeVis
, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
5575 (void*)f
, f
->ip
, ti
->treeFileName
, ti
->treeLineNumber
,
5576 FramePCOffset(cx
->fp
));
5577 debug_only_print0(LC_TMTreeVis
, " STACK=\"");
5578 for (unsigned i
= 0; i
< ti
->nStackTypes
; i
++)
5579 debug_only_printf(LC_TMTreeVis
, "%c", typeChar
[ti
->typeMap
[i
]]);
5580 debug_only_print0(LC_TMTreeVis
, "\" GLOBALS=\"");
5581 for (unsigned i
= 0; i
< ti
->nGlobalTypes(); i
++)
5582 debug_only_printf(LC_TMTreeVis
, "%c", typeChar
[ti
->typeMap
[ti
->nStackTypes
+ i
]]);
5583 debug_only_print0(LC_TMTreeVis
, "\"\n");
5586 /* Determine the native frame layout at the entry point. */
5587 unsigned entryNativeStackSlots
= ti
->nStackTypes
;
5588 JS_ASSERT(entryNativeStackSlots
== NativeStackSlots(cx
, 0 /* callDepth */));
5589 ti
->nativeStackBase
= (entryNativeStackSlots
-
5590 (cx
->fp
->regs
->sp
- StackBase(cx
->fp
))) * sizeof(double);
5591 ti
->maxNativeStackSlots
= entryNativeStackSlots
;
5592 ti
->maxCallDepth
= 0;
5593 ti
->script
= cx
->fp
->script
;
5595 /* Recording primary trace. */
5596 return StartRecorder(cx
, NULL
, f
, ti
,
5598 ti
->globalSlots
->length(),
5599 ti
->typeMap
.data(), NULL
, outer
, outerArgc
, reason
);
//
// FindLoopEdgeTarget — given an unstable-loop or recursive-unlinked exit,
// rebuild its full typemap, mark every double-typed slot undemotable in
// the oracle (keyed by recursive_pc or the source tree's ip), pick the
// peer list to search (the exit tree's own peers, or the loop at
// recursive_pc), and return the first peer whose typemap is linkable
// (Okay or Undemotes); otherwise TypeConsensus_Bad.
//
// NOTE(review): extraction dropped lines here (else-branch markers, the
// self-peer skip inside the loop, and the assignment through |peerp|
// around original lines 5650-5653); verify against upstream before
// editing.
//
5602 static JS_REQUIRES_STACK TypeConsensus
5603 FindLoopEdgeTarget(JSContext
* cx
, VMSideExit
* exit
, VMFragment
** peerp
)
5605 VMFragment
* from
= exit
->root();
5606 TreeInfo
* from_ti
= from
->getTreeInfo();
5608 JS_ASSERT(from
->code());
5610 TypeMap
typeMap(NULL
);
5611 FullMapFromExit(typeMap
, exit
);
5612 JS_ASSERT(typeMap
.length() - exit
->numStackSlots
== from_ti
->nGlobalTypes());
5614 /* Mark all double slots as undemotable */
5615 uint16
* gslots
= from_ti
->globalSlots
->data();
5616 for (unsigned i
= 0; i
< typeMap
.length(); i
++) {
5617 if (typeMap
[i
] == TT_DOUBLE
) {
5618 if (exit
->exitType
== RECURSIVE_UNLINKED_EXIT
) {
5619 if (i
< exit
->numStackSlots
)
5620 oracle
.markStackSlotUndemotable(cx
, i
, exit
->recursive_pc
);
5622 oracle
.markGlobalSlotUndemotable(cx
, gslots
[i
- exit
->numStackSlots
]);
5624 if (i
< from_ti
->nStackTypes
)
5625 oracle
.markStackSlotUndemotable(cx
, i
, from
->ip
);
5626 else if (i
>= exit
->numStackSlots
)
5627 oracle
.markGlobalSlotUndemotable(cx
, gslots
[i
- exit
->numStackSlots
]);
5631 JS_ASSERT(exit
->exitType
== UNSTABLE_LOOP_EXIT
||
5632 (exit
->exitType
== RECURSIVE_UNLINKED_EXIT
&& exit
->recursive_pc
));
5634 VMFragment
* firstPeer
= NULL
;
5635 if (exit
->exitType
== UNSTABLE_LOOP_EXIT
|| exit
->recursive_pc
== from
->ip
) {
5636 firstPeer
= (VMFragment
*)from
->first
;
5638 firstPeer
= getLoop(&JS_TRACE_MONITOR(cx
), exit
->recursive_pc
, from
->globalObj
,
5639 from
->globalShape
, from
->argc
);
5642 for (VMFragment
* peer
= firstPeer
; peer
; peer
= peer
->peer
) {
5643 TreeInfo
* peer_ti
= peer
->getTreeInfo();
5646 JS_ASSERT(peer
->argc
== from
->argc
);
5647 JS_ASSERT(exit
->numStackSlots
== peer_ti
->nStackTypes
);
5648 TypeConsensus consensus
= TypeMapLinkability(cx
, typeMap
, peer
);
5649 if (consensus
== TypeConsensus_Okay
|| consensus
== TypeConsensus_Undemotes
) {
5655 return TypeConsensus_Bad
;
/*
 * Unlink |exit| from this tree's singly-linked unstableExits list.
 *
 * NOTE(review): lossy extraction — the return-type line (orig 5658) and the
 * statements at orig 5666-5667 / 5671-5673 are missing; the function appears
 * to return something related to the removed node — TODO confirm against the
 * real file.
 */
TreeInfo::removeUnstableExit(VMSideExit* exit)
/* [gap: opening brace, orig 5660] */
    /* Now erase this exit from the unstable exit list. */
    UnstableExit** tail = &this->unstableExits;
    for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
        if (uexit->exit == exit) {
            /* Splice the node out; list nodes are arena-allocated elsewhere. */
            *tail = uexit->next;
            /* [gap: orig 5666-5667 — presumably a return here] */
        }
        tail = &uexit->next;
    }
    /* Callers only invoke this for exits known to be on the list. */
    JS_NOT_REACHED("exit not in unstable exit list");
    /* [gap: orig 5671-5673 — presumably "return NULL;" and closing brace] */
/*
 * Try to connect an unstable (type-unstable or recursive-unlinked) side exit
 * to a type-compatible peer tree; failing that, fall back to recording a new
 * primary trace at the loop header via RecordTree.
 *
 * NOTE(review): lossy extraction — several original lines (5676-5677,
 * 5681-5683, 5708, 5712-5714, 5720-5721, 5723, 5726-5729, 5732-5733, 5736-5737)
 * are missing, including most early "return" statements. [gap] markers below
 * are hedged reconstructions — TODO confirm.
 */
static JS_REQUIRES_STACK bool
AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsbytecode* outer,
                       /* [gap: orig 5676-5677 — remaining parameter(s), presumably
                        * "uint32 outerArgc)" and the opening brace] */
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    if (tm->needFlush) {
        /* A flush was requested; reset the JIT instead of stabilizing. */
        ResetJIT(cx, FR_DEEP_BAIL);
        /* [gap: orig 5681-5683 — presumably "return false; }"] */

    VMFragment* from = exit->root();
    TreeInfo* from_ti = from->getTreeInfo();

    VMFragment* peer = NULL;
    TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
    if (consensus == TypeConsensus_Okay) {
        TreeInfo* peer_ti = peer->getTreeInfo();
        /* Peers of a loop share the same global-slot list. */
        JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots);
        JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
                     from_ti->nStackTypes == peer_ti->nStackTypes);
        JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
        /* Patch this exit to its peer */
        JoinPeers(tm->assembler, exit, peer);
        /*
         * Update peer global types. The |from| fragment should already be updated because it on
         * the execution path, and somehow connected to the entry trace.
         */
        if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length())
            SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti);
        JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length());
        /* This exit is no longer unstable, so remove it. */
        if (exit->exitType == UNSTABLE_LOOP_EXIT)
            from_ti->removeUnstableExit(exit);
        debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);)
        /* [gap: orig 5708 — presumably "return false;" (no re-record needed)] */
    } else if (consensus == TypeConsensus_Undemotes) {
        /* The original tree is unconnectable, so trash it. */
        TrashTree(cx, peer);
        /* [gap: orig 5712-5714 — presumably "return false; }"] */

    /* Don't bother recording if the exit doesn't expect this PC */
    if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
        if (++exit->hitcount >= MAX_RECURSIVE_UNLINK_HITS) {
            /* Too many failed attempts: blacklist and drop the tree. */
            Blacklist((jsbytecode*)from->ip);
            TrashTree(cx, from);
            /* [gap: orig 5720-5721 — presumably "return false; }"] */
        if (exit->recursive_pc != cx->fp->regs->pc)
            /* [gap: orig 5723 — presumably "return false;"] */
        from = getLoop(tm, exit->recursive_pc, from->globalObj, from->globalShape, cx->fp->argc);
        /* use stale TI for RecordTree - since from might not have one anymore. */
    /* [gap: orig 5726-5727 — presumably branch close and a "!from" check] */

    JS_ASSERT(from == from->root);

    /* If this tree has been blacklisted, don't try to record a new one. */
    if (*(jsbytecode*)from->ip == JSOP_NOP)
        /* [gap: orig 5732-5733 — presumably "return false;"] */

    return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj,
                      from->globalShape, from_ti->globalSlots, cx->fp->argc,
                      /* [gap: orig 5736-5737 — trailing argument (presumably a
                       * RecordReason) and closing of the call/function] */
/*
 * Grow an existing tree by attaching (or reusing) a branch fragment at side
 * exit |anchor| and starting a recorder on it once the exit has become hot.
 * |exitedFrom| is non-NULL when we arrived via a nested guard, in which case
 * the anchor's and the inner exit's type maps are merged.
 *
 * NOTE(review): lossy extraction — many original lines are missing (e.g.
 * 5741, 5743-5745, 5749, 5751-5754, 5761-5762, 5764, 5766-5770, 5785-5790,
 * 5806, 5817-5818, 5834, 5838-5842, 5844-5849, 5851-5854), including the
 * MOZ_TRACEVIS preprocessor scaffolding, most "return" statements and the
 * failure paths. [gap] markers are hedged reconstructions — TODO confirm.
 */
static JS_REQUIRES_STACK bool
AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer
                    /* [gap: orig 5741 — presumably "#ifdef MOZ_TRACEVIS"] */
                    , TraceVisStateObj* tvso = NULL
                    /* [gap: orig 5743-5745 — presumably "#endif", ")" and "{"] */
    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        /* [gap: orig 5749] */
        if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
        /* [gap: orig 5751-5754 — presumably "return false; }"] */

    Fragment* f = anchor->root();
    JS_ASSERT(f->vmprivate);
    TreeInfo* ti = (TreeInfo*)f->vmprivate;

    /*
     * Don't grow trees above a certain size to avoid code explosion due to
     * [gap: orig 5761-5762 — rest of this comment]
     */
    if (ti->branchCount >= MAX_BRANCHES) {
        /* [gap: orig 5764] */
        if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
        /* [gap: orig 5766-5770 — presumably "return false; }" and a
         * "Fragment* c;" declaration for the branch fragment] */

    if (!(c = anchor->target)) {
        /* No branch fragment attached at this exit yet: create one. */
        JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
        Allocator& alloc = *tm->dataAlloc;
        /* Fragment profiling IDs are only minted when LC_FragProfile logging is on. */
        uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile)
                              ? (++(tm->lastFragID)) : 0;
        c = new (alloc) Fragment(cx->fp->regs->pc verbose_only(, profFragID));
        c->root = anchor->from->root;
        debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
                          " LINE=%d ANCHOR=%p OFFS=%d\n",
                          (void*)f, (void*)c, (void*)cx->fp->regs->pc, cx->fp->script->filename,
                          js_FramePCToLineNumber(cx, cx->fp), (void*)anchor,
                          FramePCOffset(cx->fp));
        /* [gap: orig 5785-5786 — presumably "anchor->target = c;" etc.] */
        verbose_only( tm->branches = new (alloc) Seq<Fragment*>(c, tm->branches); )
    /* [gap: orig 5788-5790 — presumably closing brace] */

    /*
     * If we are recycling a fragment, it might have a different ip so reset it
     * here. This can happen when attaching a branch to a NESTED_EXIT, which
     * might extend along separate paths (i.e. after the loop edge, and after a
     * return statement).
     */
    c->ip = cx->fp->regs->pc;

    debug_only_printf(LC_TMTracer,
                      "trying to attach another branch to the tree (hits = %d)\n", c->hits());

    int32_t& hits = c->hits();
    if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
        /* start tracing secondary trace from this point */
        c->lirbuf = f->lirbuf;
        unsigned stackSlots;
        /* [gap: orig 5806 — presumably "unsigned ngslots;"] */
        JSTraceType* typeMap;
        TypeMap fullMap(NULL);
        if (exitedFrom == NULL) {
            /*
             * If we are coming straight from a simple side exit, just use that
             * exit's type map as starting point.
             */
            ngslots = anchor->numGlobalSlots;
            stackSlots = anchor->numStackSlots;
            typeMap = anchor->fullTypeMap();
        /* [gap: orig 5817-5818 — presumably "} else {"] */
            /*
             * If we side-exited on a loop exit and continue on a nesting
             * guard, the nesting guard (anchor) has the type information for
             * everything below the current scope, and the actual guard we
             * exited from has the types for everything in the current scope
             * (and whatever it inlined). We have to merge those maps here.
             */
            VMSideExit* e1 = anchor;
            VMSideExit* e2 = exitedFrom;
            fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
            fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
            stackSlots = fullMap.length();
            ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
            JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
            JS_ASSERT(ngslots == fullMap.length() - stackSlots);
            typeMap = fullMap.data();
        /* [gap: orig 5834 — closing brace of the else branch] */
        JS_ASSERT(ngslots >= anchor->numGlobalSlots);
        bool rv = StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
                                ngslots, typeMap, exitedFrom, outer, cx->fp->argc,
                                /* [gap: orig 5838-5840 — trailing args and a
                                 * failure check around the tvso update below] */
            tvso->r = R_FAIL_EXTEND_START;
        /* [gap: orig 5842 — presumably "return rv; }"] */
    if (anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) {
        /* [gap: orig 5844-5849 — body of the slurp-fail special case] */
    }
    /* Exit not hot enough yet: report "cold" and decline to extend. */
    if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
    /* [gap: orig 5851-5854 — presumably "return false; }"] */
5855 static JS_REQUIRES_STACK VMSideExit
*
5856 ExecuteTree(JSContext
* cx
, Fragment
* f
, uintN
& inlineCallCount
,
5857 VMSideExit
** innermostNestedGuardp
);
/*
 * Called while recording when the interpreter hits a loop edge. Either the
 * edge returns to an inner loop we can call as a nested tree (via
 * attemptTreeCall), or we abort the current recording and possibly start
 * recording a new inner tree.
 *
 * NOTE(review): lossy extraction — original lines 5861, 5866-5868, 5874-5876,
 * 5883, 5892-5894, 5899-5901, 5904-5905, 5910-5911, 5917, 5922, 5928,
 * 5931-5933, 5936, 5938-5940, 5942-5944 and 5946-5947 are missing, including
 * preprocessor close, several "return false;" paths and loop bodies. [gap]
 * markers are hedged reconstructions — TODO confirm.
 */
static JS_REQUIRES_STACK bool
RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
/* [gap: opening brace, orig 5861] */
#ifdef JS_THREADSAFE
    /* Refuse to trace against a global owned by another thread's context. */
    if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
        js_AbortRecording(cx, "Global object not owned by this context");
        return false; /* we stay away from shared global objects */
    /* [gap: orig 5866-5868 — presumably "}" and "#endif"] */

    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);

    /* Process needFlush and deep abort requests. */
    if (tm->needFlush) {
        ResetJIT(cx, FR_DEEP_BAIL);
        /* [gap: orig 5874-5876 — presumably "return false; }"] */

    JS_ASSERT(r->getFragment() && !r->getFragment()->lastIns);
    VMFragment* root = (VMFragment*)r->getFragment()->root;

    /* Does this branch go to an inner loop? */
    VMFragment* first = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc,
                                root->globalObj, root->globalShape, cx->fp->argc);
    /* [gap: orig 5883 — presumably "if (!first) {"] */
        /* Not an inner loop we can call, abort trace. */
        AUDIT(returnToDifferentLoopHeader);
        JS_ASSERT(!cx->fp->imacpc);
        debug_only_printf(LC_TMTracer,
                          "loop edge to %lld, header %lld\n",
                          (long long int)(cx->fp->regs->pc - cx->fp->script->code),
                          (long long int)((jsbytecode*)r->getFragment()->root->ip - cx->fp->script->code));
        js_AbortRecording(cx, "Loop edge does not return to header");
        /* [gap: orig 5892-5894 — presumably "return false; }"] */

    /* Make sure inner tree call will not run into an out-of-memory condition. */
    if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
        !ReplenishReservedPool(cx, tm)) {
        js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
        /* [gap: orig 5899-5901 — presumably "return false; }"] */

    /*
     * Make sure the shape of the global object still matches (this might flush
     * [gap: orig 5904-5905 — rest of this comment]
     */
    JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    uint32 globalShape = -1;
    SlotList* globalSlots = NULL;
    if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
        /* [gap: orig 5910-5911 — presumably "return false;"] */

    debug_only_printf(LC_TMTracer,
                      "Looking for type-compatible peer (%s:%d@%d)\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp));
    /* [gap: orig 5917] */

    // Find a matching inner tree. If none can be found, compile one.
    VMFragment* f = r->findNestedCompatiblePeer(first);
    if (!f || !f->code()) {
        AUDIT(noCompatInnerTrees);
        /* [gap: orig 5922] */
        VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
        jsbytecode* outer = (jsbytecode*) outerFragment->ip;
        uint32 outerArgc = outerFragment->argc;
        uint32 argc = cx->fp->argc;
        /* The outer recording cannot continue without a callable inner tree. */
        js_AbortRecording(cx, "No compatible inner tree");
        /* [gap: orig 5928] */
        // Find an empty fragment we can recycle, or allocate a new one.
        for (f = first; f != NULL; f = f->peer) {
            /* [gap: orig 5931-5933 — loop body, presumably "if (!f->code()) break;"] */
        }
        if (!f || f->code()) {
            f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape, argc);
            /* [gap: orig 5936 — presumably "if (!f) {"] */
                ResetJIT(cx, FR_OOM);
                /* [gap: orig 5938-5940 — presumably "return false; } }"] */
        return RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc,
                          /* [gap: orig 5942-5944 — trailing argument (presumably
                           * a RecordReason) and block close] */

    return r->attemptTreeCall(f, inlineCallCount) == ARECORD_CONTINUE;
/* [gap: closing brace, orig 5946-5947] */
/*
 * Execute an already-compiled, type-compatible inner tree |f| from inside the
 * current recording, then dispatch on how the inner tree exited: call it from
 * the outer trace (emitTreeCall), stabilize it, or extend it — aborting the
 * outer recording where necessary.
 *
 * NOTE(review): lossy extraction — original lines 5950-5951, 5957, 5960,
 * 5966, 5968-5970, 5972-5977, 5980, 5984-5985, 5988-5989, 5994, 6002-6003,
 * 6005, 6009, 6011, 6018-6020, 6022, 6027-6028, 6033-6035 and 6039-6042 are
 * missing, including some case labels and fall-through markers. [gap]
 * markers are hedged reconstructions — TODO confirm.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::attemptTreeCall(VMFragment* f, uintN& inlineCallCount)
/* [gap: opening brace and comment opener, orig 5950-5951] */
    /*
     * It is absolutely forbidden to have recursive loops tree call themselves
     * because it could accidentally pop frames owned by the parent call, and
     * there is no way to deal with this yet. We could have to set a "start of
     * poppable rp stack" variable, and if that unequals "real start of rp stack",
     * it would be illegal to pop frames.
     *
     * In the interim, just do tree calls knowing that they won't go into
     * recursive trees that can pop parent frames.
     */
    if (f->getTreeInfo()->script == cx->fp->script) {
        if (f->getTreeInfo()->recursion >= Recursion_Unwinds) {
            Blacklist(cx->fp->script->code);
            js_AbortRecording(cx, "Inner tree is an unsupported type of recursion");
            return ARECORD_ABORTED;
        }
        f->getTreeInfo()->recursion = Recursion_Disallowed;
    /* [gap: orig 5968-5970 — closing brace] */

    /* Import the inner tree's entry types into the outer recording. */
    adjustCallerTypes(f);
    /* [gap: orig 5972-5974] */
    unsigned oldInlineCallCount = inlineCallCount;
    /* [gap: orig 5976-5977] */
    VMSideExit* innermostNestedGuard = NULL;
    VMSideExit* lr = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);

    /* ExecuteTree can reenter the interpreter and kill |this|. */
    if (!TRACE_RECORDER(cx))
        return ARECORD_ABORTED;
    /* [gap: orig 5984-5985 — presumably "if (!lr) {"] */
        js_AbortRecording(cx, "Couldn't call inner tree");
        return ARECORD_ABORTED;
    /* [gap: orig 5988-5989 — closing brace] */

    VMFragment* outerFragment = (VMFragment*)fragment->root;
    jsbytecode* outer = (jsbytecode*) outerFragment->ip;
    switch (lr->exitType) {
      case RECURSIVE_LOOP_EXIT:
      /* [gap: orig 5994 — presumably "case LOOP_EXIT:"] */
        /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
        if (innermostNestedGuard) {
            JSContext* _cx = cx;
            js_AbortRecording(cx, "Inner tree took different side exit, abort current "
                              "recording and grow nesting tree");
            return AttemptToExtendTree(_cx, innermostNestedGuard, lr, outer) ?
                   ARECORD_CONTINUE : ARECORD_ABORTED;
        /* [gap: orig 6002-6003 — closing brace] */

        JS_ASSERT(oldInlineCallCount == inlineCallCount);

        /* Emit a call to the inner tree and continue recording the outer tree trace. */
        emitTreeCall(f, lr);
        return ARECORD_CONTINUE;
      /* [gap: orig 6009] */
      case UNSTABLE_LOOP_EXIT:
      /* [gap: orig 6011 — presumably an opening brace for this case] */
        /* Abort recording so the inner loop can become type stable. */
        JSContext* _cx = cx;
        JSObject* _globalObj = globalObj;
        js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
        return AttemptToStabilizeTree(_cx, _globalObj, lr, outer, outerFragment->argc) ?
               ARECORD_CONTINUE : ARECORD_ABORTED;
      /* [gap: orig 6018-6020 — presumably case close and overflow-exit label(s)] */
        oracle.markInstructionUndemotable(cx->fp->regs->pc);
      /* [gap: orig 6022 — presumably a fall-through marker] */
      case RECURSIVE_SLURP_FAIL_EXIT:
      case RECURSIVE_SLURP_MISMATCH_EXIT:
      case RECURSIVE_MISMATCH_EXIT:
      case RECURSIVE_EMPTY_RP_EXIT:
      /* [gap: orig 6027-6028 — presumably further case labels + brace] */
        /* Abort recording the outer tree, extend the inner tree. */
        JSContext* _cx = cx;
        js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
        return AttemptToExtendTree(_cx, lr, NULL, outer) ? ARECORD_CONTINUE : ARECORD_ABORTED;
      /* [gap: orig 6033-6035 — presumably case close and "default:"] */
        debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
        js_AbortRecording(cx, "Inner tree not suitable for calling");
        return ARECORD_ABORTED;
    /* [gap: orig 6039-6042 — switch and function close] */
/*
 * Test whether the jsval at |vp| is compatible with the trace-entry type *m,
 * printing a mismatch diagnostic under LC_TMTracer when it is not.
 *
 * NOTE(review): lossy extraction — the return-type line (orig 6042), the
 * switch(*m) header, most case labels and all "return true/false" statements
 * are missing; only the per-type checks and debug messages survive. The
 * [gap] markers reconstruct the apparent structure — TODO confirm.
 */
IsEntryTypeCompatible(jsval* vp, JSTraceType* m)
/* [gap: opening brace, orig 6044] */
    unsigned tag = JSVAL_TAG(*vp);
    /* [gap: orig 6046] */
    debug_only_printf(LC_TMTracer, "%c/%c ", tagChar[tag], typeChar[*m]);
    /* [gap: orig 6048-6050 — presumably "switch (*m) { case TT_OBJECT:"] */
    if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
        !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
        /* [gap: orig 6053-6054 — presumably "return true; }"] */
    debug_only_printf(LC_TMTracer, "object != tag%u ", tag);
    /* [gap: orig 6056-6058 — presumably "return false; case TT_INT32:"] */
    if (JSVAL_IS_INT(*vp))
        /* [gap: orig 6060 — presumably "return true;"] */
    if (tag == JSVAL_DOUBLE && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(*vp), i))
        /* [gap: orig 6062 — presumably "return true;" (|i| declared in a missing line)] */
    debug_only_printf(LC_TMTracer, "int != tag%u(value=%lu) ", tag, (unsigned long)*vp);
    /* [gap: orig 6064-6065 — presumably "return false; case TT_DOUBLE:"] */
    if (JSVAL_IS_INT(*vp) || tag == JSVAL_DOUBLE)
        /* [gap: orig 6067 — presumably "return true;"] */
    debug_only_printf(LC_TMTracer, "double != tag%u ", tag);
    /* [gap: orig 6069-6070 — presumably "return false; case TT_JSVAL:"] */
    JS_NOT_REACHED("shouldn't see jsval type in entry");
    /* [gap: orig 6072-6073 — presumably "return false; case TT_STRING:"] */
    if (tag == JSVAL_STRING)
        /* [gap: orig 6075 — presumably "return true;"] */
    debug_only_printf(LC_TMTracer, "string != tag%u ", tag);
    /* [gap: orig 6077-6078 — presumably "return false; case TT_NULL:"] */
    if (JSVAL_IS_NULL(*vp))
        /* [gap: orig 6080 — presumably "return true;"] */
    debug_only_printf(LC_TMTracer, "null != tag%u ", tag);
    /* [gap: orig 6082 — presumably "return false;"] */
  case TT_PSEUDOBOOLEAN:
    if (tag == JSVAL_SPECIAL)
        /* [gap: orig 6085 — presumably "return true;"] */
    debug_only_printf(LC_TMTracer, "bool != tag%u ", tag);
    /* [gap: orig 6087-6088 — presumably "return false; default:"] */
    JS_ASSERT(*m == TT_FUNCTION);
    if (tag == JSVAL_OBJECT && !JSVAL_IS_NULL(*vp) &&
        HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
        /* [gap: orig 6092-6093 — presumably "return true; }"] */
    debug_only_printf(LC_TMTracer, "fun != tag%u ", tag);
    /* [gap: orig 6095-6098 — presumably "return false; } }"] */
/*
 * Slot visitor that checks, slot by slot, whether the current interpreter
 * state matches a candidate tree's entry type map, and feeds demotion
 * feedback to the oracle on int/double mismatches.
 *
 * NOTE(review): lossy extraction — several members (apparently mCx and an
 * mOk flag), parts of the constructor initializer list, access specifiers
 * and the type-map-pointer advance are missing from this view. [gap]
 * markers are hedged — TODO confirm against the real file.
 */
class TypeCompatibilityVisitor : public SlotVisitorBase
/* [gap: orig 6100 — opening brace] */
    TraceRecorder &mRecorder;          /* recorder whose LIR values we inspect */
    /* [gap: orig 6102 — presumably "JSContext* mCx;" (used below as mCx)] */
    JSTraceType *mTypeMap;             /* cursor into the candidate entry type map */
    unsigned mStackSlotNum;            /* running stack-slot index for oracle feedback */
    /* [gap: orig 6105-6106 — presumably "bool mOk;" and "public:"] */
    TypeCompatibilityVisitor (TraceRecorder &recorder,
                              JSTraceType *typeMap) :
        mRecorder(recorder),
        /* [gap: orig 6110-6115 — remaining initializers (mCx, mTypeMap,
         * mStackSlotNum, mOk) and constructor body] */

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        debug_only_printf(LC_TMTracer, "global%d=", n);
        if (!IsEntryTypeCompatible(vp, mTypeMap)) {
            /* [gap: orig 6120 — presumably "mOk = false;"] */
        } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
            /* Value is not provably int: remember this global must stay a double. */
            oracle.markGlobalSlotUndemotable(mCx, slot);
            /* [gap: orig 6123 — presumably "mOk = false;"] */
        } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
            oracle.markGlobalSlotUndemotable(mCx, slot);
        /* [gap: orig 6126-6129 — presumably "}" and "mTypeMap++;" then method close] */

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
            if (!IsEntryTypeCompatible(vp, mTypeMap)) {
                /* [gap: orig 6135 — presumably "mOk = false;"] */
            } else if (!isPromoteInt(mRecorder.get(vp)) && *mTypeMap == TT_INT32) {
                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
                /* [gap: orig 6138 — presumably "mOk = false;"] */
            } else if (JSVAL_IS_INT(*vp) && *mTypeMap == TT_DOUBLE) {
                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
            /* [gap: orig 6141-6153 — presumably cursor advances
             * (vp/mTypeMap/mStackSlotNum), "return true;", an isOk()
             * accessor and the class close — TODO confirm] */
/*
 * Walk the peer list starting at |f| and return the first inner tree whose
 * entry type map is compatible with the current interpreter state (with
 * oracle feedback recorded for mismatches by TypeCompatibilityVisitor).
 *
 * NOTE(review): lossy extraction — original lines 6156-6158, 6161-6162,
 * 6164-6166, 6168, 6170, 6173-6174, 6182, 6185 and 6187-6193 are missing,
 * including the local declarations of |tm|/|ti| and the success/return
 * path. [gap] markers are hedged — TODO confirm.
 */
JS_REQUIRES_STACK VMFragment*
TraceRecorder::findNestedCompatiblePeer(VMFragment* f)
/* [gap: opening brace and local declarations, orig 6156-6158] */
    tm = &JS_TRACE_MONITOR(cx);
    unsigned int ngslots = treeInfo->globalSlots->length();
    /* [gap: orig 6161-6162] */
    for (; f != NULL; f = (VMFragment*)f->peer) {
        /* [gap: orig 6164-6166 — presumably skip peers without compiled
         * code/vmprivate; TODO confirm] */
        ti = (TreeInfo*)f->vmprivate;
        /* [gap: orig 6168] */
        debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
        /* [gap: orig 6170] */
        /* Bring the candidate up to date with globals added since it was compiled. */
        if (ngslots > ti->nGlobalTypes())
            SpecializeTreesToMissingGlobals(cx, globalObj, ti);
        /* [gap: orig 6173-6174] */
        /*
         * Determine whether the typemap of the inner tree matches the outer
         * tree's current state. If the inner tree expects an integer, but the
         * outer tree doesn't guarantee an integer for that slot, we mark the
         * slot undemotable and mismatch here. This will force a new tree to be
         * compiled that accepts a double for the slot. If the inner tree
         * expects a double, but the outer tree has an integer, we can proceed,
         * but we mark the location undemotable.
         */
        TypeCompatibilityVisitor visitor(*this, ti->typeMap.data());
        VisitSlots(visitor, cx, 0, *treeInfo->globalSlots);
        /* [gap: orig 6185] */
        debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
        /* [gap: orig 6187-6193 — presumably "if (visitor.isOk()) return f; }"
         * followed by "return NULL; }" — TODO confirm] */
/*
 * Slot visitor that verifies every global and stack slot's current value is
 * compatible with a tree's entry type map (no oracle feedback — pure check).
 *
 * NOTE(review): lossy extraction — the opening brace, an apparent mOk member,
 * access specifiers, the constructor initializer list, an isOk() accessor and
 * the class close are missing from this view. [gap] markers are hedged.
 */
class CheckEntryTypeVisitor : public SlotVisitorBase
/* [gap: orig 6195-6196 — "{" and presumably "bool mOk;"] */
    JSTraceType *mTypeMap;     /* cursor into the entry type map being checked */
/* [gap: orig 6198 — presumably "public:"] */
    CheckEntryTypeVisitor(JSTraceType *typeMap) :
        /* [gap: orig 6200-6203 — initializers, presumably mOk(true),
         * mTypeMap(typeMap), and constructor close] */

    /* Check one slot against the cursor and advance the cursor. */
    JS_ALWAYS_INLINE void checkSlot(jsval *vp, char const *name, int i) {
        debug_only_printf(LC_TMTracer, "%s%d=", name, i);
        /* 0xCD is the debug fill pattern — catch reads past the written map. */
        JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
        mOk = IsEntryTypeCompatible(vp, mTypeMap++);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        /* [gap: orig 6212 — presumably "if (mOk)"] */
            checkSlot(vp, "global", n);
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i) {
            /* [gap: orig 6219-6220 — presumably "if (!mOk) break;"] */
            checkSlot(vp++, stackSlotKind(), i);
        }
        /* [gap: orig 6222-6231 — presumably "return mOk; }", an isOk()
         * accessor and the class close — TODO confirm] */
/*
 * Check if types are usable for trace execution.
 *
 * @param cx Context.
 * @param ti Tree info of peer we're testing.
 * @return True if compatible (with or without demotions), false otherwise.
 *
 * NOTE(review): lossy extraction — the comment opener (orig ~6231), the
 * opening/closing braces and a few blank lines are missing from this view.
 */
static JS_REQUIRES_STACK bool
CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeInfo* ti)
/* [gap: opening brace, orig 6240] */
    unsigned int ngslots = ti->globalSlots->length();

    JS_ASSERT(ti->nStackTypes == NativeStackSlots(cx, 0));

    /* Bring the tree up to date with globals tracked since it was compiled. */
    if (ngslots > ti->nGlobalTypes())
        SpecializeTreesToMissingGlobals(cx, globalObj, ti);

    /* The type map is the stack types followed by the global types. */
    JS_ASSERT(ti->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
    JS_ASSERT(ti->nGlobalTypes() == ngslots);

    CheckEntryTypeVisitor visitor(ti->typeMap.data());
    VisitSlots(visitor, cx, 0, *ti->globalSlots);

    debug_only_print0(LC_TMTracer, "\n");
    return visitor.isOk();
/* [gap: closing brace] */
/*
 * Find an acceptable entry tree given a PC.
 *
 * @param cx Context.
 * @param globalObj Global object.
 * @param f First peer fragment.
 * @param nodemote If true, will try to find a peer that does not require demotion.
 * @out count Number of fragments consulted.
 *
 * NOTE(review): lossy extraction — the comment opener, braces, the |count|
 * initialization/increment and the success/return paths (orig 6270-6271,
 * 6274, 6278-6284) are missing. [gap] markers are hedged — TODO confirm.
 * (The @param nodemote above is the original author's text; no such
 * parameter is visible in this signature.)
 */
static JS_REQUIRES_STACK VMFragment*
FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, VMFragment* f, uintN& count)
/* [gap: orig 6270-6271 — "{" and presumably "count = 0;"] */
    for (; f != NULL; f = f->peer) {
        /* Peers without vmprivate have no TreeInfo and cannot be entered. */
        if (f->vmprivate == NULL)
            /* [gap: orig 6274 — presumably "continue;"] */
        debug_only_printf(LC_TMTracer,
                          "checking vm types %p (ip: %p): ", (void*)f, f->ip);
        if (CheckEntryTypes(cx, globalObj, (TreeInfo*)f->vmprivate))
            /* [gap: orig 6278-6284 — presumably "return f;", "++count;", loop
             * close and "return NULL; }" — TODO confirm] */
6285 LeaveTree(InterpState
&, VMSideExit
* lr
);
/*
 * Enter compiled trace |f|: validate the scope chain, marshal the global and
 * stack slots into native frames, call the generated native code, and hand
 * the resulting side exit to LeaveTree. Returns the innermost side exit (or,
 * per the missing early paths, presumably NULL on refusal — TODO confirm).
 *
 * NOTE(review): lossy extraction — many original lines are missing (e.g.
 * 6290-6294, 6306-6307, 6312-6313, 6319-6322, 6327-6329, 6340, 6343,
 * 6346-6347, 6350, 6359, 6362, 6368, 6375-6376, 6379-6382, 6386-6387,
 * 6395-6396, 6400, 6402-6403, 6406-6407, 6409, 6412, 6414-6419, 6421, 6424,
 * 6426-6428, 6431, 6433, 6435, 6437, 6440, 6443-6444), including
 * preprocessor scaffolding and several declarations (|parent|, |rec|,
 * |reserve|). [gap] markers are hedged reconstructions — TODO confirm.
 */
static JS_REQUIRES_STACK VMSideExit*
ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
            VMSideExit** innermostNestedGuardp)
/* [gap: orig 6290-6291 — "{" and presumably "#ifdef MOZ_TRACEVIS"] */
    TraceVisStateObj tvso(cx, S_EXECUTE);
    /* [gap: orig 6293-6294 — presumably "#endif"] */

    JS_ASSERT(f->root == f && f->code() && f->vmprivate);

    /*
     * The JIT records and expects to execute with two scope-chain
     * assumptions baked-in:
     *
     * 1. That the bottom of the scope chain is global, in the sense of
     *    JSCLASS_IS_GLOBAL.
     *
     * 2. That the scope chain between fp and the global is free of
     *    "unusual" native objects such as HTML forms or other funny
     *    [gap: orig 6306-6307 — rest of this comment]
     *
     * #2 is checked here while following the scope-chain links, via
     * js_IsCacheableNonGlobalScope, which consults a whitelist of known
     * class types; once a global is found, it's checked for #1. Failing
     * either check causes an early return from execution.
     */
    /* [gap: orig 6312-6313 — presumably "JSObject* parent;"] */
    JSObject* child = cx->fp->scopeChain;
    while ((parent = OBJ_GET_PARENT(cx, child)) != NULL) {
        if (!js_IsCacheableNonGlobalScope(child)) {
            debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
            Blacklist((jsbytecode*) f->root->ip);
            /* [gap: orig 6319-6322 — presumably "return NULL; } child = parent; }"] */
    JSObject* globalObj = child;
    if (!(OBJ_GET_CLASS(cx, globalObj)->flags & JSCLASS_IS_GLOBAL)) {
        debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
        Blacklist((jsbytecode*) f->root->ip);
        /* [gap: orig 6327-6329 — presumably "return NULL; }"] */

    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
    TreeInfo* ti = (TreeInfo*)f->vmprivate;
    unsigned ngslots = ti->globalSlots->length();
    uint16* gslots = ti->globalSlots->data();
    unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);

    /* Make sure the global object is sane. */
    JS_ASSERT_IF(ngslots != 0,
                 OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
                 ((VMFragment*)f)->globalShape);
    /* [gap: orig 6340] */
    /* Make sure our caller replenished the double pool. */
    JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);

    /* Reserve objects and stack space now, to make leaving the tree infallible. */
    if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
        /* [gap: orig 6346-6347 — presumably "return NULL;"] */

    /* Set up the interpreter state block, which is followed by the native global frame. */
    InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
    /* [gap: orig 6350 — presumably "state->cx = cx;" — TODO confirm] */
    state->inlineCallCountp = &inlineCallCount;
    state->innermostNestedGuardp = innermostNestedGuardp;
    state->outermostTree = ti;
    state->lastTreeExitGuard = NULL;
    state->lastTreeCallGuard = NULL;
    state->rpAtLastTreeCall = NULL;
    state->nativeVp = NULL;
    state->builtinStatus = 0;
    /* [gap: orig 6359] */
    /* Set up the native global frame. */
    double* global = (double*)(state+1);
    /* [gap: orig 6362] */
    /* Set up the native stack frame. */
    double stack_buffer[MAX_NATIVE_STACK_SLOTS];
    state->stackBase = stack_buffer;
    /* Trace code runs with sp biased by the tree's native stack base. */
    state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
    state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
    /* [gap: orig 6368] */
    /* Set up the native call stack frame. */
    FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
    state->callstackBase = callstack_buffer;
    state->rp = callstack_buffer;
    state->eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
    state->sor = state->rp;

    /* [gap: orig 6375-6376 — presumably a comment and "void *reserve;"] */
    state->stackMark = JS_ARENA_MARK(&cx->stackPool);
    JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
    /* [gap: orig 6379-6382 — presumably an OOM check and "#ifdef DEBUG"] */
    /* Debug fill so reads of unwritten slots are detectable (0xCD pattern). */
    memset(stack_buffer, 0xCD, sizeof(stack_buffer));
    memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
    JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
    /* [gap: orig 6386-6387 — presumably "#endif"] */

    /* Canary past the global frame; re-checked after the native call. */
    debug_only_stmt(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
    debug_only_printf(LC_TMTracer,
                      "entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                      cx->fp->script->filename,
                      js_FramePCToLineNumber(cx, cx->fp),
                      FramePCOffset(cx->fp),
                      ti->maxNativeStackSlots,
                      /* [gap: orig 6395-6396 — final argument, presumably the
                       * code pointer, and the call close] */

    JS_ASSERT(ti->nGlobalTypes() == ngslots);
    BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
                     ti->typeMap.data(), global, stack_buffer);
    /* [gap: orig 6400] */
    union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
    /* [gap: orig 6402-6403 — presumably "u.code = f->code();"] */

#ifdef EXECUTE_TREE_TIMER
    state->startTime = rdtsc();
/* [gap: orig 6406-6407 — presumably "#endif"] */

    JS_ASSERT(!tm->tracecx);
    /* [gap: orig 6409 — presumably "tm->tracecx = cx;"] */
    state->prev = cx->interpState;
    cx->interpState = state;
    /* [gap: orig 6412] */
    debug_only_stmt(fflush(NULL));
    /* [gap: orig 6414-6415 — presumably "GuardRecord* rec;"] */
    // Note that the block scoping is crucial here for TraceVis; the
    // TraceVisStateObj constructors and destructors must run at the right times.
    /* [gap: orig 6418-6419 — presumably "{" and "#ifdef MOZ_TRACEVIS"] */
        TraceVisStateObj tvso_n(cx, S_NATIVE);
        /* [gap: orig 6421 — presumably "#endif"] */
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
        SIMULATE_FASTCALL(rec, state, NULL, u.func);
/* [gap: orig 6424 — presumably "#else"] */
        rec = u.func(state, NULL);
/* [gap: orig 6426-6428 — presumably "#endif" and the block close] */

    /* Canary intact: the trace did not overrun the global frame. */
    JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
    JS_ASSERT(!state->nativeVp);
    /* [gap: orig 6431] */
    VMSideExit* lr = (VMSideExit*)rec->exit;
    /* [gap: orig 6433] */
    AUDIT(traceTriggered);
    /* [gap: orig 6435] */
    cx->interpState = state->prev;
    /* [gap: orig 6437] */
    JS_ASSERT(!cx->bailExit);
    JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
    /* [gap: orig 6440 — presumably "tm->tracecx = NULL;"] */
    LeaveTree(*state, lr);
    return state->innermost;
/* [gap: closing brace, orig 6443-6444] */
6445 static JS_FORCES_STACK
void
6446 LeaveTree(InterpState
& state
, VMSideExit
* lr
)
6448 VOUCH_DOES_NOT_REQUIRE_STACK();
6450 JSContext
* cx
= state
.cx
;
6451 FrameInfo
** callstack
= state
.callstackBase
;
6452 double* stack
= state
.stackBase
;
6455 * Except if we find that this is a nested bailout, the guard the call
6456 * returned is the one we have to use to adjust pc and sp.
6458 VMSideExit
* innermost
= lr
;
6461 * While executing a tree we do not update state.sp and state.rp even if
6462 * they grow. Instead, guards tell us by how much sp and rp should be
6463 * incremented in case of a side exit. When calling a nested tree, however,
6464 * we actively adjust sp and rp. If we have such frames from outer trees on
6465 * the stack, then rp will have been adjusted. Before we can process the
6466 * stack of the frames of the tree we directly exited from, we have to
6467 * first work our way through the outer frames and generate interpreter
6468 * frames for them. Once the call stack (rp) is empty, we can process the
6469 * final frames (which again are not directly visible and only the guard we
6470 * exited on will tells us about).
6472 FrameInfo
** rp
= (FrameInfo
**)state
.rp
;
6473 if (lr
->exitType
== NESTED_EXIT
) {
6474 VMSideExit
* nested
= state
.lastTreeCallGuard
;
6477 * If lastTreeCallGuard is not set in state, we only have a single
6478 * level of nesting in this exit, so lr itself is the innermost and
6479 * outermost nested guard, and hence we set nested to lr. The
6480 * calldepth of the innermost guard is not added to state.rp, so we
6481 * do it here manually. For a nesting depth greater than 1 the
6482 * CallTree builtin already added the innermost guard's calldepth
6483 * to state.rpAtLastTreeCall.
6486 rp
+= lr
->calldepth
;
6489 * During unwinding state.rp gets overwritten at every step and we
6490 * restore it here to its state at the innermost nested guard. The
6491 * builtin already added the calldepth of that innermost guard to
6494 rp
= (FrameInfo
**)state
.rpAtLastTreeCall
;
6496 innermost
= state
.lastTreeExitGuard
;
6497 if (state
.innermostNestedGuardp
)
6498 *state
.innermostNestedGuardp
= nested
;
6500 JS_ASSERT(nested
->exitType
== NESTED_EXIT
);
6501 JS_ASSERT(state
.lastTreeExitGuard
);
6502 JS_ASSERT(state
.lastTreeExitGuard
->exitType
!= NESTED_EXIT
);
6505 int32_t bs
= state
.builtinStatus
;
6506 bool bailed
= innermost
->exitType
== STATUS_EXIT
&& (bs
& JSBUILTIN_BAILED
);
6511 * A _FAIL native already called LeaveTree. We already reconstructed
6512 * the interpreter stack, in pre-call state, with pc pointing to the
6513 * CALL/APPLY op, for correctness. Then we continued in native code.
6515 * First, if we just returned from a slow native, pop its stack frame.
6517 if (!cx
->fp
->script
) {
6518 JSStackFrame
*fp
= cx
->fp
;
6519 JS_ASSERT(FUN_SLOW_NATIVE(fp
->fun
));
6520 JS_ASSERT(fp
->regs
== NULL
);
6521 JS_ASSERT(fp
->down
->regs
!= &((JSInlineFrame
*) fp
)->callerRegs
);
6523 JS_ARENA_RELEASE(&cx
->stackPool
, ((JSInlineFrame
*) fp
)->mark
);
6525 JS_ASSERT(cx
->fp
->script
);
6527 if (!(bs
& JSBUILTIN_ERROR
)) {
6529 * The builtin or native deep-bailed but finished successfully
6530 * (no exception or error).
6532 * After it returned, the JIT code stored the results of the
6533 * builtin or native at the top of the native stack and then
6534 * immediately flunked the guard on state->builtinStatus.
6536 * Now LeaveTree has been called again from the tail of
6537 * ExecuteTree. We are about to return to the interpreter. Adjust
6538 * the top stack frame to resume on the next op.
6540 JSFrameRegs
* regs
= cx
->fp
->regs
;
6541 JSOp op
= (JSOp
) *regs
->pc
;
6542 JS_ASSERT(op
== JSOP_CALL
|| op
== JSOP_APPLY
|| op
== JSOP_NEW
||
6543 op
== JSOP_GETPROP
|| op
== JSOP_GETTHISPROP
|| op
== JSOP_GETARGPROP
||
6544 op
== JSOP_GETLOCALPROP
|| op
== JSOP_LENGTH
||
6545 op
== JSOP_GETELEM
|| op
== JSOP_CALLELEM
||
6546 op
== JSOP_SETPROP
|| op
== JSOP_SETNAME
|| op
== JSOP_SETMETHOD
||
6547 op
== JSOP_SETELEM
|| op
== JSOP_INITELEM
||
6548 op
== JSOP_INSTANCEOF
);
6549 const JSCodeSpec
& cs
= js_CodeSpec
[op
];
6550 regs
->sp
-= (cs
.format
& JOF_INVOKE
) ? GET_ARGC(regs
->pc
) + 2 : cs
.nuses
;
6551 regs
->sp
+= cs
.ndefs
;
6552 regs
->pc
+= cs
.length
;
6554 * JSOP_SETELEM can be coalesced with a JSOP_POP in the interpeter.
6555 * Since this doesn't re-enter the recorder, the post-state snapshot
6556 * is invalid. Fix it up here.
6558 if (op
== JSOP_SETELEM
&& (JSOp
)*regs
->pc
== JSOP_POP
) {
6559 regs
->pc
+= JSOP_POP_LENGTH
;
6560 JS_ASSERT(js_CodeSpec
[JSOP_POP
].ndefs
== 0 && js_CodeSpec
[JSOP_POP
].nuses
== 1);
6563 JS_ASSERT_IF(!cx
->fp
->imacpc
,
6564 cx
->fp
->slots
+ cx
->fp
->script
->nfixed
+
6565 js_ReconstructStackDepth(cx
, cx
->fp
->script
, regs
->pc
) ==
6567 JS_ASSERT(regs
->pc
== innermost
->pc
);
6570 * If there's a tree call around the point that we deep exited at,
6571 * then state.sp and state.rp were restored to their original
6572 * values before the tree call and sp might be less than deepBailSp,
6573 * which we sampled when we were told to deep bail.
6575 JS_ASSERT(state
.deepBailSp
>= state
.stackBase
&& state
.sp
<= state
.deepBailSp
);
6578 * As explained above, the JIT code stored a result value or values
6579 * on the native stack. Transfer them to the interpreter stack now.
6580 * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
6583 JSTraceType
* typeMap
= innermost
->stackTypeMap();
6584 for (int i
= 1; i
<= cs
.ndefs
; i
++) {
6587 typeMap
[innermost
->numStackSlots
- i
],
6588 (jsdouble
*) state
.deepBailSp
6589 + innermost
->sp_adj
/ sizeof(jsdouble
) - i
);
6595 /* Save the innermost FrameInfo for guardUpRecursion */
6596 if (innermost
->exitType
== RECURSIVE_MISMATCH_EXIT
) {
6597 /* There should never be a static calldepth for a recursive mismatch. */
6598 JS_ASSERT(innermost
->calldepth
== 0);
6599 /* There must be at least one item on the rp stack. */
6600 JS_ASSERT(callstack
< rp
);
6601 /* :TODO: don't be all squirrelin' this in here */
6602 innermost
->recursive_down
= *(rp
- 1);
6605 /* Slurp failure should have no frames */
6606 JS_ASSERT_IF(innermost
->exitType
== RECURSIVE_SLURP_FAIL_EXIT
,
6607 innermost
->calldepth
== 0 && callstack
== rp
);
6609 JS_ARENA_RELEASE(&cx
->stackPool
, state
.stackMark
);
6610 while (callstack
< rp
) {
6611 FrameInfo
* fi
= *callstack
;
6612 /* Peek at the callee native slot in the not-yet-synthesized down frame. */
6613 JSObject
* callee
= *(JSObject
**)&stack
[fi
->callerHeight
];
6616 * Synthesize a stack frame and write out the values in it using the
6617 * type map pointer on the native call stack.
6619 SynthesizeFrame(cx
, *fi
, callee
);
6620 int slots
= FlushNativeStackFrame(cx
, 1 /* callDepth */, (*callstack
)->get_typemap(),
6623 JSStackFrame
* fp
= cx
->fp
;
6624 debug_only_printf(LC_TMTracer
,
6625 "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
6626 fp
->script
->filename
,
6627 js_FramePCToLineNumber(cx
, fp
),
6633 * Keep track of the additional frames we put on the interpreter stack
6634 * and the native stack slots we consumed.
6636 ++*state
.inlineCallCountp
;
6642 * We already synthesized the frames around the innermost guard. Here we
6643 * just deal with additional frames inside the tree we are bailing out
6646 JS_ASSERT(rp
== callstack
);
6647 unsigned calldepth
= innermost
->calldepth
;
6648 unsigned calldepth_slots
= 0;
6649 unsigned calleeOffset
= 0;
6650 for (unsigned n
= 0; n
< calldepth
; ++n
) {
6651 /* Peek at the callee native slot in the not-yet-synthesized down frame. */
6652 calleeOffset
+= callstack
[n
]->callerHeight
;
6653 JSObject
* callee
= *(JSObject
**)&stack
[calleeOffset
];
6655 /* Reconstruct the frame. */
6656 calldepth_slots
+= SynthesizeFrame(cx
, *callstack
[n
], callee
);
6657 ++*state
.inlineCallCountp
;
6659 JSStackFrame
* fp
= cx
->fp
;
6660 debug_only_printf(LC_TMTracer
,
6661 "synthesized shallow frame for %s:%u@%u\n",
6662 fp
->script
->filename
, js_FramePCToLineNumber(cx
, fp
),
6668 * Adjust sp and pc relative to the tree we exited from (not the tree we
6669 * entered into). These are our final values for sp and pc since
6670 * SynthesizeFrame has already taken care of all frames in between. But
6671 * first we recover fp->blockChain, which comes from the side exit
6674 JSStackFrame
* fp
= cx
->fp
;
6676 fp
->blockChain
= innermost
->block
;
6679 * If we are not exiting from an inlined frame, the state->sp is spbase.
6680 * Otherwise spbase is whatever slots frames around us consume.
6682 fp
->regs
->pc
= innermost
->pc
;
6683 fp
->imacpc
= innermost
->imacpc
;
6684 fp
->regs
->sp
= StackBase(fp
) + (innermost
->sp_adj
/ sizeof(double)) - calldepth_slots
;
6685 JS_ASSERT_IF(!fp
->imacpc
,
6686 fp
->slots
+ fp
->script
->nfixed
+
6687 js_ReconstructStackDepth(cx
, fp
->script
, fp
->regs
->pc
) == fp
->regs
->sp
);
6689 #ifdef EXECUTE_TREE_TIMER
6690 uint64 cycles
= rdtsc() - state
.startTime
;
6691 #elif defined(JS_JIT_SPEW)
6695 debug_only_printf(LC_TMTracer
,
6696 "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
6697 "calldepth=%d, cycles=%llu\n",
6698 fp
->script
->filename
,
6699 js_FramePCToLineNumber(cx
, fp
),
6701 js_CodeName
[fp
->imacpc
? *fp
->imacpc
: *fp
->regs
->pc
],
6703 getExitName(lr
->exitType
),
6704 (long long int)(fp
->regs
->sp
- StackBase(fp
)),
6706 (unsigned long long int)cycles
);
6709 * If this trace is part of a tree, later branches might have added
6710 * additional globals for which we don't have any type information
6711 * available in the side exit. We merge in this information from the entry
6712 * type-map. See also the comment in the constructor of TraceRecorder
6713 * regarding why this is always safe to do.
6715 TreeInfo
* outermostTree
= state
.outermostTree
;
6716 uint16
* gslots
= outermostTree
->globalSlots
->data();
6717 unsigned ngslots
= outermostTree
->globalSlots
->length();
6718 JS_ASSERT(ngslots
== outermostTree
->nGlobalTypes());
6719 JSTraceType
* globalTypeMap
;
6721 /* Are there enough globals? */
6722 Queue
<JSTraceType
> typeMap(0);
6723 if (innermost
->numGlobalSlots
== ngslots
) {
6724 /* Yes. This is the ideal fast path. */
6725 globalTypeMap
= innermost
->globalTypeMap();
6728 * No. Merge the typemap of the innermost entry and exit together. This
6729 * should always work because it is invalid for nested trees or linked
6730 * trees to have incompatible types. Thus, whenever a new global type
6731 * is lazily added into a tree, all dependent and linked trees are
6732 * immediately specialized (see bug 476653).
6734 JS_ASSERT(innermost
->root()->getTreeInfo()->nGlobalTypes() == ngslots
);
6735 JS_ASSERT(innermost
->root()->getTreeInfo()->nGlobalTypes() > innermost
->numGlobalSlots
);
6736 typeMap
.ensure(ngslots
);
6738 unsigned check_ngslots
=
6740 BuildGlobalTypeMapFromInnerTree(typeMap
, innermost
);
6741 JS_ASSERT(check_ngslots
== ngslots
);
6742 globalTypeMap
= typeMap
.data();
6745 /* Write back the topmost native stack frame. */
6746 unsigned ignoreSlots
= innermost
->exitType
== RECURSIVE_SLURP_FAIL_EXIT
?
6747 innermost
->numStackSlots
- 1 : 0;
6751 FlushNativeStackFrame(cx
, innermost
->calldepth
,
6752 innermost
->stackTypeMap(),
6753 stack
, NULL
, ignoreSlots
);
6754 JS_ASSERT(unsigned(slots
) == innermost
->numStackSlots
);
6756 if (innermost
->nativeCalleeWord
)
6757 SynthesizeSlowNativeFrame(state
, cx
, innermost
);
6759 /* Write back interned globals. */
6760 double* global
= (double*)(&state
+ 1);
6761 FlushNativeGlobalFrame(cx
, global
,
6762 ngslots
, gslots
, globalTypeMap
);
6764 /* Verify that our state restoration worked. */
6765 for (JSStackFrame
* fp
= cx
->fp
; fp
; fp
= fp
->down
) {
6766 JS_ASSERT_IF(fp
->argv
, JSVAL_IS_OBJECT(fp
->argv
[-1]));
6770 if (innermost
->exitType
!= TIMEOUT_EXIT
)
6771 AUDIT(sideExitIntoInterpreter
);
6773 AUDIT(timeoutIntoInterpreter
);
6776 state
.innermost
= innermost
;
6779 JS_REQUIRES_STACK
bool
6780 js_MonitorLoopEdge(JSContext
* cx
, uintN
& inlineCallCount
, MonitorReason reason
)
6783 TraceVisStateObj
tvso(cx
, S_MONITOR
);
6786 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
6788 /* Is the recorder currently active? */
6790 jsbytecode
* innerLoopHeaderPC
= cx
->fp
->regs
->pc
;
6792 if (RecordLoopEdge(cx
, tm
->recorder
, inlineCallCount
))
6796 * RecordLoopEdge will invoke an inner tree if we have a matching
6797 * one. If we arrive here, that tree didn't run to completion and
6798 * instead we mis-matched or the inner tree took a side exit other than
6799 * the loop exit. We are thus no longer guaranteed to be parked on the
6800 * same loop header js_MonitorLoopEdge was called for. In fact, this
6801 * might not even be a loop header at all. Hence if the program counter
6802 * no longer hovers over the inner loop header, return to the
6803 * interpreter and do not attempt to trigger or record a new tree at
6806 if (innerLoopHeaderPC
!= cx
->fp
->regs
->pc
) {
6808 tvso
.r
= R_INNER_SIDE_EXIT
;
6813 JS_ASSERT(!tm
->recorder
);
6815 /* Check the pool of reserved doubles (this might trigger a GC). */
6816 if (tm
->reservedDoublePoolPtr
< (tm
->reservedDoublePool
+ MAX_NATIVE_STACK_SLOTS
) &&
6817 !ReplenishReservedPool(cx
, tm
)) {
6821 return false; /* Out of memory, don't try to record now. */
6825 * Make sure the shape of the global object still matches (this might flush
6828 JSObject
* globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
6829 uint32 globalShape
= -1;
6830 SlotList
* globalSlots
= NULL
;
6832 if (!CheckGlobalObjectShape(cx
, tm
, globalObj
, &globalShape
, &globalSlots
)) {
6833 Backoff(cx
, cx
->fp
->regs
->pc
);
6837 /* Do not enter the JIT code with a pending operation callback. */
6838 if (cx
->operationCallbackFlag
) {
6840 tvso
.r
= R_CALLBACK_PENDING
;
6845 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
6846 uint32 argc
= cx
->fp
->argc
;
6848 VMFragment
* f
= getLoop(tm
, pc
, globalObj
, globalShape
, argc
);
6850 f
= getAnchor(tm
, pc
, globalObj
, globalShape
, argc
);
6853 ResetJIT(cx
, FR_OOM
);
6855 tvso
.r
= R_OOM_GETANCHOR
;
6861 * If we have no code in the anchor and no peers, we definitively won't be
6862 * able to activate any trees, so start compiling.
6864 if (!f
->code() && !f
->peer
) {
6866 if (++f
->hits() < HOTLOOP
) {
6868 tvso
.r
= f
->hits() < 1 ? R_BACKED_OFF
: R_COLD
;
6874 * We can give RecordTree the root peer. If that peer is already taken,
6875 * it will walk the peer list and find us a free slot or allocate a new
6878 bool rv
= RecordTree(cx
, tm
, f
->first
, NULL
, 0, globalObj
, globalShape
,
6879 globalSlots
, argc
, reason
);
6882 tvso
.r
= R_FAIL_RECORD_TREE
;
6887 debug_only_printf(LC_TMTracer
,
6888 "Looking for compat peer %d@%d, from %p (ip: %p)\n",
6889 js_FramePCToLineNumber(cx
, cx
->fp
),
6890 FramePCOffset(cx
->fp
), (void*)f
, f
->ip
);
6893 VMFragment
* match
= FindVMCompatiblePeer(cx
, globalObj
, f
, count
);
6895 if (count
< MAXPEERS
)
6899 * If we hit the max peers ceiling, don't try to lookup fragments all
6900 * the time. That's expensive. This must be a rather type-unstable loop.
6902 debug_only_print0(LC_TMTracer
, "Blacklisted: too many peer trees.\n");
6903 Blacklist((jsbytecode
*) f
->root
->ip
);
6905 tvso
.r
= R_MAX_PEERS
;
6911 * Trees that only unwind recursive frames usually won't do much work, and
6912 * most time will be spent entering and exiting ExecuteTree(). There's no
6913 * benefit to doing this until the down-recursive side completes.
6915 if (match
->getTreeInfo()->recursion
== Recursion_Unwinds
)
6918 VMSideExit
* lr
= NULL
;
6919 VMSideExit
* innermostNestedGuard
= NULL
;
6921 lr
= ExecuteTree(cx
, match
, inlineCallCount
, &innermostNestedGuard
);
6924 tvso
.r
= R_FAIL_EXECUTE_TREE
;
6930 * If we exit on a branch, or on a tree call guard, try to grow the inner
6931 * tree (in case of a branch exit), or the tree nested around the tree we
6932 * exited from (in case of the tree call guard).
6935 switch (lr
->exitType
) {
6936 case RECURSIVE_UNLINKED_EXIT
:
6937 case UNSTABLE_LOOP_EXIT
:
6938 rv
= AttemptToStabilizeTree(cx
, globalObj
, lr
, NULL
, 0);
6941 tvso
.r
= R_FAIL_STABILIZE
;
6946 oracle
.markInstructionUndemotable(cx
->fp
->regs
->pc
);
6948 case RECURSIVE_SLURP_FAIL_EXIT
:
6949 case RECURSIVE_SLURP_MISMATCH_EXIT
:
6950 case RECURSIVE_EMPTY_RP_EXIT
:
6951 case RECURSIVE_MISMATCH_EXIT
:
6954 return AttemptToExtendTree(cx
, lr
, NULL
, NULL
6960 case RECURSIVE_LOOP_EXIT
:
6962 if (innermostNestedGuard
)
6963 return AttemptToExtendTree(cx
, innermostNestedGuard
, lr
, NULL
6969 tvso
.r
= R_NO_EXTEND_OUTER
;
6974 case MISMATCH_EXIT
: tvso
.r
= R_MISMATCH_EXIT
; return false;
6975 case OOM_EXIT
: tvso
.r
= R_OOM_EXIT
; return false;
6976 case TIMEOUT_EXIT
: tvso
.r
= R_TIMEOUT_EXIT
; return false;
6977 case DEEP_BAIL_EXIT
: tvso
.r
= R_DEEP_BAIL_EXIT
; return false;
6978 case STATUS_EXIT
: tvso
.r
= R_STATUS_EXIT
; return false;
6983 * No, this was an unusual exit (i.e. out of memory/GC), so just resume
6987 tvso
.r
= R_OTHER_EXIT
;
6993 JS_REQUIRES_STACK AbortableRecordingStatus
6994 TraceRecorder::monitorRecording(JSContext
* cx
, TraceRecorder
* tr
, JSOp op
)
6996 Assembler
*assm
= JS_TRACE_MONITOR(cx
).assembler
;
6997 JSTraceMonitor
&localtm
= JS_TRACE_MONITOR(cx
);
6999 /* Process needFlush requests now. */
7000 if (localtm
.needFlush
) {
7001 ResetJIT(cx
, FR_DEEP_BAIL
);
7002 return ARECORD_ABORTED
;
7004 JS_ASSERT(!tr
->fragment
->lastIns
);
7007 * Clear one-shot state used to communicate between record_JSOP_CALL and post-
7008 * opcode-case-guts record hook (record_NativeCallComplete).
7010 tr
->pendingSpecializedNative
= NULL
;
7011 tr
->newobj_ins
= NULL
;
7013 /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
7014 if (tr
->pendingGuardCondition
) {
7015 tr
->guard(true, tr
->pendingGuardCondition
, STATUS_EXIT
);
7016 tr
->pendingGuardCondition
= NULL
;
7019 /* Handle one-shot request to unbox the result of a property get. */
7020 if (tr
->pendingUnboxSlot
) {
7021 LIns
* val_ins
= tr
->get(tr
->pendingUnboxSlot
);
7022 val_ins
= tr
->unbox_jsval(*tr
->pendingUnboxSlot
, val_ins
, tr
->snapshot(BRANCH_EXIT
));
7023 tr
->set(tr
->pendingUnboxSlot
, val_ins
);
7024 tr
->pendingUnboxSlot
= 0;
7028 if (js_LogController
.lcbits
& LC_TMRecorder
) {
7029 js_Disassemble1(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
,
7031 ? 0 : cx
->fp
->regs
->pc
- cx
->fp
->script
->code
,
7032 !cx
->fp
->imacpc
, stdout
);
7037 * If op is not a break or a return from a loop, continue recording and
7038 * follow the trace. We check for imacro-calling bytecodes inside each
7039 * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
7043 AbortableRecordingStatus status
;
7045 bool wasInImacro
= (cx
->fp
->imacpc
!= NULL
);
7049 status
= ARECORD_ERROR
;
7050 goto stop_recording
;
7051 # define OPDEF(x,val,name,token,length,nuses,ndefs,prec,format) \
7053 status = tr->record_##x(); \
7054 if (JSOP_IS_IMACOP(x)) \
7057 # include "jsopcode.tbl"
7061 /* record_JSOP_* can reenter the interpreter and kill |tr|. */
7062 if (!localtm
.recorder
)
7063 return ARECORD_ABORTED
;
7065 JS_ASSERT(status
!= ARECORD_IMACRO
);
7066 JS_ASSERT_IF(!wasInImacro
, cx
->fp
->imacpc
== NULL
);
7068 if (assm
->error()) {
7069 js_AbortRecording(cx
, "error during recording");
7070 return ARECORD_ABORTED
;
7073 if (tr
->outOfMemory() || js_OverfullJITCache(&localtm
)) {
7074 js_AbortRecording(cx
, "no more memory");
7075 ResetJIT(cx
, FR_OOM
);
7076 return ARECORD_ABORTED
;
7080 /* record_JSOP_* can reenter the interpreter and kill |tr|. */
7081 if (!localtm
.recorder
)
7082 return ARECORD_ABORTED
;
7084 if (!StatusAbortsRecording(status
))
7088 /* If we recorded the end of the trace, destroy the recorder now. */
7089 if (tr
->fragment
->lastIns
) {
7090 JS_ASSERT(status
== ARECORD_STOP
);
7092 return ARECORD_ABORTED
;
7095 /* We encountered an error, abort recording. */
7096 js_AbortRecording(cx
, js_CodeName
[op
]);
7097 return ARECORD_ABORTED
;
7100 JS_REQUIRES_STACK
void
7101 js_AbortRecording(JSContext
* cx
, const char* reason
)
7103 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
7104 JS_ASSERT(tm
->recorder
!= NULL
);
7106 /* Abort the trace and blacklist its starting point. */
7107 Fragment
* f
= tm
->recorder
->getFragment();
7110 * If the recorder already had its fragment disposed, or we actually
7111 * finished recording and this recorder merely is passing through the deep
7112 * abort state to the next recorder on the stack, just destroy the
7113 * recorder. There is nothing to abort.
7115 if (!f
|| f
->lastIns
) {
7120 AUDIT(recorderAborted
);
7122 JS_ASSERT(!f
->vmprivate
);
7124 TreeInfo
* ti
= tm
->recorder
->getTreeInfo();
7125 debug_only_printf(LC_TMAbort
,
7126 "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
7130 cx
->fp
->script
->filename
,
7131 js_FramePCToLineNumber(cx
, cx
->fp
),
7132 FramePCOffset(cx
->fp
),
7136 Backoff(cx
, (jsbytecode
*) f
->root
->ip
, f
->root
);
7138 /* If DeleteRecorder flushed the code cache, we can't rely on f any more. */
7139 if (!DeleteRecorder(cx
))
7143 * If this is the primary trace and we didn't succeed compiling, trash the
7146 if (!f
->code() && (f
->root
== f
))
7150 #if defined NANOJIT_IA32
7154 char *c
= getenv("X86_FORCE_SSE2");
7156 return (!strcmp(c
, "true") ||
7161 #if defined _MSC_VER
7170 #elif defined __GNUC__
7171 asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
7172 "mov $0x01, %%eax\n"
7175 "xchg %%esi, %%ebx\n"
7177 : /* We have no inputs */
7178 : "%eax", "%esi", "%ecx", "%edx"
7180 #elif defined __SUNPRO_C || defined __SUNPRO_CC
7182 "mov $0x01, %%eax\n"
7186 : /* We have no inputs */
7190 return (features
& (1<<26)) != 0;
7194 #if defined(NANOJIT_ARM)
7196 #if defined(_MSC_VER) && defined(WINCE)
7198 // these come in from jswince.asm
7199 extern "C" int js_arm_try_thumb_op();
7200 extern "C" int js_arm_try_armv6t2_op();
7201 extern "C" int js_arm_try_armv5_op();
7202 extern "C" int js_arm_try_armv6_op();
7203 extern "C" int js_arm_try_armv7_op();
7204 extern "C" int js_arm_try_vfp_op();
7207 js_arm_check_thumb() {
7210 js_arm_try_thumb_op();
7212 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION
) {
7219 js_arm_check_thumb2() {
7222 js_arm_try_armv6t2_op();
7224 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION
) {
7231 js_arm_check_arch() {
7232 unsigned int arch
= 4;
7234 js_arm_try_armv5_op();
7236 js_arm_try_armv6_op();
7238 js_arm_try_armv7_op();
7240 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION
) {
7246 js_arm_check_vfp() {
7247 #ifdef WINCE_WINDOWS_MOBILE
7252 js_arm_try_vfp_op();
7254 } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION
) {
7261 #define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1
7263 /* See "Suppressing Exception Notifications while Debugging", at
7264 * http://msdn.microsoft.com/en-us/library/ms924252.aspx
7267 js_disable_debugger_exceptions()
7269 // 2 == TLSSLOT_KERNEL
7270 DWORD kctrl
= (DWORD
) TlsGetValue(2);
7271 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7273 TlsSetValue(2, (LPVOID
) kctrl
);
7277 js_enable_debugger_exceptions()
7279 // 2 == TLSSLOT_KERNEL
7280 DWORD kctrl
= (DWORD
) TlsGetValue(2);
7281 // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7283 TlsSetValue(2, (LPVOID
) kctrl
);
7286 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
7290 #include <sys/types.h>
7291 #include <sys/stat.h>
7292 #include <sys/mman.h>
7297 // Assume ARMv4 by default.
7298 static unsigned int arm_arch
= 4;
7299 static bool arm_has_thumb
= false;
7300 static bool arm_has_vfp
= false;
7301 static bool arm_has_neon
= false;
7302 static bool arm_has_iwmmxt
= false;
7303 static bool arm_tests_initialized
= false;
7310 fd
= open("/proc/self/auxv", O_RDONLY
);
7312 while (read(fd
, &aux
, sizeof(Elf32_auxv_t
))) {
7313 if (aux
.a_type
== AT_HWCAP
) {
7314 uint32_t hwcap
= aux
.a_un
.a_val
;
7315 if (getenv("ARM_FORCE_HWCAP"))
7316 hwcap
= strtoul(getenv("ARM_FORCE_HWCAP"), NULL
, 0);
7317 // hardcode these values to avoid depending on specific versions
7318 // of the hwcap header, e.g. HWCAP_NEON
7319 arm_has_thumb
= (hwcap
& 4) != 0;
7320 arm_has_vfp
= (hwcap
& 64) != 0;
7321 arm_has_iwmmxt
= (hwcap
& 512) != 0;
7322 // this flag is only present on kernel 2.6.29
7323 arm_has_neon
= (hwcap
& 4096) != 0;
7324 } else if (aux
.a_type
== AT_PLATFORM
) {
7325 const char *plat
= (const char*) aux
.a_un
.a_val
;
7326 if (getenv("ARM_FORCE_PLATFORM"))
7327 plat
= getenv("ARM_FORCE_PLATFORM");
7328 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
7329 // or big-endian variants and the digit indicates the version of the platform.
7330 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
7331 // processors. Architectures newer than ARMv7 are assumed to be
7332 // backwards-compatible with ARMv7.
7333 if ((plat
[0] == 'v') &&
7334 (plat
[1] >= '4') && (plat
[1] <= '9') &&
7335 ((plat
[2] == 'l') || (plat
[2] == 'b')))
7337 arm_arch
= plat
[1] - '0';
7341 // For production code, ignore invalid (or unexpected) platform strings and
7342 // fall back to the default. For debug code, use an assertion to catch this.
7349 // if we don't have 2.6.29, we have to do this hack; set
7350 // the env var to trust HWCAP.
7351 if (!getenv("ARM_TRUST_HWCAP") && (arm_arch
>= 7))
7352 arm_has_neon
= true;
7355 arm_tests_initialized
= true;
7359 js_arm_check_thumb() {
7360 if (!arm_tests_initialized
)
7363 return arm_has_thumb
;
7367 js_arm_check_thumb2() {
7368 if (!arm_tests_initialized
)
7371 // ARMv6T2 also supports Thumb2, but Linux doesn't provide an easy way to test for this as
7372 // there is no associated bit in auxv. ARMv7 always supports Thumb2, and future architectures
7373 // are assumed to be backwards-compatible.
7374 return (arm_arch
>= 7);
7378 js_arm_check_arch() {
7379 if (!arm_tests_initialized
)
7386 js_arm_check_vfp() {
7387 if (!arm_tests_initialized
)
7394 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
7396 js_arm_check_thumb() { return false; }
7398 js_arm_check_thumb2() { return false; }
7400 js_arm_check_arch() { return 4; }
7402 js_arm_check_vfp() { return false; }
7405 #ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
7407 js_enable_debugger_exceptions() { }
7409 js_disable_debugger_exceptions() { }
7412 #endif /* NANOJIT_ARM */
/*
 * Set the upper bound, in bytes, on the per-thread trace-JIT code cache.
 * Reads the JSTraceMonitor out of cx's thread data and stores `bytes` into
 * its maxCodeCacheBytes field, which js_OverfullJITCache compares against
 * the combined allocator sizes.
 *
 * NOTE(review): original lines 7423-7426 are not visible in this extract;
 * if the full source clamps or validates `bytes` there, that behavior is
 * not shown here — confirm against the complete file.
 */
7419 js_SetMaxCodeCacheBytes(JSContext
* cx
, uint32 bytes
)
/* The monitor's allocators must already exist (js_InitJIT has run). */
7421 JSTraceMonitor
* tm
= &JS_THREAD_DATA(cx
)->traceMonitor
;
7422 JS_ASSERT(tm
->codeAlloc
&& tm
->dataAlloc
&& tm
->traceAlloc
);
7427 tm
->maxCodeCacheBytes
= bytes
;
7431 js_InitJIT(JSTraceMonitor
*tm
)
7433 #if defined JS_JIT_SPEW
7434 tm
->profAlloc
= NULL
;
7435 /* Set up debug logging. */
7436 if (!did_we_set_up_debug_logging
) {
7437 InitJITLogController();
7438 did_we_set_up_debug_logging
= true;
7440 /* Set up fragprofiling, if required. */
7441 if (js_LogController
.lcbits
& LC_FragProfile
) {
7442 tm
->profAlloc
= new VMAllocator();
7443 tm
->profTab
= new (*tm
->profAlloc
) FragStatsMap(*tm
->profAlloc
);
7447 memset(&js_LogController
, 0, sizeof(js_LogController
));
7450 if (!did_we_check_processor_features
) {
7451 #if defined NANOJIT_IA32
7452 avmplus::AvmCore::config
.use_cmov
=
7453 avmplus::AvmCore::config
.sse2
= CheckForSSE2();
7455 #if defined NANOJIT_ARM
7457 js_disable_debugger_exceptions();
7459 bool arm_vfp
= js_arm_check_vfp();
7460 bool arm_thumb
= js_arm_check_thumb();
7461 bool arm_thumb2
= js_arm_check_thumb2();
7462 unsigned int arm_arch
= js_arm_check_arch();
7464 js_enable_debugger_exceptions();
7466 avmplus::AvmCore::config
.vfp
= arm_vfp
;
7467 avmplus::AvmCore::config
.soft_float
= !arm_vfp
;
7468 avmplus::AvmCore::config
.thumb
= arm_thumb
;
7469 avmplus::AvmCore::config
.thumb2
= arm_thumb2
;
7470 avmplus::AvmCore::config
.arch
= arm_arch
;
7472 // Sanity-check the configuration detection.
7473 // * We don't understand architectures prior to ARMv4.
7474 JS_ASSERT(arm_arch
>= 4);
7475 // * All architectures support Thumb with the possible exception of ARMv4.
7476 JS_ASSERT((arm_thumb
) || (arm_arch
== 4));
7477 // * Only ARMv6T2 and ARMv7(+) support Thumb2, but ARMv6 does not.
7478 JS_ASSERT((arm_thumb2
) || (arm_arch
<= 6));
7479 // * All architectures that support Thumb2 also support Thumb.
7480 JS_ASSERT((arm_thumb2
&& arm_thumb
) || (!arm_thumb2
));
7482 did_we_check_processor_features
= true;
7485 /* Set the default size for the code cache to 16MB. */
7486 tm
->maxCodeCacheBytes
= 16 M
;
7488 if (!tm
->recordAttempts
.ops
) {
7489 JS_DHashTableInit(&tm
->recordAttempts
, JS_DHashGetStubOps(),
7490 NULL
, sizeof(PCHashEntry
),
7491 JS_DHASH_DEFAULT_CAPACITY(PC_HASH_COUNT
));
7494 JS_ASSERT(!tm
->dataAlloc
&& !tm
->traceAlloc
&& !tm
->codeAlloc
);
7495 tm
->dataAlloc
= new VMAllocator();
7496 tm
->traceAlloc
= new VMAllocator();
7497 tm
->tempAlloc
= new VMAllocator();
7498 tm
->reTempAlloc
= new VMAllocator();
7499 tm
->codeAlloc
= new CodeAlloc();
7500 tm
->frameCache
= new FrameInfoCache(tm
->dataAlloc
);
7502 verbose_only( tm
->branches
= NULL
; )
7504 JS_ASSERT(!tm
->reservedDoublePool
);
7505 tm
->reservedDoublePoolPtr
= tm
->reservedDoublePool
= new jsval
[MAX_NATIVE_STACK_SLOTS
];
7508 debug_only(memset(&jitstats
, 0, sizeof(jitstats
)));
7512 /* Architecture properties used by test cases. */
7513 jitstats
.archIsIA32
= 0;
7514 jitstats
.archIs64BIT
= 0;
7515 jitstats
.archIsARM
= 0;
7516 jitstats
.archIsSPARC
= 0;
7517 jitstats
.archIsPPC
= 0;
7518 #if defined NANOJIT_IA32
7519 jitstats
.archIsIA32
= 1;
7521 #if defined NANOJIT_64BIT
7522 jitstats
.archIs64BIT
= 1;
7524 #if defined NANOJIT_ARM
7525 jitstats
.archIsARM
= 1;
7527 #if defined NANOJIT_SPARC
7528 jitstats
.archIsSPARC
= 1;
7530 #if defined NANOJIT_PPC
7531 jitstats
.archIsPPC
= 1;
7533 #if defined NANOJIT_X64
7534 jitstats
.archIsAMD64
= 1;
7540 js_FinishJIT(JSTraceMonitor
*tm
)
7543 if (jitstats
.recorderStarted
) {
7544 debug_only_printf(LC_TMStats
,
7545 "recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
7546 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
7547 "breaks(%llu), returns(%llu), unstableInnerCalls(%llu), blacklisted(%llu)\n",
7548 (unsigned long long int)jitstats
.recorderStarted
,
7549 (unsigned long long int)jitstats
.recorderAborted
,
7550 (unsigned long long int)jitstats
.traceCompleted
,
7551 (unsigned long long int)jitstats
.returnToDifferentLoopHeader
,
7552 (unsigned long long int)jitstats
.treesTrashed
,
7553 (unsigned long long int)jitstats
.slotPromoted
,
7554 (unsigned long long int)jitstats
.unstableLoopVariable
,
7555 (unsigned long long int)jitstats
.breakLoopExits
,
7556 (unsigned long long int)jitstats
.returnLoopExits
,
7557 (unsigned long long int)jitstats
.noCompatInnerTrees
,
7558 (unsigned long long int)jitstats
.blacklisted
);
7559 debug_only_printf(LC_TMStats
,
7560 "monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
7561 "global mismatch(%llu), flushed(%llu)\n",
7562 (unsigned long long int)jitstats
.traceTriggered
,
7563 (unsigned long long int)jitstats
.sideExitIntoInterpreter
,
7564 (unsigned long long int)jitstats
.typeMapMismatchAtEntry
,
7565 (unsigned long long int)jitstats
.globalShapeMismatchAtEntry
,
7566 (unsigned long long int)jitstats
.cacheFlushed
);
7569 JS_ASSERT(tm
->reservedDoublePool
);
7571 if (tm
->recordAttempts
.ops
)
7572 JS_DHashTableFinish(&tm
->recordAttempts
);
7575 // Recover profiling data from expiring Fragments, and display
7577 if (js_LogController
.lcbits
& LC_FragProfile
) {
7578 for (Seq
<Fragment
*>* f
= tm
->branches
; f
; f
= f
->tail
) {
7579 js_FragProfiling_FragFinalizer(f
->head
, tm
);
7581 for (size_t i
= 0; i
< FRAGMENT_TABLE_SIZE
; ++i
) {
7582 for (VMFragment
*f
= tm
->vmfragments
[i
]; f
; f
= f
->next
) {
7583 JS_ASSERT(f
->root
== f
);
7584 for (VMFragment
*p
= f
; p
; p
= p
->peer
)
7585 js_FragProfiling_FragFinalizer(p
, tm
);
7588 REHashMap::Iter
iter(*(tm
->reFragments
));
7589 while (iter
.next()) {
7590 nanojit::Fragment
* frag
= iter
.value();
7591 js_FragProfiling_FragFinalizer(frag
, tm
);
7594 js_FragProfiling_showResults(tm
);
7595 delete tm
->profAlloc
;
7598 NanoAssert(!tm
->profTab
);
7599 NanoAssert(!tm
->profAlloc
);
7603 memset(&tm
->vmfragments
[0], 0, FRAGMENT_TABLE_SIZE
* sizeof(VMFragment
*));
7605 delete[] tm
->reservedDoublePool
;
7606 tm
->reservedDoublePool
= tm
->reservedDoublePoolPtr
= NULL
;
7608 if (tm
->frameCache
) {
7609 delete tm
->frameCache
;
7610 tm
->frameCache
= NULL
;
7613 if (tm
->codeAlloc
) {
7614 delete tm
->codeAlloc
;
7615 tm
->codeAlloc
= NULL
;
7618 if (tm
->dataAlloc
) {
7619 delete tm
->dataAlloc
;
7620 tm
->dataAlloc
= NULL
;
7623 if (tm
->traceAlloc
) {
7624 delete tm
->traceAlloc
;
7625 tm
->traceAlloc
= NULL
;
7628 if (tm
->tempAlloc
) {
7629 delete tm
->tempAlloc
;
7630 tm
->tempAlloc
= NULL
;
7633 if (tm
->reTempAlloc
) {
7634 delete tm
->reTempAlloc
;
7635 tm
->reTempAlloc
= NULL
;
/*
 * JSDHashTable enumerator callback: remove recording-attempt entries whose
 * key (a bytecode PC) lies inside the given script's bytecode range.
 * `arg` is the JSScript* being purged; the unsigned-pointer-difference test
 * JS_UPTRDIFF(pc, script->code) < script->length is an in-range check that
 * also rejects PCs below script->code (the subtraction wraps to a huge
 * unsigned value). Used by js_PurgeScriptFragments via
 * JS_DHashTableEnumerate.
 */
7645 static JSDHashOperator
7646 PurgeScriptRecordingAttempts(JSDHashTable
*table
, JSDHashEntryHdr
*hdr
, uint32 number
, void *arg
)
7648 PCHashEntry
*e
= (PCHashEntry
*)hdr
;
7649 JSScript
*script
= (JSScript
*)arg
;
7650 jsbytecode
*pc
= (jsbytecode
*)e
->key
;
/* Drop the entry if its PC belongs to the dying script; keep it otherwise. */
7652 if (JS_UPTRDIFF(pc
, script
->code
) < script
->length
)
7653 return JS_DHASH_REMOVE
;
7654 return JS_DHASH_NEXT
;
7658 JS_REQUIRES_STACK
void
7659 js_PurgeScriptFragments(JSContext
* cx
, JSScript
* script
)
7661 if (!TRACING_ENABLED(cx
))
7663 debug_only_printf(LC_TMTracer
,
7664 "Purging fragments for JSScript %p.\n", (void*)script
);
7666 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
7667 for (size_t i
= 0; i
< FRAGMENT_TABLE_SIZE
; ++i
) {
7668 VMFragment
** fragp
= &tm
->vmfragments
[i
];
7669 while (VMFragment
* frag
= *fragp
) {
7670 if (JS_UPTRDIFF(frag
->ip
, script
->code
) < script
->length
) {
7671 /* This fragment is associated with the script. */
7672 debug_only_printf(LC_TMTracer
,
7673 "Disconnecting VMFragment %p "
7674 "with ip %p, in range [%p,%p).\n",
7675 (void*)frag
, frag
->ip
, script
->code
,
7676 script
->code
+ script
->length
);
7678 JS_ASSERT(frag
->root
== frag
);
7679 *fragp
= frag
->next
;
7681 verbose_only( js_FragProfiling_FragFinalizer(frag
, tm
); )
7682 TrashTree(cx
, frag
);
7683 } while ((frag
= frag
->peer
) != NULL
);
7686 fragp
= &frag
->next
;
7690 JS_DHashTableEnumerate(&tm
->recordAttempts
, PurgeScriptRecordingAttempts
, script
);
/*
 * Return whether the trace monitor's code/data/trace allocators together
 * exceed the configured maxCodeCacheBytes budget ("condition 2" in the
 * comment below). Callers are responsible for purging the cache when this
 * reports true; true OS out-of-memory ("condition 1") is reported
 * separately via the allocators' outOfMemory flag.
 */
7694 js_OverfullJITCache(JSTraceMonitor
* tm
)
7697 * You might imagine the outOfMemory flag on the allocator is sufficient
7698 * to model the notion of "running out of memory", but there are actually
7699 * two separate issues involved:
7701 * 1. The process truly running out of memory: malloc() or mmap()
7704 * 2. The limit we put on the "intended size" of the tracemonkey code
7705 * cache, in pages, has been exceeded.
7707 * Condition 1 doesn't happen very often, but we're obliged to try to
7708 * safely shut down and signal the rest of spidermonkey when it
7709 * does. Condition 2 happens quite regularly.
7711 * Presently, the code in this file doesn't check the outOfMemory condition
7712 * often enough, and frequently misuses the unchecked results of
7713 * lirbuffer insertions on the asssumption that it will notice the
7714 * outOfMemory flag "soon enough" when it returns to the monitorRecording
7715 * function. This turns out to be a false assumption if we use outOfMemory
7716 * to signal condition 2: we regularly provoke "passing our intended
7717 * size" and regularly fail to notice it in time to prevent writing
7718 * over the end of an artificially self-limited LIR buffer.
7720 * To mitigate, though not completely solve, this problem, we're
7721 * modeling the two forms of memory exhaustion *separately* for the
7722 * time being: condition 1 is handled by the outOfMemory flag inside
7723 * nanojit, and condition 2 is being handled independently *here*. So
7724 * we construct our allocators to use all available memory they like,
7725 * and only report outOfMemory to us when there is literally no OS memory
7726 * left. Merely purging our cache when we hit our highwater mark is
7727 * handled by the (few) callers of this function.
7730 jsuint maxsz
= tm
->maxCodeCacheBytes
;
7731 VMAllocator
*dataAlloc
= tm
->dataAlloc
;
7732 VMAllocator
*traceAlloc
= tm
->traceAlloc
;
7733 CodeAlloc
*codeAlloc
= tm
->codeAlloc
;
/* Sum of all three allocators against the single byte budget. */
7735 return (codeAlloc
->size() + dataAlloc
->size() + traceAlloc
->size() > maxsz
);
/*
 * Abandon trace execution from deep inside a builtin ("deep bail").
 * Looks up the one context on this thread that is actually on trace
 * (which may differ from `cx`), restores the interpreter state via
 * LeaveTree using the saved bailExit, clears bailExit, and marks the
 * builtin status as bailed. Also samples sp into deepBailSp, which
 * LeaveTree's caller later asserts against when flushing the native
 * stack.
 */
7738 JS_FORCES_STACK
JS_FRIEND_API(void)
7739 js_DeepBail(JSContext
*cx
)
7741 JS_ASSERT(JS_ON_TRACE(cx
));
7744 * Exactly one context on the current thread is on trace. Find out which
7745 * one. (Most callers cannot guarantee that it's cx.)
7747 JSTraceMonitor
*tm
= &JS_TRACE_MONITOR(cx
);
7748 JSContext
*tracecx
= tm
->tracecx
;
7750 /* It's a bug if a non-FAIL_STATUS builtin gets here. */
7751 JS_ASSERT(tracecx
->bailExit
);
7754 debug_only_print0(LC_TMTracer
, "Deep bail.\n");
/* Restore interpreter state from the side exit recorded at the call. */
7755 LeaveTree(*tracecx
->interpState
, tracecx
->bailExit
);
/* bailExit is one-shot; clear it so a second bail cannot reuse it. */
7756 tracecx
->bailExit
= NULL
;
7758 InterpState
* state
= tracecx
->interpState
;
7759 state
->builtinStatus
|= JSBUILTIN_BAILED
;
/* Remember where sp was at bail time for later stack-flush assertions. */
7760 state
->deepBailSp
= state
->sp
;
/*
 * Return a reference to the n-th actual argument of the current frame
 * (cx->fp->argv[n]). Asserts n is within the callee function's declared
 * argument count.
 */
7763 JS_REQUIRES_STACK jsval
&
7764 TraceRecorder::argval(unsigned n
) const
7766 JS_ASSERT(n
< cx
->fp
->fun
->nargs
);
7767 return cx
->fp
->argv
[n
];
/*
 * Return a reference to local slot n of the current frame
 * (cx->fp->slots[n]). Asserts n is within the script's slot count.
 */
7770 JS_REQUIRES_STACK jsval
&
7771 TraceRecorder::varval(unsigned n
) const
7773 JS_ASSERT(n
< cx
->fp
->script
->nslots
);
7774 return cx
->fp
->slots
[n
];
/*
 * Return a reference to an operand-stack value addressed relative to the
 * current frame's sp. Only the sp load is visible in this extract; the
 * indexing/return (original lines 7781-7782) is not shown.
 * NOTE(review): presumably returns sp[n] for a negative/relative n, as in
 * other TraceRecorder accessors — confirm against the complete source.
 */
7777 JS_REQUIRES_STACK jsval
&
7778 TraceRecorder::stackval(int n
) const
7780 jsval
* sp
= cx
->fp
->regs
->sp
;
/*
 * Emit and return LIR that loads the current frame's scope chain on trace:
 * first load cx->fp from the traced JSContext (cx_ins), then load
 * fp->scopeChain from that frame pointer. Both loads use LIR_ldp with
 * offsetof() so the generated code mirrors the interpreter's field layout.
 */
7784 JS_REQUIRES_STACK LIns
*
7785 TraceRecorder::scopeChain() const
7787 return lir
->insLoad(LIR_ldp
,
7788 lir
->insLoad(LIR_ldp
, cx_ins
, offsetof(JSContext
, fp
)),
7789 offsetof(JSStackFrame
, scopeChain
));
7793 * Return the frame of a call object if that frame is part of the current
7794 * trace. |depthp| is an optional outparam: if it is non-null, it will be
7795 * filled in with the depth of the call object's frame relevant to cx->fp.
7797 JS_REQUIRES_STACK JSStackFrame
*
7798 TraceRecorder::frameIfInRange(JSObject
* obj
, unsigned* depthp
) const
7800 JSStackFrame
* ofp
= (JSStackFrame
*) obj
->getPrivate();
7801 JSStackFrame
* fp
= cx
->fp
;
7802 for (unsigned depth
= 0; depth
<= callDepth
; ++depth
) {
7808 if (!(fp
= fp
->down
))
7814 JS_DEFINE_CALLINFO_4(extern, UINT32
, GetClosureVar
, CONTEXT
, OBJECT
, CVIPTR
, DOUBLEPTR
, 0, 0)
7815 JS_DEFINE_CALLINFO_4(extern, UINT32
, GetClosureArg
, CONTEXT
, OBJECT
, CVIPTR
, DOUBLEPTR
, 0, 0)
7818 * Search the scope chain for a property lookup operation at the current PC and
7819 * generate LIR to access the given property. Return RECORD_CONTINUE on success,
7820 * otherwise abort and return RECORD_STOP. There are 3 outparams:
7822 * vp the address of the current property value
7823 * ins LIR instruction representing the property value on trace
7824 * NameResult describes how to look up name; see comment for NameResult in jstracer.h
7826 JS_REQUIRES_STACK AbortableRecordingStatus
7827 TraceRecorder::scopeChainProp(JSObject
* obj
, jsval
*& vp
, LIns
*& ins
, NameResult
& nr
)
7829 JS_ASSERT(obj
!= globalObj
);
7831 JSTraceMonitor
&localtm
= *traceMonitor
;
7833 JSAtom
* atom
= atoms
[GET_INDEX(cx
->fp
->regs
->pc
)];
7836 bool ok
= js_FindProperty(cx
, ATOM_TO_JSID(atom
), &obj
, &obj2
, &prop
);
7838 /* js_FindProperty can reenter the interpreter and kill |this|. */
7839 if (!localtm
.recorder
)
7840 return ARECORD_ABORTED
;
7843 RETURN_ERROR_A("error in js_FindProperty");
7846 RETURN_STOP_A("failed to find name in non-global scope chain");
7848 if (obj
== globalObj
) {
7849 // Even if the property is on the global object, we must guard against
7850 // the creation of properties that shadow the property in the middle
7851 // of the scope chain if we are in a function.
7854 JSObject
* parent
= STOBJ_GET_PARENT(JSVAL_TO_OBJECT(cx
->fp
->argv
[-2]));
7855 LIns
* parent_ins
= stobj_get_parent(get(&cx
->fp
->argv
[-2]));
7856 CHECK_STATUS_A(traverseScopeChain(parent
, parent_ins
, obj
, obj_ins
));
7859 JSScopeProperty
* sprop
= (JSScopeProperty
*) prop
;
7862 obj2
->dropProperty(cx
, prop
);
7863 RETURN_STOP_A("prototype property");
7865 if (!isValidSlot(OBJ_SCOPE(obj
), sprop
)) {
7866 obj2
->dropProperty(cx
, prop
);
7867 return ARECORD_STOP
;
7869 if (!lazilyImportGlobalSlot(sprop
->slot
)) {
7870 obj2
->dropProperty(cx
, prop
);
7871 RETURN_STOP_A("lazy import of global slot failed");
7873 vp
= &STOBJ_GET_SLOT(obj
, sprop
->slot
);
7875 obj2
->dropProperty(cx
, prop
);
7877 return ARECORD_CONTINUE
;
7880 if (obj
== obj2
&& OBJ_GET_CLASS(cx
, obj
) == &js_CallClass
)
7881 return InjectStatus(callProp(obj
, prop
, ATOM_TO_JSID(atom
), vp
, ins
, nr
));
7883 obj2
->dropProperty(cx
, prop
);
7884 RETURN_STOP_A("fp->scopeChain is not global or active call object");
7888 * Generate LIR to access a property of a Call object.
7890 JS_REQUIRES_STACK RecordingStatus
7891 TraceRecorder::callProp(JSObject
* obj
, JSProperty
* prop
, jsid id
, jsval
*& vp
,
7892 LIns
*& ins
, NameResult
& nr
)
7894 JSScopeProperty
*sprop
= (JSScopeProperty
*) prop
;
7896 JSOp op
= JSOp(*cx
->fp
->regs
->pc
);
7897 uint32 setflags
= (js_CodeSpec
[op
].format
& (JOF_SET
| JOF_INCDEC
| JOF_FOR
));
7898 if (setflags
&& (sprop
->attrs
& JSPROP_READONLY
))
7899 RETURN_STOP("writing to a read-only property");
7901 uintN slot
= sprop
->shortid
;
7904 uintN upvar_slot
= SPROP_INVALID_SLOT
;
7905 JSStackFrame
* cfp
= (JSStackFrame
*) obj
->getPrivate();
7907 if (sprop
->getter
== js_GetCallArg
) {
7908 JS_ASSERT(slot
< cfp
->fun
->nargs
);
7909 vp
= &cfp
->argv
[slot
];
7912 } else if (sprop
->getter
== js_GetCallVar
) {
7913 JS_ASSERT(slot
< cfp
->script
->nslots
);
7914 vp
= &cfp
->slots
[slot
];
7915 upvar_slot
= cx
->fp
->fun
->nargs
+ slot
;
7918 RETURN_STOP("dynamic property of Call object");
7920 obj
->dropProperty(cx
, prop
);
7922 if (frameIfInRange(obj
)) {
7923 // At this point we are guaranteed to be looking at an active call oject
7924 // whose properties are stored in the corresponding JSStackFrame.
7927 return RECORD_CONTINUE
;
7930 // Call objects do not yet have sprop->isMethod() properties, but they
7931 // should. See bug 514046, for which this code is future-proof. Remove
7932 // this comment when that bug is fixed (so, FIXME: 514046).
7936 js_GetPropertyHelper(cx
, obj
, sprop
->id
,
7937 (op
== JSOP_CALLNAME
)
7938 ? JSGET_NO_METHOD_BARRIER
7939 : JSGET_METHOD_BARRIER
,
7942 obj
->dropProperty(cx
, prop
);
7946 JSObject
* parent
= STOBJ_GET_PARENT(JSVAL_TO_OBJECT(cx
->fp
->argv
[-2]));
7947 LIns
* parent_ins
= stobj_get_parent(get(&cx
->fp
->argv
[-2]));
7948 CHECK_STATUS(traverseScopeChain(parent
, parent_ins
, obj
, obj_ins
));
7950 ClosureVarInfo
* cv
= new (traceMonitor
->traceAlloc
) ClosureVarInfo();
7953 cv
->callDepth
= callDepth
;
7954 cv
->resolveFlags
= cx
->resolveFlags
== JSRESOLVE_INFER
7955 ? js_InferFlags(cx
, 0)
7958 LIns
* outp
= lir
->insAlloc(sizeof(double));
7966 if (sprop
->getter
== js_GetCallArg
)
7967 ci
= &GetClosureArg_ci
;
7969 ci
= &GetClosureVar_ci
;
7971 LIns
* call_ins
= lir
->insCall(ci
, args
);
7972 JSTraceType type
= getCoercedType(nr
.v
);
7974 addName(lir
->ins2(LIR_eq
, call_ins
, lir
->insImm(type
)),
7975 "guard(type-stable name access)"),
7977 ins
= stackLoad(outp
, type
);
7980 nr
.obj_ins
= obj_ins
;
7982 return RECORD_CONTINUE
;
7985 JS_REQUIRES_STACK LIns
*
7986 TraceRecorder::arg(unsigned n
)
7988 return get(&argval(n
));
// Setter overload: associate LIR value |i| with the n-th formal argument.
// NOTE(review): the function body (orig lines 7993-7996) is not visible in
// this extraction fragment; presumably it stores |i| via set(&argval(n), ...)
// — confirm against the original file.
7991 JS_REQUIRES_STACK
void
7992 TraceRecorder::arg(unsigned n
, LIns
* i
)
7997 JS_REQUIRES_STACK LIns
*
7998 TraceRecorder::var(unsigned n
)
8000 return get(&varval(n
));
// Setter overload: associate LIR value |i| with the n-th local slot.
// NOTE(review): the function body (orig lines 8005-8007) is not visible in
// this extraction fragment; presumably it stores |i| via set(&varval(n), ...)
// — confirm against the original file.
8003 JS_REQUIRES_STACK
void
8004 TraceRecorder::var(unsigned n
, LIns
* i
)
8009 JS_REQUIRES_STACK LIns
*
8010 TraceRecorder::stack(int n
)
8012 return get(&stackval(n
));
8015 JS_REQUIRES_STACK
void
8016 TraceRecorder::stack(int n
, LIns
* i
)
8018 set(&stackval(n
), i
, n
>= 0);
8021 JS_REQUIRES_STACK LIns
*
8022 TraceRecorder::alu(LOpcode v
, jsdouble v0
, jsdouble v1
, LIns
* s0
, LIns
* s1
)
8025 * To even consider this operation for demotion, both operands have to be
8026 * integers and the oracle must not give us a negative hint for the
8029 if (oracle
.isInstructionUndemotable(cx
->fp
->regs
->pc
) || !isPromoteInt(s0
) || !isPromoteInt(s1
)) {
8031 if (v
== LIR_fmod
) {
8032 LIns
* args
[] = { s1
, s0
};
8033 return lir
->insCall(&js_dmod_ci
, args
);
8035 LIns
* result
= lir
->ins2(v
, s0
, s1
);
8036 JS_ASSERT_IF(s0
->isconstf() && s1
->isconstf(), result
->isconstf());
8053 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8060 if (v0
< 0 || v1
== 0 || (s1
->isconstf() && v1
< 0))
8062 r
= js_dmod(v0
, v1
);
8070 * The result must be an integer at record time, otherwise there is no
8071 * point in trying to demote it.
8073 if (jsint(r
) != r
|| JSDOUBLE_IS_NEGZERO(r
))
8076 LIns
* d0
= ::demote(lir
, s0
);
8077 LIns
* d1
= ::demote(lir
, s1
);
8080 * Speculatively emit an integer operation, betting that at runtime we
8081 * will get integer results again.
8086 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8088 if (d0
->isconst() && d1
->isconst())
8089 return lir
->ins1(LIR_i2f
, lir
->insImm(jsint(r
)));
8091 exit
= snapshot(OVERFLOW_EXIT
);
8094 * If the divisor is greater than zero its always safe to execute
8095 * the division. If not, we have to make sure we are not running
8096 * into -2147483648 / -1, because it can raise an overflow exception.
8098 if (!d1
->isconst()) {
8099 LIns
* gt
= lir
->insBranch(LIR_jt
, lir
->ins2i(LIR_gt
, d1
, 0), NULL
);
8100 guard(false, lir
->ins_eq0(d1
), exit
);
8101 guard(false, lir
->ins2(LIR_and
,
8102 lir
->ins2i(LIR_eq
, d0
, 0x80000000),
8103 lir
->ins2i(LIR_eq
, d1
, -1)), exit
);
8104 gt
->setTarget(lir
->ins0(LIR_label
));
8106 if (d1
->imm32() == -1)
8107 guard(false, lir
->ins2i(LIR_eq
, d0
, 0x80000000), exit
);
8109 result
= lir
->ins2(v
= LIR_div
, d0
, d1
);
8111 /* As long the modulus is zero, the result is an integer. */
8112 guard(true, lir
->ins_eq0(lir
->ins1(LIR_mod
, result
)), exit
);
8114 /* Don't lose a -0. */
8115 guard(false, lir
->ins_eq0(result
), exit
);
8119 if (d0
->isconst() && d1
->isconst())
8120 return lir
->ins1(LIR_i2f
, lir
->insImm(jsint(r
)));
8122 exit
= snapshot(OVERFLOW_EXIT
);
8124 /* Make sure we don't trigger division by zero at runtime. */
8126 guard(false, lir
->ins_eq0(d1
), exit
);
8127 result
= lir
->ins1(v
= LIR_mod
, lir
->ins2(LIR_div
, d0
, d1
));
8129 /* If the result is not 0, it is always within the integer domain. */
8130 LIns
* branch
= lir
->insBranch(LIR_jf
, lir
->ins_eq0(result
), NULL
);
8133 * If the result is zero, we must exit if the lhs is negative since
8134 * the result is -0 in this case, which is not in the integer domain.
8136 guard(false, lir
->ins2i(LIR_lt
, d1
, 0), exit
);
8137 branch
->setTarget(lir
->ins0(LIR_label
));
8143 v
= (LOpcode
)((int)v
& ~LIR64
);
8144 result
= lir
->ins2(v
, d0
, d1
);
8147 * If the operands guarantee that the result will be an integer (i.e.
8148 * z = x + y with 0 <= (x|y) <= 0xffff guarantees z <= fffe0001), we
8149 * don't have to guard against an overflow. Otherwise we emit a guard
8150 * that will inform the oracle and cause a non-demoted trace to be
8151 * attached that uses floating-point math for this operation.
8153 if (!result
->isconst() && (!IsOverflowSafe(v
, d0
) || !IsOverflowSafe(v
, d1
))) {
8154 exit
= snapshot(OVERFLOW_EXIT
);
8155 guard(false, lir
->ins1(LIR_ov
, result
), exit
);
8156 if (v
== LIR_mul
) // make sure we don't lose a -0
8157 guard(false, lir
->ins_eq0(result
), exit
);
8161 JS_ASSERT_IF(d0
->isconst() && d1
->isconst(),
8162 result
->isconst() && result
->imm32() == jsint(r
));
8163 return lir
->ins1(LIR_i2f
, result
);
// Convert the double-valued LIR instruction |f| to an int32 on trace by
// emitting a call to the js_DoubleToInt32 builtin.
// NOTE(review): the declaration line preceding this definition (orig line
// 8166, the return type) is not visible in this extraction fragment.
8167 TraceRecorder::f2i(LIns
* f
)
8169 return lir
->insCall(&js_DoubleToInt32_ci
, &f
);
8172 JS_REQUIRES_STACK LIns
*
8173 TraceRecorder::makeNumberInt32(LIns
* f
)
8175 JS_ASSERT(f
->isQuad());
8177 if (!isPromote(f
)) {
8179 guard(true, lir
->ins2(LIR_feq
, f
, lir
->ins1(LIR_i2f
, x
)), MISMATCH_EXIT
);
8181 x
= ::demote(lir
, f
);
8186 JS_REQUIRES_STACK LIns
*
8187 TraceRecorder::stringify(jsval
& v
)
8189 LIns
* v_ins
= get(&v
);
8190 if (JSVAL_IS_STRING(v
))
8193 LIns
* args
[] = { v_ins
, cx_ins
};
8195 if (JSVAL_IS_NUMBER(v
)) {
8196 ci
= &js_NumberToString_ci
;
8197 } else if (JSVAL_IS_SPECIAL(v
)) {
8198 ci
= &js_BooleanOrUndefinedToString_ci
;
8201 * Callers must deal with non-primitive (non-null object) values by
8202 * calling an imacro. We don't try to guess about which imacro, with
8203 * what valueOf hint, here.
8205 JS_ASSERT(JSVAL_IS_NULL(v
));
8206 return INS_ATOM(cx
->runtime
->atomState
.nullAtom
);
8209 v_ins
= lir
->insCall(ci
, args
);
8210 guard(false, lir
->ins_peq0(v_ins
), OOM_EXIT
);
8214 JS_REQUIRES_STACK RecordingStatus
8215 TraceRecorder::call_imacro(jsbytecode
* imacro
)
8217 JSStackFrame
* fp
= cx
->fp
;
8218 JSFrameRegs
* regs
= fp
->regs
;
8220 /* We cannot nest imacros, only tail-call. */
8222 /* Dereference is safe since imacros are JSOP_STOP-terminated. */
8223 if (regs
->pc
[js_CodeSpec
[*regs
->pc
].length
] != JSOP_STOP
)
8226 return RECORD_IMACRO
;
8229 fp
->imacpc
= regs
->pc
;
8231 atoms
= COMMON_ATOMS_START(&cx
->runtime
->atomState
);
8232 return RECORD_IMACRO
;
8235 JS_REQUIRES_STACK AbortableRecordingStatus
8236 TraceRecorder::ifop()
8238 jsval
& v
= stackval(-1);
8239 LIns
* v_ins
= get(&v
);
8243 if (JSVAL_IS_NULL(v
)) {
8246 } else if (!JSVAL_IS_PRIMITIVE(v
)) {
8249 } else if (JSVAL_IS_SPECIAL(v
)) {
8250 /* Test for boolean is true, negate later if we are testing for false. */
8251 cond
= JSVAL_TO_SPECIAL(v
) == JS_TRUE
;
8252 x
= lir
->ins2i(LIR_eq
, v_ins
, 1);
8253 } else if (isNumber(v
)) {
8254 jsdouble d
= asNumber(v
);
8255 cond
= !JSDOUBLE_IS_NaN(d
) && d
;
8256 x
= lir
->ins2(LIR_and
,
8257 lir
->ins2(LIR_feq
, v_ins
, v_ins
),
8258 lir
->ins_eq0(lir
->ins2(LIR_feq
, v_ins
, lir
->insImmf(0))));
8259 } else if (JSVAL_IS_STRING(v
)) {
8260 cond
= JSVAL_TO_STRING(v
)->length() != 0;
8261 x
= lir
->ins2(LIR_piand
,
8262 lir
->insLoad(LIR_ldp
,
8264 (int)offsetof(JSString
, mLength
)),
8265 INS_CONSTWORD(JSString::LENGTH_MASK
));
8267 JS_NOT_REACHED("ifop");
8268 return ARECORD_STOP
;
8271 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
8272 emitIf(pc
, cond
, x
);
8273 return checkTraceEnd(pc
);
8278 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
8279 * "first" time we hit the op. Later, when we start traces after exiting that
8280 * trace, we just patch.
8282 JS_REQUIRES_STACK AbortableRecordingStatus
8283 TraceRecorder::tableswitch()
8285 jsval
& v
= stackval(-1);
8287 /* No need to guard if the condition can't match any of the cases. */
8289 return ARECORD_CONTINUE
;
8291 /* No need to guard if the condition is constant. */
8292 LIns
* v_ins
= f2i(get(&v
));
8293 if (v_ins
->isconst() || v_ins
->isconstq())
8294 return ARECORD_CONTINUE
;
8296 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
8297 /* Starting a new trace after exiting a trace via switch. */
8299 (anchor
->exitType
== CASE_EXIT
|| anchor
->exitType
== DEFAULT_EXIT
) &&
8300 fragment
->ip
== pc
) {
8301 return ARECORD_CONTINUE
;
8306 if (*pc
== JSOP_TABLESWITCH
) {
8307 pc
+= JUMP_OFFSET_LEN
;
8308 low
= GET_JUMP_OFFSET(pc
);
8309 pc
+= JUMP_OFFSET_LEN
;
8310 high
= GET_JUMP_OFFSET(pc
);
8312 pc
+= JUMPX_OFFSET_LEN
;
8313 low
= GET_JUMPX_OFFSET(pc
);
8314 pc
+= JUMPX_OFFSET_LEN
;
8315 high
= GET_JUMPX_OFFSET(pc
);
8319 * Really large tables won't fit in a page. This is a conservative check.
8320 * If it matters in practice we need to go off-page.
8322 if ((high
+ 1 - low
) * sizeof(intptr_t*) + 128 > (unsigned) LARGEST_UNDERRUN_PROT
)
8323 return InjectStatus(switchop());
8325 /* Generate switch LIR. */
8326 SwitchInfo
* si
= new (*traceMonitor
->traceAlloc
) SwitchInfo();
8327 si
->count
= high
+ 1 - low
;
8329 si
->index
= (uint32
) -1;
8330 LIns
* diff
= lir
->ins2(LIR_sub
, v_ins
, lir
->insImm(low
));
8331 LIns
* cmp
= lir
->ins2(LIR_ult
, diff
, lir
->insImm(si
->count
));
8332 lir
->insGuard(LIR_xf
, cmp
, createGuardRecord(snapshot(DEFAULT_EXIT
)));
8333 lir
->insStorei(diff
, lir
->insImmPtr(&si
->index
), 0);
8334 VMSideExit
* exit
= snapshot(CASE_EXIT
);
8335 exit
->switchInfo
= si
;
8336 LIns
* guardIns
= lir
->insGuard(LIR_xtbl
, diff
, createGuardRecord(exit
));
8337 fragment
->lastIns
= guardIns
;
8338 compile(&JS_TRACE_MONITOR(cx
));
8339 return ARECORD_STOP
;
8343 static JS_ALWAYS_INLINE
int32_t
8344 UnboxBooleanOrUndefined(jsval v
)
8346 /* Although this says 'special', we really only expect 3 special values: */
8347 JS_ASSERT(v
== JSVAL_TRUE
|| v
== JSVAL_FALSE
|| v
== JSVAL_VOID
);
8348 return JSVAL_TO_SPECIAL(v
);
8351 JS_REQUIRES_STACK RecordingStatus
8352 TraceRecorder::switchop()
8354 jsval
& v
= stackval(-1);
8355 LIns
* v_ins
= get(&v
);
8357 /* No need to guard if the condition is constant. */
8358 if (v_ins
->isconst() || v_ins
->isconstq())
8359 return RECORD_CONTINUE
;
8361 jsdouble d
= asNumber(v
);
8363 addName(lir
->ins2(LIR_feq
, v_ins
, lir
->insImmf(d
)),
8364 "guard(switch on numeric)"),
8366 } else if (JSVAL_IS_STRING(v
)) {
8367 LIns
* args
[] = { v_ins
, INS_CONSTSTR(JSVAL_TO_STRING(v
)) };
8369 addName(lir
->ins_eq0(lir
->ins_eq0(lir
->insCall(&js_EqualStrings_ci
, args
))),
8370 "guard(switch on string)"),
8372 } else if (JSVAL_IS_SPECIAL(v
)) {
8374 addName(lir
->ins2(LIR_eq
, v_ins
, lir
->insImm(UnboxBooleanOrUndefined(v
))),
8375 "guard(switch on boolean)"),
8378 RETURN_STOP("switch on object or null");
8380 return RECORD_CONTINUE
;
// Record an increment of |v|: fetch its tracked LIR value, delegate to the
// LIns-taking inc() overload (which handles pre/post semantics), then
// continue recording.
// NOTE(review): one interior line of the original function (orig line 8388)
// is not visible in this extraction fragment.
8383 JS_REQUIRES_STACK RecordingStatus
8384 TraceRecorder::inc(jsval
& v
, jsint incr
, bool pre
)
8386 LIns
* v_ins
= get(&v
);
8387 CHECK_STATUS(inc(v
, v_ins
, incr
, pre
));
8389 return RECORD_CONTINUE
;
8393 * On exit, v_ins is the incremented unboxed value, and the appropriate value
8394 * (pre- or post-increment as described by pre) is stacked.
8396 JS_REQUIRES_STACK RecordingStatus
8397 TraceRecorder::inc(jsval v
, LIns
*& v_ins
, jsint incr
, bool pre
)
8400 CHECK_STATUS(incHelper(v
, v_ins
, v_after
, incr
));
8402 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->fp
->regs
->pc
];
8403 JS_ASSERT(cs
.ndefs
== 1);
8404 stack(-cs
.nuses
, pre
? v_after
: v_ins
);
8406 return RECORD_CONTINUE
;
8410 * Do an increment operation without storing anything to the stack.
// Compute v + incr on trace without storing anything to the stack:
// |v_after| receives the LIR for the incremented value (an LIR_fadd of the
// unboxed number and the immediate incr).
// NOTE(review): the condition guarding the RETURN_STOP below (orig lines
// 8414-8415, presumably a number-type check on |v|) is not visible in this
// extraction fragment — confirm against the original file.
8412 JS_REQUIRES_STACK RecordingStatus
8413 TraceRecorder::incHelper(jsval v
, LIns
* v_ins
, LIns
*& v_after
, jsint incr
)
8416 RETURN_STOP("can only inc numbers");
8417 v_after
= alu(LIR_fadd
, asNumber(v
), incr
, v_ins
, lir
->insImmf(incr
));
8418 return RECORD_CONTINUE
;
8421 JS_REQUIRES_STACK AbortableRecordingStatus
8422 TraceRecorder::incProp(jsint incr
, bool pre
)
8424 jsval
& l
= stackval(-1);
8425 if (JSVAL_IS_PRIMITIVE(l
))
8426 RETURN_STOP_A("incProp on primitive");
8428 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
8429 LIns
* obj_ins
= get(&l
);
8433 CHECK_STATUS_A(prop(obj
, obj_ins
, &slot
, &v_ins
, NULL
));
8435 if (slot
== SPROP_INVALID_SLOT
)
8436 RETURN_STOP_A("incProp on invalid slot");
8438 jsval
& v
= STOBJ_GET_SLOT(obj
, slot
);
8439 CHECK_STATUS_A(inc(v
, v_ins
, incr
, pre
));
8441 LIns
* dslots_ins
= NULL
;
8442 stobj_set_slot(obj_ins
, slot
, dslots_ins
, box_jsval(v
, v_ins
));
8443 return ARECORD_CONTINUE
;
8446 JS_REQUIRES_STACK RecordingStatus
8447 TraceRecorder::incElem(jsint incr
, bool pre
)
8449 jsval
& r
= stackval(-1);
8450 jsval
& l
= stackval(-2);
8455 if (JSVAL_IS_PRIMITIVE(l
) || !JSVAL_IS_INT(r
) ||
8456 !guardDenseArray(JSVAL_TO_OBJECT(l
), get(&l
))) {
8460 CHECK_STATUS(denseArrayElement(l
, r
, vp
, v_ins
, addr_ins
));
8461 if (!addr_ins
) // if we read a hole, abort
8463 CHECK_STATUS(inc(*vp
, v_ins
, incr
, pre
));
8464 lir
->insStorei(box_jsval(*vp
, v_ins
), addr_ins
, 0);
8465 return RECORD_CONTINUE
;
// Evaluate comparison opcode |op| on doubles l and r at record time.
// NOTE(review): the dispatch over |op| (orig lines 8470-8488) is not
// visible in this extraction fragment; only the unreachable-default
// assertion remains. The return-type line preceding the definition is
// also absent.
8469 EvalCmp(LOpcode op
, double l
, double r
)
8489 JS_NOT_REACHED("unexpected comparison op");
// Evaluate comparison opcode |op| on two strings at record time: equality
// uses js_EqualStrings directly; ordering compares js_CompareStrings'
// result against 0 via the double overload of EvalCmp.
// NOTE(review): the branch condition selecting between the two returns
// (orig lines 8497-8498) and the return-type line are not visible in this
// extraction fragment.
8496 EvalCmp(LOpcode op
, JSString
* l
, JSString
* r
)
8499 return !!js_EqualStrings(l
, r
);
8500 return EvalCmp(op
, js_CompareStrings(l
, r
), 0);
8503 JS_REQUIRES_STACK
void
8504 TraceRecorder::strictEquality(bool equal
, bool cmpCase
)
8506 jsval
& r
= stackval(-1);
8507 jsval
& l
= stackval(-2);
8508 LIns
* l_ins
= get(&l
);
8509 LIns
* r_ins
= get(&r
);
8513 JSTraceType ltag
= GetPromotedType(l
);
8514 if (ltag
!= GetPromotedType(r
)) {
8516 x
= lir
->insImm(cond
);
8517 } else if (ltag
== TT_STRING
) {
8518 LIns
* args
[] = { r_ins
, l_ins
};
8519 x
= lir
->ins2i(LIR_eq
, lir
->insCall(&js_EqualStrings_ci
, args
), equal
);
8520 cond
= !!js_EqualStrings(JSVAL_TO_STRING(l
), JSVAL_TO_STRING(r
));
8523 if (ltag
== TT_DOUBLE
)
8525 else if (ltag
== TT_NULL
|| ltag
== TT_OBJECT
|| ltag
== TT_FUNCTION
)
8529 x
= lir
->ins2(op
, l_ins
, r_ins
);
8531 x
= lir
->ins_eq0(x
);
8532 cond
= (ltag
== TT_DOUBLE
)
8533 ? asNumber(l
) == asNumber(r
)
8536 cond
= (cond
== equal
);
8539 /* Only guard if the same path may not always be taken. */
8541 guard(cond
, x
, BRANCH_EXIT
);
8548 JS_REQUIRES_STACK AbortableRecordingStatus
8549 TraceRecorder::equality(bool negate
, bool tryBranchAfterCond
)
8551 jsval
& rval
= stackval(-1);
8552 jsval
& lval
= stackval(-2);
8553 LIns
* l_ins
= get(&lval
);
8554 LIns
* r_ins
= get(&rval
);
8556 return equalityHelper(lval
, rval
, l_ins
, r_ins
, negate
, tryBranchAfterCond
, lval
);
8559 JS_REQUIRES_STACK AbortableRecordingStatus
8560 TraceRecorder::equalityHelper(jsval l
, jsval r
, LIns
* l_ins
, LIns
* r_ins
,
8561 bool negate
, bool tryBranchAfterCond
,
8564 LOpcode op
= LIR_eq
;
8566 LIns
* args
[] = { NULL
, NULL
};
8569 * The if chain below closely mirrors that found in 11.9.3, in general
8570 * deviating from that ordering of ifs only to account for SpiderMonkey's
8571 * conflation of booleans and undefined and for the possibility of
8572 * confusing objects and null. Note carefully the spec-mandated recursion
8573 * in the final else clause, which terminates because Number == T recurs
8574 * only if T is Object, but that must recur again to convert Object to
8575 * primitive, and ToPrimitive throws if the object cannot be converted to
8576 * a primitive value (which would terminate recursion).
8579 if (GetPromotedType(l
) == GetPromotedType(r
)) {
8580 if (JSVAL_TAG(l
) == JSVAL_OBJECT
|| JSVAL_IS_SPECIAL(l
)) {
8581 if (JSVAL_TAG(l
) == JSVAL_OBJECT
)
8584 } else if (JSVAL_IS_STRING(l
)) {
8585 args
[0] = r_ins
, args
[1] = l_ins
;
8586 l_ins
= lir
->insCall(&js_EqualStrings_ci
, args
);
8587 r_ins
= lir
->insImm(1);
8588 cond
= !!js_EqualStrings(JSVAL_TO_STRING(l
), JSVAL_TO_STRING(r
));
8590 JS_ASSERT(isNumber(l
) && isNumber(r
));
8591 cond
= (asNumber(l
) == asNumber(r
));
8594 } else if (JSVAL_IS_NULL(l
) && JSVAL_IS_SPECIAL(r
)) {
8595 l_ins
= lir
->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID
));
8596 cond
= (r
== JSVAL_VOID
);
8597 } else if (JSVAL_IS_SPECIAL(l
) && JSVAL_IS_NULL(r
)) {
8598 r_ins
= lir
->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID
));
8599 cond
= (l
== JSVAL_VOID
);
8600 } else if (isNumber(l
) && JSVAL_IS_STRING(r
)) {
8601 args
[0] = r_ins
, args
[1] = cx_ins
;
8602 r_ins
= lir
->insCall(&js_StringToNumber_ci
, args
);
8603 cond
= (asNumber(l
) == js_StringToNumber(cx
, JSVAL_TO_STRING(r
)));
8605 } else if (JSVAL_IS_STRING(l
) && isNumber(r
)) {
8606 args
[0] = l_ins
, args
[1] = cx_ins
;
8607 l_ins
= lir
->insCall(&js_StringToNumber_ci
, args
);
8608 cond
= (js_StringToNumber(cx
, JSVAL_TO_STRING(l
)) == asNumber(r
));
8611 if (JSVAL_IS_SPECIAL(l
)) {
8612 bool isVoid
= !!JSVAL_IS_VOID(l
);
8614 lir
->ins2(LIR_eq
, l_ins
, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID
))),
8617 args
[0] = l_ins
, args
[1] = cx_ins
;
8618 l_ins
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8619 l
= (l
== JSVAL_VOID
)
8620 ? DOUBLE_TO_JSVAL(cx
->runtime
->jsNaN
)
8621 : INT_TO_JSVAL(l
== JSVAL_TRUE
);
8622 return equalityHelper(l
, r
, l_ins
, r_ins
, negate
,
8623 tryBranchAfterCond
, rval
);
8625 } else if (JSVAL_IS_SPECIAL(r
)) {
8626 bool isVoid
= !!JSVAL_IS_VOID(r
);
8628 lir
->ins2(LIR_eq
, r_ins
, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID
))),
8631 args
[0] = r_ins
, args
[1] = cx_ins
;
8632 r_ins
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8633 r
= (r
== JSVAL_VOID
)
8634 ? DOUBLE_TO_JSVAL(cx
->runtime
->jsNaN
)
8635 : INT_TO_JSVAL(r
== JSVAL_TRUE
);
8636 return equalityHelper(l
, r
, l_ins
, r_ins
, negate
,
8637 tryBranchAfterCond
, rval
);
8640 if ((JSVAL_IS_STRING(l
) || isNumber(l
)) && !JSVAL_IS_PRIMITIVE(r
)) {
8642 return InjectStatus(call_imacro(equality_imacros
.any_obj
));
8644 if (!JSVAL_IS_PRIMITIVE(l
) && (JSVAL_IS_STRING(r
) || isNumber(r
))) {
8646 return InjectStatus(call_imacro(equality_imacros
.obj_any
));
8650 l_ins
= lir
->insImm(0);
8651 r_ins
= lir
->insImm(1);
8655 /* If the operands aren't numbers, compare them as integers. */
8656 LIns
* x
= lir
->ins2(op
, l_ins
, r_ins
);
8658 x
= lir
->ins_eq0(x
);
8662 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
8665 * Don't guard if the same path is always taken. If it isn't, we have to
8666 * fuse comparisons and the following branch, because the interpreter does
8669 if (tryBranchAfterCond
)
8670 fuseIf(pc
+ 1, cond
, x
);
8673 * There is no need to write out the result of this comparison if the trace
8674 * ends on this operation.
8676 if (pc
[1] == JSOP_IFNE
|| pc
[1] == JSOP_IFEQ
)
8677 CHECK_STATUS_A(checkTraceEnd(pc
+ 1));
8680 * We update the stack after the guard. This is safe since the guard bails
8681 * out at the comparison and the interpreter will therefore re-execute the
8682 * comparison. This way the value of the condition doesn't have to be
8683 * calculated and saved on the stack in most cases.
8687 return ARECORD_CONTINUE
;
8690 JS_REQUIRES_STACK AbortableRecordingStatus
8691 TraceRecorder::relational(LOpcode op
, bool tryBranchAfterCond
)
8693 jsval
& r
= stackval(-1);
8694 jsval
& l
= stackval(-2);
8697 LIns
* l_ins
= get(&l
);
8698 LIns
* r_ins
= get(&r
);
8700 jsdouble lnum
, rnum
;
8703 * 11.8.5 if either argument is an object with a function-valued valueOf
8704 * property; if both arguments are objects with non-function-valued valueOf
8705 * properties, abort.
8707 if (!JSVAL_IS_PRIMITIVE(l
)) {
8709 if (!JSVAL_IS_PRIMITIVE(r
)) {
8711 return InjectStatus(call_imacro(binary_imacros
.obj_obj
));
8713 return InjectStatus(call_imacro(binary_imacros
.obj_any
));
8715 if (!JSVAL_IS_PRIMITIVE(r
)) {
8717 return InjectStatus(call_imacro(binary_imacros
.any_obj
));
8720 /* 11.8.5 steps 3, 16-21. */
8721 if (JSVAL_IS_STRING(l
) && JSVAL_IS_STRING(r
)) {
8722 LIns
* args
[] = { r_ins
, l_ins
};
8723 l_ins
= lir
->insCall(&js_CompareStrings_ci
, args
);
8724 r_ins
= lir
->insImm(0);
8725 cond
= EvalCmp(op
, JSVAL_TO_STRING(l
), JSVAL_TO_STRING(r
));
8729 /* 11.8.5 steps 4-5. */
8730 if (!JSVAL_IS_NUMBER(l
)) {
8731 LIns
* args
[] = { l_ins
, cx_ins
};
8732 switch (JSVAL_TAG(l
)) {
8734 l_ins
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8737 l_ins
= lir
->insCall(&js_StringToNumber_ci
, args
);
8740 if (JSVAL_IS_NULL(l
)) {
8741 l_ins
= lir
->insImmf(0.0);
8748 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
8749 "have been handled at start of method");
8750 RETURN_STOP_A("safety belt");
8753 if (!JSVAL_IS_NUMBER(r
)) {
8754 LIns
* args
[] = { r_ins
, cx_ins
};
8755 switch (JSVAL_TAG(r
)) {
8757 r_ins
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8760 r_ins
= lir
->insCall(&js_StringToNumber_ci
, args
);
8763 if (JSVAL_IS_NULL(r
)) {
8764 r_ins
= lir
->insImmf(0.0);
8771 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
8772 "have been handled at start of method");
8773 RETURN_STOP_A("safety belt");
8777 jsval tmp
= JSVAL_NULL
;
8778 JSAutoTempValueRooter
tvr(cx
, 1, &tmp
);
8781 lnum
= js_ValueToNumber(cx
, &tmp
);
8783 rnum
= js_ValueToNumber(cx
, &tmp
);
8785 cond
= EvalCmp(op
, lnum
, rnum
);
8788 /* 11.8.5 steps 6-15. */
8791 * If the result is not a number or it's not a quad, we must use an integer
8795 JS_ASSERT(op
>= LIR_feq
&& op
<= LIR_fge
);
8796 op
= LOpcode(op
+ (LIR_eq
- LIR_feq
));
8798 x
= lir
->ins2(op
, l_ins
, r_ins
);
8800 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
8803 * Don't guard if the same path is always taken. If it isn't, we have to
8804 * fuse comparisons and the following branch, because the interpreter does
8807 if (tryBranchAfterCond
)
8808 fuseIf(pc
+ 1, cond
, x
);
8811 * There is no need to write out the result of this comparison if the trace
8812 * ends on this operation.
8814 if (pc
[1] == JSOP_IFNE
|| pc
[1] == JSOP_IFEQ
)
8815 CHECK_STATUS_A(checkTraceEnd(pc
+ 1));
8818 * We update the stack after the guard. This is safe since the guard bails
8819 * out at the comparison and the interpreter will therefore re-execute the
8820 * comparison. This way the value of the condition doesn't have to be
8821 * calculated and saved on the stack in most cases.
8825 return ARECORD_CONTINUE
;
8828 JS_REQUIRES_STACK RecordingStatus
8829 TraceRecorder::unary(LOpcode op
)
8831 jsval
& v
= stackval(-1);
8832 bool intop
= !(op
& LIR64
);
8837 a
= lir
->ins1(op
, a
);
8839 a
= lir
->ins1(LIR_i2f
, a
);
8841 return RECORD_CONTINUE
;
8846 JS_REQUIRES_STACK RecordingStatus
8847 TraceRecorder::binary(LOpcode op
)
8849 jsval
& r
= stackval(-1);
8850 jsval
& l
= stackval(-2);
8852 if (!JSVAL_IS_PRIMITIVE(l
)) {
8854 if (!JSVAL_IS_PRIMITIVE(r
)) {
8856 return call_imacro(binary_imacros
.obj_obj
);
8858 return call_imacro(binary_imacros
.obj_any
);
8860 if (!JSVAL_IS_PRIMITIVE(r
)) {
8862 return call_imacro(binary_imacros
.any_obj
);
8865 bool intop
= !(op
& LIR64
);
8869 bool leftIsNumber
= isNumber(l
);
8870 jsdouble lnum
= leftIsNumber
? asNumber(l
) : 0;
8872 bool rightIsNumber
= isNumber(r
);
8873 jsdouble rnum
= rightIsNumber
? asNumber(r
) : 0;
8875 if ((op
>= LIR_sub
&& op
<= LIR_ush
) || // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
8876 (op
>= LIR_fsub
&& op
<= LIR_fmod
)) { // fsub, fmul, fdiv, fmod
8878 if (JSVAL_IS_STRING(l
)) {
8881 a
= lir
->insCall(&js_StringToNumber_ci
, args
);
8882 lnum
= js_StringToNumber(cx
, JSVAL_TO_STRING(l
));
8883 leftIsNumber
= true;
8885 if (JSVAL_IS_STRING(r
)) {
8888 b
= lir
->insCall(&js_StringToNumber_ci
, args
);
8889 rnum
= js_StringToNumber(cx
, JSVAL_TO_STRING(r
));
8890 rightIsNumber
= true;
8893 if (JSVAL_IS_SPECIAL(l
)) {
8894 LIns
* args
[] = { a
, cx_ins
};
8895 a
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8896 lnum
= js_BooleanOrUndefinedToNumber(cx
, JSVAL_TO_SPECIAL(l
));
8897 leftIsNumber
= true;
8899 if (JSVAL_IS_SPECIAL(r
)) {
8900 LIns
* args
[] = { b
, cx_ins
};
8901 b
= lir
->insCall(&js_BooleanOrUndefinedToNumber_ci
, args
);
8902 rnum
= js_BooleanOrUndefinedToNumber(cx
, JSVAL_TO_SPECIAL(r
));
8903 rightIsNumber
= true;
8905 if (leftIsNumber
&& rightIsNumber
) {
8907 LIns
*args
[] = { a
};
8908 a
= lir
->insCall(op
== LIR_ush
? &js_DoubleToUint32_ci
: &js_DoubleToInt32_ci
, args
);
8911 a
= alu(op
, lnum
, rnum
, a
, b
);
8913 a
= lir
->ins1(op
== LIR_ush
? LIR_u2f
: LIR_i2f
, a
);
8915 return RECORD_CONTINUE
;
8920 struct GuardedShapeEntry
: public JSDHashEntryStub
8925 #if defined DEBUG_notme && defined XP_UNIX
8928 static FILE* shapefp
= NULL
;
8931 DumpShape(JSObject
* obj
, const char* prefix
)
8933 JSScope
* scope
= OBJ_SCOPE(obj
);
8936 shapefp
= fopen("/tmp/shapes.dump", "w");
8941 fprintf(shapefp
, "\n%s: shape %u flags %x\n", prefix
, scope
->shape
, scope
->flags
);
8942 for (JSScopeProperty
* sprop
= scope
->lastProp
; sprop
; sprop
= sprop
->parent
) {
8943 if (JSID_IS_ATOM(sprop
->id
)) {
8944 fprintf(shapefp
, " %s", JS_GetStringBytes(JSVAL_TO_STRING(ID_TO_VALUE(sprop
->id
))));
8946 JS_ASSERT(!JSID_IS_OBJECT(sprop
->id
));
8947 fprintf(shapefp
, " %d", JSID_TO_INT(sprop
->id
));
8949 fprintf(shapefp
, " %u %p %p %x %x %d\n",
8950 sprop
->slot
, sprop
->getter
, sprop
->setter
, sprop
->attrs
, sprop
->flags
,
8956 static JSDHashOperator
8957 DumpShapeEnumerator(JSDHashTable
* table
, JSDHashEntryHdr
* hdr
, uint32 number
, void* arg
)
8959 GuardedShapeEntry
* entry
= (GuardedShapeEntry
*) hdr
;
8960 const char* prefix
= (const char*) arg
;
8962 DumpShape(entry
->obj
, prefix
);
8963 return JS_DHASH_NEXT
;
8967 TraceRecorder::dumpGuardedShapes(const char* prefix
)
8969 if (guardedShapeTable
.ops
)
8970 JS_DHashTableEnumerate(&guardedShapeTable
, DumpShapeEnumerator
, (void*) prefix
);
8972 #endif /* DEBUG_notme && XP_UNIX */
8974 JS_REQUIRES_STACK RecordingStatus
8975 TraceRecorder::guardShape(LIns
* obj_ins
, JSObject
* obj
, uint32 shape
, const char* guardName
,
8976 LIns
* map_ins
, VMSideExit
* exit
)
8978 if (!guardedShapeTable
.ops
) {
8979 JS_DHashTableInit(&guardedShapeTable
, JS_DHashGetStubOps(), NULL
,
8980 sizeof(GuardedShapeEntry
), JS_DHASH_MIN_SIZE
);
8983 // Test (with add if missing) for a remembered guard for (obj_ins, obj).
8984 GuardedShapeEntry
* entry
= (GuardedShapeEntry
*)
8985 JS_DHashTableOperate(&guardedShapeTable
, obj_ins
, JS_DHASH_ADD
);
8987 JS_ReportOutOfMemory(cx
);
8988 return RECORD_ERROR
;
8991 // If already guarded, emit an assertion that the shape matches.
8993 JS_ASSERT(entry
->key
== obj_ins
);
8994 JS_ASSERT(entry
->obj
== obj
);
8996 lir
->insAssert(lir
->ins2i(LIR_eq
,
8997 lir
->insLoad(LIR_ld
, map_ins
, offsetof(JSScope
, shape
)),
9000 return RECORD_CONTINUE
;
9003 // Not yet guarded. Remember obj_ins along with obj (for invalidation).
9004 entry
->key
= obj_ins
;
9007 #if defined DEBUG_notme && defined XP_UNIX
9008 DumpShape(obj
, "guard");
9009 fprintf(shapefp
, "for obj_ins %p\n", obj_ins
);
9012 // Finally, emit the shape guard.
9013 LIns
* shape_ins
= addName(lir
->insLoad(LIR_ld
, map_ins
, offsetof(JSScope
, shape
)), "shape");
9015 addName(lir
->ins2i(LIR_eq
, shape_ins
, shape
), guardName
),
9017 return RECORD_CONTINUE
;
9020 static JSDHashOperator
9021 ForgetGuardedShapesForObject(JSDHashTable
* table
, JSDHashEntryHdr
* hdr
, uint32 number
, void* arg
)
9023 GuardedShapeEntry
* entry
= (GuardedShapeEntry
*) hdr
;
9024 if (entry
->obj
== arg
) {
9025 #if defined DEBUG_notme && defined XP_UNIX
9026 DumpShape(entry
->obj
, "forget");
9028 return JS_DHASH_REMOVE
;
9030 return JS_DHASH_NEXT
;
9034 TraceRecorder::forgetGuardedShapesForObject(JSObject
* obj
)
9036 if (guardedShapeTable
.ops
)
9037 JS_DHashTableEnumerate(&guardedShapeTable
, ForgetGuardedShapesForObject
, obj
);
9041 TraceRecorder::forgetGuardedShapes()
9043 if (guardedShapeTable
.ops
) {
9044 #if defined DEBUG_notme && defined XP_UNIX
9045 dumpGuardedShapes("forget-all");
9047 JS_DHashTableFinish(&guardedShapeTable
);
9048 guardedShapeTable
.ops
= NULL
;
9052 JS_STATIC_ASSERT(offsetof(JSObjectOps
, objectMap
) == 0);
9055 TraceRecorder::map(LIns
* obj_ins
)
9057 return addName(lir
->insLoad(LIR_ldp
, obj_ins
, (int) offsetof(JSObject
, map
)), "map");
9061 TraceRecorder::map_is_native(JSObjectMap
* map
, LIns
* map_ins
, LIns
*& ops_ins
, size_t op_offset
)
9063 JS_ASSERT(op_offset
< sizeof(JSObjectOps
));
9064 JS_ASSERT(op_offset
% sizeof(void *) == 0);
9066 #define OP(ops) (*(void **) ((uint8 *) (ops) + op_offset))
9067 void* ptr
= OP(map
->ops
);
9068 if (ptr
!= OP(&js_ObjectOps
))
9072 ops_ins
= addName(lir
->insLoad(LIR_ldcp
, map_ins
, int(offsetof(JSObjectMap
, ops
))), "ops");
9073 LIns
* n
= lir
->insLoad(LIR_ldcp
, ops_ins
, op_offset
);
9075 addName(lir
->ins2(LIR_peq
, n
, INS_CONSTPTR(ptr
)), "guard(native-map)"),
9081 JS_REQUIRES_STACK RecordingStatus
9082 TraceRecorder::guardNativePropertyOp(JSObject
* aobj
, LIns
* map_ins
)
9085 * Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops
9086 * which is required to use native objects (those whose maps are scopes),
9087 * or even more narrow conditions required because the cache miss case
9088 * will call a particular object-op (js_GetProperty, js_SetProperty).
9090 * We parameterize using offsetof and guard on match against the hook at
9091 * the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
9092 * guards the js_SetProperty case.
9094 uint32 format
= js_CodeSpec
[*cx
->fp
->regs
->pc
].format
;
9095 uint32 mode
= JOF_MODE(format
);
9097 // No need to guard native-ness of global object.
9098 JS_ASSERT(OBJ_IS_NATIVE(globalObj
));
9099 if (aobj
!= globalObj
) {
9100 size_t op_offset
= offsetof(JSObjectOps
, objectMap
);
9101 if (mode
== JOF_PROP
|| mode
== JOF_VARPROP
) {
9102 op_offset
= (format
& JOF_SET
)
9103 ? offsetof(JSObjectOps
, setProperty
)
9104 : offsetof(JSObjectOps
, getProperty
);
9106 JS_ASSERT(mode
== JOF_NAME
);
9110 if (!map_is_native(aobj
->map
, map_ins
, ops_ins
, op_offset
))
9111 RETURN_STOP("non-native map");
9113 return RECORD_CONTINUE
;
9116 JS_REQUIRES_STACK AbortableRecordingStatus
9117 TraceRecorder::test_property_cache(JSObject
* obj
, LIns
* obj_ins
, JSObject
*& obj2
, jsuword
& pcval
)
9119 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
9120 JS_ASSERT(*pc
!= JSOP_INITPROP
&& *pc
!= JSOP_INITMETHOD
&&
9121 *pc
!= JSOP_SETNAME
&& *pc
!= JSOP_SETPROP
&& *pc
!= JSOP_SETMETHOD
);
9123 // Mimic the interpreter's special case for dense arrays by skipping up one
9124 // hop along the proto chain when accessing a named (not indexed) property,
9125 // typically to find Array.prototype methods.
9126 JSObject
* aobj
= obj
;
9127 if (OBJ_IS_DENSE_ARRAY(cx
, obj
)) {
9128 guardDenseArray(obj
, obj_ins
, BRANCH_EXIT
);
9129 aobj
= OBJ_GET_PROTO(cx
, obj
);
9130 obj_ins
= stobj_get_proto(obj_ins
);
9133 LIns
* map_ins
= map(obj_ins
);
9135 CHECK_STATUS_A(guardNativePropertyOp(aobj
, map_ins
));
9138 JSPropCacheEntry
* entry
;
9139 PROPERTY_CACHE_TEST(cx
, pc
, aobj
, obj2
, entry
, atom
);
9141 // Null atom means that obj2 is locked and must now be unlocked.
9142 JS_UNLOCK_OBJ(cx
, obj2
);
9144 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
9145 jsid id
= ATOM_TO_JSID(atom
);
9147 if (JOF_OPMODE(*pc
) == JOF_NAME
) {
9148 JS_ASSERT(aobj
== obj
);
9150 JSTraceMonitor
&localtm
= *traceMonitor
;
9151 entry
= js_FindPropertyHelper(cx
, id
, true, &obj
, &obj2
, &prop
);
9153 /* js_FindPropertyHelper can reenter the interpreter and kill |this|. */
9154 if (!localtm
.recorder
)
9155 return ARECORD_ABORTED
;
9158 RETURN_ERROR_A("error in js_FindPropertyHelper");
9159 if (entry
== JS_NO_PROP_CACHE_FILL
)
9160 RETURN_STOP_A("cannot cache name");
9162 JSTraceMonitor
&localtm
= *traceMonitor
;
9163 JSContext
*localcx
= cx
;
9164 int protoIndex
= js_LookupPropertyWithFlags(cx
, aobj
, id
,
9168 /* js_LookupPropertyWithFlags can reenter the interpreter and kill |this|. */
9169 if (!localtm
.recorder
) {
9171 obj2
->dropProperty(localcx
, prop
);
9172 return ARECORD_ABORTED
;
9176 RETURN_ERROR_A("error in js_LookupPropertyWithFlags");
9179 if (!OBJ_IS_NATIVE(obj2
)) {
9180 obj2
->dropProperty(cx
, prop
);
9181 RETURN_STOP_A("property found on non-native object");
9183 entry
= js_FillPropertyCache(cx
, aobj
, 0, protoIndex
, obj2
,
9184 (JSScopeProperty
*) prop
, false);
9186 if (entry
== JS_NO_PROP_CACHE_FILL
)
9193 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
9194 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
9195 // the global it's assigning does not yet exist, create it.
9198 // Use PCVAL_NULL to return "no such property" to our caller.
9200 return ARECORD_CONTINUE
;
9203 obj2
->dropProperty(cx
, prop
);
9205 RETURN_STOP_A("failed to fill property cache");
9208 #ifdef JS_THREADSAFE
9209 // There's a potential race in any JS_THREADSAFE embedding that's nuts
9210 // enough to share mutable objects on the scope or proto chain, but we
9211 // don't care about such insane embeddings. Anyway, the (scope, proto)
9212 // entry->vcap coordinates must reach obj2 from aobj at this point.
9213 JS_ASSERT(cx
->requestDepth
);
9216 return InjectStatus(guardPropertyCacheHit(obj_ins
, map_ins
, aobj
, obj2
, entry
, pcval
));
9219 JS_REQUIRES_STACK RecordingStatus
9220 TraceRecorder::guardPropertyCacheHit(LIns
* obj_ins
,
9224 JSPropCacheEntry
* entry
,
9227 VMSideExit
* exit
= snapshot(BRANCH_EXIT
);
9229 uint32 vshape
= PCVCAP_SHAPE(entry
->vcap
);
9231 // Check for first-level cache hit and guard on kshape if possible.
9232 // Otherwise guard on key object exact match.
9233 if (PCVCAP_TAG(entry
->vcap
) <= 1) {
9234 if (aobj
!= globalObj
)
9235 CHECK_STATUS(guardShape(obj_ins
, aobj
, entry
->kshape
, "guard_kshape", map_ins
, exit
));
9237 if (entry
->adding()) {
9238 if (aobj
== globalObj
)
9239 RETURN_STOP("adding a property to the global object");
9241 LIns
*vshape_ins
= addName(
9242 lir
->insLoad(LIR_ld
,
9243 addName(lir
->insLoad(LIR_ldcp
, cx_ins
, offsetof(JSContext
, runtime
)),
9245 offsetof(JSRuntime
, protoHazardShape
)),
9246 "protoHazardShape");
9248 addName(lir
->ins2i(LIR_eq
, vshape_ins
, vshape
), "guard_protoHazardShape"),
9253 JSOp op
= js_GetOpcode(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
);
9255 if (op
== JSOP_LENGTH
) {
9256 pcatom
= cx
->runtime
->atomState
.lengthAtom
;
9258 ptrdiff_t pcoff
= (JOF_TYPE(js_CodeSpec
[op
].format
) == JOF_SLOTATOM
) ? SLOTNO_LEN
: 0;
9259 GET_ATOM_FROM_BYTECODE(cx
->fp
->script
, cx
->fp
->regs
->pc
, pcoff
, pcatom
);
9261 JS_ASSERT(entry
->kpc
== (jsbytecode
*) pcatom
);
9262 JS_ASSERT(entry
->kshape
== jsuword(aobj
));
9264 if (aobj
!= globalObj
&& !obj_ins
->isconstp()) {
9266 addName(lir
->ins2(LIR_peq
, obj_ins
, INS_CONSTWORD(entry
->kshape
)), "guard_kobj"),
9271 // For any hit that goes up the scope and/or proto chains, we will need to
9272 // guard on the shape of the object containing the property.
9273 if (PCVCAP_TAG(entry
->vcap
) >= 1) {
9274 JS_ASSERT(OBJ_SHAPE(obj2
) == vshape
);
9277 if (PCVCAP_TAG(entry
->vcap
) == 1) {
9278 // Duplicate the special case in PROPERTY_CACHE_TEST.
9279 obj2_ins
= addName(stobj_get_proto(obj_ins
), "proto");
9280 guard(false, lir
->ins_peq0(obj2_ins
), exit
);
9282 obj2_ins
= INS_CONSTOBJ(obj2
);
9284 CHECK_STATUS(guardShape(obj2_ins
, obj2
, vshape
, "guard_vshape", map(obj2_ins
), exit
));
9287 pcval
= entry
->vword
;
9288 return RECORD_CONTINUE
;
9292 TraceRecorder::stobj_set_fslot(LIns
*obj_ins
, unsigned slot
, LIns
* v_ins
)
9294 lir
->insStorei(v_ins
, obj_ins
, offsetof(JSObject
, fslots
) + slot
* sizeof(jsval
));
9298 TraceRecorder::stobj_set_dslot(LIns
*obj_ins
, unsigned slot
, LIns
*& dslots_ins
, LIns
* v_ins
)
9301 dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
9302 lir
->insStorei(v_ins
, dslots_ins
, slot
* sizeof(jsval
));
9306 TraceRecorder::stobj_set_slot(LIns
* obj_ins
, unsigned slot
, LIns
*& dslots_ins
, LIns
* v_ins
)
9308 if (slot
< JS_INITIAL_NSLOTS
) {
9309 stobj_set_fslot(obj_ins
, slot
, v_ins
);
9311 stobj_set_dslot(obj_ins
, slot
- JS_INITIAL_NSLOTS
, dslots_ins
, v_ins
);
9316 TraceRecorder::stobj_get_fslot(LIns
* obj_ins
, unsigned slot
)
9318 JS_ASSERT(slot
< JS_INITIAL_NSLOTS
);
9319 return lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, fslots
) + slot
* sizeof(jsval
));
9323 TraceRecorder::stobj_get_dslot(LIns
* obj_ins
, unsigned index
, LIns
*& dslots_ins
)
9326 dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
9327 return lir
->insLoad(LIR_ldp
, dslots_ins
, index
* sizeof(jsval
));
9331 TraceRecorder::stobj_get_slot(LIns
* obj_ins
, unsigned slot
, LIns
*& dslots_ins
)
9333 if (slot
< JS_INITIAL_NSLOTS
)
9334 return stobj_get_fslot(obj_ins
, slot
);
9335 return stobj_get_dslot(obj_ins
, slot
- JS_INITIAL_NSLOTS
, dslots_ins
);
9338 JS_REQUIRES_STACK LIns
*
9339 TraceRecorder::box_jsval(jsval v
, LIns
* v_ins
)
9342 LIns
* args
[] = { v_ins
, cx_ins
};
9343 v_ins
= lir
->insCall(&js_BoxDouble_ci
, args
);
9344 guard(false, lir
->ins2(LIR_peq
, v_ins
, INS_CONSTWORD(JSVAL_ERROR_COOKIE
)),
9348 switch (JSVAL_TAG(v
)) {
9350 return lir
->ins2(LIR_pior
, lir
->ins2i(LIR_pilsh
, lir
->ins_u2p(v_ins
), JSVAL_TAGBITS
),
9351 INS_CONSTWORD(JSVAL_SPECIAL
));
9355 JS_ASSERT(JSVAL_TAG(v
) == JSVAL_STRING
);
9356 return lir
->ins2(LIR_pior
, v_ins
, INS_CONSTWORD(JSVAL_STRING
));
9360 JS_REQUIRES_STACK LIns
*
9361 TraceRecorder::unbox_jsval(jsval v
, LIns
* v_ins
, VMSideExit
* exit
)
9364 // JSVAL_IS_NUMBER(v)
9366 lir
->ins_eq0(lir
->ins2(LIR_or
,
9367 p2i(lir
->ins2(LIR_piand
, v_ins
, INS_CONSTWORD(JSVAL_INT
))),
9369 lir
->ins2(LIR_piand
, v_ins
,
9370 INS_CONSTWORD(JSVAL_TAGMASK
)),
9371 INS_CONSTWORD(JSVAL_DOUBLE
)))),
9373 LIns
* args
[] = { v_ins
};
9374 return lir
->insCall(&js_UnboxDouble_ci
, args
);
9376 switch (JSVAL_TAG(v
)) {
9380 lir
->ins2(LIR_piand
, v_ins
, INS_CONSTWORD(JSVAL_TAGMASK
)),
9381 INS_CONSTWORD(JSVAL_SPECIAL
)),
9383 return p2i(lir
->ins2i(LIR_pursh
, v_ins
, JSVAL_TAGBITS
));
9386 if (JSVAL_IS_NULL(v
)) {
9387 // JSVAL_NULL maps to type TT_NULL, so insist that v_ins == 0 here.
9388 guard(true, lir
->ins_peq0(v_ins
), exit
);
9390 guard(false, lir
->ins_peq0(v_ins
), exit
);
9393 lir
->ins2(LIR_piand
, v_ins
, INS_CONSTWORD(JSVAL_TAGMASK
)),
9394 INS_CONSTWORD(JSVAL_OBJECT
)),
9398 * LIR_ldcp is ok to use here even though Array classword can
9399 * change, because no object's classword can ever change from
9400 * &js_ArrayClass to &js_FunctionClass.
9402 guard(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v
)),
9404 lir
->ins2(LIR_piand
,
9405 lir
->insLoad(LIR_ldcp
, v_ins
, offsetof(JSObject
, classword
)),
9406 INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS
)),
9407 INS_CONSTPTR(&js_FunctionClass
)),
9413 JS_ASSERT(JSVAL_TAG(v
) == JSVAL_STRING
);
9416 lir
->ins2(LIR_piand
, v_ins
, INS_CONSTWORD(JSVAL_TAGMASK
)),
9417 INS_CONSTWORD(JSVAL_STRING
)),
9419 return lir
->ins2(LIR_piand
, v_ins
, addName(lir
->insImmWord(~JSVAL_TAGMASK
),
9424 JS_REQUIRES_STACK RecordingStatus
9425 TraceRecorder::getThis(LIns
*& this_ins
)
9428 * js_ComputeThisForFrame updates cx->fp->argv[-1], so sample it into 'original' first.
9430 jsval original
= JSVAL_NULL
;
9432 original
= cx
->fp
->argv
[-1];
9433 if (!JSVAL_IS_PRIMITIVE(original
) &&
9434 guardClass(JSVAL_TO_OBJECT(original
), get(&cx
->fp
->argv
[-1]), &js_WithClass
, snapshot(MISMATCH_EXIT
))) {
9435 RETURN_STOP("can't trace getThis on With object");
9439 JSObject
* thisObj
= js_ComputeThisForFrame(cx
, cx
->fp
);
9441 RETURN_ERROR("js_ComputeThisForName failed");
9443 /* In global code, bake in the global object as 'this' object. */
9444 if (!cx
->fp
->callee()) {
9445 JS_ASSERT(callDepth
== 0);
9446 this_ins
= INS_CONSTOBJ(thisObj
);
9449 * We don't have argv[-1] in global code, so we don't update the
9452 return RECORD_CONTINUE
;
9455 jsval
& thisv
= cx
->fp
->argv
[-1];
9456 JS_ASSERT(JSVAL_IS_OBJECT(thisv
));
9459 * Traces type-specialize between null and objects, so if we currently see
9460 * a null value in argv[-1], this trace will only match if we see null at
9461 * runtime as well. Bake in the global object as 'this' object, updating
9462 * the tracker as well. We can only detect this condition prior to calling
9463 * js_ComputeThisForFrame, since it updates the interpreter's copy of
9466 JSClass
* clasp
= NULL
;;
9467 if (JSVAL_IS_NULL(original
) ||
9468 (((clasp
= STOBJ_GET_CLASS(JSVAL_TO_OBJECT(original
))) == &js_CallClass
) ||
9469 (clasp
== &js_BlockClass
))) {
9471 guardClass(JSVAL_TO_OBJECT(original
), get(&thisv
), clasp
, snapshot(BRANCH_EXIT
));
9472 JS_ASSERT(!JSVAL_IS_PRIMITIVE(thisv
));
9473 if (thisObj
!= globalObj
)
9474 RETURN_STOP("global object was wrapped while recording");
9475 this_ins
= INS_CONSTOBJ(thisObj
);
9476 set(&thisv
, this_ins
);
9477 return RECORD_CONTINUE
;
9480 this_ins
= get(&thisv
);
9482 JSObject
* wrappedGlobal
= globalObj
->thisObject(cx
);
9484 RETURN_ERROR("globalObj->thisObject hook threw in getThis");
9487 * The only unwrapped object that needs to be wrapped that we can get here
9488 * is the global object obtained throught the scope chain.
9490 this_ins
= lir
->ins_choose(lir
->ins_peq0(stobj_get_parent(this_ins
)),
9491 INS_CONSTOBJ(wrappedGlobal
),
9493 return RECORD_CONTINUE
;
9498 TraceRecorder::getStringLength(LIns
* str_ins
)
9500 LIns
* len_ins
= lir
->insLoad(LIR_ldp
, str_ins
, (int)offsetof(JSString
, mLength
));
9502 LIns
* masked_len_ins
= lir
->ins2(LIR_piand
,
9504 INS_CONSTWORD(JSString::LENGTH_MASK
));
9507 lir
->ins_choose(lir
->ins_peq0(lir
->ins2(LIR_piand
,
9509 INS_CONSTWORD(JSString::DEPENDENT
))),
9511 lir
->ins_choose(lir
->ins_peq0(lir
->ins2(LIR_piand
,
9513 INS_CONSTWORD(JSString::PREFIX
))),
9514 lir
->ins2(LIR_piand
,
9516 INS_CONSTWORD(JSString::DEPENDENT_LENGTH_MASK
)),
9518 return p2i(real_len
);
9521 JS_REQUIRES_STACK
bool
9522 TraceRecorder::guardClass(JSObject
* obj
, LIns
* obj_ins
, JSClass
* clasp
, VMSideExit
* exit
)
9524 bool cond
= STOBJ_GET_CLASS(obj
) == clasp
;
9526 LIns
* class_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, classword
));
9527 class_ins
= lir
->ins2(LIR_piand
, class_ins
, INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS
));
9530 JS_snprintf(namebuf
, sizeof namebuf
, "guard(class is %s)", clasp
->name
);
9531 guard(cond
, addName(lir
->ins2(LIR_peq
, class_ins
, INS_CONSTPTR(clasp
)), namebuf
), exit
);
9535 JS_REQUIRES_STACK
bool
9536 TraceRecorder::guardDenseArray(JSObject
* obj
, LIns
* obj_ins
, ExitType exitType
)
9538 return guardClass(obj
, obj_ins
, &js_ArrayClass
, snapshot(exitType
));
9541 JS_REQUIRES_STACK
bool
9542 TraceRecorder::guardDenseArray(JSObject
* obj
, LIns
* obj_ins
, VMSideExit
* exit
)
9544 return guardClass(obj
, obj_ins
, &js_ArrayClass
, exit
);
9547 JS_REQUIRES_STACK
bool
9548 TraceRecorder::guardHasPrototype(JSObject
* obj
, LIns
* obj_ins
,
9549 JSObject
** pobj
, LIns
** pobj_ins
,
9552 *pobj
= obj
->getProto();
9553 *pobj_ins
= stobj_get_proto(obj_ins
);
9555 bool cond
= *pobj
== NULL
;
9556 guard(cond
, addName(lir
->ins_peq0(*pobj_ins
), "guard(proto-not-null)"), exit
);
9560 JS_REQUIRES_STACK RecordingStatus
9561 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject
* obj
, LIns
* obj_ins
, ExitType exitType
)
9564 * Guard that no object along the prototype chain has any indexed
9565 * properties which might become visible through holes in the array.
9567 VMSideExit
* exit
= snapshot(exitType
);
9569 if (js_PrototypeHasIndexedProperties(cx
, obj
))
9572 while (guardHasPrototype(obj
, obj_ins
, &obj
, &obj_ins
, exit
))
9573 CHECK_STATUS(guardShape(obj_ins
, obj
, OBJ_SHAPE(obj
), "guard(shape)", map(obj_ins
), exit
));
9574 return RECORD_CONTINUE
;
9578 TraceRecorder::guardNotGlobalObject(JSObject
* obj
, LIns
* obj_ins
)
9580 if (obj
== globalObj
)
9581 RETURN_STOP("reference aliases global object");
9582 guard(false, lir
->ins2(LIR_peq
, obj_ins
, INS_CONSTOBJ(globalObj
)), MISMATCH_EXIT
);
9583 return RECORD_CONTINUE
;
9586 JS_REQUIRES_STACK
void
9587 TraceRecorder::clearFrameSlotsFromCache()
9590 * Clear out all slots of this frame in the nativeFrameTracker. Different
9591 * locations on the VM stack might map to different locations on the native
9592 * stack depending on the number of arguments (i.e.) of the next call, so
9593 * we have to make sure we map those in to the cache with the right
9596 JSStackFrame
* fp
= cx
->fp
;
9601 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
9602 * This doesn't do layout arithmetic, but it must clear out all the slots defined as
9603 * imported by VisitFrameSlots.
9607 vpstop
= &fp
->argv
[argSlots(fp
)];
9609 nativeFrameTracker
.set(vp
++, (LIns
*)0);
9610 nativeFrameTracker
.set(&fp
->argsobj
, (LIns
*)0);
9613 vpstop
= &fp
->slots
[fp
->script
->nslots
];
9615 nativeFrameTracker
.set(vp
++, (LIns
*)0);
9619 * If we have created an |arguments| object for the frame, we must copy the
9620 * argument values into the object as properties in case it is used after
9621 * this frame returns.
9623 JS_REQUIRES_STACK
void
9624 TraceRecorder::putArguments()
9626 if (cx
->fp
->argsobj
&& cx
->fp
->argc
) {
9627 LIns
* argsobj_ins
= get(&cx
->fp
->argsobj
);
9628 LIns
* args_ins
= lir
->insAlloc(sizeof(jsval
) * cx
->fp
->argc
);
9629 for (uintN i
= 0; i
< cx
->fp
->argc
; ++i
) {
9630 LIns
* arg_ins
= box_jsval(cx
->fp
->argv
[i
], get(&cx
->fp
->argv
[i
]));
9631 lir
->insStorei(arg_ins
, args_ins
, i
* sizeof(jsval
));
9633 LIns
* args
[] = { args_ins
, argsobj_ins
, cx_ins
};
9634 lir
->insCall(&js_PutArguments_ci
, args
);
9639 IsTraceableRecursion(JSContext
*cx
)
9641 JSStackFrame
*fp
= cx
->fp
;
9642 JSStackFrame
*down
= cx
->fp
->down
;
9645 if (down
->script
!= fp
->script
)
9647 if (down
->argc
!= fp
->argc
)
9649 if (fp
->imacpc
|| down
->imacpc
)
9651 if ((fp
->flags
& JSFRAME_CONSTRUCTING
) || (down
->flags
& JSFRAME_CONSTRUCTING
))
9656 JS_REQUIRES_STACK AbortableRecordingStatus
9657 TraceRecorder::record_EnterFrame(uintN
& inlineCallCount
)
9659 JSStackFrame
* fp
= cx
->fp
;
9661 if (++callDepth
>= MAX_CALLDEPTH
)
9662 RETURN_STOP_A("exceeded maximum call depth");
9664 debug_only_printf(LC_TMTracer
, "EnterFrame %s, callDepth=%d\n",
9665 js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
),
9668 if (js_LogController
.lcbits
& LC_TMRecorder
) {
9669 js_Disassemble(cx
, cx
->fp
->script
, JS_TRUE
, stdout
);
9670 debug_only_print0(LC_TMTracer
, "----\n");
9673 LIns
* void_ins
= INS_VOID();
9675 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
9676 // This doesn't do layout arithmetic, but it must initialize in the tracker all the
9677 // slots defined as imported by VisitFrameSlots.
9678 jsval
* vp
= &fp
->argv
[fp
->argc
];
9679 jsval
* vpstop
= vp
+ ptrdiff_t(fp
->fun
->nargs
) - ptrdiff_t(fp
->argc
);
9680 while (vp
< vpstop
) {
9681 if (vp
>= fp
->down
->regs
->sp
)
9682 nativeFrameTracker
.set(vp
, (LIns
*)0);
9683 set(vp
++, void_ins
, true);
9687 vpstop
= vp
+ fp
->script
->nfixed
;
9689 set(vp
++, void_ins
, true);
9690 set(&fp
->argsobj
, INS_NULL(), true);
9693 * Check for recursion. This is a special check for recursive cases that can be
9694 * a trace-tree, just like a loop. If recursion acts weird, for example
9695 * differing argc or existence of an imacpc, it's not something this code is
9696 * concerned about. That should pass through below to not regress pre-recursion
9699 if (IsTraceableRecursion(cx
) && treeInfo
->script
== cx
->fp
->script
) {
9700 if (treeInfo
->recursion
== Recursion_Disallowed
)
9701 RETURN_STOP_A("recursion not allowed in this tree");
9702 if (treeInfo
->script
!= cx
->fp
->script
)
9703 RETURN_STOP_A("recursion does not match original tree");
9704 return InjectStatus(downRecursion());
9707 /* Try inlining one level in case this recursion doesn't go too deep. */
9708 if (fp
->script
== fp
->down
->script
&&
9709 fp
->down
->down
&& fp
->down
->down
->script
== fp
->script
) {
9710 RETURN_STOP_A("recursion started inlining");
9713 VMFragment
* root
= (VMFragment
*)fragment
->root
;
9714 VMFragment
* first
= getLoop(&JS_TRACE_MONITOR(cx
), fp
->regs
->pc
,
9715 root
->globalObj
, root
->globalShape
, fp
->argc
);
9717 return ARECORD_CONTINUE
;
9718 VMFragment
* f
= findNestedCompatiblePeer(first
);
9721 * If there were no compatible peers, but there were peers at all, then it is probable that
9722 * an inner recursive function is type mismatching. Start a new recorder that must be
9725 for (f
= first
; f
; f
= (VMFragment
*)f
->peer
) {
9726 if (f
->getTreeInfo() && f
->getTreeInfo()->recursion
== Recursion_Detected
) {
9727 /* Since this recorder is about to die, save its values. */
9728 if (++first
->hits() <= HOTLOOP
)
9729 return ARECORD_STOP
;
9730 if (IsBlacklisted((jsbytecode
*)f
->ip
))
9731 RETURN_STOP_A("inner recursive tree is blacklisted");
9732 JS_ASSERT(f
->getTreeInfo()->script
!= treeInfo
->script
);
9733 JSContext
* _cx
= cx
;
9734 SlotList
* globalSlots
= treeInfo
->globalSlots
;
9735 JSTraceMonitor
* tm
= traceMonitor
;
9736 js_AbortRecording(cx
, "trying to compile inner recursive tree");
9737 if (RecordTree(_cx
, tm
, first
, NULL
, 0, first
->globalObj
, first
->globalShape
,
9738 globalSlots
, _cx
->fp
->argc
, Monitor_EnterFrame
)) {
9739 JS_ASSERT(tm
->recorder
);
9744 return ARECORD_CONTINUE
;
9746 /* Make sure inner tree call will not run into an out-of-memory condition. */
9747 JSTraceMonitor
* tm
= traceMonitor
;
9748 if (tm
->reservedDoublePoolPtr
< (tm
->reservedDoublePool
+ MAX_NATIVE_STACK_SLOTS
) &&
9749 !ReplenishReservedPool(cx
, tm
)) {
9750 RETURN_STOP_A("Couldn't call inner tree (out of memory)");
9753 * Make sure the shape of the global object still matches (this might
9754 * flush the JIT cache).
9756 JSObject
* globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
9757 uint32 globalShape
= -1;
9758 SlotList
* globalSlots
= NULL
;
9759 if (!CheckGlobalObjectShape(cx
, tm
, globalObj
, &globalShape
, &globalSlots
))
9760 return ARECORD_ABORTED
;
9761 return attemptTreeCall(f
, inlineCallCount
);
9764 return ARECORD_CONTINUE
;
9767 JS_REQUIRES_STACK AbortableRecordingStatus
9768 TraceRecorder::record_LeaveFrame()
9772 debug_only_printf(LC_TMTracer
,
9773 "LeaveFrame (back to %s), callDepth=%d\n",
9774 js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
),
9778 JS_ASSERT(js_CodeSpec
[js_GetOpcode(cx
, cx
->fp
->script
,
9779 cx
->fp
->regs
->pc
)].length
== JSOP_CALL_LENGTH
);
9781 if (callDepth
-- <= 0)
9782 RETURN_STOP_A("returned out of a loop we started tracing");
9784 // LeaveFrame gets called after the interpreter popped the frame and
9785 // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
9786 atoms
= FrameAtomBase(cx
, cx
->fp
);
9787 set(&stackval(-1), rval_ins
, true);
9788 return ARECORD_CONTINUE
;
9791 JS_REQUIRES_STACK AbortableRecordingStatus
9792 TraceRecorder::record_JSOP_PUSH()
9794 stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID
)));
9795 return ARECORD_CONTINUE
;
9798 JS_REQUIRES_STACK AbortableRecordingStatus
9799 TraceRecorder::record_JSOP_POPV()
9801 jsval
& rval
= stackval(-1);
9802 LIns
*rval_ins
= box_jsval(rval
, get(&rval
));
9804 // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
9805 // frame because POPV appears only in global and eval code and we don't
9806 // trace JSOP_EVAL or leaving the frame where tracing started.
9807 LIns
*fp_ins
= lir
->insLoad(LIR_ldp
, cx_ins
, offsetof(JSContext
, fp
));
9808 lir
->insStorei(rval_ins
, fp_ins
, offsetof(JSStackFrame
, rval
));
9809 return ARECORD_CONTINUE
;
9812 JS_REQUIRES_STACK AbortableRecordingStatus
9813 TraceRecorder::record_JSOP_ENTERWITH()
9815 return ARECORD_STOP
;
9818 JS_REQUIRES_STACK AbortableRecordingStatus
9819 TraceRecorder::record_JSOP_LEAVEWITH()
9821 return ARECORD_STOP
;
9824 JS_REQUIRES_STACK AbortableRecordingStatus
9825 TraceRecorder::record_JSOP_RETURN()
9827 /* A return from callDepth 0 terminates the current loop, except for recursion. */
9828 if (callDepth
== 0) {
9829 if (IsTraceableRecursion(cx
) && treeInfo
->recursion
!= Recursion_Disallowed
&&
9830 treeInfo
->script
== cx
->fp
->script
) {
9831 return InjectStatus(upRecursion());
9833 AUDIT(returnLoopExits
);
9840 /* If we inlined this function call, make the return value available to the caller code. */
9841 jsval
& rval
= stackval(-1);
9842 JSStackFrame
*fp
= cx
->fp
;
9843 if ((cx
->fp
->flags
& JSFRAME_CONSTRUCTING
) && JSVAL_IS_PRIMITIVE(rval
)) {
9844 JS_ASSERT(fp
->thisv
== fp
->argv
[-1]);
9845 rval_ins
= get(&fp
->argv
[-1]);
9847 rval_ins
= get(&rval
);
9849 debug_only_printf(LC_TMTracer
,
9850 "returning from %s\n",
9851 js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
));
9852 clearFrameSlotsFromCache();
9854 return ARECORD_CONTINUE
;
9857 JS_REQUIRES_STACK AbortableRecordingStatus
9858 TraceRecorder::record_JSOP_GOTO()
9861 * If we hit a break or a continue to an outer loop, end the loop and
9862 * generate an always-taken loop exit guard. For other downward gotos
9863 * (like if/else) continue recording.
9865 jssrcnote
* sn
= js_GetSrcNote(cx
->fp
->script
, cx
->fp
->regs
->pc
);
9867 if (sn
&& (SN_TYPE(sn
) == SRC_BREAK
|| SN_TYPE(sn
) == SRC_CONT2LABEL
)) {
9868 AUDIT(breakLoopExits
);
9871 return ARECORD_CONTINUE
;
9874 JS_REQUIRES_STACK AbortableRecordingStatus
9875 TraceRecorder::record_JSOP_IFEQ()
9877 trackCfgMerges(cx
->fp
->regs
->pc
);
9881 JS_REQUIRES_STACK AbortableRecordingStatus
9882 TraceRecorder::record_JSOP_IFNE()
9888 TraceRecorder::newArguments()
9890 LIns
* global_ins
= INS_CONSTOBJ(globalObj
);
9891 LIns
* argc_ins
= INS_CONST(cx
->fp
->argc
);
9892 LIns
* callee_ins
= get(&cx
->fp
->argv
[-2]);
9893 LIns
* argv_ins
= cx
->fp
->argc
9894 ? lir
->ins2(LIR_piadd
, lirbuf
->sp
,
9895 lir
->insImmWord(-treeInfo
->nativeStackBase
+ nativeStackOffset(&cx
->fp
->argv
[0])))
9896 : INS_CONSTPTR((void *) 2);
9897 js_ArgsPrivateNative
*apn
= js_ArgsPrivateNative::create(*traceMonitor
->traceAlloc
, cx
->fp
->argc
);
9898 for (uintN i
= 0; i
< cx
->fp
->argc
; ++i
) {
9899 apn
->typemap()[i
] = determineSlotType(&cx
->fp
->argv
[i
]);
9902 LIns
* args
[] = { INS_CONSTPTR(apn
), argv_ins
, callee_ins
, argc_ins
, global_ins
, cx_ins
};
9903 LIns
* call_ins
= lir
->insCall(&js_Arguments_ci
, args
);
9904 guard(false, lir
->ins_peq0(call_ins
), OOM_EXIT
);
9908 JS_REQUIRES_STACK AbortableRecordingStatus
9909 TraceRecorder::record_JSOP_ARGUMENTS()
9911 if (cx
->fp
->flags
& JSFRAME_OVERRIDE_ARGS
)
9912 RETURN_STOP_A("Can't trace |arguments| if |arguments| is assigned to");
9914 LIns
* a_ins
= get(&cx
->fp
->argsobj
);
9916 if (a_ins
->opcode() == LIR_int
) {
9917 // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
9918 args_ins
= newArguments();
9920 // Generate LIR to create arguments only if it has not already been created.
9922 LIns
* mem_ins
= lir
->insAlloc(sizeof(jsval
));
9924 LIns
* br1
= lir
->insBranch(LIR_jt
, lir
->ins_peq0(a_ins
), NULL
);
9925 lir
->insStorei(a_ins
, mem_ins
, 0);
9926 LIns
* br2
= lir
->insBranch(LIR_j
, NULL
, NULL
);
9928 LIns
* label1
= lir
->ins0(LIR_label
);
9929 br1
->setTarget(label1
);
9931 LIns
* call_ins
= newArguments();
9932 lir
->insStorei(call_ins
, mem_ins
, 0);
9934 LIns
* label2
= lir
->ins0(LIR_label
);
9935 br2
->setTarget(label2
);
9937 args_ins
= lir
->insLoad(LIR_ldp
, mem_ins
, 0);
9941 set(&cx
->fp
->argsobj
, args_ins
);
9942 return ARECORD_CONTINUE
;
9945 JS_REQUIRES_STACK AbortableRecordingStatus
9946 TraceRecorder::record_JSOP_DUP()
9948 stack(0, get(&stackval(-1)));
9949 return ARECORD_CONTINUE
;
9952 JS_REQUIRES_STACK AbortableRecordingStatus
9953 TraceRecorder::record_JSOP_DUP2()
9955 stack(0, get(&stackval(-2)));
9956 stack(1, get(&stackval(-1)));
9957 return ARECORD_CONTINUE
;
9960 JS_REQUIRES_STACK AbortableRecordingStatus
9961 TraceRecorder::record_JSOP_SWAP()
9963 jsval
& l
= stackval(-2);
9964 jsval
& r
= stackval(-1);
9965 LIns
* l_ins
= get(&l
);
9966 LIns
* r_ins
= get(&r
);
9969 return ARECORD_CONTINUE
;
9972 JS_REQUIRES_STACK AbortableRecordingStatus
9973 TraceRecorder::record_JSOP_PICK()
9975 jsval
* sp
= cx
->fp
->regs
->sp
;
9976 jsint n
= cx
->fp
->regs
->pc
[1];
9977 JS_ASSERT(sp
- (n
+1) >= StackBase(cx
->fp
));
9978 LIns
* top
= get(sp
- (n
+1));
9979 for (jsint i
= 0; i
< n
; ++i
)
9980 set(sp
- (n
+1) + i
, get(sp
- n
+ i
));
9982 return ARECORD_CONTINUE
;
9985 JS_REQUIRES_STACK AbortableRecordingStatus
9986 TraceRecorder::record_JSOP_SETCONST()
9988 return ARECORD_STOP
;
9991 JS_REQUIRES_STACK AbortableRecordingStatus
9992 TraceRecorder::record_JSOP_BITOR()
9994 return InjectStatus(binary(LIR_or
));
9997 JS_REQUIRES_STACK AbortableRecordingStatus
9998 TraceRecorder::record_JSOP_BITXOR()
10000 return InjectStatus(binary(LIR_xor
));
10003 JS_REQUIRES_STACK AbortableRecordingStatus
10004 TraceRecorder::record_JSOP_BITAND()
10006 return InjectStatus(binary(LIR_and
));
10009 JS_REQUIRES_STACK AbortableRecordingStatus
10010 TraceRecorder::record_JSOP_EQ()
10012 return equality(false, true);
10015 JS_REQUIRES_STACK AbortableRecordingStatus
10016 TraceRecorder::record_JSOP_NE()
10018 return equality(true, true);
10021 JS_REQUIRES_STACK AbortableRecordingStatus
10022 TraceRecorder::record_JSOP_LT()
10024 return relational(LIR_flt
, true);
10027 JS_REQUIRES_STACK AbortableRecordingStatus
10028 TraceRecorder::record_JSOP_LE()
10030 return relational(LIR_fle
, true);
10033 JS_REQUIRES_STACK AbortableRecordingStatus
10034 TraceRecorder::record_JSOP_GT()
10036 return relational(LIR_fgt
, true);
10039 JS_REQUIRES_STACK AbortableRecordingStatus
10040 TraceRecorder::record_JSOP_GE()
10042 return relational(LIR_fge
, true);
10045 JS_REQUIRES_STACK AbortableRecordingStatus
10046 TraceRecorder::record_JSOP_LSH()
10048 return InjectStatus(binary(LIR_lsh
));
10051 JS_REQUIRES_STACK AbortableRecordingStatus
10052 TraceRecorder::record_JSOP_RSH()
10054 return InjectStatus(binary(LIR_rsh
));
10057 JS_REQUIRES_STACK AbortableRecordingStatus
10058 TraceRecorder::record_JSOP_URSH()
10060 return InjectStatus(binary(LIR_ush
));
10063 JS_REQUIRES_STACK AbortableRecordingStatus
10064 TraceRecorder::record_JSOP_ADD()
10066 jsval
& r
= stackval(-1);
10067 jsval
& l
= stackval(-2);
10069 if (!JSVAL_IS_PRIMITIVE(l
)) {
10070 RETURN_IF_XML_A(l
);
10071 if (!JSVAL_IS_PRIMITIVE(r
)) {
10072 RETURN_IF_XML_A(r
);
10073 return InjectStatus(call_imacro(add_imacros
.obj_obj
));
10075 return InjectStatus(call_imacro(add_imacros
.obj_any
));
10077 if (!JSVAL_IS_PRIMITIVE(r
)) {
10078 RETURN_IF_XML_A(r
);
10079 return InjectStatus(call_imacro(add_imacros
.any_obj
));
10082 if (JSVAL_IS_STRING(l
) || JSVAL_IS_STRING(r
)) {
10083 LIns
* args
[] = { stringify(r
), stringify(l
), cx_ins
};
10084 LIns
* concat
= lir
->insCall(&js_ConcatStrings_ci
, args
);
10085 guard(false, lir
->ins_peq0(concat
), OOM_EXIT
);
10087 return ARECORD_CONTINUE
;
10090 return InjectStatus(binary(LIR_fadd
));
10093 JS_REQUIRES_STACK AbortableRecordingStatus
10094 TraceRecorder::record_JSOP_SUB()
10096 return InjectStatus(binary(LIR_fsub
));
10099 JS_REQUIRES_STACK AbortableRecordingStatus
10100 TraceRecorder::record_JSOP_MUL()
10102 return InjectStatus(binary(LIR_fmul
));
10105 JS_REQUIRES_STACK AbortableRecordingStatus
10106 TraceRecorder::record_JSOP_DIV()
10108 return InjectStatus(binary(LIR_fdiv
));
10111 JS_REQUIRES_STACK AbortableRecordingStatus
10112 TraceRecorder::record_JSOP_MOD()
10114 return InjectStatus(binary(LIR_fmod
));
10117 JS_REQUIRES_STACK AbortableRecordingStatus
10118 TraceRecorder::record_JSOP_NOT()
10120 jsval
& v
= stackval(-1);
10121 if (JSVAL_IS_SPECIAL(v
)) {
10122 set(&v
, lir
->ins_eq0(lir
->ins2i(LIR_eq
, get(&v
), 1)));
10123 return ARECORD_CONTINUE
;
10126 LIns
* v_ins
= get(&v
);
10127 set(&v
, lir
->ins2(LIR_or
, lir
->ins2(LIR_feq
, v_ins
, lir
->insImmf(0)),
10128 lir
->ins_eq0(lir
->ins2(LIR_feq
, v_ins
, v_ins
))));
10129 return ARECORD_CONTINUE
;
10131 if (JSVAL_TAG(v
) == JSVAL_OBJECT
) {
10132 set(&v
, lir
->ins_peq0(get(&v
)));
10133 return ARECORD_CONTINUE
;
10135 JS_ASSERT(JSVAL_IS_STRING(v
));
10136 set(&v
, lir
->ins_peq0(lir
->ins2(LIR_piand
,
10137 lir
->insLoad(LIR_ldp
, get(&v
), (int)offsetof(JSString
, mLength
)),
10138 INS_CONSTWORD(JSString::LENGTH_MASK
))));
10139 return ARECORD_CONTINUE
;
/* JSOP_BITNOT: bitwise NOT on the int32-converted operand, via unary(). */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BITNOT()
{
    return InjectStatus(unary(LIR_not));
}
10148 JS_REQUIRES_STACK AbortableRecordingStatus
10149 TraceRecorder::record_JSOP_NEG()
10151 jsval
& v
= stackval(-1);
10153 if (!JSVAL_IS_PRIMITIVE(v
)) {
10154 RETURN_IF_XML_A(v
);
10155 return InjectStatus(call_imacro(unary_imacros
.sign
));
10162 * If we're a promoted integer, we have to watch out for 0s since -0 is
10163 * a double. Only follow this path if we're not an integer that's 0 and
10164 * we're not a double that's zero.
10166 if (!oracle
.isInstructionUndemotable(cx
->fp
->regs
->pc
) &&
10168 (!JSVAL_IS_INT(v
) || JSVAL_TO_INT(v
) != 0) &&
10169 (!JSVAL_IS_DOUBLE(v
) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v
))) &&
10170 -asNumber(v
) == (int)-asNumber(v
)) {
10171 a
= lir
->ins1(LIR_neg
, ::demote(lir
, a
));
10172 if (!a
->isconst()) {
10173 VMSideExit
* exit
= snapshot(OVERFLOW_EXIT
);
10174 guard(false, lir
->ins1(LIR_ov
, a
), exit
);
10175 guard(false, lir
->ins2i(LIR_eq
, a
, 0), exit
);
10177 a
= lir
->ins1(LIR_i2f
, a
);
10179 a
= lir
->ins1(LIR_fneg
, a
);
10183 return ARECORD_CONTINUE
;
10186 if (JSVAL_IS_NULL(v
)) {
10187 set(&v
, lir
->insImmf(-0.0));
10188 return ARECORD_CONTINUE
;
10191 JS_ASSERT(JSVAL_TAG(v
) == JSVAL_STRING
|| JSVAL_IS_SPECIAL(v
));
10193 LIns
* args
[] = { get(&v
), cx_ins
};
10194 set(&v
, lir
->ins1(LIR_fneg
,
10195 lir
->insCall(JSVAL_IS_STRING(v
)
10196 ? &js_StringToNumber_ci
10197 : &js_BooleanOrUndefinedToNumber_ci
,
10199 return ARECORD_CONTINUE
;
10202 JS_REQUIRES_STACK AbortableRecordingStatus
10203 TraceRecorder::record_JSOP_POS()
10205 jsval
& v
= stackval(-1);
10207 if (!JSVAL_IS_PRIMITIVE(v
)) {
10208 RETURN_IF_XML_A(v
);
10209 return InjectStatus(call_imacro(unary_imacros
.sign
));
10213 return ARECORD_CONTINUE
;
10215 if (JSVAL_IS_NULL(v
)) {
10216 set(&v
, lir
->insImmf(0));
10217 return ARECORD_CONTINUE
;
10220 JS_ASSERT(JSVAL_TAG(v
) == JSVAL_STRING
|| JSVAL_IS_SPECIAL(v
));
10222 LIns
* args
[] = { get(&v
), cx_ins
};
10223 set(&v
, lir
->insCall(JSVAL_IS_STRING(v
)
10224 ? &js_StringToNumber_ci
10225 : &js_BooleanOrUndefinedToNumber_ci
,
10227 return ARECORD_CONTINUE
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PRIMTOP()
{
    // Either this opcode does nothing or we couldn't have traced here, because
    // we'd have thrown an exception -- so do nothing if we actually hit this.
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OBJTOP()
{
    jsval& v = stackval(-1);
    RETURN_IF_XML_A(v);  // XML values are not traceable; abort recording.
    return ARECORD_CONTINUE;
}
10247 TraceRecorder::getClassPrototype(JSObject
* ctor
, LIns
*& proto_ins
)
10250 JSTraceMonitor
&localtm
= JS_TRACE_MONITOR(cx
);
10254 if (!ctor
->getProperty(cx
, ATOM_TO_JSID(cx
->runtime
->atomState
.classPrototypeAtom
), &pval
))
10255 RETURN_ERROR("error getting prototype from constructor");
10257 /* For functions, this should not reenter */
10258 JS_ASSERT(localtm
.recorder
);
10260 if (JSVAL_TAG(pval
) != JSVAL_OBJECT
)
10261 RETURN_STOP("got primitive prototype from constructor");
10265 ok
= JS_GetPropertyAttributes(cx
, ctor
, js_class_prototype_str
, &attrs
, &found
);
10268 JS_ASSERT((~attrs
& (JSPROP_READONLY
| JSPROP_PERMANENT
)) == 0);
10270 proto_ins
= INS_CONSTOBJ(JSVAL_TO_OBJECT(pval
));
10271 return RECORD_CONTINUE
;
10275 TraceRecorder::getClassPrototype(JSProtoKey key
, LIns
*& proto_ins
)
10278 JSTraceMonitor
&localtm
= JS_TRACE_MONITOR(cx
);
10282 if (!js_GetClassPrototype(cx
, globalObj
, INT_TO_JSID(key
), &proto
))
10283 RETURN_ERROR("error in js_GetClassPrototype");
10285 /* For functions, this should not reenter */
10286 JS_ASSERT(localtm
.recorder
);
10288 proto_ins
= INS_CONSTOBJ(proto
);
10289 return RECORD_CONTINUE
;
10292 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)
10295 TraceRecorder::newString(JSObject
* ctor
, uint32 argc
, jsval
* argv
, jsval
* rval
)
10297 JS_ASSERT(argc
== 1);
10299 if (!JSVAL_IS_PRIMITIVE(argv
[0])) {
10300 RETURN_IF_XML(argv
[0]);
10301 return call_imacro(new_imacros
.String
);
10305 CHECK_STATUS(getClassPrototype(ctor
, proto_ins
));
10307 LIns
* args
[] = { stringify(argv
[0]), proto_ins
, cx_ins
};
10308 LIns
* obj_ins
= lir
->insCall(&js_String_tn_ci
, args
);
10309 guard(false, lir
->ins_peq0(obj_ins
), OOM_EXIT
);
10311 set(rval
, obj_ins
);
10312 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
10313 return RECORD_CONTINUE
;
10317 TraceRecorder::newArray(JSObject
* ctor
, uint32 argc
, jsval
* argv
, jsval
* rval
)
10320 CHECK_STATUS(getClassPrototype(ctor
, proto_ins
));
10323 if (argc
== 0 || (argc
== 1 && JSVAL_IS_NUMBER(argv
[0]))) {
10324 // arr_ins = js_NewEmptyArray(cx, Array.prototype)
10325 LIns
*args
[] = { proto_ins
, cx_ins
};
10326 arr_ins
= lir
->insCall(&js_NewEmptyArray_ci
, args
);
10327 guard(false, lir
->ins_peq0(arr_ins
), OOM_EXIT
);
10329 // array_ins.fslots[JSSLOT_ARRAY_LENGTH] = length
10330 lir
->insStorei(f2i(get(argv
)), // FIXME: is this 64-bit safe?
10332 offsetof(JSObject
, fslots
) + JSSLOT_ARRAY_LENGTH
* sizeof(jsval
));
10335 // arr_ins = js_NewArrayWithSlots(cx, Array.prototype, argc)
10336 LIns
*args
[] = { INS_CONST(argc
), proto_ins
, cx_ins
};
10337 arr_ins
= lir
->insCall(&js_NewArrayWithSlots_ci
, args
);
10338 guard(false, lir
->ins_peq0(arr_ins
), OOM_EXIT
);
10340 // arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
10341 LIns
*dslots_ins
= NULL
;
10342 for (uint32 i
= 0; i
< argc
&& !outOfMemory(); i
++) {
10343 LIns
*elt_ins
= box_jsval(argv
[i
], get(&argv
[i
]));
10344 stobj_set_dslot(arr_ins
, i
, dslots_ins
, elt_ins
);
10348 stobj_set_fslot(arr_ins
, JSSLOT_ARRAY_COUNT
, INS_CONST(argc
));
10351 set(rval
, arr_ins
);
10352 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
10353 return RECORD_CONTINUE
;
10356 JS_REQUIRES_STACK
void
10357 TraceRecorder::propagateFailureToBuiltinStatus(LIns
* ok_ins
, LIns
*& status_ins
)
10360 * Check the boolean return value (ok_ins) of a native JSNative,
10361 * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
10362 * JSBUILTIN_ERROR bit of cx->builtinStatus.
10364 * If the return value (ok_ins) is true, status' == status. Otherwise
10365 * status' = status | JSBUILTIN_ERROR. We calculate (rval&1)^1, which is 1
10366 * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
10367 * of JSBUILTIN_ERROR.
10369 JS_STATIC_ASSERT(((JS_TRUE
& 1) ^ 1) << 1 == 0);
10370 JS_STATIC_ASSERT(((JS_FALSE
& 1) ^ 1) << 1 == JSBUILTIN_ERROR
);
10371 status_ins
= lir
->ins2(LIR_or
,
10373 lir
->ins2i(LIR_lsh
,
10374 lir
->ins2i(LIR_xor
,
10375 lir
->ins2i(LIR_and
, ok_ins
, 1),
10378 lir
->insStorei(status_ins
, lirbuf
->state
, (int) offsetof(InterpState
, builtinStatus
));
10381 JS_REQUIRES_STACK
void
10382 TraceRecorder::emitNativePropertyOp(JSScope
* scope
, JSScopeProperty
* sprop
, LIns
* obj_ins
,
10383 bool setflag
, LIns
* boxed_ins
)
10385 JS_ASSERT(!(sprop
->attrs
& (setflag
? JSPROP_SETTER
: JSPROP_GETTER
)));
10386 JS_ASSERT(setflag
? !SPROP_HAS_STUB_SETTER(sprop
) : !SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop
));
10388 enterDeepBailCall();
10390 // It is unsafe to pass the address of an object slot as the out parameter,
10391 // because the getter or setter could end up resizing the object's dslots.
10392 // Instead, use a word of stack and root it in nativeVp.
10393 LIns
* vp_ins
= lir
->insAlloc(sizeof(jsval
));
10394 lir
->insStorei(vp_ins
, lirbuf
->state
, offsetof(InterpState
, nativeVp
));
10395 lir
->insStorei(INS_CONST(1), lirbuf
->state
, offsetof(InterpState
, nativeVpLen
));
10397 lir
->insStorei(boxed_ins
, vp_ins
, 0);
10399 CallInfo
* ci
= new (*traceMonitor
->traceAlloc
) CallInfo();
10400 ci
->_address
= uintptr_t(setflag
? sprop
->setter
: sprop
->getter
);
10401 ci
->_argtypes
= ARGSIZE_I
<< (0*ARGSIZE_SHIFT
) |
10402 ARGSIZE_P
<< (1*ARGSIZE_SHIFT
) |
10403 ARGSIZE_P
<< (2*ARGSIZE_SHIFT
) |
10404 ARGSIZE_P
<< (3*ARGSIZE_SHIFT
) |
10405 ARGSIZE_P
<< (4*ARGSIZE_SHIFT
);
10406 ci
->_cse
= ci
->_fold
= 0;
10407 ci
->_abi
= ABI_CDECL
;
10409 ci
->_name
= "JSPropertyOp";
10411 LIns
* args
[] = { vp_ins
, INS_CONSTWORD(SPROP_USERID(sprop
)), obj_ins
, cx_ins
};
10412 LIns
* ok_ins
= lir
->insCall(ci
, args
);
10414 // Cleanup. Immediately clear nativeVp before we might deep bail.
10415 lir
->insStorei(INS_NULL(), lirbuf
->state
, offsetof(InterpState
, nativeVp
));
10416 leaveDeepBailCall();
10418 // Guard that the call succeeded and builtinStatus is still 0.
10419 // If the native op succeeds but we deep-bail here, the result value is
10420 // lost! Therefore this can only be used for setters of shared properties.
10421 // In that case we ignore the result value anyway.
10422 LIns
* status_ins
= lir
->insLoad(LIR_ld
,
10424 (int) offsetof(InterpState
, builtinStatus
));
10425 propagateFailureToBuiltinStatus(ok_ins
, status_ins
);
10426 guard(true, lir
->ins_eq0(status_ins
), STATUS_EXIT
);
10428 // Re-load the value--but this is currently unused, so commented out.
10429 //boxed_ins = lir->insLoad(LIR_ldp, vp_ins, 0);
10432 JS_REQUIRES_STACK RecordingStatus
10433 TraceRecorder::emitNativeCall(JSSpecializedNative
* sn
, uintN argc
, LIns
* args
[], bool rooted
)
10435 bool constructing
= !!(sn
->flags
& JSTN_CONSTRUCTOR
);
10437 if (JSTN_ERRTYPE(sn
) == FAIL_STATUS
) {
10438 // This needs to capture the pre-call state of the stack. So do not set
10439 // pendingSpecializedNative before taking this snapshot.
10440 JS_ASSERT(!pendingSpecializedNative
);
10442 // Take snapshot for js_DeepBail and store it in cx->bailExit.
10443 // If we are calling a slow native, add information to the side exit
10444 // for SynthesizeSlowNativeFrame.
10445 VMSideExit
* exit
= enterDeepBailCall();
10446 JSObject
* funobj
= JSVAL_TO_OBJECT(stackval(0 - (2 + argc
)));
10447 if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx
, funobj
))) {
10448 exit
->setNativeCallee(funobj
, constructing
);
10449 treeInfo
->gcthings
.addUnique(OBJECT_TO_JSVAL(funobj
));
10453 LIns
* res_ins
= lir
->insCall(sn
->builtin
, args
);
10455 // Immediately unroot the vp as soon we return since we might deep bail next.
10457 lir
->insStorei(INS_NULL(), lirbuf
->state
, offsetof(InterpState
, nativeVp
));
10459 rval_ins
= res_ins
;
10460 switch (JSTN_ERRTYPE(sn
)) {
10462 guard(false, lir
->ins_peq0(res_ins
), OOM_EXIT
);
10465 res_ins
= lir
->ins1(LIR_i2f
, res_ins
);
10466 guard(false, lir
->ins2(LIR_flt
, res_ins
, lir
->insImmf(0)), OOM_EXIT
);
10469 guard(false, lir
->ins2i(LIR_eq
, res_ins
, JSVAL_TO_SPECIAL(JSVAL_VOID
)), OOM_EXIT
);
10472 guard(false, lir
->ins2(LIR_peq
, res_ins
, INS_CONSTWORD(JSVAL_ERROR_COOKIE
)), OOM_EXIT
);
10477 set(&stackval(0 - (2 + argc
)), res_ins
);
10480 * The return value will be processed by NativeCallComplete since
10481 * we have to know the actual return value type for calls that return
10482 * jsval (like Array_p_pop).
10484 pendingSpecializedNative
= sn
;
10486 return RECORD_CONTINUE
;
10490 * Check whether we have a specialized implementation for this native
10493 JS_REQUIRES_STACK RecordingStatus
10494 TraceRecorder::callSpecializedNative(JSNativeTraceInfo
*trcinfo
, uintN argc
,
10497 JSStackFrame
* fp
= cx
->fp
;
10498 jsbytecode
*pc
= fp
->regs
->pc
;
10500 jsval
& fval
= stackval(0 - (2 + argc
));
10501 jsval
& tval
= stackval(0 - (1 + argc
));
10503 LIns
* this_ins
= get(&tval
);
10505 LIns
* args
[nanojit::MAXARGS
];
10506 JSSpecializedNative
*sn
= trcinfo
->specializations
;
10509 if (((sn
->flags
& JSTN_CONSTRUCTOR
) != 0) != constructing
)
10512 uintN knownargc
= strlen(sn
->argtypes
);
10513 if (argc
!= knownargc
)
10516 intN prefixc
= strlen(sn
->prefix
);
10517 JS_ASSERT(prefixc
<= 3);
10518 LIns
** argp
= &args
[argc
+ prefixc
- 1];
10522 memset(args
, 0xCD, sizeof(args
));
10526 for (i
= prefixc
; i
--; ) {
10527 argtype
= sn
->prefix
[i
];
10528 if (argtype
== 'C') {
10530 } else if (argtype
== 'T') { /* this, as an object */
10531 if (JSVAL_IS_PRIMITIVE(tval
))
10532 goto next_specialization
;
10534 } else if (argtype
== 'S') { /* this, as a string */
10535 if (!JSVAL_IS_STRING(tval
))
10536 goto next_specialization
;
10538 } else if (argtype
== 'f') {
10539 *argp
= INS_CONSTOBJ(JSVAL_TO_OBJECT(fval
));
10540 } else if (argtype
== 'p') {
10541 CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval
), *argp
));
10542 } else if (argtype
== 'R') {
10543 *argp
= INS_CONSTPTR(cx
->runtime
);
10544 } else if (argtype
== 'P') {
10545 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
10546 // JSOP_GETELEM imacro (bug 476559).
10547 if ((*pc
== JSOP_CALL
) &&
10548 fp
->imacpc
&& *fp
->imacpc
== JSOP_GETELEM
)
10549 *argp
= INS_CONSTPTR(fp
->imacpc
);
10551 *argp
= INS_CONSTPTR(pc
);
10552 } else if (argtype
== 'D') { /* this, as a number */
10553 if (!isNumber(tval
))
10554 goto next_specialization
;
10557 JS_NOT_REACHED("unknown prefix arg type");
10562 for (i
= knownargc
; i
--; ) {
10563 jsval
& arg
= stackval(0 - (i
+ 1));
10566 argtype
= sn
->argtypes
[i
];
10567 if (argtype
== 'd' || argtype
== 'i') {
10568 if (!isNumber(arg
))
10569 goto next_specialization
;
10570 if (argtype
== 'i')
10571 *argp
= f2i(*argp
);
10572 } else if (argtype
== 'o') {
10573 if (JSVAL_IS_PRIMITIVE(arg
))
10574 goto next_specialization
;
10575 } else if (argtype
== 's') {
10576 if (!JSVAL_IS_STRING(arg
))
10577 goto next_specialization
;
10578 } else if (argtype
== 'r') {
10579 if (!VALUE_IS_REGEXP(cx
, arg
))
10580 goto next_specialization
;
10581 } else if (argtype
== 'f') {
10582 if (!VALUE_IS_FUNCTION(cx
, arg
))
10583 goto next_specialization
;
10584 } else if (argtype
== 'v') {
10585 *argp
= box_jsval(arg
, *argp
);
10587 goto next_specialization
;
10592 JS_ASSERT(args
[0] != (LIns
*)0xcdcdcdcd);
10594 return emitNativeCall(sn
, argc
, args
, false);
10596 next_specialization
:;
10597 } while ((sn
++)->flags
& JSTN_MORE
);
10599 return RECORD_STOP
;
10602 JS_REQUIRES_STACK RecordingStatus
10603 TraceRecorder::callNative(uintN argc
, JSOp mode
)
10607 JS_ASSERT(mode
== JSOP_CALL
|| mode
== JSOP_NEW
|| mode
== JSOP_APPLY
);
10609 jsval
* vp
= &stackval(0 - (2 + argc
));
10610 JSObject
* funobj
= JSVAL_TO_OBJECT(vp
[0]);
10611 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, funobj
);
10612 JSFastNative native
= (JSFastNative
)fun
->u
.n
.native
;
10616 if (isNumber(vp
[2]) &&
10617 (native
== js_math_ceil
|| native
== js_math_floor
|| native
== js_math_round
)) {
10618 LIns
* a
= get(&vp
[2]);
10619 if (isPromote(a
)) {
10621 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
10622 return RECORD_CONTINUE
;
10628 if (isNumber(vp
[2]) && isNumber(vp
[3]) &&
10629 (native
== js_math_min
|| native
== js_math_max
)) {
10630 LIns
* a
= get(&vp
[2]);
10631 LIns
* b
= get(&vp
[3]);
10632 if (isPromote(a
) && isPromote(b
)) {
10633 a
= ::demote(lir
, a
);
10634 b
= ::demote(lir
, b
);
10637 lir
->ins_choose(lir
->ins2((native
== js_math_min
)
10641 pendingSpecializedNative
= IGNORE_NATIVE_CALL_COMPLETE_CALLBACK
;
10642 return RECORD_CONTINUE
;
10648 if (fun
->flags
& JSFUN_TRCINFO
) {
10649 JSNativeTraceInfo
*trcinfo
= FUN_TRCINFO(fun
);
10650 JS_ASSERT(trcinfo
&& (JSFastNative
)fun
->u
.n
.native
== trcinfo
->native
);
10652 /* Try to call a type specialized version of the native. */
10653 if (trcinfo
->specializations
) {
10654 RecordingStatus status
= callSpecializedNative(trcinfo
, argc
, mode
== JSOP_NEW
);
10655 if (status
!= RECORD_STOP
)
10660 if (native
== js_fun_apply
|| native
== js_fun_call
)
10661 RETURN_STOP("trying to call native apply or call");
10663 // Allocate the vp vector and emit code to root it.
10664 uintN vplen
= 2 + JS_MAX(argc
, unsigned(FUN_MINARGS(fun
))) + fun
->u
.n
.extra
;
10665 if (!(fun
->flags
& JSFUN_FAST_NATIVE
))
10666 vplen
++; // slow native return value slot
10667 LIns
* invokevp_ins
= lir
->insAlloc(vplen
* sizeof(jsval
));
10669 // vp[0] is the callee.
10670 lir
->insStorei(INS_CONSTWORD(OBJECT_TO_JSVAL(funobj
)), invokevp_ins
, 0);
10672 // Calculate |this|.
10674 if (mode
== JSOP_NEW
) {
10675 JSClass
* clasp
= fun
->u
.n
.clasp
;
10676 JS_ASSERT(clasp
!= &js_SlowArrayClass
);
10678 clasp
= &js_ObjectClass
;
10679 JS_ASSERT(((jsuword
) clasp
& 3) == 0);
10681 // Abort on |new Function|. js_NewInstance would allocate a regular-
10682 // sized JSObject, not a Function-sized one. (The Function ctor would
10683 // deep-bail anyway but let's not go there.)
10684 if (clasp
== &js_FunctionClass
)
10685 RETURN_STOP("new Function");
10687 if (clasp
->getObjectOps
)
10688 RETURN_STOP("new with non-native ops");
10690 args
[0] = INS_CONSTOBJ(funobj
);
10691 args
[1] = INS_CONSTPTR(clasp
);
10693 newobj_ins
= lir
->insCall(&js_NewInstance_ci
, args
);
10694 guard(false, lir
->ins_peq0(newobj_ins
), OOM_EXIT
);
10695 this_ins
= newobj_ins
; /* boxing an object is a no-op */
10696 } else if (JSFUN_BOUND_METHOD_TEST(fun
->flags
)) {
10697 this_ins
= INS_CONSTWORD(OBJECT_TO_JSVAL(OBJ_GET_PARENT(cx
, funobj
)));
10699 this_ins
= get(&vp
[1]);
10702 * For fast natives, 'null' or primitives are fine as as 'this' value.
10703 * For slow natives we have to ensure the object is substituted for the
10704 * appropriate global object or boxed object value. JSOP_NEW allocates its
10705 * own object so it's guaranteed to have a valid 'this' value.
10707 if (!(fun
->flags
& JSFUN_FAST_NATIVE
)) {
10708 if (JSVAL_IS_NULL(vp
[1])) {
10709 JSObject
* thisObj
= js_ComputeThis(cx
, JS_FALSE
, vp
+ 2);
10711 RETURN_ERROR("error in js_ComputeGlobalThis");
10712 this_ins
= INS_CONSTOBJ(thisObj
);
10713 } else if (!JSVAL_IS_OBJECT(vp
[1])) {
10714 RETURN_STOP("slow native(primitive, args)");
10716 if (guardClass(JSVAL_TO_OBJECT(vp
[1]), this_ins
, &js_WithClass
, snapshot(MISMATCH_EXIT
)))
10717 RETURN_STOP("can't trace slow native invocation on With object");
10719 this_ins
= lir
->ins_choose(lir
->ins_peq0(stobj_get_parent(this_ins
)),
10720 INS_CONSTOBJ(globalObj
),
10724 this_ins
= box_jsval(vp
[1], this_ins
);
10726 lir
->insStorei(this_ins
, invokevp_ins
, 1 * sizeof(jsval
));
10729 for (uintN n
= 2; n
< 2 + argc
; n
++) {
10730 LIns
* i
= box_jsval(vp
[n
], get(&vp
[n
]));
10731 lir
->insStorei(i
, invokevp_ins
, n
* sizeof(jsval
));
10733 // For a very long argument list we might run out of LIR space, so
10734 // check inside the loop.
10736 RETURN_STOP("out of memory in argument list");
10739 // Populate extra slots, including the return value slot for a slow native.
10740 if (2 + argc
< vplen
) {
10741 LIns
* undef_ins
= INS_CONSTWORD(JSVAL_VOID
);
10742 for (uintN n
= 2 + argc
; n
< vplen
; n
++) {
10743 lir
->insStorei(undef_ins
, invokevp_ins
, n
* sizeof(jsval
));
10746 RETURN_STOP("out of memory in extra slots");
10750 // Set up arguments for the JSNative or JSFastNative.
10752 if (fun
->flags
& JSFUN_FAST_NATIVE
) {
10753 if (mode
== JSOP_NEW
)
10754 RETURN_STOP("untraceable fast native constructor");
10755 native_rval_ins
= invokevp_ins
;
10756 args
[0] = invokevp_ins
;
10757 args
[1] = lir
->insImm(argc
);
10759 types
= ARGSIZE_I
<< (0*ARGSIZE_SHIFT
) |
10760 ARGSIZE_P
<< (1*ARGSIZE_SHIFT
) |
10761 ARGSIZE_I
<< (2*ARGSIZE_SHIFT
) |
10762 ARGSIZE_P
<< (3*ARGSIZE_SHIFT
);
10764 int32_t offset
= (vplen
- 1) * sizeof(jsval
);
10765 native_rval_ins
= lir
->ins2(LIR_piadd
, invokevp_ins
, INS_CONSTWORD(offset
));
10766 args
[0] = native_rval_ins
;
10767 args
[1] = lir
->ins2(LIR_piadd
, invokevp_ins
, INS_CONSTWORD(2 * sizeof(jsval
)));
10768 args
[2] = lir
->insImm(argc
);
10769 args
[3] = this_ins
;
10771 types
= ARGSIZE_I
<< (0*ARGSIZE_SHIFT
) |
10772 ARGSIZE_P
<< (1*ARGSIZE_SHIFT
) |
10773 ARGSIZE_P
<< (2*ARGSIZE_SHIFT
) |
10774 ARGSIZE_I
<< (3*ARGSIZE_SHIFT
) |
10775 ARGSIZE_P
<< (4*ARGSIZE_SHIFT
) |
10776 ARGSIZE_P
<< (5*ARGSIZE_SHIFT
);
10779 // Generate CallInfo and a JSSpecializedNative structure on the fly.
10780 // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
10781 // record_NativeCallComplete unboxes the result specially.
10783 CallInfo
* ci
= new (*traceMonitor
->traceAlloc
) CallInfo();
10784 ci
->_address
= uintptr_t(fun
->u
.n
.native
);
10785 ci
->_cse
= ci
->_fold
= 0;
10786 ci
->_abi
= ABI_CDECL
;
10787 ci
->_argtypes
= types
;
10789 ci
->_name
= JS_GetFunctionName(fun
);
10792 // Generate a JSSpecializedNative structure on the fly.
10793 generatedSpecializedNative
.builtin
= ci
;
10794 generatedSpecializedNative
.flags
= FAIL_STATUS
| ((mode
== JSOP_NEW
)
10796 : JSTN_UNBOX_AFTER
);
10797 generatedSpecializedNative
.prefix
= NULL
;
10798 generatedSpecializedNative
.argtypes
= NULL
;
10800 // We only have to ensure that the values we wrote into the stack buffer
10801 // are rooted if we actually make it to the call, so only set nativeVp and
10802 // nativeVpLen immediately before emitting the call code. This way we avoid
10803 // leaving trace with a bogus nativeVp because we fall off trace while unboxing
10804 // values into the stack buffer.
10805 lir
->insStorei(INS_CONST(vplen
), lirbuf
->state
, offsetof(InterpState
, nativeVpLen
));
10806 lir
->insStorei(invokevp_ins
, lirbuf
->state
, offsetof(InterpState
, nativeVp
));
10808 // argc is the original argc here. It is used to calculate where to place
10809 // the return value.
10810 return emitNativeCall(&generatedSpecializedNative
, argc
, args
, true);
/*
 * Record a function invocation (JSOP_CALL/JSOP_APPLY/JSOP_NEW). Dispatches to
 * the interpreted-call path, specialized fast paths for the Array/String
 * constructors, or the generic native-call path.
 *
 * argc: argument count from the bytecode.
 * mode: the calling opcode (JSOP_CALL, JSOP_APPLY, or JSOP_NEW).
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::functionCall(uintN argc, JSOp mode)
{
    // Stack layout: callee, |this|, then argc arguments.
    jsval& fval = stackval(0 - (2 + argc));
    JS_ASSERT(&fval >= StackBase(cx->fp));

    if (!VALUE_IS_FUNCTION(cx, fval))
        RETURN_STOP("callee is not a function");

    jsval& tval = stackval(0 - (1 + argc));

    /*
     * If callee is not constant, it's a shapeless call and we have to guard
     * explicitly that we will get this callee again at runtime.
     */
    if (!get(&fval)->isconstp())
        CHECK_STATUS(guardCallee(fval));

    /*
     * Require that the callee be a function object, to avoid guarding on its
     * class here. We know if the callee and this were pushed by JSOP_CALLNAME
     * or JSOP_CALLPROP that callee is a *particular* function, since these hit
     * the property cache and guard on the object (this) in which the callee
     * was found. So it's sufficient to test here that the particular function
     * is interpreted, not guard on that condition.
     *
     * Bytecode sequences that push shapeless callees must guard on the callee
     * class being Function and the function being interpreted.
     */
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));

    if (FUN_INTERPRETED(fun)) {
        if (mode == JSOP_NEW) {
            // Allocate the new |this| object up front and store it in tval.
            LIns* args[] = { get(&fval), INS_CONSTPTR(&js_ObjectClass), cx_ins };
            LIns* tv_ins = lir->insCall(&js_NewInstance_ci, args);
            guard(false, lir->ins_peq0(tv_ins), OOM_EXIT);
            set(&tval, tv_ins);
        }
        return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
    }

    if (FUN_SLOW_NATIVE(fun)) {
        JSNative native = fun->u.n.native;
        jsval* argv = &tval + 1;
        // Specialized recording paths for the Array and String constructors.
        if (native == js_Array)
            return newArray(JSVAL_TO_OBJECT(fval), argc, argv, &fval);
        if (native == js_String && argc == 1) {
            if (mode == JSOP_NEW)
                return newString(JSVAL_TO_OBJECT(fval), 1, argv, &fval);
            if (!JSVAL_IS_PRIMITIVE(argv[0])) {
                RETURN_IF_XML(argv[0]);
                return call_imacro(call_imacros.String);
            }
            // String(primitive) called as a function: just stringify in place.
            set(&fval, stringify(argv[0]));
            pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
            return RECORD_CONTINUE;
        }
    }

    return callNative(argc, mode);
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEW()
{
    // Argument count is encoded in the bytecode immediate.
    uintN argc = GET_ARGC(cx->fp->regs->pc);
    cx->fp->assertValidStackDepth(argc + 2);  // +2 for callee and |this|
    return InjectStatus(functionCall(argc, JSOP_NEW));
}
/* Property deletion by name is not traced; abort recording. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELNAME()
{
    return ARECORD_STOP;
}
/* Property deletion (obj.prop) is not traced; abort recording. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELPROP()
{
    return ARECORD_STOP;
}
/* Element deletion (obj[idx]) is not traced; abort recording. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELELEM()
{
    return ARECORD_STOP;
}
10901 JS_REQUIRES_STACK AbortableRecordingStatus
10902 TraceRecorder::record_JSOP_TYPEOF()
10904 jsval
& r
= stackval(-1);
10906 if (JSVAL_IS_STRING(r
)) {
10907 type
= INS_ATOM(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_STRING
]);
10908 } else if (isNumber(r
)) {
10909 type
= INS_ATOM(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_NUMBER
]);
10910 } else if (VALUE_IS_FUNCTION(cx
, r
)) {
10911 type
= INS_ATOM(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_FUNCTION
]);
10913 LIns
* args
[] = { get(&r
), cx_ins
};
10914 if (JSVAL_IS_SPECIAL(r
)) {
10915 // We specialize identically for boolean and undefined. We must not have a hole here.
10916 // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
10917 JS_ASSERT(r
== JSVAL_TRUE
|| r
== JSVAL_FALSE
|| r
== JSVAL_VOID
);
10918 type
= lir
->insCall(&js_TypeOfBoolean_ci
, args
);
10920 JS_ASSERT(JSVAL_TAG(r
) == JSVAL_OBJECT
);
10921 type
= lir
->insCall(&js_TypeOfObject_ci
, args
);
10925 return ARECORD_CONTINUE
;
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_VOID()
{
    // Replace the top of stack with the unboxed undefined value.
    stack(-1, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)));
    return ARECORD_CONTINUE;
}
10935 JS_REQUIRES_STACK AbortableRecordingStatus
10936 TraceRecorder::record_JSOP_INCNAME()
10941 JS_REQUIRES_STACK AbortableRecordingStatus
10942 TraceRecorder::record_JSOP_INCPROP()
/* Pre-increment of an element: obj[idx] += 1, result is the new value. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCELEM()
{
    return InjectStatus(incElem(1));
}
/* Pre-decrement of a name binding, result is the new value. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECNAME()
{
    return incName(-1);
}
/* Pre-decrement of a property, result is the new value. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECPROP()
{
    return incProp(-1);
}
/* Pre-decrement of an element, result is the new value. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECELEM()
{
    return InjectStatus(incElem(-1));
}
10971 JS_REQUIRES_STACK AbortableRecordingStatus
10972 TraceRecorder::incName(jsint incr
, bool pre
)
10979 CHECK_STATUS_A(name(vp
, v_ins
, nr
));
10980 jsval v
= nr
.tracked
? *vp
: nr
.v
;
10981 CHECK_STATUS_A(incHelper(v
, v_ins
, v_after
, incr
));
10982 LIns
* v_result
= pre
? v_after
: v_ins
;
10985 stack(0, v_result
);
10986 return ARECORD_CONTINUE
;
10989 if (OBJ_GET_CLASS(cx
, nr
.obj
) != &js_CallClass
)
10990 RETURN_STOP_A("incName on unsupported object class");
10992 CHECK_STATUS_A(setCallProp(nr
.obj
, nr
.obj_ins
, nr
.sprop
, v_after
, v
));
10993 stack(0, v_result
);
10994 return ARECORD_CONTINUE
;
/* Post-increment of a name: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEINC()
{
    return incName(1, false);
}
/* Post-increment of a property: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPINC()
{
    return incProp(1, false);
}
// XXX consolidate with record_JSOP_GETELEM code...
/* Post-increment of an element: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMINC()
{
    return InjectStatus(incElem(1, false));
}
/* Post-decrement of a name: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAMEDEC()
{
    return incName(-1, false);
}
/* Post-decrement of a property: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_PROPDEC()
{
    return incProp(-1, false);
}
/* Post-decrement of an element: pre = false leaves the original value on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ELEMDEC()
{
    return InjectStatus(incElem(-1, false));
}
/* JSOP_GETPROP: property read from the value on top of the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETPROP()
{
    return getProp(stackval(-1));
}
/*
 * JSOP_SETPROP: validate that the assignment target is a native object.
 * The actual store is recorded later (via the property-cache path); this
 * hook only rejects cases we cannot trace.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETPROP()
{
    jsval& l = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(l))
        RETURN_STOP_A("primitive this for SETPROP");

    JSObject* obj = JSVAL_TO_OBJECT(l);
    // Only the default native setProperty hook is traceable.
    if (obj->map->ops->setProperty != js_SetProperty)
        RETURN_STOP_A("non-native JSObjectOps::setProperty");
    return ARECORD_CONTINUE;
}
/* Emit a specialized, inlined copy of js_NativeSet. */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop,
                         jsval v, LIns* v_ins)
{
    JSScope* scope = OBJ_SCOPE(obj);
    uint32 slot = sprop->slot;

    /*
     * We do not trace assignment to properties that have both a nonstub setter
     * and a slot, for several reasons.
     *
     * First, that would require sampling rt->propertyRemovals before and after
     * (see js_NativeSet), and even more code to handle the case where the two
     * samples differ. A mere guard is not enough, because you can't just bail
     * off trace in the middle of a property assignment without storing the
     * value and making the stack right.
     *
     * If obj is the global object, there are two additional problems. We would
     * have to emit still more code to store the result in the object (not the
     * native global frame) if the setter returned successfully after
     * deep-bailing. And we would have to cope if the run-time type of the
     * setter's return value differed from the record-time type of v, in which
     * case unboxing would fail and, having called a native setter, we could
     * not just retry the instruction in the interpreter.
     */
    JS_ASSERT(SPROP_HAS_STUB_SETTER(sprop) || slot == SPROP_INVALID_SLOT);

    // Box the value to be stored, if necessary.
    LIns* boxed_ins = NULL;
    if (!SPROP_HAS_STUB_SETTER(sprop) || (slot != SPROP_INVALID_SLOT && obj != globalObj))
        boxed_ins = box_jsval(v, v_ins);

    // Call the setter, if any.
    if (!SPROP_HAS_STUB_SETTER(sprop))
        emitNativePropertyOp(scope, sprop, obj_ins, true, boxed_ins);

    // Store the value, if this property has a slot.
    if (slot != SPROP_INVALID_SLOT) {
        JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope));
        JS_ASSERT(!(sprop->attrs & JSPROP_SHARED));
        if (obj == globalObj) {
            // Global slots live in the native global frame; track the
            // unboxed value in the tracker instead of storing directly.
            if (!lazilyImportGlobalSlot(slot))
                RETURN_STOP("lazy import of global slot failed");
            set(&STOBJ_GET_SLOT(obj, slot), v_ins);
        } else {
            LIns* dslots_ins = NULL;
            stobj_set_slot(obj_ins, slot, dslots_ins, boxed_ins);
        }
    }

    return RECORD_CONTINUE;
}
/*
 * Trace-callable wrapper for JSScope::methodWriteBarrier. Roots funobj with a
 * temp-value rooter for the duration of the call, since the barrier can run
 * arbitrary engine code.
 */
static JSBool FASTCALL
MethodWriteBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
{
    JSAutoTempValueRooter tvr(cx, funobj);

    return OBJ_SCOPE(obj)->methodWriteBarrier(cx, sprop, tvr.value());
}
11114 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, MethodWriteBarrier
, CONTEXT
, OBJECT
, SCOPEPROP
, OBJECT
,
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Some original lines (braces/blanks) are missing.
//
// Record a property set on |l| (an object) through property-cache entry
// |entry| / scope property |sprop|, storing |v| (LIR value v_ins).
// Aborts recording (RETURN_STOP) for uncacheable, getter/setter, or
// readonly cases; dispatches Call objects to setCallProp; otherwise guards
// the property-cache hit, emits js_AddProperty for adds, and finishes via
// nativeSet.
11117 JS_REQUIRES_STACK RecordingStatus
11118 TraceRecorder::setProp(jsval
&l
, JSPropCacheEntry
* entry
, JSScopeProperty
* sprop
,
11119 jsval
&v
, LIns
*& v_ins
)
// Bail out of recording for all the untraceable setter shapes.
11121 if (entry
== JS_NO_PROP_CACHE_FILL
)
11122 RETURN_STOP("can't trace uncacheable property set");
11123 JS_ASSERT_IF(PCVCAP_TAG(entry
->vcap
) >= 1, sprop
->attrs
& JSPROP_SHARED
);
11124 if (!SPROP_HAS_STUB_SETTER(sprop
) && sprop
->slot
!= SPROP_INVALID_SLOT
)
11125 RETURN_STOP("can't trace set of property with setter and slot");
11126 if (sprop
->attrs
& JSPROP_SETTER
)
11127 RETURN_STOP("can't trace JavaScript function setter");
11129 // These two cases are errors and can't be traced.
11130 if (sprop
->attrs
& JSPROP_GETTER
)
11131 RETURN_STOP("can't assign to property with script getter but no setter");
11132 if (sprop
->attrs
& JSPROP_READONLY
)
11133 RETURN_STOP("can't assign to readonly property");
11135 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l
));
11136 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
11137 LIns
* obj_ins
= get(&l
);
11138 JSScope
* scope
= OBJ_SCOPE(obj
);
11140 JS_ASSERT_IF(entry
->vcap
== PCVCAP_MAKE(entry
->kshape
, 0, 0), scope
->has(sprop
));
11142 // Fast path for CallClass. This is about 20% faster than the general case.
11144 if (OBJ_GET_CLASS(cx
, obj
) == &js_CallClass
)
11145 return setCallProp(obj
, obj_ins
, sprop
, v_ins
, v
);
11148 * Setting a function-valued property might need to rebrand the object, so
11149 * we emit a call to the method write barrier. There's no need to guard on
11150 * this, because functions have distinct trace-type from other values and
11151 * branded-ness is implied by the shape, which we've already guarded on.
11153 if (scope
->branded() && VALUE_IS_FUNCTION(cx
, v
) && entry
->directHit()) {
11154 if (obj
== globalObj
)
11155 RETURN_STOP("can't trace function-valued property set in branded global scope");
// Barrier can deep-bail, so bracket the call with enter/leaveDeepBailCall.
11157 enterDeepBailCall();
11158 LIns
* args
[] = { v_ins
, INS_CONSTSPROP(sprop
), obj_ins
, cx_ins
};
11159 LIns
* ok_ins
= lir
->insCall(&MethodWriteBarrier_ci
, args
);
11160 guard(false, lir
->ins_eq0(ok_ins
), OOM_EXIT
);
11161 leaveDeepBailCall();
11164 // Find obj2. If entry->adding(), the TAG bits are all 0.
11165 JSObject
* obj2
= obj
;
11166 for (jsuword i
= PCVCAP_TAG(entry
->vcap
) >> PCVCAP_PROTOBITS
; i
; i
--)
11167 obj2
= OBJ_GET_PARENT(cx
, obj2
);
11168 for (jsuword j
= PCVCAP_TAG(entry
->vcap
) & PCVCAP_PROTOMASK
; j
; j
--)
11169 obj2
= OBJ_GET_PROTO(cx
, obj2
);
11170 scope
= OBJ_SCOPE(obj2
);
11171 JS_ASSERT_IF(entry
->adding(), obj2
== obj
);
11173 // Guard before anything else.
11174 LIns
* map_ins
= map(obj_ins
);
11175 CHECK_STATUS(guardNativePropertyOp(obj
, map_ins
));
11177 CHECK_STATUS(guardPropertyCacheHit(obj_ins
, map_ins
, obj
, obj2
, entry
, pcval
));
11178 JS_ASSERT(scope
->object
== obj2
);
11179 JS_ASSERT(scope
->has(sprop
));
11180 JS_ASSERT_IF(obj2
!= obj
, sprop
->attrs
& JSPROP_SHARED
);
11182 // Add a property to the object if necessary.
11183 if (entry
->adding()) {
11184 JS_ASSERT(!(sprop
->attrs
& JSPROP_SHARED
));
11185 if (obj
== globalObj
)
11186 RETURN_STOP("adding a property to the global object");
11188 LIns
* args
[] = { INS_CONSTSPROP(sprop
), obj_ins
, cx_ins
};
11189 LIns
* ok_ins
= lir
->insCall(&js_AddProperty_ci
, args
);
11190 guard(false, lir
->ins_eq0(ok_ins
), OOM_EXIT
);
// Delegate the actual slot write to nativeSet.
11193 return nativeSet(obj
, obj_ins
, sprop
, v
, v_ins
);
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Gaps in the embedded numbering (e.g. 11206, 11211)
// indicate statements lost by the extraction -- presumably the tracker
// updates for vp2; confirm against the original file.
//
// Record a set of an argument/var property on a Call object.  On-trace-stack
// call objects are handled by updating the tracker; off-stack ones call the
// js_SetCallArg/js_SetCallVar builtins and guard on their status.
11196 JS_REQUIRES_STACK RecordingStatus
11197 TraceRecorder::setCallProp(JSObject
*callobj
, LIns
*callobj_ins
, JSScopeProperty
*sprop
,
11198 LIns
*v_ins
, jsval v
)
11200 // Set variables in on-trace-stack call objects by updating the tracker.
11201 JSStackFrame
*fp
= frameIfInRange(callobj
);
11203 jsint slot
= JSVAL_TO_INT(SPROP_USERID(sprop
));
11204 if (sprop
->setter
== SetCallArg
) {
11205 jsval
*vp2
= &fp
->argv
[slot
];
11207 return RECORD_CONTINUE
;
11209 if (sprop
->setter
== SetCallVar
) {
11210 jsval
*vp2
= &fp
->slots
[slot
];
11212 return RECORD_CONTINUE
;
11214 RETURN_STOP("can't trace special CallClass setter");
11217 // Set variables in off-trace-stack call objects by calling standard builtins.
11218 const CallInfo
* ci
= NULL
;
11219 if (sprop
->setter
== SetCallArg
)
11220 ci
= &js_SetCallArg_ci
;
11221 else if (sprop
->setter
== SetCallVar
)
11222 ci
= &js_SetCallVar_ci
;
11224 RETURN_STOP("can't trace special CallClass setter");
// Builtin call arguments (array declaration line lost in extraction).
11227 box_jsval(v
, v_ins
),
11228 INS_CONST(SPROP_USERID(sprop
)),
11232 LIns
* call_ins
= lir
->insCall(ci
, args
);
11233 guard(false, addName(lir
->ins_eq0(call_ins
), "guard(set upvar)"), STATUS_EXIT
);
11234 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The switch statement around JSOP_SETMETHOD is
// partially missing from this extraction.
//
// Record a property-cache hit for a set: forwards rhs/lhs stack values to
// setProp, then inspects the current bytecode to decide how the result is
// left on the stack.
11237 JS_REQUIRES_STACK AbortableRecordingStatus
11238 TraceRecorder::record_SetPropHit(JSPropCacheEntry
* entry
, JSScopeProperty
* sprop
)
// r = value being assigned, l = target object (interpreter stack layout).
11240 jsval
& r
= stackval(-1);
11241 jsval
& l
= stackval(-2);
11243 CHECK_STATUS_A(setProp(l
, entry
, sprop
, r
, v_ins
));
11245 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
11249 case JSOP_SETMETHOD
:
11250 if (pc
[JSOP_SETPROP_LENGTH
] != JSOP_POP
)
11257 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Prepare for a builtin call that may deep-bail: snapshot the VM state into
// cx->bailExit, emit an LIR_xbarrier so nanojit keeps stack writes live
// before the call, and drop cached shape guards.  Paired with
// leaveDeepBailCall().
11260 JS_REQUIRES_STACK VMSideExit
*
11261 TraceRecorder::enterDeepBailCall()
11263 // Take snapshot for js_DeepBail and store it in cx->bailExit.
11264 VMSideExit
* exit
= snapshot(DEEP_BAIL_EXIT
);
11265 lir
->insStorei(INS_CONSTPTR(exit
), cx_ins
, offsetof(JSContext
, bailExit
));
11267 // Tell nanojit not to discard or defer stack writes before this call.
11268 GuardRecord
* guardRec
= createGuardRecord(exit
);
11269 lir
->insGuard(LIR_xbarrier
, NULL
, guardRec
);
11271 // Forget about guarded shapes, since deep bailers can reshape the world.
11272 forgetGuardedShapes();
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Counterpart of enterDeepBailCall(): clears cx->bailExit after the deep-
// bailing builtin call has completed.
11276 JS_REQUIRES_STACK
void
11277 TraceRecorder::leaveDeepBailCall()
11279 // Keep cx->bailExit null when it's invalid.
11280 lir
->insStorei(INS_NULL(), cx_ins
, offsetof(JSContext
, bailExit
));
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Common tail for the getProperty* recorders: load the boxed result out of
// the builtin's vp slot, write it (and the this-object for JOF_CALLOP ops)
// onto the tracked stack, and defer both the ok_ins guard and the unboxing
// to monitorRecording via pendingGuardCondition / pendingUnboxSlot.
11283 JS_REQUIRES_STACK
void
11284 TraceRecorder::finishGetProp(LIns
* obj_ins
, LIns
* vp_ins
, LIns
* ok_ins
, jsval
* outp
)
11286 // Store the boxed result (and this-object, if JOF_CALLOP) before the
11287 // guard. The deep-bail case requires this. If the property get fails,
11288 // these slots will be ignored anyway.
11289 LIns
* result_ins
= lir
->insLoad(LIR_ldp
, vp_ins
, 0);
11290 set(outp
, result_ins
, true);
11291 if (js_CodeSpec
[*cx
->fp
->regs
->pc
].format
& JOF_CALLOP
)
11292 set(outp
+ 1, obj_ins
, true);
11294 // We need to guard on ok_ins, but this requires a snapshot of the state
11295 // after this op. monitorRecording will do it for us.
11296 pendingGuardCondition
= ok_ins
;
11298 // Note there is a boxed result sitting on the stack. The caller must leave
11299 // it there for the time being, since the return type is not yet
11300 // known. monitorRecording will emit the code to unbox it.
11301 pendingUnboxSlot
= outp
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The return type line and the atomization failure
// check (around original lines 11310-11315) are missing from this view.
//
// Convert the rooted string at *namep into a jsid, atomizing it if needed
// and writing the atomized string back through namep so the caller's GC
// root stays valid.
11305 RootedStringToId(JSContext
* cx
, JSString
** namep
, jsid
* idp
)
11307 JSString
* name
= *namep
;
// Already atomized: the string itself doubles as the atom.
11308 if (name
->isAtomized()) {
11309 *idp
= ATOM_TO_JSID((JSAtom
*) STRING_TO_JSVAL(name
));
11313 JSAtom
* atom
= js_AtomizeString(cx
, name
, 0);
11316 *namep
= ATOM_TO_STRING(atom
); /* write back to GC root */
11317 *idp
= ATOM_TO_JSID(atom
);
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin: resolves *namep to an id and performs obj->getProperty
// into *vp.  On failure, records a builtin error; the trace-visible result
// is whether interpState->builtinStatus is still clean.
11321 static JSBool FASTCALL
11322 GetPropertyByName(JSContext
* cx
, JSObject
* obj
, JSString
** namep
, jsval
* vp
)
11324 js_LeaveTraceIfGlobalObject(cx
, obj
);
11327 if (!RootedStringToId(cx
, namep
, &id
) || !obj
->getProperty(cx
, id
, vp
)) {
11328 js_SetBuiltinError(cx
);
11331 return cx
->interpState
->builtinStatus
== 0;
// CALLINFO registration (macro tail missing from this extraction).
11333 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, GetPropertyByName
, CONTEXT
, OBJECT
, STRINGPTR
, JSVALPTR
,
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
11336 // Convert the value in a slot to a string and store the resulting string back
11337 // in the slot (typically in order to root it).
11338 JS_REQUIRES_STACK RecordingStatus
11339 TraceRecorder::primitiveToStringInPlace(jsval
* vp
)
// |v| is presumably a local copy of *vp (its declaration line is missing
// from this extraction -- confirm against the original file).
11342 JS_ASSERT(JSVAL_IS_PRIMITIVE(v
));
11344 if (!JSVAL_IS_STRING(v
)) {
11345 // v is not a string. Turn it into one. js_ValueToString is safe
11346 // because v is not an object.
11347 JSString
*str
= js_ValueToString(cx
, v
);
11349 RETURN_ERROR("failed to stringify element id");
11350 v
= STRING_TO_JSVAL(str
);
// Mirror the stringification in the recorded LIR via stringify().
11351 set(vp
, stringify(*vp
));
11353 // Write the string back to the stack to save the interpreter some work
11354 // and to ensure snapshots get the correct type for this slot.
11357 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record a name-keyed property get: stringify the id slot in place, call
// the GetPropertyByName builtin inside a deep-bail bracket, refresh the
// tracker entry for *idvalp (the builtin may atomize/replace it), and
// finish via finishGetProp.
11360 JS_REQUIRES_STACK RecordingStatus
11361 TraceRecorder::getPropertyByName(LIns
* obj_ins
, jsval
* idvalp
, jsval
* outp
)
11363 CHECK_STATUS(primitiveToStringInPlace(idvalp
));
11364 enterDeepBailCall();
11366 // Call GetPropertyByName. The vp parameter points to stack because this is
11367 // what the interpreter currently does. obj and id are rooted on the
11368 // interpreter stack, but the slot at vp is not a root.
11369 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11370 LIns
* idvalp_ins
= addName(addr(idvalp
), "idvalp");
11371 LIns
* args
[] = {vp_ins
, idvalp_ins
, obj_ins
, cx_ins
};
11372 LIns
* ok_ins
= lir
->insCall(&GetPropertyByName_ci
, args
);
11374 // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
11375 // entry for that address. Correct it. (If the value in the address is
11376 // never used again, the usual case, Nanojit will kill this load.)
11377 tracker
.set(idvalp
, lir
->insLoad(LIR_ldp
, idvalp_ins
, 0));
11379 finishGetProp(obj_ins
, vp_ins
, ok_ins
, outp
);
11380 leaveDeepBailCall();
11381 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin: converts |index| to a rooted id and performs
// obj->getProperty into *vp; reports failure through the builtin-status
// protocol.
11384 static JSBool FASTCALL
11385 GetPropertyByIndex(JSContext
* cx
, JSObject
* obj
, int32 index
, jsval
* vp
)
11387 js_LeaveTraceIfGlobalObject(cx
, obj
);
11389 JSAutoTempIdRooter
idr(cx
);
11390 if (!js_Int32ToId(cx
, index
, idr
.addr()) || !obj
->getProperty(cx
, idr
.id(), vp
)) {
11391 js_SetBuiltinError(cx
);
11394 return cx
->interpState
->builtinStatus
== 0;
11396 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, GetPropertyByIndex
, CONTEXT
, OBJECT
, INT32
, JSVALPTR
, 0, 0)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record an integer-keyed property get via the GetPropertyByIndex builtin,
// within a deep-bail bracket; result handling is shared with finishGetProp.
11398 JS_REQUIRES_STACK RecordingStatus
11399 TraceRecorder::getPropertyByIndex(LIns
* obj_ins
, LIns
* index_ins
, jsval
* outp
)
// Index arrives as a LIR double; narrow it to int32 for the builtin.
11401 index_ins
= makeNumberInt32(index_ins
);
11403 // See note in getPropertyByName about vp.
11404 enterDeepBailCall();
11405 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11406 LIns
* args
[] = {vp_ins
, index_ins
, obj_ins
, cx_ins
};
11407 LIns
* ok_ins
= lir
->insCall(&GetPropertyByIndex_ci
, args
);
11408 finishGetProp(obj_ins
, vp_ins
, ok_ins
, outp
);
11409 leaveDeepBailCall();
11410 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin: plain obj->getProperty with a precomputed jsid,
// reporting failure through the builtin-status protocol.
11413 static JSBool FASTCALL
11414 GetPropertyById(JSContext
* cx
, JSObject
* obj
, jsid id
, jsval
* vp
)
11416 js_LeaveTraceIfGlobalObject(cx
, obj
);
11417 if (!obj
->getProperty(cx
, id
, vp
)) {
11418 js_SetBuiltinError(cx
);
11421 return cx
->interpState
->builtinStatus
== 0;
11423 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, GetPropertyById
,
11424 CONTEXT
, OBJECT
, JSVAL
, JSVALPTR
, 0, 0)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The declaration of |atom| (presumably a JSAtom*)
// is missing from this extraction -- confirm against the original file.
//
// Record an atom-keyed property get: derive the atom from the current
// bytecode (JSOP_LENGTH / JOF_ATOM / JOF_SLOTATOM forms), then call the
// GetPropertyById builtin inside a deep-bail bracket.
11426 JS_REQUIRES_STACK RecordingStatus
11427 TraceRecorder::getPropertyById(LIns
* obj_ins
, jsval
* outp
)
11431 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
11432 const JSCodeSpec
& cs
= js_CodeSpec
[*pc
];
11433 if (*pc
== JSOP_LENGTH
) {
11434 atom
= cx
->runtime
->atomState
.lengthAtom
;
11435 } else if (JOF_TYPE(cs
.format
) == JOF_ATOM
) {
11436 atom
= atoms
[GET_INDEX(pc
)];
11438 JS_ASSERT(JOF_TYPE(cs
.format
) == JOF_SLOTATOM
);
11439 atom
= atoms
[GET_INDEX(pc
+ SLOTNO_LEN
)];
11442 // Call GetPropertyById. See note in getPropertyByName about vp.
11443 enterDeepBailCall();
11444 jsid id
= ATOM_TO_JSID(atom
);
11445 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11446 LIns
* args
[] = {vp_ins
, INS_CONSTWORD(id
), obj_ins
, cx_ins
};
11447 LIns
* ok_ins
= lir
->insCall(&GetPropertyById_ci
, args
);
11448 finishGetProp(obj_ins
, vp_ins
, ok_ins
, outp
);
11449 leaveDeepBailCall();
11450 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Declarations of |pobj|/|prop| used by the debug
// asserts are missing from this extraction.
11453 /* Manually inlined, specialized copy of js_NativeGet. */
11454 static JSBool FASTCALL
11455 GetPropertyWithNativeGetter(JSContext
* cx
, JSObject
* obj
, JSScopeProperty
* sprop
, jsval
* vp
)
11457 js_LeaveTraceIfGlobalObject(cx
, obj
);
// Debug-only sanity check that sprop is really a property of obj.
11462 JS_ASSERT(obj
->lookupProperty(cx
, sprop
->id
, &pobj
, &prop
));
11463 JS_ASSERT(prop
== (JSProperty
*) sprop
);
11464 obj
->dropProperty(cx
, prop
);
11467 // JSScopeProperty::get contains a special case for With objects. We can
11468 // elide it here because With objects are, we claim, never on the operand
11469 // stack while recording.
11470 JS_ASSERT(STOBJ_GET_CLASS(obj
) != &js_WithClass
);
// Invoke the native getter directly, bypassing JSScopeProperty::get.
11473 if (!sprop
->getter(cx
, obj
, SPROP_USERID(sprop
), vp
)) {
11474 js_SetBuiltinError(cx
);
11477 return cx
->interpState
->builtinStatus
== 0;
11479 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, GetPropertyWithNativeGetter
,
11480 CONTEXT
, OBJECT
, SCOPEPROP
, JSVALPTR
, 0, 0)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record a get of a slotless property that has a native getter, by calling
// the GetPropertyWithNativeGetter builtin inside a deep-bail bracket.
11482 JS_REQUIRES_STACK RecordingStatus
11483 TraceRecorder::getPropertyWithNativeGetter(LIns
* obj_ins
, JSScopeProperty
* sprop
, jsval
* outp
)
// Preconditions: no scripted getter, no slot, and a non-stub native getter.
11485 JS_ASSERT(!(sprop
->attrs
& JSPROP_GETTER
));
11486 JS_ASSERT(sprop
->slot
== SPROP_INVALID_SLOT
);
11487 JS_ASSERT(!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop
));
11489 // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
11490 // FIXME - We should call the getter directly. Using a builtin function for
11491 // now because it buys some extra asserts. See bug 508310.
11492 enterDeepBailCall();
11493 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11494 LIns
* args
[] = {vp_ins
, INS_CONSTPTR(sprop
), obj_ins
, cx_ins
};
11495 LIns
* ok_ins
= lir
->insCall(&GetPropertyWithNativeGetter_ci
, args
);
11496 finishGetProp(obj_ins
, vp_ins
, ok_ins
, outp
);
11497 leaveDeepBailCall();
11498 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Several lines (local declarations for v_ins,
// typemap_ins, vp/addr_ins, and parts of the guard expressions) are missing
// from this extraction -- confirm against the original file.
//
// Record JSOP_GETELEM / JSOP_CALLELEM.  Dispatches on the operand types:
//   - string[int]          -> js_String_getelem builtin
//   - non-int index        -> getPropertyByName
//   - arguments object     -> read from the native stack area (with type
//                             guards derived from a captured type map)
//   - dense array          -> denseArrayElement fast path
//   - everything else      -> getPropertyByIndex
11501 JS_REQUIRES_STACK AbortableRecordingStatus
11502 TraceRecorder::record_JSOP_GETELEM()
11504 bool call
= *cx
->fp
->regs
->pc
== JSOP_CALLELEM
;
11506 jsval
& idx
= stackval(-1);
11507 jsval
& lval
= stackval(-2);
11509 LIns
* obj_ins
= get(&lval
);
11510 LIns
* idx_ins
= get(&idx
);
11512 // Special case for array-like access of strings.
11513 if (JSVAL_IS_STRING(lval
) && isInt32(idx
)) {
11515 RETURN_STOP_A("JSOP_CALLELEM on a string");
11516 int i
= asInt32(idx
);
11517 if (size_t(i
) >= JSVAL_TO_STRING(lval
)->length())
11518 RETURN_STOP_A("Invalid string index in JSOP_GETELEM");
11519 idx_ins
= makeNumberInt32(idx_ins
);
11520 LIns
* args
[] = { idx_ins
, obj_ins
, cx_ins
};
11521 LIns
* unitstr_ins
= lir
->insCall(&js_String_getelem_ci
, args
);
11522 guard(false, lir
->ins_peq0(unitstr_ins
), MISMATCH_EXIT
);
11523 set(&lval
, unitstr_ins
);
11524 return ARECORD_CONTINUE
;
11527 if (JSVAL_IS_PRIMITIVE(lval
))
11528 RETURN_STOP_A("JSOP_GETLEM on a primitive");
11529 RETURN_IF_XML_A(lval
);
11531 JSObject
* obj
= JSVAL_TO_OBJECT(lval
);
11532 if (obj
== globalObj
)
11533 RETURN_STOP_A("JSOP_GETELEM on global");
11536 /* Property access using a string name or something we have to stringify. */
11537 if (!JSVAL_IS_INT(idx
)) {
11538 if (!JSVAL_IS_PRIMITIVE(idx
))
11539 RETURN_STOP_A("object used as index");
11541 return InjectStatus(getPropertyByName(obj_ins
, &idx
, &lval
));
11544 if (STOBJ_GET_CLASS(obj
) == &js_ArgumentsClass
) {
11546 JSStackFrame
*afp
= guardArguments(obj
, obj_ins
, &depth
);
11548 uintN int_idx
= JSVAL_TO_INT(idx
);
11549 jsval
* vp
= &afp
->argv
[int_idx
];
// Constant index: the value can be resolved at record time.
11550 if (idx_ins
->isconstf()) {
11551 if (int_idx
>= 0 && int_idx
< afp
->argc
)
11554 v_ins
= INS_VOID();
11556 // If the index is not a constant expression, we generate LIR to load the value from
11557 // the native stack area. The guard on js_ArgumentClass above ensures the up-to-date
11558 // value has been written back to the native stack area.
11559 idx_ins
= makeNumberInt32(idx_ins
);
11560 if (int_idx
>= 0 && int_idx
< afp
->argc
) {
11561 JSTraceType type
= getCoercedType(*vp
);
11563 // Guard that the argument has the same type on trace as during recording.
11565 if (callDepth
== depth
) {
11566 // In this case, we are in the same frame where the arguments object was created.
11567 // The entry type map is not necessarily up-to-date, so we capture a new type map
11568 // for this point in the code.
11569 unsigned stackSlots
= NativeStackSlots(cx
, 0 /* callDepth */);
11570 JSTraceType
* typemap
= new (*traceMonitor
->traceAlloc
) JSTraceType
[stackSlots
];
11571 DetermineTypesVisitor
detVisitor(*this, typemap
);
11572 VisitStackSlots(detVisitor
, cx
, 0);
11573 typemap_ins
= INS_CONSTPTR(typemap
+ 2 /* callee, this */);
11575 // In this case, we are in a deeper frame from where the arguments object was
11576 // created. The type map at the point of the call out from the creation frame
11578 // Note: this relies on the assumption that we abort on setting an element of
11579 // an arguments object in any deeper frame.
11580 LIns
* fip_ins
= lir
->insLoad(LIR_ldp
, lirbuf
->rp
, (callDepth
-depth
)*sizeof(FrameInfo
*));
11581 typemap_ins
= lir
->ins2(LIR_add
, fip_ins
, INS_CONST(sizeof(FrameInfo
) + 2/*callee,this*/ * sizeof(JSTraceType
)));
// Compute &typemap[idx] and guard the recorded type matches on trace.
11584 LIns
* typep_ins
= lir
->ins2(LIR_piadd
, typemap_ins
,
11585 lir
->ins_u2p(lir
->ins2(LIR_mul
,
11587 INS_CONST(sizeof(JSTraceType
)))));
11588 LIns
* type_ins
= lir
->insLoad(LIR_ldcb
, typep_ins
, 0);
11590 addName(lir
->ins2(LIR_eq
, type_ins
, lir
->insImm(type
)),
11591 "guard(type-stable upvar)"),
11594 // Read the value out of the native stack area.
11595 guard(true, lir
->ins2(LIR_ult
, idx_ins
, INS_CONST(afp
->argc
)),
11596 snapshot(BRANCH_EXIT
));
11597 size_t stackOffset
= -treeInfo
->nativeStackBase
+ nativeStackOffset(&afp
->argv
[0]);
11598 LIns
* args_addr_ins
= lir
->ins2(LIR_piadd
, lirbuf
->sp
, INS_CONSTWORD(stackOffset
));
11599 LIns
* argi_addr_ins
= lir
->ins2(LIR_piadd
,
11601 lir
->ins_u2p(lir
->ins2(LIR_mul
,
11603 INS_CONST(sizeof(double)))));
11604 v_ins
= stackLoad(argi_addr_ins
, type
);
// Out-of-range index: guard it stays out of range and yield undefined.
11606 guard(false, lir
->ins2(LIR_ult
, idx_ins
, INS_CONST(afp
->argc
)),
11607 snapshot(BRANCH_EXIT
));
11608 v_ins
= INS_VOID();
11613 return ARECORD_CONTINUE
;
11615 RETURN_STOP_A("can't reach arguments object's frame");
11617 if (js_IsDenseArray(obj
)) {
11618 // Fast path for dense arrays accessed with a integer index.
11622 guardDenseArray(obj
, obj_ins
, BRANCH_EXIT
);
11623 CHECK_STATUS_A(denseArrayElement(lval
, idx
, vp
, v_ins
, addr_ins
));
11626 set(&idx
, obj_ins
);
11627 return ARECORD_CONTINUE
;
11630 return InjectStatus(getPropertyByIndex(obj_ins
, idx_ins
, &lval
));
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
11633 /* Functions used by JSOP_SETELEM */
// Deep-bail builtin: resolves *namep to an id and performs obj->setProperty
// from *vp, reporting failure via the builtin-status protocol.
11635 static JSBool FASTCALL
11636 SetPropertyByName(JSContext
* cx
, JSObject
* obj
, JSString
** namep
, jsval
* vp
)
11638 js_LeaveTraceIfGlobalObject(cx
, obj
);
11641 if (!RootedStringToId(cx
, namep
, &id
) || !obj
->setProperty(cx
, id
, vp
)) {
11642 js_SetBuiltinError(cx
);
11645 return cx
->interpState
->builtinStatus
== 0;
// CALLINFO registration (macro tail missing from this extraction).
11647 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, SetPropertyByName
, CONTEXT
, OBJECT
, STRINGPTR
, JSVALPTR
,
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin used by JSOP_INITELEM: defines an enumerable property
// |*namep| = |val| on obj (defineProperty, not setProperty, so setters are
// not run during object initialization).
11650 static JSBool FASTCALL
11651 InitPropertyByName(JSContext
* cx
, JSObject
* obj
, JSString
** namep
, jsval val
)
11653 js_LeaveTraceIfGlobalObject(cx
, obj
);
11656 if (!RootedStringToId(cx
, namep
, &id
) ||
11657 !obj
->defineProperty(cx
, id
, val
, NULL
, NULL
, JSPROP_ENUMERATE
)) {
11658 js_SetBuiltinError(cx
);
11661 return cx
->interpState
->builtinStatus
== 0;
// CALLINFO registration (macro tail missing from this extraction).
11663 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, InitPropertyByName
, CONTEXT
, OBJECT
, STRINGPTR
, JSVAL
,
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The if/else around |init| and the declaration of
// |ok_ins| are missing from this extraction (the two same-named |args|
// arrays live in the two branches of that lost if/else).
//
// Record a name-keyed store: stringify the id in place, box the rhs, then
// call InitPropertyByName (for JSOP_INITELEM) or SetPropertyByName inside a
// deep-bail bracket; the guard on ok_ins is deferred to monitorRecording.
11666 JS_REQUIRES_STACK RecordingStatus
11667 TraceRecorder::initOrSetPropertyByName(LIns
* obj_ins
, jsval
* idvalp
, jsval
* rvalp
, bool init
)
11669 CHECK_STATUS(primitiveToStringInPlace(idvalp
));
11671 LIns
* rval_ins
= box_jsval(*rvalp
, get(rvalp
));
11673 enterDeepBailCall();
11676 LIns
* idvalp_ins
= addName(addr(idvalp
), "idvalp");
11678 LIns
* args
[] = {rval_ins
, idvalp_ins
, obj_ins
, cx_ins
};
11679 ok_ins
= lir
->insCall(&InitPropertyByName_ci
, args
);
11681 // See note in getPropertyByName about vp.
11682 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11683 lir
->insStorei(rval_ins
, vp_ins
, 0);
11684 LIns
* args
[] = {vp_ins
, idvalp_ins
, obj_ins
, cx_ins
};
11685 ok_ins
= lir
->insCall(&SetPropertyByName_ci
, args
);
11687 pendingGuardCondition
= ok_ins
;
11689 leaveDeepBailCall();
11690 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin: converts |index| to a rooted id and performs
// obj->setProperty from *vp, reporting failure via the builtin-status
// protocol.
11693 static JSBool FASTCALL
11694 SetPropertyByIndex(JSContext
* cx
, JSObject
* obj
, int32 index
, jsval
* vp
)
11696 js_LeaveTraceIfGlobalObject(cx
, obj
);
11698 JSAutoTempIdRooter
idr(cx
);
11699 if (!js_Int32ToId(cx
, index
, idr
.addr()) || !obj
->setProperty(cx
, idr
.id(), vp
)) {
11700 js_SetBuiltinError(cx
);
11703 return cx
->interpState
->builtinStatus
== 0;
11705 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, SetPropertyByIndex
, CONTEXT
, OBJECT
, INT32
, JSVALPTR
, 0, 0)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Deep-bail builtin used by JSOP_INITELEM with an int index: defines an
// enumerable property obj[index] = val via defineProperty.
11707 static JSBool FASTCALL
11708 InitPropertyByIndex(JSContext
* cx
, JSObject
* obj
, int32 index
, jsval val
)
11710 js_LeaveTraceIfGlobalObject(cx
, obj
);
11712 JSAutoTempIdRooter
idr(cx
);
11713 if (!js_Int32ToId(cx
, index
, idr
.addr()) ||
11714 !obj
->defineProperty(cx
, idr
.id(), val
, NULL
, NULL
, JSPROP_ENUMERATE
)) {
11715 js_SetBuiltinError(cx
);
11718 return cx
->interpState
->builtinStatus
== 0;
11720 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL
, InitPropertyByIndex
, CONTEXT
, OBJECT
, INT32
, JSVAL
, 0, 0)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  As in initOrSetPropertyByName, the if/else on
// |init| and the |ok_ins| declaration are missing from this extraction.
//
// Record an int-keyed store: narrow the index to int32, box the rhs, then
// call InitPropertyByIndex (JSOP_INITELEM) or SetPropertyByIndex inside a
// deep-bail bracket; the guard on ok_ins is deferred to monitorRecording.
11722 JS_REQUIRES_STACK RecordingStatus
11723 TraceRecorder::initOrSetPropertyByIndex(LIns
* obj_ins
, LIns
* index_ins
, jsval
* rvalp
, bool init
)
11725 index_ins
= makeNumberInt32(index_ins
);
11727 LIns
* rval_ins
= box_jsval(*rvalp
, get(rvalp
));
11729 enterDeepBailCall();
11733 LIns
* args
[] = {rval_ins
, index_ins
, obj_ins
, cx_ins
};
11734 ok_ins
= lir
->insCall(&InitPropertyByIndex_ci
, args
);
11736 // See note in getPropertyByName about vp.
11737 LIns
* vp_ins
= addName(lir
->insAlloc(sizeof(jsval
)), "vp");
11738 lir
->insStorei(rval_ins
, vp_ins
, 0);
11739 LIns
* args
[] = {vp_ins
, index_ins
, obj_ins
, cx_ins
};
11740 ok_ins
= lir
->insCall(&SetPropertyByIndex_ci
, args
);
11742 pendingGuardCondition
= ok_ins
;
11744 leaveDeepBailCall();
11745 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The declaration of |res_ins| and parts of the
// isNumber(v) branch structure are missing from this extraction.
//
// Record JSOP_SETELEM / JSOP_INITELEM.  Non-int or non-dense-array targets
// go through the initOrSetPropertyBy{Name,Index} builtins; dense arrays use
// specialized js_Array_dense_setelem builtins (int / double / boxed).
11748 JS_REQUIRES_STACK AbortableRecordingStatus
11749 TraceRecorder::record_JSOP_SETELEM()
11751 jsval
& v
= stackval(-1);
11752 jsval
& idx
= stackval(-2);
11753 jsval
& lval
= stackval(-3);
11755 if (JSVAL_IS_PRIMITIVE(lval
))
11756 RETURN_STOP_A("left JSOP_SETELEM operand is not an object");
11757 RETURN_IF_XML_A(lval
);
11759 JSObject
* obj
= JSVAL_TO_OBJECT(lval
);
11760 LIns
* obj_ins
= get(&lval
);
11761 LIns
* idx_ins
= get(&idx
);
11762 LIns
* v_ins
= get(&v
);
11764 if (!JSVAL_IS_INT(idx
)) {
11765 if (!JSVAL_IS_PRIMITIVE(idx
))
11766 RETURN_STOP_A("non-primitive index");
11767 CHECK_STATUS_A(initOrSetPropertyByName(obj_ins
, &idx
, &v
,
11768 *cx
->fp
->regs
->pc
== JSOP_INITELEM
));
11769 } else if (JSVAL_TO_INT(idx
) < 0 || !OBJ_IS_DENSE_ARRAY(cx
, obj
)) {
11770 CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins
, idx_ins
, &v
,
11771 *cx
->fp
->regs
->pc
== JSOP_INITELEM
));
11773 // Fast path: assigning to element of dense array.
11775 // Make sure the array is actually dense.
11776 if (!guardDenseArray(obj
, obj_ins
, BRANCH_EXIT
))
11777 return ARECORD_STOP
;
11779 // The index was on the stack and is therefore a LIR float. Force it to
11781 idx_ins
= makeNumberInt32(idx_ins
);
11783 // Box the value so we can use one builtin instead of having to add one
11784 // builtin for every storage type. Special case for integers though,
11785 // since they are so common.
11787 LIns
* args
[] = { NULL
, idx_ins
, obj_ins
, cx_ins
};
// Integer-valued doubles use the _int builtin after demotion.
11789 if (isPromoteInt(v_ins
)) {
11790 args
[0] = ::demote(lir
, v_ins
);
11791 res_ins
= lir
->insCall(&js_Array_dense_setelem_int_ci
, args
);
11794 res_ins
= lir
->insCall(&js_Array_dense_setelem_double_ci
, args
);
11797 LIns
* args
[] = { box_jsval(v
, v_ins
), idx_ins
, obj_ins
, cx_ins
};
11798 res_ins
= lir
->insCall(&js_Array_dense_setelem_ci
, args
);
11800 guard(false, lir
->ins_eq0(res_ins
), MISMATCH_EXIT
);
11803 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
11804 if (*pc
== JSOP_SETELEM
&& pc
[JSOP_SETELEM_LENGTH
] != JSOP_POP
)
11807 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Declarations of vp/ins/nr and obj2/pcval are
// missing from this extraction.
//
// Record JSOP_CALLNAME: non-global scope chains go through scopeChainProp
// with the global pushed as the implicit |this|; global names use the
// property cache and push the resolved function object as a constant.
11810 JS_REQUIRES_STACK AbortableRecordingStatus
11811 TraceRecorder::record_JSOP_CALLNAME()
11813 JSObject
* obj
= cx
->fp
->scopeChain
;
11814 if (obj
!= globalObj
) {
11818 CHECK_STATUS_A(scopeChainProp(obj
, vp
, ins
, nr
));
11820 stack(1, INS_CONSTOBJ(globalObj
));
11821 return ARECORD_CONTINUE
;
11824 LIns
* obj_ins
= scopeChain();
11828 CHECK_STATUS_A(test_property_cache(obj
, obj_ins
, obj2
, pcval
));
11830 if (PCVAL_IS_NULL(pcval
) || !PCVAL_IS_OBJECT(pcval
))
11831 RETURN_STOP_A("callee is not an object");
11833 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval
)));
11835 stack(0, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval
)));
11837 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Each macro invocation's trailing arguments are
// missing from this extraction.
//
// CALLINFO registrations for the extern builtins used by
// TraceRecorder::upvar() to fetch upvalues from arg / var / stack slots of
// an on-trace frame.
11840 JS_DEFINE_CALLINFO_5(extern, UINT32
, GetUpvarArgOnTrace
, CONTEXT
, UINT32
, INT32
, UINT32
,
11842 JS_DEFINE_CALLINFO_5(extern, UINT32
, GetUpvarVarOnTrace
, CONTEXT
, UINT32
, INT32
, UINT32
,
11844 JS_DEFINE_CALLINFO_5(extern, UINT32
, GetUpvarStackOnTrace
, CONTEXT
, UINT32
, INT32
, UINT32
,
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The fast-path early return after the tracker
// lookup, the |slot| declaration, and parts of the args array are missing
// from this extraction.
11848 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
11849 * value. NULL is returned only on a can't-happen condition with an invalid
11850 * typemap. The value of the upvar is returned as v.
11852 JS_REQUIRES_STACK LIns
*
11853 TraceRecorder::upvar(JSScript
* script
, JSUpvarArray
* uva
, uintN index
, jsval
& v
)
11856 * Try to find the upvar in the current trace's tracker. For &vr to be
11857 * the address of the jsval found in js_GetUpvar, we must initialize
11858 * vr directly with the result, so it is a reference to the same location.
11859 * It does not work to assign the result to v, because v is an already
11860 * existing reference that points to something else.
11862 uint32 cookie
= uva
->vector
[index
];
11863 jsval
& vr
= js_GetUpvar(cx
, script
->staticLevel
, cookie
);
11870 * The upvar is not in the current trace, so get the upvar value exactly as
11871 * the interpreter does and unbox.
11873 uint32 level
= script
->staticLevel
- UPVAR_FRAME_SKIP(cookie
);
11874 uint32 cookieSlot
= UPVAR_FRAME_SLOT(cookie
);
11875 JSStackFrame
* fp
= cx
->display
[level
];
11876 const CallInfo
* ci
;
// Select the builtin by where the cookie slot lives: stack slot, formal
// arg, the callee slot, or a local var (slot rebased past the formals).
11879 ci
= &GetUpvarStackOnTrace_ci
;
11881 } else if (cookieSlot
< fp
->fun
->nargs
) {
11882 ci
= &GetUpvarArgOnTrace_ci
;
11884 } else if (cookieSlot
== CALLEE_UPVAR_SLOT
) {
11885 ci
= &GetUpvarArgOnTrace_ci
;
11888 ci
= &GetUpvarVarOnTrace_ci
;
11889 slot
= cookieSlot
- fp
->fun
->nargs
;
// Stack slot for the builtin to write the (double-sized) result into.
11892 LIns
* outp
= lir
->insAlloc(sizeof(double));
11895 INS_CONST(callDepth
),
11900 LIns
* call_ins
= lir
->insCall(ci
, args
);
// Builtin returns the value's trace type; guard it matches what we saw
// during recording, then load the typed value back out of outp.
11901 JSTraceType type
= getCoercedType(v
);
11903 addName(lir
->ins2(LIR_eq
, call_ins
, lir
->insImm(type
)),
11904 "guard(type-stable upvar)"),
11906 return stackLoad(outp
, type
);
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  Most of the switch on |type| (the loadOp
// assignments per case) is missing from this extraction.
11910 * Generate LIR to load a value from the native stack. This method ensures that
11911 * the correct LIR load operator is used.
11913 LIns
* TraceRecorder::stackLoad(LIns
* base
, uint8 type
)
11927 case TT_PSEUDOBOOLEAN
:
11932 JS_NOT_REACHED("found jsval type in an upvar type map entry");
11936 LIns
* result
= lir
->insLoad(loadOp
, base
, 0);
// Int32 values are widened back to the trace's double representation.
11937 if (type
== TT_INT32
)
11938 result
= lir
->ins1(LIR_i2f
, result
);
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The declaration of |v| and the null check on
// upvar_ins (around original lines 11950-11952) are missing from this view.
//
// Record JSOP_GETUPVAR: resolve the upvar via TraceRecorder::upvar and push
// its LIR value; aborts recording if upvar() fails.
11942 JS_REQUIRES_STACK AbortableRecordingStatus
11943 TraceRecorder::record_JSOP_GETUPVAR()
11945 uintN index
= GET_UINT16(cx
->fp
->regs
->pc
);
11946 JSScript
*script
= cx
->fp
->script
;
11947 JSUpvarArray
* uva
= script
->upvars();
11948 JS_ASSERT(index
< uva
->length
);
11951 LIns
* upvar_ins
= upvar(script
, uva
, index
, v
);
11953 return ARECORD_STOP
;
11954 stack(0, upvar_ins
);
11955 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record JSOP_CALLUPVAR: same as GETUPVAR plus a null |this| pushed for the
// upcoming call.
11958 JS_REQUIRES_STACK AbortableRecordingStatus
11959 TraceRecorder::record_JSOP_CALLUPVAR()
11961 CHECK_STATUS_A(record_JSOP_GETUPVAR());
11962 stack(1, INS_NULL());
11963 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record JSOP_GETDSLOT: read dslot |index| from the callee object
// (argv[-2]), unbox the recorded value, and push it.
11966 JS_REQUIRES_STACK AbortableRecordingStatus
11967 TraceRecorder::record_JSOP_GETDSLOT()
11969 JSObject
* callee
= JSVAL_TO_OBJECT(cx
->fp
->argv
[-2]);
11970 LIns
* callee_ins
= get(&cx
->fp
->argv
[-2]);
11972 unsigned index
= GET_UINT16(cx
->fp
->regs
->pc
);
11973 LIns
* dslots_ins
= NULL
;
11974 LIns
* v_ins
= stobj_get_dslot(callee_ins
, index
, dslots_ins
);
// Unbox using the concrete value seen at record time; mismatches exit.
11976 stack(0, unbox_jsval(callee
->dslots
[index
], v_ins
, snapshot(BRANCH_EXIT
)));
11977 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.
//
// Record JSOP_CALLDSLOT: same as GETDSLOT plus a null |this| pushed for the
// upcoming call.
11980 JS_REQUIRES_STACK AbortableRecordingStatus
11981 TraceRecorder::record_JSOP_CALLDSLOT()
11983 CHECK_STATUS_A(record_JSOP_GETDSLOT());
11984 stack(1, INS_NULL());
11985 return ARECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The guard(...) wrappers around the two equality
// comparisons are partially missing from this extraction.
//
// Emit guards that the callee on trace is the same function object seen at
// record time: compare its private (the JSFunction) and its parent (the
// scope), exiting via a BRANCH_EXIT snapshot on mismatch.
11988 JS_REQUIRES_STACK RecordingStatus
11989 TraceRecorder::guardCallee(jsval
& callee
)
11991 JS_ASSERT(VALUE_IS_FUNCTION(cx
, callee
));
11993 VMSideExit
* branchExit
= snapshot(BRANCH_EXIT
);
11994 JSObject
* callee_obj
= JSVAL_TO_OBJECT(callee
);
11995 LIns
* callee_ins
= get(&callee
);
// Keep the callee alive for the lifetime of the trace.
11997 treeInfo
->gcthings
.addUnique(callee
);
12000 stobj_get_private(callee_ins
),
12001 INS_CONSTPTR(callee_obj
->getPrivate())),
12005 stobj_get_parent(callee_ins
),
12006 INS_CONSTOBJ(OBJ_GET_PARENT(cx
, callee_obj
))),
12008 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The early-return when afp is NULL and the final
// return statement are missing from this extraction.
12012 * Prepare the given |arguments| object to be accessed on trace. If the return
12013 * value is non-NULL, then the given |arguments| object refers to a frame on
12014 * the current trace and is guaranteed to refer to the same frame on trace for
12015 * all later executions.
12017 JS_REQUIRES_STACK JSStackFrame
*
12018 TraceRecorder::guardArguments(JSObject
*obj
, LIns
* obj_ins
, unsigned *depthp
)
12020 JS_ASSERT(STOBJ_GET_CLASS(obj
) == &js_ArgumentsClass
);
12022 JSStackFrame
*afp
= frameIfInRange(obj
, depthp
);
// Guard on-trace that obj is still an Arguments object for this frame.
12026 VMSideExit
*exit
= snapshot(MISMATCH_EXIT
);
12027 guardClass(obj
, obj_ins
, &js_ArgumentsClass
, exit
);
12029 LIns
* args_ins
= get(&afp
->argsobj
);
12030 LIns
* cmp
= lir
->ins2(LIR_peq
, args_ins
, obj_ins
);
12031 lir
->insGuard(LIR_xf
, cmp
, createGuardRecord(exit
));
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  A few lines (the null check after memoize, part of
// the debug printf argument list) are missing from this extraction.
//
// Record a call into an interpreted function: build and memoize a FrameInfo
// (with an outgoing-frame type map), store it in the on-trace call-depth
// "rp" stack, and switch the recorder's atom table to the callee's script.
12035 JS_REQUIRES_STACK RecordingStatus
12036 TraceRecorder::interpretedFunctionCall(jsval
& fval
, JSFunction
* fun
, uintN argc
, bool constructing
)
// Cross-global calls can't be traced against a single globalObj.
12038 if (JS_GetGlobalForObject(cx
, JSVAL_TO_OBJECT(fval
)) != globalObj
)
12039 RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes");
12041 JSStackFrame
* fp
= cx
->fp
;
12043 // TODO: track the copying via the tracker...
12044 if (argc
< fun
->nargs
&&
12045 jsuword(fp
->regs
->sp
+ (fun
->nargs
- argc
)) > cx
->stackPool
.current
->limit
) {
12046 RETURN_STOP("can't trace calls with too few args requiring argv move");
12049 // Generate a type map for the outgoing frame and stash it in the LIR
12050 unsigned stackSlots
= NativeStackSlots(cx
, 0 /* callDepth */);
12051 FrameInfo
* fi
= (FrameInfo
*)
12052 traceMonitor
->tempAlloc
->alloc(sizeof(FrameInfo
) + stackSlots
* sizeof(JSTraceType
));
// Type map is laid out immediately after the FrameInfo header.
12053 JSTraceType
* typemap
= (JSTraceType
*)(fi
+ 1);
12055 DetermineTypesVisitor
detVisitor(*this, typemap
);
12056 VisitStackSlots(detVisitor
, cx
, 0);
12058 JS_ASSERT(argc
< FrameInfo::CONSTRUCTING_FLAG
);
// Root the callee (and any active block object) for the trace's lifetime.
12060 treeInfo
->gcthings
.addUnique(fval
);
12061 fi
->block
= fp
->blockChain
;
12062 if (fp
->blockChain
)
12063 treeInfo
->gcthings
.addUnique(OBJECT_TO_JSVAL(fp
->blockChain
));
12064 fi
->pc
= fp
->regs
->pc
;
12065 fi
->imacpc
= fp
->imacpc
;
12066 fi
->spdist
= fp
->regs
->sp
- fp
->slots
;
12067 fi
->set_argc(argc
, constructing
);
12068 fi
->callerHeight
= NativeStackSlots(cx
, 0) - (2 + argc
);
12069 fi
->callerArgc
= fp
->argc
;
12071 unsigned callDepth
= getCallDepth();
12072 if (callDepth
>= treeInfo
->maxCallDepth
)
12073 treeInfo
->maxCallDepth
= callDepth
+ 1;
// Deduplicate identical FrameInfos via the frame cache.
12075 fi
= traceMonitor
->frameCache
->memoize(fi
);
12077 RETURN_STOP("out of memory");
12078 lir
->insStorei(INS_CONSTPTR(fi
), lirbuf
->rp
, callDepth
* sizeof(FrameInfo
*));
12080 #if defined JS_JIT_SPEW
12081 debug_only_printf(LC_TMTracer
, "iFC frameinfo=%p, stack=%d, map=", (void*)fi
,
12083 for (unsigned i
= 0; i
< fi
->callerHeight
; i
++)
12084 debug_only_printf(LC_TMTracer
, "%c", typeChar
[fi
->get_typemap()[i
]]);
12085 debug_only_print0(LC_TMTracer
, "\n");
// Subsequent bytecode decoding uses the callee script's atom map.
12088 atoms
= fun
->u
.i
.script
->atomMap
.vector
;
12089 return RECORD_CONTINUE
;
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The tail of the functionCall(...) argument list
// (mode selection and closing parens) is missing from this extraction.
//
// Record JSOP_CALL: delegate to functionCall with the bytecode's argc; the
// imacpc check distinguishes calls re-entered from the apply imacros.
12092 JS_REQUIRES_STACK AbortableRecordingStatus
12093 TraceRecorder::record_JSOP_CALL()
12095 uintN argc
= GET_ARGC(cx
->fp
->regs
->pc
);
12096 cx
->fp
->assertValidStackDepth(argc
+ 2);
12097 return InjectStatus(functionCall(argc
,
12098 (cx
->fp
->imacpc
&& *cx
->fp
->imacpc
== JSOP_APPLY
)
// NOTE(review): extraction-garbled text; code preserved byte-for-byte,
// comments only added.  The closing "};" of each array is missing from this
// extraction.
//
// Imacro dispatch tables for Function.prototype.apply/call, indexed by the
// number of arguments being forwarded (0..8); used by record_JSOP_APPLY.
12103 static jsbytecode
* apply_imacro_table
[] = {
12104 apply_imacros
.apply0
,
12105 apply_imacros
.apply1
,
12106 apply_imacros
.apply2
,
12107 apply_imacros
.apply3
,
12108 apply_imacros
.apply4
,
12109 apply_imacros
.apply5
,
12110 apply_imacros
.apply6
,
12111 apply_imacros
.apply7
,
12112 apply_imacros
.apply8
12115 static jsbytecode
* call_imacro_table
[] = {
12116 apply_imacros
.call0
,
12117 apply_imacros
.call1
,
12118 apply_imacros
.call2
,
12119 apply_imacros
.call3
,
12120 apply_imacros
.call4
,
12121 apply_imacros
.call5
,
12122 apply_imacros
.call6
,
12123 apply_imacros
.call7
,
12124 apply_imacros
.call8
/*
 * Record a JSOP_APPLY. Fast-native fun.apply/fun.call are rewritten into
 * fixed-arity imacros (apply_imacro_table / call_imacro_table); everything
 * else falls back to the generic record_JSOP_CALL() path.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_APPLY()
    JSStackFrame* fp = cx->fp;
    jsbytecode *pc = fp->regs->pc;
    uintN argc = GET_ARGC(pc);
    cx->fp->assertValidStackDepth(argc + 2);
    /* vp[0] = callee, vp[1] = |this| (the applied function), vp[2..] = args. */
    jsval* vp = fp->regs->sp - (argc + 2);
    JSObject* aobj = NULL;
    LIns* aobj_ins = NULL;
    JS_ASSERT(!fp->imacpc);
    /* Not a function at all: let the generic call recorder deal with it. */
    if (!VALUE_IS_FUNCTION(cx, vp[0]))
        return record_JSOP_CALL();
    RETURN_IF_XML_A(vp[0]);
    JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
    if (FUN_INTERPRETED(fun))
        return record_JSOP_CALL();
    /* Only the built-in fun.apply / fun.call fast natives get the imacro path. */
    bool apply = (JSFastNative)fun->u.n.native == js_fun_apply;
    if (!apply && (JSFastNative)fun->u.n.native != js_fun_call)
        return record_JSOP_CALL();
    /*
     * We don't trace apply and call with a primitive 'this', which is the
     * first positional parameter.
     */
    if (argc > 0 && !JSVAL_IS_OBJECT(vp[2]))
        return record_JSOP_CALL();
    /*
     * Guard on the identity of this, which is the function we are applying.
     */
    if (!VALUE_IS_FUNCTION(cx, vp[1]))
        RETURN_STOP_A("callee is not a function");
    CHECK_STATUS_A(guardCallee(vp[1]));
    if (apply && argc >= 2) {
        /* NOTE(review): the guard condition before this abort is elided in this view. */
        RETURN_STOP_A("apply with excess arguments");
        if (JSVAL_IS_PRIMITIVE(vp[3]))
            RETURN_STOP_A("arguments parameter of apply is primitive");
        aobj = JSVAL_TO_OBJECT(vp[3]);
        aobj_ins = get(&vp[3]);
        /*
         * We trace dense arrays and arguments objects. The code we generate
         * for apply uses imacros to handle a specific number of arguments.
         */
        if (OBJ_IS_DENSE_ARRAY(cx, aobj)) {
            guardDenseArray(aobj, aobj_ins);
            /* Snapshot the length now; the guard below pins it on trace. */
            length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
            p2i(stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH)),
        } else if (OBJ_GET_CLASS(cx, aobj) == &js_ArgumentsClass) {
            JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
            /* NOTE(review): the null-check guarding this abort is elided in this view. */
            RETURN_STOP_A("can't reach arguments object's frame");
            length = afp->argc;
            RETURN_STOP_A("arguments parameter of apply is not a dense array or argments object");
        /* One imacro per arity; anything larger cannot be traced. */
        if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
            RETURN_STOP_A("too many arguments to apply");
        return InjectStatus(call_imacro(apply_imacro_table[length]));
    if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
        RETURN_STOP_A("too many arguments to call");
    return InjectStatus(call_imacro(call_imacro_table[argc]));
/*
 * Traceable-native helper: if the preceding native call failed because a
 * StopIteration exception was thrown, swallow the exception (clear
 * cx->throwing and cx->exception).  Called from trace via
 * CatchStopIteration_tn_ci.
 */
static JSBool FASTCALL
CatchStopIteration_tn(JSContext* cx, JSBool ok, jsval* vp)
    if (!ok && cx->throwing && js_ValueIsStopIteration(cx->exception)) {
        cx->throwing = JS_FALSE;
        cx->exception = JSVAL_VOID;
/* Trace-call descriptor for CatchStopIteration_tn: (CONTEXT, BOOL, JSVALPTR) -> BOOL. */
JS_DEFINE_TRCINFO_1(CatchStopIteration_tn,
    (3, (static, BOOL, CatchStopIteration_tn, CONTEXT, BOOL, JSVALPTR, 0, 0)))
/*
 * Finish recording a call to a specialized (traceable) native: propagate the
 * native's error status into the builtin status word, and unbox/convert the
 * returned value as required by the pending specialized native's flags.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_NativeCallComplete()
    if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
        return ARECORD_CONTINUE;
    jsbytecode* pc = cx->fp->regs->pc;
    JS_ASSERT(pendingSpecializedNative);
    JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW || *pc == JSOP_SETPROP);
    jsval& v = stackval(-1);
    LIns* v_ins = get(&v);
    /*
     * At this point the generated code has already called the native function
     * and we can no longer fail back to the original pc location (JSOP_CALL)
     * because that would cause the interpreter to re-execute the native
     * function, which might have side effects.
     *
     * Instead, the snapshot() call below sees that we are currently parked on
     * a traceable native's JSOP_CALL instruction, and it will advance the pc
     * to restore by the length of the current opcode. If the native's return
     * type is jsval, snapshot() will also indicate in the type map that the
     * element on top of the stack is a boxed value which doesn't need to be
     * boxed if the type guard generated by unbox_jsval() fails.
     */
    if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
        /* Keep cx->bailExit null when it's invalid. */
        lir->insStorei(INS_NULL(), cx_ins, (int) offsetof(JSContext, bailExit));
        LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
        if (pendingSpecializedNative == &generatedSpecializedNative) {
            LIns* ok_ins = v_ins;
            /*
             * Custom implementations of Iterator.next() throw a StopIteration exception.
             * Catch and clear it and set the return value to JSVAL_HOLE in this case.
             */
            if (uintptr_t(pc - nextiter_imacros.custom_iter_next) <
                sizeof(nextiter_imacros.custom_iter_next)) {
                LIns* args[] = { native_rval_ins, ok_ins, cx_ins }; /* reverse order */
                ok_ins = lir->insCall(&CatchStopIteration_tn_ci, args);
            /*
             * If we run a generic traceable native, the return value is in the argument
             * vector for native function calls. The actual return value of the native is a JSBool
             * indicating the error status.
             */
            v_ins = lir->insLoad(LIR_ldp, native_rval_ins, 0);
            if (*pc == JSOP_NEW) {
                /* For JSOP_NEW, substitute the preallocated object unless the
                   native returned a tagged (non-object) value. */
                LIns* x = lir->ins_peq0(lir->ins2(LIR_piand, v_ins, INS_CONSTWORD(JSVAL_TAGMASK)));
                x = lir->ins_choose(x, v_ins, INS_CONSTWORD(0));
                v_ins = lir->ins_choose(lir->ins_peq0(x), newobj_ins, x);
            propagateFailureToBuiltinStatus(ok_ins, status);
        /* Exit trace if the builtin reported failure. */
        guard(true, lir->ins_eq0(status), STATUS_EXIT);
    if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
        /*
         * If we side exit on the unboxing code due to a type change, make sure that the boxed
         * value is actually currently associated with that location, and that we are talking
         * about the top of the stack here, which is where we expected boxed values.
         */
        JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins);
        set(&v, unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT)));
    } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
        /* Already added i2f in functionCall. */
        JS_ASSERT(JSVAL_IS_NUMBER(v));
        /* Convert the result to double if the builtin returns int32. */
        if (JSVAL_IS_NUMBER(v) &&
            (pendingSpecializedNative->builtin->_argtypes & ARGSIZE_MASK_ANY) == ARGSIZE_I) {
            set(&v, lir->ins1(LIR_i2f, v_ins));
    // We'll null pendingSpecializedNative in monitorRecording, on the next op
    // cycle. There must be a next op since the stack is non-empty.
    return ARECORD_CONTINUE;
/*
 * Resolve a name for the current op.  Non-global scope chains are delegated
 * to scopeChainProp(); for the global object the property cache is consulted
 * and |vp| is pointed at the global slot.  Out-params: vp (slot address),
 * ins (traced value), nr (name result descriptor).
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::name(jsval*& vp, LIns*& ins, NameResult& nr)
    JSObject* obj = cx->fp->scopeChain;
    if (obj != globalObj)
        return scopeChainProp(obj, vp, ins, nr);
    /* Can't use prop here, because we don't want unboxing from global slots. */
    LIns* obj_ins = scopeChain();
    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
    /* Abort if property doesn't exist (interpreter will report an error.) */
    if (PCVAL_IS_NULL(pcval))
        RETURN_STOP_A("named property not found");
    /* Insist on obj being the directly addressed object. */
        RETURN_STOP_A("name() hit prototype chain");
    /* Don't trace getter or setter calls, our caller wants a direct slot. */
    if (PCVAL_IS_SPROP(pcval)) {
        JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
        if (!isValidSlot(OBJ_SCOPE(obj), sprop))
            RETURN_STOP_A("name() not accessing a valid slot");
        slot = sprop->slot;
        if (!PCVAL_IS_SLOT(pcval))
            RETURN_STOP_A("PCE is not a slot");
        slot = PCVAL_TO_SLOT(pcval);
    /* Globals must be imported into the tracker before they can be used. */
    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");
    vp = &STOBJ_GET_SLOT(obj, slot);
    return ARECORD_CONTINUE;
/*
 * Deep-bail helper called from trace: apply the scope's method read barrier
 * to |funobj| (a joined function object stored as a method of |obj| under
 * |sprop|) and return the possibly-cloned function object.  |funobj| is
 * rooted across the barrier call.
 */
static JSObject* FASTCALL
MethodReadBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj)
    JSAutoTempValueRooter tvr(cx, funobj);
    /* NOTE(review): the failure-return line after this call is elided in this view. */
    if (!OBJ_SCOPE(obj)->methodReadBarrier(cx, sprop, tvr.addr()))
    JS_ASSERT(VALUE_IS_FUNCTION(cx, tvr.value()));
    return JSVAL_TO_OBJECT(tvr.value());
/* Call descriptor for MethodReadBarrier: (CONTEXT, OBJECT, SCOPEPROP, OBJECT) -> OBJECT_FAIL. */
JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT,
/*
 * Get a property. The current opcode has JOF_ATOM.
 *
 * There are two modes. The caller must pass nonnull pointers for either outp
 * or both slotp and v_insp. In the latter case, we require a plain old
 * property with a slot; if the property turns out to be anything else, abort
 * tracing (rather than emit a call to a native getter or GetAnyProperty).
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, jsval *outp)
    JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));
    /*
     * Can't specialize to assert obj != global, must guard to avoid aliasing
     * stale homes of stacked global variables.
     */
    CHECK_STATUS_A(guardNotGlobalObject(obj, obj_ins));
    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
    /* Check for non-existent property reference, which results in undefined. */
    const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
    if (PCVAL_IS_NULL(pcval)) {
        RETURN_STOP_A("property not found");
        /*
         * We could specialize to guard on just JSClass.getProperty, but a mere
         * class guard is simpler and slightly faster.
         */
        if (OBJ_GET_CLASS(cx, obj)->getProperty != JS_PropertyStub) {
            RETURN_STOP_A("can't trace through access to undefined property if "
                          "JSClass.getProperty hook isn't stubbed");
        guardClass(obj, obj_ins, OBJ_GET_CLASS(cx, obj), snapshot(MISMATCH_EXIT));
        /*
         * This trace will be valid as long as neither the object nor any object
         * on its prototype chain changes shape.
         *
         * FIXME: This loop can become a single shape guard once bug 497789 has
         */
        VMSideExit* exit = snapshot(BRANCH_EXIT);
            LIns* map_ins = map(obj_ins);
            if (map_is_native(obj->map, map_ins, ops_ins)) {
                CHECK_STATUS_A(InjectStatus(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)",
            } else if (!guardDenseArray(obj, obj_ins, exit)) {
                RETURN_STOP_A("non-native object involved in undefined property access");
        } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));
        /* Missing property: the result of the get is undefined. */
        set(outp, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)), true);
        return ARECORD_CONTINUE;
    uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
    JS_ASSERT(!(cs.format & JOF_SET));
    JSScopeProperty* sprop;
    if (PCVAL_IS_SPROP(pcval)) {
        sprop = PCVAL_TO_SPROP(pcval);
        JS_ASSERT(OBJ_SCOPE(obj2)->has(sprop));
        /* Mutating opcodes (inc/dec, for) need a plain writable slot. */
        if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
            RETURN_STOP_A("non-stub setter");
        if (setflags && (sprop->attrs & JSPROP_READONLY))
            RETURN_STOP_A("writing to a readonly property");
        if (!SPROP_HAS_STUB_GETTER_OR_IS_METHOD(sprop)) {
            RETURN_STOP_A("can't trace non-stub getter for this opcode");
            if (sprop->attrs & JSPROP_GETTER)
                RETURN_STOP_A("script getter");
            if (sprop->slot == SPROP_INVALID_SLOT)
                return InjectStatus(getPropertyWithNativeGetter(obj_ins, sprop, outp));
            return InjectStatus(getPropertyById(obj_ins, outp));
        if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj2)))
            RETURN_STOP_A("no valid slot");
        slot = sprop->slot;
        isMethod = sprop->isMethod();
        JS_ASSERT_IF(isMethod, OBJ_SCOPE(obj2)->hasMethodBarrier());
        if (!PCVAL_IS_SLOT(pcval))
            RETURN_STOP_A("PCE is not a slot");
        slot = PCVAL_TO_SLOT(pcval);
    /* We have a slot. Check whether it is direct or in a prototype. */
        RETURN_STOP_A("JOF_INCDEC|JOF_FOR opcode hit prototype chain");
        /*
         * We're getting a proto-property. Walk up the prototype chain emitting
         * proto slot loads, updating obj as we go, leaving obj set to obj2 with
         * obj_ins the last proto-load.
         */
            obj_ins = stobj_get_proto(obj_ins);
            obj = STOBJ_GET_PROTO(obj);
        } while (obj != obj2);
    LIns* dslots_ins = NULL;
    /* Load and unbox the slot value; type change side-exits at BRANCH_EXIT. */
    LIns* v_ins = unbox_jsval(STOBJ_GET_SLOT(obj, slot),
                              stobj_get_slot(obj_ins, slot, dslots_ins),
                              snapshot(BRANCH_EXIT));
    /*
     * Joined function object stored as a method must be cloned when extracted
     * as a property value other than a callee. Note that shapes cover method
     * value as well as other property attributes and order, so this condition
     * is trace-invariant.
     *
     * We do not impose the method read barrier if in an imacro, assuming any
     * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
     * leaked to the calling script.
     */
    if (isMethod && !cx->fp->imacpc) {
        enterDeepBailCall();
        LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        v_ins = lir->insCall(&MethodReadBarrier_ci, args);
        leaveDeepBailCall();
    set(outp, v_ins, true);
    return ARECORD_CONTINUE;
/*
 * Emit LIR to read element |ival| of the dense array |oval|.  Out-of-bounds,
 * negative-index, null-dslots and over-capacity reads stay on trace and
 * yield undefined (after guarding that no prototype has indexed properties);
 * in-bounds reads load, unbox, and hole-filter the slot value.
 * Out-params: vp (slot address), v_ins (traced value), plus addr_ins.
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::denseArrayElement(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ins,
    JS_ASSERT(JSVAL_IS_OBJECT(oval) && JSVAL_IS_INT(ival));
    JSObject* obj = JSVAL_TO_OBJECT(oval);
    LIns* obj_ins = get(&oval);
    jsint idx = JSVAL_TO_INT(ival);
    LIns* idx_ins = makeNumberInt32(get(&ival));
    LIns* pidx_ins = lir->ins_u2p(idx_ins);
    VMSideExit* exit = snapshot(BRANCH_EXIT);
    /* check that the index is within bounds */
    LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
    jsuint capacity = js_DenseArrayCapacity(obj);
    bool within = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
    /* If idx < 0, stay on trace (and read value as undefined, since this is a dense array). */
    if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
        /* Only 64-bit machines support large enough arrays for this. */
        JS_ASSERT(sizeof(jsval) == 8);
        br1 = lir->insBranch(LIR_jt,
                             lir->ins2i(LIR_lt, idx_ins, 0),
    /* If not idx < length, stay on trace (and read value as undefined). */
    LIns* br2 = lir->insBranch(LIR_jf,
                               lir->ins2(LIR_pult,
                                         stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
    /* If dslots is NULL, stay on trace (and read value as undefined). */
    LIns* br3 = lir->insBranch(LIR_jt, lir->ins_peq0(dslots_ins), NULL);
    /* If not idx < capacity, stay on trace (and read value as undefined). */
    LIns* br4 = lir->insBranch(LIR_jf,
                               lir->ins2(LIR_pult,
                                         lir->insLoad(LIR_ldp,
                                                      -(int)sizeof(jsval))),
    /* Unconditional guard: only the branches above reach the label below. */
    lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
    LIns* label = lir->ins0(LIR_label);
    br1->setTarget(label);
    br2->setTarget(label);
    br3->setTarget(label);
    br4->setTarget(label);
    CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
    // Return undefined and indicate that we didn't actually read this (addr_ins).
    v_ins = lir->insImm(JSVAL_TO_SPECIAL(JSVAL_VOID));
    return RECORD_CONTINUE;
    /* Guard against negative index */
    if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32 && !idx_ins->isconst()) {
        /* Only 64-bit machines support large enough arrays for this. */
        JS_ASSERT(sizeof(jsval) == 8);
        lir->ins2i(LIR_lt, idx_ins, 0),
    /* Guard array length */
    lir->ins2(LIR_pult, pidx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
    /* dslots must not be NULL */
    lir->ins_peq0(dslots_ins),
    /* Guard array capacity */
    lir->ins2(LIR_pult,
              lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
    /* Load the value and guard on its type to unbox it. */
    vp = &obj->dslots[jsuint(idx)];
    /* addr = dslots + idx * sizeof(jsval), computed with a shift. */
    addr_ins = lir->ins2(LIR_piadd, dslots_ins,
                         lir->ins2i(LIR_pilsh, pidx_ins, (sizeof(jsval) == 4) ? 2 : 3));
    v_ins = unbox_jsval(*vp, lir->insLoad(LIR_ldp, addr_ins, 0), exit);
    if (JSVAL_IS_SPECIAL(*vp)) {
        /*
         * If we read a hole from the array, convert it to undefined and guard
         * that there are no indexed properties along the prototype chain.
         */
        LIns* br = lir->insBranch(LIR_jf,
                                  lir->ins2i(LIR_eq, v_ins, JSVAL_TO_SPECIAL(JSVAL_HOLE)),
        CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, MISMATCH_EXIT));
        br->setTarget(lir->ins0(LIR_label));
        /* Don't let the hole value escape. Turn it into an undefined. */
        v_ins = lir->ins2i(LIR_and, v_ins, ~(JSVAL_HOLE_FLAG >> JSVAL_TAGBITS));
    return RECORD_CONTINUE;
12638 JS_REQUIRES_STACK AbortableRecordingStatus
12639 TraceRecorder::getProp(JSObject
* obj
, LIns
* obj_ins
)
12641 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->fp
->regs
->pc
];
12642 JS_ASSERT(cs
.ndefs
== 1);
12643 return prop(obj
, obj_ins
, NULL
, NULL
, &stackval(-cs
.nuses
));
12646 JS_REQUIRES_STACK AbortableRecordingStatus
12647 TraceRecorder::getProp(jsval
& v
)
12649 if (JSVAL_IS_PRIMITIVE(v
))
12650 RETURN_STOP_A("primitive lhs");
12652 return getProp(JSVAL_TO_OBJECT(v
), get(&v
));
/*
 * Record JSOP_NAME: resolve the name via name() and push its traced value.
 * NOTE(review): the local declarations (vp, v_ins, nr) and the final stack
 * push are elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NAME()
    CHECK_STATUS_A(name(vp, v_ins, nr));
    return ARECORD_CONTINUE;
12666 JS_REQUIRES_STACK AbortableRecordingStatus
12667 TraceRecorder::record_JSOP_DOUBLE()
12669 jsval v
= jsval(atoms
[GET_INDEX(cx
->fp
->regs
->pc
)]);
12670 stack(0, lir
->insImmf(*JSVAL_TO_DOUBLE(v
)));
12671 return ARECORD_CONTINUE
;
12674 JS_REQUIRES_STACK AbortableRecordingStatus
12675 TraceRecorder::record_JSOP_STRING()
12677 JSAtom
* atom
= atoms
[GET_INDEX(cx
->fp
->regs
->pc
)];
12678 JS_ASSERT(ATOM_IS_STRING(atom
));
12679 stack(0, INS_ATOM(atom
));
12680 return ARECORD_CONTINUE
;
12683 JS_REQUIRES_STACK AbortableRecordingStatus
12684 TraceRecorder::record_JSOP_ZERO()
12686 stack(0, lir
->insImmf(0));
12687 return ARECORD_CONTINUE
;
12690 JS_REQUIRES_STACK AbortableRecordingStatus
12691 TraceRecorder::record_JSOP_ONE()
12693 stack(0, lir
->insImmf(1));
12694 return ARECORD_CONTINUE
;
12697 JS_REQUIRES_STACK AbortableRecordingStatus
12698 TraceRecorder::record_JSOP_NULL()
12700 stack(0, INS_NULL());
12701 return ARECORD_CONTINUE
;
/*
 * Record JSOP_THIS: compute the traced |this| value and push it.
 * NOTE(review): the declaration of this_ins is elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THIS()
    CHECK_STATUS_A(getThis(this_ins));
    stack(0, this_ins);
    return ARECORD_CONTINUE;
12713 JS_REQUIRES_STACK AbortableRecordingStatus
12714 TraceRecorder::record_JSOP_FALSE()
12716 stack(0, lir
->insImm(0));
12717 return ARECORD_CONTINUE
;
12720 JS_REQUIRES_STACK AbortableRecordingStatus
12721 TraceRecorder::record_JSOP_TRUE()
12723 stack(0, lir
->insImm(1));
12724 return ARECORD_CONTINUE
;
/*
 * Record JSOP_OR (short-circuiting ||).
 * NOTE(review): the function body is elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OR()
/*
 * Record JSOP_AND (short-circuiting &&).
 * NOTE(review): the function body is elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_AND()
/*
 * Record JSOP_TABLESWITCH.  On IA32 a jump table is prepared; otherwise the
 * generic switch guard is emitted.
 * NOTE(review): the #else/#endif of this conditional are elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCH()
#ifdef NANOJIT_IA32
    /* Handle tableswitches specially -- prepare a jump table if needed. */
    return tableswitch();
    return InjectStatus(switchop());
12750 JS_REQUIRES_STACK AbortableRecordingStatus
12751 TraceRecorder::record_JSOP_LOOKUPSWITCH()
12753 return InjectStatus(switchop());
12756 JS_REQUIRES_STACK AbortableRecordingStatus
12757 TraceRecorder::record_JSOP_STRICTEQ()
12759 strictEquality(true, false);
12760 return ARECORD_CONTINUE
;
12763 JS_REQUIRES_STACK AbortableRecordingStatus
12764 TraceRecorder::record_JSOP_STRICTNE()
12766 strictEquality(false, false);
12767 return ARECORD_CONTINUE
;
/*
 * Record JSOP_OBJECT: push the script's preallocated object literal as a
 * constant.  NOTE(review): the declaration of obj is elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_OBJECT()
    JSStackFrame* fp = cx->fp;
    JSScript* script = fp->script;
    /* atoms may point into the middle of the atom map; rebase the index. */
    unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);
    obj = script->getObject(index);
    stack(0, INS_CONSTOBJ(obj));
    return ARECORD_CONTINUE;
12783 JS_REQUIRES_STACK AbortableRecordingStatus
12784 TraceRecorder::record_JSOP_POP()
12786 return ARECORD_CONTINUE
;
12789 JS_REQUIRES_STACK AbortableRecordingStatus
12790 TraceRecorder::record_JSOP_TRAP()
12792 return ARECORD_STOP
;
12795 JS_REQUIRES_STACK AbortableRecordingStatus
12796 TraceRecorder::record_JSOP_GETARG()
12798 stack(0, arg(GET_ARGNO(cx
->fp
->regs
->pc
)));
12799 return ARECORD_CONTINUE
;
12802 JS_REQUIRES_STACK AbortableRecordingStatus
12803 TraceRecorder::record_JSOP_SETARG()
12805 arg(GET_ARGNO(cx
->fp
->regs
->pc
), stack(-1));
12806 return ARECORD_CONTINUE
;
12809 JS_REQUIRES_STACK AbortableRecordingStatus
12810 TraceRecorder::record_JSOP_GETLOCAL()
12812 stack(0, var(GET_SLOTNO(cx
->fp
->regs
->pc
)));
12813 return ARECORD_CONTINUE
;
12816 JS_REQUIRES_STACK AbortableRecordingStatus
12817 TraceRecorder::record_JSOP_SETLOCAL()
12819 var(GET_SLOTNO(cx
->fp
->regs
->pc
), stack(-1));
12820 return ARECORD_CONTINUE
;
12823 JS_REQUIRES_STACK AbortableRecordingStatus
12824 TraceRecorder::record_JSOP_UINT16()
12826 stack(0, lir
->insImmf(GET_UINT16(cx
->fp
->regs
->pc
)));
12827 return ARECORD_CONTINUE
;
/*
 * Record JSOP_NEWINIT: emit a call that allocates a fresh empty Array or
 * Object (selected by the int8 proto-key operand) and guard against OOM.
 * NOTE(review): the declaration of proto_ins and the final stack push are
 * elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEWINIT()
    JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
    CHECK_STATUS_A(getClassPrototype(key, proto_ins));
    LIns* args[] = { proto_ins, cx_ins };
    /* Pick the allocator matching the literal kind. */
    const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci : &js_Object_tn_ci;
    LIns* v_ins = lir->insCall(ci, args);
    /* A null result means allocation failed: leave trace via OOM_EXIT. */
    guard(false, lir->ins_peq0(v_ins), OOM_EXIT);
    return ARECORD_CONTINUE;
/*
 * Record JSOP_ENDINIT: the literal being initialized must still be a
 * non-primitive on top of the stack; nothing to emit.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDINIT()
    jsval& v = stackval(-1);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
    return ARECORD_CONTINUE;
12855 JS_REQUIRES_STACK AbortableRecordingStatus
12856 TraceRecorder::record_JSOP_INITPROP()
12858 // All the action is in record_SetPropHit.
12859 return ARECORD_CONTINUE
;
12862 JS_REQUIRES_STACK AbortableRecordingStatus
12863 TraceRecorder::record_JSOP_INITELEM()
12865 return record_JSOP_SETELEM();
12868 JS_REQUIRES_STACK AbortableRecordingStatus
12869 TraceRecorder::record_JSOP_DEFSHARP()
12871 return ARECORD_STOP
;
12874 JS_REQUIRES_STACK AbortableRecordingStatus
12875 TraceRecorder::record_JSOP_USESHARP()
12877 return ARECORD_STOP
;
12880 JS_REQUIRES_STACK AbortableRecordingStatus
12881 TraceRecorder::record_JSOP_INCARG()
12883 return InjectStatus(inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), 1));
12886 JS_REQUIRES_STACK AbortableRecordingStatus
12887 TraceRecorder::record_JSOP_INCLOCAL()
12889 return InjectStatus(inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), 1));
12892 JS_REQUIRES_STACK AbortableRecordingStatus
12893 TraceRecorder::record_JSOP_DECARG()
12895 return InjectStatus(inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), -1));
12898 JS_REQUIRES_STACK AbortableRecordingStatus
12899 TraceRecorder::record_JSOP_DECLOCAL()
12901 return InjectStatus(inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), -1));
12904 JS_REQUIRES_STACK AbortableRecordingStatus
12905 TraceRecorder::record_JSOP_ARGINC()
12907 return InjectStatus(inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), 1, false));
12910 JS_REQUIRES_STACK AbortableRecordingStatus
12911 TraceRecorder::record_JSOP_LOCALINC()
12913 return InjectStatus(inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), 1, false));
12916 JS_REQUIRES_STACK AbortableRecordingStatus
12917 TraceRecorder::record_JSOP_ARGDEC()
12919 return InjectStatus(inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), -1, false));
12922 JS_REQUIRES_STACK AbortableRecordingStatus
12923 TraceRecorder::record_JSOP_LOCALDEC()
12925 return InjectStatus(inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), -1, false));
12928 JS_REQUIRES_STACK AbortableRecordingStatus
12929 TraceRecorder::record_JSOP_IMACOP()
12931 JS_ASSERT(cx
->fp
->imacpc
);
12932 return ARECORD_CONTINUE
;
/*
 * Record JSOP_ITER: dispatch to the iterator imacro matching the iteration
 * flags, choosing the custom-iterator or native-iterator family depending on
 * whether the object has its own iterator method.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ITER()
    jsval& v = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(v))
        RETURN_STOP_A("for-in on a primitive value");
    RETURN_IF_XML_A(v);
    /* JSITER_* flags are encoded in the byte following the opcode. */
    jsuint flags = cx->fp->regs->pc[1];
    if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
        if (flags == JSITER_ENUMERATE)
            return InjectStatus(call_imacro(iter_imacros.for_in));
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return InjectStatus(call_imacro(iter_imacros.for_each));
        if (flags == JSITER_ENUMERATE)
            return InjectStatus(call_imacro(iter_imacros.for_in_native));
        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
            return InjectStatus(call_imacro(iter_imacros.for_each_native));
    RETURN_STOP_A("unimplemented JSITER_* flags");
/*
 * Record JSOP_NEXTITER: guard the iterator object's class and dispatch to the
 * native-iterator imacro for Iterator/Generator instances, or the custom
 * imacro otherwise.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NEXTITER()
    jsval& iterobj_val = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(iterobj_val))
        RETURN_STOP_A("for-in on a primitive value");
    RETURN_IF_XML_A(iterobj_val);
    JSObject* iterobj = JSVAL_TO_OBJECT(iterobj_val);
    JSClass* clasp = STOBJ_GET_CLASS(iterobj);
    LIns* iterobj_ins = get(&iterobj_val);
    if (clasp == &js_IteratorClass || clasp == &js_GeneratorClass) {
        /* Pin the class so the native fast path stays valid on trace. */
        guardClass(iterobj, iterobj_ins, clasp, snapshot(BRANCH_EXIT));
        return InjectStatus(call_imacro(nextiter_imacros.native_iter_next));
    return InjectStatus(call_imacro(nextiter_imacros.custom_iter_next));
12976 JS_REQUIRES_STACK AbortableRecordingStatus
12977 TraceRecorder::record_JSOP_ENDITER()
12979 LIns
* args
[] = { stack(-2), cx_ins
};
12980 LIns
* ok_ins
= lir
->insCall(&js_CloseIterator_ci
, args
);
12981 guard(false, lir
->ins_eq0(ok_ins
), MISMATCH_EXIT
);
12982 return ARECORD_CONTINUE
;
/*
 * Record JSOP_FORNAME: resolve the loop variable via name() and store the
 * current iteration value into it.
 * NOTE(review): the local declarations (vp, x_ins, nr) and the condition
 * guarding the abort below are elided in this view.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FORNAME()
    CHECK_STATUS_A(name(vp, x_ins, nr));
    RETURN_STOP_A("forname on non-tracked value not supported");
    set(vp, stack(-1));
    return ARECORD_CONTINUE;
12998 JS_REQUIRES_STACK AbortableRecordingStatus
12999 TraceRecorder::record_JSOP_FORPROP()
13001 return ARECORD_STOP
;
13004 JS_REQUIRES_STACK AbortableRecordingStatus
13005 TraceRecorder::record_JSOP_FORELEM()
13007 return record_JSOP_DUP();
13010 JS_REQUIRES_STACK AbortableRecordingStatus
13011 TraceRecorder::record_JSOP_FORARG()
13013 return record_JSOP_SETARG();
13016 JS_REQUIRES_STACK AbortableRecordingStatus
13017 TraceRecorder::record_JSOP_FORLOCAL()
13019 return record_JSOP_SETLOCAL();
13022 JS_REQUIRES_STACK AbortableRecordingStatus
13023 TraceRecorder::record_JSOP_POPN()
13025 return ARECORD_CONTINUE
;
/*
 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The generated code
 * also ensures that any call objects found have not changed shape.
 *
 *   obj               starting object
 *   obj_ins           LIR instruction representing obj
 *   obj2              end object for traversal
 *   obj2_ins          [out] LIR instruction representing obj2
 */
JS_REQUIRES_STACK RecordingStatus
TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *obj2, LIns *&obj2_ins)
    VMSideExit* exit = NULL;
    if (obj != globalObj) {
        if (!js_IsCacheableNonGlobalScope(obj))
            RETURN_STOP("scope chain lookup crosses non-cacheable object");
        // We must guard on the shape of all call objects for heavyweight functions
        // that we traverse on the scope chain: if the shape changes, a variable with
        // the same name may have been inserted in the scope chain.
        if (STOBJ_GET_CLASS(obj) == &js_CallClass &&
            JSFUN_HEAVYWEIGHT_TEST(js_GetCallObjectFunction(obj)->flags)) {
            LIns* map_ins = map(obj_ins);
            LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
            /* Lazily take one snapshot shared by all shape guards on this walk. */
            exit = snapshot(BRANCH_EXIT);
            addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard_shape"),
        /* Step to the enclosing scope, both at record time and on trace. */
        obj = STOBJ_GET_PARENT(obj);
        RETURN_STOP("target object not reached on scope chain");
        obj_ins = stobj_get_parent(obj_ins);
    obj2_ins = obj_ins;
    return RECORD_CONTINUE;
/*
 * Record JSOP_BINDNAME: push the base object a subsequent SETNAME/NAME will
 * address — the global object in global code, or an object found by walking
 * the traced scope chain in lightweight function code.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDNAME()
    JSStackFrame *fp = cx->fp;
    obj = fp->scopeChain;
    // In global code, fp->scopeChain can only contain blocks whose values
    // are still on the stack. We never use BINDNAME to refer to these.
    while (OBJ_GET_CLASS(cx, obj) == &js_BlockClass) {
        // The block's values are still on the stack.
        JS_ASSERT(obj->getPrivate() == fp);
        obj = OBJ_GET_PARENT(cx, obj);
        // Blocks always have parents.
    if (obj != globalObj)
        RETURN_STOP_A("BINDNAME in global code resolved to non-global object");
    /*
     * The trace is specialized to this global object. Furthermore, we know it
     * is the sole 'global' object on the scope chain: we set globalObj to the
     * scope chain element with no parent, and we reached it starting from the
     * function closure or the current scopeChain, so there is nothing inner to
     * it. Therefore this must be the right base object.
     */
    stack(0, INS_CONSTOBJ(obj));
    return ARECORD_CONTINUE;
    // We can't trace BINDNAME in functions that contain direct calls to eval,
    // as they might add bindings which previously-traced references would have
    if (JSFUN_HEAVYWEIGHT_TEST(fp->fun->flags))
        RETURN_STOP_A("BINDNAME in heavyweight function.");
    // We don't have the scope chain on trace, so instead we get a start object
    // that is on the scope chain and doesn't skip the target object (the one
    // that contains the property).
    jsval *callee = &cx->fp->argv[-2];
    obj = STOBJ_GET_PARENT(JSVAL_TO_OBJECT(*callee));
    if (obj == globalObj) {
        stack(0, INS_CONSTOBJ(obj));
        return ARECORD_CONTINUE;
    LIns *obj_ins = stobj_get_parent(get(callee));
    // Find the target object.
    JSAtom *atom = atoms[GET_INDEX(cx->fp->regs->pc)];
    jsid id = ATOM_TO_JSID(atom);
    JSObject *obj2 = js_FindIdentifierBase(cx, fp->scopeChain, id);
    if (obj2 != globalObj && STOBJ_GET_CLASS(obj2) != &js_CallClass)
        RETURN_STOP_A("BINDNAME on non-global, non-call object");
    // Generate LIR to get to the target object from the start object.
    CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));
    // If |obj2| is the global object, we can refer to it directly instead of walking up
    // the scope chain. There may still be guards on intervening call objects.
    stack(0, obj2 == globalObj ? INS_CONSTOBJ(obj2) : obj2_ins);
    return ARECORD_CONTINUE;
/*
 * Record JSOP_SETNAME: verify the bound base object (left operand) is one we
 * can trace; the actual store is recorded later in record_SetPropHit.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETNAME()
    jsval& l = stackval(-2);
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
    /*
     * Trace only cases that are global code, in lightweight functions
     * scoped by the global object only, or in call objects.
     */
    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (OBJ_GET_CLASS(cx, obj) == &js_CallClass)
        return ARECORD_CONTINUE;
    if (obj != cx->fp->scopeChain || obj != globalObj)
        RETURN_STOP_A("JSOP_SETNAME left operand is not the global object");
    // The rest of the work is in record_SetPropHit.
    return ARECORD_CONTINUE;
13162 JS_REQUIRES_STACK AbortableRecordingStatus
13163 TraceRecorder::record_JSOP_THROW()
13165 return ARECORD_STOP
;
/*
 * Record JSOP_IN: emit a has-property call for an int or string key, then do
 * the lookup at record time as well so the result can be fused into the
 * following branch.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IN()
    jsval& rval = stackval(-1);
    jsval& lval = stackval(-2);
    if (JSVAL_IS_PRIMITIVE(rval))
        RETURN_STOP_A("JSOP_IN on non-object right operand");
    JSObject* obj = JSVAL_TO_OBJECT(rval);
    LIns* obj_ins = get(&rval);
    if (JSVAL_IS_INT(lval)) {
        id = INT_JSVAL_TO_JSID(lval);
        LIns* args[] = { makeNumberInt32(get(&lval)), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
    } else if (JSVAL_IS_STRING(lval)) {
        if (!js_ValueToStringId(cx, lval, &id))
            RETURN_ERROR_A("left operand of JSOP_IN didn't convert to a string-id");
        LIns* args[] = { get(&lval), obj_ins, cx_ins };
        x = lir->insCall(&js_HasNamedProperty_ci, args);
    RETURN_STOP_A("string or integer expected");
    /* JSVAL_VOID from the helper signals an error (e.g. OOM): exit trace. */
    guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_SPECIAL(JSVAL_VOID)), OOM_EXIT);
    x = lir->ins2i(LIR_eq, x, 1);
    /* Cache these locally: the lookup below may tear down this recorder. */
    JSTraceMonitor &localtm = *traceMonitor;
    JSContext *localcx = cx;
    bool ok = obj->lookupProperty(cx, id, &obj2, &prop);
    /* lookupProperty can reenter the interpreter and kill |this|. */
    if (!localtm.recorder) {
        obj2->dropProperty(localcx, prop);
        return ARECORD_STOP;
    RETURN_ERROR_A("obj->lookupProperty failed in JSOP_IN");
    bool cond = prop != NULL;
    obj2->dropProperty(cx, prop);
    /*
     * The interpreter fuses comparisons and the following branch, so we have
     * to do that here as well.
     */
    fuseIf(cx->fp->regs->pc + 1, cond, x);
    /*
     * We update the stack after the guard. This is safe since the guard bails
     * out at the comparison and the interpreter will therefore re-execute the
     * comparison. This way the value of the condition doesn't have to be
     * calculated and saved on the stack in most cases.
     */
    return ARECORD_CONTINUE;
/*
 * Trace-callable wrapper around the [[HasInstance]] hook of |ctor|. On hook
 * failure the builtin error flag is set so the pending guard exits the trace.
 */
static JSBool FASTCALL
HasInstance(JSContext* cx, JSObject* ctor, jsval val)
{
    JSBool result = JS_FALSE;
    if (!ctor->map->ops->hasInstance(cx, ctor, val, &result))
        js_SetBuiltinError(cx);
    return result;
}
JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstance, CONTEXT, OBJECT, JSVAL, 0, 0)
/*
 * JSOP_INSTANCEOF: call HasInstance via a deep-bail call and guard on the
 * builtin status word so a thrown TypeError exits the trace.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INSTANCEOF()
{
    // If the rhs isn't an object, we are headed for a TypeError.
    jsval& ctor = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(ctor))
        RETURN_STOP_A("non-object on rhs of instanceof");

    jsval& val = stackval(-2);
    LIns* val_ins = box_jsval(val, get(&val));

    enterDeepBailCall();
    LIns* args[] = {val_ins, get(&ctor), cx_ins};
    stack(-2, lir->insCall(&HasInstance_ci, args));
    LIns* status_ins = lir->insLoad(LIR_ld,
                                    lirbuf->state, /* NOTE(review): middle operand reconstructed — confirm against upstream */
                                    offsetof(InterpState, builtinStatus));
    pendingGuardCondition = lir->ins_eq0(status_ins);
    leaveDeepBailCall();

    return ARECORD_CONTINUE;
}
/*
 * The opcodes below either cannot be traced (ARECORD_STOP) or need no LIR
 * at all while recording (ARECORD_CONTINUE).
 */

/* debugger statements must run in the interpreter. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEBUGGER()
{
    return ARECORD_STOP;
}

/* try/finally subroutine calls are untraceable control flow. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETSUB()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EXCEPTION()
{
    return ARECORD_STOP;
}

/* Line-number bookkeeping has no runtime effect on trace. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LINENO()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CONDSWITCH()
{
    return ARECORD_CONTINUE;
}

/* Case dispatch records a fused strict-equality comparison. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASE()
{
    strictEquality(true, true);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULT()
{
    return ARECORD_CONTINUE;
}

/* eval can rewrite the world under us; abort. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_EVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMELEM()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETTER()
{
    return ARECORD_STOP;
}

/* Top-level function/var definitions mutate scopes; untraceable. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFFUN_FC()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFCONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFVAR()
{
    return ARECORD_STOP;
}
/*
 * Compute the full atom index at pc+pcoff, folding in any pending
 * JSOP_INDEXBASE adjustment recorded in |atoms|.
 */
jsatomid
TraceRecorder::getFullIndex(ptrdiff_t pcoff)
{
    jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff);
    // |atoms| may have been advanced past the script's atom vector by an
    // INDEXBASE prefix op; the difference is the segment base.
    index += atoms - cx->fp->script->atomMap.vector;
    return index;
}
/*
 * JSOP_LAMBDA: push a function object. Only null closures parented by the
 * global object can be cloned on trace.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA()
{
    JSFunction* fun;
    fun = cx->fp->script->getFunction(getFullIndex());

    /*
     * Emit code to clone a null closure parented by this recorder's global
     * object, in order to preserve function object evaluation rules observable
     * via identity and mutation. But don't clone if our result is consumed by
     * JSOP_SETMETHOD or JSOP_INITMETHOD, since we optimize away the clone for
     * these combinations and clone only if the "method value" escapes.
     */
    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        JSOp op2 = JSOp(cx->fp->regs->pc[JSOP_LAMBDA_LENGTH]);

        if (op2 == JSOP_SETMETHOD || op2 == JSOP_INITMETHOD) {
            // Method value doesn't escape: push the function itself, no clone.
            stack(0, INS_CONSTOBJ(FUN_OBJECT(fun)));
            return ARECORD_CONTINUE;
        }

        LIns *proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        stack(0, x);
        return ARECORD_CONTINUE;
    }
    return ARECORD_STOP;
}
/*
 * JSOP_LAMBDA_FC: clone a flat closure and copy its upvars into the clone's
 * dslots on trace.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LAMBDA_FC()
{
    JSFunction* fun;
    fun = cx->fp->script->getFunction(getFullIndex());

    // argv[-2] is the callee; its tracked value stands in for the scope chain.
    LIns* scopeChain_ins = get(&cx->fp->argv[-2]);
    JS_ASSERT(scopeChain_ins);

    LIns* args[] = {
        scopeChain_ins,
        INS_CONSTFUN(fun),
        cx_ins
    };
    LIns* call_ins = lir->insCall(&js_AllocFlatClosure_ci, args);
    guard(false,
          addName(lir->ins2(LIR_peq, call_ins, INS_NULL()),
                  "guard(js_AllocFlatClosure)"),
          OOM_EXIT);

    if (fun->u.i.nupvars) {
        JSUpvarArray *uva = fun->u.i.script->upvars();
        for (uint32 i = 0, n = uva->length; i < n; i++) {
            jsval v;
            LIns* upvar_ins = upvar(fun->u.i.script, uva, i, v);
            if (!upvar_ins)
                return ARECORD_STOP;
            LIns* dslots_ins = NULL;
            stobj_set_dslot(call_ins, i, dslots_ins, box_jsval(v, upvar_ins));
        }
    }

    stack(0, call_ins);
    return ARECORD_CONTINUE;
}
/* JSOP_CALLEE: push the current callee, tracked at argv[-2]. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLEE()
{
    stack(0, get(&cx->fp->argv[-2]));
    return ARECORD_CONTINUE;
}

/* JSOP_SETLOCALPOP: store top of stack into a local slot; pop is implicit. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFPRIMTOP()
{
    // Traces are type-specialized, including null vs. object, so we need do
    // nothing here. The upstream unbox_jsval called after valueOf or toString
    // from an imacro (e.g.) will fork the trace for us, allowing us to just
    // follow along mindlessly :-).
    return ARECORD_CONTINUE;
}
/* SETCALL always ends in a ReferenceError; not traceable. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETCALL()
{
    return ARECORD_STOP;
}

/* Entering a try block emits nothing; only throwing aborts. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TRY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FINALLY()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_NOP()
{
    return ARECORD_CONTINUE;
}
/*
 * JSOP_ARGSUB: arguments[i] with constant i. Traceable only for lightweight
 * functions; out-of-range indices yield undefined.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGSUB()
{
    JSStackFrame* fp = cx->fp;
    if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
        uintN slot = GET_ARGNO(fp->regs->pc);
        if (slot < fp->argc)
            stack(0, get(&cx->fp->argv[slot]));
        else
            stack(0, INS_VOID());
        return ARECORD_CONTINUE;
    }
    RETURN_STOP_A("can't trace JSOP_ARGSUB hard case");
}
/*
 * JSOP_ARGCNT: push arguments.length. On the entry frame we must verify at
 * run time that length was not overridden, since argc is baked into the trace.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARGCNT()
{
    if (cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)
        RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT");

    // argc is fixed on trace, so ideally we would simply generate LIR for
    // constant argc. But the user can mutate arguments.length in the
    // interpreter, so we have to check for that in the trace entry frame.
    // We also have to check that arguments.length has not been mutated
    // at record time, because if so we will generate incorrect constant
    // LIR, which will assert in alu().
    if (cx->fp->argsobj && js_IsOverriddenArgsLength(JSVAL_TO_OBJECT(cx->fp->argsobj)))
        RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
    LIns *a_ins = get(&cx->fp->argsobj);
    if (callDepth == 0) {
        // Skip the check when no arguments object has been created yet.
        LIns *br = lir->insBranch(LIR_jt, lir->ins_peq0(a_ins), NULL);

        // The following implements js_IsOverriddenArgsLength on trace.
        // The '2' bit is set if length was overridden.
        LIns *len_ins = stobj_get_fslot(a_ins, JSSLOT_ARGS_LENGTH);
        LIns *ovr_ins = lir->ins2(LIR_piand, len_ins, INS_CONSTWORD(2));
        guard(true, lir->ins_peq0(ovr_ins), snapshot(BRANCH_EXIT));
        LIns *label = lir->ins0(LIR_label);
        br->setTarget(label);
    }
    stack(0, lir->insImmf(cx->fp->argc));
    return ARECORD_CONTINUE;
}
/*
 * Shared helper for DEFLOCALFUN: clone a null closure into local |slot|.
 * Only global-parented null closures are traceable.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
{
    JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);

    if (FUN_NULL_CLOSURE(fun) && OBJ_GET_PARENT(cx, FUN_OBJECT(fun)) == globalObj) {
        LIns *proto_ins;
        CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));

        LIns* args[] = { INS_CONSTOBJ(globalObj), proto_ins, INS_CONSTFUN(fun), cx_ins };
        LIns* x = lir->insCall(&js_NewNullClosure_ci, args);
        var(slot, x);
        return ARECORD_CONTINUE;
    }

    return ARECORD_STOP;
}
/* The real work happens in record_DefLocalFunSetSlot, driven by the interpreter. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
{
    return ARECORD_CONTINUE;
}

/* The extended (X) jump forms share semantics with their short counterparts. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOTOX()
{
    return record_JSOP_GOTO();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFEQX()
{
    return record_JSOP_IFEQ();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CASEX()
{
    strictEquality(true, true);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFAULTX()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return record_JSOP_TABLESWITCH();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return InjectStatus(switchop());
}

/* Backpatch pseudo-ops are compiler bookkeeping; nothing to record. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH()
{
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return ARECORD_CONTINUE;
}
/* Exception-state plumbing cannot be traced. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_THROWING()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RETRVAL()
{
    return ARECORD_STOP;
}
/*
 * JSOP_GETGVAR: read a global variable through its cached slot. A null slot
 * value means the fast path is invalid and the interpreter falls back to
 * JSOP_NAME, which we will record instead.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
    return ARECORD_CONTINUE;
}
/* JSOP_SETGVAR: write a global variable through its cached slot (see GETGVAR). */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
    return ARECORD_CONTINUE;
}
/*
 * Global-variable increment/decrement family. The pre forms (INCGVAR/DECGVAR)
 * push the new value; the post forms (GVARINC/GVARDEC) push the old value
 * (third argument |false| to inc()).
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), 1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), -1));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARINC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), 1, false));
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARDEC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    return InjectStatus(inc(STOBJ_GET_SLOT(globalObj, slot), -1, false));
}
/* Regexp literals allocate fresh objects per evaluation; not traced. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_REGEXP()
{
    return ARECORD_STOP;
}

// begin JS_HAS_XML_SUPPORT
//
// All E4X (ECMA-357) opcodes abort recording: XML objects and their
// operations are handled only by the interpreter.

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ANYNAME()
{
    return ARECORD_STOP;
}

/* QNAMEPART is just a string literal push. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMEPART()
{
    return record_JSOP_STRING();
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAMECONST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_QNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLNAME()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_FILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENDFILTER()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLOBJECT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCDATA()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_XMLPI()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETFUNNS()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXML()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return ARECORD_STOP;
}

// end JS_HAS_XML_SUPPORT
/*
 * JSOP_CALLPROP: |obj.method(...)| callee lookup. For primitive receivers we
 * resolve the method on the matching prototype (String/Number/Boolean) and
 * keep the primitive itself as |this|.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLPROP()
{
    jsval& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    LIns* this_ins;
    if (!JSVAL_IS_PRIMITIVE(l)) {
        obj = JSVAL_TO_OBJECT(l);
        obj_ins = get(&l);
        this_ins = obj_ins; // |this| for subsequent call
    } else {
        jsint i;
        debug_only_stmt(const char* protoname = NULL;)
        if (JSVAL_IS_STRING(l)) {
            i = JSProto_String;
            debug_only_stmt(protoname = "String.prototype";)
        } else if (JSVAL_IS_NUMBER(l)) {
            i = JSProto_Number;
            debug_only_stmt(protoname = "Number.prototype";)
        } else if (JSVAL_IS_SPECIAL(l)) {
            if (l == JSVAL_VOID)
                RETURN_STOP_A("callprop on void");
            // Guard that the special value stays non-void on trace.
            guard(false, lir->ins2i(LIR_eq, get(&l), JSVAL_TO_SPECIAL(JSVAL_VOID)), MISMATCH_EXIT);
            i = JSProto_Boolean;
            debug_only_stmt(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
            RETURN_STOP_A("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
            RETURN_ERROR_A("GetClassPrototype failed!");

        obj_ins = INS_CONSTOBJ(obj);
        debug_only_stmt(obj_ins = addName(obj_ins, protoname);)
        this_ins = get(&l); // use primitive as |this|
    }

    JSObject* obj2;
    jsuword pcval;
    CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));

    if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
        RETURN_STOP_A("callee is not an object");
    JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));

    if (JSVAL_IS_PRIMITIVE(l)) {
        JSFunction* fun = GET_FUNCTION_PRIVATE(cx, PCVAL_TO_OBJECT(pcval));
        if (!PRIMITIVE_THIS_TEST(fun, l))
            RETURN_STOP_A("callee does not accept primitive |this|");
    }

    stack(0, this_ins);
    stack(-1, INS_CONSTOBJ(PCVAL_TO_OBJECT(pcval)));
    return ARECORD_CONTINUE;
}
/* E4X descendant deletion; untraceable. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DELDESC()
{
    return ARECORD_STOP;
}

/* JSOP_UINT24: push a 24-bit immediate as a double constant. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_UINT24()
{
    stack(0, lir->insImmf(GET_UINT24(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

/* JSOP_INDEXBASE: advance the atom segment base for following index ops. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->fp->regs->pc);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE()
{
    atoms = cx->fp->script->atomMap.vector;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_RESETBASE0()
{
    atoms = cx->fp->script->atomMap.vector;
    return ARECORD_CONTINUE;
}

/* CALLELEM is GETELEM plus |this| fixup done by the interpreter. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLELEM()
{
    return record_JSOP_GETELEM();
}
/*
 * JSOP_STOP: end of script or imacro. Sets rval_ins for the caller to pick up
 * when the frame is popped.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_STOP()
{
    JSStackFrame *fp = cx->fp;

    if (fp->imacpc) {
        /*
         * End of imacro, so return true to the interpreter immediately. The
         * interpreter's JSOP_STOP case will return from the imacro, back to
         * the pc after the calling op, still in the same JSStackFrame.
         */
        atoms = fp->script->atomMap.vector;
        return ARECORD_CONTINUE;
    }

    /*
     * We know falling off the end of a constructor returns the new object that
     * was passed in via fp->argv[-1], while falling off the end of a function
     * returns undefined.
     *
     * NB: we do not support script rval (eval, API users who want the result
     * of the last expression-statement, debugger API calls).
     */
    if (fp->flags & JSFRAME_CONSTRUCTING) {
        JS_ASSERT(fp->thisv == fp->argv[-1]);
        rval_ins = get(&fp->argv[-1]);
    } else {
        rval_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
    }
    clearFrameSlotsFromCache();
    return ARECORD_CONTINUE;
}
/* JSOP_GETXPROP: name lookup with the object already on the stack. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETXPROP()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        RETURN_STOP_A("primitive-this for GETXPROP?");

    jsval* vp;
    LIns* v_ins;
    NameResult nr;
    CHECK_STATUS_A(name(vp, v_ins, nr));
    stack(-1, v_ins);
    return ARECORD_CONTINUE;
}
/* E4X call-through-XML-name; untraceable. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return ARECORD_STOP;
}

/* TYPEOFEXPR has the same trace semantics as TYPEOF. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}
/*
 * JSOP_ENTERBLOCK: push |undefined| for each slot of the entered lexical
 * block so the tracked stack matches the interpreter.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    JSObject* obj;
    obj = cx->fp->script->getObject(getFullIndex(0));

    LIns* void_ins = INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID));
    for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
        stack(i, void_ins);
    return ARECORD_CONTINUE;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    /* We mustn't exit the lexical block we began recording in. */
    if (cx->fp->blockChain != lexicalBlock)
        return ARECORD_CONTINUE;
    else
        return ARECORD_STOP;
}

/* Generators suspend/resume frames; not traceable. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GENERATOR()
{
    return ARECORD_STOP;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_YIELD()
{
    return ARECORD_STOP;
}
/*
 * JSOP_ARRAYPUSH: append the top of stack to the comprehension's dense array
 * (held in a fixed local slot) via the js_ArrayCompPush builtin.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    uint32_t slot = GET_UINT16(cx->fp->regs->pc);
    JS_ASSERT(cx->fp->script->nfixed <= slot);
    JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
    jsval &arrayval = cx->fp->slots[slot];
    JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
    JS_ASSERT(OBJ_IS_DENSE_ARRAY(cx, JSVAL_TO_OBJECT(arrayval)));
    LIns *array_ins = get(&arrayval);
    jsval &elt = stackval(-1);
    LIns *elt_ins = box_jsval(elt, get(&elt));

    LIns *args[] = { elt_ins, array_ins, cx_ins };
    LIns *ok_ins = lir->insCall(&js_ArrayCompPush_ci, args);
    // A false return from the builtin signals OOM; exit the trace.
    guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return ARECORD_STOP;
}
/*
 * JSOP_LEAVEBLOCKEXPR: pop the block's slots but keep the block's result
 * value, moving it down to where the block started.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    LIns* v_ins = stack(-1);
    int n = -1 - GET_UINT16(cx->fp->regs->pc);
    stack(n, v_ins);
    return ARECORD_CONTINUE;
}
/* JSOP_GETTHISPROP: fused |this.prop| access. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    CHECK_STATUS_A(getThis(this_ins));

    /*
     * It's safe to just use cx->fp->thisv here because getThis() returns
     * ARECORD_STOP if thisv is not available.
     */
    JS_ASSERT(cx->fp->flags & JSFRAME_COMPUTED_THIS);
    CHECK_STATUS_A(getProp(JSVAL_TO_OBJECT(cx->fp->thisv), this_ins));
    return ARECORD_CONTINUE;
}

/* JSOP_GETARGPROP: fused |arg.prop| access. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
}

/* JSOP_GETLOCALPROP: fused |local.prop| access. */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
}
/*
 * JSOP_INDEXBASEn: advance the atom segment base by n * 2^16 for the single
 * following index-bearing op.
 * NOTE(review): the |atoms += n << 16;| lines were dropped in extraction and
 * are reconstructed here — confirm against upstream.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE1()
{
    atoms += 1 << 16;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE2()
{
    atoms += 2 << 16;
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INDEXBASE3()
{
    atoms += 3 << 16;
    return ARECORD_CONTINUE;
}
/*
 * CALL* variants push the value then a null |this| placeholder for the
 * upcoming call.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
        return ARECORD_CONTINUE;

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        RETURN_STOP_A("lazy import of global slot failed");

    jsval& v = STOBJ_GET_SLOT(globalObj, slot);
    stack(0, get(&v));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->fp->regs->pc);
    stack(0, var(slot));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->fp->regs->pc);
    stack(0, arg(slot));
    stack(1, INS_NULL());
    return ARECORD_CONTINUE;
}
/* Functions for use with JSOP_CALLBUILTIN. */

/*
 * Fast native: turn |this| into an iterator. argv[0] carries the iteration
 * flags as an int jsval.
 */
static JSBool
ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
{
    jsval *argv = JS_ARGV(cx, vp);
    JS_ASSERT(JSVAL_IS_INT(argv[0]));
    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
}
/* Traced counterpart of ObjectToIterator; failure sets the builtin error flag. */
static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
    jsval v = OBJECT_TO_JSVAL(obj);
    JSBool ok = js_ValueToIterator(cx, flags, &v);

    if (!ok) {
        js_SetBuiltinError(cx);
        return NULL;
    }
    return JSVAL_TO_OBJECT(v);
}
/* Fast native: step the iterator in |this| and store the result as rval. */
static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
}

/*
 * Traced counterpart of CallIteratorNext. Returns JSVAL_ERROR_COOKIE and sets
 * the builtin error flag on failure; the rooted temp keeps the value alive.
 */
static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
    JSAutoTempValueRooter tvr(cx);
    JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());

    if (!ok) {
        js_SetBuiltinError(cx);
        return JSVAL_ERROR_COOKIE;
    }
    return tvr.value();
}
JS_DEFINE_TRCINFO_1(ObjectToIterator,
    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
    (3, (static, JSVAL_FAIL,  CallIteratorNext_tn, CONTEXT, PC, THIS,        0, 0)))

/* Table describing the lazily-created builtin functions, indexed by JSBUILTIN_*. */
static const struct BuiltinFunctionInfo {
    JSNativeTraceInfo *ti;   // trace-info wrapper doubling as the native
    int nargs;               // declared arity of the builtin
} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
    {&ObjectToIterator_trcinfo, 1},
    {&CallIteratorNext_trcinfo, 0},
};
/*
 * Lazily create (and cache per-runtime) the builtin function object at
 * |index|. The double-checked store under the GC lock keeps racing threads
 * agreeing on a single canonical object.
 */
JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index)
{
    JSRuntime *rt = cx->runtime;
    JSObject *funobj = rt->builtinFunctions[index];

    if (!funobj) {
        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
        JS_ASSERT(index < JS_ARRAY_LENGTH(builtinFunctionInfo));
        const BuiltinFunctionInfo *bfi = &builtinFunctionInfo[index];
        JSFunction *fun = js_NewFunction(cx,
                                         NULL,
                                         JS_DATA_TO_FUNC_PTR(JSNative, bfi->ti),
                                         bfi->nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRCINFO,
                                         NULL,
                                         NULL);
        if (fun) {
            funobj = FUN_OBJECT(fun);
            STOBJ_CLEAR_PROTO(funobj);
            STOBJ_CLEAR_PARENT(funobj);

            JS_LOCK_GC(rt);
            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
                rt->builtinFunctions[index] = funobj;
            else
                funobj = rt->builtinFunctions[index];
            JS_UNLOCK_GC(rt);
        }
    }
    return funobj;
}
/*
 * JSOP_CALLBUILTIN: push the canonical builtin function as callee, shifting
 * the previous top of stack up to become |this|.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLBUILTIN()
{
    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
    if (!obj)
        RETURN_ERROR_A("error in js_GetBuiltinFunction");

    stack(0, get(&stackval(-1)));
    stack(-1, INS_CONSTOBJ(obj));
    return ARECORD_CONTINUE;
}
/* Push small integer immediates as double constants (numbers are doubles on trace). */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT8()
{
    stack(0, lir->insImmf(GET_INT8(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}

JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INT32()
{
    stack(0, lir->insImmf(GET_INT32(cx->fp->regs->pc)));
    return ARECORD_CONTINUE;
}
/*
 * JSOP_LENGTH: fast paths for string length, arguments.length, and array
 * length; all other native objects fall back to a generic property get.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            RETURN_STOP_A("non-string primitive JSOP_LENGTH unsupported");
        set(&l, lir->ins1(LIR_i2f, getStringLength(get(&l))));
        return ARECORD_CONTINUE;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    LIns* obj_ins = get(&l);

    if (STOBJ_GET_CLASS(obj) == &js_ArgumentsClass) {
        unsigned depth;
        JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
        if (!afp)
            RETURN_STOP_A("can't reach arguments object's frame");

        // argc is fixed for the frame the arguments object belongs to.
        LIns* v_ins = lir->ins1(LIR_i2f, INS_CONST(afp->argc));
        set(&l, v_ins);
        return ARECORD_CONTINUE;
    }

    LIns* v_ins;
    if (OBJ_IS_ARRAY(cx, obj)) {
        if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
            if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
                JS_NOT_REACHED("OBJ_IS_DENSE_ARRAY but not?!?");
                return ARECORD_STOP;
            }
        } else {
            if (!guardClass(obj, obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT)))
                RETURN_STOP_A("can't trace length property access on non-array");
        }
        v_ins = lir->ins1(LIR_i2f, p2i(stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)));
    } else {
        if (!OBJ_IS_NATIVE(obj))
            RETURN_STOP_A("can't trace length property access on non-array, non-native object");
        return getProp(obj, obj_ins);
    }
    set(&l, v_ins);
    return ARECORD_CONTINUE;
}
14392 JS_REQUIRES_STACK AbortableRecordingStatus
14393 TraceRecorder::record_JSOP_NEWARRAY()
14396 CHECK_STATUS_A(getClassPrototype(JSProto_Array
, proto_ins
));
14398 uint32 len
= GET_UINT16(cx
->fp
->regs
->pc
);
14399 cx
->fp
->assertValidStackDepth(len
);
14401 LIns
* args
[] = { lir
->insImm(len
), proto_ins
, cx_ins
};
14402 LIns
* v_ins
= lir
->insCall(&js_NewArrayWithSlots_ci
, args
);
14403 guard(false, lir
->ins_peq0(v_ins
), OOM_EXIT
);
14405 LIns
* dslots_ins
= NULL
;
14407 for (uint32 i
= 0; i
< len
; i
++) {
14408 jsval
& v
= stackval(int(i
) - int(len
));
14409 if (v
!= JSVAL_HOLE
)
14411 LIns
* elt_ins
= box_jsval(v
, get(&v
));
14412 stobj_set_dslot(v_ins
, i
, dslots_ins
, elt_ins
);
14416 stobj_set_fslot(v_ins
, JSSLOT_ARRAY_COUNT
, INS_CONST(count
));
14418 stack(-int(len
), v_ins
);
14419 return ARECORD_CONTINUE
;
14422 JS_REQUIRES_STACK AbortableRecordingStatus
14423 TraceRecorder::record_JSOP_HOLE()
14425 stack(0, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_HOLE
)));
14426 return ARECORD_CONTINUE
;
14429 AbortableRecordingStatus
14430 TraceRecorder::record_JSOP_TRACE()
14432 return ARECORD_CONTINUE
;
/*
 * Largest JSOP_CONCATN arity we will trace; record_JSOP_CONCATN aborts above
 * this to prevent code/alloca explosion.
 */
static const uint32 sMaxConcatNSize = 32;
14438 * Copy the result of defvalue.string back into concatn's arguments, clean the
14439 * stack, and return a pointer to the argument that was just overwritten.
14441 JS_REQUIRES_STACK jsval
*
14442 js_ConcatPostImacroStackCleanup(uint32 argc
, JSFrameRegs
®s
,
14443 TraceRecorder
*recorder
)
14445 JS_ASSERT(*regs
.pc
== JSOP_IMACOP
);
14447 /* Pop the argument offset and imacro return value. */
14448 jsint offset
= JSVAL_TO_INT(*--regs
.sp
);
14449 jsval
*imacroResult
= --regs
.sp
;
14451 /* Replace non-primitive argument with new primitive argument. */
14452 jsval
*vp
= regs
.sp
- offset
;
14453 JS_ASSERT(regs
.sp
- argc
<= vp
&& vp
< regs
.sp
);
14455 recorder
->set(vp
, recorder
->get(imacroResult
));
14456 *vp
= *imacroResult
;
/*
 * Initially, concatn takes N arguments on the stack, where N is an immediate
 * operand. To convert these arguments to primitives, we must repeatedly call
 * the defvalue.string imacro. To achieve this iteration, defvalue.string ends
 * with imacop. Hence, this function is called multiple times, each time with
 * one less non-primitive. To keep track of where we are in the loop, we must
 * push an additional index value on the stack. Hence, on all subsequent
 * entries, the stack is organized as follows (bottom to top):
 *
 *   prim[1..i-1]  arguments already converted to primitives
 *   nonprim[i]    argument to imacro
 *   prim/nonprim[i+1..N]  remaining arguments
 *   primarg[i]    nonprim[i] converted to primitive
 *   i             index of the argument being converted
 *
 * Hence, the stack setup on entry to this function (and JSOP_CONCATN in the
 * interpreter, on trace abort) is dependent on whether an imacro is in
 * progress. When all of concatn's arguments are primitive, it emits a builtin
 * call and allows the actual JSOP_CONCATN to be executed by the interpreter.
 */
14485 JS_REQUIRES_STACK AbortableRecordingStatus
14486 TraceRecorder::record_JSOP_CONCATN()
14488 JSStackFrame
*fp
= cx
->fp
;
14489 JSFrameRegs
®s
= *fp
->regs
;
14492 * If we are in an imacro, we must have just finished a call to
14493 * defvalue.string. Continue where we left off last time.
14498 JS_ASSERT(*fp
->imacpc
== JSOP_CONCATN
);
14499 argc
= GET_ARGC(fp
->imacpc
);
14500 loopStart
= js_ConcatPostImacroStackCleanup(argc
, regs
, this) + 1;
14502 argc
= GET_ARGC(regs
.pc
);
14503 JS_ASSERT(argc
> 0);
14504 loopStart
= regs
.sp
- argc
;
14506 /* Prevent code/alloca explosion. */
14507 if (argc
> sMaxConcatNSize
)
14508 return ARECORD_STOP
;
14511 /* Convert non-primitives to primitives using defvalue.string. */
14512 for (jsval
*vp
= loopStart
; vp
!= regs
.sp
; ++vp
) {
14513 if (!JSVAL_IS_PRIMITIVE(*vp
)) {
14515 * In addition to the jsval we want the imacro to convert to
14516 * primitive, pass through the offset of the argument on the stack.
14518 jsint offset
= regs
.sp
- vp
;
14520 /* Push the non-primitive to convert. */
14521 set(regs
.sp
, get(vp
), true);
14524 /* Push the argument index. */
14525 set(regs
.sp
, lir
->insImm(offset
), true);
14526 *regs
.sp
++ = INT_TO_JSVAL(offset
);
14528 /* Nested imacro call OK because this is a tail call. */
14529 return InjectStatus(call_imacro(defvalue_imacros
.string
));
14533 /* Build an array of the stringified primitives. */
14534 int32_t bufSize
= argc
* sizeof(JSString
*);
14535 LIns
*buf_ins
= lir
->insAlloc(bufSize
);
14537 for (jsval
*vp
= regs
.sp
- argc
; vp
!= regs
.sp
; ++vp
, d
+= sizeof(void *))
14538 lir
->insStorei(stringify(*vp
), buf_ins
, d
);
14540 /* Perform concatenation using a builtin. */
14541 LIns
*args
[] = { lir
->insImm(argc
), buf_ins
, cx_ins
};
14542 LIns
*concat
= lir
->insCall(&js_ConcatN_ci
, args
);
14543 guard(false, lir
->ins_peq0(concat
), OOM_EXIT
);
14545 /* Update tracker with result. */
14546 jsval
*afterPop
= regs
.sp
- (argc
- 1);
14547 set(afterPop
- 1, concat
);
14549 return ARECORD_CONTINUE
;
14552 JS_REQUIRES_STACK AbortableRecordingStatus
14553 TraceRecorder::record_JSOP_SETMETHOD()
14555 return record_JSOP_SETPROP();
14558 JS_REQUIRES_STACK AbortableRecordingStatus
14559 TraceRecorder::record_JSOP_INITMETHOD()
14561 return record_JSOP_INITPROP();
14564 JS_REQUIRES_STACK AbortableRecordingStatus
14565 TraceRecorder::record_JSOP_SHARPINIT()
14567 return ARECORD_STOP
;
14570 #define DBG_STUB(OP) \
14571 JS_REQUIRES_STACK AbortableRecordingStatus \
14572 TraceRecorder::record_##OP() \
14574 RETURN_STOP_A("can't trace " #OP); \
14577 DBG_STUB(JSOP_GETUPVAR_DBG
)
14578 DBG_STUB(JSOP_CALLUPVAR_DBG
)
14579 DBG_STUB(JSOP_DEFFUN_DBGFC
)
14580 DBG_STUB(JSOP_DEFLOCALFUN_DBGFC
)
14581 DBG_STUB(JSOP_LAMBDA_DBGFC
)
14585 * Print information about entry typemaps and unstable exits for all peers
14589 DumpPeerStability(JSTraceMonitor
* tm
, const void* ip
, JSObject
* globalObj
, uint32 globalShape
,
14594 bool looped
= false;
14595 unsigned length
= 0;
14597 for (f
= getLoop(tm
, ip
, globalObj
, globalShape
, argc
); f
!= NULL
; f
= f
->peer
) {
14600 debug_only_printf(LC_TMRecorder
, "Stability of fragment %p:\nENTRY STACK=", (void*)f
);
14601 ti
= (TreeInfo
*)f
->vmprivate
;
14603 JS_ASSERT(ti
->nStackTypes
== length
);
14604 for (unsigned i
= 0; i
< ti
->nStackTypes
; i
++)
14605 debug_only_printf(LC_TMRecorder
, "%c", typeChar
[ti
->stackTypeMap()[i
]]);
14606 debug_only_print0(LC_TMRecorder
, " GLOBALS=");
14607 for (unsigned i
= 0; i
< ti
->nGlobalTypes(); i
++)
14608 debug_only_printf(LC_TMRecorder
, "%c", typeChar
[ti
->globalTypeMap()[i
]]);
14609 debug_only_print0(LC_TMRecorder
, "\n");
14610 UnstableExit
* uexit
= ti
->unstableExits
;
14611 while (uexit
!= NULL
) {
14612 debug_only_print0(LC_TMRecorder
, "EXIT ");
14613 JSTraceType
* m
= uexit
->exit
->fullTypeMap();
14614 debug_only_print0(LC_TMRecorder
, "STACK=");
14615 for (unsigned i
= 0; i
< uexit
->exit
->numStackSlots
; i
++)
14616 debug_only_printf(LC_TMRecorder
, "%c", typeChar
[m
[i
]]);
14617 debug_only_print0(LC_TMRecorder
, " GLOBALS=");
14618 for (unsigned i
= 0; i
< uexit
->exit
->numGlobalSlots
; i
++) {
14619 debug_only_printf(LC_TMRecorder
, "%c",
14620 typeChar
[m
[uexit
->exit
->numStackSlots
+ i
]]);
14622 debug_only_print0(LC_TMRecorder
, "\n");
14623 uexit
= uexit
->next
;
14625 length
= ti
->nStackTypes
;
14631 #ifdef MOZ_TRACEVIS
/* Destination of TraceVis event records; NULL while no recording is active. */
FILE* traceVisLogFile = NULL;
/*
 * Script table used by TraceVis.
 * NOTE(review): its initialization is not visible in this chunk -- it stays
 * NULL unless set up elsewhere; confirm against the callers.
 */
JSHashTable *traceVisScriptTable = NULL;
14636 JS_FRIEND_API(bool)
14637 JS_StartTraceVis(const char* filename
= "tracevis.dat")
14639 if (traceVisLogFile
) {
14640 // If we're currently recording, first we must stop.
14644 traceVisLogFile
= fopen(filename
, "wb");
14645 if (!traceVisLogFile
)
14651 JS_FRIEND_API(JSBool
)
14652 js_StartTraceVis(JSContext
*cx
, JSObject
*obj
,
14653 uintN argc
, jsval
*argv
, jsval
*rval
)
14657 if (argc
> 0 && JSVAL_IS_STRING(argv
[0])) {
14658 JSString
*str
= JSVAL_TO_STRING(argv
[0]);
14659 char *filename
= js_DeflateString(cx
, str
->chars(), str
->length());
14662 ok
= JS_StartTraceVis(filename
);
14663 cx
->free(filename
);
14665 ok
= JS_StartTraceVis();
14669 fprintf(stderr
, "started TraceVis recording\n");
14674 JS_ReportError(cx
, "failed to start TraceVis recording");
14678 JS_FRIEND_API(bool)
14681 if (!traceVisLogFile
)
14684 fclose(traceVisLogFile
); // not worth checking the result
14685 traceVisLogFile
= NULL
;
14690 JS_FRIEND_API(JSBool
)
14691 js_StopTraceVis(JSContext
*cx
, JSObject
*obj
,
14692 uintN argc
, jsval
*argv
, jsval
*rval
)
14694 JSBool ok
= JS_StopTraceVis();
14697 fprintf(stderr
, "stopped TraceVis recording\n");
14699 JS_ReportError(cx
, "TraceVis isn't running");
14704 #endif /* MOZ_TRACEVIS */
14706 JS_REQUIRES_STACK
void
14707 js_CaptureStackTypes(JSContext
* cx
, unsigned callDepth
, JSTraceType
* typeMap
)
14709 CaptureTypesVisitor
capVisitor(cx
, typeMap
);
14710 VisitStackSlots(capVisitor
, cx
, callDepth
);
14713 JS_REQUIRES_STACK
void
14714 TraceRecorder::determineGlobalTypes(JSTraceType
* typeMap
)
14716 DetermineTypesVisitor
detVisitor(*this, typeMap
);
14717 VisitGlobalSlots(detVisitor
, cx
, *treeInfo
->globalSlots
);
14721 TraceRecorder::demoteIns(LIns
* ins
)
14723 return ::demote(lir
, ins
);
14726 #include "jsrecursion.cpp"