1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "mozilla/Assertions.h"
8 #include "mozilla/ScopeExit.h"
10 #include "builtin/ModuleObject.h"
11 #include "debugger/DebugAPI.h"
13 #include "jit/Bailouts.h"
14 #include "jit/BaselineFrame.h"
15 #include "jit/BaselineIC.h"
16 #include "jit/BaselineJIT.h"
17 #include "jit/CalleeToken.h"
18 #include "jit/Invalidation.h"
20 #include "jit/IonScript.h"
21 #include "jit/JitFrames.h"
22 #include "jit/JitRuntime.h"
23 #include "jit/JitSpewer.h"
24 #include "jit/JitZone.h"
25 #include "jit/RematerializedFrame.h"
26 #include "jit/SharedICRegisters.h"
27 #include "jit/Simulator.h"
28 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit, js::ReportOverRecursed
29 #include "js/Utility.h"
30 #include "proxy/ScriptedProxyHandler.h"
31 #include "util/Memory.h"
32 #include "vm/ArgumentsObject.h"
33 #include "vm/BytecodeUtil.h"
34 #include "vm/JitActivation.h"
36 #include "jit/JitFrames-inl.h"
37 #include "vm/JSAtomUtils-inl.h"
38 #include "vm/JSContext-inl.h"
39 #include "vm/JSScript-inl.h"
42 using namespace js::jit
;
44 using mozilla::DebugOnly
;
47 // BaselineStackBuilder may reallocate its buffer if the current one is too
48 // small. To avoid dangling pointers, BufferPointer represents a pointer into
49 // this buffer as a pointer to the header and a fixed offset.
// NOTE(review): this paste is line-mangled and the class/template header
// (original lines 50-51) is elided; the fused leading numbers are the
// original file's line numbers, preserved byte-identical below.
// Member: the owning header; get() recomputes the raw address on every use so
// a buffer reallocation cannot leave a stale pointer.
52 const UniquePtr
<BaselineBailoutInfo
>& header_
;
// Ctor: record header reference plus a fixed offset (and, per the elided
// trailing parameter, presumably a heap/stack flag — TODO confirm).
57 BufferPointer(const UniquePtr
<BaselineBailoutInfo
>& header
, size_t offset
,
59 : header_(header
), offset_(offset
), heap_(heap
) {}
// Non-heap case: address is relative to the incoming (existing) stack.
62 BaselineBailoutInfo
* header
= header_
.get();
64 return (T
*)(header
->incomingStack
+ offset_
);
// Heap case: address is relative to copyStackTop, asserted to stay inside
// [copyStackBottom, copyStackTop).
67 uint8_t* p
= header
->copyStackTop
- offset_
;
68 MOZ_ASSERT(p
>= header
->copyStackBottom
&& p
< header
->copyStackTop
);
// Write through the recomputed pointer.
72 void set(const T
& value
) { *get() = value
; }
74 // Note: we return a copy instead of a reference, to avoid potential memory
75 // safety hazards when the underlying buffer gets resized.
76 const T
operator*() const { return *get(); }
77 T
* operator->() const { return get(); }
81 * BaselineStackBuilder helps abstract the process of rebuilding the C stack on
82 * the heap. It takes a bailout iterator and keeps track of the point on the C
83 * stack from which the reconstructed frames will be written.
85 * It exposes methods to write data into the heap memory storing the
86 * reconstructed stack. It also exposes method to easily calculate addresses.
87 * This includes both the virtual address that a particular value will be at
88 * when it's eventually copied onto the stack, as well as the current actual
89 * address of that value (whether on the heap allocated portion being
90 * constructed or the existing stack).
92 * The abstraction handles transparent re-allocation of the heap memory when it
93 * needs to be enlarged to accommodate new data. Similarly to the C stack, the
94 * data that's written to the reconstructed stack grows from high to low in
97 * The lowest region of the allocated memory contains a BaselineBailoutInfo
98 * structure that points to the start and end of the written data.
// Builder that reconstructs Baseline frames on the heap from an Ion bailout
// snapshot. MOZ_STACK_CLASS: instances live only on the C stack.
// NOTE(review): original lines are elided throughout (fused line numbers
// jump); member list below is incomplete as pasted.
100 class MOZ_STACK_CLASS BaselineStackBuilder
{
// Layout of the Ion frame being bailed out of.
102 JitFrameLayout
* frame_
= nullptr;
// Snapshot iterator yielding the recovered values for each frame.
103 SnapshotIterator
& iter_
;
104 RootedValueVector outermostFrameFormals_
;
// Buffer bookkeeping: total allocation, bytes still free, bytes written.
106 size_t bufferTotal_
= 1024;
107 size_t bufferAvail_
= 0;
108 size_t bufferUsed_
= 0;
109 size_t framePushed_
= 0;
// Owning pointer to the heap buffer; the BaselineBailoutInfo header lives at
// its start (see init()).
111 UniquePtr
<BaselineBailoutInfo
> header_
;
// Non-null when bailing out to a catch/finally block.
115 const ExceptionBailoutInfo
* excInfo_
;
// Resume point state for the frame currently being built.
118 jsbytecode
* pc_
= nullptr;
119 JSOp op_
= JSOp::Nop
;
120 mozilla::Maybe
<ResumeMode
> resumeMode_
;
121 uint32_t exprStackSlots_
= 0;
122 void* prevFramePtr_
= nullptr;
// Reallocation-safe handle to the BaselineFrame being reconstructed.
123 Maybe
<BufferPointer
<BaselineFrame
>> blFrame_
;
126 JSFunction
* nextCallee_
= nullptr;
128 BailoutKind bailoutKind_
;
130 bool canUseTrialInlinedICScripts_
= true;
132 // The baseline frames we will reconstruct on the heap are not
133 // rooted, so GC must be suppressed.
134 gc::AutoSuppressGC suppress_
;
// Constructor declaration (definition appears out-of-line below).
137 BaselineStackBuilder(JSContext
* cx
, const JSJitFrameIter
& frameIter
,
138 SnapshotIterator
& iter
,
139 const ExceptionBailoutInfo
* excInfo
,
140 BailoutReason reason
);
// init(): allocate the zeroed buffer, placement-new the BaselineBailoutInfo
// header at its start, and point copyStackTop/Bottom at the (empty) payload
// which grows downward from the buffer's end.
// NOTE(review): the allocation-failure check (elided lines) is missing from
// this paste.
142 [[nodiscard
]] bool init() {
143 MOZ_ASSERT(!header_
);
144 MOZ_ASSERT(bufferUsed_
== 0);
146 uint8_t* bufferRaw
= cx_
->pod_calloc
<uint8_t>(bufferTotal_
);
150 bufferAvail_
= bufferTotal_
- sizeof(BaselineBailoutInfo
);
152 header_
.reset(new (bufferRaw
) BaselineBailoutInfo());
153 header_
->incomingStack
= reinterpret_cast<uint8_t*>(frame_
);
154 header_
->copyStackTop
= bufferRaw
+ bufferTotal_
;
155 header_
->copyStackBottom
= header_
->copyStackTop
;
// Main entry: reconstruct a single Baseline frame from the snapshot.
159 [[nodiscard
]] bool buildOneFrame();
// Simple accessors over builder state.
163 JSScript
* script() const { return script_
; }
164 size_t frameNo() const { return frameNo_
; }
165 bool isOutermostFrame() const { return frameNo_
== 0; }
166 MutableHandleValueVector
outermostFrameFormals() {
167 return &outermostFrameFormals_
;
169 BailoutKind
bailoutKind() const { return bailoutKind_
; }
171 inline JitFrameLayout
* startFrame() { return frame_
; }
// info(): borrow the header; takeBuffer(): transfer ownership to the caller.
173 BaselineBailoutInfo
* info() {
175 return header_
.get();
178 BaselineBailoutInfo
* takeBuffer() {
180 return header_
.release();
// Frame-construction steps, in the order buildOneFrame runs them.
184 [[nodiscard
]] bool initFrame();
185 [[nodiscard
]] bool buildBaselineFrame();
186 [[nodiscard
]] bool buildArguments();
187 [[nodiscard
]] bool buildFixedSlots();
188 [[nodiscard
]] bool fixUpCallerArgs(MutableHandleValueVector savedCallerArgs
,
190 [[nodiscard
]] bool buildFinallyException();
191 [[nodiscard
]] bool buildExpressionStack();
192 [[nodiscard
]] bool finishLastFrame();
194 [[nodiscard
]] bool prepareForNextFrame(HandleValueVector savedCallerArgs
);
195 [[nodiscard
]] bool finishOuterFrame();
196 [[nodiscard
]] bool buildStubFrame(uint32_t frameSize
,
197 HandleValueVector savedCallerArgs
);
198 [[nodiscard
]] bool buildRectifierFrame(uint32_t actualArgc
,
199 size_t endOfBaselineStubArgs
);
202 [[nodiscard
]] bool validateFrame();
206 bool envChainSlotCanBeOptimized();
209 bool isPrologueBailout();
210 jsbytecode
* getResumePC();
211 void* getStubReturnAddress();
213 uint32_t exprStackSlots() const { return exprStackSlots_
; }
215 // Returns true if we're bailing out to a catch or finally block in this frame
216 bool catchingException() const {
217 return excInfo_
&& excInfo_
->catchingException() &&
218 excInfo_
->frameNo() == frameNo_
;
221 // Returns true if we're bailing out to a finally block in this frame.
222 bool resumingInFinallyBlock() const {
223 return catchingException() && excInfo_
->isFinally();
226 bool forcedReturn() const { return excInfo_
&& excInfo_
->forcedReturn(); }
228 // Returns true if we're bailing out in place for debug mode
229 bool propagatingIonExceptionForDebugMode() const {
230 return excInfo_
&& excInfo_
->propagatingIonExceptionForDebugMode();
// prevFramePtr is only valid once a frame has recorded it (asserted).
233 void* prevFramePtr() const {
234 MOZ_ASSERT(prevFramePtr_
);
235 return prevFramePtr_
;
237 BufferPointer
<BaselineFrame
>& blFrame() { return blFrame_
.ref(); }
239 void setNextCallee(JSFunction
* nextCallee
,
240 TrialInliningState trialInliningState
);
241 JSFunction
* nextCallee() const { return nextCallee_
; }
243 jsbytecode
* pc() const { return pc_
; }
244 bool resumeAfter() const {
245 return !catchingException() && iter_
.resumeAfter();
248 ResumeMode
resumeMode() const { return *resumeMode_
; }
// Inlined accessors need their caller args saved (see fixUpCallerArgs).
250 bool needToSaveCallerArgs() const {
251 return resumeMode() == ResumeMode::InlinedAccessor
;
// Double the buffer, guarding against size_t overflow of bufferTotal_ * 2,
// then move the header and already-written payload into the new allocation.
// NOTE(review): the early-return after ReportOutOfMemory and the
// null-allocation check are elided in this paste (line numbers jump).
254 [[nodiscard
]] bool enlarge() {
255 MOZ_ASSERT(header_
!= nullptr);
256 if (bufferTotal_
& mozilla::tl::MulOverflowMask
<2>::value
) {
257 ReportOutOfMemory(cx_
);
261 size_t newSize
= bufferTotal_
* 2;
262 uint8_t* newBufferRaw
= cx_
->pod_calloc
<uint8_t>(newSize
);
267 // Initialize the new buffer.
271 // [ Header | .. | Payload ]
275 // [ Header | ............... | Payload ]
277 // Size of Payload is |bufferUsed_|.
279 // We need to copy from the old buffer and header to the new buffer before
280 // we set header_ (this deletes the old buffer).
282 // We also need to update |copyStackBottom| and |copyStackTop| because these
283 // fields point to the Payload's start and end, respectively.
284 using BailoutInfoPtr
= UniquePtr
<BaselineBailoutInfo
>;
285 BailoutInfoPtr
newHeader(new (newBufferRaw
) BaselineBailoutInfo(*header_
));
286 newHeader
->copyStackTop
= newBufferRaw
+ newSize
;
287 newHeader
->copyStackBottom
= newHeader
->copyStackTop
- bufferUsed_
;
288 memcpy(newHeader
->copyStackBottom
, header_
->copyStackBottom
, bufferUsed_
);
289 bufferTotal_
= newSize
;
290 bufferAvail_
= newSize
- (sizeof(BaselineBailoutInfo
) + bufferUsed_
);
291 header_
= std::move(newHeader
);
295 void resetFramePushed() { framePushed_
= 0; }
297 size_t framePushed() const { return framePushed_
; }
// subtract(): reserve |size| bytes at the downward-growing payload edge,
// enlarging the buffer first if needed (the enlarge() call inside the while
// loop is elided in this paste — line numbers jump 301 -> 307).
299 [[nodiscard
]] bool subtract(size_t size
, const char* info
= nullptr) {
300 // enlarge the buffer if need be.
301 while (size
> bufferAvail_
) {
307 // write out element.
308 header_
->copyStackBottom
-= size
;
309 bufferAvail_
-= size
;
311 framePushed_
+= size
;
313 JitSpew(JitSpew_BaselineBailouts
, " SUB_%03d %p/%p %-15s",
314 (int)size
, header_
->copyStackBottom
,
315 virtualPointerAtStackOffset(0), info
);
// write<T>(): push one T onto the reconstructed stack. The assert rejects
// sources that live inside our own resizable buffer, since subtract() may
// reallocate it and invalidate |&t| before the memcpy.
320 template <typename T
>
321 [[nodiscard
]] bool write(const T
& t
) {
322 MOZ_ASSERT(!(uintptr_t(&t
) >= uintptr_t(header_
->copyStackBottom
) &&
323 uintptr_t(&t
) < uintptr_t(header_
->copyStackTop
)),
324 "Should not reference memory that can be freed");
325 if (!subtract(sizeof(T
))) {
328 memcpy(header_
->copyStackBottom
, &t
, sizeof(T
));
// writePtr(): push a pointer with spew logging (body partially elided).
332 template <typename T
>
333 [[nodiscard
]] bool writePtr(T
* t
, const char* info
) {
338 JitSpew(JitSpew_BaselineBailouts
, " WRITE_PTR %p/%p %-15s %p",
339 header_
->copyStackBottom
, virtualPointerAtStackOffset(0), info
,
// writeWord(): push a machine word; spew format width matches the word size
// (32- vs 64-bit size_t).
345 [[nodiscard
]] bool writeWord(size_t w
, const char* info
) {
346 if (!write
<size_t>(w
)) {
350 if (sizeof(size_t) == 4) {
351 JitSpew(JitSpew_BaselineBailouts
, " WRITE_WRD %p/%p %-15s %08zx",
352 header_
->copyStackBottom
, virtualPointerAtStackOffset(0), info
,
355 JitSpew(JitSpew_BaselineBailouts
, " WRITE_WRD %p/%p %-15s %016zx",
356 header_
->copyStackBottom
, virtualPointerAtStackOffset(0), info
,
// writeValue(): push a JS::Value with spew logging.
363 [[nodiscard
]] bool writeValue(const Value
& val
, const char* info
) {
364 if (!write
<Value
>(val
)) {
368 JitSpew(JitSpew_BaselineBailouts
,
369 " WRITE_VAL %p/%p %-15s %016" PRIx64
,
370 header_
->copyStackBottom
, virtualPointerAtStackOffset(0), info
,
// peekLastValue(): copy out the most recently written Value, failing when
// fewer than sizeof(Value) bytes have been written.
376 [[nodiscard
]] bool peekLastValue(Value
* result
) {
377 if (bufferUsed_
< sizeof(Value
)) {
381 memcpy(result
, header_
->copyStackBottom
, sizeof(Value
));
// maybeWritePadding(): write JS_ARG_POISON Values until framePushed_ reaches
// the alignment needed so that, after |after| more bytes, the stack meets
// |alignment|. Pre-asserts both counters are Value-aligned.
385 [[nodiscard
]] bool maybeWritePadding(size_t alignment
, size_t after
,
387 MOZ_ASSERT(framePushed_
% sizeof(Value
) == 0);
388 MOZ_ASSERT(after
% sizeof(Value
) == 0);
389 size_t offset
= ComputeByteAlignment(after
, alignment
);
390 while (framePushed_
% alignment
!= offset
) {
391 if (!writeValue(MagicValue(JS_ARG_POISON
), info
)) {
// Record the frame pointer / code address to resume at after the bailout.
399 void setResumeFramePtr(void* resumeFramePtr
) {
400 header_
->resumeFramePtr
= resumeFramePtr
;
403 void setResumeAddr(void* resumeAddr
) { header_
->resumeAddr
= resumeAddr
;
405 }
// pointerAtStackOffset(): map a virtual stack offset to either the heap copy
// (offset < bufferUsed_) or the real incoming stack, returning a
// reallocation-safe BufferPointer either way.
405 template <typename T
>
406 BufferPointer
<T
> pointerAtStackOffset(size_t offset
) {
407 if (offset
< bufferUsed_
) {
408 // Calculate offset from copyStackTop.
409 offset
= header_
->copyStackTop
- (header_
->copyStackBottom
+ offset
);
410 return BufferPointer
<T
>(header_
, offset
, /* heap = */ true);
413 return BufferPointer
<T
>(header_
, offset
- bufferUsed_
, /* heap = */ false);
// Convenience wrapper for Value slots.
416 BufferPointer
<Value
> valuePointerAtStackOffset(size_t offset
) {
417 return pointerAtStackOffset
<Value
>(offset
);
// virtualPointerAtStackOffset(): the address this offset WILL have once the
// heap copy is written back onto the real stack (below frame_ for copied
// data, above it for the existing stack).
420 inline uint8_t* virtualPointerAtStackOffset(size_t offset
) {
421 if (offset
< bufferUsed_
) {
422 return reinterpret_cast<uint8_t*>(frame_
) - (bufferUsed_
- offset
);
424 return reinterpret_cast<uint8_t*>(frame_
) + (offset
- bufferUsed_
);
// GC tracing hook: tempId is the only traced root in BaselineBailoutInfo
// (it holds the jsid stashed by buildExpressionStack's proxy check).
428 void BaselineBailoutInfo::trace(JSTracer
* trc
) {
429 TraceRoot(trc
, &tempId
, "BaselineBailoutInfo::tempId");
// Constructor: capture the Ion frame, its script/callee, the callee's
// ICScript, and the snapshot's bailout kind. An invalidation bailout
// overrides the kind with OnStackInvalidation. (Several initializer lines
// are elided in this paste — fused line numbers jump.)
432 BaselineStackBuilder::BaselineStackBuilder(JSContext
* cx
,
433 const JSJitFrameIter
& frameIter
,
434 SnapshotIterator
& iter
,
435 const ExceptionBailoutInfo
* excInfo
,
436 BailoutReason reason
)
438 frame_(static_cast<JitFrameLayout
*>(frameIter
.current())),
440 outermostFrameFormals_(cx
),
441 script_(frameIter
.script()),
442 fun_(frameIter
.maybeCallee()),
444 icScript_(script_
->jitScript()->icScript()),
445 bailoutKind_(iter
.bailoutKind()),
447 MOZ_ASSERT(bufferTotal_
>= sizeof(BaselineBailoutInfo
));
448 if (reason
== BailoutReason::Invalidate
) {
449 bailoutKind_
= BailoutKind::OnStackInvalidation
;
// Per-frame setup: pick the resume pc/mode (catch/finally target when
// catching an exception, else the snapshot's), compute how many expression
// stack slots this frame has, and record the previous-frame pointer slot.
// NOTE(review): else-branch delimiters and the trailing return are elided in
// this paste (fused line numbers jump).
453 bool BaselineStackBuilder::initFrame() {
454 // Get the pc and ResumeMode. If we are handling an exception, resume at the
455 // pc of the catch or finally block.
456 if (catchingException()) {
457 pc_
= excInfo_
->resumePC();
458 resumeMode_
= mozilla::Some(ResumeMode::ResumeAt
);
460 pc_
= script_
->offsetToPC(iter_
.pcOffset());
461 resumeMode_
= mozilla::Some(iter_
.resumeMode());
465 // If we are catching an exception, we are bailing out to a catch or
466 // finally block and this is the frame where we will resume. Usually the
467 // expression stack should be empty in this case but there can be
468 // iterators on the stack.
469 if (catchingException()) {
470 exprStackSlots_
= excInfo_
->numExprSlots();
// Otherwise: expression slots = all snapshot allocations minus fixed slots,
// argument slots, and any intermediate values for the resume mode.
472 uint32_t totalFrameSlots
= iter_
.numAllocations();
473 uint32_t fixedSlots
= script_
->nfixed();
474 uint32_t argSlots
= CountArgSlots(script_
, fun_
);
475 uint32_t intermediates
= NumIntermediateValues(resumeMode());
476 exprStackSlots_
= totalFrameSlots
- fixedSlots
- argSlots
- intermediates
;
478 // Verify that there was no underflow.
479 MOZ_ASSERT(exprStackSlots_
<= totalFrameSlots
);
482 JitSpew(JitSpew_BaselineBailouts
, " Unpacking %s:%u:%u",
483 script_
->filename(), script_
->lineno(),
484 script_
->column().oneOriginValue());
485 JitSpew(JitSpew_BaselineBailouts
, " [BASELINE-JS FRAME]");
487 // Write the previous frame pointer value. For the outermost frame we reuse
488 // the value in the JitFrameLayout already on the stack. Record the virtual
489 // stack offset at this location. Later on, if we end up writing out a
490 // BaselineStub frame for the next callee, we'll need to save the address.
491 if (!isOutermostFrame()) {
492 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
496 prevFramePtr_
= virtualPointerAtStackOffset(0);
// Record the callee of the next (inlined) frame and select which ICScript
// that frame should use: the trial-inlined child ICScript when the call site
// is known-inlined and the IonScript's ICScripts were not purged, otherwise
// the callee's own generic ICScript.
503 void BaselineStackBuilder::setNextCallee(
504 JSFunction
* nextCallee
, TrialInliningState trialInliningState
) {
505 nextCallee_
= nextCallee
;
507 if (trialInliningState
== TrialInliningState::Inlined
&&
508 !iter_
.ionScript()->purgedICScripts() && canUseTrialInlinedICScripts_
) {
509 // Update icScript_ to point to the icScript of nextCallee
510 const uint32_t pcOff
= script_
->pcToOffset(pc_
);
511 icScript_
= icScript_
->findInlinedChild(pcOff
);
513 // If we don't know for certain that it's TrialInliningState::Inlined,
514 // just use the callee's own ICScript. We could still have the trial
515 // inlined ICScript available, but we also could not if we transitioned
516 // to TrialInliningState::Failure after being monomorphic inlined.
518 // Also use the callee's own ICScript if we purged callee ICScripts.
519 icScript_
= nextCallee
->nonLazyScript()->jitScript()->icScript();
521 if (trialInliningState
!= TrialInliningState::MonomorphicInlined
) {
522 // Don't use specialized ICScripts for any of the callees if we had an
523 // inlining failure. We're now using the generic ICScript but compilation
524 // might have used the trial-inlined ICScript and these can have very
525 // different inlining graphs.
526 canUseTrialInlinedICScripts_
= false;
530 // Assert the ICScript matches nextCallee.
531 JSScript
* calleeScript
= nextCallee
->nonLazyScript();
532 MOZ_RELEASE_ASSERT(icScript_
->numICEntries() == calleeScript
->numICEntries());
533 MOZ_RELEASE_ASSERT(icScript_
->bytecodeSize() == calleeScript
->length());
// done(): no more snapshot frames means we are finished (and there must be
// no pending callee); otherwise we stop early only when catching an
// exception in the current frame. (Return in the no-more-frames branch is
// elided in this paste.)
536 bool BaselineStackBuilder::done() {
537 if (!iter_
.moreFrames()) {
538 MOZ_ASSERT(!nextCallee_
);
541 return catchingException();
// Advance to the next inlined frame: the recorded nextCallee becomes the
// current function/script, then step the snapshot iterator.
544 void BaselineStackBuilder::nextFrame() {
545 MOZ_ASSERT(nextCallee_
);
547 script_
= fun_
->nonLazyScript();
548 nextCallee_
= nullptr;
550 // Scripts with an IonScript must also have a BaselineScript.
551 MOZ_ASSERT(script_
->hasBaselineScript());
554 iter_
.nextInstruction();
557 // Build the BaselineFrame struct
// Reserve space for the BaselineFrame, then fill in: flags (interpreter mode,
// debuggee), environment chain (read from the snapshot or recovered from
// fun_/module/global when optimized out), return value, arguments object,
// and the ICScript chosen for this frame.
558 bool BaselineStackBuilder::buildBaselineFrame() {
559 if (!subtract(BaselineFrame::Size(), "BaselineFrame")) {
563 blFrame_
.emplace(pointerAtStackOffset
<BaselineFrame
>(0));
565 uint32_t flags
= BaselineFrame::RUNNING_IN_INTERPRETER
;
567 // If we are bailing to a script whose execution is observed, mark the
568 // baseline frame as a debuggee frame. This is to cover the case where we
569 // don't rematerialize the Ion frame via the Debugger.
570 if (script_
->isDebuggee()) {
571 flags
|= BaselineFrame::DEBUGGEE
;
575 JSObject
* envChain
= nullptr;
576 Value envChainSlot
= iter_
.read();
577 if (envChainSlot
.isObject()) {
578 // The env slot has been updated from UndefinedValue. It must be the
579 // complete initial environment.
580 envChain
= &envChainSlot
.toObject();
582 // Set the HAS_INITIAL_ENV flag if needed. See IsFrameInitialEnvironment.
583 MOZ_ASSERT(!script_
->isForEval());
584 if (fun_
&& fun_
->needsFunctionEnvironmentObjects()) {
585 MOZ_ASSERT(fun_
->nonLazyScript()->initialEnvironmentShape());
586 flags
|= BaselineFrame::HAS_INITIAL_ENV
;
589 MOZ_ASSERT(envChainSlot
.isUndefined() ||
590 envChainSlot
.isMagic(JS_OPTIMIZED_OUT
));
591 MOZ_ASSERT(envChainSlotCanBeOptimized());
593 // The env slot has been optimized out.
594 // Get it from the function or script.
596 envChain
= fun_
->environment();
597 } else if (script_
->isModule()) {
598 envChain
= script_
->module()->environment();
600 // For global scripts without a non-syntactic env the env
601 // chain is the script's global lexical environment. (We do
602 // not compile scripts with a non-syntactic global scope).
603 // Also note that it's invalid to resume into the prologue in
604 // this case because the prologue expects the env chain in R1
605 // for eval and global scripts.
606 MOZ_ASSERT(!script_
->isForEval());
607 MOZ_ASSERT(!script_
->hasNonSyntacticScope());
608 envChain
= &(script_
->global().lexicalEnvironment());
613 MOZ_ASSERT(envChain
);
614 JitSpew(JitSpew_BaselineBailouts
, " EnvChain=%p", envChain
);
615 blFrame()->setEnvironmentChain(envChain
);
617 // Get |returnValue| if present.
618 Value returnValue
= UndefinedValue();
619 if (script_
->noScriptRval()) {
620 // Don't use the return value (likely a JS_OPTIMIZED_OUT MagicValue) to
621 // not confuse Baseline.
624 returnValue
= iter_
.read();
625 flags
|= BaselineFrame::HAS_RVAL
;
628 // Write |returnValue|.
629 JitSpew(JitSpew_BaselineBailouts
, " ReturnValue=%016" PRIx64
,
630 *((uint64_t*)&returnValue
));
631 blFrame()->setReturnValue(returnValue
);
633 // Get |argsObj| if present.
634 ArgumentsObject
* argsObj
= nullptr;
635 if (script_
->needsArgsObj()) {
636 Value maybeArgsObj
= iter_
.read();
637 MOZ_ASSERT(maybeArgsObj
.isObject() || maybeArgsObj
.isUndefined() ||
638 maybeArgsObj
.isMagic(JS_OPTIMIZED_OUT
));
639 if (maybeArgsObj
.isObject()) {
640 argsObj
= &maybeArgsObj
.toObject().as
<ArgumentsObject
>();
644 // Note: we do not need to initialize the scratchValue field in BaselineFrame.
647 blFrame()->setFlags(flags
);
650 JitSpew(JitSpew_BaselineBailouts
, " ICScript=%p", icScript_
);
651 blFrame()->setICScript(icScript_
);
653 // initArgsObjUnchecked modifies the frame's flags, so call it after setFlags.
655 blFrame()->initArgsObjUnchecked(*argsObj
);
660 // Overwrite the pushed args present in the calling frame with
661 // the unpacked |thisv| and argument values.
// |thisv| is patched directly into the caller's frame; each formal either
// overwrites the caller's actual-arg slot (inline frames) or is parked in
// outermostFrameFormals_ (outermost frame, see comment below).
662 bool BaselineStackBuilder::buildArguments() {
663 Value thisv
= iter_
.read();
664 JitSpew(JitSpew_BaselineBailouts
, " Is function!");
665 JitSpew(JitSpew_BaselineBailouts
, " thisv=%016" PRIx64
,
666 *((uint64_t*)&thisv
));
668 size_t thisvOffset
= framePushed() + JitFrameLayout::offsetOfThis();
669 valuePointerAtStackOffset(thisvOffset
).set(thisv
);
671 MOZ_ASSERT(iter_
.numAllocations() >= CountArgSlots(script_
, fun_
));
672 JitSpew(JitSpew_BaselineBailouts
,
673 " frame slots %u, nargs %zu, nfixed %zu", iter_
.numAllocations(),
674 fun_
->nargs(), script_
->nfixed());
676 bool shouldStoreOutermostFormals
=
677 isOutermostFrame() && !script_
->argsObjAliasesFormals();
678 if (shouldStoreOutermostFormals
) {
679 // This is the first (outermost) frame and we don't have an
680 // arguments object aliasing the formals. Due to UCE and phi
681 // elimination, we could store an UndefinedValue() here for
682 // formals we think are unused, but locals may still reference the
683 // original argument slot (MParameter/LArgument) and expect the
684 // original Value. To avoid this problem, store the formals in a
685 // Vector until we are done.
686 MOZ_ASSERT(outermostFrameFormals().empty());
687 if (!outermostFrameFormals().resize(fun_
->nargs())) {
692 for (uint32_t i
= 0; i
< fun_
->nargs(); i
++) {
693 Value arg
= iter_
.read();
694 JitSpew(JitSpew_BaselineBailouts
, " arg %d = %016" PRIx64
, (int)i
,
696 if (!isOutermostFrame()) {
697 size_t argOffset
= framePushed() + JitFrameLayout::offsetOfActualArg(i
);
698 valuePointerAtStackOffset(argOffset
).set(arg
);
699 } else if (shouldStoreOutermostFormals
) {
700 outermostFrameFormals()[i
].set(arg
);
702 // When the arguments object aliases the formal arguments, then
703 // JSOp::SetArg mutates the argument object. In such cases, the
704 // list of arguments reported by the snapshot are only aliases
705 // of argument object slots which are optimized to only store
706 // differences compared to arguments which are on the stack.
// Copy every fixed (local) slot from the snapshot onto the rebuilt frame.
712 bool BaselineStackBuilder::buildFixedSlots() {
713 for (uint32_t i
= 0; i
< script_
->nfixed(); i
++) {
714 Value slot
= iter_
.read();
715 if (!writeValue(slot
, "FixedValue")) {
722 // The caller side of inlined js::fun_call and accessors must look
723 // like the function wasn't inlined.
// For InlinedFunCall: re-synthesize |js_fun_call| (as undefined) plus the
// target/this/args layout Baseline expects. For InlinedAccessor: pull the
// call's arguments off the snapshot into |savedCallerArgs| for the callee
// frame, re-pushing only the observable SetProp RHS.
724 bool BaselineStackBuilder::fixUpCallerArgs(
725 MutableHandleValueVector savedCallerArgs
, bool* fixedUp
) {
726 MOZ_ASSERT(!*fixedUp
);
728 // Inlining of SpreadCall-like frames not currently supported.
729 MOZ_ASSERT(!IsSpreadOp(op_
));
731 if (resumeMode() != ResumeMode::InlinedFunCall
&& !needToSaveCallerArgs()) {
735 // Calculate how many arguments are consumed by the inlined call.
736 // All calls pass |callee| and |this|.
737 uint32_t inlinedArgs
= 2;
738 if (resumeMode() == ResumeMode::InlinedFunCall
) {
739 // The first argument to an inlined FunCall becomes |this|,
740 // if it exists. The rest are passed normally.
741 MOZ_ASSERT(IsInvokeOp(op_
));
742 inlinedArgs
+= GET_ARGC(pc_
) > 0 ? GET_ARGC(pc_
) - 1 : 0;
744 MOZ_ASSERT(resumeMode() == ResumeMode::InlinedAccessor
);
745 MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op_
));
746 // Setters are passed one argument. Getters are passed none.
747 if (IsSetPropOp(op_
)) {
752 // Calculate how many values are live on the stack across the call,
754 MOZ_ASSERT(inlinedArgs
<= exprStackSlots());
755 uint32_t liveStackSlots
= exprStackSlots() - inlinedArgs
;
757 JitSpew(JitSpew_BaselineBailouts
,
758 " pushing %u expression stack slots before fixup",
760 for (uint32_t i
= 0; i
< liveStackSlots
; i
++) {
761 Value v
= iter_
.read();
762 if (!writeValue(v
, "StackValue")) {
767 // When we inline js::fun_call, we bypass the native and inline the
768 // target directly. When rebuilding the stack, we need to fill in
769 // the right number of slots to make it look like the js_native was
771 if (resumeMode() == ResumeMode::InlinedFunCall
) {
772 // We must transform the stack from |target, this, args| to
773 // |js_fun_call, target, this, args|. The value of |js_fun_call|
774 // will never be observed, so we push |undefined| for it, followed
775 // by the remaining arguments.
776 JitSpew(JitSpew_BaselineBailouts
,
777 " pushing undefined to fixup funcall");
778 if (!writeValue(UndefinedValue(), "StackValue")) {
781 if (GET_ARGC(pc_
) > 0) {
782 JitSpew(JitSpew_BaselineBailouts
,
783 " pushing %u expression stack slots", inlinedArgs
);
784 for (uint32_t i
= 0; i
< inlinedArgs
; i
++) {
785 Value arg
= iter_
.read();
786 if (!writeValue(arg
, "StackValue")) {
791 // When we inline FunCall with no arguments, we push an extra
792 // |undefined| value for |this|. That value should not appear
793 // in the rebuilt baseline frame.
794 JitSpew(JitSpew_BaselineBailouts
, " pushing target of funcall");
795 Value target
= iter_
.read();
796 if (!writeValue(target
, "StackValue")) {
804 if (needToSaveCallerArgs()) {
805 // Save the actual arguments. They are needed to rebuild the callee frame.
806 if (!savedCallerArgs
.resize(inlinedArgs
)) {
809 for (uint32_t i
= 0; i
< inlinedArgs
; i
++) {
810 savedCallerArgs
[i
].set(iter_
.read());
813 if (IsSetPropOp(op_
)) {
814 // The RHS argument to SetProp remains on the stack after the
815 // operation and is observable, so we have to fill it in.
816 Value initialArg
= savedCallerArgs
[inlinedArgs
- 1];
817 JitSpew(JitSpew_BaselineBailouts
,
818 " pushing setter's initial argument");
819 if (!writeValue(initialArg
, "StackValue")) {
// Push the frame's expression-stack slots, then run the two resume-mode
// checks that can rewrite bailoutKind_: validating a scripted proxy's get
// trap result, and the CheckIsObject intermediate-value check.
829 bool BaselineStackBuilder::buildExpressionStack() {
830 JitSpew(JitSpew_BaselineBailouts
, " pushing %u expression stack slots",
833 for (uint32_t i
= 0; i
< exprStackSlots(); i
++) {
835 // If we are in the middle of propagating an exception from Ion by
836 // bailing to baseline due to debug mode, we might not have all
837 // the stack if we are at the newest frame.
839 // For instance, if calling |f()| pushed an Ion frame which threw,
840 // the snapshot expects the return value to be pushed, but it's
841 // possible nothing was pushed before we threw.
843 // We therefore use a fallible read here.
844 if (!iter_
.tryRead(&v
)) {
845 MOZ_ASSERT(propagatingIonExceptionForDebugMode() && !iter_
.moreFrames());
846 v
= MagicValue(JS_OPTIMIZED_OUT
);
848 if (!writeValue(v
, "StackValue")) {
853 if (resumeMode() == ResumeMode::ResumeAfterCheckProxyGetResult
) {
854 JitSpew(JitSpew_BaselineBailouts
,
855 " Checking that the proxy's get trap result matches "
858 if (peekLastValue(&returnVal
) && !returnVal
.isMagic(JS_OPTIMIZED_OUT
)) {
859 Value idVal
= iter_
.read();
860 Value targetVal
= iter_
.read();
862 MOZ_RELEASE_ASSERT(!idVal
.isMagic());
863 MOZ_RELEASE_ASSERT(targetVal
.isObject());
864 RootedObject
target(cx_
, &targetVal
.toObject());
865 RootedValue
rootedIdVal(cx_
, idVal
);
867 if (!PrimitiveValueToId
<CanGC
>(cx_
, rootedIdVal
, &id
)) {
870 RootedValue
value(cx_
, returnVal
);
873 ScriptedProxyHandler::checkGetTrapResult(cx_
, target
, id
, value
);
// On mismatch: stash the id in the traced tempId slot and convert the
// bailout into the matching throw kind.
874 if (validation
!= ScriptedProxyHandler::GetTrapValidationResult::OK
) {
875 header_
->tempId
= id
.get();
878 JitSpew_BaselineBailouts
,
879 " Proxy get trap result mismatch! Overwriting bailout kind");
880 if (validation
== ScriptedProxyHandler::GetTrapValidationResult::
881 MustReportSameValue
) {
882 bailoutKind_
= BailoutKind::ThrowProxyTrapMustReportSameValue
;
883 } else if (validation
== ScriptedProxyHandler::GetTrapValidationResult::
884 MustReportUndefined
) {
885 bailoutKind_
= BailoutKind::ThrowProxyTrapMustReportUndefined
;
895 if (resumeMode() == ResumeMode::ResumeAfterCheckIsObject
) {
896 JitSpew(JitSpew_BaselineBailouts
,
897 " Checking that intermediate value is an object");
899 if (iter_
.tryRead(&returnVal
) && !returnVal
.isObject()) {
900 MOZ_ASSERT(!returnVal
.isMagic());
901 JitSpew(JitSpew_BaselineBailouts
,
902 " Not an object! Overwriting bailout kind");
903 bailoutKind_
= BailoutKind::ThrowCheckIsObject
;
// When resuming in a finally block, push the pending exception, its stack,
// and a |throwing = true| flag, matching what the finally prologue expects.
910 bool BaselineStackBuilder::buildFinallyException() {
911 MOZ_ASSERT(resumingInFinallyBlock());
913 if (!writeValue(excInfo_
->finallyException(), "Exception")) {
916 if (!writeValue(excInfo_
->finallyExceptionStack(), "ExceptionStack")) {
919 if (!writeValue(BooleanValue(true), "throwing")) {
// Close out the current Baseline frame and start the BaselineStub frame for
// the next inlined callee.
926 bool BaselineStackBuilder::prepareForNextFrame(
927 HandleValueVector savedCallerArgs
) {
928 const uint32_t frameSize
= framePushed();
930 // Write out descriptor and return address for the baseline frame.
931 // The icEntry in question MUST have an inlinable fallback stub.
932 if (!finishOuterFrame()) {
936 return buildStubFrame(frameSize
, savedCallerArgs
);
// Stamp the interpreter fields on the rebuilt frame, then push the
// BaselineJS frame descriptor and the Baseline Interpreter's per-op IC
// return address so the stub frame can return into the interpreter.
939 bool BaselineStackBuilder::finishOuterFrame() {
946 const BaselineInterpreter
& baselineInterp
=
947 cx_
->runtime()->jitRuntime()->baselineInterpreter();
949 blFrame()->setInterpreterFields(script_
, pc_
);
951 // Write out descriptor of BaselineJS frame.
952 size_t baselineFrameDescr
= MakeFrameDescriptor(FrameType::BaselineJS
);
953 if (!writeWord(baselineFrameDescr
, "Descriptor")) {
957 uint8_t* retAddr
= baselineInterp
.retAddrForIC(op_
);
958 return writePtr(retAddr
, "ReturnAddr");
961 bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize
,
962 HandleValueVector savedCallerArgs
) {
963 // Build baseline stub frame:
986 JitSpew(JitSpew_BaselineBailouts
, " [BASELINE-STUB FRAME]");
988 // Write previous frame pointer (saved earlier).
989 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
992 prevFramePtr_
= virtualPointerAtStackOffset(0);
994 // Write stub pointer.
995 uint32_t pcOff
= script_
->pcToOffset(pc_
);
996 JitScript
* jitScript
= script_
->jitScript();
997 const ICEntry
& icEntry
= jitScript
->icEntryFromPCOffset(pcOff
);
998 ICFallbackStub
* fallback
= jitScript
->fallbackStubForICEntry(&icEntry
);
999 if (!writePtr(fallback
, "StubPtr")) {
1003 // Write out the arguments, copied from the baseline frame. The order
1004 // of the arguments is reversed relative to the baseline frame's stack
1006 MOZ_ASSERT(IsIonInlinableOp(op_
));
1007 bool pushedNewTarget
= IsConstructPC(pc_
);
1008 unsigned actualArgc
;
1010 if (needToSaveCallerArgs()) {
1011 // For accessors, the arguments are not on the stack anymore,
1012 // but they are copied in a vector and are written here.
1013 callee
= savedCallerArgs
[0];
1014 actualArgc
= IsSetPropOp(op_
) ? 1 : 0;
1016 // Align the stack based on the number of arguments.
1017 size_t afterFrameSize
=
1018 (actualArgc
+ 1) * sizeof(Value
) + JitFrameLayout::Size();
1019 if (!maybeWritePadding(JitStackAlignment
, afterFrameSize
, "Padding")) {
1024 MOZ_ASSERT(actualArgc
+ 2 <= exprStackSlots());
1025 MOZ_ASSERT(savedCallerArgs
.length() == actualArgc
+ 2);
1026 for (unsigned i
= 0; i
< actualArgc
+ 1; i
++) {
1027 size_t arg
= savedCallerArgs
.length() - (i
+ 1);
1028 if (!writeValue(savedCallerArgs
[arg
], "ArgVal")) {
1032 } else if (resumeMode() == ResumeMode::InlinedFunCall
&& GET_ARGC(pc_
) == 0) {
1033 // When calling FunCall with 0 arguments, we push |undefined|
1034 // for this. See BaselineCacheIRCompiler::pushFunCallArguments.
1035 MOZ_ASSERT(!pushedNewTarget
);
1037 // Align the stack based on pushing |this| and 0 arguments.
1038 size_t afterFrameSize
= sizeof(Value
) + JitFrameLayout::Size();
1039 if (!maybeWritePadding(JitStackAlignment
, afterFrameSize
, "Padding")) {
1042 // Push an undefined value for |this|.
1043 if (!writeValue(UndefinedValue(), "ThisValue")) {
1046 size_t calleeSlot
= blFrame()->numValueSlots(frameSize
) - 1;
1047 callee
= *blFrame()->valueSlot(calleeSlot
);
1050 MOZ_ASSERT(resumeMode() == ResumeMode::InlinedStandardCall
||
1051 resumeMode() == ResumeMode::InlinedFunCall
);
1052 actualArgc
= GET_ARGC(pc_
);
1053 if (resumeMode() == ResumeMode::InlinedFunCall
) {
1054 // See BaselineCacheIRCompiler::pushFunCallArguments.
1055 MOZ_ASSERT(actualArgc
> 0);
1059 // In addition to the formal arguments, we must also push |this|.
1060 // When calling a constructor, we must also push |newTarget|.
1061 uint32_t numArguments
= actualArgc
+ 1 + pushedNewTarget
;
1063 // Align the stack based on the number of arguments.
1064 size_t afterFrameSize
=
1065 numArguments
* sizeof(Value
) + JitFrameLayout::Size();
1066 if (!maybeWritePadding(JitStackAlignment
, afterFrameSize
, "Padding")) {
1070 // Copy the arguments and |this| from the BaselineFrame, in reverse order.
1071 size_t valueSlot
= blFrame()->numValueSlots(frameSize
) - 1;
1072 size_t calleeSlot
= valueSlot
- numArguments
;
1074 for (size_t i
= valueSlot
; i
> calleeSlot
; i
--) {
1075 Value v
= *blFrame()->valueSlot(i
);
1076 if (!writeValue(v
, "ArgVal")) {
1081 callee
= *blFrame()->valueSlot(calleeSlot
);
1084 // In case these arguments need to be copied on the stack again for a
1085 // rectifier frame, save the framePushed values here for later use.
1086 size_t endOfBaselineStubArgs
= framePushed();
1088 // Push callee token (must be a JS Function)
1089 JitSpew(JitSpew_BaselineBailouts
, " Callee = %016" PRIx64
,
1090 callee
.asRawBits());
1092 JSFunction
* calleeFun
= &callee
.toObject().as
<JSFunction
>();
1093 if (!writePtr(CalleeToToken(calleeFun
, pushedNewTarget
), "CalleeToken")) {
1096 const ICEntry
& icScriptEntry
= icScript_
->icEntryFromPCOffset(pcOff
);
1097 ICFallbackStub
* icScriptFallback
=
1098 icScript_
->fallbackStubForICEntry(&icScriptEntry
);
1099 setNextCallee(calleeFun
, icScriptFallback
->trialInliningState());
1101 // Push BaselineStub frame descriptor
1102 size_t baselineStubFrameDescr
=
1103 MakeFrameDescriptorForJitCall(FrameType::BaselineStub
, actualArgc
);
1104 if (!writeWord(baselineStubFrameDescr
, "Descriptor")) {
1108 // Push return address into ICCall_Scripted stub, immediately after the call.
1109 void* baselineCallReturnAddr
= getStubReturnAddress();
1110 MOZ_ASSERT(baselineCallReturnAddr
);
1111 if (!writePtr(baselineCallReturnAddr
, "ReturnAddr")) {
1115 // The stack must be aligned after the callee pushes the frame pointer.
1116 MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment
== 0);
1118 // Build a rectifier frame if necessary
1119 if (actualArgc
< calleeFun
->nargs() &&
1120 !buildRectifierFrame(actualArgc
, endOfBaselineStubArgs
)) {
1127 bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc
,
1128 size_t endOfBaselineStubArgs
) {
1129 // Push a reconstructed rectifier frame.
1130 // +===============+
1132 // +---------------+
1134 // +---------------+
1136 // +---------------+
1138 // +---------------+
1140 // +---------------+
1142 // +---------------+
1144 // +---------------+
1146 // +---------------+
1148 // +---------------+
1150 // +---------------+
1152 // +===============+
1154 JitSpew(JitSpew_BaselineBailouts
, " [RECTIFIER FRAME]");
1155 bool pushedNewTarget
= IsConstructPC(pc_
);
1157 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
1160 prevFramePtr_
= virtualPointerAtStackOffset(0);
1162 // Align the stack based on the number of arguments.
1163 size_t afterFrameSize
=
1164 (nextCallee()->nargs() + 1 + pushedNewTarget
) * sizeof(Value
) +
1165 RectifierFrameLayout::Size();
1166 if (!maybeWritePadding(JitStackAlignment
, afterFrameSize
, "Padding")) {
1170 // Copy new.target, if necessary.
1171 if (pushedNewTarget
) {
1172 size_t newTargetOffset
= (framePushed() - endOfBaselineStubArgs
) +
1173 (actualArgc
+ 1) * sizeof(Value
);
1174 Value newTargetValue
= *valuePointerAtStackOffset(newTargetOffset
);
1175 if (!writeValue(newTargetValue
, "CopiedNewTarget")) {
1180 // Push undefined for missing arguments.
1181 for (unsigned i
= 0; i
< (nextCallee()->nargs() - actualArgc
); i
++) {
1182 if (!writeValue(UndefinedValue(), "FillerVal")) {
1187 // Copy arguments + thisv from BaselineStub frame.
1188 if (!subtract((actualArgc
+ 1) * sizeof(Value
), "CopiedArgs")) {
1191 BufferPointer
<uint8_t> stubArgsEnd
=
1192 pointerAtStackOffset
<uint8_t>(framePushed() - endOfBaselineStubArgs
);
1193 JitSpew(JitSpew_BaselineBailouts
, " MemCpy from %p", stubArgsEnd
.get());
1194 memcpy(pointerAtStackOffset
<uint8_t>(0).get(), stubArgsEnd
.get(),
1195 (actualArgc
+ 1) * sizeof(Value
));
1197 // Push calleeToken again.
1198 if (!writePtr(CalleeToToken(nextCallee(), pushedNewTarget
), "CalleeToken")) {
1202 // Push rectifier frame descriptor
1203 size_t rectifierFrameDescr
=
1204 MakeFrameDescriptorForJitCall(FrameType::Rectifier
, actualArgc
);
1205 if (!writeWord(rectifierFrameDescr
, "Descriptor")) {
1209 // Push return address into the ArgumentsRectifier code, immediately after the
1211 void* rectReturnAddr
=
1212 cx_
->runtime()->jitRuntime()->getArgumentsRectifierReturnAddr().value
;
1213 MOZ_ASSERT(rectReturnAddr
);
1214 if (!writePtr(rectReturnAddr
, "ReturnAddr")) {
1218 // The stack must be aligned after the callee pushes the frame pointer.
1219 MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment
== 0);
1224 bool BaselineStackBuilder::finishLastFrame() {
1225 const BaselineInterpreter
& baselineInterp
=
1226 cx_
->runtime()->jitRuntime()->baselineInterpreter();
1228 setResumeFramePtr(prevFramePtr());
1230 // Compute the native address (within the Baseline Interpreter) that we will
1231 // resume at and initialize the frame's interpreter fields.
1232 uint8_t* resumeAddr
;
1233 if (isPrologueBailout()) {
1234 JitSpew(JitSpew_BaselineBailouts
, " Resuming into prologue.");
1235 MOZ_ASSERT(pc_
== script_
->code());
1236 blFrame()->setInterpreterFieldsForPrologue(script_
);
1237 resumeAddr
= baselineInterp
.bailoutPrologueEntryAddr();
1238 } else if (propagatingIonExceptionForDebugMode()) {
1239 // When propagating an exception for debug mode, set the
1240 // resume pc to the throwing pc, so that Debugger hooks report
1241 // the correct pc offset of the throwing op instead of its
1243 jsbytecode
* throwPC
= script_
->offsetToPC(iter_
.pcOffset());
1244 blFrame()->setInterpreterFields(script_
, throwPC
);
1245 resumeAddr
= baselineInterp
.interpretOpAddr().value
;
1247 jsbytecode
* resumePC
= getResumePC();
1248 blFrame()->setInterpreterFields(script_
, resumePC
);
1249 resumeAddr
= baselineInterp
.interpretOpAddr().value
;
1251 setResumeAddr(resumeAddr
);
1252 JitSpew(JitSpew_BaselineBailouts
, " Set resumeAddr=%p", resumeAddr
);
1254 if (cx_
->runtime()->geckoProfiler().enabled()) {
1255 // Register bailout with profiler.
1256 const char* filename
= script_
->filename();
1257 if (filename
== nullptr) {
1258 filename
= "<unknown>";
1260 unsigned len
= strlen(filename
) + 200;
1261 UniqueChars
buf(js_pod_malloc
<char>(len
));
1262 if (buf
== nullptr) {
1263 ReportOutOfMemory(cx_
);
1266 snprintf(buf
.get(), len
, "%s %s %s on line %u of %s:%u",
1267 BailoutKindString(bailoutKind()), resumeAfter() ? "after" : "at",
1268 CodeName(op_
), PCToLineNumber(script_
, pc_
), filename
,
1270 cx_
->runtime()->geckoProfiler().markEvent("Bailout", buf
.get());
1277 // The |envChain| slot must not be optimized out if the currently
1278 // active scope requires any EnvironmentObjects beyond what is
1279 // available at body scope. This checks that scope chain does not
1280 // require any such EnvironmentObjects.
1281 // See also: |CompileInfo::isObservableFrameSlot|
1282 bool BaselineStackBuilder::envChainSlotCanBeOptimized() {
1283 jsbytecode
* pc
= script_
->offsetToPC(iter_
.pcOffset());
1284 Scope
* scopeIter
= script_
->innermostScope(pc
);
1285 while (scopeIter
!= script_
->bodyScope()) {
1286 if (!scopeIter
|| scopeIter
->hasEnvironment()) {
1289 scopeIter
= scopeIter
->enclosing();
1294 bool jit::AssertBailoutStackDepth(JSContext
* cx
, JSScript
* script
,
1295 jsbytecode
* pc
, ResumeMode mode
,
1296 uint32_t exprStackSlots
) {
1297 if (IsResumeAfter(mode
)) {
1301 uint32_t expectedDepth
;
1303 if (!ReconstructStackDepth(cx
, script
, pc
, &expectedDepth
, &reachablePC
)) {
1310 JSOp op
= JSOp(*pc
);
1312 if (mode
== ResumeMode::InlinedFunCall
) {
1313 // For inlined fun.call(this, ...); the reconstructed stack depth will
1314 // include the |this|, but the exprStackSlots won't.
1315 // Exception: if there are no arguments, the depths do match.
1316 MOZ_ASSERT(IsInvokeOp(op
));
1317 if (GET_ARGC(pc
) > 0) {
1318 MOZ_ASSERT(expectedDepth
== exprStackSlots
+ 1);
1320 MOZ_ASSERT(expectedDepth
== exprStackSlots
);
1325 if (mode
== ResumeMode::InlinedAccessor
) {
1326 // Accessors coming out of ion are inlined via a complete lie perpetrated by
1327 // the compiler internally. Ion just rearranges the stack, and pretends that
1328 // it looked like a call all along.
1329 // This means that the depth is actually one *more* than expected by the
1330 // interpreter, as there is now a JSFunction, |this| and [arg], rather than
1331 // the expected |this| and [arg].
1332 // If the inlined accessor is a GetElem operation, the numbers do match, but
1333 // that's just because GetElem expects one more item on the stack. Note that
1334 // none of that was pushed, but it's still reflected in exprStackSlots.
1335 MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op
));
1336 if (IsGetElemOp(op
)) {
1337 MOZ_ASSERT(exprStackSlots
== expectedDepth
);
1339 MOZ_ASSERT(exprStackSlots
== expectedDepth
+ 1);
1344 // In all other cases, the depth must match.
1345 MOZ_ASSERT(exprStackSlots
== expectedDepth
);
1349 bool BaselineStackBuilder::validateFrame() {
1350 const uint32_t frameSize
= framePushed();
1351 blFrame()->setDebugFrameSize(frameSize
);
1352 JitSpew(JitSpew_BaselineBailouts
, " FrameSize=%u", frameSize
);
1354 // debugNumValueSlots() is based on the frame size, do some sanity checks.
1355 MOZ_ASSERT(blFrame()->debugNumValueSlots() >= script_
->nfixed());
1356 MOZ_ASSERT(blFrame()->debugNumValueSlots() <= script_
->nslots());
1358 uint32_t expectedSlots
= exprStackSlots();
1359 if (resumingInFinallyBlock()) {
1360 // If we are resuming in a finally block, we push three extra values on the
1361 // stack (the exception, the exception stack, and |throwing|), so the depth
1362 // at the resume PC should be the depth at the fault PC plus three.
1365 return AssertBailoutStackDepth(cx_
, script_
, pc_
, resumeMode(),
1370 void* BaselineStackBuilder::getStubReturnAddress() {
1371 const BaselineICFallbackCode
& code
=
1372 cx_
->runtime()->jitRuntime()->baselineICFallbackCode();
1374 if (IsGetPropOp(op_
)) {
1375 return code
.bailoutReturnAddr(BailoutReturnKind::GetProp
);
1377 if (IsSetPropOp(op_
)) {
1378 return code
.bailoutReturnAddr(BailoutReturnKind::SetProp
);
1380 if (IsGetElemOp(op_
)) {
1381 return code
.bailoutReturnAddr(BailoutReturnKind::GetElem
);
1384 // This should be a call op of some kind, now.
1385 MOZ_ASSERT(IsInvokeOp(op_
) && !IsSpreadOp(op_
));
1386 if (IsConstructOp(op_
)) {
1387 return code
.bailoutReturnAddr(BailoutReturnKind::New
);
1389 return code
.bailoutReturnAddr(BailoutReturnKind::Call
);
1392 static inline jsbytecode
* GetNextNonLoopHeadPc(jsbytecode
* pc
) {
1393 JSOp op
= JSOp(*pc
);
1396 return pc
+ GET_JUMP_OFFSET(pc
);
1398 case JSOp::LoopHead
:
1400 return GetNextPc(pc
);
1407 // Returns the pc to resume execution at in Baseline after a bailout.
1408 jsbytecode
* BaselineStackBuilder::getResumePC() {
1409 if (resumeAfter()) {
1410 return GetNextPc(pc_
);
1413 // If we are resuming at a LoopHead op, resume at the next op to avoid
1414 // a bailout -> enter Ion -> bailout loop with --ion-eager.
1416 // Cycles can cause the loop below to not terminate. Empty loops are one
1422 // We do cycle detection below with the "tortoise and the hare" algorithm.
1423 jsbytecode
* slowerPc
= pc_
;
1424 jsbytecode
* fasterPc
= pc_
;
1426 // Advance fasterPc twice as fast as slowerPc.
1427 slowerPc
= GetNextNonLoopHeadPc(slowerPc
);
1428 fasterPc
= GetNextNonLoopHeadPc(fasterPc
);
1429 fasterPc
= GetNextNonLoopHeadPc(fasterPc
);
1431 // Break on cycles or at the end of goto sequences.
1432 if (fasterPc
== slowerPc
) {
1440 bool BaselineStackBuilder::isPrologueBailout() {
1441 // If we are propagating an exception for debug mode, we will not resume
1442 // into baseline code, but instead into HandleExceptionBaseline (i.e.,
1443 // never before the prologue).
1444 return iter_
.pcOffset() == 0 && !iter_
.resumeAfter() &&
1445 !propagatingIonExceptionForDebugMode();
1448 // Build a baseline stack frame.
1449 bool BaselineStackBuilder::buildOneFrame() {
1450 // Build a baseline frame:
1451 // +===============+
1452 // | PrevFramePtr | <-- initFrame()
1453 // +---------------+
1454 // | Baseline | <-- buildBaselineFrame()
1456 // +---------------+
1457 // | Fixed0 | <-- buildFixedSlots()
1458 // +---------------+
1460 // +---------------+
1462 // +---------------+
1463 // | Stack0 | <-- buildExpressionStack() -or- fixupCallerArgs()
1464 // +---------------+
1466 // +---------------+ If we are building the frame in which we will
1467 // | StackS | <-- resume, we stop here.
1468 // +---------------+ finishLastFrame() sets up the interpreter fields.
1471 // . . <-- If there are additional frames inlined into this
1472 // | Descr(BLJS) | one, we finish this frame. We generate a stub
1473 // +---------------+ frame (and maybe also a rectifier frame) between
1474 // | ReturnAddr | this frame and the inlined frame.
1475 // +===============+ See: prepareForNextFrame()
1481 if (!buildBaselineFrame()) {
1485 if (fun_
&& !buildArguments()) {
1489 if (!buildFixedSlots()) {
1493 bool fixedUp
= false;
1494 RootedValueVector
savedCallerArgs(cx_
);
1495 if (iter_
.moreFrames() && !fixUpCallerArgs(&savedCallerArgs
, &fixedUp
)) {
1500 if (!buildExpressionStack()) {
1503 if (resumingInFinallyBlock() && !buildFinallyException()) {
1509 if (!validateFrame()) {
1515 const uint32_t pcOff
= script_
->pcToOffset(pc());
1516 JitSpew(JitSpew_BaselineBailouts
,
1517 " Resuming %s pc offset %d (op %s) (line %u) of %s:%u:%u",
1518 resumeAfter() ? "after" : "at", (int)pcOff
, CodeName(op_
),
1519 PCToLineNumber(script_
, pc()), script_
->filename(), script_
->lineno(),
1520 script_
->column().oneOriginValue());
1521 JitSpew(JitSpew_BaselineBailouts
, " Bailout kind: %s",
1522 BailoutKindString(bailoutKind()));
1525 // If this was the last inline frame, or we are bailing out to a catch or
1526 // finally block in this frame, then unpacking is almost done.
1528 return finishLastFrame();
1531 // Otherwise, this is an outer frame for an inlined call or
1532 // accessor. We will be building an inner frame. Before that,
1533 // we must create a stub frame, and potentially a rectifier frame.
1534 return prepareForNextFrame(savedCallerArgs
);
1537 bool jit::BailoutIonToBaseline(JSContext
* cx
, JitActivation
* activation
,
1538 const JSJitFrameIter
& iter
,
1539 BaselineBailoutInfo
** bailoutInfo
,
1540 const ExceptionBailoutInfo
* excInfo
,
1541 BailoutReason reason
) {
1542 MOZ_ASSERT(bailoutInfo
!= nullptr);
1543 MOZ_ASSERT(*bailoutInfo
== nullptr);
1544 MOZ_ASSERT(iter
.isBailoutJS());
1546 // Caller should have saved the exception while we perform the bailout.
1547 MOZ_ASSERT(!cx
->isExceptionPending());
1549 // Ion bailout can fail due to overrecursion and OOM. In such cases we
1550 // cannot honor any further Debugger hooks on the frame, and need to
1551 // ensure that its Debugger.Frame entry is cleaned up.
1552 auto guardRemoveRematerializedFramesFromDebugger
=
1553 mozilla::MakeScopeExit([&] {
1554 activation
->removeRematerializedFramesFromDebugger(cx
, iter
.fp());
1557 // Always remove the RInstructionResults from the JitActivation, even in
1558 // case of failures as the stack frame is going away after the bailout.
1559 auto removeIonFrameRecovery
= mozilla::MakeScopeExit(
1560 [&] { activation
->removeIonFrameRecovery(iter
.jsFrame()); });
1562 // The caller of the top frame must be one of the following:
1563 // IonJS - Ion calling into Ion.
1564 // BaselineStub - Baseline calling into Ion.
1565 // Entry / WasmToJSJit - Interpreter or other (wasm) calling into Ion.
1566 // Rectifier - Arguments rectifier calling into Ion.
1567 // BaselineJS - Resume'd Baseline, then likely OSR'd into Ion.
1568 MOZ_ASSERT(iter
.isBailoutJS());
1569 #if defined(DEBUG) || defined(JS_JITSPEW)
1570 FrameType prevFrameType
= iter
.prevType();
1571 MOZ_ASSERT(JSJitFrameIter::isEntry(prevFrameType
) ||
1572 prevFrameType
== FrameType::IonJS
||
1573 prevFrameType
== FrameType::BaselineStub
||
1574 prevFrameType
== FrameType::Rectifier
||
1575 prevFrameType
== FrameType::TrampolineNative
||
1576 prevFrameType
== FrameType::IonICCall
||
1577 prevFrameType
== FrameType::BaselineJS
||
1578 prevFrameType
== FrameType::BaselineInterpreterEntry
);
1581 // All incoming frames are going to look like this:
1583 // +---------------+
1585 // +---------------+
1588 // +---------------+
1590 // +---------------+
1592 // +---------------+
1594 // +---------------+
1596 // +---------------+
1598 // +---------------+
1599 // | ||||| | <---- Overwrite starting here.
1602 // +---------------+
1604 JitSpew(JitSpew_BaselineBailouts
,
1605 "Bailing to baseline %s:%u:%u (IonScript=%p) (FrameType=%d)",
1606 iter
.script()->filename(), iter
.script()->lineno(),
1607 iter
.script()->column().oneOriginValue(), (void*)iter
.ionScript(),
1608 (int)prevFrameType
);
1611 if (excInfo
->catchingException()) {
1612 JitSpew(JitSpew_BaselineBailouts
, "Resuming in catch or finally block");
1614 if (excInfo
->propagatingIonExceptionForDebugMode()) {
1615 JitSpew(JitSpew_BaselineBailouts
, "Resuming in-place for debug mode");
1619 JitSpew(JitSpew_BaselineBailouts
,
1620 " Reading from snapshot offset %u size %zu", iter
.snapshotOffset(),
1621 iter
.ionScript()->snapshotsListSize());
1623 iter
.script()->updateJitCodeRaw(cx
->runtime());
1625 // Under a bailout, there is no need to invalidate the frame after
1626 // evaluating the recover instruction, as the invalidation is only needed in
1627 // cases where the frame is introspected ahead of the bailout.
1628 MaybeReadFallback
recoverBailout(cx
, activation
, &iter
,
1629 MaybeReadFallback::Fallback_DoNothing
);
1631 // Ensure that all value locations are readable from the SnapshotIterator.
1632 // Get the RInstructionResults from the JitActivation if the frame got
1633 // recovered ahead of the bailout.
1634 SnapshotIterator
snapIter(iter
, activation
->bailoutData()->machineState());
1635 if (!snapIter
.initInstructionResults(recoverBailout
)) {
1639 #ifdef TRACK_SNAPSHOTS
1640 snapIter
.spewBailingFrom();
1643 BaselineStackBuilder
builder(cx
, iter
, snapIter
, excInfo
, reason
);
1644 if (!builder
.init()) {
1648 JitSpew(JitSpew_BaselineBailouts
, " Incoming frame ptr = %p",
1649 builder
.startFrame());
1650 if (iter
.maybeCallee()) {
1651 JitSpew(JitSpew_BaselineBailouts
, " Callee function (%s:%u:%u)",
1652 iter
.script()->filename(), iter
.script()->lineno(),
1653 iter
.script()->column().oneOriginValue());
1655 JitSpew(JitSpew_BaselineBailouts
, " No callee!");
1658 if (iter
.isConstructing()) {
1659 JitSpew(JitSpew_BaselineBailouts
, " Constructing!");
1661 JitSpew(JitSpew_BaselineBailouts
, " Not constructing!");
1664 JitSpew(JitSpew_BaselineBailouts
, " Restoring frames:");
1667 // Skip recover instructions as they are already recovered by
1668 // |initInstructionResults|.
1669 snapIter
.settleOnFrame();
1671 JitSpew(JitSpew_BaselineBailouts
, " FrameNo %zu", builder
.frameNo());
1673 if (!builder
.buildOneFrame()) {
1674 MOZ_ASSERT(cx
->isExceptionPending());
1678 if (builder
.done()) {
1682 builder
.nextFrame();
1684 JitSpew(JitSpew_BaselineBailouts
, " Done restoring frames");
1686 BailoutKind bailoutKind
= builder
.bailoutKind();
1688 if (!builder
.outermostFrameFormals().empty()) {
1689 // Set the first frame's formals, see the comment in InitFromBailout.
1690 Value
* argv
= builder
.startFrame()->actualArgs();
1691 mozilla::PodCopy(argv
, builder
.outermostFrameFormals().begin(),
1692 builder
.outermostFrameFormals().length());
1696 bool overRecursed
= false;
1697 BaselineBailoutInfo
* info
= builder
.info();
1698 size_t numBytesToPush
= info
->copyStackTop
- info
->copyStackBottom
;
1699 MOZ_ASSERT((numBytesToPush
% sizeof(uintptr_t)) == 0);
1700 uint8_t* newsp
= info
->incomingStack
- numBytesToPush
;
1702 if (Simulator::Current()->overRecursed(uintptr_t(newsp
))) {
1703 overRecursed
= true;
1706 AutoCheckRecursionLimit
recursion(cx
);
1707 if (!recursion
.checkWithStackPointerDontReport(cx
, newsp
)) {
1708 overRecursed
= true;
1712 JitSpew(JitSpew_BaselineBailouts
, " Overrecursion check failed!");
1713 ReportOverRecursed(cx
);
1717 // Take the reconstructed baseline stack so it doesn't get freed when builder
1719 info
= builder
.takeBuffer();
1720 info
->numFrames
= builder
.frameNo() + 1;
1721 info
->bailoutKind
.emplace(bailoutKind
);
1722 *bailoutInfo
= info
;
1723 guardRemoveRematerializedFramesFromDebugger
.release();
1727 static void InvalidateAfterBailout(JSContext
* cx
, HandleScript outerScript
,
1728 const char* reason
) {
1729 // In some cases, the computation of recover instruction can invalidate the
1730 // Ion script before we reach the end of the bailout. Thus, if the outer
1731 // script no longer have any Ion script attached, then we just skip the
1734 // For example, such case can happen if the template object for an unboxed
1735 // objects no longer match the content of its properties (see Bug 1174547)
1736 if (!outerScript
->hasIonScript()) {
1737 JitSpew(JitSpew_BaselineBailouts
, "Ion script is already invalidated");
1741 // Record a invalidation for this script in the jit hints map
1742 if (cx
->runtime()->jitRuntime()->hasJitHintsMap()) {
1743 JitHintsMap
* jitHints
= cx
->runtime()->jitRuntime()->getJitHintsMap();
1744 jitHints
->recordInvalidation(outerScript
);
1747 MOZ_ASSERT(!outerScript
->ionScript()->invalidated());
1749 JitSpew(JitSpew_BaselineBailouts
, "Invalidating due to %s", reason
);
1750 Invalidate(cx
, outerScript
);
1753 static void HandleLexicalCheckFailure(JSContext
* cx
, HandleScript outerScript
,
1754 HandleScript innerScript
) {
1755 JitSpew(JitSpew_IonBailouts
,
1756 "Lexical check failure %s:%u:%u, inlined into %s:%u:%u",
1757 innerScript
->filename(), innerScript
->lineno(),
1758 innerScript
->column().oneOriginValue(), outerScript
->filename(),
1759 outerScript
->lineno(), outerScript
->column().oneOriginValue());
1761 if (!innerScript
->failedLexicalCheck()) {
1762 innerScript
->setFailedLexicalCheck();
1765 InvalidateAfterBailout(cx
, outerScript
, "lexical check failure");
1766 if (innerScript
->hasIonScript()) {
1767 Invalidate(cx
, innerScript
);
1771 static bool CopyFromRematerializedFrame(JSContext
* cx
, JitActivation
* act
,
1772 uint8_t* fp
, size_t inlineDepth
,
1773 BaselineFrame
* frame
) {
1774 RematerializedFrame
* rematFrame
=
1775 act
->lookupRematerializedFrame(fp
, inlineDepth
);
1777 // We might not have rematerialized a frame if the user never requested a
1778 // Debugger.Frame for it.
1783 MOZ_ASSERT(rematFrame
->script() == frame
->script());
1784 MOZ_ASSERT(rematFrame
->numActualArgs() == frame
->numActualArgs());
1786 frame
->setEnvironmentChain(rematFrame
->environmentChain());
1788 if (frame
->isFunctionFrame()) {
1789 frame
->thisArgument() = rematFrame
->thisArgument();
1792 for (unsigned i
= 0; i
< frame
->numActualArgs(); i
++) {
1793 frame
->argv()[i
] = rematFrame
->argv()[i
];
1796 for (size_t i
= 0; i
< frame
->script()->nfixed(); i
++) {
1797 *frame
->valueSlot(i
) = rematFrame
->locals()[i
];
1800 if (frame
->script()->noScriptRval()) {
1801 frame
->setReturnValue(UndefinedValue());
1803 frame
->setReturnValue(rematFrame
->returnValue());
1806 // Don't copy over the hasCachedSavedFrame bit. The new BaselineFrame we're
1807 // building has a different AbstractFramePtr, so it won't be found in the
1808 // LiveSavedFrameCache if we look there.
1810 JitSpew(JitSpew_BaselineBailouts
,
1811 " Copied from rematerialized frame at (%p,%zu)", fp
, inlineDepth
);
1813 // Propagate the debuggee frame flag. For the case where the Debugger did
1814 // not rematerialize an Ion frame, the baseline frame has its debuggee
1815 // flag set iff its script is considered a debuggee. See the debuggee case
1816 // in InitFromBailout.
1817 if (rematFrame
->isDebuggee()) {
1818 frame
->setIsDebuggee();
1819 return DebugAPI::handleIonBailout(cx
, rematFrame
, frame
);
// Policy describing how to respond to a bailout of a given kind.
// DisableIfFrequent and NoAction are restored from their uses later in this
// file (corruption dropped the enumerator lines and the closing brace).
enum class BailoutAction {
  InvalidateImmediately,  // Invalidate the Ion script right away.
  InvalidateIfFrequent,   // Invalidate only if this bailout kind recurs often.
  DisableIfFrequent,      // Disable Ion recompilation if it recurs often.
  NoAction                // Leave the Ion script alone.
};
1832 bool jit::FinishBailoutToBaseline(BaselineBailoutInfo
* bailoutInfoArg
) {
1833 JitSpew(JitSpew_BaselineBailouts
, " Done restoring frames");
1835 JSContext
* cx
= TlsContext
.get();
1836 // Use UniquePtr to free the bailoutInfo before we return, and root it for
1837 // the tempId field.
1838 Rooted
<UniquePtr
<BaselineBailoutInfo
>> bailoutInfo(cx
, bailoutInfoArg
);
1839 bailoutInfoArg
= nullptr;
1841 MOZ_DIAGNOSTIC_ASSERT(*bailoutInfo
->bailoutKind
!= BailoutKind::Unreachable
);
1843 // jit::Bailout(), jit::InvalidationBailout(), and jit::HandleException()
1844 // should have reset the counter to zero.
1845 MOZ_ASSERT(!cx
->isInUnsafeRegion());
1847 BaselineFrame
* topFrame
= GetTopBaselineFrame(cx
);
1849 // We have to get rid of the rematerialized frame, whether it is
1850 // restored or unwound.
1851 uint8_t* incomingStack
= bailoutInfo
->incomingStack
;
1852 auto guardRemoveRematerializedFramesFromDebugger
=
1853 mozilla::MakeScopeExit([&] {
1854 JitActivation
* act
= cx
->activation()->asJit();
1855 act
->removeRematerializedFramesFromDebugger(cx
, incomingStack
);
1858 // Ensure the frame has a call object if it needs one.
1859 if (!EnsureHasEnvironmentObjects(cx
, topFrame
)) {
1863 // Create arguments objects for bailed out frames, to maintain the invariant
1864 // that script->needsArgsObj() implies frame->hasArgsObj().
1865 RootedScript
innerScript(cx
, nullptr);
1866 RootedScript
outerScript(cx
, nullptr);
1868 MOZ_ASSERT(cx
->currentlyRunningInJit());
1869 JSJitFrameIter
iter(cx
->activation()->asJit());
1870 uint8_t* outerFp
= nullptr;
1872 // Iter currently points at the exit frame. Get the previous frame
1873 // (which must be a baseline frame), and set it as the last profiling
1875 if (cx
->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
1877 MOZ_ASSERT(iter
.prevType() == FrameType::BaselineJS
);
1878 JitFrameLayout
* fp
= reinterpret_cast<JitFrameLayout
*>(iter
.prevFp());
1879 cx
->jitActivation
->setLastProfilingFrame(fp
);
1882 uint32_t numFrames
= bailoutInfo
->numFrames
;
1883 MOZ_ASSERT(numFrames
> 0);
1885 uint32_t frameno
= 0;
1886 while (frameno
< numFrames
) {
1887 MOZ_ASSERT(!iter
.isIonJS());
1889 if (iter
.isBaselineJS()) {
1890 BaselineFrame
* frame
= iter
.baselineFrame();
1891 MOZ_ASSERT(frame
->script()->hasBaselineScript());
1893 // If the frame doesn't even have a env chain set yet, then it's resuming
1894 // into the the prologue before the env chain is initialized. Any
1895 // necessary args object will also be initialized there.
1896 if (frame
->environmentChain() && frame
->script()->needsArgsObj()) {
1897 ArgumentsObject
* argsObj
;
1898 if (frame
->hasArgsObj()) {
1899 argsObj
= &frame
->argsObj();
1901 argsObj
= ArgumentsObject::createExpected(cx
, frame
);
1907 // The arguments is a local binding and needsArgsObj does not
1908 // check if it is clobbered. Ensure that the local binding
1909 // restored during bailout before storing the arguments object
1911 RootedScript
script(cx
, frame
->script());
1912 SetFrameArgumentsObject(cx
, frame
, script
, argsObj
);
1916 innerScript
= frame
->script();
1919 if (frameno
== numFrames
- 1) {
1920 outerScript
= frame
->script();
1921 outerFp
= iter
.fp();
1922 MOZ_ASSERT(outerFp
== incomingStack
);
1931 MOZ_ASSERT(innerScript
);
1932 MOZ_ASSERT(outerScript
);
1933 MOZ_ASSERT(outerFp
);
1935 // If we rematerialized Ion frames due to debug mode toggling, copy their
1936 // values into the baseline frame. We need to do this even when debug mode
1937 // is off, as we should respect the mutations made while debug mode was
1939 JitActivation
* act
= cx
->activation()->asJit();
1940 if (act
->hasRematerializedFrame(outerFp
)) {
1941 JSJitFrameIter
iter(act
);
1942 size_t inlineDepth
= numFrames
;
1944 while (inlineDepth
> 0) {
1945 if (iter
.isBaselineJS()) {
1946 // We must attempt to copy all rematerialized frames over,
1947 // even if earlier ones failed, to invoke the proper frame
1948 // cleanup in the Debugger.
1949 if (!CopyFromRematerializedFrame(cx
, act
, outerFp
, --inlineDepth
,
1950 iter
.baselineFrame())) {
1961 // After copying from all the rematerialized frames, remove them from
1962 // the table to keep the table up to date.
1963 guardRemoveRematerializedFramesFromDebugger
.release();
1964 act
->removeRematerializedFrame(outerFp
);
1967 // If we are unwinding for an exception, we need to unwind scopes.
1968 // See |SettleOnTryNote|
1969 if (bailoutInfo
->faultPC
) {
1970 EnvironmentIter
ei(cx
, topFrame
, bailoutInfo
->faultPC
);
1971 UnwindEnvironment(cx
, ei
, bailoutInfo
->tryPC
);
1974 BailoutKind bailoutKind
= *bailoutInfo
->bailoutKind
;
1975 JitSpew(JitSpew_BaselineBailouts
,
1976 " Restored outerScript=(%s:%u:%u,%u) innerScript=(%s:%u:%u,%u) "
1978 outerScript
->filename(), outerScript
->lineno(),
1979 outerScript
->column().oneOriginValue(), outerScript
->getWarmUpCount(),
1980 innerScript
->filename(), innerScript
->lineno(),
1981 innerScript
->column().oneOriginValue(), innerScript
->getWarmUpCount(),
1982 (unsigned)bailoutKind
);
1984 BailoutAction action
= BailoutAction::InvalidateImmediately
;
1985 DebugOnly
<bool> saveFailedICHash
= false;
1986 switch (bailoutKind
) {
1987 case BailoutKind::TranspiledCacheIR
:
1988 // A transpiled guard failed. If this happens often enough, we will
1989 // invalidate and recompile.
1990 action
= BailoutAction::InvalidateIfFrequent
;
1991 saveFailedICHash
= true;
1994 case BailoutKind::MonomorphicInlinedStubFolding
:
1995 action
= BailoutAction::InvalidateIfFrequent
;
1996 saveFailedICHash
= true;
1997 if (innerScript
!= outerScript
) {
1998 // In the case where this instruction comes from a monomorphic-inlined
1999 // ICScript, we need to ensure that we note the connection between the
2000 // inner script and the outer script, so that we can properly track if
2001 // we add a new case to the folded stub and avoid invalidating the
2003 cx
->zone()->jitZone()->noteStubFoldingBailout(innerScript
, outerScript
);
2007 case BailoutKind::SpeculativePhi
:
2008 // A value of an unexpected type flowed into a phi.
2009 MOZ_ASSERT(!outerScript
->hadSpeculativePhiBailout());
2010 if (!outerScript
->hasIonScript() ||
2011 outerScript
->ionScript()->numFixableBailouts() == 0) {
2012 outerScript
->setHadSpeculativePhiBailout();
2014 InvalidateAfterBailout(cx
, outerScript
, "phi specialization failure");
2017 case BailoutKind::TypePolicy
:
2018 // A conversion inserted by a type policy failed.
2019 // We will invalidate and disable recompilation if this happens too often.
2020 action
= BailoutAction::DisableIfFrequent
;
2023 case BailoutKind::LICM
:
2024 // LICM may cause spurious bailouts by hoisting unreachable
2025 // guards past branches. To prevent bailout loops, when an
2026 // instruction hoisted by LICM bails out, we update the
2027 // IonScript and resume in baseline. If the guard would have
2028 // been executed anyway, then we will hit the baseline fallback,
2029 // and call noteBaselineFallback. If that does not happen,
2030 // then the next time we reach this point, we will disable LICM
2032 MOZ_ASSERT(!outerScript
->hadLICMInvalidation());
2033 if (outerScript
->hasIonScript()) {
2034 switch (outerScript
->ionScript()->licmState()) {
2035 case IonScript::LICMState::NeverBailed
:
2036 outerScript
->ionScript()->setHadLICMBailout();
2037 action
= BailoutAction::NoAction
;
2039 case IonScript::LICMState::Bailed
:
2040 outerScript
->setHadLICMInvalidation();
2041 InvalidateAfterBailout(cx
, outerScript
, "LICM failure");
2043 case IonScript::LICMState::BailedAndHitFallback
:
2044 // This bailout is not due to LICM. Treat it like a
2045 // regular TranspiledCacheIR bailout.
2046 action
= BailoutAction::InvalidateIfFrequent
;
2052 case BailoutKind::InstructionReordering
:
2053 // An instruction moved up by instruction reordering bailed out.
2054 outerScript
->setHadReorderingBailout();
2055 action
= BailoutAction::InvalidateIfFrequent
;
2058 case BailoutKind::HoistBoundsCheck
:
2059 // An instruction hoisted or generated by tryHoistBoundsCheck bailed out.
2060 MOZ_ASSERT(!outerScript
->failedBoundsCheck());
2061 outerScript
->setFailedBoundsCheck();
2062 InvalidateAfterBailout(cx
, outerScript
, "bounds check failure");
2065 case BailoutKind::EagerTruncation
:
2066 // An eager truncation generated by range analysis bailed out.
2067 // To avoid bailout loops, we set a flag to avoid generating
2068 // eager truncations next time we recompile.
2069 MOZ_ASSERT(!outerScript
->hadEagerTruncationBailout());
2070 outerScript
->setHadEagerTruncationBailout();
2071 InvalidateAfterBailout(cx
, outerScript
, "eager range analysis failure");
2074 case BailoutKind::UnboxFolding
:
2075 // An unbox that was hoisted to fold with a load bailed out.
2076 // To avoid bailout loops, we set a flag to avoid folding
2077 // loads with unboxes next time we recompile.
2078 MOZ_ASSERT(!outerScript
->hadUnboxFoldingBailout());
2079 outerScript
->setHadUnboxFoldingBailout();
2080 InvalidateAfterBailout(cx
, outerScript
, "unbox folding failure");
2083 case BailoutKind::TooManyArguments
:
2084 // A funapply or spread call had more than JIT_ARGS_LENGTH_MAX arguments.
2085 // We will invalidate and disable recompilation if this happens too often.
2086 action
= BailoutAction::DisableIfFrequent
;
2089 case BailoutKind::DuringVMCall
:
2090 if (cx
->isExceptionPending()) {
2091 // We are bailing out to catch an exception. We will invalidate
2092 // and disable recompilation if this happens too often.
2093 action
= BailoutAction::DisableIfFrequent
;
2097 case BailoutKind::Finally
:
2098 // We are bailing out for a finally block. We will invalidate
2099 // and disable recompilation if this happens too often.
2100 action
= BailoutAction::DisableIfFrequent
;
2103 case BailoutKind::Inevitable
:
2104 case BailoutKind::Debugger
:
2106 action
= BailoutAction::NoAction
;
2109 case BailoutKind::FirstExecution
:
2110 // We reached an instruction that had not been executed yet at
2111 // the time we compiled. If this happens often enough, we will
2112 // invalidate and recompile.
2113 action
= BailoutAction::InvalidateIfFrequent
;
2114 saveFailedICHash
= true;
2117 case BailoutKind::UninitializedLexical
:
2118 HandleLexicalCheckFailure(cx
, outerScript
, innerScript
);
2121 case BailoutKind::ThrowCheckIsObject
:
2122 MOZ_ASSERT(!cx
->isExceptionPending());
2123 return ThrowCheckIsObject(cx
, CheckIsObjectKind::IteratorReturn
);
2125 case BailoutKind::ThrowProxyTrapMustReportSameValue
:
2126 case BailoutKind::ThrowProxyTrapMustReportUndefined
: {
2127 MOZ_ASSERT(!cx
->isExceptionPending());
2128 RootedId
rootedId(cx
, bailoutInfo
->tempId
);
2129 ScriptedProxyHandler::reportGetTrapValidationError(
2131 bailoutKind
== BailoutKind::ThrowProxyTrapMustReportSameValue
2132 ? ScriptedProxyHandler::GetTrapValidationResult::
2134 : ScriptedProxyHandler::GetTrapValidationResult::
2135 MustReportUndefined
);
2139 case BailoutKind::IonExceptionDebugMode
:
2140 // Return false to resume in HandleException with reconstructed
2144 case BailoutKind::OnStackInvalidation
:
2145 // The script has already been invalidated. There is nothing left to do.
2146 action
= BailoutAction::NoAction
;
2150 MOZ_CRASH("Unknown bailout kind!");
2154 if (MOZ_UNLIKELY(cx
->runtime()->jitRuntime()->ionBailAfterEnabled())) {
2155 action
= BailoutAction::NoAction
;
2159 if (outerScript
->hasIonScript()) {
2160 IonScript
* ionScript
= outerScript
->ionScript();
2162 case BailoutAction::InvalidateImmediately
:
2163 // The IonScript should already have been invalidated.
2166 case BailoutAction::InvalidateIfFrequent
:
2167 ionScript
->incNumFixableBailouts();
2168 if (ionScript
->shouldInvalidate()) {
2170 // To detect bailout loops, we save a hash of the CacheIR used to
2171 // compile this script, and assert that we don't recompile with the
2172 // exact same inputs. Some of our bailout detection strategies, like
2173 // LICM and stub folding, rely on bailing out, updating some state
2174 // when we hit the baseline fallback, and using that information when
2175 // we invalidate. If the frequentBailoutThreshold is set too low, we
2176 // will instead invalidate the first time we bail out, so we don't
2177 // have the chance to make those decisions. That doesn't happen in
2178 // regular code, so we just skip bailout loop detection in that case.
2179 if (saveFailedICHash
&& !JitOptions
.disableBailoutLoopCheck
&&
2180 JitOptions
.frequentBailoutThreshold
> 1) {
2181 outerScript
->jitScript()->setFailedICHash(ionScript
->icHash());
2184 InvalidateAfterBailout(cx
, outerScript
, "fixable bailouts");
2187 case BailoutAction::DisableIfFrequent
:
2188 ionScript
->incNumUnfixableBailouts();
2189 if (ionScript
->shouldInvalidateAndDisable()) {
2190 InvalidateAfterBailout(cx
, outerScript
, "unfixable bailouts");
2191 outerScript
->disableIon();
2194 case BailoutAction::NoAction
: