1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "mozilla/Assertions.h"
8 #include "mozilla/ScopeExit.h"
10 #include "builtin/ModuleObject.h"
11 #include "debugger/DebugAPI.h"
12 #include "gc/GC.h"
13 #include "jit/Bailouts.h"
14 #include "jit/BaselineFrame.h"
15 #include "jit/BaselineIC.h"
16 #include "jit/BaselineJIT.h"
17 #include "jit/CalleeToken.h"
18 #include "jit/Invalidation.h"
19 #include "jit/Ion.h"
20 #include "jit/IonScript.h"
21 #include "jit/JitFrames.h"
22 #include "jit/JitRuntime.h"
23 #include "jit/JitSpewer.h"
24 #include "jit/JitZone.h"
25 #include "jit/RematerializedFrame.h"
26 #include "jit/SharedICRegisters.h"
27 #include "jit/Simulator.h"
28 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit, js::ReportOverRecursed
29 #include "js/Utility.h"
30 #include "proxy/ScriptedProxyHandler.h"
31 #include "util/Memory.h"
32 #include "vm/ArgumentsObject.h"
33 #include "vm/BytecodeUtil.h"
34 #include "vm/JitActivation.h"
36 #include "jit/JitFrames-inl.h"
37 #include "vm/JSAtomUtils-inl.h"
38 #include "vm/JSContext-inl.h"
39 #include "vm/JSScript-inl.h"
41 using namespace js;
42 using namespace js::jit;
44 using mozilla::DebugOnly;
45 using mozilla::Maybe;
47 // BaselineStackBuilder may reallocate its buffer if the current one is too
48 // small. To avoid dangling pointers, BufferPointer represents a pointer into
49 // this buffer as a pointer to the header and a fixed offset.
50 template <typename T>
51 class BufferPointer {
52 const UniquePtr<BaselineBailoutInfo>& header_;
53 size_t offset_;
54 bool heap_;
56 public:
57 BufferPointer(const UniquePtr<BaselineBailoutInfo>& header, size_t offset,
58 bool heap)
59 : header_(header), offset_(offset), heap_(heap) {}
61 T* get() const {
62 BaselineBailoutInfo* header = header_.get();
63 if (!heap_) {
64 return (T*)(header->incomingStack + offset_);
67 uint8_t* p = header->copyStackTop - offset_;
68 MOZ_ASSERT(p >= header->copyStackBottom && p < header->copyStackTop);
69 return (T*)p;
72 void set(const T& value) { *get() = value; }
74 // Note: we return a copy instead of a reference, to avoid potential memory
75 // safety hazards when the underlying buffer gets resized.
76 const T operator*() const { return *get(); }
77 T* operator->() const { return get(); }
80 /**
81 * BaselineStackBuilder helps abstract the process of rebuilding the C stack on
82 * the heap. It takes a bailout iterator and keeps track of the point on the C
83 * stack from which the reconstructed frames will be written.
85 * It exposes methods to write data into the heap memory storing the
86 * reconstructed stack. It also exposes methods to easily calculate addresses.
87 * This includes both the virtual address that a particular value will be at
88 * when it's eventually copied onto the stack, as well as the current actual
89 * address of that value (whether on the heap allocated portion being
90 * constructed or the existing stack).
92 * The abstraction handles transparent re-allocation of the heap memory when it
93 * needs to be enlarged to accommodate new data. Similarly to the C stack, the
94 * data that's written to the reconstructed stack grows from high to low in
95 * memory.
97 * The lowest region of the allocated memory contains a BaselineBailoutInfo
98 * structure that points to the start and end of the written data.
 */
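// A rough sketch of the buffer layout managed below (the payload grows
// downward, toward lower addresses, just like the C stack):
//
//   bufferRaw                      copyStackBottom          copyStackTop
//   |                              |                        |
//   [ BaselineBailoutInfo header   | ...free space...       | written frames ]
//
// init() places the header at the start of the allocation and sets both
// copyStackBottom and copyStackTop to its end; subtract() then moves
// copyStackBottom down as frame data is written.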
100 class MOZ_STACK_CLASS BaselineStackBuilder {
101 JSContext* cx_;
102 JitFrameLayout* frame_ = nullptr;
103 SnapshotIterator& iter_;
104 RootedValueVector outermostFrameFormals_;
106 size_t bufferTotal_ = 1024;
107 size_t bufferAvail_ = 0;
108 size_t bufferUsed_ = 0;
109 size_t framePushed_ = 0;
111 UniquePtr<BaselineBailoutInfo> header_;
113 JSScript* script_;
114 JSFunction* fun_;
115 const ExceptionBailoutInfo* excInfo_;
116 ICScript* icScript_;
118 jsbytecode* pc_ = nullptr;
119 JSOp op_ = JSOp::Nop;
120 mozilla::Maybe<ResumeMode> resumeMode_;
121 uint32_t exprStackSlots_ = 0;
122 void* prevFramePtr_ = nullptr;
123 Maybe<BufferPointer<BaselineFrame>> blFrame_;
125 size_t frameNo_ = 0;
126 JSFunction* nextCallee_ = nullptr;
128 BailoutKind bailoutKind_;
130 // The baseline frames we will reconstruct on the heap are not
131 // rooted, so GC must be suppressed.
132 gc::AutoSuppressGC suppress_;
134 public:
135 BaselineStackBuilder(JSContext* cx, const JSJitFrameIter& frameIter,
136 SnapshotIterator& iter,
137 const ExceptionBailoutInfo* excInfo,
138 BailoutReason reason);
140 [[nodiscard]] bool init() {
141 MOZ_ASSERT(!header_);
142 MOZ_ASSERT(bufferUsed_ == 0);
144 uint8_t* bufferRaw = cx_->pod_calloc<uint8_t>(bufferTotal_);
145 if (!bufferRaw) {
146 return false;
148 bufferAvail_ = bufferTotal_ - sizeof(BaselineBailoutInfo);
150 header_.reset(new (bufferRaw) BaselineBailoutInfo());
151 header_->incomingStack = reinterpret_cast<uint8_t*>(frame_);
152 header_->copyStackTop = bufferRaw + bufferTotal_;
153 header_->copyStackBottom = header_->copyStackTop;
154 return true;
157 [[nodiscard]] bool buildOneFrame();
158 bool done();
159 void nextFrame();
161 JSScript* script() const { return script_; }
162 size_t frameNo() const { return frameNo_; }
163 bool isOutermostFrame() const { return frameNo_ == 0; }
164 MutableHandleValueVector outermostFrameFormals() {
165 return &outermostFrameFormals_;
167 BailoutKind bailoutKind() const { return bailoutKind_; }
169 inline JitFrameLayout* startFrame() { return frame_; }
171 BaselineBailoutInfo* info() {
172 MOZ_ASSERT(header_);
173 return header_.get();
176 BaselineBailoutInfo* takeBuffer() {
177 MOZ_ASSERT(header_);
178 return header_.release();
181 private:
182 [[nodiscard]] bool initFrame();
183 [[nodiscard]] bool buildBaselineFrame();
184 [[nodiscard]] bool buildArguments();
185 [[nodiscard]] bool buildFixedSlots();
186 [[nodiscard]] bool fixUpCallerArgs(MutableHandleValueVector savedCallerArgs,
187 bool* fixedUp);
188 [[nodiscard]] bool buildFinallyException();
189 [[nodiscard]] bool buildExpressionStack();
190 [[nodiscard]] bool finishLastFrame();
192 [[nodiscard]] bool prepareForNextFrame(HandleValueVector savedCallerArgs);
193 [[nodiscard]] bool finishOuterFrame();
194 [[nodiscard]] bool buildStubFrame(uint32_t frameSize,
195 HandleValueVector savedCallerArgs);
196 [[nodiscard]] bool buildRectifierFrame(uint32_t actualArgc,
197 size_t endOfBaselineStubArgs);
199 #ifdef DEBUG
200 [[nodiscard]] bool validateFrame();
201 #endif
203 #ifdef DEBUG
204 bool envChainSlotCanBeOptimized();
205 #endif
207 bool isPrologueBailout();
208 jsbytecode* getResumePC();
209 void* getStubReturnAddress();
211 uint32_t exprStackSlots() const { return exprStackSlots_; }
213 // Returns true if we're bailing out to a catch or finally block in this frame
214 bool catchingException() const {
215 return excInfo_ && excInfo_->catchingException() &&
216 excInfo_->frameNo() == frameNo_;
219 // Returns true if we're bailing out to a finally block in this frame.
220 bool resumingInFinallyBlock() const {
221 return catchingException() && excInfo_->isFinally();
224 bool forcedReturn() const { return excInfo_ && excInfo_->forcedReturn(); }
226 // Returns true if we're bailing out in place for debug mode
227 bool propagatingIonExceptionForDebugMode() const {
228 return excInfo_ && excInfo_->propagatingIonExceptionForDebugMode();
231 void* prevFramePtr() const {
232 MOZ_ASSERT(prevFramePtr_);
233 return prevFramePtr_;
235 BufferPointer<BaselineFrame>& blFrame() { return blFrame_.ref(); }
237 void setNextCallee(JSFunction* nextCallee,
238 TrialInliningState trialInliningState);
239 JSFunction* nextCallee() const { return nextCallee_; }
241 jsbytecode* pc() const { return pc_; }
242 bool resumeAfter() const {
243 return !catchingException() && iter_.resumeAfter();
246 ResumeMode resumeMode() const { return *resumeMode_; }
248 bool needToSaveCallerArgs() const {
249 return resumeMode() == ResumeMode::InlinedAccessor;
252 [[nodiscard]] bool enlarge() {
253 MOZ_ASSERT(header_ != nullptr);
254 if (bufferTotal_ & mozilla::tl::MulOverflowMask<2>::value) {
255 ReportOutOfMemory(cx_);
256 return false;
259 size_t newSize = bufferTotal_ * 2;
260 uint8_t* newBufferRaw = cx_->pod_calloc<uint8_t>(newSize);
261 if (!newBufferRaw) {
262 return false;
265 // Initialize the new buffer.
267 // Before:
269 // [ Header | .. | Payload ]
271 // After:
273 // [ Header | ............... | Payload ]
275 // Size of Payload is |bufferUsed_|.
277 // We need to copy from the old buffer and header to the new buffer before
278 // we set header_ (this deletes the old buffer).
280 // We also need to update |copyStackBottom| and |copyStackTop| because these
281 // fields point to the Payload's start and end, respectively.
282 using BailoutInfoPtr = UniquePtr<BaselineBailoutInfo>;
283 BailoutInfoPtr newHeader(new (newBufferRaw) BaselineBailoutInfo(*header_));
284 newHeader->copyStackTop = newBufferRaw + newSize;
285 newHeader->copyStackBottom = newHeader->copyStackTop - bufferUsed_;
286 memcpy(newHeader->copyStackBottom, header_->copyStackBottom, bufferUsed_);
287 bufferTotal_ = newSize;
288 bufferAvail_ = newSize - (sizeof(BaselineBailoutInfo) + bufferUsed_);
289 header_ = std::move(newHeader);
290 return true;
293 void resetFramePushed() { framePushed_ = 0; }
295 size_t framePushed() const { return framePushed_; }
297 [[nodiscard]] bool subtract(size_t size, const char* info = nullptr) {
298 // enlarge the buffer if need be.
299 while (size > bufferAvail_) {
300 if (!enlarge()) {
301 return false;
305 // write out element.
306 header_->copyStackBottom -= size;
307 bufferAvail_ -= size;
308 bufferUsed_ += size;
309 framePushed_ += size;
310 if (info) {
311 JitSpew(JitSpew_BaselineBailouts, " SUB_%03d %p/%p %-15s",
312 (int)size, header_->copyStackBottom,
313 virtualPointerAtStackOffset(0), info);
315 return true;
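// Reserve sizeof(T) bytes at the bottom of the reconstructed stack and copy
// |t| into the reserved space.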
318 template <typename T>
319 [[nodiscard]] bool write(const T& t) {
320 MOZ_ASSERT(!(uintptr_t(&t) >= uintptr_t(header_->copyStackBottom) &&
321 uintptr_t(&t) < uintptr_t(header_->copyStackTop)),
322 "Should not reference memory that can be freed");
323 if (!subtract(sizeof(T))) {
324 return false;
326 memcpy(header_->copyStackBottom, &t, sizeof(T));
327 return true;
330 template <typename T>
331 [[nodiscard]] bool writePtr(T* t, const char* info) {
332 if (!write<T*>(t)) {
333 return false;
335 if (info) {
336 JitSpew(JitSpew_BaselineBailouts, " WRITE_PTR %p/%p %-15s %p",
337 header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
340 return true;
343 [[nodiscard]] bool writeWord(size_t w, const char* info) {
344 if (!write<size_t>(w)) {
345 return false;
347 if (info) {
348 if (sizeof(size_t) == 4) {
349 JitSpew(JitSpew_BaselineBailouts, " WRITE_WRD %p/%p %-15s %08zx",
350 header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
352 } else {
353 JitSpew(JitSpew_BaselineBailouts, " WRITE_WRD %p/%p %-15s %016zx",
354 header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
358 return true;
361 [[nodiscard]] bool writeValue(const Value& val, const char* info) {
362 if (!write<Value>(val)) {
363 return false;
365 if (info) {
366 JitSpew(JitSpew_BaselineBailouts,
367 " WRITE_VAL %p/%p %-15s %016" PRIx64,
368 header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
369 *((uint64_t*)&val));
371 return true;
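// Read back the most recently written Value without consuming it.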
374 [[nodiscard]] bool peekLastValue(Value* result) {
375 if (bufferUsed_ < sizeof(Value)) {
376 return false;
379 memcpy(result, header_->copyStackBottom, sizeof(Value));
380 return true;
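// Write JS_ARG_POISON padding Values until (framePushed_ + after) is a
// multiple of |alignment|, so the stack stays correctly aligned once the
// next |after| bytes have been pushed.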
383 [[nodiscard]] bool maybeWritePadding(size_t alignment, size_t after,
384 const char* info) {
385 MOZ_ASSERT(framePushed_ % sizeof(Value) == 0);
386 MOZ_ASSERT(after % sizeof(Value) == 0);
387 size_t offset = ComputeByteAlignment(after, alignment);
388 while (framePushed_ % alignment != offset) {
389 if (!writeValue(MagicValue(JS_ARG_POISON), info)) {
390 return false;
394 return true;
397 void setResumeFramePtr(void* resumeFramePtr) {
398 header_->resumeFramePtr = resumeFramePtr;
401 void setResumeAddr(void* resumeAddr) { header_->resumeAddr = resumeAddr; }
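// Map a virtual stack offset to the current location of that value: offsets
// smaller than bufferUsed_ refer to data already written into the heap copy;
// anything beyond that still lives on the incoming, not-yet-overwritten
// stack.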
403 template <typename T>
404 BufferPointer<T> pointerAtStackOffset(size_t offset) {
405 if (offset < bufferUsed_) {
406 // Calculate offset from copyStackTop.
407 offset = header_->copyStackTop - (header_->copyStackBottom + offset);
408 return BufferPointer<T>(header_, offset, /* heap = */ true);
411 return BufferPointer<T>(header_, offset - bufferUsed_, /* heap = */ false);
414 BufferPointer<Value> valuePointerAtStackOffset(size_t offset) {
415 return pointerAtStackOffset<Value>(offset);
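// Compute the address this offset will correspond to once the reconstructed
// frames have been copied over the incoming stack.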
418 inline uint8_t* virtualPointerAtStackOffset(size_t offset) {
419 if (offset < bufferUsed_) {
420 return reinterpret_cast<uint8_t*>(frame_) - (bufferUsed_ - offset);
422 return reinterpret_cast<uint8_t*>(frame_) + (offset - bufferUsed_);
426 void BaselineBailoutInfo::trace(JSTracer* trc) {
427 TraceRoot(trc, &tempId, "BaselineBailoutInfo::tempId");
430 BaselineStackBuilder::BaselineStackBuilder(JSContext* cx,
431 const JSJitFrameIter& frameIter,
432 SnapshotIterator& iter,
433 const ExceptionBailoutInfo* excInfo,
434 BailoutReason reason)
435 : cx_(cx),
436 frame_(static_cast<JitFrameLayout*>(frameIter.current())),
437 iter_(iter),
438 outermostFrameFormals_(cx),
439 script_(frameIter.script()),
440 fun_(frameIter.maybeCallee()),
441 excInfo_(excInfo),
442 icScript_(script_->jitScript()->icScript()),
443 bailoutKind_(iter.bailoutKind()),
444 suppress_(cx) {
445 MOZ_ASSERT(bufferTotal_ >= sizeof(BaselineBailoutInfo));
446 if (reason == BailoutReason::Invalidate) {
447 bailoutKind_ = BailoutKind::OnStackInvalidation;
451 bool BaselineStackBuilder::initFrame() {
452 // Get the pc and ResumeMode. If we are handling an exception, resume at the
453 // pc of the catch or finally block.
454 if (catchingException()) {
455 pc_ = excInfo_->resumePC();
456 resumeMode_ = mozilla::Some(ResumeMode::ResumeAt);
457 } else {
458 pc_ = script_->offsetToPC(iter_.pcOffset());
459 resumeMode_ = mozilla::Some(iter_.resumeMode());
461 op_ = JSOp(*pc_);
463 // If we are catching an exception, we are bailing out to a catch or
464 // finally block and this is the frame where we will resume. Usually the
465 // expression stack should be empty in this case but there can be
466 // iterators on the stack.
467 if (catchingException()) {
468 exprStackSlots_ = excInfo_->numExprSlots();
469 } else {
470 uint32_t totalFrameSlots = iter_.numAllocations();
471 uint32_t fixedSlots = script_->nfixed();
472 uint32_t argSlots = CountArgSlots(script_, fun_);
473 uint32_t intermediates = NumIntermediateValues(resumeMode());
474 exprStackSlots_ = totalFrameSlots - fixedSlots - argSlots - intermediates;
476 // Verify that there was no underflow.
477 MOZ_ASSERT(exprStackSlots_ <= totalFrameSlots);
480 JitSpew(JitSpew_BaselineBailouts, " Unpacking %s:%u:%u",
481 script_->filename(), script_->lineno(),
482 script_->column().oneOriginValue());
483 JitSpew(JitSpew_BaselineBailouts, " [BASELINE-JS FRAME]");
485 // Write the previous frame pointer value. For the outermost frame we reuse
486 // the value in the JitFrameLayout already on the stack. Record the virtual
487 // stack offset at this location. Later on, if we end up writing out a
488 // BaselineStub frame for the next callee, we'll need to save the address.
489 if (!isOutermostFrame()) {
490 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
491 return false;
494 prevFramePtr_ = virtualPointerAtStackOffset(0);
496 resetFramePushed();
498 return true;
501 void BaselineStackBuilder::setNextCallee(
502 JSFunction* nextCallee, TrialInliningState trialInliningState) {
503 nextCallee_ = nextCallee;
505 if (trialInliningState == TrialInliningState::Inlined &&
506 !iter_.ionScript()->purgedICScripts()) {
507 // Update icScript_ to point to the icScript of nextCallee
508 const uint32_t pcOff = script_->pcToOffset(pc_);
509 icScript_ = icScript_->findInlinedChild(pcOff);
510 } else {
511 // If we don't know for certain that it's TrialInliningState::Inlined,
512 // just use the callee's own ICScript. The trial-inlined ICScript may
513 // still be available, but it may not be if we transitioned to
514 // TrialInliningState::Failure after being monomorphically inlined.
516 // Also use the callee's own ICScript if we purged callee ICScripts.
517 icScript_ = nextCallee->nonLazyScript()->jitScript()->icScript();
520 // Assert the ICScript matches nextCallee.
521 JSScript* calleeScript = nextCallee->nonLazyScript();
522 MOZ_RELEASE_ASSERT(icScript_->numICEntries() == calleeScript->numICEntries());
523 MOZ_RELEASE_ASSERT(icScript_->bytecodeSize() == calleeScript->length());
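// Unpacking is finished either when the snapshot has no more frames, or when
// we are bailing out to a catch/finally block in the current frame (deeper
// inlined frames are not reconstructed in that case).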
526 bool BaselineStackBuilder::done() {
527 if (!iter_.moreFrames()) {
528 MOZ_ASSERT(!nextCallee_);
529 return true;
531 return catchingException();
534 void BaselineStackBuilder::nextFrame() {
535 MOZ_ASSERT(nextCallee_);
536 fun_ = nextCallee_;
537 script_ = fun_->nonLazyScript();
538 nextCallee_ = nullptr;
540 // Scripts with an IonScript must also have a BaselineScript.
541 MOZ_ASSERT(script_->hasBaselineScript());
543 frameNo_++;
544 iter_.nextInstruction();
547 // Build the BaselineFrame struct
548 bool BaselineStackBuilder::buildBaselineFrame() {
549 if (!subtract(BaselineFrame::Size(), "BaselineFrame")) {
550 return false;
552 blFrame_.reset();
553 blFrame_.emplace(pointerAtStackOffset<BaselineFrame>(0));
555 uint32_t flags = BaselineFrame::RUNNING_IN_INTERPRETER;
557 // If we are bailing to a script whose execution is observed, mark the
558 // baseline frame as a debuggee frame. This is to cover the case where we
559 // don't rematerialize the Ion frame via the Debugger.
560 if (script_->isDebuggee()) {
561 flags |= BaselineFrame::DEBUGGEE;
564 // Get |envChain|.
565 JSObject* envChain = nullptr;
566 Value envChainSlot = iter_.read();
567 if (envChainSlot.isObject()) {
568 // The env slot has been updated from UndefinedValue. It must be the
569 // complete initial environment.
570 envChain = &envChainSlot.toObject();
572 // Set the HAS_INITIAL_ENV flag if needed. See IsFrameInitialEnvironment.
573 MOZ_ASSERT(!script_->isForEval());
574 if (fun_ && fun_->needsFunctionEnvironmentObjects()) {
575 MOZ_ASSERT(fun_->nonLazyScript()->initialEnvironmentShape());
576 flags |= BaselineFrame::HAS_INITIAL_ENV;
578 } else {
579 MOZ_ASSERT(envChainSlot.isUndefined() ||
580 envChainSlot.isMagic(JS_OPTIMIZED_OUT));
581 MOZ_ASSERT(envChainSlotCanBeOptimized());
583 // The env slot has been optimized out.
584 // Get it from the function or script.
585 if (fun_) {
586 envChain = fun_->environment();
587 } else if (script_->isModule()) {
588 envChain = script_->module()->environment();
589 } else {
590 // For global scripts without a non-syntactic env the env
591 // chain is the script's global lexical environment. (We do
592 // not compile scripts with a non-syntactic global scope).
593 // Also note that it's invalid to resume into the prologue in
594 // this case because the prologue expects the env chain in R1
595 // for eval and global scripts.
596 MOZ_ASSERT(!script_->isForEval());
597 MOZ_ASSERT(!script_->hasNonSyntacticScope());
598 envChain = &(script_->global().lexicalEnvironment());
602 // Write |envChain|.
603 MOZ_ASSERT(envChain);
604 JitSpew(JitSpew_BaselineBailouts, " EnvChain=%p", envChain);
605 blFrame()->setEnvironmentChain(envChain);
607 // Get |returnValue| if present.
608 Value returnValue = UndefinedValue();
609 if (script_->noScriptRval()) {
610 // Don't use the return value (likely a JS_OPTIMIZED_OUT MagicValue), to
611 // avoid confusing Baseline.
612 iter_.skip();
613 } else {
614 returnValue = iter_.read();
615 flags |= BaselineFrame::HAS_RVAL;
618 // Write |returnValue|.
619 JitSpew(JitSpew_BaselineBailouts, " ReturnValue=%016" PRIx64,
620 *((uint64_t*)&returnValue));
621 blFrame()->setReturnValue(returnValue);
623 // Get |argsObj| if present.
624 ArgumentsObject* argsObj = nullptr;
625 if (script_->needsArgsObj()) {
626 Value maybeArgsObj = iter_.read();
627 MOZ_ASSERT(maybeArgsObj.isObject() || maybeArgsObj.isUndefined() ||
628 maybeArgsObj.isMagic(JS_OPTIMIZED_OUT));
629 if (maybeArgsObj.isObject()) {
630 argsObj = &maybeArgsObj.toObject().as<ArgumentsObject>();
634 // Note: we do not need to initialize the scratchValue field in BaselineFrame.
636 // Write |flags|.
637 blFrame()->setFlags(flags);
639 // Write |icScript|.
640 JitSpew(JitSpew_BaselineBailouts, " ICScript=%p", icScript_);
641 blFrame()->setICScript(icScript_);
643 // initArgsObjUnchecked modifies the frame's flags, so call it after setFlags.
644 if (argsObj) {
645 blFrame()->initArgsObjUnchecked(*argsObj);
647 return true;
650 // Overwrite the pushed args present in the calling frame with
651 // the unpacked |thisv| and argument values.
652 bool BaselineStackBuilder::buildArguments() {
653 Value thisv = iter_.read();
654 JitSpew(JitSpew_BaselineBailouts, " Is function!");
655 JitSpew(JitSpew_BaselineBailouts, " thisv=%016" PRIx64,
656 *((uint64_t*)&thisv));
658 size_t thisvOffset = framePushed() + JitFrameLayout::offsetOfThis();
659 valuePointerAtStackOffset(thisvOffset).set(thisv);
661 MOZ_ASSERT(iter_.numAllocations() >= CountArgSlots(script_, fun_));
662 JitSpew(JitSpew_BaselineBailouts,
663 " frame slots %u, nargs %zu, nfixed %zu", iter_.numAllocations(),
664 fun_->nargs(), script_->nfixed());
666 bool shouldStoreOutermostFormals =
667 isOutermostFrame() && !script_->argsObjAliasesFormals();
668 if (shouldStoreOutermostFormals) {
669 // This is the first (outermost) frame and we don't have an
670 // arguments object aliasing the formals. Due to UCE and phi
671 // elimination, we could store an UndefinedValue() here for
672 // formals we think are unused, but locals may still reference the
673 // original argument slot (MParameter/LArgument) and expect the
674 // original Value. To avoid this problem, store the formals in a
675 // Vector until we are done.
676 MOZ_ASSERT(outermostFrameFormals().empty());
677 if (!outermostFrameFormals().resize(fun_->nargs())) {
678 return false;
682 for (uint32_t i = 0; i < fun_->nargs(); i++) {
683 Value arg = iter_.read();
684 JitSpew(JitSpew_BaselineBailouts, " arg %d = %016" PRIx64, (int)i,
685 *((uint64_t*)&arg));
686 if (!isOutermostFrame()) {
687 size_t argOffset = framePushed() + JitFrameLayout::offsetOfActualArg(i);
688 valuePointerAtStackOffset(argOffset).set(arg);
689 } else if (shouldStoreOutermostFormals) {
690 outermostFrameFormals()[i].set(arg);
691 } else {
692 // When the arguments object aliases the formal arguments,
693 // JSOp::SetArg mutates the arguments object. In that case, the
694 // arguments reported by the snapshot are only aliases of
695 // arguments-object slots, which are optimized to store only the
696 // values that differ from the arguments already on the stack.
699 return true;
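// Copy the script's fixed (local) slots from the snapshot onto the
// reconstructed Baseline frame.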
702 bool BaselineStackBuilder::buildFixedSlots() {
703 for (uint32_t i = 0; i < script_->nfixed(); i++) {
704 Value slot = iter_.read();
705 if (!writeValue(slot, "FixedValue")) {
706 return false;
709 return true;
712 // The caller side of inlined js::fun_call and accessors must look
713 // like the function wasn't inlined.
714 bool BaselineStackBuilder::fixUpCallerArgs(
715 MutableHandleValueVector savedCallerArgs, bool* fixedUp) {
716 MOZ_ASSERT(!*fixedUp);
718 // Inlining of SpreadCall-like frames not currently supported.
719 MOZ_ASSERT(!IsSpreadOp(op_));
721 if (resumeMode() != ResumeMode::InlinedFunCall && !needToSaveCallerArgs()) {
722 return true;
725 // Calculate how many arguments are consumed by the inlined call.
726 // All calls pass |callee| and |this|.
727 uint32_t inlinedArgs = 2;
728 if (resumeMode() == ResumeMode::InlinedFunCall) {
729 // The first argument to an inlined FunCall becomes |this|,
730 // if it exists. The rest are passed normally.
731 MOZ_ASSERT(IsInvokeOp(op_));
732 inlinedArgs += GET_ARGC(pc_) > 0 ? GET_ARGC(pc_) - 1 : 0;
733 } else {
734 MOZ_ASSERT(resumeMode() == ResumeMode::InlinedAccessor);
735 MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op_));
736 // Setters are passed one argument. Getters are passed none.
737 if (IsSetPropOp(op_)) {
738 inlinedArgs++;
742 // Calculate how many values are live on the stack across the call,
743 // and push them.
744 MOZ_ASSERT(inlinedArgs <= exprStackSlots());
745 uint32_t liveStackSlots = exprStackSlots() - inlinedArgs;
747 JitSpew(JitSpew_BaselineBailouts,
748 " pushing %u expression stack slots before fixup",
749 liveStackSlots);
750 for (uint32_t i = 0; i < liveStackSlots; i++) {
751 Value v = iter_.read();
752 if (!writeValue(v, "StackValue")) {
753 return false;
757 // When we inline js::fun_call, we bypass the native and inline the
758 // target directly. When rebuilding the stack, we need to fill in
759 // the right number of slots to make it look like the js_native was
760 // actually called.
761 if (resumeMode() == ResumeMode::InlinedFunCall) {
762 // We must transform the stack from |target, this, args| to
763 // |js_fun_call, target, this, args|. The value of |js_fun_call|
764 // will never be observed, so we push |undefined| for it, followed
765 // by the remaining arguments.
766 JitSpew(JitSpew_BaselineBailouts,
767 " pushing undefined to fixup funcall");
768 if (!writeValue(UndefinedValue(), "StackValue")) {
769 return false;
771 if (GET_ARGC(pc_) > 0) {
772 JitSpew(JitSpew_BaselineBailouts,
773 " pushing %u expression stack slots", inlinedArgs);
774 for (uint32_t i = 0; i < inlinedArgs; i++) {
775 Value arg = iter_.read();
776 if (!writeValue(arg, "StackValue")) {
777 return false;
780 } else {
781 // When we inline FunCall with no arguments, we push an extra
782 // |undefined| value for |this|. That value should not appear
783 // in the rebuilt baseline frame.
784 JitSpew(JitSpew_BaselineBailouts, " pushing target of funcall");
785 Value target = iter_.read();
786 if (!writeValue(target, "StackValue")) {
787 return false;
789 // Skip |this|.
790 iter_.skip();
794 if (needToSaveCallerArgs()) {
795 // Save the actual arguments. They are needed to rebuild the callee frame.
796 if (!savedCallerArgs.resize(inlinedArgs)) {
797 return false;
799 for (uint32_t i = 0; i < inlinedArgs; i++) {
800 savedCallerArgs[i].set(iter_.read());
803 if (IsSetPropOp(op_)) {
804 // The RHS argument to SetProp remains on the stack after the
805 // operation and is observable, so we have to fill it in.
806 Value initialArg = savedCallerArgs[inlinedArgs - 1];
807 JitSpew(JitSpew_BaselineBailouts,
808 " pushing setter's initial argument");
809 if (!writeValue(initialArg, "StackValue")) {
810 return false;
815 *fixedUp = true;
816 return true;
819 bool BaselineStackBuilder::buildExpressionStack() {
820 JitSpew(JitSpew_BaselineBailouts, " pushing %u expression stack slots",
821 exprStackSlots());
823 for (uint32_t i = 0; i < exprStackSlots(); i++) {
824 Value v;
825 // If we are in the middle of propagating an exception from Ion by
826 // bailing to baseline due to debug mode, we might not have all
827 // the stack if we are at the newest frame.
829 // For instance, if calling |f()| pushed an Ion frame which threw,
830 // the snapshot expects the return value to be pushed, but it's
831 // possible nothing was pushed before we threw.
833 // We therefore use a fallible read here.
834 if (!iter_.tryRead(&v)) {
835 MOZ_ASSERT(propagatingIonExceptionForDebugMode() && !iter_.moreFrames());
836 v = MagicValue(JS_OPTIMIZED_OUT);
838 if (!writeValue(v, "StackValue")) {
839 return false;
843 if (resumeMode() == ResumeMode::ResumeAfterCheckProxyGetResult) {
844 JitSpew(JitSpew_BaselineBailouts,
845 " Checking that the proxy's get trap result matches "
846 "expectations.");
847 Value returnVal;
848 if (peekLastValue(&returnVal) && !returnVal.isMagic(JS_OPTIMIZED_OUT)) {
849 Value idVal = iter_.read();
850 Value targetVal = iter_.read();
852 MOZ_RELEASE_ASSERT(!idVal.isMagic());
853 MOZ_RELEASE_ASSERT(targetVal.isObject());
854 RootedObject target(cx_, &targetVal.toObject());
855 RootedValue rootedIdVal(cx_, idVal);
856 RootedId id(cx_);
857 if (!PrimitiveValueToId<CanGC>(cx_, rootedIdVal, &id)) {
858 return false;
860 RootedValue value(cx_, returnVal);
862 auto validation =
863 ScriptedProxyHandler::checkGetTrapResult(cx_, target, id, value);
864 if (validation != ScriptedProxyHandler::GetTrapValidationResult::OK) {
865 header_->tempId = id.get();
867 JitSpew(
868 JitSpew_BaselineBailouts,
869 " Proxy get trap result mismatch! Overwriting bailout kind");
870 if (validation == ScriptedProxyHandler::GetTrapValidationResult::
871 MustReportSameValue) {
872 bailoutKind_ = BailoutKind::ThrowProxyTrapMustReportSameValue;
873 } else if (validation == ScriptedProxyHandler::GetTrapValidationResult::
874 MustReportUndefined) {
875 bailoutKind_ = BailoutKind::ThrowProxyTrapMustReportUndefined;
876 } else {
877 return false;
882 return true;
885 if (resumeMode() == ResumeMode::ResumeAfterCheckIsObject) {
886 JitSpew(JitSpew_BaselineBailouts,
887 " Checking that intermediate value is an object");
888 Value returnVal;
889 if (iter_.tryRead(&returnVal) && !returnVal.isObject()) {
890 MOZ_ASSERT(!returnVal.isMagic());
891 JitSpew(JitSpew_BaselineBailouts,
892 " Not an object! Overwriting bailout kind");
893 bailoutKind_ = BailoutKind::ThrowCheckIsObject;
897 return true;
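// When resuming in a finally block, push the pending exception, its stack,
// and |throwing| so the finally block can rethrow the exception if needed.
// (See the matching three-slot adjustment in validateFrame.)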
900 bool BaselineStackBuilder::buildFinallyException() {
901 MOZ_ASSERT(resumingInFinallyBlock());
903 if (!writeValue(excInfo_->finallyException(), "Exception")) {
904 return false;
906 if (!writeValue(excInfo_->finallyExceptionStack(), "ExceptionStack")) {
907 return false;
909 if (!writeValue(BooleanValue(true), "throwing")) {
910 return false;
913 return true;
916 bool BaselineStackBuilder::prepareForNextFrame(
917 HandleValueVector savedCallerArgs) {
918 const uint32_t frameSize = framePushed();
920 // Write out descriptor and return address for the baseline frame.
921 // The icEntry in question MUST have an inlinable fallback stub.
922 if (!finishOuterFrame()) {
923 return false;
926 return buildStubFrame(frameSize, savedCallerArgs);
929 bool BaselineStackBuilder::finishOuterFrame() {
930 // . .
931 // | Descr(BLJS) |
932 // +---------------+
933 // | ReturnAddr |
934 // +===============+
936 const BaselineInterpreter& baselineInterp =
937 cx_->runtime()->jitRuntime()->baselineInterpreter();
939 blFrame()->setInterpreterFields(script_, pc_);
941 // Write out descriptor of BaselineJS frame.
942 size_t baselineFrameDescr = MakeFrameDescriptor(FrameType::BaselineJS);
943 if (!writeWord(baselineFrameDescr, "Descriptor")) {
944 return false;
947 uint8_t* retAddr = baselineInterp.retAddrForIC(op_);
948 return writePtr(retAddr, "ReturnAddr");
951 bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize,
952 HandleValueVector savedCallerArgs) {
953 // Build baseline stub frame:
954 // +===============+
955 // | FramePtr |
956 // +---------------+
957 // | StubPtr |
958 // +---------------+
959 // | Padding? |
960 // +---------------+
961 // | ArgA |
962 // +---------------+
963 // | ... |
964 // +---------------+
965 // | Arg0 |
966 // +---------------+
967 // | ThisV |
968 // +---------------+
969 // | CalleeToken |
970 // +---------------+
971 // | Descr(BLStub) |
972 // +---------------+
973 // | ReturnAddr |
974 // +===============+
976 JitSpew(JitSpew_BaselineBailouts, " [BASELINE-STUB FRAME]");
978 // Write previous frame pointer (saved earlier).
979 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
980 return false;
982 prevFramePtr_ = virtualPointerAtStackOffset(0);
984 // Write stub pointer.
985 uint32_t pcOff = script_->pcToOffset(pc_);
986 JitScript* jitScript = script_->jitScript();
987 const ICEntry& icEntry = jitScript->icEntryFromPCOffset(pcOff);
988 ICFallbackStub* fallback = jitScript->fallbackStubForICEntry(&icEntry);
989 if (!writePtr(fallback, "StubPtr")) {
990 return false;
993 // Write out the arguments, copied from the baseline frame. The order
994 // of the arguments is reversed relative to the baseline frame's stack
995 // values.
996 MOZ_ASSERT(IsIonInlinableOp(op_));
997 bool pushedNewTarget = IsConstructPC(pc_);
998 unsigned actualArgc;
999 Value callee;
1000 if (needToSaveCallerArgs()) {
1001 // For accessors, the arguments are no longer on the stack:
1002 // they were copied into a vector and are written out from there.
1003 callee = savedCallerArgs[0];
1004 actualArgc = IsSetPropOp(op_) ? 1 : 0;
1006 // Align the stack based on the number of arguments.
1007 size_t afterFrameSize =
1008 (actualArgc + 1) * sizeof(Value) + JitFrameLayout::Size();
1009 if (!maybeWritePadding(JitStackAlignment, afterFrameSize, "Padding")) {
1010 return false;
1013 // Push arguments.
1014 MOZ_ASSERT(actualArgc + 2 <= exprStackSlots());
1015 MOZ_ASSERT(savedCallerArgs.length() == actualArgc + 2);
1016 for (unsigned i = 0; i < actualArgc + 1; i++) {
1017 size_t arg = savedCallerArgs.length() - (i + 1);
1018 if (!writeValue(savedCallerArgs[arg], "ArgVal")) {
1019 return false;
1022 } else if (resumeMode() == ResumeMode::InlinedFunCall && GET_ARGC(pc_) == 0) {
1023 // When calling FunCall with 0 arguments, we push |undefined|
1024 // for |this|. See BaselineCacheIRCompiler::pushFunCallArguments.
1025 MOZ_ASSERT(!pushedNewTarget);
1026 actualArgc = 0;
1027 // Align the stack based on pushing |this| and 0 arguments.
1028 size_t afterFrameSize = sizeof(Value) + JitFrameLayout::Size();
1029 if (!maybeWritePadding(JitStackAlignment, afterFrameSize, "Padding")) {
1030 return false;
1032 // Push an undefined value for |this|.
1033 if (!writeValue(UndefinedValue(), "ThisValue")) {
1034 return false;
1036 size_t calleeSlot = blFrame()->numValueSlots(frameSize) - 1;
1037 callee = *blFrame()->valueSlot(calleeSlot);
1039 } else {
1040 MOZ_ASSERT(resumeMode() == ResumeMode::InlinedStandardCall ||
1041 resumeMode() == ResumeMode::InlinedFunCall);
1042 actualArgc = GET_ARGC(pc_);
1043 if (resumeMode() == ResumeMode::InlinedFunCall) {
1044 // See BaselineCacheIRCompiler::pushFunCallArguments.
1045 MOZ_ASSERT(actualArgc > 0);
1046 actualArgc--;
1049 // In addition to the formal arguments, we must also push |this|.
1050 // When calling a constructor, we must also push |newTarget|.
1051 uint32_t numArguments = actualArgc + 1 + pushedNewTarget;
1053 // Align the stack based on the number of arguments.
1054 size_t afterFrameSize =
1055 numArguments * sizeof(Value) + JitFrameLayout::Size();
1056 if (!maybeWritePadding(JitStackAlignment, afterFrameSize, "Padding")) {
1057 return false;
1060 // Copy the arguments and |this| from the BaselineFrame, in reverse order.
1061 size_t valueSlot = blFrame()->numValueSlots(frameSize) - 1;
1062 size_t calleeSlot = valueSlot - numArguments;
1064 for (size_t i = valueSlot; i > calleeSlot; i--) {
1065 Value v = *blFrame()->valueSlot(i);
1066 if (!writeValue(v, "ArgVal")) {
1067 return false;
1071 callee = *blFrame()->valueSlot(calleeSlot);
1074 // In case these arguments need to be copied onto the stack again for a
1075 // rectifier frame, save the current framePushed value here for later use.
1076 size_t endOfBaselineStubArgs = framePushed();
1078 // Push callee token (must be a JS Function)
1079 JitSpew(JitSpew_BaselineBailouts, " Callee = %016" PRIx64,
1080 callee.asRawBits());
1082 JSFunction* calleeFun = &callee.toObject().as<JSFunction>();
1083 if (!writePtr(CalleeToToken(calleeFun, pushedNewTarget), "CalleeToken")) {
1084 return false;
1086 const ICEntry& icScriptEntry = icScript_->icEntryFromPCOffset(pcOff);
1087 ICFallbackStub* icScriptFallback =
1088 icScript_->fallbackStubForICEntry(&icScriptEntry);
1089 setNextCallee(calleeFun, icScriptFallback->trialInliningState());
1091 // Push BaselineStub frame descriptor
1092 size_t baselineStubFrameDescr =
1093 MakeFrameDescriptorForJitCall(FrameType::BaselineStub, actualArgc);
1094 if (!writeWord(baselineStubFrameDescr, "Descriptor")) {
1095 return false;
1098 // Push return address into ICCall_Scripted stub, immediately after the call.
1099 void* baselineCallReturnAddr = getStubReturnAddress();
1100 MOZ_ASSERT(baselineCallReturnAddr);
1101 if (!writePtr(baselineCallReturnAddr, "ReturnAddr")) {
1102 return false;
1105 // The stack must be aligned after the callee pushes the frame pointer.
1106 MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment == 0);
1108 // Build a rectifier frame if necessary
1109 if (actualArgc < calleeFun->nargs() &&
1110 !buildRectifierFrame(actualArgc, endOfBaselineStubArgs)) {
1111 return false;
1114 return true;
1117 bool BaselineStackBuilder::buildRectifierFrame(uint32_t actualArgc,
1118 size_t endOfBaselineStubArgs) {
1119 // Push a reconstructed rectifier frame.
1120 // +===============+
1121 // | Padding? |
1122 // +---------------+
1123 // | UndefinedU |
1124 // +---------------+
1125 // | ... |
1126 // +---------------+
1127 // | Undefined0 |
1128 // +---------------+
1129 // | ArgA |
1130 // +---------------+
1131 // | ... |
1132 // +---------------+
1133 // | Arg0 |
1134 // +---------------+
1135 // | ThisV |
1136 // +---------------+
1137 // | CalleeToken |
1138 // +---------------+
1139 // | Descr(Rect) |
1140 // +---------------+
1141 // | ReturnAddr |
1142 // +===============+
1144 JitSpew(JitSpew_BaselineBailouts, " [RECTIFIER FRAME]");
1145 bool pushedNewTarget = IsConstructPC(pc_);
1147 if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
1148 return false;
1150 prevFramePtr_ = virtualPointerAtStackOffset(0);
1152 // Align the stack based on the number of arguments.
1153 size_t afterFrameSize =
1154 (nextCallee()->nargs() + 1 + pushedNewTarget) * sizeof(Value) +
1155 RectifierFrameLayout::Size();
1156 if (!maybeWritePadding(JitStackAlignment, afterFrameSize, "Padding")) {
1157 return false;
1160 // Copy new.target, if necessary.
1161 if (pushedNewTarget) {
1162 size_t newTargetOffset = (framePushed() - endOfBaselineStubArgs) +
1163 (actualArgc + 1) * sizeof(Value);
1164 Value newTargetValue = *valuePointerAtStackOffset(newTargetOffset);
1165 if (!writeValue(newTargetValue, "CopiedNewTarget")) {
1166 return false;
1170 // Push undefined for missing arguments.
1171 for (unsigned i = 0; i < (nextCallee()->nargs() - actualArgc); i++) {
1172 if (!writeValue(UndefinedValue(), "FillerVal")) {
1173 return false;
1177 // Copy arguments + thisv from BaselineStub frame.
1178 if (!subtract((actualArgc + 1) * sizeof(Value), "CopiedArgs")) {
1179 return false;
1181 BufferPointer<uint8_t> stubArgsEnd =
1182 pointerAtStackOffset<uint8_t>(framePushed() - endOfBaselineStubArgs);
1183 JitSpew(JitSpew_BaselineBailouts, " MemCpy from %p", stubArgsEnd.get());
1184 memcpy(pointerAtStackOffset<uint8_t>(0).get(), stubArgsEnd.get(),
1185 (actualArgc + 1) * sizeof(Value));
1187 // Push calleeToken again.
1188 if (!writePtr(CalleeToToken(nextCallee(), pushedNewTarget), "CalleeToken")) {
1189 return false;
1192 // Push rectifier frame descriptor
1193 size_t rectifierFrameDescr =
1194 MakeFrameDescriptorForJitCall(FrameType::Rectifier, actualArgc);
1195 if (!writeWord(rectifierFrameDescr, "Descriptor")) {
1196 return false;
1199 // Push return address into the ArgumentsRectifier code, immediately after the
1200 // ioncode call.
1201 void* rectReturnAddr =
1202 cx_->runtime()->jitRuntime()->getArgumentsRectifierReturnAddr().value;
1203 MOZ_ASSERT(rectReturnAddr);
1204 if (!writePtr(rectReturnAddr, "ReturnAddr")) {
1205 return false;
1208 // The stack must be aligned after the callee pushes the frame pointer.
1209 MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment == 0);
1211 return true;
1214 bool BaselineStackBuilder::finishLastFrame() {
1215 const BaselineInterpreter& baselineInterp =
1216 cx_->runtime()->jitRuntime()->baselineInterpreter();
1218 setResumeFramePtr(prevFramePtr());
1220 // Compute the native address (within the Baseline Interpreter) that we will
1221 // resume at and initialize the frame's interpreter fields.
1222 uint8_t* resumeAddr;
1223 if (isPrologueBailout()) {
1224 JitSpew(JitSpew_BaselineBailouts, " Resuming into prologue.");
1225 MOZ_ASSERT(pc_ == script_->code());
1226 blFrame()->setInterpreterFieldsForPrologue(script_);
1227 resumeAddr = baselineInterp.bailoutPrologueEntryAddr();
1228 } else if (propagatingIonExceptionForDebugMode()) {
1229 // When propagating an exception for debug mode, set the
1230 // resume pc to the throwing pc, so that Debugger hooks report
1231 // the correct pc offset of the throwing op instead of its
1232 // successor.
1233 jsbytecode* throwPC = script_->offsetToPC(iter_.pcOffset());
1234 blFrame()->setInterpreterFields(script_, throwPC);
1235 resumeAddr = baselineInterp.interpretOpAddr().value;
1236 } else {
1237 jsbytecode* resumePC = getResumePC();
1238 blFrame()->setInterpreterFields(script_, resumePC);
1239 resumeAddr = baselineInterp.interpretOpAddr().value;
1241 setResumeAddr(resumeAddr);
1242 JitSpew(JitSpew_BaselineBailouts, " Set resumeAddr=%p", resumeAddr);
1244 if (cx_->runtime()->geckoProfiler().enabled()) {
1245 // Register bailout with profiler.
1246 const char* filename = script_->filename();
1247 if (filename == nullptr) {
1248 filename = "<unknown>";
1250 unsigned len = strlen(filename) + 200;
1251 UniqueChars buf(js_pod_malloc<char>(len));
1252 if (buf == nullptr) {
1253 ReportOutOfMemory(cx_);
1254 return false;
1256 snprintf(buf.get(), len, "%s %s %s on line %u of %s:%u",
1257 BailoutKindString(bailoutKind()), resumeAfter() ? "after" : "at",
1258 CodeName(op_), PCToLineNumber(script_, pc_), filename,
1259 script_->lineno());
1260 cx_->runtime()->geckoProfiler().markEvent("Bailout", buf.get());
1263 return true;
1266 #ifdef DEBUG
1267 // The |envChain| slot must not be optimized out if the currently
1268 // active scope requires any EnvironmentObjects beyond what is
1269 // available at body scope. This checks that the scope chain does not
1270 // require any such EnvironmentObjects.
1271 // See also: |CompileInfo::isObservableFrameSlot|
1272 bool BaselineStackBuilder::envChainSlotCanBeOptimized() {
1273 jsbytecode* pc = script_->offsetToPC(iter_.pcOffset());
1274 Scope* scopeIter = script_->innermostScope(pc);
1275 while (scopeIter != script_->bodyScope()) {
1276 if (!scopeIter || scopeIter->hasEnvironment()) {
1277 return false;
1279 scopeIter = scopeIter->enclosing();
1281 return true;
1284 bool jit::AssertBailoutStackDepth(JSContext* cx, JSScript* script,
1285 jsbytecode* pc, ResumeMode mode,
1286 uint32_t exprStackSlots) {
1287 if (IsResumeAfter(mode)) {
1288 pc = GetNextPc(pc);
1291 uint32_t expectedDepth;
1292 bool reachablePC;
1293 if (!ReconstructStackDepth(cx, script, pc, &expectedDepth, &reachablePC)) {
1294 return false;
1296 if (!reachablePC) {
1297 return true;
1300 JSOp op = JSOp(*pc);
1302 if (mode == ResumeMode::InlinedFunCall) {
1303 // For inlined fun.call(this, ...), the reconstructed stack depth will
1304 // include the |this|, but the exprStackSlots won't.
1305 // Exception: if there are no arguments, the depths do match.
1306 MOZ_ASSERT(IsInvokeOp(op));
1307 if (GET_ARGC(pc) > 0) {
1308 MOZ_ASSERT(expectedDepth == exprStackSlots + 1);
1309 } else {
1310 MOZ_ASSERT(expectedDepth == exprStackSlots);
1312 return true;
1315 if (mode == ResumeMode::InlinedAccessor) {
1316 // Accessors coming out of ion are inlined via a complete lie perpetrated by
1317 // the compiler internally. Ion just rearranges the stack, and pretends that
1318 // it looked like a call all along.
1319 // This means that the depth is actually one *more* than expected by the
1320 // interpreter, as there is now a JSFunction, |this| and [arg], rather than
1321 // the expected |this| and [arg].
1322 // If the inlined accessor is a GetElem operation, the numbers do match, but
1323 // that's just because GetElem expects one more item on the stack. Note that
1324 // none of that was pushed, but it's still reflected in exprStackSlots.
1325 MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op));
1326 if (IsGetElemOp(op)) {
1327 MOZ_ASSERT(exprStackSlots == expectedDepth);
1328 } else {
1329 MOZ_ASSERT(exprStackSlots == expectedDepth + 1);
1331 return true;
1334 // In all other cases, the depth must match.
1335 MOZ_ASSERT(exprStackSlots == expectedDepth);
1336 return true;
1339 bool BaselineStackBuilder::validateFrame() {
1340 const uint32_t frameSize = framePushed();
1341 blFrame()->setDebugFrameSize(frameSize);
1342 JitSpew(JitSpew_BaselineBailouts, " FrameSize=%u", frameSize);
1344 // debugNumValueSlots() is based on the frame size, so do some sanity checks.
1345 MOZ_ASSERT(blFrame()->debugNumValueSlots() >= script_->nfixed());
1346 MOZ_ASSERT(blFrame()->debugNumValueSlots() <= script_->nslots());
1348 uint32_t expectedSlots = exprStackSlots();
1349 if (resumingInFinallyBlock()) {
1350 // If we are resuming in a finally block, we push three extra values on the
1351 // stack (the exception, the exception stack, and |throwing|), so the depth
1352 // at the resume PC should be the depth at the fault PC plus three.
1353 expectedSlots += 3;
1355 return AssertBailoutStackDepth(cx_, script_, pc_, resumeMode(),
1356 expectedSlots);
1358 #endif
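// Return the address inside the Baseline IC fallback code that the
// reconstructed stub frame should appear to return to, chosen by the kind of
// op (get/set/getelem/call/new) that was inlined.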
1360 void* BaselineStackBuilder::getStubReturnAddress() {
1361 const BaselineICFallbackCode& code =
1362 cx_->runtime()->jitRuntime()->baselineICFallbackCode();
1364 if (IsGetPropOp(op_)) {
1365 return code.bailoutReturnAddr(BailoutReturnKind::GetProp);
1367 if (IsSetPropOp(op_)) {
1368 return code.bailoutReturnAddr(BailoutReturnKind::SetProp);
1370 if (IsGetElemOp(op_)) {
1371 return code.bailoutReturnAddr(BailoutReturnKind::GetElem);
1374 // This should be a call op of some kind, now.
1375 MOZ_ASSERT(IsInvokeOp(op_) && !IsSpreadOp(op_));
1376 if (IsConstructOp(op_)) {
1377 return code.bailoutReturnAddr(BailoutReturnKind::New);
1379 return code.bailoutReturnAddr(BailoutReturnKind::Call);
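// Advance past a single LoopHead or Nop op, or follow a single Goto;
// getResumePC() below iterates this to avoid resuming at a loop head.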
1382 static inline jsbytecode* GetNextNonLoopHeadPc(jsbytecode* pc) {
1383 JSOp op = JSOp(*pc);
1384 switch (op) {
1385 case JSOp::Goto:
1386 return pc + GET_JUMP_OFFSET(pc);
1388 case JSOp::LoopHead:
1389 case JSOp::Nop:
1390 return GetNextPc(pc);
1392 default:
1393 return pc;
1397 // Returns the pc to resume execution at in Baseline after a bailout.
1398 jsbytecode* BaselineStackBuilder::getResumePC() {
1399 if (resumeAfter()) {
1400 return GetNextPc(pc_);
1403 // If we are resuming at a LoopHead op, resume at the next op to avoid
1404 // a bailout -> enter Ion -> bailout loop with --ion-eager.
1406 // Cycles can cause the loop below to not terminate. Empty loops are one
1407 // such example:
1409 // L: loophead
1410 // goto L
1412 // We do cycle detection below with the "tortoise and the hare" algorithm.
1413 jsbytecode* slowerPc = pc_;
1414 jsbytecode* fasterPc = pc_;
1415 while (true) {
1416 // Advance fasterPc twice as fast as slowerPc.
1417 slowerPc = GetNextNonLoopHeadPc(slowerPc);
1418 fasterPc = GetNextNonLoopHeadPc(fasterPc);
1419 fasterPc = GetNextNonLoopHeadPc(fasterPc);
1421 // Break on cycles or at the end of goto sequences.
1422 if (fasterPc == slowerPc) {
1423 break;
1427 return slowerPc;
1430 bool BaselineStackBuilder::isPrologueBailout() {
1431 // If we are propagating an exception for debug mode, we will not resume
1432 // into baseline code, but instead into HandleExceptionBaseline (i.e.,
1433 // never before the prologue).
1434 return iter_.pcOffset() == 0 && !iter_.resumeAfter() &&
1435 !propagatingIonExceptionForDebugMode();
1438 // Build a baseline stack frame.
1439 bool BaselineStackBuilder::buildOneFrame() {
1440 // Build a baseline frame:
1441 // +===============+
1442 // | PrevFramePtr | <-- initFrame()
1443 // +---------------+
1444 // | Baseline | <-- buildBaselineFrame()
1445 // | Frame |
1446 // +---------------+
1447 // | Fixed0 | <-- buildFixedSlots()
1448 // +---------------+
1449 // | ... |
1450 // +---------------+
1451 // | FixedF |
1452 // +---------------+
1453 // | Stack0 | <-- buildExpressionStack() -or- fixupCallerArgs()
1454 // +---------------+
1455 // | ... |
1456 // +---------------+ If we are building the frame in which we will
1457 // | StackS | <-- resume, we stop here.
1458 // +---------------+ finishLastFrame() sets up the interpreter fields.
1459 // . .
1460 // . .
1461 // . . <-- If there are additional frames inlined into this
1462 // | Descr(BLJS) | one, we finish this frame. We generate a stub
1463 // +---------------+ frame (and maybe also a rectifier frame) between
1464 // | ReturnAddr | this frame and the inlined frame.
1465 // +===============+ See: prepareForNextFrame()
1467 if (!initFrame()) {
1468 return false;
1471 if (!buildBaselineFrame()) {
1472 return false;
1475 if (fun_ && !buildArguments()) {
1476 return false;
1479 if (!buildFixedSlots()) {
1480 return false;
1483 bool fixedUp = false;
1484 RootedValueVector savedCallerArgs(cx_);
1485 if (iter_.moreFrames() && !fixUpCallerArgs(&savedCallerArgs, &fixedUp)) {
1486 return false;
1489 if (!fixedUp) {
1490 if (!buildExpressionStack()) {
1491 return false;
1493 if (resumingInFinallyBlock() && !buildFinallyException()) {
1494 return false;
1498 #ifdef DEBUG
1499 if (!validateFrame()) {
1500 return false;
1502 #endif
1504 #ifdef JS_JITSPEW
1505 const uint32_t pcOff = script_->pcToOffset(pc());
1506 JitSpew(JitSpew_BaselineBailouts,
1507 " Resuming %s pc offset %d (op %s) (line %u) of %s:%u:%u",
1508 resumeAfter() ? "after" : "at", (int)pcOff, CodeName(op_),
1509 PCToLineNumber(script_, pc()), script_->filename(), script_->lineno(),
1510 script_->column().oneOriginValue());
1511 JitSpew(JitSpew_BaselineBailouts, " Bailout kind: %s",
1512 BailoutKindString(bailoutKind()));
1513 #endif
1515 // If this was the last inline frame, or we are bailing out to a catch or
1516 // finally block in this frame, then unpacking is almost done.
1517 if (done()) {
1518 return finishLastFrame();
1521 // Otherwise, this is an outer frame for an inlined call or
1522 // accessor. We will be building an inner frame. Before that,
1523 // we must create a stub frame, and potentially a rectifier frame.
1524 return prepareForNextFrame(savedCallerArgs);
1527 bool jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation,
1528 const JSJitFrameIter& iter,
1529 BaselineBailoutInfo** bailoutInfo,
1530 const ExceptionBailoutInfo* excInfo,
1531 BailoutReason reason) {
1532 MOZ_ASSERT(bailoutInfo != nullptr);
1533 MOZ_ASSERT(*bailoutInfo == nullptr);
1534 MOZ_ASSERT(iter.isBailoutJS());
1536 // Caller should have saved the exception while we perform the bailout.
1537 MOZ_ASSERT(!cx->isExceptionPending());
1539 // Ion bailout can fail due to overrecursion and OOM. In such cases we
1540 // cannot honor any further Debugger hooks on the frame, and need to
1541 // ensure that its Debugger.Frame entry is cleaned up.
1542 auto guardRemoveRematerializedFramesFromDebugger =
1543 mozilla::MakeScopeExit([&] {
1544 activation->removeRematerializedFramesFromDebugger(cx, iter.fp());
1547 // Always remove the RInstructionResults from the JitActivation, even in
1548 // case of failures as the stack frame is going away after the bailout.
1549 auto removeIonFrameRecovery = mozilla::MakeScopeExit(
1550 [&] { activation->removeIonFrameRecovery(iter.jsFrame()); });
1552 // The caller of the top frame must be one of the following:
1553 // IonJS - Ion calling into Ion.
1554 // BaselineStub - Baseline calling into Ion.
1555 // Entry / WasmToJSJit - Interpreter or other (wasm) calling into Ion.
1556 // Rectifier - Arguments rectifier calling into Ion.
1557 // BaselineJS - Resume'd Baseline, then likely OSR'd into Ion.
1558 MOZ_ASSERT(iter.isBailoutJS());
1559 #if defined(DEBUG) || defined(JS_JITSPEW)
1560 FrameType prevFrameType = iter.prevType();
1561 MOZ_ASSERT(JSJitFrameIter::isEntry(prevFrameType) ||
1562 prevFrameType == FrameType::IonJS ||
1563 prevFrameType == FrameType::BaselineStub ||
1564 prevFrameType == FrameType::Rectifier ||
1565 prevFrameType == FrameType::IonICCall ||
1566 prevFrameType == FrameType::BaselineJS ||
1567 prevFrameType == FrameType::BaselineInterpreterEntry);
1568 #endif
1570 // All incoming frames are going to look like this:
1572 // +---------------+
1573 // | ... |
1574 // +---------------+
1575 // | Args |
1576 // | ... |
1577 // +---------------+
1578 // | ThisV |
1579 // +---------------+
1580 // | ActualArgC |
1581 // +---------------+
1582 // | CalleeToken |
1583 // +---------------+
1584 // | Descriptor |
1585 // +---------------+
1586 // | ReturnAddr |
1587 // +---------------+
1588 // | ||||| | <---- Overwrite starting here.
1589 // | ||||| |
1590 // | ||||| |
1591 // +---------------+
1593 JitSpew(JitSpew_BaselineBailouts,
1594 "Bailing to baseline %s:%u:%u (IonScript=%p) (FrameType=%d)",
1595 iter.script()->filename(), iter.script()->lineno(),
1596 iter.script()->column().oneOriginValue(), (void*)iter.ionScript(),
1597 (int)prevFrameType);
1599 if (excInfo) {
1600 if (excInfo->catchingException()) {
1601 JitSpew(JitSpew_BaselineBailouts, "Resuming in catch or finally block");
1603 if (excInfo->propagatingIonExceptionForDebugMode()) {
1604 JitSpew(JitSpew_BaselineBailouts, "Resuming in-place for debug mode");
1608 JitSpew(JitSpew_BaselineBailouts,
1609 " Reading from snapshot offset %u size %zu", iter.snapshotOffset(),
1610 iter.ionScript()->snapshotsListSize());
1612 iter.script()->updateJitCodeRaw(cx->runtime());
1614 // Under a bailout, there is no need to invalidate the frame after
1615 // evaluating the recover instruction, as the invalidation is only needed in
1616 // cases where the frame is introspected ahead of the bailout.
1617 MaybeReadFallback recoverBailout(cx, activation, &iter,
1618 MaybeReadFallback::Fallback_DoNothing);
1620 // Ensure that all value locations are readable from the SnapshotIterator.
1621 // Get the RInstructionResults from the JitActivation if the frame got
1622 // recovered ahead of the bailout.
1623 SnapshotIterator snapIter(iter, activation->bailoutData()->machineState());
1624 if (!snapIter.initInstructionResults(recoverBailout)) {
1625 return false;
1628 #ifdef TRACK_SNAPSHOTS
1629 snapIter.spewBailingFrom();
1630 #endif
1632 BaselineStackBuilder builder(cx, iter, snapIter, excInfo, reason);
1633 if (!builder.init()) {
1634 return false;
1637 JitSpew(JitSpew_BaselineBailouts, " Incoming frame ptr = %p",
1638 builder.startFrame());
1639 if (iter.maybeCallee()) {
1640 JitSpew(JitSpew_BaselineBailouts, " Callee function (%s:%u:%u)",
1641 iter.script()->filename(), iter.script()->lineno(),
1642 iter.script()->column().oneOriginValue());
1643 } else {
1644 JitSpew(JitSpew_BaselineBailouts, " No callee!");
1647 if (iter.isConstructing()) {
1648 JitSpew(JitSpew_BaselineBailouts, " Constructing!");
1649 } else {
1650 JitSpew(JitSpew_BaselineBailouts, " Not constructing!");
1653 JitSpew(JitSpew_BaselineBailouts, " Restoring frames:");
1655 while (true) {
1656 // Skip recover instructions as they are already recovered by
1657 // |initInstructionResults|.
1658 snapIter.settleOnFrame();
1660 JitSpew(JitSpew_BaselineBailouts, " FrameNo %zu", builder.frameNo());
1662 if (!builder.buildOneFrame()) {
1663 MOZ_ASSERT(cx->isExceptionPending());
1664 return false;
1667 if (builder.done()) {
1668 break;
1671 builder.nextFrame();
1673 JitSpew(JitSpew_BaselineBailouts, " Done restoring frames");
1675 BailoutKind bailoutKind = builder.bailoutKind();
1677 if (!builder.outermostFrameFormals().empty()) {
1678 // Set the first frame's formals, see the comment in InitFromBailout.
1679 Value* argv = builder.startFrame()->actualArgs();
1680 mozilla::PodCopy(argv, builder.outermostFrameFormals().begin(),
1681 builder.outermostFrameFormals().length());
1684 // Do stack check.
1685 bool overRecursed = false;
1686 BaselineBailoutInfo* info = builder.info();
1687 size_t numBytesToPush = info->copyStackTop - info->copyStackBottom;
1688 MOZ_ASSERT((numBytesToPush % sizeof(uintptr_t)) == 0);
1689 uint8_t* newsp = info->incomingStack - numBytesToPush;
1690 #ifdef JS_SIMULATOR
1691 if (Simulator::Current()->overRecursed(uintptr_t(newsp))) {
1692 overRecursed = true;
1694 #else
1695 AutoCheckRecursionLimit recursion(cx);
1696 if (!recursion.checkWithStackPointerDontReport(cx, newsp)) {
1697 overRecursed = true;
1699 #endif
1700 if (overRecursed) {
1701 JitSpew(JitSpew_BaselineBailouts, " Overrecursion check failed!");
1702 ReportOverRecursed(cx);
1703 return false;
1706 // Take the reconstructed baseline stack so it doesn't get freed when builder
1707 // destructs.
1708 info = builder.takeBuffer();
1709 info->numFrames = builder.frameNo() + 1;
1710 info->bailoutKind.emplace(bailoutKind);
1711 *bailoutInfo = info;
1712 guardRemoveRematerializedFramesFromDebugger.release();
1713 return true;
1716 static void InvalidateAfterBailout(JSContext* cx, HandleScript outerScript,
1717 const char* reason) {
1718 // In some cases, the computation of recover instructions can invalidate the
1719 // Ion script before we reach the end of the bailout. Thus, if the outer
1720 // script no longer has any Ion script attached, we just skip the
1721 // invalidation.
1723 // For example, such a case can happen if the template object for an unboxed
1724 // object no longer matches the content of its properties (see Bug 1174547).
1725 if (!outerScript->hasIonScript()) {
1726 JitSpew(JitSpew_BaselineBailouts, "Ion script is already invalidated");
1727 return;
1730 // Record an invalidation for this script in the jit hints map.
1731 if (cx->runtime()->jitRuntime()->hasJitHintsMap()) {
1732 JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap();
1733 jitHints->recordInvalidation(outerScript);
1736 MOZ_ASSERT(!outerScript->ionScript()->invalidated());
1738 JitSpew(JitSpew_BaselineBailouts, "Invalidating due to %s", reason);
1739 Invalidate(cx, outerScript);
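// A lexical check fails when JIT code reads a |let| or |const| binding that
// is still uninitialized, i.e. in its temporal dead zone. An illustrative
// (hypothetical) JS example:
//
//   function f() { g(); let x = 1; function g() { return x; } }
//
// Calling g() before |let x = 1| has run observes the uninitialized binding.
// Below, the inner script is flagged with setFailedLexicalCheck() and the
// Ion code of both scripts is invalidated.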
1742 static void HandleLexicalCheckFailure(JSContext* cx, HandleScript outerScript,
1743 HandleScript innerScript) {
1744 JitSpew(JitSpew_IonBailouts,
1745 "Lexical check failure %s:%u:%u, inlined into %s:%u:%u",
1746 innerScript->filename(), innerScript->lineno(),
1747 innerScript->column().oneOriginValue(), outerScript->filename(),
1748 outerScript->lineno(), outerScript->column().oneOriginValue());
1750 if (!innerScript->failedLexicalCheck()) {
1751 innerScript->setFailedLexicalCheck();
1754 InvalidateAfterBailout(cx, outerScript, "lexical check failure");
1755 if (innerScript->hasIonScript()) {
1756 Invalidate(cx, innerScript);
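// Copy any state the Debugger may have mutated on a rematerialized Ion frame
// (environment chain, |this|, actual arguments, fixed slots, and the return
// value) back into the freshly reconstructed baseline frame, and let
// DebugAPI handle the bailout for debuggee frames.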
1760 static bool CopyFromRematerializedFrame(JSContext* cx, JitActivation* act,
1761 uint8_t* fp, size_t inlineDepth,
1762 BaselineFrame* frame) {
1763 RematerializedFrame* rematFrame =
1764 act->lookupRematerializedFrame(fp, inlineDepth);
1766 // We might not have rematerialized a frame if the user never requested a
1767 // Debugger.Frame for it.
1768 if (!rematFrame) {
1769 return true;
1772 MOZ_ASSERT(rematFrame->script() == frame->script());
1773 MOZ_ASSERT(rematFrame->numActualArgs() == frame->numActualArgs());
1775 frame->setEnvironmentChain(rematFrame->environmentChain());
1777 if (frame->isFunctionFrame()) {
1778 frame->thisArgument() = rematFrame->thisArgument();
1781 for (unsigned i = 0; i < frame->numActualArgs(); i++) {
1782 frame->argv()[i] = rematFrame->argv()[i];
1785 for (size_t i = 0; i < frame->script()->nfixed(); i++) {
1786 *frame->valueSlot(i) = rematFrame->locals()[i];
1789 if (frame->script()->noScriptRval()) {
1790 frame->setReturnValue(UndefinedValue());
1791 } else {
1792 frame->setReturnValue(rematFrame->returnValue());
1795 // Don't copy over the hasCachedSavedFrame bit. The new BaselineFrame we're
1796 // building has a different AbstractFramePtr, so it won't be found in the
1797 // LiveSavedFrameCache if we look there.
1799 JitSpew(JitSpew_BaselineBailouts,
1800 " Copied from rematerialized frame at (%p,%zu)", fp, inlineDepth);
1802 // Propagate the debuggee frame flag. For the case where the Debugger did
1803 // not rematerialize an Ion frame, the baseline frame has its debuggee
1804 // flag set iff its script is considered a debuggee. See the debuggee case
1805 // in InitFromBailout.
1806 if (rematFrame->isDebuggee()) {
1807 frame->setIsDebuggee();
1808 return DebugAPI::handleIonBailout(cx, rematFrame, frame);
1811 return true;
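// How FinishBailoutToBaseline reacts to a given BailoutKind (see the switch
// below): the bailout either invalidates immediately, counts as a fixable
// bailout that invalidates the IonScript once it becomes frequent, counts as
// an unfixable bailout that invalidates and disables Ion compilation once
// frequent, or has no effect on the IonScript.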
1814 enum class BailoutAction {
1815 InvalidateImmediately,
1816 InvalidateIfFrequent,
1817 DisableIfFrequent,
1818 NoAction
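// Called (via the bailout tail stub) after the frames reconstructed by
// BailoutIonToBaseline have been copied onto the stack. This finishes the
// bailout on the main thread: it fixes up environment and arguments objects,
// copies over any Debugger-rematerialized frame state, and decides, based on
// the BailoutKind, whether the Ion script should be invalidated or disabled.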
1821 bool jit::FinishBailoutToBaseline(BaselineBailoutInfo* bailoutInfoArg) {
1822 JitSpew(JitSpew_BaselineBailouts, " Done restoring frames");
1824 JSContext* cx = TlsContext.get();
1825 // Use UniquePtr to free the bailoutInfo before we return, and root it so
1826 // that its tempId field is traced.
1827 Rooted<UniquePtr<BaselineBailoutInfo>> bailoutInfo(cx, bailoutInfoArg);
1828 bailoutInfoArg = nullptr;
1830 MOZ_DIAGNOSTIC_ASSERT(*bailoutInfo->bailoutKind != BailoutKind::Unreachable);
1832 // jit::Bailout(), jit::InvalidationBailout(), and jit::HandleException()
1833 // should have reset the unsafe-region counter to zero.
1834 MOZ_ASSERT(!cx->isInUnsafeRegion());
1836 BaselineFrame* topFrame = GetTopBaselineFrame(cx);
1838 // We have to get rid of the rematerialized frame, whether it is
1839 // restored or unwound.
1840 uint8_t* incomingStack = bailoutInfo->incomingStack;
1841 auto guardRemoveRematerializedFramesFromDebugger =
1842 mozilla::MakeScopeExit([&] {
1843 JitActivation* act = cx->activation()->asJit();
1844 act->removeRematerializedFramesFromDebugger(cx, incomingStack);
1847 // Ensure the frame has the environment objects (e.g. a call object) it needs.
1848 if (!EnsureHasEnvironmentObjects(cx, topFrame)) {
1849 return false;
1852 // Create arguments objects for bailed out frames, to maintain the invariant
1853 // that script->needsArgsObj() implies frame->hasArgsObj().
1854 RootedScript innerScript(cx, nullptr);
1855 RootedScript outerScript(cx, nullptr);
1857 MOZ_ASSERT(cx->currentlyRunningInJit());
1858 JSJitFrameIter iter(cx->activation()->asJit());
1859 uint8_t* outerFp = nullptr;
1861 // Iter currently points at the exit frame. Get the previous frame
1862 // (which must be a baseline frame), and set it as the last profiling
1863 // frame.
1864 if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
1865 cx->runtime())) {
1866 MOZ_ASSERT(iter.prevType() == FrameType::BaselineJS);
1867 JitFrameLayout* fp = reinterpret_cast<JitFrameLayout*>(iter.prevFp());
1868 cx->jitActivation->setLastProfilingFrame(fp);
1871 uint32_t numFrames = bailoutInfo->numFrames;
1872 MOZ_ASSERT(numFrames > 0);
1874 uint32_t frameno = 0;
1875 while (frameno < numFrames) {
1876 MOZ_ASSERT(!iter.isIonJS());
1878 if (iter.isBaselineJS()) {
1879 BaselineFrame* frame = iter.baselineFrame();
1880 MOZ_ASSERT(frame->script()->hasBaselineScript());
1882 // If the frame doesn't even have an env chain set yet, then it's resuming
1883 // into the prologue before the env chain is initialized. Any necessary
1884 // args object will also be initialized there.
1885 if (frame->environmentChain() && frame->script()->needsArgsObj()) {
1886 ArgumentsObject* argsObj;
1887 if (frame->hasArgsObj()) {
1888 argsObj = &frame->argsObj();
1889 } else {
1890 argsObj = ArgumentsObject::createExpected(cx, frame);
1891 if (!argsObj) {
1892 return false;
1896 // The arguments object is stored in a local binding, and needsArgsObj
1897 // does not check whether that binding has been clobbered. Check the
1898 // local binding restored during bailout before storing the arguments
1899 // object to the slot.
1900 RootedScript script(cx, frame->script());
1901 SetFrameArgumentsObject(cx, frame, script, argsObj);
1904 if (frameno == 0) {
1905 innerScript = frame->script();
1908 if (frameno == numFrames - 1) {
1909 outerScript = frame->script();
1910 outerFp = iter.fp();
1911 MOZ_ASSERT(outerFp == incomingStack);
1914 frameno++;
1917 ++iter;
1920 MOZ_ASSERT(innerScript);
1921 MOZ_ASSERT(outerScript);
1922 MOZ_ASSERT(outerFp);
1924 // If we rematerialized Ion frames due to debug mode toggling, copy their
1925 // values into the baseline frame. We need to do this even when debug mode
1926 // is off, as we should respect the mutations made while debug mode was
1927 // on.
1928 JitActivation* act = cx->activation()->asJit();
1929 if (act->hasRematerializedFrame(outerFp)) {
1930 JSJitFrameIter iter(act);
1931 size_t inlineDepth = numFrames;
1932 bool ok = true;
1933 while (inlineDepth > 0) {
1934 if (iter.isBaselineJS()) {
1935 // We must attempt to copy all rematerialized frames over,
1936 // even if earlier ones failed, to invoke the proper frame
1937 // cleanup in the Debugger.
1938 if (!CopyFromRematerializedFrame(cx, act, outerFp, --inlineDepth,
1939 iter.baselineFrame())) {
1940 ok = false;
1943 ++iter;
1946 if (!ok) {
1947 return false;
1950 // After copying from all the rematerialized frames, remove them from
1951 // the table to keep the table up to date.
1952 guardRemoveRematerializedFramesFromDebugger.release();
1953 act->removeRematerializedFrame(outerFp);
1956 // If we are unwinding for an exception, we need to unwind scopes.
1957 // See |SettleOnTryNote|.
1958 if (bailoutInfo->faultPC) {
1959 EnvironmentIter ei(cx, topFrame, bailoutInfo->faultPC);
1960 UnwindEnvironment(cx, ei, bailoutInfo->tryPC);
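    // (UnwindEnvironment pops environment objects up to the scope that is
    // live at the try note's pc, so the exception handler resumes with a
    // consistent environment chain.)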
1963 // Check for interrupts now because we might miss an interrupt check in JIT
1964 // code when resuming in the prologue, after the stack/interrupt check.
1965 if (!cx->isExceptionPending()) {
1966 if (!CheckForInterrupt(cx)) {
1967 return false;
1971 BailoutKind bailoutKind = *bailoutInfo->bailoutKind;
1972 JitSpew(JitSpew_BaselineBailouts,
1973 " Restored outerScript=(%s:%u:%u,%u) innerScript=(%s:%u:%u,%u) "
1974 "(bailoutKind=%u)",
1975 outerScript->filename(), outerScript->lineno(),
1976 outerScript->column().oneOriginValue(), outerScript->getWarmUpCount(),
1977 innerScript->filename(), innerScript->lineno(),
1978 innerScript->column().oneOriginValue(), innerScript->getWarmUpCount(),
1979 (unsigned)bailoutKind);
1981 BailoutAction action = BailoutAction::InvalidateImmediately;
1982 DebugOnly<bool> saveFailedICHash = false;
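  // saveFailedICHash is DEBUG-only bookkeeping: when we invalidate because of
  // frequent fixable bailouts, a hash of the ICs that were transpiled into
  // this IonScript is recorded on the JitScript (see setFailedICHash below),
  // which helps detect bailout loops where recompilation would produce the
  // same code again.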
1983 switch (bailoutKind) {
1984 case BailoutKind::TranspiledCacheIR:
1985 // A transpiled guard failed. If this happens often enough, we will
1986 // invalidate and recompile.
1987 action = BailoutAction::InvalidateIfFrequent;
1988 saveFailedICHash = true;
1989 break;
1991 case BailoutKind::MonomorphicInlinedStubFolding:
1992 action = BailoutAction::InvalidateIfFrequent;
1993 saveFailedICHash = true;
1994 if (innerScript != outerScript) {
1995 // In the case where this instruction comes from a monomorphic-inlined
1996 // ICScript, we need to ensure that we note the connection between the
1997 // inner script and the outer script, so that we can properly track if
1998 // we add a new case to the folded stub and avoid invalidating the
1999 // outer script.
2000 cx->zone()->jitZone()->noteStubFoldingBailout(innerScript, outerScript);
2002 break;
2004 case BailoutKind::SpeculativePhi:
2005 // A value of an unexpected type flowed into a phi.
2006 MOZ_ASSERT(!outerScript->hadSpeculativePhiBailout());
2007 if (!outerScript->hasIonScript() ||
2008 outerScript->ionScript()->numFixableBailouts() == 0) {
2009 outerScript->setHadSpeculativePhiBailout();
2011 InvalidateAfterBailout(cx, outerScript, "phi specialization failure");
2012 break;
2014 case BailoutKind::TypePolicy:
2015 // A conversion inserted by a type policy failed.
2016 // We will invalidate and disable recompilation if this happens too often.
2017 action = BailoutAction::DisableIfFrequent;
2018 break;
2020 case BailoutKind::LICM:
2021 // LICM may cause spurious bailouts by hoisting unreachable
2022 // guards past branches. To prevent bailout loops, when an
2023 // instruction hoisted by LICM bails out, we update the
2024 // IonScript and resume in baseline. If the guard would have
2025 // been executed anyway, then we will hit the baseline fallback,
2026 // and call noteBaselineFallback. If that does not happen,
2027 // then the next time we reach this point, we will disable LICM
2028 // for this script.
2029 MOZ_ASSERT(!outerScript->hadLICMInvalidation());
2030 if (outerScript->hasIonScript()) {
2031 switch (outerScript->ionScript()->licmState()) {
2032 case IonScript::LICMState::NeverBailed:
2033 outerScript->ionScript()->setHadLICMBailout();
2034 action = BailoutAction::NoAction;
2035 break;
2036 case IonScript::LICMState::Bailed:
2037 outerScript->setHadLICMInvalidation();
2038 InvalidateAfterBailout(cx, outerScript, "LICM failure");
2039 break;
2040 case IonScript::LICMState::BailedAndHitFallback:
2041 // This bailout is not due to LICM. Treat it like a
2042 // regular TranspiledCacheIR bailout.
2043 action = BailoutAction::InvalidateIfFrequent;
2044 break;
2047 break;
2049 case BailoutKind::InstructionReordering:
2050 // An instruction moved up by instruction reordering bailed out.
2051 outerScript->setHadReorderingBailout();
2052 action = BailoutAction::InvalidateIfFrequent;
2053 break;
2055 case BailoutKind::HoistBoundsCheck:
2056 // An instruction hoisted or generated by tryHoistBoundsCheck bailed out.
2057 MOZ_ASSERT(!outerScript->failedBoundsCheck());
2058 outerScript->setFailedBoundsCheck();
2059 InvalidateAfterBailout(cx, outerScript, "bounds check failure");
2060 break;
2062 case BailoutKind::EagerTruncation:
2063 // An eager truncation generated by range analysis bailed out.
2064 // To avoid bailout loops, we set a flag to avoid generating
2065 // eager truncations next time we recompile.
2066 MOZ_ASSERT(!outerScript->hadEagerTruncationBailout());
2067 outerScript->setHadEagerTruncationBailout();
2068 InvalidateAfterBailout(cx, outerScript, "eager range analysis failure");
2069 break;
2071 case BailoutKind::UnboxFolding:
2072 // An unbox that was hoisted to fold with a load bailed out.
2073 // To avoid bailout loops, we set a flag to avoid folding
2074 // loads with unboxes next time we recompile.
2075 MOZ_ASSERT(!outerScript->hadUnboxFoldingBailout());
2076 outerScript->setHadUnboxFoldingBailout();
2077 InvalidateAfterBailout(cx, outerScript, "unbox folding failure");
2078 break;
2080 case BailoutKind::TooManyArguments:
2081 // A funapply or spread call had more than JIT_ARGS_LENGTH_MAX arguments.
2082 // We will invalidate and disable recompilation if this happens too often.
2083 action = BailoutAction::DisableIfFrequent;
2084 break;
2086 case BailoutKind::DuringVMCall:
2087 if (cx->isExceptionPending()) {
2088 // We are bailing out to catch an exception. We will invalidate
2089 // and disable recompilation if this happens too often.
2090 action = BailoutAction::DisableIfFrequent;
2092 break;
2094 case BailoutKind::Finally:
2095 // We are bailing out for a finally block. We will invalidate
2096 // and disable recompilation if this happens too often.
2097 action = BailoutAction::DisableIfFrequent;
2098 break;
2100 case BailoutKind::Inevitable:
2101 case BailoutKind::Debugger:
2102 // Do nothing.
2103 action = BailoutAction::NoAction;
2104 break;
2106 case BailoutKind::FirstExecution:
2107 // We reached an instruction that had not been executed yet at
2108 // the time we compiled. If this happens often enough, we will
2109 // invalidate and recompile.
2110 action = BailoutAction::InvalidateIfFrequent;
2111 saveFailedICHash = true;
2112 break;
2114 case BailoutKind::UninitializedLexical:
2115 HandleLexicalCheckFailure(cx, outerScript, innerScript);
2116 break;
2118 case BailoutKind::ThrowCheckIsObject:
2119 MOZ_ASSERT(!cx->isExceptionPending());
2120 return ThrowCheckIsObject(cx, CheckIsObjectKind::IteratorReturn);
2122 case BailoutKind::ThrowProxyTrapMustReportSameValue:
2123 case BailoutKind::ThrowProxyTrapMustReportUndefined: {
2124 MOZ_ASSERT(!cx->isExceptionPending());
2125 RootedId rootedId(cx, bailoutInfo->tempId);
2126 ScriptedProxyHandler::reportGetTrapValidationError(
2127 cx, rootedId,
2128 bailoutKind == BailoutKind::ThrowProxyTrapMustReportSameValue
2129 ? ScriptedProxyHandler::GetTrapValidationResult::
2130 MustReportSameValue
2131 : ScriptedProxyHandler::GetTrapValidationResult::
2132 MustReportUndefined);
2133 return false;
2136 case BailoutKind::IonExceptionDebugMode:
2137 // Return false to resume in HandleException with reconstructed
2138 // baseline frame.
2139 return false;
2141 case BailoutKind::OnStackInvalidation:
2142 // The script has already been invalidated. There is nothing left to do.
2143 action = BailoutAction::NoAction;
2144 break;
2146 default:
2147 MOZ_CRASH("Unknown bailout kind!");
2150 #ifdef DEBUG
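  // ionBailAfter is a testing mechanism that forces a bailout after a fixed
  // number of instructions, so bailouts triggered this way should not feed
  // the invalidation heuristics.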
2151 if (MOZ_UNLIKELY(cx->runtime()->jitRuntime()->ionBailAfterEnabled())) {
2152 action = BailoutAction::NoAction;
2154 #endif
2156 if (outerScript->hasIonScript()) {
2157 IonScript* ionScript = outerScript->ionScript();
2158 switch (action) {
2159 case BailoutAction::InvalidateImmediately:
2160 // The IonScript should already have been invalidated.
2161 MOZ_ASSERT(false);
2162 break;
2163 case BailoutAction::InvalidateIfFrequent:
2164 ionScript->incNumFixableBailouts();
2165 if (ionScript->shouldInvalidate()) {
2166 #ifdef DEBUG
2167 if (saveFailedICHash && !JitOptions.disableBailoutLoopCheck) {
2168 outerScript->jitScript()->setFailedICHash(ionScript->icHash());
2170 #endif
2171 InvalidateAfterBailout(cx, outerScript, "fixable bailouts");
2173 break;
2174 case BailoutAction::DisableIfFrequent:
2175 ionScript->incNumUnfixableBailouts();
2176 if (ionScript->shouldInvalidateAndDisable()) {
2177 InvalidateAfterBailout(cx, outerScript, "unfixable bailouts");
2178 outerScript->disableIon();
2180 break;
2181 case BailoutAction::NoAction:
2182 break;
2186 return true;