/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineIC.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/Sprintf.h"

#include "jstypes.h"

#include "builtin/Eval.h"
#include "jit/BaselineCacheIRCompiler.h"
#include "jit/CacheIRGenerator.h"
#include "jit/CacheIRHealth.h"
#include "jit/JitFrames.h"
#include "jit/JitRuntime.h"
#include "jit/JitSpewer.h"
#include "jit/Linker.h"
#include "jit/PerfSpewer.h"
#include "jit/SharedICHelpers.h"
#include "jit/SharedICRegisters.h"
#include "jit/VMFunctions.h"
#include "js/Conversions.h"
#include "js/friend/ErrorMessages.h"  // JSMSG_*
#include "vm/BytecodeIterator.h"
#include "vm/BytecodeLocation.h"
#include "vm/BytecodeUtil.h"
#include "vm/EqualityOperations.h"
#include "vm/JSFunction.h"
#include "vm/JSScript.h"
#include "vm/Opcodes.h"
#ifdef MOZ_VTUNE
#  include "vtune/VTuneWrapper.h"
#endif

#include "jit/MacroAssembler-inl.h"
#include "jit/SharedICHelpers-inl.h"
#include "jit/VMFunctionList-inl.h"
#include "vm/BytecodeIterator-inl.h"
#include "vm/BytecodeLocation-inl.h"
#include "vm/EnvironmentObject-inl.h"
#include "vm/Interpreter-inl.h"
#include "vm/JSScript-inl.h"

using mozilla::DebugOnly;

namespace js {
namespace jit {
// Class used to emit all Baseline IC fallback code when initializing the
// JitRuntime.
class MOZ_RAII FallbackICCodeCompiler final {
  BaselineICFallbackCode& code;
  MacroAssembler& masm;

  JSContext* cx;
  bool inStubFrame_ = false;

#ifdef DEBUG
  bool entersStubFrame_ = false;
  uint32_t framePushedAtEnterStubFrame_ = 0;
#endif

  [[nodiscard]] bool emitCall(bool isSpread, bool isConstructing);
  [[nodiscard]] bool emitGetElem(bool hasReceiver);
  [[nodiscard]] bool emitGetProp(bool hasReceiver);

 public:
  FallbackICCodeCompiler(JSContext* cx, BaselineICFallbackCode& code,
                         MacroAssembler& masm)
      : code(code), masm(masm), cx(cx) {}

#define DEF_METHOD(kind) [[nodiscard]] bool emit_##kind();
  IC_BASELINE_FALLBACK_CODE_KIND_LIST(DEF_METHOD)
#undef DEF_METHOD

  void pushCallArguments(MacroAssembler& masm,
                         AllocatableGeneralRegisterSet regs, Register argcReg,
                         bool isConstructing);

  // Push a payload specialized per compiler needed to execute stubs.
  void PushStubPayload(MacroAssembler& masm, Register scratch);
  void pushStubPayload(MacroAssembler& masm, Register scratch);

  // Emits a tail call to a VMFunction wrapper.
  [[nodiscard]] bool tailCallVMInternal(MacroAssembler& masm, VMFunctionId id);

  template <typename Fn, Fn fn>
  [[nodiscard]] bool tailCallVM(MacroAssembler& masm);

  // Emits a normal (non-tail) call to a VMFunction wrapper.
  [[nodiscard]] bool callVMInternal(MacroAssembler& masm, VMFunctionId id);

  template <typename Fn, Fn fn>
  [[nodiscard]] bool callVM(MacroAssembler& masm);

  // A stub frame is used when a stub wants to call into the VM without
  // performing a tail call. This is required for the return address
  // to pc mapping to work.
  void enterStubFrame(MacroAssembler& masm, Register scratch);
  void assumeStubFrame();
  void leaveStubFrame(MacroAssembler& masm);
};
AllocatableGeneralRegisterSet BaselineICAvailableGeneralRegs(size_t numInputs) {
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
#if defined(JS_CODEGEN_ARM)
  MOZ_ASSERT(!regs.has(ICTailCallReg));
  regs.take(BaselineSecondScratchReg);
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
  MOZ_ASSERT(!regs.has(ICTailCallReg));
  MOZ_ASSERT(!regs.has(BaselineSecondScratchReg));
#elif defined(JS_CODEGEN_ARM64)
  MOZ_ASSERT(!regs.has(PseudoStackPointer));
  MOZ_ASSERT(!regs.has(RealStackPointer));
  MOZ_ASSERT(!regs.has(ICTailCallReg));
#endif
  regs.take(ICStubReg);

  switch (numInputs) {
    case 0:
      break;
    case 1:
      regs.take(R0);
      break;
    case 2:
      regs.take(R0);
      regs.take(R1);
      break;
    default:
      MOZ_CRASH("Invalid numInputs");
  }

  return regs;
}
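// Fallback stubs record only the bytecode offset of their op; combined with
// the script this recovers the jsbytecode* used for spew and IR generation
// below.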
static jsbytecode* StubOffsetToPc(const ICFallbackStub* stub,
                                  const JSScript* script) {
  return script->offsetToPC(stub->pcOffset());
}

#ifdef JS_JITSPEW
void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...) {
  if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
    RootedScript script(cx, GetTopJitJSScript(cx));
    jsbytecode* pc = StubOffsetToPc(stub, script);

    char fmtbuf[100];
    va_list args;
    va_start(args, fmt);
    (void)VsprintfLiteral(fmtbuf, fmt, args);
    va_end(args);

    JitSpew(
        JitSpew_BaselineICFallback,
        "Fallback hit for (%s:%u:%u) (pc=%zu,line=%u,uses=%u,stubs=%zu): %s",
        script->filename(), script->lineno(), script->column().oneOriginValue(),
        script->pcToOffset(pc), PCToLineNumber(script, pc),
        script->getWarmUpCount(), stub->numOptimizedStubs(), fmtbuf);
  }
}
#endif  // JS_JITSPEW
void ICEntry::trace(JSTracer* trc) {
  ICStub* stub = firstStub();

  // Trace CacheIR stubs.
  while (!stub->isFallback()) {
    stub->toCacheIRStub()->trace(trc);
    stub = stub->toCacheIRStub()->next();
  }

  // Fallback stubs use runtime-wide trampoline code we don't need to trace.
  MOZ_ASSERT(stub->usesTrampolineCode());
}

inline ICFallbackStub* GetFallbackStub(ICEntry* entry) {
  ICStub* stub = entry->firstStub();
  while (!stub->isFallback()) {
    stub = stub->toCacheIRStub()->next();
  }
  return stub->toFallbackStub();
}
bool ICEntry::traceWeak(JSTracer* trc) {
  // Trace CacheIR stubs and remove those containing weak pointers to dead GC
  // things. Prebarriers are not necessary because this happens as part of GC.

  ICFallbackStub* fallbackStub = GetFallbackStub(this);

  ICStub* stub = firstStub();
  ICCacheIRStub* prev = nullptr;
  bool allSurvived = true;
  while (!stub->isFallback()) {
    ICCacheIRStub* cacheIRStub = stub->toCacheIRStub();
    if (!cacheIRStub->traceWeak(trc)) {
      fallbackStub->unlinkStubUnbarriered(this, prev, cacheIRStub);
      allSurvived = false;
    } else {
      prev = cacheIRStub;
    }

    stub = cacheIRStub->next();
    MOZ_ASSERT_IF(prev, prev->next() == stub);
  }

  // Clear the folded stubs flag if we know for sure that there are none
  // left. The flag will remain set if we have removed all folded stubs but
  // other stubs remain.
  if (fallbackStub->numOptimizedStubs() == 0 &&
      fallbackStub->mayHaveFoldedStub()) {
    fallbackStub->clearMayHaveFoldedStub();
  }

#ifdef DEBUG
  size_t count = 0;
  for (ICStub* stub = firstStub(); stub != fallbackStub;
       stub = stub->toCacheIRStub()->next()) {
    count++;
  }
  MOZ_ASSERT(count == fallbackStub->state().numOptimizedStubs());
#endif

  return allSurvived;
}
// constexpr table mapping JSOp to BaselineICFallbackKind. Each value in the
// table is either a fallback kind or a sentinel value (NoICValue) indicating
// the JSOp is not a JOF_IC op.
class MOZ_STATIC_CLASS OpToFallbackKindTable {
  static_assert(sizeof(BaselineICFallbackKind) == sizeof(uint8_t));
  uint8_t table_[JSOP_LIMIT] = {};

  constexpr void setKind(JSOp op, BaselineICFallbackKind kind) {
    MOZ_ASSERT(uint8_t(kind) != NoICValue);
    table_[size_t(op)] = uint8_t(kind);
  }

 public:
  static constexpr uint8_t NoICValue = uint8_t(BaselineICFallbackKind::Count);

  uint8_t lookup(JSOp op) const { return table_[size_t(op)]; }

  constexpr OpToFallbackKindTable() {
    for (size_t i = 0; i < JSOP_LIMIT; i++) {
      table_[i] = NoICValue;
    }

    setKind(JSOp::Not, BaselineICFallbackKind::ToBool);
    setKind(JSOp::And, BaselineICFallbackKind::ToBool);
    setKind(JSOp::Or, BaselineICFallbackKind::ToBool);
    setKind(JSOp::JumpIfTrue, BaselineICFallbackKind::ToBool);
    setKind(JSOp::JumpIfFalse, BaselineICFallbackKind::ToBool);

    setKind(JSOp::BitNot, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Pos, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Neg, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Inc, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Dec, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::ToNumeric, BaselineICFallbackKind::UnaryArith);

    setKind(JSOp::BitOr, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::BitXor, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::BitAnd, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Lsh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Rsh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Ursh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Add, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Sub, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Mul, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Div, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Mod, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Pow, BaselineICFallbackKind::BinaryArith);

    setKind(JSOp::Eq, BaselineICFallbackKind::Compare);
    setKind(JSOp::Ne, BaselineICFallbackKind::Compare);
    setKind(JSOp::Lt, BaselineICFallbackKind::Compare);
    setKind(JSOp::Le, BaselineICFallbackKind::Compare);
    setKind(JSOp::Gt, BaselineICFallbackKind::Compare);
    setKind(JSOp::Ge, BaselineICFallbackKind::Compare);
    setKind(JSOp::StrictEq, BaselineICFallbackKind::Compare);
    setKind(JSOp::StrictNe, BaselineICFallbackKind::Compare);

    setKind(JSOp::NewArray, BaselineICFallbackKind::NewArray);

    setKind(JSOp::NewObject, BaselineICFallbackKind::NewObject);
    setKind(JSOp::NewInit, BaselineICFallbackKind::NewObject);

    setKind(JSOp::InitElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitHiddenElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitLockedElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitElemInc, BaselineICFallbackKind::SetElem);
    setKind(JSOp::SetElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::StrictSetElem, BaselineICFallbackKind::SetElem);

    setKind(JSOp::InitProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitLockedProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitHiddenProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitGLexical, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetGName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetGName, BaselineICFallbackKind::SetProp);

    setKind(JSOp::GetProp, BaselineICFallbackKind::GetProp);
    setKind(JSOp::GetBoundName, BaselineICFallbackKind::GetProp);

    setKind(JSOp::GetPropSuper, BaselineICFallbackKind::GetPropSuper);

    setKind(JSOp::GetElem, BaselineICFallbackKind::GetElem);

    setKind(JSOp::GetElemSuper, BaselineICFallbackKind::GetElemSuper);

    setKind(JSOp::In, BaselineICFallbackKind::In);

    setKind(JSOp::HasOwn, BaselineICFallbackKind::HasOwn);

    setKind(JSOp::CheckPrivateField, BaselineICFallbackKind::CheckPrivateField);

    setKind(JSOp::GetName, BaselineICFallbackKind::GetName);
    setKind(JSOp::GetGName, BaselineICFallbackKind::GetName);

    setKind(JSOp::BindName, BaselineICFallbackKind::BindName);
    setKind(JSOp::BindGName, BaselineICFallbackKind::BindName);

    setKind(JSOp::GetIntrinsic, BaselineICFallbackKind::GetIntrinsic);

    setKind(JSOp::Call, BaselineICFallbackKind::Call);
    setKind(JSOp::CallContent, BaselineICFallbackKind::Call);
    setKind(JSOp::CallIgnoresRv, BaselineICFallbackKind::Call);
    setKind(JSOp::CallIter, BaselineICFallbackKind::Call);
    setKind(JSOp::CallContentIter, BaselineICFallbackKind::Call);
    setKind(JSOp::Eval, BaselineICFallbackKind::Call);
    setKind(JSOp::StrictEval, BaselineICFallbackKind::Call);

    setKind(JSOp::SuperCall, BaselineICFallbackKind::CallConstructing);
    setKind(JSOp::New, BaselineICFallbackKind::CallConstructing);
    setKind(JSOp::NewContent, BaselineICFallbackKind::CallConstructing);

    setKind(JSOp::SpreadCall, BaselineICFallbackKind::SpreadCall);
    setKind(JSOp::SpreadEval, BaselineICFallbackKind::SpreadCall);
    setKind(JSOp::StrictSpreadEval, BaselineICFallbackKind::SpreadCall);

    setKind(JSOp::SpreadSuperCall,
            BaselineICFallbackKind::SpreadCallConstructing);
    setKind(JSOp::SpreadNew, BaselineICFallbackKind::SpreadCallConstructing);

    setKind(JSOp::Instanceof, BaselineICFallbackKind::InstanceOf);

    setKind(JSOp::Typeof, BaselineICFallbackKind::TypeOf);
    setKind(JSOp::TypeofExpr, BaselineICFallbackKind::TypeOf);

    setKind(JSOp::ToPropertyKey, BaselineICFallbackKind::ToPropertyKey);

    setKind(JSOp::Iter, BaselineICFallbackKind::GetIterator);

    setKind(JSOp::OptimizeSpreadCall,
            BaselineICFallbackKind::OptimizeSpreadCall);

    setKind(JSOp::Rest, BaselineICFallbackKind::Rest);

    setKind(JSOp::CloseIter, BaselineICFallbackKind::CloseIter);

    setKind(JSOp::OptimizeGetIterator,
            BaselineICFallbackKind::OptimizeGetIterator);
  }
};

static constexpr OpToFallbackKindTable FallbackKindTable;
void ICScript::initICEntries(JSContext* cx, JSScript* script) {
  MOZ_ASSERT(cx->zone()->jitZone());
  MOZ_ASSERT(jit::IsBaselineInterpreterEnabled() ||
             jit::IsPortableBaselineInterpreterEnabled());

  MOZ_ASSERT(numICEntries() == script->numICEntries());

  // Index of the next ICEntry to initialize.
  uint32_t icEntryIndex = 0;

  const BaselineICFallbackCode& fallbackCode =
      cx->runtime()->jitRuntime()->baselineICFallbackCode();

  // For JOF_IC ops: initialize ICEntries and fallback stubs.
  for (BytecodeLocation loc : js::AllBytecodesIterable(script)) {
    JSOp op = loc.getOp();

    // Assert the frontend stored the correct IC index in jump target ops.
    MOZ_ASSERT_IF(BytecodeIsJumpTarget(op), loc.icIndex() == icEntryIndex);

    uint8_t tableValue = FallbackKindTable.lookup(op);

    if (tableValue == OpToFallbackKindTable::NoICValue) {
      MOZ_ASSERT(!BytecodeOpHasIC(op),
                 "Missing entry in OpToFallbackKindTable for JOF_IC op");
      continue;
    }

    MOZ_ASSERT(BytecodeOpHasIC(op),
               "Unexpected fallback kind for non-JOF_IC op");

    BaselineICFallbackKind kind = BaselineICFallbackKind(tableValue);
    TrampolinePtr stubCode = !jit::IsPortableBaselineInterpreterEnabled()
                                 ? fallbackCode.addr(kind)
                                 : TrampolinePtr();

    // Initialize the ICEntry and ICFallbackStub.
    uint32_t offset = loc.bytecodeToOffset(script);
    ICEntry& entryRef = this->icEntry(icEntryIndex);
    ICFallbackStub* stub = fallbackStub(icEntryIndex);
    icEntryIndex++;
    new (&entryRef) ICEntry(stub);
    new (stub) ICFallbackStub(offset, stubCode);
  }

  // Assert all ICEntries have been initialized.
  MOZ_ASSERT(icEntryIndex == numICEntries());
}
bool ICSupportsPolymorphicTypeData(JSOp op) {
  MOZ_ASSERT(BytecodeOpHasIC(op));
  BaselineICFallbackKind kind =
      BaselineICFallbackKind(FallbackKindTable.lookup(op));
  switch (kind) {
    case BaselineICFallbackKind::ToBool:
    case BaselineICFallbackKind::TypeOf:
      return true;
    default:
      return false;
  }
}

bool ICCacheIRStub::makesGCCalls() const { return stubInfo()->makesGCCalls(); }

void ICFallbackStub::trackNotAttached() { state().trackNotAttached(); }
// When we enter a baseline fallback stub, if a Warp compilation
// exists that transpiled that IC, we notify that compilation. This
// helps the bailout code tell whether a bailing instruction hoisted
// by LICM would have been executed anyway.
static void MaybeNotifyWarp(JSScript* script, ICFallbackStub* stub) {
  if (stub->state().usedByTranspiler() && script->hasIonScript()) {
    script->ionScript()->noteBaselineFallback();
  }
}

void ICCacheIRStub::trace(JSTracer* trc) {
  if (hasJitCode()) {
    JitCode* stubJitCode = jitCode();
    TraceManuallyBarrieredEdge(trc, &stubJitCode, "baseline-ic-stub-code");
  }

  TraceCacheIRStub(trc, this, stubInfo());
}

bool ICCacheIRStub::traceWeak(JSTracer* trc) {
  return TraceWeakCacheIRStub(trc, this, stubInfo());
}
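// If this fallback stub's chain has become unproductive, try folding similar
// stubs and, if the ICState decides to transition (for example to Megamorphic
// or Generic), discard the attached CacheIR stubs. With JS_CACHEIR_SPEW
// enabled this can also emit a CacheIR health report for the IC.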
static void MaybeTransition(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub) {
  if (stub->state().shouldTransition()) {
    if (!TryFoldingStubs(cx, stub, frame->script(), frame->icScript())) {
      cx->recoverFromOutOfMemory();
    }
    if (stub->state().maybeTransition()) {
      ICEntry* icEntry = frame->icScript()->icEntryForStub(stub);
#ifdef JS_CACHEIR_SPEW
      if (cx->spewer().enabled(cx, frame->script(),
                               SpewChannel::CacheIRHealthReport)) {
        CacheIRHealth cih;
        RootedScript script(cx, frame->script());
        cih.healthReportForIC(cx, icEntry, stub, script,
                              SpewContext::Transition);
      }
#endif
      stub->discardStubs(cx->zone(), icEntry);
    }
  }
}
// This helper handles ICState updates/transitions while attaching CacheIR
// stubs.
template <typename IRGenerator, typename... Args>
static void TryAttachStub(const char* name, JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, Args&&... args) {
  MaybeTransition(cx, frame, stub);

  if (stub->state().canAttachStub()) {
    RootedScript script(cx, frame->script());
    ICScript* icScript = frame->icScript();
    jsbytecode* pc = StubOffsetToPc(stub, script);
    bool attached = false;
    IRGenerator gen(cx, script, pc, stub->state(), std::forward<Args>(args)...);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICAttachResult result =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      script, icScript, stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached %s CacheIR stub", name);
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("Not expected in generic TryAttachStub");
        break;
    }
    if (!attached) {
      stub->trackNotAttached();
    }
  }
}
void ICFallbackStub::unlinkStub(Zone* zone, ICEntry* icEntry,
                                ICCacheIRStub* prev, ICCacheIRStub* stub) {
  // We are removing edges from ICStub to gcthings. Perform a barrier to let the
  // GC know about those edges.
  PreWriteBarrier(zone, stub);

  unlinkStubUnbarriered(icEntry, prev, stub);
}

void ICFallbackStub::unlinkStubUnbarriered(ICEntry* icEntry,
                                           ICCacheIRStub* prev,
                                           ICCacheIRStub* stub) {
  if (prev) {
    MOZ_ASSERT(prev->next() == stub);
    prev->setNext(stub->next());
  } else {
    MOZ_ASSERT(icEntry->firstStub() == stub);
    icEntry->setFirstStub(stub->next());
  }

  state_.trackUnlinkedStub();

#ifdef DEBUG
  // Poison stub code to ensure we don't call this stub again. However, if
  // this stub can make calls, a pointer to it may be stored in a stub frame
  // on the stack, so we can't touch the stubCode_ or GC will crash when
  // tracing this pointer.
  if (!stub->makesGCCalls()) {
    stub->stubCode_ = (uint8_t*)0xbad;
  }
#endif
}

void ICFallbackStub::discardStubs(Zone* zone, ICEntry* icEntry) {
  ICStub* stub = icEntry->firstStub();
  while (stub != this) {
    unlinkStub(zone, icEntry, /* prev = */ nullptr, stub->toCacheIRStub());
    stub = stub->toCacheIRStub()->next();
  }
  clearMayHaveFoldedStub();
}
static void InitMacroAssemblerForICStub(StackMacroAssembler& masm) {
#ifndef JS_USE_LINK_REGISTER
  // The first value contains the return address,
  // which we pull into ICTailCallReg for tail calls.
  masm.adjustFrame(sizeof(intptr_t));
#endif
#ifdef JS_CODEGEN_ARM
  masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif
}

bool FallbackICCodeCompiler::tailCallVMInternal(MacroAssembler& masm,
                                                VMFunctionId id) {
  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
  const VMFunctionData& fun = GetVMFunction(id);
  uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
  EmitBaselineTailCallVM(code, masm, argSize);
  return true;
}

bool FallbackICCodeCompiler::callVMInternal(MacroAssembler& masm,
                                            VMFunctionId id) {
  MOZ_ASSERT(inStubFrame_);

  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);

  EmitBaselineCallVM(code, masm);
  return true;
}

template <typename Fn, Fn fn>
bool FallbackICCodeCompiler::callVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  return callVMInternal(masm, id);
}

template <typename Fn, Fn fn>
bool FallbackICCodeCompiler::tailCallVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  return tailCallVMInternal(masm, id);
}
void FallbackICCodeCompiler::enterStubFrame(MacroAssembler& masm,
                                            Register scratch) {
  EmitBaselineEnterStubFrame(masm, scratch);
#ifdef DEBUG
  framePushedAtEnterStubFrame_ = masm.framePushed();
#endif

  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;
#endif
}

void FallbackICCodeCompiler::assumeStubFrame() {
  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;

  // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
  // be the stub frame layout and the pushed ICStub* so that assertions don't
  // fail in leaveStubFrame.
  framePushedAtEnterStubFrame_ =
      BaselineStubFrameLayout::Size() + sizeof(ICStub*);
#endif
}

void FallbackICCodeCompiler::leaveStubFrame(MacroAssembler& masm) {
  MOZ_ASSERT(entersStubFrame_ && inStubFrame_);
  inStubFrame_ = false;

#ifdef DEBUG
  masm.setFramePushed(framePushedAtEnterStubFrame_);
#endif
  EmitBaselineLeaveStubFrame(masm);
}
void FallbackICCodeCompiler::pushStubPayload(MacroAssembler& masm,
                                             Register scratch) {
  if (inStubFrame_) {
    masm.loadPtr(Address(FramePointer, 0), scratch);
    masm.pushBaselineFramePtr(scratch, scratch);
  } else {
    masm.pushBaselineFramePtr(FramePointer, scratch);
  }
}
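// Unlike pushStubPayload above, this variant also updates the masm frame-size
// bookkeeping (adjustFrame) for the word pushed here.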
void FallbackICCodeCompiler::PushStubPayload(MacroAssembler& masm,
                                             Register scratch) {
  pushStubPayload(masm, scratch);
  masm.adjustFrame(sizeof(intptr_t));
}

//
// ToBool_Fallback
//
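// The Do*Fallback functions below share a common shape: bump the stub's
// entered count, notify a transpiled Warp compilation if there is one, try to
// attach a CacheIR stub for the observed operands, then perform the generic
// VM operation.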
bool DoToBoolFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                      HandleValue arg, MutableHandleValue ret) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "ToBool");

  TryAttachStub<ToBoolIRGenerator>("ToBool", cx, frame, stub, arg);

  bool cond = ToBoolean(arg);
  ret.setBoolean(cond);

  return true;
}
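// The emit_* methods generate the shared fallback trampolines. The IC's value
// inputs arrive in R0/R1, ICStubReg holds the ICFallbackStub*, and the
// trampoline typically tail-calls the corresponding Do*Fallback VM function.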
bool FallbackICCodeCompiler::emit_ToBool() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Push arguments.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoToBoolFallback>(masm);
}

//
// GetElem_Fallback
//
bool DoGetElemFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, HandleValue lhs, HandleValue rhs,
                       MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "GetElem");

#ifdef DEBUG
  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  MOZ_ASSERT(JSOp(*pc) == JSOp::GetElem);
#endif

  TryAttachStub<GetPropIRGenerator>("GetElem", cx, frame, stub,
                                    CacheKind::GetElem, lhs, rhs);

  if (!GetElementOperation(cx, lhs, rhs, res)) {
    return false;
  }

  return true;
}
bool DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub, HandleValue lhs,
                            HandleValue rhs, HandleValue receiver,
                            MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());

  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetElemSuper(%s)", CodeName(op));

  MOZ_ASSERT(op == JSOp::GetElemSuper);

  // |lhs| is [[HomeObject]].[[Prototype]] which must be an Object or null.
  MOZ_ASSERT(lhs.isObjectOrNull());

  int lhsIndex = -1;
  RootedObject lhsObj(
      cx, ToObjectFromStackForPropertyAccess(cx, lhs, lhsIndex, rhs));
  if (!lhsObj) {
    return false;
  }

  TryAttachStub<GetPropIRGenerator>("GetElemSuper", cx, frame, stub,
                                    CacheKind::GetElemSuper, lhs, rhs);

  return GetObjectElementOperation(cx, op, lhsObj, receiver, rhs, res);
}
bool FallbackICCodeCompiler::emitGetElem(bool hasReceiver) {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Super property getters use a |this| that differs from the base object.
  if (hasReceiver) {
    // State: receiver in R0, index in R1, obj on the stack.

    // Ensure stack is fully synced for the expression decompiler.
    // We need: receiver, index, obj.
    masm.pushValue(R0);
    masm.pushValue(R1);
    masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 2));

    // Push arguments.
    masm.pushValue(R0);  // Receiver
    masm.pushValue(R1);  // Index
    masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 5));  // Obj
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn =
        bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                 HandleValue, HandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetElemSuperFallback>(masm)) {
      return false;
    }
  } else {
    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*,
                        HandleValue, HandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetElemFallback>(masm)) {
      return false;
    }
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
  assumeStubFrame();
  if (hasReceiver) {
    code.initBailoutReturnOffset(BailoutReturnKind::GetElemSuper,
                                 masm.currentOffset());
  } else {
    code.initBailoutReturnOffset(BailoutReturnKind::GetElem,
                                 masm.currentOffset());
  }

  leaveStubFrame(masm);

  EmitReturnFromIC(masm);
  return true;
}

bool FallbackICCodeCompiler::emit_GetElem() {
  return emitGetElem(/* hasReceiver = */ false);
}

bool FallbackICCodeCompiler::emit_GetElemSuper() {
  return emitGetElem(/* hasReceiver = */ true);
}
bool DoSetElemFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, Value* stack, HandleValue objv,
                       HandleValue index, HandleValue rhs) {
  using DeferType = SetPropIRGenerator::DeferType;

  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  RootedScript outerScript(cx, script);
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "SetElem(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(op == JSOp::SetElem || op == JSOp::StrictSetElem ||
             op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
             op == JSOp::InitLockedElem || op == JSOp::InitElemInc);

  int objvIndex = -3;
  RootedObject obj(
      cx, ToObjectFromStackForPropertyAccess(cx, objv, objvIndex, index));
  if (!obj) {
    return false;
  }

  Rooted<Shape*> oldShape(cx, obj->shape());

  DeferType deferType = DeferType::None;
  bool attached = false;

  MaybeTransition(cx, frame, stub);

  if (stub->state().canAttachStub()) {
    ICScript* icScript = frame->icScript();
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state(),
                           objv, index, rhs);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
        attached = true;
        break;
      case AttachDecision::Deferred:
        deferType = gen.deferType();
        MOZ_ASSERT(deferType != DeferType::None);
        break;
    }
  }

  if (op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
      op == JSOp::InitLockedElem) {
    if (!InitElemOperation(cx, pc, obj, index, rhs)) {
      return false;
    }
  } else if (op == JSOp::InitElemInc) {
    if (!InitElemIncOperation(cx, obj.as<ArrayObject>(), index.toInt32(),
                              rhs)) {
      return false;
    }
  } else {
    if (!SetObjectElementWithReceiver(cx, obj, index, rhs, objv,
                                      JSOp(*pc) == JSOp::StrictSetElem)) {
      return false;
    }
  }

  if (stack) {
    // Overwrite the object on the stack (pushed for the decompiler) with the
    // rhs.
    MOZ_ASSERT(stack[2] == objv);
    stack[2] = rhs;
  }

  if (attached) {
    return true;
  }

  // The SetObjectElement call might have entered this IC recursively, so try
  // to transition.
  MaybeTransition(cx, frame, stub);

  bool canAttachStub = stub->state().canAttachStub();

  if (deferType != DeferType::None && canAttachStub) {
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state(),
                           objv, index, rhs);

    MOZ_ASSERT(deferType == DeferType::AddSlot);
    AttachDecision decision = gen.tryAttachAddSlotStub(oldShape);

    switch (decision) {
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        gen.trackAttached(IRGenerator::NotAttached);
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("Invalid attach result");
        break;
    }
  }
  if (!attached && canAttachStub) {
    stub->trackNotAttached();
  }
  return true;
}
bool FallbackICCodeCompiler::emit_SetElem() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // State: R0: object, R1: index, stack: rhs.
  // For the decompiler, the stack has to be: object, index, rhs,
  // so we push the index, then overwrite the rhs Value with R0
  // and push the rhs value.
  masm.pushValue(R1);
  masm.loadValue(Address(masm.getStackPointer(), sizeof(Value)), R1);
  masm.storeValue(R0, Address(masm.getStackPointer(), sizeof(Value)));
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);  // RHS

  // Push index. On x86 and ARM two push instructions are emitted, so use a
  // separate register to store the old stack pointer.
  masm.moveStackPtrTo(R1.scratchReg());
  masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
  masm.pushValue(R0);  // Object.

  // Push pointer to stack values, so that the stub can overwrite the object
  // (pushed for the decompiler) with the rhs.
  masm.computeEffectiveAddress(
      Address(masm.getStackPointer(), 3 * sizeof(Value)), R0.scratchReg());
  masm.push(R0.scratchReg());

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*,
                      HandleValue, HandleValue, HandleValue);
  return tailCallVM<Fn, DoSetElemFallback>(masm);
}

//
// In_Fallback
//
bool DoInFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                  HandleValue key, HandleValue objValue,
                  MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "In");

  if (!objValue.isObject()) {
    ReportInNotObjectError(cx, key, objValue);
    return false;
  }

  TryAttachStub<HasPropIRGenerator>("In", cx, frame, stub, CacheKind::In, key,
                                    objValue);

  RootedObject obj(cx, &objValue.toObject());
  bool cond = false;
  if (!OperatorIn(cx, key, obj, &cond)) {
    return false;
  }
  res.setBoolean(cond);

  return true;
}

bool FallbackICCodeCompiler::emit_In() {
  EmitRestoreTailCallReg(masm);

  // Sync for the decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoInFallback>(masm);
}

//
// HasOwn_Fallback
//
bool DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                      HandleValue keyValue, HandleValue objValue,
                      MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "HasOwn");

  TryAttachStub<HasPropIRGenerator>("HasOwn", cx, frame, stub,
                                    CacheKind::HasOwn, keyValue, objValue);

  bool found;
  if (!HasOwnProperty(cx, objValue, keyValue, &found)) {
    return false;
  }

  res.setBoolean(found);
  return true;
}

bool FallbackICCodeCompiler::emit_HasOwn() {
  EmitRestoreTailCallReg(masm);

  // Sync for the decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoHasOwnFallback>(masm);
}

//
// CheckPrivate_Fallback
//
bool DoCheckPrivateFieldFallback(JSContext* cx, BaselineFrame* frame,
                                 ICFallbackStub* stub, HandleValue objValue,
                                 HandleValue keyValue, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());

  FallbackICSpew(cx, stub, "CheckPrivateField");

  MOZ_ASSERT(keyValue.isSymbol() && keyValue.toSymbol()->isPrivateName());

  TryAttachStub<CheckPrivateFieldIRGenerator>("CheckPrivate", cx, frame, stub,
                                              CacheKind::CheckPrivateField,
                                              keyValue, objValue);

  bool result;
  if (!CheckPrivateFieldOperation(cx, pc, objValue, keyValue, &result)) {
    return false;
  }

  res.setBoolean(result);
  return true;
}

bool FallbackICCodeCompiler::emit_CheckPrivateField() {
  EmitRestoreTailCallReg(masm);

  // Sync for the decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoCheckPrivateFieldFallback>(masm);
}

//
// GetName_Fallback
//
bool DoGetNameFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, HandleObject envChain,
                       MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  mozilla::DebugOnly<JSOp> op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetName(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(op == JSOp::GetName || op == JSOp::GetGName);

  Rooted<PropertyName*> name(cx, script->getName(pc));

  TryAttachStub<GetNameIRGenerator>("GetName", cx, frame, stub, envChain, name);

  static_assert(JSOpLength_GetGName == JSOpLength_GetName,
                "Otherwise our check for JSOp::Typeof isn't ok");
  if (JSOp(pc[JSOpLength_GetGName]) == JSOp::Typeof) {
    if (!GetEnvironmentName<GetNameMode::TypeOf>(cx, envChain, name, res)) {
      return false;
    }
  } else {
    if (!GetEnvironmentName<GetNameMode::Normal>(cx, envChain, name, res)) {
      return false;
    }
  }

  return true;
}

bool FallbackICCodeCompiler::emit_GetName() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  masm.push(R0.scratchReg());
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject,
                      MutableHandleValue);
  return tailCallVM<Fn, DoGetNameFallback>(masm);
}

//
// BindName_Fallback
//
bool DoBindNameFallback(JSContext* cx, BaselineFrame* frame,
                        ICFallbackStub* stub, HandleObject envChain,
                        MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  mozilla::DebugOnly<JSOp> op = JSOp(*pc);
  FallbackICSpew(cx, stub, "BindName(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(op == JSOp::BindName || op == JSOp::BindGName);

  Rooted<PropertyName*> name(cx, frame->script()->getName(pc));

  TryAttachStub<BindNameIRGenerator>("BindName", cx, frame, stub, envChain,
                                     name);

  RootedObject scope(cx);
  if (!LookupNameUnqualified(cx, name, envChain, &scope)) {
    return false;
  }

  res.setObject(*scope);
  return true;
}

bool FallbackICCodeCompiler::emit_BindName() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  masm.push(R0.scratchReg());
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject,
                      MutableHandleValue);
  return tailCallVM<Fn, DoBindNameFallback>(masm);
}

//
// GetIntrinsic_Fallback
//
bool DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  mozilla::DebugOnly<JSOp> op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetIntrinsic(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(op == JSOp::GetIntrinsic);

  if (!GetIntrinsicOperation(cx, script, pc, res)) {
    return false;
  }

  TryAttachStub<GetIntrinsicIRGenerator>("GetIntrinsic", cx, frame, stub, res);

  return true;
}

bool FallbackICCodeCompiler::emit_GetIntrinsic() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoGetIntrinsicFallback>(masm);
}

//
// GetProp_Fallback
//
bool DoGetPropFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, MutableHandleValue val,
                       MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetProp(%s)", CodeName(op));

  MOZ_ASSERT(op == JSOp::GetProp || op == JSOp::GetBoundName);

  Rooted<PropertyName*> name(cx, script->getName(pc));
  RootedValue idVal(cx, StringValue(name));

  TryAttachStub<GetPropIRGenerator>("GetProp", cx, frame, stub,
                                    CacheKind::GetProp, val, idVal);

  if (op == JSOp::GetBoundName) {
    RootedObject env(cx, &val.toObject());
    RootedId id(cx, NameToId(name));
    return GetNameBoundInEnvironment(cx, env, id, res);
  }

  MOZ_ASSERT(op == JSOp::GetProp);
  if (!GetProperty(cx, val, name, res)) {
    return false;
  }

  return true;
}

bool DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub, HandleValue receiver,
                            MutableHandleValue val, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(JSOp(*pc) == JSOp::GetPropSuper);

  Rooted<PropertyName*> name(cx, script->getName(pc));
  RootedValue idVal(cx, StringValue(name));

  // |val| is [[HomeObject]].[[Prototype]] which must be an Object or null.
  MOZ_ASSERT(val.isObjectOrNull());

  int valIndex = -1;
  RootedObject valObj(
      cx, ToObjectFromStackForPropertyAccess(cx, val, valIndex, name));
  if (!valObj) {
    return false;
  }

  TryAttachStub<GetPropIRGenerator>("GetPropSuper", cx, frame, stub,
                                    CacheKind::GetPropSuper, val, idVal);

  if (!GetProperty(cx, valObj, receiver, name, res)) {
    return false;
  }

  return true;
}
bool FallbackICCodeCompiler::emitGetProp(bool hasReceiver) {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // Super property getters use a |this| that differs from the base object.
  if (hasReceiver) {
    // Push arguments.
    masm.pushValue(R0);
    masm.pushValue(R1);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*,
                        HandleValue, MutableHandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetPropSuperFallback>(masm)) {
      return false;
    }
  } else {
    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*,
                        MutableHandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetPropFallback>(masm)) {
      return false;
    }
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
  assumeStubFrame();
  if (hasReceiver) {
    code.initBailoutReturnOffset(BailoutReturnKind::GetPropSuper,
                                 masm.currentOffset());
  } else {
    code.initBailoutReturnOffset(BailoutReturnKind::GetProp,
                                 masm.currentOffset());
  }

  leaveStubFrame(masm);

  EmitReturnFromIC(masm);
  return true;
}

bool FallbackICCodeCompiler::emit_GetProp() {
  return emitGetProp(/* hasReceiver = */ false);
}

bool FallbackICCodeCompiler::emit_GetPropSuper() {
  return emitGetProp(/* hasReceiver = */ true);
}

//
// SetProp_Fallback
//
bool DoSetPropFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, Value* stack, HandleValue lhs,
                       HandleValue rhs) {
  using DeferType = SetPropIRGenerator::DeferType;

  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "SetProp(%s)", CodeName(op));

  MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp ||
             op == JSOp::SetName || op == JSOp::StrictSetName ||
             op == JSOp::SetGName || op == JSOp::StrictSetGName ||
             op == JSOp::InitProp || op == JSOp::InitLockedProp ||
             op == JSOp::InitHiddenProp || op == JSOp::InitGLexical);

  Rooted<PropertyName*> name(cx, script->getName(pc));
  RootedId id(cx, NameToId(name));

  int lhsIndex = stack ? -2 : JSDVG_IGNORE_STACK;
  RootedObject obj(cx,
                   ToObjectFromStackForPropertyAccess(cx, lhs, lhsIndex, id));
  if (!obj) {
    return false;
  }
  Rooted<Shape*> oldShape(cx, obj->shape());

  DeferType deferType = DeferType::None;
  bool attached = false;
  MaybeTransition(cx, frame, stub);

  if (stub->state().canAttachStub()) {
    RootedValue idVal(cx, StringValue(name));
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state(),
                           lhs, idVal, rhs);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
        attached = true;
        break;
      case AttachDecision::Deferred:
        deferType = gen.deferType();
        MOZ_ASSERT(deferType != DeferType::None);
        break;
    }
  }

  if (op == JSOp::InitProp || op == JSOp::InitLockedProp ||
      op == JSOp::InitHiddenProp) {
    if (!InitPropertyOperation(cx, pc, obj, name, rhs)) {
      return false;
    }
  } else if (op == JSOp::SetName || op == JSOp::StrictSetName ||
             op == JSOp::SetGName || op == JSOp::StrictSetGName) {
    if (!SetNameOperation(cx, script, pc, obj, rhs)) {
      return false;
    }
  } else if (op == JSOp::InitGLexical) {
    ExtensibleLexicalEnvironmentObject* lexicalEnv;
    if (script->hasNonSyntacticScope()) {
      lexicalEnv = &NearestEnclosingExtensibleLexicalEnvironment(
          frame->environmentChain());
    } else {
      lexicalEnv = &cx->global()->lexicalEnvironment();
    }
    InitGlobalLexicalOperation(cx, lexicalEnv, script, pc, rhs);
  } else {
    MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp);

    ObjectOpResult result;
    if (!SetProperty(cx, obj, id, rhs, lhs, result) ||
        !result.checkStrictModeError(cx, obj, id, op == JSOp::StrictSetProp)) {
      return false;
    }
  }

  if (stack) {
    // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS.
    MOZ_ASSERT(stack[1] == lhs);
    stack[1] = rhs;
  }

  if (attached) {
    return true;
  }

  // The SetProperty call might have entered this IC recursively, so try
  // to transition.
  MaybeTransition(cx, frame, stub);

  bool canAttachStub = stub->state().canAttachStub();

  if (deferType != DeferType::None && canAttachStub) {
    RootedValue idVal(cx, StringValue(name));
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state(),
                           lhs, idVal, rhs);

    MOZ_ASSERT(deferType == DeferType::AddSlot);
    AttachDecision decision = gen.tryAttachAddSlotStub(oldShape);

    switch (decision) {
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        gen.trackAttached(IRGenerator::NotAttached);
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("Invalid attach result");
        break;
    }
  }
  if (!attached && canAttachStub) {
    stub->trackNotAttached();
  }

  return true;
}
bool FallbackICCodeCompiler::emit_SetProp() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  // Overwrite the RHS value on top of the stack with the object, then push
  // the RHS in R1 on top of that.
  masm.storeValue(R0, Address(masm.getStackPointer(), 0));
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);

  // Push pointer to stack values, so that the stub can overwrite the object
  // (pushed for the decompiler) with the RHS.
  masm.computeEffectiveAddress(
      Address(masm.getStackPointer(), 2 * sizeof(Value)), R0.scratchReg());
  masm.push(R0.scratchReg());

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*,
                      HandleValue, HandleValue);
  if (!tailCallVM<Fn, DoSetPropFallback>(masm)) {
    return false;
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
  assumeStubFrame();
  code.initBailoutReturnOffset(BailoutReturnKind::SetProp,
                               masm.currentOffset());

  leaveStubFrame(masm);
  EmitReturnFromIC(masm);

  return true;
}

//
// Call_Fallback
//
bool DoCallFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                    uint32_t argc, Value* vp, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "Call(%s)", CodeName(op));

  MOZ_ASSERT(argc == GET_ARGC(pc));
  bool constructing =
      (op == JSOp::New || op == JSOp::NewContent || op == JSOp::SuperCall);
  bool ignoresReturnValue = (op == JSOp::CallIgnoresRv);
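  // vp[0] is the callee, vp[1] is |this|, vp[2]..vp[argc + 1] are the
  // arguments, and for constructing calls new.target follows the last
  // argument.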
  // Ensure vp array is rooted - we may GC in here.
  size_t numValues = argc + 2 + constructing;
  RootedExternalValueArray vpRoot(cx, numValues, vp);

  CallArgs callArgs = CallArgsFromSp(argc + constructing, vp + numValues,
                                     constructing, ignoresReturnValue);
  RootedValue callee(cx, vp[0]);
  RootedValue newTarget(cx, constructing ? callArgs.newTarget() : NullValue());

  // Transition stub state to megamorphic or generic if warranted.
  MaybeTransition(cx, frame, stub);

  bool canAttachStub = stub->state().canAttachStub();
  bool handled = false;

  // Only bother to try optimizing JSOp::Call with CacheIR if the chain is still
  // allowed to attach stubs.
  if (canAttachStub) {
    HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2);
    CallIRGenerator gen(cx, script, pc, op, stub->state(), argc, callee,
                        callArgs.thisv(), newTarget, args);
    switch (gen.tryAttachStub()) {
      case AttachDecision::NoAction:
        break;
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      script, icScript, stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          handled = true;
          JitSpew(JitSpew_BaselineIC, " Attached Call CacheIR stub");
        }
      } break;
      case AttachDecision::TemporarilyUnoptimizable:
        handled = true;
        break;
      case AttachDecision::Deferred:
        MOZ_CRASH("No deferred Call stubs");
    }
    if (!handled) {
      stub->trackNotAttached();
    }
  }

  if (constructing) {
    if (!ConstructFromStack(cx, callArgs)) {
      return false;
    }
    res.set(callArgs.rval());
  } else if ((op == JSOp::Eval || op == JSOp::StrictEval) &&
             cx->global()->valueIsEval(callee)) {
    if (!DirectEval(cx, callArgs.get(0), res)) {
      return false;
    }
  } else {
    MOZ_ASSERT(op == JSOp::Call || op == JSOp::CallContent ||
               op == JSOp::CallIgnoresRv || op == JSOp::CallIter ||
               op == JSOp::CallContentIter || op == JSOp::Eval ||
               op == JSOp::StrictEval);
    if ((op == JSOp::CallIter || op == JSOp::CallContentIter) &&
        callee.isPrimitive()) {
      MOZ_ASSERT(argc == 0, "thisv must be on top of the stack");
      ReportValueError(cx, JSMSG_NOT_ITERABLE, -1, callArgs.thisv(), nullptr);
      return false;
    }

    if (!CallFromStack(cx, callArgs)) {
      return false;
    }

    res.set(callArgs.rval());
  }

  return true;
}
bool DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, Value* vp,
                          MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  bool constructing = (op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall);
  FallbackICSpew(cx, stub, "SpreadCall(%s)", CodeName(op));

  // Ensure vp array is rooted - we may GC in here.
  RootedExternalValueArray vpRoot(cx, 3 + constructing, vp);

  RootedValue callee(cx, vp[0]);
  RootedValue thisv(cx, vp[1]);
  RootedValue arr(cx, vp[2]);
  RootedValue newTarget(cx, constructing ? vp[3] : NullValue());

  // Transition stub state to megamorphic or generic if warranted.
  MaybeTransition(cx, frame, stub);

  // Try attaching a call stub.
  bool handled = false;
  if (op != JSOp::SpreadEval && op != JSOp::StrictSpreadEval &&
      stub->state().canAttachStub()) {
    // Try CacheIR first:
    Rooted<ArrayObject*> aobj(cx, &arr.toObject().as<ArrayObject>());
    MOZ_ASSERT(IsPackedArray(aobj));

    HandleValueArray args = HandleValueArray::fromMarkedLocation(
        aobj->length(), aobj->getDenseElements());
    CallIRGenerator gen(cx, script, pc, op, stub->state(), 1, callee, thisv,
                        newTarget, args);
    switch (gen.tryAttachStub()) {
      case AttachDecision::NoAction:
        break;
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      script, icScript, stub, gen.stubName());

        if (result == ICAttachResult::Attached) {
          handled = true;
          JitSpew(JitSpew_BaselineIC, " Attached Spread Call CacheIR stub");
        }
      } break;
      case AttachDecision::TemporarilyUnoptimizable:
        handled = true;
        break;
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("No deferred optimizations for spread calls");
        break;
    }
    if (!handled) {
      stub->trackNotAttached();
    }
  }

  return SpreadCallOperation(cx, script, pc, thisv, callee, arr, newTarget,
                             res);
}
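// Duplicate the call operands from the baseline value stack onto the top of
// the stack, in the layout DoCallFallback expects through its vp argument
// (vp[0] = callee, vp[1] = |this|, then the arguments).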
void FallbackICCodeCompiler::pushCallArguments(
    MacroAssembler& masm, AllocatableGeneralRegisterSet regs, Register argcReg,
    bool isConstructing) {
  MOZ_ASSERT(!regs.has(argcReg));

  // argPtr initially points to the last argument.
  Register argPtr = regs.takeAny();
  masm.mov(FramePointer, argPtr);

  // Skip 3 pointers pushed on top of the arguments: the frame descriptor,
  // return address, and old frame pointer.
  size_t valueOffset = BaselineStubFrameLayout::Size();

  // We have to push |this|, callee, new.target (if constructing) and argc
  // arguments. Handle the number of Values we know statically first.

  size_t numNonArgValues = 2 + isConstructing;
  for (size_t i = 0; i < numNonArgValues; i++) {
    masm.pushValue(Address(argPtr, valueOffset));
    valueOffset += sizeof(Value);
  }

  // If there are no arguments we're done.
  Label done;
  masm.branchTest32(Assembler::Zero, argcReg, argcReg, &done);

  // Push argc Values.
  Label loop;
  Register count = regs.takeAny();
  masm.addPtr(Imm32(valueOffset), argPtr);
  masm.move32(argcReg, count);
  masm.bind(&loop);
  {
    masm.pushValue(Address(argPtr, 0));
    masm.addPtr(Imm32(sizeof(Value)), argPtr);

    masm.branchSub32(Assembler::NonZero, Imm32(1), count, &loop);
  }
  masm.bind(&done);
}
1775 bool FallbackICCodeCompiler::emitCall(bool isSpread, bool isConstructing) {
1776 static_assert(R0 == JSReturnOperand);
1778 // Values are on the stack left-to-right. Calling convention wants them
1779 // right-to-left so duplicate them on the stack in reverse order.
1780 // |this| and callee are pushed last.
1782 AllocatableGeneralRegisterSet regs = BaselineICAvailableGeneralRegs(0);
1784 if (MOZ_UNLIKELY(isSpread)) {
1785 // Push a stub frame so that we can perform a non-tail call.
1786 enterStubFrame(masm, R1.scratchReg());
1788 // Use FramePointer instead of StackPointer because it's not affected by
1789 // the stack pushes below.
1791 // newTarget
1792 uint32_t valueOffset = BaselineStubFrameLayout::Size();
1793 if (isConstructing) {
1794 masm.pushValue(Address(FramePointer, valueOffset));
1795 valueOffset += sizeof(Value);
1798 // array
1799 masm.pushValue(Address(FramePointer, valueOffset));
1800 valueOffset += sizeof(Value);
1802 // this
1803 masm.pushValue(Address(FramePointer, valueOffset));
1804 valueOffset += sizeof(Value);
1806 // callee
1807 masm.pushValue(Address(FramePointer, valueOffset));
1808 valueOffset += sizeof(Value);
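// The Values just pushed become, lowest address first, the callee, |this|,
// the arguments array and (when constructing) new.target that
// DoSpreadCallFallback receives through its Value* argument.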
1810 masm.push(masm.getStackPointer());
1811 masm.push(ICStubReg);
1813 PushStubPayload(masm, R0.scratchReg());
1815 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*,
1816 MutableHandleValue);
1817 if (!callVM<Fn, DoSpreadCallFallback>(masm)) {
1818 return false;
1819 }
1821 leaveStubFrame(masm);
1822 EmitReturnFromIC(masm);
1824 // SpreadCall is not yet supported in Ion, so do not generate asmcode for
1825 // bailout.
1826 return true;
1827 }
1829 // Push a stub frame so that we can perform a non-tail call.
1830 enterStubFrame(masm, R1.scratchReg());
1832 regs.take(R0.scratchReg()); // argc.
1834 pushCallArguments(masm, regs, R0.scratchReg(), isConstructing);
1836 masm.push(masm.getStackPointer());
1837 masm.push(R0.scratchReg());
1838 masm.push(ICStubReg);
1840 PushStubPayload(masm, R0.scratchReg());
1842 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, uint32_t,
1843 Value*, MutableHandleValue);
1844 if (!callVM<Fn, DoCallFallback>(masm)) {
1845 return false;
1846 }
1848 leaveStubFrame(masm);
1849 EmitReturnFromIC(masm);
1851 // This is the resume point used when a bailout rewrites the call stack to
1852 // undo Ion inlined frames. The return address pushed onto the reconstructed
1853 // stack will point here.
1854 assumeStubFrame();
1856 MOZ_ASSERT(!isSpread);
1858 if (isConstructing) {
1859 code.initBailoutReturnOffset(BailoutReturnKind::New, masm.currentOffset());
1860 } else {
1861 code.initBailoutReturnOffset(BailoutReturnKind::Call, masm.currentOffset());
1862 }
1864 // Load passed-in ThisV into R1 just in case it's needed. Need to do this
1865 // before we leave the stub frame since that info will be lost.
1866 // Current stack: [...., ThisV, CalleeToken, Descriptor ]
1867 size_t thisvOffset =
1868 JitFrameLayout::offsetOfThis() - JitFrameLayout::bytesPoppedAfterCall();
1869 masm.loadValue(Address(masm.getStackPointer(), thisvOffset), R1);
1871 leaveStubFrame(masm);
1873 // For a |constructing| call, if the callee returns a non-object, we replace
1874 // it with the |this| object passed in.
1875 if (isConstructing) {
1876 static_assert(JSReturnOperand == R0);
1877 Label skipThisReplace;
1879 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
1880 masm.moveValue(R1, R0);
1881 #ifdef DEBUG
1882 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
1883 masm.assumeUnreachable("Failed to return object in constructing call.");
1884 #endif
1885 masm.bind(&skipThisReplace);
1886 }
1888 EmitReturnFromIC(masm);
1889 return true;
1890 }
1892 bool FallbackICCodeCompiler::emit_Call() {
1893 return emitCall(/* isSpread = */ false, /* isConstructing = */ false);
1894 }
1896 bool FallbackICCodeCompiler::emit_CallConstructing() {
1897 return emitCall(/* isSpread = */ false, /* isConstructing = */ true);
1898 }
1900 bool FallbackICCodeCompiler::emit_SpreadCall() {
1901 return emitCall(/* isSpread = */ true, /* isConstructing = */ false);
1902 }
1904 bool FallbackICCodeCompiler::emit_SpreadCallConstructing() {
1905 return emitCall(/* isSpread = */ true, /* isConstructing = */ true);
1906 }
1909 // GetIterator_Fallback
1912 bool DoGetIteratorFallback(JSContext* cx, BaselineFrame* frame,
1913 ICFallbackStub* stub, HandleValue value,
1914 MutableHandleValue res) {
1915 stub->incrementEnteredCount();
1916 MaybeNotifyWarp(frame->outerScript(), stub);
1917 FallbackICSpew(cx, stub, "GetIterator");
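// TryAttachStub is a small helper defined earlier in this file: it runs the
// given CacheIR generator and, when the generator reports an attachable case,
// compiles the result into a new stub for this IC. The fallback then proceeds
// with the generic VM path below regardless of the outcome.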
1919 TryAttachStub<GetIteratorIRGenerator>("GetIterator", cx, frame, stub, value);
1921 PropertyIteratorObject* iterObj = ValueToIterator(cx, value);
1922 if (!iterObj) {
1923 return false;
1924 }
1926 res.setObject(*iterObj);
1927 return true;
1928 }
1930 bool FallbackICCodeCompiler::emit_GetIterator() {
1931 EmitRestoreTailCallReg(masm);
1933 // Sync stack for the decompiler.
1934 masm.pushValue(R0);
1936 masm.pushValue(R0);
1937 masm.push(ICStubReg);
1938 pushStubPayload(masm, R0.scratchReg());
1940 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
1941 MutableHandleValue);
1942 return tailCallVM<Fn, DoGetIteratorFallback>(masm);
1943 }
1946 // OptimizeSpreadCall_Fallback
1949 bool DoOptimizeSpreadCallFallback(JSContext* cx, BaselineFrame* frame,
1950 ICFallbackStub* stub, HandleValue value,
1951 MutableHandleValue res) {
1952 stub->incrementEnteredCount();
1953 MaybeNotifyWarp(frame->outerScript(), stub);
1954 FallbackICSpew(cx, stub, "OptimizeSpreadCall");
1956 TryAttachStub<OptimizeSpreadCallIRGenerator>("OptimizeSpreadCall", cx, frame,
1957 stub, value);
1959 return OptimizeSpreadCall(cx, value, res);
1960 }
1962 bool FallbackICCodeCompiler::emit_OptimizeSpreadCall() {
1963 EmitRestoreTailCallReg(masm);
1965 masm.pushValue(R0);
1966 masm.push(ICStubReg);
1967 pushStubPayload(masm, R0.scratchReg());
1969 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
1970 MutableHandleValue);
1971 return tailCallVM<Fn, DoOptimizeSpreadCallFallback>(masm);
1972 }
1975 // InstanceOf_Fallback
1978 bool DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame,
1979 ICFallbackStub* stub, HandleValue lhs,
1980 HandleValue rhs, MutableHandleValue res) {
1981 stub->incrementEnteredCount();
1982 MaybeNotifyWarp(frame->outerScript(), stub);
1983 FallbackICSpew(cx, stub, "InstanceOf");
1985 if (!rhs.isObject()) {
1986 ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, nullptr);
1987 return false;
1988 }
1990 RootedObject obj(cx, &rhs.toObject());
1991 bool cond = false;
1992 if (!InstanceofOperator(cx, obj, lhs, &cond)) {
1993 return false;
1994 }
1996 res.setBoolean(cond);
1998 if (!obj->is<JSFunction>()) {
1999 // Ensure we've recorded at least one failure so that we can detect that
2000 // there was a non-optimizable case.
2001 if (!stub->state().hasFailures()) {
2002 stub->trackNotAttached();
2003 }
2004 return true;
2005 }
2007 TryAttachStub<InstanceOfIRGenerator>("InstanceOf", cx, frame, stub, lhs, obj);
2008 return true;
2009 }
2011 bool FallbackICCodeCompiler::emit_InstanceOf() {
2012 EmitRestoreTailCallReg(masm);
2014 // Sync stack for the decompiler.
2015 masm.pushValue(R0);
2016 masm.pushValue(R1);
2018 masm.pushValue(R1);
2019 masm.pushValue(R0);
2020 masm.push(ICStubReg);
2021 pushStubPayload(masm, R0.scratchReg());
2023 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2024 HandleValue, MutableHandleValue);
2025 return tailCallVM<Fn, DoInstanceOfFallback>(masm);
2026 }
2029 // TypeOf_Fallback
2032 bool DoTypeOfFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
2033 HandleValue val, MutableHandleValue res) {
2034 stub->incrementEnteredCount();
2035 MaybeNotifyWarp(frame->outerScript(), stub);
2036 FallbackICSpew(cx, stub, "TypeOf");
2038 TryAttachStub<TypeOfIRGenerator>("TypeOf", cx, frame, stub, val);
2040 JSType type = js::TypeOfValue(val);
2041 RootedString string(cx, TypeName(type, cx->names()));
2042 res.setString(string);
2043 return true;
2044 }
2046 bool FallbackICCodeCompiler::emit_TypeOf() {
2047 EmitRestoreTailCallReg(masm);
2049 masm.pushValue(R0);
2050 masm.push(ICStubReg);
2051 pushStubPayload(masm, R0.scratchReg());
2053 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2054 MutableHandleValue);
2055 return tailCallVM<Fn, DoTypeOfFallback>(masm);
2056 }
2059 // ToPropertyKey_Fallback
2062 bool DoToPropertyKeyFallback(JSContext* cx, BaselineFrame* frame,
2063 ICFallbackStub* stub, HandleValue val,
2064 MutableHandleValue res) {
2065 stub->incrementEnteredCount();
2066 MaybeNotifyWarp(frame->outerScript(), stub);
2067 FallbackICSpew(cx, stub, "ToPropertyKey");
2069 TryAttachStub<ToPropertyKeyIRGenerator>("ToPropertyKey", cx, frame, stub,
2070 val);
2072 return ToPropertyKeyOperation(cx, val, res);
2073 }
2075 bool FallbackICCodeCompiler::emit_ToPropertyKey() {
2076 EmitRestoreTailCallReg(masm);
2078 masm.pushValue(R0);
2079 masm.push(ICStubReg);
2080 pushStubPayload(masm, R0.scratchReg());
2082 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2083 MutableHandleValue);
2084 return tailCallVM<Fn, DoToPropertyKeyFallback>(masm);
2085 }
2088 // Rest_Fallback
2091 bool DoRestFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
2092 MutableHandleValue res) {
2093 unsigned numFormals = frame->numFormalArgs() - 1;
2094 unsigned numActuals = frame->numActualArgs();
2095 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
2096 Value* rest = frame->argv() + numFormals;
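// Example: for |function f(a, ...rest)| called as f(1, 2, 3), numFormals is 1
// (the rest parameter itself is excluded), numActuals is 3, so numRest is 2
// and |rest| points at the actual arguments 2 and 3.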
2098 ArrayObject* obj = NewDenseCopiedArray(cx, numRest, rest);
2099 if (!obj) {
2100 return false;
2101 }
2102 res.setObject(*obj);
2103 return true;
2104 }
2106 bool FallbackICCodeCompiler::emit_Rest() {
2107 EmitRestoreTailCallReg(masm);
2109 masm.push(ICStubReg);
2110 pushStubPayload(masm, R0.scratchReg());
2112 using Fn =
2113 bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
2114 return tailCallVM<Fn, DoRestFallback>(masm);
2115 }
2118 // UnaryArith_Fallback
2121 bool DoUnaryArithFallback(JSContext* cx, BaselineFrame* frame,
2122 ICFallbackStub* stub, HandleValue val,
2123 MutableHandleValue res) {
2124 stub->incrementEnteredCount();
2125 MaybeNotifyWarp(frame->outerScript(), stub);
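// This fallback is shared by all unary arith ops, so recover the bytecode pc
// from the stub's pc offset and dispatch on the actual JSOp below.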
2127 jsbytecode* pc = StubOffsetToPc(stub, frame->script());
2128 JSOp op = JSOp(*pc);
2129 FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName(op));
2131 switch (op) {
2132 case JSOp::BitNot: {
2133 res.set(val);
2134 if (!BitNot(cx, res, res)) {
2135 return false;
2136 }
2137 break;
2138 }
2139 case JSOp::Pos: {
2140 res.set(val);
2141 if (!ToNumber(cx, res)) {
2142 return false;
2143 }
2144 break;
2145 }
2146 case JSOp::Neg: {
2147 res.set(val);
2148 if (!NegOperation(cx, res, res)) {
2149 return false;
2150 }
2151 break;
2152 }
2153 case JSOp::Inc: {
2154 if (!IncOperation(cx, val, res)) {
2155 return false;
2156 }
2157 break;
2158 }
2159 case JSOp::Dec: {
2160 if (!DecOperation(cx, val, res)) {
2161 return false;
2162 }
2163 break;
2164 }
2165 case JSOp::ToNumeric: {
2166 res.set(val);
2167 if (!ToNumeric(cx, res)) {
2168 return false;
2169 }
2170 break;
2171 }
2172 default:
2173 MOZ_CRASH("Unexpected op");
2174 }
2175 MOZ_ASSERT(res.isNumeric());
2177 TryAttachStub<UnaryArithIRGenerator>("UnaryArith", cx, frame, stub, op, val,
2178 res);
2179 return true;
2180 }
2182 bool FallbackICCodeCompiler::emit_UnaryArith() {
2183 static_assert(R0 == JSReturnOperand);
2185 // Restore the tail call register.
2186 EmitRestoreTailCallReg(masm);
2188 // Ensure stack is fully synced for the expression decompiler.
2189 masm.pushValue(R0);
2191 // Push arguments.
2192 masm.pushValue(R0);
2193 masm.push(ICStubReg);
2194 pushStubPayload(masm, R0.scratchReg());
2196 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2197 MutableHandleValue);
2198 return tailCallVM<Fn, DoUnaryArithFallback>(masm);
2199 }
2202 // BinaryArith_Fallback
2205 bool DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame,
2206 ICFallbackStub* stub, HandleValue lhs,
2207 HandleValue rhs, MutableHandleValue ret) {
2208 stub->incrementEnteredCount();
2209 MaybeNotifyWarp(frame->outerScript(), stub);
2211 jsbytecode* pc = StubOffsetToPc(stub, frame->script());
2212 JSOp op = JSOp(*pc);
2213 FallbackICSpew(
2214 cx, stub, "CacheIRBinaryArith(%s,%d,%d)", CodeName(op),
2215 int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
2216 int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));
2218 // Don't pass lhs/rhs directly; we need the original values when
2219 // generating stubs.
2220 RootedValue lhsCopy(cx, lhs);
2221 RootedValue rhsCopy(cx, rhs);
2223 // Perform the arith operation.
2224 switch (op) {
2225 case JSOp::Add:
2226 // Do an add.
2227 if (!AddValues(cx, &lhsCopy, &rhsCopy, ret)) {
2228 return false;
2229 }
2230 break;
2231 case JSOp::Sub:
2232 if (!SubValues(cx, &lhsCopy, &rhsCopy, ret)) {
2233 return false;
2234 }
2235 break;
2236 case JSOp::Mul:
2237 if (!MulValues(cx, &lhsCopy, &rhsCopy, ret)) {
2238 return false;
2239 }
2240 break;
2241 case JSOp::Div:
2242 if (!DivValues(cx, &lhsCopy, &rhsCopy, ret)) {
2243 return false;
2244 }
2245 break;
2246 case JSOp::Mod:
2247 if (!ModValues(cx, &lhsCopy, &rhsCopy, ret)) {
2248 return false;
2249 }
2250 break;
2251 case JSOp::Pow:
2252 if (!PowValues(cx, &lhsCopy, &rhsCopy, ret)) {
2253 return false;
2254 }
2255 break;
2256 case JSOp::BitOr: {
2257 if (!BitOr(cx, &lhsCopy, &rhsCopy, ret)) {
2258 return false;
2259 }
2260 break;
2261 }
2262 case JSOp::BitXor: {
2263 if (!BitXor(cx, &lhsCopy, &rhsCopy, ret)) {
2264 return false;
2265 }
2266 break;
2267 }
2268 case JSOp::BitAnd: {
2269 if (!BitAnd(cx, &lhsCopy, &rhsCopy, ret)) {
2270 return false;
2271 }
2272 break;
2273 }
2274 case JSOp::Lsh: {
2275 if (!BitLsh(cx, &lhsCopy, &rhsCopy, ret)) {
2276 return false;
2277 }
2278 break;
2279 }
2280 case JSOp::Rsh: {
2281 if (!BitRsh(cx, &lhsCopy, &rhsCopy, ret)) {
2282 return false;
2283 }
2284 break;
2285 }
2286 case JSOp::Ursh: {
2287 if (!UrshValues(cx, &lhsCopy, &rhsCopy, ret)) {
2288 return false;
2289 }
2290 break;
2291 }
2292 default:
2293 MOZ_CRASH("Unhandled baseline arith op");
2294 }
2296 TryAttachStub<BinaryArithIRGenerator>("BinaryArith", cx, frame, stub, op, lhs,
2297 rhs, ret);
2298 return true;
2299 }
2301 bool FallbackICCodeCompiler::emit_BinaryArith() {
2302 static_assert(R0 == JSReturnOperand);
2304 // Restore the tail call register.
2305 EmitRestoreTailCallReg(masm);
2307 // Ensure stack is fully synced for the expression decompiler.
2308 masm.pushValue(R0);
2309 masm.pushValue(R1);
2311 // Push arguments.
2312 masm.pushValue(R1);
2313 masm.pushValue(R0);
2314 masm.push(ICStubReg);
2315 pushStubPayload(masm, R0.scratchReg());
2317 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2318 HandleValue, MutableHandleValue);
2319 return tailCallVM<Fn, DoBinaryArithFallback>(masm);
2320 }
2323 // Compare_Fallback
2325 bool DoCompareFallback(JSContext* cx, BaselineFrame* frame,
2326 ICFallbackStub* stub, HandleValue lhs, HandleValue rhs,
2327 MutableHandleValue ret) {
2328 stub->incrementEnteredCount();
2329 MaybeNotifyWarp(frame->outerScript(), stub);
2331 jsbytecode* pc = StubOffsetToPc(stub, frame->script());
2332 JSOp op = JSOp(*pc);
2334 FallbackICSpew(cx, stub, "Compare(%s)", CodeName(op));
2336 // Don't pass lhs/rhs directly; we need the original values when
2337 // generating stubs.
2338 RootedValue lhsCopy(cx, lhs);
2339 RootedValue rhsCopy(cx, rhs);
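// Ne and StrictNe have no dedicated helpers: below they are computed by
// negating LooselyEqual / StrictlyEqual, matching the definition of != and
// !== as the negations of == and ===.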
2341 // Perform the compare operation.
2342 bool out;
2343 switch (op) {
2344 case JSOp::Lt:
2345 if (!LessThan(cx, &lhsCopy, &rhsCopy, &out)) {
2346 return false;
2347 }
2348 break;
2349 case JSOp::Le:
2350 if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
2351 return false;
2352 }
2353 break;
2354 case JSOp::Gt:
2355 if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out)) {
2356 return false;
2357 }
2358 break;
2359 case JSOp::Ge:
2360 if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
2361 return false;
2362 }
2363 break;
2364 case JSOp::Eq:
2365 if (!js::LooselyEqual(cx, lhsCopy, rhsCopy, &out)) {
2366 return false;
2367 }
2368 break;
2369 case JSOp::Ne:
2370 if (!js::LooselyEqual(cx, lhsCopy, rhsCopy, &out)) {
2371 return false;
2372 }
2373 out = !out;
2374 break;
2375 case JSOp::StrictEq:
2376 if (!js::StrictlyEqual(cx, lhsCopy, rhsCopy, &out)) {
2377 return false;
2378 }
2379 break;
2380 case JSOp::StrictNe:
2381 if (!js::StrictlyEqual(cx, lhsCopy, rhsCopy, &out)) {
2382 return false;
2383 }
2384 out = !out;
2385 break;
2386 default:
2387 MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
2388 return false;
2389 }
2391 ret.setBoolean(out);
2393 TryAttachStub<CompareIRGenerator>("Compare", cx, frame, stub, op, lhs, rhs);
2394 return true;
2395 }
2397 bool FallbackICCodeCompiler::emit_Compare() {
2398 static_assert(R0 == JSReturnOperand);
2400 // Restore the tail call register.
2401 EmitRestoreTailCallReg(masm);
2403 // Ensure stack is fully synced for the expression decompiler.
2404 masm.pushValue(R0);
2405 masm.pushValue(R1);
2407 // Push arguments.
2408 masm.pushValue(R1);
2409 masm.pushValue(R0);
2410 masm.push(ICStubReg);
2411 pushStubPayload(masm, R0.scratchReg());
2413 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2414 HandleValue, MutableHandleValue);
2415 return tailCallVM<Fn, DoCompareFallback>(masm);
2416 }
2419 // NewArray_Fallback
2422 bool DoNewArrayFallback(JSContext* cx, BaselineFrame* frame,
2423 ICFallbackStub* stub, MutableHandleValue res) {
2424 stub->incrementEnteredCount();
2425 MaybeNotifyWarp(frame->outerScript(), stub);
2426 FallbackICSpew(cx, stub, "NewArray");
2428 jsbytecode* pc = StubOffsetToPc(stub, frame->script());
2430 uint32_t length = GET_UINT32(pc);
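// The length is the uint32 immediate operand of the NewArray bytecode; for
// example, the literal [1, 2, 3] is compiled to a NewArray op with length 3.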
2431 MOZ_ASSERT(length <= INT32_MAX,
2432 "the bytecode emitter must fail to compile code that would "
2433 "produce a length exceeding int32_t range");
2435 Rooted<ArrayObject*> array(cx, NewArrayOperation(cx, length));
2436 if (!array) {
2437 return false;
2438 }
2440 TryAttachStub<NewArrayIRGenerator>("NewArray", cx, frame, stub, JSOp(*pc),
2441 array, frame);
2443 res.setObject(*array);
2444 return true;
2445 }
2447 bool FallbackICCodeCompiler::emit_NewArray() {
2448 EmitRestoreTailCallReg(masm);
2450 masm.push(ICStubReg); // stub.
2451 masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());
2453 using Fn =
2454 bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
2455 return tailCallVM<Fn, DoNewArrayFallback>(masm);
2456 }
2459 // NewObject_Fallback
2461 bool DoNewObjectFallback(JSContext* cx, BaselineFrame* frame,
2462 ICFallbackStub* stub, MutableHandleValue res) {
2463 stub->incrementEnteredCount();
2464 MaybeNotifyWarp(frame->outerScript(), stub);
2465 FallbackICSpew(cx, stub, "NewObject");
2467 RootedScript script(cx, frame->script());
2468 jsbytecode* pc = StubOffsetToPc(stub, script);
2470 RootedObject obj(cx, NewObjectOperation(cx, script, pc));
2471 if (!obj) {
2472 return false;
2473 }
2475 TryAttachStub<NewObjectIRGenerator>("NewObject", cx, frame, stub, JSOp(*pc),
2476 obj, frame);
2478 res.setObject(*obj);
2479 return true;
2480 }
2482 bool FallbackICCodeCompiler::emit_NewObject() {
2483 EmitRestoreTailCallReg(masm);
2485 masm.push(ICStubReg); // stub.
2486 pushStubPayload(masm, R0.scratchReg());
2488 using Fn =
2489 bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
2490 return tailCallVM<Fn, DoNewObjectFallback>(masm);
2491 }
2494 // CloseIter_Fallback
2497 bool DoCloseIterFallback(JSContext* cx, BaselineFrame* frame,
2498 ICFallbackStub* stub, HandleObject iter) {
2499 stub->incrementEnteredCount();
2500 MaybeNotifyWarp(frame->outerScript(), stub);
2501 FallbackICSpew(cx, stub, "CloseIter");
2503 jsbytecode* pc = StubOffsetToPc(stub, frame->script());
2504 CompletionKind kind = CompletionKind(GET_UINT8(pc));
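// The single-byte operand of CloseIter encodes the CompletionKind (Normal,
// Return, or Throw), i.e. why the iterator is being closed; the same kind is
// passed both to the IR generator and to the generic CloseIterOperation below.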
2506 TryAttachStub<CloseIterIRGenerator>("CloseIter", cx, frame, stub, iter, kind);
2508 return CloseIterOperation(cx, iter, kind);
2509 }
2511 bool FallbackICCodeCompiler::emit_CloseIter() {
2512 EmitRestoreTailCallReg(masm);
2514 masm.push(R0.scratchReg());
2515 masm.push(ICStubReg);
2516 pushStubPayload(masm, R0.scratchReg());
2518 using Fn =
2519 bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject);
2520 return tailCallVM<Fn, DoCloseIterFallback>(masm);
2521 }
2524 // OptimizeGetIterator_Fallback
2527 bool DoOptimizeGetIteratorFallback(JSContext* cx, BaselineFrame* frame,
2528 ICFallbackStub* stub, HandleValue value,
2529 MutableHandleValue res) {
2530 stub->incrementEnteredCount();
2531 MaybeNotifyWarp(frame->outerScript(), stub);
2532 FallbackICSpew(cx, stub, "OptimizeGetIterator");
2534 TryAttachStub<OptimizeGetIteratorIRGenerator>("OptimizeGetIterator", cx,
2535 frame, stub, value);
2537 bool result;
2538 if (!OptimizeGetIterator(cx, value, &result)) {
2539 return false;
2540 }
2541 res.setBoolean(result);
2542 return true;
2543 }
2545 bool FallbackICCodeCompiler::emit_OptimizeGetIterator() {
2546 EmitRestoreTailCallReg(masm);
2548 masm.pushValue(R0);
2549 masm.push(ICStubReg);
2550 pushStubPayload(masm, R0.scratchReg());
2552 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
2553 MutableHandleValue);
2554 return tailCallVM<Fn, DoOptimizeGetIteratorFallback>(masm);
2555 }
2557 bool JitRuntime::generateBaselineICFallbackCode(JSContext* cx) {
2558 TempAllocator temp(&cx->tempLifoAlloc());
2559 StackMacroAssembler masm(cx, temp);
2560 PerfSpewerRangeRecorder rangeRecorder(masm);
2561 AutoCreatedBy acb(masm, "JitRuntime::generateBaselineICFallbackCode");
2563 BaselineICFallbackCode& fallbackCode = baselineICFallbackCode_.ref();
2564 FallbackICCodeCompiler compiler(cx, fallbackCode, masm);
2566 JitSpew(JitSpew_Codegen, "# Emitting Baseline IC fallback code");
2568 #define EMIT_CODE(kind) \
2569 { \
2570 AutoCreatedBy acb(masm, "kind=" #kind); \
2571 uint32_t offset = startTrampolineCode(masm); \
2572 InitMacroAssemblerForICStub(masm); \
2573 if (!compiler.emit_##kind()) { \
2574 return false; \
2575 } \
2576 fallbackCode.initOffset(BaselineICFallbackKind::kind, offset); \
2577 rangeRecorder.recordOffset("BaselineICFallback: " #kind); \
2578 }
2579 IC_BASELINE_FALLBACK_CODE_KIND_LIST(EMIT_CODE)
2580 #undef EMIT_CODE
2582 Linker linker(masm);
2583 JitCode* code = linker.newCode(cx, CodeKind::Other);
2584 if (!code) {
2585 return false;
2586 }
2588 rangeRecorder.collectRangesForJitCode(code);
2590 #ifdef MOZ_VTUNE
2591 vtune::MarkStub(code, "BaselineICFallback");
2592 #endif
2594 fallbackCode.initCode(code);
2595 return true;
2596 }
2598 } // namespace jit
2599 } // namespace js