/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineIC.h"

#include "mozilla/Casting.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerPrintfMacros.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/Sprintf.h"
#include "mozilla/TemplateLib.h"
#include "mozilla/Unused.h"

#include "jsfriendapi.h"
#include "jslibmath.h"
#include "jstypes.h"

#include "builtin/Eval.h"
#include "gc/Policy.h"
#include "jit/BaselineCacheIRCompiler.h"
#include "jit/BaselineDebugModeOSR.h"
#include "jit/BaselineJIT.h"
#include "jit/InlinableNatives.h"
#include "jit/JitSpewer.h"
#include "jit/Linker.h"
#include "jit/Lowering.h"
#ifdef JS_ION_PERF
#  include "jit/PerfSpewer.h"
#endif
#include "jit/SharedICHelpers.h"
#include "jit/VMFunctions.h"
#include "js/Conversions.h"
#include "js/GCVector.h"
#include "vm/BytecodeIterator.h"
#include "vm/BytecodeLocation.h"
#include "vm/BytecodeUtil.h"
#include "vm/JSFunction.h"
#include "vm/JSScript.h"
#include "vm/Opcodes.h"
#include "vm/SelfHosting.h"
#include "vm/TypedArrayObject.h"
#ifdef MOZ_VTUNE
#  include "vtune/VTuneWrapper.h"
#endif

#include "builtin/Boolean-inl.h"

#include "jit/JitFrames-inl.h"
#include "jit/MacroAssembler-inl.h"
#include "jit/shared/Lowering-shared-inl.h"
#include "jit/SharedICHelpers-inl.h"
#include "jit/VMFunctionList-inl.h"
#include "vm/BytecodeIterator-inl.h"
#include "vm/BytecodeLocation-inl.h"
#include "vm/EnvironmentObject-inl.h"
#include "vm/Interpreter-inl.h"
#include "vm/JSScript-inl.h"
#include "vm/StringObject-inl.h"
using mozilla::DebugOnly;

namespace js {
namespace jit {

// Class used to emit all Baseline IC fallback code when initializing the
// JitRuntime.
class MOZ_RAII FallbackICCodeCompiler final : public ICStubCompilerBase {
  BaselineICFallbackCode& code;
  MacroAssembler& masm;

  MOZ_MUST_USE bool emitCall(bool isSpread, bool isConstructing);
  MOZ_MUST_USE bool emitGetElem(bool hasReceiver);
  MOZ_MUST_USE bool emitGetProp(bool hasReceiver);

 public:
  FallbackICCodeCompiler(JSContext* cx, BaselineICFallbackCode& code,
                         MacroAssembler& masm)
      : ICStubCompilerBase(cx), code(code), masm(masm) {}

#define DEF_METHOD(kind) MOZ_MUST_USE bool emit_##kind();
  IC_BASELINE_FALLBACK_CODE_KIND_LIST(DEF_METHOD)
#undef DEF_METHOD
};
#ifdef JS_JITSPEW
void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...) {
  if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
    RootedScript script(cx, GetTopJitJSScript(cx));
    jsbytecode* pc = stub->icEntry()->pc(script);

    char fmtbuf[100];
    va_list args;
    va_start(args, fmt);
    (void)VsprintfLiteral(fmtbuf, fmt, args);
    va_end(args);

    JitSpew(
        JitSpew_BaselineICFallback,
        "Fallback hit for (%s:%u:%u) (pc=%zu,line=%u,uses=%u,stubs=%zu): %s",
        script->filename(), script->lineno(), script->column(),
        script->pcToOffset(pc), PCToLineNumber(script, pc),
        script->getWarmUpCount(), stub->numOptimizedStubs(), fmtbuf);
  }
}

void TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub,
                        const char* fmt, ...) {
  if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
    RootedScript script(cx, GetTopJitJSScript(cx));
    jsbytecode* pc = stub->icEntry()->pc(script);

    char fmtbuf[100];
    va_list args;
    va_start(args, fmt);
    (void)VsprintfLiteral(fmtbuf, fmt, args);
    va_end(args);

    JitSpew(JitSpew_BaselineICFallback,
            "Type monitor fallback hit for (%s:%u:%u) "
            "(pc=%zu,line=%u,uses=%u,stubs=%d): %s",
            script->filename(), script->lineno(), script->column(),
            script->pcToOffset(pc), PCToLineNumber(script, pc),
            script->getWarmUpCount(), (int)stub->numOptimizedMonitorStubs(),
            fmtbuf);
  }
}
#endif  // JS_JITSPEW
ICFallbackStub* ICEntry::fallbackStub() const {
  return firstStub()->getChainFallback();
}

void ICEntry::trace(JSTracer* trc) {
#ifdef JS_64BIT
  // If we have filled our padding with a magic value, check it now.
  MOZ_DIAGNOSTIC_ASSERT(traceMagic_ == EXPECTED_TRACE_MAGIC);
#endif
  for (ICStub* stub = firstStub(); stub; stub = stub->next()) {
    stub->trace(trc);
  }
}
// Allocator for Baseline IC fallback stubs. These stubs use trampoline code
// stored in JitRuntime.
class MOZ_RAII FallbackStubAllocator {
  JSContext* cx_;
  ICStubSpace& stubSpace_;
  const BaselineICFallbackCode& code_;

 public:
  FallbackStubAllocator(JSContext* cx, ICStubSpace& stubSpace)
      : cx_(cx),
        stubSpace_(stubSpace),
        code_(cx->runtime()->jitRuntime()->baselineICFallbackCode()) {}

  template <typename T, typename... Args>
  T* newStub(BaselineICFallbackKind kind, Args&&... args) {
    TrampolinePtr addr = code_.addr(kind);
    return ICStub::NewFallback<T>(cx_, &stubSpace_, addr,
                                  std::forward<Args>(args)...);
  }
};
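// Illustrative usage sketch (added for clarity; mirrors the call sites in
// ICScript::initICEntries below). newStub<T> returns nullptr on OOM, so every
// caller must check the result:
//
//   FallbackStubAllocator alloc(cx, *fallbackStubSpace());
//   ICStub* stub = alloc.newStub<ICToBool_Fallback>(Kind::ToBool);
//   if (!stub) { /* OOM: propagate failure */ }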
// Helper method called by the lambda expressions `addIC` and `addPrologueIC`
// in `ICScript::initICEntries` below.
static bool AddICImpl(JSContext* cx, ICScript* icScript, uint32_t offset,
                      ICStub* stub, uint32_t& icEntryIndex) {
  if (!stub) {
    MOZ_ASSERT(cx->isExceptionPending());
    mozilla::Unused << cx;  // Silence -Wunused-lambda-capture in opt builds.
    return false;
  }

  // Initialize the ICEntry.
  ICEntry& entryRef = icScript->icEntry(icEntryIndex);
  icEntryIndex++;
  new (&entryRef) ICEntry(stub, offset);

  // Fix up pointers from fallback stubs to the ICEntry.
  if (stub->isFallback()) {
    stub->toFallbackStub()->fixupICEntry(&entryRef);
  } else {
    stub->toTypeMonitor_Fallback()->fixupICEntry(&entryRef);
  }

  return true;
}
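// Note (added): the ICEntry storage is preallocated in the ICScript, so
// AddICImpl constructs each entry in place with placement new and only ever
// advances icEntryIndex; initICEntries asserts at the end that every
// preallocated entry was initialized exactly once.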
bool ICScript::initICEntries(JSContext* cx, JSScript* script) {
  MOZ_ASSERT(cx->realm()->jitRealm());
  MOZ_ASSERT(jit::IsBaselineInterpreterEnabled());

  MOZ_ASSERT(numICEntries() == script->numICEntries());

  FallbackStubAllocator alloc(cx, *fallbackStubSpace());

  // Index of the next ICEntry to initialize.
  uint32_t icEntryIndex = 0;

  using Kind = BaselineICFallbackKind;

  auto addIC = [cx, this, script, &icEntryIndex](BytecodeLocation loc,
                                                 ICStub* stub) {
    uint32_t offset = loc.bytecodeToOffset(script);
    return AddICImpl(cx, this, offset, stub, icEntryIndex);
  };

  // Lambda expression for adding prologue (non-op) ICs.
  auto addPrologueIC = [cx, this, &icEntryIndex](ICStub* stub) {
    return AddICImpl(cx, this, ICEntry::ProloguePCOffset, stub, icEntryIndex);
  };

  if (IsTypeInferenceEnabled()) {
    // Add ICEntries and fallback stubs for this/argument type checks.
    // Note: we pass a nullptr pc to indicate this is a non-op IC.
    // See ICEntry::ProloguePCOffset.
    if (JSFunction* fun = script->function()) {
      ICStub* stub =
          alloc.newStub<ICTypeMonitor_Fallback>(Kind::TypeMonitor, nullptr, 0);
      if (!addPrologueIC(stub)) {
        return false;
      }

      for (size_t i = 0; i < fun->nargs(); i++) {
        ICStub* stub = alloc.newStub<ICTypeMonitor_Fallback>(Kind::TypeMonitor,
                                                             nullptr, i + 1);
        if (!addPrologueIC(stub)) {
          return false;
        }
      }
    }
  }

  // For JOF_IC ops: initialize ICEntries and fallback stubs.
  for (BytecodeLocation loc : js::AllBytecodesIterable(script)) {
    JSOp op = loc.getOp();

    // Assert the frontend stored the correct IC index in jump target ops.
    MOZ_ASSERT_IF(BytecodeIsJumpTarget(op), loc.icIndex() == icEntryIndex);

    if (!BytecodeOpHasIC(op)) {
      continue;
    }

    switch (op) {
      case JSOp::Not:
      case JSOp::And:
      case JSOp::Or:
      case JSOp::IfEq:
      case JSOp::IfNe: {
        ICStub* stub = alloc.newStub<ICToBool_Fallback>(Kind::ToBool);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::BitNot:
      case JSOp::Pos:
      case JSOp::Neg:
      case JSOp::Inc:
      case JSOp::Dec:
      case JSOp::ToNumeric: {
        ICStub* stub = alloc.newStub<ICUnaryArith_Fallback>(Kind::UnaryArith);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::BitOr:
      case JSOp::BitXor:
      case JSOp::BitAnd:
      case JSOp::Lsh:
      case JSOp::Rsh:
      case JSOp::Ursh:
      case JSOp::Add:
      case JSOp::Sub:
      case JSOp::Mul:
      case JSOp::Div:
      case JSOp::Mod:
      case JSOp::Pow: {
        ICStub* stub = alloc.newStub<ICBinaryArith_Fallback>(Kind::BinaryArith);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Eq:
      case JSOp::Ne:
      case JSOp::Lt:
      case JSOp::Le:
      case JSOp::Gt:
      case JSOp::Ge:
      case JSOp::StrictEq:
      case JSOp::StrictNe: {
        ICStub* stub = alloc.newStub<ICCompare_Fallback>(Kind::Compare);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::NewArray: {
        ObjectGroup* group = ObjectGroup::allocationSiteGroup(
            cx, script, loc.toRawBytecode(), JSProto_Array);
        if (!group) {
          return false;
        }
        ICStub* stub =
            alloc.newStub<ICNewArray_Fallback>(Kind::NewArray, group);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::NewObject:
      case JSOp::NewObjectWithGroup:
      case JSOp::NewInit: {
        ICStub* stub = alloc.newStub<ICNewObject_Fallback>(Kind::NewObject);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::InitElem:
      case JSOp::InitHiddenElem:
      case JSOp::InitLockedElem:
      case JSOp::InitElemArray:
      case JSOp::InitElemInc:
      case JSOp::SetElem:
      case JSOp::StrictSetElem: {
        ICStub* stub = alloc.newStub<ICSetElem_Fallback>(Kind::SetElem);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::InitProp:
      case JSOp::InitLockedProp:
      case JSOp::InitHiddenProp:
      case JSOp::InitGLexical:
      case JSOp::SetProp:
      case JSOp::StrictSetProp:
      case JSOp::SetName:
      case JSOp::StrictSetName:
      case JSOp::SetGName:
      case JSOp::StrictSetGName: {
        ICStub* stub = alloc.newStub<ICSetProp_Fallback>(Kind::SetProp);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetProp:
      case JSOp::CallProp:
      case JSOp::Length:
      case JSOp::GetBoundName: {
        ICStub* stub = alloc.newStub<ICGetProp_Fallback>(Kind::GetProp);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetPropSuper: {
        ICStub* stub = alloc.newStub<ICGetProp_Fallback>(Kind::GetPropSuper);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetElem:
      case JSOp::CallElem: {
        ICStub* stub = alloc.newStub<ICGetElem_Fallback>(Kind::GetElem);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetElemSuper: {
        ICStub* stub = alloc.newStub<ICGetElem_Fallback>(Kind::GetElemSuper);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::In: {
        ICStub* stub = alloc.newStub<ICIn_Fallback>(Kind::In);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::HasOwn: {
        ICStub* stub = alloc.newStub<ICHasOwn_Fallback>(Kind::HasOwn);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::CheckPrivateField: {
        ICStub* stub = alloc.newStub<ICCheckPrivateField_Fallback>(
            Kind::CheckPrivateField);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetName:
      case JSOp::GetGName: {
        ICStub* stub = alloc.newStub<ICGetName_Fallback>(Kind::GetName);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::BindName:
      case JSOp::BindGName: {
        ICStub* stub = alloc.newStub<ICBindName_Fallback>(Kind::BindName);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetAliasedVar:
      case JSOp::GetImport: {
        ICStub* stub =
            alloc.newStub<ICTypeMonitor_Fallback>(Kind::TypeMonitor, nullptr);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::GetIntrinsic: {
        ICStub* stub =
            alloc.newStub<ICGetIntrinsic_Fallback>(Kind::GetIntrinsic);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Call:
      case JSOp::CallIgnoresRv:
      case JSOp::CallIter:
      case JSOp::FunCall:
      case JSOp::FunApply:
      case JSOp::Eval:
      case JSOp::StrictEval: {
        ICStub* stub = alloc.newStub<ICCall_Fallback>(Kind::Call);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::SuperCall:
      case JSOp::New: {
        ICStub* stub = alloc.newStub<ICCall_Fallback>(Kind::CallConstructing);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::SpreadCall:
      case JSOp::SpreadEval:
      case JSOp::StrictSpreadEval: {
        ICStub* stub = alloc.newStub<ICCall_Fallback>(Kind::SpreadCall);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::SpreadSuperCall:
      case JSOp::SpreadNew: {
        ICStub* stub =
            alloc.newStub<ICCall_Fallback>(Kind::SpreadCallConstructing);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Instanceof: {
        ICStub* stub = alloc.newStub<ICInstanceOf_Fallback>(Kind::InstanceOf);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Typeof:
      case JSOp::TypeofExpr: {
        ICStub* stub = alloc.newStub<ICTypeOf_Fallback>(Kind::TypeOf);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::ToPropertyKey: {
        ICStub* stub =
            alloc.newStub<ICToPropertyKey_Fallback>(Kind::ToPropertyKey);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Iter: {
        ICStub* stub = alloc.newStub<ICGetIterator_Fallback>(Kind::GetIterator);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      case JSOp::Rest: {
        ArrayObject* templateObject = ObjectGroup::newArrayObject(
            cx, nullptr, 0, TenuredObject,
            ObjectGroup::NewArrayKind::UnknownIndex);
        if (!templateObject) {
          return false;
        }
        ICStub* stub =
            alloc.newStub<ICRest_Fallback>(Kind::Rest, templateObject);
        if (!addIC(loc, stub)) {
          return false;
        }
        break;
      }
      default:
        MOZ_CRASH("JOF_IC op not handled");
    }
  }

  // Assert all ICEntries have been initialized.
  MOZ_ASSERT(icEntryIndex == numICEntries());
  return true;
}
ICStubConstIterator& ICStubConstIterator::operator++() {
  MOZ_ASSERT(currentStub_ != nullptr);
  currentStub_ = currentStub_->next();
  return *this;
}

ICStubIterator::ICStubIterator(ICFallbackStub* fallbackStub, bool end)
    : icEntry_(fallbackStub->icEntry()),
      fallbackStub_(fallbackStub),
      previousStub_(nullptr),
      currentStub_(end ? fallbackStub : icEntry_->firstStub()),
      unlinked_(false) {}

ICStubIterator& ICStubIterator::operator++() {
  MOZ_ASSERT(currentStub_->next() != nullptr);
  if (!unlinked_) {
    previousStub_ = currentStub_;
  }
  currentStub_ = currentStub_->next();
  unlinked_ = false;
  return *this;
}

void ICStubIterator::unlink(JSContext* cx, JSScript* script) {
  MOZ_ASSERT(currentStub_->next() != nullptr);
  MOZ_ASSERT(currentStub_ != fallbackStub_);
  MOZ_ASSERT(!unlinked_);

  fallbackStub_->maybeInvalidateWarp(cx, script);
  fallbackStub_->unlinkStubDontInvalidateWarp(cx->zone(), previousStub_,
                                              currentStub_);

  // Mark the current iterator position as unlinked, so operator++ works
  // properly.
  unlinked_ = true;
}
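// Note (added): because unlink() sets unlinked_, the following operator++
// leaves previousStub_ in place instead of advancing it, so it is safe to
// unlink stubs while iterating. ICFallbackStub::discardStubs below relies on
// exactly this pattern:
//
//   for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
//     iter.unlink(cx, script);
//   }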
/* static */
bool ICStub::NonCacheIRStubMakesGCCalls(Kind kind) {
  MOZ_ASSERT(IsValidKind(kind));
  MOZ_ASSERT(!IsCacheIRKind(kind));

  switch (kind) {
    case Call_Fallback:
      // These three fallback stubs don't actually make non-tail calls,
      // but the fallback code for the bailout path needs to pop the stub frame
      // pushed during the bailout.
    case GetProp_Fallback:
    case SetProp_Fallback:
    case GetElem_Fallback:
      return true;
    default:
      return false;
  }
}
bool ICStub::makesGCCalls() const {
  switch (kind()) {
    case CacheIR_Regular:
      return toCacheIR_Regular()->stubInfo()->makesGCCalls();
    case CacheIR_Monitored:
      return toCacheIR_Monitored()->stubInfo()->makesGCCalls();
    case CacheIR_Updated:
      return toCacheIR_Updated()->stubInfo()->makesGCCalls();
    default:
      return NonCacheIRStubMakesGCCalls(kind());
  }
}

uint32_t ICStub::getEnteredCount() const {
  switch (kind()) {
    case CacheIR_Regular:
      return toCacheIR_Regular()->enteredCount();
    case CacheIR_Updated:
      return toCacheIR_Updated()->enteredCount();
    case CacheIR_Monitored:
      return toCacheIR_Monitored()->enteredCount();
    default:
      return toFallbackStub()->enteredCount();
  }
}
void ICFallbackStub::maybeInvalidateWarp(JSContext* cx, JSScript* script) {
  if (!state_.usedByTranspiler()) {
    return;
  }

  MOZ_ASSERT(JitOptions.warpBuilder);
  clearUsedByTranspiler();

  if (script->hasIonScript()) {
    Invalidate(cx, script);
  }
}

void ICStub::updateCode(JitCode* code) {
  // Write barrier on the old code.
  JitCode::writeBarrierPre(jitCode());
  stubCode_ = code->raw();
}
void ICStub::trace(JSTracer* trc) {
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
  checkTraceMagic();
#endif
  // Fallback stubs use runtime-wide trampoline code we don't need to trace.
  if (!usesTrampolineCode()) {
    JitCode* stubJitCode = jitCode();
    TraceManuallyBarrieredEdge(trc, &stubJitCode, "baseline-ic-stub-code");
  }

  // If the stub is a monitored fallback stub, then trace the monitor ICs
  // hanging off of that stub. We don't need to worry about the regular
  // monitored stubs, because the regular monitored stubs will always have a
  // monitored fallback stub that references the same stub chain.
  if (isMonitoredFallback()) {
    ICTypeMonitor_Fallback* lastMonStub =
        toMonitoredFallbackStub()->maybeFallbackMonitorStub();
    if (lastMonStub) {
      for (ICStubConstIterator iter(lastMonStub->firstMonitorStub());
           !iter.atEnd(); iter++) {
        MOZ_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
        iter->trace(trc);
      }
    }
  }

  if (isUpdated()) {
    for (ICStubConstIterator iter(toUpdatedStub()->firstUpdateStub());
         !iter.atEnd(); iter++) {
      MOZ_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
      iter->trace(trc);
    }
  }

  switch (kind()) {
    case ICStub::TypeMonitor_SingleObject: {
      ICTypeMonitor_SingleObject* monitorStub = toTypeMonitor_SingleObject();
      TraceEdge(trc, &monitorStub->object(), "baseline-monitor-singleton");
      break;
    }
    case ICStub::TypeMonitor_ObjectGroup: {
      ICTypeMonitor_ObjectGroup* monitorStub = toTypeMonitor_ObjectGroup();
      TraceEdge(trc, &monitorStub->group(), "baseline-monitor-group");
      break;
    }
    case ICStub::TypeUpdate_SingleObject: {
      ICTypeUpdate_SingleObject* updateStub = toTypeUpdate_SingleObject();
      TraceEdge(trc, &updateStub->object(), "baseline-update-singleton");
      break;
    }
    case ICStub::TypeUpdate_ObjectGroup: {
      ICTypeUpdate_ObjectGroup* updateStub = toTypeUpdate_ObjectGroup();
      TraceEdge(trc, &updateStub->group(), "baseline-update-group");
      break;
    }
    case ICStub::NewArray_Fallback: {
      ICNewArray_Fallback* stub = toNewArray_Fallback();
      TraceNullableEdge(trc, &stub->templateObject(),
                        "baseline-newarray-template");
      TraceEdge(trc, &stub->templateGroup(),
                "baseline-newarray-template-group");
      break;
    }
    case ICStub::NewObject_Fallback: {
      ICNewObject_Fallback* stub = toNewObject_Fallback();
      TraceNullableEdge(trc, &stub->templateObject(),
                        "baseline-newobject-template");
      break;
    }
    case ICStub::Rest_Fallback: {
      ICRest_Fallback* stub = toRest_Fallback();
      TraceEdge(trc, &stub->templateObject(), "baseline-rest-template");
      break;
    }
    case ICStub::CacheIR_Regular:
      TraceCacheIRStub(trc, this, toCacheIR_Regular()->stubInfo());
      break;
    case ICStub::CacheIR_Monitored:
      TraceCacheIRStub(trc, this, toCacheIR_Monitored()->stubInfo());
      break;
    case ICStub::CacheIR_Updated: {
      ICCacheIR_Updated* stub = toCacheIR_Updated();
      TraceNullableEdge(trc, &stub->updateStubGroup(),
                        "baseline-update-stub-group");
      TraceEdge(trc, &stub->updateStubId(), "baseline-update-stub-id");
      TraceCacheIRStub(trc, this, stub->stubInfo());
      break;
    }
    default:
      break;
  }
}
// This helper handles ICState updates/transitions while attaching CacheIR
// stubs.
template <typename IRGenerator, typename... Args>
static void TryAttachStub(const char* name, JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, BaselineCacheIRStubKind kind,
                          Args&&... args) {
  if (stub->state().maybeTransition()) {
    stub->discardStubs(cx, frame->invalidationScript());
  }

  if (stub->state().canAttachStub()) {
    RootedScript script(cx, frame->script());
    ICScript* icScript = frame->icScript();
    jsbytecode* pc = stub->icEntry()->pc(script);

    bool attached = false;
    IRGenerator gen(cx, script, pc, stub->state().mode(),
                    std::forward<Args>(args)...);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICStub* newStub =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      kind, script, icScript, stub, &attached);
        if (newStub) {
          JitSpew(JitSpew_BaselineIC, "  Attached %s CacheIR stub", name);
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("Not expected in generic TryAttachStub");
        break;
    }
    if (!attached) {
      stub->state().trackNotAttached();
    }
  }
}
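// Illustrative usage (added; this mirrors DoToBoolFallback later in this
// file). The trailing Args are forwarded to the IRGenerator constructor, so
// each generator can take whatever operands its bytecode op needs:
//
//   TryAttachStub<ToBoolIRGenerator>("ToBool", cx, frame, stub,
//                                    BaselineCacheIRStubKind::Regular, arg);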
void ICFallbackStub::unlinkStubDontInvalidateWarp(Zone* zone, ICStub* prev,
                                                  ICStub* stub) {
  MOZ_ASSERT(stub->next());

  if (prev) {
    MOZ_ASSERT(prev->next() == stub);
    prev->setNext(stub->next());
  } else {
    MOZ_ASSERT(icEntry()->firstStub() == stub);
    icEntry()->setFirstStub(stub->next());
  }

  state_.trackUnlinkedStub();

  if (zone->needsIncrementalBarrier()) {
    // We are removing edges from ICStub to gcthings. Perform one final trace
    // of the stub for incremental GC, as it must know about those edges.
    stub->trace(zone->barrierTracer());
  }

  if (IsTypeInferenceEnabled() && stub->makesGCCalls() && stub->isMonitored()) {
    // This stub can make calls so we can return to it if it's on the stack.
    // We just have to reset its firstMonitorStub_ field to avoid a stale
    // pointer when purgeOptimizedStubs destroys all optimized monitor
    // stubs (unlinked stubs won't be updated).
    ICTypeMonitor_Fallback* monitorFallback =
        toMonitoredFallbackStub()->maybeFallbackMonitorStub();
    MOZ_ASSERT(monitorFallback);
    stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
  }

#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
  stub->checkTraceMagic();
#endif
#ifdef DEBUG
  // Poison stub code to ensure we don't call this stub again. However, if
  // this stub can make calls, a pointer to it may be stored in a stub frame
  // on the stack, so we can't touch the stubCode_ or GC will crash when
  // tracing this pointer.
  if (!stub->makesGCCalls()) {
    stub->stubCode_ = (uint8_t*)0xbad;
  }
#endif
}
void ICFallbackStub::discardStubs(JSContext* cx, JSScript* script) {
  for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
    iter.unlink(cx, script);
  }
}

void ICTypeMonitor_Fallback::resetMonitorStubChain(Zone* zone) {
  if (zone->needsIncrementalBarrier()) {
    // We are removing edges from monitored stubs to gcthings (JitCode).
    // Perform one final trace of all monitor stubs for incremental GC,
    // as it must know about those edges.
    for (ICStub* s = firstMonitorStub_; !s->isTypeMonitor_Fallback();
         s = s->next()) {
      s->trace(zone->barrierTracer());
    }
  }

  firstMonitorStub_ = this;
  numOptimizedMonitorStubs_ = 0;

  if (hasFallbackStub_) {
    lastMonitorStubPtrAddr_ = nullptr;

    // Reset firstMonitorStub_ field of all monitored stubs.
    for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
         !iter.atEnd(); iter++) {
      if (!iter->isMonitored()) {
        continue;
      }
      iter->toMonitoredStub()->resetFirstMonitorStub(this);
    }
  } else {
    icEntry_->setFirstStub(this);
    lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
  }
}

void ICCacheIR_Updated::resetUpdateStubChain(Zone* zone) {
  while (!firstUpdateStub_->isTypeUpdate_Fallback()) {
    if (zone->needsIncrementalBarrier()) {
      // We are removing edges from update stubs to gcthings (JitCode).
      // Perform one final trace of all update stubs for incremental GC,
      // as it must know about those edges.
      firstUpdateStub_->trace(zone->barrierTracer());
    }
    firstUpdateStub_ = firstUpdateStub_->next();
  }

  numOptimizedStubs_ = 0;
}
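// Note (added): both reset paths above rely on the invariant that a monitor or
// update chain is always terminated by its fallback stub, which is why the
// loops can walk next() pointers until they hit isTypeMonitor_Fallback() /
// isTypeUpdate_Fallback() without a null check.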
ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode* stubCode,
                                 ICStub* firstMonitorStub)
    : ICStub(kind, ICStub::Monitored, stubCode),
      firstMonitorStub_(firstMonitorStub) {
  if (IsTypeInferenceEnabled()) {
    // Assert non-null, mainly to silence Coverity's null-pointer-dereference
    // checker.
    MOZ_ASSERT(firstMonitorStub_);
    // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
    // double check that _its_ firstMonitorStub is the same as this one.
    MOZ_ASSERT_IF(
        firstMonitorStub_->isTypeMonitor_Fallback(),
        firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
            firstMonitorStub_);
  } else {
    MOZ_ASSERT(!firstMonitorStub_);
  }
}

bool ICMonitoredFallbackStub::initMonitoringChain(JSContext* cx,
                                                  JSScript* script) {
  MOZ_ASSERT(fallbackMonitorStub_ == nullptr);
  MOZ_ASSERT(IsTypeInferenceEnabled());

  ICStubSpace* space = script->jitScript()->fallbackStubSpace();
  FallbackStubAllocator alloc(cx, *space);
  auto* stub = alloc.newStub<ICTypeMonitor_Fallback>(
      BaselineICFallbackKind::TypeMonitor, this);
  if (!stub) {
    return false;
  }

  fallbackMonitorStub_ = stub;
  return true;
}

bool TypeMonitorResult(JSContext* cx, ICMonitoredFallbackStub* stub,
                       BaselineFrame* frame, HandleScript script,
                       jsbytecode* pc, HandleValue val) {
  if (!IsTypeInferenceEnabled()) {
    return true;
  }

  ICTypeMonitor_Fallback* typeMonitorFallback =
      stub->getFallbackMonitorStub(cx, script);
  if (!typeMonitorFallback) {
    return false;
  }

  AutoSweepJitScript sweep(script);
  StackTypeSet* types = script->jitScript()->bytecodeTypes(sweep, script, pc);
  JitScript::MonitorBytecodeType(cx, script, pc, types, val);

  return typeMonitorFallback->addMonitorStubForValue(cx, frame, types, val);
}

bool ICCacheIR_Updated::initUpdatingChain(JSContext* cx, ICStubSpace* space) {
  MOZ_ASSERT(firstUpdateStub_ == nullptr);

  FallbackStubAllocator alloc(cx, *space);
  auto* stub =
      alloc.newStub<ICTypeUpdate_Fallback>(BaselineICFallbackKind::TypeUpdate);
  if (!stub) {
    return false;
  }

  firstUpdateStub_ = stub;
  return true;
}
/* static */
ICStubSpace* ICStubCompiler::StubSpaceForStub(bool makesGCCalls,
                                              JSScript* script,
                                              ICScript* icScript) {
  if (makesGCCalls) {
    return icScript ? icScript->fallbackStubSpace()
                    : script->jitScript()->fallbackStubSpace();
  }
  return script->zone()->jitZone()->optimizedStubSpace();
}
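// Note (added; our reading of the code above and the unlink comments earlier):
// stubs that can make GC calls may be live in a stub frame on the stack, so
// they are allocated in the per-script fallback stub space, whose lifetime is
// tied to the script; stubs that never call out can live in the zone-wide
// optimized stub space, which can be purged more aggressively.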
static void InitMacroAssemblerForICStub(StackMacroAssembler& masm) {
#ifndef JS_USE_LINK_REGISTER
  // The first value contains the return address,
  // which we pull into ICTailCallReg for tail calls.
  masm.adjustFrame(sizeof(intptr_t));
#endif
#ifdef JS_CODEGEN_ARM
  masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif
}
JitCode* ICStubCompiler::getStubCode() {
  JitRealm* realm = cx->realm()->jitRealm();

  // Check for existing cached stubcode.
  uint32_t stubKey = getKey();
  JitCode* stubCode = realm->getStubCode(stubKey);
  if (stubCode) {
    return stubCode;
  }

  // Compile new stubcode.
  JitContext jctx(cx, nullptr);
  StackMacroAssembler masm;
  InitMacroAssemblerForICStub(masm);

  if (!generateStubCode(masm)) {
    return nullptr;
  }

  Linker linker(masm);
  Rooted<JitCode*> newStubCode(cx, linker.newCode(cx, CodeKind::Baseline));
  if (!newStubCode) {
    return nullptr;
  }

  // Cache newly compiled stubcode.
  if (!realm->putStubCode(cx, stubKey, newStubCode)) {
    return nullptr;
  }

  MOZ_ASSERT(entersStubFrame_ == ICStub::NonCacheIRStubMakesGCCalls(kind));
  MOZ_ASSERT(!inStubFrame_);

#ifdef JS_ION_PERF
  writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

  return newStubCode;
}
bool ICStubCompilerBase::tailCallVMInternal(MacroAssembler& masm,
                                            TailCallVMFunctionId id) {
  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
  const VMFunctionData& fun = GetVMFunction(id);
  MOZ_ASSERT(fun.expectTailCall == TailCall);
  uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
  EmitBaselineTailCallVM(code, masm, argSize);
  return true;
}

bool ICStubCompilerBase::callVMInternal(MacroAssembler& masm, VMFunctionId id) {
  MOZ_ASSERT(inStubFrame_);

  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
  MOZ_ASSERT(GetVMFunction(id).expectTailCall == NonTailCall);

  EmitBaselineCallVM(code, masm);
  return true;
}

template <typename Fn, Fn fn>
bool ICStubCompilerBase::callVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  return callVMInternal(masm, id);
}

template <typename Fn, Fn fn>
bool ICStubCompilerBase::tailCallVM(MacroAssembler& masm) {
  TailCallVMFunctionId id = TailCallVMFunctionToId<Fn, fn>::id;
  return tailCallVMInternal(masm, id);
}
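// Illustrative usage (added; this is the pattern the emit_* methods below
// follow). The Fn type must match the VM function's signature exactly, e.g.:
//
//   using Fn = bool (*)(JSContext*, BaselineFrame*, ICToBool_Fallback*,
//                       HandleValue, MutableHandleValue);
//   return tailCallVM<Fn, DoToBoolFallback>(masm);
//
// VMFunctionToId/TailCallVMFunctionToId resolve the <Fn, fn> pair to its
// trampoline id at compile time, so a mismatched signature is caught by the
// compiler rather than at runtime.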
void ICStubCompilerBase::enterStubFrame(MacroAssembler& masm,
                                        Register scratch) {
  EmitBaselineEnterStubFrame(masm, scratch);
#ifdef DEBUG
  framePushedAtEnterStubFrame_ = masm.framePushed();
#endif

  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;
#endif
}

void ICStubCompilerBase::assumeStubFrame() {
  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;

  // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
  // be STUB_FRAME_SIZE so that assertions don't fail in leaveStubFrame.
  framePushedAtEnterStubFrame_ = STUB_FRAME_SIZE;
#endif
}

void ICStubCompilerBase::leaveStubFrame(MacroAssembler& masm,
                                        bool calledIntoIon) {
  MOZ_ASSERT(entersStubFrame_ && inStubFrame_);
  inStubFrame_ = false;

#ifdef DEBUG
  masm.setFramePushed(framePushedAtEnterStubFrame_);
  if (calledIntoIon) {
    masm.adjustFrame(sizeof(intptr_t));  // Calls into ion have this extra.
  }
#endif
  EmitBaselineLeaveStubFrame(masm, calledIntoIon);
}

void ICStubCompilerBase::pushStubPayload(MacroAssembler& masm,
                                         Register scratch) {
  if (inStubFrame_) {
    masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
    masm.pushBaselineFramePtr(scratch, scratch);
  } else {
    masm.pushBaselineFramePtr(BaselineFrameReg, scratch);
  }
}

void ICStubCompilerBase::PushStubPayload(MacroAssembler& masm,
                                         Register scratch) {
  pushStubPayload(masm, scratch);
  masm.adjustFrame(sizeof(intptr_t));
}
//
// TypeMonitor_Fallback
//

bool ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext* cx,
                                                    BaselineFrame* frame,
                                                    StackTypeSet* types,
                                                    HandleValue val) {
  MOZ_ASSERT(types);

  if (MOZ_UNLIKELY(val.isMagic())) {
    return true;
  }

  // Don't attach too many SingleObject/ObjectGroup stubs. If the value is a
  // primitive or if we will attach an any-object stub, we can handle this
  // with a single PrimitiveSet or AnyValue stub so we always optimize.
  if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS && val.isObject() &&
      !types->unknownObject()) {
    return true;
  }

  bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
  MOZ_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

  if (types->unknown()) {
    // The TypeSet got marked as unknown so attach a stub that always
    // succeeds.

    // Check for existing TypeMonitor_AnyValue stubs.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_AnyValue()) {
        return true;
      }
    }

    // Discard existing stubs.
    resetMonitorStubChain(cx->zone());
    wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);

    ICTypeMonitor_AnyValue::Compiler compiler(cx);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeMonitor stub %p for any value",
            stub);
    addOptimizedMonitorStub(stub);

  } else if (val.isPrimitive() || types->unknownObject()) {
    ValueType type = val.type();

    // Check for existing TypeMonitor stub.
    ICTypeMonitor_PrimitiveSet* existingStub = nullptr;
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_PrimitiveSet()) {
        existingStub = iter->toTypeMonitor_PrimitiveSet();
        if (existingStub->containsType(type)) {
          return true;
        }
      }
    }

    if (val.isObject()) {
      // Check for existing SingleObject/ObjectGroup stubs and discard
      // stubs if we find one. Ideally we would discard just these stubs,
      // but unlinking individual type monitor stubs is somewhat
      // complicated.
      MOZ_ASSERT(types->unknownObject());
      bool hasObjectStubs = false;
      for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd();
           iter++) {
        if (iter->isTypeMonitor_SingleObject() ||
            iter->isTypeMonitor_ObjectGroup()) {
          hasObjectStubs = true;
          break;
        }
      }
      if (hasObjectStubs) {
        resetMonitorStubChain(cx->zone());
        wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);
        existingStub = nullptr;
      }
    }

    ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
    ICStub* stub =
        existingStub ? compiler.updateStub()
                     : compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC,
            "  %s TypeMonitor stub %p for primitive type %u",
            existingStub ? "Modified existing" : "Created new", stub,
            static_cast<uint8_t>(type));

    if (!existingStub) {
      MOZ_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
      addOptimizedMonitorStub(stub);
    }

  } else if (val.toObject().isSingleton()) {
    RootedObject obj(cx, &val.toObject());

    // Check for existing TypeMonitor stub.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_SingleObject() &&
          iter->toTypeMonitor_SingleObject()->object() == obj) {
        return true;
      }
    }

    ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeMonitor stub %p for singleton %p",
            stub, obj.get());

    addOptimizedMonitorStub(stub);

  } else {
    RootedObjectGroup group(cx, val.toObject().group());

    // Check for existing TypeMonitor stub.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_ObjectGroup() &&
          iter->toTypeMonitor_ObjectGroup()->group() == group) {
        return true;
      }
    }

    ICTypeMonitor_ObjectGroup::Compiler compiler(cx, group);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC,
            "  Added TypeMonitor stub %p for ObjectGroup %p", stub,
            group.get());

    addOptimizedMonitorStub(stub);
  }

  bool firstMonitorStubAdded =
      wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

  if (firstMonitorStubAdded) {
    // Was an empty monitor chain before, but a new stub was added. This is the
    // only time that any main stubs' firstMonitorStub fields need to be
    // updated to refer to the newly added monitor stub.
    ICStub* firstStub = mainFallbackStub_->icEntry()->firstStub();
    for (ICStubConstIterator iter(firstStub); !iter.atEnd(); iter++) {
      // Non-monitored stubs are used if the result always has the same type,
      // e.g. a StringLength stub will always return int32.
      if (!iter->isMonitored()) {
        continue;
      }

      // Since we just added the first optimized monitoring stub, any
      // existing main stub's |firstMonitorStub| MUST be pointing to the
      // fallback monitor stub (i.e. this stub).
      MOZ_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
      iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
    }
  }

  return true;
}
bool DoTypeMonitorFallback(JSContext* cx, BaselineFrame* frame,
                           ICTypeMonitor_Fallback* stub, HandleValue value,
                           MutableHandleValue res) {
  MOZ_ASSERT(IsTypeInferenceEnabled());

  JSScript* script = frame->script();
  jsbytecode* pc = stub->icEntry()->pc(script);
  TypeFallbackICSpew(cx, stub, "TypeMonitor");

  // Copy input value to res.
  res.set(value);

  JitScript* jitScript = script->jitScript();
  AutoSweepJitScript sweep(script);

  StackTypeSet* types;
  uint32_t argument;
  if (stub->monitorsArgument(&argument)) {
    MOZ_ASSERT(pc == script->code());
    types = jitScript->argTypes(sweep, script, argument);
    JitScript::MonitorArgType(cx, script, argument, value);
  } else if (stub->monitorsThis()) {
    MOZ_ASSERT(pc == script->code());
    types = jitScript->thisTypes(sweep, script);
    JitScript::MonitorThisType(cx, script, value);
  } else {
    types = jitScript->bytecodeTypes(sweep, script, pc);
    JitScript::MonitorBytecodeType(cx, script, pc, types, value);
  }

  return stub->addMonitorStubForValue(cx, frame, types, value);
}

bool FallbackICCodeCompiler::emit_TypeMonitor() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICTypeMonitor_Fallback*,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoTypeMonitorFallback>(masm);
}
bool ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label success;
  if ((flags_ & TypeToFlag(ValueType::Int32)) &&
      !(flags_ & TypeToFlag(ValueType::Double))) {
    masm.branchTestInt32(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Double)) {
    masm.branchTestNumber(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Undefined)) {
    masm.branchTestUndefined(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Boolean)) {
    masm.branchTestBoolean(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::String)) {
    masm.branchTestString(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Symbol)) {
    masm.branchTestSymbol(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::BigInt)) {
    masm.branchTestBigInt(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Object)) {
    masm.branchTestObject(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Null)) {
    masm.branchTestNull(Assembler::Equal, R0, &success);
  }

  EmitStubGuardFailure(masm);

  masm.bind(&success);
  EmitReturnFromIC(masm);
  return true;
}
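// Note (added; TypeToFlag is defined elsewhere and presumably maps each
// ValueType to a distinct bit, e.g. 1 << uint8_t(type)): the emitted stub is a
// linear chain of type tests, one per bit set in flags_, each branching to
// success. When Double is in the set, branchTestNumber also accepts Int32
// values, which is why the separate Int32 test is only emitted when Double is
// absent.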
static void MaybeWorkAroundAmdBug(MacroAssembler& masm) {
  // Attempt to work around an AMD bug (see bug 1034706 and bug 1281759) by
  // inserting 32 bytes of NOPs.
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
  if (CPUInfo::NeedAmdBugWorkaround()) {
    masm.nop(9);
    masm.nop(9);
    masm.nop(9);
    masm.nop(5);
  }
#endif
}
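// Note (added): each nop(n) call emits a multi-byte NOP instruction of the
// given encoded length, so 9 + 9 + 9 + 5 == 32 bytes in total, matching the
// comment above.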
bool ICTypeMonitor_SingleObject::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  masm.branchTestObject(Assembler::NotEqual, R0, &failure);
  MaybeWorkAroundAmdBug(masm);

  // Guard on the object's identity.
  Register obj = masm.extractObject(R0, ExtractTemp0);
  Address expectedObject(ICStubReg,
                         ICTypeMonitor_SingleObject::offsetOfObject());
  masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);
  MaybeWorkAroundAmdBug(masm);

  EmitReturnFromIC(masm);
  MaybeWorkAroundAmdBug(masm);

  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}

bool ICTypeMonitor_ObjectGroup::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  masm.branchTestObject(Assembler::NotEqual, R0, &failure);
  MaybeWorkAroundAmdBug(masm);

  // Guard on the object's ObjectGroup. No Spectre mitigations are needed
  // here: we're just recording type information for Ion compilation and
  // it's safe to speculatively return.
  Register obj = masm.extractObject(R0, ExtractTemp0);
  Address expectedGroup(ICStubReg, ICTypeMonitor_ObjectGroup::offsetOfGroup());
  masm.branchTestObjGroupNoSpectreMitigations(
      Assembler::NotEqual, obj, expectedGroup, R1.scratchReg(), &failure);
  MaybeWorkAroundAmdBug(masm);

  EmitReturnFromIC(masm);
  MaybeWorkAroundAmdBug(masm);

  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}

bool ICTypeMonitor_AnyValue::Compiler::generateStubCode(MacroAssembler& masm) {
  EmitReturnFromIC(masm);
  return true;
}
bool ICCacheIR_Updated::addUpdateStubForValue(JSContext* cx,
                                              HandleScript outerScript,
                                              HandleObject obj,
                                              HandleObjectGroup group,
                                              HandleId id, HandleValue val) {
  MOZ_ASSERT(IsTypeInferenceEnabled());

  EnsureTrackPropertyTypes(cx, obj, id);

  // Make sure that undefined values are explicitly included in the property
  // types for an object if generating a stub to write an undefined value.
  if (val.isUndefined() && CanHaveEmptyPropertyTypesForOwnProperty(obj)) {
    MOZ_ASSERT(obj->group() == group);
    AddTypePropertyId(cx, obj, id, val);
  }

  bool unknown = false, unknownObject = false;
  AutoSweepObjectGroup sweep(group);
  if (group->unknownProperties(sweep)) {
    unknown = unknownObject = true;
  } else {
    if (HeapTypeSet* types = group->maybeGetProperty(sweep, id)) {
      unknown = types->unknown();
      unknownObject = types->unknownObject();
    } else {
      // We don't record null/undefined types for certain TypedObject
      // properties. In these cases |types| is allowed to be nullptr
      // without implying unknown types. See DoTypeUpdateFallback.
      MOZ_ASSERT(obj->is<TypedObject>());
      MOZ_ASSERT(val.isNullOrUndefined());
    }
  }
  MOZ_ASSERT_IF(unknown, unknownObject);

  // Don't attach too many SingleObject/ObjectGroup stubs unless we can
  // replace them with a single PrimitiveSet or AnyValue stub.
  if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS && val.isObject() &&
      !unknownObject) {
    return true;
  }

  if (unknown) {
    // Attach a stub that always succeeds. We should not have a
    // TypeUpdate_AnyValue stub yet.
    MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_AnyValue));

    // Discard existing stubs.
    resetUpdateStubChain(cx->zone());

    ICTypeUpdate_AnyValue::Compiler compiler(cx);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) {
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for any value",
            stub);
    addOptimizedUpdateStub(stub);

  } else if (val.isPrimitive() || unknownObject) {
    ValueType type = val.type();

    // Check for existing TypeUpdate stub.
    ICTypeUpdate_PrimitiveSet* existingStub = nullptr;
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      if (iter->isTypeUpdate_PrimitiveSet()) {
        existingStub = iter->toTypeUpdate_PrimitiveSet();
        MOZ_ASSERT(!existingStub->containsType(type));
      }
    }

    if (val.isObject()) {
      // Discard existing ObjectGroup/SingleObject stubs.
      resetUpdateStubChain(cx->zone());
      if (existingStub) {
        addOptimizedUpdateStub(existingStub);
      }
    }

    ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
    ICStub* stub = existingStub
                       ? compiler.updateStub()
                       : compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) {
      return false;
    }
    if (!existingStub) {
      MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
      addOptimizedUpdateStub(stub);
    }

    JitSpew(JitSpew_BaselineIC, "  %s TypeUpdate stub %p for primitive type %d",
            existingStub ? "Modified existing" : "Created new", stub,
            static_cast<uint8_t>(type));

  } else if (val.toObject().isSingleton()) {
    RootedObject obj(cx, &val.toObject());

#ifdef DEBUG
    // We should not have a stub for this object.
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      MOZ_ASSERT_IF(iter->isTypeUpdate_SingleObject(),
                    iter->toTypeUpdate_SingleObject()->object() != obj);
    }
#endif

    ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) {
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for singleton %p",
            stub, obj.get());

    addOptimizedUpdateStub(stub);

  } else {
    RootedObjectGroup group(cx, val.toObject().group());

#ifdef DEBUG
    // We should not have a stub for this group.
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      MOZ_ASSERT_IF(iter->isTypeUpdate_ObjectGroup(),
                    iter->toTypeUpdate_ObjectGroup()->group() != group);
    }
#endif

    ICTypeUpdate_ObjectGroup::Compiler compiler(cx, group);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) {
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for ObjectGroup %p",
            stub, group.get());

    addOptimizedUpdateStub(stub);
  }

  return true;
}
//
// TypeUpdate_Fallback
//

bool DoTypeUpdateFallback(JSContext* cx, BaselineFrame* frame,
                          ICCacheIR_Updated* stub, HandleValue objval,
                          HandleValue value) {
  // This can get called from optimized stubs. Therefore it is not allowed to
  // GC.
  JS::AutoCheckCannotGC nogc;

  MOZ_ASSERT(IsTypeInferenceEnabled());

  FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
                 ICStub::KindString(stub->kind()));

  MOZ_ASSERT(stub->isCacheIR_Updated());

  RootedScript script(cx, frame->script());
  RootedObject obj(cx, &objval.toObject());

  RootedId id(cx, stub->toCacheIR_Updated()->updateStubId());
  MOZ_ASSERT(id.get() != JSID_EMPTY);

  // The group should match the object's group.
  RootedObjectGroup group(cx, stub->toCacheIR_Updated()->updateStubGroup());
#ifdef DEBUG
  MOZ_ASSERT(obj->group() == group);
#endif

  // If we're storing null/undefined to a typed object property, check if
  // we want to include it in this property's type information.
  bool addType = true;
  if (MOZ_UNLIKELY(obj->is<TypedObject>()) && value.isNullOrUndefined()) {
    StructTypeDescr* structDescr =
        &obj->as<TypedObject>().typeDescr().as<StructTypeDescr>();
    size_t fieldIndex;
    MOZ_ALWAYS_TRUE(structDescr->fieldIndex(id, &fieldIndex));

    TypeDescr* fieldDescr = &structDescr->fieldDescr(fieldIndex);
    ReferenceType type = fieldDescr->as<ReferenceTypeDescr>().type();
    if (type == ReferenceType::TYPE_ANY) {
      // Ignore undefined values, which are included implicitly in type
      // information for this property.
      if (value.isUndefined()) {
        addType = false;
      }
    } else {
      MOZ_ASSERT(type == ReferenceType::TYPE_OBJECT ||
                 type == ReferenceType::TYPE_WASM_ANYREF);

      // Ignore null values being written here. Null is included
      // implicitly in type information for this property. Note that
      // non-object, non-null values are not possible here, these
      // should have been filtered out by the IR emitter.
      if (value.isNull()) {
        addType = false;
      }
    }
  }

  if (MOZ_LIKELY(addType)) {
    JSObject* maybeSingleton = obj->isSingleton() ? obj.get() : nullptr;
    AddTypePropertyId(cx, group, maybeSingleton, id, value);
  }

  if (MOZ_UNLIKELY(
          !stub->addUpdateStubForValue(cx, script, obj, group, id, value))) {
    // The calling JIT code assumes this function is infallible (for
    // instance we may reallocate dynamic slots before calling this),
    // so ignore OOMs if we failed to attach a stub.
    cx->recoverFromOutOfMemory();
  }

  return true;
}
bool FallbackICCodeCompiler::emit_TypeUpdate() {
  // Just store false into R1.scratchReg() and return.
  masm.move32(Imm32(0), R1.scratchReg());
  EmitReturnFromIC(masm);
  return true;
}
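// Note (added, inferred from the stubs below): type update ICs report their
// result in R1.scratchReg(). Optimized update stubs load 1 (true) when the
// value's type is already accounted for, while the fallback trampoline above
// loads 0 (false) so the calling code knows it must invoke DoTypeUpdateFallback
// to record the new type and attach a stub.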
bool ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label success;
  if ((flags_ & TypeToFlag(ValueType::Int32)) &&
      !(flags_ & TypeToFlag(ValueType::Double))) {
    masm.branchTestInt32(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Double)) {
    masm.branchTestNumber(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Undefined)) {
    masm.branchTestUndefined(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Boolean)) {
    masm.branchTestBoolean(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::String)) {
    masm.branchTestString(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Symbol)) {
    masm.branchTestSymbol(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::BigInt)) {
    masm.branchTestBigInt(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Object)) {
    masm.branchTestObject(Assembler::Equal, R0, &success);
  }

  if (flags_ & TypeToFlag(ValueType::Null)) {
    masm.branchTestNull(Assembler::Equal, R0, &success);
  }

  EmitStubGuardFailure(masm);

  // Type matches, load true into R1.scratchReg() and return.
  masm.bind(&success);
  masm.mov(ImmWord(1), R1.scratchReg());
  EmitReturnFromIC(masm);

  return true;
}
bool ICTypeUpdate_SingleObject::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  masm.branchTestObject(Assembler::NotEqual, R0, &failure);

  // Guard on the object's identity.
  Register obj = masm.extractObject(R0, R1.scratchReg());
  Address expectedObject(ICStubReg,
                         ICTypeUpdate_SingleObject::offsetOfObject());
  masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

  // Identity matches, load true into R1.scratchReg() and return.
  masm.mov(ImmWord(1), R1.scratchReg());
  EmitReturnFromIC(masm);

  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}

bool ICTypeUpdate_ObjectGroup::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;

  Register scratch1 = R1.scratchReg();
  masm.fallibleUnboxObject(R0, scratch1, &failure);

  // Guard on the object's ObjectGroup.
  Address expectedGroup(ICStubReg, ICTypeUpdate_ObjectGroup::offsetOfGroup());
  masm.branchTestObjGroup(Assembler::NotEqual, scratch1, expectedGroup,
                          scratch1, R0.payloadOrValueReg(), &failure);

  // Group matches, load true into R1.scratchReg() and return.
  masm.mov(ImmWord(1), R1.scratchReg());
  EmitReturnFromIC(masm);

  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}

bool ICTypeUpdate_AnyValue::Compiler::generateStubCode(MacroAssembler& masm) {
  // AnyValue always matches so return true.
  masm.mov(ImmWord(1), R1.scratchReg());
  EmitReturnFromIC(masm);
  return true;
}
//
// ToBool_Fallback
//

bool DoToBoolFallback(JSContext* cx, BaselineFrame* frame,
                      ICToBool_Fallback* stub, HandleValue arg,
                      MutableHandleValue ret) {
  stub->incrementEnteredCount();
  FallbackICSpew(cx, stub, "ToBool");

  MOZ_ASSERT(!arg.isBoolean());

  TryAttachStub<ToBoolIRGenerator>("ToBool", cx, frame, stub,
                                   BaselineCacheIRStubKind::Regular, arg);

  bool cond = ToBoolean(arg);
  ret.setBoolean(cond);

  return true;
}

bool FallbackICCodeCompiler::emit_ToBool() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Push arguments.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICToBool_Fallback*,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoToBoolFallback>(masm);
}
static void StripPreliminaryObjectStubs(JSContext* cx, ICFallbackStub* stub,
                                        JSScript* script) {
  // Before the new script properties analysis has been performed on a type,
  // all instances of that type have the maximum number of fixed slots.
  // Afterwards, the objects (even the preliminary ones) might be changed
  // to reduce the number of fixed slots they have. If we generate stubs for
  // both the old and new number of fixed slots, the stub will look
  // polymorphic to IonBuilder when it is actually monomorphic. To avoid
  // this, strip out any stubs for preliminary objects before attaching a new
  // stub which isn't on a preliminary object.

  for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
    if (iter->isCacheIR_Regular() &&
        iter->toCacheIR_Regular()->hasPreliminaryObject()) {
      iter.unlink(cx, script);
    } else if (iter->isCacheIR_Monitored() &&
               iter->toCacheIR_Monitored()->hasPreliminaryObject()) {
      iter.unlink(cx, script);
    } else if (iter->isCacheIR_Updated() &&
               iter->toCacheIR_Updated()->hasPreliminaryObject()) {
      iter.unlink(cx, script);
    }
  }
}
static bool TryAttachGetPropStub(const char* name, JSContext* cx,
                                 BaselineFrame* frame, ICFallbackStub* stub,
                                 CacheKind kind, HandleValue val,
                                 HandleValue idVal, HandleValue receiver) {
  bool attached = false;

  if (stub->state().maybeTransition()) {
    stub->discardStubs(cx, frame->invalidationScript());
  }

  if (stub->state().canAttachStub()) {
    RootedScript script(cx, frame->script());
    ICScript* icScript = frame->icScript();
    jsbytecode* pc = stub->icEntry()->pc(script);

    GetPropIRGenerator gen(cx, script, pc, stub->state().mode(), kind, val,
                           idVal, receiver, GetPropertyResultFlags::All);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICStub* newStub =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      BaselineCacheIRStubKind::Monitored,
                                      script, icScript, stub, &attached);
        if (newStub) {
          JitSpew(JitSpew_BaselineIC, "  Attached %s CacheIR stub", name);
          if (gen.shouldNotePreliminaryObjectStub()) {
            newStub->toCacheIR_Monitored()->notePreliminaryObject();
          } else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
            StripPreliminaryObjectStubs(cx, stub, frame->invalidationScript());
          }
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
        attached = true;
        break;
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("No deferred GetProp stubs");
        break;
    }
  }
  return attached;
}
1854 // GetElem_Fallback
1857 bool DoGetElemFallback(JSContext* cx, BaselineFrame* frame,
1858 ICGetElem_Fallback* stub, HandleValue lhs,
1859 HandleValue rhs, MutableHandleValue res) {
1860 stub->incrementEnteredCount();
1862 RootedScript script(cx, frame->script());
1863 jsbytecode* pc = stub->icEntry()->pc(frame->script());
1865 JSOp op = JSOp(*pc);
1866 FallbackICSpew(cx, stub, "GetElem(%s)", CodeName(op));
1868 MOZ_ASSERT(op == JSOp::GetElem || op == JSOp::CallElem);
1870 // Don't pass lhs directly, we need it when generating stubs.
1871 RootedValue lhsCopy(cx, lhs);
1873 bool isOptimizedArgs = false;
1874 if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
1875 // Handle optimized arguments[i] access.
1876 if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res,
1877 &isOptimizedArgs)) {
1878 return false;
1880 if (isOptimizedArgs) {
1881 if (!TypeMonitorResult(cx, stub, frame, script, pc, res)) {
1882 return false;
1887 bool attached = TryAttachGetPropStub("GetElem", cx, frame, stub,
1888 CacheKind::GetElem, lhs, rhs, lhs);
1890 if (!isOptimizedArgs) {
1891 if (!GetElementOperation(cx, op, lhsCopy, rhs, res)) {
1892 return false;
1895 if (!TypeMonitorResult(cx, stub, frame, script, pc, res)) {
1896 return false;
1900 if (attached) {
1901 return true;
1904 // GetElem operations which could access negative indexes generally can't
1905 // be optimized without the potential for bailouts, as we can't statically
1906 // determine that an object has no properties on such indexes.
1907 if (rhs.isNumber() && rhs.toNumber() < 0) {
1908 stub->noteNegativeIndex();
1911 // GetElem operations which could access non-integer indexes generally can't
1912 // be optimized without the potential for bailouts.
1913 int32_t representable;
1914 if (rhs.isNumber() && rhs.isDouble() &&
1915 !mozilla::NumberEqualsInt32(rhs.toDouble(), &representable)) {
1916 stub->setSawNonIntegerIndex();
1919 return true;
1922 bool DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame,
1923 ICGetElem_Fallback* stub, HandleValue lhs,
1924 HandleValue rhs, HandleValue receiver,
1925 MutableHandleValue res) {
1926 stub->incrementEnteredCount();
1928 RootedScript script(cx, frame->script());
1929 jsbytecode* pc = stub->icEntry()->pc(frame->script());
1931 JSOp op = JSOp(*pc);
1932 FallbackICSpew(cx, stub, "GetElemSuper(%s)", CodeName(op));
1934 MOZ_ASSERT(op == JSOp::GetElemSuper);
1936 bool attached =
1937 TryAttachGetPropStub("GetElemSuper", cx, frame, stub,
1938 CacheKind::GetElemSuper, lhs, rhs, receiver);
1940 // |lhs| is [[HomeObject]].[[Prototype]], which must be an Object.
1941 RootedObject lhsObj(cx, &lhs.toObject());
1942 if (!GetObjectElementOperation(cx, op, lhsObj, receiver, rhs, res)) {
1943 return false;
1946 if (!TypeMonitorResult(cx, stub, frame, script, pc, res)) {
1947 return false;
1950 if (attached) {
1951 return true;
1954 // GetElem operations which could access negative indexes generally can't
1955 // be optimized without the potential for bailouts, as we can't statically
1956 // determine that an object has no properties on such indexes.
1957 if (rhs.isNumber() && rhs.toNumber() < 0) {
1958 stub->noteNegativeIndex();
1961 // GetElem operations which could access non-integer indexes generally can't
1962 // be optimized without the potential for bailouts.
1963 int32_t representable;
1964 if (rhs.isNumber() && rhs.isDouble() &&
1965 !mozilla::NumberEqualsInt32(rhs.toDouble(), &representable)) {
1966 stub->setSawNonIntegerIndex();
1969 return true;
1972 bool FallbackICCodeCompiler::emitGetElem(bool hasReceiver) {
1973 static_assert(R0 == JSReturnOperand);
1975 // Restore the tail call register.
1976 EmitRestoreTailCallReg(masm);
1978 // Super property getters use a |this| that differs from the base object.
1979 if (hasReceiver) {
1980 // State: receiver in R0, index in R1, obj on the stack
1982 // Ensure stack is fully synced for the expression decompiler.
1983 // We need: receiver, index, obj
1984 masm.pushValue(R0);
1985 masm.pushValue(R1);
1986 masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 2));
1988 // Push arguments.
1989 masm.pushValue(R0); // Receiver
1990 masm.pushValue(R1); // Index
1991 masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 5)); // Obj
1992 masm.push(ICStubReg);
1993 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1995 using Fn =
1996 bool (*)(JSContext*, BaselineFrame*, ICGetElem_Fallback*, HandleValue,
1997 HandleValue, HandleValue, MutableHandleValue);
1998 if (!tailCallVM<Fn, DoGetElemSuperFallback>(masm)) {
1999 return false;
2001 } else {
2002 // Ensure stack is fully synced for the expression decompiler.
2003 masm.pushValue(R0);
2004 masm.pushValue(R1);
2006 // Push arguments.
2007 masm.pushValue(R1);
2008 masm.pushValue(R0);
2009 masm.push(ICStubReg);
2010 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
2012 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetElem_Fallback*,
2013 HandleValue, HandleValue, MutableHandleValue);
2014 if (!tailCallVM<Fn, DoGetElemFallback>(masm)) {
2015 return false;
2019 // This is the resume point used when a bailout rewrites the call stack to
2020 // undo Ion inlined frames. The return address pushed onto the reconstructed
2021 // stack will point here.
2022 assumeStubFrame();
2023 if (hasReceiver) {
2024 code.initBailoutReturnOffset(BailoutReturnKind::GetElemSuper,
2025 masm.currentOffset());
2026 } else {
2027 code.initBailoutReturnOffset(BailoutReturnKind::GetElem,
2028 masm.currentOffset());
2031 leaveStubFrame(masm, true);
2033 if (!IsTypeInferenceEnabled()) {
2034 EmitReturnFromIC(masm);
2035 return true;
2038 // When we get here, ICStubReg contains the ICGetElem_Fallback stub,
2039 // which we can't use to enter the TypeMonitor IC, because it's a
2040 // MonitoredFallbackStub instead of a MonitoredStub. So, we cheat. Note that
2041 // we must have a non-null fallbackMonitorStub here because
2042 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
2043 masm.loadPtr(Address(ICStubReg,
2044 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
2045 ICStubReg);
2046 EmitEnterTypeMonitorIC(masm,
2047 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
2049 return true;
2052 bool FallbackICCodeCompiler::emit_GetElem() {
2053 return emitGetElem(/* hasReceiver = */ false);
2056 bool FallbackICCodeCompiler::emit_GetElemSuper() {
2057 return emitGetElem(/* hasReceiver = */ true);
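// Copies the group/id pair recorded by the IR generator's type-check info
// into an updated stub, telling the type-update machinery which property's
// type set to guard.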
2060 static void SetUpdateStubData(ICCacheIR_Updated* stub,
2061 const PropertyTypeCheckInfo* info) {
2062 if (info->isSet()) {
2063 stub->updateStubGroup() = info->group();
2064 stub->updateStubId() = info->id();
2068 bool DoSetElemFallback(JSContext* cx, BaselineFrame* frame,
2069 ICSetElem_Fallback* stub, Value* stack, HandleValue objv,
2070 HandleValue index, HandleValue rhs) {
2071 using DeferType = SetPropIRGenerator::DeferType;
2073 stub->incrementEnteredCount();
2075 RootedScript script(cx, frame->script());
2076 RootedScript outerScript(cx, script);
2077 jsbytecode* pc = stub->icEntry()->pc(script);
2078 JSOp op = JSOp(*pc);
2079 FallbackICSpew(cx, stub, "SetElem(%s)", CodeName(JSOp(*pc)));
2081 MOZ_ASSERT(op == JSOp::SetElem || op == JSOp::StrictSetElem ||
2082 op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
2083 op == JSOp::InitLockedElem || op == JSOp::InitElemArray ||
2084 op == JSOp::InitElemInc);
2086 int objvIndex = -3;
2087 RootedObject obj(
2088 cx, ToObjectFromStackForPropertyAccess(cx, objv, objvIndex, index));
2089 if (!obj) {
2090 return false;
2093 RootedShape oldShape(cx, obj->shape());
2094 RootedObjectGroup oldGroup(cx, JSObject::getGroup(cx, obj));
2095 if (!oldGroup) {
2096 return false;
2099 // We cannot attach a stub if the operation that executes after the stub
2100 // is attached might throw.
2101 bool mayThrow = false;
2103 DeferType deferType = DeferType::None;
2104 bool attached = false;
2106 if (stub->state().maybeTransition()) {
2107 stub->discardStubs(cx, frame->invalidationScript());
2110 if (stub->state().canAttachStub() && !mayThrow) {
2111 ICScript* icScript = frame->icScript();
2112 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem,
2113 stub->state().mode(), objv, index, rhs);
2114 switch (gen.tryAttachStub()) {
2115 case AttachDecision::Attach: {
2116 ICStub* newStub = AttachBaselineCacheIRStub(
2117 cx, gen.writerRef(), gen.cacheKind(),
2118 BaselineCacheIRStubKind::Updated, frame->script(), icScript, stub,
2119 &attached);
2120 if (newStub) {
2121 JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
2123 SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
2125 if (gen.shouldNotePreliminaryObjectStub()) {
2126 newStub->toCacheIR_Updated()->notePreliminaryObject();
2127 } else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
2128 StripPreliminaryObjectStubs(cx, stub, frame->invalidationScript());
2131 if (gen.attachedTypedArrayOOBStub()) {
2132 stub->noteHasTypedArrayOOB();
2135 } break;
2136 case AttachDecision::NoAction:
2137 break;
2138 case AttachDecision::TemporarilyUnoptimizable:
2139 attached = true;
2140 break;
2141 case AttachDecision::Deferred:
2142 deferType = gen.deferType();
2143 MOZ_ASSERT(deferType != DeferType::None);
2144 break;
2148 if (op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
2149 op == JSOp::InitLockedElem) {
2150 if (!InitElemOperation(cx, pc, obj, index, rhs)) {
2151 return false;
2153 } else if (op == JSOp::InitElemArray) {
2154 MOZ_ASSERT(uint32_t(index.toInt32()) <= INT32_MAX,
2155 "the bytecode emitter must fail to compile code that would "
2156 "produce JSOp::InitElemArray with an index exceeding "
2157 "int32_t range");
2158 MOZ_ASSERT(uint32_t(index.toInt32()) == GET_UINT32(pc));
2159 if (!InitArrayElemOperation(cx, pc, obj.as<ArrayObject>(), index.toInt32(),
2160 rhs)) {
2161 return false;
2163 } else if (op == JSOp::InitElemInc) {
2164 if (!InitArrayElemOperation(cx, pc, obj.as<ArrayObject>(), index.toInt32(),
2165 rhs)) {
2166 return false;
2168 } else {
2169 if (!SetObjectElement(cx, obj, index, rhs, objv,
2170 JSOp(*pc) == JSOp::StrictSetElem, script, pc)) {
2171 return false;
2175 // Don't try to attach stubs for ops that wish to be hidden: we don't yet
2176 // know how to encode the different enumerability in the stubs.
2177 if (op == JSOp::InitHiddenElem) {
2178 return true;
2181 // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
2182 MOZ_ASSERT(stack[2] == objv);
2183 stack[2] = rhs;
2185 if (attached) {
2186 return true;
2189 // The SetObjectElement call might have entered this IC recursively, so try
2190 // to transition.
2191 if (stub->state().maybeTransition()) {
2192 stub->discardStubs(cx, frame->invalidationScript());
2195 bool canAttachStub = stub->state().canAttachStub();
2197 if (deferType != DeferType::None && canAttachStub) {
2198 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem,
2199 stub->state().mode(), objv, index, rhs);
2201 MOZ_ASSERT(deferType == DeferType::AddSlot);
2202 AttachDecision decision = gen.tryAttachAddSlotStub(oldGroup, oldShape);
2204 switch (decision) {
2205 case AttachDecision::Attach: {
2206 ICScript* icScript = frame->icScript();
2207 ICStub* newStub = AttachBaselineCacheIRStub(
2208 cx, gen.writerRef(), gen.cacheKind(),
2209 BaselineCacheIRStubKind::Updated, frame->script(), icScript, stub,
2210 &attached);
2211 if (newStub) {
2212 JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
2214 SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
2216 if (gen.shouldNotePreliminaryObjectStub()) {
2217 newStub->toCacheIR_Updated()->notePreliminaryObject();
2218 } else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
2219 StripPreliminaryObjectStubs(cx, stub, frame->invalidationScript());
2222 } break;
2223 case AttachDecision::NoAction:
2224 gen.trackAttached(IRGenerator::NotAttached);
2225 break;
2226 case AttachDecision::TemporarilyUnoptimizable:
2227 case AttachDecision::Deferred:
2228 MOZ_ASSERT_UNREACHABLE("Invalid attach result");
2229 break;
2232 if (!attached && canAttachStub) {
2233 stub->state().trackNotAttached();
2235 return true;
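// The deferred path above handles stubs that add a new slot: the write had to
// happen first (changing the object's shape) so that tryAttachAddSlotStub
// could guard the transition from the captured oldGroup/oldShape.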
2238 bool FallbackICCodeCompiler::emit_SetElem() {
2239 static_assert(R0 == JSReturnOperand);
2241 EmitRestoreTailCallReg(masm);
2243 // State: R0: object, R1: index, stack: rhs.
2244 // For the decompiler, the stack has to be: object, index, rhs,
2245 // so we push the index, then overwrite the rhs Value with R0
2246 // and push the rhs value.
2247 masm.pushValue(R1);
2248 masm.loadValue(Address(masm.getStackPointer(), sizeof(Value)), R1);
2249 masm.storeValue(R0, Address(masm.getStackPointer(), sizeof(Value)));
2250 masm.pushValue(R1);
2252 // Push arguments.
2253 masm.pushValue(R1); // RHS
2255 // Push index. On x86 and ARM two push instructions are emitted, so use a
2256 // separate register to store the old stack pointer.
2257 masm.moveStackPtrTo(R1.scratchReg());
2258 masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
2259 masm.pushValue(R0); // Object.
2261 // Push pointer to stack values, so that the stub can overwrite the object
2262 // (pushed for the decompiler) with the rhs.
2263 masm.computeEffectiveAddress(
2264 Address(masm.getStackPointer(), 3 * sizeof(Value)), R0.scratchReg());
2265 masm.push(R0.scratchReg());
2267 masm.push(ICStubReg);
2268 pushStubPayload(masm, R0.scratchReg());
2270 using Fn = bool (*)(JSContext*, BaselineFrame*, ICSetElem_Fallback*, Value*,
2271 HandleValue, HandleValue, HandleValue);
2272 return tailCallVM<Fn, DoSetElemFallback>(masm);
2276 // In_Fallback
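// The In fallback implements |key in obj|, e.g. |"length" in arr| or
// |0 in arr|. A non-object right-hand side such as |"x" in 5| throws a
// TypeError via ReportInNotObjectError below.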
2279 bool DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub,
2280 HandleValue key, HandleValue objValue,
2281 MutableHandleValue res) {
2282 stub->incrementEnteredCount();
2284 FallbackICSpew(cx, stub, "In");
2286 if (!objValue.isObject()) {
2287 ReportInNotObjectError(cx, key, -2, objValue, -1);
2288 return false;
2291 TryAttachStub<HasPropIRGenerator>("In", cx, frame, stub,
2292 BaselineCacheIRStubKind::Regular,
2293 CacheKind::In, key, objValue);
2295 RootedObject obj(cx, &objValue.toObject());
2296 bool cond = false;
2297 if (!OperatorIn(cx, key, obj, &cond)) {
2298 return false;
2300 res.setBoolean(cond);
2302 return true;
2305 bool FallbackICCodeCompiler::emit_In() {
2306 EmitRestoreTailCallReg(masm);
2308 // Sync for the decompiler.
2309 masm.pushValue(R0);
2310 masm.pushValue(R1);
2312 // Push arguments.
2313 masm.pushValue(R1);
2314 masm.pushValue(R0);
2315 masm.push(ICStubReg);
2316 pushStubPayload(masm, R0.scratchReg());
2318 using Fn = bool (*)(JSContext*, BaselineFrame*, ICIn_Fallback*, HandleValue,
2319 HandleValue, MutableHandleValue);
2320 return tailCallVM<Fn, DoInFallback>(masm);
2324 // HasOwn_Fallback
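// HasOwn implements own-property membership tests: unlike the In fallback
// above, HasOwnProperty only consults the receiver's own properties, never
// the prototype chain.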
2327 bool DoHasOwnFallback(JSContext* cx, BaselineFrame* frame,
2328 ICHasOwn_Fallback* stub, HandleValue keyValue,
2329 HandleValue objValue, MutableHandleValue res) {
2330 stub->incrementEnteredCount();
2332 FallbackICSpew(cx, stub, "HasOwn");
2334 TryAttachStub<HasPropIRGenerator>("HasOwn", cx, frame, stub,
2335 BaselineCacheIRStubKind::Regular,
2336 CacheKind::HasOwn, keyValue, objValue);
2338 bool found;
2339 if (!HasOwnProperty(cx, objValue, keyValue, &found)) {
2340 return false;
2343 res.setBoolean(found);
2344 return true;
2347 bool FallbackICCodeCompiler::emit_HasOwn() {
2348 EmitRestoreTailCallReg(masm);
2350 // Sync for the decompiler.
2351 masm.pushValue(R0);
2352 masm.pushValue(R1);
2354 // Push arguments.
2355 masm.pushValue(R1);
2356 masm.pushValue(R0);
2357 masm.push(ICStubReg);
2358 pushStubPayload(masm, R0.scratchReg());
2360 using Fn = bool (*)(JSContext*, BaselineFrame*, ICHasOwn_Fallback*,
2361 HandleValue, HandleValue, MutableHandleValue);
2362 return tailCallVM<Fn, DoHasOwnFallback>(masm);
2366 // CheckPrivateField_Fallback
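// CheckPrivateField backs private-name checks such as the ergonomic brand
// check |#field in obj|; the key is always a private-name symbol, as the
// assert below verifies.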
2369 bool DoCheckPrivateFieldFallback(JSContext* cx, BaselineFrame* frame,
2370 ICCheckPrivateField_Fallback* stub,
2371 HandleValue objValue, HandleValue keyValue,
2372 MutableHandleValue res) {
2373 stub->incrementEnteredCount();
2374 RootedScript script(cx, frame->script());
2375 jsbytecode* pc = stub->icEntry()->pc(script);
2377 FallbackICSpew(cx, stub, "CheckPrivateField");
2379 MOZ_ASSERT(keyValue.isSymbol() && keyValue.toSymbol()->isPrivateName());
2381 TryAttachStub<CheckPrivateFieldIRGenerator>(
2382 "CheckPrivate", cx, frame, stub, BaselineCacheIRStubKind::Regular,
2383 CacheKind::CheckPrivateField, keyValue, objValue);
2385 bool result;
2386 if (!CheckPrivateFieldOperation(cx, pc, objValue, keyValue, &result)) {
2387 return false;
2390 res.setBoolean(result);
2391 return true;
2394 bool FallbackICCodeCompiler::emit_CheckPrivateField() {
2395 EmitRestoreTailCallReg(masm);
2397 // Sync for the decompiler.
2398 masm.pushValue(R0);
2399 masm.pushValue(R1);
2401 // Push arguments.
2402 masm.pushValue(R1);
2403 masm.pushValue(R0);
2404 masm.push(ICStubReg);
2405 pushStubPayload(masm, R0.scratchReg());
2407 using Fn = bool (*)(JSContext*, BaselineFrame*, ICCheckPrivateField_Fallback*,
2408 HandleValue, HandleValue, MutableHandleValue);
2409 return tailCallVM<Fn, DoCheckPrivateFieldFallback>(masm);
2413 // GetName_Fallback
2416 bool DoGetNameFallback(JSContext* cx, BaselineFrame* frame,
2417 ICGetName_Fallback* stub, HandleObject envChain,
2418 MutableHandleValue res) {
2419 stub->incrementEnteredCount();
2421 RootedScript script(cx, frame->script());
2422 jsbytecode* pc = stub->icEntry()->pc(script);
2423 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
2424 FallbackICSpew(cx, stub, "GetName(%s)", CodeName(JSOp(*pc)));
2426 MOZ_ASSERT(op == JSOp::GetName || op == JSOp::GetGName);
2428 RootedPropertyName name(cx, script->getName(pc));
2430 TryAttachStub<GetNameIRGenerator>("GetName", cx, frame, stub,
2431 BaselineCacheIRStubKind::Monitored,
2432 envChain, name);
2434 static_assert(JSOpLength_GetGName == JSOpLength_GetName,
2435 "Otherwise our check for JSOp::Typeof isn't ok");
2436 if (JSOp(pc[JSOpLength_GetGName]) == JSOp::Typeof) {
2437 if (!GetEnvironmentName<GetNameMode::TypeOf>(cx, envChain, name, res)) {
2438 return false;
2440 } else {
2441 if (!GetEnvironmentName<GetNameMode::Normal>(cx, envChain, name, res)) {
2442 return false;
2446 return TypeMonitorResult(cx, stub, frame, script, pc, res);
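// The JSOp::Typeof lookahead above is semantically significant: |typeof x|
// must not throw a ReferenceError for an undeclared global |x|, so the name
// lookup runs in GetNameMode::TypeOf for that case.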
2449 bool FallbackICCodeCompiler::emit_GetName() {
2450 static_assert(R0 == JSReturnOperand);
2452 EmitRestoreTailCallReg(masm);
2454 masm.push(R0.scratchReg());
2455 masm.push(ICStubReg);
2456 pushStubPayload(masm, R0.scratchReg());
2458 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetName_Fallback*,
2459 HandleObject, MutableHandleValue);
2460 return tailCallVM<Fn, DoGetNameFallback>(masm);
2464 // BindName_Fallback
2467 bool DoBindNameFallback(JSContext* cx, BaselineFrame* frame,
2468 ICBindName_Fallback* stub, HandleObject envChain,
2469 MutableHandleValue res) {
2470 stub->incrementEnteredCount();
2472 jsbytecode* pc = stub->icEntry()->pc(frame->script());
2473 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
2474 FallbackICSpew(cx, stub, "BindName(%s)", CodeName(JSOp(*pc)));
2476 MOZ_ASSERT(op == JSOp::BindName || op == JSOp::BindGName);
2478 RootedPropertyName name(cx, frame->script()->getName(pc));
2480 TryAttachStub<BindNameIRGenerator>("BindName", cx, frame, stub,
2481 BaselineCacheIRStubKind::Regular, envChain,
2482 name);
2484 RootedObject scope(cx);
2485 if (!LookupNameUnqualified(cx, name, envChain, &scope)) {
2486 return false;
2489 res.setObject(*scope);
2490 return true;
2493 bool FallbackICCodeCompiler::emit_BindName() {
2494 static_assert(R0 == JSReturnOperand);
2496 EmitRestoreTailCallReg(masm);
2498 masm.push(R0.scratchReg());
2499 masm.push(ICStubReg);
2500 pushStubPayload(masm, R0.scratchReg());
2502 using Fn = bool (*)(JSContext*, BaselineFrame*, ICBindName_Fallback*,
2503 HandleObject, MutableHandleValue);
2504 return tailCallVM<Fn, DoBindNameFallback>(masm);
2508 // GetIntrinsic_Fallback
2511 bool DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame,
2512 ICGetIntrinsic_Fallback* stub,
2513 MutableHandleValue res) {
2514 stub->incrementEnteredCount();
2516 RootedScript script(cx, frame->script());
2517 jsbytecode* pc = stub->icEntry()->pc(script);
2518 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
2519 FallbackICSpew(cx, stub, "GetIntrinsic(%s)", CodeName(JSOp(*pc)));
2521 MOZ_ASSERT(op == JSOp::GetIntrinsic);
2523 if (!GetIntrinsicOperation(cx, script, pc, res)) {
2524 return false;
2527 // An intrinsic operation will always produce the same result, so it only
2528 // needs to be monitored once. Attach a stub to load the resulting constant
2529 // directly.
2531 JitScript::MonitorBytecodeType(cx, script, pc, res);
2533 TryAttachStub<GetIntrinsicIRGenerator>("GetIntrinsic", cx, frame, stub,
2534 BaselineCacheIRStubKind::Regular, res);
2536 return true;
2539 bool FallbackICCodeCompiler::emit_GetIntrinsic() {
2540 EmitRestoreTailCallReg(masm);
2542 masm.push(ICStubReg);
2543 pushStubPayload(masm, R0.scratchReg());
2545 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetIntrinsic_Fallback*,
2546 MutableHandleValue);
2547 return tailCallVM<Fn, DoGetIntrinsicFallback>(masm);
2551 // GetProp_Fallback
2554 static bool ComputeGetPropResult(JSContext* cx, BaselineFrame* frame, JSOp op,
2555 HandlePropertyName name,
2556 MutableHandleValue val,
2557 MutableHandleValue res) {
2558 // Handle arguments.length and arguments.callee on optimized arguments, as
2559 // the optimized |arguments| value is not an object.
2560 if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && IsOptimizedArguments(frame, val)) {
2561 if (op == JSOp::Length) {
2562 res.setInt32(frame->numActualArgs());
2563 } else {
2564 MOZ_ASSERT(name == cx->names().callee);
2565 MOZ_ASSERT(frame->script()->hasMappedArgsObj());
2566 res.setObject(*frame->callee());
2568 } else {
2569 if (op == JSOp::GetBoundName) {
2570 RootedObject env(cx, &val.toObject());
2571 RootedId id(cx, NameToId(name));
2572 if (!GetNameBoundInEnvironment(cx, env, id, res)) {
2573 return false;
2575 } else {
2576 MOZ_ASSERT(op == JSOp::GetProp || op == JSOp::CallProp ||
2577 op == JSOp::Length);
2578 if (!GetProperty(cx, val, name, res)) {
2579 return false;
2584 return true;
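// For example, when the |arguments| object has been optimized away,
// |arguments.length| is answered above straight from the frame's actual
// argument count, without materializing an arguments object.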
2587 bool DoGetPropFallback(JSContext* cx, BaselineFrame* frame,
2588 ICGetProp_Fallback* stub, MutableHandleValue val,
2589 MutableHandleValue res) {
2590 stub->incrementEnteredCount();
2592 RootedScript script(cx, frame->script());
2593 jsbytecode* pc = stub->icEntry()->pc(script);
2594 JSOp op = JSOp(*pc);
2595 FallbackICSpew(cx, stub, "GetProp(%s)", CodeName(op));
2597 MOZ_ASSERT(op == JSOp::GetProp || op == JSOp::CallProp ||
2598 op == JSOp::Length || op == JSOp::GetBoundName);
2600 RootedPropertyName name(cx, script->getName(pc));
2601 RootedValue idVal(cx, StringValue(name));
2603 TryAttachGetPropStub("GetProp", cx, frame, stub, CacheKind::GetProp, val,
2604 idVal, val);
2606 if (!ComputeGetPropResult(cx, frame, op, name, val, res)) {
2607 return false;
2610 return TypeMonitorResult(cx, stub, frame, script, pc, res);
2613 bool DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame,
2614 ICGetProp_Fallback* stub, HandleValue receiver,
2615 MutableHandleValue val, MutableHandleValue res) {
2616 stub->incrementEnteredCount();
2618 RootedScript script(cx, frame->script());
2619 jsbytecode* pc = stub->icEntry()->pc(script);
2620 FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName(JSOp(*pc)));
2622 MOZ_ASSERT(JSOp(*pc) == JSOp::GetPropSuper);
2624 RootedPropertyName name(cx, script->getName(pc));
2625 RootedValue idVal(cx, StringValue(name));
2627 TryAttachGetPropStub("GetPropSuper", cx, frame, stub, CacheKind::GetPropSuper,
2628 val, idVal, receiver);
2630 // |val| is [[HomeObject]].[[Prototype]], which must be an Object.
2631 RootedObject valObj(cx, &val.toObject());
2632 if (!GetProperty(cx, valObj, receiver, name, res)) {
2633 return false;
2636 return TypeMonitorResult(cx, stub, frame, script, pc, res);
2639 bool FallbackICCodeCompiler::emitGetProp(bool hasReceiver) {
2640 static_assert(R0 == JSReturnOperand);
2642 EmitRestoreTailCallReg(masm);
2644 // Super property getters use a |this| that differs from the base object.
2645 if (hasReceiver) {
2646 // Push arguments.
2647 masm.pushValue(R0);
2648 masm.pushValue(R1);
2649 masm.push(ICStubReg);
2650 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
2652 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetProp_Fallback*,
2653 HandleValue, MutableHandleValue, MutableHandleValue);
2654 if (!tailCallVM<Fn, DoGetPropSuperFallback>(masm)) {
2655 return false;
2657 } else {
2658 // Ensure stack is fully synced for the expression decompiler.
2659 masm.pushValue(R0);
2661 // Push arguments.
2662 masm.pushValue(R0);
2663 masm.push(ICStubReg);
2664 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
2666 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetProp_Fallback*,
2667 MutableHandleValue, MutableHandleValue);
2668 if (!tailCallVM<Fn, DoGetPropFallback>(masm)) {
2669 return false;
2673 // This is the resume point used when a bailout rewrites the call stack to
2674 // undo Ion inlined frames. The return address pushed onto the reconstructed
2675 // stack will point here.
2676 assumeStubFrame();
2677 if (hasReceiver) {
2678 code.initBailoutReturnOffset(BailoutReturnKind::GetPropSuper,
2679 masm.currentOffset());
2680 } else {
2681 code.initBailoutReturnOffset(BailoutReturnKind::GetProp,
2682 masm.currentOffset());
2685 leaveStubFrame(masm, true);
2687 if (!IsTypeInferenceEnabled()) {
2688 EmitReturnFromIC(masm);
2689 return true;
2692 // When we get here, ICStubReg contains the ICGetProp_Fallback stub,
2693 // which we can't use to enter the TypeMonitor IC, because it's a
2694 // MonitoredFallbackStub instead of a MonitoredStub. So, we cheat. Note that
2695 // we must have a non-null fallbackMonitorStub here because
2696 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
2697 masm.loadPtr(Address(ICStubReg,
2698 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
2699 ICStubReg);
2700 EmitEnterTypeMonitorIC(masm,
2701 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
2703 return true;
2706 bool FallbackICCodeCompiler::emit_GetProp() {
2707 return emitGetProp(/* hasReceiver = */ false);
2710 bool FallbackICCodeCompiler::emit_GetPropSuper() {
2711 return emitGetProp(/* hasReceiver = */ true);
2715 // SetProp_Fallback
2718 bool DoSetPropFallback(JSContext* cx, BaselineFrame* frame,
2719 ICSetProp_Fallback* stub, Value* stack, HandleValue lhs,
2720 HandleValue rhs) {
2721 using DeferType = SetPropIRGenerator::DeferType;
2723 stub->incrementEnteredCount();
2725 RootedScript script(cx, frame->script());
2726 jsbytecode* pc = stub->icEntry()->pc(script);
2727 JSOp op = JSOp(*pc);
2728 FallbackICSpew(cx, stub, "SetProp(%s)", CodeName(op));
2730 MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp ||
2731 op == JSOp::SetName || op == JSOp::StrictSetName ||
2732 op == JSOp::SetGName || op == JSOp::StrictSetGName ||
2733 op == JSOp::InitProp || op == JSOp::InitLockedProp ||
2734 op == JSOp::InitHiddenProp || op == JSOp::InitGLexical);
2736 RootedPropertyName name(cx, script->getName(pc));
2737 RootedId id(cx, NameToId(name));
2739 int lhsIndex = -2;
2740 RootedObject obj(cx,
2741 ToObjectFromStackForPropertyAccess(cx, lhs, lhsIndex, id));
2742 if (!obj) {
2743 return false;
2745 RootedShape oldShape(cx, obj->shape());
2746 RootedObjectGroup oldGroup(cx, JSObject::getGroup(cx, obj));
2747 if (!oldGroup) {
2748 return false;
2751 DeferType deferType = DeferType::None;
2752 bool attached = false;
2753 if (stub->state().maybeTransition()) {
2754 stub->discardStubs(cx, frame->invalidationScript());
2757 if (stub->state().canAttachStub()) {
2758 RootedValue idVal(cx, StringValue(name));
2759 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp,
2760 stub->state().mode(), lhs, idVal, rhs);
2761 switch (gen.tryAttachStub()) {
2762 case AttachDecision::Attach: {
2763 ICScript* icScript = frame->icScript();
2764 ICStub* newStub = AttachBaselineCacheIRStub(
2765 cx, gen.writerRef(), gen.cacheKind(),
2766 BaselineCacheIRStubKind::Updated, frame->script(), icScript, stub,
2767 &attached);
2768 if (newStub) {
2769 JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub");
2771 SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
2773 if (gen.shouldNotePreliminaryObjectStub()) {
2774 newStub->toCacheIR_Updated()->notePreliminaryObject();
2775 } else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
2776 StripPreliminaryObjectStubs(cx, stub, frame->invalidationScript());
2779 } break;
2780 case AttachDecision::NoAction:
2781 break;
2782 case AttachDecision::TemporarilyUnoptimizable:
2783 attached = true;
2784 break;
2785 case AttachDecision::Deferred:
2786 deferType = gen.deferType();
2787 MOZ_ASSERT(deferType != DeferType::None);
2788 break;
2792 if (op == JSOp::InitProp || op == JSOp::InitLockedProp ||
2793 op == JSOp::InitHiddenProp) {
2794 if (!InitPropertyOperation(cx, op, obj, name, rhs)) {
2795 return false;
2797 } else if (op == JSOp::SetName || op == JSOp::StrictSetName ||
2798 op == JSOp::SetGName || op == JSOp::StrictSetGName) {
2799 if (!SetNameOperation(cx, script, pc, obj, rhs)) {
2800 return false;
2802 } else if (op == JSOp::InitGLexical) {
2803 RootedValue v(cx, rhs);
2804 LexicalEnvironmentObject* lexicalEnv;
2805 if (script->hasNonSyntacticScope()) {
2806 lexicalEnv = &NearestEnclosingExtensibleLexicalEnvironment(
2807 frame->environmentChain());
2808 } else {
2809 lexicalEnv = &cx->global()->lexicalEnvironment();
2811 InitGlobalLexicalOperation(cx, lexicalEnv, script, pc, v);
2812 } else {
2813 MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp);
2815 ObjectOpResult result;
2816 if (!SetProperty(cx, obj, id, rhs, lhs, result) ||
2817 !result.checkStrictModeError(cx, obj, id, op == JSOp::StrictSetProp)) {
2818 return false;
2822 // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS.
2823 MOZ_ASSERT(stack[1] == lhs);
2824 stack[1] = rhs;
2826 if (attached) {
2827 return true;
2830 // The SetProperty call might have entered this IC recursively, so try
2831 // to transition.
2832 if (stub->state().maybeTransition()) {
2833 stub->discardStubs(cx, frame->invalidationScript());
2836 bool canAttachStub = stub->state().canAttachStub();
2838 if (deferType != DeferType::None && canAttachStub) {
2839 RootedValue idVal(cx, StringValue(name));
2840 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp,
2841 stub->state().mode(), lhs, idVal, rhs);
2843 MOZ_ASSERT(deferType == DeferType::AddSlot);
2844 AttachDecision decision = gen.tryAttachAddSlotStub(oldGroup, oldShape);
2846 switch (decision) {
2847 case AttachDecision::Attach: {
2848 ICScript* icScript = frame->icScript();
2849 ICStub* newStub = AttachBaselineCacheIRStub(
2850 cx, gen.writerRef(), gen.cacheKind(),
2851 BaselineCacheIRStubKind::Updated, frame->script(), icScript, stub,
2852 &attached);
2853 if (newStub) {
2854 JitSpew(JitSpew_BaselineIC, "  Attached SetProp CacheIR stub");
2856 SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
2858 if (gen.shouldNotePreliminaryObjectStub()) {
2859 newStub->toCacheIR_Updated()->notePreliminaryObject();
2860 } else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
2861 StripPreliminaryObjectStubs(cx, stub, frame->invalidationScript());
2864 } break;
2865 case AttachDecision::NoAction:
2866 gen.trackAttached(IRGenerator::NotAttached);
2867 break;
2868 case AttachDecision::TemporarilyUnoptimizable:
2869 case AttachDecision::Deferred:
2870 MOZ_ASSERT_UNREACHABLE("Invalid attach result");
2871 break;
2874 if (!attached && canAttachStub) {
2875 stub->state().trackNotAttached();
2878 return true;
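// As in DoSetElemFallback above, slot-adding stubs take the deferred path and
// are only attached after the write has changed the object's shape.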
2881 bool FallbackICCodeCompiler::emit_SetProp() {
2882 static_assert(R0 == JSReturnOperand);
2884 EmitRestoreTailCallReg(masm);
2886 // Ensure stack is fully synced for the expression decompiler.
2887 // Overwrite the RHS value on top of the stack with the object, then push
2888 // the RHS in R1 on top of that.
2889 masm.storeValue(R0, Address(masm.getStackPointer(), 0));
2890 masm.pushValue(R1);
2892 // Push arguments.
2893 masm.pushValue(R1);
2894 masm.pushValue(R0);
2896 // Push pointer to stack values, so that the stub can overwrite the object
2897 // (pushed for the decompiler) with the RHS.
2898 masm.computeEffectiveAddress(
2899 Address(masm.getStackPointer(), 2 * sizeof(Value)), R0.scratchReg());
2900 masm.push(R0.scratchReg());
2902 masm.push(ICStubReg);
2903 pushStubPayload(masm, R0.scratchReg());
2905 using Fn = bool (*)(JSContext*, BaselineFrame*, ICSetProp_Fallback*, Value*,
2906 HandleValue, HandleValue);
2907 if (!tailCallVM<Fn, DoSetPropFallback>(masm)) {
2908 return false;
2911 // This is the resume point used when a bailout rewrites the call stack to
2912 // undo Ion inlined frames. The return address pushed onto the reconstructed
2913 // stack will point here.
2914 assumeStubFrame();
2915 code.initBailoutReturnOffset(BailoutReturnKind::SetProp,
2916 masm.currentOffset());
2918 leaveStubFrame(masm, true);
2919 EmitReturnFromIC(masm);
2921 return true;
2925 // Call_Fallback
2928 bool DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub,
2929 uint32_t argc, Value* vp, MutableHandleValue res) {
2930 stub->incrementEnteredCount();
2932 RootedScript script(cx, frame->script());
2933 jsbytecode* pc = stub->icEntry()->pc(script);
2934 JSOp op = JSOp(*pc);
2935 FallbackICSpew(cx, stub, "Call(%s)", CodeName(op));
2937 MOZ_ASSERT(argc == GET_ARGC(pc));
2938 bool constructing = (op == JSOp::New || op == JSOp::SuperCall);
2939 bool ignoresReturnValue = (op == JSOp::CallIgnoresRv);
2941 // Ensure vp array is rooted - we may GC in here.
2942 size_t numValues = argc + 2 + constructing;
2943 RootedExternalValueArray vpRoot(cx, numValues, vp);
2945 CallArgs callArgs = CallArgsFromSp(argc + constructing, vp + numValues,
2946 constructing, ignoresReturnValue);
2947 RootedValue callee(cx, vp[0]);
2948 RootedValue newTarget(cx, constructing ? callArgs.newTarget() : NullValue());
2950 // Handle f.apply(x, arguments) when |arguments| is the optimized magic value (JSOp::Arguments).
2951 if (op == JSOp::FunApply && argc == 2 &&
2952 callArgs[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
2953 GuardFunApplyArgumentsOptimization(cx, frame, callArgs);
2956 // Transition stub state to megamorphic or generic if warranted.
2957 if (stub->state().maybeTransition()) {
2958 stub->discardStubs(cx, frame->invalidationScript());
2961 bool canAttachStub = stub->state().canAttachStub();
2962 bool handled = false;
2963 bool deferred = false;
2965 // Only bother to try optimizing JSOp::Call with CacheIR if the chain is still
2966 // allowed to attach stubs.
2967 if (canAttachStub) {
2968 HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2);
2969 bool isFirstStub = stub->newStubIsFirstStub();
2970 CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), isFirstStub,
2971 argc, callee, callArgs.thisv(), newTarget, args);
2972 switch (gen.tryAttachStub()) {
2973 case AttachDecision::NoAction:
2974 break;
2975 case AttachDecision::Attach: {
2976 ICScript* icScript = frame->icScript();
2977 ICStub* newStub = AttachBaselineCacheIRStub(
2978 cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
2979 icScript, stub, &handled);
2980 if (newStub) {
2981 JitSpew(JitSpew_BaselineIC, " Attached Call CacheIR stub");
2983 // If it's an updated stub, initialize it.
2984 if (gen.cacheIRStubKind() == BaselineCacheIRStubKind::Updated) {
2985 SetUpdateStubData(newStub->toCacheIR_Updated(),
2986 gen.typeCheckInfo());
2989 } break;
2990 case AttachDecision::TemporarilyUnoptimizable:
2991 handled = true;
2992 break;
2993 case AttachDecision::Deferred:
2994 deferred = true;
2998 if (constructing) {
2999 if (!ConstructFromStack(cx, callArgs)) {
3000 return false;
3002 res.set(callArgs.rval());
3003 } else if ((op == JSOp::Eval || op == JSOp::StrictEval) &&
3004 cx->global()->valueIsEval(callee)) {
3005 if (!DirectEval(cx, callArgs.get(0), res)) {
3006 return false;
3008 } else {
3009 MOZ_ASSERT(op == JSOp::Call || op == JSOp::CallIgnoresRv ||
3010 op == JSOp::CallIter || op == JSOp::FunCall ||
3011 op == JSOp::FunApply || op == JSOp::Eval ||
3012 op == JSOp::StrictEval);
3013 if (op == JSOp::CallIter && callee.isPrimitive()) {
3014 MOZ_ASSERT(argc == 0, "thisv must be on top of the stack");
3015 ReportValueError(cx, JSMSG_NOT_ITERABLE, -1, callArgs.thisv(), nullptr);
3016 return false;
3019 if (!CallFromStack(cx, callArgs)) {
3020 return false;
3023 res.set(callArgs.rval());
3026 if (!TypeMonitorResult(cx, stub, frame, script, pc, res)) {
3027 return false;
3030 // Try to transition again in case we called this IC recursively.
3031 if (stub->state().maybeTransition()) {
3032 stub->discardStubs(cx, frame->invalidationScript());
3034 canAttachStub = stub->state().canAttachStub();
3036 if (deferred && canAttachStub) {
3037 HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2);
3038 bool isFirstStub = stub->newStubIsFirstStub();
3039 CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), isFirstStub,
3040 argc, callee, callArgs.thisv(), newTarget, args);
3041 switch (gen.tryAttachDeferredStub(res)) {
3042 case AttachDecision::Attach: {
3043 ICScript* icScript = frame->icScript();
3044 ICStub* newStub = AttachBaselineCacheIRStub(
3045 cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
3046 icScript, stub, &handled);
3047 if (newStub) {
3048 JitSpew(JitSpew_BaselineIC, " Attached Call CacheIR stub");
3050 // If it's an updated stub, initialize it.
3051 if (gen.cacheIRStubKind() == BaselineCacheIRStubKind::Updated) {
3052 SetUpdateStubData(newStub->toCacheIR_Updated(),
3053 gen.typeCheckInfo());
3056 } break;
3057 case AttachDecision::NoAction:
3058 break;
3059 case AttachDecision::TemporarilyUnoptimizable:
3060 case AttachDecision::Deferred:
3061 MOZ_ASSERT_UNREACHABLE("Impossible attach decision");
3062 break;
3066 if (!handled && canAttachStub) {
3067 stub->state().trackNotAttached();
3069 return true;
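// Note the deferred attach above: tryAttachDeferredStub runs only after the
// call has completed, so the generator can inspect the call's result value
// when deciding what to attach.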
3072 bool DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame,
3073 ICCall_Fallback* stub, Value* vp,
3074 MutableHandleValue res) {
3075 stub->incrementEnteredCount();
3077 RootedScript script(cx, frame->script());
3078 jsbytecode* pc = stub->icEntry()->pc(script);
3079 JSOp op = JSOp(*pc);
3080 bool constructing = (op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall);
3081 FallbackICSpew(cx, stub, "SpreadCall(%s)", CodeName(op));
3083 // Ensure vp array is rooted - we may GC in here.
3084 RootedExternalValueArray vpRoot(cx, 3 + constructing, vp);
3086 RootedValue callee(cx, vp[0]);
3087 RootedValue thisv(cx, vp[1]);
3088 RootedValue arr(cx, vp[2]);
3089 RootedValue newTarget(cx, constructing ? vp[3] : NullValue());
3091 // Transition stub state to megamorphic or generic if warranted.
3092 if (stub->state().maybeTransition()) {
3093 stub->discardStubs(cx, frame->invalidationScript());
3096 // Try attaching a call stub.
3097 bool handled = false;
3098 if (op != JSOp::SpreadEval && op != JSOp::StrictSpreadEval &&
3099 stub->state().canAttachStub()) {
3100 // Try CacheIR first:
3101 RootedArrayObject aobj(cx, &arr.toObject().as<ArrayObject>());
3102 MOZ_ASSERT(aobj->length() == aobj->getDenseInitializedLength());
3104 HandleValueArray args = HandleValueArray::fromMarkedLocation(
3105 aobj->length(), aobj->getDenseElements());
3106 bool isFirstStub = stub->newStubIsFirstStub();
3107 CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), isFirstStub,
3108 1, callee, thisv, newTarget, args);
3109 switch (gen.tryAttachStub()) {
3110 case AttachDecision::NoAction:
3111 break;
3112 case AttachDecision::Attach: {
3113 ICScript* icScript = frame->icScript();
3114 ICStub* newStub = AttachBaselineCacheIRStub(
3115 cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
3116 icScript, stub, &handled);
3118 if (newStub) {
3119 JitSpew(JitSpew_BaselineIC, " Attached Spread Call CacheIR stub");
3121 // If it's an updated stub, initialize it.
3122 if (gen.cacheIRStubKind() == BaselineCacheIRStubKind::Updated) {
3123 SetUpdateStubData(newStub->toCacheIR_Updated(),
3124 gen.typeCheckInfo());
3127 } break;
3128 case AttachDecision::TemporarilyUnoptimizable:
3129 handled = true;
3130 break;
3131 case AttachDecision::Deferred:
3132 MOZ_ASSERT_UNREACHABLE("No deferred optimizations for spread calls");
3133 break;
3137 if (!SpreadCallOperation(cx, script, pc, thisv, callee, arr, newTarget,
3138 res)) {
3139 return false;
3142 return TypeMonitorResult(cx, stub, frame, script, pc, res);
3145 void ICStubCompilerBase::pushCallArguments(MacroAssembler& masm,
3146 AllocatableGeneralRegisterSet regs,
3147 Register argcReg,
3148 bool isConstructing) {
3149 MOZ_ASSERT(!regs.has(argcReg));
3151 // argPtr initially points to the last argument.
3152 Register argPtr = regs.takeAny();
3153 masm.moveStackPtrTo(argPtr);
3155 // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
3156 // return address, old frame pointer and stub reg.
3157 size_t valueOffset = STUB_FRAME_SIZE;
3159 // We have to push |this|, callee, new.target (if constructing) and argc
3160 // arguments. Handle the number of Values we know statically first.
3162 size_t numNonArgValues = 2 + isConstructing;
3163 for (size_t i = 0; i < numNonArgValues; i++) {
3164 masm.pushValue(Address(argPtr, valueOffset));
3165 valueOffset += sizeof(Value);
3168 // If there are no arguments, we're done.
3169 Label done;
3170 masm.branchTest32(Assembler::Zero, argcReg, argcReg, &done);
3172 // Push argc Values.
3173 Label loop;
3174 Register count = regs.takeAny();
3175 masm.addPtr(Imm32(valueOffset), argPtr);
3176 masm.move32(argcReg, count);
3177 masm.bind(&loop);
3179 masm.pushValue(Address(argPtr, 0));
3180 masm.addPtr(Imm32(sizeof(Value)), argPtr);
3182 masm.branchSub32(Assembler::NonZero, Imm32(1), count, &loop);
3184 masm.bind(&done);
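// At this point the stub frame holds a second, reversed copy of the call
// operands: the 2 + isConstructing fixed Values were copied by the unrolled
// loop, and the argc argument Values by the branchSub32 loop above.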
3187 bool FallbackICCodeCompiler::emitCall(bool isSpread, bool isConstructing) {
3188 static_assert(R0 == JSReturnOperand);
3190 // Values are on the stack left-to-right. Calling convention wants them
3191 // right-to-left so duplicate them on the stack in reverse order.
3192 // |this| and callee are pushed last.
3194 AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
3196 if (MOZ_UNLIKELY(isSpread)) {
3197 // Push a stub frame so that we can perform a non-tail call.
3198 enterStubFrame(masm, R1.scratchReg());
3200 // Use BaselineFrameReg instead of BaselineStackReg, because
3201 // BaselineFrameReg and BaselineStackReg hold the same value just after
3202 // calling enterStubFrame.
3204 // newTarget
3205 uint32_t valueOffset = 0;
3206 if (isConstructing) {
3207 masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE));
3208 valueOffset++;
3211 // array
3212 masm.pushValue(Address(BaselineFrameReg,
3213 valueOffset * sizeof(Value) + STUB_FRAME_SIZE));
3214 valueOffset++;
3216 // this
3217 masm.pushValue(Address(BaselineFrameReg,
3218 valueOffset * sizeof(Value) + STUB_FRAME_SIZE));
3219 valueOffset++;
3221 // callee
3222 masm.pushValue(Address(BaselineFrameReg,
3223 valueOffset * sizeof(Value) + STUB_FRAME_SIZE));
3224 valueOffset++;
3226 masm.push(masm.getStackPointer());
3227 masm.push(ICStubReg);
3229 PushStubPayload(masm, R0.scratchReg());
3231 using Fn = bool (*)(JSContext*, BaselineFrame*, ICCall_Fallback*, Value*,
3232 MutableHandleValue);
3233 if (!callVM<Fn, DoSpreadCallFallback>(masm)) {
3234 return false;
3237 leaveStubFrame(masm);
3238 EmitReturnFromIC(masm);
3240 // SpreadCall is not yet supported in Ion, so do not generate code for the
3241 // bailout resume point.
3242 return true;
3245 // Push a stub frame so that we can perform a non-tail call.
3246 enterStubFrame(masm, R1.scratchReg());
3248 regs.take(R0.scratchReg()); // argc.
3250 pushCallArguments(masm, regs, R0.scratchReg(), isConstructing);
3252 masm.push(masm.getStackPointer());
3253 masm.push(R0.scratchReg());
3254 masm.push(ICStubReg);
3256 PushStubPayload(masm, R0.scratchReg());
3258 using Fn = bool (*)(JSContext*, BaselineFrame*, ICCall_Fallback*, uint32_t,
3259 Value*, MutableHandleValue);
3260 if (!callVM<Fn, DoCallFallback>(masm)) {
3261 return false;
3264 leaveStubFrame(masm);
3265 EmitReturnFromIC(masm);
3267 // This is the resume point used when a bailout rewrites the call stack to
3268 // undo Ion inlined frames. The return address pushed onto the reconstructed
3269 // stack will point here.
3270 assumeStubFrame();
3272 MOZ_ASSERT(!isSpread);
3274 if (isConstructing) {
3275 code.initBailoutReturnOffset(BailoutReturnKind::New, masm.currentOffset());
3276 } else {
3277 code.initBailoutReturnOffset(BailoutReturnKind::Call, masm.currentOffset());
3280 // Load passed-in ThisV into R1 just in case it's needed. Need to do this
3281 // before we leave the stub frame since that info will be lost.
3282 // Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
3283 masm.loadValue(Address(masm.getStackPointer(), 3 * sizeof(size_t)), R1);
3285 leaveStubFrame(masm, true);
3287 // If this is a |constructing| call and the callee returns a non-object, we
3288 // replace the return value with the |this| object passed in.
3289 if (isConstructing) {
3290 static_assert(JSReturnOperand == R0);
3291 Label skipThisReplace;
3293 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
3294 masm.moveValue(R1, R0);
3295 #ifdef DEBUG
3296 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
3297 masm.assumeUnreachable("Failed to return object in constructing call.");
3298 #endif
3299 masm.bind(&skipThisReplace);
3302 if (!IsTypeInferenceEnabled()) {
3303 EmitReturnFromIC(masm);
3304 return true;
3307 // At this point, ICStubReg points to the ICCall_Fallback stub, which is NOT
3308 // a MonitoredStub, but rather a MonitoredFallbackStub. To use
3309 // EmitEnterTypeMonitorIC, first load the ICTypeMonitor_Fallback stub into
3310 // ICStubReg. Then, use EmitEnterTypeMonitorIC with a custom struct offset.
3311 // Note that we must have a non-null fallbackMonitorStub here because
3312 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
3313 masm.loadPtr(Address(ICStubReg,
3314 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
3315 ICStubReg);
3316 EmitEnterTypeMonitorIC(masm,
3317 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
3319 return true;
3322 bool FallbackICCodeCompiler::emit_Call() {
3323 return emitCall(/* isSpread = */ false, /* isConstructing = */ false);
3326 bool FallbackICCodeCompiler::emit_CallConstructing() {
3327 return emitCall(/* isSpread = */ false, /* isConstructing = */ true);
3330 bool FallbackICCodeCompiler::emit_SpreadCall() {
3331 return emitCall(/* isSpread = */ true, /* isConstructing = */ false);
3334 bool FallbackICCodeCompiler::emit_SpreadCallConstructing() {
3335 return emitCall(/* isSpread = */ true, /* isConstructing = */ true);
3339 // GetIterator_Fallback
3342 bool DoGetIteratorFallback(JSContext* cx, BaselineFrame* frame,
3343 ICGetIterator_Fallback* stub, HandleValue value,
3344 MutableHandleValue res) {
3345 stub->incrementEnteredCount();
3346 FallbackICSpew(cx, stub, "GetIterator");
3348 TryAttachStub<GetIteratorIRGenerator>(
3349 "GetIterator", cx, frame, stub, BaselineCacheIRStubKind::Regular, value);
3351 JSObject* iterobj = ValueToIterator(cx, value);
3352 if (!iterobj) {
3353 return false;
3356 res.setObject(*iterobj);
3357 return true;
3360 bool FallbackICCodeCompiler::emit_GetIterator() {
3361 EmitRestoreTailCallReg(masm);
3363 // Sync stack for the decompiler.
3364 masm.pushValue(R0);
3366 masm.pushValue(R0);
3367 masm.push(ICStubReg);
3368 pushStubPayload(masm, R0.scratchReg());
3370 using Fn = bool (*)(JSContext*, BaselineFrame*, ICGetIterator_Fallback*,
3371 HandleValue, MutableHandleValue);
3372 return tailCallVM<Fn, DoGetIteratorFallback>(masm);
3376 // InstanceOf_Fallback
3379 bool DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame,
3380 ICInstanceOf_Fallback* stub, HandleValue lhs,
3381 HandleValue rhs, MutableHandleValue res) {
3382 stub->incrementEnteredCount();
3384 FallbackICSpew(cx, stub, "InstanceOf");
3386 if (!rhs.isObject()) {
3387 ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, nullptr);
3388 return false;
3391 RootedObject obj(cx, &rhs.toObject());
3392 bool cond = false;
3393 if (!HasInstance(cx, obj, lhs, &cond)) {
3394 return false;
3397 res.setBoolean(cond);
3399 if (!obj->is<JSFunction>()) {
3400 // Ensure we've recorded at least one failure, so we can detect that a
3401 // non-optimizable case was hit.
3402 if (!stub->state().hasFailures()) {
3403 stub->state().trackNotAttached();
3405 return true;
3408 // For functions, keep track of the |prototype| property in type information,
3409 // for use during Ion compilation.
3410 EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));
3412 TryAttachStub<InstanceOfIRGenerator>("InstanceOf", cx, frame, stub,
3413 BaselineCacheIRStubKind::Regular, lhs,
3414 obj);
3415 return true;
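// Only function right-hand sides reach the stub-attach attempt below; for
// other callables (e.g. proxies) the IC just records a failure so the state
// machine knows a non-optimizable case occurred.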
3418 bool FallbackICCodeCompiler::emit_InstanceOf() {
3419 EmitRestoreTailCallReg(masm);
3421 // Sync stack for the decompiler.
3422 masm.pushValue(R0);
3423 masm.pushValue(R1);
3425 masm.pushValue(R1);
3426 masm.pushValue(R0);
3427 masm.push(ICStubReg);
3428 pushStubPayload(masm, R0.scratchReg());
3430 using Fn = bool (*)(JSContext*, BaselineFrame*, ICInstanceOf_Fallback*,
3431 HandleValue, HandleValue, MutableHandleValue);
3432 return tailCallVM<Fn, DoInstanceOfFallback>(masm);
3436 // TypeOf_Fallback
3439 bool DoTypeOfFallback(JSContext* cx, BaselineFrame* frame,
3440 ICTypeOf_Fallback* stub, HandleValue val,
3441 MutableHandleValue res) {
3442 stub->incrementEnteredCount();
3443 FallbackICSpew(cx, stub, "TypeOf");
3445 TryAttachStub<TypeOfIRGenerator>("TypeOf", cx, frame, stub,
3446 BaselineCacheIRStubKind::Regular, val);
3448 JSType type = js::TypeOfValue(val);
3449 RootedString string(cx, TypeName(type, cx->names()));
3450 res.setString(string);
3451 return true;
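// For example, |typeof 1| reaches this fallback once, attaches a TypeOf
// CacheIR stub, and returns "number" via js::TypeOfValue; later hits with the
// same kind of operand are handled by the attached stub.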
3454 bool FallbackICCodeCompiler::emit_TypeOf() {
3455 EmitRestoreTailCallReg(masm);
3457 masm.pushValue(R0);
3458 masm.push(ICStubReg);
3459 pushStubPayload(masm, R0.scratchReg());
3461 using Fn = bool (*)(JSContext*, BaselineFrame*, ICTypeOf_Fallback*,
3462 HandleValue, MutableHandleValue);
3463 return tailCallVM<Fn, DoTypeOfFallback>(masm);
3467 // ToPropertyKey_Fallback
3470 bool DoToPropertyKeyFallback(JSContext* cx, BaselineFrame* frame,
3471 ICToPropertyKey_Fallback* stub, HandleValue val,
3472 MutableHandleValue res) {
3473 stub->incrementEnteredCount();
3474 FallbackICSpew(cx, stub, "ToPropertyKey");
3476 TryAttachStub<ToPropertyKeyIRGenerator>(
3477 "ToPropertyKey", cx, frame, stub, BaselineCacheIRStubKind::Regular, val);
3479 return ToPropertyKeyOperation(cx, val, res);
3482 bool FallbackICCodeCompiler::emit_ToPropertyKey() {
3483 EmitRestoreTailCallReg(masm);
3485 masm.pushValue(R0);
3486 masm.push(ICStubReg);
3487 pushStubPayload(masm, R0.scratchReg());
3489 using Fn = bool (*)(JSContext*, BaselineFrame*, ICToPropertyKey_Fallback*,
3490 HandleValue, MutableHandleValue);
3491 return tailCallVM<Fn, DoToPropertyKeyFallback>(masm);
3494 ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode* stubCode,
3495 JSObject* obj)
3496 : ICStub(TypeMonitor_SingleObject, stubCode), obj_(obj) {}
3498 ICTypeMonitor_ObjectGroup::ICTypeMonitor_ObjectGroup(JitCode* stubCode,
3499 ObjectGroup* group)
3500 : ICStub(TypeMonitor_ObjectGroup, stubCode), group_(group) {}
3502 ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode* stubCode,
3503 JSObject* obj)
3504 : ICStub(TypeUpdate_SingleObject, stubCode), obj_(obj) {}
3506 ICTypeUpdate_ObjectGroup::ICTypeUpdate_ObjectGroup(JitCode* stubCode,
3507 ObjectGroup* group)
3508 : ICStub(TypeUpdate_ObjectGroup, stubCode), group_(group) {}
3511 // Rest_Fallback
3514 bool DoRestFallback(JSContext* cx, BaselineFrame* frame, ICRest_Fallback* stub,
3515 MutableHandleValue res) {
3516 unsigned numFormals = frame->numFormalArgs() - 1;
3517 unsigned numActuals = frame->numActualArgs();
3518 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
3519 Value* rest = frame->argv() + numFormals;
3521 ArrayObject* obj =
3522 ObjectGroup::newArrayObject(cx, rest, numRest, GenericObject,
3523 ObjectGroup::NewArrayKind::UnknownIndex);
3524 if (!obj) {
3525 return false;
3527 res.setObject(*obj);
3528 return true;
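// Example: for |function f(a, ...rest)| called as |f(1, 2, 3)|, numFormals is
// 1 (the formal count minus the rest parameter itself), numActuals is 3, and
// numRest is 2, so the array is built from argv()[1..2], i.e. [2, 3].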
3531 bool FallbackICCodeCompiler::emit_Rest() {
3532 EmitRestoreTailCallReg(masm);
3534 masm.push(ICStubReg);
3535 pushStubPayload(masm, R0.scratchReg());
3537 using Fn = bool (*)(JSContext*, BaselineFrame*, ICRest_Fallback*,
3538 MutableHandleValue);
3539 return tailCallVM<Fn, DoRestFallback>(masm);
3543 // UnaryArith_Fallback
3546 bool DoUnaryArithFallback(JSContext* cx, BaselineFrame* frame,
3547 ICUnaryArith_Fallback* stub, HandleValue val,
3548 MutableHandleValue res) {
3549 stub->incrementEnteredCount();
3551 RootedScript script(cx, frame->script());
3552 jsbytecode* pc = stub->icEntry()->pc(script);
3553 JSOp op = JSOp(*pc);
3554 FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName(op));
3556 switch (op) {
3557 case JSOp::BitNot: {
3558 res.set(val);
3559 if (!BitNot(cx, res, res)) {
3560 return false;
3562 break;
3564 case JSOp::Pos: {
3565 res.set(val);
3566 if (!ToNumber(cx, res)) {
3567 return false;
3569 break;
3571 case JSOp::Neg: {
3572 res.set(val);
3573 if (!NegOperation(cx, res, res)) {
3574 return false;
3576 break;
3578 case JSOp::Inc: {
3579 if (!IncOperation(cx, val, res)) {
3580 return false;
3582 break;
3584 case JSOp::Dec: {
3585 if (!DecOperation(cx, val, res)) {
3586 return false;
3588 break;
3590 case JSOp::ToNumeric: {
3591 res.set(val);
3592 if (!ToNumeric(cx, res)) {
3593 return false;
3595 break;
3597 default:
3598 MOZ_CRASH("Unexpected op");
3600 MOZ_ASSERT(res.isNumeric());
3602 if (res.isDouble()) {
3603 stub->setSawDoubleResult();
3606 TryAttachStub<UnaryArithIRGenerator>("UnaryArith", cx, frame, stub,
3607 BaselineCacheIRStubKind::Regular, op,
3608 val, res);
3609 return true;
3612 bool FallbackICCodeCompiler::emit_UnaryArith() {
3613 static_assert(R0 == JSReturnOperand);
3615 // Restore the tail call register.
3616 EmitRestoreTailCallReg(masm);
3618 // Ensure stack is fully synced for the expression decompiler.
3619 masm.pushValue(R0);
3621 // Push arguments.
3622 masm.pushValue(R0);
3623 masm.push(ICStubReg);
3624 pushStubPayload(masm, R0.scratchReg());
3626 using Fn = bool (*)(JSContext*, BaselineFrame*, ICUnaryArith_Fallback*,
3627 HandleValue, MutableHandleValue);
3628 return tailCallVM<Fn, DoUnaryArithFallback>(masm);
3632 // BinaryArith_Fallback
3635 bool DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame,
3636 ICBinaryArith_Fallback* stub, HandleValue lhs,
3637 HandleValue rhs, MutableHandleValue ret) {
3638 stub->incrementEnteredCount();
3640 RootedScript script(cx, frame->script());
3641 jsbytecode* pc = stub->icEntry()->pc(script);
3642 JSOp op = JSOp(*pc);
3643 FallbackICSpew(
3644 cx, stub, "CacheIRBinaryArith(%s,%d,%d)", CodeName(op),
3645 int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
3646 int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));
3648 // Don't pass lhs/rhs directly; we need the original values when
3649 // generating stubs.
3650 RootedValue lhsCopy(cx, lhs);
3651 RootedValue rhsCopy(cx, rhs);
3653 // Perform the arith operation.
3654 switch (op) {
3655 case JSOp::Add:
3656 // Do an add.
3657 if (!AddValues(cx, &lhsCopy, &rhsCopy, ret)) {
3658 return false;
3660 break;
3661 case JSOp::Sub:
3662 if (!SubValues(cx, &lhsCopy, &rhsCopy, ret)) {
3663 return false;
3665 break;
3666 case JSOp::Mul:
3667 if (!MulValues(cx, &lhsCopy, &rhsCopy, ret)) {
3668 return false;
3670 break;
3671 case JSOp::Div:
3672 if (!DivValues(cx, &lhsCopy, &rhsCopy, ret)) {
3673 return false;
3675 break;
3676 case JSOp::Mod:
3677 if (!ModValues(cx, &lhsCopy, &rhsCopy, ret)) {
3678 return false;
3680 break;
3681 case JSOp::Pow:
3682 if (!PowValues(cx, &lhsCopy, &rhsCopy, ret)) {
3683 return false;
3685 break;
3686 case JSOp::BitOr: {
3687 if (!BitOr(cx, &lhsCopy, &rhsCopy, ret)) {
3688 return false;
3690 break;
3692 case JSOp::BitXor: {
3693 if (!BitXor(cx, &lhsCopy, &rhsCopy, ret)) {
3694 return false;
3696 break;
3698 case JSOp::BitAnd: {
3699 if (!BitAnd(cx, &lhsCopy, &rhsCopy, ret)) {
3700 return false;
3702 break;
3704 case JSOp::Lsh: {
3705 if (!BitLsh(cx, &lhsCopy, &rhsCopy, ret)) {
3706 return false;
3708 break;
3710 case JSOp::Rsh: {
3711 if (!BitRsh(cx, &lhsCopy, &rhsCopy, ret)) {
3712 return false;
3714 break;
3716 case JSOp::Ursh: {
3717 if (!UrshValues(cx, &lhsCopy, &rhsCopy, ret)) {
3718 return false;
3720 break;
3722 default:
3723 MOZ_CRASH("Unhandled baseline arith op");
3726 if (ret.isDouble()) {
3727 stub->setSawDoubleResult();
3730 TryAttachStub<BinaryArithIRGenerator>("BinaryArith", cx, frame, stub,
3731 BaselineCacheIRStubKind::Regular, op,
3732 lhs, rhs, ret);
3733 return true;
3736 bool FallbackICCodeCompiler::emit_BinaryArith() {
3737 static_assert(R0 == JSReturnOperand);
3739 // Restore the tail call register.
3740 EmitRestoreTailCallReg(masm);
3742 // Ensure stack is fully synced for the expression decompiler.
3743 masm.pushValue(R0);
3744 masm.pushValue(R1);
3746 // Push arguments.
3747 masm.pushValue(R1);
3748 masm.pushValue(R0);
3749 masm.push(ICStubReg);
3750 pushStubPayload(masm, R0.scratchReg());
3752 using Fn = bool (*)(JSContext*, BaselineFrame*, ICBinaryArith_Fallback*,
3753 HandleValue, HandleValue, MutableHandleValue);
3754 return tailCallVM<Fn, DoBinaryArithFallback>(masm);
3758 // Compare_Fallback
3760 bool DoCompareFallback(JSContext* cx, BaselineFrame* frame,
3761 ICCompare_Fallback* stub, HandleValue lhs,
3762 HandleValue rhs, MutableHandleValue ret) {
3763 stub->incrementEnteredCount();
3765 RootedScript script(cx, frame->script());
3766 jsbytecode* pc = stub->icEntry()->pc(script);
3767 JSOp op = JSOp(*pc);
3769 FallbackICSpew(cx, stub, "Compare(%s)", CodeName(op));
3771 // Don't pass lhs/rhs directly; we need the original values when
3772 // generating stubs.
3773 RootedValue lhsCopy(cx, lhs);
3774 RootedValue rhsCopy(cx, rhs);
3776 // Perform the compare operation.
3777 bool out;
3778 switch (op) {
3779 case JSOp::Lt:
3780 if (!LessThan(cx, &lhsCopy, &rhsCopy, &out)) {
3781 return false;
3783 break;
3784 case JSOp::Le:
3785 if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
3786 return false;
3788 break;
3789 case JSOp::Gt:
3790 if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out)) {
3791 return false;
3793 break;
3794 case JSOp::Ge:
3795 if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
3796 return false;
3798 break;
3799 case JSOp::Eq:
3800 if (!LooselyEqual<EqualityKind::Equal>(cx, &lhsCopy, &rhsCopy, &out)) {
3801 return false;
3803 break;
3804 case JSOp::Ne:
3805 if (!LooselyEqual<EqualityKind::NotEqual>(cx, &lhsCopy, &rhsCopy, &out)) {
3806 return false;
3808 break;
3809 case JSOp::StrictEq:
3810 if (!StrictlyEqual<EqualityKind::Equal>(cx, &lhsCopy, &rhsCopy, &out)) {
3811 return false;
3813 break;
3814 case JSOp::StrictNe:
3815 if (!StrictlyEqual<EqualityKind::NotEqual>(cx, &lhsCopy, &rhsCopy,
3816 &out)) {
3817 return false;
3819 break;
3820 default:
3821 MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
3822 return false;
3825 ret.setBoolean(out);
3827 TryAttachStub<CompareIRGenerator>("Compare", cx, frame, stub,
3828 BaselineCacheIRStubKind::Regular, op, lhs,
3829 rhs);
3830 return true;

bool FallbackICCodeCompiler::emit_Compare() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());
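
  // Same stack discipline as emit_BinaryArith above: operands synced for
  // the decompiler, then VM arguments in reverse signature order.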

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICCompare_Fallback*,
                      HandleValue, HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoCompareFallback>(masm);
}

//
// NewArray_Fallback
//

bool DoNewArrayFallback(JSContext* cx, BaselineFrame* frame,
                        ICNewArray_Fallback* stub, uint32_t length,
                        MutableHandleValue res) {
  stub->incrementEnteredCount();
  FallbackICSpew(cx, stub, "NewArray");

  RootedObject obj(cx);
  if (stub->templateObject()) {
    RootedObject templateObject(cx, stub->templateObject());
    obj = NewArrayOperationWithTemplate(cx, templateObject);
    if (!obj) {
      return false;
    }
  } else {
    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);

    obj = NewArrayOperation(cx, script, pc, length);
    if (!obj) {
      return false;
    }

    if (!obj->isSingleton()) {
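      // Note: the template is allocated tenured; stubs keep a direct pointer
      // to it, which must stay valid across nursery GCs.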
      ArrayObject* templateObject =
          NewArrayOperation(cx, script, pc, length, TenuredObject);
      if (!templateObject) {
        return false;
      }
      stub->setTemplateObject(templateObject);
    }
  }

  res.setObject(*obj);
  return true;
}

bool FallbackICCodeCompiler::emit_NewArray() {
  EmitRestoreTailCallReg(masm);

  masm.push(R0.scratchReg());  // length
  masm.push(ICStubReg);        // stub.
  masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICNewArray_Fallback*,
                      uint32_t, MutableHandleValue);
  return tailCallVM<Fn, DoNewArrayFallback>(masm);
}

//
// NewObject_Fallback
//

bool DoNewObjectFallback(JSContext* cx, BaselineFrame* frame,
                         ICNewObject_Fallback* stub, MutableHandleValue res) {
  stub->incrementEnteredCount();
  FallbackICSpew(cx, stub, "NewObject");

  RootedObject obj(cx);

  RootedObject templateObject(cx, stub->templateObject());
  if (templateObject) {
    MOZ_ASSERT(
        !templateObject->group()->maybePreliminaryObjectsDontCheckGeneration());
    obj = NewObjectOperationWithTemplate(cx, templateObject);
  } else {
    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    obj = NewObjectOperation(cx, script, pc);
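
    // Only cache a template (and try to attach a stub) once the group has no
    // preliminary objects left, i.e. once its layout has had a chance to
    // settle.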
    if (obj && !obj->isSingleton() &&
        !obj->group()->maybePreliminaryObjectsDontCheckGeneration()) {
      templateObject = NewObjectOperation(cx, script, pc, TenuredObject);
      if (!templateObject) {
        return false;
      }

      TryAttachStub<NewObjectIRGenerator>("NewObject", cx, frame, stub,
                                          BaselineCacheIRStubKind::Regular,
                                          JSOp(*pc), templateObject);

      stub->setTemplateObject(templateObject);
    }
  }

  if (!obj) {
    return false;
  }

  res.setObject(*obj);
  return true;
}

bool FallbackICCodeCompiler::emit_NewObject() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);  // stub.
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICNewObject_Fallback*,
                      MutableHandleValue);
  return tailCallVM<Fn, DoNewObjectFallback>(masm);
}

bool JitRuntime::generateBaselineICFallbackCode(JSContext* cx) {
  StackMacroAssembler masm;

  BaselineICFallbackCode& fallbackCode = baselineICFallbackCode_.ref();
  FallbackICCodeCompiler compiler(cx, fallbackCode, masm);

  JitSpew(JitSpew_Codegen, "# Emitting Baseline IC fallback code");

#define EMIT_CODE(kind)                                            \
  {                                                                \
    uint32_t offset = startTrampolineCode(masm);                   \
    InitMacroAssemblerForICStub(masm);                             \
    if (!compiler.emit_##kind()) {                                 \
      return false;                                                \
    }                                                              \
    fallbackCode.initOffset(BaselineICFallbackKind::kind, offset); \
  }
  IC_BASELINE_FALLBACK_CODE_KIND_LIST(EMIT_CODE)
#undef EMIT_CODE
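
  // All fallback stubs were emitted into a single masm buffer; link it into
  // one JitCode allocation, with each kind reachable via its recorded offset.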
  Linker linker(masm);
  JitCode* code = linker.newCode(cx, CodeKind::Other);
  if (!code) {
    return false;
  }

#ifdef JS_ION_PERF
  writePerfSpewerJitCodeProfile(code, "BaselineICFallback");
#endif
#ifdef MOZ_VTUNE
  vtune::MarkStub(code, "BaselineICFallback");
#endif

  fallbackCode.initCode(code);
  return true;
}
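
// A CacheIR stub's layout is described by its CacheIRStubInfo, and its stub
// data (the field values the CacheIR code reads) starts at stubDataStart().
// The accessors below dispatch on the stub kind to locate both.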
const CacheIRStubInfo* ICStub::cacheIRStubInfo() const {
  switch (kind()) {
    case ICStub::CacheIR_Regular:
      return toCacheIR_Regular()->stubInfo();
    case ICStub::CacheIR_Monitored:
      return toCacheIR_Monitored()->stubInfo();
    case ICStub::CacheIR_Updated:
      return toCacheIR_Updated()->stubInfo();
    default:
      MOZ_CRASH("Not a CacheIR stub");
  }
}

const uint8_t* ICStub::cacheIRStubData() {
  switch (kind()) {
    case ICStub::CacheIR_Regular:
      return toCacheIR_Regular()->stubDataStart();
    case ICStub::CacheIR_Monitored:
      return toCacheIR_Monitored()->stubDataStart();
    case ICStub::CacheIR_Updated:
      return toCacheIR_Updated()->stubDataStart();
    default:
      MOZ_CRASH("Not a CacheIR stub");
  }
}

}  // namespace jit
}  // namespace js