1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineIC.h"
9 #include "mozilla/Casting.h"
10 #include "mozilla/DebugOnly.h"
11 #include "mozilla/IntegerPrintfMacros.h"
12 #include "mozilla/ScopeExit.h"
13 #include "mozilla/Sprintf.h"
14 #include "mozilla/TemplateLib.h"
15 #include "mozilla/Unused.h"
17 #include "jsfriendapi.h"
18 #include "jslibmath.h"
21 #include "builtin/Eval.h"
22 #include "gc/Policy.h"
23 #include "jit/BaselineCacheIRCompiler.h"
24 #include "jit/BaselineDebugModeOSR.h"
25 #include "jit/BaselineJIT.h"
26 #include "jit/InlinableNatives.h"
27 #include "jit/JitSpewer.h"
28 #include "jit/Linker.h"
29 #include "jit/Lowering.h"
31 # include "jit/PerfSpewer.h"
33 #include "jit/SharedICHelpers.h"
34 #include "jit/VMFunctions.h"
35 #include "js/Conversions.h"
36 #include "js/GCVector.h"
37 #include "vm/BytecodeIterator.h"
38 #include "vm/BytecodeLocation.h"
39 #include "vm/BytecodeUtil.h"
40 #include "vm/JSFunction.h"
41 #include "vm/JSScript.h"
42 #include "vm/Opcodes.h"
43 #include "vm/SelfHosting.h"
44 #include "vm/TypedArrayObject.h"
46 # include "vtune/VTuneWrapper.h"
49 #include "builtin/Boolean-inl.h"
51 #include "jit/JitFrames-inl.h"
52 #include "jit/MacroAssembler-inl.h"
53 #include "jit/shared/Lowering-shared-inl.h"
54 #include "jit/SharedICHelpers-inl.h"
55 #include "jit/VMFunctionList-inl.h"
56 #include "vm/BytecodeIterator-inl.h"
57 #include "vm/BytecodeLocation-inl.h"
58 #include "vm/EnvironmentObject-inl.h"
59 #include "vm/Interpreter-inl.h"
60 #include "vm/JSScript-inl.h"
61 #include "vm/StringObject-inl.h"
63 using mozilla::DebugOnly
;
68 // Class used to emit all Baseline IC fallback code when initializing the
// JitRuntime: one emit_* method per fallback kind (see the X-macro list below).
// NOTE(review): extraction dropped interior lines of this class (access
// specifiers, the masm member declaration, etc.) — verify against upstream.
70 class MOZ_RAII FallbackICCodeCompiler final
: public ICStubCompilerBase
{
// The fallback-code table being populated by this compiler.
71 BaselineICFallbackCode
& code
;
// Parameterized emitters shared by several fallback kinds.
74 MOZ_MUST_USE
bool emitCall(bool isSpread
, bool isConstructing
);
75 MOZ_MUST_USE
bool emitGetElem(bool hasReceiver
);
76 MOZ_MUST_USE
bool emitGetProp(bool hasReceiver
);
// Constructor: binds the context, output code table, and assembler.
79 FallbackICCodeCompiler(JSContext
* cx
, BaselineICFallbackCode
& code
,
81 : ICStubCompilerBase(cx
), code(code
), masm(masm
) {}
// Declare one emit_<Kind>() per fallback kind via the X-macro list.
83 #define DEF_METHOD(kind) MOZ_MUST_USE bool emit_##kind();
84 IC_BASELINE_FALLBACK_CODE_KIND_LIST(DEF_METHOD
)
// Debug spew helper: logs a printf-style message plus script/pc/stub-count
// context whenever a Baseline IC fallback stub is hit. Only active when the
// BaselineICFallback spew channel is enabled.
89 void FallbackICSpew(JSContext
* cx
, ICFallbackStub
* stub
, const char* fmt
, ...) {
90 if (JitSpewEnabled(JitSpew_BaselineICFallback
)) {
91 RootedScript
script(cx
, GetTopJitJSScript(cx
));
// Recover the bytecode pc for this IC from its entry.
92 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
// Format the caller's message into fmtbuf (varargs; see upstream for the
// va_start/va_end lines dropped by extraction).
97 (void)VsprintfLiteral(fmtbuf
, fmt
, args
);
101 JitSpew_BaselineICFallback
,
102 "Fallback hit for (%s:%u:%u) (pc=%zu,line=%u,uses=%u,stubs=%zu): %s",
103 script
->filename(), script
->lineno(), script
->column(),
104 script
->pcToOffset(pc
), PCToLineNumber(script
, pc
),
105 script
->getWarmUpCount(), stub
->numOptimizedStubs(), fmtbuf
);
// Debug spew helper for type-monitor fallback hits; mirrors FallbackICSpew
// but reports the optimized *monitor* stub count instead.
109 void TypeFallbackICSpew(JSContext
* cx
, ICTypeMonitor_Fallback
* stub
,
110 const char* fmt
, ...) {
111 if (JitSpewEnabled(JitSpew_BaselineICFallback
)) {
112 RootedScript
script(cx
, GetTopJitJSScript(cx
));
// Recover the bytecode pc for this IC from its entry.
113 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
118 (void)VsprintfLiteral(fmtbuf
, fmt
, args
);
121 JitSpew(JitSpew_BaselineICFallback
,
122 "Type monitor fallback hit for (%s:%u:%u) "
123 "(pc=%zu,line=%u,uses=%u,stubs=%d): %s",
124 script
->filename(), script
->lineno(), script
->column(),
125 script
->pcToOffset(pc
), PCToLineNumber(script
, pc
),
126 script
->getWarmUpCount(), (int)stub
->numOptimizedMonitorStubs(),
// Return the fallback stub at the end of this entry's stub chain.
132 ICFallbackStub
* ICEntry::fallbackStub() const {
133 return firstStub()->getChainFallback();
// GC tracing for an ICEntry: validates the diagnostic padding magic, then
// walks the stub chain (each stub is traced in the loop body, dropped here
// by extraction).
136 void ICEntry::trace(JSTracer
* trc
) {
138 // If we have filled our padding with a magic value, check it now.
139 MOZ_DIAGNOSTIC_ASSERT(traceMagic_
== EXPECTED_TRACE_MAGIC
);
141 for (ICStub
* stub
= firstStub(); stub
; stub
= stub
->next()) {
146 // Allocator for Baseline IC fallback stubs. These stubs use trampoline code
147 // stored in JitRuntime.
148 class MOZ_RAII FallbackStubAllocator
{
// Stub space the new stubs are allocated from.
150 ICStubSpace
& stubSpace_
;
// Shared runtime-wide fallback trampoline code table.
151 const BaselineICFallbackCode
& code_
;
154 FallbackStubAllocator(JSContext
* cx
, ICStubSpace
& stubSpace
)
156 stubSpace_(stubSpace
),
157 code_(cx
->runtime()->jitRuntime()->baselineICFallbackCode()) {}
// Allocate a fallback stub of type T, wiring in the trampoline address for
// |kind| and forwarding any extra constructor arguments.
159 template <typename T
, typename
... Args
>
160 T
* newStub(BaselineICFallbackKind kind
, Args
&&... args
) {
161 TrampolinePtr addr
= code_
.addr(kind
);
162 return ICStub::NewFallback
<T
>(cx_
, &stubSpace_
, addr
,
163 std::forward
<Args
>(args
)...);
167 // Helper method called by lambda expressions `addIC` and `addPrologueIC` in
168 // `JitScript::initICEntriesAndBytecodeTypeMap`.
// Placement-news the next ICEntry in |icScript| for |stub| at bytecode
// |offset| and fixes up the stub's back-pointer to its entry.
169 static bool AddICImpl(JSContext
* cx
, ICScript
* icScript
, uint32_t offset
,
170 ICStub
* stub
, uint32_t& icEntryIndex
) {
// This path is only reached on stub-allocation failure (dropped branch
// header above, per extraction) — an exception must already be pending.
172 MOZ_ASSERT(cx
->isExceptionPending());
173 mozilla::Unused
<< cx
; // Silence -Wunused-lambda-capture in opt builds.
177 // Initialize the ICEntry.
178 ICEntry
& entryRef
= icScript
->icEntry(icEntryIndex
);
180 new (&entryRef
) ICEntry(stub
, offset
);
182 // Fix up pointers from fallback stubs to the ICEntry.
183 if (stub
->isFallback()) {
184 stub
->toFallbackStub()->fixupICEntry(&entryRef
);
// Otherwise the stub is a type-monitor fallback (else-branch header
// dropped by extraction).
186 stub
->toTypeMonitor_Fallback()->fixupICEntry(&entryRef
);
192 bool ICScript::initICEntries(JSContext
* cx
, JSScript
* script
) {
193 MOZ_ASSERT(cx
->realm()->jitRealm());
194 MOZ_ASSERT(jit::IsBaselineInterpreterEnabled());
196 MOZ_ASSERT(numICEntries() == script
->numICEntries());
198 FallbackStubAllocator
alloc(cx
, *fallbackStubSpace());
200 // Index of the next ICEntry to initialize.
201 uint32_t icEntryIndex
= 0;
203 using Kind
= BaselineICFallbackKind
;
205 auto addIC
= [cx
, this, script
, &icEntryIndex
](BytecodeLocation loc
,
207 uint32_t offset
= loc
.bytecodeToOffset(script
);
208 return AddICImpl(cx
, this, offset
, stub
, icEntryIndex
);
211 // Lambda expression for adding ICs for non-op ICs
212 auto addPrologueIC
= [cx
, this, &icEntryIndex
](ICStub
* stub
) {
213 return AddICImpl(cx
, this, ICEntry::ProloguePCOffset
, stub
, icEntryIndex
);
216 if (IsTypeInferenceEnabled()) {
217 // Add ICEntries and fallback stubs for this/argument type checks.
218 // Note: we pass a nullptr pc to indicate this is a non-op IC.
219 // See ICEntry::NonOpPCOffset.
220 if (JSFunction
* fun
= script
->function()) {
222 alloc
.newStub
<ICTypeMonitor_Fallback
>(Kind::TypeMonitor
, nullptr, 0);
223 if (!addPrologueIC(stub
)) {
227 for (size_t i
= 0; i
< fun
->nargs(); i
++) {
228 ICStub
* stub
= alloc
.newStub
<ICTypeMonitor_Fallback
>(Kind::TypeMonitor
,
230 if (!addPrologueIC(stub
)) {
237 // For JOF_IC ops: initialize ICEntries and fallback stubs.
238 for (BytecodeLocation loc
: js::AllBytecodesIterable(script
)) {
239 JSOp op
= loc
.getOp();
241 // Assert the frontend stored the correct IC index in jump target ops.
242 MOZ_ASSERT_IF(BytecodeIsJumpTarget(op
), loc
.icIndex() == icEntryIndex
);
244 if (!BytecodeOpHasIC(op
)) {
254 ICStub
* stub
= alloc
.newStub
<ICToBool_Fallback
>(Kind::ToBool
);
255 if (!addIC(loc
, stub
)) {
265 case JSOp::ToNumeric
: {
266 ICStub
* stub
= alloc
.newStub
<ICUnaryArith_Fallback
>(Kind::UnaryArith
);
267 if (!addIC(loc
, stub
)) {
284 ICStub
* stub
= alloc
.newStub
<ICBinaryArith_Fallback
>(Kind::BinaryArith
);
285 if (!addIC(loc
, stub
)) {
297 case JSOp::StrictNe
: {
298 ICStub
* stub
= alloc
.newStub
<ICCompare_Fallback
>(Kind::Compare
);
299 if (!addIC(loc
, stub
)) {
304 case JSOp::NewArray
: {
305 ObjectGroup
* group
= ObjectGroup::allocationSiteGroup(
306 cx
, script
, loc
.toRawBytecode(), JSProto_Array
);
311 alloc
.newStub
<ICNewArray_Fallback
>(Kind::NewArray
, group
);
312 if (!addIC(loc
, stub
)) {
317 case JSOp::NewObject
:
318 case JSOp::NewObjectWithGroup
:
319 case JSOp::NewInit
: {
320 ICStub
* stub
= alloc
.newStub
<ICNewObject_Fallback
>(Kind::NewObject
);
321 if (!addIC(loc
, stub
)) {
327 case JSOp::InitHiddenElem
:
328 case JSOp::InitLockedElem
:
329 case JSOp::InitElemArray
:
330 case JSOp::InitElemInc
:
332 case JSOp::StrictSetElem
: {
333 ICStub
* stub
= alloc
.newStub
<ICSetElem_Fallback
>(Kind::SetElem
);
334 if (!addIC(loc
, stub
)) {
340 case JSOp::InitLockedProp
:
341 case JSOp::InitHiddenProp
:
342 case JSOp::InitGLexical
:
344 case JSOp::StrictSetProp
:
346 case JSOp::StrictSetName
:
348 case JSOp::StrictSetGName
: {
349 ICStub
* stub
= alloc
.newStub
<ICSetProp_Fallback
>(Kind::SetProp
);
350 if (!addIC(loc
, stub
)) {
358 case JSOp::GetBoundName
: {
359 ICStub
* stub
= alloc
.newStub
<ICGetProp_Fallback
>(Kind::GetProp
);
360 if (!addIC(loc
, stub
)) {
365 case JSOp::GetPropSuper
: {
366 ICStub
* stub
= alloc
.newStub
<ICGetProp_Fallback
>(Kind::GetPropSuper
);
367 if (!addIC(loc
, stub
)) {
373 case JSOp::CallElem
: {
374 ICStub
* stub
= alloc
.newStub
<ICGetElem_Fallback
>(Kind::GetElem
);
375 if (!addIC(loc
, stub
)) {
380 case JSOp::GetElemSuper
: {
381 ICStub
* stub
= alloc
.newStub
<ICGetElem_Fallback
>(Kind::GetElemSuper
);
382 if (!addIC(loc
, stub
)) {
388 ICStub
* stub
= alloc
.newStub
<ICIn_Fallback
>(Kind::In
);
389 if (!addIC(loc
, stub
)) {
395 ICStub
* stub
= alloc
.newStub
<ICHasOwn_Fallback
>(Kind::HasOwn
);
396 if (!addIC(loc
, stub
)) {
401 case JSOp::CheckPrivateField
: {
402 ICStub
* stub
= alloc
.newStub
<ICCheckPrivateField_Fallback
>(
403 Kind::CheckPrivateField
);
404 if (!addIC(loc
, stub
)) {
410 case JSOp::GetGName
: {
411 ICStub
* stub
= alloc
.newStub
<ICGetName_Fallback
>(Kind::GetName
);
412 if (!addIC(loc
, stub
)) {
418 case JSOp::BindGName
: {
419 ICStub
* stub
= alloc
.newStub
<ICBindName_Fallback
>(Kind::BindName
);
420 if (!addIC(loc
, stub
)) {
425 case JSOp::GetAliasedVar
:
426 case JSOp::GetImport
: {
428 alloc
.newStub
<ICTypeMonitor_Fallback
>(Kind::TypeMonitor
, nullptr);
429 if (!addIC(loc
, stub
)) {
434 case JSOp::GetIntrinsic
: {
436 alloc
.newStub
<ICGetIntrinsic_Fallback
>(Kind::GetIntrinsic
);
437 if (!addIC(loc
, stub
)) {
443 case JSOp::CallIgnoresRv
:
448 case JSOp::StrictEval
: {
449 ICStub
* stub
= alloc
.newStub
<ICCall_Fallback
>(Kind::Call
);
450 if (!addIC(loc
, stub
)) {
455 case JSOp::SuperCall
:
457 ICStub
* stub
= alloc
.newStub
<ICCall_Fallback
>(Kind::CallConstructing
);
458 if (!addIC(loc
, stub
)) {
463 case JSOp::SpreadCall
:
464 case JSOp::SpreadEval
:
465 case JSOp::StrictSpreadEval
: {
466 ICStub
* stub
= alloc
.newStub
<ICCall_Fallback
>(Kind::SpreadCall
);
467 if (!addIC(loc
, stub
)) {
472 case JSOp::SpreadSuperCall
:
473 case JSOp::SpreadNew
: {
475 alloc
.newStub
<ICCall_Fallback
>(Kind::SpreadCallConstructing
);
476 if (!addIC(loc
, stub
)) {
481 case JSOp::Instanceof
: {
482 ICStub
* stub
= alloc
.newStub
<ICInstanceOf_Fallback
>(Kind::InstanceOf
);
483 if (!addIC(loc
, stub
)) {
489 case JSOp::TypeofExpr
: {
490 ICStub
* stub
= alloc
.newStub
<ICTypeOf_Fallback
>(Kind::TypeOf
);
491 if (!addIC(loc
, stub
)) {
496 case JSOp::ToPropertyKey
: {
498 alloc
.newStub
<ICToPropertyKey_Fallback
>(Kind::ToPropertyKey
);
499 if (!addIC(loc
, stub
)) {
505 ICStub
* stub
= alloc
.newStub
<ICGetIterator_Fallback
>(Kind::GetIterator
);
506 if (!addIC(loc
, stub
)) {
512 ArrayObject
* templateObject
= ObjectGroup::newArrayObject(
513 cx
, nullptr, 0, TenuredObject
,
514 ObjectGroup::NewArrayKind::UnknownIndex
);
515 if (!templateObject
) {
519 alloc
.newStub
<ICRest_Fallback
>(Kind::Rest
, templateObject
);
520 if (!addIC(loc
, stub
)) {
526 MOZ_CRASH("JOF_IC op not handled");
530 // Assert all ICEntries have been initialized.
531 MOZ_ASSERT(icEntryIndex
== numICEntries());
// Advance the const iterator to the next stub in the chain; must not be
// called once past the end.
535 ICStubConstIterator
& ICStubConstIterator::operator++() {
536 MOZ_ASSERT(currentStub_
!= nullptr);
537 currentStub_
= currentStub_
->next();
// Construct a mutable stub-chain iterator for a fallback stub's chain.
// |end| selects the one-past-the-end position (the fallback stub itself)
// instead of the chain head.
541 ICStubIterator::ICStubIterator(ICFallbackStub
* fallbackStub
, bool end
)
542 : icEntry_(fallbackStub
->icEntry()),
543 fallbackStub_(fallbackStub
),
544 previousStub_(nullptr),
545 currentStub_(end
? fallbackStub
: icEntry_
->firstStub()),
// Advance the mutable iterator, remembering the previous stub so unlink()
// can splice the current one out of the chain.
548 ICStubIterator
& ICStubIterator::operator++() {
549 MOZ_ASSERT(currentStub_
->next() != nullptr);
// Remember predecessor before advancing (an unlink-handling branch here was
// dropped by extraction — verify against upstream).
551 previousStub_
= currentStub_
;
553 currentStub_
= currentStub_
->next();
// Remove the current stub from the chain. The fallback stub itself can never
// be unlinked. May trigger Warp/Ion invalidation if the chain was used by
// the transpiler.
558 void ICStubIterator::unlink(JSContext
* cx
, JSScript
* script
) {
559 MOZ_ASSERT(currentStub_
->next() != nullptr);
560 MOZ_ASSERT(currentStub_
!= fallbackStub_
);
561 MOZ_ASSERT(!unlinked_
);
563 fallbackStub_
->maybeInvalidateWarp(cx
, script
);
564 fallbackStub_
->unlinkStubDontInvalidateWarp(cx
->zone(), previousStub_
,
567 // Mark the current iterator position as unlinked, so operator++ works
// Whether a non-CacheIR stub of |kind| can make GC calls (i.e. pushes a stub
// frame). Answered by a switch on |kind| whose header was dropped by
// extraction.
573 bool ICStub::NonCacheIRStubMakesGCCalls(Kind kind
) {
574 MOZ_ASSERT(IsValidKind(kind
));
575 MOZ_ASSERT(!IsCacheIRKind(kind
));
579 // These three fallback stubs don't actually make non-tail calls,
580 // but the fallback code for the bailout path needs to pop the stub frame
581 // pushed during the bailout.
582 case GetProp_Fallback
:
583 case SetProp_Fallback
:
584 case GetElem_Fallback
:
// Whether this stub can make GC calls. CacheIR stubs consult their stub
// info; all other kinds defer to the static per-kind table.
591 bool ICStub::makesGCCalls() const {
593 case CacheIR_Regular
:
594 return toCacheIR_Regular()->stubInfo()->makesGCCalls();
595 case CacheIR_Monitored
:
596 return toCacheIR_Monitored()->stubInfo()->makesGCCalls();
597 case CacheIR_Updated
:
598 return toCacheIR_Updated()->stubInfo()->makesGCCalls();
600 return NonCacheIRStubMakesGCCalls(kind());
// Number of times this stub has been entered; CacheIR stubs track it
// per-stub, everything else falls back to the fallback stub's counter.
604 uint32_t ICStub::getEnteredCount() const {
606 case CacheIR_Regular
:
607 return toCacheIR_Regular()->enteredCount();
608 case CacheIR_Updated
:
609 return toCacheIR_Updated()->enteredCount();
610 case CacheIR_Monitored
:
611 return toCacheIR_Monitored()->enteredCount();
613 return toFallbackStub()->enteredCount();
// If the Warp transpiler consumed this IC chain, invalidate the script's Ion
// code (the compiled code baked in assumptions about the chain) and clear
// the used-by-transpiler flag.
617 void ICFallbackStub::maybeInvalidateWarp(JSContext
* cx
, JSScript
* script
) {
// Early-out when the transpiler never used this chain.
618 if (!state_
.usedByTranspiler()) {
622 MOZ_ASSERT(JitOptions
.warpBuilder
);
623 clearUsedByTranspiler();
625 if (script
->hasIonScript()) {
626 Invalidate(cx
, script
);
// Swap in new jitcode for this stub, pre-barriering the old code pointer.
630 void ICStub::updateCode(JitCode
* code
) {
631 // Write barrier on the old code.
632 JitCode::writeBarrierPre(jitCode());
633 stubCode_
= code
->raw();
637 void ICStub::trace(JSTracer
* trc
) {
638 #ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
641 // Fallback stubs use runtime-wide trampoline code we don't need to trace.
642 if (!usesTrampolineCode()) {
643 JitCode
* stubJitCode
= jitCode();
644 TraceManuallyBarrieredEdge(trc
, &stubJitCode
, "baseline-ic-stub-code");
647 // If the stub is a monitored fallback stub, then trace the monitor ICs
648 // hanging off of that stub. We don't need to worry about the regular
649 // monitored stubs, because the regular monitored stubs will always have a
650 // monitored fallback stub that references the same stub chain.
651 if (isMonitoredFallback()) {
652 ICTypeMonitor_Fallback
* lastMonStub
=
653 toMonitoredFallbackStub()->maybeFallbackMonitorStub();
655 for (ICStubConstIterator
iter(lastMonStub
->firstMonitorStub());
656 !iter
.atEnd(); iter
++) {
657 MOZ_ASSERT_IF(iter
->next() == nullptr, *iter
== lastMonStub
);
664 for (ICStubConstIterator
iter(toUpdatedStub()->firstUpdateStub());
665 !iter
.atEnd(); iter
++) {
666 MOZ_ASSERT_IF(iter
->next() == nullptr, iter
->isTypeUpdate_Fallback());
672 case ICStub::TypeMonitor_SingleObject
: {
673 ICTypeMonitor_SingleObject
* monitorStub
= toTypeMonitor_SingleObject();
674 TraceEdge(trc
, &monitorStub
->object(), "baseline-monitor-singleton");
677 case ICStub::TypeMonitor_ObjectGroup
: {
678 ICTypeMonitor_ObjectGroup
* monitorStub
= toTypeMonitor_ObjectGroup();
679 TraceEdge(trc
, &monitorStub
->group(), "baseline-monitor-group");
682 case ICStub::TypeUpdate_SingleObject
: {
683 ICTypeUpdate_SingleObject
* updateStub
= toTypeUpdate_SingleObject();
684 TraceEdge(trc
, &updateStub
->object(), "baseline-update-singleton");
687 case ICStub::TypeUpdate_ObjectGroup
: {
688 ICTypeUpdate_ObjectGroup
* updateStub
= toTypeUpdate_ObjectGroup();
689 TraceEdge(trc
, &updateStub
->group(), "baseline-update-group");
692 case ICStub::NewArray_Fallback
: {
693 ICNewArray_Fallback
* stub
= toNewArray_Fallback();
694 TraceNullableEdge(trc
, &stub
->templateObject(),
695 "baseline-newarray-template");
696 TraceEdge(trc
, &stub
->templateGroup(),
697 "baseline-newarray-template-group");
700 case ICStub::NewObject_Fallback
: {
701 ICNewObject_Fallback
* stub
= toNewObject_Fallback();
702 TraceNullableEdge(trc
, &stub
->templateObject(),
703 "baseline-newobject-template");
706 case ICStub::Rest_Fallback
: {
707 ICRest_Fallback
* stub
= toRest_Fallback();
708 TraceEdge(trc
, &stub
->templateObject(), "baseline-rest-template");
711 case ICStub::CacheIR_Regular
:
712 TraceCacheIRStub(trc
, this, toCacheIR_Regular()->stubInfo());
714 case ICStub::CacheIR_Monitored
:
715 TraceCacheIRStub(trc
, this, toCacheIR_Monitored()->stubInfo());
717 case ICStub::CacheIR_Updated
: {
718 ICCacheIR_Updated
* stub
= toCacheIR_Updated();
719 TraceNullableEdge(trc
, &stub
->updateStubGroup(),
720 "baseline-update-stub-group");
721 TraceEdge(trc
, &stub
->updateStubId(), "baseline-update-stub-id");
722 TraceCacheIRStub(trc
, this, stub
->stubInfo());
730 // This helper handles ICState updates/transitions while attaching CacheIR
// stubs: possibly discards the chain on a state transition, then runs the
// IRGenerator and attaches the resulting stub (or records the failure).
732 template <typename IRGenerator
, typename
... Args
>
733 static void TryAttachStub(const char* name
, JSContext
* cx
, BaselineFrame
* frame
,
734 ICFallbackStub
* stub
, BaselineCacheIRStubKind kind
,
// A state transition (e.g. to megamorphic) discards existing stubs first.
736 if (stub
->state().maybeTransition()) {
737 stub
->discardStubs(cx
, frame
->invalidationScript());
740 if (stub
->state().canAttachStub()) {
741 RootedScript
script(cx
, frame
->script());
742 ICScript
* icScript
= frame
->icScript();
743 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
745 bool attached
= false;
746 IRGenerator
gen(cx
, script
, pc
, stub
->state().mode(),
747 std::forward
<Args
>(args
)...);
748 switch (gen
.tryAttachStub()) {
749 case AttachDecision::Attach
: {
751 AttachBaselineCacheIRStub(cx
, gen
.writerRef(), gen
.cacheKind(),
752 kind
, script
, icScript
, stub
, &attached
);
// Spew only on success (guard dropped by extraction).
754 JitSpew(JitSpew_BaselineIC
, " Attached %s CacheIR stub", name
);
757 case AttachDecision::NoAction
:
759 case AttachDecision::TemporarilyUnoptimizable
:
760 case AttachDecision::Deferred
:
761 MOZ_ASSERT_UNREACHABLE("Not expected in generic TryAttachStub");
// Record a failed attach so the state machine can transition later.
765 stub
->state().trackNotAttached();
// Splice |stub| out of this fallback stub's chain without triggering Warp
// invalidation (the caller handles that). Handles incremental-GC barriers,
// monitor-stub back-pointer fixes, and poisons the unlinked stub's code.
770 void ICFallbackStub::unlinkStubDontInvalidateWarp(Zone
* zone
, ICStub
* prev
,
772 MOZ_ASSERT(stub
->next());
// Unlink from the middle of the chain, or from the head when prev is null
// (the else-branch header was dropped by extraction).
775 MOZ_ASSERT(prev
->next() == stub
);
776 prev
->setNext(stub
->next());
778 MOZ_ASSERT(icEntry()->firstStub() == stub
);
779 icEntry()->setFirstStub(stub
->next());
782 state_
.trackUnlinkedStub();
784 if (zone
->needsIncrementalBarrier()) {
785 // We are removing edges from ICStub to gcthings. Perform one final trace
786 // of the stub for incremental GC, as it must know about those edges.
787 stub
->trace(zone
->barrierTracer());
790 if (IsTypeInferenceEnabled() && stub
->makesGCCalls() && stub
->isMonitored()) {
791 // This stub can make calls so we can return to it if it's on the stack.
792 // We just have to reset its firstMonitorStub_ field to avoid a stale
793 // pointer when purgeOptimizedStubs destroys all optimized monitor
794 // stubs (unlinked stubs won't be updated).
795 ICTypeMonitor_Fallback
* monitorFallback
=
796 toMonitoredFallbackStub()->maybeFallbackMonitorStub();
797 MOZ_ASSERT(monitorFallback
);
798 stub
->toMonitoredStub()->resetFirstMonitorStub(monitorFallback
);
801 #ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
802 stub
->checkTraceMagic();
805 // Poison stub code to ensure we don't call this stub again. However, if
806 // this stub can make calls, a pointer to it may be stored in a stub frame
807 // on the stack, so we can't touch the stubCode_ or GC will crash when
808 // tracing this pointer.
809 if (!stub
->makesGCCalls()) {
810 stub
->stubCode_
= (uint8_t*)0xbad;
// Unlink every optimized stub in this fallback stub's chain.
815 void ICFallbackStub::discardStubs(JSContext
* cx
, JSScript
* script
) {
816 for (ICStubIterator iter
= beginChain(); !iter
.atEnd(); iter
++) {
817 iter
.unlink(cx
, script
);
// Discard all optimized type-monitor stubs, making this fallback stub the
// only stub in the monitor chain, and repoint all monitored stubs at it.
821 void ICTypeMonitor_Fallback::resetMonitorStubChain(Zone
* zone
) {
822 if (zone
->needsIncrementalBarrier()) {
823 // We are removing edges from monitored stubs to gcthings (JitCode).
824 // Perform one final trace of all monitor stubs for incremental GC,
825 // as it must know about those edges.
826 for (ICStub
* s
= firstMonitorStub_
; !s
->isTypeMonitor_Fallback();
828 s
->trace(zone
->barrierTracer());
832 firstMonitorStub_
= this;
833 numOptimizedMonitorStubs_
= 0;
// Has a main fallback stub: detach and repoint its chain; else-branch for
// the argument/this-monitor case was dropped by extraction.
835 if (hasFallbackStub_
) {
836 lastMonitorStubPtrAddr_
= nullptr;
838 // Reset firstMonitorStub_ field of all monitored stubs.
839 for (ICStubConstIterator iter
= mainFallbackStub_
->beginChainConst();
840 !iter
.atEnd(); iter
++) {
841 if (!iter
->isMonitored()) {
844 iter
->toMonitoredStub()->resetFirstMonitorStub(this);
847 icEntry_
->setFirstStub(this);
848 lastMonitorStubPtrAddr_
= icEntry_
->addressOfFirstStub();
// Drop all optimized type-update stubs, leaving only the TypeUpdate_Fallback
// at the end of the chain; barrier each dropped stub for incremental GC.
852 void ICCacheIR_Updated::resetUpdateStubChain(Zone
* zone
) {
853 while (!firstUpdateStub_
->isTypeUpdate_Fallback()) {
854 if (zone
->needsIncrementalBarrier()) {
855 // We are removing edges from update stubs to gcthings (JitCode).
856 // Perform one final trace of all update stubs for incremental GC,
857 // as it must know about those edges.
858 firstUpdateStub_
->trace(zone
->barrierTracer());
860 firstUpdateStub_
= firstUpdateStub_
->next();
863 numOptimizedStubs_
= 0;
// Monitored-stub constructor: with type inference enabled the stub must be
// handed a valid first monitor stub; without it, there must be none.
866 ICMonitoredStub::ICMonitoredStub(Kind kind
, JitCode
* stubCode
,
867 ICStub
* firstMonitorStub
)
868 : ICStub(kind
, ICStub::Monitored
, stubCode
),
869 firstMonitorStub_(firstMonitorStub
) {
870 if (IsTypeInferenceEnabled()) {
871 // In order to silence Coverity - null pointer dereference checker
872 MOZ_ASSERT(firstMonitorStub_
);
873 // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
874 // double check that _its_ firstMonitorStub is the same as this one.
876 firstMonitorStub_
->isTypeMonitor_Fallback(),
877 firstMonitorStub_
->toTypeMonitor_Fallback()->firstMonitorStub() ==
880 MOZ_ASSERT(!firstMonitorStub_
);
// Lazily create the TypeMonitor_Fallback stub for this monitored fallback
// stub, allocating it from the script's fallback stub space.
884 bool ICMonitoredFallbackStub::initMonitoringChain(JSContext
* cx
,
886 MOZ_ASSERT(fallbackMonitorStub_
== nullptr);
887 MOZ_ASSERT(IsTypeInferenceEnabled());
889 ICStubSpace
* space
= script
->jitScript()->fallbackStubSpace();
890 FallbackStubAllocator
alloc(cx
, *space
);
891 auto* stub
= alloc
.newStub
<ICTypeMonitor_Fallback
>(
892 BaselineICFallbackKind::TypeMonitor
, this);
// Null-check of |stub| dropped by extraction; on success, install it.
897 fallbackMonitorStub_
= stub
;
// Record |val| in the bytecode type set for |pc| and attach/extend a type
// monitor stub for it. No-op when type inference is disabled.
901 bool TypeMonitorResult(JSContext
* cx
, ICMonitoredFallbackStub
* stub
,
902 BaselineFrame
* frame
, HandleScript script
,
903 jsbytecode
* pc
, HandleValue val
) {
904 if (!IsTypeInferenceEnabled()) {
// Get (or lazily create) the monitor fallback stub; fails on OOM.
908 ICTypeMonitor_Fallback
* typeMonitorFallback
=
909 stub
->getFallbackMonitorStub(cx
, script
);
910 if (!typeMonitorFallback
) {
914 AutoSweepJitScript
sweep(script
);
915 StackTypeSet
* types
= script
->jitScript()->bytecodeTypes(sweep
, script
, pc
);
916 JitScript::MonitorBytecodeType(cx
, script
, pc
, types
, val
);
918 return typeMonitorFallback
->addMonitorStubForValue(cx
, frame
, types
, val
);
// Create the TypeUpdate_Fallback stub that terminates this stub's update
// chain; must only run once.
921 bool ICCacheIR_Updated::initUpdatingChain(JSContext
* cx
, ICStubSpace
* space
) {
922 MOZ_ASSERT(firstUpdateStub_
== nullptr);
924 FallbackStubAllocator
alloc(cx
, *space
);
// Allocation result is assigned to |stub| (declaration line dropped by
// extraction); install it on success.
926 alloc
.newStub
<ICTypeUpdate_Fallback
>(BaselineICFallbackKind::TypeUpdate
);
931 firstUpdateStub_
= stub
;
// Choose the stub space for a new stub: GC-calling stubs live in the
// fallback stub space (ICScript's if given, else the script's); others go
// in the zone's optimized stub space.
936 ICStubSpace
* ICStubCompiler::StubSpaceForStub(bool makesGCCalls
,
938 ICScript
* icScript
) {
940 return icScript
? icScript
->fallbackStubSpace()
941 : script
->jitScript()->fallbackStubSpace();
943 return script
->zone()->jitZone()->optimizedStubSpace();
// Per-platform assembler setup applied before emitting any IC stub code.
946 static void InitMacroAssemblerForICStub(StackMacroAssembler
& masm
) {
947 #ifndef JS_USE_LINK_REGISTER
948 // The first value contains the return address,
949 // which we pull into ICTailCallReg for tail calls.
950 masm
.adjustFrame(sizeof(intptr_t));
952 #ifdef JS_CODEGEN_ARM
953 masm
.setSecondScratchReg(BaselineSecondScratchReg
);
// Return the jitcode for this stub compiler, using the realm's stub-code
// cache keyed by getKey(); compiles and caches a new stub on a miss.
957 JitCode
* ICStubCompiler::getStubCode() {
958 JitRealm
* realm
= cx
->realm()->jitRealm();
960 // Check for existing cached stubcode.
961 uint32_t stubKey
= getKey();
962 JitCode
* stubCode
= realm
->getStubCode(stubKey
);
// Cache-hit early return dropped by extraction.
967 // Compile new stubcode.
968 JitContext
jctx(cx
, nullptr);
969 StackMacroAssembler masm
;
970 InitMacroAssemblerForICStub(masm
);
972 if (!generateStubCode(masm
)) {
976 Rooted
<JitCode
*> newStubCode(cx
, linker
.newCode(cx
, CodeKind::Baseline
));
981 // Cache newly compiled stubcode.
982 if (!realm
->putStubCode(cx
, stubKey
, newStubCode
)) {
// Sanity: stub-frame usage must match the static per-kind prediction.
986 MOZ_ASSERT(entersStubFrame_
== ICStub::NonCacheIRStubMakesGCCalls(kind
));
987 MOZ_ASSERT(!inStubFrame_
);
990 writePerfSpewerJitCodeProfile(newStubCode
, "BaselineIC");
// Emit a tail call to the VM wrapper for |id|; the function must be
// declared as a tail call and its explicit args sized for the emitter.
996 bool ICStubCompilerBase::tailCallVMInternal(MacroAssembler
& masm
,
997 TailCallVMFunctionId id
) {
998 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
999 const VMFunctionData
& fun
= GetVMFunction(id
);
1000 MOZ_ASSERT(fun
.expectTailCall
== TailCall
);
1001 uint32_t argSize
= fun
.explicitStackSlots() * sizeof(void*);
1002 EmitBaselineTailCallVM(code
, masm
, argSize
);
// Emit a non-tail call to the VM wrapper for |id|; requires an active stub
// frame (set up via enterStubFrame/assumeStubFrame).
1006 bool ICStubCompilerBase::callVMInternal(MacroAssembler
& masm
, VMFunctionId id
) {
1007 MOZ_ASSERT(inStubFrame_
);
1009 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
1010 MOZ_ASSERT(GetVMFunction(id
).expectTailCall
== NonTailCall
);
1012 EmitBaselineCallVM(code
, masm
);
// Typed convenience wrapper: map the function pointer to its VMFunctionId
// at compile time and emit a non-tail VM call.
1016 template <typename Fn
, Fn fn
>
1017 bool ICStubCompilerBase::callVM(MacroAssembler
& masm
) {
1018 VMFunctionId id
= VMFunctionToId
<Fn
, fn
>::id
;
1019 return callVMInternal(masm
, id
);
// Typed convenience wrapper: map the function pointer to its tail-call
// VMFunctionId at compile time and emit a tail VM call.
1022 template <typename Fn
, Fn fn
>
1023 bool ICStubCompilerBase::tailCallVM(MacroAssembler
& masm
) {
1024 TailCallVMFunctionId id
= TailCallVMFunctionToId
<Fn
, fn
>::id
;
1025 return tailCallVMInternal(masm
, id
);
// Emit stub-frame entry code and record the frame depth for the matching
// leaveStubFrame; also tracks compiler state for assertions.
1028 void ICStubCompilerBase::enterStubFrame(MacroAssembler
& masm
,
1030 EmitBaselineEnterStubFrame(masm
, scratch
);
1032 framePushedAtEnterStubFrame_
= masm
.framePushed();
1035 MOZ_ASSERT(!inStubFrame_
);
1036 inStubFrame_
= true;
1039 entersStubFrame_
= true;
// Mark a stub frame as already present (e.g. pushed by a bailout path)
// without emitting entry code.
1043 void ICStubCompilerBase::assumeStubFrame() {
1044 MOZ_ASSERT(!inStubFrame_
);
1045 inStubFrame_
= true;
1048 entersStubFrame_
= true;
1050 // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
1051 // be STUB_FRAME_SIZE so that assertions don't fail in leaveStubFrame.
1052 framePushedAtEnterStubFrame_
= STUB_FRAME_SIZE
;
// Emit stub-frame teardown; |calledIntoIon| accounts for the extra word
// pushed when the stub called into Ion code.
1056 void ICStubCompilerBase::leaveStubFrame(MacroAssembler
& masm
,
1057 bool calledIntoIon
) {
1058 MOZ_ASSERT(entersStubFrame_
&& inStubFrame_
);
1059 inStubFrame_
= false;
1062 masm
.setFramePushed(framePushedAtEnterStubFrame_
);
1063 if (calledIntoIon
) {
1064 masm
.adjustFrame(sizeof(intptr_t)); // Calls into ion have this extra.
1067 EmitBaselineLeaveStubFrame(masm
, calledIntoIon
);
// Push the BaselineFrame pointer as a VM-call payload. The first branch
// (condition line dropped by extraction — presumably the in-stub-frame
// case) loads the saved frame pointer through BaselineFrameReg first.
1070 void ICStubCompilerBase::pushStubPayload(MacroAssembler
& masm
,
1073 masm
.loadPtr(Address(BaselineFrameReg
, 0), scratch
);
1074 masm
.pushBaselineFramePtr(scratch
, scratch
);
1076 masm
.pushBaselineFramePtr(BaselineFrameReg
, scratch
);
// Like pushStubPayload, but also accounts for the pushed word in the
// tracked frame size.
1080 void ICStubCompilerBase::PushStubPayload(MacroAssembler
& masm
,
1082 pushStubPayload(masm
, scratch
);
1083 masm
.adjustFrame(sizeof(intptr_t));
1086 // TypeMonitor_Fallback
1089 bool ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext
* cx
,
1090 BaselineFrame
* frame
,
1091 StackTypeSet
* types
,
1095 if (MOZ_UNLIKELY(val
.isMagic())) {
1099 // Don't attach too many SingleObject/ObjectGroup stubs. If the value is a
1100 // primitive or if we will attach an any-object stub, we can handle this
1101 // with a single PrimitiveSet or AnyValue stub so we always optimize.
1102 if (numOptimizedMonitorStubs_
>= MAX_OPTIMIZED_STUBS
&& val
.isObject() &&
1103 !types
->unknownObject()) {
1107 bool wasDetachedMonitorChain
= lastMonitorStubPtrAddr_
== nullptr;
1108 MOZ_ASSERT_IF(wasDetachedMonitorChain
, numOptimizedMonitorStubs_
== 0);
1110 if (types
->unknown()) {
1111 // The TypeSet got marked as unknown so attach a stub that always
1114 // Check for existing TypeMonitor_AnyValue stubs.
1115 for (ICStubConstIterator
iter(firstMonitorStub()); !iter
.atEnd(); iter
++) {
1116 if (iter
->isTypeMonitor_AnyValue()) {
1121 // Discard existing stubs.
1122 resetMonitorStubChain(cx
->zone());
1123 wasDetachedMonitorChain
= (lastMonitorStubPtrAddr_
== nullptr);
1125 ICTypeMonitor_AnyValue::Compiler
compiler(cx
);
1126 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(frame
->script()));
1128 ReportOutOfMemory(cx
);
1132 JitSpew(JitSpew_BaselineIC
, " Added TypeMonitor stub %p for any value",
1134 addOptimizedMonitorStub(stub
);
1136 } else if (val
.isPrimitive() || types
->unknownObject()) {
1137 ValueType type
= val
.type();
1139 // Check for existing TypeMonitor stub.
1140 ICTypeMonitor_PrimitiveSet
* existingStub
= nullptr;
1141 for (ICStubConstIterator
iter(firstMonitorStub()); !iter
.atEnd(); iter
++) {
1142 if (iter
->isTypeMonitor_PrimitiveSet()) {
1143 existingStub
= iter
->toTypeMonitor_PrimitiveSet();
1144 if (existingStub
->containsType(type
)) {
1150 if (val
.isObject()) {
1151 // Check for existing SingleObject/ObjectGroup stubs and discard
1152 // stubs if we find one. Ideally we would discard just these stubs,
1153 // but unlinking individual type monitor stubs is somewhat
1155 MOZ_ASSERT(types
->unknownObject());
1156 bool hasObjectStubs
= false;
1157 for (ICStubConstIterator
iter(firstMonitorStub()); !iter
.atEnd();
1159 if (iter
->isTypeMonitor_SingleObject() ||
1160 iter
->isTypeMonitor_ObjectGroup()) {
1161 hasObjectStubs
= true;
1165 if (hasObjectStubs
) {
1166 resetMonitorStubChain(cx
->zone());
1167 wasDetachedMonitorChain
= (lastMonitorStubPtrAddr_
== nullptr);
1168 existingStub
= nullptr;
1172 ICTypeMonitor_PrimitiveSet::Compiler
compiler(cx
, existingStub
, type
);
1174 existingStub
? compiler
.updateStub()
1175 : compiler
.getStub(compiler
.getStubSpace(frame
->script()));
1177 ReportOutOfMemory(cx
);
1181 JitSpew(JitSpew_BaselineIC
,
1182 " %s TypeMonitor stub %p for primitive type %u",
1183 existingStub
? "Modified existing" : "Created new", stub
,
1184 static_cast<uint8_t>(type
));
1186 if (!existingStub
) {
1187 MOZ_ASSERT(!hasStub(TypeMonitor_PrimitiveSet
));
1188 addOptimizedMonitorStub(stub
);
1191 } else if (val
.toObject().isSingleton()) {
1192 RootedObject
obj(cx
, &val
.toObject());
1194 // Check for existing TypeMonitor stub.
1195 for (ICStubConstIterator
iter(firstMonitorStub()); !iter
.atEnd(); iter
++) {
1196 if (iter
->isTypeMonitor_SingleObject() &&
1197 iter
->toTypeMonitor_SingleObject()->object() == obj
) {
1202 ICTypeMonitor_SingleObject::Compiler
compiler(cx
, obj
);
1203 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(frame
->script()));
1205 ReportOutOfMemory(cx
);
1209 JitSpew(JitSpew_BaselineIC
, " Added TypeMonitor stub %p for singleton %p",
1212 addOptimizedMonitorStub(stub
);
1215 RootedObjectGroup
group(cx
, val
.toObject().group());
1217 // Check for existing TypeMonitor stub.
1218 for (ICStubConstIterator
iter(firstMonitorStub()); !iter
.atEnd(); iter
++) {
1219 if (iter
->isTypeMonitor_ObjectGroup() &&
1220 iter
->toTypeMonitor_ObjectGroup()->group() == group
) {
1225 ICTypeMonitor_ObjectGroup::Compiler
compiler(cx
, group
);
1226 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(frame
->script()));
1228 ReportOutOfMemory(cx
);
1232 JitSpew(JitSpew_BaselineIC
,
1233 " Added TypeMonitor stub %p for ObjectGroup %p", stub
,
1236 addOptimizedMonitorStub(stub
);
1239 bool firstMonitorStubAdded
=
1240 wasDetachedMonitorChain
&& (numOptimizedMonitorStubs_
> 0);
1242 if (firstMonitorStubAdded
) {
1243 // Was an empty monitor chain before, but a new stub was added. This is the
1244 // only time that any main stubs' firstMonitorStub fields need to be updated
1245 // to refer to the newly added monitor stub.
1246 ICStub
* firstStub
= mainFallbackStub_
->icEntry()->firstStub();
1247 for (ICStubConstIterator
iter(firstStub
); !iter
.atEnd(); iter
++) {
1248 // Non-monitored stubs are used if the result has always the same type,
1249 // e.g. a StringLength stub will always return int32.
1250 if (!iter
->isMonitored()) {
1254 // Since we just added the first optimized monitoring stub, any
1255 // existing main stub's |firstMonitorStub| MUST be pointing to the
1256 // fallback monitor stub (i.e. this stub).
1257 MOZ_ASSERT(iter
->toMonitoredStub()->firstMonitorStub() == this);
1258 iter
->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_
);
1265 bool DoTypeMonitorFallback(JSContext
* cx
, BaselineFrame
* frame
,
1266 ICTypeMonitor_Fallback
* stub
, HandleValue value
,
1267 MutableHandleValue res
) {
1268 MOZ_ASSERT(IsTypeInferenceEnabled());
1270 JSScript
* script
= frame
->script();
1271 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
1272 TypeFallbackICSpew(cx
, stub
, "TypeMonitor");
1274 // Copy input value to res.
1277 JitScript
* jitScript
= script
->jitScript();
1278 AutoSweepJitScript
sweep(script
);
1280 StackTypeSet
* types
;
1282 if (stub
->monitorsArgument(&argument
)) {
1283 MOZ_ASSERT(pc
== script
->code());
1284 types
= jitScript
->argTypes(sweep
, script
, argument
);
1285 JitScript::MonitorArgType(cx
, script
, argument
, value
);
1286 } else if (stub
->monitorsThis()) {
1287 MOZ_ASSERT(pc
== script
->code());
1288 types
= jitScript
->thisTypes(sweep
, script
);
1289 JitScript::MonitorThisType(cx
, script
, value
);
1291 types
= jitScript
->bytecodeTypes(sweep
, script
, pc
);
1292 JitScript::MonitorBytecodeType(cx
, script
, pc
, types
, value
);
1295 return stub
->addMonitorStubForValue(cx
, frame
, types
, value
);
1298 bool FallbackICCodeCompiler::emit_TypeMonitor() {
1299 static_assert(R0
== JSReturnOperand
);
1301 // Restore the tail call register.
1302 EmitRestoreTailCallReg(masm
);
1305 masm
.push(ICStubReg
);
1306 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1308 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICTypeMonitor_Fallback
*,
1309 HandleValue
, MutableHandleValue
);
1310 return tailCallVM
<Fn
, DoTypeMonitorFallback
>(masm
);
1313 bool ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(
1314 MacroAssembler
& masm
) {
1316 if ((flags_
& TypeToFlag(ValueType::Int32
)) &&
1317 !(flags_
& TypeToFlag(ValueType::Double
))) {
1318 masm
.branchTestInt32(Assembler::Equal
, R0
, &success
);
1321 if (flags_
& TypeToFlag(ValueType::Double
)) {
1322 masm
.branchTestNumber(Assembler::Equal
, R0
, &success
);
1325 if (flags_
& TypeToFlag(ValueType::Undefined
)) {
1326 masm
.branchTestUndefined(Assembler::Equal
, R0
, &success
);
1329 if (flags_
& TypeToFlag(ValueType::Boolean
)) {
1330 masm
.branchTestBoolean(Assembler::Equal
, R0
, &success
);
1333 if (flags_
& TypeToFlag(ValueType::String
)) {
1334 masm
.branchTestString(Assembler::Equal
, R0
, &success
);
1337 if (flags_
& TypeToFlag(ValueType::Symbol
)) {
1338 masm
.branchTestSymbol(Assembler::Equal
, R0
, &success
);
1341 if (flags_
& TypeToFlag(ValueType::BigInt
)) {
1342 masm
.branchTestBigInt(Assembler::Equal
, R0
, &success
);
1345 if (flags_
& TypeToFlag(ValueType::Object
)) {
1346 masm
.branchTestObject(Assembler::Equal
, R0
, &success
);
1349 if (flags_
& TypeToFlag(ValueType::Null
)) {
1350 masm
.branchTestNull(Assembler::Equal
, R0
, &success
);
1353 EmitStubGuardFailure(masm
);
1355 masm
.bind(&success
);
1356 EmitReturnFromIC(masm
);
1360 static void MaybeWorkAroundAmdBug(MacroAssembler
& masm
) {
1361 // Attempt to work around an AMD bug (see bug 1034706 and bug 1281759), by
1362 // inserting 32-bytes of NOPs.
1363 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
1364 if (CPUInfo::NeedAmdBugWorkaround()) {
1373 bool ICTypeMonitor_SingleObject::Compiler::generateStubCode(
1374 MacroAssembler
& masm
) {
1376 masm
.branchTestObject(Assembler::NotEqual
, R0
, &failure
);
1377 MaybeWorkAroundAmdBug(masm
);
1379 // Guard on the object's identity.
1380 Register obj
= masm
.extractObject(R0
, ExtractTemp0
);
1381 Address
expectedObject(ICStubReg
,
1382 ICTypeMonitor_SingleObject::offsetOfObject());
1383 masm
.branchPtr(Assembler::NotEqual
, expectedObject
, obj
, &failure
);
1384 MaybeWorkAroundAmdBug(masm
);
1386 EmitReturnFromIC(masm
);
1387 MaybeWorkAroundAmdBug(masm
);
1389 masm
.bind(&failure
);
1390 EmitStubGuardFailure(masm
);
1394 bool ICTypeMonitor_ObjectGroup::Compiler::generateStubCode(
1395 MacroAssembler
& masm
) {
1397 masm
.branchTestObject(Assembler::NotEqual
, R0
, &failure
);
1398 MaybeWorkAroundAmdBug(masm
);
1400 // Guard on the object's ObjectGroup. No Spectre mitigations are needed
1401 // here: we're just recording type information for Ion compilation and
1402 // it's safe to speculatively return.
1403 Register obj
= masm
.extractObject(R0
, ExtractTemp0
);
1404 Address
expectedGroup(ICStubReg
, ICTypeMonitor_ObjectGroup::offsetOfGroup());
1405 masm
.branchTestObjGroupNoSpectreMitigations(
1406 Assembler::NotEqual
, obj
, expectedGroup
, R1
.scratchReg(), &failure
);
1407 MaybeWorkAroundAmdBug(masm
);
1409 EmitReturnFromIC(masm
);
1410 MaybeWorkAroundAmdBug(masm
);
1412 masm
.bind(&failure
);
1413 EmitStubGuardFailure(masm
);
1417 bool ICTypeMonitor_AnyValue::Compiler::generateStubCode(MacroAssembler
& masm
) {
1418 EmitReturnFromIC(masm
);
1422 bool ICCacheIR_Updated::addUpdateStubForValue(JSContext
* cx
,
1423 HandleScript outerScript
,
1425 HandleObjectGroup group
,
1426 HandleId id
, HandleValue val
) {
1427 MOZ_ASSERT(IsTypeInferenceEnabled());
1429 EnsureTrackPropertyTypes(cx
, obj
, id
);
1431 // Make sure that undefined values are explicitly included in the property
1432 // types for an object if generating a stub to write an undefined value.
1433 if (val
.isUndefined() && CanHaveEmptyPropertyTypesForOwnProperty(obj
)) {
1434 MOZ_ASSERT(obj
->group() == group
);
1435 AddTypePropertyId(cx
, obj
, id
, val
);
1438 bool unknown
= false, unknownObject
= false;
1439 AutoSweepObjectGroup
sweep(group
);
1440 if (group
->unknownProperties(sweep
)) {
1441 unknown
= unknownObject
= true;
1443 if (HeapTypeSet
* types
= group
->maybeGetProperty(sweep
, id
)) {
1444 unknown
= types
->unknown();
1445 unknownObject
= types
->unknownObject();
1447 // We don't record null/undefined types for certain TypedObject
1448 // properties. In these cases |types| is allowed to be nullptr
1449 // without implying unknown types. See DoTypeUpdateFallback.
1450 MOZ_ASSERT(obj
->is
<TypedObject
>());
1451 MOZ_ASSERT(val
.isNullOrUndefined());
1454 MOZ_ASSERT_IF(unknown
, unknownObject
);
1456 // Don't attach too many SingleObject/ObjectGroup stubs unless we can
1457 // replace them with a single PrimitiveSet or AnyValue stub.
1458 if (numOptimizedStubs_
>= MAX_OPTIMIZED_STUBS
&& val
.isObject() &&
1464 // Attach a stub that always succeeds. We should not have a
1465 // TypeUpdate_AnyValue stub yet.
1466 MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_AnyValue
));
1468 // Discard existing stubs.
1469 resetUpdateStubChain(cx
->zone());
1471 ICTypeUpdate_AnyValue::Compiler
compiler(cx
);
1472 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(outerScript
));
1477 JitSpew(JitSpew_BaselineIC
, " Added TypeUpdate stub %p for any value",
1479 addOptimizedUpdateStub(stub
);
1481 } else if (val
.isPrimitive() || unknownObject
) {
1482 ValueType type
= val
.type();
1484 // Check for existing TypeUpdate stub.
1485 ICTypeUpdate_PrimitiveSet
* existingStub
= nullptr;
1486 for (ICStubConstIterator
iter(firstUpdateStub_
); !iter
.atEnd(); iter
++) {
1487 if (iter
->isTypeUpdate_PrimitiveSet()) {
1488 existingStub
= iter
->toTypeUpdate_PrimitiveSet();
1489 MOZ_ASSERT(!existingStub
->containsType(type
));
1493 if (val
.isObject()) {
1494 // Discard existing ObjectGroup/SingleObject stubs.
1495 resetUpdateStubChain(cx
->zone());
1497 addOptimizedUpdateStub(existingStub
);
1501 ICTypeUpdate_PrimitiveSet::Compiler
compiler(cx
, existingStub
, type
);
1502 ICStub
* stub
= existingStub
1503 ? compiler
.updateStub()
1504 : compiler
.getStub(compiler
.getStubSpace(outerScript
));
1508 if (!existingStub
) {
1509 MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet
));
1510 addOptimizedUpdateStub(stub
);
1513 JitSpew(JitSpew_BaselineIC
, " %s TypeUpdate stub %p for primitive type %d",
1514 existingStub
? "Modified existing" : "Created new", stub
,
1515 static_cast<uint8_t>(type
));
1517 } else if (val
.toObject().isSingleton()) {
1518 RootedObject
obj(cx
, &val
.toObject());
1521 // We should not have a stub for this object.
1522 for (ICStubConstIterator
iter(firstUpdateStub_
); !iter
.atEnd(); iter
++) {
1523 MOZ_ASSERT_IF(iter
->isTypeUpdate_SingleObject(),
1524 iter
->toTypeUpdate_SingleObject()->object() != obj
);
1528 ICTypeUpdate_SingleObject::Compiler
compiler(cx
, obj
);
1529 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(outerScript
));
1534 JitSpew(JitSpew_BaselineIC
, " Added TypeUpdate stub %p for singleton %p",
1537 addOptimizedUpdateStub(stub
);
1540 RootedObjectGroup
group(cx
, val
.toObject().group());
1543 // We should not have a stub for this group.
1544 for (ICStubConstIterator
iter(firstUpdateStub_
); !iter
.atEnd(); iter
++) {
1545 MOZ_ASSERT_IF(iter
->isTypeUpdate_ObjectGroup(),
1546 iter
->toTypeUpdate_ObjectGroup()->group() != group
);
1550 ICTypeUpdate_ObjectGroup::Compiler
compiler(cx
, group
);
1551 ICStub
* stub
= compiler
.getStub(compiler
.getStubSpace(outerScript
));
1556 JitSpew(JitSpew_BaselineIC
, " Added TypeUpdate stub %p for ObjectGroup %p",
1559 addOptimizedUpdateStub(stub
);
1566 // TypeUpdate_Fallback
1568 bool DoTypeUpdateFallback(JSContext
* cx
, BaselineFrame
* frame
,
1569 ICCacheIR_Updated
* stub
, HandleValue objval
,
1570 HandleValue value
) {
1571 // This can get called from optimized stubs. Therefore it is not allowed to
1573 JS::AutoCheckCannotGC nogc
;
1575 MOZ_ASSERT(IsTypeInferenceEnabled());
1577 FallbackICSpew(cx
, stub
->getChainFallback(), "TypeUpdate(%s)",
1578 ICStub::KindString(stub
->kind()));
1580 MOZ_ASSERT(stub
->isCacheIR_Updated());
1582 RootedScript
script(cx
, frame
->script());
1583 RootedObject
obj(cx
, &objval
.toObject());
1585 RootedId
id(cx
, stub
->toCacheIR_Updated()->updateStubId());
1586 MOZ_ASSERT(id
.get() != JSID_EMPTY
);
1588 // The group should match the object's group.
1589 RootedObjectGroup
group(cx
, stub
->toCacheIR_Updated()->updateStubGroup());
1591 MOZ_ASSERT(obj
->group() == group
);
1594 // If we're storing null/undefined to a typed object property, check if
1595 // we want to include it in this property's type information.
1596 bool addType
= true;
1597 if (MOZ_UNLIKELY(obj
->is
<TypedObject
>()) && value
.isNullOrUndefined()) {
1598 StructTypeDescr
* structDescr
=
1599 &obj
->as
<TypedObject
>().typeDescr().as
<StructTypeDescr
>();
1601 MOZ_ALWAYS_TRUE(structDescr
->fieldIndex(id
, &fieldIndex
));
1603 TypeDescr
* fieldDescr
= &structDescr
->fieldDescr(fieldIndex
);
1604 ReferenceType type
= fieldDescr
->as
<ReferenceTypeDescr
>().type();
1605 if (type
== ReferenceType::TYPE_ANY
) {
1606 // Ignore undefined values, which are included implicitly in type
1607 // information for this property.
1608 if (value
.isUndefined()) {
1612 MOZ_ASSERT(type
== ReferenceType::TYPE_OBJECT
||
1613 type
== ReferenceType::TYPE_WASM_ANYREF
);
1615 // Ignore null values being written here. Null is included
1616 // implicitly in type information for this property. Note that
1617 // non-object, non-null values are not possible here, these
1618 // should have been filtered out by the IR emitter.
1619 if (value
.isNull()) {
1625 if (MOZ_LIKELY(addType
)) {
1626 JSObject
* maybeSingleton
= obj
->isSingleton() ? obj
.get() : nullptr;
1627 AddTypePropertyId(cx
, group
, maybeSingleton
, id
, value
);
1631 !stub
->addUpdateStubForValue(cx
, script
, obj
, group
, id
, value
))) {
1632 // The calling JIT code assumes this function is infallible (for
1633 // instance we may reallocate dynamic slots before calling this),
1634 // so ignore OOMs if we failed to attach a stub.
1635 cx
->recoverFromOutOfMemory();
1641 bool FallbackICCodeCompiler::emit_TypeUpdate() {
1642 // Just store false into R1.scratchReg() and return.
1643 masm
.move32(Imm32(0), R1
.scratchReg());
1644 EmitReturnFromIC(masm
);
1648 bool ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(
1649 MacroAssembler
& masm
) {
1651 if ((flags_
& TypeToFlag(ValueType::Int32
)) &&
1652 !(flags_
& TypeToFlag(ValueType::Double
))) {
1653 masm
.branchTestInt32(Assembler::Equal
, R0
, &success
);
1656 if (flags_
& TypeToFlag(ValueType::Double
)) {
1657 masm
.branchTestNumber(Assembler::Equal
, R0
, &success
);
1660 if (flags_
& TypeToFlag(ValueType::Undefined
)) {
1661 masm
.branchTestUndefined(Assembler::Equal
, R0
, &success
);
1664 if (flags_
& TypeToFlag(ValueType::Boolean
)) {
1665 masm
.branchTestBoolean(Assembler::Equal
, R0
, &success
);
1668 if (flags_
& TypeToFlag(ValueType::String
)) {
1669 masm
.branchTestString(Assembler::Equal
, R0
, &success
);
1672 if (flags_
& TypeToFlag(ValueType::Symbol
)) {
1673 masm
.branchTestSymbol(Assembler::Equal
, R0
, &success
);
1676 if (flags_
& TypeToFlag(ValueType::BigInt
)) {
1677 masm
.branchTestBigInt(Assembler::Equal
, R0
, &success
);
1680 if (flags_
& TypeToFlag(ValueType::Object
)) {
1681 masm
.branchTestObject(Assembler::Equal
, R0
, &success
);
1684 if (flags_
& TypeToFlag(ValueType::Null
)) {
1685 masm
.branchTestNull(Assembler::Equal
, R0
, &success
);
1688 EmitStubGuardFailure(masm
);
1690 // Type matches, load true into R1.scratchReg() and return.
1691 masm
.bind(&success
);
1692 masm
.mov(ImmWord(1), R1
.scratchReg());
1693 EmitReturnFromIC(masm
);
1698 bool ICTypeUpdate_SingleObject::Compiler::generateStubCode(
1699 MacroAssembler
& masm
) {
1701 masm
.branchTestObject(Assembler::NotEqual
, R0
, &failure
);
1703 // Guard on the object's identity.
1704 Register obj
= masm
.extractObject(R0
, R1
.scratchReg());
1705 Address
expectedObject(ICStubReg
,
1706 ICTypeUpdate_SingleObject::offsetOfObject());
1707 masm
.branchPtr(Assembler::NotEqual
, expectedObject
, obj
, &failure
);
1709 // Identity matches, load true into R1.scratchReg() and return.
1710 masm
.mov(ImmWord(1), R1
.scratchReg());
1711 EmitReturnFromIC(masm
);
1713 masm
.bind(&failure
);
1714 EmitStubGuardFailure(masm
);
1718 bool ICTypeUpdate_ObjectGroup::Compiler::generateStubCode(
1719 MacroAssembler
& masm
) {
1722 Register scratch1
= R1
.scratchReg();
1723 masm
.fallibleUnboxObject(R0
, scratch1
, &failure
);
1725 // Guard on the object's ObjectGroup.
1726 Address
expectedGroup(ICStubReg
, ICTypeUpdate_ObjectGroup::offsetOfGroup());
1727 masm
.branchTestObjGroup(Assembler::NotEqual
, scratch1
, expectedGroup
,
1728 scratch1
, R0
.payloadOrValueReg(), &failure
);
1730 // Group matches, load true into R1.scratchReg() and return.
1731 masm
.mov(ImmWord(1), R1
.scratchReg());
1732 EmitReturnFromIC(masm
);
1734 masm
.bind(&failure
);
1735 EmitStubGuardFailure(masm
);
1739 bool ICTypeUpdate_AnyValue::Compiler::generateStubCode(MacroAssembler
& masm
) {
1740 // AnyValue always matches so return true.
1741 masm
.mov(ImmWord(1), R1
.scratchReg());
1742 EmitReturnFromIC(masm
);
1750 bool DoToBoolFallback(JSContext
* cx
, BaselineFrame
* frame
,
1751 ICToBool_Fallback
* stub
, HandleValue arg
,
1752 MutableHandleValue ret
) {
1753 stub
->incrementEnteredCount();
1754 FallbackICSpew(cx
, stub
, "ToBool");
1756 MOZ_ASSERT(!arg
.isBoolean());
1758 TryAttachStub
<ToBoolIRGenerator
>("ToBool", cx
, frame
, stub
,
1759 BaselineCacheIRStubKind::Regular
, arg
);
1761 bool cond
= ToBoolean(arg
);
1762 ret
.setBoolean(cond
);
1767 bool FallbackICCodeCompiler::emit_ToBool() {
1768 static_assert(R0
== JSReturnOperand
);
1770 // Restore the tail call register.
1771 EmitRestoreTailCallReg(masm
);
1775 masm
.push(ICStubReg
);
1776 pushStubPayload(masm
, R0
.scratchReg());
1778 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICToBool_Fallback
*,
1779 HandleValue
, MutableHandleValue
);
1780 return tailCallVM
<Fn
, DoToBoolFallback
>(masm
);
1783 static void StripPreliminaryObjectStubs(JSContext
* cx
, ICFallbackStub
* stub
,
1785 // Before the new script properties analysis has been performed on a type,
1786 // all instances of that type have the maximum number of fixed slots.
1787 // Afterwards, the objects (even the preliminary ones) might be changed
1788 // to reduce the number of fixed slots they have. If we generate stubs for
1789 // both the old and new number of fixed slots, the stub will look
1790 // polymorphic to IonBuilder when it is actually monomorphic. To avoid
1791 // this, strip out any stubs for preliminary objects before attaching a new
1792 // stub which isn't on a preliminary object.
1794 for (ICStubIterator iter
= stub
->beginChain(); !iter
.atEnd(); iter
++) {
1795 if (iter
->isCacheIR_Regular() &&
1796 iter
->toCacheIR_Regular()->hasPreliminaryObject()) {
1797 iter
.unlink(cx
, script
);
1798 } else if (iter
->isCacheIR_Monitored() &&
1799 iter
->toCacheIR_Monitored()->hasPreliminaryObject()) {
1800 iter
.unlink(cx
, script
);
1801 } else if (iter
->isCacheIR_Updated() &&
1802 iter
->toCacheIR_Updated()->hasPreliminaryObject()) {
1803 iter
.unlink(cx
, script
);
1808 static bool TryAttachGetPropStub(const char* name
, JSContext
* cx
,
1809 BaselineFrame
* frame
, ICFallbackStub
* stub
,
1810 CacheKind kind
, HandleValue val
,
1811 HandleValue idVal
, HandleValue receiver
) {
1812 bool attached
= false;
1814 if (stub
->state().maybeTransition()) {
1815 stub
->discardStubs(cx
, frame
->invalidationScript());
1818 if (stub
->state().canAttachStub()) {
1819 RootedScript
script(cx
, frame
->script());
1820 ICScript
* icScript
= frame
->icScript();
1821 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
1823 GetPropIRGenerator
gen(cx
, script
, pc
, stub
->state().mode(), kind
, val
,
1824 idVal
, receiver
, GetPropertyResultFlags::All
);
1825 switch (gen
.tryAttachStub()) {
1826 case AttachDecision::Attach
: {
1828 AttachBaselineCacheIRStub(cx
, gen
.writerRef(), gen
.cacheKind(),
1829 BaselineCacheIRStubKind::Monitored
,
1830 script
, icScript
, stub
, &attached
);
1832 JitSpew(JitSpew_BaselineIC
, " Attached %s CacheIR stub", name
);
1833 if (gen
.shouldNotePreliminaryObjectStub()) {
1834 newStub
->toCacheIR_Monitored()->notePreliminaryObject();
1835 } else if (gen
.shouldUnlinkPreliminaryObjectStubs()) {
1836 StripPreliminaryObjectStubs(cx
, stub
, frame
->invalidationScript());
1840 case AttachDecision::NoAction
:
1842 case AttachDecision::TemporarilyUnoptimizable
:
1845 case AttachDecision::Deferred
:
1846 MOZ_ASSERT_UNREACHABLE("No deferred GetProp stubs");
1857 bool DoGetElemFallback(JSContext
* cx
, BaselineFrame
* frame
,
1858 ICGetElem_Fallback
* stub
, HandleValue lhs
,
1859 HandleValue rhs
, MutableHandleValue res
) {
1860 stub
->incrementEnteredCount();
1862 RootedScript
script(cx
, frame
->script());
1863 jsbytecode
* pc
= stub
->icEntry()->pc(frame
->script());
1865 JSOp op
= JSOp(*pc
);
1866 FallbackICSpew(cx
, stub
, "GetElem(%s)", CodeName(op
));
1868 MOZ_ASSERT(op
== JSOp::GetElem
|| op
== JSOp::CallElem
);
1870 // Don't pass lhs directly, we need it when generating stubs.
1871 RootedValue
lhsCopy(cx
, lhs
);
1873 bool isOptimizedArgs
= false;
1874 if (lhs
.isMagic(JS_OPTIMIZED_ARGUMENTS
)) {
1875 // Handle optimized arguments[i] access.
1876 if (!GetElemOptimizedArguments(cx
, frame
, &lhsCopy
, rhs
, res
,
1877 &isOptimizedArgs
)) {
1880 if (isOptimizedArgs
) {
1881 if (!TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
)) {
1887 bool attached
= TryAttachGetPropStub("GetElem", cx
, frame
, stub
,
1888 CacheKind::GetElem
, lhs
, rhs
, lhs
);
1890 if (!isOptimizedArgs
) {
1891 if (!GetElementOperation(cx
, op
, lhsCopy
, rhs
, res
)) {
1895 if (!TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
)) {
1904 // GetElem operations which could access negative indexes generally can't
1905 // be optimized without the potential for bailouts, as we can't statically
1906 // determine that an object has no properties on such indexes.
1907 if (rhs
.isNumber() && rhs
.toNumber() < 0) {
1908 stub
->noteNegativeIndex();
1911 // GetElem operations which could access non-integer indexes generally can't
1912 // be optimized without the potential for bailouts.
1913 int32_t representable
;
1914 if (rhs
.isNumber() && rhs
.isDouble() &&
1915 !mozilla::NumberEqualsInt32(rhs
.toDouble(), &representable
)) {
1916 stub
->setSawNonIntegerIndex();
1922 bool DoGetElemSuperFallback(JSContext
* cx
, BaselineFrame
* frame
,
1923 ICGetElem_Fallback
* stub
, HandleValue lhs
,
1924 HandleValue rhs
, HandleValue receiver
,
1925 MutableHandleValue res
) {
1926 stub
->incrementEnteredCount();
1928 RootedScript
script(cx
, frame
->script());
1929 jsbytecode
* pc
= stub
->icEntry()->pc(frame
->script());
1931 JSOp op
= JSOp(*pc
);
1932 FallbackICSpew(cx
, stub
, "GetElemSuper(%s)", CodeName(op
));
1934 MOZ_ASSERT(op
== JSOp::GetElemSuper
);
1937 TryAttachGetPropStub("GetElemSuper", cx
, frame
, stub
,
1938 CacheKind::GetElemSuper
, lhs
, rhs
, receiver
);
1940 // |lhs| is [[HomeObject]].[[Prototype]] which must be Object
1941 RootedObject
lhsObj(cx
, &lhs
.toObject());
1942 if (!GetObjectElementOperation(cx
, op
, lhsObj
, receiver
, rhs
, res
)) {
1946 if (!TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
)) {
1954 // GetElem operations which could access negative indexes generally can't
1955 // be optimized without the potential for bailouts, as we can't statically
1956 // determine that an object has no properties on such indexes.
1957 if (rhs
.isNumber() && rhs
.toNumber() < 0) {
1958 stub
->noteNegativeIndex();
1961 // GetElem operations which could access non-integer indexes generally can't
1962 // be optimized without the potential for bailouts.
1963 int32_t representable
;
1964 if (rhs
.isNumber() && rhs
.isDouble() &&
1965 !mozilla::NumberEqualsInt32(rhs
.toDouble(), &representable
)) {
1966 stub
->setSawNonIntegerIndex();
1972 bool FallbackICCodeCompiler::emitGetElem(bool hasReceiver
) {
1973 static_assert(R0
== JSReturnOperand
);
1975 // Restore the tail call register.
1976 EmitRestoreTailCallReg(masm
);
1978 // Super property getters use a |this| that differs from base object
1980 // State: receiver in R0, index in R1, obj on the stack
1982 // Ensure stack is fully synced for the expression decompiler.
1983 // We need: receiver, index, obj
1986 masm
.pushValue(Address(masm
.getStackPointer(), sizeof(Value
) * 2));
1989 masm
.pushValue(R0
); // Receiver
1990 masm
.pushValue(R1
); // Index
1991 masm
.pushValue(Address(masm
.getStackPointer(), sizeof(Value
) * 5)); // Obj
1992 masm
.push(ICStubReg
);
1993 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1996 bool (*)(JSContext
*, BaselineFrame
*, ICGetElem_Fallback
*, HandleValue
,
1997 HandleValue
, HandleValue
, MutableHandleValue
);
1998 if (!tailCallVM
<Fn
, DoGetElemSuperFallback
>(masm
)) {
2002 // Ensure stack is fully synced for the expression decompiler.
2009 masm
.push(ICStubReg
);
2010 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
2012 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetElem_Fallback
*,
2013 HandleValue
, HandleValue
, MutableHandleValue
);
2014 if (!tailCallVM
<Fn
, DoGetElemFallback
>(masm
)) {
2019 // This is the resume point used when bailout rewrites call stack to undo
2020 // Ion inlined frames. The return address pushed onto reconstructed stack
2024 code
.initBailoutReturnOffset(BailoutReturnKind::GetElemSuper
,
2025 masm
.currentOffset());
2027 code
.initBailoutReturnOffset(BailoutReturnKind::GetElem
,
2028 masm
.currentOffset());
2031 leaveStubFrame(masm
, true);
2033 if (!IsTypeInferenceEnabled()) {
2034 EmitReturnFromIC(masm
);
2038 // When we get here, ICStubReg contains the ICGetElem_Fallback stub,
2039 // which we can't use to enter the TypeMonitor IC, because it's a
2040 // MonitoredFallbackStub instead of a MonitoredStub. So, we cheat. Note that
2041 // we must have a non-null fallbackMonitorStub here because
2042 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
2043 masm
.loadPtr(Address(ICStubReg
,
2044 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
2046 EmitEnterTypeMonitorIC(masm
,
2047 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
2052 bool FallbackICCodeCompiler::emit_GetElem() {
2053 return emitGetElem(/* hasReceiver = */ false);
2056 bool FallbackICCodeCompiler::emit_GetElemSuper() {
2057 return emitGetElem(/* hasReceiver = */ true);
2060 static void SetUpdateStubData(ICCacheIR_Updated
* stub
,
2061 const PropertyTypeCheckInfo
* info
) {
2062 if (info
->isSet()) {
2063 stub
->updateStubGroup() = info
->group();
2064 stub
->updateStubId() = info
->id();
2068 bool DoSetElemFallback(JSContext
* cx
, BaselineFrame
* frame
,
2069 ICSetElem_Fallback
* stub
, Value
* stack
, HandleValue objv
,
2070 HandleValue index
, HandleValue rhs
) {
2071 using DeferType
= SetPropIRGenerator::DeferType
;
2073 stub
->incrementEnteredCount();
2075 RootedScript
script(cx
, frame
->script());
2076 RootedScript
outerScript(cx
, script
);
2077 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2078 JSOp op
= JSOp(*pc
);
2079 FallbackICSpew(cx
, stub
, "SetElem(%s)", CodeName(JSOp(*pc
)));
2081 MOZ_ASSERT(op
== JSOp::SetElem
|| op
== JSOp::StrictSetElem
||
2082 op
== JSOp::InitElem
|| op
== JSOp::InitHiddenElem
||
2083 op
== JSOp::InitLockedElem
|| op
== JSOp::InitElemArray
||
2084 op
== JSOp::InitElemInc
);
2088 cx
, ToObjectFromStackForPropertyAccess(cx
, objv
, objvIndex
, index
));
2093 RootedShape
oldShape(cx
, obj
->shape());
2094 RootedObjectGroup
oldGroup(cx
, JSObject::getGroup(cx
, obj
));
2099 // We cannot attach a stub if the operation executed after the stub
2100 // is attached may throw.
2101 bool mayThrow
= false;
2103 DeferType deferType
= DeferType::None
;
2104 bool attached
= false;
2106 if (stub
->state().maybeTransition()) {
2107 stub
->discardStubs(cx
, frame
->invalidationScript());
2110 if (stub
->state().canAttachStub() && !mayThrow
) {
2111 ICScript
* icScript
= frame
->icScript();
2112 SetPropIRGenerator
gen(cx
, script
, pc
, CacheKind::SetElem
,
2113 stub
->state().mode(), objv
, index
, rhs
);
2114 switch (gen
.tryAttachStub()) {
2115 case AttachDecision::Attach
: {
2116 ICStub
* newStub
= AttachBaselineCacheIRStub(
2117 cx
, gen
.writerRef(), gen
.cacheKind(),
2118 BaselineCacheIRStubKind::Updated
, frame
->script(), icScript
, stub
,
2121 JitSpew(JitSpew_BaselineIC
, " Attached SetElem CacheIR stub");
2123 SetUpdateStubData(newStub
->toCacheIR_Updated(), gen
.typeCheckInfo());
2125 if (gen
.shouldNotePreliminaryObjectStub()) {
2126 newStub
->toCacheIR_Updated()->notePreliminaryObject();
2127 } else if (gen
.shouldUnlinkPreliminaryObjectStubs()) {
2128 StripPreliminaryObjectStubs(cx
, stub
, frame
->invalidationScript());
2131 if (gen
.attachedTypedArrayOOBStub()) {
2132 stub
->noteHasTypedArrayOOB();
2136 case AttachDecision::NoAction
:
2138 case AttachDecision::TemporarilyUnoptimizable
:
2141 case AttachDecision::Deferred
:
2142 deferType
= gen
.deferType();
2143 MOZ_ASSERT(deferType
!= DeferType::None
);
2148 if (op
== JSOp::InitElem
|| op
== JSOp::InitHiddenElem
||
2149 op
== JSOp::InitLockedElem
) {
2150 if (!InitElemOperation(cx
, pc
, obj
, index
, rhs
)) {
2153 } else if (op
== JSOp::InitElemArray
) {
2154 MOZ_ASSERT(uint32_t(index
.toInt32()) <= INT32_MAX
,
2155 "the bytecode emitter must fail to compile code that would "
2156 "produce JSOp::InitElemArray with an index exceeding "
2158 MOZ_ASSERT(uint32_t(index
.toInt32()) == GET_UINT32(pc
));
2159 if (!InitArrayElemOperation(cx
, pc
, obj
.as
<ArrayObject
>(), index
.toInt32(),
2163 } else if (op
== JSOp::InitElemInc
) {
2164 if (!InitArrayElemOperation(cx
, pc
, obj
.as
<ArrayObject
>(), index
.toInt32(),
2169 if (!SetObjectElement(cx
, obj
, index
, rhs
, objv
,
2170 JSOp(*pc
) == JSOp::StrictSetElem
, script
, pc
)) {
2175 // Don't try to attach stubs that wish to be hidden. We don't know how to
2176 // have different enumerability in the stubs for the moment.
2177 if (op
== JSOp::InitHiddenElem
) {
2181 // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
2182 MOZ_ASSERT(stack
[2] == objv
);
2189 // The SetObjectElement call might have entered this IC recursively, so try
2191 if (stub
->state().maybeTransition()) {
2192 stub
->discardStubs(cx
, frame
->invalidationScript());
2195 bool canAttachStub
= stub
->state().canAttachStub();
2197 if (deferType
!= DeferType::None
&& canAttachStub
) {
2198 SetPropIRGenerator
gen(cx
, script
, pc
, CacheKind::SetElem
,
2199 stub
->state().mode(), objv
, index
, rhs
);
2201 MOZ_ASSERT(deferType
== DeferType::AddSlot
);
2202 AttachDecision decision
= gen
.tryAttachAddSlotStub(oldGroup
, oldShape
);
2205 case AttachDecision::Attach
: {
2206 ICScript
* icScript
= frame
->icScript();
2207 ICStub
* newStub
= AttachBaselineCacheIRStub(
2208 cx
, gen
.writerRef(), gen
.cacheKind(),
2209 BaselineCacheIRStubKind::Updated
, frame
->script(), icScript
, stub
,
2212 JitSpew(JitSpew_BaselineIC
, " Attached SetElem CacheIR stub");
2214 SetUpdateStubData(newStub
->toCacheIR_Updated(), gen
.typeCheckInfo());
2216 if (gen
.shouldNotePreliminaryObjectStub()) {
2217 newStub
->toCacheIR_Updated()->notePreliminaryObject();
2218 } else if (gen
.shouldUnlinkPreliminaryObjectStubs()) {
2219 StripPreliminaryObjectStubs(cx
, stub
, frame
->invalidationScript());
2223 case AttachDecision::NoAction
:
2224 gen
.trackAttached(IRGenerator::NotAttached
);
2226 case AttachDecision::TemporarilyUnoptimizable
:
2227 case AttachDecision::Deferred
:
2228 MOZ_ASSERT_UNREACHABLE("Invalid attach result");
2232 if (!attached
&& canAttachStub
) {
2233 stub
->state().trackNotAttached();
2238 bool FallbackICCodeCompiler::emit_SetElem() {
2239 static_assert(R0
== JSReturnOperand
);
2241 EmitRestoreTailCallReg(masm
);
2243 // State: R0: object, R1: index, stack: rhs.
2244 // For the decompiler, the stack has to be: object, index, rhs,
2245 // so we push the index, then overwrite the rhs Value with R0
2246 // and push the rhs value.
2248 masm
.loadValue(Address(masm
.getStackPointer(), sizeof(Value
)), R1
);
2249 masm
.storeValue(R0
, Address(masm
.getStackPointer(), sizeof(Value
)));
2253 masm
.pushValue(R1
); // RHS
2255 // Push index. On x86 and ARM two push instructions are emitted so use a
2256 // separate register to store the old stack pointer.
2257 masm
.moveStackPtrTo(R1
.scratchReg());
2258 masm
.pushValue(Address(R1
.scratchReg(), 2 * sizeof(Value
)));
2259 masm
.pushValue(R0
); // Object.
2261 // Push pointer to stack values, so that the stub can overwrite the object
2262 // (pushed for the decompiler) with the rhs.
2263 masm
.computeEffectiveAddress(
2264 Address(masm
.getStackPointer(), 3 * sizeof(Value
)), R0
.scratchReg());
2265 masm
.push(R0
.scratchReg());
2267 masm
.push(ICStubReg
);
2268 pushStubPayload(masm
, R0
.scratchReg());
2270 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICSetElem_Fallback
*, Value
*,
2271 HandleValue
, HandleValue
, HandleValue
);
2272 return tailCallVM
<Fn
, DoSetElemFallback
>(masm
);
2279 bool DoInFallback(JSContext
* cx
, BaselineFrame
* frame
, ICIn_Fallback
* stub
,
2280 HandleValue key
, HandleValue objValue
,
2281 MutableHandleValue res
) {
2282 stub
->incrementEnteredCount();
2284 FallbackICSpew(cx
, stub
, "In");
2286 if (!objValue
.isObject()) {
2287 ReportInNotObjectError(cx
, key
, -2, objValue
, -1);
2291 TryAttachStub
<HasPropIRGenerator
>("In", cx
, frame
, stub
,
2292 BaselineCacheIRStubKind::Regular
,
2293 CacheKind::In
, key
, objValue
);
2295 RootedObject
obj(cx
, &objValue
.toObject());
2297 if (!OperatorIn(cx
, key
, obj
, &cond
)) {
2300 res
.setBoolean(cond
);
2305 bool FallbackICCodeCompiler::emit_In() {
2306 EmitRestoreTailCallReg(masm
);
2308 // Sync for the decompiler.
2315 masm
.push(ICStubReg
);
2316 pushStubPayload(masm
, R0
.scratchReg());
2318 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICIn_Fallback
*, HandleValue
,
2319 HandleValue
, MutableHandleValue
);
2320 return tailCallVM
<Fn
, DoInFallback
>(masm
);
2327 bool DoHasOwnFallback(JSContext
* cx
, BaselineFrame
* frame
,
2328 ICHasOwn_Fallback
* stub
, HandleValue keyValue
,
2329 HandleValue objValue
, MutableHandleValue res
) {
2330 stub
->incrementEnteredCount();
2332 FallbackICSpew(cx
, stub
, "HasOwn");
2334 TryAttachStub
<HasPropIRGenerator
>("HasOwn", cx
, frame
, stub
,
2335 BaselineCacheIRStubKind::Regular
,
2336 CacheKind::HasOwn
, keyValue
, objValue
);
2339 if (!HasOwnProperty(cx
, objValue
, keyValue
, &found
)) {
2343 res
.setBoolean(found
);
2347 bool FallbackICCodeCompiler::emit_HasOwn() {
2348 EmitRestoreTailCallReg(masm
);
2350 // Sync for the decompiler.
2357 masm
.push(ICStubReg
);
2358 pushStubPayload(masm
, R0
.scratchReg());
2360 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICHasOwn_Fallback
*,
2361 HandleValue
, HandleValue
, MutableHandleValue
);
2362 return tailCallVM
<Fn
, DoHasOwnFallback
>(masm
);
// CheckPrivateField_Fallback
2369 bool DoCheckPrivateFieldFallback(JSContext
* cx
, BaselineFrame
* frame
,
2370 ICCheckPrivateField_Fallback
* stub
,
2371 HandleValue objValue
, HandleValue keyValue
,
2372 MutableHandleValue res
) {
2373 stub
->incrementEnteredCount();
2374 RootedScript
script(cx
, frame
->script());
2375 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2377 FallbackICSpew(cx
, stub
, "CheckPrivateField");
2379 MOZ_ASSERT(keyValue
.isSymbol() && keyValue
.toSymbol()->isPrivateName());
2381 TryAttachStub
<CheckPrivateFieldIRGenerator
>(
2382 "CheckPrivate", cx
, frame
, stub
, BaselineCacheIRStubKind::Regular
,
2383 CacheKind::CheckPrivateField
, keyValue
, objValue
);
2386 if (!CheckPrivateFieldOperation(cx
, pc
, objValue
, keyValue
, &result
)) {
2390 res
.setBoolean(result
);
2394 bool FallbackICCodeCompiler::emit_CheckPrivateField() {
2395 EmitRestoreTailCallReg(masm
);
2397 // Sync for the decompiler.
2404 masm
.push(ICStubReg
);
2405 pushStubPayload(masm
, R0
.scratchReg());
2407 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICCheckPrivateField_Fallback
*,
2408 HandleValue
, HandleValue
, MutableHandleValue
);
2409 return tailCallVM
<Fn
, DoCheckPrivateFieldFallback
>(masm
);
2416 bool DoGetNameFallback(JSContext
* cx
, BaselineFrame
* frame
,
2417 ICGetName_Fallback
* stub
, HandleObject envChain
,
2418 MutableHandleValue res
) {
2419 stub
->incrementEnteredCount();
2421 RootedScript
script(cx
, frame
->script());
2422 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2423 mozilla::DebugOnly
<JSOp
> op
= JSOp(*pc
);
2424 FallbackICSpew(cx
, stub
, "GetName(%s)", CodeName(JSOp(*pc
)));
2426 MOZ_ASSERT(op
== JSOp::GetName
|| op
== JSOp::GetGName
);
2428 RootedPropertyName
name(cx
, script
->getName(pc
));
2430 TryAttachStub
<GetNameIRGenerator
>("GetName", cx
, frame
, stub
,
2431 BaselineCacheIRStubKind::Monitored
,
2434 static_assert(JSOpLength_GetGName
== JSOpLength_GetName
,
2435 "Otherwise our check for JSOp::Typeof isn't ok");
2436 if (JSOp(pc
[JSOpLength_GetGName
]) == JSOp::Typeof
) {
2437 if (!GetEnvironmentName
<GetNameMode::TypeOf
>(cx
, envChain
, name
, res
)) {
2441 if (!GetEnvironmentName
<GetNameMode::Normal
>(cx
, envChain
, name
, res
)) {
2446 return TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
);
2449 bool FallbackICCodeCompiler::emit_GetName() {
2450 static_assert(R0
== JSReturnOperand
);
2452 EmitRestoreTailCallReg(masm
);
2454 masm
.push(R0
.scratchReg());
2455 masm
.push(ICStubReg
);
2456 pushStubPayload(masm
, R0
.scratchReg());
2458 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetName_Fallback
*,
2459 HandleObject
, MutableHandleValue
);
2460 return tailCallVM
<Fn
, DoGetNameFallback
>(masm
);
2464 // BindName_Fallback
2467 bool DoBindNameFallback(JSContext
* cx
, BaselineFrame
* frame
,
2468 ICBindName_Fallback
* stub
, HandleObject envChain
,
2469 MutableHandleValue res
) {
2470 stub
->incrementEnteredCount();
2472 jsbytecode
* pc
= stub
->icEntry()->pc(frame
->script());
2473 mozilla::DebugOnly
<JSOp
> op
= JSOp(*pc
);
2474 FallbackICSpew(cx
, stub
, "BindName(%s)", CodeName(JSOp(*pc
)));
2476 MOZ_ASSERT(op
== JSOp::BindName
|| op
== JSOp::BindGName
);
2478 RootedPropertyName
name(cx
, frame
->script()->getName(pc
));
2480 TryAttachStub
<BindNameIRGenerator
>("BindName", cx
, frame
, stub
,
2481 BaselineCacheIRStubKind::Regular
, envChain
,
2484 RootedObject
scope(cx
);
2485 if (!LookupNameUnqualified(cx
, name
, envChain
, &scope
)) {
2489 res
.setObject(*scope
);
2493 bool FallbackICCodeCompiler::emit_BindName() {
2494 static_assert(R0
== JSReturnOperand
);
2496 EmitRestoreTailCallReg(masm
);
2498 masm
.push(R0
.scratchReg());
2499 masm
.push(ICStubReg
);
2500 pushStubPayload(masm
, R0
.scratchReg());
2502 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICBindName_Fallback
*,
2503 HandleObject
, MutableHandleValue
);
2504 return tailCallVM
<Fn
, DoBindNameFallback
>(masm
);
2508 // GetIntrinsic_Fallback
2511 bool DoGetIntrinsicFallback(JSContext
* cx
, BaselineFrame
* frame
,
2512 ICGetIntrinsic_Fallback
* stub
,
2513 MutableHandleValue res
) {
2514 stub
->incrementEnteredCount();
2516 RootedScript
script(cx
, frame
->script());
2517 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2518 mozilla::DebugOnly
<JSOp
> op
= JSOp(*pc
);
2519 FallbackICSpew(cx
, stub
, "GetIntrinsic(%s)", CodeName(JSOp(*pc
)));
2521 MOZ_ASSERT(op
== JSOp::GetIntrinsic
);
2523 if (!GetIntrinsicOperation(cx
, script
, pc
, res
)) {
2527 // An intrinsic operation will always produce the same result, so only
2528 // needs to be monitored once. Attach a stub to load the resulting constant
2531 JitScript::MonitorBytecodeType(cx
, script
, pc
, res
);
2533 TryAttachStub
<GetIntrinsicIRGenerator
>("GetIntrinsic", cx
, frame
, stub
,
2534 BaselineCacheIRStubKind::Regular
, res
);
2539 bool FallbackICCodeCompiler::emit_GetIntrinsic() {
2540 EmitRestoreTailCallReg(masm
);
2542 masm
.push(ICStubReg
);
2543 pushStubPayload(masm
, R0
.scratchReg());
2545 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetIntrinsic_Fallback
*,
2546 MutableHandleValue
);
2547 return tailCallVM
<Fn
, DoGetIntrinsicFallback
>(masm
);
2554 static bool ComputeGetPropResult(JSContext
* cx
, BaselineFrame
* frame
, JSOp op
,
2555 HandlePropertyName name
,
2556 MutableHandleValue val
,
2557 MutableHandleValue res
) {
2558 // Handle arguments.length and arguments.callee on optimized arguments, as
2559 // it is not an object.
2560 if (val
.isMagic(JS_OPTIMIZED_ARGUMENTS
) && IsOptimizedArguments(frame
, val
)) {
2561 if (op
== JSOp::Length
) {
2562 res
.setInt32(frame
->numActualArgs());
2564 MOZ_ASSERT(name
== cx
->names().callee
);
2565 MOZ_ASSERT(frame
->script()->hasMappedArgsObj());
2566 res
.setObject(*frame
->callee());
2569 if (op
== JSOp::GetBoundName
) {
2570 RootedObject
env(cx
, &val
.toObject());
2571 RootedId
id(cx
, NameToId(name
));
2572 if (!GetNameBoundInEnvironment(cx
, env
, id
, res
)) {
2576 MOZ_ASSERT(op
== JSOp::GetProp
|| op
== JSOp::CallProp
||
2577 op
== JSOp::Length
);
2578 if (!GetProperty(cx
, val
, name
, res
)) {
2587 bool DoGetPropFallback(JSContext
* cx
, BaselineFrame
* frame
,
2588 ICGetProp_Fallback
* stub
, MutableHandleValue val
,
2589 MutableHandleValue res
) {
2590 stub
->incrementEnteredCount();
2592 RootedScript
script(cx
, frame
->script());
2593 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2594 JSOp op
= JSOp(*pc
);
2595 FallbackICSpew(cx
, stub
, "GetProp(%s)", CodeName(op
));
2597 MOZ_ASSERT(op
== JSOp::GetProp
|| op
== JSOp::CallProp
||
2598 op
== JSOp::Length
|| op
== JSOp::GetBoundName
);
2600 RootedPropertyName
name(cx
, script
->getName(pc
));
2601 RootedValue
idVal(cx
, StringValue(name
));
2603 TryAttachGetPropStub("GetProp", cx
, frame
, stub
, CacheKind::GetProp
, val
,
2606 if (!ComputeGetPropResult(cx
, frame
, op
, name
, val
, res
)) {
2610 return TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
);
2613 bool DoGetPropSuperFallback(JSContext
* cx
, BaselineFrame
* frame
,
2614 ICGetProp_Fallback
* stub
, HandleValue receiver
,
2615 MutableHandleValue val
, MutableHandleValue res
) {
2616 stub
->incrementEnteredCount();
2618 RootedScript
script(cx
, frame
->script());
2619 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2620 FallbackICSpew(cx
, stub
, "GetPropSuper(%s)", CodeName(JSOp(*pc
)));
2622 MOZ_ASSERT(JSOp(*pc
) == JSOp::GetPropSuper
);
2624 RootedPropertyName
name(cx
, script
->getName(pc
));
2625 RootedValue
idVal(cx
, StringValue(name
));
2627 TryAttachGetPropStub("GetPropSuper", cx
, frame
, stub
, CacheKind::GetPropSuper
,
2628 val
, idVal
, receiver
);
2630 // |val| is [[HomeObject]].[[Prototype]] which must be Object
2631 RootedObject
valObj(cx
, &val
.toObject());
2632 if (!GetProperty(cx
, valObj
, receiver
, name
, res
)) {
2636 return TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
);
2639 bool FallbackICCodeCompiler::emitGetProp(bool hasReceiver
) {
2640 static_assert(R0
== JSReturnOperand
);
2642 EmitRestoreTailCallReg(masm
);
2644 // Super property getters use a |this| that differs from base object
2649 masm
.push(ICStubReg
);
2650 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
2652 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetProp_Fallback
*,
2653 HandleValue
, MutableHandleValue
, MutableHandleValue
);
2654 if (!tailCallVM
<Fn
, DoGetPropSuperFallback
>(masm
)) {
2658 // Ensure stack is fully synced for the expression decompiler.
2663 masm
.push(ICStubReg
);
2664 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
2666 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetProp_Fallback
*,
2667 MutableHandleValue
, MutableHandleValue
);
2668 if (!tailCallVM
<Fn
, DoGetPropFallback
>(masm
)) {
2673 // This is the resume point used when bailout rewrites call stack to undo
2674 // Ion inlined frames. The return address pushed onto reconstructed stack
2678 code
.initBailoutReturnOffset(BailoutReturnKind::GetPropSuper
,
2679 masm
.currentOffset());
2681 code
.initBailoutReturnOffset(BailoutReturnKind::GetProp
,
2682 masm
.currentOffset());
2685 leaveStubFrame(masm
, true);
2687 if (!IsTypeInferenceEnabled()) {
2688 EmitReturnFromIC(masm
);
2692 // When we get here, ICStubReg contains the ICGetProp_Fallback stub,
2693 // which we can't use to enter the TypeMonitor IC, because it's a
2694 // MonitoredFallbackStub instead of a MonitoredStub. So, we cheat. Note that
2695 // we must have a non-null fallbackMonitorStub here because
2696 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
2697 masm
.loadPtr(Address(ICStubReg
,
2698 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
2700 EmitEnterTypeMonitorIC(masm
,
2701 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
2706 bool FallbackICCodeCompiler::emit_GetProp() {
2707 return emitGetProp(/* hasReceiver = */ false);
2710 bool FallbackICCodeCompiler::emit_GetPropSuper() {
2711 return emitGetProp(/* hasReceiver = */ true);
2718 bool DoSetPropFallback(JSContext
* cx
, BaselineFrame
* frame
,
2719 ICSetProp_Fallback
* stub
, Value
* stack
, HandleValue lhs
,
2721 using DeferType
= SetPropIRGenerator::DeferType
;
2723 stub
->incrementEnteredCount();
2725 RootedScript
script(cx
, frame
->script());
2726 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2727 JSOp op
= JSOp(*pc
);
2728 FallbackICSpew(cx
, stub
, "SetProp(%s)", CodeName(op
));
2730 MOZ_ASSERT(op
== JSOp::SetProp
|| op
== JSOp::StrictSetProp
||
2731 op
== JSOp::SetName
|| op
== JSOp::StrictSetName
||
2732 op
== JSOp::SetGName
|| op
== JSOp::StrictSetGName
||
2733 op
== JSOp::InitProp
|| op
== JSOp::InitLockedProp
||
2734 op
== JSOp::InitHiddenProp
|| op
== JSOp::InitGLexical
);
2736 RootedPropertyName
name(cx
, script
->getName(pc
));
2737 RootedId
id(cx
, NameToId(name
));
2740 RootedObject
obj(cx
,
2741 ToObjectFromStackForPropertyAccess(cx
, lhs
, lhsIndex
, id
));
2745 RootedShape
oldShape(cx
, obj
->shape());
2746 RootedObjectGroup
oldGroup(cx
, JSObject::getGroup(cx
, obj
));
2751 DeferType deferType
= DeferType::None
;
2752 bool attached
= false;
2753 if (stub
->state().maybeTransition()) {
2754 stub
->discardStubs(cx
, frame
->invalidationScript());
2757 if (stub
->state().canAttachStub()) {
2758 RootedValue
idVal(cx
, StringValue(name
));
2759 SetPropIRGenerator
gen(cx
, script
, pc
, CacheKind::SetProp
,
2760 stub
->state().mode(), lhs
, idVal
, rhs
);
2761 switch (gen
.tryAttachStub()) {
2762 case AttachDecision::Attach
: {
2763 ICScript
* icScript
= frame
->icScript();
2764 ICStub
* newStub
= AttachBaselineCacheIRStub(
2765 cx
, gen
.writerRef(), gen
.cacheKind(),
2766 BaselineCacheIRStubKind::Updated
, frame
->script(), icScript
, stub
,
2769 JitSpew(JitSpew_BaselineIC
, " Attached SetProp CacheIR stub");
2771 SetUpdateStubData(newStub
->toCacheIR_Updated(), gen
.typeCheckInfo());
2773 if (gen
.shouldNotePreliminaryObjectStub()) {
2774 newStub
->toCacheIR_Updated()->notePreliminaryObject();
2775 } else if (gen
.shouldUnlinkPreliminaryObjectStubs()) {
2776 StripPreliminaryObjectStubs(cx
, stub
, frame
->invalidationScript());
2780 case AttachDecision::NoAction
:
2782 case AttachDecision::TemporarilyUnoptimizable
:
2785 case AttachDecision::Deferred
:
2786 deferType
= gen
.deferType();
2787 MOZ_ASSERT(deferType
!= DeferType::None
);
2792 if (op
== JSOp::InitProp
|| op
== JSOp::InitLockedProp
||
2793 op
== JSOp::InitHiddenProp
) {
2794 if (!InitPropertyOperation(cx
, op
, obj
, name
, rhs
)) {
2797 } else if (op
== JSOp::SetName
|| op
== JSOp::StrictSetName
||
2798 op
== JSOp::SetGName
|| op
== JSOp::StrictSetGName
) {
2799 if (!SetNameOperation(cx
, script
, pc
, obj
, rhs
)) {
2802 } else if (op
== JSOp::InitGLexical
) {
2803 RootedValue
v(cx
, rhs
);
2804 LexicalEnvironmentObject
* lexicalEnv
;
2805 if (script
->hasNonSyntacticScope()) {
2806 lexicalEnv
= &NearestEnclosingExtensibleLexicalEnvironment(
2807 frame
->environmentChain());
2809 lexicalEnv
= &cx
->global()->lexicalEnvironment();
2811 InitGlobalLexicalOperation(cx
, lexicalEnv
, script
, pc
, v
);
2813 MOZ_ASSERT(op
== JSOp::SetProp
|| op
== JSOp::StrictSetProp
);
2815 ObjectOpResult result
;
2816 if (!SetProperty(cx
, obj
, id
, rhs
, lhs
, result
) ||
2817 !result
.checkStrictModeError(cx
, obj
, id
, op
== JSOp::StrictSetProp
)) {
2822 // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS.
2823 MOZ_ASSERT(stack
[1] == lhs
);
2830 // The SetProperty call might have entered this IC recursively, so try
2832 if (stub
->state().maybeTransition()) {
2833 stub
->discardStubs(cx
, frame
->invalidationScript());
2836 bool canAttachStub
= stub
->state().canAttachStub();
2838 if (deferType
!= DeferType::None
&& canAttachStub
) {
2839 RootedValue
idVal(cx
, StringValue(name
));
2840 SetPropIRGenerator
gen(cx
, script
, pc
, CacheKind::SetProp
,
2841 stub
->state().mode(), lhs
, idVal
, rhs
);
2843 MOZ_ASSERT(deferType
== DeferType::AddSlot
);
2844 AttachDecision decision
= gen
.tryAttachAddSlotStub(oldGroup
, oldShape
);
2847 case AttachDecision::Attach
: {
2848 ICScript
* icScript
= frame
->icScript();
2849 ICStub
* newStub
= AttachBaselineCacheIRStub(
2850 cx
, gen
.writerRef(), gen
.cacheKind(),
2851 BaselineCacheIRStubKind::Updated
, frame
->script(), icScript
, stub
,
2854 JitSpew(JitSpew_BaselineIC
, " Attached SetElem CacheIR stub");
2856 SetUpdateStubData(newStub
->toCacheIR_Updated(), gen
.typeCheckInfo());
2858 if (gen
.shouldNotePreliminaryObjectStub()) {
2859 newStub
->toCacheIR_Updated()->notePreliminaryObject();
2860 } else if (gen
.shouldUnlinkPreliminaryObjectStubs()) {
2861 StripPreliminaryObjectStubs(cx
, stub
, frame
->invalidationScript());
2865 case AttachDecision::NoAction
:
2866 gen
.trackAttached(IRGenerator::NotAttached
);
2868 case AttachDecision::TemporarilyUnoptimizable
:
2869 case AttachDecision::Deferred
:
2870 MOZ_ASSERT_UNREACHABLE("Invalid attach result");
2874 if (!attached
&& canAttachStub
) {
2875 stub
->state().trackNotAttached();
2881 bool FallbackICCodeCompiler::emit_SetProp() {
2882 static_assert(R0
== JSReturnOperand
);
2884 EmitRestoreTailCallReg(masm
);
2886 // Ensure stack is fully synced for the expression decompiler.
2887 // Overwrite the RHS value on top of the stack with the object, then push
2888 // the RHS in R1 on top of that.
2889 masm
.storeValue(R0
, Address(masm
.getStackPointer(), 0));
2896 // Push pointer to stack values, so that the stub can overwrite the object
2897 // (pushed for the decompiler) with the RHS.
2898 masm
.computeEffectiveAddress(
2899 Address(masm
.getStackPointer(), 2 * sizeof(Value
)), R0
.scratchReg());
2900 masm
.push(R0
.scratchReg());
2902 masm
.push(ICStubReg
);
2903 pushStubPayload(masm
, R0
.scratchReg());
2905 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICSetProp_Fallback
*, Value
*,
2906 HandleValue
, HandleValue
);
2907 if (!tailCallVM
<Fn
, DoSetPropFallback
>(masm
)) {
2911 // This is the resume point used when bailout rewrites call stack to undo
2912 // Ion inlined frames. The return address pushed onto reconstructed stack
2915 code
.initBailoutReturnOffset(BailoutReturnKind::SetProp
,
2916 masm
.currentOffset());
2918 leaveStubFrame(masm
, true);
2919 EmitReturnFromIC(masm
);
2928 bool DoCallFallback(JSContext
* cx
, BaselineFrame
* frame
, ICCall_Fallback
* stub
,
2929 uint32_t argc
, Value
* vp
, MutableHandleValue res
) {
2930 stub
->incrementEnteredCount();
2932 RootedScript
script(cx
, frame
->script());
2933 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
2934 JSOp op
= JSOp(*pc
);
2935 FallbackICSpew(cx
, stub
, "Call(%s)", CodeName(op
));
2937 MOZ_ASSERT(argc
== GET_ARGC(pc
));
2938 bool constructing
= (op
== JSOp::New
|| op
== JSOp::SuperCall
);
2939 bool ignoresReturnValue
= (op
== JSOp::CallIgnoresRv
);
2941 // Ensure vp array is rooted - we may GC in here.
2942 size_t numValues
= argc
+ 2 + constructing
;
2943 RootedExternalValueArray
vpRoot(cx
, numValues
, vp
);
2945 CallArgs callArgs
= CallArgsFromSp(argc
+ constructing
, vp
+ numValues
,
2946 constructing
, ignoresReturnValue
);
2947 RootedValue
callee(cx
, vp
[0]);
2948 RootedValue
newTarget(cx
, constructing
? callArgs
.newTarget() : NullValue());
2950 // Handle funapply with JSOp::Arguments
2951 if (op
== JSOp::FunApply
&& argc
== 2 &&
2952 callArgs
[1].isMagic(JS_OPTIMIZED_ARGUMENTS
)) {
2953 GuardFunApplyArgumentsOptimization(cx
, frame
, callArgs
);
2956 // Transition stub state to megamorphic or generic if warranted.
2957 if (stub
->state().maybeTransition()) {
2958 stub
->discardStubs(cx
, frame
->invalidationScript());
2961 bool canAttachStub
= stub
->state().canAttachStub();
2962 bool handled
= false;
2963 bool deferred
= false;
2965 // Only bother to try optimizing JSOp::Call with CacheIR if the chain is still
2966 // allowed to attach stubs.
2967 if (canAttachStub
) {
2968 HandleValueArray args
= HandleValueArray::fromMarkedLocation(argc
, vp
+ 2);
2969 bool isFirstStub
= stub
->newStubIsFirstStub();
2970 CallIRGenerator
gen(cx
, script
, pc
, op
, stub
->state().mode(), isFirstStub
,
2971 argc
, callee
, callArgs
.thisv(), newTarget
, args
);
2972 switch (gen
.tryAttachStub()) {
2973 case AttachDecision::NoAction
:
2975 case AttachDecision::Attach
: {
2976 ICScript
* icScript
= frame
->icScript();
2977 ICStub
* newStub
= AttachBaselineCacheIRStub(
2978 cx
, gen
.writerRef(), gen
.cacheKind(), gen
.cacheIRStubKind(), script
,
2979 icScript
, stub
, &handled
);
2981 JitSpew(JitSpew_BaselineIC
, " Attached Call CacheIR stub");
2983 // If it's an updated stub, initialize it.
2984 if (gen
.cacheIRStubKind() == BaselineCacheIRStubKind::Updated
) {
2985 SetUpdateStubData(newStub
->toCacheIR_Updated(),
2986 gen
.typeCheckInfo());
2990 case AttachDecision::TemporarilyUnoptimizable
:
2993 case AttachDecision::Deferred
:
2999 if (!ConstructFromStack(cx
, callArgs
)) {
3002 res
.set(callArgs
.rval());
3003 } else if ((op
== JSOp::Eval
|| op
== JSOp::StrictEval
) &&
3004 cx
->global()->valueIsEval(callee
)) {
3005 if (!DirectEval(cx
, callArgs
.get(0), res
)) {
3009 MOZ_ASSERT(op
== JSOp::Call
|| op
== JSOp::CallIgnoresRv
||
3010 op
== JSOp::CallIter
|| op
== JSOp::FunCall
||
3011 op
== JSOp::FunApply
|| op
== JSOp::Eval
||
3012 op
== JSOp::StrictEval
);
3013 if (op
== JSOp::CallIter
&& callee
.isPrimitive()) {
3014 MOZ_ASSERT(argc
== 0, "thisv must be on top of the stack");
3015 ReportValueError(cx
, JSMSG_NOT_ITERABLE
, -1, callArgs
.thisv(), nullptr);
3019 if (!CallFromStack(cx
, callArgs
)) {
3023 res
.set(callArgs
.rval());
3026 if (!TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
)) {
3030 // Try to transition again in case we called this IC recursively.
3031 if (stub
->state().maybeTransition()) {
3032 stub
->discardStubs(cx
, frame
->invalidationScript());
3034 canAttachStub
= stub
->state().canAttachStub();
3036 if (deferred
&& canAttachStub
) {
3037 HandleValueArray args
= HandleValueArray::fromMarkedLocation(argc
, vp
+ 2);
3038 bool isFirstStub
= stub
->newStubIsFirstStub();
3039 CallIRGenerator
gen(cx
, script
, pc
, op
, stub
->state().mode(), isFirstStub
,
3040 argc
, callee
, callArgs
.thisv(), newTarget
, args
);
3041 switch (gen
.tryAttachDeferredStub(res
)) {
3042 case AttachDecision::Attach
: {
3043 ICScript
* icScript
= frame
->icScript();
3044 ICStub
* newStub
= AttachBaselineCacheIRStub(
3045 cx
, gen
.writerRef(), gen
.cacheKind(), gen
.cacheIRStubKind(), script
,
3046 icScript
, stub
, &handled
);
3048 JitSpew(JitSpew_BaselineIC
, " Attached Call CacheIR stub");
3050 // If it's an updated stub, initialize it.
3051 if (gen
.cacheIRStubKind() == BaselineCacheIRStubKind::Updated
) {
3052 SetUpdateStubData(newStub
->toCacheIR_Updated(),
3053 gen
.typeCheckInfo());
3057 case AttachDecision::NoAction
:
3059 case AttachDecision::TemporarilyUnoptimizable
:
3060 case AttachDecision::Deferred
:
3061 MOZ_ASSERT_UNREACHABLE("Impossible attach decision");
3066 if (!handled
&& canAttachStub
) {
3067 stub
->state().trackNotAttached();
3072 bool DoSpreadCallFallback(JSContext
* cx
, BaselineFrame
* frame
,
3073 ICCall_Fallback
* stub
, Value
* vp
,
3074 MutableHandleValue res
) {
3075 stub
->incrementEnteredCount();
3077 RootedScript
script(cx
, frame
->script());
3078 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3079 JSOp op
= JSOp(*pc
);
3080 bool constructing
= (op
== JSOp::SpreadNew
|| op
== JSOp::SpreadSuperCall
);
3081 FallbackICSpew(cx
, stub
, "SpreadCall(%s)", CodeName(op
));
3083 // Ensure vp array is rooted - we may GC in here.
3084 RootedExternalValueArray
vpRoot(cx
, 3 + constructing
, vp
);
3086 RootedValue
callee(cx
, vp
[0]);
3087 RootedValue
thisv(cx
, vp
[1]);
3088 RootedValue
arr(cx
, vp
[2]);
3089 RootedValue
newTarget(cx
, constructing
? vp
[3] : NullValue());
3091 // Transition stub state to megamorphic or generic if warranted.
3092 if (stub
->state().maybeTransition()) {
3093 stub
->discardStubs(cx
, frame
->invalidationScript());
3096 // Try attaching a call stub.
3097 bool handled
= false;
3098 if (op
!= JSOp::SpreadEval
&& op
!= JSOp::StrictSpreadEval
&&
3099 stub
->state().canAttachStub()) {
3100 // Try CacheIR first:
3101 RootedArrayObject
aobj(cx
, &arr
.toObject().as
<ArrayObject
>());
3102 MOZ_ASSERT(aobj
->length() == aobj
->getDenseInitializedLength());
3104 HandleValueArray args
= HandleValueArray::fromMarkedLocation(
3105 aobj
->length(), aobj
->getDenseElements());
3106 bool isFirstStub
= stub
->newStubIsFirstStub();
3107 CallIRGenerator
gen(cx
, script
, pc
, op
, stub
->state().mode(), isFirstStub
,
3108 1, callee
, thisv
, newTarget
, args
);
3109 switch (gen
.tryAttachStub()) {
3110 case AttachDecision::NoAction
:
3112 case AttachDecision::Attach
: {
3113 ICScript
* icScript
= frame
->icScript();
3114 ICStub
* newStub
= AttachBaselineCacheIRStub(
3115 cx
, gen
.writerRef(), gen
.cacheKind(), gen
.cacheIRStubKind(), script
,
3116 icScript
, stub
, &handled
);
3119 JitSpew(JitSpew_BaselineIC
, " Attached Spread Call CacheIR stub");
3121 // If it's an updated stub, initialize it.
3122 if (gen
.cacheIRStubKind() == BaselineCacheIRStubKind::Updated
) {
3123 SetUpdateStubData(newStub
->toCacheIR_Updated(),
3124 gen
.typeCheckInfo());
3128 case AttachDecision::TemporarilyUnoptimizable
:
3131 case AttachDecision::Deferred
:
3132 MOZ_ASSERT_UNREACHABLE("No deferred optimizations for spread calls");
3137 if (!SpreadCallOperation(cx
, script
, pc
, thisv
, callee
, arr
, newTarget
,
3142 return TypeMonitorResult(cx
, stub
, frame
, script
, pc
, res
);
3145 void ICStubCompilerBase::pushCallArguments(MacroAssembler
& masm
,
3146 AllocatableGeneralRegisterSet regs
,
3148 bool isConstructing
) {
3149 MOZ_ASSERT(!regs
.has(argcReg
));
3151 // argPtr initially points to the last argument.
3152 Register argPtr
= regs
.takeAny();
3153 masm
.moveStackPtrTo(argPtr
);
3155 // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
3156 // return address, old frame pointer and stub reg.
3157 size_t valueOffset
= STUB_FRAME_SIZE
;
3159 // We have to push |this|, callee, new.target (if constructing) and argc
3160 // arguments. Handle the number of Values we know statically first.
3162 size_t numNonArgValues
= 2 + isConstructing
;
3163 for (size_t i
= 0; i
< numNonArgValues
; i
++) {
3164 masm
.pushValue(Address(argPtr
, valueOffset
));
3165 valueOffset
+= sizeof(Value
);
3168 // If there are no arguments we're done.
3170 masm
.branchTest32(Assembler::Zero
, argcReg
, argcReg
, &done
);
3172 // Push argc Values.
3174 Register count
= regs
.takeAny();
3175 masm
.addPtr(Imm32(valueOffset
), argPtr
);
3176 masm
.move32(argcReg
, count
);
3179 masm
.pushValue(Address(argPtr
, 0));
3180 masm
.addPtr(Imm32(sizeof(Value
)), argPtr
);
3182 masm
.branchSub32(Assembler::NonZero
, Imm32(1), count
, &loop
);
3187 bool FallbackICCodeCompiler::emitCall(bool isSpread
, bool isConstructing
) {
3188 static_assert(R0
== JSReturnOperand
);
3190 // Values are on the stack left-to-right. Calling convention wants them
3191 // right-to-left so duplicate them on the stack in reverse order.
3192 // |this| and callee are pushed last.
3194 AllocatableGeneralRegisterSet
regs(availableGeneralRegs(0));
3196 if (MOZ_UNLIKELY(isSpread
)) {
3197 // Push a stub frame so that we can perform a non-tail call.
3198 enterStubFrame(masm
, R1
.scratchReg());
3200 // Use BaselineFrameReg instead of BaselineStackReg, because
3201 // BaselineFrameReg and BaselineStackReg hold the same value just after
3202 // calling enterStubFrame.
3205 uint32_t valueOffset
= 0;
3206 if (isConstructing
) {
3207 masm
.pushValue(Address(BaselineFrameReg
, STUB_FRAME_SIZE
));
3212 masm
.pushValue(Address(BaselineFrameReg
,
3213 valueOffset
* sizeof(Value
) + STUB_FRAME_SIZE
));
3217 masm
.pushValue(Address(BaselineFrameReg
,
3218 valueOffset
* sizeof(Value
) + STUB_FRAME_SIZE
));
3222 masm
.pushValue(Address(BaselineFrameReg
,
3223 valueOffset
* sizeof(Value
) + STUB_FRAME_SIZE
));
3226 masm
.push(masm
.getStackPointer());
3227 masm
.push(ICStubReg
);
3229 PushStubPayload(masm
, R0
.scratchReg());
3231 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICCall_Fallback
*, Value
*,
3232 MutableHandleValue
);
3233 if (!callVM
<Fn
, DoSpreadCallFallback
>(masm
)) {
3237 leaveStubFrame(masm
);
3238 EmitReturnFromIC(masm
);
3240 // SpreadCall is not yet supported in Ion, so do not generate asmcode for
3245 // Push a stub frame so that we can perform a non-tail call.
3246 enterStubFrame(masm
, R1
.scratchReg());
3248 regs
.take(R0
.scratchReg()); // argc.
3250 pushCallArguments(masm
, regs
, R0
.scratchReg(), isConstructing
);
3252 masm
.push(masm
.getStackPointer());
3253 masm
.push(R0
.scratchReg());
3254 masm
.push(ICStubReg
);
3256 PushStubPayload(masm
, R0
.scratchReg());
3258 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICCall_Fallback
*, uint32_t,
3259 Value
*, MutableHandleValue
);
3260 if (!callVM
<Fn
, DoCallFallback
>(masm
)) {
3264 leaveStubFrame(masm
);
3265 EmitReturnFromIC(masm
);
3267 // This is the resume point used when bailout rewrites call stack to undo
3268 // Ion inlined frames. The return address pushed onto reconstructed stack
3272 MOZ_ASSERT(!isSpread
);
3274 if (isConstructing
) {
3275 code
.initBailoutReturnOffset(BailoutReturnKind::New
, masm
.currentOffset());
3277 code
.initBailoutReturnOffset(BailoutReturnKind::Call
, masm
.currentOffset());
3280 // Load passed-in ThisV into R1 just in case it's needed. Need to do this
3281 // before we leave the stub frame since that info will be lost.
3282 // Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
3283 masm
.loadValue(Address(masm
.getStackPointer(), 3 * sizeof(size_t)), R1
);
3285 leaveStubFrame(masm
, true);
3287 // If this is a |constructing| call, if the callee returns a non-object, we
3288 // replace it with the |this| object passed in.
3289 if (isConstructing
) {
3290 static_assert(JSReturnOperand
== R0
);
3291 Label skipThisReplace
;
3293 masm
.branchTestObject(Assembler::Equal
, JSReturnOperand
, &skipThisReplace
);
3294 masm
.moveValue(R1
, R0
);
3296 masm
.branchTestObject(Assembler::Equal
, JSReturnOperand
, &skipThisReplace
);
3297 masm
.assumeUnreachable("Failed to return object in constructing call.");
3299 masm
.bind(&skipThisReplace
);
3302 if (!IsTypeInferenceEnabled()) {
3303 EmitReturnFromIC(masm
);
3307 // At this point, ICStubReg points to the ICCall_Fallback stub, which is NOT
3308 // a MonitoredStub, but rather a MonitoredFallbackStub. To use
3309 // EmitEnterTypeMonitorIC, first load the ICTypeMonitor_Fallback stub into
3310 // ICStubReg. Then, use EmitEnterTypeMonitorIC with a custom struct offset.
3311 // Note that we must have a non-null fallbackMonitorStub here because
3312 // BaselineStackBuilder::buildStubFrame delazifies the stub when bailing out.
3313 masm
.loadPtr(Address(ICStubReg
,
3314 ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
3316 EmitEnterTypeMonitorIC(masm
,
3317 ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
3322 bool FallbackICCodeCompiler::emit_Call() {
3323 return emitCall(/* isSpread = */ false, /* isConstructing = */ false);
3326 bool FallbackICCodeCompiler::emit_CallConstructing() {
3327 return emitCall(/* isSpread = */ false, /* isConstructing = */ true);
3330 bool FallbackICCodeCompiler::emit_SpreadCall() {
3331 return emitCall(/* isSpread = */ true, /* isConstructing = */ false);
3334 bool FallbackICCodeCompiler::emit_SpreadCallConstructing() {
3335 return emitCall(/* isSpread = */ true, /* isConstructing = */ true);
3339 // GetIterator_Fallback
3342 bool DoGetIteratorFallback(JSContext
* cx
, BaselineFrame
* frame
,
3343 ICGetIterator_Fallback
* stub
, HandleValue value
,
3344 MutableHandleValue res
) {
3345 stub
->incrementEnteredCount();
3346 FallbackICSpew(cx
, stub
, "GetIterator");
3348 TryAttachStub
<GetIteratorIRGenerator
>(
3349 "GetIterator", cx
, frame
, stub
, BaselineCacheIRStubKind::Regular
, value
);
3351 JSObject
* iterobj
= ValueToIterator(cx
, value
);
3356 res
.setObject(*iterobj
);
3360 bool FallbackICCodeCompiler::emit_GetIterator() {
3361 EmitRestoreTailCallReg(masm
);
3363 // Sync stack for the decompiler.
3367 masm
.push(ICStubReg
);
3368 pushStubPayload(masm
, R0
.scratchReg());
3370 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICGetIterator_Fallback
*,
3371 HandleValue
, MutableHandleValue
);
3372 return tailCallVM
<Fn
, DoGetIteratorFallback
>(masm
);
3376 // InstanceOf_Fallback
3379 bool DoInstanceOfFallback(JSContext
* cx
, BaselineFrame
* frame
,
3380 ICInstanceOf_Fallback
* stub
, HandleValue lhs
,
3381 HandleValue rhs
, MutableHandleValue res
) {
3382 stub
->incrementEnteredCount();
3384 FallbackICSpew(cx
, stub
, "InstanceOf");
3386 if (!rhs
.isObject()) {
3387 ReportValueError(cx
, JSMSG_BAD_INSTANCEOF_RHS
, -1, rhs
, nullptr);
3391 RootedObject
obj(cx
, &rhs
.toObject());
3393 if (!HasInstance(cx
, obj
, lhs
, &cond
)) {
3397 res
.setBoolean(cond
);
3399 if (!obj
->is
<JSFunction
>()) {
3400 // ensure we've recorded at least one failure, so we can detect there was a
3401 // non-optimizable case
3402 if (!stub
->state().hasFailures()) {
3403 stub
->state().trackNotAttached();
3408 // For functions, keep track of the |prototype| property in type information,
3409 // for use during Ion compilation.
3410 EnsureTrackPropertyTypes(cx
, obj
, NameToId(cx
->names().prototype
));
3412 TryAttachStub
<InstanceOfIRGenerator
>("InstanceOf", cx
, frame
, stub
,
3413 BaselineCacheIRStubKind::Regular
, lhs
,
3418 bool FallbackICCodeCompiler::emit_InstanceOf() {
3419 EmitRestoreTailCallReg(masm
);
3421 // Sync stack for the decompiler.
3427 masm
.push(ICStubReg
);
3428 pushStubPayload(masm
, R0
.scratchReg());
3430 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICInstanceOf_Fallback
*,
3431 HandleValue
, HandleValue
, MutableHandleValue
);
3432 return tailCallVM
<Fn
, DoInstanceOfFallback
>(masm
);
3439 bool DoTypeOfFallback(JSContext
* cx
, BaselineFrame
* frame
,
3440 ICTypeOf_Fallback
* stub
, HandleValue val
,
3441 MutableHandleValue res
) {
3442 stub
->incrementEnteredCount();
3443 FallbackICSpew(cx
, stub
, "TypeOf");
3445 TryAttachStub
<TypeOfIRGenerator
>("TypeOf", cx
, frame
, stub
,
3446 BaselineCacheIRStubKind::Regular
, val
);
3448 JSType type
= js::TypeOfValue(val
);
3449 RootedString
string(cx
, TypeName(type
, cx
->names()));
3450 res
.setString(string
);
3454 bool FallbackICCodeCompiler::emit_TypeOf() {
3455 EmitRestoreTailCallReg(masm
);
3458 masm
.push(ICStubReg
);
3459 pushStubPayload(masm
, R0
.scratchReg());
3461 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICTypeOf_Fallback
*,
3462 HandleValue
, MutableHandleValue
);
3463 return tailCallVM
<Fn
, DoTypeOfFallback
>(masm
);
3467 // ToPropertyKey_Fallback
3470 bool DoToPropertyKeyFallback(JSContext
* cx
, BaselineFrame
* frame
,
3471 ICToPropertyKey_Fallback
* stub
, HandleValue val
,
3472 MutableHandleValue res
) {
3473 stub
->incrementEnteredCount();
3474 FallbackICSpew(cx
, stub
, "ToPropertyKey");
3476 TryAttachStub
<ToPropertyKeyIRGenerator
>(
3477 "ToPropertyKey", cx
, frame
, stub
, BaselineCacheIRStubKind::Regular
, val
);
3479 return ToPropertyKeyOperation(cx
, val
, res
);
3482 bool FallbackICCodeCompiler::emit_ToPropertyKey() {
3483 EmitRestoreTailCallReg(masm
);
3486 masm
.push(ICStubReg
);
3487 pushStubPayload(masm
, R0
.scratchReg());
3489 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICToPropertyKey_Fallback
*,
3490 HandleValue
, MutableHandleValue
);
3491 return tailCallVM
<Fn
, DoToPropertyKeyFallback
>(masm
);
3494 ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode
* stubCode
,
3496 : ICStub(TypeMonitor_SingleObject
, stubCode
), obj_(obj
) {}
3498 ICTypeMonitor_ObjectGroup::ICTypeMonitor_ObjectGroup(JitCode
* stubCode
,
3500 : ICStub(TypeMonitor_ObjectGroup
, stubCode
), group_(group
) {}
3502 ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode
* stubCode
,
3504 : ICStub(TypeUpdate_SingleObject
, stubCode
), obj_(obj
) {}
3506 ICTypeUpdate_ObjectGroup::ICTypeUpdate_ObjectGroup(JitCode
* stubCode
,
3508 : ICStub(TypeUpdate_ObjectGroup
, stubCode
), group_(group
) {}
3514 bool DoRestFallback(JSContext
* cx
, BaselineFrame
* frame
, ICRest_Fallback
* stub
,
3515 MutableHandleValue res
) {
3516 unsigned numFormals
= frame
->numFormalArgs() - 1;
3517 unsigned numActuals
= frame
->numActualArgs();
3518 unsigned numRest
= numActuals
> numFormals
? numActuals
- numFormals
: 0;
3519 Value
* rest
= frame
->argv() + numFormals
;
3522 ObjectGroup::newArrayObject(cx
, rest
, numRest
, GenericObject
,
3523 ObjectGroup::NewArrayKind::UnknownIndex
);
3527 res
.setObject(*obj
);
3531 bool FallbackICCodeCompiler::emit_Rest() {
3532 EmitRestoreTailCallReg(masm
);
3534 masm
.push(ICStubReg
);
3535 pushStubPayload(masm
, R0
.scratchReg());
3537 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICRest_Fallback
*,
3538 MutableHandleValue
);
3539 return tailCallVM
<Fn
, DoRestFallback
>(masm
);
3543 // UnaryArith_Fallback
3546 bool DoUnaryArithFallback(JSContext
* cx
, BaselineFrame
* frame
,
3547 ICUnaryArith_Fallback
* stub
, HandleValue val
,
3548 MutableHandleValue res
) {
3549 stub
->incrementEnteredCount();
3551 RootedScript
script(cx
, frame
->script());
3552 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3553 JSOp op
= JSOp(*pc
);
3554 FallbackICSpew(cx
, stub
, "UnaryArith(%s)", CodeName(op
));
3557 case JSOp::BitNot
: {
3559 if (!BitNot(cx
, res
, res
)) {
3566 if (!ToNumber(cx
, res
)) {
3573 if (!NegOperation(cx
, res
, res
)) {
3579 if (!IncOperation(cx
, val
, res
)) {
3585 if (!DecOperation(cx
, val
, res
)) {
3590 case JSOp::ToNumeric
: {
3592 if (!ToNumeric(cx
, res
)) {
3598 MOZ_CRASH("Unexpected op");
3600 MOZ_ASSERT(res
.isNumeric());
3602 if (res
.isDouble()) {
3603 stub
->setSawDoubleResult();
3606 TryAttachStub
<UnaryArithIRGenerator
>("UnaryArith", cx
, frame
, stub
,
3607 BaselineCacheIRStubKind::Regular
, op
,
3612 bool FallbackICCodeCompiler::emit_UnaryArith() {
3613 static_assert(R0
== JSReturnOperand
);
3615 // Restore the tail call register.
3616 EmitRestoreTailCallReg(masm
);
3618 // Ensure stack is fully synced for the expression decompiler.
3623 masm
.push(ICStubReg
);
3624 pushStubPayload(masm
, R0
.scratchReg());
3626 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICUnaryArith_Fallback
*,
3627 HandleValue
, MutableHandleValue
);
3628 return tailCallVM
<Fn
, DoUnaryArithFallback
>(masm
);
3632 // BinaryArith_Fallback
3635 bool DoBinaryArithFallback(JSContext
* cx
, BaselineFrame
* frame
,
3636 ICBinaryArith_Fallback
* stub
, HandleValue lhs
,
3637 HandleValue rhs
, MutableHandleValue ret
) {
3638 stub
->incrementEnteredCount();
3640 RootedScript
script(cx
, frame
->script());
3641 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3642 JSOp op
= JSOp(*pc
);
3644 cx
, stub
, "CacheIRBinaryArith(%s,%d,%d)", CodeName(op
),
3645 int(lhs
.isDouble() ? JSVAL_TYPE_DOUBLE
: lhs
.extractNonDoubleType()),
3646 int(rhs
.isDouble() ? JSVAL_TYPE_DOUBLE
: rhs
.extractNonDoubleType()));
3648 // Don't pass lhs/rhs directly, we need the original values when
3649 // generating stubs.
3650 RootedValue
lhsCopy(cx
, lhs
);
3651 RootedValue
rhsCopy(cx
, rhs
);
3653 // Perform the arith operation.
3657 if (!AddValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3662 if (!SubValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3667 if (!MulValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3672 if (!DivValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3677 if (!ModValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3682 if (!PowValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3687 if (!BitOr(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3692 case JSOp::BitXor
: {
3693 if (!BitXor(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3698 case JSOp::BitAnd
: {
3699 if (!BitAnd(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3705 if (!BitLsh(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3711 if (!BitRsh(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3717 if (!UrshValues(cx
, &lhsCopy
, &rhsCopy
, ret
)) {
3723 MOZ_CRASH("Unhandled baseline arith op");
3726 if (ret
.isDouble()) {
3727 stub
->setSawDoubleResult();
3730 TryAttachStub
<BinaryArithIRGenerator
>("BinaryArith", cx
, frame
, stub
,
3731 BaselineCacheIRStubKind::Regular
, op
,
3736 bool FallbackICCodeCompiler::emit_BinaryArith() {
3737 static_assert(R0
== JSReturnOperand
);
3739 // Restore the tail call register.
3740 EmitRestoreTailCallReg(masm
);
3742 // Ensure stack is fully synced for the expression decompiler.
3749 masm
.push(ICStubReg
);
3750 pushStubPayload(masm
, R0
.scratchReg());
3752 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICBinaryArith_Fallback
*,
3753 HandleValue
, HandleValue
, MutableHandleValue
);
3754 return tailCallVM
<Fn
, DoBinaryArithFallback
>(masm
);
3760 bool DoCompareFallback(JSContext
* cx
, BaselineFrame
* frame
,
3761 ICCompare_Fallback
* stub
, HandleValue lhs
,
3762 HandleValue rhs
, MutableHandleValue ret
) {
3763 stub
->incrementEnteredCount();
3765 RootedScript
script(cx
, frame
->script());
3766 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3767 JSOp op
= JSOp(*pc
);
3769 FallbackICSpew(cx
, stub
, "Compare(%s)", CodeName(op
));
3771 // Don't pass lhs/rhs directly, we need the original values when
3772 // generating stubs.
3773 RootedValue
lhsCopy(cx
, lhs
);
3774 RootedValue
rhsCopy(cx
, rhs
);
3776 // Perform the compare operation.
3780 if (!LessThan(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3785 if (!LessThanOrEqual(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3790 if (!GreaterThan(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3795 if (!GreaterThanOrEqual(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3800 if (!LooselyEqual
<EqualityKind::Equal
>(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3805 if (!LooselyEqual
<EqualityKind::NotEqual
>(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3809 case JSOp::StrictEq
:
3810 if (!StrictlyEqual
<EqualityKind::Equal
>(cx
, &lhsCopy
, &rhsCopy
, &out
)) {
3814 case JSOp::StrictNe
:
3815 if (!StrictlyEqual
<EqualityKind::NotEqual
>(cx
, &lhsCopy
, &rhsCopy
,
3821 MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
3825 ret
.setBoolean(out
);
3827 TryAttachStub
<CompareIRGenerator
>("Compare", cx
, frame
, stub
,
3828 BaselineCacheIRStubKind::Regular
, op
, lhs
,
3833 bool FallbackICCodeCompiler::emit_Compare() {
3834 static_assert(R0
== JSReturnOperand
);
3836 // Restore the tail call register.
3837 EmitRestoreTailCallReg(masm
);
3839 // Ensure stack is fully synced for the expression decompiler.
3846 masm
.push(ICStubReg
);
3847 pushStubPayload(masm
, R0
.scratchReg());
3849 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICCompare_Fallback
*,
3850 HandleValue
, HandleValue
, MutableHandleValue
);
3851 return tailCallVM
<Fn
, DoCompareFallback
>(masm
);
3855 // NewArray_Fallback
3858 bool DoNewArrayFallback(JSContext
* cx
, BaselineFrame
* frame
,
3859 ICNewArray_Fallback
* stub
, uint32_t length
,
3860 MutableHandleValue res
) {
3861 stub
->incrementEnteredCount();
3862 FallbackICSpew(cx
, stub
, "NewArray");
3864 RootedObject
obj(cx
);
3865 if (stub
->templateObject()) {
3866 RootedObject
templateObject(cx
, stub
->templateObject());
3867 obj
= NewArrayOperationWithTemplate(cx
, templateObject
);
3872 RootedScript
script(cx
, frame
->script());
3873 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3875 obj
= NewArrayOperation(cx
, script
, pc
, length
);
3880 if (!obj
->isSingleton()) {
3881 ArrayObject
* templateObject
=
3882 NewArrayOperation(cx
, script
, pc
, length
, TenuredObject
);
3883 if (!templateObject
) {
3886 stub
->setTemplateObject(templateObject
);
3890 res
.setObject(*obj
);
3894 bool FallbackICCodeCompiler::emit_NewArray() {
3895 EmitRestoreTailCallReg(masm
);
3897 masm
.push(R0
.scratchReg()); // length
3898 masm
.push(ICStubReg
); // stub.
3899 masm
.pushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
3901 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICNewArray_Fallback
*,
3902 uint32_t, MutableHandleValue
);
3903 return tailCallVM
<Fn
, DoNewArrayFallback
>(masm
);
3907 // NewObject_Fallback
3909 bool DoNewObjectFallback(JSContext
* cx
, BaselineFrame
* frame
,
3910 ICNewObject_Fallback
* stub
, MutableHandleValue res
) {
3911 stub
->incrementEnteredCount();
3912 FallbackICSpew(cx
, stub
, "NewObject");
3914 RootedObject
obj(cx
);
3916 RootedObject
templateObject(cx
, stub
->templateObject());
3917 if (templateObject
) {
3919 !templateObject
->group()->maybePreliminaryObjectsDontCheckGeneration());
3920 obj
= NewObjectOperationWithTemplate(cx
, templateObject
);
3922 RootedScript
script(cx
, frame
->script());
3923 jsbytecode
* pc
= stub
->icEntry()->pc(script
);
3924 obj
= NewObjectOperation(cx
, script
, pc
);
3926 if (obj
&& !obj
->isSingleton() &&
3927 !obj
->group()->maybePreliminaryObjectsDontCheckGeneration()) {
3928 templateObject
= NewObjectOperation(cx
, script
, pc
, TenuredObject
);
3929 if (!templateObject
) {
3933 TryAttachStub
<NewObjectIRGenerator
>("NewObject", cx
, frame
, stub
,
3934 BaselineCacheIRStubKind::Regular
,
3935 JSOp(*pc
), templateObject
);
3937 stub
->setTemplateObject(templateObject
);
3945 res
.setObject(*obj
);
3949 bool FallbackICCodeCompiler::emit_NewObject() {
3950 EmitRestoreTailCallReg(masm
);
3952 masm
.push(ICStubReg
); // stub.
3953 pushStubPayload(masm
, R0
.scratchReg());
3955 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, ICNewObject_Fallback
*,
3956 MutableHandleValue
);
3957 return tailCallVM
<Fn
, DoNewObjectFallback
>(masm
);
3960 bool JitRuntime::generateBaselineICFallbackCode(JSContext
* cx
) {
3961 StackMacroAssembler masm
;
3963 BaselineICFallbackCode
& fallbackCode
= baselineICFallbackCode_
.ref();
3964 FallbackICCodeCompiler
compiler(cx
, fallbackCode
, masm
);
3966 JitSpew(JitSpew_Codegen
, "# Emitting Baseline IC fallback code");
3968 #define EMIT_CODE(kind) \
3970 uint32_t offset = startTrampolineCode(masm); \
3971 InitMacroAssemblerForICStub(masm); \
3972 if (!compiler.emit_##kind()) { \
3975 fallbackCode.initOffset(BaselineICFallbackKind::kind, offset); \
3977 IC_BASELINE_FALLBACK_CODE_KIND_LIST(EMIT_CODE
)
3980 Linker
linker(masm
);
3981 JitCode
* code
= linker
.newCode(cx
, CodeKind::Other
);
3987 writePerfSpewerJitCodeProfile(code
, "BaselineICFallback");
3990 vtune::MarkStub(code
, "BaselineICFallback");
3993 fallbackCode
.initCode(code
);
3997 const CacheIRStubInfo
* ICStub::cacheIRStubInfo() const {
3999 case ICStub::CacheIR_Regular
:
4000 return toCacheIR_Regular()->stubInfo();
4001 case ICStub::CacheIR_Monitored
:
4002 return toCacheIR_Monitored()->stubInfo();
4003 case ICStub::CacheIR_Updated
:
4004 return toCacheIR_Updated()->stubInfo();
4006 MOZ_CRASH("Not a CacheIR stub");
4010 const uint8_t* ICStub::cacheIRStubData() {
4012 case ICStub::CacheIR_Regular
:
4013 return toCacheIR_Regular()->stubDataStart();
4014 case ICStub::CacheIR_Monitored
:
4015 return toCacheIR_Monitored()->stubDataStart();
4016 case ICStub::CacheIR_Updated
:
4017 return toCacheIR_Updated()->stubDataStart();
4019 MOZ_CRASH("Not a CacheIR stub");