1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineCodeGen.h"
9 #include "mozilla/Casting.h"
12 #include "jit/BaselineIC.h"
13 #include "jit/BaselineJIT.h"
14 #include "jit/CacheIRCompiler.h"
15 #include "jit/CacheIRGenerator.h"
16 #include "jit/CalleeToken.h"
17 #include "jit/FixedList.h"
18 #include "jit/IonOptimizationLevels.h"
19 #include "jit/JitcodeMap.h"
20 #include "jit/JitFrames.h"
21 #include "jit/JitRuntime.h"
22 #include "jit/JitSpewer.h"
23 #include "jit/Linker.h"
24 #include "jit/PerfSpewer.h"
25 #include "jit/SharedICHelpers.h"
26 #include "jit/TemplateObject.h"
27 #include "jit/TrialInlining.h"
28 #include "jit/VMFunctions.h"
29 #include "js/friend/ErrorMessages.h" // JSMSG_*
30 #include "js/UniquePtr.h"
31 #include "vm/AsyncFunction.h"
32 #include "vm/AsyncIteration.h"
33 #include "vm/BuiltinObjectKind.h"
34 #include "vm/EnvironmentObject.h"
35 #include "vm/FunctionFlags.h" // js::FunctionFlags
36 #include "vm/Interpreter.h"
37 #include "vm/JSFunction.h"
40 # include "vtune/VTuneWrapper.h"
43 #include "debugger/DebugAPI-inl.h"
44 #include "jit/BaselineFrameInfo-inl.h"
45 #include "jit/JitHints-inl.h"
46 #include "jit/JitScript-inl.h"
47 #include "jit/MacroAssembler-inl.h"
48 #include "jit/SharedICHelpers-inl.h"
49 #include "jit/TemplateObject-inl.h"
50 #include "jit/VMFunctionList-inl.h"
51 #include "vm/Interpreter-inl.h"
52 #include "vm/JSScript-inl.h"
55 using namespace js::jit
;
59 using mozilla::AssertedCast
;
68 BaselineCompilerHandler::BaselineCompilerHandler(JSContext
* cx
,
72 : frame_(script
, masm
),
74 analysis_(alloc
, script
),
81 compileDebugInstrumentation_(script
->isDebuggee()),
82 ionCompileable_(IsIonEnabled(cx
) && CanIonCompileScript(cx
, script
)) {
85 BaselineInterpreterHandler::BaselineInterpreterHandler(JSContext
* cx
,
89 template <typename Handler
>
90 template <typename
... HandlerArgs
>
91 BaselineCodeGen
<Handler
>::BaselineCodeGen(JSContext
* cx
, TempAllocator
& alloc
,
92 HandlerArgs
&&... args
)
93 : handler(cx
, masm
, std::forward
<HandlerArgs
>(args
)...),
96 frame(handler
.frame()) {}
98 BaselineCompiler::BaselineCompiler(JSContext
* cx
, TempAllocator
& alloc
,
100 : BaselineCodeGen(cx
, alloc
, /* HandlerArgs = */ alloc
, script
) {
101 #ifdef JS_CODEGEN_NONE
106 BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext
* cx
,
107 TempAllocator
& alloc
)
108 : BaselineCodeGen(cx
, alloc
/* no handlerArgs */) {}
110 bool BaselineCompilerHandler::init(JSContext
* cx
) {
111 if (!analysis_
.init(alloc_
)) {
115 uint32_t len
= script_
->length();
117 if (!labels_
.init(alloc_
, len
)) {
121 for (size_t i
= 0; i
< len
; i
++) {
122 new (&labels_
[i
]) Label();
125 if (!frame_
.init(alloc_
)) {
132 bool BaselineCompiler::init() {
133 if (!handler
.init(cx
)) {
140 bool BaselineCompilerHandler::recordCallRetAddr(JSContext
* cx
,
141 RetAddrEntry::Kind kind
,
142 uint32_t retOffset
) {
143 uint32_t pcOffset
= script_
->pcToOffset(pc_
);
145 // Entries must be sorted by pcOffset for binary search to work.
146 // See BaselineScript::retAddrEntryFromPCOffset.
147 MOZ_ASSERT_IF(!retAddrEntries_
.empty(),
148 retAddrEntries_
.back().pcOffset() <= pcOffset
);
150 // Similarly, entries must be sorted by return offset and this offset must be
151 // unique. See BaselineScript::retAddrEntryFromReturnOffset.
152 MOZ_ASSERT_IF(!retAddrEntries_
.empty() && !masm_
.oom(),
153 retAddrEntries_
.back().returnOffset().offset() < retOffset
);
155 if (!retAddrEntries_
.emplaceBack(pcOffset
, kind
, CodeOffset(retOffset
))) {
156 ReportOutOfMemory(cx
);
163 bool BaselineInterpreterHandler::recordCallRetAddr(JSContext
* cx
,
164 RetAddrEntry::Kind kind
,
165 uint32_t retOffset
) {
167 case RetAddrEntry::Kind::DebugPrologue
:
168 MOZ_ASSERT(callVMOffsets_
.debugPrologueOffset
== 0,
169 "expected single DebugPrologue call");
170 callVMOffsets_
.debugPrologueOffset
= retOffset
;
172 case RetAddrEntry::Kind::DebugEpilogue
:
173 MOZ_ASSERT(callVMOffsets_
.debugEpilogueOffset
== 0,
174 "expected single DebugEpilogue call");
175 callVMOffsets_
.debugEpilogueOffset
= retOffset
;
177 case RetAddrEntry::Kind::DebugAfterYield
:
178 MOZ_ASSERT(callVMOffsets_
.debugAfterYieldOffset
== 0,
179 "expected single DebugAfterYield call");
180 callVMOffsets_
.debugAfterYieldOffset
= retOffset
;
189 bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
190 JSContext
* cx
, CodeOffset offset
) {
191 if (!debugInstrumentationOffsets_
.append(offset
.offset())) {
192 ReportOutOfMemory(cx
);
198 MethodStatus
BaselineCompiler::compile() {
199 AutoCreatedBy
acb(masm
, "BaselineCompiler::compile");
201 Rooted
<JSScript
*> script(cx
, handler
.script());
202 JitSpew(JitSpew_BaselineScripts
, "Baseline compiling script %s:%u:%u (%p)",
203 script
->filename(), script
->lineno(),
204 script
->column().oneOriginValue(), script
.get());
206 JitSpew(JitSpew_Codegen
, "# Emitting baseline code for script %s:%u:%u",
207 script
->filename(), script
->lineno(),
208 script
->column().oneOriginValue());
210 AutoIncrementalTimer
timer(cx
->realm()->timers
.baselineCompileTime
);
212 AutoKeepJitScripts
keepJitScript(cx
);
213 if (!script
->ensureHasJitScript(cx
, keepJitScript
)) {
217 // When code coverage is enabled, we have to create the ScriptCounts if they
219 if (!script
->hasScriptCounts() && cx
->realm()->collectCoverageForDebug()) {
220 if (!script
->initScriptCounts(cx
)) {
225 if (!JitOptions
.disableJitHints
&&
226 cx
->runtime()->jitRuntime()->hasJitHintsMap()) {
227 JitHintsMap
* jitHints
= cx
->runtime()->jitRuntime()->getJitHintsMap();
228 jitHints
->setEagerBaselineHint(script
);
231 // Suppress GC during compilation.
232 gc::AutoSuppressGC
suppressGC(cx
);
234 if (!script
->jitScript()->ensureHasCachedBaselineJitData(cx
, script
)) {
238 MOZ_ASSERT(!script
->hasBaselineScript());
240 perfSpewer_
.recordOffset(masm
, "Prologue");
241 if (!emitPrologue()) {
245 MethodStatus status
= emitBody();
246 if (status
!= Method_Compiled
) {
250 perfSpewer_
.recordOffset(masm
, "Epilogue");
251 if (!emitEpilogue()) {
255 perfSpewer_
.recordOffset(masm
, "OOLPostBarrierSlot");
256 if (!emitOutOfLinePostBarrierSlot()) {
260 AutoCreatedBy
acb2(masm
, "exception_tail");
263 ReportOutOfMemory(cx
);
267 JitCode
* code
= linker
.newCode(cx
, CodeKind::Baseline
);
272 UniquePtr
<BaselineScript
> baselineScript(
274 cx
, warmUpCheckPrologueOffset_
.offset(),
275 profilerEnterFrameToggleOffset_
.offset(),
276 profilerExitFrameToggleOffset_
.offset(),
277 handler
.retAddrEntries().length(), handler
.osrEntries().length(),
278 debugTrapEntries_
.length(), script
->resumeOffsets().size()),
279 JS::DeletePolicy
<BaselineScript
>(cx
->runtime()));
280 if (!baselineScript
) {
284 baselineScript
->setMethod(code
);
286 JitSpew(JitSpew_BaselineScripts
,
287 "Created BaselineScript %p (raw %p) for %s:%u:%u",
288 (void*)baselineScript
.get(), (void*)code
->raw(), script
->filename(),
289 script
->lineno(), script
->column().oneOriginValue());
291 baselineScript
->copyRetAddrEntries(handler
.retAddrEntries().begin());
292 baselineScript
->copyOSREntries(handler
.osrEntries().begin());
293 baselineScript
->copyDebugTrapEntries(debugTrapEntries_
.begin());
295 // If profiler instrumentation is enabled, toggle instrumentation on.
296 if (cx
->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
298 baselineScript
->toggleProfilerInstrumentation(true);
301 // Compute native resume addresses for the script's resume offsets.
302 baselineScript
->computeResumeNativeOffsets(script
, resumeOffsetEntries_
);
304 if (compileDebugInstrumentation()) {
305 baselineScript
->setHasDebugInstrumentation();
308 // Always register a native => bytecode mapping entry, since profiler can be
309 // turned on with baseline jitcode on stack, and baseline jitcode cannot be
312 JitSpew(JitSpew_Profiling
,
313 "Added JitcodeGlobalEntry for baseline script %s:%u:%u (%p)",
314 script
->filename(), script
->lineno(),
315 script
->column().oneOriginValue(), baselineScript
.get());
317 // Generate profiling string.
318 UniqueChars str
= GeckoProfilerRuntime::allocProfileString(cx
, script
);
323 auto entry
= MakeJitcodeGlobalEntry
<BaselineEntry
>(
324 cx
, code
, code
->raw(), code
->rawEnd(), script
, std::move(str
));
329 JitcodeGlobalTable
* globalTable
=
330 cx
->runtime()->jitRuntime()->getJitcodeGlobalTable();
331 if (!globalTable
->addEntry(std::move(entry
))) {
332 ReportOutOfMemory(cx
);
336 // Mark the jitcode as having a bytecode map.
337 code
->setHasBytecodeMap();
340 script
->jitScript()->setBaselineScript(script
, baselineScript
.release());
342 perfSpewer_
.saveProfile(cx
, script
, code
);
345 vtune::MarkScript(code
, script
, "baseline");
348 return Method_Compiled
;
351 // On most platforms we use a dedicated bytecode PC register to avoid many
352 // dependent loads and stores for sequences of simple bytecode ops. This
353 // register must be saved/restored around VM and IC calls.
355 // On 32-bit x86 we don't have enough registers for this (because R0-R2 require
356 // 6 registers) so there we always store the pc on the frame.
357 static constexpr bool HasInterpreterPCReg() {
358 return InterpreterPCReg
!= InvalidReg
;
361 static Register
LoadBytecodePC(MacroAssembler
& masm
, Register scratch
) {
362 if (HasInterpreterPCReg()) {
363 return InterpreterPCReg
;
366 Address
pcAddr(FramePointer
, BaselineFrame::reverseOffsetOfInterpreterPC());
367 masm
.loadPtr(pcAddr
, scratch
);
371 static void LoadInt8Operand(MacroAssembler
& masm
, Register dest
) {
372 Register pc
= LoadBytecodePC(masm
, dest
);
373 masm
.load8SignExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
376 static void LoadUint8Operand(MacroAssembler
& masm
, Register dest
) {
377 Register pc
= LoadBytecodePC(masm
, dest
);
378 masm
.load8ZeroExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
381 static void LoadUint16Operand(MacroAssembler
& masm
, Register dest
) {
382 Register pc
= LoadBytecodePC(masm
, dest
);
383 masm
.load16ZeroExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
386 static void LoadInt32Operand(MacroAssembler
& masm
, Register dest
) {
387 Register pc
= LoadBytecodePC(masm
, dest
);
388 masm
.load32(Address(pc
, sizeof(jsbytecode
)), dest
);
391 static void LoadInt32OperandSignExtendToPtr(MacroAssembler
& masm
, Register pc
,
393 masm
.load32SignExtendToPtr(Address(pc
, sizeof(jsbytecode
)), dest
);
396 static void LoadUint24Operand(MacroAssembler
& masm
, size_t offset
,
398 // Load the opcode and operand, then left shift to discard the opcode.
399 Register pc
= LoadBytecodePC(masm
, dest
);
400 masm
.load32(Address(pc
, offset
), dest
);
401 masm
.rshift32(Imm32(8), dest
);
404 static void LoadInlineValueOperand(MacroAssembler
& masm
, ValueOperand dest
) {
405 // Note: the Value might be unaligned but as above we rely on all our
406 // platforms having appropriate support for unaligned accesses (except for
407 // floating point instructions on ARM).
408 Register pc
= LoadBytecodePC(masm
, dest
.scratchReg());
409 masm
.loadUnalignedValue(Address(pc
, sizeof(jsbytecode
)), dest
);
413 void BaselineCompilerCodeGen::loadScript(Register dest
) {
414 masm
.movePtr(ImmGCPtr(handler
.script()), dest
);
418 void BaselineInterpreterCodeGen::loadScript(Register dest
) {
419 masm
.loadPtr(frame
.addressOfInterpreterScript(), dest
);
423 void BaselineCompilerCodeGen::saveInterpreterPCReg() {}
426 void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
427 if (HasInterpreterPCReg()) {
428 masm
.storePtr(InterpreterPCReg
, frame
.addressOfInterpreterPC());
433 void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}
436 void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
437 if (HasInterpreterPCReg()) {
438 masm
.loadPtr(frame
.addressOfInterpreterPC(), InterpreterPCReg
);
443 void BaselineCompilerCodeGen::emitInitializeLocals() {
444 // Initialize all locals to |undefined|. Lexical bindings are temporal
445 // dead zoned in bytecode.
447 size_t n
= frame
.nlocals();
452 // Use R0 to minimize code size. If the number of locals to push is <
453 // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
454 // and inline. Otherwise, they're emitted in a partially unrolled loop.
455 static const size_t LOOP_UNROLL_FACTOR
= 4;
456 size_t toPushExtra
= n
% LOOP_UNROLL_FACTOR
;
458 masm
.moveValue(UndefinedValue(), R0
);
460 // Handle any extra pushes left over by the optional unrolled loop below.
461 for (size_t i
= 0; i
< toPushExtra
; i
++) {
465 // Partially unrolled loop of pushes.
466 if (n
>= LOOP_UNROLL_FACTOR
) {
467 size_t toPush
= n
- toPushExtra
;
468 MOZ_ASSERT(toPush
% LOOP_UNROLL_FACTOR
== 0);
469 MOZ_ASSERT(toPush
>= LOOP_UNROLL_FACTOR
);
470 masm
.move32(Imm32(toPush
), R1
.scratchReg());
471 // Emit unrolled loop with 4 pushes per iteration.
473 masm
.bind(&pushLoop
);
474 for (size_t i
= 0; i
< LOOP_UNROLL_FACTOR
; i
++) {
477 masm
.branchSub32(Assembler::NonZero
, Imm32(LOOP_UNROLL_FACTOR
),
478 R1
.scratchReg(), &pushLoop
);
483 void BaselineInterpreterCodeGen::emitInitializeLocals() {
484 // Push |undefined| for all locals.
486 Register scratch
= R0
.scratchReg();
488 masm
.loadPtr(Address(scratch
, JSScript::offsetOfSharedData()), scratch
);
489 masm
.loadPtr(Address(scratch
, SharedImmutableScriptData::offsetOfISD()),
491 masm
.load32(Address(scratch
, ImmutableScriptData::offsetOfNfixed()), scratch
);
494 masm
.branchTest32(Assembler::Zero
, scratch
, scratch
, &done
);
497 masm
.pushValue(UndefinedValue());
498 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch
, &top
);
504 // R2.scratchReg() contains object being written to.
505 // Called with the baseline stack synced, except for R0 which is preserved.
506 // All other registers are usable as scratch.
508 // void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
509 template <typename Handler
>
510 bool BaselineCodeGen
<Handler
>::emitOutOfLinePostBarrierSlot() {
511 AutoCreatedBy
acb(masm
,
512 "BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot");
514 if (!postBarrierSlot_
.used()) {
518 masm
.bind(&postBarrierSlot_
);
520 #ifdef JS_USE_LINK_REGISTER
521 masm
.pushReturnAddress();
524 Register objReg
= R2
.scratchReg();
526 // Check one element cache to avoid VM call.
528 auto* lastCellAddr
= cx
->runtime()->gc
.addressOfLastBufferedWholeCell();
529 masm
.branchPtr(Assembler::Equal
, AbsoluteAddress(lastCellAddr
), objReg
,
532 saveInterpreterPCReg();
534 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
535 MOZ_ASSERT(!regs
.has(FramePointer
));
538 Register scratch
= regs
.takeAny();
542 using Fn
= void (*)(JSRuntime
* rt
, js::gc::Cell
* cell
);
543 masm
.setupUnalignedABICall(scratch
);
544 masm
.movePtr(ImmPtr(cx
->runtime()), scratch
);
545 masm
.passABIArg(scratch
);
546 masm
.passABIArg(objReg
);
547 masm
.callWithABI
<Fn
, PostWriteBarrier
>();
549 restoreInterpreterPCReg();
553 masm
.bind(&skipBarrier
);
558 // Scan the a cache IR stub's fields and create an allocation site for any that
559 // refer to the catch-all unknown allocation site. This will be the case for
560 // stubs created when running in the interpreter. This happens on transition to
562 static bool CreateAllocSitesForCacheIRStub(JSScript
* script
, uint32_t pcOffset
,
563 ICCacheIRStub
* stub
) {
564 const CacheIRStubInfo
* stubInfo
= stub
->stubInfo();
565 uint8_t* stubData
= stub
->stubDataStart();
567 ICScript
* icScript
= script
->jitScript()->icScript();
572 StubField::Type fieldType
= stubInfo
->fieldType(field
);
573 if (fieldType
== StubField::Type::Limit
) {
577 if (fieldType
== StubField::Type::AllocSite
) {
578 gc::AllocSite
* site
=
579 stubInfo
->getPtrStubField
<ICCacheIRStub
, gc::AllocSite
>(stub
, offset
);
580 if (site
->kind() == gc::AllocSite::Kind::Unknown
) {
581 gc::AllocSite
* newSite
=
582 icScript
->getOrCreateAllocSite(script
, pcOffset
);
587 stubInfo
->replaceStubRawWord(stubData
, offset
, uintptr_t(site
),
593 offset
+= StubField::sizeInBytes(fieldType
);
599 static void CreateAllocSitesForICChain(JSScript
* script
, uint32_t pcOffset
,
600 uint32_t entryIndex
) {
601 JitScript
* jitScript
= script
->jitScript();
602 ICStub
* stub
= jitScript
->icEntry(entryIndex
).firstStub();
604 while (!stub
->isFallback()) {
605 if (!CreateAllocSitesForCacheIRStub(script
, pcOffset
,
606 stub
->toCacheIRStub())) {
607 // This is an optimization and safe to skip if we hit OOM or per-zone
611 stub
= stub
->toCacheIRStub()->next();
616 bool BaselineCompilerCodeGen::emitNextIC() {
617 AutoCreatedBy
acb(masm
, "emitNextIC");
619 // Emit a call to an IC stored in JitScript. Calls to this must match the
620 // ICEntry order in JitScript: first the non-op IC entries for |this| and
621 // formal arguments, then the for-op IC entries for JOF_IC ops.
623 JSScript
* script
= handler
.script();
624 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
626 // We don't use every ICEntry and we can skip unreachable ops, so we have
627 // to loop until we find an ICEntry for the current pc.
628 const ICFallbackStub
* stub
;
631 stub
= script
->jitScript()->fallbackStub(handler
.icEntryIndex());
632 entryIndex
= handler
.icEntryIndex();
633 handler
.moveToNextICEntry();
634 } while (stub
->pcOffset() < pcOffset
);
636 MOZ_ASSERT(stub
->pcOffset() == pcOffset
);
637 MOZ_ASSERT(BytecodeOpHasIC(JSOp(*handler
.pc())));
639 if (BytecodeOpCanHaveAllocSite(JSOp(*handler
.pc()))) {
640 CreateAllocSitesForICChain(script
, pcOffset
, entryIndex
);
643 // Load stub pointer into ICStubReg.
644 masm
.loadPtr(frame
.addressOfICScript(), ICStubReg
);
645 size_t firstStubOffset
= ICScript::offsetOfFirstStub(entryIndex
);
646 masm
.loadPtr(Address(ICStubReg
, firstStubOffset
), ICStubReg
);
648 CodeOffset returnOffset
;
649 EmitCallIC(masm
, &returnOffset
);
651 RetAddrEntry::Kind kind
= RetAddrEntry::Kind::IC
;
652 if (!handler
.retAddrEntries().emplaceBack(pcOffset
, kind
, returnOffset
)) {
653 ReportOutOfMemory(cx
);
661 bool BaselineInterpreterCodeGen::emitNextIC() {
662 saveInterpreterPCReg();
663 masm
.loadPtr(frame
.addressOfInterpreterICEntry(), ICStubReg
);
664 masm
.loadPtr(Address(ICStubReg
, ICEntry::offsetOfFirstStub()), ICStubReg
);
665 masm
.call(Address(ICStubReg
, ICStub::offsetOfStubCode()));
666 uint32_t returnOffset
= masm
.currentOffset();
667 restoreInterpreterPCReg();
669 // If this is an IC for a bytecode op where Ion may inline scripts, we need to
670 // record the return offset for Ion bailouts.
671 if (handler
.currentOp()) {
672 JSOp op
= *handler
.currentOp();
673 MOZ_ASSERT(BytecodeOpHasIC(op
));
674 if (IsIonInlinableOp(op
)) {
675 if (!handler
.icReturnOffsets().emplaceBack(returnOffset
, op
)) {
685 void BaselineCompilerCodeGen::computeFrameSize(Register dest
) {
686 MOZ_ASSERT(!inCall_
, "must not be called in the middle of a VM call");
687 masm
.move32(Imm32(frame
.frameSize()), dest
);
691 void BaselineInterpreterCodeGen::computeFrameSize(Register dest
) {
692 // dest := FramePointer - StackPointer.
693 MOZ_ASSERT(!inCall_
, "must not be called in the middle of a VM call");
694 masm
.mov(FramePointer
, dest
);
695 masm
.subStackPtrFrom(dest
);
698 template <typename Handler
>
699 void BaselineCodeGen
<Handler
>::prepareVMCall() {
700 pushedBeforeCall_
= masm
.framePushed();
705 // Ensure everything is synced.
710 void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
711 uint32_t argSize
, Register scratch
) {
713 masm
.store32(Imm32(frame
.frameSize()), frame
.addressOfDebugFrameSize());
716 masm
.pushFrameDescriptor(FrameType::BaselineJS
);
720 void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
721 uint32_t argSize
, Register scratch
) {
723 // Store the frame size without VMFunction arguments in debug builds.
724 // scratch := FramePointer - StackPointer - argSize.
725 masm
.mov(FramePointer
, scratch
);
726 masm
.subStackPtrFrom(scratch
);
727 masm
.sub32(Imm32(argSize
), scratch
);
728 masm
.store32(scratch
, frame
.addressOfDebugFrameSize());
731 masm
.pushFrameDescriptor(FrameType::BaselineJS
);
734 static uint32_t GetVMFunctionArgSize(const VMFunctionData
& fun
) {
735 return fun
.explicitStackSlots() * sizeof(void*);
738 template <typename Handler
>
739 bool BaselineCodeGen
<Handler
>::callVMInternal(VMFunctionId id
,
740 RetAddrEntry::Kind kind
,
743 // Assert prepareVMCall() has been called.
748 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
749 const VMFunctionData
& fun
= GetVMFunction(id
);
751 uint32_t argSize
= GetVMFunctionArgSize(fun
);
753 // Assert all arguments were pushed.
754 MOZ_ASSERT(masm
.framePushed() - pushedBeforeCall_
== argSize
);
756 saveInterpreterPCReg();
758 if (phase
== CallVMPhase::AfterPushingLocals
) {
759 storeFrameSizeAndPushDescriptor(argSize
, R0
.scratchReg());
761 MOZ_ASSERT(phase
== CallVMPhase::BeforePushingLocals
);
763 uint32_t frameBaseSize
= BaselineFrame::frameSizeForNumValueSlots(0);
764 masm
.store32(Imm32(frameBaseSize
), frame
.addressOfDebugFrameSize());
766 masm
.pushFrameDescriptor(FrameType::BaselineJS
);
770 uint32_t callOffset
= masm
.currentOffset();
772 // Pop arguments from framePushed.
773 masm
.implicitPop(argSize
);
775 restoreInterpreterPCReg();
777 return handler
.recordCallRetAddr(cx
, kind
, callOffset
);
780 template <typename Handler
>
781 template <typename Fn
, Fn fn
>
782 bool BaselineCodeGen
<Handler
>::callVM(RetAddrEntry::Kind kind
,
784 VMFunctionId fnId
= VMFunctionToId
<Fn
, fn
>::id
;
785 return callVMInternal(fnId
, kind
, phase
);
788 template <typename Handler
>
789 bool BaselineCodeGen
<Handler
>::emitStackCheck() {
791 if (handler
.mustIncludeSlotsInStackCheck()) {
792 // Subtract the size of script->nslots() first.
793 Register scratch
= R1
.scratchReg();
794 masm
.moveStackPtrTo(scratch
);
795 subtractScriptSlotsSize(scratch
, R2
.scratchReg());
796 masm
.branchPtr(Assembler::BelowOrEqual
,
797 AbsoluteAddress(cx
->addressOfJitStackLimit()), scratch
,
800 masm
.branchStackPtrRhs(Assembler::BelowOrEqual
,
801 AbsoluteAddress(cx
->addressOfJitStackLimit()),
806 masm
.loadBaselineFramePtr(FramePointer
, R1
.scratchReg());
807 pushArg(R1
.scratchReg());
809 const CallVMPhase phase
= CallVMPhase::BeforePushingLocals
;
810 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::StackCheck
;
812 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
813 if (!callVM
<Fn
, CheckOverRecursedBaseline
>(kind
, phase
)) {
817 masm
.bind(&skipCall
);
821 static void EmitCallFrameIsDebuggeeCheck(MacroAssembler
& masm
) {
822 using Fn
= void (*)(BaselineFrame
* frame
);
823 masm
.setupUnalignedABICall(R0
.scratchReg());
824 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
825 masm
.passABIArg(R0
.scratchReg());
826 masm
.callWithABI
<Fn
, FrameIsDebuggeeCheck
>();
830 bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
831 if (handler
.compileDebugInstrumentation()) {
832 EmitCallFrameIsDebuggeeCheck(masm
);
838 bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
839 // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
842 // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
843 // call. Consider moving the callWithABI out-of-line.
846 CodeOffset toggleOffset
= masm
.toggledJump(&skipCheck
);
848 saveInterpreterPCReg();
849 EmitCallFrameIsDebuggeeCheck(masm
);
850 restoreInterpreterPCReg();
852 masm
.bind(&skipCheck
);
853 return handler
.addDebugInstrumentationOffset(cx
, toggleOffset
);
856 static void MaybeIncrementCodeCoverageCounter(MacroAssembler
& masm
,
859 if (!script
->hasScriptCounts()) {
862 PCCounts
* counts
= script
->maybeGetPCCounts(pc
);
863 uint64_t* counterAddr
= &counts
->numExec();
864 masm
.inc64(AbsoluteAddress(counterAddr
));
868 bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
869 // If the main instruction is not a jump target, then we emit the
870 // corresponding code coverage counter.
871 JSScript
* script
= handler
.script();
872 jsbytecode
* main
= script
->main();
873 if (!BytecodeIsJumpTarget(JSOp(*main
))) {
874 MaybeIncrementCodeCoverageCounter(masm
, script
, main
);
880 bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
882 CodeOffset toggleOffset
= masm
.toggledJump(&skipCoverage
);
883 masm
.call(handler
.codeCoverageAtPrologueLabel());
884 masm
.bind(&skipCoverage
);
885 return handler
.codeCoverageOffsets().append(toggleOffset
.offset());
889 void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg
,
891 uint32_t slotsSize
= handler
.script()->nslots() * sizeof(Value
);
892 masm
.subPtr(Imm32(slotsSize
), reg
);
896 void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg
,
898 // reg = reg - script->nslots() * sizeof(Value)
899 MOZ_ASSERT(reg
!= scratch
);
901 masm
.loadPtr(Address(scratch
, JSScript::offsetOfSharedData()), scratch
);
902 masm
.loadPtr(Address(scratch
, SharedImmutableScriptData::offsetOfISD()),
904 masm
.load32(Address(scratch
, ImmutableScriptData::offsetOfNslots()), scratch
);
905 static_assert(sizeof(Value
) == 8,
906 "shift by 3 below assumes Value is 8 bytes");
907 masm
.lshiftPtr(Imm32(3), scratch
);
908 masm
.subPtr(scratch
, reg
);
912 void BaselineCompilerCodeGen::loadGlobalLexicalEnvironment(Register dest
) {
913 MOZ_ASSERT(!handler
.script()->hasNonSyntacticScope());
914 masm
.movePtr(ImmGCPtr(&cx
->global()->lexicalEnvironment()), dest
);
918 void BaselineInterpreterCodeGen::loadGlobalLexicalEnvironment(Register dest
) {
919 masm
.loadGlobalObjectData(dest
);
920 masm
.loadPtr(Address(dest
, GlobalObjectData::offsetOfLexicalEnvironment()),
925 void BaselineCompilerCodeGen::pushGlobalLexicalEnvironmentValue(
926 ValueOperand scratch
) {
927 frame
.push(ObjectValue(cx
->global()->lexicalEnvironment()));
931 void BaselineInterpreterCodeGen::pushGlobalLexicalEnvironmentValue(
932 ValueOperand scratch
) {
933 loadGlobalLexicalEnvironment(scratch
.scratchReg());
934 masm
.tagValue(JSVAL_TYPE_OBJECT
, scratch
.scratchReg(), scratch
);
939 void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest
) {
940 JSObject
* thisObj
= cx
->global()->lexicalEnvironment().thisObject();
941 masm
.moveValue(ObjectValue(*thisObj
), dest
);
945 void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest
) {
946 Register scratch
= dest
.scratchReg();
947 loadGlobalLexicalEnvironment(scratch
);
948 static constexpr size_t SlotOffset
=
949 GlobalLexicalEnvironmentObject::offsetOfThisValueSlot();
950 masm
.loadValue(Address(scratch
, SlotOffset
), dest
);
954 void BaselineCompilerCodeGen::pushScriptArg() {
955 pushArg(ImmGCPtr(handler
.script()));
959 void BaselineInterpreterCodeGen::pushScriptArg() {
960 pushArg(frame
.addressOfInterpreterScript());
964 void BaselineCompilerCodeGen::pushBytecodePCArg() {
965 pushArg(ImmPtr(handler
.pc()));
969 void BaselineInterpreterCodeGen::pushBytecodePCArg() {
970 if (HasInterpreterPCReg()) {
971 pushArg(InterpreterPCReg
);
973 pushArg(frame
.addressOfInterpreterPC());
977 static gc::Cell
* GetScriptGCThing(JSScript
* script
, jsbytecode
* pc
,
978 ScriptGCThingType type
) {
980 case ScriptGCThingType::Atom
:
981 return script
->getAtom(pc
);
982 case ScriptGCThingType::String
:
983 return script
->getString(pc
);
984 case ScriptGCThingType::RegExp
:
985 return script
->getRegExp(pc
);
986 case ScriptGCThingType::Object
:
987 return script
->getObject(pc
);
988 case ScriptGCThingType::Function
:
989 return script
->getFunction(pc
);
990 case ScriptGCThingType::Scope
:
991 return script
->getScope(pc
);
992 case ScriptGCThingType::BigInt
:
993 return script
->getBigInt(pc
);
995 MOZ_CRASH("Unexpected GCThing type");
999 void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type
,
1002 gc::Cell
* thing
= GetScriptGCThing(handler
.script(), handler
.pc(), type
);
1003 masm
.movePtr(ImmGCPtr(thing
), dest
);
1007 void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type
,
1010 MOZ_ASSERT(dest
!= scratch
);
1012 // Load the index in |scratch|.
1013 LoadInt32Operand(masm
, scratch
);
1015 // Load the GCCellPtr.
1017 masm
.loadPtr(Address(dest
, JSScript::offsetOfPrivateData()), dest
);
1018 masm
.loadPtr(BaseIndex(dest
, scratch
, ScalePointer
,
1019 PrivateScriptData::offsetOfGCThings()),
1022 // Clear the tag bits.
1024 case ScriptGCThingType::Atom
:
1025 case ScriptGCThingType::String
:
1026 // Use xorPtr with a 32-bit immediate because it's more efficient than
1027 // andPtr on 64-bit.
1028 static_assert(uintptr_t(TraceKind::String
) == 2,
1029 "Unexpected tag bits for string GCCellPtr");
1030 masm
.xorPtr(Imm32(2), dest
);
1032 case ScriptGCThingType::RegExp
:
1033 case ScriptGCThingType::Object
:
1034 case ScriptGCThingType::Function
:
1035 // No-op because GCCellPtr tag bits are zero for objects.
1036 static_assert(uintptr_t(TraceKind::Object
) == 0,
1037 "Unexpected tag bits for object GCCellPtr");
1039 case ScriptGCThingType::BigInt
:
1040 // Use xorPtr with a 32-bit immediate because it's more efficient than
1041 // andPtr on 64-bit.
1042 static_assert(uintptr_t(TraceKind::BigInt
) == 1,
1043 "Unexpected tag bits for BigInt GCCellPtr");
1044 masm
.xorPtr(Imm32(1), dest
);
1046 case ScriptGCThingType::Scope
:
1047 // Use xorPtr with a 32-bit immediate because it's more efficient than
1048 // andPtr on 64-bit.
1049 static_assert(uintptr_t(TraceKind::Scope
) >= JS::OutOfLineTraceKindMask
,
1050 "Expected Scopes to have OutOfLineTraceKindMask tag");
1051 masm
.xorPtr(Imm32(JS::OutOfLineTraceKindMask
), dest
);
1056 // Assert low bits are not set.
1058 masm
.branchTestPtr(Assembler::Zero
, dest
, Imm32(0b111), &ok
);
1059 masm
.assumeUnreachable("GC pointer with tag bits set");
1065 void BaselineCompilerCodeGen::pushScriptGCThingArg(ScriptGCThingType type
,
1067 Register scratch2
) {
1068 gc::Cell
* thing
= GetScriptGCThing(handler
.script(), handler
.pc(), type
);
1069 pushArg(ImmGCPtr(thing
));
1073 void BaselineInterpreterCodeGen::pushScriptGCThingArg(ScriptGCThingType type
,
1075 Register scratch2
) {
1076 loadScriptGCThing(type
, scratch1
, scratch2
);
1080 template <typename Handler
>
1081 void BaselineCodeGen
<Handler
>::pushScriptNameArg(Register scratch1
,
1082 Register scratch2
) {
1083 pushScriptGCThingArg(ScriptGCThingType::Atom
, scratch1
, scratch2
);
1087 void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register
) {
1088 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler
.pc())) == JOF_UINT8
);
1089 pushArg(Imm32(GET_UINT8(handler
.pc())));
1093 void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch
) {
1094 LoadUint8Operand(masm
, scratch
);
1099 void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register
) {
1100 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler
.pc())) == JOF_UINT16
);
1101 pushArg(Imm32(GET_UINT16(handler
.pc())));
1105 void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
1107 LoadUint16Operand(masm
, scratch
);
1112 void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest
) {
1113 uint32_t length
= GET_UINT32(handler
.pc());
1114 MOZ_ASSERT(length
<= INT32_MAX
,
1115 "the bytecode emitter must fail to compile code that would "
1116 "produce a length exceeding int32_t range");
1117 masm
.move32(Imm32(AssertedCast
<int32_t>(length
)), dest
);
1121 void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest
) {
1122 LoadInt32Operand(masm
, dest
);
1125 template <typename Handler
>
1126 bool BaselineCodeGen
<Handler
>::emitDebugPrologue() {
1127 auto ifDebuggee
= [this]() {
1128 // Load pointer to BaselineFrame in R0.
1129 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
1132 pushArg(R0
.scratchReg());
1134 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugPrologue
;
1136 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1137 if (!callVM
<Fn
, jit::DebugPrologue
>(kind
)) {
1143 return emitDebugInstrumentation(ifDebuggee
);
1147 void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv
) {
1148 Register scratch
= R0
.scratchReg();
1149 Register scratch2
= R2
.scratchReg();
1150 MOZ_ASSERT(nonFunctionEnv
!= scratch
&& nonFunctionEnv
!= scratch2
);
1152 masm
.store32(Imm32(0), frame
.addressOfFlags());
1153 if (handler
.function()) {
1154 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), scratch
);
1155 masm
.unboxObject(Address(scratch
, JSFunction::offsetOfEnvironment()),
1157 masm
.storePtr(scratch
, frame
.addressOfEnvironmentChain());
1159 masm
.storePtr(nonFunctionEnv
, frame
.addressOfEnvironmentChain());
1162 // If cx->inlinedICScript contains an inlined ICScript (passed from
1163 // the caller), take that ICScript and store it in the frame, then
1164 // overwrite cx->inlinedICScript with nullptr.
1165 Label notInlined
, done
;
1166 masm
.movePtr(ImmPtr(cx
->addressOfInlinedICScript()), scratch
);
1167 Address
inlinedAddr(scratch
, 0);
1168 masm
.branchPtr(Assembler::Equal
, inlinedAddr
, ImmWord(0), ¬Inlined
);
1169 masm
.loadPtr(inlinedAddr
, scratch2
);
1170 masm
.storePtr(scratch2
, frame
.addressOfICScript());
1171 masm
.storePtr(ImmPtr(nullptr), inlinedAddr
);
1174 // Otherwise, store this script's default ICSCript in the frame.
1175 masm
.bind(¬Inlined
);
1176 masm
.storePtr(ImmPtr(handler
.script()->jitScript()->icScript()),
1177 frame
.addressOfICScript());
1182 void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv
) {
1183 MOZ_ASSERT(nonFunctionEnv
== R1
.scratchReg(),
1184 "Don't clobber nonFunctionEnv below");
1186 // If we have a dedicated PC register we use it as scratch1 to avoid a
1187 // register move below.
1189 HasInterpreterPCReg() ? InterpreterPCReg
: R0
.scratchReg();
1190 Register scratch2
= R2
.scratchReg();
1192 masm
.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER
),
1193 frame
.addressOfFlags());
1195 // Initialize interpreterScript.
1196 Label notFunction
, done
;
1197 masm
.loadPtr(frame
.addressOfCalleeToken(), scratch1
);
1198 masm
.branchTestPtr(Assembler::NonZero
, scratch1
, Imm32(CalleeTokenScriptBit
),
1201 // CalleeToken_Function or CalleeToken_FunctionConstructing.
1202 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
1203 masm
.unboxObject(Address(scratch1
, JSFunction::offsetOfEnvironment()),
1205 masm
.storePtr(scratch2
, frame
.addressOfEnvironmentChain());
1206 masm
.loadPrivate(Address(scratch1
, JSFunction::offsetOfJitInfoOrScript()),
1210 masm
.bind(¬Function
);
1212 // CalleeToken_Script.
1213 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
1214 masm
.storePtr(nonFunctionEnv
, frame
.addressOfEnvironmentChain());
1217 masm
.storePtr(scratch1
, frame
.addressOfInterpreterScript());
1219 // Initialize icScript and interpreterICEntry
1220 masm
.loadJitScript(scratch1
, scratch2
);
1221 masm
.computeEffectiveAddress(Address(scratch2
, JitScript::offsetOfICScript()),
1223 masm
.storePtr(scratch2
, frame
.addressOfICScript());
1224 masm
.computeEffectiveAddress(Address(scratch2
, ICScript::offsetOfICEntries()),
1226 masm
.storePtr(scratch2
, frame
.addressOfInterpreterICEntry());
1228 // Initialize interpreter pc.
1229 masm
.loadPtr(Address(scratch1
, JSScript::offsetOfSharedData()), scratch1
);
1230 masm
.loadPtr(Address(scratch1
, SharedImmutableScriptData::offsetOfISD()),
1232 masm
.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1
);
1234 if (HasInterpreterPCReg()) {
1235 MOZ_ASSERT(scratch1
== InterpreterPCReg
,
1236 "pc must be stored in the pc register");
1238 masm
.storePtr(scratch1
, frame
.addressOfInterpreterPC());
1242 // Assert we don't need a post write barrier to write sourceObj to a slot of
1243 // destObj. See comments in WarpBuilder::buildNamedLambdaEnv.
1244 static void AssertCanElidePostWriteBarrier(MacroAssembler
& masm
,
1245 Register destObj
, Register sourceObj
,
1249 masm
.branchPtrInNurseryChunk(Assembler::Equal
, destObj
, temp
, &ok
);
1250 masm
.branchPtrInNurseryChunk(Assembler::NotEqual
, sourceObj
, temp
, &ok
);
1251 masm
.assumeUnreachable("Unexpected missing post write barrier in Baseline");
1257 bool BaselineCompilerCodeGen::initEnvironmentChain() {
1258 if (!handler
.function()) {
1261 if (!handler
.script()->needsFunctionEnvironmentObjects()) {
1265 // Allocate a NamedLambdaObject and/or a CallObject. If the function needs
1266 // both, the NamedLambdaObject must enclose the CallObject. If one of the
1267 // allocations fails, we perform the whole operation in C++.
1269 JSObject
* templateEnv
= handler
.script()->jitScript()->templateEnvironment();
1270 MOZ_ASSERT(templateEnv
);
1272 CallObject
* callObjectTemplate
= nullptr;
1273 if (handler
.function()->needsCallObject()) {
1274 callObjectTemplate
= &templateEnv
->as
<CallObject
>();
1277 NamedLambdaObject
* namedLambdaTemplate
= nullptr;
1278 if (handler
.function()->needsNamedLambdaEnvironment()) {
1279 if (callObjectTemplate
) {
1280 templateEnv
= templateEnv
->enclosingEnvironment();
1282 namedLambdaTemplate
= &templateEnv
->as
<NamedLambdaObject
>();
1285 MOZ_ASSERT(namedLambdaTemplate
|| callObjectTemplate
);
1287 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
1288 Register newEnv
= regs
.takeAny();
1289 Register enclosingEnv
= regs
.takeAny();
1290 Register callee
= regs
.takeAny();
1291 Register temp
= regs
.takeAny();
1294 masm
.loadPtr(frame
.addressOfEnvironmentChain(), enclosingEnv
);
1295 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), callee
);
1297 // Allocate a NamedLambdaObject if needed.
1298 if (namedLambdaTemplate
) {
1299 TemplateObject
templateObject(namedLambdaTemplate
);
1300 masm
.createGCObject(newEnv
, temp
, templateObject
, gc::Heap::Default
, &fail
);
1302 // Store enclosing environment.
1303 Address
enclosingSlot(newEnv
,
1304 NamedLambdaObject::offsetOfEnclosingEnvironment());
1305 masm
.storeValue(JSVAL_TYPE_OBJECT
, enclosingEnv
, enclosingSlot
);
1306 AssertCanElidePostWriteBarrier(masm
, newEnv
, enclosingEnv
, temp
);
1309 Address
lambdaSlot(newEnv
, NamedLambdaObject::offsetOfLambdaSlot());
1310 masm
.storeValue(JSVAL_TYPE_OBJECT
, callee
, lambdaSlot
);
1311 AssertCanElidePostWriteBarrier(masm
, newEnv
, callee
, temp
);
1313 if (callObjectTemplate
) {
1314 masm
.movePtr(newEnv
, enclosingEnv
);
1318 // Allocate a CallObject if needed.
1319 if (callObjectTemplate
) {
1320 TemplateObject
templateObject(callObjectTemplate
);
1321 masm
.createGCObject(newEnv
, temp
, templateObject
, gc::Heap::Default
, &fail
);
1323 // Store enclosing environment.
1324 Address
enclosingSlot(newEnv
, CallObject::offsetOfEnclosingEnvironment());
1325 masm
.storeValue(JSVAL_TYPE_OBJECT
, enclosingEnv
, enclosingSlot
);
1326 AssertCanElidePostWriteBarrier(masm
, newEnv
, enclosingEnv
, temp
);
1329 Address
calleeSlot(newEnv
, CallObject::offsetOfCallee());
1330 masm
.storeValue(JSVAL_TYPE_OBJECT
, callee
, calleeSlot
);
1331 AssertCanElidePostWriteBarrier(masm
, newEnv
, callee
, temp
);
1334 // Update the frame's environment chain and mark it initialized.
1336 masm
.storePtr(newEnv
, frame
.addressOfEnvironmentChain());
1337 masm
.or32(Imm32(BaselineFrame::HAS_INITIAL_ENV
), frame
.addressOfFlags());
1344 masm
.loadBaselineFramePtr(FramePointer
, temp
);
1347 const CallVMPhase phase
= CallVMPhase::BeforePushingLocals
;
1349 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1350 if (!callVMNonOp
<Fn
, jit::InitFunctionEnvironmentObjects
>(phase
)) {
1359 bool BaselineInterpreterCodeGen::initEnvironmentChain() {
1360 // For function scripts, call InitFunctionEnvironmentObjects if needed. For
1361 // non-function scripts this is a no-op.
1364 masm
.branchTestPtr(Assembler::NonZero
, frame
.addressOfCalleeToken(),
1365 Imm32(CalleeTokenScriptBit
), &done
);
1367 auto initEnv
= [this]() {
1368 // Call into the VM to create the proper environment objects.
1371 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
1372 pushArg(R0
.scratchReg());
1374 const CallVMPhase phase
= CallVMPhase::BeforePushingLocals
;
1376 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1377 return callVMNonOp
<Fn
, jit::InitFunctionEnvironmentObjects
>(phase
);
1379 if (!emitTestScriptFlag(
1380 JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects
, true,
1381 initEnv
, R2
.scratchReg())) {
1390 template <typename Handler
>
1391 bool BaselineCodeGen
<Handler
>::emitInterruptCheck() {
1395 masm
.branch32(Assembler::Equal
, AbsoluteAddress(cx
->addressOfInterruptBits()),
1400 // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
1401 // from other callVMs that might happen at this pc.
1402 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::InterruptCheck
;
1404 using Fn
= bool (*)(JSContext
*);
1405 if (!callVM
<Fn
, InterruptCheck
>(kind
)) {
1414 bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
1415 frame
.assertSyncedStack();
1417 // Record native code offset for OSR from Baseline Interpreter into Baseline
1418 // JIT code. This is right before the warm-up check in the Baseline JIT code,
1419 // to make sure we can immediately enter Ion if the script is warm enough or
1420 // if --ion-eager is used.
1421 JSScript
* script
= handler
.script();
1422 jsbytecode
* pc
= handler
.pc();
1423 if (JSOp(*pc
) == JSOp::LoopHead
) {
1424 uint32_t pcOffset
= script
->pcToOffset(pc
);
1425 uint32_t nativeOffset
= masm
.currentOffset();
1426 if (!handler
.osrEntries().emplaceBack(pcOffset
, nativeOffset
)) {
1427 ReportOutOfMemory(cx
);
1432 // Emit no warm-up counter increments if Ion is not enabled or if the script
1433 // will never be Ion-compileable.
1434 if (!handler
.maybeIonCompileable()) {
1438 Register scriptReg
= R2
.scratchReg();
1439 Register countReg
= R0
.scratchReg();
1441 // Load the ICScript* in scriptReg.
1442 masm
.loadPtr(frame
.addressOfICScript(), scriptReg
);
1444 // Bump warm-up counter.
1445 Address
warmUpCounterAddr(scriptReg
, ICScript::offsetOfWarmUpCount());
1446 masm
.load32(warmUpCounterAddr
, countReg
);
1447 masm
.add32(Imm32(1), countReg
);
1448 masm
.store32(countReg
, warmUpCounterAddr
);
1450 if (!JitOptions
.disableInlining
) {
1451 // Consider trial inlining.
1452 // Note: unlike other warmup thresholds, where we try to enter a
1453 // higher tier whenever we are higher than a given warmup count,
1454 // trial inlining triggers once when reaching the threshold.
1455 Label noTrialInlining
;
1456 masm
.branch32(Assembler::NotEqual
, countReg
,
1457 Imm32(JitOptions
.trialInliningWarmUpThreshold
),
1461 masm
.PushBaselineFramePtr(FramePointer
, R0
.scratchReg());
1463 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1464 if (!callVMNonOp
<Fn
, DoTrialInlining
>()) {
1467 // Reload registers potentially clobbered by the call.
1468 masm
.loadPtr(frame
.addressOfICScript(), scriptReg
);
1469 masm
.load32(warmUpCounterAddr
, countReg
);
1470 masm
.bind(&noTrialInlining
);
1473 if (JSOp(*pc
) == JSOp::LoopHead
) {
1474 // If this is a loop where we can't OSR (for example because it's inside a
1475 // catch or finally block), increment the warmup counter but don't attempt
1476 // OSR (Ion/Warp only compiles the try block).
1477 if (!handler
.analysis().info(pc
).loopHeadCanOsr
) {
1484 const OptimizationInfo
* info
=
1485 IonOptimizations
.get(OptimizationLevel::Normal
);
1486 uint32_t warmUpThreshold
= info
->compilerWarmUpThreshold(cx
, script
, pc
);
1487 masm
.branch32(Assembler::LessThan
, countReg
, Imm32(warmUpThreshold
), &done
);
1489 // Don't trigger Warp compilations from trial-inlined scripts.
1490 Address
depthAddr(scriptReg
, ICScript::offsetOfDepth());
1491 masm
.branch32(Assembler::NotEqual
, depthAddr
, Imm32(0), &done
);
1493 // Load the IonScript* in scriptReg. We can load this from the ICScript*
1494 // because it must be an outer ICScript embedded in the JitScript.
1495 constexpr int32_t offset
= -int32_t(JitScript::offsetOfICScript()) +
1496 int32_t(JitScript::offsetOfIonScript());
1497 masm
.loadPtr(Address(scriptReg
, offset
), scriptReg
);
1499 // Do nothing if Ion is already compiling this script off-thread or if Ion has
1500 // been disabled for this script.
1501 masm
.branchPtr(Assembler::Equal
, scriptReg
, ImmPtr(IonCompilingScriptPtr
),
1503 masm
.branchPtr(Assembler::Equal
, scriptReg
, ImmPtr(IonDisabledScriptPtr
),
1506 // Try to compile and/or finish a compilation.
1507 if (JSOp(*pc
) == JSOp::LoopHead
) {
1508 // Try to OSR into Ion.
1509 computeFrameSize(R0
.scratchReg());
1513 pushBytecodePCArg();
1514 pushArg(R0
.scratchReg());
1515 masm
.PushBaselineFramePtr(FramePointer
, R0
.scratchReg());
1517 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, uint32_t, jsbytecode
*,
1519 if (!callVM
<Fn
, IonCompileScriptForBaselineOSR
>()) {
1523 // The return register holds the IonOsrTempData*. Perform OSR if it's not
1525 static_assert(ReturnReg
!= OsrFrameReg
,
1526 "Code below depends on osrDataReg != OsrFrameReg");
1527 Register osrDataReg
= ReturnReg
;
1528 masm
.branchTestPtr(Assembler::Zero
, osrDataReg
, osrDataReg
, &done
);
1530 // Success! Switch from Baseline JIT code to Ion JIT code.
1532 // At this point, stack looks like:
1534 // +-> [...Calling-Frame...]
1535 // | [...Actual-Args/ThisV/ArgCount/Callee...]
1538 // +---[Saved-FramePtr]
1539 // [...Baseline-Frame...]
1542 // Get a scratch register that's not osrDataReg or OsrFrameReg.
1543 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
1544 MOZ_ASSERT(!regs
.has(FramePointer
));
1545 regs
.take(osrDataReg
);
1546 regs
.take(OsrFrameReg
);
1548 Register scratchReg
= regs
.takeAny();
1550 // If profiler instrumentation is on, ensure that lastProfilingFrame is
1551 // the frame currently being OSR-ed
1554 AbsoluteAddress
addressOfEnabled(
1555 cx
->runtime()->geckoProfiler().addressOfEnabled());
1556 masm
.branch32(Assembler::Equal
, addressOfEnabled
, Imm32(0), &checkOk
);
1557 masm
.loadPtr(AbsoluteAddress((void*)&cx
->jitActivation
), scratchReg
);
1559 Address(scratchReg
, JitActivation::offsetOfLastProfilingFrame()),
1562 // It may be the case that we entered the baseline frame with
1563 // profiling turned off on, then in a call within a loop (i.e. a
1564 // callee frame), turn on profiling, then return to this frame,
1565 // and then OSR with profiling turned on. In this case, allow for
1566 // lastProfilingFrame to be null.
1567 masm
.branchPtr(Assembler::Equal
, scratchReg
, ImmWord(0), &checkOk
);
1569 masm
.branchPtr(Assembler::Equal
, FramePointer
, scratchReg
, &checkOk
);
1570 masm
.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
1571 masm
.bind(&checkOk
);
1575 // Restore the stack pointer so that the saved frame pointer is on top of
1577 masm
.moveToStackPtr(FramePointer
);
1580 masm
.loadPtr(Address(osrDataReg
, IonOsrTempData::offsetOfBaselineFrame()),
1582 masm
.jump(Address(osrDataReg
, IonOsrTempData::offsetOfJitCode()));
1586 masm
.PushBaselineFramePtr(FramePointer
, R0
.scratchReg());
1588 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1589 if (!callVMNonOp
<Fn
, IonCompileScriptForBaselineAtEntry
>()) {
1599 bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
1600 Register scriptReg
= R2
.scratchReg();
1601 Register countReg
= R0
.scratchReg();
1603 // Load the JitScript* in scriptReg.
1604 loadScript(scriptReg
);
1605 masm
.loadJitScript(scriptReg
, scriptReg
);
1607 // Bump warm-up counter.
1608 Address
warmUpCounterAddr(scriptReg
, JitScript::offsetOfWarmUpCount());
1609 masm
.load32(warmUpCounterAddr
, countReg
);
1610 masm
.add32(Imm32(1), countReg
);
1611 masm
.store32(countReg
, warmUpCounterAddr
);
1613 // If the script is warm enough for Baseline compilation, call into the VM to
1616 masm
.branch32(Assembler::BelowOrEqual
, countReg
,
1617 Imm32(JitOptions
.baselineJitWarmUpThreshold
), &done
);
1618 masm
.branchPtr(Assembler::Equal
,
1619 Address(scriptReg
, JitScript::offsetOfBaselineScript()),
1620 ImmPtr(BaselineDisabledScriptPtr
), &done
);
1624 masm
.PushBaselineFramePtr(FramePointer
, R0
.scratchReg());
1626 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, uint8_t**);
1627 if (!callVM
<Fn
, BaselineCompileFromBaselineInterpreter
>()) {
1631 // If the function returned nullptr we either skipped compilation or were
1632 // unable to compile the script. Continue running in the interpreter.
1633 masm
.branchTestPtr(Assembler::Zero
, ReturnReg
, ReturnReg
, &done
);
1635 // Success! Switch from interpreter to JIT code by jumping to the
1636 // corresponding code in the BaselineScript.
1638 // This works because BaselineCompiler uses the same frame layout (stack is
1639 // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
1640 // already cleared the RUNNING_IN_INTERPRETER flag for us.
1641 // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
1642 masm
.jump(ReturnReg
);
1649 bool BaselineCompiler::emitDebugTrap() {
1650 MOZ_ASSERT(compileDebugInstrumentation());
1651 MOZ_ASSERT(frame
.numUnsyncedSlots() == 0);
1653 JSScript
* script
= handler
.script();
1654 bool enabled
= DebugAPI::stepModeEnabled(script
) ||
1655 DebugAPI::hasBreakpointsAt(script
, handler
.pc());
1657 // Emit patchable call to debug trap handler.
1658 JitCode
* handlerCode
= cx
->runtime()->jitRuntime()->debugTrapHandler(
1659 cx
, DebugTrapHandlerKind::Compiler
);
1664 CodeOffset nativeOffset
= masm
.toggledCall(handlerCode
, enabled
);
1666 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
1667 if (!debugTrapEntries_
.emplaceBack(pcOffset
, nativeOffset
.offset())) {
1668 ReportOutOfMemory(cx
);
1672 // Add a RetAddrEntry for the return offset -> pc mapping.
1673 return handler
.recordCallRetAddr(cx
, RetAddrEntry::Kind::DebugTrap
,
1674 masm
.currentOffset());
1677 template <typename Handler
>
1678 void BaselineCodeGen
<Handler
>::emitProfilerEnterFrame() {
1679 // Store stack position to lastProfilingFrame variable, guarded by a toggled
1680 // jump. Starts off initially disabled.
1682 CodeOffset toggleOffset
= masm
.toggledJump(&noInstrument
);
1683 masm
.profilerEnterFrame(FramePointer
, R0
.scratchReg());
1684 masm
.bind(&noInstrument
);
1686 // Store the start offset in the appropriate location.
1687 MOZ_ASSERT(!profilerEnterFrameToggleOffset_
.bound());
1688 profilerEnterFrameToggleOffset_
= toggleOffset
;
1691 template <typename Handler
>
1692 void BaselineCodeGen
<Handler
>::emitProfilerExitFrame() {
1693 // Store previous frame to lastProfilingFrame variable, guarded by a toggled
1694 // jump. Starts off initially disabled.
1696 CodeOffset toggleOffset
= masm
.toggledJump(&noInstrument
);
1697 masm
.profilerExitFrame();
1698 masm
.bind(&noInstrument
);
1700 // Store the start offset in the appropriate location.
1701 MOZ_ASSERT(!profilerExitFrameToggleOffset_
.bound());
1702 profilerExitFrameToggleOffset_
= toggleOffset
;
1705 template <typename Handler
>
1706 bool BaselineCodeGen
<Handler
>::emit_Nop() {
1710 template <typename Handler
>
1711 bool BaselineCodeGen
<Handler
>::emit_NopDestructuring() {
1715 template <typename Handler
>
1716 bool BaselineCodeGen
<Handler
>::emit_NopIsAssignOp() {
1720 template <typename Handler
>
1721 bool BaselineCodeGen
<Handler
>::emit_TryDestructuring() {
1725 template <typename Handler
>
1726 bool BaselineCodeGen
<Handler
>::emit_Pop() {
1732 bool BaselineCompilerCodeGen::emit_PopN() {
1733 frame
.popn(GET_UINT16(handler
.pc()));
1738 bool BaselineInterpreterCodeGen::emit_PopN() {
1739 LoadUint16Operand(masm
, R0
.scratchReg());
1740 frame
.popn(R0
.scratchReg());
1745 bool BaselineCompilerCodeGen::emit_DupAt() {
1748 // DupAt takes a value on the stack and re-pushes it on top. It's like
1749 // GetLocal but it addresses from the top of the stack instead of from the
1752 int depth
= -(GET_UINT24(handler
.pc()) + 1);
1753 masm
.loadValue(frame
.addressOfStackValue(depth
), R0
);
1759 bool BaselineInterpreterCodeGen::emit_DupAt() {
1760 LoadUint24Operand(masm
, 0, R0
.scratchReg());
1761 masm
.loadValue(frame
.addressOfStackValue(R0
.scratchReg()), R0
);
1766 template <typename Handler
>
1767 bool BaselineCodeGen
<Handler
>::emit_Dup() {
1768 // Keep top stack value in R0, sync the rest so that we can use R1. We use
1769 // separate registers because every register can be used by at most one
1771 frame
.popRegsAndSync(1);
1772 masm
.moveValue(R0
, R1
);
1774 // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
1780 template <typename Handler
>
1781 bool BaselineCodeGen
<Handler
>::emit_Dup2() {
1784 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
1785 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
1792 template <typename Handler
>
1793 bool BaselineCodeGen
<Handler
>::emit_Swap() {
1794 // Keep top stack values in R0 and R1.
1795 frame
.popRegsAndSync(2);
1803 bool BaselineCompilerCodeGen::emit_Pick() {
1806 // Pick takes a value on the stack and moves it to the top.
1807 // For instance, pick 2:
1808 // before: A B C D E
1809 // after : A B D E C
1811 // First, move value at -(amount + 1) into R0.
1812 int32_t depth
= -(GET_INT8(handler
.pc()) + 1);
1813 masm
.loadValue(frame
.addressOfStackValue(depth
), R0
);
1815 // Move the other values down.
1817 for (; depth
< 0; depth
++) {
1818 Address source
= frame
.addressOfStackValue(depth
);
1819 Address dest
= frame
.addressOfStackValue(depth
- 1);
1820 masm
.loadValue(source
, R1
);
1821 masm
.storeValue(R1
, dest
);
1831 bool BaselineInterpreterCodeGen::emit_Pick() {
1832 // First, move the value to move up into R0.
1833 Register scratch
= R2
.scratchReg();
1834 LoadUint8Operand(masm
, scratch
);
1835 masm
.loadValue(frame
.addressOfStackValue(scratch
), R0
);
1837 // Move the other values down.
1840 masm
.branchSub32(Assembler::Signed
, Imm32(1), scratch
, &done
);
1842 masm
.loadValue(frame
.addressOfStackValue(scratch
), R1
);
1843 masm
.storeValue(R1
, frame
.addressOfStackValue(scratch
, sizeof(Value
)));
1849 // Replace value on top of the stack with R0.
1850 masm
.storeValue(R0
, frame
.addressOfStackValue(-1));
1855 bool BaselineCompilerCodeGen::emit_Unpick() {
1858 // Pick takes the top of the stack value and moves it under the nth value.
1859 // For instance, unpick 2:
1860 // before: A B C D E
1861 // after : A B E C D
1863 // First, move value at -1 into R0.
1864 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
1866 MOZ_ASSERT(GET_INT8(handler
.pc()) > 0,
1867 "Interpreter code assumes JSOp::Unpick operand > 0");
1869 // Move the other values up.
1870 int32_t depth
= -(GET_INT8(handler
.pc()) + 1);
1871 for (int32_t i
= -1; i
> depth
; i
--) {
1872 Address source
= frame
.addressOfStackValue(i
- 1);
1873 Address dest
= frame
.addressOfStackValue(i
);
1874 masm
.loadValue(source
, R1
);
1875 masm
.storeValue(R1
, dest
);
1878 // Store R0 under the nth value.
1879 Address dest
= frame
.addressOfStackValue(depth
);
1880 masm
.storeValue(R0
, dest
);
1885 bool BaselineInterpreterCodeGen::emit_Unpick() {
1886 Register scratch
= R2
.scratchReg();
1887 LoadUint8Operand(masm
, scratch
);
1889 // Move the top value into R0.
1890 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
1892 // Overwrite the nth stack value with R0 but first save the old value in R1.
1893 masm
.loadValue(frame
.addressOfStackValue(scratch
), R1
);
1894 masm
.storeValue(R0
, frame
.addressOfStackValue(scratch
));
1896 // Now for each slot x in [n-1, 1] do the following:
1898 // * Store the value in slot x in R0.
1899 // * Store the value in the previous slot (now in R1) in slot x.
1903 // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
1907 masm
.branch32(Assembler::GreaterThan
, scratch
, Imm32(0), &ok
);
1908 masm
.assumeUnreachable("JSOp::Unpick with operand <= 0?");
1915 masm
.branchSub32(Assembler::Zero
, Imm32(1), scratch
, &done
);
1917 // Overwrite stack slot x with slot x + 1, saving the old value in R1.
1918 masm
.loadValue(frame
.addressOfStackValue(scratch
), R0
);
1919 masm
.storeValue(R1
, frame
.addressOfStackValue(scratch
));
1920 masm
.moveValue(R0
, R1
);
1924 // Finally, replace the value on top of the stack (slot 0) with R1. This is
1925 // the value that used to be in slot 1.
1927 masm
.storeValue(R1
, frame
.addressOfStackValue(-1));
1932 void BaselineCompilerCodeGen::emitJump() {
1933 jsbytecode
* pc
= handler
.pc();
1934 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc
)));
1935 frame
.assertSyncedStack();
1937 jsbytecode
* target
= pc
+ GET_JUMP_OFFSET(pc
);
1938 masm
.jump(handler
.labelOf(target
));
1942 void BaselineInterpreterCodeGen::emitJump() {
1943 // We have to add the current pc's jump offset to the current pc. We can use
1944 // R0 and R1 as scratch because we jump to the "next op" label so these
1945 // registers aren't in use at this point.
1946 Register scratch1
= R0
.scratchReg();
1947 Register scratch2
= R1
.scratchReg();
1948 Register pc
= LoadBytecodePC(masm
, scratch1
);
1949 LoadInt32OperandSignExtendToPtr(masm
, pc
, scratch2
);
1950 if (HasInterpreterPCReg()) {
1951 masm
.addPtr(scratch2
, InterpreterPCReg
);
1953 masm
.addPtr(pc
, scratch2
);
1954 masm
.storePtr(scratch2
, frame
.addressOfInterpreterPC());
1956 masm
.jump(handler
.interpretOpWithPCRegLabel());
1960 void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue
,
1962 jsbytecode
* pc
= handler
.pc();
1963 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc
)));
1964 frame
.assertSyncedStack();
1966 jsbytecode
* target
= pc
+ GET_JUMP_OFFSET(pc
);
1967 masm
.branchTestBooleanTruthy(branchIfTrue
, val
, handler
.labelOf(target
));
1971 void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue
,
1974 masm
.branchTestBooleanTruthy(!branchIfTrue
, val
, &done
);
1980 template <typename F1
, typename F2
>
1981 [[nodiscard
]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
1982 JSScript::ImmutableFlags flag
, const F1
& ifSet
, const F2
& ifNotSet
,
1984 if (handler
.script()->hasFlag(flag
)) {
1991 template <typename F1
, typename F2
>
1992 [[nodiscard
]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
1993 JSScript::ImmutableFlags flag
, const F1
& ifSet
, const F2
& ifNotSet
,
1995 Label flagNotSet
, done
;
1996 loadScript(scratch
);
1997 masm
.branchTest32(Assembler::Zero
,
1998 Address(scratch
, JSScript::offsetOfImmutableFlags()),
1999 Imm32(uint32_t(flag
)), &flagNotSet
);
2006 masm
.bind(&flagNotSet
);
2018 template <typename F
>
2019 [[nodiscard
]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2020 JSScript::ImmutableFlags flag
, bool value
, const F
& emit
,
2022 if (handler
.script()->hasFlag(flag
) == value
) {
2029 template <typename F
>
2030 [[nodiscard
]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2031 JSScript::MutableFlags flag
, bool value
, const F
& emit
, Register scratch
) {
2032 if (handler
.script()->hasFlag(flag
) == value
) {
2039 template <typename F
>
2040 [[nodiscard
]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2041 JSScript::ImmutableFlags flag
, bool value
, const F
& emit
,
2044 loadScript(scratch
);
2045 masm
.branchTest32(value
? Assembler::Zero
: Assembler::NonZero
,
2046 Address(scratch
, JSScript::offsetOfImmutableFlags()),
2047 Imm32(uint32_t(flag
)), &done
);
2059 template <typename F
>
2060 [[nodiscard
]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2061 JSScript::MutableFlags flag
, bool value
, const F
& emit
, Register scratch
) {
2063 loadScript(scratch
);
2064 masm
.branchTest32(value
? Assembler::Zero
: Assembler::NonZero
,
2065 Address(scratch
, JSScript::offsetOfMutableFlags()),
2066 Imm32(uint32_t(flag
)), &done
);
2077 template <typename Handler
>
2078 bool BaselineCodeGen
<Handler
>::emit_Goto() {
2084 template <typename Handler
>
2085 bool BaselineCodeGen
<Handler
>::emitTest(bool branchIfTrue
) {
2086 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2088 // Keep top stack value in R0.
2089 frame
.popRegsAndSync(1);
2091 if (!knownBoolean
&& !emitNextIC()) {
2095 // IC will leave a BooleanValue in R0, just need to branch on it.
2096 emitTestBooleanTruthy(branchIfTrue
, R0
);
2100 template <typename Handler
>
2101 bool BaselineCodeGen
<Handler
>::emit_JumpIfFalse() {
2102 return emitTest(false);
2105 template <typename Handler
>
2106 bool BaselineCodeGen
<Handler
>::emit_JumpIfTrue() {
2107 return emitTest(true);
2110 template <typename Handler
>
2111 bool BaselineCodeGen
<Handler
>::emitAndOr(bool branchIfTrue
) {
2112 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2114 // And and Or leave the original value on the stack.
2117 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2118 if (!knownBoolean
&& !emitNextIC()) {
2122 emitTestBooleanTruthy(branchIfTrue
, R0
);
2126 template <typename Handler
>
2127 bool BaselineCodeGen
<Handler
>::emit_And() {
2128 return emitAndOr(false);
2131 template <typename Handler
>
2132 bool BaselineCodeGen
<Handler
>::emit_Or() {
2133 return emitAndOr(true);
2136 template <typename Handler
>
2137 bool BaselineCodeGen
<Handler
>::emit_Coalesce() {
2138 // Coalesce leaves the original value on the stack.
2141 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2143 Label undefinedOrNull
;
2145 masm
.branchTestUndefined(Assembler::Equal
, R0
, &undefinedOrNull
);
2146 masm
.branchTestNull(Assembler::Equal
, R0
, &undefinedOrNull
);
2149 masm
.bind(&undefinedOrNull
);
2154 template <typename Handler
>
2155 bool BaselineCodeGen
<Handler
>::emit_Not() {
2156 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2158 // Keep top stack value in R0.
2159 frame
.popRegsAndSync(1);
2161 if (!knownBoolean
&& !emitNextIC()) {
2165 masm
.notBoolean(R0
);
2167 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
2171 template <typename Handler
>
2172 bool BaselineCodeGen
<Handler
>::emit_Pos() {
2173 return emitUnaryArith();
2176 template <typename Handler
>
2177 bool BaselineCodeGen
<Handler
>::emit_ToNumeric() {
2178 return emitUnaryArith();
2181 template <typename Handler
>
2182 bool BaselineCodeGen
<Handler
>::emit_LoopHead() {
2183 if (!emit_JumpTarget()) {
2186 if (!emitInterruptCheck()) {
2189 if (!emitWarmUpCounterIncrement()) {
2195 template <typename Handler
>
2196 bool BaselineCodeGen
<Handler
>::emit_Void() {
2198 frame
.push(UndefinedValue());
2202 template <typename Handler
>
2203 bool BaselineCodeGen
<Handler
>::emit_Undefined() {
2204 frame
.push(UndefinedValue());
2208 template <typename Handler
>
2209 bool BaselineCodeGen
<Handler
>::emit_Hole() {
2210 frame
.push(MagicValue(JS_ELEMENTS_HOLE
));
2214 template <typename Handler
>
2215 bool BaselineCodeGen
<Handler
>::emit_Null() {
2216 frame
.push(NullValue());
2220 template <typename Handler
>
2221 bool BaselineCodeGen
<Handler
>::emit_CheckIsObj() {
2223 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2226 masm
.branchTestObject(Assembler::Equal
, R0
, &ok
);
2230 pushUint8BytecodeOperandArg(R0
.scratchReg());
2232 using Fn
= bool (*)(JSContext
*, CheckIsObjectKind
);
2233 if (!callVM
<Fn
, ThrowCheckIsObject
>()) {
2241 template <typename Handler
>
2242 bool BaselineCodeGen
<Handler
>::emit_CheckThis() {
2244 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2246 return emitCheckThis(R0
);
2249 template <typename Handler
>
2250 bool BaselineCodeGen
<Handler
>::emit_CheckThisReinit() {
2252 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2254 return emitCheckThis(R0
, /* reinit = */ true);
2257 template <typename Handler
>
2258 bool BaselineCodeGen
<Handler
>::emitCheckThis(ValueOperand val
, bool reinit
) {
2261 masm
.branchTestMagic(Assembler::Equal
, val
, &thisOK
);
2263 masm
.branchTestMagic(Assembler::NotEqual
, val
, &thisOK
);
2269 using Fn
= bool (*)(JSContext
*);
2270 if (!callVM
<Fn
, ThrowInitializedThis
>()) {
2274 using Fn
= bool (*)(JSContext
*);
2275 if (!callVM
<Fn
, ThrowUninitializedThis
>()) {
2284 template <typename Handler
>
2285 bool BaselineCodeGen
<Handler
>::emit_CheckReturn() {
2286 MOZ_ASSERT_IF(handler
.maybeScript(),
2287 handler
.maybeScript()->isDerivedClassConstructor());
2289 // Load |this| in R0, return value in R1.
2290 frame
.popRegsAndSync(1);
2291 emitLoadReturnValue(R1
);
2293 Label done
, returnBad
, checkThis
;
2294 masm
.branchTestObject(Assembler::NotEqual
, R1
, &checkThis
);
2296 masm
.moveValue(R1
, R0
);
2299 masm
.bind(&checkThis
);
2300 masm
.branchTestUndefined(Assembler::NotEqual
, R1
, &returnBad
);
2301 masm
.branchTestMagic(Assembler::NotEqual
, R0
, &done
);
2302 masm
.bind(&returnBad
);
2307 using Fn
= bool (*)(JSContext
*, HandleValue
);
2308 if (!callVM
<Fn
, ThrowBadDerivedReturnOrUninitializedThis
>()) {
2311 masm
.assumeUnreachable("Should throw on bad derived constructor return");
2315 // Push |rval| or |this| onto the stack.
2320 template <typename Handler
>
2321 bool BaselineCodeGen
<Handler
>::emit_FunctionThis() {
2322 MOZ_ASSERT_IF(handler
.maybeFunction(), !handler
.maybeFunction()->isArrow());
2326 auto boxThis
= [this]() {
2327 // Load |thisv| in R0. Skip the call if it's already an object.
2329 frame
.popRegsAndSync(1);
2330 masm
.branchTestObject(Assembler::Equal
, R0
, &skipCall
);
2333 masm
.loadBaselineFramePtr(FramePointer
, R1
.scratchReg());
2335 pushArg(R1
.scratchReg());
2337 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, MutableHandleValue
);
2338 if (!callVM
<Fn
, BaselineGetFunctionThis
>()) {
2342 masm
.bind(&skipCall
);
2347 // In strict mode code, |this| is left alone.
2348 return emitTestScriptFlag(JSScript::ImmutableFlags::Strict
, false, boxThis
,
2352 template <typename Handler
>
2353 bool BaselineCodeGen
<Handler
>::emit_GlobalThis() {
2356 loadGlobalThisValue(R0
);
2361 template <typename Handler
>
2362 bool BaselineCodeGen
<Handler
>::emit_NonSyntacticGlobalThis() {
2367 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
2368 pushArg(R0
.scratchReg());
2370 using Fn
= void (*)(JSContext
*, HandleObject
, MutableHandleValue
);
2371 if (!callVM
<Fn
, GetNonSyntacticGlobalThis
>()) {
2379 template <typename Handler
>
2380 bool BaselineCodeGen
<Handler
>::emit_True() {
2381 frame
.push(BooleanValue(true));
2385 template <typename Handler
>
2386 bool BaselineCodeGen
<Handler
>::emit_False() {
2387 frame
.push(BooleanValue(false));
2391 template <typename Handler
>
2392 bool BaselineCodeGen
<Handler
>::emit_Zero() {
2393 frame
.push(Int32Value(0));
2397 template <typename Handler
>
2398 bool BaselineCodeGen
<Handler
>::emit_One() {
2399 frame
.push(Int32Value(1));
2404 bool BaselineCompilerCodeGen::emit_Int8() {
2405 frame
.push(Int32Value(GET_INT8(handler
.pc())));
2410 bool BaselineInterpreterCodeGen::emit_Int8() {
2411 LoadInt8Operand(masm
, R0
.scratchReg());
2412 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2418 bool BaselineCompilerCodeGen::emit_Int32() {
2419 frame
.push(Int32Value(GET_INT32(handler
.pc())));
2424 bool BaselineInterpreterCodeGen::emit_Int32() {
2425 LoadInt32Operand(masm
, R0
.scratchReg());
2426 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2432 bool BaselineCompilerCodeGen::emit_Uint16() {
2433 frame
.push(Int32Value(GET_UINT16(handler
.pc())));
2438 bool BaselineInterpreterCodeGen::emit_Uint16() {
2439 LoadUint16Operand(masm
, R0
.scratchReg());
2440 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2446 bool BaselineCompilerCodeGen::emit_Uint24() {
2447 frame
.push(Int32Value(GET_UINT24(handler
.pc())));
2452 bool BaselineInterpreterCodeGen::emit_Uint24() {
2453 LoadUint24Operand(masm
, 0, R0
.scratchReg());
2454 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2460 bool BaselineCompilerCodeGen::emit_Double() {
2461 frame
.push(GET_INLINE_VALUE(handler
.pc()));
2466 bool BaselineInterpreterCodeGen::emit_Double() {
2467 LoadInlineValueOperand(masm
, R0
);
2473 bool BaselineCompilerCodeGen::emit_BigInt() {
2474 BigInt
* bi
= handler
.script()->getBigInt(handler
.pc());
2475 frame
.push(BigIntValue(bi
));
2480 bool BaselineInterpreterCodeGen::emit_BigInt() {
2481 Register scratch1
= R0
.scratchReg();
2482 Register scratch2
= R1
.scratchReg();
2483 loadScriptGCThing(ScriptGCThingType::BigInt
, scratch1
, scratch2
);
2484 masm
.tagValue(JSVAL_TYPE_BIGINT
, scratch1
, R0
);
2490 bool BaselineCompilerCodeGen::emit_String() {
2491 frame
.push(StringValue(handler
.script()->getString(handler
.pc())));
2496 bool BaselineInterpreterCodeGen::emit_String() {
2497 Register scratch1
= R0
.scratchReg();
2498 Register scratch2
= R1
.scratchReg();
2499 loadScriptGCThing(ScriptGCThingType::String
, scratch1
, scratch2
);
2500 masm
.tagValue(JSVAL_TYPE_STRING
, scratch1
, R0
);
2506 bool BaselineCompilerCodeGen::emit_Symbol() {
2507 unsigned which
= GET_UINT8(handler
.pc());
2508 JS::Symbol
* sym
= cx
->runtime()->wellKnownSymbols
->get(which
);
2509 frame
.push(SymbolValue(sym
));
2514 bool BaselineInterpreterCodeGen::emit_Symbol() {
2515 Register scratch1
= R0
.scratchReg();
2516 Register scratch2
= R1
.scratchReg();
2517 LoadUint8Operand(masm
, scratch1
);
2519 masm
.movePtr(ImmPtr(cx
->runtime()->wellKnownSymbols
), scratch2
);
2520 masm
.loadPtr(BaseIndex(scratch2
, scratch1
, ScalePointer
), scratch1
);
2522 masm
.tagValue(JSVAL_TYPE_SYMBOL
, scratch1
, R0
);
2528 bool BaselineCompilerCodeGen::emit_Object() {
2529 frame
.push(ObjectValue(*handler
.script()->getObject(handler
.pc())));
2534 bool BaselineInterpreterCodeGen::emit_Object() {
2535 Register scratch1
= R0
.scratchReg();
2536 Register scratch2
= R1
.scratchReg();
2537 loadScriptGCThing(ScriptGCThingType::Object
, scratch1
, scratch2
);
2538 masm
.tagValue(JSVAL_TYPE_OBJECT
, scratch1
, R0
);
2543 template <typename Handler
>
2544 bool BaselineCodeGen
<Handler
>::emit_CallSiteObj() {
2545 return emit_Object();
2548 template <typename Handler
>
2549 bool BaselineCodeGen
<Handler
>::emit_RegExp() {
2551 pushScriptGCThingArg(ScriptGCThingType::RegExp
, R0
.scratchReg(),
2554 using Fn
= JSObject
* (*)(JSContext
*, Handle
<RegExpObject
*>);
2555 if (!callVM
<Fn
, CloneRegExpObject
>()) {
2559 // Box and push return value.
2560 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
#ifdef ENABLE_RECORD_TUPLE
// Record/Tuple opcodes are not implemented in the JITs; scripts containing
// them must not reach baseline compilation.
#  define UNSUPPORTED_OPCODE(OP)                              \
    template <typename Handler>                               \
    bool BaselineCodeGen<Handler>::emit_##OP() {              \
      MOZ_CRASH("Record and Tuple are not supported by jit"); \
      return false;                                           \
    }

UNSUPPORTED_OPCODE(InitRecord)
UNSUPPORTED_OPCODE(AddRecordProperty)
UNSUPPORTED_OPCODE(AddRecordSpread)
UNSUPPORTED_OPCODE(FinishRecord)
UNSUPPORTED_OPCODE(InitTuple)
UNSUPPORTED_OPCODE(AddTupleElement)
UNSUPPORTED_OPCODE(FinishTuple)

#  undef UNSUPPORTED_OPCODE
#endif
2584 template <typename Handler
>
2585 bool BaselineCodeGen
<Handler
>::emit_Lambda() {
2587 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
2589 pushArg(R0
.scratchReg());
2590 pushScriptGCThingArg(ScriptGCThingType::Function
, R0
.scratchReg(),
2593 using Fn
= JSObject
* (*)(JSContext
*, HandleFunction
, HandleObject
);
2594 if (!callVM
<Fn
, js::Lambda
>()) {
2598 // Box and push return value.
2599 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2604 template <typename Handler
>
2605 bool BaselineCodeGen
<Handler
>::emit_SetFunName() {
2606 frame
.popRegsAndSync(2);
2611 masm
.unboxObject(R0
, R0
.scratchReg());
2615 pushUint8BytecodeOperandArg(R2
.scratchReg());
2617 pushArg(R0
.scratchReg());
2620 bool (*)(JSContext
*, HandleFunction
, HandleValue
, FunctionPrefixKind
);
2621 return callVM
<Fn
, SetFunctionName
>();
2624 template <typename Handler
>
2625 bool BaselineCodeGen
<Handler
>::emit_BitOr() {
2626 return emitBinaryArith();
2629 template <typename Handler
>
2630 bool BaselineCodeGen
<Handler
>::emit_BitXor() {
2631 return emitBinaryArith();
2634 template <typename Handler
>
2635 bool BaselineCodeGen
<Handler
>::emit_BitAnd() {
2636 return emitBinaryArith();
2639 template <typename Handler
>
2640 bool BaselineCodeGen
<Handler
>::emit_Lsh() {
2641 return emitBinaryArith();
2644 template <typename Handler
>
2645 bool BaselineCodeGen
<Handler
>::emit_Rsh() {
2646 return emitBinaryArith();
2649 template <typename Handler
>
2650 bool BaselineCodeGen
<Handler
>::emit_Ursh() {
2651 return emitBinaryArith();
2654 template <typename Handler
>
2655 bool BaselineCodeGen
<Handler
>::emit_Add() {
2656 return emitBinaryArith();
2659 template <typename Handler
>
2660 bool BaselineCodeGen
<Handler
>::emit_Sub() {
2661 return emitBinaryArith();
2664 template <typename Handler
>
2665 bool BaselineCodeGen
<Handler
>::emit_Mul() {
2666 return emitBinaryArith();
2669 template <typename Handler
>
2670 bool BaselineCodeGen
<Handler
>::emit_Div() {
2671 return emitBinaryArith();
2674 template <typename Handler
>
2675 bool BaselineCodeGen
<Handler
>::emit_Mod() {
2676 return emitBinaryArith();
2679 template <typename Handler
>
2680 bool BaselineCodeGen
<Handler
>::emit_Pow() {
2681 return emitBinaryArith();
2684 template <typename Handler
>
2685 bool BaselineCodeGen
<Handler
>::emitBinaryArith() {
2686 // Keep top JSStack value in R0 and R2
2687 frame
.popRegsAndSync(2);
2690 if (!emitNextIC()) {
2694 // Mark R0 as pushed stack value.
2699 template <typename Handler
>
2700 bool BaselineCodeGen
<Handler
>::emitUnaryArith() {
2701 // Keep top stack value in R0.
2702 frame
.popRegsAndSync(1);
2705 if (!emitNextIC()) {
2709 // Mark R0 as pushed stack value.
2714 template <typename Handler
>
2715 bool BaselineCodeGen
<Handler
>::emit_BitNot() {
2716 return emitUnaryArith();
2719 template <typename Handler
>
2720 bool BaselineCodeGen
<Handler
>::emit_Neg() {
2721 return emitUnaryArith();
2724 template <typename Handler
>
2725 bool BaselineCodeGen
<Handler
>::emit_Inc() {
2726 return emitUnaryArith();
2729 template <typename Handler
>
2730 bool BaselineCodeGen
<Handler
>::emit_Dec() {
2731 return emitUnaryArith();
2734 template <typename Handler
>
2735 bool BaselineCodeGen
<Handler
>::emit_Lt() {
2736 return emitCompare();
2739 template <typename Handler
>
2740 bool BaselineCodeGen
<Handler
>::emit_Le() {
2741 return emitCompare();
2744 template <typename Handler
>
2745 bool BaselineCodeGen
<Handler
>::emit_Gt() {
2746 return emitCompare();
2749 template <typename Handler
>
2750 bool BaselineCodeGen
<Handler
>::emit_Ge() {
2751 return emitCompare();
2754 template <typename Handler
>
2755 bool BaselineCodeGen
<Handler
>::emit_Eq() {
2756 return emitCompare();
2759 template <typename Handler
>
2760 bool BaselineCodeGen
<Handler
>::emit_Ne() {
2761 return emitCompare();
2764 template <typename Handler
>
2765 bool BaselineCodeGen
<Handler
>::emitCompare() {
2766 // Keep top JSStack value in R0 and R1.
2767 frame
.popRegsAndSync(2);
2770 if (!emitNextIC()) {
2774 // Mark R0 as pushed stack value.
2775 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
2779 template <typename Handler
>
2780 bool BaselineCodeGen
<Handler
>::emit_StrictEq() {
2781 return emitCompare();
2784 template <typename Handler
>
2785 bool BaselineCodeGen
<Handler
>::emit_StrictNe() {
2786 return emitCompare();
2789 template <typename Handler
>
2790 bool BaselineCodeGen
<Handler
>::emit_Case() {
2791 frame
.popRegsAndSync(1);
2794 masm
.branchTestBooleanTruthy(/* branchIfTrue */ false, R0
, &done
);
2796 // Pop the switch value if the case matches.
2797 masm
.addToStackPtr(Imm32(sizeof(Value
)));
2804 template <typename Handler
>
2805 bool BaselineCodeGen
<Handler
>::emit_Default() {
2810 template <typename Handler
>
2811 bool BaselineCodeGen
<Handler
>::emit_Lineno() {
2815 template <typename Handler
>
2816 bool BaselineCodeGen
<Handler
>::emit_NewArray() {
2819 if (!emitNextIC()) {
2827 static void MarkElementsNonPackedIfHoleValue(MacroAssembler
& masm
,
2831 masm
.branchTestMagic(Assembler::NotEqual
, val
, ¬Hole
);
2833 Address
elementsFlags(elements
, ObjectElements::offsetOfFlags());
2834 masm
.or32(Imm32(ObjectElements::NON_PACKED
), elementsFlags
);
2836 masm
.bind(¬Hole
);
2840 bool BaselineInterpreterCodeGen::emit_InitElemArray() {
2841 // Pop value into R0, keep the object on the stack.
2842 frame
.popRegsAndSync(1);
2844 // Load object in R2.
2845 Register obj
= R2
.scratchReg();
2846 masm
.unboxObject(frame
.addressOfStackValue(-1), obj
);
2848 // Load index in R1.
2849 Register index
= R1
.scratchReg();
2850 LoadInt32Operand(masm
, index
);
2852 // Store the Value. No pre-barrier because this is an initialization.
2853 masm
.loadPtr(Address(obj
, NativeObject::offsetOfElements()), obj
);
2854 masm
.storeValue(R0
, BaseObjectElementIndex(obj
, index
));
2856 // Bump initialized length.
2857 Address
initLength(obj
, ObjectElements::offsetOfInitializedLength());
2858 masm
.add32(Imm32(1), index
);
2859 masm
.store32(index
, initLength
);
2861 // Mark elements as NON_PACKED if we stored the hole value.
2862 MarkElementsNonPackedIfHoleValue(masm
, obj
, R0
);
2866 Register scratch
= index
;
2867 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, scratch
, &skipBarrier
);
2869 masm
.unboxObject(frame
.addressOfStackValue(-1), obj
);
2870 masm
.branchPtrInNurseryChunk(Assembler::Equal
, obj
, scratch
, &skipBarrier
);
2871 MOZ_ASSERT(obj
== R2
.scratchReg(), "post barrier expects object in R2");
2872 masm
.call(&postBarrierSlot_
);
2874 masm
.bind(&skipBarrier
);
2879 bool BaselineCompilerCodeGen::emit_InitElemArray() {
2880 // Pop value into R0, keep the object on the stack.
2881 Maybe
<Value
> knownValue
= frame
.knownStackValue(-1);
2882 frame
.popRegsAndSync(1);
2884 // Load object in R2.
2885 Register obj
= R2
.scratchReg();
2886 masm
.unboxObject(frame
.addressOfStackValue(-1), obj
);
2888 uint32_t index
= GET_UINT32(handler
.pc());
2889 MOZ_ASSERT(index
<= INT32_MAX
,
2890 "the bytecode emitter must fail to compile code that would "
2891 "produce an index exceeding int32_t range");
2893 // Store the Value. No pre-barrier because this is an initialization.
2894 masm
.loadPtr(Address(obj
, NativeObject::offsetOfElements()), obj
);
2895 masm
.storeValue(R0
, Address(obj
, index
* sizeof(Value
)));
2897 // Bump initialized length.
2898 Address
initLength(obj
, ObjectElements::offsetOfInitializedLength());
2899 masm
.store32(Imm32(index
+ 1), initLength
);
2901 // Mark elements as NON_PACKED if we stored the hole value. We know this
2902 // statically except when debugger instrumentation is enabled because that
2903 // forces a stack-sync (which discards constants and known types) for each op.
2904 if (knownValue
&& knownValue
->isMagic(JS_ELEMENTS_HOLE
)) {
2905 Address
elementsFlags(obj
, ObjectElements::offsetOfFlags());
2906 masm
.or32(Imm32(ObjectElements::NON_PACKED
), elementsFlags
);
2907 } else if (handler
.compileDebugInstrumentation()) {
2908 MarkElementsNonPackedIfHoleValue(masm
, obj
, R0
);
2912 masm
.branchTestMagic(Assembler::NotEqual
, R0
, ¬Hole
);
2913 masm
.assumeUnreachable("Unexpected hole value");
2914 masm
.bind(¬Hole
);
2920 MOZ_ASSERT(JS::GCPolicy
<Value
>::isTenured(*knownValue
));
2923 Register scratch
= R1
.scratchReg();
2924 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, scratch
,
2927 masm
.unboxObject(frame
.addressOfStackValue(-1), obj
);
2928 masm
.branchPtrInNurseryChunk(Assembler::Equal
, obj
, scratch
,
2930 MOZ_ASSERT(obj
== R2
.scratchReg(), "post barrier expects object in R2");
2931 masm
.call(&postBarrierSlot_
);
2933 masm
.bind(&skipBarrier
);
2938 template <typename Handler
>
2939 bool BaselineCodeGen
<Handler
>::emit_NewObject() {
2940 return emitNewObject();
2943 template <typename Handler
>
2944 bool BaselineCodeGen
<Handler
>::emit_NewInit() {
2945 return emitNewObject();
2948 template <typename Handler
>
2949 bool BaselineCodeGen
<Handler
>::emitNewObject() {
2952 if (!emitNextIC()) {
2960 template <typename Handler
>
2961 bool BaselineCodeGen
<Handler
>::emit_InitElem() {
2962 // Store RHS in the scratch slot.
2963 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
2966 // Keep object and index in R0 and R1.
2967 frame
.popRegsAndSync(2);
2969 // Push the object to store the result of the IC.
2973 // Keep RHS on the stack.
2974 frame
.pushScratchValue();
2977 if (!emitNextIC()) {
2981 // Pop the rhs, so that the object is on the top of the stack.
2986 template <typename Handler
>
2987 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElem() {
2988 return emit_InitElem();
2991 template <typename Handler
>
2992 bool BaselineCodeGen
<Handler
>::emit_InitLockedElem() {
2993 return emit_InitElem();
2996 template <typename Handler
>
2997 bool BaselineCodeGen
<Handler
>::emit_MutateProto() {
2998 // Keep values on the stack for the decompiler.
3001 masm
.unboxObject(frame
.addressOfStackValue(-2), R0
.scratchReg());
3002 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3007 pushArg(R0
.scratchReg());
3009 using Fn
= bool (*)(JSContext
*, Handle
<PlainObject
*>, HandleValue
);
3010 if (!callVM
<Fn
, MutatePrototype
>()) {
3018 template <typename Handler
>
3019 bool BaselineCodeGen
<Handler
>::emit_InitProp() {
3020 // Load lhs in R0, rhs in R1.
3022 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3023 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3026 if (!emitNextIC()) {
3030 // Leave the object on the stack.
3035 template <typename Handler
>
3036 bool BaselineCodeGen
<Handler
>::emit_InitLockedProp() {
3037 return emit_InitProp();
3040 template <typename Handler
>
3041 bool BaselineCodeGen
<Handler
>::emit_InitHiddenProp() {
3042 return emit_InitProp();
3045 template <typename Handler
>
3046 bool BaselineCodeGen
<Handler
>::emit_GetElem() {
3047 // Keep top two stack values in R0 and R1.
3048 frame
.popRegsAndSync(2);
3051 if (!emitNextIC()) {
3055 // Mark R0 as pushed stack value.
3060 template <typename Handler
>
3061 bool BaselineCodeGen
<Handler
>::emit_GetElemSuper() {
3062 // Store obj in the scratch slot.
3063 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
3066 // Keep receiver and index in R0 and R1.
3067 frame
.popRegsAndSync(2);
3069 // Keep obj on the stack.
3070 frame
.pushScratchValue();
3072 if (!emitNextIC()) {
3081 template <typename Handler
>
3082 bool BaselineCodeGen
<Handler
>::emit_SetElem() {
3083 // Store RHS in the scratch slot.
3084 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
3087 // Keep object and index in R0 and R1.
3088 frame
.popRegsAndSync(2);
3090 // Keep RHS on the stack.
3091 frame
.pushScratchValue();
3094 if (!emitNextIC()) {
3101 template <typename Handler
>
3102 bool BaselineCodeGen
<Handler
>::emit_StrictSetElem() {
3103 return emit_SetElem();
3106 template <typename Handler
>
3107 bool BaselineCodeGen
<Handler
>::emitSetElemSuper(bool strict
) {
3108 // Incoming stack is |receiver, propval, obj, rval|. We need to shuffle
3109 // stack to leave rval when operation is complete.
3111 // Pop rval into R0, then load receiver into R1 and replace with rval.
3112 frame
.popRegsAndSync(1);
3113 masm
.loadValue(frame
.addressOfStackValue(-3), R1
);
3114 masm
.storeValue(R0
, frame
.addressOfStackValue(-3));
3118 pushArg(Imm32(strict
));
3119 pushArg(R0
); // rval
3120 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3121 pushArg(R0
); // propval
3122 pushArg(R1
); // receiver
3123 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3126 using Fn
= bool (*)(JSContext
*, HandleValue
, HandleValue
, HandleValue
,
3128 if (!callVM
<Fn
, js::SetElementSuper
>()) {
3136 template <typename Handler
>
3137 bool BaselineCodeGen
<Handler
>::emit_SetElemSuper() {
3138 return emitSetElemSuper(/* strict = */ false);
3141 template <typename Handler
>
3142 bool BaselineCodeGen
<Handler
>::emit_StrictSetElemSuper() {
3143 return emitSetElemSuper(/* strict = */ true);
3146 template <typename Handler
>
3147 bool BaselineCodeGen
<Handler
>::emitDelElem(bool strict
) {
3148 // Keep values on the stack for the decompiler.
3150 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3151 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3158 using Fn
= bool (*)(JSContext
*, HandleValue
, HandleValue
, bool*);
3160 if (!callVM
<Fn
, DelElemOperation
<true>>()) {
3164 if (!callVM
<Fn
, DelElemOperation
<false>>()) {
3169 masm
.boxNonDouble(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R1
);
3171 frame
.push(R1
, JSVAL_TYPE_BOOLEAN
);
3175 template <typename Handler
>
3176 bool BaselineCodeGen
<Handler
>::emit_DelElem() {
3177 return emitDelElem(/* strict = */ false);
3180 template <typename Handler
>
3181 bool BaselineCodeGen
<Handler
>::emit_StrictDelElem() {
3182 return emitDelElem(/* strict = */ true);
3185 template <typename Handler
>
3186 bool BaselineCodeGen
<Handler
>::emit_In() {
3187 frame
.popRegsAndSync(2);
3189 if (!emitNextIC()) {
3193 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
3197 template <typename Handler
>
3198 bool BaselineCodeGen
<Handler
>::emit_HasOwn() {
3199 frame
.popRegsAndSync(2);
3201 if (!emitNextIC()) {
3205 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
3209 template <typename Handler
>
3210 bool BaselineCodeGen
<Handler
>::emit_CheckPrivateField() {
3211 // Keep key and val on the stack.
3213 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3214 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3216 if (!emitNextIC()) {
3220 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
3224 template <typename Handler
>
3225 bool BaselineCodeGen
<Handler
>::emit_NewPrivateName() {
3228 pushScriptNameArg(R0
.scratchReg(), R1
.scratchReg());
3230 using Fn
= JS::Symbol
* (*)(JSContext
*, Handle
<JSAtom
*>);
3231 if (!callVM
<Fn
, NewPrivateName
>()) {
3235 masm
.tagValue(JSVAL_TYPE_SYMBOL
, ReturnReg
, R0
);
3240 template <typename Handler
>
3241 bool BaselineCodeGen
<Handler
>::emit_GetGName() {
3244 loadGlobalLexicalEnvironment(R0
.scratchReg());
3247 if (!emitNextIC()) {
3251 // Mark R0 as pushed stack value.
3257 bool BaselineCompilerCodeGen::tryOptimizeBindGlobalName() {
3258 JSScript
* script
= handler
.script();
3259 MOZ_ASSERT(!script
->hasNonSyntacticScope());
3261 Rooted
<GlobalObject
*> global(cx
, &script
->global());
3262 Rooted
<PropertyName
*> name(cx
, script
->getName(handler
.pc()));
3263 if (JSObject
* binding
= MaybeOptimizeBindGlobalName(cx
, global
, name
)) {
3264 frame
.push(ObjectValue(*binding
));
3271 bool BaselineInterpreterCodeGen::tryOptimizeBindGlobalName() {
3272 // Interpreter doesn't optimize simple BindGNames.
3276 template <typename Handler
>
3277 bool BaselineCodeGen
<Handler
>::emit_BindGName() {
3278 if (tryOptimizeBindGlobalName()) {
3283 loadGlobalLexicalEnvironment(R0
.scratchReg());
3286 if (!emitNextIC()) {
3290 // Mark R0 as pushed stack value.
3295 template <typename Handler
>
3296 bool BaselineCodeGen
<Handler
>::emit_BindVar() {
3298 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3301 pushArg(R0
.scratchReg());
3303 using Fn
= JSObject
* (*)(JSContext
*, JSObject
*);
3304 if (!callVM
<Fn
, BindVarOperation
>()) {
3308 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
3313 template <typename Handler
>
3314 bool BaselineCodeGen
<Handler
>::emit_SetProp() {
3315 // Keep lhs in R0, rhs in R1.
3316 frame
.popRegsAndSync(2);
3318 // Keep RHS on the stack.
3323 if (!emitNextIC()) {
3330 template <typename Handler
>
3331 bool BaselineCodeGen
<Handler
>::emit_StrictSetProp() {
3332 return emit_SetProp();
3335 template <typename Handler
>
3336 bool BaselineCodeGen
<Handler
>::emit_SetName() {
3337 return emit_SetProp();
3340 template <typename Handler
>
3341 bool BaselineCodeGen
<Handler
>::emit_StrictSetName() {
3342 return emit_SetProp();
3345 template <typename Handler
>
3346 bool BaselineCodeGen
<Handler
>::emit_SetGName() {
3347 return emit_SetProp();
3350 template <typename Handler
>
3351 bool BaselineCodeGen
<Handler
>::emit_StrictSetGName() {
3352 return emit_SetProp();
3355 template <typename Handler
>
3356 bool BaselineCodeGen
<Handler
>::emitSetPropSuper(bool strict
) {
3357 // Incoming stack is |receiver, obj, rval|. We need to shuffle stack to
3358 // leave rval when operation is complete.
3360 // Pop rval into R0, then load receiver into R1 and replace with rval.
3361 frame
.popRegsAndSync(1);
3362 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
3363 masm
.storeValue(R0
, frame
.addressOfStackValue(-2));
3367 pushArg(Imm32(strict
));
3368 pushArg(R0
); // rval
3369 pushScriptNameArg(R0
.scratchReg(), R2
.scratchReg());
3370 pushArg(R1
); // receiver
3371 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3374 using Fn
= bool (*)(JSContext
*, HandleValue
, HandleValue
,
3375 Handle
<PropertyName
*>, HandleValue
, bool);
3376 if (!callVM
<Fn
, js::SetPropertySuper
>()) {
3384 template <typename Handler
>
3385 bool BaselineCodeGen
<Handler
>::emit_SetPropSuper() {
3386 return emitSetPropSuper(/* strict = */ false);
3389 template <typename Handler
>
3390 bool BaselineCodeGen
<Handler
>::emit_StrictSetPropSuper() {
3391 return emitSetPropSuper(/* strict = */ true);
3394 template <typename Handler
>
3395 bool BaselineCodeGen
<Handler
>::emit_GetProp() {
3396 // Keep object in R0.
3397 frame
.popRegsAndSync(1);
3400 if (!emitNextIC()) {
3404 // Mark R0 as pushed stack value.
3409 template <typename Handler
>
3410 bool BaselineCodeGen
<Handler
>::emit_GetBoundName() {
3411 return emit_GetProp();
3414 template <typename Handler
>
3415 bool BaselineCodeGen
<Handler
>::emit_GetPropSuper() {
3416 // Receiver -> R1, ObjectOrNull -> R0
3417 frame
.popRegsAndSync(1);
3418 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3421 if (!emitNextIC()) {
3429 template <typename Handler
>
3430 bool BaselineCodeGen
<Handler
>::emitDelProp(bool strict
) {
3431 // Keep value on the stack for the decompiler.
3433 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3437 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
3440 using Fn
= bool (*)(JSContext
*, HandleValue
, Handle
<PropertyName
*>, bool*);
3442 if (!callVM
<Fn
, DelPropOperation
<true>>()) {
3446 if (!callVM
<Fn
, DelPropOperation
<false>>()) {
3451 masm
.boxNonDouble(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R1
);
3453 frame
.push(R1
, JSVAL_TYPE_BOOLEAN
);
3457 template <typename Handler
>
3458 bool BaselineCodeGen
<Handler
>::emit_DelProp() {
3459 return emitDelProp(/* strict = */ false);
3462 template <typename Handler
>
3463 bool BaselineCodeGen
<Handler
>::emit_StrictDelProp() {
3464 return emitDelProp(/* strict = */ true);
3468 void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg
) {
3469 EnvironmentCoordinate
ec(handler
.pc());
3471 masm
.loadPtr(frame
.addressOfEnvironmentChain(), reg
);
3472 for (unsigned i
= ec
.hops(); i
; i
--) {
3474 Address(reg
, EnvironmentObject::offsetOfEnclosingEnvironment()), reg
);
3479 void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg
) {
3480 MOZ_CRASH("Shouldn't call this for interpreter");
3484 Address
BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
3485 Register objReg
, Register reg
) {
3486 EnvironmentCoordinate
ec(handler
.pc());
3488 if (EnvironmentObject::nonExtensibleIsFixedSlot(ec
)) {
3489 return Address(objReg
, NativeObject::getFixedSlotOffset(ec
.slot()));
3492 uint32_t slot
= EnvironmentObject::nonExtensibleDynamicSlotIndex(ec
);
3493 masm
.loadPtr(Address(objReg
, NativeObject::offsetOfSlots()), reg
);
3494 return Address(reg
, slot
* sizeof(Value
));
3498 Address
BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
3499 Register objReg
, Register reg
) {
3500 MOZ_CRASH("Shouldn't call this for interpreter");
3503 template <typename Handler
>
3504 Address BaselineCodeGen
<Handler
>::getEnvironmentCoordinateAddress(
3506 getEnvironmentCoordinateObject(reg
);
3507 return getEnvironmentCoordinateAddressFromObject(reg
, reg
);
3510 // For a JOF_ENVCOORD op load the number of hops from the bytecode and skip this
3511 // number of environment objects.
3512 static void LoadAliasedVarEnv(MacroAssembler
& masm
, Register env
,
3514 static_assert(ENVCOORD_HOPS_LEN
== 1,
3515 "Code assumes number of hops is stored in uint8 operand");
3516 LoadUint8Operand(masm
, scratch
);
3519 masm
.branchTest32(Assembler::Zero
, scratch
, scratch
, &done
);
3522 Address
nextEnv(env
, EnvironmentObject::offsetOfEnclosingEnvironment());
3523 masm
.unboxObject(nextEnv
, env
);
3524 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch
, &top
);
3530 void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest
) {
3533 Address address
= getEnvironmentCoordinateAddress(R0
.scratchReg());
3534 masm
.loadValue(address
, dest
);
3538 void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest
) {
3539 Register env
= R0
.scratchReg();
3540 Register scratch
= R1
.scratchReg();
3542 // Load the right environment object.
3543 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
3544 LoadAliasedVarEnv(masm
, env
, scratch
);
3546 // Load the slot index.
3547 static_assert(ENVCOORD_SLOT_LEN
== 3,
3548 "Code assumes slot is stored in uint24 operand");
3549 LoadUint24Operand(masm
, ENVCOORD_HOPS_LEN
, scratch
);
3551 // Load the Value from a fixed or dynamic slot.
3552 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3553 Label isDynamic
, done
;
3554 masm
.branch32(Assembler::AboveOrEqual
, scratch
,
3555 Imm32(NativeObject::MAX_FIXED_SLOTS
), &isDynamic
);
3557 uint32_t offset
= NativeObject::getFixedSlotOffset(0);
3558 masm
.loadValue(BaseValueIndex(env
, scratch
, offset
), dest
);
3561 masm
.bind(&isDynamic
);
3563 masm
.loadPtr(Address(env
, NativeObject::offsetOfSlots()), env
);
3565 // Use an offset to subtract the number of fixed slots.
3566 int32_t offset
= -int32_t(NativeObject::MAX_FIXED_SLOTS
* sizeof(Value
));
3567 masm
.loadValue(BaseValueIndex(env
, scratch
, offset
), dest
);
3572 template <typename Handler
>
3573 bool BaselineCodeGen
<Handler
>::emitGetAliasedDebugVar(ValueOperand dest
) {
3575 Register env
= R0
.scratchReg();
3576 // Load the right environment object.
3577 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
3580 pushBytecodePCArg();
3584 bool (*)(JSContext
*, JSObject
* env
, jsbytecode
*, MutableHandleValue
);
3585 return callVM
<Fn
, LoadAliasedDebugVar
>();
3588 template <typename Handler
>
3589 bool BaselineCodeGen
<Handler
>::emit_GetAliasedDebugVar() {
3590 if (!emitGetAliasedDebugVar(R0
)) {
3598 template <typename Handler
>
3599 bool BaselineCodeGen
<Handler
>::emit_GetAliasedVar() {
3600 emitGetAliasedVar(R0
);
3607 bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
3608 // Keep rvalue in R0.
3609 frame
.popRegsAndSync(1);
3610 Register objReg
= R2
.scratchReg();
3612 getEnvironmentCoordinateObject(objReg
);
3614 getEnvironmentCoordinateAddressFromObject(objReg
, R1
.scratchReg());
3615 masm
.guardedCallPreBarrier(address
, MIRType::Value
);
3616 masm
.storeValue(R0
, address
);
3619 // Only R0 is live at this point.
3620 // Scope coordinate object is already in R2.scratchReg().
3621 Register temp
= R1
.scratchReg();
3624 masm
.branchPtrInNurseryChunk(Assembler::Equal
, objReg
, temp
, &skipBarrier
);
3625 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
3627 masm
.call(&postBarrierSlot_
); // Won't clobber R0
3629 masm
.bind(&skipBarrier
);
3634 bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
3635 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
3636 MOZ_ASSERT(!regs
.has(FramePointer
));
3638 if (HasInterpreterPCReg()) {
3639 regs
.take(InterpreterPCReg
);
3642 Register env
= regs
.takeAny();
3643 Register scratch1
= regs
.takeAny();
3644 Register scratch2
= regs
.takeAny();
3645 Register scratch3
= regs
.takeAny();
3647 // Load the right environment object.
3648 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
3649 LoadAliasedVarEnv(masm
, env
, scratch1
);
3651 // Load the slot index.
3652 static_assert(ENVCOORD_SLOT_LEN
== 3,
3653 "Code assumes slot is stored in uint24 operand");
3654 LoadUint24Operand(masm
, ENVCOORD_HOPS_LEN
, scratch1
);
3656 // Store the RHS Value in R2.
3657 masm
.loadValue(frame
.addressOfStackValue(-1), R2
);
3659 // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
3660 // guardedCallPreBarrierAnyZone once to avoid code bloat.
3662 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3663 Label isDynamic
, done
;
3664 masm
.branch32(Assembler::AboveOrEqual
, scratch1
,
3665 Imm32(NativeObject::MAX_FIXED_SLOTS
), &isDynamic
);
3667 uint32_t offset
= NativeObject::getFixedSlotOffset(0);
3668 BaseValueIndex
slotAddr(env
, scratch1
, offset
);
3669 masm
.computeEffectiveAddress(slotAddr
, scratch2
);
3672 masm
.bind(&isDynamic
);
3674 masm
.loadPtr(Address(env
, NativeObject::offsetOfSlots()), scratch2
);
3676 // Use an offset to subtract the number of fixed slots.
3677 int32_t offset
= -int32_t(NativeObject::MAX_FIXED_SLOTS
* sizeof(Value
));
3678 BaseValueIndex
slotAddr(scratch2
, scratch1
, offset
);
3679 masm
.computeEffectiveAddress(slotAddr
, scratch2
);
3683 // Pre-barrier and store.
3684 Address
slotAddr(scratch2
, 0);
3685 masm
.guardedCallPreBarrierAnyZone(slotAddr
, MIRType::Value
, scratch3
);
3686 masm
.storeValue(R2
, slotAddr
);
3690 masm
.branchPtrInNurseryChunk(Assembler::Equal
, env
, scratch1
, &skipBarrier
);
3691 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R2
, scratch1
,
3694 // Post barrier code expects the object in R2.
3695 masm
.movePtr(env
, R2
.scratchReg());
3696 masm
.call(&postBarrierSlot_
);
3698 masm
.bind(&skipBarrier
);
3702 template <typename Handler
>
3703 bool BaselineCodeGen
<Handler
>::emit_GetName() {
3706 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3709 if (!emitNextIC()) {
3713 // Mark R0 as pushed stack value.
3718 template <typename Handler
>
3719 bool BaselineCodeGen
<Handler
>::emit_BindName() {
3721 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3724 if (!emitNextIC()) {
3728 // Mark R0 as pushed stack value.
3733 template <typename Handler
>
3734 bool BaselineCodeGen
<Handler
>::emit_DelName() {
3736 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3740 pushArg(R0
.scratchReg());
3741 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
3743 using Fn
= bool (*)(JSContext
*, Handle
<PropertyName
*>, HandleObject
,
3744 MutableHandleValue
);
3745 if (!callVM
<Fn
, js::DeleteNameOperation
>()) {
3754 bool BaselineCompilerCodeGen::emit_GetImport() {
3755 JSScript
* script
= handler
.script();
3756 ModuleEnvironmentObject
* env
= GetModuleEnvironmentForScript(script
);
3759 jsid id
= NameToId(script
->getName(handler
.pc()));
3760 ModuleEnvironmentObject
* targetEnv
;
3761 Maybe
<PropertyInfo
> prop
;
3762 MOZ_ALWAYS_TRUE(env
->lookupImport(id
, &targetEnv
, &prop
));
3766 uint32_t slot
= prop
->slot();
3767 Register scratch
= R0
.scratchReg();
3768 masm
.movePtr(ImmGCPtr(targetEnv
), scratch
);
3769 if (slot
< targetEnv
->numFixedSlots()) {
3770 masm
.loadValue(Address(scratch
, NativeObject::getFixedSlotOffset(slot
)),
3773 masm
.loadPtr(Address(scratch
, NativeObject::offsetOfSlots()), scratch
);
3775 Address(scratch
, (slot
- targetEnv
->numFixedSlots()) * sizeof(Value
)),
3779 // Imports are initialized by this point except in rare circumstances, so
3780 // don't emit a check unless we have to.
3781 if (targetEnv
->getSlot(slot
).isMagic(JS_UNINITIALIZED_LEXICAL
)) {
3782 if (!emitUninitializedLexicalCheck(R0
)) {
3792 bool BaselineInterpreterCodeGen::emit_GetImport() {
3795 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3799 pushBytecodePCArg();
3801 pushArg(R0
.scratchReg());
3803 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
, jsbytecode
*,
3804 MutableHandleValue
);
3805 if (!callVM
<Fn
, GetImportOperation
>()) {
3813 template <typename Handler
>
3814 bool BaselineCodeGen
<Handler
>::emit_GetIntrinsic() {
3817 if (!emitNextIC()) {
3825 template <typename Handler
>
3826 bool BaselineCodeGen
<Handler
>::emit_SetIntrinsic() {
3828 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3833 pushBytecodePCArg();
3836 using Fn
= bool (*)(JSContext
*, JSScript
*, jsbytecode
*, HandleValue
);
3837 return callVM
<Fn
, SetIntrinsicOperation
>();
3840 template <typename Handler
>
3841 bool BaselineCodeGen
<Handler
>::emit_GlobalOrEvalDeclInstantiation() {
3846 loadInt32LengthBytecodeOperand(R0
.scratchReg());
3847 pushArg(R0
.scratchReg());
3849 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3850 pushArg(R0
.scratchReg());
3852 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
, GCThingIndex
);
3853 return callVM
<Fn
, js::GlobalOrEvalDeclInstantiation
>();
3856 template <typename Handler
>
3857 bool BaselineCodeGen
<Handler
>::emitInitPropGetterSetter() {
3858 // Keep values on the stack for the decompiler.
3863 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
3864 masm
.unboxObject(frame
.addressOfStackValue(-2), R1
.scratchReg());
3866 pushArg(R0
.scratchReg());
3867 pushScriptNameArg(R0
.scratchReg(), R2
.scratchReg());
3868 pushArg(R1
.scratchReg());
3869 pushBytecodePCArg();
3871 using Fn
= bool (*)(JSContext
*, jsbytecode
*, HandleObject
,
3872 Handle
<PropertyName
*>, HandleObject
);
3873 if (!callVM
<Fn
, InitPropGetterSetterOperation
>()) {
3881 template <typename Handler
>
3882 bool BaselineCodeGen
<Handler
>::emit_InitPropGetter() {
3883 return emitInitPropGetterSetter();
3886 template <typename Handler
>
3887 bool BaselineCodeGen
<Handler
>::emit_InitHiddenPropGetter() {
3888 return emitInitPropGetterSetter();
3891 template <typename Handler
>
3892 bool BaselineCodeGen
<Handler
>::emit_InitPropSetter() {
3893 return emitInitPropGetterSetter();
3896 template <typename Handler
>
3897 bool BaselineCodeGen
<Handler
>::emit_InitHiddenPropSetter() {
3898 return emitInitPropGetterSetter();
3901 template <typename Handler
>
3902 bool BaselineCodeGen
<Handler
>::emitInitElemGetterSetter() {
3903 // Load index and value in R0 and R1, but keep values on the stack for the
3906 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3907 masm
.unboxObject(frame
.addressOfStackValue(-1), R1
.scratchReg());
3911 pushArg(R1
.scratchReg());
3913 masm
.unboxObject(frame
.addressOfStackValue(-3), R0
.scratchReg());
3914 pushArg(R0
.scratchReg());
3915 pushBytecodePCArg();
3917 using Fn
= bool (*)(JSContext
*, jsbytecode
*, HandleObject
, HandleValue
,
3919 if (!callVM
<Fn
, InitElemGetterSetterOperation
>()) {
3927 template <typename Handler
>
3928 bool BaselineCodeGen
<Handler
>::emit_InitElemGetter() {
3929 return emitInitElemGetterSetter();
3932 template <typename Handler
>
3933 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElemGetter() {
3934 return emitInitElemGetterSetter();
3937 template <typename Handler
>
3938 bool BaselineCodeGen
<Handler
>::emit_InitElemSetter() {
3939 return emitInitElemGetterSetter();
3942 template <typename Handler
>
3943 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElemSetter() {
3944 return emitInitElemGetterSetter();
3947 template <typename Handler
>
3948 bool BaselineCodeGen
<Handler
>::emit_InitElemInc() {
3949 // Keep the object and rhs on the stack.
3952 // Load object in R0, index in R1.
3953 masm
.loadValue(frame
.addressOfStackValue(-3), R0
);
3954 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
3957 if (!emitNextIC()) {
3965 Address indexAddr
= frame
.addressOfStackValue(-1);
3968 masm
.branchTestInt32(Assembler::Equal
, indexAddr
, &isInt32
);
3969 masm
.assumeUnreachable("INITELEM_INC index must be Int32");
3970 masm
.bind(&isInt32
);
3972 masm
.incrementInt32Value(indexAddr
);
3977 bool BaselineCompilerCodeGen::emit_GetLocal() {
3978 frame
.pushLocal(GET_LOCALNO(handler
.pc()));
3982 static BaseValueIndex
ComputeAddressOfLocal(MacroAssembler
& masm
,
3983 Register indexScratch
) {
3984 // Locals are stored in memory at a negative offset from the frame pointer. We
3985 // negate the index first to effectively subtract it.
3986 masm
.negPtr(indexScratch
);
3987 return BaseValueIndex(FramePointer
, indexScratch
,
3988 BaselineFrame::reverseOffsetOfLocal(0));
3992 bool BaselineInterpreterCodeGen::emit_GetLocal() {
3993 Register scratch
= R0
.scratchReg();
3994 LoadUint24Operand(masm
, 0, scratch
);
3995 BaseValueIndex addr
= ComputeAddressOfLocal(masm
, scratch
);
3996 masm
.loadValue(addr
, R0
);
4002 bool BaselineCompilerCodeGen::emit_SetLocal() {
4003 // Ensure no other StackValue refers to the old value, for instance i + (i =
4004 // 3). This also allows us to use R0 as scratch below.
4007 uint32_t local
= GET_LOCALNO(handler
.pc());
4008 frame
.storeStackValue(-1, frame
.addressOfLocal(local
), R0
);
4013 bool BaselineInterpreterCodeGen::emit_SetLocal() {
4014 Register scratch
= R0
.scratchReg();
4015 LoadUint24Operand(masm
, 0, scratch
);
4016 BaseValueIndex addr
= ComputeAddressOfLocal(masm
, scratch
);
4017 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
4018 masm
.storeValue(R1
, addr
);
4023 bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op
) {
4024 MOZ_ASSERT(op
== JSOp::GetArg
|| op
== JSOp::SetArg
);
4026 uint32_t arg
= GET_ARGNO(handler
.pc());
4028 // Fast path: the script does not use |arguments| or formals don't
4029 // alias the arguments object.
4030 if (!handler
.script()->argsObjAliasesFormals()) {
4031 if (op
== JSOp::GetArg
) {
4034 // See the comment in emit_SetLocal.
4036 frame
.storeStackValue(-1, frame
.addressOfArg(arg
), R0
);
4042 // Sync so that we can use R0.
4045 // Load the arguments object data vector.
4046 Register reg
= R2
.scratchReg();
4047 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4048 masm
.loadPrivate(Address(reg
, ArgumentsObject::getDataSlotOffset()), reg
);
4050 // Load/store the argument.
4051 Address
argAddr(reg
, ArgumentsData::offsetOfArgs() + arg
* sizeof(Value
));
4052 if (op
== JSOp::GetArg
) {
4053 masm
.loadValue(argAddr
, R0
);
4056 Register temp
= R1
.scratchReg();
4057 masm
.guardedCallPreBarrierAnyZone(argAddr
, MIRType::Value
, temp
);
4058 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4059 masm
.storeValue(R0
, argAddr
);
4061 MOZ_ASSERT(frame
.numUnsyncedSlots() == 0);
4063 // Reload the arguments object.
4064 Register reg
= R2
.scratchReg();
4065 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4069 masm
.branchPtrInNurseryChunk(Assembler::Equal
, reg
, temp
, &skipBarrier
);
4070 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
4072 masm
.call(&postBarrierSlot_
);
4074 masm
.bind(&skipBarrier
);
4081 bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op
) {
4082 MOZ_ASSERT(op
== JSOp::GetArg
|| op
== JSOp::SetArg
);
4085 Register argReg
= R1
.scratchReg();
4086 LoadUint16Operand(masm
, argReg
);
4088 // If the frame has no arguments object, this must be an unaliased access.
4089 Label isUnaliased
, done
;
4090 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
4091 Imm32(BaselineFrame::HAS_ARGS_OBJ
), &isUnaliased
);
4093 Register reg
= R2
.scratchReg();
4095 // If it's an unmapped arguments object, this is an unaliased access.
4098 Assembler::Zero
, Address(reg
, JSScript::offsetOfImmutableFlags()),
4099 Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj
)),
4102 // Load the arguments object data vector.
4103 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4104 masm
.loadPrivate(Address(reg
, ArgumentsObject::getDataSlotOffset()), reg
);
4106 // Load/store the argument.
4107 BaseValueIndex
argAddr(reg
, argReg
, ArgumentsData::offsetOfArgs());
4108 if (op
== JSOp::GetArg
) {
4109 masm
.loadValue(argAddr
, R0
);
4112 masm
.guardedCallPreBarrierAnyZone(argAddr
, MIRType::Value
,
4114 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4115 masm
.storeValue(R0
, argAddr
);
4117 // Reload the arguments object.
4118 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4120 Register temp
= R1
.scratchReg();
4121 masm
.branchPtrInNurseryChunk(Assembler::Equal
, reg
, temp
, &done
);
4122 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &done
);
4124 masm
.call(&postBarrierSlot_
);
4128 masm
.bind(&isUnaliased
);
4130 BaseValueIndex
addr(FramePointer
, argReg
,
4131 JitFrameLayout::offsetOfActualArgs());
4132 if (op
== JSOp::GetArg
) {
4133 masm
.loadValue(addr
, R0
);
4136 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4137 masm
.storeValue(R0
, addr
);
4145 template <typename Handler
>
4146 bool BaselineCodeGen
<Handler
>::emit_GetArg() {
4147 return emitFormalArgAccess(JSOp::GetArg
);
4150 template <typename Handler
>
4151 bool BaselineCodeGen
<Handler
>::emit_SetArg() {
4152 return emitFormalArgAccess(JSOp::SetArg
);
4156 bool BaselineInterpreterCodeGen::emit_GetFrameArg() {
4159 Register argReg
= R1
.scratchReg();
4160 LoadUint16Operand(masm
, argReg
);
4162 BaseValueIndex
addr(FramePointer
, argReg
,
4163 JitFrameLayout::offsetOfActualArgs());
4164 masm
.loadValue(addr
, R0
);
4170 bool BaselineCompilerCodeGen::emit_GetFrameArg() {
4171 uint32_t arg
= GET_ARGNO(handler
.pc());
4176 template <typename Handler
>
4177 bool BaselineCodeGen
<Handler
>::emit_ArgumentsLength() {
4180 masm
.loadNumActualArgs(FramePointer
, R0
.scratchReg());
4181 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
4187 template <typename Handler
>
4188 bool BaselineCodeGen
<Handler
>::emit_GetActualArg() {
4189 frame
.popRegsAndSync(1);
4194 masm
.branchTestInt32(Assembler::Equal
, R0
, &ok
);
4195 masm
.assumeUnreachable("GetActualArg unexpected type");
4200 Register index
= R0
.scratchReg();
4201 masm
.unboxInt32(R0
, index
);
4206 masm
.loadNumActualArgs(FramePointer
, R1
.scratchReg());
4207 masm
.branch32(Assembler::Above
, R1
.scratchReg(), index
, &ok
);
4208 masm
.assumeUnreachable("GetActualArg invalid index");
4213 BaseValueIndex
addr(FramePointer
, index
,
4214 JitFrameLayout::offsetOfActualArgs());
4215 masm
.loadValue(addr
, R0
);
4221 void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest
) {
4222 masm
.move32(Imm32(handler
.function()->nargs()), dest
);
4226 void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest
) {
4227 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), dest
);
4228 masm
.loadFunctionArgCount(dest
, dest
);
4231 template <typename Handler
>
4232 bool BaselineCodeGen
<Handler
>::emit_NewTarget() {
4233 MOZ_ASSERT_IF(handler
.maybeFunction(), !handler
.maybeFunction()->isArrow());
4238 Register scratch1
= R0
.scratchReg();
4239 Register scratch2
= R1
.scratchReg();
4242 masm
.loadPtr(frame
.addressOfCalleeToken(), scratch1
);
4243 masm
.branchTestPtr(Assembler::Zero
, scratch1
, Imm32(CalleeTokenScriptBit
),
4245 masm
.assumeUnreachable("Unexpected non-function script");
4246 masm
.bind(&isFunction
);
4249 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
4250 masm
.branchFunctionKind(Assembler::NotEqual
,
4251 FunctionFlags::FunctionKind::Arrow
, scratch1
,
4252 scratch2
, ¬Arrow
);
4253 masm
.assumeUnreachable("Unexpected arrow function");
4254 masm
.bind(¬Arrow
);
4257 // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)])
4258 Label notConstructing
, done
;
4259 masm
.branchTestPtr(Assembler::Zero
, frame
.addressOfCalleeToken(),
4260 Imm32(CalleeToken_FunctionConstructing
), ¬Constructing
);
4262 Register argvLen
= R0
.scratchReg();
4263 Register nformals
= R1
.scratchReg();
4264 masm
.loadNumActualArgs(FramePointer
, argvLen
);
4266 // If argvLen < nformals, set argvlen := nformals.
4267 loadNumFormalArguments(nformals
);
4268 masm
.cmp32Move32(Assembler::Below
, argvLen
, nformals
, nformals
, argvLen
);
4270 BaseValueIndex
newTarget(FramePointer
, argvLen
,
4271 JitFrameLayout::offsetOfActualArgs());
4272 masm
.loadValue(newTarget
, R0
);
4275 // else push(undefined)
4276 masm
.bind(¬Constructing
);
4277 masm
.moveValue(UndefinedValue(), R0
);
4284 template <typename Handler
>
4285 bool BaselineCodeGen
<Handler
>::emit_ThrowSetConst() {
4287 pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN
));
4289 using Fn
= bool (*)(JSContext
*, unsigned);
4290 return callVM
<Fn
, jit::ThrowRuntimeLexicalError
>();
4293 template <typename Handler
>
4294 bool BaselineCodeGen
<Handler
>::emitUninitializedLexicalCheck(
4295 const ValueOperand
& val
) {
4297 masm
.branchTestMagicValue(Assembler::NotEqual
, val
, JS_UNINITIALIZED_LEXICAL
,
4301 pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL
));
4303 using Fn
= bool (*)(JSContext
*, unsigned);
4304 if (!callVM
<Fn
, jit::ThrowRuntimeLexicalError
>()) {
4312 template <typename Handler
>
4313 bool BaselineCodeGen
<Handler
>::emit_CheckLexical() {
4315 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4316 return emitUninitializedLexicalCheck(R0
);
4319 template <typename Handler
>
4320 bool BaselineCodeGen
<Handler
>::emit_CheckAliasedLexical() {
4321 return emit_CheckLexical();
4324 template <typename Handler
>
4325 bool BaselineCodeGen
<Handler
>::emit_InitLexical() {
4326 return emit_SetLocal();
4329 template <typename Handler
>
4330 bool BaselineCodeGen
<Handler
>::emit_InitGLexical() {
4331 frame
.popRegsAndSync(1);
4332 pushGlobalLexicalEnvironmentValue(R1
);
4334 return emit_SetProp();
4337 template <typename Handler
>
4338 bool BaselineCodeGen
<Handler
>::emit_InitAliasedLexical() {
4339 return emit_SetAliasedVar();
4342 template <typename Handler
>
4343 bool BaselineCodeGen
<Handler
>::emit_Uninitialized() {
4344 frame
.push(MagicValue(JS_UNINITIALIZED_LEXICAL
));
4349 bool BaselineCompilerCodeGen::emitCall(JSOp op
) {
4350 MOZ_ASSERT(IsInvokeOp(op
));
4354 uint32_t argc
= GET_ARGC(handler
.pc());
4355 masm
.move32(Imm32(argc
), R0
.scratchReg());
4358 if (!emitNextIC()) {
4362 // Update FrameInfo.
4363 bool construct
= IsConstructOp(op
);
4364 frame
.popn(2 + argc
+ construct
);
4370 bool BaselineInterpreterCodeGen::emitCall(JSOp op
) {
4371 MOZ_ASSERT(IsInvokeOp(op
));
4373 // The IC expects argc in R0.
4374 LoadUint16Operand(masm
, R0
.scratchReg());
4375 if (!emitNextIC()) {
4379 // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
4380 // The return value is in R0 so we can't use that.
4381 Register scratch
= R1
.scratchReg();
4382 uint32_t extraValuesToPop
= IsConstructOp(op
) ? 3 : 2;
4383 Register spReg
= AsRegister(masm
.getStackPointer());
4384 LoadUint16Operand(masm
, scratch
);
4385 masm
.computeEffectiveAddress(
4386 BaseValueIndex(spReg
, scratch
, extraValuesToPop
* sizeof(Value
)), spReg
);
4391 template <typename Handler
>
4392 bool BaselineCodeGen
<Handler
>::emitSpreadCall(JSOp op
) {
4393 MOZ_ASSERT(IsInvokeOp(op
));
4396 masm
.move32(Imm32(1), R0
.scratchReg());
4399 if (!emitNextIC()) {
4403 // Update FrameInfo.
4404 bool construct
= op
== JSOp::SpreadNew
|| op
== JSOp::SpreadSuperCall
;
4405 frame
.popn(3 + construct
);
4410 template <typename Handler
>
4411 bool BaselineCodeGen
<Handler
>::emit_Call() {
4412 return emitCall(JSOp::Call
);
4415 template <typename Handler
>
4416 bool BaselineCodeGen
<Handler
>::emit_CallContent() {
4417 return emitCall(JSOp::CallContent
);
4420 template <typename Handler
>
4421 bool BaselineCodeGen
<Handler
>::emit_CallIgnoresRv() {
4422 return emitCall(JSOp::CallIgnoresRv
);
4425 template <typename Handler
>
4426 bool BaselineCodeGen
<Handler
>::emit_CallIter() {
4427 return emitCall(JSOp::CallIter
);
4430 template <typename Handler
>
4431 bool BaselineCodeGen
<Handler
>::emit_CallContentIter() {
4432 return emitCall(JSOp::CallContentIter
);
4435 template <typename Handler
>
4436 bool BaselineCodeGen
<Handler
>::emit_New() {
4437 return emitCall(JSOp::New
);
4440 template <typename Handler
>
4441 bool BaselineCodeGen
<Handler
>::emit_NewContent() {
4442 return emitCall(JSOp::NewContent
);
4445 template <typename Handler
>
4446 bool BaselineCodeGen
<Handler
>::emit_SuperCall() {
4447 return emitCall(JSOp::SuperCall
);
4450 template <typename Handler
>
4451 bool BaselineCodeGen
<Handler
>::emit_Eval() {
4452 return emitCall(JSOp::Eval
);
4455 template <typename Handler
>
4456 bool BaselineCodeGen
<Handler
>::emit_StrictEval() {
4457 return emitCall(JSOp::StrictEval
);
4460 template <typename Handler
>
4461 bool BaselineCodeGen
<Handler
>::emit_SpreadCall() {
4462 return emitSpreadCall(JSOp::SpreadCall
);
4465 template <typename Handler
>
4466 bool BaselineCodeGen
<Handler
>::emit_SpreadNew() {
4467 return emitSpreadCall(JSOp::SpreadNew
);
4470 template <typename Handler
>
4471 bool BaselineCodeGen
<Handler
>::emit_SpreadSuperCall() {
4472 return emitSpreadCall(JSOp::SpreadSuperCall
);
4475 template <typename Handler
>
4476 bool BaselineCodeGen
<Handler
>::emit_SpreadEval() {
4477 return emitSpreadCall(JSOp::SpreadEval
);
4480 template <typename Handler
>
4481 bool BaselineCodeGen
<Handler
>::emit_StrictSpreadEval() {
4482 return emitSpreadCall(JSOp::StrictSpreadEval
);
4485 template <typename Handler
>
4486 bool BaselineCodeGen
<Handler
>::emit_OptimizeSpreadCall() {
4487 frame
.popRegsAndSync(1);
4489 if (!emitNextIC()) {
4497 template <typename Handler
>
4498 bool BaselineCodeGen
<Handler
>::emit_ImplicitThis() {
4500 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
4504 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
4505 pushArg(R0
.scratchReg());
4507 using Fn
= bool (*)(JSContext
*, HandleObject
, Handle
<PropertyName
*>,
4508 MutableHandleValue
);
4509 if (!callVM
<Fn
, ImplicitThisOperation
>()) {
4517 template <typename Handler
>
4518 bool BaselineCodeGen
<Handler
>::emit_Instanceof() {
4519 frame
.popRegsAndSync(2);
4521 if (!emitNextIC()) {
4525 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
4529 template <typename Handler
>
4530 bool BaselineCodeGen
<Handler
>::emit_Typeof() {
4531 frame
.popRegsAndSync(1);
4533 if (!emitNextIC()) {
4541 template <typename Handler
>
4542 bool BaselineCodeGen
<Handler
>::emit_TypeofExpr() {
4543 return emit_Typeof();
4546 template <typename Handler
>
4547 bool BaselineCodeGen
<Handler
>::emit_ThrowMsg() {
4549 pushUint8BytecodeOperandArg(R2
.scratchReg());
4551 using Fn
= bool (*)(JSContext
*, const unsigned);
4552 return callVM
<Fn
, js::ThrowMsgOperation
>();
4555 template <typename Handler
>
4556 bool BaselineCodeGen
<Handler
>::emit_Throw() {
4557 // Keep value to throw in R0.
4558 frame
.popRegsAndSync(1);
4563 using Fn
= bool (*)(JSContext
*, HandleValue
);
4564 return callVM
<Fn
, js::ThrowOperation
>();
4567 template <typename Handler
>
4568 bool BaselineCodeGen
<Handler
>::emit_ThrowWithStack() {
4569 // Keep value to throw in R0 and the stack in R1.
4570 frame
.popRegsAndSync(2);
4576 using Fn
= bool (*)(JSContext
*, HandleValue
, HandleValue
);
4577 return callVM
<Fn
, js::ThrowWithStackOperation
>();
4580 template <typename Handler
>
4581 bool BaselineCodeGen
<Handler
>::emit_Try() {
4585 template <typename Handler
>
4586 bool BaselineCodeGen
<Handler
>::emit_Finally() {
4587 // To match the interpreter, emit an interrupt check at the start of the
4589 return emitInterruptCheck();
4592 static void LoadBaselineScriptResumeEntries(MacroAssembler
& masm
,
4593 JSScript
* script
, Register dest
,
4595 MOZ_ASSERT(dest
!= scratch
);
4597 masm
.movePtr(ImmPtr(script
->jitScript()), dest
);
4598 masm
.loadPtr(Address(dest
, JitScript::offsetOfBaselineScript()), dest
);
4599 masm
.load32(Address(dest
, BaselineScript::offsetOfResumeEntriesOffset()),
4601 masm
.addPtr(scratch
, dest
);
4604 template <typename Handler
>
4605 void BaselineCodeGen
<Handler
>::emitInterpJumpToResumeEntry(Register script
,
4606 Register resumeIndex
,
4608 // Load JSScript::immutableScriptData() into |script|.
4609 masm
.loadPtr(Address(script
, JSScript::offsetOfSharedData()), script
);
4610 masm
.loadPtr(Address(script
, SharedImmutableScriptData::offsetOfISD()),
4613 // Load the resume pcOffset in |resumeIndex|.
4615 Address(script
, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
4617 masm
.computeEffectiveAddress(BaseIndex(scratch
, resumeIndex
, TimesFour
),
4619 masm
.load32(BaseIndex(script
, scratch
, TimesOne
), resumeIndex
);
4621 // Add resume offset to PC, jump to it.
4622 masm
.computeEffectiveAddress(BaseIndex(script
, resumeIndex
, TimesOne
,
4623 ImmutableScriptData::offsetOfCode()),
4625 Address
pcAddr(FramePointer
, BaselineFrame::reverseOffsetOfInterpreterPC());
4626 masm
.storePtr(script
, pcAddr
);
4627 emitJumpToInterpretOpLabel();
4631 void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex
,
4633 Register scratch2
) {
4634 LoadBaselineScriptResumeEntries(masm
, handler
.script(), scratch1
, scratch2
);
4636 BaseIndex(scratch1
, resumeIndex
, ScaleFromElemWidth(sizeof(uintptr_t))),
4638 masm
.jump(scratch1
);
4642 void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex
,
4644 Register scratch2
) {
4645 loadScript(scratch1
);
4646 emitInterpJumpToResumeEntry(scratch1
, resumeIndex
, scratch2
);
4650 template <typename F1
, typename F2
>
4651 [[nodiscard
]] bool BaselineCompilerCodeGen::emitDebugInstrumentation(
4652 const F1
& ifDebuggee
, const Maybe
<F2
>& ifNotDebuggee
) {
4653 // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
4654 // knows statically whether we're compiling with debug instrumentation.
4656 if (handler
.compileDebugInstrumentation()) {
4657 return ifDebuggee();
4660 if (ifNotDebuggee
) {
4661 return (*ifNotDebuggee
)();
4668 template <typename F1
, typename F2
>
4669 [[nodiscard
]] bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
4670 const F1
& ifDebuggee
, const Maybe
<F2
>& ifNotDebuggee
) {
4671 // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
4672 // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
4675 Label isNotDebuggee
, done
;
4677 CodeOffset toggleOffset
= masm
.toggledJump(&isNotDebuggee
);
4678 if (!handler
.addDebugInstrumentationOffset(cx
, toggleOffset
)) {
4682 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
4683 Imm32(BaselineFrame::DEBUGGEE
), &isNotDebuggee
);
4685 if (!ifDebuggee()) {
4689 if (ifNotDebuggee
) {
4693 masm
.bind(&isNotDebuggee
);
4695 if (ifNotDebuggee
&& !(*ifNotDebuggee
)()) {
4703 template <typename Handler
>
4704 bool BaselineCodeGen
<Handler
>::emit_PushLexicalEnv() {
4705 // Call a stub to push the block on the block chain.
4707 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4709 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
4711 pushArg(R0
.scratchReg());
4713 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, Handle
<LexicalScope
*>);
4714 return callVM
<Fn
, jit::PushLexicalEnv
>();
4717 template <typename Handler
>
4718 bool BaselineCodeGen
<Handler
>::emit_PushClassBodyEnv() {
4720 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4722 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
4724 pushArg(R0
.scratchReg());
4726 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, Handle
<ClassBodyScope
*>);
4727 return callVM
<Fn
, jit::PushClassBodyEnv
>();
4730 template <typename Handler
>
4731 bool BaselineCodeGen
<Handler
>::emit_PopLexicalEnv() {
4734 Register scratch1
= R0
.scratchReg();
4736 auto ifDebuggee
= [this, scratch1
]() {
4737 masm
.loadBaselineFramePtr(FramePointer
, scratch1
);
4740 pushBytecodePCArg();
4743 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const jsbytecode
*);
4744 return callVM
<Fn
, jit::DebugLeaveThenPopLexicalEnv
>();
4746 auto ifNotDebuggee
= [this, scratch1
]() {
4747 Register scratch2
= R1
.scratchReg();
4748 masm
.loadPtr(frame
.addressOfEnvironmentChain(), scratch1
);
4749 masm
.debugAssertObjectHasClass(scratch1
, scratch2
,
4750 &LexicalEnvironmentObject::class_
);
4751 Address
enclosingAddr(scratch1
,
4752 EnvironmentObject::offsetOfEnclosingEnvironment());
4753 masm
.unboxObject(enclosingAddr
, scratch1
);
4754 masm
.storePtr(scratch1
, frame
.addressOfEnvironmentChain());
4757 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
4760 template <typename Handler
>
4761 bool BaselineCodeGen
<Handler
>::emit_FreshenLexicalEnv() {
4764 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4766 auto ifDebuggee
= [this]() {
4768 pushBytecodePCArg();
4769 pushArg(R0
.scratchReg());
4771 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const jsbytecode
*);
4772 return callVM
<Fn
, jit::DebuggeeFreshenLexicalEnv
>();
4774 auto ifNotDebuggee
= [this]() {
4776 pushArg(R0
.scratchReg());
4778 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4779 return callVM
<Fn
, jit::FreshenLexicalEnv
>();
4781 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
4784 template <typename Handler
>
4785 bool BaselineCodeGen
<Handler
>::emit_RecreateLexicalEnv() {
4788 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4790 auto ifDebuggee
= [this]() {
4792 pushBytecodePCArg();
4793 pushArg(R0
.scratchReg());
4795 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const jsbytecode
*);
4796 return callVM
<Fn
, jit::DebuggeeRecreateLexicalEnv
>();
4798 auto ifNotDebuggee
= [this]() {
4800 pushArg(R0
.scratchReg());
4802 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4803 return callVM
<Fn
, jit::RecreateLexicalEnv
>();
4805 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
4808 template <typename Handler
>
4809 bool BaselineCodeGen
<Handler
>::emit_DebugLeaveLexicalEnv() {
4810 auto ifDebuggee
= [this]() {
4812 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4813 pushBytecodePCArg();
4814 pushArg(R0
.scratchReg());
4816 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const jsbytecode
*);
4817 return callVM
<Fn
, jit::DebugLeaveLexicalEnv
>();
4819 return emitDebugInstrumentation(ifDebuggee
);
4822 template <typename Handler
>
4823 bool BaselineCodeGen
<Handler
>::emit_PushVarEnv() {
4825 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4826 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
4828 pushArg(R0
.scratchReg());
4830 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, Handle
<Scope
*>);
4831 return callVM
<Fn
, jit::PushVarEnv
>();
4834 template <typename Handler
>
4835 bool BaselineCodeGen
<Handler
>::emit_EnterWith() {
4836 // Pop "with" object to R0.
4837 frame
.popRegsAndSync(1);
4839 // Call a stub to push the object onto the environment chain.
4842 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
4845 masm
.loadBaselineFramePtr(FramePointer
, R1
.scratchReg());
4846 pushArg(R1
.scratchReg());
4849 bool (*)(JSContext
*, BaselineFrame
*, HandleValue
, Handle
<WithScope
*>);
4850 return callVM
<Fn
, jit::EnterWith
>();
4853 template <typename Handler
>
4854 bool BaselineCodeGen
<Handler
>::emit_LeaveWith() {
4855 // Call a stub to pop the with object from the environment chain.
4858 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4859 pushArg(R0
.scratchReg());
4861 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4862 return callVM
<Fn
, jit::LeaveWith
>();
4865 template <typename Handler
>
4866 bool BaselineCodeGen
<Handler
>::emit_Exception() {
4869 using Fn
= bool (*)(JSContext
*, MutableHandleValue
);
4870 if (!callVM
<Fn
, GetAndClearException
>()) {
4878 template <typename Handler
>
4879 bool BaselineCodeGen
<Handler
>::emit_ExceptionAndStack() {
4880 // First call into the VM to store the exception stack.
4884 using Fn
= bool (*)(JSContext
*, MutableHandleValue
);
4885 if (!callVM
<Fn
, GetPendingExceptionStack
>()) {
4892 // Now get the actual exception value and clear the exception state.
4896 using Fn
= bool (*)(JSContext
*, MutableHandleValue
);
4897 if (!callVM
<Fn
, GetAndClearException
>()) {
4904 // Finally swap the stack and the exception.
4905 frame
.popRegsAndSync(2);
4912 template <typename Handler
>
4913 bool BaselineCodeGen
<Handler
>::emit_Debugger() {
4916 frame
.assertSyncedStack();
4917 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4918 pushArg(R0
.scratchReg());
4920 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4921 if (!callVM
<Fn
, jit::OnDebuggerStatement
>()) {
4928 template <typename Handler
>
4929 bool BaselineCodeGen
<Handler
>::emitDebugEpilogue() {
4930 auto ifDebuggee
= [this]() {
4931 // Move return value into the frame's rval slot.
4932 masm
.storeValue(JSReturnOperand
, frame
.addressOfReturnValue());
4933 masm
.or32(Imm32(BaselineFrame::HAS_RVAL
), frame
.addressOfFlags());
4935 // Load BaselineFrame pointer in R0.
4937 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
4940 pushBytecodePCArg();
4941 pushArg(R0
.scratchReg());
4943 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugEpilogue
;
4945 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const jsbytecode
*);
4946 if (!callVM
<Fn
, jit::DebugEpilogueOnBaselineReturn
>(kind
)) {
4950 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
4953 return emitDebugInstrumentation(ifDebuggee
);
4956 template <typename Handler
>
4957 bool BaselineCodeGen
<Handler
>::emitReturn() {
4958 if (handler
.shouldEmitDebugEpilogueAtReturnOp()) {
4959 if (!emitDebugEpilogue()) {
4964 // Only emit the jump if this JSOp::RetRval is not the last instruction.
4965 // Not needed for last instruction, because last instruction flows
4966 // into return label.
4967 if (!handler
.isDefinitelyLastOp()) {
4968 masm
.jump(&return_
);
4974 template <typename Handler
>
4975 bool BaselineCodeGen
<Handler
>::emit_Return() {
4976 frame
.assertStackDepth(1);
4978 frame
.popValue(JSReturnOperand
);
4979 return emitReturn();
4982 template <typename Handler
>
4983 void BaselineCodeGen
<Handler
>::emitLoadReturnValue(ValueOperand val
) {
4985 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
4986 Imm32(BaselineFrame::HAS_RVAL
), &noRval
);
4987 masm
.loadValue(frame
.addressOfReturnValue(), val
);
4991 masm
.moveValue(UndefinedValue(), val
);
4996 template <typename Handler
>
4997 bool BaselineCodeGen
<Handler
>::emit_RetRval() {
4998 frame
.assertStackDepth(0);
5000 masm
.moveValue(UndefinedValue(), JSReturnOperand
);
5002 if (!handler
.maybeScript() || !handler
.maybeScript()->noScriptRval()) {
5003 // Return the value in the return value slot, if any.
5005 Address flags
= frame
.addressOfFlags();
5006 masm
.branchTest32(Assembler::Zero
, flags
, Imm32(BaselineFrame::HAS_RVAL
),
5008 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
5012 return emitReturn();
5015 template <typename Handler
>
5016 bool BaselineCodeGen
<Handler
>::emit_ToPropertyKey() {
5017 frame
.popRegsAndSync(1);
5019 if (!emitNextIC()) {
5027 template <typename Handler
>
5028 bool BaselineCodeGen
<Handler
>::emit_ToAsyncIter() {
5030 masm
.unboxObject(frame
.addressOfStackValue(-2), R0
.scratchReg());
5031 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
5035 pushArg(R0
.scratchReg());
5037 using Fn
= JSObject
* (*)(JSContext
*, HandleObject
, HandleValue
);
5038 if (!callVM
<Fn
, js::CreateAsyncFromSyncIterator
>()) {
5042 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5048 template <typename Handler
>
5049 bool BaselineCodeGen
<Handler
>::emit_CanSkipAwait() {
5051 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
5056 using Fn
= bool (*)(JSContext
*, HandleValue
, bool* canSkip
);
5057 if (!callVM
<Fn
, js::CanSkipAwait
>()) {
5061 masm
.tagValue(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R0
);
5062 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
5066 template <typename Handler
>
5067 bool BaselineCodeGen
<Handler
>::emit_MaybeExtractAwaitValue() {
5069 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
5071 masm
.unboxBoolean(frame
.addressOfStackValue(-1), R1
.scratchReg());
5074 masm
.branchIfFalseBool(R1
.scratchReg(), &cantExtract
);
5079 using Fn
= bool (*)(JSContext
*, HandleValue
, MutableHandleValue
);
5080 if (!callVM
<Fn
, js::ExtractAwaitValue
>()) {
5084 masm
.storeValue(R0
, frame
.addressOfStackValue(-2));
5085 masm
.bind(&cantExtract
);
5090 template <typename Handler
>
5091 bool BaselineCodeGen
<Handler
>::emit_AsyncAwait() {
5093 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
5094 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
5098 pushArg(R0
.scratchReg());
5100 using Fn
= JSObject
* (*)(JSContext
*, Handle
<AsyncFunctionGeneratorObject
*>,
5102 if (!callVM
<Fn
, js::AsyncFunctionAwait
>()) {
5106 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5112 template <typename Handler
>
5113 bool BaselineCodeGen
<Handler
>::emit_AsyncResolve() {
5115 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
5116 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
5120 pushArg(R0
.scratchReg());
5122 using Fn
= JSObject
* (*)(JSContext
*, Handle
<AsyncFunctionGeneratorObject
*>,
5124 if (!callVM
<Fn
, js::AsyncFunctionResolve
>()) {
5128 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5134 template <typename Handler
>
5135 bool BaselineCodeGen
<Handler
>::emit_AsyncReject() {
5137 masm
.loadValue(frame
.addressOfStackValue(-3), R2
);
5138 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
5139 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
5144 pushArg(R0
.scratchReg());
5146 using Fn
= JSObject
* (*)(JSContext
*, Handle
<AsyncFunctionGeneratorObject
*>,
5147 HandleValue
, HandleValue
);
5148 if (!callVM
<Fn
, js::AsyncFunctionReject
>()) {
5152 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5158 template <typename Handler
>
5159 bool BaselineCodeGen
<Handler
>::emit_CheckObjCoercible() {
5161 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
5165 masm
.branchTestUndefined(Assembler::Equal
, R0
, &fail
);
5166 masm
.branchTestNull(Assembler::NotEqual
, R0
, &done
);
5173 using Fn
= bool (*)(JSContext
*, HandleValue
);
5174 if (!callVM
<Fn
, ThrowObjectCoercible
>()) {
5182 template <typename Handler
>
5183 bool BaselineCodeGen
<Handler
>::emit_ToString() {
5184 // Keep top stack value in R0.
5185 frame
.popRegsAndSync(1);
5187 // Inline path for string.
5189 masm
.branchTestString(Assembler::Equal
, R0
, &done
);
5195 // Call ToStringSlow which doesn't handle string inputs.
5196 using Fn
= JSString
* (*)(JSContext
*, HandleValue
);
5197 if (!callVM
<Fn
, ToStringSlow
<CanGC
>>()) {
5201 masm
.tagValue(JSVAL_TYPE_STRING
, ReturnReg
, R0
);
5208 static constexpr uint32_t TableSwitchOpLowOffset
= 1 * JUMP_OFFSET_LEN
;
5209 static constexpr uint32_t TableSwitchOpHighOffset
= 2 * JUMP_OFFSET_LEN
;
5210 static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset
=
5211 3 * JUMP_OFFSET_LEN
;
5214 void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val
,
5217 Register scratch2
) {
5218 jsbytecode
* pc
= handler
.pc();
5219 jsbytecode
* defaultpc
= pc
+ GET_JUMP_OFFSET(pc
);
5220 Label
* defaultLabel
= handler
.labelOf(defaultpc
);
5222 int32_t low
= GET_JUMP_OFFSET(pc
+ TableSwitchOpLowOffset
);
5223 int32_t high
= GET_JUMP_OFFSET(pc
+ TableSwitchOpHighOffset
);
5224 int32_t length
= high
- low
+ 1;
5226 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5227 // all cases are int32).
5228 masm
.branchTestInt32(Assembler::NotEqual
, val
, defaultLabel
);
5229 masm
.unboxInt32(val
, dest
);
5231 // Subtract 'low'. Bounds check.
5233 masm
.sub32(Imm32(low
), dest
);
5235 masm
.branch32(Assembler::AboveOrEqual
, dest
, Imm32(length
), defaultLabel
);
5239 void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val
,
5242 Register scratch2
) {
5243 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5244 // all cases are int32).
5245 Label done
, jumpToDefault
;
5246 masm
.branchTestInt32(Assembler::NotEqual
, val
, &jumpToDefault
);
5247 masm
.unboxInt32(val
, dest
);
5249 Register pcReg
= LoadBytecodePC(masm
, scratch1
);
5250 Address
lowAddr(pcReg
, sizeof(jsbytecode
) + TableSwitchOpLowOffset
);
5251 Address
highAddr(pcReg
, sizeof(jsbytecode
) + TableSwitchOpHighOffset
);
5253 // Jump to default if val > high.
5254 masm
.branch32(Assembler::LessThan
, highAddr
, dest
, &jumpToDefault
);
5256 // Jump to default if val < low.
5257 masm
.load32(lowAddr
, scratch2
);
5258 masm
.branch32(Assembler::GreaterThan
, scratch2
, dest
, &jumpToDefault
);
5260 // index := val - low.
5261 masm
.sub32(scratch2
, dest
);
5264 masm
.bind(&jumpToDefault
);
5271 void BaselineCompilerCodeGen::emitTableSwitchJump(Register key
,
5273 Register scratch2
) {
5274 // Jump to resumeEntries[firstResumeIndex + key].
5276 // Note: BytecodeEmitter::allocateResumeIndex static_asserts
5277 // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t.
5278 uint32_t firstResumeIndex
=
5279 GET_RESUMEINDEX(handler
.pc() + TableSwitchOpFirstResumeIndexOffset
);
5280 LoadBaselineScriptResumeEntries(masm
, handler
.script(), scratch1
, scratch2
);
5281 masm
.loadPtr(BaseIndex(scratch1
, key
, ScaleFromElemWidth(sizeof(uintptr_t)),
5282 firstResumeIndex
* sizeof(uintptr_t)),
5284 masm
.jump(scratch1
);
5288 void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key
,
5290 Register scratch2
) {
5291 // Load the op's firstResumeIndex in scratch1.
5292 LoadUint24Operand(masm
, TableSwitchOpFirstResumeIndexOffset
, scratch1
);
5294 masm
.add32(key
, scratch1
);
5295 jumpToResumeEntry(scratch1
, key
, scratch2
);
5298 template <typename Handler
>
5299 bool BaselineCodeGen
<Handler
>::emit_TableSwitch() {
5300 frame
.popRegsAndSync(1);
5302 Register key
= R0
.scratchReg();
5303 Register scratch1
= R1
.scratchReg();
5304 Register scratch2
= R2
.scratchReg();
5306 // Call a stub to convert R0 from double to int32 if needed.
5307 // Note: this stub may clobber scratch1.
5308 masm
.call(cx
->runtime()->jitRuntime()->getDoubleToInt32ValueStub());
5310 // Load the index in the jump table in |key|, or branch to default pc if not
5311 // int32 or out-of-range.
5312 emitGetTableSwitchIndex(R0
, key
, scratch1
, scratch2
);
5314 // Jump to the target pc.
5315 emitTableSwitchJump(key
, scratch1
, scratch2
);
5319 template <typename Handler
>
5320 bool BaselineCodeGen
<Handler
>::emit_Iter() {
5321 frame
.popRegsAndSync(1);
5323 if (!emitNextIC()) {
5331 template <typename Handler
>
5332 bool BaselineCodeGen
<Handler
>::emit_MoreIter() {
5335 masm
.unboxObject(frame
.addressOfStackValue(-1), R1
.scratchReg());
5337 masm
.iteratorMore(R1
.scratchReg(), R0
, R2
.scratchReg());
5342 template <typename Handler
>
5343 bool BaselineCodeGen
<Handler
>::emitIsMagicValue() {
5346 Label isMagic
, done
;
5347 masm
.branchTestMagic(Assembler::Equal
, frame
.addressOfStackValue(-1),
5349 masm
.moveValue(BooleanValue(false), R0
);
5352 masm
.bind(&isMagic
);
5353 masm
.moveValue(BooleanValue(true), R0
);
5356 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
5360 template <typename Handler
>
5361 bool BaselineCodeGen
<Handler
>::emit_IsNoIter() {
5362 return emitIsMagicValue();
5365 template <typename Handler
>
5366 bool BaselineCodeGen
<Handler
>::emit_EndIter() {
5367 // Pop iterator value.
5370 // Pop the iterator object to close in R0.
5371 frame
.popRegsAndSync(1);
5373 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
5374 MOZ_ASSERT(!regs
.has(FramePointer
));
5375 if (HasInterpreterPCReg()) {
5376 regs
.take(InterpreterPCReg
);
5379 Register obj
= R0
.scratchReg();
5381 masm
.unboxObject(R0
, obj
);
5383 Register temp1
= regs
.takeAny();
5384 Register temp2
= regs
.takeAny();
5385 Register temp3
= regs
.takeAny();
5386 masm
.iteratorClose(obj
, temp1
, temp2
, temp3
);
5390 template <typename Handler
>
5391 bool BaselineCodeGen
<Handler
>::emit_CloseIter() {
5392 frame
.popRegsAndSync(1);
5394 Register iter
= R0
.scratchReg();
5395 masm
.unboxObject(R0
, iter
);
5397 return emitNextIC();
5400 template <typename Handler
>
5401 bool BaselineCodeGen
<Handler
>::emit_OptimizeGetIterator() {
5402 frame
.popRegsAndSync(1);
5404 if (!emitNextIC()) {
5412 template <typename Handler
>
5413 bool BaselineCodeGen
<Handler
>::emit_IsGenClosing() {
5414 return emitIsMagicValue();
5417 template <typename Handler
>
5418 bool BaselineCodeGen
<Handler
>::emit_IsNullOrUndefined() {
5421 Label isNullOrUndefined
, done
;
5422 masm
.branchTestNull(Assembler::Equal
, frame
.addressOfStackValue(-1),
5423 &isNullOrUndefined
);
5424 masm
.branchTestUndefined(Assembler::Equal
, frame
.addressOfStackValue(-1),
5425 &isNullOrUndefined
);
5426 masm
.moveValue(BooleanValue(false), R0
);
5429 masm
.bind(&isNullOrUndefined
);
5430 masm
.moveValue(BooleanValue(true), R0
);
5433 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
5437 template <typename Handler
>
5438 bool BaselineCodeGen
<Handler
>::emit_GetRval() {
5441 emitLoadReturnValue(R0
);
5447 template <typename Handler
>
5448 bool BaselineCodeGen
<Handler
>::emit_SetRval() {
5449 // Store to the frame's return value slot.
5450 frame
.storeStackValue(-1, frame
.addressOfReturnValue(), R2
);
5451 masm
.or32(Imm32(BaselineFrame::HAS_RVAL
), frame
.addressOfFlags());
5456 template <typename Handler
>
5457 bool BaselineCodeGen
<Handler
>::emit_Callee() {
5458 MOZ_ASSERT_IF(handler
.maybeScript(), handler
.maybeScript()->function());
5460 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(),
5462 masm
.tagValue(JSVAL_TYPE_OBJECT
, R0
.scratchReg(), R0
);
5468 bool BaselineCompilerCodeGen::emit_EnvCallee() {
5470 uint8_t numHops
= GET_UINT8(handler
.pc());
5471 Register scratch
= R0
.scratchReg();
5473 masm
.loadPtr(frame
.addressOfEnvironmentChain(), scratch
);
5474 for (unsigned i
= 0; i
< numHops
; i
++) {
5475 Address
nextAddr(scratch
,
5476 EnvironmentObject::offsetOfEnclosingEnvironment());
5477 masm
.unboxObject(nextAddr
, scratch
);
5480 masm
.loadValue(Address(scratch
, CallObject::offsetOfCallee()), R0
);
5486 bool BaselineInterpreterCodeGen::emit_EnvCallee() {
5487 Register scratch
= R0
.scratchReg();
5488 Register env
= R1
.scratchReg();
5490 static_assert(JSOpLength_EnvCallee
- sizeof(jsbytecode
) == ENVCOORD_HOPS_LEN
,
5491 "op must have uint8 operand for LoadAliasedVarEnv");
5493 // Load the right environment object.
5494 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
5495 LoadAliasedVarEnv(masm
, env
, scratch
);
5497 masm
.pushValue(Address(env
, CallObject::offsetOfCallee()));
5501 template <typename Handler
>
5502 bool BaselineCodeGen
<Handler
>::emit_SuperBase() {
5503 frame
.popRegsAndSync(1);
5505 Register scratch
= R0
.scratchReg();
5506 Register proto
= R1
.scratchReg();
5509 masm
.unboxObject(R0
, scratch
);
5511 // Load [[HomeObject]]
5512 Address
homeObjAddr(scratch
,
5513 FunctionExtended::offsetOfMethodHomeObjectSlot());
5515 masm
.assertFunctionIsExtended(scratch
);
5518 masm
.branchTestObject(Assembler::Equal
, homeObjAddr
, &isObject
);
5519 masm
.assumeUnreachable("[[HomeObject]] must be Object");
5520 masm
.bind(&isObject
);
5522 masm
.unboxObject(homeObjAddr
, scratch
);
5524 // Load prototype from [[HomeObject]]
5525 masm
.loadObjProto(scratch
, proto
);
5528 // We won't encounter a lazy proto, because the prototype is guaranteed to
5529 // either be a JSFunction or a PlainObject, and only proxy objects can have a
5531 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto
) == 1);
5533 Label proxyCheckDone
;
5534 masm
.branchPtr(Assembler::NotEqual
, proto
, ImmWord(1), &proxyCheckDone
);
5535 masm
.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase");
5536 masm
.bind(&proxyCheckDone
);
5539 Label nullProto
, done
;
5540 masm
.branchPtr(Assembler::Equal
, proto
, ImmWord(0), &nullProto
);
5542 // Box prototype and return
5543 masm
.tagValue(JSVAL_TYPE_OBJECT
, proto
, R1
);
5546 masm
.bind(&nullProto
);
5547 masm
.moveValue(NullValue(), R1
);
5554 template <typename Handler
>
5555 bool BaselineCodeGen
<Handler
>::emit_SuperFun() {
5556 frame
.popRegsAndSync(1);
5558 Register callee
= R0
.scratchReg();
5559 Register proto
= R1
.scratchReg();
5561 Register scratch
= R2
.scratchReg();
5565 masm
.unboxObject(R0
, callee
);
5568 Label classCheckDone
;
5569 masm
.branchTestObjIsFunction(Assembler::Equal
, callee
, scratch
, callee
,
5571 masm
.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
5572 masm
.bind(&classCheckDone
);
5575 // Load prototype of callee
5576 masm
.loadObjProto(callee
, proto
);
5579 // We won't encounter a lazy proto, because |callee| is guaranteed to be a
5580 // JSFunction and only proxy objects can have a lazy proto.
5581 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto
) == 1);
5583 Label proxyCheckDone
;
5584 masm
.branchPtr(Assembler::NotEqual
, proto
, ImmWord(1), &proxyCheckDone
);
5585 masm
.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun");
5586 masm
.bind(&proxyCheckDone
);
5589 Label nullProto
, done
;
5590 masm
.branchPtr(Assembler::Equal
, proto
, ImmWord(0), &nullProto
);
5592 // Box prototype and return
5593 masm
.tagValue(JSVAL_TYPE_OBJECT
, proto
, R1
);
5596 masm
.bind(&nullProto
);
5597 masm
.moveValue(NullValue(), R1
);
5604 template <typename Handler
>
5605 bool BaselineCodeGen
<Handler
>::emit_Arguments() {
5608 MOZ_ASSERT_IF(handler
.maybeScript(), handler
.maybeScript()->needsArgsObj());
5612 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
5613 pushArg(R0
.scratchReg());
5615 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, MutableHandleValue
);
5616 if (!callVM
<Fn
, jit::NewArgumentsObject
>()) {
5624 template <typename Handler
>
5625 bool BaselineCodeGen
<Handler
>::emit_Rest() {
5628 if (!emitNextIC()) {
5632 // Mark R0 as pushed stack value.
5637 template <typename Handler
>
5638 bool BaselineCodeGen
<Handler
>::emit_Generator() {
5639 frame
.assertStackDepth(0);
5641 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
5644 pushArg(R0
.scratchReg());
5646 using Fn
= JSObject
* (*)(JSContext
*, BaselineFrame
*);
5647 if (!callVM
<Fn
, jit::CreateGeneratorFromFrame
>()) {
5651 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5656 template <typename Handler
>
5657 bool BaselineCodeGen
<Handler
>::emitSuspend(JSOp op
) {
5658 MOZ_ASSERT(op
== JSOp::InitialYield
|| op
== JSOp::Yield
||
5661 // Load the generator object in R2, but leave the return value on the
5662 // expression stack.
5663 Register genObj
= R2
.scratchReg();
5664 if (op
== JSOp::InitialYield
) {
5665 // Generator and return value are one and the same.
5667 frame
.assertStackDepth(1);
5668 masm
.unboxObject(frame
.addressOfStackValue(-1), genObj
);
5670 frame
.popRegsAndSync(1);
5671 masm
.unboxObject(R0
, genObj
);
5674 if (frame
.hasKnownStackDepth(1) && !handler
.canHaveFixedSlots()) {
5675 // If the expression stack is empty, we can inline the Yield. Note that this
5676 // branch is never taken for the interpreter because it doesn't know static
5678 MOZ_ASSERT_IF(op
== JSOp::InitialYield
&& handler
.maybePC(),
5679 GET_RESUMEINDEX(handler
.maybePC()) == 0);
5680 Address
resumeIndexSlot(genObj
,
5681 AbstractGeneratorObject::offsetOfResumeIndexSlot());
5682 Register temp
= R1
.scratchReg();
5683 if (op
== JSOp::InitialYield
) {
5684 masm
.storeValue(Int32Value(0), resumeIndexSlot
);
5686 jsbytecode
* pc
= handler
.maybePC();
5687 MOZ_ASSERT(pc
, "compiler-only code never has a null pc");
5688 masm
.move32(Imm32(GET_RESUMEINDEX(pc
)), temp
);
5689 masm
.storeValue(JSVAL_TYPE_INT32
, temp
, resumeIndexSlot
);
5692 Register envObj
= R0
.scratchReg();
5693 Address
envChainSlot(
5694 genObj
, AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5695 masm
.loadPtr(frame
.addressOfEnvironmentChain(), envObj
);
5696 masm
.guardedCallPreBarrierAnyZone(envChainSlot
, MIRType::Value
, temp
);
5697 masm
.storeValue(JSVAL_TYPE_OBJECT
, envObj
, envChainSlot
);
5700 masm
.branchPtrInNurseryChunk(Assembler::Equal
, genObj
, temp
, &skipBarrier
);
5701 masm
.branchPtrInNurseryChunk(Assembler::NotEqual
, envObj
, temp
,
5703 MOZ_ASSERT(genObj
== R2
.scratchReg());
5704 masm
.call(&postBarrierSlot_
);
5705 masm
.bind(&skipBarrier
);
5707 masm
.loadBaselineFramePtr(FramePointer
, R1
.scratchReg());
5708 computeFrameSize(R0
.scratchReg());
5711 pushBytecodePCArg();
5712 pushArg(R0
.scratchReg());
5713 pushArg(R1
.scratchReg());
5716 using Fn
= bool (*)(JSContext
*, HandleObject
, BaselineFrame
*, uint32_t,
5718 if (!callVM
<Fn
, jit::NormalSuspend
>()) {
5723 masm
.loadValue(frame
.addressOfStackValue(-1), JSReturnOperand
);
5724 if (!emitReturn()) {
5728 // Three values are pushed onto the stack when resuming the generator,
5729 // replacing the one slot that holds the return value.
5730 frame
.incStackDepth(2);
5734 template <typename Handler
>
5735 bool BaselineCodeGen
<Handler
>::emit_InitialYield() {
5736 return emitSuspend(JSOp::InitialYield
);
5739 template <typename Handler
>
5740 bool BaselineCodeGen
<Handler
>::emit_Yield() {
5741 return emitSuspend(JSOp::Yield
);
5744 template <typename Handler
>
5745 bool BaselineCodeGen
<Handler
>::emit_Await() {
5746 return emitSuspend(JSOp::Await
);
5750 template <typename F
>
5751 bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(
5752 const F
& ifDebuggee
, Register
) {
5753 if (handler
.compileDebugInstrumentation()) {
5754 return ifDebuggee();
5760 template <typename F
>
5761 bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation(
5762 const F
& ifDebuggee
, Register scratch
) {
5763 // Note that we can't use emitDebugInstrumentation here because the frame's
5764 // DEBUGGEE flag hasn't been initialized yet.
5766 // If the current Realm is not a debuggee we're done.
5768 CodeOffset toggleOffset
= masm
.toggledJump(&done
);
5769 if (!handler
.addDebugInstrumentationOffset(cx
, toggleOffset
)) {
5772 masm
.loadPtr(AbsoluteAddress(cx
->addressOfRealm()), scratch
);
5773 masm
.branchTest32(Assembler::Zero
,
5774 Address(scratch
, Realm::offsetOfDebugModeBits()),
5775 Imm32(Realm::debugModeIsDebuggeeBit()), &done
);
5777 if (!ifDebuggee()) {
5785 template <typename Handler
>
5786 bool BaselineCodeGen
<Handler
>::emit_AfterYield() {
5787 if (!emit_JumpTarget()) {
5791 auto ifDebuggee
= [this]() {
5792 frame
.assertSyncedStack();
5793 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
5795 pushArg(R0
.scratchReg());
5797 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugAfterYield
;
5799 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
5800 if (!callVM
<Fn
, jit::DebugAfterYield
>(kind
)) {
5806 return emitAfterYieldDebugInstrumentation(ifDebuggee
, R0
.scratchReg());
5809 template <typename Handler
>
5810 bool BaselineCodeGen
<Handler
>::emit_FinalYieldRval() {
5811 // Store generator in R0.
5812 frame
.popRegsAndSync(1);
5813 masm
.unboxObject(R0
, R0
.scratchReg());
5816 pushBytecodePCArg();
5817 pushArg(R0
.scratchReg());
5819 using Fn
= bool (*)(JSContext
*, HandleObject
, const jsbytecode
*);
5820 if (!callVM
<Fn
, jit::FinalSuspend
>()) {
5824 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
5825 return emitReturn();
5829 void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() {
5830 TrampolinePtr code
=
5831 cx
->runtime()->jitRuntime()->baselineInterpreter().interpretOpAddr();
5836 void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() {
5837 masm
.jump(handler
.interpretOpLabel());
5840 template <typename Handler
>
5841 bool BaselineCodeGen
<Handler
>::emitEnterGeneratorCode(Register script
,
5842 Register resumeIndex
,
5844 // Resume in either the BaselineScript (if present) or Baseline Interpreter.
5846 static_assert(BaselineDisabledScript
== 0x1,
5847 "Comparison below requires specific sentinel encoding");
5849 // Initialize the icScript slot in the baseline frame.
5850 masm
.loadJitScript(script
, scratch
);
5851 masm
.computeEffectiveAddress(Address(scratch
, JitScript::offsetOfICScript()),
5853 Address
icScriptAddr(FramePointer
, BaselineFrame::reverseOffsetOfICScript());
5854 masm
.storePtr(scratch
, icScriptAddr
);
5856 Label noBaselineScript
;
5857 masm
.loadJitScript(script
, scratch
);
5858 masm
.loadPtr(Address(scratch
, JitScript::offsetOfBaselineScript()), scratch
);
5859 masm
.branchPtr(Assembler::BelowOrEqual
, scratch
,
5860 ImmPtr(BaselineDisabledScriptPtr
), &noBaselineScript
);
5862 masm
.load32(Address(scratch
, BaselineScript::offsetOfResumeEntriesOffset()),
5864 masm
.addPtr(scratch
, script
);
5866 BaseIndex(script
, resumeIndex
, ScaleFromElemWidth(sizeof(uintptr_t))),
5870 masm
.bind(&noBaselineScript
);
5872 // Initialize interpreter frame fields.
5873 Address
flagsAddr(FramePointer
, BaselineFrame::reverseOffsetOfFlags());
5874 Address
scriptAddr(FramePointer
,
5875 BaselineFrame::reverseOffsetOfInterpreterScript());
5876 masm
.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER
), flagsAddr
);
5877 masm
.storePtr(script
, scriptAddr
);
5879 // Initialize pc and jump to it.
5880 emitInterpJumpToResumeEntry(script
, resumeIndex
, scratch
);
5884 template <typename Handler
>
5885 bool BaselineCodeGen
<Handler
>::emit_Resume() {
5887 masm
.assertStackAlignment(sizeof(Value
), 0);
5889 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
5890 MOZ_ASSERT(!regs
.has(FramePointer
));
5891 if (HasInterpreterPCReg()) {
5892 regs
.take(InterpreterPCReg
);
5895 saveInterpreterPCReg();
5897 // Load generator object.
5898 Register genObj
= regs
.takeAny();
5899 masm
.unboxObject(frame
.addressOfStackValue(-3), genObj
);
5902 Register callee
= regs
.takeAny();
5904 Address(genObj
, AbstractGeneratorObject::offsetOfCalleeSlot()), callee
);
5906 // Save a pointer to the JSOp::Resume operand stack Values.
5907 Register callerStackPtr
= regs
.takeAny();
5908 masm
.computeEffectiveAddress(frame
.addressOfStackValue(-1), callerStackPtr
);
5910 // Branch to |interpret| to resume the generator in the C++ interpreter if the
5911 // script does not have a JitScript.
5913 Register scratch1
= regs
.takeAny();
5914 masm
.loadPrivate(Address(callee
, JSFunction::offsetOfJitInfoOrScript()),
5916 masm
.branchIfScriptHasNoJitScript(scratch1
, &interpret
);
5918 // Push |undefined| for all formals.
5919 Register scratch2
= regs
.takeAny();
5920 Label loop
, loopDone
;
5921 masm
.loadFunctionArgCount(callee
, scratch2
);
5923 static_assert(sizeof(Value
) == 8);
5924 #ifndef JS_CODEGEN_NONE
5925 static_assert(JitStackAlignment
== 16 || JitStackAlignment
== 8);
5927 // If JitStackValueAlignment == 1, then we were already correctly aligned on
5928 // entry, as guaranteed by the assertStackAlignment at the entry to this
5930 if (JitStackValueAlignment
> 1) {
5931 Register alignment
= regs
.takeAny();
5932 masm
.moveStackPtrTo(alignment
);
5933 masm
.alignJitStackBasedOnNArgs(scratch2
, false);
5935 // Compute alignment adjustment.
5936 masm
.subStackPtrFrom(alignment
);
5938 // Some code, like BaselineFrame::trace, will inspect the whole range of
5939 // the stack frame. In order to ensure that garbage data left behind from
5940 // previous activations doesn't confuse other machinery, we zero out the
5942 Label alignmentZero
;
5943 masm
.branchPtr(Assembler::Equal
, alignment
, ImmWord(0), &alignmentZero
);
5945 // Since we know prior to the stack alignment that the stack was 8 byte
5946 // aligned, and JitStackAlignment is 8 or 16 bytes, if we are doing an
5947 // alignment then we -must- have aligned by subtracting 8 bytes from
5948 // the stack pointer.
5950 // So we can freely store a valid double here.
5951 masm
.storeValue(DoubleValue(0), Address(masm
.getStackPointer(), 0));
5952 masm
.bind(&alignmentZero
);
5955 masm
.branchTest32(Assembler::Zero
, scratch2
, scratch2
, &loopDone
);
5958 masm
.pushValue(UndefinedValue());
5959 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch2
, &loop
);
5961 masm
.bind(&loopDone
);
5963 // Push |undefined| for |this|.
5964 masm
.pushValue(UndefinedValue());
5967 // Update BaselineFrame debugFrameSize field.
5968 masm
.mov(FramePointer
, scratch2
);
5969 masm
.subStackPtrFrom(scratch2
);
5970 masm
.store32(scratch2
, frame
.addressOfDebugFrameSize());
5973 masm
.PushCalleeToken(callee
, /* constructing = */ false);
5974 masm
.pushFrameDescriptorForJitCall(FrameType::BaselineJS
, /* argc = */ 0);
5976 // PushCalleeToken bumped framePushed. Reset it.
5977 MOZ_ASSERT(masm
.framePushed() == sizeof(uintptr_t));
5978 masm
.setFramePushed(0);
5982 // Push a fake return address on the stack. We will resume here when the
5983 // generator returns.
5984 Label genStart
, returnTarget
;
5985 #ifdef JS_USE_LINK_REGISTER
5986 masm
.call(&genStart
);
5988 masm
.callAndPushReturnAddress(&genStart
);
5991 // Record the return address so the return offset -> pc mapping works.
5992 if (!handler
.recordCallRetAddr(cx
, RetAddrEntry::Kind::IC
,
5993 masm
.currentOffset())) {
5997 masm
.jump(&returnTarget
);
5998 masm
.bind(&genStart
);
5999 #ifdef JS_USE_LINK_REGISTER
6000 masm
.pushReturnAddress();
6003 // Construct BaselineFrame.
6004 masm
.push(FramePointer
);
6005 masm
.moveStackPtrTo(FramePointer
);
6007 // If profiler instrumentation is on, update lastProfilingFrame on
6008 // current JitActivation
6010 Register scratchReg
= scratch2
;
6012 AbsoluteAddress
addressOfEnabled(
6013 cx
->runtime()->geckoProfiler().addressOfEnabled());
6014 masm
.branch32(Assembler::Equal
, addressOfEnabled
, Imm32(0), &skip
);
6015 masm
.loadJSContext(scratchReg
);
6016 masm
.loadPtr(Address(scratchReg
, JSContext::offsetOfProfilingActivation()),
6020 Address(scratchReg
, JitActivation::offsetOfLastProfilingFrame()));
6024 masm
.subFromStackPtr(Imm32(BaselineFrame::Size()));
6025 masm
.assertStackAlignment(sizeof(Value
), 0);
6027 // Store flags and env chain.
6028 masm
.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV
), frame
.addressOfFlags());
6030 Address(genObj
, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
6032 masm
.storePtr(scratch2
, frame
.addressOfEnvironmentChain());
6034 // Store the arguments object if there is one.
6036 Address
argsObjSlot(genObj
, AbstractGeneratorObject::offsetOfArgsObjSlot());
6037 masm
.fallibleUnboxObject(argsObjSlot
, scratch2
, &noArgsObj
);
6039 masm
.storePtr(scratch2
, frame
.addressOfArgsObj());
6040 masm
.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ
), frame
.addressOfFlags());
6042 masm
.bind(&noArgsObj
);
6044 // Push locals and expression slots if needed.
6045 Label noStackStorage
;
6046 Address
stackStorageSlot(genObj
,
6047 AbstractGeneratorObject::offsetOfStackStorageSlot());
6048 masm
.fallibleUnboxObject(stackStorageSlot
, scratch2
, &noStackStorage
);
6050 Register initLength
= regs
.takeAny();
6051 masm
.loadPtr(Address(scratch2
, NativeObject::offsetOfElements()), scratch2
);
6052 masm
.load32(Address(scratch2
, ObjectElements::offsetOfInitializedLength()),
6056 Address(scratch2
, ObjectElements::offsetOfInitializedLength()));
6058 Label loop
, loopDone
;
6059 masm
.branchTest32(Assembler::Zero
, initLength
, initLength
, &loopDone
);
6062 masm
.pushValue(Address(scratch2
, 0));
6063 masm
.guardedCallPreBarrierAnyZone(Address(scratch2
, 0), MIRType::Value
,
6065 masm
.addPtr(Imm32(sizeof(Value
)), scratch2
);
6066 masm
.branchSub32(Assembler::NonZero
, Imm32(1), initLength
, &loop
);
6068 masm
.bind(&loopDone
);
6069 regs
.add(initLength
);
6072 masm
.bind(&noStackStorage
);
6074 // Push arg, generator, resumeKind stack Values, in that order.
6075 masm
.pushValue(Address(callerStackPtr
, sizeof(Value
)));
6076 masm
.pushValue(JSVAL_TYPE_OBJECT
, genObj
);
6077 masm
.pushValue(Address(callerStackPtr
, 0));
6079 masm
.switchToObjectRealm(genObj
, scratch2
);
6081 // Load script in scratch1.
6083 Address(genObj
, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1
);
6084 masm
.loadPrivate(Address(scratch1
, JSFunction::offsetOfJitInfoOrScript()),
6087 // Load resume index in scratch2 and mark generator as running.
6088 Address
resumeIndexSlot(genObj
,
6089 AbstractGeneratorObject::offsetOfResumeIndexSlot());
6090 masm
.unboxInt32(resumeIndexSlot
, scratch2
);
6091 masm
.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING
),
6094 if (!emitEnterGeneratorCode(scratch1
, scratch2
, regs
.getAny())) {
6098 // Call into the VM to resume the generator in the C++ interpreter if there's
6100 masm
.bind(&interpret
);
6104 pushArg(callerStackPtr
);
6107 using Fn
= bool (*)(JSContext
*, HandleObject
, Value
*, MutableHandleValue
);
6108 if (!callVM
<Fn
, jit::InterpretResume
>()) {
6112 masm
.bind(&returnTarget
);
6114 // Restore Stack pointer
6115 masm
.computeEffectiveAddress(frame
.addressOfStackValue(-1),
6116 masm
.getStackPointer());
6118 // After the generator returns, we restore the stack pointer, switch back to
6119 // the current realm, push the return value, and we're done.
6120 if (JSScript
* script
= handler
.maybeScript()) {
6121 masm
.switchToRealm(script
->realm(), R2
.scratchReg());
6123 masm
.switchToBaselineFrameRealm(R2
.scratchReg());
6125 restoreInterpreterPCReg();
6131 template <typename Handler
>
6132 bool BaselineCodeGen
<Handler
>::emit_CheckResumeKind() {
6133 // Load resumeKind in R1, generator in R0.
6134 frame
.popRegsAndSync(2);
6138 masm
.branchTestInt32(Assembler::Equal
, R1
, &ok
);
6139 masm
.assumeUnreachable("Expected int32 resumeKind");
6143 // If resumeKind is 'next' we don't have to do anything.
6145 masm
.unboxInt32(R1
, R1
.scratchReg());
6146 masm
.branch32(Assembler::Equal
, R1
.scratchReg(),
6147 Imm32(int32_t(GeneratorResumeKind::Next
)), &done
);
6151 pushArg(R1
.scratchReg()); // resumeKind
6153 masm
.loadValue(frame
.addressOfStackValue(-1), R2
);
6156 masm
.unboxObject(R0
, R0
.scratchReg());
6157 pushArg(R0
.scratchReg()); // genObj
6159 masm
.loadBaselineFramePtr(FramePointer
, R2
.scratchReg());
6160 pushArg(R2
.scratchReg()); // frame
6162 using Fn
= bool (*)(JSContext
*, BaselineFrame
*,
6163 Handle
<AbstractGeneratorObject
*>, HandleValue
, int32_t);
6164 if (!callVM
<Fn
, jit::GeneratorThrowOrReturn
>()) {
6173 bool BaselineCompilerCodeGen::emit_ResumeKind() {
6174 GeneratorResumeKind resumeKind
= ResumeKindFromPC(handler
.pc());
6175 frame
.push(Int32Value(int32_t(resumeKind
)));
6180 bool BaselineInterpreterCodeGen::emit_ResumeKind() {
6181 LoadUint8Operand(masm
, R0
.scratchReg());
6182 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
6187 template <typename Handler
>
6188 bool BaselineCodeGen
<Handler
>::emit_DebugCheckSelfHosted() {
6192 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6197 using Fn
= bool (*)(JSContext
*, HandleValue
);
6198 if (!callVM
<Fn
, js::Debug_CheckSelfHosted
>()) {
6205 template <typename Handler
>
6206 bool BaselineCodeGen
<Handler
>::emit_IsConstructing() {
6207 frame
.push(MagicValue(JS_IS_CONSTRUCTING
));
6212 bool BaselineCompilerCodeGen::emit_JumpTarget() {
6213 MaybeIncrementCodeCoverageCounter(masm
, handler
.script(), handler
.pc());
6218 bool BaselineInterpreterCodeGen::emit_JumpTarget() {
6219 Register scratch1
= R0
.scratchReg();
6220 Register scratch2
= R1
.scratchReg();
6223 CodeOffset toggleOffset
= masm
.toggledJump(&skipCoverage
);
6224 masm
.call(handler
.codeCoverageAtPCLabel());
6225 masm
.bind(&skipCoverage
);
6226 if (!handler
.codeCoverageOffsets().append(toggleOffset
.offset())) {
6230 // Load icIndex in scratch1.
6231 LoadInt32Operand(masm
, scratch1
);
6233 // Compute ICEntry* and store to frame->interpreterICEntry.
6234 masm
.loadPtr(frame
.addressOfICScript(), scratch2
);
6235 static_assert(sizeof(ICEntry
) == sizeof(uintptr_t));
6236 masm
.computeEffectiveAddress(BaseIndex(scratch2
, scratch1
, ScalePointer
,
6237 ICScript::offsetOfICEntries()),
6239 masm
.storePtr(scratch2
, frame
.addressOfInterpreterICEntry());
6243 template <typename Handler
>
6244 bool BaselineCodeGen
<Handler
>::emit_CheckClassHeritage() {
6247 // Leave the heritage value on the stack.
6248 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6253 using Fn
= bool (*)(JSContext
*, HandleValue
);
6254 return callVM
<Fn
, js::CheckClassHeritageOperation
>();
6257 template <typename Handler
>
6258 bool BaselineCodeGen
<Handler
>::emit_InitHomeObject() {
6259 // Load HomeObject in R0.
6260 frame
.popRegsAndSync(1);
6262 // Load function off stack
6263 Register func
= R2
.scratchReg();
6264 masm
.unboxObject(frame
.addressOfStackValue(-1), func
);
6266 masm
.assertFunctionIsExtended(func
);
6268 // Set HOMEOBJECT_SLOT
6269 Register temp
= R1
.scratchReg();
6270 Address
addr(func
, FunctionExtended::offsetOfMethodHomeObjectSlot());
6271 masm
.guardedCallPreBarrierAnyZone(addr
, MIRType::Value
, temp
);
6272 masm
.storeValue(R0
, addr
);
6275 masm
.branchPtrInNurseryChunk(Assembler::Equal
, func
, temp
, &skipBarrier
);
6276 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
6277 masm
.call(&postBarrierSlot_
);
6278 masm
.bind(&skipBarrier
);
6284 bool BaselineCompilerCodeGen::emit_BuiltinObject() {
6285 // Built-in objects are constants for a given global.
6286 auto kind
= BuiltinObjectKind(GET_UINT8(handler
.pc()));
6287 JSObject
* builtin
= BuiltinObjectOperation(cx
, kind
);
6291 frame
.push(ObjectValue(*builtin
));
6296 bool BaselineInterpreterCodeGen::emit_BuiltinObject() {
6299 pushUint8BytecodeOperandArg(R0
.scratchReg());
6301 using Fn
= JSObject
* (*)(JSContext
*, BuiltinObjectKind
);
6302 if (!callVM
<Fn
, BuiltinObjectOperation
>()) {
6306 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6311 template <typename Handler
>
6312 bool BaselineCodeGen
<Handler
>::emit_ObjWithProto() {
6315 // Leave the proto value on the stack for the decompiler
6316 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6321 using Fn
= PlainObject
* (*)(JSContext
*, HandleValue
);
6322 if (!callVM
<Fn
, js::ObjectWithProtoOperation
>()) {
6326 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6332 template <typename Handler
>
6333 bool BaselineCodeGen
<Handler
>::emit_FunWithProto() {
6334 frame
.popRegsAndSync(1);
6336 masm
.unboxObject(R0
, R0
.scratchReg());
6337 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R1
.scratchReg());
6340 pushArg(R0
.scratchReg());
6341 pushArg(R1
.scratchReg());
6342 pushScriptGCThingArg(ScriptGCThingType::Function
, R0
.scratchReg(),
6346 JSObject
* (*)(JSContext
*, HandleFunction
, HandleObject
, HandleObject
);
6347 if (!callVM
<Fn
, js::FunWithProtoOperation
>()) {
6351 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6357 bool BaselineCompilerCodeGen::emit_ImportMeta() {
6358 // Note: this is like the interpreter implementation, but optimized a bit by
6359 // calling GetModuleObjectForScript at compile-time.
6361 Rooted
<ModuleObject
*> module(cx
, GetModuleObjectForScript(handler
.script()));
6366 pushArg(ImmGCPtr(module
));
6368 using Fn
= JSObject
* (*)(JSContext
*, HandleObject
);
6369 if (!callVM
<Fn
, js::GetOrCreateModuleMetaObject
>()) {
6373 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6379 bool BaselineInterpreterCodeGen::emit_ImportMeta() {
6384 using Fn
= JSObject
* (*)(JSContext
*, HandleScript
);
6385 if (!callVM
<Fn
, ImportMetaOperation
>()) {
6389 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6394 template <typename Handler
>
6395 bool BaselineCodeGen
<Handler
>::emit_DynamicImport() {
6396 // Put specifier into R0 and object value into R1
6397 frame
.popRegsAndSync(2);
6404 using Fn
= JSObject
* (*)(JSContext
*, HandleScript
, HandleValue
, HandleValue
);
6405 if (!callVM
<Fn
, js::StartDynamicModuleImport
>()) {
6409 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6415 bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
6416 // Caller is responsible for checking script->hasForceInterpreterOp().
6417 MOZ_CRASH("JSOp::ForceInterpreter in baseline");
6421 bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
6422 masm
.assumeUnreachable("JSOp::ForceInterpreter");
6426 template <typename Handler
>
6427 bool BaselineCodeGen
<Handler
>::emitPrologue() {
6428 AutoCreatedBy
acb(masm
, "BaselineCodeGen<Handler>::emitPrologue");
6430 #ifdef JS_USE_LINK_REGISTER
6431 // Push link register from generateEnterJIT()'s BLR.
6432 masm
.pushReturnAddress();
6435 masm
.push(FramePointer
);
6436 masm
.moveStackPtrTo(FramePointer
);
6438 masm
.checkStackAlignment();
6440 emitProfilerEnterFrame();
6442 masm
.subFromStackPtr(Imm32(BaselineFrame::Size()));
6444 // Initialize BaselineFrame. Also handles env chain pre-initialization (in
6445 // case GC gets run during stack check). For global and eval scripts, the env
6446 // chain is in R1. For function scripts, the env chain is in the callee.
6447 emitInitFrameFields(R1
.scratchReg());
6449 // When compiling with Debugger instrumentation, set the debuggeeness of
6450 // the frame before any operation that can call into the VM.
6451 if (!emitIsDebuggeeCheck()) {
6455 // Initialize the env chain before any operation that may call into the VM and
6457 if (!initEnvironmentChain()) {
6461 // Check for overrecursion before initializing locals.
6462 if (!emitStackCheck()) {
6466 emitInitializeLocals();
6468 // Ion prologue bailouts will enter here in the Baseline Interpreter.
6469 masm
.bind(&bailoutPrologue_
);
6471 frame
.assertSyncedStack();
6473 if (JSScript
* script
= handler
.maybeScript()) {
6474 masm
.debugAssertContextRealm(script
->realm(), R1
.scratchReg());
6477 if (!emitDebugPrologue()) {
6481 if (!emitHandleCodeCoverageAtPrologue()) {
6485 if (!emitWarmUpCounterIncrement()) {
6489 warmUpCheckPrologueOffset_
= CodeOffset(masm
.currentOffset());
6494 template <typename Handler
>
6495 bool BaselineCodeGen
<Handler
>::emitEpilogue() {
6496 AutoCreatedBy
acb(masm
, "BaselineCodeGen<Handler>::emitEpilogue");
6498 masm
.bind(&return_
);
6500 if (!handler
.shouldEmitDebugEpilogueAtReturnOp()) {
6501 if (!emitDebugEpilogue()) {
6506 emitProfilerExitFrame();
6508 masm
.moveToStackPtr(FramePointer
);
6509 masm
.pop(FramePointer
);
6515 MethodStatus
BaselineCompiler::emitBody() {
6516 AutoCreatedBy
acb(masm
, "BaselineCompiler::emitBody");
6518 JSScript
* script
= handler
.script();
6519 MOZ_ASSERT(handler
.pc() == script
->code());
6521 mozilla::DebugOnly
<jsbytecode
*> prevpc
= handler
.pc();
6524 JSOp op
= JSOp(*handler
.pc());
6525 JitSpew(JitSpew_BaselineOp
, "Compiling op @ %d: %s",
6526 int(script
->pcToOffset(handler
.pc())), CodeName(op
));
6528 BytecodeInfo
* info
= handler
.analysis().maybeInfo(handler
.pc());
6530 // Skip unreachable ops.
6532 // Test if last instructions and stop emitting in that case.
6533 handler
.moveToNextPC();
6534 if (handler
.pc() >= script
->codeEnd()) {
6538 prevpc
= handler
.pc();
6542 if (info
->jumpTarget
) {
6543 // Fully sync the stack if there are incoming jumps.
6545 frame
.setStackDepth(info
->stackDepth
);
6546 masm
.bind(handler
.labelOf(handler
.pc()));
6547 } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
6548 // Also fully sync the stack if the debugger is enabled.
6551 // At the beginning of any op, at most the top 2 stack-values are
6553 if (frame
.stackDepth() > 2) {
6558 frame
.assertValidState(*info
);
6560 // If the script has a resume offset for this pc we need to keep track of
6561 // the native code offset.
6562 if (info
->hasResumeOffset
) {
6563 frame
.assertSyncedStack();
6564 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
6565 uint32_t nativeOffset
= masm
.currentOffset();
6566 if (!resumeOffsetEntries_
.emplaceBack(pcOffset
, nativeOffset
)) {
6567 ReportOutOfMemory(cx
);
6568 return Method_Error
;
6572 // Emit traps for breakpoints and step mode.
6573 if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
6574 return Method_Error
;
6577 perfSpewer_
.recordInstruction(cx
, masm
, handler
.pc(), frame
);
6579 #define EMIT_OP(OP, ...) \
6581 AutoCreatedBy acb(masm, "op=" #OP); \
6582 if (MOZ_UNLIKELY(!this->emit_##OP())) return Method_Error; \
6586 FOR_EACH_OPCODE(EMIT_OP
)
6588 MOZ_CRASH("Unexpected op");
6593 MOZ_ASSERT(masm
.framePushed() == 0);
6595 // Test if last instructions and stop emitting in that case.
6596 handler
.moveToNextPC();
6597 if (handler
.pc() >= script
->codeEnd()) {
6602 prevpc
= handler
.pc();
6606 MOZ_ASSERT(JSOp(*prevpc
) == JSOp::RetRval
|| JSOp(*prevpc
) == JSOp::Return
);
6607 return Method_Compiled
;
6610 bool BaselineInterpreterGenerator::emitDebugTrap() {
6611 CodeOffset offset
= masm
.nopPatchableToCall();
6612 if (!debugTrapOffsets_
.append(offset
.offset())) {
6613 ReportOutOfMemory(cx
);
6620 // Register holding the bytecode pc during dispatch. This exists so the debug
6621 // trap handler can reload the pc into this register when it's done.
6622 static constexpr Register InterpreterPCRegAtDispatch
=
6623 HasInterpreterPCReg() ? InterpreterPCReg
: R0
.scratchReg();
6625 bool BaselineInterpreterGenerator::emitInterpreterLoop() {
6626 AutoCreatedBy
acb(masm
, "BaselineInterpreterGenerator::emitInterpreterLoop");
6628 Register scratch1
= R0
.scratchReg();
6629 Register scratch2
= R1
.scratchReg();
6631 // Entry point for interpreting a bytecode op. No registers are live except
6632 // for InterpreterPCReg.
6633 masm
.bind(handler
.interpretOpWithPCRegLabel());
6635 // Emit a patchable call for debugger breakpoints/stepping.
6636 if (!emitDebugTrap()) {
6639 Label interpretOpAfterDebugTrap
;
6640 masm
.bind(&interpretOpAfterDebugTrap
);
6642 // Load pc, bytecode op.
6643 Register pcReg
= LoadBytecodePC(masm
, scratch1
);
6644 masm
.load8ZeroExtend(Address(pcReg
, 0), scratch1
);
6646 // Jump to table[op].
6648 CodeOffset label
= masm
.moveNearAddressWithPatch(scratch2
);
6649 if (!tableLabels_
.append(label
)) {
6652 BaseIndex
pointer(scratch2
, scratch1
, ScalePointer
);
6653 masm
.branchToComputedAddress(pointer
);
6656 // At the end of each op, emit code to bump the pc and jump to the
6657 // next op (this is also known as a threaded interpreter).
6658 auto opEpilogue
= [&](JSOp op
, size_t opLength
) -> bool {
6659 MOZ_ASSERT(masm
.framePushed() == 0);
6661 if (!BytecodeFallsThrough(op
)) {
6663 masm
.assumeUnreachable("unexpected fall through");
6667 // Bump frame->interpreterICEntry if needed.
6668 if (BytecodeOpHasIC(op
)) {
6669 frame
.bumpInterpreterICEntry();
6672 // Bump bytecode PC.
6673 if (HasInterpreterPCReg()) {
6674 MOZ_ASSERT(InterpreterPCRegAtDispatch
== InterpreterPCReg
);
6675 masm
.addPtr(Imm32(opLength
), InterpreterPCReg
);
6677 MOZ_ASSERT(InterpreterPCRegAtDispatch
== scratch1
);
6678 masm
.loadPtr(frame
.addressOfInterpreterPC(), InterpreterPCRegAtDispatch
);
6679 masm
.addPtr(Imm32(opLength
), InterpreterPCRegAtDispatch
);
6680 masm
.storePtr(InterpreterPCRegAtDispatch
, frame
.addressOfInterpreterPC());
6683 if (!emitDebugTrap()) {
6687 // Load the opcode, jump to table[op].
6688 masm
.load8ZeroExtend(Address(InterpreterPCRegAtDispatch
, 0), scratch1
);
6689 CodeOffset label
= masm
.moveNearAddressWithPatch(scratch2
);
6690 if (!tableLabels_
.append(label
)) {
6693 BaseIndex
pointer(scratch2
, scratch1
, ScalePointer
);
6694 masm
.branchToComputedAddress(pointer
);
6698 // Emit code for each bytecode op.
6699 Label opLabels
[JSOP_LIMIT
];
6700 #define EMIT_OP(OP, ...) \
6702 AutoCreatedBy acb(masm, "op=" #OP); \
6703 perfSpewer_.recordOffset(masm, JSOp::OP); \
6704 masm.bind(&opLabels[uint8_t(JSOp::OP)]); \
6705 handler.setCurrentOp(JSOp::OP); \
6706 if (!this->emit_##OP()) { \
6709 if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) { \
6712 handler.resetCurrentOp(); \
6714 FOR_EACH_OPCODE(EMIT_OP
)
6717 // External entry point to start interpreting bytecode ops. This is used for
6718 // things like exception handling and OSR. DebugModeOSR patches JIT frames to
6719 // return here from the DebugTrapHandler.
6720 masm
.bind(handler
.interpretOpLabel());
6721 interpretOpOffset_
= masm
.currentOffset();
6722 restoreInterpreterPCReg();
6723 masm
.jump(handler
.interpretOpWithPCRegLabel());
6725 // Second external entry point: this skips the debug trap for the first op
6726 // and is used by OSR.
6727 interpretOpNoDebugTrapOffset_
= masm
.currentOffset();
6728 restoreInterpreterPCReg();
6729 masm
.jump(&interpretOpAfterDebugTrap
);
6731 // External entry point for Ion prologue bailouts.
6732 bailoutPrologueOffset_
= CodeOffset(masm
.currentOffset());
6733 restoreInterpreterPCReg();
6734 masm
.jump(&bailoutPrologue_
);
6736 // Emit debug trap handler code (target of patchable call instructions). This
6737 // is just a tail call to the debug trap handler trampoline code.
6739 JitRuntime
* jrt
= cx
->runtime()->jitRuntime();
6740 JitCode
* handlerCode
=
6741 jrt
->debugTrapHandler(cx
, DebugTrapHandlerKind::Interpreter
);
6746 debugTrapHandlerOffset_
= masm
.currentOffset();
6747 masm
.jump(handlerCode
);
6751 masm
.haltingAlign(sizeof(void*));
6753 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
6754 size_t numInstructions
= JSOP_LIMIT
* (sizeof(uintptr_t) / sizeof(uint32_t));
6755 AutoForbidPoolsAndNops
afp(&masm
, numInstructions
);
6758 tableOffset_
= masm
.currentOffset();
6760 for (size_t i
= 0; i
< JSOP_LIMIT
; i
++) {
6761 const Label
& opLabel
= opLabels
[i
];
6762 MOZ_ASSERT(opLabel
.bound());
6764 masm
.writeCodePointer(&cl
);
6765 cl
.target()->bind(opLabel
.offset());
6766 masm
.addCodeLabel(cl
);
6772 void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
6773 AutoCreatedBy
acb(masm
,
6774 "BaselineInterpreterGenerator::"
6775 "emitOutOfLineCodeCoverageInstrumentation");
6777 masm
.bind(handler
.codeCoverageAtPrologueLabel());
6778 #ifdef JS_USE_LINK_REGISTER
6779 masm
.pushReturnAddress();
6782 saveInterpreterPCReg();
6784 using Fn1
= void (*)(BaselineFrame
* frame
);
6785 masm
.setupUnalignedABICall(R0
.scratchReg());
6786 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
6787 masm
.passABIArg(R0
.scratchReg());
6788 masm
.callWithABI
<Fn1
, HandleCodeCoverageAtPrologue
>();
6790 restoreInterpreterPCReg();
6793 masm
.bind(handler
.codeCoverageAtPCLabel());
6794 #ifdef JS_USE_LINK_REGISTER
6795 masm
.pushReturnAddress();
6798 saveInterpreterPCReg();
6800 using Fn2
= void (*)(BaselineFrame
* frame
, jsbytecode
* pc
);
6801 masm
.setupUnalignedABICall(R0
.scratchReg());
6802 masm
.loadBaselineFramePtr(FramePointer
, R0
.scratchReg());
6803 masm
.passABIArg(R0
.scratchReg());
6804 Register pcReg
= LoadBytecodePC(masm
, R2
.scratchReg());
6805 masm
.passABIArg(pcReg
);
6806 masm
.callWithABI
<Fn2
, HandleCodeCoverageAtPC
>();
6808 restoreInterpreterPCReg();
6812 bool BaselineInterpreterGenerator::generate(BaselineInterpreter
& interpreter
) {
6813 AutoCreatedBy
acb(masm
, "BaselineInterpreterGenerator::generate");
6815 perfSpewer_
.recordOffset(masm
, "Prologue");
6816 if (!emitPrologue()) {
6820 perfSpewer_
.recordOffset(masm
, "InterpreterLoop");
6821 if (!emitInterpreterLoop()) {
6825 perfSpewer_
.recordOffset(masm
, "Epilogue");
6826 if (!emitEpilogue()) {
6830 perfSpewer_
.recordOffset(masm
, "OOLPostBarrierSlot");
6831 if (!emitOutOfLinePostBarrierSlot()) {
6835 perfSpewer_
.recordOffset(masm
, "OOLCodeCoverageInstrumentation");
6836 emitOutOfLineCodeCoverageInstrumentation();
6839 AutoCreatedBy
acb(masm
, "everything_else");
6840 Linker
linker(masm
);
6842 ReportOutOfMemory(cx
);
6846 JitCode
* code
= linker
.newCode(cx
, CodeKind::Other
);
6851 // Register BaselineInterpreter code with the profiler's JitCode table.
6853 auto entry
= MakeJitcodeGlobalEntry
<BaselineInterpreterEntry
>(
6854 cx
, code
, code
->raw(), code
->rawEnd());
6859 JitcodeGlobalTable
* globalTable
=
6860 cx
->runtime()->jitRuntime()->getJitcodeGlobalTable();
6861 if (!globalTable
->addEntry(std::move(entry
))) {
6862 ReportOutOfMemory(cx
);
6866 code
->setHasBytecodeMap();
6869 // Patch loads now that we know the tableswitch base address.
6870 CodeLocationLabel
tableLoc(code
, CodeOffset(tableOffset_
));
6871 for (CodeOffset off
: tableLabels_
) {
6872 MacroAssembler::patchNearAddressMove(CodeLocationLabel(code
, off
),
6876 perfSpewer_
.saveProfile(code
);
6879 vtune::MarkStub(code
, "BaselineInterpreter");
6883 code
, interpretOpOffset_
, interpretOpNoDebugTrapOffset_
,
6884 bailoutPrologueOffset_
.offset(),
6885 profilerEnterFrameToggleOffset_
.offset(),
6886 profilerExitFrameToggleOffset_
.offset(), debugTrapHandlerOffset_
,
6887 std::move(handler
.debugInstrumentationOffsets()),
6888 std::move(debugTrapOffsets_
), std::move(handler
.codeCoverageOffsets()),
6889 std::move(handler
.icReturnOffsets()), handler
.callVMOffsets());
6892 if (cx
->runtime()->geckoProfiler().enabled()) {
6893 interpreter
.toggleProfilerInstrumentation(true);
6896 if (coverage::IsLCovEnabled()) {
6897 interpreter
.toggleCodeCoverageInstrumentationUnchecked(true);
6903 JitCode
* JitRuntime::generateDebugTrapHandler(JSContext
* cx
,
6904 DebugTrapHandlerKind kind
) {
6905 TempAllocator
temp(&cx
->tempLifoAlloc());
6906 StackMacroAssembler
masm(cx
, temp
);
6907 AutoCreatedBy
acb(masm
, "JitRuntime::generateDebugTrapHandler");
6909 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
6910 MOZ_ASSERT(!regs
.has(FramePointer
));
6911 regs
.takeUnchecked(ICStubReg
);
6912 if (HasInterpreterPCReg()) {
6913 regs
.takeUnchecked(InterpreterPCReg
);
6915 #ifdef JS_CODEGEN_ARM
6916 regs
.takeUnchecked(BaselineSecondScratchReg
);
6917 AutoNonDefaultSecondScratchRegister
andssr(masm
, BaselineSecondScratchReg
);
6919 Register scratch1
= regs
.takeAny();
6920 Register scratch2
= regs
.takeAny();
6921 Register scratch3
= regs
.takeAny();
6923 if (kind
== DebugTrapHandlerKind::Interpreter
) {
6924 // The interpreter calls this for every script when debugging, so check if
6925 // the script has any breakpoints or is in step mode before calling into
6927 Label hasDebugScript
;
6928 Address
scriptAddr(FramePointer
,
6929 BaselineFrame::reverseOffsetOfInterpreterScript());
6930 masm
.loadPtr(scriptAddr
, scratch1
);
6931 masm
.branchTest32(Assembler::NonZero
,
6932 Address(scratch1
, JSScript::offsetOfMutableFlags()),
6933 Imm32(int32_t(JSScript::MutableFlags::HasDebugScript
)),
6936 masm
.bind(&hasDebugScript
);
6938 if (HasInterpreterPCReg()) {
6939 // Update frame's bytecode pc because the debugger depends on it.
6940 Address
pcAddr(FramePointer
,
6941 BaselineFrame::reverseOffsetOfInterpreterPC());
6942 masm
.storePtr(InterpreterPCReg
, pcAddr
);
6946 // Load the return address in scratch1.
6947 masm
.loadAbiReturnAddress(scratch1
);
6949 // Load BaselineFrame pointer in scratch2.
6950 masm
.loadBaselineFramePtr(FramePointer
, scratch2
);
6952 // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
6953 // the stub frame has a nullptr ICStub pointer, since this pointer is marked
6955 masm
.movePtr(ImmPtr(nullptr), ICStubReg
);
6956 EmitBaselineEnterStubFrame(masm
, scratch3
);
6958 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, const uint8_t*);
6959 VMFunctionId id
= VMFunctionToId
<Fn
, jit::HandleDebugTrap
>::id
;
6960 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
6962 masm
.push(scratch1
);
6963 masm
.push(scratch2
);
6964 EmitBaselineCallVM(code
, masm
);
6966 EmitBaselineLeaveStubFrame(masm
);
6968 if (kind
== DebugTrapHandlerKind::Interpreter
) {
6969 // We have to reload the bytecode pc register.
6970 Address
pcAddr(FramePointer
, BaselineFrame::reverseOffsetOfInterpreterPC());
6971 masm
.loadPtr(pcAddr
, InterpreterPCRegAtDispatch
);
6975 Linker
linker(masm
);
6976 JitCode
* handlerCode
= linker
.newCode(cx
, CodeKind::Other
);
6981 CollectPerfSpewerJitCodeProfile(handlerCode
, "DebugTrapHandler");
6984 vtune::MarkStub(handlerCode
, "DebugTrapHandler");