1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineCodeGen.h"
9 #include "mozilla/Casting.h"
11 #include "jit/BaselineIC.h"
12 #include "jit/BaselineJIT.h"
13 #include "jit/FixedList.h"
14 #include "jit/IonAnalysis.h"
15 #include "jit/JitcodeMap.h"
16 #include "jit/JitSpewer.h"
17 #include "jit/Linker.h"
19 # include "jit/PerfSpewer.h"
21 #include "jit/SharedICHelpers.h"
22 #include "jit/TrialInlining.h"
23 #include "jit/VMFunctions.h"
24 #include "js/UniquePtr.h"
25 #include "vm/AsyncFunction.h"
26 #include "vm/AsyncIteration.h"
27 #include "vm/BuiltinObjectKind.h"
28 #include "vm/EnvironmentObject.h"
29 #include "vm/FunctionFlags.h" // js::FunctionFlags
30 #include "vm/Interpreter.h"
31 #include "vm/JSFunction.h"
32 #include "vm/TraceLogging.h"
34 # include "vtune/VTuneWrapper.h"
37 #include "debugger/DebugAPI-inl.h"
38 #include "jit/BaselineFrameInfo-inl.h"
39 #include "jit/MacroAssembler-inl.h"
40 #include "jit/SharedICHelpers-inl.h"
41 #include "jit/VMFunctionList-inl.h"
42 #include "vm/Interpreter-inl.h"
43 #include "vm/JSScript-inl.h"
44 #include "vm/NativeObject-inl.h"
45 #include "vm/TypeInference-inl.h"
48 using namespace js::jit
;
52 using mozilla::AssertedCast
;
61 BaselineCompilerHandler::BaselineCompilerHandler(JSContext
* cx
,
65 : frame_(script
, masm
),
67 analysis_(alloc
, script
),
74 compileDebugInstrumentation_(script
->isDebuggee()),
75 ionCompileable_(IsIonEnabled(cx
) && CanIonCompileScript(cx
, script
)) {
78 BaselineInterpreterHandler::BaselineInterpreterHandler(JSContext
* cx
,
82 template <typename Handler
>
83 template <typename
... HandlerArgs
>
84 BaselineCodeGen
<Handler
>::BaselineCodeGen(JSContext
* cx
, HandlerArgs
&&... args
)
85 : handler(cx
, masm
, std::forward
<HandlerArgs
>(args
)...),
87 frame(handler
.frame()),
88 traceLoggerToggleOffsets_(cx
) {}
90 BaselineCompiler::BaselineCompiler(JSContext
* cx
, TempAllocator
& alloc
,
92 : BaselineCodeGen(cx
, /* HandlerArgs = */ alloc
, script
),
93 profilerPushToggleOffset_(),
94 traceLoggerScriptTextIdOffset_() {
95 #ifdef JS_CODEGEN_NONE
100 BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext
* cx
)
101 : BaselineCodeGen(cx
/* no handlerArgs */) {}
103 bool BaselineCompilerHandler::init(JSContext
* cx
) {
104 if (!analysis_
.init(alloc_
)) {
108 uint32_t len
= script_
->length();
110 if (!labels_
.init(alloc_
, len
)) {
114 for (size_t i
= 0; i
< len
; i
++) {
115 new (&labels_
[i
]) Label();
118 if (!frame_
.init(alloc_
)) {
125 bool BaselineCompiler::init() {
126 if (!handler
.init(cx
)) {
133 bool BaselineCompilerHandler::recordCallRetAddr(JSContext
* cx
,
134 RetAddrEntry::Kind kind
,
135 uint32_t retOffset
) {
136 uint32_t pcOffset
= script_
->pcToOffset(pc_
);
138 // Entries must be sorted by pcOffset for binary search to work.
139 // See BaselineScript::retAddrEntryFromPCOffset.
140 MOZ_ASSERT_IF(!retAddrEntries_
.empty(),
141 retAddrEntries_
.back().pcOffset() <= pcOffset
);
143 // Similarly, entries must be sorted by return offset and this offset must be
144 // unique. See BaselineScript::retAddrEntryFromReturnOffset.
145 MOZ_ASSERT_IF(!retAddrEntries_
.empty() && !masm_
.oom(),
146 retAddrEntries_
.back().returnOffset().offset() < retOffset
);
148 if (!retAddrEntries_
.emplaceBack(pcOffset
, kind
, CodeOffset(retOffset
))) {
149 ReportOutOfMemory(cx
);
156 bool BaselineInterpreterHandler::recordCallRetAddr(JSContext
* cx
,
157 RetAddrEntry::Kind kind
,
158 uint32_t retOffset
) {
160 case RetAddrEntry::Kind::DebugPrologue
:
161 MOZ_ASSERT(callVMOffsets_
.debugPrologueOffset
== 0,
162 "expected single DebugPrologue call");
163 callVMOffsets_
.debugPrologueOffset
= retOffset
;
165 case RetAddrEntry::Kind::DebugEpilogue
:
166 MOZ_ASSERT(callVMOffsets_
.debugEpilogueOffset
== 0,
167 "expected single DebugEpilogue call");
168 callVMOffsets_
.debugEpilogueOffset
= retOffset
;
170 case RetAddrEntry::Kind::DebugAfterYield
:
171 MOZ_ASSERT(callVMOffsets_
.debugAfterYieldOffset
== 0,
172 "expected single DebugAfterYield call");
173 callVMOffsets_
.debugAfterYieldOffset
= retOffset
;
182 bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
183 JSContext
* cx
, CodeOffset offset
) {
184 if (!debugInstrumentationOffsets_
.append(offset
.offset())) {
185 ReportOutOfMemory(cx
);
191 MethodStatus
BaselineCompiler::compile() {
192 JSScript
* script
= handler
.script();
193 JitSpew(JitSpew_BaselineScripts
, "Baseline compiling script %s:%u:%u (%p)",
194 script
->filename(), script
->lineno(), script
->column(), script
);
196 JitSpew(JitSpew_Codegen
, "# Emitting baseline code for script %s:%u:%u",
197 script
->filename(), script
->lineno(), script
->column());
199 TraceLoggerThread
* logger
= TraceLoggerForCurrentThread(cx
);
200 TraceLoggerEvent
scriptEvent(TraceLogger_AnnotateScripts
, script
);
201 AutoTraceLog
logScript(logger
, scriptEvent
);
202 AutoTraceLog
logCompile(logger
, TraceLogger_BaselineCompilation
);
204 AutoKeepJitScripts
keepJitScript(cx
);
205 if (!script
->ensureHasJitScript(cx
, keepJitScript
)) {
209 // When code coverage is only enabled for optimizations, or when a Debugger
210 // set the collectCoverageInfo flag, we have to create the ScriptCounts if
211 // they do not exist.
212 if (!script
->hasScriptCounts() && cx
->realm()->collectCoverage()) {
213 if (!script
->initScriptCounts(cx
)) {
218 // Pin analysis info during compilation.
219 AutoEnterAnalysis
autoEnterAnalysis(cx
);
221 MOZ_ASSERT(!script
->hasBaselineScript());
223 if (!emitPrologue()) {
227 MethodStatus status
= emitBody();
228 if (status
!= Method_Compiled
) {
232 if (!emitEpilogue()) {
236 if (!emitOutOfLinePostBarrierSlot()) {
242 ReportOutOfMemory(cx
);
246 JitCode
* code
= linker
.newCode(cx
, CodeKind::Baseline
);
251 UniquePtr
<BaselineScript
> baselineScript(
253 cx
, warmUpCheckPrologueOffset_
.offset(),
254 profilerEnterFrameToggleOffset_
.offset(),
255 profilerExitFrameToggleOffset_
.offset(),
256 handler
.retAddrEntries().length(), handler
.osrEntries().length(),
257 debugTrapEntries_
.length(), script
->resumeOffsets().size(),
258 traceLoggerToggleOffsets_
.length()),
259 JS::DeletePolicy
<BaselineScript
>(cx
->runtime()));
260 if (!baselineScript
) {
261 ReportOutOfMemory(cx
);
265 baselineScript
->setMethod(code
);
267 JitSpew(JitSpew_BaselineScripts
,
268 "Created BaselineScript %p (raw %p) for %s:%u:%u",
269 (void*)baselineScript
.get(), (void*)code
->raw(), script
->filename(),
270 script
->lineno(), script
->column());
272 baselineScript
->copyRetAddrEntries(handler
.retAddrEntries().begin());
273 baselineScript
->copyOSREntries(handler
.osrEntries().begin());
274 baselineScript
->copyDebugTrapEntries(debugTrapEntries_
.begin());
276 // If profiler instrumentation is enabled, toggle instrumentation on.
277 if (cx
->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
279 baselineScript
->toggleProfilerInstrumentation(true);
282 #ifdef JS_TRACE_LOGGING
283 // Initialize the tracelogger instrumentation.
284 if (JS::TraceLoggerSupported()) {
285 baselineScript
->initTraceLogger(script
, traceLoggerToggleOffsets_
);
289 // Compute native resume addresses for the script's resume offsets.
290 baselineScript
->computeResumeNativeOffsets(script
, resumeOffsetEntries_
);
292 if (compileDebugInstrumentation()) {
293 baselineScript
->setHasDebugInstrumentation();
296 // Always register a native => bytecode mapping entry, since profiler can be
297 // turned on with baseline jitcode on stack, and baseline jitcode cannot be
300 JitSpew(JitSpew_Profiling
,
301 "Added JitcodeGlobalEntry for baseline script %s:%u:%u (%p)",
302 script
->filename(), script
->lineno(), script
->column(),
303 baselineScript
.get());
305 // Generate profiling string.
306 UniqueChars str
= GeckoProfilerRuntime::allocProfileString(cx
, script
);
311 JitcodeGlobalEntry::BaselineEntry entry
;
312 entry
.init(code
, code
->raw(), code
->rawEnd(), script
, str
.release());
314 JitcodeGlobalTable
* globalTable
=
315 cx
->runtime()->jitRuntime()->getJitcodeGlobalTable();
316 if (!globalTable
->addEntry(entry
)) {
318 ReportOutOfMemory(cx
);
322 // Mark the jitcode as having a bytecode map.
323 code
->setHasBytecodeMap();
326 script
->jitScript()->setBaselineScript(script
, baselineScript
.release());
329 writePerfSpewerBaselineProfile(script
, code
);
333 vtune::MarkScript(code
, script
, "baseline");
336 return Method_Compiled
;
339 // On most platforms we use a dedicated bytecode PC register to avoid many
340 // dependent loads and stores for sequences of simple bytecode ops. This
341 // register must be saved/restored around VM and IC calls.
343 // On 32-bit x86 we don't have enough registers for this (because R0-R2 require
344 // 6 registers) so there we always store the pc on the frame.
345 static constexpr bool HasInterpreterPCReg() {
346 return InterpreterPCReg
!= InvalidReg
;
349 static Register
LoadBytecodePC(MacroAssembler
& masm
, Register scratch
) {
350 if (HasInterpreterPCReg()) {
351 return InterpreterPCReg
;
354 Address
pcAddr(BaselineFrameReg
,
355 BaselineFrame::reverseOffsetOfInterpreterPC());
356 masm
.loadPtr(pcAddr
, scratch
);
360 static void LoadInt8Operand(MacroAssembler
& masm
, Register dest
) {
361 Register pc
= LoadBytecodePC(masm
, dest
);
362 masm
.load8SignExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
365 static void LoadUint8Operand(MacroAssembler
& masm
, Register dest
) {
366 Register pc
= LoadBytecodePC(masm
, dest
);
367 masm
.load8ZeroExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
370 static void LoadUint16Operand(MacroAssembler
& masm
, Register dest
) {
371 Register pc
= LoadBytecodePC(masm
, dest
);
372 masm
.load16ZeroExtend(Address(pc
, sizeof(jsbytecode
)), dest
);
375 static void LoadInt32Operand(MacroAssembler
& masm
, Register dest
) {
376 Register pc
= LoadBytecodePC(masm
, dest
);
377 masm
.load32(Address(pc
, sizeof(jsbytecode
)), dest
);
380 static void LoadInt32OperandSignExtendToPtr(MacroAssembler
& masm
, Register pc
,
382 masm
.load32SignExtendToPtr(Address(pc
, sizeof(jsbytecode
)), dest
);
385 static void LoadUint24Operand(MacroAssembler
& masm
, size_t offset
,
387 // Load the opcode and operand, then left shift to discard the opcode.
388 Register pc
= LoadBytecodePC(masm
, dest
);
389 masm
.load32(Address(pc
, offset
), dest
);
390 masm
.rshift32(Imm32(8), dest
);
393 static void LoadInlineValueOperand(MacroAssembler
& masm
, ValueOperand dest
) {
394 // Note: the Value might be unaligned but as above we rely on all our
395 // platforms having appropriate support for unaligned accesses (except for
396 // floating point instructions on ARM).
397 Register pc
= LoadBytecodePC(masm
, dest
.scratchReg());
398 masm
.loadUnalignedValue(Address(pc
, sizeof(jsbytecode
)), dest
);
402 void BaselineCompilerCodeGen::loadScript(Register dest
) {
403 masm
.movePtr(ImmGCPtr(handler
.script()), dest
);
407 void BaselineInterpreterCodeGen::loadScript(Register dest
) {
408 masm
.loadPtr(frame
.addressOfInterpreterScript(), dest
);
// No-op: the compiler pins the bytecode pc at compile time, so there is no
// runtime interpreter-PC register to save around calls (only the interpreter
// variant spills a PC register).
412 void BaselineCompilerCodeGen::saveInterpreterPCReg() {}
415 void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
416 if (HasInterpreterPCReg()) {
417 masm
.storePtr(InterpreterPCReg
, frame
.addressOfInterpreterPC());
// No-op counterpart to saveInterpreterPCReg above: the compiler has no
// interpreter-PC register to reload after a call, so nothing to restore.
422 void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}
425 void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
426 if (HasInterpreterPCReg()) {
427 masm
.loadPtr(frame
.addressOfInterpreterPC(), InterpreterPCReg
);
432 void BaselineCompilerCodeGen::emitInitializeLocals() {
433 // Initialize all locals to |undefined|. Lexical bindings are temporal
434 // dead zoned in bytecode.
436 size_t n
= frame
.nlocals();
441 // Use R0 to minimize code size. If the number of locals to push is <
442 // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
443 // and inline. Otherwise, they're emitted in a partially unrolled loop.
444 static const size_t LOOP_UNROLL_FACTOR
= 4;
445 size_t toPushExtra
= n
% LOOP_UNROLL_FACTOR
;
447 masm
.moveValue(UndefinedValue(), R0
);
449 // Handle any extra pushes left over by the optional unrolled loop below.
450 for (size_t i
= 0; i
< toPushExtra
; i
++) {
454 // Partially unrolled loop of pushes.
455 if (n
>= LOOP_UNROLL_FACTOR
) {
456 size_t toPush
= n
- toPushExtra
;
457 MOZ_ASSERT(toPush
% LOOP_UNROLL_FACTOR
== 0);
458 MOZ_ASSERT(toPush
>= LOOP_UNROLL_FACTOR
);
459 masm
.move32(Imm32(toPush
), R1
.scratchReg());
460 // Emit unrolled loop with 4 pushes per iteration.
462 masm
.bind(&pushLoop
);
463 for (size_t i
= 0; i
< LOOP_UNROLL_FACTOR
; i
++) {
466 masm
.branchSub32(Assembler::NonZero
, Imm32(LOOP_UNROLL_FACTOR
),
467 R1
.scratchReg(), &pushLoop
);
472 void BaselineInterpreterCodeGen::emitInitializeLocals() {
473 // Push |undefined| for all locals.
475 Register scratch
= R0
.scratchReg();
477 masm
.loadPtr(Address(scratch
, JSScript::offsetOfSharedData()), scratch
);
478 masm
.loadPtr(Address(scratch
, RuntimeScriptData::offsetOfISD()), scratch
);
479 masm
.load32(Address(scratch
, ImmutableScriptData::offsetOfNfixed()), scratch
);
482 masm
.branchTest32(Assembler::Zero
, scratch
, scratch
, &done
);
485 masm
.pushValue(UndefinedValue());
486 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch
, &top
);
492 // R2.scratchReg() contains object being written to.
493 // Called with the baseline stack synced, except for R0 which is preserved.
494 // All other registers are usable as scratch.
496 // void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
497 template <typename Handler
>
498 bool BaselineCodeGen
<Handler
>::emitOutOfLinePostBarrierSlot() {
499 if (!postBarrierSlot_
.used()) {
503 masm
.bind(&postBarrierSlot_
);
505 saveInterpreterPCReg();
507 Register objReg
= R2
.scratchReg();
508 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
511 regs
.take(BaselineFrameReg
);
512 Register scratch
= regs
.takeAny();
513 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
514 // On ARM, save the link register before calling. It contains the return
515 // address. The |masm.ret()| later will pop this into |pc| to return.
517 #elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
522 masm
.setupUnalignedABICall(scratch
);
523 masm
.movePtr(ImmPtr(cx
->runtime()), scratch
);
524 masm
.passABIArg(scratch
);
525 masm
.passABIArg(objReg
);
526 masm
.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier
));
528 restoreInterpreterPCReg();
536 bool BaselineCompilerCodeGen::emitNextIC() {
537 // Emit a call to an IC stored in JitScript. Calls to this must match the
538 // ICEntry order in JitScript: first the non-op IC entries for |this| and
539 // formal arguments, then the for-op IC entries for JOF_IC ops.
541 JSScript
* script
= handler
.script();
542 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
544 // We don't use every ICEntry and we can skip unreachable ops, so we have
545 // to loop until we find an ICEntry for the current pc.
546 const ICEntry
* entry
;
549 entry
= &script
->jitScript()->icEntry(handler
.icEntryIndex());
550 entryIndex
= handler
.icEntryIndex();
551 handler
.moveToNextICEntry();
552 } while (entry
->pcOffset() < pcOffset
);
554 MOZ_RELEASE_ASSERT(entry
->pcOffset() == pcOffset
);
555 MOZ_ASSERT_IF(!entry
->isForPrologue(), BytecodeOpHasIC(JSOp(*handler
.pc())));
557 // Load stub pointer into ICStubReg.
558 if (JitOptions
.warpBuilder
) {
559 masm
.loadPtr(frame
.addressOfICScript(), ICStubReg
);
560 size_t firstStubOffset
= ICScript::offsetOfFirstStub(entryIndex
);
561 masm
.loadPtr(Address(ICStubReg
, firstStubOffset
), ICStubReg
);
563 masm
.loadPtr(AbsoluteAddress(entry
).offset(ICEntry::offsetOfFirstStub()),
567 CodeOffset returnOffset
;
568 EmitCallIC(masm
, &returnOffset
);
570 RetAddrEntry::Kind kind
= entry
->isForPrologue()
571 ? RetAddrEntry::Kind::PrologueIC
572 : RetAddrEntry::Kind::IC
;
574 if (!handler
.retAddrEntries().emplaceBack(pcOffset
, kind
, returnOffset
)) {
575 ReportOutOfMemory(cx
);
583 bool BaselineInterpreterCodeGen::emitNextIC() {
584 saveInterpreterPCReg();
585 masm
.loadPtr(frame
.addressOfInterpreterICEntry(), ICStubReg
);
586 masm
.loadPtr(Address(ICStubReg
, ICEntry::offsetOfFirstStub()), ICStubReg
);
587 masm
.call(Address(ICStubReg
, ICStub::offsetOfStubCode()));
588 uint32_t returnOffset
= masm
.currentOffset();
589 restoreInterpreterPCReg();
591 // If this is an IC for a bytecode op where Ion may inline scripts, we need to
592 // record the return offset for Ion bailouts.
593 if (handler
.currentOp()) {
594 JSOp op
= *handler
.currentOp();
595 MOZ_ASSERT(BytecodeOpHasIC(op
));
596 if (IsIonInlinableOp(op
)) {
597 if (!handler
.icReturnOffsets().emplaceBack(returnOffset
, op
)) {
607 void BaselineCompilerCodeGen::computeFrameSize(Register dest
) {
608 MOZ_ASSERT(!inCall_
, "must not be called in the middle of a VM call");
609 masm
.move32(Imm32(frame
.frameSize()), dest
);
613 void BaselineInterpreterCodeGen::computeFrameSize(Register dest
) {
614 // dest = FramePointer + BaselineFrame::FramePointerOffset - StackPointer.
615 MOZ_ASSERT(!inCall_
, "must not be called in the middle of a VM call");
616 masm
.computeEffectiveAddress(
617 Address(BaselineFrameReg
, BaselineFrame::FramePointerOffset
), dest
);
618 masm
.subStackPtrFrom(dest
);
621 template <typename Handler
>
622 void BaselineCodeGen
<Handler
>::prepareVMCall() {
623 pushedBeforeCall_
= masm
.framePushed();
628 // Ensure everything is synced.
633 void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
634 uint32_t argSize
, Register scratch1
, Register scratch2
) {
635 uint32_t frameFullSize
= frame
.frameSize();
638 masm
.store32(Imm32(frameFullSize
), frame
.addressOfDebugFrameSize());
641 uint32_t descriptor
= MakeFrameDescriptor(
642 frameFullSize
+ argSize
, FrameType::BaselineJS
, ExitFrameLayout::Size());
643 masm
.push(Imm32(descriptor
));
647 void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
648 uint32_t argSize
, Register scratch1
, Register scratch2
) {
649 // scratch1 = FramePointer + BaselineFrame::FramePointerOffset - StackPointer.
650 masm
.computeEffectiveAddress(
651 Address(BaselineFrameReg
, BaselineFrame::FramePointerOffset
), scratch1
);
652 masm
.subStackPtrFrom(scratch1
);
655 // Store the frame size without VMFunction arguments in debug builds.
656 masm
.computeEffectiveAddress(Address(scratch1
, -int32_t(argSize
)), scratch2
);
657 masm
.store32(scratch2
, frame
.addressOfDebugFrameSize());
660 // Push frame descriptor based on the full frame size.
661 masm
.makeFrameDescriptor(scratch1
, FrameType::BaselineJS
,
662 ExitFrameLayout::Size());
666 static uint32_t GetVMFunctionArgSize(const VMFunctionData
& fun
) {
667 return fun
.explicitStackSlots() * sizeof(void*);
670 template <typename Handler
>
671 bool BaselineCodeGen
<Handler
>::callVMInternal(VMFunctionId id
,
672 RetAddrEntry::Kind kind
,
675 // Assert prepareVMCall() has been called.
680 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
681 const VMFunctionData
& fun
= GetVMFunction(id
);
683 uint32_t argSize
= GetVMFunctionArgSize(fun
);
685 // Assert all arguments were pushed.
686 MOZ_ASSERT(masm
.framePushed() - pushedBeforeCall_
== argSize
);
688 saveInterpreterPCReg();
690 if (phase
== CallVMPhase::AfterPushingLocals
) {
691 storeFrameSizeAndPushDescriptor(argSize
, R0
.scratchReg(), R1
.scratchReg());
693 MOZ_ASSERT(phase
== CallVMPhase::BeforePushingLocals
);
694 uint32_t frameBaseSize
= BaselineFrame::frameSizeForNumValueSlots(0);
696 masm
.store32(Imm32(frameBaseSize
), frame
.addressOfDebugFrameSize());
698 uint32_t descriptor
=
699 MakeFrameDescriptor(frameBaseSize
+ argSize
, FrameType::BaselineJS
,
700 ExitFrameLayout::Size());
701 masm
.push(Imm32(descriptor
));
703 MOZ_ASSERT(fun
.expectTailCall
== NonTailCall
);
706 uint32_t callOffset
= masm
.currentOffset();
708 // Pop arguments from framePushed.
709 masm
.implicitPop(argSize
);
711 restoreInterpreterPCReg();
713 return handler
.recordCallRetAddr(cx
, kind
, callOffset
);
716 template <typename Handler
>
717 template <typename Fn
, Fn fn
>
718 bool BaselineCodeGen
<Handler
>::callVM(RetAddrEntry::Kind kind
,
720 VMFunctionId fnId
= VMFunctionToId
<Fn
, fn
>::id
;
721 return callVMInternal(fnId
, kind
, phase
);
724 template <typename Handler
>
725 bool BaselineCodeGen
<Handler
>::emitStackCheck() {
727 if (handler
.mustIncludeSlotsInStackCheck()) {
728 // Subtract the size of script->nslots() first.
729 Register scratch
= R1
.scratchReg();
730 masm
.moveStackPtrTo(scratch
);
731 subtractScriptSlotsSize(scratch
, R2
.scratchReg());
732 masm
.branchPtr(Assembler::BelowOrEqual
,
733 AbsoluteAddress(cx
->addressOfJitStackLimit()), scratch
,
736 masm
.branchStackPtrRhs(Assembler::BelowOrEqual
,
737 AbsoluteAddress(cx
->addressOfJitStackLimit()),
742 masm
.loadBaselineFramePtr(BaselineFrameReg
, R1
.scratchReg());
743 pushArg(R1
.scratchReg());
745 const CallVMPhase phase
= CallVMPhase::BeforePushingLocals
;
746 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::StackCheck
;
748 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
749 if (!callVM
<Fn
, CheckOverRecursedBaseline
>(kind
, phase
)) {
753 masm
.bind(&skipCall
);
757 static void EmitCallFrameIsDebuggeeCheck(MacroAssembler
& masm
) {
758 masm
.setupUnalignedABICall(R0
.scratchReg());
759 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
760 masm
.passABIArg(R0
.scratchReg());
761 masm
.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::FrameIsDebuggeeCheck
));
765 bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
766 if (handler
.compileDebugInstrumentation()) {
767 EmitCallFrameIsDebuggeeCheck(masm
);
773 bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
774 // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
777 // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
778 // call. Consider moving the callWithABI out-of-line.
781 CodeOffset toggleOffset
= masm
.toggledJump(&skipCheck
);
783 saveInterpreterPCReg();
784 EmitCallFrameIsDebuggeeCheck(masm
);
785 restoreInterpreterPCReg();
787 masm
.bind(&skipCheck
);
788 return handler
.addDebugInstrumentationOffset(cx
, toggleOffset
);
791 static void MaybeIncrementCodeCoverageCounter(MacroAssembler
& masm
,
794 if (!script
->hasScriptCounts()) {
797 PCCounts
* counts
= script
->maybeGetPCCounts(pc
);
798 uint64_t* counterAddr
= &counts
->numExec();
799 masm
.inc64(AbsoluteAddress(counterAddr
));
803 bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
804 // If the main instruction is not a jump target, then we emit the
805 // corresponding code coverage counter.
806 JSScript
* script
= handler
.script();
807 jsbytecode
* main
= script
->main();
808 if (!BytecodeIsJumpTarget(JSOp(*main
))) {
809 MaybeIncrementCodeCoverageCounter(masm
, script
, main
);
815 bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
817 CodeOffset toggleOffset
= masm
.toggledJump(&skipCoverage
);
818 masm
.call(handler
.codeCoverageAtPrologueLabel());
819 masm
.bind(&skipCoverage
);
820 return handler
.codeCoverageOffsets().append(toggleOffset
.offset());
824 void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg
,
826 uint32_t slotsSize
= handler
.script()->nslots() * sizeof(Value
);
827 masm
.subPtr(Imm32(slotsSize
), reg
);
831 void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg
,
833 // reg = reg - script->nslots() * sizeof(Value)
834 MOZ_ASSERT(reg
!= scratch
);
836 masm
.loadPtr(Address(scratch
, JSScript::offsetOfSharedData()), scratch
);
837 masm
.loadPtr(Address(scratch
, RuntimeScriptData::offsetOfISD()), scratch
);
838 masm
.load32(Address(scratch
, ImmutableScriptData::offsetOfNslots()), scratch
);
839 static_assert(sizeof(Value
) == 8,
840 "shift by 3 below assumes Value is 8 bytes");
841 masm
.lshiftPtr(Imm32(3), scratch
);
842 masm
.subPtr(scratch
, reg
);
846 void BaselineCompilerCodeGen::loadGlobalLexicalEnvironment(Register dest
) {
847 masm
.movePtr(ImmGCPtr(&cx
->global()->lexicalEnvironment()), dest
);
851 void BaselineInterpreterCodeGen::loadGlobalLexicalEnvironment(Register dest
) {
852 masm
.loadPtr(AbsoluteAddress(cx
->addressOfRealm()), dest
);
853 masm
.loadPtr(Address(dest
, Realm::offsetOfActiveLexicalEnvironment()), dest
);
857 void BaselineCompilerCodeGen::pushGlobalLexicalEnvironmentValue(
858 ValueOperand scratch
) {
859 frame
.push(ObjectValue(cx
->global()->lexicalEnvironment()));
863 void BaselineInterpreterCodeGen::pushGlobalLexicalEnvironmentValue(
864 ValueOperand scratch
) {
865 loadGlobalLexicalEnvironment(scratch
.scratchReg());
866 masm
.tagValue(JSVAL_TYPE_OBJECT
, scratch
.scratchReg(), scratch
);
871 void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest
) {
872 JSObject
* thisObj
= cx
->global()->lexicalEnvironment().thisObject();
873 masm
.moveValue(ObjectValue(*thisObj
), dest
);
877 void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest
) {
878 Register scratch
= dest
.scratchReg();
879 loadGlobalLexicalEnvironment(scratch
);
880 static constexpr size_t SlotOffset
=
881 LexicalEnvironmentObject::offsetOfThisValueOrScopeSlot();
882 masm
.loadValue(Address(scratch
, SlotOffset
), dest
);
886 void BaselineCompilerCodeGen::pushScriptArg() {
887 pushArg(ImmGCPtr(handler
.script()));
891 void BaselineInterpreterCodeGen::pushScriptArg() {
892 pushArg(frame
.addressOfInterpreterScript());
896 void BaselineCompilerCodeGen::pushBytecodePCArg() {
897 pushArg(ImmPtr(handler
.pc()));
901 void BaselineInterpreterCodeGen::pushBytecodePCArg() {
902 if (HasInterpreterPCReg()) {
903 pushArg(InterpreterPCReg
);
905 pushArg(frame
.addressOfInterpreterPC());
909 static gc::Cell
* GetScriptGCThing(JSScript
* script
, jsbytecode
* pc
,
910 ScriptGCThingType type
) {
912 case ScriptGCThingType::Atom
:
913 return script
->getAtom(pc
);
914 case ScriptGCThingType::RegExp
:
915 return script
->getRegExp(pc
);
916 case ScriptGCThingType::Function
:
917 return script
->getFunction(pc
);
918 case ScriptGCThingType::Scope
:
919 return script
->getScope(pc
);
920 case ScriptGCThingType::BigInt
:
921 return script
->getBigInt(pc
);
923 MOZ_CRASH("Unexpected GCThing type");
927 void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type
,
930 gc::Cell
* thing
= GetScriptGCThing(handler
.script(), handler
.pc(), type
);
931 masm
.movePtr(ImmGCPtr(thing
), dest
);
935 void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type
,
938 MOZ_ASSERT(dest
!= scratch
);
940 // Load the index in |scratch|.
941 LoadInt32Operand(masm
, scratch
);
943 // Load the GCCellPtr.
945 masm
.loadPtr(Address(dest
, JSScript::offsetOfPrivateData()), dest
);
946 masm
.loadPtr(BaseIndex(dest
, scratch
, ScalePointer
,
947 PrivateScriptData::offsetOfGCThings()),
950 // Clear the tag bits.
952 case ScriptGCThingType::Atom
:
953 // Use xorPtr with a 32-bit immediate because it's more efficient than
955 static_assert(uintptr_t(TraceKind::String
) == 2,
956 "Unexpected tag bits for string GCCellPtr");
957 masm
.xorPtr(Imm32(2), dest
);
959 case ScriptGCThingType::RegExp
:
960 case ScriptGCThingType::Function
:
961 // No-op because GCCellPtr tag bits are zero for objects.
962 static_assert(uintptr_t(TraceKind::Object
) == 0,
963 "Unexpected tag bits for object GCCellPtr");
965 case ScriptGCThingType::BigInt
:
966 // Use xorPtr with a 32-bit immediate because it's more efficient than
968 static_assert(uintptr_t(TraceKind::BigInt
) == 1,
969 "Unexpected tag bits for BigInt GCCellPtr");
970 masm
.xorPtr(Imm32(1), dest
);
972 case ScriptGCThingType::Scope
:
973 // Use xorPtr with a 32-bit immediate because it's more efficient than
975 static_assert(uintptr_t(TraceKind::Scope
) >= JS::OutOfLineTraceKindMask
,
976 "Expected Scopes to have OutOfLineTraceKindMask tag");
977 masm
.xorPtr(Imm32(JS::OutOfLineTraceKindMask
), dest
);
982 // Assert low bits are not set.
984 masm
.branchTestPtr(Assembler::Zero
, dest
, Imm32(0b111), &ok
);
985 masm
.assumeUnreachable("GC pointer with tag bits set");
991 void BaselineCompilerCodeGen::pushScriptGCThingArg(ScriptGCThingType type
,
994 gc::Cell
* thing
= GetScriptGCThing(handler
.script(), handler
.pc(), type
);
995 pushArg(ImmGCPtr(thing
));
999 void BaselineInterpreterCodeGen::pushScriptGCThingArg(ScriptGCThingType type
,
1001 Register scratch2
) {
1002 loadScriptGCThing(type
, scratch1
, scratch2
);
1006 template <typename Handler
>
1007 void BaselineCodeGen
<Handler
>::pushScriptNameArg(Register scratch1
,
1008 Register scratch2
) {
1009 pushScriptGCThingArg(ScriptGCThingType::Atom
, scratch1
, scratch2
);
1013 void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register
) {
1014 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler
.pc())) == JOF_UINT8
);
1015 pushArg(Imm32(GET_UINT8(handler
.pc())));
1019 void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch
) {
1020 LoadUint8Operand(masm
, scratch
);
1025 void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register
) {
1026 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler
.pc())) == JOF_UINT16
);
1027 pushArg(Imm32(GET_UINT16(handler
.pc())));
1031 void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
1033 LoadUint16Operand(masm
, scratch
);
1038 void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest
) {
1039 uint32_t length
= GET_UINT32(handler
.pc());
1040 MOZ_ASSERT(length
<= INT32_MAX
,
1041 "the bytecode emitter must fail to compile code that would "
1042 "produce a length exceeding int32_t range");
1043 masm
.move32(Imm32(AssertedCast
<int32_t>(length
)), dest
);
1047 void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest
) {
1048 LoadInt32Operand(masm
, dest
);
1052 void BaselineCompilerCodeGen::loadInt32IndexBytecodeOperand(ValueOperand dest
) {
1053 uint32_t index
= GET_UINT32(handler
.pc());
1054 MOZ_ASSERT(index
<= INT32_MAX
,
1055 "the bytecode emitter must fail to compile code that would "
1056 "produce an index exceeding int32_t range");
1057 masm
.moveValue(Int32Value(AssertedCast
<int32_t>(index
)), dest
);
1061 void BaselineInterpreterCodeGen::loadInt32IndexBytecodeOperand(
1062 ValueOperand dest
) {
1063 Register scratch
= dest
.scratchReg();
1064 LoadInt32Operand(masm
, scratch
);
1065 masm
.tagValue(JSVAL_TYPE_INT32
, scratch
, dest
);
1068 template <typename Handler
>
1069 bool BaselineCodeGen
<Handler
>::emitDebugPrologue() {
1070 auto ifDebuggee
= [this]() {
1071 // Load pointer to BaselineFrame in R0.
1072 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1075 pushArg(R0
.scratchReg());
1077 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugPrologue
;
1079 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1080 if (!callVM
<Fn
, jit::DebugPrologue
>(kind
)) {
1086 return emitDebugInstrumentation(ifDebuggee
);
1090 void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv
) {
1091 Register scratch
= R0
.scratchReg();
1092 Register scratch2
= R2
.scratchReg();
1093 MOZ_ASSERT(nonFunctionEnv
!= scratch
&& nonFunctionEnv
!= scratch2
);
1095 masm
.store32(Imm32(0), frame
.addressOfFlags());
1096 if (handler
.function()) {
1097 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), scratch
);
1098 masm
.loadPtr(Address(scratch
, JSFunction::offsetOfEnvironment()), scratch
);
1099 masm
.storePtr(scratch
, frame
.addressOfEnvironmentChain());
1101 masm
.storePtr(nonFunctionEnv
, frame
.addressOfEnvironmentChain());
1104 if (!JitOptions
.warpBuilder
) {
1105 // Trial inlining only supported in Warp.
1109 // If cx->inlinedICScript contains an inlined ICScript (passed from
1110 // the caller), take that ICScript and store it in the frame, then
1111 // overwrite cx->inlinedICScript with nullptr.
1112 Label notInlined
, done
;
1113 masm
.movePtr(ImmPtr(cx
->addressOfInlinedICScript()), scratch
);
1114 Address
inlinedAddr(scratch
, 0);
1115 masm
.branchPtr(Assembler::Equal
, inlinedAddr
, ImmWord(0), ¬Inlined
);
1116 masm
.loadPtr(inlinedAddr
, scratch2
);
1117 masm
.storePtr(scratch2
, frame
.addressOfICScript());
1118 masm
.storePtr(ImmPtr(nullptr), inlinedAddr
);
1121 // Otherwise, store this script's default ICSCript in the frame.
1122 masm
.bind(¬Inlined
);
1123 masm
.storePtr(ImmPtr(handler
.script()->jitScript()->icScript()),
1124 frame
.addressOfICScript());
1129 void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv
) {
1130 MOZ_ASSERT(nonFunctionEnv
== R1
.scratchReg(),
1131 "Don't clobber nonFunctionEnv below");
1133 // If we have a dedicated PC register we use it as scratch1 to avoid a
1134 // register move below.
1136 HasInterpreterPCReg() ? InterpreterPCReg
: R0
.scratchReg();
1137 Register scratch2
= R2
.scratchReg();
1139 masm
.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER
),
1140 frame
.addressOfFlags());
1142 // Initialize interpreterScript.
1143 Label notFunction
, done
;
1144 masm
.loadPtr(frame
.addressOfCalleeToken(), scratch1
);
1145 masm
.branchTestPtr(Assembler::NonZero
, scratch1
, Imm32(CalleeTokenScriptBit
),
1148 // CalleeToken_Function or CalleeToken_FunctionConstructing.
1149 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
1150 masm
.loadPtr(Address(scratch1
, JSFunction::offsetOfEnvironment()),
1152 masm
.storePtr(scratch2
, frame
.addressOfEnvironmentChain());
1153 masm
.loadPtr(Address(scratch1
, JSFunction::offsetOfScript()), scratch1
);
1156 masm
.bind(¬Function
);
1158 // CalleeToken_Script.
1159 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
1160 masm
.storePtr(nonFunctionEnv
, frame
.addressOfEnvironmentChain());
1163 masm
.storePtr(scratch1
, frame
.addressOfInterpreterScript());
1165 if (JitOptions
.warpBuilder
) {
1166 // Initialize icScript and interpreterICEntry
1167 masm
.loadJitScript(scratch1
, scratch2
);
1168 masm
.computeEffectiveAddress(
1169 Address(scratch2
, JitScript::offsetOfICScript()), scratch2
);
1170 masm
.storePtr(scratch2
, frame
.addressOfICScript());
1171 masm
.computeEffectiveAddress(
1172 Address(scratch2
, ICScript::offsetOfICEntries()), scratch2
);
1173 masm
.storePtr(scratch2
, frame
.addressOfInterpreterICEntry());
1175 // Initialize interpreterICEntry
1176 masm
.loadJitScript(scratch1
, scratch2
);
1177 masm
.computeEffectiveAddress(
1178 Address(scratch2
, JitScript::offsetOfICEntries()), scratch2
);
1179 masm
.storePtr(scratch2
, frame
.addressOfInterpreterICEntry());
1182 // Initialize interpreter pc.
1183 masm
.loadPtr(Address(scratch1
, JSScript::offsetOfSharedData()), scratch1
);
1184 masm
.loadPtr(Address(scratch1
, RuntimeScriptData::offsetOfISD()), scratch1
);
1185 masm
.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1
);
1187 if (HasInterpreterPCReg()) {
1188 MOZ_ASSERT(scratch1
== InterpreterPCReg
,
1189 "pc must be stored in the pc register");
1191 masm
.storePtr(scratch1
, frame
.addressOfInterpreterPC());
1196 template <typename F
>
1197 bool BaselineCompilerCodeGen::initEnvironmentChainHelper(
1198 const F
& initFunctionEnv
) {
1199 if (handler
.function()) {
1200 return initFunctionEnv();
1206 template <typename F
>
1207 bool BaselineInterpreterCodeGen::initEnvironmentChainHelper(
1208 const F
& initFunctionEnv
) {
1209 // For function scripts use the code emitted by initFunctionEnv. For other
1210 // scripts this is a no-op.
1213 masm
.branchTestPtr(Assembler::NonZero
, frame
.addressOfCalleeToken(),
1214 Imm32(CalleeTokenScriptBit
), &done
);
1216 if (!initFunctionEnv()) {
1225 template <typename Handler
>
1226 bool BaselineCodeGen
<Handler
>::initEnvironmentChain() {
1227 auto initFunctionEnv
= [this]() {
1228 auto initEnv
= [this]() {
1229 // Call into the VM to create the proper environment objects.
1232 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1233 pushArg(R0
.scratchReg());
1235 const CallVMPhase phase
= CallVMPhase::BeforePushingLocals
;
1237 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1238 return callVMNonOp
<Fn
, jit::InitFunctionEnvironmentObjects
>(phase
);
1240 return emitTestScriptFlag(
1241 JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects
, true,
1242 initEnv
, R2
.scratchReg());
1245 return initEnvironmentChainHelper(initFunctionEnv
);
1248 template <typename Handler
>
1249 bool BaselineCodeGen
<Handler
>::emitInterruptCheck() {
1253 masm
.branch32(Assembler::Equal
, AbsoluteAddress(cx
->addressOfInterruptBits()),
1258 // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
1259 // from other callVMs that might happen at this pc.
1260 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::InterruptCheck
;
1262 using Fn
= bool (*)(JSContext
*);
1263 if (!callVM
<Fn
, InterruptCheck
>(kind
)) {
1272 bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
1273 frame
.assertSyncedStack();
1275 // Record native code offset for OSR from Baseline Interpreter into Baseline
1276 // JIT code. This is right before the warm-up check in the Baseline JIT code,
1277 // to make sure we can immediately enter Ion if the script is warm enough or
1278 // if --ion-eager is used.
1279 JSScript
* script
= handler
.script();
1280 jsbytecode
* pc
= handler
.pc();
1281 if (JSOp(*pc
) == JSOp::LoopHead
) {
1282 uint32_t pcOffset
= script
->pcToOffset(pc
);
1283 uint32_t nativeOffset
= masm
.currentOffset();
1284 if (!handler
.osrEntries().emplaceBack(pcOffset
, nativeOffset
)) {
1285 ReportOutOfMemory(cx
);
1290 // Emit no warm-up counter increments if Ion is not enabled or if the script
1291 // will never be Ion-compileable.
1292 if (!handler
.maybeIonCompileable()) {
1296 Register scriptReg
= R2
.scratchReg();
1297 Register countReg
= R0
.scratchReg();
1299 uint32_t warmUpCountOffset
;
1300 if (JitOptions
.warpBuilder
) {
1301 // Load the ICScript* in scriptReg.
1302 masm
.loadPtr(frame
.addressOfICScript(), scriptReg
);
1303 warmUpCountOffset
= ICScript::offsetOfWarmUpCount();
1305 // Load the JitScript* in scriptReg.
1306 masm
.movePtr(ImmPtr(script
->jitScript()), scriptReg
);
1307 warmUpCountOffset
= JitScript::offsetOfWarmUpCount();
1310 // Bump warm-up counter.
1311 Address
warmUpCounterAddr(scriptReg
, warmUpCountOffset
);
1312 masm
.load32(warmUpCounterAddr
, countReg
);
1313 masm
.add32(Imm32(1), countReg
);
1314 masm
.store32(countReg
, warmUpCounterAddr
);
1316 if (JitOptions
.warpBuilder
&& !JitOptions
.disableInlining
) {
1317 // Consider trial inlining.
1318 // Note: unlike other warmup thresholds, where we try to enter a
1319 // higher tier whenever we are higher than a given warmup count,
1320 // trial inlining triggers once when reaching the threshold.
1321 Label noTrialInlining
;
1322 masm
.branch32(Assembler::NotEqual
, countReg
,
1323 Imm32(JitOptions
.trialInliningWarmUpThreshold
),
1327 masm
.PushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1329 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1330 if (!callVMNonOp
<Fn
, DoTrialInlining
>()) {
1333 masm
.bind(&noTrialInlining
);
1336 if (JSOp(*pc
) == JSOp::LoopHead
) {
1337 // If this is a loop where we can't OSR (for example because it's inside a
1338 // catch or finally block), increment the warmup counter but don't attempt
1339 // OSR (Ion/Warp only compiles the try block).
1340 if (!handler
.analysis().info(pc
).loopHeadCanOsr
) {
1347 const OptimizationInfo
* info
=
1348 IonOptimizations
.get(IonOptimizations
.firstLevel());
1349 uint32_t warmUpThreshold
= info
->compilerWarmUpThreshold(script
, pc
);
1350 masm
.branch32(Assembler::LessThan
, countReg
, Imm32(warmUpThreshold
), &done
);
1352 if (JitOptions
.warpBuilder
) {
1353 // Load the JitScript* in scriptReg.
1354 masm
.movePtr(ImmPtr(script
->jitScript()), scriptReg
);
1357 // Do nothing if Ion is already compiling this script off-thread or if Ion has
1358 // been disabled for this script.
1359 masm
.loadPtr(Address(scriptReg
, JitScript::offsetOfIonScript()), scriptReg
);
1360 masm
.branchPtr(Assembler::Equal
, scriptReg
, ImmPtr(IonCompilingScriptPtr
),
1362 masm
.branchPtr(Assembler::Equal
, scriptReg
, ImmPtr(IonDisabledScriptPtr
),
1365 // Try to compile and/or finish a compilation.
1366 if (JSOp(*pc
) == JSOp::LoopHead
) {
1367 // Try to OSR into Ion.
1368 computeFrameSize(R0
.scratchReg());
1372 pushBytecodePCArg();
1373 pushArg(R0
.scratchReg());
1374 masm
.PushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1376 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, uint32_t, jsbytecode
*,
1378 if (!callVM
<Fn
, IonCompileScriptForBaselineOSR
>()) {
1382 // The return register holds the IonOsrTempData*. Perform OSR if it's not
1384 static_assert(ReturnReg
!= OsrFrameReg
,
1385 "Code below depends on osrDataReg != OsrFrameReg");
1386 Register osrDataReg
= ReturnReg
;
1387 masm
.branchTestPtr(Assembler::Zero
, osrDataReg
, osrDataReg
, &done
);
1389 // Success! Switch from Baseline JIT code to Ion JIT code.
1391 // At this point, stack looks like:
1393 // +-> [...Calling-Frame...]
1394 // | [...Actual-Args/ThisV/ArgCount/Callee...]
1397 // +---[Saved-FramePtr]
1398 // [...Baseline-Frame...]
1400 // Restore the stack pointer so that the return address is on top of
1402 masm
.addToStackPtr(Imm32(frame
.frameSize()));
1405 // Get a scratch register that's not osrDataReg or OsrFrameReg.
1406 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
1407 regs
.take(BaselineFrameReg
);
1408 regs
.take(osrDataReg
);
1409 regs
.take(OsrFrameReg
);
1411 Register scratchReg
= regs
.takeAny();
1413 // If profiler instrumentation is on, ensure that lastProfilingFrame is
1414 // the frame currently being OSR-ed
1417 AbsoluteAddress
addressOfEnabled(
1418 cx
->runtime()->geckoProfiler().addressOfEnabled());
1419 masm
.branch32(Assembler::Equal
, addressOfEnabled
, Imm32(0), &checkOk
);
1420 masm
.loadPtr(AbsoluteAddress((void*)&cx
->jitActivation
), scratchReg
);
1422 Address(scratchReg
, JitActivation::offsetOfLastProfilingFrame()),
1425 // It may be the case that we entered the baseline frame with
1426 // profiling turned off on, then in a call within a loop (i.e. a
1427 // callee frame), turn on profiling, then return to this frame,
1428 // and then OSR with profiling turned on. In this case, allow for
1429 // lastProfilingFrame to be null.
1430 masm
.branchPtr(Assembler::Equal
, scratchReg
, ImmWord(0), &checkOk
);
1432 masm
.branchStackPtr(Assembler::Equal
, scratchReg
, &checkOk
);
1433 masm
.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
1434 masm
.bind(&checkOk
);
1439 masm
.loadPtr(Address(osrDataReg
, IonOsrTempData::offsetOfBaselineFrame()),
1441 masm
.jump(Address(osrDataReg
, IonOsrTempData::offsetOfJitCode()));
1445 masm
.PushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1447 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
1448 if (!callVMNonOp
<Fn
, IonCompileScriptForBaselineAtEntry
>()) {
1458 bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
1459 Register scriptReg
= R2
.scratchReg();
1460 Register countReg
= R0
.scratchReg();
1462 // Load the JitScript* in scriptReg.
1463 loadScript(scriptReg
);
1464 masm
.loadJitScript(scriptReg
, scriptReg
);
1466 // Bump warm-up counter.
1467 Address
warmUpCounterAddr(scriptReg
, JitScript::offsetOfWarmUpCount());
1468 masm
.load32(warmUpCounterAddr
, countReg
);
1469 masm
.add32(Imm32(1), countReg
);
1470 masm
.store32(countReg
, warmUpCounterAddr
);
1472 // If the script is warm enough for Baseline compilation, call into the VM to
1475 masm
.branch32(Assembler::BelowOrEqual
, countReg
,
1476 Imm32(JitOptions
.baselineJitWarmUpThreshold
), &done
);
1477 masm
.branchPtr(Assembler::Equal
,
1478 Address(scriptReg
, JitScript::offsetOfBaselineScript()),
1479 ImmPtr(BaselineDisabledScriptPtr
), &done
);
1483 masm
.PushBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
1485 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, uint8_t**);
1486 if (!callVM
<Fn
, BaselineCompileFromBaselineInterpreter
>()) {
1490 // If the function returned nullptr we either skipped compilation or were
1491 // unable to compile the script. Continue running in the interpreter.
1492 masm
.branchTestPtr(Assembler::Zero
, ReturnReg
, ReturnReg
, &done
);
1494 // Success! Switch from interpreter to JIT code by jumping to the
1495 // corresponding code in the BaselineScript.
1497 // This works because BaselineCompiler uses the same frame layout (stack is
1498 // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
1499 // already cleared the RUNNING_IN_INTERPRETER flag for us.
1500 // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
1501 masm
.jump(ReturnReg
);
1509 bool BaselineCompilerCodeGen::emitArgumentTypeChecks() {
1510 if (!IsTypeInferenceEnabled()) {
1514 if (!handler
.function()) {
1519 frame
.popRegsAndSync(1);
1521 if (!emitNextIC()) {
1525 size_t nargs
= handler
.function()->nargs();
1527 for (size_t i
= 0; i
< nargs
; i
++) {
1529 frame
.popRegsAndSync(1);
1531 if (!emitNextIC()) {
1540 bool BaselineInterpreterCodeGen::emitArgumentTypeChecks() {
1541 if (!IsTypeInferenceEnabled()) {
1545 Register scratch1
= R1
.scratchReg();
1547 // If the script is not a function, we're done.
1549 masm
.loadPtr(frame
.addressOfCalleeToken(), scratch1
);
1550 masm
.branchTestPtr(Assembler::NonZero
, scratch1
, Imm32(CalleeTokenScriptBit
),
1553 // CalleeToken_Function or CalleeToken_FunctionConstructing.
1554 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
1556 // The frame's scratch slot is used to store two 32-bit values: nargs (lower
1557 // half) and the argument index (upper half).
1558 masm
.load16ZeroExtend(Address(scratch1
, JSFunction::offsetOfNargs()),
1560 masm
.store32(scratch1
, frame
.addressOfScratchValueLow32());
1562 // Type check |this|.
1563 masm
.loadValue(frame
.addressOfThis(), R0
);
1564 if (!emitNextIC()) {
1567 frame
.bumpInterpreterICEntry();
1569 // Type check arguments. Scratch1 holds the next argument's index.
1570 masm
.move32(Imm32(0), scratch1
);
1575 masm
.branch32(Assembler::Equal
, frame
.addressOfScratchValueLow32(), scratch1
,
1578 // Load the argument, increment argument index and store the index in the
1580 BaseValueIndex
addr(BaselineFrameReg
, scratch1
,
1581 BaselineFrame::offsetOfArg(0));
1582 masm
.loadValue(addr
, R0
);
1583 masm
.add32(Imm32(1), scratch1
);
1584 masm
.store32(scratch1
, frame
.addressOfScratchValueHigh32());
1586 // Type check the argument.
1587 if (!emitNextIC()) {
1590 frame
.bumpInterpreterICEntry();
1592 // Restore argument index.
1593 masm
.load32(frame
.addressOfScratchValueHigh32(), scratch1
);
1601 bool BaselineCompiler::emitDebugTrap() {
1602 MOZ_ASSERT(compileDebugInstrumentation());
1603 MOZ_ASSERT(frame
.numUnsyncedSlots() == 0);
1605 JSScript
* script
= handler
.script();
1606 bool enabled
= DebugAPI::stepModeEnabled(script
) ||
1607 DebugAPI::hasBreakpointsAt(script
, handler
.pc());
1609 // Emit patchable call to debug trap handler.
1610 JitCode
* handlerCode
= cx
->runtime()->jitRuntime()->debugTrapHandler(
1611 cx
, DebugTrapHandlerKind::Compiler
);
1616 CodeOffset nativeOffset
= masm
.toggledCall(handlerCode
, enabled
);
1618 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
1619 if (!debugTrapEntries_
.emplaceBack(pcOffset
, nativeOffset
.offset())) {
1620 ReportOutOfMemory(cx
);
1624 // Add a RetAddrEntry for the return offset -> pc mapping.
1625 return handler
.recordCallRetAddr(cx
, RetAddrEntry::Kind::DebugTrap
,
1626 masm
.currentOffset());
1629 #ifdef JS_TRACE_LOGGING
1631 bool BaselineCompilerCodeGen::emitTraceLoggerEnter() {
1632 AllocatableRegisterSet
regs(RegisterSet::Volatile());
1633 Register loggerReg
= regs
.takeAnyGeneral();
1634 Register scriptReg
= regs
.takeAnyGeneral();
1636 Label noTraceLogger
;
1637 if (!traceLoggerToggleOffsets_
.append(masm
.toggledJump(&noTraceLogger
))) {
1641 masm
.Push(loggerReg
);
1642 masm
.Push(scriptReg
);
1644 masm
.loadTraceLogger(loggerReg
);
1647 masm
.movePtr(ImmPtr(handler
.script()->jitScript()), scriptReg
);
1648 masm
.loadPtr(Address(scriptReg
, JitScript::offsetOfBaselineScript()),
1650 Address
scriptEvent(scriptReg
,
1651 BaselineScript::offsetOfTraceLoggerScriptEvent());
1652 masm
.computeEffectiveAddress(scriptEvent
, scriptReg
);
1653 masm
.tracelogStartEvent(loggerReg
, scriptReg
);
1656 masm
.tracelogStartId(loggerReg
, TraceLogger_Baseline
, /* force = */ true);
1658 masm
.Pop(scriptReg
);
1659 masm
.Pop(loggerReg
);
1661 masm
.bind(&noTraceLogger
);
1667 bool BaselineInterpreterCodeGen::emitTraceLoggerEnter() {
1668 if (JS::TraceLoggerSupported()) {
1669 MOZ_CRASH("NYI: interpreter emitTraceLoggerEnter");
1674 template <typename Handler
>
1675 bool BaselineCodeGen
<Handler
>::emitTraceLoggerExit() {
1676 AllocatableRegisterSet
regs(RegisterSet::Volatile());
1677 Register loggerReg
= regs
.takeAnyGeneral();
1679 Label noTraceLogger
;
1680 if (!traceLoggerToggleOffsets_
.append(masm
.toggledJump(&noTraceLogger
))) {
1684 masm
.Push(loggerReg
);
1685 masm
.loadTraceLogger(loggerReg
);
1687 masm
.tracelogStopId(loggerReg
, TraceLogger_Baseline
, /* force = */ true);
1688 masm
.tracelogStopId(loggerReg
, TraceLogger_Scripts
, /* force = */ true);
1690 masm
.Pop(loggerReg
);
1692 masm
.bind(&noTraceLogger
);
1697 template <typename Handler
>
1698 bool BaselineCodeGen
<Handler
>::emitTraceLoggerResume(
1699 Register baselineScript
, AllocatableGeneralRegisterSet
& regs
) {
1700 Register scriptId
= regs
.takeAny();
1701 Register loggerReg
= regs
.takeAny();
1703 Label noTraceLogger
;
1704 if (!traceLoggerToggleOffsets_
.append(masm
.toggledJump(&noTraceLogger
))) {
1708 masm
.loadTraceLogger(loggerReg
);
1710 Address
scriptEvent(baselineScript
,
1711 BaselineScript::offsetOfTraceLoggerScriptEvent());
1712 masm
.computeEffectiveAddress(scriptEvent
, scriptId
);
1713 masm
.tracelogStartEvent(loggerReg
, scriptId
);
1714 masm
.tracelogStartId(loggerReg
, TraceLogger_Baseline
, /* force = */ true);
1716 regs
.add(loggerReg
);
1719 masm
.bind(&noTraceLogger
);
1725 template <typename Handler
>
1726 void BaselineCodeGen
<Handler
>::emitProfilerEnterFrame() {
1727 // Store stack position to lastProfilingFrame variable, guarded by a toggled
1728 // jump. Starts off initially disabled.
1730 CodeOffset toggleOffset
= masm
.toggledJump(&noInstrument
);
1731 masm
.profilerEnterFrame(masm
.getStackPointer(), R0
.scratchReg());
1732 masm
.bind(&noInstrument
);
1734 // Store the start offset in the appropriate location.
1735 MOZ_ASSERT(!profilerEnterFrameToggleOffset_
.bound());
1736 profilerEnterFrameToggleOffset_
= toggleOffset
;
1739 template <typename Handler
>
1740 void BaselineCodeGen
<Handler
>::emitProfilerExitFrame() {
1741 // Store previous frame to lastProfilingFrame variable, guarded by a toggled
1742 // jump. Starts off initially disabled.
1744 CodeOffset toggleOffset
= masm
.toggledJump(&noInstrument
);
1745 masm
.profilerExitFrame();
1746 masm
.bind(&noInstrument
);
1748 // Store the start offset in the appropriate location.
1749 MOZ_ASSERT(!profilerExitFrameToggleOffset_
.bound());
1750 profilerExitFrameToggleOffset_
= toggleOffset
;
1753 template <typename Handler
>
1754 bool BaselineCodeGen
<Handler
>::emit_Nop() {
1758 template <typename Handler
>
1759 bool BaselineCodeGen
<Handler
>::emit_IterNext() {
1763 template <typename Handler
>
1764 bool BaselineCodeGen
<Handler
>::emit_NopDestructuring() {
1768 template <typename Handler
>
1769 bool BaselineCodeGen
<Handler
>::emit_TryDestructuring() {
1773 template <typename Handler
>
1774 bool BaselineCodeGen
<Handler
>::emit_Pop() {
1780 bool BaselineCompilerCodeGen::emit_PopN() {
1781 frame
.popn(GET_UINT16(handler
.pc()));
1786 bool BaselineInterpreterCodeGen::emit_PopN() {
1787 LoadUint16Operand(masm
, R0
.scratchReg());
1788 frame
.popn(R0
.scratchReg());
1793 bool BaselineCompilerCodeGen::emit_DupAt() {
1796 // DupAt takes a value on the stack and re-pushes it on top. It's like
1797 // GetLocal but it addresses from the top of the stack instead of from the
1800 int depth
= -(GET_UINT24(handler
.pc()) + 1);
1801 masm
.loadValue(frame
.addressOfStackValue(depth
), R0
);
1807 bool BaselineInterpreterCodeGen::emit_DupAt() {
1808 LoadUint24Operand(masm
, 0, R0
.scratchReg());
1809 masm
.loadValue(frame
.addressOfStackValue(R0
.scratchReg()), R0
);
1814 template <typename Handler
>
1815 bool BaselineCodeGen
<Handler
>::emit_Dup() {
1816 // Keep top stack value in R0, sync the rest so that we can use R1. We use
1817 // separate registers because every register can be used by at most one
1819 frame
.popRegsAndSync(1);
1820 masm
.moveValue(R0
, R1
);
1822 // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
1828 template <typename Handler
>
1829 bool BaselineCodeGen
<Handler
>::emit_Dup2() {
1832 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
1833 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
1840 template <typename Handler
>
1841 bool BaselineCodeGen
<Handler
>::emit_Swap() {
1842 // Keep top stack values in R0 and R1.
1843 frame
.popRegsAndSync(2);
1851 bool BaselineCompilerCodeGen::emit_Pick() {
1854 // Pick takes a value on the stack and moves it to the top.
1855 // For instance, pick 2:
1856 // before: A B C D E
1857 // after : A B D E C
1859 // First, move value at -(amount + 1) into R0.
1860 int32_t depth
= -(GET_INT8(handler
.pc()) + 1);
1861 masm
.loadValue(frame
.addressOfStackValue(depth
), R0
);
1863 // Move the other values down.
1865 for (; depth
< 0; depth
++) {
1866 Address source
= frame
.addressOfStackValue(depth
);
1867 Address dest
= frame
.addressOfStackValue(depth
- 1);
1868 masm
.loadValue(source
, R1
);
1869 masm
.storeValue(R1
, dest
);
1879 bool BaselineInterpreterCodeGen::emit_Pick() {
1880 // First, move the value to move up into R0.
1881 Register scratch
= R2
.scratchReg();
1882 LoadUint8Operand(masm
, scratch
);
1883 masm
.loadValue(frame
.addressOfStackValue(scratch
), R0
);
1885 // Move the other values down.
1888 masm
.branchSub32(Assembler::Signed
, Imm32(1), scratch
, &done
);
1890 masm
.loadValue(frame
.addressOfStackValue(scratch
), R1
);
1891 masm
.storeValue(R1
, frame
.addressOfStackValue(scratch
, sizeof(Value
)));
1897 // Replace value on top of the stack with R0.
1898 masm
.storeValue(R0
, frame
.addressOfStackValue(-1));
1903 bool BaselineCompilerCodeGen::emit_Unpick() {
1906 // Pick takes the top of the stack value and moves it under the nth value.
1907 // For instance, unpick 2:
1908 // before: A B C D E
1909 // after : A B E C D
1911 // First, move value at -1 into R0.
1912 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
1914 MOZ_ASSERT(GET_INT8(handler
.pc()) > 0,
1915 "Interpreter code assumes JSOp::Unpick operand > 0");
1917 // Move the other values up.
1918 int32_t depth
= -(GET_INT8(handler
.pc()) + 1);
1919 for (int32_t i
= -1; i
> depth
; i
--) {
1920 Address source
= frame
.addressOfStackValue(i
- 1);
1921 Address dest
= frame
.addressOfStackValue(i
);
1922 masm
.loadValue(source
, R1
);
1923 masm
.storeValue(R1
, dest
);
1926 // Store R0 under the nth value.
1927 Address dest
= frame
.addressOfStackValue(depth
);
1928 masm
.storeValue(R0
, dest
);
1933 bool BaselineInterpreterCodeGen::emit_Unpick() {
1934 Register scratch
= R2
.scratchReg();
1935 LoadUint8Operand(masm
, scratch
);
1937 // Move the top value into R0.
1938 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
1940 // Overwrite the nth stack value with R0 but first save the old value in R1.
1941 masm
.loadValue(frame
.addressOfStackValue(scratch
), R1
);
1942 masm
.storeValue(R0
, frame
.addressOfStackValue(scratch
));
1944 // Now for each slot x in [n-1, 1] do the following:
1946 // * Store the value in slot x in R0.
1947 // * Store the value in the previous slot (now in R1) in slot x.
1951 // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
1955 masm
.branch32(Assembler::GreaterThan
, scratch
, Imm32(0), &ok
);
1956 masm
.assumeUnreachable("JSOp::Unpick with operand <= 0?");
1963 masm
.branchSub32(Assembler::Zero
, Imm32(1), scratch
, &done
);
1965 // Overwrite stack slot x with slot x + 1, saving the old value in R1.
1966 masm
.loadValue(frame
.addressOfStackValue(scratch
), R0
);
1967 masm
.storeValue(R1
, frame
.addressOfStackValue(scratch
));
1968 masm
.moveValue(R0
, R1
);
1972 // Finally, replace the value on top of the stack (slot 0) with R1. This is
1973 // the value that used to be in slot 1.
1975 masm
.storeValue(R1
, frame
.addressOfStackValue(-1));
1980 void BaselineCompilerCodeGen::emitJump() {
1981 jsbytecode
* pc
= handler
.pc();
1982 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc
)));
1983 frame
.assertSyncedStack();
1985 jsbytecode
* target
= pc
+ GET_JUMP_OFFSET(pc
);
1986 masm
.jump(handler
.labelOf(target
));
1990 void BaselineInterpreterCodeGen::emitJump() {
1991 // We have to add the current pc's jump offset to the current pc. We can use
1992 // R0 and R1 as scratch because we jump to the "next op" label so these
1993 // registers aren't in use at this point.
1994 Register scratch1
= R0
.scratchReg();
1995 Register scratch2
= R1
.scratchReg();
1996 Register pc
= LoadBytecodePC(masm
, scratch1
);
1997 LoadInt32OperandSignExtendToPtr(masm
, pc
, scratch2
);
1998 if (HasInterpreterPCReg()) {
1999 masm
.addPtr(scratch2
, InterpreterPCReg
);
2001 masm
.addPtr(pc
, scratch2
);
2002 masm
.storePtr(scratch2
, frame
.addressOfInterpreterPC());
2004 masm
.jump(handler
.interpretOpWithPCRegLabel());
2008 void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue
,
2010 jsbytecode
* pc
= handler
.pc();
2011 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc
)));
2012 frame
.assertSyncedStack();
2014 jsbytecode
* target
= pc
+ GET_JUMP_OFFSET(pc
);
2015 masm
.branchTestBooleanTruthy(branchIfTrue
, val
, handler
.labelOf(target
));
2019 void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue
,
2022 masm
.branchTestBooleanTruthy(!branchIfTrue
, val
, &done
);
2028 template <typename F1
, typename F2
>
2029 MOZ_MUST_USE
bool BaselineCompilerCodeGen::emitTestScriptFlag(
2030 JSScript::ImmutableFlags flag
, const F1
& ifSet
, const F2
& ifNotSet
,
2032 return handler
.script()->hasFlag(flag
) ? ifSet() : ifNotSet();
2036 template <typename F1
, typename F2
>
2037 MOZ_MUST_USE
bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2038 JSScript::ImmutableFlags flag
, const F1
& ifSet
, const F2
& ifNotSet
,
2040 Label flagNotSet
, done
;
2041 loadScript(scratch
);
2042 masm
.branchTest32(Assembler::Zero
,
2043 Address(scratch
, JSScript::offsetOfImmutableFlags()),
2044 Imm32(uint32_t(flag
)), &flagNotSet
);
2051 masm
.bind(&flagNotSet
);
2063 template <typename F
>
2064 MOZ_MUST_USE
bool BaselineCompilerCodeGen::emitTestScriptFlag(
2065 JSScript::ImmutableFlags flag
, bool value
, const F
& emit
,
2067 if (handler
.script()->hasFlag(flag
) == value
) {
2074 template <typename F
>
2075 MOZ_MUST_USE
bool BaselineCompilerCodeGen::emitTestScriptFlag(
2076 JSScript::MutableFlags flag
, bool value
, const F
& emit
, Register scratch
) {
2077 if (handler
.script()->hasFlag(flag
) == value
) {
2084 template <typename F
>
2085 MOZ_MUST_USE
bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2086 JSScript::ImmutableFlags flag
, bool value
, const F
& emit
,
2089 loadScript(scratch
);
2090 masm
.branchTest32(value
? Assembler::Zero
: Assembler::NonZero
,
2091 Address(scratch
, JSScript::offsetOfImmutableFlags()),
2092 Imm32(uint32_t(flag
)), &done
);
2104 template <typename F
>
2105 MOZ_MUST_USE
bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2106 JSScript::MutableFlags flag
, bool value
, const F
& emit
, Register scratch
) {
2108 loadScript(scratch
);
2109 masm
.branchTest32(value
? Assembler::Zero
: Assembler::NonZero
,
2110 Address(scratch
, JSScript::offsetOfMutableFlags()),
2111 Imm32(uint32_t(flag
)), &done
);
2122 template <typename Handler
>
2123 bool BaselineCodeGen
<Handler
>::emit_Goto() {
2129 template <typename Handler
>
2130 bool BaselineCodeGen
<Handler
>::emitToBoolean() {
2132 masm
.branchTestBoolean(Assembler::Equal
, R0
, &skipIC
);
2135 if (!emitNextIC()) {
2143 template <typename Handler
>
2144 bool BaselineCodeGen
<Handler
>::emitTest(bool branchIfTrue
) {
2145 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2147 // Keep top stack value in R0.
2148 frame
.popRegsAndSync(1);
2150 if (!knownBoolean
&& !emitToBoolean()) {
2154 // IC will leave a BooleanValue in R0, just need to branch on it.
2155 emitTestBooleanTruthy(branchIfTrue
, R0
);
2159 template <typename Handler
>
2160 bool BaselineCodeGen
<Handler
>::emit_IfEq() {
2161 return emitTest(false);
2164 template <typename Handler
>
2165 bool BaselineCodeGen
<Handler
>::emit_IfNe() {
2166 return emitTest(true);
2169 template <typename Handler
>
2170 bool BaselineCodeGen
<Handler
>::emitAndOr(bool branchIfTrue
) {
2171 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2173 // And and Or leave the original value on the stack.
2176 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2177 if (!knownBoolean
&& !emitToBoolean()) {
2181 emitTestBooleanTruthy(branchIfTrue
, R0
);
2185 template <typename Handler
>
2186 bool BaselineCodeGen
<Handler
>::emit_And() {
2187 return emitAndOr(false);
2190 template <typename Handler
>
2191 bool BaselineCodeGen
<Handler
>::emit_Or() {
2192 return emitAndOr(true);
2195 template <typename Handler
>
2196 bool BaselineCodeGen
<Handler
>::emit_Coalesce() {
2197 // Coalesce leaves the original value on the stack.
2200 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2202 Label undefinedOrNull
;
2204 masm
.branchTestUndefined(Assembler::Equal
, R0
, &undefinedOrNull
);
2205 masm
.branchTestNull(Assembler::Equal
, R0
, &undefinedOrNull
);
2208 masm
.bind(&undefinedOrNull
);
2213 template <typename Handler
>
2214 bool BaselineCodeGen
<Handler
>::emit_Not() {
2215 bool knownBoolean
= frame
.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN
);
2217 // Keep top stack value in R0.
2218 frame
.popRegsAndSync(1);
2220 if (!knownBoolean
&& !emitToBoolean()) {
2224 masm
.notBoolean(R0
);
2226 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
2230 template <typename Handler
>
2231 bool BaselineCodeGen
<Handler
>::emit_Pos() {
2232 return emitUnaryArith();
2235 template <typename Handler
>
2236 bool BaselineCodeGen
<Handler
>::emit_ToNumeric() {
2237 return emitUnaryArith();
2240 template <typename Handler
>
2241 bool BaselineCodeGen
<Handler
>::emit_LoopHead() {
2242 if (!emit_JumpTarget()) {
2245 if (!emitInterruptCheck()) {
2248 if (!emitWarmUpCounterIncrement()) {
2254 template <typename Handler
>
2255 bool BaselineCodeGen
<Handler
>::emit_Void() {
2257 frame
.push(UndefinedValue());
2261 template <typename Handler
>
2262 bool BaselineCodeGen
<Handler
>::emit_Undefined() {
2263 // If this ever changes, change what JSOp::GImplicitThis does too.
2264 frame
.push(UndefinedValue());
2268 template <typename Handler
>
2269 bool BaselineCodeGen
<Handler
>::emit_Hole() {
2270 frame
.push(MagicValue(JS_ELEMENTS_HOLE
));
2274 template <typename Handler
>
2275 bool BaselineCodeGen
<Handler
>::emit_Null() {
2276 frame
.push(NullValue());
2280 template <typename Handler
>
2281 bool BaselineCodeGen
<Handler
>::emit_CheckIsObj() {
2283 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2286 masm
.branchTestObject(Assembler::Equal
, R0
, &ok
);
2290 pushUint8BytecodeOperandArg(R0
.scratchReg());
2292 using Fn
= bool (*)(JSContext
*, CheckIsObjectKind
);
2293 if (!callVM
<Fn
, ThrowCheckIsObject
>()) {
2301 template <typename Handler
>
2302 bool BaselineCodeGen
<Handler
>::emit_CheckThis() {
2304 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2306 return emitCheckThis(R0
);
2309 template <typename Handler
>
2310 bool BaselineCodeGen
<Handler
>::emit_CheckThisReinit() {
2312 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
2314 return emitCheckThis(R0
, /* reinit = */ true);
2317 template <typename Handler
>
2318 bool BaselineCodeGen
<Handler
>::emitCheckThis(ValueOperand val
, bool reinit
) {
2321 masm
.branchTestMagic(Assembler::Equal
, val
, &thisOK
);
2323 masm
.branchTestMagic(Assembler::NotEqual
, val
, &thisOK
);
2329 using Fn
= bool (*)(JSContext
*);
2330 if (!callVM
<Fn
, ThrowInitializedThis
>()) {
2334 using Fn
= bool (*)(JSContext
*);
2335 if (!callVM
<Fn
, ThrowUninitializedThis
>()) {
2344 template <typename Handler
>
2345 bool BaselineCodeGen
<Handler
>::emit_CheckReturn() {
2346 MOZ_ASSERT_IF(handler
.maybeScript(),
2347 handler
.maybeScript()->isDerivedClassConstructor());
2349 // Load |this| in R0, return value in R1.
2350 frame
.popRegsAndSync(1);
2351 emitLoadReturnValue(R1
);
2353 Label done
, returnOK
;
2354 masm
.branchTestObject(Assembler::Equal
, R1
, &done
);
2355 masm
.branchTestUndefined(Assembler::Equal
, R1
, &returnOK
);
2360 using Fn
= bool (*)(JSContext
*, HandleValue
);
2361 if (!callVM
<Fn
, ThrowBadDerivedReturn
>()) {
2364 masm
.assumeUnreachable("Should throw on bad derived constructor return");
2366 masm
.bind(&returnOK
);
2368 if (!emitCheckThis(R0
)) {
2372 // Store |this| in the return value slot.
2373 masm
.storeValue(R0
, frame
.addressOfReturnValue());
2374 masm
.or32(Imm32(BaselineFrame::HAS_RVAL
), frame
.addressOfFlags());
2380 template <typename Handler
>
2381 bool BaselineCodeGen
<Handler
>::emit_FunctionThis() {
2382 MOZ_ASSERT_IF(handler
.maybeFunction(), !handler
.maybeFunction()->isArrow());
2386 auto boxThis
= [this]() {
2387 // Load |thisv| in R0. Skip the call if it's already an object.
2389 frame
.popRegsAndSync(1);
2390 masm
.branchTestObject(Assembler::Equal
, R0
, &skipCall
);
2393 masm
.loadBaselineFramePtr(BaselineFrameReg
, R1
.scratchReg());
2395 pushArg(R1
.scratchReg());
2397 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, MutableHandleValue
);
2398 if (!callVM
<Fn
, BaselineGetFunctionThis
>()) {
2402 masm
.bind(&skipCall
);
2407 // In strict mode code, |this| is left alone.
2408 return emitTestScriptFlag(JSScript::ImmutableFlags::Strict
, false, boxThis
,
2412 template <typename Handler
>
2413 bool BaselineCodeGen
<Handler
>::emit_GlobalThis() {
2416 auto getNonSyntacticThis
= [this]() {
2419 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
2420 pushArg(R0
.scratchReg());
2422 using Fn
= void (*)(JSContext
*, HandleObject
, MutableHandleValue
);
2423 if (!callVM
<Fn
, GetNonSyntacticGlobalThis
>()) {
2430 auto getGlobalThis
= [this]() {
2431 loadGlobalThisValue(R0
);
2435 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope
,
2436 getNonSyntacticThis
, getGlobalThis
,
2440 template <typename Handler
>
2441 bool BaselineCodeGen
<Handler
>::emit_True() {
2442 frame
.push(BooleanValue(true));
2446 template <typename Handler
>
2447 bool BaselineCodeGen
<Handler
>::emit_False() {
2448 frame
.push(BooleanValue(false));
2452 template <typename Handler
>
2453 bool BaselineCodeGen
<Handler
>::emit_Zero() {
2454 frame
.push(Int32Value(0));
2458 template <typename Handler
>
2459 bool BaselineCodeGen
<Handler
>::emit_One() {
2460 frame
.push(Int32Value(1));
2465 bool BaselineCompilerCodeGen::emit_Int8() {
2466 frame
.push(Int32Value(GET_INT8(handler
.pc())));
2471 bool BaselineInterpreterCodeGen::emit_Int8() {
2472 LoadInt8Operand(masm
, R0
.scratchReg());
2473 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2479 bool BaselineCompilerCodeGen::emit_Int32() {
2480 frame
.push(Int32Value(GET_INT32(handler
.pc())));
2485 bool BaselineInterpreterCodeGen::emit_Int32() {
2486 LoadInt32Operand(masm
, R0
.scratchReg());
2487 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2493 bool BaselineCompilerCodeGen::emit_Uint16() {
2494 frame
.push(Int32Value(GET_UINT16(handler
.pc())));
2499 bool BaselineInterpreterCodeGen::emit_Uint16() {
2500 LoadUint16Operand(masm
, R0
.scratchReg());
2501 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2507 bool BaselineCompilerCodeGen::emit_Uint24() {
2508 frame
.push(Int32Value(GET_UINT24(handler
.pc())));
2513 bool BaselineInterpreterCodeGen::emit_Uint24() {
2514 LoadUint24Operand(masm
, 0, R0
.scratchReg());
2515 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
2520 template <typename Handler
>
2521 bool BaselineCodeGen
<Handler
>::emit_ResumeIndex() {
2522 return emit_Uint24();
2526 bool BaselineCompilerCodeGen::emit_Double() {
2527 frame
.push(GET_INLINE_VALUE(handler
.pc()));
2532 bool BaselineInterpreterCodeGen::emit_Double() {
2533 LoadInlineValueOperand(masm
, R0
);
2539 bool BaselineCompilerCodeGen::emit_BigInt() {
2540 BigInt
* bi
= handler
.script()->getBigInt(handler
.pc());
2541 frame
.push(BigIntValue(bi
));
2546 bool BaselineInterpreterCodeGen::emit_BigInt() {
2547 Register scratch1
= R0
.scratchReg();
2548 Register scratch2
= R1
.scratchReg();
2549 loadScriptGCThing(ScriptGCThingType::BigInt
, scratch1
, scratch2
);
2550 masm
.tagValue(JSVAL_TYPE_BIGINT
, scratch1
, R0
);
2556 bool BaselineCompilerCodeGen::emit_String() {
2557 frame
.push(StringValue(handler
.script()->getAtom(handler
.pc())));
2562 bool BaselineInterpreterCodeGen::emit_String() {
2563 Register scratch1
= R0
.scratchReg();
2564 Register scratch2
= R1
.scratchReg();
2565 loadScriptGCThing(ScriptGCThingType::Atom
, scratch1
, scratch2
);
2566 masm
.tagValue(JSVAL_TYPE_STRING
, scratch1
, R0
);
2572 bool BaselineCompilerCodeGen::emit_Symbol() {
2573 unsigned which
= GET_UINT8(handler
.pc());
2574 JS::Symbol
* sym
= cx
->runtime()->wellKnownSymbols
->get(which
);
2575 frame
.push(SymbolValue(sym
));
2580 bool BaselineInterpreterCodeGen::emit_Symbol() {
2581 Register scratch1
= R0
.scratchReg();
2582 Register scratch2
= R1
.scratchReg();
2583 LoadUint8Operand(masm
, scratch1
);
2585 masm
.movePtr(ImmPtr(cx
->runtime()->wellKnownSymbols
), scratch2
);
2586 masm
.loadPtr(BaseIndex(scratch2
, scratch1
, ScalePointer
), scratch1
);
2588 masm
.tagValue(JSVAL_TYPE_SYMBOL
, scratch1
, R0
);
2593 JSObject
* BaselineCompilerHandler::maybeNoCloneSingletonObject() {
2594 Realm
* realm
= script()->realm();
2595 if (realm
->creationOptions().cloneSingletons()) {
2599 realm
->behaviors().setSingletonsAsValues();
2600 return script()->getObject(pc());
2603 template <typename Handler
>
2604 bool BaselineCodeGen
<Handler
>::emit_Object() {
2605 // If we know we don't have to clone the object literal, just push it
2606 // directly. Note that the interpreter always does the VM call; that's fine
2607 // because this op is only used in run-once code.
2608 if (JSObject
* obj
= handler
.maybeNoCloneSingletonObject()) {
2609 frame
.push(ObjectValue(*obj
));
2615 pushBytecodePCArg();
2618 using Fn
= JSObject
* (*)(JSContext
*, HandleScript
, jsbytecode
*);
2619 if (!callVM
<Fn
, SingletonObjectLiteralOperation
>()) {
2623 // Box and push return value.
2624 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2630 bool BaselineCompilerCodeGen::emit_CallSiteObj() {
2631 RootedScript
script(cx
, handler
.script());
2632 JSObject
* cso
= ProcessCallSiteObjOperation(cx
, script
, handler
.pc());
2637 frame
.push(ObjectValue(*cso
));
2642 bool BaselineInterpreterCodeGen::emit_CallSiteObj() {
2645 pushBytecodePCArg();
2648 using Fn
= ArrayObject
* (*)(JSContext
*, HandleScript
, jsbytecode
*);
2649 if (!callVM
<Fn
, ProcessCallSiteObjOperation
>()) {
2653 // Box and push return value.
2654 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2659 template <typename Handler
>
2660 bool BaselineCodeGen
<Handler
>::emit_RegExp() {
2662 pushScriptGCThingArg(ScriptGCThingType::RegExp
, R0
.scratchReg(),
2665 using Fn
= JSObject
* (*)(JSContext
*, Handle
<RegExpObject
*>);
2666 if (!callVM
<Fn
, CloneRegExpObject
>()) {
2670 // Box and push return value.
2671 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2676 template <typename Handler
>
2677 bool BaselineCodeGen
<Handler
>::emit_Lambda() {
2679 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
2681 pushArg(R0
.scratchReg());
2682 pushScriptGCThingArg(ScriptGCThingType::Function
, R0
.scratchReg(),
2685 using Fn
= JSObject
* (*)(JSContext
*, HandleFunction
, HandleObject
);
2686 if (!callVM
<Fn
, js::Lambda
>()) {
2690 // Box and push return value.
2691 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2696 template <typename Handler
>
2697 bool BaselineCodeGen
<Handler
>::emit_LambdaArrow() {
2698 // Keep pushed newTarget in R0.
2699 frame
.popRegsAndSync(1);
2702 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R2
.scratchReg());
2705 pushArg(R2
.scratchReg());
2706 pushScriptGCThingArg(ScriptGCThingType::Function
, R0
.scratchReg(),
2710 JSObject
* (*)(JSContext
*, HandleFunction
, HandleObject
, HandleValue
);
2711 if (!callVM
<Fn
, js::LambdaArrow
>()) {
2715 // Box and push return value.
2716 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2721 template <typename Handler
>
2722 bool BaselineCodeGen
<Handler
>::emit_SetFunName() {
2723 frame
.popRegsAndSync(2);
2728 masm
.unboxObject(R0
, R0
.scratchReg());
2732 pushUint8BytecodeOperandArg(R2
.scratchReg());
2734 pushArg(R0
.scratchReg());
2737 bool (*)(JSContext
*, HandleFunction
, HandleValue
, FunctionPrefixKind
);
2738 return callVM
<Fn
, SetFunctionName
>();
2741 template <typename Handler
>
2742 bool BaselineCodeGen
<Handler
>::emit_BitOr() {
2743 return emitBinaryArith();
2746 template <typename Handler
>
2747 bool BaselineCodeGen
<Handler
>::emit_BitXor() {
2748 return emitBinaryArith();
2751 template <typename Handler
>
2752 bool BaselineCodeGen
<Handler
>::emit_BitAnd() {
2753 return emitBinaryArith();
2756 template <typename Handler
>
2757 bool BaselineCodeGen
<Handler
>::emit_Lsh() {
2758 return emitBinaryArith();
2761 template <typename Handler
>
2762 bool BaselineCodeGen
<Handler
>::emit_Rsh() {
2763 return emitBinaryArith();
2766 template <typename Handler
>
2767 bool BaselineCodeGen
<Handler
>::emit_Ursh() {
2768 return emitBinaryArith();
2771 template <typename Handler
>
2772 bool BaselineCodeGen
<Handler
>::emit_Add() {
2773 return emitBinaryArith();
2776 template <typename Handler
>
2777 bool BaselineCodeGen
<Handler
>::emit_Sub() {
2778 return emitBinaryArith();
2781 template <typename Handler
>
2782 bool BaselineCodeGen
<Handler
>::emit_Mul() {
2783 return emitBinaryArith();
2786 template <typename Handler
>
2787 bool BaselineCodeGen
<Handler
>::emit_Div() {
2788 return emitBinaryArith();
2791 template <typename Handler
>
2792 bool BaselineCodeGen
<Handler
>::emit_Mod() {
2793 return emitBinaryArith();
2796 template <typename Handler
>
2797 bool BaselineCodeGen
<Handler
>::emit_Pow() {
2798 return emitBinaryArith();
2801 template <typename Handler
>
2802 bool BaselineCodeGen
<Handler
>::emitBinaryArith() {
2803 // Keep top JSStack value in R0 and R2
2804 frame
.popRegsAndSync(2);
2807 if (!emitNextIC()) {
2811 // Mark R0 as pushed stack value.
2816 template <typename Handler
>
2817 bool BaselineCodeGen
<Handler
>::emitUnaryArith() {
2818 // Keep top stack value in R0.
2819 frame
.popRegsAndSync(1);
2822 if (!emitNextIC()) {
2826 // Mark R0 as pushed stack value.
2831 template <typename Handler
>
2832 bool BaselineCodeGen
<Handler
>::emit_BitNot() {
2833 return emitUnaryArith();
2836 template <typename Handler
>
2837 bool BaselineCodeGen
<Handler
>::emit_Neg() {
2838 return emitUnaryArith();
2841 template <typename Handler
>
2842 bool BaselineCodeGen
<Handler
>::emit_Inc() {
2843 return emitUnaryArith();
2846 template <typename Handler
>
2847 bool BaselineCodeGen
<Handler
>::emit_Dec() {
2848 return emitUnaryArith();
2851 template <typename Handler
>
2852 bool BaselineCodeGen
<Handler
>::emit_Lt() {
2853 return emitCompare();
2856 template <typename Handler
>
2857 bool BaselineCodeGen
<Handler
>::emit_Le() {
2858 return emitCompare();
2861 template <typename Handler
>
2862 bool BaselineCodeGen
<Handler
>::emit_Gt() {
2863 return emitCompare();
2866 template <typename Handler
>
2867 bool BaselineCodeGen
<Handler
>::emit_Ge() {
2868 return emitCompare();
2871 template <typename Handler
>
2872 bool BaselineCodeGen
<Handler
>::emit_Eq() {
2873 return emitCompare();
2876 template <typename Handler
>
2877 bool BaselineCodeGen
<Handler
>::emit_Ne() {
2878 return emitCompare();
2881 template <typename Handler
>
2882 bool BaselineCodeGen
<Handler
>::emitCompare() {
2885 // Keep top JSStack value in R0 and R1.
2886 frame
.popRegsAndSync(2);
2889 if (!emitNextIC()) {
2893 // Mark R0 as pushed stack value.
2894 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
2898 template <typename Handler
>
2899 bool BaselineCodeGen
<Handler
>::emit_StrictEq() {
2900 return emitCompare();
2903 template <typename Handler
>
2904 bool BaselineCodeGen
<Handler
>::emit_StrictNe() {
2905 return emitCompare();
2908 template <typename Handler
>
2909 bool BaselineCodeGen
<Handler
>::emit_Case() {
2910 frame
.popRegsAndSync(1);
2913 masm
.branchTestBooleanTruthy(/* branchIfTrue */ false, R0
, &done
);
2915 // Pop the switch value if the case matches.
2916 masm
.addToStackPtr(Imm32(sizeof(Value
)));
2923 template <typename Handler
>
2924 bool BaselineCodeGen
<Handler
>::emit_Default() {
2929 template <typename Handler
>
2930 bool BaselineCodeGen
<Handler
>::emit_Lineno() {
2934 template <typename Handler
>
2935 bool BaselineCodeGen
<Handler
>::emit_NewArray() {
2938 // Pass length in R0.
2939 loadInt32LengthBytecodeOperand(R0
.scratchReg());
2941 if (!emitNextIC()) {
2950 bool BaselineCompilerCodeGen::emit_NewArrayCopyOnWrite() {
2951 // This is like the interpreter implementation, but we can call
2952 // getOrFixupCopyOnWriteObject at compile-time.
2954 RootedScript
scriptRoot(cx
, handler
.script());
2956 ObjectGroup::getOrFixupCopyOnWriteObject(cx
, scriptRoot
, handler
.pc());
2963 pushArg(ImmGCPtr(obj
));
2965 using Fn
= ArrayObject
* (*)(JSContext
*, HandleArrayObject
);
2966 if (!callVM
<Fn
, js::NewDenseCopyOnWriteArray
>()) {
2970 // Box and push return value.
2971 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2977 bool BaselineInterpreterCodeGen::emit_NewArrayCopyOnWrite() {
2980 pushBytecodePCArg();
2983 using Fn
= ArrayObject
* (*)(JSContext
*, HandleScript
, jsbytecode
*);
2984 if (!callVM
<Fn
, NewArrayCopyOnWriteOperation
>()) {
2988 // Box and push return value.
2989 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
2994 template <typename Handler
>
2995 bool BaselineCodeGen
<Handler
>::emit_InitElemArray() {
2996 // Keep the object and rhs on the stack.
2999 // Load object in R0, index in R1.
3000 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3001 loadInt32IndexBytecodeOperand(R1
);
3004 if (!emitNextIC()) {
3008 // Pop the rhs, so that the object is on the top of the stack.
3013 template <typename Handler
>
3014 bool BaselineCodeGen
<Handler
>::emit_NewObject() {
3015 return emitNewObject();
3018 template <typename Handler
>
3019 bool BaselineCodeGen
<Handler
>::emit_NewObjectWithGroup() {
3020 return emitNewObject();
3023 template <typename Handler
>
3024 bool BaselineCodeGen
<Handler
>::emit_NewInit() {
3025 return emitNewObject();
3028 template <typename Handler
>
3029 bool BaselineCodeGen
<Handler
>::emitNewObject() {
3032 if (!emitNextIC()) {
3040 template <typename Handler
>
3041 bool BaselineCodeGen
<Handler
>::emit_InitElem() {
3042 // Store RHS in the scratch slot.
3043 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
3046 // Keep object and index in R0 and R1.
3047 frame
.popRegsAndSync(2);
3049 // Push the object to store the result of the IC.
3053 // Keep RHS on the stack.
3054 frame
.pushScratchValue();
3057 if (!emitNextIC()) {
3061 // Pop the rhs, so that the object is on the top of the stack.
3066 template <typename Handler
>
3067 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElem() {
3068 return emit_InitElem();
3071 template <typename Handler
>
3072 bool BaselineCodeGen
<Handler
>::emit_InitLockedElem() {
3073 return emit_InitElem();
3076 template <typename Handler
>
3077 bool BaselineCodeGen
<Handler
>::emit_MutateProto() {
3078 // Keep values on the stack for the decompiler.
3081 masm
.unboxObject(frame
.addressOfStackValue(-2), R0
.scratchReg());
3082 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3087 pushArg(R0
.scratchReg());
3089 using Fn
= bool (*)(JSContext
*, HandlePlainObject
, HandleValue
);
3090 if (!callVM
<Fn
, MutatePrototype
>()) {
3098 template <typename Handler
>
3099 bool BaselineCodeGen
<Handler
>::emit_InitProp() {
3100 // Load lhs in R0, rhs in R1.
3102 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3103 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3106 if (!emitNextIC()) {
3110 // Leave the object on the stack.
3115 template <typename Handler
>
3116 bool BaselineCodeGen
<Handler
>::emit_InitLockedProp() {
3117 return emit_InitProp();
3120 template <typename Handler
>
3121 bool BaselineCodeGen
<Handler
>::emit_InitHiddenProp() {
3122 return emit_InitProp();
3125 template <typename Handler
>
3126 bool BaselineCodeGen
<Handler
>::emit_GetElem() {
3127 // Keep top two stack values in R0 and R1.
3128 frame
.popRegsAndSync(2);
3131 if (!emitNextIC()) {
3135 // Mark R0 as pushed stack value.
3140 template <typename Handler
>
3141 bool BaselineCodeGen
<Handler
>::emit_GetElemSuper() {
3142 // Store obj in the scratch slot.
3143 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
3146 // Keep receiver and index in R0 and R1.
3147 frame
.popRegsAndSync(2);
3149 // Keep obj on the stack.
3150 frame
.pushScratchValue();
3152 if (!emitNextIC()) {
3161 template <typename Handler
>
3162 bool BaselineCodeGen
<Handler
>::emit_CallElem() {
3163 return emit_GetElem();
3166 template <typename Handler
>
3167 bool BaselineCodeGen
<Handler
>::emit_SetElem() {
3168 // Store RHS in the scratch slot.
3169 frame
.storeStackValue(-1, frame
.addressOfScratchValue(), R2
);
3172 // Keep object and index in R0 and R1.
3173 frame
.popRegsAndSync(2);
3175 // Keep RHS on the stack.
3176 frame
.pushScratchValue();
3179 if (!emitNextIC()) {
3186 template <typename Handler
>
3187 bool BaselineCodeGen
<Handler
>::emit_StrictSetElem() {
3188 return emit_SetElem();
3191 template <typename Handler
>
3192 bool BaselineCodeGen
<Handler
>::emitSetElemSuper(bool strict
) {
3193 // Incoming stack is |receiver, propval, obj, rval|. We need to shuffle
3194 // stack to leave rval when operation is complete.
3196 // Pop rval into R0, then load receiver into R1 and replace with rval.
3197 frame
.popRegsAndSync(1);
3198 masm
.loadValue(frame
.addressOfStackValue(-3), R1
);
3199 masm
.storeValue(R0
, frame
.addressOfStackValue(-3));
3203 pushArg(Imm32(strict
));
3204 pushArg(R1
); // receiver
3205 pushArg(R0
); // rval
3206 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3207 pushArg(R0
); // propval
3208 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
3209 pushArg(R0
.scratchReg()); // obj
3211 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleValue
, HandleValue
,
3213 if (!callVM
<Fn
, js::SetObjectElementWithReceiver
>()) {
3221 template <typename Handler
>
3222 bool BaselineCodeGen
<Handler
>::emit_SetElemSuper() {
3223 return emitSetElemSuper(/* strict = */ false);
3226 template <typename Handler
>
3227 bool BaselineCodeGen
<Handler
>::emit_StrictSetElemSuper() {
3228 return emitSetElemSuper(/* strict = */ true);
3231 template <typename Handler
>
3232 bool BaselineCodeGen
<Handler
>::emitDelElem(bool strict
) {
3233 // Keep values on the stack for the decompiler.
3235 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3236 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3243 using Fn
= bool (*)(JSContext
*, HandleValue
, HandleValue
, bool*);
3245 if (!callVM
<Fn
, DelElemOperation
<true>>()) {
3249 if (!callVM
<Fn
, DelElemOperation
<false>>()) {
3254 masm
.boxNonDouble(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R1
);
3260 template <typename Handler
>
3261 bool BaselineCodeGen
<Handler
>::emit_DelElem() {
3262 return emitDelElem(/* strict = */ false);
3265 template <typename Handler
>
3266 bool BaselineCodeGen
<Handler
>::emit_StrictDelElem() {
3267 return emitDelElem(/* strict = */ true);
3270 template <typename Handler
>
3271 bool BaselineCodeGen
<Handler
>::emit_In() {
3272 frame
.popRegsAndSync(2);
3274 if (!emitNextIC()) {
3282 template <typename Handler
>
3283 bool BaselineCodeGen
<Handler
>::emit_HasOwn() {
3284 frame
.popRegsAndSync(2);
3286 if (!emitNextIC()) {
3294 template <typename Handler
>
3295 bool BaselineCodeGen
<Handler
>::emit_CheckPrivateField() {
3296 // Keep key and val on the stack.
3298 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
3299 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3301 if (!emitNextIC()) {
3310 bool BaselineCompilerCodeGen::tryOptimizeGetGlobalName() {
3311 PropertyName
* name
= handler
.script()->getName(handler
.pc());
3313 // These names are non-configurable on the global and cannot be shadowed.
3314 if (name
== cx
->names().undefined
) {
3315 frame
.push(UndefinedValue());
3318 if (name
== cx
->names().NaN
) {
3319 frame
.push(JS::NaNValue());
3322 if (name
== cx
->names().Infinity
) {
3323 frame
.push(JS::InfinityValue());
3331 bool BaselineInterpreterCodeGen::tryOptimizeGetGlobalName() {
3332 // Interpreter doesn't optimize simple GETGNAMEs.
3336 template <typename Handler
>
3337 bool BaselineCodeGen
<Handler
>::emit_GetGName() {
3338 auto getName
= [this]() { return emit_GetName(); };
3340 auto getGlobalName
= [this]() {
3341 if (tryOptimizeGetGlobalName()) {
3347 loadGlobalLexicalEnvironment(R0
.scratchReg());
3350 if (!emitNextIC()) {
3354 // Mark R0 as pushed stack value.
3358 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope
,
3359 getName
, getGlobalName
, R2
.scratchReg());
3363 bool BaselineCompilerCodeGen::tryOptimizeBindGlobalName() {
3364 JSScript
* script
= handler
.script();
3365 if (script
->hasNonSyntacticScope()) {
3369 RootedGlobalObject
global(cx
, &script
->global());
3370 RootedPropertyName
name(cx
, script
->getName(handler
.pc()));
3371 if (JSObject
* binding
= MaybeOptimizeBindGlobalName(cx
, global
, name
)) {
3372 frame
.push(ObjectValue(*binding
));
3379 bool BaselineInterpreterCodeGen::tryOptimizeBindGlobalName() {
3380 // Interpreter doesn't optimize simple BINDGNAMEs.
3384 template <typename Handler
>
3385 bool BaselineCodeGen
<Handler
>::emit_BindGName() {
3386 if (tryOptimizeBindGlobalName()) {
3389 return emitBindName(JSOp::BindGName
);
3392 template <typename Handler
>
3393 bool BaselineCodeGen
<Handler
>::emit_BindVar() {
3395 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3398 pushArg(R0
.scratchReg());
3400 using Fn
= JSObject
* (*)(JSContext
*, JSObject
*);
3401 if (!callVM
<Fn
, BindVarOperation
>()) {
3405 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
3410 template <typename Handler
>
3411 bool BaselineCodeGen
<Handler
>::emit_SetProp() {
3412 // Keep lhs in R0, rhs in R1.
3413 frame
.popRegsAndSync(2);
3415 // Keep RHS on the stack.
3420 if (!emitNextIC()) {
3427 template <typename Handler
>
3428 bool BaselineCodeGen
<Handler
>::emit_StrictSetProp() {
3429 return emit_SetProp();
3432 template <typename Handler
>
3433 bool BaselineCodeGen
<Handler
>::emit_SetName() {
3434 return emit_SetProp();
3437 template <typename Handler
>
3438 bool BaselineCodeGen
<Handler
>::emit_StrictSetName() {
3439 return emit_SetProp();
3442 template <typename Handler
>
3443 bool BaselineCodeGen
<Handler
>::emit_SetGName() {
3444 return emit_SetProp();
3447 template <typename Handler
>
3448 bool BaselineCodeGen
<Handler
>::emit_StrictSetGName() {
3449 return emit_SetProp();
3452 template <typename Handler
>
3453 bool BaselineCodeGen
<Handler
>::emitSetPropSuper(bool strict
) {
3454 // Incoming stack is |receiver, obj, rval|. We need to shuffle stack to
3455 // leave rval when operation is complete.
3457 // Pop rval into R0, then load receiver into R1 and replace with rval.
3458 frame
.popRegsAndSync(1);
3459 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
3460 masm
.storeValue(R0
, frame
.addressOfStackValue(-2));
3464 pushArg(Imm32(strict
));
3465 pushArg(R0
); // rval
3466 pushScriptNameArg(R0
.scratchReg(), R2
.scratchReg());
3467 pushArg(R1
); // receiver
3468 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
3469 pushArg(R0
.scratchReg()); // obj
3471 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleValue
, HandlePropertyName
,
3473 if (!callVM
<Fn
, js::SetPropertySuper
>()) {
3481 template <typename Handler
>
3482 bool BaselineCodeGen
<Handler
>::emit_SetPropSuper() {
3483 return emitSetPropSuper(/* strict = */ false);
3486 template <typename Handler
>
3487 bool BaselineCodeGen
<Handler
>::emit_StrictSetPropSuper() {
3488 return emitSetPropSuper(/* strict = */ true);
3491 template <typename Handler
>
3492 bool BaselineCodeGen
<Handler
>::emit_GetProp() {
3493 // Keep object in R0.
3494 frame
.popRegsAndSync(1);
3497 if (!emitNextIC()) {
3501 // Mark R0 as pushed stack value.
3506 template <typename Handler
>
3507 bool BaselineCodeGen
<Handler
>::emit_CallProp() {
3508 return emit_GetProp();
3511 template <typename Handler
>
3512 bool BaselineCodeGen
<Handler
>::emit_Length() {
3513 return emit_GetProp();
3516 template <typename Handler
>
3517 bool BaselineCodeGen
<Handler
>::emit_GetBoundName() {
3518 return emit_GetProp();
3521 template <typename Handler
>
3522 bool BaselineCodeGen
<Handler
>::emit_GetPropSuper() {
3523 // Receiver -> R1, Object -> R0
3524 frame
.popRegsAndSync(1);
3525 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
3528 if (!emitNextIC()) {
3536 template <typename Handler
>
3537 bool BaselineCodeGen
<Handler
>::emitDelProp(bool strict
) {
3538 // Keep value on the stack for the decompiler.
3540 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3544 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
3547 using Fn
= bool (*)(JSContext
*, HandleValue
, HandlePropertyName
, bool*);
3549 if (!callVM
<Fn
, DelPropOperation
<true>>()) {
3553 if (!callVM
<Fn
, DelPropOperation
<false>>()) {
3558 masm
.boxNonDouble(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R1
);
3564 template <typename Handler
>
3565 bool BaselineCodeGen
<Handler
>::emit_DelProp() {
3566 return emitDelProp(/* strict = */ false);
3569 template <typename Handler
>
3570 bool BaselineCodeGen
<Handler
>::emit_StrictDelProp() {
3571 return emitDelProp(/* strict = */ true);
3575 void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg
) {
3576 EnvironmentCoordinate
ec(handler
.pc());
3578 masm
.loadPtr(frame
.addressOfEnvironmentChain(), reg
);
3579 for (unsigned i
= ec
.hops(); i
; i
--) {
3581 Address(reg
, EnvironmentObject::offsetOfEnclosingEnvironment()), reg
);
3586 void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg
) {
3587 MOZ_CRASH("Shouldn't call this for interpreter");
3591 Address
BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
3592 Register objReg
, Register reg
) {
3593 EnvironmentCoordinate
ec(handler
.pc());
3595 if (EnvironmentObject::nonExtensibleIsFixedSlot(ec
)) {
3596 return Address(objReg
, NativeObject::getFixedSlotOffset(ec
.slot()));
3599 uint32_t slot
= EnvironmentObject::nonExtensibleDynamicSlotIndex(ec
);
3600 masm
.loadPtr(Address(objReg
, NativeObject::offsetOfSlots()), reg
);
3601 return Address(reg
, slot
* sizeof(Value
));
3605 Address
BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
3606 Register objReg
, Register reg
) {
3607 MOZ_CRASH("Shouldn't call this for interpreter");
3610 template <typename Handler
>
3611 Address BaselineCodeGen
<Handler
>::getEnvironmentCoordinateAddress(
3613 getEnvironmentCoordinateObject(reg
);
3614 return getEnvironmentCoordinateAddressFromObject(reg
, reg
);
3617 // For a JOF_ENVCOORD op load the number of hops from the bytecode and skip this
3618 // number of environment objects.
3619 static void LoadAliasedVarEnv(MacroAssembler
& masm
, Register env
,
3621 static_assert(ENVCOORD_HOPS_LEN
== 1,
3622 "Code assumes number of hops is stored in uint8 operand");
3623 LoadUint8Operand(masm
, scratch
);
3626 masm
.branchTest32(Assembler::Zero
, scratch
, scratch
, &done
);
3629 Address
nextEnv(env
, EnvironmentObject::offsetOfEnclosingEnvironment());
3630 masm
.unboxObject(nextEnv
, env
);
3631 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch
, &top
);
3637 void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest
) {
3640 Address address
= getEnvironmentCoordinateAddress(R0
.scratchReg());
3641 masm
.loadValue(address
, dest
);
3645 void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest
) {
3646 Register env
= R0
.scratchReg();
3647 Register scratch
= R1
.scratchReg();
3649 // Load the right environment object.
3650 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
3651 LoadAliasedVarEnv(masm
, env
, scratch
);
3653 // Load the slot index.
3654 static_assert(ENVCOORD_SLOT_LEN
== 3,
3655 "Code assumes slot is stored in uint24 operand");
3656 LoadUint24Operand(masm
, ENVCOORD_HOPS_LEN
, scratch
);
3658 // Load the Value from a fixed or dynamic slot.
3659 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3660 Label isDynamic
, done
;
3661 masm
.branch32(Assembler::AboveOrEqual
, scratch
,
3662 Imm32(NativeObject::MAX_FIXED_SLOTS
), &isDynamic
);
3664 uint32_t offset
= NativeObject::getFixedSlotOffset(0);
3665 masm
.loadValue(BaseValueIndex(env
, scratch
, offset
), dest
);
3668 masm
.bind(&isDynamic
);
3670 masm
.loadPtr(Address(env
, NativeObject::offsetOfSlots()), env
);
3672 // Use an offset to subtract the number of fixed slots.
3673 int32_t offset
= -int32_t(NativeObject::MAX_FIXED_SLOTS
* sizeof(Value
));
3674 masm
.loadValue(BaseValueIndex(env
, scratch
, offset
), dest
);
3679 template <typename Handler
>
3680 bool BaselineCodeGen
<Handler
>::emit_GetAliasedVar() {
3681 emitGetAliasedVar(R0
);
3683 if (IsTypeInferenceEnabled() && handler
.maybeIonCompileable()) {
3684 // No need to monitor types if we know Ion can't compile this script.
3685 if (!emitNextIC()) {
3695 bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
3696 // Keep rvalue in R0.
3697 frame
.popRegsAndSync(1);
3698 Register objReg
= R2
.scratchReg();
3700 getEnvironmentCoordinateObject(objReg
);
3702 getEnvironmentCoordinateAddressFromObject(objReg
, R1
.scratchReg());
3703 masm
.guardedCallPreBarrier(address
, MIRType::Value
);
3704 masm
.storeValue(R0
, address
);
3707 // Only R0 is live at this point.
3708 // Scope coordinate object is already in R2.scratchReg().
3709 Register temp
= R1
.scratchReg();
3712 masm
.branchPtrInNurseryChunk(Assembler::Equal
, objReg
, temp
, &skipBarrier
);
3713 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
3715 masm
.call(&postBarrierSlot_
); // Won't clobber R0
3717 masm
.bind(&skipBarrier
);
3722 bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
3723 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
3724 regs
.take(BaselineFrameReg
);
3726 if (HasInterpreterPCReg()) {
3727 regs
.take(InterpreterPCReg
);
3730 Register env
= regs
.takeAny();
3731 Register scratch1
= regs
.takeAny();
3732 Register scratch2
= regs
.takeAny();
3733 Register scratch3
= regs
.takeAny();
3735 // Load the right environment object.
3736 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
3737 LoadAliasedVarEnv(masm
, env
, scratch1
);
3739 // Load the slot index.
3740 static_assert(ENVCOORD_SLOT_LEN
== 3,
3741 "Code assumes slot is stored in uint24 operand");
3742 LoadUint24Operand(masm
, ENVCOORD_HOPS_LEN
, scratch1
);
3744 // Store the RHS Value in R2.
3745 masm
.loadValue(frame
.addressOfStackValue(-1), R2
);
3747 // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
3748 // guardedCallPreBarrierAnyZone once to avoid code bloat.
3750 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3751 Label isDynamic
, done
;
3752 masm
.branch32(Assembler::AboveOrEqual
, scratch1
,
3753 Imm32(NativeObject::MAX_FIXED_SLOTS
), &isDynamic
);
3755 uint32_t offset
= NativeObject::getFixedSlotOffset(0);
3756 BaseValueIndex
slotAddr(env
, scratch1
, offset
);
3757 masm
.computeEffectiveAddress(slotAddr
, scratch2
);
3760 masm
.bind(&isDynamic
);
3762 masm
.loadPtr(Address(env
, NativeObject::offsetOfSlots()), scratch2
);
3764 // Use an offset to subtract the number of fixed slots.
3765 int32_t offset
= -int32_t(NativeObject::MAX_FIXED_SLOTS
* sizeof(Value
));
3766 BaseValueIndex
slotAddr(scratch2
, scratch1
, offset
);
3767 masm
.computeEffectiveAddress(slotAddr
, scratch2
);
3771 // Pre-barrier and store.
3772 Address
slotAddr(scratch2
, 0);
3773 masm
.guardedCallPreBarrierAnyZone(slotAddr
, MIRType::Value
, scratch3
);
3774 masm
.storeValue(R2
, slotAddr
);
3778 masm
.branchPtrInNurseryChunk(Assembler::Equal
, env
, scratch1
, &skipBarrier
);
3779 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R2
, scratch1
,
3782 // Post barrier code expects the object in R2.
3783 masm
.movePtr(env
, R2
.scratchReg());
3784 masm
.call(&postBarrierSlot_
);
3786 masm
.bind(&skipBarrier
);
3790 template <typename Handler
>
3791 bool BaselineCodeGen
<Handler
>::emit_GetName() {
3794 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3797 if (!emitNextIC()) {
3801 // Mark R0 as pushed stack value.
3806 template <typename Handler
>
3807 bool BaselineCodeGen
<Handler
>::emitBindName(JSOp op
) {
3808 // If we have a BindGName without a non-syntactic scope, we pass the global
3809 // lexical environment to the IC instead of the frame's environment.
3813 auto loadGlobalLexical
= [this]() {
3814 loadGlobalLexicalEnvironment(R0
.scratchReg());
3817 auto loadFrameEnv
= [this]() {
3818 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3822 if (op
== JSOp::BindName
) {
3823 if (!loadFrameEnv()) {
3827 MOZ_ASSERT(op
== JSOp::BindGName
);
3828 if (!emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope
,
3829 loadFrameEnv
, loadGlobalLexical
, R2
.scratchReg())) {
3835 if (!emitNextIC()) {
3839 // Mark R0 as pushed stack value.
3844 template <typename Handler
>
3845 bool BaselineCodeGen
<Handler
>::emit_BindName() {
3846 return emitBindName(JSOp::BindName
);
3849 template <typename Handler
>
3850 bool BaselineCodeGen
<Handler
>::emit_DelName() {
3852 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3856 pushArg(R0
.scratchReg());
3857 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
3859 using Fn
= bool (*)(JSContext
*, HandlePropertyName
, HandleObject
,
3860 MutableHandleValue
);
3861 if (!callVM
<Fn
, js::DeleteNameOperation
>()) {
3870 bool BaselineCompilerCodeGen::emit_GetImport() {
3871 JSScript
* script
= handler
.script();
3872 ModuleEnvironmentObject
* env
= GetModuleEnvironmentForScript(script
);
3875 jsid id
= NameToId(script
->getName(handler
.pc()));
3876 ModuleEnvironmentObject
* targetEnv
;
3878 MOZ_ALWAYS_TRUE(env
->lookupImport(id
, &targetEnv
, &shape
));
3882 uint32_t slot
= shape
->slot();
3883 Register scratch
= R0
.scratchReg();
3884 masm
.movePtr(ImmGCPtr(targetEnv
), scratch
);
3885 if (slot
< targetEnv
->numFixedSlots()) {
3886 masm
.loadValue(Address(scratch
, NativeObject::getFixedSlotOffset(slot
)),
3889 masm
.loadPtr(Address(scratch
, NativeObject::offsetOfSlots()), scratch
);
3891 Address(scratch
, (slot
- targetEnv
->numFixedSlots()) * sizeof(Value
)),
3895 // Imports are initialized by this point except in rare circumstances, so
3896 // don't emit a check unless we have to.
3897 if (targetEnv
->getSlot(shape
->slot()).isMagic(JS_UNINITIALIZED_LEXICAL
)) {
3898 if (!emitUninitializedLexicalCheck(R0
)) {
3903 if (IsTypeInferenceEnabled() && handler
.maybeIonCompileable()) {
3904 // No need to monitor types if we know Ion can't compile this script.
3905 if (!emitNextIC()) {
3915 bool BaselineInterpreterCodeGen::emit_GetImport() {
3918 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3922 pushBytecodePCArg();
3924 pushArg(R0
.scratchReg());
3926 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
, jsbytecode
*,
3927 MutableHandleValue
);
3928 if (!callVM
<Fn
, GetImportOperation
>()) {
3932 // Enter the type monitor IC.
3933 if (IsTypeInferenceEnabled() && !emitNextIC()) {
3941 template <typename Handler
>
3942 bool BaselineCodeGen
<Handler
>::emit_GetIntrinsic() {
3945 if (!emitNextIC()) {
3953 template <typename Handler
>
3954 bool BaselineCodeGen
<Handler
>::emit_SetIntrinsic() {
3956 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
3961 pushBytecodePCArg();
3964 using Fn
= bool (*)(JSContext
*, JSScript
*, jsbytecode
*, HandleValue
);
3965 return callVM
<Fn
, SetIntrinsicOperation
>();
3968 template <typename Handler
>
3969 bool BaselineCodeGen
<Handler
>::emit_DefVar() {
3972 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3976 pushBytecodePCArg();
3978 pushArg(R0
.scratchReg());
3980 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
, jsbytecode
*);
3981 return callVM
<Fn
, DefVarOperation
>();
3984 template <typename Handler
>
3985 bool BaselineCodeGen
<Handler
>::emitDefLexical(JSOp op
) {
3986 MOZ_ASSERT(op
== JSOp::DefConst
|| op
== JSOp::DefLet
);
3990 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
3994 pushBytecodePCArg();
3996 pushArg(R0
.scratchReg());
3998 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
, jsbytecode
*);
3999 return callVM
<Fn
, DefLexicalOperation
>();
4002 template <typename Handler
>
4003 bool BaselineCodeGen
<Handler
>::emit_DefConst() {
4004 return emitDefLexical(JSOp::DefConst
);
4007 template <typename Handler
>
4008 bool BaselineCodeGen
<Handler
>::emit_DefLet() {
4009 return emitDefLexical(JSOp::DefLet
);
4012 template <typename Handler
>
4013 bool BaselineCodeGen
<Handler
>::emit_DefFun() {
4014 frame
.popRegsAndSync(1);
4015 masm
.unboxObject(R0
, R0
.scratchReg());
4016 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R1
.scratchReg());
4020 pushArg(R0
.scratchReg());
4021 pushArg(R1
.scratchReg());
4024 using Fn
= bool (*)(JSContext
*, HandleScript
, HandleObject
, HandleFunction
);
4025 return callVM
<Fn
, DefFunOperation
>();
4028 template <typename Handler
>
4029 bool BaselineCodeGen
<Handler
>::emit_CheckGlobalOrEvalDecl() {
4035 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
4036 pushArg(R0
.scratchReg());
4038 using Fn
= bool (*)(JSContext
*, HandleObject
, HandleScript
);
4039 return callVM
<Fn
, js::CheckGlobalOrEvalDeclarationConflicts
>();
4042 template <typename Handler
>
4043 bool BaselineCodeGen
<Handler
>::emitInitPropGetterSetter() {
4044 // Keep values on the stack for the decompiler.
4049 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
4050 masm
.unboxObject(frame
.addressOfStackValue(-2), R1
.scratchReg());
4052 pushArg(R0
.scratchReg());
4053 pushScriptNameArg(R0
.scratchReg(), R2
.scratchReg());
4054 pushArg(R1
.scratchReg());
4055 pushBytecodePCArg();
4057 using Fn
= bool (*)(JSContext
*, jsbytecode
*, HandleObject
, HandlePropertyName
,
4059 if (!callVM
<Fn
, InitPropGetterSetterOperation
>()) {
4067 template <typename Handler
>
4068 bool BaselineCodeGen
<Handler
>::emit_InitPropGetter() {
4069 return emitInitPropGetterSetter();
4072 template <typename Handler
>
4073 bool BaselineCodeGen
<Handler
>::emit_InitHiddenPropGetter() {
4074 return emitInitPropGetterSetter();
4077 template <typename Handler
>
4078 bool BaselineCodeGen
<Handler
>::emit_InitPropSetter() {
4079 return emitInitPropGetterSetter();
4082 template <typename Handler
>
4083 bool BaselineCodeGen
<Handler
>::emit_InitHiddenPropSetter() {
4084 return emitInitPropGetterSetter();
4087 template <typename Handler
>
4088 bool BaselineCodeGen
<Handler
>::emitInitElemGetterSetter() {
4089 // Load index and value in R0 and R1, but keep values on the stack for the
4092 masm
.loadValue(frame
.addressOfStackValue(-2), R0
);
4093 masm
.unboxObject(frame
.addressOfStackValue(-1), R1
.scratchReg());
4097 pushArg(R1
.scratchReg());
4099 masm
.unboxObject(frame
.addressOfStackValue(-3), R0
.scratchReg());
4100 pushArg(R0
.scratchReg());
4101 pushBytecodePCArg();
4103 using Fn
= bool (*)(JSContext
*, jsbytecode
*, HandleObject
, HandleValue
,
4105 if (!callVM
<Fn
, InitElemGetterSetterOperation
>()) {
4113 template <typename Handler
>
4114 bool BaselineCodeGen
<Handler
>::emit_InitElemGetter() {
4115 return emitInitElemGetterSetter();
4118 template <typename Handler
>
4119 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElemGetter() {
4120 return emitInitElemGetterSetter();
4123 template <typename Handler
>
4124 bool BaselineCodeGen
<Handler
>::emit_InitElemSetter() {
4125 return emitInitElemGetterSetter();
4128 template <typename Handler
>
4129 bool BaselineCodeGen
<Handler
>::emit_InitHiddenElemSetter() {
4130 return emitInitElemGetterSetter();
4133 template <typename Handler
>
4134 bool BaselineCodeGen
<Handler
>::emit_InitElemInc() {
4135 // Keep the object and rhs on the stack.
4138 // Load object in R0, index in R1.
4139 masm
.loadValue(frame
.addressOfStackValue(-3), R0
);
4140 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
4143 if (!emitNextIC()) {
4151 Address indexAddr
= frame
.addressOfStackValue(-1);
4154 masm
.branchTestInt32(Assembler::Equal
, indexAddr
, &isInt32
);
4155 masm
.assumeUnreachable("INITELEM_INC index must be Int32");
4156 masm
.bind(&isInt32
);
4158 masm
.incrementInt32Value(indexAddr
);
4163 bool BaselineCompilerCodeGen::emit_GetLocal() {
4164 frame
.pushLocal(GET_LOCALNO(handler
.pc()));
4168 static BaseValueIndex
ComputeAddressOfLocal(MacroAssembler
& masm
,
4169 Register indexScratch
) {
4170 // Locals are stored in memory at a negative offset from the frame pointer. We
4171 // negate the index first to effectively subtract it.
4172 masm
.negPtr(indexScratch
);
4173 return BaseValueIndex(BaselineFrameReg
, indexScratch
,
4174 BaselineFrame::reverseOffsetOfLocal(0));
4178 bool BaselineInterpreterCodeGen::emit_GetLocal() {
4179 Register scratch
= R0
.scratchReg();
4180 LoadUint24Operand(masm
, 0, scratch
);
4181 BaseValueIndex addr
= ComputeAddressOfLocal(masm
, scratch
);
4182 masm
.loadValue(addr
, R0
);
4188 bool BaselineCompilerCodeGen::emit_SetLocal() {
4189 // Ensure no other StackValue refers to the old value, for instance i + (i =
4190 // 3). This also allows us to use R0 as scratch below.
4193 uint32_t local
= GET_LOCALNO(handler
.pc());
4194 frame
.storeStackValue(-1, frame
.addressOfLocal(local
), R0
);
4199 bool BaselineInterpreterCodeGen::emit_SetLocal() {
4200 Register scratch
= R0
.scratchReg();
4201 LoadUint24Operand(masm
, 0, scratch
);
4202 BaseValueIndex addr
= ComputeAddressOfLocal(masm
, scratch
);
4203 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
4204 masm
.storeValue(R1
, addr
);
4209 bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op
) {
4210 MOZ_ASSERT(op
== JSOp::GetArg
|| op
== JSOp::SetArg
);
4212 uint32_t arg
= GET_ARGNO(handler
.pc());
4214 // Fast path: the script does not use |arguments| or formals don't
4215 // alias the arguments object.
4216 if (!handler
.script()->argumentsAliasesFormals()) {
4217 if (op
== JSOp::GetArg
) {
4220 // See the comment in emit_SetLocal.
4222 frame
.storeStackValue(-1, frame
.addressOfArg(arg
), R0
);
4228 // Sync so that we can use R0.
4231 // If the script is known to have an arguments object, we can just use it.
4232 // Else, we *may* have an arguments object (because we can't invalidate
4233 // when needsArgsObj becomes |true|), so we have to test HAS_ARGS_OBJ.
4235 if (!handler
.script()->needsArgsObj()) {
4237 masm
.branchTest32(Assembler::NonZero
, frame
.addressOfFlags(),
4238 Imm32(BaselineFrame::HAS_ARGS_OBJ
), &hasArgsObj
);
4239 if (op
== JSOp::GetArg
) {
4240 masm
.loadValue(frame
.addressOfArg(arg
), R0
);
4242 frame
.storeStackValue(-1, frame
.addressOfArg(arg
), R0
);
4245 masm
.bind(&hasArgsObj
);
4248 // Load the arguments object data vector.
4249 Register reg
= R2
.scratchReg();
4250 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4251 masm
.loadPrivate(Address(reg
, ArgumentsObject::getDataSlotOffset()), reg
);
4253 // Load/store the argument.
4254 Address
argAddr(reg
, ArgumentsData::offsetOfArgs() + arg
* sizeof(Value
));
4255 if (op
== JSOp::GetArg
) {
4256 masm
.loadValue(argAddr
, R0
);
4259 Register temp
= R1
.scratchReg();
4260 masm
.guardedCallPreBarrierAnyZone(argAddr
, MIRType::Value
, temp
);
4261 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4262 masm
.storeValue(R0
, argAddr
);
4264 MOZ_ASSERT(frame
.numUnsyncedSlots() == 0);
4266 // Reload the arguments object.
4267 Register reg
= R2
.scratchReg();
4268 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4272 masm
.branchPtrInNurseryChunk(Assembler::Equal
, reg
, temp
, &skipBarrier
);
4273 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
4275 masm
.call(&postBarrierSlot_
);
4277 masm
.bind(&skipBarrier
);
4285 bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op
) {
4286 MOZ_ASSERT(op
== JSOp::GetArg
|| op
== JSOp::SetArg
);
4289 Register argReg
= R1
.scratchReg();
4290 LoadUint16Operand(masm
, argReg
);
4292 // If the frame has no arguments object, this must be an unaliased access.
4293 Label isUnaliased
, done
;
4294 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
4295 Imm32(BaselineFrame::HAS_ARGS_OBJ
), &isUnaliased
);
4297 Register reg
= R2
.scratchReg();
4299 // If it's an unmapped arguments object, this is an unaliased access.
4302 Assembler::Zero
, Address(reg
, JSScript::offsetOfImmutableFlags()),
4303 Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj
)),
4306 // Load the arguments object data vector.
4307 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4308 masm
.loadPrivate(Address(reg
, ArgumentsObject::getDataSlotOffset()), reg
);
4310 // Load/store the argument.
4311 BaseValueIndex
argAddr(reg
, argReg
, ArgumentsData::offsetOfArgs());
4312 if (op
== JSOp::GetArg
) {
4313 masm
.loadValue(argAddr
, R0
);
4316 masm
.guardedCallPreBarrierAnyZone(argAddr
, MIRType::Value
,
4318 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4319 masm
.storeValue(R0
, argAddr
);
4321 // Reload the arguments object.
4322 masm
.loadPtr(frame
.addressOfArgsObj(), reg
);
4324 Register temp
= R1
.scratchReg();
4325 masm
.branchPtrInNurseryChunk(Assembler::Equal
, reg
, temp
, &done
);
4326 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &done
);
4328 masm
.call(&postBarrierSlot_
);
4332 masm
.bind(&isUnaliased
);
4334 BaseValueIndex
addr(BaselineFrameReg
, argReg
,
4335 BaselineFrame::offsetOfArg(0));
4336 if (op
== JSOp::GetArg
) {
4337 masm
.loadValue(addr
, R0
);
4340 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4341 masm
.storeValue(R0
, addr
);
4349 template <typename Handler
>
4350 bool BaselineCodeGen
<Handler
>::emit_GetArg() {
4351 return emitFormalArgAccess(JSOp::GetArg
);
4354 template <typename Handler
>
4355 bool BaselineCodeGen
<Handler
>::emit_SetArg() {
4356 return emitFormalArgAccess(JSOp::SetArg
);
4360 void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest
) {
4361 masm
.move32(Imm32(handler
.function()->nargs()), dest
);
4365 void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest
) {
4366 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), dest
);
4367 masm
.load16ZeroExtend(Address(dest
, JSFunction::offsetOfNargs()), dest
);
4370 template <typename Handler
>
4371 void BaselineCodeGen
<Handler
>::emitPushNonArrowFunctionNewTarget() {
4372 // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)])
4373 Label notConstructing
, done
;
4374 masm
.branchTestPtr(Assembler::Zero
, frame
.addressOfCalleeToken(),
4375 Imm32(CalleeToken_FunctionConstructing
), ¬Constructing
);
4377 Register argvLen
= R0
.scratchReg();
4378 Register nformals
= R1
.scratchReg();
4379 Address
actualArgs(BaselineFrameReg
,
4380 BaselineFrame::offsetOfNumActualArgs());
4381 masm
.loadPtr(actualArgs
, argvLen
);
4383 // If argvLen < nformals, set argvlen := nformals.
4384 loadNumFormalArguments(nformals
);
4385 masm
.cmp32Move32(Assembler::Below
, argvLen
, nformals
, nformals
, argvLen
);
4387 BaseValueIndex
newTarget(BaselineFrameReg
, argvLen
,
4388 BaselineFrame::offsetOfArg(0));
4389 masm
.loadValue(newTarget
, R0
);
4392 // else push(undefined)
4393 masm
.bind(¬Constructing
);
4394 masm
.moveValue(UndefinedValue(), R0
);
4401 bool BaselineCompilerCodeGen::emit_NewTarget() {
4402 if (handler
.script()->isForEval()) {
4403 frame
.pushEvalNewTarget();
4407 MOZ_ASSERT(handler
.function());
4410 if (handler
.function()->isArrow()) {
4411 // Arrow functions store their |new.target| value in an
4413 Register scratch
= R0
.scratchReg();
4414 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(), scratch
);
4416 Address(scratch
, FunctionExtended::offsetOfArrowNewTargetSlot()), R0
);
4421 emitPushNonArrowFunctionNewTarget();
4426 bool BaselineInterpreterCodeGen::emit_NewTarget() {
4427 Register scratch1
= R0
.scratchReg();
4428 Register scratch2
= R1
.scratchReg();
4430 Label isFunction
, done
;
4431 masm
.loadPtr(frame
.addressOfCalleeToken(), scratch1
);
4432 masm
.branchTestPtr(Assembler::Zero
, scratch1
, Imm32(CalleeTokenScriptBit
),
4436 frame
.pushEvalNewTarget();
4440 masm
.bind(&isFunction
);
4443 masm
.andPtr(Imm32(uint32_t(CalleeTokenMask
)), scratch1
);
4444 masm
.branchFunctionKind(Assembler::NotEqual
,
4445 FunctionFlags::FunctionKind::Arrow
, scratch1
,
4446 scratch2
, ¬Arrow
);
4448 // Case 2: arrow function.
4450 Address(scratch1
, FunctionExtended::offsetOfArrowNewTargetSlot()));
4454 masm
.bind(¬Arrow
);
4456 // Case 3: non-arrow function.
4457 emitPushNonArrowFunctionNewTarget();
4463 template <typename Handler
>
4464 bool BaselineCodeGen
<Handler
>::emit_ThrowSetConst() {
4466 pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN
));
4468 using Fn
= bool (*)(JSContext
*, unsigned);
4469 return callVM
<Fn
, jit::ThrowRuntimeLexicalError
>();
4472 template <typename Handler
>
4473 bool BaselineCodeGen
<Handler
>::emitUninitializedLexicalCheck(
4474 const ValueOperand
& val
) {
4476 masm
.branchTestMagicValue(Assembler::NotEqual
, val
, JS_UNINITIALIZED_LEXICAL
,
4480 pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL
));
4482 using Fn
= bool (*)(JSContext
*, unsigned);
4483 if (!callVM
<Fn
, jit::ThrowRuntimeLexicalError
>()) {
4491 template <typename Handler
>
4492 bool BaselineCodeGen
<Handler
>::emit_CheckLexical() {
4494 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4495 return emitUninitializedLexicalCheck(R0
);
4498 template <typename Handler
>
4499 bool BaselineCodeGen
<Handler
>::emit_CheckAliasedLexical() {
4500 return emit_CheckLexical();
4503 template <typename Handler
>
4504 bool BaselineCodeGen
<Handler
>::emit_InitLexical() {
4505 return emit_SetLocal();
4508 template <typename Handler
>
4509 bool BaselineCodeGen
<Handler
>::emit_InitGLexical() {
4510 frame
.popRegsAndSync(1);
4511 pushGlobalLexicalEnvironmentValue(R1
);
4513 return emit_SetProp();
4516 template <typename Handler
>
4517 bool BaselineCodeGen
<Handler
>::emit_InitAliasedLexical() {
4518 return emit_SetAliasedVar();
4521 template <typename Handler
>
4522 bool BaselineCodeGen
<Handler
>::emit_Uninitialized() {
4523 frame
.push(MagicValue(JS_UNINITIALIZED_LEXICAL
));
4528 bool BaselineCompilerCodeGen::emitCall(JSOp op
) {
4529 MOZ_ASSERT(IsInvokeOp(op
));
4533 uint32_t argc
= GET_ARGC(handler
.pc());
4534 masm
.move32(Imm32(argc
), R0
.scratchReg());
4537 if (!emitNextIC()) {
4541 // Update FrameInfo.
4542 bool construct
= IsConstructOp(op
);
4543 frame
.popn(2 + argc
+ construct
);
4549 bool BaselineInterpreterCodeGen::emitCall(JSOp op
) {
4550 MOZ_ASSERT(IsInvokeOp(op
));
4552 // The IC expects argc in R0.
4553 LoadUint16Operand(masm
, R0
.scratchReg());
4554 if (!emitNextIC()) {
4558 // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
4559 // The return value is in R0 so we can't use that.
4560 Register scratch
= R1
.scratchReg();
4561 uint32_t extraValuesToPop
= IsConstructOp(op
) ? 3 : 2;
4562 Register spReg
= AsRegister(masm
.getStackPointer());
4563 LoadUint16Operand(masm
, scratch
);
4564 masm
.computeEffectiveAddress(
4565 BaseValueIndex(spReg
, scratch
, extraValuesToPop
* sizeof(Value
)), spReg
);
4570 template <typename Handler
>
4571 bool BaselineCodeGen
<Handler
>::emitSpreadCall(JSOp op
) {
4572 MOZ_ASSERT(IsInvokeOp(op
));
4575 masm
.move32(Imm32(1), R0
.scratchReg());
4578 if (!emitNextIC()) {
4582 // Update FrameInfo.
4583 bool construct
= op
== JSOp::SpreadNew
|| op
== JSOp::SpreadSuperCall
;
4584 frame
.popn(3 + construct
);
4589 template <typename Handler
>
4590 bool BaselineCodeGen
<Handler
>::emit_Call() {
4591 return emitCall(JSOp::Call
);
4594 template <typename Handler
>
4595 bool BaselineCodeGen
<Handler
>::emit_CallIgnoresRv() {
4596 return emitCall(JSOp::CallIgnoresRv
);
4599 template <typename Handler
>
4600 bool BaselineCodeGen
<Handler
>::emit_CallIter() {
4601 return emitCall(JSOp::CallIter
);
4604 template <typename Handler
>
4605 bool BaselineCodeGen
<Handler
>::emit_New() {
4606 return emitCall(JSOp::New
);
4609 template <typename Handler
>
4610 bool BaselineCodeGen
<Handler
>::emit_SuperCall() {
4611 return emitCall(JSOp::SuperCall
);
4614 template <typename Handler
>
4615 bool BaselineCodeGen
<Handler
>::emit_FunCall() {
4616 return emitCall(JSOp::FunCall
);
4619 template <typename Handler
>
4620 bool BaselineCodeGen
<Handler
>::emit_FunApply() {
4621 return emitCall(JSOp::FunApply
);
4624 template <typename Handler
>
4625 bool BaselineCodeGen
<Handler
>::emit_Eval() {
4626 return emitCall(JSOp::Eval
);
4629 template <typename Handler
>
4630 bool BaselineCodeGen
<Handler
>::emit_StrictEval() {
4631 return emitCall(JSOp::StrictEval
);
4634 template <typename Handler
>
4635 bool BaselineCodeGen
<Handler
>::emit_SpreadCall() {
4636 return emitSpreadCall(JSOp::SpreadCall
);
4639 template <typename Handler
>
4640 bool BaselineCodeGen
<Handler
>::emit_SpreadNew() {
4641 return emitSpreadCall(JSOp::SpreadNew
);
4644 template <typename Handler
>
4645 bool BaselineCodeGen
<Handler
>::emit_SpreadSuperCall() {
4646 return emitSpreadCall(JSOp::SpreadSuperCall
);
4649 template <typename Handler
>
4650 bool BaselineCodeGen
<Handler
>::emit_SpreadEval() {
4651 return emitSpreadCall(JSOp::SpreadEval
);
4654 template <typename Handler
>
4655 bool BaselineCodeGen
<Handler
>::emit_StrictSpreadEval() {
4656 return emitSpreadCall(JSOp::StrictSpreadEval
);
4659 template <typename Handler
>
4660 bool BaselineCodeGen
<Handler
>::emit_OptimizeSpreadCall() {
4662 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
4667 using Fn
= bool (*)(JSContext
*, HandleValue
, bool*);
4668 if (!callVM
<Fn
, OptimizeSpreadCall
>()) {
4672 masm
.boxNonDouble(JSVAL_TYPE_BOOLEAN
, ReturnReg
, R0
);
4677 template <typename Handler
>
4678 bool BaselineCodeGen
<Handler
>::emit_ImplicitThis() {
4680 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R0
.scratchReg());
4684 pushScriptNameArg(R1
.scratchReg(), R2
.scratchReg());
4685 pushArg(R0
.scratchReg());
4687 using Fn
= bool (*)(JSContext
*, HandleObject
, HandlePropertyName
,
4688 MutableHandleValue
);
4689 if (!callVM
<Fn
, ImplicitThisOperation
>()) {
4697 template <typename Handler
>
4698 bool BaselineCodeGen
<Handler
>::emit_GImplicitThis() {
4699 auto pushUndefined
= [this]() {
4700 frame
.push(UndefinedValue());
4703 auto emitImplicitThis
= [this]() { return emit_ImplicitThis(); };
4704 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope
,
4705 emitImplicitThis
, pushUndefined
, R2
.scratchReg());
4708 template <typename Handler
>
4709 bool BaselineCodeGen
<Handler
>::emit_Instanceof() {
4710 frame
.popRegsAndSync(2);
4712 if (!emitNextIC()) {
4720 template <typename Handler
>
4721 bool BaselineCodeGen
<Handler
>::emit_Typeof() {
4722 frame
.popRegsAndSync(1);
4724 if (!emitNextIC()) {
4732 template <typename Handler
>
4733 bool BaselineCodeGen
<Handler
>::emit_TypeofExpr() {
4734 return emit_Typeof();
4737 template <typename Handler
>
4738 bool BaselineCodeGen
<Handler
>::emit_ThrowMsg() {
4740 pushUint8BytecodeOperandArg(R2
.scratchReg());
4742 using Fn
= bool (*)(JSContext
*, const unsigned);
4743 return callVM
<Fn
, js::ThrowMsgOperation
>();
4746 template <typename Handler
>
4747 bool BaselineCodeGen
<Handler
>::emit_Throw() {
4748 // Keep value to throw in R0.
4749 frame
.popRegsAndSync(1);
4754 using Fn
= bool (*)(JSContext
*, HandleValue
);
4755 return callVM
<Fn
, js::ThrowOperation
>();
4758 template <typename Handler
>
4759 bool BaselineCodeGen
<Handler
>::emit_Try() {
4763 template <typename Handler
>
4764 bool BaselineCodeGen
<Handler
>::emit_Finally() {
4765 // JSOp::Finally has a def count of 2, but these values are already on the
4766 // stack (they're pushed by JSOp::Gosub). Update the compiler's stack state.
4767 frame
.incStackDepth(2);
4769 // To match the interpreter, emit an interrupt check at the start of the
4771 return emitInterruptCheck();
4774 template <typename Handler
>
4775 bool BaselineCodeGen
<Handler
>::emit_Gosub() {
4776 // Jump to the finally block.
4782 static void LoadBaselineScriptResumeEntries(MacroAssembler
& masm
,
4783 JSScript
* script
, Register dest
,
4785 MOZ_ASSERT(dest
!= scratch
);
4787 masm
.movePtr(ImmPtr(script
->jitScript()), dest
);
4788 masm
.loadPtr(Address(dest
, JitScript::offsetOfBaselineScript()), dest
);
4789 masm
.load32(Address(dest
, BaselineScript::offsetOfResumeEntriesOffset()),
4791 masm
.addPtr(scratch
, dest
);
4794 template <typename Handler
>
4795 void BaselineCodeGen
<Handler
>::emitInterpJumpToResumeEntry(Register script
,
4796 Register resumeIndex
,
4798 // Load JSScript::immutableScriptData() into |script|.
4799 masm
.loadPtr(Address(script
, JSScript::offsetOfSharedData()), script
);
4800 masm
.loadPtr(Address(script
, RuntimeScriptData::offsetOfISD()), script
);
4802 // Load the resume pcOffset in |resumeIndex|.
4804 Address(script
, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
4806 masm
.computeEffectiveAddress(BaseIndex(scratch
, resumeIndex
, TimesFour
),
4808 masm
.load32(BaseIndex(script
, scratch
, TimesOne
), resumeIndex
);
4810 // Add resume offset to PC, jump to it.
4811 masm
.computeEffectiveAddress(BaseIndex(script
, resumeIndex
, TimesOne
,
4812 ImmutableScriptData::offsetOfCode()),
4814 Address
pcAddr(BaselineFrameReg
,
4815 BaselineFrame::reverseOffsetOfInterpreterPC());
4816 masm
.storePtr(script
, pcAddr
);
4817 emitJumpToInterpretOpLabel();
4821 void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex
,
4823 Register scratch2
) {
4824 LoadBaselineScriptResumeEntries(masm
, handler
.script(), scratch1
, scratch2
);
4826 BaseIndex(scratch1
, resumeIndex
, ScaleFromElemWidth(sizeof(uintptr_t))),
4828 masm
.jump(scratch1
);
4832 void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex
,
4834 Register scratch2
) {
4835 loadScript(scratch1
);
4836 emitInterpJumpToResumeEntry(scratch1
, resumeIndex
, scratch2
);
4839 template <typename Handler
>
4840 bool BaselineCodeGen
<Handler
>::emit_Retsub() {
4841 frame
.popRegsAndSync(2);
4844 masm
.branchTestBooleanTruthy(/* branchIfTrue = */ false, R0
, &isReturn
);
4846 // R0 is |true|. We need to throw R1.
4850 using Fn
= bool (*)(JSContext
*, HandleValue
);
4851 if (!callVM
<Fn
, js::ThrowOperation
>()) {
4855 masm
.bind(&isReturn
);
4857 // R0 is |false|. R1 contains the resumeIndex to jump to.
4858 Register resumeIndexReg
= R1
.scratchReg();
4859 masm
.unboxInt32(R1
, resumeIndexReg
);
4861 Register scratch1
= R2
.scratchReg();
4862 Register scratch2
= R0
.scratchReg();
4863 jumpToResumeEntry(resumeIndexReg
, scratch1
, scratch2
);
4868 template <typename F1
, typename F2
>
4869 MOZ_MUST_USE
bool BaselineCompilerCodeGen::emitDebugInstrumentation(
4870 const F1
& ifDebuggee
, const Maybe
<F2
>& ifNotDebuggee
) {
4871 // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
4872 // knows statically whether we're compiling with debug instrumentation.
4874 if (handler
.compileDebugInstrumentation()) {
4875 return ifDebuggee();
4878 if (ifNotDebuggee
) {
4879 return (*ifNotDebuggee
)();
4886 template <typename F1
, typename F2
>
4887 MOZ_MUST_USE
bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
4888 const F1
& ifDebuggee
, const Maybe
<F2
>& ifNotDebuggee
) {
4889 // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
4890 // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
4893 Label isNotDebuggee
, done
;
4895 CodeOffset toggleOffset
= masm
.toggledJump(&isNotDebuggee
);
4896 if (!handler
.addDebugInstrumentationOffset(cx
, toggleOffset
)) {
4900 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
4901 Imm32(BaselineFrame::DEBUGGEE
), &isNotDebuggee
);
4903 if (!ifDebuggee()) {
4907 if (ifNotDebuggee
) {
4911 masm
.bind(&isNotDebuggee
);
4913 if (ifNotDebuggee
&& !(*ifNotDebuggee
)()) {
4921 template <typename Handler
>
4922 bool BaselineCodeGen
<Handler
>::emit_PushLexicalEnv() {
4923 // Call a stub to push the block on the block chain.
4925 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
4927 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
4929 pushArg(R0
.scratchReg());
4931 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, Handle
<LexicalScope
*>);
4932 return callVM
<Fn
, jit::PushLexicalEnv
>();
4935 template <typename Handler
>
4936 bool BaselineCodeGen
<Handler
>::emit_PopLexicalEnv() {
4939 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
4941 auto ifDebuggee
= [this]() {
4943 pushBytecodePCArg();
4944 pushArg(R0
.scratchReg());
4946 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, jsbytecode
*);
4947 return callVM
<Fn
, jit::DebugLeaveThenPopLexicalEnv
>();
4949 auto ifNotDebuggee
= [this]() {
4951 pushArg(R0
.scratchReg());
4953 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4954 return callVM
<Fn
, jit::PopLexicalEnv
>();
4956 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
4959 template <typename Handler
>
4960 bool BaselineCodeGen
<Handler
>::emit_FreshenLexicalEnv() {
4963 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
4965 auto ifDebuggee
= [this]() {
4967 pushBytecodePCArg();
4968 pushArg(R0
.scratchReg());
4970 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, jsbytecode
*);
4971 return callVM
<Fn
, jit::DebugLeaveThenFreshenLexicalEnv
>();
4973 auto ifNotDebuggee
= [this]() {
4975 pushArg(R0
.scratchReg());
4977 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
4978 return callVM
<Fn
, jit::FreshenLexicalEnv
>();
4980 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
4983 template <typename Handler
>
4984 bool BaselineCodeGen
<Handler
>::emit_RecreateLexicalEnv() {
4987 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
4989 auto ifDebuggee
= [this]() {
4991 pushBytecodePCArg();
4992 pushArg(R0
.scratchReg());
4994 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, jsbytecode
*);
4995 return callVM
<Fn
, jit::DebugLeaveThenRecreateLexicalEnv
>();
4997 auto ifNotDebuggee
= [this]() {
4999 pushArg(R0
.scratchReg());
5001 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
5002 return callVM
<Fn
, jit::RecreateLexicalEnv
>();
5004 return emitDebugInstrumentation(ifDebuggee
, mozilla::Some(ifNotDebuggee
));
5007 template <typename Handler
>
5008 bool BaselineCodeGen
<Handler
>::emit_DebugLeaveLexicalEnv() {
5009 auto ifDebuggee
= [this]() {
5011 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5012 pushBytecodePCArg();
5013 pushArg(R0
.scratchReg());
5015 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, jsbytecode
*);
5016 return callVM
<Fn
, jit::DebugLeaveLexicalEnv
>();
5018 return emitDebugInstrumentation(ifDebuggee
);
5021 template <typename Handler
>
5022 bool BaselineCodeGen
<Handler
>::emit_PushVarEnv() {
5024 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5025 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
5027 pushArg(R0
.scratchReg());
5029 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, HandleScope
);
5030 return callVM
<Fn
, jit::PushVarEnv
>();
5033 template <typename Handler
>
5034 bool BaselineCodeGen
<Handler
>::emit_EnterWith() {
5035 // Pop "with" object to R0.
5036 frame
.popRegsAndSync(1);
5038 // Call a stub to push the object onto the environment chain.
5041 pushScriptGCThingArg(ScriptGCThingType::Scope
, R1
.scratchReg(),
5044 masm
.loadBaselineFramePtr(BaselineFrameReg
, R1
.scratchReg());
5045 pushArg(R1
.scratchReg());
5048 bool (*)(JSContext
*, BaselineFrame
*, HandleValue
, Handle
<WithScope
*>);
5049 return callVM
<Fn
, jit::EnterWith
>();
5052 template <typename Handler
>
5053 bool BaselineCodeGen
<Handler
>::emit_LeaveWith() {
5054 // Call a stub to pop the with object from the environment chain.
5057 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5058 pushArg(R0
.scratchReg());
5060 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
5061 return callVM
<Fn
, jit::LeaveWith
>();
5064 template <typename Handler
>
5065 bool BaselineCodeGen
<Handler
>::emit_Exception() {
5068 using Fn
= bool (*)(JSContext
*, MutableHandleValue
);
5069 if (!callVM
<Fn
, GetAndClearException
>()) {
5077 template <typename Handler
>
5078 bool BaselineCodeGen
<Handler
>::emit_Debugger() {
5081 frame
.assertSyncedStack();
5082 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5083 pushArg(R0
.scratchReg());
5085 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
5086 if (!callVM
<Fn
, jit::OnDebuggerStatement
>()) {
5093 template <typename Handler
>
5094 bool BaselineCodeGen
<Handler
>::emitDebugEpilogue() {
5095 auto ifDebuggee
= [this]() {
5096 // Move return value into the frame's rval slot.
5097 masm
.storeValue(JSReturnOperand
, frame
.addressOfReturnValue());
5098 masm
.or32(Imm32(BaselineFrame::HAS_RVAL
), frame
.addressOfFlags());
5100 // Load BaselineFrame pointer in R0.
5102 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5105 pushBytecodePCArg();
5106 pushArg(R0
.scratchReg());
5108 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugEpilogue
;
5110 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, jsbytecode
*);
5111 if (!callVM
<Fn
, jit::DebugEpilogueOnBaselineReturn
>(kind
)) {
5115 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
5118 return emitDebugInstrumentation(ifDebuggee
);
5121 template <typename Handler
>
5122 bool BaselineCodeGen
<Handler
>::emitReturn() {
5123 if (handler
.shouldEmitDebugEpilogueAtReturnOp()) {
5124 if (!emitDebugEpilogue()) {
5129 // Only emit the jump if this JSOp::RetRval is not the last instruction.
5130 // Not needed for last instruction, because last instruction flows
5131 // into return label.
5132 if (!handler
.isDefinitelyLastOp()) {
5133 masm
.jump(&return_
);
5139 template <typename Handler
>
5140 bool BaselineCodeGen
<Handler
>::emit_Return() {
5141 frame
.assertStackDepth(1);
5143 frame
.popValue(JSReturnOperand
);
5144 return emitReturn();
5147 template <typename Handler
>
5148 void BaselineCodeGen
<Handler
>::emitLoadReturnValue(ValueOperand val
) {
5150 masm
.branchTest32(Assembler::Zero
, frame
.addressOfFlags(),
5151 Imm32(BaselineFrame::HAS_RVAL
), &noRval
);
5152 masm
.loadValue(frame
.addressOfReturnValue(), val
);
5156 masm
.moveValue(UndefinedValue(), val
);
5161 template <typename Handler
>
5162 bool BaselineCodeGen
<Handler
>::emit_RetRval() {
5163 frame
.assertStackDepth(0);
5165 masm
.moveValue(UndefinedValue(), JSReturnOperand
);
5167 if (!handler
.maybeScript() || !handler
.maybeScript()->noScriptRval()) {
5168 // Return the value in the return value slot, if any.
5170 Address flags
= frame
.addressOfFlags();
5171 masm
.branchTest32(Assembler::Zero
, flags
, Imm32(BaselineFrame::HAS_RVAL
),
5173 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
5177 return emitReturn();
5180 template <typename Handler
>
5181 bool BaselineCodeGen
<Handler
>::emit_ToPropertyKey() {
5182 frame
.popRegsAndSync(1);
5184 if (!emitNextIC()) {
5192 template <typename Handler
>
5193 bool BaselineCodeGen
<Handler
>::emit_ToAsyncIter() {
5195 masm
.unboxObject(frame
.addressOfStackValue(-2), R0
.scratchReg());
5196 masm
.loadValue(frame
.addressOfStackValue(-1), R1
);
5200 pushArg(R0
.scratchReg());
5202 using Fn
= JSObject
* (*)(JSContext
*, HandleObject
, HandleValue
);
5203 if (!callVM
<Fn
, js::CreateAsyncFromSyncIterator
>()) {
5207 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5213 template <typename Handler
>
5214 bool BaselineCodeGen
<Handler
>::emit_TrySkipAwait() {
5216 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
5221 using Fn
= bool (*)(JSContext
*, HandleValue
, MutableHandleValue
);
5222 if (!callVM
<Fn
, jit::TrySkipAwait
>()) {
5226 Label cannotSkip
, done
;
5227 masm
.branchTestMagicValue(Assembler::Equal
, R0
, JS_CANNOT_SKIP_AWAIT
,
5229 masm
.moveValue(BooleanValue(true), R1
);
5232 masm
.bind(&cannotSkip
);
5233 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
5234 masm
.moveValue(BooleanValue(false), R1
);
5244 template <typename Handler
>
5245 bool BaselineCodeGen
<Handler
>::emit_AsyncAwait() {
5247 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
5248 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
5252 pushArg(R0
.scratchReg());
5254 using Fn
= JSObject
* (*)(JSContext
*, Handle
<AsyncFunctionGeneratorObject
*>,
5256 if (!callVM
<Fn
, js::AsyncFunctionAwait
>()) {
5260 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5266 template <typename Handler
>
5267 bool BaselineCodeGen
<Handler
>::emit_AsyncResolve() {
5269 masm
.loadValue(frame
.addressOfStackValue(-2), R1
);
5270 masm
.unboxObject(frame
.addressOfStackValue(-1), R0
.scratchReg());
5273 pushUint8BytecodeOperandArg(R2
.scratchReg());
5275 pushArg(R0
.scratchReg());
5277 using Fn
= JSObject
* (*)(JSContext
*, Handle
<AsyncFunctionGeneratorObject
*>,
5278 HandleValue
, AsyncFunctionResolveKind
);
5279 if (!callVM
<Fn
, js::AsyncFunctionResolve
>()) {
5283 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5289 template <typename Handler
>
5290 bool BaselineCodeGen
<Handler
>::emit_CheckObjCoercible() {
5292 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
5296 masm
.branchTestUndefined(Assembler::Equal
, R0
, &fail
);
5297 masm
.branchTestNull(Assembler::NotEqual
, R0
, &done
);
5304 using Fn
= bool (*)(JSContext
*, HandleValue
);
5305 if (!callVM
<Fn
, ThrowObjectCoercible
>()) {
5313 template <typename Handler
>
5314 bool BaselineCodeGen
<Handler
>::emit_ToString() {
5315 // Keep top stack value in R0.
5316 frame
.popRegsAndSync(1);
5318 // Inline path for string.
5320 masm
.branchTestString(Assembler::Equal
, R0
, &done
);
5326 // Call ToStringSlow which doesn't handle string inputs.
5327 using Fn
= JSString
* (*)(JSContext
*, HandleValue
);
5328 if (!callVM
<Fn
, ToStringSlow
<CanGC
>>()) {
5332 masm
.tagValue(JSVAL_TYPE_STRING
, ReturnReg
, R0
);
5339 static constexpr uint32_t TableSwitchOpLowOffset
= 1 * JUMP_OFFSET_LEN
;
5340 static constexpr uint32_t TableSwitchOpHighOffset
= 2 * JUMP_OFFSET_LEN
;
5341 static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset
=
5342 3 * JUMP_OFFSET_LEN
;
5345 void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val
,
5348 Register scratch2
) {
5349 jsbytecode
* pc
= handler
.pc();
5350 jsbytecode
* defaultpc
= pc
+ GET_JUMP_OFFSET(pc
);
5351 Label
* defaultLabel
= handler
.labelOf(defaultpc
);
5353 int32_t low
= GET_JUMP_OFFSET(pc
+ TableSwitchOpLowOffset
);
5354 int32_t high
= GET_JUMP_OFFSET(pc
+ TableSwitchOpHighOffset
);
5355 int32_t length
= high
- low
+ 1;
5357 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5358 // all cases are int32).
5359 masm
.branchTestInt32(Assembler::NotEqual
, val
, defaultLabel
);
5360 masm
.unboxInt32(val
, dest
);
5362 // Subtract 'low'. Bounds check.
5364 masm
.sub32(Imm32(low
), dest
);
5366 masm
.branch32(Assembler::AboveOrEqual
, dest
, Imm32(length
), defaultLabel
);
5370 void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val
,
5373 Register scratch2
) {
5374 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5375 // all cases are int32).
5376 Label done
, jumpToDefault
;
5377 masm
.branchTestInt32(Assembler::NotEqual
, val
, &jumpToDefault
);
5378 masm
.unboxInt32(val
, dest
);
5380 Register pcReg
= LoadBytecodePC(masm
, scratch1
);
5381 Address
lowAddr(pcReg
, sizeof(jsbytecode
) + TableSwitchOpLowOffset
);
5382 Address
highAddr(pcReg
, sizeof(jsbytecode
) + TableSwitchOpHighOffset
);
5384 // Jump to default if val > high.
5385 masm
.branch32(Assembler::LessThan
, highAddr
, dest
, &jumpToDefault
);
5387 // Jump to default if val < low.
5388 masm
.load32(lowAddr
, scratch2
);
5389 masm
.branch32(Assembler::GreaterThan
, scratch2
, dest
, &jumpToDefault
);
5391 // index := val - low.
5392 masm
.sub32(scratch2
, dest
);
5395 masm
.bind(&jumpToDefault
);
5402 void BaselineCompilerCodeGen::emitTableSwitchJump(Register key
,
5404 Register scratch2
) {
5405 // Jump to resumeEntries[firstResumeIndex + key].
5407 // Note: BytecodeEmitter::allocateResumeIndex static_asserts
5408 // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t.
5409 uint32_t firstResumeIndex
=
5410 GET_RESUMEINDEX(handler
.pc() + TableSwitchOpFirstResumeIndexOffset
);
5411 LoadBaselineScriptResumeEntries(masm
, handler
.script(), scratch1
, scratch2
);
5412 masm
.loadPtr(BaseIndex(scratch1
, key
, ScaleFromElemWidth(sizeof(uintptr_t)),
5413 firstResumeIndex
* sizeof(uintptr_t)),
5415 masm
.jump(scratch1
);
5419 void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key
,
5421 Register scratch2
) {
5422 // Load the op's firstResumeIndex in scratch1.
5423 LoadUint24Operand(masm
, TableSwitchOpFirstResumeIndexOffset
, scratch1
);
5425 masm
.add32(key
, scratch1
);
5426 jumpToResumeEntry(scratch1
, key
, scratch2
);
5429 template <typename Handler
>
5430 bool BaselineCodeGen
<Handler
>::emit_TableSwitch() {
5431 frame
.popRegsAndSync(1);
5433 Register key
= R0
.scratchReg();
5434 Register scratch1
= R1
.scratchReg();
5435 Register scratch2
= R2
.scratchReg();
5437 // Call a stub to convert R0 from double to int32 if needed.
5438 // Note: this stub may clobber scratch1.
5439 masm
.call(cx
->runtime()->jitRuntime()->getDoubleToInt32ValueStub());
5441 // Load the index in the jump table in |key|, or branch to default pc if not
5442 // int32 or out-of-range.
5443 emitGetTableSwitchIndex(R0
, key
, scratch1
, scratch2
);
5445 // Jump to the target pc.
5446 emitTableSwitchJump(key
, scratch1
, scratch2
);
5450 template <typename Handler
>
5451 bool BaselineCodeGen
<Handler
>::emit_Iter() {
5452 frame
.popRegsAndSync(1);
5454 if (!emitNextIC()) {
5462 template <typename Handler
>
5463 bool BaselineCodeGen
<Handler
>::emit_MoreIter() {
5466 masm
.unboxObject(frame
.addressOfStackValue(-1), R1
.scratchReg());
5468 masm
.iteratorMore(R1
.scratchReg(), R0
, R2
.scratchReg());
5473 template <typename Handler
>
5474 bool BaselineCodeGen
<Handler
>::emitIsMagicValue() {
5477 Label isMagic
, done
;
5478 masm
.branchTestMagic(Assembler::Equal
, frame
.addressOfStackValue(-1),
5480 masm
.moveValue(BooleanValue(false), R0
);
5483 masm
.bind(&isMagic
);
5484 masm
.moveValue(BooleanValue(true), R0
);
5487 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
5491 template <typename Handler
>
5492 bool BaselineCodeGen
<Handler
>::emit_IsNoIter() {
5493 return emitIsMagicValue();
5496 template <typename Handler
>
5497 bool BaselineCodeGen
<Handler
>::emit_EndIter() {
5498 // Pop iterator value.
5501 // Pop the iterator object to close in R0.
5502 frame
.popRegsAndSync(1);
5504 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
5505 regs
.take(BaselineFrameReg
);
5506 if (HasInterpreterPCReg()) {
5507 regs
.take(InterpreterPCReg
);
5510 Register obj
= R0
.scratchReg();
5512 masm
.unboxObject(R0
, obj
);
5514 Register temp1
= regs
.takeAny();
5515 Register temp2
= regs
.takeAny();
5516 Register temp3
= regs
.takeAny();
5517 masm
.iteratorClose(obj
, temp1
, temp2
, temp3
);
5521 template <typename Handler
>
5522 bool BaselineCodeGen
<Handler
>::emit_IsGenClosing() {
5523 return emitIsMagicValue();
5526 template <typename Handler
>
5527 bool BaselineCodeGen
<Handler
>::emit_GetRval() {
5530 emitLoadReturnValue(R0
);
5536 template <typename Handler
>
5537 bool BaselineCodeGen
<Handler
>::emit_SetRval() {
5538 // Store to the frame's return value slot.
5539 frame
.storeStackValue(-1, frame
.addressOfReturnValue(), R2
);
5540 masm
.or32(Imm32(BaselineFrame::HAS_RVAL
), frame
.addressOfFlags());
5545 template <typename Handler
>
5546 bool BaselineCodeGen
<Handler
>::emit_Callee() {
5547 MOZ_ASSERT_IF(handler
.maybeScript(), handler
.maybeScript()->function());
5549 masm
.loadFunctionFromCalleeToken(frame
.addressOfCalleeToken(),
5551 masm
.tagValue(JSVAL_TYPE_OBJECT
, R0
.scratchReg(), R0
);
5557 bool BaselineCompilerCodeGen::emit_EnvCallee() {
5559 uint8_t numHops
= GET_UINT8(handler
.pc());
5560 Register scratch
= R0
.scratchReg();
5562 masm
.loadPtr(frame
.addressOfEnvironmentChain(), scratch
);
5563 for (unsigned i
= 0; i
< numHops
; i
++) {
5564 Address
nextAddr(scratch
,
5565 EnvironmentObject::offsetOfEnclosingEnvironment());
5566 masm
.unboxObject(nextAddr
, scratch
);
5569 masm
.loadValue(Address(scratch
, CallObject::offsetOfCallee()), R0
);
5575 bool BaselineInterpreterCodeGen::emit_EnvCallee() {
5576 Register scratch
= R0
.scratchReg();
5577 Register env
= R1
.scratchReg();
5579 static_assert(JSOpLength_EnvCallee
- sizeof(jsbytecode
) == ENVCOORD_HOPS_LEN
,
5580 "op must have uint8 operand for LoadAliasedVarEnv");
5582 // Load the right environment object.
5583 masm
.loadPtr(frame
.addressOfEnvironmentChain(), env
);
5584 LoadAliasedVarEnv(masm
, env
, scratch
);
5586 masm
.pushValue(Address(env
, CallObject::offsetOfCallee()));
5590 template <typename Handler
>
5591 bool BaselineCodeGen
<Handler
>::emit_SuperBase() {
5592 frame
.popRegsAndSync(1);
5594 Register scratch
= R0
.scratchReg();
5595 Register proto
= R1
.scratchReg();
5598 masm
.unboxObject(R0
, scratch
);
5600 // Load [[HomeObject]]
5601 Address
homeObjAddr(scratch
,
5602 FunctionExtended::offsetOfMethodHomeObjectSlot());
5605 masm
.branchTestObject(Assembler::Equal
, homeObjAddr
, &isObject
);
5606 masm
.assumeUnreachable("[[HomeObject]] must be Object");
5607 masm
.bind(&isObject
);
5609 masm
.unboxObject(homeObjAddr
, scratch
);
5611 // Load prototype from [[HomeObject]]
5612 masm
.loadObjProto(scratch
, proto
);
5615 // We won't encounter a lazy proto, because the prototype is guaranteed to
5616 // either be a JSFunction or a PlainObject, and only proxy objects can have a
5618 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto
) == 1);
5620 Label proxyCheckDone
;
5621 masm
.branchPtr(Assembler::NotEqual
, proto
, ImmWord(1), &proxyCheckDone
);
5622 masm
.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase");
5623 masm
.bind(&proxyCheckDone
);
5627 masm
.branchPtr(Assembler::NotEqual
, proto
, ImmWord(0), &hasProto
);
5629 // Throw an error if |proto| is null.
5632 using Fn
= bool (*)(JSContext
*);
5633 if (!callVM
<Fn
, ThrowHomeObjectNotObject
>()) {
5637 // Box prototype and return
5638 masm
.bind(&hasProto
);
5639 masm
.tagValue(JSVAL_TYPE_OBJECT
, proto
, R1
);
5644 template <typename Handler
>
5645 bool BaselineCodeGen
<Handler
>::emit_SuperFun() {
5646 frame
.popRegsAndSync(1);
5648 Register callee
= R0
.scratchReg();
5649 Register proto
= R1
.scratchReg();
5651 Register scratch
= R2
.scratchReg();
5655 masm
.unboxObject(R0
, callee
);
5658 Label classCheckDone
;
5659 masm
.branchTestObjClass(Assembler::Equal
, callee
, &JSFunction::class_
,
5660 scratch
, callee
, &classCheckDone
);
5661 masm
.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
5662 masm
.bind(&classCheckDone
);
5665 // Load prototype of callee
5666 masm
.loadObjProto(callee
, proto
);
5669 // We won't encounter a lazy proto, because |callee| is guaranteed to be a
5670 // JSFunction and only proxy objects can have a lazy proto.
5671 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto
) == 1);
5673 Label proxyCheckDone
;
5674 masm
.branchPtr(Assembler::NotEqual
, proto
, ImmWord(1), &proxyCheckDone
);
5675 masm
.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun");
5676 masm
.bind(&proxyCheckDone
);
5679 Label nullProto
, done
;
5680 masm
.branchPtr(Assembler::Equal
, proto
, ImmWord(0), &nullProto
);
5682 // Box prototype and return
5683 masm
.tagValue(JSVAL_TYPE_OBJECT
, proto
, R1
);
5686 masm
.bind(&nullProto
);
5687 masm
.moveValue(NullValue(), R1
);
5694 template <typename Handler
>
5695 bool BaselineCodeGen
<Handler
>::emit_Arguments() {
5698 MOZ_ASSERT_IF(handler
.maybeScript(),
5699 handler
.maybeScript()->argumentsHasVarBinding());
5702 if (!handler
.maybeScript() || !handler
.maybeScript()->needsArgsObj()) {
5703 // We assume the script does not need an arguments object. However, this
5704 // assumption can be invalidated later, see argumentsOptimizationFailed
5705 // in JSScript. Guard on the script's NeedsArgsObj flag.
5706 masm
.moveValue(MagicValue(JS_OPTIMIZED_ARGUMENTS
), R0
);
5708 // If we don't need an arguments object, skip the VM call.
5709 Register scratch
= R1
.scratchReg();
5710 loadScript(scratch
);
5712 Assembler::Zero
, Address(scratch
, JSScript::offsetOfMutableFlags()),
5713 Imm32(uint32_t(JSScript::MutableFlags::NeedsArgsObj
)), &done
);
5718 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5719 pushArg(R0
.scratchReg());
5721 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, MutableHandleValue
);
5722 if (!callVM
<Fn
, jit::NewArgumentsObject
>()) {
5731 template <typename Handler
>
5732 bool BaselineCodeGen
<Handler
>::emit_Rest() {
5735 if (!emitNextIC()) {
5739 // Mark R0 as pushed stack value.
5744 template <typename Handler
>
5745 bool BaselineCodeGen
<Handler
>::emit_Generator() {
5746 frame
.assertStackDepth(0);
5748 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5751 pushArg(R0
.scratchReg());
5753 using Fn
= JSObject
* (*)(JSContext
*, BaselineFrame
*);
5754 if (!callVM
<Fn
, jit::CreateGenerator
>()) {
5758 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
5763 template <typename Handler
>
5764 bool BaselineCodeGen
<Handler
>::emit_InitialYield() {
5766 frame
.assertStackDepth(1);
5768 Register genObj
= R2
.scratchReg();
5769 masm
.unboxObject(frame
.addressOfStackValue(-1), genObj
);
5771 MOZ_ASSERT_IF(handler
.maybePC(), GET_RESUMEINDEX(handler
.maybePC()) == 0);
5774 Address(genObj
, AbstractGeneratorObject::offsetOfResumeIndexSlot()));
5776 Register envObj
= R0
.scratchReg();
5777 Register temp
= R1
.scratchReg();
5778 Address
envChainSlot(genObj
,
5779 AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5780 masm
.loadPtr(frame
.addressOfEnvironmentChain(), envObj
);
5781 masm
.guardedCallPreBarrierAnyZone(envChainSlot
, MIRType::Value
, temp
);
5782 masm
.storeValue(JSVAL_TYPE_OBJECT
, envObj
, envChainSlot
);
5785 masm
.branchPtrInNurseryChunk(Assembler::Equal
, genObj
, temp
, &skipBarrier
);
5786 masm
.branchPtrInNurseryChunk(Assembler::NotEqual
, envObj
, temp
, &skipBarrier
);
5788 MOZ_ASSERT(genObj
== R2
.scratchReg());
5789 masm
.call(&postBarrierSlot_
);
5791 masm
.bind(&skipBarrier
);
5793 masm
.tagValue(JSVAL_TYPE_OBJECT
, genObj
, JSReturnOperand
);
5794 if (!emitReturn()) {
5798 // Two extra stack values will be pushed when resuming the generator.
5799 frame
.incStackDepth(2);
5803 template <typename Handler
>
5804 bool BaselineCodeGen
<Handler
>::emit_Yield() {
5805 // Store generator in R0.
5806 frame
.popRegsAndSync(1);
5808 Register genObj
= R2
.scratchReg();
5809 masm
.unboxObject(R0
, genObj
);
5811 if (frame
.hasKnownStackDepth(1)) {
5812 // If the expression stack is empty, we can inline the Yield. Note that this
5813 // branch is never taken for the interpreter because it doesn't know static
5816 Register temp
= R1
.scratchReg();
5817 Address
resumeIndexSlot(genObj
,
5818 AbstractGeneratorObject::offsetOfResumeIndexSlot());
5819 jsbytecode
* pc
= handler
.maybePC();
5820 MOZ_ASSERT(pc
, "compiler-only code never has a null pc");
5821 masm
.move32(Imm32(GET_RESUMEINDEX(pc
)), temp
);
5822 masm
.storeValue(JSVAL_TYPE_INT32
, temp
, resumeIndexSlot
);
5824 Register envObj
= R0
.scratchReg();
5825 Address
envChainSlot(
5826 genObj
, AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5827 masm
.loadPtr(frame
.addressOfEnvironmentChain(), envObj
);
5828 masm
.guardedCallPreBarrier(envChainSlot
, MIRType::Value
);
5829 masm
.storeValue(JSVAL_TYPE_OBJECT
, envObj
, envChainSlot
);
5832 masm
.branchPtrInNurseryChunk(Assembler::Equal
, genObj
, temp
, &skipBarrier
);
5833 masm
.branchPtrInNurseryChunk(Assembler::NotEqual
, envObj
, temp
,
5835 MOZ_ASSERT(genObj
== R2
.scratchReg());
5836 masm
.call(&postBarrierSlot_
);
5837 masm
.bind(&skipBarrier
);
5839 masm
.loadBaselineFramePtr(BaselineFrameReg
, R1
.scratchReg());
5840 computeFrameSize(R0
.scratchReg());
5843 pushBytecodePCArg();
5844 pushArg(R0
.scratchReg());
5845 pushArg(R1
.scratchReg());
5848 using Fn
= bool (*)(JSContext
*, HandleObject
, BaselineFrame
*, uint32_t,
5850 if (!callVM
<Fn
, jit::NormalSuspend
>()) {
5855 masm
.loadValue(frame
.addressOfStackValue(-1), JSReturnOperand
);
5856 if (!emitReturn()) {
5860 // Two extra stack values will be pushed when resuming the generator.
5861 frame
.incStackDepth(2);
5865 template <typename Handler
>
5866 bool BaselineCodeGen
<Handler
>::emit_Await() {
5867 return emit_Yield();
5871 template <typename F
>
5872 bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(
5873 const F
& ifDebuggee
, Register
) {
5874 if (handler
.compileDebugInstrumentation()) {
5875 return ifDebuggee();
5881 template <typename F
>
5882 bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation(
5883 const F
& ifDebuggee
, Register scratch
) {
5884 // Note that we can't use emitDebugInstrumentation here because the frame's
5885 // DEBUGGEE flag hasn't been initialized yet.
5887 // If the current Realm is not a debuggee we're done.
5889 CodeOffset toggleOffset
= masm
.toggledJump(&done
);
5890 if (!handler
.addDebugInstrumentationOffset(cx
, toggleOffset
)) {
5893 masm
.loadPtr(AbsoluteAddress(cx
->addressOfRealm()), scratch
);
5894 masm
.branchTest32(Assembler::Zero
,
5895 Address(scratch
, Realm::offsetOfDebugModeBits()),
5896 Imm32(Realm::debugModeIsDebuggeeBit()), &done
);
5898 if (!ifDebuggee()) {
5906 template <typename Handler
>
5907 bool BaselineCodeGen
<Handler
>::emit_AfterYield() {
5908 if (!emit_JumpTarget()) {
5912 auto ifDebuggee
= [this]() {
5913 frame
.assertSyncedStack();
5914 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
5916 pushArg(R0
.scratchReg());
5918 const RetAddrEntry::Kind kind
= RetAddrEntry::Kind::DebugAfterYield
;
5920 using Fn
= bool (*)(JSContext
*, BaselineFrame
*);
5921 if (!callVM
<Fn
, jit::DebugAfterYield
>(kind
)) {
5927 return emitAfterYieldDebugInstrumentation(ifDebuggee
, R0
.scratchReg());
5930 template <typename Handler
>
5931 bool BaselineCodeGen
<Handler
>::emit_FinalYieldRval() {
5932 // Store generator in R0.
5933 frame
.popRegsAndSync(1);
5934 masm
.unboxObject(R0
, R0
.scratchReg());
5937 pushBytecodePCArg();
5938 pushArg(R0
.scratchReg());
5940 using Fn
= bool (*)(JSContext
*, HandleObject
, jsbytecode
*);
5941 if (!callVM
<Fn
, jit::FinalSuspend
>()) {
5945 masm
.loadValue(frame
.addressOfReturnValue(), JSReturnOperand
);
5946 return emitReturn();
5950 void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() {
5951 TrampolinePtr code
=
5952 cx
->runtime()->jitRuntime()->baselineInterpreter().interpretOpAddr();
5957 void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() {
5958 masm
.jump(handler
.interpretOpLabel());
5961 template <typename Handler
>
5962 bool BaselineCodeGen
<Handler
>::emitEnterGeneratorCode(Register script
,
5963 Register resumeIndex
,
5965 // Resume in either the BaselineScript (if present) or Baseline Interpreter.
5967 static_assert(BaselineDisabledScript
== 0x1,
5968 "Comparison below requires specific sentinel encoding");
5970 if (JitOptions
.warpBuilder
) {
5971 // Initialize the icScript slot in the baseline frame.
5972 masm
.loadJitScript(script
, scratch
);
5973 masm
.computeEffectiveAddress(
5974 Address(scratch
, JitScript::offsetOfICScript()), scratch
);
5975 Address
icScriptAddr(BaselineFrameReg
,
5976 BaselineFrame::reverseOffsetOfICScript());
5977 masm
.storePtr(scratch
, icScriptAddr
);
5980 Label noBaselineScript
;
5981 masm
.loadJitScript(script
, scratch
);
5982 masm
.loadPtr(Address(scratch
, JitScript::offsetOfBaselineScript()), scratch
);
5983 masm
.branchPtr(Assembler::BelowOrEqual
, scratch
,
5984 ImmPtr(BaselineDisabledScriptPtr
), &noBaselineScript
);
5986 masm
.load32(Address(scratch
, BaselineScript::offsetOfResumeEntriesOffset()),
5988 masm
.addPtr(scratch
, script
);
5990 BaseIndex(script
, resumeIndex
, ScaleFromElemWidth(sizeof(uintptr_t))),
5994 masm
.bind(&noBaselineScript
);
5996 // Initialize interpreter frame fields.
5997 Address
flagsAddr(BaselineFrameReg
, BaselineFrame::reverseOffsetOfFlags());
5998 Address
scriptAddr(BaselineFrameReg
,
5999 BaselineFrame::reverseOffsetOfInterpreterScript());
6000 masm
.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER
), flagsAddr
);
6001 masm
.storePtr(script
, scriptAddr
);
6003 // Initialize pc and jump to it.
6004 emitInterpJumpToResumeEntry(script
, resumeIndex
, scratch
);
6008 template <typename Handler
>
6009 bool BaselineCodeGen
<Handler
>::emit_Resume() {
6011 masm
.assertStackAlignment(sizeof(Value
), 0);
6013 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
6014 regs
.take(BaselineFrameReg
);
6015 if (HasInterpreterPCReg()) {
6016 regs
.take(InterpreterPCReg
);
6019 saveInterpreterPCReg();
6021 // Load generator object.
6022 Register genObj
= regs
.takeAny();
6023 masm
.unboxObject(frame
.addressOfStackValue(-3), genObj
);
6026 Register callee
= regs
.takeAny();
6028 Address(genObj
, AbstractGeneratorObject::offsetOfCalleeSlot()), callee
);
6030 // Save a pointer to the JSOp::Resume operand stack Values.
6031 Register callerStackPtr
= regs
.takeAny();
6032 masm
.computeEffectiveAddress(frame
.addressOfStackValue(-1), callerStackPtr
);
6034 // Branch to |interpret| to resume the generator in the C++ interpreter if the
6035 // script does not have a JitScript.
6037 Register scratch1
= regs
.takeAny();
6038 masm
.loadPtr(Address(callee
, JSFunction::offsetOfScript()), scratch1
);
6039 masm
.branchIfScriptHasNoJitScript(scratch1
, &interpret
);
6041 #ifdef JS_TRACE_LOGGING
6042 if (JS::TraceLoggerSupported()) {
6043 // TODO (bug 1565788): add Baseline Interpreter support.
6044 MOZ_CRASH("Unimplemented Baseline Interpreter TraceLogger support");
6045 masm
.loadJitScript(scratch1
, scratch1
);
6046 Address
baselineAddr(scratch1
, JitScript::offsetOfBaselineScript());
6047 masm
.loadPtr(baselineAddr
, scratch1
);
6048 if (!emitTraceLoggerResume(scratch1
, regs
)) {
6054 // Push |undefined| for all formals.
6055 Register scratch2
= regs
.takeAny();
6056 Label loop
, loopDone
;
6057 masm
.load16ZeroExtend(Address(callee
, JSFunction::offsetOfNargs()), scratch2
);
6058 masm
.branchTest32(Assembler::Zero
, scratch2
, scratch2
, &loopDone
);
6061 masm
.pushValue(UndefinedValue());
6062 masm
.branchSub32(Assembler::NonZero
, Imm32(1), scratch2
, &loop
);
6064 masm
.bind(&loopDone
);
6066 // Push |undefined| for |this|.
6067 masm
.pushValue(UndefinedValue());
6069 // Update BaselineFrame frameSize field and create the frame descriptor.
6070 masm
.computeEffectiveAddress(
6071 Address(BaselineFrameReg
, BaselineFrame::FramePointerOffset
), scratch2
);
6072 masm
.subStackPtrFrom(scratch2
);
6074 masm
.store32(scratch2
, frame
.addressOfDebugFrameSize());
6076 masm
.makeFrameDescriptor(scratch2
, FrameType::BaselineJS
,
6077 JitFrameLayout::Size());
6079 masm
.push(Imm32(0)); // actual argc
6080 masm
.PushCalleeToken(callee
, /* constructing = */ false);
6081 masm
.push(scratch2
); // frame descriptor
6083 // PushCalleeToken bumped framePushed. Reset it.
6084 MOZ_ASSERT(masm
.framePushed() == sizeof(uintptr_t));
6085 masm
.setFramePushed(0);
6089 // Push a fake return address on the stack. We will resume here when the
6090 // generator returns.
6091 Label genStart
, returnTarget
;
6092 #ifdef JS_USE_LINK_REGISTER
6093 masm
.call(&genStart
);
6095 masm
.callAndPushReturnAddress(&genStart
);
6098 // Record the return address so the return offset -> pc mapping works.
6099 if (!handler
.recordCallRetAddr(cx
, RetAddrEntry::Kind::IC
,
6100 masm
.currentOffset())) {
6104 masm
.jump(&returnTarget
);
6105 masm
.bind(&genStart
);
6106 #ifdef JS_USE_LINK_REGISTER
6107 masm
.pushReturnAddress();
6110 // If profiler instrumentation is on, update lastProfilingFrame on
6111 // current JitActivation
6113 Register scratchReg
= scratch2
;
6115 AbsoluteAddress
addressOfEnabled(
6116 cx
->runtime()->geckoProfiler().addressOfEnabled());
6117 masm
.branch32(Assembler::Equal
, addressOfEnabled
, Imm32(0), &skip
);
6118 masm
.loadJSContext(scratchReg
);
6119 masm
.loadPtr(Address(scratchReg
, JSContext::offsetOfProfilingActivation()),
6122 Address(scratchReg
, JitActivation::offsetOfLastProfilingFrame()));
6126 // Construct BaselineFrame.
6127 masm
.push(BaselineFrameReg
);
6128 masm
.moveStackPtrTo(BaselineFrameReg
);
6129 masm
.subFromStackPtr(Imm32(BaselineFrame::Size()));
6130 masm
.assertStackAlignment(sizeof(Value
), 0);
6132 // Store flags and env chain.
6133 masm
.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV
), frame
.addressOfFlags());
6135 Address(genObj
, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
6137 masm
.storePtr(scratch2
, frame
.addressOfEnvironmentChain());
6139 // Store the arguments object if there is one.
6141 Address
argsObjSlot(genObj
, AbstractGeneratorObject::offsetOfArgsObjSlot());
6142 masm
.fallibleUnboxObject(argsObjSlot
, scratch2
, &noArgsObj
);
6144 masm
.storePtr(scratch2
, frame
.addressOfArgsObj());
6145 masm
.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ
), frame
.addressOfFlags());
6147 masm
.bind(&noArgsObj
);
6149 // Push expression slots if needed.
6151 Address
exprStackSlot(genObj
,
6152 AbstractGeneratorObject::offsetOfExpressionStackSlot());
6153 masm
.fallibleUnboxObject(exprStackSlot
, scratch2
, &noExprStack
);
6155 Register initLength
= regs
.takeAny();
6156 masm
.loadPtr(Address(scratch2
, NativeObject::offsetOfElements()), scratch2
);
6157 masm
.load32(Address(scratch2
, ObjectElements::offsetOfInitializedLength()),
6161 Address(scratch2
, ObjectElements::offsetOfInitializedLength()));
6163 Label loop
, loopDone
;
6164 masm
.branchTest32(Assembler::Zero
, initLength
, initLength
, &loopDone
);
6167 masm
.pushValue(Address(scratch2
, 0));
6168 masm
.guardedCallPreBarrierAnyZone(Address(scratch2
, 0), MIRType::Value
,
6170 masm
.addPtr(Imm32(sizeof(Value
)), scratch2
);
6171 masm
.branchSub32(Assembler::NonZero
, Imm32(1), initLength
, &loop
);
6173 masm
.bind(&loopDone
);
6174 regs
.add(initLength
);
6177 masm
.bind(&noExprStack
);
6179 // Push arg, generator, resumeKind stack Values, in that order.
6180 masm
.pushValue(Address(callerStackPtr
, sizeof(Value
)));
6181 masm
.pushValue(JSVAL_TYPE_OBJECT
, genObj
);
6182 masm
.pushValue(Address(callerStackPtr
, 0));
6184 masm
.switchToObjectRealm(genObj
, scratch2
);
6186 // Load script in scratch1.
6188 Address(genObj
, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1
);
6189 masm
.loadPtr(Address(scratch1
, JSFunction::offsetOfScript()), scratch1
);
6191 // Load resume index in scratch2 and mark generator as running.
6192 Address
resumeIndexSlot(genObj
,
6193 AbstractGeneratorObject::offsetOfResumeIndexSlot());
6194 masm
.unboxInt32(resumeIndexSlot
, scratch2
);
6195 masm
.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING
),
6198 if (!emitEnterGeneratorCode(scratch1
, scratch2
, regs
.getAny())) {
6202 // Call into the VM to resume the generator in the C++ interpreter if there's
6204 masm
.bind(&interpret
);
6208 pushArg(callerStackPtr
);
6211 using Fn
= bool (*)(JSContext
*, HandleObject
, Value
*, MutableHandleValue
);
6212 if (!callVM
<Fn
, jit::InterpretResume
>()) {
6216 // After the generator returns, we restore the stack pointer, switch back to
6217 // the current realm, push the return value, and we're done.
6218 masm
.bind(&returnTarget
);
6219 masm
.computeEffectiveAddress(frame
.addressOfStackValue(-1),
6220 masm
.getStackPointer());
6221 if (JSScript
* script
= handler
.maybeScript()) {
6222 masm
.switchToRealm(script
->realm(), R2
.scratchReg());
6224 masm
.switchToBaselineFrameRealm(R2
.scratchReg());
6226 restoreInterpreterPCReg();
6232 template <typename Handler
>
6233 bool BaselineCodeGen
<Handler
>::emit_CheckResumeKind() {
6234 // Load resumeKind in R1, generator in R0.
6235 frame
.popRegsAndSync(2);
6239 masm
.branchTestInt32(Assembler::Equal
, R1
, &ok
);
6240 masm
.assumeUnreachable("Expected int32 resumeKind");
6244 // If resumeKind is 'next' we don't have to do anything.
6246 masm
.unboxInt32(R1
, R1
.scratchReg());
6247 masm
.branch32(Assembler::Equal
, R1
.scratchReg(),
6248 Imm32(int32_t(GeneratorResumeKind::Next
)), &done
);
6252 pushArg(R1
.scratchReg()); // resumeKind
6254 masm
.loadValue(frame
.addressOfStackValue(-1), R2
);
6257 masm
.unboxObject(R0
, R0
.scratchReg());
6258 pushArg(R0
.scratchReg()); // genObj
6260 masm
.loadBaselineFramePtr(BaselineFrameReg
, R2
.scratchReg());
6261 pushArg(R2
.scratchReg()); // frame
6263 using Fn
= bool (*)(JSContext
*, BaselineFrame
*,
6264 Handle
<AbstractGeneratorObject
*>, HandleValue
, int32_t);
6265 if (!callVM
<Fn
, jit::GeneratorThrowOrReturn
>()) {
6274 bool BaselineCompilerCodeGen::emit_ResumeKind() {
6275 GeneratorResumeKind resumeKind
= ResumeKindFromPC(handler
.pc());
6276 frame
.push(Int32Value(int32_t(resumeKind
)));
6281 bool BaselineInterpreterCodeGen::emit_ResumeKind() {
6282 LoadUint8Operand(masm
, R0
.scratchReg());
6283 masm
.tagValue(JSVAL_TYPE_INT32
, R0
.scratchReg(), R0
);
6288 template <typename Handler
>
6289 bool BaselineCodeGen
<Handler
>::emit_DebugCheckSelfHosted() {
6293 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6298 using Fn
= bool (*)(JSContext
*, HandleValue
);
6299 if (!callVM
<Fn
, js::Debug_CheckSelfHosted
>()) {
6306 template <typename Handler
>
6307 bool BaselineCodeGen
<Handler
>::emit_IsConstructing() {
6308 frame
.push(MagicValue(JS_IS_CONSTRUCTING
));
6313 bool BaselineCompilerCodeGen::emit_JumpTarget() {
6314 MaybeIncrementCodeCoverageCounter(masm
, handler
.script(), handler
.pc());
6319 bool BaselineInterpreterCodeGen::emit_JumpTarget() {
6320 Register scratch1
= R0
.scratchReg();
6321 Register scratch2
= R1
.scratchReg();
6324 CodeOffset toggleOffset
= masm
.toggledJump(&skipCoverage
);
6325 masm
.call(handler
.codeCoverageAtPCLabel());
6326 masm
.bind(&skipCoverage
);
6327 if (!handler
.codeCoverageOffsets().append(toggleOffset
.offset())) {
6331 // Load icIndex in scratch1.
6332 LoadInt32Operand(masm
, scratch1
);
6334 // scratch1 := scratch1 * sizeof(ICEntry)
6335 static_assert(sizeof(ICEntry
) == 8 || sizeof(ICEntry
) == 16,
6336 "shift below depends on ICEntry size");
6337 uint32_t shift
= (sizeof(ICEntry
) == 16) ? 4 : 3;
6338 masm
.lshiftPtr(Imm32(shift
), scratch1
);
6340 // Compute ICEntry* and store to frame->interpreterICEntry.
6341 if (JitOptions
.warpBuilder
) {
6342 masm
.loadPtr(frame
.addressOfICScript(), scratch2
);
6343 masm
.computeEffectiveAddress(
6344 BaseIndex(scratch2
, scratch1
, TimesOne
, ICScript::offsetOfICEntries()),
6347 loadScript(scratch2
);
6348 masm
.loadJitScript(scratch2
, scratch2
);
6349 masm
.computeEffectiveAddress(
6350 BaseIndex(scratch2
, scratch1
, TimesOne
, JitScript::offsetOfICEntries()),
6353 masm
.storePtr(scratch2
, frame
.addressOfInterpreterICEntry());
6357 template <typename Handler
>
6358 bool BaselineCodeGen
<Handler
>::emit_CheckClassHeritage() {
6361 // Leave the heritage value on the stack.
6362 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6367 using Fn
= bool (*)(JSContext
*, HandleValue
);
6368 return callVM
<Fn
, js::CheckClassHeritageOperation
>();
6371 template <typename Handler
>
6372 bool BaselineCodeGen
<Handler
>::emit_InitHomeObject() {
6373 // Load HomeObject in R0.
6374 frame
.popRegsAndSync(1);
6376 // Load function off stack
6377 Register func
= R2
.scratchReg();
6378 masm
.unboxObject(frame
.addressOfStackValue(-1), func
);
6380 // Set HOMEOBJECT_SLOT
6381 Register temp
= R1
.scratchReg();
6382 Address
addr(func
, FunctionExtended::offsetOfMethodHomeObjectSlot());
6383 masm
.guardedCallPreBarrierAnyZone(addr
, MIRType::Value
, temp
);
6384 masm
.storeValue(R0
, addr
);
6387 masm
.branchPtrInNurseryChunk(Assembler::Equal
, func
, temp
, &skipBarrier
);
6388 masm
.branchValueIsNurseryCell(Assembler::NotEqual
, R0
, temp
, &skipBarrier
);
6389 masm
.call(&postBarrierSlot_
);
6390 masm
.bind(&skipBarrier
);
6396 bool BaselineCompilerCodeGen::emit_BuiltinObject() {
6397 // Built-in objects are constants for a given global.
6398 auto kind
= BuiltinObjectKind(GET_UINT8(handler
.pc()));
6399 JSObject
* builtin
= BuiltinObjectOperation(cx
, kind
);
6403 frame
.push(ObjectValue(*builtin
));
6408 bool BaselineInterpreterCodeGen::emit_BuiltinObject() {
6411 pushUint8BytecodeOperandArg(R0
.scratchReg());
6413 using Fn
= JSObject
* (*)(JSContext
*, BuiltinObjectKind
);
6414 if (!callVM
<Fn
, BuiltinObjectOperation
>()) {
6418 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6423 template <typename Handler
>
6424 bool BaselineCodeGen
<Handler
>::emit_ObjWithProto() {
6427 // Leave the proto value on the stack for the decompiler
6428 masm
.loadValue(frame
.addressOfStackValue(-1), R0
);
6433 using Fn
= PlainObject
* (*)(JSContext
*, HandleValue
);
6434 if (!callVM
<Fn
, js::ObjectWithProtoOperation
>()) {
6438 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6444 template <typename Handler
>
6445 bool BaselineCodeGen
<Handler
>::emit_FunWithProto() {
6446 frame
.popRegsAndSync(1);
6448 masm
.unboxObject(R0
, R0
.scratchReg());
6449 masm
.loadPtr(frame
.addressOfEnvironmentChain(), R1
.scratchReg());
6452 pushArg(R0
.scratchReg());
6453 pushArg(R1
.scratchReg());
6454 pushScriptGCThingArg(ScriptGCThingType::Function
, R0
.scratchReg(),
6458 JSObject
* (*)(JSContext
*, HandleFunction
, HandleObject
, HandleObject
);
6459 if (!callVM
<Fn
, js::FunWithProtoOperation
>()) {
6463 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6468 template <typename Handler
>
6469 bool BaselineCodeGen
<Handler
>::emit_ClassConstructor() {
6472 // Pass nullptr as prototype to MakeDefaultConstructor
6474 pushArg(ImmPtr(nullptr));
6475 pushBytecodePCArg();
6479 JSFunction
* (*)(JSContext
*, HandleScript
, jsbytecode
*, HandleObject
);
6480 if (!callVM
<Fn
, js::MakeDefaultConstructor
>()) {
6484 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6489 template <typename Handler
>
6490 bool BaselineCodeGen
<Handler
>::emit_DerivedConstructor() {
6491 frame
.popRegsAndSync(1);
6493 masm
.unboxObject(R0
, R0
.scratchReg());
6496 pushArg(R0
.scratchReg());
6497 pushBytecodePCArg();
6501 JSFunction
* (*)(JSContext
*, HandleScript
, jsbytecode
*, HandleObject
);
6502 if (!callVM
<Fn
, js::MakeDefaultConstructor
>()) {
6506 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6512 bool BaselineCompilerCodeGen::emit_ImportMeta() {
6513 // Note: this is like the interpreter implementation, but optimized a bit by
6514 // calling GetModuleObjectForScript at compile-time.
6516 RootedModuleObject
module(cx
, GetModuleObjectForScript(handler
.script()));
6522 pushArg(ImmGCPtr(module
));
6524 using Fn
= JSObject
* (*)(JSContext
*, HandleObject
);
6525 if (!callVM
<Fn
, js::GetOrCreateModuleMetaObject
>()) {
6529 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6535 bool BaselineInterpreterCodeGen::emit_ImportMeta() {
6540 using Fn
= JSObject
* (*)(JSContext
*, HandleScript
);
6541 if (!callVM
<Fn
, ImportMetaOperation
>()) {
6545 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6550 template <typename Handler
>
6551 bool BaselineCodeGen
<Handler
>::emit_DynamicImport() {
6552 // Put specifier value in R0.
6553 frame
.popRegsAndSync(1);
6559 using Fn
= JSObject
* (*)(JSContext
*, HandleScript
, HandleValue
);
6560 if (!callVM
<Fn
, js::StartDynamicModuleImport
>()) {
6564 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6570 bool BaselineCompilerCodeGen::emit_InstrumentationActive() {
6573 // RealmInstrumentation cannot be removed from a global without destroying the
6574 // entire realm, so its active address can be embedded into jitcode.
6575 const int32_t* address
= RealmInstrumentation::addressOfActive(cx
->global());
6577 Register scratch
= R0
.scratchReg();
6578 masm
.load32(AbsoluteAddress(address
), scratch
);
6579 masm
.tagValue(JSVAL_TYPE_BOOLEAN
, scratch
, R0
);
6580 frame
.push(R0
, JSVAL_TYPE_BOOLEAN
);
6586 bool BaselineInterpreterCodeGen::emit_InstrumentationActive() {
6589 using Fn
= bool (*)(JSContext
*, MutableHandleValue
);
6590 if (!callVM
<Fn
, InstrumentationActiveOperation
>()) {
6599 bool BaselineCompilerCodeGen::emit_InstrumentationCallback() {
6600 JSObject
* obj
= RealmInstrumentation::getCallback(cx
->global());
6602 frame
.push(ObjectValue(*obj
));
6607 bool BaselineInterpreterCodeGen::emit_InstrumentationCallback() {
6610 using Fn
= JSObject
* (*)(JSContext
*);
6611 if (!callVM
<Fn
, InstrumentationCallbackOperation
>()) {
6615 masm
.tagValue(JSVAL_TYPE_OBJECT
, ReturnReg
, R0
);
6621 bool BaselineCompilerCodeGen::emit_InstrumentationScriptId() {
6623 RootedScript
script(cx
, handler
.script());
6624 if (!RealmInstrumentation::getScriptId(cx
, cx
->global(), script
, &scriptId
)) {
6627 frame
.push(Int32Value(scriptId
));
6632 bool BaselineInterpreterCodeGen::emit_InstrumentationScriptId() {
6636 using Fn
= bool (*)(JSContext
*, HandleScript
, MutableHandleValue
);
6637 if (!callVM
<Fn
, InstrumentationScriptIdOperation
>()) {
6646 bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
6647 // Caller is responsible for checking script->hasForceInterpreterOp().
6648 MOZ_CRASH("JSOp::ForceInterpreter in baseline");
6652 bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
6653 masm
.assumeUnreachable("JSOp::ForceInterpreter");
6657 template <typename Handler
>
6658 bool BaselineCodeGen
<Handler
>::emitPrologue() {
6659 #ifdef JS_USE_LINK_REGISTER
6660 // Push link register from generateEnterJIT()'s BLR.
6661 masm
.pushReturnAddress();
6662 masm
.checkStackAlignment();
6664 emitProfilerEnterFrame();
6666 masm
.push(BaselineFrameReg
);
6667 masm
.moveStackPtrTo(BaselineFrameReg
);
6668 masm
.subFromStackPtr(Imm32(BaselineFrame::Size()));
6670 // Initialize BaselineFrame. Also handles env chain pre-initialization (in
6671 // case GC gets run during stack check). For global and eval scripts, the env
6672 // chain is in R1. For function scripts, the env chain is in the callee.
6673 emitInitFrameFields(R1
.scratchReg());
6675 // When compiling with Debugger instrumentation, set the debuggeeness of
6676 // the frame before any operation that can call into the VM.
6677 if (!emitIsDebuggeeCheck()) {
6681 // Initialize the env chain before any operation that may call into the VM and
6683 if (!initEnvironmentChain()) {
6687 // Check for overrecursion before initializing locals.
6688 if (!emitStackCheck()) {
6692 emitInitializeLocals();
6694 #ifdef JS_TRACE_LOGGING
6695 if (JS::TraceLoggerSupported() && !emitTraceLoggerEnter()) {
6700 // Ion prologue bailouts will enter here in the Baseline Interpreter.
6701 masm
.bind(&bailoutPrologue_
);
6703 frame
.assertSyncedStack();
6705 if (JSScript
* script
= handler
.maybeScript()) {
6706 masm
.debugAssertContextRealm(script
->realm(), R1
.scratchReg());
6709 if (!emitDebugPrologue()) {
6713 if (!emitHandleCodeCoverageAtPrologue()) {
6717 if (!emitWarmUpCounterIncrement()) {
6721 warmUpCheckPrologueOffset_
= CodeOffset(masm
.currentOffset());
6723 if (!emitArgumentTypeChecks()) {
6730 template <typename Handler
>
6731 bool BaselineCodeGen
<Handler
>::emitEpilogue() {
6732 masm
.bind(&return_
);
6734 if (!handler
.shouldEmitDebugEpilogueAtReturnOp()) {
6735 if (!emitDebugEpilogue()) {
6740 #ifdef JS_TRACE_LOGGING
6741 if (JS::TraceLoggerSupported() && !emitTraceLoggerExit()) {
6746 masm
.moveToStackPtr(BaselineFrameReg
);
6747 masm
.pop(BaselineFrameReg
);
6749 emitProfilerExitFrame();
6755 MethodStatus
BaselineCompiler::emitBody() {
6756 JSScript
* script
= handler
.script();
6757 MOZ_ASSERT(handler
.pc() == script
->code());
6759 mozilla::DebugOnly
<jsbytecode
*> prevpc
= handler
.pc();
6762 JSOp op
= JSOp(*handler
.pc());
6763 JitSpew(JitSpew_BaselineOp
, "Compiling op @ %d: %s",
6764 int(script
->pcToOffset(handler
.pc())), CodeName(op
));
6766 BytecodeInfo
* info
= handler
.analysis().maybeInfo(handler
.pc());
6768 // Skip unreachable ops.
6770 // Test if last instructions and stop emitting in that case.
6771 handler
.moveToNextPC();
6772 if (handler
.pc() >= script
->codeEnd()) {
6776 prevpc
= handler
.pc();
6780 if (info
->jumpTarget
) {
6781 // Fully sync the stack if there are incoming jumps.
6783 frame
.setStackDepth(info
->stackDepth
);
6784 masm
.bind(handler
.labelOf(handler
.pc()));
6785 } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
6786 // Also fully sync the stack if the debugger is enabled.
6789 // At the beginning of any op, at most the top 2 stack-values are
6791 if (frame
.stackDepth() > 2) {
6796 frame
.assertValidState(*info
);
6798 // If the script has a resume offset for this pc we need to keep track of
6799 // the native code offset.
6800 if (info
->hasResumeOffset
) {
6801 frame
.assertSyncedStack();
6802 uint32_t pcOffset
= script
->pcToOffset(handler
.pc());
6803 uint32_t nativeOffset
= masm
.currentOffset();
6804 if (!resumeOffsetEntries_
.emplaceBack(pcOffset
, nativeOffset
)) {
6805 ReportOutOfMemory(cx
);
6806 return Method_Error
;
6810 // Emit traps for breakpoints and step mode.
6811 if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
6812 return Method_Error
;
6815 #define EMIT_OP(OP, ...) \
6817 if (MOZ_UNLIKELY(!this->emit_##OP())) return Method_Error; \
6821 FOR_EACH_OPCODE(EMIT_OP
)
6823 MOZ_CRASH("Unexpected op");
6828 MOZ_ASSERT(masm
.framePushed() == 0);
6830 // Test if last instructions and stop emitting in that case.
6831 handler
.moveToNextPC();
6832 if (handler
.pc() >= script
->codeEnd()) {
6837 prevpc
= handler
.pc();
6841 MOZ_ASSERT(JSOp(*prevpc
) == JSOp::RetRval
);
6842 return Method_Compiled
;
6845 bool BaselineInterpreterGenerator::emitDebugTrap() {
6846 CodeOffset offset
= masm
.nopPatchableToCall();
6847 if (!debugTrapOffsets_
.append(offset
.offset())) {
6848 ReportOutOfMemory(cx
);
6855 // Register holding the bytecode pc during dispatch. This exists so the debug
6856 // trap handler can reload the pc into this register when it's done.
6857 static constexpr Register InterpreterPCRegAtDispatch
=
6858 HasInterpreterPCReg() ? InterpreterPCReg
: R0
.scratchReg();
6860 bool BaselineInterpreterGenerator::emitInterpreterLoop() {
6861 Register scratch1
= R0
.scratchReg();
6862 Register scratch2
= R1
.scratchReg();
6864 // Entry point for interpreting a bytecode op. No registers are live except
6865 // for InterpreterPCReg.
6866 masm
.bind(handler
.interpretOpWithPCRegLabel());
6868 // Emit a patchable call for debugger breakpoints/stepping.
6869 if (!emitDebugTrap()) {
6872 Label interpretOpAfterDebugTrap
;
6873 masm
.bind(&interpretOpAfterDebugTrap
);
6875 // Load pc, bytecode op.
6876 Register pcReg
= LoadBytecodePC(masm
, scratch1
);
6877 masm
.load8ZeroExtend(Address(pcReg
, 0), scratch1
);
6879 // Jump to table[op].
6881 CodeOffset label
= masm
.moveNearAddressWithPatch(scratch2
);
6882 if (!tableLabels_
.append(label
)) {
6885 BaseIndex
pointer(scratch2
, scratch1
, ScalePointer
);
6886 masm
.branchToComputedAddress(pointer
);
6889 // At the end of each op, emit code to bump the pc and jump to the
6890 // next op (this is also known as a threaded interpreter).
6891 auto opEpilogue
= [&](JSOp op
, size_t opLength
) -> bool {
6892 MOZ_ASSERT(masm
.framePushed() == 0);
6894 if (!BytecodeFallsThrough(op
)) {
6896 masm
.assumeUnreachable("unexpected fall through");
6900 // Bump frame->interpreterICEntry if needed.
6901 if (BytecodeOpHasIC(op
)) {
6902 frame
.bumpInterpreterICEntry();
6905 // Bump bytecode PC.
6906 if (HasInterpreterPCReg()) {
6907 MOZ_ASSERT(InterpreterPCRegAtDispatch
== InterpreterPCReg
);
6908 masm
.addPtr(Imm32(opLength
), InterpreterPCReg
);
6910 MOZ_ASSERT(InterpreterPCRegAtDispatch
== scratch1
);
6911 masm
.loadPtr(frame
.addressOfInterpreterPC(), InterpreterPCRegAtDispatch
);
6912 masm
.addPtr(Imm32(opLength
), InterpreterPCRegAtDispatch
);
6913 masm
.storePtr(InterpreterPCRegAtDispatch
, frame
.addressOfInterpreterPC());
6916 if (!emitDebugTrap()) {
6920 // Load the opcode, jump to table[op].
6921 masm
.load8ZeroExtend(Address(InterpreterPCRegAtDispatch
, 0), scratch1
);
6922 CodeOffset label
= masm
.moveNearAddressWithPatch(scratch2
);
6923 if (!tableLabels_
.append(label
)) {
6926 BaseIndex
pointer(scratch2
, scratch1
, ScalePointer
);
6927 masm
.branchToComputedAddress(pointer
);
6931 // Emit code for each bytecode op.
6932 Label opLabels
[JSOP_LIMIT
];
6933 #define EMIT_OP(OP, ...) \
6935 masm.bind(&opLabels[uint8_t(JSOp::OP)]); \
6936 handler.setCurrentOp(JSOp::OP); \
6937 if (!this->emit_##OP()) { \
6940 if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) { \
6943 handler.resetCurrentOp(); \
6945 FOR_EACH_OPCODE(EMIT_OP
)
6948 // External entry point to start interpreting bytecode ops. This is used for
6949 // things like exception handling and OSR. DebugModeOSR patches JIT frames to
6950 // return here from the DebugTrapHandler.
6951 masm
.bind(handler
.interpretOpLabel());
6952 interpretOpOffset_
= masm
.currentOffset();
6953 restoreInterpreterPCReg();
6954 masm
.jump(handler
.interpretOpWithPCRegLabel());
6956 // Second external entry point: this skips the debug trap for the first op
6957 // and is used by OSR.
6958 interpretOpNoDebugTrapOffset_
= masm
.currentOffset();
6959 restoreInterpreterPCReg();
6960 masm
.jump(&interpretOpAfterDebugTrap
);
6962 // External entry point for Ion prologue bailouts.
6963 bailoutPrologueOffset_
= CodeOffset(masm
.currentOffset());
6964 restoreInterpreterPCReg();
6965 masm
.jump(&bailoutPrologue_
);
6967 // Emit debug trap handler code (target of patchable call instructions). This
6968 // is just a tail call to the debug trap handler trampoline code.
6970 JitRuntime
* jrt
= cx
->runtime()->jitRuntime();
6971 JitCode
* handlerCode
=
6972 jrt
->debugTrapHandler(cx
, DebugTrapHandlerKind::Interpreter
);
6977 debugTrapHandlerOffset_
= masm
.currentOffset();
6978 masm
.jump(handlerCode
);
6982 masm
.haltingAlign(sizeof(void*));
6984 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
6985 size_t numInstructions
= JSOP_LIMIT
* (sizeof(uintptr_t) / sizeof(uint32_t));
6986 AutoForbidPoolsAndNops
afp(&masm
, numInstructions
);
6989 tableOffset_
= masm
.currentOffset();
6991 for (size_t i
= 0; i
< JSOP_LIMIT
; i
++) {
6992 const Label
& opLabel
= opLabels
[i
];
6993 MOZ_ASSERT(opLabel
.bound());
6995 masm
.writeCodePointer(&cl
);
6996 cl
.target()->bind(opLabel
.offset());
6997 masm
.addCodeLabel(cl
);
7003 void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
7004 masm
.bind(handler
.codeCoverageAtPrologueLabel());
7005 #ifdef JS_USE_LINK_REGISTER
7006 masm
.pushReturnAddress();
7009 saveInterpreterPCReg();
7011 masm
.setupUnalignedABICall(R0
.scratchReg());
7012 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
7013 masm
.passABIArg(R0
.scratchReg());
7015 JS_FUNC_TO_DATA_PTR(void*, jit::HandleCodeCoverageAtPrologue
));
7017 restoreInterpreterPCReg();
7020 masm
.bind(handler
.codeCoverageAtPCLabel());
7021 #ifdef JS_USE_LINK_REGISTER
7022 masm
.pushReturnAddress();
7025 saveInterpreterPCReg();
7027 masm
.setupUnalignedABICall(R0
.scratchReg());
7028 masm
.loadBaselineFramePtr(BaselineFrameReg
, R0
.scratchReg());
7029 masm
.passABIArg(R0
.scratchReg());
7030 Register pcReg
= LoadBytecodePC(masm
, R2
.scratchReg());
7031 masm
.passABIArg(pcReg
);
7032 masm
.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::HandleCodeCoverageAtPC
));
7034 restoreInterpreterPCReg();
7038 bool BaselineInterpreterGenerator::generate(BaselineInterpreter
& interpreter
) {
7039 if (!emitPrologue()) {
7043 if (!emitInterpreterLoop()) {
7047 if (!emitEpilogue()) {
7051 if (!emitOutOfLinePostBarrierSlot()) {
7055 emitOutOfLineCodeCoverageInstrumentation();
7058 Linker
linker(masm
);
7060 ReportOutOfMemory(cx
);
7064 JitCode
* code
= linker
.newCode(cx
, CodeKind::Other
);
7069 // Register BaselineInterpreter code with the profiler's JitCode table.
7071 JitcodeGlobalEntry::BaselineInterpreterEntry entry
;
7072 entry
.init(code
, code
->raw(), code
->rawEnd());
7074 JitcodeGlobalTable
* globalTable
=
7075 cx
->runtime()->jitRuntime()->getJitcodeGlobalTable();
7076 if (!globalTable
->addEntry(entry
)) {
7077 ReportOutOfMemory(cx
);
7081 code
->setHasBytecodeMap();
7084 // Patch loads now that we know the tableswitch base address.
7085 CodeLocationLabel
tableLoc(code
, CodeOffset(tableOffset_
));
7086 for (CodeOffset off
: tableLabels_
) {
7087 MacroAssembler::patchNearAddressMove(CodeLocationLabel(code
, off
),
7092 writePerfSpewerJitCodeProfile(code
, "BaselineInterpreter");
7096 vtune::MarkStub(code
, "BaselineInterpreter");
7100 code
, interpretOpOffset_
, interpretOpNoDebugTrapOffset_
,
7101 bailoutPrologueOffset_
.offset(),
7102 profilerEnterFrameToggleOffset_
.offset(),
7103 profilerExitFrameToggleOffset_
.offset(), debugTrapHandlerOffset_
,
7104 std::move(handler
.debugInstrumentationOffsets()),
7105 std::move(debugTrapOffsets_
), std::move(handler
.codeCoverageOffsets()),
7106 std::move(handler
.icReturnOffsets()), handler
.callVMOffsets());
7109 if (cx
->runtime()->geckoProfiler().enabled()) {
7110 interpreter
.toggleProfilerInstrumentation(true);
7113 if (coverage::IsLCovEnabled()) {
7114 interpreter
.toggleCodeCoverageInstrumentationUnchecked(true);
7120 JitCode
* JitRuntime::generateDebugTrapHandler(JSContext
* cx
,
7121 DebugTrapHandlerKind kind
) {
7122 StackMacroAssembler masm
;
7124 AllocatableGeneralRegisterSet
regs(GeneralRegisterSet::All());
7125 regs
.takeUnchecked(BaselineFrameReg
);
7126 regs
.takeUnchecked(ICStubReg
);
7127 if (HasInterpreterPCReg()) {
7128 regs
.takeUnchecked(InterpreterPCReg
);
7130 #ifdef JS_CODEGEN_ARM
7131 regs
.takeUnchecked(BaselineSecondScratchReg
);
7132 masm
.setSecondScratchReg(BaselineSecondScratchReg
);
7134 Register scratch1
= regs
.takeAny();
7135 Register scratch2
= regs
.takeAny();
7136 Register scratch3
= regs
.takeAny();
7138 if (kind
== DebugTrapHandlerKind::Interpreter
) {
7139 // The interpreter calls this for every script when debugging, so check if
7140 // the script has any breakpoints or is in step mode before calling into
7142 Label hasDebugScript
;
7143 Address
scriptAddr(BaselineFrameReg
,
7144 BaselineFrame::reverseOffsetOfInterpreterScript());
7145 masm
.loadPtr(scriptAddr
, scratch1
);
7146 masm
.branchTest32(Assembler::NonZero
,
7147 Address(scratch1
, JSScript::offsetOfMutableFlags()),
7148 Imm32(int32_t(JSScript::MutableFlags::HasDebugScript
)),
7151 masm
.bind(&hasDebugScript
);
7153 if (HasInterpreterPCReg()) {
7154 // Update frame's bytecode pc because the debugger depends on it.
7155 Address
pcAddr(BaselineFrameReg
,
7156 BaselineFrame::reverseOffsetOfInterpreterPC());
7157 masm
.storePtr(InterpreterPCReg
, pcAddr
);
7161 // Load the return address in scratch1.
7162 masm
.loadAbiReturnAddress(scratch1
);
7164 // Load BaselineFrame pointer in scratch2.
7165 masm
.loadBaselineFramePtr(BaselineFrameReg
, scratch2
);
7167 // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
7168 // the stub frame has a nullptr ICStub pointer, since this pointer is marked
7170 masm
.movePtr(ImmPtr(nullptr), ICStubReg
);
7171 EmitBaselineEnterStubFrame(masm
, scratch3
);
7173 using Fn
= bool (*)(JSContext
*, BaselineFrame
*, uint8_t*);
7174 VMFunctionId id
= VMFunctionToId
<Fn
, jit::HandleDebugTrap
>::id
;
7175 TrampolinePtr code
= cx
->runtime()->jitRuntime()->getVMWrapper(id
);
7177 masm
.push(scratch1
);
7178 masm
.push(scratch2
);
7179 EmitBaselineCallVM(code
, masm
);
7181 EmitBaselineLeaveStubFrame(masm
);
7183 if (kind
== DebugTrapHandlerKind::Interpreter
) {
7184 // We have to reload the bytecode pc register.
7185 Address
pcAddr(BaselineFrameReg
,
7186 BaselineFrame::reverseOffsetOfInterpreterPC());
7187 masm
.loadPtr(pcAddr
, InterpreterPCRegAtDispatch
);
7191 Linker
linker(masm
);
7192 JitCode
* handlerCode
= linker
.newCode(cx
, CodeKind::Other
);
7198 writePerfSpewerJitCodeProfile(handlerCode
, "DebugTrapHandler");
7201 vtune::MarkStub(handlerCode
, "DebugTrapHandler");