Bug 1874684 - Part 29: Update spec fixme notes. r=mgaudet
[gecko.git] / js / src / jit / BaselineCodeGen.cpp
blob8ab65458ea3c4729dbbd232c63faeee523c13834
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineCodeGen.h"
9 #include "mozilla/Casting.h"
11 #include "gc/GC.h"
12 #include "jit/BaselineIC.h"
13 #include "jit/BaselineJIT.h"
14 #include "jit/CacheIRCompiler.h"
15 #include "jit/CacheIRGenerator.h"
16 #include "jit/CalleeToken.h"
17 #include "jit/FixedList.h"
18 #include "jit/IonOptimizationLevels.h"
19 #include "jit/JitcodeMap.h"
20 #include "jit/JitFrames.h"
21 #include "jit/JitRuntime.h"
22 #include "jit/JitSpewer.h"
23 #include "jit/Linker.h"
24 #include "jit/PerfSpewer.h"
25 #include "jit/SharedICHelpers.h"
26 #include "jit/TemplateObject.h"
27 #include "jit/TrialInlining.h"
28 #include "jit/VMFunctions.h"
29 #include "js/friend/ErrorMessages.h" // JSMSG_*
30 #include "js/UniquePtr.h"
31 #include "vm/AsyncFunction.h"
32 #include "vm/AsyncIteration.h"
33 #include "vm/BuiltinObjectKind.h"
34 #include "vm/EnvironmentObject.h"
35 #include "vm/FunctionFlags.h" // js::FunctionFlags
36 #include "vm/Interpreter.h"
37 #include "vm/JSFunction.h"
38 #include "vm/Time.h"
39 #ifdef MOZ_VTUNE
40 # include "vtune/VTuneWrapper.h"
41 #endif
43 #include "debugger/DebugAPI-inl.h"
44 #include "jit/BaselineFrameInfo-inl.h"
45 #include "jit/JitHints-inl.h"
46 #include "jit/JitScript-inl.h"
47 #include "jit/MacroAssembler-inl.h"
48 #include "jit/SharedICHelpers-inl.h"
49 #include "jit/TemplateObject-inl.h"
50 #include "jit/VMFunctionList-inl.h"
51 #include "vm/Interpreter-inl.h"
52 #include "vm/JSScript-inl.h"
54 using namespace js;
55 using namespace js::jit;
57 using JS::TraceKind;
59 using mozilla::AssertedCast;
60 using mozilla::Maybe;
62 namespace js {
64 class PlainObject;
66 namespace jit {
68 BaselineCompilerHandler::BaselineCompilerHandler(JSContext* cx,
69 MacroAssembler& masm,
70 TempAllocator& alloc,
71 JSScript* script)
72 : frame_(script, masm),
73 alloc_(alloc),
74 analysis_(alloc, script),
75 #ifdef DEBUG
76 masm_(masm),
77 #endif
78 script_(script),
79 pc_(script->code()),
80 icEntryIndex_(0),
81 compileDebugInstrumentation_(script->isDebuggee()),
82 ionCompileable_(IsIonEnabled(cx) && CanIonCompileScript(cx, script)) {
85 BaselineInterpreterHandler::BaselineInterpreterHandler(JSContext* cx,
86 MacroAssembler& masm)
87 : frame_(masm) {}
89 template <typename Handler>
90 template <typename... HandlerArgs>
91 BaselineCodeGen<Handler>::BaselineCodeGen(JSContext* cx, TempAllocator& alloc,
92 HandlerArgs&&... args)
93 : handler(cx, masm, std::forward<HandlerArgs>(args)...),
94 cx(cx),
95 masm(cx, alloc),
96 frame(handler.frame()) {}
98 BaselineCompiler::BaselineCompiler(JSContext* cx, TempAllocator& alloc,
99 JSScript* script)
100 : BaselineCodeGen(cx, alloc, /* HandlerArgs = */ alloc, script) {
101 #ifdef JS_CODEGEN_NONE
102 MOZ_CRASH();
103 #endif
106 BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext* cx,
107 TempAllocator& alloc)
108 : BaselineCodeGen(cx, alloc /* no handlerArgs */) {}
110 bool BaselineCompilerHandler::init(JSContext* cx) {
111 if (!analysis_.init(alloc_)) {
112 return false;
115 uint32_t len = script_->length();
117 if (!labels_.init(alloc_, len)) {
118 return false;
121 for (size_t i = 0; i < len; i++) {
122 new (&labels_[i]) Label();
125 if (!frame_.init(alloc_)) {
126 return false;
129 return true;
132 bool BaselineCompiler::init() {
133 if (!handler.init(cx)) {
134 return false;
137 return true;
140 bool BaselineCompilerHandler::recordCallRetAddr(JSContext* cx,
141 RetAddrEntry::Kind kind,
142 uint32_t retOffset) {
143 uint32_t pcOffset = script_->pcToOffset(pc_);
145 // Entries must be sorted by pcOffset for binary search to work.
146 // See BaselineScript::retAddrEntryFromPCOffset.
147 MOZ_ASSERT_IF(!retAddrEntries_.empty(),
148 retAddrEntries_.back().pcOffset() <= pcOffset);
150 // Similarly, entries must be sorted by return offset and this offset must be
151 // unique. See BaselineScript::retAddrEntryFromReturnOffset.
152 MOZ_ASSERT_IF(!retAddrEntries_.empty() && !masm_.oom(),
153 retAddrEntries_.back().returnOffset().offset() < retOffset);
155 if (!retAddrEntries_.emplaceBack(pcOffset, kind, CodeOffset(retOffset))) {
156 ReportOutOfMemory(cx);
157 return false;
160 return true;
163 bool BaselineInterpreterHandler::recordCallRetAddr(JSContext* cx,
164 RetAddrEntry::Kind kind,
165 uint32_t retOffset) {
166 switch (kind) {
167 case RetAddrEntry::Kind::DebugPrologue:
168 MOZ_ASSERT(callVMOffsets_.debugPrologueOffset == 0,
169 "expected single DebugPrologue call");
170 callVMOffsets_.debugPrologueOffset = retOffset;
171 break;
172 case RetAddrEntry::Kind::DebugEpilogue:
173 MOZ_ASSERT(callVMOffsets_.debugEpilogueOffset == 0,
174 "expected single DebugEpilogue call");
175 callVMOffsets_.debugEpilogueOffset = retOffset;
176 break;
177 case RetAddrEntry::Kind::DebugAfterYield:
178 MOZ_ASSERT(callVMOffsets_.debugAfterYieldOffset == 0,
179 "expected single DebugAfterYield call");
180 callVMOffsets_.debugAfterYieldOffset = retOffset;
181 break;
182 default:
183 break;
186 return true;
189 bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
190 JSContext* cx, CodeOffset offset) {
191 if (!debugInstrumentationOffsets_.append(offset.offset())) {
192 ReportOutOfMemory(cx);
193 return false;
195 return true;
198 MethodStatus BaselineCompiler::compile() {
199 AutoCreatedBy acb(masm, "BaselineCompiler::compile");
201 Rooted<JSScript*> script(cx, handler.script());
202 JitSpew(JitSpew_BaselineScripts, "Baseline compiling script %s:%u:%u (%p)",
203 script->filename(), script->lineno(),
204 script->column().oneOriginValue(), script.get());
206 JitSpew(JitSpew_Codegen, "# Emitting baseline code for script %s:%u:%u",
207 script->filename(), script->lineno(),
208 script->column().oneOriginValue());
210 AutoIncrementalTimer timer(cx->realm()->timers.baselineCompileTime);
212 AutoKeepJitScripts keepJitScript(cx);
213 if (!script->ensureHasJitScript(cx, keepJitScript)) {
214 return Method_Error;
217 // When code coverage is enabled, we have to create the ScriptCounts if they
218 // do not exist.
219 if (!script->hasScriptCounts() && cx->realm()->collectCoverageForDebug()) {
220 if (!script->initScriptCounts(cx)) {
221 return Method_Error;
225 if (!JitOptions.disableJitHints &&
226 cx->runtime()->jitRuntime()->hasJitHintsMap()) {
227 JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap();
228 jitHints->setEagerBaselineHint(script);
231 // Suppress GC during compilation.
232 gc::AutoSuppressGC suppressGC(cx);
234 if (!script->jitScript()->ensureHasCachedBaselineJitData(cx, script)) {
235 return Method_Error;
238 MOZ_ASSERT(!script->hasBaselineScript());
240 perfSpewer_.recordOffset(masm, "Prologue");
241 if (!emitPrologue()) {
242 return Method_Error;
245 MethodStatus status = emitBody();
246 if (status != Method_Compiled) {
247 return status;
250 perfSpewer_.recordOffset(masm, "Epilogue");
251 if (!emitEpilogue()) {
252 return Method_Error;
255 perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
256 if (!emitOutOfLinePostBarrierSlot()) {
257 return Method_Error;
260 AutoCreatedBy acb2(masm, "exception_tail");
261 Linker linker(masm);
262 if (masm.oom()) {
263 ReportOutOfMemory(cx);
264 return Method_Error;
267 JitCode* code = linker.newCode(cx, CodeKind::Baseline);
268 if (!code) {
269 return Method_Error;
272 UniquePtr<BaselineScript> baselineScript(
273 BaselineScript::New(
274 cx, warmUpCheckPrologueOffset_.offset(),
275 profilerEnterFrameToggleOffset_.offset(),
276 profilerExitFrameToggleOffset_.offset(),
277 handler.retAddrEntries().length(), handler.osrEntries().length(),
278 debugTrapEntries_.length(), script->resumeOffsets().size()),
279 JS::DeletePolicy<BaselineScript>(cx->runtime()));
280 if (!baselineScript) {
281 return Method_Error;
284 baselineScript->setMethod(code);
286 JitSpew(JitSpew_BaselineScripts,
287 "Created BaselineScript %p (raw %p) for %s:%u:%u",
288 (void*)baselineScript.get(), (void*)code->raw(), script->filename(),
289 script->lineno(), script->column().oneOriginValue());
291 baselineScript->copyRetAddrEntries(handler.retAddrEntries().begin());
292 baselineScript->copyOSREntries(handler.osrEntries().begin());
293 baselineScript->copyDebugTrapEntries(debugTrapEntries_.begin());
295 // If profiler instrumentation is enabled, toggle instrumentation on.
296 if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
297 cx->runtime())) {
298 baselineScript->toggleProfilerInstrumentation(true);
301 // Compute native resume addresses for the script's resume offsets.
302 baselineScript->computeResumeNativeOffsets(script, resumeOffsetEntries_);
304 if (compileDebugInstrumentation()) {
305 baselineScript->setHasDebugInstrumentation();
308 // Always register a native => bytecode mapping entry, since profiler can be
309 // turned on with baseline jitcode on stack, and baseline jitcode cannot be
310 // invalidated.
312 JitSpew(JitSpew_Profiling,
313 "Added JitcodeGlobalEntry for baseline script %s:%u:%u (%p)",
314 script->filename(), script->lineno(),
315 script->column().oneOriginValue(), baselineScript.get());
317 // Generate profiling string.
318 UniqueChars str = GeckoProfilerRuntime::allocProfileString(cx, script);
319 if (!str) {
320 return Method_Error;
323 auto entry = MakeJitcodeGlobalEntry<BaselineEntry>(
324 cx, code, code->raw(), code->rawEnd(), script, std::move(str));
325 if (!entry) {
326 return Method_Error;
329 JitcodeGlobalTable* globalTable =
330 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
331 if (!globalTable->addEntry(std::move(entry))) {
332 ReportOutOfMemory(cx);
333 return Method_Error;
336 // Mark the jitcode as having a bytecode map.
337 code->setHasBytecodeMap();
340 script->jitScript()->setBaselineScript(script, baselineScript.release());
342 perfSpewer_.saveProfile(cx, script, code);
344 #ifdef MOZ_VTUNE
345 vtune::MarkScript(code, script, "baseline");
346 #endif
348 return Method_Compiled;
351 // On most platforms we use a dedicated bytecode PC register to avoid many
352 // dependent loads and stores for sequences of simple bytecode ops. This
353 // register must be saved/restored around VM and IC calls.
355 // On 32-bit x86 we don't have enough registers for this (because R0-R2 require
356 // 6 registers) so there we always store the pc on the frame.
357 static constexpr bool HasInterpreterPCReg() {
358 return InterpreterPCReg != InvalidReg;
361 static Register LoadBytecodePC(MacroAssembler& masm, Register scratch) {
362 if (HasInterpreterPCReg()) {
363 return InterpreterPCReg;
366 Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
367 masm.loadPtr(pcAddr, scratch);
368 return scratch;
371 static void LoadInt8Operand(MacroAssembler& masm, Register dest) {
372 Register pc = LoadBytecodePC(masm, dest);
373 masm.load8SignExtend(Address(pc, sizeof(jsbytecode)), dest);
376 static void LoadUint8Operand(MacroAssembler& masm, Register dest) {
377 Register pc = LoadBytecodePC(masm, dest);
378 masm.load8ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
381 static void LoadUint16Operand(MacroAssembler& masm, Register dest) {
382 Register pc = LoadBytecodePC(masm, dest);
383 masm.load16ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
386 static void LoadInt32Operand(MacroAssembler& masm, Register dest) {
387 Register pc = LoadBytecodePC(masm, dest);
388 masm.load32(Address(pc, sizeof(jsbytecode)), dest);
391 static void LoadInt32OperandSignExtendToPtr(MacroAssembler& masm, Register pc,
392 Register dest) {
393 masm.load32SignExtendToPtr(Address(pc, sizeof(jsbytecode)), dest);
396 static void LoadUint24Operand(MacroAssembler& masm, size_t offset,
397 Register dest) {
398 // Load the opcode and operand, then left shift to discard the opcode.
399 Register pc = LoadBytecodePC(masm, dest);
400 masm.load32(Address(pc, offset), dest);
401 masm.rshift32(Imm32(8), dest);
404 static void LoadInlineValueOperand(MacroAssembler& masm, ValueOperand dest) {
405 // Note: the Value might be unaligned but as above we rely on all our
406 // platforms having appropriate support for unaligned accesses (except for
407 // floating point instructions on ARM).
408 Register pc = LoadBytecodePC(masm, dest.scratchReg());
409 masm.loadUnalignedValue(Address(pc, sizeof(jsbytecode)), dest);
412 template <>
413 void BaselineCompilerCodeGen::loadScript(Register dest) {
414 masm.movePtr(ImmGCPtr(handler.script()), dest);
417 template <>
418 void BaselineInterpreterCodeGen::loadScript(Register dest) {
419 masm.loadPtr(frame.addressOfInterpreterScript(), dest);
422 template <>
423 void BaselineCompilerCodeGen::saveInterpreterPCReg() {}
425 template <>
426 void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
427 if (HasInterpreterPCReg()) {
428 masm.storePtr(InterpreterPCReg, frame.addressOfInterpreterPC());
432 template <>
433 void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}
435 template <>
436 void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
437 if (HasInterpreterPCReg()) {
438 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCReg);
442 template <>
443 void BaselineCompilerCodeGen::emitInitializeLocals() {
444 // Initialize all locals to |undefined|. Lexical bindings are temporal
445 // dead zoned in bytecode.
447 size_t n = frame.nlocals();
448 if (n == 0) {
449 return;
452 // Use R0 to minimize code size. If the number of locals to push is <
453 // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
454 // and inline. Otherwise, they're emitted in a partially unrolled loop.
455 static const size_t LOOP_UNROLL_FACTOR = 4;
456 size_t toPushExtra = n % LOOP_UNROLL_FACTOR;
458 masm.moveValue(UndefinedValue(), R0);
460 // Handle any extra pushes left over by the optional unrolled loop below.
461 for (size_t i = 0; i < toPushExtra; i++) {
462 masm.pushValue(R0);
465 // Partially unrolled loop of pushes.
466 if (n >= LOOP_UNROLL_FACTOR) {
467 size_t toPush = n - toPushExtra;
468 MOZ_ASSERT(toPush % LOOP_UNROLL_FACTOR == 0);
469 MOZ_ASSERT(toPush >= LOOP_UNROLL_FACTOR);
470 masm.move32(Imm32(toPush), R1.scratchReg());
471 // Emit unrolled loop with 4 pushes per iteration.
472 Label pushLoop;
473 masm.bind(&pushLoop);
474 for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++) {
475 masm.pushValue(R0);
477 masm.branchSub32(Assembler::NonZero, Imm32(LOOP_UNROLL_FACTOR),
478 R1.scratchReg(), &pushLoop);
482 template <>
483 void BaselineInterpreterCodeGen::emitInitializeLocals() {
484 // Push |undefined| for all locals.
486 Register scratch = R0.scratchReg();
487 loadScript(scratch);
488 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
489 masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
490 scratch);
491 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNfixed()), scratch);
493 Label top, done;
494 masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
495 masm.bind(&top);
497 masm.pushValue(UndefinedValue());
498 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
500 masm.bind(&done);
503 // On input:
504 // R2.scratchReg() contains object being written to.
505 // Called with the baseline stack synced, except for R0 which is preserved.
506 // All other registers are usable as scratch.
507 // This calls:
508 // void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
509 template <typename Handler>
510 bool BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot() {
511 AutoCreatedBy acb(masm,
512 "BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot");
514 if (!postBarrierSlot_.used()) {
515 return true;
518 masm.bind(&postBarrierSlot_);
520 #ifdef JS_USE_LINK_REGISTER
521 masm.pushReturnAddress();
522 #endif
524 Register objReg = R2.scratchReg();
526 // Check one element cache to avoid VM call.
527 Label skipBarrier;
528 auto* lastCellAddr = cx->runtime()->gc.addressOfLastBufferedWholeCell();
529 masm.branchPtr(Assembler::Equal, AbsoluteAddress(lastCellAddr), objReg,
530 &skipBarrier);
532 saveInterpreterPCReg();
534 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
535 MOZ_ASSERT(!regs.has(FramePointer));
536 regs.take(R0);
537 regs.take(objReg);
538 Register scratch = regs.takeAny();
540 masm.pushValue(R0);
542 using Fn = void (*)(JSRuntime* rt, js::gc::Cell* cell);
543 masm.setupUnalignedABICall(scratch);
544 masm.movePtr(ImmPtr(cx->runtime()), scratch);
545 masm.passABIArg(scratch);
546 masm.passABIArg(objReg);
547 masm.callWithABI<Fn, PostWriteBarrier>();
549 restoreInterpreterPCReg();
551 masm.popValue(R0);
553 masm.bind(&skipBarrier);
554 masm.ret();
555 return true;
558 // Scan the a cache IR stub's fields and create an allocation site for any that
559 // refer to the catch-all unknown allocation site. This will be the case for
560 // stubs created when running in the interpreter. This happens on transition to
561 // baseline.
562 static bool CreateAllocSitesForCacheIRStub(JSScript* script, uint32_t pcOffset,
563 ICCacheIRStub* stub) {
564 const CacheIRStubInfo* stubInfo = stub->stubInfo();
565 uint8_t* stubData = stub->stubDataStart();
567 ICScript* icScript = script->jitScript()->icScript();
569 uint32_t field = 0;
570 size_t offset = 0;
571 while (true) {
572 StubField::Type fieldType = stubInfo->fieldType(field);
573 if (fieldType == StubField::Type::Limit) {
574 break;
577 if (fieldType == StubField::Type::AllocSite) {
578 gc::AllocSite* site =
579 stubInfo->getPtrStubField<ICCacheIRStub, gc::AllocSite>(stub, offset);
580 if (site->kind() == gc::AllocSite::Kind::Unknown) {
581 gc::AllocSite* newSite =
582 icScript->getOrCreateAllocSite(script, pcOffset);
583 if (!newSite) {
584 return false;
587 stubInfo->replaceStubRawWord(stubData, offset, uintptr_t(site),
588 uintptr_t(newSite));
592 field++;
593 offset += StubField::sizeInBytes(fieldType);
596 return true;
599 static void CreateAllocSitesForICChain(JSScript* script, uint32_t pcOffset,
600 uint32_t entryIndex) {
601 JitScript* jitScript = script->jitScript();
602 ICStub* stub = jitScript->icEntry(entryIndex).firstStub();
604 while (!stub->isFallback()) {
605 if (!CreateAllocSitesForCacheIRStub(script, pcOffset,
606 stub->toCacheIRStub())) {
607 // This is an optimization and safe to skip if we hit OOM or per-zone
608 // limit.
609 return;
611 stub = stub->toCacheIRStub()->next();
615 template <>
616 bool BaselineCompilerCodeGen::emitNextIC() {
617 AutoCreatedBy acb(masm, "emitNextIC");
619 // Emit a call to an IC stored in JitScript. Calls to this must match the
620 // ICEntry order in JitScript: first the non-op IC entries for |this| and
621 // formal arguments, then the for-op IC entries for JOF_IC ops.
623 JSScript* script = handler.script();
624 uint32_t pcOffset = script->pcToOffset(handler.pc());
626 // We don't use every ICEntry and we can skip unreachable ops, so we have
627 // to loop until we find an ICEntry for the current pc.
628 const ICFallbackStub* stub;
629 uint32_t entryIndex;
630 do {
631 stub = script->jitScript()->fallbackStub(handler.icEntryIndex());
632 entryIndex = handler.icEntryIndex();
633 handler.moveToNextICEntry();
634 } while (stub->pcOffset() < pcOffset);
636 MOZ_ASSERT(stub->pcOffset() == pcOffset);
637 MOZ_ASSERT(BytecodeOpHasIC(JSOp(*handler.pc())));
639 if (BytecodeOpCanHaveAllocSite(JSOp(*handler.pc()))) {
640 CreateAllocSitesForICChain(script, pcOffset, entryIndex);
643 // Load stub pointer into ICStubReg.
644 masm.loadPtr(frame.addressOfICScript(), ICStubReg);
645 size_t firstStubOffset = ICScript::offsetOfFirstStub(entryIndex);
646 masm.loadPtr(Address(ICStubReg, firstStubOffset), ICStubReg);
648 CodeOffset returnOffset;
649 EmitCallIC(masm, &returnOffset);
651 RetAddrEntry::Kind kind = RetAddrEntry::Kind::IC;
652 if (!handler.retAddrEntries().emplaceBack(pcOffset, kind, returnOffset)) {
653 ReportOutOfMemory(cx);
654 return false;
657 return true;
660 template <>
661 bool BaselineInterpreterCodeGen::emitNextIC() {
662 saveInterpreterPCReg();
663 masm.loadPtr(frame.addressOfInterpreterICEntry(), ICStubReg);
664 masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
665 masm.call(Address(ICStubReg, ICStub::offsetOfStubCode()));
666 uint32_t returnOffset = masm.currentOffset();
667 restoreInterpreterPCReg();
669 // If this is an IC for a bytecode op where Ion may inline scripts, we need to
670 // record the return offset for Ion bailouts.
671 if (handler.currentOp()) {
672 JSOp op = *handler.currentOp();
673 MOZ_ASSERT(BytecodeOpHasIC(op));
674 if (IsIonInlinableOp(op)) {
675 if (!handler.icReturnOffsets().emplaceBack(returnOffset, op)) {
676 return false;
681 return true;
684 template <>
685 void BaselineCompilerCodeGen::computeFrameSize(Register dest) {
686 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
687 masm.move32(Imm32(frame.frameSize()), dest);
690 template <>
691 void BaselineInterpreterCodeGen::computeFrameSize(Register dest) {
692 // dest := FramePointer - StackPointer.
693 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
694 masm.mov(FramePointer, dest);
695 masm.subStackPtrFrom(dest);
698 template <typename Handler>
699 void BaselineCodeGen<Handler>::prepareVMCall() {
700 pushedBeforeCall_ = masm.framePushed();
701 #ifdef DEBUG
702 inCall_ = true;
703 #endif
705 // Ensure everything is synced.
706 frame.syncStack(0);
709 template <>
710 void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
711 uint32_t argSize, Register scratch) {
712 #ifdef DEBUG
713 masm.store32(Imm32(frame.frameSize()), frame.addressOfDebugFrameSize());
714 #endif
716 masm.pushFrameDescriptor(FrameType::BaselineJS);
719 template <>
720 void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
721 uint32_t argSize, Register scratch) {
722 #ifdef DEBUG
723 // Store the frame size without VMFunction arguments in debug builds.
724 // scratch := FramePointer - StackPointer - argSize.
725 masm.mov(FramePointer, scratch);
726 masm.subStackPtrFrom(scratch);
727 masm.sub32(Imm32(argSize), scratch);
728 masm.store32(scratch, frame.addressOfDebugFrameSize());
729 #endif
731 masm.pushFrameDescriptor(FrameType::BaselineJS);
734 static uint32_t GetVMFunctionArgSize(const VMFunctionData& fun) {
735 return fun.explicitStackSlots() * sizeof(void*);
738 template <typename Handler>
739 bool BaselineCodeGen<Handler>::callVMInternal(VMFunctionId id,
740 RetAddrEntry::Kind kind,
741 CallVMPhase phase) {
742 #ifdef DEBUG
743 // Assert prepareVMCall() has been called.
744 MOZ_ASSERT(inCall_);
745 inCall_ = false;
746 #endif
748 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
749 const VMFunctionData& fun = GetVMFunction(id);
751 uint32_t argSize = GetVMFunctionArgSize(fun);
753 // Assert all arguments were pushed.
754 MOZ_ASSERT(masm.framePushed() - pushedBeforeCall_ == argSize);
756 saveInterpreterPCReg();
758 if (phase == CallVMPhase::AfterPushingLocals) {
759 storeFrameSizeAndPushDescriptor(argSize, R0.scratchReg());
760 } else {
761 MOZ_ASSERT(phase == CallVMPhase::BeforePushingLocals);
762 #ifdef DEBUG
763 uint32_t frameBaseSize = BaselineFrame::frameSizeForNumValueSlots(0);
764 masm.store32(Imm32(frameBaseSize), frame.addressOfDebugFrameSize());
765 #endif
766 masm.pushFrameDescriptor(FrameType::BaselineJS);
768 // Perform the call.
769 masm.call(code);
770 uint32_t callOffset = masm.currentOffset();
772 // Pop arguments from framePushed.
773 masm.implicitPop(argSize);
775 restoreInterpreterPCReg();
777 return handler.recordCallRetAddr(cx, kind, callOffset);
780 template <typename Handler>
781 template <typename Fn, Fn fn>
782 bool BaselineCodeGen<Handler>::callVM(RetAddrEntry::Kind kind,
783 CallVMPhase phase) {
784 VMFunctionId fnId = VMFunctionToId<Fn, fn>::id;
785 return callVMInternal(fnId, kind, phase);
788 template <typename Handler>
789 bool BaselineCodeGen<Handler>::emitStackCheck() {
790 Label skipCall;
791 if (handler.mustIncludeSlotsInStackCheck()) {
792 // Subtract the size of script->nslots() first.
793 Register scratch = R1.scratchReg();
794 masm.moveStackPtrTo(scratch);
795 subtractScriptSlotsSize(scratch, R2.scratchReg());
796 masm.branchPtr(Assembler::BelowOrEqual,
797 AbsoluteAddress(cx->addressOfJitStackLimit()), scratch,
798 &skipCall);
799 } else {
800 masm.branchStackPtrRhs(Assembler::BelowOrEqual,
801 AbsoluteAddress(cx->addressOfJitStackLimit()),
802 &skipCall);
805 prepareVMCall();
806 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
807 pushArg(R1.scratchReg());
809 const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
810 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::StackCheck;
812 using Fn = bool (*)(JSContext*, BaselineFrame*);
813 if (!callVM<Fn, CheckOverRecursedBaseline>(kind, phase)) {
814 return false;
817 masm.bind(&skipCall);
818 return true;
821 static void EmitCallFrameIsDebuggeeCheck(MacroAssembler& masm) {
822 using Fn = void (*)(BaselineFrame* frame);
823 masm.setupUnalignedABICall(R0.scratchReg());
824 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
825 masm.passABIArg(R0.scratchReg());
826 masm.callWithABI<Fn, FrameIsDebuggeeCheck>();
829 template <>
830 bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
831 if (handler.compileDebugInstrumentation()) {
832 EmitCallFrameIsDebuggeeCheck(masm);
834 return true;
837 template <>
838 bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
839 // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
840 // enabled.
842 // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
843 // call. Consider moving the callWithABI out-of-line.
845 Label skipCheck;
846 CodeOffset toggleOffset = masm.toggledJump(&skipCheck);
848 saveInterpreterPCReg();
849 EmitCallFrameIsDebuggeeCheck(masm);
850 restoreInterpreterPCReg();
852 masm.bind(&skipCheck);
853 return handler.addDebugInstrumentationOffset(cx, toggleOffset);
856 static void MaybeIncrementCodeCoverageCounter(MacroAssembler& masm,
857 JSScript* script,
858 jsbytecode* pc) {
859 if (!script->hasScriptCounts()) {
860 return;
862 PCCounts* counts = script->maybeGetPCCounts(pc);
863 uint64_t* counterAddr = &counts->numExec();
864 masm.inc64(AbsoluteAddress(counterAddr));
867 template <>
868 bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
869 // If the main instruction is not a jump target, then we emit the
870 // corresponding code coverage counter.
871 JSScript* script = handler.script();
872 jsbytecode* main = script->main();
873 if (!BytecodeIsJumpTarget(JSOp(*main))) {
874 MaybeIncrementCodeCoverageCounter(masm, script, main);
876 return true;
879 template <>
880 bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
881 Label skipCoverage;
882 CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
883 masm.call(handler.codeCoverageAtPrologueLabel());
884 masm.bind(&skipCoverage);
885 return handler.codeCoverageOffsets().append(toggleOffset.offset());
888 template <>
889 void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg,
890 Register scratch) {
891 uint32_t slotsSize = handler.script()->nslots() * sizeof(Value);
892 masm.subPtr(Imm32(slotsSize), reg);
895 template <>
896 void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg,
897 Register scratch) {
898 // reg = reg - script->nslots() * sizeof(Value)
899 MOZ_ASSERT(reg != scratch);
900 loadScript(scratch);
901 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
902 masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
903 scratch);
904 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNslots()), scratch);
905 static_assert(sizeof(Value) == 8,
906 "shift by 3 below assumes Value is 8 bytes");
907 masm.lshiftPtr(Imm32(3), scratch);
908 masm.subPtr(scratch, reg);
911 template <>
912 void BaselineCompilerCodeGen::loadGlobalLexicalEnvironment(Register dest) {
913 MOZ_ASSERT(!handler.script()->hasNonSyntacticScope());
914 masm.movePtr(ImmGCPtr(&cx->global()->lexicalEnvironment()), dest);
917 template <>
918 void BaselineInterpreterCodeGen::loadGlobalLexicalEnvironment(Register dest) {
919 masm.loadGlobalObjectData(dest);
920 masm.loadPtr(Address(dest, GlobalObjectData::offsetOfLexicalEnvironment()),
921 dest);
924 template <>
925 void BaselineCompilerCodeGen::pushGlobalLexicalEnvironmentValue(
926 ValueOperand scratch) {
927 frame.push(ObjectValue(cx->global()->lexicalEnvironment()));
930 template <>
931 void BaselineInterpreterCodeGen::pushGlobalLexicalEnvironmentValue(
932 ValueOperand scratch) {
933 loadGlobalLexicalEnvironment(scratch.scratchReg());
934 masm.tagValue(JSVAL_TYPE_OBJECT, scratch.scratchReg(), scratch);
935 frame.push(scratch);
938 template <>
939 void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest) {
940 JSObject* thisObj = cx->global()->lexicalEnvironment().thisObject();
941 masm.moveValue(ObjectValue(*thisObj), dest);
944 template <>
945 void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest) {
946 Register scratch = dest.scratchReg();
947 loadGlobalLexicalEnvironment(scratch);
948 static constexpr size_t SlotOffset =
949 GlobalLexicalEnvironmentObject::offsetOfThisValueSlot();
950 masm.loadValue(Address(scratch, SlotOffset), dest);
953 template <>
954 void BaselineCompilerCodeGen::pushScriptArg() {
955 pushArg(ImmGCPtr(handler.script()));
958 template <>
959 void BaselineInterpreterCodeGen::pushScriptArg() {
960 pushArg(frame.addressOfInterpreterScript());
963 template <>
964 void BaselineCompilerCodeGen::pushBytecodePCArg() {
965 pushArg(ImmPtr(handler.pc()));
968 template <>
969 void BaselineInterpreterCodeGen::pushBytecodePCArg() {
970 if (HasInterpreterPCReg()) {
971 pushArg(InterpreterPCReg);
972 } else {
973 pushArg(frame.addressOfInterpreterPC());
977 static gc::Cell* GetScriptGCThing(JSScript* script, jsbytecode* pc,
978 ScriptGCThingType type) {
979 switch (type) {
980 case ScriptGCThingType::Atom:
981 return script->getAtom(pc);
982 case ScriptGCThingType::String:
983 return script->getString(pc);
984 case ScriptGCThingType::RegExp:
985 return script->getRegExp(pc);
986 case ScriptGCThingType::Object:
987 return script->getObject(pc);
988 case ScriptGCThingType::Function:
989 return script->getFunction(pc);
990 case ScriptGCThingType::Scope:
991 return script->getScope(pc);
992 case ScriptGCThingType::BigInt:
993 return script->getBigInt(pc);
995 MOZ_CRASH("Unexpected GCThing type");
998 template <>
999 void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type,
1000 Register dest,
1001 Register scratch) {
1002 gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
1003 masm.movePtr(ImmGCPtr(thing), dest);
// Interpreter specialization: the GC thing must be loaded at run time. The
// operand is an index into the script's PrivateScriptData gcthings array;
// the stored entries are tagged GCCellPtrs, so after the load we strip the
// trace-kind tag bits to get a raw Cell pointer in |dest|.
template <>
void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type,
                                                   Register dest,
                                                   Register scratch) {
  MOZ_ASSERT(dest != scratch);
  // Load the index in |scratch|.
  LoadInt32Operand(masm, scratch);
  // Load the GCCellPtr.
  loadScript(dest);
  masm.loadPtr(Address(dest, JSScript::offsetOfPrivateData()), dest);
  masm.loadPtr(BaseIndex(dest, scratch, ScalePointer,
                         PrivateScriptData::offsetOfGCThings()),
               dest);
  // Clear the tag bits. The static_asserts pin the exact tag value for each
  // kind so an xor with that constant is guaranteed to clear it.
  switch (type) {
    case ScriptGCThingType::Atom:
    case ScriptGCThingType::String:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::String) == 2,
                    "Unexpected tag bits for string GCCellPtr");
      masm.xorPtr(Imm32(2), dest);
      break;
    case ScriptGCThingType::RegExp:
    case ScriptGCThingType::Object:
    case ScriptGCThingType::Function:
      // No-op because GCCellPtr tag bits are zero for objects.
      static_assert(uintptr_t(TraceKind::Object) == 0,
                    "Unexpected tag bits for object GCCellPtr");
      break;
    case ScriptGCThingType::BigInt:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::BigInt) == 1,
                    "Unexpected tag bits for BigInt GCCellPtr");
      masm.xorPtr(Imm32(1), dest);
      break;
    case ScriptGCThingType::Scope:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::Scope) >= JS::OutOfLineTraceKindMask,
                    "Expected Scopes to have OutOfLineTraceKindMask tag");
      masm.xorPtr(Imm32(JS::OutOfLineTraceKindMask), dest);
      break;
#ifdef DEBUG
  // Assert low bits are not set.
  Label ok;
  masm.branchTestPtr(Assembler::Zero, dest, Imm32(0b111), &ok);
  masm.assumeUnreachable("GC pointer with tag bits set");
  masm.bind(&ok);
#endif
// Push the GC thing referenced by the current bytecode op as a VM-call
// argument. Compiler specialization: resolved at compile time, pushed as an
// immediate; both scratch registers are unused.
template <>
void BaselineCompilerCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
                                                   Register scratch1,
                                                   Register scratch2) {
  gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
  pushArg(ImmGCPtr(thing));
// Interpreter specialization: load the GC thing at run time into |scratch1|
// (clobbering |scratch2|) and push that register as the VM-call argument.
template <>
void BaselineInterpreterCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
                                                      Register scratch1,
                                                      Register scratch2) {
  loadScriptGCThing(type, scratch1, scratch2);
  pushArg(scratch1);
// Convenience wrapper: push the current op's atom operand (a name) as a
// VM-call argument.
template <typename Handler>
void BaselineCodeGen<Handler>::pushScriptNameArg(Register scratch1,
                                                 Register scratch2) {
  pushScriptGCThingArg(ScriptGCThingType::Atom, scratch1, scratch2);
// Push the current op's uint8 operand as a VM-call argument. Compiler
// specialization: the value is known now, so push it as an immediate.
template <>
void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT8);
  pushArg(Imm32(GET_UINT8(handler.pc())));
// Interpreter specialization: load the uint8 operand from the current pc at
// run time and push it from |scratch|.
template <>
void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch) {
  LoadUint8Operand(masm, scratch);
  pushArg(scratch);
// Push the current op's uint16 operand as a VM-call argument. Compiler
// specialization: the value is known now, so push it as an immediate.
template <>
void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT16);
  pushArg(Imm32(GET_UINT16(handler.pc())));
// Interpreter specialization: load the uint16 operand from the current pc at
// run time and push it from |scratch|.
template <>
void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
    Register scratch) {
  LoadUint16Operand(masm, scratch);
  pushArg(scratch);
// Load the current op's uint32 length operand into |dest| as an int32.
// Compiler specialization: read the operand now and emit it as an immediate.
template <>
void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  uint32_t length = GET_UINT32(handler.pc());
  MOZ_ASSERT(length <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce a length exceeding int32_t range");
  masm.move32(Imm32(AssertedCast<int32_t>(length)), dest);
// Interpreter specialization: load the length operand from the current pc at
// run time.
template <>
void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  LoadInt32Operand(masm, dest);
// Emit the debug prologue: when debug instrumentation applies, call
// jit::DebugPrologue with the BaselineFrame so the debugger can observe
// frame entry. emitDebugInstrumentation decides whether/when the debuggee
// path runs.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDebugPrologue() {
  auto ifDebuggee = [this]() {
    // Load pointer to BaselineFrame in R0.
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
    prepareVMCall();
    pushArg(R0.scratchReg());
    // Use a dedicated RetAddrEntry kind for this VM call's return address.
    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugPrologue;
    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVM<Fn, jit::DebugPrologue>(kind)) {
      return false;
    return true;
  return emitDebugInstrumentation(ifDebuggee);
// Initialize BaselineFrame fields for compiled code: clear the flags word,
// set up the environment chain (from the callee function's environment, or
// |nonFunctionEnv| for non-function scripts), and store the frame's ICScript
// (either one passed in via cx->inlinedICScript or the script's default).
template <>
void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  Register scratch = R0.scratchReg();
  Register scratch2 = R2.scratchReg();
  MOZ_ASSERT(nonFunctionEnv != scratch && nonFunctionEnv != scratch2);
  // No frame flags are set on entry to compiled Baseline code.
  masm.store32(Imm32(0), frame.addressOfFlags());
  if (handler.function()) {
    masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
    masm.unboxObject(Address(scratch, JSFunction::offsetOfEnvironment()),
                     scratch);
    masm.storePtr(scratch, frame.addressOfEnvironmentChain());
  } else {
    masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  // If cx->inlinedICScript contains an inlined ICScript (passed from
  // the caller), take that ICScript and store it in the frame, then
  // overwrite cx->inlinedICScript with nullptr.
  Label notInlined, done;
  masm.movePtr(ImmPtr(cx->addressOfInlinedICScript()), scratch);
  Address inlinedAddr(scratch, 0);
  masm.branchPtr(Assembler::Equal, inlinedAddr, ImmWord(0), &notInlined);
  masm.loadPtr(inlinedAddr, scratch2);
  masm.storePtr(scratch2, frame.addressOfICScript());
  // Consume the inlined ICScript so it isn't reused by another frame.
  masm.storePtr(ImmPtr(nullptr), inlinedAddr);
  masm.jump(&done);
  // Otherwise, store this script's default ICSCript in the frame.
  masm.bind(&notInlined);
  masm.storePtr(ImmPtr(handler.script()->jitScript()->icScript()),
                frame.addressOfICScript());
  masm.bind(&done);
// Initialize BaselineFrame fields for the Baseline Interpreter: mark the
// frame as running in the interpreter, store the script, environment chain,
// ICScript, first interpreter IC entry, and the interpreter pc (which goes
// in a dedicated register on platforms that have one, otherwise in a frame
// slot).
template <>
void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  MOZ_ASSERT(nonFunctionEnv == R1.scratchReg(),
             "Don't clobber nonFunctionEnv below");
  // If we have a dedicated PC register we use it as scratch1 to avoid a
  // register move below.
  Register scratch1 =
      HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
  Register scratch2 = R2.scratchReg();
  masm.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER),
               frame.addressOfFlags());
  // Initialize interpreterScript.
  Label notFunction, done;
  masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
  masm.branchTestPtr(Assembler::NonZero, scratch1, Imm32(CalleeTokenScriptBit),
                     &notFunction);
  // CalleeToken_Function or CalleeToken_FunctionConstructing.
  masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
  masm.unboxObject(Address(scratch1, JSFunction::offsetOfEnvironment()),
                   scratch2);
  masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
  // Load the function's script for the store after |done|.
  masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
                   scratch1);
  masm.jump(&done);
  masm.bind(&notFunction);
  // CalleeToken_Script.
  masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
  masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  masm.bind(&done);
  // |scratch1| now holds the JSScript* for either callee-token kind.
  masm.storePtr(scratch1, frame.addressOfInterpreterScript());
  // Initialize icScript and interpreterICEntry
  masm.loadJitScript(scratch1, scratch2);
  masm.computeEffectiveAddress(Address(scratch2, JitScript::offsetOfICScript()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfICScript());
  masm.computeEffectiveAddress(Address(scratch2, ICScript::offsetOfICEntries()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
  // Initialize interpreter pc.
  masm.loadPtr(Address(scratch1, JSScript::offsetOfSharedData()), scratch1);
  masm.loadPtr(Address(scratch1, SharedImmutableScriptData::offsetOfISD()),
               scratch1);
  masm.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1);
  if (HasInterpreterPCReg()) {
    MOZ_ASSERT(scratch1 == InterpreterPCReg,
               "pc must be stored in the pc register");
  } else {
    masm.storePtr(scratch1, frame.addressOfInterpreterPC());
// Assert we don't need a post write barrier to write sourceObj to a slot of
// destObj. See comments in WarpBuilder::buildNamedLambdaEnv.
// (Debug-only: the check passes when destObj is in the nursery or sourceObj
// is not; otherwise it hits assumeUnreachable.)
static void AssertCanElidePostWriteBarrier(MacroAssembler& masm,
                                           Register destObj, Register sourceObj,
                                           Register temp) {
#ifdef DEBUG
  Label ok;
  masm.branchPtrInNurseryChunk(Assembler::Equal, destObj, temp, &ok);
  masm.branchPtrInNurseryChunk(Assembler::NotEqual, sourceObj, temp, &ok);
  masm.assumeUnreachable("Unexpected missing post write barrier in Baseline");
  masm.bind(&ok);
#endif
// Compiler specialization: inline-allocate the function's environment
// objects (NamedLambdaObject and/or CallObject) from template objects,
// falling back to the InitFunctionEnvironmentObjects VM call if either
// allocation fails. No-op for non-function scripts and for functions that
// need no environment objects.
template <>
bool BaselineCompilerCodeGen::initEnvironmentChain() {
  if (!handler.function()) {
    return true;
  if (!handler.script()->needsFunctionEnvironmentObjects()) {
    return true;
  // Allocate a NamedLambdaObject and/or a CallObject. If the function needs
  // both, the NamedLambdaObject must enclose the CallObject. If one of the
  // allocations fails, we perform the whole operation in C++.
  JSObject* templateEnv = handler.script()->jitScript()->templateEnvironment();
  MOZ_ASSERT(templateEnv);
  CallObject* callObjectTemplate = nullptr;
  if (handler.function()->needsCallObject()) {
    callObjectTemplate = &templateEnv->as<CallObject>();
  NamedLambdaObject* namedLambdaTemplate = nullptr;
  if (handler.function()->needsNamedLambdaEnvironment()) {
    if (callObjectTemplate) {
      // The template environment chain mirrors the runtime nesting: the
      // CallObject template's enclosing env is the NamedLambdaObject one.
      templateEnv = templateEnv->enclosingEnvironment();
    namedLambdaTemplate = &templateEnv->as<NamedLambdaObject>();
  MOZ_ASSERT(namedLambdaTemplate || callObjectTemplate);
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  Register newEnv = regs.takeAny();
  Register enclosingEnv = regs.takeAny();
  Register callee = regs.takeAny();
  Register temp = regs.takeAny();
  Label fail;
  masm.loadPtr(frame.addressOfEnvironmentChain(), enclosingEnv);
  masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), callee);
  // Allocate a NamedLambdaObject if needed.
  if (namedLambdaTemplate) {
    TemplateObject templateObject(namedLambdaTemplate);
    masm.createGCObject(newEnv, temp, templateObject, gc::Heap::Default, &fail);
    // Store enclosing environment.
    Address enclosingSlot(newEnv,
                          NamedLambdaObject::offsetOfEnclosingEnvironment());
    masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);
    // Store callee.
    Address lambdaSlot(newEnv, NamedLambdaObject::offsetOfLambdaSlot());
    masm.storeValue(JSVAL_TYPE_OBJECT, callee, lambdaSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);
    if (callObjectTemplate) {
      // The CallObject allocated below is enclosed by the NamedLambdaObject.
      masm.movePtr(newEnv, enclosingEnv);
  // Allocate a CallObject if needed.
  if (callObjectTemplate) {
    TemplateObject templateObject(callObjectTemplate);
    masm.createGCObject(newEnv, temp, templateObject, gc::Heap::Default, &fail);
    // Store enclosing environment.
    Address enclosingSlot(newEnv, CallObject::offsetOfEnclosingEnvironment());
    masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);
    // Store callee.
    Address calleeSlot(newEnv, CallObject::offsetOfCallee());
    masm.storeValue(JSVAL_TYPE_OBJECT, callee, calleeSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);
  // Update the frame's environment chain and mark it initialized.
  Label done;
  masm.storePtr(newEnv, frame.addressOfEnvironmentChain());
  masm.or32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
  masm.jump(&done);
  // Fallback: allocation failed, do everything in C++.
  masm.bind(&fail);
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, temp);
  pushArg(temp);
  const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase)) {
    return false;
  masm.bind(&done);
  return true;
// Interpreter specialization: no template objects are available at run time,
// so for function scripts that need environment objects we always call
// InitFunctionEnvironmentObjects in the VM. Non-function scripts (script-bit
// callee tokens) skip straight to |done|.
template <>
bool BaselineInterpreterCodeGen::initEnvironmentChain() {
  // For function scripts, call InitFunctionEnvironmentObjects if needed. For
  // non-function scripts this is a no-op.
  Label done;
  masm.branchTestPtr(Assembler::NonZero, frame.addressOfCalleeToken(),
                     Imm32(CalleeTokenScriptBit), &done);
  auto initEnv = [this]() {
    // Call into the VM to create the proper environment objects.
    prepareVMCall();
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
    pushArg(R0.scratchReg());
    const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
    using Fn = bool (*)(JSContext*, BaselineFrame*);
    return callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase);
  // Only emit the VM-call path when the script's
  // NeedsFunctionEnvironmentObjects flag is set.
  if (!emitTestScriptFlag(
          JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects, true,
          initEnv, R2.scratchReg())) {
    return false;
  masm.bind(&done);
  return true;
// Emit an interrupt check: if cx's interrupt bits are non-zero, call the
// InterruptCheck VM function. The stack is synced first so the VM call is
// safe at this point.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInterruptCheck() {
  frame.syncStack(0);
  // Fast path: no interrupt requested.
  Label done;
  masm.branch32(Assembler::Equal, AbsoluteAddress(cx->addressOfInterruptBits()),
                Imm32(0), &done);
  prepareVMCall();
  // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
  // from other callVMs that might happen at this pc.
  const RetAddrEntry::Kind kind = RetAddrEntry::Kind::InterruptCheck;
  using Fn = bool (*)(JSContext*);
  if (!callVM<Fn, InterruptCheck>(kind)) {
    return false;
  masm.bind(&done);
  return true;
// Compiler specialization: bump the ICScript warm-up counter and, when the
// script is warm enough, trigger trial inlining and/or Ion compilation. At
// loop heads this also records an OSR entry offset and may OSR directly into
// Ion JIT code.
template <>
bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
  frame.assertSyncedStack();
  // Record native code offset for OSR from Baseline Interpreter into Baseline
  // JIT code. This is right before the warm-up check in the Baseline JIT code,
  // to make sure we can immediately enter Ion if the script is warm enough or
  // if --ion-eager is used.
  JSScript* script = handler.script();
  jsbytecode* pc = handler.pc();
  if (JSOp(*pc) == JSOp::LoopHead) {
    uint32_t pcOffset = script->pcToOffset(pc);
    uint32_t nativeOffset = masm.currentOffset();
    if (!handler.osrEntries().emplaceBack(pcOffset, nativeOffset)) {
      ReportOutOfMemory(cx);
      return false;
  // Emit no warm-up counter increments if Ion is not enabled or if the script
  // will never be Ion-compileable.
  if (!handler.maybeIonCompileable()) {
    return true;
  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();
  // Load the ICScript* in scriptReg.
  masm.loadPtr(frame.addressOfICScript(), scriptReg);
  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, ICScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);
  if (!JitOptions.disableInlining) {
    // Consider trial inlining.
    // Note: unlike other warmup thresholds, where we try to enter a
    // higher tier whenever we are higher than a given warmup count,
    // trial inlining triggers once when reaching the threshold.
    Label noTrialInlining;
    masm.branch32(Assembler::NotEqual, countReg,
                  Imm32(JitOptions.trialInliningWarmUpThreshold),
                  &noTrialInlining);
    prepareVMCall();
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());
    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVMNonOp<Fn, DoTrialInlining>()) {
      return false;
    // Reload registers potentially clobbered by the call.
    masm.loadPtr(frame.addressOfICScript(), scriptReg);
    masm.load32(warmUpCounterAddr, countReg);
    masm.bind(&noTrialInlining);
  if (JSOp(*pc) == JSOp::LoopHead) {
    // If this is a loop where we can't OSR (for example because it's inside a
    // catch or finally block), increment the warmup counter but don't attempt
    // OSR (Ion/Warp only compiles the try block).
    if (!handler.analysis().info(pc).loopHeadCanOsr) {
      return true;
  Label done;
  const OptimizationInfo* info =
      IonOptimizations.get(OptimizationLevel::Normal);
  uint32_t warmUpThreshold = info->compilerWarmUpThreshold(cx, script, pc);
  masm.branch32(Assembler::LessThan, countReg, Imm32(warmUpThreshold), &done);
  // Don't trigger Warp compilations from trial-inlined scripts.
  Address depthAddr(scriptReg, ICScript::offsetOfDepth());
  masm.branch32(Assembler::NotEqual, depthAddr, Imm32(0), &done);
  // Load the IonScript* in scriptReg. We can load this from the ICScript*
  // because it must be an outer ICScript embedded in the JitScript.
  constexpr int32_t offset = -int32_t(JitScript::offsetOfICScript()) +
                             int32_t(JitScript::offsetOfIonScript());
  masm.loadPtr(Address(scriptReg, offset), scriptReg);
  // Do nothing if Ion is already compiling this script off-thread or if Ion has
  // been disabled for this script.
  masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonCompilingScriptPtr),
                 &done);
  masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonDisabledScriptPtr),
                 &done);
  // Try to compile and/or finish a compilation.
  if (JSOp(*pc) == JSOp::LoopHead) {
    // Try to OSR into Ion.
    computeFrameSize(R0.scratchReg());
    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());
    using Fn = bool (*)(JSContext*, BaselineFrame*, uint32_t, jsbytecode*,
                        IonOsrTempData**);
    if (!callVM<Fn, IonCompileScriptForBaselineOSR>()) {
      return false;
    // The return register holds the IonOsrTempData*. Perform OSR if it's not
    // nullptr.
    static_assert(ReturnReg != OsrFrameReg,
                  "Code below depends on osrDataReg != OsrFrameReg");
    Register osrDataReg = ReturnReg;
    masm.branchTestPtr(Assembler::Zero, osrDataReg, osrDataReg, &done);
    // Success! Switch from Baseline JIT code to Ion JIT code.
    // At this point, stack looks like:
    //  +-> [...Calling-Frame...]
    //  |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //  |   [Descriptor]
    //  |   [Return-Addr]
    //  +---[Saved-FramePtr]
    //      [...Baseline-Frame...]
#ifdef DEBUG
    // Get a scratch register that's not osrDataReg or OsrFrameReg.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(FramePointer));
    regs.take(osrDataReg);
    regs.take(OsrFrameReg);
    Register scratchReg = regs.takeAny();
    // If profiler instrumentation is on, ensure that lastProfilingFrame is
    // the frame currently being OSR-ed
    Label checkOk;
    AbsoluteAddress addressOfEnabled(
        cx->runtime()->geckoProfiler().addressOfEnabled());
    masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
    masm.loadPtr(AbsoluteAddress((void*)&cx->jitActivation), scratchReg);
    masm.loadPtr(
        Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()),
        scratchReg);
    // It may be the case that we entered the baseline frame with
    // profiling turned off on, then in a call within a loop (i.e. a
    // callee frame), turn on profiling, then return to this frame,
    // and then OSR with profiling turned on. In this case, allow for
    // lastProfilingFrame to be null.
    masm.branchPtr(Assembler::Equal, scratchReg, ImmWord(0), &checkOk);
    masm.branchPtr(Assembler::Equal, FramePointer, scratchReg, &checkOk);
    masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
    masm.bind(&checkOk);
#endif
    // Restore the stack pointer so that the saved frame pointer is on top of
    // the stack.
    masm.moveToStackPtr(FramePointer);
    // Jump into Ion.
    masm.loadPtr(Address(osrDataReg, IonOsrTempData::offsetOfBaselineFrame()),
                 OsrFrameReg);
    masm.jump(Address(osrDataReg, IonOsrTempData::offsetOfJitCode()));
  } else {
    // Not a loop head: kick off (or finish) an at-entry Ion compilation.
    prepareVMCall();
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());
    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVMNonOp<Fn, IonCompileScriptForBaselineAtEntry>()) {
      return false;
  masm.bind(&done);
  return true;
// Interpreter specialization: bump the JitScript warm-up counter and, once
// the Baseline JIT threshold is exceeded (and Baseline isn't disabled for
// the script), call BaselineCompileFromBaselineInterpreter and jump into the
// returned JIT code on success.
template <>
bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();
  // Load the JitScript* in scriptReg.
  loadScript(scriptReg);
  masm.loadJitScript(scriptReg, scriptReg);
  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, JitScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);
  // If the script is warm enough for Baseline compilation, call into the VM to
  // compile it.
  Label done;
  masm.branch32(Assembler::BelowOrEqual, countReg,
                Imm32(JitOptions.baselineJitWarmUpThreshold), &done);
  masm.branchPtr(Assembler::Equal,
                 Address(scriptReg, JitScript::offsetOfBaselineScript()),
                 ImmPtr(BaselineDisabledScriptPtr), &done);
  prepareVMCall();
  masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());
  using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t**);
  if (!callVM<Fn, BaselineCompileFromBaselineInterpreter>()) {
    return false;
  // If the function returned nullptr we either skipped compilation or were
  // unable to compile the script. Continue running in the interpreter.
  masm.branchTestPtr(Assembler::Zero, ReturnReg, ReturnReg, &done);
  // Success! Switch from interpreter to JIT code by jumping to the
  // corresponding code in the BaselineScript.
  // This works because BaselineCompiler uses the same frame layout (stack is
  // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
  // already cleared the RUNNING_IN_INTERPRETER flag for us.
  // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
  masm.jump(ReturnReg);
  masm.bind(&done);
  return true;
// Emit a toggleable call to the debug trap handler at the current pc,
// enabled up front when step mode or a breakpoint is already active. The
// (pcOffset, nativeOffset) pair is recorded so the call can be toggled
// later, and a RetAddrEntry maps the return address back to the pc.
bool BaselineCompiler::emitDebugTrap() {
  MOZ_ASSERT(compileDebugInstrumentation());
  MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
  JSScript* script = handler.script();
  bool enabled = DebugAPI::stepModeEnabled(script) ||
                 DebugAPI::hasBreakpointsAt(script, handler.pc());
  // Emit patchable call to debug trap handler.
  JitCode* handlerCode = cx->runtime()->jitRuntime()->debugTrapHandler(
      cx, DebugTrapHandlerKind::Compiler);
  if (!handlerCode) {
    return false;
  CodeOffset nativeOffset = masm.toggledCall(handlerCode, enabled);
  uint32_t pcOffset = script->pcToOffset(handler.pc());
  if (!debugTrapEntries_.emplaceBack(pcOffset, nativeOffset.offset())) {
    ReportOutOfMemory(cx);
    return false;
  // Add a RetAddrEntry for the return offset -> pc mapping.
  return handler.recordCallRetAddr(cx, RetAddrEntry::Kind::DebugTrap,
                                   masm.currentOffset());
// Emit profiler frame-entry instrumentation behind a toggled jump (initially
// disabled) and record the toggle offset so the profiler can enable it.
template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerEnterFrame() {
  // Store stack position to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerEnterFrame(FramePointer, R0.scratchReg());
  masm.bind(&noInstrument);
  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerEnterFrameToggleOffset_.bound());
  profilerEnterFrameToggleOffset_ = toggleOffset;
// Emit profiler frame-exit instrumentation behind a toggled jump (initially
// disabled) and record the toggle offset so the profiler can enable it.
template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerExitFrame() {
  // Store previous frame to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerExitFrame();
  masm.bind(&noInstrument);
  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerExitFrameToggleOffset_.bound());
  profilerExitFrameToggleOffset_ = toggleOffset;
// JSOp::Nop: no stack effect, no code emitted.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Nop() {
  return true;
// JSOp::NopDestructuring: no stack effect, no code emitted.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NopDestructuring() {
  return true;
// JSOp::NopIsAssignOp: no stack effect, no code emitted.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NopIsAssignOp() {
  return true;
// JSOp::TryDestructuring: no stack effect, no code emitted.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TryDestructuring() {
  return true;
// JSOp::Pop: discard the top stack value (tracked by the frame model; no
// code needs to be emitted beyond the bookkeeping).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pop() {
  frame.pop();
  return true;
// JSOp::PopN, compiler: the operand count is known now, pop that many
// tracked stack values.
template <>
bool BaselineCompilerCodeGen::emit_PopN() {
  frame.popn(GET_UINT16(handler.pc()));
  return true;
// JSOp::PopN, interpreter: load the count from the bytecode operand at run
// time and pop that many values.
template <>
bool BaselineInterpreterCodeGen::emit_PopN() {
  LoadUint16Operand(masm, R0.scratchReg());
  frame.popn(R0.scratchReg());
  return true;
// JSOp::DupAt, compiler: re-push the value |operand + 1| slots down from the
// top of the stack.
template <>
bool BaselineCompilerCodeGen::emit_DupAt() {
  frame.syncStack(0);
  // DupAt takes a value on the stack and re-pushes it on top. It's like
  // GetLocal but it addresses from the top of the stack instead of from the
  // stack frame.
  int depth = -(GET_UINT24(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);
  frame.push(R0);
  return true;
// JSOp::DupAt, interpreter: the depth comes from the uint24 operand loaded
// at run time; re-push that stack value.
template <>
bool BaselineInterpreterCodeGen::emit_DupAt() {
  LoadUint24Operand(masm, 0, R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(R0.scratchReg()), R0);
  frame.push(R0);
  return true;
// JSOp::Dup: duplicate the top stack value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup() {
  // Keep top stack value in R0, sync the rest so that we can use R1. We use
  // separate registers because every register can be used by at most one
  // StackValue.
  frame.popRegsAndSync(1);
  masm.moveValue(R0, R1);
  // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
  frame.push(R1);
  frame.push(R0);
  return true;
// JSOp::Dup2: duplicate the top two stack values, preserving their order.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup2() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  frame.push(R0);
  frame.push(R1);
  return true;
// JSOp::Swap: exchange the top two stack values.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Swap() {
  // Keep top stack values in R0 and R1.
  frame.popRegsAndSync(2);
  frame.push(R1);
  frame.push(R0);
  return true;
// JSOp::Pick, compiler: move the value |operand + 1| slots down to the top,
// shifting the values above it down by one. Operand is known at compile
// time, so the shifting loop is unrolled.
template <>
bool BaselineCompilerCodeGen::emit_Pick() {
  frame.syncStack(0);
  // Pick takes a value on the stack and moves it to the top.
  // For instance, pick 2:
  //     before: A B C D E
  //     after : A B D E C
  // First, move value at -(amount + 1) into R0.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);
  // Move the other values down.
  depth++;
  for (; depth < 0; depth++) {
    Address source = frame.addressOfStackValue(depth);
    Address dest = frame.addressOfStackValue(depth - 1);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  // Push R0.
  frame.pop();
  frame.push(R0);
  return true;
// JSOp::Pick, interpreter: the depth is only known at run time, so the
// shifting is done with an emitted countdown loop over the operand value.
template <>
bool BaselineInterpreterCodeGen::emit_Pick() {
  // First, move the value to move up into R0.
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(scratch), R0);
  // Move the other values down.
  Label top, done;
  masm.bind(&top);
  // Decrement the depth counter; stop when it goes negative.
  masm.branchSub32(Assembler::Signed, Imm32(1), scratch, &done);
  masm.loadValue(frame.addressOfStackValue(scratch), R1);
  masm.storeValue(R1, frame.addressOfStackValue(scratch, sizeof(Value)));
  masm.jump(&top);
  masm.bind(&done);
  // Replace value on top of the stack with R0.
  masm.storeValue(R0, frame.addressOfStackValue(-1));
  return true;
// JSOp::Unpick, compiler: move the top value under the nth value, shifting
// the intervening values up by one. Operand is known at compile time, so the
// shifting loop is unrolled.
template <>
bool BaselineCompilerCodeGen::emit_Unpick() {
  frame.syncStack(0);
  // Unpick takes the top of the stack value and moves it under the nth value.
  // For instance, unpick 2:
  //     before: A B C D E
  //     after : A B E C D
  // First, move value at -1 into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  MOZ_ASSERT(GET_INT8(handler.pc()) > 0,
             "Interpreter code assumes JSOp::Unpick operand > 0");
  // Move the other values up.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  for (int32_t i = -1; i > depth; i--) {
    Address source = frame.addressOfStackValue(i - 1);
    Address dest = frame.addressOfStackValue(i);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  // Store R0 under the nth value.
  Address dest = frame.addressOfStackValue(depth);
  masm.storeValue(R0, dest);
  return true;
// JSOp::Unpick, interpreter: the depth is only known at run time, so the
// values are rotated with an emitted loop that walks from slot n up to slot
// 1, carrying the displaced value in R1.
template <>
bool BaselineInterpreterCodeGen::emit_Unpick() {
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);
  // Move the top value into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  // Overwrite the nth stack value with R0 but first save the old value in R1.
  masm.loadValue(frame.addressOfStackValue(scratch), R1);
  masm.storeValue(R0, frame.addressOfStackValue(scratch));
  // Now for each slot x in [n-1, 1] do the following:
  //
  // * Store the value in slot x in R0.
  // * Store the value in the previous slot (now in R1) in slot x.
  // * Move R0 to R1.
#ifdef DEBUG
  // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
  // negative values.
  Label ok;
  masm.branch32(Assembler::GreaterThan, scratch, Imm32(0), &ok);
  masm.assumeUnreachable("JSOp::Unpick with operand <= 0?");
  masm.bind(&ok);
#endif
  Label top, done;
  masm.bind(&top);
  masm.branchSub32(Assembler::Zero, Imm32(1), scratch, &done);
  // Overwrite stack slot x with slot x + 1, saving the old value in R1.
  masm.loadValue(frame.addressOfStackValue(scratch), R0);
  masm.storeValue(R1, frame.addressOfStackValue(scratch));
  masm.moveValue(R0, R1);
  masm.jump(&top);
  // Finally, replace the value on top of the stack (slot 0) with R1. This is
  // the value that used to be in slot 1.
  masm.bind(&done);
  masm.storeValue(R1, frame.addressOfStackValue(-1));
  return true;
// Compiler specialization: the jump target pc is known now, so jump directly
// to that pc's native label.
template <>
void BaselineCompilerCodeGen::emitJump() {
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();
  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.jump(handler.labelOf(target));
// Interpreter specialization: add the op's (signed) jump offset to the
// current interpreter pc and dispatch to the next op.
template <>
void BaselineInterpreterCodeGen::emitJump() {
  // We have to add the current pc's jump offset to the current pc. We can use
  // R0 and R1 as scratch because we jump to the "next op" label so these
  // registers aren't in use at this point.
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  Register pc = LoadBytecodePC(masm, scratch1);
  LoadInt32OperandSignExtendToPtr(masm, pc, scratch2);
  if (HasInterpreterPCReg()) {
    masm.addPtr(scratch2, InterpreterPCReg);
  } else {
    masm.addPtr(pc, scratch2);
    masm.storePtr(scratch2, frame.addressOfInterpreterPC());
  masm.jump(handler.interpretOpWithPCRegLabel());
// Compiler specialization: branch straight to the jump target's native label
// when the boolean in |val| matches |branchIfTrue|.
template <>
void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                    ValueOperand val) {
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();
  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.branchTestBooleanTruthy(branchIfTrue, val, handler.labelOf(target));
// Interpreter specialization: skip past the jump when the boolean does NOT
// match (inverted test), otherwise fall into emitJump's pc-relative jump.
template <>
void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                       ValueOperand val) {
  Label done;
  masm.branchTestBooleanTruthy(!branchIfTrue, val, &done);
  emitJump();
  masm.bind(&done);
// Compiler specialization: the script's flags are known at compile time, so
// emit only the branch that applies. |scratch| is unused.
template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  if (handler.script()->hasFlag(flag)) {
    return ifSet();
  return ifNotSet();
// Interpreter specialization: test the script's immutable-flags word at run
// time and emit BOTH branches, selecting between them with a branchTest32.
template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  Label flagNotSet, done;
  loadScript(scratch);
  masm.branchTest32(Assembler::Zero,
                    Address(scratch, JSScript::offsetOfImmutableFlags()),
                    Imm32(uint32_t(flag)), &flagNotSet);
  if (!ifSet()) {
    return false;
  masm.jump(&done);
  masm.bind(&flagNotSet);
  if (!ifNotSet()) {
    return false;
  masm.bind(&done);
  return true;
2017 template <>
2018 template <typename F>
2019 [[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2020 JSScript::ImmutableFlags flag, bool value, const F& emit,
2021 Register scratch) {
2022 if (handler.script()->hasFlag(flag) == value) {
2023 return emit();
2025 return true;
2028 template <>
2029 template <typename F>
2030 [[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2031 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2032 if (handler.script()->hasFlag(flag) == value) {
2033 return emit();
2035 return true;
2038 template <>
2039 template <typename F>
2040 [[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2041 JSScript::ImmutableFlags flag, bool value, const F& emit,
2042 Register scratch) {
2043 Label done;
2044 loadScript(scratch);
2045 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2046 Address(scratch, JSScript::offsetOfImmutableFlags()),
2047 Imm32(uint32_t(flag)), &done);
2049 if (!emit()) {
2050 return false;
2054 masm.bind(&done);
2055 return true;
2058 template <>
2059 template <typename F>
2060 [[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2061 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2062 Label done;
2063 loadScript(scratch);
2064 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2065 Address(scratch, JSScript::offsetOfMutableFlags()),
2066 Imm32(uint32_t(flag)), &done);
2068 if (!emit()) {
2069 return false;
2073 masm.bind(&done);
2074 return true;
// Unconditional jump: sync the stack (jump targets expect a synced frame)
// then emit the pc-relative/compile-time jump.
2077 template <typename Handler>
2078 bool BaselineCodeGen<Handler>::emit_Goto() {
2079 frame.syncStack(0);
2080 emitJump();
2081 return true;
// Shared helper for JumpIfFalse/JumpIfTrue: pops the condition, converts it
// to a boolean via the ToBool IC unless it is statically known boolean, then
// branches on truthiness.
2084 template <typename Handler>
2085 bool BaselineCodeGen<Handler>::emitTest(bool branchIfTrue) {
2086 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2088 // Keep top stack value in R0.
2089 frame.popRegsAndSync(1);
2091 if (!knownBoolean && !emitNextIC()) {
2092 return false;
2095 // IC will leave a BooleanValue in R0, just need to branch on it.
2096 emitTestBooleanTruthy(branchIfTrue, R0);
2097 return true;
2100 template <typename Handler>
2101 bool BaselineCodeGen<Handler>::emit_JumpIfFalse() {
2102 return emitTest(false);
2105 template <typename Handler>
2106 bool BaselineCodeGen<Handler>::emit_JumpIfTrue() {
2107 return emitTest(true);
// JSOp::And / JSOp::Or: like emitTest, but the operand value is left on the
// stack (short-circuit semantics keep the original value, not the boolean).
2110 template <typename Handler>
2111 bool BaselineCodeGen<Handler>::emitAndOr(bool branchIfTrue) {
2112 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2114 // And and Or leave the original value on the stack.
2115 frame.syncStack(0);
2117 masm.loadValue(frame.addressOfStackValue(-1), R0);
2118 if (!knownBoolean && !emitNextIC()) {
2119 return false;
2122 emitTestBooleanTruthy(branchIfTrue, R0);
2123 return true;
2126 template <typename Handler>
2127 bool BaselineCodeGen<Handler>::emit_And() {
2128 return emitAndOr(false);
2131 template <typename Handler>
2132 bool BaselineCodeGen<Handler>::emit_Or() {
2133 return emitAndOr(true);
// JSOp::Coalesce (??): jump unless the value is undefined or null; the value
// stays on the stack either way.
2136 template <typename Handler>
2137 bool BaselineCodeGen<Handler>::emit_Coalesce() {
2138 // Coalesce leaves the original value on the stack.
2139 frame.syncStack(0);
2141 masm.loadValue(frame.addressOfStackValue(-1), R0);
2143 Label undefinedOrNull;
2145 masm.branchTestUndefined(Assembler::Equal, R0, &undefinedOrNull);
2146 masm.branchTestNull(Assembler::Equal, R0, &undefinedOrNull);
2147 emitJump();
2149 masm.bind(&undefinedOrNull);
2150 // fall through
2151 return true;
// JSOp::Not: ToBool IC (unless known boolean) then flip the boolean payload.
2154 template <typename Handler>
2155 bool BaselineCodeGen<Handler>::emit_Not() {
2156 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2158 // Keep top stack value in R0.
2159 frame.popRegsAndSync(1);
2161 if (!knownBoolean && !emitNextIC()) {
2162 return false;
2165 masm.notBoolean(R0);
2167 frame.push(R0, JSVAL_TYPE_BOOLEAN);
2168 return true;
2171 template <typename Handler>
2172 bool BaselineCodeGen<Handler>::emit_Pos() {
2173 return emitUnaryArith();
2176 template <typename Handler>
2177 bool BaselineCodeGen<Handler>::emit_ToNumeric() {
2178 return emitUnaryArith();
// JSOp::LoopHead: jump target + interrupt check + warm-up counter bump (the
// counter drives tier-up decisions).
2181 template <typename Handler>
2182 bool BaselineCodeGen<Handler>::emit_LoopHead() {
2183 if (!emit_JumpTarget()) {
2184 return false;
2186 if (!emitInterruptCheck()) {
2187 return false;
2189 if (!emitWarmUpCounterIncrement()) {
2190 return false;
2192 return true;
// Simple constant-producing ops: replace or push a constant Value.
2195 template <typename Handler>
2196 bool BaselineCodeGen<Handler>::emit_Void() {
2197 frame.pop();
2198 frame.push(UndefinedValue());
2199 return true;
2202 template <typename Handler>
2203 bool BaselineCodeGen<Handler>::emit_Undefined() {
2204 frame.push(UndefinedValue());
2205 return true;
2208 template <typename Handler>
2209 bool BaselineCodeGen<Handler>::emit_Hole() {
2210 frame.push(MagicValue(JS_ELEMENTS_HOLE));
2211 return true;
2214 template <typename Handler>
2215 bool BaselineCodeGen<Handler>::emit_Null() {
2216 frame.push(NullValue());
2217 return true;
// JSOp::CheckIsObj: throw via VM call if the top value is not an object.
2220 template <typename Handler>
2221 bool BaselineCodeGen<Handler>::emit_CheckIsObj() {
2222 frame.syncStack(0);
2223 masm.loadValue(frame.addressOfStackValue(-1), R0);
2225 Label ok;
2226 masm.branchTestObject(Assembler::Equal, R0, &ok);
2228 prepareVMCall();
2230 pushUint8BytecodeOperandArg(R0.scratchReg());
2232 using Fn = bool (*)(JSContext*, CheckIsObjectKind);
2233 if (!callVM<Fn, ThrowCheckIsObject>()) {
2234 return false;
2237 masm.bind(&ok);
2238 return true;
2241 template <typename Handler>
2242 bool BaselineCodeGen<Handler>::emit_CheckThis() {
2243 frame.syncStack(0);
2244 masm.loadValue(frame.addressOfStackValue(-1), R0);
2246 return emitCheckThis(R0);
2249 template <typename Handler>
2250 bool BaselineCodeGen<Handler>::emit_CheckThisReinit() {
2251 frame.syncStack(0);
2252 masm.loadValue(frame.addressOfStackValue(-1), R0);
2254 return emitCheckThis(R0, /* reinit = */ true);
// Throw if |this| is in the wrong initialization state: for normal checks
// the magic uninitialized-this sentinel is an error; for reinit checks the
// sentinel is the only valid state.
2257 template <typename Handler>
2258 bool BaselineCodeGen<Handler>::emitCheckThis(ValueOperand val, bool reinit) {
2259 Label thisOK;
2260 if (reinit) {
2261 masm.branchTestMagic(Assembler::Equal, val, &thisOK);
2262 } else {
2263 masm.branchTestMagic(Assembler::NotEqual, val, &thisOK);
2266 prepareVMCall();
2268 if (reinit) {
2269 using Fn = bool (*)(JSContext*);
2270 if (!callVM<Fn, ThrowInitializedThis>()) {
2271 return false;
2273 } else {
2274 using Fn = bool (*)(JSContext*);
2275 if (!callVM<Fn, ThrowUninitializedThis>()) {
2276 return false;
2280 masm.bind(&thisOK);
2281 return true;
// Derived-class constructor return check: an object return value wins;
// otherwise the return value must be undefined and |this| initialized, else
// throw. Pushes the resulting |rval| or |this|.
2284 template <typename Handler>
2285 bool BaselineCodeGen<Handler>::emit_CheckReturn() {
2286 MOZ_ASSERT_IF(handler.maybeScript(),
2287 handler.maybeScript()->isDerivedClassConstructor());
2289 // Load |this| in R0, return value in R1.
2290 frame.popRegsAndSync(1);
2291 emitLoadReturnValue(R1);
2293 Label done, returnBad, checkThis;
2294 masm.branchTestObject(Assembler::NotEqual, R1, &checkThis);
2296 masm.moveValue(R1, R0);
2297 masm.jump(&done);
2299 masm.bind(&checkThis);
2300 masm.branchTestUndefined(Assembler::NotEqual, R1, &returnBad);
2301 masm.branchTestMagic(Assembler::NotEqual, R0, &done);
2302 masm.bind(&returnBad);
2304 prepareVMCall();
2305 pushArg(R1);
2307 using Fn = bool (*)(JSContext*, HandleValue);
2308 if (!callVM<Fn, ThrowBadDerivedReturnOrUninitializedThis>()) {
2309 return false;
2311 masm.assumeUnreachable("Should throw on bad derived constructor return");
2313 masm.bind(&done);
2315 // Push |rval| or |this| onto the stack.
2316 frame.push(R0);
2317 return true;
// JSOp::FunctionThis: push |this|; in non-strict code, box a primitive
// |this| via a VM call (strict code leaves it untouched).
2320 template <typename Handler>
2321 bool BaselineCodeGen<Handler>::emit_FunctionThis() {
2322 MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow());
2324 frame.pushThis();
2326 auto boxThis = [this]() {
2327 // Load |thisv| in R0. Skip the call if it's already an object.
2328 Label skipCall;
2329 frame.popRegsAndSync(1);
2330 masm.branchTestObject(Assembler::Equal, R0, &skipCall);
2332 prepareVMCall();
2333 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
2335 pushArg(R1.scratchReg());
2337 using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
2338 if (!callVM<Fn, BaselineGetFunctionThis>()) {
2339 return false;
2342 masm.bind(&skipCall);
2343 frame.push(R0);
2344 return true;
2347 // In strict mode code, |this| is left alone.
2348 return emitTestScriptFlag(JSScript::ImmutableFlags::Strict, false, boxThis,
2349 R2.scratchReg());
2352 template <typename Handler>
2353 bool BaselineCodeGen<Handler>::emit_GlobalThis() {
2354 frame.syncStack(0);
2356 loadGlobalThisValue(R0);
2357 frame.push(R0);
2358 return true;
// Non-syntactic scopes need a VM call to resolve the global |this|.
2361 template <typename Handler>
2362 bool BaselineCodeGen<Handler>::emit_NonSyntacticGlobalThis() {
2363 frame.syncStack(0);
2365 prepareVMCall();
2367 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
2368 pushArg(R0.scratchReg());
2370 using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue);
2371 if (!callVM<Fn, GetNonSyntacticGlobalThis>()) {
2372 return false;
2375 frame.push(R0);
2376 return true;
// Trivial constant pushes.
2379 template <typename Handler>
2380 bool BaselineCodeGen<Handler>::emit_True() {
2381 frame.push(BooleanValue(true));
2382 return true;
2385 template <typename Handler>
2386 bool BaselineCodeGen<Handler>::emit_False() {
2387 frame.push(BooleanValue(false));
2388 return true;
2391 template <typename Handler>
2392 bool BaselineCodeGen<Handler>::emit_Zero() {
2393 frame.push(Int32Value(0));
2394 return true;
2397 template <typename Handler>
2398 bool BaselineCodeGen<Handler>::emit_One() {
2399 frame.push(Int32Value(1));
2400 return true;
// Operand-constant push ops. Pattern throughout: the compiler reads the
// operand from the known pc and pushes a constant; the interpreter loads the
// operand (or script GC-thing) at run time, tags it, and pushes R0.
2403 template <>
2404 bool BaselineCompilerCodeGen::emit_Int8() {
2405 frame.push(Int32Value(GET_INT8(handler.pc())));
2406 return true;
2409 template <>
2410 bool BaselineInterpreterCodeGen::emit_Int8() {
2411 LoadInt8Operand(masm, R0.scratchReg());
2412 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2413 frame.push(R0);
2414 return true;
2417 template <>
2418 bool BaselineCompilerCodeGen::emit_Int32() {
2419 frame.push(Int32Value(GET_INT32(handler.pc())));
2420 return true;
2423 template <>
2424 bool BaselineInterpreterCodeGen::emit_Int32() {
2425 LoadInt32Operand(masm, R0.scratchReg());
2426 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2427 frame.push(R0);
2428 return true;
2431 template <>
2432 bool BaselineCompilerCodeGen::emit_Uint16() {
2433 frame.push(Int32Value(GET_UINT16(handler.pc())));
2434 return true;
2437 template <>
2438 bool BaselineInterpreterCodeGen::emit_Uint16() {
2439 LoadUint16Operand(masm, R0.scratchReg());
2440 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2441 frame.push(R0);
2442 return true;
2445 template <>
2446 bool BaselineCompilerCodeGen::emit_Uint24() {
2447 frame.push(Int32Value(GET_UINT24(handler.pc())));
2448 return true;
2451 template <>
2452 bool BaselineInterpreterCodeGen::emit_Uint24() {
2453 LoadUint24Operand(masm, 0, R0.scratchReg());
2454 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2455 frame.push(R0);
2456 return true;
2459 template <>
2460 bool BaselineCompilerCodeGen::emit_Double() {
2461 frame.push(GET_INLINE_VALUE(handler.pc()));
2462 return true;
2465 template <>
2466 bool BaselineInterpreterCodeGen::emit_Double() {
2467 LoadInlineValueOperand(masm, R0);
2468 frame.push(R0);
2469 return true;
2472 template <>
2473 bool BaselineCompilerCodeGen::emit_BigInt() {
2474 BigInt* bi = handler.script()->getBigInt(handler.pc());
2475 frame.push(BigIntValue(bi));
2476 return true;
2479 template <>
2480 bool BaselineInterpreterCodeGen::emit_BigInt() {
2481 Register scratch1 = R0.scratchReg();
2482 Register scratch2 = R1.scratchReg();
2483 loadScriptGCThing(ScriptGCThingType::BigInt, scratch1, scratch2);
2484 masm.tagValue(JSVAL_TYPE_BIGINT, scratch1, R0);
2485 frame.push(R0);
2486 return true;
2489 template <>
2490 bool BaselineCompilerCodeGen::emit_String() {
2491 frame.push(StringValue(handler.script()->getString(handler.pc())));
2492 return true;
2495 template <>
2496 bool BaselineInterpreterCodeGen::emit_String() {
2497 Register scratch1 = R0.scratchReg();
2498 Register scratch2 = R1.scratchReg();
2499 loadScriptGCThing(ScriptGCThingType::String, scratch1, scratch2);
2500 masm.tagValue(JSVAL_TYPE_STRING, scratch1, R0);
2501 frame.push(R0);
2502 return true;
// JSOp::Symbol: the uint8 operand indexes the runtime's well-known symbols.
2505 template <>
2506 bool BaselineCompilerCodeGen::emit_Symbol() {
2507 unsigned which = GET_UINT8(handler.pc());
2508 JS::Symbol* sym = cx->runtime()->wellKnownSymbols->get(which);
2509 frame.push(SymbolValue(sym));
2510 return true;
2513 template <>
2514 bool BaselineInterpreterCodeGen::emit_Symbol() {
2515 Register scratch1 = R0.scratchReg();
2516 Register scratch2 = R1.scratchReg();
2517 LoadUint8Operand(masm, scratch1);
2519 masm.movePtr(ImmPtr(cx->runtime()->wellKnownSymbols), scratch2);
2520 masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch1);
2522 masm.tagValue(JSVAL_TYPE_SYMBOL, scratch1, R0);
2523 frame.push(R0);
2524 return true;
2527 template <>
2528 bool BaselineCompilerCodeGen::emit_Object() {
2529 frame.push(ObjectValue(*handler.script()->getObject(handler.pc())));
2530 return true;
2533 template <>
2534 bool BaselineInterpreterCodeGen::emit_Object() {
2535 Register scratch1 = R0.scratchReg();
2536 Register scratch2 = R1.scratchReg();
2537 loadScriptGCThing(ScriptGCThingType::Object, scratch1, scratch2);
2538 masm.tagValue(JSVAL_TYPE_OBJECT, scratch1, R0);
2539 frame.push(R0);
2540 return true;
2543 template <typename Handler>
2544 bool BaselineCodeGen<Handler>::emit_CallSiteObj() {
2545 return emit_Object();
// JSOp::RegExp: regexp literals are cloned per evaluation via a VM call.
2548 template <typename Handler>
2549 bool BaselineCodeGen<Handler>::emit_RegExp() {
2550 prepareVMCall();
2551 pushScriptGCThingArg(ScriptGCThingType::RegExp, R0.scratchReg(),
2552 R1.scratchReg());
2554 using Fn = JSObject* (*)(JSContext*, Handle<RegExpObject*>);
2555 if (!callVM<Fn, CloneRegExpObject>()) {
2556 return false;
2559 // Box and push return value.
2560 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2561 frame.push(R0);
2562 return true;
// Record/Tuple opcodes are not supported by the JITs; reaching one here is a
// bug, so crash loudly.
2565 #ifdef ENABLE_RECORD_TUPLE
2566 # define UNSUPPORTED_OPCODE(OP) \
2567 template <typename Handler> \
2568 bool BaselineCodeGen<Handler>::emit_##OP() { \
2569 MOZ_CRASH("Record and Tuple are not supported by jit"); \
2570 return false; \
2573 UNSUPPORTED_OPCODE(InitRecord)
2574 UNSUPPORTED_OPCODE(AddRecordProperty)
2575 UNSUPPORTED_OPCODE(AddRecordSpread)
2576 UNSUPPORTED_OPCODE(FinishRecord)
2577 UNSUPPORTED_OPCODE(InitTuple)
2578 UNSUPPORTED_OPCODE(AddTupleElement)
2579 UNSUPPORTED_OPCODE(FinishTuple)
2581 # undef UNSUPPORTED_OPCODE
2582 #endif
// JSOp::Lambda: clone the script's function template for the current
// environment chain via a VM call and push the resulting closure.
2584 template <typename Handler>
2585 bool BaselineCodeGen<Handler>::emit_Lambda() {
2586 prepareVMCall();
2587 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
2589 pushArg(R0.scratchReg());
2590 pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
2591 R1.scratchReg());
2593 using Fn = JSObject* (*)(JSContext*, HandleFunction, HandleObject);
2594 if (!callVM<Fn, js::Lambda>()) {
2595 return false;
2598 // Box and push return value.
2599 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2600 frame.push(R0);
2601 return true;
// JSOp::SetFunName: pop (fun, name), keep fun on the stack, and call into
// the VM to assign the function's inferred name with the operand's prefix.
2604 template <typename Handler>
2605 bool BaselineCodeGen<Handler>::emit_SetFunName() {
2606 frame.popRegsAndSync(2);
2608 frame.push(R0);
2609 frame.syncStack(0);
2611 masm.unboxObject(R0, R0.scratchReg());
2613 prepareVMCall();
2615 pushUint8BytecodeOperandArg(R2.scratchReg());
2616 pushArg(R1);
2617 pushArg(R0.scratchReg());
2619 using Fn =
2620 bool (*)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
2621 return callVM<Fn, SetFunctionName>();
// All binary arithmetic/bitwise ops share one IC-based implementation.
2624 template <typename Handler>
2625 bool BaselineCodeGen<Handler>::emit_BitOr() {
2626 return emitBinaryArith();
2629 template <typename Handler>
2630 bool BaselineCodeGen<Handler>::emit_BitXor() {
2631 return emitBinaryArith();
2634 template <typename Handler>
2635 bool BaselineCodeGen<Handler>::emit_BitAnd() {
2636 return emitBinaryArith();
2639 template <typename Handler>
2640 bool BaselineCodeGen<Handler>::emit_Lsh() {
2641 return emitBinaryArith();
2644 template <typename Handler>
2645 bool BaselineCodeGen<Handler>::emit_Rsh() {
2646 return emitBinaryArith();
2649 template <typename Handler>
2650 bool BaselineCodeGen<Handler>::emit_Ursh() {
2651 return emitBinaryArith();
2654 template <typename Handler>
2655 bool BaselineCodeGen<Handler>::emit_Add() {
2656 return emitBinaryArith();
2659 template <typename Handler>
2660 bool BaselineCodeGen<Handler>::emit_Sub() {
2661 return emitBinaryArith();
2664 template <typename Handler>
2665 bool BaselineCodeGen<Handler>::emit_Mul() {
2666 return emitBinaryArith();
2669 template <typename Handler>
2670 bool BaselineCodeGen<Handler>::emit_Div() {
2671 return emitBinaryArith();
2674 template <typename Handler>
2675 bool BaselineCodeGen<Handler>::emit_Mod() {
2676 return emitBinaryArith();
2679 template <typename Handler>
2680 bool BaselineCodeGen<Handler>::emit_Pow() {
2681 return emitBinaryArith();
// Pop both operands into registers and dispatch to the op's IC; the IC
// leaves the result in R0.
2684 template <typename Handler>
2685 bool BaselineCodeGen<Handler>::emitBinaryArith() {
2686 // Keep top JSStack value in R0 and R2
2687 frame.popRegsAndSync(2);
2689 // Call IC
2690 if (!emitNextIC()) {
2691 return false;
2694 // Mark R0 as pushed stack value.
2695 frame.push(R0);
2696 return true;
// Same shape for unary ops (Pos, Neg, Inc, Dec, BitNot, ToNumeric).
2699 template <typename Handler>
2700 bool BaselineCodeGen<Handler>::emitUnaryArith() {
2701 // Keep top stack value in R0.
2702 frame.popRegsAndSync(1);
2704 // Call IC
2705 if (!emitNextIC()) {
2706 return false;
2709 // Mark R0 as pushed stack value.
2710 frame.push(R0);
2711 return true;
2714 template <typename Handler>
2715 bool BaselineCodeGen<Handler>::emit_BitNot() {
2716 return emitUnaryArith();
2719 template <typename Handler>
2720 bool BaselineCodeGen<Handler>::emit_Neg() {
2721 return emitUnaryArith();
2724 template <typename Handler>
2725 bool BaselineCodeGen<Handler>::emit_Inc() {
2726 return emitUnaryArith();
2729 template <typename Handler>
2730 bool BaselineCodeGen<Handler>::emit_Dec() {
2731 return emitUnaryArith();
// All comparison ops share one IC-based implementation; the result is a
// boolean in R0.
2734 template <typename Handler>
2735 bool BaselineCodeGen<Handler>::emit_Lt() {
2736 return emitCompare();
2739 template <typename Handler>
2740 bool BaselineCodeGen<Handler>::emit_Le() {
2741 return emitCompare();
2744 template <typename Handler>
2745 bool BaselineCodeGen<Handler>::emit_Gt() {
2746 return emitCompare();
2749 template <typename Handler>
2750 bool BaselineCodeGen<Handler>::emit_Ge() {
2751 return emitCompare();
2754 template <typename Handler>
2755 bool BaselineCodeGen<Handler>::emit_Eq() {
2756 return emitCompare();
2759 template <typename Handler>
2760 bool BaselineCodeGen<Handler>::emit_Ne() {
2761 return emitCompare();
2764 template <typename Handler>
2765 bool BaselineCodeGen<Handler>::emitCompare() {
2766 // Keep top JSStack value in R0 and R1.
2767 frame.popRegsAndSync(2);
2769 // Call IC.
2770 if (!emitNextIC()) {
2771 return false;
2774 // Mark R0 as pushed stack value.
2775 frame.push(R0, JSVAL_TYPE_BOOLEAN);
2776 return true;
2779 template <typename Handler>
2780 bool BaselineCodeGen<Handler>::emit_StrictEq() {
2781 return emitCompare();
2784 template <typename Handler>
2785 bool BaselineCodeGen<Handler>::emit_StrictNe() {
2786 return emitCompare();
// JSOp::Case: if the popped condition is true, also pop the switch
// discriminant and jump to the case body; otherwise fall through.
2789 template <typename Handler>
2790 bool BaselineCodeGen<Handler>::emit_Case() {
2791 frame.popRegsAndSync(1);
2793 Label done;
2794 masm.branchTestBooleanTruthy(/* branchIfTrue */ false, R0, &done);
2796 // Pop the switch value if the case matches.
2797 masm.addToStackPtr(Imm32(sizeof(Value)));
2798 emitJump();
2800 masm.bind(&done);
2801 return true;
// JSOp::Default: discard the discriminant and jump unconditionally.
2804 template <typename Handler>
2805 bool BaselineCodeGen<Handler>::emit_Default() {
2806 frame.pop();
2807 return emit_Goto();
// JSOp::Lineno is a no-op for codegen.
2810 template <typename Handler>
2811 bool BaselineCodeGen<Handler>::emit_Lineno() {
2812 return true;
// JSOp::NewArray: allocation goes through an IC.
2815 template <typename Handler>
2816 bool BaselineCodeGen<Handler>::emit_NewArray() {
2817 frame.syncStack(0);
2819 if (!emitNextIC()) {
2820 return false;
2823 frame.push(R0);
2824 return true;
// If |val| is the hole magic value, set the NON_PACKED flag on the elements
// header so the array is no longer considered packed.
2827 static void MarkElementsNonPackedIfHoleValue(MacroAssembler& masm,
2828 Register elements,
2829 ValueOperand val) {
2830 Label notHole;
2831 masm.branchTestMagic(Assembler::NotEqual, val, &notHole);
2833 Address elementsFlags(elements, ObjectElements::offsetOfFlags());
2834 masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);
2836 masm.bind(&notHole);
// Interpreter InitElemArray: index comes from the bytecode operand at run
// time; stores the value, bumps initialized length, updates packedness, and
// performs the generational post-barrier when needed.
2839 template <>
2840 bool BaselineInterpreterCodeGen::emit_InitElemArray() {
2841 // Pop value into R0, keep the object on the stack.
2842 frame.popRegsAndSync(1);
2844 // Load object in R2.
2845 Register obj = R2.scratchReg();
2846 masm.unboxObject(frame.addressOfStackValue(-1), obj);
2848 // Load index in R1.
2849 Register index = R1.scratchReg();
2850 LoadInt32Operand(masm, index);
2852 // Store the Value. No pre-barrier because this is an initialization.
2853 masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
2854 masm.storeValue(R0, BaseObjectElementIndex(obj, index));
2856 // Bump initialized length.
2857 Address initLength(obj, ObjectElements::offsetOfInitializedLength());
2858 masm.add32(Imm32(1), index);
2859 masm.store32(index, initLength);
2861 // Mark elements as NON_PACKED if we stored the hole value.
2862 MarkElementsNonPackedIfHoleValue(masm, obj, R0);
2864 // Post-barrier.
2865 Label skipBarrier;
2866 Register scratch = index;
2867 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch, &skipBarrier);
2869 masm.unboxObject(frame.addressOfStackValue(-1), obj);
2870 masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);
2871 MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
2872 masm.call(&postBarrierSlot_);
2874 masm.bind(&skipBarrier);
2875 return true;
// Compiler InitElemArray: the index is a compile-time constant, and a
// constant-known value lets us statically resolve packedness and elide the
// post-barrier for tenured values.
2878 template <>
2879 bool BaselineCompilerCodeGen::emit_InitElemArray() {
2880 // Pop value into R0, keep the object on the stack.
2881 Maybe<Value> knownValue = frame.knownStackValue(-1);
2882 frame.popRegsAndSync(1);
2884 // Load object in R2.
2885 Register obj = R2.scratchReg();
2886 masm.unboxObject(frame.addressOfStackValue(-1), obj);
2888 uint32_t index = GET_UINT32(handler.pc());
2889 MOZ_ASSERT(index <= INT32_MAX,
2890 "the bytecode emitter must fail to compile code that would "
2891 "produce an index exceeding int32_t range");
2893 // Store the Value. No pre-barrier because this is an initialization.
2894 masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
2895 masm.storeValue(R0, Address(obj, index * sizeof(Value)));
2897 // Bump initialized length.
2898 Address initLength(obj, ObjectElements::offsetOfInitializedLength());
2899 masm.store32(Imm32(index + 1), initLength);
2901 // Mark elements as NON_PACKED if we stored the hole value. We know this
2902 // statically except when debugger instrumentation is enabled because that
2903 // forces a stack-sync (which discards constants and known types) for each op.
2904 if (knownValue && knownValue->isMagic(JS_ELEMENTS_HOLE)) {
2905 Address elementsFlags(obj, ObjectElements::offsetOfFlags());
2906 masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);
2907 } else if (handler.compileDebugInstrumentation()) {
2908 MarkElementsNonPackedIfHoleValue(masm, obj, R0);
2909 } else {
2910 #ifdef DEBUG
2911 Label notHole;
2912 masm.branchTestMagic(Assembler::NotEqual, R0, &notHole);
2913 masm.assumeUnreachable("Unexpected hole value");
2914 masm.bind(&notHole);
2915 #endif
2918 // Post-barrier.
2919 if (knownValue) {
2920 MOZ_ASSERT(JS::GCPolicy<Value>::isTenured(*knownValue));
2921 } else {
2922 Label skipBarrier;
2923 Register scratch = R1.scratchReg();
2924 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch,
2925 &skipBarrier);
2927 masm.unboxObject(frame.addressOfStackValue(-1), obj);
2928 masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch,
2929 &skipBarrier);
2930 MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
2931 masm.call(&postBarrierSlot_);
2933 masm.bind(&skipBarrier);
2935 return true;
// NewObject / NewInit both allocate via an IC.
2938 template <typename Handler>
2939 bool BaselineCodeGen<Handler>::emit_NewObject() {
2940 return emitNewObject();
2943 template <typename Handler>
2944 bool BaselineCodeGen<Handler>::emit_NewInit() {
2945 return emitNewObject();
2948 template <typename Handler>
2949 bool BaselineCodeGen<Handler>::emitNewObject() {
2950 frame.syncStack(0);
2952 if (!emitNextIC()) {
2953 return false;
2956 frame.push(R0);
2957 return true;
// JSOp::InitElem: shuffle (obj, key, rhs) so the IC sees obj/key in R0/R1
// with the rhs on the stack, then drop the rhs leaving obj on top.
2960 template <typename Handler>
2961 bool BaselineCodeGen<Handler>::emit_InitElem() {
2962 // Store RHS in the scratch slot.
2963 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
2964 frame.pop();
2966 // Keep object and index in R0 and R1.
2967 frame.popRegsAndSync(2);
2969 // Push the object to store the result of the IC.
2970 frame.push(R0);
2971 frame.syncStack(0);
2973 // Keep RHS on the stack.
2974 frame.pushScratchValue();
2976 // Call IC.
2977 if (!emitNextIC()) {
2978 return false;
2981 // Pop the rhs, so that the object is on the top of the stack.
2982 frame.pop();
2983 return true;
2986 template <typename Handler>
2987 bool BaselineCodeGen<Handler>::emit_InitHiddenElem() {
2988 return emit_InitElem();
2991 template <typename Handler>
2992 bool BaselineCodeGen<Handler>::emit_InitLockedElem() {
2993 return emit_InitElem();
// JSOp::MutateProto: set a plain object's prototype via VM call; object
// stays on the stack, the proto value is popped.
2996 template <typename Handler>
2997 bool BaselineCodeGen<Handler>::emit_MutateProto() {
2998 // Keep values on the stack for the decompiler.
2999 frame.syncStack(0);
3001 masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
3002 masm.loadValue(frame.addressOfStackValue(-1), R1);
3004 prepareVMCall();
3006 pushArg(R1);
3007 pushArg(R0.scratchReg());
3009 using Fn = bool (*)(JSContext*, Handle<PlainObject*>, HandleValue);
3010 if (!callVM<Fn, MutatePrototype>()) {
3011 return false;
3014 frame.pop();
3015 return true;
// JSOp::InitProp (and hidden/locked variants): IC with obj in R0, rhs in R1;
// the rhs is popped, the object stays on the stack.
3018 template <typename Handler>
3019 bool BaselineCodeGen<Handler>::emit_InitProp() {
3020 // Load lhs in R0, rhs in R1.
3021 frame.syncStack(0);
3022 masm.loadValue(frame.addressOfStackValue(-2), R0);
3023 masm.loadValue(frame.addressOfStackValue(-1), R1);
3025 // Call IC.
3026 if (!emitNextIC()) {
3027 return false;
3030 // Leave the object on the stack.
3031 frame.pop();
3032 return true;
3035 template <typename Handler>
3036 bool BaselineCodeGen<Handler>::emit_InitLockedProp() {
3037 return emit_InitProp();
3040 template <typename Handler>
3041 bool BaselineCodeGen<Handler>::emit_InitHiddenProp() {
3042 return emit_InitProp();
// JSOp::GetElem: IC with (lhs, key) in R0/R1; result pushed from R0.
3045 template <typename Handler>
3046 bool BaselineCodeGen<Handler>::emit_GetElem() {
3047 // Keep top two stack values in R0 and R1.
3048 frame.popRegsAndSync(2);
3050 // Call IC.
3051 if (!emitNextIC()) {
3052 return false;
3055 // Mark R0 as pushed stack value.
3056 frame.push(R0);
3057 return true;
// JSOp::GetElemSuper: stash the home object, IC sees (receiver, key) with
// the object on the stack, then replace it with the result.
3060 template <typename Handler>
3061 bool BaselineCodeGen<Handler>::emit_GetElemSuper() {
3062 // Store obj in the scratch slot.
3063 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
3064 frame.pop();
3066 // Keep receiver and index in R0 and R1.
3067 frame.popRegsAndSync(2);
3069 // Keep obj on the stack.
3070 frame.pushScratchValue();
3072 if (!emitNextIC()) {
3073 return false;
3076 frame.pop();
3077 frame.push(R0);
3078 return true;
// JSOp::SetElem: IC with (obj, key) in R0/R1 and the rhs kept on the stack
// as the op's result.
3081 template <typename Handler>
3082 bool BaselineCodeGen<Handler>::emit_SetElem() {
3083 // Store RHS in the scratch slot.
3084 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
3085 frame.pop();
3087 // Keep object and index in R0 and R1.
3088 frame.popRegsAndSync(2);
3090 // Keep RHS on the stack.
3091 frame.pushScratchValue();
3093 // Call IC.
3094 if (!emitNextIC()) {
3095 return false;
3098 return true;
3101 template <typename Handler>
3102 bool BaselineCodeGen<Handler>::emit_StrictSetElem() {
3103 return emit_SetElem();
// SetElemSuper goes through a VM call rather than an IC.
3106 template <typename Handler>
3107 bool BaselineCodeGen<Handler>::emitSetElemSuper(bool strict) {
3108 // Incoming stack is |receiver, propval, obj, rval|. We need to shuffle
3109 // stack to leave rval when operation is complete.
3111 // Pop rval into R0, then load receiver into R1 and replace with rval.
3112 frame.popRegsAndSync(1);
3113 masm.loadValue(frame.addressOfStackValue(-3), R1);
3114 masm.storeValue(R0, frame.addressOfStackValue(-3));
3116 prepareVMCall();
3118 pushArg(Imm32(strict));
3119 pushArg(R0); // rval
3120 masm.loadValue(frame.addressOfStackValue(-2), R0);
3121 pushArg(R0); // propval
3122 pushArg(R1); // receiver
3123 masm.loadValue(frame.addressOfStackValue(-1), R0);
3124 pushArg(R0); // obj
3126 using Fn = bool (*)(JSContext*, HandleValue, HandleValue, HandleValue,
3127 HandleValue, bool);
3128 if (!callVM<Fn, js::SetElementSuper>()) {
3129 return false;
3132 frame.popn(2);
3133 return true;
3136 template <typename Handler>
3137 bool BaselineCodeGen<Handler>::emit_SetElemSuper() {
3138 return emitSetElemSuper(/* strict = */ false);
3141 template <typename Handler>
3142 bool BaselineCodeGen<Handler>::emit_StrictSetElemSuper() {
3143 return emitSetElemSuper(/* strict = */ true);
// DelElem: VM call (strictness selected via template argument); pushes the
// boolean deletion result.
3146 template <typename Handler>
3147 bool BaselineCodeGen<Handler>::emitDelElem(bool strict) {
3148 // Keep values on the stack for the decompiler.
3149 frame.syncStack(0);
3150 masm.loadValue(frame.addressOfStackValue(-2), R0);
3151 masm.loadValue(frame.addressOfStackValue(-1), R1);
3153 prepareVMCall();
3155 pushArg(R1);
3156 pushArg(R0);
3158 using Fn = bool (*)(JSContext*, HandleValue, HandleValue, bool*);
3159 if (strict) {
3160 if (!callVM<Fn, DelElemOperation<true>>()) {
3161 return false;
3163 } else {
3164 if (!callVM<Fn, DelElemOperation<false>>()) {
3165 return false;
3169 masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
3170 frame.popn(2);
3171 frame.push(R1, JSVAL_TYPE_BOOLEAN);
3172 return true;
3175 template <typename Handler>
3176 bool BaselineCodeGen<Handler>::emit_DelElem() {
3177 return emitDelElem(/* strict = */ false);
3180 template <typename Handler>
3181 bool BaselineCodeGen<Handler>::emit_StrictDelElem() {
3182 return emitDelElem(/* strict = */ true);
3185 template <typename Handler>
3186 bool BaselineCodeGen<Handler>::emit_In() {
3187 frame.popRegsAndSync(2);
3189 if (!emitNextIC()) {
3190 return false;
3193 frame.push(R0, JSVAL_TYPE_BOOLEAN);
3194 return true;
3197 template <typename Handler>
3198 bool BaselineCodeGen<Handler>::emit_HasOwn() {
3199 frame.popRegsAndSync(2);
3201 if (!emitNextIC()) {
3202 return false;
3205 frame.push(R0, JSVAL_TYPE_BOOLEAN);
3206 return true;
3209 template <typename Handler>
3210 bool BaselineCodeGen<Handler>::emit_CheckPrivateField() {
3211 // Keep key and val on the stack.
3212 frame.syncStack(0);
3213 masm.loadValue(frame.addressOfStackValue(-2), R0);
3214 masm.loadValue(frame.addressOfStackValue(-1), R1);
3216 if (!emitNextIC()) {
3217 return false;
3220 frame.push(R0, JSVAL_TYPE_BOOLEAN);
3221 return true;
3224 template <typename Handler>
3225 bool BaselineCodeGen<Handler>::emit_NewPrivateName() {
3226 prepareVMCall();
3228 pushScriptNameArg(R0.scratchReg(), R1.scratchReg());
3230 using Fn = JS::Symbol* (*)(JSContext*, Handle<JSAtom*>);
3231 if (!callVM<Fn, NewPrivateName>()) {
3232 return false;
3235 masm.tagValue(JSVAL_TYPE_SYMBOL, ReturnReg, R0);
3236 frame.push(R0);
3237 return true;
3240 template <typename Handler>
3241 bool BaselineCodeGen<Handler>::emit_GetGName() {
3242 frame.syncStack(0);
3244 loadGlobalLexicalEnvironment(R0.scratchReg());
3246 // Call IC.
3247 if (!emitNextIC()) {
3248 return false;
3251 // Mark R0 as pushed stack value.
3252 frame.push(R0);
3253 return true;
// Compiler specialization: if the binding for the name can be resolved
// statically on the global, push it as a constant and skip the IC.
template <>
bool BaselineCompilerCodeGen::tryOptimizeBindGlobalName() {
  JSScript* script = handler.script();
  MOZ_ASSERT(!script->hasNonSyntacticScope());

  Rooted<GlobalObject*> global(cx, &script->global());
  Rooted<PropertyName*> name(cx, script->getName(handler.pc()));
  if (JSObject* binding = MaybeOptimizeBindGlobalName(cx, global, name)) {
    frame.push(ObjectValue(*binding));
    return true;
  }
  return false;
}

template <>
bool BaselineInterpreterCodeGen::tryOptimizeBindGlobalName() {
  // Interpreter doesn't optimize simple BindGNames.
  return false;
}
// JSOp::BindGName: try the static fast path first, otherwise run an IC on the
// global lexical environment.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindGName() {
  if (tryOptimizeBindGlobalName()) {
    return true;
  }

  frame.syncStack(0);
  loadGlobalLexicalEnvironment(R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
// JSOp::BindVar: call BindVarOperation with the current environment chain and
// push the resulting object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindVar() {
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, JSObject*);
  if (!callVM<Fn, BindVarOperation>()) {
    return false;
  }

  // Box the returned object and push it.
  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// JSOp::SetProp: run a SetProp IC on |lhs, rhs|, leaving rhs on the stack as
// the expression result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetProp() {
  // Keep lhs in R0, rhs in R1.
  frame.popRegsAndSync(2);

  // Keep RHS on the stack.
  frame.push(R1);
  frame.syncStack(0);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  return true;
}
// The strict/named/global variants all share emit_SetProp's IC path; the IC
// distinguishes them by inspecting the bytecode op.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetProp() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetGName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetGName() {
  return emit_SetProp();
}
// Shared body for JSOp::SetPropSuper / JSOp::StrictSetPropSuper: call
// js::SetPropertySuper and leave rval as the sole result on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSetPropSuper(bool strict) {
  // Incoming stack is |receiver, obj, rval|. We need to shuffle stack to
  // leave rval when operation is complete.

  // Pop rval into R0, then load receiver into R1 and replace with rval.
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.storeValue(R0, frame.addressOfStackValue(-2));

  prepareVMCall();

  // Args are pushed in reverse order of the Fn signature below.
  pushArg(Imm32(strict));
  pushArg(R0);  // rval
  pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
  pushArg(R1);  // receiver
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  pushArg(R0);  // obj

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue,
                      Handle<PropertyName*>, HandleValue, bool);
  if (!callVM<Fn, js::SetPropertySuper>()) {
    return false;
  }

  // Pop obj; rval remains on top.
  frame.pop();
  return true;
}
// Non-strict and strict entry points for super property assignment.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetPropSuper() {
  return emitSetPropSuper(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetPropSuper() {
  return emitSetPropSuper(/* strict = */ true);
}
// JSOp::GetProp: pop the object into R0 and run a GetProp IC; the IC pushes
// its result via R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetProp() {
  // Keep object in R0.
  frame.popRegsAndSync(1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// JSOp::GetBoundName uses the same IC path as GetProp.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetBoundName() {
  return emit_GetProp();
}
// JSOp::GetPropSuper: consume |obj, receiver| and run an IC with the receiver
// in R1 and the object-or-null in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetPropSuper() {
  // Receiver -> R1, ObjectOrNull -> R0
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  frame.pop();

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// Shared body for JSOp::DelProp / JSOp::StrictDelProp: call DelPropOperation
// (strictness selected at compile time via the template argument) and replace
// the operand with the boolean result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDelProp(bool strict) {
  // Keep value on the stack for the decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();

  pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, Handle<PropertyName*>, bool*);
  if (strict) {
    if (!callVM<Fn, DelPropOperation<true>>()) {
      return false;
    }
  } else {
    if (!callVM<Fn, DelPropOperation<false>>()) {
      return false;
    }
  }

  // Box the VM call's bool result and swap it in for the operand.
  masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
  frame.pop();
  frame.push(R1, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelProp() {
  return emitDelProp(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictDelProp() {
  return emitDelProp(/* strict = */ true);
}
// Compiler specialization: walk |hops| links up the environment chain (known
// statically from the bytecode's EnvironmentCoordinate) into |reg|.
template <>
void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg) {
  EnvironmentCoordinate ec(handler.pc());

  masm.loadPtr(frame.addressOfEnvironmentChain(), reg);
  for (unsigned i = ec.hops(); i; i--) {
    masm.unboxObject(
        Address(reg, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
  }
}

// The interpreter has no static pc, so this helper must not be reached; the
// interpreter uses LoadAliasedVarEnv instead.
template <>
void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg) {
  MOZ_CRASH("Shouldn't call this for interpreter");
}
// Compiler specialization: compute the slot address for the bytecode's
// EnvironmentCoordinate, loading the dynamic-slots pointer into |reg| when
// the slot is not a fixed slot.
template <>
Address BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  EnvironmentCoordinate ec(handler.pc());

  if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
    return Address(objReg, NativeObject::getFixedSlotOffset(ec.slot()));
  }

  uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
  masm.loadPtr(Address(objReg, NativeObject::offsetOfSlots()), reg);
  return Address(reg, slot * sizeof(Value));
}

// Not reachable for the interpreter (no static EnvironmentCoordinate).
template <>
Address BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  MOZ_CRASH("Shouldn't call this for interpreter");
}
// Convenience wrapper: resolve the environment object into |reg|, then return
// the address of the coordinate's slot (may clobber |reg| for dynamic slots).
template <typename Handler>
Address BaselineCodeGen<Handler>::getEnvironmentCoordinateAddress(
    Register reg) {
  getEnvironmentCoordinateObject(reg);
  return getEnvironmentCoordinateAddressFromObject(reg, reg);
}
// For a JOF_ENVCOORD op load the number of hops from the bytecode and skip this
// number of environment objects.
static void LoadAliasedVarEnv(MacroAssembler& masm, Register env,
                              Register scratch) {
  static_assert(ENVCOORD_HOPS_LEN == 1,
                "Code assumes number of hops is stored in uint8 operand");
  LoadUint8Operand(masm, scratch);

  // Loop: follow enclosing-environment links until the hop counter hits zero.
  Label top, done;
  masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
  masm.bind(&top);
  {
    Address nextEnv(env, EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(nextEnv, env);
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
  }
  masm.bind(&done);
}
// Compiler specialization: the coordinate is static, so the slot address can
// be computed directly and loaded into |dest|.
template <>
void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest) {
  frame.syncStack(0);

  Address address = getEnvironmentCoordinateAddress(R0.scratchReg());
  masm.loadValue(address, dest);
}
// Interpreter specialization: hops and slot index come from the bytecode
// operands at runtime, and the fixed-vs-dynamic slot decision is made with a
// runtime branch on the slot index.
template <>
void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest) {
  Register env = R0.scratchReg();
  Register scratch = R1.scratchReg();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch);

  // Load the Value from a fixed or dynamic slot.
  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), env);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
  }
  masm.bind(&done);
}
// Debug-mode aliased-var read: defer to the LoadAliasedDebugVar VM function,
// which resolves the coordinate from the bytecode pc.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitGetAliasedDebugVar(ValueOperand dest) {
  frame.syncStack(0);
  Register env = R0.scratchReg();
  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);

  prepareVMCall();
  pushBytecodePCArg();
  pushArg(env);

  using Fn =
      bool (*)(JSContext*, JSObject* env, jsbytecode*, MutableHandleValue);
  return callVM<Fn, LoadAliasedDebugVar>();
}
// JSOp::GetAliasedDebugVar: VM-call variant used when the environment may be
// a debug proxy.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedDebugVar() {
  if (!emitGetAliasedDebugVar(R0)) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::GetAliasedVar: inline read of an aliased environment slot.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedVar() {
  emitGetAliasedVar(R0);

  frame.push(R0);
  return true;
}
// Compiler specialization of JSOp::SetAliasedVar: store to the statically
// known slot, with GC pre/post write barriers.
template <>
bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
  // Keep rvalue in R0.
  frame.popRegsAndSync(1);
  Register objReg = R2.scratchReg();

  getEnvironmentCoordinateObject(objReg);
  Address address =
      getEnvironmentCoordinateAddressFromObject(objReg, R1.scratchReg());
  masm.guardedCallPreBarrier(address, MIRType::Value);
  masm.storeValue(R0, address);
  frame.push(R0);

  // Only R0 is live at this point.
  // Scope coordinate object is already in R2.scratchReg().
  Register temp = R1.scratchReg();

  // Post barrier: needed only when a tenured object stores a nursery cell.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

  masm.call(&postBarrierSlot_);  // Won't clobber R0

  masm.bind(&skipBarrier);
  return true;
}
// Interpreter specialization of JSOp::SetAliasedVar: resolve hops/slot at
// runtime, compute the slot address once so the pre-barrier is emitted once,
// store, then run the post barrier.
template <>
bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
  // Carve scratch registers out of the full set; R2 holds the RHS and the
  // interpreter pc register (if any) must stay live.
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.take(R2);
  if (HasInterpreterPCReg()) {
    regs.take(InterpreterPCReg);
  }

  Register env = regs.takeAny();
  Register scratch1 = regs.takeAny();
  Register scratch2 = regs.takeAny();
  Register scratch3 = regs.takeAny();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch1);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch1);

  // Store the RHS Value in R2.
  masm.loadValue(frame.addressOfStackValue(-1), R2);

  // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
  // guardedCallPreBarrierAnyZone once to avoid code bloat.

  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch1,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    BaseValueIndex slotAddr(env, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), scratch2);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    BaseValueIndex slotAddr(scratch2, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
  }
  masm.bind(&done);

  // Pre-barrier and store.
  Address slotAddr(scratch2, 0);
  masm.guardedCallPreBarrierAnyZone(slotAddr, MIRType::Value, scratch3);
  masm.storeValue(R2, slotAddr);

  // Post barrier.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, env, scratch1, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R2, scratch1,
                                &skipBarrier);
  {
    // Post barrier code expects the object in R2.
    masm.movePtr(env, R2.scratchReg());
    masm.call(&postBarrierSlot_);
  }
  masm.bind(&skipBarrier);
  return true;
}
// JSOp::GetName: run a GetName IC with the current environment chain in R0's
// scratch register.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetName() {
  frame.syncStack(0);

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
// JSOp::BindName: run a BindName IC with the current environment chain in
// R0's scratch register.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindName() {
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
// JSOp::DelName: call DeleteNameOperation with the name operand and the
// environment chain, and push the result value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelName() {
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();

  pushArg(R0.scratchReg());
  pushScriptNameArg(R1.scratchReg(), R2.scratchReg());

  using Fn = bool (*)(JSContext*, Handle<PropertyName*>, HandleObject,
                      MutableHandleValue);
  if (!callVM<Fn, js::DeleteNameOperation>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// Compiler specialization of JSOp::GetImport: the import binding is resolved
// at compile time, so the target environment slot is baked into the code.
template <>
bool BaselineCompilerCodeGen::emit_GetImport() {
  JSScript* script = handler.script();
  ModuleEnvironmentObject* env = GetModuleEnvironmentForScript(script);
  MOZ_ASSERT(env);

  jsid id = NameToId(script->getName(handler.pc()));
  ModuleEnvironmentObject* targetEnv;
  Maybe<PropertyInfo> prop;
  MOZ_ALWAYS_TRUE(env->lookupImport(id, &targetEnv, &prop));

  frame.syncStack(0);

  // Load the value from the target environment's fixed or dynamic slot.
  uint32_t slot = prop->slot();
  Register scratch = R0.scratchReg();
  masm.movePtr(ImmGCPtr(targetEnv), scratch);
  if (slot < targetEnv->numFixedSlots()) {
    masm.loadValue(Address(scratch, NativeObject::getFixedSlotOffset(slot)),
                   R0);
  } else {
    masm.loadPtr(Address(scratch, NativeObject::offsetOfSlots()), scratch);
    masm.loadValue(
        Address(scratch, (slot - targetEnv->numFixedSlots()) * sizeof(Value)),
        R0);
  }

  // Imports are initialized by this point except in rare circumstances, so
  // don't emit a check unless we have to.
  if (targetEnv->getSlot(slot).isMagic(JS_UNINITIALIZED_LEXICAL)) {
    if (!emitUninitializedLexicalCheck(R0)) {
      return false;
    }
  }

  frame.push(R0);
  return true;
}
// Interpreter specialization of JSOp::GetImport: resolve the import through
// the GetImportOperation VM call.
template <>
bool BaselineInterpreterCodeGen::emit_GetImport() {
  frame.syncStack(0);

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();

  pushBytecodePCArg();
  pushScriptArg();
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, HandleScript, jsbytecode*,
                      MutableHandleValue);
  if (!callVM<Fn, GetImportOperation>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// JSOp::GetIntrinsic: run an IC to load the named intrinsic and push it.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetIntrinsic() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// JSOp::SetIntrinsic: call SetIntrinsicOperation with the value on top of the
// stack; the value stays on the stack as the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetIntrinsic() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();

  pushArg(R0);
  pushBytecodePCArg();
  pushScriptArg();

  using Fn = bool (*)(JSContext*, JSScript*, jsbytecode*, HandleValue);
  return callVM<Fn, SetIntrinsicOperation>();
}
// JSOp::GlobalOrEvalDeclInstantiation: VM call that hoists global/eval var
// and function declarations; the last-fun index comes from the bytecode
// operand.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GlobalOrEvalDeclInstantiation() {
  frame.syncStack(0);

  prepareVMCall();

  loadInt32LengthBytecodeOperand(R0.scratchReg());
  pushArg(R0.scratchReg());
  pushScriptArg();
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, HandleScript, GCThingIndex);
  return callVM<Fn, js::GlobalOrEvalDeclInstantiation>();
}
// Shared body for the InitProp{Getter,Setter} family: the stack holds
// |obj, accessor|; call InitPropGetterSetterOperation and pop the accessor.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInitPropGetterSetter() {
  // Keep values on the stack for the decompiler.
  frame.syncStack(0);

  prepareVMCall();

  masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
  masm.unboxObject(frame.addressOfStackValue(-2), R1.scratchReg());

  pushArg(R0.scratchReg());
  pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
  pushArg(R1.scratchReg());
  pushBytecodePCArg();

  using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject,
                      Handle<PropertyName*>, HandleObject);
  if (!callVM<Fn, InitPropGetterSetterOperation>()) {
    return false;
  }

  frame.pop();
  return true;
}

// The four (hidden/non-hidden × getter/setter) ops share one body; the VM
// function keys off the bytecode op.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitPropGetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenPropGetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitPropSetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenPropSetter() {
  return emitInitPropGetterSetter();
}
// Shared body for the InitElem{Getter,Setter} family: the stack holds
// |obj, id, accessor|; call InitElemGetterSetterOperation and pop id and
// accessor.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInitElemGetterSetter() {
  // Load index and value in R0 and R1, but keep values on the stack for the
  // decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());

  prepareVMCall();

  pushArg(R1.scratchReg());
  pushArg(R0);
  masm.unboxObject(frame.addressOfStackValue(-3), R0.scratchReg());
  pushArg(R0.scratchReg());
  pushBytecodePCArg();

  using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, HandleValue,
                      HandleObject);
  if (!callVM<Fn, InitElemGetterSetterOperation>()) {
    return false;
  }

  frame.popn(2);
  return true;
}

// The four (hidden/non-hidden × getter/setter) ops share one body; the VM
// function keys off the bytecode op.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemSetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemSetter() {
  return emitInitElemGetterSetter();
}
// JSOp::InitElemInc: run an InitElem IC on |obj, index, rhs|, pop the rhs,
// and bump the int32 index left on the stack for the next iteration.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemInc() {
  // Keep the object and rhs on the stack.
  frame.syncStack(0);

  // Load object in R0, index in R1.
  masm.loadValue(frame.addressOfStackValue(-3), R0);
  masm.loadValue(frame.addressOfStackValue(-2), R1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Pop the rhs
  frame.pop();

  // Increment index
  Address indexAddr = frame.addressOfStackValue(-1);
#ifdef DEBUG
  Label isInt32;
  masm.branchTestInt32(Assembler::Equal, indexAddr, &isInt32);
  masm.assumeUnreachable("INITELEM_INC index must be Int32");
  masm.bind(&isInt32);
#endif
  masm.incrementInt32Value(indexAddr);
  return true;
}
// Compiler specialization of JSOp::GetLocal: the local's index is a static
// bytecode operand.
template <>
bool BaselineCompilerCodeGen::emit_GetLocal() {
  frame.pushLocal(GET_LOCALNO(handler.pc()));
  return true;
}

static BaseValueIndex ComputeAddressOfLocal(MacroAssembler& masm,
                                            Register indexScratch) {
  // Locals are stored in memory at a negative offset from the frame pointer. We
  // negate the index first to effectively subtract it.
  masm.negPtr(indexScratch);
  return BaseValueIndex(FramePointer, indexScratch,
                        BaselineFrame::reverseOffsetOfLocal(0));
}

// Interpreter specialization: read the local index from the bytecode operand
// at runtime.
template <>
bool BaselineInterpreterCodeGen::emit_GetLocal() {
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}
// Compiler specialization of JSOp::SetLocal: store the top stack value into
// the statically known local slot.
template <>
bool BaselineCompilerCodeGen::emit_SetLocal() {
  // Ensure no other StackValue refers to the old value, for instance i + (i =
  // 3). This also allows us to use R0 as scratch below.
  frame.syncStack(1);

  uint32_t local = GET_LOCALNO(handler.pc());
  frame.storeStackValue(-1, frame.addressOfLocal(local), R0);
  return true;
}

// Interpreter specialization: compute the local's address from the runtime
// operand, then store the top stack value.
template <>
bool BaselineInterpreterCodeGen::emit_SetLocal() {
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  masm.storeValue(R1, addr);
  return true;
}
// Compiler specialization of GetArg/SetArg: use frame slots directly unless
// the formals alias the arguments object, in which case go through the
// ArgumentsData vector (with write barriers for SetArg).
template <>
bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op) {
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  uint32_t arg = GET_ARGNO(handler.pc());

  // Fast path: the script does not use |arguments| or formals don't
  // alias the arguments object.
  if (!handler.script()->argsObjAliasesFormals()) {
    if (op == JSOp::GetArg) {
      frame.pushArg(arg);
    } else {
      // See the comment in emit_SetLocal.
      frame.syncStack(1);
      frame.storeStackValue(-1, frame.addressOfArg(arg), R0);
    }

    return true;
  }

  // Sync so that we can use R0.
  frame.syncStack(0);

  // Load the arguments object data vector.
  Register reg = R2.scratchReg();
  masm.loadPtr(frame.addressOfArgsObj(), reg);
  masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

  // Load/store the argument.
  Address argAddr(reg, ArgumentsData::offsetOfArgs() + arg * sizeof(Value));
  if (op == JSOp::GetArg) {
    masm.loadValue(argAddr, R0);
    frame.push(R0);
  } else {
    Register temp = R1.scratchReg();
    masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value, temp);
    masm.loadValue(frame.addressOfStackValue(-1), R0);
    masm.storeValue(R0, argAddr);

    MOZ_ASSERT(frame.numUnsyncedSlots() == 0);

    // Reload the arguments object.
    Register reg = R2.scratchReg();
    masm.loadPtr(frame.addressOfArgsObj(), reg);

    Label skipBarrier;

    masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

    masm.call(&postBarrierSlot_);

    masm.bind(&skipBarrier);
  }

  return true;
}
// Interpreter specialization of GetArg/SetArg: decide aliased vs. unaliased
// access at runtime by checking the frame flags and the script's
// HasMappedArgsObj flag.
template <>
bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op) {
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  // Load the index.
  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  // If the frame has no arguments object, this must be an unaliased access.
  Label isUnaliased, done;
  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::HAS_ARGS_OBJ), &isUnaliased);

  Register reg = R2.scratchReg();

  // If it's an unmapped arguments object, this is an unaliased access.
  loadScript(reg);
  masm.branchTest32(
      Assembler::Zero, Address(reg, JSScript::offsetOfImmutableFlags()),
      Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj)),
      &isUnaliased);

  // Load the arguments object data vector.
  masm.loadPtr(frame.addressOfArgsObj(), reg);
  masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

  // Load/store the argument.
  BaseValueIndex argAddr(reg, argReg, ArgumentsData::offsetOfArgs());
  if (op == JSOp::GetArg) {
    masm.loadValue(argAddr, R0);
    frame.push(R0);
  } else {
    masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value,
                                      R0.scratchReg());
    masm.loadValue(frame.addressOfStackValue(-1), R0);
    masm.storeValue(R0, argAddr);

    // Reload the arguments object.
    masm.loadPtr(frame.addressOfArgsObj(), reg);

    Register temp = R1.scratchReg();
    masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &done);
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &done);

    masm.call(&postBarrierSlot_);
  }
  masm.jump(&done);

  masm.bind(&isUnaliased);
  {
    // Unaliased access: read/write the actual-args slot in the frame.
    BaseValueIndex addr(FramePointer, argReg,
                        JitFrameLayout::offsetOfActualArgs());
    if (op == JSOp::GetArg) {
      masm.loadValue(addr, R0);
      frame.push(R0);
    } else {
      masm.loadValue(frame.addressOfStackValue(-1), R0);
      masm.storeValue(R0, addr);
    }
  }

  masm.bind(&done);
  return true;
}
// Entry points for formal-argument reads and writes.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetArg() {
  return emitFormalArgAccess(JSOp::GetArg);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetArg() {
  return emitFormalArgAccess(JSOp::SetArg);
}
// Interpreter specialization of JSOp::GetFrameArg: load the actual argument
// at the runtime operand index straight from the frame.
template <>
bool BaselineInterpreterCodeGen::emit_GetFrameArg() {
  frame.syncStack(0);

  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  BaseValueIndex addr(FramePointer, argReg,
                      JitFrameLayout::offsetOfActualArgs());
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}

// Compiler specialization: the index is static, so delegate to the frame.
template <>
bool BaselineCompilerCodeGen::emit_GetFrameArg() {
  uint32_t arg = GET_ARGNO(handler.pc());
  frame.pushArg(arg);
  return true;
}
// JSOp::ArgumentsLength: push the frame's number of actual arguments as an
// int32 Value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ArgumentsLength() {
  frame.syncStack(0);

  masm.loadNumActualArgs(FramePointer, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);

  frame.push(R0);
  return true;
}
// JSOp::GetActualArg: pop an int32 index (asserted in-range in debug builds)
// and push the corresponding actual argument from the frame.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetActualArg() {
  frame.popRegsAndSync(1);

#ifdef DEBUG
  {
    Label ok;
    masm.branchTestInt32(Assembler::Equal, R0, &ok);
    masm.assumeUnreachable("GetActualArg unexpected type");
    masm.bind(&ok);
  }
#endif

  Register index = R0.scratchReg();
  masm.unboxInt32(R0, index);

#ifdef DEBUG
  {
    Label ok;
    masm.loadNumActualArgs(FramePointer, R1.scratchReg());
    masm.branch32(Assembler::Above, R1.scratchReg(), index, &ok);
    masm.assumeUnreachable("GetActualArg invalid index");
    masm.bind(&ok);
  }
#endif

  BaseValueIndex addr(FramePointer, index,
                      JitFrameLayout::offsetOfActualArgs());
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}
// Compiler specialization: the formal-arg count is known at compile time.
template <>
void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest) {
  masm.move32(Imm32(handler.function()->nargs()), dest);
}

// Interpreter specialization: read nargs from the callee function at runtime.
template <>
void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest) {
  masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), dest);
  masm.loadFunctionArgCount(dest, dest);
}
// JSOp::NewTarget: push new.target — the extra argv slot past
// max(numActualArgs, numFormalArgs) when constructing, undefined otherwise.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewTarget() {
  MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow());

  frame.syncStack(0);

#ifdef DEBUG
  // Debug-only sanity checks: the callee token must be a non-arrow function.
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();

  Label isFunction;
  masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
  masm.branchTestPtr(Assembler::Zero, scratch1, Imm32(CalleeTokenScriptBit),
                     &isFunction);
  masm.assumeUnreachable("Unexpected non-function script");
  masm.bind(&isFunction);

  Label notArrow;
  masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
  masm.branchFunctionKind(Assembler::NotEqual,
                          FunctionFlags::FunctionKind::Arrow, scratch1,
                          scratch2, &notArrow);
  masm.assumeUnreachable("Unexpected arrow function");
  masm.bind(&notArrow);
#endif

  // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)])
  Label notConstructing, done;
  masm.branchTestPtr(Assembler::Zero, frame.addressOfCalleeToken(),
                     Imm32(CalleeToken_FunctionConstructing), &notConstructing);
  {
    Register argvLen = R0.scratchReg();
    Register nformals = R1.scratchReg();
    masm.loadNumActualArgs(FramePointer, argvLen);

    // If argvLen < nformals, set argvlen := nformals.
    loadNumFormalArguments(nformals);
    masm.cmp32Move32(Assembler::Below, argvLen, nformals, nformals, argvLen);

    BaseValueIndex newTarget(FramePointer, argvLen,
                             JitFrameLayout::offsetOfActualArgs());
    masm.loadValue(newTarget, R0);
    masm.jump(&done);
  }
  // else push(undefined)
  masm.bind(&notConstructing);
  masm.moveValue(UndefinedValue(), R0);

  masm.bind(&done);
  frame.push(R0);
  return true;
}
// JSOp::ThrowSetConst: unconditionally throw the const-assignment error via
// the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowSetConst() {
  prepareVMCall();
  pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN));

  using Fn = bool (*)(JSContext*, unsigned);
  return callVM<Fn, jit::ThrowRuntimeLexicalError>();
}
// If |val| is the uninitialized-lexical magic value, throw the TDZ error via
// the VM; otherwise fall through.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitUninitializedLexicalCheck(
    const ValueOperand& val) {
  Label done;
  masm.branchTestMagicValue(Assembler::NotEqual, val, JS_UNINITIALIZED_LEXICAL,
                            &done);

  prepareVMCall();
  pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL));

  using Fn = bool (*)(JSContext*, unsigned);
  if (!callVM<Fn, jit::ThrowRuntimeLexicalError>()) {
    return false;
  }

  masm.bind(&done);
  return true;
}
// JSOp::CheckLexical: TDZ-check the value on top of the stack in place.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckLexical() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  return emitUninitializedLexicalCheck(R0);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckAliasedLexical() {
  return emit_CheckLexical();
}

// Lexical initialization reuses the corresponding store ops.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLexical() {
  return emit_SetLocal();
}

// JSOp::InitGLexical: route through emit_SetProp with the global lexical
// environment as the lhs.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitGLexical() {
  frame.popRegsAndSync(1);
  pushGlobalLexicalEnvironmentValue(R1);
  frame.push(R0);
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitAliasedLexical() {
  return emit_SetAliasedVar();
}

// JSOp::Uninitialized: push the TDZ sentinel value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Uninitialized() {
  frame.push(MagicValue(JS_UNINITIALIZED_LEXICAL));
  return true;
}
// Compiler specialization of the call ops: argc is a static bytecode operand;
// the IC expects it in R0's scratch register.
template <>
bool BaselineCompilerCodeGen::emitCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);

  uint32_t argc = GET_ARGC(handler.pc());
  masm.move32(Imm32(argc), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo: pop callee/this (+ newTarget when constructing) and the
  // args, then push the return value.
  bool construct = IsConstructOp(op);
  frame.popn(2 + argc + construct);
  frame.push(R0);
  return true;
}
// Interpreter specialization of the call ops: argc comes from the bytecode at
// runtime, so the stack is popped by adjusting sp directly after the IC.
template <>
bool BaselineInterpreterCodeGen::emitCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  // The IC expects argc in R0.
  LoadUint16Operand(masm, R0.scratchReg());
  if (!emitNextIC()) {
    return false;
  }

  // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
  // The return value is in R0 so we can't use that.
  Register scratch = R1.scratchReg();
  uint32_t extraValuesToPop = IsConstructOp(op) ? 3 : 2;
  Register spReg = AsRegister(masm.getStackPointer());
  LoadUint16Operand(masm, scratch);
  masm.computeEffectiveAddress(
      BaseValueIndex(spReg, scratch, extraValuesToPop * sizeof(Value)), spReg);
  frame.push(R0);
  return true;
}
// Shared body for the spread-call ops: spread calls always pass a single
// packed-array argument, so argc is 1.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSpreadCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);
  masm.move32(Imm32(1), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo: pop callee/this/args-array (+ newTarget when
  // constructing) and push the return value.
  bool construct = op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall;
  frame.popn(3 + construct);
  frame.push(R0);
  return true;
}
// The individual invoke ops all funnel into emitCall/emitSpreadCall; the JSOp
// is forwarded so those helpers can distinguish construct/spread variants.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Call() {
  return emitCall(JSOp::Call);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallContent() {
  return emitCall(JSOp::CallContent);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallIgnoresRv() {
  return emitCall(JSOp::CallIgnoresRv);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallIter() {
  return emitCall(JSOp::CallIter);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallContentIter() {
  return emitCall(JSOp::CallContentIter);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_New() {
  return emitCall(JSOp::New);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewContent() {
  return emitCall(JSOp::NewContent);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SuperCall() {
  return emitCall(JSOp::SuperCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Eval() {
  return emitCall(JSOp::Eval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictEval() {
  return emitCall(JSOp::StrictEval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadCall() {
  return emitSpreadCall(JSOp::SpreadCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadNew() {
  return emitSpreadCall(JSOp::SpreadNew);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadSuperCall() {
  return emitSpreadCall(JSOp::SpreadSuperCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadEval() {
  return emitSpreadCall(JSOp::SpreadEval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSpreadEval() {
  return emitSpreadCall(JSOp::StrictSpreadEval);
}
// JSOp::OptimizeSpreadCall: pop the spread operand into R0 and let the IC
// produce the value used for the subsequent spread call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_OptimizeSpreadCall() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// JSOp::ImplicitThis: compute the implicit |this| for a call to a name.
// Always a VM call; the environment chain and the name operand from the
// script are passed to ImplicitThisOperation, result comes back in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ImplicitThis() {
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();

  pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, Handle<PropertyName*>,
                      MutableHandleValue);
  if (!callVM<Fn, ImplicitThisOperation>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// JSOp::Instanceof: pop both operands into registers and dispatch to the
// instanceof IC; the IC produces a boolean in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Instanceof() {
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::Typeof: pop the operand and let the typeof IC compute the result
// string in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Typeof() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::TypeofExpr: identical codegen to Typeof (the ops differ only in how
// the parser treats undeclared names).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TypeofExpr() {
  return emit_Typeof();
}
// JSOp::ThrowMsg: throw a pre-defined error identified by the op's uint8
// message-number operand. Pure VM call; never returns normally.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowMsg() {
  prepareVMCall();
  pushUint8BytecodeOperandArg(R2.scratchReg());

  using Fn = bool (*)(JSContext*, const unsigned);
  return callVM<Fn, js::ThrowMsgOperation>();
}

// JSOp::Throw: pop the exception value and throw it via the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Throw() {
  // Keep value to throw in R0.
  frame.popRegsAndSync(1);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  return callVM<Fn, js::ThrowOperation>();
}

// JSOp::ThrowWithStack: like Throw, but also passes an explicit stack value
// so the pending exception keeps its original stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowWithStack() {
  // Keep value to throw in R0 and the stack in R1.
  frame.popRegsAndSync(2);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue);
  return callVM<Fn, js::ThrowWithStackOperation>();
}
// JSOp::Try: no code needed; the try-note table drives exception handling.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Try() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Finally() {
  // To match the interpreter, emit an interrupt check at the start of the
  // finally block.
  return emitInterruptCheck();
}
// Load the address of the BaselineScript's native resume-entry table into
// |dest|. The table lives at a fixed offset inside the BaselineScript, which
// is reached through the script's JitScript.
static void LoadBaselineScriptResumeEntries(MacroAssembler& masm,
                                            JSScript* script, Register dest,
                                            Register scratch) {
  MOZ_ASSERT(dest != scratch);

  masm.movePtr(ImmPtr(script->jitScript()), dest);
  masm.loadPtr(Address(dest, JitScript::offsetOfBaselineScript()), dest);
  masm.load32(Address(dest, BaselineScript::offsetOfResumeEntriesOffset()),
              scratch);
  masm.addPtr(scratch, dest);
}

// Interpreter resume: translate |resumeIndex| into a bytecode pc via the
// script's resume-offsets table, store that pc in the frame's interpreter-pc
// slot, and jump to the generic interpretOp dispatch. All three input
// registers are clobbered.
template <typename Handler>
void BaselineCodeGen<Handler>::emitInterpJumpToResumeEntry(Register script,
                                                           Register resumeIndex,
                                                           Register scratch) {
  // Load JSScript::immutableScriptData() into |script|.
  masm.loadPtr(Address(script, JSScript::offsetOfSharedData()), script);
  masm.loadPtr(Address(script, SharedImmutableScriptData::offsetOfISD()),
               script);

  // Load the resume pcOffset in |resumeIndex|.
  // The table entries are 32-bit offsets, hence the TimesFour scale.
  masm.load32(
      Address(script, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
      scratch);
  masm.computeEffectiveAddress(BaseIndex(scratch, resumeIndex, TimesFour),
                               scratch);
  masm.load32(BaseIndex(script, scratch, TimesOne), resumeIndex);

  // Add resume offset to PC, jump to it.
  masm.computeEffectiveAddress(BaseIndex(script, resumeIndex, TimesOne,
                                         ImmutableScriptData::offsetOfCode()),
                               script);
  Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
  masm.storePtr(script, pcAddr);
  emitJumpToInterpretOpLabel();
}
// Compiler resume: index directly into the BaselineScript's table of native
// code resume entries and jump there.
template <>
void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex,
                                                Register scratch1,
                                                Register scratch2) {
  LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
  masm.loadPtr(
      BaseIndex(scratch1, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
      scratch1);
  masm.jump(scratch1);
}

// Interpreter resume: no per-script native entries exist, so go through the
// bytecode-pc based resume path instead.
template <>
void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex,
                                                   Register scratch1,
                                                   Register scratch2) {
  loadScript(scratch1);
  emitInterpJumpToResumeEntry(scratch1, resumeIndex, scratch2);
}
template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineCompilerCodeGen::emitDebugInstrumentation(
    const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
  // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
  // knows statically whether we're compiling with debug instrumentation.

  if (handler.compileDebugInstrumentation()) {
    return ifDebuggee();
  }

  if (ifNotDebuggee) {
    return (*ifNotDebuggee)();
  }

  return true;
}

template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
    const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
  // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
  // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
  // flag.

  Label isNotDebuggee, done;

  // The toggled jump is patched when debugger instrumentation is
  // (de)activated; its offset is recorded so it can be found later.
  CodeOffset toggleOffset = masm.toggledJump(&isNotDebuggee);
  if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
    return false;
  }

  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::DEBUGGEE), &isNotDebuggee);

  if (!ifDebuggee()) {
    return false;
  }

  if (ifNotDebuggee) {
    masm.jump(&done);
  }

  masm.bind(&isNotDebuggee);

  if (ifNotDebuggee && !(*ifNotDebuggee)()) {
    return false;
  }

  masm.bind(&done);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushLexicalEnv() {
  // Call a stub to push the block on the block chain.
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<LexicalScope*>);
  return callVM<Fn, jit::PushLexicalEnv>();
}

// JSOp::PushClassBodyEnv: same shape as PushLexicalEnv, but pushes a class
// body environment for the op's scope operand.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushClassBodyEnv() {
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<ClassBodyScope*>);
  return callVM<Fn, jit::PushClassBodyEnv>();
}
// JSOp::PopLexicalEnv: pop the innermost lexical environment off the
// environment chain. Debuggee frames take a VM call so the debugger can
// observe leaving the scope; otherwise the enclosing environment is loaded
// inline.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PopLexicalEnv() {
  frame.syncStack(0);

  Register scratch1 = R0.scratchReg();

  auto ifDebuggee = [this, scratch1]() {
    masm.loadBaselineFramePtr(FramePointer, scratch1);

    prepareVMCall();
    pushBytecodePCArg();
    pushArg(scratch1);

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebugLeaveThenPopLexicalEnv>();
  };
  auto ifNotDebuggee = [this, scratch1]() {
    // Fast path: env = env->enclosingEnvironment().
    Register scratch2 = R1.scratchReg();
    masm.loadPtr(frame.addressOfEnvironmentChain(), scratch1);
    masm.debugAssertObjectHasClass(scratch1, scratch2,
                                   &LexicalEnvironmentObject::class_);
    Address enclosingAddr(scratch1,
                          EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(enclosingAddr, scratch1);
    masm.storePtr(scratch1, frame.addressOfEnvironmentChain());
    return true;
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}
// JSOp::FreshenLexicalEnv: replace the current lexical environment with a
// fresh copy (used at the bottom of for-loop bodies so each iteration sees
// new bindings). Both paths are VM calls; the debuggee variant also notifies
// the debugger. The frame pointer is loaded into R0 up-front for both paths.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FreshenLexicalEnv() {
  frame.syncStack(0);

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  auto ifDebuggee = [this]() {
    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebuggeeFreshenLexicalEnv>();
  };
  auto ifNotDebuggee = [this]() {
    prepareVMCall();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    return callVM<Fn, jit::FreshenLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}

// JSOp::RecreateLexicalEnv: like FreshenLexicalEnv but recreates the
// environment with uninitialized bindings (loop entry), again with a
// debugger-aware variant.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RecreateLexicalEnv() {
  frame.syncStack(0);

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  auto ifDebuggee = [this]() {
    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebuggeeRecreateLexicalEnv>();
  };
  auto ifNotDebuggee = [this]() {
    prepareVMCall();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    return callVM<Fn, jit::RecreateLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}
// JSOp::DebugLeaveLexicalEnv: only does work when the frame is a debuggee —
// notifies the debugger that a lexical scope is being left. No
// ifNotDebuggee path, so non-debuggee frames emit nothing (compiler) or
// skip (interpreter).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DebugLeaveLexicalEnv() {
  auto ifDebuggee = [this]() {
    prepareVMCall();
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebugLeaveLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee);
}

// JSOp::PushVarEnv: push a var environment for the op's scope operand via
// the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushVarEnv() {
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<Scope*>);
  return callVM<Fn, jit::PushVarEnv>();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_EnterWith() {
  // Pop "with" object to R0.
  frame.popRegsAndSync(1);

  // Call a stub to push the object onto the environment chain.
  prepareVMCall();

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0);
  masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
  pushArg(R1.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, HandleValue, Handle<WithScope*>);
  return callVM<Fn, jit::EnterWith>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_LeaveWith() {
  // Call a stub to pop the with object from the environment chain.
  prepareVMCall();

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  return callVM<Fn, jit::LeaveWith>();
}
// JSOp::Exception: fetch the pending exception, clear the exception state,
// and push the exception value (used at the start of catch blocks).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Exception() {
  prepareVMCall();

  using Fn = bool (*)(JSContext*, MutableHandleValue);
  if (!callVM<Fn, GetAndClearException>()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::ExceptionAndStack: like Exception but also produces the exception's
// stack. Two VM calls are needed because GetAndClearException clears the
// state the stack is read from, so the stack is fetched first.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ExceptionAndStack() {
  // First call into the VM to store the exception stack.
  prepareVMCall();

  using Fn = bool (*)(JSContext*, MutableHandleValue);
  if (!callVM<Fn, GetPendingExceptionStack>()) {
    return false;
  }

  frame.push(R0);

  // Now get the actual exception value and clear the exception state.
  prepareVMCall();

  using Fn = bool (*)(JSContext*, MutableHandleValue);
  if (!callVM<Fn, GetAndClearException>()) {
    return false;
  }

  frame.push(R0);

  // Finally swap the stack and the exception.
  frame.popRegsAndSync(2);
  frame.push(R1);
  frame.push(R0);

  return true;
}
// JSOp::Debugger: invoke the debugger-statement hook via the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Debugger() {
  prepareVMCall();

  frame.assertSyncedStack();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVM<Fn, jit::OnDebuggerStatement>()) {
    return false;
  }

  return true;
}
// For debuggee frames, report the function return to the debugger. The
// return value is spilled to the frame's rval slot so the VM call can
// observe (and possibly replace) it, then reloaded afterwards.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDebugEpilogue() {
  auto ifDebuggee = [this]() {
    // Move return value into the frame's rval slot.
    masm.storeValue(JSReturnOperand, frame.addressOfReturnValue());
    masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());

    // Load BaselineFrame pointer in R0.
    frame.syncStack(0);
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugEpilogue;

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    if (!callVM<Fn, jit::DebugEpilogueOnBaselineReturn>(kind)) {
      return false;
    }

    masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
    return true;
  };
  return emitDebugInstrumentation(ifDebuggee);
}

// Common tail for all return ops: run the debug epilogue if required, then
// branch to the shared return path.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitReturn() {
  if (handler.shouldEmitDebugEpilogueAtReturnOp()) {
    if (!emitDebugEpilogue()) {
      return false;
    }
  }

  // Only emit the jump if this JSOp::RetRval is not the last instruction.
  // Not needed for last instruction, because last instruction flows
  // into return label.
  if (!handler.isDefinitelyLastOp()) {
    masm.jump(&return_);
  }

  return true;
}
// JSOp::Return: pop the sole stack value into the return-value register and
// take the common return path.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Return() {
  frame.assertStackDepth(1);

  frame.popValue(JSReturnOperand);
  return emitReturn();
}

// Load the frame's stored return value into |val|, or |undefined| when the
// HAS_RVAL flag is not set.
template <typename Handler>
void BaselineCodeGen<Handler>::emitLoadReturnValue(ValueOperand val) {
  Label done, noRval;
  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::HAS_RVAL), &noRval);
  masm.loadValue(frame.addressOfReturnValue(), val);
  masm.jump(&done);

  masm.bind(&noRval);
  masm.moveValue(UndefinedValue(), val);

  masm.bind(&done);
}
// JSOp::RetRval: return the value stored in the frame's rval slot (set by
// SetRval), or undefined if none was stored. When the script is statically
// known to have no script rval, the slot check is skipped entirely.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RetRval() {
  frame.assertStackDepth(0);

  masm.moveValue(UndefinedValue(), JSReturnOperand);

  if (!handler.maybeScript() || !handler.maybeScript()->noScriptRval()) {
    // Return the value in the return value slot, if any.
    Label done;
    Address flags = frame.addressOfFlags();
    masm.branchTest32(Assembler::Zero, flags, Imm32(BaselineFrame::HAS_RVAL),
                      &done);
    masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
    masm.bind(&done);
  }

  return emitReturn();
}

// JSOp::ToPropertyKey: pop the operand into R0 and let the IC convert it to
// a property key.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToPropertyKey() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// JSOp::ToAsyncIter: wrap a sync iterator (object at stack[-2]) and its
// |next| method value (stack[-1]) in an AsyncFromSyncIterator. Replaces both
// operands with the new object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToAsyncIter() {
  frame.syncStack(0);
  masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, HandleObject, HandleValue);
  if (!callVM<Fn, js::CreateAsyncFromSyncIterator>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.popn(2);
  frame.push(R0);
  return true;
}

// JSOp::CanSkipAwait: ask the VM whether awaiting the value on top of the
// stack can be skipped; pushes the boolean answer (the value stays on the
// stack).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CanSkipAwait() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, bool* canSkip);
  if (!callVM<Fn, js::CanSkipAwait>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}
// JSOp::MaybeExtractAwaitValue: if the canSkip boolean at stack[-1] is true,
// replace the value at stack[-2] with its extracted await value via the VM;
// otherwise leave the stack untouched.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_MaybeExtractAwaitValue() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);

  masm.unboxBoolean(frame.addressOfStackValue(-1), R1.scratchReg());

  Label cantExtract;
  masm.branchIfFalseBool(R1.scratchReg(), &cantExtract);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, MutableHandleValue);
  if (!callVM<Fn, js::ExtractAwaitValue>()) {
    return false;
  }

  masm.storeValue(R0, frame.addressOfStackValue(-2));
  masm.bind(&cantExtract);

  return true;
}

// JSOp::AsyncAwait: call AsyncFunctionAwait with the awaited value
// (stack[-2]) and the async generator object (stack[-1]); both operands are
// replaced by the resulting promise object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_AsyncAwait() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());

  prepareVMCall();
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
                           HandleValue);
  if (!callVM<Fn, js::AsyncFunctionAwait>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.popn(2);
  frame.push(R0);
  return true;
}
// JSOp::AsyncResolve: resolve the async function's result promise with the
// value at stack[-2]; the generator object is at stack[-1]. Both operands
// are replaced by the resulting object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_AsyncResolve() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());

  prepareVMCall();
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
                           HandleValue);
  if (!callVM<Fn, js::AsyncFunctionResolve>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.popn(2);
  frame.push(R0);
  return true;
}

// JSOp::AsyncReject: like AsyncResolve but takes two values from the stack
// (stack[-3] and stack[-2] — presumably the rejection reason and its stack;
// TODO(review) confirm operand order against AsyncFunctionReject) plus the
// generator object at stack[-1]. All three are replaced by the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_AsyncReject() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-3), R2);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());

  prepareVMCall();
  pushArg(R1);
  pushArg(R2);
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
                           HandleValue, HandleValue);
  if (!callVM<Fn, js::AsyncFunctionReject>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.popn(3);
  frame.push(R0);
  return true;
}
// JSOp::CheckObjCoercible: throw a TypeError (via the VM) if the value on
// top of the stack is undefined or null; otherwise fall through. The value
// stays on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckObjCoercible() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  Label fail, done;

  // undefined -> fail; anything except null -> done; null falls into fail.
  masm.branchTestUndefined(Assembler::Equal, R0, &fail);
  masm.branchTestNull(Assembler::NotEqual, R0, &done);

  masm.bind(&fail);
  prepareVMCall();

  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  if (!callVM<Fn, ThrowObjectCoercible>()) {
    return false;
  }

  masm.bind(&done);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToString() {
  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  // Inline path for string.
  Label done;
  masm.branchTestString(Assembler::Equal, R0, &done);

  prepareVMCall();

  pushArg(R0);

  // Call ToStringSlow which doesn't handle string inputs.
  using Fn = JSString* (*)(JSContext*, HandleValue);
  if (!callVM<Fn, ToStringSlow<CanGC>>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_STRING, ReturnReg, R0);

  masm.bind(&done);
  frame.push(R0);
  return true;
}
// Byte offsets of the TableSwitch op's low/high bounds and first resume
// index, relative to the op's first operand.
static constexpr uint32_t TableSwitchOpLowOffset = 1 * JUMP_OFFSET_LEN;
static constexpr uint32_t TableSwitchOpHighOffset = 2 * JUMP_OFFSET_LEN;
static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset =
    3 * JUMP_OFFSET_LEN;

// Compiler flavor: low/high are compile-time constants, so the bounds check
// is a subtract plus a single unsigned compare against the table length.
template <>
void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val,
                                                      Register dest,
                                                      Register scratch1,
                                                      Register scratch2) {
  jsbytecode* pc = handler.pc();
  jsbytecode* defaultpc = pc + GET_JUMP_OFFSET(pc);
  Label* defaultLabel = handler.labelOf(defaultpc);

  int32_t low = GET_JUMP_OFFSET(pc + TableSwitchOpLowOffset);
  int32_t high = GET_JUMP_OFFSET(pc + TableSwitchOpHighOffset);
  int32_t length = high - low + 1;

  // Jump to the 'default' pc if not int32 (tableswitch is only used when
  // all cases are int32).
  masm.branchTestInt32(Assembler::NotEqual, val, defaultLabel);
  masm.unboxInt32(val, dest);

  // Subtract 'low'. Bounds check.
  // The unsigned AboveOrEqual compare catches both val < low (wraps
  // negative) and val > high.
  if (low != 0) {
    masm.sub32(Imm32(low), dest);
  }
  masm.branch32(Assembler::AboveOrEqual, dest, Imm32(length), defaultLabel);
}
// Interpreter flavor: low/high must be loaded from the bytecode at run time,
// so the bounds check is two explicit signed compares, and the 'default'
// jump goes through the generic emitJump() path.
template <>
void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val,
                                                         Register dest,
                                                         Register scratch1,
                                                         Register scratch2) {
  // Jump to the 'default' pc if not int32 (tableswitch is only used when
  // all cases are int32).
  Label done, jumpToDefault;
  masm.branchTestInt32(Assembler::NotEqual, val, &jumpToDefault);
  masm.unboxInt32(val, dest);

  Register pcReg = LoadBytecodePC(masm, scratch1);
  Address lowAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpLowOffset);
  Address highAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpHighOffset);

  // Jump to default if val > high.
  masm.branch32(Assembler::LessThan, highAddr, dest, &jumpToDefault);

  // Jump to default if val < low.
  masm.load32(lowAddr, scratch2);
  masm.branch32(Assembler::GreaterThan, scratch2, dest, &jumpToDefault);

  // index := val - low.
  masm.sub32(scratch2, dest);
  masm.jump(&done);

  masm.bind(&jumpToDefault);
  emitJump();

  masm.bind(&done);
}
// Compiler flavor: jump directly through the BaselineScript's native resume
// entry table.
template <>
void BaselineCompilerCodeGen::emitTableSwitchJump(Register key,
                                                  Register scratch1,
                                                  Register scratch2) {
  // Jump to resumeEntries[firstResumeIndex + key].

  // Note: BytecodeEmitter::allocateResumeIndex static_asserts
  // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t.
  uint32_t firstResumeIndex =
      GET_RESUMEINDEX(handler.pc() + TableSwitchOpFirstResumeIndexOffset);
  LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
  masm.loadPtr(BaseIndex(scratch1, key, ScaleFromElemWidth(sizeof(uintptr_t)),
                         firstResumeIndex * sizeof(uintptr_t)),
               scratch1);
  masm.jump(scratch1);
}

// Interpreter flavor: compute firstResumeIndex + key and resume via the
// bytecode-pc based resume-entry path.
template <>
void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key,
                                                     Register scratch1,
                                                     Register scratch2) {
  // Load the op's firstResumeIndex in scratch1.
  LoadUint24Operand(masm, TableSwitchOpFirstResumeIndexOffset, scratch1);

  masm.add32(key, scratch1);
  jumpToResumeEntry(scratch1, key, scratch2);
}
// JSOp::TableSwitch: pop the discriminant, normalize int-valued doubles to
// int32, compute the table index (or branch to default), then jump through
// the resume-entry machinery.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TableSwitch() {
  frame.popRegsAndSync(1);

  Register key = R0.scratchReg();
  Register scratch1 = R1.scratchReg();
  Register scratch2 = R2.scratchReg();

  // Call a stub to convert R0 from double to int32 if needed.
  // Note: this stub may clobber scratch1.
  masm.call(cx->runtime()->jitRuntime()->getDoubleToInt32ValueStub());

  // Load the index in the jump table in |key|, or branch to default pc if not
  // int32 or out-of-range.
  emitGetTableSwitchIndex(R0, key, scratch1, scratch2);

  // Jump to the target pc.
  emitTableSwitchJump(key, scratch1, scratch2);
  return true;
}
// JSOp::Iter: pop the iterable into R0 and let the GetIterator IC produce
// the iterator object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Iter() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::MoreIter: advance the (native) iterator at stack[-1] via
// MacroAssembler::iteratorMore; pushes the result value in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_MoreIter() {
  frame.syncStack(0);

  masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());

  masm.iteratorMore(R1.scratchReg(), R0, R2.scratchReg());
  frame.push(R0);
  return true;
}

// Shared helper: push |true| if the value on top of the stack is a magic
// value, |false| otherwise. The tested value stays on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitIsMagicValue() {
  frame.syncStack(0);

  Label isMagic, done;
  masm.branchTestMagic(Assembler::Equal, frame.addressOfStackValue(-1),
                       &isMagic);
  masm.moveValue(BooleanValue(false), R0);
  masm.jump(&done);

  masm.bind(&isMagic);
  masm.moveValue(BooleanValue(true), R0);

  masm.bind(&done);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::IsNoIter: the iteration-done sentinel is a magic value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_IsNoIter() {
  return emitIsMagicValue();
}
// JSOp::EndIter: close a native iterator inline via
// MacroAssembler::iteratorClose. Three free temps are carved out of the
// full register set, excluding the frame pointer, the interpreter pc
// register (when in use), and the iterator object itself.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_EndIter() {
  // Pop iterator value.
  frame.pop();

  // Pop the iterator object to close in R0.
  frame.popRegsAndSync(1);

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  if (HasInterpreterPCReg()) {
    regs.take(InterpreterPCReg);
  }

  Register obj = R0.scratchReg();
  regs.take(obj);
  masm.unboxObject(R0, obj);

  Register temp1 = regs.takeAny();
  Register temp2 = regs.takeAny();
  Register temp3 = regs.takeAny();
  masm.iteratorClose(obj, temp1, temp2, temp3);
  return true;
}

// JSOp::CloseIter: pop the iterator object and close it through the
// CloseIter IC (which handles the general, non-native case).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CloseIter() {
  frame.popRegsAndSync(1);

  Register iter = R0.scratchReg();
  masm.unboxObject(R0, iter);

  return emitNextIC();
}

// JSOp::OptimizeGetIterator: pop the operand and let the IC answer whether
// the iteration can take the optimized path; pushes the IC's result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_OptimizeGetIterator() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::IsGenClosing: the generator-closing sentinel is a magic value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_IsGenClosing() {
  return emitIsMagicValue();
}
// JSOp::IsNullOrUndefined: push |true| if the value on top of the stack is
// null or undefined, else |false|. The tested value stays on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_IsNullOrUndefined() {
  frame.syncStack(0);

  Label isNullOrUndefined, done;
  masm.branchTestNull(Assembler::Equal, frame.addressOfStackValue(-1),
                      &isNullOrUndefined);
  masm.branchTestUndefined(Assembler::Equal, frame.addressOfStackValue(-1),
                           &isNullOrUndefined);
  masm.moveValue(BooleanValue(false), R0);
  masm.jump(&done);

  masm.bind(&isNullOrUndefined);
  masm.moveValue(BooleanValue(true), R0);

  masm.bind(&done);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::GetRval: push the frame's stored return value (or undefined).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetRval() {
  frame.syncStack(0);

  emitLoadReturnValue(R0);

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetRval() {
  // Store to the frame's return value slot.
  frame.storeStackValue(-1, frame.addressOfReturnValue(), R2);
  masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
  frame.pop();
  return true;
}
// JSOp::Callee: push the callee function, taken from the frame's callee
// token. Only valid in function scripts (asserted when the script is known
// at compile time).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Callee() {
  MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->function());
  frame.syncStack(0);
  masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(),
                                   R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_OBJECT, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

// Compiler flavor of JSOp::EnvCallee: the hop count is a compile-time
// constant, so the environment walk is fully unrolled.
template <>
bool BaselineCompilerCodeGen::emit_EnvCallee() {
  frame.syncStack(0);
  uint8_t numHops = GET_UINT8(handler.pc());
  Register scratch = R0.scratchReg();

  masm.loadPtr(frame.addressOfEnvironmentChain(), scratch);
  // Walk |numHops| enclosing environments.
  for (unsigned i = 0; i < numHops; i++) {
    Address nextAddr(scratch,
                     EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(nextAddr, scratch);
  }

  masm.loadValue(Address(scratch, CallObject::offsetOfCallee()), R0);
  frame.push(R0);
  return true;
}

// Interpreter flavor: the hop count is a run-time operand, so reuse the
// LoadAliasedVarEnv helper (same uint8 operand layout) to walk the chain.
template <>
bool BaselineInterpreterCodeGen::emit_EnvCallee() {
  Register scratch = R0.scratchReg();
  Register env = R1.scratchReg();

  static_assert(JSOpLength_EnvCallee - sizeof(jsbytecode) == ENVCOORD_HOPS_LEN,
                "op must have uint8 operand for LoadAliasedVarEnv");

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch);

  masm.pushValue(Address(env, CallObject::offsetOfCallee()));
  return true;
}
// JSOp::SuperBase: pop the callee and push the prototype of its
// [[HomeObject]] (the |super| base), or null when the home object has a null
// prototype.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SuperBase() {
  frame.popRegsAndSync(1);

  Register scratch = R0.scratchReg();
  Register proto = R1.scratchReg();

  // Unbox callee.
  masm.unboxObject(R0, scratch);

  // Load [[HomeObject]]
  Address homeObjAddr(scratch,
                      FunctionExtended::offsetOfMethodHomeObjectSlot());

  masm.assertFunctionIsExtended(scratch);
#ifdef DEBUG
  Label isObject;
  masm.branchTestObject(Assembler::Equal, homeObjAddr, &isObject);
  masm.assumeUnreachable("[[HomeObject]] must be Object");
  masm.bind(&isObject);
#endif
  masm.unboxObject(homeObjAddr, scratch);

  // Load prototype from [[HomeObject]]
  masm.loadObjProto(scratch, proto);

#ifdef DEBUG
  // We won't encounter a lazy proto, because the prototype is guaranteed to
  // either be a JSFunction or a PlainObject, and only proxy objects can have a
  // lazy proto.
  MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);

  Label proxyCheckDone;
  masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
  masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase");
  masm.bind(&proxyCheckDone);
#endif

  Label nullProto, done;
  masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto);

  // Box prototype and return
  masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
  masm.jump(&done);

  masm.bind(&nullProto);
  masm.moveValue(NullValue(), R1);

  masm.bind(&done);
  frame.push(R1);
  return true;
}
// JSOp::SuperFun: pop the callee (a JSFunction) and push its prototype, i.e.
// the super-class constructor (or |null| if the proto is null).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SuperFun() {
  frame.popRegsAndSync(1);

  Register callee = R0.scratchReg();
  Register proto = R1.scratchReg();
#ifdef DEBUG
  Register scratch = R2.scratchReg();
#endif

  // Unbox callee.
  masm.unboxObject(R0, callee);

#ifdef DEBUG
  Label classCheckDone;
  masm.branchTestObjIsFunction(Assembler::Equal, callee, scratch, callee,
                               &classCheckDone);
  masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
  masm.bind(&classCheckDone);
#endif

  // Load prototype of callee
  masm.loadObjProto(callee, proto);

#ifdef DEBUG
  // We won't encounter a lazy proto, because |callee| is guaranteed to be a
  // JSFunction and only proxy objects can have a lazy proto.
  MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);

  Label proxyCheckDone;
  masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
  masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun");
  masm.bind(&proxyCheckDone);
#endif

  // A null proto is represented as a null pointer (0).
  Label nullProto, done;
  masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto);

  // Box prototype and return
  masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
  masm.jump(&done);

  masm.bind(&nullProto);
  masm.moveValue(NullValue(), R1);

  masm.bind(&done);
  frame.push(R1);
  return true;
}
// JSOp::Arguments: call into the VM to create the frame's arguments object and
// push the resulting Value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Arguments() {
  frame.syncStack(0);

  // The bytecode emitter only emits this op for scripts that need one.
  MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->needsArgsObj());

  prepareVMCall();

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
  if (!callVM<Fn, jit::NewArgumentsObject>()) {
    return false;
  }

  // The VM call leaves its result in R0.
  frame.push(R0);
  return true;
}
// JSOp::Rest: use an IC to build the rest-parameter array and push the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Rest() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
// JSOp::Generator: call into the VM to create the generator object for this
// frame and push it (boxed as an object Value).
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Generator() {
  // This op is emitted with an empty expression stack.
  frame.assertStackDepth(0);

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, BaselineFrame*);
  if (!callVM<Fn, jit::CreateGeneratorFromFrame>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// Shared implementation for JSOp::InitialYield / Yield / Await: save the
// resume index and environment chain into the generator object (inline when
// the static stack depth is known to be 1, otherwise via jit::NormalSuspend),
// then return the value on top of the stack to the caller of the generator.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSuspend(JSOp op) {
  MOZ_ASSERT(op == JSOp::InitialYield || op == JSOp::Yield ||
             op == JSOp::Await);

  // Load the generator object in R2, but leave the return value on the
  // expression stack.
  Register genObj = R2.scratchReg();
  if (op == JSOp::InitialYield) {
    // Generator and return value are one and the same.
    frame.syncStack(0);
    frame.assertStackDepth(1);
    masm.unboxObject(frame.addressOfStackValue(-1), genObj);
  } else {
    frame.popRegsAndSync(1);
    masm.unboxObject(R0, genObj);
  }

  if (frame.hasKnownStackDepth(1) && !handler.canHaveFixedSlots()) {
    // If the expression stack is empty, we can inline the Yield. Note that this
    // branch is never taken for the interpreter because it doesn't know static
    // stack depths.
    MOZ_ASSERT_IF(op == JSOp::InitialYield && handler.maybePC(),
                  GET_RESUMEINDEX(handler.maybePC()) == 0);
    Address resumeIndexSlot(genObj,
                            AbstractGeneratorObject::offsetOfResumeIndexSlot());
    Register temp = R1.scratchReg();
    if (op == JSOp::InitialYield) {
      masm.storeValue(Int32Value(0), resumeIndexSlot);
    } else {
      jsbytecode* pc = handler.maybePC();
      MOZ_ASSERT(pc, "compiler-only code never has a null pc");
      masm.move32(Imm32(GET_RESUMEINDEX(pc)), temp);
      masm.storeValue(JSVAL_TYPE_INT32, temp, resumeIndexSlot);
    }

    // Store the current environment chain in the generator so it can be
    // restored on resume. Pre-barrier the old slot value first.
    Register envObj = R0.scratchReg();
    Address envChainSlot(
        genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
    masm.loadPtr(frame.addressOfEnvironmentChain(), envObj);
    masm.guardedCallPreBarrierAnyZone(envChainSlot, MIRType::Value, temp);
    masm.storeValue(JSVAL_TYPE_OBJECT, envObj, envChainSlot);

    // Post-barrier: only needed when a tenured generator stores a nursery
    // environment object.
    Label skipBarrier;
    masm.branchPtrInNurseryChunk(Assembler::Equal, genObj, temp, &skipBarrier);
    masm.branchPtrInNurseryChunk(Assembler::NotEqual, envObj, temp,
                                 &skipBarrier);
    MOZ_ASSERT(genObj == R2.scratchReg());
    masm.call(&postBarrierSlot_);
    masm.bind(&skipBarrier);
  } else {
    // Non-empty expression stack (or interpreter): save everything via a VM
    // call.
    masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
    computeFrameSize(R0.scratchReg());

    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());
    pushArg(R1.scratchReg());
    pushArg(genObj);

    using Fn = bool (*)(JSContext*, HandleObject, BaselineFrame*, uint32_t,
                        const jsbytecode*);
    if (!callVM<Fn, jit::NormalSuspend>()) {
      return false;
    }
  }

  masm.loadValue(frame.addressOfStackValue(-1), JSReturnOperand);
  if (!emitReturn()) {
    return false;
  }

  // Three values are pushed onto the stack when resuming the generator,
  // replacing the one slot that holds the return value.
  frame.incStackDepth(2);
  return true;
}
// JSOp::InitialYield: thin wrapper around the shared suspend implementation.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitialYield() {
  return emitSuspend(JSOp::InitialYield);
}
// JSOp::Yield: thin wrapper around the shared suspend implementation.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Yield() {
  return emitSuspend(JSOp::Yield);
}
// JSOp::Await: thin wrapper around the shared suspend implementation.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Await() {
  return emitSuspend(JSOp::Await);
}
// Compiler path: debuggee-ness is a compile-time decision, so run the
// |ifDebuggee| callback only when compiling with debug instrumentation.
// The scratch register is unused here.
template <>
template <typename F>
bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(
    const F& ifDebuggee, Register) {
  if (handler.compileDebugInstrumentation()) {
    return ifDebuggee();
  }
  return true;
}
// Interpreter path: emit a toggled jump plus a runtime check of the current
// Realm's debug-mode bits, and run |ifDebuggee| only when the Realm is a
// debuggee.
template <>
template <typename F>
bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation(
    const F& ifDebuggee, Register scratch) {
  // Note that we can't use emitDebugInstrumentation here because the frame's
  // DEBUGGEE flag hasn't been initialized yet.

  // If the current Realm is not a debuggee we're done.
  Label done;
  CodeOffset toggleOffset = masm.toggledJump(&done);
  if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
    return false;
  }
  masm.loadPtr(AbsoluteAddress(cx->addressOfRealm()), scratch);
  masm.branchTest32(Assembler::Zero,
                    Address(scratch, Realm::offsetOfDebugModeBits()),
                    Imm32(Realm::debugModeIsDebuggeeBit()), &done);

  if (!ifDebuggee()) {
    return false;
  }

  masm.bind(&done);
  return true;
}
// JSOp::AfterYield: behaves like JumpTarget, plus notifies the Debugger
// (jit::DebugAfterYield) when debug instrumentation is active.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_AfterYield() {
  if (!emit_JumpTarget()) {
    return false;
  }

  auto ifDebuggee = [this]() {
    frame.assertSyncedStack();
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
    prepareVMCall();
    pushArg(R0.scratchReg());

    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugAfterYield;

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVM<Fn, jit::DebugAfterYield>(kind)) {
      return false;
    }

    return true;
  };
  return emitAfterYieldDebugInstrumentation(ifDebuggee, R0.scratchReg());
}
// JSOp::FinalYieldRval: close the generator via jit::FinalSuspend and return
// the frame's return value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FinalYieldRval() {
  // Store generator in R0.
  frame.popRegsAndSync(1);
  masm.unboxObject(R0, R0.scratchReg());

  prepareVMCall();
  pushBytecodePCArg();
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, const jsbytecode*);
  if (!callVM<Fn, jit::FinalSuspend>()) {
    return false;
  }

  masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
  return emitReturn();
}
// Compiler path: jump to the Baseline Interpreter's shared interpretOp
// trampoline through the runtime.
template <>
void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() {
  TrampolinePtr code =
      cx->runtime()->jitRuntime()->baselineInterpreter().interpretOpAddr();
  masm.jump(code);
}
// Interpreter path: the interpretOp label lives in the code being generated,
// so jump to it directly.
template <>
void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() {
  masm.jump(handler.interpretOpLabel());
}
// Resume generator execution: jump into the script's BaselineScript resume
// entry when one exists, otherwise set up interpreter frame fields and enter
// the Baseline Interpreter at the resume pc.
//  script      - register holding the JSScript*; clobbered below.
//  resumeIndex - register holding the resume index.
//  scratch     - temporary register.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitEnterGeneratorCode(Register script,
                                                      Register resumeIndex,
                                                      Register scratch) {
  // Resume in either the BaselineScript (if present) or Baseline Interpreter.

  static_assert(BaselineDisabledScript == 0x1,
                "Comparison below requires specific sentinel encoding");

  // Initialize the icScript slot in the baseline frame.
  masm.loadJitScript(script, scratch);
  masm.computeEffectiveAddress(Address(scratch, JitScript::offsetOfICScript()),
                               scratch);
  Address icScriptAddr(FramePointer, BaselineFrame::reverseOffsetOfICScript());
  masm.storePtr(scratch, icScriptAddr);

  // BaselineScript pointers <= BaselineDisabledScriptPtr are sentinels, not
  // real scripts.
  Label noBaselineScript;
  masm.loadJitScript(script, scratch);
  masm.loadPtr(Address(scratch, JitScript::offsetOfBaselineScript()), scratch);
  masm.branchPtr(Assembler::BelowOrEqual, scratch,
                 ImmPtr(BaselineDisabledScriptPtr), &noBaselineScript);

  // Load resumeEntries[resumeIndex] and jump to it. |script| is reused as a
  // temporary here.
  masm.load32(Address(scratch, BaselineScript::offsetOfResumeEntriesOffset()),
              script);
  masm.addPtr(scratch, script);
  masm.loadPtr(
      BaseIndex(script, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
      scratch);
  masm.jump(scratch);

  masm.bind(&noBaselineScript);

  // Initialize interpreter frame fields.
  Address flagsAddr(FramePointer, BaselineFrame::reverseOffsetOfFlags());
  Address scriptAddr(FramePointer,
                     BaselineFrame::reverseOffsetOfInterpreterScript());
  masm.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER), flagsAddr);
  masm.storePtr(script, scriptAddr);

  // Initialize pc and jump to it.
  emitInterpJumpToResumeEntry(script, resumeIndex, scratch);
  return true;
}
// JSOp::Resume: resume a suspended generator. Builds a new BaselineFrame for
// the generator (formals as |undefined|, saved environment/args-object/stack
// storage restored from the generator object), then enters the generator's
// code via emitEnterGeneratorCode. Scripts without a JitScript are resumed in
// the C++ interpreter through jit::InterpretResume instead. Stack layout at
// this op: ... genObj, arg, resumeKind.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Resume() {
  frame.syncStack(0);
  masm.assertStackAlignment(sizeof(Value), 0);

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  if (HasInterpreterPCReg()) {
    regs.take(InterpreterPCReg);
  }

  saveInterpreterPCReg();

  // Load generator object.
  Register genObj = regs.takeAny();
  masm.unboxObject(frame.addressOfStackValue(-3), genObj);

  // Load callee.
  Register callee = regs.takeAny();
  masm.unboxObject(
      Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), callee);

  // Save a pointer to the JSOp::Resume operand stack Values.
  Register callerStackPtr = regs.takeAny();
  masm.computeEffectiveAddress(frame.addressOfStackValue(-1), callerStackPtr);

  // Branch to |interpret| to resume the generator in the C++ interpreter if the
  // script does not have a JitScript.
  Label interpret;
  Register scratch1 = regs.takeAny();
  masm.loadPrivate(Address(callee, JSFunction::offsetOfJitInfoOrScript()),
                   scratch1);
  masm.branchIfScriptHasNoJitScript(scratch1, &interpret);

  // Push |undefined| for all formals.
  Register scratch2 = regs.takeAny();
  Label loop, loopDone;
  masm.loadFunctionArgCount(callee, scratch2);

  static_assert(sizeof(Value) == 8);
#ifndef JS_CODEGEN_NONE
  static_assert(JitStackAlignment == 16 || JitStackAlignment == 8);
#endif
  // If JitStackValueAlignment == 1, then we were already correctly aligned on
  // entry, as guaranteed by the assertStackAlignment at the entry to this
  // function.
  if (JitStackValueAlignment > 1) {
    Register alignment = regs.takeAny();
    masm.moveStackPtrTo(alignment);
    masm.alignJitStackBasedOnNArgs(scratch2, false);

    // Compute alignment adjustment.
    masm.subStackPtrFrom(alignment);

    // Some code, like BaselineFrame::trace, will inspect the whole range of
    // the stack frame. In order to ensure that garbage data left behind from
    // previous activations doesn't confuse other machinery, we zero out the
    // alignment bytes.
    Label alignmentZero;
    masm.branchPtr(Assembler::Equal, alignment, ImmWord(0), &alignmentZero);

    // Since we know prior to the stack alignment that the stack was 8 byte
    // aligned, and JitStackAlignment is 8 or 16 bytes, if we are doing an
    // alignment then we -must- have aligned by subtracting 8 bytes from
    // the stack pointer.
    //
    // So we can freely store a valid double here.
    masm.storeValue(DoubleValue(0), Address(masm.getStackPointer(), 0));
    masm.bind(&alignmentZero);
  }

  masm.branchTest32(Assembler::Zero, scratch2, scratch2, &loopDone);
  masm.bind(&loop);
  {
    masm.pushValue(UndefinedValue());
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch2, &loop);
  }
  masm.bind(&loopDone);

  // Push |undefined| for |this|.
  masm.pushValue(UndefinedValue());

#ifdef DEBUG
  // Update BaselineFrame debugFrameSize field.
  masm.mov(FramePointer, scratch2);
  masm.subStackPtrFrom(scratch2);
  masm.store32(scratch2, frame.addressOfDebugFrameSize());
#endif

  masm.PushCalleeToken(callee, /* constructing = */ false);
  masm.pushFrameDescriptorForJitCall(FrameType::BaselineJS, /* argc = */ 0);

  // PushCalleeToken bumped framePushed. Reset it.
  MOZ_ASSERT(masm.framePushed() == sizeof(uintptr_t));
  masm.setFramePushed(0);

  regs.add(callee);

  // Push a fake return address on the stack. We will resume here when the
  // generator returns.
  Label genStart, returnTarget;
#ifdef JS_USE_LINK_REGISTER
  masm.call(&genStart);
#else
  masm.callAndPushReturnAddress(&genStart);
#endif

  // Record the return address so the return offset -> pc mapping works.
  if (!handler.recordCallRetAddr(cx, RetAddrEntry::Kind::IC,
                                 masm.currentOffset())) {
    return false;
  }

  masm.jump(&returnTarget);
  masm.bind(&genStart);
#ifdef JS_USE_LINK_REGISTER
  masm.pushReturnAddress();
#endif

  // Construct BaselineFrame.
  masm.push(FramePointer);
  masm.moveStackPtrTo(FramePointer);

  // If profiler instrumentation is on, update lastProfilingFrame on
  // current JitActivation
  {
    Register scratchReg = scratch2;
    Label skip;
    AbsoluteAddress addressOfEnabled(
        cx->runtime()->geckoProfiler().addressOfEnabled());
    masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skip);
    masm.loadJSContext(scratchReg);
    masm.loadPtr(Address(scratchReg, JSContext::offsetOfProfilingActivation()),
                 scratchReg);
    masm.storePtr(
        FramePointer,
        Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()));
    masm.bind(&skip);
  }

  masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
  masm.assertStackAlignment(sizeof(Value), 0);

  // Store flags and env chain.
  masm.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
  masm.unboxObject(
      Address(genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
      scratch2);
  masm.storePtr(scratch2, frame.addressOfEnvironmentChain());

  // Store the arguments object if there is one.
  Label noArgsObj;
  Address argsObjSlot(genObj, AbstractGeneratorObject::offsetOfArgsObjSlot());
  masm.fallibleUnboxObject(argsObjSlot, scratch2, &noArgsObj);
  {
    masm.storePtr(scratch2, frame.addressOfArgsObj());
    masm.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ), frame.addressOfFlags());
  }
  masm.bind(&noArgsObj);

  // Push locals and expression slots if needed.
  Label noStackStorage;
  Address stackStorageSlot(genObj,
                           AbstractGeneratorObject::offsetOfStackStorageSlot());
  masm.fallibleUnboxObject(stackStorageSlot, scratch2, &noStackStorage);
  {
    Register initLength = regs.takeAny();
    masm.loadPtr(Address(scratch2, NativeObject::offsetOfElements()), scratch2);
    masm.load32(Address(scratch2, ObjectElements::offsetOfInitializedLength()),
                initLength);
    masm.store32(
        Imm32(0),
        Address(scratch2, ObjectElements::offsetOfInitializedLength()));

    // Copy each stored Value onto the stack, pre-barriering the old slots.
    Label loop, loopDone;
    masm.branchTest32(Assembler::Zero, initLength, initLength, &loopDone);
    masm.bind(&loop);
    {
      masm.pushValue(Address(scratch2, 0));
      masm.guardedCallPreBarrierAnyZone(Address(scratch2, 0), MIRType::Value,
                                        scratch1);
      masm.addPtr(Imm32(sizeof(Value)), scratch2);
      masm.branchSub32(Assembler::NonZero, Imm32(1), initLength, &loop);
    }
    masm.bind(&loopDone);
    regs.add(initLength);
  }

  masm.bind(&noStackStorage);

  // Push arg, generator, resumeKind stack Values, in that order.
  masm.pushValue(Address(callerStackPtr, sizeof(Value)));
  masm.pushValue(JSVAL_TYPE_OBJECT, genObj);
  masm.pushValue(Address(callerStackPtr, 0));

  masm.switchToObjectRealm(genObj, scratch2);

  // Load script in scratch1.
  masm.unboxObject(
      Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1);
  masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
                   scratch1);

  // Load resume index in scratch2 and mark generator as running.
  Address resumeIndexSlot(genObj,
                          AbstractGeneratorObject::offsetOfResumeIndexSlot());
  masm.unboxInt32(resumeIndexSlot, scratch2);
  masm.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING),
                  resumeIndexSlot);

  if (!emitEnterGeneratorCode(scratch1, scratch2, regs.getAny())) {
    return false;
  }

  // Call into the VM to resume the generator in the C++ interpreter if there's
  // no JitScript.
  masm.bind(&interpret);

  prepareVMCall();

  pushArg(callerStackPtr);
  pushArg(genObj);

  using Fn = bool (*)(JSContext*, HandleObject, Value*, MutableHandleValue);
  if (!callVM<Fn, jit::InterpretResume>()) {
    return false;
  }

  masm.bind(&returnTarget);

  // Restore Stack pointer
  masm.computeEffectiveAddress(frame.addressOfStackValue(-1),
                               masm.getStackPointer());

  // After the generator returns, we restore the stack pointer, switch back to
  // the current realm, push the return value, and we're done.
  if (JSScript* script = handler.maybeScript()) {
    masm.switchToRealm(script->realm(), R2.scratchReg());
  } else {
    masm.switchToBaselineFrameRealm(R2.scratchReg());
  }
  restoreInterpreterPCReg();
  frame.popn(3);
  frame.push(R0);
  return true;
}
// JSOp::CheckResumeKind: pop resumeKind and generator; for 'next' fall
// through, otherwise call jit::GeneratorThrowOrReturn to rethrow/return.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckResumeKind() {
  // Load resumeKind in R1, generator in R0.
  frame.popRegsAndSync(2);

#ifdef DEBUG
  Label ok;
  masm.branchTestInt32(Assembler::Equal, R1, &ok);
  masm.assumeUnreachable("Expected int32 resumeKind");
  masm.bind(&ok);
#endif

  // If resumeKind is 'next' we don't have to do anything.
  Label done;
  masm.unboxInt32(R1, R1.scratchReg());
  masm.branch32(Assembler::Equal, R1.scratchReg(),
                Imm32(int32_t(GeneratorResumeKind::Next)), &done);

  prepareVMCall();

  pushArg(R1.scratchReg());  // resumeKind

  masm.loadValue(frame.addressOfStackValue(-1), R2);
  pushArg(R2);  // arg

  masm.unboxObject(R0, R0.scratchReg());
  pushArg(R0.scratchReg());  // genObj

  masm.loadBaselineFramePtr(FramePointer, R2.scratchReg());
  pushArg(R2.scratchReg());  // frame

  using Fn = bool (*)(JSContext*, BaselineFrame*,
                      Handle<AbstractGeneratorObject*>, HandleValue, int32_t);
  if (!callVM<Fn, jit::GeneratorThrowOrReturn>()) {
    return false;
  }

  masm.bind(&done);
  return true;
}
// Compiler path: the resume kind is a compile-time constant read from the pc.
template <>
bool BaselineCompilerCodeGen::emit_ResumeKind() {
  GeneratorResumeKind resumeKind = ResumeKindFromPC(handler.pc());
  frame.push(Int32Value(int32_t(resumeKind)));
  return true;
}
// Interpreter path: load the uint8 operand at runtime and push it as an int32.
template <>
bool BaselineInterpreterCodeGen::emit_ResumeKind() {
  LoadUint8Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}
// JSOp::DebugCheckSelfHosted: in DEBUG builds, call into the VM to validate
// the value on top of the stack; a no-op in release builds.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DebugCheckSelfHosted() {
#ifdef DEBUG
  frame.syncStack(0);

  // Leave the checked value on the stack.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  if (!callVM<Fn, js::Debug_CheckSelfHosted>()) {
    return false;
  }
#endif
  return true;
}
// JSOp::IsConstructing: push the JS_IS_CONSTRUCTING magic value sentinel.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_IsConstructing() {
  frame.push(MagicValue(JS_IS_CONSTRUCTING));
  return true;
}
// Compiler path: nothing to emit except an optional code-coverage counter
// bump for this pc.
template <>
bool BaselineCompilerCodeGen::emit_JumpTarget() {
  MaybeIncrementCodeCoverageCounter(masm, handler.script(), handler.pc());
  return true;
}
// Interpreter path: toggled code-coverage hook, then recompute the frame's
// interpreterICEntry from the op's icIndex operand.
template <>
bool BaselineInterpreterCodeGen::emit_JumpTarget() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();

  // Patchable (normally skipped) call to the code-coverage handler.
  Label skipCoverage;
  CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
  masm.call(handler.codeCoverageAtPCLabel());
  masm.bind(&skipCoverage);
  if (!handler.codeCoverageOffsets().append(toggleOffset.offset())) {
    return false;
  }

  // Load icIndex in scratch1.
  LoadInt32Operand(masm, scratch1);

  // Compute ICEntry* and store to frame->interpreterICEntry.
  masm.loadPtr(frame.addressOfICScript(), scratch2);
  static_assert(sizeof(ICEntry) == sizeof(uintptr_t));
  masm.computeEffectiveAddress(BaseIndex(scratch2, scratch1, ScalePointer,
                                         ICScript::offsetOfICEntries()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
  return true;
}
// JSOp::CheckClassHeritage: VM call to validate the class heritage value on
// top of the stack; the value itself stays on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckClassHeritage() {
  frame.syncStack(0);

  // Leave the heritage value on the stack.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  return callVM<Fn, js::CheckClassHeritageOperation>();
}
// JSOp::InitHomeObject: pop the home object and store it into the method's
// extended HOMEOBJECT slot (function stays on the stack), with GC pre/post
// barriers.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHomeObject() {
  // Load HomeObject in R0.
  frame.popRegsAndSync(1);

  // Load function off stack
  Register func = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), func);

  masm.assertFunctionIsExtended(func);

  // Set HOMEOBJECT_SLOT
  Register temp = R1.scratchReg();
  Address addr(func, FunctionExtended::offsetOfMethodHomeObjectSlot());
  masm.guardedCallPreBarrierAnyZone(addr, MIRType::Value, temp);
  masm.storeValue(R0, addr);

  // Post-barrier: only needed when a tenured function stores a nursery value.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, func, temp, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
  masm.call(&postBarrierSlot_);
  masm.bind(&skipBarrier);

  return true;
}
// Compiler path: resolve the built-in object at compile time and push it as a
// constant.
template <>
bool BaselineCompilerCodeGen::emit_BuiltinObject() {
  // Built-in objects are constants for a given global.
  auto kind = BuiltinObjectKind(GET_UINT8(handler.pc()));
  JSObject* builtin = BuiltinObjectOperation(cx, kind);
  if (!builtin) {
    return false;
  }
  frame.push(ObjectValue(*builtin));
  return true;
}
// Interpreter path: resolve the built-in object at runtime via a VM call with
// the uint8 kind operand.
template <>
bool BaselineInterpreterCodeGen::emit_BuiltinObject() {
  prepareVMCall();

  pushUint8BytecodeOperandArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, BuiltinObjectKind);
  if (!callVM<Fn, BuiltinObjectOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// JSOp::ObjWithProto: VM call creating a plain object with the given proto;
// replaces the proto value on the stack with the new object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ObjWithProto() {
  frame.syncStack(0);

  // Leave the proto value on the stack for the decompiler
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = PlainObject* (*)(JSContext*, HandleValue);
  if (!callVM<Fn, js::ObjectWithProtoOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.pop();
  frame.push(R0);
  return true;
}
// JSOp::FunWithProto: pop a proto object, then VM call cloning the op's
// function GC-thing with that proto and the current env chain; push result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FunWithProto() {
  frame.popRegsAndSync(1);

  masm.unboxObject(R0, R0.scratchReg());
  masm.loadPtr(frame.addressOfEnvironmentChain(), R1.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());
  pushArg(R1.scratchReg());
  pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
                       R1.scratchReg());

  using Fn =
      JSObject* (*)(JSContext*, HandleFunction, HandleObject, HandleObject);
  if (!callVM<Fn, js::FunWithProtoOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// Compiler path: resolve the module object at compile time, then VM call to
// get/create its import.meta object.
template <>
bool BaselineCompilerCodeGen::emit_ImportMeta() {
  // Note: this is like the interpreter implementation, but optimized a bit by
  // calling GetModuleObjectForScript at compile-time.

  Rooted<ModuleObject*> module(cx, GetModuleObjectForScript(handler.script()));

  frame.syncStack(0);

  prepareVMCall();
  pushArg(ImmGCPtr(module));

  using Fn = JSObject* (*)(JSContext*, HandleObject);
  if (!callVM<Fn, js::GetOrCreateModuleMetaObject>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// Interpreter path: pass the script to ImportMetaOperation at runtime.
template <>
bool BaselineInterpreterCodeGen::emit_ImportMeta() {
  prepareVMCall();

  pushScriptArg();

  using Fn = JSObject* (*)(JSContext*, HandleScript);
  if (!callVM<Fn, ImportMetaOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// JSOp::DynamicImport: pop options and specifier, VM call to start the module
// import, and push the resulting promise object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DynamicImport() {
  // Put specifier into R0 and object value into R1
  frame.popRegsAndSync(2);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0);
  pushScriptArg();

  using Fn = JSObject* (*)(JSContext*, HandleScript, HandleValue, HandleValue);
  if (!callVM<Fn, js::StartDynamicModuleImport>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
// Compiler path: scripts with this op must never be Baseline-compiled.
template <>
bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
  // Caller is responsible for checking script->hasForceInterpreterOp().
  MOZ_CRASH("JSOp::ForceInterpreter in baseline");
}
// Interpreter path: emit an unreachable marker; this op only forces use of the
// C++ interpreter, so the Baseline Interpreter should never execute it.
template <>
bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
  masm.assumeUnreachable("JSOp::ForceInterpreter");
  return true;
}
// Emit the frame prologue: push/set up the frame pointer, initialize the
// BaselineFrame fields and environment chain, check for overrecursion,
// initialize locals, and run debug/coverage/warm-up-counter hooks. Order
// matters: the env chain must be valid before anything that can GC, and the
// stack check happens before locals are initialized.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitPrologue() {
  AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitPrologue");

#ifdef JS_USE_LINK_REGISTER
  // Push link register from generateEnterJIT()'s BLR.
  masm.pushReturnAddress();
#endif

  masm.push(FramePointer);
  masm.moveStackPtrTo(FramePointer);

  masm.checkStackAlignment();

  emitProfilerEnterFrame();

  masm.subFromStackPtr(Imm32(BaselineFrame::Size()));

  // Initialize BaselineFrame. Also handles env chain pre-initialization (in
  // case GC gets run during stack check). For global and eval scripts, the env
  // chain is in R1. For function scripts, the env chain is in the callee.
  emitInitFrameFields(R1.scratchReg());

  // When compiling with Debugger instrumentation, set the debuggeeness of
  // the frame before any operation that can call into the VM.
  if (!emitIsDebuggeeCheck()) {
    return false;
  }

  // Initialize the env chain before any operation that may call into the VM and
  // trigger a GC.
  if (!initEnvironmentChain()) {
    return false;
  }

  // Check for overrecursion before initializing locals.
  if (!emitStackCheck()) {
    return false;
  }

  emitInitializeLocals();

  // Ion prologue bailouts will enter here in the Baseline Interpreter.
  masm.bind(&bailoutPrologue_);

  frame.assertSyncedStack();

  if (JSScript* script = handler.maybeScript()) {
    masm.debugAssertContextRealm(script->realm(), R1.scratchReg());
  }

  if (!emitDebugPrologue()) {
    return false;
  }

  if (!emitHandleCodeCoverageAtPrologue()) {
    return false;
  }

  if (!emitWarmUpCounterIncrement()) {
    return false;
  }

  warmUpCheckPrologueOffset_ = CodeOffset(masm.currentOffset());

  return true;
}
// Emit the shared return path: run the debug epilogue (unless it's emitted at
// the return op instead), tear down the profiler frame, restore the frame
// pointer, and return.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitEpilogue() {
  AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitEpilogue");

  masm.bind(&return_);

  if (!handler.shouldEmitDebugEpilogueAtReturnOp()) {
    if (!emitDebugEpilogue()) {
      return false;
    }
  }

  emitProfilerExitFrame();

  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  masm.ret();
  return true;
}
// Compile the script's bytecode sequentially. For every reachable op: sync the
// frame state as required (fully at jump targets and under the debugger),
// record resume offsets and debug traps, then dispatch to the per-op emitter
// via the FOR_EACH_OPCODE-generated switch.
MethodStatus BaselineCompiler::emitBody() {
  AutoCreatedBy acb(masm, "BaselineCompiler::emitBody");

  JSScript* script = handler.script();
  MOZ_ASSERT(handler.pc() == script->code());

  // Tracks the previous pc for the final Return/RetRval assertion.
  mozilla::DebugOnly<jsbytecode*> prevpc = handler.pc();

  while (true) {
    JSOp op = JSOp(*handler.pc());
    JitSpew(JitSpew_BaselineOp, "Compiling op @ %d: %s",
            int(script->pcToOffset(handler.pc())), CodeName(op));

    BytecodeInfo* info = handler.analysis().maybeInfo(handler.pc());

    // Skip unreachable ops.
    if (!info) {
      // Test if last instructions and stop emitting in that case.
      handler.moveToNextPC();
      if (handler.pc() >= script->codeEnd()) {
        break;
      }

      prevpc = handler.pc();
      continue;
    }

    if (info->jumpTarget) {
      // Fully sync the stack if there are incoming jumps.
      frame.syncStack(0);
      frame.setStackDepth(info->stackDepth);
      masm.bind(handler.labelOf(handler.pc()));
    } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
      // Also fully sync the stack if the debugger is enabled.
      frame.syncStack(0);
    } else {
      // At the beginning of any op, at most the top 2 stack-values are
      // unsynced.
      if (frame.stackDepth() > 2) {
        frame.syncStack(2);
      }
    }

    frame.assertValidState(*info);

    // If the script has a resume offset for this pc we need to keep track of
    // the native code offset.
    if (info->hasResumeOffset) {
      frame.assertSyncedStack();
      uint32_t pcOffset = script->pcToOffset(handler.pc());
      uint32_t nativeOffset = masm.currentOffset();
      if (!resumeOffsetEntries_.emplaceBack(pcOffset, nativeOffset)) {
        ReportOutOfMemory(cx);
        return Method_Error;
      }
    }

    // Emit traps for breakpoints and step mode.
    if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
      return Method_Error;
    }

    perfSpewer_.recordInstruction(cx, masm, handler.pc(), frame);

// Expands to one switch case per opcode, calling the matching emit_ method.
#define EMIT_OP(OP, ...)                                                     \
  case JSOp::OP: {                                                           \
    AutoCreatedBy acb(masm, "op=" #OP);                                      \
    if (MOZ_UNLIKELY(!this->emit_##OP())) return Method_Error;               \
  } break;

    switch (op) {
      FOR_EACH_OPCODE(EMIT_OP)
      default:
        MOZ_CRASH("Unexpected op");
    }

#undef EMIT_OP

    MOZ_ASSERT(masm.framePushed() == 0);

    // Test if last instructions and stop emitting in that case.
    handler.moveToNextPC();
    if (handler.pc() >= script->codeEnd()) {
      break;
    }

#ifdef DEBUG
    prevpc = handler.pc();
#endif
  }

  MOZ_ASSERT(JSOp(*prevpc) == JSOp::RetRval || JSOp(*prevpc) == JSOp::Return);
  return Method_Compiled;
}
// Emit a nop that can later be patched into a call to the debug trap handler,
// and record its offset so it can be toggled.
bool BaselineInterpreterGenerator::emitDebugTrap() {
  CodeOffset offset = masm.nopPatchableToCall();
  if (!debugTrapOffsets_.append(offset.offset())) {
    ReportOutOfMemory(cx);
    return false;
  }

  return true;
}
// Register holding the bytecode pc during dispatch. This exists so the debug
// trap handler can reload the pc into this register when it's done. On
// platforms without a dedicated pc register, R0's scratch register is used.
static constexpr Register InterpreterPCRegAtDispatch =
    HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
// Emits the generic interpreter dispatch loop:
//  - the interpretOpWithPCRegLabel entry point that loads the current opcode
//    byte and jumps through the opcode table,
//  - per-op bodies (via emit_<OP>) each followed by an epilogue that bumps
//    the IC entry / bytecode pc and re-dispatches (threaded interpretation),
//  - the external entry points used by exception handling, OSR, and Ion
//    prologue bailouts,
//  - the patchable-call target that tail-calls the debug trap handler, and
//  - the opcode -> native-code-address table; the moveNearAddressWithPatch
//    loads recorded in tableLabels_ are patched to point at this table once
//    the final code address is known (see generate()).
// Returns false on OOM.
6625 bool BaselineInterpreterGenerator::emitInterpreterLoop() {
6626 AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::emitInterpreterLoop");
6628 Register scratch1 = R0.scratchReg();
6629 Register scratch2 = R1.scratchReg();
6631 // Entry point for interpreting a bytecode op. No registers are live except
6632 // for InterpreterPCReg.
6633 masm.bind(handler.interpretOpWithPCRegLabel());
6635 // Emit a patchable call for debugger breakpoints/stepping.
6636 if (!emitDebugTrap()) {
6637 return false;
6639 Label interpretOpAfterDebugTrap;
6640 masm.bind(&interpretOpAfterDebugTrap);
6642 // Load pc, bytecode op.
6643 Register pcReg = LoadBytecodePC(masm, scratch1);
6644 masm.load8ZeroExtend(Address(pcReg, 0), scratch1);
6646 // Jump to table[op].
// The table address is loaded with a patchable near-address move; the real
// address is filled in after linking (recorded via tableLabels_).
6648 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6649 if (!tableLabels_.append(label)) {
6650 return false;
6652 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6653 masm.branchToComputedAddress(pointer);
6656 // At the end of each op, emit code to bump the pc and jump to the
6657 // next op (this is also known as a threaded interpreter).
6658 auto opEpilogue = [&](JSOp op, size_t opLength) -> bool {
6659 MOZ_ASSERT(masm.framePushed() == 0);
6661 if (!BytecodeFallsThrough(op)) {
6662 // Nothing to do.
6663 masm.assumeUnreachable("unexpected fall through");
6664 return true;
6667 // Bump frame->interpreterICEntry if needed.
6668 if (BytecodeOpHasIC(op)) {
6669 frame.bumpInterpreterICEntry();
6672 // Bump bytecode PC.
6673 if (HasInterpreterPCReg()) {
6674 MOZ_ASSERT(InterpreterPCRegAtDispatch == InterpreterPCReg);
6675 masm.addPtr(Imm32(opLength), InterpreterPCReg);
6676 } else {
// No dedicated pc register: load the pc from the frame slot, advance it,
// and store it back so the frame's pc stays in sync.
6677 MOZ_ASSERT(InterpreterPCRegAtDispatch == scratch1);
6678 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCRegAtDispatch);
6679 masm.addPtr(Imm32(opLength), InterpreterPCRegAtDispatch);
6680 masm.storePtr(InterpreterPCRegAtDispatch, frame.addressOfInterpreterPC());
6683 if (!emitDebugTrap()) {
6684 return false;
6687 // Load the opcode, jump to table[op].
6688 masm.load8ZeroExtend(Address(InterpreterPCRegAtDispatch, 0), scratch1);
6689 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6690 if (!tableLabels_.append(label)) {
6691 return false;
6693 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6694 masm.branchToComputedAddress(pointer);
6695 return true;
6698 // Emit code for each bytecode op.
6699 Label opLabels[JSOP_LIMIT];
// Each op body is bound to opLabels[op] (the dispatch-table target) and is
// followed by the shared epilogue emitted by opEpilogue above.
6700 #define EMIT_OP(OP, ...) \
6702 AutoCreatedBy acb(masm, "op=" #OP); \
6703 perfSpewer_.recordOffset(masm, JSOp::OP); \
6704 masm.bind(&opLabels[uint8_t(JSOp::OP)]); \
6705 handler.setCurrentOp(JSOp::OP); \
6706 if (!this->emit_##OP()) { \
6707 return false; \
6709 if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) { \
6710 return false; \
6712 handler.resetCurrentOp(); \
6714 FOR_EACH_OPCODE(EMIT_OP)
6715 #undef EMIT_OP
6717 // External entry point to start interpreting bytecode ops. This is used for
6718 // things like exception handling and OSR. DebugModeOSR patches JIT frames to
6719 // return here from the DebugTrapHandler.
6720 masm.bind(handler.interpretOpLabel());
6721 interpretOpOffset_ = masm.currentOffset();
6722 restoreInterpreterPCReg();
6723 masm.jump(handler.interpretOpWithPCRegLabel());
6725 // Second external entry point: this skips the debug trap for the first op
6726 // and is used by OSR.
6727 interpretOpNoDebugTrapOffset_ = masm.currentOffset();
6728 restoreInterpreterPCReg();
6729 masm.jump(&interpretOpAfterDebugTrap);
6731 // External entry point for Ion prologue bailouts.
6732 bailoutPrologueOffset_ = CodeOffset(masm.currentOffset());
6733 restoreInterpreterPCReg();
6734 masm.jump(&bailoutPrologue_);
6736 // Emit debug trap handler code (target of patchable call instructions). This
6737 // is just a tail call to the debug trap handler trampoline code.
6739 JitRuntime* jrt = cx->runtime()->jitRuntime();
6740 JitCode* handlerCode =
6741 jrt->debugTrapHandler(cx, DebugTrapHandlerKind::Interpreter);
6742 if (!handlerCode) {
6743 return false;
6746 debugTrapHandlerOffset_ = masm.currentOffset();
6747 masm.jump(handlerCode);
6750 // Emit the table.
6751 masm.haltingAlign(sizeof(void*));
// On ARM/ARM64, forbid constant-pool entries and nops for the duration of
// the table so the code pointers stay contiguous at tableOffset_.
6753 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
6754 size_t numInstructions = JSOP_LIMIT * (sizeof(uintptr_t) / sizeof(uint32_t));
6755 AutoForbidPoolsAndNops afp(&masm, numInstructions);
6756 #endif
6758 tableOffset_ = masm.currentOffset();
6760 for (size_t i = 0; i < JSOP_LIMIT; i++) {
6761 const Label& opLabel = opLabels[i];
6762 MOZ_ASSERT(opLabel.bound());
6763 CodeLabel cl;
6764 masm.writeCodePointer(&cl);
6765 cl.target()->bind(opLabel.offset());
6766 masm.addCodeLabel(cl);
6769 return true;
// Emits the two out-of-line code-coverage stubs used when coverage
// instrumentation is enabled: one invoked at the script prologue
// (HandleCodeCoverageAtPrologue) and one invoked at a specific bytecode pc
// (HandleCodeCoverageAtPC). Each stub saves the interpreter pc register,
// performs a C++ ABI call with the BaselineFrame pointer (and pc for the
// at-pc variant), restores the pc register, and returns to the caller.
6772 void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
6773 AutoCreatedBy acb(masm,
6774 "BaselineInterpreterGenerator::"
6775 "emitOutOfLineCodeCoverageInstrumentation");
6777 masm.bind(handler.codeCoverageAtPrologueLabel());
6778 #ifdef JS_USE_LINK_REGISTER
6779 masm.pushReturnAddress();
6780 #endif
6782 saveInterpreterPCReg();
6784 using Fn1 = void (*)(BaselineFrame* frame);
6785 masm.setupUnalignedABICall(R0.scratchReg());
6786 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
6787 masm.passABIArg(R0.scratchReg());
6788 masm.callWithABI<Fn1, HandleCodeCoverageAtPrologue>();
6790 restoreInterpreterPCReg();
6791 masm.ret();
6793 masm.bind(handler.codeCoverageAtPCLabel());
6794 #ifdef JS_USE_LINK_REGISTER
6795 masm.pushReturnAddress();
6796 #endif
6798 saveInterpreterPCReg();
6800 using Fn2 = void (*)(BaselineFrame* frame, jsbytecode* pc);
6801 masm.setupUnalignedABICall(R0.scratchReg());
6802 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
6803 masm.passABIArg(R0.scratchReg());
6804 Register pcReg = LoadBytecodePC(masm, R2.scratchReg());
6805 masm.passABIArg(pcReg);
6806 masm.callWithABI<Fn2, HandleCodeCoverageAtPC>();
6808 restoreInterpreterPCReg();
6809 masm.ret();
// Generates the complete BaselineInterpreter: emits the prologue, the
// interpreter dispatch loop, the epilogue, and the out-of-line post-barrier
// and code-coverage stubs; links the code, registers it with the profiler's
// JitcodeGlobalTable, patches the dispatch-table address loads recorded in
// tableLabels_, and initializes `interpreter` with the collected offsets.
// Finally toggles profiler / code-coverage instrumentation to match the
// current runtime state. Returns false on OOM or linking failure.
6812 bool BaselineInterpreterGenerator::generate(BaselineInterpreter& interpreter) {
6813 AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::generate");
6815 perfSpewer_.recordOffset(masm, "Prologue");
6816 if (!emitPrologue()) {
6817 return false;
6820 perfSpewer_.recordOffset(masm, "InterpreterLoop");
6821 if (!emitInterpreterLoop()) {
6822 return false;
6825 perfSpewer_.recordOffset(masm, "Epilogue");
6826 if (!emitEpilogue()) {
6827 return false;
6830 perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
6831 if (!emitOutOfLinePostBarrierSlot()) {
6832 return false;
6835 perfSpewer_.recordOffset(masm, "OOLCodeCoverageInstrumentation");
6836 emitOutOfLineCodeCoverageInstrumentation();
6839 AutoCreatedBy acb(masm, "everything_else");
6840 Linker linker(masm);
6841 if (masm.oom()) {
6842 ReportOutOfMemory(cx);
6843 return false;
6846 JitCode* code = linker.newCode(cx, CodeKind::Other);
6847 if (!code) {
6848 return false;
6851 // Register BaselineInterpreter code with the profiler's JitCode table.
6853 auto entry = MakeJitcodeGlobalEntry<BaselineInterpreterEntry>(
6854 cx, code, code->raw(), code->rawEnd());
6855 if (!entry) {
6856 return false;
6859 JitcodeGlobalTable* globalTable =
6860 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
6861 if (!globalTable->addEntry(std::move(entry))) {
6862 ReportOutOfMemory(cx);
6863 return false;
6866 code->setHasBytecodeMap();
6869 // Patch loads now that we know the tableswitch base address.
6870 CodeLocationLabel tableLoc(code, CodeOffset(tableOffset_));
6871 for (CodeOffset off : tableLabels_) {
6872 MacroAssembler::patchNearAddressMove(CodeLocationLabel(code, off),
6873 tableLoc);
6876 perfSpewer_.saveProfile(code);
6878 #ifdef MOZ_VTUNE
6879 vtune::MarkStub(code, "BaselineInterpreter");
6880 #endif
// Hand ownership of all recorded offsets/tables to the interpreter object.
6882 interpreter.init(
6883 code, interpretOpOffset_, interpretOpNoDebugTrapOffset_,
6884 bailoutPrologueOffset_.offset(),
6885 profilerEnterFrameToggleOffset_.offset(),
6886 profilerExitFrameToggleOffset_.offset(), debugTrapHandlerOffset_,
6887 std::move(handler.debugInstrumentationOffsets()),
6888 std::move(debugTrapOffsets_), std::move(handler.codeCoverageOffsets()),
6889 std::move(handler.icReturnOffsets()), handler.callVMOffsets());
6892 if (cx->runtime()->geckoProfiler().enabled()) {
6893 interpreter.toggleProfilerInstrumentation(true);
6896 if (coverage::IsLCovEnabled()) {
6897 interpreter.toggleCodeCoverageInstrumentationUnchecked(true);
6900 return true;
// Generates the debug trap handler trampoline that the patchable nop/call
// sites (see emitDebugTrap) jump to. For DebugTrapHandlerKind::Interpreter
// it first returns early unless the current script has a DebugScript
// (breakpoints or step mode); otherwise it enters a stub frame with a null
// ICStub pointer and calls the HandleDebugTrap VM function, then (for the
// interpreter kind) reloads the bytecode pc register before returning.
// Returns nullptr on OOM / linking failure.
6903 JitCode* JitRuntime::generateDebugTrapHandler(JSContext* cx,
6904 DebugTrapHandlerKind kind) {
6905 TempAllocator temp(&cx->tempLifoAlloc());
6906 StackMacroAssembler masm(cx, temp);
6907 AutoCreatedBy acb(masm, "JitRuntime::generateDebugTrapHandler");
// Reserve registers that must keep their values across this trampoline.
6909 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
6910 MOZ_ASSERT(!regs.has(FramePointer));
6911 regs.takeUnchecked(ICStubReg);
6912 if (HasInterpreterPCReg()) {
6913 regs.takeUnchecked(InterpreterPCReg);
6915 #ifdef JS_CODEGEN_ARM
6916 regs.takeUnchecked(BaselineSecondScratchReg);
6917 AutoNonDefaultSecondScratchRegister andssr(masm, BaselineSecondScratchReg);
6918 #endif
6919 Register scratch1 = regs.takeAny();
6920 Register scratch2 = regs.takeAny();
6921 Register scratch3 = regs.takeAny();
6923 if (kind == DebugTrapHandlerKind::Interpreter) {
6924 // The interpreter calls this for every script when debugging, so check if
6925 // the script has any breakpoints or is in step mode before calling into
6926 // C++.
6927 Label hasDebugScript;
6928 Address scriptAddr(FramePointer,
6929 BaselineFrame::reverseOffsetOfInterpreterScript());
6930 masm.loadPtr(scriptAddr, scratch1);
6931 masm.branchTest32(Assembler::NonZero,
6932 Address(scratch1, JSScript::offsetOfMutableFlags()),
6933 Imm32(int32_t(JSScript::MutableFlags::HasDebugScript)),
6934 &hasDebugScript);
6935 masm.abiret();
6936 masm.bind(&hasDebugScript);
6938 if (HasInterpreterPCReg()) {
6939 // Update frame's bytecode pc because the debugger depends on it.
6940 Address pcAddr(FramePointer,
6941 BaselineFrame::reverseOffsetOfInterpreterPC());
6942 masm.storePtr(InterpreterPCReg, pcAddr);
6946 // Load the return address in scratch1.
6947 masm.loadAbiReturnAddress(scratch1);
6949 // Load BaselineFrame pointer in scratch2.
6950 masm.loadBaselineFramePtr(FramePointer, scratch2);
6952 // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
6953 // the stub frame has a nullptr ICStub pointer, since this pointer is marked
6954 // during GC.
6955 masm.movePtr(ImmPtr(nullptr), ICStubReg);
6956 EmitBaselineEnterStubFrame(masm, scratch3);
6958 using Fn = bool (*)(JSContext*, BaselineFrame*, const uint8_t*);
6959 VMFunctionId id = VMFunctionToId<Fn, jit::HandleDebugTrap>::id;
6960 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
// Push the return address and frame pointer as the VM call's arguments.
6962 masm.push(scratch1);
6963 masm.push(scratch2);
6964 EmitBaselineCallVM(code, masm);
6966 EmitBaselineLeaveStubFrame(masm);
6968 if (kind == DebugTrapHandlerKind::Interpreter) {
6969 // We have to reload the bytecode pc register.
6970 Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
6971 masm.loadPtr(pcAddr, InterpreterPCRegAtDispatch);
6973 masm.abiret();
6975 Linker linker(masm);
6976 JitCode* handlerCode = linker.newCode(cx, CodeKind::Other);
6977 if (!handlerCode) {
6978 return nullptr;
6981 CollectPerfSpewerJitCodeProfile(handlerCode, "DebugTrapHandler");
6983 #ifdef MOZ_VTUNE
6984 vtune::MarkStub(handlerCode, "DebugTrapHandler");
6985 #endif
6987 return handlerCode;
6990 } // namespace jit
6991 } // namespace js