// Bug 1834537 - Part 1: Simplify JIT nursery allocation r=jandem
// [gecko.git] / js / src / jit / BaselineCodeGen.cpp
// blob 4b5d29350ef2badab792cd37ccb8f2a9b397c939
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineCodeGen.h"
9 #include "mozilla/Casting.h"
11 #include "gc/GC.h"
12 #include "jit/BaselineIC.h"
13 #include "jit/BaselineJIT.h"
14 #include "jit/CacheIRCompiler.h"
15 #include "jit/CacheIRGenerator.h"
16 #include "jit/CalleeToken.h"
17 #include "jit/FixedList.h"
18 #include "jit/IonOptimizationLevels.h"
19 #include "jit/JitcodeMap.h"
20 #include "jit/JitFrames.h"
21 #include "jit/JitRuntime.h"
22 #include "jit/JitSpewer.h"
23 #include "jit/Linker.h"
24 #include "jit/PerfSpewer.h"
25 #include "jit/SharedICHelpers.h"
26 #include "jit/TemplateObject.h"
27 #include "jit/TrialInlining.h"
28 #include "jit/VMFunctions.h"
29 #include "js/friend/ErrorMessages.h" // JSMSG_*
30 #include "js/UniquePtr.h"
31 #include "vm/AsyncFunction.h"
32 #include "vm/AsyncIteration.h"
33 #include "vm/BuiltinObjectKind.h"
34 #include "vm/EnvironmentObject.h"
35 #include "vm/FunctionFlags.h" // js::FunctionFlags
36 #include "vm/Interpreter.h"
37 #include "vm/JSFunction.h"
38 #include "vm/Time.h"
39 #ifdef MOZ_VTUNE
40 # include "vtune/VTuneWrapper.h"
41 #endif
43 #include "debugger/DebugAPI-inl.h"
44 #include "jit/BaselineFrameInfo-inl.h"
45 #include "jit/JitHints-inl.h"
46 #include "jit/JitScript-inl.h"
47 #include "jit/MacroAssembler-inl.h"
48 #include "jit/SharedICHelpers-inl.h"
49 #include "jit/TemplateObject-inl.h"
50 #include "jit/VMFunctionList-inl.h"
51 #include "vm/Interpreter-inl.h"
52 #include "vm/JSScript-inl.h"
54 using namespace js;
55 using namespace js::jit;
57 using JS::TraceKind;
59 using mozilla::AssertedCast;
60 using mozilla::Maybe;
62 namespace js {
64 class PlainObject;
66 namespace jit {
68 BaselineCompilerHandler::BaselineCompilerHandler(JSContext* cx,
69 MacroAssembler& masm,
70 TempAllocator& alloc,
71 JSScript* script)
72 : frame_(script, masm),
73 alloc_(alloc),
74 analysis_(alloc, script),
75 #ifdef DEBUG
76 masm_(masm),
77 #endif
78 script_(script),
79 pc_(script->code()),
80 icEntryIndex_(0),
81 compileDebugInstrumentation_(script->isDebuggee()),
82 ionCompileable_(IsIonEnabled(cx) && CanIonCompileScript(cx, script)) {
85 BaselineInterpreterHandler::BaselineInterpreterHandler(JSContext* cx,
86 MacroAssembler& masm)
87 : frame_(masm) {}
89 template <typename Handler>
90 template <typename... HandlerArgs>
91 BaselineCodeGen<Handler>::BaselineCodeGen(JSContext* cx, TempAllocator& alloc,
92 HandlerArgs&&... args)
93 : handler(cx, masm, std::forward<HandlerArgs>(args)...),
94 cx(cx),
95 masm(cx, alloc),
96 frame(handler.frame()) {}
98 BaselineCompiler::BaselineCompiler(JSContext* cx, TempAllocator& alloc,
99 JSScript* script)
100 : BaselineCodeGen(cx, alloc, /* HandlerArgs = */ alloc, script),
101 profilerPushToggleOffset_() {
102 #ifdef JS_CODEGEN_NONE
103 MOZ_CRASH();
104 #endif
107 BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext* cx,
108 TempAllocator& alloc)
109 : BaselineCodeGen(cx, alloc /* no handlerArgs */) {}
111 bool BaselineCompilerHandler::init(JSContext* cx) {
112 if (!analysis_.init(alloc_)) {
113 return false;
116 uint32_t len = script_->length();
118 if (!labels_.init(alloc_, len)) {
119 return false;
122 for (size_t i = 0; i < len; i++) {
123 new (&labels_[i]) Label();
126 if (!frame_.init(alloc_)) {
127 return false;
130 return true;
133 bool BaselineCompiler::init() {
134 if (!handler.init(cx)) {
135 return false;
138 return true;
141 bool BaselineCompilerHandler::recordCallRetAddr(JSContext* cx,
142 RetAddrEntry::Kind kind,
143 uint32_t retOffset) {
144 uint32_t pcOffset = script_->pcToOffset(pc_);
146 // Entries must be sorted by pcOffset for binary search to work.
147 // See BaselineScript::retAddrEntryFromPCOffset.
148 MOZ_ASSERT_IF(!retAddrEntries_.empty(),
149 retAddrEntries_.back().pcOffset() <= pcOffset);
151 // Similarly, entries must be sorted by return offset and this offset must be
152 // unique. See BaselineScript::retAddrEntryFromReturnOffset.
153 MOZ_ASSERT_IF(!retAddrEntries_.empty() && !masm_.oom(),
154 retAddrEntries_.back().returnOffset().offset() < retOffset);
156 if (!retAddrEntries_.emplaceBack(pcOffset, kind, CodeOffset(retOffset))) {
157 ReportOutOfMemory(cx);
158 return false;
161 return true;
164 bool BaselineInterpreterHandler::recordCallRetAddr(JSContext* cx,
165 RetAddrEntry::Kind kind,
166 uint32_t retOffset) {
167 switch (kind) {
168 case RetAddrEntry::Kind::DebugPrologue:
169 MOZ_ASSERT(callVMOffsets_.debugPrologueOffset == 0,
170 "expected single DebugPrologue call");
171 callVMOffsets_.debugPrologueOffset = retOffset;
172 break;
173 case RetAddrEntry::Kind::DebugEpilogue:
174 MOZ_ASSERT(callVMOffsets_.debugEpilogueOffset == 0,
175 "expected single DebugEpilogue call");
176 callVMOffsets_.debugEpilogueOffset = retOffset;
177 break;
178 case RetAddrEntry::Kind::DebugAfterYield:
179 MOZ_ASSERT(callVMOffsets_.debugAfterYieldOffset == 0,
180 "expected single DebugAfterYield call");
181 callVMOffsets_.debugAfterYieldOffset = retOffset;
182 break;
183 default:
184 break;
187 return true;
190 bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
191 JSContext* cx, CodeOffset offset) {
192 if (!debugInstrumentationOffsets_.append(offset.offset())) {
193 ReportOutOfMemory(cx);
194 return false;
196 return true;
199 MethodStatus BaselineCompiler::compile() {
200 AutoCreatedBy acb(masm, "BaselineCompiler::compile");
202 Rooted<JSScript*> script(cx, handler.script());
203 JitSpew(JitSpew_BaselineScripts, "Baseline compiling script %s:%u:%u (%p)",
204 script->filename(), script->lineno(), script->column(), script.get());
206 JitSpew(JitSpew_Codegen, "# Emitting baseline code for script %s:%u:%u",
207 script->filename(), script->lineno(), script->column());
209 AutoIncrementalTimer timer(cx->realm()->timers.baselineCompileTime);
211 AutoKeepJitScripts keepJitScript(cx);
212 if (!script->ensureHasJitScript(cx, keepJitScript)) {
213 return Method_Error;
216 // When code coverage is enabled, we have to create the ScriptCounts if they
217 // do not exist.
218 if (!script->hasScriptCounts() && cx->realm()->collectCoverageForDebug()) {
219 if (!script->initScriptCounts(cx)) {
220 return Method_Error;
224 if (!JitOptions.disableJitHints &&
225 cx->runtime()->jitRuntime()->hasJitHintsMap()) {
226 JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap();
227 jitHints->setEagerBaselineHint(script);
230 // Suppress GC during compilation.
231 gc::AutoSuppressGC suppressGC(cx);
233 if (!script->jitScript()->ensureHasCachedBaselineJitData(cx, script)) {
234 return Method_Error;
237 MOZ_ASSERT(!script->hasBaselineScript());
239 perfSpewer_.recordOffset(masm, "Prologue");
240 if (!emitPrologue()) {
241 return Method_Error;
244 MethodStatus status = emitBody();
245 if (status != Method_Compiled) {
246 return status;
249 perfSpewer_.recordOffset(masm, "Epilogue");
250 if (!emitEpilogue()) {
251 return Method_Error;
254 perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
255 if (!emitOutOfLinePostBarrierSlot()) {
256 return Method_Error;
259 AutoCreatedBy acb2(masm, "exception_tail");
260 Linker linker(masm);
261 if (masm.oom()) {
262 ReportOutOfMemory(cx);
263 return Method_Error;
266 JitCode* code = linker.newCode(cx, CodeKind::Baseline);
267 if (!code) {
268 return Method_Error;
271 UniquePtr<BaselineScript> baselineScript(
272 BaselineScript::New(
273 cx, warmUpCheckPrologueOffset_.offset(),
274 profilerEnterFrameToggleOffset_.offset(),
275 profilerExitFrameToggleOffset_.offset(),
276 handler.retAddrEntries().length(), handler.osrEntries().length(),
277 debugTrapEntries_.length(), script->resumeOffsets().size()),
278 JS::DeletePolicy<BaselineScript>(cx->runtime()));
279 if (!baselineScript) {
280 return Method_Error;
283 baselineScript->setMethod(code);
285 JitSpew(JitSpew_BaselineScripts,
286 "Created BaselineScript %p (raw %p) for %s:%u:%u",
287 (void*)baselineScript.get(), (void*)code->raw(), script->filename(),
288 script->lineno(), script->column());
290 baselineScript->copyRetAddrEntries(handler.retAddrEntries().begin());
291 baselineScript->copyOSREntries(handler.osrEntries().begin());
292 baselineScript->copyDebugTrapEntries(debugTrapEntries_.begin());
294 // If profiler instrumentation is enabled, toggle instrumentation on.
295 if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
296 cx->runtime())) {
297 baselineScript->toggleProfilerInstrumentation(true);
300 // Compute native resume addresses for the script's resume offsets.
301 baselineScript->computeResumeNativeOffsets(script, resumeOffsetEntries_);
303 if (compileDebugInstrumentation()) {
304 baselineScript->setHasDebugInstrumentation();
307 // Always register a native => bytecode mapping entry, since profiler can be
308 // turned on with baseline jitcode on stack, and baseline jitcode cannot be
309 // invalidated.
311 JitSpew(JitSpew_Profiling,
312 "Added JitcodeGlobalEntry for baseline script %s:%u:%u (%p)",
313 script->filename(), script->lineno(), script->column(),
314 baselineScript.get());
316 // Generate profiling string.
317 UniqueChars str = GeckoProfilerRuntime::allocProfileString(cx, script);
318 if (!str) {
319 return Method_Error;
322 auto entry = MakeJitcodeGlobalEntry<BaselineEntry>(
323 cx, code, code->raw(), code->rawEnd(), script, std::move(str));
324 if (!entry) {
325 return Method_Error;
328 JitcodeGlobalTable* globalTable =
329 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
330 if (!globalTable->addEntry(std::move(entry))) {
331 ReportOutOfMemory(cx);
332 return Method_Error;
335 // Mark the jitcode as having a bytecode map.
336 code->setHasBytecodeMap();
339 script->jitScript()->setBaselineScript(script, baselineScript.release());
341 perfSpewer_.saveProfile(cx, script, code);
343 #ifdef MOZ_VTUNE
344 vtune::MarkScript(code, script, "baseline");
345 #endif
347 return Method_Compiled;
350 // On most platforms we use a dedicated bytecode PC register to avoid many
351 // dependent loads and stores for sequences of simple bytecode ops. This
352 // register must be saved/restored around VM and IC calls.
354 // On 32-bit x86 we don't have enough registers for this (because R0-R2 require
355 // 6 registers) so there we always store the pc on the frame.
356 static constexpr bool HasInterpreterPCReg() {
357 return InterpreterPCReg != InvalidReg;
360 static Register LoadBytecodePC(MacroAssembler& masm, Register scratch) {
361 if (HasInterpreterPCReg()) {
362 return InterpreterPCReg;
365 Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
366 masm.loadPtr(pcAddr, scratch);
367 return scratch;
370 static void LoadInt8Operand(MacroAssembler& masm, Register dest) {
371 Register pc = LoadBytecodePC(masm, dest);
372 masm.load8SignExtend(Address(pc, sizeof(jsbytecode)), dest);
375 static void LoadUint8Operand(MacroAssembler& masm, Register dest) {
376 Register pc = LoadBytecodePC(masm, dest);
377 masm.load8ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
380 static void LoadUint16Operand(MacroAssembler& masm, Register dest) {
381 Register pc = LoadBytecodePC(masm, dest);
382 masm.load16ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
385 static void LoadInt32Operand(MacroAssembler& masm, Register dest) {
386 Register pc = LoadBytecodePC(masm, dest);
387 masm.load32(Address(pc, sizeof(jsbytecode)), dest);
390 static void LoadInt32OperandSignExtendToPtr(MacroAssembler& masm, Register pc,
391 Register dest) {
392 masm.load32SignExtendToPtr(Address(pc, sizeof(jsbytecode)), dest);
395 static void LoadUint24Operand(MacroAssembler& masm, size_t offset,
396 Register dest) {
397 // Load the opcode and operand, then left shift to discard the opcode.
398 Register pc = LoadBytecodePC(masm, dest);
399 masm.load32(Address(pc, offset), dest);
400 masm.rshift32(Imm32(8), dest);
403 static void LoadInlineValueOperand(MacroAssembler& masm, ValueOperand dest) {
404 // Note: the Value might be unaligned but as above we rely on all our
405 // platforms having appropriate support for unaligned accesses (except for
406 // floating point instructions on ARM).
407 Register pc = LoadBytecodePC(masm, dest.scratchReg());
408 masm.loadUnalignedValue(Address(pc, sizeof(jsbytecode)), dest);
411 template <>
412 void BaselineCompilerCodeGen::loadScript(Register dest) {
413 masm.movePtr(ImmGCPtr(handler.script()), dest);
416 template <>
417 void BaselineInterpreterCodeGen::loadScript(Register dest) {
418 masm.loadPtr(frame.addressOfInterpreterScript(), dest);
421 template <>
422 void BaselineCompilerCodeGen::saveInterpreterPCReg() {}
424 template <>
425 void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
426 if (HasInterpreterPCReg()) {
427 masm.storePtr(InterpreterPCReg, frame.addressOfInterpreterPC());
431 template <>
432 void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}
434 template <>
435 void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
436 if (HasInterpreterPCReg()) {
437 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCReg);
441 template <>
442 void BaselineCompilerCodeGen::emitInitializeLocals() {
443 // Initialize all locals to |undefined|. Lexical bindings are temporal
444 // dead zoned in bytecode.
446 size_t n = frame.nlocals();
447 if (n == 0) {
448 return;
451 // Use R0 to minimize code size. If the number of locals to push is <
452 // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
453 // and inline. Otherwise, they're emitted in a partially unrolled loop.
454 static const size_t LOOP_UNROLL_FACTOR = 4;
455 size_t toPushExtra = n % LOOP_UNROLL_FACTOR;
457 masm.moveValue(UndefinedValue(), R0);
459 // Handle any extra pushes left over by the optional unrolled loop below.
460 for (size_t i = 0; i < toPushExtra; i++) {
461 masm.pushValue(R0);
464 // Partially unrolled loop of pushes.
465 if (n >= LOOP_UNROLL_FACTOR) {
466 size_t toPush = n - toPushExtra;
467 MOZ_ASSERT(toPush % LOOP_UNROLL_FACTOR == 0);
468 MOZ_ASSERT(toPush >= LOOP_UNROLL_FACTOR);
469 masm.move32(Imm32(toPush), R1.scratchReg());
470 // Emit unrolled loop with 4 pushes per iteration.
471 Label pushLoop;
472 masm.bind(&pushLoop);
473 for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++) {
474 masm.pushValue(R0);
476 masm.branchSub32(Assembler::NonZero, Imm32(LOOP_UNROLL_FACTOR),
477 R1.scratchReg(), &pushLoop);
481 template <>
482 void BaselineInterpreterCodeGen::emitInitializeLocals() {
483 // Push |undefined| for all locals.
485 Register scratch = R0.scratchReg();
486 loadScript(scratch);
487 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
488 masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
489 scratch);
490 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNfixed()), scratch);
492 Label top, done;
493 masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
494 masm.bind(&top);
496 masm.pushValue(UndefinedValue());
497 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
499 masm.bind(&done);
502 // On input:
503 // R2.scratchReg() contains object being written to.
504 // Called with the baseline stack synced, except for R0 which is preserved.
505 // All other registers are usable as scratch.
506 // This calls:
507 // void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
508 template <typename Handler>
509 bool BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot() {
510 AutoCreatedBy acb(masm,
511 "BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot");
513 if (!postBarrierSlot_.used()) {
514 return true;
517 masm.bind(&postBarrierSlot_);
519 saveInterpreterPCReg();
521 Register objReg = R2.scratchReg();
522 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
523 MOZ_ASSERT(!regs.has(FramePointer));
524 regs.take(R0);
525 regs.take(objReg);
526 Register scratch = regs.takeAny();
527 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
528 // On ARM, save the link register before calling. It contains the return
529 // address. The |masm.ret()| later will pop this into |pc| to return.
530 masm.push(lr);
531 #elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
532 masm.push(ra);
533 #elif defined(JS_CODEGEN_LOONG64)
534 masm.push(ra);
535 #elif defined(JS_CODEGEN_RISCV64)
536 masm.push(ra);
537 #endif
538 masm.pushValue(R0);
540 using Fn = void (*)(JSRuntime* rt, js::gc::Cell* cell);
541 masm.setupUnalignedABICall(scratch);
542 masm.movePtr(ImmPtr(cx->runtime()), scratch);
543 masm.passABIArg(scratch);
544 masm.passABIArg(objReg);
545 masm.callWithABI<Fn, PostWriteBarrier>();
547 restoreInterpreterPCReg();
549 masm.popValue(R0);
550 masm.ret();
551 return true;
554 // Scan the a cache IR stub's fields and create an allocation site for any that
555 // refer to the catch-all unknown allocation site. This will be the case for
556 // stubs created when running in the interpreter. This happens on transition to
557 // baseline.
558 static bool CreateAllocSitesForCacheIRStub(JSScript* script,
559 ICCacheIRStub* stub) {
560 const CacheIRStubInfo* stubInfo = stub->stubInfo();
561 uint8_t* stubData = stub->stubDataStart();
563 uint32_t field = 0;
564 size_t offset = 0;
565 while (true) {
566 StubField::Type fieldType = stubInfo->fieldType(field);
567 if (fieldType == StubField::Type::Limit) {
568 break;
571 if (fieldType == StubField::Type::AllocSite) {
572 gc::AllocSite* site =
573 stubInfo->getPtrStubField<ICCacheIRStub, gc::AllocSite>(stub, offset);
574 if (site->kind() == gc::AllocSite::Kind::Unknown) {
575 gc::AllocSite* newSite = script->createAllocSite();
576 if (!newSite) {
577 return false;
580 stubInfo->replaceStubRawWord(stubData, offset, uintptr_t(site),
581 uintptr_t(newSite));
585 field++;
586 offset += StubField::sizeInBytes(fieldType);
589 return true;
592 static void CreateAllocSitesForICChain(JSScript* script, uint32_t entryIndex) {
593 JitScript* jitScript = script->jitScript();
594 ICStub* stub = jitScript->icEntry(entryIndex).firstStub();
596 while (!stub->isFallback()) {
597 if (!CreateAllocSitesForCacheIRStub(script, stub->toCacheIRStub())) {
598 // This is an optimization and safe to skip if we hit OOM or per-zone
599 // limit.
600 return;
602 stub = stub->toCacheIRStub()->next();
606 template <>
607 bool BaselineCompilerCodeGen::emitNextIC() {
608 AutoCreatedBy acb(masm, "emitNextIC");
610 // Emit a call to an IC stored in JitScript. Calls to this must match the
611 // ICEntry order in JitScript: first the non-op IC entries for |this| and
612 // formal arguments, then the for-op IC entries for JOF_IC ops.
614 JSScript* script = handler.script();
615 uint32_t pcOffset = script->pcToOffset(handler.pc());
617 // We don't use every ICEntry and we can skip unreachable ops, so we have
618 // to loop until we find an ICEntry for the current pc.
619 const ICFallbackStub* stub;
620 uint32_t entryIndex;
621 do {
622 stub = script->jitScript()->fallbackStub(handler.icEntryIndex());
623 entryIndex = handler.icEntryIndex();
624 handler.moveToNextICEntry();
625 } while (stub->pcOffset() < pcOffset);
627 MOZ_ASSERT(stub->pcOffset() == pcOffset);
628 MOZ_ASSERT(BytecodeOpHasIC(JSOp(*handler.pc())));
630 if (BytecodeOpCanHaveAllocSite(JSOp(*handler.pc()))) {
631 CreateAllocSitesForICChain(script, entryIndex);
634 // Load stub pointer into ICStubReg.
635 masm.loadPtr(frame.addressOfICScript(), ICStubReg);
636 size_t firstStubOffset = ICScript::offsetOfFirstStub(entryIndex);
637 masm.loadPtr(Address(ICStubReg, firstStubOffset), ICStubReg);
639 CodeOffset returnOffset;
640 EmitCallIC(masm, &returnOffset);
642 RetAddrEntry::Kind kind = RetAddrEntry::Kind::IC;
643 if (!handler.retAddrEntries().emplaceBack(pcOffset, kind, returnOffset)) {
644 ReportOutOfMemory(cx);
645 return false;
648 return true;
651 template <>
652 bool BaselineInterpreterCodeGen::emitNextIC() {
653 saveInterpreterPCReg();
654 masm.loadPtr(frame.addressOfInterpreterICEntry(), ICStubReg);
655 masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
656 masm.call(Address(ICStubReg, ICStub::offsetOfStubCode()));
657 uint32_t returnOffset = masm.currentOffset();
658 restoreInterpreterPCReg();
660 // If this is an IC for a bytecode op where Ion may inline scripts, we need to
661 // record the return offset for Ion bailouts.
662 if (handler.currentOp()) {
663 JSOp op = *handler.currentOp();
664 MOZ_ASSERT(BytecodeOpHasIC(op));
665 if (IsIonInlinableOp(op)) {
666 if (!handler.icReturnOffsets().emplaceBack(returnOffset, op)) {
667 return false;
672 return true;
675 template <>
676 void BaselineCompilerCodeGen::computeFrameSize(Register dest) {
677 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
678 masm.move32(Imm32(frame.frameSize()), dest);
681 template <>
682 void BaselineInterpreterCodeGen::computeFrameSize(Register dest) {
683 // dest := FramePointer - StackPointer.
684 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
685 masm.mov(FramePointer, dest);
686 masm.subStackPtrFrom(dest);
689 template <typename Handler>
690 void BaselineCodeGen<Handler>::prepareVMCall() {
691 pushedBeforeCall_ = masm.framePushed();
692 #ifdef DEBUG
693 inCall_ = true;
694 #endif
696 // Ensure everything is synced.
697 frame.syncStack(0);
700 template <>
701 void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
702 uint32_t argSize, Register scratch) {
703 #ifdef DEBUG
704 masm.store32(Imm32(frame.frameSize()), frame.addressOfDebugFrameSize());
705 #endif
707 masm.pushFrameDescriptor(FrameType::BaselineJS);
710 template <>
711 void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
712 uint32_t argSize, Register scratch) {
713 #ifdef DEBUG
714 // Store the frame size without VMFunction arguments in debug builds.
715 // scratch := FramePointer - StackPointer - argSize.
716 masm.mov(FramePointer, scratch);
717 masm.subStackPtrFrom(scratch);
718 masm.sub32(Imm32(argSize), scratch);
719 masm.store32(scratch, frame.addressOfDebugFrameSize());
720 #endif
722 masm.pushFrameDescriptor(FrameType::BaselineJS);
725 static uint32_t GetVMFunctionArgSize(const VMFunctionData& fun) {
726 return fun.explicitStackSlots() * sizeof(void*);
729 template <typename Handler>
730 bool BaselineCodeGen<Handler>::callVMInternal(VMFunctionId id,
731 RetAddrEntry::Kind kind,
732 CallVMPhase phase) {
733 #ifdef DEBUG
734 // Assert prepareVMCall() has been called.
735 MOZ_ASSERT(inCall_);
736 inCall_ = false;
737 #endif
739 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
740 const VMFunctionData& fun = GetVMFunction(id);
742 uint32_t argSize = GetVMFunctionArgSize(fun);
744 // Assert all arguments were pushed.
745 MOZ_ASSERT(masm.framePushed() - pushedBeforeCall_ == argSize);
747 saveInterpreterPCReg();
749 if (phase == CallVMPhase::AfterPushingLocals) {
750 storeFrameSizeAndPushDescriptor(argSize, R0.scratchReg());
751 } else {
752 MOZ_ASSERT(phase == CallVMPhase::BeforePushingLocals);
753 #ifdef DEBUG
754 uint32_t frameBaseSize = BaselineFrame::frameSizeForNumValueSlots(0);
755 masm.store32(Imm32(frameBaseSize), frame.addressOfDebugFrameSize());
756 #endif
757 masm.pushFrameDescriptor(FrameType::BaselineJS);
759 MOZ_ASSERT(fun.expectTailCall == NonTailCall);
760 // Perform the call.
761 masm.call(code);
762 uint32_t callOffset = masm.currentOffset();
764 // Pop arguments from framePushed.
765 masm.implicitPop(argSize);
767 restoreInterpreterPCReg();
769 return handler.recordCallRetAddr(cx, kind, callOffset);
772 template <typename Handler>
773 template <typename Fn, Fn fn>
774 bool BaselineCodeGen<Handler>::callVM(RetAddrEntry::Kind kind,
775 CallVMPhase phase) {
776 VMFunctionId fnId = VMFunctionToId<Fn, fn>::id;
777 return callVMInternal(fnId, kind, phase);
780 template <typename Handler>
781 bool BaselineCodeGen<Handler>::emitStackCheck() {
782 Label skipCall;
783 if (handler.mustIncludeSlotsInStackCheck()) {
784 // Subtract the size of script->nslots() first.
785 Register scratch = R1.scratchReg();
786 masm.moveStackPtrTo(scratch);
787 subtractScriptSlotsSize(scratch, R2.scratchReg());
788 masm.branchPtr(Assembler::BelowOrEqual,
789 AbsoluteAddress(cx->addressOfJitStackLimit()), scratch,
790 &skipCall);
791 } else {
792 masm.branchStackPtrRhs(Assembler::BelowOrEqual,
793 AbsoluteAddress(cx->addressOfJitStackLimit()),
794 &skipCall);
797 prepareVMCall();
798 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
799 pushArg(R1.scratchReg());
801 const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
802 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::StackCheck;
804 using Fn = bool (*)(JSContext*, BaselineFrame*);
805 if (!callVM<Fn, CheckOverRecursedBaseline>(kind, phase)) {
806 return false;
809 masm.bind(&skipCall);
810 return true;
813 static void EmitCallFrameIsDebuggeeCheck(MacroAssembler& masm) {
814 using Fn = void (*)(BaselineFrame* frame);
815 masm.setupUnalignedABICall(R0.scratchReg());
816 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
817 masm.passABIArg(R0.scratchReg());
818 masm.callWithABI<Fn, FrameIsDebuggeeCheck>();
821 template <>
822 bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
823 if (handler.compileDebugInstrumentation()) {
824 EmitCallFrameIsDebuggeeCheck(masm);
826 return true;
829 template <>
830 bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
831 // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
832 // enabled.
834 // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
835 // call. Consider moving the callWithABI out-of-line.
837 Label skipCheck;
838 CodeOffset toggleOffset = masm.toggledJump(&skipCheck);
840 saveInterpreterPCReg();
841 EmitCallFrameIsDebuggeeCheck(masm);
842 restoreInterpreterPCReg();
844 masm.bind(&skipCheck);
845 return handler.addDebugInstrumentationOffset(cx, toggleOffset);
848 static void MaybeIncrementCodeCoverageCounter(MacroAssembler& masm,
849 JSScript* script,
850 jsbytecode* pc) {
851 if (!script->hasScriptCounts()) {
852 return;
854 PCCounts* counts = script->maybeGetPCCounts(pc);
855 uint64_t* counterAddr = &counts->numExec();
856 masm.inc64(AbsoluteAddress(counterAddr));
859 template <>
860 bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
861 // If the main instruction is not a jump target, then we emit the
862 // corresponding code coverage counter.
863 JSScript* script = handler.script();
864 jsbytecode* main = script->main();
865 if (!BytecodeIsJumpTarget(JSOp(*main))) {
866 MaybeIncrementCodeCoverageCounter(masm, script, main);
868 return true;
871 template <>
872 bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
873 Label skipCoverage;
874 CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
875 masm.call(handler.codeCoverageAtPrologueLabel());
876 masm.bind(&skipCoverage);
877 return handler.codeCoverageOffsets().append(toggleOffset.offset());
880 template <>
881 void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg,
882 Register scratch) {
883 uint32_t slotsSize = handler.script()->nslots() * sizeof(Value);
884 masm.subPtr(Imm32(slotsSize), reg);
887 template <>
888 void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg,
889 Register scratch) {
890 // reg = reg - script->nslots() * sizeof(Value)
891 MOZ_ASSERT(reg != scratch);
892 loadScript(scratch);
893 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
894 masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
895 scratch);
896 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNslots()), scratch);
897 static_assert(sizeof(Value) == 8,
898 "shift by 3 below assumes Value is 8 bytes");
899 masm.lshiftPtr(Imm32(3), scratch);
900 masm.subPtr(scratch, reg);
903 template <>
904 void BaselineCompilerCodeGen::loadGlobalLexicalEnvironment(Register dest) {
905 MOZ_ASSERT(!handler.script()->hasNonSyntacticScope());
906 masm.movePtr(ImmGCPtr(&cx->global()->lexicalEnvironment()), dest);
909 template <>
910 void BaselineInterpreterCodeGen::loadGlobalLexicalEnvironment(Register dest) {
911 masm.loadPtr(AbsoluteAddress(cx->addressOfRealm()), dest);
912 masm.loadPtr(Address(dest, Realm::offsetOfActiveGlobal()), dest);
913 masm.loadPrivate(Address(dest, GlobalObject::offsetOfGlobalDataSlot()), dest);
914 masm.loadPtr(Address(dest, GlobalObjectData::offsetOfLexicalEnvironment()),
915 dest);
918 template <>
919 void BaselineCompilerCodeGen::pushGlobalLexicalEnvironmentValue(
920 ValueOperand scratch) {
921 frame.push(ObjectValue(cx->global()->lexicalEnvironment()));
924 template <>
925 void BaselineInterpreterCodeGen::pushGlobalLexicalEnvironmentValue(
926 ValueOperand scratch) {
927 loadGlobalLexicalEnvironment(scratch.scratchReg());
928 masm.tagValue(JSVAL_TYPE_OBJECT, scratch.scratchReg(), scratch);
929 frame.push(scratch);
932 template <>
933 void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest) {
934 JSObject* thisObj = cx->global()->lexicalEnvironment().thisObject();
935 masm.moveValue(ObjectValue(*thisObj), dest);
938 template <>
939 void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest) {
940 Register scratch = dest.scratchReg();
941 loadGlobalLexicalEnvironment(scratch);
942 static constexpr size_t SlotOffset =
943 GlobalLexicalEnvironmentObject::offsetOfThisValueSlot();
944 masm.loadValue(Address(scratch, SlotOffset), dest);
947 template <>
948 void BaselineCompilerCodeGen::pushScriptArg() {
949 pushArg(ImmGCPtr(handler.script()));
952 template <>
953 void BaselineInterpreterCodeGen::pushScriptArg() {
954 pushArg(frame.addressOfInterpreterScript());
957 template <>
958 void BaselineCompilerCodeGen::pushBytecodePCArg() {
959 pushArg(ImmPtr(handler.pc()));
962 template <>
963 void BaselineInterpreterCodeGen::pushBytecodePCArg() {
964 if (HasInterpreterPCReg()) {
965 pushArg(InterpreterPCReg);
966 } else {
967 pushArg(frame.addressOfInterpreterPC());
971 static gc::Cell* GetScriptGCThing(JSScript* script, jsbytecode* pc,
972 ScriptGCThingType type) {
973 switch (type) {
974 case ScriptGCThingType::Atom:
975 return script->getAtom(pc);
976 case ScriptGCThingType::String:
977 return script->getString(pc);
978 case ScriptGCThingType::RegExp:
979 return script->getRegExp(pc);
980 case ScriptGCThingType::Object:
981 return script->getObject(pc);
982 case ScriptGCThingType::Function:
983 return script->getFunction(pc);
984 case ScriptGCThingType::Scope:
985 return script->getScope(pc);
986 case ScriptGCThingType::BigInt:
987 return script->getBigInt(pc);
989 MOZ_CRASH("Unexpected GCThing type");
992 template <>
993 void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type,
994 Register dest,
995 Register scratch) {
996 gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
997 masm.movePtr(ImmGCPtr(thing), dest);
// Baseline interpreter: load the GC thing for the current bytecode's operand
// at runtime from the script's PrivateScriptData gcthings vector, then strip
// the GCCellPtr tag bits so |dest| holds a plain cell pointer.
template <>
void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type,
                                                   Register dest,
                                                   Register scratch) {
  MOZ_ASSERT(dest != scratch);

  // Load the index in |scratch|.
  LoadInt32Operand(masm, scratch);

  // Load the GCCellPtr.
  loadScript(dest);
  masm.loadPtr(Address(dest, JSScript::offsetOfPrivateData()), dest);
  masm.loadPtr(BaseIndex(dest, scratch, ScalePointer,
                         PrivateScriptData::offsetOfGCThings()),
               dest);

  // Clear the tag bits. The tag value for each kind is asserted at compile
  // time so we can use a single xor with the known constant.
  switch (type) {
    case ScriptGCThingType::Atom:
    case ScriptGCThingType::String:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::String) == 2,
                    "Unexpected tag bits for string GCCellPtr");
      masm.xorPtr(Imm32(2), dest);
      break;
    case ScriptGCThingType::RegExp:
    case ScriptGCThingType::Object:
    case ScriptGCThingType::Function:
      // No-op because GCCellPtr tag bits are zero for objects.
      static_assert(uintptr_t(TraceKind::Object) == 0,
                    "Unexpected tag bits for object GCCellPtr");
      break;
    case ScriptGCThingType::BigInt:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::BigInt) == 1,
                    "Unexpected tag bits for BigInt GCCellPtr");
      masm.xorPtr(Imm32(1), dest);
      break;
    case ScriptGCThingType::Scope:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::Scope) >= JS::OutOfLineTraceKindMask,
                    "Expected Scopes to have OutOfLineTraceKindMask tag");
      masm.xorPtr(Imm32(JS::OutOfLineTraceKindMask), dest);
      break;
  }

#ifdef DEBUG
  // Assert low bits are not set, i.e. the xor above really untagged the value.
  Label ok;
  masm.branchTestPtr(Assembler::Zero, dest, Imm32(0b111), &ok);
  masm.assumeUnreachable("GC pointer with tag bits set");
  masm.bind(&ok);
#endif
}
// Push the GC thing referenced by the current bytecode operand as a VM-call
// argument. Compiler flavor: resolved at compile time and pushed as an
// immediate (the scratch registers are unused).
template <>
void BaselineCompilerCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
                                                   Register scratch1,
                                                   Register scratch2) {
  gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
  pushArg(ImmGCPtr(thing));
}

// Interpreter flavor: the cell must be loaded from the current script at
// runtime before it can be pushed.
template <>
void BaselineInterpreterCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
                                                      Register scratch1,
                                                      Register scratch2) {
  loadScriptGCThing(type, scratch1, scratch2);
  pushArg(scratch1);
}
// Push the atom operand of the current bytecode (a name) as a VM-call
// argument. Thin wrapper over pushScriptGCThingArg.
template <typename Handler>
void BaselineCodeGen<Handler>::pushScriptNameArg(Register scratch1,
                                                 Register scratch2) {
  pushScriptGCThingArg(ScriptGCThingType::Atom, scratch1, scratch2);
}
// Push the uint8 operand of the current bytecode as a VM-call argument.
// Compiler flavor: operand is known at compile time, push an immediate.
template <>
void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT8);
  pushArg(Imm32(GET_UINT8(handler.pc())));
}

// Interpreter flavor: load the operand from the bytecode stream at runtime.
template <>
void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch) {
  LoadUint8Operand(masm, scratch);
  pushArg(scratch);
}

// Same as above for uint16 operands.
template <>
void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT16);
  pushArg(Imm32(GET_UINT16(handler.pc())));
}

template <>
void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
    Register scratch) {
  LoadUint16Operand(masm, scratch);
  pushArg(scratch);
}
// Load the uint32 length operand of the current bytecode into |dest| as an
// int32. Compiler flavor: the value is a compile-time constant; the bytecode
// emitter guarantees it fits in int32 so the cast is safe.
template <>
void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  uint32_t length = GET_UINT32(handler.pc());
  MOZ_ASSERT(length <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce a length exceeding int32_t range");
  masm.move32(Imm32(AssertedCast<int32_t>(length)), dest);
}

// Interpreter flavor: load the operand from the bytecode stream at runtime.
template <>
void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  LoadInt32Operand(masm, dest);
}
// Emit the debugger prologue: when the realm is being debugged, call
// jit::DebugPrologue so the debugger can observe frame entry. The call is
// wrapped in emitDebugInstrumentation so it is skipped (or toggled) when not
// debugging.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDebugPrologue() {
  auto ifDebuggee = [this]() {
    // Load pointer to BaselineFrame in R0.
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

    prepareVMCall();
    pushArg(R0.scratchReg());

    // Distinct RetAddrEntry kind so DebugModeOSR can identify this callVM.
    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugPrologue;

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVM<Fn, jit::DebugPrologue>(kind)) {
      return false;
    }

    return true;
  };
  return emitDebugInstrumentation(ifDebuggee);
}
// Initialize the BaselineFrame's flags, environment chain and ICScript
// fields for compiled Baseline code. For function scripts the environment is
// taken from the callee; otherwise |nonFunctionEnv| is used.
template <>
void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  Register scratch = R0.scratchReg();
  Register scratch2 = R2.scratchReg();
  MOZ_ASSERT(nonFunctionEnv != scratch && nonFunctionEnv != scratch2);

  masm.store32(Imm32(0), frame.addressOfFlags());
  if (handler.function()) {
    masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
    masm.unboxObject(Address(scratch, JSFunction::offsetOfEnvironment()),
                     scratch);
    masm.storePtr(scratch, frame.addressOfEnvironmentChain());
  } else {
    masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  }

  // If cx->inlinedICScript contains an inlined ICScript (passed from
  // the caller), take that ICScript and store it in the frame, then
  // overwrite cx->inlinedICScript with nullptr.
  Label notInlined, done;
  masm.movePtr(ImmPtr(cx->addressOfInlinedICScript()), scratch);
  Address inlinedAddr(scratch, 0);
  masm.branchPtr(Assembler::Equal, inlinedAddr, ImmWord(0), &notInlined);
  masm.loadPtr(inlinedAddr, scratch2);
  masm.storePtr(scratch2, frame.addressOfICScript());
  masm.storePtr(ImmPtr(nullptr), inlinedAddr);
  masm.jump(&done);

  // Otherwise, store this script's default ICSCript in the frame.
  masm.bind(&notInlined);
  masm.storePtr(ImmPtr(handler.script()->jitScript()->icScript()),
                frame.addressOfICScript());
  masm.bind(&done);
}
// Interpreter flavor of emitInitFrameFields: everything (script, ICScript,
// IC entry, interpreter pc) must be computed at runtime from the callee
// token, since no script is known at codegen time.
template <>
void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  MOZ_ASSERT(nonFunctionEnv == R1.scratchReg(),
             "Don't clobber nonFunctionEnv below");

  // If we have a dedicated PC register we use it as scratch1 to avoid a
  // register move below.
  Register scratch1 =
      HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
  Register scratch2 = R2.scratchReg();

  masm.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER),
               frame.addressOfFlags());

  // Initialize interpreterScript.
  Label notFunction, done;
  masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
  masm.branchTestPtr(Assembler::NonZero, scratch1, Imm32(CalleeTokenScriptBit),
                     &notFunction);
  {
    // CalleeToken_Function or CalleeToken_FunctionConstructing.
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
    masm.unboxObject(Address(scratch1, JSFunction::offsetOfEnvironment()),
                     scratch2);
    masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
    masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
                     scratch1);
    masm.jump(&done);
  }
  masm.bind(&notFunction);
  {
    // CalleeToken_Script.
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
    masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  }
  masm.bind(&done);
  masm.storePtr(scratch1, frame.addressOfInterpreterScript());

  // Initialize icScript and interpreterICEntry
  masm.loadJitScript(scratch1, scratch2);
  masm.computeEffectiveAddress(Address(scratch2, JitScript::offsetOfICScript()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfICScript());
  masm.computeEffectiveAddress(Address(scratch2, ICScript::offsetOfICEntries()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());

  // Initialize interpreter pc to the script's first bytecode op.
  masm.loadPtr(Address(scratch1, JSScript::offsetOfSharedData()), scratch1);
  masm.loadPtr(Address(scratch1, SharedImmutableScriptData::offsetOfISD()),
               scratch1);
  masm.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1);

  if (HasInterpreterPCReg()) {
    MOZ_ASSERT(scratch1 == InterpreterPCReg,
               "pc must be stored in the pc register");
  } else {
    masm.storePtr(scratch1, frame.addressOfInterpreterPC());
  }
}
// Assert we don't need a post write barrier to write sourceObj to a slot of
// destObj. See comments in WarpBuilder::buildNamedLambdaEnv.
// (Debug-only: the store is safe when destObj is in the nursery or sourceObj
// is tenured; otherwise we'd be creating an unrecorded tenured->nursery edge.)
static void AssertCanElidePostWriteBarrier(MacroAssembler& masm,
                                           Register destObj, Register sourceObj,
                                           Register temp) {
#ifdef DEBUG
  Label ok;
  masm.branchPtrInNurseryChunk(Assembler::Equal, destObj, temp, &ok);
  masm.branchPtrInNurseryChunk(Assembler::NotEqual, sourceObj, temp, &ok);
  masm.assumeUnreachable("Unexpected missing post write barrier in Baseline");
  masm.bind(&ok);
#endif
}
// Compiler flavor: inline-allocate the function's environment objects
// (NamedLambdaObject and/or CallObject) from their template objects, falling
// back to a VM call on allocation failure. No-op for scripts that don't need
// function environment objects.
template <>
bool BaselineCompilerCodeGen::initEnvironmentChain() {
  if (!handler.function()) {
    return true;
  }
  if (!handler.script()->needsFunctionEnvironmentObjects()) {
    return true;
  }

  // Allocate a NamedLambdaObject and/or a CallObject. If the function needs
  // both, the NamedLambdaObject must enclose the CallObject. If one of the
  // allocations fails, we perform the whole operation in C++.

  JSObject* templateEnv = handler.script()->jitScript()->templateEnvironment();
  MOZ_ASSERT(templateEnv);

  CallObject* callObjectTemplate = nullptr;
  if (handler.function()->needsCallObject()) {
    callObjectTemplate = &templateEnv->as<CallObject>();
  }

  NamedLambdaObject* namedLambdaTemplate = nullptr;
  if (handler.function()->needsNamedLambdaEnvironment()) {
    if (callObjectTemplate) {
      templateEnv = templateEnv->enclosingEnvironment();
    }
    namedLambdaTemplate = &templateEnv->as<NamedLambdaObject>();
  }

  MOZ_ASSERT(namedLambdaTemplate || callObjectTemplate);

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  Register newEnv = regs.takeAny();
  Register enclosingEnv = regs.takeAny();
  Register callee = regs.takeAny();
  Register temp = regs.takeAny();

  Label fail;
  masm.loadPtr(frame.addressOfEnvironmentChain(), enclosingEnv);
  masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), callee);

  // Allocate a NamedLambdaObject if needed.
  if (namedLambdaTemplate) {
    TemplateObject templateObject(namedLambdaTemplate);
    masm.createGCObject(newEnv, temp, templateObject, gc::DefaultHeap, &fail);

    // Store enclosing environment.
    Address enclosingSlot(newEnv,
                          NamedLambdaObject::offsetOfEnclosingEnvironment());
    masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);

    // Store callee.
    Address lambdaSlot(newEnv, NamedLambdaObject::offsetOfLambdaSlot());
    masm.storeValue(JSVAL_TYPE_OBJECT, callee, lambdaSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);

    if (callObjectTemplate) {
      // The named-lambda env encloses the call object allocated below.
      masm.movePtr(newEnv, enclosingEnv);
    }
  }

  // Allocate a CallObject if needed.
  if (callObjectTemplate) {
    TemplateObject templateObject(callObjectTemplate);
    masm.createGCObject(newEnv, temp, templateObject, gc::DefaultHeap, &fail);

    // Store enclosing environment.
    Address enclosingSlot(newEnv, CallObject::offsetOfEnclosingEnvironment());
    masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);

    // Store callee.
    Address calleeSlot(newEnv, CallObject::offsetOfCallee());
    masm.storeValue(JSVAL_TYPE_OBJECT, callee, calleeSlot);
    AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);
  }

  // Update the frame's environment chain and mark it initialized.
  Label done;
  masm.storePtr(newEnv, frame.addressOfEnvironmentChain());
  masm.or32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
  masm.jump(&done);

  // Fallback: do the whole allocation in C++.
  masm.bind(&fail);

  prepareVMCall();

  masm.loadBaselineFramePtr(FramePointer, temp);
  pushArg(temp);

  const CallVMPhase phase = CallVMPhase::BeforePushingLocals;

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase)) {
    return false;
  }

  masm.bind(&done);
  return true;
}
// Interpreter flavor: no template objects are available, so when the script
// needs function environment objects we always call into the VM.
template <>
bool BaselineInterpreterCodeGen::initEnvironmentChain() {
  // For function scripts, call InitFunctionEnvironmentObjects if needed. For
  // non-function scripts this is a no-op.

  Label done;
  masm.branchTestPtr(Assembler::NonZero, frame.addressOfCalleeToken(),
                     Imm32(CalleeTokenScriptBit), &done);
  {
    auto initEnv = [this]() {
      // Call into the VM to create the proper environment objects.
      prepareVMCall();

      masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
      pushArg(R0.scratchReg());

      const CallVMPhase phase = CallVMPhase::BeforePushingLocals;

      using Fn = bool (*)(JSContext*, BaselineFrame*);
      return callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase);
    };
    if (!emitTestScriptFlag(
            JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects, true,
            initEnv, R2.scratchReg())) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}
// Emit an interrupt check: when cx's interrupt bits are set, call
// InterruptCheck in the VM; otherwise fall through with no call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInterruptCheck() {
  frame.syncStack(0);

  Label done;
  masm.branch32(Assembler::Equal, AbsoluteAddress(cx->addressOfInterruptBits()),
                Imm32(0), &done);

  prepareVMCall();

  // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
  // from other callVMs that might happen at this pc.
  const RetAddrEntry::Kind kind = RetAddrEntry::Kind::InterruptCheck;

  using Fn = bool (*)(JSContext*);
  if (!callVM<Fn, InterruptCheck>(kind)) {
    return false;
  }

  masm.bind(&done);
  return true;
}
// Compiler flavor: bump the ICScript warm-up counter and, when warm enough,
// trigger trial inlining and/or Ion compilation. At loop heads this also
// performs on-stack replacement (OSR) into Ion when a compiled IonScript is
// available.
template <>
bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
  frame.assertSyncedStack();

  // Record native code offset for OSR from Baseline Interpreter into Baseline
  // JIT code. This is right before the warm-up check in the Baseline JIT code,
  // to make sure we can immediately enter Ion if the script is warm enough or
  // if --ion-eager is used.
  JSScript* script = handler.script();
  jsbytecode* pc = handler.pc();
  if (JSOp(*pc) == JSOp::LoopHead) {
    uint32_t pcOffset = script->pcToOffset(pc);
    uint32_t nativeOffset = masm.currentOffset();
    if (!handler.osrEntries().emplaceBack(pcOffset, nativeOffset)) {
      ReportOutOfMemory(cx);
      return false;
    }
  }

  // Emit no warm-up counter increments if Ion is not enabled or if the script
  // will never be Ion-compileable.
  if (!handler.maybeIonCompileable()) {
    return true;
  }

  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();

  // Load the ICScript* in scriptReg.
  masm.loadPtr(frame.addressOfICScript(), scriptReg);

  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, ICScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);

  if (!JitOptions.disableInlining) {
    // Consider trial inlining.
    // Note: unlike other warmup thresholds, where we try to enter a
    // higher tier whenever we are higher than a given warmup count,
    // trial inlining triggers once when reaching the threshold.
    Label noTrialInlining;
    masm.branch32(Assembler::NotEqual, countReg,
                  Imm32(JitOptions.trialInliningWarmUpThreshold),
                  &noTrialInlining);
    prepareVMCall();

    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVMNonOp<Fn, DoTrialInlining>()) {
      return false;
    }
    // Reload registers potentially clobbered by the call.
    masm.loadPtr(frame.addressOfICScript(), scriptReg);
    masm.load32(warmUpCounterAddr, countReg);
    masm.bind(&noTrialInlining);
  }

  if (JSOp(*pc) == JSOp::LoopHead) {
    // If this is a loop where we can't OSR (for example because it's inside a
    // catch or finally block), increment the warmup counter but don't attempt
    // OSR (Ion/Warp only compiles the try block).
    if (!handler.analysis().info(pc).loopHeadCanOsr) {
      return true;
    }
  }

  Label done;

  const OptimizationInfo* info =
      IonOptimizations.get(OptimizationLevel::Normal);
  uint32_t warmUpThreshold = info->compilerWarmUpThreshold(script, pc);
  masm.branch32(Assembler::LessThan, countReg, Imm32(warmUpThreshold), &done);

  // Don't trigger Warp compilations from trial-inlined scripts.
  Address depthAddr(scriptReg, ICScript::offsetOfDepth());
  masm.branch32(Assembler::NotEqual, depthAddr, Imm32(0), &done);

  // Load the IonScript* in scriptReg. We can load this from the ICScript*
  // because it must be an outer ICScript embedded in the JitScript.
  constexpr int32_t offset = -int32_t(JitScript::offsetOfICScript()) +
                             int32_t(JitScript::offsetOfIonScript());
  masm.loadPtr(Address(scriptReg, offset), scriptReg);

  // Do nothing if Ion is already compiling this script off-thread or if Ion has
  // been disabled for this script.
  masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonCompilingScriptPtr),
                 &done);
  masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonDisabledScriptPtr),
                 &done);

  // Try to compile and/or finish a compilation.
  if (JSOp(*pc) == JSOp::LoopHead) {
    // Try to OSR into Ion.
    computeFrameSize(R0.scratchReg());

    prepareVMCall();

    pushBytecodePCArg();
    pushArg(R0.scratchReg());
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, uint32_t, jsbytecode*,
                        IonOsrTempData**);
    if (!callVM<Fn, IonCompileScriptForBaselineOSR>()) {
      return false;
    }

    // The return register holds the IonOsrTempData*. Perform OSR if it's not
    // nullptr.
    static_assert(ReturnReg != OsrFrameReg,
                  "Code below depends on osrDataReg != OsrFrameReg");
    Register osrDataReg = ReturnReg;
    masm.branchTestPtr(Assembler::Zero, osrDataReg, osrDataReg, &done);

    // Success! Switch from Baseline JIT code to Ion JIT code.

    // At this point, stack looks like:
    //
    //  +-> [...Calling-Frame...]
    //  |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //  |   [Descriptor]
    //  |   [Return-Addr]
    //  +---[Saved-FramePtr]
    //      [...Baseline-Frame...]

#ifdef DEBUG
    // Get a scratch register that's not osrDataReg or OsrFrameReg.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(FramePointer));
    regs.take(osrDataReg);
    regs.take(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // If profiler instrumentation is on, ensure that lastProfilingFrame is
    // the frame currently being OSR-ed
    {
      Label checkOk;
      AbsoluteAddress addressOfEnabled(
          cx->runtime()->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
      masm.loadPtr(AbsoluteAddress((void*)&cx->jitActivation), scratchReg);
      masm.loadPtr(
          Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()),
          scratchReg);

      // It may be the case that we entered the baseline frame with
      // profiling turned off on, then in a call within a loop (i.e. a
      // callee frame), turn on profiling, then return to this frame,
      // and then OSR with profiling turned on. In this case, allow for
      // lastProfilingFrame to be null.
      masm.branchPtr(Assembler::Equal, scratchReg, ImmWord(0), &checkOk);

      masm.branchPtr(Assembler::Equal, FramePointer, scratchReg, &checkOk);
      masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
      masm.bind(&checkOk);
    }
#endif

    // Restore the stack pointer so that the saved frame pointer is on top of
    // the stack.
    masm.moveToStackPtr(FramePointer);

    // Jump into Ion.
    masm.loadPtr(Address(osrDataReg, IonOsrTempData::offsetOfBaselineFrame()),
                 OsrFrameReg);
    masm.jump(Address(osrDataReg, IonOsrTempData::offsetOfJitCode()));
  } else {
    // Not a loop head: kick off (or finish) an Ion compilation for entry at
    // the top of the script.
    prepareVMCall();

    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVMNonOp<Fn, IonCompileScriptForBaselineAtEntry>()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}
// Interpreter flavor: bump the JitScript warm-up counter and, when the script
// is warm enough (and Baseline compilation is not disabled for it), call into
// the VM to Baseline-compile the script, then OSR into the compiled code.
template <>
bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();

  // Load the JitScript* in scriptReg.
  loadScript(scriptReg);
  masm.loadJitScript(scriptReg, scriptReg);

  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, JitScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);

  // If the script is warm enough for Baseline compilation, call into the VM to
  // compile it.
  Label done;
  masm.branch32(Assembler::BelowOrEqual, countReg,
                Imm32(JitOptions.baselineJitWarmUpThreshold), &done);
  masm.branchPtr(Assembler::Equal,
                 Address(scriptReg, JitScript::offsetOfBaselineScript()),
                 ImmPtr(BaselineDisabledScriptPtr), &done);

  prepareVMCall();

  masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t**);
  if (!callVM<Fn, BaselineCompileFromBaselineInterpreter>()) {
    return false;
  }

  // If the function returned nullptr we either skipped compilation or were
  // unable to compile the script. Continue running in the interpreter.
  masm.branchTestPtr(Assembler::Zero, ReturnReg, ReturnReg, &done);

  // Success! Switch from interpreter to JIT code by jumping to the
  // corresponding code in the BaselineScript.
  //
  // This works because BaselineCompiler uses the same frame layout (stack is
  // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
  // already cleared the RUNNING_IN_INTERPRETER flag for us.
  // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
  masm.jump(ReturnReg);

  masm.bind(&done);
  return true;
}
// Emit a patchable (toggled) call to the debug trap handler at the current
// pc. The call starts enabled only if a breakpoint or step mode is already
// active; the offset is recorded so it can be toggled later.
bool BaselineCompiler::emitDebugTrap() {
  MOZ_ASSERT(compileDebugInstrumentation());
  MOZ_ASSERT(frame.numUnsyncedSlots() == 0);

  JSScript* script = handler.script();
  bool enabled = DebugAPI::stepModeEnabled(script) ||
                 DebugAPI::hasBreakpointsAt(script, handler.pc());

  // Emit patchable call to debug trap handler.
  JitCode* handlerCode = cx->runtime()->jitRuntime()->debugTrapHandler(
      cx, DebugTrapHandlerKind::Compiler);
  if (!handlerCode) {
    return false;
  }

  CodeOffset nativeOffset = masm.toggledCall(handlerCode, enabled);

  uint32_t pcOffset = script->pcToOffset(handler.pc());
  if (!debugTrapEntries_.emplaceBack(pcOffset, nativeOffset.offset())) {
    ReportOutOfMemory(cx);
    return false;
  }

  // Add a RetAddrEntry for the return offset -> pc mapping.
  return handler.recordCallRetAddr(cx, RetAddrEntry::Kind::DebugTrap,
                                   masm.currentOffset());
}
// Emit profiler instrumentation for frame entry: a toggled jump (initially
// disabled) guarding a store of the frame pointer to lastProfilingFrame.
template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerEnterFrame() {
  // Store stack position to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerEnterFrame(FramePointer, R0.scratchReg());
  masm.bind(&noInstrument);

  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerEnterFrameToggleOffset_.bound());
  profilerEnterFrameToggleOffset_ = toggleOffset;
}

// Mirror of emitProfilerEnterFrame for frame exit.
template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerExitFrame() {
  // Store previous frame to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerExitFrame();
  masm.bind(&noInstrument);

  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerExitFrameToggleOffset_.bound());
  profilerExitFrameToggleOffset_ = toggleOffset;
}
// The following ops emit no code at all.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Nop() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NopDestructuring() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TryDestructuring() {
  return true;
}

// Pop the top value off the simulated/operand stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pop() {
  frame.pop();
  return true;
}
// Pop N values. Compiler flavor: N is the uint16 operand, known statically.
template <>
bool BaselineCompilerCodeGen::emit_PopN() {
  frame.popn(GET_UINT16(handler.pc()));
  return true;
}

// Interpreter flavor: load N from the bytecode stream into a register.
template <>
bool BaselineInterpreterCodeGen::emit_PopN() {
  LoadUint16Operand(masm, R0.scratchReg());
  frame.popn(R0.scratchReg());
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_DupAt() {
  frame.syncStack(0);

  // DupAt takes a value on the stack and re-pushes it on top. It's like
  // GetLocal but it addresses from the top of the stack instead of from the
  // stack frame.

  int depth = -(GET_UINT24(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);
  frame.push(R0);
  return true;
}

// Interpreter flavor: the depth comes from the uint24 operand at runtime.
template <>
bool BaselineInterpreterCodeGen::emit_DupAt() {
  LoadUint24Operand(masm, 0, R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(R0.scratchReg()), R0);
  frame.push(R0);
  return true;
}
// Duplicate the top stack value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup() {
  // Keep top stack value in R0, sync the rest so that we can use R1. We use
  // separate registers because every register can be used by at most one
  // StackValue.
  frame.popRegsAndSync(1);
  masm.moveValue(R0, R1);

  // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
  frame.push(R1);
  frame.push(R0);
  return true;
}

// Duplicate the top two stack values.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup2() {
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  frame.push(R0);
  frame.push(R1);
  return true;
}

// Swap the top two stack values.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Swap() {
  // Keep top stack values in R0 and R1.
  frame.popRegsAndSync(2);

  frame.push(R1);
  frame.push(R0);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_Pick() {
  frame.syncStack(0);

  // Pick takes a value on the stack and moves it to the top.
  // For instance, pick 2:
  //     before: A B C D E
  //     after : A B D E C

  // First, move value at -(amount + 1) into R0.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);

  // Move the other values down.
  depth++;
  for (; depth < 0; depth++) {
    Address source = frame.addressOfStackValue(depth);
    Address dest = frame.addressOfStackValue(depth - 1);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  }

  // Push R0.
  frame.pop();
  frame.push(R0);
  return true;
}
// Interpreter flavor of Pick: the depth is a runtime operand, so the
// shuffling loop is emitted as machine code rather than unrolled.
template <>
bool BaselineInterpreterCodeGen::emit_Pick() {
  // First, move the value to move up into R0.
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(scratch), R0);

  // Move the other values down.
  Label top, done;
  masm.bind(&top);
  // Loop until the counter underflows (counts scratch down to -1).
  masm.branchSub32(Assembler::Signed, Imm32(1), scratch, &done);
  {
    masm.loadValue(frame.addressOfStackValue(scratch), R1);
    masm.storeValue(R1, frame.addressOfStackValue(scratch, sizeof(Value)));
    masm.jump(&top);
  }

  masm.bind(&done);

  // Replace value on top of the stack with R0.
  masm.storeValue(R0, frame.addressOfStackValue(-1));
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_Unpick() {
  frame.syncStack(0);

  // Pick takes the top of the stack value and moves it under the nth value.
  // For instance, unpick 2:
  //     before: A B C D E
  //     after : A B E C D

  // First, move value at -1 into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  MOZ_ASSERT(GET_INT8(handler.pc()) > 0,
             "Interpreter code assumes JSOp::Unpick operand > 0");

  // Move the other values up.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  for (int32_t i = -1; i > depth; i--) {
    Address source = frame.addressOfStackValue(i - 1);
    Address dest = frame.addressOfStackValue(i);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  }

  // Store R0 under the nth value.
  Address dest = frame.addressOfStackValue(depth);
  masm.storeValue(R0, dest);
  return true;
}
// Interpreter flavor of Unpick: the rotation is performed with an emitted
// loop carrying the displaced value in R1.
template <>
bool BaselineInterpreterCodeGen::emit_Unpick() {
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);

  // Move the top value into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  // Overwrite the nth stack value with R0 but first save the old value in R1.
  masm.loadValue(frame.addressOfStackValue(scratch), R1);
  masm.storeValue(R0, frame.addressOfStackValue(scratch));

  // Now for each slot x in [n-1, 1] do the following:
  //
  // * Store the value in slot x in R0.
  // * Store the value in the previous slot (now in R1) in slot x.
  // * Move R0 to R1.

#ifdef DEBUG
  // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
  // negative values.
  {
    Label ok;
    masm.branch32(Assembler::GreaterThan, scratch, Imm32(0), &ok);
    masm.assumeUnreachable("JSOp::Unpick with operand <= 0?");
    masm.bind(&ok);
  }
#endif

  Label top, done;
  masm.bind(&top);
  masm.branchSub32(Assembler::Zero, Imm32(1), scratch, &done);
  {
    // Overwrite stack slot x with slot x + 1, saving the old value in R1.
    masm.loadValue(frame.addressOfStackValue(scratch), R0);
    masm.storeValue(R1, frame.addressOfStackValue(scratch));
    masm.moveValue(R0, R1);
    masm.jump(&top);
  }

  // Finally, replace the value on top of the stack (slot 0) with R1. This is
  // the value that used to be in slot 1.
  masm.bind(&done);
  masm.storeValue(R1, frame.addressOfStackValue(-1));
  return true;
}
// Emit an unconditional jump for the current (jump) bytecode. Compiler
// flavor: the target pc is known, so jump straight to its label.
template <>
void BaselineCompilerCodeGen::emitJump() {
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();

  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.jump(handler.labelOf(target));
}

// Interpreter flavor: add the jump offset to the interpreter pc at runtime
// and dispatch to the next op.
template <>
void BaselineInterpreterCodeGen::emitJump() {
  // We have to add the current pc's jump offset to the current pc. We can use
  // R0 and R1 as scratch because we jump to the "next op" label so these
  // registers aren't in use at this point.
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  Register pc = LoadBytecodePC(masm, scratch1);
  LoadInt32OperandSignExtendToPtr(masm, pc, scratch2);
  if (HasInterpreterPCReg()) {
    masm.addPtr(scratch2, InterpreterPCReg);
  } else {
    masm.addPtr(pc, scratch2);
    masm.storePtr(scratch2, frame.addressOfInterpreterPC());
  }
  masm.jump(handler.interpretOpWithPCRegLabel());
}
// Branch to the current bytecode's jump target if |val| is a boolean whose
// truthiness matches |branchIfTrue|. Compiler flavor: branch directly to the
// target's label.
template <>
void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                    ValueOperand val) {
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();

  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.branchTestBooleanTruthy(branchIfTrue, val, handler.labelOf(target));
}

// Interpreter flavor: invert the test to skip over the emitted pc-relative
// jump when the branch should not be taken.
template <>
void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                       ValueOperand val) {
  Label done;
  masm.branchTestBooleanTruthy(!branchIfTrue, val, &done);
  emitJump();
  masm.bind(&done);
}
// Two-armed script-flag test: run |ifSet| when the script has |flag|,
// otherwise |ifNotSet|. Compiler flavor: the flag is known at compile time,
// so exactly one arm is emitted and no test code is generated.
template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  if (handler.script()->hasFlag(flag)) {
    return ifSet();
  }
  return ifNotSet();
}

// Interpreter flavor: both arms are emitted, with a runtime test on the
// script's immutable flags selecting between them.
template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  Label flagNotSet, done;
  loadScript(scratch);
  masm.branchTest32(Assembler::Zero,
                    Address(scratch, JSScript::offsetOfImmutableFlags()),
                    Imm32(uint32_t(flag)), &flagNotSet);
  {
    if (!ifSet()) {
      return false;
    }
    masm.jump(&done);
  }
  masm.bind(&flagNotSet);
  {
    if (!ifNotSet()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}
2006 template <>
2007 template <typename F>
2008 [[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2009 JSScript::ImmutableFlags flag, bool value, const F& emit,
2010 Register scratch) {
2011 if (handler.script()->hasFlag(flag) == value) {
2012 return emit();
2014 return true;
2017 template <>
2018 template <typename F>
2019 [[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
2020 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2021 if (handler.script()->hasFlag(flag) == value) {
2022 return emit();
2024 return true;
2027 template <>
2028 template <typename F>
2029 [[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2030 JSScript::ImmutableFlags flag, bool value, const F& emit,
2031 Register scratch) {
2032 Label done;
2033 loadScript(scratch);
2034 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2035 Address(scratch, JSScript::offsetOfImmutableFlags()),
2036 Imm32(uint32_t(flag)), &done);
2038 if (!emit()) {
2039 return false;
2043 masm.bind(&done);
2044 return true;
2047 template <>
2048 template <typename F>
2049 [[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2050 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2051 Label done;
2052 loadScript(scratch);
2053 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2054 Address(scratch, JSScript::offsetOfMutableFlags()),
2055 Imm32(uint32_t(flag)), &done);
2057 if (!emit()) {
2058 return false;
2062 masm.bind(&done);
2063 return true;
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Goto() {
  // Unconditional jump. Sync the stack first so the target sees a canonical
  // frame state.
  frame.syncStack(0);
  emitJump();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitTest(bool branchIfTrue) {
  // Shared body for JumpIfTrue/JumpIfFalse: pop the condition into R0,
  // convert it to a boolean via the ToBool IC if needed, then branch.
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  // The IC is only needed when the value isn't statically known to be a
  // boolean already.
  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  // IC will leave a BooleanValue in R0, just need to branch on it.
  emitTestBooleanTruthy(branchIfTrue, R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_JumpIfFalse() {
  return emitTest(false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_JumpIfTrue() {
  return emitTest(true);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitAndOr(bool branchIfTrue) {
  // Shared body for And/Or. Unlike JumpIfTrue/False, the operand stays on the
  // stack (short-circuit semantics: the lhs is the result when we branch).
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // And and Or leave the original value on the stack.
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-1), R0);
  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  emitTestBooleanTruthy(branchIfTrue, R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_And() {
  // a && b: jump past the rhs when the lhs is falsy.
  return emitAndOr(false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Or() {
  // a || b: jump past the rhs when the lhs is truthy.
  return emitAndOr(true);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Coalesce() {
  // a ?? b: jump past the rhs unless the lhs is undefined or null.
  // Coalesce leaves the original value on the stack.
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-1), R0);

  Label undefinedOrNull;

  masm.branchTestUndefined(Assembler::Equal, R0, &undefinedOrNull);
  masm.branchTestNull(Assembler::Equal, R0, &undefinedOrNull);
  emitJump();

  masm.bind(&undefinedOrNull);
  // fall through
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Not() {
  // Logical not: ToBool the operand via IC (unless already a boolean), then
  // flip the low bit of the boolean payload.
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  masm.notBoolean(R0);

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pos() {
  // Unary +: same IC path as the other unary arithmetic ops.
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToNumeric() {
  return emitUnaryArith();
}
2170 template <typename Handler>
2171 bool BaselineCodeGen<Handler>::emit_LoopHead() {
2172 if (!emit_JumpTarget()) {
2173 return false;
2175 if (!emitInterruptCheck()) {
2176 return false;
2178 if (!emitWarmUpCounterIncrement()) {
2179 return false;
2181 return true;
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Void() {
  // void expr: discard the operand and push undefined.
  frame.pop();
  frame.push(UndefinedValue());
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Undefined() {
  frame.push(UndefinedValue());
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Hole() {
  // Push the magic elements-hole value (used when initializing arrays with
  // elided elements).
  frame.push(MagicValue(JS_ELEMENTS_HOLE));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Null() {
  frame.push(NullValue());
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckIsObj() {
  // Throw a TypeError (via ThrowCheckIsObject) unless the top stack value is
  // an object. The value is left on the stack.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  Label ok;
  masm.branchTestObject(Assembler::Equal, R0, &ok);

  prepareVMCall();

  // The CheckIsObjectKind operand selects the error message.
  pushUint8BytecodeOperandArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, CheckIsObjectKind);
  if (!callVM<Fn, ThrowCheckIsObject>()) {
    return false;
  }

  masm.bind(&ok);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckThis() {
  // Derived-class constructors: throw if |this| is still uninitialized
  // (the TDZ magic value).
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  return emitCheckThis(R0);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckThisReinit() {
  // Inverse check: throw if |this| was *already* initialized (double
  // super() call).
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  return emitCheckThis(R0, /* reinit = */ true);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitCheckThis(ValueOperand val, bool reinit) {
  // An uninitialized |this| is represented by a magic value; branch on the
  // magic tag with the sense selected by |reinit|, and call into the VM to
  // throw the appropriate error otherwise.
  Label thisOK;
  if (reinit) {
    masm.branchTestMagic(Assembler::Equal, val, &thisOK);
  } else {
    masm.branchTestMagic(Assembler::NotEqual, val, &thisOK);
  }

  prepareVMCall();

  if (reinit) {
    using Fn = bool (*)(JSContext*);
    if (!callVM<Fn, ThrowInitializedThis>()) {
      return false;
    }
  } else {
    using Fn = bool (*)(JSContext*);
    if (!callVM<Fn, ThrowUninitializedThis>()) {
      return false;
    }
  }

  masm.bind(&thisOK);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckReturn() {
  // Derived-class constructor return check: the result is the return value if
  // it's an object; if the return value is undefined the result is |this|
  // (which must be initialized); anything else throws.
  MOZ_ASSERT_IF(handler.maybeScript(),
                handler.maybeScript()->isDerivedClassConstructor());

  // Load |this| in R0, return value in R1.
  frame.popRegsAndSync(1);
  emitLoadReturnValue(R1);

  Label done, returnBad, checkThis;
  masm.branchTestObject(Assembler::NotEqual, R1, &checkThis);
  // Object return value: it becomes the result.
  masm.moveValue(R1, R0);
  masm.jump(&done);

  masm.bind(&checkThis);
  // Non-object, non-undefined return values are an error; undefined falls
  // back to |this|, which must not still be the uninitialized magic value.
  masm.branchTestUndefined(Assembler::NotEqual, R1, &returnBad);
  masm.branchTestMagic(Assembler::NotEqual, R0, &done);
  masm.bind(&returnBad);

  prepareVMCall();
  pushArg(R1);

  using Fn = bool (*)(JSContext*, HandleValue);
  if (!callVM<Fn, ThrowBadDerivedReturnOrUninitializedThis>()) {
    return false;
  }
  masm.assumeUnreachable("Should throw on bad derived constructor return");

  masm.bind(&done);

  // Push |rval| or |this| onto the stack.
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FunctionThis() {
  // Push the frame's |this| value. In non-strict code a primitive |this|
  // must be boxed (ToObject / global substitution) via a VM call.
  MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow());

  frame.pushThis();

  auto boxThis = [this]() {
    // Load |thisv| in R0. Skip the call if it's already an object.
    Label skipCall;
    frame.popRegsAndSync(1);
    masm.branchTestObject(Assembler::Equal, R0, &skipCall);

    prepareVMCall();
    masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());

    pushArg(R1.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
    if (!callVM<Fn, BaselineGetFunctionThis>()) {
      return false;
    }

    masm.bind(&skipCall);
    frame.push(R0);
    return true;
  };

  // In strict mode code, |this| is left alone.
  return emitTestScriptFlag(JSScript::ImmutableFlags::Strict, false, boxThis,
                            R2.scratchReg());
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GlobalThis() {
  // Push the global |this| value (fast path; no VM call needed).
  frame.syncStack(0);

  loadGlobalThisValue(R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NonSyntacticGlobalThis() {
  // With a non-syntactic scope chain the global |this| must be computed from
  // the environment chain in the VM.
  frame.syncStack(0);

  prepareVMCall();

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue);
  if (!callVM<Fn, GetNonSyntacticGlobalThis>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// Trivial constant-pushing opcodes. These have no operands, so compiler and
// interpreter share one implementation.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_True() {
  frame.push(BooleanValue(true));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_False() {
  frame.push(BooleanValue(false));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Zero() {
  frame.push(Int32Value(0));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_One() {
  frame.push(Int32Value(1));
  return true;
}
// Immediate-operand opcodes. The compiler reads the operand from the known
// bytecode pc at compile time and pushes a constant; the interpreter must
// load the operand from the current bytecode at run time and tag it.

template <>
bool BaselineCompilerCodeGen::emit_Int8() {
  frame.push(Int32Value(GET_INT8(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Int8() {
  LoadInt8Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Int32() {
  frame.push(Int32Value(GET_INT32(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Int32() {
  LoadInt32Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Uint16() {
  frame.push(Int32Value(GET_UINT16(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Uint16() {
  LoadUint16Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Uint24() {
  frame.push(Int32Value(GET_UINT24(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Uint24() {
  LoadUint24Operand(masm, 0, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Double() {
  // The double is stored inline in the bytecode as a full Value.
  frame.push(GET_INLINE_VALUE(handler.pc()));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Double() {
  LoadInlineValueOperand(masm, R0);
  frame.push(R0);
  return true;
}
// GC-thing-operand opcodes. The compiler can bake the pointer into the pushed
// constant; the interpreter loads the GC thing out of the current script's
// gcthings list at run time.

template <>
bool BaselineCompilerCodeGen::emit_BigInt() {
  BigInt* bi = handler.script()->getBigInt(handler.pc());
  frame.push(BigIntValue(bi));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_BigInt() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  loadScriptGCThing(ScriptGCThingType::BigInt, scratch1, scratch2);
  masm.tagValue(JSVAL_TYPE_BIGINT, scratch1, R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_String() {
  frame.push(StringValue(handler.script()->getString(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_String() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  loadScriptGCThing(ScriptGCThingType::String, scratch1, scratch2);
  masm.tagValue(JSVAL_TYPE_STRING, scratch1, R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Symbol() {
  // The uint8 operand indexes the runtime's well-known symbols table.
  unsigned which = GET_UINT8(handler.pc());
  JS::Symbol* sym = cx->runtime()->wellKnownSymbols->get(which);
  frame.push(SymbolValue(sym));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Symbol() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  LoadUint8Operand(masm, scratch1);

  // Index into the well-known symbols table at run time.
  masm.movePtr(ImmPtr(cx->runtime()->wellKnownSymbols), scratch2);
  masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch1);

  masm.tagValue(JSVAL_TYPE_SYMBOL, scratch1, R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Object() {
  frame.push(ObjectValue(*handler.script()->getObject(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Object() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  loadScriptGCThing(ScriptGCThingType::Object, scratch1, scratch2);
  masm.tagValue(JSVAL_TYPE_OBJECT, scratch1, R0);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallSiteObj() {
  // A call-site object (tagged template) is just a pre-created script object.
  return emit_Object();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RegExp() {
  // Each evaluation of a regexp literal must produce a fresh clone of the
  // script's RegExp object, so call into the VM.
  prepareVMCall();
  pushScriptGCThingArg(ScriptGCThingType::RegExp, R0.scratchReg(),
                       R1.scratchReg());

  using Fn = JSObject* (*)(JSContext*, Handle<RegExpObject*>);
  if (!callVM<Fn, CloneRegExpObject>()) {
    return false;
  }

  // Box and push return value.
  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
#ifdef ENABLE_RECORD_TUPLE
// Record/Tuple opcodes are not implemented in the JITs; emitting one crashes
// deliberately. (The trailing |return false| only satisfies the signature —
// MOZ_CRASH does not return.)
#  define UNSUPPORTED_OPCODE(OP)                          \
    template <typename Handler>                           \
    bool BaselineCodeGen<Handler>::emit_##OP() {          \
      MOZ_CRASH("Record and Tuple are not supported by jit"); \
      return false;                                       \
    }

UNSUPPORTED_OPCODE(InitRecord)
UNSUPPORTED_OPCODE(AddRecordProperty)
UNSUPPORTED_OPCODE(AddRecordSpread)
UNSUPPORTED_OPCODE(FinishRecord)
UNSUPPORTED_OPCODE(InitTuple)
UNSUPPORTED_OPCODE(AddTupleElement)
UNSUPPORTED_OPCODE(FinishTuple)

#  undef UNSUPPORTED_OPCODE
#endif
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lambda() {
  // Create a function clone for a lambda expression: VM call with the
  // canonical function (a script GC thing) and the current environment chain.
  prepareVMCall();
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  pushArg(R0.scratchReg());
  pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
                       R1.scratchReg());

  using Fn = JSObject* (*)(JSContext*, HandleFunction, HandleObject);
  if (!callVM<Fn, js::Lambda>()) {
    return false;
  }

  // Box and push return value.
  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetFunName() {
  // Stack is |fun, name|. Pop both (fun in R0, name in R1), push the function
  // back as the result, then call SetFunctionName(fun, name, prefixKind).
  frame.popRegsAndSync(2);

  frame.push(R0);
  frame.syncStack(0);

  masm.unboxObject(R0, R0.scratchReg());

  prepareVMCall();

  // The uint8 operand is the FunctionPrefixKind ("get"/"set"/none).
  pushUint8BytecodeOperandArg(R2.scratchReg());
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
  return callVM<Fn, SetFunctionName>();
}
// All binary arithmetic/bitwise ops share a single IC-based implementation.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitOr() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitXor() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitAnd() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lsh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Rsh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Ursh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Add() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Sub() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Mul() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Div() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Mod() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pow() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitBinaryArith() {
  // Pop both operands into registers; the IC computes the result in R0.
  // Keep top JSStack value in R0 and R2
  // NOTE(review): the comment above mentions R2 but popRegsAndSync(2) is also
  // described as "R0 and R1" elsewhere in this file — confirm against
  // BaselineFrameInfo::popRegsAndSync.
  frame.popRegsAndSync(2);

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitUnaryArith() {
  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
// Unary arithmetic ops all go through the shared unary-arith IC.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitNot() {
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Neg() {
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Inc() {
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dec() {
  return emitUnaryArith();
}
// All comparison ops (relational, loose and strict equality) share a single
// IC-based implementation that leaves a boolean in R0.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lt() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Le() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Gt() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Ge() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Eq() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Ne() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitCompare() {
  // Keep top JSStack value in R0 and R1.
  frame.popRegsAndSync(2);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value; the result is known to be a boolean.
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictEq() {
  return emitCompare();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictNe() {
  return emitCompare();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Case() {
  // switch case: pop the comparison result; on a match also pop the switch
  // value (by bumping the stack pointer) and jump to the case body.
  frame.popRegsAndSync(1);

  Label done;
  masm.branchTestBooleanTruthy(/* branchIfTrue */ false, R0, &done);

  // Pop the switch value if the case matches.
  masm.addToStackPtr(Imm32(sizeof(Value)));
  emitJump();

  masm.bind(&done);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Default() {
  // switch default: discard the switch value and jump unconditionally.
  frame.pop();
  return emit_Goto();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lineno() {
  // Lineno carries source-position info only; nothing to emit.
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewArray() {
  // Array allocation goes through an IC; the new object is left in R0.
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
// If |val| is the elements-hole magic value, set the NON_PACKED flag on the
// elements header so later code knows the array has holes. |elements| points
// at the object's elements (not the object itself).
static void MarkElementsNonPackedIfHoleValue(MacroAssembler& masm,
                                             Register elements,
                                             ValueOperand val) {
  Label notHole;
  masm.branchTestMagic(Assembler::NotEqual, val, &notHole);

  Address elementsFlags(elements, ObjectElements::offsetOfFlags());
  masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);

  masm.bind(&notHole);
}
template <>
bool BaselineInterpreterCodeGen::emit_InitElemArray() {
  // Initialize array element at the (run-time-loaded) operand index with the
  // popped value, bump the initialized length, track holes, and emit a
  // generational-GC post-barrier when needed.

  // Pop value into R0, keep the object on the stack.
  frame.popRegsAndSync(1);

  // Load object in R2.
  Register obj = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), obj);

  // Load index in R1.
  Register index = R1.scratchReg();
  LoadInt32Operand(masm, index);

  // Store the Value. No pre-barrier because this is an initialization.
  // Note: |obj| is clobbered below to point at the elements.
  masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
  masm.storeValue(R0, BaseObjectElementIndex(obj, index));

  // Bump initialized length.
  Address initLength(obj, ObjectElements::offsetOfInitializedLength());
  masm.add32(Imm32(1), index);
  masm.store32(index, initLength);

  // Mark elements as NON_PACKED if we stored the hole value.
  MarkElementsNonPackedIfHoleValue(masm, obj, R0);

  // Post-barrier: needed only when a nursery value is stored into a tenured
  // object.
  Label skipBarrier;
  Register scratch = index;
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch, &skipBarrier);

  masm.unboxObject(frame.addressOfStackValue(-1), obj);
  masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);
  MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
  masm.call(&postBarrierSlot_);

  masm.bind(&skipBarrier);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_InitElemArray() {
  // Compiler variant: the element index is a compile-time constant, and when
  // the value itself is statically known we can omit the hole check and the
  // post-barrier entirely.

  // Pop value into R0, keep the object on the stack.
  Maybe<Value> knownValue = frame.knownStackValue(-1);
  frame.popRegsAndSync(1);

  // Load object in R2.
  Register obj = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), obj);

  uint32_t index = GET_UINT32(handler.pc());
  MOZ_ASSERT(index <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce an index exceeding int32_t range");

  // Store the Value. No pre-barrier because this is an initialization.
  masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
  masm.storeValue(R0, Address(obj, index * sizeof(Value)));

  // Bump initialized length.
  Address initLength(obj, ObjectElements::offsetOfInitializedLength());
  masm.store32(Imm32(index + 1), initLength);

  // Mark elements as NON_PACKED if we stored the hole value. We know this
  // statically except when debugger instrumentation is enabled because that
  // forces a stack-sync (which discards constants and known types) for each op.
  if (knownValue && knownValue->isMagic(JS_ELEMENTS_HOLE)) {
    Address elementsFlags(obj, ObjectElements::offsetOfFlags());
    masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);
  } else if (handler.compileDebugInstrumentation()) {
    MarkElementsNonPackedIfHoleValue(masm, obj, R0);
  } else {
#ifdef DEBUG
    // Value is statically known to not be the hole; assert that at run time
    // in debug builds.
    Label notHole;
    masm.branchTestMagic(Assembler::NotEqual, R0, &notHole);
    masm.assumeUnreachable("Unexpected hole value");
    masm.bind(&notHole);
#endif
  }

  // Post-barrier.
  if (knownValue) {
    // Compile-time constants are tenured, so no barrier is ever needed.
    MOZ_ASSERT(JS::GCPolicy<Value>::isTenured(*knownValue));
  } else {
    Label skipBarrier;
    Register scratch = R1.scratchReg();
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch,
                                  &skipBarrier);

    masm.unboxObject(frame.addressOfStackValue(-1), obj);
    masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch,
                                 &skipBarrier);
    MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
    masm.call(&postBarrierSlot_);

    masm.bind(&skipBarrier);
  }
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewObject() {
  return emitNewObject();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewInit() {
  return emitNewObject();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitNewObject() {
  // Object allocation goes through an IC; the new object is left in R0.
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElem() {
  // Stack is |obj, key, rhs|. Shuffle so the SetElem-style IC sees
  // |obj(R0), key(R1)| with the rhs on top of the stack, and leave the
  // object as the final result.

  // Store RHS in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep object and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Push the object to store the result of the IC.
  frame.push(R0);
  frame.syncStack(0);

  // Keep RHS on the stack.
  frame.pushScratchValue();

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Pop the rhs, so that the object is on the top of the stack.
  frame.pop();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElem() {
  // Hidden/locked variants are distinguished by the IC, not the codegen.
  return emit_InitElem();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLockedElem() {
  return emit_InitElem();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_MutateProto() {
  // __proto__: ... in an object literal: VM call with the literal object and
  // the new prototype value; pops the value, leaves the object.

  // Keep values on the stack for the decompiler.
  frame.syncStack(0);

  masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();

  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, Handle<PlainObject*>, HandleValue);
  if (!callVM<Fn, MutatePrototype>()) {
    return false;
  }

  frame.pop();
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitProp() {
  // Stack is |obj, rhs|. Run the SetProp-style IC with obj in R0 and rhs in
  // R1, then pop the rhs so the object remains as the result.

  // Load lhs in R0, rhs in R1.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Leave the object on the stack.
  frame.pop();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLockedProp() {
  // Locked/hidden variants are distinguished by the IC, not the codegen.
  return emit_InitProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenProp() {
  return emit_InitProp();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetElem() {
  // obj[key]: IC with obj/key in R0/R1, result pushed from R0.

  // Keep top two stack values in R0 and R1.
  frame.popRegsAndSync(2);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetElemSuper() {
  // super[key]: stack is |receiver, key, obj|. Shuffle so the IC sees
  // receiver/key in registers with obj on the stack.

  // Store obj in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep receiver and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Keep obj on the stack.
  frame.pushScratchValue();

  if (!emitNextIC()) {
    return false;
  }

  frame.pop();
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetElem() {
  // obj[key] = rhs: run the IC with obj/key in R0/R1 and the rhs on top of
  // the stack; the rhs stays on the stack as the expression result.

  // Store RHS in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep object and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Keep RHS on the stack.
  frame.pushScratchValue();

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetElem() {
  // Strictness is handled by the IC/VM, not the codegen.
  return emit_SetElem();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSetElemSuper(bool strict) {
  // super[key] = rval via VM call.
  // Incoming stack is |receiver, propval, obj, rval|. We need to shuffle
  // stack to leave rval when operation is complete.

  // Pop rval into R0, then load receiver into R1 and replace with rval.
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-3), R1);
  masm.storeValue(R0, frame.addressOfStackValue(-3));

  prepareVMCall();

  pushArg(Imm32(strict));
  pushArg(R0);  // rval
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  pushArg(R0);  // propval
  pushArg(R1);  // receiver
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  pushArg(R0);  // obj

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue, HandleValue,
                      HandleValue, bool);
  if (!callVM<Fn, js::SetElementSuper>()) {
    return false;
  }

  // Pop propval and obj; rval (stored over receiver above) remains.
  frame.popn(2);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetElemSuper() {
  return emitSetElemSuper(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetElemSuper() {
  return emitSetElemSuper(/* strict = */ true);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDelElem(bool strict) {
  // delete obj[key] via VM call; pops both operands and pushes the boolean
  // result. Strictness selects the template instantiation.

  // Keep values on the stack for the decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();

  pushArg(R1);
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue, bool*);
  if (strict) {
    if (!callVM<Fn, DelElemOperation<true>>()) {
      return false;
    }
  } else {
    if (!callVM<Fn, DelElemOperation<false>>()) {
      return false;
    }
  }

  masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
  frame.popn(2);
  frame.push(R1, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelElem() {
  return emitDelElem(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictDelElem() {
  return emitDelElem(/* strict = */ true);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_In() {
  // key in obj: IC-based, boolean result in R0.
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_HasOwn() {
  // Object.hasOwn-style check: IC-based, boolean result in R0.
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckPrivateField() {
  // Private-field brand check. Operands stay on the stack; the IC's boolean
  // result is pushed on top.

  // Keep key and val on the stack.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewPrivateName() {
  // Create a fresh private-name symbol from the script's name operand via
  // a VM call, then push it.
  prepareVMCall();

  pushScriptNameArg(R0.scratchReg(), R1.scratchReg());

  using Fn = JS::Symbol* (*)(JSContext*, Handle<JSAtom*>);
  if (!callVM<Fn, NewPrivateName>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_SYMBOL, ReturnReg, R0);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetGName() {
  // Global name read: the IC takes the global lexical environment in
  // R0.scratchReg() and leaves the value in R0.
  frame.syncStack(0);

  loadGlobalLexicalEnvironment(R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
template <>
bool BaselineCompilerCodeGen::tryOptimizeBindGlobalName() {
  // Compiler-only fast path: if the binding for this name can be resolved at
  // compile time, push it as a constant and skip the IC entirely.
  JSScript* script = handler.script();
  MOZ_ASSERT(!script->hasNonSyntacticScope());

  Rooted<GlobalObject*> global(cx, &script->global());
  Rooted<PropertyName*> name(cx, script->getName(handler.pc()));
  if (JSObject* binding = MaybeOptimizeBindGlobalName(cx, global, name)) {
    frame.push(ObjectValue(*binding));
    return true;
  }
  // Caller falls back to the generic IC path.
  return false;
}
template <>
bool BaselineInterpreterCodeGen::tryOptimizeBindGlobalName() {
  // Interpreter doesn't optimize simple BindGNames: there is no fixed script
  // (and hence no compile-time name) to resolve against.
  return false;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindGName() {
  // Try the compile-time fast path first (compiler only; the interpreter
  // version always returns false).
  if (tryOptimizeBindGlobalName()) {
    return true;
  }

  frame.syncStack(0);
  loadGlobalLexicalEnvironment(R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindVar() {
  // JSOp::BindVar: find the nearest var environment via a VM call and push it.
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, JSObject*);
  if (!callVM<Fn, BindVarOperation>()) {
    return false;
  }

  // Box the returned environment object and push it.
  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetProp() {
  // Keep lhs in R0, rhs in R1.
  frame.popRegsAndSync(2);

  // Keep RHS on the stack: SetProp leaves the assigned value as the result.
  frame.push(R1);
  frame.syncStack(0);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  return true;
}
// The following ops share emit_SetProp's codegen; the IC distinguishes the
// exact operation (strictness, name vs. property, global) from the bytecode.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetProp() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetGName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetGName() {
  return emit_SetProp();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSetPropSuper(bool strict) {
  // Incoming stack is |receiver, obj, rval|. We need to shuffle stack to
  // leave rval when operation is complete.

  // Pop rval into R0, then load receiver into R1 and replace with rval.
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.storeValue(R0, frame.addressOfStackValue(-2));

  prepareVMCall();

  // Note: VM-call args are pushed in reverse order of the Fn signature.
  pushArg(Imm32(strict));
  pushArg(R0);  // rval
  pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
  pushArg(R1);  // receiver
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  pushArg(R0);  // obj

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue,
                      Handle<PropertyName*>, HandleValue, bool);
  if (!callVM<Fn, js::SetPropertySuper>()) {
    return false;
  }

  // Pop |obj|; rval (stored over receiver above) remains as the result.
  frame.pop();
  return true;
}
// Sloppy and strict variants only differ in the |strict| flag passed to the
// shared emitter.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetPropSuper() {
  return emitSetPropSuper(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetPropSuper() {
  return emitSetPropSuper(/* strict = */ true);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetProp() {
  // Keep object in R0.
  frame.popRegsAndSync(1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetBoundName() {
  // GetBoundName reuses the GetProp IC path; the IC handles the difference.
  return emit_GetProp();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetPropSuper() {
  // Receiver -> R1, ObjectOrNull -> R0
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  frame.pop();

  if (!emitNextIC()) {
    return false;
  }

  // The IC leaves the property value in R0.
  frame.push(R0);
  return true;
}
3418 template <typename Handler>
3419 bool BaselineCodeGen<Handler>::emitDelProp(bool strict) {
3420 // Keep value on the stack for the decompiler.
3421 frame.syncStack(0);
3422 masm.loadValue(frame.addressOfStackValue(-1), R0);
3424 prepareVMCall();
3426 pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
3427 pushArg(R0);
3429 using Fn = bool (*)(JSContext*, HandleValue, Handle<PropertyName*>, bool*);
3430 if (strict) {
3431 if (!callVM<Fn, DelPropOperation<true>>()) {
3432 return false;
3434 } else {
3435 if (!callVM<Fn, DelPropOperation<false>>()) {
3436 return false;
3440 masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
3441 frame.pop();
3442 frame.push(R1, JSVAL_TYPE_BOOLEAN);
3443 return true;
// Sloppy and strict variants only differ in the |strict| flag passed to the
// shared emitter.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelProp() {
  return emitDelProp(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictDelProp() {
  return emitDelProp(/* strict = */ true);
}
template <>
void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg) {
  // Walk |hops| links up the environment chain (hop count is known at
  // compile time from the bytecode) and leave the target object in |reg|.
  EnvironmentCoordinate ec(handler.pc());

  masm.loadPtr(frame.addressOfEnvironmentChain(), reg);
  for (unsigned i = ec.hops(); i; i--) {
    masm.unboxObject(
        Address(reg, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
  }
}
template <>
void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg) {
  // The interpreter has no compile-time pc, so the hop count isn't static;
  // interpreter code uses LoadAliasedVarEnv instead.
  MOZ_CRASH("Shouldn't call this for interpreter");
}
template <>
Address BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  // Compute the address of the slot named by the current pc's environment
  // coordinate, given the environment object in |objReg|. |reg| is only
  // clobbered on the dynamic-slots path.
  EnvironmentCoordinate ec(handler.pc());

  if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
    return Address(objReg, NativeObject::getFixedSlotOffset(ec.slot()));
  }

  // Dynamic slot: load the slots pointer and index into it.
  uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
  masm.loadPtr(Address(objReg, NativeObject::offsetOfSlots()), reg);
  return Address(reg, slot * sizeof(Value));
}
template <>
Address BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  // Interpreter has no static environment coordinate; see emitGetAliasedVar.
  MOZ_CRASH("Shouldn't call this for interpreter");
}
template <typename Handler>
Address BaselineCodeGen<Handler>::getEnvironmentCoordinateAddress(
    Register reg) {
  // Convenience wrapper: walk to the environment object, then compute the
  // slot address in place (reusing |reg| for both steps).
  getEnvironmentCoordinateObject(reg);
  return getEnvironmentCoordinateAddressFromObject(reg, reg);
}
// For a JOF_ENVCOORD op load the number of hops from the bytecode and skip this
// number of environment objects.
static void LoadAliasedVarEnv(MacroAssembler& masm, Register env,
                              Register scratch) {
  static_assert(ENVCOORD_HOPS_LEN == 1,
                "Code assumes number of hops is stored in uint8 operand");
  LoadUint8Operand(masm, scratch);

  // Loop |scratch| times, following the enclosing-environment link each
  // iteration. Zero hops skips the loop entirely.
  Label top, done;
  masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
  masm.bind(&top);
  {
    Address nextEnv(env, EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(nextEnv, env);
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
  }
  masm.bind(&done);
}
template <>
void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest) {
  // Compiler path: the environment coordinate is static, so the slot address
  // can be computed directly.
  frame.syncStack(0);

  Address address = getEnvironmentCoordinateAddress(R0.scratchReg());
  masm.loadValue(address, dest);
}
template <>
void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest) {
  // Interpreter path: hops and slot index come from the bytecode operands at
  // runtime, so both the environment walk and the fixed/dynamic-slot choice
  // are emitted as generated-code branches.
  Register env = R0.scratchReg();
  Register scratch = R1.scratchReg();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch);

  // Load the Value from a fixed or dynamic slot.
  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), env);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
  }
  masm.bind(&done);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitGetAliasedDebugVar(ValueOperand dest) {
  // Debug-mode aliased-var read goes through a VM call so the debugger can
  // observe/intercept it; the result lands in the VM call's out-value.
  frame.syncStack(0);
  Register env = R0.scratchReg();
  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);

  prepareVMCall();
  pushBytecodePCArg();
  pushArg(env);

  using Fn =
      bool (*)(JSContext*, JSObject* env, jsbytecode*, MutableHandleValue);
  return callVM<Fn, LoadAliasedDebugVar>();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedDebugVar() {
  // Load via the debug VM-call path, then push the result.
  if (!emitGetAliasedDebugVar(R0)) {
    return false;
  }

  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedVar() {
  // Load the aliased slot into R0 (compiler or interpreter variant) and push.
  emitGetAliasedVar(R0);

  frame.push(R0);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
  // Keep rvalue in R0.
  frame.popRegsAndSync(1);
  Register objReg = R2.scratchReg();

  getEnvironmentCoordinateObject(objReg);
  Address address =
      getEnvironmentCoordinateAddressFromObject(objReg, R1.scratchReg());
  // GC pre-barrier on the old slot value, then overwrite it.
  masm.guardedCallPreBarrier(address, MIRType::Value);
  masm.storeValue(R0, address);
  frame.push(R0);

  // Only R0 is live at this point.
  // Scope coordinate object is already in R2.scratchReg().
  Register temp = R1.scratchReg();

  // Post-barrier: needed only when a tenured object now references a nursery
  // cell. Skip if the env object is itself in the nursery or the value isn't.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

  masm.call(&postBarrierSlot_);  // Won't clobber R0

  masm.bind(&skipBarrier);
  return true;
}
template <>
bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
  // Interpreter path: everything (hops, slot, fixed vs. dynamic) is decided
  // at runtime, so allocate scratch registers explicitly. R2 is reserved for
  // the RHS value and the post-barrier calling convention.
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.take(R2);
  if (HasInterpreterPCReg()) {
    regs.take(InterpreterPCReg);
  }

  Register env = regs.takeAny();
  Register scratch1 = regs.takeAny();
  Register scratch2 = regs.takeAny();
  Register scratch3 = regs.takeAny();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch1);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch1);

  // Store the RHS Value in R2.
  masm.loadValue(frame.addressOfStackValue(-1), R2);

  // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
  // guardedCallPreBarrierAnyZone once to avoid code bloat.

  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch1,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    BaseValueIndex slotAddr(env, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), scratch2);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    BaseValueIndex slotAddr(scratch2, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
  }
  masm.bind(&done);

  // Pre-barrier and store.
  Address slotAddr(scratch2, 0);
  masm.guardedCallPreBarrierAnyZone(slotAddr, MIRType::Value, scratch3);
  masm.storeValue(R2, slotAddr);

  // Post barrier.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, env, scratch1, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R2, scratch1,
                                &skipBarrier);
  {
    // Post barrier code expects the object in R2.
    masm.movePtr(env, R2.scratchReg());
    masm.call(&postBarrierSlot_);
  }
  masm.bind(&skipBarrier);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetName() {
  // JSOp::GetName: name lookup along the environment chain via IC.
  frame.syncStack(0);

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindName() {
  // JSOp::BindName: find the environment object holding a name, via IC.
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelName() {
  // JSOp::DelName: delete a name binding via a VM call; the out-value
  // (boolean result) is pushed below.
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();

  pushArg(R0.scratchReg());
  pushScriptNameArg(R1.scratchReg(), R2.scratchReg());

  using Fn = bool (*)(JSContext*, Handle<PropertyName*>, HandleObject,
                      MutableHandleValue);
  if (!callVM<Fn, js::DeleteNameOperation>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_GetImport() {
  // Compiler path: resolve the import binding at compile time and bake the
  // target environment and slot offset into the generated code.
  JSScript* script = handler.script();
  ModuleEnvironmentObject* env = GetModuleEnvironmentForScript(script);
  MOZ_ASSERT(env);

  jsid id = NameToId(script->getName(handler.pc()));
  ModuleEnvironmentObject* targetEnv;
  Maybe<PropertyInfo> prop;
  MOZ_ALWAYS_TRUE(env->lookupImport(id, &targetEnv, &prop));

  frame.syncStack(0);

  uint32_t slot = prop->slot();
  Register scratch = R0.scratchReg();
  masm.movePtr(ImmGCPtr(targetEnv), scratch);
  if (slot < targetEnv->numFixedSlots()) {
    masm.loadValue(Address(scratch, NativeObject::getFixedSlotOffset(slot)),
                   R0);
  } else {
    masm.loadPtr(Address(scratch, NativeObject::offsetOfSlots()), scratch);
    masm.loadValue(
        Address(scratch, (slot - targetEnv->numFixedSlots()) * sizeof(Value)),
        R0);
  }

  // Imports are initialized by this point except in rare circumstances, so
  // don't emit a check unless we have to.
  if (targetEnv->getSlot(slot).isMagic(JS_UNINITIALIZED_LEXICAL)) {
    if (!emitUninitializedLexicalCheck(R0)) {
      return false;
    }
  }

  frame.push(R0);
  return true;
}
template <>
bool BaselineInterpreterCodeGen::emit_GetImport() {
  // Interpreter path: no static script, so resolve the import via a VM call.
  frame.syncStack(0);

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();

  pushBytecodePCArg();
  pushScriptArg();
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, HandleScript, jsbytecode*,
                      MutableHandleValue);
  if (!callVM<Fn, GetImportOperation>()) {
    return false;
  }

  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetIntrinsic() {
  // JSOp::GetIntrinsic: load a self-hosting intrinsic via IC.
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetIntrinsic() {
  // JSOp::SetIntrinsic: store the top-of-stack value into a self-hosting
  // intrinsic via a VM call. The value stays on the stack.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();

  pushArg(R0);
  pushBytecodePCArg();
  pushScriptArg();

  using Fn = bool (*)(JSContext*, JSScript*, jsbytecode*, HandleValue);
  return callVM<Fn, SetIntrinsicOperation>();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GlobalOrEvalDeclInstantiation() {
  // Instantiate global/eval-scope declarations via a VM call. The GCThingIndex
  // operand comes from the bytecode's length operand.
  frame.syncStack(0);

  prepareVMCall();

  loadInt32LengthBytecodeOperand(R0.scratchReg());
  pushArg(R0.scratchReg());
  pushScriptArg();
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, HandleObject, HandleScript, GCThingIndex);
  return callVM<Fn, js::GlobalOrEvalDeclInstantiation>();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInitPropGetterSetter() {
  // Shared emitter for InitPropGetter/Setter and hidden variants: installs an
  // accessor (top of stack) on an object (below it) via a VM call.
  // Keep values on the stack for the decompiler.
  frame.syncStack(0);

  prepareVMCall();

  masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
  masm.unboxObject(frame.addressOfStackValue(-2), R1.scratchReg());

  pushArg(R0.scratchReg());
  pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
  pushArg(R1.scratchReg());
  pushBytecodePCArg();

  using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject,
                      Handle<PropertyName*>, HandleObject);
  if (!callVM<Fn, InitPropGetterSetterOperation>()) {
    return false;
  }

  // Pop the accessor; the object stays on the stack.
  frame.pop();
  return true;
}
// All four property-accessor init ops share the same codegen; the bytecode pc
// passed to the VM call distinguishes them.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitPropGetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenPropGetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitPropSetter() {
  return emitInitPropGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenPropSetter() {
  return emitInitPropGetterSetter();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInitElemGetterSetter() {
  // Shared emitter for InitElemGetter/Setter and hidden variants. Stack is
  // |obj, id, accessor| from the bottom.
  // Load index and value in R0 and R1, but keep values on the stack for the
  // decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());

  prepareVMCall();

  pushArg(R1.scratchReg());
  pushArg(R0);
  masm.unboxObject(frame.addressOfStackValue(-3), R0.scratchReg());
  pushArg(R0.scratchReg());
  pushBytecodePCArg();

  using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, HandleValue,
                      HandleObject);
  if (!callVM<Fn, InitElemGetterSetterOperation>()) {
    return false;
  }

  // Pop id and accessor; the object stays on the stack.
  frame.popn(2);
  return true;
}
// All four element-accessor init ops share the same codegen; the bytecode pc
// passed to the VM call distinguishes them.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemSetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemSetter() {
  return emitInitElemGetterSetter();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemInc() {
  // JSOp::InitElemInc: define obj[index] = rhs via IC, pop the rhs, and bump
  // the int32 index left on the stack for the next iteration.
  // Keep the object and rhs on the stack.
  frame.syncStack(0);

  // Load object in R0, index in R1.
  masm.loadValue(frame.addressOfStackValue(-3), R0);
  masm.loadValue(frame.addressOfStackValue(-2), R1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Pop the rhs
  frame.pop();

  // Increment index
  Address indexAddr = frame.addressOfStackValue(-1);
#ifdef DEBUG
  Label isInt32;
  masm.branchTestInt32(Assembler::Equal, indexAddr, &isInt32);
  masm.assumeUnreachable("INITELEM_INC index must be Int32");
  masm.bind(&isInt32);
#endif
  masm.incrementInt32Value(indexAddr);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_GetLocal() {
  // Compiler path: the local's index is a bytecode constant.
  frame.pushLocal(GET_LOCALNO(handler.pc()));
  return true;
}
// Compute the address of local slot |indexScratch| for the interpreter, where
// the index is only known at runtime. Clobbers |indexScratch|.
static BaseValueIndex ComputeAddressOfLocal(MacroAssembler& masm,
                                            Register indexScratch) {
  // Locals are stored in memory at a negative offset from the frame pointer. We
  // negate the index first to effectively subtract it.
  masm.negPtr(indexScratch);
  return BaseValueIndex(FramePointer, indexScratch,
                        BaselineFrame::reverseOffsetOfLocal(0));
}
template <>
bool BaselineInterpreterCodeGen::emit_GetLocal() {
  // Interpreter path: read the local index from the bytecode operand.
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emit_SetLocal() {
  // Ensure no other StackValue refers to the old value, for instance i + (i =
  // 3). This also allows us to use R0 as scratch below.
  frame.syncStack(1);

  uint32_t local = GET_LOCALNO(handler.pc());
  frame.storeStackValue(-1, frame.addressOfLocal(local), R0);
  return true;
}
template <>
bool BaselineInterpreterCodeGen::emit_SetLocal() {
  // Interpreter path: index comes from the bytecode operand; copy the
  // top-of-stack value into the local slot. The value stays on the stack.
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  masm.storeValue(R1, addr);
  return true;
}
template <>
bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op) {
  // Shared emitter for GetArg/SetArg. When formals alias the arguments
  // object, the access goes through the ArgumentsData vector (with GC
  // barriers on stores); otherwise it hits the frame slot directly.
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  uint32_t arg = GET_ARGNO(handler.pc());

  // Fast path: the script does not use |arguments| or formals don't
  // alias the arguments object.
  if (!handler.script()->argsObjAliasesFormals()) {
    if (op == JSOp::GetArg) {
      frame.pushArg(arg);
    } else {
      // See the comment in emit_SetLocal.
      frame.syncStack(1);
      frame.storeStackValue(-1, frame.addressOfArg(arg), R0);
    }
    return true;
  }

  // Sync so that we can use R0.
  frame.syncStack(0);

  // Load the arguments object data vector.
  Register reg = R2.scratchReg();
  masm.loadPtr(frame.addressOfArgsObj(), reg);
  masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

  // Load/store the argument.
  Address argAddr(reg, ArgumentsData::offsetOfArgs() + arg * sizeof(Value));
  if (op == JSOp::GetArg) {
    masm.loadValue(argAddr, R0);
    frame.push(R0);
  } else {
    // Pre-barrier the old slot value, then store the new one.
    Register temp = R1.scratchReg();
    masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value, temp);
    masm.loadValue(frame.addressOfStackValue(-1), R0);
    masm.storeValue(R0, argAddr);

    MOZ_ASSERT(frame.numUnsyncedSlots() == 0);

    // Reload the arguments object.
    Register reg = R2.scratchReg();
    masm.loadPtr(frame.addressOfArgsObj(), reg);

    // Post-barrier: only needed when a tenured args object now holds a
    // nursery value.
    Label skipBarrier;

    masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

    masm.call(&postBarrierSlot_);

    masm.bind(&skipBarrier);
  }

  return true;
}
template <>
bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op) {
  // Interpreter variant of GetArg/SetArg: whether the formal aliases the
  // arguments object must be checked at runtime (frame flag + script flag).
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  // Load the index.
  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  // If the frame has no arguments object, this must be an unaliased access.
  Label isUnaliased, done;
  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::HAS_ARGS_OBJ), &isUnaliased);
  {
    Register reg = R2.scratchReg();

    // If it's an unmapped arguments object, this is an unaliased access.
    loadScript(reg);
    masm.branchTest32(
        Assembler::Zero, Address(reg, JSScript::offsetOfImmutableFlags()),
        Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj)),
        &isUnaliased);

    // Load the arguments object data vector.
    masm.loadPtr(frame.addressOfArgsObj(), reg);
    masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

    // Load/store the argument.
    BaseValueIndex argAddr(reg, argReg, ArgumentsData::offsetOfArgs());
    if (op == JSOp::GetArg) {
      masm.loadValue(argAddr, R0);
      frame.push(R0);
    } else {
      // Pre-barrier the old slot value, then store the new one.
      masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value,
                                        R0.scratchReg());
      masm.loadValue(frame.addressOfStackValue(-1), R0);
      masm.storeValue(R0, argAddr);

      // Reload the arguments object.
      masm.loadPtr(frame.addressOfArgsObj(), reg);

      // Post-barrier: skip when the args object is in the nursery or the
      // stored value isn't a nursery cell.
      Register temp = R1.scratchReg();
      masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &done);
      masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &done);

      masm.call(&postBarrierSlot_);
    }
    masm.jump(&done);
  }
  masm.bind(&isUnaliased);
  {
    // Unaliased access: read/write the actual-argument slot on the frame.
    BaseValueIndex addr(FramePointer, argReg,
                        JitFrameLayout::offsetOfActualArgs());
    if (op == JSOp::GetArg) {
      masm.loadValue(addr, R0);
      frame.push(R0);
    } else {
      masm.loadValue(frame.addressOfStackValue(-1), R0);
      masm.storeValue(R0, addr);
    }
  }
  masm.bind(&done);
  return true;
}
// Both argument ops share emitFormalArgAccess; the op selects load vs. store.

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetArg() {
  return emitFormalArgAccess(JSOp::GetArg);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetArg() {
  return emitFormalArgAccess(JSOp::SetArg);
}
template <>
bool BaselineInterpreterCodeGen::emit_GetFrameArg() {
  // Interpreter path: read the argument index from the bytecode operand and
  // load the actual-argument slot from the frame (never the arguments
  // object — GetFrameArg is only emitted when formals don't alias it).
  frame.syncStack(0);

  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  BaseValueIndex addr(FramePointer, argReg,
                      JitFrameLayout::offsetOfActualArgs());
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}
4158 template <>
4159 bool BaselineCompilerCodeGen::emit_GetFrameArg() {
4160 uint32_t arg = GET_ARGNO(handler.pc());
4161 frame.pushArg(arg);
4162 return true;
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ArgumentsLength() {
  // Push the frame's actual-argument count as an int32 Value.
  frame.syncStack(0);

  masm.loadNumActualArgs(FramePointer, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);

  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetActualArg() {
  // Pop an int32 index and push the corresponding actual argument from the
  // frame. Debug builds verify the index is an in-range int32.
  frame.popRegsAndSync(1);

#ifdef DEBUG
  {
    Label ok;
    masm.branchTestInt32(Assembler::Equal, R0, &ok);
    masm.assumeUnreachable("GetActualArg unexpected type");
    masm.bind(&ok);
  }
#endif

  Register index = R0.scratchReg();
  masm.unboxInt32(R0, index);

#ifdef DEBUG
  {
    Label ok;
    masm.loadNumActualArgs(FramePointer, R1.scratchReg());
    masm.branch32(Assembler::Above, R1.scratchReg(), index, &ok);
    masm.assumeUnreachable("GetActualArg invalid index");
    masm.bind(&ok);
  }
#endif

  BaseValueIndex addr(FramePointer, index,
                      JitFrameLayout::offsetOfActualArgs());
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}
template <>
void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest) {
  // Compiler path: nargs is a compile-time constant of the function.
  masm.move32(Imm32(handler.function()->nargs()), dest);
}
template <>
void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest) {
  // Interpreter path: read nargs from the callee function at runtime.
  masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), dest);
  masm.loadFunctionArgCount(dest, dest);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewTarget() {
  // JSOp::NewTarget: push new.target — the value stored just past the actual
  // arguments when constructing, undefined otherwise. Arrow functions get
  // their new.target from the environment, never from this op.
  MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow());

  frame.syncStack(0);

#ifdef DEBUG
  // Debug-only sanity checks: callee must be a function and not an arrow.
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();

  Label isFunction;
  masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
  masm.branchTestPtr(Assembler::Zero, scratch1, Imm32(CalleeTokenScriptBit),
                     &isFunction);
  masm.assumeUnreachable("Unexpected non-function script");
  masm.bind(&isFunction);

  Label notArrow;
  masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
  masm.branchFunctionKind(Assembler::NotEqual,
                          FunctionFlags::FunctionKind::Arrow, scratch1,
                          scratch2, &notArrow);
  masm.assumeUnreachable("Unexpected arrow function");
  masm.bind(&notArrow);
#endif

  // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)])
  Label notConstructing, done;
  masm.branchTestPtr(Assembler::Zero, frame.addressOfCalleeToken(),
                     Imm32(CalleeToken_FunctionConstructing), &notConstructing);
  {
    Register argvLen = R0.scratchReg();
    Register nformals = R1.scratchReg();
    masm.loadNumActualArgs(FramePointer, argvLen);

    // If argvLen < nformals, set argvlen := nformals.
    loadNumFormalArguments(nformals);
    masm.cmp32Move32(Assembler::Below, argvLen, nformals, nformals, argvLen);

    BaseValueIndex newTarget(FramePointer, argvLen,
                             JitFrameLayout::offsetOfActualArgs());
    masm.loadValue(newTarget, R0);
    masm.jump(&done);
  }
  // else push(undefined)
  masm.bind(&notConstructing);
  masm.moveValue(UndefinedValue(), R0);

  masm.bind(&done);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowSetConst() {
  // Always throws: assignment to a const binding.
  prepareVMCall();
  pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN));

  using Fn = bool (*)(JSContext*, unsigned);
  return callVM<Fn, jit::ThrowRuntimeLexicalError>();
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitUninitializedLexicalCheck(
    const ValueOperand& val) {
  // Throw a runtime lexical error (TDZ violation) if |val| is the
  // JS_UNINITIALIZED_LEXICAL magic value; otherwise fall through.
  Label done;
  masm.branchTestMagicValue(Assembler::NotEqual, val, JS_UNINITIALIZED_LEXICAL,
                            &done);

  prepareVMCall();
  pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL));

  using Fn = bool (*)(JSContext*, unsigned);
  if (!callVM<Fn, jit::ThrowRuntimeLexicalError>()) {
    return false;
  }

  masm.bind(&done);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckLexical() {
  // TDZ check on the top-of-stack value; leaves the stack unchanged.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  return emitUninitializedLexicalCheck(R0);
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckAliasedLexical() {
  // Same TDZ check as CheckLexical; the value was already loaded on-stack.
  return emit_CheckLexical();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLexical() {
  // Initializing a (non-aliased) lexical is a plain local store.
  return emit_SetLocal();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitGLexical() {
  // Initialize a global lexical: pop the value, push the global lexical
  // environment as the receiver, re-push the value, and reuse SetProp.
  frame.popRegsAndSync(1);
  pushGlobalLexicalEnvironmentValue(R1);
  frame.push(R0);
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitAliasedLexical() {
  // Initializing an aliased lexical is a plain aliased-var store.
  return emit_SetAliasedVar();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Uninitialized() {
  // Push the TDZ sentinel value.
  frame.push(MagicValue(JS_UNINITIALIZED_LEXICAL));
  return true;
}
template <>
bool BaselineCompilerCodeGen::emitCall(JSOp op) {
  // Shared emitter for all non-spread invoke ops. The call IC expects argc in
  // R0's scratch register and leaves the return value in R0.
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);

  uint32_t argc = GET_ARGC(handler.pc());
  masm.move32(Imm32(argc), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo: pop callee + |this| + args (+ new.target when
  // constructing), then push the result.
  bool construct = IsConstructOp(op);
  frame.popn(2 + argc + construct);
  frame.push(R0);
  return true;
}
template <>
bool BaselineInterpreterCodeGen::emitCall(JSOp op) {
  // Interpreter variant: argc is a runtime bytecode operand, so the stack
  // adjustment after the IC must also be computed at runtime.
  MOZ_ASSERT(IsInvokeOp(op));

  // The IC expects argc in R0.
  LoadUint16Operand(masm, R0.scratchReg());
  if (!emitNextIC()) {
    return false;
  }

  // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
  // The return value is in R0 so we can't use that.
  Register scratch = R1.scratchReg();
  uint32_t extraValuesToPop = IsConstructOp(op) ? 3 : 2;
  Register spReg = AsRegister(masm.getStackPointer());
  LoadUint16Operand(masm, scratch);
  masm.computeEffectiveAddress(
      BaseValueIndex(spReg, scratch, extraValuesToPop * sizeof(Value)), spReg);
  frame.push(R0);
  return true;
}
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSpreadCall(JSOp op) {
  // Shared emitter for spread invoke ops: always exactly one "argument"
  // (the array of spread values), so argc is the constant 1.
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);
  masm.move32(Imm32(1), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo: pop callee + |this| + args array (+ new.target when
  // constructing), then push the result.
  bool construct = op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall;
  frame.popn(3 + construct);
  frame.push(R0);
  return true;
}
4399 template <typename Handler>
4400 bool BaselineCodeGen<Handler>::emit_Call() {
4401 return emitCall(JSOp::Call);
4404 template <typename Handler>
4405 bool BaselineCodeGen<Handler>::emit_CallContent() {
4406 return emitCall(JSOp::CallContent);
4409 template <typename Handler>
4410 bool BaselineCodeGen<Handler>::emit_CallIgnoresRv() {
4411 return emitCall(JSOp::CallIgnoresRv);
4414 template <typename Handler>
4415 bool BaselineCodeGen<Handler>::emit_CallIter() {
4416 return emitCall(JSOp::CallIter);
4419 template <typename Handler>
4420 bool BaselineCodeGen<Handler>::emit_CallContentIter() {
4421 return emitCall(JSOp::CallContentIter);
4424 template <typename Handler>
4425 bool BaselineCodeGen<Handler>::emit_New() {
4426 return emitCall(JSOp::New);
4429 template <typename Handler>
4430 bool BaselineCodeGen<Handler>::emit_NewContent() {
4431 return emitCall(JSOp::NewContent);
4434 template <typename Handler>
4435 bool BaselineCodeGen<Handler>::emit_SuperCall() {
4436 return emitCall(JSOp::SuperCall);
4439 template <typename Handler>
4440 bool BaselineCodeGen<Handler>::emit_Eval() {
4441 return emitCall(JSOp::Eval);
4444 template <typename Handler>
4445 bool BaselineCodeGen<Handler>::emit_StrictEval() {
4446 return emitCall(JSOp::StrictEval);
4449 template <typename Handler>
4450 bool BaselineCodeGen<Handler>::emit_SpreadCall() {
4451 return emitSpreadCall(JSOp::SpreadCall);
4454 template <typename Handler>
4455 bool BaselineCodeGen<Handler>::emit_SpreadNew() {
4456 return emitSpreadCall(JSOp::SpreadNew);
4459 template <typename Handler>
4460 bool BaselineCodeGen<Handler>::emit_SpreadSuperCall() {
4461 return emitSpreadCall(JSOp::SpreadSuperCall);
4464 template <typename Handler>
4465 bool BaselineCodeGen<Handler>::emit_SpreadEval() {
4466 return emitSpreadCall(JSOp::SpreadEval);
4469 template <typename Handler>
4470 bool BaselineCodeGen<Handler>::emit_StrictSpreadEval() {
4471 return emitSpreadCall(JSOp::StrictSpreadEval);
4474 template <typename Handler>
4475 bool BaselineCodeGen<Handler>::emit_OptimizeSpreadCall() {
4476 frame.popRegsAndSync(1);
4478 if (!emitNextIC()) {
4479 return false;
4482 frame.push(R0);
4483 return true;
4486 template <typename Handler>
4487 bool BaselineCodeGen<Handler>::emit_ImplicitThis() {
4488 frame.syncStack(0);
4489 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
4491 prepareVMCall();
4493 pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
4494 pushArg(R0.scratchReg());
4496 using Fn = bool (*)(JSContext*, HandleObject, Handle<PropertyName*>,
4497 MutableHandleValue);
4498 if (!callVM<Fn, ImplicitThisOperation>()) {
4499 return false;
4502 frame.push(R0);
4503 return true;
4506 template <typename Handler>
4507 bool BaselineCodeGen<Handler>::emit_Instanceof() {
4508 frame.popRegsAndSync(2);
4510 if (!emitNextIC()) {
4511 return false;
4514 frame.push(R0, JSVAL_TYPE_BOOLEAN);
4515 return true;
4518 template <typename Handler>
4519 bool BaselineCodeGen<Handler>::emit_Typeof() {
4520 frame.popRegsAndSync(1);
4522 if (!emitNextIC()) {
4523 return false;
4526 frame.push(R0);
4527 return true;
4530 template <typename Handler>
4531 bool BaselineCodeGen<Handler>::emit_TypeofExpr() {
4532 return emit_Typeof();
4535 template <typename Handler>
4536 bool BaselineCodeGen<Handler>::emit_ThrowMsg() {
4537 prepareVMCall();
4538 pushUint8BytecodeOperandArg(R2.scratchReg());
4540 using Fn = bool (*)(JSContext*, const unsigned);
4541 return callVM<Fn, js::ThrowMsgOperation>();
4544 template <typename Handler>
4545 bool BaselineCodeGen<Handler>::emit_Throw() {
4546 // Keep value to throw in R0.
4547 frame.popRegsAndSync(1);
4549 prepareVMCall();
4550 pushArg(R0);
4552 using Fn = bool (*)(JSContext*, HandleValue);
4553 return callVM<Fn, js::ThrowOperation>();
4556 template <typename Handler>
4557 bool BaselineCodeGen<Handler>::emit_Try() {
4558 return true;
4561 template <typename Handler>
4562 bool BaselineCodeGen<Handler>::emit_Finally() {
4563 // To match the interpreter, emit an interrupt check at the start of the
4564 // finally block.
4565 return emitInterruptCheck();
4568 static void LoadBaselineScriptResumeEntries(MacroAssembler& masm,
4569 JSScript* script, Register dest,
4570 Register scratch) {
4571 MOZ_ASSERT(dest != scratch);
4573 masm.movePtr(ImmPtr(script->jitScript()), dest);
4574 masm.loadPtr(Address(dest, JitScript::offsetOfBaselineScript()), dest);
4575 masm.load32(Address(dest, BaselineScript::offsetOfResumeEntriesOffset()),
4576 scratch);
4577 masm.addPtr(scratch, dest);
4580 template <typename Handler>
4581 void BaselineCodeGen<Handler>::emitInterpJumpToResumeEntry(Register script,
4582 Register resumeIndex,
4583 Register scratch) {
4584 // Load JSScript::immutableScriptData() into |script|.
4585 masm.loadPtr(Address(script, JSScript::offsetOfSharedData()), script);
4586 masm.loadPtr(Address(script, SharedImmutableScriptData::offsetOfISD()),
4587 script);
4589 // Load the resume pcOffset in |resumeIndex|.
4590 masm.load32(
4591 Address(script, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
4592 scratch);
4593 masm.computeEffectiveAddress(BaseIndex(scratch, resumeIndex, TimesFour),
4594 scratch);
4595 masm.load32(BaseIndex(script, scratch, TimesOne), resumeIndex);
4597 // Add resume offset to PC, jump to it.
4598 masm.computeEffectiveAddress(BaseIndex(script, resumeIndex, TimesOne,
4599 ImmutableScriptData::offsetOfCode()),
4600 script);
4601 Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
4602 masm.storePtr(script, pcAddr);
4603 emitJumpToInterpretOpLabel();
4606 template <>
4607 void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex,
4608 Register scratch1,
4609 Register scratch2) {
4610 LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
4611 masm.loadPtr(
4612 BaseIndex(scratch1, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
4613 scratch1);
4614 masm.jump(scratch1);
4617 template <>
4618 void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex,
4619 Register scratch1,
4620 Register scratch2) {
4621 loadScript(scratch1);
4622 emitInterpJumpToResumeEntry(scratch1, resumeIndex, scratch2);
4625 template <>
4626 template <typename F1, typename F2>
4627 [[nodiscard]] bool BaselineCompilerCodeGen::emitDebugInstrumentation(
4628 const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
4629 // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
4630 // knows statically whether we're compiling with debug instrumentation.
4632 if (handler.compileDebugInstrumentation()) {
4633 return ifDebuggee();
4636 if (ifNotDebuggee) {
4637 return (*ifNotDebuggee)();
4640 return true;
4643 template <>
4644 template <typename F1, typename F2>
4645 [[nodiscard]] bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
4646 const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
4647 // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
4648 // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
4649 // flag.
4651 Label isNotDebuggee, done;
4653 CodeOffset toggleOffset = masm.toggledJump(&isNotDebuggee);
4654 if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
4655 return false;
4658 masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
4659 Imm32(BaselineFrame::DEBUGGEE), &isNotDebuggee);
4661 if (!ifDebuggee()) {
4662 return false;
4665 if (ifNotDebuggee) {
4666 masm.jump(&done);
4669 masm.bind(&isNotDebuggee);
4671 if (ifNotDebuggee && !(*ifNotDebuggee)()) {
4672 return false;
4675 masm.bind(&done);
4676 return true;
4679 template <typename Handler>
4680 bool BaselineCodeGen<Handler>::emit_PushLexicalEnv() {
4681 // Call a stub to push the block on the block chain.
4682 prepareVMCall();
4683 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4685 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
4686 R2.scratchReg());
4687 pushArg(R0.scratchReg());
4689 using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<LexicalScope*>);
4690 return callVM<Fn, jit::PushLexicalEnv>();
4693 template <typename Handler>
4694 bool BaselineCodeGen<Handler>::emit_PushClassBodyEnv() {
4695 prepareVMCall();
4696 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4698 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
4699 R2.scratchReg());
4700 pushArg(R0.scratchReg());
4702 using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<ClassBodyScope*>);
4703 return callVM<Fn, jit::PushClassBodyEnv>();
4706 template <typename Handler>
4707 bool BaselineCodeGen<Handler>::emit_PopLexicalEnv() {
4708 frame.syncStack(0);
4710 Register scratch1 = R0.scratchReg();
4712 auto ifDebuggee = [this, scratch1]() {
4713 masm.loadBaselineFramePtr(FramePointer, scratch1);
4715 prepareVMCall();
4716 pushBytecodePCArg();
4717 pushArg(scratch1);
4719 using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
4720 return callVM<Fn, jit::DebugLeaveThenPopLexicalEnv>();
4722 auto ifNotDebuggee = [this, scratch1]() {
4723 Register scratch2 = R1.scratchReg();
4724 masm.loadPtr(frame.addressOfEnvironmentChain(), scratch1);
4725 masm.debugAssertObjectHasClass(scratch1, scratch2,
4726 &LexicalEnvironmentObject::class_);
4727 Address enclosingAddr(scratch1,
4728 EnvironmentObject::offsetOfEnclosingEnvironment());
4729 masm.unboxObject(enclosingAddr, scratch1);
4730 masm.storePtr(scratch1, frame.addressOfEnvironmentChain());
4731 return true;
4733 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
4736 template <typename Handler>
4737 bool BaselineCodeGen<Handler>::emit_FreshenLexicalEnv() {
4738 frame.syncStack(0);
4740 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4742 auto ifDebuggee = [this]() {
4743 prepareVMCall();
4744 pushBytecodePCArg();
4745 pushArg(R0.scratchReg());
4747 using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
4748 return callVM<Fn, jit::DebugLeaveThenFreshenLexicalEnv>();
4750 auto ifNotDebuggee = [this]() {
4751 prepareVMCall();
4752 pushArg(R0.scratchReg());
4754 using Fn = bool (*)(JSContext*, BaselineFrame*);
4755 return callVM<Fn, jit::FreshenLexicalEnv>();
4757 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
4760 template <typename Handler>
4761 bool BaselineCodeGen<Handler>::emit_RecreateLexicalEnv() {
4762 frame.syncStack(0);
4764 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4766 auto ifDebuggee = [this]() {
4767 prepareVMCall();
4768 pushBytecodePCArg();
4769 pushArg(R0.scratchReg());
4771 using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
4772 return callVM<Fn, jit::DebugLeaveThenRecreateLexicalEnv>();
4774 auto ifNotDebuggee = [this]() {
4775 prepareVMCall();
4776 pushArg(R0.scratchReg());
4778 using Fn = bool (*)(JSContext*, BaselineFrame*);
4779 return callVM<Fn, jit::RecreateLexicalEnv>();
4781 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
4784 template <typename Handler>
4785 bool BaselineCodeGen<Handler>::emit_DebugLeaveLexicalEnv() {
4786 auto ifDebuggee = [this]() {
4787 prepareVMCall();
4788 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4789 pushBytecodePCArg();
4790 pushArg(R0.scratchReg());
4792 using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
4793 return callVM<Fn, jit::DebugLeaveLexicalEnv>();
4795 return emitDebugInstrumentation(ifDebuggee);
4798 template <typename Handler>
4799 bool BaselineCodeGen<Handler>::emit_PushVarEnv() {
4800 prepareVMCall();
4801 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4802 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
4803 R2.scratchReg());
4804 pushArg(R0.scratchReg());
4806 using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<Scope*>);
4807 return callVM<Fn, jit::PushVarEnv>();
4810 template <typename Handler>
4811 bool BaselineCodeGen<Handler>::emit_EnterWith() {
4812 // Pop "with" object to R0.
4813 frame.popRegsAndSync(1);
4815 // Call a stub to push the object onto the environment chain.
4816 prepareVMCall();
4818 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
4819 R2.scratchReg());
4820 pushArg(R0);
4821 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
4822 pushArg(R1.scratchReg());
4824 using Fn =
4825 bool (*)(JSContext*, BaselineFrame*, HandleValue, Handle<WithScope*>);
4826 return callVM<Fn, jit::EnterWith>();
4829 template <typename Handler>
4830 bool BaselineCodeGen<Handler>::emit_LeaveWith() {
4831 // Call a stub to pop the with object from the environment chain.
4832 prepareVMCall();
4834 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4835 pushArg(R0.scratchReg());
4837 using Fn = bool (*)(JSContext*, BaselineFrame*);
4838 return callVM<Fn, jit::LeaveWith>();
4841 template <typename Handler>
4842 bool BaselineCodeGen<Handler>::emit_Exception() {
4843 prepareVMCall();
4845 using Fn = bool (*)(JSContext*, MutableHandleValue);
4846 if (!callVM<Fn, GetAndClearException>()) {
4847 return false;
4850 frame.push(R0);
4851 return true;
4854 template <typename Handler>
4855 bool BaselineCodeGen<Handler>::emit_Debugger() {
4856 prepareVMCall();
4858 frame.assertSyncedStack();
4859 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4860 pushArg(R0.scratchReg());
4862 using Fn = bool (*)(JSContext*, BaselineFrame*);
4863 if (!callVM<Fn, jit::OnDebuggerStatement>()) {
4864 return false;
4867 return true;
4870 template <typename Handler>
4871 bool BaselineCodeGen<Handler>::emitDebugEpilogue() {
4872 auto ifDebuggee = [this]() {
4873 // Move return value into the frame's rval slot.
4874 masm.storeValue(JSReturnOperand, frame.addressOfReturnValue());
4875 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
4877 // Load BaselineFrame pointer in R0.
4878 frame.syncStack(0);
4879 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
4881 prepareVMCall();
4882 pushBytecodePCArg();
4883 pushArg(R0.scratchReg());
4885 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugEpilogue;
4887 using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
4888 if (!callVM<Fn, jit::DebugEpilogueOnBaselineReturn>(kind)) {
4889 return false;
4892 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
4893 return true;
4895 return emitDebugInstrumentation(ifDebuggee);
4898 template <typename Handler>
4899 bool BaselineCodeGen<Handler>::emitReturn() {
4900 if (handler.shouldEmitDebugEpilogueAtReturnOp()) {
4901 if (!emitDebugEpilogue()) {
4902 return false;
4906 // Only emit the jump if this JSOp::RetRval is not the last instruction.
4907 // Not needed for last instruction, because last instruction flows
4908 // into return label.
4909 if (!handler.isDefinitelyLastOp()) {
4910 masm.jump(&return_);
4913 return true;
4916 template <typename Handler>
4917 bool BaselineCodeGen<Handler>::emit_Return() {
4918 frame.assertStackDepth(1);
4920 frame.popValue(JSReturnOperand);
4921 return emitReturn();
4924 template <typename Handler>
4925 void BaselineCodeGen<Handler>::emitLoadReturnValue(ValueOperand val) {
4926 Label done, noRval;
4927 masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
4928 Imm32(BaselineFrame::HAS_RVAL), &noRval);
4929 masm.loadValue(frame.addressOfReturnValue(), val);
4930 masm.jump(&done);
4932 masm.bind(&noRval);
4933 masm.moveValue(UndefinedValue(), val);
4935 masm.bind(&done);
4938 template <typename Handler>
4939 bool BaselineCodeGen<Handler>::emit_RetRval() {
4940 frame.assertStackDepth(0);
4942 masm.moveValue(UndefinedValue(), JSReturnOperand);
4944 if (!handler.maybeScript() || !handler.maybeScript()->noScriptRval()) {
4945 // Return the value in the return value slot, if any.
4946 Label done;
4947 Address flags = frame.addressOfFlags();
4948 masm.branchTest32(Assembler::Zero, flags, Imm32(BaselineFrame::HAS_RVAL),
4949 &done);
4950 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
4951 masm.bind(&done);
4954 return emitReturn();
4957 template <typename Handler>
4958 bool BaselineCodeGen<Handler>::emit_ToPropertyKey() {
4959 frame.popRegsAndSync(1);
4961 if (!emitNextIC()) {
4962 return false;
4965 frame.push(R0);
4966 return true;
4969 template <typename Handler>
4970 bool BaselineCodeGen<Handler>::emit_ToAsyncIter() {
4971 frame.syncStack(0);
4972 masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
4973 masm.loadValue(frame.addressOfStackValue(-1), R1);
4975 prepareVMCall();
4976 pushArg(R1);
4977 pushArg(R0.scratchReg());
4979 using Fn = JSObject* (*)(JSContext*, HandleObject, HandleValue);
4980 if (!callVM<Fn, js::CreateAsyncFromSyncIterator>()) {
4981 return false;
4984 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
4985 frame.popn(2);
4986 frame.push(R0);
4987 return true;
4990 template <typename Handler>
4991 bool BaselineCodeGen<Handler>::emit_CanSkipAwait() {
4992 frame.syncStack(0);
4993 masm.loadValue(frame.addressOfStackValue(-1), R0);
4995 prepareVMCall();
4996 pushArg(R0);
4998 using Fn = bool (*)(JSContext*, HandleValue, bool* canSkip);
4999 if (!callVM<Fn, js::CanSkipAwait>()) {
5000 return false;
5003 masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
5004 frame.push(R0, JSVAL_TYPE_BOOLEAN);
5005 return true;
5008 template <typename Handler>
5009 bool BaselineCodeGen<Handler>::emit_MaybeExtractAwaitValue() {
5010 frame.syncStack(0);
5011 masm.loadValue(frame.addressOfStackValue(-2), R0);
5013 masm.unboxBoolean(frame.addressOfStackValue(-1), R1.scratchReg());
5015 Label cantExtract;
5016 masm.branchIfFalseBool(R1.scratchReg(), &cantExtract);
5018 prepareVMCall();
5019 pushArg(R0);
5021 using Fn = bool (*)(JSContext*, HandleValue, MutableHandleValue);
5022 if (!callVM<Fn, js::ExtractAwaitValue>()) {
5023 return false;
5026 masm.storeValue(R0, frame.addressOfStackValue(-2));
5027 masm.bind(&cantExtract);
5029 return true;
5032 template <typename Handler>
5033 bool BaselineCodeGen<Handler>::emit_AsyncAwait() {
5034 frame.syncStack(0);
5035 masm.loadValue(frame.addressOfStackValue(-2), R1);
5036 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
5038 prepareVMCall();
5039 pushArg(R1);
5040 pushArg(R0.scratchReg());
5042 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
5043 HandleValue);
5044 if (!callVM<Fn, js::AsyncFunctionAwait>()) {
5045 return false;
5048 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5049 frame.popn(2);
5050 frame.push(R0);
5051 return true;
5054 template <typename Handler>
5055 bool BaselineCodeGen<Handler>::emit_AsyncResolve() {
5056 frame.syncStack(0);
5057 masm.loadValue(frame.addressOfStackValue(-2), R1);
5058 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
5060 prepareVMCall();
5061 pushUint8BytecodeOperandArg(R2.scratchReg());
5062 pushArg(R1);
5063 pushArg(R0.scratchReg());
5065 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
5066 HandleValue, AsyncFunctionResolveKind);
5067 if (!callVM<Fn, js::AsyncFunctionResolve>()) {
5068 return false;
5071 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5072 frame.popn(2);
5073 frame.push(R0);
5074 return true;
5077 template <typename Handler>
5078 bool BaselineCodeGen<Handler>::emit_CheckObjCoercible() {
5079 frame.syncStack(0);
5080 masm.loadValue(frame.addressOfStackValue(-1), R0);
5082 Label fail, done;
5084 masm.branchTestUndefined(Assembler::Equal, R0, &fail);
5085 masm.branchTestNull(Assembler::NotEqual, R0, &done);
5087 masm.bind(&fail);
5088 prepareVMCall();
5090 pushArg(R0);
5092 using Fn = bool (*)(JSContext*, HandleValue);
5093 if (!callVM<Fn, ThrowObjectCoercible>()) {
5094 return false;
5097 masm.bind(&done);
5098 return true;
5101 template <typename Handler>
5102 bool BaselineCodeGen<Handler>::emit_ToString() {
5103 // Keep top stack value in R0.
5104 frame.popRegsAndSync(1);
5106 // Inline path for string.
5107 Label done;
5108 masm.branchTestString(Assembler::Equal, R0, &done);
5110 prepareVMCall();
5112 pushArg(R0);
5114 // Call ToStringSlow which doesn't handle string inputs.
5115 using Fn = JSString* (*)(JSContext*, HandleValue);
5116 if (!callVM<Fn, ToStringSlow<CanGC>>()) {
5117 return false;
5120 masm.tagValue(JSVAL_TYPE_STRING, ReturnReg, R0);
5122 masm.bind(&done);
5123 frame.push(R0);
5124 return true;
5127 static constexpr uint32_t TableSwitchOpLowOffset = 1 * JUMP_OFFSET_LEN;
5128 static constexpr uint32_t TableSwitchOpHighOffset = 2 * JUMP_OFFSET_LEN;
5129 static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset =
5130 3 * JUMP_OFFSET_LEN;
5132 template <>
5133 void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val,
5134 Register dest,
5135 Register scratch1,
5136 Register scratch2) {
5137 jsbytecode* pc = handler.pc();
5138 jsbytecode* defaultpc = pc + GET_JUMP_OFFSET(pc);
5139 Label* defaultLabel = handler.labelOf(defaultpc);
5141 int32_t low = GET_JUMP_OFFSET(pc + TableSwitchOpLowOffset);
5142 int32_t high = GET_JUMP_OFFSET(pc + TableSwitchOpHighOffset);
5143 int32_t length = high - low + 1;
5145 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5146 // all cases are int32).
5147 masm.branchTestInt32(Assembler::NotEqual, val, defaultLabel);
5148 masm.unboxInt32(val, dest);
5150 // Subtract 'low'. Bounds check.
5151 if (low != 0) {
5152 masm.sub32(Imm32(low), dest);
5154 masm.branch32(Assembler::AboveOrEqual, dest, Imm32(length), defaultLabel);
5157 template <>
5158 void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val,
5159 Register dest,
5160 Register scratch1,
5161 Register scratch2) {
5162 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5163 // all cases are int32).
5164 Label done, jumpToDefault;
5165 masm.branchTestInt32(Assembler::NotEqual, val, &jumpToDefault);
5166 masm.unboxInt32(val, dest);
5168 Register pcReg = LoadBytecodePC(masm, scratch1);
5169 Address lowAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpLowOffset);
5170 Address highAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpHighOffset);
5172 // Jump to default if val > high.
5173 masm.branch32(Assembler::LessThan, highAddr, dest, &jumpToDefault);
5175 // Jump to default if val < low.
5176 masm.load32(lowAddr, scratch2);
5177 masm.branch32(Assembler::GreaterThan, scratch2, dest, &jumpToDefault);
5179 // index := val - low.
5180 masm.sub32(scratch2, dest);
5181 masm.jump(&done);
5183 masm.bind(&jumpToDefault);
5184 emitJump();
5186 masm.bind(&done);
5189 template <>
5190 void BaselineCompilerCodeGen::emitTableSwitchJump(Register key,
5191 Register scratch1,
5192 Register scratch2) {
5193 // Jump to resumeEntries[firstResumeIndex + key].
5195 // Note: BytecodeEmitter::allocateResumeIndex static_asserts
5196 // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t.
5197 uint32_t firstResumeIndex =
5198 GET_RESUMEINDEX(handler.pc() + TableSwitchOpFirstResumeIndexOffset);
5199 LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
5200 masm.loadPtr(BaseIndex(scratch1, key, ScaleFromElemWidth(sizeof(uintptr_t)),
5201 firstResumeIndex * sizeof(uintptr_t)),
5202 scratch1);
5203 masm.jump(scratch1);
5206 template <>
5207 void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key,
5208 Register scratch1,
5209 Register scratch2) {
5210 // Load the op's firstResumeIndex in scratch1.
5211 LoadUint24Operand(masm, TableSwitchOpFirstResumeIndexOffset, scratch1);
5213 masm.add32(key, scratch1);
5214 jumpToResumeEntry(scratch1, key, scratch2);
5217 template <typename Handler>
5218 bool BaselineCodeGen<Handler>::emit_TableSwitch() {
5219 frame.popRegsAndSync(1);
5221 Register key = R0.scratchReg();
5222 Register scratch1 = R1.scratchReg();
5223 Register scratch2 = R2.scratchReg();
5225 // Call a stub to convert R0 from double to int32 if needed.
5226 // Note: this stub may clobber scratch1.
5227 masm.call(cx->runtime()->jitRuntime()->getDoubleToInt32ValueStub());
5229 // Load the index in the jump table in |key|, or branch to default pc if not
5230 // int32 or out-of-range.
5231 emitGetTableSwitchIndex(R0, key, scratch1, scratch2);
5233 // Jump to the target pc.
5234 emitTableSwitchJump(key, scratch1, scratch2);
5235 return true;
5238 template <typename Handler>
5239 bool BaselineCodeGen<Handler>::emit_Iter() {
5240 frame.popRegsAndSync(1);
5242 if (!emitNextIC()) {
5243 return false;
5246 frame.push(R0);
5247 return true;
5250 template <typename Handler>
5251 bool BaselineCodeGen<Handler>::emit_MoreIter() {
5252 frame.syncStack(0);
5254 masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());
5256 masm.iteratorMore(R1.scratchReg(), R0, R2.scratchReg());
5257 frame.push(R0);
5258 return true;
5261 template <typename Handler>
5262 bool BaselineCodeGen<Handler>::emitIsMagicValue() {
5263 frame.syncStack(0);
5265 Label isMagic, done;
5266 masm.branchTestMagic(Assembler::Equal, frame.addressOfStackValue(-1),
5267 &isMagic);
5268 masm.moveValue(BooleanValue(false), R0);
5269 masm.jump(&done);
5271 masm.bind(&isMagic);
5272 masm.moveValue(BooleanValue(true), R0);
5274 masm.bind(&done);
5275 frame.push(R0, JSVAL_TYPE_BOOLEAN);
5276 return true;
5279 template <typename Handler>
5280 bool BaselineCodeGen<Handler>::emit_IsNoIter() {
5281 return emitIsMagicValue();
5284 template <typename Handler>
5285 bool BaselineCodeGen<Handler>::emit_EndIter() {
5286 // Pop iterator value.
5287 frame.pop();
5289 // Pop the iterator object to close in R0.
5290 frame.popRegsAndSync(1);
5292 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
5293 MOZ_ASSERT(!regs.has(FramePointer));
5294 if (HasInterpreterPCReg()) {
5295 regs.take(InterpreterPCReg);
5298 Register obj = R0.scratchReg();
5299 regs.take(obj);
5300 masm.unboxObject(R0, obj);
5302 Register temp1 = regs.takeAny();
5303 Register temp2 = regs.takeAny();
5304 Register temp3 = regs.takeAny();
5305 masm.iteratorClose(obj, temp1, temp2, temp3);
5306 return true;
5309 template <typename Handler>
5310 bool BaselineCodeGen<Handler>::emit_CloseIter() {
5311 frame.popRegsAndSync(1);
5313 Register iter = R0.scratchReg();
5314 masm.unboxObject(R0, iter);
5316 return emitNextIC();
5319 template <typename Handler>
5320 bool BaselineCodeGen<Handler>::emit_IsGenClosing() {
5321 return emitIsMagicValue();
5324 template <typename Handler>
5325 bool BaselineCodeGen<Handler>::emit_IsNullOrUndefined() {
5326 frame.syncStack(0);
5328 Label isNullOrUndefined, done;
5329 masm.branchTestNull(Assembler::Equal, frame.addressOfStackValue(-1),
5330 &isNullOrUndefined);
5331 masm.branchTestUndefined(Assembler::Equal, frame.addressOfStackValue(-1),
5332 &isNullOrUndefined);
5333 masm.moveValue(BooleanValue(false), R0);
5334 masm.jump(&done);
5336 masm.bind(&isNullOrUndefined);
5337 masm.moveValue(BooleanValue(true), R0);
5339 masm.bind(&done);
5340 frame.push(R0, JSVAL_TYPE_BOOLEAN);
5341 return true;
5344 template <typename Handler>
5345 bool BaselineCodeGen<Handler>::emit_GetRval() {
5346 frame.syncStack(0);
5348 emitLoadReturnValue(R0);
5350 frame.push(R0);
5351 return true;
5354 template <typename Handler>
5355 bool BaselineCodeGen<Handler>::emit_SetRval() {
5356 // Store to the frame's return value slot.
5357 frame.storeStackValue(-1, frame.addressOfReturnValue(), R2);
5358 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
5359 frame.pop();
5360 return true;
5363 template <typename Handler>
5364 bool BaselineCodeGen<Handler>::emit_Callee() {
5365 MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->function());
5366 frame.syncStack(0);
5367 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(),
5368 R0.scratchReg());
5369 masm.tagValue(JSVAL_TYPE_OBJECT, R0.scratchReg(), R0);
5370 frame.push(R0);
5371 return true;
5374 template <>
5375 bool BaselineCompilerCodeGen::emit_EnvCallee() {
5376 frame.syncStack(0);
5377 uint8_t numHops = GET_UINT8(handler.pc());
5378 Register scratch = R0.scratchReg();
5380 masm.loadPtr(frame.addressOfEnvironmentChain(), scratch);
5381 for (unsigned i = 0; i < numHops; i++) {
5382 Address nextAddr(scratch,
5383 EnvironmentObject::offsetOfEnclosingEnvironment());
5384 masm.unboxObject(nextAddr, scratch);
5387 masm.loadValue(Address(scratch, CallObject::offsetOfCallee()), R0);
5388 frame.push(R0);
5389 return true;
5392 template <>
5393 bool BaselineInterpreterCodeGen::emit_EnvCallee() {
5394 Register scratch = R0.scratchReg();
5395 Register env = R1.scratchReg();
5397 static_assert(JSOpLength_EnvCallee - sizeof(jsbytecode) == ENVCOORD_HOPS_LEN,
5398 "op must have uint8 operand for LoadAliasedVarEnv");
5400 // Load the right environment object.
5401 masm.loadPtr(frame.addressOfEnvironmentChain(), env);
5402 LoadAliasedVarEnv(masm, env, scratch);
5404 masm.pushValue(Address(env, CallObject::offsetOfCallee()));
5405 return true;
5408 template <typename Handler>
5409 bool BaselineCodeGen<Handler>::emit_SuperBase() {
5410 frame.popRegsAndSync(1);
5412 Register scratch = R0.scratchReg();
5413 Register proto = R1.scratchReg();
5415 // Unbox callee.
5416 masm.unboxObject(R0, scratch);
5418 // Load [[HomeObject]]
5419 Address homeObjAddr(scratch,
5420 FunctionExtended::offsetOfMethodHomeObjectSlot());
5422 masm.assertFunctionIsExtended(scratch);
5423 #ifdef DEBUG
5424 Label isObject;
5425 masm.branchTestObject(Assembler::Equal, homeObjAddr, &isObject);
5426 masm.assumeUnreachable("[[HomeObject]] must be Object");
5427 masm.bind(&isObject);
5428 #endif
5429 masm.unboxObject(homeObjAddr, scratch);
5431 // Load prototype from [[HomeObject]]
5432 masm.loadObjProto(scratch, proto);
5434 #ifdef DEBUG
5435 // We won't encounter a lazy proto, because the prototype is guaranteed to
5436 // either be a JSFunction or a PlainObject, and only proxy objects can have a
5437 // lazy proto.
5438 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
5440 Label proxyCheckDone;
5441 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
5442 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase");
5443 masm.bind(&proxyCheckDone);
5444 #endif
5446 Label nullProto, done;
5447 masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto);
5449 // Box prototype and return
5450 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
5451 masm.jump(&done);
5453 masm.bind(&nullProto);
5454 masm.moveValue(NullValue(), R1);
5456 masm.bind(&done);
5457 frame.push(R1);
5458 return true;
5461 template <typename Handler>
5462 bool BaselineCodeGen<Handler>::emit_SuperFun() {
5463 frame.popRegsAndSync(1);
5465 Register callee = R0.scratchReg();
5466 Register proto = R1.scratchReg();
5467 #ifdef DEBUG
5468 Register scratch = R2.scratchReg();
5469 #endif
5471 // Unbox callee.
5472 masm.unboxObject(R0, callee);
5474 #ifdef DEBUG
5475 Label classCheckDone;
5476 masm.branchTestObjIsFunction(Assembler::Equal, callee, scratch, callee,
5477 &classCheckDone);
5478 masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
5479 masm.bind(&classCheckDone);
5480 #endif
5482 // Load prototype of callee
5483 masm.loadObjProto(callee, proto);
5485 #ifdef DEBUG
5486 // We won't encounter a lazy proto, because |callee| is guaranteed to be a
5487 // JSFunction and only proxy objects can have a lazy proto.
5488 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
5490 Label proxyCheckDone;
5491 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
5492 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun");
5493 masm.bind(&proxyCheckDone);
5494 #endif
5496 Label nullProto, done;
5497 masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto);
5499 // Box prototype and return
5500 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
5501 masm.jump(&done);
5503 masm.bind(&nullProto);
5504 masm.moveValue(NullValue(), R1);
5506 masm.bind(&done);
5507 frame.push(R1);
5508 return true;
// JSOp::Arguments: call into the VM to create the arguments object for the
// current baseline frame and push the resulting value (left in R0 by the VM
// call's MutableHandleValue out-param).
5511 template <typename Handler>
5512 bool BaselineCodeGen<Handler>::emit_Arguments() {
5513 frame.syncStack(0);
// Only scripts that actually need an arguments object should reach this op.
5515 MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->needsArgsObj());
5517 prepareVMCall();
5519 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
5520 pushArg(R0.scratchReg());
5522 using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
5523 if (!callVM<Fn, jit::NewArgumentsObject>()) {
5524 return false;
5527 frame.push(R0);
5528 return true;
// JSOp::Rest: the rest-parameter array is created by an inline cache; the IC
// leaves its result value in R0, which is then pushed.
5531 template <typename Handler>
5532 bool BaselineCodeGen<Handler>::emit_Rest() {
5533 frame.syncStack(0);
5535 if (!emitNextIC()) {
5536 return false;
5539 // Mark R0 as pushed stack value.
5540 frame.push(R0);
5541 return true;
// JSOp::Generator: call into the VM to create the generator object from the
// current frame (stack must be empty at this point), then box the returned
// JSObject* and push it.
5544 template <typename Handler>
5545 bool BaselineCodeGen<Handler>::emit_Generator() {
5546 frame.assertStackDepth(0);
5548 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
5550 prepareVMCall();
5551 pushArg(R0.scratchReg());
5553 using Fn = JSObject* (*)(JSContext*, BaselineFrame*);
5554 if (!callVM<Fn, jit::CreateGeneratorFromFrame>()) {
5555 return false;
5558 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5559 frame.push(R0);
5560 return true;
// Shared implementation for JSOp::InitialYield / JSOp::Yield / JSOp::Await:
// suspend the generator and return to the caller with the value on top of
// the expression stack as the return value. When the compiler statically
// knows the expression stack holds only that one value, the suspend is
// inlined (store resume index and environment chain directly into the
// generator object); otherwise it goes through the jit::NormalSuspend VM
// call, which also copies any remaining stack values.
5563 template <typename Handler>
5564 bool BaselineCodeGen<Handler>::emitSuspend(JSOp op) {
5565 MOZ_ASSERT(op == JSOp::InitialYield || op == JSOp::Yield ||
5566 op == JSOp::Await);
5568 // Load the generator object in R2, but leave the return value on the
5569 // expression stack.
5570 Register genObj = R2.scratchReg();
5571 if (op == JSOp::InitialYield) {
5572 // Generator and return value are one and the same.
5573 frame.syncStack(0);
5574 frame.assertStackDepth(1);
5575 masm.unboxObject(frame.addressOfStackValue(-1), genObj);
5576 } else {
5577 frame.popRegsAndSync(1);
5578 masm.unboxObject(R0, genObj);
5581 if (frame.hasKnownStackDepth(1) && !handler.canHaveFixedSlots()) {
5582 // If the expression stack is empty, we can inline the Yield. Note that this
5583 // branch is never taken for the interpreter because it doesn't know static
5584 // stack depths.
5585 MOZ_ASSERT_IF(op == JSOp::InitialYield && handler.maybePC(),
5586 GET_RESUMEINDEX(handler.maybePC()) == 0);
5587 Address resumeIndexSlot(genObj,
5588 AbstractGeneratorObject::offsetOfResumeIndexSlot());
5589 Register temp = R1.scratchReg();
5590 if (op == JSOp::InitialYield) {
5591 masm.storeValue(Int32Value(0), resumeIndexSlot);
5592 } else {
5593 jsbytecode* pc = handler.maybePC();
5594 MOZ_ASSERT(pc, "compiler-only code never has a null pc");
5595 masm.move32(Imm32(GET_RESUMEINDEX(pc)), temp);
5596 masm.storeValue(JSVAL_TYPE_INT32, temp, resumeIndexSlot);
// Save the current environment chain into the generator's env-chain slot,
// with a pre-barrier on the old slot value.
5599 Register envObj = R0.scratchReg();
5600 Address envChainSlot(
5601 genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5602 masm.loadPtr(frame.addressOfEnvironmentChain(), envObj);
5603 masm.guardedCallPreBarrierAnyZone(envChainSlot, MIRType::Value, temp);
5604 masm.storeValue(JSVAL_TYPE_OBJECT, envObj, envChainSlot);
// Post-barrier: needed only when a tenured generator now references a
// nursery-allocated environment object. Skip when the generator itself is
// in the nursery or the env object is not.
5606 Label skipBarrier;
5607 masm.branchPtrInNurseryChunk(Assembler::Equal, genObj, temp, &skipBarrier);
5608 masm.branchPtrInNurseryChunk(Assembler::NotEqual, envObj, temp,
5609 &skipBarrier);
5610 MOZ_ASSERT(genObj == R2.scratchReg());
5611 masm.call(&postBarrierSlot_);
5612 masm.bind(&skipBarrier);
5613 } else {
5614 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
5615 computeFrameSize(R0.scratchReg());
5617 prepareVMCall();
5618 pushBytecodePCArg();
5619 pushArg(R0.scratchReg());
5620 pushArg(R1.scratchReg());
5621 pushArg(genObj);
5623 using Fn = bool (*)(JSContext*, HandleObject, BaselineFrame*, uint32_t,
5624 const jsbytecode*);
5625 if (!callVM<Fn, jit::NormalSuspend>()) {
5626 return false;
5630 masm.loadValue(frame.addressOfStackValue(-1), JSReturnOperand);
5631 if (!emitReturn()) {
5632 return false;
5635 // Three values are pushed onto the stack when resuming the generator,
5636 // replacing the one slot that holds the return value.
5637 frame.incStackDepth(2);
5638 return true;
// JSOp::InitialYield: thin wrapper around the shared suspend implementation.
5641 template <typename Handler>
5642 bool BaselineCodeGen<Handler>::emit_InitialYield() {
5643 return emitSuspend(JSOp::InitialYield);
// JSOp::Yield: thin wrapper around the shared suspend implementation.
5646 template <typename Handler>
5647 bool BaselineCodeGen<Handler>::emit_Yield() {
5648 return emitSuspend(JSOp::Yield);
// JSOp::Await: thin wrapper around the shared suspend implementation.
5651 template <typename Handler>
5652 bool BaselineCodeGen<Handler>::emit_Await() {
5653 return emitSuspend(JSOp::Await);
// Compiler specialization: the debuggee-ness decision is made at compile
// time, so the |ifDebuggee| path is emitted unconditionally when compiling
// with debug instrumentation and the scratch register is unused.
5656 template <>
5657 template <typename F>
5658 bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(
5659 const F& ifDebuggee, Register) {
5660 if (handler.compileDebugInstrumentation()) {
5661 return ifDebuggee();
5663 return true;
// Interpreter specialization: emits a runtime check. A toggled jump (flipped
// when debug instrumentation is (de)activated) skips everything by default;
// when enabled, the current Realm's debug-mode bits are tested before running
// the |ifDebuggee| code.
5666 template <>
5667 template <typename F>
5668 bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation(
5669 const F& ifDebuggee, Register scratch) {
5670 // Note that we can't use emitDebugInstrumentation here because the frame's
5671 // DEBUGGEE flag hasn't been initialized yet.
5673 // If the current Realm is not a debuggee we're done.
5674 Label done;
5675 CodeOffset toggleOffset = masm.toggledJump(&done);
5676 if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
5677 return false;
5679 masm.loadPtr(AbsoluteAddress(cx->addressOfRealm()), scratch);
5680 masm.branchTest32(Assembler::Zero,
5681 Address(scratch, Realm::offsetOfDebugModeBits()),
5682 Imm32(Realm::debugModeIsDebuggeeBit()), &done);
5684 if (!ifDebuggee()) {
5685 return false;
5688 masm.bind(&done);
5689 return true;
// JSOp::AfterYield: behaves as a jump target, then (when the frame is a
// debuggee) notifies the debugger via the jit::DebugAfterYield VM call.
5692 template <typename Handler>
5693 bool BaselineCodeGen<Handler>::emit_AfterYield() {
5694 if (!emit_JumpTarget()) {
5695 return false;
5698 auto ifDebuggee = [this]() {
5699 frame.assertSyncedStack();
5700 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
5701 prepareVMCall();
5702 pushArg(R0.scratchReg());
// Use a dedicated RetAddrEntry kind so DebugModeOSR can identify this call.
5704 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugAfterYield;
5706 using Fn = bool (*)(JSContext*, BaselineFrame*);
5707 if (!callVM<Fn, jit::DebugAfterYield>(kind)) {
5708 return false;
5711 return true;
5713 return emitAfterYieldDebugInstrumentation(ifDebuggee, R0.scratchReg());
// JSOp::FinalYieldRval: close the generator via the jit::FinalSuspend VM
// call, then return the frame's return value to the caller.
5716 template <typename Handler>
5717 bool BaselineCodeGen<Handler>::emit_FinalYieldRval() {
5718 // Store generator in R0.
5719 frame.popRegsAndSync(1);
5720 masm.unboxObject(R0, R0.scratchReg());
5722 prepareVMCall();
5723 pushBytecodePCArg();
5724 pushArg(R0.scratchReg());
5726 using Fn = bool (*)(JSContext*, HandleObject, const jsbytecode*);
5727 if (!callVM<Fn, jit::FinalSuspend>()) {
5728 return false;
5731 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
5732 return emitReturn();
// Compiler specialization: jump to the Baseline Interpreter's shared
// interpretOp trampoline address in the JitRuntime.
5735 template <>
5736 void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() {
5737 TrampolinePtr code =
5738 cx->runtime()->jitRuntime()->baselineInterpreter().interpretOpAddr();
5739 masm.jump(code);
// Interpreter specialization: jump to the local interpretOp label because the
// code is being generated into the same trampoline.
5742 template <>
5743 void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() {
5744 masm.jump(handler.interpretOpLabel());
// Jump into generator code at the given resume index: use the script's
// BaselineScript resume entry when one exists, otherwise fall back to the
// Baseline Interpreter after marking the frame as running in the interpreter.
// |script|, |resumeIndex| and |scratch| are all clobbered.
5747 template <typename Handler>
5748 bool BaselineCodeGen<Handler>::emitEnterGeneratorCode(Register script,
5749 Register resumeIndex,
5750 Register scratch) {
5751 // Resume in either the BaselineScript (if present) or Baseline Interpreter.
5753 static_assert(BaselineDisabledScript == 0x1,
5754 "Comparison below requires specific sentinel encoding");
5756 // Initialize the icScript slot in the baseline frame.
5757 masm.loadJitScript(script, scratch);
5758 masm.computeEffectiveAddress(Address(scratch, JitScript::offsetOfICScript()),
5759 scratch);
5760 Address icScriptAddr(FramePointer, BaselineFrame::reverseOffsetOfICScript());
5761 masm.storePtr(scratch, icScriptAddr);
// A baselineScript pointer <= the disabled sentinel means "no baseline code".
5763 Label noBaselineScript;
5764 masm.loadJitScript(script, scratch);
5765 masm.loadPtr(Address(scratch, JitScript::offsetOfBaselineScript()), scratch);
5766 masm.branchPtr(Assembler::BelowOrEqual, scratch,
5767 ImmPtr(BaselineDisabledScriptPtr), &noBaselineScript);
// Load resumeEntries[resumeIndex] from the BaselineScript and jump there.
5769 masm.load32(Address(scratch, BaselineScript::offsetOfResumeEntriesOffset()),
5770 script);
5771 masm.addPtr(scratch, script);
5772 masm.loadPtr(
5773 BaseIndex(script, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
5774 scratch);
5775 masm.jump(scratch);
5777 masm.bind(&noBaselineScript);
5779 // Initialize interpreter frame fields.
5780 Address flagsAddr(FramePointer, BaselineFrame::reverseOffsetOfFlags());
5781 Address scriptAddr(FramePointer,
5782 BaselineFrame::reverseOffsetOfInterpreterScript());
5783 masm.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER), flagsAddr);
5784 masm.storePtr(script, scriptAddr);
5786 // Initialize pc and jump to it.
5787 emitInterpJumpToResumeEntry(script, resumeIndex, scratch);
5788 return true;
// JSOp::Resume: resume a suspended generator. The operands on the stack are
// (from the bottom) the generator object, the argument value, and the resume
// kind. This builds a fresh BaselineJS frame for the generator callee
// (undefined formals/|this|, zero argc), restores the generator's saved
// environment chain, arguments object and expression-stack storage, pushes
// the arg/generator/resumeKind values, and jumps into the generator's code
// via emitEnterGeneratorCode. If the callee has no JitScript, the resume is
// delegated to the C++ interpreter through jit::InterpretResume. Control
// returns to |returnTarget| when the generator suspends or finishes; the
// three operand slots are then replaced by the result value.
5791 template <typename Handler>
5792 bool BaselineCodeGen<Handler>::emit_Resume() {
5793 frame.syncStack(0);
5794 masm.assertStackAlignment(sizeof(Value), 0);
5796 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
5797 MOZ_ASSERT(!regs.has(FramePointer));
5798 if (HasInterpreterPCReg()) {
5799 regs.take(InterpreterPCReg);
5802 saveInterpreterPCReg();
5804 // Load generator object.
5805 Register genObj = regs.takeAny();
5806 masm.unboxObject(frame.addressOfStackValue(-3), genObj);
5808 // Load callee.
5809 Register callee = regs.takeAny();
5810 masm.unboxObject(
5811 Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), callee);
5813 // Save a pointer to the JSOp::Resume operand stack Values.
5814 Register callerStackPtr = regs.takeAny();
5815 masm.computeEffectiveAddress(frame.addressOfStackValue(-1), callerStackPtr);
5817 // Branch to |interpret| to resume the generator in the C++ interpreter if the
5818 // script does not have a JitScript.
5819 Label interpret;
5820 Register scratch1 = regs.takeAny();
5821 masm.loadPrivate(Address(callee, JSFunction::offsetOfJitInfoOrScript()),
5822 scratch1);
5823 masm.branchIfScriptHasNoJitScript(scratch1, &interpret);
5825 // Push |undefined| for all formals.
5826 Register scratch2 = regs.takeAny();
5827 Label loop, loopDone;
5828 masm.loadFunctionArgCount(callee, scratch2);
5830 static_assert(sizeof(Value) == 8);
5831 static_assert(JitStackAlignment == 16 || JitStackAlignment == 8);
5832 // If JitStackValueAlignment == 1, then we were already correctly aligned on
5833 // entry, as guaranteed by the assertStackAlignment at the entry to this
5834 // function.
5835 if (JitStackValueAlignment > 1) {
5836 Register alignment = regs.takeAny();
5837 masm.moveStackPtrTo(alignment);
5838 masm.alignJitStackBasedOnNArgs(scratch2, false);
5840 // Compute alignment adjustment.
5841 masm.subStackPtrFrom(alignment);
5843 // Some code, like BaselineFrame::trace, will inspect the whole range of
5844 // the stack frame. In order to ensure that garbage data left behind from
5845 // previous activations doesn't confuse other machinery, we zero out the
5846 // alignment bytes.
5847 Label alignmentZero;
5848 masm.branchPtr(Assembler::Equal, alignment, ImmWord(0), &alignmentZero);
5850 // Since we know prior to the stack alignment that the stack was 8 byte
5851 // aligned, and JitStackAlignment is 8 or 16 bytes, if we are doing an
5852 // alignment then we -must- have aligned by subtracting 8 bytes from
5853 // the stack pointer.
5855 // So we can freely store a valid double here.
5856 masm.storeValue(DoubleValue(0), Address(masm.getStackPointer(), 0));
5857 masm.bind(&alignmentZero);
// scratch2 holds the formal-arg count; push |undefined| that many times.
5860 masm.branchTest32(Assembler::Zero, scratch2, scratch2, &loopDone);
5861 masm.bind(&loop);
5863 masm.pushValue(UndefinedValue());
5864 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch2, &loop);
5866 masm.bind(&loopDone);
5868 // Push |undefined| for |this|.
5869 masm.pushValue(UndefinedValue());
5871 #ifdef DEBUG
5872 // Update BaselineFrame debugFrameSize field.
5873 masm.mov(FramePointer, scratch2);
5874 masm.subStackPtrFrom(scratch2);
5875 masm.store32(scratch2, frame.addressOfDebugFrameSize());
5876 #endif
5878 masm.PushCalleeToken(callee, /* constructing = */ false);
5879 masm.pushFrameDescriptorForJitCall(FrameType::BaselineJS, /* argc = */ 0);
5881 // PushCalleeToken bumped framePushed. Reset it.
5882 MOZ_ASSERT(masm.framePushed() == sizeof(uintptr_t));
5883 masm.setFramePushed(0);
5885 regs.add(callee);
5887 // Push a fake return address on the stack. We will resume here when the
5888 // generator returns.
5889 Label genStart, returnTarget;
5890 #ifdef JS_USE_LINK_REGISTER
5891 masm.call(&genStart);
5892 #else
5893 masm.callAndPushReturnAddress(&genStart);
5894 #endif
5896 // Record the return address so the return offset -> pc mapping works.
5897 if (!handler.recordCallRetAddr(cx, RetAddrEntry::Kind::IC,
5898 masm.currentOffset())) {
5899 return false;
5902 masm.jump(&returnTarget);
5903 masm.bind(&genStart);
5904 #ifdef JS_USE_LINK_REGISTER
5905 masm.pushReturnAddress();
5906 #endif
5908 // Construct BaselineFrame.
5909 masm.push(FramePointer);
5910 masm.moveStackPtrTo(FramePointer);
5912 // If profiler instrumentation is on, update lastProfilingFrame on
5913 // current JitActivation
5915 Register scratchReg = scratch2;
5916 Label skip;
5917 AbsoluteAddress addressOfEnabled(
5918 cx->runtime()->geckoProfiler().addressOfEnabled());
5919 masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skip);
5920 masm.loadJSContext(scratchReg);
5921 masm.loadPtr(Address(scratchReg, JSContext::offsetOfProfilingActivation()),
5922 scratchReg);
5923 masm.storePtr(
5924 FramePointer,
5925 Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()));
5926 masm.bind(&skip);
5929 masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
5930 masm.assertStackAlignment(sizeof(Value), 0);
5932 // Store flags and env chain.
5933 masm.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
5934 masm.unboxObject(
5935 Address(genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
5936 scratch2);
5937 masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
5939 // Store the arguments object if there is one.
5940 Label noArgsObj;
5941 Address argsObjSlot(genObj, AbstractGeneratorObject::offsetOfArgsObjSlot());
5942 masm.fallibleUnboxObject(argsObjSlot, scratch2, &noArgsObj);
5944 masm.storePtr(scratch2, frame.addressOfArgsObj());
5945 masm.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ), frame.addressOfFlags());
5947 masm.bind(&noArgsObj);
5949 // Push locals and expression slots if needed.
5950 Label noStackStorage;
5951 Address stackStorageSlot(genObj,
5952 AbstractGeneratorObject::offsetOfStackStorageSlot());
5953 masm.fallibleUnboxObject(stackStorageSlot, scratch2, &noStackStorage);
// Copy each stored Value onto the stack and clear the storage array's
// initialized length so it no longer keeps those Values alive; the
// pre-barrier per element keeps incremental GC correct.
5955 Register initLength = regs.takeAny();
5956 masm.loadPtr(Address(scratch2, NativeObject::offsetOfElements()), scratch2);
5957 masm.load32(Address(scratch2, ObjectElements::offsetOfInitializedLength()),
5958 initLength);
5959 masm.store32(
5960 Imm32(0),
5961 Address(scratch2, ObjectElements::offsetOfInitializedLength()));
5963 Label loop, loopDone;
5964 masm.branchTest32(Assembler::Zero, initLength, initLength, &loopDone);
5965 masm.bind(&loop);
5967 masm.pushValue(Address(scratch2, 0));
5968 masm.guardedCallPreBarrierAnyZone(Address(scratch2, 0), MIRType::Value,
5969 scratch1);
5970 masm.addPtr(Imm32(sizeof(Value)), scratch2);
5971 masm.branchSub32(Assembler::NonZero, Imm32(1), initLength, &loop);
5973 masm.bind(&loopDone);
5974 regs.add(initLength);
5977 masm.bind(&noStackStorage);
5979 // Push arg, generator, resumeKind stack Values, in that order.
5980 masm.pushValue(Address(callerStackPtr, sizeof(Value)));
5981 masm.pushValue(JSVAL_TYPE_OBJECT, genObj);
5982 masm.pushValue(Address(callerStackPtr, 0));
5984 masm.switchToObjectRealm(genObj, scratch2);
5986 // Load script in scratch1.
5987 masm.unboxObject(
5988 Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1);
5989 masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
5990 scratch1);
5992 // Load resume index in scratch2 and mark generator as running.
5993 Address resumeIndexSlot(genObj,
5994 AbstractGeneratorObject::offsetOfResumeIndexSlot());
5995 masm.unboxInt32(resumeIndexSlot, scratch2);
5996 masm.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING),
5997 resumeIndexSlot);
5999 if (!emitEnterGeneratorCode(scratch1, scratch2, regs.getAny())) {
6000 return false;
6003 // Call into the VM to resume the generator in the C++ interpreter if there's
6004 // no JitScript.
6005 masm.bind(&interpret);
6007 prepareVMCall();
6009 pushArg(callerStackPtr);
6010 pushArg(genObj);
6012 using Fn = bool (*)(JSContext*, HandleObject, Value*, MutableHandleValue);
6013 if (!callVM<Fn, jit::InterpretResume>()) {
6014 return false;
6017 masm.bind(&returnTarget);
6019 // Restore Stack pointer
6020 masm.computeEffectiveAddress(frame.addressOfStackValue(-1),
6021 masm.getStackPointer());
6023 // After the generator returns, we restore the stack pointer, switch back to
6024 // the current realm, push the return value, and we're done.
6025 if (JSScript* script = handler.maybeScript()) {
6026 masm.switchToRealm(script->realm(), R2.scratchReg());
6027 } else {
6028 masm.switchToBaselineFrameRealm(R2.scratchReg());
6030 restoreInterpreterPCReg();
6031 frame.popn(3);
6032 frame.push(R0);
6033 return true;
// JSOp::CheckResumeKind: pop resumeKind (int32) and generator. For
// GeneratorResumeKind::Next nothing happens; for throw/return the
// jit::GeneratorThrowOrReturn VM call handles it (and does not return
// normally for the throw case unless the exception is handled).
6036 template <typename Handler>
6037 bool BaselineCodeGen<Handler>::emit_CheckResumeKind() {
6038 // Load resumeKind in R1, generator in R0.
6039 frame.popRegsAndSync(2);
6041 #ifdef DEBUG
6042 Label ok;
6043 masm.branchTestInt32(Assembler::Equal, R1, &ok);
6044 masm.assumeUnreachable("Expected int32 resumeKind");
6045 masm.bind(&ok);
6046 #endif
6048 // If resumeKind is 'next' we don't have to do anything.
6049 Label done;
6050 masm.unboxInt32(R1, R1.scratchReg());
6051 masm.branch32(Assembler::Equal, R1.scratchReg(),
6052 Imm32(int32_t(GeneratorResumeKind::Next)), &done);
6054 prepareVMCall();
6056 pushArg(R1.scratchReg()); // resumeKind
6058 masm.loadValue(frame.addressOfStackValue(-1), R2);
6059 pushArg(R2); // arg
6061 masm.unboxObject(R0, R0.scratchReg());
6062 pushArg(R0.scratchReg()); // genObj
6064 masm.loadBaselineFramePtr(FramePointer, R2.scratchReg());
6065 pushArg(R2.scratchReg()); // frame
6067 using Fn = bool (*)(JSContext*, BaselineFrame*,
6068 Handle<AbstractGeneratorObject*>, HandleValue, int32_t);
6069 if (!callVM<Fn, jit::GeneratorThrowOrReturn>()) {
6070 return false;
6073 masm.bind(&done);
6074 return true;
// Compiler specialization: the resume kind is a bytecode operand known at
// compile time, so push it as a constant int32.
6077 template <>
6078 bool BaselineCompilerCodeGen::emit_ResumeKind() {
6079 GeneratorResumeKind resumeKind = ResumeKindFromPC(handler.pc());
6080 frame.push(Int32Value(int32_t(resumeKind)));
6081 return true;
// Interpreter specialization: read the uint8 operand from the current pc at
// runtime, tag it as int32 and push it.
6084 template <>
6085 bool BaselineInterpreterCodeGen::emit_ResumeKind() {
6086 LoadUint8Operand(masm, R0.scratchReg());
6087 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
6088 frame.push(R0);
6089 return true;
// JSOp::DebugCheckSelfHosted: debug-only VM call validating the value on top
// of the stack; a no-op in release builds. The value is left on the stack.
6092 template <typename Handler>
6093 bool BaselineCodeGen<Handler>::emit_DebugCheckSelfHosted() {
6094 #ifdef DEBUG
6095 frame.syncStack(0);
6097 masm.loadValue(frame.addressOfStackValue(-1), R0);
6099 prepareVMCall();
6100 pushArg(R0);
6102 using Fn = bool (*)(JSContext*, HandleValue);
6103 if (!callVM<Fn, js::Debug_CheckSelfHosted>()) {
6104 return false;
6106 #endif
6107 return true;
// JSOp::IsConstructing: push the JS_IS_CONSTRUCTING sentinel magic value.
6110 template <typename Handler>
6111 bool BaselineCodeGen<Handler>::emit_IsConstructing() {
6112 frame.push(MagicValue(JS_IS_CONSTRUCTING));
6113 return true;
// Compiler specialization: jump targets only need a code-coverage counter
// bump here; label binding is handled by emitBody.
6116 template <>
6117 bool BaselineCompilerCodeGen::emit_JumpTarget() {
6118 MaybeIncrementCodeCoverageCounter(masm, handler.script(), handler.pc());
6119 return true;
// Interpreter specialization: optionally (behind a toggled jump) call the
// code-coverage hook, then recompute frame->interpreterICEntry from the
// icIndex operand so IC lookups after the jump stay in sync with the pc.
6122 template <>
6123 bool BaselineInterpreterCodeGen::emit_JumpTarget() {
6124 Register scratch1 = R0.scratchReg();
6125 Register scratch2 = R1.scratchReg();
6127 Label skipCoverage;
6128 CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
6129 masm.call(handler.codeCoverageAtPCLabel());
6130 masm.bind(&skipCoverage);
6131 if (!handler.codeCoverageOffsets().append(toggleOffset.offset())) {
6132 return false;
6135 // Load icIndex in scratch1.
6136 LoadInt32Operand(masm, scratch1);
6138 // Compute ICEntry* and store to frame->interpreterICEntry.
6139 masm.loadPtr(frame.addressOfICScript(), scratch2);
6140 static_assert(sizeof(ICEntry) == sizeof(uintptr_t));
6141 masm.computeEffectiveAddress(BaseIndex(scratch2, scratch1, ScalePointer,
6142 ICScript::offsetOfICEntries()),
6143 scratch2);
6144 masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
6145 return true;
// JSOp::CheckClassHeritage: VM call validating the class heritage value on
// top of the stack; the value itself is left in place.
6148 template <typename Handler>
6149 bool BaselineCodeGen<Handler>::emit_CheckClassHeritage() {
6150 frame.syncStack(0);
6152 // Leave the heritage value on the stack.
6153 masm.loadValue(frame.addressOfStackValue(-1), R0);
6155 prepareVMCall();
6156 pushArg(R0);
6158 using Fn = bool (*)(JSContext*, HandleValue);
6159 return callVM<Fn, js::CheckClassHeritageOperation>();
// JSOp::InitHomeObject: pop the home object and store it in the extended
// function's home-object slot (function stays on the stack). Pre-barrier on
// the old slot value; post-barrier when a tenured function now references a
// nursery cell.
6162 template <typename Handler>
6163 bool BaselineCodeGen<Handler>::emit_InitHomeObject() {
6164 // Load HomeObject in R0.
6165 frame.popRegsAndSync(1);
6167 // Load function off stack
6168 Register func = R2.scratchReg();
6169 masm.unboxObject(frame.addressOfStackValue(-1), func);
6171 masm.assertFunctionIsExtended(func);
6173 // Set HOMEOBJECT_SLOT
6174 Register temp = R1.scratchReg();
6175 Address addr(func, FunctionExtended::offsetOfMethodHomeObjectSlot());
6176 masm.guardedCallPreBarrierAnyZone(addr, MIRType::Value, temp);
6177 masm.storeValue(R0, addr);
6179 Label skipBarrier;
6180 masm.branchPtrInNurseryChunk(Assembler::Equal, func, temp, &skipBarrier);
6181 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
6182 masm.call(&postBarrierSlot_);
6183 masm.bind(&skipBarrier);
6185 return true;
// Compiler specialization: resolve the built-in object at compile time (it is
// a per-global constant) and push it directly.
6188 template <>
6189 bool BaselineCompilerCodeGen::emit_BuiltinObject() {
6190 // Built-in objects are constants for a given global.
6191 auto kind = BuiltinObjectKind(GET_UINT8(handler.pc()));
6192 JSObject* builtin = BuiltinObjectOperation(cx, kind);
6193 if (!builtin) {
6194 return false;
6196 frame.push(ObjectValue(*builtin));
6197 return true;
// Interpreter specialization: resolve the built-in object at runtime via a
// VM call using the uint8 BuiltinObjectKind operand.
6200 template <>
6201 bool BaselineInterpreterCodeGen::emit_BuiltinObject() {
6202 prepareVMCall();
6204 pushUint8BytecodeOperandArg(R0.scratchReg());
6206 using Fn = JSObject* (*)(JSContext*, BuiltinObjectKind);
6207 if (!callVM<Fn, BuiltinObjectOperation>()) {
6208 return false;
6211 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6212 frame.push(R0);
6213 return true;
// JSOp::ObjWithProto: replace the proto value on top of the stack with a new
// plain object created with that prototype (VM call). The proto stays on the
// stack during the call so the decompiler can see it.
6216 template <typename Handler>
6217 bool BaselineCodeGen<Handler>::emit_ObjWithProto() {
6218 frame.syncStack(0);
6220 // Leave the proto value on the stack for the decompiler
6221 masm.loadValue(frame.addressOfStackValue(-1), R0);
6223 prepareVMCall();
6224 pushArg(R0);
6226 using Fn = PlainObject* (*)(JSContext*, HandleValue);
6227 if (!callVM<Fn, js::ObjectWithProtoOperation>()) {
6228 return false;
6231 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6232 frame.pop();
6233 frame.push(R0);
6234 return true;
// JSOp::FunWithProto: pop the proto object, then clone the function GC-thing
// operand with that prototype and the current environment chain (VM call);
// push the resulting function.
6237 template <typename Handler>
6238 bool BaselineCodeGen<Handler>::emit_FunWithProto() {
6239 frame.popRegsAndSync(1);
6241 masm.unboxObject(R0, R0.scratchReg());
6242 masm.loadPtr(frame.addressOfEnvironmentChain(), R1.scratchReg());
6244 prepareVMCall();
6245 pushArg(R0.scratchReg());
6246 pushArg(R1.scratchReg());
6247 pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
6248 R1.scratchReg());
6250 using Fn =
6251 JSObject* (*)(JSContext*, HandleFunction, HandleObject, HandleObject);
6252 if (!callVM<Fn, js::FunWithProtoOperation>()) {
6253 return false;
6256 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6257 frame.push(R0);
6258 return true;
// Compiler specialization of JSOp::ImportMeta: the module object is looked up
// at compile time and baked in as an immediate GC pointer for the VM call.
6261 template <>
6262 bool BaselineCompilerCodeGen::emit_ImportMeta() {
6263 // Note: this is like the interpreter implementation, but optimized a bit by
6264 // calling GetModuleObjectForScript at compile-time.
6266 Rooted<ModuleObject*> module(cx, GetModuleObjectForScript(handler.script()));
6267 MOZ_ASSERT(module);
6269 frame.syncStack(0);
6271 prepareVMCall();
6272 pushArg(ImmGCPtr(module));
6274 using Fn = JSObject* (*)(JSContext*, HandleObject);
6275 if (!callVM<Fn, js::GetOrCreateModuleMetaObject>()) {
6276 return false;
6279 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6280 frame.push(R0);
6281 return true;
// Interpreter specialization of JSOp::ImportMeta: pass the current script to
// the VM, which resolves the module and its meta object at runtime.
6284 template <>
6285 bool BaselineInterpreterCodeGen::emit_ImportMeta() {
6286 prepareVMCall();
6288 pushScriptArg();
6290 using Fn = JSObject* (*)(JSContext*, HandleScript);
6291 if (!callVM<Fn, ImportMetaOperation>()) {
6292 return false;
6295 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6296 frame.push(R0);
6297 return true;
// JSOp::DynamicImport: pop specifier and options values, start the dynamic
// module import via the VM, and push the resulting promise object.
6300 template <typename Handler>
6301 bool BaselineCodeGen<Handler>::emit_DynamicImport() {
6302 // Put specifier into R0 and object value into R1
6303 frame.popRegsAndSync(2);
6305 prepareVMCall();
6306 pushArg(R1);
6307 pushArg(R0);
6308 pushScriptArg();
6310 using Fn = JSObject* (*)(JSContext*, HandleScript, HandleValue, HandleValue);
6311 if (!callVM<Fn, js::StartDynamicModuleImport>()) {
6312 return false;
6315 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6316 frame.push(R0);
6317 return true;
// Compiler specialization: scripts containing JSOp::ForceInterpreter must
// never be baseline-compiled, so reaching this is a compiler bug.
6320 template <>
6321 bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
6322 // Caller is responsible for checking script->hasForceInterpreterOp().
6323 MOZ_CRASH("JSOp::ForceInterpreter in baseline");
// Interpreter specialization: emit an unreachable marker; this op is handled
// before baseline-interpreter code runs.
6326 template <>
6327 bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
6328 masm.assumeUnreachable("JSOp::ForceInterpreter");
6329 return true;
// Emit the frame prologue: push the frame pointer, reserve BaselineFrame,
// initialize frame fields and the environment chain (before anything that can
// GC), run the stack-overflow check, initialize locals, then the debug
// prologue, code-coverage hook, and warm-up counter. |bailoutPrologue_| marks
// where Ion prologue bailouts re-enter.
6332 template <typename Handler>
6333 bool BaselineCodeGen<Handler>::emitPrologue() {
6334 AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitPrologue");
6336 #ifdef JS_USE_LINK_REGISTER
6337 // Push link register from generateEnterJIT()'s BLR.
6338 masm.pushReturnAddress();
6339 #endif
6341 masm.push(FramePointer);
6342 masm.moveStackPtrTo(FramePointer);
6344 masm.checkStackAlignment();
6346 emitProfilerEnterFrame();
6348 masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
6350 // Initialize BaselineFrame. Also handles env chain pre-initialization (in
6351 // case GC gets run during stack check). For global and eval scripts, the env
6352 // chain is in R1. For function scripts, the env chain is in the callee.
6353 emitInitFrameFields(R1.scratchReg());
6355 // When compiling with Debugger instrumentation, set the debuggeeness of
6356 // the frame before any operation that can call into the VM.
6357 if (!emitIsDebuggeeCheck()) {
6358 return false;
6361 // Initialize the env chain before any operation that may call into the VM and
6362 // trigger a GC.
6363 if (!initEnvironmentChain()) {
6364 return false;
6367 // Check for overrecursion before initializing locals.
6368 if (!emitStackCheck()) {
6369 return false;
6372 emitInitializeLocals();
6374 // Ion prologue bailouts will enter here in the Baseline Interpreter.
6375 masm.bind(&bailoutPrologue_);
6377 frame.assertSyncedStack();
6379 if (JSScript* script = handler.maybeScript()) {
6380 masm.debugAssertContextRealm(script->realm(), R1.scratchReg());
6383 if (!emitDebugPrologue()) {
6384 return false;
6387 if (!emitHandleCodeCoverageAtPrologue()) {
6388 return false;
6391 if (!emitWarmUpCounterIncrement()) {
6392 return false;
// Record where the prologue's warm-up check ends for later patching/lookup.
6395 warmUpCheckPrologueOffset_ = CodeOffset(masm.currentOffset());
6397 return true;
// Emit the shared return path: bind |return_|, run the debug epilogue when it
// is not emitted at the return op itself, then tear down the frame and ret.
6400 template <typename Handler>
6401 bool BaselineCodeGen<Handler>::emitEpilogue() {
6402 AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitEpilogue");
6404 masm.bind(&return_);
6406 if (!handler.shouldEmitDebugEpilogueAtReturnOp()) {
6407 if (!emitDebugEpilogue()) {
6408 return false;
6412 emitProfilerExitFrame();
6414 masm.moveToStackPtr(FramePointer);
6415 masm.pop(FramePointer);
6417 masm.ret();
6418 return true;
// Main compiler loop: walk the script's bytecode from start to end, emitting
// native code for each reachable op via the emit_<Op> methods. Handles
// unreachable-op skipping, stack syncing at jump targets and under the
// debugger, resume-offset bookkeeping, and debug traps.
6421 MethodStatus BaselineCompiler::emitBody() {
6422 AutoCreatedBy acb(masm, "BaselineCompiler::emitBody");
6424 JSScript* script = handler.script();
6425 MOZ_ASSERT(handler.pc() == script->code());
6427 mozilla::DebugOnly<jsbytecode*> prevpc = handler.pc();
6429 while (true) {
6430 JSOp op = JSOp(*handler.pc());
6431 JitSpew(JitSpew_BaselineOp, "Compiling op @ %d: %s",
6432 int(script->pcToOffset(handler.pc())), CodeName(op));
6434 BytecodeInfo* info = handler.analysis().maybeInfo(handler.pc());
6436 // Skip unreachable ops.
6437 if (!info) {
6438 // Test if last instructions and stop emitting in that case.
6439 handler.moveToNextPC();
6440 if (handler.pc() >= script->codeEnd()) {
6441 break;
6444 prevpc = handler.pc();
6445 continue;
6448 if (info->jumpTarget) {
6449 // Fully sync the stack if there are incoming jumps.
6450 frame.syncStack(0);
6451 frame.setStackDepth(info->stackDepth);
6452 masm.bind(handler.labelOf(handler.pc()));
6453 } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
6454 // Also fully sync the stack if the debugger is enabled.
6455 frame.syncStack(0);
6456 } else {
6457 // At the beginning of any op, at most the top 2 stack-values are
6458 // unsynced.
6459 if (frame.stackDepth() > 2) {
6460 frame.syncStack(2);
6464 frame.assertValidState(*info);
6466 // If the script has a resume offset for this pc we need to keep track of
6467 // the native code offset.
6468 if (info->hasResumeOffset) {
6469 frame.assertSyncedStack();
6470 uint32_t pcOffset = script->pcToOffset(handler.pc());
6471 uint32_t nativeOffset = masm.currentOffset();
6472 if (!resumeOffsetEntries_.emplaceBack(pcOffset, nativeOffset)) {
6473 ReportOutOfMemory(cx);
6474 return Method_Error;
6478 // Emit traps for breakpoints and step mode.
6479 if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
6480 return Method_Error;
6483 perfSpewer_.recordInstruction(cx, masm, handler.pc(), frame);
6485 #define EMIT_OP(OP, ...) \
6486 case JSOp::OP: { \
6487 AutoCreatedBy acb(masm, "op=" #OP); \
6488 if (MOZ_UNLIKELY(!this->emit_##OP())) return Method_Error; \
6489 } break;
6491 switch (op) {
6492 FOR_EACH_OPCODE(EMIT_OP)
6493 default:
6494 MOZ_CRASH("Unexpected op");
6497 #undef EMIT_OP
6499 MOZ_ASSERT(masm.framePushed() == 0);
6501 // Test if last instructions and stop emitting in that case.
6502 handler.moveToNextPC();
6503 if (handler.pc() >= script->codeEnd()) {
6504 break;
6507 #ifdef DEBUG
6508 prevpc = handler.pc();
6509 #endif
// The last (reachable) op of a script must be a return-style op.
6512 MOZ_ASSERT(JSOp(*prevpc) == JSOp::RetRval || JSOp(*prevpc) == JSOp::Return);
6513 return Method_Compiled;
// Emit a patchable nop that can later be turned into a call to the debug
// trap handler; record its offset so it can be patched when breakpoints or
// step mode are toggled.
6516 bool BaselineInterpreterGenerator::emitDebugTrap() {
6517 CodeOffset offset = masm.nopPatchableToCall();
6518 if (!debugTrapOffsets_.append(offset.offset())) {
6519 ReportOutOfMemory(cx);
6520 return false;
6523 return true;
6526 // Register holding the bytecode pc during dispatch. This exists so the debug
6527 // trap handler can reload the pc into this register when it's done.
// On platforms without a dedicated pc register, R0's scratch register is
// reused for dispatch instead.
6528 static constexpr Register InterpreterPCRegAtDispatch =
6529 HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
// Emits the threaded-dispatch interpreter loop:
//  1. the shared dispatch head (patchable debug trap, opcode load, indirect
//     jump through the op table),
//  2. code for every bytecode op (FOR_EACH_OPCODE), each followed by an
//     epilogue that bumps the pc/IC entry and re-dispatches,
//  3. three external entry points (interpretOp, interpretOp-without-debug-trap
//     for OSR, and the Ion prologue-bailout target),
//  4. a tail-call thunk to the debug trap handler trampoline, and
//  5. the table of per-op code pointers itself.
// Returns false on OOM (failed append or missing trap handler code).
6531 bool BaselineInterpreterGenerator::emitInterpreterLoop() {
6532 AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::emitInterpreterLoop");
6534 Register scratch1 = R0.scratchReg();
6535 Register scratch2 = R1.scratchReg();
6537 // Entry point for interpreting a bytecode op. No registers are live except
6538 // for InterpreterPCReg.
6539 masm.bind(handler.interpretOpWithPCRegLabel());
6541 // Emit a patchable call for debugger breakpoints/stepping.
6542 if (!emitDebugTrap()) {
6543 return false;
6545 Label interpretOpAfterDebugTrap;
6546 masm.bind(&interpretOpAfterDebugTrap);
6548 // Load pc, bytecode op.
6549 Register pcReg = LoadBytecodePC(masm, scratch1);
6550 masm.load8ZeroExtend(Address(pcReg, 0), scratch1);
6552 // Jump to table[op].
// The table base address isn't known yet; emit a patchable move and record
// its offset so generate() can patch in the real address after linking.
6554 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6555 if (!tableLabels_.append(label)) {
6556 return false;
6558 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6559 masm.branchToComputedAddress(pointer);
6562 // At the end of each op, emit code to bump the pc and jump to the
6563 // next op (this is also known as a threaded interpreter).
6564 auto opEpilogue = [&](JSOp op, size_t opLength) -> bool {
6565 MOZ_ASSERT(masm.framePushed() == 0);
6567 if (!BytecodeFallsThrough(op)) {
6568 // Nothing to do.
6569 masm.assumeUnreachable("unexpected fall through");
6570 return true;
6573 // Bump frame->interpreterICEntry if needed.
6574 if (BytecodeOpHasIC(op)) {
6575 frame.bumpInterpreterICEntry();
6578 // Bump bytecode PC.
6579 if (HasInterpreterPCReg()) {
6580 MOZ_ASSERT(InterpreterPCRegAtDispatch == InterpreterPCReg);
6581 masm.addPtr(Imm32(opLength), InterpreterPCReg);
6582 } else {
// No pinned pc register: the pc lives in the frame slot, so load it,
// advance it past this op, and store the updated value back.
6583 MOZ_ASSERT(InterpreterPCRegAtDispatch == scratch1);
6584 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCRegAtDispatch);
6585 masm.addPtr(Imm32(opLength), InterpreterPCRegAtDispatch);
6586 masm.storePtr(InterpreterPCRegAtDispatch, frame.addressOfInterpreterPC());
6589 if (!emitDebugTrap()) {
6590 return false;
6593 // Load the opcode, jump to table[op].
6594 masm.load8ZeroExtend(Address(InterpreterPCRegAtDispatch, 0), scratch1);
6595 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6596 if (!tableLabels_.append(label)) {
6597 return false;
6599 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6600 masm.branchToComputedAddress(pointer);
6601 return true;
6604 // Emit code for each bytecode op.
6605 Label opLabels[JSOP_LIMIT];
6606 #define EMIT_OP(OP, ...) \
6608 AutoCreatedBy acb(masm, "op=" #OP); \
6609 perfSpewer_.recordOffset(masm, JSOp::OP); \
6610 masm.bind(&opLabels[uint8_t(JSOp::OP)]); \
6611 handler.setCurrentOp(JSOp::OP); \
6612 if (!this->emit_##OP()) { \
6613 return false; \
6615 if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) { \
6616 return false; \
6618 handler.resetCurrentOp(); \
6620 FOR_EACH_OPCODE(EMIT_OP)
6621 #undef EMIT_OP
6623 // External entry point to start interpreting bytecode ops. This is used for
6624 // things like exception handling and OSR. DebugModeOSR patches JIT frames to
6625 // return here from the DebugTrapHandler.
6626 masm.bind(handler.interpretOpLabel());
6627 interpretOpOffset_ = masm.currentOffset();
6628 restoreInterpreterPCReg();
6629 masm.jump(handler.interpretOpWithPCRegLabel());
6631 // Second external entry point: this skips the debug trap for the first op
6632 // and is used by OSR.
6633 interpretOpNoDebugTrapOffset_ = masm.currentOffset();
6634 restoreInterpreterPCReg();
6635 masm.jump(&interpretOpAfterDebugTrap);
6637 // External entry point for Ion prologue bailouts.
6638 bailoutPrologueOffset_ = CodeOffset(masm.currentOffset());
6639 restoreInterpreterPCReg();
6640 masm.jump(&bailoutPrologue_);
6642 // Emit debug trap handler code (target of patchable call instructions). This
6643 // is just a tail call to the debug trap handler trampoline code.
6645 JitRuntime* jrt = cx->runtime()->jitRuntime();
6646 JitCode* handlerCode =
6647 jrt->debugTrapHandler(cx, DebugTrapHandlerKind::Interpreter);
6648 if (!handlerCode) {
6649 return false;
6652 debugTrapHandlerOffset_ = masm.currentOffset();
6653 masm.jump(handlerCode);
6656 // Emit the table.
// Align to pointer size so each table entry is a naturally-aligned pointer.
6657 masm.haltingAlign(sizeof(void*));
6659 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
// Keep the assembler from interleaving constant pools or nops into the
// table region; numInstructions is the table size in 32-bit instruction
// units.
6660 size_t numInstructions = JSOP_LIMIT * (sizeof(uintptr_t) / sizeof(uint32_t));
6661 AutoForbidPoolsAndNops afp(&masm, numInstructions);
6662 #endif
6664 tableOffset_ = masm.currentOffset();
// One code pointer per opcode, targeting the label bound in EMIT_OP above.
6666 for (size_t i = 0; i < JSOP_LIMIT; i++) {
6667 const Label& opLabel = opLabels[i];
6668 MOZ_ASSERT(opLabel.bound());
6669 CodeLabel cl;
6670 masm.writeCodePointer(&cl);
6671 cl.target()->bind(opLabel.offset());
6672 masm.addCodeLabel(cl);
6675 return true;
// Emits the two out-of-line code-coverage stubs (bound to the handler's
// codeCoverageAtPrologue / codeCoverageAtPC labels). Each stub saves the
// interpreter pc register, makes an ABI call into the C++ coverage hook
// (HandleCodeCoverageAtPrologue / HandleCodeCoverageAtPC), restores the pc
// register, and returns to the caller.
6678 void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
6679 AutoCreatedBy acb(masm,
6680 "BaselineInterpreterGenerator::"
6681 "emitOutOfLineCodeCoverageInstrumentation");
6683 masm.bind(handler.codeCoverageAtPrologueLabel());
6684 #ifdef JS_USE_LINK_REGISTER
// On link-register platforms the return address must be pushed explicitly.
6685 masm.pushReturnAddress();
6686 #endif
6688 saveInterpreterPCReg();
// void HandleCodeCoverageAtPrologue(BaselineFrame*)
6690 using Fn1 = void (*)(BaselineFrame* frame);
6691 masm.setupUnalignedABICall(R0.scratchReg());
6692 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
6693 masm.passABIArg(R0.scratchReg());
6694 masm.callWithABI<Fn1, HandleCodeCoverageAtPrologue>();
6696 restoreInterpreterPCReg();
6697 masm.ret();
6699 masm.bind(handler.codeCoverageAtPCLabel());
6700 #ifdef JS_USE_LINK_REGISTER
6701 masm.pushReturnAddress();
6702 #endif
6704 saveInterpreterPCReg();
// void HandleCodeCoverageAtPC(BaselineFrame*, jsbytecode*) — also passes the
// current bytecode pc so the hook knows which op was reached.
6706 using Fn2 = void (*)(BaselineFrame* frame, jsbytecode* pc);
6707 masm.setupUnalignedABICall(R0.scratchReg());
6708 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
6709 masm.passABIArg(R0.scratchReg());
6710 Register pcReg = LoadBytecodePC(masm, R2.scratchReg());
6711 masm.passABIArg(pcReg);
6712 masm.callWithABI<Fn2, HandleCodeCoverageAtPC>();
6714 restoreInterpreterPCReg();
6715 masm.ret();
// Top-level driver: emits the whole interpreter (prologue, dispatch loop,
// epilogue, out-of-line slots), links it into a JitCode blob, registers it
// with the profiler's JitCode table, patches the deferred op-table address
// loads, and initializes |interpreter| with the code and all recorded
// offsets. Returns false on OOM.
6718 bool BaselineInterpreterGenerator::generate(BaselineInterpreter& interpreter) {
6719 AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::generate");
6721 perfSpewer_.recordOffset(masm, "Prologue");
6722 if (!emitPrologue()) {
6723 return false;
6726 perfSpewer_.recordOffset(masm, "InterpreterLoop");
6727 if (!emitInterpreterLoop()) {
6728 return false;
6731 perfSpewer_.recordOffset(masm, "Epilogue");
6732 if (!emitEpilogue()) {
6733 return false;
6736 perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
6737 if (!emitOutOfLinePostBarrierSlot()) {
6738 return false;
6741 perfSpewer_.recordOffset(masm, "OOLCodeCoverageInstrumentation");
6742 emitOutOfLineCodeCoverageInstrumentation();
6745 AutoCreatedBy acb(masm, "everything_else");
6746 Linker linker(masm);
6747 if (masm.oom()) {
6748 ReportOutOfMemory(cx);
6749 return false;
6752 JitCode* code = linker.newCode(cx, CodeKind::Other);
6753 if (!code) {
6754 return false;
6757 // Register BaselineInterpreter code with the profiler's JitCode table.
6759 auto entry = MakeJitcodeGlobalEntry<BaselineInterpreterEntry>(
6760 cx, code, code->raw(), code->rawEnd());
6761 if (!entry) {
6762 return false;
6765 JitcodeGlobalTable* globalTable =
6766 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
6767 if (!globalTable->addEntry(std::move(entry))) {
6768 ReportOutOfMemory(cx);
6769 return false;
6772 code->setHasBytecodeMap();
6775 // Patch loads now that we know the tableswitch base address.
// These are the moveNearAddressWithPatch sites recorded in tableLabels_ by
// emitInterpreterLoop; each now gets the final address of the op table.
6776 CodeLocationLabel tableLoc(code, CodeOffset(tableOffset_));
6777 for (CodeOffset off : tableLabels_) {
6778 MacroAssembler::patchNearAddressMove(CodeLocationLabel(code, off),
6779 tableLoc);
6782 perfSpewer_.saveProfile(code);
6784 #ifdef MOZ_VTUNE
6785 vtune::MarkStub(code, "BaselineInterpreter");
6786 #endif
// Hand the linked code and every recorded offset/toggle location over to the
// runtime-owned BaselineInterpreter object.
6788 interpreter.init(
6789 code, interpretOpOffset_, interpretOpNoDebugTrapOffset_,
6790 bailoutPrologueOffset_.offset(),
6791 profilerEnterFrameToggleOffset_.offset(),
6792 profilerExitFrameToggleOffset_.offset(), debugTrapHandlerOffset_,
6793 std::move(handler.debugInstrumentationOffsets()),
6794 std::move(debugTrapOffsets_), std::move(handler.codeCoverageOffsets()),
6795 std::move(handler.icReturnOffsets()), handler.callVMOffsets());
// Enable instrumentation up front if the corresponding features are already
// active for this runtime.
6798 if (cx->runtime()->geckoProfiler().enabled()) {
6799 interpreter.toggleProfilerInstrumentation(true);
6802 if (coverage::IsLCovEnabled()) {
6803 interpreter.toggleCodeCoverageInstrumentationUnchecked(true);
6806 return true;
// Generates the debug trap handler trampoline that patchable call sites
// (emitDebugTrap) jump to. For the Interpreter kind it first fast-paths out
// if the current script has no DebugScript (no breakpoints / not stepping).
// Otherwise it enters a baseline stub frame and calls the HandleDebugTrap VM
// function with the frame and the return address. Returns nullptr on OOM.
6809 JitCode* JitRuntime::generateDebugTrapHandler(JSContext* cx,
6810 DebugTrapHandlerKind kind) {
6811 TempAllocator temp(&cx->tempLifoAlloc());
6812 StackMacroAssembler masm(cx, temp);
6813 AutoCreatedBy acb(masm, "JitRuntime::generateDebugTrapHandler");
// Carve scratch registers out of the set, keeping the frame pointer, the IC
// stub register, and (if pinned) the interpreter pc register untouched.
6815 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
6816 MOZ_ASSERT(!regs.has(FramePointer));
6817 regs.takeUnchecked(ICStubReg);
6818 if (HasInterpreterPCReg()) {
6819 regs.takeUnchecked(InterpreterPCReg);
6821 #ifdef JS_CODEGEN_ARM
6822 regs.takeUnchecked(BaselineSecondScratchReg);
6823 masm.setSecondScratchReg(BaselineSecondScratchReg);
6824 #endif
6825 Register scratch1 = regs.takeAny();
6826 Register scratch2 = regs.takeAny();
6827 Register scratch3 = regs.takeAny();
6829 if (kind == DebugTrapHandlerKind::Interpreter) {
6830 // The interpreter calls this for every script when debugging, so check if
6831 // the script has any breakpoints or is in step mode before calling into
6832 // C++.
6833 Label hasDebugScript;
6834 Address scriptAddr(FramePointer,
6835 BaselineFrame::reverseOffsetOfInterpreterScript());
6836 masm.loadPtr(scriptAddr, scratch1);
6837 masm.branchTest32(Assembler::NonZero,
6838 Address(scratch1, JSScript::offsetOfMutableFlags()),
6839 Imm32(int32_t(JSScript::MutableFlags::HasDebugScript)),
6840 &hasDebugScript);
6841 masm.abiret();
6842 masm.bind(&hasDebugScript);
6844 if (HasInterpreterPCReg()) {
6845 // Update frame's bytecode pc because the debugger depends on it.
6846 Address pcAddr(FramePointer,
6847 BaselineFrame::reverseOffsetOfInterpreterPC());
6848 masm.storePtr(InterpreterPCReg, pcAddr);
6852 // Load the return address in scratch1.
6853 masm.loadAbiReturnAddress(scratch1);
6855 // Load BaselineFrame pointer in scratch2.
6856 masm.loadBaselineFramePtr(FramePointer, scratch2);
6858 // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
6859 // the stub frame has a nullptr ICStub pointer, since this pointer is marked
6860 // during GC.
6861 masm.movePtr(ImmPtr(nullptr), ICStubReg);
6862 EmitBaselineEnterStubFrame(masm, scratch3);
6864 using Fn = bool (*)(JSContext*, BaselineFrame*, const uint8_t*);
6865 VMFunctionId id = VMFunctionToId<Fn, jit::HandleDebugTrap>::id;
6866 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
// Push the VM call's arguments (return address, frame pointer) and call the
// wrapper through the baseline call-VM protocol.
6868 masm.push(scratch1);
6869 masm.push(scratch2);
6870 EmitBaselineCallVM(code, masm);
6872 EmitBaselineLeaveStubFrame(masm);
6874 if (kind == DebugTrapHandlerKind::Interpreter) {
6875 // We have to reload the bytecode pc register.
6876 Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
6877 masm.loadPtr(pcAddr, InterpreterPCRegAtDispatch);
6879 masm.abiret();
6881 Linker linker(masm);
6882 JitCode* handlerCode = linker.newCode(cx, CodeKind::Other);
6883 if (!handlerCode) {
6884 return nullptr;
6887 CollectPerfSpewerJitCodeProfile(handlerCode, "DebugTrapHandler");
6889 #ifdef MOZ_VTUNE
6890 vtune::MarkStub(handlerCode, "DebugTrapHandler");
6891 #endif
6893 return handlerCode;
6896 } // namespace jit
6897 } // namespace js