1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/BaselineCodeGen.h"
9 #include "mozilla/Casting.h"
11 #include "jit/BaselineIC.h"
12 #include "jit/BaselineJIT.h"
13 #include "jit/FixedList.h"
14 #include "jit/IonAnalysis.h"
15 #include "jit/JitcodeMap.h"
16 #include "jit/JitSpewer.h"
17 #include "jit/Linker.h"
18 #ifdef JS_ION_PERF
19 # include "jit/PerfSpewer.h"
20 #endif
21 #include "jit/SharedICHelpers.h"
22 #include "jit/TrialInlining.h"
23 #include "jit/VMFunctions.h"
24 #include "js/UniquePtr.h"
25 #include "vm/AsyncFunction.h"
26 #include "vm/AsyncIteration.h"
27 #include "vm/BuiltinObjectKind.h"
28 #include "vm/EnvironmentObject.h"
29 #include "vm/FunctionFlags.h" // js::FunctionFlags
30 #include "vm/Interpreter.h"
31 #include "vm/JSFunction.h"
32 #include "vm/TraceLogging.h"
33 #ifdef MOZ_VTUNE
34 # include "vtune/VTuneWrapper.h"
35 #endif
37 #include "debugger/DebugAPI-inl.h"
38 #include "jit/BaselineFrameInfo-inl.h"
39 #include "jit/MacroAssembler-inl.h"
40 #include "jit/SharedICHelpers-inl.h"
41 #include "jit/VMFunctionList-inl.h"
42 #include "vm/Interpreter-inl.h"
43 #include "vm/JSScript-inl.h"
44 #include "vm/NativeObject-inl.h"
45 #include "vm/TypeInference-inl.h"
47 using namespace js;
48 using namespace js::jit;
50 using JS::TraceKind;
52 using mozilla::AssertedCast;
53 using mozilla::Maybe;
55 namespace js {
57 class PlainObject;
59 namespace jit {
61 BaselineCompilerHandler::BaselineCompilerHandler(JSContext* cx,
62 MacroAssembler& masm,
63 TempAllocator& alloc,
64 JSScript* script)
65 : frame_(script, masm),
66 alloc_(alloc),
67 analysis_(alloc, script),
68 #ifdef DEBUG
69 masm_(masm),
70 #endif
71 script_(script),
72 pc_(script->code()),
73 icEntryIndex_(0),
74 compileDebugInstrumentation_(script->isDebuggee()),
75 ionCompileable_(IsIonEnabled(cx) && CanIonCompileScript(cx, script)) {
78 BaselineInterpreterHandler::BaselineInterpreterHandler(JSContext* cx,
79 MacroAssembler& masm)
80 : frame_(masm) {}
82 template <typename Handler>
83 template <typename... HandlerArgs>
84 BaselineCodeGen<Handler>::BaselineCodeGen(JSContext* cx, HandlerArgs&&... args)
85 : handler(cx, masm, std::forward<HandlerArgs>(args)...),
86 cx(cx),
87 frame(handler.frame()),
88 traceLoggerToggleOffsets_(cx) {}
90 BaselineCompiler::BaselineCompiler(JSContext* cx, TempAllocator& alloc,
91 JSScript* script)
92 : BaselineCodeGen(cx, /* HandlerArgs = */ alloc, script),
93 profilerPushToggleOffset_(),
94 traceLoggerScriptTextIdOffset_() {
95 #ifdef JS_CODEGEN_NONE
96 MOZ_CRASH();
97 #endif
100 BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext* cx)
101 : BaselineCodeGen(cx /* no handlerArgs */) {}
103 bool BaselineCompilerHandler::init(JSContext* cx) {
104 if (!analysis_.init(alloc_)) {
105 return false;
108 uint32_t len = script_->length();
110 if (!labels_.init(alloc_, len)) {
111 return false;
114 for (size_t i = 0; i < len; i++) {
115 new (&labels_[i]) Label();
118 if (!frame_.init(alloc_)) {
119 return false;
122 return true;
125 bool BaselineCompiler::init() {
126 if (!handler.init(cx)) {
127 return false;
130 return true;
133 bool BaselineCompilerHandler::recordCallRetAddr(JSContext* cx,
134 RetAddrEntry::Kind kind,
135 uint32_t retOffset) {
136 uint32_t pcOffset = script_->pcToOffset(pc_);
138 // Entries must be sorted by pcOffset for binary search to work.
139 // See BaselineScript::retAddrEntryFromPCOffset.
140 MOZ_ASSERT_IF(!retAddrEntries_.empty(),
141 retAddrEntries_.back().pcOffset() <= pcOffset);
143 // Similarly, entries must be sorted by return offset and this offset must be
144 // unique. See BaselineScript::retAddrEntryFromReturnOffset.
145 MOZ_ASSERT_IF(!retAddrEntries_.empty() && !masm_.oom(),
146 retAddrEntries_.back().returnOffset().offset() < retOffset);
148 if (!retAddrEntries_.emplaceBack(pcOffset, kind, CodeOffset(retOffset))) {
149 ReportOutOfMemory(cx);
150 return false;
153 return true;
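// The interpreter's generated code is shared by all scripts, so rather than
// keeping a per-pc RetAddrEntry list it only remembers the return offsets of
// the debug-related VM calls handled below; other kinds need no bookkeeping.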
156 bool BaselineInterpreterHandler::recordCallRetAddr(JSContext* cx,
157 RetAddrEntry::Kind kind,
158 uint32_t retOffset) {
159 switch (kind) {
160 case RetAddrEntry::Kind::DebugPrologue:
161 MOZ_ASSERT(callVMOffsets_.debugPrologueOffset == 0,
162 "expected single DebugPrologue call");
163 callVMOffsets_.debugPrologueOffset = retOffset;
164 break;
165 case RetAddrEntry::Kind::DebugEpilogue:
166 MOZ_ASSERT(callVMOffsets_.debugEpilogueOffset == 0,
167 "expected single DebugEpilogue call");
168 callVMOffsets_.debugEpilogueOffset = retOffset;
169 break;
170 case RetAddrEntry::Kind::DebugAfterYield:
171 MOZ_ASSERT(callVMOffsets_.debugAfterYieldOffset == 0,
172 "expected single DebugAfterYield call");
173 callVMOffsets_.debugAfterYieldOffset = retOffset;
174 break;
175 default:
176 break;
179 return true;
182 bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
183 JSContext* cx, CodeOffset offset) {
184 if (!debugInstrumentationOffsets_.append(offset.offset())) {
185 ReportOutOfMemory(cx);
186 return false;
188 return true;
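// Compile the script's bytecode: emit the prologue, body and epilogue, link
// the generated code, then create a BaselineScript and register it with the
// JitcodeGlobalTable so the profiler can map native code back to bytecode.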
191 MethodStatus BaselineCompiler::compile() {
192 JSScript* script = handler.script();
193 JitSpew(JitSpew_BaselineScripts, "Baseline compiling script %s:%u:%u (%p)",
194 script->filename(), script->lineno(), script->column(), script);
196 JitSpew(JitSpew_Codegen, "# Emitting baseline code for script %s:%u:%u",
197 script->filename(), script->lineno(), script->column());
199 TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);
200 TraceLoggerEvent scriptEvent(TraceLogger_AnnotateScripts, script);
201 AutoTraceLog logScript(logger, scriptEvent);
202 AutoTraceLog logCompile(logger, TraceLogger_BaselineCompilation);
204 AutoKeepJitScripts keepJitScript(cx);
205 if (!script->ensureHasJitScript(cx, keepJitScript)) {
206 return Method_Error;
209   // When code coverage is only enabled for optimizations, or when a Debugger
210   // has set the collectCoverageInfo flag, we have to create the ScriptCounts
211   // if they do not exist.
212 if (!script->hasScriptCounts() && cx->realm()->collectCoverage()) {
213 if (!script->initScriptCounts(cx)) {
214 return Method_Error;
218 // Pin analysis info during compilation.
219 AutoEnterAnalysis autoEnterAnalysis(cx);
221 MOZ_ASSERT(!script->hasBaselineScript());
223 if (!emitPrologue()) {
224 return Method_Error;
227 MethodStatus status = emitBody();
228 if (status != Method_Compiled) {
229 return status;
232 if (!emitEpilogue()) {
233 return Method_Error;
236 if (!emitOutOfLinePostBarrierSlot()) {
237 return Method_Error;
240 Linker linker(masm);
241 if (masm.oom()) {
242 ReportOutOfMemory(cx);
243 return Method_Error;
246 JitCode* code = linker.newCode(cx, CodeKind::Baseline);
247 if (!code) {
248 return Method_Error;
251 UniquePtr<BaselineScript> baselineScript(
252 BaselineScript::New(
253 cx, warmUpCheckPrologueOffset_.offset(),
254 profilerEnterFrameToggleOffset_.offset(),
255 profilerExitFrameToggleOffset_.offset(),
256 handler.retAddrEntries().length(), handler.osrEntries().length(),
257 debugTrapEntries_.length(), script->resumeOffsets().size(),
258 traceLoggerToggleOffsets_.length()),
259 JS::DeletePolicy<BaselineScript>(cx->runtime()));
260 if (!baselineScript) {
261 ReportOutOfMemory(cx);
262 return Method_Error;
265 baselineScript->setMethod(code);
267 JitSpew(JitSpew_BaselineScripts,
268 "Created BaselineScript %p (raw %p) for %s:%u:%u",
269 (void*)baselineScript.get(), (void*)code->raw(), script->filename(),
270 script->lineno(), script->column());
272 baselineScript->copyRetAddrEntries(handler.retAddrEntries().begin());
273 baselineScript->copyOSREntries(handler.osrEntries().begin());
274 baselineScript->copyDebugTrapEntries(debugTrapEntries_.begin());
276 // If profiler instrumentation is enabled, toggle instrumentation on.
277 if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
278 cx->runtime())) {
279 baselineScript->toggleProfilerInstrumentation(true);
282 #ifdef JS_TRACE_LOGGING
283 // Initialize the tracelogger instrumentation.
284 if (JS::TraceLoggerSupported()) {
285 baselineScript->initTraceLogger(script, traceLoggerToggleOffsets_);
287 #endif
289 // Compute native resume addresses for the script's resume offsets.
290 baselineScript->computeResumeNativeOffsets(script, resumeOffsetEntries_);
292 if (compileDebugInstrumentation()) {
293 baselineScript->setHasDebugInstrumentation();
296   // Always register a native => bytecode mapping entry, since the profiler
297   // can be turned on while baseline jitcode is on the stack, and baseline
298   // jitcode cannot be invalidated.
300 JitSpew(JitSpew_Profiling,
301 "Added JitcodeGlobalEntry for baseline script %s:%u:%u (%p)",
302 script->filename(), script->lineno(), script->column(),
303 baselineScript.get());
305 // Generate profiling string.
306 UniqueChars str = GeckoProfilerRuntime::allocProfileString(cx, script);
307 if (!str) {
308 return Method_Error;
311 JitcodeGlobalEntry::BaselineEntry entry;
312 entry.init(code, code->raw(), code->rawEnd(), script, str.release());
314 JitcodeGlobalTable* globalTable =
315 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
316 if (!globalTable->addEntry(entry)) {
317 entry.destroy();
318 ReportOutOfMemory(cx);
319 return Method_Error;
322 // Mark the jitcode as having a bytecode map.
323 code->setHasBytecodeMap();
326 script->jitScript()->setBaselineScript(script, baselineScript.release());
328 #ifdef JS_ION_PERF
329 writePerfSpewerBaselineProfile(script, code);
330 #endif
332 #ifdef MOZ_VTUNE
333 vtune::MarkScript(code, script, "baseline");
334 #endif
336 return Method_Compiled;
339 // On most platforms we use a dedicated bytecode PC register to avoid many
340 // dependent loads and stores for sequences of simple bytecode ops. This
341 // register must be saved/restored around VM and IC calls.
343 // On 32-bit x86 we don't have enough registers for this (because R0-R2 require
344 // 6 registers) so there we always store the pc on the frame.
345 static constexpr bool HasInterpreterPCReg() {
346 return InterpreterPCReg != InvalidReg;
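// Returns the register holding the current bytecode pc: the dedicated pc
// register when available, otherwise the pc is loaded from the frame into
// |scratch|.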
349 static Register LoadBytecodePC(MacroAssembler& masm, Register scratch) {
350 if (HasInterpreterPCReg()) {
351 return InterpreterPCReg;
354 Address pcAddr(BaselineFrameReg,
355 BaselineFrame::reverseOffsetOfInterpreterPC());
356 masm.loadPtr(pcAddr, scratch);
357 return scratch;
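// The helpers below load an immediate operand of the current bytecode op into
// |dest|. Operand bytes start right after the opcode byte the pc points at,
// hence the sizeof(jsbytecode) offset.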
360 static void LoadInt8Operand(MacroAssembler& masm, Register dest) {
361 Register pc = LoadBytecodePC(masm, dest);
362 masm.load8SignExtend(Address(pc, sizeof(jsbytecode)), dest);
365 static void LoadUint8Operand(MacroAssembler& masm, Register dest) {
366 Register pc = LoadBytecodePC(masm, dest);
367 masm.load8ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
370 static void LoadUint16Operand(MacroAssembler& masm, Register dest) {
371 Register pc = LoadBytecodePC(masm, dest);
372 masm.load16ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
375 static void LoadInt32Operand(MacroAssembler& masm, Register dest) {
376 Register pc = LoadBytecodePC(masm, dest);
377 masm.load32(Address(pc, sizeof(jsbytecode)), dest);
380 static void LoadInt32OperandSignExtendToPtr(MacroAssembler& masm, Register pc,
381 Register dest) {
382 masm.load32SignExtendToPtr(Address(pc, sizeof(jsbytecode)), dest);
385 static void LoadUint24Operand(MacroAssembler& masm, size_t offset,
386 Register dest) {
387   // Load the opcode and operand, then shift right to discard the opcode byte.
388 Register pc = LoadBytecodePC(masm, dest);
389 masm.load32(Address(pc, offset), dest);
390 masm.rshift32(Imm32(8), dest);
393 static void LoadInlineValueOperand(MacroAssembler& masm, ValueOperand dest) {
394 // Note: the Value might be unaligned but as above we rely on all our
395 // platforms having appropriate support for unaligned accesses (except for
396 // floating point instructions on ARM).
397 Register pc = LoadBytecodePC(masm, dest.scratchReg());
398 masm.loadUnalignedValue(Address(pc, sizeof(jsbytecode)), dest);
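// loadScript: the compiler bakes the JSScript* in as an immediate, while the
// interpreter has to load it from the frame's interpreterScript slot.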
401 template <>
402 void BaselineCompilerCodeGen::loadScript(Register dest) {
403 masm.movePtr(ImmGCPtr(handler.script()), dest);
406 template <>
407 void BaselineInterpreterCodeGen::loadScript(Register dest) {
408 masm.loadPtr(frame.addressOfInterpreterScript(), dest);
411 template <>
412 void BaselineCompilerCodeGen::saveInterpreterPCReg() {}
414 template <>
415 void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
416 if (HasInterpreterPCReg()) {
417 masm.storePtr(InterpreterPCReg, frame.addressOfInterpreterPC());
421 template <>
422 void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}
424 template <>
425 void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
426 if (HasInterpreterPCReg()) {
427 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCReg);
431 template <>
432 void BaselineCompilerCodeGen::emitInitializeLocals() {
433 // Initialize all locals to |undefined|. Lexical bindings are temporal
434 // dead zoned in bytecode.
436 size_t n = frame.nlocals();
437 if (n == 0) {
438 return;
441 // Use R0 to minimize code size. If the number of locals to push is <
442 // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
443 // and inline. Otherwise, they're emitted in a partially unrolled loop.
444 static const size_t LOOP_UNROLL_FACTOR = 4;
445 size_t toPushExtra = n % LOOP_UNROLL_FACTOR;
447 masm.moveValue(UndefinedValue(), R0);
449 // Handle any extra pushes left over by the optional unrolled loop below.
450 for (size_t i = 0; i < toPushExtra; i++) {
451 masm.pushValue(R0);
454 // Partially unrolled loop of pushes.
455 if (n >= LOOP_UNROLL_FACTOR) {
456 size_t toPush = n - toPushExtra;
457 MOZ_ASSERT(toPush % LOOP_UNROLL_FACTOR == 0);
458 MOZ_ASSERT(toPush >= LOOP_UNROLL_FACTOR);
459 masm.move32(Imm32(toPush), R1.scratchReg());
460 // Emit unrolled loop with 4 pushes per iteration.
461 Label pushLoop;
462 masm.bind(&pushLoop);
463 for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++) {
464 masm.pushValue(R0);
466 masm.branchSub32(Assembler::NonZero, Imm32(LOOP_UNROLL_FACTOR),
467 R1.scratchReg(), &pushLoop);
471 template <>
472 void BaselineInterpreterCodeGen::emitInitializeLocals() {
473 // Push |undefined| for all locals.
475 Register scratch = R0.scratchReg();
476 loadScript(scratch);
477 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
478 masm.loadPtr(Address(scratch, RuntimeScriptData::offsetOfISD()), scratch);
479 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNfixed()), scratch);
481 Label top, done;
482 masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
483 masm.bind(&top);
485 masm.pushValue(UndefinedValue());
486 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
488 masm.bind(&done);
491 // On input:
492 //  R2.scratchReg() contains the object being written to.
493 // Called with the baseline stack synced, except for R0 which is preserved.
494 // All other registers are usable as scratch.
495 // This calls:
496 // void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
497 template <typename Handler>
498 bool BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot() {
499 if (!postBarrierSlot_.used()) {
500 return true;
503 masm.bind(&postBarrierSlot_);
505 saveInterpreterPCReg();
507 Register objReg = R2.scratchReg();
508 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
509 regs.take(R0);
510 regs.take(objReg);
511 regs.take(BaselineFrameReg);
512 Register scratch = regs.takeAny();
513 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
514 // On ARM, save the link register before calling. It contains the return
515 // address. The |masm.ret()| later will pop this into |pc| to return.
516 masm.push(lr);
517 #elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
518 masm.push(ra);
519 #endif
520 masm.pushValue(R0);
522 masm.setupUnalignedABICall(scratch);
523 masm.movePtr(ImmPtr(cx->runtime()), scratch);
524 masm.passABIArg(scratch);
525 masm.passABIArg(objReg);
526 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));
528 restoreInterpreterPCReg();
530 masm.popValue(R0);
531 masm.ret();
532 return true;
535 template <>
536 bool BaselineCompilerCodeGen::emitNextIC() {
537 // Emit a call to an IC stored in JitScript. Calls to this must match the
538 // ICEntry order in JitScript: first the non-op IC entries for |this| and
539 // formal arguments, then the for-op IC entries for JOF_IC ops.
541 JSScript* script = handler.script();
542 uint32_t pcOffset = script->pcToOffset(handler.pc());
544 // We don't use every ICEntry and we can skip unreachable ops, so we have
545 // to loop until we find an ICEntry for the current pc.
546 const ICEntry* entry;
547 uint32_t entryIndex;
548 do {
549 entry = &script->jitScript()->icEntry(handler.icEntryIndex());
550 entryIndex = handler.icEntryIndex();
551 handler.moveToNextICEntry();
552 } while (entry->pcOffset() < pcOffset);
554 MOZ_RELEASE_ASSERT(entry->pcOffset() == pcOffset);
555 MOZ_ASSERT_IF(!entry->isForPrologue(), BytecodeOpHasIC(JSOp(*handler.pc())));
557 // Load stub pointer into ICStubReg.
558 if (JitOptions.warpBuilder) {
559 masm.loadPtr(frame.addressOfICScript(), ICStubReg);
560 size_t firstStubOffset = ICScript::offsetOfFirstStub(entryIndex);
561 masm.loadPtr(Address(ICStubReg, firstStubOffset), ICStubReg);
562 } else {
563 masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
564 ICStubReg);
567 CodeOffset returnOffset;
568 EmitCallIC(masm, &returnOffset);
570 RetAddrEntry::Kind kind = entry->isForPrologue()
571 ? RetAddrEntry::Kind::PrologueIC
572 : RetAddrEntry::Kind::IC;
574 if (!handler.retAddrEntries().emplaceBack(pcOffset, kind, returnOffset)) {
575 ReportOutOfMemory(cx);
576 return false;
579 return true;
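// The interpreter doesn't know the pc statically, so it loads the current
// ICEntry from the frame's interpreterICEntry slot and calls its first stub.
// Return offsets are only recorded for ops Ion may inline, for use in bailouts.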
582 template <>
583 bool BaselineInterpreterCodeGen::emitNextIC() {
584 saveInterpreterPCReg();
585 masm.loadPtr(frame.addressOfInterpreterICEntry(), ICStubReg);
586 masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
587 masm.call(Address(ICStubReg, ICStub::offsetOfStubCode()));
588 uint32_t returnOffset = masm.currentOffset();
589 restoreInterpreterPCReg();
591 // If this is an IC for a bytecode op where Ion may inline scripts, we need to
592 // record the return offset for Ion bailouts.
593 if (handler.currentOp()) {
594 JSOp op = *handler.currentOp();
595 MOZ_ASSERT(BytecodeOpHasIC(op));
596 if (IsIonInlinableOp(op)) {
597 if (!handler.icReturnOffsets().emplaceBack(returnOffset, op)) {
598 return false;
603 return true;
606 template <>
607 void BaselineCompilerCodeGen::computeFrameSize(Register dest) {
608 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
609 masm.move32(Imm32(frame.frameSize()), dest);
612 template <>
613 void BaselineInterpreterCodeGen::computeFrameSize(Register dest) {
614 // dest = FramePointer + BaselineFrame::FramePointerOffset - StackPointer.
615 MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
616 masm.computeEffectiveAddress(
617 Address(BaselineFrameReg, BaselineFrame::FramePointerOffset), dest);
618 masm.subStackPtrFrom(dest);
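// VM-call helpers: prepareVMCall records the current frame depth and syncs
// the expression stack; callers then push arguments with pushArg and invoke
// callVM, which pushes an exit frame descriptor and calls the VM trampoline.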
621 template <typename Handler>
622 void BaselineCodeGen<Handler>::prepareVMCall() {
623 pushedBeforeCall_ = masm.framePushed();
624 #ifdef DEBUG
625 inCall_ = true;
626 #endif
628 // Ensure everything is synced.
629 frame.syncStack(0);
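// storeFrameSizeAndPushDescriptor: store the frame size (debug builds only)
// and push the BaselineJS frame descriptor for the exit frame. The compiler
// knows the frame size statically; the interpreter computes it from the frame
// and stack pointers.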
632 template <>
633 void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
634 uint32_t argSize, Register scratch1, Register scratch2) {
635 uint32_t frameFullSize = frame.frameSize();
637 #ifdef DEBUG
638 masm.store32(Imm32(frameFullSize), frame.addressOfDebugFrameSize());
639 #endif
641 uint32_t descriptor = MakeFrameDescriptor(
642 frameFullSize + argSize, FrameType::BaselineJS, ExitFrameLayout::Size());
643 masm.push(Imm32(descriptor));
646 template <>
647 void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
648 uint32_t argSize, Register scratch1, Register scratch2) {
649 // scratch1 = FramePointer + BaselineFrame::FramePointerOffset - StackPointer.
650 masm.computeEffectiveAddress(
651 Address(BaselineFrameReg, BaselineFrame::FramePointerOffset), scratch1);
652 masm.subStackPtrFrom(scratch1);
654 #ifdef DEBUG
655 // Store the frame size without VMFunction arguments in debug builds.
656 masm.computeEffectiveAddress(Address(scratch1, -int32_t(argSize)), scratch2);
657 masm.store32(scratch2, frame.addressOfDebugFrameSize());
658 #endif
660 // Push frame descriptor based on the full frame size.
661 masm.makeFrameDescriptor(scratch1, FrameType::BaselineJS,
662 ExitFrameLayout::Size());
663 masm.push(scratch1);
666 static uint32_t GetVMFunctionArgSize(const VMFunctionData& fun) {
667 return fun.explicitStackSlots() * sizeof(void*);
670 template <typename Handler>
671 bool BaselineCodeGen<Handler>::callVMInternal(VMFunctionId id,
672 RetAddrEntry::Kind kind,
673 CallVMPhase phase) {
674 #ifdef DEBUG
675 // Assert prepareVMCall() has been called.
676 MOZ_ASSERT(inCall_);
677 inCall_ = false;
678 #endif
680 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
681 const VMFunctionData& fun = GetVMFunction(id);
683 uint32_t argSize = GetVMFunctionArgSize(fun);
685 // Assert all arguments were pushed.
686 MOZ_ASSERT(masm.framePushed() - pushedBeforeCall_ == argSize);
688 saveInterpreterPCReg();
690 if (phase == CallVMPhase::AfterPushingLocals) {
691 storeFrameSizeAndPushDescriptor(argSize, R0.scratchReg(), R1.scratchReg());
692 } else {
693 MOZ_ASSERT(phase == CallVMPhase::BeforePushingLocals);
694 uint32_t frameBaseSize = BaselineFrame::frameSizeForNumValueSlots(0);
695 #ifdef DEBUG
696 masm.store32(Imm32(frameBaseSize), frame.addressOfDebugFrameSize());
697 #endif
698 uint32_t descriptor =
699 MakeFrameDescriptor(frameBaseSize + argSize, FrameType::BaselineJS,
700 ExitFrameLayout::Size());
701 masm.push(Imm32(descriptor));
703 MOZ_ASSERT(fun.expectTailCall == NonTailCall);
704 // Perform the call.
705 masm.call(code);
706 uint32_t callOffset = masm.currentOffset();
708 // Pop arguments from framePushed.
709 masm.implicitPop(argSize);
711 restoreInterpreterPCReg();
713 return handler.recordCallRetAddr(cx, kind, callOffset);
716 template <typename Handler>
717 template <typename Fn, Fn fn>
718 bool BaselineCodeGen<Handler>::callVM(RetAddrEntry::Kind kind,
719 CallVMPhase phase) {
720 VMFunctionId fnId = VMFunctionToId<Fn, fn>::id;
721 return callVMInternal(fnId, kind, phase);
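// emitStackCheck: skip the VM call while the stack pointer (adjusted for
// script->nslots() when required) is still above cx->jitStackLimit; otherwise
// call CheckOverRecursedBaseline to report over-recursion.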
724 template <typename Handler>
725 bool BaselineCodeGen<Handler>::emitStackCheck() {
726 Label skipCall;
727 if (handler.mustIncludeSlotsInStackCheck()) {
728 // Subtract the size of script->nslots() first.
729 Register scratch = R1.scratchReg();
730 masm.moveStackPtrTo(scratch);
731 subtractScriptSlotsSize(scratch, R2.scratchReg());
732 masm.branchPtr(Assembler::BelowOrEqual,
733 AbsoluteAddress(cx->addressOfJitStackLimit()), scratch,
734 &skipCall);
735 } else {
736 masm.branchStackPtrRhs(Assembler::BelowOrEqual,
737 AbsoluteAddress(cx->addressOfJitStackLimit()),
738 &skipCall);
741 prepareVMCall();
742 masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());
743 pushArg(R1.scratchReg());
745 const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
746 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::StackCheck;
748 using Fn = bool (*)(JSContext*, BaselineFrame*);
749 if (!callVM<Fn, CheckOverRecursedBaseline>(kind, phase)) {
750 return false;
753 masm.bind(&skipCall);
754 return true;
757 static void EmitCallFrameIsDebuggeeCheck(MacroAssembler& masm) {
758 masm.setupUnalignedABICall(R0.scratchReg());
759 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
760 masm.passABIArg(R0.scratchReg());
761 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::FrameIsDebuggeeCheck));
764 template <>
765 bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
766 if (handler.compileDebugInstrumentation()) {
767 EmitCallFrameIsDebuggeeCheck(masm);
769 return true;
772 template <>
773 bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
774 // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
775 // enabled.
777 // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
778 // call. Consider moving the callWithABI out-of-line.
780 Label skipCheck;
781 CodeOffset toggleOffset = masm.toggledJump(&skipCheck);
783 saveInterpreterPCReg();
784 EmitCallFrameIsDebuggeeCheck(masm);
785 restoreInterpreterPCReg();
787 masm.bind(&skipCheck);
788 return handler.addDebugInstrumentationOffset(cx, toggleOffset);
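// If the script collects PC counts, bump the execution counter for |pc|.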
791 static void MaybeIncrementCodeCoverageCounter(MacroAssembler& masm,
792 JSScript* script,
793 jsbytecode* pc) {
794 if (!script->hasScriptCounts()) {
795 return;
797 PCCounts* counts = script->maybeGetPCCounts(pc);
798 uint64_t* counterAddr = &counts->numExec();
799 masm.inc64(AbsoluteAddress(counterAddr));
802 template <>
803 bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
804 // If the main instruction is not a jump target, then we emit the
805 // corresponding code coverage counter.
806 JSScript* script = handler.script();
807 jsbytecode* main = script->main();
808 if (!BytecodeIsJumpTarget(JSOp(*main))) {
809 MaybeIncrementCodeCoverageCounter(masm, script, main);
811 return true;
814 template <>
815 bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
816 Label skipCoverage;
817 CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
818 masm.call(handler.codeCoverageAtPrologueLabel());
819 masm.bind(&skipCoverage);
820 return handler.codeCoverageOffsets().append(toggleOffset.offset());
823 template <>
824 void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg,
825 Register scratch) {
826 uint32_t slotsSize = handler.script()->nslots() * sizeof(Value);
827 masm.subPtr(Imm32(slotsSize), reg);
830 template <>
831 void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg,
832 Register scratch) {
833 // reg = reg - script->nslots() * sizeof(Value)
834 MOZ_ASSERT(reg != scratch);
835 loadScript(scratch);
836 masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
837 masm.loadPtr(Address(scratch, RuntimeScriptData::offsetOfISD()), scratch);
838 masm.load32(Address(scratch, ImmutableScriptData::offsetOfNslots()), scratch);
839 static_assert(sizeof(Value) == 8,
840 "shift by 3 below assumes Value is 8 bytes");
841 masm.lshiftPtr(Imm32(3), scratch);
842 masm.subPtr(scratch, reg);
845 template <>
846 void BaselineCompilerCodeGen::loadGlobalLexicalEnvironment(Register dest) {
847 masm.movePtr(ImmGCPtr(&cx->global()->lexicalEnvironment()), dest);
850 template <>
851 void BaselineInterpreterCodeGen::loadGlobalLexicalEnvironment(Register dest) {
852 masm.loadPtr(AbsoluteAddress(cx->addressOfRealm()), dest);
853 masm.loadPtr(Address(dest, Realm::offsetOfActiveLexicalEnvironment()), dest);
856 template <>
857 void BaselineCompilerCodeGen::pushGlobalLexicalEnvironmentValue(
858 ValueOperand scratch) {
859 frame.push(ObjectValue(cx->global()->lexicalEnvironment()));
862 template <>
863 void BaselineInterpreterCodeGen::pushGlobalLexicalEnvironmentValue(
864 ValueOperand scratch) {
865 loadGlobalLexicalEnvironment(scratch.scratchReg());
866 masm.tagValue(JSVAL_TYPE_OBJECT, scratch.scratchReg(), scratch);
867 frame.push(scratch);
870 template <>
871 void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest) {
872 JSObject* thisObj = cx->global()->lexicalEnvironment().thisObject();
873 masm.moveValue(ObjectValue(*thisObj), dest);
876 template <>
877 void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest) {
878 Register scratch = dest.scratchReg();
879 loadGlobalLexicalEnvironment(scratch);
880 static constexpr size_t SlotOffset =
881 LexicalEnvironmentObject::offsetOfThisValueOrScopeSlot();
882 masm.loadValue(Address(scratch, SlotOffset), dest);
885 template <>
886 void BaselineCompilerCodeGen::pushScriptArg() {
887 pushArg(ImmGCPtr(handler.script()));
890 template <>
891 void BaselineInterpreterCodeGen::pushScriptArg() {
892 pushArg(frame.addressOfInterpreterScript());
895 template <>
896 void BaselineCompilerCodeGen::pushBytecodePCArg() {
897 pushArg(ImmPtr(handler.pc()));
900 template <>
901 void BaselineInterpreterCodeGen::pushBytecodePCArg() {
902 if (HasInterpreterPCReg()) {
903 pushArg(InterpreterPCReg);
904 } else {
905 pushArg(frame.addressOfInterpreterPC());
909 static gc::Cell* GetScriptGCThing(JSScript* script, jsbytecode* pc,
910 ScriptGCThingType type) {
911 switch (type) {
912 case ScriptGCThingType::Atom:
913 return script->getAtom(pc);
914 case ScriptGCThingType::RegExp:
915 return script->getRegExp(pc);
916 case ScriptGCThingType::Function:
917 return script->getFunction(pc);
918 case ScriptGCThingType::Scope:
919 return script->getScope(pc);
920 case ScriptGCThingType::BigInt:
921 return script->getBigInt(pc);
923 MOZ_CRASH("Unexpected GCThing type");
926 template <>
927 void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type,
928 Register dest,
929 Register scratch) {
930 gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
931 masm.movePtr(ImmGCPtr(thing), dest);
934 template <>
935 void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type,
936 Register dest,
937 Register scratch) {
938 MOZ_ASSERT(dest != scratch);
940 // Load the index in |scratch|.
941 LoadInt32Operand(masm, scratch);
943 // Load the GCCellPtr.
944 loadScript(dest);
945 masm.loadPtr(Address(dest, JSScript::offsetOfPrivateData()), dest);
946 masm.loadPtr(BaseIndex(dest, scratch, ScalePointer,
947 PrivateScriptData::offsetOfGCThings()),
948 dest);
950 // Clear the tag bits.
951 switch (type) {
952 case ScriptGCThingType::Atom:
953 // Use xorPtr with a 32-bit immediate because it's more efficient than
954 // andPtr on 64-bit.
955 static_assert(uintptr_t(TraceKind::String) == 2,
956 "Unexpected tag bits for string GCCellPtr");
957 masm.xorPtr(Imm32(2), dest);
958 break;
959 case ScriptGCThingType::RegExp:
960 case ScriptGCThingType::Function:
961 // No-op because GCCellPtr tag bits are zero for objects.
962 static_assert(uintptr_t(TraceKind::Object) == 0,
963 "Unexpected tag bits for object GCCellPtr");
964 break;
965 case ScriptGCThingType::BigInt:
966 // Use xorPtr with a 32-bit immediate because it's more efficient than
967 // andPtr on 64-bit.
968 static_assert(uintptr_t(TraceKind::BigInt) == 1,
969 "Unexpected tag bits for BigInt GCCellPtr");
970 masm.xorPtr(Imm32(1), dest);
971 break;
972 case ScriptGCThingType::Scope:
973 // Use xorPtr with a 32-bit immediate because it's more efficient than
974 // andPtr on 64-bit.
975 static_assert(uintptr_t(TraceKind::Scope) >= JS::OutOfLineTraceKindMask,
976 "Expected Scopes to have OutOfLineTraceKindMask tag");
977 masm.xorPtr(Imm32(JS::OutOfLineTraceKindMask), dest);
978 break;
981 #ifdef DEBUG
982 // Assert low bits are not set.
983 Label ok;
984 masm.branchTestPtr(Assembler::Zero, dest, Imm32(0b111), &ok);
985 masm.assumeUnreachable("GC pointer with tag bits set");
986 masm.bind(&ok);
987 #endif
990 template <>
991 void BaselineCompilerCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
992 Register scratch1,
993 Register scratch2) {
994 gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
995 pushArg(ImmGCPtr(thing));
998 template <>
999 void BaselineInterpreterCodeGen::pushScriptGCThingArg(ScriptGCThingType type,
1000 Register scratch1,
1001 Register scratch2) {
1002 loadScriptGCThing(type, scratch1, scratch2);
1003 pushArg(scratch1);
1006 template <typename Handler>
1007 void BaselineCodeGen<Handler>::pushScriptNameArg(Register scratch1,
1008 Register scratch2) {
1009 pushScriptGCThingArg(ScriptGCThingType::Atom, scratch1, scratch2);
1012 template <>
1013 void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register) {
1014 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT8);
1015 pushArg(Imm32(GET_UINT8(handler.pc())));
1018 template <>
1019 void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch) {
1020 LoadUint8Operand(masm, scratch);
1021 pushArg(scratch);
1024 template <>
1025 void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register) {
1026 MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT16);
1027 pushArg(Imm32(GET_UINT16(handler.pc())));
1030 template <>
1031 void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
1032 Register scratch) {
1033 LoadUint16Operand(masm, scratch);
1034 pushArg(scratch);
1037 template <>
1038 void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
1039 uint32_t length = GET_UINT32(handler.pc());
1040 MOZ_ASSERT(length <= INT32_MAX,
1041 "the bytecode emitter must fail to compile code that would "
1042 "produce a length exceeding int32_t range");
1043 masm.move32(Imm32(AssertedCast<int32_t>(length)), dest);
1046 template <>
1047 void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
1048 LoadInt32Operand(masm, dest);
1051 template <>
1052 void BaselineCompilerCodeGen::loadInt32IndexBytecodeOperand(ValueOperand dest) {
1053 uint32_t index = GET_UINT32(handler.pc());
1054 MOZ_ASSERT(index <= INT32_MAX,
1055 "the bytecode emitter must fail to compile code that would "
1056 "produce an index exceeding int32_t range");
1057 masm.moveValue(Int32Value(AssertedCast<int32_t>(index)), dest);
1060 template <>
1061 void BaselineInterpreterCodeGen::loadInt32IndexBytecodeOperand(
1062 ValueOperand dest) {
1063 Register scratch = dest.scratchReg();
1064 LoadInt32Operand(masm, scratch);
1065 masm.tagValue(JSVAL_TYPE_INT32, scratch, dest);
1068 template <typename Handler>
1069 bool BaselineCodeGen<Handler>::emitDebugPrologue() {
1070 auto ifDebuggee = [this]() {
1071 // Load pointer to BaselineFrame in R0.
1072 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1074 prepareVMCall();
1075 pushArg(R0.scratchReg());
1077 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugPrologue;
1079 using Fn = bool (*)(JSContext*, BaselineFrame*);
1080 if (!callVM<Fn, jit::DebugPrologue>(kind)) {
1081 return false;
1084 return true;
1086 return emitDebugInstrumentation(ifDebuggee);
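// emitInitFrameFields: initialize the BaselineFrame's fixed slots (flags,
// environment chain and ICScript, plus the interpreter's script/pc/ICEntry
// fields) before any locals or stack values are pushed.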
1089 template <>
1090 void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
1091 Register scratch = R0.scratchReg();
1092 Register scratch2 = R2.scratchReg();
1093 MOZ_ASSERT(nonFunctionEnv != scratch && nonFunctionEnv != scratch2);
1095 masm.store32(Imm32(0), frame.addressOfFlags());
1096 if (handler.function()) {
1097 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
1098 masm.loadPtr(Address(scratch, JSFunction::offsetOfEnvironment()), scratch);
1099 masm.storePtr(scratch, frame.addressOfEnvironmentChain());
1100 } else {
1101 masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
1104 if (!JitOptions.warpBuilder) {
1105 // Trial inlining only supported in Warp.
1106 return;
1109 // If cx->inlinedICScript contains an inlined ICScript (passed from
1110 // the caller), take that ICScript and store it in the frame, then
1111 // overwrite cx->inlinedICScript with nullptr.
1112 Label notInlined, done;
1113 masm.movePtr(ImmPtr(cx->addressOfInlinedICScript()), scratch);
1114 Address inlinedAddr(scratch, 0);
1115 masm.branchPtr(Assembler::Equal, inlinedAddr, ImmWord(0), &notInlined);
1116 masm.loadPtr(inlinedAddr, scratch2);
1117 masm.storePtr(scratch2, frame.addressOfICScript());
1118 masm.storePtr(ImmPtr(nullptr), inlinedAddr);
1119 masm.jump(&done);
1121   // Otherwise, store this script's default ICScript in the frame.
1122 masm.bind(&notInlined);
1123 masm.storePtr(ImmPtr(handler.script()->jitScript()->icScript()),
1124 frame.addressOfICScript());
1125 masm.bind(&done);
1128 template <>
1129 void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
1130 MOZ_ASSERT(nonFunctionEnv == R1.scratchReg(),
1131 "Don't clobber nonFunctionEnv below");
1133 // If we have a dedicated PC register we use it as scratch1 to avoid a
1134 // register move below.
1135 Register scratch1 =
1136 HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
1137 Register scratch2 = R2.scratchReg();
1139 masm.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER),
1140 frame.addressOfFlags());
1142 // Initialize interpreterScript.
1143 Label notFunction, done;
1144 masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
1145 masm.branchTestPtr(Assembler::NonZero, scratch1, Imm32(CalleeTokenScriptBit),
1146 &notFunction);
1148 // CalleeToken_Function or CalleeToken_FunctionConstructing.
1149 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
1150 masm.loadPtr(Address(scratch1, JSFunction::offsetOfEnvironment()),
1151 scratch2);
1152 masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
1153 masm.loadPtr(Address(scratch1, JSFunction::offsetOfScript()), scratch1);
1154 masm.jump(&done);
1156 masm.bind(&notFunction);
1158 // CalleeToken_Script.
1159 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
1160 masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
1162 masm.bind(&done);
1163 masm.storePtr(scratch1, frame.addressOfInterpreterScript());
1165 if (JitOptions.warpBuilder) {
1166 // Initialize icScript and interpreterICEntry
1167 masm.loadJitScript(scratch1, scratch2);
1168 masm.computeEffectiveAddress(
1169 Address(scratch2, JitScript::offsetOfICScript()), scratch2);
1170 masm.storePtr(scratch2, frame.addressOfICScript());
1171 masm.computeEffectiveAddress(
1172 Address(scratch2, ICScript::offsetOfICEntries()), scratch2);
1173 masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
1174 } else {
1175 // Initialize interpreterICEntry
1176 masm.loadJitScript(scratch1, scratch2);
1177 masm.computeEffectiveAddress(
1178 Address(scratch2, JitScript::offsetOfICEntries()), scratch2);
1179 masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
1182 // Initialize interpreter pc.
1183 masm.loadPtr(Address(scratch1, JSScript::offsetOfSharedData()), scratch1);
1184 masm.loadPtr(Address(scratch1, RuntimeScriptData::offsetOfISD()), scratch1);
1185 masm.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1);
1187 if (HasInterpreterPCReg()) {
1188 MOZ_ASSERT(scratch1 == InterpreterPCReg,
1189 "pc must be stored in the pc register");
1190 } else {
1191 masm.storePtr(scratch1, frame.addressOfInterpreterPC());
1195 template <>
1196 template <typename F>
1197 bool BaselineCompilerCodeGen::initEnvironmentChainHelper(
1198 const F& initFunctionEnv) {
1199 if (handler.function()) {
1200 return initFunctionEnv();
1202 return true;
1205 template <>
1206 template <typename F>
1207 bool BaselineInterpreterCodeGen::initEnvironmentChainHelper(
1208 const F& initFunctionEnv) {
1209 // For function scripts use the code emitted by initFunctionEnv. For other
1210 // scripts this is a no-op.
1212 Label done;
1213 masm.branchTestPtr(Assembler::NonZero, frame.addressOfCalleeToken(),
1214 Imm32(CalleeTokenScriptBit), &done);
1216 if (!initFunctionEnv()) {
1217 return false;
1221 masm.bind(&done);
1222 return true;
1225 template <typename Handler>
1226 bool BaselineCodeGen<Handler>::initEnvironmentChain() {
1227 auto initFunctionEnv = [this]() {
1228 auto initEnv = [this]() {
1229 // Call into the VM to create the proper environment objects.
1230 prepareVMCall();
1232 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1233 pushArg(R0.scratchReg());
1235 const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
1237 using Fn = bool (*)(JSContext*, BaselineFrame*);
1238 return callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase);
1240 return emitTestScriptFlag(
1241 JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects, true,
1242 initEnv, R2.scratchReg());
1245 return initEnvironmentChainHelper(initFunctionEnv);
1248 template <typename Handler>
1249 bool BaselineCodeGen<Handler>::emitInterruptCheck() {
1250 frame.syncStack(0);
1252 Label done;
1253 masm.branch32(Assembler::Equal, AbsoluteAddress(cx->addressOfInterruptBits()),
1254 Imm32(0), &done);
1256 prepareVMCall();
1258 // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
1259 // from other callVMs that might happen at this pc.
1260 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::InterruptCheck;
1262 using Fn = bool (*)(JSContext*);
1263 if (!callVM<Fn, InterruptCheck>(kind)) {
1264 return false;
1267 masm.bind(&done);
1268 return true;
1271 template <>
1272 bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
1273 frame.assertSyncedStack();
1275 // Record native code offset for OSR from Baseline Interpreter into Baseline
1276 // JIT code. This is right before the warm-up check in the Baseline JIT code,
1277 // to make sure we can immediately enter Ion if the script is warm enough or
1278 // if --ion-eager is used.
1279 JSScript* script = handler.script();
1280 jsbytecode* pc = handler.pc();
1281 if (JSOp(*pc) == JSOp::LoopHead) {
1282 uint32_t pcOffset = script->pcToOffset(pc);
1283 uint32_t nativeOffset = masm.currentOffset();
1284 if (!handler.osrEntries().emplaceBack(pcOffset, nativeOffset)) {
1285 ReportOutOfMemory(cx);
1286 return false;
1290 // Emit no warm-up counter increments if Ion is not enabled or if the script
1291 // will never be Ion-compileable.
1292 if (!handler.maybeIonCompileable()) {
1293 return true;
1296 Register scriptReg = R2.scratchReg();
1297 Register countReg = R0.scratchReg();
1299 uint32_t warmUpCountOffset;
1300 if (JitOptions.warpBuilder) {
1301 // Load the ICScript* in scriptReg.
1302 masm.loadPtr(frame.addressOfICScript(), scriptReg);
1303 warmUpCountOffset = ICScript::offsetOfWarmUpCount();
1304 } else {
1305 // Load the JitScript* in scriptReg.
1306 masm.movePtr(ImmPtr(script->jitScript()), scriptReg);
1307 warmUpCountOffset = JitScript::offsetOfWarmUpCount();
1310 // Bump warm-up counter.
1311 Address warmUpCounterAddr(scriptReg, warmUpCountOffset);
1312 masm.load32(warmUpCounterAddr, countReg);
1313 masm.add32(Imm32(1), countReg);
1314 masm.store32(countReg, warmUpCounterAddr);
1316 if (JitOptions.warpBuilder && !JitOptions.disableInlining) {
1317 // Consider trial inlining.
1318 // Note: unlike other warmup thresholds, where we try to enter a
1319 // higher tier whenever we are higher than a given warmup count,
1320 // trial inlining triggers once when reaching the threshold.
1321 Label noTrialInlining;
1322 masm.branch32(Assembler::NotEqual, countReg,
1323 Imm32(JitOptions.trialInliningWarmUpThreshold),
1324 &noTrialInlining);
1325 prepareVMCall();
1327 masm.PushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1329 using Fn = bool (*)(JSContext*, BaselineFrame*);
1330 if (!callVMNonOp<Fn, DoTrialInlining>()) {
1331 return false;
1333 masm.bind(&noTrialInlining);
1336 if (JSOp(*pc) == JSOp::LoopHead) {
1337 // If this is a loop where we can't OSR (for example because it's inside a
1338 // catch or finally block), increment the warmup counter but don't attempt
1339 // OSR (Ion/Warp only compiles the try block).
1340 if (!handler.analysis().info(pc).loopHeadCanOsr) {
1341 return true;
1345 Label done;
1347 const OptimizationInfo* info =
1348 IonOptimizations.get(IonOptimizations.firstLevel());
1349 uint32_t warmUpThreshold = info->compilerWarmUpThreshold(script, pc);
1350 masm.branch32(Assembler::LessThan, countReg, Imm32(warmUpThreshold), &done);
1352 if (JitOptions.warpBuilder) {
1353 // Load the JitScript* in scriptReg.
1354 masm.movePtr(ImmPtr(script->jitScript()), scriptReg);
1357 // Do nothing if Ion is already compiling this script off-thread or if Ion has
1358 // been disabled for this script.
1359 masm.loadPtr(Address(scriptReg, JitScript::offsetOfIonScript()), scriptReg);
1360 masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonCompilingScriptPtr),
1361 &done);
1362 masm.branchPtr(Assembler::Equal, scriptReg, ImmPtr(IonDisabledScriptPtr),
1363 &done);
1365 // Try to compile and/or finish a compilation.
1366 if (JSOp(*pc) == JSOp::LoopHead) {
1367 // Try to OSR into Ion.
1368 computeFrameSize(R0.scratchReg());
1370 prepareVMCall();
1372 pushBytecodePCArg();
1373 pushArg(R0.scratchReg());
1374 masm.PushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1376 using Fn = bool (*)(JSContext*, BaselineFrame*, uint32_t, jsbytecode*,
1377 IonOsrTempData**);
1378 if (!callVM<Fn, IonCompileScriptForBaselineOSR>()) {
1379 return false;
1382 // The return register holds the IonOsrTempData*. Perform OSR if it's not
1383 // nullptr.
1384 static_assert(ReturnReg != OsrFrameReg,
1385 "Code below depends on osrDataReg != OsrFrameReg");
1386 Register osrDataReg = ReturnReg;
1387 masm.branchTestPtr(Assembler::Zero, osrDataReg, osrDataReg, &done);
1389 // Success! Switch from Baseline JIT code to Ion JIT code.
1391 // At this point, stack looks like:
1393 // +-> [...Calling-Frame...]
1394 // | [...Actual-Args/ThisV/ArgCount/Callee...]
1395 // | [Descriptor]
1396 // | [Return-Addr]
1397 // +---[Saved-FramePtr]
1398 // [...Baseline-Frame...]
1400 // Restore the stack pointer so that the return address is on top of
1401 // the stack.
1402 masm.addToStackPtr(Imm32(frame.frameSize()));
1404 #ifdef DEBUG
1405 // Get a scratch register that's not osrDataReg or OsrFrameReg.
1406 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
1407 regs.take(BaselineFrameReg);
1408 regs.take(osrDataReg);
1409 regs.take(OsrFrameReg);
1411 Register scratchReg = regs.takeAny();
1413   // If profiler instrumentation is on, ensure that lastProfilingFrame is
1414   // the frame currently being OSR-ed.
1416 Label checkOk;
1417 AbsoluteAddress addressOfEnabled(
1418 cx->runtime()->geckoProfiler().addressOfEnabled());
1419 masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
1420 masm.loadPtr(AbsoluteAddress((void*)&cx->jitActivation), scratchReg);
1421 masm.loadPtr(
1422 Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()),
1423 scratchReg);
1425     // It may be the case that we entered the baseline frame with profiling
1426     // turned off, then profiling was turned on during a call within a loop
1427     // (i.e. in a callee frame), and we then returned to this frame and OSR
1428     // with profiling turned on. In this case, allow lastProfilingFrame to
1429     // be null.
1430 masm.branchPtr(Assembler::Equal, scratchReg, ImmWord(0), &checkOk);
1432 masm.branchStackPtr(Assembler::Equal, scratchReg, &checkOk);
1433 masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
1434 masm.bind(&checkOk);
1436 #endif
1438 // Jump into Ion.
1439 masm.loadPtr(Address(osrDataReg, IonOsrTempData::offsetOfBaselineFrame()),
1440 OsrFrameReg);
1441 masm.jump(Address(osrDataReg, IonOsrTempData::offsetOfJitCode()));
1442 } else {
1443 prepareVMCall();
1445 masm.PushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1447 using Fn = bool (*)(JSContext*, BaselineFrame*);
1448 if (!callVMNonOp<Fn, IonCompileScriptForBaselineAtEntry>()) {
1449 return false;
1453 masm.bind(&done);
1454 return true;
1457 template <>
1458 bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
1459 Register scriptReg = R2.scratchReg();
1460 Register countReg = R0.scratchReg();
1462 // Load the JitScript* in scriptReg.
1463 loadScript(scriptReg);
1464 masm.loadJitScript(scriptReg, scriptReg);
1466 // Bump warm-up counter.
1467 Address warmUpCounterAddr(scriptReg, JitScript::offsetOfWarmUpCount());
1468 masm.load32(warmUpCounterAddr, countReg);
1469 masm.add32(Imm32(1), countReg);
1470 masm.store32(countReg, warmUpCounterAddr);
1472 // If the script is warm enough for Baseline compilation, call into the VM to
1473 // compile it.
1474 Label done;
1475 masm.branch32(Assembler::BelowOrEqual, countReg,
1476 Imm32(JitOptions.baselineJitWarmUpThreshold), &done);
1477 masm.branchPtr(Assembler::Equal,
1478 Address(scriptReg, JitScript::offsetOfBaselineScript()),
1479 ImmPtr(BaselineDisabledScriptPtr), &done);
1481 prepareVMCall();
1483 masm.PushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
1485 using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t**);
1486 if (!callVM<Fn, BaselineCompileFromBaselineInterpreter>()) {
1487 return false;
1490 // If the function returned nullptr we either skipped compilation or were
1491 // unable to compile the script. Continue running in the interpreter.
1492 masm.branchTestPtr(Assembler::Zero, ReturnReg, ReturnReg, &done);
1494 // Success! Switch from interpreter to JIT code by jumping to the
1495 // corresponding code in the BaselineScript.
1497 // This works because BaselineCompiler uses the same frame layout (stack is
1498 // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
1499 // already cleared the RUNNING_IN_INTERPRETER flag for us.
1500 // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
1501 masm.jump(ReturnReg);
1504 masm.bind(&done);
1505 return true;
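// emitArgumentTypeChecks: when type inference is enabled, run the prologue
// ICs that type-check |this| and each formal argument.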
1508 template <>
1509 bool BaselineCompilerCodeGen::emitArgumentTypeChecks() {
1510 if (!IsTypeInferenceEnabled()) {
1511 return true;
1514 if (!handler.function()) {
1515 return true;
1518 frame.pushThis();
1519 frame.popRegsAndSync(1);
1521 if (!emitNextIC()) {
1522 return false;
1525 size_t nargs = handler.function()->nargs();
1527 for (size_t i = 0; i < nargs; i++) {
1528 frame.pushArg(i);
1529 frame.popRegsAndSync(1);
1531 if (!emitNextIC()) {
1532 return false;
1536 return true;
1539 template <>
1540 bool BaselineInterpreterCodeGen::emitArgumentTypeChecks() {
1541 if (!IsTypeInferenceEnabled()) {
1542 return true;
1545 Register scratch1 = R1.scratchReg();
1547 // If the script is not a function, we're done.
1548 Label done;
1549 masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
1550 masm.branchTestPtr(Assembler::NonZero, scratch1, Imm32(CalleeTokenScriptBit),
1551 &done);
1553 // CalleeToken_Function or CalleeToken_FunctionConstructing.
1554 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
1556 // The frame's scratch slot is used to store two 32-bit values: nargs (lower
1557 // half) and the argument index (upper half).
1558 masm.load16ZeroExtend(Address(scratch1, JSFunction::offsetOfNargs()),
1559 scratch1);
1560 masm.store32(scratch1, frame.addressOfScratchValueLow32());
1562 // Type check |this|.
1563 masm.loadValue(frame.addressOfThis(), R0);
1564 if (!emitNextIC()) {
1565 return false;
1567 frame.bumpInterpreterICEntry();
1569 // Type check arguments. Scratch1 holds the next argument's index.
1570 masm.move32(Imm32(0), scratch1);
1572 // Bounds check.
1573 Label top;
1574 masm.bind(&top);
1575 masm.branch32(Assembler::Equal, frame.addressOfScratchValueLow32(), scratch1,
1576 &done);
1578 // Load the argument, increment argument index and store the index in the
1579 // scratch slot.
1580 BaseValueIndex addr(BaselineFrameReg, scratch1,
1581 BaselineFrame::offsetOfArg(0));
1582 masm.loadValue(addr, R0);
1583 masm.add32(Imm32(1), scratch1);
1584 masm.store32(scratch1, frame.addressOfScratchValueHigh32());
1586 // Type check the argument.
1587 if (!emitNextIC()) {
1588 return false;
1590 frame.bumpInterpreterICEntry();
1592 // Restore argument index.
1593 masm.load32(frame.addressOfScratchValueHigh32(), scratch1);
1594 masm.jump(&top);
1597 masm.bind(&done);
1598 return true;
1601 bool BaselineCompiler::emitDebugTrap() {
1602 MOZ_ASSERT(compileDebugInstrumentation());
1603 MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
1605 JSScript* script = handler.script();
1606 bool enabled = DebugAPI::stepModeEnabled(script) ||
1607 DebugAPI::hasBreakpointsAt(script, handler.pc());
1609 // Emit patchable call to debug trap handler.
1610 JitCode* handlerCode = cx->runtime()->jitRuntime()->debugTrapHandler(
1611 cx, DebugTrapHandlerKind::Compiler);
1612 if (!handlerCode) {
1613 return false;
1616 CodeOffset nativeOffset = masm.toggledCall(handlerCode, enabled);
1618 uint32_t pcOffset = script->pcToOffset(handler.pc());
1619 if (!debugTrapEntries_.emplaceBack(pcOffset, nativeOffset.offset())) {
1620 ReportOutOfMemory(cx);
1621 return false;
1624 // Add a RetAddrEntry for the return offset -> pc mapping.
1625 return handler.recordCallRetAddr(cx, RetAddrEntry::Kind::DebugTrap,
1626 masm.currentOffset());
1629 #ifdef JS_TRACE_LOGGING
1630 template <>
1631 bool BaselineCompilerCodeGen::emitTraceLoggerEnter() {
1632 AllocatableRegisterSet regs(RegisterSet::Volatile());
1633 Register loggerReg = regs.takeAnyGeneral();
1634 Register scriptReg = regs.takeAnyGeneral();
1636 Label noTraceLogger;
1637 if (!traceLoggerToggleOffsets_.append(masm.toggledJump(&noTraceLogger))) {
1638 return false;
1641 masm.Push(loggerReg);
1642 masm.Push(scriptReg);
1644 masm.loadTraceLogger(loggerReg);
1646 // Script start.
1647 masm.movePtr(ImmPtr(handler.script()->jitScript()), scriptReg);
1648 masm.loadPtr(Address(scriptReg, JitScript::offsetOfBaselineScript()),
1649 scriptReg);
1650 Address scriptEvent(scriptReg,
1651 BaselineScript::offsetOfTraceLoggerScriptEvent());
1652 masm.computeEffectiveAddress(scriptEvent, scriptReg);
1653 masm.tracelogStartEvent(loggerReg, scriptReg);
1655 // Engine start.
1656 masm.tracelogStartId(loggerReg, TraceLogger_Baseline, /* force = */ true);
1658 masm.Pop(scriptReg);
1659 masm.Pop(loggerReg);
1661 masm.bind(&noTraceLogger);
1663 return true;
1666 template <>
1667 bool BaselineInterpreterCodeGen::emitTraceLoggerEnter() {
1668 if (JS::TraceLoggerSupported()) {
1669 MOZ_CRASH("NYI: interpreter emitTraceLoggerEnter");
1671 return true;
1674 template <typename Handler>
1675 bool BaselineCodeGen<Handler>::emitTraceLoggerExit() {
1676 AllocatableRegisterSet regs(RegisterSet::Volatile());
1677 Register loggerReg = regs.takeAnyGeneral();
1679 Label noTraceLogger;
1680 if (!traceLoggerToggleOffsets_.append(masm.toggledJump(&noTraceLogger))) {
1681 return false;
1684 masm.Push(loggerReg);
1685 masm.loadTraceLogger(loggerReg);
1687 masm.tracelogStopId(loggerReg, TraceLogger_Baseline, /* force = */ true);
1688 masm.tracelogStopId(loggerReg, TraceLogger_Scripts, /* force = */ true);
1690 masm.Pop(loggerReg);
1692 masm.bind(&noTraceLogger);
1694 return true;
1697 template <typename Handler>
1698 bool BaselineCodeGen<Handler>::emitTraceLoggerResume(
1699 Register baselineScript, AllocatableGeneralRegisterSet& regs) {
1700 Register scriptId = regs.takeAny();
1701 Register loggerReg = regs.takeAny();
1703 Label noTraceLogger;
1704 if (!traceLoggerToggleOffsets_.append(masm.toggledJump(&noTraceLogger))) {
1705 return false;
1708 masm.loadTraceLogger(loggerReg);
1710 Address scriptEvent(baselineScript,
1711 BaselineScript::offsetOfTraceLoggerScriptEvent());
1712 masm.computeEffectiveAddress(scriptEvent, scriptId);
1713 masm.tracelogStartEvent(loggerReg, scriptId);
1714 masm.tracelogStartId(loggerReg, TraceLogger_Baseline, /* force = */ true);
1716 regs.add(loggerReg);
1717 regs.add(scriptId);
1719 masm.bind(&noTraceLogger);
1721 return true;
1723 #endif
1725 template <typename Handler>
1726 void BaselineCodeGen<Handler>::emitProfilerEnterFrame() {
1727 // Store stack position to lastProfilingFrame variable, guarded by a toggled
1728 // jump. Starts off initially disabled.
1729 Label noInstrument;
1730 CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
1731 masm.profilerEnterFrame(masm.getStackPointer(), R0.scratchReg());
1732 masm.bind(&noInstrument);
1734 // Store the start offset in the appropriate location.
1735 MOZ_ASSERT(!profilerEnterFrameToggleOffset_.bound());
1736 profilerEnterFrameToggleOffset_ = toggleOffset;
1739 template <typename Handler>
1740 void BaselineCodeGen<Handler>::emitProfilerExitFrame() {
1741 // Store previous frame to lastProfilingFrame variable, guarded by a toggled
1742 // jump. Starts off initially disabled.
1743 Label noInstrument;
1744 CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
1745 masm.profilerExitFrame();
1746 masm.bind(&noInstrument);
1748 // Store the start offset in the appropriate location.
1749 MOZ_ASSERT(!profilerExitFrameToggleOffset_.bound());
1750 profilerExitFrameToggleOffset_ = toggleOffset;
1753 template <typename Handler>
1754 bool BaselineCodeGen<Handler>::emit_Nop() {
1755 return true;
1758 template <typename Handler>
1759 bool BaselineCodeGen<Handler>::emit_IterNext() {
1760 return true;
1763 template <typename Handler>
1764 bool BaselineCodeGen<Handler>::emit_NopDestructuring() {
1765 return true;
1768 template <typename Handler>
1769 bool BaselineCodeGen<Handler>::emit_TryDestructuring() {
1770 return true;
1773 template <typename Handler>
1774 bool BaselineCodeGen<Handler>::emit_Pop() {
1775 frame.pop();
1776 return true;
1779 template <>
1780 bool BaselineCompilerCodeGen::emit_PopN() {
1781 frame.popn(GET_UINT16(handler.pc()));
1782 return true;
1785 template <>
1786 bool BaselineInterpreterCodeGen::emit_PopN() {
1787 LoadUint16Operand(masm, R0.scratchReg());
1788 frame.popn(R0.scratchReg());
1789 return true;
1792 template <>
1793 bool BaselineCompilerCodeGen::emit_DupAt() {
1794 frame.syncStack(0);
1796 // DupAt takes a value on the stack and re-pushes it on top. It's like
1797 // GetLocal but it addresses from the top of the stack instead of from the
1798 // stack frame.
1800 int depth = -(GET_UINT24(handler.pc()) + 1);
1801 masm.loadValue(frame.addressOfStackValue(depth), R0);
1802 frame.push(R0);
1803 return true;
1806 template <>
1807 bool BaselineInterpreterCodeGen::emit_DupAt() {
1808 LoadUint24Operand(masm, 0, R0.scratchReg());
1809 masm.loadValue(frame.addressOfStackValue(R0.scratchReg()), R0);
1810 frame.push(R0);
1811 return true;
1814 template <typename Handler>
1815 bool BaselineCodeGen<Handler>::emit_Dup() {
1816 // Keep top stack value in R0, sync the rest so that we can use R1. We use
1817 // separate registers because every register can be used by at most one
1818 // StackValue.
1819 frame.popRegsAndSync(1);
1820 masm.moveValue(R0, R1);
1822 // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
1823 frame.push(R1);
1824 frame.push(R0);
1825 return true;
1828 template <typename Handler>
1829 bool BaselineCodeGen<Handler>::emit_Dup2() {
1830 frame.syncStack(0);
1832 masm.loadValue(frame.addressOfStackValue(-2), R0);
1833 masm.loadValue(frame.addressOfStackValue(-1), R1);
1835 frame.push(R0);
1836 frame.push(R1);
1837 return true;
1840 template <typename Handler>
1841 bool BaselineCodeGen<Handler>::emit_Swap() {
1842 // Keep top stack values in R0 and R1.
1843 frame.popRegsAndSync(2);
1845 frame.push(R1);
1846 frame.push(R0);
1847 return true;
1850 template <>
1851 bool BaselineCompilerCodeGen::emit_Pick() {
1852 frame.syncStack(0);
1854 // Pick takes a value on the stack and moves it to the top.
1855 // For instance, pick 2:
1856 // before: A B C D E
1857 // after : A B D E C
1859 // First, move value at -(amount + 1) into R0.
1860 int32_t depth = -(GET_INT8(handler.pc()) + 1);
1861 masm.loadValue(frame.addressOfStackValue(depth), R0);
1863 // Move the other values down.
1864 depth++;
1865 for (; depth < 0; depth++) {
1866 Address source = frame.addressOfStackValue(depth);
1867 Address dest = frame.addressOfStackValue(depth - 1);
1868 masm.loadValue(source, R1);
1869 masm.storeValue(R1, dest);
1872 // Push R0.
1873 frame.pop();
1874 frame.push(R0);
1875 return true;
1878 template <>
1879 bool BaselineInterpreterCodeGen::emit_Pick() {
1880 // First, move the value to move up into R0.
1881 Register scratch = R2.scratchReg();
1882 LoadUint8Operand(masm, scratch);
1883 masm.loadValue(frame.addressOfStackValue(scratch), R0);
1885 // Move the other values down.
1886 Label top, done;
1887 masm.bind(&top);
1888 masm.branchSub32(Assembler::Signed, Imm32(1), scratch, &done);
1890 masm.loadValue(frame.addressOfStackValue(scratch), R1);
1891 masm.storeValue(R1, frame.addressOfStackValue(scratch, sizeof(Value)));
1892 masm.jump(&top);
1895 masm.bind(&done);
1897 // Replace value on top of the stack with R0.
1898 masm.storeValue(R0, frame.addressOfStackValue(-1));
1899 return true;
1902 template <>
1903 bool BaselineCompilerCodeGen::emit_Unpick() {
1904 frame.syncStack(0);
1906 // Unpick takes the value on top of the stack and moves it under the nth value.
1907 // For instance, unpick 2:
1908 // before: A B C D E
1909 // after : A B E C D
1911 // First, move value at -1 into R0.
1912 masm.loadValue(frame.addressOfStackValue(-1), R0);
1914 MOZ_ASSERT(GET_INT8(handler.pc()) > 0,
1915 "Interpreter code assumes JSOp::Unpick operand > 0");
1917 // Move the other values up.
1918 int32_t depth = -(GET_INT8(handler.pc()) + 1);
1919 for (int32_t i = -1; i > depth; i--) {
1920 Address source = frame.addressOfStackValue(i - 1);
1921 Address dest = frame.addressOfStackValue(i);
1922 masm.loadValue(source, R1);
1923 masm.storeValue(R1, dest);
1926 // Store R0 under the nth value.
1927 Address dest = frame.addressOfStackValue(depth);
1928 masm.storeValue(R0, dest);
1929 return true;
1932 template <>
1933 bool BaselineInterpreterCodeGen::emit_Unpick() {
1934 Register scratch = R2.scratchReg();
1935 LoadUint8Operand(masm, scratch);
1937 // Move the top value into R0.
1938 masm.loadValue(frame.addressOfStackValue(-1), R0);
1940 // Overwrite the nth stack value with R0 but first save the old value in R1.
1941 masm.loadValue(frame.addressOfStackValue(scratch), R1);
1942 masm.storeValue(R0, frame.addressOfStackValue(scratch));
1944 // Now, for each slot x from n-1 down to 1, do the following:
1946 // * Store the value in slot x in R0.
1947 // * Store the value in the previous slot (now in R1) in slot x.
1948 // * Move R0 to R1.
1950 #ifdef DEBUG
1951 // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
1952 // negative values.
1954 Label ok;
1955 masm.branch32(Assembler::GreaterThan, scratch, Imm32(0), &ok);
1956 masm.assumeUnreachable("JSOp::Unpick with operand <= 0?");
1957 masm.bind(&ok);
1959 #endif
1961 Label top, done;
1962 masm.bind(&top);
1963 masm.branchSub32(Assembler::Zero, Imm32(1), scratch, &done);
1965 // Overwrite stack slot x with slot x + 1, saving the old value in R1.
1966 masm.loadValue(frame.addressOfStackValue(scratch), R0);
1967 masm.storeValue(R1, frame.addressOfStackValue(scratch));
1968 masm.moveValue(R0, R1);
1969 masm.jump(&top);
1972 // Finally, replace the value on top of the stack (slot 0) with R1. This is
1973 // the value that used to be in slot 1.
1974 masm.bind(&done);
1975 masm.storeValue(R1, frame.addressOfStackValue(-1));
1976 return true;
1979 template <>
1980 void BaselineCompilerCodeGen::emitJump() {
1981 jsbytecode* pc = handler.pc();
1982 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
1983 frame.assertSyncedStack();
1985 jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
1986 masm.jump(handler.labelOf(target));
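// The interpreter version below can't use per-target labels because the jump
// target is only known at runtime. Instead it adds the signed 32-bit jump
// offset to the interpreter pc -- either the dedicated pc register, if the
// platform reserves one (HasInterpreterPCReg()), or the pc slot stored in the
// frame -- and then jumps back to the shared op dispatch code.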
1989 template <>
1990 void BaselineInterpreterCodeGen::emitJump() {
1991 // Add the jump offset from the current bytecode to the current pc. We can use
1992 // R0 and R1 as scratch registers because we jump to the "next op" label, so
1993 // these registers aren't in use at this point.
1994 Register scratch1 = R0.scratchReg();
1995 Register scratch2 = R1.scratchReg();
1996 Register pc = LoadBytecodePC(masm, scratch1);
1997 LoadInt32OperandSignExtendToPtr(masm, pc, scratch2);
1998 if (HasInterpreterPCReg()) {
1999 masm.addPtr(scratch2, InterpreterPCReg);
2000 } else {
2001 masm.addPtr(pc, scratch2);
2002 masm.storePtr(scratch2, frame.addressOfInterpreterPC());
2004 masm.jump(handler.interpretOpWithPCRegLabel());
2007 template <>
2008 void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
2009 ValueOperand val) {
2010 jsbytecode* pc = handler.pc();
2011 MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
2012 frame.assertSyncedStack();
2014 jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
2015 masm.branchTestBooleanTruthy(branchIfTrue, val, handler.labelOf(target));
2018 template <>
2019 void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
2020 ValueOperand val) {
2021 Label done;
2022 masm.branchTestBooleanTruthy(!branchIfTrue, val, &done);
2023 emitJump();
2024 masm.bind(&done);
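// emitTestScriptFlag comes in compiler and interpreter flavors: the compiler
// knows the script at compile time, so it checks the flag on handler.script()
// and emits only the branch that applies, while the interpreter loads the
// script's immutable (or mutable) flags word at runtime and emits both
// branches guarded by a branchTest32. Callers therefore pass lambdas for the
// two sides instead of emitting the code directly.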
2027 template <>
2028 template <typename F1, typename F2>
2029 MOZ_MUST_USE bool BaselineCompilerCodeGen::emitTestScriptFlag(
2030 JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
2031 Register scratch) {
2032 return handler.script()->hasFlag(flag) ? ifSet() : ifNotSet();
2035 template <>
2036 template <typename F1, typename F2>
2037 MOZ_MUST_USE bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2038 JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
2039 Register scratch) {
2040 Label flagNotSet, done;
2041 loadScript(scratch);
2042 masm.branchTest32(Assembler::Zero,
2043 Address(scratch, JSScript::offsetOfImmutableFlags()),
2044 Imm32(uint32_t(flag)), &flagNotSet);
2046 if (!ifSet()) {
2047 return false;
2049 masm.jump(&done);
2051 masm.bind(&flagNotSet);
2053 if (!ifNotSet()) {
2054 return false;
2058 masm.bind(&done);
2059 return true;
2062 template <>
2063 template <typename F>
2064 MOZ_MUST_USE bool BaselineCompilerCodeGen::emitTestScriptFlag(
2065 JSScript::ImmutableFlags flag, bool value, const F& emit,
2066 Register scratch) {
2067 if (handler.script()->hasFlag(flag) == value) {
2068 return emit();
2070 return true;
2073 template <>
2074 template <typename F>
2075 MOZ_MUST_USE bool BaselineCompilerCodeGen::emitTestScriptFlag(
2076 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2077 if (handler.script()->hasFlag(flag) == value) {
2078 return emit();
2080 return true;
2083 template <>
2084 template <typename F>
2085 MOZ_MUST_USE bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2086 JSScript::ImmutableFlags flag, bool value, const F& emit,
2087 Register scratch) {
2088 Label done;
2089 loadScript(scratch);
2090 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2091 Address(scratch, JSScript::offsetOfImmutableFlags()),
2092 Imm32(uint32_t(flag)), &done);
2094 if (!emit()) {
2095 return false;
2099 masm.bind(&done);
2100 return true;
2103 template <>
2104 template <typename F>
2105 MOZ_MUST_USE bool BaselineInterpreterCodeGen::emitTestScriptFlag(
2106 JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
2107 Label done;
2108 loadScript(scratch);
2109 masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
2110 Address(scratch, JSScript::offsetOfMutableFlags()),
2111 Imm32(uint32_t(flag)), &done);
2113 if (!emit()) {
2114 return false;
2118 masm.bind(&done);
2119 return true;
2122 template <typename Handler>
2123 bool BaselineCodeGen<Handler>::emit_Goto() {
2124 frame.syncStack(0);
2125 emitJump();
2126 return true;
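// Most value-inspecting ops below (ToBoolean, the unary/binary arith ops,
// compares, property and element accesses, ...) are implemented with an
// inline cache via emitNextIC(): the inputs are placed in R0 (and R1), the
// call initially hits a fallback stub that can handle any inputs and may
// attach an optimized stub for the cases it observes, and the result comes
// back in R0.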
2129 template <typename Handler>
2130 bool BaselineCodeGen<Handler>::emitToBoolean() {
2131 Label skipIC;
2132 masm.branchTestBoolean(Assembler::Equal, R0, &skipIC);
2134 // Call IC
2135 if (!emitNextIC()) {
2136 return false;
2139 masm.bind(&skipIC);
2140 return true;
2143 template <typename Handler>
2144 bool BaselineCodeGen<Handler>::emitTest(bool branchIfTrue) {
2145 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2147 // Keep top stack value in R0.
2148 frame.popRegsAndSync(1);
2150 if (!knownBoolean && !emitToBoolean()) {
2151 return false;
2154 // IC will leave a BooleanValue in R0, just need to branch on it.
2155 emitTestBooleanTruthy(branchIfTrue, R0);
2156 return true;
2159 template <typename Handler>
2160 bool BaselineCodeGen<Handler>::emit_IfEq() {
2161 return emitTest(false);
2164 template <typename Handler>
2165 bool BaselineCodeGen<Handler>::emit_IfNe() {
2166 return emitTest(true);
2169 template <typename Handler>
2170 bool BaselineCodeGen<Handler>::emitAndOr(bool branchIfTrue) {
2171 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2173 // JSOp::And and JSOp::Or leave the original value on the stack.
2174 frame.syncStack(0);
2176 masm.loadValue(frame.addressOfStackValue(-1), R0);
2177 if (!knownBoolean && !emitToBoolean()) {
2178 return false;
2181 emitTestBooleanTruthy(branchIfTrue, R0);
2182 return true;
2185 template <typename Handler>
2186 bool BaselineCodeGen<Handler>::emit_And() {
2187 return emitAndOr(false);
2190 template <typename Handler>
2191 bool BaselineCodeGen<Handler>::emit_Or() {
2192 return emitAndOr(true);
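// JSOp::Coalesce implements the `??` operator: if the value on top of the
// stack is neither undefined nor null we take the jump, skipping the
// right-hand side; otherwise we fall through and the following bytecode
// (typically a Pop) discards the nullish value and evaluates the right-hand
// side.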
2195 template <typename Handler>
2196 bool BaselineCodeGen<Handler>::emit_Coalesce() {
2197 // Coalesce leaves the original value on the stack.
2198 frame.syncStack(0);
2200 masm.loadValue(frame.addressOfStackValue(-1), R0);
2202 Label undefinedOrNull;
2204 masm.branchTestUndefined(Assembler::Equal, R0, &undefinedOrNull);
2205 masm.branchTestNull(Assembler::Equal, R0, &undefinedOrNull);
2206 emitJump();
2208 masm.bind(&undefinedOrNull);
2209 // fall through
2210 return true;
2213 template <typename Handler>
2214 bool BaselineCodeGen<Handler>::emit_Not() {
2215 bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);
2217 // Keep top stack value in R0.
2218 frame.popRegsAndSync(1);
2220 if (!knownBoolean && !emitToBoolean()) {
2221 return false;
2224 masm.notBoolean(R0);
2226 frame.push(R0, JSVAL_TYPE_BOOLEAN);
2227 return true;
2230 template <typename Handler>
2231 bool BaselineCodeGen<Handler>::emit_Pos() {
2232 return emitUnaryArith();
2235 template <typename Handler>
2236 bool BaselineCodeGen<Handler>::emit_ToNumeric() {
2237 return emitUnaryArith();
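// JSOp::LoopHead marks the target of loop back-edges. Besides acting as a
// jump target it performs the interrupt check, so long-running loops can be
// interrupted, and bumps the script's warm-up counter, which is what
// eventually triggers compiling (and OSR-entering) hot loops at a higher
// tier.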
2240 template <typename Handler>
2241 bool BaselineCodeGen<Handler>::emit_LoopHead() {
2242 if (!emit_JumpTarget()) {
2243 return false;
2245 if (!emitInterruptCheck()) {
2246 return false;
2248 if (!emitWarmUpCounterIncrement()) {
2249 return false;
2251 return true;
2254 template <typename Handler>
2255 bool BaselineCodeGen<Handler>::emit_Void() {
2256 frame.pop();
2257 frame.push(UndefinedValue());
2258 return true;
2261 template <typename Handler>
2262 bool BaselineCodeGen<Handler>::emit_Undefined() {
2263 // If this ever changes, change what JSOp::GImplicitThis does too.
2264 frame.push(UndefinedValue());
2265 return true;
2268 template <typename Handler>
2269 bool BaselineCodeGen<Handler>::emit_Hole() {
2270 frame.push(MagicValue(JS_ELEMENTS_HOLE));
2271 return true;
2274 template <typename Handler>
2275 bool BaselineCodeGen<Handler>::emit_Null() {
2276 frame.push(NullValue());
2277 return true;
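// Ops that can't (or needn't) be handled by an IC call into C++ through the
// VM-call machinery used below: prepareVMCall() starts the exit frame setup,
// pushArg()/push*Arg() push the C++ arguments (in reverse order, last
// argument first), and callVM<Fn, Function>() calls the matching entry in the
// VM function list. As the code after each call shows, pointer and boolean
// results come back in ReturnReg, while out-param Values end up in R0.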
2280 template <typename Handler>
2281 bool BaselineCodeGen<Handler>::emit_CheckIsObj() {
2282 frame.syncStack(0);
2283 masm.loadValue(frame.addressOfStackValue(-1), R0);
2285 Label ok;
2286 masm.branchTestObject(Assembler::Equal, R0, &ok);
2288 prepareVMCall();
2290 pushUint8BytecodeOperandArg(R0.scratchReg());
2292 using Fn = bool (*)(JSContext*, CheckIsObjectKind);
2293 if (!callVM<Fn, ThrowCheckIsObject>()) {
2294 return false;
2297 masm.bind(&ok);
2298 return true;
2301 template <typename Handler>
2302 bool BaselineCodeGen<Handler>::emit_CheckThis() {
2303 frame.syncStack(0);
2304 masm.loadValue(frame.addressOfStackValue(-1), R0);
2306 return emitCheckThis(R0);
2309 template <typename Handler>
2310 bool BaselineCodeGen<Handler>::emit_CheckThisReinit() {
2311 frame.syncStack(0);
2312 masm.loadValue(frame.addressOfStackValue(-1), R0);
2314 return emitCheckThis(R0, /* reinit = */ true);
2317 template <typename Handler>
2318 bool BaselineCodeGen<Handler>::emitCheckThis(ValueOperand val, bool reinit) {
2319 Label thisOK;
2320 if (reinit) {
2321 masm.branchTestMagic(Assembler::Equal, val, &thisOK);
2322 } else {
2323 masm.branchTestMagic(Assembler::NotEqual, val, &thisOK);
2326 prepareVMCall();
2328 if (reinit) {
2329 using Fn = bool (*)(JSContext*);
2330 if (!callVM<Fn, ThrowInitializedThis>()) {
2331 return false;
2333 } else {
2334 using Fn = bool (*)(JSContext*);
2335 if (!callVM<Fn, ThrowUninitializedThis>()) {
2336 return false;
2340 masm.bind(&thisOK);
2341 return true;
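// CheckReturn implements the return-value rules for derived class
// constructors: an explicitly returned object is used as-is, a returned
// undefined means |this| is used instead (and must be initialized, hence the
// emitCheckThis call), and returning anything else throws.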
2344 template <typename Handler>
2345 bool BaselineCodeGen<Handler>::emit_CheckReturn() {
2346 MOZ_ASSERT_IF(handler.maybeScript(),
2347 handler.maybeScript()->isDerivedClassConstructor());
2349 // Load |this| in R0, return value in R1.
2350 frame.popRegsAndSync(1);
2351 emitLoadReturnValue(R1);
2353 Label done, returnOK;
2354 masm.branchTestObject(Assembler::Equal, R1, &done);
2355 masm.branchTestUndefined(Assembler::Equal, R1, &returnOK);
2357 prepareVMCall();
2358 pushArg(R1);
2360 using Fn = bool (*)(JSContext*, HandleValue);
2361 if (!callVM<Fn, ThrowBadDerivedReturn>()) {
2362 return false;
2364 masm.assumeUnreachable("Should throw on bad derived constructor return");
2366 masm.bind(&returnOK);
2368 if (!emitCheckThis(R0)) {
2369 return false;
2372 // Store |this| in the return value slot.
2373 masm.storeValue(R0, frame.addressOfReturnValue());
2374 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
2376 masm.bind(&done);
2377 return true;
2380 template <typename Handler>
2381 bool BaselineCodeGen<Handler>::emit_FunctionThis() {
2382 MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow());
2384 frame.pushThis();
2386 auto boxThis = [this]() {
2387 // Load |thisv| in R0. Skip the call if it's already an object.
2388 Label skipCall;
2389 frame.popRegsAndSync(1);
2390 masm.branchTestObject(Assembler::Equal, R0, &skipCall);
2392 prepareVMCall();
2393 masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());
2395 pushArg(R1.scratchReg());
2397 using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
2398 if (!callVM<Fn, BaselineGetFunctionThis>()) {
2399 return false;
2402 masm.bind(&skipCall);
2403 frame.push(R0);
2404 return true;
2407 // In strict mode code, |this| is left alone.
2408 return emitTestScriptFlag(JSScript::ImmutableFlags::Strict, false, boxThis,
2409 R2.scratchReg());
2412 template <typename Handler>
2413 bool BaselineCodeGen<Handler>::emit_GlobalThis() {
2414 frame.syncStack(0);
2416 auto getNonSyntacticThis = [this]() {
2417 prepareVMCall();
2419 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
2420 pushArg(R0.scratchReg());
2422 using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue);
2423 if (!callVM<Fn, GetNonSyntacticGlobalThis>()) {
2424 return false;
2427 frame.push(R0);
2428 return true;
2430 auto getGlobalThis = [this]() {
2431 loadGlobalThisValue(R0);
2432 frame.push(R0);
2433 return true;
2435 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope,
2436 getNonSyntacticThis, getGlobalThis,
2437 R2.scratchReg());
2440 template <typename Handler>
2441 bool BaselineCodeGen<Handler>::emit_True() {
2442 frame.push(BooleanValue(true));
2443 return true;
2446 template <typename Handler>
2447 bool BaselineCodeGen<Handler>::emit_False() {
2448 frame.push(BooleanValue(false));
2449 return true;
2452 template <typename Handler>
2453 bool BaselineCodeGen<Handler>::emit_Zero() {
2454 frame.push(Int32Value(0));
2455 return true;
2458 template <typename Handler>
2459 bool BaselineCodeGen<Handler>::emit_One() {
2460 frame.push(Int32Value(1));
2461 return true;
2464 template <>
2465 bool BaselineCompilerCodeGen::emit_Int8() {
2466 frame.push(Int32Value(GET_INT8(handler.pc())));
2467 return true;
2470 template <>
2471 bool BaselineInterpreterCodeGen::emit_Int8() {
2472 LoadInt8Operand(masm, R0.scratchReg());
2473 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2474 frame.push(R0);
2475 return true;
2478 template <>
2479 bool BaselineCompilerCodeGen::emit_Int32() {
2480 frame.push(Int32Value(GET_INT32(handler.pc())));
2481 return true;
2484 template <>
2485 bool BaselineInterpreterCodeGen::emit_Int32() {
2486 LoadInt32Operand(masm, R0.scratchReg());
2487 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2488 frame.push(R0);
2489 return true;
2492 template <>
2493 bool BaselineCompilerCodeGen::emit_Uint16() {
2494 frame.push(Int32Value(GET_UINT16(handler.pc())));
2495 return true;
2498 template <>
2499 bool BaselineInterpreterCodeGen::emit_Uint16() {
2500 LoadUint16Operand(masm, R0.scratchReg());
2501 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2502 frame.push(R0);
2503 return true;
2506 template <>
2507 bool BaselineCompilerCodeGen::emit_Uint24() {
2508 frame.push(Int32Value(GET_UINT24(handler.pc())));
2509 return true;
2512 template <>
2513 bool BaselineInterpreterCodeGen::emit_Uint24() {
2514 LoadUint24Operand(masm, 0, R0.scratchReg());
2515 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
2516 frame.push(R0);
2517 return true;
2520 template <typename Handler>
2521 bool BaselineCodeGen<Handler>::emit_ResumeIndex() {
2522 return emit_Uint24();
2525 template <>
2526 bool BaselineCompilerCodeGen::emit_Double() {
2527 frame.push(GET_INLINE_VALUE(handler.pc()));
2528 return true;
2531 template <>
2532 bool BaselineInterpreterCodeGen::emit_Double() {
2533 LoadInlineValueOperand(masm, R0);
2534 frame.push(R0);
2535 return true;
2538 template <>
2539 bool BaselineCompilerCodeGen::emit_BigInt() {
2540 BigInt* bi = handler.script()->getBigInt(handler.pc());
2541 frame.push(BigIntValue(bi));
2542 return true;
2545 template <>
2546 bool BaselineInterpreterCodeGen::emit_BigInt() {
2547 Register scratch1 = R0.scratchReg();
2548 Register scratch2 = R1.scratchReg();
2549 loadScriptGCThing(ScriptGCThingType::BigInt, scratch1, scratch2);
2550 masm.tagValue(JSVAL_TYPE_BIGINT, scratch1, R0);
2551 frame.push(R0);
2552 return true;
2555 template <>
2556 bool BaselineCompilerCodeGen::emit_String() {
2557 frame.push(StringValue(handler.script()->getAtom(handler.pc())));
2558 return true;
2561 template <>
2562 bool BaselineInterpreterCodeGen::emit_String() {
2563 Register scratch1 = R0.scratchReg();
2564 Register scratch2 = R1.scratchReg();
2565 loadScriptGCThing(ScriptGCThingType::Atom, scratch1, scratch2);
2566 masm.tagValue(JSVAL_TYPE_STRING, scratch1, R0);
2567 frame.push(R0);
2568 return true;
2571 template <>
2572 bool BaselineCompilerCodeGen::emit_Symbol() {
2573 unsigned which = GET_UINT8(handler.pc());
2574 JS::Symbol* sym = cx->runtime()->wellKnownSymbols->get(which);
2575 frame.push(SymbolValue(sym));
2576 return true;
2579 template <>
2580 bool BaselineInterpreterCodeGen::emit_Symbol() {
2581 Register scratch1 = R0.scratchReg();
2582 Register scratch2 = R1.scratchReg();
2583 LoadUint8Operand(masm, scratch1);
2585 masm.movePtr(ImmPtr(cx->runtime()->wellKnownSymbols), scratch2);
2586 masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch1);
2588 masm.tagValue(JSVAL_TYPE_SYMBOL, scratch1, R0);
2589 frame.push(R0);
2590 return true;
2593 JSObject* BaselineCompilerHandler::maybeNoCloneSingletonObject() {
2594 Realm* realm = script()->realm();
2595 if (realm->creationOptions().cloneSingletons()) {
2596 return nullptr;
2599 realm->behaviors().setSingletonsAsValues();
2600 return script()->getObject(pc());
2603 template <typename Handler>
2604 bool BaselineCodeGen<Handler>::emit_Object() {
2605 // If we know we don't have to clone the object literal, just push it
2606 // directly. Note that the interpreter always does the VM call; that's fine
2607 // because this op is only used in run-once code.
2608 if (JSObject* obj = handler.maybeNoCloneSingletonObject()) {
2609 frame.push(ObjectValue(*obj));
2610 return true;
2613 prepareVMCall();
2615 pushBytecodePCArg();
2616 pushScriptArg();
2618 using Fn = JSObject* (*)(JSContext*, HandleScript, jsbytecode*);
2619 if (!callVM<Fn, SingletonObjectLiteralOperation>()) {
2620 return false;
2623 // Box and push return value.
2624 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2625 frame.push(R0);
2626 return true;
2629 template <>
2630 bool BaselineCompilerCodeGen::emit_CallSiteObj() {
2631 RootedScript script(cx, handler.script());
2632 JSObject* cso = ProcessCallSiteObjOperation(cx, script, handler.pc());
2633 if (!cso) {
2634 return false;
2637 frame.push(ObjectValue(*cso));
2638 return true;
2641 template <>
2642 bool BaselineInterpreterCodeGen::emit_CallSiteObj() {
2643 prepareVMCall();
2645 pushBytecodePCArg();
2646 pushScriptArg();
2648 using Fn = ArrayObject* (*)(JSContext*, HandleScript, jsbytecode*);
2649 if (!callVM<Fn, ProcessCallSiteObjOperation>()) {
2650 return false;
2653 // Box and push return value.
2654 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2655 frame.push(R0);
2656 return true;
2659 template <typename Handler>
2660 bool BaselineCodeGen<Handler>::emit_RegExp() {
2661 prepareVMCall();
2662 pushScriptGCThingArg(ScriptGCThingType::RegExp, R0.scratchReg(),
2663 R1.scratchReg());
2665 using Fn = JSObject* (*)(JSContext*, Handle<RegExpObject*>);
2666 if (!callVM<Fn, CloneRegExpObject>()) {
2667 return false;
2670 // Box and push return value.
2671 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2672 frame.push(R0);
2673 return true;
2676 template <typename Handler>
2677 bool BaselineCodeGen<Handler>::emit_Lambda() {
2678 prepareVMCall();
2679 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
2681 pushArg(R0.scratchReg());
2682 pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
2683 R1.scratchReg());
2685 using Fn = JSObject* (*)(JSContext*, HandleFunction, HandleObject);
2686 if (!callVM<Fn, js::Lambda>()) {
2687 return false;
2690 // Box and push return value.
2691 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2692 frame.push(R0);
2693 return true;
2696 template <typename Handler>
2697 bool BaselineCodeGen<Handler>::emit_LambdaArrow() {
2698 // Keep pushed newTarget in R0.
2699 frame.popRegsAndSync(1);
2701 prepareVMCall();
2702 masm.loadPtr(frame.addressOfEnvironmentChain(), R2.scratchReg());
2704 pushArg(R0);
2705 pushArg(R2.scratchReg());
2706 pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
2707 R1.scratchReg());
2709 using Fn =
2710 JSObject* (*)(JSContext*, HandleFunction, HandleObject, HandleValue);
2711 if (!callVM<Fn, js::LambdaArrow>()) {
2712 return false;
2715 // Box and push return value.
2716 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2717 frame.push(R0);
2718 return true;
2721 template <typename Handler>
2722 bool BaselineCodeGen<Handler>::emit_SetFunName() {
2723 frame.popRegsAndSync(2);
2725 frame.push(R0);
2726 frame.syncStack(0);
2728 masm.unboxObject(R0, R0.scratchReg());
2730 prepareVMCall();
2732 pushUint8BytecodeOperandArg(R2.scratchReg());
2733 pushArg(R1);
2734 pushArg(R0.scratchReg());
2736 using Fn =
2737 bool (*)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
2738 return callVM<Fn, SetFunctionName>();
2741 template <typename Handler>
2742 bool BaselineCodeGen<Handler>::emit_BitOr() {
2743 return emitBinaryArith();
2746 template <typename Handler>
2747 bool BaselineCodeGen<Handler>::emit_BitXor() {
2748 return emitBinaryArith();
2751 template <typename Handler>
2752 bool BaselineCodeGen<Handler>::emit_BitAnd() {
2753 return emitBinaryArith();
2756 template <typename Handler>
2757 bool BaselineCodeGen<Handler>::emit_Lsh() {
2758 return emitBinaryArith();
2761 template <typename Handler>
2762 bool BaselineCodeGen<Handler>::emit_Rsh() {
2763 return emitBinaryArith();
2766 template <typename Handler>
2767 bool BaselineCodeGen<Handler>::emit_Ursh() {
2768 return emitBinaryArith();
2771 template <typename Handler>
2772 bool BaselineCodeGen<Handler>::emit_Add() {
2773 return emitBinaryArith();
2776 template <typename Handler>
2777 bool BaselineCodeGen<Handler>::emit_Sub() {
2778 return emitBinaryArith();
2781 template <typename Handler>
2782 bool BaselineCodeGen<Handler>::emit_Mul() {
2783 return emitBinaryArith();
2786 template <typename Handler>
2787 bool BaselineCodeGen<Handler>::emit_Div() {
2788 return emitBinaryArith();
2791 template <typename Handler>
2792 bool BaselineCodeGen<Handler>::emit_Mod() {
2793 return emitBinaryArith();
2796 template <typename Handler>
2797 bool BaselineCodeGen<Handler>::emit_Pow() {
2798 return emitBinaryArith();
2801 template <typename Handler>
2802 bool BaselineCodeGen<Handler>::emitBinaryArith() {
2803 // Keep the top two stack values in R0 and R2.
2804 frame.popRegsAndSync(2);
2806 // Call IC
2807 if (!emitNextIC()) {
2808 return false;
2811 // Mark R0 as pushed stack value.
2812 frame.push(R0);
2813 return true;
2816 template <typename Handler>
2817 bool BaselineCodeGen<Handler>::emitUnaryArith() {
2818 // Keep top stack value in R0.
2819 frame.popRegsAndSync(1);
2821 // Call IC
2822 if (!emitNextIC()) {
2823 return false;
2826 // Mark R0 as pushed stack value.
2827 frame.push(R0);
2828 return true;
2831 template <typename Handler>
2832 bool BaselineCodeGen<Handler>::emit_BitNot() {
2833 return emitUnaryArith();
2836 template <typename Handler>
2837 bool BaselineCodeGen<Handler>::emit_Neg() {
2838 return emitUnaryArith();
2841 template <typename Handler>
2842 bool BaselineCodeGen<Handler>::emit_Inc() {
2843 return emitUnaryArith();
2846 template <typename Handler>
2847 bool BaselineCodeGen<Handler>::emit_Dec() {
2848 return emitUnaryArith();
2851 template <typename Handler>
2852 bool BaselineCodeGen<Handler>::emit_Lt() {
2853 return emitCompare();
2856 template <typename Handler>
2857 bool BaselineCodeGen<Handler>::emit_Le() {
2858 return emitCompare();
2861 template <typename Handler>
2862 bool BaselineCodeGen<Handler>::emit_Gt() {
2863 return emitCompare();
2866 template <typename Handler>
2867 bool BaselineCodeGen<Handler>::emit_Ge() {
2868 return emitCompare();
2871 template <typename Handler>
2872 bool BaselineCodeGen<Handler>::emit_Eq() {
2873 return emitCompare();
2876 template <typename Handler>
2877 bool BaselineCodeGen<Handler>::emit_Ne() {
2878 return emitCompare();
2881 template <typename Handler>
2882 bool BaselineCodeGen<Handler>::emitCompare() {
2885 // Keep the top two stack values in R0 and R1.
2886 frame.popRegsAndSync(2);
2888 // Call IC.
2889 if (!emitNextIC()) {
2890 return false;
2893 // Mark R0 as pushed stack value.
2894 frame.push(R0, JSVAL_TYPE_BOOLEAN);
2895 return true;
2898 template <typename Handler>
2899 bool BaselineCodeGen<Handler>::emit_StrictEq() {
2900 return emitCompare();
2903 template <typename Handler>
2904 bool BaselineCodeGen<Handler>::emit_StrictNe() {
2905 return emitCompare();
2908 template <typename Handler>
2909 bool BaselineCodeGen<Handler>::emit_Case() {
2910 frame.popRegsAndSync(1);
2912 Label done;
2913 masm.branchTestBooleanTruthy(/* branchIfTrue */ false, R0, &done);
2915 // Pop the switch value if the case matches.
2916 masm.addToStackPtr(Imm32(sizeof(Value)));
2917 emitJump();
2919 masm.bind(&done);
2920 return true;
2923 template <typename Handler>
2924 bool BaselineCodeGen<Handler>::emit_Default() {
2925 frame.pop();
2926 return emit_Goto();
2929 template <typename Handler>
2930 bool BaselineCodeGen<Handler>::emit_Lineno() {
2931 return true;
2934 template <typename Handler>
2935 bool BaselineCodeGen<Handler>::emit_NewArray() {
2936 frame.syncStack(0);
2938 // Pass length in R0.
2939 loadInt32LengthBytecodeOperand(R0.scratchReg());
2941 if (!emitNextIC()) {
2942 return false;
2945 frame.push(R0);
2946 return true;
2949 template <>
2950 bool BaselineCompilerCodeGen::emit_NewArrayCopyOnWrite() {
2951 // This is like the interpreter implementation, but we can call
2952 // getOrFixupCopyOnWriteObject at compile-time.
2954 RootedScript scriptRoot(cx, handler.script());
2955 JSObject* obj =
2956 ObjectGroup::getOrFixupCopyOnWriteObject(cx, scriptRoot, handler.pc());
2957 if (!obj) {
2958 return false;
2961 prepareVMCall();
2963 pushArg(ImmGCPtr(obj));
2965 using Fn = ArrayObject* (*)(JSContext*, HandleArrayObject);
2966 if (!callVM<Fn, js::NewDenseCopyOnWriteArray>()) {
2967 return false;
2970 // Box and push return value.
2971 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2972 frame.push(R0);
2973 return true;
2976 template <>
2977 bool BaselineInterpreterCodeGen::emit_NewArrayCopyOnWrite() {
2978 prepareVMCall();
2980 pushBytecodePCArg();
2981 pushScriptArg();
2983 using Fn = ArrayObject* (*)(JSContext*, HandleScript, jsbytecode*);
2984 if (!callVM<Fn, NewArrayCopyOnWriteOperation>()) {
2985 return false;
2988 // Box and push return value.
2989 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
2990 frame.push(R0);
2991 return true;
2994 template <typename Handler>
2995 bool BaselineCodeGen<Handler>::emit_InitElemArray() {
2996 // Keep the object and rhs on the stack.
2997 frame.syncStack(0);
2999 // Load object in R0, index in R1.
3000 masm.loadValue(frame.addressOfStackValue(-2), R0);
3001 loadInt32IndexBytecodeOperand(R1);
3003 // Call IC.
3004 if (!emitNextIC()) {
3005 return false;
3008 // Pop the rhs, so that the object is on the top of the stack.
3009 frame.pop();
3010 return true;
3013 template <typename Handler>
3014 bool BaselineCodeGen<Handler>::emit_NewObject() {
3015 return emitNewObject();
3018 template <typename Handler>
3019 bool BaselineCodeGen<Handler>::emit_NewObjectWithGroup() {
3020 return emitNewObject();
3023 template <typename Handler>
3024 bool BaselineCodeGen<Handler>::emit_NewInit() {
3025 return emitNewObject();
3028 template <typename Handler>
3029 bool BaselineCodeGen<Handler>::emitNewObject() {
3030 frame.syncStack(0);
3032 if (!emitNextIC()) {
3033 return false;
3036 frame.push(R0);
3037 return true;
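// InitElem (and SetElem/GetElemSuper below) juggles three values: the IC
// expects its register inputs in R0/R1 (the object and index here), with the
// remaining value left on the stack. The rhs is therefore parked in the
// frame's scratch Value slot while the expression stack is rearranged, then
// pushed back (pushScratchValue) before the IC call, so everything stays on
// the synced stack and remains visible to GC and the decompiler.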
3040 template <typename Handler>
3041 bool BaselineCodeGen<Handler>::emit_InitElem() {
3042 // Store RHS in the scratch slot.
3043 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
3044 frame.pop();
3046 // Keep object and index in R0 and R1.
3047 frame.popRegsAndSync(2);
3049 // Push the object to store the result of the IC.
3050 frame.push(R0);
3051 frame.syncStack(0);
3053 // Keep RHS on the stack.
3054 frame.pushScratchValue();
3056 // Call IC.
3057 if (!emitNextIC()) {
3058 return false;
3061 // Pop the rhs, so that the object is on the top of the stack.
3062 frame.pop();
3063 return true;
3066 template <typename Handler>
3067 bool BaselineCodeGen<Handler>::emit_InitHiddenElem() {
3068 return emit_InitElem();
3071 template <typename Handler>
3072 bool BaselineCodeGen<Handler>::emit_InitLockedElem() {
3073 return emit_InitElem();
3076 template <typename Handler>
3077 bool BaselineCodeGen<Handler>::emit_MutateProto() {
3078 // Keep values on the stack for the decompiler.
3079 frame.syncStack(0);
3081 masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
3082 masm.loadValue(frame.addressOfStackValue(-1), R1);
3084 prepareVMCall();
3086 pushArg(R1);
3087 pushArg(R0.scratchReg());
3089 using Fn = bool (*)(JSContext*, HandlePlainObject, HandleValue);
3090 if (!callVM<Fn, MutatePrototype>()) {
3091 return false;
3094 frame.pop();
3095 return true;
3098 template <typename Handler>
3099 bool BaselineCodeGen<Handler>::emit_InitProp() {
3100 // Load lhs in R0, rhs in R1.
3101 frame.syncStack(0);
3102 masm.loadValue(frame.addressOfStackValue(-2), R0);
3103 masm.loadValue(frame.addressOfStackValue(-1), R1);
3105 // Call IC.
3106 if (!emitNextIC()) {
3107 return false;
3110 // Leave the object on the stack.
3111 frame.pop();
3112 return true;
3115 template <typename Handler>
3116 bool BaselineCodeGen<Handler>::emit_InitLockedProp() {
3117 return emit_InitProp();
3120 template <typename Handler>
3121 bool BaselineCodeGen<Handler>::emit_InitHiddenProp() {
3122 return emit_InitProp();
3125 template <typename Handler>
3126 bool BaselineCodeGen<Handler>::emit_GetElem() {
3127 // Keep top two stack values in R0 and R1.
3128 frame.popRegsAndSync(2);
3130 // Call IC.
3131 if (!emitNextIC()) {
3132 return false;
3135 // Mark R0 as pushed stack value.
3136 frame.push(R0);
3137 return true;
3140 template <typename Handler>
3141 bool BaselineCodeGen<Handler>::emit_GetElemSuper() {
3142 // Store obj in the scratch slot.
3143 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
3144 frame.pop();
3146 // Keep receiver and index in R0 and R1.
3147 frame.popRegsAndSync(2);
3149 // Keep obj on the stack.
3150 frame.pushScratchValue();
3152 if (!emitNextIC()) {
3153 return false;
3156 frame.pop();
3157 frame.push(R0);
3158 return true;
3161 template <typename Handler>
3162 bool BaselineCodeGen<Handler>::emit_CallElem() {
3163 return emit_GetElem();
3166 template <typename Handler>
3167 bool BaselineCodeGen<Handler>::emit_SetElem() {
3168 // Store RHS in the scratch slot.
3169 frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
3170 frame.pop();
3172 // Keep object and index in R0 and R1.
3173 frame.popRegsAndSync(2);
3175 // Keep RHS on the stack.
3176 frame.pushScratchValue();
3178 // Call IC.
3179 if (!emitNextIC()) {
3180 return false;
3183 return true;
3186 template <typename Handler>
3187 bool BaselineCodeGen<Handler>::emit_StrictSetElem() {
3188 return emit_SetElem();
3191 template <typename Handler>
3192 bool BaselineCodeGen<Handler>::emitSetElemSuper(bool strict) {
3193 // The incoming stack is |receiver, propval, obj, rval|. We need to shuffle the
3194 // stack so that only rval is left when the operation is complete.
3196 // Pop rval into R0, then load receiver into R1 and replace with rval.
3197 frame.popRegsAndSync(1);
3198 masm.loadValue(frame.addressOfStackValue(-3), R1);
3199 masm.storeValue(R0, frame.addressOfStackValue(-3));
3201 prepareVMCall();
3203 pushArg(Imm32(strict));
3204 pushArg(R1); // receiver
3205 pushArg(R0); // rval
3206 masm.loadValue(frame.addressOfStackValue(-2), R0);
3207 pushArg(R0); // propval
3208 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
3209 pushArg(R0.scratchReg()); // obj
3211 using Fn = bool (*)(JSContext*, HandleObject, HandleValue, HandleValue,
3212 HandleValue, bool);
3213 if (!callVM<Fn, js::SetObjectElementWithReceiver>()) {
3214 return false;
3217 frame.popn(2);
3218 return true;
3221 template <typename Handler>
3222 bool BaselineCodeGen<Handler>::emit_SetElemSuper() {
3223 return emitSetElemSuper(/* strict = */ false);
3226 template <typename Handler>
3227 bool BaselineCodeGen<Handler>::emit_StrictSetElemSuper() {
3228 return emitSetElemSuper(/* strict = */ true);
3231 template <typename Handler>
3232 bool BaselineCodeGen<Handler>::emitDelElem(bool strict) {
3233 // Keep values on the stack for the decompiler.
3234 frame.syncStack(0);
3235 masm.loadValue(frame.addressOfStackValue(-2), R0);
3236 masm.loadValue(frame.addressOfStackValue(-1), R1);
3238 prepareVMCall();
3240 pushArg(R1);
3241 pushArg(R0);
3243 using Fn = bool (*)(JSContext*, HandleValue, HandleValue, bool*);
3244 if (strict) {
3245 if (!callVM<Fn, DelElemOperation<true>>()) {
3246 return false;
3248 } else {
3249 if (!callVM<Fn, DelElemOperation<false>>()) {
3250 return false;
3254 masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
3255 frame.popn(2);
3256 frame.push(R1);
3257 return true;
3260 template <typename Handler>
3261 bool BaselineCodeGen<Handler>::emit_DelElem() {
3262 return emitDelElem(/* strict = */ false);
3265 template <typename Handler>
3266 bool BaselineCodeGen<Handler>::emit_StrictDelElem() {
3267 return emitDelElem(/* strict = */ true);
3270 template <typename Handler>
3271 bool BaselineCodeGen<Handler>::emit_In() {
3272 frame.popRegsAndSync(2);
3274 if (!emitNextIC()) {
3275 return false;
3278 frame.push(R0);
3279 return true;
3282 template <typename Handler>
3283 bool BaselineCodeGen<Handler>::emit_HasOwn() {
3284 frame.popRegsAndSync(2);
3286 if (!emitNextIC()) {
3287 return false;
3290 frame.push(R0);
3291 return true;
3294 template <typename Handler>
3295 bool BaselineCodeGen<Handler>::emit_CheckPrivateField() {
3296 // Keep key and val on the stack.
3297 frame.syncStack(0);
3298 masm.loadValue(frame.addressOfStackValue(-2), R0);
3299 masm.loadValue(frame.addressOfStackValue(-1), R1);
3301 if (!emitNextIC()) {
3302 return false;
3305 frame.push(R0);
3306 return true;
3309 template <>
3310 bool BaselineCompilerCodeGen::tryOptimizeGetGlobalName() {
3311 PropertyName* name = handler.script()->getName(handler.pc());
3313 // These names are non-configurable on the global and cannot be shadowed.
3314 if (name == cx->names().undefined) {
3315 frame.push(UndefinedValue());
3316 return true;
3318 if (name == cx->names().NaN) {
3319 frame.push(JS::NaNValue());
3320 return true;
3322 if (name == cx->names().Infinity) {
3323 frame.push(JS::InfinityValue());
3324 return true;
3327 return false;
3330 template <>
3331 bool BaselineInterpreterCodeGen::tryOptimizeGetGlobalName() {
3332 // Interpreter doesn't optimize simple GETGNAMEs.
3333 return false;
3336 template <typename Handler>
3337 bool BaselineCodeGen<Handler>::emit_GetGName() {
3338 auto getName = [this]() { return emit_GetName(); };
3340 auto getGlobalName = [this]() {
3341 if (tryOptimizeGetGlobalName()) {
3342 return true;
3345 frame.syncStack(0);
3347 loadGlobalLexicalEnvironment(R0.scratchReg());
3349 // Call IC.
3350 if (!emitNextIC()) {
3351 return false;
3354 // Mark R0 as pushed stack value.
3355 frame.push(R0);
3356 return true;
3358 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope,
3359 getName, getGlobalName, R2.scratchReg());
3362 template <>
3363 bool BaselineCompilerCodeGen::tryOptimizeBindGlobalName() {
3364 JSScript* script = handler.script();
3365 if (script->hasNonSyntacticScope()) {
3366 return false;
3369 RootedGlobalObject global(cx, &script->global());
3370 RootedPropertyName name(cx, script->getName(handler.pc()));
3371 if (JSObject* binding = MaybeOptimizeBindGlobalName(cx, global, name)) {
3372 frame.push(ObjectValue(*binding));
3373 return true;
3375 return false;
3378 template <>
3379 bool BaselineInterpreterCodeGen::tryOptimizeBindGlobalName() {
3380 // Interpreter doesn't optimize simple BINDGNAMEs.
3381 return false;
3384 template <typename Handler>
3385 bool BaselineCodeGen<Handler>::emit_BindGName() {
3386 if (tryOptimizeBindGlobalName()) {
3387 return true;
3389 return emitBindName(JSOp::BindGName);
3392 template <typename Handler>
3393 bool BaselineCodeGen<Handler>::emit_BindVar() {
3394 frame.syncStack(0);
3395 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3397 prepareVMCall();
3398 pushArg(R0.scratchReg());
3400 using Fn = JSObject* (*)(JSContext*, JSObject*);
3401 if (!callVM<Fn, BindVarOperation>()) {
3402 return false;
3405 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
3406 frame.push(R0);
3407 return true;
3410 template <typename Handler>
3411 bool BaselineCodeGen<Handler>::emit_SetProp() {
3412 // Keep lhs in R0, rhs in R1.
3413 frame.popRegsAndSync(2);
3415 // Keep RHS on the stack.
3416 frame.push(R1);
3417 frame.syncStack(0);
3419 // Call IC.
3420 if (!emitNextIC()) {
3421 return false;
3424 return true;
3427 template <typename Handler>
3428 bool BaselineCodeGen<Handler>::emit_StrictSetProp() {
3429 return emit_SetProp();
3432 template <typename Handler>
3433 bool BaselineCodeGen<Handler>::emit_SetName() {
3434 return emit_SetProp();
3437 template <typename Handler>
3438 bool BaselineCodeGen<Handler>::emit_StrictSetName() {
3439 return emit_SetProp();
3442 template <typename Handler>
3443 bool BaselineCodeGen<Handler>::emit_SetGName() {
3444 return emit_SetProp();
3447 template <typename Handler>
3448 bool BaselineCodeGen<Handler>::emit_StrictSetGName() {
3449 return emit_SetProp();
3452 template <typename Handler>
3453 bool BaselineCodeGen<Handler>::emitSetPropSuper(bool strict) {
3454 // The incoming stack is |receiver, obj, rval|. We need to shuffle the stack so
3455 // that only rval is left when the operation is complete.
3457 // Pop rval into R0, then load receiver into R1 and replace with rval.
3458 frame.popRegsAndSync(1);
3459 masm.loadValue(frame.addressOfStackValue(-2), R1);
3460 masm.storeValue(R0, frame.addressOfStackValue(-2));
3462 prepareVMCall();
3464 pushArg(Imm32(strict));
3465 pushArg(R0); // rval
3466 pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
3467 pushArg(R1); // receiver
3468 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
3469 pushArg(R0.scratchReg()); // obj
3471 using Fn = bool (*)(JSContext*, HandleObject, HandleValue, HandlePropertyName,
3472 HandleValue, bool);
3473 if (!callVM<Fn, js::SetPropertySuper>()) {
3474 return false;
3477 frame.pop();
3478 return true;
3481 template <typename Handler>
3482 bool BaselineCodeGen<Handler>::emit_SetPropSuper() {
3483 return emitSetPropSuper(/* strict = */ false);
3486 template <typename Handler>
3487 bool BaselineCodeGen<Handler>::emit_StrictSetPropSuper() {
3488 return emitSetPropSuper(/* strict = */ true);
3491 template <typename Handler>
3492 bool BaselineCodeGen<Handler>::emit_GetProp() {
3493 // Keep object in R0.
3494 frame.popRegsAndSync(1);
3496 // Call IC.
3497 if (!emitNextIC()) {
3498 return false;
3501 // Mark R0 as pushed stack value.
3502 frame.push(R0);
3503 return true;
3506 template <typename Handler>
3507 bool BaselineCodeGen<Handler>::emit_CallProp() {
3508 return emit_GetProp();
3511 template <typename Handler>
3512 bool BaselineCodeGen<Handler>::emit_Length() {
3513 return emit_GetProp();
3516 template <typename Handler>
3517 bool BaselineCodeGen<Handler>::emit_GetBoundName() {
3518 return emit_GetProp();
3521 template <typename Handler>
3522 bool BaselineCodeGen<Handler>::emit_GetPropSuper() {
3523 // Receiver -> R1, Object -> R0
3524 frame.popRegsAndSync(1);
3525 masm.loadValue(frame.addressOfStackValue(-1), R1);
3526 frame.pop();
3528 if (!emitNextIC()) {
3529 return false;
3532 frame.push(R0);
3533 return true;
3536 template <typename Handler>
3537 bool BaselineCodeGen<Handler>::emitDelProp(bool strict) {
3538 // Keep value on the stack for the decompiler.
3539 frame.syncStack(0);
3540 masm.loadValue(frame.addressOfStackValue(-1), R0);
3542 prepareVMCall();
3544 pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
3545 pushArg(R0);
3547 using Fn = bool (*)(JSContext*, HandleValue, HandlePropertyName, bool*);
3548 if (strict) {
3549 if (!callVM<Fn, DelPropOperation<true>>()) {
3550 return false;
3552 } else {
3553 if (!callVM<Fn, DelPropOperation<false>>()) {
3554 return false;
3558 masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
3559 frame.pop();
3560 frame.push(R1);
3561 return true;
3564 template <typename Handler>
3565 bool BaselineCodeGen<Handler>::emit_DelProp() {
3566 return emitDelProp(/* strict = */ false);
3569 template <typename Handler>
3570 bool BaselineCodeGen<Handler>::emit_StrictDelProp() {
3571 return emitDelProp(/* strict = */ true);
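// Aliased variables (closed-over bindings) are addressed by an
// EnvironmentCoordinate: a hop count saying how many environment objects to
// walk up on the environment chain, plus a slot index in the final object.
// The slot lives either in the object's fixed slots or, for large
// environments, in its dynamic slots array. In rough pseudo-code:
//
//   JSObject* env = frame->environmentChain();
//   for (uint32_t i = 0; i < hops; i++) {
//     env = &env->as<EnvironmentObject>().enclosingEnvironment();
//   }
//   Value v = slot < NativeObject::MAX_FIXED_SLOTS
//                 ? fixedSlots[slot]
//                 : dynamicSlots[slot - NativeObject::MAX_FIXED_SLOTS];
//
// The compiler resolves the coordinate at compile time; the interpreter
// decodes the hops and slot operands at runtime (see LoadAliasedVarEnv and
// emitGetAliasedVar below).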
3574 template <>
3575 void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg) {
3576 EnvironmentCoordinate ec(handler.pc());
3578 masm.loadPtr(frame.addressOfEnvironmentChain(), reg);
3579 for (unsigned i = ec.hops(); i; i--) {
3580 masm.unboxObject(
3581 Address(reg, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
3585 template <>
3586 void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg) {
3587 MOZ_CRASH("Shouldn't call this for interpreter");
3590 template <>
3591 Address BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
3592 Register objReg, Register reg) {
3593 EnvironmentCoordinate ec(handler.pc());
3595 if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
3596 return Address(objReg, NativeObject::getFixedSlotOffset(ec.slot()));
3599 uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
3600 masm.loadPtr(Address(objReg, NativeObject::offsetOfSlots()), reg);
3601 return Address(reg, slot * sizeof(Value));
3604 template <>
3605 Address BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
3606 Register objReg, Register reg) {
3607 MOZ_CRASH("Shouldn't call this for interpreter");
3610 template <typename Handler>
3611 Address BaselineCodeGen<Handler>::getEnvironmentCoordinateAddress(
3612 Register reg) {
3613 getEnvironmentCoordinateObject(reg);
3614 return getEnvironmentCoordinateAddressFromObject(reg, reg);
3617 // For a JOF_ENVCOORD op, load the number of hops from the bytecode operand and
3618 // walk up that many environment objects on the environment chain.
3619 static void LoadAliasedVarEnv(MacroAssembler& masm, Register env,
3620 Register scratch) {
3621 static_assert(ENVCOORD_HOPS_LEN == 1,
3622 "Code assumes number of hops is stored in uint8 operand");
3623 LoadUint8Operand(masm, scratch);
3625 Label top, done;
3626 masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
3627 masm.bind(&top);
3629 Address nextEnv(env, EnvironmentObject::offsetOfEnclosingEnvironment());
3630 masm.unboxObject(nextEnv, env);
3631 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
3633 masm.bind(&done);
3636 template <>
3637 void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest) {
3638 frame.syncStack(0);
3640 Address address = getEnvironmentCoordinateAddress(R0.scratchReg());
3641 masm.loadValue(address, dest);
3644 template <>
3645 void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest) {
3646 Register env = R0.scratchReg();
3647 Register scratch = R1.scratchReg();
3649 // Load the right environment object.
3650 masm.loadPtr(frame.addressOfEnvironmentChain(), env);
3651 LoadAliasedVarEnv(masm, env, scratch);
3653 // Load the slot index.
3654 static_assert(ENVCOORD_SLOT_LEN == 3,
3655 "Code assumes slot is stored in uint24 operand");
3656 LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch);
3658 // Load the Value from a fixed or dynamic slot.
3659 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3660 Label isDynamic, done;
3661 masm.branch32(Assembler::AboveOrEqual, scratch,
3662 Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
3664 uint32_t offset = NativeObject::getFixedSlotOffset(0);
3665 masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
3666 masm.jump(&done);
3668 masm.bind(&isDynamic);
3670 masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), env);
3672 // Use an offset to subtract the number of fixed slots.
3673 int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
3674 masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
3676 masm.bind(&done);
3679 template <typename Handler>
3680 bool BaselineCodeGen<Handler>::emit_GetAliasedVar() {
3681 emitGetAliasedVar(R0);
3683 if (IsTypeInferenceEnabled() && handler.maybeIonCompileable()) {
3684 // No need to monitor types if we know Ion can't compile this script.
3685 if (!emitNextIC()) {
3686 return false;
3690 frame.push(R0);
3691 return true;
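// Stores into aliased slots need both GC write barriers: a pre-barrier on the
// old slot value (guardedCallPreBarrier*) for incremental marking, and a
// post-barrier (the call to postBarrierSlot_) that records the edge in the
// store buffer. The post-barrier is only needed when a nursery-allocated
// value is stored into a tenured environment object, hence the
// branchPtrInNurseryChunk / branchValueIsNurseryCell checks that skip it
// otherwise.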
3694 template <>
3695 bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
3696 // Keep rvalue in R0.
3697 frame.popRegsAndSync(1);
3698 Register objReg = R2.scratchReg();
3700 getEnvironmentCoordinateObject(objReg);
3701 Address address =
3702 getEnvironmentCoordinateAddressFromObject(objReg, R1.scratchReg());
3703 masm.guardedCallPreBarrier(address, MIRType::Value);
3704 masm.storeValue(R0, address);
3705 frame.push(R0);
3707 // Only R0 is live at this point.
3708 // Scope coordinate object is already in R2.scratchReg().
3709 Register temp = R1.scratchReg();
3711 Label skipBarrier;
3712 masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
3713 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
3715 masm.call(&postBarrierSlot_); // Won't clobber R0
3717 masm.bind(&skipBarrier);
3718 return true;
3721 template <>
3722 bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
3723 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
3724 regs.take(BaselineFrameReg);
3725 regs.take(R2);
3726 if (HasInterpreterPCReg()) {
3727 regs.take(InterpreterPCReg);
3730 Register env = regs.takeAny();
3731 Register scratch1 = regs.takeAny();
3732 Register scratch2 = regs.takeAny();
3733 Register scratch3 = regs.takeAny();
3735 // Load the right environment object.
3736 masm.loadPtr(frame.addressOfEnvironmentChain(), env);
3737 LoadAliasedVarEnv(masm, env, scratch1);
3739 // Load the slot index.
3740 static_assert(ENVCOORD_SLOT_LEN == 3,
3741 "Code assumes slot is stored in uint24 operand");
3742 LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch1);
3744 // Store the RHS Value in R2.
3745 masm.loadValue(frame.addressOfStackValue(-1), R2);
3747 // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
3748 // guardedCallPreBarrierAnyZone once to avoid code bloat.
3750 // See EnvironmentObject::nonExtensibleIsFixedSlot.
3751 Label isDynamic, done;
3752 masm.branch32(Assembler::AboveOrEqual, scratch1,
3753 Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
3755 uint32_t offset = NativeObject::getFixedSlotOffset(0);
3756 BaseValueIndex slotAddr(env, scratch1, offset);
3757 masm.computeEffectiveAddress(slotAddr, scratch2);
3758 masm.jump(&done);
3760 masm.bind(&isDynamic);
3762 masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), scratch2);
3764 // Use an offset to subtract the number of fixed slots.
3765 int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
3766 BaseValueIndex slotAddr(scratch2, scratch1, offset);
3767 masm.computeEffectiveAddress(slotAddr, scratch2);
3769 masm.bind(&done);
3771 // Pre-barrier and store.
3772 Address slotAddr(scratch2, 0);
3773 masm.guardedCallPreBarrierAnyZone(slotAddr, MIRType::Value, scratch3);
3774 masm.storeValue(R2, slotAddr);
3776 // Post barrier.
3777 Label skipBarrier;
3778 masm.branchPtrInNurseryChunk(Assembler::Equal, env, scratch1, &skipBarrier);
3779 masm.branchValueIsNurseryCell(Assembler::NotEqual, R2, scratch1,
3780 &skipBarrier);
3782 // Post barrier code expects the object in R2.
3783 masm.movePtr(env, R2.scratchReg());
3784 masm.call(&postBarrierSlot_);
3786 masm.bind(&skipBarrier);
3787 return true;
3790 template <typename Handler>
3791 bool BaselineCodeGen<Handler>::emit_GetName() {
3792 frame.syncStack(0);
3794 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3796 // Call IC.
3797 if (!emitNextIC()) {
3798 return false;
3801 // Mark R0 as pushed stack value.
3802 frame.push(R0);
3803 return true;
3806 template <typename Handler>
3807 bool BaselineCodeGen<Handler>::emitBindName(JSOp op) {
3808 // If we have a BindGName without a non-syntactic scope, we pass the global
3809 // lexical environment to the IC instead of the frame's environment.
3811 frame.syncStack(0);
3813 auto loadGlobalLexical = [this]() {
3814 loadGlobalLexicalEnvironment(R0.scratchReg());
3815 return true;
3817 auto loadFrameEnv = [this]() {
3818 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3819 return true;
3822 if (op == JSOp::BindName) {
3823 if (!loadFrameEnv()) {
3824 return false;
3826 } else {
3827 MOZ_ASSERT(op == JSOp::BindGName);
3828 if (!emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope,
3829 loadFrameEnv, loadGlobalLexical, R2.scratchReg())) {
3830 return false;
3834 // Call IC.
3835 if (!emitNextIC()) {
3836 return false;
3839 // Mark R0 as pushed stack value.
3840 frame.push(R0);
3841 return true;
3844 template <typename Handler>
3845 bool BaselineCodeGen<Handler>::emit_BindName() {
3846 return emitBindName(JSOp::BindName);
3849 template <typename Handler>
3850 bool BaselineCodeGen<Handler>::emit_DelName() {
3851 frame.syncStack(0);
3852 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3854 prepareVMCall();
3856 pushArg(R0.scratchReg());
3857 pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
3859 using Fn = bool (*)(JSContext*, HandlePropertyName, HandleObject,
3860 MutableHandleValue);
3861 if (!callVM<Fn, js::DeleteNameOperation>()) {
3862 return false;
3865 frame.push(R0);
3866 return true;
3869 template <>
3870 bool BaselineCompilerCodeGen::emit_GetImport() {
3871 JSScript* script = handler.script();
3872 ModuleEnvironmentObject* env = GetModuleEnvironmentForScript(script);
3873 MOZ_ASSERT(env);
3875 jsid id = NameToId(script->getName(handler.pc()));
3876 ModuleEnvironmentObject* targetEnv;
3877 Shape* shape;
3878 MOZ_ALWAYS_TRUE(env->lookupImport(id, &targetEnv, &shape));
3880 frame.syncStack(0);
3882 uint32_t slot = shape->slot();
3883 Register scratch = R0.scratchReg();
3884 masm.movePtr(ImmGCPtr(targetEnv), scratch);
3885 if (slot < targetEnv->numFixedSlots()) {
3886 masm.loadValue(Address(scratch, NativeObject::getFixedSlotOffset(slot)),
3887 R0);
3888 } else {
3889 masm.loadPtr(Address(scratch, NativeObject::offsetOfSlots()), scratch);
3890 masm.loadValue(
3891 Address(scratch, (slot - targetEnv->numFixedSlots()) * sizeof(Value)),
3892 R0);
3895 // Imports are initialized by this point except in rare circumstances, so
3896 // don't emit a check unless we have to.
3897 if (targetEnv->getSlot(shape->slot()).isMagic(JS_UNINITIALIZED_LEXICAL)) {
3898 if (!emitUninitializedLexicalCheck(R0)) {
3899 return false;
3903 if (IsTypeInferenceEnabled() && handler.maybeIonCompileable()) {
3904 // No need to monitor types if we know Ion can't compile this script.
3905 if (!emitNextIC()) {
3906 return false;
3910 frame.push(R0);
3911 return true;
3914 template <>
3915 bool BaselineInterpreterCodeGen::emit_GetImport() {
3916 frame.syncStack(0);
3918 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3920 prepareVMCall();
3922 pushBytecodePCArg();
3923 pushScriptArg();
3924 pushArg(R0.scratchReg());
3926 using Fn = bool (*)(JSContext*, HandleObject, HandleScript, jsbytecode*,
3927 MutableHandleValue);
3928 if (!callVM<Fn, GetImportOperation>()) {
3929 return false;
3932 // Enter the type monitor IC.
3933 if (IsTypeInferenceEnabled() && !emitNextIC()) {
3934 return false;
3937 frame.push(R0);
3938 return true;
3941 template <typename Handler>
3942 bool BaselineCodeGen<Handler>::emit_GetIntrinsic() {
3943 frame.syncStack(0);
3945 if (!emitNextIC()) {
3946 return false;
3949 frame.push(R0);
3950 return true;
3953 template <typename Handler>
3954 bool BaselineCodeGen<Handler>::emit_SetIntrinsic() {
3955 frame.syncStack(0);
3956 masm.loadValue(frame.addressOfStackValue(-1), R0);
3958 prepareVMCall();
3960 pushArg(R0);
3961 pushBytecodePCArg();
3962 pushScriptArg();
3964 using Fn = bool (*)(JSContext*, JSScript*, jsbytecode*, HandleValue);
3965 return callVM<Fn, SetIntrinsicOperation>();
3968 template <typename Handler>
3969 bool BaselineCodeGen<Handler>::emit_DefVar() {
3970 frame.syncStack(0);
3972 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3974 prepareVMCall();
3976 pushBytecodePCArg();
3977 pushScriptArg();
3978 pushArg(R0.scratchReg());
3980 using Fn = bool (*)(JSContext*, HandleObject, HandleScript, jsbytecode*);
3981 return callVM<Fn, DefVarOperation>();
3984 template <typename Handler>
3985 bool BaselineCodeGen<Handler>::emitDefLexical(JSOp op) {
3986 MOZ_ASSERT(op == JSOp::DefConst || op == JSOp::DefLet);
3988 frame.syncStack(0);
3990 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
3992 prepareVMCall();
3994 pushBytecodePCArg();
3995 pushScriptArg();
3996 pushArg(R0.scratchReg());
3998 using Fn = bool (*)(JSContext*, HandleObject, HandleScript, jsbytecode*);
3999 return callVM<Fn, DefLexicalOperation>();
4002 template <typename Handler>
4003 bool BaselineCodeGen<Handler>::emit_DefConst() {
4004 return emitDefLexical(JSOp::DefConst);
4007 template <typename Handler>
4008 bool BaselineCodeGen<Handler>::emit_DefLet() {
4009 return emitDefLexical(JSOp::DefLet);
4012 template <typename Handler>
4013 bool BaselineCodeGen<Handler>::emit_DefFun() {
4014 frame.popRegsAndSync(1);
4015 masm.unboxObject(R0, R0.scratchReg());
4016 masm.loadPtr(frame.addressOfEnvironmentChain(), R1.scratchReg());
4018 prepareVMCall();
4020 pushArg(R0.scratchReg());
4021 pushArg(R1.scratchReg());
4022 pushScriptArg();
4024 using Fn = bool (*)(JSContext*, HandleScript, HandleObject, HandleFunction);
4025 return callVM<Fn, DefFunOperation>();
4028 template <typename Handler>
4029 bool BaselineCodeGen<Handler>::emit_CheckGlobalOrEvalDecl() {
4030 frame.syncStack(0);
4032 prepareVMCall();
4034 pushScriptArg();
4035 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
4036 pushArg(R0.scratchReg());
4038 using Fn = bool (*)(JSContext*, HandleObject, HandleScript);
4039 return callVM<Fn, js::CheckGlobalOrEvalDeclarationConflicts>();
4042 template <typename Handler>
4043 bool BaselineCodeGen<Handler>::emitInitPropGetterSetter() {
4044 // Keep values on the stack for the decompiler.
4045 frame.syncStack(0);
4047 prepareVMCall();
4049 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
4050 masm.unboxObject(frame.addressOfStackValue(-2), R1.scratchReg());
4052 pushArg(R0.scratchReg());
4053 pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
4054 pushArg(R1.scratchReg());
4055 pushBytecodePCArg();
4057 using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, HandlePropertyName,
4058 HandleObject);
4059 if (!callVM<Fn, InitPropGetterSetterOperation>()) {
4060 return false;
4063 frame.pop();
4064 return true;
4067 template <typename Handler>
4068 bool BaselineCodeGen<Handler>::emit_InitPropGetter() {
4069 return emitInitPropGetterSetter();
4072 template <typename Handler>
4073 bool BaselineCodeGen<Handler>::emit_InitHiddenPropGetter() {
4074 return emitInitPropGetterSetter();
4077 template <typename Handler>
4078 bool BaselineCodeGen<Handler>::emit_InitPropSetter() {
4079 return emitInitPropGetterSetter();
4082 template <typename Handler>
4083 bool BaselineCodeGen<Handler>::emit_InitHiddenPropSetter() {
4084 return emitInitPropGetterSetter();
4087 template <typename Handler>
4088 bool BaselineCodeGen<Handler>::emitInitElemGetterSetter() {
4089 // Load index and value in R0 and R1, but keep values on the stack for the
4090 // decompiler.
4091 frame.syncStack(0);
4092 masm.loadValue(frame.addressOfStackValue(-2), R0);
4093 masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());
4095 prepareVMCall();
4097 pushArg(R1.scratchReg());
4098 pushArg(R0);
4099 masm.unboxObject(frame.addressOfStackValue(-3), R0.scratchReg());
4100 pushArg(R0.scratchReg());
4101 pushBytecodePCArg();
4103 using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, HandleValue,
4104 HandleObject);
4105 if (!callVM<Fn, InitElemGetterSetterOperation>()) {
4106 return false;
4109 frame.popn(2);
4110 return true;
4113 template <typename Handler>
4114 bool BaselineCodeGen<Handler>::emit_InitElemGetter() {
4115 return emitInitElemGetterSetter();
4118 template <typename Handler>
4119 bool BaselineCodeGen<Handler>::emit_InitHiddenElemGetter() {
4120 return emitInitElemGetterSetter();
4123 template <typename Handler>
4124 bool BaselineCodeGen<Handler>::emit_InitElemSetter() {
4125 return emitInitElemGetterSetter();
4128 template <typename Handler>
4129 bool BaselineCodeGen<Handler>::emit_InitHiddenElemSetter() {
4130 return emitInitElemGetterSetter();
4133 template <typename Handler>
4134 bool BaselineCodeGen<Handler>::emit_InitElemInc() {
4135 // Keep the object and rhs on the stack.
4136 frame.syncStack(0);
4138 // Load object in R0, index in R1.
4139 masm.loadValue(frame.addressOfStackValue(-3), R0);
4140 masm.loadValue(frame.addressOfStackValue(-2), R1);
4142 // Call IC.
4143 if (!emitNextIC()) {
4144 return false;
4147 // Pop the rhs.
4148 frame.pop();
4150 // Increment the index.
4151 Address indexAddr = frame.addressOfStackValue(-1);
4152 #ifdef DEBUG
4153 Label isInt32;
4154 masm.branchTestInt32(Assembler::Equal, indexAddr, &isInt32);
4155 masm.assumeUnreachable("INITELEM_INC index must be Int32");
4156 masm.bind(&isInt32);
4157 #endif
4158 masm.incrementInt32Value(indexAddr);
4159 return true;
4162 template <>
4163 bool BaselineCompilerCodeGen::emit_GetLocal() {
4164 frame.pushLocal(GET_LOCALNO(handler.pc()));
4165 return true;
4168 static BaseValueIndex ComputeAddressOfLocal(MacroAssembler& masm,
4169 Register indexScratch) {
4170 // Locals are stored in memory at a negative offset from the frame pointer. We
4171 // negate the index first to effectively subtract it.
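// For example, local slot 2 resolves to
// BaselineFrameReg + BaselineFrame::reverseOffsetOfLocal(0) - 2 * sizeof(Value).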
4172 masm.negPtr(indexScratch);
4173 return BaseValueIndex(BaselineFrameReg, indexScratch,
4174 BaselineFrame::reverseOffsetOfLocal(0));
4177 template <>
4178 bool BaselineInterpreterCodeGen::emit_GetLocal() {
4179 Register scratch = R0.scratchReg();
4180 LoadUint24Operand(masm, 0, scratch);
4181 BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
4182 masm.loadValue(addr, R0);
4183 frame.push(R0);
4184 return true;
4187 template <>
4188 bool BaselineCompilerCodeGen::emit_SetLocal() {
4189 // Ensure no other StackValue refers to the old value, for instance i + (i =
4190 // 3). This also allows us to use R0 as scratch below.
4191 frame.syncStack(1);
4193 uint32_t local = GET_LOCALNO(handler.pc());
4194 frame.storeStackValue(-1, frame.addressOfLocal(local), R0);
4195 return true;
4198 template <>
4199 bool BaselineInterpreterCodeGen::emit_SetLocal() {
4200 Register scratch = R0.scratchReg();
4201 LoadUint24Operand(masm, 0, scratch);
4202 BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
4203 masm.loadValue(frame.addressOfStackValue(-1), R1);
4204 masm.storeValue(R1, addr);
4205 return true;
4208 template <>
4209 bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op) {
4210 MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);
4212 uint32_t arg = GET_ARGNO(handler.pc());
4214 // Fast path: the script does not use |arguments| or formals don't
4215 // alias the arguments object.
4216 if (!handler.script()->argumentsAliasesFormals()) {
4217 if (op == JSOp::GetArg) {
4218 frame.pushArg(arg);
4219 } else {
4220 // See the comment in emit_SetLocal.
4221 frame.syncStack(1);
4222 frame.storeStackValue(-1, frame.addressOfArg(arg), R0);
4225 return true;
4228 // Sync so that we can use R0.
4229 frame.syncStack(0);
4231 // If the script is known to have an arguments object, we can just use it.
4232 // Else, we *may* have an arguments object (because we can't invalidate
4233 // when needsArgsObj becomes |true|), so we have to test HAS_ARGS_OBJ.
4234 Label done;
4235 if (!handler.script()->needsArgsObj()) {
4236 Label hasArgsObj;
4237 masm.branchTest32(Assembler::NonZero, frame.addressOfFlags(),
4238 Imm32(BaselineFrame::HAS_ARGS_OBJ), &hasArgsObj);
4239 if (op == JSOp::GetArg) {
4240 masm.loadValue(frame.addressOfArg(arg), R0);
4241 } else {
4242 frame.storeStackValue(-1, frame.addressOfArg(arg), R0);
4244 masm.jump(&done);
4245 masm.bind(&hasArgsObj);
4248 // Load the arguments object data vector.
4249 Register reg = R2.scratchReg();
4250 masm.loadPtr(frame.addressOfArgsObj(), reg);
4251 masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);
4253 // Load/store the argument.
4254 Address argAddr(reg, ArgumentsData::offsetOfArgs() + arg * sizeof(Value));
4255 if (op == JSOp::GetArg) {
4256 masm.loadValue(argAddr, R0);
4257 frame.push(R0);
4258 } else {
4259 Register temp = R1.scratchReg();
4260 masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value, temp);
4261 masm.loadValue(frame.addressOfStackValue(-1), R0);
4262 masm.storeValue(R0, argAddr);
4264 MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
4266 // Reload the arguments object.
4267 Register reg = R2.scratchReg();
4268 masm.loadPtr(frame.addressOfArgsObj(), reg);
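// A post barrier is only needed when a nursery value is stored into a tenured
// arguments object; skip it in every other case.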
4270 Label skipBarrier;
4272 masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
4273 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
4275 masm.call(&postBarrierSlot_);
4277 masm.bind(&skipBarrier);
4280 masm.bind(&done);
4281 return true;
4284 template <>
4285 bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op) {
4286 MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);
4288 // Load the index.
4289 Register argReg = R1.scratchReg();
4290 LoadUint16Operand(masm, argReg);
4292 // If the frame has no arguments object, this must be an unaliased access.
4293 Label isUnaliased, done;
4294 masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
4295 Imm32(BaselineFrame::HAS_ARGS_OBJ), &isUnaliased);
4297 Register reg = R2.scratchReg();
4299 // If it's an unmapped arguments object, this is an unaliased access.
4300 loadScript(reg);
4301 masm.branchTest32(
4302 Assembler::Zero, Address(reg, JSScript::offsetOfImmutableFlags()),
4303 Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj)),
4304 &isUnaliased);
4306 // Load the arguments object data vector.
4307 masm.loadPtr(frame.addressOfArgsObj(), reg);
4308 masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);
4310 // Load/store the argument.
4311 BaseValueIndex argAddr(reg, argReg, ArgumentsData::offsetOfArgs());
4312 if (op == JSOp::GetArg) {
4313 masm.loadValue(argAddr, R0);
4314 frame.push(R0);
4315 } else {
4316 masm.guardedCallPreBarrierAnyZone(argAddr, MIRType::Value,
4317 R0.scratchReg());
4318 masm.loadValue(frame.addressOfStackValue(-1), R0);
4319 masm.storeValue(R0, argAddr);
4321 // Reload the arguments object.
4322 masm.loadPtr(frame.addressOfArgsObj(), reg);
4324 Register temp = R1.scratchReg();
4325 masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &done);
4326 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &done);
4328 masm.call(&postBarrierSlot_);
4330 masm.jump(&done);
4332 masm.bind(&isUnaliased);
4334 BaseValueIndex addr(BaselineFrameReg, argReg,
4335 BaselineFrame::offsetOfArg(0));
4336 if (op == JSOp::GetArg) {
4337 masm.loadValue(addr, R0);
4338 frame.push(R0);
4339 } else {
4340 masm.loadValue(frame.addressOfStackValue(-1), R0);
4341 masm.storeValue(R0, addr);
4345 masm.bind(&done);
4346 return true;
4349 template <typename Handler>
4350 bool BaselineCodeGen<Handler>::emit_GetArg() {
4351 return emitFormalArgAccess(JSOp::GetArg);
4354 template <typename Handler>
4355 bool BaselineCodeGen<Handler>::emit_SetArg() {
4356 return emitFormalArgAccess(JSOp::SetArg);
4359 template <>
4360 void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest) {
4361 masm.move32(Imm32(handler.function()->nargs()), dest);
4364 template <>
4365 void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest) {
4366 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), dest);
4367 masm.load16ZeroExtend(Address(dest, JSFunction::offsetOfNargs()), dest);
4370 template <typename Handler>
4371 void BaselineCodeGen<Handler>::emitPushNonArrowFunctionNewTarget() {
4372 // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)])
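// When constructing, |new.target| is passed as an extra stack value right
// after the argument slots, so it lives at argument index
// max(numActualArgs, numFormalArgs).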
4373 Label notConstructing, done;
4374 masm.branchTestPtr(Assembler::Zero, frame.addressOfCalleeToken(),
4375 Imm32(CalleeToken_FunctionConstructing), &notConstructing);
4377 Register argvLen = R0.scratchReg();
4378 Register nformals = R1.scratchReg();
4379 Address actualArgs(BaselineFrameReg,
4380 BaselineFrame::offsetOfNumActualArgs());
4381 masm.loadPtr(actualArgs, argvLen);
4383 // If argvLen < nformals, set argvLen := nformals.
4384 loadNumFormalArguments(nformals);
4385 masm.cmp32Move32(Assembler::Below, argvLen, nformals, nformals, argvLen);
4387 BaseValueIndex newTarget(BaselineFrameReg, argvLen,
4388 BaselineFrame::offsetOfArg(0));
4389 masm.loadValue(newTarget, R0);
4390 masm.jump(&done);
4392 // else push(undefined)
4393 masm.bind(&notConstructing);
4394 masm.moveValue(UndefinedValue(), R0);
4396 masm.bind(&done);
4397 frame.push(R0);
4400 template <>
4401 bool BaselineCompilerCodeGen::emit_NewTarget() {
4402 if (handler.script()->isForEval()) {
4403 frame.pushEvalNewTarget();
4404 return true;
4407 MOZ_ASSERT(handler.function());
4408 frame.syncStack(0);
4410 if (handler.function()->isArrow()) {
4411 // Arrow functions store their |new.target| value in an
4412 // extended slot.
4413 Register scratch = R0.scratchReg();
4414 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
4415 masm.loadValue(
4416 Address(scratch, FunctionExtended::offsetOfArrowNewTargetSlot()), R0);
4417 frame.push(R0);
4418 return true;
4421 emitPushNonArrowFunctionNewTarget();
4422 return true;
4425 template <>
4426 bool BaselineInterpreterCodeGen::emit_NewTarget() {
4427 Register scratch1 = R0.scratchReg();
4428 Register scratch2 = R1.scratchReg();
4430 Label isFunction, done;
4431 masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
4432 masm.branchTestPtr(Assembler::Zero, scratch1, Imm32(CalleeTokenScriptBit),
4433 &isFunction);
4435 // Case 1: eval.
4436 frame.pushEvalNewTarget();
4437 masm.jump(&done);
4440 masm.bind(&isFunction);
4442 Label notArrow;
4443 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
4444 masm.branchFunctionKind(Assembler::NotEqual,
4445 FunctionFlags::FunctionKind::Arrow, scratch1,
4446 scratch2, &notArrow);
4448 // Case 2: arrow function.
4449 masm.pushValue(
4450 Address(scratch1, FunctionExtended::offsetOfArrowNewTargetSlot()));
4451 masm.jump(&done);
4454 masm.bind(&notArrow);
4456 // Case 3: non-arrow function.
4457 emitPushNonArrowFunctionNewTarget();
4459 masm.bind(&done);
4460 return true;
4463 template <typename Handler>
4464 bool BaselineCodeGen<Handler>::emit_ThrowSetConst() {
4465 prepareVMCall();
4466 pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN));
4468 using Fn = bool (*)(JSContext*, unsigned);
4469 return callVM<Fn, jit::ThrowRuntimeLexicalError>();
4472 template <typename Handler>
4473 bool BaselineCodeGen<Handler>::emitUninitializedLexicalCheck(
4474 const ValueOperand& val) {
4475 Label done;
4476 masm.branchTestMagicValue(Assembler::NotEqual, val, JS_UNINITIALIZED_LEXICAL,
4477 &done);
4479 prepareVMCall();
4480 pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL));
4482 using Fn = bool (*)(JSContext*, unsigned);
4483 if (!callVM<Fn, jit::ThrowRuntimeLexicalError>()) {
4484 return false;
4487 masm.bind(&done);
4488 return true;
4491 template <typename Handler>
4492 bool BaselineCodeGen<Handler>::emit_CheckLexical() {
4493 frame.syncStack(0);
4494 masm.loadValue(frame.addressOfStackValue(-1), R0);
4495 return emitUninitializedLexicalCheck(R0);
4498 template <typename Handler>
4499 bool BaselineCodeGen<Handler>::emit_CheckAliasedLexical() {
4500 return emit_CheckLexical();
4503 template <typename Handler>
4504 bool BaselineCodeGen<Handler>::emit_InitLexical() {
4505 return emit_SetLocal();
4508 template <typename Handler>
4509 bool BaselineCodeGen<Handler>::emit_InitGLexical() {
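// Reuse the SetProp path: pop the value, push the global lexical environment
// object as the receiver, then re-push the value so emit_SetProp sees its
// usual [receiver, value] operands.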
4510 frame.popRegsAndSync(1);
4511 pushGlobalLexicalEnvironmentValue(R1);
4512 frame.push(R0);
4513 return emit_SetProp();
4516 template <typename Handler>
4517 bool BaselineCodeGen<Handler>::emit_InitAliasedLexical() {
4518 return emit_SetAliasedVar();
4521 template <typename Handler>
4522 bool BaselineCodeGen<Handler>::emit_Uninitialized() {
4523 frame.push(MagicValue(JS_UNINITIALIZED_LEXICAL));
4524 return true;
4527 template <>
4528 bool BaselineCompilerCodeGen::emitCall(JSOp op) {
4529 MOZ_ASSERT(IsInvokeOp(op));
4531 frame.syncStack(0);
4533 uint32_t argc = GET_ARGC(handler.pc());
4534 masm.move32(Imm32(argc), R0.scratchReg());
4536 // Call IC
4537 if (!emitNextIC()) {
4538 return false;
4541 // Update FrameInfo.
4542 bool construct = IsConstructOp(op);
4543 frame.popn(2 + argc + construct);
4544 frame.push(R0);
4545 return true;
4548 template <>
4549 bool BaselineInterpreterCodeGen::emitCall(JSOp op) {
4550 MOZ_ASSERT(IsInvokeOp(op));
4552 // The IC expects argc in R0.
4553 LoadUint16Operand(masm, R0.scratchReg());
4554 if (!emitNextIC()) {
4555 return false;
4558 // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
4559 // The return value is in R0 so we can't use that.
4560 Register scratch = R1.scratchReg();
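// Besides the |argc| arguments we also pop the callee and |this|, plus the
// |new.target| value when constructing, hence 2 or 3 extra values.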
4561 uint32_t extraValuesToPop = IsConstructOp(op) ? 3 : 2;
4562 Register spReg = AsRegister(masm.getStackPointer());
4563 LoadUint16Operand(masm, scratch);
4564 masm.computeEffectiveAddress(
4565 BaseValueIndex(spReg, scratch, extraValuesToPop * sizeof(Value)), spReg);
4566 frame.push(R0);
4567 return true;
4570 template <typename Handler>
4571 bool BaselineCodeGen<Handler>::emitSpreadCall(JSOp op) {
4572 MOZ_ASSERT(IsInvokeOp(op));
4574 frame.syncStack(0);
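// Spread ops pass their arguments as a single array, so the IC's argc is
// always 1.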
4575 masm.move32(Imm32(1), R0.scratchReg());
4577 // Call IC
4578 if (!emitNextIC()) {
4579 return false;
4582 // Update FrameInfo.
4583 bool construct = op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall;
4584 frame.popn(3 + construct);
4585 frame.push(R0);
4586 return true;
4589 template <typename Handler>
4590 bool BaselineCodeGen<Handler>::emit_Call() {
4591 return emitCall(JSOp::Call);
4594 template <typename Handler>
4595 bool BaselineCodeGen<Handler>::emit_CallIgnoresRv() {
4596 return emitCall(JSOp::CallIgnoresRv);
4599 template <typename Handler>
4600 bool BaselineCodeGen<Handler>::emit_CallIter() {
4601 return emitCall(JSOp::CallIter);
4604 template <typename Handler>
4605 bool BaselineCodeGen<Handler>::emit_New() {
4606 return emitCall(JSOp::New);
4609 template <typename Handler>
4610 bool BaselineCodeGen<Handler>::emit_SuperCall() {
4611 return emitCall(JSOp::SuperCall);
4614 template <typename Handler>
4615 bool BaselineCodeGen<Handler>::emit_FunCall() {
4616 return emitCall(JSOp::FunCall);
4619 template <typename Handler>
4620 bool BaselineCodeGen<Handler>::emit_FunApply() {
4621 return emitCall(JSOp::FunApply);
4624 template <typename Handler>
4625 bool BaselineCodeGen<Handler>::emit_Eval() {
4626 return emitCall(JSOp::Eval);
4629 template <typename Handler>
4630 bool BaselineCodeGen<Handler>::emit_StrictEval() {
4631 return emitCall(JSOp::StrictEval);
4634 template <typename Handler>
4635 bool BaselineCodeGen<Handler>::emit_SpreadCall() {
4636 return emitSpreadCall(JSOp::SpreadCall);
4639 template <typename Handler>
4640 bool BaselineCodeGen<Handler>::emit_SpreadNew() {
4641 return emitSpreadCall(JSOp::SpreadNew);
4644 template <typename Handler>
4645 bool BaselineCodeGen<Handler>::emit_SpreadSuperCall() {
4646 return emitSpreadCall(JSOp::SpreadSuperCall);
4649 template <typename Handler>
4650 bool BaselineCodeGen<Handler>::emit_SpreadEval() {
4651 return emitSpreadCall(JSOp::SpreadEval);
4654 template <typename Handler>
4655 bool BaselineCodeGen<Handler>::emit_StrictSpreadEval() {
4656 return emitSpreadCall(JSOp::StrictSpreadEval);
4659 template <typename Handler>
4660 bool BaselineCodeGen<Handler>::emit_OptimizeSpreadCall() {
4661 frame.syncStack(0);
4662 masm.loadValue(frame.addressOfStackValue(-1), R0);
4664 prepareVMCall();
4665 pushArg(R0);
4667 using Fn = bool (*)(JSContext*, HandleValue, bool*);
4668 if (!callVM<Fn, OptimizeSpreadCall>()) {
4669 return false;
4672 masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
4673 frame.push(R0);
4674 return true;
4677 template <typename Handler>
4678 bool BaselineCodeGen<Handler>::emit_ImplicitThis() {
4679 frame.syncStack(0);
4680 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
4682 prepareVMCall();
4684 pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
4685 pushArg(R0.scratchReg());
4687 using Fn = bool (*)(JSContext*, HandleObject, HandlePropertyName,
4688 MutableHandleValue);
4689 if (!callVM<Fn, ImplicitThisOperation>()) {
4690 return false;
4693 frame.push(R0);
4694 return true;
4697 template <typename Handler>
4698 bool BaselineCodeGen<Handler>::emit_GImplicitThis() {
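// Only scripts with a non-syntactic scope can have a non-undefined implicit
// |this| here, so test that script flag and just push |undefined| otherwise.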
4699 auto pushUndefined = [this]() {
4700 frame.push(UndefinedValue());
4701 return true;
4703 auto emitImplicitThis = [this]() { return emit_ImplicitThis(); };
4704 return emitTestScriptFlag(JSScript::ImmutableFlags::HasNonSyntacticScope,
4705 emitImplicitThis, pushUndefined, R2.scratchReg());
4708 template <typename Handler>
4709 bool BaselineCodeGen<Handler>::emit_Instanceof() {
4710 frame.popRegsAndSync(2);
4712 if (!emitNextIC()) {
4713 return false;
4716 frame.push(R0);
4717 return true;
4720 template <typename Handler>
4721 bool BaselineCodeGen<Handler>::emit_Typeof() {
4722 frame.popRegsAndSync(1);
4724 if (!emitNextIC()) {
4725 return false;
4728 frame.push(R0);
4729 return true;
4732 template <typename Handler>
4733 bool BaselineCodeGen<Handler>::emit_TypeofExpr() {
4734 return emit_Typeof();
4737 template <typename Handler>
4738 bool BaselineCodeGen<Handler>::emit_ThrowMsg() {
4739 prepareVMCall();
4740 pushUint8BytecodeOperandArg(R2.scratchReg());
4742 using Fn = bool (*)(JSContext*, const unsigned);
4743 return callVM<Fn, js::ThrowMsgOperation>();
4746 template <typename Handler>
4747 bool BaselineCodeGen<Handler>::emit_Throw() {
4748 // Keep value to throw in R0.
4749 frame.popRegsAndSync(1);
4751 prepareVMCall();
4752 pushArg(R0);
4754 using Fn = bool (*)(JSContext*, HandleValue);
4755 return callVM<Fn, js::ThrowOperation>();
4758 template <typename Handler>
4759 bool BaselineCodeGen<Handler>::emit_Try() {
4760 return true;
4763 template <typename Handler>
4764 bool BaselineCodeGen<Handler>::emit_Finally() {
4765 // JSOp::Finally has a def count of 2, but these values are already on the
4766 // stack (they're pushed by JSOp::Gosub). Update the compiler's stack state.
4767 frame.incStackDepth(2);
4769 // To match the interpreter, emit an interrupt check at the start of the
4770 // finally block.
4771 return emitInterruptCheck();
4774 template <typename Handler>
4775 bool BaselineCodeGen<Handler>::emit_Gosub() {
4776 // Jump to the finally block.
4777 frame.syncStack(0);
4778 emitJump();
4779 return true;
4782 static void LoadBaselineScriptResumeEntries(MacroAssembler& masm,
4783 JSScript* script, Register dest,
4784 Register scratch) {
4785 MOZ_ASSERT(dest != scratch);
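// Compute a pointer to the BaselineScript's resume entry table in |dest|;
// each entry holds the native code address for one resume index.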
4787 masm.movePtr(ImmPtr(script->jitScript()), dest);
4788 masm.loadPtr(Address(dest, JitScript::offsetOfBaselineScript()), dest);
4789 masm.load32(Address(dest, BaselineScript::offsetOfResumeEntriesOffset()),
4790 scratch);
4791 masm.addPtr(scratch, dest);
4794 template <typename Handler>
4795 void BaselineCodeGen<Handler>::emitInterpJumpToResumeEntry(Register script,
4796 Register resumeIndex,
4797 Register scratch) {
4798 // Load JSScript::immutableScriptData() into |script|.
4799 masm.loadPtr(Address(script, JSScript::offsetOfSharedData()), script);
4800 masm.loadPtr(Address(script, RuntimeScriptData::offsetOfISD()), script);
4802 // Load the resume pcOffset in |resumeIndex|.
4803 masm.load32(
4804 Address(script, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
4805 scratch);
4806 masm.computeEffectiveAddress(BaseIndex(scratch, resumeIndex, TimesFour),
4807 scratch);
4808 masm.load32(BaseIndex(script, scratch, TimesOne), resumeIndex);
4810 // Add resume offset to PC, jump to it.
4811 masm.computeEffectiveAddress(BaseIndex(script, resumeIndex, TimesOne,
4812 ImmutableScriptData::offsetOfCode()),
4813 script);
4814 Address pcAddr(BaselineFrameReg,
4815 BaselineFrame::reverseOffsetOfInterpreterPC());
4816 masm.storePtr(script, pcAddr);
4817 emitJumpToInterpretOpLabel();
4820 template <>
4821 void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex,
4822 Register scratch1,
4823 Register scratch2) {
4824 LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
4825 masm.loadPtr(
4826 BaseIndex(scratch1, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
4827 scratch1);
4828 masm.jump(scratch1);
4831 template <>
4832 void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex,
4833 Register scratch1,
4834 Register scratch2) {
4835 loadScript(scratch1);
4836 emitInterpJumpToResumeEntry(scratch1, resumeIndex, scratch2);
4839 template <typename Handler>
4840 bool BaselineCodeGen<Handler>::emit_Retsub() {
4841 frame.popRegsAndSync(2);
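// The two values pushed for the finally block are now in R0 (a boolean
// telling us whether to throw) and R1 (the exception or the resume index).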
4843 Label isReturn;
4844 masm.branchTestBooleanTruthy(/* branchIfTrue = */ false, R0, &isReturn);
4846 // R0 is |true|. We need to throw R1.
4847 prepareVMCall();
4848 pushArg(R1);
4850 using Fn = bool (*)(JSContext*, HandleValue);
4851 if (!callVM<Fn, js::ThrowOperation>()) {
4852 return false;
4855 masm.bind(&isReturn);
4857 // R0 is |false|. R1 contains the resumeIndex to jump to.
4858 Register resumeIndexReg = R1.scratchReg();
4859 masm.unboxInt32(R1, resumeIndexReg);
4861 Register scratch1 = R2.scratchReg();
4862 Register scratch2 = R0.scratchReg();
4863 jumpToResumeEntry(resumeIndexReg, scratch1, scratch2);
4864 return true;
4867 template <>
4868 template <typename F1, typename F2>
4869 MOZ_MUST_USE bool BaselineCompilerCodeGen::emitDebugInstrumentation(
4870 const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
4871 // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
4872 // knows statically whether we're compiling with debug instrumentation.
4874 if (handler.compileDebugInstrumentation()) {
4875 return ifDebuggee();
4878 if (ifNotDebuggee) {
4879 return (*ifNotDebuggee)();
4882 return true;
4885 template <>
4886 template <typename F1, typename F2>
4887 MOZ_MUST_USE bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
4888 const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
4889 // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
4890 // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
4891 // flag.
4893 Label isNotDebuggee, done;
4895 CodeOffset toggleOffset = masm.toggledJump(&isNotDebuggee);
4896 if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
4897 return false;
4900 masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
4901 Imm32(BaselineFrame::DEBUGGEE), &isNotDebuggee);
4903 if (!ifDebuggee()) {
4904 return false;
4907 if (ifNotDebuggee) {
4908 masm.jump(&done);
4911 masm.bind(&isNotDebuggee);
4913 if (ifNotDebuggee && !(*ifNotDebuggee)()) {
4914 return false;
4917 masm.bind(&done);
4918 return true;
4921 template <typename Handler>
4922 bool BaselineCodeGen<Handler>::emit_PushLexicalEnv() {
4923 // Call a stub to push the block on the block chain.
4924 prepareVMCall();
4925 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
4927 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
4928 R2.scratchReg());
4929 pushArg(R0.scratchReg());
4931 using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<LexicalScope*>);
4932 return callVM<Fn, jit::PushLexicalEnv>();
4935 template <typename Handler>
4936 bool BaselineCodeGen<Handler>::emit_PopLexicalEnv() {
4937 frame.syncStack(0);
4939 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
4941 auto ifDebuggee = [this]() {
4942 prepareVMCall();
4943 pushBytecodePCArg();
4944 pushArg(R0.scratchReg());
4946 using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
4947 return callVM<Fn, jit::DebugLeaveThenPopLexicalEnv>();
4949 auto ifNotDebuggee = [this]() {
4950 prepareVMCall();
4951 pushArg(R0.scratchReg());
4953 using Fn = bool (*)(JSContext*, BaselineFrame*);
4954 return callVM<Fn, jit::PopLexicalEnv>();
4956 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
4959 template <typename Handler>
4960 bool BaselineCodeGen<Handler>::emit_FreshenLexicalEnv() {
4961 frame.syncStack(0);
4963 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
4965 auto ifDebuggee = [this]() {
4966 prepareVMCall();
4967 pushBytecodePCArg();
4968 pushArg(R0.scratchReg());
4970 using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
4971 return callVM<Fn, jit::DebugLeaveThenFreshenLexicalEnv>();
4973 auto ifNotDebuggee = [this]() {
4974 prepareVMCall();
4975 pushArg(R0.scratchReg());
4977 using Fn = bool (*)(JSContext*, BaselineFrame*);
4978 return callVM<Fn, jit::FreshenLexicalEnv>();
4980 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
4983 template <typename Handler>
4984 bool BaselineCodeGen<Handler>::emit_RecreateLexicalEnv() {
4985 frame.syncStack(0);
4987 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
4989 auto ifDebuggee = [this]() {
4990 prepareVMCall();
4991 pushBytecodePCArg();
4992 pushArg(R0.scratchReg());
4994 using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
4995 return callVM<Fn, jit::DebugLeaveThenRecreateLexicalEnv>();
4997 auto ifNotDebuggee = [this]() {
4998 prepareVMCall();
4999 pushArg(R0.scratchReg());
5001 using Fn = bool (*)(JSContext*, BaselineFrame*);
5002 return callVM<Fn, jit::RecreateLexicalEnv>();
5004 return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
5007 template <typename Handler>
5008 bool BaselineCodeGen<Handler>::emit_DebugLeaveLexicalEnv() {
5009 auto ifDebuggee = [this]() {
5010 prepareVMCall();
5011 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5012 pushBytecodePCArg();
5013 pushArg(R0.scratchReg());
5015 using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
5016 return callVM<Fn, jit::DebugLeaveLexicalEnv>();
5018 return emitDebugInstrumentation(ifDebuggee);
5021 template <typename Handler>
5022 bool BaselineCodeGen<Handler>::emit_PushVarEnv() {
5023 prepareVMCall();
5024 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5025 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
5026 R2.scratchReg());
5027 pushArg(R0.scratchReg());
5029 using Fn = bool (*)(JSContext*, BaselineFrame*, HandleScope);
5030 return callVM<Fn, jit::PushVarEnv>();
5033 template <typename Handler>
5034 bool BaselineCodeGen<Handler>::emit_EnterWith() {
5035 // Pop "with" object to R0.
5036 frame.popRegsAndSync(1);
5038 // Call a stub to push the object onto the environment chain.
5039 prepareVMCall();
5041 pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
5042 R2.scratchReg());
5043 pushArg(R0);
5044 masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());
5045 pushArg(R1.scratchReg());
5047 using Fn =
5048 bool (*)(JSContext*, BaselineFrame*, HandleValue, Handle<WithScope*>);
5049 return callVM<Fn, jit::EnterWith>();
5052 template <typename Handler>
5053 bool BaselineCodeGen<Handler>::emit_LeaveWith() {
5054 // Call a stub to pop the with object from the environment chain.
5055 prepareVMCall();
5057 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5058 pushArg(R0.scratchReg());
5060 using Fn = bool (*)(JSContext*, BaselineFrame*);
5061 return callVM<Fn, jit::LeaveWith>();
5064 template <typename Handler>
5065 bool BaselineCodeGen<Handler>::emit_Exception() {
5066 prepareVMCall();
5068 using Fn = bool (*)(JSContext*, MutableHandleValue);
5069 if (!callVM<Fn, GetAndClearException>()) {
5070 return false;
5073 frame.push(R0);
5074 return true;
5077 template <typename Handler>
5078 bool BaselineCodeGen<Handler>::emit_Debugger() {
5079 prepareVMCall();
5081 frame.assertSyncedStack();
5082 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5083 pushArg(R0.scratchReg());
5085 using Fn = bool (*)(JSContext*, BaselineFrame*);
5086 if (!callVM<Fn, jit::OnDebuggerStatement>()) {
5087 return false;
5090 return true;
5093 template <typename Handler>
5094 bool BaselineCodeGen<Handler>::emitDebugEpilogue() {
5095 auto ifDebuggee = [this]() {
5096 // Move return value into the frame's rval slot.
5097 masm.storeValue(JSReturnOperand, frame.addressOfReturnValue());
5098 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
5100 // Load BaselineFrame pointer in R0.
5101 frame.syncStack(0);
5102 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5104 prepareVMCall();
5105 pushBytecodePCArg();
5106 pushArg(R0.scratchReg());
5108 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugEpilogue;
5110 using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
5111 if (!callVM<Fn, jit::DebugEpilogueOnBaselineReturn>(kind)) {
5112 return false;
5115 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
5116 return true;
5118 return emitDebugInstrumentation(ifDebuggee);
5121 template <typename Handler>
5122 bool BaselineCodeGen<Handler>::emitReturn() {
5123 if (handler.shouldEmitDebugEpilogueAtReturnOp()) {
5124 if (!emitDebugEpilogue()) {
5125 return false;
5129 // Only emit the jump if this return op is not the last instruction.
5130 // It's not needed for the last instruction, because the last instruction
5131 // flows into the return label.
5132 if (!handler.isDefinitelyLastOp()) {
5133 masm.jump(&return_);
5136 return true;
5139 template <typename Handler>
5140 bool BaselineCodeGen<Handler>::emit_Return() {
5141 frame.assertStackDepth(1);
5143 frame.popValue(JSReturnOperand);
5144 return emitReturn();
5147 template <typename Handler>
5148 void BaselineCodeGen<Handler>::emitLoadReturnValue(ValueOperand val) {
5149 Label done, noRval;
5150 masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
5151 Imm32(BaselineFrame::HAS_RVAL), &noRval);
5152 masm.loadValue(frame.addressOfReturnValue(), val);
5153 masm.jump(&done);
5155 masm.bind(&noRval);
5156 masm.moveValue(UndefinedValue(), val);
5158 masm.bind(&done);
5161 template <typename Handler>
5162 bool BaselineCodeGen<Handler>::emit_RetRval() {
5163 frame.assertStackDepth(0);
5165 masm.moveValue(UndefinedValue(), JSReturnOperand);
5167 if (!handler.maybeScript() || !handler.maybeScript()->noScriptRval()) {
5168 // Return the value in the return value slot, if any.
5169 Label done;
5170 Address flags = frame.addressOfFlags();
5171 masm.branchTest32(Assembler::Zero, flags, Imm32(BaselineFrame::HAS_RVAL),
5172 &done);
5173 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
5174 masm.bind(&done);
5177 return emitReturn();
5180 template <typename Handler>
5181 bool BaselineCodeGen<Handler>::emit_ToPropertyKey() {
5182 frame.popRegsAndSync(1);
5184 if (!emitNextIC()) {
5185 return false;
5188 frame.push(R0);
5189 return true;
5192 template <typename Handler>
5193 bool BaselineCodeGen<Handler>::emit_ToAsyncIter() {
5194 frame.syncStack(0);
5195 masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
5196 masm.loadValue(frame.addressOfStackValue(-1), R1);
5198 prepareVMCall();
5199 pushArg(R1);
5200 pushArg(R0.scratchReg());
5202 using Fn = JSObject* (*)(JSContext*, HandleObject, HandleValue);
5203 if (!callVM<Fn, js::CreateAsyncFromSyncIterator>()) {
5204 return false;
5207 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5208 frame.popn(2);
5209 frame.push(R0);
5210 return true;
5213 template <typename Handler>
5214 bool BaselineCodeGen<Handler>::emit_TrySkipAwait() {
5215 frame.syncStack(0);
5216 masm.loadValue(frame.addressOfStackValue(-1), R0);
5218 prepareVMCall();
5219 pushArg(R0);
5221 using Fn = bool (*)(JSContext*, HandleValue, MutableHandleValue);
5222 if (!callVM<Fn, jit::TrySkipAwait>()) {
5223 return false;
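// TrySkipAwait returns the JS_CANNOT_SKIP_AWAIT magic value when the await
// can't be skipped. Leave the resulting value on the stack and push a boolean
// telling the bytecode whether the await can be skipped.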
5226 Label cannotSkip, done;
5227 masm.branchTestMagicValue(Assembler::Equal, R0, JS_CANNOT_SKIP_AWAIT,
5228 &cannotSkip);
5229 masm.moveValue(BooleanValue(true), R1);
5230 masm.jump(&done);
5232 masm.bind(&cannotSkip);
5233 masm.loadValue(frame.addressOfStackValue(-1), R0);
5234 masm.moveValue(BooleanValue(false), R1);
5236 masm.bind(&done);
5238 frame.pop();
5239 frame.push(R0);
5240 frame.push(R1);
5241 return true;
5244 template <typename Handler>
5245 bool BaselineCodeGen<Handler>::emit_AsyncAwait() {
5246 frame.syncStack(0);
5247 masm.loadValue(frame.addressOfStackValue(-2), R1);
5248 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
5250 prepareVMCall();
5251 pushArg(R1);
5252 pushArg(R0.scratchReg());
5254 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
5255 HandleValue);
5256 if (!callVM<Fn, js::AsyncFunctionAwait>()) {
5257 return false;
5260 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5261 frame.popn(2);
5262 frame.push(R0);
5263 return true;
5266 template <typename Handler>
5267 bool BaselineCodeGen<Handler>::emit_AsyncResolve() {
5268 frame.syncStack(0);
5269 masm.loadValue(frame.addressOfStackValue(-2), R1);
5270 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg());
5272 prepareVMCall();
5273 pushUint8BytecodeOperandArg(R2.scratchReg());
5274 pushArg(R1);
5275 pushArg(R0.scratchReg());
5277 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>,
5278 HandleValue, AsyncFunctionResolveKind);
5279 if (!callVM<Fn, js::AsyncFunctionResolve>()) {
5280 return false;
5283 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5284 frame.popn(2);
5285 frame.push(R0);
5286 return true;
5289 template <typename Handler>
5290 bool BaselineCodeGen<Handler>::emit_CheckObjCoercible() {
5291 frame.syncStack(0);
5292 masm.loadValue(frame.addressOfStackValue(-1), R0);
5294 Label fail, done;
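// Only |undefined| and |null| are not object-coercible.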
5296 masm.branchTestUndefined(Assembler::Equal, R0, &fail);
5297 masm.branchTestNull(Assembler::NotEqual, R0, &done);
5299 masm.bind(&fail);
5300 prepareVMCall();
5302 pushArg(R0);
5304 using Fn = bool (*)(JSContext*, HandleValue);
5305 if (!callVM<Fn, ThrowObjectCoercible>()) {
5306 return false;
5309 masm.bind(&done);
5310 return true;
5313 template <typename Handler>
5314 bool BaselineCodeGen<Handler>::emit_ToString() {
5315 // Keep top stack value in R0.
5316 frame.popRegsAndSync(1);
5318 // Inline path for string.
5319 Label done;
5320 masm.branchTestString(Assembler::Equal, R0, &done);
5322 prepareVMCall();
5324 pushArg(R0);
5326 // Call ToStringSlow which doesn't handle string inputs.
5327 using Fn = JSString* (*)(JSContext*, HandleValue);
5328 if (!callVM<Fn, ToStringSlow<CanGC>>()) {
5329 return false;
5332 masm.tagValue(JSVAL_TYPE_STRING, ReturnReg, R0);
5334 masm.bind(&done);
5335 frame.push(R0);
5336 return true;
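// JSOp::TableSwitch operand layout: a jump offset to the default target,
// followed by the int32 |low| and |high| bounds and the table's first resume
// index, which gives the offsets below.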
5339 static constexpr uint32_t TableSwitchOpLowOffset = 1 * JUMP_OFFSET_LEN;
5340 static constexpr uint32_t TableSwitchOpHighOffset = 2 * JUMP_OFFSET_LEN;
5341 static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset =
5342 3 * JUMP_OFFSET_LEN;
5344 template <>
5345 void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val,
5346 Register dest,
5347 Register scratch1,
5348 Register scratch2) {
5349 jsbytecode* pc = handler.pc();
5350 jsbytecode* defaultpc = pc + GET_JUMP_OFFSET(pc);
5351 Label* defaultLabel = handler.labelOf(defaultpc);
5353 int32_t low = GET_JUMP_OFFSET(pc + TableSwitchOpLowOffset);
5354 int32_t high = GET_JUMP_OFFSET(pc + TableSwitchOpHighOffset);
5355 int32_t length = high - low + 1;
5357 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5358 // all cases are int32).
5359 masm.branchTestInt32(Assembler::NotEqual, val, defaultLabel);
5360 masm.unboxInt32(val, dest);
5362 // Subtract 'low'. Bounds check.
5363 if (low != 0) {
5364 masm.sub32(Imm32(low), dest);
5366 masm.branch32(Assembler::AboveOrEqual, dest, Imm32(length), defaultLabel);
5369 template <>
5370 void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val,
5371 Register dest,
5372 Register scratch1,
5373 Register scratch2) {
5374 // Jump to the 'default' pc if not int32 (tableswitch is only used when
5375 // all cases are int32).
5376 Label done, jumpToDefault;
5377 masm.branchTestInt32(Assembler::NotEqual, val, &jumpToDefault);
5378 masm.unboxInt32(val, dest);
5380 Register pcReg = LoadBytecodePC(masm, scratch1);
5381 Address lowAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpLowOffset);
5382 Address highAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpHighOffset);
5384 // Jump to default if val > high.
5385 masm.branch32(Assembler::LessThan, highAddr, dest, &jumpToDefault);
5387 // Jump to default if val < low.
5388 masm.load32(lowAddr, scratch2);
5389 masm.branch32(Assembler::GreaterThan, scratch2, dest, &jumpToDefault);
5391 // index := val - low.
5392 masm.sub32(scratch2, dest);
5393 masm.jump(&done);
5395 masm.bind(&jumpToDefault);
5396 emitJump();
5398 masm.bind(&done);
5401 template <>
5402 void BaselineCompilerCodeGen::emitTableSwitchJump(Register key,
5403 Register scratch1,
5404 Register scratch2) {
5405 // Jump to resumeEntries[firstResumeIndex + key].
5407 // Note: BytecodeEmitter::allocateResumeIndex static_asserts
5408 // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t.
5409 uint32_t firstResumeIndex =
5410 GET_RESUMEINDEX(handler.pc() + TableSwitchOpFirstResumeIndexOffset);
5411 LoadBaselineScriptResumeEntries(masm, handler.script(), scratch1, scratch2);
5412 masm.loadPtr(BaseIndex(scratch1, key, ScaleFromElemWidth(sizeof(uintptr_t)),
5413 firstResumeIndex * sizeof(uintptr_t)),
5414 scratch1);
5415 masm.jump(scratch1);
5418 template <>
5419 void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key,
5420 Register scratch1,
5421 Register scratch2) {
5422 // Load the op's firstResumeIndex in scratch1.
5423 LoadUint24Operand(masm, TableSwitchOpFirstResumeIndexOffset, scratch1);
5425 masm.add32(key, scratch1);
5426 jumpToResumeEntry(scratch1, key, scratch2);
5429 template <typename Handler>
5430 bool BaselineCodeGen<Handler>::emit_TableSwitch() {
5431 frame.popRegsAndSync(1);
5433 Register key = R0.scratchReg();
5434 Register scratch1 = R1.scratchReg();
5435 Register scratch2 = R2.scratchReg();
5437 // Call a stub to convert R0 from double to int32 if needed.
5438 // Note: this stub may clobber scratch1.
5439 masm.call(cx->runtime()->jitRuntime()->getDoubleToInt32ValueStub());
5441 // Load the index in the jump table in |key|, or branch to default pc if not
5442 // int32 or out-of-range.
5443 emitGetTableSwitchIndex(R0, key, scratch1, scratch2);
5445 // Jump to the target pc.
5446 emitTableSwitchJump(key, scratch1, scratch2);
5447 return true;
5450 template <typename Handler>
5451 bool BaselineCodeGen<Handler>::emit_Iter() {
5452 frame.popRegsAndSync(1);
5454 if (!emitNextIC()) {
5455 return false;
5458 frame.push(R0);
5459 return true;
5462 template <typename Handler>
5463 bool BaselineCodeGen<Handler>::emit_MoreIter() {
5464 frame.syncStack(0);
5466 masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());
5468 masm.iteratorMore(R1.scratchReg(), R0, R2.scratchReg());
5469 frame.push(R0);
5470 return true;
5473 template <typename Handler>
5474 bool BaselineCodeGen<Handler>::emitIsMagicValue() {
5475 frame.syncStack(0);
5477 Label isMagic, done;
5478 masm.branchTestMagic(Assembler::Equal, frame.addressOfStackValue(-1),
5479 &isMagic);
5480 masm.moveValue(BooleanValue(false), R0);
5481 masm.jump(&done);
5483 masm.bind(&isMagic);
5484 masm.moveValue(BooleanValue(true), R0);
5486 masm.bind(&done);
5487 frame.push(R0, JSVAL_TYPE_BOOLEAN);
5488 return true;
5491 template <typename Handler>
5492 bool BaselineCodeGen<Handler>::emit_IsNoIter() {
5493 return emitIsMagicValue();
5496 template <typename Handler>
5497 bool BaselineCodeGen<Handler>::emit_EndIter() {
5498 // Pop iterator value.
5499 frame.pop();
5501 // Pop the iterator object to close in R0.
5502 frame.popRegsAndSync(1);
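// masm.iteratorClose needs several temp registers; take general-purpose
// registers that are not the frame pointer, the interpreter PC register, or
// the iterator object itself.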
5504 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
5505 regs.take(BaselineFrameReg);
5506 if (HasInterpreterPCReg()) {
5507 regs.take(InterpreterPCReg);
5510 Register obj = R0.scratchReg();
5511 regs.take(obj);
5512 masm.unboxObject(R0, obj);
5514 Register temp1 = regs.takeAny();
5515 Register temp2 = regs.takeAny();
5516 Register temp3 = regs.takeAny();
5517 masm.iteratorClose(obj, temp1, temp2, temp3);
5518 return true;
5521 template <typename Handler>
5522 bool BaselineCodeGen<Handler>::emit_IsGenClosing() {
5523 return emitIsMagicValue();
5526 template <typename Handler>
5527 bool BaselineCodeGen<Handler>::emit_GetRval() {
5528 frame.syncStack(0);
5530 emitLoadReturnValue(R0);
5532 frame.push(R0);
5533 return true;
5536 template <typename Handler>
5537 bool BaselineCodeGen<Handler>::emit_SetRval() {
5538 // Store to the frame's return value slot.
5539 frame.storeStackValue(-1, frame.addressOfReturnValue(), R2);
5540 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());
5541 frame.pop();
5542 return true;
5545 template <typename Handler>
5546 bool BaselineCodeGen<Handler>::emit_Callee() {
5547 MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->function());
5548 frame.syncStack(0);
5549 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(),
5550 R0.scratchReg());
5551 masm.tagValue(JSVAL_TYPE_OBJECT, R0.scratchReg(), R0);
5552 frame.push(R0);
5553 return true;
5556 template <>
5557 bool BaselineCompilerCodeGen::emit_EnvCallee() {
5558 frame.syncStack(0);
5559 uint8_t numHops = GET_UINT8(handler.pc());
5560 Register scratch = R0.scratchReg();
5562 masm.loadPtr(frame.addressOfEnvironmentChain(), scratch);
5563 for (unsigned i = 0; i < numHops; i++) {
5564 Address nextAddr(scratch,
5565 EnvironmentObject::offsetOfEnclosingEnvironment());
5566 masm.unboxObject(nextAddr, scratch);
5569 masm.loadValue(Address(scratch, CallObject::offsetOfCallee()), R0);
5570 frame.push(R0);
5571 return true;
5574 template <>
5575 bool BaselineInterpreterCodeGen::emit_EnvCallee() {
5576 Register scratch = R0.scratchReg();
5577 Register env = R1.scratchReg();
5579 static_assert(JSOpLength_EnvCallee - sizeof(jsbytecode) == ENVCOORD_HOPS_LEN,
5580 "op must have uint8 operand for LoadAliasedVarEnv");
5582 // Load the right environment object.
5583 masm.loadPtr(frame.addressOfEnvironmentChain(), env);
5584 LoadAliasedVarEnv(masm, env, scratch);
5586 masm.pushValue(Address(env, CallObject::offsetOfCallee()));
5587 return true;
5590 template <typename Handler>
5591 bool BaselineCodeGen<Handler>::emit_SuperBase() {
5592 frame.popRegsAndSync(1);
5594 Register scratch = R0.scratchReg();
5595 Register proto = R1.scratchReg();
5597 // Unbox callee.
5598 masm.unboxObject(R0, scratch);
5600 // Load [[HomeObject]]
5601 Address homeObjAddr(scratch,
5602 FunctionExtended::offsetOfMethodHomeObjectSlot());
5603 #ifdef DEBUG
5604 Label isObject;
5605 masm.branchTestObject(Assembler::Equal, homeObjAddr, &isObject);
5606 masm.assumeUnreachable("[[HomeObject]] must be Object");
5607 masm.bind(&isObject);
5608 #endif
5609 masm.unboxObject(homeObjAddr, scratch);
5611 // Load prototype from [[HomeObject]]
5612 masm.loadObjProto(scratch, proto);
5614 #ifdef DEBUG
5615 // We won't encounter a lazy proto, because the prototype is guaranteed to
5616 // either be a JSFunction or a PlainObject, and only proxy objects can have a
5617 // lazy proto.
5618 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
5620 Label proxyCheckDone;
5621 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
5622 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase");
5623 masm.bind(&proxyCheckDone);
5624 #endif
5626 Label hasProto;
5627 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(0), &hasProto);
5629 // Throw an error if |proto| is null.
5630 prepareVMCall();
5632 using Fn = bool (*)(JSContext*);
5633 if (!callVM<Fn, ThrowHomeObjectNotObject>()) {
5634 return false;
5637 // Box prototype and return
5638 masm.bind(&hasProto);
5639 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
5640 frame.push(R1);
5641 return true;
5644 template <typename Handler>
5645 bool BaselineCodeGen<Handler>::emit_SuperFun() {
5646 frame.popRegsAndSync(1);
5648 Register callee = R0.scratchReg();
5649 Register proto = R1.scratchReg();
5650 #ifdef DEBUG
5651 Register scratch = R2.scratchReg();
5652 #endif
5654 // Unbox callee.
5655 masm.unboxObject(R0, callee);
5657 #ifdef DEBUG
5658 Label classCheckDone;
5659 masm.branchTestObjClass(Assembler::Equal, callee, &JSFunction::class_,
5660 scratch, callee, &classCheckDone);
5661 masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
5662 masm.bind(&classCheckDone);
5663 #endif
5665 // Load prototype of callee
5666 masm.loadObjProto(callee, proto);
5668 #ifdef DEBUG
5669 // We won't encounter a lazy proto, because |callee| is guaranteed to be a
5670 // JSFunction and only proxy objects can have a lazy proto.
5671 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
5673 Label proxyCheckDone;
5674 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone);
5675 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun");
5676 masm.bind(&proxyCheckDone);
5677 #endif
5679 Label nullProto, done;
5680 masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto);
5682 // Box prototype and return
5683 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1);
5684 masm.jump(&done);
5686 masm.bind(&nullProto);
5687 masm.moveValue(NullValue(), R1);
5689 masm.bind(&done);
5690 frame.push(R1);
5691 return true;
5694 template <typename Handler>
5695 bool BaselineCodeGen<Handler>::emit_Arguments() {
5696 frame.syncStack(0);
5698 MOZ_ASSERT_IF(handler.maybeScript(),
5699 handler.maybeScript()->argumentsHasVarBinding());
5701 Label done;
5702 if (!handler.maybeScript() || !handler.maybeScript()->needsArgsObj()) {
5703 // We assume the script does not need an arguments object. However, this
5704 // assumption can be invalidated later (see argumentsOptimizationFailed
5705 // in JSScript), so guard on the script's NeedsArgsObj flag.
5706 masm.moveValue(MagicValue(JS_OPTIMIZED_ARGUMENTS), R0);
5708 // If we don't need an arguments object, skip the VM call.
5709 Register scratch = R1.scratchReg();
5710 loadScript(scratch);
5711 masm.branchTest32(
5712 Assembler::Zero, Address(scratch, JSScript::offsetOfMutableFlags()),
5713 Imm32(uint32_t(JSScript::MutableFlags::NeedsArgsObj)), &done);
5716 prepareVMCall();
5718 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5719 pushArg(R0.scratchReg());
5721 using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
5722 if (!callVM<Fn, jit::NewArgumentsObject>()) {
5723 return false;
5726 masm.bind(&done);
5727 frame.push(R0);
5728 return true;
5731 template <typename Handler>
5732 bool BaselineCodeGen<Handler>::emit_Rest() {
5733 frame.syncStack(0);
5735 if (!emitNextIC()) {
5736 return false;
5739 // Mark R0 as pushed stack value.
5740 frame.push(R0);
5741 return true;
5744 template <typename Handler>
5745 bool BaselineCodeGen<Handler>::emit_Generator() {
5746 frame.assertStackDepth(0);
5748 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5750 prepareVMCall();
5751 pushArg(R0.scratchReg());
5753 using Fn = JSObject* (*)(JSContext*, BaselineFrame*);
5754 if (!callVM<Fn, jit::CreateGenerator>()) {
5755 return false;
5758 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
5759 frame.push(R0);
5760 return true;
5763 template <typename Handler>
5764 bool BaselineCodeGen<Handler>::emit_InitialYield() {
5765 frame.syncStack(0);
5766 frame.assertStackDepth(1);
5768 Register genObj = R2.scratchReg();
5769 masm.unboxObject(frame.addressOfStackValue(-1), genObj);
5771 MOZ_ASSERT_IF(handler.maybePC(), GET_RESUMEINDEX(handler.maybePC()) == 0);
5772 masm.storeValue(
5773 Int32Value(0),
5774 Address(genObj, AbstractGeneratorObject::offsetOfResumeIndexSlot()));
5776 Register envObj = R0.scratchReg();
5777 Register temp = R1.scratchReg();
5778 Address envChainSlot(genObj,
5779 AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5780 masm.loadPtr(frame.addressOfEnvironmentChain(), envObj);
5781 masm.guardedCallPreBarrierAnyZone(envChainSlot, MIRType::Value, temp);
5782 masm.storeValue(JSVAL_TYPE_OBJECT, envObj, envChainSlot);
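// Post-barrier the environment chain store: it's only needed if the generator
// object is tenured and the environment object is in the nursery.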
5784 Label skipBarrier;
5785 masm.branchPtrInNurseryChunk(Assembler::Equal, genObj, temp, &skipBarrier);
5786 masm.branchPtrInNurseryChunk(Assembler::NotEqual, envObj, temp, &skipBarrier);
5787 masm.push(genObj);
5788 MOZ_ASSERT(genObj == R2.scratchReg());
5789 masm.call(&postBarrierSlot_);
5790 masm.pop(genObj);
5791 masm.bind(&skipBarrier);
5793 masm.tagValue(JSVAL_TYPE_OBJECT, genObj, JSReturnOperand);
5794 if (!emitReturn()) {
5795 return false;
5798 // Two extra stack values will be pushed when resuming the generator.
5799 frame.incStackDepth(2);
5800 return true;
5803 template <typename Handler>
5804 bool BaselineCodeGen<Handler>::emit_Yield() {
5805 // Store generator in R0.
5806 frame.popRegsAndSync(1);
5808 Register genObj = R2.scratchReg();
5809 masm.unboxObject(R0, genObj);
5811 if (frame.hasKnownStackDepth(1)) {
5812 // If the expression stack is empty, we can inline the Yield. Note that this
5813 // branch is never taken for the interpreter because it doesn't know static
5814 // stack depths.
5816 Register temp = R1.scratchReg();
5817 Address resumeIndexSlot(genObj,
5818 AbstractGeneratorObject::offsetOfResumeIndexSlot());
5819 jsbytecode* pc = handler.maybePC();
5820 MOZ_ASSERT(pc, "compiler-only code never has a null pc");
5821 masm.move32(Imm32(GET_RESUMEINDEX(pc)), temp);
5822 masm.storeValue(JSVAL_TYPE_INT32, temp, resumeIndexSlot);
5824 Register envObj = R0.scratchReg();
5825 Address envChainSlot(
5826 genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot());
5827 masm.loadPtr(frame.addressOfEnvironmentChain(), envObj);
5828 masm.guardedCallPreBarrier(envChainSlot, MIRType::Value);
5829 masm.storeValue(JSVAL_TYPE_OBJECT, envObj, envChainSlot);
5831 Label skipBarrier;
5832 masm.branchPtrInNurseryChunk(Assembler::Equal, genObj, temp, &skipBarrier);
5833 masm.branchPtrInNurseryChunk(Assembler::NotEqual, envObj, temp,
5834 &skipBarrier);
5835 MOZ_ASSERT(genObj == R2.scratchReg());
5836 masm.call(&postBarrierSlot_);
5837 masm.bind(&skipBarrier);
5838 } else {
5839 masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());
5840 computeFrameSize(R0.scratchReg());
5842 prepareVMCall();
5843 pushBytecodePCArg();
5844 pushArg(R0.scratchReg());
5845 pushArg(R1.scratchReg());
5846 pushArg(genObj);
5848 using Fn = bool (*)(JSContext*, HandleObject, BaselineFrame*, uint32_t,
5849 jsbytecode*);
5850 if (!callVM<Fn, jit::NormalSuspend>()) {
5851 return false;
5855 masm.loadValue(frame.addressOfStackValue(-1), JSReturnOperand);
5856 if (!emitReturn()) {
5857 return false;
5860 // Two extra stack values will be pushed when resuming the generator.
5861 frame.incStackDepth(2);
5862 return true;
5865 template <typename Handler>
5866 bool BaselineCodeGen<Handler>::emit_Await() {
5867 return emit_Yield();
5870 template <>
5871 template <typename F>
5872 bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(
5873 const F& ifDebuggee, Register) {
5874 if (handler.compileDebugInstrumentation()) {
5875 return ifDebuggee();
5877 return true;
5880 template <>
5881 template <typename F>
5882 bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation(
5883 const F& ifDebuggee, Register scratch) {
5884 // Note that we can't use emitDebugInstrumentation here because the frame's
5885 // DEBUGGEE flag hasn't been initialized yet.
5887 // If the current Realm is not a debuggee we're done.
5888 Label done;
5889 CodeOffset toggleOffset = masm.toggledJump(&done);
5890 if (!handler.addDebugInstrumentationOffset(cx, toggleOffset)) {
5891 return false;
5893 masm.loadPtr(AbsoluteAddress(cx->addressOfRealm()), scratch);
5894 masm.branchTest32(Assembler::Zero,
5895 Address(scratch, Realm::offsetOfDebugModeBits()),
5896 Imm32(Realm::debugModeIsDebuggeeBit()), &done);
5898 if (!ifDebuggee()) {
5899 return false;
5902 masm.bind(&done);
5903 return true;
5906 template <typename Handler>
5907 bool BaselineCodeGen<Handler>::emit_AfterYield() {
5908 if (!emit_JumpTarget()) {
5909 return false;
5912 auto ifDebuggee = [this]() {
5913 frame.assertSyncedStack();
5914 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
5915 prepareVMCall();
5916 pushArg(R0.scratchReg());
5918 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugAfterYield;
5920 using Fn = bool (*)(JSContext*, BaselineFrame*);
5921 if (!callVM<Fn, jit::DebugAfterYield>(kind)) {
5922 return false;
5925 return true;
5927 return emitAfterYieldDebugInstrumentation(ifDebuggee, R0.scratchReg());
5930 template <typename Handler>
5931 bool BaselineCodeGen<Handler>::emit_FinalYieldRval() {
5932 // Store generator in R0.
5933 frame.popRegsAndSync(1);
5934 masm.unboxObject(R0, R0.scratchReg());
5936 prepareVMCall();
5937 pushBytecodePCArg();
5938 pushArg(R0.scratchReg());
5940 using Fn = bool (*)(JSContext*, HandleObject, jsbytecode*);
5941 if (!callVM<Fn, jit::FinalSuspend>()) {
5942 return false;
5945 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
5946 return emitReturn();
5949 template <>
5950 void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() {
5951 TrampolinePtr code =
5952 cx->runtime()->jitRuntime()->baselineInterpreter().interpretOpAddr();
5953 masm.jump(code);
5956 template <>
5957 void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() {
5958 masm.jump(handler.interpretOpLabel());
5961 template <typename Handler>
5962 bool BaselineCodeGen<Handler>::emitEnterGeneratorCode(Register script,
5963 Register resumeIndex,
5964 Register scratch) {
5965 // Resume in either the BaselineScript (if present) or Baseline Interpreter.
5967 static_assert(BaselineDisabledScript == 0x1,
5968 "Comparison below requires specific sentinel encoding");
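  // Note: the JitScript's BaselineScript slot is assumed to hold either a
  // real BaselineScript* or a small sentinel (nullptr, or 0x1 when Baseline
  // compilation is disabled for the script), so the unsigned <= comparison
  // below treats both sentinel values as "no Baseline code".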
5970 if (JitOptions.warpBuilder) {
5971 // Initialize the icScript slot in the baseline frame.
5972 masm.loadJitScript(script, scratch);
5973 masm.computeEffectiveAddress(
5974 Address(scratch, JitScript::offsetOfICScript()), scratch);
5975 Address icScriptAddr(BaselineFrameReg,
5976 BaselineFrame::reverseOffsetOfICScript());
5977 masm.storePtr(scratch, icScriptAddr);
5980 Label noBaselineScript;
5981 masm.loadJitScript(script, scratch);
5982 masm.loadPtr(Address(scratch, JitScript::offsetOfBaselineScript()), scratch);
5983 masm.branchPtr(Assembler::BelowOrEqual, scratch,
5984 ImmPtr(BaselineDisabledScriptPtr), &noBaselineScript);
5986 masm.load32(Address(scratch, BaselineScript::offsetOfResumeEntriesOffset()),
5987 script);
5988 masm.addPtr(scratch, script);
5989 masm.loadPtr(
5990 BaseIndex(script, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
5991 scratch);
5992 masm.jump(scratch);
5994 masm.bind(&noBaselineScript);
5996 // Initialize interpreter frame fields.
5997 Address flagsAddr(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
5998 Address scriptAddr(BaselineFrameReg,
5999 BaselineFrame::reverseOffsetOfInterpreterScript());
6000 masm.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER), flagsAddr);
6001 masm.storePtr(script, scriptAddr);
6003 // Initialize pc and jump to it.
6004 emitInterpJumpToResumeEntry(script, resumeIndex, scratch);
6005 return true;
6008 template <typename Handler>
6009 bool BaselineCodeGen<Handler>::emit_Resume() {
6010 frame.syncStack(0);
6011 masm.assertStackAlignment(sizeof(Value), 0);
6013 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
6014 regs.take(BaselineFrameReg);
6015 if (HasInterpreterPCReg()) {
6016 regs.take(InterpreterPCReg);
6019 saveInterpreterPCReg();
6021 // Load generator object.
6022 Register genObj = regs.takeAny();
6023 masm.unboxObject(frame.addressOfStackValue(-3), genObj);
6025 // Load callee.
6026 Register callee = regs.takeAny();
6027 masm.unboxObject(
6028 Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), callee);
6030 // Save a pointer to the JSOp::Resume operand stack Values.
6031 Register callerStackPtr = regs.takeAny();
6032 masm.computeEffectiveAddress(frame.addressOfStackValue(-1), callerStackPtr);
6034 // Branch to |interpret| to resume the generator in the C++ interpreter if the
6035 // script does not have a JitScript.
6036 Label interpret;
6037 Register scratch1 = regs.takeAny();
6038 masm.loadPtr(Address(callee, JSFunction::offsetOfScript()), scratch1);
6039 masm.branchIfScriptHasNoJitScript(scratch1, &interpret);
6041 #ifdef JS_TRACE_LOGGING
6042 if (JS::TraceLoggerSupported()) {
6043 // TODO (bug 1565788): add Baseline Interpreter support.
6044 MOZ_CRASH("Unimplemented Baseline Interpreter TraceLogger support");
6045 masm.loadJitScript(scratch1, scratch1);
6046 Address baselineAddr(scratch1, JitScript::offsetOfBaselineScript());
6047 masm.loadPtr(baselineAddr, scratch1);
6048 if (!emitTraceLoggerResume(scratch1, regs)) {
6049 return false;
6052 #endif
6054 // Push |undefined| for all formals.
6055 Register scratch2 = regs.takeAny();
6056 Label loop, loopDone;
6057 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch2);
6058 masm.branchTest32(Assembler::Zero, scratch2, scratch2, &loopDone);
6059 masm.bind(&loop);
6061 masm.pushValue(UndefinedValue());
6062 masm.branchSub32(Assembler::NonZero, Imm32(1), scratch2, &loop);
6064 masm.bind(&loopDone);
6066 // Push |undefined| for |this|.
6067 masm.pushValue(UndefinedValue());
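  // The real argument and |this| values aren't needed to resume: whatever the
  // generator body still uses was saved on the generator object (environment
  // chain, arguments object, expression stack) and is restored below, so
  // |undefined| padding is enough to give the frame its expected shape.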
6069 // Update BaselineFrame frameSize field and create the frame descriptor.
6070 masm.computeEffectiveAddress(
6071 Address(BaselineFrameReg, BaselineFrame::FramePointerOffset), scratch2);
6072 masm.subStackPtrFrom(scratch2);
6073 #ifdef DEBUG
6074 masm.store32(scratch2, frame.addressOfDebugFrameSize());
6075 #endif
6076 masm.makeFrameDescriptor(scratch2, FrameType::BaselineJS,
6077 JitFrameLayout::Size());
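  // The actual argc, callee token and frame descriptor pushed next, together
  // with the return address pushed further below, form a JitFrameLayout, so
  // the resumed generator runs in what looks like a regular BaselineJS frame
  // to stack walkers.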
6079 masm.push(Imm32(0)); // actual argc
6080 masm.PushCalleeToken(callee, /* constructing = */ false);
6081 masm.push(scratch2); // frame descriptor
6083 // PushCalleeToken bumped framePushed. Reset it.
6084 MOZ_ASSERT(masm.framePushed() == sizeof(uintptr_t));
6085 masm.setFramePushed(0);
6087 regs.add(callee);
6089 // Push a fake return address on the stack. We will resume here when the
6090 // generator returns.
6091 Label genStart, returnTarget;
6092 #ifdef JS_USE_LINK_REGISTER
6093 masm.call(&genStart);
6094 #else
6095 masm.callAndPushReturnAddress(&genStart);
6096 #endif
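  // The call branches forward to |genStart| and leaves the address of the
  // jump emitted just below as the pushed return address: when the generator
  // frame later returns, control comes back to that address, lands on the
  // jump, and is forwarded to |returnTarget|.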
6098 // Record the return address so the return offset -> pc mapping works.
6099 if (!handler.recordCallRetAddr(cx, RetAddrEntry::Kind::IC,
6100 masm.currentOffset())) {
6101 return false;
6104 masm.jump(&returnTarget);
6105 masm.bind(&genStart);
6106 #ifdef JS_USE_LINK_REGISTER
6107 masm.pushReturnAddress();
6108 #endif
6110   // If profiler instrumentation is on, update lastProfilingFrame on the
6111   // current JitActivation.
6113 Register scratchReg = scratch2;
6114 Label skip;
6115 AbsoluteAddress addressOfEnabled(
6116 cx->runtime()->geckoProfiler().addressOfEnabled());
6117 masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skip);
6118 masm.loadJSContext(scratchReg);
6119 masm.loadPtr(Address(scratchReg, JSContext::offsetOfProfilingActivation()),
6120 scratchReg);
6121 masm.storeStackPtr(
6122 Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()));
6123 masm.bind(&skip);
6126 // Construct BaselineFrame.
6127 masm.push(BaselineFrameReg);
6128 masm.moveStackPtrTo(BaselineFrameReg);
6129 masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
6130 masm.assertStackAlignment(sizeof(Value), 0);
6132 // Store flags and env chain.
6133 masm.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
6134 masm.unboxObject(
6135 Address(genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
6136 scratch2);
6137 masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
6139 // Store the arguments object if there is one.
6140 Label noArgsObj;
6141 Address argsObjSlot(genObj, AbstractGeneratorObject::offsetOfArgsObjSlot());
6142 masm.fallibleUnboxObject(argsObjSlot, scratch2, &noArgsObj);
6144 masm.storePtr(scratch2, frame.addressOfArgsObj());
6145 masm.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ), frame.addressOfFlags());
6147 masm.bind(&noArgsObj);
6149 // Push expression slots if needed.
6150 Label noExprStack;
6151 Address exprStackSlot(genObj,
6152 AbstractGeneratorObject::offsetOfExpressionStackSlot());
6153 masm.fallibleUnboxObject(exprStackSlot, scratch2, &noExprStack);
6155 Register initLength = regs.takeAny();
6156 masm.loadPtr(Address(scratch2, NativeObject::offsetOfElements()), scratch2);
6157 masm.load32(Address(scratch2, ObjectElements::offsetOfInitializedLength()),
6158 initLength);
6159 masm.store32(
6160 Imm32(0),
6161 Address(scratch2, ObjectElements::offsetOfInitializedLength()));
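    // Setting the initialized length to zero hands the saved Values over to
    // the frame; the guarded pre-barrier in the loop below keeps incremental
    // GC marking correct as the old array slots are dropped.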
6163 Label loop, loopDone;
6164 masm.branchTest32(Assembler::Zero, initLength, initLength, &loopDone);
6165 masm.bind(&loop);
6167 masm.pushValue(Address(scratch2, 0));
6168 masm.guardedCallPreBarrierAnyZone(Address(scratch2, 0), MIRType::Value,
6169 scratch1);
6170 masm.addPtr(Imm32(sizeof(Value)), scratch2);
6171 masm.branchSub32(Assembler::NonZero, Imm32(1), initLength, &loop);
6173 masm.bind(&loopDone);
6174 regs.add(initLength);
6177 masm.bind(&noExprStack);
6179 // Push arg, generator, resumeKind stack Values, in that order.
6180 masm.pushValue(Address(callerStackPtr, sizeof(Value)));
6181 masm.pushValue(JSVAL_TYPE_OBJECT, genObj);
6182 masm.pushValue(Address(callerStackPtr, 0));
6184 masm.switchToObjectRealm(genObj, scratch2);
6186 // Load script in scratch1.
6187 masm.unboxObject(
6188 Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1);
6189 masm.loadPtr(Address(scratch1, JSFunction::offsetOfScript()), scratch1);
6191 // Load resume index in scratch2 and mark generator as running.
6192 Address resumeIndexSlot(genObj,
6193 AbstractGeneratorObject::offsetOfResumeIndexSlot());
6194 masm.unboxInt32(resumeIndexSlot, scratch2);
6195 masm.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING),
6196 resumeIndexSlot);
6198 if (!emitEnterGeneratorCode(scratch1, scratch2, regs.getAny())) {
6199 return false;
6202 // Call into the VM to resume the generator in the C++ interpreter if there's
6203 // no JitScript.
6204 masm.bind(&interpret);
6206 prepareVMCall();
6208 pushArg(callerStackPtr);
6209 pushArg(genObj);
6211 using Fn = bool (*)(JSContext*, HandleObject, Value*, MutableHandleValue);
6212 if (!callVM<Fn, jit::InterpretResume>()) {
6213 return false;
6216 // After the generator returns, we restore the stack pointer, switch back to
6217 // the current realm, push the return value, and we're done.
6218 masm.bind(&returnTarget);
6219 masm.computeEffectiveAddress(frame.addressOfStackValue(-1),
6220 masm.getStackPointer());
6221 if (JSScript* script = handler.maybeScript()) {
6222 masm.switchToRealm(script->realm(), R2.scratchReg());
6223 } else {
6224 masm.switchToBaselineFrameRealm(R2.scratchReg());
6226 restoreInterpreterPCReg();
6227 frame.popn(3);
6228 frame.push(R0);
6229 return true;
6232 template <typename Handler>
6233 bool BaselineCodeGen<Handler>::emit_CheckResumeKind() {
6234 // Load resumeKind in R1, generator in R0.
6235 frame.popRegsAndSync(2);
6237 #ifdef DEBUG
6238 Label ok;
6239 masm.branchTestInt32(Assembler::Equal, R1, &ok);
6240 masm.assumeUnreachable("Expected int32 resumeKind");
6241 masm.bind(&ok);
6242 #endif
6244 // If resumeKind is 'next' we don't have to do anything.
6245 Label done;
6246 masm.unboxInt32(R1, R1.scratchReg());
6247 masm.branch32(Assembler::Equal, R1.scratchReg(),
6248 Imm32(int32_t(GeneratorResumeKind::Next)), &done);
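  // For the 'throw' and 'return' resume kinds, hand the frame, generator,
  // value and resume kind to the VM, which performs the throw or the forced
  // return.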
6250 prepareVMCall();
6252 pushArg(R1.scratchReg()); // resumeKind
6254 masm.loadValue(frame.addressOfStackValue(-1), R2);
6255 pushArg(R2); // arg
6257 masm.unboxObject(R0, R0.scratchReg());
6258 pushArg(R0.scratchReg()); // genObj
6260 masm.loadBaselineFramePtr(BaselineFrameReg, R2.scratchReg());
6261 pushArg(R2.scratchReg()); // frame
6263 using Fn = bool (*)(JSContext*, BaselineFrame*,
6264 Handle<AbstractGeneratorObject*>, HandleValue, int32_t);
6265 if (!callVM<Fn, jit::GeneratorThrowOrReturn>()) {
6266 return false;
6269 masm.bind(&done);
6270 return true;
6273 template <>
6274 bool BaselineCompilerCodeGen::emit_ResumeKind() {
6275 GeneratorResumeKind resumeKind = ResumeKindFromPC(handler.pc());
6276 frame.push(Int32Value(int32_t(resumeKind)));
6277 return true;
6280 template <>
6281 bool BaselineInterpreterCodeGen::emit_ResumeKind() {
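  // The resume kind is a uint8 operand of JSOp::ResumeKind; load it and tag
  // it as an Int32Value, matching the constant the compiler bakes in above.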
6282 LoadUint8Operand(masm, R0.scratchReg());
6283 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
6284 frame.push(R0);
6285 return true;
6288 template <typename Handler>
6289 bool BaselineCodeGen<Handler>::emit_DebugCheckSelfHosted() {
6290 #ifdef DEBUG
6291 frame.syncStack(0);
6293 masm.loadValue(frame.addressOfStackValue(-1), R0);
6295 prepareVMCall();
6296 pushArg(R0);
6298 using Fn = bool (*)(JSContext*, HandleValue);
6299 if (!callVM<Fn, js::Debug_CheckSelfHosted>()) {
6300 return false;
6302 #endif
6303 return true;
6306 template <typename Handler>
6307 bool BaselineCodeGen<Handler>::emit_IsConstructing() {
6308 frame.push(MagicValue(JS_IS_CONSTRUCTING));
6309 return true;
6312 template <>
6313 bool BaselineCompilerCodeGen::emit_JumpTarget() {
6314 MaybeIncrementCodeCoverageCounter(masm, handler.script(), handler.pc());
6315 return true;
6318 template <>
6319 bool BaselineInterpreterCodeGen::emit_JumpTarget() {
6320 Register scratch1 = R0.scratchReg();
6321 Register scratch2 = R1.scratchReg();
6323 Label skipCoverage;
6324 CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
6325 masm.call(handler.codeCoverageAtPCLabel());
6326 masm.bind(&skipCoverage);
6327 if (!handler.codeCoverageOffsets().append(toggleOffset.offset())) {
6328 return false;
6331 // Load icIndex in scratch1.
6332 LoadInt32Operand(masm, scratch1);
6334 // scratch1 := scratch1 * sizeof(ICEntry)
6335 static_assert(sizeof(ICEntry) == 8 || sizeof(ICEntry) == 16,
6336 "shift below depends on ICEntry size");
6337 uint32_t shift = (sizeof(ICEntry) == 16) ? 4 : 3;
6338 masm.lshiftPtr(Imm32(shift), scratch1);
6340 // Compute ICEntry* and store to frame->interpreterICEntry.
6341 if (JitOptions.warpBuilder) {
6342 masm.loadPtr(frame.addressOfICScript(), scratch2);
6343 masm.computeEffectiveAddress(
6344 BaseIndex(scratch2, scratch1, TimesOne, ICScript::offsetOfICEntries()),
6345 scratch2);
6346 } else {
6347 loadScript(scratch2);
6348 masm.loadJitScript(scratch2, scratch2);
6349 masm.computeEffectiveAddress(
6350 BaseIndex(scratch2, scratch1, TimesOne, JitScript::offsetOfICEntries()),
6351 scratch2);
6353 masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
6354 return true;
6357 template <typename Handler>
6358 bool BaselineCodeGen<Handler>::emit_CheckClassHeritage() {
6359 frame.syncStack(0);
6361 // Leave the heritage value on the stack.
6362 masm.loadValue(frame.addressOfStackValue(-1), R0);
6364 prepareVMCall();
6365 pushArg(R0);
6367 using Fn = bool (*)(JSContext*, HandleValue);
6368 return callVM<Fn, js::CheckClassHeritageOperation>();
6371 template <typename Handler>
6372 bool BaselineCodeGen<Handler>::emit_InitHomeObject() {
6373 // Load HomeObject in R0.
6374 frame.popRegsAndSync(1);
6376 // Load function off stack
6377 Register func = R2.scratchReg();
6378 masm.unboxObject(frame.addressOfStackValue(-1), func);
6380 // Set HOMEOBJECT_SLOT
6381 Register temp = R1.scratchReg();
6382 Address addr(func, FunctionExtended::offsetOfMethodHomeObjectSlot());
6383 masm.guardedCallPreBarrierAnyZone(addr, MIRType::Value, temp);
6384 masm.storeValue(R0, addr);
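  // A post-barrier is only needed when a nursery-allocated home object is
  // stored into a tenured function, so skip it if the function itself is in
  // the nursery or the stored value isn't a nursery cell.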
6386 Label skipBarrier;
6387 masm.branchPtrInNurseryChunk(Assembler::Equal, func, temp, &skipBarrier);
6388 masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
6389 masm.call(&postBarrierSlot_);
6390 masm.bind(&skipBarrier);
6392 return true;
6395 template <>
6396 bool BaselineCompilerCodeGen::emit_BuiltinObject() {
6397 // Built-in objects are constants for a given global.
6398 auto kind = BuiltinObjectKind(GET_UINT8(handler.pc()));
6399 JSObject* builtin = BuiltinObjectOperation(cx, kind);
6400 if (!builtin) {
6401 return false;
6403 frame.push(ObjectValue(*builtin));
6404 return true;
6407 template <>
6408 bool BaselineInterpreterCodeGen::emit_BuiltinObject() {
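  // Unlike the compiler above, the shared interpreter code can't bake the
  // built-in object in as a constant, so fetch it through a VM call.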
6409 prepareVMCall();
6411 pushUint8BytecodeOperandArg(R0.scratchReg());
6413 using Fn = JSObject* (*)(JSContext*, BuiltinObjectKind);
6414 if (!callVM<Fn, BuiltinObjectOperation>()) {
6415 return false;
6418 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6419 frame.push(R0);
6420 return true;
6423 template <typename Handler>
6424 bool BaselineCodeGen<Handler>::emit_ObjWithProto() {
6425 frame.syncStack(0);
6427 // Leave the proto value on the stack for the decompiler
6428 masm.loadValue(frame.addressOfStackValue(-1), R0);
6430 prepareVMCall();
6431 pushArg(R0);
6433 using Fn = PlainObject* (*)(JSContext*, HandleValue);
6434 if (!callVM<Fn, js::ObjectWithProtoOperation>()) {
6435 return false;
6438 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6439 frame.pop();
6440 frame.push(R0);
6441 return true;
6444 template <typename Handler>
6445 bool BaselineCodeGen<Handler>::emit_FunWithProto() {
6446 frame.popRegsAndSync(1);
6448 masm.unboxObject(R0, R0.scratchReg());
6449 masm.loadPtr(frame.addressOfEnvironmentChain(), R1.scratchReg());
6451 prepareVMCall();
6452 pushArg(R0.scratchReg());
6453 pushArg(R1.scratchReg());
6454 pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
6455 R1.scratchReg());
6457 using Fn =
6458 JSObject* (*)(JSContext*, HandleFunction, HandleObject, HandleObject);
6459 if (!callVM<Fn, js::FunWithProtoOperation>()) {
6460 return false;
6463 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6464 frame.push(R0);
6465 return true;
6468 template <typename Handler>
6469 bool BaselineCodeGen<Handler>::emit_ClassConstructor() {
6470 frame.syncStack(0);
6472 // Pass nullptr as prototype to MakeDefaultConstructor
6473 prepareVMCall();
6474 pushArg(ImmPtr(nullptr));
6475 pushBytecodePCArg();
6476 pushScriptArg();
6478 using Fn =
6479 JSFunction* (*)(JSContext*, HandleScript, jsbytecode*, HandleObject);
6480 if (!callVM<Fn, js::MakeDefaultConstructor>()) {
6481 return false;
6484 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6485 frame.push(R0);
6486 return true;
6489 template <typename Handler>
6490 bool BaselineCodeGen<Handler>::emit_DerivedConstructor() {
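  // Same as emit_ClassConstructor above, except that the derived class's
  // prototype (popped into R0 below) is passed to MakeDefaultConstructor
  // instead of nullptr.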
6491 frame.popRegsAndSync(1);
6493 masm.unboxObject(R0, R0.scratchReg());
6495 prepareVMCall();
6496 pushArg(R0.scratchReg());
6497 pushBytecodePCArg();
6498 pushScriptArg();
6500 using Fn =
6501 JSFunction* (*)(JSContext*, HandleScript, jsbytecode*, HandleObject);
6502 if (!callVM<Fn, js::MakeDefaultConstructor>()) {
6503 return false;
6506 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6507 frame.push(R0);
6508 return true;
6511 template <>
6512 bool BaselineCompilerCodeGen::emit_ImportMeta() {
6513 // Note: this is like the interpreter implementation, but optimized a bit by
6514 // calling GetModuleObjectForScript at compile-time.
6516 RootedModuleObject module(cx, GetModuleObjectForScript(handler.script()));
6517 MOZ_ASSERT(module);
6519 frame.syncStack(0);
6521 prepareVMCall();
6522 pushArg(ImmGCPtr(module));
6524 using Fn = JSObject* (*)(JSContext*, HandleObject);
6525 if (!callVM<Fn, js::GetOrCreateModuleMetaObject>()) {
6526 return false;
6529 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6530 frame.push(R0);
6531 return true;
6534 template <>
6535 bool BaselineInterpreterCodeGen::emit_ImportMeta() {
6536 prepareVMCall();
6538 pushScriptArg();
6540 using Fn = JSObject* (*)(JSContext*, HandleScript);
6541 if (!callVM<Fn, ImportMetaOperation>()) {
6542 return false;
6545 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6546 frame.push(R0);
6547 return true;
6550 template <typename Handler>
6551 bool BaselineCodeGen<Handler>::emit_DynamicImport() {
6552 // Put specifier value in R0.
6553 frame.popRegsAndSync(1);
6555 prepareVMCall();
6556 pushArg(R0);
6557 pushScriptArg();
6559 using Fn = JSObject* (*)(JSContext*, HandleScript, HandleValue);
6560 if (!callVM<Fn, js::StartDynamicModuleImport>()) {
6561 return false;
6564 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6565 frame.push(R0);
6566 return true;
6569 template <>
6570 bool BaselineCompilerCodeGen::emit_InstrumentationActive() {
6571 frame.syncStack(0);
6573 // RealmInstrumentation cannot be removed from a global without destroying the
6574 // entire realm, so its active address can be embedded into jitcode.
6575 const int32_t* address = RealmInstrumentation::addressOfActive(cx->global());
6577 Register scratch = R0.scratchReg();
6578 masm.load32(AbsoluteAddress(address), scratch);
6579 masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, R0);
6580 frame.push(R0, JSVAL_TYPE_BOOLEAN);
6582 return true;
6585 template <>
6586 bool BaselineInterpreterCodeGen::emit_InstrumentationActive() {
6587 prepareVMCall();
6589 using Fn = bool (*)(JSContext*, MutableHandleValue);
6590 if (!callVM<Fn, InstrumentationActiveOperation>()) {
6591 return false;
6594 frame.push(R0);
6595 return true;
6598 template <>
6599 bool BaselineCompilerCodeGen::emit_InstrumentationCallback() {
6600 JSObject* obj = RealmInstrumentation::getCallback(cx->global());
6601 MOZ_ASSERT(obj);
6602 frame.push(ObjectValue(*obj));
6603 return true;
6606 template <>
6607 bool BaselineInterpreterCodeGen::emit_InstrumentationCallback() {
6608 prepareVMCall();
6610 using Fn = JSObject* (*)(JSContext*);
6611 if (!callVM<Fn, InstrumentationCallbackOperation>()) {
6612 return false;
6615 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
6616 frame.push(R0);
6617 return true;
6620 template <>
6621 bool BaselineCompilerCodeGen::emit_InstrumentationScriptId() {
6622 int32_t scriptId;
6623 RootedScript script(cx, handler.script());
6624 if (!RealmInstrumentation::getScriptId(cx, cx->global(), script, &scriptId)) {
6625 return false;
6627 frame.push(Int32Value(scriptId));
6628 return true;
6631 template <>
6632 bool BaselineInterpreterCodeGen::emit_InstrumentationScriptId() {
6633 prepareVMCall();
6634 pushScriptArg();
6636 using Fn = bool (*)(JSContext*, HandleScript, MutableHandleValue);
6637 if (!callVM<Fn, InstrumentationScriptIdOperation>()) {
6638 return false;
6641 frame.push(R0);
6642 return true;
6645 template <>
6646 bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
6647 // Caller is responsible for checking script->hasForceInterpreterOp().
6648 MOZ_CRASH("JSOp::ForceInterpreter in baseline");
6651 template <>
6652 bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
6653 masm.assumeUnreachable("JSOp::ForceInterpreter");
6654 return true;
6657 template <typename Handler>
6658 bool BaselineCodeGen<Handler>::emitPrologue() {
6659 #ifdef JS_USE_LINK_REGISTER
6660 // Push link register from generateEnterJIT()'s BLR.
6661 masm.pushReturnAddress();
6662 masm.checkStackAlignment();
6663 #endif
6664 emitProfilerEnterFrame();
6666 masm.push(BaselineFrameReg);
6667 masm.moveStackPtrTo(BaselineFrameReg);
6668 masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
6670 // Initialize BaselineFrame. Also handles env chain pre-initialization (in
6671 // case GC gets run during stack check). For global and eval scripts, the env
6672 // chain is in R1. For function scripts, the env chain is in the callee.
6673 emitInitFrameFields(R1.scratchReg());
6675 // When compiling with Debugger instrumentation, set the debuggeeness of
6676 // the frame before any operation that can call into the VM.
6677 if (!emitIsDebuggeeCheck()) {
6678 return false;
6681 // Initialize the env chain before any operation that may call into the VM and
6682 // trigger a GC.
6683 if (!initEnvironmentChain()) {
6684 return false;
6687 // Check for overrecursion before initializing locals.
6688 if (!emitStackCheck()) {
6689 return false;
6692 emitInitializeLocals();
6694 #ifdef JS_TRACE_LOGGING
6695 if (JS::TraceLoggerSupported() && !emitTraceLoggerEnter()) {
6696 return false;
6698 #endif
6700 // Ion prologue bailouts will enter here in the Baseline Interpreter.
6701 masm.bind(&bailoutPrologue_);
6703 frame.assertSyncedStack();
6705 if (JSScript* script = handler.maybeScript()) {
6706 masm.debugAssertContextRealm(script->realm(), R1.scratchReg());
6709 if (!emitDebugPrologue()) {
6710 return false;
6713 if (!emitHandleCodeCoverageAtPrologue()) {
6714 return false;
6717 if (!emitWarmUpCounterIncrement()) {
6718 return false;
6721 warmUpCheckPrologueOffset_ = CodeOffset(masm.currentOffset());
6723 if (!emitArgumentTypeChecks()) {
6724 return false;
6727 return true;
6730 template <typename Handler>
6731 bool BaselineCodeGen<Handler>::emitEpilogue() {
6732 masm.bind(&return_);
6734 if (!handler.shouldEmitDebugEpilogueAtReturnOp()) {
6735 if (!emitDebugEpilogue()) {
6736 return false;
6740 #ifdef JS_TRACE_LOGGING
6741 if (JS::TraceLoggerSupported() && !emitTraceLoggerExit()) {
6742 return false;
6744 #endif
6746 masm.moveToStackPtr(BaselineFrameReg);
6747 masm.pop(BaselineFrameReg);
6749 emitProfilerExitFrame();
6751 masm.ret();
6752 return true;
6755 MethodStatus BaselineCompiler::emitBody() {
6756 JSScript* script = handler.script();
6757 MOZ_ASSERT(handler.pc() == script->code());
6759 mozilla::DebugOnly<jsbytecode*> prevpc = handler.pc();
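  // |prevpc| only feeds the debug-only assertion after this loop, which
  // checks that compilation ended on a JSOp::RetRval.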
6761 while (true) {
6762 JSOp op = JSOp(*handler.pc());
6763 JitSpew(JitSpew_BaselineOp, "Compiling op @ %d: %s",
6764 int(script->pcToOffset(handler.pc())), CodeName(op));
6766 BytecodeInfo* info = handler.analysis().maybeInfo(handler.pc());
6768 // Skip unreachable ops.
6769 if (!info) {
6770       // Stop emitting if this was the last instruction.
6771 handler.moveToNextPC();
6772 if (handler.pc() >= script->codeEnd()) {
6773 break;
6776 prevpc = handler.pc();
6777 continue;
6780 if (info->jumpTarget) {
6781 // Fully sync the stack if there are incoming jumps.
6782 frame.syncStack(0);
6783 frame.setStackDepth(info->stackDepth);
6784 masm.bind(handler.labelOf(handler.pc()));
6785 } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
6786 // Also fully sync the stack if the debugger is enabled.
6787 frame.syncStack(0);
6788 } else {
6789 // At the beginning of any op, at most the top 2 stack-values are
6790 // unsynced.
6791 if (frame.stackDepth() > 2) {
6792 frame.syncStack(2);
6796 frame.assertValidState(*info);
6798 // If the script has a resume offset for this pc we need to keep track of
6799 // the native code offset.
6800 if (info->hasResumeOffset) {
6801 frame.assertSyncedStack();
6802 uint32_t pcOffset = script->pcToOffset(handler.pc());
6803 uint32_t nativeOffset = masm.currentOffset();
6804 if (!resumeOffsetEntries_.emplaceBack(pcOffset, nativeOffset)) {
6805 ReportOutOfMemory(cx);
6806 return Method_Error;
6810 // Emit traps for breakpoints and step mode.
6811 if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
6812 return Method_Error;
6815 #define EMIT_OP(OP, ...) \
6816 case JSOp::OP: \
6817 if (MOZ_UNLIKELY(!this->emit_##OP())) return Method_Error; \
6818 break;
6820 switch (op) {
6821 FOR_EACH_OPCODE(EMIT_OP)
6822 default:
6823 MOZ_CRASH("Unexpected op");
6826 #undef EMIT_OP
6828 MOZ_ASSERT(masm.framePushed() == 0);
6830     // Stop emitting if this was the last instruction.
6831 handler.moveToNextPC();
6832 if (handler.pc() >= script->codeEnd()) {
6833 break;
6836 #ifdef DEBUG
6837 prevpc = handler.pc();
6838 #endif
6841 MOZ_ASSERT(JSOp(*prevpc) == JSOp::RetRval);
6842 return Method_Compiled;
6845 bool BaselineInterpreterGenerator::emitDebugTrap() {
6846 CodeOffset offset = masm.nopPatchableToCall();
6847 if (!debugTrapOffsets_.append(offset.offset())) {
6848 ReportOutOfMemory(cx);
6849 return false;
6852 return true;
6855 // Register holding the bytecode pc during dispatch. This exists so the debug
6856 // trap handler can reload the pc into this register when it's done.
6857 static constexpr Register InterpreterPCRegAtDispatch =
6858 HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
6860 bool BaselineInterpreterGenerator::emitInterpreterLoop() {
6861 Register scratch1 = R0.scratchReg();
6862 Register scratch2 = R1.scratchReg();
6864 // Entry point for interpreting a bytecode op. No registers are live except
6865 // for InterpreterPCReg.
6866 masm.bind(handler.interpretOpWithPCRegLabel());
6868 // Emit a patchable call for debugger breakpoints/stepping.
6869 if (!emitDebugTrap()) {
6870 return false;
6872 Label interpretOpAfterDebugTrap;
6873 masm.bind(&interpretOpAfterDebugTrap);
6875 // Load pc, bytecode op.
6876 Register pcReg = LoadBytecodePC(masm, scratch1);
6877 masm.load8ZeroExtend(Address(pcReg, 0), scratch1);
6879 // Jump to table[op].
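    // The table's base address isn't known yet: moveNearAddressWithPatch
    // loads a placeholder that generate() later patches to point at the
    // dispatch table emitted at the end of this function (tableOffset_).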
6881 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6882 if (!tableLabels_.append(label)) {
6883 return false;
6885 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6886 masm.branchToComputedAddress(pointer);
6889 // At the end of each op, emit code to bump the pc and jump to the
6890 // next op (this is also known as a threaded interpreter).
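  // In C terms the emitted epilogue behaves roughly like this (illustrative
  // sketch only):
  //
  //   pc += opLength;
  //   goto *table[*pc];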
6891 auto opEpilogue = [&](JSOp op, size_t opLength) -> bool {
6892 MOZ_ASSERT(masm.framePushed() == 0);
6894 if (!BytecodeFallsThrough(op)) {
6895 // Nothing to do.
6896 masm.assumeUnreachable("unexpected fall through");
6897 return true;
6900 // Bump frame->interpreterICEntry if needed.
6901 if (BytecodeOpHasIC(op)) {
6902 frame.bumpInterpreterICEntry();
6905 // Bump bytecode PC.
6906 if (HasInterpreterPCReg()) {
6907 MOZ_ASSERT(InterpreterPCRegAtDispatch == InterpreterPCReg);
6908 masm.addPtr(Imm32(opLength), InterpreterPCReg);
6909 } else {
6910 MOZ_ASSERT(InterpreterPCRegAtDispatch == scratch1);
6911 masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCRegAtDispatch);
6912 masm.addPtr(Imm32(opLength), InterpreterPCRegAtDispatch);
6913 masm.storePtr(InterpreterPCRegAtDispatch, frame.addressOfInterpreterPC());
6916 if (!emitDebugTrap()) {
6917 return false;
6920 // Load the opcode, jump to table[op].
6921 masm.load8ZeroExtend(Address(InterpreterPCRegAtDispatch, 0), scratch1);
6922 CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
6923 if (!tableLabels_.append(label)) {
6924 return false;
6926 BaseIndex pointer(scratch2, scratch1, ScalePointer);
6927 masm.branchToComputedAddress(pointer);
6928 return true;
6931 // Emit code for each bytecode op.
6932 Label opLabels[JSOP_LIMIT];
6933 #define EMIT_OP(OP, ...)                                            \
6934   {                                                                 \
6935     masm.bind(&opLabels[uint8_t(JSOp::OP)]);                        \
6936     handler.setCurrentOp(JSOp::OP);                                 \
6937     if (!this->emit_##OP()) {                                       \
6938       return false;                                                 \
6939     }                                                               \
6940     if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) {                   \
6941       return false;                                                 \
6942     }                                                               \
6943     handler.resetCurrentOp();                                       \
6944   }
6945   FOR_EACH_OPCODE(EMIT_OP)
6946 #undef EMIT_OP
6948 // External entry point to start interpreting bytecode ops. This is used for
6949 // things like exception handling and OSR. DebugModeOSR patches JIT frames to
6950 // return here from the DebugTrapHandler.
6951 masm.bind(handler.interpretOpLabel());
6952 interpretOpOffset_ = masm.currentOffset();
6953 restoreInterpreterPCReg();
6954 masm.jump(handler.interpretOpWithPCRegLabel());
6956 // Second external entry point: this skips the debug trap for the first op
6957 // and is used by OSR.
6958 interpretOpNoDebugTrapOffset_ = masm.currentOffset();
6959 restoreInterpreterPCReg();
6960 masm.jump(&interpretOpAfterDebugTrap);
6962 // External entry point for Ion prologue bailouts.
6963 bailoutPrologueOffset_ = CodeOffset(masm.currentOffset());
6964 restoreInterpreterPCReg();
6965 masm.jump(&bailoutPrologue_);
6967 // Emit debug trap handler code (target of patchable call instructions). This
6968 // is just a tail call to the debug trap handler trampoline code.
6970 JitRuntime* jrt = cx->runtime()->jitRuntime();
6971 JitCode* handlerCode =
6972 jrt->debugTrapHandler(cx, DebugTrapHandlerKind::Interpreter);
6973 if (!handlerCode) {
6974 return false;
6977 debugTrapHandlerOffset_ = masm.currentOffset();
6978 masm.jump(handlerCode);
6981 // Emit the table.
6982 masm.haltingAlign(sizeof(void*));
6984 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
6985 size_t numInstructions = JSOP_LIMIT * (sizeof(uintptr_t) / sizeof(uint32_t));
6986 AutoForbidPoolsAndNops afp(&masm, numInstructions);
6987 #endif
6989 tableOffset_ = masm.currentOffset();
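  // One code pointer per JSOp, emitted via CodeLabels so the final addresses
  // are filled in when the code is linked. On ARM/ARM64, constant pools and
  // nops are forbidden above so nothing gets inserted between the entries.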
6991 for (size_t i = 0; i < JSOP_LIMIT; i++) {
6992 const Label& opLabel = opLabels[i];
6993 MOZ_ASSERT(opLabel.bound());
6994 CodeLabel cl;
6995 masm.writeCodePointer(&cl);
6996 cl.target()->bind(opLabel.offset());
6997 masm.addCodeLabel(cl);
7000 return true;
7003 void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
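  // Two small out-of-line stubs: one reached from the prologue and one from
  // JSOp::JumpTarget (see emit_JumpTarget above) when code-coverage
  // instrumentation is toggled on. Each forwards to the C++ coverage hook via
  // an ABI call, saving and restoring the interpreter PC register around it.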
7004 masm.bind(handler.codeCoverageAtPrologueLabel());
7005 #ifdef JS_USE_LINK_REGISTER
7006 masm.pushReturnAddress();
7007 #endif
7009 saveInterpreterPCReg();
7011 masm.setupUnalignedABICall(R0.scratchReg());
7012 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
7013 masm.passABIArg(R0.scratchReg());
7014 masm.callWithABI(
7015 JS_FUNC_TO_DATA_PTR(void*, jit::HandleCodeCoverageAtPrologue));
7017 restoreInterpreterPCReg();
7018 masm.ret();
7020 masm.bind(handler.codeCoverageAtPCLabel());
7021 #ifdef JS_USE_LINK_REGISTER
7022 masm.pushReturnAddress();
7023 #endif
7025 saveInterpreterPCReg();
7027 masm.setupUnalignedABICall(R0.scratchReg());
7028 masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
7029 masm.passABIArg(R0.scratchReg());
7030 Register pcReg = LoadBytecodePC(masm, R2.scratchReg());
7031 masm.passABIArg(pcReg);
7032 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::HandleCodeCoverageAtPC));
7034 restoreInterpreterPCReg();
7035 masm.ret();
7038 bool BaselineInterpreterGenerator::generate(BaselineInterpreter& interpreter) {
7039 if (!emitPrologue()) {
7040 return false;
7043 if (!emitInterpreterLoop()) {
7044 return false;
7047 if (!emitEpilogue()) {
7048 return false;
7051 if (!emitOutOfLinePostBarrierSlot()) {
7052 return false;
7055 emitOutOfLineCodeCoverageInstrumentation();
7058 Linker linker(masm);
7059 if (masm.oom()) {
7060 ReportOutOfMemory(cx);
7061 return false;
7064 JitCode* code = linker.newCode(cx, CodeKind::Other);
7065 if (!code) {
7066 return false;
7069 // Register BaselineInterpreter code with the profiler's JitCode table.
7071 JitcodeGlobalEntry::BaselineInterpreterEntry entry;
7072 entry.init(code, code->raw(), code->rawEnd());
7074 JitcodeGlobalTable* globalTable =
7075 cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
7076 if (!globalTable->addEntry(entry)) {
7077 ReportOutOfMemory(cx);
7078 return false;
7081 code->setHasBytecodeMap();
7084 // Patch loads now that we know the tableswitch base address.
7085 CodeLocationLabel tableLoc(code, CodeOffset(tableOffset_));
7086 for (CodeOffset off : tableLabels_) {
7087 MacroAssembler::patchNearAddressMove(CodeLocationLabel(code, off),
7088 tableLoc);
7091 #ifdef JS_ION_PERF
7092 writePerfSpewerJitCodeProfile(code, "BaselineInterpreter");
7093 #endif
7095 #ifdef MOZ_VTUNE
7096 vtune::MarkStub(code, "BaselineInterpreter");
7097 #endif
7099 interpreter.init(
7100 code, interpretOpOffset_, interpretOpNoDebugTrapOffset_,
7101 bailoutPrologueOffset_.offset(),
7102 profilerEnterFrameToggleOffset_.offset(),
7103 profilerExitFrameToggleOffset_.offset(), debugTrapHandlerOffset_,
7104 std::move(handler.debugInstrumentationOffsets()),
7105 std::move(debugTrapOffsets_), std::move(handler.codeCoverageOffsets()),
7106 std::move(handler.icReturnOffsets()), handler.callVMOffsets());
7109 if (cx->runtime()->geckoProfiler().enabled()) {
7110 interpreter.toggleProfilerInstrumentation(true);
7113 if (coverage::IsLCovEnabled()) {
7114 interpreter.toggleCodeCoverageInstrumentationUnchecked(true);
7117 return true;
7120 JitCode* JitRuntime::generateDebugTrapHandler(JSContext* cx,
7121 DebugTrapHandlerKind kind) {
7122 StackMacroAssembler masm;
7124 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
7125 regs.takeUnchecked(BaselineFrameReg);
7126 regs.takeUnchecked(ICStubReg);
7127 if (HasInterpreterPCReg()) {
7128 regs.takeUnchecked(InterpreterPCReg);
7130 #ifdef JS_CODEGEN_ARM
7131 regs.takeUnchecked(BaselineSecondScratchReg);
7132 masm.setSecondScratchReg(BaselineSecondScratchReg);
7133 #endif
7134 Register scratch1 = regs.takeAny();
7135 Register scratch2 = regs.takeAny();
7136 Register scratch3 = regs.takeAny();
7138 if (kind == DebugTrapHandlerKind::Interpreter) {
7139 // The interpreter calls this for every script when debugging, so check if
7140 // the script has any breakpoints or is in step mode before calling into
7141 // C++.
7142 Label hasDebugScript;
7143 Address scriptAddr(BaselineFrameReg,
7144 BaselineFrame::reverseOffsetOfInterpreterScript());
7145 masm.loadPtr(scriptAddr, scratch1);
7146 masm.branchTest32(Assembler::NonZero,
7147 Address(scratch1, JSScript::offsetOfMutableFlags()),
7148 Imm32(int32_t(JSScript::MutableFlags::HasDebugScript)),
7149 &hasDebugScript);
7150 masm.abiret();
7151 masm.bind(&hasDebugScript);
7153 if (HasInterpreterPCReg()) {
7154 // Update frame's bytecode pc because the debugger depends on it.
7155 Address pcAddr(BaselineFrameReg,
7156 BaselineFrame::reverseOffsetOfInterpreterPC());
7157 masm.storePtr(InterpreterPCReg, pcAddr);
7161 // Load the return address in scratch1.
7162 masm.loadAbiReturnAddress(scratch1);
7164 // Load BaselineFrame pointer in scratch2.
7165 masm.loadBaselineFramePtr(BaselineFrameReg, scratch2);
7167 // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
7168 // the stub frame has a nullptr ICStub pointer, since this pointer is marked
7169 // during GC.
7170 masm.movePtr(ImmPtr(nullptr), ICStubReg);
7171 EmitBaselineEnterStubFrame(masm, scratch3);
7173 using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t*);
7174 VMFunctionId id = VMFunctionToId<Fn, jit::HandleDebugTrap>::id;
7175 TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
7177 masm.push(scratch1);
7178 masm.push(scratch2);
7179 EmitBaselineCallVM(code, masm);
7181 EmitBaselineLeaveStubFrame(masm);
7183 if (kind == DebugTrapHandlerKind::Interpreter) {
7184 // We have to reload the bytecode pc register.
7185 Address pcAddr(BaselineFrameReg,
7186 BaselineFrame::reverseOffsetOfInterpreterPC());
7187 masm.loadPtr(pcAddr, InterpreterPCRegAtDispatch);
7189 masm.abiret();
7191 Linker linker(masm);
7192 JitCode* handlerCode = linker.newCode(cx, CodeKind::Other);
7193 if (!handlerCode) {
7194 return nullptr;
7197 #ifdef JS_ION_PERF
7198 writePerfSpewerJitCodeProfile(handlerCode, "DebugTrapHandler");
7199 #endif
7200 #ifdef MOZ_VTUNE
7201 vtune::MarkStub(handlerCode, "DebugTrapHandler");
7202 #endif
7204 return handlerCode;
7207 } // namespace jit
7208 } // namespace js