js/src/jit/WarpBuilder.cpp

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/WarpBuilder.h"

#include "mozilla/DebugOnly.h"

#include "jit/BaselineFrame.h"
#include "jit/CacheIR.h"
#include "jit/CompileInfo.h"
#include "jit/InlineScriptTree.h"
#include "jit/MIR.h"
#include "jit/MIRGenerator.h"
#include "jit/MIRGraph.h"
#include "jit/WarpCacheIRTranspiler.h"
#include "jit/WarpSnapshot.h"
#include "js/friend/ErrorMessages.h"  // JSMSG_BAD_CONST_ASSIGN
#include "vm/GeneratorObject.h"
#include "vm/Interpreter.h"
#include "vm/Opcodes.h"

#include "gc/ObjectKind-inl.h"
#include "vm/BytecodeIterator-inl.h"
#include "vm/BytecodeLocation-inl.h"
#include "vm/JSObject-inl.h"

using namespace js;
using namespace js::jit;

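// WarpBuilder translates a script's bytecode, together with the data captured
// in its WarpSnapshot, into the MIR graph that the rest of the Warp/Ion
// pipeline optimizes and compiles.
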
// Used for building the outermost script.
WarpBuilder::WarpBuilder(WarpSnapshot& snapshot, MIRGenerator& mirGen,
                         WarpCompilation* warpCompilation)
    : WarpBuilderShared(snapshot, mirGen, nullptr),
      warpCompilation_(warpCompilation),
      graph_(mirGen.graph()),
      info_(mirGen.outerInfo()),
      scriptSnapshot_(snapshot.rootScript()),
      script_(snapshot.rootScript()->script()),
      loopStack_(mirGen.alloc()) {
  opSnapshotIter_ = scriptSnapshot_->opSnapshots().getFirst();
}

// Used for building inlined scripts.
WarpBuilder::WarpBuilder(WarpBuilder* caller, WarpScriptSnapshot* snapshot,
                         CompileInfo& compileInfo, CallInfo* inlineCallInfo,
                         MResumePoint* callerResumePoint)
    : WarpBuilderShared(caller->snapshot(), caller->mirGen(), nullptr),
      warpCompilation_(caller->warpCompilation()),
      graph_(caller->mirGen().graph()),
      info_(compileInfo),
      scriptSnapshot_(snapshot),
      script_(snapshot->script()),
      loopStack_(caller->mirGen().alloc()),
      callerBuilder_(caller),
      callerResumePoint_(callerResumePoint),
      inlineCallInfo_(inlineCallInfo) {
  opSnapshotIter_ = snapshot->opSnapshots().getFirst();
}

BytecodeSite* WarpBuilder::newBytecodeSite(BytecodeLocation loc) {
  jsbytecode* pc = loc.toRawBytecode();
  MOZ_ASSERT(info().inlineScriptTree()->script()->containsPC(pc));
  return new (alloc()) BytecodeSite(info().inlineScriptTree(), pc);
}

const WarpOpSnapshot* WarpBuilder::getOpSnapshotImpl(
    BytecodeLocation loc, WarpOpSnapshot::Kind kind) {
  uint32_t offset = loc.bytecodeToOffset(script_);

  // Skip snapshots until we get to a snapshot with offset >= offset. This is
  // a loop because WarpBuilder can skip unreachable bytecode ops.
  while (opSnapshotIter_ && opSnapshotIter_->offset() < offset) {
    opSnapshotIter_ = opSnapshotIter_->getNext();
  }

  if (!opSnapshotIter_ || opSnapshotIter_->offset() != offset ||
      opSnapshotIter_->kind() != kind) {
    return nullptr;
  }

  return opSnapshotIter_;
}

void WarpBuilder::initBlock(MBasicBlock* block) {
  graph().addBlock(block);

  block->setLoopDepth(loopDepth());

  current = block;
}

bool WarpBuilder::startNewBlock(MBasicBlock* predecessor, BytecodeLocation loc,
                                size_t numToPop) {
  MBasicBlock* block =
      MBasicBlock::NewPopN(graph(), info(), predecessor, newBytecodeSite(loc),
                           MBasicBlock::NORMAL, numToPop);
  if (!block) {
    return false;
  }

  initBlock(block);
  return true;
}

bool WarpBuilder::startNewEntryBlock(size_t stackDepth, BytecodeLocation loc) {
  MBasicBlock* block =
      MBasicBlock::New(graph(), stackDepth, info(), /* maybePred = */ nullptr,
                       newBytecodeSite(loc), MBasicBlock::NORMAL);
  if (!block) {
    return false;
  }

  initBlock(block);
  return true;
}

bool WarpBuilder::startNewLoopHeaderBlock(BytecodeLocation loopHead) {
  MBasicBlock* header = MBasicBlock::NewPendingLoopHeader(
      graph(), info(), current, newBytecodeSite(loopHead));
  if (!header) {
    return false;
  }

  initBlock(header);
  return loopStack_.emplaceBack(header);
}

bool WarpBuilder::startNewOsrPreHeaderBlock(BytecodeLocation loopHead) {
  MOZ_ASSERT(loopHead.is(JSOp::LoopHead));
  MOZ_ASSERT(loopHead.toRawBytecode() == info().osrPc());

  // Create two blocks:
  // * The OSR entry block. This is always the graph's second block and has no
  //   predecessors. This is the entry point for OSR from the Baseline JIT.
  // * The OSR preheader block. This has two predecessors: the OSR entry block
  //   and the current block.

  MBasicBlock* pred = current;

  // Create the OSR entry block.
  if (!startNewEntryBlock(pred->stackDepth(), loopHead)) {
    return false;
  }

  MBasicBlock* osrBlock = current;
  graph().setOsrBlock(osrBlock);
  graph().moveBlockAfter(*graph().begin(), osrBlock);

  MOsrEntry* entry = MOsrEntry::New(alloc());
  osrBlock->add(entry);

  // Initialize environment chain.
  {
    uint32_t slot = info().environmentChainSlot();
    MInstruction* envv;
    if (usesEnvironmentChain()) {
      envv = MOsrEnvironmentChain::New(alloc(), entry);
    } else {
      // Use an undefined value if the script does not need its environment
      // chain, to match the main entry point.
      envv = MConstant::New(alloc(), UndefinedValue());
    }
    osrBlock->add(envv);
    osrBlock->initSlot(slot, envv);
  }

  // Initialize return value.
  {
    MInstruction* returnValue;
    if (!script_->noScriptRval()) {
      returnValue = MOsrReturnValue::New(alloc(), entry);
    } else {
      returnValue = MConstant::New(alloc(), UndefinedValue());
    }
    osrBlock->add(returnValue);
    osrBlock->initSlot(info().returnValueSlot(), returnValue);
  }

  // Initialize arguments object.
  MInstruction* argsObj = nullptr;
  if (info().needsArgsObj()) {
    argsObj = MOsrArgumentsObject::New(alloc(), entry);
    osrBlock->add(argsObj);
    osrBlock->initSlot(info().argsObjSlot(), argsObj);
  }

  if (info().hasFunMaybeLazy()) {
    // Initialize |this| parameter.
    MParameter* thisv = MParameter::New(alloc(), MParameter::THIS_SLOT);
    osrBlock->add(thisv);
    osrBlock->initSlot(info().thisSlot(), thisv);

    // Initialize arguments. There are three cases:
    //
    // 1) There's no ArgumentsObject or it doesn't alias formals. In this case
    //    we can just use the frame's argument slot.
    // 2) The ArgumentsObject aliases formals and the argument is stored in the
    //    CallObject. Use |undefined| because we can't load from the arguments
    //    object and code will use the CallObject anyway.
    // 3) The ArgumentsObject aliases formals and the argument isn't stored in
    //    the CallObject. We have to load it from the ArgumentsObject.
    for (uint32_t i = 0; i < info().nargs(); i++) {
      uint32_t slot = info().argSlotUnchecked(i);
      MInstruction* osrv;
      if (!info().argsObjAliasesFormals()) {
        osrv = MParameter::New(alloc().fallible(), i);
      } else if (script_->formalIsAliased(i)) {
        osrv = MConstant::New(alloc().fallible(), UndefinedValue());
      } else {
        osrv = MGetArgumentsObjectArg::New(alloc().fallible(), argsObj, i);
      }
      if (!osrv) {
        return false;
      }
      current->add(osrv);
      current->initSlot(slot, osrv);
    }
  }

  // Initialize locals.
  uint32_t nlocals = info().nlocals();
  for (uint32_t i = 0; i < nlocals; i++) {
    uint32_t slot = info().localSlot(i);
    ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(i);
    MOsrValue* osrv = MOsrValue::New(alloc().fallible(), entry, offset);
    if (!osrv) {
      return false;
    }
    current->add(osrv);
    current->initSlot(slot, osrv);
  }

  // Initialize expression stack slots.
  uint32_t numStackSlots = current->stackDepth() - info().firstStackSlot();
  for (uint32_t i = 0; i < numStackSlots; i++) {
    uint32_t slot = info().stackSlot(i);
    ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(nlocals + i);
    MOsrValue* osrv = MOsrValue::New(alloc().fallible(), entry, offset);
    if (!osrv) {
      return false;
    }
    current->add(osrv);
    current->initSlot(slot, osrv);
  }

  MStart* start = MStart::New(alloc());
  current->add(start);

  // Note: phi specialization can add type guard instructions to the OSR entry
  // block if needed. See TypeAnalyzer::shouldSpecializeOsrPhis.

  // Create the preheader block, with the predecessor block and OSR block as
  // predecessors.
  if (!startNewBlock(pred, loopHead)) {
    return false;
  }

  pred->end(MGoto::New(alloc(), current));
  osrBlock->end(MGoto::New(alloc(), current));

  if (!current->addPredecessor(alloc(), osrBlock)) {
    return false;
  }

  return true;
}

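// Forward branches target bytecode that has not been built yet, so the
// successor block cannot be linked when the branch is emitted. Instead,
// addPendingEdge records (source block, successor index, values to pop)
// keyed on the target pc; build_JumpTarget consumes these entries and
// patches the successors once the join block exists.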
bool WarpBuilder::addPendingEdge(BytecodeLocation target, MBasicBlock* block,
                                 uint32_t successor, uint32_t numToPop) {
  MOZ_ASSERT(successor < block->lastIns()->numSuccessors());
  MOZ_ASSERT(numToPop <= block->stackDepth());

  jsbytecode* targetPC = target.toRawBytecode();
  PendingEdgesMap::AddPtr p = pendingEdges_.lookupForAdd(targetPC);
  if (p) {
    return p->value().emplaceBack(block, successor, numToPop);
  }

  PendingEdges edges;
  static_assert(PendingEdges::InlineLength >= 1,
                "Appending one element should be infallible");
  MOZ_ALWAYS_TRUE(edges.emplaceBack(block, successor, numToPop));

  return pendingEdges_.add(p, targetPC, std::move(edges));
}

bool WarpBuilder::build() {
  if (!buildPrologue()) {
    return false;
  }

  if (!buildBody()) {
    return false;
  }

  if (!MPhi::markIteratorPhis(*iterators())) {
    return false;
  }

  MOZ_ASSERT_IF(info().osrPc(), graph().osrBlock());
  MOZ_ASSERT(loopStack_.empty());
  MOZ_ASSERT(loopDepth() == 0);

  return true;
}

bool WarpBuilder::buildInline() {
  if (!buildInlinePrologue()) {
    return false;
  }

  if (!buildBody()) {
    return false;
  }

  MOZ_ASSERT(loopStack_.empty());
  return true;
}

MInstruction* WarpBuilder::buildNamedLambdaEnv(MDefinition* callee,
                                               MDefinition* env,
                                               NamedLambdaObject* templateObj) {
  MOZ_ASSERT(templateObj->numDynamicSlots() == 0);

  MInstruction* namedLambda = MNewNamedLambdaObject::New(alloc(), templateObj);
  current->add(namedLambda);

#ifdef DEBUG
  // Assert in debug mode we can elide the post write barriers.
  current->add(MAssertCanElidePostWriteBarrier::New(alloc(), namedLambda, env));
  current->add(
      MAssertCanElidePostWriteBarrier::New(alloc(), namedLambda, callee));
#endif

  // Initialize the object's reserved slots. No post barrier is needed here:
  // the object will be allocated in the nursery if possible, and if the
  // tenured heap is used instead, a minor collection will have been performed
  // that moved env/callee to the tenured heap.
  size_t enclosingSlot = NamedLambdaObject::enclosingEnvironmentSlot();
  size_t lambdaSlot = NamedLambdaObject::lambdaSlot();
  current->add(MStoreFixedSlot::NewUnbarriered(alloc(), namedLambda,
                                               enclosingSlot, env));
  current->add(MStoreFixedSlot::NewUnbarriered(alloc(), namedLambda, lambdaSlot,
                                               callee));

  return namedLambda;
}

MInstruction* WarpBuilder::buildCallObject(MDefinition* callee,
                                           MDefinition* env,
                                           CallObject* templateObj) {
  MConstant* templateCst = constant(ObjectValue(*templateObj));

  MNewCallObject* callObj = MNewCallObject::New(alloc(), templateCst);
  current->add(callObj);

#ifdef DEBUG
  // Assert in debug mode we can elide the post write barriers.
  current->add(MAssertCanElidePostWriteBarrier::New(alloc(), callObj, env));
  current->add(MAssertCanElidePostWriteBarrier::New(alloc(), callObj, callee));
#endif

  // Initialize the object's reserved slots. No post barrier is needed here,
  // for the same reason as in buildNamedLambdaEnv.
  size_t enclosingSlot = CallObject::enclosingEnvironmentSlot();
  size_t calleeSlot = CallObject::calleeSlot();
  current->add(
      MStoreFixedSlot::NewUnbarriered(alloc(), callObj, enclosingSlot, env));
  current->add(
      MStoreFixedSlot::NewUnbarriered(alloc(), callObj, calleeSlot, callee));

  return callObj;
}

bool WarpBuilder::buildEnvironmentChain() {
  const WarpEnvironment& env = scriptSnapshot()->environment();

  if (env.is<NoEnvironment>()) {
    return true;
  }

  MInstruction* envDef = env.match(
      [](const NoEnvironment&) -> MInstruction* {
        MOZ_CRASH("Already handled");
      },
      [this](JSObject* obj) -> MInstruction* {
        return constant(ObjectValue(*obj));
      },
      [this](const FunctionEnvironment& env) -> MInstruction* {
        MDefinition* callee = getCallee();
        MInstruction* envDef = MFunctionEnvironment::New(alloc(), callee);
        current->add(envDef);
        if (NamedLambdaObject* obj = env.namedLambdaTemplate) {
          envDef = buildNamedLambdaEnv(callee, envDef, obj);
        }
        if (CallObject* obj = env.callObjectTemplate) {
          envDef = buildCallObject(callee, envDef, obj);
          if (!envDef) {
            return nullptr;
          }
        }
        return envDef;
      });
  if (!envDef) {
    return false;
  }

  // Update the environment slot from UndefinedValue only after the initial
  // environment is created so that bailout doesn't see a partial environment.
  // See: |BaselineStackBuilder::buildBaselineFrame|
  current->setEnvironmentChain(envDef);
  return true;
}

bool WarpBuilder::buildPrologue() {
  BytecodeLocation startLoc(script_, script_->code());
  if (!startNewEntryBlock(info().firstStackSlot(), startLoc)) {
    return false;
  }

  if (info().hasFunMaybeLazy()) {
    // Initialize |this|.
    MParameter* param = MParameter::New(alloc(), MParameter::THIS_SLOT);
    current->add(param);
    current->initSlot(info().thisSlot(), param);

    // Initialize arguments.
    for (uint32_t i = 0; i < info().nargs(); i++) {
      MParameter* param = MParameter::New(alloc().fallible(), i);
      if (!param) {
        return false;
      }
      current->add(param);
      current->initSlot(info().argSlotUnchecked(i), param);
    }
  }

  MConstant* undef = constant(UndefinedValue());

  // Initialize local slots.
  for (uint32_t i = 0; i < info().nlocals(); i++) {
    current->initSlot(info().localSlot(i), undef);
  }

  // Initialize the environment chain, return value, and arguments object slots.
  current->initSlot(info().environmentChainSlot(), undef);
  current->initSlot(info().returnValueSlot(), undef);
  if (info().needsArgsObj()) {
    current->initSlot(info().argsObjSlot(), undef);
  }

  current->add(MStart::New(alloc()));

  // Guard against over-recursion.
  MCheckOverRecursed* check = MCheckOverRecursed::New(alloc());
  current->add(check);

  if (!buildEnvironmentChain()) {
    return false;
  }

#ifdef JS_CACHEIR_SPEW
  if (snapshot().needsFinalWarmUpCount()) {
    MIncrementWarmUpCounter* ins =
        MIncrementWarmUpCounter::New(alloc(), script_);
    current->add(ins);
  }
#endif

  return true;
}

bool WarpBuilder::buildInlinePrologue() {
  // Generate entry block.
  BytecodeLocation startLoc(script_, script_->code());
  if (!startNewEntryBlock(info().firstStackSlot(), startLoc)) {
    return false;
  }
  current->setCallerResumePoint(callerResumePoint());

  // Connect the entry block to the last block in the caller's graph.
  MBasicBlock* pred = callerBuilder()->current;
  MOZ_ASSERT(pred == callerResumePoint()->block());

  pred->end(MGoto::New(alloc(), current));
  if (!current->addPredecessorWithoutPhis(pred)) {
    return false;
  }

  MConstant* undef = constant(UndefinedValue());

  // Initialize env chain slot to Undefined. It's set later by
  // |buildEnvironmentChain|.
  current->initSlot(info().environmentChainSlot(), undef);

  // Initialize |return value| slot.
  current->initSlot(info().returnValueSlot(), undef);

  // Initialize |arguments| slot if needed.
  if (info().needsArgsObj()) {
    current->initSlot(info().argsObjSlot(), undef);
  }

  // Initialize |this| slot.
  current->initSlot(info().thisSlot(), inlineCallInfo()->thisArg());

  uint32_t callerArgs = inlineCallInfo()->argc();
  uint32_t actualArgs = info().nargs();
  uint32_t passedArgs = std::min<uint32_t>(callerArgs, actualArgs);

  // Initialize actually set arguments.
  for (uint32_t i = 0; i < passedArgs; i++) {
    MDefinition* arg = inlineCallInfo()->getArg(i);
    current->initSlot(info().argSlotUnchecked(i), arg);
  }

  // Pass undefined for missing arguments.
  for (uint32_t i = passedArgs; i < actualArgs; i++) {
    current->initSlot(info().argSlotUnchecked(i), undef);
  }

  // Initialize local slots.
  for (uint32_t i = 0; i < info().nlocals(); i++) {
    current->initSlot(info().localSlot(i), undef);
  }

  MOZ_ASSERT(current->entryResumePoint()->stackDepth() == info().totalSlots());

  if (!buildEnvironmentChain()) {
    return false;
  }

  return true;
}

#ifdef DEBUG
// In debug builds, after compiling a bytecode op, this class is used to check
// that all values popped by this opcode either:
//
// (1) Have the ImplicitlyUsed flag set on them.
// (2) Have more uses than before compiling this op (the value is
//     used as operand of a new MIR instruction).
//
// This is used to catch problems where WarpBuilder pops a value without
// adding any SSA uses and doesn't call setImplicitlyUsedUnchecked on it.
class MOZ_RAII WarpPoppedValueUseChecker {
  Vector<MDefinition*, 4, SystemAllocPolicy> popped_;
  Vector<size_t, 4, SystemAllocPolicy> poppedUses_;
  MBasicBlock* current_;
  BytecodeLocation loc_;

 public:
  WarpPoppedValueUseChecker(MBasicBlock* current, BytecodeLocation loc)
      : current_(current), loc_(loc) {}

  [[nodiscard]] bool init() {
    // Don't require SSA uses for values popped by these ops.
    switch (loc_.getOp()) {
      case JSOp::Pop:
      case JSOp::PopN:
      case JSOp::DupAt:
      case JSOp::Dup:
      case JSOp::Dup2:
      case JSOp::Pick:
      case JSOp::Unpick:
      case JSOp::Swap:
      case JSOp::SetArg:
      case JSOp::SetLocal:
      case JSOp::InitLexical:
      case JSOp::SetRval:
      case JSOp::Void:
        // Basic stack/local/argument management opcodes.
        return true;

      case JSOp::Case:
      case JSOp::Default:
        // These ops have to pop the switch value when branching but don't
        // actually use it.
        return true;

      default:
        break;
    }

    unsigned nuses = loc_.useCount();

    for (unsigned i = 0; i < nuses; i++) {
      MDefinition* def = current_->peek(-int32_t(i + 1));
      if (!popped_.append(def) || !poppedUses_.append(def->defUseCount())) {
        return false;
      }
    }

    return true;
  }

  void checkAfterOp() {
    for (size_t i = 0; i < popped_.length(); i++) {
      // First value popped by JSOp::EndIter is not used at all, it's similar
      // to JSOp::Pop above.
      if (loc_.is(JSOp::EndIter) && i == 0) {
        continue;
      }
      MOZ_ASSERT(popped_[i]->isImplicitlyUsed() ||
                 popped_[i]->defUseCount() > poppedUses_[i]);
    }
  }
};
#endif

bool WarpBuilder::buildBody() {
  for (BytecodeLocation loc : AllBytecodesIterable(script_)) {
    if (mirGen().shouldCancel("WarpBuilder (opcode loop)")) {
      return false;
    }

    // Skip unreachable ops (for example code after a 'return' or 'throw') until
    // we get to the next jump target.
    if (hasTerminatedBlock()) {
      // Finish any "broken" loops with an unreachable backedge. For example:
      //
      //   do {
      //     ...
      //     return;
      //     ...
      //   } while (x);
      //
      // This loop never actually loops.
      if (loc.isBackedge() && !loopStack_.empty()) {
        BytecodeLocation loopHead(script_,
                                  loopStack_.back().header()->entryPC());
        if (loc.isBackedgeForLoophead(loopHead)) {
          decLoopDepth();
          loopStack_.popBack();
        }
      }
      if (!loc.isJumpTarget()) {
        continue;
      }
    }

    if (!alloc().ensureBallast()) {
      return false;
    }

#ifdef DEBUG
    WarpPoppedValueUseChecker useChecker(current, loc);
    if (!useChecker.init()) {
      return false;
    }
#endif
    bool wantPreciseLineNumbers = js::jit::PerfEnabled();
    if (wantPreciseLineNumbers && !hasTerminatedBlock()) {
      current->updateTrackedSite(newBytecodeSite(loc));
    }

    JSOp op = loc.getOp();

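    // The macro below emits one case per JSOp that dispatches to the
    // corresponding build_<op> method; e.g. for JSOp::Add the generated case
    // is:
    //
    //   case JSOp::Add:
    //     if (MOZ_UNLIKELY(!this->build_Add(loc))) {
    //       return false;
    //     }
    //     break;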
#define BUILD_OP(OP, ...)                       \
  case JSOp::OP:                                \
    if (MOZ_UNLIKELY(!this->build_##OP(loc))) { \
      return false;                             \
    }                                           \
    break;
    switch (op) { FOR_EACH_OPCODE(BUILD_OP) }
#undef BUILD_OP

#ifdef DEBUG
    useChecker.checkAfterOp();
#endif
  }

  return true;
}

#define DEF_OP(OP)                                 \
  bool WarpBuilder::build_##OP(BytecodeLocation) { \
    MOZ_CRASH("Unsupported op");                   \
  }
WARP_UNSUPPORTED_OPCODE_LIST(DEF_OP)
#undef DEF_OP

bool WarpBuilder::build_Nop(BytecodeLocation) { return true; }

bool WarpBuilder::build_NopDestructuring(BytecodeLocation) { return true; }

bool WarpBuilder::build_NopIsAssignOp(BytecodeLocation) { return true; }

bool WarpBuilder::build_TryDestructuring(BytecodeLocation) {
  // Set the hasTryBlock flag to turn off optimizations that eliminate dead
  // resume points operands because the exception handler code for
  // TryNoteKind::Destructuring is effectively a (specialized) catch-block.
  graph().setHasTryBlock();
  return true;
}

bool WarpBuilder::build_Lineno(BytecodeLocation) { return true; }

bool WarpBuilder::build_DebugLeaveLexicalEnv(BytecodeLocation) { return true; }

bool WarpBuilder::build_Undefined(BytecodeLocation) {
  pushConstant(UndefinedValue());
  return true;
}

bool WarpBuilder::build_Void(BytecodeLocation) {
  current->pop();
  pushConstant(UndefinedValue());
  return true;
}

bool WarpBuilder::build_Null(BytecodeLocation) {
  pushConstant(NullValue());
  return true;
}

bool WarpBuilder::build_Hole(BytecodeLocation) {
  pushConstant(MagicValue(JS_ELEMENTS_HOLE));
  return true;
}

bool WarpBuilder::build_Uninitialized(BytecodeLocation) {
  pushConstant(MagicValue(JS_UNINITIALIZED_LEXICAL));
  return true;
}

bool WarpBuilder::build_IsConstructing(BytecodeLocation) {
  pushConstant(MagicValue(JS_IS_CONSTRUCTING));
  return true;
}

bool WarpBuilder::build_False(BytecodeLocation) {
  pushConstant(BooleanValue(false));
  return true;
}

bool WarpBuilder::build_True(BytecodeLocation) {
  pushConstant(BooleanValue(true));
  return true;
}

bool WarpBuilder::build_Pop(BytecodeLocation) {
  current->pop();
  return true;
}

bool WarpBuilder::build_PopN(BytecodeLocation loc) {
  for (uint32_t i = 0, n = loc.getPopCount(); i < n; i++) {
    current->pop();
  }
  return true;
}

bool WarpBuilder::build_Dup(BytecodeLocation) {
  current->pushSlot(current->stackDepth() - 1);
  return true;
}

bool WarpBuilder::build_Dup2(BytecodeLocation) {
  uint32_t lhsSlot = current->stackDepth() - 2;
  uint32_t rhsSlot = current->stackDepth() - 1;
  current->pushSlot(lhsSlot);
  current->pushSlot(rhsSlot);
  return true;
}

bool WarpBuilder::build_DupAt(BytecodeLocation loc) {
  current->pushSlot(current->stackDepth() - 1 - loc.getDupAtIndex());
  return true;
}

bool WarpBuilder::build_Swap(BytecodeLocation) {
  current->swapAt(-1);
  return true;
}

bool WarpBuilder::build_Pick(BytecodeLocation loc) {
  int32_t depth = -int32_t(loc.getPickDepth());
  current->pick(depth);
  return true;
}

bool WarpBuilder::build_Unpick(BytecodeLocation loc) {
  int32_t depth = -int32_t(loc.getUnpickDepth());
  current->unpick(depth);
  return true;
}

bool WarpBuilder::build_Zero(BytecodeLocation) {
  pushConstant(Int32Value(0));
  return true;
}

bool WarpBuilder::build_One(BytecodeLocation) {
  pushConstant(Int32Value(1));
  return true;
}

bool WarpBuilder::build_Int8(BytecodeLocation loc) {
  pushConstant(Int32Value(loc.getInt8()));
  return true;
}

bool WarpBuilder::build_Uint16(BytecodeLocation loc) {
  pushConstant(Int32Value(loc.getUint16()));
  return true;
}

bool WarpBuilder::build_Uint24(BytecodeLocation loc) {
  pushConstant(Int32Value(loc.getUint24()));
  return true;
}

bool WarpBuilder::build_Int32(BytecodeLocation loc) {
  pushConstant(Int32Value(loc.getInt32()));
  return true;
}

bool WarpBuilder::build_Double(BytecodeLocation loc) {
  pushConstant(loc.getInlineValue());
  return true;
}

bool WarpBuilder::build_BigInt(BytecodeLocation loc) {
  BigInt* bi = loc.getBigInt(script_);
  pushConstant(BigIntValue(bi));
  return true;
}

bool WarpBuilder::build_String(BytecodeLocation loc) {
  JSString* str = loc.getString(script_);
  pushConstant(StringValue(str));
  return true;
}

bool WarpBuilder::build_Symbol(BytecodeLocation loc) {
  uint32_t which = loc.getSymbolIndex();
  JS::Symbol* sym = mirGen().runtime->wellKnownSymbols().get(which);
  pushConstant(SymbolValue(sym));
  return true;
}

bool WarpBuilder::build_RegExp(BytecodeLocation loc) {
  RegExpObject* reObj = loc.getRegExp(script_);

  auto* snapshot = getOpSnapshot<WarpRegExp>(loc);

  MRegExp* regexp = MRegExp::New(alloc(), reObj, snapshot->hasShared());
  current->add(regexp);
  current->push(regexp);

  return true;
}

bool WarpBuilder::build_Return(BytecodeLocation) {
  MDefinition* def = current->pop();

  MReturn* ret = MReturn::New(alloc(), def);
  current->end(ret);

  if (!graph().addReturn(current)) {
    return false;
  }

  setTerminatedBlock();
  return true;
}

bool WarpBuilder::build_RetRval(BytecodeLocation) {
  MDefinition* rval;
  if (script_->noScriptRval()) {
    rval = constant(UndefinedValue());
  } else {
    rval = current->getSlot(info().returnValueSlot());
  }

  MReturn* ret = MReturn::New(alloc(), rval);
  current->end(ret);

  if (!graph().addReturn(current)) {
    return false;
  }

  setTerminatedBlock();
  return true;
}

bool WarpBuilder::build_SetRval(BytecodeLocation) {
  MOZ_ASSERT(!script_->noScriptRval());
  MDefinition* rval = current->pop();
  current->setSlot(info().returnValueSlot(), rval);
  return true;
}

bool WarpBuilder::build_GetRval(BytecodeLocation) {
  MOZ_ASSERT(!script_->noScriptRval());
  MDefinition* rval = current->getSlot(info().returnValueSlot());
  current->push(rval);
  return true;
}

bool WarpBuilder::build_GetLocal(BytecodeLocation loc) {
  current->pushLocal(loc.local());
  return true;
}

bool WarpBuilder::build_SetLocal(BytecodeLocation loc) {
  current->setLocal(loc.local());
  return true;
}

bool WarpBuilder::build_InitLexical(BytecodeLocation loc) {
  current->setLocal(loc.local());
  return true;
}

bool WarpBuilder::build_GetArg(BytecodeLocation loc) {
  uint32_t arg = loc.arg();
  if (info().argsObjAliasesFormals()) {
    MDefinition* argsObj = current->argumentsObject();
    auto* getArg = MGetArgumentsObjectArg::New(alloc(), argsObj, arg);
    current->add(getArg);
    current->push(getArg);
  } else {
    current->pushArg(arg);
  }
  return true;
}

bool WarpBuilder::build_GetFrameArg(BytecodeLocation loc) {
  current->pushArgUnchecked(loc.arg());
  return true;
}

bool WarpBuilder::build_SetArg(BytecodeLocation loc) {
  uint32_t arg = loc.arg();
  MDefinition* val = current->peek(-1);

  if (!info().argsObjAliasesFormals()) {
    // Either |arguments| is never referenced within this function, or
    // it doesn't map to the actual arguments values. Either way, we
    // don't need to worry about synchronizing the argument values
    // when writing to them.
    current->setArg(arg);
    return true;
  }

  // If an arguments object is in use, and it aliases formals, then all SetArgs
  // must go through the arguments object.
  MDefinition* argsObj = current->argumentsObject();
  current->add(MPostWriteBarrier::New(alloc(), argsObj, val));
  auto* ins = MSetArgumentsObjectArg::New(alloc(), argsObj, val, arg);
  current->add(ins);
  return resumeAfter(ins, loc);
}

bool WarpBuilder::build_ArgumentsLength(BytecodeLocation) {
  if (inlineCallInfo()) {
    pushConstant(Int32Value(inlineCallInfo()->argc()));
  } else {
    auto* argsLength = MArgumentsLength::New(alloc());
    current->add(argsLength);
    current->push(argsLength);
  }
  return true;
}

bool WarpBuilder::build_GetActualArg(BytecodeLocation) {
  MDefinition* index = current->pop();
  MInstruction* arg;
  if (inlineCallInfo()) {
    arg = MGetInlinedArgument::New(alloc(), index, *inlineCallInfo());
    if (!arg) {
      return false;
    }
  } else {
    arg = MGetFrameArgument::New(alloc(), index);
  }
  current->add(arg);
  current->push(arg);
  return true;
}

bool WarpBuilder::build_ToNumeric(BytecodeLocation loc) {
  return buildUnaryOp(loc);
}

bool WarpBuilder::buildUnaryOp(BytecodeLocation loc) {
  MDefinition* value = current->pop();
  return buildIC(loc, CacheKind::UnaryArith, {value});
}

bool WarpBuilder::build_Inc(BytecodeLocation loc) { return buildUnaryOp(loc); }

bool WarpBuilder::build_Dec(BytecodeLocation loc) { return buildUnaryOp(loc); }

bool WarpBuilder::build_Pos(BytecodeLocation loc) { return buildUnaryOp(loc); }

bool WarpBuilder::build_Neg(BytecodeLocation loc) { return buildUnaryOp(loc); }

bool WarpBuilder::build_BitNot(BytecodeLocation loc) {
  return buildUnaryOp(loc);
}

bool WarpBuilder::buildBinaryOp(BytecodeLocation loc) {
  MDefinition* right = current->pop();
  MDefinition* left = current->pop();
  return buildIC(loc, CacheKind::BinaryArith, {left, right});
}

bool WarpBuilder::build_Add(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Sub(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Mul(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Div(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Mod(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Pow(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_BitAnd(BytecodeLocation loc) {
  return buildBinaryOp(loc);
}

bool WarpBuilder::build_BitOr(BytecodeLocation loc) {
  return buildBinaryOp(loc);
}

bool WarpBuilder::build_BitXor(BytecodeLocation loc) {
  return buildBinaryOp(loc);
}

bool WarpBuilder::build_Lsh(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Rsh(BytecodeLocation loc) { return buildBinaryOp(loc); }

bool WarpBuilder::build_Ursh(BytecodeLocation loc) {
  return buildBinaryOp(loc);
}

bool WarpBuilder::buildCompareOp(BytecodeLocation loc) {
  MDefinition* right = current->pop();
  MDefinition* left = current->pop();
  return buildIC(loc, CacheKind::Compare, {left, right});
}

bool WarpBuilder::build_Eq(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_Ne(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_Lt(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_Le(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_Gt(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_Ge(BytecodeLocation loc) { return buildCompareOp(loc); }

bool WarpBuilder::build_StrictEq(BytecodeLocation loc) {
  return buildCompareOp(loc);
}

bool WarpBuilder::build_StrictNe(BytecodeLocation loc) {
  return buildCompareOp(loc);
}

// Returns true iff the MTest added for |op| has a true-target corresponding
// with the join point in the bytecode.
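//
// For example, |a && b| emits JSOp::And: if the value on top of the stack is
// falsy the bytecode jumps straight to the join point (skipping |b|), so the
// MTest's false-target is the join point and this helper returns false.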
static bool TestTrueTargetIsJoinPoint(JSOp op) {
  switch (op) {
    case JSOp::JumpIfTrue:
    case JSOp::Or:
    case JSOp::Case:
      return true;

    case JSOp::JumpIfFalse:
    case JSOp::And:
    case JSOp::Coalesce:
      return false;

    default:
      MOZ_CRASH("Unexpected op");
  }
}

bool WarpBuilder::build_JumpTarget(BytecodeLocation loc) {
  PendingEdgesMap::Ptr p = pendingEdges_.lookup(loc.toRawBytecode());
  if (!p) {
    // No (reachable) jumps so this is just a no-op.
    return true;
  }

  PendingEdges edges(std::move(p->value()));
  pendingEdges_.remove(p);

  MOZ_ASSERT(!edges.empty());

  // Create join block if there's fall-through from the previous bytecode op.
  if (!hasTerminatedBlock()) {
    MBasicBlock* pred = current;
    if (!startNewBlock(pred, loc)) {
      return false;
    }
    pred->end(MGoto::New(alloc(), current));
  }

  for (const PendingEdge& edge : edges) {
    MBasicBlock* source = edge.block();
    uint32_t numToPop = edge.numToPop();

    if (hasTerminatedBlock()) {
      if (!startNewBlock(source, loc, numToPop)) {
        return false;
      }
    } else {
      MOZ_ASSERT(source->stackDepth() - numToPop == current->stackDepth());
      if (!current->addPredecessorPopN(alloc(), source, numToPop)) {
        return false;
      }
    }

    MOZ_ASSERT(source->lastIns()->isTest() || source->lastIns()->isGoto() ||
               source->lastIns()->isTableSwitch());
    source->lastIns()->initSuccessor(edge.successor(), current);
  }

  MOZ_ASSERT(!hasTerminatedBlock());
  return true;
}

bool WarpBuilder::addIteratorLoopPhis(BytecodeLocation loopHead) {
  // When unwinding the stack for a thrown exception, the exception handler must
  // close live iterators. For ForIn and Destructuring loops, the exception
  // handler needs access to values on the stack. To prevent them from being
  // optimized away (and replaced with the JS_OPTIMIZED_OUT MagicValue), we need
  // to mark the phis (and phis they flow into) as having implicit uses.
  // See ProcessTryNotes in vm/Interpreter.cpp and CloseLiveIteratorIon in
  // jit/JitFrames.cpp

  MOZ_ASSERT(current->stackDepth() >= info().firstStackSlot());

  bool emptyStack = current->stackDepth() == info().firstStackSlot();
  if (emptyStack) {
    return true;
  }

  jsbytecode* loopHeadPC = loopHead.toRawBytecode();

  for (TryNoteIterAllNoGC tni(script_, loopHeadPC); !tni.done(); ++tni) {
    const TryNote& tn = **tni;

    // Stop if we reach an outer loop because outer loops were already
    // processed when we visited their loop headers.
    if (tn.isLoop()) {
      BytecodeLocation tnStart = script_->offsetToLocation(tn.start);
      if (tnStart != loopHead) {
        MOZ_ASSERT(tnStart.is(JSOp::LoopHead));
        MOZ_ASSERT(tnStart < loopHead);
        return true;
      }
    }

    switch (tn.kind()) {
      case TryNoteKind::Destructuring:
      case TryNoteKind::ForIn: {
        // For for-in loops we add the iterator object to iterators(). For
        // destructuring loops we add the "done" value that's on top of the
        // stack and used in the exception handler.
        MOZ_ASSERT(tn.stackDepth >= 1);
        uint32_t slot = info().stackSlot(tn.stackDepth - 1);
        MPhi* phi = current->getSlot(slot)->toPhi();
        if (!iterators()->append(phi)) {
          return false;
        }
        break;
      }
      case TryNoteKind::Loop:
      case TryNoteKind::ForOf:
        // Regular loops do not have iterators to close. ForOf loops handle
        // unwinding using catch blocks.
        break;
      default:
        break;
    }
  }

  return true;
}

bool WarpBuilder::build_LoopHead(BytecodeLocation loc) {
  // All loops have the following bytecode structure:
  //
  //    LoopHead
  //    ...
  //    JumpIfTrue/Goto to LoopHead

  if (hasTerminatedBlock()) {
    // The whole loop is unreachable.
    return true;
  }

  // Handle OSR from Baseline JIT code.
  if (loc.toRawBytecode() == info().osrPc()) {
    if (!startNewOsrPreHeaderBlock(loc)) {
      return false;
    }
  }

  incLoopDepth();

  MBasicBlock* pred = current;
  if (!startNewLoopHeaderBlock(loc)) {
    return false;
  }

  pred->end(MGoto::New(alloc(), current));

  if (!addIteratorLoopPhis(loc)) {
    return false;
  }

  MInterruptCheck* check = MInterruptCheck::New(alloc());
  current->add(check);

#ifdef JS_CACHEIR_SPEW
  if (snapshot().needsFinalWarmUpCount()) {
    MIncrementWarmUpCounter* ins =
        MIncrementWarmUpCounter::New(alloc(), script_);
    current->add(ins);
  }
#endif

  return true;
}

bool WarpBuilder::buildTestOp(BytecodeLocation loc) {
  MDefinition* originalValue = current->peek(-1);

  if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
    // If we have CacheIR, we can use it to refine the input. Note that
    // the transpiler doesn't generate any control instructions. Instead,
    // we fall through and generate them below.
    MDefinition* value = current->pop();
    if (!TranspileCacheIRToMIR(this, loc, cacheIRSnapshot, {value})) {
      return false;
    }
  }

  if (loc.isBackedge()) {
    return buildTestBackedge(loc);
  }

  JSOp op = loc.getOp();
  BytecodeLocation target1 = loc.next();
  BytecodeLocation target2 = loc.getJumpTarget();

  if (TestTrueTargetIsJoinPoint(op)) {
    std::swap(target1, target2);
  }

  MDefinition* value = current->pop();

  // JSOp::And and JSOp::Or leave the top stack value unchanged. The
  // top stack value may have been converted to bool by a transpiled
  // ToBool IC, so we push the original value.
  bool mustKeepCondition = (op == JSOp::And || op == JSOp::Or);
  if (mustKeepCondition) {
    current->push(originalValue);
  }

  // If this op always branches to the same location we treat this as a
  // JSOp::Goto.
  if (target1 == target2) {
    value->setImplicitlyUsedUnchecked();
    return buildForwardGoto(target1);
  }

  MTest* test = MTest::New(alloc(), value, /* ifTrue = */ nullptr,
                           /* ifFalse = */ nullptr);
  current->end(test);

  // JSOp::Case must pop a second value on the true-branch (the input to the
  // switch-statement).
  uint32_t numToPop = (loc.getOp() == JSOp::Case) ? 1 : 0;

  if (!addPendingEdge(target1, current, MTest::TrueBranchIndex, numToPop)) {
    return false;
  }
  if (!addPendingEdge(target2, current, MTest::FalseBranchIndex)) {
    return false;
  }

  if (const auto* typesSnapshot = getOpSnapshot<WarpPolymorphicTypes>(loc)) {
    test->setObservedTypes(typesSnapshot->list());
  }

  setTerminatedBlock();
  return true;
}

bool WarpBuilder::buildTestBackedge(BytecodeLocation loc) {
  MOZ_ASSERT(loc.is(JSOp::JumpIfTrue));
  MOZ_ASSERT(loopDepth() > 0);

  MDefinition* value = current->pop();

  BytecodeLocation loopHead = loc.getJumpTarget();
  MOZ_ASSERT(loopHead.is(JSOp::LoopHead));

  BytecodeLocation successor = loc.next();

  // We can finish the loop now. Use the loophead pc instead of the current pc
  // because the stack depth at the start of that op matches the current stack
  // depth (after popping our operand).
  MBasicBlock* pred = current;
  if (!startNewBlock(current, loopHead)) {
    return false;
  }

  MTest* test = MTest::New(alloc(), value, /* ifTrue = */ current,
                           /* ifFalse = */ nullptr);
  pred->end(test);

  if (const auto* typesSnapshot = getOpSnapshot<WarpPolymorphicTypes>(loc)) {
    test->setObservedTypes(typesSnapshot->list());
  }

  if (!addPendingEdge(successor, pred, MTest::FalseBranchIndex)) {
    return false;
  }

  return buildBackedge();
}

bool WarpBuilder::build_JumpIfFalse(BytecodeLocation loc) {
  return buildTestOp(loc);
}

bool WarpBuilder::build_JumpIfTrue(BytecodeLocation loc) {
  return buildTestOp(loc);
}

bool WarpBuilder::build_And(BytecodeLocation loc) { return buildTestOp(loc); }

bool WarpBuilder::build_Or(BytecodeLocation loc) { return buildTestOp(loc); }

bool WarpBuilder::build_Case(BytecodeLocation loc) { return buildTestOp(loc); }

bool WarpBuilder::build_Default(BytecodeLocation loc) {
  current->pop();
  return buildForwardGoto(loc.getJumpTarget());
}

bool WarpBuilder::build_Coalesce(BytecodeLocation loc) {
  BytecodeLocation target1 = loc.next();
  BytecodeLocation target2 = loc.getJumpTarget();
  MOZ_ASSERT(target2 > target1);

  MDefinition* value = current->peek(-1);

  MInstruction* isNullOrUndefined = MIsNullOrUndefined::New(alloc(), value);
  current->add(isNullOrUndefined);

  current->end(MTest::New(alloc(), isNullOrUndefined, /* ifTrue = */ nullptr,
                          /* ifFalse = */ nullptr));

  if (!addPendingEdge(target1, current, MTest::TrueBranchIndex)) {
    return false;
  }
  if (!addPendingEdge(target2, current, MTest::FalseBranchIndex)) {
    return false;
  }

  setTerminatedBlock();
  return true;
}

bool WarpBuilder::buildBackedge() {
  decLoopDepth();

  MBasicBlock* header = loopStack_.popCopy().header();
  current->end(MGoto::New(alloc(), header));

  if (!header->setBackedge(current)) {
    return false;
  }

  setTerminatedBlock();
  return true;
}

bool WarpBuilder::buildForwardGoto(BytecodeLocation target) {
  current->end(MGoto::New(alloc(), nullptr));

  if (!addPendingEdge(target, current, MGoto::TargetIndex)) {
    return false;
  }

  setTerminatedBlock();
  return true;
}

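// Forward gotos leave their MGoto target unset and are recorded as pending
// edges; build_JumpTarget fills in the successor once the join block is
// created. Backedges, by contrast, can be resolved immediately because the
// loop header block already exists on loopStack_.
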
bool WarpBuilder::build_Goto(BytecodeLocation loc) {
  if (loc.isBackedge()) {
    return buildBackedge();
  }

  return buildForwardGoto(loc.getJumpTarget());
}

bool WarpBuilder::build_IsNullOrUndefined(BytecodeLocation loc) {
  MDefinition* value = current->peek(-1);
  auto* isNullOrUndef = MIsNullOrUndefined::New(alloc(), value);
  current->add(isNullOrUndef);
  current->push(isNullOrUndef);
  return true;
}

bool WarpBuilder::build_DebugCheckSelfHosted(BytecodeLocation loc) {
#ifdef DEBUG
  MDefinition* val = current->pop();
  MDebugCheckSelfHosted* check = MDebugCheckSelfHosted::New(alloc(), val);
  current->add(check);
  current->push(check);
  if (!resumeAfter(check, loc)) {
    return false;
  }
#endif
  return true;
}

bool WarpBuilder::build_DynamicImport(BytecodeLocation loc) {
  MDefinition* options = current->pop();
  MDefinition* specifier = current->pop();
  MDynamicImport* ins = MDynamicImport::New(alloc(), specifier, options);
  current->add(ins);
  current->push(ins);
  return resumeAfter(ins, loc);
}

bool WarpBuilder::build_Not(BytecodeLocation loc) {
  if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
    // If we have CacheIR, we can use it to refine the input before
    // emitting the MNot.
    MDefinition* value = current->pop();
    if (!TranspileCacheIRToMIR(this, loc, cacheIRSnapshot, {value})) {
      return false;
    }
  }

  MDefinition* value = current->pop();
  MNot* ins = MNot::New(alloc(), value);
  current->add(ins);
  current->push(ins);

  if (const auto* typesSnapshot = getOpSnapshot<WarpPolymorphicTypes>(loc)) {
    ins->setObservedTypes(typesSnapshot->list());
  }

  return true;
}

bool WarpBuilder::build_ToString(BytecodeLocation loc) {
  MDefinition* value = current->pop();

  if (value->type() == MIRType::String) {
    value->setImplicitlyUsedUnchecked();
    current->push(value);
    return true;
  }

  MToString* ins =
      MToString::New(alloc(), value, MToString::SideEffectHandling::Supported);
  current->add(ins);
  current->push(ins);
  if (ins->isEffectful()) {
    return resumeAfter(ins, loc);
  }
  return true;
}

bool WarpBuilder::usesEnvironmentChain() const {
  return script_->jitScript()->usesEnvironmentChain();
}

bool WarpBuilder::build_GlobalOrEvalDeclInstantiation(BytecodeLocation loc) {
  MOZ_ASSERT(!script_->isForEval(), "Eval scripts not supported");
  auto* redeclCheck = MGlobalDeclInstantiation::New(alloc());
  current->add(redeclCheck);
  return resumeAfter(redeclCheck, loc);
}

bool WarpBuilder::build_BindVar(BytecodeLocation) {
  MOZ_ASSERT(usesEnvironmentChain());

  MDefinition* env = current->environmentChain();
  MCallBindVar* ins = MCallBindVar::New(alloc(), env);
  current->add(ins);
  current->push(ins);
  return true;
}

bool WarpBuilder::build_MutateProto(BytecodeLocation loc) {
  MDefinition* value = current->pop();
  MDefinition* obj = current->peek(-1);
  MMutateProto* mutate = MMutateProto::New(alloc(), obj, value);
  current->add(mutate);
  return resumeAfter(mutate, loc);
}

MDefinition* WarpBuilder::getCallee() {
  if (inlineCallInfo()) {
    return inlineCallInfo()->callee();
  }

  MInstruction* callee = MCallee::New(alloc());
  current->add(callee);
  return callee;
}

bool WarpBuilder::build_Callee(BytecodeLocation) {
  MDefinition* callee = getCallee();
  current->push(callee);
  return true;
}

bool WarpBuilder::build_ToAsyncIter(BytecodeLocation loc) {
  MDefinition* nextMethod = current->pop();
  MDefinition* iterator = current->pop();
  MToAsyncIter* ins = MToAsyncIter::New(alloc(), iterator, nextMethod);
  current->add(ins);
  current->push(ins);
  return resumeAfter(ins, loc);
}

bool WarpBuilder::build_ToPropertyKey(BytecodeLocation loc) {
  MDefinition* value = current->pop();
  return buildIC(loc, CacheKind::ToPropertyKey, {value});
}

bool WarpBuilder::build_Typeof(BytecodeLocation loc) {
  MDefinition* input = current->pop();

  if (const auto* typesSnapshot = getOpSnapshot<WarpPolymorphicTypes>(loc)) {
    auto* typeOf = MTypeOf::New(alloc(), input);
    typeOf->setObservedTypes(typesSnapshot->list());
    current->add(typeOf);

    auto* ins = MTypeOfName::New(alloc(), typeOf);
    current->add(ins);
    current->push(ins);
    return true;
  }

  return buildIC(loc, CacheKind::TypeOf, {input});
}

bool WarpBuilder::build_TypeofExpr(BytecodeLocation loc) {
  return build_Typeof(loc);
}

bool WarpBuilder::build_Arguments(BytecodeLocation loc) {
  auto* snapshot = getOpSnapshot<WarpArguments>(loc);
  MOZ_ASSERT(info().needsArgsObj());
  MOZ_ASSERT(snapshot);
  MOZ_ASSERT(usesEnvironmentChain());

  ArgumentsObject* templateObj = snapshot->templateObj();
  MDefinition* env = current->environmentChain();

  MInstruction* argsObj;
  if (inlineCallInfo()) {
    argsObj = MCreateInlinedArgumentsObject::New(
        alloc(), env, getCallee(), inlineCallInfo()->argv(), templateObj);
    if (!argsObj) {
      return false;
    }
  } else {
    argsObj = MCreateArgumentsObject::New(alloc(), env, templateObj);
  }
  current->add(argsObj);
  current->setArgumentsObject(argsObj);
  current->push(argsObj);

  return true;
}

bool WarpBuilder::build_ObjWithProto(BytecodeLocation loc) {
  MDefinition* proto = current->pop();
  MInstruction* ins = MObjectWithProto::New(alloc(), proto);
  current->add(ins);
  current->push(ins);
  return resumeAfter(ins, loc);
}

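// An aliased variable is addressed by an EnvironmentCoordinate: the number of
// environment objects to hop up the chain plus a slot in the final object.
// walkEnvironmentChain emits one MEnclosingEnvironment load per hop.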
MDefinition* WarpBuilder::walkEnvironmentChain(uint32_t numHops) {
  MDefinition* env = current->environmentChain();

  for (uint32_t i = 0; i < numHops; i++) {
    if (!alloc().ensureBallast()) {
      return nullptr;
    }

    MInstruction* ins = MEnclosingEnvironment::New(alloc(), env);
    current->add(ins);
    env = ins;
  }

  return env;
}

bool WarpBuilder::build_GetAliasedVar(BytecodeLocation loc) {
  EnvironmentCoordinate ec = loc.getEnvironmentCoordinate();
  MDefinition* obj = walkEnvironmentChain(ec.hops());
  if (!obj) {
    return false;
  }

  MInstruction* load;
  if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
    load = MLoadFixedSlot::New(alloc(), obj, ec.slot());
  } else {
    MInstruction* slots = MSlots::New(alloc(), obj);
    current->add(slots);

    uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
    load = MLoadDynamicSlot::New(alloc(), slots, slot);
  }

  current->add(load);
  current->push(load);
  return true;
}

bool WarpBuilder::build_SetAliasedVar(BytecodeLocation loc) {
  EnvironmentCoordinate ec = loc.getEnvironmentCoordinate();
  MDefinition* val = current->peek(-1);
  MDefinition* obj = walkEnvironmentChain(ec.hops());
  if (!obj) {
    return false;
  }

  current->add(MPostWriteBarrier::New(alloc(), obj, val));

  MInstruction* store;
  if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
    store = MStoreFixedSlot::NewBarriered(alloc(), obj, ec.slot(), val);
  } else {
    MInstruction* slots = MSlots::New(alloc(), obj);
    current->add(slots);

    uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
    store = MStoreDynamicSlot::NewBarriered(alloc(), slots, slot, val);
  }

  current->add(store);
  return resumeAfter(store, loc);
}

bool WarpBuilder::build_InitAliasedLexical(BytecodeLocation loc) {
  return build_SetAliasedVar(loc);
}

bool WarpBuilder::build_EnvCallee(BytecodeLocation loc) {
  uint32_t numHops = loc.getEnvCalleeNumHops();
  MDefinition* env = walkEnvironmentChain(numHops);
  if (!env) {
    return false;
  }

  auto* callee = MLoadFixedSlot::New(alloc(), env, CallObject::calleeSlot());
  current->add(callee);
  current->push(callee);
  return true;
}

bool WarpBuilder::build_Iter(BytecodeLocation loc) {
  MDefinition* obj = current->pop();
  return buildIC(loc, CacheKind::GetIterator, {obj});
}

bool WarpBuilder::build_MoreIter(BytecodeLocation loc) {
  MDefinition* iter = current->peek(-1);
  MInstruction* ins = MIteratorMore::New(alloc(), iter);
  current->add(ins);
  current->push(ins);
  return resumeAfter(ins, loc);
}

bool WarpBuilder::build_EndIter(BytecodeLocation loc) {
  current->pop();  // Iterator value is not used.
  MDefinition* iter = current->pop();
  MInstruction* ins = MIteratorEnd::New(alloc(), iter);
  current->add(ins);
  return resumeAfter(ins, loc);
}

bool WarpBuilder::build_CloseIter(BytecodeLocation loc) {
  MDefinition* iter = current->pop();
  iter = unboxObjectInfallible(iter, IsMovable::Yes);
  return buildIC(loc, CacheKind::CloseIter, {iter});
}

bool WarpBuilder::build_IsNoIter(BytecodeLocation) {
  MDefinition* def = current->peek(-1);
  MOZ_ASSERT(def->isIteratorMore());
  MInstruction* ins = MIsNoIter::New(alloc(), def);
  current->add(ins);
  current->push(ins);
  return true;
}

bool WarpBuilder::build_OptimizeGetIterator(BytecodeLocation loc) {
  MDefinition* value = current->pop();
  return buildIC(loc, CacheKind::OptimizeGetIterator, {value});
}

bool WarpBuilder::transpileCall(BytecodeLocation loc,
                                const WarpCacheIR* cacheIRSnapshot,
                                CallInfo* callInfo) {
  // Synthesize the constant number of arguments for this call op.
  auto* argc = MConstant::New(alloc(), Int32Value(callInfo->argc()));
  current->add(argc);

  return TranspileCacheIRToMIR(this, loc, cacheIRSnapshot, {argc}, callInfo);
}

void WarpBuilder::buildCreateThis(CallInfo& callInfo) {
  MOZ_ASSERT(callInfo.constructing());

  // Inline the this-object allocation on the caller-side.
  MDefinition* callee = callInfo.callee();
  MDefinition* newTarget = callInfo.getNewTarget();
  auto* createThis = MCreateThis::New(alloc(), callee, newTarget);
  current->add(createThis);
  callInfo.thisArg()->setImplicitlyUsedUnchecked();
  callInfo.setThis(createThis);
}

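// Call ops are compiled along one of several paths, depending on what the
// snapshot recorded for the IC site: a WarpInlinedCall snapshot transpiles the
// guards and then builds the callee's body inline, a WarpCacheIR snapshot
// transpiles the call directly, and a WarpBailout snapshot (a cold IC) emits a
// bailout. Otherwise a generic MCall is used.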
bool WarpBuilder::buildCallOp(BytecodeLocation loc) {
  uint32_t argc = loc.getCallArgc();
  JSOp op = loc.getOp();
  bool constructing = IsConstructOp(op);
  bool ignoresReturnValue = (op == JSOp::CallIgnoresRv || loc.resultIsPopped());

  CallInfo callInfo(alloc(), constructing, ignoresReturnValue);
  if (!callInfo.init(current, argc)) {
    return false;
  }

  if (const auto* inliningSnapshot = getOpSnapshot<WarpInlinedCall>(loc)) {
    // Transpile the CacheIR to generate the correct guards before
    // inlining. In this case, CacheOp::CallInlinedFunction updates
    // the CallInfo, but does not generate a call.
    callInfo.markAsInlined();
    if (!transpileCall(loc, inliningSnapshot->cacheIRSnapshot(), &callInfo)) {
      return false;
    }

    // Generate the body of the inlined function.
    return buildInlinedCall(loc, inliningSnapshot, callInfo);
  }

  if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
    return transpileCall(loc, cacheIRSnapshot, &callInfo);
  }

  if (getOpSnapshot<WarpBailout>(loc)) {
    callInfo.setImplicitlyUsedUnchecked();
    return buildBailoutForColdIC(loc, CacheKind::Call);
  }

  bool needsThisCheck = false;
  if (callInfo.constructing()) {
    buildCreateThis(callInfo);
    needsThisCheck = true;
  }

  MCall* call = makeCall(callInfo, needsThisCheck);
  if (!call) {
    return false;
  }

  current->add(call);
  current->push(call);
  return resumeAfter(call, loc);
}

bool WarpBuilder::build_Call(BytecodeLocation loc) { return buildCallOp(loc); }

bool WarpBuilder::build_CallContent(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_CallIgnoresRv(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_CallIter(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_CallContentIter(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_New(BytecodeLocation loc) { return buildCallOp(loc); }

bool WarpBuilder::build_NewContent(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_SuperCall(BytecodeLocation loc) {
  return buildCallOp(loc);
}

bool WarpBuilder::build_FunctionThis(BytecodeLocation loc) {
  MOZ_ASSERT(info().hasFunMaybeLazy());

  if (script_->strict()) {
    // No need to wrap primitive |this| in strict mode.
    current->pushSlot(info().thisSlot());
    return true;
  }

  MOZ_ASSERT(!script_->hasNonSyntacticScope(),
             "WarpOracle should have aborted compilation");

  MDefinition* def = current->getSlot(info().thisSlot());
  JSObject* globalThis = snapshot().globalLexicalEnvThis();

  auto* thisObj = MBoxNonStrictThis::New(alloc(), def, globalThis);
  current->add(thisObj);
  current->push(thisObj);

  return true;
}

1844 bool WarpBuilder::build_GlobalThis(BytecodeLocation loc) {
1845 MOZ_ASSERT(!script_->hasNonSyntacticScope());
1846 JSObject* obj = snapshot().globalLexicalEnvThis();
1847 pushConstant(ObjectValue(*obj));
1848 return true;
1851 MConstant* WarpBuilder::globalLexicalEnvConstant() {
1852 JSObject* globalLexical = snapshot().globalLexicalEnv();
1853 return constant(ObjectValue(*globalLexical));
1856 bool WarpBuilder::build_GetName(BytecodeLocation loc) {
1857 MOZ_ASSERT(usesEnvironmentChain());
1859 MDefinition* env = current->environmentChain();
1860 env = unboxObjectInfallible(env, IsMovable::Yes);
1861 return buildIC(loc, CacheKind::GetName, {env});
1864 bool WarpBuilder::build_GetGName(BytecodeLocation loc) {
1865 MOZ_ASSERT(!script_->hasNonSyntacticScope());
1867 MDefinition* env = globalLexicalEnvConstant();
1868 return buildIC(loc, CacheKind::GetName, {env});
1871 bool WarpBuilder::build_BindName(BytecodeLocation loc) {
1872 MOZ_ASSERT(usesEnvironmentChain());
1874 MDefinition* env = current->environmentChain();
1875 env = unboxObjectInfallible(env, IsMovable::Yes);
1876 return buildIC(loc, CacheKind::BindName, {env});
1879 bool WarpBuilder::build_BindGName(BytecodeLocation loc) {
1880 MOZ_ASSERT(!script_->hasNonSyntacticScope());
1882 if (const auto* snapshot = getOpSnapshot<WarpBindGName>(loc)) {
1883 JSObject* globalEnv = snapshot->globalEnv();
1884 pushConstant(ObjectValue(*globalEnv));
1885 return true;
1888 MDefinition* env = globalLexicalEnvConstant();
1889 return buildIC(loc, CacheKind::BindName, {env});
1892 bool WarpBuilder::build_GetProp(BytecodeLocation loc) {
1893 MDefinition* val = current->pop();
1894 return buildIC(loc, CacheKind::GetProp, {val});
1897 bool WarpBuilder::build_GetElem(BytecodeLocation loc) {
1898 MDefinition* id = current->pop();
1899 MDefinition* val = current->pop();
1900 return buildIC(loc, CacheKind::GetElem, {val, id});
1903 bool WarpBuilder::build_SetProp(BytecodeLocation loc) {
1904 MDefinition* val = current->pop();
1905 MDefinition* obj = current->pop();
1906 current->push(val);
1907 return buildIC(loc, CacheKind::SetProp, {obj, val});
1910 bool WarpBuilder::build_StrictSetProp(BytecodeLocation loc) {
1911 return build_SetProp(loc);
1914 bool WarpBuilder::build_SetName(BytecodeLocation loc) {
1915 return build_SetProp(loc);
1918 bool WarpBuilder::build_StrictSetName(BytecodeLocation loc) {
1919 return build_SetProp(loc);
1922 bool WarpBuilder::build_SetGName(BytecodeLocation loc) {
1923 return build_SetProp(loc);
1926 bool WarpBuilder::build_StrictSetGName(BytecodeLocation loc) {
1927 return build_SetProp(loc);
1930 bool WarpBuilder::build_InitGLexical(BytecodeLocation loc) {
1931 MOZ_ASSERT(!script_->hasNonSyntacticScope());
1933 MDefinition* globalLexical = globalLexicalEnvConstant();
1934 MDefinition* val = current->peek(-1);
1936 return buildIC(loc, CacheKind::SetProp, {globalLexical, val});
1939 bool WarpBuilder::build_SetElem(BytecodeLocation loc) {
1940 MDefinition* val = current->pop();
1941 MDefinition* id = current->pop();
1942 MDefinition* obj = current->pop();
1943 current->push(val);
1944 return buildIC(loc, CacheKind::SetElem, {obj, id, val});
1947 bool WarpBuilder::build_StrictSetElem(BytecodeLocation loc) {
1948 return build_SetElem(loc);
1951 bool WarpBuilder::build_DelProp(BytecodeLocation loc) {
1952 PropertyName* name = loc.getPropertyName(script_);
1953 MDefinition* obj = current->pop();
1954 bool strict = loc.getOp() == JSOp::StrictDelProp;
1956 MInstruction* ins = MDeleteProperty::New(alloc(), obj, name, strict);
1957 current->add(ins);
1958 current->push(ins);
1959 return resumeAfter(ins, loc);
1962 bool WarpBuilder::build_StrictDelProp(BytecodeLocation loc) {
1963 return build_DelProp(loc);
1966 bool WarpBuilder::build_DelElem(BytecodeLocation loc) {
1967 MDefinition* id = current->pop();
1968 MDefinition* obj = current->pop();
1969 bool strict = loc.getOp() == JSOp::StrictDelElem;
1971 MInstruction* ins = MDeleteElement::New(alloc(), obj, id, strict);
1972 current->add(ins);
1973 current->push(ins);
1974 return resumeAfter(ins, loc);
1977 bool WarpBuilder::build_StrictDelElem(BytecodeLocation loc) {
1978 return build_DelElem(loc);
1981 bool WarpBuilder::build_SetFunName(BytecodeLocation loc) {
1982 FunctionPrefixKind prefixKind = loc.getFunctionPrefixKind();
1983 MDefinition* name = current->pop();
1984 MDefinition* fun = current->pop();
1986 MSetFunName* ins = MSetFunName::New(alloc(), fun, name, uint8_t(prefixKind));
1987 current->add(ins);
1988 current->push(fun);
1989 return resumeAfter(ins, loc);
1992 bool WarpBuilder::build_PushLexicalEnv(BytecodeLocation loc) {
1993 MOZ_ASSERT(usesEnvironmentChain());
1995 const auto* snapshot = getOpSnapshot<WarpLexicalEnvironment>(loc);
1996 MOZ_ASSERT(snapshot);
1998 MDefinition* env = current->environmentChain();
1999 MConstant* templateCst = constant(ObjectValue(*snapshot->templateObj()));
2001 auto* ins = MNewLexicalEnvironmentObject::New(alloc(), templateCst);
2002 current->add(ins);
2004 #ifdef DEBUG
2005 // Assert in debug mode we can elide the post write barrier.
2006 current->add(MAssertCanElidePostWriteBarrier::New(alloc(), ins, env));
2007 #endif
2009 // Initialize the object's reserved slots. No post barrier is needed here,
2010 // for the same reason as in buildNamedLambdaEnv.
2011 current->add(MStoreFixedSlot::NewUnbarriered(
2012 alloc(), ins, EnvironmentObject::enclosingEnvironmentSlot(), env));
2014 current->setEnvironmentChain(ins);
2015 return true;
2018 bool WarpBuilder::build_PushClassBodyEnv(BytecodeLocation loc) {
2019 MOZ_ASSERT(usesEnvironmentChain());
2021 const auto* snapshot = getOpSnapshot<WarpClassBodyEnvironment>(loc);
2022 MOZ_ASSERT(snapshot);
2024 MDefinition* env = current->environmentChain();
2025 MConstant* templateCst = constant(ObjectValue(*snapshot->templateObj()));
2027 auto* ins = MNewClassBodyEnvironmentObject::New(alloc(), templateCst);
2028 current->add(ins);
2030 #ifdef DEBUG
2031 // Assert in debug mode we can elide the post write barrier.
2032 current->add(MAssertCanElidePostWriteBarrier::New(alloc(), ins, env));
2033 #endif
2035 // Initialize the object's reserved slots. No post barrier is needed here,
2036 // for the same reason as in buildNamedLambdaEnv.
2037 current->add(MStoreFixedSlot::NewUnbarriered(
2038 alloc(), ins, EnvironmentObject::enclosingEnvironmentSlot(), env));
2040 current->setEnvironmentChain(ins);
2041 return true;
2044 bool WarpBuilder::build_PopLexicalEnv(BytecodeLocation) {
2045 MDefinition* enclosingEnv = walkEnvironmentChain(1);
2046 if (!enclosingEnv) {
2047 return false;
2049 current->setEnvironmentChain(enclosingEnv);
2050 return true;
2053 bool WarpBuilder::build_FreshenLexicalEnv(BytecodeLocation loc) {
2054 MOZ_ASSERT(usesEnvironmentChain());
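// FreshenLexicalEnv replaces the innermost lexical environment with a fresh
// object on each loop iteration so that closures capture per-iteration
// bindings; the current slot values are copied into the new object below.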
2056 const auto* snapshot = getOpSnapshot<WarpLexicalEnvironment>(loc);
2057 MOZ_ASSERT(snapshot);
2059 MDefinition* enclosingEnv = walkEnvironmentChain(1);
2060 if (!enclosingEnv) {
2061 return false;
2064 MDefinition* env = current->environmentChain();
2065 MConstant* templateCst = constant(ObjectValue(*snapshot->templateObj()));
2067 auto* templateObj = snapshot->templateObj();
2068 auto* scope = &templateObj->scope();
2069 MOZ_ASSERT(scope->hasEnvironment());
2071 auto* ins = MNewLexicalEnvironmentObject::New(alloc(), templateCst);
2072 current->add(ins);
2074 #ifdef DEBUG
2075 // Assert in debug mode we can elide the post write barrier.
2076 current->add(
2077 MAssertCanElidePostWriteBarrier::New(alloc(), ins, enclosingEnv));
2078 #endif
2080 // Initialize the object's reserved slots. No post barrier is needed here,
2081 // for the same reason as in buildNamedLambdaEnv.
2082 current->add(MStoreFixedSlot::NewUnbarriered(
2083 alloc(), ins, EnvironmentObject::enclosingEnvironmentSlot(),
2084 enclosingEnv));
2086 // Copy environment slots.
2087 MSlots* envSlots = nullptr;
2088 MSlots* slots = nullptr;
2089 for (BindingIter iter(scope); iter; iter++) {
2090 auto loc = iter.location();
2091 if (loc.kind() != BindingLocation::Kind::Environment) {
2092 MOZ_ASSERT(loc.kind() == BindingLocation::Kind::Frame);
2093 continue;
2096 if (!alloc().ensureBallast()) {
2097 return false;
2100 uint32_t slot = loc.slot();
2101 uint32_t numFixedSlots = templateObj->numFixedSlots();
2102 if (slot >= numFixedSlots) {
2103 if (!envSlots) {
2104 envSlots = MSlots::New(alloc(), env);
2105 current->add(envSlots);
2107 if (!slots) {
2108 slots = MSlots::New(alloc(), ins);
2109 current->add(slots);
2112 uint32_t dynamicSlot = slot - numFixedSlots;
2114 auto* load = MLoadDynamicSlot::New(alloc(), envSlots, dynamicSlot);
2115 current->add(load);
2117 #ifdef DEBUG
2118 // Assert in debug mode we can elide the post write barrier.
2119 current->add(MAssertCanElidePostWriteBarrier::New(alloc(), ins, load));
2120 #endif
2122 current->add(
2123 MStoreDynamicSlot::NewUnbarriered(alloc(), slots, dynamicSlot, load));
2124 } else {
2125 auto* load = MLoadFixedSlot::New(alloc(), env, slot);
2126 current->add(load);
2128 #ifdef DEBUG
2129 // Assert in debug mode we can elide the post write barrier.
2130 current->add(MAssertCanElidePostWriteBarrier::New(alloc(), ins, load));
2131 #endif
2133 current->add(MStoreFixedSlot::NewUnbarriered(alloc(), ins, slot, load));
2137 current->setEnvironmentChain(ins);
2138 return true;
2141 bool WarpBuilder::build_RecreateLexicalEnv(BytecodeLocation loc) {
2142 MOZ_ASSERT(usesEnvironmentChain());
2144 const auto* snapshot = getOpSnapshot<WarpLexicalEnvironment>(loc);
2145 MOZ_ASSERT(snapshot);
2147 MDefinition* enclosingEnv = walkEnvironmentChain(1);
2148 if (!enclosingEnv) {
2149 return false;
2152 MConstant* templateCst = constant(ObjectValue(*snapshot->templateObj()));
2154 auto* ins = MNewLexicalEnvironmentObject::New(alloc(), templateCst);
2155 current->add(ins);
2157 #ifdef DEBUG
2158 // Assert in debug mode we can elide the post write barrier.
2159 current->add(
2160 MAssertCanElidePostWriteBarrier::New(alloc(), ins, enclosingEnv));
2161 #endif
2163 // Initialize the object's reserved slots. No post barrier is needed here,
2164 // for the same reason as in buildNamedLambdaEnv.
2165 current->add(MStoreFixedSlot::NewUnbarriered(
2166 alloc(), ins, EnvironmentObject::enclosingEnvironmentSlot(),
2167 enclosingEnv));
2169 current->setEnvironmentChain(ins);
2170 return true;
2173 bool WarpBuilder::build_PushVarEnv(BytecodeLocation loc) {
2174 MOZ_ASSERT(usesEnvironmentChain());
2176 const auto* snapshot = getOpSnapshot<WarpVarEnvironment>(loc);
2177 MOZ_ASSERT(snapshot);
2179 MDefinition* env = current->environmentChain();
2180 MConstant* templateCst = constant(ObjectValue(*snapshot->templateObj()));
2182 auto* ins = MNewVarEnvironmentObject::New(alloc(), templateCst);
2183 current->add(ins);
2185 #ifdef DEBUG
2186 // Assert in debug mode we can elide the post write barrier.
2187 current->add(MAssertCanElidePostWriteBarrier::New(alloc(), ins, env));
2188 #endif
2190 // Initialize the object's reserved slots. No post barrier is needed here,
2191 // for the same reason as in buildNamedLambdaEnv.
2192 current->add(MStoreFixedSlot::NewUnbarriered(
2193 alloc(), ins, EnvironmentObject::enclosingEnvironmentSlot(), env));
2195 current->setEnvironmentChain(ins);
2196 return true;
2199 bool WarpBuilder::build_ImplicitThis(BytecodeLocation loc) {
2200 MOZ_ASSERT(usesEnvironmentChain());
2202 PropertyName* name = loc.getPropertyName(script_);
2203 MDefinition* env = current->environmentChain();
2205 auto* ins = MImplicitThis::New(alloc(), env, name);
2206 current->add(ins);
2207 current->push(ins);
2208 return resumeAfter(ins, loc);
2211 bool WarpBuilder::build_CheckClassHeritage(BytecodeLocation loc) {
2212 MDefinition* def = current->pop();
2213 auto* ins = MCheckClassHeritage::New(alloc(), def);
2214 current->add(ins);
2215 current->push(ins);
2216 return resumeAfter(ins, loc);
2219 bool WarpBuilder::build_CheckThis(BytecodeLocation loc) {
2220 MDefinition* def = current->pop();
2221 auto* ins = MCheckThis::New(alloc(), def);
2222 current->add(ins);
2223 current->push(ins);
2224 return resumeAfter(ins, loc);
2227 bool WarpBuilder::build_CheckThisReinit(BytecodeLocation loc) {
2228 MDefinition* def = current->pop();
2229 auto* ins = MCheckThisReinit::New(alloc(), def);
2230 current->add(ins);
2231 current->push(ins);
2232 return resumeAfter(ins, loc);
2235 bool WarpBuilder::build_Generator(BytecodeLocation loc) {
2236 MOZ_ASSERT(usesEnvironmentChain());
2238 MDefinition* callee = getCallee();
2239 MDefinition* environmentChain = current->environmentChain();
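// If the script doesn't need an arguments object, pass an Int32 0 placeholder
// to MGenerator instead.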
2240 MDefinition* argsObj = info().needsArgsObj() ? current->argumentsObject()
2241 : constant(Int32Value(0));
2243 MGenerator* generator =
2244 MGenerator::New(alloc(), callee, environmentChain, argsObj);
2246 current->add(generator);
2247 current->push(generator);
2248 return resumeAfter(generator, loc);
2251 bool WarpBuilder::build_AfterYield(BytecodeLocation loc) {
2252 // Unreachable blocks don't need to generate a bail.
2253 if (hasTerminatedBlock()) {
2254 return true;
2257 // This comes after a yield, which we generate as a return,
2258 // so we know this should be unreachable code.
2260 // We emit an unreachable bail for this, which will assert if we
2261 // ever execute this.
2263 // We use an Unreachable bail instead of MUnreachable because MUnreachable
2264 // is a control instruction, and injecting it in the middle of a block
2265 // causes various graph state assertions to fail.

2266 MBail* bail = MBail::New(alloc(), BailoutKind::Unreachable);
2267 current->add(bail);
2269 return true;
2272 bool WarpBuilder::build_FinalYieldRval(BytecodeLocation loc) {
2273 MDefinition* gen = current->pop();
2275 auto setSlotNull = [this, gen](size_t slot) {
2276 auto* ins = MStoreFixedSlot::NewBarriered(alloc(), gen, slot,
2277 constant(NullValue()));
2278 current->add(ins);
2281 // Close the generator
2282 setSlotNull(AbstractGeneratorObject::calleeSlot());
2283 setSlotNull(AbstractGeneratorObject::envChainSlot());
2284 setSlotNull(AbstractGeneratorObject::argsObjectSlot());
2285 setSlotNull(AbstractGeneratorObject::stackStorageSlot());
2286 setSlotNull(AbstractGeneratorObject::resumeIndexSlot());
2288 // Return
2289 return build_RetRval(loc);
2292 bool WarpBuilder::build_AsyncResolve(BytecodeLocation loc) {
2293 MDefinition* generator = current->pop();
2294 MDefinition* value = current->pop();
2296 auto* resolve = MAsyncResolve::New(alloc(), generator, value);
2297 current->add(resolve);
2298 current->push(resolve);
2299 return resumeAfter(resolve, loc);
2302 bool WarpBuilder::build_AsyncReject(BytecodeLocation loc) {
2303 MDefinition* generator = current->pop();
2304 MDefinition* stack = current->pop();
2305 MDefinition* reason = current->pop();
2307 auto* reject = MAsyncReject::New(alloc(), generator, reason, stack);
2308 current->add(reject);
2309 current->push(reject);
2310 return resumeAfter(reject, loc);
2313 bool WarpBuilder::build_ResumeKind(BytecodeLocation loc) {
2314 GeneratorResumeKind resumeKind = loc.resumeKind();
2316 current->push(constant(Int32Value(static_cast<int32_t>(resumeKind))));
2317 return true;
2320 bool WarpBuilder::build_CheckResumeKind(BytecodeLocation loc) {
2321 // Outside of `yield*`, this is normally unreachable code in Warp,
2322 // so we just manipulate the stack appropriately to ensure correct
2323 // MIR generation.
2325 // However, `yield*` emits a forced generator return which can be
2326 // warp compiled, so in order to correctly handle these semantics
2327 // we also generate a bailout, so that the forced generator return
2328 // runs in baseline.
2329 MDefinition* resumeKind = current->pop();
2330 MDefinition* gen = current->pop();
2331 MDefinition* rval = current->peek(-1);
2333 // Mark operands as implicitly used.
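// They have no real uses here because we always bail out below.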
2334 resumeKind->setImplicitlyUsedUnchecked();
2335 gen->setImplicitlyUsedUnchecked();
2336 rval->setImplicitlyUsedUnchecked();
2338 // Bail out if we encounter CheckResumeKind.
2339 MBail* bail = MBail::New(alloc(), BailoutKind::Inevitable);
2340 current->add(bail);
2341 current->setAlwaysBails();
2343 return true;
2346 bool WarpBuilder::build_CanSkipAwait(BytecodeLocation loc) {
2347 MDefinition* val = current->pop();
2349 MCanSkipAwait* canSkip = MCanSkipAwait::New(alloc(), val);
2350 current->add(canSkip);
2352 current->push(val);
2353 current->push(canSkip);
2355 return resumeAfter(canSkip, loc);
2358 bool WarpBuilder::build_MaybeExtractAwaitValue(BytecodeLocation loc) {
2359 MDefinition* canSkip = current->pop();
2360 MDefinition* value = current->pop();
2362 MMaybeExtractAwaitValue* extracted =
2363 MMaybeExtractAwaitValue::New(alloc(), value, canSkip);
2364 current->add(extracted);
2366 current->push(extracted);
2367 current->push(canSkip);
2369 return resumeAfter(extracted, loc);
2372 bool WarpBuilder::build_InitialYield(BytecodeLocation loc) {
2373 MDefinition* gen = current->pop();
2374 return buildSuspend(loc, gen, gen);
2377 bool WarpBuilder::build_Await(BytecodeLocation loc) {
2378 MDefinition* gen = current->pop();
2379 MDefinition* promiseOrGenerator = current->pop();
2381 return buildSuspend(loc, gen, promiseOrGenerator);
2383 bool WarpBuilder::build_Yield(BytecodeLocation loc) { return build_Await(loc); }
2385 bool WarpBuilder::buildSuspend(BytecodeLocation loc, MDefinition* gen,
2386 MDefinition* retVal) {
2387 // If required, unbox the generator object explicitly and infallibly.
2389 // This is done to avoid fuzz-bugs where ApplyTypeInformation does the
2390 // unboxing, and generates fallible unboxes which can lead to torn object
2391 // state due to `bailAfter`.
2392 MDefinition* genObj = gen;
2393 if (genObj->type() != MIRType::Object) {
2394 auto* unbox =
2395 MUnbox::New(alloc(), gen, MIRType::Object, MUnbox::Mode::Infallible);
2396 current->add(unbox);
2398 genObj = unbox;
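// Copy the live locals and expression-stack slots into the generator's stack
// storage array so they can be restored when the generator resumes.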
2401 int32_t slotsToCopy = current->stackDepth() - info().firstLocalSlot();
2402 MOZ_ASSERT(slotsToCopy >= 0);
2403 if (slotsToCopy > 0) {
2404 auto* arrayObj = MLoadFixedSlotAndUnbox::New(
2405 alloc(), genObj, AbstractGeneratorObject::stackStorageSlot(),
2406 MUnbox::Mode::Infallible, MIRType::Object);
2407 current->add(arrayObj);
2409 auto* stackStorage = MElements::New(alloc(), arrayObj);
2410 current->add(stackStorage);
2412 for (int32_t i = 0; i < slotsToCopy; i++) {
2413 if (!alloc().ensureBallast()) {
2414 return false;
2416 // Use peekUnchecked because we also read the local slots, not just the expression stack.
2417 int32_t peek = -slotsToCopy + i;
2418 MDefinition* stackElem = current->peekUnchecked(peek);
2419 auto* store = MStoreElement::NewUnbarriered(
2420 alloc(), stackStorage, constant(Int32Value(i)), stackElem,
2421 /* needsHoleCheck = */ false);
2423 current->add(store);
2424 current->add(MPostWriteBarrier::New(alloc(), arrayObj, stackElem));
2427 auto* len = constant(Int32Value(slotsToCopy - 1));
2429 auto* setInitLength =
2430 MSetInitializedLength::New(alloc(), stackStorage, len);
2431 current->add(setInitLength);
2433 auto* setLength = MSetArrayLength::New(alloc(), stackStorage, len);
2434 current->add(setLength);
2437 // Update the generator object's state.
2438 uint32_t resumeIndex = loc.getResumeIndex();
2440 // This store is unbarriered: it only ever stores an integer, which the GC
2441 // doesn't trace.
2442 current->add(MStoreFixedSlot::NewUnbarriered(
2443 alloc(), genObj, AbstractGeneratorObject::resumeIndexSlot(),
2444 constant(Int32Value(resumeIndex))));
2446 // This store is barriered because it stores an object value.
2447 current->add(MStoreFixedSlot::NewBarriered(
2448 alloc(), genObj, AbstractGeneratorObject::envChainSlot(),
2449 current->environmentChain()));
2451 current->add(
2452 MPostWriteBarrier::New(alloc(), genObj, current->environmentChain()));
2454 // GeneratorReturn will return from the method; however, to support MIR
2455 // generation it isn't treated like the end of a block.
2456 MGeneratorReturn* ret = MGeneratorReturn::New(alloc(), retVal);
2457 current->add(ret);
2459 // To ensure the rest of the MIR generation looks correct, fill the stack with
2460 // appropriately typed MUnreachableResult instructions for the values this
2461 // opcode pushes.
2462 auto* unreachableResumeKind =
2463 MUnreachableResult::New(alloc(), MIRType::Int32);
2464 current->add(unreachableResumeKind);
2465 current->push(unreachableResumeKind);
2467 auto* unreachableGenerator =
2468 MUnreachableResult::New(alloc(), MIRType::Object);
2469 current->add(unreachableGenerator);
2470 current->push(unreachableGenerator);
2472 auto* unreachableRval = MUnreachableResult::New(alloc(), MIRType::Value);
2473 current->add(unreachableRval);
2474 current->push(unreachableRval);
2476 return true;
2479 bool WarpBuilder::build_AsyncAwait(BytecodeLocation loc) {
2480 MDefinition* gen = current->pop();
2481 MDefinition* value = current->pop();
2483 MAsyncAwait* asyncAwait = MAsyncAwait::New(alloc(), value, gen);
2484 current->add(asyncAwait);
2485 current->push(asyncAwait);
2486 return resumeAfter(asyncAwait, loc);
2489 bool WarpBuilder::build_CheckReturn(BytecodeLocation loc) {
2490 MOZ_ASSERT(!script_->noScriptRval());
2492 MDefinition* returnValue = current->getSlot(info().returnValueSlot());
2493 MDefinition* thisValue = current->pop();
2495 auto* ins = MCheckReturn::New(alloc(), returnValue, thisValue);
2496 current->add(ins);
2497 current->push(ins);
2498 return resumeAfter(ins, loc);
2501 void WarpBuilder::buildCheckLexicalOp(BytecodeLocation loc) {
2502 JSOp op = loc.getOp();
2503 MOZ_ASSERT(op == JSOp::CheckLexical || op == JSOp::CheckAliasedLexical);
2505 MDefinition* input = current->pop();
2506 MInstruction* lexicalCheck = MLexicalCheck::New(alloc(), input);
2507 current->add(lexicalCheck);
2508 current->push(lexicalCheck);
2510 if (snapshot().bailoutInfo().failedLexicalCheck()) {
2511 // If we have previously had a failed lexical check in Ion, we want to avoid
2512 // hoisting any lexical checks, which can cause spurious failures. In this
2513 // case, we also have to be careful not to hoist any loads of this lexical
2514 // past the check. For unaliased lexical variables, we can set the local
2515 // slot to create a dependency (see below). For aliased lexicals, that
2516 // doesn't work, so we disable LICM instead.
2517 lexicalCheck->setNotMovable();
2518 if (op == JSOp::CheckAliasedLexical) {
2519 mirGen().disableLICM();
2523 if (op == JSOp::CheckLexical) {
2524 // Set the local slot so that a subsequent GetLocal without a CheckLexical
2525 // (the frontend can elide lexical checks) doesn't let a definition with
2526 // MIRType::MagicUninitializedLexical escape to arbitrary MIR instructions.
2527 // Note that in this case the GetLocal would be unreachable because we throw
2528 // an exception here, but we still generate MIR instructions for it.
2529 uint32_t slot = info().localSlot(loc.local());
2530 current->setSlot(slot, lexicalCheck);
2534 bool WarpBuilder::build_CheckLexical(BytecodeLocation loc) {
2535 buildCheckLexicalOp(loc);
2536 return true;
2539 bool WarpBuilder::build_CheckAliasedLexical(BytecodeLocation loc) {
2540 buildCheckLexicalOp(loc);
2541 return true;
2544 bool WarpBuilder::build_InitHomeObject(BytecodeLocation loc) {
2545 MDefinition* homeObject = current->pop();
2546 MDefinition* function = current->pop();
2548 current->add(MPostWriteBarrier::New(alloc(), function, homeObject));
2550 auto* ins = MInitHomeObject::New(alloc(), function, homeObject);
2551 current->add(ins);
2552 current->push(ins);
2553 return true;
2556 bool WarpBuilder::build_SuperBase(BytecodeLocation) {
2557 MDefinition* callee = current->pop();
2559 auto* homeObject = MHomeObject::New(alloc(), callee);
2560 current->add(homeObject);
2562 auto* superBase = MHomeObjectSuperBase::New(alloc(), homeObject);
2563 current->add(superBase);
2564 current->push(superBase);
2565 return true;
2568 bool WarpBuilder::build_SuperFun(BytecodeLocation) {
2569 MDefinition* callee = current->pop();
2570 auto* ins = MSuperFunction::New(alloc(), callee);
2571 current->add(ins);
2572 current->push(ins);
2573 return true;
2576 bool WarpBuilder::build_BuiltinObject(BytecodeLocation loc) {
2577 if (auto* snapshot = getOpSnapshot<WarpBuiltinObject>(loc)) {
2578 JSObject* builtin = snapshot->builtin();
2579 pushConstant(ObjectValue(*builtin));
2580 return true;
2583 auto kind = loc.getBuiltinObjectKind();
2584 auto* ins = MBuiltinObject::New(alloc(), kind);
2585 current->add(ins);
2586 current->push(ins);
2587 return resumeAfter(ins, loc);
2590 bool WarpBuilder::build_GetIntrinsic(BytecodeLocation loc) {
2591 if (auto* snapshot = getOpSnapshot<WarpGetIntrinsic>(loc)) {
2592 Value intrinsic = snapshot->intrinsic();
2593 pushConstant(intrinsic);
2594 return true;
2597 PropertyName* name = loc.getPropertyName(script_);
2598 MCallGetIntrinsicValue* ins = MCallGetIntrinsicValue::New(alloc(), name);
2599 current->add(ins);
2600 current->push(ins);
2601 return resumeAfter(ins, loc);
2604 bool WarpBuilder::build_ImportMeta(BytecodeLocation loc) {
2605 ModuleObject* moduleObj = scriptSnapshot()->moduleObject();
2606 MOZ_ASSERT(moduleObj);
2608 MModuleMetadata* ins = MModuleMetadata::New(alloc(), moduleObj);
2609 current->add(ins);
2610 current->push(ins);
2611 return resumeAfter(ins, loc);
2614 bool WarpBuilder::build_CallSiteObj(BytecodeLocation loc) {
2615 return build_Object(loc);
2618 bool WarpBuilder::build_NewArray(BytecodeLocation loc) {
2619 return buildIC(loc, CacheKind::NewArray, {});
2622 bool WarpBuilder::build_NewObject(BytecodeLocation loc) {
2623 return buildIC(loc, CacheKind::NewObject, {});
2626 bool WarpBuilder::build_NewInit(BytecodeLocation loc) {
2627 return build_NewObject(loc);
2630 bool WarpBuilder::build_Object(BytecodeLocation loc) {
2631 JSObject* obj = loc.getObject(script_);
2632 MConstant* objConst = constant(ObjectValue(*obj));
2634 current->push(objConst);
2635 return true;
2638 bool WarpBuilder::buildInitPropGetterSetterOp(BytecodeLocation loc) {
2639 PropertyName* name = loc.getPropertyName(script_);
2640 MDefinition* value = current->pop();
2641 MDefinition* obj = current->peek(-1);
2643 auto* ins = MInitPropGetterSetter::New(alloc(), obj, value, name);
2644 current->add(ins);
2645 return resumeAfter(ins, loc);
2648 bool WarpBuilder::build_InitPropGetter(BytecodeLocation loc) {
2649 return buildInitPropGetterSetterOp(loc);
2652 bool WarpBuilder::build_InitPropSetter(BytecodeLocation loc) {
2653 return buildInitPropGetterSetterOp(loc);
2656 bool WarpBuilder::build_InitHiddenPropGetter(BytecodeLocation loc) {
2657 return buildInitPropGetterSetterOp(loc);
2660 bool WarpBuilder::build_InitHiddenPropSetter(BytecodeLocation loc) {
2661 return buildInitPropGetterSetterOp(loc);
2664 bool WarpBuilder::buildInitElemGetterSetterOp(BytecodeLocation loc) {
2665 MDefinition* value = current->pop();
2666 MDefinition* id = current->pop();
2667 MDefinition* obj = current->peek(-1);
2669 auto* ins = MInitElemGetterSetter::New(alloc(), obj, id, value);
2670 current->add(ins);
2671 return resumeAfter(ins, loc);
2674 bool WarpBuilder::build_InitElemGetter(BytecodeLocation loc) {
2675 return buildInitElemGetterSetterOp(loc);
2678 bool WarpBuilder::build_InitElemSetter(BytecodeLocation loc) {
2679 return buildInitElemGetterSetterOp(loc);
2682 bool WarpBuilder::build_InitHiddenElemGetter(BytecodeLocation loc) {
2683 return buildInitElemGetterSetterOp(loc);
2686 bool WarpBuilder::build_InitHiddenElemSetter(BytecodeLocation loc) {
2687 return buildInitElemGetterSetterOp(loc);
2690 bool WarpBuilder::build_In(BytecodeLocation loc) {
2691 MDefinition* obj = current->pop();
2692 MDefinition* id = current->pop();
2693 return buildIC(loc, CacheKind::In, {id, obj});
2696 bool WarpBuilder::build_HasOwn(BytecodeLocation loc) {
2697 MDefinition* obj = current->pop();
2698 MDefinition* id = current->pop();
2699 return buildIC(loc, CacheKind::HasOwn, {id, obj});
2702 bool WarpBuilder::build_CheckPrivateField(BytecodeLocation loc) {
2703 MDefinition* id = current->peek(-1);
2704 MDefinition* obj = current->peek(-2);
2705 return buildIC(loc, CacheKind::CheckPrivateField, {obj, id});
2708 bool WarpBuilder::build_NewPrivateName(BytecodeLocation loc) {
2709 JSAtom* name = loc.getAtom(script_);
2711 auto* ins = MNewPrivateName::New(alloc(), name);
2712 current->add(ins);
2713 current->push(ins);
2714 return resumeAfter(ins, loc);
2717 bool WarpBuilder::build_Instanceof(BytecodeLocation loc) {
2718 MDefinition* rhs = current->pop();
2719 MDefinition* obj = current->pop();
2720 return buildIC(loc, CacheKind::InstanceOf, {obj, rhs});
2723 bool WarpBuilder::build_NewTarget(BytecodeLocation loc) {
2724 MOZ_ASSERT(script_->isFunction());
2725 MOZ_ASSERT(info().hasFunMaybeLazy());
2726 MOZ_ASSERT(!scriptSnapshot()->isArrowFunction());
2728 if (inlineCallInfo()) {
2729 if (inlineCallInfo()->constructing()) {
2730 current->push(inlineCallInfo()->getNewTarget());
2731 } else {
2732 pushConstant(UndefinedValue());
2734 return true;
2737 MNewTarget* ins = MNewTarget::New(alloc());
2738 current->add(ins);
2739 current->push(ins);
2740 return true;
2743 bool WarpBuilder::build_CheckIsObj(BytecodeLocation loc) {
2744 CheckIsObjectKind kind = loc.getCheckIsObjectKind();
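// If the value is already typed as an object, the check can never fail, so
// elide it.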
2746 MDefinition* toCheck = current->peek(-1);
2747 if (toCheck->type() == MIRType::Object) {
2748 toCheck->setImplicitlyUsedUnchecked();
2749 return true;
2752 MDefinition* val = current->pop();
2753 MCheckIsObj* ins = MCheckIsObj::New(alloc(), val, uint8_t(kind));
2754 current->add(ins);
2755 current->push(ins);
2756 return resumeAfter(ins, loc);
2759 bool WarpBuilder::build_CheckObjCoercible(BytecodeLocation loc) {
2760 MDefinition* val = current->pop();
2761 MCheckObjCoercible* ins = MCheckObjCoercible::New(alloc(), val);
2762 current->add(ins);
2763 current->push(ins);
2764 return resumeAfter(ins, loc);
2767 MInstruction* WarpBuilder::buildLoadSlot(MDefinition* obj,
2768 uint32_t numFixedSlots,
2769 uint32_t slot) {
2770 if (slot < numFixedSlots) {
2771 MLoadFixedSlot* load = MLoadFixedSlot::New(alloc(), obj, slot);
2772 current->add(load);
2773 return load;
2776 MSlots* slots = MSlots::New(alloc(), obj);
2777 current->add(slots);
2779 MLoadDynamicSlot* load =
2780 MLoadDynamicSlot::New(alloc(), slots, slot - numFixedSlots);
2781 current->add(load);
2782 return load;
2785 bool WarpBuilder::build_GetImport(BytecodeLocation loc) {
2786 auto* snapshot = getOpSnapshot<WarpGetImport>(loc);
2788 ModuleEnvironmentObject* targetEnv = snapshot->targetEnv();
2790 // Load the target environment slot.
2791 MConstant* obj = constant(ObjectValue(*targetEnv));
2792 auto* load = buildLoadSlot(obj, snapshot->numFixedSlots(), snapshot->slot());
2794 if (snapshot->needsLexicalCheck()) {
2795 // TODO: IonBuilder has code to mark non-movable. See buildCheckLexicalOp.
2796 MInstruction* lexicalCheck = MLexicalCheck::New(alloc(), load);
2797 current->add(lexicalCheck);
2798 current->push(lexicalCheck);
2799 } else {
2800 current->push(load);
2803 return true;
2806 bool WarpBuilder::build_GetPropSuper(BytecodeLocation loc) {
2807 MDefinition* obj = current->pop();
2808 MDefinition* receiver = current->pop();
2809 return buildIC(loc, CacheKind::GetPropSuper, {obj, receiver});
2812 bool WarpBuilder::build_GetElemSuper(BytecodeLocation loc) {
2813 MDefinition* obj = current->pop();
2814 MDefinition* id = current->pop();
2815 MDefinition* receiver = current->pop();
2816 return buildIC(loc, CacheKind::GetElemSuper, {obj, id, receiver});
2819 bool WarpBuilder::build_InitProp(BytecodeLocation loc) {
2820 MDefinition* val = current->pop();
2821 MDefinition* obj = current->peek(-1);
2822 return buildIC(loc, CacheKind::SetProp, {obj, val});
2825 bool WarpBuilder::build_InitLockedProp(BytecodeLocation loc) {
2826 return build_InitProp(loc);
2829 bool WarpBuilder::build_InitHiddenProp(BytecodeLocation loc) {
2830 return build_InitProp(loc);
2833 bool WarpBuilder::build_InitElem(BytecodeLocation loc) {
2834 MDefinition* val = current->pop();
2835 MDefinition* id = current->pop();
2836 MDefinition* obj = current->peek(-1);
2837 return buildIC(loc, CacheKind::SetElem, {obj, id, val});
2840 bool WarpBuilder::build_InitLockedElem(BytecodeLocation loc) {
2841 return build_InitElem(loc);
2844 bool WarpBuilder::build_InitHiddenElem(BytecodeLocation loc) {
2845 return build_InitElem(loc);
2848 bool WarpBuilder::build_InitElemArray(BytecodeLocation loc) {
2849 MDefinition* val = current->pop();
2850 MDefinition* obj = current->peek(-1);
2852 // Note: getInitElemArrayIndex asserts the index fits in int32_t.
2853 uint32_t index = loc.getInitElemArrayIndex();
2854 MConstant* indexConst = constant(Int32Value(index));
2856 // Note: InitArrayElemOperation asserts the index does not exceed the array's
2857 // dense element capacity.
2859 auto* elements = MElements::New(alloc(), obj);
2860 current->add(elements);
2862 if (val->type() == MIRType::MagicHole) {
2863 val->setImplicitlyUsedUnchecked();
2864 auto* store = MStoreHoleValueElement::New(alloc(), elements, indexConst);
2865 current->add(store);
2866 } else {
2867 current->add(MPostWriteBarrier::New(alloc(), obj, val));
2868 auto* store =
2869 MStoreElement::NewUnbarriered(alloc(), elements, indexConst, val,
2870 /* needsHoleCheck = */ false);
2871 current->add(store);
2874 auto* setLength = MSetInitializedLength::New(alloc(), elements, indexConst);
2875 current->add(setLength);
2877 return resumeAfter(setLength, loc);
2880 bool WarpBuilder::build_InitElemInc(BytecodeLocation loc) {
2881 MDefinition* val = current->pop();
2882 MDefinition* index = current->pop();
2883 MDefinition* obj = current->peek(-1);
2885 // Push index + 1.
2886 MConstant* constOne = constant(Int32Value(1));
2887 MAdd* nextIndex = MAdd::New(alloc(), index, constOne, TruncateKind::Truncate);
2888 current->add(nextIndex);
2889 current->push(nextIndex);
2891 return buildIC(loc, CacheKind::SetElem, {obj, index, val});
2894 bool WarpBuilder::build_Lambda(BytecodeLocation loc) {
2895 MOZ_ASSERT(usesEnvironmentChain());
2897 MDefinition* env = current->environmentChain();
2899 JSFunction* fun = loc.getFunction(script_);
2900 MConstant* funConst = constant(ObjectValue(*fun));
2902 auto* ins = MLambda::New(alloc(), env, funConst);
2903 current->add(ins);
2904 current->push(ins);
2905 return resumeAfter(ins, loc);
2908 bool WarpBuilder::build_FunWithProto(BytecodeLocation loc) {
2909 MOZ_ASSERT(usesEnvironmentChain());
2911 MDefinition* proto = current->pop();
2912 MDefinition* env = current->environmentChain();
2914 JSFunction* fun = loc.getFunction(script_);
2915 MConstant* funConst = constant(ObjectValue(*fun));
2917 auto* ins = MFunctionWithProto::New(alloc(), env, proto, funConst);
2918 current->add(ins);
2919 current->push(ins);
2920 return resumeAfter(ins, loc);
2923 bool WarpBuilder::build_SpreadCall(BytecodeLocation loc) {
2924 bool constructing = false;
2925 CallInfo callInfo(alloc(), constructing, loc.resultIsPopped());
2926 callInfo.initForSpreadCall(current);
2928 // The argument must be an array object. Add an infallible MUnbox if needed,
2929 // but make sure it isn't hoisted above the branch in the bytecode that
2930 // guards against it being undefined.
2931 MOZ_ASSERT(callInfo.argc() == 1);
2932 callInfo.setArg(0, unboxObjectInfallible(callInfo.getArg(0), IsMovable::No));
2934 if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
2935 return transpileCall(loc, cacheIRSnapshot, &callInfo);
2938 bool needsThisCheck = false;
2939 MInstruction* call = makeSpreadCall(callInfo, needsThisCheck);
2940 if (!call) {
2941 return false;
2943 call->setBailoutKind(BailoutKind::TooManyArguments);
2944 current->add(call);
2945 current->push(call);
2946 return resumeAfter(call, loc);
2949 bool WarpBuilder::build_SpreadNew(BytecodeLocation loc) {
2950 bool constructing = true;
2951 CallInfo callInfo(alloc(), constructing, loc.resultIsPopped());
2952 callInfo.initForSpreadCall(current);
2954 // See build_SpreadCall.
2955 MOZ_ASSERT(callInfo.argc() == 1);
2956 callInfo.setArg(0, unboxObjectInfallible(callInfo.getArg(0), IsMovable::No));
2958 if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
2959 return transpileCall(loc, cacheIRSnapshot, &callInfo);
2962 buildCreateThis(callInfo);
2964 bool needsThisCheck = true;
2965 MInstruction* call = makeSpreadCall(callInfo, needsThisCheck);
2966 if (!call) {
2967 return false;
2969 call->setBailoutKind(BailoutKind::TooManyArguments);
2970 current->add(call);
2971 current->push(call);
2972 return resumeAfter(call, loc);
2975 bool WarpBuilder::build_SpreadSuperCall(BytecodeLocation loc) {
2976 return build_SpreadNew(loc);
2979 bool WarpBuilder::build_OptimizeSpreadCall(BytecodeLocation loc) {
2980 MDefinition* value = current->pop();
2981 return buildIC(loc, CacheKind::OptimizeSpreadCall, {value});
2984 bool WarpBuilder::build_Debugger(BytecodeLocation loc) {
2985 // The |debugger;| statement will bail out to Baseline if the realm is a
2986 // debuggee realm with an onDebuggerStatement hook.
2987 MDebugger* debugger = MDebugger::New(alloc());
2988 current->add(debugger);
2989 return resumeAfter(debugger, loc);
2992 bool WarpBuilder::build_TableSwitch(BytecodeLocation loc) {
2993 int32_t low = loc.getTableSwitchLow();
2994 int32_t high = loc.getTableSwitchHigh();
2995 size_t numCases = high - low + 1;
2997 MDefinition* input = current->pop();
2998 MTableSwitch* tableswitch = MTableSwitch::New(alloc(), input, low, high);
2999 current->end(tableswitch);
3001 // Table mapping from target bytecode offset to MTableSwitch successor index.
3002 // This prevents adding multiple predecessor/successor edges to the same
3003 // target block, which isn't valid in MIR.
3004 using TargetToSuccessorMap =
3005 InlineMap<uint32_t, uint32_t, 8, DefaultHasher<uint32_t>,
3006 SystemAllocPolicy>;
3007 TargetToSuccessorMap targetToSuccessor;
3009 // Create |default| edge.
3011 BytecodeLocation defaultLoc = loc.getTableSwitchDefaultTarget();
3012 uint32_t defaultOffset = defaultLoc.bytecodeToOffset(script_);
3014 size_t index;
3015 if (!tableswitch->addDefault(nullptr, &index)) {
3016 return false;
3018 if (!addPendingEdge(defaultLoc, current, index)) {
3019 return false;
3021 if (!targetToSuccessor.put(defaultOffset, index)) {
3022 return false;
3026 // Add all cases.
3027 for (size_t i = 0; i < numCases; i++) {
3028 BytecodeLocation caseLoc = loc.getTableSwitchCaseTarget(script_, i);
3029 uint32_t caseOffset = caseLoc.bytecodeToOffset(script_);
3031 size_t index;
3032 if (auto p = targetToSuccessor.lookupForAdd(caseOffset)) {
3033 index = p->value();
3034 } else {
3035 if (!tableswitch->addSuccessor(nullptr, &index)) {
3036 return false;
3038 if (!addPendingEdge(caseLoc, current, index)) {
3039 return false;
3041 if (!targetToSuccessor.add(p, caseOffset, index)) {
3042 return false;
3045 if (!tableswitch->addCase(index)) {
3046 return false;
3050 setTerminatedBlock();
3051 return true;
3054 bool WarpBuilder::build_Rest(BytecodeLocation loc) {
3055 auto* snapshot = getOpSnapshot<WarpRest>(loc);
3056 Shape* shape = snapshot ? snapshot->shape() : nullptr;
3058 // NOTE: Keep this code in sync with |ArgumentsReplacer|.
3060 if (inlineCallInfo()) {
3061 // If we are inlining, we know the actual arguments.
3062 unsigned numActuals = inlineCallInfo()->argc();
3063 unsigned numFormals = info().nargs() - 1;
3064 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
3066 // TODO: support pre-tenuring.
3067 gc::Heap heap = gc::Heap::Default;
3069 // Allocate an array of the correct size.
3070 MInstruction* newArray;
3071 if (shape && gc::CanUseFixedElementsForArray(numRest)) {
3072 auto* shapeConstant = MConstant::NewShape(alloc(), shape);
3073 current->add(shapeConstant);
3074 newArray = MNewArrayObject::New(alloc(), shapeConstant, numRest, heap);
3075 } else {
3076 MConstant* templateConst = constant(NullValue());
3077 newArray = MNewArray::NewVM(alloc(), numRest, templateConst, heap);
3079 current->add(newArray);
3080 current->push(newArray);
3082 if (numRest == 0) {
3083 // No more updating to do.
3084 return true;
3087 MElements* elements = MElements::New(alloc(), newArray);
3088 current->add(elements);
3090 // Unroll the argument copy loop. We don't need to do any bounds or hole
3091 // checking here.
3092 MConstant* index = nullptr;
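// Track the last index constant; it is reused after the loop to set the
// initialized length.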
3093 for (uint32_t i = numFormals; i < numActuals; i++) {
3094 if (!alloc().ensureBallast()) {
3095 return false;
3098 index = MConstant::New(alloc(), Int32Value(i - numFormals));
3099 current->add(index);
3101 MDefinition* arg = inlineCallInfo()->argv()[i];
3102 MStoreElement* store =
3103 MStoreElement::NewUnbarriered(alloc(), elements, index, arg,
3104 /* needsHoleCheck = */ false);
3105 current->add(store);
3106 current->add(MPostWriteBarrier::New(alloc(), newArray, arg));
3109 // Update the initialized length for all the (necessarily non-hole)
3110 // elements added.
3111 MSetInitializedLength* initLength =
3112 MSetInitializedLength::New(alloc(), elements, index);
3113 current->add(initLength);
3115 return true;
3118 MArgumentsLength* numActuals = MArgumentsLength::New(alloc());
3119 current->add(numActuals);
3121 // Pass in the number of actual arguments, the number of formals (not
3122 // including the rest parameter slot itself), and the shape.
3123 unsigned numFormals = info().nargs() - 1;
3124 MRest* rest = MRest::New(alloc(), numActuals, numFormals, shape);
3125 current->add(rest);
3126 current->push(rest);
3127 return true;
3130 bool WarpBuilder::build_Try(BytecodeLocation loc) {
3131 graph().setHasTryBlock();
3133 MBasicBlock* pred = current;
3134 if (!startNewBlock(pred, loc.next())) {
3135 return false;
3138 pred->end(MGoto::New(alloc(), current));
3139 return true;
3142 bool WarpBuilder::build_Finally(BytecodeLocation loc) {
3143 MOZ_ASSERT(graph().hasTryBlock());
3144 return true;
3147 bool WarpBuilder::build_Exception(BytecodeLocation) {
3148 MOZ_CRASH("Unreachable because we skip catch-blocks");
3151 bool WarpBuilder::build_ExceptionAndStack(BytecodeLocation) {
3152 MOZ_CRASH("Unreachable because we skip catch-blocks");
3155 bool WarpBuilder::build_Throw(BytecodeLocation loc) {
3156 MDefinition* def = current->pop();
3158 MThrow* ins = MThrow::New(alloc(), def);
3159 current->add(ins);
3160 if (!resumeAfter(ins, loc)) {
3161 return false;
3164 // Terminate the block.
3165 current->end(MUnreachable::New(alloc()));
3166 setTerminatedBlock();
3167 return true;
3170 bool WarpBuilder::build_ThrowWithStack(BytecodeLocation loc) {
3171 MDefinition* stack = current->pop();
3172 MDefinition* value = current->pop();
3174 auto* ins = MThrowWithStack::New(alloc(), value, stack);
3175 current->add(ins);
3176 if (!resumeAfter(ins, loc)) {
3177 return false;
3180 // Terminate the block.
3181 current->end(MUnreachable::New(alloc()));
3182 setTerminatedBlock();
3183 return true;
3186 bool WarpBuilder::build_ThrowSetConst(BytecodeLocation loc) {
3187 auto* ins = MThrowRuntimeLexicalError::New(alloc(), JSMSG_BAD_CONST_ASSIGN);
3188 current->add(ins);
3189 if (!resumeAfter(ins, loc)) {
3190 return false;
3193 // Terminate the block.
3194 current->end(MUnreachable::New(alloc()));
3195 setTerminatedBlock();
3196 return true;
3199 bool WarpBuilder::build_ThrowMsg(BytecodeLocation loc) {
3200 auto* ins = MThrowMsg::New(alloc(), loc.throwMsgKind());
3201 current->add(ins);
3202 if (!resumeAfter(ins, loc)) {
3203 return false;
3206 // Terminate the block.
3207 current->end(MUnreachable::New(alloc()));
3208 setTerminatedBlock();
3209 return true;
3212 bool WarpBuilder::buildIC(BytecodeLocation loc, CacheKind kind,
3213 std::initializer_list<MDefinition*> inputs) {
3214 MOZ_ASSERT(loc.opHasIC());
3216 mozilla::DebugOnly<size_t> numInputs = inputs.size();
3217 MOZ_ASSERT(numInputs == NumInputsForCacheKind(kind));
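// Prefer transpiling the CacheIR snapshot collected by the Baseline ICs; the
// cases below are generic fallback IC instructions.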
3219 if (auto* cacheIRSnapshot = getOpSnapshot<WarpCacheIR>(loc)) {
3220 return TranspileCacheIRToMIR(this, loc, cacheIRSnapshot, inputs);
3223 if (getOpSnapshot<WarpBailout>(loc)) {
3224 for (MDefinition* input : inputs) {
3225 input->setImplicitlyUsedUnchecked();
3227 return buildBailoutForColdIC(loc, kind);
3230 if (const auto* inliningSnapshot = getOpSnapshot<WarpInlinedCall>(loc)) {
3231 // The CallInfo will be initialized by the transpiler.
3232 bool ignoresRval = BytecodeIsPopped(loc.toRawBytecode());
3233 CallInfo callInfo(alloc(), /*constructing =*/false, ignoresRval);
3234 callInfo.markAsInlined();
3236 if (!TranspileCacheIRToMIR(this, loc, inliningSnapshot->cacheIRSnapshot(),
3237 inputs, &callInfo)) {
3238 return false;
3240 return buildInlinedCall(loc, inliningSnapshot, callInfo);
3243 // Work around std::initializer_list not defining operator[].
3244 auto getInput = [&](size_t index) -> MDefinition* {
3245 MOZ_ASSERT(index < numInputs);
3246 return inputs.begin()[index];
3249 switch (kind) {
3250 case CacheKind::UnaryArith: {
3251 MOZ_ASSERT(numInputs == 1);
3252 auto* ins = MUnaryCache::New(alloc(), getInput(0));
3253 current->add(ins);
3254 current->push(ins);
3255 return resumeAfter(ins, loc);
3257 case CacheKind::ToPropertyKey: {
3258 MOZ_ASSERT(numInputs == 1);
3259 auto* ins = MToPropertyKeyCache::New(alloc(), getInput(0));
3260 current->add(ins);
3261 current->push(ins);
3262 return resumeAfter(ins, loc);
3264 case CacheKind::BinaryArith: {
3265 MOZ_ASSERT(numInputs == 2);
3266 auto* ins =
3267 MBinaryCache::New(alloc(), getInput(0), getInput(1), MIRType::Value);
3268 current->add(ins);
3269 current->push(ins);
3270 return resumeAfter(ins, loc);
3272 case CacheKind::Compare: {
3273 MOZ_ASSERT(numInputs == 2);
3274 auto* ins = MBinaryCache::New(alloc(), getInput(0), getInput(1),
3275 MIRType::Boolean);
3276 current->add(ins);
3277 current->push(ins);
3278 return resumeAfter(ins, loc);
3280 case CacheKind::In: {
3281 MOZ_ASSERT(numInputs == 2);
3282 auto* ins = MInCache::New(alloc(), getInput(0), getInput(1));
3283 current->add(ins);
3284 current->push(ins);
3285 return resumeAfter(ins, loc);
3287 case CacheKind::HasOwn: {
3288 MOZ_ASSERT(numInputs == 2);
3289 // Note: the MHasOwnCache constructor takes obj/id instead of id/obj.
3290 auto* ins = MHasOwnCache::New(alloc(), getInput(1), getInput(0));
3291 current->add(ins);
3292 current->push(ins);
3293 return resumeAfter(ins, loc);
3295 case CacheKind::CheckPrivateField: {
3296 MOZ_ASSERT(numInputs == 2);
3297 auto* ins =
3298 MCheckPrivateFieldCache::New(alloc(), getInput(0), getInput(1));
3299 current->add(ins);
3300 current->push(ins);
3301 return resumeAfter(ins, loc);
3303 case CacheKind::InstanceOf: {
3304 MOZ_ASSERT(numInputs == 2);
3305 auto* ins = MInstanceOfCache::New(alloc(), getInput(0), getInput(1));
3306 current->add(ins);
3307 current->push(ins);
3308 return resumeAfter(ins, loc);
3310 case CacheKind::BindName: {
3311 MOZ_ASSERT(numInputs == 1);
3312 auto* ins = MBindNameCache::New(alloc(), getInput(0));
3313 current->add(ins);
3314 current->push(ins);
3315 return resumeAfter(ins, loc);
3317 case CacheKind::GetIterator: {
3318 MOZ_ASSERT(numInputs == 1);
3319 auto* ins = MGetIteratorCache::New(alloc(), getInput(0));
3320 current->add(ins);
3321 current->push(ins);
3322 return resumeAfter(ins, loc);
3324 case CacheKind::GetName: {
3325 MOZ_ASSERT(numInputs == 1);
3326 auto* ins = MGetNameCache::New(alloc(), getInput(0));
3327 current->add(ins);
3328 current->push(ins);
3329 return resumeAfter(ins, loc);
3331 case CacheKind::GetProp: {
3332 MOZ_ASSERT(numInputs == 1);
3333 PropertyName* name = loc.getPropertyName(script_);
3334 MConstant* id = constant(StringValue(name));
3335 MDefinition* val = getInput(0);
3336 auto* ins = MGetPropertyCache::New(alloc(), val, id);
3337 current->add(ins);
3338 current->push(ins);
3339 return resumeAfter(ins, loc);
3341 case CacheKind::GetElem: {
3342 MOZ_ASSERT(numInputs == 2);
3343 MDefinition* val = getInput(0);
3344 auto* ins = MGetPropertyCache::New(alloc(), val, getInput(1));
3345 current->add(ins);
3346 current->push(ins);
3347 return resumeAfter(ins, loc);
3349 case CacheKind::SetProp: {
3350 MOZ_ASSERT(numInputs == 2);
3351 PropertyName* name = loc.getPropertyName(script_);
3352 MConstant* id = constant(StringValue(name));
3353 bool strict = loc.isStrictSetOp();
3354 auto* ins =
3355 MSetPropertyCache::New(alloc(), getInput(0), id, getInput(1), strict);
3356 current->add(ins);
3357 return resumeAfter(ins, loc);
3359 case CacheKind::SetElem: {
3360 MOZ_ASSERT(numInputs == 3);
3361 bool strict = loc.isStrictSetOp();
3362 auto* ins = MSetPropertyCache::New(alloc(), getInput(0), getInput(1),
3363 getInput(2), strict);
3364 current->add(ins);
3365 return resumeAfter(ins, loc);
3367 case CacheKind::GetPropSuper: {
3368 MOZ_ASSERT(numInputs == 2);
3369 PropertyName* name = loc.getPropertyName(script_);
3370 MConstant* id = constant(StringValue(name));
3371 auto* ins =
3372 MGetPropSuperCache::New(alloc(), getInput(0), getInput(1), id);
3373 current->add(ins);
3374 current->push(ins);
3375 return resumeAfter(ins, loc);
3377 case CacheKind::GetElemSuper: {
3378 MOZ_ASSERT(numInputs == 3);
3379 // Note: CacheIR expects obj/id/receiver but MGetPropSuperCache takes
3380 // obj/receiver/id so swap the last two inputs.
3381 auto* ins = MGetPropSuperCache::New(alloc(), getInput(0), getInput(2),
3382 getInput(1));
3383 current->add(ins);
3384 current->push(ins);
3385 return resumeAfter(ins, loc);
3387 case CacheKind::OptimizeSpreadCall: {
3388 MOZ_ASSERT(numInputs == 1);
3389 auto* ins = MOptimizeSpreadCallCache::New(alloc(), getInput(0));
3390 current->add(ins);
3391 current->push(ins);
3392 return resumeAfter(ins, loc);
3394 case CacheKind::TypeOf: {
3395 // Note: Warp does not have a TypeOf IC; it just inlines the operation.
3396 MOZ_ASSERT(numInputs == 1);
3397 auto* typeOf = MTypeOf::New(alloc(), getInput(0));
3398 current->add(typeOf);
3400 auto* ins = MTypeOfName::New(alloc(), typeOf);
3401 current->add(ins);
3402 current->push(ins);
3403 return true;
3405 case CacheKind::NewObject: {
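// Without a CacheIR snapshot there is no template object, so allocate through
// a VM call.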
3406 auto* templateConst = constant(NullValue());
3407 MNewObject* ins = MNewObject::NewVM(
3408 alloc(), templateConst, gc::Heap::Default, MNewObject::ObjectLiteral);
3409 current->add(ins);
3410 current->push(ins);
3411 return resumeAfter(ins, loc);
3413 case CacheKind::NewArray: {
3414 uint32_t length = loc.getNewArrayLength();
3415 MConstant* templateConst = constant(NullValue());
3416 MNewArray* ins =
3417 MNewArray::NewVM(alloc(), length, templateConst, gc::Heap::Default);
3418 current->add(ins);
3419 current->push(ins);
3420 return true;
3422 case CacheKind::CloseIter: {
3423 MOZ_ASSERT(numInputs == 1);
3424 static_assert(sizeof(CompletionKind) == sizeof(uint8_t));
3425 CompletionKind kind = loc.getCompletionKind();
3426 auto* ins = MCloseIterCache::New(alloc(), getInput(0), uint8_t(kind));
3427 current->add(ins);
3428 return resumeAfter(ins, loc);
3430 case CacheKind::OptimizeGetIterator: {
3431 MOZ_ASSERT(numInputs == 1);
3432 auto* ins = MOptimizeGetIteratorCache::New(alloc(), getInput(0));
3433 current->add(ins);
3434 current->push(ins);
3435 return resumeAfter(ins, loc);
3437 case CacheKind::GetIntrinsic:
3438 case CacheKind::ToBool:
3439 case CacheKind::Call:
3440 // We're currently not using an IC or transpiling CacheIR for these kinds.
3441 MOZ_CRASH("Unexpected kind");
3444 return true;
3447 bool WarpBuilder::buildBailoutForColdIC(BytecodeLocation loc, CacheKind kind) {
3448 MOZ_ASSERT(loc.opHasIC());
3450 MBail* bail = MBail::New(alloc(), BailoutKind::FirstExecution);
3451 current->add(bail);
3452 current->setAlwaysBails();
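// The block always bails, but push a dummy result of the IC's result type so
// the MIR stack stays balanced.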
3454 MIRType resultType;
3455 switch (kind) {
3456 case CacheKind::UnaryArith:
3457 case CacheKind::BinaryArith:
3458 case CacheKind::GetName:
3459 case CacheKind::GetProp:
3460 case CacheKind::GetElem:
3461 case CacheKind::GetPropSuper:
3462 case CacheKind::GetElemSuper:
3463 case CacheKind::GetIntrinsic:
3464 case CacheKind::Call:
3465 case CacheKind::ToPropertyKey:
3466 case CacheKind::OptimizeSpreadCall:
3467 resultType = MIRType::Value;
3468 break;
3469 case CacheKind::BindName:
3470 case CacheKind::GetIterator:
3471 case CacheKind::NewArray:
3472 case CacheKind::NewObject:
3473 resultType = MIRType::Object;
3474 break;
3475 case CacheKind::TypeOf:
3476 resultType = MIRType::String;
3477 break;
3478 case CacheKind::ToBool:
3479 case CacheKind::Compare:
3480 case CacheKind::In:
3481 case CacheKind::HasOwn:
3482 case CacheKind::CheckPrivateField:
3483 case CacheKind::InstanceOf:
3484 case CacheKind::OptimizeGetIterator:
3485 resultType = MIRType::Boolean;
3486 break;
3487 case CacheKind::SetProp:
3488 case CacheKind::SetElem:
3489 case CacheKind::CloseIter:
3490 return true; // No result.
3493 auto* ins = MUnreachableResult::New(alloc(), resultType);
3494 current->add(ins);
3495 current->push(ins);
3497 return true;
3500 class MOZ_RAII AutoAccumulateReturns {
3501 MIRGraph& graph_;
3502 MIRGraphReturns* prev_;
3504 public:
3505 AutoAccumulateReturns(MIRGraph& graph, MIRGraphReturns& returns)
3506 : graph_(graph) {
3507 prev_ = graph_.returnAccumulator();
3508 graph_.setReturnAccumulator(&returns);
3510 ~AutoAccumulateReturns() { graph_.setReturnAccumulator(prev_); }
3513 bool WarpBuilder::buildInlinedCall(BytecodeLocation loc,
3514 const WarpInlinedCall* inlineSnapshot,
3515 CallInfo& callInfo) {
3516 jsbytecode* pc = loc.toRawBytecode();
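// Overview: capture the caller's stack in an outer resume point, build the
// callee's MIR with a nested WarpBuilder, then join the callee's return blocks
// into a single return block that pushes the return value.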
3518 if (callInfo.isSetter()) {
3519 // build_SetProp pushes the rhs argument onto the stack. Remove it
3520 // in preparation for pushCallStack.
3521 current->pop();
3524 callInfo.setImplicitlyUsedUnchecked();
3526 // Capture formals in the outer resume point.
3527 if (!callInfo.pushCallStack(current)) {
3528 return false;
3530 MResumePoint* outerResumePoint =
3531 MResumePoint::New(alloc(), current, pc, callInfo.inliningResumeMode());
3532 if (!outerResumePoint) {
3533 return false;
3535 current->setOuterResumePoint(outerResumePoint);
3537 // Pop formals again, except leave |callee| on stack for duration of call.
3538 callInfo.popCallStack(current);
3539 current->push(callInfo.callee());
3541 // Build the graph.
3542 CompileInfo* calleeCompileInfo = inlineSnapshot->info();
3543 MIRGraphReturns returns(alloc());
3544 AutoAccumulateReturns aar(graph(), returns);
3545 WarpBuilder inlineBuilder(this, inlineSnapshot->scriptSnapshot(),
3546 *calleeCompileInfo, &callInfo, outerResumePoint);
3547 if (!inlineBuilder.buildInline()) {
3548 // Note: Inlining only aborts on OOM. If inlining would fail for
3549 // any other reason, we detect it in advance and don't inline.
3550 return false;
3553 // We mark scripts as uninlineable in BytecodeAnalysis if we cannot
3554 // reach a return statement (without going through a catch/finally).
3555 MOZ_ASSERT(!returns.empty());
3557 // Create return block
3558 BytecodeLocation postCall = loc.next();
3559 MBasicBlock* prev = current;
3560 if (!startNewEntryBlock(prev->stackDepth(), postCall)) {
3561 return false;
3563 // Restore previous value of callerResumePoint.
3564 current->setCallerResumePoint(callerResumePoint());
3565 current->inheritSlots(prev);
3567 // Pop |callee|.
3568 current->pop();
3570 // Accumulate return values.
3571 MDefinition* returnValue =
3572 patchInlinedReturns(calleeCompileInfo, callInfo, returns, current);
3573 if (!returnValue) {
3574 return false;
3576 current->push(returnValue);
3578 // Initialize entry slots
3579 if (!current->initEntrySlots(alloc())) {
3580 return false;
3583 return true;
3586 MDefinition* WarpBuilder::patchInlinedReturns(CompileInfo* calleeCompileInfo,
3587 CallInfo& callInfo,
3588 MIRGraphReturns& exits,
3589 MBasicBlock* returnBlock) {
3590 if (exits.length() == 1) {
3591 return patchInlinedReturn(calleeCompileInfo, callInfo, exits[0],
3592 returnBlock);
3595 // Accumulate multiple returns with a phi.
3596 MPhi* phi = MPhi::New(alloc());
3597 if (!phi->reserveLength(exits.length())) {
3598 return nullptr;
3601 for (auto* exit : exits) {
3602 MDefinition* rdef =
3603 patchInlinedReturn(calleeCompileInfo, callInfo, exit, returnBlock);
3604 if (!rdef) {
3605 return nullptr;
3607 phi->addInput(rdef);
3609 returnBlock->addPhi(phi);
3610 return phi;
3613 MDefinition* WarpBuilder::patchInlinedReturn(CompileInfo* calleeCompileInfo,
3614 CallInfo& callInfo,
3615 MBasicBlock* exit,
3616 MBasicBlock* returnBlock) {
3617 // Replace the MReturn in the exit block with an MGoto branching to
3618 // the return block.
3619 MDefinition* rdef = exit->lastIns()->toReturn()->input();
3620 exit->discardLastIns();
3622 // Constructors must be patched by the caller to always return an object.
3623 // Derived class constructors contain extra bytecode to ensure an object
3624 // is always returned, so no additional patching is needed.
3625 if (callInfo.constructing() &&
3626 !calleeCompileInfo->isDerivedClassConstructor()) {
3627 auto* filter = MReturnFromCtor::New(alloc(), rdef, callInfo.thisArg());
3628 exit->add(filter);
3629 rdef = filter;
3630 } else if (callInfo.isSetter()) {
3631 // Setters return the rhs argument, not whatever value is returned.
3632 rdef = callInfo.getArg(0);
3635 exit->end(MGoto::New(alloc(), returnBlock));
3636 if (!returnBlock->addPredecessorWithoutPhis(exit)) {
3637 return nullptr;
3640 return rdef;