// Bug 1837620 - Part 1: Remove baseline ICs that guard shapes when the shape
// becomes unreachable.
// [gecko.git] js/src/jit/Ion.cpp
// blob dad29f85275b9bcd5fe4623f0e248ac44783e6f2
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "jit/Ion.h"

#include "mozilla/CheckedInt.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerPrintfMacros.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/ThreadLocal.h"

#include "gc/GCContext.h"
#include "gc/PublicIterators.h"
#include "jit/AliasAnalysis.h"
#include "jit/AlignmentMaskAnalysis.h"
#include "jit/AutoWritableJitCode.h"
#include "jit/BacktrackingAllocator.h"
#include "jit/BaselineFrame.h"
#include "jit/BaselineJIT.h"
#include "jit/CodeGenerator.h"
#include "jit/CompileInfo.h"
#include "jit/EdgeCaseAnalysis.h"
#include "jit/EffectiveAddressAnalysis.h"
#include "jit/ExecutableAllocator.h"
#include "jit/FoldLinearArithConstants.h"
#include "jit/InlineScriptTree.h"
#include "jit/InstructionReordering.h"
#include "jit/Invalidation.h"
#include "jit/IonAnalysis.h"
#include "jit/IonCompileTask.h"
#include "jit/IonIC.h"
#include "jit/IonOptimizationLevels.h"
#include "jit/IonScript.h"
#include "jit/JitcodeMap.h"
#include "jit/JitFrames.h"
#include "jit/JitRealm.h"
#include "jit/JitRuntime.h"
#include "jit/JitSpewer.h"
#include "jit/JitZone.h"
#include "jit/LICM.h"
#include "jit/Linker.h"
#include "jit/LIR.h"
#include "jit/Lowering.h"
#include "jit/PerfSpewer.h"
#include "jit/RangeAnalysis.h"
#include "jit/ScalarReplacement.h"
#include "jit/ScriptFromCalleeToken.h"
#include "jit/Sink.h"
#include "jit/ValueNumbering.h"
#include "jit/WarpBuilder.h"
#include "jit/WarpOracle.h"
#include "jit/WasmBCE.h"
#include "js/Printf.h"
#include "js/UniquePtr.h"
#include "util/Memory.h"
#include "util/WindowsWrapper.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#ifdef MOZ_VTUNE
#  include "vtune/VTuneWrapper.h"
#endif

#include "gc/GC-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "jit/InlineScriptTree-inl.h"
#include "jit/MacroAssembler-inl.h"
#include "jit/SafepointIndex-inl.h"
#include "vm/GeckoProfiler-inl.h"
#include "vm/JSScript-inl.h"
#include "vm/Realm-inl.h"

#if defined(ANDROID)
#  include <sys/system_properties.h>
#endif

using mozilla::CheckedInt;
using mozilla::DebugOnly;

using namespace js;
using namespace js::jit;
85 JitRuntime::~JitRuntime() {
86 MOZ_ASSERT(numFinishedOffThreadTasks_ == 0);
87 MOZ_ASSERT(ionLazyLinkListSize_ == 0);
88 MOZ_ASSERT(ionLazyLinkList_.ref().isEmpty());
90 // By this point, the jitcode global table should be empty.
91 MOZ_ASSERT_IF(jitcodeGlobalTable_, jitcodeGlobalTable_->empty());
92 js_delete(jitcodeGlobalTable_.ref());
94 // interpreterEntryMap should be cleared out during finishRoots()
95 MOZ_ASSERT_IF(interpreterEntryMap_, interpreterEntryMap_->empty());
96 js_delete(interpreterEntryMap_.ref());
98 js_delete(jitHintsMap_.ref());
101 uint32_t JitRuntime::startTrampolineCode(MacroAssembler& masm) {
102 AutoCreatedBy acb(masm, "startTrampolineCode");
104 masm.assumeUnreachable("Shouldn't get here");
105 masm.flushBuffer();
106 masm.haltingAlign(CodeAlignment);
107 masm.setFramePushed(0);
108 return masm.currentOffset();
111 bool JitRuntime::initialize(JSContext* cx) {
112 MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
114 AutoAllocInAtomsZone az(cx);
115 JitContext jctx(cx);
117 if (!generateTrampolines(cx)) {
118 return false;
121 if (!generateBaselineICFallbackCode(cx)) {
122 return false;
125 jitcodeGlobalTable_ = cx->new_<JitcodeGlobalTable>();
126 if (!jitcodeGlobalTable_) {
127 return false;
130 if (!JitOptions.disableJitHints) {
131 jitHintsMap_ = cx->new_<JitHintsMap>();
132 if (!jitHintsMap_) {
133 return false;
137 if (JitOptions.emitInterpreterEntryTrampoline) {
138 interpreterEntryMap_ = cx->new_<EntryTrampolineMap>();
139 if (!interpreterEntryMap_) {
140 return false;
144 if (!GenerateBaselineInterpreter(cx, baselineInterpreter_)) {
145 return false;
148 // Initialize the jitCodeRaw of the Runtime's canonical SelfHostedLazyScript
149 // to point to the interpreter trampoline.
150 cx->runtime()->selfHostedLazyScript.ref().jitCodeRaw_ =
151 interpreterStub().value;
153 return true;
156 bool JitRuntime::generateTrampolines(JSContext* cx) {
157 TempAllocator temp(&cx->tempLifoAlloc());
158 StackMacroAssembler masm(cx, temp);
159 PerfSpewerRangeRecorder rangeRecorder(masm);
161 Label bailoutTail;
162 JitSpew(JitSpew_Codegen, "# Emitting bailout tail stub");
163 generateBailoutTailStub(masm, &bailoutTail);
165 JitSpew(JitSpew_Codegen, "# Emitting bailout handler");
166 generateBailoutHandler(masm, &bailoutTail);
167 rangeRecorder.recordOffset("Trampoline: Bailout");
169 JitSpew(JitSpew_Codegen, "# Emitting invalidator");
170 generateInvalidator(masm, &bailoutTail);
171 rangeRecorder.recordOffset("Trampoline: Invalidator");
173 // The arguments rectifier has to use the same frame layout as the function
174 // frames it rectifies.
175 static_assert(std::is_base_of_v<JitFrameLayout, RectifierFrameLayout>,
176 "a rectifier frame can be used with jit frame");
177 static_assert(std::is_base_of_v<JitFrameLayout, WasmToJSJitFrameLayout>,
178 "wasm frames simply are jit frames");
179 static_assert(sizeof(JitFrameLayout) == sizeof(WasmToJSJitFrameLayout),
180 "thus a rectifier frame can be used with a wasm frame");
182 JitSpew(JitSpew_Codegen, "# Emitting arguments rectifier");
183 generateArgumentsRectifier(masm, ArgumentsRectifierKind::Normal);
184 rangeRecorder.recordOffset("Trampoline: Arguments Rectifier");
186 JitSpew(JitSpew_Codegen, "# Emitting trial inlining arguments rectifier");
187 generateArgumentsRectifier(masm, ArgumentsRectifierKind::TrialInlining);
188 rangeRecorder.recordOffset(
189 "Trampoline: Arguments Rectifier (Trial Inlining)");
191 JitSpew(JitSpew_Codegen, "# Emitting EnterJIT sequence");
192 generateEnterJIT(cx, masm);
193 rangeRecorder.recordOffset("Trampoline: EnterJIT");
195 JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Value");
196 valuePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Value);
197 rangeRecorder.recordOffset("Trampoline: PreBarrier Value");
199 JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for String");
200 stringPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::String);
201 rangeRecorder.recordOffset("Trampoline: PreBarrier String");
203 JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Object");
204 objectPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Object);
205 rangeRecorder.recordOffset("Trampoline: PreBarrier Object");
207 JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Shape");
208 shapePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Shape);
209 rangeRecorder.recordOffset("Trampoline: PreBarrier Shape");
211 JitSpew(JitSpew_Codegen, "# Emitting free stub");
212 generateFreeStub(masm);
213 rangeRecorder.recordOffset("Trampoline: FreeStub");
215 JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
216 generateLazyLinkStub(masm);
217 rangeRecorder.recordOffset("Trampoline: LazyLinkStub");
219 JitSpew(JitSpew_Codegen, "# Emitting interpreter stub");
220 generateInterpreterStub(masm);
221 rangeRecorder.recordOffset("Trampoline: Interpreter");
223 JitSpew(JitSpew_Codegen, "# Emitting double-to-int32-value stub");
224 generateDoubleToInt32ValueStub(masm);
225 rangeRecorder.recordOffset("Trampoline: DoubleToInt32ValueStub");
227 JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
228 if (!generateVMWrappers(cx, masm, rangeRecorder)) {
229 return false;
232 JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
233 Label profilerExitTail;
234 generateProfilerExitFrameTailStub(masm, &profilerExitTail);
235 rangeRecorder.recordOffset("Trampoline: ProfilerExitFrameTailStub");
237 JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
238 generateExceptionTailStub(masm, &profilerExitTail, &bailoutTail);
239 rangeRecorder.recordOffset("Trampoline: ExceptionTailStub");
241 Linker linker(masm);
242 trampolineCode_ = linker.newCode(cx, CodeKind::Other);
243 if (!trampolineCode_) {
244 return false;
247 rangeRecorder.collectRangesForJitCode(trampolineCode_);
248 #ifdef MOZ_VTUNE
249 vtune::MarkStub(trampolineCode_, "Trampolines");
250 #endif
252 return true;
255 JitCode* JitRuntime::debugTrapHandler(JSContext* cx,
256 DebugTrapHandlerKind kind) {
257 if (!debugTrapHandlers_[kind]) {
258 // JitRuntime code stubs are shared across compartments and have to
259 // be allocated in the atoms zone.
260 mozilla::Maybe<AutoAllocInAtomsZone> az;
261 if (!cx->zone()->isAtomsZone()) {
262 az.emplace(cx);
264 debugTrapHandlers_[kind] = generateDebugTrapHandler(cx, kind);
266 return debugTrapHandlers_[kind];
269 JitRuntime::IonCompileTaskList& JitRuntime::ionLazyLinkList(JSRuntime* rt) {
270 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
271 "Should only be mutated by the main thread.");
272 return ionLazyLinkList_.ref();
275 void JitRuntime::ionLazyLinkListRemove(JSRuntime* rt,
276 jit::IonCompileTask* task) {
277 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
278 "Should only be mutated by the main thread.");
279 MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
280 MOZ_ASSERT(ionLazyLinkListSize_ > 0);
282 task->removeFrom(ionLazyLinkList(rt));
283 ionLazyLinkListSize_--;
285 MOZ_ASSERT(ionLazyLinkList(rt).isEmpty() == (ionLazyLinkListSize_ == 0));
288 void JitRuntime::ionLazyLinkListAdd(JSRuntime* rt, jit::IonCompileTask* task) {
289 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
290 "Should only be mutated by the main thread.");
291 MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
292 ionLazyLinkList(rt).insertFront(task);
293 ionLazyLinkListSize_++;
296 uint8_t* JitRuntime::allocateIonOsrTempData(size_t size) {
297 // Free the old buffer (if needed) before allocating a new one. Note that we
298 // could use realloc here but it's likely not worth the complexity.
299 freeIonOsrTempData();
300 ionOsrTempData_.ref().reset(static_cast<uint8_t*>(js_malloc(size)));
301 return ionOsrTempData_.ref().get();
304 void JitRuntime::freeIonOsrTempData() { ionOsrTempData_.ref().reset(); }
306 JitRealm::JitRealm() : initialStringHeap(gc::Heap::Tenured) {}
308 void JitRealm::initialize(bool zoneHasNurseryStrings) {
309 setStringsCanBeInNursery(zoneHasNurseryStrings);
312 template <typename T>
313 static T PopNextBitmaskValue(uint32_t* bitmask) {
314 MOZ_ASSERT(*bitmask);
315 uint32_t index = mozilla::CountTrailingZeroes32(*bitmask);
316 *bitmask ^= 1 << index;
318 MOZ_ASSERT(index < uint32_t(T::Count));
319 return T(index);
322 void JitRealm::performStubReadBarriers(uint32_t stubsToBarrier) const {
323 while (stubsToBarrier) {
324 auto stub = PopNextBitmaskValue<StubIndex>(&stubsToBarrier);
325 const WeakHeapPtr<JitCode*>& jitCode = stubs_[stub];
326 MOZ_ASSERT(jitCode);
327 jitCode.get();
331 static bool LinkCodeGen(JSContext* cx, CodeGenerator* codegen,
332 HandleScript script, const WarpSnapshot* snapshot) {
333 if (!codegen->link(cx, snapshot)) {
334 return false;
337 return true;
340 static bool LinkBackgroundCodeGen(JSContext* cx, IonCompileTask* task) {
341 CodeGenerator* codegen = task->backgroundCodegen();
342 if (!codegen) {
343 return false;
346 JitContext jctx(cx);
347 RootedScript script(cx, task->script());
348 return LinkCodeGen(cx, codegen, script, task->snapshot());
351 void jit::LinkIonScript(JSContext* cx, HandleScript calleeScript) {
352 // Get the pending IonCompileTask from the script.
353 MOZ_ASSERT(calleeScript->hasBaselineScript());
354 IonCompileTask* task =
355 calleeScript->baselineScript()->pendingIonCompileTask();
356 calleeScript->baselineScript()->removePendingIonCompileTask(cx->runtime(),
357 calleeScript);
359 // Remove from pending.
360 cx->runtime()->jitRuntime()->ionLazyLinkListRemove(cx->runtime(), task);
363 gc::AutoSuppressGC suppressGC(cx);
364 if (!LinkBackgroundCodeGen(cx, task)) {
365 // Silently ignore OOM during code generation. The assembly code
366 // doesn't have code to handle it after linking happened. So it's
367 // not OK to throw a catchable exception from there.
368 cx->clearPendingException();
373 AutoLockHelperThreadState lock;
374 FinishOffThreadTask(cx->runtime(), task, lock);
378 uint8_t* jit::LazyLinkTopActivation(JSContext* cx,
379 LazyLinkExitFrameLayout* frame) {
380 RootedScript calleeScript(
381 cx, ScriptFromCalleeToken(frame->jsFrame()->calleeToken()));
383 LinkIonScript(cx, calleeScript);
385 MOZ_ASSERT(calleeScript->hasBaselineScript());
386 MOZ_ASSERT(calleeScript->jitCodeRaw());
388 return calleeScript->jitCodeRaw();
391 /* static */
392 void JitRuntime::TraceAtomZoneRoots(JSTracer* trc) {
393 MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
395 // Shared stubs are allocated in the atoms zone, so do not iterate
396 // them after the atoms heap after it has been "finished."
397 if (trc->runtime()->atomsAreFinished()) {
398 return;
401 Zone* zone = trc->runtime()->atomsZone();
402 for (auto i = zone->cellIterUnsafe<JitCode>(); !i.done(); i.next()) {
403 JitCode* code = i;
404 TraceRoot(trc, &code, "wrapper");
408 /* static */
409 bool JitRuntime::MarkJitcodeGlobalTableIteratively(GCMarker* marker) {
410 if (marker->runtime()->hasJitRuntime() &&
411 marker->runtime()->jitRuntime()->hasJitcodeGlobalTable()) {
412 return marker->runtime()
413 ->jitRuntime()
414 ->getJitcodeGlobalTable()
415 ->markIteratively(marker);
417 return false;
420 /* static */
421 void JitRuntime::TraceWeakJitcodeGlobalTable(JSRuntime* rt, JSTracer* trc) {
422 if (rt->hasJitRuntime() && rt->jitRuntime()->hasJitcodeGlobalTable()) {
423 rt->jitRuntime()->getJitcodeGlobalTable()->traceWeak(rt, trc);
427 void JitRealm::traceWeak(JSTracer* trc, JS::Realm* realm) {
428 // Any outstanding compilations should have been cancelled by the GC.
429 MOZ_ASSERT(!HasOffThreadIonCompile(realm));
431 for (WeakHeapPtr<JitCode*>& stub : stubs_) {
432 TraceWeakEdge(trc, &stub, "JitRealm::stubs_");
436 bool JitZone::addInlinedCompilation(const RecompileInfo& info,
437 JSScript* inlined) {
438 MOZ_ASSERT(inlined != info.script());
440 auto p = inlinedCompilations_.lookupForAdd(inlined);
441 if (p) {
442 auto& compilations = p->value();
443 if (!compilations.empty() && compilations.back() == info) {
444 return true;
446 return compilations.append(info);
449 RecompileInfoVector compilations;
450 if (!compilations.append(info)) {
451 return false;
453 return inlinedCompilations_.add(p, inlined, std::move(compilations));
456 void jit::AddPendingInvalidation(RecompileInfoVector& invalid,
457 JSScript* script) {
458 MOZ_ASSERT(script);
460 CancelOffThreadIonCompile(script);
462 // Let the script warm up again before attempting another compile.
463 script->resetWarmUpCounterToDelayIonCompilation();
465 JitScript* jitScript = script->maybeJitScript();
466 if (!jitScript) {
467 return;
470 auto addPendingInvalidation = [&invalid](const RecompileInfo& info) {
471 AutoEnterOOMUnsafeRegion oomUnsafe;
472 if (!invalid.append(info)) {
473 // BUG 1536159: For diagnostics, compute the size of the failed
474 // allocation. This presumes the vector growth strategy is to double. This
475 // is only used for crash reporting so not a problem if we get it wrong.
476 size_t allocSize = 2 * sizeof(RecompileInfo) * invalid.capacity();
477 oomUnsafe.crash(allocSize, "Could not update RecompileInfoVector");
481 // Trigger invalidation of the IonScript.
482 if (jitScript->hasIonScript()) {
483 RecompileInfo info(script, jitScript->ionScript()->compilationId());
484 addPendingInvalidation(info);
487 // Trigger invalidation of any callers inlining this script.
488 auto* inlinedCompilations =
489 script->zone()->jitZone()->maybeInlinedCompilations(script);
490 if (inlinedCompilations) {
491 for (const RecompileInfo& info : *inlinedCompilations) {
492 addPendingInvalidation(info);
494 script->zone()->jitZone()->removeInlinedCompilations(script);
498 IonScript* RecompileInfo::maybeIonScriptToInvalidate() const {
499 // Make sure this is not called under CodeGenerator::link (before the
500 // IonScript is created).
501 MOZ_ASSERT_IF(
502 script_->zone()->jitZone()->currentCompilationId(),
503 script_->zone()->jitZone()->currentCompilationId().ref() != id_);
505 if (!script_->hasIonScript() ||
506 script_->ionScript()->compilationId() != id_) {
507 return nullptr;
510 return script_->ionScript();
513 bool RecompileInfo::traceWeak(JSTracer* trc) {
514 // Sweep the RecompileInfo if either the script is dead or the IonScript has
515 // been invalidated.
517 if (!TraceManuallyBarrieredWeakEdge(trc, &script_, "RecompileInfo::script")) {
518 return false;
521 return maybeIonScriptToInvalidate() != nullptr;
524 void JitZone::traceWeak(JSTracer* trc, Zone* zone) {
525 MOZ_ASSERT(this == zone->jitZone());
527 baselineCacheIRStubCodes_.traceWeak(trc);
528 inlinedCompilations_.traceWeak(trc);
530 // Trace all IC chains to remove ICs with edges to dying GC things.
531 for (auto base = zone->cellIter<BaseScript>(); !base.done(); base.next()) {
532 MOZ_ASSERT_IF(IsTracerKind(trc, JS::TracerKind::Sweeping),
533 base->isMarkedAny());
535 jit::JitScript* jitScript = base->maybeJitScript();
536 if (jitScript) {
537 jitScript->traceWeak(trc);
542 size_t JitRealm::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
543 return mallocSizeOf(this);
546 void JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
547 JS::CodeSizes* code, size_t* jitZone,
548 size_t* baselineStubsOptimized) const {
549 *jitZone += mallocSizeOf(this);
550 *jitZone +=
551 baselineCacheIRStubCodes_.shallowSizeOfExcludingThis(mallocSizeOf);
552 *jitZone += ionCacheIRStubInfoSet_.shallowSizeOfExcludingThis(mallocSizeOf);
554 execAlloc().addSizeOfCode(code);
556 *baselineStubsOptimized +=
557 optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
560 void JitCodeHeader::init(JitCode* jitCode) {
561 // As long as JitCode isn't moveable, we can avoid tracing this and
562 // mutating executable data.
563 MOZ_ASSERT(!gc::IsMovableKind(gc::AllocKind::JITCODE));
564 jitCode_ = jitCode;
567 template <AllowGC allowGC>
568 JitCode* JitCode::New(JSContext* cx, uint8_t* code, uint32_t totalSize,
569 uint32_t headerSize, ExecutablePool* pool,
570 CodeKind kind) {
571 uint32_t bufferSize = totalSize - headerSize;
572 JitCode* codeObj =
573 cx->newCell<JitCode, allowGC>(code, bufferSize, headerSize, pool, kind);
574 if (!codeObj) {
575 // The caller already allocated `totalSize` bytes of executable memory.
576 pool->release(totalSize, kind);
577 return nullptr;
580 cx->zone()->incJitMemory(totalSize);
582 return codeObj;
585 template JitCode* JitCode::New<CanGC>(JSContext* cx, uint8_t* code,
586 uint32_t bufferSize, uint32_t headerSize,
587 ExecutablePool* pool, CodeKind kind);
589 template JitCode* JitCode::New<NoGC>(JSContext* cx, uint8_t* code,
590 uint32_t bufferSize, uint32_t headerSize,
591 ExecutablePool* pool, CodeKind kind);
593 void JitCode::copyFrom(MacroAssembler& masm) {
594 // Store the JitCode pointer in the JitCodeHeader so we can recover the
595 // gcthing from relocation tables.
596 JitCodeHeader::FromExecutable(raw())->init(this);
598 insnSize_ = masm.instructionsSize();
599 masm.executableCopy(raw());
601 jumpRelocTableBytes_ = masm.jumpRelocationTableBytes();
602 masm.copyJumpRelocationTable(raw() + jumpRelocTableOffset());
604 dataRelocTableBytes_ = masm.dataRelocationTableBytes();
605 masm.copyDataRelocationTable(raw() + dataRelocTableOffset());
607 masm.processCodeLabels(raw());
610 void JitCode::traceChildren(JSTracer* trc) {
611 // Note that we cannot mark invalidated scripts, since we've basically
612 // corrupted the code stream by injecting bailouts.
613 if (invalidated()) {
614 return;
617 if (jumpRelocTableBytes_) {
618 uint8_t* start = raw() + jumpRelocTableOffset();
619 CompactBufferReader reader(start, start + jumpRelocTableBytes_);
620 MacroAssembler::TraceJumpRelocations(trc, this, reader);
622 if (dataRelocTableBytes_) {
623 uint8_t* start = raw() + dataRelocTableOffset();
624 CompactBufferReader reader(start, start + dataRelocTableBytes_);
625 MacroAssembler::TraceDataRelocations(trc, this, reader);
629 void JitCode::finalize(JS::GCContext* gcx) {
630 // If this jitcode had a bytecode map, it must have already been removed.
631 #ifdef DEBUG
632 JSRuntime* rt = gcx->runtime();
633 if (hasBytecodeMap_) {
634 MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
635 MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw()));
637 #endif
639 #ifdef MOZ_VTUNE
640 vtune::UnmarkCode(this);
641 #endif
643 MOZ_ASSERT(pool_);
645 // With W^X JIT code, reprotecting memory for each JitCode instance is
646 // slow, so we record the ranges and poison them later all at once. It's
647 // safe to ignore OOM here, it just means we won't poison the code.
648 if (gcx->appendJitPoisonRange(JitPoisonRange(pool_, raw() - headerSize_,
649 headerSize_ + bufferSize_))) {
650 pool_->addRef();
652 setHeaderPtr(nullptr);
654 #ifdef JS_ION_PERF
655 // Code buffers are stored inside ExecutablePools. Pools are refcounted.
656 // Releasing the pool may free it. Horrible hack: if we are using perf
657 // integration, we don't want to reuse code addresses, so we just leak the
658 // memory instead.
659 if (!PerfEnabled()) {
660 pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
662 #else
663 pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
664 #endif
666 zone()->decJitMemory(headerSize_ + bufferSize_);
668 pool_ = nullptr;
671 IonScript::IonScript(IonCompilationId compilationId, uint32_t localSlotsSize,
672 uint32_t argumentSlotsSize, uint32_t frameSize)
673 : localSlotsSize_(localSlotsSize),
674 argumentSlotsSize_(argumentSlotsSize),
675 frameSize_(frameSize),
676 compilationId_(compilationId) {}
678 IonScript* IonScript::New(JSContext* cx, IonCompilationId compilationId,
679 uint32_t localSlotsSize, uint32_t argumentSlotsSize,
680 uint32_t frameSize, size_t snapshotsListSize,
681 size_t snapshotsRVATableSize, size_t recoversSize,
682 size_t constants, size_t nurseryObjects,
683 size_t safepointIndices, size_t osiIndices,
684 size_t icEntries, size_t runtimeSize,
685 size_t safepointsSize) {
686 if (snapshotsListSize >= MAX_BUFFER_SIZE) {
687 ReportOutOfMemory(cx);
688 return nullptr;
691 // Verify the hardcoded sizes in header are accurate.
692 static_assert(SizeOf_OsiIndex == sizeof(OsiIndex),
693 "IonScript has wrong size for OsiIndex");
694 static_assert(SizeOf_SafepointIndex == sizeof(SafepointIndex),
695 "IonScript has wrong size for SafepointIndex");
697 CheckedInt<Offset> allocSize = sizeof(IonScript);
698 allocSize += CheckedInt<Offset>(constants) * sizeof(Value);
699 allocSize += CheckedInt<Offset>(runtimeSize);
700 allocSize += CheckedInt<Offset>(nurseryObjects) * sizeof(HeapPtr<JSObject*>);
701 allocSize += CheckedInt<Offset>(osiIndices) * sizeof(OsiIndex);
702 allocSize += CheckedInt<Offset>(safepointIndices) * sizeof(SafepointIndex);
703 allocSize += CheckedInt<Offset>(icEntries) * sizeof(uint32_t);
704 allocSize += CheckedInt<Offset>(safepointsSize);
705 allocSize += CheckedInt<Offset>(snapshotsListSize);
706 allocSize += CheckedInt<Offset>(snapshotsRVATableSize);
707 allocSize += CheckedInt<Offset>(recoversSize);
709 if (!allocSize.isValid()) {
710 ReportAllocationOverflow(cx);
711 return nullptr;
714 void* raw = cx->pod_malloc<uint8_t>(allocSize.value());
715 MOZ_ASSERT(uintptr_t(raw) % alignof(IonScript) == 0);
716 if (!raw) {
717 return nullptr;
719 IonScript* script = new (raw)
720 IonScript(compilationId, localSlotsSize, argumentSlotsSize, frameSize);
722 Offset offsetCursor = sizeof(IonScript);
724 MOZ_ASSERT(offsetCursor % alignof(Value) == 0);
725 script->constantTableOffset_ = offsetCursor;
726 offsetCursor += constants * sizeof(Value);
728 MOZ_ASSERT(offsetCursor % alignof(uint64_t) == 0);
729 script->runtimeDataOffset_ = offsetCursor;
730 offsetCursor += runtimeSize;
732 MOZ_ASSERT(offsetCursor % alignof(HeapPtr<JSObject*>) == 0);
733 script->initElements<HeapPtr<JSObject*>>(offsetCursor, nurseryObjects);
734 script->nurseryObjectsOffset_ = offsetCursor;
735 offsetCursor += nurseryObjects * sizeof(HeapPtr<JSObject*>);
737 MOZ_ASSERT(offsetCursor % alignof(OsiIndex) == 0);
738 script->osiIndexOffset_ = offsetCursor;
739 offsetCursor += osiIndices * sizeof(OsiIndex);
741 MOZ_ASSERT(offsetCursor % alignof(SafepointIndex) == 0);
742 script->safepointIndexOffset_ = offsetCursor;
743 offsetCursor += safepointIndices * sizeof(SafepointIndex);
745 MOZ_ASSERT(offsetCursor % alignof(uint32_t) == 0);
746 script->icIndexOffset_ = offsetCursor;
747 offsetCursor += icEntries * sizeof(uint32_t);
749 script->safepointsOffset_ = offsetCursor;
750 offsetCursor += safepointsSize;
752 script->snapshotsOffset_ = offsetCursor;
753 offsetCursor += snapshotsListSize;
755 script->rvaTableOffset_ = offsetCursor;
756 offsetCursor += snapshotsRVATableSize;
758 script->recoversOffset_ = offsetCursor;
759 offsetCursor += recoversSize;
761 script->allocBytes_ = offsetCursor;
763 MOZ_ASSERT(script->numConstants() == constants);
764 MOZ_ASSERT(script->runtimeSize() == runtimeSize);
765 MOZ_ASSERT(script->numNurseryObjects() == nurseryObjects);
766 MOZ_ASSERT(script->numOsiIndices() == osiIndices);
767 MOZ_ASSERT(script->numSafepointIndices() == safepointIndices);
768 MOZ_ASSERT(script->numICs() == icEntries);
769 MOZ_ASSERT(script->safepointsSize() == safepointsSize);
770 MOZ_ASSERT(script->snapshotsListSize() == snapshotsListSize);
771 MOZ_ASSERT(script->snapshotsRVATableSize() == snapshotsRVATableSize);
772 MOZ_ASSERT(script->recoversSize() == recoversSize);
773 MOZ_ASSERT(script->endOffset() == offsetCursor);
775 return script;
778 void IonScript::trace(JSTracer* trc) {
779 if (method_) {
780 TraceEdge(trc, &method_, "method");
783 for (size_t i = 0; i < numConstants(); i++) {
784 TraceEdge(trc, &getConstant(i), "constant");
787 for (size_t i = 0; i < numNurseryObjects(); i++) {
788 TraceEdge(trc, &nurseryObjects()[i], "nursery-object");
791 // Trace caches so that the JSScript pointer can be updated if moved.
792 for (size_t i = 0; i < numICs(); i++) {
793 getICFromIndex(i).trace(trc, this);
797 void IonScript::traceWeak(JSTracer* trc) {
798 // IonICs do not currently contain weak pointers. If this is added then they
799 // should be traced here.
802 /* static */
803 void IonScript::preWriteBarrier(Zone* zone, IonScript* ionScript) {
804 PreWriteBarrier(zone, ionScript);
807 void IonScript::copySnapshots(const SnapshotWriter* writer) {
808 MOZ_ASSERT(writer->listSize() == snapshotsListSize());
809 memcpy(offsetToPointer<uint8_t>(snapshotsOffset()), writer->listBuffer(),
810 snapshotsListSize());
812 MOZ_ASSERT(snapshotsRVATableSize());
813 MOZ_ASSERT(writer->RVATableSize() == snapshotsRVATableSize());
814 memcpy(offsetToPointer<uint8_t>(rvaTableOffset()), writer->RVATableBuffer(),
815 snapshotsRVATableSize());
818 void IonScript::copyRecovers(const RecoverWriter* writer) {
819 MOZ_ASSERT(writer->size() == recoversSize());
820 memcpy(offsetToPointer<uint8_t>(recoversOffset()), writer->buffer(),
821 recoversSize());
824 void IonScript::copySafepoints(const SafepointWriter* writer) {
825 MOZ_ASSERT(writer->size() == safepointsSize());
826 memcpy(offsetToPointer<uint8_t>(safepointsOffset()), writer->buffer(),
827 safepointsSize());
830 void IonScript::copyConstants(const Value* vp) {
831 for (size_t i = 0; i < numConstants(); i++) {
832 constants()[i].init(vp[i]);
836 void IonScript::copySafepointIndices(const CodegenSafepointIndex* si) {
837 // Convert CodegenSafepointIndex to more compact form.
838 SafepointIndex* table = safepointIndices();
839 for (size_t i = 0; i < numSafepointIndices(); ++i) {
840 table[i] = SafepointIndex(si[i]);
844 void IonScript::copyOsiIndices(const OsiIndex* oi) {
845 memcpy(osiIndices(), oi, numOsiIndices() * sizeof(OsiIndex));
848 void IonScript::copyRuntimeData(const uint8_t* data) {
849 memcpy(runtimeData(), data, runtimeSize());
852 void IonScript::copyICEntries(const uint32_t* icEntries) {
853 memcpy(icIndex(), icEntries, numICs() * sizeof(uint32_t));
855 // Update the codeRaw_ field in the ICs now that we know the code address.
856 for (size_t i = 0; i < numICs(); i++) {
857 getICFromIndex(i).resetCodeRaw(this);
861 const SafepointIndex* IonScript::getSafepointIndex(uint32_t disp) const {
862 MOZ_ASSERT(numSafepointIndices() > 0);
864 const SafepointIndex* table = safepointIndices();
865 if (numSafepointIndices() == 1) {
866 MOZ_ASSERT(disp == table[0].displacement());
867 return &table[0];
870 size_t minEntry = 0;
871 size_t maxEntry = numSafepointIndices() - 1;
872 uint32_t min = table[minEntry].displacement();
873 uint32_t max = table[maxEntry].displacement();
875 // Raise if the element is not in the list.
876 MOZ_ASSERT(min <= disp && disp <= max);
878 // Approximate the location of the FrameInfo.
879 size_t guess = (disp - min) * (maxEntry - minEntry) / (max - min) + minEntry;
880 uint32_t guessDisp = table[guess].displacement();
882 if (table[guess].displacement() == disp) {
883 return &table[guess];
886 // Doing a linear scan from the guess should be more efficient in case of
887 // small group which are equally distributed on the code.
889 // such as: <... ... ... ... . ... ...>
890 if (guessDisp > disp) {
891 while (--guess >= minEntry) {
892 guessDisp = table[guess].displacement();
893 MOZ_ASSERT(guessDisp >= disp);
894 if (guessDisp == disp) {
895 return &table[guess];
898 } else {
899 while (++guess <= maxEntry) {
900 guessDisp = table[guess].displacement();
901 MOZ_ASSERT(guessDisp <= disp);
902 if (guessDisp == disp) {
903 return &table[guess];
908 MOZ_CRASH("displacement not found.");
911 const OsiIndex* IonScript::getOsiIndex(uint32_t disp) const {
912 const OsiIndex* end = osiIndices() + numOsiIndices();
913 for (const OsiIndex* it = osiIndices(); it != end; ++it) {
914 if (it->returnPointDisplacement() == disp) {
915 return it;
919 MOZ_CRASH("Failed to find OSI point return address");
922 const OsiIndex* IonScript::getOsiIndex(uint8_t* retAddr) const {
923 JitSpew(JitSpew_IonInvalidate, "IonScript %p has method %p raw %p",
924 (void*)this, (void*)method(), method()->raw());
926 MOZ_ASSERT(containsCodeAddress(retAddr));
927 uint32_t disp = retAddr - method()->raw();
928 return getOsiIndex(disp);
931 void IonScript::Destroy(JS::GCContext* gcx, IonScript* script) {
932 // Make sure there are no pointers into the IonScript's nursery objects list
933 // in the store buffer. Because this can be called during sweeping when
934 // discarding JIT code, we have to lock the store buffer when we find an
935 // object that's (still) in the nursery.
936 mozilla::Maybe<gc::AutoLockStoreBuffer> lock;
937 for (size_t i = 0, len = script->numNurseryObjects(); i < len; i++) {
938 JSObject* obj = script->nurseryObjects()[i];
939 if (!IsInsideNursery(obj)) {
940 continue;
942 if (lock.isNothing()) {
943 lock.emplace(&gcx->runtime()->gc.storeBuffer());
945 script->nurseryObjects()[i] = HeapPtr<JSObject*>();
948 // This allocation is tracked by JSScript::setIonScriptImpl.
949 gcx->deleteUntracked(script);
952 void JS::DeletePolicy<js::jit::IonScript>::operator()(
953 const js::jit::IonScript* script) {
954 IonScript::Destroy(rt_->gcContext(), const_cast<IonScript*>(script));
957 void IonScript::purgeICs(Zone* zone) {
958 for (size_t i = 0; i < numICs(); i++) {
959 getICFromIndex(i).reset(zone, this);
963 namespace js {
964 namespace jit {
966 bool OptimizeMIR(MIRGenerator* mir) {
967 MIRGraph& graph = mir->graph();
968 GraphSpewer& gs = mir->graphSpewer();
970 if (mir->shouldCancel("Start")) {
971 return false;
974 gs.spewPass("BuildSSA");
975 AssertBasicGraphCoherency(graph);
977 if (JitSpewEnabled(JitSpew_MIRExpressions)) {
978 JitSpewCont(JitSpew_MIRExpressions, "\n");
979 DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
980 "BuildSSA (== input to OptimizeMIR)");
983 if (!JitOptions.disablePruning && !mir->compilingWasm()) {
984 JitSpewCont(JitSpew_Prune, "\n");
985 if (!PruneUnusedBranches(mir, graph)) {
986 return false;
988 gs.spewPass("Prune Unused Branches");
989 AssertBasicGraphCoherency(graph);
991 if (mir->shouldCancel("Prune Unused Branches")) {
992 return false;
997 if (!FoldEmptyBlocks(graph)) {
998 return false;
1000 gs.spewPass("Fold Empty Blocks");
1001 AssertBasicGraphCoherency(graph);
1003 if (mir->shouldCancel("Fold Empty Blocks")) {
1004 return false;
1008 // Remove trivially dead resume point operands before folding tests, so the
1009 // latter pass can optimize more aggressively.
1010 if (!mir->compilingWasm()) {
1011 if (!EliminateTriviallyDeadResumePointOperands(mir, graph)) {
1012 return false;
1014 gs.spewPass("Eliminate trivially dead resume point operands");
1015 AssertBasicGraphCoherency(graph);
1017 if (mir->shouldCancel("Eliminate trivially dead resume point operands")) {
1018 return false;
1023 if (!FoldTests(graph)) {
1024 return false;
1026 gs.spewPass("Fold Tests");
1027 AssertBasicGraphCoherency(graph);
1029 if (mir->shouldCancel("Fold Tests")) {
1030 return false;
1035 if (!SplitCriticalEdges(graph)) {
1036 return false;
1038 gs.spewPass("Split Critical Edges");
1039 AssertGraphCoherency(graph);
1041 if (mir->shouldCancel("Split Critical Edges")) {
1042 return false;
1047 RenumberBlocks(graph);
1048 gs.spewPass("Renumber Blocks");
1049 AssertGraphCoherency(graph);
1051 if (mir->shouldCancel("Renumber Blocks")) {
1052 return false;
1057 if (!BuildDominatorTree(graph)) {
1058 return false;
1060 // No spew: graph not changed.
1062 if (mir->shouldCancel("Dominator Tree")) {
1063 return false;
1068 // Aggressive phi elimination must occur before any code elimination. If the
1069 // script contains a try-statement, we only compiled the try block and not
1070 // the catch or finally blocks, so in this case it's also invalid to use
1071 // aggressive phi elimination.
1072 Observability observability = graph.hasTryBlock()
1073 ? ConservativeObservability
1074 : AggressiveObservability;
1075 if (!EliminatePhis(mir, graph, observability)) {
1076 return false;
1078 gs.spewPass("Eliminate phis");
1079 AssertGraphCoherency(graph);
1081 if (mir->shouldCancel("Eliminate phis")) {
1082 return false;
1085 if (!BuildPhiReverseMapping(graph)) {
1086 return false;
1088 AssertExtendedGraphCoherency(graph);
1089 // No spew: graph not changed.
1091 if (mir->shouldCancel("Phi reverse mapping")) {
1092 return false;
1096 if (!mir->compilingWasm() && !JitOptions.disableIteratorIndices) {
1097 if (!OptimizeIteratorIndices(mir, graph)) {
1098 return false;
1100 gs.spewPass("Iterator Indices");
1101 AssertGraphCoherency(graph);
1103 if (mir->shouldCancel("Iterator Indices")) {
1104 return false;
1108 if (!JitOptions.disableRecoverIns &&
1109 mir->optimizationInfo().scalarReplacementEnabled()) {
1110 JitSpewCont(JitSpew_Escape, "\n");
1111 if (!ScalarReplacement(mir, graph)) {
1112 return false;
1114 gs.spewPass("Scalar Replacement");
1115 AssertGraphCoherency(graph);
1117 if (mir->shouldCancel("Scalar Replacement")) {
1118 return false;
1122 if (!mir->compilingWasm()) {
1123 if (!ApplyTypeInformation(mir, graph)) {
1124 return false;
1126 gs.spewPass("Apply types");
1127 AssertExtendedGraphCoherency(graph);
1129 if (mir->shouldCancel("Apply types")) {
1130 return false;
1134 if (mir->optimizationInfo().amaEnabled()) {
1135 AlignmentMaskAnalysis ama(graph);
1136 if (!ama.analyze()) {
1137 return false;
1139 gs.spewPass("Alignment Mask Analysis");
1140 AssertExtendedGraphCoherency(graph);
1142 if (mir->shouldCancel("Alignment Mask Analysis")) {
1143 return false;
1147 ValueNumberer gvn(mir, graph);
1149 // Alias analysis is required for LICM and GVN so that we don't move
1150 // loads across stores. We also use alias information when removing
1151 // redundant shapeguards.
1152 if (mir->optimizationInfo().licmEnabled() ||
1153 mir->optimizationInfo().gvnEnabled() ||
1154 mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
1156 AliasAnalysis analysis(mir, graph);
1157 JitSpewCont(JitSpew_Alias, "\n");
1158 if (!analysis.analyze()) {
1159 return false;
1162 gs.spewPass("Alias analysis");
1163 AssertExtendedGraphCoherency(graph);
1165 if (mir->shouldCancel("Alias analysis")) {
1166 return false;
1170 if (!mir->compilingWasm()) {
1171 // Eliminating dead resume point operands requires basic block
1172 // instructions to be numbered. Reuse the numbering computed during
1173 // alias analysis.
1174 if (!EliminateDeadResumePointOperands(mir, graph)) {
1175 return false;
1178 gs.spewPass("Eliminate dead resume point operands");
1179 AssertExtendedGraphCoherency(graph);
1181 if (mir->shouldCancel("Eliminate dead resume point operands")) {
1182 return false;
1187 if (mir->optimizationInfo().gvnEnabled()) {
1188 JitSpewCont(JitSpew_GVN, "\n");
1189 if (!gvn.run(ValueNumberer::UpdateAliasAnalysis)) {
1190 return false;
1192 gs.spewPass("GVN");
1193 AssertExtendedGraphCoherency(graph);
1195 if (mir->shouldCancel("GVN")) {
1196 return false;
1200 // LICM can hoist instructions from conditional branches and
1201 // trigger bailouts. Disable it if bailing out of a hoisted
1202 // instruction has previously invalidated this script.
1203 if (mir->licmEnabled()) {
1204 JitSpewCont(JitSpew_LICM, "\n");
1205 if (!LICM(mir, graph)) {
1206 return false;
1208 gs.spewPass("LICM");
1209 AssertExtendedGraphCoherency(graph);
1211 if (mir->shouldCancel("LICM")) {
1212 return false;
1216 RangeAnalysis r(mir, graph);
1217 if (mir->optimizationInfo().rangeAnalysisEnabled()) {
1218 JitSpewCont(JitSpew_Range, "\n");
1219 if (!r.addBetaNodes()) {
1220 return false;
1222 gs.spewPass("Beta");
1223 AssertExtendedGraphCoherency(graph);
1225 if (mir->shouldCancel("RA Beta")) {
1226 return false;
1229 if (!r.analyze() || !r.addRangeAssertions()) {
1230 return false;
1232 gs.spewPass("Range Analysis");
1233 AssertExtendedGraphCoherency(graph);
1235 if (mir->shouldCancel("Range Analysis")) {
1236 return false;
1239 if (!r.removeBetaNodes()) {
1240 return false;
1242 gs.spewPass("De-Beta");
1243 AssertExtendedGraphCoherency(graph);
1245 if (mir->shouldCancel("RA De-Beta")) {
1246 return false;
1249 if (mir->optimizationInfo().gvnEnabled()) {
1250 bool shouldRunUCE = false;
1251 if (!r.prepareForUCE(&shouldRunUCE)) {
1252 return false;
1254 gs.spewPass("RA check UCE");
1255 AssertExtendedGraphCoherency(graph);
1257 if (mir->shouldCancel("RA check UCE")) {
1258 return false;
1261 if (shouldRunUCE) {
1262 if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis)) {
1263 return false;
1265 gs.spewPass("UCE After RA");
1266 AssertExtendedGraphCoherency(graph);
1268 if (mir->shouldCancel("UCE After RA")) {
1269 return false;
1274 if (mir->optimizationInfo().autoTruncateEnabled()) {
1275 if (!r.truncate()) {
1276 return false;
1278 gs.spewPass("Truncate Doubles");
1279 AssertExtendedGraphCoherency(graph);
1281 if (mir->shouldCancel("Truncate Doubles")) {
1282 return false;
1287 if (!JitOptions.disableRecoverIns) {
1288 JitSpewCont(JitSpew_Sink, "\n");
1289 if (!Sink(mir, graph)) {
1290 return false;
1292 gs.spewPass("Sink");
1293 AssertExtendedGraphCoherency(graph);
1295 if (mir->shouldCancel("Sink")) {
1296 return false;
1300 if (!JitOptions.disableRecoverIns &&
1301 mir->optimizationInfo().rangeAnalysisEnabled()) {
1302 JitSpewCont(JitSpew_Range, "\n");
1303 if (!r.removeUnnecessaryBitops()) {
1304 return false;
1306 gs.spewPass("Remove Unnecessary Bitops");
1307 AssertExtendedGraphCoherency(graph);
1309 if (mir->shouldCancel("Remove Unnecessary Bitops")) {
1310 return false;
1315 JitSpewCont(JitSpew_FLAC, "\n");
1316 if (!FoldLinearArithConstants(mir, graph)) {
1317 return false;
1319 gs.spewPass("Fold Linear Arithmetic Constants");
1320 AssertBasicGraphCoherency(graph);
1322 if (mir->shouldCancel("Fold Linear Arithmetic Constants")) {
1323 return false;
1327 if (mir->optimizationInfo().eaaEnabled()) {
1328 EffectiveAddressAnalysis eaa(mir, graph);
1329 JitSpewCont(JitSpew_EAA, "\n");
1330 if (!eaa.analyze()) {
1331 return false;
1333 gs.spewPass("Effective Address Analysis");
1334 AssertExtendedGraphCoherency(graph);
1336 if (mir->shouldCancel("Effective Address Analysis")) {
1337 return false;
1341 // BCE marks bounds checks as dead, so do BCE before DCE.
1342 if (mir->compilingWasm()) {
1343 JitSpewCont(JitSpew_WasmBCE, "\n");
1344 if (!EliminateBoundsChecks(mir, graph)) {
1345 return false;
1347 gs.spewPass("Redundant Bounds Check Elimination");
1348 AssertGraphCoherency(graph);
1350 if (mir->shouldCancel("BCE")) {
1351 return false;
1356 if (!EliminateDeadCode(mir, graph)) {
1357 return false;
1359 gs.spewPass("DCE");
1360 AssertExtendedGraphCoherency(graph);
1362 if (mir->shouldCancel("DCE")) {
1363 return false;
1367 if (mir->optimizationInfo().instructionReorderingEnabled() &&
1368 !mir->outerInfo().hadReorderingBailout()) {
1369 if (!ReorderInstructions(graph)) {
1370 return false;
1372 gs.spewPass("Reordering");
1374 AssertExtendedGraphCoherency(graph);
1376 if (mir->shouldCancel("Reordering")) {
1377 return false;
1381 // Make loops contiguous. We do this after GVN/UCE and range analysis,
1382 // which can remove CFG edges, exposing more blocks that can be moved.
1384 if (!MakeLoopsContiguous(graph)) {
1385 return false;
1387 gs.spewPass("Make loops contiguous");
1388 AssertExtendedGraphCoherency(graph);
1390 if (mir->shouldCancel("Make loops contiguous")) {
1391 return false;
1394 AssertExtendedGraphCoherency(graph, /* underValueNumberer = */ false,
1395 /* force = */ true);
1397 // Remove unreachable blocks created by MBasicBlock::NewFakeLoopPredecessor
1398 // to ensure every loop header has two predecessors. (This only happens due
1399 // to OSR.) After this point, it is no longer possible to build the
1400 // dominator tree.
1401 if (!mir->compilingWasm() && graph.osrBlock()) {
1402 graph.removeFakeLoopPredecessors();
1403 gs.spewPass("Remove fake loop predecessors");
1404 AssertGraphCoherency(graph);
1406 if (mir->shouldCancel("Remove fake loop predecessors")) {
1407 return false;
1411 // Passes after this point must not move instructions; these analyses
1412 // depend on knowing the final order in which instructions will execute.
1414 if (mir->optimizationInfo().edgeCaseAnalysisEnabled()) {
1415 EdgeCaseAnalysis edgeCaseAnalysis(mir, graph);
1416 if (!edgeCaseAnalysis.analyzeLate()) {
1417 return false;
1419 gs.spewPass("Edge Case Analysis (Late)");
1420 AssertGraphCoherency(graph);
1422 if (mir->shouldCancel("Edge Case Analysis (Late)")) {
1423 return false;
1427 if (mir->optimizationInfo().eliminateRedundantChecksEnabled()) {
1428 // Note: check elimination has to run after all other passes that move
1429 // instructions. Since check uses are replaced with the actual index,
1430 // code motion after this pass could incorrectly move a load or store
1431 // before its bounds check.
1432 if (!EliminateRedundantChecks(graph)) {
1433 return false;
1435 gs.spewPass("Bounds Check Elimination");
1436 AssertGraphCoherency(graph);
1439 if (mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
1440 if (!EliminateRedundantShapeGuards(graph)) {
1441 return false;
1443 gs.spewPass("Shape Guard Elimination");
1444 AssertGraphCoherency(graph);
1447 // Run the GC Barrier Elimination pass after instruction reordering, to
1448 // ensure we don't move instructions that can trigger GC between stores we
1449 // optimize here.
1450 if (mir->optimizationInfo().eliminateRedundantGCBarriersEnabled()) {
1451 if (!EliminateRedundantGCBarriers(graph)) {
1452 return false;
1454 gs.spewPass("GC Barrier Elimination");
1455 AssertGraphCoherency(graph);
1458 if (!mir->compilingWasm() && !mir->outerInfo().hadUnboxFoldingBailout()) {
1459 if (!FoldLoadsWithUnbox(mir, graph)) {
1460 return false;
1462 gs.spewPass("FoldLoadsWithUnbox");
1463 AssertGraphCoherency(graph);
1466 if (!mir->compilingWasm()) {
1467 if (!AddKeepAliveInstructions(graph)) {
1468 return false;
1470 gs.spewPass("Add KeepAlive Instructions");
1471 AssertGraphCoherency(graph);
1474 AssertGraphCoherency(graph, /* force = */ true);
1476 if (JitSpewEnabled(JitSpew_MIRExpressions)) {
1477 JitSpewCont(JitSpew_MIRExpressions, "\n");
1478 DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
1479 "BeforeLIR (== result of OptimizeMIR)");
1482 return true;
1485 LIRGraph* GenerateLIR(MIRGenerator* mir) {
1486 MIRGraph& graph = mir->graph();
1487 GraphSpewer& gs = mir->graphSpewer();
1489 LIRGraph* lir = mir->alloc().lifoAlloc()->new_<LIRGraph>(&graph);
1490 if (!lir || !lir->init()) {
1491 return nullptr;
1494 LIRGenerator lirgen(mir, graph, *lir);
1496 if (!lirgen.generate()) {
1497 return nullptr;
1499 gs.spewPass("Generate LIR");
1501 if (mir->shouldCancel("Generate LIR")) {
1502 return nullptr;
1506 #ifdef DEBUG
1507 AllocationIntegrityState integrity(*lir);
1508 #endif
1511 IonRegisterAllocator allocator =
1512 mir->optimizationInfo().registerAllocator();
1514 switch (allocator) {
1515 case RegisterAllocator_Backtracking:
1516 case RegisterAllocator_Testbed: {
1517 #ifdef DEBUG
1518 if (JitOptions.fullDebugChecks) {
1519 if (!integrity.record()) {
1520 return nullptr;
1523 #endif
1525 BacktrackingAllocator regalloc(mir, &lirgen, *lir,
1526 allocator == RegisterAllocator_Testbed);
1527 if (!regalloc.go()) {
1528 return nullptr;
1531 #ifdef DEBUG
1532 if (JitOptions.fullDebugChecks) {
1533 if (!integrity.check()) {
1534 return nullptr;
1537 #endif
1539 gs.spewPass("Allocate Registers [Backtracking]");
1540 break;
1543 default:
1544 MOZ_CRASH("Bad regalloc");
1547 if (mir->shouldCancel("Allocate Registers")) {
1548 return nullptr;
1552 return lir;
1555 CodeGenerator* GenerateCode(MIRGenerator* mir, LIRGraph* lir) {
1556 auto codegen = MakeUnique<CodeGenerator>(mir, lir);
1557 if (!codegen) {
1558 return nullptr;
1561 if (!codegen->generate()) {
1562 return nullptr;
1565 return codegen.release();
1568 CodeGenerator* CompileBackEnd(MIRGenerator* mir, WarpSnapshot* snapshot) {
1569 // Everything in CompileBackEnd can potentially run on a helper thread.
1570 AutoEnterIonBackend enter;
1571 AutoSpewEndFunction spewEndFunction(mir);
1574 WarpCompilation comp(mir->alloc());
1575 WarpBuilder builder(*snapshot, *mir, &comp);
1576 if (!builder.build()) {
1577 return nullptr;
1581 if (!OptimizeMIR(mir)) {
1582 return nullptr;
1585 LIRGraph* lir = GenerateLIR(mir);
1586 if (!lir) {
1587 return nullptr;
1590 return GenerateCode(mir, lir);
1593 static AbortReasonOr<WarpSnapshot*> CreateWarpSnapshot(JSContext* cx,
1594 MIRGenerator* mirGen,
1595 HandleScript script) {
1596 // Suppress GC during compilation.
1597 gc::AutoSuppressGC suppressGC(cx);
1599 SpewBeginFunction(mirGen, script);
1601 WarpOracle oracle(cx, *mirGen, script);
1603 AbortReasonOr<WarpSnapshot*> result = oracle.createSnapshot();
1605 MOZ_ASSERT_IF(result.isErr(), result.unwrapErr() == AbortReason::Alloc ||
1606 result.unwrapErr() == AbortReason::Error ||
1607 result.unwrapErr() == AbortReason::Disable);
1608 MOZ_ASSERT_IF(!result.isErr(), result.unwrap());
1610 return result;
1613 static AbortReason IonCompile(JSContext* cx, HandleScript script,
1614 jsbytecode* osrPc) {
1615 cx->check(script);
1617 auto alloc =
1618 cx->make_unique<LifoAlloc>(TempAllocator::PreferredLifoChunkSize);
1619 if (!alloc) {
1620 return AbortReason::Error;
1623 if (!cx->realm()->ensureJitRealmExists(cx)) {
1624 return AbortReason::Error;
1627 if (!cx->realm()->jitRealm()->ensureIonStubsExist(cx)) {
1628 return AbortReason::Error;
1631 TempAllocator* temp = alloc->new_<TempAllocator>(alloc.get());
1632 if (!temp) {
1633 return AbortReason::Alloc;
1636 MIRGraph* graph = alloc->new_<MIRGraph>(temp);
1637 if (!graph) {
1638 return AbortReason::Alloc;
1641 InlineScriptTree* inlineScriptTree =
1642 InlineScriptTree::New(temp, nullptr, nullptr, script);
1643 if (!inlineScriptTree) {
1644 return AbortReason::Alloc;
1647 CompileInfo* info = alloc->new_<CompileInfo>(
1648 CompileRuntime::get(cx->runtime()), script, script->function(), osrPc,
1649 script->needsArgsObj(), inlineScriptTree);
1650 if (!info) {
1651 return AbortReason::Alloc;
1654 const OptimizationInfo* optimizationInfo =
1655 IonOptimizations.get(OptimizationLevel::Normal);
1656 const JitCompileOptions options(cx);
1658 MIRGenerator* mirGen =
1659 alloc->new_<MIRGenerator>(CompileRealm::get(cx->realm()), options, temp,
1660 graph, info, optimizationInfo);
1661 if (!mirGen) {
1662 return AbortReason::Alloc;
1665 MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
1666 MOZ_ASSERT(!script->hasIonScript());
1667 MOZ_ASSERT(script->canIonCompile());
1669 if (osrPc) {
1670 script->jitScript()->setHadIonOSR();
1673 AbortReasonOr<WarpSnapshot*> result = CreateWarpSnapshot(cx, mirGen, script);
1674 if (result.isErr()) {
1675 return result.unwrapErr();
1677 WarpSnapshot* snapshot = result.unwrap();
1679 // If possible, compile the script off thread.
1680 if (options.offThreadCompilationAvailable()) {
1681 JitSpew(JitSpew_IonSyncLogs,
1682 "Can't log script %s:%u:%u"
1683 ". (Compiled on background thread.)",
1684 script->filename(), script->lineno(), script->column());
1686 IonCompileTask* task = alloc->new_<IonCompileTask>(cx, *mirGen, snapshot);
1687 if (!task) {
1688 return AbortReason::Alloc;
1691 AutoLockHelperThreadState lock;
1692 if (!StartOffThreadIonCompile(task, lock)) {
1693 JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
1694 mirGen->graphSpewer().endFunction();
1695 return AbortReason::Alloc;
1698 script->jitScript()->setIsIonCompilingOffThread(script);
1700 // The allocator and associated data will be destroyed after being
1701 // processed in the finishedOffThreadCompilations list.
1702 (void)alloc.release();
1704 return AbortReason::NoAbort;
1707 bool succeeded = false;
1709 gc::AutoSuppressGC suppressGC(cx);
1710 JitContext jctx(cx);
1711 UniquePtr<CodeGenerator> codegen(CompileBackEnd(mirGen, snapshot));
1712 if (!codegen) {
1713 JitSpew(JitSpew_IonAbort, "Failed during back-end compilation.");
1714 if (cx->isExceptionPending()) {
1715 return AbortReason::Error;
1717 return AbortReason::Disable;
1720 succeeded = LinkCodeGen(cx, codegen.get(), script, snapshot);
1723 if (succeeded) {
1724 return AbortReason::NoAbort;
1726 if (cx->isExceptionPending()) {
1727 return AbortReason::Error;
1729 return AbortReason::Disable;
1732 static bool CheckFrame(JSContext* cx, BaselineFrame* frame) {
1733 MOZ_ASSERT(!frame->isDebuggerEvalFrame());
1734 MOZ_ASSERT(!frame->isEvalFrame());
1736 // This check is to not overrun the stack.
1737 if (frame->isFunctionFrame()) {
1738 if (TooManyActualArguments(frame->numActualArgs())) {
1739 JitSpew(JitSpew_IonAbort, "too many actual arguments");
1740 return false;
1743 if (TooManyFormalArguments(frame->numFormalArgs())) {
1744 JitSpew(JitSpew_IonAbort, "too many arguments");
1745 return false;
1749 return true;
1752 static bool CanIonCompileOrInlineScript(JSScript* script, const char** reason) {
1753 if (script->isForEval()) {
1754 // Eval frames are not yet supported. Supporting this will require new
1755 // logic in pushBailoutFrame to deal with linking prev.
1756 // Additionally, JSOp::GlobalOrEvalDeclInstantiation support will require
1757 // baking in isEvalFrame().
1758 *reason = "eval script";
1759 return false;
1762 if (script->isAsync()) {
1763 if (script->isModule()) {
1764 *reason = "async module";
1765 return false;
1769 if (script->hasNonSyntacticScope() && !script->function()) {
1770 // Support functions with a non-syntactic global scope but not other
1771 // scripts. For global scripts, WarpBuilder currently uses the global
1772 // object as scope chain, this is not valid when the script has a
1773 // non-syntactic global scope.
1774 *reason = "has non-syntactic global scope";
1775 return false;
1778 return true;
1779 } // namespace jit
1781 static bool ScriptIsTooLarge(JSContext* cx, JSScript* script) {
1782 if (!JitOptions.limitScriptSize) {
1783 return false;
1786 size_t numLocalsAndArgs = NumLocalsAndArgs(script);
1788 bool canCompileOffThread = OffThreadCompilationAvailable(cx);
1789 size_t maxScriptSize = canCompileOffThread
1790 ? JitOptions.ionMaxScriptSize
1791 : JitOptions.ionMaxScriptSizeMainThread;
1792 size_t maxLocalsAndArgs = canCompileOffThread
1793 ? JitOptions.ionMaxLocalsAndArgs
1794 : JitOptions.ionMaxLocalsAndArgsMainThread;
1796 if (script->length() > maxScriptSize || numLocalsAndArgs > maxLocalsAndArgs) {
1797 JitSpew(JitSpew_IonAbort,
1798 "Script too large (%zu bytes) (%zu locals/args) @ %s:%u:%u",
1799 script->length(), numLocalsAndArgs, script->filename(),
1800 script->lineno(), script->column());
1801 return true;
1804 return false;
1807 bool CanIonCompileScript(JSContext* cx, JSScript* script) {
1808 if (!script->canIonCompile()) {
1809 return false;
1812 const char* reason = nullptr;
1813 if (!CanIonCompileOrInlineScript(script, &reason)) {
1814 JitSpew(JitSpew_IonAbort, "%s", reason);
1815 return false;
1818 if (ScriptIsTooLarge(cx, script)) {
1819 return false;
1822 return true;
1825 bool CanIonInlineScript(JSScript* script) {
1826 if (!script->canIonCompile()) {
1827 return false;
1830 const char* reason = nullptr;
1831 if (!CanIonCompileOrInlineScript(script, &reason)) {
1832 JitSpew(JitSpew_Inlining, "Cannot Ion compile script (%s)", reason);
1833 return false;
1836 return true;
1839 static MethodStatus Compile(JSContext* cx, HandleScript script,
1840 BaselineFrame* osrFrame, jsbytecode* osrPc) {
1841 MOZ_ASSERT(jit::IsIonEnabled(cx));
1842 MOZ_ASSERT(jit::IsBaselineJitEnabled(cx));
1844 MOZ_ASSERT(script->hasBaselineScript());
1845 MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
1846 MOZ_ASSERT(!script->hasIonScript());
1848 AutoGeckoProfilerEntry pseudoFrame(
1849 cx, "Ion script compilation",
1850 JS::ProfilingCategoryPair::JS_IonCompilation);
1852 if (script->isDebuggee() || (osrFrame && osrFrame->isDebuggee())) {
1853 JitSpew(JitSpew_IonAbort, "debugging");
1854 return Method_Skipped;
1857 if (!CanIonCompileScript(cx, script)) {
1858 JitSpew(JitSpew_IonAbort, "Aborted compilation of %s:%u:%u",
1859 script->filename(), script->lineno(), script->column());
1860 return Method_CantCompile;
1863 OptimizationLevel optimizationLevel =
1864 IonOptimizations.levelForScript(script, osrPc);
1865 if (optimizationLevel == OptimizationLevel::DontCompile) {
1866 return Method_Skipped;
1869 MOZ_ASSERT(optimizationLevel == OptimizationLevel::Normal);
1871 if (!CanLikelyAllocateMoreExecutableMemory()) {
1872 script->resetWarmUpCounterToDelayIonCompilation();
1873 return Method_Skipped;
1876 MOZ_ASSERT(!script->hasIonScript());
1878 AbortReason reason = IonCompile(cx, script, osrPc);
1879 if (reason == AbortReason::Error) {
1880 MOZ_ASSERT(cx->isExceptionPending());
1881 return Method_Error;
1884 if (reason == AbortReason::Disable) {
1885 return Method_CantCompile;
1888 if (reason == AbortReason::Alloc) {
1889 ReportOutOfMemory(cx);
1890 return Method_Error;
1893 // Compilation succeeded or we invalidated right away or an inlining/alloc
1894 // abort
1895 if (script->hasIonScript()) {
1896 return Method_Compiled;
1898 return Method_Skipped;
1901 } // namespace jit
1902 } // namespace js
1904 bool jit::OffThreadCompilationAvailable(JSContext* cx) {
1905 // Even if off thread compilation is enabled, compilation must still occur
1906 // on the main thread in some cases.
1908 // Require cpuCount > 1 so that Ion compilation jobs and active-thread
1909 // execution are not competing for the same resources.
1910 return cx->runtime()->canUseOffthreadIonCompilation() &&
1911 GetHelperThreadCPUCount() > 1 && CanUseExtraThreads();
1914 MethodStatus jit::CanEnterIon(JSContext* cx, RunState& state) {
1915 MOZ_ASSERT(jit::IsIonEnabled(cx));
1917 HandleScript script = state.script();
1918 MOZ_ASSERT(!script->hasIonScript());
1920 // Skip if the script has been disabled.
1921 if (!script->canIonCompile()) {
1922 return Method_Skipped;
1925 // Skip if the script is being compiled off thread.
1926 if (script->isIonCompilingOffThread()) {
1927 return Method_Skipped;
1930 if (state.isInvoke()) {
1931 InvokeState& invoke = *state.asInvoke();
1933 if (TooManyActualArguments(invoke.args().length())) {
1934 JitSpew(JitSpew_IonAbort, "too many actual args");
1935 ForbidCompilation(cx, script);
1936 return Method_CantCompile;
1939 if (TooManyFormalArguments(
1940 invoke.args().callee().as<JSFunction>().nargs())) {
1941 JitSpew(JitSpew_IonAbort, "too many args");
1942 ForbidCompilation(cx, script);
1943 return Method_CantCompile;
1947 // If --ion-eager is used, compile with Baseline first, so that we
1948 // can directly enter IonMonkey.
1949 if (JitOptions.eagerIonCompilation() && !script->hasBaselineScript()) {
1950 MethodStatus status =
1951 CanEnterBaselineMethod<BaselineTier::Compiler>(cx, state);
1952 if (status != Method_Compiled) {
1953 return status;
1955 // Bytecode analysis may forbid compilation for a script.
1956 if (!script->canIonCompile()) {
1957 return Method_CantCompile;
1961 if (!script->hasBaselineScript()) {
1962 return Method_Skipped;
1965 MOZ_ASSERT(!script->isIonCompilingOffThread());
1966 MOZ_ASSERT(script->canIonCompile());
1968 // Attempt compilation. Returns Method_Compiled if already compiled.
1969 MethodStatus status = Compile(cx, script, /* osrFrame = */ nullptr,
1970 /* osrPc = */ nullptr);
1971 if (status != Method_Compiled) {
1972 if (status == Method_CantCompile) {
1973 ForbidCompilation(cx, script);
1975 return status;
1978 if (state.script()->baselineScript()->hasPendingIonCompileTask()) {
1979 LinkIonScript(cx, state.script());
1980 if (!state.script()->hasIonScript()) {
1981 return jit::Method_Skipped;
1985 return Method_Compiled;
1988 static MethodStatus BaselineCanEnterAtEntry(JSContext* cx, HandleScript script,
1989 BaselineFrame* frame) {
1990 MOZ_ASSERT(jit::IsIonEnabled(cx));
1991 MOZ_ASSERT(script->canIonCompile());
1992 MOZ_ASSERT(!script->isIonCompilingOffThread());
1993 MOZ_ASSERT(!script->hasIonScript());
1994 MOZ_ASSERT(frame->isFunctionFrame());
1996 // Mark as forbidden if frame can't be handled.
1997 if (!CheckFrame(cx, frame)) {
1998 ForbidCompilation(cx, script);
1999 return Method_CantCompile;
2002 if (script->baselineScript()->hasPendingIonCompileTask()) {
2003 LinkIonScript(cx, script);
2004 if (script->hasIonScript()) {
2005 return Method_Compiled;
2009 // Attempt compilation. Returns Method_Compiled if already compiled.
2010 MethodStatus status = Compile(cx, script, frame, nullptr);
2011 if (status != Method_Compiled) {
2012 if (status == Method_CantCompile) {
2013 ForbidCompilation(cx, script);
2015 return status;
2018 return Method_Compiled;
2021 // Decide if a transition from baseline execution to Ion code should occur.
2022 // May compile or recompile the target JSScript.
2023 static MethodStatus BaselineCanEnterAtBranch(JSContext* cx, HandleScript script,
2024 BaselineFrame* osrFrame,
2025 jsbytecode* pc) {
2026 MOZ_ASSERT(jit::IsIonEnabled(cx));
2027 MOZ_ASSERT((JSOp)*pc == JSOp::LoopHead);
2029 // Skip if the script has been disabled.
2030 if (!script->canIonCompile()) {
2031 return Method_Skipped;
2034 // Skip if the script is being compiled off thread.
2035 if (script->isIonCompilingOffThread()) {
2036 return Method_Skipped;
2039 // Optionally ignore on user request.
2040 if (!JitOptions.osr) {
2041 return Method_Skipped;
2044 // Mark as forbidden if frame can't be handled.
2045 if (!CheckFrame(cx, osrFrame)) {
2046 ForbidCompilation(cx, script);
2047 return Method_CantCompile;
2050 // Check if the jitcode still needs to get linked and do this
2051 // to have a valid IonScript.
2052 if (script->baselineScript()->hasPendingIonCompileTask()) {
2053 LinkIonScript(cx, script);
2056 // By default a recompilation doesn't happen on osr mismatch.
2057 // Decide if we want to force a recompilation if this happens too much.
2058 if (script->hasIonScript()) {
2059 if (pc == script->ionScript()->osrPc()) {
2060 return Method_Compiled;
2063 uint32_t count = script->ionScript()->incrOsrPcMismatchCounter();
2064 if (count <= JitOptions.osrPcMismatchesBeforeRecompile &&
2065 !JitOptions.eagerIonCompilation()) {
2066 return Method_Skipped;
2069 JitSpew(JitSpew_IonScripts, "Forcing OSR Mismatch Compilation");
2070 Invalidate(cx, script);
2073 // Attempt compilation.
2074 // - Returns Method_Compiled if the right ionscript is present
2075 // (Meaning it was present or a sequantial compile finished)
2076 // - Returns Method_Skipped if pc doesn't match
2077 // (This means a background thread compilation with that pc could have
2078 // started or not.)
2079 MethodStatus status = Compile(cx, script, osrFrame, pc);
2080 if (status != Method_Compiled) {
2081 if (status == Method_CantCompile) {
2082 ForbidCompilation(cx, script);
2084 return status;
2087 // Return the compilation was skipped when the osr pc wasn't adjusted.
2088 // This can happen when there was still an IonScript available and a
2089 // background compilation started, but hasn't finished yet.
2090 // Or when we didn't force a recompile.
2091 if (script->hasIonScript() && pc != script->ionScript()->osrPc()) {
2092 return Method_Skipped;
2095 return Method_Compiled;
2098 static bool IonCompileScriptForBaseline(JSContext* cx, BaselineFrame* frame,
2099 jsbytecode* pc) {
2100 MOZ_ASSERT(IsIonEnabled(cx));
2102 RootedScript script(cx, frame->script());
2103 bool isLoopHead = JSOp(*pc) == JSOp::LoopHead;
2105 // The Baseline JIT code checks for Ion disabled or compiling off-thread.
2106 MOZ_ASSERT(script->canIonCompile());
2107 MOZ_ASSERT(!script->isIonCompilingOffThread());
2109 // If Ion script exists, but PC is not at a loop entry, then Ion will be
2110 // entered for this script at an appropriate LOOPENTRY or the next time this
2111 // function is called.
2112 if (script->hasIonScript() && !isLoopHead) {
2113 JitSpew(JitSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
2114 // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
2115 // TODO: Clear all optimized stubs.
2116 // TODO: Add a ion-script-already-exists checker stub.
2117 return true;
2120 // Ensure that Ion-compiled code is available.
2121 JitSpew(JitSpew_BaselineOSR,
2122 "WarmUpCounter for %s:%u:%u reached %d at pc %p, trying to switch to "
2123 "Ion!",
2124 script->filename(), script->lineno(), script->column(),
2125 (int)script->getWarmUpCount(), (void*)pc);
2127 MethodStatus stat;
2128 if (isLoopHead) {
2129 JitSpew(JitSpew_BaselineOSR, " Compile at loop head!");
2130 stat = BaselineCanEnterAtBranch(cx, script, frame, pc);
2131 } else if (frame->isFunctionFrame()) {
2132 JitSpew(JitSpew_BaselineOSR,
2133 " Compile function from top for later entry!");
2134 stat = BaselineCanEnterAtEntry(cx, script, frame);
2135 } else {
2136 return true;
2139 if (stat == Method_Error) {
2140 JitSpew(JitSpew_BaselineOSR, " Compile with Ion errored!");
2141 return false;
2144 if (stat == Method_CantCompile) {
2145 MOZ_ASSERT(!script->canIonCompile());
2146 JitSpew(JitSpew_BaselineOSR, " Can't compile with Ion!");
2147 } else if (stat == Method_Skipped) {
2148 JitSpew(JitSpew_BaselineOSR, " Skipped compile with Ion!");
2149 } else if (stat == Method_Compiled) {
2150 JitSpew(JitSpew_BaselineOSR, " Compiled with Ion!");
2151 } else {
2152 MOZ_CRASH("Invalid MethodStatus!");
2155 return true;
2158 bool jit::IonCompileScriptForBaselineAtEntry(JSContext* cx,
2159 BaselineFrame* frame) {
2160 JSScript* script = frame->script();
2161 return IonCompileScriptForBaseline(cx, frame, script->code());
2164 /* clang-format off */
2165 // The following data is kept in a temporary heap-allocated buffer, stored in
2166 // JitRuntime (high memory addresses at top, low at bottom):
2168 // +----->+=================================+ -- <---- High Address
2169 // | | | |
2170 // | | ...BaselineFrame... | |-- Copy of BaselineFrame + stack values
2171 // | | | |
2172 // | +---------------------------------+ |
2173 // | | | |
2174 // | | ...Locals/Stack... | |
2175 // | | | |
2176 // | +=================================+ --
2177 // | | Padding(Maybe Empty) |
2178 // | +=================================+ --
2179 // +------|-- baselineFrame | |-- IonOsrTempData
2180 // | jitcode | |
2181 // +=================================+ -- <---- Low Address
2183 // A pointer to the IonOsrTempData is returned.
2184 /* clang-format on */
// Allocate and populate the temporary OSR buffer described by the diagram
// above: an IonOsrTempData header (jitcode pointer + frame pointer) followed
// by a copy of the BaselineFrame and its local/stack Values. Returns nullptr
// after reporting OOM if the JitRuntime buffer allocation fails.
2186 static IonOsrTempData* PrepareOsrTempData(JSContext* cx, BaselineFrame* frame,
2187 uint32_t frameSize, void* jitcode) {
2188 uint32_t numValueSlots = frame->numValueSlots(frameSize);
2190 // Calculate the amount of space to allocate:
2191 // BaselineFrame space:
2192 // (sizeof(Value) * numValueSlots)
2193 // + sizeof(BaselineFrame)
2195 // IonOsrTempData space:
2196 // sizeof(IonOsrTempData)
2198 size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numValueSlots;
2199 size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);
// Value-align each sub-region so the frame data that follows the
// IonOsrTempData header starts on a Value boundary.
2201 size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
2202 AlignBytes(ionOsrTempDataSpace, sizeof(Value));
2204 JitRuntime* jrt = cx->runtime()->jitRuntime();
2205 uint8_t* buf = jrt->allocateIonOsrTempData(totalSpace);
2206 if (!buf) {
2207 ReportOutOfMemory(cx);
2208 return nullptr;
// Placement-new the header at the low end of the buffer; the copied frame
// data is laid out after it.
2211 IonOsrTempData* info = new (buf) IonOsrTempData();
2212 info->jitcode = jitcode;
2214 // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
2215 // |this| are not copied but left on the stack: the Baseline and Ion frame
2216 // share the same frame prefix and Ion won't clobber these values. Note
2217 // that info->baselineFrame will point to the *end* of the frame data, like
2218 // the frame pointer register in baseline frames.
2219 uint8_t* frameStart =
2220 (uint8_t*)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
2221 info->baselineFrame = frameStart + frameSpace;
2223 memcpy(frameStart, (uint8_t*)frame - numValueSlots * sizeof(Value),
2224 frameSpace);
2226 JitSpew(JitSpew_BaselineOSR, "Allocated IonOsrTempData at %p", info);
2227 JitSpew(JitSpew_BaselineOSR, "Jitcode is %p", info->jitcode);
2229 // All done.
2230 return info;
// Attempt on-stack replacement from Baseline into Ion at the LoopHead |pc|.
// On success with a usable IonScript, *infoPtr receives the prepared
// IonOsrTempData; *infoPtr stays null when OSR is not possible (still a
// successful |true| return). Returns false only on compile error or OOM.
2233 bool jit::IonCompileScriptForBaselineOSR(JSContext* cx, BaselineFrame* frame,
2234 uint32_t frameSize, jsbytecode* pc,
2235 IonOsrTempData** infoPtr) {
2236 MOZ_ASSERT(infoPtr);
2237 *infoPtr = nullptr;
2239 MOZ_ASSERT(frame->debugFrameSize() == frameSize);
2240 MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
2242 if (!IonCompileScriptForBaseline(cx, frame, pc)) {
2243 return false;
// The compile may have been skipped or targeted a different entry; only
// proceed when an IonScript with a matching OSR pc exists and the frame is
// not being debugged.
2246 RootedScript script(cx, frame->script());
2247 if (!script->hasIonScript() || script->ionScript()->osrPc() != pc ||
2248 frame->isDebuggee()) {
2249 return true;
2252 IonScript* ion = script->ionScript();
2253 MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled() ==
2254 ion->hasProfilingInstrumentation());
2255 MOZ_ASSERT(ion->osrPc() == pc);
2257 ion->resetOsrPcMismatchCounter();
2259 JitSpew(JitSpew_BaselineOSR, " OSR possible!");
// Entry point is the OSR entry inside the compiled method, not its start.
2260 void* jitcode = ion->method()->raw() + ion->osrEntryOffset();
2262 // Prepare the temporary heap copy of the fake InterpreterFrame and actual
2263 // args list.
2264 JitSpew(JitSpew_BaselineOSR, "Got jitcode. Preparing for OSR into ion.");
2265 IonOsrTempData* info = PrepareOsrTempData(cx, frame, frameSize, jitcode);
2266 if (!info) {
2267 return false;
2270 *infoPtr = info;
2271 return true;
// Walk every JS JIT frame in |activations| and, for each Ion-scripted frame
// whose IonScript is flagged as invalidated (or for all of them when
// |invalidateAll| is set), bump the IonScript's invalidation refcount and
// patch its OSI point so the frame returns through the invalidation
// epilogue. Under JS_JITSPEW every frame encountered is logged.
// NOTE(review): |gcx| is not referenced in this body as visible here —
// presumably kept for signature symmetry with callers; confirm.
2274 static void InvalidateActivation(JS::GCContext* gcx,
2275 const JitActivationIterator& activations,
2276 bool invalidateAll) {
2277 JitSpew(JitSpew_IonInvalidate, "BEGIN invalidating activation");
2279 #ifdef CHECK_OSIPOINT_REGISTERS
2280 if (JitOptions.checkOsiPointRegisters) {
2281 activations->asJit()->setCheckRegs(false);
2283 #endif
2285 size_t frameno = 1;
2287 for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter, ++frameno) {
2288 const JSJitFrameIter& frame = iter.frame();
2289 MOZ_ASSERT_IF(frameno == 1, frame.isExitFrame() ||
2290 frame.type() == FrameType::Bailout ||
2291 frame.type() == FrameType::JSJitToWasm);
2293 #ifdef JS_JITSPEW
2294 switch (frame.type()) {
2295 case FrameType::Exit:
2296 JitSpew(JitSpew_IonInvalidate, "#%zu exit frame @ %p", frameno,
2297 frame.fp());
2298 break;
2299 case FrameType::JSJitToWasm:
2300 JitSpew(JitSpew_IonInvalidate, "#%zu wasm exit frame @ %p", frameno,
2301 frame.fp());
2302 break;
2303 case FrameType::BaselineJS:
2304 case FrameType::IonJS:
2305 case FrameType::Bailout: {
2306 MOZ_ASSERT(frame.isScripted());
2307 const char* type = "Unknown";
2308 if (frame.isIonJS()) {
2309 type = "Optimized";
2310 } else if (frame.isBaselineJS()) {
2311 type = "Baseline";
2312 } else if (frame.isBailoutJS()) {
2313 type = "Bailing";
2315 JSScript* script = frame.maybeForwardedScript();
2316 JitSpew(JitSpew_IonInvalidate,
2317 "#%zu %s JS frame @ %p, %s:%u:%u (fun: %p, script: %p, pc %p)",
2318 frameno, type, frame.fp(), script->maybeForwardedFilename(),
2319 script->lineno(), script->column(), frame.maybeCallee(), script,
2320 frame.resumePCinCurrentFrame());
2321 break;
2323 case FrameType::BaselineStub:
2324 JitSpew(JitSpew_IonInvalidate, "#%zu baseline stub frame @ %p", frameno,
2325 frame.fp());
2326 break;
2327 case FrameType::BaselineInterpreterEntry:
2328 JitSpew(JitSpew_IonInvalidate,
2329 "#%zu baseline interpreter entry frame @ %p", frameno,
2330 frame.fp());
2331 break;
2332 case FrameType::Rectifier:
2333 JitSpew(JitSpew_IonInvalidate, "#%zu rectifier frame @ %p", frameno,
2334 frame.fp());
2335 break;
2336 case FrameType::IonICCall:
2337 JitSpew(JitSpew_IonInvalidate, "#%zu ion IC call frame @ %p", frameno,
2338 frame.fp());
2339 break;
2340 case FrameType::CppToJSJit:
2341 JitSpew(JitSpew_IonInvalidate, "#%zu entry frame @ %p", frameno,
2342 frame.fp());
2343 break;
2344 case FrameType::WasmToJSJit:
2345 JitSpew(JitSpew_IonInvalidate, "#%zu wasm frames @ %p", frameno,
2346 frame.fp());
2347 break;
2349 #endif // JS_JITSPEW
// Only Ion-scripted frames can need OSI patching; everything below
// filters down to frames whose IonScript actually requires it.
2351 if (!frame.isIonScripted()) {
2352 continue;
2355 // See if the frame has already been invalidated.
2356 if (frame.checkInvalidation()) {
2357 continue;
2360 JSScript* script = frame.maybeForwardedScript();
2361 if (!script->hasIonScript()) {
2362 continue;
2365 if (!invalidateAll && !script->ionScript()->invalidated()) {
2366 continue;
2369 IonScript* ionScript = script->ionScript();
2371 // Purge ICs before we mark this script as invalidated. This will
2372 // prevent lastJump_ from appearing to be a bogus pointer, just
2373 // in case anyone tries to read it.
2374 ionScript->purgeICs(script->zone());
2376 // This frame needs to be invalidated. We do the following:
2378 // 1. Increment the reference counter to keep the ionScript alive
2379 // for the invalidation bailout or for the exception handler.
2380 // 2. Determine safepoint that corresponds to the current call.
2381 // 3. From safepoint, get distance to the OSI-patchable offset.
2382 // 4. From the IonScript, determine the distance between the
2383 // call-patchable offset and the invalidation epilogue.
2384 // 5. Patch the OSI point with a call-relative to the
2385 // invalidation epilogue.
2387 // The code generator ensures that there's enough space for us
2388 // to patch in a call-relative operation at each invalidation
2389 // point.
2391 // Note: you can't simplify this mechanism to "just patch the
2392 // instruction immediately after the call" because things may
2393 // need to move into a well-defined register state (using move
2394 // instructions after the call) in to capture an appropriate
2395 // snapshot after the call occurs.
2397 ionScript->incrementInvalidationCount();
2399 JitCode* ionCode = ionScript->method();
2401 // We're about to remove edges from the JSScript to GC things embedded in
2402 // the JitCode. Perform a barrier to let the GC know about those edges.
2403 PreWriteBarrier(script->zone(), ionCode, [](JSTracer* trc, JitCode* code) {
2404 code->traceChildren(trc);
2407 ionCode->setInvalidated();
2409 // Don't adjust OSI points in a bailout path.
2410 if (frame.isBailoutJS()) {
2411 continue;
2414 // Write the delta (from the return address offset to the
2415 // IonScript pointer embedded into the invalidation epilogue)
2416 // where the safepointed call instruction used to be. We rely on
2417 // the call sequence causing the safepoint being >= the size of
2418 // a uint32, which is checked during safepoint index
2419 // construction.
2420 AutoWritableJitCode awjc(ionCode);
2421 const SafepointIndex* si =
2422 ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
2423 CodeLocationLabel dataLabelToMunge(frame.resumePCinCurrentFrame());
2424 ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -
2425 (frame.resumePCinCurrentFrame() - ionCode->raw());
2426 Assembler::PatchWrite_Imm32(dataLabelToMunge, Imm32(delta));
2428 CodeLocationLabel osiPatchPoint =
2429 SafepointReader::InvalidationPatchPoint(ionScript, si);
2430 CodeLocationLabel invalidateEpilogue(
2431 ionCode, CodeOffset(ionScript->invalidateEpilogueOffset()));
2433 JitSpew(
2434 JitSpew_IonInvalidate,
2435 " ! Invalidate ionScript %p (inv count %zu) -> patching osipoint %p",
2436 ionScript, ionScript->invalidationCount(), (void*)osiPatchPoint.raw());
2437 Assembler::PatchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
2440 JitSpew(JitSpew_IonInvalidate, "END invalidating activation");
// Invalidate every Ion frame on the stack belonging to |zone|, patching all
// frames unconditionally (invalidateAll=true). Off-thread Ion compilation
// must already have been cancelled by the caller (asserted in DEBUG builds).
// The atoms zone is skipped.
2443 void jit::InvalidateAll(JS::GCContext* gcx, Zone* zone) {
2444 // The caller should previously have cancelled off thread compilation.
2445 #ifdef DEBUG
2446 for (RealmsInZoneIter realm(zone); !realm.done(); realm.next()) {
2447 MOZ_ASSERT(!HasOffThreadIonCompile(realm));
2449 #endif
2450 if (zone->isAtomsZone()) {
2451 return;
2453 JSContext* cx = TlsContext.get();
2454 for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
2455 if (iter->compartment()->zone() == zone) {
2456 JitSpew(JitSpew_IonInvalidate, "Invalidating all frames for GC");
2457 InvalidateActivation(gcx, iter, true);
// Detach |ionScript| from |script|'s JitScript after invalidation. Does not
// free the IonScript itself; destruction happens via the invalidation
// refcount. Optionally resets the warm-up counter so the script must warm up
// again before being recompiled.
2462 static void ClearIonScriptAfterInvalidation(JSContext* cx, JSScript* script,
2463 IonScript* ionScript,
2464 bool resetUses) {
2465 // Null out the JitScript's IonScript pointer. The caller is responsible for
2466 // destroying the IonScript using the invalidation count mechanism.
2467 DebugOnly<IonScript*> clearedIonScript =
2468 script->jitScript()->clearIonScript(cx->gcContext(), script);
2469 MOZ_ASSERT(clearedIonScript == ionScript);
2471 // Wait for the scripts to get warm again before doing another
2472 // compile, unless we are recompiling *because* a script got hot
2473 // (resetUses is false).
2474 if (resetUses) {
2475 script->resetWarmUpCounterToDelayIonCompilation();
// Invalidate the IonScripts named in |invalid|, in three phases:
// 1) add an invalidation reference to each (optionally cancelling any
//    off-thread compile of the script),
// 2) walk all JIT activations and patch OSI points of affected frames,
// 3) drop the references added in (1), destroying IonScripts with no frames
//    left on the stack and nulling jitScript->ionScript_ for the rest.
2479 void jit::Invalidate(JSContext* cx, const RecompileInfoVector& invalid,
2480 bool resetUses, bool cancelOffThread) {
2481 JitSpew(JitSpew_IonInvalidate, "Start invalidation.");
2483 // Add an invalidation reference to all invalidated IonScripts to indicate
2484 // to the traversal which frames have been invalidated.
2485 size_t numInvalidations = 0;
2486 for (const RecompileInfo& info : invalid) {
2487 if (cancelOffThread) {
2488 CancelOffThreadIonCompile(info.script());
2491 IonScript* ionScript = info.maybeIonScriptToInvalidate();
2492 if (!ionScript) {
2493 continue;
2496 JitSpew(JitSpew_IonInvalidate, " Invalidate %s:%u:%u, IonScript %p",
2497 info.script()->filename(), info.script()->lineno(),
2498 info.script()->column(), ionScript);
2500 // Keep the ion script alive during the invalidation and flag this
2501 // ionScript as being invalidated. This increment is removed by the
2502 // loop after the calls to InvalidateActivation.
2503 ionScript->incrementInvalidationCount();
2504 numInvalidations++;
2507 if (!numInvalidations) {
2508 JitSpew(JitSpew_IonInvalidate, " No IonScript invalidation.");
2509 return;
2512 JS::GCContext* gcx = cx->gcContext();
2513 for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
2514 InvalidateActivation(gcx, iter, false);
2517 // Drop the references added above. If a script was never active, its
2518 // IonScript will be immediately destroyed. Otherwise, it will be held live
2519 // until its last invalidated frame is destroyed.
2520 for (const RecompileInfo& info : invalid) {
2521 IonScript* ionScript = info.maybeIonScriptToInvalidate();
2522 if (!ionScript) {
2523 continue;
2526 if (ionScript->invalidationCount() == 1) {
2527 // decrementInvalidationCount will destroy the IonScript so null out
2528 // jitScript->ionScript_ now. We don't want to do this unconditionally
2529 // because maybeIonScriptToInvalidate depends on script->ionScript() (we
2530 // would leak the IonScript if |invalid| contains duplicates).
2531 ClearIonScriptAfterInvalidation(cx, info.script(), ionScript, resetUses);
2534 ionScript->decrementInvalidationCount(gcx);
2535 numInvalidations--;
2538 // Make sure we didn't leak references by invalidating the same IonScript
2539 // multiple times in the above loop.
2540 MOZ_ASSERT(!numInvalidations);
2542 // Finally, null out jitScript->ionScript_ for IonScripts that are still on
2543 // the stack.
2544 for (const RecompileInfo& info : invalid) {
2545 if (IonScript* ionScript = info.maybeIonScriptToInvalidate()) {
2546 ClearIonScriptAfterInvalidation(cx, info.script(), ionScript, resetUses);
// Invalidate this single IonScript by delegating to the vector-based
// jit::Invalidate (cancelOffThread=true), which also cancels any off-thread
// compilation of |script|. |reason| is only used for JitSpew logging.
2551 void jit::IonScript::invalidate(JSContext* cx, JSScript* script, bool resetUses,
2552 const char* reason) {
2553 // Note: we could short circuit here if we already invalidated this
2554 // IonScript, but jit::Invalidate also cancels off-thread compilations of
2555 // |script|.
2556 MOZ_RELEASE_ASSERT(invalidated() || script->ionScript() == this);
2558 JitSpew(JitSpew_IonInvalidate, " Invalidate IonScript %p: %s", this, reason);
2560 // RecompileInfoVector has inline space for at least one element.
2561 RecompileInfoVector list;
2562 MOZ_RELEASE_ASSERT(list.reserve(1));
2563 list.infallibleEmplaceBack(script, compilationId());
2565 Invalidate(cx, list, resetUses, true);
// Invalidate the Ion code of a single |script| (which must have an
// IonScript). When the Gecko profiler is enabled, first records an
// "Invalidate" marker labelled "<filename>:<lineno>:<column>"; then forwards
// to the vector-based jit::Invalidate.
2568 void jit::Invalidate(JSContext* cx, JSScript* script, bool resetUses,
2569 bool cancelOffThread) {
2570 MOZ_ASSERT(script->hasIonScript());
2572 if (cx->runtime()->geckoProfiler().enabled()) {
2573 // Register invalidation with profiler.
2574 // Format of event payload string:
2575 // "<filename>:<lineno>"
2577 // Get the script filename, if any, and its length.
2578 const char* filename = script->filename();
2579 if (filename == nullptr) {
2580 filename = "<unknown>";
2583 // Construct the descriptive string.
2584 UniqueChars buf =
2585 JS_smprintf("%s:%u:%u", filename, script->lineno(), script->column());
2587 // Ignore the event on allocation failure.
2588 if (buf) {
2589 cx->runtime()->geckoProfiler().markEvent("Invalidate", buf.get());
2593 // RecompileInfoVector has inline space for at least one element.
2594 RecompileInfoVector scripts;
2595 MOZ_ASSERT(script->hasIonScript());
2596 MOZ_RELEASE_ASSERT(scripts.reserve(1));
2597 scripts.infallibleEmplaceBack(script, script->ionScript()->compilationId());
2599 Invalidate(cx, scripts, resetUses, cancelOffThread);
// Detach |script|'s IonScript and destroy it immediately unless frames using
// it are still on the stack (invalidated() true), in which case destruction
// is deferred to the invalidation refcount mechanism. No-op if the script
// has no IonScript.
2602 void jit::FinishInvalidation(JS::GCContext* gcx, JSScript* script) {
2603 if (!script->hasIonScript()) {
2604 return;
2607 // In all cases, null out jitScript->ionScript_ to avoid re-entry.
2608 IonScript* ion = script->jitScript()->clearIonScript(gcx, script);
2610 // If this script has Ion code on the stack, invalidated() will return
2611 // true. In this case we have to wait until destroying it.
2612 if (!ion->invalidated()) {
2613 jit::IonScript::Destroy(gcx, ion);
// Permanently disable Ion compilation for |script|: cancel any off-thread
// compile, invalidate existing Ion code (without resetting warm-up counts),
// and set the script's ion-disabled flag.
2617 void jit::ForbidCompilation(JSContext* cx, JSScript* script) {
2618 JitSpew(JitSpew_IonAbort, "Disabling Ion compilation of script %s:%u:%u",
2619 script->filename(), script->lineno(), script->column());
2621 CancelOffThreadIonCompile(script);
2623 if (script->hasIonScript()) {
2624 Invalidate(cx, script, false);
2627 script->disableIon();
// Memory-reporting helper: returns the malloc-measured size of |script|'s
// IonScript (including the IonScript itself), or 0 if it has none.
2630 size_t jit::SizeOfIonData(JSScript* script,
2631 mozilla::MallocSizeOf mallocSizeOf) {
2632 size_t result = 0;
2634 if (script->hasIonScript()) {
2635 result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
2638 return result;
// Out-of-line definitions of TempAllocator's allocation tuning constants:
// 16 KiB of ballast and a 32 KiB preferred LifoAlloc chunk size.
2641 // If you change these, please also change the comment in TempAllocator.
2642 /* static */ const size_t TempAllocator::BallastSize = 16 * 1024;
2643 /* static */ const size_t TempAllocator::PreferredLifoChunkSize = 32 * 1024;