/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "jit/Ion.h"

#include "mozilla/CheckedInt.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerPrintfMacros.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/ThreadLocal.h"

#include "gc/GCContext.h"
#include "gc/PublicIterators.h"
#include "jit/AliasAnalysis.h"
#include "jit/AlignmentMaskAnalysis.h"
#include "jit/AutoWritableJitCode.h"
#include "jit/BacktrackingAllocator.h"
#include "jit/BaselineFrame.h"
#include "jit/BaselineJIT.h"
#include "jit/CodeGenerator.h"
#include "jit/CompileInfo.h"
#include "jit/EdgeCaseAnalysis.h"
#include "jit/EffectiveAddressAnalysis.h"
#include "jit/ExecutableAllocator.h"
#include "jit/FoldLinearArithConstants.h"
#include "jit/InlineScriptTree.h"
#include "jit/InstructionReordering.h"
#include "jit/Invalidation.h"
#include "jit/IonAnalysis.h"
#include "jit/IonCompileTask.h"
#include "jit/IonIC.h"
#include "jit/IonOptimizationLevels.h"
#include "jit/IonScript.h"
#include "jit/JitcodeMap.h"
#include "jit/JitFrames.h"
#include "jit/JitRuntime.h"
#include "jit/JitSpewer.h"
#include "jit/JitZone.h"
#include "jit/LICM.h"
#include "jit/Linker.h"
#include "jit/LIR.h"
#include "jit/Lowering.h"
#include "jit/PerfSpewer.h"
#include "jit/RangeAnalysis.h"
#include "jit/ScalarReplacement.h"
#include "jit/ScriptFromCalleeToken.h"
#include "jit/Sink.h"
#include "jit/ValueNumbering.h"
#include "jit/WarpBuilder.h"
#include "jit/WarpOracle.h"
#include "jit/WasmBCE.h"
#include "js/Printf.h"
#include "js/UniquePtr.h"
#include "util/Memory.h"
#include "util/WindowsWrapper.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#ifdef MOZ_VTUNE
#  include "vtune/VTuneWrapper.h"
#endif

#include "gc/GC-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "jit/InlineScriptTree-inl.h"
#include "jit/MacroAssembler-inl.h"
#include "jit/SafepointIndex-inl.h"
#include "vm/GeckoProfiler-inl.h"
#include "vm/JSContext-inl.h"
#include "vm/JSScript-inl.h"
#include "vm/Realm-inl.h"

#if defined(ANDROID)
#  include <sys/system_properties.h>
#endif

using mozilla::CheckedInt;
using mozilla::DebugOnly;

using namespace js;
using namespace js::jit;
JitRuntime::~JitRuntime() {
  MOZ_ASSERT(numFinishedOffThreadTasks_ == 0);
  MOZ_ASSERT(ionLazyLinkListSize_ == 0);
  MOZ_ASSERT(ionLazyLinkList_.ref().isEmpty());

  MOZ_ASSERT(ionFreeTaskBatch_.ref().empty());

  // By this point, the jitcode global table should be empty.
  MOZ_ASSERT_IF(jitcodeGlobalTable_, jitcodeGlobalTable_->empty());
  js_delete(jitcodeGlobalTable_.ref());

  // interpreterEntryMap should be cleared out during finishRoots().
  MOZ_ASSERT_IF(interpreterEntryMap_, interpreterEntryMap_->empty());
  js_delete(interpreterEntryMap_.ref());

  js_delete(jitHintsMap_.ref());
}
uint32_t JitRuntime::startTrampolineCode(MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "startTrampolineCode");

  masm.assumeUnreachable("Shouldn't get here");
  masm.flushBuffer();
  masm.haltingAlign(CodeAlignment);
  masm.setFramePushed(0);
  return masm.currentOffset();
}
bool JitRuntime::initialize(JSContext* cx) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));

  AutoAllocInAtomsZone az(cx);
  JitContext jctx(cx);

  if (!generateTrampolines(cx)) {
    return false;
  }

  if (!generateBaselineICFallbackCode(cx)) {
    return false;
  }

  jitcodeGlobalTable_ = cx->new_<JitcodeGlobalTable>();
  if (!jitcodeGlobalTable_) {
    return false;
  }

  if (!JitOptions.disableJitHints) {
    jitHintsMap_ = cx->new_<JitHintsMap>();
    if (!jitHintsMap_) {
      return false;
    }
  }

  if (JitOptions.emitInterpreterEntryTrampoline) {
    interpreterEntryMap_ = cx->new_<EntryTrampolineMap>();
    if (!interpreterEntryMap_) {
      return false;
    }
  }

  if (!GenerateBaselineInterpreter(cx, baselineInterpreter_)) {
    return false;
  }

  // Initialize the jitCodeRaw of the Runtime's canonical SelfHostedLazyScript
  // to point to the interpreter trampoline.
  cx->runtime()->selfHostedLazyScript.ref().jitCodeRaw_ =
      interpreterStub().value;

  return true;
}
bool JitRuntime::generateTrampolines(JSContext* cx) {
  TempAllocator temp(&cx->tempLifoAlloc());
  StackMacroAssembler masm(cx, temp);
  PerfSpewerRangeRecorder rangeRecorder(masm);

  Label bailoutTail;
  JitSpew(JitSpew_Codegen, "# Emitting bailout tail stub");
  generateBailoutTailStub(masm, &bailoutTail);

  JitSpew(JitSpew_Codegen, "# Emitting bailout handler");
  generateBailoutHandler(masm, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: Bailout");

  JitSpew(JitSpew_Codegen, "# Emitting invalidator");
  generateInvalidator(masm, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: Invalidator");

  // The arguments rectifier has to use the same frame layout as the function
  // frames it rectifies.
  static_assert(std::is_base_of_v<JitFrameLayout, RectifierFrameLayout>,
                "a rectifier frame can be used with jit frame");
  static_assert(std::is_base_of_v<JitFrameLayout, WasmToJSJitFrameLayout>,
                "wasm frames simply are jit frames");
  static_assert(sizeof(JitFrameLayout) == sizeof(WasmToJSJitFrameLayout),
                "thus a rectifier frame can be used with a wasm frame");

  JitSpew(JitSpew_Codegen, "# Emitting arguments rectifier");
  generateArgumentsRectifier(masm, ArgumentsRectifierKind::Normal);
  rangeRecorder.recordOffset("Trampoline: Arguments Rectifier");

  JitSpew(JitSpew_Codegen, "# Emitting trial inlining arguments rectifier");
  generateArgumentsRectifier(masm, ArgumentsRectifierKind::TrialInlining);
  rangeRecorder.recordOffset(
      "Trampoline: Arguments Rectifier (Trial Inlining)");

  JitSpew(JitSpew_Codegen, "# Emitting EnterJIT sequence");
  generateEnterJIT(cx, masm);
  rangeRecorder.recordOffset("Trampoline: EnterJIT");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Value");
  valuePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Value);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Value");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for String");
  stringPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::String);
  rangeRecorder.recordOffset("Trampoline: PreBarrier String");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Object");
  objectPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Object);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Object");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Shape");
  shapePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Shape);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Shape");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for WasmAnyRef");
  wasmAnyRefPreBarrierOffset_ =
      generatePreBarrier(cx, masm, MIRType::WasmAnyRef);
  rangeRecorder.recordOffset("Trampoline: PreBarrier WasmAnyRef");

  JitSpew(JitSpew_Codegen, "# Emitting free stub");
  generateFreeStub(masm);
  rangeRecorder.recordOffset("Trampoline: FreeStub");

  JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
  generateLazyLinkStub(masm);
  rangeRecorder.recordOffset("Trampoline: LazyLinkStub");

  JitSpew(JitSpew_Codegen, "# Emitting interpreter stub");
  generateInterpreterStub(masm);
  rangeRecorder.recordOffset("Trampoline: Interpreter");

  JitSpew(JitSpew_Codegen, "# Emitting double-to-int32-value stub");
  generateDoubleToInt32ValueStub(masm);
  rangeRecorder.recordOffset("Trampoline: DoubleToInt32ValueStub");

  JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
  if (!generateVMWrappers(cx, masm, rangeRecorder)) {
    return false;
  }

  JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
  Label profilerExitTail;
  generateProfilerExitFrameTailStub(masm, &profilerExitTail);
  rangeRecorder.recordOffset("Trampoline: ProfilerExitFrameTailStub");

  JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
  generateExceptionTailStub(masm, &profilerExitTail, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: ExceptionTailStub");

  JitSpew(JitSpew_Codegen, "# Emitting Ion generic call stub");
  generateIonGenericCallStub(masm, IonGenericCallKind::Call);
  rangeRecorder.recordOffset("Trampoline: IonGenericCall");

  JitSpew(JitSpew_Codegen, "# Emitting Ion generic construct stub");
  generateIonGenericCallStub(masm, IonGenericCallKind::Construct);
  rangeRecorder.recordOffset("Trampoline: IonGenericConstruct");

  JitSpew(JitSpew_Codegen, "# Emitting trampoline natives");
  TrampolineNativeJitEntryOffsets nativeOffsets;
  generateTrampolineNatives(masm, nativeOffsets, rangeRecorder);

  Linker linker(masm);
  trampolineCode_ = linker.newCode(cx, CodeKind::Other);
  if (!trampolineCode_) {
    return false;
  }

  rangeRecorder.collectRangesForJitCode(trampolineCode_);
#ifdef MOZ_VTUNE
  vtune::MarkStub(trampolineCode_, "Trampolines");
#endif

  // Initialize the TrampolineNative JitEntry array.
  for (size_t i = 0; i < size_t(TrampolineNative::Count); i++) {
    TrampolineNative native = TrampolineNative(i);
    uint32_t offset = nativeOffsets[native];
    MOZ_ASSERT(offset > 0 && offset < trampolineCode_->instructionsSize());
    trampolineNativeJitEntries_[native] = trampolineCode_->raw() + offset;
  }

  return true;
}
JitCode* JitRuntime::debugTrapHandler(JSContext* cx,
                                      DebugTrapHandlerKind kind) {
  if (!debugTrapHandlers_[kind]) {
    // JitRuntime code stubs are shared across compartments and have to
    // be allocated in the atoms zone.
    mozilla::Maybe<AutoAllocInAtomsZone> az;
    if (!cx->zone()->isAtomsZone()) {
      az.emplace(cx);
    }
    debugTrapHandlers_[kind] = generateDebugTrapHandler(cx, kind);
  }
  return debugTrapHandlers_[kind];
}
JitRuntime::IonCompileTaskList& JitRuntime::ionLazyLinkList(JSRuntime* rt) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  return ionLazyLinkList_.ref();
}

void JitRuntime::ionLazyLinkListRemove(JSRuntime* rt,
                                       jit::IonCompileTask* task) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
  MOZ_ASSERT(ionLazyLinkListSize_ > 0);

  task->removeFrom(ionLazyLinkList(rt));
  ionLazyLinkListSize_--;

  MOZ_ASSERT(ionLazyLinkList(rt).isEmpty() == (ionLazyLinkListSize_ == 0));
}

void JitRuntime::ionLazyLinkListAdd(JSRuntime* rt, jit::IonCompileTask* task) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
  ionLazyLinkList(rt).insertFront(task);
  ionLazyLinkListSize_++;
}
uint8_t* JitRuntime::allocateIonOsrTempData(size_t size) {
  MOZ_ASSERT(size > 0);

  uint8_t* prevBuffer = ionOsrTempData_.ref().get();
  size_t prevSize = ionOsrTempDataSize_.ref();
  MOZ_ASSERT((prevSize > 0) == !!prevBuffer);

  // Reuse the previous buffer if possible.
  if (prevSize >= size) {
    return prevBuffer;
  }

  // Allocate or resize the buffer.
  uint8_t* buffer = js_pod_realloc<uint8_t>(prevBuffer, prevSize, size);
  if (!buffer) {
    // ionOsrTempData_ is still valid.
    return nullptr;
  }
  // ionOsrTempData_ is no longer valid.
  (void)ionOsrTempData_.ref().release();
  ionOsrTempData_.ref().reset(buffer);
  ionOsrTempDataSize_ = size;
  return buffer;
}

void JitRuntime::freeIonOsrTempData() {
  ionOsrTempData_.ref().reset();
  ionOsrTempDataSize_ = 0;
}
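// Extract and clear the lowest set bit of *bitmask and return it as an enum
// value of type T. For example, if *bitmask is 0b0110, the trailing-zero
// count is 1, the mask becomes 0b0100, and T(1) is returned.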
template <typename T>
static T PopNextBitmaskValue(uint32_t* bitmask) {
  MOZ_ASSERT(*bitmask);
  uint32_t index = mozilla::CountTrailingZeroes32(*bitmask);
  *bitmask ^= 1 << index;

  MOZ_ASSERT(index < uint32_t(T::Count));
  return T(index);
}

void JitZone::performStubReadBarriers(uint32_t stubsToBarrier) const {
  while (stubsToBarrier) {
    auto stub = PopNextBitmaskValue<StubIndex>(&stubsToBarrier);
    const WeakHeapPtr<JitCode*>& jitCode = stubs_[stub];
    MOZ_ASSERT(jitCode);
    jitCode.get();
  }
}
static bool LinkCodeGen(JSContext* cx, CodeGenerator* codegen,
                        HandleScript script, const WarpSnapshot* snapshot) {
  if (!codegen->link(cx, snapshot)) {
    return false;
  }

  return true;
}

static bool LinkBackgroundCodeGen(JSContext* cx, IonCompileTask* task) {
  CodeGenerator* codegen = task->backgroundCodegen();
  if (!codegen) {
    return false;
  }

  JitContext jctx(cx);
  RootedScript script(cx, task->script());
  return LinkCodeGen(cx, codegen, script, task->snapshot());
}
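// Off-thread Ion compilations are linked lazily: when the back end finishes
// on a helper thread, the task is queued on the lazy link list, and the
// actual linking below happens on the main thread the next time the script
// is about to be entered.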
void jit::LinkIonScript(JSContext* cx, HandleScript calleeScript) {
  // Get the pending IonCompileTask from the script.
  MOZ_ASSERT(calleeScript->hasBaselineScript());
  IonCompileTask* task =
      calleeScript->baselineScript()->pendingIonCompileTask();
  calleeScript->baselineScript()->removePendingIonCompileTask(cx->runtime(),
                                                              calleeScript);

  // Remove from pending.
  cx->runtime()->jitRuntime()->ionLazyLinkListRemove(cx->runtime(), task);

  {
    gc::AutoSuppressGC suppressGC(cx);
    if (!LinkBackgroundCodeGen(cx, task)) {
      // Silently ignore OOM during code generation. The assembled code
      // doesn't have a path to handle it after linking has happened, so it's
      // not OK to throw a catchable exception from there.
      cx->clearPendingException();
    }
  }

  AutoStartIonFreeTask freeTask(cx->runtime()->jitRuntime());
  FinishOffThreadTask(cx->runtime(), freeTask, task);
}

uint8_t* jit::LazyLinkTopActivation(JSContext* cx,
                                    LazyLinkExitFrameLayout* frame) {
  RootedScript calleeScript(
      cx, ScriptFromCalleeToken(frame->jsFrame()->calleeToken()));

  LinkIonScript(cx, calleeScript);

  MOZ_ASSERT(calleeScript->hasBaselineScript());
  MOZ_ASSERT(calleeScript->jitCodeRaw());

  return calleeScript->jitCodeRaw();
}
/* static */
void JitRuntime::TraceAtomZoneRoots(JSTracer* trc) {
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());

  // Shared stubs are allocated in the atoms zone, so do not iterate over
  // them after the atoms heap has been "finished".
  if (trc->runtime()->atomsAreFinished()) {
    return;
  }

  Zone* zone = trc->runtime()->atomsZone();
  for (auto i = zone->cellIterUnsafe<JitCode>(); !i.done(); i.next()) {
    JitCode* code = i;
    TraceRoot(trc, &code, "wrapper");
  }
}

/* static */
bool JitRuntime::MarkJitcodeGlobalTableIteratively(GCMarker* marker) {
  if (marker->runtime()->hasJitRuntime() &&
      marker->runtime()->jitRuntime()->hasJitcodeGlobalTable()) {
    return marker->runtime()
        ->jitRuntime()
        ->getJitcodeGlobalTable()
        ->markIteratively(marker);
  }
  return false;
}

/* static */
void JitRuntime::TraceWeakJitcodeGlobalTable(JSRuntime* rt, JSTracer* trc) {
  if (rt->hasJitRuntime() && rt->jitRuntime()->hasJitcodeGlobalTable()) {
    rt->jitRuntime()->getJitcodeGlobalTable()->traceWeak(rt, trc);
  }
}
bool JitZone::addInlinedCompilation(const RecompileInfo& info,
                                    JSScript* inlined) {
  MOZ_ASSERT(inlined != info.script());

  auto p = inlinedCompilations_.lookupForAdd(inlined);
  if (p) {
    auto& compilations = p->value();
    if (!compilations.empty() && compilations.back() == info) {
      return true;
    }
    return compilations.append(info);
  }
  RecompileInfoVector compilations;
  if (!compilations.append(info)) {
    return false;
  }
  return inlinedCompilations_.add(p, inlined, std::move(compilations));
}
void jit::AddPendingInvalidation(RecompileInfoVector& invalid,
                                 JSScript* script) {
  MOZ_ASSERT(script);

  CancelOffThreadIonCompile(script);

  // Let the script warm up again before attempting another compile.
  script->resetWarmUpCounterToDelayIonCompilation();

  JitScript* jitScript = script->maybeJitScript();
  if (!jitScript) {
    return;
  }

  auto addPendingInvalidation = [&invalid](const RecompileInfo& info) {
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!invalid.append(info)) {
      // BUG 1536159: For diagnostics, compute the size of the failed
      // allocation. This presumes the vector growth strategy is to double.
      // This is only used for crash reporting, so it is not a problem if we
      // get it wrong.
      size_t allocSize = 2 * sizeof(RecompileInfo) * invalid.capacity();
      oomUnsafe.crash(allocSize, "Could not update RecompileInfoVector");
    }
  };

  // Trigger invalidation of the IonScript.
  if (jitScript->hasIonScript()) {
    RecompileInfo info(script, jitScript->ionScript()->compilationId());
    addPendingInvalidation(info);
  }

  // Trigger invalidation of any callers inlining this script.
  auto* inlinedCompilations =
      script->zone()->jitZone()->maybeInlinedCompilations(script);
  if (inlinedCompilations) {
    for (const RecompileInfo& info : *inlinedCompilations) {
      addPendingInvalidation(info);
    }
    script->zone()->jitZone()->removeInlinedCompilations(script);
  }
}
IonScript* RecompileInfo::maybeIonScriptToInvalidate() const {
  // Make sure this is not called under CodeGenerator::link (before the
  // IonScript is created).
  MOZ_ASSERT_IF(
      script_->zone()->jitZone()->currentCompilationId(),
      script_->zone()->jitZone()->currentCompilationId().ref() != id_);

  if (!script_->hasIonScript() ||
      script_->ionScript()->compilationId() != id_) {
    return nullptr;
  }

  return script_->ionScript();
}

bool RecompileInfo::traceWeak(JSTracer* trc) {
  // Sweep the RecompileInfo if either the script is dead or the IonScript has
  // been invalidated.

  if (!TraceManuallyBarrieredWeakEdge(trc, &script_, "RecompileInfo::script")) {
    return false;
  }

  return maybeIonScriptToInvalidate() != nullptr;
}
void JitZone::traceWeak(JSTracer* trc, Zone* zone) {
  MOZ_ASSERT(this == zone->jitZone());

  // Any outstanding compilations should have been cancelled by the GC.
  MOZ_ASSERT(!HasOffThreadIonCompile(zone));

  for (WeakHeapPtr<JitCode*>& stub : stubs_) {
    TraceWeakEdge(trc, &stub, "JitZone::stubs_");
  }

  baselineCacheIRStubCodes_.traceWeak(trc);
  inlinedCompilations_.traceWeak(trc);

  TraceWeakEdge(trc, &lastStubFoldingBailoutChild_,
                "JitZone::lastStubFoldingBailoutChild_");
  TraceWeakEdge(trc, &lastStubFoldingBailoutParent_,
                "JitZone::lastStubFoldingBailoutParent_");
}

void JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                     JS::CodeSizes* code, size_t* jitZone,
                                     size_t* cacheIRStubs) const {
  *jitZone += mallocSizeOf(this);
  *jitZone +=
      baselineCacheIRStubCodes_.shallowSizeOfExcludingThis(mallocSizeOf);
  *jitZone += ionCacheIRStubInfoSet_.shallowSizeOfExcludingThis(mallocSizeOf);

  execAlloc().addSizeOfCode(code);

  *cacheIRStubs += stubSpace_.sizeOfExcludingThis(mallocSizeOf);
}
void JitCodeHeader::init(JitCode* jitCode) {
  // As long as JitCode isn't moveable, we can avoid tracing this and
  // mutating executable data.
  MOZ_ASSERT(!gc::IsMovableKind(gc::AllocKind::JITCODE));
  jitCode_ = jitCode;
}
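// `code` points at `totalSize` bytes of executable memory; the first
// `headerSize` bytes hold the JitCodeHeader, and the remaining `bufferSize`
// bytes hold the instructions and relocation tables.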
template <AllowGC allowGC>
JitCode* JitCode::New(JSContext* cx, uint8_t* code, uint32_t totalSize,
                      uint32_t headerSize, ExecutablePool* pool,
                      CodeKind kind) {
  uint32_t bufferSize = totalSize - headerSize;
  JitCode* codeObj =
      cx->newCell<JitCode, allowGC>(code, bufferSize, headerSize, pool, kind);
  if (!codeObj) {
    // The caller already allocated `totalSize` bytes of executable memory.
    pool->release(totalSize, kind);
    return nullptr;
  }

  cx->zone()->incJitMemory(totalSize);

  return codeObj;
}

template JitCode* JitCode::New<CanGC>(JSContext* cx, uint8_t* code,
                                      uint32_t bufferSize, uint32_t headerSize,
                                      ExecutablePool* pool, CodeKind kind);

template JitCode* JitCode::New<NoGC>(JSContext* cx, uint8_t* code,
                                     uint32_t bufferSize, uint32_t headerSize,
                                     ExecutablePool* pool, CodeKind kind);
void JitCode::copyFrom(MacroAssembler& masm) {
  // Store the JitCode pointer in the JitCodeHeader so we can recover the
  // gcthing from relocation tables.
  JitCodeHeader::FromExecutable(raw())->init(this);

  insnSize_ = masm.instructionsSize();
  masm.executableCopy(raw());

  jumpRelocTableBytes_ = masm.jumpRelocationTableBytes();
  masm.copyJumpRelocationTable(raw() + jumpRelocTableOffset());

  dataRelocTableBytes_ = masm.dataRelocationTableBytes();
  masm.copyDataRelocationTable(raw() + dataRelocTableOffset());

  masm.processCodeLabels(raw());
}
void JitCode::traceChildren(JSTracer* trc) {
  // Note that we cannot mark invalidated scripts, since we've basically
  // corrupted the code stream by injecting bailouts.
  if (invalidated()) {
    return;
  }

  if (jumpRelocTableBytes_) {
    uint8_t* start = raw() + jumpRelocTableOffset();
    CompactBufferReader reader(start, start + jumpRelocTableBytes_);
    MacroAssembler::TraceJumpRelocations(trc, this, reader);
  }
  if (dataRelocTableBytes_) {
    uint8_t* start = raw() + dataRelocTableOffset();
    CompactBufferReader reader(start, start + dataRelocTableBytes_);
    MacroAssembler::TraceDataRelocations(trc, this, reader);
  }
}
void JitCode::finalize(JS::GCContext* gcx) {
  // If this jitcode had a bytecode map, it must have already been removed.
#ifdef DEBUG
  JSRuntime* rt = gcx->runtime();
  if (hasBytecodeMap_) {
    MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
    MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw()));
  }
#endif

#ifdef MOZ_VTUNE
  vtune::UnmarkCode(this);
#endif

  MOZ_ASSERT(pool_);

  // With W^X JIT code, reprotecting memory for each JitCode instance is
  // slow, so we record the ranges and poison them later all at once. It's
  // safe to ignore OOM here; it just means we won't poison the code.
  if (gcx->appendJitPoisonRange(JitPoisonRange(pool_, raw() - headerSize_,
                                               headerSize_ + bufferSize_))) {
    pool_->addRef();
  }
  setHeaderPtr(nullptr);

#ifdef JS_ION_PERF
  // Code buffers are stored inside ExecutablePools. Pools are refcounted.
  // Releasing the pool may free it. Horrible hack: if we are using perf
  // integration, we don't want to reuse code addresses, so we just leak the
  // memory instead.
  if (!PerfEnabled()) {
    pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
  }
#else
  pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
#endif

  zone()->decJitMemory(headerSize_ + bufferSize_);

  pool_ = nullptr;
}
IonScript::IonScript(IonCompilationId compilationId, uint32_t localSlotsSize,
                     uint32_t argumentSlotsSize, uint32_t frameSize)
    : localSlotsSize_(localSlotsSize),
      argumentSlotsSize_(argumentSlotsSize),
      frameSize_(frameSize),
      compilationId_(compilationId) {}
IonScript* IonScript::New(JSContext* cx, IonCompilationId compilationId,
                          uint32_t localSlotsSize, uint32_t argumentSlotsSize,
                          uint32_t frameSize, size_t snapshotsListSize,
                          size_t snapshotsRVATableSize, size_t recoversSize,
                          size_t constants, size_t nurseryObjects,
                          size_t safepointIndices, size_t osiIndices,
                          size_t icEntries, size_t runtimeSize,
                          size_t safepointsSize) {
  if (snapshotsListSize >= MAX_BUFFER_SIZE) {
    ReportOutOfMemory(cx);
    return nullptr;
  }

  // Verify that the hardcoded sizes in the header are accurate.
  static_assert(SizeOf_OsiIndex == sizeof(OsiIndex),
                "IonScript has wrong size for OsiIndex");
  static_assert(SizeOf_SafepointIndex == sizeof(SafepointIndex),
                "IonScript has wrong size for SafepointIndex");

  CheckedInt<Offset> allocSize = sizeof(IonScript);
  allocSize += CheckedInt<Offset>(constants) * sizeof(Value);
  allocSize += CheckedInt<Offset>(runtimeSize);
  allocSize += CheckedInt<Offset>(nurseryObjects) * sizeof(HeapPtr<JSObject*>);
  allocSize += CheckedInt<Offset>(osiIndices) * sizeof(OsiIndex);
  allocSize += CheckedInt<Offset>(safepointIndices) * sizeof(SafepointIndex);
  allocSize += CheckedInt<Offset>(icEntries) * sizeof(uint32_t);
  allocSize += CheckedInt<Offset>(safepointsSize);
  allocSize += CheckedInt<Offset>(snapshotsListSize);
  allocSize += CheckedInt<Offset>(snapshotsRVATableSize);
  allocSize += CheckedInt<Offset>(recoversSize);

  if (!allocSize.isValid()) {
    ReportAllocationOverflow(cx);
    return nullptr;
  }

  void* raw = cx->pod_malloc<uint8_t>(allocSize.value());
  MOZ_ASSERT(uintptr_t(raw) % alignof(IonScript) == 0);
  if (!raw) {
    return nullptr;
  }
  IonScript* script = new (raw)
      IonScript(compilationId, localSlotsSize, argumentSlotsSize, frameSize);

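  // The IonScript header is followed by its trailing data in this single
  // allocation, laid out in this order: constants, runtime data, nursery
  // objects, OSI indices, safepoint indices, IC entries, safepoints,
  // snapshots, the RVA table, and recover instructions.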
  Offset offsetCursor = sizeof(IonScript);

  MOZ_ASSERT(offsetCursor % alignof(Value) == 0);
  script->constantTableOffset_ = offsetCursor;
  offsetCursor += constants * sizeof(Value);

  MOZ_ASSERT(offsetCursor % alignof(uint64_t) == 0);
  script->runtimeDataOffset_ = offsetCursor;
  offsetCursor += runtimeSize;

  MOZ_ASSERT(offsetCursor % alignof(HeapPtr<JSObject*>) == 0);
  script->initElements<HeapPtr<JSObject*>>(offsetCursor, nurseryObjects);
  script->nurseryObjectsOffset_ = offsetCursor;
  offsetCursor += nurseryObjects * sizeof(HeapPtr<JSObject*>);

  MOZ_ASSERT(offsetCursor % alignof(OsiIndex) == 0);
  script->osiIndexOffset_ = offsetCursor;
  offsetCursor += osiIndices * sizeof(OsiIndex);

  MOZ_ASSERT(offsetCursor % alignof(SafepointIndex) == 0);
  script->safepointIndexOffset_ = offsetCursor;
  offsetCursor += safepointIndices * sizeof(SafepointIndex);

  MOZ_ASSERT(offsetCursor % alignof(uint32_t) == 0);
  script->icIndexOffset_ = offsetCursor;
  offsetCursor += icEntries * sizeof(uint32_t);

  script->safepointsOffset_ = offsetCursor;
  offsetCursor += safepointsSize;

  script->snapshotsOffset_ = offsetCursor;
  offsetCursor += snapshotsListSize;

  script->rvaTableOffset_ = offsetCursor;
  offsetCursor += snapshotsRVATableSize;

  script->recoversOffset_ = offsetCursor;
  offsetCursor += recoversSize;

  script->allocBytes_ = offsetCursor;

  MOZ_ASSERT(script->numConstants() == constants);
  MOZ_ASSERT(script->runtimeSize() == runtimeSize);
  MOZ_ASSERT(script->numNurseryObjects() == nurseryObjects);
  MOZ_ASSERT(script->numOsiIndices() == osiIndices);
  MOZ_ASSERT(script->numSafepointIndices() == safepointIndices);
  MOZ_ASSERT(script->numICs() == icEntries);
  MOZ_ASSERT(script->safepointsSize() == safepointsSize);
  MOZ_ASSERT(script->snapshotsListSize() == snapshotsListSize);
  MOZ_ASSERT(script->snapshotsRVATableSize() == snapshotsRVATableSize);
  MOZ_ASSERT(script->recoversSize() == recoversSize);
  MOZ_ASSERT(script->endOffset() == offsetCursor);

  return script;
}
void IonScript::trace(JSTracer* trc) {
  if (method_) {
    TraceEdge(trc, &method_, "method");
  }

  for (size_t i = 0; i < numConstants(); i++) {
    TraceEdge(trc, &getConstant(i), "constant");
  }

  for (size_t i = 0; i < numNurseryObjects(); i++) {
    TraceEdge(trc, &nurseryObjects()[i], "nursery-object");
  }

  // Trace caches so that the JSScript pointer can be updated if moved.
  for (size_t i = 0; i < numICs(); i++) {
    getICFromIndex(i).trace(trc, this);
  }
}

void IonScript::traceWeak(JSTracer* trc) {
  // IonICs do not currently contain weak pointers. If this is added then they
  // should be traced here.
}

/* static */
void IonScript::preWriteBarrier(Zone* zone, IonScript* ionScript) {
  PreWriteBarrier(zone, ionScript);
}
void IonScript::copySnapshots(const SnapshotWriter* writer) {
  MOZ_ASSERT(writer->listSize() == snapshotsListSize());
  memcpy(offsetToPointer<uint8_t>(snapshotsOffset()), writer->listBuffer(),
         snapshotsListSize());

  MOZ_ASSERT(snapshotsRVATableSize());
  MOZ_ASSERT(writer->RVATableSize() == snapshotsRVATableSize());
  memcpy(offsetToPointer<uint8_t>(rvaTableOffset()), writer->RVATableBuffer(),
         snapshotsRVATableSize());
}

void IonScript::copyRecovers(const RecoverWriter* writer) {
  MOZ_ASSERT(writer->size() == recoversSize());
  memcpy(offsetToPointer<uint8_t>(recoversOffset()), writer->buffer(),
         recoversSize());
}

void IonScript::copySafepoints(const SafepointWriter* writer) {
  MOZ_ASSERT(writer->size() == safepointsSize());
  memcpy(offsetToPointer<uint8_t>(safepointsOffset()), writer->buffer(),
         safepointsSize());
}

void IonScript::copyConstants(const Value* vp) {
  for (size_t i = 0; i < numConstants(); i++) {
    constants()[i].init(vp[i]);
  }
}

void IonScript::copySafepointIndices(const CodegenSafepointIndex* si) {
  // Convert CodegenSafepointIndex to the more compact form.
  SafepointIndex* table = safepointIndices();
  for (size_t i = 0; i < numSafepointIndices(); ++i) {
    table[i] = SafepointIndex(si[i]);
  }
}

void IonScript::copyOsiIndices(const OsiIndex* oi) {
  memcpy(osiIndices(), oi, numOsiIndices() * sizeof(OsiIndex));
}

void IonScript::copyRuntimeData(const uint8_t* data) {
  memcpy(runtimeData(), data, runtimeSize());
}
void IonScript::copyICEntries(const uint32_t* icEntries) {
  memcpy(icIndex(), icEntries, numICs() * sizeof(uint32_t));

  // Update the codeRaw_ field in the ICs now that we know the code address.
  for (size_t i = 0; i < numICs(); i++) {
    getICFromIndex(i).resetCodeRaw(this);
  }
}
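// Find the safepoint whose displacement is exactly `disp` using an
// interpolation search: guess an index by assuming displacements are roughly
// uniformly distributed, then scan linearly from there. For example, with
// min == 0, max == 100 and maxEntry == 10, disp == 42 gives an initial
// guess of (42 - 0) * (10 - 0) / (100 - 0) = 4 (integer division).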
const SafepointIndex* IonScript::getSafepointIndex(uint32_t disp) const {
  MOZ_ASSERT(numSafepointIndices() > 0);

  const SafepointIndex* table = safepointIndices();
  if (numSafepointIndices() == 1) {
    MOZ_ASSERT(disp == table[0].displacement());
    return &table[0];
  }

  size_t minEntry = 0;
  size_t maxEntry = numSafepointIndices() - 1;
  uint32_t min = table[minEntry].displacement();
  uint32_t max = table[maxEntry].displacement();

  // Assert that the element is in the list.
  MOZ_ASSERT(min <= disp && disp <= max);

  // Approximate the location of the FrameInfo.
  size_t guess = (disp - min) * (maxEntry - minEntry) / (max - min) + minEntry;
  uint32_t guessDisp = table[guess].displacement();

  if (table[guess].displacement() == disp) {
    return &table[guess];
  }

  // A linear scan from the guess should be more efficient for small groups
  // that are roughly evenly distributed across the code,
  // such as: <... ... ... ... . ... ...>
  if (guessDisp > disp) {
    while (--guess >= minEntry) {
      guessDisp = table[guess].displacement();
      MOZ_ASSERT(guessDisp >= disp);
      if (guessDisp == disp) {
        return &table[guess];
      }
    }
  } else {
    while (++guess <= maxEntry) {
      guessDisp = table[guess].displacement();
      MOZ_ASSERT(guessDisp <= disp);
      if (guessDisp == disp) {
        return &table[guess];
      }
    }
  }

  MOZ_CRASH("displacement not found.");
}
const OsiIndex* IonScript::getOsiIndex(uint32_t disp) const {
  const OsiIndex* end = osiIndices() + numOsiIndices();
  for (const OsiIndex* it = osiIndices(); it != end; ++it) {
    if (it->returnPointDisplacement() == disp) {
      return it;
    }
  }

  MOZ_CRASH("Failed to find OSI point return address");
}

const OsiIndex* IonScript::getOsiIndex(uint8_t* retAddr) const {
  JitSpew(JitSpew_IonInvalidate, "IonScript %p has method %p raw %p",
          (void*)this, (void*)method(), method()->raw());

  MOZ_ASSERT(containsCodeAddress(retAddr));
  uint32_t disp = retAddr - method()->raw();
  return getOsiIndex(disp);
}
void IonScript::Destroy(JS::GCContext* gcx, IonScript* script) {
  // Make sure there are no pointers into the IonScript's nursery objects list
  // in the store buffer. Because this can be called during sweeping when
  // discarding JIT code, we have to lock the store buffer when we find an
  // object that's (still) in the nursery.
  mozilla::Maybe<gc::AutoLockStoreBuffer> lock;
  for (size_t i = 0, len = script->numNurseryObjects(); i < len; i++) {
    JSObject* obj = script->nurseryObjects()[i];
    if (!IsInsideNursery(obj)) {
      continue;
    }
    if (lock.isNothing()) {
      lock.emplace(gcx->runtimeFromAnyThread());
    }
    script->nurseryObjects()[i] = HeapPtr<JSObject*>();
  }

  // This allocation is tracked by JSScript::setIonScriptImpl.
  gcx->deleteUntracked(script);
}

void JS::DeletePolicy<js::jit::IonScript>::operator()(
    const js::jit::IonScript* script) {
  IonScript::Destroy(rt_->gcContext(), const_cast<IonScript*>(script));
}

void IonScript::purgeICs(Zone* zone) {
  for (size_t i = 0; i < numICs(); i++) {
    getICFromIndex(i).reset(zone, this);
  }
}
namespace js {
namespace jit {

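// Run the MIR optimization pipeline. Each pass follows the same pattern:
// run the pass, spew the updated graph, assert graph coherency, and check
// whether the (possibly off-thread) compilation has been cancelled.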
bool OptimizeMIR(MIRGenerator* mir) {
  MIRGraph& graph = mir->graph();
  GraphSpewer& gs = mir->graphSpewer();

  if (mir->shouldCancel("Start")) {
    return false;
  }

  gs.spewPass("BuildSSA");
  AssertBasicGraphCoherency(graph);

  if (JitSpewEnabled(JitSpew_MIRExpressions)) {
    JitSpewCont(JitSpew_MIRExpressions, "\n");
    DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
                       "BuildSSA (== input to OptimizeMIR)");
  }

  if (!JitOptions.disablePruning && !mir->compilingWasm()) {
    JitSpewCont(JitSpew_Prune, "\n");
    if (!PruneUnusedBranches(mir, graph)) {
      return false;
    }
    gs.spewPass("Prune Unused Branches");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Prune Unused Branches")) {
      return false;
    }
  }

  {
    if (!FoldEmptyBlocks(graph)) {
      return false;
    }
    gs.spewPass("Fold Empty Blocks");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Empty Blocks")) {
      return false;
    }
  }

  // Remove trivially dead resume point operands before folding tests, so the
  // latter pass can optimize more aggressively.
  if (!mir->compilingWasm()) {
    if (!EliminateTriviallyDeadResumePointOperands(mir, graph)) {
      return false;
    }
    gs.spewPass("Eliminate trivially dead resume point operands");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Eliminate trivially dead resume point operands")) {
      return false;
    }
  }

  {
    if (!FoldTests(graph)) {
      return false;
    }
    gs.spewPass("Fold Tests");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Tests")) {
      return false;
    }
  }

  {
    if (!SplitCriticalEdges(graph)) {
      return false;
    }
    gs.spewPass("Split Critical Edges");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Split Critical Edges")) {
      return false;
    }
  }

  {
    RenumberBlocks(graph);
    gs.spewPass("Renumber Blocks");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Renumber Blocks")) {
      return false;
    }
  }

  {
    if (!BuildDominatorTree(graph)) {
      return false;
    }
    // No spew: graph not changed.

    if (mir->shouldCancel("Dominator Tree")) {
      return false;
    }
  }

  {
    // Aggressive phi elimination must occur before any code elimination. If
    // the script contains a try-statement, we only compiled the try block
    // and not the catch or finally blocks, so in this case it's also invalid
    // to use aggressive phi elimination.
    Observability observability = graph.hasTryBlock()
                                      ? ConservativeObservability
                                      : AggressiveObservability;
    if (!EliminatePhis(mir, graph, observability)) {
      return false;
    }
    gs.spewPass("Eliminate phis");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Eliminate phis")) {
      return false;
    }

    if (!BuildPhiReverseMapping(graph)) {
      return false;
    }
    AssertExtendedGraphCoherency(graph);
    // No spew: graph not changed.

    if (mir->shouldCancel("Phi reverse mapping")) {
      return false;
    }
  }
  if (!mir->compilingWasm() && !JitOptions.disableIteratorIndices) {
    if (!OptimizeIteratorIndices(mir, graph)) {
      return false;
    }
    gs.spewPass("Iterator Indices");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Iterator Indices")) {
      return false;
    }
  }

  if (!JitOptions.disableRecoverIns &&
      mir->optimizationInfo().scalarReplacementEnabled()) {
    JitSpewCont(JitSpew_Escape, "\n");
    if (!ScalarReplacement(mir, graph)) {
      return false;
    }
    gs.spewPass("Scalar Replacement");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Scalar Replacement")) {
      return false;
    }
  }

  if (!mir->compilingWasm()) {
    if (!ApplyTypeInformation(mir, graph)) {
      return false;
    }
    gs.spewPass("Apply types");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Apply types")) {
      return false;
    }
  }

  if (mir->optimizationInfo().amaEnabled()) {
    AlignmentMaskAnalysis ama(graph);
    if (!ama.analyze()) {
      return false;
    }
    gs.spewPass("Alignment Mask Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Alignment Mask Analysis")) {
      return false;
    }
  }

  ValueNumberer gvn(mir, graph);

  // Alias analysis is required for LICM and GVN so that we don't move
  // loads across stores. We also use alias information when removing
  // redundant shapeguards.
  if (mir->optimizationInfo().licmEnabled() ||
      mir->optimizationInfo().gvnEnabled() ||
      mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
    {
      AliasAnalysis analysis(mir, graph);
      JitSpewCont(JitSpew_Alias, "\n");
      if (!analysis.analyze()) {
        return false;
      }
    }
    gs.spewPass("Alias analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Alias analysis")) {
      return false;
    }
  }

  if (!mir->compilingWasm()) {
    // Eliminating dead resume point operands requires basic block
    // instructions to be numbered. Reuse the numbering computed during
    // alias analysis.
    if (!EliminateDeadResumePointOperands(mir, graph)) {
      return false;
    }

    gs.spewPass("Eliminate dead resume point operands");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Eliminate dead resume point operands")) {
      return false;
    }
  }

  if (mir->optimizationInfo().gvnEnabled()) {
    JitSpewCont(JitSpew_GVN, "\n");
    if (!gvn.run(ValueNumberer::UpdateAliasAnalysis)) {
      return false;
    }
    gs.spewPass("GVN");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("GVN")) {
      return false;
    }
  }

  // LICM can hoist instructions from conditional branches and
  // trigger bailouts. Disable it if bailing out of a hoisted
  // instruction has previously invalidated this script.
  if (mir->licmEnabled()) {
    JitSpewCont(JitSpew_LICM, "\n");
    if (!LICM(mir, graph)) {
      return false;
    }
    gs.spewPass("LICM");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("LICM")) {
      return false;
    }
  }

  RangeAnalysis r(mir, graph);
  if (mir->optimizationInfo().rangeAnalysisEnabled()) {
    JitSpewCont(JitSpew_Range, "\n");
    if (!r.addBetaNodes()) {
      return false;
    }
    gs.spewPass("Beta");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("RA Beta")) {
      return false;
    }

    if (!r.analyze() || !r.addRangeAssertions()) {
      return false;
    }
    gs.spewPass("Range Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Range Analysis")) {
      return false;
    }

    if (!r.removeBetaNodes()) {
      return false;
    }
    gs.spewPass("De-Beta");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("RA De-Beta")) {
      return false;
    }

    if (mir->optimizationInfo().gvnEnabled()) {
      bool shouldRunUCE = false;
      if (!r.prepareForUCE(&shouldRunUCE)) {
        return false;
      }
      gs.spewPass("RA check UCE");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("RA check UCE")) {
        return false;
      }

      if (shouldRunUCE) {
        if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis)) {
          return false;
        }
        gs.spewPass("UCE After RA");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("UCE After RA")) {
          return false;
        }
      }
    }

    if (mir->optimizationInfo().autoTruncateEnabled()) {
      if (!r.truncate()) {
        return false;
      }
      gs.spewPass("Truncate Doubles");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("Truncate Doubles")) {
        return false;
      }
    }
  }
  if (!JitOptions.disableRecoverIns) {
    JitSpewCont(JitSpew_Sink, "\n");
    if (!Sink(mir, graph)) {
      return false;
    }
    gs.spewPass("Sink");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Sink")) {
      return false;
    }
  }

  if (!JitOptions.disableRecoverIns &&
      mir->optimizationInfo().rangeAnalysisEnabled()) {
    JitSpewCont(JitSpew_Range, "\n");
    if (!r.removeUnnecessaryBitops()) {
      return false;
    }
    gs.spewPass("Remove Unnecessary Bitops");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Remove Unnecessary Bitops")) {
      return false;
    }
  }

  {
    JitSpewCont(JitSpew_FLAC, "\n");
    if (!FoldLinearArithConstants(mir, graph)) {
      return false;
    }
    gs.spewPass("Fold Linear Arithmetic Constants");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Linear Arithmetic Constants")) {
      return false;
    }
  }

  if (mir->optimizationInfo().eaaEnabled()) {
    EffectiveAddressAnalysis eaa(mir, graph);
    JitSpewCont(JitSpew_EAA, "\n");
    if (!eaa.analyze()) {
      return false;
    }
    gs.spewPass("Effective Address Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Effective Address Analysis")) {
      return false;
    }
  }

  // BCE marks bounds checks as dead, so do BCE before DCE.
  if (mir->compilingWasm()) {
    JitSpewCont(JitSpew_WasmBCE, "\n");
    if (!EliminateBoundsChecks(mir, graph)) {
      return false;
    }
    gs.spewPass("Redundant Bounds Check Elimination");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("BCE")) {
      return false;
    }
  }

  {
    if (!EliminateDeadCode(mir, graph)) {
      return false;
    }
    gs.spewPass("DCE");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("DCE")) {
      return false;
    }
  }

  if (!JitOptions.disableMarkLoadsUsedAsPropertyKeys && !mir->compilingWasm()) {
    JitSpewCont(JitSpew_MarkLoadsUsedAsPropertyKeys, "\n");
    if (!MarkLoadsUsedAsPropertyKeys(graph)) {
      return false;
    }
    if (mir->shouldCancel("MarkLoadsUsedAsPropertyKeys")) {
      return false;
    }
  }

  if (mir->optimizationInfo().instructionReorderingEnabled() &&
      !mir->outerInfo().hadReorderingBailout()) {
    if (!ReorderInstructions(graph)) {
      return false;
    }
    gs.spewPass("Reordering");

    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Reordering")) {
      return false;
    }
  }

  // Make loops contiguous. We do this after GVN/UCE and range analysis,
  // which can remove CFG edges, exposing more blocks that can be moved.
  {
    if (!MakeLoopsContiguous(graph)) {
      return false;
    }
    gs.spewPass("Make loops contiguous");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Make loops contiguous")) {
      return false;
    }
  }
  AssertExtendedGraphCoherency(graph, /* underValueNumberer = */ false,
                               /* force = */ true);

  // Remove unreachable blocks created by MBasicBlock::NewFakeLoopPredecessor
  // to ensure every loop header has two predecessors. (This only happens due
  // to OSR.) After this point, it is no longer possible to build the
  // dominator tree.
  if (!mir->compilingWasm() && graph.osrBlock()) {
    graph.removeFakeLoopPredecessors();
    gs.spewPass("Remove fake loop predecessors");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Remove fake loop predecessors")) {
      return false;
    }
  }

  // Passes after this point must not move instructions; these analyses
  // depend on knowing the final order in which instructions will execute.

  if (mir->optimizationInfo().edgeCaseAnalysisEnabled()) {
    EdgeCaseAnalysis edgeCaseAnalysis(mir, graph);
    if (!edgeCaseAnalysis.analyzeLate()) {
      return false;
    }
    gs.spewPass("Edge Case Analysis (Late)");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Edge Case Analysis (Late)")) {
      return false;
    }
  }

  if (mir->optimizationInfo().eliminateRedundantChecksEnabled()) {
    // Note: check elimination has to run after all other passes that move
    // instructions. Since check uses are replaced with the actual index,
    // code motion after this pass could incorrectly move a load or store
    // before its bounds check.
    if (!EliminateRedundantChecks(graph)) {
      return false;
    }
    gs.spewPass("Bounds Check Elimination");
    AssertGraphCoherency(graph);
  }

  if (mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
    if (!EliminateRedundantShapeGuards(graph)) {
      return false;
    }
    gs.spewPass("Shape Guard Elimination");
    AssertGraphCoherency(graph);
  }

  // Run the GC Barrier Elimination pass after instruction reordering, to
  // ensure we don't move instructions that can trigger GC between stores we
  // optimize here.
  if (mir->optimizationInfo().eliminateRedundantGCBarriersEnabled()) {
    if (!EliminateRedundantGCBarriers(graph)) {
      return false;
    }
    gs.spewPass("GC Barrier Elimination");
    AssertGraphCoherency(graph);
  }

  if (!mir->compilingWasm() && !mir->outerInfo().hadUnboxFoldingBailout()) {
    if (!FoldLoadsWithUnbox(mir, graph)) {
      return false;
    }
    gs.spewPass("FoldLoadsWithUnbox");
    AssertGraphCoherency(graph);
  }

  if (!mir->compilingWasm()) {
    if (!AddKeepAliveInstructions(graph)) {
      return false;
    }
    gs.spewPass("Add KeepAlive Instructions");
    AssertGraphCoherency(graph);
  }

  AssertGraphCoherency(graph, /* force = */ true);

  if (JitSpewEnabled(JitSpew_MIRExpressions)) {
    JitSpewCont(JitSpew_MIRExpressions, "\n");
    DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
                       "BeforeLIR (== result of OptimizeMIR)");
  }

  return true;
}
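// Lower the optimized MIR graph to LIR and run the register allocator on
// it. Returns nullptr on failure or cancellation.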
LIRGraph* GenerateLIR(MIRGenerator* mir) {
  MIRGraph& graph = mir->graph();
  GraphSpewer& gs = mir->graphSpewer();

  LIRGraph* lir = mir->alloc().lifoAlloc()->new_<LIRGraph>(&graph);
  if (!lir || !lir->init()) {
    return nullptr;
  }

  LIRGenerator lirgen(mir, graph, *lir);
  {
    if (!lirgen.generate()) {
      return nullptr;
    }
    gs.spewPass("Generate LIR");

    if (mir->shouldCancel("Generate LIR")) {
      return nullptr;
    }
  }

#ifdef DEBUG
  AllocationIntegrityState integrity(*lir);
#endif

  {
    IonRegisterAllocator allocator =
        mir->optimizationInfo().registerAllocator();

    switch (allocator) {
      case RegisterAllocator_Backtracking:
      case RegisterAllocator_Testbed: {
#ifdef DEBUG
        if (JitOptions.fullDebugChecks) {
          if (!integrity.record()) {
            return nullptr;
          }
        }
#endif

        BacktrackingAllocator regalloc(mir, &lirgen, *lir,
                                       allocator == RegisterAllocator_Testbed);
        if (!regalloc.go()) {
          return nullptr;
        }

#ifdef DEBUG
        if (JitOptions.fullDebugChecks) {
          if (!integrity.check()) {
            return nullptr;
          }
        }
#endif

        gs.spewPass("Allocate Registers [Backtracking]");
        break;
      }

      default:
        MOZ_CRASH("Bad regalloc");
    }

    if (mir->shouldCancel("Allocate Registers")) {
      return nullptr;
    }
  }

  return lir;
}
CodeGenerator* GenerateCode(MIRGenerator* mir, LIRGraph* lir) {
  auto codegen = MakeUnique<CodeGenerator>(mir, lir);
  if (!codegen) {
    return nullptr;
  }

  if (!codegen->generate()) {
    return nullptr;
  }

  return codegen.release();
}
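// The compiler back end: build MIR from the WarpSnapshot, optimize it,
// lower it to LIR, and generate native code. Everything here can run on a
// helper thread, so it must not depend on a JSContext.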
CodeGenerator* CompileBackEnd(MIRGenerator* mir, WarpSnapshot* snapshot) {
  // Everything in CompileBackEnd can potentially run on a helper thread.
  AutoEnterIonBackend enter;
  AutoSpewEndFunction spewEndFunction(mir);

  {
    WarpCompilation comp(mir->alloc());
    WarpBuilder builder(*snapshot, *mir, &comp);
    if (!builder.build()) {
      return nullptr;
    }
  }

  if (!OptimizeMIR(mir)) {
    return nullptr;
  }

  LIRGraph* lir = GenerateLIR(mir);
  if (!lir) {
    return nullptr;
  }

  return GenerateCode(mir, lir);
}
static AbortReasonOr<WarpSnapshot*> CreateWarpSnapshot(JSContext* cx,
                                                       MIRGenerator* mirGen,
                                                       HandleScript script) {
  // Suppress GC during compilation.
  gc::AutoSuppressGC suppressGC(cx);

  SpewBeginFunction(mirGen, script);

  WarpOracle oracle(cx, *mirGen, script);

  AbortReasonOr<WarpSnapshot*> result = oracle.createSnapshot();

  MOZ_ASSERT_IF(result.isErr(), result.unwrapErr() == AbortReason::Alloc ||
                                    result.unwrapErr() == AbortReason::Error ||
                                    result.unwrapErr() == AbortReason::Disable);
  MOZ_ASSERT_IF(!result.isErr(), result.unwrap());

  return result;
}
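// Drive a single Ion compilation: allocate the MIR generator and create the
// WarpSnapshot on the main thread, then either hand the task off to a
// helper thread (when off-thread compilation is available) or run the back
// end and link the result synchronously.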
static AbortReason IonCompile(JSContext* cx, HandleScript script,
                              jsbytecode* osrPc) {
  cx->check(script);

  auto alloc =
      cx->make_unique<LifoAlloc>(TempAllocator::PreferredLifoChunkSize);
  if (!alloc) {
    return AbortReason::Error;
  }

  if (!cx->zone()->ensureJitZoneExists(cx)) {
    return AbortReason::Error;
  }

  if (!cx->zone()->jitZone()->ensureIonStubsExist(cx)) {
    return AbortReason::Error;
  }

  TempAllocator* temp = alloc->new_<TempAllocator>(alloc.get());
  if (!temp) {
    return AbortReason::Alloc;
  }

  MIRGraph* graph = alloc->new_<MIRGraph>(temp);
  if (!graph) {
    return AbortReason::Alloc;
  }

  InlineScriptTree* inlineScriptTree =
      InlineScriptTree::New(temp, nullptr, nullptr, script);
  if (!inlineScriptTree) {
    return AbortReason::Alloc;
  }

  CompileInfo* info = alloc->new_<CompileInfo>(
      CompileRuntime::get(cx->runtime()), script, script->function(), osrPc,
      script->needsArgsObj(), inlineScriptTree);
  if (!info) {
    return AbortReason::Alloc;
  }

  const OptimizationInfo* optimizationInfo =
      IonOptimizations.get(OptimizationLevel::Normal);
  const JitCompileOptions options(cx);

  MIRGenerator* mirGen =
      alloc->new_<MIRGenerator>(CompileRealm::get(cx->realm()), options, temp,
                                graph, info, optimizationInfo);
  if (!mirGen) {
    return AbortReason::Alloc;
  }

  MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
  MOZ_ASSERT(!script->hasIonScript());
  MOZ_ASSERT(script->canIonCompile());

  if (osrPc) {
    script->jitScript()->setHadIonOSR();
  }

  AbortReasonOr<WarpSnapshot*> result = CreateWarpSnapshot(cx, mirGen, script);
  if (result.isErr()) {
    return result.unwrapErr();
  }
  WarpSnapshot* snapshot = result.unwrap();

  // If possible, compile the script off thread.
  if (options.offThreadCompilationAvailable()) {
    JitSpew(JitSpew_IonSyncLogs,
            "Can't log script %s:%u:%u"
            ". (Compiled on background thread.)",
            script->filename(), script->lineno(),
            script->column().oneOriginValue());

    IonCompileTask* task = alloc->new_<IonCompileTask>(cx, *mirGen, snapshot);
    if (!task) {
      return AbortReason::Alloc;
    }

    AutoLockHelperThreadState lock;
    if (!StartOffThreadIonCompile(task, lock)) {
      JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
      mirGen->graphSpewer().endFunction();
      return AbortReason::Alloc;
    }

    script->jitScript()->setIsIonCompilingOffThread(script);

    // The allocator and associated data will be destroyed after being
    // processed in the finishedOffThreadCompilations list.
    (void)alloc.release();

    return AbortReason::NoAbort;
  }

  bool succeeded = false;
  {
    gc::AutoSuppressGC suppressGC(cx);
    JitContext jctx(cx);
    UniquePtr<CodeGenerator> codegen(CompileBackEnd(mirGen, snapshot));
    if (!codegen) {
      JitSpew(JitSpew_IonAbort, "Failed during back-end compilation.");
      if (cx->isExceptionPending()) {
        return AbortReason::Error;
      }
      return AbortReason::Disable;
    }

    succeeded = LinkCodeGen(cx, codegen.get(), script, snapshot);
  }

  if (succeeded) {
    return AbortReason::NoAbort;
  }
  if (cx->isExceptionPending()) {
    return AbortReason::Error;
  }
  return AbortReason::Disable;
}
static bool CheckFrame(JSContext* cx, BaselineFrame* frame) {
  MOZ_ASSERT(!frame->isDebuggerEvalFrame());
  MOZ_ASSERT(!frame->isEvalFrame());

  // This check prevents overrunning the stack.
  if (frame->isFunctionFrame()) {
    if (TooManyActualArguments(frame->numActualArgs())) {
      JitSpew(JitSpew_IonAbort, "too many actual arguments");
      return false;
    }

    if (TooManyFormalArguments(frame->numFormalArgs())) {
      JitSpew(JitSpew_IonAbort, "too many arguments");
      return false;
    }
  }

  return true;
}
static bool CanIonCompileOrInlineScript(JSScript* script, const char** reason) {
  if (script->isForEval()) {
    // Eval frames are not yet supported. Supporting this will require new
    // logic in pushBailoutFrame to deal with linking prev.
    // Additionally, JSOp::GlobalOrEvalDeclInstantiation support will require
    // baking in isEvalFrame().
    *reason = "eval script";
    return false;
  }

  if (script->isAsync()) {
    if (script->isModule()) {
      *reason = "async module";
      return false;
    }
  }

  if (script->hasNonSyntacticScope() && !script->function()) {
    // Support functions with a non-syntactic global scope but not other
    // scripts. For global scripts, WarpBuilder currently uses the global
    // object as the scope chain; this is not valid when the script has a
    // non-syntactic global scope.
    *reason = "has non-syntactic global scope";
    return false;
  }

  return true;
}
static bool ScriptIsTooLarge(JSContext* cx, JSScript* script) {
  if (!JitOptions.limitScriptSize) {
    return false;
  }

  size_t numLocalsAndArgs = NumLocalsAndArgs(script);

  bool canCompileOffThread = OffThreadCompilationAvailable(cx);
  size_t maxScriptSize = canCompileOffThread
                             ? JitOptions.ionMaxScriptSize
                             : JitOptions.ionMaxScriptSizeMainThread;
  size_t maxLocalsAndArgs = canCompileOffThread
                                ? JitOptions.ionMaxLocalsAndArgs
                                : JitOptions.ionMaxLocalsAndArgsMainThread;

  if (script->length() > maxScriptSize || numLocalsAndArgs > maxLocalsAndArgs) {
    JitSpew(JitSpew_IonAbort,
            "Script too large (%zu bytes) (%zu locals/args) @ %s:%u:%u",
            script->length(), numLocalsAndArgs, script->filename(),
            script->lineno(), script->column().oneOriginValue());
    return true;
  }

  return false;
}
bool CanIonCompileScript(JSContext* cx, JSScript* script) {
  if (!script->canIonCompile()) {
    return false;
  }

  const char* reason = nullptr;
  if (!CanIonCompileOrInlineScript(script, &reason)) {
    JitSpew(JitSpew_IonAbort, "%s", reason);
    return false;
  }

  if (ScriptIsTooLarge(cx, script)) {
    return false;
  }

  return true;
}
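// Try to Ion-compile `script`, optionally for on-stack replacement at
// `osrPc`. Returns Method_Compiled only once an IonScript is attached;
// Method_Skipped is returned for transient conditions (e.g. the compilation
// is still running off thread), Method_CantCompile when the script cannot
// be compiled, and Method_Error when an exception is pending.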
static MethodStatus Compile(JSContext* cx, HandleScript script,
                            BaselineFrame* osrFrame, jsbytecode* osrPc) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));
  MOZ_ASSERT(jit::IsBaselineJitEnabled(cx));

  MOZ_ASSERT(script->hasBaselineScript());
  MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
  MOZ_ASSERT(!script->hasIonScript());

  AutoGeckoProfilerEntry pseudoFrame(
      cx, "Ion script compilation",
      JS::ProfilingCategoryPair::JS_IonCompilation);

  if (script->isDebuggee() || (osrFrame && osrFrame->isDebuggee())) {
    JitSpew(JitSpew_IonAbort, "debugging");
    return Method_Skipped;
  }

  if (!CanIonCompileScript(cx, script)) {
    JitSpew(JitSpew_IonAbort, "Aborted compilation of %s:%u:%u",
            script->filename(), script->lineno(),
            script->column().oneOriginValue());
    return Method_CantCompile;
  }

  OptimizationLevel optimizationLevel =
      IonOptimizations.levelForScript(cx, script, osrPc);
  if (optimizationLevel == OptimizationLevel::DontCompile) {
    return Method_Skipped;
  }

  MOZ_ASSERT(optimizationLevel == OptimizationLevel::Normal);

  if (!CanLikelyAllocateMoreExecutableMemory()) {
    script->resetWarmUpCounterToDelayIonCompilation();
    return Method_Skipped;
  }

  MOZ_ASSERT(!script->hasIonScript());

  AbortReason reason = IonCompile(cx, script, osrPc);
  if (reason == AbortReason::Error) {
    MOZ_ASSERT(cx->isExceptionPending());
    return Method_Error;
  }

  if (reason == AbortReason::Disable) {
    return Method_CantCompile;
  }

  if (reason == AbortReason::Alloc) {
    ReportOutOfMemory(cx);
    return Method_Error;
  }

  // Compilation succeeded, we invalidated right away, or an inlining/alloc
  // abort occurred.
  if (script->hasIonScript()) {
    return Method_Compiled;
  }
  return Method_Skipped;
}

}  // namespace jit
}  // namespace js

bool jit::OffThreadCompilationAvailable(JSContext* cx) {
  // Even if off thread compilation is enabled, compilation must still occur
  // on the main thread in some cases.
  //
  // Require cpuCount > 1 so that Ion compilation jobs and active-thread
  // execution are not competing for the same resources.
  return cx->runtime()->canUseOffthreadIonCompilation() &&
         GetHelperThreadCPUCount() > 1 && CanUseExtraThreads();
}
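
// A minimal sketch (not part of the original file; kept out of the build)
// of how the availability check above feeds the size limits used by
// ScriptIsTooLarge: the off-thread limits are the permissive ones, the
// *MainThread variants the strict ones. The helper name is hypothetical.
#if 0
static size_t ExampleMaxScriptSize(JSContext* cx) {
  // Mirrors the limit-selection logic in ScriptIsTooLarge above.
  return jit::OffThreadCompilationAvailable(cx)
             ? JitOptions.ionMaxScriptSize
             : JitOptions.ionMaxScriptSizeMainThread;
}
#endif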

MethodStatus jit::CanEnterIon(JSContext* cx, RunState& state) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));

  HandleScript script = state.script();
  MOZ_ASSERT(!script->hasIonScript());

  // Skip if the script has been disabled.
  if (!script->canIonCompile()) {
    return Method_Skipped;
  }

  // Skip if the script is being compiled off thread.
  if (script->isIonCompilingOffThread()) {
    return Method_Skipped;
  }

  if (state.isInvoke()) {
    InvokeState& invoke = *state.asInvoke();

    if (TooManyActualArguments(invoke.args().length())) {
      JitSpew(JitSpew_IonAbort, "too many actual args");
      ForbidCompilation(cx, script);
      return Method_CantCompile;
    }

    if (TooManyFormalArguments(
            invoke.args().callee().as<JSFunction>().nargs())) {
      JitSpew(JitSpew_IonAbort, "too many args");
      ForbidCompilation(cx, script);
      return Method_CantCompile;
    }
  }

  // If --ion-eager is used, compile with Baseline first, so that we
  // can directly enter IonMonkey.
  if (JitOptions.eagerIonCompilation() && !script->hasBaselineScript()) {
    MethodStatus status =
        CanEnterBaselineMethod<BaselineTier::Compiler>(cx, state);
    if (status != Method_Compiled) {
      return status;
    }
    // Bytecode analysis may forbid compilation for a script.
    if (!script->canIonCompile()) {
      return Method_CantCompile;
    }
  }

  if (!script->hasBaselineScript()) {
    return Method_Skipped;
  }

  MOZ_ASSERT(!script->isIonCompilingOffThread());
  MOZ_ASSERT(script->canIonCompile());

  // Attempt compilation. Returns Method_Compiled if already compiled.
  MethodStatus status = Compile(cx, script, /* osrFrame = */ nullptr,
                                /* osrPc = */ nullptr);
  if (status != Method_Compiled) {
    if (status == Method_CantCompile) {
      ForbidCompilation(cx, script);
    }
    return status;
  }

  if (state.script()->baselineScript()->hasPendingIonCompileTask()) {
    LinkIonScript(cx, state.script());
    if (!state.script()->hasIonScript()) {
      return jit::Method_Skipped;
    }
  }

  return Method_Compiled;
}

static MethodStatus BaselineCanEnterAtEntry(JSContext* cx, HandleScript script,
                                            BaselineFrame* frame) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));
  MOZ_ASSERT(script->canIonCompile());
  MOZ_ASSERT(!script->isIonCompilingOffThread());
  MOZ_ASSERT(!script->hasIonScript());
  MOZ_ASSERT(frame->isFunctionFrame());

  // Mark as forbidden if frame can't be handled.
  if (!CheckFrame(cx, frame)) {
    ForbidCompilation(cx, script);
    return Method_CantCompile;
  }

  if (script->baselineScript()->hasPendingIonCompileTask()) {
    LinkIonScript(cx, script);
    if (script->hasIonScript()) {
      return Method_Compiled;
    }
  }

  // Attempt compilation. Returns Method_Compiled if already compiled.
  MethodStatus status = Compile(cx, script, frame, nullptr);
  if (status != Method_Compiled) {
    if (status == Method_CantCompile) {
      ForbidCompilation(cx, script);
    }
    return status;
  }

  return Method_Compiled;
}

// Decide if a transition from baseline execution to Ion code should occur.
// May compile or recompile the target JSScript.
static MethodStatus BaselineCanEnterAtBranch(JSContext* cx, HandleScript script,
                                             BaselineFrame* osrFrame,
                                             jsbytecode* pc) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));
  MOZ_ASSERT((JSOp)*pc == JSOp::LoopHead);

  // Skip if the script has been disabled.
  if (!script->canIonCompile()) {
    return Method_Skipped;
  }

  // Skip if the script is being compiled off thread.
  if (script->isIonCompilingOffThread()) {
    return Method_Skipped;
  }

  // Optionally ignore on user request.
  if (!JitOptions.osr) {
    return Method_Skipped;
  }

  // Mark as forbidden if frame can't be handled.
  if (!CheckFrame(cx, osrFrame)) {
    ForbidCompilation(cx, script);
    return Method_CantCompile;
  }

  // If the jitcode still needs to be linked, link it now so that we have a
  // valid IonScript to inspect below.
  if (script->baselineScript()->hasPendingIonCompileTask()) {
    LinkIonScript(cx, script);
  }

  // By default a recompilation doesn't happen on osr mismatch.
  // Decide if we want to force a recompilation if this happens too much.
  if (script->hasIonScript()) {
    if (pc == script->ionScript()->osrPc()) {
      return Method_Compiled;
    }

    uint32_t count = script->ionScript()->incrOsrPcMismatchCounter();
    if (count <= JitOptions.osrPcMismatchesBeforeRecompile &&
        !JitOptions.eagerIonCompilation()) {
      return Method_Skipped;
    }

    JitSpew(JitSpew_IonScripts, "Forcing OSR Mismatch Compilation");
    Invalidate(cx, script);
  }

  // Attempt compilation.
  // - Returns Method_Compiled if the right ionscript is present
  //   (meaning it was already present or a sequential compile finished).
  // - Returns Method_Skipped if pc doesn't match
  //   (this means a background thread compilation with that pc could have
  //   started or not).
  MethodStatus status = Compile(cx, script, osrFrame, pc);
  if (status != Method_Compiled) {
    if (status == Method_CantCompile) {
      ForbidCompilation(cx, script);
    }
    return status;
  }

  // Report that the compilation was skipped when the osr pc wasn't adjusted.
  // This can happen when there was still an IonScript available and a
  // background compilation started, but hasn't finished yet.
  // Or when we didn't force a recompile.
  if (script->hasIonScript() && pc != script->ionScript()->osrPc()) {
    return Method_Skipped;
  }

  return Method_Compiled;
}
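
// Illustrative walk-through (not in the original source) of the mismatch
// counter used above: suppose an IonScript was compiled with osrPc B and a
// hot loop now requests OSR at a different pc A. Each mismatched request
// bumps the counter; once it exceeds
// JitOptions.osrPcMismatchesBeforeRecompile, the script is invalidated and
// recompiled with A as the requested OSR entry point.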

static bool IonCompileScriptForBaseline(JSContext* cx, BaselineFrame* frame,
                                        jsbytecode* pc) {
  MOZ_ASSERT(IsIonEnabled(cx));

  RootedScript script(cx, frame->script());
  bool isLoopHead = JSOp(*pc) == JSOp::LoopHead;

  // The Baseline JIT code checks for Ion disabled or compiling off-thread.
  MOZ_ASSERT(script->canIonCompile());
  MOZ_ASSERT(!script->isIonCompilingOffThread());

  // If an Ion script exists, but the PC is not at a loop entry, then Ion
  // will be entered for this script at an appropriate LOOPENTRY or the next
  // time this function is called.
  if (script->hasIonScript() && !isLoopHead) {
    JitSpew(JitSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
    // TODO: Assert that an ion-script-already-exists checker stub doesn't
    // exist.
    // TODO: Clear all optimized stubs.
    // TODO: Add an ion-script-already-exists checker stub.
    return true;
  }

  // Ensure that Ion-compiled code is available.
  JitSpew(JitSpew_BaselineOSR,
          "WarmUpCounter for %s:%u:%u reached %d at pc %p, trying to switch to "
          "Ion!",
          script->filename(), script->lineno(),
          script->column().oneOriginValue(), (int)script->getWarmUpCount(),
          (void*)pc);

  MethodStatus stat;
  if (isLoopHead) {
    JitSpew(JitSpew_BaselineOSR, " Compile at loop head!");
    stat = BaselineCanEnterAtBranch(cx, script, frame, pc);
  } else if (frame->isFunctionFrame()) {
    JitSpew(JitSpew_BaselineOSR,
            " Compile function from top for later entry!");
    stat = BaselineCanEnterAtEntry(cx, script, frame);
  } else {
    return true;
  }

  if (stat == Method_Error) {
    JitSpew(JitSpew_BaselineOSR, " Compile with Ion errored!");
    return false;
  }

  if (stat == Method_CantCompile) {
    MOZ_ASSERT(!script->canIonCompile());
    JitSpew(JitSpew_BaselineOSR, " Can't compile with Ion!");
  } else if (stat == Method_Skipped) {
    JitSpew(JitSpew_BaselineOSR, " Skipped compile with Ion!");
  } else if (stat == Method_Compiled) {
    JitSpew(JitSpew_BaselineOSR, " Compiled with Ion!");
  } else {
    MOZ_CRASH("Invalid MethodStatus!");
  }

  return true;
}

bool jit::IonCompileScriptForBaselineAtEntry(JSContext* cx,
                                             BaselineFrame* frame) {
  JSScript* script = frame->script();
  return IonCompileScriptForBaseline(cx, frame, script->code());
}

/* clang-format off */
// The following data is kept in a temporary heap-allocated buffer, stored in
// JitRuntime (high memory addresses at top, low at bottom):
//
//            +----->+=================================+  --      <---- High Address
//            |      |                                 |   |
//            |      |     ...BaselineFrame...         |   |-- Copy of BaselineFrame + stack values
//            |      |                                 |   |
//            |      +---------------------------------+   |
//            |      |                                 |   |
//            |      |     ...Locals/Stack...          |   |
//            |      |                                 |   |
//            |      +=================================+  --
//            |      |     Padding(Maybe Empty)        |
//            |      +=================================+  --
//            +------|-- baselineFrame                 |   |-- IonOsrTempData
//                   |   jitcode                       |   |
//                   +=================================+  --      <---- Low Address
//
// A pointer to the IonOsrTempData is returned.
/* clang-format on */
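
// A minimal sketch (not part of the original file; kept out of the build) of
// the structure the diagram above describes. The real IonOsrTempData
// definition lives elsewhere in the jit/ headers; the field names here match
// the uses in PrepareOsrTempData below, but the sketch is illustrative only.
#if 0
struct ExampleIonOsrTempData {
  void* jitcode;           // Ion OSR entry point to jump to.
  uint8_t* baselineFrame;  // Points at the *end* of the copied frame data,
                           // like the frame pointer in baseline frames.
};
#endif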

static IonOsrTempData* PrepareOsrTempData(JSContext* cx, BaselineFrame* frame,
                                          uint32_t frameSize, void* jitcode) {
  uint32_t numValueSlots = frame->numValueSlots(frameSize);

  // Calculate the amount of space to allocate:
  // BaselineFrame space:
  //   (sizeof(Value) * numValueSlots)
  //   + sizeof(BaselineFrame)
  //
  // IonOsrTempData space:
  //   sizeof(IonOsrTempData)

  size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numValueSlots;
  size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

  size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                      AlignBytes(ionOsrTempDataSpace, sizeof(Value));
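
  // Worked example (illustrative numbers, not from the original source):
  // with 10 value slots and sizeof(Value) == 8, frameSpace is
  // sizeof(BaselineFrame) + 80 bytes. Each term is rounded up to a Value
  // boundary separately, so the IonOsrTempData header placed at the start of
  // the buffer never overlaps the copied frame data that follows it.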

  JitRuntime* jrt = cx->runtime()->jitRuntime();
  uint8_t* buf = jrt->allocateIonOsrTempData(totalSpace);
  if (!buf) {
    ReportOutOfMemory(cx);
    return nullptr;
  }

  IonOsrTempData* info = new (buf) IonOsrTempData();
  info->jitcode = jitcode;

  // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
  // |this| are not copied but left on the stack: the Baseline and Ion frames
  // share the same frame prefix and Ion won't clobber these values. Note
  // that info->baselineFrame will point to the *end* of the frame data, like
  // the frame pointer register in baseline frames.
  uint8_t* frameStart =
      (uint8_t*)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
  info->baselineFrame = frameStart + frameSpace;

  memcpy(frameStart, (uint8_t*)frame - numValueSlots * sizeof(Value),
         frameSpace);

  JitSpew(JitSpew_BaselineOSR, "Allocated IonOsrTempData at %p", info);
  JitSpew(JitSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

  // All done.
  return info;
}

bool jit::IonCompileScriptForBaselineOSR(JSContext* cx, BaselineFrame* frame,
                                         uint32_t frameSize, jsbytecode* pc,
                                         IonOsrTempData** infoPtr) {
  MOZ_ASSERT(infoPtr);
  *infoPtr = nullptr;

  MOZ_ASSERT(frame->debugFrameSize() == frameSize);
  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);

  if (!IonCompileScriptForBaseline(cx, frame, pc)) {
    return false;
  }

  RootedScript script(cx, frame->script());
  if (!script->hasIonScript() || script->ionScript()->osrPc() != pc ||
      frame->isDebuggee()) {
    return true;
  }

  IonScript* ion = script->ionScript();
  MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled() ==
             ion->hasProfilingInstrumentation());
  MOZ_ASSERT(ion->osrPc() == pc);

  ion->resetOsrPcMismatchCounter();

  JitSpew(JitSpew_BaselineOSR, " OSR possible!");
  void* jitcode = ion->method()->raw() + ion->osrEntryOffset();

  // Prepare the temporary heap copy of the fake InterpreterFrame and actual
  // args list.
  JitSpew(JitSpew_BaselineOSR, "Got jitcode. Preparing for OSR into ion.");
  IonOsrTempData* info = PrepareOsrTempData(cx, frame, frameSize, jitcode);
  if (!info) {
    return false;
  }

  *infoPtr = info;
  return true;
}
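
// Illustrative note (behavior inferred from the code above, not stated in
// this file): the baseline caller is expected to test *infoPtr on return.
// When it is non-null, it switches to info->baselineFrame and jumps to
// info->jitcode to complete the OSR transition; when it is null (compilation
// skipped, mismatched pc, or a debuggee frame), execution simply continues
// in baseline code.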

static void InvalidateActivation(JS::GCContext* gcx,
                                 const JitActivationIterator& activations,
                                 bool invalidateAll) {
  JitSpew(JitSpew_IonInvalidate, "BEGIN invalidating activation");

#ifdef CHECK_OSIPOINT_REGISTERS
  if (JitOptions.checkOsiPointRegisters) {
    activations->asJit()->setCheckRegs(false);
  }
#endif

  size_t frameno = 1;

  for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter, ++frameno) {
    const JSJitFrameIter& frame = iter.frame();
    MOZ_ASSERT_IF(frameno == 1, frame.isExitFrame() ||
                                    frame.type() == FrameType::Bailout ||
                                    frame.type() == FrameType::JSJitToWasm);

#ifdef JS_JITSPEW
    switch (frame.type()) {
      case FrameType::Exit:
        JitSpew(JitSpew_IonInvalidate, "#%zu exit frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::JSJitToWasm:
        JitSpew(JitSpew_IonInvalidate, "#%zu wasm exit frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::BaselineJS:
      case FrameType::IonJS:
      case FrameType::Bailout: {
        MOZ_ASSERT(frame.isScripted());
        const char* type = "Unknown";
        if (frame.isIonJS()) {
          type = "Optimized";
        } else if (frame.isBaselineJS()) {
          type = "Baseline";
        } else if (frame.isBailoutJS()) {
          type = "Bailing";
        }
        JSScript* script = frame.maybeForwardedScript();
        JitSpew(JitSpew_IonInvalidate,
                "#%zu %s JS frame @ %p, %s:%u:%u (fun: %p, script: %p, pc %p)",
                frameno, type, frame.fp(), script->maybeForwardedFilename(),
                script->lineno(), script->column().oneOriginValue(),
                frame.maybeCallee(), script, frame.resumePCinCurrentFrame());
        break;
      }
      case FrameType::BaselineStub:
        JitSpew(JitSpew_IonInvalidate, "#%zu baseline stub frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::BaselineInterpreterEntry:
        JitSpew(JitSpew_IonInvalidate,
                "#%zu baseline interpreter entry frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::Rectifier:
        JitSpew(JitSpew_IonInvalidate, "#%zu rectifier frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::TrampolineNative:
        JitSpew(JitSpew_IonInvalidate, "#%zu TrampolineNative frame @ %p",
                frameno, frame.fp());
        break;
      case FrameType::IonICCall:
        JitSpew(JitSpew_IonInvalidate, "#%zu ion IC call frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::CppToJSJit:
        JitSpew(JitSpew_IonInvalidate, "#%zu entry frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::WasmToJSJit:
        JitSpew(JitSpew_IonInvalidate, "#%zu wasm frames @ %p", frameno,
                frame.fp());
        break;
    }
#endif  // JS_JITSPEW

    if (!frame.isIonScripted()) {
      continue;
    }

    // See if the frame has already been invalidated.
    if (frame.checkInvalidation()) {
      continue;
    }

    JSScript* script = frame.maybeForwardedScript();
    if (!script->hasIonScript()) {
      continue;
    }

    if (!invalidateAll && !script->ionScript()->invalidated()) {
      continue;
    }

    IonScript* ionScript = script->ionScript();

    // Purge ICs before we mark this script as invalidated. This will
    // prevent lastJump_ from appearing to be a bogus pointer, just
    // in case anyone tries to read it.
    ionScript->purgeICs(script->zone());

    // This frame needs to be invalidated. We do the following:
    //
    // 1. Increment the reference counter to keep the ionScript alive
    //    for the invalidation bailout or for the exception handler.
    // 2. Determine the safepoint that corresponds to the current call.
    // 3. From the safepoint, get the distance to the OSI-patchable offset.
    // 4. From the IonScript, determine the distance between the
    //    call-patchable offset and the invalidation epilogue.
    // 5. Patch the OSI point with a relative call to the
    //    invalidation epilogue.
    //
    // The code generator ensures that there's enough space for us
    // to patch in a call-relative operation at each invalidation
    // point.
    //
    // Note: you can't simplify this mechanism to "just patch the
    // instruction immediately after the call" because things may
    // need to move into a well-defined register state (using move
    // instructions after the call) to capture an appropriate
    // snapshot after the call occurs.

    ionScript->incrementInvalidationCount();

    JitCode* ionCode = ionScript->method();

    // We're about to remove edges from the JSScript to GC things embedded in
    // the JitCode. Perform a barrier to let the GC know about those edges.
    PreWriteBarrier(script->zone(), ionCode, [](JSTracer* trc, JitCode* code) {
      code->traceChildren(trc);
    });

    ionCode->setInvalidated();

    // Don't adjust OSI points in a bailout path.
    if (frame.isBailoutJS()) {
      continue;
    }

    // Write the delta (from the return address offset to the
    // IonScript pointer embedded into the invalidation epilogue)
    // where the safepointed call instruction used to be. We rely on
    // the call sequence causing the safepoint being >= the size of
    // a uint32, which is checked during safepoint index
    // construction.
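    //
    // Illustrative arithmetic (derived from the statements below, not in the
    // original comment): if the return address lies at offset R from
    // ionCode->raw() and the IonScript pointer embedded in the invalidation
    // epilogue lies at offset E, then delta = E - R is written over the old
    // call site, letting the epilogue recover the IonScript from the patched
    // return address alone.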
    AutoWritableJitCode awjc(ionCode);
    const SafepointIndex* si =
        ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
    CodeLocationLabel dataLabelToMunge(frame.resumePCinCurrentFrame());
    ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -
                      (frame.resumePCinCurrentFrame() - ionCode->raw());
    Assembler::PatchWrite_Imm32(dataLabelToMunge, Imm32(delta));

    CodeLocationLabel osiPatchPoint =
        SafepointReader::InvalidationPatchPoint(ionScript, si);
    CodeLocationLabel invalidateEpilogue(
        ionCode, CodeOffset(ionScript->invalidateEpilogueOffset()));

    JitSpew(
        JitSpew_IonInvalidate,
        " ! Invalidate ionScript %p (inv count %zu) -> patching osipoint %p",
        ionScript, ionScript->invalidationCount(), (void*)osiPatchPoint.raw());
    Assembler::PatchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
  }

  JitSpew(JitSpew_IonInvalidate, "END invalidating activation");
}

void jit::InvalidateAll(JS::GCContext* gcx, Zone* zone) {
  // The caller should previously have cancelled off thread compilation.
  MOZ_ASSERT(!HasOffThreadIonCompile(zone));
  if (zone->isAtomsZone()) {
    return;
  }
  JSContext* cx = TlsContext.get();
  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
    if (iter->compartment()->zone() == zone) {
      JitSpew(JitSpew_IonInvalidate, "Invalidating all frames for GC");
      InvalidateActivation(gcx, iter, true);
    }
  }
}

static void ClearIonScriptAfterInvalidation(JSContext* cx, JSScript* script,
                                            IonScript* ionScript,
                                            bool resetUses) {
  // Null out the JitScript's IonScript pointer. The caller is responsible for
  // destroying the IonScript using the invalidation count mechanism.
  DebugOnly<IonScript*> clearedIonScript =
      script->jitScript()->clearIonScript(cx->gcContext(), script);
  MOZ_ASSERT(clearedIonScript == ionScript);

  // Wait for the scripts to get warm again before doing another
  // compile, unless we are recompiling *because* a script got hot
  // (resetUses is false).
  if (resetUses) {
    script->resetWarmUpCounterToDelayIonCompilation();
  }
}

void jit::Invalidate(JSContext* cx, const RecompileInfoVector& invalid,
                     bool resetUses, bool cancelOffThread) {
  JitSpew(JitSpew_IonInvalidate, "Start invalidation.");

  // Add an invalidation reference to all invalidated IonScripts to indicate
  // to the traversal which frames have been invalidated.
  size_t numInvalidations = 0;
  for (const RecompileInfo& info : invalid) {
    if (cancelOffThread) {
      CancelOffThreadIonCompile(info.script());
    }

    IonScript* ionScript = info.maybeIonScriptToInvalidate();
    if (!ionScript) {
      continue;
    }

    JitSpew(JitSpew_IonInvalidate, " Invalidate %s:%u:%u, IonScript %p",
            info.script()->filename(), info.script()->lineno(),
            info.script()->column().oneOriginValue(), ionScript);

    // Keep the ion script alive during the invalidation and flag this
    // ionScript as being invalidated. This increment is removed by the
    // loop after the calls to InvalidateActivation.
    ionScript->incrementInvalidationCount();
    numInvalidations++;
  }

  if (!numInvalidations) {
    JitSpew(JitSpew_IonInvalidate, " No IonScript invalidation.");
    return;
  }

  JS::GCContext* gcx = cx->gcContext();
  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
    InvalidateActivation(gcx, iter, false);
  }

  // Drop the references added above. If a script was never active, its
  // IonScript will be immediately destroyed. Otherwise, it will be held live
  // until its last invalidated frame is destroyed.
  for (const RecompileInfo& info : invalid) {
    IonScript* ionScript = info.maybeIonScriptToInvalidate();
    if (!ionScript) {
      continue;
    }

    if (ionScript->invalidationCount() == 1) {
      // decrementInvalidationCount will destroy the IonScript so null out
      // jitScript->ionScript_ now. We don't want to do this unconditionally
      // because maybeIonScriptToInvalidate depends on script->ionScript() (we
      // would leak the IonScript if |invalid| contains duplicates).
      ClearIonScriptAfterInvalidation(cx, info.script(), ionScript, resetUses);
    }

    ionScript->decrementInvalidationCount(gcx);
    numInvalidations--;
  }

  // Make sure we didn't leak references by invalidating the same IonScript
  // multiple times in the above loop.
  MOZ_ASSERT(!numInvalidations);

  // Finally, null out jitScript->ionScript_ for IonScripts that are still on
  // the stack.
  for (const RecompileInfo& info : invalid) {
    if (IonScript* ionScript = info.maybeIonScriptToInvalidate()) {
      ClearIonScriptAfterInvalidation(cx, info.script(), ionScript, resetUses);
    }
  }
}

void jit::IonScript::invalidate(JSContext* cx, JSScript* script, bool resetUses,
                                const char* reason) {
  // Note: we could short-circuit here if we already invalidated this
  // IonScript, but jit::Invalidate also cancels off-thread compilations of
  // |script|.
  MOZ_RELEASE_ASSERT(invalidated() || script->ionScript() == this);

  JitSpew(JitSpew_IonInvalidate, " Invalidate IonScript %p: %s", this, reason);

  // RecompileInfoVector has inline space for at least one element.
  RecompileInfoVector list;
  MOZ_RELEASE_ASSERT(list.reserve(1));
  list.infallibleEmplaceBack(script, compilationId());

  Invalidate(cx, list, resetUses, true);
}
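
// Typical call shape (illustrative; the reason string is hypothetical):
//
//   script->ionScript()->invalidate(cx, script, /* resetUses = */ false,
//                                   "example: assumption broken");
//
// Within this function the reason only feeds the IonInvalidate spew channel.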

void jit::Invalidate(JSContext* cx, JSScript* script, bool resetUses,
                     bool cancelOffThread) {
  MOZ_ASSERT(script->hasIonScript());

  if (cx->runtime()->geckoProfiler().enabled()) {
    // Register invalidation with profiler.
    // Format of event payload string:
    //   "<filename>:<lineno>:<colno>"

    // Get the script filename, if any, and its length.
    const char* filename = script->filename();
    if (filename == nullptr) {
      filename = "<unknown>";
    }

    // Construct the descriptive string.
    UniqueChars buf = JS_smprintf("%s:%u:%u", filename, script->lineno(),
                                  script->column().oneOriginValue());

    // Ignore the event on allocation failure.
    if (buf) {
      cx->runtime()->geckoProfiler().markEvent("Invalidate", buf.get());
    }
  }

  // RecompileInfoVector has inline space for at least one element.
  RecompileInfoVector scripts;
  MOZ_ASSERT(script->hasIonScript());
  MOZ_RELEASE_ASSERT(scripts.reserve(1));
  scripts.infallibleEmplaceBack(script, script->ionScript()->compilationId());

  Invalidate(cx, scripts, resetUses, cancelOffThread);
}

void jit::FinishInvalidation(JS::GCContext* gcx, JSScript* script) {
  if (!script->hasIonScript()) {
    return;
  }

  // In all cases, null out jitScript->ionScript_ to avoid re-entry.
  IonScript* ion = script->jitScript()->clearIonScript(gcx, script);

  // If this script has Ion code on the stack, invalidated() will return
  // true. In that case destruction is deferred until the invalidation
  // count drops to zero.
  if (!ion->invalidated()) {
    jit::IonScript::Destroy(gcx, ion);
  }
}

void jit::ForbidCompilation(JSContext* cx, JSScript* script) {
  JitSpew(JitSpew_IonAbort, "Disabling Ion compilation of script %s:%u:%u",
          script->filename(), script->lineno(),
          script->column().oneOriginValue());

  CancelOffThreadIonCompile(script);

  if (script->hasIonScript()) {
    Invalidate(cx, script, false);
  }

  script->disableIon();
}

size_t jit::SizeOfIonData(JSScript* script,
                          mozilla::MallocSizeOf mallocSizeOf) {
  size_t result = 0;

  if (script->hasIonScript()) {
    result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
  }

  return result;
}
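
// Illustrative use (not from this file): memory reporters call this with a
// MallocSizeOf callback, e.g. one defined via MOZ_DEFINE_MALLOC_SIZE_OF, so
// the IonScript's malloc'd payload is attributed to the requesting report
// without this function needing to know which reporter is asking.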

// If you change these, please also change the comment in TempAllocator.
/* static */ const size_t TempAllocator::BallastSize = 16 * 1024;
/* static */ const size_t TempAllocator::PreferredLifoChunkSize = 32 * 1024;