// Bug 1890513: Directly invoke variadic native functions. r=jandem
// [gecko.git] / js / src / jit / JitZone.h
// blob d4f2350b8da84d74e88600efc09e4b512d8a1ce7
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #ifndef jit_JitZone_h
8 #define jit_JitZone_h
10 #include "mozilla/Assertions.h"
11 #include "mozilla/EnumeratedArray.h"
12 #include "mozilla/HashFunctions.h"
13 #include "mozilla/LinkedList.h"
14 #include "mozilla/Maybe.h"
15 #include "mozilla/MemoryReporting.h"
17 #include <stddef.h>
18 #include <stdint.h>
19 #include <utility>
21 #include "gc/Barrier.h"
22 #include "gc/Marking.h"
23 #include "jit/ExecutableAllocator.h"
24 #include "jit/ICStubSpace.h"
25 #include "jit/Invalidation.h"
26 #include "jit/JitScript.h"
27 #include "js/AllocPolicy.h"
28 #include "js/GCHashTable.h"
29 #include "js/HashTable.h"
30 #include "js/TracingAPI.h"
31 #include "js/TypeDecls.h"
32 #include "js/UniquePtr.h"
33 #include "js/Utility.h"
34 #include "threading/ProtectedData.h"
36 namespace JS {
37 struct CodeSizes;
40 namespace js {
41 namespace jit {
43 enum class CacheKind : uint8_t;
44 class CacheIRStubInfo;
45 class JitCode;
46 class JitScript;
// Identifies which JIT tier an inline-cache stub belongs to.
enum class ICStubEngine : uint8_t {
  // Baseline IC, see BaselineIC.h.
  Baseline = 0,

  // Ion IC, see IonIC.h.
  IonIC
};
56 struct CacheIRStubKey : public DefaultHasher<CacheIRStubKey> {
57 struct Lookup {
58 CacheKind kind;
59 ICStubEngine engine;
60 const uint8_t* code;
61 uint32_t length;
63 Lookup(CacheKind kind, ICStubEngine engine, const uint8_t* code,
64 uint32_t length)
65 : kind(kind), engine(engine), code(code), length(length) {}
68 static HashNumber hash(const Lookup& l);
69 static bool match(const CacheIRStubKey& entry, const Lookup& l);
71 UniquePtr<CacheIRStubInfo, JS::FreePolicy> stubInfo;
73 explicit CacheIRStubKey(CacheIRStubInfo* info) : stubInfo(info) {}
74 CacheIRStubKey(CacheIRStubKey&& other)
75 : stubInfo(std::move(other.stubInfo)) {}
77 void operator=(CacheIRStubKey&& other) {
78 stubInfo = std::move(other.stubInfo);
82 struct BaselineCacheIRStubCodeMapGCPolicy {
83 static bool traceWeak(JSTracer* trc, CacheIRStubKey*,
84 WeakHeapPtr<JitCode*>* value) {
85 return TraceWeakEdge(trc, value, "traceWeak");
// Controls whether JitScript iteration (see JitZone::forEachJitScript) visits
// scripts that are about to be finalized during GC sweeping.
enum JitScriptFilter : bool { SkipDyingScripts, IncludeDyingScripts };
91 class JitZone {
92 // Allocated space for CacheIR stubs.
93 ICStubSpace stubSpace_;
95 // Set of CacheIRStubInfo instances used by Ion stubs in this Zone.
96 using IonCacheIRStubInfoSet =
97 HashSet<CacheIRStubKey, CacheIRStubKey, SystemAllocPolicy>;
98 IonCacheIRStubInfoSet ionCacheIRStubInfoSet_;
100 // Map CacheIRStubKey to shared JitCode objects.
101 using BaselineCacheIRStubCodeMap =
102 GCHashMap<CacheIRStubKey, WeakHeapPtr<JitCode*>, CacheIRStubKey,
103 SystemAllocPolicy, BaselineCacheIRStubCodeMapGCPolicy>;
104 BaselineCacheIRStubCodeMap baselineCacheIRStubCodes_;
106 // Executable allocator for all code except wasm code.
107 MainThreadData<ExecutableAllocator> execAlloc_;
109 // HashMap that maps scripts to compilations inlining those scripts.
110 using InlinedScriptMap =
111 GCHashMap<WeakHeapPtr<BaseScript*>, RecompileInfoVector,
112 StableCellHasher<WeakHeapPtr<BaseScript*>>, SystemAllocPolicy>;
113 InlinedScriptMap inlinedCompilations_;
115 mozilla::LinkedList<JitScript> jitScripts_;
117 // The following two fields are a pair of associated scripts. If they are
118 // non-null, the child has been inlined into the parent, and we have bailed
119 // out due to a MonomorphicInlinedStubFolding bailout. If it wasn't
120 // trial-inlined, we need to track for the parent if we attach a new case to
121 // the corresponding folded stub which belongs to the child.
122 WeakHeapPtr<JSScript*> lastStubFoldingBailoutChild_;
123 WeakHeapPtr<JSScript*> lastStubFoldingBailoutParent_;
125 // The JitZone stores stubs to concatenate strings inline and perform RegExp
126 // calls inline. These bake in zone specific pointers and can't be stored in
127 // JitRuntime. They also are dependent on the value of 'initialStringHeap' and
128 // must be flushed when its value changes.
130 // These are weak pointers, but they can by accessed during off-thread Ion
131 // compilation and therefore can't use the usual read barrier. Instead, we
132 // record which stubs have been read and perform the appropriate barriers in
133 // CodeGenerator::link().
135 enum StubIndex : uint32_t {
136 StringConcat = 0,
137 RegExpMatcher,
138 RegExpSearcher,
139 RegExpExecMatch,
140 RegExpExecTest,
141 Count
144 mozilla::EnumeratedArray<StubIndex, WeakHeapPtr<JitCode*>,
145 size_t(StubIndex::Count)>
146 stubs_;
148 mozilla::Maybe<IonCompilationId> currentCompilationId_;
149 bool keepJitScripts_ = false;
151 gc::Heap initialStringHeap = gc::Heap::Tenured;
153 JitCode* generateStringConcatStub(JSContext* cx);
154 JitCode* generateRegExpMatcherStub(JSContext* cx);
155 JitCode* generateRegExpSearcherStub(JSContext* cx);
156 JitCode* generateRegExpExecMatchStub(JSContext* cx);
157 JitCode* generateRegExpExecTestStub(JSContext* cx);
159 JitCode* getStubNoBarrier(StubIndex stub,
160 uint32_t* requiredBarriersOut) const {
161 MOZ_ASSERT(CurrentThreadIsIonCompiling());
162 *requiredBarriersOut |= 1 << uint32_t(stub);
163 return stubs_[stub].unbarrieredGet();
166 public:
167 explicit JitZone(bool zoneHasNurseryStrings) {
168 setStringsCanBeInNursery(zoneHasNurseryStrings);
170 ~JitZone() {
171 MOZ_ASSERT(jitScripts_.isEmpty());
172 MOZ_ASSERT(!keepJitScripts_);
175 void traceWeak(JSTracer* trc, Zone* zone);
177 void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
178 JS::CodeSizes* code, size_t* jitZone,
179 size_t* cacheIRStubs) const;
181 ICStubSpace* stubSpace() { return &stubSpace_; }
183 JitCode* getBaselineCacheIRStubCode(const CacheIRStubKey::Lookup& key,
184 CacheIRStubInfo** stubInfo) {
185 auto p = baselineCacheIRStubCodes_.lookup(key);
186 if (p) {
187 *stubInfo = p->key().stubInfo.get();
188 return p->value();
190 *stubInfo = nullptr;
191 return nullptr;
193 [[nodiscard]] bool putBaselineCacheIRStubCode(
194 const CacheIRStubKey::Lookup& lookup, CacheIRStubKey& key,
195 JitCode* stubCode) {
196 auto p = baselineCacheIRStubCodes_.lookupForAdd(lookup);
197 MOZ_ASSERT(!p);
198 return baselineCacheIRStubCodes_.add(p, std::move(key), stubCode);
201 CacheIRStubInfo* getIonCacheIRStubInfo(const CacheIRStubKey::Lookup& key) {
202 IonCacheIRStubInfoSet::Ptr p = ionCacheIRStubInfoSet_.lookup(key);
203 return p ? p->stubInfo.get() : nullptr;
205 [[nodiscard]] bool putIonCacheIRStubInfo(const CacheIRStubKey::Lookup& lookup,
206 CacheIRStubKey& key) {
207 IonCacheIRStubInfoSet::AddPtr p =
208 ionCacheIRStubInfoSet_.lookupForAdd(lookup);
209 MOZ_ASSERT(!p);
210 return ionCacheIRStubInfoSet_.add(p, std::move(key));
212 void purgeIonCacheIRStubInfo() { ionCacheIRStubInfoSet_.clearAndCompact(); }
214 ExecutableAllocator& execAlloc() { return execAlloc_.ref(); }
215 const ExecutableAllocator& execAlloc() const { return execAlloc_.ref(); }
217 [[nodiscard]] bool addInlinedCompilation(const RecompileInfo& info,
218 JSScript* inlined);
220 RecompileInfoVector* maybeInlinedCompilations(JSScript* inlined) {
221 auto p = inlinedCompilations_.lookup(inlined);
222 return p ? &p->value() : nullptr;
225 void removeInlinedCompilations(JSScript* inlined) {
226 inlinedCompilations_.remove(inlined);
229 void noteStubFoldingBailout(JSScript* child, JSScript* parent) {
230 lastStubFoldingBailoutChild_ = child;
231 lastStubFoldingBailoutParent_ = parent;
233 bool hasStubFoldingBailoutData(JSScript* child) const {
234 return lastStubFoldingBailoutChild_ &&
235 lastStubFoldingBailoutChild_.get() == child &&
236 lastStubFoldingBailoutParent_;
238 JSScript* stubFoldingBailoutParent() const {
239 MOZ_ASSERT(lastStubFoldingBailoutChild_);
240 return lastStubFoldingBailoutParent_.get();
242 void clearStubFoldingBailoutData() {
243 lastStubFoldingBailoutChild_ = nullptr;
244 lastStubFoldingBailoutParent_ = nullptr;
247 void registerJitScript(JitScript* script) { jitScripts_.insertBack(script); }
249 // Iterate over all JitScripts in this zone calling |f| on each, allowing |f|
250 // to remove the script. The template parameter |filter| controls whether to
251 // include dying JitScripts during GC sweeping. Be careful when using this not
252 // to let GC things reachable from the JitScript escape - they may be gray.
253 template <JitScriptFilter filter = SkipDyingScripts, typename F>
254 void forEachJitScript(F&& f) {
255 JitScript* script = jitScripts_.getFirst();
256 while (script) {
257 JitScript* next = script->getNext();
258 if (filter == IncludeDyingScripts ||
259 !gc::IsAboutToBeFinalizedUnbarriered(script->owningScript())) {
260 f(script);
262 script = next;
266 // Like forEachJitScript above, but abort if |f| returns false.
267 template <JitScriptFilter filter = SkipDyingScripts, typename F>
268 bool forEachJitScriptFallible(F&& f) {
269 JitScript* script = jitScripts_.getFirst();
270 while (script) {
271 JitScript* next = script->getNext();
272 if (filter == IncludeDyingScripts ||
273 !gc::IsAboutToBeFinalizedUnbarriered(script->owningScript())) {
274 if (!f(script)) {
275 return false;
278 script = next;
280 return true;
283 bool keepJitScripts() const { return keepJitScripts_; }
284 void setKeepJitScripts(bool keep) { keepJitScripts_ = keep; }
286 mozilla::Maybe<IonCompilationId> currentCompilationId() const {
287 return currentCompilationId_;
289 mozilla::Maybe<IonCompilationId>& currentCompilationIdRef() {
290 return currentCompilationId_;
293 // Initialize code stubs only used by Ion, not Baseline.
294 [[nodiscard]] bool ensureIonStubsExist(JSContext* cx) {
295 if (stubs_[StringConcat]) {
296 return true;
298 stubs_[StringConcat] = generateStringConcatStub(cx);
299 return stubs_[StringConcat];
302 void traceWeak(JSTracer* trc, JS::Realm* realm);
304 void discardStubs() {
305 for (WeakHeapPtr<JitCode*>& stubRef : stubs_) {
306 stubRef = nullptr;
310 bool hasStubs() const {
311 for (const WeakHeapPtr<JitCode*>& stubRef : stubs_) {
312 if (stubRef) {
313 return true;
316 return false;
319 void setStringsCanBeInNursery(bool allow) {
320 MOZ_ASSERT(!hasStubs());
321 initialStringHeap = allow ? gc::Heap::Default : gc::Heap::Tenured;
324 JitCode* stringConcatStubNoBarrier(uint32_t* requiredBarriersOut) const {
325 return getStubNoBarrier(StringConcat, requiredBarriersOut);
328 JitCode* regExpMatcherStubNoBarrier(uint32_t* requiredBarriersOut) const {
329 return getStubNoBarrier(RegExpMatcher, requiredBarriersOut);
332 [[nodiscard]] JitCode* ensureRegExpMatcherStubExists(JSContext* cx) {
333 if (JitCode* code = stubs_[RegExpMatcher]) {
334 return code;
336 stubs_[RegExpMatcher] = generateRegExpMatcherStub(cx);
337 return stubs_[RegExpMatcher];
340 JitCode* regExpSearcherStubNoBarrier(uint32_t* requiredBarriersOut) const {
341 return getStubNoBarrier(RegExpSearcher, requiredBarriersOut);
344 [[nodiscard]] JitCode* ensureRegExpSearcherStubExists(JSContext* cx) {
345 if (JitCode* code = stubs_[RegExpSearcher]) {
346 return code;
348 stubs_[RegExpSearcher] = generateRegExpSearcherStub(cx);
349 return stubs_[RegExpSearcher];
352 JitCode* regExpExecMatchStubNoBarrier(uint32_t* requiredBarriersOut) const {
353 return getStubNoBarrier(RegExpExecMatch, requiredBarriersOut);
356 [[nodiscard]] JitCode* ensureRegExpExecMatchStubExists(JSContext* cx) {
357 if (JitCode* code = stubs_[RegExpExecMatch]) {
358 return code;
360 stubs_[RegExpExecMatch] = generateRegExpExecMatchStub(cx);
361 return stubs_[RegExpExecMatch];
364 JitCode* regExpExecTestStubNoBarrier(uint32_t* requiredBarriersOut) const {
365 return getStubNoBarrier(RegExpExecTest, requiredBarriersOut);
368 [[nodiscard]] JitCode* ensureRegExpExecTestStubExists(JSContext* cx) {
369 if (JitCode* code = stubs_[RegExpExecTest]) {
370 return code;
372 stubs_[RegExpExecTest] = generateRegExpExecTestStub(cx);
373 return stubs_[RegExpExecTest];
376 // Perform the necessary read barriers on stubs described by the bitmasks
377 // passed in. This function can only be called from the main thread.
379 // The stub pointers must still be valid by the time these methods are
380 // called. This is arranged by cancelling off-thread Ion compilation at the
381 // start of GC and at the start of sweeping.
382 void performStubReadBarriers(uint32_t stubsToBarrier) const;
384 static constexpr size_t offsetOfRegExpMatcherStub() {
385 return offsetof(JitZone, stubs_) + RegExpMatcher * sizeof(uintptr_t);
387 static constexpr size_t offsetOfRegExpSearcherStub() {
388 return offsetof(JitZone, stubs_) + RegExpSearcher * sizeof(uintptr_t);
390 static constexpr size_t offsetOfRegExpExecMatchStub() {
391 return offsetof(JitZone, stubs_) + RegExpExecMatch * sizeof(uintptr_t);
393 static constexpr size_t offsetOfRegExpExecTestStub() {
394 return offsetof(JitZone, stubs_) + RegExpExecTest * sizeof(uintptr_t);
398 } // namespace jit
399 } // namespace js
401 #endif /* jit_JitZone_h */