Bug 1695778 - (Core-Wasm-EH-Ion) part 4: Support reftypes in exceptions. r=rhunt
js/src/wasm/WasmBuiltins.cpp
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmBuiltins.h"

#include "mozilla/Atomics.h"

#include "fdlibm.h"
#include "jslibmath.h"
#include "jsmath.h"

#include "gc/Allocator.h"
#include "jit/AtomicOperations.h"
#include "jit/InlinableNatives.h"
#include "jit/MacroAssembler.h"
#include "jit/Simulator.h"
#include "js/experimental/JitInfo.h"  // JSJitInfo
#include "js/friend/ErrorMessages.h"  // js::GetErrorMessage, JSMSG_*
#include "js/friend/StackLimits.h"    // js::AutoCheckRecursionLimit
#include "threading/Mutex.h"
#include "util/Memory.h"
#include "util/Poison.h"
#include "vm/BigIntType.h"
#include "vm/ErrorObject.h"
#include "wasm/TypedObject.h"
#include "wasm/WasmCodegenTypes.h"
#include "wasm/WasmDebugFrame.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmStubs.h"

#include "debugger/DebugAPI-inl.h"
#include "vm/ErrorObject-inl.h"
#include "vm/Stack-inl.h"

using namespace js;
using namespace jit;
using namespace wasm;

using mozilla::HashGeneric;
using mozilla::IsNaN;
using mozilla::MakeEnumeratedRange;

static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;

// ============================================================================
// WebAssembly builtin C++ functions called from wasm code to implement
// internal wasm operations: type descriptions.

// Some abbreviations, for the sake of conciseness.
#define _F64 MIRType::Double
#define _F32 MIRType::Float32
#define _I32 MIRType::Int32
#define _I64 MIRType::Int64
#define _PTR MIRType::Pointer
#define _RoN MIRType::RefOrNull
#define _VOID MIRType::None
#define _END MIRType::None
#define _Infallible FailureMode::Infallible
#define _FailOnNegI32 FailureMode::FailOnNegI32
#define _FailOnNullPtr FailureMode::FailOnNullPtr
#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
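
// How to read an entry below (an illustrative note, using SASigPowD): the
// builtin double PowD(double, double) cannot fail, so its descriptor is
//   {SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}}
// i.e. the return type, the failure mode, the argument count, and then the
// _END-terminated list of argument types.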

namespace js {
namespace wasm {

const SymbolicAddressSignature SASigSinD = {
    SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCosD = {
    SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTanD = {
    SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigASinD = {
    SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigACosD = {
    SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigATanD = {
    SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilD = {
    SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilF = {
    SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigFloorD = {
    SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigFloorF = {
    SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigTruncD = {
    SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTruncF = {
    SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigNearbyIntD = {
    SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigNearbyIntF = {
    SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigExpD = {
    SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigLogD = {
    SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigPowD = {
    SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigATan2D = {
    SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigMemoryGrowM32 = {
    SymbolicAddress::MemoryGrowM32, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemoryGrowM64 = {
    SymbolicAddress::MemoryGrowM64, _I64, _Infallible, 2, {_PTR, _I64, _END}};
const SymbolicAddressSignature SASigMemorySizeM32 = {
    SymbolicAddress::MemorySizeM32, _I32, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigMemorySizeM64 = {
    SymbolicAddress::MemorySizeM64, _I64, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32M32 = {
    SymbolicAddress::WaitI32M32,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI32M64 = {
    SymbolicAddress::WaitI32M64,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I64, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64M32 = {
    SymbolicAddress::WaitI64M32,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I32, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64M64 = {
    SymbolicAddress::WaitI64M64,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I64, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWakeM32 = {
    SymbolicAddress::WakeM32, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigWakeM64 = {
    SymbolicAddress::WakeM64, _I32, _FailOnNegI32, 3, {_PTR, _I64, _I32, _END}};
const SymbolicAddressSignature SASigMemCopyM32 = {
    SymbolicAddress::MemCopyM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopySharedM32 = {
    SymbolicAddress::MemCopySharedM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopyM64 = {
    SymbolicAddress::MemCopyM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopySharedM64 = {
    SymbolicAddress::MemCopySharedM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigDataDrop = {
    SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFillM32 = {
    SymbolicAddress::MemFillM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillSharedM32 = {
    SymbolicAddress::MemFillSharedM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillM64 = {
    SymbolicAddress::MemFillM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillSharedM64 = {
    SymbolicAddress::MemFillSharedM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemInitM32 = {
    SymbolicAddress::MemInitM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemInitM64 = {
    SymbolicAddress::MemInitM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableCopy = {
    SymbolicAddress::TableCopy,
    _VOID,
    _FailOnNegI32,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigElemDrop = {
    SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigTableFill = {
    SymbolicAddress::TableFill,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet,
                                                _RoN,
                                                _FailOnInvalidRef,
                                                3,
                                                {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGrow = {
    SymbolicAddress::TableGrow,
    _I32,
    _Infallible,
    4,
    {_PTR, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableInit = {
    SymbolicAddress::TableInit,
    _VOID,
    _FailOnNegI32,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet,
                                                _VOID,
                                                _FailOnNegI32,
                                                4,
                                                {_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSize = {
    SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigRefFunc = {
    SymbolicAddress::RefFunc, _RoN, _FailOnInvalidRef, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigPreBarrierFiltering = {
    SymbolicAddress::PreBarrierFiltering,
    _VOID,
    _Infallible,
    2,
    {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrier = {
    SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrierFiltering = {
    SymbolicAddress::PostBarrierFiltering,
    _VOID,
    _Infallible,
    2,
    {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigStructNew = {
    SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _RoN, _END}};
#ifdef ENABLE_WASM_EXCEPTIONS
const SymbolicAddressSignature SASigExceptionNew = {
    SymbolicAddress::ExceptionNew,
    _RoN,
    _FailOnNullPtr,
    3,
    {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigThrowException = {
    SymbolicAddress::ThrowException,
    _VOID,
    _FailOnNegI32,
    2,
    {_PTR, _RoN, _END}};
const SymbolicAddressSignature SASigConsumePendingException = {
    SymbolicAddress::ConsumePendingException,
    _I32,
    _Infallible,
    1,
    {_PTR, _END}};
const SymbolicAddressSignature SASigPushRefIntoExn = {
    SymbolicAddress::PushRefIntoExn,
    _VOID,
    _FailOnNegI32,
    3,
    {_PTR, _RoN, _RoN, _END}};
#endif
const SymbolicAddressSignature SASigArrayNew = {SymbolicAddress::ArrayNew,
                                                _RoN,
                                                _FailOnNullPtr,
                                                3,
                                                {_PTR, _I32, _RoN, _END}};
const SymbolicAddressSignature SASigRefTest = {
    SymbolicAddress::RefTest, _I32, _Infallible, 3, {_PTR, _RoN, _RoN, _END}};
const SymbolicAddressSignature SASigRttSub = {
    SymbolicAddress::RttSub, _RoN, _FailOnNullPtr, 3, {_PTR, _RoN, _RoN, _END}};
#define DECL_SAS_FOR_INTRINSIC(op, export, sa_name, abitype, entry, idx) \
  const SymbolicAddressSignature SASig##sa_name = {                      \
      SymbolicAddress::sa_name, _VOID, _FailOnNegI32,                    \
      DECLARE_INTRINSIC_PARAM_TYPES_##op};

FOR_EACH_INTRINSIC(DECL_SAS_FOR_INTRINSIC)
#undef DECL_SAS_FOR_INTRINSIC

}  // namespace wasm
}  // namespace js

#undef _F64
#undef _F32
#undef _I32
#undef _I64
#undef _PTR
#undef _RoN
#undef _VOID
#undef _END
#undef _Infallible
#undef _FailOnNegI32
#undef _FailOnNullPtr
#undef _FailOnInvalidRef

#ifdef DEBUG
ABIArgType ToABIType(FailureMode mode) {
  switch (mode) {
    case FailureMode::FailOnNegI32:
      return ArgType_Int32;
    case FailureMode::FailOnNullPtr:
    case FailureMode::FailOnInvalidRef:
      return ArgType_General;
    default:
      MOZ_CRASH("unexpected failure mode");
  }
}

ABIArgType ToABIType(MIRType type) {
  switch (type) {
    case MIRType::None:
    case MIRType::Int32:
      return ArgType_Int32;
    case MIRType::Int64:
      return ArgType_Int64;
    case MIRType::Pointer:
    case MIRType::RefOrNull:
      return ArgType_General;
    case MIRType::Float32:
      return ArgType_Float32;
    case MIRType::Double:
      return ArgType_Float64;
    default:
      MOZ_CRASH("unexpected type");
  }
}

ABIFunctionType ToABIType(const SymbolicAddressSignature& sig) {
  MOZ_ASSERT_IF(sig.failureMode != FailureMode::Infallible,
                ToABIType(sig.failureMode) == ToABIType(sig.retType));
  int abiType = ToABIType(sig.retType) << RetType_Shift;
  for (int i = 0; i < sig.numArgs; i++) {
    abiType |= (ToABIType(sig.argTypes[i]) << (ArgType_Shift * (i + 1)));
  }
  return ABIFunctionType(abiType);
}
#endif
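
// Worked example of the packing above (a sketch, assuming the usual
// ABIFunctionType bit encoding): for SASigPowD, i.e. double(double, double),
// ToABIType computes
//   ArgType_Float64 << RetType_Shift
//       | ArgType_Float64 << (ArgType_Shift * 1)
//       | ArgType_Float64 << (ArgType_Shift * 2)
// which is the value named Args_Double_DoubleDouble. The MOZ_ASSERTs in
// AddressOf() below check exactly this equivalence for the instance-call
// builtins.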

// ============================================================================
// WebAssembly builtin C++ functions called from wasm code to implement
// internal wasm operations: implementations.

#if defined(JS_CODEGEN_ARM)
extern "C" {

extern MOZ_EXPORT int64_t __aeabi_idivmod(int, int);

extern MOZ_EXPORT int64_t __aeabi_uidivmod(int, int);
}
#endif

// This utility function can only be called for builtins that are called
// directly from wasm code.
static JitActivation* CallingActivation(JSContext* cx) {
  Activation* act = cx->activation();
  MOZ_ASSERT(act->asJit()->hasWasmExitFP());
  return act->asJit();
}

static bool WasmHandleDebugTrap() {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);
  Frame* fp = activation->wasmExitFP();
  Instance* instance = GetNearestEffectiveTls(fp)->instance;
  const Code& code = instance->code();
  MOZ_ASSERT(code.metadata().debugEnabled);

  // The debug trap stub is the innermost frame. Its return address is the
  // actual trap site.
  const CallSite* site = code.lookupCallSite(fp->returnAddress());
  MOZ_ASSERT(site);

  // Advance to the actual trapping frame.
  fp = fp->wasmCaller();
  DebugFrame* debugFrame = DebugFrame::from(fp);

  if (site->kind() == CallSite::EnterFrame) {
    if (!instance->debug().enterFrameTrapsEnabled()) {
      return true;
    }
    debugFrame->setIsDebuggee();
    debugFrame->observe(cx);
    if (!DebugAPI::onEnterFrame(cx, debugFrame)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // Ignoring forced return because changing code execution order is
        // not yet implemented in the wasm baseline.
        // TODO properly handle forced return and resume wasm execution.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onEnterFrame");
      }
      return false;
    }
    return true;
  }
  if (site->kind() == CallSite::LeaveFrame) {
    if (!debugFrame->updateReturnJSValue(cx)) {
      return false;
    }
    bool ok = DebugAPI::onLeaveFrame(cx, debugFrame, nullptr, true);
    debugFrame->leave(cx);
    return ok;
  }

  DebugState& debug = instance->debug();
  MOZ_ASSERT(debug.hasBreakpointTrapAtOffset(site->lineOrBytecode()));
  if (debug.stepModeEnabled(debugFrame->funcIndex())) {
    if (!DebugAPI::onSingleStep(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onSingleStep");
      }
      return false;
    }
  }
  if (debug.hasBreakpointSite(site->lineOrBytecode())) {
    if (!DebugAPI::onTrap(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(
            cx, "Unexpected resumption value from breakpoint handler");
      }
      return false;
    }
  }
  return true;
}

// Check if the pending exception, if any, is catchable by wasm.
#ifdef ENABLE_WASM_EXCEPTIONS
static bool HasCatchableException(JitActivation* activation, JSContext* cx,
                                  MutableHandleValue exn) {
  if (!cx->isExceptionPending()) {
    return false;
  }

  // Traps are generally not catchable as wasm exceptions. The only case in
  // which they are catchable is Trap::ThrowReported, which the wasm compiler
  // uses to throw exceptions and which is the source of exceptions from C++.
  if (activation->isWasmTrapping() &&
      activation->wasmTrapData().trap != Trap::ThrowReported) {
    return false;
  }

  if (cx->isThrowingOverRecursed() || cx->isThrowingOutOfMemory()) {
    return false;
  }

  // Write the exception out here to exn to avoid having to get the pending
  // exception and checking for OOM multiple times.
  if (cx->getPendingException(exn)) {
    // Check if a JS exception originated from a wasm trap.
    if (exn.isObject() && exn.toObject().is<ErrorObject>()) {
      ErrorObject& err = exn.toObject().as<ErrorObject>();
      if (err.fromWasmTrap()) {
        return false;
      }
    }
    return true;
  }

  MOZ_ASSERT(cx->isThrowingOutOfMemory());
  return false;
}
#endif

// Unwind the entire activation in response to a thrown exception. This
// function is responsible for notifying the debugger of each unwound frame.
// The return value is the new stack address which the calling stub will set
// to the sp register before executing a return instruction.
//
// This function will also look for try-catch handlers and, if not trapping or
// throwing an uncatchable exception, will write the handler info in the return
// argument and return true.
//
// Returns false if a handler isn't found or shouldn't be used (e.g., traps).

bool wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter,
                       jit::ResumeFromException* rfe) {
  // WasmFrameIter iterates down wasm frames in the activation starting at
  // JitActivation::wasmExitFP(). Calling WasmFrameIter::startUnwinding pops
  // JitActivation::wasmExitFP() once each time WasmFrameIter is incremented,
  // ultimately leaving exit FP null when the WasmFrameIter is done(). This
  // is necessary to prevent a DebugFrame from being observed again after we
  // just called onLeaveFrame (which would lead to the frame being re-added
  // to the map of live frames, right as it becomes trash).

  MOZ_ASSERT(CallingActivation(cx) == iter.activation());
  MOZ_ASSERT(!iter.done());
  iter.setUnwind(WasmFrameIter::Unwind::True);

  // Live wasm code on the stack is kept alive (in TraceJitActivation) by
  // marking the instance of every wasm::Frame found by WasmFrameIter.
  // However, as explained above, we're popping frames while iterating which
  // means that a GC during this loop could collect the code of frames whose
  // code is still on the stack. This is actually mostly fine: as soon as we
  // return to the throw stub, the entire stack will be popped as a whole,
  // returning to the C++ caller. However, we must keep the throw stub alive
  // itself, and it is owned by the innermost instance.
  RootedWasmInstanceObject keepAlive(cx, iter.instance()->object());

#ifdef ENABLE_WASM_EXCEPTIONS
  JitActivation* activation = CallingActivation(cx);
  RootedValue exn(cx);
  bool hasCatchableException = HasCatchableException(activation, cx, &exn);
#endif

  for (; !iter.done(); ++iter) {
    // Wasm code can enter same-compartment realms, so reset cx->realm to
    // this frame's realm.
    cx->setRealmForJitExceptionHandler(iter.instance()->realm());

#ifdef ENABLE_WASM_EXCEPTIONS
    // Only look for an exception handler if there's a catchable exception.
    if (hasCatchableException) {
      const wasm::Code& code = iter.instance()->code();
      const uint8_t* pc = iter.resumePCinCurrentFrame();
      Tier tier;
      const wasm::WasmTryNote* tryNote =
          code.lookupWasmTryNote((void*)pc, &tier);

      if (tryNote) {
        cx->clearPendingException();
        RootedAnyRef ref(cx, AnyRef::null());
        if (!BoxAnyRef(cx, exn, &ref)) {
          MOZ_ASSERT(cx->isThrowingOutOfMemory());
          continue;
        }

        iter.tls()->pendingException = ref.get().asJSObject();

        rfe->kind = ResumeFromException::RESUME_WASM_CATCH;
        rfe->framePointer = (uint8_t*)iter.frame();
        rfe->tlsData = iter.instance()->tlsData();

        size_t offsetAdjustment = 0;
        if (iter.frame()->callerIsTrampolineFP()) {
          offsetAdjustment = FrameWithTls::sizeWithoutFrame() +
                             IndirectStubAdditionalAlignment;
        }
        rfe->stackPointer =
            (uint8_t*)(rfe->framePointer -
                       (tryNote->framePushed + offsetAdjustment));
        rfe->target = iter.instance()->codeBase(tier) + tryNote->entryPoint;

        // Make sure to clear trapping state if we got here due to a trap.
        if (activation->isWasmTrapping()) {
          activation->finishWasmTrap();
        }

        return true;
      }
    }
#endif

    if (!iter.debugEnabled()) {
      continue;
    }

    DebugFrame* frame = iter.debugFrame();
    frame->clearReturnJSValue();

    // Assume ResumeMode::Terminate if no exception is pending --
    // no onExceptionUnwind handlers must be fired.
    if (cx->isExceptionPending()) {
      if (!DebugAPI::onExceptionUnwind(cx, frame)) {
        if (cx->isPropagatingForcedReturn()) {
          cx->clearPropagatingForcedReturn();
          // Unexpected trap return -- raising an error since throw recovery
          // is not yet implemented in the wasm baseline.
          // TODO properly handle forced return and resume wasm execution.
          JS_ReportErrorASCII(
              cx, "Unexpected resumption value from onExceptionUnwind");
        }
      }
    }

    bool ok = DebugAPI::onLeaveFrame(cx, frame, nullptr, false);
    if (ok) {
      // Unexpected success from the handler onLeaveFrame -- raising an error
      // since throw recovery is not yet implemented in the wasm baseline.
      // TODO properly handle success and resume wasm execution.
      JS_ReportErrorASCII(cx, "Unexpected success from onLeaveFrame");
    }
    frame->leave(cx);
  }

  MOZ_ASSERT(!cx->activation()->asJit()->isWasmTrapping(),
             "unwinding clears the trapping state");

  // In case of no handler, exit wasm via ret().
  // FailFP signals to the wasm stub to do a failure return.
  rfe->kind = ResumeFromException::RESUME_WASM;
  rfe->framePointer = (uint8_t*)wasm::FailFP;
  rfe->stackPointer = (uint8_t*)iter.unwoundAddressOfReturnAddress();
  rfe->target = nullptr;
  return false;
}

static void* WasmHandleThrow(jit::ResumeFromException* rfe) {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);
  WasmFrameIter iter(activation);
  // We can ignore the return result here because the throw stub code
  // can just check the resume kind to see if a handler was found or not.
  HandleThrow(cx, iter, rfe);
  return rfe;
}

// Unconditionally returns nullptr per the calling convention of HandleTrap().
static void* ReportError(JSContext* cx, unsigned errorNumber) {
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, errorNumber);

  if (cx->isThrowingOutOfMemory()) {
    return nullptr;
  }

  // Distinguish exceptions thrown from traps from other RuntimeErrors.
  RootedValue exn(cx);
  if (!cx->getPendingException(&exn)) {
    return nullptr;
  }

  MOZ_ASSERT(exn.isObject() && exn.toObject().is<ErrorObject>());
  exn.toObject().as<ErrorObject>().setFromWasmTrap();

  return nullptr;
}

// Has the same return-value convention as HandleTrap().
static void* CheckInterrupt(JSContext* cx, JitActivation* activation) {
  ResetInterruptState(cx);

  if (!CheckForInterrupt(cx)) {
    return nullptr;
  }

  void* resumePC = activation->wasmTrapData().resumePC;
  activation->finishWasmTrap();
  return resumePC;
}

// The calling convention between this function and its caller in the stub
// generated by GenerateTrapExit() is:
// - return nullptr if the stub should jump to the throw stub to unwind
//   the activation;
// - return the (non-null) resumePC that should be jumped to if execution
//   should resume after the trap.
static void* WasmHandleTrap() {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);

  switch (activation->wasmTrapData().trap) {
    case Trap::Unreachable:
      return ReportError(cx, JSMSG_WASM_UNREACHABLE);
    case Trap::IntegerOverflow:
      return ReportError(cx, JSMSG_WASM_INTEGER_OVERFLOW);
    case Trap::InvalidConversionToInteger:
      return ReportError(cx, JSMSG_WASM_INVALID_CONVERSION);
    case Trap::IntegerDivideByZero:
      return ReportError(cx, JSMSG_WASM_INT_DIVIDE_BY_ZERO);
    case Trap::IndirectCallToNull:
      return ReportError(cx, JSMSG_WASM_IND_CALL_TO_NULL);
    case Trap::IndirectCallBadSig:
      return ReportError(cx, JSMSG_WASM_IND_CALL_BAD_SIG);
    case Trap::NullPointerDereference:
      return ReportError(cx, JSMSG_WASM_DEREF_NULL);
    case Trap::BadCast:
      return ReportError(cx, JSMSG_WASM_BAD_CAST);
    case Trap::OutOfBounds:
      return ReportError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    case Trap::UnalignedAccess:
      return ReportError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
    case Trap::CheckInterrupt:
      return CheckInterrupt(cx, activation);
    case Trap::StackOverflow: {
      // TlsData::setInterrupt() causes a fake stack overflow. Since
      // TlsData::setInterrupt() is called racily, it's possible for a real
      // stack overflow to trap, followed by a racy call to setInterrupt().
      // Thus, we must check for a real stack overflow first before we
      // CheckInterrupt() and possibly resume execution.
      AutoCheckRecursionLimit recursion(cx);
      if (!recursion.check(cx)) {
        return nullptr;
      }
      if (activation->wasmExitTls()->isInterrupted()) {
        return CheckInterrupt(cx, activation);
      }
      return ReportError(cx, JSMSG_OVER_RECURSED);
    }
    case Trap::ThrowReported:
      // Error was already reported under another name.
      return nullptr;
    case Trap::Limit:
      break;
  }

  MOZ_CRASH("unexpected trap");
}

static void WasmReportV128JSCall() {
  JSContext* cx = TlsContext.get();  // Cold code
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                           JSMSG_WASM_BAD_VAL_TYPE);
}

static int32_t CoerceInPlace_ToInt32(Value* rawVal) {
  JSContext* cx = TlsContext.get();  // Cold code

  int32_t i32;
  RootedValue val(cx, *rawVal);
  if (!ToInt32(cx, val, &i32)) {
    *rawVal = PoisonedObjectValue(0x42);
    return false;
  }

  *rawVal = Int32Value(i32);
  return true;
}

static int32_t CoerceInPlace_ToBigInt(Value* rawVal) {
  JSContext* cx = TlsContext.get();  // Cold code

  RootedValue val(cx, *rawVal);
  BigInt* bi = ToBigInt(cx, val);
  if (!bi) {
    *rawVal = PoisonedObjectValue(0x43);
    return false;
  }

  *rawVal = BigIntValue(bi);
  return true;
}

static int32_t CoerceInPlace_ToNumber(Value* rawVal) {
  JSContext* cx = TlsContext.get();  // Cold code

  double dbl;
  RootedValue val(cx, *rawVal);
  if (!ToNumber(cx, val, &dbl)) {
    *rawVal = PoisonedObjectValue(0x42);
    return false;
  }

  *rawVal = DoubleValue(dbl);
  return true;
}

static void* BoxValue_Anyref(Value* rawVal) {
  JSContext* cx = TlsContext.get();  // Cold code
  RootedValue val(cx, *rawVal);
  RootedAnyRef result(cx, AnyRef::null());
  if (!BoxAnyRef(cx, val, &result)) {
    return nullptr;
  }
  return result.get().forCompiledCode();
}

static int32_t CoerceInPlace_JitEntry(int funcExportIndex, TlsData* tlsData,
                                      Value* argv) {
  JSContext* cx = TlsContext.get();  // Cold code

  const Code& code = tlsData->instance->code();
  const FuncExport& fe =
      code.metadata(code.stableTier()).funcExports[funcExportIndex];

  for (size_t i = 0; i < fe.funcType().args().length(); i++) {
    HandleValue arg = HandleValue::fromMarkedLocation(&argv[i]);
    switch (fe.funcType().args()[i].kind()) {
      case ValType::I32: {
        int32_t i32;
        if (!ToInt32(cx, arg, &i32)) {
          return false;
        }
        argv[i] = Int32Value(i32);
        break;
      }
      case ValType::I64: {
        // In this case we store a BigInt value as there is no value type
        // corresponding directly to an I64. The conversion to I64 happens
        // in the JIT entry stub.
        BigInt* bigint = ToBigInt(cx, arg);
        if (!bigint) {
          return false;
        }
        argv[i] = BigIntValue(bigint);
        break;
      }
      case ValType::F32:
      case ValType::F64: {
        double dbl;
        if (!ToNumber(cx, arg, &dbl)) {
          return false;
        }
        // No need to convert double-to-float for f32, it's done inline
        // in the wasm stub later.
        argv[i] = DoubleValue(dbl);
        break;
      }
      case ValType::Ref: {
        switch (fe.funcType().args()[i].refTypeKind()) {
          case RefType::Extern:
            // Leave Object and Null alone, we will unbox inline. All we need
            // to do is convert other values to an Object representation.
            if (!arg.isObjectOrNull()) {
              RootedAnyRef result(cx, AnyRef::null());
              if (!BoxAnyRef(cx, arg, &result)) {
                return false;
              }
              argv[i].setObject(*result.get().asJSObject());
            }
            break;
          case RefType::Func:
          case RefType::Eq:
          case RefType::TypeIndex:
            // Guarded against by temporarilyUnsupportedReftypeForEntry()
            MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
        }
        break;
      }
      case ValType::V128: {
        // Guarded against by hasV128ArgOrRet()
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
      default: {
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
    }
  }

  return true;
}
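
// Note the contract shared by the CoerceInPlace_* helpers above: they rewrite
// argv in place to the boxed form the JIT entry stub expects and return true,
// or return false when a conversion throws (the single-value variants also
// store a poison value so any later misuse of the slot is detectable).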

// Allocate a BigInt without GC, corresponds to the similar VMFunction.
static BigInt* AllocateBigIntTenuredNoGC() {
  JSContext* cx = TlsContext.get();  // Cold code (the caller is elaborate)

  return js::AllocateBigInt<NoGC>(cx, gc::TenuredHeap);
}
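
// The i64 arithmetic helpers below take each 64-bit operand split into
// 32-bit halves and reassemble it with (hi << 32) + lo: they are needed on
// 32-bit targets, where an i64 argument arrives in a pair of 32-bit
// registers.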

static int64_t DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
                      uint32_t y_lo) {
  int64_t x = ((uint64_t)x_hi << 32) + x_lo;
  int64_t y = ((uint64_t)y_hi << 32) + y_lo;
  MOZ_ASSERT(x != INT64_MIN || y != -1);
  MOZ_ASSERT(y != 0);
  return x / y;
}

static int64_t UDivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
                       uint32_t y_lo) {
  uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
  uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
  MOZ_ASSERT(y != 0);
  return x / y;
}

static int64_t ModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
                      uint32_t y_lo) {
  int64_t x = ((uint64_t)x_hi << 32) + x_lo;
  int64_t y = ((uint64_t)y_hi << 32) + y_lo;
  MOZ_ASSERT(x != INT64_MIN || y != -1);
  MOZ_ASSERT(y != 0);
  return x % y;
}

static int64_t UModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
                       uint32_t y_lo) {
  uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
  uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
  MOZ_ASSERT(y != 0);
  return x % y;
}

static int64_t TruncateDoubleToInt64(double input) {
  // Note: INT64_MAX is not exactly representable in double; double(INT64_MAX)
  // is actually INT64_MAX + 1, so inputs equal to it also take the failure
  // value.
  if (input >= double(INT64_MAX) || input < double(INT64_MIN) || IsNaN(input)) {
    return 0x8000000000000000;
  }
  return int64_t(input);
}

static uint64_t TruncateDoubleToUint64(double input) {
  // Note: UINT64_MAX is not exactly representable in double;
  // double(UINT64_MAX) is actually UINT64_MAX + 1, so inputs equal to it also
  // take the failure value.
  if (input >= double(UINT64_MAX) || input <= -1.0 || IsNaN(input)) {
    return 0x8000000000000000;
  }
  return uint64_t(input);
}
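
// For the saturating truncations below, note that -double(INT64_MIN) is
// exactly 2^63 and -double(INT64_MIN) * 2.0 is exactly 2^64; both are powers
// of two and thus exactly representable as doubles, so the range checks are
// precise.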

static int64_t SaturatingTruncateDoubleToInt64(double input) {
  // Handle in-range values (except INT64_MIN).
  if (fabs(input) < -double(INT64_MIN)) {
    return int64_t(input);
  }
  // Handle NaN.
  if (IsNaN(input)) {
    return 0;
  }
  // Handle positive overflow.
  if (input > 0) {
    return INT64_MAX;
  }
  // Handle negative overflow.
  return INT64_MIN;
}

static uint64_t SaturatingTruncateDoubleToUint64(double input) {
  // Handle positive overflow.
  if (input >= -double(INT64_MIN) * 2.0) {
    return UINT64_MAX;
  }
  // Handle in-range values.
  if (input > -1.0) {
    return uint64_t(input);
  }
  // Handle NaN and negative overflow.
  return 0;
}

static double Int64ToDouble(int32_t x_hi, uint32_t x_lo) {
  int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
  return double(x);
}

static float Int64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
  return float(x);
}

static double Uint64ToDouble(int32_t x_hi, uint32_t x_lo) {
  uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
  return double(x);
}

static float Uint64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
  return float(x);
}

template <class F>
static inline void* FuncCast(F* funcPtr, ABIFunctionType abiType) {
  void* pf = JS_FUNC_TO_DATA_PTR(void*, funcPtr);
#ifdef JS_SIMULATOR
  pf = Simulator::RedirectNativeFunction(pf, abiType);
#endif
  return pf;
}
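
// FuncCast erases a C++ function's type to a raw code pointer. Under the
// simulator it additionally registers a redirection so that simulated wasm
// code can call into the native function with the given ABI.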

#ifdef WASM_CODEGEN_DEBUG
void wasm::PrintI32(int32_t val) { fprintf(stderr, "i32(%d) ", val); }

void wasm::PrintPtr(uint8_t* val) { fprintf(stderr, "ptr(%p) ", val); }

void wasm::PrintF32(float val) { fprintf(stderr, "f32(%f) ", val); }

void wasm::PrintF64(double val) { fprintf(stderr, "f64(%lf) ", val); }

void wasm::PrintText(const char* out) { fprintf(stderr, "%s", out); }
#endif

void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
  switch (imm) {
    case SymbolicAddress::HandleDebugTrap:
      *abiType = Args_General0;
      return FuncCast(WasmHandleDebugTrap, *abiType);
    case SymbolicAddress::HandleThrow:
      *abiType = Args_General1;
      return FuncCast(WasmHandleThrow, *abiType);
    case SymbolicAddress::HandleTrap:
      *abiType = Args_General0;
      return FuncCast(WasmHandleTrap, *abiType);
    case SymbolicAddress::ReportV128JSCall:
      *abiType = Args_General0;
      return FuncCast(WasmReportV128JSCall, *abiType);
    case SymbolicAddress::CallImport_General:
      *abiType = Args_Int32_GeneralInt32Int32General;
      return FuncCast(Instance::callImport_general, *abiType);
    case SymbolicAddress::CoerceInPlace_ToInt32:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToInt32, *abiType);
    case SymbolicAddress::CoerceInPlace_ToBigInt:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToBigInt, *abiType);
    case SymbolicAddress::CoerceInPlace_ToNumber:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToNumber, *abiType);
    case SymbolicAddress::CoerceInPlace_JitEntry:
      *abiType = Args_General3;
      return FuncCast(CoerceInPlace_JitEntry, *abiType);
    case SymbolicAddress::ToInt32:
      *abiType = Args_Int_Double;
      return FuncCast<int32_t(double)>(JS::ToInt32, *abiType);
    case SymbolicAddress::BoxValue_Anyref:
      *abiType = Args_General1;
      return FuncCast(BoxValue_Anyref, *abiType);
    case SymbolicAddress::AllocateBigInt:
      *abiType = Args_General0;
      return FuncCast(AllocateBigIntTenuredNoGC, *abiType);
    case SymbolicAddress::DivI64:
      *abiType = Args_General4;
      return FuncCast(DivI64, *abiType);
    case SymbolicAddress::UDivI64:
      *abiType = Args_General4;
      return FuncCast(UDivI64, *abiType);
    case SymbolicAddress::ModI64:
      *abiType = Args_General4;
      return FuncCast(ModI64, *abiType);
    case SymbolicAddress::UModI64:
      *abiType = Args_General4;
      return FuncCast(UModI64, *abiType);
    case SymbolicAddress::TruncateDoubleToUint64:
      *abiType = Args_Int64_Double;
      return FuncCast(TruncateDoubleToUint64, *abiType);
    case SymbolicAddress::TruncateDoubleToInt64:
      *abiType = Args_Int64_Double;
      return FuncCast(TruncateDoubleToInt64, *abiType);
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
      *abiType = Args_Int64_Double;
      return FuncCast(SaturatingTruncateDoubleToUint64, *abiType);
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
      *abiType = Args_Int64_Double;
      return FuncCast(SaturatingTruncateDoubleToInt64, *abiType);
    case SymbolicAddress::Uint64ToDouble:
      *abiType = Args_Double_IntInt;
      return FuncCast(Uint64ToDouble, *abiType);
    case SymbolicAddress::Uint64ToFloat32:
      *abiType = Args_Float32_IntInt;
      return FuncCast(Uint64ToFloat32, *abiType);
    case SymbolicAddress::Int64ToDouble:
      *abiType = Args_Double_IntInt;
      return FuncCast(Int64ToDouble, *abiType);
    case SymbolicAddress::Int64ToFloat32:
      *abiType = Args_Float32_IntInt;
      return FuncCast(Int64ToFloat32, *abiType);
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
      *abiType = Args_General2;
      return FuncCast(__aeabi_idivmod, *abiType);
    case SymbolicAddress::aeabi_uidivmod:
      *abiType = Args_General2;
      return FuncCast(__aeabi_uidivmod, *abiType);
#endif
    case SymbolicAddress::ModD:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(NumberMod, *abiType);
    case SymbolicAddress::SinD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(sin, *abiType);
    case SymbolicAddress::CosD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(cos, *abiType);
    case SymbolicAddress::TanD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(tan, *abiType);
    case SymbolicAddress::ASinD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::asin, *abiType);
    case SymbolicAddress::ACosD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::acos, *abiType);
    case SymbolicAddress::ATanD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::atan, *abiType);
    case SymbolicAddress::CeilD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::ceil, *abiType);
    case SymbolicAddress::CeilF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::ceilf, *abiType);
    case SymbolicAddress::FloorD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::floor, *abiType);
    case SymbolicAddress::FloorF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::floorf, *abiType);
    case SymbolicAddress::TruncD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::trunc, *abiType);
    case SymbolicAddress::TruncF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::truncf, *abiType);
    case SymbolicAddress::NearbyIntD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::nearbyint, *abiType);
    case SymbolicAddress::NearbyIntF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::nearbyintf, *abiType);
    case SymbolicAddress::ExpD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::exp, *abiType);
    case SymbolicAddress::LogD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::log, *abiType);
    case SymbolicAddress::PowD:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(ecmaPow, *abiType);
    case SymbolicAddress::ATan2D:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(ecmaAtan2, *abiType);

    case SymbolicAddress::MemoryGrowM32:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM32));
      return FuncCast(Instance::memoryGrow_m32, *abiType);
    case SymbolicAddress::MemoryGrowM64:
      *abiType = Args_Int64_GeneralInt64;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM64));
      return FuncCast(Instance::memoryGrow_m64, *abiType);
    case SymbolicAddress::MemorySizeM32:
      *abiType = Args_Int32_General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM32));
      return FuncCast(Instance::memorySize_m32, *abiType);
    case SymbolicAddress::MemorySizeM64:
      *abiType = Args_Int64_General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM64));
      return FuncCast(Instance::memorySize_m64, *abiType);
    case SymbolicAddress::WaitI32M32:
      *abiType = Args_Int32_GeneralInt32Int32Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M32));
      return FuncCast(Instance::wait_i32_m32, *abiType);
    case SymbolicAddress::WaitI32M64:
      *abiType = Args_Int32_GeneralInt64Int32Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M64));
      return FuncCast(Instance::wait_i32_m64, *abiType);
    case SymbolicAddress::WaitI64M32:
      *abiType = Args_Int32_GeneralInt32Int64Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M32));
      return FuncCast(Instance::wait_i64_m32, *abiType);
    case SymbolicAddress::WaitI64M64:
      *abiType = Args_Int32_GeneralInt64Int64Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M64));
      return FuncCast(Instance::wait_i64_m64, *abiType);
    case SymbolicAddress::WakeM32:
      *abiType = Args_Int32_GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM32));
      return FuncCast(Instance::wake_m32, *abiType);
    case SymbolicAddress::WakeM64:
      *abiType = Args_Int32_GeneralInt64Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM64));
      return FuncCast(Instance::wake_m64, *abiType);
    case SymbolicAddress::MemCopyM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM32));
      return FuncCast(Instance::memCopy_m32, *abiType);
    case SymbolicAddress::MemCopySharedM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM32));
      return FuncCast(Instance::memCopyShared_m32, *abiType);
    case SymbolicAddress::MemCopyM64:
      *abiType = Args_Int32_GeneralInt64Int64Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM64));
      return FuncCast(Instance::memCopy_m64, *abiType);
    case SymbolicAddress::MemCopySharedM64:
      *abiType = Args_Int32_GeneralInt64Int64Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM64));
      return FuncCast(Instance::memCopyShared_m64, *abiType);
    case SymbolicAddress::DataDrop:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigDataDrop));
      return FuncCast(Instance::dataDrop, *abiType);
    case SymbolicAddress::MemFillM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM32));
      return FuncCast(Instance::memFill_m32, *abiType);
    case SymbolicAddress::MemFillSharedM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM32));
      return FuncCast(Instance::memFillShared_m32, *abiType);
    case SymbolicAddress::MemFillM64:
      *abiType = Args_Int32_GeneralInt64Int32Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM64));
      return FuncCast(Instance::memFill_m64, *abiType);
    case SymbolicAddress::MemFillSharedM64:
      *abiType = Args_Int32_GeneralInt64Int32Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM64));
      return FuncCast(Instance::memFillShared_m64, *abiType);
    case SymbolicAddress::MemInitM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM32));
      return FuncCast(Instance::memInit_m32, *abiType);
    case SymbolicAddress::MemInitM64:
      *abiType = Args_Int32_GeneralInt64Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM64));
      return FuncCast(Instance::memInit_m64, *abiType);
    case SymbolicAddress::TableCopy:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableCopy));
      return FuncCast(Instance::tableCopy, *abiType);
    case SymbolicAddress::ElemDrop:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigElemDrop));
      return FuncCast(Instance::elemDrop, *abiType);
    case SymbolicAddress::TableFill:
      *abiType = Args_Int32_GeneralInt32GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableFill));
      return FuncCast(Instance::tableFill, *abiType);
    case SymbolicAddress::TableInit:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableInit));
      return FuncCast(Instance::tableInit, *abiType);
    case SymbolicAddress::TableGet:
      *abiType = Args_General_GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableGet));
      return FuncCast(Instance::tableGet, *abiType);
    case SymbolicAddress::TableGrow:
      *abiType = Args_Int32_GeneralGeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableGrow));
      return FuncCast(Instance::tableGrow, *abiType);
    case SymbolicAddress::TableSet:
      *abiType = Args_Int32_GeneralInt32GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableSet));
      return FuncCast(Instance::tableSet, *abiType);
    case SymbolicAddress::TableSize:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableSize));
      return FuncCast(Instance::tableSize, *abiType);
    case SymbolicAddress::RefFunc:
      *abiType = Args_General_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigRefFunc));
      return FuncCast(Instance::refFunc, *abiType);
    case SymbolicAddress::PostBarrier:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrier));
      return FuncCast(Instance::postBarrier, *abiType);
    case SymbolicAddress::PreBarrierFiltering:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPreBarrierFiltering));
      return FuncCast(Instance::preBarrierFiltering, *abiType);
    case SymbolicAddress::PostBarrierFiltering:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierFiltering));
      return FuncCast(Instance::postBarrierFiltering, *abiType);
    case SymbolicAddress::StructNew:
      *abiType = Args_General2;
      MOZ_ASSERT(*abiType == ToABIType(SASigStructNew));
      return FuncCast(Instance::structNew, *abiType);
    case SymbolicAddress::ArrayNew:
      *abiType = Args_General_GeneralInt32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNew));
      return FuncCast(Instance::arrayNew, *abiType);
    case SymbolicAddress::RefTest:
      *abiType = Args_Int32_GeneralGeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigRefTest));
      return FuncCast(Instance::refTest, *abiType);
    case SymbolicAddress::RttSub:
      *abiType = Args_General3;
      MOZ_ASSERT(*abiType == ToABIType(SASigRttSub));
      return FuncCast(Instance::rttSub, *abiType);
    case SymbolicAddress::InlineTypedObjectClass:
      // The ABI type is not used here, but assign one to avoid garbage.
      *abiType = Args_General1;
      return (void*)&js::InlineTypedObject::class_;

#if defined(ENABLE_WASM_EXCEPTIONS)
    case SymbolicAddress::ExceptionNew:
      *abiType = Args_General_GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigExceptionNew));
      return FuncCast(Instance::exceptionNew, *abiType);
    case SymbolicAddress::ThrowException:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigThrowException));
      return FuncCast(Instance::throwException, *abiType);
    case SymbolicAddress::ConsumePendingException:
      *abiType = Args_Int32_General;
      MOZ_ASSERT(*abiType == ToABIType(SASigConsumePendingException));
      return FuncCast(Instance::consumePendingException, *abiType);
    case SymbolicAddress::PushRefIntoExn:
      *abiType = Args_Int32_GeneralGeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPushRefIntoExn));
      return FuncCast(Instance::pushRefIntoExn, *abiType);
#endif

#ifdef WASM_CODEGEN_DEBUG
    case SymbolicAddress::PrintI32:
      *abiType = Args_General1;
      return FuncCast(PrintI32, *abiType);
    case SymbolicAddress::PrintPtr:
      *abiType = Args_General1;
      return FuncCast(PrintPtr, *abiType);
    case SymbolicAddress::PrintF32:
      *abiType = Args_Int_Float32;
      return FuncCast(PrintF32, *abiType);
    case SymbolicAddress::PrintF64:
      *abiType = Args_Int_Double;
      return FuncCast(PrintF64, *abiType);
    case SymbolicAddress::PrintText:
      *abiType = Args_General1;
      return FuncCast(PrintText, *abiType);
#endif
#define DECL_SAS_TYPE_AND_FN(op, export, sa_name, abitype, entry, idx) \
  case SymbolicAddress::sa_name:                                       \
    *abiType = abitype;                                                \
    return FuncCast(entry, *abiType);
      FOR_EACH_INTRINSIC(DECL_SAS_TYPE_AND_FN)
#undef DECL_SAS_TYPE_AND_FN
    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("Bad SymbolicAddress");
}

bool wasm::IsRoundingFunction(SymbolicAddress callee, jit::RoundingMode* mode) {
  switch (callee) {
    case SymbolicAddress::FloorD:
    case SymbolicAddress::FloorF:
      *mode = jit::RoundingMode::Down;
      return true;
    case SymbolicAddress::CeilD:
    case SymbolicAddress::CeilF:
      *mode = jit::RoundingMode::Up;
      return true;
    case SymbolicAddress::TruncD:
    case SymbolicAddress::TruncF:
      *mode = jit::RoundingMode::TowardsZero;
      return true;
    case SymbolicAddress::NearbyIntD:
    case SymbolicAddress::NearbyIntF:
      *mode = jit::RoundingMode::NearestTiesToEven;
      return true;
    default:
      return false;
  }
}

bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
  // Some functions don't want a thunk, because they already have one or
  // because they don't have frame info.
  switch (sym) {
    case SymbolicAddress::HandleDebugTrap:        // GenerateDebugTrapStub
    case SymbolicAddress::HandleThrow:            // GenerateThrowStub
    case SymbolicAddress::HandleTrap:             // GenerateTrapExit
    case SymbolicAddress::CallImport_General:     // GenerateImportInterpExit
    case SymbolicAddress::CoerceInPlace_ToInt32:  // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToNumber:
    case SymbolicAddress::CoerceInPlace_ToBigInt:
    case SymbolicAddress::BoxValue_Anyref:
    case SymbolicAddress::InlineTypedObjectClass:
#ifdef WASM_CODEGEN_DEBUG
    case SymbolicAddress::PrintI32:
    case SymbolicAddress::PrintPtr:
    case SymbolicAddress::PrintF32:
    case SymbolicAddress::PrintF64:
    case SymbolicAddress::PrintText:  // Used only in stubs
#endif
      return false;
    case SymbolicAddress::ToInt32:
    case SymbolicAddress::DivI64:
    case SymbolicAddress::UDivI64:
    case SymbolicAddress::ModI64:
    case SymbolicAddress::UModI64:
    case SymbolicAddress::TruncateDoubleToUint64:
    case SymbolicAddress::TruncateDoubleToInt64:
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
    case SymbolicAddress::Uint64ToDouble:
    case SymbolicAddress::Uint64ToFloat32:
    case SymbolicAddress::Int64ToDouble:
    case SymbolicAddress::Int64ToFloat32:
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
    case SymbolicAddress::aeabi_uidivmod:
#endif
    case SymbolicAddress::AllocateBigInt:
    case SymbolicAddress::ModD:
    case SymbolicAddress::SinD:
    case SymbolicAddress::CosD:
    case SymbolicAddress::TanD:
    case SymbolicAddress::ASinD:
    case SymbolicAddress::ACosD:
    case SymbolicAddress::ATanD:
    case SymbolicAddress::CeilD:
    case SymbolicAddress::CeilF:
    case SymbolicAddress::FloorD:
    case SymbolicAddress::FloorF:
    case SymbolicAddress::TruncD:
    case SymbolicAddress::TruncF:
    case SymbolicAddress::NearbyIntD:
    case SymbolicAddress::NearbyIntF:
    case SymbolicAddress::ExpD:
    case SymbolicAddress::LogD:
    case SymbolicAddress::PowD:
    case SymbolicAddress::ATan2D:
    case SymbolicAddress::MemoryGrowM32:
    case SymbolicAddress::MemoryGrowM64:
    case SymbolicAddress::MemorySizeM32:
    case SymbolicAddress::MemorySizeM64:
    case SymbolicAddress::WaitI32M32:
    case SymbolicAddress::WaitI32M64:
    case SymbolicAddress::WaitI64M32:
    case SymbolicAddress::WaitI64M64:
    case SymbolicAddress::WakeM32:
    case SymbolicAddress::WakeM64:
    case SymbolicAddress::CoerceInPlace_JitEntry:
    case SymbolicAddress::ReportV128JSCall:
    case SymbolicAddress::MemCopyM32:
    case SymbolicAddress::MemCopySharedM32:
    case SymbolicAddress::MemCopyM64:
    case SymbolicAddress::MemCopySharedM64:
    case SymbolicAddress::DataDrop:
    case SymbolicAddress::MemFillM32:
    case SymbolicAddress::MemFillSharedM32:
    case SymbolicAddress::MemFillM64:
    case SymbolicAddress::MemFillSharedM64:
    case SymbolicAddress::MemInitM32:
    case SymbolicAddress::MemInitM64:
    case SymbolicAddress::TableCopy:
    case SymbolicAddress::ElemDrop:
    case SymbolicAddress::TableFill:
    case SymbolicAddress::TableGet:
    case SymbolicAddress::TableGrow:
    case SymbolicAddress::TableInit:
    case SymbolicAddress::TableSet:
    case SymbolicAddress::TableSize:
    case SymbolicAddress::RefFunc:
    case SymbolicAddress::PreBarrierFiltering:
    case SymbolicAddress::PostBarrier:
    case SymbolicAddress::PostBarrierFiltering:
    case SymbolicAddress::StructNew:
#ifdef ENABLE_WASM_EXCEPTIONS
    case SymbolicAddress::ExceptionNew:
    case SymbolicAddress::ThrowException:
    case SymbolicAddress::ConsumePendingException:
    case SymbolicAddress::PushRefIntoExn:
#endif
    case SymbolicAddress::ArrayNew:
    case SymbolicAddress::RefTest:
    case SymbolicAddress::RttSub:
#define OP(op, export, sa_name, abitype, entry, idx) \
  case SymbolicAddress::sa_name:
      FOR_EACH_INTRINSIC(OP)
#undef OP
      return true;
    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("unexpected symbolic address");
}

// ============================================================================
// JS builtins that can be imported by wasm modules and called efficiently
// through thunks. These thunks conform to the internal wasm ABI and thus can
// be patched in for import calls. Calling a JS builtin through a thunk is much
// faster than calling out through the generic import call trampoline, which
// ends up in the slowest C++ Instance::callImport path.
//
// Each JS builtin can have several overloads. These must all be enumerated in
// PopulateTypedNatives() so they can be included in the process-wide thunk
// set.

#define FOR_EACH_SIN_COS_TAN_NATIVE(_) \
  _(math_sin, MathSin)                 \
  _(math_tan, MathTan)                 \
  _(math_cos, MathCos)

#define FOR_EACH_UNARY_NATIVE(_)   \
  _(math_exp, MathExp)             \
  _(math_log, MathLog)             \
  _(math_asin, MathASin)           \
  _(math_atan, MathATan)           \
  _(math_acos, MathACos)           \
  _(math_log10, MathLog10)         \
  _(math_log2, MathLog2)           \
  _(math_log1p, MathLog1P)         \
  _(math_expm1, MathExpM1)         \
  _(math_sinh, MathSinH)           \
  _(math_tanh, MathTanH)           \
  _(math_cosh, MathCosH)           \
  _(math_asinh, MathASinH)         \
  _(math_atanh, MathATanH)         \
  _(math_acosh, MathACosH)         \
  _(math_sign, MathSign)           \
  _(math_trunc, MathTrunc)         \
  _(math_cbrt, MathCbrt)

#define FOR_EACH_BINARY_NATIVE(_) \
  _(ecmaAtan2, MathATan2)         \
  _(ecmaHypot, MathHypot)         \
  _(ecmaPow, MathPow)

#define DEFINE_SIN_COS_TAN_FLOAT_WRAPPER(func, _)  \
  static float func##_impl_f32(float x) {          \
    if (math_use_fdlibm_for_sin_cos_tan()) {       \
      return float(func##_fdlibm_impl(double(x))); \
    }                                              \
    return float(func##_native_impl(double(x)));   \
  }

#define DEFINE_UNARY_FLOAT_WRAPPER(func, _) \
  static float func##_impl_f32(float x) {   \
    return float(func##_impl(double(x)));   \
  }

#define DEFINE_BINARY_FLOAT_WRAPPER(func, _)  \
  static float func##_f32(float x, float y) { \
    return float(func(double(x), double(y))); \
  }

FOR_EACH_SIN_COS_TAN_NATIVE(DEFINE_SIN_COS_TAN_FLOAT_WRAPPER)
FOR_EACH_UNARY_NATIVE(DEFINE_UNARY_FLOAT_WRAPPER)
FOR_EACH_BINARY_NATIVE(DEFINE_BINARY_FLOAT_WRAPPER)

#undef DEFINE_SIN_COS_TAN_FLOAT_WRAPPER
#undef DEFINE_UNARY_FLOAT_WRAPPER
#undef DEFINE_BINARY_FLOAT_WRAPPER

struct TypedNative {
  InlinableNative native;
  ABIFunctionType abiType;

  TypedNative(InlinableNative native, ABIFunctionType abiType)
      : native(native), abiType(abiType) {}

  using Lookup = TypedNative;
  static HashNumber hash(const Lookup& l) {
    return HashGeneric(uint32_t(l.native), uint32_t(l.abiType));
  }
  static bool match(const TypedNative& lhs, const Lookup& rhs) {
    return lhs.native == rhs.native && lhs.abiType == rhs.abiType;
  }
};
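
// TypedNative doubles as its own HashMap policy (the Lookup/hash/match
// members above), so thunks are keyed on the (native, abiType) pair and each
// overload of a JS builtin gets its own entry.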

using TypedNativeToFuncPtrMap =
    HashMap<TypedNative, void*, TypedNative, SystemAllocPolicy>;

static bool PopulateTypedNatives(TypedNativeToFuncPtrMap* typedNatives) {
#define ADD_OVERLOAD(funcName, native, abiType)                            \
  if (!typedNatives->putNew(TypedNative(InlinableNative::native, abiType), \
                            FuncCast(funcName, abiType)))                  \
    return false;

#define ADD_SIN_COS_TAN_OVERLOADS(funcName, native)                  \
  if (math_use_fdlibm_for_sin_cos_tan()) {                           \
    ADD_OVERLOAD(funcName##_fdlibm_impl, native, Args_Double_Double) \
  } else {                                                           \
    ADD_OVERLOAD(funcName##_native_impl, native, Args_Double_Double) \
  }                                                                  \
  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32)

#define ADD_UNARY_OVERLOADS(funcName, native)               \
  ADD_OVERLOAD(funcName##_impl, native, Args_Double_Double) \
  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32)

#define ADD_BINARY_OVERLOADS(funcName, native)             \
  ADD_OVERLOAD(funcName, native, Args_Double_DoubleDouble) \
  ADD_OVERLOAD(funcName##_f32, native, Args_Float32_Float32Float32)

  FOR_EACH_SIN_COS_TAN_NATIVE(ADD_SIN_COS_TAN_OVERLOADS)
  FOR_EACH_UNARY_NATIVE(ADD_UNARY_OVERLOADS)
  FOR_EACH_BINARY_NATIVE(ADD_BINARY_OVERLOADS)

#undef ADD_OVERLOAD
#undef ADD_SIN_COS_TAN_OVERLOADS
#undef ADD_UNARY_OVERLOADS
#undef ADD_BINARY_OVERLOADS

  return true;
}
1596 #undef FOR_EACH_UNARY_NATIVE
1597 #undef FOR_EACH_BINARY_NATIVE
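
// For example, ADD_UNARY_OVERLOADS(math_exp, MathExp) registers two function
// pointers for MathExp: math_exp_impl under Args_Double_Double and
// math_exp_impl_f32 under Args_Float32_Float32, so both f64 and f32 import
// signatures can be thunked.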

// ============================================================================
// Process-wide builtin thunk set
//
// Thunks are inserted between wasm calls and the C++ callee and achieve two
// things:
// - bridging the few differences between the internal wasm ABI and the
//   external native ABI (viz. float returns on x86 and soft-fp ARM)
// - executing an exit prologue/epilogue which in turn allows any profiling
//   iterator to see the full stack up to the wasm operation that called out
//
// Thunks are created for two kinds of C++ callees, enumerated above:
// - SymbolicAddress: for statically compiled calls in the wasm module
// - Imported JS builtins: optimized calls to imports
//
// All thunks are created up front, lazily, when the first wasm module is
// compiled in the process. Thunks are kept alive until the JS engine shuts
// down in the process. No thunks are created at runtime after initialization.
// This simple scheme allows several simplifications:
// - no reference counting to keep thunks alive
// - no problems toggling W^X permissions which, because of multiple executing
//   threads, would require each thunk allocation to be on its own page
// The cost of creating all thunks at once is relatively low since all thunks
// fit within the smallest executable quantum (64k).
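
// Roughly, each generated thunk looks like this (a sketch; see
// GenerateBuiltinThunk() for the real code):
//
//   <exit prologue: push a frame so profiling stack iteration sees the exit>
//   <move arguments from the wasm ABI to the native ABI>
//   call <C++ callee>
//   <move the return value back, e.g. x87 st(0) -> xmm0 on x86>
//   <exit epilogue>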

using TypedNativeToCodeRangeMap =
    HashMap<TypedNative, uint32_t, TypedNative, SystemAllocPolicy>;

using SymbolicAddressToCodeRangeArray =
    EnumeratedArray<SymbolicAddress, SymbolicAddress::Limit, uint32_t>;

struct BuiltinThunks {
  uint8_t* codeBase;
  size_t codeSize;
  CodeRangeVector codeRanges;
  TypedNativeToCodeRangeMap typedNativeToCodeRange;
  SymbolicAddressToCodeRangeArray symbolicAddressToCodeRange;
  uint32_t provisionalLazyJitEntryOffset;

  BuiltinThunks() : codeBase(nullptr), codeSize(0) {}

  ~BuiltinThunks() {
    if (codeBase) {
      DeallocateExecutableMemory(codeBase, codeSize);
    }
  }
};
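
// codeRanges is appended in emission order and is therefore sorted by code
// offset; LookupBuiltinThunk() below relies on this via LookupInSorted().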

Mutex initBuiltinThunks(mutexid::WasmInitBuiltinThunks);
Atomic<const BuiltinThunks*> builtinThunks;

bool wasm::EnsureBuiltinThunksInitialized() {
  LockGuard<Mutex> guard(initBuiltinThunks);
  if (builtinThunks) {
    return true;
  }

  auto thunks = MakeUnique<BuiltinThunks>();
  if (!thunks) {
    return false;
  }

  LifoAlloc lifo(BUILTIN_THUNK_LIFO_SIZE);
  TempAllocator tempAlloc(&lifo);
  WasmMacroAssembler masm(tempAlloc);
  AutoCreatedBy acb(masm, "wasm::EnsureBuiltinThunksInitialized");

  for (auto sym : MakeEnumeratedRange(SymbolicAddress::Limit)) {
    if (!NeedsBuiltinThunk(sym)) {
      thunks->symbolicAddressToCodeRange[sym] = UINT32_MAX;
      continue;
    }

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    thunks->symbolicAddressToCodeRange[sym] = codeRangeIndex;

    ABIFunctionType abiType;
    void* funcPtr = AddressOf(sym, &abiType);

    ExitReason exitReason(sym);

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  TypedNativeToFuncPtrMap typedNatives;
  if (!PopulateTypedNatives(&typedNatives)) {
    return false;
  }

  for (TypedNativeToFuncPtrMap::Range r = typedNatives.all(); !r.empty();
       r.popFront()) {
    TypedNative typedNative = r.front().key();

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    if (!thunks->typedNativeToCodeRange.putNew(typedNative, codeRangeIndex)) {
      return false;
    }

    ABIFunctionType abiType = typedNative.abiType;
    void* funcPtr = r.front().value();

    ExitReason exitReason = ExitReason::Fixed::BuiltinNative;

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  // Provisional lazy JitEntry stub: This is a shared stub that can be
  // installed in the jit-entry jump table. It uses the JIT ABI and when
  // invoked will retrieve (via TlsContext()) and invoke the
  // context-appropriate invoke-from-interpreter jit stub, thus serving as the
  // initial, unoptimized jit-entry stub for any exported wasm function that
  // has a jit-entry.

#ifdef DEBUG
  // We need to allow this machine code to bake in a C++ code pointer, so we
  // disable the wasm restrictions while generating this stub.
  JitContext jitContext(&tempAlloc);
  bool oldFlag = jitContext.setIsCompilingWasm(false);
#endif

  Offsets provisionalLazyJitEntryOffsets;
  if (!GenerateProvisionalLazyJitEntryStub(masm,
                                           &provisionalLazyJitEntryOffsets)) {
    return false;
  }
  thunks->provisionalLazyJitEntryOffset = provisionalLazyJitEntryOffsets.begin;

#ifdef DEBUG
  jitContext.setIsCompilingWasm(oldFlag);
#endif

  masm.finish();
  if (masm.oom()) {
    return false;
  }

  size_t allocSize = AlignBytes(masm.bytesNeeded(), ExecutableCodePageSize);
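  // AlignBytes rounds the thunk code size up to the executable page
  // granularity, the 64k quantum mentioned in the comment above, so even a
  // small amount of thunk code reserves one whole quantum.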

  thunks->codeSize = allocSize;
  thunks->codeBase = (uint8_t*)AllocateExecutableMemory(
      allocSize, ProtectionSetting::Writable, MemCheckKind::MakeUndefined);
  if (!thunks->codeBase) {
    return false;
  }

  masm.executableCopy(thunks->codeBase);
  memset(thunks->codeBase + masm.bytesNeeded(), 0,
         allocSize - masm.bytesNeeded());

  masm.processCodeLabels(thunks->codeBase);
  PatchDebugSymbolicAccesses(thunks->codeBase, masm);

  MOZ_ASSERT(masm.callSites().empty());
  MOZ_ASSERT(masm.callSiteTargets().empty());
  MOZ_ASSERT(masm.trapSites().empty());
#ifdef ENABLE_WASM_EXCEPTIONS
  MOZ_ASSERT(masm.tryNotes().empty());
#endif
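
  // The asserts above hold because thunks only bridge ABIs and call into C++;
  // they emit no wasm call sites, trap sites, or try notes of their own.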

  if (!ExecutableAllocator::makeExecutableAndFlushICache(
          FlushICacheSpec::LocalThreadOnly, thunks->codeBase,
          thunks->codeSize)) {
    return false;
  }

  builtinThunks = thunks.release();
  return true;
}

void wasm::ReleaseBuiltinThunks() {
  if (builtinThunks) {
    const BuiltinThunks* ptr = builtinThunks;
    js_delete(const_cast<BuiltinThunks*>(ptr));
    builtinThunks = nullptr;
  }
}

void* wasm::SymbolicAddressTarget(SymbolicAddress sym) {
  MOZ_ASSERT(builtinThunks);

  ABIFunctionType abiType;
  void* funcPtr = AddressOf(sym, &abiType);

  if (!NeedsBuiltinThunk(sym)) {
    return funcPtr;
  }

  const BuiltinThunks& thunks = *builtinThunks;
  uint32_t codeRangeIndex = thunks.symbolicAddressToCodeRange[sym];
  return thunks.codeBase + thunks.codeRanges[codeRangeIndex].begin();
}
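
// Example (a hypothetical caller): linking a wasm call to the sin builtin
// would resolve its target as
//
//   void* target = SymbolicAddressTarget(SymbolicAddress::SinD);
//
// yielding either the raw C++ function or its thunk, as appropriate.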

void* wasm::ProvisionalLazyJitEntryStub() {
  MOZ_ASSERT(builtinThunks);

  const BuiltinThunks& thunks = *builtinThunks;
  return thunks.codeBase + thunks.provisionalLazyJitEntryOffset;
}

static Maybe<ABIFunctionType> ToBuiltinABIFunctionType(
    const FuncType& funcType) {
  const ValTypeVector& args = funcType.args();
  const ValTypeVector& results = funcType.results();

  if (results.length() != 1) {
    return Nothing();
  }

  uint32_t abiType;
  switch (results[0].kind()) {
    case ValType::F32:
      abiType = ArgType_Float32 << RetType_Shift;
      break;
    case ValType::F64:
      abiType = ArgType_Float64 << RetType_Shift;
      break;
    default:
      return Nothing();
  }

  if ((args.length() + 1) > (sizeof(uint32_t) * 8 / ArgType_Shift)) {
    return Nothing();
  }

  for (size_t i = 0; i < args.length(); i++) {
    switch (args[i].kind()) {
      case ValType::F32:
        abiType |= (ArgType_Float32 << (ArgType_Shift * (i + 1)));
        break;
      case ValType::F64:
        abiType |= (ArgType_Float64 << (ArgType_Shift * (i + 1)));
        break;
      default:
        return Nothing();
    }
  }

  return Some(ABIFunctionType(abiType));
}
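
// For example, a wasm signature (f64) -> f64 encodes as
//
//   (ArgType_Float64 << RetType_Shift) | (ArgType_Float64 << ArgType_Shift)
//
// which corresponds to Args_Double_Double, matching the overloads registered
// in PopulateTypedNatives() above.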

void* wasm::MaybeGetBuiltinThunk(JSFunction* f, const FuncType& funcType) {
  MOZ_ASSERT(builtinThunks);

  if (!f->isNativeFun() || !f->hasJitInfo() ||
      f->jitInfo()->type() != JSJitInfo::InlinableNative) {
    return nullptr;
  }

  Maybe<ABIFunctionType> abiType = ToBuiltinABIFunctionType(funcType);
  if (!abiType) {
    return nullptr;
  }

  TypedNative typedNative(f->jitInfo()->inlinableNative, *abiType);

  const BuiltinThunks& thunks = *builtinThunks;
  auto p = thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(typedNative);
  if (!p) {
    return nullptr;
  }

  return thunks.codeBase + thunks.codeRanges[p->value()].begin();
}
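
// Used when instantiating imports: if the imported JSFunction is a known
// inlinable native (e.g. Math.sin) and the import's signature maps onto one of
// the overloads registered above, the returned thunk can be patched in as the
// import's call target.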

bool wasm::LookupBuiltinThunk(void* pc, const CodeRange** codeRange,
                              uint8_t** codeBase) {
  if (!builtinThunks) {
    return false;
  }

  const BuiltinThunks& thunks = *builtinThunks;
  if (pc < thunks.codeBase || pc >= thunks.codeBase + thunks.codeSize) {
    return false;
  }

  *codeBase = thunks.codeBase;

  CodeRange::OffsetInCode target((uint8_t*)pc - thunks.codeBase);
  *codeRange = LookupInSorted(thunks.codeRanges, target);

  return !!*codeRange;
}
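
// Used by stack iteration: given a pc that falls inside the process-wide thunk
// region, this recovers the containing CodeRange so a profiling stack walk can
// attribute the sample to the builtin being called.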