/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2019 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmGC.h"
#include "wasm/WasmInstance.h"
#include "jit/MacroAssembler-inl.h"

using mozilla::DebugOnly;

using namespace js;
using namespace js::jit;
using namespace js::wasm;

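// Convert a vector of per-word booleans into a heap-allocated wasm::StackMap,
// setting one bit for each true entry.  Returns nullptr on allocation
// failure.  `hasRefs` is the caller's belief about whether any bit is set,
// and is cross-checked against the vector's actual contents.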
wasm::StackMap* wasm::ConvertStackMapBoolVectorToStackMap(
    const StackMapBoolVector& vec, bool hasRefs) {
  wasm::StackMap* stackMap = wasm::StackMap::create(vec.length());
  if (!stackMap) {
    return nullptr;
  }

  bool hasRefsObserved = false;
  size_t i = 0;
  for (bool b : vec) {
    if (b) {
      stackMap->setBit(i);
      hasRefsObserved = true;
    }
    i++;
  }
  MOZ_RELEASE_ASSERT(hasRefs == hasRefsObserved);

  return stackMap;
}

// Generate a stackmap for a function's stack-overflow-at-entry trap, with
// the structure:
//
//    <reg dump area>
//    |       ++ <space reserved before trap, if any>
//    |               ++ <space for Frame>
//    |                       ++ <inbound arg area>
//    |                                           |
//    Lowest Addr                                 Highest Addr
//
// The caller owns the resulting stackmap.  This assumes a grow-down stack.
//
// For non-debug builds, if the stackmap would contain no pointers, no
// stackmap is created, and nullptr is returned.  For a debug build, a
// stackmap is always created and returned.
//
// The "space reserved before trap" is the space reserved by
// MacroAssembler::wasmReserveStackChecked, in the case where the frame is
// "small", as determined by that function.
bool wasm::CreateStackMapForFunctionEntryTrap(
    const wasm::ArgTypeVector& argTypes, const RegisterOffsets& trapExitLayout,
    size_t trapExitLayoutWords, size_t nBytesReservedBeforeTrap,
    size_t nInboundStackArgBytes, wasm::StackMap** result) {
  // Ensure this is defined on all return paths.
  *result = nullptr;

  // The size of the wasm::Frame itself.
  const size_t nFrameBytes = sizeof(wasm::Frame);

  // The size of the register dump (trap) area.
  const size_t trapExitLayoutBytes = trapExitLayoutWords * sizeof(void*);

  // This is the total number of bytes covered by the map.
  const DebugOnly<size_t> nTotalBytes = trapExitLayoutBytes +
                                        nBytesReservedBeforeTrap + nFrameBytes +
                                        nInboundStackArgBytes;

  // Create the stackmap initially in this vector.  Since most frames will
  // contain 128 or fewer words, heap allocation is avoided in the majority of
  // cases.  vec[0] is for the lowest address in the map, vec[N-1] is for the
  // highest address in the map.
  StackMapBoolVector vec;

  // Keep track of whether we've actually seen any refs.
  bool hasRefs = false;

  // REG DUMP AREA
  wasm::ExitStubMapVector trapExitExtras;
  if (!GenerateStackmapEntriesForTrapExit(
          argTypes, trapExitLayout, trapExitLayoutWords, &trapExitExtras)) {
    return false;
  }
  MOZ_ASSERT(trapExitExtras.length() == trapExitLayoutWords);

  if (!vec.appendN(false, trapExitLayoutWords)) {
    return false;
  }
  for (size_t i = 0; i < trapExitLayoutWords; i++) {
    vec[i] = trapExitExtras[i];
    hasRefs |= vec[i];
  }

  // SPACE RESERVED BEFORE TRAP
  MOZ_ASSERT(nBytesReservedBeforeTrap % sizeof(void*) == 0);
  if (!vec.appendN(false, nBytesReservedBeforeTrap / sizeof(void*))) {
    return false;
  }

  // SPACE FOR FRAME
  if (!vec.appendN(false, nFrameBytes / sizeof(void*))) {
    return false;
  }

  // INBOUND ARG AREA
  MOZ_ASSERT(nInboundStackArgBytes % sizeof(void*) == 0);
  const size_t numStackArgWords = nInboundStackArgBytes / sizeof(void*);

  const size_t wordsSoFar = vec.length();
  if (!vec.appendN(false, numStackArgWords)) {
    return false;
  }

  for (WasmABIArgIter i(argTypes); !i.done(); i++) {
    ABIArg argLoc = *i;
    if (argLoc.kind() == ABIArg::Stack &&
        argTypes[i.index()] == MIRType::WasmAnyRef) {
      uint32_t offset = argLoc.offsetFromArgBase();
      MOZ_ASSERT(offset < nInboundStackArgBytes);
      MOZ_ASSERT(offset % sizeof(void*) == 0);
      vec[wordsSoFar + offset / sizeof(void*)] = true;
      hasRefs = true;
    }
  }

#ifndef DEBUG
  // We saw no references, and this is a non-debug build, so don't bother
  // building the stackmap.
  if (!hasRefs) {
    return true;
  }
#endif

  // Convert vec into a wasm::StackMap.
  MOZ_ASSERT(vec.length() * sizeof(void*) == nTotalBytes);
  wasm::StackMap* stackMap = ConvertStackMapBoolVectorToStackMap(vec, hasRefs);
  if (!stackMap) {
    return false;
  }
  stackMap->setExitStubWords(trapExitLayoutWords);

  stackMap->setFrameOffsetFromTop(nFrameBytes / sizeof(void*) +
                                  numStackArgWords);
#ifdef DEBUG
  for (uint32_t i = 0; i < nFrameBytes / sizeof(void*); i++) {
    MOZ_ASSERT(stackMap->getBit(stackMap->header.numMappedWords -
                                stackMap->header.frameOffsetFromTop + i) == 0);
  }
#endif

  *result = stackMap;
  return true;
}

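// Fill `extras` with one boolean per word of the trap exit stub's register
// save area, marking the words that hold WasmAnyRef-typed register arguments.
// The vector is sized to `trapExitLayoutNumWords`; entry 0 corresponds to the
// lowest address of the save area.  Returns false on allocation failure.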
bool wasm::GenerateStackmapEntriesForTrapExit(
    const ArgTypeVector& args, const RegisterOffsets& trapExitLayout,
    const size_t trapExitLayoutNumWords, ExitStubMapVector* extras) {
  MOZ_ASSERT(extras->empty());

  if (!extras->appendN(false, trapExitLayoutNumWords)) {
    return false;
  }

  for (WasmABIArgIter i(args); !i.done(); i++) {
    if (!i->argInRegister() || i.mirType() != MIRType::WasmAnyRef) {
      continue;
    }

    size_t offsetFromTop = trapExitLayout.getOffset(i->gpr());

    // If this doesn't hold, the associated register wasn't saved by
    // the trap exit stub.  Better to crash now than much later, in
    // some obscure place, and possibly with security consequences.
    MOZ_RELEASE_ASSERT(offsetFromTop < trapExitLayoutNumWords);

    // offsetFromTop is an offset in words down from the highest
    // address in the exit stub save area.  Switch it around to be an
    // offset up from the bottom of the (integer register) save area.
    size_t offsetFromBottom = trapExitLayoutNumWords - 1 - offsetFromTop;

    (*extras)[offsetFromBottom] = true;
  }

  return true;
}

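// Emit a conditional branch to `skipBarrier` for the cases where the GC
// pre-write barrier is not required: either no incremental GC is in progress,
// or the value about to be overwritten (loaded from valueAddr + valueOffset)
// is not a GC thing.  If `trapOffset` is non-null, the load of the previous
// value is recorded as a potential null-pointer-dereference trap site.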
void wasm::EmitWasmPreBarrierGuard(MacroAssembler& masm, Register instance,
                                   Register scratch, Register valueAddr,
                                   size_t valueOffset, Label* skipBarrier,
                                   BytecodeOffset* trapOffset) {
  // If no incremental GC has started, we don't need the barrier.
  masm.loadPtr(
      Address(instance, Instance::offsetOfAddressOfNeedsIncrementalBarrier()),
      scratch);
  masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1),
                    skipBarrier);

  // If the previous value is not a GC thing, we don't need the barrier.
  FaultingCodeOffset fco =
      masm.loadPtr(Address(valueAddr, valueOffset), scratch);
  masm.branchWasmAnyRefIsGCThing(false, scratch, skipBarrier);

  // Emit metadata for a potential null access when reading the previous value.
  if (trapOffset) {
    masm.append(wasm::Trap::NullPointerDereference,
                wasm::TrapSite(TrapMachineInsnForLoadWord(), fco, *trapOffset));
  }
}

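// Call the pre-write barrier code loaded from the instance, for the value
// slot at valueAddr + valueOffset.  valueAddr must be PreBarrierReg; any
// non-zero offset is temporarily folded into that register around the call
// and then removed again.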
void wasm::EmitWasmPreBarrierCall(MacroAssembler& masm, Register instance,
                                  Register scratch, Register valueAddr,
                                  size_t valueOffset) {
  MOZ_ASSERT(valueAddr == PreBarrierReg);

  // Add the offset to the PreBarrierReg, if any.
  if (valueOffset != 0) {
    masm.addPtr(Imm32(valueOffset), valueAddr);
  }

#if defined(DEBUG) && defined(JS_CODEGEN_ARM64)
  // The prebarrier assumes that x28 == sp.
  Label ok;
  masm.Cmp(sp, vixl::Operand(x28));
  masm.B(&ok, Assembler::Equal);
  masm.breakpoint();
  masm.bind(&ok);
#endif

  // Load and call the pre-write barrier code.  It will preserve all volatile
  // registers.
  masm.loadPtr(Address(instance, Instance::offsetOfPreBarrierCode()), scratch);
  masm.call(scratch);

  // Remove the offset we folded into PreBarrierReg, if any.
  if (valueOffset != 0) {
    masm.subPtr(Imm32(valueOffset), valueAddr);
  }
}

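// Emit a conditional branch to `skipBarrier` for the cases where the GC
// post-write barrier is not required: the containing object (if supplied) is
// itself in the nursery, or the value being stored is not a nursery cell.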
void wasm::EmitWasmPostBarrierGuard(MacroAssembler& masm,
                                    const Maybe<Register>& object,
                                    Register otherScratch, Register setValue,
                                    Label* skipBarrier) {
  // If there is a containing object and it is in the nursery, no barrier.
  if (object) {
    masm.branchPtrInNurseryChunk(Assembler::Equal, *object, otherScratch,
                                 skipBarrier);
  }

  // If the pointer being stored is to a tenured object, no barrier.
  masm.branchWasmAnyRefIsNurseryCell(false, setValue, otherScratch,
                                     skipBarrier);
}

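// Debug-only sanity check: returns true if `nextPC` looks like a plausible
// stackmap key, i.e. it points just past an instruction that can own a
// stackmap (a trap instruction or a call) on the current architecture.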
#ifdef DEBUG
bool wasm::IsPlausibleStackMapKey(const uint8_t* nextPC) {
#  if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_X86)
  const uint8_t* insn = nextPC;
  return (insn[-2] == 0x0F && insn[-1] == 0x0B) ||           // ud2
         (insn[-2] == 0xFF && (insn[-1] & 0xF8) == 0xD0) ||  // call *%r_
         insn[-5] == 0xE8;                                   // call simm32

#  elif defined(JS_CODEGEN_ARM)
  const uint32_t* insn = (const uint32_t*)nextPC;
  return ((uintptr_t(insn) & 3) == 0) &&            // must be ARM, not Thumb
         (insn[-1] == 0xe7f000f0 ||                 // udf
          (insn[-1] & 0xfffffff0) == 0xe12fff30 ||  // blx reg (ARM, enc A1)
          (insn[-1] & 0x0f000000) == 0x0b000000);   // bl.cc simm24 (ARM, enc A1)

#  elif defined(JS_CODEGEN_ARM64)
  const uint32_t hltInsn = 0xd4a00000;
  const uint32_t* insn = (const uint32_t*)nextPC;
  return ((uintptr_t(insn) & 3) == 0) &&
         (insn[-1] == hltInsn ||                    // hlt
          (insn[-1] & 0xfffffc1f) == 0xd63f0000 ||  // blr reg
          (insn[-1] & 0xfc000000) == 0x94000000);   // bl simm26

#  elif defined(JS_CODEGEN_MIPS64)
  // TODO (bug 1699696): Implement this. As for the platforms above, we need to
  // enumerate all code sequences that can precede the stackmap location.
  return true;
#  elif defined(JS_CODEGEN_LOONG64)
  // TODO(loong64): Implement IsValidStackMapKey.
  return true;
#  elif defined(JS_CODEGEN_RISCV64)
  const uint32_t* insn = (const uint32_t*)nextPC;
  return (((uintptr_t(insn) & 3) == 0) &&
          ((insn[-1] == 0x00006037 && insn[-2] == 0x00100073) ||  // break;
           ((insn[-1] & kBaseOpcodeMask) == JALR) ||
           ((insn[-1] & kBaseOpcodeMask) == JAL) ||
           (insn[-1] == 0x00100073 &&
            (insn[-2] & kITypeMask) == RO_CSRRWI)));  // wasm trap
#  else
  MOZ_CRASH("IsValidStackMapKey: requires implementation on this platform");
#  endif
}
#endif