// Bug 1874684 - Part 21: Rename SecondsAndNanoseconds::toTotalNanoseconds. r=dminor
// [gecko.git] / js / src / wasm / WasmOpIter.h
// blob 59d494bfbf0584522295bb5792c24deef16a28f5
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 #ifndef wasm_op_iter_h
20 #define wasm_op_iter_h
22 #include "mozilla/CompactPair.h"
23 #include "mozilla/Poison.h"
25 #include <type_traits>
27 #include "js/Printf.h"
28 #include "wasm/WasmBuiltinModule.h"
29 #include "wasm/WasmUtility.h"
30 #include "wasm/WasmValidate.h"
32 namespace js {
33 namespace wasm {
35 // The kind of a control-flow stack item.
36 enum class LabelKind : uint8_t {
37 Body,
38 Block,
39 Loop,
40 Then,
41 Else,
42 Try,
43 Catch,
44 CatchAll,
45 TryTable,
48 // The type of values on the operand stack during validation. This is either a
49 // ValType or the special type "Bottom".
51 class StackType {
52 PackedTypeCode tc_;
54 explicit StackType(PackedTypeCode tc) : tc_(tc) {}
56 public:
57 StackType() : tc_(PackedTypeCode::invalid()) {}
59 explicit StackType(const ValType& t) : tc_(t.packed()) {
60 MOZ_ASSERT(tc_.isValid());
61 MOZ_ASSERT(!isStackBottom());
64 static StackType bottom() {
65 return StackType(PackedTypeCode::pack(TypeCode::Limit));
68 bool isStackBottom() const {
69 MOZ_ASSERT(tc_.isValid());
70 return tc_.typeCode() == TypeCode::Limit;
73 // Returns whether this input is nullable when interpreted as an operand.
74 // When the type is bottom for unreachable code, this returns false as that
75 // is the most permissive option.
76 bool isNullableAsOperand() const {
77 MOZ_ASSERT(tc_.isValid());
78 return isStackBottom() ? false : tc_.isNullable();
81 ValType valType() const {
82 MOZ_ASSERT(tc_.isValid());
83 MOZ_ASSERT(!isStackBottom());
84 return ValType(tc_);
87 ValType valTypeOr(ValType ifBottom) const {
88 MOZ_ASSERT(tc_.isValid());
89 if (isStackBottom()) {
90 return ifBottom;
92 return valType();
95 ValType asNonNullable() const {
96 MOZ_ASSERT(tc_.isValid());
97 MOZ_ASSERT(!isStackBottom());
98 return ValType(tc_.withIsNullable(false));
101 bool isValidForUntypedSelect() const {
102 MOZ_ASSERT(tc_.isValid());
103 if (isStackBottom()) {
104 return true;
106 switch (valType().kind()) {
107 case ValType::I32:
108 case ValType::F32:
109 case ValType::I64:
110 case ValType::F64:
111 #ifdef ENABLE_WASM_SIMD
112 case ValType::V128:
113 #endif
114 return true;
115 default:
116 return false;
120 bool operator==(const StackType& that) const {
121 MOZ_ASSERT(tc_.isValid() && that.tc_.isValid());
122 return tc_ == that.tc_;
125 bool operator!=(const StackType& that) const {
126 MOZ_ASSERT(tc_.isValid() && that.tc_.isValid());
127 return tc_ != that.tc_;
#ifdef DEBUG
// Families of opcodes that share a signature and validation logic. Each
// enumerator corresponds to one of the read* methods on OpIter below.
enum class OpKind {
  Block,
  Loop,
  Unreachable,
  Drop,
  I32,
  I64,
  F32,
  F64,
  V128,
  Br,
  BrIf,
  BrTable,
  Nop,
  Unary,
  Binary,
  Ternary,
  Comparison,
  Conversion,
  Load,
  Store,
  TeeStore,
  MemorySize,
  MemoryGrow,
  Select,
  GetLocal,
  SetLocal,
  TeeLocal,
  GetGlobal,
  SetGlobal,
  TeeGlobal,
  Call,
  ReturnCall,
  CallIndirect,
  ReturnCallIndirect,
#  ifdef ENABLE_WASM_GC
  CallRef,
  ReturnCallRef,
#  endif
  OldCallDirect,
  OldCallIndirect,
  Return,
  If,
  Else,
  End,
  Wait,
  Wake,
  Fence,
  AtomicLoad,
  AtomicStore,
  AtomicBinOp,
  AtomicCompareExchange,
  MemOrTableCopy,
  DataOrElemDrop,
  MemFill,
  MemOrTableInit,
  TableFill,
  MemDiscard,
  TableGet,
  TableGrow,
  TableSet,
  TableSize,
  RefNull,
  RefFunc,
  RefAsNonNull,
  BrOnNull,
  BrOnNonNull,
  StructNew,
  StructNewDefault,
  StructGet,
  StructSet,
  ArrayNew,
  ArrayNewFixed,
  ArrayNewDefault,
  ArrayNewData,
  ArrayNewElem,
  ArrayInitData,
  ArrayInitElem,
  ArrayGet,
  ArraySet,
  ArrayLen,
  ArrayCopy,
  ArrayFill,
  RefTest,
  RefCast,
  BrOnCast,
  RefConversion,
#  ifdef ENABLE_WASM_SIMD
  ExtractLane,
  ReplaceLane,
  LoadLane,
  StoreLane,
  VectorShift,
  VectorShuffle,
#  endif
  Catch,
  CatchAll,
  Delegate,
  Throw,
  ThrowRef,
  Rethrow,
  Try,
  TryTable,
  CallBuiltinModuleFunc,
};

// Return the OpKind for a given Op. This is used for sanity-checking that
// API users use the correct read function for a given Op.
OpKind Classify(OpBytes op);
#endif
// Common fields for linear memory access.
//
// `base` is the dynamic base operand of the access; `memoryIndex` selects
// which linear memory is accessed; `offset` is the static offset immediate;
// `align` is the alignment immediate (in bytes).
template <typename Value>
struct LinearMemoryAddress {
  Value base;
  uint32_t memoryIndex;
  uint64_t offset;
  uint32_t align;

  // Value-initialize `base` too: for trivial Value instantiations (e.g.
  // pointers or integers) the previous code left it indeterminate.
  LinearMemoryAddress() : base(), memoryIndex(0), offset(0), align(0) {}
  LinearMemoryAddress(Value base, uint32_t memoryIndex, uint64_t offset,
                      uint32_t align)
      : base(base), memoryIndex(memoryIndex), offset(offset), align(align) {}
};
258 template <typename ControlItem>
259 class ControlStackEntry {
260 // Use a pair to optimize away empty ControlItem.
261 mozilla::CompactPair<BlockType, ControlItem> typeAndItem_;
263 // The "base" of a control stack entry is valueStack_.length() minus
264 // type().params().length(), i.e., the size of the value stack "below"
265 // this block.
266 uint32_t valueStackBase_;
267 bool polymorphicBase_;
269 LabelKind kind_;
271 public:
272 ControlStackEntry(LabelKind kind, BlockType type, uint32_t valueStackBase)
273 : typeAndItem_(type, ControlItem()),
274 valueStackBase_(valueStackBase),
275 polymorphicBase_(false),
276 kind_(kind) {
277 MOZ_ASSERT(type != BlockType());
280 LabelKind kind() const { return kind_; }
281 BlockType type() const { return typeAndItem_.first(); }
282 ResultType resultType() const { return type().results(); }
283 ResultType branchTargetType() const {
284 return kind_ == LabelKind::Loop ? type().params() : type().results();
286 uint32_t valueStackBase() const { return valueStackBase_; }
287 ControlItem& controlItem() { return typeAndItem_.second(); }
288 void setPolymorphicBase() { polymorphicBase_ = true; }
289 bool polymorphicBase() const { return polymorphicBase_; }
291 void switchToElse() {
292 MOZ_ASSERT(kind() == LabelKind::Then);
293 kind_ = LabelKind::Else;
294 polymorphicBase_ = false;
297 void switchToCatch() {
298 MOZ_ASSERT(kind() == LabelKind::Try || kind() == LabelKind::Catch);
299 kind_ = LabelKind::Catch;
300 polymorphicBase_ = false;
303 void switchToCatchAll() {
304 MOZ_ASSERT(kind() == LabelKind::Try || kind() == LabelKind::Catch);
305 kind_ = LabelKind::CatchAll;
306 polymorphicBase_ = false;
310 // Track state of the non-defaultable locals. Every time such local is
311 // initialized, the stack will record at what depth and which local was set.
312 // On a block end, the "unset" state will be rolled back to how it was before
313 // the block started.
315 // It is very likely only a few functions will have non-defaultable locals and
316 // very few locals will be non-defaultable. This class is optimized to be fast
317 // for this common case.
318 class UnsetLocalsState {
319 struct SetLocalEntry {
320 uint32_t depth;
321 uint32_t localUnsetIndex;
322 SetLocalEntry(uint32_t depth_, uint32_t localUnsetIndex_)
323 : depth(depth_), localUnsetIndex(localUnsetIndex_) {}
325 using SetLocalsStack = Vector<SetLocalEntry, 16, SystemAllocPolicy>;
326 using UnsetLocals = Vector<uint32_t, 16, SystemAllocPolicy>;
328 static constexpr size_t WordSize = 4;
329 static constexpr size_t WordBits = WordSize * 8;
331 // Bit array of "unset" function locals. Stores only unset states of the
332 // locals that are declared after the first non-defaultable local.
333 UnsetLocals unsetLocals_;
334 // Stack of "set" operations. Contains pair where the first field is a depth,
335 // and the second field is local id (offset by firstNonDefaultLocal_).
336 SetLocalsStack setLocalsStack_;
337 uint32_t firstNonDefaultLocal_;
339 public:
340 UnsetLocalsState() : firstNonDefaultLocal_(UINT32_MAX) {}
342 [[nodiscard]] bool init(const ValTypeVector& locals, size_t numParams);
344 inline bool isUnset(uint32_t id) const {
345 if (MOZ_LIKELY(id < firstNonDefaultLocal_)) {
346 return false;
348 uint32_t localUnsetIndex = id - firstNonDefaultLocal_;
349 return unsetLocals_[localUnsetIndex / WordBits] &
350 (1 << (localUnsetIndex % WordBits));
353 inline void set(uint32_t id, uint32_t depth) {
354 MOZ_ASSERT(isUnset(id));
355 MOZ_ASSERT(id >= firstNonDefaultLocal_ &&
356 (id - firstNonDefaultLocal_) / WordBits < unsetLocals_.length());
357 uint32_t localUnsetIndex = id - firstNonDefaultLocal_;
358 unsetLocals_[localUnsetIndex / WordBits] ^= 1
359 << (localUnsetIndex % WordBits);
360 // The setLocalsStack_ is reserved upfront in the UnsetLocalsState::init.
361 // A SetLocalEntry will be pushed only once per local.
362 setLocalsStack_.infallibleEmplaceBack(depth, localUnsetIndex);
365 inline void resetToBlock(uint32_t controlDepth) {
366 while (MOZ_UNLIKELY(setLocalsStack_.length() > 0) &&
367 setLocalsStack_.back().depth > controlDepth) {
368 uint32_t localUnsetIndex = setLocalsStack_.back().localUnsetIndex;
369 MOZ_ASSERT(!(unsetLocals_[localUnsetIndex / WordBits] &
370 (1 << (localUnsetIndex % WordBits))));
371 unsetLocals_[localUnsetIndex / WordBits] |=
372 1 << (localUnsetIndex % WordBits);
373 setLocalsStack_.popBack();
377 int empty() const { return setLocalsStack_.empty(); }
380 template <typename Value>
381 class TypeAndValueT {
382 // Use a Pair to optimize away empty Value.
383 mozilla::CompactPair<StackType, Value> tv_;
385 public:
386 TypeAndValueT() : tv_(StackType::bottom(), Value()) {}
387 explicit TypeAndValueT(StackType type) : tv_(type, Value()) {}
388 explicit TypeAndValueT(ValType type) : tv_(StackType(type), Value()) {}
389 TypeAndValueT(StackType type, Value value) : tv_(type, value) {}
390 TypeAndValueT(ValType type, Value value) : tv_(StackType(type), value) {}
391 StackType type() const { return tv_.first(); }
392 void setType(StackType type) { tv_.first() = type; }
393 Value value() const { return tv_.second(); }
394 void setValue(Value value) { tv_.second() = value; }
// An iterator over the bytes of a function body. It performs validation
// and unpacks the data into a usable form.
//
// The MOZ_STACK_CLASS attribute here is because of the use of DebugOnly.
// There's otherwise nothing inherent in this class which would require
// it to be used on the stack.
template <typename Policy>
class MOZ_STACK_CLASS OpIter : private Policy {
 public:
  using Value = typename Policy::Value;
  using ValueVector = typename Policy::ValueVector;
  using TypeAndValue = TypeAndValueT<Value>;
  using TypeAndValueStack = Vector<TypeAndValue, 32, SystemAllocPolicy>;
  using ControlItem = typename Policy::ControlItem;
  using Control = ControlStackEntry<ControlItem>;
  using ControlStack = Vector<Control, 16, SystemAllocPolicy>;

  // What is being decoded: a full function body, or an init/constant
  // expression.
  enum Kind {
    Func,
    InitExpr,
  };

 private:
  Kind kind_;
  // The byte-stream decoder all read* helpers pull from.
  Decoder& d_;
  const ModuleEnvironment& env_;

  // Operand stack of types (and policy-specific values) being validated.
  TypeAndValueStack valueStack_;
  // Saved `if` params, restored when the matching `else` is entered.
  TypeAndValueStack elseParamStack_;
  ControlStack controlStack_;
  UnsetLocalsState unsetLocals_;
  // The exclusive max index of a global that can be accessed by global.get in
  // this expression. When GC is enabled, this is any previously defined
  // immutable global. Otherwise this is always set to zero, and only imported
  // immutable globals are allowed.
  uint32_t maxInitializedGlobalsIndexPlus1_;
  FeatureUsage featureUsage_;

#ifdef DEBUG
  OpBytes op_;
#endif
  // Module offset of the most recently read opcode; 0 means "none read yet"
  // (see lastOpcodeOffset()).
  size_t offsetOfLastReadOp_;

  // Primitive readers forwarding to the underlying Decoder.
  [[nodiscard]] bool readFixedU8(uint8_t* out) { return d_.readFixedU8(out); }
  [[nodiscard]] bool readFixedU32(uint32_t* out) {
    return d_.readFixedU32(out);
  }
  [[nodiscard]] bool readVarS32(int32_t* out) { return d_.readVarS32(out); }
  [[nodiscard]] bool readVarU32(uint32_t* out) { return d_.readVarU32(out); }
  [[nodiscard]] bool readVarS64(int64_t* out) { return d_.readVarS64(out); }
  [[nodiscard]] bool readVarU64(uint64_t* out) { return d_.readVarU64(out); }
  [[nodiscard]] bool readFixedF32(float* out) { return d_.readFixedF32(out); }
  [[nodiscard]] bool readFixedF64(double* out) { return d_.readFixedF64(out); }

  // Immediate readers for compound operands.
  [[nodiscard]] bool readLinearMemoryAddress(uint32_t byteSize,
                                             LinearMemoryAddress<Value>* addr);
  [[nodiscard]] bool readLinearMemoryAddressAligned(
      uint32_t byteSize, LinearMemoryAddress<Value>* addr);
  [[nodiscard]] bool readBlockType(BlockType* type);
  [[nodiscard]] bool readGcTypeIndex(uint32_t* typeIndex);
  [[nodiscard]] bool readStructTypeIndex(uint32_t* typeIndex);
  [[nodiscard]] bool readArrayTypeIndex(uint32_t* typeIndex);
  [[nodiscard]] bool readFuncTypeIndex(uint32_t* typeIndex);
  [[nodiscard]] bool readFieldIndex(uint32_t* fieldIndex,
                                    const StructType& structType);

  [[nodiscard]] bool popCallArgs(const ValTypeVector& expectedTypes,
                                 ValueVector* values);

  // Value-stack popping helpers; all report a validation failure on type
  // mismatch or underflow.
  [[nodiscard]] bool failEmptyStack();
  [[nodiscard]] bool popStackType(StackType* type, Value* value);
  [[nodiscard]] bool popWithType(ValType expected, Value* value,
                                 StackType* stackType);
  [[nodiscard]] bool popWithType(ValType expected, Value* value);
  [[nodiscard]] bool popWithType(ResultType expected, ValueVector* values);
  template <typename ValTypeSpanT>
  [[nodiscard]] bool popWithTypes(ValTypeSpanT expected, ValueVector* values);
  [[nodiscard]] bool popWithRefType(Value* value, StackType* type);
  // Check that the top of the value stack has type `expected`, bearing in
  // mind that it may be a block type, hence involving multiple values.
  //
  // If the block's stack contains polymorphic values at its base (because we
  // are in unreachable code) then suitable extra values are inserted into the
  // value stack, as controlled by `rewriteStackTypes`: if this is true,
  // polymorphic values have their types created/updated from `expected`. If
  // it is false, such values are left as `StackType::bottom()`.
  //
  // If `values` is non-null, it is filled in with Value components of the
  // relevant stack entries, including those of any new entries created.
  [[nodiscard]] bool checkTopTypeMatches(ResultType expected,
                                         ValueVector* values,
                                         bool rewriteStackTypes);

  [[nodiscard]] bool pushControl(LabelKind kind, BlockType type);
  [[nodiscard]] bool checkStackAtEndOfBlock(ResultType* type,
                                            ValueVector* values);
  [[nodiscard]] bool getControl(uint32_t relativeDepth, Control** controlEntry);
  [[nodiscard]] bool checkBranchValueAndPush(uint32_t relativeDepth,
                                             ResultType* type,
                                             ValueVector* values,
                                             bool rewriteStackTypes);
  [[nodiscard]] bool checkBrTableEntryAndPush(uint32_t* relativeDepth,
                                              ResultType prevBranchType,
                                              ResultType* branchType,
                                              ValueVector* branchValues);

  // Fallible pushes onto the value stack.
  [[nodiscard]] bool push(StackType t) { return valueStack_.emplaceBack(t); }
  [[nodiscard]] bool push(ValType t) { return valueStack_.emplaceBack(t); }
  [[nodiscard]] bool push(TypeAndValue tv) { return valueStack_.append(tv); }
  [[nodiscard]] bool push(ResultType t) {
    for (size_t i = 0; i < t.length(); i++) {
      if (!push(t[i])) {
        return false;
      }
    }
    return true;
  }
  // Infallible pushes: capacity must have been reserved by the caller.
  void infalliblePush(StackType t) { valueStack_.infallibleEmplaceBack(t); }
  void infalliblePush(ValType t) {
    valueStack_.infallibleEmplaceBack(StackType(t));
  }
  void infalliblePush(TypeAndValue tv) { valueStack_.infallibleAppend(tv); }

  // After an unconditional branch the rest of the block is unreachable: drop
  // the block's operands and mark its base polymorphic.
  void afterUnconditionalBranch() {
    valueStack_.shrinkTo(controlStack_.back().valueStackBase());
    controlStack_.back().setPolymorphicBase();
  }

  inline bool checkIsSubtypeOf(StorageType actual, StorageType expected);

  inline bool checkIsSubtypeOf(RefType actual, RefType expected) {
    return checkIsSubtypeOf(ValType(actual).storageType(),
                            ValType(expected).storageType());
  }
  inline bool checkIsSubtypeOf(ValType actual, ValType expected) {
    return checkIsSubtypeOf(actual.storageType(), expected.storageType());
  }

  inline bool checkIsSubtypeOf(ResultType params, ResultType results);

#ifdef ENABLE_WASM_GC
  inline bool checkIsSubtypeOf(uint32_t actualTypeIndex,
                               uint32_t expectedTypeIndex);
#endif

 public:
#ifdef DEBUG
  explicit OpIter(const ModuleEnvironment& env, Decoder& decoder,
                  Kind kind = OpIter::Func)
      : kind_(kind),
        d_(decoder),
        env_(env),
        maxInitializedGlobalsIndexPlus1_(0),
        featureUsage_(FeatureUsage::None),
        op_(OpBytes(Op::Limit)),
        offsetOfLastReadOp_(0) {}
#else
  explicit OpIter(const ModuleEnvironment& env, Decoder& decoder,
                  Kind kind = OpIter::Func)
      : kind_(kind),
        d_(decoder),
        env_(env),
        maxInitializedGlobalsIndexPlus1_(0),
        featureUsage_(FeatureUsage::None),
        offsetOfLastReadOp_(0) {}
#endif

  FeatureUsage featureUsage() const { return featureUsage_; }

  // Return the decoding byte offset.
  uint32_t currentOffset() const { return d_.currentOffset(); }

  // Return the offset within the entire module of the last-read op.
  size_t lastOpcodeOffset() const {
    return offsetOfLastReadOp_ ? offsetOfLastReadOp_ : d_.currentOffset();
  }

  // Return a BytecodeOffset describing where the current op should be reported
  // to trap/call.
  BytecodeOffset bytecodeOffset() const {
    return BytecodeOffset(lastOpcodeOffset());
  }

  // Test whether the iterator has reached the end of the buffer.
  bool done() const { return d_.done(); }

  // Return a pointer to the end of the buffer being decoded by this iterator.
  const uint8_t* end() const { return d_.end(); }

  // Report a general failure.
  [[nodiscard]] bool fail(const char* msg) MOZ_COLD;

  // Report a general failure with a context
  [[nodiscard]] bool fail_ctx(const char* fmt, const char* context) MOZ_COLD;

  // Report an unrecognized opcode.
  [[nodiscard]] bool unrecognizedOpcode(const OpBytes* expr) MOZ_COLD;

  // Return whether the innermost block has a polymorphic base of its stack.
  // Ideally this accessor would be removed; consider using something else.
  bool currentBlockHasPolymorphicBase() const {
    return !controlStack_.empty() && controlStack_.back().polymorphicBase();
  }

  // ------------------------------------------------------------------------
  // Decoding and validation interface.

  // Initialization and termination

  [[nodiscard]] bool startFunction(uint32_t funcIndex,
                                   const ValTypeVector& locals);
  [[nodiscard]] bool endFunction(const uint8_t* bodyEnd);

  [[nodiscard]] bool startInitExpr(ValType expected);
  [[nodiscard]] bool endInitExpr();

  // Value and reference types

  [[nodiscard]] bool readValType(ValType* type);
  [[nodiscard]] bool readHeapType(bool nullable, RefType* type);

  // Instructions

  [[nodiscard]] bool readOp(OpBytes* op);
  [[nodiscard]] bool readReturn(ValueVector* values);
  [[nodiscard]] bool readBlock(ResultType* paramType);
  [[nodiscard]] bool readLoop(ResultType* paramType);
  [[nodiscard]] bool readIf(ResultType* paramType, Value* condition);
  [[nodiscard]] bool readElse(ResultType* paramType, ResultType* resultType,
                              ValueVector* thenResults);
  [[nodiscard]] bool readEnd(LabelKind* kind, ResultType* type,
                             ValueVector* results,
                             ValueVector* resultsForEmptyElse);
  void popEnd();
  [[nodiscard]] bool readBr(uint32_t* relativeDepth, ResultType* type,
                            ValueVector* values);
  [[nodiscard]] bool readBrIf(uint32_t* relativeDepth, ResultType* type,
                              ValueVector* values, Value* condition);
  [[nodiscard]] bool readBrTable(Uint32Vector* depths, uint32_t* defaultDepth,
                                 ResultType* defaultBranchType,
                                 ValueVector* branchValues, Value* index);
  [[nodiscard]] bool readTry(ResultType* type);
  [[nodiscard]] bool readTryTable(ResultType* type,
                                  TryTableCatchVector* catches);
  [[nodiscard]] bool readCatch(LabelKind* kind, uint32_t* tagIndex,
                               ResultType* paramType, ResultType* resultType,
                               ValueVector* tryResults);
  [[nodiscard]] bool readCatchAll(LabelKind* kind, ResultType* paramType,
                                  ResultType* resultType,
                                  ValueVector* tryResults);
  [[nodiscard]] bool readDelegate(uint32_t* relativeDepth,
                                  ResultType* resultType,
                                  ValueVector* tryResults);
  void popDelegate();
  [[nodiscard]] bool readThrow(uint32_t* tagIndex, ValueVector* argValues);
  [[nodiscard]] bool readThrowRef(Value* exnRef);
  [[nodiscard]] bool readRethrow(uint32_t* relativeDepth);
  [[nodiscard]] bool readUnreachable();
  [[nodiscard]] bool readDrop();
  [[nodiscard]] bool readUnary(ValType operandType, Value* input);
  [[nodiscard]] bool readConversion(ValType operandType, ValType resultType,
                                    Value* input);
  [[nodiscard]] bool readBinary(ValType operandType, Value* lhs, Value* rhs);
  [[nodiscard]] bool readComparison(ValType operandType, Value* lhs,
                                    Value* rhs);
  [[nodiscard]] bool readTernary(ValType operandType, Value* v0, Value* v1,
                                 Value* v2);
  [[nodiscard]] bool readLoad(ValType resultType, uint32_t byteSize,
                              LinearMemoryAddress<Value>* addr);
  [[nodiscard]] bool readStore(ValType resultType, uint32_t byteSize,
                               LinearMemoryAddress<Value>* addr, Value* value);
  [[nodiscard]] bool readTeeStore(ValType resultType, uint32_t byteSize,
                                  LinearMemoryAddress<Value>* addr,
                                  Value* value);
  [[nodiscard]] bool readNop();
  [[nodiscard]] bool readMemorySize(uint32_t* memoryIndex);
  [[nodiscard]] bool readMemoryGrow(uint32_t* memoryIndex, Value* input);
  [[nodiscard]] bool readSelect(bool typed, StackType* type, Value* trueValue,
                                Value* falseValue, Value* condition);
  [[nodiscard]] bool readGetLocal(const ValTypeVector& locals, uint32_t* id);
  [[nodiscard]] bool readSetLocal(const ValTypeVector& locals, uint32_t* id,
                                  Value* value);
  [[nodiscard]] bool readTeeLocal(const ValTypeVector& locals, uint32_t* id,
                                  Value* value);
  [[nodiscard]] bool readGetGlobal(uint32_t* id);
  [[nodiscard]] bool readSetGlobal(uint32_t* id, Value* value);
  [[nodiscard]] bool readTeeGlobal(uint32_t* id, Value* value);
  [[nodiscard]] bool readI32Const(int32_t* i32);
  [[nodiscard]] bool readI64Const(int64_t* i64);
  [[nodiscard]] bool readF32Const(float* f32);
  [[nodiscard]] bool readF64Const(double* f64);
  [[nodiscard]] bool readRefFunc(uint32_t* funcIndex);
  [[nodiscard]] bool readRefNull(RefType* type);
  [[nodiscard]] bool readRefIsNull(Value* input);
  [[nodiscard]] bool readRefAsNonNull(Value* input);
  [[nodiscard]] bool readBrOnNull(uint32_t* relativeDepth, ResultType* type,
                                  ValueVector* values, Value* condition);
  [[nodiscard]] bool readBrOnNonNull(uint32_t* relativeDepth, ResultType* type,
                                     ValueVector* values, Value* condition);
  [[nodiscard]] bool readCall(uint32_t* funcTypeIndex, ValueVector* argValues);
  [[nodiscard]] bool readCallIndirect(uint32_t* funcTypeIndex,
                                      uint32_t* tableIndex, Value* callee,
                                      ValueVector* argValues);
#ifdef ENABLE_WASM_TAIL_CALLS
  [[nodiscard]] bool readReturnCall(uint32_t* funcTypeIndex,
                                    ValueVector* argValues);
  [[nodiscard]] bool readReturnCallIndirect(uint32_t* funcTypeIndex,
                                            uint32_t* tableIndex, Value* callee,
                                            ValueVector* argValues);
#endif
#ifdef ENABLE_WASM_GC
  [[nodiscard]] bool readCallRef(const FuncType** funcType, Value* callee,
                                 ValueVector* argValues);

#  ifdef ENABLE_WASM_TAIL_CALLS
  [[nodiscard]] bool readReturnCallRef(const FuncType** funcType, Value* callee,
                                       ValueVector* argValues);
#  endif
#endif
  [[nodiscard]] bool readOldCallDirect(uint32_t numFuncImports,
                                       uint32_t* funcTypeIndex,
                                       ValueVector* argValues);
  [[nodiscard]] bool readOldCallIndirect(uint32_t* funcTypeIndex, Value* callee,
                                         ValueVector* argValues);
  [[nodiscard]] bool readWake(LinearMemoryAddress<Value>* addr, Value* count);
  [[nodiscard]] bool readWait(LinearMemoryAddress<Value>* addr,
                              ValType valueType, uint32_t byteSize,
                              Value* value, Value* timeout);
  [[nodiscard]] bool readFence();
  [[nodiscard]] bool readAtomicLoad(LinearMemoryAddress<Value>* addr,
                                    ValType resultType, uint32_t byteSize);
  [[nodiscard]] bool readAtomicStore(LinearMemoryAddress<Value>* addr,
                                     ValType resultType, uint32_t byteSize,
                                     Value* value);
  [[nodiscard]] bool readAtomicRMW(LinearMemoryAddress<Value>* addr,
                                   ValType resultType, uint32_t byteSize,
                                   Value* value);
  [[nodiscard]] bool readAtomicCmpXchg(LinearMemoryAddress<Value>* addr,
                                       ValType resultType, uint32_t byteSize,
                                       Value* oldValue, Value* newValue);
  [[nodiscard]] bool readMemOrTableCopy(bool isMem,
                                        uint32_t* dstMemOrTableIndex,
                                        Value* dst,
                                        uint32_t* srcMemOrTableIndex,
                                        Value* src, Value* len);
  [[nodiscard]] bool readDataOrElemDrop(bool isData, uint32_t* segIndex);
  [[nodiscard]] bool readMemFill(uint32_t* memoryIndex, Value* start,
                                 Value* val, Value* len);
  [[nodiscard]] bool readMemOrTableInit(bool isMem, uint32_t* segIndex,
                                        uint32_t* dstMemOrTableIndex,
                                        Value* dst, Value* src, Value* len);
  [[nodiscard]] bool readTableFill(uint32_t* tableIndex, Value* start,
                                   Value* val, Value* len);
  [[nodiscard]] bool readMemDiscard(uint32_t* memoryIndex, Value* start,
                                    Value* len);
  [[nodiscard]] bool readTableGet(uint32_t* tableIndex, Value* index);
  [[nodiscard]] bool readTableGrow(uint32_t* tableIndex, Value* initValue,
                                   Value* delta);
  [[nodiscard]] bool readTableSet(uint32_t* tableIndex, Value* index,
                                  Value* value);

  [[nodiscard]] bool readTableSize(uint32_t* tableIndex);

#ifdef ENABLE_WASM_GC
  [[nodiscard]] bool readStructNew(uint32_t* typeIndex, ValueVector* argValues);
  [[nodiscard]] bool readStructNewDefault(uint32_t* typeIndex);
  [[nodiscard]] bool readStructGet(uint32_t* typeIndex, uint32_t* fieldIndex,
                                   FieldWideningOp wideningOp, Value* ptr);
  [[nodiscard]] bool readStructSet(uint32_t* typeIndex, uint32_t* fieldIndex,
                                   Value* ptr, Value* val);
  [[nodiscard]] bool readArrayNew(uint32_t* typeIndex, Value* numElements,
                                  Value* argValue);
  [[nodiscard]] bool readArrayNewFixed(uint32_t* typeIndex,
                                       uint32_t* numElements,
                                       ValueVector* values);
  [[nodiscard]] bool readArrayNewDefault(uint32_t* typeIndex,
                                         Value* numElements);
  [[nodiscard]] bool readArrayNewData(uint32_t* typeIndex, uint32_t* segIndex,
                                      Value* offset, Value* numElements);
  [[nodiscard]] bool readArrayNewElem(uint32_t* typeIndex, uint32_t* segIndex,
                                      Value* offset, Value* numElements);
  [[nodiscard]] bool readArrayInitData(uint32_t* typeIndex, uint32_t* segIndex,
                                       Value* array, Value* arrayIndex,
                                       Value* segOffset, Value* length);
  [[nodiscard]] bool readArrayInitElem(uint32_t* typeIndex, uint32_t* segIndex,
                                       Value* array, Value* arrayIndex,
                                       Value* segOffset, Value* length);
  [[nodiscard]] bool readArrayGet(uint32_t* typeIndex,
                                  FieldWideningOp wideningOp, Value* index,
                                  Value* ptr);
  [[nodiscard]] bool readArraySet(uint32_t* typeIndex, Value* val, Value* index,
                                  Value* ptr);
  [[nodiscard]] bool readArrayLen(Value* ptr);
  [[nodiscard]] bool readArrayCopy(int32_t* elemSize, bool* elemsAreRefTyped,
                                   Value* dstArray, Value* dstIndex,
                                   Value* srcArray, Value* srcIndex,
                                   Value* numElements);
  [[nodiscard]] bool readArrayFill(uint32_t* typeIndex, Value* array,
                                   Value* index, Value* val, Value* length);
  [[nodiscard]] bool readRefTest(bool nullable, RefType* sourceType,
                                 RefType* destType, Value* ref);
  [[nodiscard]] bool readRefCast(bool nullable, RefType* sourceType,
                                 RefType* destType, Value* ref);
  [[nodiscard]] bool readBrOnCast(bool onSuccess, uint32_t* labelRelativeDepth,
                                  RefType* sourceType, RefType* destType,
                                  ResultType* labelType, ValueVector* values);
  [[nodiscard]] bool readRefConversion(RefType operandType, RefType resultType,
                                       Value* operandValue);
#endif

#ifdef ENABLE_WASM_SIMD
  [[nodiscard]] bool readLaneIndex(uint32_t inputLanes, uint32_t* laneIndex);
  [[nodiscard]] bool readExtractLane(ValType resultType, uint32_t inputLanes,
                                     uint32_t* laneIndex, Value* input);
  [[nodiscard]] bool readReplaceLane(ValType operandType, uint32_t inputLanes,
                                     uint32_t* laneIndex, Value* baseValue,
                                     Value* operand);
  [[nodiscard]] bool readVectorShift(Value* baseValue, Value* shift);
  [[nodiscard]] bool readVectorShuffle(Value* v1, Value* v2, V128* selectMask);
  [[nodiscard]] bool readV128Const(V128* value);
  [[nodiscard]] bool readLoadSplat(uint32_t byteSize,
                                   LinearMemoryAddress<Value>* addr);
  [[nodiscard]] bool readLoadExtend(LinearMemoryAddress<Value>* addr);
  [[nodiscard]] bool readLoadLane(uint32_t byteSize,
                                  LinearMemoryAddress<Value>* addr,
                                  uint32_t* laneIndex, Value* input);
  [[nodiscard]] bool readStoreLane(uint32_t byteSize,
                                   LinearMemoryAddress<Value>* addr,
                                   uint32_t* laneIndex, Value* input);
#endif

  [[nodiscard]] bool readCallBuiltinModuleFunc(
      const BuiltinModuleFunc** builtinModuleFunc, ValueVector* params);

  // At a location where readOp is allowed, peek at the next opcode
  // without consuming it or updating any internal state.
  // Never fails: returns uint16_t(Op::Limit) in op->b0 if it can't read.
  void peekOp(OpBytes* op);

  // ------------------------------------------------------------------------
  // Stack management.

  // Set the top N result values.
  void setResults(size_t count, const ValueVector& values) {
    MOZ_ASSERT(valueStack_.length() >= count);
    size_t base = valueStack_.length() - count;
    for (size_t i = 0; i < count; i++) {
      valueStack_[base + i].setValue(values[i]);
    }
  }

  // Copy the top N result values into `values`; false on OOM.
  bool getResults(size_t count, ValueVector* values) {
    MOZ_ASSERT(valueStack_.length() >= count);
    if (!values->resize(count)) {
      return false;
    }
    size_t base = valueStack_.length() - count;
    for (size_t i = 0; i < count; i++) {
      (*values)[i] = valueStack_[base + i].value();
    }
    return true;
  }

  // Set the result value of the current top-of-value-stack expression.
  void setResult(Value value) { valueStack_.back().setValue(value); }

  // Return the result value of the current top-of-value-stack expression.
  Value getResult() { return valueStack_.back().value(); }

  // Return a reference to the top of the control stack.
  ControlItem& controlItem() { return controlStack_.back().controlItem(); }

  // Return a reference to an element in the control stack.
  ControlItem& controlItem(uint32_t relativeDepth) {
    return controlStack_[controlStack_.length() - 1 - relativeDepth]
        .controlItem();
  }

  // Return the LabelKind of an element in the control stack.
  LabelKind controlKind(uint32_t relativeDepth) {
    return controlStack_[controlStack_.length() - 1 - relativeDepth].kind();
  }

  // Return a reference to the outermost element on the control stack.
  ControlItem& controlOutermost() { return controlStack_[0].controlItem(); }

  // Test whether the control-stack is empty, meaning we've consumed the final
  // end of the function body.
  bool controlStackEmpty() const { return controlStack_.empty(); }

  // Return the depth of the control stack.
  size_t controlStackDepth() const { return controlStack_.length(); }

  // Find the innermost control item matching a predicate, starting to search
  // from a certain relative depth, and returning true if such innermost
  // control item is found. The relative depth of the found item is returned
  // via a parameter.
  template <typename Predicate>
  bool controlFindInnermostFrom(Predicate predicate, uint32_t fromRelativeDepth,
                                uint32_t* foundRelativeDepth) {
    int32_t fromAbsoluteDepth = controlStack_.length() - fromRelativeDepth - 1;
    for (int32_t i = fromAbsoluteDepth; i >= 0; i--) {
      if (predicate(controlStack_[i].kind(), controlStack_[i].controlItem())) {
        *foundRelativeDepth = controlStack_.length() - 1 - i;
        return true;
      }
    }
    return false;
  }
};
908 template <typename Policy>
909 inline bool OpIter<Policy>::checkIsSubtypeOf(StorageType subType,
910 StorageType superType) {
911 return CheckIsSubtypeOf(d_, env_, lastOpcodeOffset(), subType, superType);
914 template <typename Policy>
915 inline bool OpIter<Policy>::checkIsSubtypeOf(ResultType params,
916 ResultType results) {
917 if (params.length() != results.length()) {
918 UniqueChars error(
919 JS_smprintf("type mismatch: expected %zu values, got %zu values",
920 results.length(), params.length()));
921 if (!error) {
922 return false;
924 return fail(error.get());
926 for (uint32_t i = 0; i < params.length(); i++) {
927 ValType param = params[i];
928 ValType result = results[i];
929 if (!checkIsSubtypeOf(param, result)) {
930 return false;
933 return true;
#ifdef ENABLE_WASM_GC
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(uint32_t actualTypeIndex,
                                             uint32_t expectedTypeIndex) {
  // Compare the two type definitions as (nullable) references.
  const TypeDef& actualTypeDef = env_.types->type(actualTypeIndex);
  const TypeDef& expectedTypeDef = env_.types->type(expectedTypeIndex);
  ValType actualRef = ValType(RefType::fromTypeDef(&actualTypeDef, true));
  ValType expectedRef = ValType(RefType::fromTypeDef(&expectedTypeDef, true));
  return CheckIsSubtypeOf(d_, env_, lastOpcodeOffset(), actualRef, expectedRef);
}
#endif
949 template <typename Policy>
950 inline bool OpIter<Policy>::unrecognizedOpcode(const OpBytes* expr) {
951 UniqueChars error(JS_smprintf("unrecognized opcode: %x %x", expr->b0,
952 IsPrefixByte(expr->b0) ? expr->b1 : 0));
953 if (!error) {
954 return false;
957 return fail(error.get());
960 template <typename Policy>
961 inline bool OpIter<Policy>::fail(const char* msg) {
962 return d_.fail(lastOpcodeOffset(), msg);
965 template <typename Policy>
966 inline bool OpIter<Policy>::fail_ctx(const char* fmt, const char* context) {
967 UniqueChars error(JS_smprintf(fmt, context));
968 if (!error) {
969 return false;
971 return fail(error.get());
974 template <typename Policy>
975 inline bool OpIter<Policy>::failEmptyStack() {
976 return valueStack_.empty() ? fail("popping value from empty stack")
977 : fail("popping value from outside block");
980 // This function pops exactly one value from the stack, yielding Bottom types in
981 // various cases and therefore making it the caller's responsibility to do the
982 // right thing for StackType::Bottom. Prefer (pop|top)WithType. This is an
983 // optimization for the super-common case where the caller is statically
984 // expecting the resulttype `[valtype]`.
985 template <typename Policy>
986 inline bool OpIter<Policy>::popStackType(StackType* type, Value* value) {
987 Control& block = controlStack_.back();
989 MOZ_ASSERT(valueStack_.length() >= block.valueStackBase());
990 if (MOZ_UNLIKELY(valueStack_.length() == block.valueStackBase())) {
991 // If the base of this block's stack is polymorphic, then we can pop a
992 // dummy value of the bottom type; it won't be used since we're in
993 // unreachable code.
994 if (block.polymorphicBase()) {
995 *type = StackType::bottom();
996 *value = Value();
998 // Maintain the invariant that, after a pop, there is always memory
999 // reserved to push a value infallibly.
1000 return valueStack_.reserve(valueStack_.length() + 1);
1003 return failEmptyStack();
1006 TypeAndValue& tv = valueStack_.back();
1007 *type = tv.type();
1008 *value = tv.value();
1009 valueStack_.popBack();
1010 return true;
1013 // This function pops exactly one value from the stack, checking that it has the
1014 // expected type which can either be a specific value type or the bottom type.
1015 template <typename Policy>
1016 inline bool OpIter<Policy>::popWithType(ValType expectedType, Value* value,
1017 StackType* stackType) {
1018 if (!popStackType(stackType, value)) {
1019 return false;
1022 return stackType->isStackBottom() ||
1023 checkIsSubtypeOf(stackType->valType(), expectedType);
1026 // This function pops exactly one value from the stack, checking that it has the
1027 // expected type which can either be a specific value type or the bottom type.
1028 template <typename Policy>
1029 inline bool OpIter<Policy>::popWithType(ValType expectedType, Value* value) {
1030 StackType stackType;
1031 return popWithType(expectedType, value, &stackType);
1034 template <typename Policy>
1035 inline bool OpIter<Policy>::popWithType(ResultType expected,
1036 ValueVector* values) {
1037 return popWithTypes(expected, values);
1040 // Pops each of the given expected types (in reverse, because it's a stack).
1041 template <typename Policy>
1042 template <typename ValTypeSpanT>
1043 inline bool OpIter<Policy>::popWithTypes(ValTypeSpanT expected,
1044 ValueVector* values) {
1045 size_t expectedLength = expected.size();
1046 if (!values->resize(expectedLength)) {
1047 return false;
1049 for (size_t i = 0; i < expectedLength; i++) {
1050 size_t reverseIndex = expectedLength - i - 1;
1051 ValType expectedType = expected[reverseIndex];
1052 Value* value = &(*values)[reverseIndex];
1053 if (!popWithType(expectedType, value)) {
1054 return false;
1057 return true;
// This function pops exactly one value from the stack, checking that it is a
// reference type (or the bottom type, which is acceptable in unreachable
// code). On type mismatch, a diagnostic naming the actual type is reported
// via fail(); returns false on both validation failure and OOM.
template <typename Policy>
inline bool OpIter<Policy>::popWithRefType(Value* value, StackType* type) {
  if (!popStackType(type, value)) {
    return false;
  }

  // Bottom (unreachable code) and any reference type are acceptable.
  if (type->isStackBottom() || type->valType().isRefType()) {
    return true;
  }

  // Render the offending type for the error message.
  UniqueChars actualText = ToString(type->valType(), env_.types);
  if (!actualText) {
    return false;
  }

  UniqueChars error(JS_smprintf(
      "type mismatch: expression has type %s but expected a reference type",
      actualText.get()));
  if (!error) {
    return false;
  }

  return fail(error.get());
}
// Check that the top of the value stack matches the expected result type,
// without popping anything. Collects the matched values into `values` (if
// non-null) and, when `rewriteStackTypes` is true, narrows the stack entries'
// recorded types to the expected types. Synthesizes dummy entries when the
// enclosing block's stack base is polymorphic (unreachable code).
template <typename Policy>
inline bool OpIter<Policy>::checkTopTypeMatches(ResultType expected,
                                                ValueVector* values,
                                                bool rewriteStackTypes) {
  if (expected.empty()) {
    return true;
  }

  Control& block = controlStack_.back();

  size_t expectedLength = expected.length();
  if (values && !values->resize(expectedLength)) {
    return false;
  }

  for (size_t i = 0; i != expectedLength; i++) {
    // We're iterating as-if we were popping each expected/actual type one by
    // one, which means iterating the array of expected results backwards.
    // The "current" value stack length refers to what the value stack length
    // would have been if we were popping it.
    size_t reverseIndex = expectedLength - i - 1;
    ValType expectedType = expected[reverseIndex];
    auto collectValue = [&](const Value& v) {
      if (values) {
        (*values)[reverseIndex] = v;
      }
    };

    size_t currentValueStackLength = valueStack_.length() - i;

    MOZ_ASSERT(currentValueStackLength >= block.valueStackBase());
    if (currentValueStackLength == block.valueStackBase()) {
      if (!block.polymorphicBase()) {
        return failEmptyStack();
      }

      // If the base of this block's stack is polymorphic, then we can just
      // pull out as many fake values as we need to validate, and create dummy
      // stack entries accordingly; they won't be used since we're in
      // unreachable code. However, if `rewriteStackTypes` is true, we must
      // set the types on these new entries to whatever `expected` requires
      // them to be.
      TypeAndValue newTandV =
          rewriteStackTypes ? TypeAndValue(expectedType) : TypeAndValue();
      if (!valueStack_.insert(valueStack_.begin() + currentValueStackLength,
                              newTandV)) {
        return false;
      }

      collectValue(Value());
    } else {
      TypeAndValue& observed = valueStack_[currentValueStackLength - 1];

      if (observed.type().isStackBottom()) {
        // Bottom matches any expected type; there is no concrete value.
        collectValue(Value());
      } else {
        if (!checkIsSubtypeOf(observed.type().valType(), expectedType)) {
          return false;
        }

        collectValue(observed.value());
      }

      if (rewriteStackTypes) {
        observed.setType(StackType(expectedType));
      }
    }
  }
  return true;
}
1158 template <typename Policy>
1159 inline bool OpIter<Policy>::pushControl(LabelKind kind, BlockType type) {
1160 ResultType paramType = type.params();
1162 ValueVector values;
1163 if (!checkTopTypeMatches(paramType, &values, /*rewriteStackTypes=*/true)) {
1164 return false;
1166 MOZ_ASSERT(valueStack_.length() >= paramType.length());
1167 uint32_t valueStackBase = valueStack_.length() - paramType.length();
1168 return controlStack_.emplaceBack(kind, type, valueStackBase);
1171 template <typename Policy>
1172 inline bool OpIter<Policy>::checkStackAtEndOfBlock(ResultType* expectedType,
1173 ValueVector* values) {
1174 Control& block = controlStack_.back();
1175 *expectedType = block.type().results();
1177 MOZ_ASSERT(valueStack_.length() >= block.valueStackBase());
1178 if (expectedType->length() < valueStack_.length() - block.valueStackBase()) {
1179 return fail("unused values not explicitly dropped by end of block");
1182 return checkTopTypeMatches(*expectedType, values,
1183 /*rewriteStackTypes=*/true);
1186 template <typename Policy>
1187 inline bool OpIter<Policy>::getControl(uint32_t relativeDepth,
1188 Control** controlEntry) {
1189 if (relativeDepth >= controlStack_.length()) {
1190 return fail("branch depth exceeds current nesting level");
1193 *controlEntry = &controlStack_[controlStack_.length() - 1 - relativeDepth];
1194 return true;
// Decode a block type: either the empty (void->void) marker, a single value
// type (void->[t]), or a (positive SLEB128) index into the type section
// naming a function type used as [params]->[results].
template <typename Policy>
inline bool OpIter<Policy>::readBlockType(BlockType* type) {
  uint8_t nextByte;
  if (!d_.peekByte(&nextByte)) {
    return fail("unable to read block type");
  }

  // 0x40: the empty block type.
  if (nextByte == uint8_t(TypeCode::BlockVoid)) {
    d_.uncheckedReadFixedU8();
    *type = BlockType::VoidToVoid();
    return true;
  }

  // A negative SLEB128 first byte means a value type follows.
  if ((nextByte & SLEB128SignMask) == SLEB128SignBit) {
    ValType v;
    if (!readValType(&v)) {
      return false;
    }
    *type = BlockType::VoidToSingle(v);
    return true;
  }

  // Otherwise it's a (non-negative) index into the type section.
  int32_t x;
  if (!d_.readVarS32(&x) || x < 0 || uint32_t(x) >= env_.types->length()) {
    return fail("invalid block type type index");
  }

  const TypeDef* typeDef = &env_.types->type(x);
  if (!typeDef->isFuncType()) {
    return fail("block type type index must be func type");
  }

  *type = BlockType::Func(typeDef->funcType());

  return true;
}
1234 template <typename Policy>
1235 inline bool OpIter<Policy>::readOp(OpBytes* op) {
1236 MOZ_ASSERT(!controlStack_.empty());
1238 offsetOfLastReadOp_ = d_.currentOffset();
1240 if (MOZ_UNLIKELY(!d_.readOp(op))) {
1241 return fail("unable to read opcode");
1244 #ifdef DEBUG
1245 op_ = *op;
1246 #endif
1248 return true;
1251 template <typename Policy>
1252 inline void OpIter<Policy>::peekOp(OpBytes* op) {
1253 const uint8_t* pos = d_.currentPosition();
1255 if (MOZ_UNLIKELY(!d_.readOp(op))) {
1256 op->b0 = uint16_t(Op::Limit);
1259 d_.rollbackPosition(pos);
// Begin iterating a function body: set up the implicit Body control block
// typed by the function's results, and initialize tracking of not-yet-set
// non-defaultable locals.
template <typename Policy>
inline bool OpIter<Policy>::startFunction(uint32_t funcIndex,
                                          const ValTypeVector& locals) {
  MOZ_ASSERT(kind_ == OpIter::Func);
  MOZ_ASSERT(elseParamStack_.empty());
  MOZ_ASSERT(valueStack_.empty());
  MOZ_ASSERT(controlStack_.empty());
  MOZ_ASSERT(op_.b0 == uint16_t(Op::Limit));
  MOZ_ASSERT(maxInitializedGlobalsIndexPlus1_ == 0);
  BlockType type = BlockType::FuncResults(*env_.funcs[funcIndex].type);

  // Function arguments are always considered initialized; only the
  // declared locals participate in unset-local tracking.
  size_t numArgs = env_.funcs[funcIndex].type->args().length();
  if (!unsetLocals_.init(locals, numArgs)) {
    return false;
  }

  return pushControl(LabelKind::Body, type);
}
1281 template <typename Policy>
1282 inline bool OpIter<Policy>::endFunction(const uint8_t* bodyEnd) {
1283 if (d_.currentPosition() != bodyEnd) {
1284 return fail("function body length mismatch");
1287 if (!controlStack_.empty()) {
1288 return fail("unbalanced function body control flow");
1290 MOZ_ASSERT(elseParamStack_.empty());
1291 MOZ_ASSERT(unsetLocals_.empty());
1293 #ifdef DEBUG
1294 op_ = OpBytes(Op::Limit);
1295 #endif
1296 valueStack_.clear();
1297 return true;
// Begin iterating a constant initializer expression that must produce a
// single value of type `expected`.
template <typename Policy>
inline bool OpIter<Policy>::startInitExpr(ValType expected) {
  MOZ_ASSERT(kind_ == OpIter::InitExpr);
  MOZ_ASSERT(elseParamStack_.empty());
  MOZ_ASSERT(valueStack_.empty());
  MOZ_ASSERT(controlStack_.empty());
  MOZ_ASSERT(op_.b0 == uint16_t(Op::Limit));

  // GC allows accessing any previously defined global, not just those that are
  // imported and immutable.
  if (env_.features.gc) {
    maxInitializedGlobalsIndexPlus1_ = env_.globals.length();
  } else {
    maxInitializedGlobalsIndexPlus1_ = env_.numGlobalImports;
  }

  BlockType type = BlockType::VoidToSingle(expected);
  return pushControl(LabelKind::Body, type);
}
1320 template <typename Policy>
1321 inline bool OpIter<Policy>::endInitExpr() {
1322 MOZ_ASSERT(controlStack_.empty());
1323 MOZ_ASSERT(elseParamStack_.empty());
1325 #ifdef DEBUG
1326 op_ = OpBytes(Op::Limit);
1327 #endif
1328 valueStack_.clear();
1329 return true;
1332 template <typename Policy>
1333 inline bool OpIter<Policy>::readValType(ValType* type) {
1334 return d_.readValType(*env_.types, env_.features, type);
1337 template <typename Policy>
1338 inline bool OpIter<Policy>::readHeapType(bool nullable, RefType* type) {
1339 return d_.readHeapType(*env_.types, env_.features, nullable, type);
1342 template <typename Policy>
1343 inline bool OpIter<Policy>::readReturn(ValueVector* values) {
1344 MOZ_ASSERT(Classify(op_) == OpKind::Return);
1346 Control& body = controlStack_[0];
1347 MOZ_ASSERT(body.kind() == LabelKind::Body);
1349 if (!popWithType(body.resultType(), values)) {
1350 return false;
1353 afterUnconditionalBranch();
1354 return true;
1357 template <typename Policy>
1358 inline bool OpIter<Policy>::readBlock(ResultType* paramType) {
1359 MOZ_ASSERT(Classify(op_) == OpKind::Block);
1361 BlockType type;
1362 if (!readBlockType(&type)) {
1363 return false;
1366 *paramType = type.params();
1367 return pushControl(LabelKind::Block, type);
1370 template <typename Policy>
1371 inline bool OpIter<Policy>::readLoop(ResultType* paramType) {
1372 MOZ_ASSERT(Classify(op_) == OpKind::Loop);
1374 BlockType type;
1375 if (!readBlockType(&type)) {
1376 return false;
1379 *paramType = type.params();
1380 return pushControl(LabelKind::Loop, type);
1383 template <typename Policy>
1384 inline bool OpIter<Policy>::readIf(ResultType* paramType, Value* condition) {
1385 MOZ_ASSERT(Classify(op_) == OpKind::If);
1387 BlockType type;
1388 if (!readBlockType(&type)) {
1389 return false;
1392 if (!popWithType(ValType::I32, condition)) {
1393 return false;
1396 if (!pushControl(LabelKind::Then, type)) {
1397 return false;
1400 *paramType = type.params();
1401 size_t paramsLength = type.params().length();
1402 return elseParamStack_.append(valueStack_.end() - paramsLength, paramsLength);
// Validate an `else`: close the `then` arm, restore the `if`'s parameters
// that were saved on elseParamStack_, and switch the control entry to Else.
template <typename Policy>
inline bool OpIter<Policy>::readElse(ResultType* paramType,
                                     ResultType* resultType,
                                     ValueVector* thenResults) {
  MOZ_ASSERT(Classify(op_) == OpKind::Else);

  Control& block = controlStack_.back();
  if (block.kind() != LabelKind::Then) {
    return fail("else can only be used within an if");
  }

  *paramType = block.type().params();
  if (!checkStackAtEndOfBlock(resultType, thenResults)) {
    return false;
  }

  // Discard the then-arm's values and replay the saved if-parameters for
  // the else-arm.
  valueStack_.shrinkTo(block.valueStackBase());

  size_t nparams = block.type().params().length();
  MOZ_ASSERT(elseParamStack_.length() >= nparams);
  valueStack_.infallibleAppend(elseParamStack_.end() - nparams, nparams);
  elseParamStack_.shrinkBy(nparams);

  // Reset local state to the beginning of the 'if' block for the new block
  // started by 'else'.
  unsetLocals_.resetToBlock(controlStack_.length() - 1);

  block.switchToElse();
  return true;
}
// Validate an `end`: check the block's results are on the stack. For a
// `then` arm ending without an `else`, additionally check that the if's
// parameter types equal its result types (the implicit empty else just
// forwards the parameters) and report those forwarded values.
template <typename Policy>
inline bool OpIter<Policy>::readEnd(LabelKind* kind, ResultType* type,
                                    ValueVector* results,
                                    ValueVector* resultsForEmptyElse) {
  MOZ_ASSERT(Classify(op_) == OpKind::End);

  Control& block = controlStack_.back();

  if (!checkStackAtEndOfBlock(type, results)) {
    return false;
  }

  if (block.kind() == LabelKind::Then) {
    ResultType params = block.type().params();
    // If an `if` block ends with `end` instead of `else`, then the `else` block
    // implicitly passes the `if` parameters as the `else` results. In that
    // case, assert that the `if`'s param type matches the result type.
    if (params != block.type().results()) {
      return fail("if without else with a result value");
    }

    size_t nparams = params.length();
    MOZ_ASSERT(elseParamStack_.length() >= nparams);
    if (!resultsForEmptyElse->resize(nparams)) {
      return false;
    }
    const TypeAndValue* elseParams = elseParamStack_.end() - nparams;
    for (size_t i = 0; i < nparams; i++) {
      (*resultsForEmptyElse)[i] = elseParams[i].value();
    }
    elseParamStack_.shrinkBy(nparams);
  }

  *kind = block.kind();
  return true;
}
1473 template <typename Policy>
1474 inline void OpIter<Policy>::popEnd() {
1475 MOZ_ASSERT(Classify(op_) == OpKind::End);
1477 controlStack_.popBack();
1478 unsetLocals_.resetToBlock(controlStack_.length());
1481 template <typename Policy>
1482 inline bool OpIter<Policy>::checkBranchValueAndPush(uint32_t relativeDepth,
1483 ResultType* type,
1484 ValueVector* values,
1485 bool rewriteStackTypes) {
1486 Control* block = nullptr;
1487 if (!getControl(relativeDepth, &block)) {
1488 return false;
1491 *type = block->branchTargetType();
1492 return checkTopTypeMatches(*type, values, rewriteStackTypes);
1495 template <typename Policy>
1496 inline bool OpIter<Policy>::readBr(uint32_t* relativeDepth, ResultType* type,
1497 ValueVector* values) {
1498 MOZ_ASSERT(Classify(op_) == OpKind::Br);
1500 if (!readVarU32(relativeDepth)) {
1501 return fail("unable to read br depth");
1504 if (!checkBranchValueAndPush(*relativeDepth, type, values,
1505 /*rewriteStackTypes=*/false)) {
1506 return false;
1509 afterUnconditionalBranch();
1510 return true;
1513 template <typename Policy>
1514 inline bool OpIter<Policy>::readBrIf(uint32_t* relativeDepth, ResultType* type,
1515 ValueVector* values, Value* condition) {
1516 MOZ_ASSERT(Classify(op_) == OpKind::BrIf);
1518 if (!readVarU32(relativeDepth)) {
1519 return fail("unable to read br_if depth");
1522 if (!popWithType(ValType::I32, condition)) {
1523 return false;
1526 return checkBranchValueAndPush(*relativeDepth, type, values,
1527 /*rewriteStackTypes=*/true);
#define UNKNOWN_ARITY UINT32_MAX

// Validate a single br_table target: decode its depth, check its label type
// against the stack, and enforce that all targets share one arity. Values
// are collected only for the first target (`branchValues` is nulled once a
// previous target's type is known).
template <typename Policy>
inline bool OpIter<Policy>::checkBrTableEntryAndPush(
    uint32_t* relativeDepth, ResultType prevBranchType, ResultType* type,
    ValueVector* branchValues) {
  if (!readVarU32(relativeDepth)) {
    return fail("unable to read br_table depth");
  }

  Control* block = nullptr;
  if (!getControl(*relativeDepth, &block)) {
    return false;
  }

  *type = block->branchTargetType();

  if (prevBranchType.valid()) {
    if (prevBranchType.length() != type->length()) {
      return fail("br_table targets must all have the same arity");
    }

    // Avoid re-collecting the same values for subsequent branch targets.
    branchValues = nullptr;
  }

  return checkTopTypeMatches(*type, branchValues, /*rewriteStackTypes=*/false);
}
// Validate a `br_table`: pop the i32 index, then check every listed target
// and the default target against the stack, requiring all targets to agree
// in arity. `br_table` is an unconditional transfer of control.
template <typename Policy>
inline bool OpIter<Policy>::readBrTable(Uint32Vector* depths,
                                        uint32_t* defaultDepth,
                                        ResultType* defaultBranchType,
                                        ValueVector* branchValues,
                                        Value* index) {
  MOZ_ASSERT(Classify(op_) == OpKind::BrTable);

  uint32_t tableLength;
  if (!readVarU32(&tableLength)) {
    return fail("unable to read br_table table length");
  }

  if (tableLength > MaxBrTableElems) {
    return fail("br_table too big");
  }

  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  if (!depths->resize(tableLength)) {
    return false;
  }

  // Each entry must share the previous entry's arity; values are collected
  // only once (checkBrTableEntryAndPush nulls branchValues afterwards).
  ResultType prevBranchType;
  for (uint32_t i = 0; i < tableLength; i++) {
    ResultType branchType;
    if (!checkBrTableEntryAndPush(&(*depths)[i], prevBranchType, &branchType,
                                  branchValues)) {
      return false;
    }
    prevBranchType = branchType;
  }

  if (!checkBrTableEntryAndPush(defaultDepth, prevBranchType, defaultBranchType,
                                branchValues)) {
    return false;
  }

  MOZ_ASSERT(defaultBranchType->valid());

  afterUnconditionalBranch();
  return true;
}

#undef UNKNOWN_ARITY
// Validate a legacy exception-handling `try`: decode the block type and open
// a Try control entry. Records use of the legacy-exceptions feature.
template <typename Policy>
inline bool OpIter<Policy>::readTry(ResultType* paramType) {
  MOZ_ASSERT(Classify(op_) == OpKind::Try);
  featureUsage_ |= FeatureUsage::LegacyExceptions;

  BlockType type;
  if (!readBlockType(&type)) {
    return false;
  }

  *paramType = type.params();
  return pushControl(LabelKind::Try, type);
}

// Per-catch flag bits in a try_table's catch clause encoding.
enum class TryTableCatchFlags : uint8_t {
  // The handler receives the exnref in addition to the tag payload.
  CaptureExnRef = 0x1,
  // The clause is a catch_all (no tag index follows).
  CatchAll = 0x1 << 1,
  AllowedMask = uint8_t(CaptureExnRef) | uint8_t(CatchAll),
};
// Validate a `try_table`: open its control block, then decode each catch
// clause (flags, optional tag, branch target), compute the label type each
// handler passes to its target, and check it against the target's type.
template <typename Policy>
inline bool OpIter<Policy>::readTryTable(ResultType* paramType,
                                         TryTableCatchVector* catches) {
  MOZ_ASSERT(Classify(op_) == OpKind::TryTable);

  BlockType type;
  if (!readBlockType(&type)) {
    return false;
  }

  *paramType = type.params();
  if (!pushControl(LabelKind::TryTable, type)) {
    return false;
  }

  uint32_t catchesLength;
  if (!readVarU32(&catchesLength)) {
    return fail("failed to read catches length");
  }

  if (catchesLength > MaxTryTableCatches) {
    return fail("too many catches");
  }

  if (!catches->reserve(catchesLength)) {
    return false;
  }

  for (uint32_t i = 0; i < catchesLength; i++) {
    TryTableCatch tryTableCatch;

    // Decode the flags
    uint8_t flags;
    if (!readFixedU8(&flags)) {
      return fail("expected flags");
    }
    if ((flags & ~uint8_t(TryTableCatchFlags::AllowedMask)) != 0) {
      return fail("invalid try_table catch flags");
    }

    // Decode if this catch wants to capture an exnref
    tryTableCatch.captureExnRef =
        (flags & uint8_t(TryTableCatchFlags::CaptureExnRef)) != 0;

    // Decode the tag, if any
    if ((flags & uint8_t(TryTableCatchFlags::CatchAll)) != 0) {
      tryTableCatch.tagIndex = CatchAllIndex;
    } else {
      if (!readVarU32(&tryTableCatch.tagIndex)) {
        return fail("expected tag index");
      }
      if (tryTableCatch.tagIndex >= env_.tags.length()) {
        return fail("tag index out of range");
      }
    }

    // Decode the target branch and construct the type we need to compare
    // against the branch
    if (!readVarU32(&tryTableCatch.labelRelativeDepth)) {
      return fail("unable to read catch depth");
    }

    // The target branch depth is relative to the control labels outside of
    // this try_table. e.g. `0` is a branch to the control outside of this
    // try_table, not to the try_table itself. However, we've already pushed
    // the control block for the try_table, and users will read it after we've
    // returned, so we need to return the relative depth adjusted by 1 to
    // account for our own control block.
    if (tryTableCatch.labelRelativeDepth == UINT32_MAX) {
      return fail("catch depth out of range");
    }
    tryTableCatch.labelRelativeDepth += 1;

    // Tagged catches will unpack the exception package and pass it to the
    // branch
    if (tryTableCatch.tagIndex != CatchAllIndex) {
      const TagType& tagType = *env_.tags[tryTableCatch.tagIndex].type;
      ResultType tagResult = tagType.resultType();
      if (!tagResult.cloneToVector(&tryTableCatch.labelType)) {
        return false;
      }
    }

    // Any captured exnref is the final parameter
    if (tryTableCatch.captureExnRef &&
        !tryTableCatch.labelType.append(ValType(RefType::exn()))) {
      return false;
    }

    Control* block;
    if (!getControl(tryTableCatch.labelRelativeDepth, &block)) {
      return false;
    }

    // The handler's label type must be usable where the target expects its
    // branch operands.
    ResultType blockTargetType = block->branchTargetType();
    if (!checkIsSubtypeOf(ResultType::Vector(tryTableCatch.labelType),
                          blockTargetType)) {
      return false;
    }

    catches->infallibleAppend(std::move(tryTableCatch));
  }

  return true;
}
// Validate a legacy `catch`: check the preceding try/catch arm's results,
// rewind the stack to the block base, switch the control entry to Catch,
// and push the tag's payload types for the handler body.
template <typename Policy>
inline bool OpIter<Policy>::readCatch(LabelKind* kind, uint32_t* tagIndex,
                                      ResultType* paramType,
                                      ResultType* resultType,
                                      ValueVector* tryResults) {
  MOZ_ASSERT(Classify(op_) == OpKind::Catch);

  if (!readVarU32(tagIndex)) {
    return fail("expected tag index");
  }
  if (*tagIndex >= env_.tags.length()) {
    return fail("tag index out of range");
  }

  Control& block = controlStack_.back();
  if (block.kind() == LabelKind::CatchAll) {
    return fail("catch cannot follow a catch_all");
  }
  if (block.kind() != LabelKind::Try && block.kind() != LabelKind::Catch) {
    return fail("catch can only be used within a try-catch");
  }
  *kind = block.kind();
  *paramType = block.type().params();

  if (!checkStackAtEndOfBlock(resultType, tryResults)) {
    return false;
  }

  valueStack_.shrinkTo(block.valueStackBase());
  block.switchToCatch();
  // Reset local state to the beginning of the 'try' block.
  unsetLocals_.resetToBlock(controlStack_.length() - 1);

  return push(env_.tags[*tagIndex].type->resultType());
}
// Validate a legacy `catch_all`: like readCatch, but without a tag, so no
// payload values are pushed for the handler body.
template <typename Policy>
inline bool OpIter<Policy>::readCatchAll(LabelKind* kind, ResultType* paramType,
                                         ResultType* resultType,
                                         ValueVector* tryResults) {
  MOZ_ASSERT(Classify(op_) == OpKind::CatchAll);

  Control& block = controlStack_.back();
  if (block.kind() != LabelKind::Try && block.kind() != LabelKind::Catch) {
    return fail("catch_all can only be used within a try-catch");
  }
  *kind = block.kind();
  *paramType = block.type().params();

  if (!checkStackAtEndOfBlock(resultType, tryResults)) {
    return false;
  }

  valueStack_.shrinkTo(block.valueStackBase());
  block.switchToCatchAll();
  // Reset local state to the beginning of the 'try' block.
  unsetLocals_.resetToBlock(controlStack_.length() - 1);
  return true;
}

// Validate a legacy `delegate`: check that it closes a plain try, decode the
// (outer-relative) target depth, and check the try's results on the stack.
// Does not pop the control entry; see popDelegate.
template <typename Policy>
inline bool OpIter<Policy>::readDelegate(uint32_t* relativeDepth,
                                         ResultType* resultType,
                                         ValueVector* tryResults) {
  MOZ_ASSERT(Classify(op_) == OpKind::Delegate);

  Control& block = controlStack_.back();
  if (block.kind() != LabelKind::Try) {
    return fail("delegate can only be used within a try");
  }

  uint32_t delegateDepth;
  if (!readVarU32(&delegateDepth)) {
    return fail("unable to read delegate depth");
  }

  // Depths for delegate start counting in the surrounding block.
  if (delegateDepth >= controlStack_.length() - 1) {
    return fail("delegate depth exceeds current nesting level");
  }
  *relativeDepth = delegateDepth + 1;

  // Because `delegate` acts like `end` and ends the block, we will check
  // the stack here.
  return checkStackAtEndOfBlock(resultType, tryResults);
}

// We need popDelegate because readDelegate cannot pop the control stack
// itself, as its caller may need to use the control item for delegate.
template <typename Policy>
inline void OpIter<Policy>::popDelegate() {
  MOZ_ASSERT(Classify(op_) == OpKind::Delegate);

  controlStack_.popBack();
  unsetLocals_.resetToBlock(controlStack_.length());
}
1830 template <typename Policy>
1831 inline bool OpIter<Policy>::readThrow(uint32_t* tagIndex,
1832 ValueVector* argValues) {
1833 MOZ_ASSERT(Classify(op_) == OpKind::Throw);
1835 if (!readVarU32(tagIndex)) {
1836 return fail("expected tag index");
1838 if (*tagIndex >= env_.tags.length()) {
1839 return fail("tag index out of range");
1842 if (!popWithType(env_.tags[*tagIndex].type->resultType(), argValues)) {
1843 return false;
1846 afterUnconditionalBranch();
1847 return true;
1850 template <typename Policy>
1851 inline bool OpIter<Policy>::readThrowRef(Value* exnRef) {
1852 MOZ_ASSERT(Classify(op_) == OpKind::ThrowRef);
1854 if (!popWithType(ValType(RefType::exn()), exnRef)) {
1855 return false;
1858 afterUnconditionalBranch();
1859 return true;
1862 template <typename Policy>
1863 inline bool OpIter<Policy>::readRethrow(uint32_t* relativeDepth) {
1864 MOZ_ASSERT(Classify(op_) == OpKind::Rethrow);
1866 if (!readVarU32(relativeDepth)) {
1867 return fail("unable to read rethrow depth");
1870 if (*relativeDepth >= controlStack_.length()) {
1871 return fail("rethrow depth exceeds current nesting level");
1873 LabelKind kind = controlKind(*relativeDepth);
1874 if (kind != LabelKind::Catch && kind != LabelKind::CatchAll) {
1875 return fail("rethrow target was not a catch block");
1878 afterUnconditionalBranch();
1879 return true;
1882 template <typename Policy>
1883 inline bool OpIter<Policy>::readUnreachable() {
1884 MOZ_ASSERT(Classify(op_) == OpKind::Unreachable);
1886 afterUnconditionalBranch();
1887 return true;
1890 template <typename Policy>
1891 inline bool OpIter<Policy>::readDrop() {
1892 MOZ_ASSERT(Classify(op_) == OpKind::Drop);
1893 StackType type;
1894 Value value;
1895 return popStackType(&type, &value);
1898 template <typename Policy>
1899 inline bool OpIter<Policy>::readUnary(ValType operandType, Value* input) {
1900 MOZ_ASSERT(Classify(op_) == OpKind::Unary);
1902 if (!popWithType(operandType, input)) {
1903 return false;
1906 infalliblePush(operandType);
1908 return true;
1911 template <typename Policy>
1912 inline bool OpIter<Policy>::readConversion(ValType operandType,
1913 ValType resultType, Value* input) {
1914 MOZ_ASSERT(Classify(op_) == OpKind::Conversion);
1916 if (!popWithType(operandType, input)) {
1917 return false;
1920 infalliblePush(resultType);
1922 return true;
1925 template <typename Policy>
1926 inline bool OpIter<Policy>::readBinary(ValType operandType, Value* lhs,
1927 Value* rhs) {
1928 MOZ_ASSERT(Classify(op_) == OpKind::Binary);
1930 if (!popWithType(operandType, rhs)) {
1931 return false;
1934 if (!popWithType(operandType, lhs)) {
1935 return false;
1938 infalliblePush(operandType);
1940 return true;
1943 template <typename Policy>
1944 inline bool OpIter<Policy>::readComparison(ValType operandType, Value* lhs,
1945 Value* rhs) {
1946 MOZ_ASSERT(Classify(op_) == OpKind::Comparison);
1948 if (!popWithType(operandType, rhs)) {
1949 return false;
1952 if (!popWithType(operandType, lhs)) {
1953 return false;
1956 infalliblePush(ValType::I32);
1958 return true;
1961 template <typename Policy>
1962 inline bool OpIter<Policy>::readTernary(ValType operandType, Value* v0,
1963 Value* v1, Value* v2) {
1964 MOZ_ASSERT(Classify(op_) == OpKind::Ternary);
1966 if (!popWithType(operandType, v2)) {
1967 return false;
1970 if (!popWithType(operandType, v1)) {
1971 return false;
1974 if (!popWithType(operandType, v0)) {
1975 return false;
1978 infalliblePush(operandType);
1980 return true;
1983 template <typename Policy>
1984 inline bool OpIter<Policy>::readLinearMemoryAddress(
1985 uint32_t byteSize, LinearMemoryAddress<Value>* addr) {
1986 uint32_t flags;
1987 if (!readVarU32(&flags)) {
1988 return fail("unable to read load alignment");
1991 uint8_t alignLog2 = flags & ((1 << 6) - 1);
1992 uint8_t hasMemoryIndex = flags & (1 << 6);
1993 uint8_t undefinedBits = flags & ~((1 << 7) - 1);
1995 if (undefinedBits != 0) {
1996 return fail("invalid memory flags");
1999 if (hasMemoryIndex != 0) {
2000 if (!readVarU32(&addr->memoryIndex)) {
2001 return fail("unable to read memory index");
2003 } else {
2004 addr->memoryIndex = 0;
2007 if (addr->memoryIndex >= env_.numMemories()) {
2008 return fail("memory index out of range");
2011 if (!readVarU64(&addr->offset)) {
2012 return fail("unable to read load offset");
2015 IndexType it = env_.memories[addr->memoryIndex].indexType();
2016 if (it == IndexType::I32 && addr->offset > UINT32_MAX) {
2017 return fail("offset too large for memory type");
2020 if (alignLog2 >= 32 || (uint32_t(1) << alignLog2) > byteSize) {
2021 return fail("greater than natural alignment");
2024 if (!popWithType(ToValType(it), &addr->base)) {
2025 return false;
2028 addr->align = uint32_t(1) << alignLog2;
2029 return true;
2032 template <typename Policy>
2033 inline bool OpIter<Policy>::readLinearMemoryAddressAligned(
2034 uint32_t byteSize, LinearMemoryAddress<Value>* addr) {
2035 if (!readLinearMemoryAddress(byteSize, addr)) {
2036 return false;
2039 if (addr->align != byteSize) {
2040 return fail("not natural alignment");
2043 return true;
2046 template <typename Policy>
2047 inline bool OpIter<Policy>::readLoad(ValType resultType, uint32_t byteSize,
2048 LinearMemoryAddress<Value>* addr) {
2049 MOZ_ASSERT(Classify(op_) == OpKind::Load);
2051 if (!readLinearMemoryAddress(byteSize, addr)) {
2052 return false;
2055 infalliblePush(resultType);
2057 return true;
2060 template <typename Policy>
2061 inline bool OpIter<Policy>::readStore(ValType resultType, uint32_t byteSize,
2062 LinearMemoryAddress<Value>* addr,
2063 Value* value) {
2064 MOZ_ASSERT(Classify(op_) == OpKind::Store);
2066 if (!popWithType(resultType, value)) {
2067 return false;
2070 return readLinearMemoryAddress(byteSize, addr);
2073 template <typename Policy>
2074 inline bool OpIter<Policy>::readTeeStore(ValType resultType, uint32_t byteSize,
2075 LinearMemoryAddress<Value>* addr,
2076 Value* value) {
2077 MOZ_ASSERT(Classify(op_) == OpKind::TeeStore);
2079 if (!popWithType(resultType, value)) {
2080 return false;
2083 if (!readLinearMemoryAddress(byteSize, addr)) {
2084 return false;
2087 infalliblePush(TypeAndValue(resultType, *value));
2088 return true;
2091 template <typename Policy>
2092 inline bool OpIter<Policy>::readNop() {
2093 MOZ_ASSERT(Classify(op_) == OpKind::Nop);
2095 return true;
2098 template <typename Policy>
2099 inline bool OpIter<Policy>::readMemorySize(uint32_t* memoryIndex) {
2100 MOZ_ASSERT(Classify(op_) == OpKind::MemorySize);
2102 if (!readVarU32(memoryIndex)) {
2103 return fail("failed to read memory flags");
2106 if (*memoryIndex >= env_.numMemories()) {
2107 return fail("memory index out of range for memory.size");
2110 ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());
2111 return push(ptrType);
2114 template <typename Policy>
2115 inline bool OpIter<Policy>::readMemoryGrow(uint32_t* memoryIndex,
2116 Value* input) {
2117 MOZ_ASSERT(Classify(op_) == OpKind::MemoryGrow);
2119 if (!readVarU32(memoryIndex)) {
2120 return fail("failed to read memory flags");
2123 if (*memoryIndex >= env_.numMemories()) {
2124 return fail("memory index out of range for memory.grow");
2127 ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());
2128 if (!popWithType(ptrType, input)) {
2129 return false;
2132 infalliblePush(ptrType);
2134 return true;
2137 template <typename Policy>
2138 inline bool OpIter<Policy>::readSelect(bool typed, StackType* type,
2139 Value* trueValue, Value* falseValue,
2140 Value* condition) {
2141 MOZ_ASSERT(Classify(op_) == OpKind::Select);
2143 if (typed) {
2144 uint32_t length;
2145 if (!readVarU32(&length)) {
2146 return fail("unable to read select result length");
2148 if (length != 1) {
2149 return fail("bad number of results");
2151 ValType result;
2152 if (!readValType(&result)) {
2153 return fail("invalid result type for select");
2156 if (!popWithType(ValType::I32, condition)) {
2157 return false;
2159 if (!popWithType(result, falseValue)) {
2160 return false;
2162 if (!popWithType(result, trueValue)) {
2163 return false;
2166 *type = StackType(result);
2167 infalliblePush(*type);
2168 return true;
2171 if (!popWithType(ValType::I32, condition)) {
2172 return false;
2175 StackType falseType;
2176 if (!popStackType(&falseType, falseValue)) {
2177 return false;
2180 StackType trueType;
2181 if (!popStackType(&trueType, trueValue)) {
2182 return false;
2185 if (!falseType.isValidForUntypedSelect() ||
2186 !trueType.isValidForUntypedSelect()) {
2187 return fail("invalid types for untyped select");
2190 if (falseType.isStackBottom()) {
2191 *type = trueType;
2192 } else if (trueType.isStackBottom() || falseType == trueType) {
2193 *type = falseType;
2194 } else {
2195 return fail("select operand types must match");
2198 infalliblePush(*type);
2199 return true;
2202 template <typename Policy>
2203 inline bool OpIter<Policy>::readGetLocal(const ValTypeVector& locals,
2204 uint32_t* id) {
2205 MOZ_ASSERT(Classify(op_) == OpKind::GetLocal);
2207 if (!readVarU32(id)) {
2208 return fail("unable to read local index");
2211 if (*id >= locals.length()) {
2212 return fail("local.get index out of range");
2215 if (unsetLocals_.isUnset(*id)) {
2216 return fail("local.get read from unset local");
2219 return push(locals[*id]);
2222 template <typename Policy>
2223 inline bool OpIter<Policy>::readSetLocal(const ValTypeVector& locals,
2224 uint32_t* id, Value* value) {
2225 MOZ_ASSERT(Classify(op_) == OpKind::SetLocal);
2227 if (!readVarU32(id)) {
2228 return fail("unable to read local index");
2231 if (*id >= locals.length()) {
2232 return fail("local.set index out of range");
2235 if (unsetLocals_.isUnset(*id)) {
2236 unsetLocals_.set(*id, controlStackDepth());
2239 return popWithType(locals[*id], value);
2242 template <typename Policy>
2243 inline bool OpIter<Policy>::readTeeLocal(const ValTypeVector& locals,
2244 uint32_t* id, Value* value) {
2245 MOZ_ASSERT(Classify(op_) == OpKind::TeeLocal);
2247 if (!readVarU32(id)) {
2248 return fail("unable to read local index");
2251 if (*id >= locals.length()) {
2252 return fail("local.set index out of range");
2255 if (unsetLocals_.isUnset(*id)) {
2256 unsetLocals_.set(*id, controlStackDepth());
2259 ValueVector single;
2260 if (!checkTopTypeMatches(ResultType::Single(locals[*id]), &single,
2261 /*rewriteStackTypes=*/true)) {
2262 return false;
2265 *value = single[0];
2266 return true;
2269 template <typename Policy>
2270 inline bool OpIter<Policy>::readGetGlobal(uint32_t* id) {
2271 MOZ_ASSERT(Classify(op_) == OpKind::GetGlobal);
2273 if (!d_.readGlobalIndex(id)) {
2274 return false;
2277 if (*id >= env_.globals.length()) {
2278 return fail("global.get index out of range");
2281 // Initializer expressions can access immutable imported globals, or any
2282 // previously defined immutable global with GC enabled.
2283 if (kind_ == OpIter::InitExpr && (env_.globals[*id].isMutable() ||
2284 *id >= maxInitializedGlobalsIndexPlus1_)) {
2285 return fail(
2286 "global.get in initializer expression must reference a global "
2287 "immutable import");
2290 return push(env_.globals[*id].type());
2293 template <typename Policy>
2294 inline bool OpIter<Policy>::readSetGlobal(uint32_t* id, Value* value) {
2295 MOZ_ASSERT(Classify(op_) == OpKind::SetGlobal);
2297 if (!d_.readGlobalIndex(id)) {
2298 return false;
2301 if (*id >= env_.globals.length()) {
2302 return fail("global.set index out of range");
2305 if (!env_.globals[*id].isMutable()) {
2306 return fail("can't write an immutable global");
2309 return popWithType(env_.globals[*id].type(), value);
2312 template <typename Policy>
2313 inline bool OpIter<Policy>::readTeeGlobal(uint32_t* id, Value* value) {
2314 MOZ_ASSERT(Classify(op_) == OpKind::TeeGlobal);
2316 if (!d_.readGlobalIndex(id)) {
2317 return false;
2320 if (*id >= env_.globals.length()) {
2321 return fail("global.set index out of range");
2324 if (!env_.globals[*id].isMutable()) {
2325 return fail("can't write an immutable global");
2328 ValueVector single;
2329 if (!checkTopTypeMatches(ResultType::Single(env_.globals[*id].type()),
2330 &single,
2331 /*rewriteStackTypes=*/true)) {
2332 return false;
2335 MOZ_ASSERT(single.length() == 1);
2336 *value = single[0];
2337 return true;
2340 template <typename Policy>
2341 inline bool OpIter<Policy>::readI32Const(int32_t* i32) {
2342 MOZ_ASSERT(Classify(op_) == OpKind::I32);
2344 if (!d_.readI32Const(i32)) {
2345 return false;
2348 return push(ValType::I32);
2351 template <typename Policy>
2352 inline bool OpIter<Policy>::readI64Const(int64_t* i64) {
2353 MOZ_ASSERT(Classify(op_) == OpKind::I64);
2355 if (!d_.readI64Const(i64)) {
2356 return false;
2359 return push(ValType::I64);
2362 template <typename Policy>
2363 inline bool OpIter<Policy>::readF32Const(float* f32) {
2364 MOZ_ASSERT(Classify(op_) == OpKind::F32);
2366 if (!d_.readF32Const(f32)) {
2367 return false;
2370 return push(ValType::F32);
2373 template <typename Policy>
2374 inline bool OpIter<Policy>::readF64Const(double* f64) {
2375 MOZ_ASSERT(Classify(op_) == OpKind::F64);
2377 if (!d_.readF64Const(f64)) {
2378 return false;
2381 return push(ValType::F64);
2384 template <typename Policy>
2385 inline bool OpIter<Policy>::readRefFunc(uint32_t* funcIndex) {
2386 MOZ_ASSERT(Classify(op_) == OpKind::RefFunc);
2388 if (!d_.readFuncIndex(funcIndex)) {
2389 return false;
2391 if (*funcIndex >= env_.funcs.length()) {
2392 return fail("function index out of range");
2394 if (kind_ == OpIter::Func && !env_.funcs[*funcIndex].canRefFunc()) {
2395 return fail(
2396 "function index is not declared in a section before the code section");
2399 #ifdef ENABLE_WASM_GC
2400 // When function references enabled, push type index on the stack, e.g. for
2401 // validation of the call_ref instruction.
2402 if (env_.gcEnabled()) {
2403 const uint32_t typeIndex = env_.funcs[*funcIndex].typeIndex;
2404 const TypeDef& typeDef = env_.types->type(typeIndex);
2405 return push(RefType::fromTypeDef(&typeDef, false));
2407 #endif
2408 return push(RefType::func());
2411 template <typename Policy>
2412 inline bool OpIter<Policy>::readRefNull(RefType* type) {
2413 MOZ_ASSERT(Classify(op_) == OpKind::RefNull);
2415 if (!d_.readRefNull(*env_.types, env_.features, type)) {
2416 return false;
2418 return push(*type);
2421 template <typename Policy>
2422 inline bool OpIter<Policy>::readRefIsNull(Value* input) {
2423 MOZ_ASSERT(Classify(op_) == OpKind::Conversion);
2425 StackType type;
2426 if (!popWithRefType(input, &type)) {
2427 return false;
2429 return push(ValType::I32);
2432 template <typename Policy>
2433 inline bool OpIter<Policy>::readRefAsNonNull(Value* input) {
2434 MOZ_ASSERT(Classify(op_) == OpKind::RefAsNonNull);
2436 StackType type;
2437 if (!popWithRefType(input, &type)) {
2438 return false;
2441 if (type.isStackBottom()) {
2442 infalliblePush(type);
2443 } else {
2444 infalliblePush(TypeAndValue(type.asNonNullable(), *input));
2446 return true;
2449 template <typename Policy>
2450 inline bool OpIter<Policy>::readBrOnNull(uint32_t* relativeDepth,
2451 ResultType* type, ValueVector* values,
2452 Value* condition) {
2453 MOZ_ASSERT(Classify(op_) == OpKind::BrOnNull);
2455 if (!readVarU32(relativeDepth)) {
2456 return fail("unable to read br_on_null depth");
2459 StackType refType;
2460 if (!popWithRefType(condition, &refType)) {
2461 return false;
2464 if (!checkBranchValueAndPush(*relativeDepth, type, values,
2465 /*rewriteStackTypes=*/true)) {
2466 return false;
2469 if (refType.isStackBottom()) {
2470 return push(refType);
2472 return push(TypeAndValue(refType.asNonNullable(), *condition));
2475 template <typename Policy>
2476 inline bool OpIter<Policy>::readBrOnNonNull(uint32_t* relativeDepth,
2477 ResultType* type,
2478 ValueVector* values,
2479 Value* condition) {
2480 MOZ_ASSERT(Classify(op_) == OpKind::BrOnNonNull);
2482 if (!readVarU32(relativeDepth)) {
2483 return fail("unable to read br_on_non_null depth");
2486 Control* block = nullptr;
2487 if (!getControl(*relativeDepth, &block)) {
2488 return false;
2491 *type = block->branchTargetType();
2493 // Check we at least have one type in the branch target type.
2494 if (type->length() < 1) {
2495 return fail("type mismatch: target block type expected to be [_, ref]");
2498 // Pop the condition reference.
2499 StackType refType;
2500 if (!popWithRefType(condition, &refType)) {
2501 return false;
2504 // Push non-nullable version of condition reference on the stack, prior
2505 // checking the target type below.
2506 if (!(refType.isStackBottom()
2507 ? push(refType)
2508 : push(TypeAndValue(refType.asNonNullable(), *condition)))) {
2509 return false;
2512 // Check if the type stack matches the branch target type.
2513 if (!checkTopTypeMatches(*type, values, /*rewriteStackTypes=*/true)) {
2514 return false;
2517 // Pop the condition reference -- the null-branch does not receive the value.
2518 StackType unusedType;
2519 Value unusedValue;
2520 return popStackType(&unusedType, &unusedValue);
2523 template <typename Policy>
2524 inline bool OpIter<Policy>::popCallArgs(const ValTypeVector& expectedTypes,
2525 ValueVector* values) {
2526 // Iterate through the argument types backward so that pops occur in the
2527 // right order.
2529 if (!values->resize(expectedTypes.length())) {
2530 return false;
2533 for (int32_t i = int32_t(expectedTypes.length()) - 1; i >= 0; i--) {
2534 if (!popWithType(expectedTypes[i], &(*values)[i])) {
2535 return false;
2539 return true;
2542 template <typename Policy>
2543 inline bool OpIter<Policy>::readCall(uint32_t* funcTypeIndex,
2544 ValueVector* argValues) {
2545 MOZ_ASSERT(Classify(op_) == OpKind::Call);
2547 if (!readVarU32(funcTypeIndex)) {
2548 return fail("unable to read call function index");
2551 if (*funcTypeIndex >= env_.funcs.length()) {
2552 return fail("callee index out of range");
2555 const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;
2557 if (!popCallArgs(funcType.args(), argValues)) {
2558 return false;
2561 return push(ResultType::Vector(funcType.results()));
#ifdef ENABLE_WASM_TAIL_CALLS
// Validates `return_call`: like `call`, but the callee's results must be
// subtypes of the enclosing function's results, and control does not fall
// through (the instruction acts as an unconditional branch).
template <typename Policy>
inline bool OpIter<Policy>::readReturnCall(uint32_t* funcTypeIndex,
                                           ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCall);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read call function index");
  }
  if (*funcTypeIndex >= env_.funcs.length()) {
    return fail("callee index out of range");
  }

  const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;
  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector(funcType.results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
2597 template <typename Policy>
2598 inline bool OpIter<Policy>::readCallIndirect(uint32_t* funcTypeIndex,
2599 uint32_t* tableIndex,
2600 Value* callee,
2601 ValueVector* argValues) {
2602 MOZ_ASSERT(Classify(op_) == OpKind::CallIndirect);
2603 MOZ_ASSERT(funcTypeIndex != tableIndex);
2605 if (!readVarU32(funcTypeIndex)) {
2606 return fail("unable to read call_indirect signature index");
2609 if (*funcTypeIndex >= env_.numTypes()) {
2610 return fail("signature index out of range");
2613 if (!readVarU32(tableIndex)) {
2614 return fail("unable to read call_indirect table index");
2616 if (*tableIndex >= env_.tables.length()) {
2617 // Special case this for improved user experience.
2618 if (!env_.tables.length()) {
2619 return fail("can't call_indirect without a table");
2621 return fail("table index out of range for call_indirect");
2623 if (!env_.tables[*tableIndex].elemType.isFuncHierarchy()) {
2624 return fail("indirect calls must go through a table of 'funcref'");
2627 if (!popWithType(ValType::I32, callee)) {
2628 return false;
2631 const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
2632 if (!typeDef.isFuncType()) {
2633 return fail("expected signature type");
2635 const FuncType& funcType = typeDef.funcType();
2637 if (!popCallArgs(funcType.args(), argValues)) {
2638 return false;
2641 return push(ResultType::Vector(funcType.results()));
#ifdef ENABLE_WASM_TAIL_CALLS
// Validates `return_call_indirect`: like `call_indirect`, but the callee's
// results must be subtypes of the enclosing function's results and control
// does not fall through.
template <typename Policy>
inline bool OpIter<Policy>::readReturnCallIndirect(uint32_t* funcTypeIndex,
                                                   uint32_t* tableIndex,
                                                   Value* callee,
                                                   ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCallIndirect);
  MOZ_ASSERT(funcTypeIndex != tableIndex);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read return_call_indirect signature index");
  }
  if (*funcTypeIndex >= env_.numTypes()) {
    return fail("signature index out of range");
  }

  if (!readVarU32(tableIndex)) {
    return fail("unable to read return_call_indirect table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    // Special case this for improved user experience.
    if (!env_.tables.length()) {
      return fail("can't return_call_indirect without a table");
    }
    return fail("table index out of range for return_call_indirect");
  }
  if (!env_.tables[*tableIndex].elemType.isFuncHierarchy()) {
    return fail("indirect calls must go through a table of 'funcref'");
  }

  if (!popWithType(ValType::I32, callee)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
  if (!typeDef.isFuncType()) {
    return fail("expected signature type");
  }
  const FuncType& funcType = typeDef.funcType();

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector(funcType.results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
#ifdef ENABLE_WASM_GC
// Validates `call_ref`: reads the function type index, pops a nullable typed
// function reference and the arguments, and pushes the results.
template <typename Policy>
inline bool OpIter<Policy>::readCallRef(const FuncType** funcType,
                                        Value* callee, ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::CallRef);

  uint32_t funcTypeIndex;
  if (!readFuncTypeIndex(&funcTypeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(funcTypeIndex);
  *funcType = &typeDef.funcType();

  if (!popWithType(ValType(RefType::fromTypeDef(&typeDef, true)), callee)) {
    return false;
  }
  if (!popCallArgs((*funcType)->args(), argValues)) {
    return false;
  }

  return push(ResultType::Vector((*funcType)->results()));
}
#endif
#if defined(ENABLE_WASM_TAIL_CALLS) && defined(ENABLE_WASM_GC)
// Validates `return_call_ref`: like `call_ref`, but the callee's results
// must be subtypes of the enclosing function's results and control does not
// fall through.
template <typename Policy>
inline bool OpIter<Policy>::readReturnCallRef(const FuncType** funcType,
                                              Value* callee,
                                              ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCallRef);

  uint32_t funcTypeIndex;
  if (!readFuncTypeIndex(&funcTypeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(funcTypeIndex);
  *funcType = &typeDef.funcType();

  if (!popWithType(ValType(RefType::fromTypeDef(&typeDef, true)), callee)) {
    return false;
  }
  if (!popCallArgs((*funcType)->args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector((*funcType)->results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
2763 template <typename Policy>
2764 inline bool OpIter<Policy>::readOldCallDirect(uint32_t numFuncImports,
2765 uint32_t* funcTypeIndex,
2766 ValueVector* argValues) {
2767 MOZ_ASSERT(Classify(op_) == OpKind::OldCallDirect);
2769 uint32_t funcDefIndex;
2770 if (!readVarU32(&funcDefIndex)) {
2771 return fail("unable to read call function index");
2774 if (UINT32_MAX - funcDefIndex < numFuncImports) {
2775 return fail("callee index out of range");
2778 *funcTypeIndex = numFuncImports + funcDefIndex;
2780 if (*funcTypeIndex >= env_.funcs.length()) {
2781 return fail("callee index out of range");
2784 const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;
2786 if (!popCallArgs(funcType.args(), argValues)) {
2787 return false;
2790 return push(ResultType::Vector(funcType.results()));
2793 template <typename Policy>
2794 inline bool OpIter<Policy>::readOldCallIndirect(uint32_t* funcTypeIndex,
2795 Value* callee,
2796 ValueVector* argValues) {
2797 MOZ_ASSERT(Classify(op_) == OpKind::OldCallIndirect);
2799 if (!readVarU32(funcTypeIndex)) {
2800 return fail("unable to read call_indirect signature index");
2803 if (*funcTypeIndex >= env_.numTypes()) {
2804 return fail("signature index out of range");
2807 const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
2808 if (!typeDef.isFuncType()) {
2809 return fail("expected signature type");
2811 const FuncType& funcType = typeDef.funcType();
2813 if (!popCallArgs(funcType.args(), argValues)) {
2814 return false;
2817 if (!popWithType(ValType::I32, callee)) {
2818 return false;
2821 return push(ResultType::Vector(funcType.results()));
2824 template <typename Policy>
2825 inline bool OpIter<Policy>::readWake(LinearMemoryAddress<Value>* addr,
2826 Value* count) {
2827 MOZ_ASSERT(Classify(op_) == OpKind::Wake);
2829 if (!popWithType(ValType::I32, count)) {
2830 return false;
2833 uint32_t byteSize = 4; // Per spec; smallest WAIT is i32.
2835 if (!readLinearMemoryAddressAligned(byteSize, addr)) {
2836 return false;
2839 infalliblePush(ValType::I32);
2840 return true;
2843 template <typename Policy>
2844 inline bool OpIter<Policy>::readWait(LinearMemoryAddress<Value>* addr,
2845 ValType valueType, uint32_t byteSize,
2846 Value* value, Value* timeout) {
2847 MOZ_ASSERT(Classify(op_) == OpKind::Wait);
2849 if (!popWithType(ValType::I64, timeout)) {
2850 return false;
2853 if (!popWithType(valueType, value)) {
2854 return false;
2857 if (!readLinearMemoryAddressAligned(byteSize, addr)) {
2858 return false;
2861 infalliblePush(ValType::I32);
2862 return true;
2865 template <typename Policy>
2866 inline bool OpIter<Policy>::readFence() {
2867 MOZ_ASSERT(Classify(op_) == OpKind::Fence);
2868 uint8_t flags;
2869 if (!readFixedU8(&flags)) {
2870 return fail("expected memory order after fence");
2872 if (flags != 0) {
2873 return fail("non-zero memory order not supported yet");
2875 return true;
2878 template <typename Policy>
2879 inline bool OpIter<Policy>::readAtomicLoad(LinearMemoryAddress<Value>* addr,
2880 ValType resultType,
2881 uint32_t byteSize) {
2882 MOZ_ASSERT(Classify(op_) == OpKind::AtomicLoad);
2884 if (!readLinearMemoryAddressAligned(byteSize, addr)) {
2885 return false;
2888 infalliblePush(resultType);
2889 return true;
2892 template <typename Policy>
2893 inline bool OpIter<Policy>::readAtomicStore(LinearMemoryAddress<Value>* addr,
2894 ValType resultType,
2895 uint32_t byteSize, Value* value) {
2896 MOZ_ASSERT(Classify(op_) == OpKind::AtomicStore);
2898 if (!popWithType(resultType, value)) {
2899 return false;
2902 return readLinearMemoryAddressAligned(byteSize, addr);
2905 template <typename Policy>
2906 inline bool OpIter<Policy>::readAtomicRMW(LinearMemoryAddress<Value>* addr,
2907 ValType resultType, uint32_t byteSize,
2908 Value* value) {
2909 MOZ_ASSERT(Classify(op_) == OpKind::AtomicBinOp);
2911 if (!popWithType(resultType, value)) {
2912 return false;
2915 if (!readLinearMemoryAddressAligned(byteSize, addr)) {
2916 return false;
2919 infalliblePush(resultType);
2920 return true;
2923 template <typename Policy>
2924 inline bool OpIter<Policy>::readAtomicCmpXchg(LinearMemoryAddress<Value>* addr,
2925 ValType resultType,
2926 uint32_t byteSize,
2927 Value* oldValue,
2928 Value* newValue) {
2929 MOZ_ASSERT(Classify(op_) == OpKind::AtomicCompareExchange);
2931 if (!popWithType(resultType, newValue)) {
2932 return false;
2935 if (!popWithType(resultType, oldValue)) {
2936 return false;
2939 if (!readLinearMemoryAddressAligned(byteSize, addr)) {
2940 return false;
2943 infalliblePush(resultType);
2944 return true;
2947 template <typename Policy>
2948 inline bool OpIter<Policy>::readMemOrTableCopy(bool isMem,
2949 uint32_t* dstMemOrTableIndex,
2950 Value* dst,
2951 uint32_t* srcMemOrTableIndex,
2952 Value* src, Value* len) {
2953 MOZ_ASSERT(Classify(op_) == OpKind::MemOrTableCopy);
2954 MOZ_ASSERT(dstMemOrTableIndex != srcMemOrTableIndex);
2956 // Spec requires (dest, src) as of 2019-10-04.
2957 if (!readVarU32(dstMemOrTableIndex)) {
2958 return false;
2960 if (!readVarU32(srcMemOrTableIndex)) {
2961 return false;
2964 if (isMem) {
2965 if (*srcMemOrTableIndex >= env_.memories.length() ||
2966 *dstMemOrTableIndex >= env_.memories.length()) {
2967 return fail("memory index out of range for memory.copy");
2969 } else {
2970 if (*dstMemOrTableIndex >= env_.tables.length() ||
2971 *srcMemOrTableIndex >= env_.tables.length()) {
2972 return fail("table index out of range for table.copy");
2974 ValType dstElemType = env_.tables[*dstMemOrTableIndex].elemType;
2975 ValType srcElemType = env_.tables[*srcMemOrTableIndex].elemType;
2976 if (!checkIsSubtypeOf(srcElemType, dstElemType)) {
2977 return false;
2981 ValType dstPtrType;
2982 ValType srcPtrType;
2983 ValType lenType;
2984 if (isMem) {
2985 dstPtrType = ToValType(env_.memories[*dstMemOrTableIndex].indexType());
2986 srcPtrType = ToValType(env_.memories[*srcMemOrTableIndex].indexType());
2987 if (dstPtrType == ValType::I64 && srcPtrType == ValType::I64) {
2988 lenType = ValType::I64;
2989 } else {
2990 lenType = ValType::I32;
2992 } else {
2993 dstPtrType = srcPtrType = lenType = ValType::I32;
2996 if (!popWithType(lenType, len)) {
2997 return false;
3000 if (!popWithType(srcPtrType, src)) {
3001 return false;
3004 return popWithType(dstPtrType, dst);
3007 template <typename Policy>
3008 inline bool OpIter<Policy>::readDataOrElemDrop(bool isData,
3009 uint32_t* segIndex) {
3010 MOZ_ASSERT(Classify(op_) == OpKind::DataOrElemDrop);
3012 if (!readVarU32(segIndex)) {
3013 return fail("unable to read segment index");
3016 if (isData) {
3017 if (env_.dataCount.isNothing()) {
3018 return fail("data.drop requires a DataCount section");
3020 if (*segIndex >= *env_.dataCount) {
3021 return fail("data.drop segment index out of range");
3023 } else {
3024 if (*segIndex >= env_.elemSegments.length()) {
3025 return fail("element segment index out of range for elem.drop");
3029 return true;
3032 template <typename Policy>
3033 inline bool OpIter<Policy>::readMemFill(uint32_t* memoryIndex, Value* start,
3034 Value* val, Value* len) {
3035 MOZ_ASSERT(Classify(op_) == OpKind::MemFill);
3037 if (!readVarU32(memoryIndex)) {
3038 return fail("failed to read memory index");
3041 if (*memoryIndex >= env_.numMemories()) {
3042 return fail("memory index out of range for memory.fill");
3045 ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());
3047 if (!popWithType(ptrType, len)) {
3048 return false;
3051 if (!popWithType(ValType::I32, val)) {
3052 return false;
3055 return popWithType(ptrType, start);
3058 template <typename Policy>
3059 inline bool OpIter<Policy>::readMemOrTableInit(bool isMem, uint32_t* segIndex,
3060 uint32_t* dstMemOrTableIndex,
3061 Value* dst, Value* src,
3062 Value* len) {
3063 MOZ_ASSERT(Classify(op_) == OpKind::MemOrTableInit);
3064 MOZ_ASSERT(segIndex != dstMemOrTableIndex);
3066 if (!readVarU32(segIndex)) {
3067 return fail("unable to read segment index");
3070 uint32_t memOrTableIndex = 0;
3071 if (!readVarU32(&memOrTableIndex)) {
3072 return false;
3075 if (isMem) {
3076 if (memOrTableIndex >= env_.memories.length()) {
3077 return fail("memory index out of range for memory.init");
3079 *dstMemOrTableIndex = memOrTableIndex;
3081 if (env_.dataCount.isNothing()) {
3082 return fail("memory.init requires a DataCount section");
3084 if (*segIndex >= *env_.dataCount) {
3085 return fail("memory.init segment index out of range");
3087 } else {
3088 if (memOrTableIndex >= env_.tables.length()) {
3089 return fail("table index out of range for table.init");
3091 *dstMemOrTableIndex = memOrTableIndex;
3093 if (*segIndex >= env_.elemSegments.length()) {
3094 return fail("table.init segment index out of range");
3096 if (!checkIsSubtypeOf(env_.elemSegments[*segIndex].elemType,
3097 env_.tables[*dstMemOrTableIndex].elemType)) {
3098 return false;
3102 if (!popWithType(ValType::I32, len)) {
3103 return false;
3106 if (!popWithType(ValType::I32, src)) {
3107 return false;
3110 ValType ptrType =
3111 isMem ? ToValType(env_.memories[*dstMemOrTableIndex].indexType())
3112 : ValType::I32;
3113 return popWithType(ptrType, dst);
3116 template <typename Policy>
3117 inline bool OpIter<Policy>::readTableFill(uint32_t* tableIndex, Value* start,
3118 Value* val, Value* len) {
3119 MOZ_ASSERT(Classify(op_) == OpKind::TableFill);
3121 if (!readVarU32(tableIndex)) {
3122 return fail("unable to read table index");
3124 if (*tableIndex >= env_.tables.length()) {
3125 return fail("table index out of range for table.fill");
3128 if (!popWithType(ValType::I32, len)) {
3129 return false;
3131 if (!popWithType(env_.tables[*tableIndex].elemType, val)) {
3132 return false;
3134 return popWithType(ValType::I32, start);
3137 template <typename Policy>
3138 inline bool OpIter<Policy>::readMemDiscard(uint32_t* memoryIndex, Value* start,
3139 Value* len) {
3140 MOZ_ASSERT(Classify(op_) == OpKind::MemDiscard);
3142 if (!readVarU32(memoryIndex)) {
3143 return fail("failed to read memory index");
3145 if (*memoryIndex >= env_.memories.length()) {
3146 return fail("memory index out of range for memory.discard");
3149 ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());
3151 if (!popWithType(ptrType, len)) {
3152 return false;
3155 return popWithType(ptrType, start);
3158 template <typename Policy>
3159 inline bool OpIter<Policy>::readTableGet(uint32_t* tableIndex, Value* index) {
3160 MOZ_ASSERT(Classify(op_) == OpKind::TableGet);
3162 if (!readVarU32(tableIndex)) {
3163 return fail("unable to read table index");
3165 if (*tableIndex >= env_.tables.length()) {
3166 return fail("table index out of range for table.get");
3169 if (!popWithType(ValType::I32, index)) {
3170 return false;
3173 infalliblePush(env_.tables[*tableIndex].elemType);
3174 return true;
3177 template <typename Policy>
3178 inline bool OpIter<Policy>::readTableGrow(uint32_t* tableIndex,
3179 Value* initValue, Value* delta) {
3180 MOZ_ASSERT(Classify(op_) == OpKind::TableGrow);
3182 if (!readVarU32(tableIndex)) {
3183 return fail("unable to read table index");
3185 if (*tableIndex >= env_.tables.length()) {
3186 return fail("table index out of range for table.grow");
3189 if (!popWithType(ValType::I32, delta)) {
3190 return false;
3192 if (!popWithType(env_.tables[*tableIndex].elemType, initValue)) {
3193 return false;
3196 infalliblePush(ValType::I32);
3197 return true;
3200 template <typename Policy>
3201 inline bool OpIter<Policy>::readTableSet(uint32_t* tableIndex, Value* index,
3202 Value* value) {
3203 MOZ_ASSERT(Classify(op_) == OpKind::TableSet);
3205 if (!readVarU32(tableIndex)) {
3206 return fail("unable to read table index");
3208 if (*tableIndex >= env_.tables.length()) {
3209 return fail("table index out of range for table.set");
3212 if (!popWithType(env_.tables[*tableIndex].elemType, value)) {
3213 return false;
3216 return popWithType(ValType::I32, index);
3219 template <typename Policy>
3220 inline bool OpIter<Policy>::readTableSize(uint32_t* tableIndex) {
3221 MOZ_ASSERT(Classify(op_) == OpKind::TableSize);
3223 *tableIndex = 0;
3225 if (!readVarU32(tableIndex)) {
3226 return fail("unable to read table index");
3228 if (*tableIndex >= env_.tables.length()) {
3229 return fail("table index out of range for table.size");
3232 return push(ValType::I32);
3235 template <typename Policy>
3236 inline bool OpIter<Policy>::readGcTypeIndex(uint32_t* typeIndex) {
3237 if (!d_.readTypeIndex(typeIndex)) {
3238 return false;
3241 if (*typeIndex >= env_.types->length()) {
3242 return fail("type index out of range");
3245 if (!env_.types->type(*typeIndex).isStructType() &&
3246 !env_.types->type(*typeIndex).isArrayType()) {
3247 return fail("not a gc type");
3250 return true;
// Reads a type index that must refer to a struct type.
template <typename Policy>
inline bool OpIter<Policy>::readStructTypeIndex(uint32_t* typeIndex) {
  if (!readVarU32(typeIndex)) {
    return fail("unable to read type index");
  }

  if (*typeIndex >= env_.types->length()) {
    return fail("type index out of range");
  }

  if (!env_.types->type(*typeIndex).isStructType()) {
    return fail("not a struct type");
  }

  return true;
}
// Reads a type index that must refer to an array type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayTypeIndex(uint32_t* typeIndex) {
  if (!readVarU32(typeIndex)) {
    return fail("unable to read type index");
  }

  if (*typeIndex >= env_.types->length()) {
    return fail("type index out of range");
  }

  if (!env_.types->type(*typeIndex).isArrayType()) {
    return fail("not an array type");
  }

  return true;
}
3287 template <typename Policy>
3288 inline bool OpIter<Policy>::readFuncTypeIndex(uint32_t* typeIndex) {
3289 if (!readVarU32(typeIndex)) {
3290 return fail("unable to read type index");
3293 if (*typeIndex >= env_.types->length()) {
3294 return fail("type index out of range");
3297 if (!env_.types->type(*typeIndex).isFuncType()) {
3298 return fail("not an func type");
3301 return true;
3304 template <typename Policy>
3305 inline bool OpIter<Policy>::readFieldIndex(uint32_t* fieldIndex,
3306 const StructType& structType) {
3307 if (!readVarU32(fieldIndex)) {
3308 return fail("unable to read field index");
3311 if (structType.fields_.length() <= *fieldIndex) {
3312 return fail("field index out of range");
3315 return true;
#ifdef ENABLE_WASM_GC

// struct.new: pops one value per field and pushes a non-nullable reference
// to the new struct.
template <typename Policy>
inline bool OpIter<Policy>::readStructNew(uint32_t* typeIndex,
                                          ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::StructNew);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!argValues->resize(structType.fields_.length())) {
    return false;
  }

  static_assert(MaxStructFields <= INT32_MAX, "Or we iloop below");

  // Pop field initializers in reverse order so that (*argValues)[i] ends up
  // holding the value for field i.
  for (int32_t i = structType.fields_.length() - 1; i >= 0; i--) {
    if (!popWithType(structType.fields_[i].type.widenToValType(),
                     &(*argValues)[i])) {
      return false;
    }
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// struct.new_default: like struct.new but with no operands; requires every
// field of the struct type to be defaultable.
template <typename Policy>
inline bool OpIter<Policy>::readStructNewDefault(uint32_t* typeIndex) {
  MOZ_ASSERT(Classify(op_) == OpKind::StructNewDefault);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!structType.isDefaultable()) {
    return fail("struct must be defaultable");
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
3366 template <typename Policy>
3367 inline bool OpIter<Policy>::readStructGet(uint32_t* typeIndex,
3368 uint32_t* fieldIndex,
3369 FieldWideningOp wideningOp,
3370 Value* ptr) {
3371 MOZ_ASSERT(typeIndex != fieldIndex);
3372 MOZ_ASSERT(Classify(op_) == OpKind::StructGet);
3374 if (!readStructTypeIndex(typeIndex)) {
3375 return false;
3378 const TypeDef& typeDef = env_.types->type(*typeIndex);
3379 const StructType& structType = typeDef.structType();
3381 if (!readFieldIndex(fieldIndex, structType)) {
3382 return false;
3385 if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
3386 return false;
3389 StorageType StorageType = structType.fields_[*fieldIndex].type;
3391 if (StorageType.isValType() && wideningOp != FieldWideningOp::None) {
3392 return fail("must not specify signedness for unpacked field type");
3395 if (!StorageType.isValType() && wideningOp == FieldWideningOp::None) {
3396 return fail("must specify signedness for packed field type");
3399 return push(StorageType.widenToValType());
// struct.set: pops the new field value and a (nullable) struct reference.
// Fails validation if the selected field is not mutable.
template <typename Policy>
inline bool OpIter<Policy>::readStructSet(uint32_t* typeIndex,
                                          uint32_t* fieldIndex, Value* ptr,
                                          Value* val) {
  MOZ_ASSERT(typeIndex != fieldIndex);
  MOZ_ASSERT(Classify(op_) == OpKind::StructSet);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!readFieldIndex(fieldIndex, structType)) {
    return false;
  }

  // The value is popped before the mutability check; keep this order.
  if (!popWithType(structType.fields_[*fieldIndex].type.widenToValType(),
                   val)) {
    return false;
  }

  if (!structType.fields_[*fieldIndex].isMutable) {
    return fail("field is not mutable");
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), ptr);
}
// array.new: pops an i32 element count and an initial element value, and
// pushes a non-nullable reference to the new array.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNew(uint32_t* typeIndex,
                                         Value* numElements, Value* argValue) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNew);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  // Pop in reverse operand order: count, then the fill value (widened, since
  // packed element types appear as i32 on the stack).
  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }

  if (!popWithType(arrayType.elementType_.widenToValType(), argValue)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// array.new_fixed: reads an immediate element count, pops that many values,
// and pushes a non-nullable reference to the new array.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewFixed(uint32_t* typeIndex,
                                              uint32_t* numElements,
                                              ValueVector* values) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewFixed);
  MOZ_ASSERT(values->length() == 0);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!readVarU32(numElements)) {
    return false;
  }

  if (*numElements > MaxArrayNewFixedElements) {
    return fail("too many array.new_fixed elements");
  }

  if (!values->reserve(*numElements)) {
    return false;
  }

  // Values are appended in pop order, i.e. `values` holds the operands in
  // reverse of their order on the stack.
  ValType widenedElementType = arrayType.elementType_.widenToValType();
  for (uint32_t i = 0; i < *numElements; i++) {
    Value v;
    if (!popWithType(widenedElementType, &v)) {
      return false;
    }
    values->infallibleAppend(v);
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// array.new_default: pops only an i32 element count; requires the array's
// element type to be defaultable.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewDefault(uint32_t* typeIndex,
                                                Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewDefault);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }

  if (!arrayType.elementType_.isDefaultable()) {
    return fail("array must be defaultable");
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// array.new_data: creates an array from a data segment.  The element type
// must be numeric, packed, or vector, and a datacount section must be
// present so the segment index can be validated.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewData(uint32_t* typeIndex,
                                             uint32_t* segIndex, Value* offset,
                                             Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewData);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType elemType = arrayType.elementType_;
  if (!elemType.isNumber() && !elemType.isPacked() && !elemType.isVector()) {
    return fail("element type must be i8/i16/i32/i64/f32/f64/v128");
  }
  if (env_.dataCount.isNothing()) {
    return fail("datacount section missing");
  }
  if (*segIndex >= *env_.dataCount) {
    return fail("segment index is out of range");
  }

  // Pop in reverse operand order: element count, then segment byte offset.
  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, offset)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// array.new_elem: creates a ref-typed array from an element segment.  The
// segment's element type must be a subtype of the array's element type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewElem(uint32_t* typeIndex,
                                             uint32_t* segIndex, Value* offset,
                                             Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewElem);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType dstElemType = arrayType.elementType_;
  if (!dstElemType.isRefType()) {
    return fail("element type is not a reftype");
  }
  if (*segIndex >= env_.elemSegments.length()) {
    return fail("segment index is out of range");
  }

  const ModuleElemSegment& elemSeg = env_.elemSegments[*segIndex];
  RefType srcElemType = elemSeg.elemType;
  // srcElemType needs to be a subtype (child) of dstElemType
  if (!checkIsSubtypeOf(srcElemType, dstElemType.refType())) {
    return fail("incompatible element types");
  }

  // Pop in reverse operand order: element count, then segment offset.
  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, offset)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// array.init_data: fills part of a mutable array with bytes from a data
// segment.  Pops length, segment offset, destination index, and the array
// reference (in that order).
template <typename Policy>
inline bool OpIter<Policy>::readArrayInitData(uint32_t* typeIndex,
                                              uint32_t* segIndex, Value* array,
                                              Value* arrayIndex,
                                              Value* segOffset, Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayInitData);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType elemType = arrayType.elementType_;
  if (!elemType.isNumber() && !elemType.isPacked() && !elemType.isVector()) {
    return fail("element type must be i8/i16/i32/i64/f32/f64/v128");
  }
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }
  if (env_.dataCount.isNothing()) {
    return fail("datacount section missing");
  }
  if (*segIndex >= *env_.dataCount) {
    return fail("segment index is out of range");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(ValType::I32, segOffset)) {
    return false;
  }
  if (!popWithType(ValType::I32, arrayIndex)) {
    return false;
  }
  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// array.init_elem: fills part of a mutable ref-typed array from an element
// segment.  The segment's element type must be a subtype of the array's
// element type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayInitElem(uint32_t* typeIndex,
                                              uint32_t* segIndex, Value* array,
                                              Value* arrayIndex,
                                              Value* segOffset, Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayInitElem);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType dstElemType = arrayType.elementType_;
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }
  if (!dstElemType.isRefType()) {
    return fail("element type is not a reftype");
  }
  if (*segIndex >= env_.elemSegments.length()) {
    return fail("segment index is out of range");
  }

  const ModuleElemSegment& elemSeg = env_.elemSegments[*segIndex];
  RefType srcElemType = elemSeg.elemType;
  // srcElemType needs to be a subtype (child) of dstElemType
  if (!checkIsSubtypeOf(srcElemType, dstElemType.refType())) {
    return fail("incompatible element types");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(ValType::I32, segOffset)) {
    return false;
  }
  if (!popWithType(ValType::I32, arrayIndex)) {
    return false;
  }
  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// array.get / array.get_s / array.get_u: pops an i32 index and a (nullable)
// array reference, and pushes the element value widened to a ValType.
template <typename Policy>
inline bool OpIter<Policy>::readArrayGet(uint32_t* typeIndex,
                                         FieldWideningOp wideningOp,
                                         Value* index, Value* ptr) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayGet);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
    return false;
  }

  StorageType elementType = arrayType.elementType_;

  // As with struct.get: a widening op is required iff the element type is
  // packed.
  if (elementType.isValType() && wideningOp != FieldWideningOp::None) {
    return fail("must not specify signedness for unpacked element type");
  }

  if (!elementType.isValType() && wideningOp == FieldWideningOp::None) {
    return fail("must specify signedness for packed element type");
  }

  return push(elementType.widenToValType());
}
// array.set: pops the new element value, an i32 index, and a (nullable)
// array reference.  Fails validation if the array type is not mutable.
template <typename Policy>
inline bool OpIter<Policy>::readArraySet(uint32_t* typeIndex, Value* val,
                                         Value* index, Value* ptr) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArraySet);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!arrayType.isMutable_) {
    return fail("array is not mutable");
  }

  if (!popWithType(arrayType.elementType_.widenToValType(), val)) {
    return false;
  }

  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), ptr);
}
3745 template <typename Policy>
3746 inline bool OpIter<Policy>::readArrayLen(Value* ptr) {
3747 MOZ_ASSERT(Classify(op_) == OpKind::ArrayLen);
3749 if (!popWithType(RefType::array(), ptr)) {
3750 return false;
3753 return push(ValType::I32);
// array.copy: copies a range of elements between two arrays whose element
// types are compatible (src must be a subtype of dst) and whose destination
// type is mutable.
template <typename Policy>
inline bool OpIter<Policy>::readArrayCopy(int32_t* elemSize,
                                          bool* elemsAreRefTyped,
                                          Value* dstArray, Value* dstIndex,
                                          Value* srcArray, Value* srcIndex,
                                          Value* numElements) {
  // *elemSize is set to 1/2/4/8/16, and *elemsAreRefTyped is set to indicate
  // *ref-typeness of elements.
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayCopy);

  uint32_t dstTypeIndex, srcTypeIndex;
  if (!readArrayTypeIndex(&dstTypeIndex)) {
    return false;
  }
  if (!readArrayTypeIndex(&srcTypeIndex)) {
    return false;
  }

  // `dstTypeIndex`/`srcTypeIndex` are ensured by the above to both be array
  // types.  Reject if:
  // * the dst array is not of mutable type
  // * the element types are incompatible
  const TypeDef& dstTypeDef = env_.types->type(dstTypeIndex);
  const ArrayType& dstArrayType = dstTypeDef.arrayType();
  const TypeDef& srcTypeDef = env_.types->type(srcTypeIndex);
  const ArrayType& srcArrayType = srcTypeDef.arrayType();
  StorageType dstElemType = dstArrayType.elementType_;
  StorageType srcElemType = srcArrayType.elementType_;
  if (!dstArrayType.isMutable_) {
    return fail("destination array is not mutable");
  }

  if (!checkIsSubtypeOf(srcElemType, dstElemType)) {
    return fail("incompatible element types");
  }
  // Subtyping guarantees both sides agree on ref-typeness.
  bool dstIsRefType = dstElemType.isRefType();
  MOZ_ASSERT(dstIsRefType == srcElemType.isRefType());

  *elemSize = int32_t(dstElemType.size());
  *elemsAreRefTyped = dstIsRefType;
  MOZ_ASSERT(*elemSize >= 1 && *elemSize <= 16);
  MOZ_ASSERT_IF(*elemsAreRefTyped, *elemSize == 4 || *elemSize == 8);

  // Pop the five operands in reverse order.
  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, srcIndex)) {
    return false;
  }
  if (!popWithType(RefType::fromTypeDef(&srcTypeDef, true), srcArray)) {
    return false;
  }
  if (!popWithType(ValType::I32, dstIndex)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&dstTypeDef, true), dstArray);
}
// array.fill: pops a length, a fill value, a start index, and a (nullable)
// array reference.  The array type must be mutable.
template <typename Policy>
inline bool OpIter<Policy>::readArrayFill(uint32_t* typeIndex, Value* array,
                                          Value* index, Value* val,
                                          Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayFill);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(arrayType.elementType_.widenToValType(), val)) {
    return false;
  }
  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// ref.test: pops a reference whose type is in `destType`'s top-type
// hierarchy and pushes an i32 (the test result).  Reports the most precise
// source type seen on the stack via *sourceType.
template <typename Policy>
inline bool OpIter<Policy>::readRefTest(bool nullable, RefType* sourceType,
                                        RefType* destType, Value* ref) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefTest);

  if (!readHeapType(nullable, destType)) {
    return false;
  }

  StackType inputType;
  if (!popWithType(destType->topType(), ref, &inputType)) {
    return false;
  }
  // Fall back to anyref when no precise operand type is available (see
  // StackType::valTypeOr).
  *sourceType = inputType.valTypeOr(RefType::any()).refType();

  return push(ValType(ValType::I32));
}
// ref.cast: like ref.test but pushes the destination type itself rather than
// an i32 result.
template <typename Policy>
inline bool OpIter<Policy>::readRefCast(bool nullable, RefType* sourceType,
                                        RefType* destType, Value* ref) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefCast);

  if (!readHeapType(nullable, destType)) {
    return false;
  }

  StackType inputType;
  if (!popWithType(destType->topType(), ref, &inputType)) {
    return false;
  }
  // Fall back to anyref when no precise operand type is available.
  *sourceType = inputType.valTypeOr(RefType::any()).refType();

  return push(*destType);
}
// `br_on_cast <flags> <labelRelativeDepth> <rt1> <rt2>`
// branches if a reference has a given heap type.
//
// V6 spec text follows - note that br_on_cast and br_on_cast_fail are both
// handled by this function (disambiguated by a flag).
//
// * `br_on_cast <labelidx> <reftype> <reftype>` branches if a reference has a
//   given type
//   - `br_on_cast $l rt1 rt2 : [t0* rt1] -> [t0* rt1\rt2]`
//   - iff `$l : [t0* rt2]`
//   - and `rt2 <: rt1`
//   - passes operand along with branch under target type, plus possible extra
//     args
//   - if `rt2` contains `null`, branches on null, otherwise does not
// * `br_on_cast_fail <labelidx> <reftype> <reftype>` branches if a reference
//   does not have a given type
//   - `br_on_cast_fail $l rt1 rt2 : [t0* rt1] -> [t0* rt2]`
//   - iff `$l : [t0* rt1\rt2]`
//   - and `rt2 <: rt1`
//   - passes operand along with branch, plus possible extra args
//   - if `rt2` contains `null`, does not branch on null, otherwise does
// where:
//   - `(ref null1? ht1)\(ref null ht2) = (ref ht1)`
//   - `(ref null1? ht1)\(ref ht2) = (ref null1? ht1)`
//
// The `rt1\rt2` syntax is a "diff" - it is basically rt1 minus rt2, because a
// successful cast to rt2 will branch away. So if rt2 allows null, the result
// after a non-branch will be non-null; on the other hand, if rt2 is
// non-nullable, the cast will have nothing to say about nullability and the
// nullability of rt1 will be preserved.
//
// `values` will be nonempty after the call, and its last entry will be the
// type that causes a branch (rt1\rt2 or rt2, depending).

// Immediate flag bits for br_on_cast/br_on_cast_fail, encoding the
// nullability of the source and destination reference types.
enum class BrOnCastFlags : uint8_t {
  SourceNullable = 0x1,
  DestNullable = 0x1 << 1,
  AllowedMask = uint8_t(SourceNullable) | uint8_t(DestNullable),
};
// Shared reader for br_on_cast and br_on_cast_fail (selected by `onSuccess`).
// See the long comment above BrOnCastFlags for the typing rules.
template <typename Policy>
inline bool OpIter<Policy>::readBrOnCast(bool onSuccess,
                                         uint32_t* labelRelativeDepth,
                                         RefType* sourceType, RefType* destType,
                                         ResultType* labelType,
                                         ValueVector* values) {
  MOZ_ASSERT(Classify(op_) == OpKind::BrOnCast);

  uint8_t flags;
  if (!readFixedU8(&flags)) {
    return fail("unable to read br_on_cast flags");
  }
  if ((flags & ~uint8_t(BrOnCastFlags::AllowedMask)) != 0) {
    return fail("invalid br_on_cast flags");
  }
  bool sourceNullable = flags & uint8_t(BrOnCastFlags::SourceNullable);
  bool destNullable = flags & uint8_t(BrOnCastFlags::DestNullable);

  if (!readVarU32(labelRelativeDepth)) {
    return fail("unable to read br_on_cast depth");
  }

  // This is distinct from the actual source type we pop from the stack, which
  // can be more specific and allow for better optimizations.
  RefType immediateSourceType;
  if (!readHeapType(sourceNullable, &immediateSourceType)) {
    return fail("unable to read br_on_cast source type");
  }

  if (!readHeapType(destNullable, destType)) {
    return fail("unable to read br_on_cast dest type");
  }

  // Check that source and destination types are compatible
  if (!checkIsSubtypeOf(*destType, immediateSourceType)) {
    return fail(
        "type mismatch: source and destination types for cast are "
        "incompatible");
  }

  RefType typeOnSuccess = *destType;
  // This is rt1\rt2
  RefType typeOnFail =
      destNullable ? immediateSourceType.asNonNullable() : immediateSourceType;
  RefType typeOnBranch = onSuccess ? typeOnSuccess : typeOnFail;
  RefType typeOnFallthrough = onSuccess ? typeOnFail : typeOnSuccess;

  // Get the branch target type, which will also determine the type of extra
  // values that are passed along on branch.
  Control* block = nullptr;
  if (!getControl(*labelRelativeDepth, &block)) {
    return false;
  }
  *labelType = block->branchTargetType();

  // Check we have at least one value slot in the branch target type, so as to
  // receive the casted or non-casted type when we branch.
  const size_t labelTypeNumValues = labelType->length();
  if (labelTypeNumValues < 1) {
    return fail("type mismatch: branch target type has no value types");
  }

  // The last value slot in the branch target type is what is being cast.
  // This slot is guaranteed to exist by the above check.

  // Check that the branch target type can accept typeOnBranch.
  if (!checkIsSubtypeOf(typeOnBranch, (*labelType)[labelTypeNumValues - 1])) {
    return false;
  }

  // Replace the top operand with the result of falling through. Even branching
  // on success can change the type on top of the stack on fallthrough.
  Value inputValue;
  StackType inputType;
  if (!popWithType(immediateSourceType, &inputValue, &inputType)) {
    return false;
  }
  *sourceType = inputType.valTypeOr(immediateSourceType).refType();
  infalliblePush(TypeAndValue(typeOnFallthrough, inputValue));

  // Create a copy of the branch target type, with the relevant value slot
  // replaced by typeOnFallthrough.
  ValTypeVector fallthroughTypes;
  if (!labelType->cloneToVector(&fallthroughTypes)) {
    return false;
  }
  fallthroughTypes[labelTypeNumValues - 1] = typeOnFallthrough;

  return checkTopTypeMatches(ResultType::Vector(fallthroughTypes), values,
                             /*rewriteStackTypes=*/true);
}
// Shared reader for reference-conversion ops: pops a value of `operandType`
// and pushes `resultType`, preserving the operand's nullability.
template <typename Policy>
inline bool OpIter<Policy>::readRefConversion(RefType operandType,
                                              RefType resultType,
                                              Value* operandValue) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefConversion);

  StackType actualOperandType;
  if (!popWithType(ValType(operandType), operandValue, &actualOperandType)) {
    return false;
  }

  // The result nullability is the same as the operand nullability
  bool outputNullable = actualOperandType.isNullableAsOperand();
  infalliblePush(ValType(resultType.withIsNullable(outputNullable)));
  return true;
}

#endif  // ENABLE_WASM_GC

#ifdef ENABLE_WASM_SIMD
// Reads a single-byte lane index and checks it against `inputLanes`.
// Returns false on both decode failure and out-of-range index; the caller is
// responsible for reporting an error message.
template <typename Policy>
inline bool OpIter<Policy>::readLaneIndex(uint32_t inputLanes,
                                          uint32_t* laneIndex) {
  uint8_t tmp;
  if (!readFixedU8(&tmp)) {
    return false;  // Caller signals error
  }
  if (tmp >= inputLanes) {
    return false;
  }
  *laneIndex = tmp;
  return true;
}
// extract_lane: pops a v128 and pushes the extracted lane as `resultType`.
template <typename Policy>
inline bool OpIter<Policy>::readExtractLane(ValType resultType,
                                            uint32_t inputLanes,
                                            uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::ExtractLane);

  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid extract_lane lane index");
  }

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  infalliblePush(resultType);

  return true;
}
4066 template <typename Policy>
4067 inline bool OpIter<Policy>::readReplaceLane(ValType operandType,
4068 uint32_t inputLanes,
4069 uint32_t* laneIndex,
4070 Value* baseValue, Value* operand) {
4071 MOZ_ASSERT(Classify(op_) == OpKind::ReplaceLane);
4073 if (!readLaneIndex(inputLanes, laneIndex)) {
4074 return fail("missing or invalid replace_lane lane index");
4077 if (!popWithType(operandType, operand)) {
4078 return false;
4081 if (!popWithType(ValType::V128, baseValue)) {
4082 return false;
4085 infalliblePush(ValType::V128);
4087 return true;
4090 template <typename Policy>
4091 inline bool OpIter<Policy>::readVectorShift(Value* baseValue, Value* shift) {
4092 MOZ_ASSERT(Classify(op_) == OpKind::VectorShift);
4094 if (!popWithType(ValType::I32, shift)) {
4095 return false;
4098 if (!popWithType(ValType::V128, baseValue)) {
4099 return false;
4102 infalliblePush(ValType::V128);
4104 return true;
// i8x16.shuffle: reads the 16-byte selection mask, pops two v128 operands,
// and pushes a v128.
template <typename Policy>
inline bool OpIter<Policy>::readVectorShuffle(Value* v1, Value* v2,
                                              V128* selectMask) {
  MOZ_ASSERT(Classify(op_) == OpKind::VectorShuffle);

  // Each mask byte selects one of the 32 lanes of the two concatenated
  // inputs, so it must be in [0, 31].
  for (unsigned char& byte : selectMask->bytes) {
    uint8_t tmp;
    if (!readFixedU8(&tmp)) {
      return fail("unable to read shuffle index");
    }
    if (tmp > 31) {
      return fail("shuffle index out of range");
    }
    byte = tmp;
  }

  if (!popWithType(ValType::V128, v2)) {
    return false;
  }

  if (!popWithType(ValType::V128, v1)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
4136 template <typename Policy>
4137 inline bool OpIter<Policy>::readV128Const(V128* value) {
4138 MOZ_ASSERT(Classify(op_) == OpKind::V128);
4140 if (!d_.readV128Const(value)) {
4141 return false;
4144 return push(ValType::V128);
// vNNxMM.load_splat: reads a linear memory address (popping the address
// operand) and pushes a v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadSplat(uint32_t byteSize,
                                          LinearMemoryAddress<Value>* addr) {
  MOZ_ASSERT(Classify(op_) == OpKind::Load);

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// vNNxMM.load_extend: always an 8-byte memory access; pushes a v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadExtend(LinearMemoryAddress<Value>* addr) {
  MOZ_ASSERT(Classify(op_) == OpKind::Load);

  if (!readLinearMemoryAddress(/*byteSize=*/8, addr)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// vNNxMM.load_lane: pops a v128 and a memory address, then reads the lane
// index immediate; pushes the updated v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadLane(uint32_t byteSize,
                                         LinearMemoryAddress<Value>* addr,
                                         uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::LoadLane);

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  // A v128 has 16 / byteSize lanes of the given width.
  uint32_t inputLanes = 16 / byteSize;
  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid load_lane lane index");
  }

  infalliblePush(ValType::V128);

  return true;
}
// vNNxMM.store_lane: pops a v128 and a memory address, then reads the lane
// index immediate; pushes nothing.
template <typename Policy>
inline bool OpIter<Policy>::readStoreLane(uint32_t byteSize,
                                          LinearMemoryAddress<Value>* addr,
                                          uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::StoreLane);

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  // A v128 has 16 / byteSize lanes of the given width.
  uint32_t inputLanes = 16 / byteSize;
  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid store_lane lane index");
  }

  return true;
}

#endif  // ENABLE_WASM_SIMD
// Reads a call to a builtin module function: decodes the builtin id, checks
// memory requirements, pops the call arguments, and pushes the results.
template <typename Policy>
inline bool OpIter<Policy>::readCallBuiltinModuleFunc(
    const BuiltinModuleFunc** builtinModuleFunc, ValueVector* params) {
  MOZ_ASSERT(Classify(op_) == OpKind::CallBuiltinModuleFunc);

  uint32_t id;
  if (!d_.readVarU32(&id)) {
    return false;
  }

  if (id >= uint32_t(BuiltinModuleFuncId::Limit)) {
    return fail("index out of range");
  }

  *builtinModuleFunc = &BuiltinModuleFuncs::getFromId(BuiltinModuleFuncId(id));

  // Builtins that access memory require at least one memory to be present.
  if ((*builtinModuleFunc)->usesMemory() && env_.numMemories() == 0) {
    return fail("can't touch memory without memory");
  }

  const FuncType& funcType = *(*builtinModuleFunc)->funcType();
  if (!popCallArgs(funcType.args(), params)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
4250 } // namespace wasm
4251 } // namespace js
// Compile-time guarantees that the operand-stack and control-stack entry
// types may be freely memcpy'd and discarded by the vectors that hold them.
static_assert(std::is_trivially_copyable<
                  js::wasm::TypeAndValueT<mozilla::Nothing>>::value,
              "Must be trivially copyable");
static_assert(std::is_trivially_destructible<
                  js::wasm::TypeAndValueT<mozilla::Nothing>>::value,
              "Must be trivially destructible");

static_assert(std::is_trivially_copyable<
                  js::wasm::ControlStackEntry<mozilla::Nothing>>::value,
              "Must be trivially copyable");
static_assert(std::is_trivially_destructible<
                  js::wasm::ControlStackEntry<mozilla::Nothing>>::value,
              "Must be trivially destructible");
4267 #endif // wasm_op_iter_h