/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 #ifndef wasm_op_iter_h
20 #define wasm_op_iter_h
22 #include "mozilla/CompactPair.h"
23 #include "mozilla/Poison.h"
25 #include <type_traits>
27 #include "js/Printf.h"
28 #include "wasm/WasmBuiltinModule.h"
29 #include "wasm/WasmUtility.h"
30 #include "wasm/WasmValidate.h"
35 // The kind of a control-flow stack item.
36 enum class LabelKind
: uint8_t {
48 // The type of values on the operand stack during validation. This is either a
49 // ValType or the special type "Bottom".
54 explicit StackType(PackedTypeCode tc
) : tc_(tc
) {}
57 StackType() : tc_(PackedTypeCode::invalid()) {}
59 explicit StackType(const ValType
& t
) : tc_(t
.packed()) {
60 MOZ_ASSERT(tc_
.isValid());
61 MOZ_ASSERT(!isStackBottom());
64 static StackType
bottom() {
65 return StackType(PackedTypeCode::pack(TypeCode::Limit
));
68 bool isStackBottom() const {
69 MOZ_ASSERT(tc_
.isValid());
70 return tc_
.typeCode() == TypeCode::Limit
;
73 // Returns whether this input is nullable when interpreted as an operand.
74 // When the type is bottom for unreachable code, this returns false as that
75 // is the most permissive option.
76 bool isNullableAsOperand() const {
77 MOZ_ASSERT(tc_
.isValid());
78 return isStackBottom() ? false : tc_
.isNullable();
81 ValType
valType() const {
82 MOZ_ASSERT(tc_
.isValid());
83 MOZ_ASSERT(!isStackBottom());
87 ValType
valTypeOr(ValType ifBottom
) const {
88 MOZ_ASSERT(tc_
.isValid());
89 if (isStackBottom()) {
95 ValType
asNonNullable() const {
96 MOZ_ASSERT(tc_
.isValid());
97 MOZ_ASSERT(!isStackBottom());
98 return ValType(tc_
.withIsNullable(false));
101 bool isValidForUntypedSelect() const {
102 MOZ_ASSERT(tc_
.isValid());
103 if (isStackBottom()) {
106 switch (valType().kind()) {
111 #ifdef ENABLE_WASM_SIMD
120 bool operator==(const StackType
& that
) const {
121 MOZ_ASSERT(tc_
.isValid() && that
.tc_
.isValid());
122 return tc_
== that
.tc_
;
125 bool operator!=(const StackType
& that
) const {
126 MOZ_ASSERT(tc_
.isValid() && that
.tc_
.isValid());
127 return tc_
!= that
.tc_
;
132 // Families of opcodes that share a signature and validation logic.
168 # ifdef ENABLE_WASM_GC
184 AtomicCompareExchange
,
220 # ifdef ENABLE_WASM_SIMD
236 CallBuiltinModuleFunc
,
239 // Return the OpKind for a given Op. This is used for sanity-checking that
240 // API users use the correct read function for a given Op.
241 OpKind
Classify(OpBytes op
);
// Common fields for linear memory access.
template <typename Value>
struct LinearMemoryAddress {
  // NOTE(review): the field declarations were lost in extraction and are
  // reconstructed here from the constructor's parameter/init list.
  Value base;            // dynamic base operand popped from the stack
  uint32_t memoryIndex;  // which linear memory is addressed
  uint64_t offset;       // constant offset immediate
  uint32_t align;        // alignment immediate

  LinearMemoryAddress() : memoryIndex(0), offset(0), align(0) {}
  LinearMemoryAddress(Value base, uint32_t memoryIndex, uint64_t offset,
                      uint32_t align)
      : base(base), memoryIndex(memoryIndex), offset(offset), align(align) {}
};
258 template <typename ControlItem
>
259 class ControlStackEntry
{
260 // Use a pair to optimize away empty ControlItem.
261 mozilla::CompactPair
<BlockType
, ControlItem
> typeAndItem_
;
263 // The "base" of a control stack entry is valueStack_.length() minus
264 // type().params().length(), i.e., the size of the value stack "below"
266 uint32_t valueStackBase_
;
267 bool polymorphicBase_
;
272 ControlStackEntry(LabelKind kind
, BlockType type
, uint32_t valueStackBase
)
273 : typeAndItem_(type
, ControlItem()),
274 valueStackBase_(valueStackBase
),
275 polymorphicBase_(false),
277 MOZ_ASSERT(type
!= BlockType());
280 LabelKind
kind() const { return kind_
; }
281 BlockType
type() const { return typeAndItem_
.first(); }
282 ResultType
resultType() const { return type().results(); }
283 ResultType
branchTargetType() const {
284 return kind_
== LabelKind::Loop
? type().params() : type().results();
286 uint32_t valueStackBase() const { return valueStackBase_
; }
287 ControlItem
& controlItem() { return typeAndItem_
.second(); }
288 void setPolymorphicBase() { polymorphicBase_
= true; }
289 bool polymorphicBase() const { return polymorphicBase_
; }
291 void switchToElse() {
292 MOZ_ASSERT(kind() == LabelKind::Then
);
293 kind_
= LabelKind::Else
;
294 polymorphicBase_
= false;
297 void switchToCatch() {
298 MOZ_ASSERT(kind() == LabelKind::Try
|| kind() == LabelKind::Catch
);
299 kind_
= LabelKind::Catch
;
300 polymorphicBase_
= false;
303 void switchToCatchAll() {
304 MOZ_ASSERT(kind() == LabelKind::Try
|| kind() == LabelKind::Catch
);
305 kind_
= LabelKind::CatchAll
;
306 polymorphicBase_
= false;
310 // Track state of the non-defaultable locals. Every time such local is
311 // initialized, the stack will record at what depth and which local was set.
312 // On a block end, the "unset" state will be rolled back to how it was before
313 // the block started.
315 // It is very likely only a few functions will have non-defaultable locals and
316 // very few locals will be non-defaultable. This class is optimized to be fast
317 // for this common case.
318 class UnsetLocalsState
{
319 struct SetLocalEntry
{
321 uint32_t localUnsetIndex
;
322 SetLocalEntry(uint32_t depth_
, uint32_t localUnsetIndex_
)
323 : depth(depth_
), localUnsetIndex(localUnsetIndex_
) {}
325 using SetLocalsStack
= Vector
<SetLocalEntry
, 16, SystemAllocPolicy
>;
326 using UnsetLocals
= Vector
<uint32_t, 16, SystemAllocPolicy
>;
328 static constexpr size_t WordSize
= 4;
329 static constexpr size_t WordBits
= WordSize
* 8;
331 // Bit array of "unset" function locals. Stores only unset states of the
332 // locals that are declared after the first non-defaultable local.
333 UnsetLocals unsetLocals_
;
334 // Stack of "set" operations. Contains pair where the first field is a depth,
335 // and the second field is local id (offset by firstNonDefaultLocal_).
336 SetLocalsStack setLocalsStack_
;
337 uint32_t firstNonDefaultLocal_
;
340 UnsetLocalsState() : firstNonDefaultLocal_(UINT32_MAX
) {}
342 [[nodiscard
]] bool init(const ValTypeVector
& locals
, size_t numParams
);
344 inline bool isUnset(uint32_t id
) const {
345 if (MOZ_LIKELY(id
< firstNonDefaultLocal_
)) {
348 uint32_t localUnsetIndex
= id
- firstNonDefaultLocal_
;
349 return unsetLocals_
[localUnsetIndex
/ WordBits
] &
350 (1 << (localUnsetIndex
% WordBits
));
353 inline void set(uint32_t id
, uint32_t depth
) {
354 MOZ_ASSERT(isUnset(id
));
355 MOZ_ASSERT(id
>= firstNonDefaultLocal_
&&
356 (id
- firstNonDefaultLocal_
) / WordBits
< unsetLocals_
.length());
357 uint32_t localUnsetIndex
= id
- firstNonDefaultLocal_
;
358 unsetLocals_
[localUnsetIndex
/ WordBits
] ^= 1
359 << (localUnsetIndex
% WordBits
);
360 // The setLocalsStack_ is reserved upfront in the UnsetLocalsState::init.
361 // A SetLocalEntry will be pushed only once per local.
362 setLocalsStack_
.infallibleEmplaceBack(depth
, localUnsetIndex
);
365 inline void resetToBlock(uint32_t controlDepth
) {
366 while (MOZ_UNLIKELY(setLocalsStack_
.length() > 0) &&
367 setLocalsStack_
.back().depth
> controlDepth
) {
368 uint32_t localUnsetIndex
= setLocalsStack_
.back().localUnsetIndex
;
369 MOZ_ASSERT(!(unsetLocals_
[localUnsetIndex
/ WordBits
] &
370 (1 << (localUnsetIndex
% WordBits
))));
371 unsetLocals_
[localUnsetIndex
/ WordBits
] |=
372 1 << (localUnsetIndex
% WordBits
);
373 setLocalsStack_
.popBack();
377 int empty() const { return setLocalsStack_
.empty(); }
380 template <typename Value
>
381 class TypeAndValueT
{
382 // Use a Pair to optimize away empty Value.
383 mozilla::CompactPair
<StackType
, Value
> tv_
;
386 TypeAndValueT() : tv_(StackType::bottom(), Value()) {}
387 explicit TypeAndValueT(StackType type
) : tv_(type
, Value()) {}
388 explicit TypeAndValueT(ValType type
) : tv_(StackType(type
), Value()) {}
389 TypeAndValueT(StackType type
, Value value
) : tv_(type
, value
) {}
390 TypeAndValueT(ValType type
, Value value
) : tv_(StackType(type
), value
) {}
391 StackType
type() const { return tv_
.first(); }
392 void setType(StackType type
) { tv_
.first() = type
; }
393 Value
value() const { return tv_
.second(); }
394 void setValue(Value value
) { tv_
.second() = value
; }
397 // An iterator over the bytes of a function body. It performs validation
398 // and unpacks the data into a usable form.
400 // The MOZ_STACK_CLASS attribute here is because of the use of DebugOnly.
401 // There's otherwise nothing inherent in this class which would require
402 // it to be used on the stack.
403 template <typename Policy
>
404 class MOZ_STACK_CLASS OpIter
: private Policy
{
406 using Value
= typename
Policy::Value
;
407 using ValueVector
= typename
Policy::ValueVector
;
408 using TypeAndValue
= TypeAndValueT
<Value
>;
409 using TypeAndValueStack
= Vector
<TypeAndValue
, 32, SystemAllocPolicy
>;
410 using ControlItem
= typename
Policy::ControlItem
;
411 using Control
= ControlStackEntry
<ControlItem
>;
412 using ControlStack
= Vector
<Control
, 16, SystemAllocPolicy
>;
422 const ModuleEnvironment
& env_
;
424 TypeAndValueStack valueStack_
;
425 TypeAndValueStack elseParamStack_
;
426 ControlStack controlStack_
;
427 UnsetLocalsState unsetLocals_
;
428 // The exclusive max index of a global that can be accessed by global.get in
429 // this expression. When GC is enabled, this is any previously defined
430 // immutable global. Otherwise this is always set to zero, and only imported
431 // immutable globals are allowed.
432 uint32_t maxInitializedGlobalsIndexPlus1_
;
433 FeatureUsage featureUsage_
;
438 size_t offsetOfLastReadOp_
;
440 [[nodiscard
]] bool readFixedU8(uint8_t* out
) { return d_
.readFixedU8(out
); }
441 [[nodiscard
]] bool readFixedU32(uint32_t* out
) {
442 return d_
.readFixedU32(out
);
444 [[nodiscard
]] bool readVarS32(int32_t* out
) { return d_
.readVarS32(out
); }
445 [[nodiscard
]] bool readVarU32(uint32_t* out
) { return d_
.readVarU32(out
); }
446 [[nodiscard
]] bool readVarS64(int64_t* out
) { return d_
.readVarS64(out
); }
447 [[nodiscard
]] bool readVarU64(uint64_t* out
) { return d_
.readVarU64(out
); }
448 [[nodiscard
]] bool readFixedF32(float* out
) { return d_
.readFixedF32(out
); }
449 [[nodiscard
]] bool readFixedF64(double* out
) { return d_
.readFixedF64(out
); }
451 [[nodiscard
]] bool readLinearMemoryAddress(uint32_t byteSize
,
452 LinearMemoryAddress
<Value
>* addr
);
453 [[nodiscard
]] bool readLinearMemoryAddressAligned(
454 uint32_t byteSize
, LinearMemoryAddress
<Value
>* addr
);
455 [[nodiscard
]] bool readBlockType(BlockType
* type
);
456 [[nodiscard
]] bool readGcTypeIndex(uint32_t* typeIndex
);
457 [[nodiscard
]] bool readStructTypeIndex(uint32_t* typeIndex
);
458 [[nodiscard
]] bool readArrayTypeIndex(uint32_t* typeIndex
);
459 [[nodiscard
]] bool readFuncTypeIndex(uint32_t* typeIndex
);
460 [[nodiscard
]] bool readFieldIndex(uint32_t* fieldIndex
,
461 const StructType
& structType
);
463 [[nodiscard
]] bool popCallArgs(const ValTypeVector
& expectedTypes
,
464 ValueVector
* values
);
466 [[nodiscard
]] bool failEmptyStack();
467 [[nodiscard
]] bool popStackType(StackType
* type
, Value
* value
);
468 [[nodiscard
]] bool popWithType(ValType expected
, Value
* value
,
469 StackType
* stackType
);
470 [[nodiscard
]] bool popWithType(ValType expected
, Value
* value
);
471 [[nodiscard
]] bool popWithType(ResultType expected
, ValueVector
* values
);
472 template <typename ValTypeSpanT
>
473 [[nodiscard
]] bool popWithTypes(ValTypeSpanT expected
, ValueVector
* values
);
474 [[nodiscard
]] bool popWithRefType(Value
* value
, StackType
* type
);
475 // Check that the top of the value stack has type `expected`, bearing in
476 // mind that it may be a block type, hence involving multiple values.
478 // If the block's stack contains polymorphic values at its base (because we
479 // are in unreachable code) then suitable extra values are inserted into the
480 // value stack, as controlled by `rewriteStackTypes`: if this is true,
481 // polymorphic values have their types created/updated from `expected`. If
482 // it is false, such values are left as `StackType::bottom()`.
484 // If `values` is non-null, it is filled in with Value components of the
485 // relevant stack entries, including those of any new entries created.
486 [[nodiscard
]] bool checkTopTypeMatches(ResultType expected
,
488 bool rewriteStackTypes
);
490 [[nodiscard
]] bool pushControl(LabelKind kind
, BlockType type
);
491 [[nodiscard
]] bool checkStackAtEndOfBlock(ResultType
* type
,
492 ValueVector
* values
);
493 [[nodiscard
]] bool getControl(uint32_t relativeDepth
, Control
** controlEntry
);
494 [[nodiscard
]] bool checkBranchValueAndPush(uint32_t relativeDepth
,
497 bool rewriteStackTypes
);
498 [[nodiscard
]] bool checkBrTableEntryAndPush(uint32_t* relativeDepth
,
499 ResultType prevBranchType
,
500 ResultType
* branchType
,
501 ValueVector
* branchValues
);
503 [[nodiscard
]] bool push(StackType t
) { return valueStack_
.emplaceBack(t
); }
504 [[nodiscard
]] bool push(ValType t
) { return valueStack_
.emplaceBack(t
); }
505 [[nodiscard
]] bool push(TypeAndValue tv
) { return valueStack_
.append(tv
); }
506 [[nodiscard
]] bool push(ResultType t
) {
507 for (size_t i
= 0; i
< t
.length(); i
++) {
514 void infalliblePush(StackType t
) { valueStack_
.infallibleEmplaceBack(t
); }
515 void infalliblePush(ValType t
) {
516 valueStack_
.infallibleEmplaceBack(StackType(t
));
518 void infalliblePush(TypeAndValue tv
) { valueStack_
.infallibleAppend(tv
); }
520 void afterUnconditionalBranch() {
521 valueStack_
.shrinkTo(controlStack_
.back().valueStackBase());
522 controlStack_
.back().setPolymorphicBase();
525 inline bool checkIsSubtypeOf(StorageType actual
, StorageType expected
);
527 inline bool checkIsSubtypeOf(RefType actual
, RefType expected
) {
528 return checkIsSubtypeOf(ValType(actual
).storageType(),
529 ValType(expected
).storageType());
531 inline bool checkIsSubtypeOf(ValType actual
, ValType expected
) {
532 return checkIsSubtypeOf(actual
.storageType(), expected
.storageType());
535 inline bool checkIsSubtypeOf(ResultType params
, ResultType results
);
537 #ifdef ENABLE_WASM_GC
538 inline bool checkIsSubtypeOf(uint32_t actualTypeIndex
,
539 uint32_t expectedTypeIndex
);
544 explicit OpIter(const ModuleEnvironment
& env
, Decoder
& decoder
,
545 Kind kind
= OpIter::Func
)
549 maxInitializedGlobalsIndexPlus1_(0),
550 featureUsage_(FeatureUsage::None
),
551 op_(OpBytes(Op::Limit
)),
552 offsetOfLastReadOp_(0) {}
554 explicit OpIter(const ModuleEnvironment
& env
, Decoder
& decoder
,
555 Kind kind
= OpIter::Func
)
559 maxInitializedGlobalsIndexPlus1_(0),
560 featureUsage_(FeatureUsage::None
),
561 offsetOfLastReadOp_(0) {}
564 FeatureUsage
featureUsage() const { return featureUsage_
; }
566 // Return the decoding byte offset.
567 uint32_t currentOffset() const { return d_
.currentOffset(); }
569 // Return the offset within the entire module of the last-read op.
570 size_t lastOpcodeOffset() const {
571 return offsetOfLastReadOp_
? offsetOfLastReadOp_
: d_
.currentOffset();
574 // Return a BytecodeOffset describing where the current op should be reported
576 BytecodeOffset
bytecodeOffset() const {
577 return BytecodeOffset(lastOpcodeOffset());
580 // Test whether the iterator has reached the end of the buffer.
581 bool done() const { return d_
.done(); }
583 // Return a pointer to the end of the buffer being decoded by this iterator.
584 const uint8_t* end() const { return d_
.end(); }
586 // Report a general failure.
587 [[nodiscard
]] bool fail(const char* msg
) MOZ_COLD
;
589 // Report a general failure with a context
590 [[nodiscard
]] bool fail_ctx(const char* fmt
, const char* context
) MOZ_COLD
;
592 // Report an unrecognized opcode.
593 [[nodiscard
]] bool unrecognizedOpcode(const OpBytes
* expr
) MOZ_COLD
;
595 // Return whether the innermost block has a polymorphic base of its stack.
596 // Ideally this accessor would be removed; consider using something else.
597 bool currentBlockHasPolymorphicBase() const {
598 return !controlStack_
.empty() && controlStack_
.back().polymorphicBase();
601 // ------------------------------------------------------------------------
602 // Decoding and validation interface.
604 // Initialization and termination
606 [[nodiscard
]] bool startFunction(uint32_t funcIndex
,
607 const ValTypeVector
& locals
);
608 [[nodiscard
]] bool endFunction(const uint8_t* bodyEnd
);
610 [[nodiscard
]] bool startInitExpr(ValType expected
);
611 [[nodiscard
]] bool endInitExpr();
613 // Value and reference types
615 [[nodiscard
]] bool readValType(ValType
* type
);
616 [[nodiscard
]] bool readHeapType(bool nullable
, RefType
* type
);
620 [[nodiscard
]] bool readOp(OpBytes
* op
);
621 [[nodiscard
]] bool readReturn(ValueVector
* values
);
622 [[nodiscard
]] bool readBlock(ResultType
* paramType
);
623 [[nodiscard
]] bool readLoop(ResultType
* paramType
);
624 [[nodiscard
]] bool readIf(ResultType
* paramType
, Value
* condition
);
625 [[nodiscard
]] bool readElse(ResultType
* paramType
, ResultType
* resultType
,
626 ValueVector
* thenResults
);
627 [[nodiscard
]] bool readEnd(LabelKind
* kind
, ResultType
* type
,
628 ValueVector
* results
,
629 ValueVector
* resultsForEmptyElse
);
631 [[nodiscard
]] bool readBr(uint32_t* relativeDepth
, ResultType
* type
,
632 ValueVector
* values
);
633 [[nodiscard
]] bool readBrIf(uint32_t* relativeDepth
, ResultType
* type
,
634 ValueVector
* values
, Value
* condition
);
635 [[nodiscard
]] bool readBrTable(Uint32Vector
* depths
, uint32_t* defaultDepth
,
636 ResultType
* defaultBranchType
,
637 ValueVector
* branchValues
, Value
* index
);
638 [[nodiscard
]] bool readTry(ResultType
* type
);
639 [[nodiscard
]] bool readTryTable(ResultType
* type
,
640 TryTableCatchVector
* catches
);
641 [[nodiscard
]] bool readCatch(LabelKind
* kind
, uint32_t* tagIndex
,
642 ResultType
* paramType
, ResultType
* resultType
,
643 ValueVector
* tryResults
);
644 [[nodiscard
]] bool readCatchAll(LabelKind
* kind
, ResultType
* paramType
,
645 ResultType
* resultType
,
646 ValueVector
* tryResults
);
647 [[nodiscard
]] bool readDelegate(uint32_t* relativeDepth
,
648 ResultType
* resultType
,
649 ValueVector
* tryResults
);
651 [[nodiscard
]] bool readThrow(uint32_t* tagIndex
, ValueVector
* argValues
);
652 [[nodiscard
]] bool readThrowRef(Value
* exnRef
);
653 [[nodiscard
]] bool readRethrow(uint32_t* relativeDepth
);
654 [[nodiscard
]] bool readUnreachable();
655 [[nodiscard
]] bool readDrop();
656 [[nodiscard
]] bool readUnary(ValType operandType
, Value
* input
);
657 [[nodiscard
]] bool readConversion(ValType operandType
, ValType resultType
,
659 [[nodiscard
]] bool readBinary(ValType operandType
, Value
* lhs
, Value
* rhs
);
660 [[nodiscard
]] bool readComparison(ValType operandType
, Value
* lhs
,
662 [[nodiscard
]] bool readTernary(ValType operandType
, Value
* v0
, Value
* v1
,
664 [[nodiscard
]] bool readLoad(ValType resultType
, uint32_t byteSize
,
665 LinearMemoryAddress
<Value
>* addr
);
666 [[nodiscard
]] bool readStore(ValType resultType
, uint32_t byteSize
,
667 LinearMemoryAddress
<Value
>* addr
, Value
* value
);
668 [[nodiscard
]] bool readTeeStore(ValType resultType
, uint32_t byteSize
,
669 LinearMemoryAddress
<Value
>* addr
,
671 [[nodiscard
]] bool readNop();
672 [[nodiscard
]] bool readMemorySize(uint32_t* memoryIndex
);
673 [[nodiscard
]] bool readMemoryGrow(uint32_t* memoryIndex
, Value
* input
);
674 [[nodiscard
]] bool readSelect(bool typed
, StackType
* type
, Value
* trueValue
,
675 Value
* falseValue
, Value
* condition
);
676 [[nodiscard
]] bool readGetLocal(const ValTypeVector
& locals
, uint32_t* id
);
677 [[nodiscard
]] bool readSetLocal(const ValTypeVector
& locals
, uint32_t* id
,
679 [[nodiscard
]] bool readTeeLocal(const ValTypeVector
& locals
, uint32_t* id
,
681 [[nodiscard
]] bool readGetGlobal(uint32_t* id
);
682 [[nodiscard
]] bool readSetGlobal(uint32_t* id
, Value
* value
);
683 [[nodiscard
]] bool readTeeGlobal(uint32_t* id
, Value
* value
);
684 [[nodiscard
]] bool readI32Const(int32_t* i32
);
685 [[nodiscard
]] bool readI64Const(int64_t* i64
);
686 [[nodiscard
]] bool readF32Const(float* f32
);
687 [[nodiscard
]] bool readF64Const(double* f64
);
688 [[nodiscard
]] bool readRefFunc(uint32_t* funcIndex
);
689 [[nodiscard
]] bool readRefNull(RefType
* type
);
690 [[nodiscard
]] bool readRefIsNull(Value
* input
);
691 [[nodiscard
]] bool readRefAsNonNull(Value
* input
);
692 [[nodiscard
]] bool readBrOnNull(uint32_t* relativeDepth
, ResultType
* type
,
693 ValueVector
* values
, Value
* condition
);
694 [[nodiscard
]] bool readBrOnNonNull(uint32_t* relativeDepth
, ResultType
* type
,
695 ValueVector
* values
, Value
* condition
);
696 [[nodiscard
]] bool readCall(uint32_t* funcTypeIndex
, ValueVector
* argValues
);
697 [[nodiscard
]] bool readCallIndirect(uint32_t* funcTypeIndex
,
698 uint32_t* tableIndex
, Value
* callee
,
699 ValueVector
* argValues
);
700 #ifdef ENABLE_WASM_TAIL_CALLS
701 [[nodiscard
]] bool readReturnCall(uint32_t* funcTypeIndex
,
702 ValueVector
* argValues
);
703 [[nodiscard
]] bool readReturnCallIndirect(uint32_t* funcTypeIndex
,
704 uint32_t* tableIndex
, Value
* callee
,
705 ValueVector
* argValues
);
707 #ifdef ENABLE_WASM_GC
708 [[nodiscard
]] bool readCallRef(const FuncType
** funcType
, Value
* callee
,
709 ValueVector
* argValues
);
711 # ifdef ENABLE_WASM_TAIL_CALLS
712 [[nodiscard
]] bool readReturnCallRef(const FuncType
** funcType
, Value
* callee
,
713 ValueVector
* argValues
);
716 [[nodiscard
]] bool readOldCallDirect(uint32_t numFuncImports
,
717 uint32_t* funcTypeIndex
,
718 ValueVector
* argValues
);
719 [[nodiscard
]] bool readOldCallIndirect(uint32_t* funcTypeIndex
, Value
* callee
,
720 ValueVector
* argValues
);
721 [[nodiscard
]] bool readWake(LinearMemoryAddress
<Value
>* addr
, Value
* count
);
722 [[nodiscard
]] bool readWait(LinearMemoryAddress
<Value
>* addr
,
723 ValType valueType
, uint32_t byteSize
,
724 Value
* value
, Value
* timeout
);
725 [[nodiscard
]] bool readFence();
726 [[nodiscard
]] bool readAtomicLoad(LinearMemoryAddress
<Value
>* addr
,
727 ValType resultType
, uint32_t byteSize
);
728 [[nodiscard
]] bool readAtomicStore(LinearMemoryAddress
<Value
>* addr
,
729 ValType resultType
, uint32_t byteSize
,
731 [[nodiscard
]] bool readAtomicRMW(LinearMemoryAddress
<Value
>* addr
,
732 ValType resultType
, uint32_t byteSize
,
734 [[nodiscard
]] bool readAtomicCmpXchg(LinearMemoryAddress
<Value
>* addr
,
735 ValType resultType
, uint32_t byteSize
,
736 Value
* oldValue
, Value
* newValue
);
737 [[nodiscard
]] bool readMemOrTableCopy(bool isMem
,
738 uint32_t* dstMemOrTableIndex
,
740 uint32_t* srcMemOrTableIndex
,
741 Value
* src
, Value
* len
);
742 [[nodiscard
]] bool readDataOrElemDrop(bool isData
, uint32_t* segIndex
);
743 [[nodiscard
]] bool readMemFill(uint32_t* memoryIndex
, Value
* start
,
744 Value
* val
, Value
* len
);
745 [[nodiscard
]] bool readMemOrTableInit(bool isMem
, uint32_t* segIndex
,
746 uint32_t* dstMemOrTableIndex
,
747 Value
* dst
, Value
* src
, Value
* len
);
748 [[nodiscard
]] bool readTableFill(uint32_t* tableIndex
, Value
* start
,
749 Value
* val
, Value
* len
);
750 [[nodiscard
]] bool readMemDiscard(uint32_t* memoryIndex
, Value
* start
,
752 [[nodiscard
]] bool readTableGet(uint32_t* tableIndex
, Value
* index
);
753 [[nodiscard
]] bool readTableGrow(uint32_t* tableIndex
, Value
* initValue
,
755 [[nodiscard
]] bool readTableSet(uint32_t* tableIndex
, Value
* index
,
758 [[nodiscard
]] bool readTableSize(uint32_t* tableIndex
);
760 #ifdef ENABLE_WASM_GC
761 [[nodiscard
]] bool readStructNew(uint32_t* typeIndex
, ValueVector
* argValues
);
762 [[nodiscard
]] bool readStructNewDefault(uint32_t* typeIndex
);
763 [[nodiscard
]] bool readStructGet(uint32_t* typeIndex
, uint32_t* fieldIndex
,
764 FieldWideningOp wideningOp
, Value
* ptr
);
765 [[nodiscard
]] bool readStructSet(uint32_t* typeIndex
, uint32_t* fieldIndex
,
766 Value
* ptr
, Value
* val
);
767 [[nodiscard
]] bool readArrayNew(uint32_t* typeIndex
, Value
* numElements
,
769 [[nodiscard
]] bool readArrayNewFixed(uint32_t* typeIndex
,
770 uint32_t* numElements
,
771 ValueVector
* values
);
772 [[nodiscard
]] bool readArrayNewDefault(uint32_t* typeIndex
,
774 [[nodiscard
]] bool readArrayNewData(uint32_t* typeIndex
, uint32_t* segIndex
,
775 Value
* offset
, Value
* numElements
);
776 [[nodiscard
]] bool readArrayNewElem(uint32_t* typeIndex
, uint32_t* segIndex
,
777 Value
* offset
, Value
* numElements
);
778 [[nodiscard
]] bool readArrayInitData(uint32_t* typeIndex
, uint32_t* segIndex
,
779 Value
* array
, Value
* arrayIndex
,
780 Value
* segOffset
, Value
* length
);
781 [[nodiscard
]] bool readArrayInitElem(uint32_t* typeIndex
, uint32_t* segIndex
,
782 Value
* array
, Value
* arrayIndex
,
783 Value
* segOffset
, Value
* length
);
784 [[nodiscard
]] bool readArrayGet(uint32_t* typeIndex
,
785 FieldWideningOp wideningOp
, Value
* index
,
787 [[nodiscard
]] bool readArraySet(uint32_t* typeIndex
, Value
* val
, Value
* index
,
789 [[nodiscard
]] bool readArrayLen(Value
* ptr
);
790 [[nodiscard
]] bool readArrayCopy(int32_t* elemSize
, bool* elemsAreRefTyped
,
791 Value
* dstArray
, Value
* dstIndex
,
792 Value
* srcArray
, Value
* srcIndex
,
794 [[nodiscard
]] bool readArrayFill(uint32_t* typeIndex
, Value
* array
,
795 Value
* index
, Value
* val
, Value
* length
);
796 [[nodiscard
]] bool readRefTest(bool nullable
, RefType
* sourceType
,
797 RefType
* destType
, Value
* ref
);
798 [[nodiscard
]] bool readRefCast(bool nullable
, RefType
* sourceType
,
799 RefType
* destType
, Value
* ref
);
800 [[nodiscard
]] bool readBrOnCast(bool onSuccess
, uint32_t* labelRelativeDepth
,
801 RefType
* sourceType
, RefType
* destType
,
802 ResultType
* labelType
, ValueVector
* values
);
803 [[nodiscard
]] bool readRefConversion(RefType operandType
, RefType resultType
,
804 Value
* operandValue
);
807 #ifdef ENABLE_WASM_SIMD
808 [[nodiscard
]] bool readLaneIndex(uint32_t inputLanes
, uint32_t* laneIndex
);
809 [[nodiscard
]] bool readExtractLane(ValType resultType
, uint32_t inputLanes
,
810 uint32_t* laneIndex
, Value
* input
);
811 [[nodiscard
]] bool readReplaceLane(ValType operandType
, uint32_t inputLanes
,
812 uint32_t* laneIndex
, Value
* baseValue
,
814 [[nodiscard
]] bool readVectorShift(Value
* baseValue
, Value
* shift
);
815 [[nodiscard
]] bool readVectorShuffle(Value
* v1
, Value
* v2
, V128
* selectMask
);
816 [[nodiscard
]] bool readV128Const(V128
* value
);
817 [[nodiscard
]] bool readLoadSplat(uint32_t byteSize
,
818 LinearMemoryAddress
<Value
>* addr
);
819 [[nodiscard
]] bool readLoadExtend(LinearMemoryAddress
<Value
>* addr
);
820 [[nodiscard
]] bool readLoadLane(uint32_t byteSize
,
821 LinearMemoryAddress
<Value
>* addr
,
822 uint32_t* laneIndex
, Value
* input
);
823 [[nodiscard
]] bool readStoreLane(uint32_t byteSize
,
824 LinearMemoryAddress
<Value
>* addr
,
825 uint32_t* laneIndex
, Value
* input
);
828 [[nodiscard
]] bool readCallBuiltinModuleFunc(
829 const BuiltinModuleFunc
** builtinModuleFunc
, ValueVector
* params
);
831 // At a location where readOp is allowed, peek at the next opcode
832 // without consuming it or updating any internal state.
833 // Never fails: returns uint16_t(Op::Limit) in op->b0 if it can't read.
834 void peekOp(OpBytes
* op
);
836 // ------------------------------------------------------------------------
839 // Set the top N result values.
840 void setResults(size_t count
, const ValueVector
& values
) {
841 MOZ_ASSERT(valueStack_
.length() >= count
);
842 size_t base
= valueStack_
.length() - count
;
843 for (size_t i
= 0; i
< count
; i
++) {
844 valueStack_
[base
+ i
].setValue(values
[i
]);
848 bool getResults(size_t count
, ValueVector
* values
) {
849 MOZ_ASSERT(valueStack_
.length() >= count
);
850 if (!values
->resize(count
)) {
853 size_t base
= valueStack_
.length() - count
;
854 for (size_t i
= 0; i
< count
; i
++) {
855 (*values
)[i
] = valueStack_
[base
+ i
].value();
860 // Set the result value of the current top-of-value-stack expression.
861 void setResult(Value value
) { valueStack_
.back().setValue(value
); }
863 // Return the result value of the current top-of-value-stack expression.
864 Value
getResult() { return valueStack_
.back().value(); }
866 // Return a reference to the top of the control stack.
867 ControlItem
& controlItem() { return controlStack_
.back().controlItem(); }
869 // Return a reference to an element in the control stack.
870 ControlItem
& controlItem(uint32_t relativeDepth
) {
871 return controlStack_
[controlStack_
.length() - 1 - relativeDepth
]
875 // Return the LabelKind of an element in the control stack.
876 LabelKind
controlKind(uint32_t relativeDepth
) {
877 return controlStack_
[controlStack_
.length() - 1 - relativeDepth
].kind();
880 // Return a reference to the outermost element on the control stack.
881 ControlItem
& controlOutermost() { return controlStack_
[0].controlItem(); }
883 // Test whether the control-stack is empty, meaning we've consumed the final
884 // end of the function body.
885 bool controlStackEmpty() const { return controlStack_
.empty(); }
887 // Return the depth of the control stack.
888 size_t controlStackDepth() const { return controlStack_
.length(); }
890 // Find the innermost control item matching a predicate, starting to search
891 // from a certain relative depth, and returning true if such innermost
892 // control item is found. The relative depth of the found item is returned
894 template <typename Predicate
>
895 bool controlFindInnermostFrom(Predicate predicate
, uint32_t fromRelativeDepth
,
896 uint32_t* foundRelativeDepth
) {
897 int32_t fromAbsoluteDepth
= controlStack_
.length() - fromRelativeDepth
- 1;
898 for (int32_t i
= fromAbsoluteDepth
; i
>= 0; i
--) {
899 if (predicate(controlStack_
[i
].kind(), controlStack_
[i
].controlItem())) {
900 *foundRelativeDepth
= controlStack_
.length() - 1 - i
;
908 template <typename Policy
>
909 inline bool OpIter
<Policy
>::checkIsSubtypeOf(StorageType subType
,
910 StorageType superType
) {
911 return CheckIsSubtypeOf(d_
, env_
, lastOpcodeOffset(), subType
, superType
);
914 template <typename Policy
>
915 inline bool OpIter
<Policy
>::checkIsSubtypeOf(ResultType params
,
916 ResultType results
) {
917 if (params
.length() != results
.length()) {
919 JS_smprintf("type mismatch: expected %zu values, got %zu values",
920 results
.length(), params
.length()));
924 return fail(error
.get());
926 for (uint32_t i
= 0; i
< params
.length(); i
++) {
927 ValType param
= params
[i
];
928 ValType result
= results
[i
];
929 if (!checkIsSubtypeOf(param
, result
)) {
#ifdef ENABLE_WASM_GC
// Check that the type at actualTypeIndex is a subtype of the type at
// expectedTypeIndex, comparing them as nullable reference types.
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(uint32_t actualTypeIndex,
                                             uint32_t expectedTypeIndex) {
  const TypeDef& actualTypeDef = env_.types->type(actualTypeIndex);
  const TypeDef& expectedTypeDef = env_.types->type(expectedTypeIndex);
  return CheckIsSubtypeOf(
      d_, env_, lastOpcodeOffset(),
      ValType(RefType::fromTypeDef(&actualTypeDef, true)),
      ValType(RefType::fromTypeDef(&expectedTypeDef, true)));
}
#endif
949 template <typename Policy
>
950 inline bool OpIter
<Policy
>::unrecognizedOpcode(const OpBytes
* expr
) {
951 UniqueChars
error(JS_smprintf("unrecognized opcode: %x %x", expr
->b0
,
952 IsPrefixByte(expr
->b0
) ? expr
->b1
: 0));
957 return fail(error
.get());
960 template <typename Policy
>
961 inline bool OpIter
<Policy
>::fail(const char* msg
) {
962 return d_
.fail(lastOpcodeOffset(), msg
);
965 template <typename Policy
>
966 inline bool OpIter
<Policy
>::fail_ctx(const char* fmt
, const char* context
) {
967 UniqueChars
error(JS_smprintf(fmt
, context
));
971 return fail(error
.get());
974 template <typename Policy
>
975 inline bool OpIter
<Policy
>::failEmptyStack() {
976 return valueStack_
.empty() ? fail("popping value from empty stack")
977 : fail("popping value from outside block");
980 // This function pops exactly one value from the stack, yielding Bottom types in
981 // various cases and therefore making it the caller's responsibility to do the
982 // right thing for StackType::Bottom. Prefer (pop|top)WithType. This is an
983 // optimization for the super-common case where the caller is statically
984 // expecting the resulttype `[valtype]`.
985 template <typename Policy
>
986 inline bool OpIter
<Policy
>::popStackType(StackType
* type
, Value
* value
) {
987 Control
& block
= controlStack_
.back();
989 MOZ_ASSERT(valueStack_
.length() >= block
.valueStackBase());
990 if (MOZ_UNLIKELY(valueStack_
.length() == block
.valueStackBase())) {
991 // If the base of this block's stack is polymorphic, then we can pop a
992 // dummy value of the bottom type; it won't be used since we're in
994 if (block
.polymorphicBase()) {
995 *type
= StackType::bottom();
998 // Maintain the invariant that, after a pop, there is always memory
999 // reserved to push a value infallibly.
1000 return valueStack_
.reserve(valueStack_
.length() + 1);
1003 return failEmptyStack();
1006 TypeAndValue
& tv
= valueStack_
.back();
1008 *value
= tv
.value();
1009 valueStack_
.popBack();
1013 // This function pops exactly one value from the stack, checking that it has the
1014 // expected type which can either be a specific value type or the bottom type.
1015 template <typename Policy
>
1016 inline bool OpIter
<Policy
>::popWithType(ValType expectedType
, Value
* value
,
1017 StackType
* stackType
) {
1018 if (!popStackType(stackType
, value
)) {
1022 return stackType
->isStackBottom() ||
1023 checkIsSubtypeOf(stackType
->valType(), expectedType
);
1026 // This function pops exactly one value from the stack, checking that it has the
1027 // expected type which can either be a specific value type or the bottom type.
1028 template <typename Policy
>
1029 inline bool OpIter
<Policy
>::popWithType(ValType expectedType
, Value
* value
) {
1030 StackType stackType
;
1031 return popWithType(expectedType
, value
, &stackType
);
1034 template <typename Policy
>
1035 inline bool OpIter
<Policy
>::popWithType(ResultType expected
,
1036 ValueVector
* values
) {
1037 return popWithTypes(expected
, values
);
1040 // Pops each of the given expected types (in reverse, because it's a stack).
1041 template <typename Policy
>
1042 template <typename ValTypeSpanT
>
1043 inline bool OpIter
<Policy
>::popWithTypes(ValTypeSpanT expected
,
1044 ValueVector
* values
) {
1045 size_t expectedLength
= expected
.size();
1046 if (!values
->resize(expectedLength
)) {
1049 for (size_t i
= 0; i
< expectedLength
; i
++) {
1050 size_t reverseIndex
= expectedLength
- i
- 1;
1051 ValType expectedType
= expected
[reverseIndex
];
1052 Value
* value
= &(*values
)[reverseIndex
];
1053 if (!popWithType(expectedType
, value
)) {
1060 // This function pops exactly one value from the stack, checking that it is a
1062 template <typename Policy
>
1063 inline bool OpIter
<Policy
>::popWithRefType(Value
* value
, StackType
* type
) {
1064 if (!popStackType(type
, value
)) {
1068 if (type
->isStackBottom() || type
->valType().isRefType()) {
1072 UniqueChars actualText
= ToString(type
->valType(), env_
.types
);
1077 UniqueChars
error(JS_smprintf(
1078 "type mismatch: expression has type %s but expected a reference type",
1084 return fail(error
.get());
1087 template <typename Policy
>
1088 inline bool OpIter
<Policy
>::checkTopTypeMatches(ResultType expected
,
1089 ValueVector
* values
,
1090 bool rewriteStackTypes
) {
1091 if (expected
.empty()) {
1095 Control
& block
= controlStack_
.back();
1097 size_t expectedLength
= expected
.length();
1098 if (values
&& !values
->resize(expectedLength
)) {
1102 for (size_t i
= 0; i
!= expectedLength
; i
++) {
1103 // We're iterating as-if we were popping each expected/actual type one by
1104 // one, which means iterating the array of expected results backwards.
1105 // The "current" value stack length refers to what the value stack length
1106 // would have been if we were popping it.
1107 size_t reverseIndex
= expectedLength
- i
- 1;
1108 ValType expectedType
= expected
[reverseIndex
];
1109 auto collectValue
= [&](const Value
& v
) {
1111 (*values
)[reverseIndex
] = v
;
1115 size_t currentValueStackLength
= valueStack_
.length() - i
;
1117 MOZ_ASSERT(currentValueStackLength
>= block
.valueStackBase());
1118 if (currentValueStackLength
== block
.valueStackBase()) {
1119 if (!block
.polymorphicBase()) {
1120 return failEmptyStack();
1123 // If the base of this block's stack is polymorphic, then we can just
1124 // pull out as many fake values as we need to validate, and create dummy
1125 // stack entries accordingly; they won't be used since we're in
1126 // unreachable code. However, if `rewriteStackTypes` is true, we must
1127 // set the types on these new entries to whatever `expected` requires
1129 TypeAndValue newTandV
=
1130 rewriteStackTypes
? TypeAndValue(expectedType
) : TypeAndValue();
1131 if (!valueStack_
.insert(valueStack_
.begin() + currentValueStackLength
,
1136 collectValue(Value());
1138 TypeAndValue
& observed
= valueStack_
[currentValueStackLength
- 1];
1140 if (observed
.type().isStackBottom()) {
1141 collectValue(Value());
1143 if (!checkIsSubtypeOf(observed
.type().valType(), expectedType
)) {
1147 collectValue(observed
.value());
1150 if (rewriteStackTypes
) {
1151 observed
.setType(StackType(expectedType
));
1158 template <typename Policy
>
1159 inline bool OpIter
<Policy
>::pushControl(LabelKind kind
, BlockType type
) {
1160 ResultType paramType
= type
.params();
1163 if (!checkTopTypeMatches(paramType
, &values
, /*rewriteStackTypes=*/true)) {
1166 MOZ_ASSERT(valueStack_
.length() >= paramType
.length());
1167 uint32_t valueStackBase
= valueStack_
.length() - paramType
.length();
1168 return controlStack_
.emplaceBack(kind
, type
, valueStackBase
);
1171 template <typename Policy
>
1172 inline bool OpIter
<Policy
>::checkStackAtEndOfBlock(ResultType
* expectedType
,
1173 ValueVector
* values
) {
1174 Control
& block
= controlStack_
.back();
1175 *expectedType
= block
.type().results();
1177 MOZ_ASSERT(valueStack_
.length() >= block
.valueStackBase());
1178 if (expectedType
->length() < valueStack_
.length() - block
.valueStackBase()) {
1179 return fail("unused values not explicitly dropped by end of block");
1182 return checkTopTypeMatches(*expectedType
, values
,
1183 /*rewriteStackTypes=*/true);
1186 template <typename Policy
>
1187 inline bool OpIter
<Policy
>::getControl(uint32_t relativeDepth
,
1188 Control
** controlEntry
) {
1189 if (relativeDepth
>= controlStack_
.length()) {
1190 return fail("branch depth exceeds current nesting level");
1193 *controlEntry
= &controlStack_
[controlStack_
.length() - 1 - relativeDepth
];
1197 template <typename Policy
>
1198 inline bool OpIter
<Policy
>::readBlockType(BlockType
* type
) {
1200 if (!d_
.peekByte(&nextByte
)) {
1201 return fail("unable to read block type");
1204 if (nextByte
== uint8_t(TypeCode::BlockVoid
)) {
1205 d_
.uncheckedReadFixedU8();
1206 *type
= BlockType::VoidToVoid();
1210 if ((nextByte
& SLEB128SignMask
) == SLEB128SignBit
) {
1212 if (!readValType(&v
)) {
1215 *type
= BlockType::VoidToSingle(v
);
1220 if (!d_
.readVarS32(&x
) || x
< 0 || uint32_t(x
) >= env_
.types
->length()) {
1221 return fail("invalid block type type index");
1224 const TypeDef
* typeDef
= &env_
.types
->type(x
);
1225 if (!typeDef
->isFuncType()) {
1226 return fail("block type type index must be func type");
1229 *type
= BlockType::Func(typeDef
->funcType());
1234 template <typename Policy
>
1235 inline bool OpIter
<Policy
>::readOp(OpBytes
* op
) {
1236 MOZ_ASSERT(!controlStack_
.empty());
1238 offsetOfLastReadOp_
= d_
.currentOffset();
1240 if (MOZ_UNLIKELY(!d_
.readOp(op
))) {
1241 return fail("unable to read opcode");
1251 template <typename Policy
>
1252 inline void OpIter
<Policy
>::peekOp(OpBytes
* op
) {
1253 const uint8_t* pos
= d_
.currentPosition();
1255 if (MOZ_UNLIKELY(!d_
.readOp(op
))) {
1256 op
->b0
= uint16_t(Op::Limit
);
1259 d_
.rollbackPosition(pos
);
1262 template <typename Policy
>
1263 inline bool OpIter
<Policy
>::startFunction(uint32_t funcIndex
,
1264 const ValTypeVector
& locals
) {
1265 MOZ_ASSERT(kind_
== OpIter::Func
);
1266 MOZ_ASSERT(elseParamStack_
.empty());
1267 MOZ_ASSERT(valueStack_
.empty());
1268 MOZ_ASSERT(controlStack_
.empty());
1269 MOZ_ASSERT(op_
.b0
== uint16_t(Op::Limit
));
1270 MOZ_ASSERT(maxInitializedGlobalsIndexPlus1_
== 0);
1271 BlockType type
= BlockType::FuncResults(*env_
.funcs
[funcIndex
].type
);
1273 size_t numArgs
= env_
.funcs
[funcIndex
].type
->args().length();
1274 if (!unsetLocals_
.init(locals
, numArgs
)) {
1278 return pushControl(LabelKind::Body
, type
);
1281 template <typename Policy
>
1282 inline bool OpIter
<Policy
>::endFunction(const uint8_t* bodyEnd
) {
1283 if (d_
.currentPosition() != bodyEnd
) {
1284 return fail("function body length mismatch");
1287 if (!controlStack_
.empty()) {
1288 return fail("unbalanced function body control flow");
1290 MOZ_ASSERT(elseParamStack_
.empty());
1291 MOZ_ASSERT(unsetLocals_
.empty());
1294 op_
= OpBytes(Op::Limit
);
1296 valueStack_
.clear();
1300 template <typename Policy
>
1301 inline bool OpIter
<Policy
>::startInitExpr(ValType expected
) {
1302 MOZ_ASSERT(kind_
== OpIter::InitExpr
);
1303 MOZ_ASSERT(elseParamStack_
.empty());
1304 MOZ_ASSERT(valueStack_
.empty());
1305 MOZ_ASSERT(controlStack_
.empty());
1306 MOZ_ASSERT(op_
.b0
== uint16_t(Op::Limit
));
1308 // GC allows accessing any previously defined global, not just those that are
1309 // imported and immutable.
1310 if (env_
.features
.gc
) {
1311 maxInitializedGlobalsIndexPlus1_
= env_
.globals
.length();
1313 maxInitializedGlobalsIndexPlus1_
= env_
.numGlobalImports
;
1316 BlockType type
= BlockType::VoidToSingle(expected
);
1317 return pushControl(LabelKind::Body
, type
);
1320 template <typename Policy
>
1321 inline bool OpIter
<Policy
>::endInitExpr() {
1322 MOZ_ASSERT(controlStack_
.empty());
1323 MOZ_ASSERT(elseParamStack_
.empty());
1326 op_
= OpBytes(Op::Limit
);
1328 valueStack_
.clear();
1332 template <typename Policy
>
1333 inline bool OpIter
<Policy
>::readValType(ValType
* type
) {
1334 return d_
.readValType(*env_
.types
, env_
.features
, type
);
1337 template <typename Policy
>
1338 inline bool OpIter
<Policy
>::readHeapType(bool nullable
, RefType
* type
) {
1339 return d_
.readHeapType(*env_
.types
, env_
.features
, nullable
, type
);
1342 template <typename Policy
>
1343 inline bool OpIter
<Policy
>::readReturn(ValueVector
* values
) {
1344 MOZ_ASSERT(Classify(op_
) == OpKind::Return
);
1346 Control
& body
= controlStack_
[0];
1347 MOZ_ASSERT(body
.kind() == LabelKind::Body
);
1349 if (!popWithType(body
.resultType(), values
)) {
1353 afterUnconditionalBranch();
1357 template <typename Policy
>
1358 inline bool OpIter
<Policy
>::readBlock(ResultType
* paramType
) {
1359 MOZ_ASSERT(Classify(op_
) == OpKind::Block
);
1362 if (!readBlockType(&type
)) {
1366 *paramType
= type
.params();
1367 return pushControl(LabelKind::Block
, type
);
1370 template <typename Policy
>
1371 inline bool OpIter
<Policy
>::readLoop(ResultType
* paramType
) {
1372 MOZ_ASSERT(Classify(op_
) == OpKind::Loop
);
1375 if (!readBlockType(&type
)) {
1379 *paramType
= type
.params();
1380 return pushControl(LabelKind::Loop
, type
);
1383 template <typename Policy
>
1384 inline bool OpIter
<Policy
>::readIf(ResultType
* paramType
, Value
* condition
) {
1385 MOZ_ASSERT(Classify(op_
) == OpKind::If
);
1388 if (!readBlockType(&type
)) {
1392 if (!popWithType(ValType::I32
, condition
)) {
1396 if (!pushControl(LabelKind::Then
, type
)) {
1400 *paramType
= type
.params();
1401 size_t paramsLength
= type
.params().length();
1402 return elseParamStack_
.append(valueStack_
.end() - paramsLength
, paramsLength
);
1405 template <typename Policy
>
1406 inline bool OpIter
<Policy
>::readElse(ResultType
* paramType
,
1407 ResultType
* resultType
,
1408 ValueVector
* thenResults
) {
1409 MOZ_ASSERT(Classify(op_
) == OpKind::Else
);
1411 Control
& block
= controlStack_
.back();
1412 if (block
.kind() != LabelKind::Then
) {
1413 return fail("else can only be used within an if");
1416 *paramType
= block
.type().params();
1417 if (!checkStackAtEndOfBlock(resultType
, thenResults
)) {
1421 valueStack_
.shrinkTo(block
.valueStackBase());
1423 size_t nparams
= block
.type().params().length();
1424 MOZ_ASSERT(elseParamStack_
.length() >= nparams
);
1425 valueStack_
.infallibleAppend(elseParamStack_
.end() - nparams
, nparams
);
1426 elseParamStack_
.shrinkBy(nparams
);
1428 // Reset local state to the beginning of the 'if' block for the new block
1429 // started by 'else'.
1430 unsetLocals_
.resetToBlock(controlStack_
.length() - 1);
1432 block
.switchToElse();
1436 template <typename Policy
>
1437 inline bool OpIter
<Policy
>::readEnd(LabelKind
* kind
, ResultType
* type
,
1438 ValueVector
* results
,
1439 ValueVector
* resultsForEmptyElse
) {
1440 MOZ_ASSERT(Classify(op_
) == OpKind::End
);
1442 Control
& block
= controlStack_
.back();
1444 if (!checkStackAtEndOfBlock(type
, results
)) {
1448 if (block
.kind() == LabelKind::Then
) {
1449 ResultType params
= block
.type().params();
1450 // If an `if` block ends with `end` instead of `else`, then the `else` block
1451 // implicitly passes the `if` parameters as the `else` results. In that
1452 // case, assert that the `if`'s param type matches the result type.
1453 if (params
!= block
.type().results()) {
1454 return fail("if without else with a result value");
1457 size_t nparams
= params
.length();
1458 MOZ_ASSERT(elseParamStack_
.length() >= nparams
);
1459 if (!resultsForEmptyElse
->resize(nparams
)) {
1462 const TypeAndValue
* elseParams
= elseParamStack_
.end() - nparams
;
1463 for (size_t i
= 0; i
< nparams
; i
++) {
1464 (*resultsForEmptyElse
)[i
] = elseParams
[i
].value();
1466 elseParamStack_
.shrinkBy(nparams
);
1469 *kind
= block
.kind();
1473 template <typename Policy
>
1474 inline void OpIter
<Policy
>::popEnd() {
1475 MOZ_ASSERT(Classify(op_
) == OpKind::End
);
1477 controlStack_
.popBack();
1478 unsetLocals_
.resetToBlock(controlStack_
.length());
1481 template <typename Policy
>
1482 inline bool OpIter
<Policy
>::checkBranchValueAndPush(uint32_t relativeDepth
,
1484 ValueVector
* values
,
1485 bool rewriteStackTypes
) {
1486 Control
* block
= nullptr;
1487 if (!getControl(relativeDepth
, &block
)) {
1491 *type
= block
->branchTargetType();
1492 return checkTopTypeMatches(*type
, values
, rewriteStackTypes
);
1495 template <typename Policy
>
1496 inline bool OpIter
<Policy
>::readBr(uint32_t* relativeDepth
, ResultType
* type
,
1497 ValueVector
* values
) {
1498 MOZ_ASSERT(Classify(op_
) == OpKind::Br
);
1500 if (!readVarU32(relativeDepth
)) {
1501 return fail("unable to read br depth");
1504 if (!checkBranchValueAndPush(*relativeDepth
, type
, values
,
1505 /*rewriteStackTypes=*/false)) {
1509 afterUnconditionalBranch();
1513 template <typename Policy
>
1514 inline bool OpIter
<Policy
>::readBrIf(uint32_t* relativeDepth
, ResultType
* type
,
1515 ValueVector
* values
, Value
* condition
) {
1516 MOZ_ASSERT(Classify(op_
) == OpKind::BrIf
);
1518 if (!readVarU32(relativeDepth
)) {
1519 return fail("unable to read br_if depth");
1522 if (!popWithType(ValType::I32
, condition
)) {
1526 return checkBranchValueAndPush(*relativeDepth
, type
, values
,
1527 /*rewriteStackTypes=*/true);
1530 #define UNKNOWN_ARITY UINT32_MAX
1532 template <typename Policy
>
1533 inline bool OpIter
<Policy
>::checkBrTableEntryAndPush(
1534 uint32_t* relativeDepth
, ResultType prevBranchType
, ResultType
* type
,
1535 ValueVector
* branchValues
) {
1536 if (!readVarU32(relativeDepth
)) {
1537 return fail("unable to read br_table depth");
1540 Control
* block
= nullptr;
1541 if (!getControl(*relativeDepth
, &block
)) {
1545 *type
= block
->branchTargetType();
1547 if (prevBranchType
.valid()) {
1548 if (prevBranchType
.length() != type
->length()) {
1549 return fail("br_table targets must all have the same arity");
1552 // Avoid re-collecting the same values for subsequent branch targets.
1553 branchValues
= nullptr;
1556 return checkTopTypeMatches(*type
, branchValues
, /*rewriteStackTypes=*/false);
1559 template <typename Policy
>
1560 inline bool OpIter
<Policy
>::readBrTable(Uint32Vector
* depths
,
1561 uint32_t* defaultDepth
,
1562 ResultType
* defaultBranchType
,
1563 ValueVector
* branchValues
,
1565 MOZ_ASSERT(Classify(op_
) == OpKind::BrTable
);
1567 uint32_t tableLength
;
1568 if (!readVarU32(&tableLength
)) {
1569 return fail("unable to read br_table table length");
1572 if (tableLength
> MaxBrTableElems
) {
1573 return fail("br_table too big");
1576 if (!popWithType(ValType::I32
, index
)) {
1580 if (!depths
->resize(tableLength
)) {
1584 ResultType prevBranchType
;
1585 for (uint32_t i
= 0; i
< tableLength
; i
++) {
1586 ResultType branchType
;
1587 if (!checkBrTableEntryAndPush(&(*depths
)[i
], prevBranchType
, &branchType
,
1591 prevBranchType
= branchType
;
1594 if (!checkBrTableEntryAndPush(defaultDepth
, prevBranchType
, defaultBranchType
,
1599 MOZ_ASSERT(defaultBranchType
->valid());
1601 afterUnconditionalBranch();
1605 #undef UNKNOWN_ARITY
1607 template <typename Policy
>
1608 inline bool OpIter
<Policy
>::readTry(ResultType
* paramType
) {
1609 MOZ_ASSERT(Classify(op_
) == OpKind::Try
);
1610 featureUsage_
|= FeatureUsage::LegacyExceptions
;
1613 if (!readBlockType(&type
)) {
1617 *paramType
= type
.params();
1618 return pushControl(LabelKind::Try
, type
);
// Bit flags carried by each catch clause of a `try_table` instruction.
enum class TryTableCatchFlags : uint8_t {
  CaptureExnRef = 0x1,
  CatchAll = 0x1 << 1,
  AllowedMask = uint8_t(CaptureExnRef) | uint8_t(CatchAll),
};
1627 template <typename Policy
>
1628 inline bool OpIter
<Policy
>::readTryTable(ResultType
* paramType
,
1629 TryTableCatchVector
* catches
) {
1630 MOZ_ASSERT(Classify(op_
) == OpKind::TryTable
);
1633 if (!readBlockType(&type
)) {
1637 *paramType
= type
.params();
1638 if (!pushControl(LabelKind::TryTable
, type
)) {
1642 uint32_t catchesLength
;
1643 if (!readVarU32(&catchesLength
)) {
1644 return fail("failed to read catches length");
1647 if (catchesLength
> MaxTryTableCatches
) {
1648 return fail("too many catches");
1651 if (!catches
->reserve(catchesLength
)) {
1655 for (uint32_t i
= 0; i
< catchesLength
; i
++) {
1656 TryTableCatch tryTableCatch
;
1660 if (!readFixedU8(&flags
)) {
1661 return fail("expected flags");
1663 if ((flags
& ~uint8_t(TryTableCatchFlags::AllowedMask
)) != 0) {
1664 return fail("invalid try_table catch flags");
1667 // Decode if this catch wants to capture an exnref
1668 tryTableCatch
.captureExnRef
=
1669 (flags
& uint8_t(TryTableCatchFlags::CaptureExnRef
)) != 0;
1671 // Decode the tag, if any
1672 if ((flags
& uint8_t(TryTableCatchFlags::CatchAll
)) != 0) {
1673 tryTableCatch
.tagIndex
= CatchAllIndex
;
1675 if (!readVarU32(&tryTableCatch
.tagIndex
)) {
1676 return fail("expected tag index");
1678 if (tryTableCatch
.tagIndex
>= env_
.tags
.length()) {
1679 return fail("tag index out of range");
1683 // Decode the target branch and construct the type we need to compare
1684 // against the branch
1685 if (!readVarU32(&tryTableCatch
.labelRelativeDepth
)) {
1686 return fail("unable to read catch depth");
1689 // The target branch depth is relative to the control labels outside of
1690 // this try_table. e.g. `0` is a branch to the control outside of this
1691 // try_table, not to the try_table itself. However, we've already pushed
1692 // the control block for the try_table, and users will read it after we've
1693 // returned, so we need to return the relative depth adjusted by 1 to
1694 // account for our own control block.
1695 if (tryTableCatch
.labelRelativeDepth
== UINT32_MAX
) {
1696 return fail("catch depth out of range");
1698 tryTableCatch
.labelRelativeDepth
+= 1;
1700 // Tagged catches will unpack the exception package and pass it to the
1702 if (tryTableCatch
.tagIndex
!= CatchAllIndex
) {
1703 const TagType
& tagType
= *env_
.tags
[tryTableCatch
.tagIndex
].type
;
1704 ResultType tagResult
= tagType
.resultType();
1705 if (!tagResult
.cloneToVector(&tryTableCatch
.labelType
)) {
1710 // Any captured exnref is the final parameter
1711 if (tryTableCatch
.captureExnRef
&&
1712 !tryTableCatch
.labelType
.append(ValType(RefType::exn()))) {
1717 if (!getControl(tryTableCatch
.labelRelativeDepth
, &block
)) {
1721 ResultType blockTargetType
= block
->branchTargetType();
1722 if (!checkIsSubtypeOf(ResultType::Vector(tryTableCatch
.labelType
),
1727 catches
->infallibleAppend(std::move(tryTableCatch
));
1733 template <typename Policy
>
1734 inline bool OpIter
<Policy
>::readCatch(LabelKind
* kind
, uint32_t* tagIndex
,
1735 ResultType
* paramType
,
1736 ResultType
* resultType
,
1737 ValueVector
* tryResults
) {
1738 MOZ_ASSERT(Classify(op_
) == OpKind::Catch
);
1740 if (!readVarU32(tagIndex
)) {
1741 return fail("expected tag index");
1743 if (*tagIndex
>= env_
.tags
.length()) {
1744 return fail("tag index out of range");
1747 Control
& block
= controlStack_
.back();
1748 if (block
.kind() == LabelKind::CatchAll
) {
1749 return fail("catch cannot follow a catch_all");
1751 if (block
.kind() != LabelKind::Try
&& block
.kind() != LabelKind::Catch
) {
1752 return fail("catch can only be used within a try-catch");
1754 *kind
= block
.kind();
1755 *paramType
= block
.type().params();
1757 if (!checkStackAtEndOfBlock(resultType
, tryResults
)) {
1761 valueStack_
.shrinkTo(block
.valueStackBase());
1762 block
.switchToCatch();
1763 // Reset local state to the beginning of the 'try' block.
1764 unsetLocals_
.resetToBlock(controlStack_
.length() - 1);
1766 return push(env_
.tags
[*tagIndex
].type
->resultType());
1769 template <typename Policy
>
1770 inline bool OpIter
<Policy
>::readCatchAll(LabelKind
* kind
, ResultType
* paramType
,
1771 ResultType
* resultType
,
1772 ValueVector
* tryResults
) {
1773 MOZ_ASSERT(Classify(op_
) == OpKind::CatchAll
);
1775 Control
& block
= controlStack_
.back();
1776 if (block
.kind() != LabelKind::Try
&& block
.kind() != LabelKind::Catch
) {
1777 return fail("catch_all can only be used within a try-catch");
1779 *kind
= block
.kind();
1780 *paramType
= block
.type().params();
1782 if (!checkStackAtEndOfBlock(resultType
, tryResults
)) {
1786 valueStack_
.shrinkTo(block
.valueStackBase());
1787 block
.switchToCatchAll();
1788 // Reset local state to the beginning of the 'try' block.
1789 unsetLocals_
.resetToBlock(controlStack_
.length() - 1);
1793 template <typename Policy
>
1794 inline bool OpIter
<Policy
>::readDelegate(uint32_t* relativeDepth
,
1795 ResultType
* resultType
,
1796 ValueVector
* tryResults
) {
1797 MOZ_ASSERT(Classify(op_
) == OpKind::Delegate
);
1799 Control
& block
= controlStack_
.back();
1800 if (block
.kind() != LabelKind::Try
) {
1801 return fail("delegate can only be used within a try");
1804 uint32_t delegateDepth
;
1805 if (!readVarU32(&delegateDepth
)) {
1806 return fail("unable to read delegate depth");
1809 // Depths for delegate start counting in the surrounding block.
1810 if (delegateDepth
>= controlStack_
.length() - 1) {
1811 return fail("delegate depth exceeds current nesting level");
1813 *relativeDepth
= delegateDepth
+ 1;
1815 // Because `delegate` acts like `end` and ends the block, we will check
1817 return checkStackAtEndOfBlock(resultType
, tryResults
);
1820 // We need popDelegate because readDelegate cannot pop the control stack
1821 // itself, as its caller may need to use the control item for delegate.
1822 template <typename Policy
>
1823 inline void OpIter
<Policy
>::popDelegate() {
1824 MOZ_ASSERT(Classify(op_
) == OpKind::Delegate
);
1826 controlStack_
.popBack();
1827 unsetLocals_
.resetToBlock(controlStack_
.length());
1830 template <typename Policy
>
1831 inline bool OpIter
<Policy
>::readThrow(uint32_t* tagIndex
,
1832 ValueVector
* argValues
) {
1833 MOZ_ASSERT(Classify(op_
) == OpKind::Throw
);
1835 if (!readVarU32(tagIndex
)) {
1836 return fail("expected tag index");
1838 if (*tagIndex
>= env_
.tags
.length()) {
1839 return fail("tag index out of range");
1842 if (!popWithType(env_
.tags
[*tagIndex
].type
->resultType(), argValues
)) {
1846 afterUnconditionalBranch();
1850 template <typename Policy
>
1851 inline bool OpIter
<Policy
>::readThrowRef(Value
* exnRef
) {
1852 MOZ_ASSERT(Classify(op_
) == OpKind::ThrowRef
);
1854 if (!popWithType(ValType(RefType::exn()), exnRef
)) {
1858 afterUnconditionalBranch();
1862 template <typename Policy
>
1863 inline bool OpIter
<Policy
>::readRethrow(uint32_t* relativeDepth
) {
1864 MOZ_ASSERT(Classify(op_
) == OpKind::Rethrow
);
1866 if (!readVarU32(relativeDepth
)) {
1867 return fail("unable to read rethrow depth");
1870 if (*relativeDepth
>= controlStack_
.length()) {
1871 return fail("rethrow depth exceeds current nesting level");
1873 LabelKind kind
= controlKind(*relativeDepth
);
1874 if (kind
!= LabelKind::Catch
&& kind
!= LabelKind::CatchAll
) {
1875 return fail("rethrow target was not a catch block");
1878 afterUnconditionalBranch();
1882 template <typename Policy
>
1883 inline bool OpIter
<Policy
>::readUnreachable() {
1884 MOZ_ASSERT(Classify(op_
) == OpKind::Unreachable
);
1886 afterUnconditionalBranch();
1890 template <typename Policy
>
1891 inline bool OpIter
<Policy
>::readDrop() {
1892 MOZ_ASSERT(Classify(op_
) == OpKind::Drop
);
1895 return popStackType(&type
, &value
);
1898 template <typename Policy
>
1899 inline bool OpIter
<Policy
>::readUnary(ValType operandType
, Value
* input
) {
1900 MOZ_ASSERT(Classify(op_
) == OpKind::Unary
);
1902 if (!popWithType(operandType
, input
)) {
1906 infalliblePush(operandType
);
1911 template <typename Policy
>
1912 inline bool OpIter
<Policy
>::readConversion(ValType operandType
,
1913 ValType resultType
, Value
* input
) {
1914 MOZ_ASSERT(Classify(op_
) == OpKind::Conversion
);
1916 if (!popWithType(operandType
, input
)) {
1920 infalliblePush(resultType
);
1925 template <typename Policy
>
1926 inline bool OpIter
<Policy
>::readBinary(ValType operandType
, Value
* lhs
,
1928 MOZ_ASSERT(Classify(op_
) == OpKind::Binary
);
1930 if (!popWithType(operandType
, rhs
)) {
1934 if (!popWithType(operandType
, lhs
)) {
1938 infalliblePush(operandType
);
1943 template <typename Policy
>
1944 inline bool OpIter
<Policy
>::readComparison(ValType operandType
, Value
* lhs
,
1946 MOZ_ASSERT(Classify(op_
) == OpKind::Comparison
);
1948 if (!popWithType(operandType
, rhs
)) {
1952 if (!popWithType(operandType
, lhs
)) {
1956 infalliblePush(ValType::I32
);
1961 template <typename Policy
>
1962 inline bool OpIter
<Policy
>::readTernary(ValType operandType
, Value
* v0
,
1963 Value
* v1
, Value
* v2
) {
1964 MOZ_ASSERT(Classify(op_
) == OpKind::Ternary
);
1966 if (!popWithType(operandType
, v2
)) {
1970 if (!popWithType(operandType
, v1
)) {
1974 if (!popWithType(operandType
, v0
)) {
1978 infalliblePush(operandType
);
1983 template <typename Policy
>
1984 inline bool OpIter
<Policy
>::readLinearMemoryAddress(
1985 uint32_t byteSize
, LinearMemoryAddress
<Value
>* addr
) {
1987 if (!readVarU32(&flags
)) {
1988 return fail("unable to read load alignment");
1991 uint8_t alignLog2
= flags
& ((1 << 6) - 1);
1992 uint8_t hasMemoryIndex
= flags
& (1 << 6);
1993 uint8_t undefinedBits
= flags
& ~((1 << 7) - 1);
1995 if (undefinedBits
!= 0) {
1996 return fail("invalid memory flags");
1999 if (hasMemoryIndex
!= 0) {
2000 if (!readVarU32(&addr
->memoryIndex
)) {
2001 return fail("unable to read memory index");
2004 addr
->memoryIndex
= 0;
2007 if (addr
->memoryIndex
>= env_
.numMemories()) {
2008 return fail("memory index out of range");
2011 if (!readVarU64(&addr
->offset
)) {
2012 return fail("unable to read load offset");
2015 IndexType it
= env_
.memories
[addr
->memoryIndex
].indexType();
2016 if (it
== IndexType::I32
&& addr
->offset
> UINT32_MAX
) {
2017 return fail("offset too large for memory type");
2020 if (alignLog2
>= 32 || (uint32_t(1) << alignLog2
) > byteSize
) {
2021 return fail("greater than natural alignment");
2024 if (!popWithType(ToValType(it
), &addr
->base
)) {
2028 addr
->align
= uint32_t(1) << alignLog2
;
2032 template <typename Policy
>
2033 inline bool OpIter
<Policy
>::readLinearMemoryAddressAligned(
2034 uint32_t byteSize
, LinearMemoryAddress
<Value
>* addr
) {
2035 if (!readLinearMemoryAddress(byteSize
, addr
)) {
2039 if (addr
->align
!= byteSize
) {
2040 return fail("not natural alignment");
2046 template <typename Policy
>
2047 inline bool OpIter
<Policy
>::readLoad(ValType resultType
, uint32_t byteSize
,
2048 LinearMemoryAddress
<Value
>* addr
) {
2049 MOZ_ASSERT(Classify(op_
) == OpKind::Load
);
2051 if (!readLinearMemoryAddress(byteSize
, addr
)) {
2055 infalliblePush(resultType
);
2060 template <typename Policy
>
2061 inline bool OpIter
<Policy
>::readStore(ValType resultType
, uint32_t byteSize
,
2062 LinearMemoryAddress
<Value
>* addr
,
2064 MOZ_ASSERT(Classify(op_
) == OpKind::Store
);
2066 if (!popWithType(resultType
, value
)) {
2070 return readLinearMemoryAddress(byteSize
, addr
);
2073 template <typename Policy
>
2074 inline bool OpIter
<Policy
>::readTeeStore(ValType resultType
, uint32_t byteSize
,
2075 LinearMemoryAddress
<Value
>* addr
,
2077 MOZ_ASSERT(Classify(op_
) == OpKind::TeeStore
);
2079 if (!popWithType(resultType
, value
)) {
2083 if (!readLinearMemoryAddress(byteSize
, addr
)) {
2087 infalliblePush(TypeAndValue(resultType
, *value
));
2091 template <typename Policy
>
2092 inline bool OpIter
<Policy
>::readNop() {
2093 MOZ_ASSERT(Classify(op_
) == OpKind::Nop
);
2098 template <typename Policy
>
2099 inline bool OpIter
<Policy
>::readMemorySize(uint32_t* memoryIndex
) {
2100 MOZ_ASSERT(Classify(op_
) == OpKind::MemorySize
);
2102 if (!readVarU32(memoryIndex
)) {
2103 return fail("failed to read memory flags");
2106 if (*memoryIndex
>= env_
.numMemories()) {
2107 return fail("memory index out of range for memory.size");
2110 ValType ptrType
= ToValType(env_
.memories
[*memoryIndex
].indexType());
2111 return push(ptrType
);
2114 template <typename Policy
>
2115 inline bool OpIter
<Policy
>::readMemoryGrow(uint32_t* memoryIndex
,
2117 MOZ_ASSERT(Classify(op_
) == OpKind::MemoryGrow
);
2119 if (!readVarU32(memoryIndex
)) {
2120 return fail("failed to read memory flags");
2123 if (*memoryIndex
>= env_
.numMemories()) {
2124 return fail("memory index out of range for memory.grow");
2127 ValType ptrType
= ToValType(env_
.memories
[*memoryIndex
].indexType());
2128 if (!popWithType(ptrType
, input
)) {
2132 infalliblePush(ptrType
);
// Validates `select` (untyped) and `select (result t)` (typed). The typed
// form reads an explicit single result type; the untyped form infers the
// result type from the two operand stack types, which must be valid for
// untyped select and must agree (modulo the bottom type in unreachable code).
template <typename Policy>
inline bool OpIter<Policy>::readSelect(bool typed, StackType* type,
                                       Value* trueValue, Value* falseValue,
                                       Value* condition) {
  MOZ_ASSERT(Classify(op_) == OpKind::Select);

  if (typed) {
    uint32_t length;
    if (!readVarU32(&length)) {
      return fail("unable to read select result length");
    }
    // The encoding allows a result-type vector, but only length 1 is valid.
    if (length != 1) {
      return fail("bad number of results");
    }
    ValType result;
    if (!readValType(&result)) {
      return fail("invalid result type for select");
    }

    if (!popWithType(ValType::I32, condition)) {
      return false;
    }
    if (!popWithType(result, falseValue)) {
      return false;
    }
    if (!popWithType(result, trueValue)) {
      return false;
    }

    *type = StackType(result);
    infalliblePush(*type);
    return true;
  }

  if (!popWithType(ValType::I32, condition)) {
    return false;
  }

  StackType falseType;
  if (!popStackType(&falseType, falseValue)) {
    return false;
  }

  StackType trueType;
  if (!popStackType(&trueType, trueValue)) {
    return false;
  }

  if (!falseType.isValidForUntypedSelect() ||
      !trueType.isValidForUntypedSelect()) {
    return fail("invalid types for untyped select");
  }

  // Prefer the non-bottom operand type; bottom only arises in unreachable
  // code and matches anything.
  if (falseType.isStackBottom()) {
    *type = trueType;
  } else if (trueType.isStackBottom() || falseType == trueType) {
    *type = falseType;
  } else {
    return fail("select operand types must match");
  }

  infalliblePush(*type);
  return true;
}
// Validates local.get: the index must be in range and the local must already
// be initialized (non-defaultable locals start unset), then pushes the
// local's type.
template <typename Policy>
inline bool OpIter<Policy>::readGetLocal(const ValTypeVector& locals,
                                         uint32_t* id) {
  MOZ_ASSERT(Classify(op_) == OpKind::GetLocal);

  if (!readVarU32(id)) {
    return fail("unable to read local index");
  }

  if (*id >= locals.length()) {
    return fail("local.get index out of range");
  }

  if (unsetLocals_.isUnset(*id)) {
    return fail("local.get read from unset local");
  }

  return push(locals[*id]);
}
// Validates local.set: checks the index, marks a previously-unset local as
// initialized for the current control depth, then pops a value of the
// local's type.
template <typename Policy>
inline bool OpIter<Policy>::readSetLocal(const ValTypeVector& locals,
                                         uint32_t* id, Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::SetLocal);

  if (!readVarU32(id)) {
    return fail("unable to read local index");
  }

  if (*id >= locals.length()) {
    return fail("local.set index out of range");
  }

  // Record the control depth so the "set" state can be undone when the
  // enclosing control frame is exited.
  if (unsetLocals_.isUnset(*id)) {
    unsetLocals_.set(*id, controlStackDepth());
  }

  return popWithType(locals[*id], value);
}
// Validates local.tee: like local.set, but the value stays on the stack, so
// the top of the stack is type-checked (and rewritten) rather than popped.
template <typename Policy>
inline bool OpIter<Policy>::readTeeLocal(const ValTypeVector& locals,
                                         uint32_t* id, Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::TeeLocal);

  if (!readVarU32(id)) {
    return fail("unable to read local index");
  }

  if (*id >= locals.length()) {
    // NOTE(review): message says "local.set" although this is local.tee —
    // confirm whether the shared wording is intentional before changing it.
    return fail("local.set index out of range");
  }

  if (unsetLocals_.isUnset(*id)) {
    unsetLocals_.set(*id, controlStackDepth());
  }

  // tee leaves the operand on the stack: check the top without popping it.
  ValueVector single;
  if (!checkTopTypeMatches(ResultType::Single(locals[*id]), &single,
                           /*rewriteStackTypes=*/true)) {
    return false;
  }

  *value = single[0];
  return true;
}
// Validates global.get. In initializer-expression mode, only immutable
// globals that were defined before this point may be referenced.
template <typename Policy>
inline bool OpIter<Policy>::readGetGlobal(uint32_t* id) {
  MOZ_ASSERT(Classify(op_) == OpKind::GetGlobal);

  if (!d_.readGlobalIndex(id)) {
    return false;
  }

  if (*id >= env_.globals.length()) {
    return fail("global.get index out of range");
  }

  // Initializer expressions can access immutable imported globals, or any
  // previously defined immutable global with GC enabled.
  if (kind_ == OpIter::InitExpr && (env_.globals[*id].isMutable() ||
                                    *id >= maxInitializedGlobalsIndexPlus1_)) {
    return fail(
        "global.get in initializer expression must reference a global "
        "immutable import");
  }

  return push(env_.globals[*id].type());
}
// Validates global.set: the global must exist and be mutable; pops a value
// of the global's type.
template <typename Policy>
inline bool OpIter<Policy>::readSetGlobal(uint32_t* id, Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::SetGlobal);

  if (!d_.readGlobalIndex(id)) {
    return false;
  }

  if (*id >= env_.globals.length()) {
    return fail("global.set index out of range");
  }

  if (!env_.globals[*id].isMutable()) {
    return fail("can't write an immutable global");
  }

  return popWithType(env_.globals[*id].type(), value);
}
// Validates a tee-style global write: like global.set, but the value stays
// on the stack, so the top is type-checked (and rewritten) rather than
// popped.
template <typename Policy>
inline bool OpIter<Policy>::readTeeGlobal(uint32_t* id, Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::TeeGlobal);

  if (!d_.readGlobalIndex(id)) {
    return false;
  }

  if (*id >= env_.globals.length()) {
    // NOTE(review): message says "global.set" although this is the tee form —
    // confirm whether the shared wording is intentional.
    return fail("global.set index out of range");
  }

  if (!env_.globals[*id].isMutable()) {
    return fail("can't write an immutable global");
  }

  // Check the top of the stack without popping it.
  ValueVector single;
  if (!checkTopTypeMatches(ResultType::Single(env_.globals[*id].type()),
                           &single,
                           /*rewriteStackTypes=*/true)) {
    return false;
  }

  MOZ_ASSERT(single.length() == 1);
  *value = single[0];
  return true;
}
2340 template <typename Policy
>
2341 inline bool OpIter
<Policy
>::readI32Const(int32_t* i32
) {
2342 MOZ_ASSERT(Classify(op_
) == OpKind::I32
);
2344 if (!d_
.readI32Const(i32
)) {
2348 return push(ValType::I32
);
2351 template <typename Policy
>
2352 inline bool OpIter
<Policy
>::readI64Const(int64_t* i64
) {
2353 MOZ_ASSERT(Classify(op_
) == OpKind::I64
);
2355 if (!d_
.readI64Const(i64
)) {
2359 return push(ValType::I64
);
2362 template <typename Policy
>
2363 inline bool OpIter
<Policy
>::readF32Const(float* f32
) {
2364 MOZ_ASSERT(Classify(op_
) == OpKind::F32
);
2366 if (!d_
.readF32Const(f32
)) {
2370 return push(ValType::F32
);
2373 template <typename Policy
>
2374 inline bool OpIter
<Policy
>::readF64Const(double* f64
) {
2375 MOZ_ASSERT(Classify(op_
) == OpKind::F64
);
2377 if (!d_
.readF64Const(f64
)) {
2381 return push(ValType::F64
);
// Validates ref.func: the function index must be in range and, inside a
// function body, the function must have been declared referenceable in a
// section preceding the code section.
template <typename Policy>
inline bool OpIter<Policy>::readRefFunc(uint32_t* funcIndex) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefFunc);

  if (!d_.readFuncIndex(funcIndex)) {
    return false;
  }
  if (*funcIndex >= env_.funcs.length()) {
    return fail("function index out of range");
  }
  if (kind_ == OpIter::Func && !env_.funcs[*funcIndex].canRefFunc()) {
    return fail(
        "function index is not declared in a section before the code section");
  }

#ifdef ENABLE_WASM_GC
  // When function references enabled, push type index on the stack, e.g. for
  // validation of the call_ref instruction.
  if (env_.gcEnabled()) {
    const uint32_t typeIndex = env_.funcs[*funcIndex].typeIndex;
    const TypeDef& typeDef = env_.types->type(typeIndex);
    return push(RefType::fromTypeDef(&typeDef, false));
  }
#endif

  return push(RefType::func());
}
// Validates ref.null: decodes the heap type immediate and pushes a nullable
// reference of that type.
template <typename Policy>
inline bool OpIter<Policy>::readRefNull(RefType* type) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefNull);

  if (!d_.readRefNull(*env_.types, env_.features, type)) {
    return false;
  }
  return push(*type);
}
// Validates ref.is_null: pops any reference-typed operand and pushes an i32.
template <typename Policy>
inline bool OpIter<Policy>::readRefIsNull(Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::Conversion);

  StackType type;
  if (!popWithRefType(input, &type)) {
    return false;
  }
  return push(ValType::I32);
}
// Validates ref.as_non_null: pops a reference and re-pushes it with its
// non-nullable type (bottom is pushed unchanged in unreachable code).
template <typename Policy>
inline bool OpIter<Policy>::readRefAsNonNull(Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefAsNonNull);

  StackType type;
  if (!popWithRefType(input, &type)) {
    return false;
  }

  if (type.isStackBottom()) {
    infalliblePush(type);
  } else {
    infalliblePush(TypeAndValue(type.asNonNullable(), *input));
  }
  return true;
}
// Validates br_on_null: branches to `relativeDepth` when the popped
// reference is null; on fall-through the non-null reference is pushed back
// with a non-nullable type.
template <typename Policy>
inline bool OpIter<Policy>::readBrOnNull(uint32_t* relativeDepth,
                                         ResultType* type, ValueVector* values,
                                         Value* condition) {
  MOZ_ASSERT(Classify(op_) == OpKind::BrOnNull);

  if (!readVarU32(relativeDepth)) {
    return fail("unable to read br_on_null depth");
  }

  StackType refType;
  if (!popWithRefType(condition, &refType)) {
    return false;
  }

  if (!checkBranchValueAndPush(*relativeDepth, type, values,
                               /*rewriteStackTypes=*/true)) {
    return false;
  }

  if (refType.isStackBottom()) {
    return push(refType);
  }
  return push(TypeAndValue(refType.asNonNullable(), *condition));
}
// Validates br_on_non_null: branches to `relativeDepth` (whose target type
// must end in a reference) when the popped reference is non-null; the
// null path falls through without the reference value.
template <typename Policy>
inline bool OpIter<Policy>::readBrOnNonNull(uint32_t* relativeDepth,
                                            ResultType* type,
                                            ValueVector* values,
                                            Value* condition) {
  MOZ_ASSERT(Classify(op_) == OpKind::BrOnNonNull);

  if (!readVarU32(relativeDepth)) {
    return fail("unable to read br_on_non_null depth");
  }

  Control* block = nullptr;
  if (!getControl(*relativeDepth, &block)) {
    return false;
  }

  *type = block->branchTargetType();

  // Check we at least have one type in the branch target type.
  if (type->length() < 1) {
    return fail("type mismatch: target block type expected to be [_, ref]");
  }

  // Pop the condition reference.
  StackType refType;
  if (!popWithRefType(condition, &refType)) {
    return false;
  }

  // Push non-nullable version of condition reference on the stack, prior
  // checking the target type below.
  if (!(refType.isStackBottom()
            ? push(refType)
            : push(TypeAndValue(refType.asNonNullable(), *condition)))) {
    return false;
  }

  // Check if the type stack matches the branch target type.
  if (!checkTopTypeMatches(*type, values, /*rewriteStackTypes=*/true)) {
    return false;
  }

  // Pop the condition reference -- the null-branch does not receive the value.
  StackType unusedType;
  Value unusedValue;
  return popStackType(&unusedType, &unusedValue);
}
// Pops the argument values for a call with the given expected parameter
// types, filling `values` so that values[i] corresponds to expectedTypes[i].
template <typename Policy>
inline bool OpIter<Policy>::popCallArgs(const ValTypeVector& expectedTypes,
                                        ValueVector* values) {
  // Iterate through the argument types backward so that pops occur in the
  // right order.

  if (!values->resize(expectedTypes.length())) {
    return false;
  }

  for (int32_t i = int32_t(expectedTypes.length()) - 1; i >= 0; i--) {
    if (!popWithType(expectedTypes[i], &(*values)[i])) {
      return false;
    }
  }

  return true;
}
// Validates call: reads the callee index, pops the arguments per the
// callee's signature, and pushes its results.
template <typename Policy>
inline bool OpIter<Policy>::readCall(uint32_t* funcTypeIndex,
                                     ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::Call);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read call function index");
  }

  if (*funcTypeIndex >= env_.funcs.length()) {
    return fail("callee index out of range");
  }

  const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
#ifdef ENABLE_WASM_TAIL_CALLS
// Validates return_call: like call, but the callee's results must be
// subtypes of the function body's results, and the instruction terminates
// the current block unconditionally.
template <typename Policy>
inline bool OpIter<Policy>::readReturnCall(uint32_t* funcTypeIndex,
                                           ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCall);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read call function index");
  }

  if (*funcTypeIndex >= env_.funcs.length()) {
    return fail("callee index out of range");
  }

  const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector(funcType.results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
// Validates call_indirect: reads the signature and table indices, checks the
// table holds functions, pops the i32 callee index and the arguments, and
// pushes the signature's results.
template <typename Policy>
inline bool OpIter<Policy>::readCallIndirect(uint32_t* funcTypeIndex,
                                             uint32_t* tableIndex,
                                             Value* callee,
                                             ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::CallIndirect);
  MOZ_ASSERT(funcTypeIndex != tableIndex);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read call_indirect signature index");
  }

  if (*funcTypeIndex >= env_.numTypes()) {
    return fail("signature index out of range");
  }

  if (!readVarU32(tableIndex)) {
    return fail("unable to read call_indirect table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    // Special case this for improved user experience.
    if (!env_.tables.length()) {
      return fail("can't call_indirect without a table");
    }
    return fail("table index out of range for call_indirect");
  }
  if (!env_.tables[*tableIndex].elemType.isFuncHierarchy()) {
    return fail("indirect calls must go through a table of 'funcref'");
  }

  if (!popWithType(ValType::I32, callee)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
  if (!typeDef.isFuncType()) {
    return fail("expected signature type");
  }
  const FuncType& funcType = typeDef.funcType();

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
#ifdef ENABLE_WASM_TAIL_CALLS
// Validates return_call_indirect: like call_indirect, plus the tail-call
// result-subtyping check against the function body, after which the block is
// terminated unconditionally.
template <typename Policy>
inline bool OpIter<Policy>::readReturnCallIndirect(uint32_t* funcTypeIndex,
                                                   uint32_t* tableIndex,
                                                   Value* callee,
                                                   ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCallIndirect);
  MOZ_ASSERT(funcTypeIndex != tableIndex);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read return_call_indirect signature index");
  }
  if (*funcTypeIndex >= env_.numTypes()) {
    return fail("signature index out of range");
  }

  if (!readVarU32(tableIndex)) {
    return fail("unable to read return_call_indirect table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    // Special case this for improved user experience.
    if (!env_.tables.length()) {
      return fail("can't return_call_indirect without a table");
    }
    return fail("table index out of range for return_call_indirect");
  }
  if (!env_.tables[*tableIndex].elemType.isFuncHierarchy()) {
    return fail("indirect calls must go through a table of 'funcref'");
  }

  if (!popWithType(ValType::I32, callee)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
  if (!typeDef.isFuncType()) {
    return fail("expected signature type");
  }
  const FuncType& funcType = typeDef.funcType();

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector(funcType.results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
#ifdef ENABLE_WASM_GC
// Validates call_ref: reads a function-type index, pops a nullable reference
// to exactly that type as the callee, pops the arguments, and pushes the
// results.
template <typename Policy>
inline bool OpIter<Policy>::readCallRef(const FuncType** funcType,
                                        Value* callee, ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::CallRef);

  uint32_t funcTypeIndex;
  if (!readFuncTypeIndex(&funcTypeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(funcTypeIndex);
  *funcType = &typeDef.funcType();

  if (!popWithType(ValType(RefType::fromTypeDef(&typeDef, true)), callee)) {
    return false;
  }

  if (!popCallArgs((*funcType)->args(), argValues)) {
    return false;
  }

  return push(ResultType::Vector((*funcType)->results()));
}
#endif
#if defined(ENABLE_WASM_TAIL_CALLS) && defined(ENABLE_WASM_GC)
// Validates return_call_ref: like call_ref, plus the tail-call
// result-subtyping check against the function body, after which the block is
// terminated unconditionally.
template <typename Policy>
inline bool OpIter<Policy>::readReturnCallRef(const FuncType** funcType,
                                              Value* callee,
                                              ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReturnCallRef);

  uint32_t funcTypeIndex;
  if (!readFuncTypeIndex(&funcTypeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(funcTypeIndex);
  *funcType = &typeDef.funcType();

  if (!popWithType(ValType(RefType::fromTypeDef(&typeDef, true)), callee)) {
    return false;
  }

  if (!popCallArgs((*funcType)->args(), argValues)) {
    return false;
  }

  // Check if callee results are subtypes of caller's.
  Control& body = controlStack_[0];
  MOZ_ASSERT(body.kind() == LabelKind::Body);
  if (!checkIsSubtypeOf(ResultType::Vector((*funcType)->results()),
                        body.resultType())) {
    return false;
  }

  afterUnconditionalBranch();
  return true;
}
#endif
// Validates the legacy direct-call encoding, where the immediate is a
// function-definition index that must be biased by the number of imports.
template <typename Policy>
inline bool OpIter<Policy>::readOldCallDirect(uint32_t numFuncImports,
                                              uint32_t* funcTypeIndex,
                                              ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::OldCallDirect);

  uint32_t funcDefIndex;
  if (!readVarU32(&funcDefIndex)) {
    return fail("unable to read call function index");
  }

  // Guard the bias computation below against uint32 overflow.
  if (UINT32_MAX - funcDefIndex < numFuncImports) {
    return fail("callee index out of range");
  }

  *funcTypeIndex = numFuncImports + funcDefIndex;

  if (*funcTypeIndex >= env_.funcs.length()) {
    return fail("callee index out of range");
  }

  const FuncType& funcType = *env_.funcs[*funcTypeIndex].type;

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
// Validates the legacy indirect-call encoding. Unlike call_indirect, the
// callee index is popped after the arguments.
template <typename Policy>
inline bool OpIter<Policy>::readOldCallIndirect(uint32_t* funcTypeIndex,
                                                Value* callee,
                                                ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::OldCallIndirect);

  if (!readVarU32(funcTypeIndex)) {
    return fail("unable to read call_indirect signature index");
  }

  if (*funcTypeIndex >= env_.numTypes()) {
    return fail("signature index out of range");
  }

  const TypeDef& typeDef = env_.types->type(*funcTypeIndex);
  if (!typeDef.isFuncType()) {
    return fail("expected signature type");
  }
  const FuncType& funcType = typeDef.funcType();

  if (!popCallArgs(funcType.args(), argValues)) {
    return false;
  }

  if (!popWithType(ValType::I32, callee)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
// Validates the atomic notify (wake) operation: pops the i32 waiter count,
// reads a 4-byte-aligned linear-memory address, and pushes the i32 result.
template <typename Policy>
inline bool OpIter<Policy>::readWake(LinearMemoryAddress<Value>* addr,
                                     Value* count) {
  MOZ_ASSERT(Classify(op_) == OpKind::Wake);

  if (!popWithType(ValType::I32, count)) {
    return false;
  }

  uint32_t byteSize = 4;  // Per spec; smallest WAIT is i32.

  if (!readLinearMemoryAddressAligned(byteSize, addr)) {
    return false;
  }

  infalliblePush(ValType::I32);
  return true;
}
// Validates an atomic wait: pops the i64 timeout and the expected value of
// `valueType`, reads an aligned linear-memory address, and pushes the i32
// wait result.
template <typename Policy>
inline bool OpIter<Policy>::readWait(LinearMemoryAddress<Value>* addr,
                                     ValType valueType, uint32_t byteSize,
                                     Value* value, Value* timeout) {
  MOZ_ASSERT(Classify(op_) == OpKind::Wait);

  if (!popWithType(ValType::I64, timeout)) {
    return false;
  }

  if (!popWithType(valueType, value)) {
    return false;
  }

  if (!readLinearMemoryAddressAligned(byteSize, addr)) {
    return false;
  }

  infalliblePush(ValType::I32);
  return true;
}
// Validates atomic.fence: reads the memory-order flag byte, which must be
// zero (no other orders are supported).
template <typename Policy>
inline bool OpIter<Policy>::readFence() {
  MOZ_ASSERT(Classify(op_) == OpKind::Fence);

  uint8_t flags;
  if (!readFixedU8(&flags)) {
    return fail("expected memory order after fence");
  }
  if (flags) {
    return fail("non-zero memory order not supported yet");
  }
  return true;
}
// Validates an atomic load: reads an aligned linear-memory address and
// pushes the load's result type.
template <typename Policy>
inline bool OpIter<Policy>::readAtomicLoad(LinearMemoryAddress<Value>* addr,
                                           ValType resultType,
                                           uint32_t byteSize) {
  MOZ_ASSERT(Classify(op_) == OpKind::AtomicLoad);

  if (!readLinearMemoryAddressAligned(byteSize, addr)) {
    return false;
  }

  infalliblePush(resultType);
  return true;
}
// Validates an atomic store: pops the value to store, then reads an aligned
// linear-memory address. Pushes nothing.
template <typename Policy>
inline bool OpIter<Policy>::readAtomicStore(LinearMemoryAddress<Value>* addr,
                                            ValType resultType,
                                            uint32_t byteSize, Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::AtomicStore);

  if (!popWithType(resultType, value)) {
    return false;
  }

  return readLinearMemoryAddressAligned(byteSize, addr);
}
// Validates an atomic read-modify-write: pops the operand, reads an aligned
// linear-memory address, and pushes the old value's type.
template <typename Policy>
inline bool OpIter<Policy>::readAtomicRMW(LinearMemoryAddress<Value>* addr,
                                          ValType resultType, uint32_t byteSize,
                                          Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::AtomicBinOp);

  if (!popWithType(resultType, value)) {
    return false;
  }

  if (!readLinearMemoryAddressAligned(byteSize, addr)) {
    return false;
  }

  infalliblePush(resultType);
  return true;
}
// Validates an atomic compare-exchange: pops the replacement then the
// expected value, reads an aligned linear-memory address, and pushes the
// old value's type.
template <typename Policy>
inline bool OpIter<Policy>::readAtomicCmpXchg(LinearMemoryAddress<Value>* addr,
                                              ValType resultType,
                                              uint32_t byteSize,
                                              Value* oldValue,
                                              Value* newValue) {
  MOZ_ASSERT(Classify(op_) == OpKind::AtomicCompareExchange);

  if (!popWithType(resultType, newValue)) {
    return false;
  }

  if (!popWithType(resultType, oldValue)) {
    return false;
  }

  if (!readLinearMemoryAddressAligned(byteSize, addr)) {
    return false;
  }

  infalliblePush(resultType);
  return true;
}
// Validates memory.copy / table.copy (shared reader, selected by `isMem`).
// For tables the source element type must be a subtype of the destination's;
// for memories the pointer/length types follow the memories' index types.
template <typename Policy>
inline bool OpIter<Policy>::readMemOrTableCopy(bool isMem,
                                               uint32_t* dstMemOrTableIndex,
                                               Value* dst,
                                               uint32_t* srcMemOrTableIndex,
                                               Value* src, Value* len) {
  MOZ_ASSERT(Classify(op_) == OpKind::MemOrTableCopy);
  MOZ_ASSERT(dstMemOrTableIndex != srcMemOrTableIndex);

  // Spec requires (dest, src) as of 2019-10-04.
  if (!readVarU32(dstMemOrTableIndex)) {
    return false;
  }
  if (!readVarU32(srcMemOrTableIndex)) {
    return false;
  }

  if (isMem) {
    if (*srcMemOrTableIndex >= env_.memories.length() ||
        *dstMemOrTableIndex >= env_.memories.length()) {
      return fail("memory index out of range for memory.copy");
    }
  } else {
    if (*dstMemOrTableIndex >= env_.tables.length() ||
        *srcMemOrTableIndex >= env_.tables.length()) {
      return fail("table index out of range for table.copy");
    }
    ValType dstElemType = env_.tables[*dstMemOrTableIndex].elemType;
    ValType srcElemType = env_.tables[*srcMemOrTableIndex].elemType;
    if (!checkIsSubtypeOf(srcElemType, dstElemType)) {
      return false;
    }
  }

  ValType dstPtrType;
  ValType srcPtrType;
  ValType lenType;
  if (isMem) {
    dstPtrType = ToValType(env_.memories[*dstMemOrTableIndex].indexType());
    srcPtrType = ToValType(env_.memories[*srcMemOrTableIndex].indexType());
    // The length is i64 only when both memories are 64-bit.
    if (dstPtrType == ValType::I64 && srcPtrType == ValType::I64) {
      lenType = ValType::I64;
    } else {
      lenType = ValType::I32;
    }
  } else {
    dstPtrType = srcPtrType = lenType = ValType::I32;
  }

  if (!popWithType(lenType, len)) {
    return false;
  }

  if (!popWithType(srcPtrType, src)) {
    return false;
  }

  return popWithType(dstPtrType, dst);
}
// Validates data.drop / elem.drop (shared reader, selected by `isData`).
// data.drop additionally requires a DataCount section.
template <typename Policy>
inline bool OpIter<Policy>::readDataOrElemDrop(bool isData,
                                               uint32_t* segIndex) {
  MOZ_ASSERT(Classify(op_) == OpKind::DataOrElemDrop);

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  if (isData) {
    if (env_.dataCount.isNothing()) {
      return fail("data.drop requires a DataCount section");
    }
    if (*segIndex >= *env_.dataCount) {
      return fail("data.drop segment index out of range");
    }
  } else {
    if (*segIndex >= env_.elemSegments.length()) {
      return fail("element segment index out of range for elem.drop");
    }
  }

  return true;
}
// Validates memory.fill: pops length (memory's index type), i32 fill byte,
// and start address (memory's index type).
template <typename Policy>
inline bool OpIter<Policy>::readMemFill(uint32_t* memoryIndex, Value* start,
                                        Value* val, Value* len) {
  MOZ_ASSERT(Classify(op_) == OpKind::MemFill);

  if (!readVarU32(memoryIndex)) {
    return fail("failed to read memory index");
  }

  if (*memoryIndex >= env_.numMemories()) {
    return fail("memory index out of range for memory.fill");
  }

  ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());

  if (!popWithType(ptrType, len)) {
    return false;
  }

  if (!popWithType(ValType::I32, val)) {
    return false;
  }

  return popWithType(ptrType, start);
}
// Validates memory.init / table.init (shared reader, selected by `isMem`).
// Checks segment and destination indices, element-type subtyping for tables,
// then pops length, source offset, and destination address.
template <typename Policy>
inline bool OpIter<Policy>::readMemOrTableInit(bool isMem, uint32_t* segIndex,
                                               uint32_t* dstMemOrTableIndex,
                                               Value* dst, Value* src,
                                               Value* len) {
  MOZ_ASSERT(Classify(op_) == OpKind::MemOrTableInit);
  MOZ_ASSERT(segIndex != dstMemOrTableIndex);

  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  uint32_t memOrTableIndex = 0;
  if (!readVarU32(&memOrTableIndex)) {
    return false;
  }

  if (isMem) {
    if (memOrTableIndex >= env_.memories.length()) {
      return fail("memory index out of range for memory.init");
    }
    *dstMemOrTableIndex = memOrTableIndex;

    if (env_.dataCount.isNothing()) {
      return fail("memory.init requires a DataCount section");
    }
    if (*segIndex >= *env_.dataCount) {
      return fail("memory.init segment index out of range");
    }
  } else {
    if (memOrTableIndex >= env_.tables.length()) {
      return fail("table index out of range for table.init");
    }
    *dstMemOrTableIndex = memOrTableIndex;

    if (*segIndex >= env_.elemSegments.length()) {
      return fail("table.init segment index out of range");
    }
    if (!checkIsSubtypeOf(env_.elemSegments[*segIndex].elemType,
                          env_.tables[*dstMemOrTableIndex].elemType)) {
      return false;
    }
  }

  if (!popWithType(ValType::I32, len)) {
    return false;
  }

  if (!popWithType(ValType::I32, src)) {
    return false;
  }

  // Only the destination address uses the memory's index type; the source
  // offset into the segment is always i32.
  ValType ptrType =
      isMem ? ToValType(env_.memories[*dstMemOrTableIndex].indexType())
            : ValType::I32;
  return popWithType(ptrType, dst);
}
// Validates table.fill: pops i32 length, a fill value of the table's element
// type, and the i32 start index.
template <typename Policy>
inline bool OpIter<Policy>::readTableFill(uint32_t* tableIndex, Value* start,
                                          Value* val, Value* len) {
  MOZ_ASSERT(Classify(op_) == OpKind::TableFill);

  if (!readVarU32(tableIndex)) {
    return fail("unable to read table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    return fail("table index out of range for table.fill");
  }

  if (!popWithType(ValType::I32, len)) {
    return false;
  }
  if (!popWithType(env_.tables[*tableIndex].elemType, val)) {
    return false;
  }

  return popWithType(ValType::I32, start);
}
// Validates memory.discard: pops length and start address, both typed by the
// memory's index type.
template <typename Policy>
inline bool OpIter<Policy>::readMemDiscard(uint32_t* memoryIndex, Value* start,
                                           Value* len) {
  MOZ_ASSERT(Classify(op_) == OpKind::MemDiscard);

  if (!readVarU32(memoryIndex)) {
    return fail("failed to read memory index");
  }
  if (*memoryIndex >= env_.memories.length()) {
    return fail("memory index out of range for memory.discard");
  }

  ValType ptrType = ToValType(env_.memories[*memoryIndex].indexType());

  if (!popWithType(ptrType, len)) {
    return false;
  }

  return popWithType(ptrType, start);
}
// Validates table.get: pops an i32 index and pushes the table's element
// type.
template <typename Policy>
inline bool OpIter<Policy>::readTableGet(uint32_t* tableIndex, Value* index) {
  MOZ_ASSERT(Classify(op_) == OpKind::TableGet);

  if (!readVarU32(tableIndex)) {
    return fail("unable to read table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    return fail("table index out of range for table.get");
  }

  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  infalliblePush(env_.tables[*tableIndex].elemType);
  return true;
}
// Validates table.grow: pops the i32 delta and an init value of the table's
// element type, then pushes the i32 result.
template <typename Policy>
inline bool OpIter<Policy>::readTableGrow(uint32_t* tableIndex,
                                          Value* initValue, Value* delta) {
  MOZ_ASSERT(Classify(op_) == OpKind::TableGrow);

  if (!readVarU32(tableIndex)) {
    return fail("unable to read table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    return fail("table index out of range for table.grow");
  }

  if (!popWithType(ValType::I32, delta)) {
    return false;
  }
  if (!popWithType(env_.tables[*tableIndex].elemType, initValue)) {
    return false;
  }

  infalliblePush(ValType::I32);
  return true;
}
// Validates table.set: pops a value of the table's element type and then the
// i32 index.
template <typename Policy>
inline bool OpIter<Policy>::readTableSet(uint32_t* tableIndex, Value* index,
                                         Value* value) {
  MOZ_ASSERT(Classify(op_) == OpKind::TableSet);

  if (!readVarU32(tableIndex)) {
    return fail("unable to read table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    return fail("table index out of range for table.set");
  }

  if (!popWithType(env_.tables[*tableIndex].elemType, value)) {
    return false;
  }

  return popWithType(ValType::I32, index);
}
// Validates table.size: reads and range-checks the table index, then pushes
// the i32 size result.
template <typename Policy>
inline bool OpIter<Policy>::readTableSize(uint32_t* tableIndex) {
  MOZ_ASSERT(Classify(op_) == OpKind::TableSize);

  *tableIndex = 0;

  if (!readVarU32(tableIndex)) {
    return fail("unable to read table index");
  }
  if (*tableIndex >= env_.tables.length()) {
    return fail("table index out of range for table.size");
  }

  return push(ValType::I32);
}
// Reads a type index that must name a GC type (struct or array).
template <typename Policy>
inline bool OpIter<Policy>::readGcTypeIndex(uint32_t* typeIndex) {
  if (!d_.readTypeIndex(typeIndex)) {
    return false;
  }

  if (*typeIndex >= env_.types->length()) {
    return fail("type index out of range");
  }

  if (!env_.types->type(*typeIndex).isStructType() &&
      !env_.types->type(*typeIndex).isArrayType()) {
    return fail("not a gc type");
  }

  return true;
}
// Reads a type index that must name a struct type.
template <typename Policy>
inline bool OpIter<Policy>::readStructTypeIndex(uint32_t* typeIndex) {
  if (!readVarU32(typeIndex)) {
    return fail("unable to read type index");
  }

  if (*typeIndex >= env_.types->length()) {
    return fail("type index out of range");
  }

  if (!env_.types->type(*typeIndex).isStructType()) {
    return fail("not a struct type");
  }

  return true;
}
// Reads a type index that must name an array type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayTypeIndex(uint32_t* typeIndex) {
  if (!readVarU32(typeIndex)) {
    return fail("unable to read type index");
  }

  if (*typeIndex >= env_.types->length()) {
    return fail("type index out of range");
  }

  if (!env_.types->type(*typeIndex).isArrayType()) {
    return fail("not an array type");
  }

  return true;
}
3287 template <typename Policy
>
3288 inline bool OpIter
<Policy
>::readFuncTypeIndex(uint32_t* typeIndex
) {
3289 if (!readVarU32(typeIndex
)) {
3290 return fail("unable to read type index");
3293 if (*typeIndex
>= env_
.types
->length()) {
3294 return fail("type index out of range");
3297 if (!env_
.types
->type(*typeIndex
).isFuncType()) {
3298 return fail("not an func type");
// Reads a field index and checks it is in range for the given struct type.
template <typename Policy>
inline bool OpIter<Policy>::readFieldIndex(uint32_t* fieldIndex,
                                           const StructType& structType) {
  if (!readVarU32(fieldIndex)) {
    return fail("unable to read field index");
  }

  if (structType.fields_.length() <= *fieldIndex) {
    return fail("field index out of range");
  }

  return true;
}
#ifdef ENABLE_WASM_GC

// Validates struct.new: pops one initializer per field (in reverse field
// order, widened to value types) and pushes a non-nullable reference to the
// struct type.
template <typename Policy>
inline bool OpIter<Policy>::readStructNew(uint32_t* typeIndex,
                                          ValueVector* argValues) {
  MOZ_ASSERT(Classify(op_) == OpKind::StructNew);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!argValues->resize(structType.fields_.length())) {
    return false;
  }

  static_assert(MaxStructFields <= INT32_MAX, "Or we iloop below");

  for (int32_t i = structType.fields_.length() - 1; i >= 0; i--) {
    if (!popWithType(structType.fields_[i].type.widenToValType(),
                     &(*argValues)[i])) {
      return false;
    }
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates struct.new_default: every field must be defaultable; pushes a
// non-nullable reference to the struct type.
template <typename Policy>
inline bool OpIter<Policy>::readStructNewDefault(uint32_t* typeIndex) {
  MOZ_ASSERT(Classify(op_) == OpKind::StructNewDefault);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!structType.isDefaultable()) {
    return fail("struct must be defaultable");
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates struct.get[_s/_u]: pops a nullable struct reference and pushes
// the field widened to a value type. The signedness immediate must be given
// exactly when the field is a packed (i8/i16) type.
template <typename Policy>
inline bool OpIter<Policy>::readStructGet(uint32_t* typeIndex,
                                          uint32_t* fieldIndex,
                                          FieldWideningOp wideningOp,
                                          Value* ptr) {
  MOZ_ASSERT(typeIndex != fieldIndex);
  MOZ_ASSERT(Classify(op_) == OpKind::StructGet);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!readFieldIndex(fieldIndex, structType)) {
    return false;
  }

  if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
    return false;
  }

  StorageType StorageType = structType.fields_[*fieldIndex].type;

  if (StorageType.isValType() && wideningOp != FieldWideningOp::None) {
    return fail("must not specify signedness for unpacked field type");
  }

  if (!StorageType.isValType() && wideningOp == FieldWideningOp::None) {
    return fail("must specify signedness for packed field type");
  }

  return push(StorageType.widenToValType());
}
// Validates struct.set: pops the new field value (widened), checks the field
// is mutable, then pops a nullable struct reference.
template <typename Policy>
inline bool OpIter<Policy>::readStructSet(uint32_t* typeIndex,
                                          uint32_t* fieldIndex, Value* ptr,
                                          Value* val) {
  MOZ_ASSERT(typeIndex != fieldIndex);
  MOZ_ASSERT(Classify(op_) == OpKind::StructSet);

  if (!readStructTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const StructType& structType = typeDef.structType();

  if (!readFieldIndex(fieldIndex, structType)) {
    return false;
  }

  if (!popWithType(structType.fields_[*fieldIndex].type.widenToValType(),
                   val)) {
    return false;
  }

  if (!structType.fields_[*fieldIndex].isMutable) {
    return fail("field is not mutable");
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), ptr);
}
// Validates array.new: pops the i32 element count and the initial element
// value (widened), then pushes a non-nullable reference to the array type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNew(uint32_t* typeIndex,
                                         Value* numElements, Value* argValue) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNew);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }

  if (!popWithType(arrayType.elementType_.widenToValType(), argValue)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates array.new_fixed: reads the element count immediate (bounded by
// MaxArrayNewFixedElements), pops that many elements (widened), and pushes a
// non-nullable reference to the array type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewFixed(uint32_t* typeIndex,
                                              uint32_t* numElements,
                                              ValueVector* values) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewFixed);
  MOZ_ASSERT(values->length() == 0);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!readVarU32(numElements)) {
    return false;
  }

  if (*numElements > MaxArrayNewFixedElements) {
    return fail("too many array.new_fixed elements");
  }

  if (!values->reserve(*numElements)) {
    return false;
  }

  ValType widenedElementType = arrayType.elementType_.widenToValType();
  for (uint32_t i = 0; i < *numElements; i++) {
    Value v;
    if (!popWithType(widenedElementType, &v)) {
      return false;
    }
    values->infallibleAppend(v);
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates array.new_default: pops the i32 element count, requires the
// element type to be defaultable, and pushes a non-nullable reference to the
// array type.
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewDefault(uint32_t* typeIndex,
                                                Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewDefault);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }

  if (!arrayType.elementType_.isDefaultable()) {
    return fail("array must be defaultable");
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates `array.new_data`: creates an array from a data segment. The
// element type must be numeric/packed/vector (bytes, not references), the
// datacount section must be present, and the segment index in range.
// Pops (top first): numElements:i32, offset:i32. Pushes (ref $t).
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewData(uint32_t* typeIndex,
                                             uint32_t* segIndex, Value* offset,
                                             Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewData);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }
  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType elemType = arrayType.elementType_;
  if (!elemType.isNumber() && !elemType.isPacked() && !elemType.isVector()) {
    return fail("element type must be i8/i16/i32/i64/f32/f64/v128");
  }
  // Data segment indices are only validatable when a datacount section exists.
  if (env_.dataCount.isNothing()) {
    return fail("datacount section missing");
  }
  if (*segIndex >= *env_.dataCount) {
    return fail("segment index is out of range");
  }

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, offset)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates `array.new_elem`: creates an array of references from an element
// segment. The destination element type must be a reftype and the segment's
// element type must be a subtype of it.
// Pops (top first): numElements:i32, offset:i32. Pushes (ref $t).
template <typename Policy>
inline bool OpIter<Policy>::readArrayNewElem(uint32_t* typeIndex,
                                             uint32_t* segIndex, Value* offset,
                                             Value* numElements) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayNewElem);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }
  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType dstElemType = arrayType.elementType_;
  if (!dstElemType.isRefType()) {
    return fail("element type is not a reftype");
  }
  if (*segIndex >= env_.elemSegments.length()) {
    return fail("segment index is out of range");
  }

  const ModuleElemSegment& elemSeg = env_.elemSegments[*segIndex];
  RefType srcElemType = elemSeg.elemType;
  // srcElemType needs to be a subtype (child) of dstElemType
  if (!checkIsSubtypeOf(srcElemType, dstElemType.refType())) {
    return fail("incompatible element types");
  }

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, offset)) {
    return false;
  }

  return push(RefType::fromTypeDef(&typeDef, false));
}
// Validates `array.init_data`: copies bytes from a data segment into a
// mutable array of numeric/packed/vector elements. Pops (top first):
// length:i32, segOffset:i32, arrayIndex:i32, array:(ref null $t). No results.
// NOTE(review): the `Value* arrayIndex` parameter line was lost in extraction
// and has been reconstructed from its use below — confirm against upstream.
template <typename Policy>
inline bool OpIter<Policy>::readArrayInitData(uint32_t* typeIndex,
                                              uint32_t* segIndex, Value* array,
                                              Value* arrayIndex,
                                              Value* segOffset, Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayInitData);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }
  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType elemType = arrayType.elementType_;
  if (!elemType.isNumber() && !elemType.isPacked() && !elemType.isVector()) {
    return fail("element type must be i8/i16/i32/i64/f32/f64/v128");
  }
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }
  if (env_.dataCount.isNothing()) {
    return fail("datacount section missing");
  }
  if (*segIndex >= *env_.dataCount) {
    return fail("segment index is out of range");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(ValType::I32, segOffset)) {
    return false;
  }
  if (!popWithType(ValType::I32, arrayIndex)) {
    return false;
  }
  // Nullable reference: a null array traps at runtime, not validation time.
  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// Validates `array.init_elem`: copies references from an element segment into
// a mutable array of reftyped elements; the segment's element type must be a
// subtype of the array's. Pops (top first): length:i32, segOffset:i32,
// arrayIndex:i32, array:(ref null $t). No results.
// NOTE(review): the `Value* arrayIndex` parameter line was lost in extraction
// and has been reconstructed from its use below — confirm against upstream.
template <typename Policy>
inline bool OpIter<Policy>::readArrayInitElem(uint32_t* typeIndex,
                                              uint32_t* segIndex, Value* array,
                                              Value* arrayIndex,
                                              Value* segOffset, Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayInitElem);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }
  if (!readVarU32(segIndex)) {
    return fail("unable to read segment index");
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  StorageType dstElemType = arrayType.elementType_;
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }
  if (!dstElemType.isRefType()) {
    return fail("element type is not a reftype");
  }
  if (*segIndex >= env_.elemSegments.length()) {
    return fail("segment index is out of range");
  }

  const ModuleElemSegment& elemSeg = env_.elemSegments[*segIndex];
  RefType srcElemType = elemSeg.elemType;
  // srcElemType needs to be a subtype (child) of dstElemType
  if (!checkIsSubtypeOf(srcElemType, dstElemType.refType())) {
    return fail("incompatible element types");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(ValType::I32, segOffset)) {
    return false;
  }
  if (!popWithType(ValType::I32, arrayIndex)) {
    return false;
  }
  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// Validates `array.get` / `array.get_s` / `array.get_u`: pops an i32 index
// and a nullable array reference, then pushes the (widened) element type.
// The signed/unsigned widening immediate is only meaningful — and only
// permitted — for packed (i8/i16) element types.
template <typename Policy>
inline bool OpIter<Policy>::readArrayGet(uint32_t* typeIndex,
                                         FieldWideningOp wideningOp,
                                         Value* index, Value* ptr) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayGet);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!popWithType(ValType::I32, index)) {
    return false;
  }
  if (!popWithType(RefType::fromTypeDef(&typeDef, true), ptr)) {
    return false;
  }

  StorageType elementType = arrayType.elementType_;

  // Unpacked element types must use plain `array.get`.
  if (elementType.isValType() && wideningOp != FieldWideningOp::None) {
    return fail("must not specify signedness for unpacked element type");
  }
  // Packed element types must choose a signedness via get_s/get_u.
  if (!elementType.isValType() && wideningOp == FieldWideningOp::None) {
    return fail("must specify signedness for packed element type");
  }

  return push(elementType.widenToValType());
}
// Validates `array.set`: the array type must be mutable. Pops (top first):
// val (widened element type), index:i32, array:(ref null $t). No results.
template <typename Policy>
inline bool OpIter<Policy>::readArraySet(uint32_t* typeIndex, Value* val,
                                         Value* index, Value* ptr) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArraySet);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();

  if (!arrayType.isMutable_) {
    return fail("array is not mutable");
  }

  if (!popWithType(arrayType.elementType_.widenToValType(), val)) {
    return false;
  }
  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), ptr);
}
3745 template <typename Policy
>
3746 inline bool OpIter
<Policy
>::readArrayLen(Value
* ptr
) {
3747 MOZ_ASSERT(Classify(op_
) == OpKind::ArrayLen
);
3749 if (!popWithType(RefType::array(), ptr
)) {
3753 return push(ValType::I32
);
// Validates `array.copy`: copies a run of elements between two arrays. The
// destination array type must be mutable and the source element type a
// subtype of the destination element type. Pops (top first):
// numElements:i32, srcIndex:i32, srcArray, dstIndex:i32, dstArray.
template <typename Policy>
inline bool OpIter<Policy>::readArrayCopy(int32_t* elemSize,
                                          bool* elemsAreRefTyped,
                                          Value* dstArray, Value* dstIndex,
                                          Value* srcArray, Value* srcIndex,
                                          Value* numElements) {
  // *elemSize is set to 1/2/4/8/16, and *elemsAreRefTyped is set to indicate
  // *ref-typeness of elements.
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayCopy);

  uint32_t dstTypeIndex, srcTypeIndex;
  if (!readArrayTypeIndex(&dstTypeIndex)) {
    return false;
  }
  if (!readArrayTypeIndex(&srcTypeIndex)) {
    return false;
  }

  // `dstTypeIndex`/`srcTypeIndex` are ensured by the above to both be array
  // types. Reject if:
  // * the dst array is not of mutable type
  // * the element types are incompatible
  const TypeDef& dstTypeDef = env_.types->type(dstTypeIndex);
  const ArrayType& dstArrayType = dstTypeDef.arrayType();
  const TypeDef& srcTypeDef = env_.types->type(srcTypeIndex);
  const ArrayType& srcArrayType = srcTypeDef.arrayType();
  StorageType dstElemType = dstArrayType.elementType_;
  StorageType srcElemType = srcArrayType.elementType_;
  if (!dstArrayType.isMutable_) {
    return fail("destination array is not mutable");
  }

  if (!checkIsSubtypeOf(srcElemType, dstElemType)) {
    return fail("incompatible element types");
  }
  // Subtyping guarantees ref-typeness agrees between source and destination.
  bool dstIsRefType = dstElemType.isRefType();
  MOZ_ASSERT(dstIsRefType == srcElemType.isRefType());

  *elemSize = int32_t(dstElemType.size());
  *elemsAreRefTyped = dstIsRefType;
  MOZ_ASSERT(*elemSize >= 1 && *elemSize <= 16);
  // Reference elements are pointer-sized: 4 or 8 bytes.
  MOZ_ASSERT_IF(*elemsAreRefTyped, *elemSize == 4 || *elemSize == 8);

  if (!popWithType(ValType::I32, numElements)) {
    return false;
  }
  if (!popWithType(ValType::I32, srcIndex)) {
    return false;
  }
  if (!popWithType(RefType::fromTypeDef(&srcTypeDef, true), srcArray)) {
    return false;
  }
  if (!popWithType(ValType::I32, dstIndex)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&dstTypeDef, true), dstArray);
}
// Validates `array.fill`: fills a run of a mutable array with one value.
// Pops (top first): length:i32, val (widened element type), index:i32,
// array:(ref null $t). No results.
// NOTE(review): the `Value* length` parameter line was lost in extraction and
// has been reconstructed from its use below — confirm against upstream.
template <typename Policy>
inline bool OpIter<Policy>::readArrayFill(uint32_t* typeIndex, Value* array,
                                          Value* index, Value* val,
                                          Value* length) {
  MOZ_ASSERT(Classify(op_) == OpKind::ArrayFill);

  if (!readArrayTypeIndex(typeIndex)) {
    return false;
  }

  const TypeDef& typeDef = env_.types->type(*typeIndex);
  const ArrayType& arrayType = typeDef.arrayType();
  if (!arrayType.isMutable_) {
    return fail("destination array is not mutable");
  }

  if (!popWithType(ValType::I32, length)) {
    return false;
  }
  if (!popWithType(arrayType.elementType_.widenToValType(), val)) {
    return false;
  }
  if (!popWithType(ValType::I32, index)) {
    return false;
  }

  return popWithType(RefType::fromTypeDef(&typeDef, true), array);
}
// Validates `ref.test`: reads the destination heap type immediate, pops a
// reference whose type is the top type of the destination hierarchy, reports
// the actual (possibly more precise) popped type via *sourceType, and pushes
// the i32 test result.
template <typename Policy>
inline bool OpIter<Policy>::readRefTest(bool nullable, RefType* sourceType,
                                        RefType* destType, Value* ref) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefTest);

  if (!readHeapType(nullable, destType)) {
    return false;
  }

  StackType inputType;
  if (!popWithType(destType->topType(), ref, &inputType)) {
    return false;
  }
  // For unreachable (bottom) input, fall back to the most general source.
  *sourceType = inputType.valTypeOr(RefType::any()).refType();

  return push(ValType(ValType::I32));
}
// Validates `ref.cast`: like `ref.test`, but on success the operand itself is
// the result, so the destination reftype is pushed instead of an i32.
template <typename Policy>
inline bool OpIter<Policy>::readRefCast(bool nullable, RefType* sourceType,
                                        RefType* destType, Value* ref) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefCast);

  if (!readHeapType(nullable, destType)) {
    return false;
  }

  StackType inputType;
  if (!popWithType(destType->topType(), ref, &inputType)) {
    return false;
  }
  // For unreachable (bottom) input, fall back to the most general source.
  *sourceType = inputType.valTypeOr(RefType::any()).refType();

  return push(*destType);
}
3880 // `br_on_cast <flags> <labelRelativeDepth> <rt1> <rt2>`
3881 // branches if a reference has a given heap type.
3883 // V6 spec text follows - note that br_on_cast and br_on_cast_fail are both
3884 // handled by this function (disambiguated by a flag).
3886 // * `br_on_cast <labelidx> <reftype> <reftype>` branches if a reference has a
3888 // - `br_on_cast $l rt1 rt2 : [t0* rt1] -> [t0* rt1\rt2]`
3889 // - iff `$l : [t0* rt2]`
3890 // - and `rt2 <: rt1`
3891 // - passes operand along with branch under target type, plus possible extra
3893 // - if `rt2` contains `null`, branches on null, otherwise does not
3894 // * `br_on_cast_fail <labelidx> <reftype> <reftype>` branches if a reference
3895 // does not have a given type
3896 // - `br_on_cast_fail $l rt1 rt2 : [t0* rt1] -> [t0* rt2]`
3897 // - iff `$l : [t0* rt1\rt2]`
3898 // - and `rt2 <: rt1`
3899 // - passes operand along with branch, plus possible extra args
3900 // - if `rt2` contains `null`, does not branch on null, otherwise does
3902 // - `(ref null1? ht1)\(ref null ht2) = (ref ht1)`
3903 // - `(ref null1? ht1)\(ref ht2) = (ref null1? ht1)`
3905 // The `rt1\rt2` syntax is a "diff" - it is basically rt1 minus rt2, because a
3906 // successful cast to rt2 will branch away. So if rt2 allows null, the result
3907 // after a non-branch will be non-null; on the other hand, if rt2 is
3908 // non-nullable, the cast will have nothing to say about nullability and the
3909 // nullability of rt1 will be preserved.
3911 // `values` will be nonempty after the call, and its last entry will be the
3912 // type that causes a branch (rt1\rt2 or rt2, depending).
// Flag bits of the br_on_cast/br_on_cast_fail immediate: bit 0 is the
// nullability of the source reftype, bit 1 the nullability of the destination
// (see readBrOnCast). Any other bit set is a validation error.
enum class BrOnCastFlags : uint8_t {
  SourceNullable = 0x1,
  DestNullable = 0x1 << 1,
  AllowedMask = uint8_t(SourceNullable) | uint8_t(DestNullable),
};
// Validates `br_on_cast` / `br_on_cast_fail` (selected by `onSuccess`); see
// the large comment above for the typing rules. Reads flags, depth and the
// two reftype immediates, checks rt2 <: rt1 and the branch target arity, then
// rewrites the top of stack with the fallthrough type.
template <typename Policy>
inline bool OpIter<Policy>::readBrOnCast(bool onSuccess,
                                         uint32_t* labelRelativeDepth,
                                         RefType* sourceType, RefType* destType,
                                         ResultType* labelType,
                                         ValueVector* values) {
  MOZ_ASSERT(Classify(op_) == OpKind::BrOnCast);

  uint8_t flags;
  if (!readFixedU8(&flags)) {
    return fail("unable to read br_on_cast flags");
  }
  if ((flags & ~uint8_t(BrOnCastFlags::AllowedMask)) != 0) {
    return fail("invalid br_on_cast flags");
  }
  bool sourceNullable = flags & uint8_t(BrOnCastFlags::SourceNullable);
  bool destNullable = flags & uint8_t(BrOnCastFlags::DestNullable);

  if (!readVarU32(labelRelativeDepth)) {
    return fail("unable to read br_on_cast depth");
  }

  // This is distinct from the actual source type we pop from the stack, which
  // can be more specific and allow for better optimizations.
  RefType immediateSourceType;
  if (!readHeapType(sourceNullable, &immediateSourceType)) {
    return fail("unable to read br_on_cast source type");
  }
  if (!readHeapType(destNullable, destType)) {
    return fail("unable to read br_on_cast dest type");
  }

  // Check that source and destination types are compatible
  if (!checkIsSubtypeOf(*destType, immediateSourceType)) {
    return fail(
        "type mismatch: source and destination types for cast are "
        "incompatible");
  }

  RefType typeOnSuccess = *destType;
  // The type on cast failure is rt1\rt2: if rt2 accepts null, a null operand
  // would have branched away, so the fallthrough is non-nullable.
  RefType typeOnFail =
      destNullable ? immediateSourceType.asNonNullable() : immediateSourceType;
  RefType typeOnBranch = onSuccess ? typeOnSuccess : typeOnFail;
  RefType typeOnFallthrough = onSuccess ? typeOnFail : typeOnSuccess;

  // Get the branch target type, which will also determine the type of extra
  // values that are passed along on branch.
  Control* block = nullptr;
  if (!getControl(*labelRelativeDepth, &block)) {
    return false;
  }
  *labelType = block->branchTargetType();

  // Check we have at least one value slot in the branch target type, so as to
  // receive the casted or non-casted type when we branch.
  const size_t labelTypeNumValues = labelType->length();
  if (labelTypeNumValues < 1) {
    return fail("type mismatch: branch target type has no value types");
  }

  // The last value slot in the branch target type is what is being cast.
  // This slot is guaranteed to exist by the above check.

  // Check that the branch target type can accept typeOnBranch.
  if (!checkIsSubtypeOf(typeOnBranch, (*labelType)[labelTypeNumValues - 1])) {
    return false;
  }

  // Replace the top operand with the result of falling through. Even branching
  // on success can change the type on top of the stack on fallthrough.
  Value inputValue;
  StackType inputType;
  if (!popWithType(immediateSourceType, &inputValue, &inputType)) {
    return false;
  }
  *sourceType = inputType.valTypeOr(immediateSourceType).refType();
  infalliblePush(TypeAndValue(typeOnFallthrough, inputValue));

  // Create a copy of the branch target type, with the relevant value slot
  // replaced by typeOnFallthrough.
  ValTypeVector fallthroughTypes;
  if (!labelType->cloneToVector(&fallthroughTypes)) {
    return false;
  }
  fallthroughTypes[labelTypeNumValues - 1] = typeOnFallthrough;

  return checkTopTypeMatches(ResultType::Vector(fallthroughTypes), values,
                             /*rewriteStackTypes=*/true);
}
// Validates ref conversions (e.g. extern/any conversions): pops a reference
// of `operandType` and pushes `resultType` with the operand's nullability
// preserved.
// NOTE(review): the `RefType resultType` parameter line was lost in
// extraction and has been reconstructed from its use below — confirm against
// upstream.
template <typename Policy>
inline bool OpIter<Policy>::readRefConversion(RefType operandType,
                                              RefType resultType,
                                              Value* operandValue) {
  MOZ_ASSERT(Classify(op_) == OpKind::RefConversion);

  StackType actualOperandType;
  if (!popWithType(ValType(operandType), operandValue, &actualOperandType)) {
    return false;
  }

  // The result nullability is the same as the operand nullability
  bool outputNullable = actualOperandType.isNullableAsOperand();
  infalliblePush(ValType(resultType.withIsNullable(outputNullable)));
  return true;
}
4029 #endif // ENABLE_WASM_GC
4031 #ifdef ENABLE_WASM_SIMD
// Reads a SIMD lane-index immediate and checks it against the lane count.
// Returns false without setting an error message; callers turn the failure
// into an op-specific `fail(...)`.
template <typename Policy>
inline bool OpIter<Policy>::readLaneIndex(uint32_t inputLanes,
                                          uint32_t* laneIndex) {
  uint8_t tmp;
  if (!readFixedU8(&tmp)) {
    return false;  // Caller signals error
  }
  if (tmp >= inputLanes) {
    return false;
  }
  *laneIndex = tmp;
  return true;
}
// Validates `extract_lane`: reads and bounds-checks the lane index, pops a
// v128 and pushes the scalar `resultType`.
template <typename Policy>
inline bool OpIter<Policy>::readExtractLane(ValType resultType,
                                            uint32_t inputLanes,
                                            uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::ExtractLane);

  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid extract_lane lane index");
  }

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  infalliblePush(resultType);

  return true;
}
// Validates `replace_lane`: reads and bounds-checks the lane index, pops the
// scalar operand and the base v128, and pushes the updated v128.
template <typename Policy>
inline bool OpIter<Policy>::readReplaceLane(ValType operandType,
                                            uint32_t inputLanes,
                                            uint32_t* laneIndex,
                                            Value* baseValue, Value* operand) {
  MOZ_ASSERT(Classify(op_) == OpKind::ReplaceLane);

  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid replace_lane lane index");
  }

  if (!popWithType(operandType, operand)) {
    return false;
  }
  if (!popWithType(ValType::V128, baseValue)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// Validates SIMD shifts: pops the i32 shift amount then the v128 base value,
// and pushes the shifted v128.
template <typename Policy>
inline bool OpIter<Policy>::readVectorShift(Value* baseValue, Value* shift) {
  MOZ_ASSERT(Classify(op_) == OpKind::VectorShift);

  if (!popWithType(ValType::I32, shift)) {
    return false;
  }
  if (!popWithType(ValType::V128, baseValue)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// Validates `i8x16.shuffle`: reads the 16 lane-selector bytes (each must be
// in [0, 31] since it selects from two concatenated vectors), pops the two
// v128 operands, and pushes the shuffled v128.
// NOTE(review): the `V128* selectMask` parameter line and parts of the loop
// body were lost in extraction and reconstructed — confirm against upstream.
template <typename Policy>
inline bool OpIter<Policy>::readVectorShuffle(Value* v1, Value* v2,
                                              V128* selectMask) {
  MOZ_ASSERT(Classify(op_) == OpKind::VectorShuffle);

  for (unsigned char& byte : selectMask->bytes) {
    uint8_t tmp;
    if (!readFixedU8(&tmp)) {
      return fail("unable to read shuffle index");
    }
    if (tmp > 31) {
      return fail("shuffle index out of range");
    }
    byte = tmp;
  }

  if (!popWithType(ValType::V128, v2)) {
    return false;
  }
  if (!popWithType(ValType::V128, v1)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
4136 template <typename Policy
>
4137 inline bool OpIter
<Policy
>::readV128Const(V128
* value
) {
4138 MOZ_ASSERT(Classify(op_
) == OpKind::V128
);
4140 if (!d_
.readV128Const(value
)) {
4144 return push(ValType::V128
);
// Validates `v128.loadN_splat`: reads the memory address operand (popping the
// index) and pushes the splatted v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadSplat(uint32_t byteSize,
                                          LinearMemoryAddress<Value>* addr) {
  MOZ_ASSERT(Classify(op_) == OpKind::Load);

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// Validates `v128.loadNxM_s/u` (load-and-extend): always an 8-byte access;
// pushes the extended v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadExtend(LinearMemoryAddress<Value>* addr) {
  MOZ_ASSERT(Classify(op_) == OpKind::Load);

  if (!readLinearMemoryAddress(/*byteSize=*/8, addr)) {
    return false;
  }

  infalliblePush(ValType::V128);

  return true;
}
// Validates `v128.loadN_lane`: pops the v128 to merge into, reads the memory
// address (popping the index), reads the lane index (lane count is derived
// from the access size), and pushes the resulting v128.
template <typename Policy>
inline bool OpIter<Policy>::readLoadLane(uint32_t byteSize,
                                         LinearMemoryAddress<Value>* addr,
                                         uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::LoadLane);

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  // 16 bytes per vector / bytes per lane = number of lanes.
  uint32_t inputLanes = 16 / byteSize;
  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid load_lane lane index");
  }

  infalliblePush(ValType::V128);

  return true;
}
// Validates `v128.storeN_lane`: pops the v128 source, reads the memory
// address (popping the index) and the lane index. Stores push no result.
template <typename Policy>
inline bool OpIter<Policy>::readStoreLane(uint32_t byteSize,
                                          LinearMemoryAddress<Value>* addr,
                                          uint32_t* laneIndex, Value* input) {
  MOZ_ASSERT(Classify(op_) == OpKind::StoreLane);

  if (!popWithType(ValType::V128, input)) {
    return false;
  }

  if (!readLinearMemoryAddress(byteSize, addr)) {
    return false;
  }

  // 16 bytes per vector / bytes per lane = number of lanes.
  uint32_t inputLanes = 16 / byteSize;
  if (!readLaneIndex(inputLanes, laneIndex)) {
    return fail("missing or invalid store_lane lane index");
  }

  return true;
}
4220 #endif // ENABLE_WASM_SIMD
// Validates a call to a builtin module func: reads the function id immediate,
// bounds-checks it, requires a memory when the builtin touches memory, pops
// the call arguments per the builtin's signature, and pushes its results.
template <typename Policy>
inline bool OpIter<Policy>::readCallBuiltinModuleFunc(
    const BuiltinModuleFunc** builtinModuleFunc, ValueVector* params) {
  MOZ_ASSERT(Classify(op_) == OpKind::CallBuiltinModuleFunc);

  uint32_t id;
  if (!d_.readVarU32(&id)) {
    // NOTE(review): the original failure message here was lost in extraction;
    // reconstructed — confirm against upstream.
    return fail("unable to read builtin module func index");
  }

  if (id >= uint32_t(BuiltinModuleFuncId::Limit)) {
    return fail("index out of range");
  }

  *builtinModuleFunc = &BuiltinModuleFuncs::getFromId(BuiltinModuleFuncId(id));

  if ((*builtinModuleFunc)->usesMemory() && env_.numMemories() == 0) {
    return fail("can't touch memory without memory");
  }

  const FuncType& funcType = *(*builtinModuleFunc)->funcType();
  if (!popCallArgs(funcType.args(), params)) {
    return false;
  }

  return push(ResultType::Vector(funcType.results()));
}
// Compile-time checks on the validation-mode (mozilla::Nothing value)
// instantiations of the operand/control stack entries.
// NOTE(review): presumably required so the stacks can be grown/moved with
// bitwise copies — confirm against the Vector implementation used.
static_assert(std::is_trivially_copyable<
                  js::wasm::TypeAndValueT<mozilla::Nothing>>::value,
              "Must be trivially copyable");
static_assert(std::is_trivially_destructible<
                  js::wasm::TypeAndValueT<mozilla::Nothing>>::value,
              "Must be trivially destructible");

static_assert(std::is_trivially_copyable<
                  js::wasm::ControlStackEntry<mozilla::Nothing>>::value,
              "Must be trivially copyable");
static_assert(std::is_trivially_destructible<
                  js::wasm::ControlStackEntry<mozilla::Nothing>>::value,
              "Must be trivially destructible");
4267 #endif // wasm_op_iter_h