/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef vm_BytecodeUtil_h
#define vm_BytecodeUtil_h
/*
 * JS bytecode definitions.
 */
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/EndianUtils.h"

#include <algorithm>
#include <stddef.h>
#include <stdint.h>

#include "jstypes.h"
#include "NamespaceImports.h"

#include "js/TypeDecls.h"
#include "js/Utility.h"
#include "js/Value.h"
#include "vm/BytecodeFormatFlags.h"  // JOF_*
#include "vm/GeneratorResumeKind.h"
#include "vm/Opcodes.h"
#include "vm/SharedStencil.h"  // js::GCThingIndex
#include "vm/ThrowMsgKind.h"   // ThrowMsgKind, ThrowCondition

namespace js {
class JS_PUBLIC_API StringPrinter;
}  // namespace js
/* Shorthand for type from format. */
static inline uint32_t JOF_TYPE(uint32_t fmt) { return fmt & JOF_TYPEMASK; }

/* Shorthand for mode from format. */
static inline uint32_t JOF_MODE(uint32_t fmt) { return fmt & JOF_MODEMASK; }
/*
 * Immediate operand getters, setters, and bounds.
 */

static MOZ_ALWAYS_INLINE uint8_t GET_UINT8(jsbytecode* pc) {
  return uint8_t(pc[1]);
}

static MOZ_ALWAYS_INLINE void SET_UINT8(jsbytecode* pc, uint8_t u) {
  pc[1] = jsbytecode(u);
}

/* Common uint16_t immediate format helpers. */

static inline jsbytecode UINT16_HI(uint16_t i) { return jsbytecode(i >> 8); }

static inline jsbytecode UINT16_LO(uint16_t i) { return jsbytecode(i); }

static MOZ_ALWAYS_INLINE uint16_t GET_UINT16(const jsbytecode* pc) {
  uint16_t result;
  mozilla::NativeEndian::copyAndSwapFromLittleEndian(&result, pc + 1, 1);
  return result;
}

static MOZ_ALWAYS_INLINE void SET_UINT16(jsbytecode* pc, uint16_t i) {
  mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &i, 1);
}

static const unsigned UINT16_LIMIT = 1 << 16;
/* Helpers for accessing the offsets of jump opcodes. */
static const unsigned JUMP_OFFSET_LEN = 4;
static const int32_t JUMP_OFFSET_MIN = INT32_MIN;
static const int32_t JUMP_OFFSET_MAX = INT32_MAX;
static MOZ_ALWAYS_INLINE uint32_t GET_UINT24(const jsbytecode* pc) {
#if MOZ_LITTLE_ENDIAN()
  // Do a single 32-bit load (for opcode and operand), then shift off the
  // opcode.
  uint32_t result;
  memcpy(&result, pc, 4);
  return result >> 8;
#else
  return uint32_t((pc[3] << 16) | (pc[2] << 8) | pc[1]);
#endif
}

static MOZ_ALWAYS_INLINE void SET_UINT24(jsbytecode* pc, uint32_t i) {
  MOZ_ASSERT(i < (1 << 24));

#if MOZ_LITTLE_ENDIAN()
  memcpy(pc + 1, &i, 3);
#else
  pc[1] = jsbytecode(i);
  pc[2] = jsbytecode(i >> 8);
  pc[3] = jsbytecode(i >> 16);
#endif
}
static MOZ_ALWAYS_INLINE int8_t GET_INT8(const jsbytecode* pc) {
  return int8_t(pc[1]);
}

static MOZ_ALWAYS_INLINE uint32_t GET_UINT32(const jsbytecode* pc) {
  uint32_t result;
  mozilla::NativeEndian::copyAndSwapFromLittleEndian(&result, pc + 1, 1);
  return result;
}

static MOZ_ALWAYS_INLINE void SET_UINT32(jsbytecode* pc, uint32_t u) {
  mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &u, 1);
}

static MOZ_ALWAYS_INLINE JS::Value GET_INLINE_VALUE(const jsbytecode* pc) {
  uint64_t raw;
  mozilla::NativeEndian::copyAndSwapFromLittleEndian(&raw, pc + 1, 1);
  return JS::Value::fromRawBits(raw);
}

static MOZ_ALWAYS_INLINE void SET_INLINE_VALUE(jsbytecode* pc,
                                               const JS::Value& v) {
  uint64_t raw = v.asRawBits();
  mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &raw, 1);
}

static MOZ_ALWAYS_INLINE int32_t GET_INT32(const jsbytecode* pc) {
  return static_cast<int32_t>(GET_UINT32(pc));
}

static MOZ_ALWAYS_INLINE void SET_INT32(jsbytecode* pc, int32_t i) {
  SET_UINT32(pc, static_cast<uint32_t>(i));
}

static MOZ_ALWAYS_INLINE int32_t GET_JUMP_OFFSET(jsbytecode* pc) {
  return GET_INT32(pc);
}

static MOZ_ALWAYS_INLINE void SET_JUMP_OFFSET(jsbytecode* pc, int32_t off) {
  SET_INT32(pc, off);
}

static const unsigned GCTHING_INDEX_LEN = 4;

static MOZ_ALWAYS_INLINE js::GCThingIndex GET_GCTHING_INDEX(
    const jsbytecode* pc) {
  return js::GCThingIndex(GET_UINT32(pc));
}

static MOZ_ALWAYS_INLINE void SET_GCTHING_INDEX(jsbytecode* pc,
                                                js::GCThingIndex index) {
  SET_UINT32(pc, index.index);
}
// Index limit is determined by SrcNote::FourByteOffsetFlag, see
// frontend/BytecodeEmitter.h.
static const unsigned INDEX_LIMIT_LOG2 = 31;
static const uint32_t INDEX_LIMIT = uint32_t(1) << INDEX_LIMIT_LOG2;

static inline jsbytecode ARGC_HI(uint16_t argc) { return UINT16_HI(argc); }

static inline jsbytecode ARGC_LO(uint16_t argc) { return UINT16_LO(argc); }

static inline uint16_t GET_ARGC(const jsbytecode* pc) { return GET_UINT16(pc); }

static const unsigned ARGC_LIMIT = UINT16_LIMIT;

static inline uint16_t GET_ARGNO(const jsbytecode* pc) {
  return GET_UINT16(pc);
}

static inline void SET_ARGNO(jsbytecode* pc, uint16_t argno) {
  SET_UINT16(pc, argno);
}

static const unsigned ARGNO_LEN = 2;
static const unsigned ARGNO_LIMIT = UINT16_LIMIT;

static inline uint32_t GET_LOCALNO(const jsbytecode* pc) {
  return GET_UINT24(pc);
}

static inline void SET_LOCALNO(jsbytecode* pc, uint32_t varno) {
  SET_UINT24(pc, varno);
}

static const unsigned LOCALNO_LEN = 3;
static const unsigned LOCALNO_BITS = 24;
static const uint32_t LOCALNO_LIMIT = 1 << LOCALNO_BITS;

static inline uint32_t GET_RESUMEINDEX(const jsbytecode* pc) {
  return GET_UINT24(pc);
}

static inline void SET_RESUMEINDEX(jsbytecode* pc, uint32_t resumeIndex) {
  SET_UINT24(pc, resumeIndex);
}

static const unsigned ICINDEX_LEN = 4;

static inline uint32_t GET_ICINDEX(const jsbytecode* pc) {
  return GET_UINT32(pc);
}

static inline void SET_ICINDEX(jsbytecode* pc, uint32_t icIndex) {
  SET_UINT32(pc, icIndex);
}
static inline unsigned LoopHeadDepthHint(jsbytecode* pc) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
  return GET_UINT8(pc + 4);
}

static inline void SetLoopHeadDepthHint(jsbytecode* pc, unsigned loopDepth) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
  uint8_t data = std::min(loopDepth, unsigned(UINT8_MAX));
  SET_UINT8(pc + 4, data);
}

static inline bool IsBackedgePC(jsbytecode* pc) {
  switch (JSOp(*pc)) {
    case JSOp::Goto:
    case JSOp::JumpIfTrue:
      return GET_JUMP_OFFSET(pc) < 0;
    default:
      return false;
  }
}

static inline bool IsBackedgeForLoopHead(jsbytecode* pc, jsbytecode* loopHead) {
  MOZ_ASSERT(JSOp(*loopHead) == JSOp::LoopHead);
  return IsBackedgePC(pc) && pc + GET_JUMP_OFFSET(pc) == loopHead;
}
/*
 * Describes the 'hops' component of a JOF_ENVCOORD opcode.
 *
 * Note: this component is only 8 bits wide, limiting the maximum number of
 * scopes between a use and def to roughly 255. This is a pretty small limit but
 * note that SpiderMonkey's recursive descent parser can only parse about this
 * many functions before hitting the C-stack recursion limit so this shouldn't
 * be a significant limitation in practice.
 */

static inline uint8_t GET_ENVCOORD_HOPS(jsbytecode* pc) {
  return GET_UINT8(pc);
}

static inline void SET_ENVCOORD_HOPS(jsbytecode* pc, uint8_t hops) {
  SET_UINT8(pc, hops);
}

static const unsigned ENVCOORD_HOPS_LEN = 1;
static const unsigned ENVCOORD_HOPS_BITS = 8;
static const unsigned ENVCOORD_HOPS_LIMIT = 1 << ENVCOORD_HOPS_BITS;

/* Describes the 'slot' component of a JOF_ENVCOORD opcode. */
static inline uint32_t GET_ENVCOORD_SLOT(const jsbytecode* pc) {
  return GET_UINT24(pc);
}

static inline void SET_ENVCOORD_SLOT(jsbytecode* pc, uint32_t slot) {
  SET_UINT24(pc, slot);
}

static const unsigned ENVCOORD_SLOT_LEN = 3;
static const unsigned ENVCOORD_SLOT_BITS = 24;
static const uint32_t ENVCOORD_SLOT_LIMIT = 1 << ENVCOORD_SLOT_BITS;
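// Illustrative example (not part of the original header): decoding both
// components of an environment coordinate from a JOF_ENVCOORD op such as
// JSOp::GetAliasedVar. `pc` is an assumed pointer to such an op; the operand
// layout is one byte of hops followed by three bytes of slot.
//
//   uint8_t hops = GET_ENVCOORD_HOPS(pc);
//   uint32_t slot = GET_ENVCOORD_SLOT(pc + ENVCOORD_HOPS_LEN);
//   // Walk up `hops` enclosing environments, then access slot `slot` there.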
struct JSCodeSpec {
  uint8_t length;  /* length including opcode byte */
  int8_t nuses;    /* arity, -1 if variadic */
  int8_t ndefs;    /* number of stack results */
  uint32_t format; /* immediate operand format */
};

namespace js {

extern const JSCodeSpec CodeSpecTable[];

inline const JSCodeSpec& CodeSpec(JSOp op) {
  return CodeSpecTable[uint8_t(op)];
}

extern const char* const CodeNameTable[];

inline const char* CodeName(JSOp op) { return CodeNameTable[uint8_t(op)]; }

/* Shorthand for type from opcode. */
static inline uint32_t JOF_OPTYPE(JSOp op) {
  return JOF_TYPE(CodeSpec(op).format);
}

static inline bool IsJumpOpcode(JSOp op) { return JOF_OPTYPE(op) == JOF_JUMP; }
static inline bool BytecodeFallsThrough(JSOp op) {
  // Note:
  // * JSOp::Yield/JSOp::Await is considered to fall through, like JSOp::Call.
  switch (op) {
    case JSOp::Goto:
    case JSOp::Default:
    case JSOp::Return:
    case JSOp::RetRval:
    case JSOp::FinalYieldRval:
    case JSOp::Throw:
    case JSOp::ThrowWithStack:
    case JSOp::ThrowMsg:
    case JSOp::ThrowSetConst:
    case JSOp::TableSwitch:
      return false;
    default:
      return true;
  }
}
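// Illustrative example (not part of the original header): classifying how
// control can leave the instruction at an assumed `pc`, combining
// BytecodeFallsThrough with the jump predicate above.
//
//   JSOp op = JSOp(*pc);
//   bool fallsThrough = BytecodeFallsThrough(op);  // next pc is a successor
//   bool mayJump = IsJumpOpcode(op);  // pc + GET_JUMP_OFFSET(pc) is a target
//   // A conditional jump has both; JSOp::Goto only jumps; most other ops only
//   // fall through.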
static inline bool BytecodeIsJumpTarget(JSOp op) {
  switch (op) {
    case JSOp::JumpTarget:
    case JSOp::LoopHead:
    case JSOp::AfterYield:
      return true;
    default:
      return false;
  }
}
// The JSOp argument is superfluous, but we are using it to avoid a
// store-forwarding bug on some Android phones; see bug 1833315.
MOZ_ALWAYS_INLINE unsigned StackUses(JSOp op, jsbytecode* pc) {
  MOZ_ASSERT(op == JSOp(*pc));
  int nuses = CodeSpec(op).nuses;
  if (nuses >= 0) {
    return nuses;
  }

  MOZ_ASSERT(nuses == -1);
  switch (op) {
    case JSOp::PopN:
      return GET_UINT16(pc);
    case JSOp::New:
    case JSOp::NewContent:
    case JSOp::SuperCall:
      return 2 + GET_ARGC(pc) + 1;
    default:
      /* stack: fun, this, [argc arguments] */
      MOZ_ASSERT(op == JSOp::Call || op == JSOp::CallContent ||
                 op == JSOp::CallIgnoresRv || op == JSOp::Eval ||
                 op == JSOp::CallIter || op == JSOp::CallContentIter ||
                 op == JSOp::StrictEval);
      return 2 + GET_ARGC(pc);
  }
}
MOZ_ALWAYS_INLINE unsigned StackDefs(JSOp op) {
  int ndefs = CodeSpec(op).ndefs;
  MOZ_ASSERT(ndefs >= 0);
  return ndefs;
}
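// Illustrative example (not part of the original header): computing the net
// operand-stack effect of the instruction at an assumed `pc` from
// StackUses/StackDefs.
//
//   JSOp op = JSOp(*pc);
//   int balance = int(StackDefs(op)) - int(StackUses(op, pc));
//   // `balance` is how much the operand stack depth changes after `op` runs.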
#if defined(DEBUG) || defined(JS_JITSPEW)
/*
 * Given bytecode address pc in script's main program code, compute the operand
 * stack depth just before (JSOp) *pc executes. If *pc is not reachable, return
 * false.
 */
extern bool ReconstructStackDepth(JSContext* cx, JSScript* script,
                                  jsbytecode* pc, uint32_t* depth,
                                  bool* reachablePC);
#endif

} /* namespace js */

#define JSDVG_IGNORE_STACK 0
#define JSDVG_SEARCH_STACK 1
namespace js {

/*
 * Find the source expression that resulted in v, and return a newly allocated
 * C-string containing it. Fall back on v's string conversion (fallback) if we
 * can't find the bytecode that generated and pushed v on the operand stack.
 *
 * Search the current stack frame if spindex is JSDVG_SEARCH_STACK. Don't
 * look for v on the stack if spindex is JSDVG_IGNORE_STACK. Otherwise,
 * spindex is the negative index of v, measured from cx->fp->sp, or from a
 * lower frame's sp if cx->fp is native.
 *
 * The optional argument skipStackHits can be used to skip a hit in the stack
 * frame. This can be useful in self-hosted code that wants to report value
 * errors containing decompiled values that are useful for the user, instead of
 * values used internally by the self-hosted code.
 *
 * The caller must call JS_free on the result after a successful call.
 */
UniqueChars DecompileValueGenerator(JSContext* cx, int spindex, HandleValue v,
                                    HandleString fallback,
                                    int skipStackHits = 0);
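// Illustrative example (not part of the original header): decompiling a value
// for an error message. `cx` and `val` are assumed to be a live JSContext* and
// a handle to a value sitting on the current frame's operand stack.
//
//   UniqueChars bytes =
//       DecompileValueGenerator(cx, JSDVG_SEARCH_STACK, val, nullptr);
//   if (bytes) {
//     // bytes.get() holds the decompiled source expression; it is released
//     // when the UniqueChars goes out of scope.
//   }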
/*
 * Decompile the formal argument at formalIndex in the nearest non-builtin
 * stack frame, falling back on converting v to source if that fails.
 */
JSString* DecompileArgument(JSContext* cx, int formalIndex, HandleValue v);
static inline unsigned GetOpLength(JSOp op) {
  MOZ_ASSERT(uint8_t(op) < JSOP_LIMIT);
  MOZ_ASSERT(CodeSpec(op).length > 0);
  return CodeSpec(op).length;
}

static inline unsigned GetBytecodeLength(const jsbytecode* pc) {
  JSOp op = (JSOp)*pc;
  return GetOpLength(op);
}

static inline bool BytecodeIsPopped(jsbytecode* pc) {
  jsbytecode* next = pc + GetBytecodeLength(pc);
  return JSOp(*next) == JSOp::Pop;
}

extern bool IsValidBytecodeOffset(JSContext* cx, JSScript* script,
                                  size_t offset);
inline bool IsArgOp(JSOp op) { return JOF_OPTYPE(op) == JOF_QARG; }

inline bool IsLocalOp(JSOp op) { return JOF_OPTYPE(op) == JOF_LOCAL; }

inline bool IsAliasedVarOp(JSOp op) { return JOF_OPTYPE(op) == JOF_ENVCOORD; }

inline bool IsGlobalOp(JSOp op) { return CodeSpec(op).format & JOF_GNAME; }

inline bool IsPropertySetOp(JSOp op) {
  return CodeSpec(op).format & JOF_PROPSET;
}

inline bool IsPropertyInitOp(JSOp op) {
  return CodeSpec(op).format & JOF_PROPINIT;
}

inline bool IsLooseEqualityOp(JSOp op) {
  return op == JSOp::Eq || op == JSOp::Ne;
}

inline bool IsStrictEqualityOp(JSOp op) {
  return op == JSOp::StrictEq || op == JSOp::StrictNe;
}

inline bool IsEqualityOp(JSOp op) {
  return IsLooseEqualityOp(op) || IsStrictEqualityOp(op);
}

inline bool IsRelationalOp(JSOp op) {
  return op == JSOp::Lt || op == JSOp::Le || op == JSOp::Gt || op == JSOp::Ge;
}

inline bool IsCheckStrictOp(JSOp op) {
  return CodeSpec(op).format & JOF_CHECKSTRICT;
}

inline bool IsNameOp(JSOp op) { return CodeSpec(op).format & JOF_NAME; }

#ifdef DEBUG
inline bool IsCheckSloppyOp(JSOp op) {
  return CodeSpec(op).format & JOF_CHECKSLOPPY;
}
#endif

inline bool IsAtomOp(JSOp op) { return JOF_OPTYPE(op) == JOF_ATOM; }

inline bool IsGetPropOp(JSOp op) { return op == JSOp::GetProp; }

inline bool IsGetPropPC(const jsbytecode* pc) { return IsGetPropOp(JSOp(*pc)); }

inline bool IsHiddenInitOp(JSOp op) {
  return op == JSOp::InitHiddenProp || op == JSOp::InitHiddenElem ||
         op == JSOp::InitHiddenPropGetter || op == JSOp::InitHiddenElemGetter ||
         op == JSOp::InitHiddenPropSetter || op == JSOp::InitHiddenElemSetter;
}

inline bool IsLockedInitOp(JSOp op) {
  return op == JSOp::InitLockedProp || op == JSOp::InitLockedElem;
}

inline bool IsStrictSetPC(jsbytecode* pc) {
  JSOp op = JSOp(*pc);
  return op == JSOp::StrictSetProp || op == JSOp::StrictSetName ||
         op == JSOp::StrictSetGName || op == JSOp::StrictSetElem;
}

inline bool IsSetPropOp(JSOp op) {
  return op == JSOp::SetProp || op == JSOp::StrictSetProp ||
         op == JSOp::SetName || op == JSOp::StrictSetName ||
         op == JSOp::SetGName || op == JSOp::StrictSetGName;
}

inline bool IsSetPropPC(const jsbytecode* pc) { return IsSetPropOp(JSOp(*pc)); }

inline bool IsGetElemOp(JSOp op) { return op == JSOp::GetElem; }

inline bool IsGetElemPC(const jsbytecode* pc) { return IsGetElemOp(JSOp(*pc)); }

inline bool IsSetElemOp(JSOp op) {
  return op == JSOp::SetElem || op == JSOp::StrictSetElem;
}

inline bool IsSetElemPC(const jsbytecode* pc) { return IsSetElemOp(JSOp(*pc)); }

inline bool IsElemPC(const jsbytecode* pc) {
  return CodeSpec(JSOp(*pc)).format & JOF_ELEM;
}

inline bool IsInvokeOp(JSOp op) { return CodeSpec(op).format & JOF_INVOKE; }

inline bool IsInvokePC(jsbytecode* pc) { return IsInvokeOp(JSOp(*pc)); }

inline bool IsStrictEvalPC(jsbytecode* pc) {
  JSOp op = JSOp(*pc);
  return op == JSOp::StrictEval || op == JSOp::StrictSpreadEval;
}

inline bool IsConstructOp(JSOp op) {
  return CodeSpec(op).format & JOF_CONSTRUCT;
}

inline bool IsConstructPC(const jsbytecode* pc) {
  return IsConstructOp(JSOp(*pc));
}

inline bool IsSpreadOp(JSOp op) { return CodeSpec(op).format & JOF_SPREAD; }

inline bool IsSpreadPC(const jsbytecode* pc) { return IsSpreadOp(JSOp(*pc)); }

inline bool OpUsesEnvironmentChain(JSOp op) {
  return CodeSpec(op).format & JOF_USES_ENV;
}
static inline int32_t GetBytecodeInteger(jsbytecode* pc) {
  switch (JSOp(*pc)) {
    case JSOp::Zero:
      return 0;
    case JSOp::One:
      return 1;
    case JSOp::Uint16:
      return GET_UINT16(pc);
    case JSOp::Uint24:
      return GET_UINT24(pc);
    case JSOp::Int8:
      return GET_INT8(pc);
    case JSOp::Int32:
      return GET_INT32(pc);
    default:
      MOZ_CRASH("Bad op");
  }
}

inline bool BytecodeOpHasIC(JSOp op) { return CodeSpec(op).format & JOF_IC; }
inline void GetCheckPrivateFieldOperands(jsbytecode* pc,
                                         ThrowCondition* throwCondition,
                                         ThrowMsgKind* throwKind) {
  static_assert(sizeof(ThrowCondition) == sizeof(uint8_t));
  static_assert(sizeof(ThrowMsgKind) == sizeof(uint8_t));

  MOZ_ASSERT(JSOp(*pc) == JSOp::CheckPrivateField);
  uint8_t throwConditionByte = GET_UINT8(pc);
  uint8_t throwKindByte = GET_UINT8(pc + 1);

  *throwCondition = static_cast<ThrowCondition>(throwConditionByte);
  *throwKind = static_cast<ThrowMsgKind>(throwKindByte);

  MOZ_ASSERT(*throwCondition == ThrowCondition::ThrowHas ||
             *throwCondition == ThrowCondition::ThrowHasNot ||
             *throwCondition == ThrowCondition::OnlyCheckRhs);

  MOZ_ASSERT(*throwKind == ThrowMsgKind::PrivateDoubleInit ||
             *throwKind == ThrowMsgKind::PrivateBrandDoubleInit ||
             *throwKind == ThrowMsgKind::MissingPrivateOnGet ||
             *throwKind == ThrowMsgKind::MissingPrivateOnSet);
}
// Return true iff the combination of the ThrowCondition and hasOwn result
// will throw an exception.
static inline bool CheckPrivateFieldWillThrow(ThrowCondition condition,
                                              bool hasOwn) {
  if ((condition == ThrowCondition::ThrowHasNot && !hasOwn) ||
      (condition == ThrowCondition::ThrowHas && hasOwn)) {
    // Met a throw condition.
    return true;
  }

  return false;
}
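// Illustrative example (not part of the original header): decoding a
// JSOp::CheckPrivateField and deciding whether it will throw, given an assumed
// `pc` pointing at that op and a previously computed `hasOwn` result.
//
//   ThrowCondition condition;
//   ThrowMsgKind msgKind;
//   GetCheckPrivateFieldOperands(pc, &condition, &msgKind);
//   if (CheckPrivateFieldWillThrow(condition, hasOwn)) {
//     // Report the error selected by `msgKind`.
//   }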
/*
 * Counts accumulated for a single opcode in a script. The counts tracked vary
 * between opcodes, and this structure ensures that counts are accessed in a
 * coherent fashion.
 */
class PCCounts {
  /*
   * Offset of the pc inside the script. This field is used to look up opcodes
   * which have annotations.
   */
  size_t pcOffset_;

  /*
   * Records the number of executions of one instruction, or the number of
   * throws executed.
   */
  uint64_t numExec_;

 public:
  explicit PCCounts(size_t off) : pcOffset_(off), numExec_(0) {}

  size_t pcOffset() const { return pcOffset_; }

  // Used for sorting and searching.
  bool operator<(const PCCounts& rhs) const {
    return pcOffset_ < rhs.pcOffset_;
  }

  uint64_t& numExec() { return numExec_; }
  uint64_t numExec() const { return numExec_; }

  static const char numExecName[];
};
static inline jsbytecode* GetNextPc(jsbytecode* pc) {
  return pc + GetBytecodeLength(pc);
}
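// Illustrative example (not part of the original header): walking a script's
// bytecode with GetNextPc. Assumes `script` is a JSScript* exposing the usual
// code()/codeEnd() accessors.
//
//   for (jsbytecode* pc = script->code(); pc < script->codeEnd();
//        pc = GetNextPc(pc)) {
//     JSOp op = JSOp(*pc);
//     // Inspect `op`, its immediates, stack effects, etc.
//   }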
inline GeneratorResumeKind IntToResumeKind(int32_t value) {
  MOZ_ASSERT(uint32_t(value) <= uint32_t(GeneratorResumeKind::Return));
  return static_cast<GeneratorResumeKind>(value);
}

inline GeneratorResumeKind ResumeKindFromPC(jsbytecode* pc) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::ResumeKind);
  return IntToResumeKind(GET_UINT8(pc));
}
#if defined(DEBUG) || defined(JS_JITSPEW)

enum class DisassembleSkeptically { No, Yes };

/*
 * Disassemblers, for debugging only.
 */
[[nodiscard]] extern bool Disassemble(
    JSContext* cx, JS::Handle<JSScript*> script, bool lines, StringPrinter* sp,
    DisassembleSkeptically skeptically = DisassembleSkeptically::No);

unsigned Disassemble1(JSContext* cx, JS::Handle<JSScript*> script,
                      jsbytecode* pc, unsigned loc, bool lines,
                      StringPrinter* sp);

#endif

[[nodiscard]] extern bool DumpRealmPCCounts(JSContext* cx);

}  // namespace js

#endif /* vm_BytecodeUtil_h */